diff --git a/benchmarks/django-workers/manage.py b/benchmarks/django-workers/manage.py index 3da56b35b..8005262e9 100755 --- a/benchmarks/django-workers/manage.py +++ b/benchmarks/django-workers/manage.py @@ -6,15 +6,17 @@ def main(): """Run administrative tasks.""" - os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'mysite.settings') + os.environ.setdefault("DJANGO_SETTINGS_MODULE", "mysite.settings") try: from django.core.management import execute_from_command_line except ImportError as exc: - raise ImportError("Couldn't import Django. Are you sure it's installed and " - 'available on your PYTHONPATH environment variable? Did you ' - 'forget to activate a virtual environment?') from exc + raise ImportError( + "Couldn't import Django. Are you sure it's installed and " + "available on your PYTHONPATH environment variable? Did you " + "forget to activate a virtual environment?" + ) from exc execute_from_command_line(sys.argv) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/benchmarks/django-workers/myapp/apps.py b/benchmarks/django-workers/myapp/apps.py index c34fb20eb..da45bfa47 100644 --- a/benchmarks/django-workers/myapp/apps.py +++ b/benchmarks/django-workers/myapp/apps.py @@ -2,5 +2,5 @@ class MyappConfig(AppConfig): - default_auto_field = 'django.db.models.BigAutoField' - name = 'myapp' + default_auto_field = "django.db.models.BigAutoField" + name = "myapp" diff --git a/benchmarks/django-workers/myapp/migrations/0001_initial.py b/benchmarks/django-workers/myapp/migrations/0001_initial.py index 914bd97b1..8b02b79ff 100644 --- a/benchmarks/django-workers/myapp/migrations/0001_initial.py +++ b/benchmarks/django-workers/myapp/migrations/0001_initial.py @@ -11,11 +11,11 @@ class Migration(migrations.Migration): operations = [ migrations.CreateModel( - name='MyModel', + name="MyModel", fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('name', models.CharField(max_length=100)), - ('value', models.IntegerField()), + ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("name", models.CharField(max_length=100)), + ("value", models.IntegerField()), ], ), ] diff --git a/benchmarks/django-workers/myapp/views.py b/benchmarks/django-workers/myapp/views.py index 01f5c91eb..67b9c2458 100644 --- a/benchmarks/django-workers/myapp/views.py +++ b/benchmarks/django-workers/myapp/views.py @@ -15,7 +15,7 @@ aserialize = sync_to_async(serialize) -res = requests.get('https://jsonplaceholder.typicode.com/posts') +res = requests.get("https://jsonplaceholder.typicode.com/posts") stored = res.content @@ -26,126 +26,126 @@ async def async_range(count): async def async_seed(request: HttpRequest, id: Optional[int] = None): async for i in async_range(30): - await MyModel.objects.aget_or_create(name=f'name{i}', value=i) + await MyModel.objects.aget_or_create(name=f"name{i}", value=i) - return JsonResponse({'status': 'ok'}) + return JsonResponse({"status": "ok"}) def json_view(request: HttpRequest, id: Optional[int] = None): if id is not None: - return JsonResponse({'name': 'name', 'value': 1}) + return JsonResponse({"name": "name", "value": 1}) - return JsonResponse([{'name': 'name', 'value': 1}], safe=False) + return JsonResponse([{"name": "name", "value": 1}], safe=False) async def async_json_view(request: HttpRequest, id: Optional[int] = None): if id is not None: - return JsonResponse({'name': 'name', 'value': 1}) + return JsonResponse({"name": "name", "value": 1}) - return 
JsonResponse([{'name': 'name', 'value': 1}], safe=False) + return JsonResponse([{"name": "name", "value": 1}], safe=False) def json_query_view(request: HttpRequest, id: Optional[int] = None): if id is not None: res = MyModel.objects.aget(id=id) if res is None: - raise HttpResponseNotFound('not found') + raise HttpResponseNotFound("not found") - return JsonResponse({'name': res.name, 'value': res.value}) + return JsonResponse({"name": res.name, "value": res.value}) keys = request.GET.keys() params = {k: request.GET[k] for k in keys} l = MyModel.objects.filter(**params) - return JsonResponse([{'name': x.name, 'value': x.value} for x in l], safe=False) + return JsonResponse([{"name": x.name, "value": x.value} for x in l], safe=False) async def async_json_query_view(request: HttpRequest, id: Optional[int] = None): if id is not None: res = await MyModel.objects.aget(id=id) if res is None: - raise HttpResponseNotFound('not found') + raise HttpResponseNotFound("not found") - return JsonResponse({'name': res.name, 'value': res.value}) + return JsonResponse({"name": res.name, "value": res.value}) keys = request.GET.keys() params = {k: request.GET[k] for k in keys} l = MyModel.objects.filter(**params) - return JsonResponse([{'name': x.name, 'value': x.value} async for x in l], safe=False) + return JsonResponse([{"name": x.name, "value": x.value} async for x in l], safe=False) def template_view(request: HttpRequest): l = MyModel.objects.filter() - return render(request, 'my_template.html', {'my_objects': l}) + return render(request, "my_template.html", {"my_objects": l}) async def async_template_view(request: HttpRequest): l = MyModel.objects.filter() - return render(request, 'my_template.html', {'my_objects': [x async for x in l]}) + return render(request, "my_template.html", {"my_objects": [x async for x in l]}) def gateway_1s_view(request: HttpRequest): # eventbrite bug time.sleep(1) - return JsonResponse({'status': 'ok'}) + return JsonResponse({"status": "ok"}) async def async_gateway_1s_view(request: HttpRequest): # eventbrite bug await asyncio.sleep(1) - return JsonResponse({'status': 'ok'}) + return JsonResponse({"status": "ok"}) def gateway_3s_view(request: HttpRequest): # eventbrite bug time.sleep(3) - return JsonResponse({'status': 'ok'}) + return JsonResponse({"status": "ok"}) async def async_gateway_3s_view(request: HttpRequest): # eventbrite bug await asyncio.sleep(3) - return JsonResponse({'status': 'ok'}) + return JsonResponse({"status": "ok"}) def gateway_10s_view(request: HttpRequest): # eventbrite bug time.sleep(10) - return JsonResponse({'status': 'ok'}) + return JsonResponse({"status": "ok"}) async def async_gateway_10s_view(request: HttpRequest): # eventbrite bug await asyncio.sleep(10) - return JsonResponse({'status': 'ok'}) + return JsonResponse({"status": "ok"}) def requests_view(request: HttpRequest): # eventbrite bug try: - response = requests.get('https://jsonplaceholder.typicode.com/posts', timeout=60) + response = requests.get("https://jsonplaceholder.typicode.com/posts", timeout=60) json = response.json() except Exception: - return HttpResponse('timeout', status=408) + return HttpResponse("timeout", status=408) return JsonResponse(json, safe=False) async def async_requests_view(request: HttpRequest): # eventbrite bug try: - response = requests.get('https://jsonplaceholder.typicode.com/posts', timeout=60) + response = requests.get("https://jsonplaceholder.typicode.com/posts", timeout=60) json = response.json() except Exception: - return HttpResponse('timeout', status=408) 
+ return HttpResponse("timeout", status=408) return JsonResponse(json, safe=False) def httpx_view(request: HttpRequest): # eventbrite bug try: - response = httpx.get('https://jsonplaceholder.typicode.com/posts', timeout=60) + response = httpx.get("https://jsonplaceholder.typicode.com/posts", timeout=60) json = response.json() except Exception: - return HttpResponse('timeout', status=408) + return HttpResponse("timeout", status=408) return JsonResponse(json, safe=False) @@ -153,10 +153,10 @@ async def async_httpx_view(request: HttpRequest): # eventbrite bug async with httpx.AsyncClient(http2=True) as client: try: - response = await client.get('https://jsonplaceholder.typicode.com/posts', timeout=60) + response = await client.get("https://jsonplaceholder.typicode.com/posts", timeout=60) json = response.json() except Exception: - return HttpResponse('timeout', status=408) + return HttpResponse("timeout", status=408) return JsonResponse(json, safe=False) @@ -165,11 +165,11 @@ async def async_aiohttp_view(request: HttpRequest): # eventbrite bug async with aiohttp.ClientSession() as session: try: - async with session.get('https://jsonplaceholder.typicode.com/posts', timeout=60) as response: + async with session.get("https://jsonplaceholder.typicode.com/posts", timeout=60) as response: json = await response.json() return JsonResponse(json, safe=False) except Exception: - return HttpResponse('timeout', status=408) + return HttpResponse("timeout", status=408) def brotli_view(request: HttpRequest): @@ -185,22 +185,22 @@ async def async_brotli_view(request: HttpRequest): def fake_cache_hit_view(request: HttpRequest): # latency issue time.sleep(0.02) - return JsonResponse({'status': 'ok'}) + return JsonResponse({"status": "ok"}) async def async_fake_cache_hit_view(request: HttpRequest): # latency issue await asyncio.sleep(0.02) - return JsonResponse({'status': 'ok'}) + return JsonResponse({"status": "ok"}) def fake_cache_set_view(request: HttpRequest): # with brotli time.sleep(0.2) - return JsonResponse({'status': 'ok'}) + return JsonResponse({"status": "ok"}) async def async_fake_cache_set_view(request: HttpRequest): # with brotli await asyncio.sleep(0.2) - return JsonResponse({'status': 'ok'}) + return JsonResponse({"status": "ok"}) diff --git a/benchmarks/django-workers/mysite/asgi.py b/benchmarks/django-workers/mysite/asgi.py index b400c194d..2140b37ba 100644 --- a/benchmarks/django-workers/mysite/asgi.py +++ b/benchmarks/django-workers/mysite/asgi.py @@ -11,7 +11,7 @@ from django.core.asgi import get_asgi_application -os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'mysite.settings') +os.environ.setdefault("DJANGO_SETTINGS_MODULE", "mysite.settings") application = get_asgi_application() app = application diff --git a/benchmarks/django-workers/mysite/settings.py b/benchmarks/django-workers/mysite/settings.py index 18c1e216a..d24ee319d 100644 --- a/benchmarks/django-workers/mysite/settings.py +++ b/benchmarks/django-workers/mysite/settings.py @@ -19,7 +19,7 @@ # See https://docs.djangoproject.com/en/5.0/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! -SECRET_KEY = 'django-insecure-2y^i^mk2ofr)=*!hu-quhej7q(ji3bv55*h58*6n^%t-#tq@5w' +SECRET_KEY = "django-insecure-2y^i^mk2ofr)=*!hu-quhej7q(ji3bv55*h58*6n^%t-#tq@5w" # SECURITY WARNING: don't run with debug turned on in production! 
DEBUG = False @@ -29,52 +29,52 @@ # Application definition INSTALLED_APPS = [ - 'django.contrib.admin', - 'django.contrib.auth', - 'django.contrib.contenttypes', - 'django.contrib.sessions', - 'django.contrib.messages', - 'django.contrib.staticfiles', - 'myapp', + "django.contrib.admin", + "django.contrib.auth", + "django.contrib.contenttypes", + "django.contrib.sessions", + "django.contrib.messages", + "django.contrib.staticfiles", + "myapp", ] MIDDLEWARE = [ - 'django.middleware.security.SecurityMiddleware', - 'django.contrib.sessions.middleware.SessionMiddleware', - 'django.middleware.common.CommonMiddleware', - 'django.middleware.csrf.CsrfViewMiddleware', - 'django.contrib.auth.middleware.AuthenticationMiddleware', - 'django.contrib.messages.middleware.MessageMiddleware', - 'django.middleware.clickjacking.XFrameOptionsMiddleware', + "django.middleware.security.SecurityMiddleware", + "django.contrib.sessions.middleware.SessionMiddleware", + "django.middleware.common.CommonMiddleware", + "django.middleware.csrf.CsrfViewMiddleware", + "django.contrib.auth.middleware.AuthenticationMiddleware", + "django.contrib.messages.middleware.MessageMiddleware", + "django.middleware.clickjacking.XFrameOptionsMiddleware", ] -ROOT_URLCONF = 'mysite.urls' +ROOT_URLCONF = "mysite.urls" TEMPLATES = [ { - 'BACKEND': 'django.template.backends.django.DjangoTemplates', - 'DIRS': [], - 'APP_DIRS': True, - 'OPTIONS': { - 'context_processors': [ - 'django.template.context_processors.debug', - 'django.template.context_processors.request', - 'django.contrib.auth.context_processors.auth', - 'django.contrib.messages.context_processors.messages', + "BACKEND": "django.template.backends.django.DjangoTemplates", + "DIRS": [], + "APP_DIRS": True, + "OPTIONS": { + "context_processors": [ + "django.template.context_processors.debug", + "django.template.context_processors.request", + "django.contrib.auth.context_processors.auth", + "django.contrib.messages.context_processors.messages", ], }, }, ] -WSGI_APPLICATION = 'mysite.wsgi.application' +WSGI_APPLICATION = "mysite.wsgi.application" # Database # https://docs.djangoproject.com/en/5.0/ref/settings/#databases DATABASES = { - 'default': { - 'ENGINE': 'django.db.backends.sqlite3', - 'NAME': BASE_DIR / 'db.sqlite3', + "default": { + "ENGINE": "django.db.backends.sqlite3", + "NAME": BASE_DIR / "db.sqlite3", } } @@ -83,25 +83,25 @@ AUTH_PASSWORD_VALIDATORS = [ { - 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', + "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator", }, { - 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', + "NAME": "django.contrib.auth.password_validation.MinimumLengthValidator", }, { - 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', + "NAME": "django.contrib.auth.password_validation.CommonPasswordValidator", }, { - 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', + "NAME": "django.contrib.auth.password_validation.NumericPasswordValidator", }, ] # Internationalization # https://docs.djangoproject.com/en/5.0/topics/i18n/ -LANGUAGE_CODE = 'en-us' +LANGUAGE_CODE = "en-us" -TIME_ZONE = 'UTC' +TIME_ZONE = "UTC" USE_I18N = True @@ -110,10 +110,10 @@ # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/5.0/howto/static-files/ -STATIC_URL = 'static/' +STATIC_URL = "static/" # Default primary key field type # https://docs.djangoproject.com/en/5.0/ref/settings/#default-auto-field -DEFAULT_AUTO_FIELD = 
'django.db.models.BigAutoField' -ALLOWED_HOSTS = ['*'] +DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField" +ALLOWED_HOSTS = ["*"] diff --git a/benchmarks/django-workers/mysite/urls.py b/benchmarks/django-workers/mysite/urls.py index 02cfbe846..b19dde682 100644 --- a/benchmarks/django-workers/mysite/urls.py +++ b/benchmarks/django-workers/mysite/urls.py @@ -14,38 +14,39 @@ 1. Import the include() function: from django.urls import include, path 2. Add a URL to urlpatterns: path('blog/', include('blog.urls')) """ + import myapp.views from django.contrib import admin from django.urls import path urlpatterns = [ - path('admin/', admin.site.urls), - path('myapp/sync/json', myapp.views.json_view), - path('myapp/sync/json/', myapp.views.json_view), - path('myapp/sync/json_query', myapp.views.json_query_view), - path('myapp/sync/json_query/', myapp.views.json_query_view), - path('myapp/sync/template_query', myapp.views.template_view), - path('myapp/sync/gateway_1s', myapp.views.gateway_1s_view), - path('myapp/sync/gateway_3s', myapp.views.gateway_3s_view), - path('myapp/sync/gateway_10s', myapp.views.gateway_10s_view), - path('myapp/sync/requests', myapp.views.requests_view), - path('myapp/sync/httpx', myapp.views.httpx_view), - path('myapp/sync/brotli', myapp.views.brotli_view), - path('myapp/sync/cache_hit', myapp.views.fake_cache_hit_view), - path('myapp/sync/cache_set', myapp.views.fake_cache_set_view), - path('myapp/async/seed', myapp.views.async_seed), - path('myapp/async/json', myapp.views.async_json_view), - path('myapp/async/json/', myapp.views.async_json_view), - path('myapp/async/json_query', myapp.views.async_json_query_view), - path('myapp/async/json_query/', myapp.views.async_json_query_view), - path('myapp/async/template_query', myapp.views.async_template_view), - path('myapp/async/gateway_1s', myapp.views.async_gateway_1s_view), - path('myapp/async/gateway_3s', myapp.views.async_gateway_1s_view), - path('myapp/async/gateway_10s', myapp.views.async_gateway_10s_view), - path('myapp/async/requests', myapp.views.async_requests_view), - path('myapp/async/httpx', myapp.views.async_httpx_view), - path('myapp/async/aiohttp', myapp.views.async_aiohttp_view), - path('myapp/async/brotli', myapp.views.async_brotli_view), - path('myapp/async/cache_hit', myapp.views.async_fake_cache_hit_view), - path('myapp/async/cache_set', myapp.views.async_fake_cache_set_view), + path("admin/", admin.site.urls), + path("myapp/sync/json", myapp.views.json_view), + path("myapp/sync/json/", myapp.views.json_view), + path("myapp/sync/json_query", myapp.views.json_query_view), + path("myapp/sync/json_query/", myapp.views.json_query_view), + path("myapp/sync/template_query", myapp.views.template_view), + path("myapp/sync/gateway_1s", myapp.views.gateway_1s_view), + path("myapp/sync/gateway_3s", myapp.views.gateway_3s_view), + path("myapp/sync/gateway_10s", myapp.views.gateway_10s_view), + path("myapp/sync/requests", myapp.views.requests_view), + path("myapp/sync/httpx", myapp.views.httpx_view), + path("myapp/sync/brotli", myapp.views.brotli_view), + path("myapp/sync/cache_hit", myapp.views.fake_cache_hit_view), + path("myapp/sync/cache_set", myapp.views.fake_cache_set_view), + path("myapp/async/seed", myapp.views.async_seed), + path("myapp/async/json", myapp.views.async_json_view), + path("myapp/async/json/", myapp.views.async_json_view), + path("myapp/async/json_query", myapp.views.async_json_query_view), + path("myapp/async/json_query/", myapp.views.async_json_query_view), + path("myapp/async/template_query", 
myapp.views.async_template_view), + path("myapp/async/gateway_1s", myapp.views.async_gateway_1s_view), + path("myapp/async/gateway_3s", myapp.views.async_gateway_1s_view), + path("myapp/async/gateway_10s", myapp.views.async_gateway_10s_view), + path("myapp/async/requests", myapp.views.async_requests_view), + path("myapp/async/httpx", myapp.views.async_httpx_view), + path("myapp/async/aiohttp", myapp.views.async_aiohttp_view), + path("myapp/async/brotli", myapp.views.async_brotli_view), + path("myapp/async/cache_hit", myapp.views.async_fake_cache_hit_view), + path("myapp/async/cache_set", myapp.views.async_fake_cache_set_view), ] diff --git a/benchmarks/django-workers/mysite/wsgi.py b/benchmarks/django-workers/mysite/wsgi.py index 192503749..f7d2a956f 100644 --- a/benchmarks/django-workers/mysite/wsgi.py +++ b/benchmarks/django-workers/mysite/wsgi.py @@ -11,6 +11,6 @@ from django.core.wsgi import get_wsgi_application -os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'mysite.settings') +os.environ.setdefault("DJANGO_SETTINGS_MODULE", "mysite.settings") application = get_wsgi_application() diff --git a/benchmarks/http/async.py b/benchmarks/http/async.py index 5f548f74e..c8759ca06 100644 --- a/benchmarks/http/async.py +++ b/benchmarks/http/async.py @@ -5,7 +5,7 @@ import httpx times = 1000 -url = 'https://www.google.com' +url = "https://www.google.com" timeout = 100000 @@ -20,14 +20,14 @@ async def main(): tasks = [httpx_client.get(url, timeout=timeout) for _ in range(times)] await asyncio.gather(*tasks) end_time = time.perf_counter() - print(f'HTTPX: {end_time - start_time:.2f} seconds') + print(f"HTTPX: {end_time - start_time:.2f} seconds") # Send 100 asynchronous GET requests using AIOHTTP start_time = time.perf_counter() tasks = [aiohttp_client.get(url, timeout=timeout) for _ in range(times)] await asyncio.gather(*tasks) end_time = time.perf_counter() - print(f'AIOHTTP: {end_time - start_time:.2f} seconds') + print(f"AIOHTTP: {end_time - start_time:.2f} seconds") finally: # Close client sessions await aiohttp_client.close() diff --git a/benchmarks/http/async2.py b/benchmarks/http/async2.py index db3e52fb5..05abcacc7 100644 --- a/benchmarks/http/async2.py +++ b/benchmarks/http/async2.py @@ -7,26 +7,26 @@ timeout = 100000 url_list = [ - 'https://jsonplaceholder.typicode.com/posts', - 'https://api.thedogapi.com/v1/breeds', - 'https://pokeapi.co/api/v2/pokemon/1/', - 'https://jsonplaceholder.typicode.com/comments', - 'https://jsonplaceholder.typicode.com/users', - 'https://dog.ceo/api/breeds/image/random', - 'https://pokeapi.co/api/v2/pokemon/25/', - 'https://api.adviceslip.com/advice', - 'https://catfact.ninja/fact', - 'https://api.thecatapi.com/v1/breeds', - 'https://api.chucknorris.io/jokes/random', - 'https://official-joke-api.appspot.com/jokes/random', - 'https://jsonplaceholder.typicode.com/photos', - 'https://jsonplaceholder.typicode.com/todos', - 'https://jsonplaceholder.typicode.com/albums', - 'https://api.thecatapi.com/v1/images/search?limit=5', - 'https://api.adviceslip.com/advice/search/cats', - 'https://api.thedogapi.com/v1/breeds?limit=5', - 'https://pokeapi.co/api/v2/ability/1/', - 'https://api.chucknorris.io/jokes/categories', + "https://jsonplaceholder.typicode.com/posts", + "https://api.thedogapi.com/v1/breeds", + "https://pokeapi.co/api/v2/pokemon/1/", + "https://jsonplaceholder.typicode.com/comments", + "https://jsonplaceholder.typicode.com/users", + "https://dog.ceo/api/breeds/image/random", + "https://pokeapi.co/api/v2/pokemon/25/", + "https://api.adviceslip.com/advice", + 
"https://catfact.ninja/fact", + "https://api.thecatapi.com/v1/breeds", + "https://api.chucknorris.io/jokes/random", + "https://official-joke-api.appspot.com/jokes/random", + "https://jsonplaceholder.typicode.com/photos", + "https://jsonplaceholder.typicode.com/todos", + "https://jsonplaceholder.typicode.com/albums", + "https://api.thecatapi.com/v1/images/search?limit=5", + "https://api.adviceslip.com/advice/search/cats", + "https://api.thedogapi.com/v1/breeds?limit=5", + "https://pokeapi.co/api/v2/ability/1/", + "https://api.chucknorris.io/jokes/categories", ] @@ -41,14 +41,14 @@ async def main(): tasks = [httpx_client.get(url, timeout=timeout) for url in url_list] await asyncio.gather(*tasks) end_time = time.perf_counter() - print(f'HTTPX: {end_time - start_time:.2f} seconds') + print(f"HTTPX: {end_time - start_time:.2f} seconds") # Send 100 asynchronous GET requests using AIOHTTP start_time = time.perf_counter() tasks = [aiohttp_client.get(url, timeout=timeout) for url in url_list] await asyncio.gather(*tasks) end_time = time.perf_counter() - print(f'AIOHTTP: {end_time - start_time:.2f} seconds') + print(f"AIOHTTP: {end_time - start_time:.2f} seconds") finally: # Close client sessions await aiohttp_client.close() diff --git a/benchmarks/http/check.py b/benchmarks/http/check.py index 8e2f392d2..bb5c5daef 100644 --- a/benchmarks/http/check.py +++ b/benchmarks/http/check.py @@ -1,32 +1,32 @@ import requests url_list = [ - 'https://jsonplaceholder.typicode.com/posts', - 'https://api.thedogapi.com/v1/breeds', - 'https://pokeapi.co/api/v2/pokemon/1/', - 'https://jsonplaceholder.typicode.com/comments', - 'https://jsonplaceholder.typicode.com/users', - 'https://dog.ceo/api/breeds/image/random', - 'https://pokeapi.co/api/v2/pokemon/25/', - 'https://api.adviceslip.com/advice', - 'https://catfact.ninja/fact', - 'https://api.thecatapi.com/v1/breeds', - 'https://api.chucknorris.io/jokes/random', - 'https://official-joke-api.appspot.com/jokes/random', - 'https://jsonplaceholder.typicode.com/photos', - 'https://jsonplaceholder.typicode.com/todos', - 'https://jsonplaceholder.typicode.com/albums', - 'https://api.thecatapi.com/v1/images/search?limit=5', - 'https://api.adviceslip.com/advice/search/cats', - 'https://api.thedogapi.com/v1/breeds?limit=5', - 'https://pokeapi.co/api/v2/ability/1/', - 'https://api.chucknorris.io/jokes/categories', + "https://jsonplaceholder.typicode.com/posts", + "https://api.thedogapi.com/v1/breeds", + "https://pokeapi.co/api/v2/pokemon/1/", + "https://jsonplaceholder.typicode.com/comments", + "https://jsonplaceholder.typicode.com/users", + "https://dog.ceo/api/breeds/image/random", + "https://pokeapi.co/api/v2/pokemon/25/", + "https://api.adviceslip.com/advice", + "https://catfact.ninja/fact", + "https://api.thecatapi.com/v1/breeds", + "https://api.chucknorris.io/jokes/random", + "https://official-joke-api.appspot.com/jokes/random", + "https://jsonplaceholder.typicode.com/photos", + "https://jsonplaceholder.typicode.com/todos", + "https://jsonplaceholder.typicode.com/albums", + "https://api.thecatapi.com/v1/images/search?limit=5", + "https://api.adviceslip.com/advice/search/cats", + "https://api.thedogapi.com/v1/breeds?limit=5", + "https://pokeapi.co/api/v2/ability/1/", + "https://api.chucknorris.io/jokes/categories", ] for url in url_list: try: response = requests.get(url) response.raise_for_status() # Raise an HTTPError for bad responses - print(f'Success: {url} - Status Code: {response.status_code}') + print(f"Success: {url} - Status Code: {response.status_code}") except 
requests.exceptions.RequestException as e: - print(f'Error: {url} - {e}') + print(f"Error: {url} - {e}") diff --git a/benchmarks/http/sync.py b/benchmarks/http/sync.py index 7eebfc4e3..2926888fb 100644 --- a/benchmarks/http/sync.py +++ b/benchmarks/http/sync.py @@ -4,7 +4,7 @@ import requests times = 100 -url = 'https://www.google.com' +url = "https://www.google.com" timeout = 100000 @@ -21,7 +21,7 @@ def main(): secs = t2 - t1 father_time.append(secs) - print('HTTPX: ', sum(father_time)) + print("HTTPX: ", sum(father_time)) father_time = [] for url in url_list: @@ -31,8 +31,8 @@ def main(): secs = t2 - t1 father_time.append(secs) - print('REQUESTS: ', sum(father_time)) + print("REQUESTS: ", sum(father_time)) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/benchmarks/http/sync2.py b/benchmarks/http/sync2.py index 5c9a033d3..788d69491 100644 --- a/benchmarks/http/sync2.py +++ b/benchmarks/http/sync2.py @@ -6,26 +6,26 @@ timeout = 100000 url_list = [ - 'https://jsonplaceholder.typicode.com/posts', - 'https://api.thedogapi.com/v1/breeds', - 'https://pokeapi.co/api/v2/pokemon/1/', - 'https://jsonplaceholder.typicode.com/comments', - 'https://jsonplaceholder.typicode.com/users', - 'https://dog.ceo/api/breeds/image/random', - 'https://pokeapi.co/api/v2/pokemon/25/', - 'https://api.adviceslip.com/advice', - 'https://catfact.ninja/fact', - 'https://api.thecatapi.com/v1/breeds', - 'https://api.chucknorris.io/jokes/random', - 'https://official-joke-api.appspot.com/jokes/random', - 'https://jsonplaceholder.typicode.com/photos', - 'https://jsonplaceholder.typicode.com/todos', - 'https://jsonplaceholder.typicode.com/albums', - 'https://api.thecatapi.com/v1/images/search?limit=5', - 'https://api.adviceslip.com/advice/search/cats', - 'https://api.thedogapi.com/v1/breeds?limit=5', - 'https://pokeapi.co/api/v2/ability/1/', - 'https://api.chucknorris.io/jokes/categories', + "https://jsonplaceholder.typicode.com/posts", + "https://api.thedogapi.com/v1/breeds", + "https://pokeapi.co/api/v2/pokemon/1/", + "https://jsonplaceholder.typicode.com/comments", + "https://jsonplaceholder.typicode.com/users", + "https://dog.ceo/api/breeds/image/random", + "https://pokeapi.co/api/v2/pokemon/25/", + "https://api.adviceslip.com/advice", + "https://catfact.ninja/fact", + "https://api.thecatapi.com/v1/breeds", + "https://api.chucknorris.io/jokes/random", + "https://official-joke-api.appspot.com/jokes/random", + "https://jsonplaceholder.typicode.com/photos", + "https://jsonplaceholder.typicode.com/todos", + "https://jsonplaceholder.typicode.com/albums", + "https://api.thecatapi.com/v1/images/search?limit=5", + "https://api.adviceslip.com/advice/search/cats", + "https://api.thedogapi.com/v1/breeds?limit=5", + "https://pokeapi.co/api/v2/ability/1/", + "https://api.chucknorris.io/jokes/categories", ] @@ -41,7 +41,7 @@ def main(): secs = t2 - t1 father_time.append(secs) - print('HTTPX: ', sum(father_time)) + print("HTTPX: ", sum(father_time)) father_time = [] for url in url_list: @@ -51,8 +51,8 @@ def main(): secs = t2 - t1 father_time.append(secs) - print('REQUESTS: ', sum(father_time)) + print("REQUESTS: ", sum(father_time)) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/breathecode/__init__.py b/breathecode/__init__.py index 4f6da4cbc..0165ba0dd 100644 --- a/breathecode/__init__.py +++ b/breathecode/__init__.py @@ -4,4 +4,4 @@ # Django starts so that shared_task will use this app. 
from .celery import app as celery_app -__all__ = ('celery_app', ) +__all__ = ("celery_app",) diff --git a/breathecode/activity/actions.py b/breathecode/activity/actions.py index ffa80375b..b032fe946 100644 --- a/breathecode/activity/actions.py +++ b/breathecode/activity/actions.py @@ -7,45 +7,45 @@ from task_manager.core.exceptions import AbortTask, RetryTask ALLOWED_TYPES = { - 'auth.UserInvite': [ - 'invite_created', - 'invite_status_updated', + "auth.UserInvite": [ + "invite_created", + "invite_status_updated", ], - 'feedback.Answer': [ - 'nps_answered', + "feedback.Answer": [ + "nps_answered", ], - 'auth.User': [ - 'login', + "auth.User": [ + "login", ], - 'admissions.CohortUser': [ - 'joined_cohort', + "admissions.CohortUser": [ + "joined_cohort", ], - 'assignments.Task': [ - 'open_syllabus_module', - 'read_assignment', - 'assignment_review_status_updated', - 'assignment_status_updated', + "assignments.Task": [ + "open_syllabus_module", + "read_assignment", + "assignment_review_status_updated", + "assignment_status_updated", ], - 'events.EventCheckin': [ - 'event_checkin_created', - 'event_checkin_assisted', + "events.EventCheckin": [ + "event_checkin_created", + "event_checkin_assisted", ], - 'payments.Bag': [ - 'bag_created', + "payments.Bag": [ + "bag_created", ], - 'payments.Subscription': [ - 'checkout_completed', + "payments.Subscription": [ + "checkout_completed", ], - 'payments.PlanFinancing': [ - 'checkout_completed', + "payments.PlanFinancing": [ + "checkout_completed", ], - 'mentorship.MentorshipSession': [ - 'mentoring_session_scheduled', - 'mentorship_session_checkin', - 'mentorship_session_checkout', + "mentorship.MentorshipSession": [ + "mentoring_session_scheduled", + "mentorship_session_checkin", + "mentorship_session_checkout", ], - 'payments.Invoice': [ - 'checkout_completed', + "payments.Invoice": [ + "checkout_completed", ], } @@ -57,207 +57,205 @@ def _get_query(related_id: Optional[str | int] = None, related_slug: Optional[st kwargs = {} if related_id: - kwargs['pk'] = related_id + kwargs["pk"] = related_id if related_slug: - kwargs['slug'] = related_slug + kwargs["slug"] = related_slug return kwargs @classmethod - def user_invite(cls, - kind: str, - related_id: Optional[str | int] = None, - related_slug: Optional[str] = None) -> dict[str, Any]: + def user_invite( + cls, kind: str, related_id: Optional[str | int] = None, related_slug: Optional[str] = None + ) -> dict[str, Any]: from breathecode.authenticate.models import UserInvite kwargs = cls._get_query(related_id, related_slug) instance = UserInvite.objects.filter(**kwargs).first() if not instance: - raise RetryTask(f'UserInvite {related_id or related_slug} not found') + raise RetryTask(f"UserInvite {related_id or related_slug} not found") obj = { - 'id': instance.id, - 'email': instance.email, - 'phone': instance.phone, - 'status': instance.status, - 'process_status': instance.process_status, - 'first_name': instance.first_name, - 'last_name': instance.last_name, + "id": instance.id, + "email": instance.email, + "phone": instance.phone, + "status": instance.status, + "process_status": instance.process_status, + "first_name": instance.first_name, + "last_name": instance.last_name, } if instance.author: - obj['author_email'] = instance.author.email - obj['author_username'] = instance.author.username + obj["author_email"] = instance.author.email + obj["author_username"] = instance.author.username if instance.user: - obj['user_email'] = instance.user.email - obj['user_username'] = instance.user.username - 
obj['user_first_name'] = instance.user.first_name - obj['user_last_name'] = instance.user.last_name + obj["user_email"] = instance.user.email + obj["user_username"] = instance.user.username + obj["user_first_name"] = instance.user.first_name + obj["user_last_name"] = instance.user.last_name if instance.role: - obj['role'] = instance.role.slug + obj["role"] = instance.role.slug if instance.academy: - obj['academy'] = instance.academy.id + obj["academy"] = instance.academy.id if instance.cohort: - obj['cohort'] = instance.cohort.id + obj["cohort"] = instance.cohort.id return obj @classmethod - def answer(cls, - kind: str, - related_id: Optional[str | int] = None, - related_slug: Optional[str] = None) -> dict[str, Any]: + def answer( + cls, kind: str, related_id: Optional[str | int] = None, related_slug: Optional[str] = None + ) -> dict[str, Any]: from breathecode.feedback.models import Answer kwargs = cls._get_query(related_id, related_slug) instance = Answer.objects.filter(**kwargs).first() if not instance: - raise RetryTask(f'Answer {related_id or related_slug} not found') + raise RetryTask(f"Answer {related_id or related_slug} not found") obj = { - 'id': instance.id, - 'title': instance.title, - 'lowest': instance.lowest, - 'highest': instance.highest, - 'lang': instance.lang, - 'score': instance.score, - 'comment': instance.comment, - 'status': instance.status, + "id": instance.id, + "title": instance.title, + "lowest": instance.lowest, + "highest": instance.highest, + "lang": instance.lang, + "score": instance.score, + "comment": instance.comment, + "status": instance.status, } if instance.user: - obj['user_email'] = instance.user.email - obj['user_username'] = instance.user.username - obj['user_first_name'] = instance.user.first_name - obj['user_last_name'] = instance.user.last_name + obj["user_email"] = instance.user.email + obj["user_username"] = instance.user.username + obj["user_first_name"] = instance.user.first_name + obj["user_last_name"] = instance.user.last_name if instance.mentor: - obj['mentor_email'] = instance.mentor.email - obj['mentor_username'] = instance.mentor.username - obj['mentor_first_name'] = instance.mentor.first_name - obj['mentor_last_name'] = instance.mentor.last_name + obj["mentor_email"] = instance.mentor.email + obj["mentor_username"] = instance.mentor.username + obj["mentor_first_name"] = instance.mentor.first_name + obj["mentor_last_name"] = instance.mentor.last_name if instance.academy: - obj['academy'] = instance.academy.id + obj["academy"] = instance.academy.id if instance.cohort: - obj['cohort'] = instance.cohort.id + obj["cohort"] = instance.cohort.id if instance.survey: - obj['survey'] = instance.survey.id + obj["survey"] = instance.survey.id if instance.mentorship_session: - obj['mentorship_session'] = instance.mentorship_session.name + obj["mentorship_session"] = instance.mentorship_session.name if instance.event: - obj['event'] = instance.event.slug + obj["event"] = instance.event.slug if instance.opened_at: - obj['opened_at'] = instance.opened_at.strftime('%Y-%m-%dT%H:%M:%S.%fZ') + obj["opened_at"] = instance.opened_at.strftime("%Y-%m-%dT%H:%M:%S.%fZ") if instance.sent_at: - obj['sent_at'] = instance.sent_at.strftime('%Y-%m-%dT%H:%M:%S.%fZ') + obj["sent_at"] = instance.sent_at.strftime("%Y-%m-%dT%H:%M:%S.%fZ") return obj @classmethod - def user(cls, - kind: str, - related_id: Optional[str | int] = None, - related_slug: Optional[str] = None) -> dict[str, Any]: + def user( + cls, kind: str, related_id: Optional[str | int] = None, related_slug: 
Optional[str] = None + ) -> dict[str, Any]: from breathecode.authenticate.models import User kwargs = cls._get_query(related_id, related_slug) instance = User.objects.filter(**kwargs).first() if not instance: - raise RetryTask(f'User {related_id or related_slug} not found') + raise RetryTask(f"User {related_id or related_slug} not found") obj = { - 'id': instance.id, - 'email': instance.email, - 'username': instance.username, + "id": instance.id, + "email": instance.email, + "username": instance.username, } return obj @classmethod - def cohort(cls, - kind: str, - related_id: Optional[str | int] = None, - related_slug: Optional[str] = None) -> dict[str, Any]: + def cohort( + cls, kind: str, related_id: Optional[str | int] = None, related_slug: Optional[str] = None + ) -> dict[str, Any]: from breathecode.admissions.models import Cohort kwargs = cls._get_query(related_id, related_slug) instance = Cohort.objects.filter(**kwargs).first() if not instance: - raise RetryTask(f'Cohort {related_id or related_slug} not found') + raise RetryTask(f"Cohort {related_id or related_slug} not found") - syllabus = (f'{instance.syllabus_version.syllabus.slug}.v{instance.syllabus_version.version}' - if instance.syllabus_version else None) + syllabus = ( + f"{instance.syllabus_version.syllabus.slug}.v{instance.syllabus_version.version}" + if instance.syllabus_version + else None + ) obj = { - 'id': instance.id, - 'slug': instance.slug, - 'name': instance.name, - 'current_day': instance.current_day, - 'current_module': instance.current_module, - 'stage': instance.stage, - 'private': instance.private, - 'accepts_enrollment_suggestions': instance.accepts_enrollment_suggestions, - 'never_ends': instance.never_ends, - 'remote_available': instance.remote_available, - 'online_meeting_url': instance.online_meeting_url, - 'timezone': instance.timezone, - 'syllabus': syllabus, - 'intro_video': instance.intro_video, - 'is_hidden_on_prework': instance.is_hidden_on_prework, - 'available_as_saas': instance.available_as_saas, - 'language': instance.language, + "id": instance.id, + "slug": instance.slug, + "name": instance.name, + "current_day": instance.current_day, + "current_module": instance.current_module, + "stage": instance.stage, + "private": instance.private, + "accepts_enrollment_suggestions": instance.accepts_enrollment_suggestions, + "never_ends": instance.never_ends, + "remote_available": instance.remote_available, + "online_meeting_url": instance.online_meeting_url, + "timezone": instance.timezone, + "syllabus": syllabus, + "intro_video": instance.intro_video, + "is_hidden_on_prework": instance.is_hidden_on_prework, + "available_as_saas": instance.available_as_saas, + "language": instance.language, } if instance.academy: - obj['academy'] = instance.academy.id + obj["academy"] = instance.academy.id if instance.schedule: - obj['schedule'] = instance.schedule.name + obj["schedule"] = instance.schedule.name if instance.kickoff_date: - obj['kickoff_date'] = instance.kickoff_date.strftime('%Y-%m-%dT%H:%M:%S.%fZ') + obj["kickoff_date"] = instance.kickoff_date.strftime("%Y-%m-%dT%H:%M:%S.%fZ") if instance.ending_date: - obj['ending_date'] = instance.ending_date.strftime('%Y-%m-%dT%H:%M:%S.%fZ') + obj["ending_date"] = instance.ending_date.strftime("%Y-%m-%dT%H:%M:%S.%fZ") return obj @classmethod - def cohort_user(cls, - kind: str, - related_id: Optional[str | int] = None, - related_slug: Optional[str] = None) -> dict[str, Any]: + def cohort_user( + cls, kind: str, related_id: Optional[str | int] = None, related_slug: 
Optional[str] = None + ) -> dict[str, Any]: from breathecode.admissions.models import CohortUser kwargs = cls._get_query(related_id, related_slug) instance = CohortUser.objects.filter(**kwargs).first() if not instance: - raise RetryTask(f'CohortUser {related_id or related_slug} not found') + raise RetryTask(f"CohortUser {related_id or related_slug} not found") # syllabus = ( # f'{instance.cohort.syllabus_version.syllabus.slug}.v{instance.cohort.syllabus_version.version}' # if instance.cohort.syllabus_version else None) obj = { - 'id': instance.id, - 'user_first_name': instance.user.first_name, - 'user_last_name': instance.user.last_name, - 'cohort': instance.cohort.id, + "id": instance.id, + "user_first_name": instance.user.first_name, + "user_last_name": instance.user.last_name, + "cohort": instance.cohort.id, # 'available_as_saas': instance.cohort.available_as_saas, # 'syllabus': syllabus, # 'user_id': instance.user.id, @@ -269,366 +267,362 @@ def cohort_user(cls, } if instance.cohort.academy: - obj['academy'] = instance.cohort.academy.id + obj["academy"] = instance.cohort.academy.id return obj @classmethod - def task(cls, - kind: str, - related_id: Optional[str | int] = None, - related_slug: Optional[str] = None) -> dict[str, Any]: + def task( + cls, kind: str, related_id: Optional[str | int] = None, related_slug: Optional[str] = None + ) -> dict[str, Any]: from breathecode.assignments.models import Task kwargs = cls._get_query(related_id, related_slug) instance = Task.objects.filter(**kwargs).first() if not instance: - raise RetryTask(f'Task {related_id or related_slug} not found') + raise RetryTask(f"Task {related_id or related_slug} not found") obj = { - 'id': instance.id, - 'associated_slug': instance.associated_slug, - 'title': instance.title, - 'task_status': instance.task_status, - 'revision_status': instance.revision_status, - 'task_type': instance.task_type, - 'github_url': instance.github_url, - 'live_url': instance.live_url, + "id": instance.id, + "associated_slug": instance.associated_slug, + "title": instance.title, + "task_status": instance.task_status, + "revision_status": instance.revision_status, + "task_type": instance.task_type, + "github_url": instance.github_url, + "live_url": instance.live_url, } if instance.user: - obj['user_email'] = instance.user.email - obj['user_username'] = instance.user.username - obj['user_first_name'] = instance.user.first_name - obj['user_last_name'] = instance.user.last_name + obj["user_email"] = instance.user.email + obj["user_username"] = instance.user.username + obj["user_first_name"] = instance.user.first_name + obj["user_last_name"] = instance.user.last_name if instance.cohort: - obj['cohort'] = instance.cohort.id - obj['academy'] = instance.cohort.academy.id + obj["cohort"] = instance.cohort.id + obj["academy"] = instance.cohort.academy.id if instance.opened_at: - obj['opened_at'] = instance.opened_at.strftime('%Y-%m-%dT%H:%M:%S.%fZ') + obj["opened_at"] = instance.opened_at.strftime("%Y-%m-%dT%H:%M:%S.%fZ") return obj @classmethod - def event_checkin(cls, - kind: str, - related_id: Optional[str | int] = None, - related_slug: Optional[str] = None) -> dict[str, Any]: + def event_checkin( + cls, kind: str, related_id: Optional[str | int] = None, related_slug: Optional[str] = None + ) -> dict[str, Any]: from breathecode.events.models import EventCheckin kwargs = cls._get_query(related_id, related_slug) instance = EventCheckin.objects.filter(**kwargs).first() if not instance: - raise RetryTask(f'EventCheckin {related_id or related_slug} 
not found') + raise RetryTask(f"EventCheckin {related_id or related_slug} not found") obj = { - 'id': instance.id, - 'email': instance.email, - 'event_id': instance.event.id, - 'event_slug': instance.event.slug, - 'status': instance.status, + "id": instance.id, + "email": instance.email, + "event_id": instance.event.id, + "event_slug": instance.event.slug, + "status": instance.status, } if instance.attendee: - obj['attendee_email'] = instance.attendee.email - obj['attendee_username'] = instance.attendee.username - obj['attendee_first_name'] = instance.attendee.first_name - obj['attendee_last_name'] = instance.attendee.last_name + obj["attendee_email"] = instance.attendee.email + obj["attendee_username"] = instance.attendee.username + obj["attendee_first_name"] = instance.attendee.first_name + obj["attendee_last_name"] = instance.attendee.last_name if instance.attended_at: - obj['attended_at'] = instance.attended_at.strftime('%Y-%m-%dT%H:%M:%S.%fZ') + obj["attended_at"] = instance.attended_at.strftime("%Y-%m-%dT%H:%M:%S.%fZ") return obj @classmethod - def mentorship_session(cls, - kind: str, - related_id: Optional[str | int] = None, - related_slug: Optional[str] = None) -> dict[str, Any]: + def mentorship_session( + cls, kind: str, related_id: Optional[str | int] = None, related_slug: Optional[str] = None + ) -> dict[str, Any]: from breathecode.mentorship.models import MentorshipSession kwargs = cls._get_query(related_id, related_slug) instance = MentorshipSession.objects.filter(**kwargs).first() if not instance: - raise RetryTask(f'MentorshipSession {related_id or related_slug} not found') + raise RetryTask(f"MentorshipSession {related_id or related_slug} not found") obj = { - 'id': instance.id, - 'name': instance.name, - 'is_online': instance.is_online, - 'latitude': instance.latitude, - 'longitude': instance.longitude, - 'online_meeting_url': instance.online_meeting_url, - 'online_recording_url': instance.online_recording_url, - 'status': instance.status, - 'allow_billing': instance.allow_billing, - 'suggested_accounted_duration': instance.suggested_accounted_duration, - 'accounted_duration': instance.accounted_duration, + "id": instance.id, + "name": instance.name, + "is_online": instance.is_online, + "latitude": instance.latitude, + "longitude": instance.longitude, + "online_meeting_url": instance.online_meeting_url, + "online_recording_url": instance.online_recording_url, + "status": instance.status, + "allow_billing": instance.allow_billing, + "suggested_accounted_duration": instance.suggested_accounted_duration, + "accounted_duration": instance.accounted_duration, } if instance.mentor: - obj['mentor_id'] = instance.mentor.id - obj['mentor_slug'] = instance.mentor.slug - obj['mentor_name'] = instance.mentor.name + obj["mentor_id"] = instance.mentor.id + obj["mentor_slug"] = instance.mentor.slug + obj["mentor_name"] = instance.mentor.name if instance.mentee: - obj['mentee_email'] = instance.mentee.email - obj['mentee_username'] = instance.mentee.username - obj['mentee_first_name'] = instance.mentee.first_name - obj['mentee_last_name'] = instance.mentee.last_name + obj["mentee_email"] = instance.mentee.email + obj["mentee_username"] = instance.mentee.username + obj["mentee_first_name"] = instance.mentee.first_name + obj["mentee_last_name"] = instance.mentee.last_name if instance.service: - obj['service'] = instance.service.slug - obj['academy'] = instance.service.academy.id + obj["service"] = instance.service.slug + obj["academy"] = instance.service.academy.id if instance.bill: - 
obj['bill'] = instance.bill.id + obj["bill"] = instance.bill.id if instance.starts_at: - obj['starts_at'] = instance.starts_at.strftime('%Y-%m-%dT%H:%M:%S.%fZ') + obj["starts_at"] = instance.starts_at.strftime("%Y-%m-%dT%H:%M:%S.%fZ") if instance.ends_at: - obj['ends_at'] = instance.ends_at.strftime('%Y-%m-%dT%H:%M:%S.%fZ') + obj["ends_at"] = instance.ends_at.strftime("%Y-%m-%dT%H:%M:%S.%fZ") if instance.started_at: - obj['started_at'] = instance.started_at.strftime('%Y-%m-%dT%H:%M:%S.%fZ') + obj["started_at"] = instance.started_at.strftime("%Y-%m-%dT%H:%M:%S.%fZ") if instance.ended_at: - obj['ended_at'] = instance.ended_at.strftime('%Y-%m-%dT%H:%M:%S.%fZ') + obj["ended_at"] = instance.ended_at.strftime("%Y-%m-%dT%H:%M:%S.%fZ") if instance.mentor_joined_at: - obj['mentor_joined_at'] = instance.mentor_joined_at.strftime('%Y-%m-%dT%H:%M:%S.%fZ') + obj["mentor_joined_at"] = instance.mentor_joined_at.strftime("%Y-%m-%dT%H:%M:%S.%fZ") if instance.mentor_left_at: - obj['mentor_left_at'] = instance.mentor_left_at.strftime('%Y-%m-%dT%H:%M:%S.%fZ') + obj["mentor_left_at"] = instance.mentor_left_at.strftime("%Y-%m-%dT%H:%M:%S.%fZ") if instance.mentee_left_at: - obj['mentee_left_at'] = instance.mentee_left_at.strftime('%Y-%m-%dT%H:%M:%S.%fZ') + obj["mentee_left_at"] = instance.mentee_left_at.strftime("%Y-%m-%dT%H:%M:%S.%fZ") return obj @classmethod - def invoice(cls, - kind: str, - related_id: Optional[str | int] = None, - related_slug: Optional[str] = None) -> dict[str, Any]: + def invoice( + cls, kind: str, related_id: Optional[str | int] = None, related_slug: Optional[str] = None + ) -> dict[str, Any]: from breathecode.payments.models import Invoice kwargs = cls._get_query(related_id, related_slug) instance = Invoice.objects.filter(**kwargs).first() if not instance: - raise RetryTask(f'Invoice {related_id or related_slug} not found') + raise RetryTask(f"Invoice {related_id or related_slug} not found") obj = { - 'id': instance.id, - 'amount': instance.amount, - 'currency': instance.currency.code, - 'status': instance.status, - 'bag': instance.bag.id, - 'academy': instance.academy.id, - 'user_email': instance.user.email, - 'user_username': instance.user.username, - 'user_first_name': instance.user.first_name, - 'user_last_name': instance.user.last_name, + "id": instance.id, + "amount": instance.amount, + "currency": instance.currency.code, + "status": instance.status, + "bag": instance.bag.id, + "academy": instance.academy.id, + "user_email": instance.user.email, + "user_username": instance.user.username, + "user_first_name": instance.user.first_name, + "user_last_name": instance.user.last_name, } if instance.paid_at: - obj['paid_at'] = instance.paid_at.strftime('%Y-%m-%dT%H:%M:%S.%fZ') + obj["paid_at"] = instance.paid_at.strftime("%Y-%m-%dT%H:%M:%S.%fZ") if instance.refunded_at: - obj['refunded_at'] = instance.refunded_at.strftime('%Y-%m-%dT%H:%M:%S.%fZ') + obj["refunded_at"] = instance.refunded_at.strftime("%Y-%m-%dT%H:%M:%S.%fZ") return obj @classmethod - def bag(cls, - kind: str, - related_id: Optional[str | int] = None, - related_slug: Optional[str] = None) -> dict[str, Any]: + def bag( + cls, kind: str, related_id: Optional[str | int] = None, related_slug: Optional[str] = None + ) -> dict[str, Any]: from breathecode.payments.models import Bag kwargs = cls._get_query(related_id, related_slug) instance = Bag.objects.filter(**kwargs).first() if not instance: - raise RetryTask(f'Bag {related_id or related_slug} not found') + raise RetryTask(f"Bag {related_id or related_slug} not found") obj = { - 
'id': instance.id, - 'status': instance.status, - 'type': instance.type, - 'chosen_period': instance.chosen_period, - 'how_many_installments': instance.how_many_installments, - 'academy': instance.academy.id, - 'user_email': instance.user.email, - 'user_username': instance.user.username, - 'user_first_name': instance.user.first_name, - 'user_last_name': instance.user.last_name, - 'is_recurrent': instance.is_recurrent, - 'was_delivered': instance.was_delivered, + "id": instance.id, + "status": instance.status, + "type": instance.type, + "chosen_period": instance.chosen_period, + "how_many_installments": instance.how_many_installments, + "academy": instance.academy.id, + "user_email": instance.user.email, + "user_username": instance.user.username, + "user_first_name": instance.user.first_name, + "user_last_name": instance.user.last_name, + "is_recurrent": instance.is_recurrent, + "was_delivered": instance.was_delivered, } if instance.expires_at: - obj['expires_at'] = instance.expires_at.strftime('%Y-%m-%dT%H:%M:%S.%fZ') + obj["expires_at"] = instance.expires_at.strftime("%Y-%m-%dT%H:%M:%S.%fZ") return obj @classmethod - def subscription(cls, - kind: str, - related_id: Optional[str | int] = None, - related_slug: Optional[str] = None) -> dict[str, Any]: + def subscription( + cls, kind: str, related_id: Optional[str | int] = None, related_slug: Optional[str] = None + ) -> dict[str, Any]: from breathecode.payments.models import Subscription kwargs = cls._get_query(related_id, related_slug) instance = Subscription.objects.filter(**kwargs).first() if not instance: - raise RetryTask(f'Subscription {related_id or related_slug} not found') + raise RetryTask(f"Subscription {related_id or related_slug} not found") obj = { - 'id': instance.id, - 'status': instance.status, - 'user_email': instance.user.email, - 'user_username': instance.user.username, - 'user_first_name': instance.user.first_name, - 'user_last_name': instance.user.last_name, - 'academy': instance.academy.id, - 'is_refundable': instance.is_refundable, - 'pay_every': instance.pay_every, - 'pay_every_unit': instance.pay_every_unit, + "id": instance.id, + "status": instance.status, + "user_email": instance.user.email, + "user_username": instance.user.username, + "user_first_name": instance.user.first_name, + "user_last_name": instance.user.last_name, + "academy": instance.academy.id, + "is_refundable": instance.is_refundable, + "pay_every": instance.pay_every, + "pay_every_unit": instance.pay_every_unit, } if instance.selected_cohort_set: - obj['selected_cohort_set'] = instance.selected_cohort_set.slug + obj["selected_cohort_set"] = instance.selected_cohort_set.slug if instance.selected_mentorship_service_set: - obj['selected_mentorship_service_set'] = instance.selected_mentorship_service_set.slug + obj["selected_mentorship_service_set"] = instance.selected_mentorship_service_set.slug if instance.selected_event_type_set: - obj['selected_event_type_set'] = instance.selected_event_type_set.slug + obj["selected_event_type_set"] = instance.selected_event_type_set.slug if instance.paid_at: - obj['paid_at'] = instance.paid_at.strftime('%Y-%m-%dT%H:%M:%S.%fZ') + obj["paid_at"] = instance.paid_at.strftime("%Y-%m-%dT%H:%M:%S.%fZ") if instance.next_payment_at: - obj['next_payment_at'] = instance.next_payment_at.strftime('%Y-%m-%dT%H:%M:%S.%fZ') + obj["next_payment_at"] = instance.next_payment_at.strftime("%Y-%m-%dT%H:%M:%S.%fZ") if instance.valid_until: - obj['valid_until'] = instance.valid_until.strftime('%Y-%m-%dT%H:%M:%S.%fZ') + obj["valid_until"] 
= instance.valid_until.strftime("%Y-%m-%dT%H:%M:%S.%fZ") return obj @classmethod - def plan_financing(cls, - kind: str, - related_id: Optional[str | int] = None, - related_slug: Optional[str] = None) -> dict[str, Any]: + def plan_financing( + cls, kind: str, related_id: Optional[str | int] = None, related_slug: Optional[str] = None + ) -> dict[str, Any]: from breathecode.payments.models import PlanFinancing kwargs = cls._get_query(related_id, related_slug) instance = PlanFinancing.objects.filter(**kwargs).first() if not instance: - raise RetryTask(f'PlanFinancing {related_id or related_slug} not found') + raise RetryTask(f"PlanFinancing {related_id or related_slug} not found") - selected_mentorship_service_set = (instance.selected_mentorship_service_set.slug - if instance.selected_mentorship_service_set else None) + selected_mentorship_service_set = ( + instance.selected_mentorship_service_set.slug if instance.selected_mentorship_service_set else None + ) - selected_event_type_set = (instance.selected_event_type_set.slug if instance.selected_event_type_set else None) + selected_event_type_set = instance.selected_event_type_set.slug if instance.selected_event_type_set else None obj = { - 'id': instance.id, - 'status': instance.status, - 'user_email': instance.user.email, - 'user_username': instance.user.username, - 'user_first_name': instance.user.first_name, - 'user_last_name': instance.user.last_name, - 'academy': instance.academy.id, - 'selected_mentorship_service_set': selected_mentorship_service_set, - 'selected_event_type_set': selected_event_type_set, - 'monthly_price': instance.monthly_price, + "id": instance.id, + "status": instance.status, + "user_email": instance.user.email, + "user_username": instance.user.username, + "user_first_name": instance.user.first_name, + "user_last_name": instance.user.last_name, + "academy": instance.academy.id, + "selected_mentorship_service_set": selected_mentorship_service_set, + "selected_event_type_set": selected_event_type_set, + "monthly_price": instance.monthly_price, } if instance.selected_cohort_set: - obj['selected_cohort_set'] = instance.selected_cohort_set.slug + obj["selected_cohort_set"] = instance.selected_cohort_set.slug if instance.next_payment_at: - obj['next_payment_at'] = instance.next_payment_at.strftime('%Y-%m-%dT%H:%M:%S.%fZ') + obj["next_payment_at"] = instance.next_payment_at.strftime("%Y-%m-%dT%H:%M:%S.%fZ") if instance.valid_until: - obj['valid_until'] = instance.valid_until.strftime('%Y-%m-%dT%H:%M:%S.%fZ') + obj["valid_until"] = instance.valid_until.strftime("%Y-%m-%dT%H:%M:%S.%fZ") if instance.plan_expires_at: - obj['plan_expires_at'] = instance.plan_expires_at.strftime('%Y-%m-%dT%H:%M:%S.%fZ') + obj["plan_expires_at"] = instance.plan_expires_at.strftime("%Y-%m-%dT%H:%M:%S.%fZ") return obj -def get_activity_meta(kind: str, - related_type: Optional[str] = None, - related_id: Optional[str | int] = None, - related_slug: Optional[str] = None) -> dict[str, Any]: +def get_activity_meta( + kind: str, + related_type: Optional[str] = None, + related_id: Optional[str | int] = None, + related_slug: Optional[str] = None, +) -> dict[str, Any]: if not related_type: return {} if related_type and not related_id and not related_slug: - raise AbortTask('related_id or related_slug must be present') + raise AbortTask("related_id or related_slug must be present") if related_type not in ALLOWED_TYPES: - raise AbortTask(f'{related_type} is not supported yet') + raise AbortTask(f"{related_type} is not supported yet") args = (kind, related_id, 
related_slug) - if related_type == 'auth.UserInvite' and kind in ALLOWED_TYPES['auth.UserInvite']: + if related_type == "auth.UserInvite" and kind in ALLOWED_TYPES["auth.UserInvite"]: return FillActivityMeta.user_invite(*args) - if related_type == 'feedback.Answer' and kind in ALLOWED_TYPES['feedback.Answer']: + if related_type == "feedback.Answer" and kind in ALLOWED_TYPES["feedback.Answer"]: return FillActivityMeta.answer(*args) - if related_type == 'auth.User' and kind in ALLOWED_TYPES['auth.User']: + if related_type == "auth.User" and kind in ALLOWED_TYPES["auth.User"]: return FillActivityMeta.user(*args) - if related_type == 'admissions.Cohort' and kind in ALLOWED_TYPES['admissions.Cohort']: + if related_type == "admissions.Cohort" and kind in ALLOWED_TYPES["admissions.Cohort"]: return FillActivityMeta.cohort(*args) - if related_type == 'admissions.CohortUser' and kind in ALLOWED_TYPES['admissions.CohortUser']: + if related_type == "admissions.CohortUser" and kind in ALLOWED_TYPES["admissions.CohortUser"]: return FillActivityMeta.cohort_user(*args) - if related_type == 'assignments.Task' and kind in ALLOWED_TYPES['assignments.Task']: + if related_type == "assignments.Task" and kind in ALLOWED_TYPES["assignments.Task"]: return FillActivityMeta.task(*args) - if related_type == 'events.EventCheckin' and kind in ALLOWED_TYPES['events.EventCheckin']: + if related_type == "events.EventCheckin" and kind in ALLOWED_TYPES["events.EventCheckin"]: return FillActivityMeta.event_checkin(*args) - if related_type == 'payments.Bag' and kind in ALLOWED_TYPES['payments.Bag']: + if related_type == "payments.Bag" and kind in ALLOWED_TYPES["payments.Bag"]: return FillActivityMeta.bag(*args) - if related_type == 'payments.Subscription' and kind in ALLOWED_TYPES['payments.Subscription']: + if related_type == "payments.Subscription" and kind in ALLOWED_TYPES["payments.Subscription"]: return FillActivityMeta.subscription(*args) - if related_type == 'payments.PlanFinancing' and kind in ALLOWED_TYPES['payments.PlanFinancing']: + if related_type == "payments.PlanFinancing" and kind in ALLOWED_TYPES["payments.PlanFinancing"]: return FillActivityMeta.plan_financing(*args) - if related_type == 'mentorship.MentorshipSession' and kind in ALLOWED_TYPES['mentorship.MentorshipSession']: + if related_type == "mentorship.MentorshipSession" and kind in ALLOWED_TYPES["mentorship.MentorshipSession"]: return FillActivityMeta.mentorship_session(*args) - if related_type == 'payments.Invoice' and kind in ALLOWED_TYPES['payments.Invoice']: + if related_type == "payments.Invoice" and kind in ALLOWED_TYPES["payments.Invoice"]: return FillActivityMeta.invoice(*args) - raise AbortTask(f'kind {kind} is not supported by {related_type} yet') + raise AbortTask(f"kind {kind} is not supported by {related_type} yet") @functools.lru_cache(maxsize=1) def get_workers_amount(): - dynos = int(os.getenv('CELERY_DYNOS') or 1) - workers = int(os.getenv('CELERY_MAX_WORKERS') or 1) + dynos = int(os.getenv("CELERY_DYNOS") or 1) + workers = int(os.getenv("CELERY_MAX_WORKERS") or 1) return dynos * workers @@ -647,14 +641,15 @@ def get_current_worker_number() -> int: Assumes the worker data is stored in the cache. 
""" - if os.getenv('CELERY_POOL', '') == 'gevent': + if os.getenv("CELERY_POOL", "") == "gevent": from gevent import getcurrent + worker_pid = id(getcurrent()) else: worker_pid = os.getpid() # Retrieve worker data from the cache - workers_data: Workers = cache.get('workers', {}) + workers_data: Workers = cache.get("workers", {}) if not workers_data: return 0 @@ -662,7 +657,7 @@ def get_current_worker_number() -> int: # Find the worker number for the current process for worker_number, worker_processes in workers_data.items(): for worker_process in worker_processes: - if worker_process['pid'] == worker_pid: + if worker_process["pid"] == worker_pid: return worker_number # If the process is not found, return None or raise an exception based on your requirements diff --git a/breathecode/activity/apps.py b/breathecode/activity/apps.py index 99580dce9..b96671ae9 100644 --- a/breathecode/activity/apps.py +++ b/breathecode/activity/apps.py @@ -2,4 +2,4 @@ class ActivityConfig(AppConfig): - name = 'breathecode.activity' + name = "breathecode.activity" diff --git a/breathecode/activity/management/commands/upload_activities.py b/breathecode/activity/management/commands/upload_activities.py index 98e6517e1..db037716f 100644 --- a/breathecode/activity/management/commands/upload_activities.py +++ b/breathecode/activity/management/commands/upload_activities.py @@ -4,15 +4,15 @@ from django.core.cache import cache from breathecode.activity import tasks -IS_DJANGO_REDIS = hasattr(cache, 'delete_pattern') +IS_DJANGO_REDIS = hasattr(cache, "delete_pattern") def db_backup_bucket(): - return os.getenv('DB_BACKUP_BUCKET') + return os.getenv("DB_BACKUP_BUCKET") def get_activity_sampling_rate(): - env = os.getenv('ACTIVITY_SAMPLING_RATE') + env = os.getenv("ACTIVITY_SAMPLING_RATE") if env: return int(env) @@ -20,7 +20,7 @@ def get_activity_sampling_rate(): class Command(BaseCommand): - help = 'Delete duplicate cohort users imported from old breathecode' + help = "Delete duplicate cohort users imported from old breathecode" def handle(self, *args, **options): utc_now = timezone.now() @@ -33,4 +33,4 @@ def handle(self, *args, **options): cursor += timezone.timedelta(seconds=sampling_rate) tasks.upload_activities.apply_async(args=(), eta=cursor) - self.stdout.write(self.style.SUCCESS('Done!')) + self.stdout.write(self.style.SUCCESS("Done!")) diff --git a/breathecode/activity/models.py b/breathecode/activity/models.py index 4415f851f..dd62e126c 100644 --- a/breathecode/activity/models.py +++ b/breathecode/activity/models.py @@ -8,7 +8,7 @@ def is_test_env(): - return os.getenv('ENV') == 'test' or True + return os.getenv("ENV") == "test" or True class StudentActivity(ndb.Model): @@ -25,11 +25,11 @@ class StudentActivity(ndb.Model): @classmethod def _get_kind(cls): - return 'student_activity' + return "student_activity" class ActivityMeta(BigQueryBase): - __tablename__ = '4geeks.activity_nested' + __tablename__ = "4geeks.activity_nested" email = Column(String(36), primary_key=True) related = Column(Integer, nullable=False) @@ -37,21 +37,21 @@ class ActivityMeta(BigQueryBase): # related resource = Column(String(30), nullable=True) resource_id = Column(String(30), nullable=True) - meta = Column(String, default='{}') - meta = Column(JSON, default='{}') + meta = Column(String, default="{}") + meta = Column(JSON, default="{}") timestamp = Column(TIMESTAMP, nullable=False) # this model is a example, it's useless because google can't support JSON on they own SQLAlchemy dialect class Activity(BigQueryBase): - __tablename__ = 
'4geeks.activity' + __tablename__ = "4geeks.activity" id = Column(String(36), primary_key=True) user_id = Column(Integer, nullable=False) kind = Column(String(25), nullable=False) related = Column(String(30), nullable=True) related_id = Column(String(30), nullable=True) - meta = Column(String, default='{}') + meta = Column(String, default="{}") timestamp = Column(TIMESTAMP, nullable=False) @property @@ -67,23 +67,27 @@ def json(self, value): # test_support(__name__) ACTIVITY = [ - bigquery.SchemaField('id', 'STRING', mode='REQUIRED'), - bigquery.SchemaField('user_id', 'INTEGER', mode='REQUIRED'), - bigquery.SchemaField('kind', 'STRING', mode='REQUIRED'), - bigquery.SchemaField('related', - 'RECORD', - mode='NULLABLE', - fields=[ - bigquery.SchemaField('type', 'STRING', mode='NULLABLE'), - bigquery.SchemaField('id', 'INT64', mode='NULLABLE'), - bigquery.SchemaField('slug', 'STRING', mode='NULLABLE'), - ]), - bigquery.SchemaField('meta', - 'RECORD', - mode='REQUIRED', - fields=[ - bigquery.SchemaField('email', 'STRING', mode='NULLABLE'), - bigquery.SchemaField('username', 'STRING', mode='NULLABLE'), - ]), - bigquery.SchemaField('timestamp', 'TIMESTAMP', mode='REQUIRED'), + bigquery.SchemaField("id", "STRING", mode="REQUIRED"), + bigquery.SchemaField("user_id", "INTEGER", mode="REQUIRED"), + bigquery.SchemaField("kind", "STRING", mode="REQUIRED"), + bigquery.SchemaField( + "related", + "RECORD", + mode="NULLABLE", + fields=[ + bigquery.SchemaField("type", "STRING", mode="NULLABLE"), + bigquery.SchemaField("id", "INT64", mode="NULLABLE"), + bigquery.SchemaField("slug", "STRING", mode="NULLABLE"), + ], + ), + bigquery.SchemaField( + "meta", + "RECORD", + mode="REQUIRED", + fields=[ + bigquery.SchemaField("email", "STRING", mode="NULLABLE"), + bigquery.SchemaField("username", "STRING", mode="NULLABLE"), + ], + ), + bigquery.SchemaField("timestamp", "TIMESTAMP", mode="REQUIRED"), ] diff --git a/breathecode/activity/tasks.py b/breathecode/activity/tasks.py index 8d211245a..a86658d70 100644 --- a/breathecode/activity/tasks.py +++ b/breathecode/activity/tasks.py @@ -30,58 +30,60 @@ @functools.lru_cache(maxsize=1) def get_activity_sampling_rate(): - env = os.getenv('ACTIVITY_SAMPLING_RATE') + env = os.getenv("ACTIVITY_SAMPLING_RATE") if env: return int(env) return 60 -IS_DJANGO_REDIS = hasattr(cache, 'delete_pattern') +IS_DJANGO_REDIS = hasattr(cache, "delete_pattern") -API_URL = os.getenv('API_URL', '') +API_URL = os.getenv("API_URL", "") logger = logging.getLogger(__name__) ISO_STRING_PATTERN = re.compile( - r'^\d{4}-(0[1-9]|1[0-2])-([12]\d|0[1-9]|3[01])T([01]\d|2[0-3]):([0-5]\d):([0-5]\d)\.\d{6}(Z|\+\d{2}:\d{2})?$') + r"^\d{4}-(0[1-9]|1[0-2])-([12]\d|0[1-9]|3[01])T([01]\d|2[0-3]):([0-5]\d):([0-5]\d)\.\d{6}(Z|\+\d{2}:\d{2})?$" +) @shared_task(bind=True, priority=TaskPriority.ACADEMY.value) def get_attendancy_log(self, cohort_id: int): - logger.info('Executing get_attendancy_log') + logger.info("Executing get_attendancy_log") cohort = Cohort.objects.filter(id=cohort_id).first() if not cohort: - logger.error('Cohort not found') + logger.error("Cohort not found") return if not cohort.syllabus_version: - logger.error(f'Cohort {cohort.slug} does not have syllabus assigned') + logger.error(f"Cohort {cohort.slug} does not have syllabus assigned") return try: # json has days? - syllabus = cohort.syllabus_version.json['days'] + syllabus = cohort.syllabus_version.json["days"] # days is list? assert isinstance(syllabus, list) # the child has the correct attributes? 
for day in syllabus: - assert isinstance(day['id'], int) - duration_in_days = day.get('duration_in_days') + assert isinstance(day["id"], int) + duration_in_days = day.get("duration_in_days") assert isinstance(duration_in_days, int) or duration_in_days == None - assert isinstance(day['label'], str) + assert isinstance(day["label"], str) except Exception: - logger.error(f'Cohort {cohort.slug} have syllabus with bad format') + logger.error(f"Cohort {cohort.slug} have syllabus with bad format") return client = NDB(StudentActivity) - attendance = client.fetch([StudentActivity.cohort == cohort.slug, StudentActivity.slug == 'classroom_attendance']) + attendance = client.fetch([StudentActivity.cohort == cohort.slug, StudentActivity.slug == "classroom_attendance"]) unattendance = client.fetch( - [StudentActivity.cohort == cohort.slug, StudentActivity.slug == 'classroom_unattendance']) + [StudentActivity.cohort == cohort.slug, StudentActivity.slug == "classroom_unattendance"] + ) days = {} @@ -91,22 +93,24 @@ def get_attendancy_log(self, cohort_id: int): if current_day > cohort.current_day: break - for n in range(day.get('duration_in_days', 1)): + for n in range(day.get("duration_in_days", 1)): current_day += 1 if current_day > cohort.current_day: break - attendance_ids = list([x['user_id'] for x in attendance if int(x['day']) == current_day]) - unattendance_ids = list([x['user_id'] for x in unattendance if int(x['day']) == current_day]) + attendance_ids = list([x["user_id"] for x in attendance if int(x["day"]) == current_day]) + unattendance_ids = list([x["user_id"] for x in unattendance if int(x["day"]) == current_day]) has_attendance = bool(attendance_ids or unattendance_ids) - days[day['label']] = CohortDayLog(**{ - 'current_module': 'unknown', - 'teacher_comments': None, - 'attendance_ids': attendance_ids if has_attendance else None, - 'unattendance_ids': unattendance_ids if has_attendance else None - }, - allow_empty=True).serialize() + days[day["label"]] = CohortDayLog( + **{ + "current_module": "unknown", + "teacher_comments": None, + "attendance_ids": attendance_ids if has_attendance else None, + "unattendance_ids": unattendance_ids if has_attendance else None, + }, + allow_empty=True, + ).serialize() if n: offset += 1 @@ -114,53 +118,53 @@ def get_attendancy_log(self, cohort_id: int): cohort.history_log = days cohort.save_history_log() - logger.info('History log saved') + logger.info("History log saved") - for cohort_user in CohortUser.objects.filter(cohort=cohort).exclude(educational_status='DROPPED'): + for cohort_user in CohortUser.objects.filter(cohort=cohort).exclude(educational_status="DROPPED"): get_attendancy_log_per_cohort_user.delay(cohort_user.id) @shared_task(bind=False, priority=TaskPriority.ACADEMY.value) def get_attendancy_log_per_cohort_user(cohort_user_id: int): - logger.info('Executing get_attendancy_log_per_cohort_user') + logger.info("Executing get_attendancy_log_per_cohort_user") cohort_user = CohortUser.objects.filter(id=cohort_user_id).first() if not cohort_user: - logger.error('Cohort user not found') + logger.error("Cohort user not found") return cohort = cohort_user.cohort user = cohort_user.user if not cohort.history_log: - logger.error(f'Cohort {cohort.slug} has no log yet') + logger.error(f"Cohort {cohort.slug} has no log yet") return cohort_history_log = cohort.history_log or {} user_history_log = cohort_user.history_log or {} - user_history_log['attendance'] = {} - user_history_log['unattendance'] = {} + user_history_log["attendance"] = {} + 
user_history_log["unattendance"] = {} for day in cohort_history_log: - updated_at = cohort_history_log[day]['updated_at'] - current_module = cohort_history_log[day]['current_module'] + updated_at = cohort_history_log[day]["updated_at"] + current_module = cohort_history_log[day]["current_module"] log = { - 'updated_at': updated_at, - 'current_module': current_module, + "updated_at": updated_at, + "current_module": current_module, } - if user.id in cohort_history_log[day]['attendance_ids']: - user_history_log['attendance'][day] = log + if user.id in cohort_history_log[day]["attendance_ids"]: + user_history_log["attendance"][day] = log else: - user_history_log['unattendance'][day] = log + user_history_log["unattendance"][day] = log cohort_user.history_log = user_history_log cohort_user.save() - logger.info('History log saved') + logger.info("History log saved") @task(bind=True, priority=TaskPriority.ACADEMY.value) @@ -171,12 +175,12 @@ def extract_data(): client = None if IS_DJANGO_REDIS: - client = get_redis_connection('default') + client = get_redis_connection("default") while True: try: - with Lock(client, f'lock:activity:worker-{worker}', timeout=30, blocking_timeout=30): - worker_key = f'activity:worker-{worker}' + with Lock(client, f"lock:activity:worker-{worker}", timeout=30, blocking_timeout=30): + worker_key = f"activity:worker-{worker}" data = cache.get(worker_key) cache.delete(worker_key) @@ -187,7 +191,7 @@ def extract_data(): res += data except LockError: - raise RetryTask('Could not acquire lock for activity, operation timed out.') + raise RetryTask("Could not acquire lock for activity, operation timed out.") # this will keeping working even if the worker amount changes if worker >= workers and data is None: @@ -200,23 +204,25 @@ def extract_data(): # prevent to run the same task multiple times before the sampling rate time task_cls = self.task_manager.__class__ - task_cls.objects.filter(status='SCHEDULED', - task_module=self.task_manager.task_module, - task_name=self.task_manager.task_name, - created_at__lt=limit).exclude(id=task_manager_id).delete() + task_cls.objects.filter( + status="SCHEDULED", + task_module=self.task_manager.task_module, + task_name=self.task_manager.task_name, + created_at__lt=limit, + ).exclude(id=task_manager_id).delete() workers = actions.get_workers_amount() res = [] worker = 0 - has_not_backup = self.task_manager.status == 'PENDING' and self.task_manager.attempts == 1 - backup_key = f'activity:backup:{task_manager_id}' + has_not_backup = self.task_manager.status == "PENDING" and self.task_manager.attempts == 1 + backup_key = f"activity:backup:{task_manager_id}" if has_not_backup: extract_data() else: - backup_key = f'activity:backup:{task_manager_id}' + backup_key = f"activity:backup:{task_manager_id}" data = cache.get(backup_key) if data: @@ -234,13 +240,13 @@ def extract_data(): if not has_not_backup: cache.delete(backup_key) - raise AbortTask('No data to upload') + raise AbortTask("No data to upload") - table = BigQuery.table('activity') + table = BigQuery.table("activity") schema = table.schema() - rows = [x['data'] for x in res] - new_schema = BigQuery.join_schemas(*[x['schema'] for x in res]) + rows = [x["data"] for x in res] + new_schema = BigQuery.join_schemas(*[x["schema"] for x in res]) diff = BigQuery.schema_difference(schema, new_schema) @@ -260,35 +266,38 @@ def extract_data(): @task(priority=TaskPriority.BACKGROUND.value) -def add_activity(user_id: int, - kind: str, - related_type: Optional[str] = None, - related_id: Optional[str | int] 
= None, - related_slug: Optional[str] = None, - timestamp: Optional[str] = None, - **_): - - logger.info(f'Executing add_activity related to {str(kind)}') +def add_activity( + user_id: int, + kind: str, + related_type: Optional[str] = None, + related_id: Optional[str | int] = None, + related_slug: Optional[str] = None, + timestamp: Optional[str] = None, + **_, +): + + logger.info(f"Executing add_activity related to {str(kind)}") if timestamp is None: timestamp = timezone.now().isoformat() if related_type and not (bool(related_id) ^ bool(related_slug)): raise AbortTask( - 'If related_type is provided, either related_id or related_slug must be provided, but not both.') + "If related_type is provided, either related_id or related_slug must be provided, but not both." + ) if not related_type and (related_id or related_slug): - raise AbortTask('If related_type is not provided, both related_id and related_slug must also be absent.') + raise AbortTask("If related_type is not provided, both related_id and related_slug must also be absent.") client = None if IS_DJANGO_REDIS: - client = get_redis_connection('default') + client = get_redis_connection("default") worker = actions.get_current_worker_number() try: - with Lock(client, f'lock:activity:worker-{worker}', timeout=30, blocking_timeout=30): - worker_storage_key = f'activity:worker-{worker}' + with Lock(client, f"lock:activity:worker-{worker}", timeout=30, blocking_timeout=30): + worker_storage_key = f"activity:worker-{worker}" data = cache.get(worker_storage_key) if data: @@ -299,32 +308,32 @@ def add_activity(user_id: int, data = [] res = { - 'schema': [ - bigquery.SchemaField('user_id', bigquery.enums.SqlTypeNames.INT64, 'NULLABLE'), - bigquery.SchemaField('kind', bigquery.enums.SqlTypeNames.STRING, 'NULLABLE'), - bigquery.SchemaField('timestamp', bigquery.enums.SqlTypeNames.TIMESTAMP, 'NULLABLE'), - bigquery.SchemaField('related', - bigquery.enums.SqlTypeNames.STRUCT, - 'NULLABLE', - fields=[ - bigquery.SchemaField('type', bigquery.enums.SqlTypeNames.STRING, - 'NULLABLE'), - bigquery.SchemaField('id', bigquery.enums.SqlTypeNames.INT64, 'NULLABLE'), - bigquery.SchemaField('slug', bigquery.enums.SqlTypeNames.STRING, - 'NULLABLE'), - ]), + "schema": [ + bigquery.SchemaField("user_id", bigquery.enums.SqlTypeNames.INT64, "NULLABLE"), + bigquery.SchemaField("kind", bigquery.enums.SqlTypeNames.STRING, "NULLABLE"), + bigquery.SchemaField("timestamp", bigquery.enums.SqlTypeNames.TIMESTAMP, "NULLABLE"), + bigquery.SchemaField( + "related", + bigquery.enums.SqlTypeNames.STRUCT, + "NULLABLE", + fields=[ + bigquery.SchemaField("type", bigquery.enums.SqlTypeNames.STRING, "NULLABLE"), + bigquery.SchemaField("id", bigquery.enums.SqlTypeNames.INT64, "NULLABLE"), + bigquery.SchemaField("slug", bigquery.enums.SqlTypeNames.STRING, "NULLABLE"), + ], + ), ], - 'data': { - 'id': uuid.uuid4().hex, - 'user_id': user_id, - 'kind': kind, - 'timestamp': timestamp, - 'related': { - 'type': related_type, - 'id': related_id, - 'slug': related_slug, + "data": { + "id": uuid.uuid4().hex, + "user_id": user_id, + "kind": kind, + "timestamp": timestamp, + "related": { + "type": related_type, + "id": related_id, + "slug": related_slug, }, - 'meta': {}, + "meta": {}, }, } @@ -336,8 +345,9 @@ def add_activity(user_id: int, t = bigquery.enums.SqlTypeNames.STRING # keep it adobe than the date conditional - if isinstance(meta[key], datetime) or (isinstance(meta[key], str) - and ISO_STRING_PATTERN.match(meta[key])): + if isinstance(meta[key], datetime) or ( + isinstance(meta[key], 
str) and ISO_STRING_PATTERN.match(meta[key]) + ): t = bigquery.enums.SqlTypeNames.TIMESTAMP elif isinstance(meta[key], date): t = bigquery.enums.SqlTypeNames.DATE @@ -354,11 +364,11 @@ def add_activity(user_id: int, # res.append(serialize_field(key, meta[key], t, struct='meta')) fields.append(bigquery.SchemaField(key, t)) - res['data']['meta'][key] = meta[key] + res["data"]["meta"][key] = meta[key] - meta_field = bigquery.SchemaField('meta', bigquery.enums.SqlTypeNames.STRUCT, 'NULLABLE', fields=fields) + meta_field = bigquery.SchemaField("meta", bigquery.enums.SqlTypeNames.STRUCT, "NULLABLE", fields=fields) # meta_field = bigquery.SchemaField('meta', 'STRUCT', 'NULLABLE', fields=fields) - res['schema'].append(meta_field) + res["schema"].append(meta_field) # res['schema']['meta'] = meta_field data.append(res) @@ -368,4 +378,4 @@ def add_activity(user_id: int, cache.set(worker_storage_key, data) except LockError: - raise RetryTask('Could not acquire lock for activity, operation timed out.') + raise RetryTask("Could not acquire lock for activity, operation timed out.") diff --git a/breathecode/activity/tests/actions/tests_fill_activity_meta.py b/breathecode/activity/tests/actions/tests_fill_activity_meta.py index 21c46dde7..e340adea0 100644 --- a/breathecode/activity/tests/actions/tests_fill_activity_meta.py +++ b/breathecode/activity/tests/actions/tests_fill_activity_meta.py @@ -17,7 +17,7 @@ class UserTestSuite(MediaTestCase): def test_id_not_found(self): kind = self.bc.fake.slug() - with self.assertRaisesMessage(RetryTask, f'User 1 not found'): + with self.assertRaisesMessage(RetryTask, f"User 1 not found"): FillActivityMeta.user(kind, 1, None) def test_slug_not_found(self): @@ -27,10 +27,10 @@ def test_slug_not_found(self): meta = FillActivityMeta.user(kind, 1, None) expected = { - 'email': model.user.email, - 'id': model.user.id, - 'username': model.user.username, + "email": model.user.email, + "id": model.user.id, + "username": model.user.username, } self.assertEqual(meta, expected) - self.assertEqual(self.bc.database.list_of('auth.User'), [self.bc.format.to_dict(model.user)]) + self.assertEqual(self.bc.database.list_of("auth.User"), [self.bc.format.to_dict(model.user)]) diff --git a/breathecode/activity/tests/actions/tests_get_activity_meta.py b/breathecode/activity/tests/actions/tests_get_activity_meta.py index 12dfbf54c..5862c36fc 100644 --- a/breathecode/activity/tests/actions/tests_get_activity_meta.py +++ b/breathecode/activity/tests/actions/tests_get_activity_meta.py @@ -1,6 +1,7 @@ """ Test /answer """ + from unittest.mock import MagicMock, call, patch from django.utils import timezone @@ -25,22 +26,22 @@ def obj(): ALLOWED_TYPES = [ - ('auth.UserInvite', 'user_invite', 'invite_created', obj()), - ('auth.UserInvite', 'user_invite', 'invite_status_updated', obj()), - ('feedback.Answer', 'answer', 'nps_answered', obj()), - ('auth.User', 'user', 'login', obj()), - ('assignments.Task', 'task', 'open_syllabus_module', obj()), - ('assignments.Task', 'task', 'read_assignment', obj()), - ('assignments.Task', 'task', 'assignment_review_status_updated', obj()), - ('assignments.Task', 'task', 'assignment_status_updated', obj()), - ('events.EventCheckin', 'event_checkin', 'event_checkin_created', obj()), - ('events.EventCheckin', 'event_checkin', 'event_checkin_assisted', obj()), - ('payments.Bag', 'bag', 'bag_created', obj()), - ('payments.Subscription', 'subscription', 'checkout_completed', obj()), - ('payments.PlanFinancing', 'plan_financing', 'checkout_completed', obj()), - 
('mentorship.MentorshipSession', 'mentorship_session', 'mentoring_session_scheduled', obj()), - ('mentorship.MentorshipSession', 'mentorship_session', 'mentorship_session_checkin', obj()), - ('mentorship.MentorshipSession', 'mentorship_session', 'mentorship_session_checkout', obj()), + ("auth.UserInvite", "user_invite", "invite_created", obj()), + ("auth.UserInvite", "user_invite", "invite_status_updated", obj()), + ("feedback.Answer", "answer", "nps_answered", obj()), + ("auth.User", "user", "login", obj()), + ("assignments.Task", "task", "open_syllabus_module", obj()), + ("assignments.Task", "task", "read_assignment", obj()), + ("assignments.Task", "task", "assignment_review_status_updated", obj()), + ("assignments.Task", "task", "assignment_status_updated", obj()), + ("events.EventCheckin", "event_checkin", "event_checkin_created", obj()), + ("events.EventCheckin", "event_checkin", "event_checkin_assisted", obj()), + ("payments.Bag", "bag", "bag_created", obj()), + ("payments.Subscription", "subscription", "checkout_completed", obj()), + ("payments.PlanFinancing", "plan_financing", "checkout_completed", obj()), + ("mentorship.MentorshipSession", "mentorship_session", "mentoring_session_scheduled", obj()), + ("mentorship.MentorshipSession", "mentorship_session", "mentorship_session_checkin", obj()), + ("mentorship.MentorshipSession", "mentorship_session", "mentorship_session_checkout", obj()), ] @@ -57,14 +58,14 @@ def test_just_kind(self): def test_type_and_no_id_or_slug(self): kind = self.bc.fake.slug() - with self.assertRaisesMessage(AbortTask, 'related_id or related_slug must be present'): - get_activity_meta(kind, related_type='auth.User') + with self.assertRaisesMessage(AbortTask, "related_id or related_slug must be present"): + get_activity_meta(kind, related_type="auth.User") def test_bad_related_type(self): related_type = self.bc.fake.slug() kind = self.bc.fake.slug() - with self.assertRaisesMessage(AbortTask, f'{related_type} is not supported yet'): + with self.assertRaisesMessage(AbortTask, f"{related_type} is not supported yet"): get_activity_meta(kind, related_type=related_type, related_id=1) def test_kind_not_sopported_by_related_type(self): @@ -72,12 +73,19 @@ def test_kind_not_sopported_by_related_type(self): kind = self.bc.fake.slug() allowed = [ - 'auth.UserInvite', 'feedback.Answer', 'auth.User', 'assignments.Task', 'events.EventCheckin', - 'payments.Bag', 'payments.Subscription', 'payments.PlanFinancing', 'mentorship.MentorshipSession' + "auth.UserInvite", + "feedback.Answer", + "auth.User", + "assignments.Task", + "events.EventCheckin", + "payments.Bag", + "payments.Subscription", + "payments.PlanFinancing", + "mentorship.MentorshipSession", ] for related_type in allowed: - with self.assertRaisesMessage(AbortTask, f'kind {kind} is not supported by {related_type}'): + with self.assertRaisesMessage(AbortTask, f"kind {kind} is not supported by {related_type}"): get_activity_meta(kind, related_type=related_type, related_id=1) def test_kind_sopported_by_related_type(self): diff --git a/breathecode/activity/tests/management/commands/tests_upload_activities.py b/breathecode/activity/tests/management/commands/tests_upload_activities.py index 59945f4ff..214b5ea48 100644 --- a/breathecode/activity/tests/management/commands/tests_upload_activities.py +++ b/breathecode/activity/tests/management/commands/tests_upload_activities.py @@ -21,10 +21,11 @@ def get_calls(): def apply_patch(db, monkeypatch): m1 = MagicMock() - 
monkeypatch.setattr('breathecode.activity.management.commands.upload_activities.get_activity_sampling_rate', - lambda: 60) - monkeypatch.setattr('breathecode.activity.tasks.upload_activities.apply_async', m1) - monkeypatch.setattr('django.utils.timezone.now', lambda: UTC_NOW) + monkeypatch.setattr( + "breathecode.activity.management.commands.upload_activities.get_activity_sampling_rate", lambda: 60 + ) + monkeypatch.setattr("breathecode.activity.tasks.upload_activities.apply_async", m1) + monkeypatch.setattr("django.utils.timezone.now", lambda: UTC_NOW) yield m1 diff --git a/breathecode/activity/tests/mixins/__init__.py b/breathecode/activity/tests/mixins/__init__.py index b73c178bc..8f66ab70d 100644 --- a/breathecode/activity/tests/mixins/__init__.py +++ b/breathecode/activity/tests/mixins/__init__.py @@ -1,4 +1,5 @@ """ Media mixins """ + from .media_test_case import MediaTestCase # noqa: F401 diff --git a/breathecode/activity/tests/mixins/media_test_case.py b/breathecode/activity/tests/mixins/media_test_case.py index f5f211652..ff00750f0 100644 --- a/breathecode/activity/tests/mixins/media_test_case.py +++ b/breathecode/activity/tests/mixins/media_test_case.py @@ -1,17 +1,35 @@ """ Collections of mixins used to login in authorize microservice """ + import os from rest_framework.test import APITestCase -from breathecode.tests.mixins import (GenerateModelsMixin, CacheMixin, TokenMixin, GenerateQueriesMixin, HeadersMixin, - DatetimeMixin, Sha256Mixin, BreathecodeMixin) +from breathecode.tests.mixins import ( + GenerateModelsMixin, + CacheMixin, + TokenMixin, + GenerateQueriesMixin, + HeadersMixin, + DatetimeMixin, + Sha256Mixin, + BreathecodeMixin, +) from breathecode.utils.sqlalchemy import BigQueryBase from sqlalchemy import create_engine from breathecode.services.google_cloud import BigQuery -class MediaTestCase(APITestCase, GenerateModelsMixin, CacheMixin, TokenMixin, GenerateQueriesMixin, HeadersMixin, - DatetimeMixin, Sha256Mixin, BreathecodeMixin): +class MediaTestCase( + APITestCase, + GenerateModelsMixin, + CacheMixin, + TokenMixin, + GenerateQueriesMixin, + HeadersMixin, + DatetimeMixin, + Sha256Mixin, + BreathecodeMixin, +): """FeedbackTestCase with auth methods""" def tearDown(self): @@ -20,7 +38,7 @@ def tearDown(self): def setUp(self): self.generate_queries() - os.environ['MEDIA_GALLERY_BUCKET'] = 'bucket-name' + os.environ["MEDIA_GALLERY_BUCKET"] = "bucket-name" self.set_test_instance(self) BigQuery.setup() diff --git a/breathecode/activity/tests/tasks/tests_add_activity.py b/breathecode/activity/tests/tasks/tests_add_activity.py index facebab12..2ee4895f1 100644 --- a/breathecode/activity/tests/tasks/tests_add_activity.py +++ b/breathecode/activity/tests/tasks/tests_add_activity.py @@ -45,19 +45,19 @@ def apply_patch(db, monkeypatch): def uuid4(): nonlocal v v += 1 - return UUID4(f'c5d8cbc54a894dd0983caae1b850709{v}') + return UUID4(f"c5d8cbc54a894dd0983caae1b850709{v}") m1 = MagicMock() m2 = MagicMock() m3 = MagicMock(return_value={}) - monkeypatch.setattr('logging.Logger.info', m1) - monkeypatch.setattr('logging.Logger.error', m2) + monkeypatch.setattr("logging.Logger.info", m1) + monkeypatch.setattr("logging.Logger.error", m2) # monkeypatch.setattr('breathecode.services.google_cloud.credentials.resolve_credentials', lambda: None) - monkeypatch.setattr('breathecode.activity.actions.get_activity_meta', m3) - monkeypatch.setattr('django.utils.timezone.now', lambda: UTC_NOW) - monkeypatch.setattr('uuid.uuid4', uuid4) - 
monkeypatch.setattr('breathecode.activity.actions.get_workers_amount', lambda: 2) + monkeypatch.setattr("breathecode.activity.actions.get_activity_meta", m3) + monkeypatch.setattr("django.utils.timezone.now", lambda: UTC_NOW) + monkeypatch.setattr("uuid.uuid4", uuid4) + monkeypatch.setattr("breathecode.activity.actions.get_workers_amount", lambda: 2) yield m1, m2, m3 @@ -71,7 +71,7 @@ def wrapper(meta: Optional[dict] = None, exc: Optional[str] = None): else: m3 = MagicMock(return_value=meta) - monkeypatch.setattr('breathecode.activity.actions.get_activity_meta', m3) + monkeypatch.setattr("breathecode.activity.actions.get_activity_meta", m3) return m3 yield wrapper @@ -91,7 +91,7 @@ def wrapper(data: str): def get_attrs_from_meta(meta: dict): for key in meta: - yield {'key': key, 'struct': 'meta', 'type': 'STRING', 'value': meta[key]} + yield {"key": key, "struct": "meta", "type": "STRING", "value": meta[key]} if not meta: return [] @@ -101,54 +101,47 @@ def test_type_and_no_id_or_slug(bc: Breathecode): kind = bc.fake.slug() cache.set( - 'workers', { - 0: [{ - 'pid': os.getpid(), - 'created_at': timezone.now() - }], - 1: [{ - 'pid': os.getpid() + random.randint(1, 100), - 'created_at': timezone.now() - }], - }) - - add_activity.delay(1, kind, related_type='auth.User') - - assert logging.Logger.info.call_args_list == [call(f'Executing add_activity related to {kind}')] + "workers", + { + 0: [{"pid": os.getpid(), "created_at": timezone.now()}], + 1: [{"pid": os.getpid() + random.randint(1, 100), "created_at": timezone.now()}], + }, + ) + + add_activity.delay(1, kind, related_type="auth.User") + + assert logging.Logger.info.call_args_list == [call(f"Executing add_activity related to {kind}")] assert logging.Logger.error.call_args_list == [ - call('If related_type is provided, either related_id or related_slug must be provided, ' - 'but not both.', - exc_info=True), + call( + "If related_type is provided, either related_id or related_slug must be provided, " "but not both.", + exc_info=True, + ), ] assert actions.get_activity_meta.call_args_list == [] - assert cache.get('activity:worker-0') is None + assert cache.get("activity:worker-0") is None def test_type_with_id_and_slug(bc: Breathecode): kind = bc.fake.slug() cache.set( - 'workers', { - 0: [{ - 'pid': os.getpid(), - 'created_at': timezone.now() - }], - 1: [{ - 'pid': os.getpid() + random.randint(1, 100), - 'created_at': timezone.now() - }], - }) - - add_activity.delay(1, kind, related_id=1, related_slug='slug') - - assert logging.Logger.info.call_args_list == [call(f'Executing add_activity related to {kind}')] + "workers", + { + 0: [{"pid": os.getpid(), "created_at": timezone.now()}], + 1: [{"pid": os.getpid() + random.randint(1, 100), "created_at": timezone.now()}], + }, + ) + + add_activity.delay(1, kind, related_id=1, related_slug="slug") + + assert logging.Logger.info.call_args_list == [call(f"Executing add_activity related to {kind}")] assert logging.Logger.error.call_args_list == [ - call('If related_type is not provided, both related_id and related_slug must also be absent.', exc_info=True), + call("If related_type is not provided, both related_id and related_slug must also be absent.", exc_info=True), ] assert actions.get_activity_meta.call_args_list == [] - assert cache.get('activity:worker-0') is None + assert cache.get("activity:worker-0") is None def test_adding_the_resource_with_id_and_no_meta(bc: Breathecode, decompress_and_parse): @@ -157,51 +150,49 @@ def test_adding_the_resource_with_id_and_no_meta(bc: Breathecode, 
decompress_and logging.Logger.info.call_args_list = [] cache.set( - 'workers', { - 0: [{ - 'pid': os.getpid(), - 'created_at': timezone.now() - }], - 1: [{ - 'pid': os.getpid() + random.randint(1, 100), - 'created_at': timezone.now() - }], - }) - - add_activity.delay(1, kind, related_type='auth.User', related_id=1) - - assert logging.Logger.info.call_args_list == [call(f'Executing add_activity related to {kind}')] + "workers", + { + 0: [{"pid": os.getpid(), "created_at": timezone.now()}], + 1: [{"pid": os.getpid() + random.randint(1, 100), "created_at": timezone.now()}], + }, + ) + + add_activity.delay(1, kind, related_type="auth.User", related_id=1) + + assert logging.Logger.info.call_args_list == [call(f"Executing add_activity related to {kind}")] assert logging.Logger.error.call_args_list == [] - assert actions.get_activity_meta.call_args_list == [call(kind, 'auth.User', 1, None)] + assert actions.get_activity_meta.call_args_list == [call(kind, "auth.User", 1, None)] - assert decompress_and_parse(cache.get('activity:worker-0')) == [ + assert decompress_and_parse(cache.get("activity:worker-0")) == [ { - 'data': { - 'id': 'c5d8cbc54a894dd0983caae1b8507091', - 'user_id': 1, - 'kind': kind, - 'timestamp': UTC_NOW.isoformat(), - 'related': { - 'type': 'auth.User', - 'slug': None, - 'id': 1, + "data": { + "id": "c5d8cbc54a894dd0983caae1b8507091", + "user_id": 1, + "kind": kind, + "timestamp": UTC_NOW.isoformat(), + "related": { + "type": "auth.User", + "slug": None, + "id": 1, }, - 'meta': {} + "meta": {}, }, - 'schema': [ - bigquery.SchemaField('user_id', bigquery.enums.SqlTypeNames.INT64, 'NULLABLE'), - bigquery.SchemaField('kind', bigquery.enums.SqlTypeNames.STRING, 'NULLABLE'), - bigquery.SchemaField('timestamp', bigquery.enums.SqlTypeNames.TIMESTAMP, 'NULLABLE'), - bigquery.SchemaField('related', - bigquery.enums.SqlTypeNames.STRUCT, - 'NULLABLE', - fields=[ - bigquery.SchemaField('type', bigquery.enums.SqlTypeNames.STRING, 'NULLABLE'), - bigquery.SchemaField('id', bigquery.enums.SqlTypeNames.INT64, 'NULLABLE'), - bigquery.SchemaField('slug', bigquery.enums.SqlTypeNames.STRING, 'NULLABLE'), - ]), - bigquery.SchemaField('meta', bigquery.enums.SqlTypeNames.STRUCT, 'NULLABLE', fields=[]), + "schema": [ + bigquery.SchemaField("user_id", bigquery.enums.SqlTypeNames.INT64, "NULLABLE"), + bigquery.SchemaField("kind", bigquery.enums.SqlTypeNames.STRING, "NULLABLE"), + bigquery.SchemaField("timestamp", bigquery.enums.SqlTypeNames.TIMESTAMP, "NULLABLE"), + bigquery.SchemaField( + "related", + bigquery.enums.SqlTypeNames.STRUCT, + "NULLABLE", + fields=[ + bigquery.SchemaField("type", bigquery.enums.SqlTypeNames.STRING, "NULLABLE"), + bigquery.SchemaField("id", bigquery.enums.SqlTypeNames.INT64, "NULLABLE"), + bigquery.SchemaField("slug", bigquery.enums.SqlTypeNames.STRING, "NULLABLE"), + ], + ), + bigquery.SchemaField("meta", bigquery.enums.SqlTypeNames.STRUCT, "NULLABLE", fields=[]), ], }, ] @@ -215,51 +206,49 @@ def test_adding_the_resource_with_slug_and_no_meta(bc: Breathecode, decompress_a related_slug = bc.fake.slug() cache.set( - 'workers', { - 0: [{ - 'pid': os.getpid(), - 'created_at': timezone.now() - }], - 1: [{ - 'pid': os.getpid() + random.randint(1, 100), - 'created_at': timezone.now() - }], - }) - - add_activity.delay(1, kind, related_type='auth.User', related_slug=related_slug) - - assert logging.Logger.info.call_args_list == [call(f'Executing add_activity related to {kind}')] + "workers", + { + 0: [{"pid": os.getpid(), "created_at": timezone.now()}], + 1: [{"pid": os.getpid() + 
random.randint(1, 100), "created_at": timezone.now()}], + }, + ) + + add_activity.delay(1, kind, related_type="auth.User", related_slug=related_slug) + + assert logging.Logger.info.call_args_list == [call(f"Executing add_activity related to {kind}")] assert logging.Logger.error.call_args_list == [] - assert actions.get_activity_meta.call_args_list == [call(kind, 'auth.User', None, related_slug)] + assert actions.get_activity_meta.call_args_list == [call(kind, "auth.User", None, related_slug)] - assert decompress_and_parse(cache.get('activity:worker-0')) == [ + assert decompress_and_parse(cache.get("activity:worker-0")) == [ { - 'data': { - 'id': 'c5d8cbc54a894dd0983caae1b8507091', - 'user_id': 1, - 'kind': kind, - 'timestamp': UTC_NOW.isoformat(), - 'related': { - 'type': 'auth.User', - 'slug': related_slug, - 'id': None, + "data": { + "id": "c5d8cbc54a894dd0983caae1b8507091", + "user_id": 1, + "kind": kind, + "timestamp": UTC_NOW.isoformat(), + "related": { + "type": "auth.User", + "slug": related_slug, + "id": None, }, - 'meta': {}, + "meta": {}, }, - 'schema': [ - bigquery.SchemaField('user_id', bigquery.enums.SqlTypeNames.INT64, 'NULLABLE'), - bigquery.SchemaField('kind', bigquery.enums.SqlTypeNames.STRING, 'NULLABLE'), - bigquery.SchemaField('timestamp', bigquery.enums.SqlTypeNames.TIMESTAMP, 'NULLABLE'), - bigquery.SchemaField('related', - bigquery.enums.SqlTypeNames.STRUCT, - 'NULLABLE', - fields=[ - bigquery.SchemaField('type', bigquery.enums.SqlTypeNames.STRING, 'NULLABLE'), - bigquery.SchemaField('id', bigquery.enums.SqlTypeNames.INT64, 'NULLABLE'), - bigquery.SchemaField('slug', bigquery.enums.SqlTypeNames.STRING, 'NULLABLE'), - ]), - bigquery.SchemaField('meta', bigquery.enums.SqlTypeNames.STRUCT, 'NULLABLE', fields=[]), + "schema": [ + bigquery.SchemaField("user_id", bigquery.enums.SqlTypeNames.INT64, "NULLABLE"), + bigquery.SchemaField("kind", bigquery.enums.SqlTypeNames.STRING, "NULLABLE"), + bigquery.SchemaField("timestamp", bigquery.enums.SqlTypeNames.TIMESTAMP, "NULLABLE"), + bigquery.SchemaField( + "related", + bigquery.enums.SqlTypeNames.STRUCT, + "NULLABLE", + fields=[ + bigquery.SchemaField("type", bigquery.enums.SqlTypeNames.STRING, "NULLABLE"), + bigquery.SchemaField("id", bigquery.enums.SqlTypeNames.INT64, "NULLABLE"), + bigquery.SchemaField("slug", bigquery.enums.SqlTypeNames.STRING, "NULLABLE"), + ], + ), + bigquery.SchemaField("meta", bigquery.enums.SqlTypeNames.STRUCT, "NULLABLE", fields=[]), ], }, ] @@ -269,9 +258,9 @@ def test_adding_the_resource_with_meta(bc: Breathecode, set_activity_meta, decom kind = bc.fake.slug() meta = { - bc.fake.slug().replace('-', '_'): bc.fake.slug(), - bc.fake.slug().replace('-', '_'): bc.fake.slug(), - bc.fake.slug().replace('-', '_'): bc.fake.slug(), + bc.fake.slug().replace("-", "_"): bc.fake.slug(), + bc.fake.slug().replace("-", "_"): bc.fake.slug(), + bc.fake.slug().replace("-", "_"): bc.fake.slug(), } set_activity_meta(meta) @@ -279,57 +268,56 @@ def test_adding_the_resource_with_meta(bc: Breathecode, set_activity_meta, decom logging.Logger.info.call_args_list = [] cache.set( - 'workers', { - 0: [{ - 'pid': os.getpid(), - 'created_at': timezone.now() - }], - 1: [{ - 'pid': os.getpid() + random.randint(1, 100), - 'created_at': timezone.now() - }], - }) - - add_activity.delay(1, kind, related_type='auth.User', related_id=1) + "workers", + { + 0: [{"pid": os.getpid(), "created_at": timezone.now()}], + 1: [{"pid": os.getpid() + random.randint(1, 100), "created_at": timezone.now()}], + }, + ) + + add_activity.delay(1, kind, 
related_type="auth.User", related_id=1) assert actions.get_activity_meta.call_args_list == [ - call(kind, 'auth.User', 1, None), + call(kind, "auth.User", 1, None), ] - assert logging.Logger.info.call_args_list == [call(f'Executing add_activity related to {kind}')] + assert logging.Logger.info.call_args_list == [call(f"Executing add_activity related to {kind}")] assert logging.Logger.error.call_args_list == [] - assert decompress_and_parse(cache.get('activity:worker-0')) == [ + assert decompress_and_parse(cache.get("activity:worker-0")) == [ { - 'data': { - 'id': 'c5d8cbc54a894dd0983caae1b8507091', - 'user_id': 1, - 'kind': kind, - 'timestamp': UTC_NOW.isoformat(), - 'related': { - 'type': 'auth.User', - 'slug': None, - 'id': 1, + "data": { + "id": "c5d8cbc54a894dd0983caae1b8507091", + "user_id": 1, + "kind": kind, + "timestamp": UTC_NOW.isoformat(), + "related": { + "type": "auth.User", + "slug": None, + "id": 1, }, - 'meta': meta + "meta": meta, }, - 'schema': [ - bigquery.SchemaField('user_id', bigquery.enums.SqlTypeNames.INT64, 'NULLABLE'), - bigquery.SchemaField('kind', bigquery.enums.SqlTypeNames.STRING, 'NULLABLE'), - bigquery.SchemaField('timestamp', bigquery.enums.SqlTypeNames.TIMESTAMP, 'NULLABLE'), - bigquery.SchemaField('related', - bigquery.enums.SqlTypeNames.STRUCT, - 'NULLABLE', - fields=[ - bigquery.SchemaField('type', bigquery.enums.SqlTypeNames.STRING, 'NULLABLE'), - bigquery.SchemaField('id', bigquery.enums.SqlTypeNames.INT64, 'NULLABLE'), - bigquery.SchemaField('slug', bigquery.enums.SqlTypeNames.STRING, 'NULLABLE'), - ]), + "schema": [ + bigquery.SchemaField("user_id", bigquery.enums.SqlTypeNames.INT64, "NULLABLE"), + bigquery.SchemaField("kind", bigquery.enums.SqlTypeNames.STRING, "NULLABLE"), + bigquery.SchemaField("timestamp", bigquery.enums.SqlTypeNames.TIMESTAMP, "NULLABLE"), + bigquery.SchemaField( + "related", + bigquery.enums.SqlTypeNames.STRUCT, + "NULLABLE", + fields=[ + bigquery.SchemaField("type", bigquery.enums.SqlTypeNames.STRING, "NULLABLE"), + bigquery.SchemaField("id", bigquery.enums.SqlTypeNames.INT64, "NULLABLE"), + bigquery.SchemaField("slug", bigquery.enums.SqlTypeNames.STRING, "NULLABLE"), + ], + ), bigquery.SchemaField( - 'meta', + "meta", bigquery.enums.SqlTypeNames.STRUCT, - 'NULLABLE', - fields=[bigquery.SchemaField(x, bigquery.enums.SqlTypeNames.STRING, 'NULLABLE') for x in meta]), + "NULLABLE", + fields=[bigquery.SchemaField(x, bigquery.enums.SqlTypeNames.STRING, "NULLABLE") for x in meta], + ), ], }, ] @@ -345,37 +333,34 @@ def test_adding_the_resource_with_meta__it_fails(bc: Breathecode, set_activity_m set_activity_meta(exc=exc) cache.set( - 'workers', { - 0: [{ - 'pid': os.getpid(), - 'created_at': timezone.now() - }], - 1: [{ - 'pid': os.getpid() + random.randint(1, 100), - 'created_at': timezone.now() - }], - }) - - add_activity.delay(1, kind, related_type='auth.User', related_id=1) + "workers", + { + 0: [{"pid": os.getpid(), "created_at": timezone.now()}], + 1: [{"pid": os.getpid() + random.randint(1, 100), "created_at": timezone.now()}], + }, + ) + + add_activity.delay(1, kind, related_type="auth.User", related_id=1) assert actions.get_activity_meta.call_args_list == [ - call(kind, 'auth.User', 1, None), + call(kind, "auth.User", 1, None), ] - assert logging.Logger.info.call_args_list == [call(f'Executing add_activity related to {kind}')] + assert logging.Logger.info.call_args_list == [call(f"Executing add_activity related to {kind}")] assert logging.Logger.error.call_args_list == [call(exc, exc_info=True)] - assert 
cache.get('activity:worker-0') is None + assert cache.get("activity:worker-0") is None -def test_adding_the_resource_with_meta__called_two_times(bc: Breathecode, monkeypatch, set_activity_meta, - decompress_and_parse): +def test_adding_the_resource_with_meta__called_two_times( + bc: Breathecode, monkeypatch, set_activity_meta, decompress_and_parse +): kind = bc.fake.slug() meta = { - bc.fake.slug().replace('-', '_'): bc.fake.slug(), - bc.fake.slug().replace('-', '_'): bc.fake.slug(), - bc.fake.slug().replace('-', '_'): bc.fake.slug(), + bc.fake.slug().replace("-", "_"): bc.fake.slug(), + bc.fake.slug().replace("-", "_"): bc.fake.slug(), + bc.fake.slug().replace("-", "_"): bc.fake.slug(), } set_activity_meta(meta) @@ -383,110 +368,108 @@ def test_adding_the_resource_with_meta__called_two_times(bc: Breathecode, monkey logging.Logger.info.call_args_list = [] cache.set( - 'workers', { - 0: [{ - 'pid': os.getpid(), - 'created_at': timezone.now() - }], - 1: [{ - 'pid': os.getpid() + random.randint(1, 100), - 'created_at': timezone.now() - }], - }) - - add_activity.delay(1, kind, related_type='auth.User', related_id=1) + "workers", + { + 0: [{"pid": os.getpid(), "created_at": timezone.now()}], + 1: [{"pid": os.getpid() + random.randint(1, 100), "created_at": timezone.now()}], + }, + ) + + add_activity.delay(1, kind, related_type="auth.User", related_id=1) cache.set( - 'workers', { - 0: [{ - 'pid': os.getpid() + random.randint(1, 100), - 'created_at': timezone.now() - }], - 1: [{ - 'pid': os.getpid(), - 'created_at': timezone.now() - }], - }) - - add_activity.delay(1, kind, related_type='auth.User', related_id=1) + "workers", + { + 0: [{"pid": os.getpid() + random.randint(1, 100), "created_at": timezone.now()}], + 1: [{"pid": os.getpid(), "created_at": timezone.now()}], + }, + ) + + add_activity.delay(1, kind, related_type="auth.User", related_id=1) assert logging.Logger.error.call_args_list == [] assert actions.get_activity_meta.call_args_list == [ - call(kind, 'auth.User', 1, None), - call(kind, 'auth.User', 1, None), + call(kind, "auth.User", 1, None), + call(kind, "auth.User", 1, None), ] assert logging.Logger.info.call_args_list == [ - call(f'Executing add_activity related to {kind}'), - call(f'Executing add_activity related to {kind}'), + call(f"Executing add_activity related to {kind}"), + call(f"Executing add_activity related to {kind}"), ] assert logging.Logger.error.call_args_list == [] - assert decompress_and_parse(cache.get('activity:worker-0')) == [ + assert decompress_and_parse(cache.get("activity:worker-0")) == [ { - 'data': { - 'id': 'c5d8cbc54a894dd0983caae1b8507091', - 'user_id': 1, - 'kind': kind, - 'timestamp': UTC_NOW.isoformat(), - 'related': { - 'type': 'auth.User', - 'slug': None, - 'id': 1, + "data": { + "id": "c5d8cbc54a894dd0983caae1b8507091", + "user_id": 1, + "kind": kind, + "timestamp": UTC_NOW.isoformat(), + "related": { + "type": "auth.User", + "slug": None, + "id": 1, }, - 'meta': meta + "meta": meta, }, - 'schema': [ - bigquery.SchemaField('user_id', bigquery.enums.SqlTypeNames.INT64, 'NULLABLE'), - bigquery.SchemaField('kind', bigquery.enums.SqlTypeNames.STRING, 'NULLABLE'), - bigquery.SchemaField('timestamp', bigquery.enums.SqlTypeNames.TIMESTAMP, 'NULLABLE'), - bigquery.SchemaField('related', - bigquery.enums.SqlTypeNames.STRUCT, - 'NULLABLE', - fields=[ - bigquery.SchemaField('type', bigquery.enums.SqlTypeNames.STRING, 'NULLABLE'), - bigquery.SchemaField('id', bigquery.enums.SqlTypeNames.INT64, 'NULLABLE'), - bigquery.SchemaField('slug', 
bigquery.enums.SqlTypeNames.STRING, 'NULLABLE'), - ]), + "schema": [ + bigquery.SchemaField("user_id", bigquery.enums.SqlTypeNames.INT64, "NULLABLE"), + bigquery.SchemaField("kind", bigquery.enums.SqlTypeNames.STRING, "NULLABLE"), + bigquery.SchemaField("timestamp", bigquery.enums.SqlTypeNames.TIMESTAMP, "NULLABLE"), bigquery.SchemaField( - 'meta', + "related", bigquery.enums.SqlTypeNames.STRUCT, - 'NULLABLE', - fields=[bigquery.SchemaField(x, bigquery.enums.SqlTypeNames.STRING, 'NULLABLE') for x in meta]), + "NULLABLE", + fields=[ + bigquery.SchemaField("type", bigquery.enums.SqlTypeNames.STRING, "NULLABLE"), + bigquery.SchemaField("id", bigquery.enums.SqlTypeNames.INT64, "NULLABLE"), + bigquery.SchemaField("slug", bigquery.enums.SqlTypeNames.STRING, "NULLABLE"), + ], + ), + bigquery.SchemaField( + "meta", + bigquery.enums.SqlTypeNames.STRUCT, + "NULLABLE", + fields=[bigquery.SchemaField(x, bigquery.enums.SqlTypeNames.STRING, "NULLABLE") for x in meta], + ), ], }, ] - assert decompress_and_parse(cache.get('activity:worker-1')) == [ + assert decompress_and_parse(cache.get("activity:worker-1")) == [ { - 'data': { - 'id': 'c5d8cbc54a894dd0983caae1b8507092', - 'user_id': 1, - 'kind': kind, - 'timestamp': UTC_NOW.isoformat(), - 'related': { - 'type': 'auth.User', - 'slug': None, - 'id': 1, + "data": { + "id": "c5d8cbc54a894dd0983caae1b8507092", + "user_id": 1, + "kind": kind, + "timestamp": UTC_NOW.isoformat(), + "related": { + "type": "auth.User", + "slug": None, + "id": 1, }, - 'meta': meta + "meta": meta, }, - 'schema': [ - bigquery.SchemaField('user_id', bigquery.enums.SqlTypeNames.INT64, 'NULLABLE'), - bigquery.SchemaField('kind', bigquery.enums.SqlTypeNames.STRING, 'NULLABLE'), - bigquery.SchemaField('timestamp', bigquery.enums.SqlTypeNames.TIMESTAMP, 'NULLABLE'), - bigquery.SchemaField('related', - bigquery.enums.SqlTypeNames.STRUCT, - 'NULLABLE', - fields=[ - bigquery.SchemaField('type', bigquery.enums.SqlTypeNames.STRING, 'NULLABLE'), - bigquery.SchemaField('id', bigquery.enums.SqlTypeNames.INT64, 'NULLABLE'), - bigquery.SchemaField('slug', bigquery.enums.SqlTypeNames.STRING, 'NULLABLE'), - ]), + "schema": [ + bigquery.SchemaField("user_id", bigquery.enums.SqlTypeNames.INT64, "NULLABLE"), + bigquery.SchemaField("kind", bigquery.enums.SqlTypeNames.STRING, "NULLABLE"), + bigquery.SchemaField("timestamp", bigquery.enums.SqlTypeNames.TIMESTAMP, "NULLABLE"), + bigquery.SchemaField( + "related", + bigquery.enums.SqlTypeNames.STRUCT, + "NULLABLE", + fields=[ + bigquery.SchemaField("type", bigquery.enums.SqlTypeNames.STRING, "NULLABLE"), + bigquery.SchemaField("id", bigquery.enums.SqlTypeNames.INT64, "NULLABLE"), + bigquery.SchemaField("slug", bigquery.enums.SqlTypeNames.STRING, "NULLABLE"), + ], + ), bigquery.SchemaField( - 'meta', + "meta", bigquery.enums.SqlTypeNames.STRUCT, - 'NULLABLE', - fields=[bigquery.SchemaField(x, bigquery.enums.SqlTypeNames.STRING, 'NULLABLE') for x in meta]), + "NULLABLE", + fields=[bigquery.SchemaField(x, bigquery.enums.SqlTypeNames.STRING, "NULLABLE") for x in meta], + ), ], }, ] diff --git a/breathecode/activity/tests/tasks/tests_get_attendancy_log.py b/breathecode/activity/tests/tasks/tests_get_attendancy_log.py index c43d165ac..18523d5ac 100644 --- a/breathecode/activity/tests/tasks/tests_get_attendancy_log.py +++ b/breathecode/activity/tests/tasks/tests_get_attendancy_log.py @@ -1,6 +1,7 @@ """ Test /answer """ + import logging from datetime import timedelta from unittest.mock import MagicMock, call, patch @@ -19,18 +20,18 @@ def get_datastore_seed(slug, 
day, data={}): return { - 'academy_id': 1, - 'cohort': slug, - 'created_at': (timezone.now() + timedelta(days=1)).isoformat() + 'Z', - 'data': { - 'cohort': slug, - 'day': str(day), + "academy_id": 1, + "cohort": slug, + "created_at": (timezone.now() + timedelta(days=1)).isoformat() + "Z", + "data": { + "cohort": slug, + "day": str(day), }, - 'day': day, - 'email': 'konan@naruto.io', - 'slug': 'breathecode_login', - 'user_agent': 'bc/test', - 'user_id': 1, + "day": day, + "email": "konan@naruto.io", + "slug": "breathecode_login", + "user_agent": "bc/test", + "user_id": 1, **data, } @@ -40,20 +41,20 @@ class MediaTestSuite(MediaTestCase): 🔽🔽🔽 Cohort not found """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch.object(NDB, '__init__', MagicMock(return_value=None)) - @patch.object(NDB, 'fetch', MagicMock(return_value=[])) - @patch('breathecode.activity.tasks.get_attendancy_log_per_cohort_user.delay', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch.object(NDB, "__init__", MagicMock(return_value=None)) + @patch.object(NDB, "fetch", MagicMock(return_value=[])) + @patch("breathecode.activity.tasks.get_attendancy_log_per_cohort_user.delay", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_not_found(self): get_attendancy_log.delay(1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) - self.assertEqual(logging.Logger.info.call_args_list, [call('Executing get_attendancy_log')]) - self.assertEqual(logging.Logger.error.call_args_list, [call('Cohort not found')]) + self.assertEqual(logging.Logger.info.call_args_list, [call("Executing get_attendancy_log")]) + self.assertEqual(logging.Logger.error.call_args_list, [call("Cohort not found")]) self.assertEqual(NDB.__init__.call_args_list, []) self.assertEqual(NDB.fetch.call_args_list, []) @@ -63,29 +64,35 @@ def test_not_found(self): 🔽🔽🔽 Syllabus not found """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch.object(NDB, '__init__', MagicMock(return_value=None)) - @patch.object(NDB, 'fetch', MagicMock(return_value=[])) - @patch('breathecode.activity.tasks.get_attendancy_log_per_cohort_user.delay', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch.object(NDB, "__init__", MagicMock(return_value=None)) + @patch.object(NDB, "fetch", MagicMock(return_value=[])) + @patch("breathecode.activity.tasks.get_attendancy_log_per_cohort_user.delay", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_syllabus_not_found(self): - with patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()): + with 
patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()): model = self.bc.database.create(cohort=1) logging.Logger.info.call_args_list = [] get_attendancy_log.delay(1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), [ - self.bc.format.to_dict(model.cohort), - ]) + self.assertEqual( + self.bc.database.list_of("admissions.Cohort"), + [ + self.bc.format.to_dict(model.cohort), + ], + ) - self.assertEqual(logging.Logger.info.call_args_list, [call('Executing get_attendancy_log')]) - self.assertEqual(logging.Logger.error.call_args_list, [ - call(f'Cohort {model.cohort.slug} does not have syllabus assigned'), - ]) + self.assertEqual(logging.Logger.info.call_args_list, [call("Executing get_attendancy_log")]) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call(f"Cohort {model.cohort.slug} does not have syllabus assigned"), + ], + ) self.assertEqual(NDB.__init__.call_args_list, []) self.assertEqual(NDB.fetch.call_args_list, []) @@ -95,60 +102,26 @@ def test_syllabus_not_found(self): 🔽🔽🔽 SyllabusVersion has json with bad format """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch.object(NDB, '__init__', MagicMock(return_value=None)) - @patch.object(NDB, 'fetch', MagicMock(return_value=[])) - @patch('breathecode.activity.tasks.get_attendancy_log_per_cohort_user.delay', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch.object(NDB, "__init__", MagicMock(return_value=None)) + @patch.object(NDB, "fetch", MagicMock(return_value=[])) + @patch("breathecode.activity.tasks.get_attendancy_log_per_cohort_user.delay", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_syllabus_version_with_json_with_bad_format(self): syllabus_versions = [ - { - 'json': {} - }, - { - 'json': [] - }, - { - 'json': { - 'days': None - } - }, - { - 'json': { - 'days': {} - } - }, - { - 'json': { - 'days': [{}] - } - }, - { - 'json': { - 'days': [{ - 'id': 1 - }] - } - }, - { - 'json': { - 'days': [{ - 'duration_in_days': 1 - }] - } - }, - { - 'json': { - 'days': [{ - 'label': 1 - }] - } - }, + {"json": {}}, + {"json": []}, + {"json": {"days": None}}, + {"json": {"days": {}}}, + {"json": {"days": [{}]}}, + {"json": {"days": [{"id": 1}]}}, + {"json": {"days": [{"duration_in_days": 1}]}}, + {"json": {"days": [{"label": 1}]}}, ] for syllabus_version in syllabus_versions: - with patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()): + with patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()): model = self.bc.database.create(cohort=1, syllabus_version=syllabus_version) logging.Logger.info.call_args_list = [] @@ -156,57 +129,73 @@ def test_syllabus_version_with_json_with_bad_format(self): get_attendancy_log.delay(model.cohort.id) - self.assertEqual(logging.Logger.info.call_args_list, [call('Executing get_attendancy_log')]) - self.assertEqual(logging.Logger.error.call_args_list, [ - call(f'Cohort {model.cohort.slug} have syllabus with bad format'), - ]) - - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), [ - self.bc.format.to_dict(model.cohort), - ]) - - 
self.assertEqual(self.bc.database.list_of('admissions.SyllabusVersion'), [ - self.bc.format.to_dict(model.syllabus_version), - ]) + self.assertEqual(logging.Logger.info.call_args_list, [call("Executing get_attendancy_log")]) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call(f"Cohort {model.cohort.slug} have syllabus with bad format"), + ], + ) + + self.assertEqual( + self.bc.database.list_of("admissions.Cohort"), + [ + self.bc.format.to_dict(model.cohort), + ], + ) + + self.assertEqual( + self.bc.database.list_of("admissions.SyllabusVersion"), + [ + self.bc.format.to_dict(model.syllabus_version), + ], + ) self.assertEqual(NDB.__init__.call_args_list, []) self.assertEqual(NDB.fetch.call_args_list, []) # teardown - self.bc.database.delete('admissions.Cohort') - self.bc.database.delete('admissions.SyllabusVersion') + self.bc.database.delete("admissions.Cohort") + self.bc.database.delete("admissions.SyllabusVersion") self.assertEqual(tasks.get_attendancy_log_per_cohort_user.delay.call_args_list, []) """ 🔽🔽🔽 The student attended the first day, the rest of day are ignored """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch.object(NDB, '__init__', MagicMock(return_value=None)) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.activity.tasks.get_attendancy_log_per_cohort_user.delay', MagicMock()) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch.object(NDB, "__init__", MagicMock(return_value=None)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.activity.tasks.get_attendancy_log_per_cohort_user.delay", MagicMock()) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_the_student_attended_the_first_day(self): cases = [ - ([get_datastore_seed(self.bc.fake.slug(), 1, {'slug': 'classroom_attendance'})], [], [1], []), - ([], [get_datastore_seed(self.bc.fake.slug(), 1, {'slug': 'classroom_unattendance'})], [], [1]), - ([get_datastore_seed(self.bc.fake.slug(), 1, {'slug': 'classroom_attendance'})], - [get_datastore_seed(self.bc.fake.slug(), 1, {'slug': 'classroom_unattendance'})], [1], [1]), + ([get_datastore_seed(self.bc.fake.slug(), 1, {"slug": "classroom_attendance"})], [], [1], []), + ([], [get_datastore_seed(self.bc.fake.slug(), 1, {"slug": "classroom_unattendance"})], [], [1]), + ( + [get_datastore_seed(self.bc.fake.slug(), 1, {"slug": "classroom_attendance"})], + [get_datastore_seed(self.bc.fake.slug(), 1, {"slug": "classroom_unattendance"})], + [1], + [1], + ), ] syllabus_version = { - 'json': { - 'days': [{ - 'id': x, - 'duration_in_days': 1, - 'label': self.bc.fake.slug(), - } for x in range(1, 4)] + "json": { + "days": [ + { + "id": x, + "duration_in_days": 1, + "label": self.bc.fake.slug(), + } + for x in range(1, 4) + ] } } - with patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()): + with 
patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()): model = self.bc.database.create(cohort=1, syllabus_version=syllabus_version) for attendance_seed, unattendance_seed, attendance_ids, unattendance_ids in cases: @@ -216,42 +205,65 @@ def test_the_student_attended_the_first_day(self): logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] - with patch.object(NDB, 'fetch', MagicMock(side_effect=[attendance_seed, unattendance_seed])): + with patch.object(NDB, "fetch", MagicMock(side_effect=[attendance_seed, unattendance_seed])): get_attendancy_log.delay(model.cohort.id) self.assertEqual(NDB.__init__.call_args_list, [call(StudentActivity)]) - self.assertEqual(NDB.fetch.call_args_list, [ - call([StudentActivity.cohort == model.cohort.slug, StudentActivity.slug == 'classroom_attendance']), - call([ - StudentActivity.cohort == model.cohort.slug, StudentActivity.slug == 'classroom_unattendance' - ]), - ]) - - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Executing get_attendancy_log'), - call('History log saved'), - ]) + self.assertEqual( + NDB.fetch.call_args_list, + [ + call( + [ + StudentActivity.cohort == model.cohort.slug, + StudentActivity.slug == "classroom_attendance", + ] + ), + call( + [ + StudentActivity.cohort == model.cohort.slug, + StudentActivity.slug == "classroom_unattendance", + ] + ), + ], + ) + + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Executing get_attendancy_log"), + call("History log saved"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, []) history_log = {} - for day in model.syllabus_version.json['days'][:1]: - history_log[day['label']] = { - 'current_module': 'unknown', - 'teacher_comments': None, - 'attendance_ids': attendance_ids, - 'unattendance_ids': unattendance_ids, - 'updated_at': str(UTC_NOW), + for day in model.syllabus_version.json["days"][:1]: + history_log[day["label"]] = { + "current_module": "unknown", + "teacher_comments": None, + "attendance_ids": attendance_ids, + "unattendance_ids": unattendance_ids, + "updated_at": str(UTC_NOW), } - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), - [{ - **self.bc.format.to_dict(model.cohort), - 'history_log': history_log, - }]) - - self.assertEqual(self.bc.database.list_of('admissions.SyllabusVersion'), [{ - **self.bc.format.to_dict(model.syllabus_version), - }]) + self.assertEqual( + self.bc.database.list_of("admissions.Cohort"), + [ + { + **self.bc.format.to_dict(model.cohort), + "history_log": history_log, + } + ], + ) + + self.assertEqual( + self.bc.database.list_of("admissions.SyllabusVersion"), + [ + { + **self.bc.format.to_dict(model.syllabus_version), + } + ], + ) # teardown NDB.__init__.call_args_list = [] @@ -261,156 +273,270 @@ def test_the_student_attended_the_first_day(self): 🔽🔽🔽 The students attended all days """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch.object(NDB, '__init__', MagicMock(return_value=None)) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.activity.tasks.get_attendancy_log_per_cohort_user.delay', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch.object(NDB, "__init__", MagicMock(return_value=None)) + 
@patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.activity.tasks.get_attendancy_log_per_cohort_user.delay", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_the_students_attended_all_days(self): cases = [ - ([ - get_datastore_seed(self.bc.fake.slug(), 1, { - 'slug': 'classroom_attendance', - 'user_id': 1, - 'day': 1, - }), - get_datastore_seed(self.bc.fake.slug(), 2, { - 'slug': 'classroom_attendance', - 'user_id': 1, - 'day': 2, - }), - get_datastore_seed(self.bc.fake.slug(), 3, { - 'slug': 'classroom_attendance', - 'user_id': 1, - 'day': 3, - }), - get_datastore_seed(self.bc.fake.slug(), 1, { - 'slug': 'classroom_attendance', - 'user_id': 2, - 'day': 1, - }), - get_datastore_seed(self.bc.fake.slug(), 2, { - 'slug': 'classroom_attendance', - 'user_id': 2, - 'day': 2, - }), - get_datastore_seed(self.bc.fake.slug(), 3, { - 'slug': 'classroom_attendance', - 'user_id': 2, - 'day': 3, - }), - ], [], [1, 2], []), - ([], [ - get_datastore_seed(self.bc.fake.slug(), 1, { - 'slug': 'classroom_unattendance', - 'user_id': 1, - 'day': 1, - }), - get_datastore_seed(self.bc.fake.slug(), 2, { - 'slug': 'classroom_unattendance', - 'user_id': 1, - 'day': 2, - }), - get_datastore_seed(self.bc.fake.slug(), 3, { - 'slug': 'classroom_unattendance', - 'user_id': 1, - 'day': 3, - }), - get_datastore_seed(self.bc.fake.slug(), 1, { - 'slug': 'classroom_unattendance', - 'user_id': 2, - 'day': 1, - }), - get_datastore_seed(self.bc.fake.slug(), 2, { - 'slug': 'classroom_unattendance', - 'user_id': 2, - 'day': 2, - }), - get_datastore_seed(self.bc.fake.slug(), 3, { - 'slug': 'classroom_unattendance', - 'user_id': 2, - 'day': 3, - }), - ], [], [1, 2]), - ([ - get_datastore_seed(self.bc.fake.slug(), 1, { - 'slug': 'classroom_attendance', - 'user_id': 1, - 'day': 1, - }), - get_datastore_seed(self.bc.fake.slug(), 2, { - 'slug': 'classroom_attendance', - 'user_id': 1, - 'day': 2, - }), - get_datastore_seed(self.bc.fake.slug(), 3, { - 'slug': 'classroom_attendance', - 'user_id': 1, - 'day': 3, - }), - get_datastore_seed(self.bc.fake.slug(), 1, { - 'slug': 'classroom_attendance', - 'user_id': 2, - 'day': 1, - }), - get_datastore_seed(self.bc.fake.slug(), 2, { - 'slug': 'classroom_attendance', - 'user_id': 2, - 'day': 2, - }), - get_datastore_seed(self.bc.fake.slug(), 3, { - 'slug': 'classroom_attendance', - 'user_id': 2, - 'day': 3, - }), - ], [ - get_datastore_seed(self.bc.fake.slug(), 1, { - 'slug': 'classroom_unattendance', - 'user_id': 1, - 'day': 1, - }), - get_datastore_seed(self.bc.fake.slug(), 2, { - 'slug': 'classroom_unattendance', - 'user_id': 1, - 'day': 2, - }), - get_datastore_seed(self.bc.fake.slug(), 3, { - 'slug': 'classroom_unattendance', - 'user_id': 1, - 'day': 3, - }), - get_datastore_seed(self.bc.fake.slug(), 1, { - 'slug': 'classroom_unattendance', - 'user_id': 2, - 'day': 1, - }), - get_datastore_seed(self.bc.fake.slug(), 2, { - 'slug': 'classroom_unattendance', - 'user_id': 2, - 'day': 2, - }), - get_datastore_seed(self.bc.fake.slug(), 3, { - 'slug': 'classroom_unattendance', - 'user_id': 2, - 'day': 3, - }), - ], [1, 2], [1, 2]), + ( + [ + get_datastore_seed( + self.bc.fake.slug(), + 1, + { + "slug": "classroom_attendance", + "user_id": 1, + "day": 1, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 2, + { + "slug": "classroom_attendance", + "user_id": 1, + "day": 2, + }, + ), + 
get_datastore_seed( + self.bc.fake.slug(), + 3, + { + "slug": "classroom_attendance", + "user_id": 1, + "day": 3, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 1, + { + "slug": "classroom_attendance", + "user_id": 2, + "day": 1, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 2, + { + "slug": "classroom_attendance", + "user_id": 2, + "day": 2, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 3, + { + "slug": "classroom_attendance", + "user_id": 2, + "day": 3, + }, + ), + ], + [], + [1, 2], + [], + ), + ( + [], + [ + get_datastore_seed( + self.bc.fake.slug(), + 1, + { + "slug": "classroom_unattendance", + "user_id": 1, + "day": 1, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 2, + { + "slug": "classroom_unattendance", + "user_id": 1, + "day": 2, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 3, + { + "slug": "classroom_unattendance", + "user_id": 1, + "day": 3, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 1, + { + "slug": "classroom_unattendance", + "user_id": 2, + "day": 1, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 2, + { + "slug": "classroom_unattendance", + "user_id": 2, + "day": 2, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 3, + { + "slug": "classroom_unattendance", + "user_id": 2, + "day": 3, + }, + ), + ], + [], + [1, 2], + ), + ( + [ + get_datastore_seed( + self.bc.fake.slug(), + 1, + { + "slug": "classroom_attendance", + "user_id": 1, + "day": 1, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 2, + { + "slug": "classroom_attendance", + "user_id": 1, + "day": 2, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 3, + { + "slug": "classroom_attendance", + "user_id": 1, + "day": 3, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 1, + { + "slug": "classroom_attendance", + "user_id": 2, + "day": 1, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 2, + { + "slug": "classroom_attendance", + "user_id": 2, + "day": 2, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 3, + { + "slug": "classroom_attendance", + "user_id": 2, + "day": 3, + }, + ), + ], + [ + get_datastore_seed( + self.bc.fake.slug(), + 1, + { + "slug": "classroom_unattendance", + "user_id": 1, + "day": 1, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 2, + { + "slug": "classroom_unattendance", + "user_id": 1, + "day": 2, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 3, + { + "slug": "classroom_unattendance", + "user_id": 1, + "day": 3, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 1, + { + "slug": "classroom_unattendance", + "user_id": 2, + "day": 1, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 2, + { + "slug": "classroom_unattendance", + "user_id": 2, + "day": 2, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 3, + { + "slug": "classroom_unattendance", + "user_id": 2, + "day": 3, + }, + ), + ], + [1, 2], + [1, 2], + ), ] syllabus_version = { - 'json': { - 'days': [{ - 'id': x, - 'duration_in_days': 1, - 'label': self.bc.fake.slug(), - } for x in range(1, 4)] + "json": { + "days": [ + { + "id": x, + "duration_in_days": 1, + "label": self.bc.fake.slug(), + } + for x in range(1, 4) + ] } } - cohort = {'current_day': 3} + cohort = {"current_day": 3} - with patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()): + with patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()): model = self.bc.database.create(cohort=cohort, syllabus_version=syllabus_version) for attendance_seed, unattendance_seed, attendance_ids, 
unattendance_ids in cases: @@ -420,42 +546,65 @@ def test_the_students_attended_all_days(self): logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] - with patch.object(NDB, 'fetch', MagicMock(side_effect=[attendance_seed, unattendance_seed])): + with patch.object(NDB, "fetch", MagicMock(side_effect=[attendance_seed, unattendance_seed])): get_attendancy_log.delay(model.cohort.id) self.assertEqual(NDB.__init__.call_args_list, [call(StudentActivity)]) - self.assertEqual(NDB.fetch.call_args_list, [ - call([StudentActivity.cohort == model.cohort.slug, StudentActivity.slug == 'classroom_attendance']), - call([ - StudentActivity.cohort == model.cohort.slug, StudentActivity.slug == 'classroom_unattendance' - ]), - ]) - - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Executing get_attendancy_log'), - call('History log saved'), - ]) + self.assertEqual( + NDB.fetch.call_args_list, + [ + call( + [ + StudentActivity.cohort == model.cohort.slug, + StudentActivity.slug == "classroom_attendance", + ] + ), + call( + [ + StudentActivity.cohort == model.cohort.slug, + StudentActivity.slug == "classroom_unattendance", + ] + ), + ], + ) + + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Executing get_attendancy_log"), + call("History log saved"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, []) history_log = {} - for day in model.syllabus_version.json['days']: - history_log[day['label']] = { - 'current_module': 'unknown', - 'teacher_comments': None, - 'attendance_ids': attendance_ids, - 'unattendance_ids': unattendance_ids, - 'updated_at': str(UTC_NOW), + for day in model.syllabus_version.json["days"]: + history_log[day["label"]] = { + "current_module": "unknown", + "teacher_comments": None, + "attendance_ids": attendance_ids, + "unattendance_ids": unattendance_ids, + "updated_at": str(UTC_NOW), } - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), - [{ - **self.bc.format.to_dict(model.cohort), - 'history_log': history_log, - }]) - - self.assertEqual(self.bc.database.list_of('admissions.SyllabusVersion'), [{ - **self.bc.format.to_dict(model.syllabus_version), - }]) + self.assertEqual( + self.bc.database.list_of("admissions.Cohort"), + [ + { + **self.bc.format.to_dict(model.cohort), + "history_log": history_log, + } + ], + ) + + self.assertEqual( + self.bc.database.list_of("admissions.SyllabusVersion"), + [ + { + **self.bc.format.to_dict(model.syllabus_version), + } + ], + ) # teardown NDB.__init__.call_args_list = [] @@ -465,202 +614,339 @@ def test_the_students_attended_all_days(self): 🔽🔽🔽 The students attended all days and the days is string """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.utils.ndb.NDB.__init__', MagicMock(return_value=None)) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.activity.tasks.get_attendancy_log_per_cohort_user.delay', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.utils.ndb.NDB.__init__", MagicMock(return_value=None)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.activity.tasks.get_attendancy_log_per_cohort_user.delay", MagicMock()) + 
@patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_the_students_attended_all_days__the_days_is_string(self): cases = [ - ([ - get_datastore_seed(self.bc.fake.slug(), 1, { - 'slug': 'classroom_attendance', - 'user_id': 1, - 'day': '1', - }), - get_datastore_seed(self.bc.fake.slug(), 2, { - 'slug': 'classroom_attendance', - 'user_id': 1, - 'day': '2', - }), - get_datastore_seed(self.bc.fake.slug(), 3, { - 'slug': 'classroom_attendance', - 'user_id': 1, - 'day': '3', - }), - get_datastore_seed(self.bc.fake.slug(), 1, { - 'slug': 'classroom_attendance', - 'user_id': 2, - 'day': '1', - }), - get_datastore_seed(self.bc.fake.slug(), 2, { - 'slug': 'classroom_attendance', - 'user_id': 2, - 'day': '2', - }), - get_datastore_seed(self.bc.fake.slug(), 3, { - 'slug': 'classroom_attendance', - 'user_id': 2, - 'day': '3', - }), - ], [], [1, 2], []), - ([], [ - get_datastore_seed(self.bc.fake.slug(), 1, { - 'slug': 'classroom_unattendance', - 'user_id': 1, - 'day': '1', - }), - get_datastore_seed(self.bc.fake.slug(), 2, { - 'slug': 'classroom_unattendance', - 'user_id': 1, - 'day': '2', - }), - get_datastore_seed(self.bc.fake.slug(), 3, { - 'slug': 'classroom_unattendance', - 'user_id': 1, - 'day': '3', - }), - get_datastore_seed(self.bc.fake.slug(), 1, { - 'slug': 'classroom_unattendance', - 'user_id': 2, - 'day': '1', - }), - get_datastore_seed(self.bc.fake.slug(), 2, { - 'slug': 'classroom_unattendance', - 'user_id': 2, - 'day': '2', - }), - get_datastore_seed(self.bc.fake.slug(), 3, { - 'slug': 'classroom_unattendance', - 'user_id': 2, - 'day': '3', - }), - ], [], [1, 2]), - ([ - get_datastore_seed(self.bc.fake.slug(), 1, { - 'slug': 'classroom_attendance', - 'user_id': 1, - 'day': '1', - }), - get_datastore_seed(self.bc.fake.slug(), 2, { - 'slug': 'classroom_attendance', - 'user_id': 1, - 'day': '2', - }), - get_datastore_seed(self.bc.fake.slug(), 3, { - 'slug': 'classroom_attendance', - 'user_id': 1, - 'day': '3', - }), - get_datastore_seed(self.bc.fake.slug(), 1, { - 'slug': 'classroom_attendance', - 'user_id': 2, - 'day': '1', - }), - get_datastore_seed(self.bc.fake.slug(), 2, { - 'slug': 'classroom_attendance', - 'user_id': 2, - 'day': '2', - }), - get_datastore_seed(self.bc.fake.slug(), 3, { - 'slug': 'classroom_attendance', - 'user_id': 2, - 'day': '3', - }), - ], [ - get_datastore_seed(self.bc.fake.slug(), 1, { - 'slug': 'classroom_unattendance', - 'user_id': 1, - 'day': '1', - }), - get_datastore_seed(self.bc.fake.slug(), 2, { - 'slug': 'classroom_unattendance', - 'user_id': 1, - 'day': '2', - }), - get_datastore_seed(self.bc.fake.slug(), 3, { - 'slug': 'classroom_unattendance', - 'user_id': 1, - 'day': '3', - }), - get_datastore_seed(self.bc.fake.slug(), 1, { - 'slug': 'classroom_unattendance', - 'user_id': 2, - 'day': '1', - }), - get_datastore_seed(self.bc.fake.slug(), 2, { - 'slug': 'classroom_unattendance', - 'user_id': 2, - 'day': '2', - }), - get_datastore_seed(self.bc.fake.slug(), 3, { - 'slug': 'classroom_unattendance', - 'user_id': 2, - 'day': '3', - }), - ], [1, 2], [1, 2]), + ( + [ + get_datastore_seed( + self.bc.fake.slug(), + 1, + { + "slug": "classroom_attendance", + "user_id": 1, + "day": "1", + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 2, + { + "slug": "classroom_attendance", + "user_id": 1, + "day": "2", + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 3, + { + "slug": "classroom_attendance", + 
"user_id": 1, + "day": "3", + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 1, + { + "slug": "classroom_attendance", + "user_id": 2, + "day": "1", + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 2, + { + "slug": "classroom_attendance", + "user_id": 2, + "day": "2", + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 3, + { + "slug": "classroom_attendance", + "user_id": 2, + "day": "3", + }, + ), + ], + [], + [1, 2], + [], + ), + ( + [], + [ + get_datastore_seed( + self.bc.fake.slug(), + 1, + { + "slug": "classroom_unattendance", + "user_id": 1, + "day": "1", + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 2, + { + "slug": "classroom_unattendance", + "user_id": 1, + "day": "2", + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 3, + { + "slug": "classroom_unattendance", + "user_id": 1, + "day": "3", + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 1, + { + "slug": "classroom_unattendance", + "user_id": 2, + "day": "1", + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 2, + { + "slug": "classroom_unattendance", + "user_id": 2, + "day": "2", + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 3, + { + "slug": "classroom_unattendance", + "user_id": 2, + "day": "3", + }, + ), + ], + [], + [1, 2], + ), + ( + [ + get_datastore_seed( + self.bc.fake.slug(), + 1, + { + "slug": "classroom_attendance", + "user_id": 1, + "day": "1", + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 2, + { + "slug": "classroom_attendance", + "user_id": 1, + "day": "2", + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 3, + { + "slug": "classroom_attendance", + "user_id": 1, + "day": "3", + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 1, + { + "slug": "classroom_attendance", + "user_id": 2, + "day": "1", + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 2, + { + "slug": "classroom_attendance", + "user_id": 2, + "day": "2", + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 3, + { + "slug": "classroom_attendance", + "user_id": 2, + "day": "3", + }, + ), + ], + [ + get_datastore_seed( + self.bc.fake.slug(), + 1, + { + "slug": "classroom_unattendance", + "user_id": 1, + "day": "1", + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 2, + { + "slug": "classroom_unattendance", + "user_id": 1, + "day": "2", + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 3, + { + "slug": "classroom_unattendance", + "user_id": 1, + "day": "3", + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 1, + { + "slug": "classroom_unattendance", + "user_id": 2, + "day": "1", + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 2, + { + "slug": "classroom_unattendance", + "user_id": 2, + "day": "2", + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 3, + { + "slug": "classroom_unattendance", + "user_id": 2, + "day": "3", + }, + ), + ], + [1, 2], + [1, 2], + ), ] syllabus_version = { - 'json': { - 'days': [{ - 'id': x, - 'duration_in_days': 1, - 'label': self.bc.fake.slug(), - } for x in range(1, 4)] + "json": { + "days": [ + { + "id": x, + "duration_in_days": 1, + "label": self.bc.fake.slug(), + } + for x in range(1, 4) + ] } } - cohort = {'current_day': 3} + cohort = {"current_day": 3} - with patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()): + with patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()): model = self.bc.database.create(cohort=cohort, syllabus_version=syllabus_version) for attendance_seed, unattendance_seed, attendance_ids, unattendance_ids in cases: - with 
patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()): + with patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()): model.cohort.history_log = {} model.cohort.save() logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] - with patch('breathecode.utils.ndb.NDB.fetch', MagicMock(side_effect=[attendance_seed, unattendance_seed])): + with patch("breathecode.utils.ndb.NDB.fetch", MagicMock(side_effect=[attendance_seed, unattendance_seed])): get_attendancy_log.delay(model.cohort.id) self.assertEqual(NDB.__init__.call_args_list, [call(StudentActivity)]) - self.assertEqual(NDB.fetch.call_args_list, [ - call([StudentActivity.cohort == model.cohort.slug, StudentActivity.slug == 'classroom_attendance']), - call([ - StudentActivity.cohort == model.cohort.slug, StudentActivity.slug == 'classroom_unattendance' - ]), - ]) - - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Executing get_attendancy_log'), - call('History log saved'), - ]) + self.assertEqual( + NDB.fetch.call_args_list, + [ + call( + [ + StudentActivity.cohort == model.cohort.slug, + StudentActivity.slug == "classroom_attendance", + ] + ), + call( + [ + StudentActivity.cohort == model.cohort.slug, + StudentActivity.slug == "classroom_unattendance", + ] + ), + ], + ) + + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Executing get_attendancy_log"), + call("History log saved"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, []) history_log = {} - for day in model.syllabus_version.json['days']: - history_log[day['label']] = { - 'current_module': 'unknown', - 'teacher_comments': None, - 'attendance_ids': attendance_ids, - 'unattendance_ids': unattendance_ids, - 'updated_at': str(UTC_NOW), + for day in model.syllabus_version.json["days"]: + history_log[day["label"]] = { + "current_module": "unknown", + "teacher_comments": None, + "attendance_ids": attendance_ids, + "unattendance_ids": unattendance_ids, + "updated_at": str(UTC_NOW), } - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), - [{ - **self.bc.format.to_dict(model.cohort), - 'history_log': history_log, - }]) - - self.assertEqual(self.bc.database.list_of('admissions.SyllabusVersion'), [{ - **self.bc.format.to_dict(model.syllabus_version), - }]) + self.assertEqual( + self.bc.database.list_of("admissions.Cohort"), + [ + { + **self.bc.format.to_dict(model.cohort), + "history_log": history_log, + } + ], + ) + + self.assertEqual( + self.bc.database.list_of("admissions.SyllabusVersion"), + [ + { + **self.bc.format.to_dict(model.syllabus_version), + } + ], + ) # teardown NDB.__init__.call_args_list = [] @@ -671,315 +957,557 @@ def test_the_students_attended_all_days__the_days_is_string(self): because the syllabus does'nt include that """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch.object(NDB, '__init__', MagicMock(return_value=None)) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.activity.tasks.get_attendancy_log_per_cohort_user.delay', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch.object(NDB, "__init__", MagicMock(return_value=None)) + @patch("django.utils.timezone.now", 
MagicMock(return_value=UTC_NOW)) + @patch("breathecode.activity.tasks.get_attendancy_log_per_cohort_user.delay", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_the_students_attended_all_days__duration_in_days(self): cases = [ - ([ - get_datastore_seed(self.bc.fake.slug(), 1, { - 'slug': 'classroom_attendance', - 'user_id': 1, - 'day': 1, - }), - get_datastore_seed(self.bc.fake.slug(), 2, { - 'slug': 'classroom_attendance', - 'user_id': 1, - 'day': 2, - }), - get_datastore_seed(self.bc.fake.slug(), 3, { - 'slug': 'classroom_attendance', - 'user_id': 1, - 'day': 3, - }), - get_datastore_seed(self.bc.fake.slug(), 4, { - 'slug': 'classroom_attendance', - 'user_id': 1, - 'day': 4, - }), - get_datastore_seed(self.bc.fake.slug(), 5, { - 'slug': 'classroom_attendance', - 'user_id': 1, - 'day': 5, - }), - get_datastore_seed(self.bc.fake.slug(), 6, { - 'slug': 'classroom_attendance', - 'user_id': 1, - 'day': 6, - }), - get_datastore_seed(self.bc.fake.slug(), 7, { - 'slug': 'classroom_attendance', - 'user_id': 1, - 'day': 7, - }), - get_datastore_seed(self.bc.fake.slug(), 1, { - 'slug': 'classroom_attendance', - 'user_id': 2, - 'day': 1, - }), - get_datastore_seed(self.bc.fake.slug(), 2, { - 'slug': 'classroom_attendance', - 'user_id': 2, - 'day': 2, - }), - get_datastore_seed(self.bc.fake.slug(), 3, { - 'slug': 'classroom_attendance', - 'user_id': 2, - 'day': 3, - }), - get_datastore_seed(self.bc.fake.slug(), 4, { - 'slug': 'classroom_attendance', - 'user_id': 2, - 'day': 4, - }), - get_datastore_seed(self.bc.fake.slug(), 5, { - 'slug': 'classroom_attendance', - 'user_id': 2, - 'day': 5, - }), - get_datastore_seed(self.bc.fake.slug(), 6, { - 'slug': 'classroom_attendance', - 'user_id': 2, - 'day': 6, - }), - get_datastore_seed(self.bc.fake.slug(), 7, { - 'slug': 'classroom_attendance', - 'user_id': 2, - 'day': 7, - }), - ], [], [1, 2], []), - ([], [ - get_datastore_seed(self.bc.fake.slug(), 1, { - 'slug': 'classroom_unattendance', - 'user_id': 1, - 'day': 1, - }), - get_datastore_seed(self.bc.fake.slug(), 2, { - 'slug': 'classroom_unattendance', - 'user_id': 1, - 'day': 2, - }), - get_datastore_seed(self.bc.fake.slug(), 3, { - 'slug': 'classroom_unattendance', - 'user_id': 1, - 'day': 3, - }), - get_datastore_seed(self.bc.fake.slug(), 4, { - 'slug': 'classroom_unattendance', - 'user_id': 1, - 'day': 4, - }), - get_datastore_seed(self.bc.fake.slug(), 5, { - 'slug': 'classroom_unattendance', - 'user_id': 1, - 'day': 5, - }), - get_datastore_seed(self.bc.fake.slug(), 6, { - 'slug': 'classroom_unattendance', - 'user_id': 1, - 'day': 6, - }), - get_datastore_seed(self.bc.fake.slug(), 7, { - 'slug': 'classroom_unattendance', - 'user_id': 1, - 'day': 7, - }), - get_datastore_seed(self.bc.fake.slug(), 1, { - 'slug': 'classroom_unattendance', - 'user_id': 2, - 'day': 1, - }), - get_datastore_seed(self.bc.fake.slug(), 2, { - 'slug': 'classroom_unattendance', - 'user_id': 2, - 'day': 2, - }), - get_datastore_seed(self.bc.fake.slug(), 3, { - 'slug': 'classroom_unattendance', - 'user_id': 2, - 'day': 3, - }), - get_datastore_seed(self.bc.fake.slug(), 4, { - 'slug': 'classroom_unattendance', - 'user_id': 2, - 'day': 4, - }), - get_datastore_seed(self.bc.fake.slug(), 5, { - 'slug': 'classroom_unattendance', - 'user_id': 2, - 'day': 5, - }), - get_datastore_seed(self.bc.fake.slug(), 6, { - 'slug': 'classroom_unattendance', - 'user_id': 2, - 
'day': 6, - }), - get_datastore_seed(self.bc.fake.slug(), 7, { - 'slug': 'classroom_unattendance', - 'user_id': 2, - 'day': 7, - }), - ], [], [1, 2]), - ([ - get_datastore_seed(self.bc.fake.slug(), 1, { - 'slug': 'classroom_attendance', - 'user_id': 1, - 'day': 1, - }), - get_datastore_seed(self.bc.fake.slug(), 2, { - 'slug': 'classroom_attendance', - 'user_id': 1, - 'day': 2, - }), - get_datastore_seed(self.bc.fake.slug(), 3, { - 'slug': 'classroom_attendance', - 'user_id': 1, - 'day': 3, - }), - get_datastore_seed(self.bc.fake.slug(), 4, { - 'slug': 'classroom_attendance', - 'user_id': 1, - 'day': 4, - }), - get_datastore_seed(self.bc.fake.slug(), 5, { - 'slug': 'classroom_attendance', - 'user_id': 1, - 'day': 5, - }), - get_datastore_seed(self.bc.fake.slug(), 6, { - 'slug': 'classroom_attendance', - 'user_id': 1, - 'day': 6, - }), - get_datastore_seed(self.bc.fake.slug(), 7, { - 'slug': 'classroom_attendance', - 'user_id': 1, - 'day': 7, - }), - get_datastore_seed(self.bc.fake.slug(), 1, { - 'slug': 'classroom_attendance', - 'user_id': 2, - 'day': 1, - }), - get_datastore_seed(self.bc.fake.slug(), 2, { - 'slug': 'classroom_attendance', - 'user_id': 2, - 'day': 2, - }), - get_datastore_seed(self.bc.fake.slug(), 3, { - 'slug': 'classroom_attendance', - 'user_id': 2, - 'day': 3, - }), - get_datastore_seed(self.bc.fake.slug(), 4, { - 'slug': 'classroom_attendance', - 'user_id': 2, - 'day': 4, - }), - get_datastore_seed(self.bc.fake.slug(), 5, { - 'slug': 'classroom_attendance', - 'user_id': 2, - 'day': 5, - }), - get_datastore_seed(self.bc.fake.slug(), 6, { - 'slug': 'classroom_attendance', - 'user_id': 2, - 'day': 6, - }), - get_datastore_seed(self.bc.fake.slug(), 7, { - 'slug': 'classroom_attendance', - 'user_id': 2, - 'day': 7, - }), - ], [ - get_datastore_seed(self.bc.fake.slug(), 1, { - 'slug': 'classroom_unattendance', - 'user_id': 1, - 'day': 1, - }), - get_datastore_seed(self.bc.fake.slug(), 2, { - 'slug': 'classroom_unattendance', - 'user_id': 1, - 'day': 2, - }), - get_datastore_seed(self.bc.fake.slug(), 3, { - 'slug': 'classroom_unattendance', - 'user_id': 1, - 'day': 3, - }), - get_datastore_seed(self.bc.fake.slug(), 4, { - 'slug': 'classroom_unattendance', - 'user_id': 1, - 'day': 4, - }), - get_datastore_seed(self.bc.fake.slug(), 5, { - 'slug': 'classroom_unattendance', - 'user_id': 1, - 'day': 5, - }), - get_datastore_seed(self.bc.fake.slug(), 6, { - 'slug': 'classroom_unattendance', - 'user_id': 1, - 'day': 6, - }), - get_datastore_seed(self.bc.fake.slug(), 7, { - 'slug': 'classroom_unattendance', - 'user_id': 1, - 'day': 7, - }), - get_datastore_seed(self.bc.fake.slug(), 1, { - 'slug': 'classroom_unattendance', - 'user_id': 2, - 'day': 1, - }), - get_datastore_seed(self.bc.fake.slug(), 2, { - 'slug': 'classroom_unattendance', - 'user_id': 2, - 'day': 2, - }), - get_datastore_seed(self.bc.fake.slug(), 3, { - 'slug': 'classroom_unattendance', - 'user_id': 2, - 'day': 3, - }), - get_datastore_seed(self.bc.fake.slug(), 4, { - 'slug': 'classroom_unattendance', - 'user_id': 2, - 'day': 4, - }), - get_datastore_seed(self.bc.fake.slug(), 5, { - 'slug': 'classroom_unattendance', - 'user_id': 2, - 'day': 5, - }), - get_datastore_seed(self.bc.fake.slug(), 6, { - 'slug': 'classroom_unattendance', - 'user_id': 2, - 'day': 6, - }), - get_datastore_seed(self.bc.fake.slug(), 7, { - 'slug': 'classroom_unattendance', - 'user_id': 2, - 'day': 7, - }), - ], [1, 2], [1, 2]), + ( + [ + get_datastore_seed( + self.bc.fake.slug(), + 1, + { + "slug": "classroom_attendance", + "user_id": 1, + "day": 
1, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 2, + { + "slug": "classroom_attendance", + "user_id": 1, + "day": 2, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 3, + { + "slug": "classroom_attendance", + "user_id": 1, + "day": 3, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 4, + { + "slug": "classroom_attendance", + "user_id": 1, + "day": 4, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 5, + { + "slug": "classroom_attendance", + "user_id": 1, + "day": 5, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 6, + { + "slug": "classroom_attendance", + "user_id": 1, + "day": 6, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 7, + { + "slug": "classroom_attendance", + "user_id": 1, + "day": 7, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 1, + { + "slug": "classroom_attendance", + "user_id": 2, + "day": 1, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 2, + { + "slug": "classroom_attendance", + "user_id": 2, + "day": 2, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 3, + { + "slug": "classroom_attendance", + "user_id": 2, + "day": 3, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 4, + { + "slug": "classroom_attendance", + "user_id": 2, + "day": 4, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 5, + { + "slug": "classroom_attendance", + "user_id": 2, + "day": 5, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 6, + { + "slug": "classroom_attendance", + "user_id": 2, + "day": 6, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 7, + { + "slug": "classroom_attendance", + "user_id": 2, + "day": 7, + }, + ), + ], + [], + [1, 2], + [], + ), + ( + [], + [ + get_datastore_seed( + self.bc.fake.slug(), + 1, + { + "slug": "classroom_unattendance", + "user_id": 1, + "day": 1, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 2, + { + "slug": "classroom_unattendance", + "user_id": 1, + "day": 2, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 3, + { + "slug": "classroom_unattendance", + "user_id": 1, + "day": 3, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 4, + { + "slug": "classroom_unattendance", + "user_id": 1, + "day": 4, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 5, + { + "slug": "classroom_unattendance", + "user_id": 1, + "day": 5, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 6, + { + "slug": "classroom_unattendance", + "user_id": 1, + "day": 6, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 7, + { + "slug": "classroom_unattendance", + "user_id": 1, + "day": 7, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 1, + { + "slug": "classroom_unattendance", + "user_id": 2, + "day": 1, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 2, + { + "slug": "classroom_unattendance", + "user_id": 2, + "day": 2, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 3, + { + "slug": "classroom_unattendance", + "user_id": 2, + "day": 3, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 4, + { + "slug": "classroom_unattendance", + "user_id": 2, + "day": 4, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 5, + { + "slug": "classroom_unattendance", + "user_id": 2, + "day": 5, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 6, + { + "slug": "classroom_unattendance", + "user_id": 2, + "day": 6, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 7, + { + "slug": "classroom_unattendance", + "user_id": 2, + "day": 7, + }, + ), + ], + [], + [1, 2], + ), + ( + [ + get_datastore_seed( + 
self.bc.fake.slug(), + 1, + { + "slug": "classroom_attendance", + "user_id": 1, + "day": 1, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 2, + { + "slug": "classroom_attendance", + "user_id": 1, + "day": 2, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 3, + { + "slug": "classroom_attendance", + "user_id": 1, + "day": 3, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 4, + { + "slug": "classroom_attendance", + "user_id": 1, + "day": 4, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 5, + { + "slug": "classroom_attendance", + "user_id": 1, + "day": 5, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 6, + { + "slug": "classroom_attendance", + "user_id": 1, + "day": 6, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 7, + { + "slug": "classroom_attendance", + "user_id": 1, + "day": 7, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 1, + { + "slug": "classroom_attendance", + "user_id": 2, + "day": 1, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 2, + { + "slug": "classroom_attendance", + "user_id": 2, + "day": 2, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 3, + { + "slug": "classroom_attendance", + "user_id": 2, + "day": 3, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 4, + { + "slug": "classroom_attendance", + "user_id": 2, + "day": 4, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 5, + { + "slug": "classroom_attendance", + "user_id": 2, + "day": 5, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 6, + { + "slug": "classroom_attendance", + "user_id": 2, + "day": 6, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 7, + { + "slug": "classroom_attendance", + "user_id": 2, + "day": 7, + }, + ), + ], + [ + get_datastore_seed( + self.bc.fake.slug(), + 1, + { + "slug": "classroom_unattendance", + "user_id": 1, + "day": 1, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 2, + { + "slug": "classroom_unattendance", + "user_id": 1, + "day": 2, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 3, + { + "slug": "classroom_unattendance", + "user_id": 1, + "day": 3, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 4, + { + "slug": "classroom_unattendance", + "user_id": 1, + "day": 4, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 5, + { + "slug": "classroom_unattendance", + "user_id": 1, + "day": 5, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 6, + { + "slug": "classroom_unattendance", + "user_id": 1, + "day": 6, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 7, + { + "slug": "classroom_unattendance", + "user_id": 1, + "day": 7, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 1, + { + "slug": "classroom_unattendance", + "user_id": 2, + "day": 1, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 2, + { + "slug": "classroom_unattendance", + "user_id": 2, + "day": 2, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 3, + { + "slug": "classroom_unattendance", + "user_id": 2, + "day": 3, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 4, + { + "slug": "classroom_unattendance", + "user_id": 2, + "day": 4, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 5, + { + "slug": "classroom_unattendance", + "user_id": 2, + "day": 5, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 6, + { + "slug": "classroom_unattendance", + "user_id": 2, + "day": 6, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 7, + { + "slug": "classroom_unattendance", + "user_id": 2, + "day": 7, + }, + ), + ], + [1, 
2], + [1, 2], + ), ] syllabus_version = { - 'json': { - 'days': [{ - 'id': x, - 'duration_in_days': x, - 'label': self.bc.fake.slug(), - } for x in range(1, 4)] + "json": { + "days": [ + { + "id": x, + "duration_in_days": x, + "label": self.bc.fake.slug(), + } + for x in range(1, 4) + ] } } - cohort = {'current_day': 6} + cohort = {"current_day": 6} - with patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()): + with patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()): model = self.bc.database.create(cohort=cohort, syllabus_version=syllabus_version) for attendance_seed, unattendance_seed, attendance_ids, unattendance_ids in cases: @@ -989,80 +1517,102 @@ def test_the_students_attended_all_days__duration_in_days(self): logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] - with patch.object(NDB, 'fetch', MagicMock(side_effect=[attendance_seed, unattendance_seed])): + with patch.object(NDB, "fetch", MagicMock(side_effect=[attendance_seed, unattendance_seed])): get_attendancy_log.delay(model.cohort.id) self.assertEqual(NDB.__init__.call_args_list, [call(StudentActivity)]) - self.assertEqual(NDB.fetch.call_args_list, [ - call([StudentActivity.cohort == model.cohort.slug, StudentActivity.slug == 'classroom_attendance']), - call([ - StudentActivity.cohort == model.cohort.slug, StudentActivity.slug == 'classroom_unattendance' - ]), - ]) - - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Executing get_attendancy_log'), - call('History log saved'), - ]) + self.assertEqual( + NDB.fetch.call_args_list, + [ + call( + [ + StudentActivity.cohort == model.cohort.slug, + StudentActivity.slug == "classroom_attendance", + ] + ), + call( + [ + StudentActivity.cohort == model.cohort.slug, + StudentActivity.slug == "classroom_unattendance", + ] + ), + ], + ) + + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Executing get_attendancy_log"), + call("History log saved"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, []) - day1 = model.syllabus_version.json['days'][0] - day2 = model.syllabus_version.json['days'][1] - day3 = model.syllabus_version.json['days'][2] + day1 = model.syllabus_version.json["days"][0] + day2 = model.syllabus_version.json["days"][1] + day3 = model.syllabus_version.json["days"][2] history_log = { - day1['label']: { - 'current_module': 'unknown', - 'teacher_comments': None, - 'attendance_ids': attendance_ids, - 'unattendance_ids': unattendance_ids, - 'updated_at': str(UTC_NOW), + day1["label"]: { + "current_module": "unknown", + "teacher_comments": None, + "attendance_ids": attendance_ids, + "unattendance_ids": unattendance_ids, + "updated_at": str(UTC_NOW), }, - day2['label']: { - 'current_module': 'unknown', - 'teacher_comments': None, - 'attendance_ids': attendance_ids, - 'unattendance_ids': unattendance_ids, - 'updated_at': str(UTC_NOW), + day2["label"]: { + "current_module": "unknown", + "teacher_comments": None, + "attendance_ids": attendance_ids, + "unattendance_ids": unattendance_ids, + "updated_at": str(UTC_NOW), }, - day2['label']: { - 'current_module': 'unknown', - 'teacher_comments': None, - 'attendance_ids': attendance_ids, - 'unattendance_ids': unattendance_ids, - 'updated_at': str(UTC_NOW), + day2["label"]: { + "current_module": "unknown", + "teacher_comments": None, + "attendance_ids": attendance_ids, + "unattendance_ids": unattendance_ids, + "updated_at": str(UTC_NOW), }, - day3['label']: { - 'current_module': 'unknown', - 'teacher_comments': None, - 
'attendance_ids': attendance_ids, - 'unattendance_ids': unattendance_ids, - 'updated_at': str(UTC_NOW), + day3["label"]: { + "current_module": "unknown", + "teacher_comments": None, + "attendance_ids": attendance_ids, + "unattendance_ids": unattendance_ids, + "updated_at": str(UTC_NOW), }, - day3['label']: { - 'current_module': 'unknown', - 'teacher_comments': None, - 'attendance_ids': attendance_ids, - 'unattendance_ids': unattendance_ids, - 'updated_at': str(UTC_NOW), + day3["label"]: { + "current_module": "unknown", + "teacher_comments": None, + "attendance_ids": attendance_ids, + "unattendance_ids": unattendance_ids, + "updated_at": str(UTC_NOW), }, - day3['label']: { - 'current_module': 'unknown', - 'teacher_comments': None, - 'attendance_ids': attendance_ids, - 'unattendance_ids': unattendance_ids, - 'updated_at': str(UTC_NOW), + day3["label"]: { + "current_module": "unknown", + "teacher_comments": None, + "attendance_ids": attendance_ids, + "unattendance_ids": unattendance_ids, + "updated_at": str(UTC_NOW), }, } - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), [ - { - **self.bc.format.to_dict(model.cohort), - 'history_log': history_log, - }, - ]) - - self.assertEqual(self.bc.database.list_of('admissions.SyllabusVersion'), [{ - **self.bc.format.to_dict(model.syllabus_version), - }]) + self.assertEqual( + self.bc.database.list_of("admissions.Cohort"), + [ + { + **self.bc.format.to_dict(model.cohort), + "history_log": history_log, + }, + ], + ) + + self.assertEqual( + self.bc.database.list_of("admissions.SyllabusVersion"), + [ + { + **self.bc.format.to_dict(model.syllabus_version), + } + ], + ) # teardown NDB.__init__.call_args_list = [] @@ -1074,806 +1624,1334 @@ def test_the_students_attended_all_days__duration_in_days(self): because the syllabus does'nt include that, with two CohortUser """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch.object(NDB, '__init__', MagicMock(return_value=None)) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.activity.tasks.get_attendancy_log_per_cohort_user.delay', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch.object(NDB, "__init__", MagicMock(return_value=None)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.activity.tasks.get_attendancy_log_per_cohort_user.delay", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_the_students_attended_all_days__duration_in_days__two_cohort_users(self): cases = [ - ([ - get_datastore_seed(self.bc.fake.slug(), 1, { - 'slug': 'classroom_attendance', - 'user_id': 1, - 'day': 1, - }), - get_datastore_seed(self.bc.fake.slug(), 2, { - 'slug': 'classroom_attendance', - 'user_id': 1, - 'day': 2, - }), - get_datastore_seed(self.bc.fake.slug(), 3, { - 'slug': 'classroom_attendance', - 'user_id': 1, - 'day': 3, - }), - get_datastore_seed(self.bc.fake.slug(), 4, { - 'slug': 'classroom_attendance', - 'user_id': 1, - 'day': 4, - }), - get_datastore_seed(self.bc.fake.slug(), 5, { - 'slug': 'classroom_attendance', - 'user_id': 1, - 'day': 5, - 
}), - get_datastore_seed(self.bc.fake.slug(), 6, { - 'slug': 'classroom_attendance', - 'user_id': 1, - 'day': 6, - }), - get_datastore_seed(self.bc.fake.slug(), 7, { - 'slug': 'classroom_attendance', - 'user_id': 1, - 'day': 7, - }), - get_datastore_seed(self.bc.fake.slug(), 1, { - 'slug': 'classroom_attendance', - 'user_id': 2, - 'day': 1, - }), - get_datastore_seed(self.bc.fake.slug(), 2, { - 'slug': 'classroom_attendance', - 'user_id': 2, - 'day': 2, - }), - get_datastore_seed(self.bc.fake.slug(), 3, { - 'slug': 'classroom_attendance', - 'user_id': 2, - 'day': 3, - }), - get_datastore_seed(self.bc.fake.slug(), 4, { - 'slug': 'classroom_attendance', - 'user_id': 2, - 'day': 4, - }), - get_datastore_seed(self.bc.fake.slug(), 5, { - 'slug': 'classroom_attendance', - 'user_id': 2, - 'day': 5, - }), - get_datastore_seed(self.bc.fake.slug(), 6, { - 'slug': 'classroom_attendance', - 'user_id': 2, - 'day': 6, - }), - get_datastore_seed(self.bc.fake.slug(), 7, { - 'slug': 'classroom_attendance', - 'user_id': 2, - 'day': 7, - }), - ], [], [1, 2], []), - ([], [ - get_datastore_seed(self.bc.fake.slug(), 1, { - 'slug': 'classroom_unattendance', - 'user_id': 1, - 'day': 1, - }), - get_datastore_seed(self.bc.fake.slug(), 2, { - 'slug': 'classroom_unattendance', - 'user_id': 1, - 'day': 2, - }), - get_datastore_seed(self.bc.fake.slug(), 3, { - 'slug': 'classroom_unattendance', - 'user_id': 1, - 'day': 3, - }), - get_datastore_seed(self.bc.fake.slug(), 4, { - 'slug': 'classroom_unattendance', - 'user_id': 1, - 'day': 4, - }), - get_datastore_seed(self.bc.fake.slug(), 5, { - 'slug': 'classroom_unattendance', - 'user_id': 1, - 'day': 5, - }), - get_datastore_seed(self.bc.fake.slug(), 6, { - 'slug': 'classroom_unattendance', - 'user_id': 1, - 'day': 6, - }), - get_datastore_seed(self.bc.fake.slug(), 7, { - 'slug': 'classroom_unattendance', - 'user_id': 1, - 'day': 7, - }), - get_datastore_seed(self.bc.fake.slug(), 1, { - 'slug': 'classroom_unattendance', - 'user_id': 2, - 'day': 1, - }), - get_datastore_seed(self.bc.fake.slug(), 2, { - 'slug': 'classroom_unattendance', - 'user_id': 2, - 'day': 2, - }), - get_datastore_seed(self.bc.fake.slug(), 3, { - 'slug': 'classroom_unattendance', - 'user_id': 2, - 'day': 3, - }), - get_datastore_seed(self.bc.fake.slug(), 4, { - 'slug': 'classroom_unattendance', - 'user_id': 2, - 'day': 4, - }), - get_datastore_seed(self.bc.fake.slug(), 5, { - 'slug': 'classroom_unattendance', - 'user_id': 2, - 'day': 5, - }), - get_datastore_seed(self.bc.fake.slug(), 6, { - 'slug': 'classroom_unattendance', - 'user_id': 2, - 'day': 6, - }), - get_datastore_seed(self.bc.fake.slug(), 7, { - 'slug': 'classroom_unattendance', - 'user_id': 2, - 'day': 7, - }), - ], [], [1, 2]), - ([ - get_datastore_seed(self.bc.fake.slug(), 1, { - 'slug': 'classroom_attendance', - 'user_id': 1, - 'day': 1, - }), - get_datastore_seed(self.bc.fake.slug(), 2, { - 'slug': 'classroom_attendance', - 'user_id': 1, - 'day': 2, - }), - get_datastore_seed(self.bc.fake.slug(), 3, { - 'slug': 'classroom_attendance', - 'user_id': 1, - 'day': 3, - }), - get_datastore_seed(self.bc.fake.slug(), 4, { - 'slug': 'classroom_attendance', - 'user_id': 1, - 'day': 4, - }), - get_datastore_seed(self.bc.fake.slug(), 5, { - 'slug': 'classroom_attendance', - 'user_id': 1, - 'day': 5, - }), - get_datastore_seed(self.bc.fake.slug(), 6, { - 'slug': 'classroom_attendance', - 'user_id': 1, - 'day': 6, - }), - get_datastore_seed(self.bc.fake.slug(), 7, { - 'slug': 'classroom_attendance', - 'user_id': 1, - 'day': 7, - }), - 
get_datastore_seed(self.bc.fake.slug(), 1, { - 'slug': 'classroom_attendance', - 'user_id': 2, - 'day': 1, - }), - get_datastore_seed(self.bc.fake.slug(), 2, { - 'slug': 'classroom_attendance', - 'user_id': 2, - 'day': 2, - }), - get_datastore_seed(self.bc.fake.slug(), 3, { - 'slug': 'classroom_attendance', - 'user_id': 2, - 'day': 3, - }), - get_datastore_seed(self.bc.fake.slug(), 4, { - 'slug': 'classroom_attendance', - 'user_id': 2, - 'day': 4, - }), - get_datastore_seed(self.bc.fake.slug(), 5, { - 'slug': 'classroom_attendance', - 'user_id': 2, - 'day': 5, - }), - get_datastore_seed(self.bc.fake.slug(), 6, { - 'slug': 'classroom_attendance', - 'user_id': 2, - 'day': 6, - }), - get_datastore_seed(self.bc.fake.slug(), 7, { - 'slug': 'classroom_attendance', - 'user_id': 2, - 'day': 7, - }), - ], [ - get_datastore_seed(self.bc.fake.slug(), 1, { - 'slug': 'classroom_unattendance', - 'user_id': 1, - 'day': 1, - }), - get_datastore_seed(self.bc.fake.slug(), 2, { - 'slug': 'classroom_unattendance', - 'user_id': 1, - 'day': 2, - }), - get_datastore_seed(self.bc.fake.slug(), 3, { - 'slug': 'classroom_unattendance', - 'user_id': 1, - 'day': 3, - }), - get_datastore_seed(self.bc.fake.slug(), 4, { - 'slug': 'classroom_unattendance', - 'user_id': 1, - 'day': 4, - }), - get_datastore_seed(self.bc.fake.slug(), 5, { - 'slug': 'classroom_unattendance', - 'user_id': 1, - 'day': 5, - }), - get_datastore_seed(self.bc.fake.slug(), 6, { - 'slug': 'classroom_unattendance', - 'user_id': 1, - 'day': 6, - }), - get_datastore_seed(self.bc.fake.slug(), 7, { - 'slug': 'classroom_unattendance', - 'user_id': 1, - 'day': 7, - }), - get_datastore_seed(self.bc.fake.slug(), 1, { - 'slug': 'classroom_unattendance', - 'user_id': 2, - 'day': 1, - }), - get_datastore_seed(self.bc.fake.slug(), 2, { - 'slug': 'classroom_unattendance', - 'user_id': 2, - 'day': 2, - }), - get_datastore_seed(self.bc.fake.slug(), 3, { - 'slug': 'classroom_unattendance', - 'user_id': 2, - 'day': 3, - }), - get_datastore_seed(self.bc.fake.slug(), 4, { - 'slug': 'classroom_unattendance', - 'user_id': 2, - 'day': 4, - }), - get_datastore_seed(self.bc.fake.slug(), 5, { - 'slug': 'classroom_unattendance', - 'user_id': 2, - 'day': 5, - }), - get_datastore_seed(self.bc.fake.slug(), 6, { - 'slug': 'classroom_unattendance', - 'user_id': 2, - 'day': 6, - }), - get_datastore_seed(self.bc.fake.slug(), 7, { - 'slug': 'classroom_unattendance', - 'user_id': 2, - 'day': 7, - }), - ], [1, 2], [1, 2]), + ( + [ + get_datastore_seed( + self.bc.fake.slug(), + 1, + { + "slug": "classroom_attendance", + "user_id": 1, + "day": 1, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 2, + { + "slug": "classroom_attendance", + "user_id": 1, + "day": 2, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 3, + { + "slug": "classroom_attendance", + "user_id": 1, + "day": 3, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 4, + { + "slug": "classroom_attendance", + "user_id": 1, + "day": 4, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 5, + { + "slug": "classroom_attendance", + "user_id": 1, + "day": 5, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 6, + { + "slug": "classroom_attendance", + "user_id": 1, + "day": 6, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 7, + { + "slug": "classroom_attendance", + "user_id": 1, + "day": 7, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 1, + { + "slug": "classroom_attendance", + "user_id": 2, + "day": 1, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 2, + { + "slug": 
"classroom_attendance", + "user_id": 2, + "day": 2, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 3, + { + "slug": "classroom_attendance", + "user_id": 2, + "day": 3, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 4, + { + "slug": "classroom_attendance", + "user_id": 2, + "day": 4, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 5, + { + "slug": "classroom_attendance", + "user_id": 2, + "day": 5, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 6, + { + "slug": "classroom_attendance", + "user_id": 2, + "day": 6, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 7, + { + "slug": "classroom_attendance", + "user_id": 2, + "day": 7, + }, + ), + ], + [], + [1, 2], + [], + ), + ( + [], + [ + get_datastore_seed( + self.bc.fake.slug(), + 1, + { + "slug": "classroom_unattendance", + "user_id": 1, + "day": 1, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 2, + { + "slug": "classroom_unattendance", + "user_id": 1, + "day": 2, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 3, + { + "slug": "classroom_unattendance", + "user_id": 1, + "day": 3, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 4, + { + "slug": "classroom_unattendance", + "user_id": 1, + "day": 4, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 5, + { + "slug": "classroom_unattendance", + "user_id": 1, + "day": 5, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 6, + { + "slug": "classroom_unattendance", + "user_id": 1, + "day": 6, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 7, + { + "slug": "classroom_unattendance", + "user_id": 1, + "day": 7, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 1, + { + "slug": "classroom_unattendance", + "user_id": 2, + "day": 1, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 2, + { + "slug": "classroom_unattendance", + "user_id": 2, + "day": 2, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 3, + { + "slug": "classroom_unattendance", + "user_id": 2, + "day": 3, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 4, + { + "slug": "classroom_unattendance", + "user_id": 2, + "day": 4, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 5, + { + "slug": "classroom_unattendance", + "user_id": 2, + "day": 5, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 6, + { + "slug": "classroom_unattendance", + "user_id": 2, + "day": 6, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 7, + { + "slug": "classroom_unattendance", + "user_id": 2, + "day": 7, + }, + ), + ], + [], + [1, 2], + ), + ( + [ + get_datastore_seed( + self.bc.fake.slug(), + 1, + { + "slug": "classroom_attendance", + "user_id": 1, + "day": 1, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 2, + { + "slug": "classroom_attendance", + "user_id": 1, + "day": 2, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 3, + { + "slug": "classroom_attendance", + "user_id": 1, + "day": 3, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 4, + { + "slug": "classroom_attendance", + "user_id": 1, + "day": 4, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 5, + { + "slug": "classroom_attendance", + "user_id": 1, + "day": 5, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 6, + { + "slug": "classroom_attendance", + "user_id": 1, + "day": 6, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 7, + { + "slug": "classroom_attendance", + "user_id": 1, + "day": 7, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 1, + { + "slug": "classroom_attendance", + "user_id": 2, + 
"day": 1, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 2, + { + "slug": "classroom_attendance", + "user_id": 2, + "day": 2, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 3, + { + "slug": "classroom_attendance", + "user_id": 2, + "day": 3, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 4, + { + "slug": "classroom_attendance", + "user_id": 2, + "day": 4, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 5, + { + "slug": "classroom_attendance", + "user_id": 2, + "day": 5, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 6, + { + "slug": "classroom_attendance", + "user_id": 2, + "day": 6, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 7, + { + "slug": "classroom_attendance", + "user_id": 2, + "day": 7, + }, + ), + ], + [ + get_datastore_seed( + self.bc.fake.slug(), + 1, + { + "slug": "classroom_unattendance", + "user_id": 1, + "day": 1, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 2, + { + "slug": "classroom_unattendance", + "user_id": 1, + "day": 2, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 3, + { + "slug": "classroom_unattendance", + "user_id": 1, + "day": 3, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 4, + { + "slug": "classroom_unattendance", + "user_id": 1, + "day": 4, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 5, + { + "slug": "classroom_unattendance", + "user_id": 1, + "day": 5, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 6, + { + "slug": "classroom_unattendance", + "user_id": 1, + "day": 6, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 7, + { + "slug": "classroom_unattendance", + "user_id": 1, + "day": 7, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 1, + { + "slug": "classroom_unattendance", + "user_id": 2, + "day": 1, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 2, + { + "slug": "classroom_unattendance", + "user_id": 2, + "day": 2, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 3, + { + "slug": "classroom_unattendance", + "user_id": 2, + "day": 3, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 4, + { + "slug": "classroom_unattendance", + "user_id": 2, + "day": 4, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 5, + { + "slug": "classroom_unattendance", + "user_id": 2, + "day": 5, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 6, + { + "slug": "classroom_unattendance", + "user_id": 2, + "day": 6, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 7, + { + "slug": "classroom_unattendance", + "user_id": 2, + "day": 7, + }, + ), + ], + [1, 2], + [1, 2], + ), ] syllabus_version = { - 'json': { - 'days': [{ - 'id': x, - 'duration_in_days': x, - 'label': self.bc.fake.slug(), - } for x in range(1, 4)] + "json": { + "days": [ + { + "id": x, + "duration_in_days": x, + "label": self.bc.fake.slug(), + } + for x in range(1, 4) + ] } } - cohort = {'current_day': 6} + cohort = {"current_day": 6} - with patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()): + with patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()): model = self.bc.database.create(cohort=cohort, cohort_user=2, syllabus_version=syllabus_version) for attendance_seed, unattendance_seed, attendance_ids, unattendance_ids in cases: - with patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()): + with patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()): model.cohort.history_log = {} model.cohort.save() logging.Logger.info.call_args_list = [] 
logging.Logger.error.call_args_list = [] - with patch.object(NDB, 'fetch', MagicMock(side_effect=[attendance_seed, unattendance_seed])): + with patch.object(NDB, "fetch", MagicMock(side_effect=[attendance_seed, unattendance_seed])): tasks.get_attendancy_log_per_cohort_user.delay.call_args_list = [] get_attendancy_log.delay(model.cohort.id) self.assertEqual(NDB.__init__.call_args_list, [call(StudentActivity)]) - self.assertEqual(NDB.fetch.call_args_list, [ - call([StudentActivity.cohort == model.cohort.slug, StudentActivity.slug == 'classroom_attendance']), - call([ - StudentActivity.cohort == model.cohort.slug, StudentActivity.slug == 'classroom_unattendance' - ]), - ]) - - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Executing get_attendancy_log'), - call('History log saved'), - ]) + self.assertEqual( + NDB.fetch.call_args_list, + [ + call( + [ + StudentActivity.cohort == model.cohort.slug, + StudentActivity.slug == "classroom_attendance", + ] + ), + call( + [ + StudentActivity.cohort == model.cohort.slug, + StudentActivity.slug == "classroom_unattendance", + ] + ), + ], + ) + + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Executing get_attendancy_log"), + call("History log saved"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, []) - day1 = model.syllabus_version.json['days'][0] - day2 = model.syllabus_version.json['days'][1] - day3 = model.syllabus_version.json['days'][2] + day1 = model.syllabus_version.json["days"][0] + day2 = model.syllabus_version.json["days"][1] + day3 = model.syllabus_version.json["days"][2] history_log = { - day1['label']: { - 'current_module': 'unknown', - 'teacher_comments': None, - 'attendance_ids': attendance_ids, - 'unattendance_ids': unattendance_ids, - 'updated_at': str(UTC_NOW), + day1["label"]: { + "current_module": "unknown", + "teacher_comments": None, + "attendance_ids": attendance_ids, + "unattendance_ids": unattendance_ids, + "updated_at": str(UTC_NOW), }, - day2['label']: { - 'current_module': 'unknown', - 'teacher_comments': None, - 'attendance_ids': attendance_ids, - 'unattendance_ids': unattendance_ids, - 'updated_at': str(UTC_NOW), + day2["label"]: { + "current_module": "unknown", + "teacher_comments": None, + "attendance_ids": attendance_ids, + "unattendance_ids": unattendance_ids, + "updated_at": str(UTC_NOW), }, - day2['label']: { - 'current_module': 'unknown', - 'teacher_comments': None, - 'attendance_ids': attendance_ids, - 'unattendance_ids': unattendance_ids, - 'updated_at': str(UTC_NOW), + day2["label"]: { + "current_module": "unknown", + "teacher_comments": None, + "attendance_ids": attendance_ids, + "unattendance_ids": unattendance_ids, + "updated_at": str(UTC_NOW), }, - day3['label']: { - 'current_module': 'unknown', - 'teacher_comments': None, - 'attendance_ids': attendance_ids, - 'unattendance_ids': unattendance_ids, - 'updated_at': str(UTC_NOW), + day3["label"]: { + "current_module": "unknown", + "teacher_comments": None, + "attendance_ids": attendance_ids, + "unattendance_ids": unattendance_ids, + "updated_at": str(UTC_NOW), }, - day3['label']: { - 'current_module': 'unknown', - 'teacher_comments': None, - 'attendance_ids': attendance_ids, - 'unattendance_ids': unattendance_ids, - 'updated_at': str(UTC_NOW), + day3["label"]: { + "current_module": "unknown", + "teacher_comments": None, + "attendance_ids": attendance_ids, + "unattendance_ids": unattendance_ids, + "updated_at": str(UTC_NOW), }, - day3['label']: { - 'current_module': 'unknown', - 'teacher_comments': None, - 
'attendance_ids': attendance_ids, - 'unattendance_ids': unattendance_ids, - 'updated_at': str(UTC_NOW), + day3["label"]: { + "current_module": "unknown", + "teacher_comments": None, + "attendance_ids": attendance_ids, + "unattendance_ids": unattendance_ids, + "updated_at": str(UTC_NOW), }, } - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), [ - { - **self.bc.format.to_dict(model.cohort), - 'history_log': history_log, - }, - ]) - - self.assertEqual(self.bc.database.list_of('admissions.SyllabusVersion'), [{ - **self.bc.format.to_dict(model.syllabus_version), - }]) + self.assertEqual( + self.bc.database.list_of("admissions.Cohort"), + [ + { + **self.bc.format.to_dict(model.cohort), + "history_log": history_log, + }, + ], + ) + + self.assertEqual( + self.bc.database.list_of("admissions.SyllabusVersion"), + [ + { + **self.bc.format.to_dict(model.syllabus_version), + } + ], + ) self.assertEqual(tasks.get_attendancy_log_per_cohort_user.delay.call_args_list, [call(1), call(2)]) # teardown NDB.__init__.call_args_list = [] - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch.object(NDB, '__init__', MagicMock(return_value=None)) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.activity.tasks.get_attendancy_log_per_cohort_user.delay', MagicMock()) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch.object(NDB, "__init__", MagicMock(return_value=None)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.activity.tasks.get_attendancy_log_per_cohort_user.delay", MagicMock()) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_the_students_attended_all_days__duration_in_days__two_cohort_users__they_was_deleted(self): cases = [ - ([ - get_datastore_seed(self.bc.fake.slug(), 1, { - 'slug': 'classroom_attendance', - 'user_id': 1, - 'day': 1, - }), - get_datastore_seed(self.bc.fake.slug(), 2, { - 'slug': 'classroom_attendance', - 'user_id': 1, - 'day': 2, - }), - get_datastore_seed(self.bc.fake.slug(), 3, { - 'slug': 'classroom_attendance', - 'user_id': 1, - 'day': 3, - }), - get_datastore_seed(self.bc.fake.slug(), 4, { - 'slug': 'classroom_attendance', - 'user_id': 1, - 'day': 4, - }), - get_datastore_seed(self.bc.fake.slug(), 5, { - 'slug': 'classroom_attendance', - 'user_id': 1, - 'day': 5, - }), - get_datastore_seed(self.bc.fake.slug(), 6, { - 'slug': 'classroom_attendance', - 'user_id': 1, - 'day': 6, - }), - get_datastore_seed(self.bc.fake.slug(), 7, { - 'slug': 'classroom_attendance', - 'user_id': 1, - 'day': 7, - }), - get_datastore_seed(self.bc.fake.slug(), 1, { - 'slug': 'classroom_attendance', - 'user_id': 2, - 'day': 1, - }), - get_datastore_seed(self.bc.fake.slug(), 2, { - 'slug': 'classroom_attendance', - 'user_id': 2, - 'day': 2, - }), - get_datastore_seed(self.bc.fake.slug(), 3, { - 'slug': 'classroom_attendance', - 'user_id': 2, - 'day': 3, - }), - 
get_datastore_seed(self.bc.fake.slug(), 4, { - 'slug': 'classroom_attendance', - 'user_id': 2, - 'day': 4, - }), - get_datastore_seed(self.bc.fake.slug(), 5, { - 'slug': 'classroom_attendance', - 'user_id': 2, - 'day': 5, - }), - get_datastore_seed(self.bc.fake.slug(), 6, { - 'slug': 'classroom_attendance', - 'user_id': 2, - 'day': 6, - }), - get_datastore_seed(self.bc.fake.slug(), 7, { - 'slug': 'classroom_attendance', - 'user_id': 2, - 'day': 7, - }), - ], [], [1, 2], []), - ([], [ - get_datastore_seed(self.bc.fake.slug(), 1, { - 'slug': 'classroom_unattendance', - 'user_id': 1, - 'day': 1, - }), - get_datastore_seed(self.bc.fake.slug(), 2, { - 'slug': 'classroom_unattendance', - 'user_id': 1, - 'day': 2, - }), - get_datastore_seed(self.bc.fake.slug(), 3, { - 'slug': 'classroom_unattendance', - 'user_id': 1, - 'day': 3, - }), - get_datastore_seed(self.bc.fake.slug(), 4, { - 'slug': 'classroom_unattendance', - 'user_id': 1, - 'day': 4, - }), - get_datastore_seed(self.bc.fake.slug(), 5, { - 'slug': 'classroom_unattendance', - 'user_id': 1, - 'day': 5, - }), - get_datastore_seed(self.bc.fake.slug(), 6, { - 'slug': 'classroom_unattendance', - 'user_id': 1, - 'day': 6, - }), - get_datastore_seed(self.bc.fake.slug(), 7, { - 'slug': 'classroom_unattendance', - 'user_id': 1, - 'day': 7, - }), - get_datastore_seed(self.bc.fake.slug(), 1, { - 'slug': 'classroom_unattendance', - 'user_id': 2, - 'day': 1, - }), - get_datastore_seed(self.bc.fake.slug(), 2, { - 'slug': 'classroom_unattendance', - 'user_id': 2, - 'day': 2, - }), - get_datastore_seed(self.bc.fake.slug(), 3, { - 'slug': 'classroom_unattendance', - 'user_id': 2, - 'day': 3, - }), - get_datastore_seed(self.bc.fake.slug(), 4, { - 'slug': 'classroom_unattendance', - 'user_id': 2, - 'day': 4, - }), - get_datastore_seed(self.bc.fake.slug(), 5, { - 'slug': 'classroom_unattendance', - 'user_id': 2, - 'day': 5, - }), - get_datastore_seed(self.bc.fake.slug(), 6, { - 'slug': 'classroom_unattendance', - 'user_id': 2, - 'day': 6, - }), - get_datastore_seed(self.bc.fake.slug(), 7, { - 'slug': 'classroom_unattendance', - 'user_id': 2, - 'day': 7, - }), - ], [], [1, 2]), - ([ - get_datastore_seed(self.bc.fake.slug(), 1, { - 'slug': 'classroom_attendance', - 'user_id': 1, - 'day': 1, - }), - get_datastore_seed(self.bc.fake.slug(), 2, { - 'slug': 'classroom_attendance', - 'user_id': 1, - 'day': 2, - }), - get_datastore_seed(self.bc.fake.slug(), 3, { - 'slug': 'classroom_attendance', - 'user_id': 1, - 'day': 3, - }), - get_datastore_seed(self.bc.fake.slug(), 4, { - 'slug': 'classroom_attendance', - 'user_id': 1, - 'day': 4, - }), - get_datastore_seed(self.bc.fake.slug(), 5, { - 'slug': 'classroom_attendance', - 'user_id': 1, - 'day': 5, - }), - get_datastore_seed(self.bc.fake.slug(), 6, { - 'slug': 'classroom_attendance', - 'user_id': 1, - 'day': 6, - }), - get_datastore_seed(self.bc.fake.slug(), 7, { - 'slug': 'classroom_attendance', - 'user_id': 1, - 'day': 7, - }), - get_datastore_seed(self.bc.fake.slug(), 1, { - 'slug': 'classroom_attendance', - 'user_id': 2, - 'day': 1, - }), - get_datastore_seed(self.bc.fake.slug(), 2, { - 'slug': 'classroom_attendance', - 'user_id': 2, - 'day': 2, - }), - get_datastore_seed(self.bc.fake.slug(), 3, { - 'slug': 'classroom_attendance', - 'user_id': 2, - 'day': 3, - }), - get_datastore_seed(self.bc.fake.slug(), 4, { - 'slug': 'classroom_attendance', - 'user_id': 2, - 'day': 4, - }), - get_datastore_seed(self.bc.fake.slug(), 5, { - 'slug': 'classroom_attendance', - 'user_id': 2, - 'day': 5, - }), - 
get_datastore_seed(self.bc.fake.slug(), 6, { - 'slug': 'classroom_attendance', - 'user_id': 2, - 'day': 6, - }), - get_datastore_seed(self.bc.fake.slug(), 7, { - 'slug': 'classroom_attendance', - 'user_id': 2, - 'day': 7, - }), - ], [ - get_datastore_seed(self.bc.fake.slug(), 1, { - 'slug': 'classroom_unattendance', - 'user_id': 1, - 'day': 1, - }), - get_datastore_seed(self.bc.fake.slug(), 2, { - 'slug': 'classroom_unattendance', - 'user_id': 1, - 'day': 2, - }), - get_datastore_seed(self.bc.fake.slug(), 3, { - 'slug': 'classroom_unattendance', - 'user_id': 1, - 'day': 3, - }), - get_datastore_seed(self.bc.fake.slug(), 4, { - 'slug': 'classroom_unattendance', - 'user_id': 1, - 'day': 4, - }), - get_datastore_seed(self.bc.fake.slug(), 5, { - 'slug': 'classroom_unattendance', - 'user_id': 1, - 'day': 5, - }), - get_datastore_seed(self.bc.fake.slug(), 6, { - 'slug': 'classroom_unattendance', - 'user_id': 1, - 'day': 6, - }), - get_datastore_seed(self.bc.fake.slug(), 7, { - 'slug': 'classroom_unattendance', - 'user_id': 1, - 'day': 7, - }), - get_datastore_seed(self.bc.fake.slug(), 1, { - 'slug': 'classroom_unattendance', - 'user_id': 2, - 'day': 1, - }), - get_datastore_seed(self.bc.fake.slug(), 2, { - 'slug': 'classroom_unattendance', - 'user_id': 2, - 'day': 2, - }), - get_datastore_seed(self.bc.fake.slug(), 3, { - 'slug': 'classroom_unattendance', - 'user_id': 2, - 'day': 3, - }), - get_datastore_seed(self.bc.fake.slug(), 4, { - 'slug': 'classroom_unattendance', - 'user_id': 2, - 'day': 4, - }), - get_datastore_seed(self.bc.fake.slug(), 5, { - 'slug': 'classroom_unattendance', - 'user_id': 2, - 'day': 5, - }), - get_datastore_seed(self.bc.fake.slug(), 6, { - 'slug': 'classroom_unattendance', - 'user_id': 2, - 'day': 6, - }), - get_datastore_seed(self.bc.fake.slug(), 7, { - 'slug': 'classroom_unattendance', - 'user_id': 2, - 'day': 7, - }), - ], [1, 2], [1, 2]), + ( + [ + get_datastore_seed( + self.bc.fake.slug(), + 1, + { + "slug": "classroom_attendance", + "user_id": 1, + "day": 1, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 2, + { + "slug": "classroom_attendance", + "user_id": 1, + "day": 2, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 3, + { + "slug": "classroom_attendance", + "user_id": 1, + "day": 3, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 4, + { + "slug": "classroom_attendance", + "user_id": 1, + "day": 4, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 5, + { + "slug": "classroom_attendance", + "user_id": 1, + "day": 5, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 6, + { + "slug": "classroom_attendance", + "user_id": 1, + "day": 6, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 7, + { + "slug": "classroom_attendance", + "user_id": 1, + "day": 7, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 1, + { + "slug": "classroom_attendance", + "user_id": 2, + "day": 1, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 2, + { + "slug": "classroom_attendance", + "user_id": 2, + "day": 2, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 3, + { + "slug": "classroom_attendance", + "user_id": 2, + "day": 3, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 4, + { + "slug": "classroom_attendance", + "user_id": 2, + "day": 4, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 5, + { + "slug": "classroom_attendance", + "user_id": 2, + "day": 5, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 6, + { + "slug": "classroom_attendance", + "user_id": 2, + "day": 6, + }, + ), + 
get_datastore_seed( + self.bc.fake.slug(), + 7, + { + "slug": "classroom_attendance", + "user_id": 2, + "day": 7, + }, + ), + ], + [], + [1, 2], + [], + ), + ( + [], + [ + get_datastore_seed( + self.bc.fake.slug(), + 1, + { + "slug": "classroom_unattendance", + "user_id": 1, + "day": 1, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 2, + { + "slug": "classroom_unattendance", + "user_id": 1, + "day": 2, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 3, + { + "slug": "classroom_unattendance", + "user_id": 1, + "day": 3, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 4, + { + "slug": "classroom_unattendance", + "user_id": 1, + "day": 4, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 5, + { + "slug": "classroom_unattendance", + "user_id": 1, + "day": 5, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 6, + { + "slug": "classroom_unattendance", + "user_id": 1, + "day": 6, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 7, + { + "slug": "classroom_unattendance", + "user_id": 1, + "day": 7, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 1, + { + "slug": "classroom_unattendance", + "user_id": 2, + "day": 1, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 2, + { + "slug": "classroom_unattendance", + "user_id": 2, + "day": 2, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 3, + { + "slug": "classroom_unattendance", + "user_id": 2, + "day": 3, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 4, + { + "slug": "classroom_unattendance", + "user_id": 2, + "day": 4, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 5, + { + "slug": "classroom_unattendance", + "user_id": 2, + "day": 5, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 6, + { + "slug": "classroom_unattendance", + "user_id": 2, + "day": 6, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 7, + { + "slug": "classroom_unattendance", + "user_id": 2, + "day": 7, + }, + ), + ], + [], + [1, 2], + ), + ( + [ + get_datastore_seed( + self.bc.fake.slug(), + 1, + { + "slug": "classroom_attendance", + "user_id": 1, + "day": 1, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 2, + { + "slug": "classroom_attendance", + "user_id": 1, + "day": 2, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 3, + { + "slug": "classroom_attendance", + "user_id": 1, + "day": 3, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 4, + { + "slug": "classroom_attendance", + "user_id": 1, + "day": 4, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 5, + { + "slug": "classroom_attendance", + "user_id": 1, + "day": 5, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 6, + { + "slug": "classroom_attendance", + "user_id": 1, + "day": 6, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 7, + { + "slug": "classroom_attendance", + "user_id": 1, + "day": 7, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 1, + { + "slug": "classroom_attendance", + "user_id": 2, + "day": 1, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 2, + { + "slug": "classroom_attendance", + "user_id": 2, + "day": 2, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 3, + { + "slug": "classroom_attendance", + "user_id": 2, + "day": 3, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 4, + { + "slug": "classroom_attendance", + "user_id": 2, + "day": 4, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 5, + { + "slug": "classroom_attendance", + "user_id": 2, + "day": 5, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 
6, + { + "slug": "classroom_attendance", + "user_id": 2, + "day": 6, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 7, + { + "slug": "classroom_attendance", + "user_id": 2, + "day": 7, + }, + ), + ], + [ + get_datastore_seed( + self.bc.fake.slug(), + 1, + { + "slug": "classroom_unattendance", + "user_id": 1, + "day": 1, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 2, + { + "slug": "classroom_unattendance", + "user_id": 1, + "day": 2, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 3, + { + "slug": "classroom_unattendance", + "user_id": 1, + "day": 3, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 4, + { + "slug": "classroom_unattendance", + "user_id": 1, + "day": 4, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 5, + { + "slug": "classroom_unattendance", + "user_id": 1, + "day": 5, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 6, + { + "slug": "classroom_unattendance", + "user_id": 1, + "day": 6, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 7, + { + "slug": "classroom_unattendance", + "user_id": 1, + "day": 7, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 1, + { + "slug": "classroom_unattendance", + "user_id": 2, + "day": 1, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 2, + { + "slug": "classroom_unattendance", + "user_id": 2, + "day": 2, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 3, + { + "slug": "classroom_unattendance", + "user_id": 2, + "day": 3, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 4, + { + "slug": "classroom_unattendance", + "user_id": 2, + "day": 4, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 5, + { + "slug": "classroom_unattendance", + "user_id": 2, + "day": 5, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 6, + { + "slug": "classroom_unattendance", + "user_id": 2, + "day": 6, + }, + ), + get_datastore_seed( + self.bc.fake.slug(), + 7, + { + "slug": "classroom_unattendance", + "user_id": 2, + "day": 7, + }, + ), + ], + [1, 2], + [1, 2], + ), ] syllabus_version = { - 'json': { - 'days': [{ - 'id': x, - 'duration_in_days': x, - 'label': self.bc.fake.slug(), - } for x in range(1, 4)] + "json": { + "days": [ + { + "id": x, + "duration_in_days": x, + "label": self.bc.fake.slug(), + } + for x in range(1, 4) + ] } } - cohort = {'current_day': 6} - cohort_user = {'educational_status': 'DROPPED'} + cohort = {"current_day": 6} + cohort_user = {"educational_status": "DROPPED"} - with patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()): - model = self.bc.database.create(cohort=cohort, - cohort_user=(2, cohort_user), - syllabus_version=syllabus_version) + with patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()): + model = self.bc.database.create( + cohort=cohort, cohort_user=(2, cohort_user), syllabus_version=syllabus_version + ) for attendance_seed, unattendance_seed, attendance_ids, unattendance_ids in cases: - with patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()): + with patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()): model.cohort.history_log = {} model.cohort.save() logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] - with patch.object(NDB, 'fetch', MagicMock(side_effect=[attendance_seed, unattendance_seed])): + with patch.object(NDB, "fetch", MagicMock(side_effect=[attendance_seed, unattendance_seed])): tasks.get_attendancy_log_per_cohort_user.delay.call_args_list = [] get_attendancy_log.delay(model.cohort.id) 
self.assertEqual(NDB.__init__.call_args_list, [call(StudentActivity)]) - self.assertEqual(NDB.fetch.call_args_list, [ - call([StudentActivity.cohort == model.cohort.slug, StudentActivity.slug == 'classroom_attendance']), - call([ - StudentActivity.cohort == model.cohort.slug, StudentActivity.slug == 'classroom_unattendance' - ]), - ]) - - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Executing get_attendancy_log'), - call('History log saved'), - ]) + self.assertEqual( + NDB.fetch.call_args_list, + [ + call( + [ + StudentActivity.cohort == model.cohort.slug, + StudentActivity.slug == "classroom_attendance", + ] + ), + call( + [ + StudentActivity.cohort == model.cohort.slug, + StudentActivity.slug == "classroom_unattendance", + ] + ), + ], + ) + + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Executing get_attendancy_log"), + call("History log saved"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, []) - day1 = model.syllabus_version.json['days'][0] - day2 = model.syllabus_version.json['days'][1] - day3 = model.syllabus_version.json['days'][2] + day1 = model.syllabus_version.json["days"][0] + day2 = model.syllabus_version.json["days"][1] + day3 = model.syllabus_version.json["days"][2] history_log = { - day1['label']: { - 'current_module': 'unknown', - 'teacher_comments': None, - 'attendance_ids': attendance_ids, - 'unattendance_ids': unattendance_ids, - 'updated_at': str(UTC_NOW), + day1["label"]: { + "current_module": "unknown", + "teacher_comments": None, + "attendance_ids": attendance_ids, + "unattendance_ids": unattendance_ids, + "updated_at": str(UTC_NOW), }, - day2['label']: { - 'current_module': 'unknown', - 'teacher_comments': None, - 'attendance_ids': attendance_ids, - 'unattendance_ids': unattendance_ids, - 'updated_at': str(UTC_NOW), + day2["label"]: { + "current_module": "unknown", + "teacher_comments": None, + "attendance_ids": attendance_ids, + "unattendance_ids": unattendance_ids, + "updated_at": str(UTC_NOW), }, - day2['label']: { - 'current_module': 'unknown', - 'teacher_comments': None, - 'attendance_ids': attendance_ids, - 'unattendance_ids': unattendance_ids, - 'updated_at': str(UTC_NOW), + day2["label"]: { + "current_module": "unknown", + "teacher_comments": None, + "attendance_ids": attendance_ids, + "unattendance_ids": unattendance_ids, + "updated_at": str(UTC_NOW), }, - day3['label']: { - 'current_module': 'unknown', - 'teacher_comments': None, - 'attendance_ids': attendance_ids, - 'unattendance_ids': unattendance_ids, - 'updated_at': str(UTC_NOW), + day3["label"]: { + "current_module": "unknown", + "teacher_comments": None, + "attendance_ids": attendance_ids, + "unattendance_ids": unattendance_ids, + "updated_at": str(UTC_NOW), }, - day3['label']: { - 'current_module': 'unknown', - 'teacher_comments': None, - 'attendance_ids': attendance_ids, - 'unattendance_ids': unattendance_ids, - 'updated_at': str(UTC_NOW), + day3["label"]: { + "current_module": "unknown", + "teacher_comments": None, + "attendance_ids": attendance_ids, + "unattendance_ids": unattendance_ids, + "updated_at": str(UTC_NOW), }, - day3['label']: { - 'current_module': 'unknown', - 'teacher_comments': None, - 'attendance_ids': attendance_ids, - 'unattendance_ids': unattendance_ids, - 'updated_at': str(UTC_NOW), + day3["label"]: { + "current_module": "unknown", + "teacher_comments": None, + "attendance_ids": attendance_ids, + "unattendance_ids": unattendance_ids, + "updated_at": str(UTC_NOW), }, } - 
self.assertEqual(self.bc.database.list_of('admissions.Cohort'), [ - { - **self.bc.format.to_dict(model.cohort), - 'history_log': history_log, - }, - ]) - - self.assertEqual(self.bc.database.list_of('admissions.SyllabusVersion'), [{ - **self.bc.format.to_dict(model.syllabus_version), - }]) + self.assertEqual( + self.bc.database.list_of("admissions.Cohort"), + [ + { + **self.bc.format.to_dict(model.cohort), + "history_log": history_log, + }, + ], + ) + + self.assertEqual( + self.bc.database.list_of("admissions.SyllabusVersion"), + [ + { + **self.bc.format.to_dict(model.syllabus_version), + } + ], + ) self.assertEqual(tasks.get_attendancy_log_per_cohort_user.delay.call_args_list, []) diff --git a/breathecode/activity/tests/tasks/tests_get_attendancy_log_per_cohort_user.py b/breathecode/activity/tests/tasks/tests_get_attendancy_log_per_cohort_user.py index b707da041..5a801b84d 100644 --- a/breathecode/activity/tests/tasks/tests_get_attendancy_log_per_cohort_user.py +++ b/breathecode/activity/tests/tasks/tests_get_attendancy_log_per_cohort_user.py @@ -1,6 +1,7 @@ """ Test /answer """ + import logging import random from datetime import timedelta @@ -19,18 +20,18 @@ def get_datastore_seed(slug, day, data={}): return { - 'academy_id': 1, - 'cohort': slug, - 'created_at': (timezone.now() + timedelta(days=1)).isoformat() + 'Z', - 'data': { - 'cohort': slug, - 'day': str(day), + "academy_id": 1, + "cohort": slug, + "created_at": (timezone.now() + timedelta(days=1)).isoformat() + "Z", + "data": { + "cohort": slug, + "day": str(day), }, - 'day': day, - 'email': 'konan@naruto.io', - 'slug': 'breathecode_login', - 'user_agent': 'bc/test', - 'user_id': 1, + "day": day, + "email": "konan@naruto.io", + "slug": "breathecode_login", + "user_agent": "bc/test", + "user_id": 1, **data, } @@ -40,62 +41,74 @@ class MediaTestSuite(MediaTestCase): 🔽🔽🔽 Cohort not found """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_not_found(self): get_attendancy_log_per_cohort_user.delay(1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) - self.assertEqual(logging.Logger.info.call_args_list, [call('Executing get_attendancy_log_per_cohort_user')]) - self.assertEqual(logging.Logger.error.call_args_list, [call('Cohort user not found')]) + self.assertEqual(logging.Logger.info.call_args_list, [call("Executing get_attendancy_log_per_cohort_user")]) + self.assertEqual(logging.Logger.error.call_args_list, [call("Cohort user not found")]) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), []) + self.assertEqual(self.bc.database.list_of("admissions.CohortUser"), []) """ 🔽🔽🔽 With Cohort and CohortUser """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - 
@patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_with_cohort_user(self): - cohort = {'history_log': random.choice(['', None, {}, []])} + cohort = {"history_log": random.choice(["", None, {}, []])} model = self.bc.database.create(cohort=cohort, cohort_user=1) logging.Logger.info.call_args_list = [] get_attendancy_log_per_cohort_user.delay(1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), [ - self.bc.format.to_dict(model.cohort), - ]) - - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), [ - self.bc.format.to_dict(model.cohort_user), - ]) - - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Executing get_attendancy_log_per_cohort_user'), - ]) - self.assertEqual(logging.Logger.error.call_args_list, [ - call(f'Cohort {model.cohort.slug} has no log yet'), - ]) + self.assertEqual( + self.bc.database.list_of("admissions.Cohort"), + [ + self.bc.format.to_dict(model.cohort), + ], + ) + + self.assertEqual( + self.bc.database.list_of("admissions.CohortUser"), + [ + self.bc.format.to_dict(model.cohort_user), + ], + ) + + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Executing get_attendancy_log_per_cohort_user"), + ], + ) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call(f"Cohort {model.cohort.slug} has no log yet"), + ], + ) """ 🔽🔽🔽 With Cohort and CohortUser """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_with_cohort_user__user_do_not_assist(self): utc_now = timezone.now() day1 = str(random.randint(1, 9)) @@ -103,20 +116,20 @@ def test_with_cohort_user__user_do_not_assist(self): current_module1 = random.randint(1, 9) current_module2 = random.randint(1, 9) cohort = { - 'history_log': { + "history_log": { day1: { - 'attendance_ids': [5453, 5417, 5448, 5337, 5424, 5351, 5358], - 'current_module': current_module1, - 'teacher_comments': '', - 'unattendance_ids': [5435], - 'updated_at': self.bc.datetime.to_iso_string(utc_now), + "attendance_ids": [5453, 5417, 5448, 5337, 5424, 5351, 5358], + "current_module": current_module1, + "teacher_comments": "", + "unattendance_ids": [5435], + "updated_at": self.bc.datetime.to_iso_string(utc_now), }, day2: { - 'attendance_ids': [5453, 5417, 5448, 5337, 5424, 5351, 5358], - 
'current_module': current_module2, - 'teacher_comments': '', - 'unattendance_ids': [5435], - 'updated_at': self.bc.datetime.to_iso_string(utc_now), + "attendance_ids": [5453, 5417, 5448, 5337, 5424, 5351, 5358], + "current_module": current_module2, + "teacher_comments": "", + "unattendance_ids": [5435], + "updated_at": self.bc.datetime.to_iso_string(utc_now), }, }, } @@ -126,41 +139,50 @@ def test_with_cohort_user__user_do_not_assist(self): get_attendancy_log_per_cohort_user.delay(1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), [ - self.bc.format.to_dict(model.cohort), - ]) - - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), [{ - **self.bc.format.to_dict(model.cohort_user), - 'history_log': { - 'attendance': {}, - 'unattendance': { - day1: { - 'current_module': current_module1, - 'updated_at': self.bc.datetime.to_iso_string(utc_now), - }, - day2: { - 'current_module': current_module2, - 'updated_at': self.bc.datetime.to_iso_string(utc_now), + self.assertEqual( + self.bc.database.list_of("admissions.Cohort"), + [ + self.bc.format.to_dict(model.cohort), + ], + ) + + self.assertEqual( + self.bc.database.list_of("admissions.CohortUser"), + [ + { + **self.bc.format.to_dict(model.cohort_user), + "history_log": { + "attendance": {}, + "unattendance": { + day1: { + "current_module": current_module1, + "updated_at": self.bc.datetime.to_iso_string(utc_now), + }, + day2: { + "current_module": current_module2, + "updated_at": self.bc.datetime.to_iso_string(utc_now), + }, + }, }, - }, - }, - }]) - - self.assertEqual(logging.Logger.info.call_args_list, - [call('Executing get_attendancy_log_per_cohort_user'), - call('History log saved')]) + } + ], + ) + + self.assertEqual( + logging.Logger.info.call_args_list, + [call("Executing get_attendancy_log_per_cohort_user"), call("History log saved")], + ) self.assertEqual(logging.Logger.error.call_args_list, []) """ 🔽🔽🔽 With Cohort and CohortUser """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_with_cohort_user__user_assist(self): utc_now = timezone.now() day1 = str(random.randint(1, 9)) @@ -168,20 +190,20 @@ def test_with_cohort_user__user_assist(self): current_module1 = random.randint(1, 9) current_module2 = random.randint(1, 9) cohort = { - 'history_log': { + "history_log": { day1: { - 'attendance_ids': [1, 5453, 5417, 5448, 5337, 5424, 5351, 5358], - 'current_module': current_module1, - 'teacher_comments': '', - 'unattendance_ids': [5435], - 'updated_at': self.bc.datetime.to_iso_string(utc_now), + "attendance_ids": [1, 5453, 5417, 5448, 5337, 5424, 5351, 5358], + "current_module": current_module1, + "teacher_comments": "", + "unattendance_ids": [5435], + "updated_at": self.bc.datetime.to_iso_string(utc_now), }, day2: { - 'attendance_ids': [1, 5453, 5417, 5448, 5337, 5424, 5351, 5358], - 'current_module': current_module2, - 
'teacher_comments': '', - 'unattendance_ids': [5435], - 'updated_at': self.bc.datetime.to_iso_string(utc_now), + "attendance_ids": [1, 5453, 5417, 5448, 5337, 5424, 5351, 5358], + "current_module": current_module2, + "teacher_comments": "", + "unattendance_ids": [5435], + "updated_at": self.bc.datetime.to_iso_string(utc_now), }, } } @@ -191,43 +213,50 @@ def test_with_cohort_user__user_assist(self): get_attendancy_log_per_cohort_user.delay(1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), [ - self.bc.format.to_dict(model.cohort), - ]) - - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), [ - { - **self.bc.format.to_dict(model.cohort_user), - 'history_log': { - 'attendance': { - day1: { - 'current_module': current_module1, - 'updated_at': self.bc.datetime.to_iso_string(utc_now), - }, - day2: { - 'current_module': current_module2, - 'updated_at': self.bc.datetime.to_iso_string(utc_now), + self.assertEqual( + self.bc.database.list_of("admissions.Cohort"), + [ + self.bc.format.to_dict(model.cohort), + ], + ) + + self.assertEqual( + self.bc.database.list_of("admissions.CohortUser"), + [ + { + **self.bc.format.to_dict(model.cohort_user), + "history_log": { + "attendance": { + day1: { + "current_module": current_module1, + "updated_at": self.bc.datetime.to_iso_string(utc_now), + }, + day2: { + "current_module": current_module2, + "updated_at": self.bc.datetime.to_iso_string(utc_now), + }, }, + "unattendance": {}, }, - 'unattendance': {}, }, - }, - ]) + ], + ) - self.assertEqual(logging.Logger.info.call_args_list, - [call('Executing get_attendancy_log_per_cohort_user'), - call('History log saved')]) + self.assertEqual( + logging.Logger.info.call_args_list, + [call("Executing get_attendancy_log_per_cohort_user"), call("History log saved")], + ) self.assertEqual(logging.Logger.error.call_args_list, []) """ 🔽🔽🔽 With Cohort and CohortUser with bad history log """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_with_cohort_user__with_bad_user_log(self): utc_now = timezone.now() available_days = {str(random.randint(1, 9)) for _ in range(4)} @@ -246,58 +275,58 @@ def test_with_cohort_user__with_bad_user_log(self): current_module3 = random.randint(1, 9) current_module4 = random.randint(1, 9) cohort = { - 'history_log': { + "history_log": { day1: { - 'attendance_ids': [1, 5453, 5417, 5448, 5337, 5424, 5351, 5358], - 'current_module': current_module1, - 'teacher_comments': '', - 'unattendance_ids': [5435], - 'updated_at': self.bc.datetime.to_iso_string(utc_now), + "attendance_ids": [1, 5453, 5417, 5448, 5337, 5424, 5351, 5358], + "current_module": current_module1, + "teacher_comments": "", + "unattendance_ids": [5435], + "updated_at": self.bc.datetime.to_iso_string(utc_now), }, day2: { - 'attendance_ids': [5453, 5417, 5448, 5337, 5424, 5351, 5358], - 'current_module': 
current_module2, - 'teacher_comments': '', - 'unattendance_ids': [1, 5435], - 'updated_at': self.bc.datetime.to_iso_string(utc_now), + "attendance_ids": [5453, 5417, 5448, 5337, 5424, 5351, 5358], + "current_module": current_module2, + "teacher_comments": "", + "unattendance_ids": [1, 5435], + "updated_at": self.bc.datetime.to_iso_string(utc_now), }, day3: { - 'attendance_ids': [1, 5453, 5417, 5448, 5337, 5424, 5351, 5358], - 'current_module': current_module3, - 'teacher_comments': '', - 'unattendance_ids': [5435], - 'updated_at': self.bc.datetime.to_iso_string(utc_now), + "attendance_ids": [1, 5453, 5417, 5448, 5337, 5424, 5351, 5358], + "current_module": current_module3, + "teacher_comments": "", + "unattendance_ids": [5435], + "updated_at": self.bc.datetime.to_iso_string(utc_now), }, day4: { - 'attendance_ids': [5453, 5417, 5448, 5337, 5424, 5351, 5358], - 'current_module': current_module4, - 'teacher_comments': '', - 'unattendance_ids': [1, 5435], - 'updated_at': self.bc.datetime.to_iso_string(utc_now), + "attendance_ids": [5453, 5417, 5448, 5337, 5424, 5351, 5358], + "current_module": current_module4, + "teacher_comments": "", + "unattendance_ids": [1, 5435], + "updated_at": self.bc.datetime.to_iso_string(utc_now), }, }, } cohort_user = { - 'history_log': { - 'attendance': { + "history_log": { + "attendance": { day2: { - 'current_module': current_module1, - 'updated_at': self.bc.datetime.to_iso_string(utc_now), + "current_module": current_module1, + "updated_at": self.bc.datetime.to_iso_string(utc_now), }, day4: { - 'current_module': current_module2, - 'updated_at': self.bc.datetime.to_iso_string(utc_now), + "current_module": current_module2, + "updated_at": self.bc.datetime.to_iso_string(utc_now), }, }, - 'unattendance': { + "unattendance": { day1: { - 'current_module': current_module1, - 'updated_at': self.bc.datetime.to_iso_string(utc_now), + "current_module": current_module1, + "updated_at": self.bc.datetime.to_iso_string(utc_now), }, day3: { - 'current_module': current_module2, - 'updated_at': self.bc.datetime.to_iso_string(utc_now), + "current_module": current_module2, + "updated_at": self.bc.datetime.to_iso_string(utc_now), }, }, }, @@ -308,37 +337,46 @@ def test_with_cohort_user__with_bad_user_log(self): get_attendancy_log_per_cohort_user.delay(1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), [ - self.bc.format.to_dict(model.cohort), - ]) - - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), [{ - **self.bc.format.to_dict(model.cohort_user), - 'history_log': { - 'attendance': { - day1: { - 'current_module': current_module1, - 'updated_at': self.bc.datetime.to_iso_string(utc_now), - }, - day3: { - 'current_module': current_module3, - 'updated_at': self.bc.datetime.to_iso_string(utc_now), - }, - }, - 'unattendance': { - day2: { - 'current_module': current_module2, - 'updated_at': self.bc.datetime.to_iso_string(utc_now), - }, - day4: { - 'current_module': current_module4, - 'updated_at': self.bc.datetime.to_iso_string(utc_now), + self.assertEqual( + self.bc.database.list_of("admissions.Cohort"), + [ + self.bc.format.to_dict(model.cohort), + ], + ) + + self.assertEqual( + self.bc.database.list_of("admissions.CohortUser"), + [ + { + **self.bc.format.to_dict(model.cohort_user), + "history_log": { + "attendance": { + day1: { + "current_module": current_module1, + "updated_at": self.bc.datetime.to_iso_string(utc_now), + }, + day3: { + "current_module": current_module3, + "updated_at": self.bc.datetime.to_iso_string(utc_now), + }, + }, + 
"unattendance": { + day2: { + "current_module": current_module2, + "updated_at": self.bc.datetime.to_iso_string(utc_now), + }, + day4: { + "current_module": current_module4, + "updated_at": self.bc.datetime.to_iso_string(utc_now), + }, + }, }, - }, - }, - }]) - - self.assertEqual(logging.Logger.info.call_args_list, - [call('Executing get_attendancy_log_per_cohort_user'), - call('History log saved')]) + } + ], + ) + + self.assertEqual( + logging.Logger.info.call_args_list, + [call("Executing get_attendancy_log_per_cohort_user"), call("History log saved")], + ) self.assertEqual(logging.Logger.error.call_args_list, []) diff --git a/breathecode/activity/tests/tasks/tests_upload_activities.py b/breathecode/activity/tests/tasks/tests_upload_activities.py index 8ff2bc3b1..0772e508d 100644 --- a/breathecode/activity/tests/tasks/tests_upload_activities.py +++ b/breathecode/activity/tests/tasks/tests_upload_activities.py @@ -1,6 +1,7 @@ """ Test /answer """ + import pickle import random from unittest.mock import MagicMock, call @@ -35,39 +36,45 @@ def apply_patch(db, monkeypatch): m2 = MagicMock() m3 = MagicMock() - m3.return_value = TableMock([ - bigquery.SchemaField('character', 'STRING', 'NULLABLE'), - bigquery.SchemaField('related', - 'RECORD', - 'NULLABLE', - fields=( - bigquery.SchemaField('name', 'STRING', 'NULLABLE'), - bigquery.SchemaField('amount', 'INT64', 'NULLABLE'), - )), - bigquery.SchemaField('meta', - 'RECORD', - 'NULLABLE', - fields=( - bigquery.SchemaField('knife', 'BOOL', 'NULLABLE'), - bigquery.SchemaField('pistol', 'FLOAT64', 'NULLABLE'), - )), - ]) + m3.return_value = TableMock( + [ + bigquery.SchemaField("character", "STRING", "NULLABLE"), + bigquery.SchemaField( + "related", + "RECORD", + "NULLABLE", + fields=( + bigquery.SchemaField("name", "STRING", "NULLABLE"), + bigquery.SchemaField("amount", "INT64", "NULLABLE"), + ), + ), + bigquery.SchemaField( + "meta", + "RECORD", + "NULLABLE", + fields=( + bigquery.SchemaField("knife", "BOOL", "NULLABLE"), + bigquery.SchemaField("pistol", "FLOAT64", "NULLABLE"), + ), + ), + ] + ) m4 = MagicMock() m5 = MagicMock(return_value=[]) - monkeypatch.setattr('logging.Logger.info', m1) - monkeypatch.setattr('logging.Logger.error', m2) - monkeypatch.setattr('breathecode.activity.actions.get_workers_amount', lambda: 2) - monkeypatch.setattr('django.utils.timezone.now', lambda: UTC_NOW) - monkeypatch.setattr('google.cloud.bigquery.Client.get_table', m3) - monkeypatch.setattr('google.cloud.bigquery.Client.update_table', m4) - monkeypatch.setattr('google.cloud.bigquery.Client.insert_rows', m5) + monkeypatch.setattr("logging.Logger.info", m1) + monkeypatch.setattr("logging.Logger.error", m2) + monkeypatch.setattr("breathecode.activity.actions.get_workers_amount", lambda: 2) + monkeypatch.setattr("django.utils.timezone.now", lambda: UTC_NOW) + monkeypatch.setattr("google.cloud.bigquery.Client.get_table", m3) + monkeypatch.setattr("google.cloud.bigquery.Client.update_table", m4) + monkeypatch.setattr("google.cloud.bigquery.Client.insert_rows", m5) - monkeypatch.setattr('breathecode.services.google_cloud.credentials.resolve_credentials', lambda: None) + monkeypatch.setattr("breathecode.services.google_cloud.credentials.resolve_credentials", lambda: None) - monkeypatch.setenv('GOOGLE_PROJECT_ID', 'project') - monkeypatch.setenv('BIGQUERY_DATASET', 'dataset') + monkeypatch.setenv("GOOGLE_PROJECT_ID", "project") + monkeypatch.setenv("BIGQUERY_DATASET", "dataset") yield m1, m2, m3, m4, m5 @@ -75,17 +82,19 @@ def apply_patch(db, monkeypatch): 
@pytest.fixture def get_schema(): return lambda extra=[]: [ - bigquery.SchemaField('user_id', bigquery.enums.SqlTypeNames.INT64, 'NULLABLE'), - bigquery.SchemaField('kind', bigquery.enums.SqlTypeNames.STRING, 'NULLABLE'), - bigquery.SchemaField('timestamp', bigquery.enums.SqlTypeNames.TIMESTAMP, 'NULLABLE'), - bigquery.SchemaField('related', - bigquery.enums.SqlTypeNames.STRUCT, - 'NULLABLE', - fields=[ - bigquery.SchemaField('type', bigquery.enums.SqlTypeNames.STRING, 'NULLABLE'), - bigquery.SchemaField('id', bigquery.enums.SqlTypeNames.INT64, 'NULLABLE'), - bigquery.SchemaField('slug', bigquery.enums.SqlTypeNames.STRING, 'NULLABLE'), - ]), + bigquery.SchemaField("user_id", bigquery.enums.SqlTypeNames.INT64, "NULLABLE"), + bigquery.SchemaField("kind", bigquery.enums.SqlTypeNames.STRING, "NULLABLE"), + bigquery.SchemaField("timestamp", bigquery.enums.SqlTypeNames.TIMESTAMP, "NULLABLE"), + bigquery.SchemaField( + "related", + bigquery.enums.SqlTypeNames.STRUCT, + "NULLABLE", + fields=[ + bigquery.SchemaField("type", bigquery.enums.SqlTypeNames.STRING, "NULLABLE"), + bigquery.SchemaField("id", bigquery.enums.SqlTypeNames.INT64, "NULLABLE"), + bigquery.SchemaField("slug", bigquery.enums.SqlTypeNames.STRING, "NULLABLE"), + ], + ), *extra, ] @@ -93,16 +102,16 @@ def get_schema(): @pytest.fixture def get_data(fake): return lambda data={}: { - 'id': fake.uuid4(), - 'user_id': random.randint(1, 100), - 'kind': fake.slug(), - 'timestamp': UTC_NOW.isoformat(), - 'related': { - 'type': f'{fake.slug()}.{fake.slug()}', - 'slug': fake.slug(), - 'id': random.randint(1, 100), + "id": fake.uuid4(), + "user_id": random.randint(1, 100), + "kind": fake.slug(), + "timestamp": UTC_NOW.isoformat(), + "related": { + "type": f"{fake.slug()}.{fake.slug()}", + "slug": fake.slug(), + "id": random.randint(1, 100), }, - 'meta': {}, + "meta": {}, **data, } @@ -130,7 +139,7 @@ def sort_schema(table): schema = sorted(table.schema, key=lambda v: v.name) for field in schema: - if field.field_type == 'RECORD': + if field.field_type == "RECORD": field._fields = sorted(field._fields, key=lambda v: v.name) return schema @@ -154,14 +163,14 @@ def test_no_data(bc: Breathecode, apply_patch): upload_activities.delay() assert info_mock.call_args_list == [] - assert error_mock.call_args_list == [call('No data to upload', exc_info=True)] + assert error_mock.call_args_list == [call("No data to upload", exc_info=True)] - assert get_cache('activity:worker-0') == None - assert get_cache('activity:worker-1') == None + assert get_cache("activity:worker-0") == None + assert get_cache("activity:worker-1") == None - task = bc.database.get('task_manager.TaskManager', 1, dict=False) + task = bc.database.get("task_manager.TaskManager", 1, dict=False) - assert get_cache(f'activity:backup:{task.id}') == None + assert get_cache(f"activity:backup:{task.id}") == None assert get_table_mock.call_args_list == [] assert update_table_mock.call_args_list == [] @@ -176,63 +185,75 @@ def test_with_data_in_both_workers(bc: Breathecode, fake, apply_patch, get_schem attr3 = fake.slug() attr4 = fake.slug() - schema1 = get_schema([ - bigquery.SchemaField('meta', - bigquery.enums.SqlTypeNames.STRUCT, - 'NULLABLE', - fields=[ - bigquery.SchemaField(attr1, bigquery.enums.SqlTypeNames.INT64, 'NULLABLE'), - ]), - ]) - - schema2 = get_schema([ - bigquery.SchemaField('meta', - bigquery.enums.SqlTypeNames.STRUCT, - 'NULLABLE', - fields=[ - bigquery.SchemaField(attr2, bigquery.enums.SqlTypeNames.BOOL, 'NULLABLE'), - bigquery.SchemaField(attr3, 
bigquery.enums.SqlTypeNames.FLOAT64, 'NULLABLE'), - bigquery.SchemaField(attr4, bigquery.enums.SqlTypeNames.INT64, 'NULLABLE'), - ]), - ]) - - data1 = get_data({'meta': {attr1: 1}}) - data2 = get_data({ - 'meta': { - attr2: bool(random.randint(0, 1)), - attr3: random.random() * 100, - attr4: random.randint(1, 100), - }, - }) + schema1 = get_schema( + [ + bigquery.SchemaField( + "meta", + bigquery.enums.SqlTypeNames.STRUCT, + "NULLABLE", + fields=[ + bigquery.SchemaField(attr1, bigquery.enums.SqlTypeNames.INT64, "NULLABLE"), + ], + ), + ] + ) - data3 = get_data({ - 'meta': { - attr2: bool(random.randint(0, 1)), - attr3: random.random() * 100, - attr4: random.randint(1, 100), - }, - }) + schema2 = get_schema( + [ + bigquery.SchemaField( + "meta", + bigquery.enums.SqlTypeNames.STRUCT, + "NULLABLE", + fields=[ + bigquery.SchemaField(attr2, bigquery.enums.SqlTypeNames.BOOL, "NULLABLE"), + bigquery.SchemaField(attr3, bigquery.enums.SqlTypeNames.FLOAT64, "NULLABLE"), + bigquery.SchemaField(attr4, bigquery.enums.SqlTypeNames.INT64, "NULLABLE"), + ], + ), + ] + ) + + data1 = get_data({"meta": {attr1: 1}}) + data2 = get_data( + { + "meta": { + attr2: bool(random.randint(0, 1)), + attr3: random.random() * 100, + attr4: random.randint(1, 100), + }, + } + ) + + data3 = get_data( + { + "meta": { + attr2: bool(random.randint(0, 1)), + attr3: random.random() * 100, + attr4: random.randint(1, 100), + }, + } + ) set_cache( - 'activity:worker-0', + "activity:worker-0", [ { - 'data': data1, - 'schema': schema1, + "data": data1, + "schema": schema1, }, ], ) set_cache( - 'activity:worker-1', + "activity:worker-1", [ { - 'data': data2, - 'schema': schema2, + "data": data2, + "schema": schema2, }, { - 'data': data3, - 'schema': schema2, + "data": data3, + "schema": schema2, }, ], ) @@ -242,45 +263,56 @@ def test_with_data_in_both_workers(bc: Breathecode, fake, apply_patch, get_schem assert info_mock.call_args_list == [] assert error_mock.call_args_list == [] - assert get_cache('activity:worker-0') == None - assert get_cache('activity:worker-1') == None + assert get_cache("activity:worker-0") == None + assert get_cache("activity:worker-1") == None - task = bc.database.get('task_manager.TaskManager', 1, dict=False) + task = bc.database.get("task_manager.TaskManager", 1, dict=False) - assert get_cache(f'activity:backup:{task.id}') == None + assert get_cache(f"activity:backup:{task.id}") == None assert get_table_mock.call_args_list == [ - call('dataset.activity'), + call("dataset.activity"), ] - both_schema_are_equal(update_table_mock.call_args_list, [ - call( - TableMock([ - bigquery.SchemaField('character', 'STRING', 'NULLABLE', None, None, (), None), - bigquery.SchemaField('kind', 'STRING', 'NULLABLE'), - bigquery.SchemaField('user_id', 'INTEGER', 'NULLABLE'), - bigquery.SchemaField('timestamp', 'TIMESTAMP', 'NULLABLE'), - bigquery.SchemaField('meta', - 'RECORD', - 'NULLABLE', - fields=( - bigquery.SchemaField(attr1, bigquery.enums.SqlTypeNames.INT64, 'NULLABLE'), - bigquery.SchemaField(attr2, bigquery.enums.SqlTypeNames.BOOL, 'NULLABLE'), - bigquery.SchemaField(attr3, bigquery.enums.SqlTypeNames.FLOAT64, 'NULLABLE'), - bigquery.SchemaField(attr4, bigquery.enums.SqlTypeNames.INT64, 'NULLABLE'), - bigquery.SchemaField('knife', 'BOOL', 'NULLABLE'), - bigquery.SchemaField('pistol', 'FLOAT64', 'NULLABLE'), - )), - bigquery.SchemaField('related', - 'RECORD', - 'NULLABLE', - fields=( - bigquery.SchemaField('amount', 'INT64', 'NULLABLE'), - bigquery.SchemaField('id', 'INTEGER', 'NULLABLE'), - bigquery.SchemaField('name', 
'STRING', 'NULLABLE'), - bigquery.SchemaField('type', 'STRING', 'NULLABLE'), - bigquery.SchemaField('slug', 'STRING', 'NULLABLE'), - )), - ]), ['schema']) - ]) + both_schema_are_equal( + update_table_mock.call_args_list, + [ + call( + TableMock( + [ + bigquery.SchemaField("character", "STRING", "NULLABLE", None, None, (), None), + bigquery.SchemaField("kind", "STRING", "NULLABLE"), + bigquery.SchemaField("user_id", "INTEGER", "NULLABLE"), + bigquery.SchemaField("timestamp", "TIMESTAMP", "NULLABLE"), + bigquery.SchemaField( + "meta", + "RECORD", + "NULLABLE", + fields=( + bigquery.SchemaField(attr1, bigquery.enums.SqlTypeNames.INT64, "NULLABLE"), + bigquery.SchemaField(attr2, bigquery.enums.SqlTypeNames.BOOL, "NULLABLE"), + bigquery.SchemaField(attr3, bigquery.enums.SqlTypeNames.FLOAT64, "NULLABLE"), + bigquery.SchemaField(attr4, bigquery.enums.SqlTypeNames.INT64, "NULLABLE"), + bigquery.SchemaField("knife", "BOOL", "NULLABLE"), + bigquery.SchemaField("pistol", "FLOAT64", "NULLABLE"), + ), + ), + bigquery.SchemaField( + "related", + "RECORD", + "NULLABLE", + fields=( + bigquery.SchemaField("amount", "INT64", "NULLABLE"), + bigquery.SchemaField("id", "INTEGER", "NULLABLE"), + bigquery.SchemaField("name", "STRING", "NULLABLE"), + bigquery.SchemaField("type", "STRING", "NULLABLE"), + bigquery.SchemaField("slug", "STRING", "NULLABLE"), + ), + ), + ] + ), + ["schema"], + ) + ], + ) assert insert_rows_mock.call_args_list == [call(get_table_mock.return_value, [data1, data2, data3])] diff --git a/breathecode/activity/tests/urls/v1/tests_academy_cohort_id.py b/breathecode/activity/tests/urls/v1/tests_academy_cohort_id.py index 9b4849c69..ff18727b8 100644 --- a/breathecode/activity/tests/urls/v1/tests_academy_cohort_id.py +++ b/breathecode/activity/tests/urls/v1/tests_academy_cohort_id.py @@ -1,6 +1,7 @@ """ Test /answer """ + from django.utils import timezone from datetime import timedelta from unittest.mock import MagicMock, call, patch @@ -15,17 +16,19 @@ TOTAL = 15 -DATASTORE_SEED = [{ - 'academy_id': 0, - 'cohort': None, - 'created_at': (timezone.now() + timedelta(days=1)).isoformat() + 'Z', - 'data': None, - 'day': 13, - 'email': 'konan@naruto.io', - 'slug': 'breathecode_login', - 'user_agent': 'bc/test', - 'user_id': 1, -}] +DATASTORE_SEED = [ + { + "academy_id": 0, + "cohort": None, + "created_at": (timezone.now() + timedelta(days=1)).isoformat() + "Z", + "data": None, + "day": 13, + "email": "konan@naruto.io", + "slug": "breathecode_login", + "user_agent": "bc/test", + "user_id": 1, + } +] def generate_data(num_objs): @@ -37,7 +40,7 @@ def generate_data(num_objs): def datastore_fetch_mock(first_fetch=[]): - class Vars(): + class Vars: fetch_call_counter = 0 fetch_call_one = first_fetch @@ -48,13 +51,13 @@ def fetch(**kwargs): if Vars.fetch_call_counter % 2 == 1: result = Vars.fetch_call_one - offset = kwargs['offset'] if 'offset' in kwargs else 0 + offset = kwargs["offset"] if "offset" in kwargs else 0 try: - limit = kwargs['limit'] + limit = kwargs["limit"] except: return result if limit is not None: - return result[offset:offset + limit] + return result[offset : offset + limit] return [] return MagicMock(side_effect=fetch) @@ -74,364 +77,428 @@ class MediaTestSuite(MediaTestCase): """ def test_type__without_auth(self): - url = reverse_lazy('activity:academy_cohort_id', kwargs={'cohort_id': 1}) + '?slug=breathecode_login' + url = reverse_lazy("activity:academy_cohort_id", kwargs={"cohort_id": 1}) + "?slug=breathecode_login" response = self.client.get(url) 
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) def test_type__wrong_academy(self): self.headers(academy=1) - url = reverse_lazy('activity:academy_cohort_id', kwargs={'cohort_id': 1}) + '?slug=breathecode_login' + url = reverse_lazy("activity:academy_cohort_id", kwargs={"cohort_id": 1}) + "?slug=breathecode_login" response = self.client.get(url) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) def test_type__without_capability(self): self.headers(academy=1) - url = reverse_lazy('activity:academy_cohort_id', kwargs={'cohort_id': 1}) + '?slug=breathecode_login' + url = reverse_lazy("activity:academy_cohort_id", kwargs={"cohort_id": 1}) + "?slug=breathecode_login" self.generate_models(authenticate=True) response = self.client.get(url) json = response.json() self.assertEqual( - json, { - 'detail': ("You (user: 1) don't have this capability: classroom_activity for " - 'academy 1'), - 'status_code': 403, - }) + json, + { + "detail": ("You (user: 1) don't have this capability: classroom_activity for " "academy 1"), + "status_code": 403, + }, + ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) """ 🔽🔽🔽 Without pagination """ - @patch.object(Datastore, '__init__', new=lambda x: None) - @patch.object(Datastore, 'fetch', new=datastore_fetch_mock(first_fetch=generate_data(3))) - @patch.object(Datastore, 'count', new=datastore_count_mock(TOTAL)) + @patch.object(Datastore, "__init__", new=lambda x: None) + @patch.object(Datastore, "fetch", new=datastore_fetch_mock(first_fetch=generate_data(3))) + @patch.object(Datastore, "count", new=datastore_count_mock(TOTAL)) def test_get_activities_without_pagination(self): from breathecode.services.google_cloud import Datastore as mock + mock.fetch.call_args_list = [] self.headers(academy=1) - cohort_kwargs = {'slug': 'miami-downtown-pt-xx'} - self.generate_models(authenticate=True, - profile_academy=True, - capability='classroom_activity', - role='potato', - cohort_kwargs=cohort_kwargs) - - url = reverse_lazy('activity:academy_cohort_id', kwargs={'cohort_id': 1}) + cohort_kwargs = {"slug": "miami-downtown-pt-xx"} + self.generate_models( + authenticate=True, + profile_academy=True, + capability="classroom_activity", + role="potato", + cohort_kwargs=cohort_kwargs, + ) + + url = reverse_lazy("activity:academy_cohort_id", kwargs={"cohort_id": 1}) response = self.client.get(url) json = response.json() expected = [DATASTORE_SEED[0], DATASTORE_SEED[0], DATASTORE_SEED[0]] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(mock.fetch.call_args_list, [ - call( - kind='student_activity', - cohort='miami-downtown-pt-xx', - ), - ]) + self.assertEqual( + mock.fetch.call_args_list, + [ + call( + kind="student_activity", + cohort="miami-downtown-pt-xx", + ), + ], + ) self.assertEqual(mock.count.call_args_list, []) """ 🔽🔽🔽 With limit """ - @patch.object(Datastore, '__init__', new=lambda x: None) - @patch.object(Datastore, 'fetch', new=datastore_fetch_mock(first_fetch=generate_data(10))) - @patch.object(Datastore, 'count', new=datastore_count_mock(TOTAL)) + @patch.object(Datastore, "__init__", new=lambda x: None) + @patch.object(Datastore, "fetch", new=datastore_fetch_mock(first_fetch=generate_data(10))) + @patch.object(Datastore, "count", new=datastore_count_mock(TOTAL)) def test_get_activities_limit(self): from breathecode.services.google_cloud import Datastore as mock + mock.fetch.call_args_list = [] self.headers(academy=1) - cohort_kwargs = {'slug': 
'miami-downtown-pt-xx'} - self.generate_models(authenticate=True, - profile_academy=True, - capability='classroom_activity', - role='potato', - cohort_kwargs=cohort_kwargs) - - url = reverse_lazy('activity:academy_cohort_id', kwargs={'cohort_id': 1}) + '?limit=5' + cohort_kwargs = {"slug": "miami-downtown-pt-xx"} + self.generate_models( + authenticate=True, + profile_academy=True, + capability="classroom_activity", + role="potato", + cohort_kwargs=cohort_kwargs, + ) + + url = reverse_lazy("activity:academy_cohort_id", kwargs={"cohort_id": 1}) + "?limit=5" response = self.client.get(url) json = response.json() data = { - 'academy_id': 0, - 'cohort': None, - 'data': None, - 'day': 13, - 'email': 'konan@naruto.io', - 'slug': 'breathecode_login', - 'user_agent': 'bc/test', - 'user_id': 1 + "academy_id": 0, + "cohort": None, + "data": None, + "day": 13, + "email": "konan@naruto.io", + "slug": "breathecode_login", + "user_agent": "bc/test", + "user_id": 1, } wrapper = { - 'count': TOTAL, - 'first': None, - 'next': 'http://testserver/v1/activity/academy/cohort/1?limit=5&offset=5', - 'previous': None, - 'last': 'http://testserver/v1/activity/academy/cohort/1?limit=5&offset=10', - 'results': [data for _ in range(0, 5)] + "count": TOTAL, + "first": None, + "next": "http://testserver/v1/activity/academy/cohort/1?limit=5&offset=5", + "previous": None, + "last": "http://testserver/v1/activity/academy/cohort/1?limit=5&offset=10", + "results": [data for _ in range(0, 5)], } - for r in json['results']: - self.assertDatetime(r['created_at']) - del r['created_at'] + for r in json["results"]: + self.assertDatetime(r["created_at"]) + del r["created_at"] self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(json, wrapper) - self.assertEqual(mock.fetch.call_args_list, [ - call(kind='student_activity', cohort='miami-downtown-pt-xx', limit=5), - ]) - self.assertEqual(mock.count.call_args_list, [call( - kind='student_activity', - cohort='miami-downtown-pt-xx', - )]) + self.assertEqual( + mock.fetch.call_args_list, + [ + call(kind="student_activity", cohort="miami-downtown-pt-xx", limit=5), + ], + ) + self.assertEqual( + mock.count.call_args_list, + [ + call( + kind="student_activity", + cohort="miami-downtown-pt-xx", + ) + ], + ) """ 🔽🔽🔽 With limit and offset """ - @patch.object(Datastore, '__init__', new=lambda x: None) - @patch.object(Datastore, 'fetch', new=datastore_fetch_mock(first_fetch=generate_data(10))) - @patch.object(Datastore, 'count', new=datastore_count_mock(TOTAL)) + @patch.object(Datastore, "__init__", new=lambda x: None) + @patch.object(Datastore, "fetch", new=datastore_fetch_mock(first_fetch=generate_data(10))) + @patch.object(Datastore, "count", new=datastore_count_mock(TOTAL)) def test_get_activities_offset(self): from breathecode.services.google_cloud import Datastore as mock + mock.fetch.call_args_list = [] self.headers(academy=1) - cohort_kwargs = {'slug': 'miami-downtown-pt-xx'} - self.generate_models(authenticate=True, - profile_academy=True, - capability='classroom_activity', - role='potato', - cohort_kwargs=cohort_kwargs) - - url = reverse_lazy('activity:academy_cohort_id', kwargs={'cohort_id': 1}) + '?offset=5&limit=5' + cohort_kwargs = {"slug": "miami-downtown-pt-xx"} + self.generate_models( + authenticate=True, + profile_academy=True, + capability="classroom_activity", + role="potato", + cohort_kwargs=cohort_kwargs, + ) + + url = reverse_lazy("activity:academy_cohort_id", kwargs={"cohort_id": 1}) + "?offset=5&limit=5" response = self.client.get(url) json = 
response.json() data = { - 'academy_id': 0, - 'cohort': None, - 'data': None, - 'day': 13, - 'email': 'konan@naruto.io', - 'slug': 'breathecode_login', - 'user_agent': 'bc/test', - 'user_id': 1 + "academy_id": 0, + "cohort": None, + "data": None, + "day": 13, + "email": "konan@naruto.io", + "slug": "breathecode_login", + "user_agent": "bc/test", + "user_id": 1, } wrapper = { - 'count': TOTAL, - 'first': 'http://testserver/v1/activity/academy/cohort/1?limit=5', - 'next': 'http://testserver/v1/activity/academy/cohort/1?limit=5&offset=10', - 'previous': 'http://testserver/v1/activity/academy/cohort/1?limit=5', - 'last': 'http://testserver/v1/activity/academy/cohort/1?limit=5&offset=10', - 'results': [data for _ in range(0, 5)] + "count": TOTAL, + "first": "http://testserver/v1/activity/academy/cohort/1?limit=5", + "next": "http://testserver/v1/activity/academy/cohort/1?limit=5&offset=10", + "previous": "http://testserver/v1/activity/academy/cohort/1?limit=5", + "last": "http://testserver/v1/activity/academy/cohort/1?limit=5&offset=10", + "results": [data for _ in range(0, 5)], } - for r in json['results']: - self.assertDatetime(r['created_at']) - del r['created_at'] + for r in json["results"]: + self.assertDatetime(r["created_at"]) + del r["created_at"] self.assertEqual(json, wrapper) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(mock.fetch.call_args_list, [ - call(kind='student_activity', cohort='miami-downtown-pt-xx', limit=5, offset=5), - ]) - self.assertEqual(mock.count.call_args_list, [call( - kind='student_activity', - cohort='miami-downtown-pt-xx', - )]) + self.assertEqual( + mock.fetch.call_args_list, + [ + call(kind="student_activity", cohort="miami-downtown-pt-xx", limit=5, offset=5), + ], + ) + self.assertEqual( + mock.count.call_args_list, + [ + call( + kind="student_activity", + cohort="miami-downtown-pt-xx", + ) + ], + ) """ 🔽🔽🔽 With offset above the total items """ - @patch.object(Datastore, '__init__', new=lambda x: None) - @patch.object(Datastore, 'fetch', new=datastore_fetch_mock(first_fetch=generate_data(15))) - @patch.object(Datastore, 'count', new=datastore_count_mock(TOTAL)) + @patch.object(Datastore, "__init__", new=lambda x: None) + @patch.object(Datastore, "fetch", new=datastore_fetch_mock(first_fetch=generate_data(15))) + @patch.object(Datastore, "count", new=datastore_count_mock(TOTAL)) def test_get_activities_with_limit_and_offset(self): from breathecode.services.google_cloud import Datastore as mock + mock.fetch.call_args_list = [] self.headers(academy=1) - cohort_kwargs = {'slug': 'miami-downtown-pt-xx'} + cohort_kwargs = {"slug": "miami-downtown-pt-xx"} - self.generate_models(authenticate=True, - profile_academy=True, - capability='classroom_activity', - role='potato', - cohort_kwargs=cohort_kwargs) + self.generate_models( + authenticate=True, + profile_academy=True, + capability="classroom_activity", + role="potato", + cohort_kwargs=cohort_kwargs, + ) - url = reverse_lazy('activity:academy_cohort_id', kwargs={'cohort_id': 1}) + '?offset=10&limit=5' + url = reverse_lazy("activity:academy_cohort_id", kwargs={"cohort_id": 1}) + "?offset=10&limit=5" response = self.client.get(url) json = response.json() wrapper = { - 'count': TOTAL, - 'first': 'http://testserver/v1/activity/academy/cohort/1?limit=5', - 'next': None, - 'previous': 'http://testserver/v1/activity/academy/cohort/1?limit=5&offset=5', - 'last': None, - 'results': [DATASTORE_SEED[0] for _ in range(0, 5)] + "count": TOTAL, + "first": 
"http://testserver/v1/activity/academy/cohort/1?limit=5", + "next": None, + "previous": "http://testserver/v1/activity/academy/cohort/1?limit=5&offset=5", + "last": None, + "results": [DATASTORE_SEED[0] for _ in range(0, 5)], } self.assertEqual(json, wrapper) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(mock.fetch.call_args_list, [ - call(kind='student_activity', cohort='miami-downtown-pt-xx', limit=5, offset=10), - ]) - self.assertEqual(mock.count.call_args_list, [call( - kind='student_activity', - cohort='miami-downtown-pt-xx', - )]) + self.assertEqual( + mock.fetch.call_args_list, + [ + call(kind="student_activity", cohort="miami-downtown-pt-xx", limit=5, offset=10), + ], + ) + self.assertEqual( + mock.count.call_args_list, + [ + call( + kind="student_activity", + cohort="miami-downtown-pt-xx", + ) + ], + ) """ 🔽🔽🔽 Without cohort """ - @patch.object(Datastore, '__init__', new=lambda x: None) - @patch.object(Datastore, 'fetch', new=datastore_fetch_mock(first_fetch=DATASTORE_SEED)) + @patch.object(Datastore, "__init__", new=lambda x: None) + @patch.object(Datastore, "fetch", new=datastore_fetch_mock(first_fetch=DATASTORE_SEED)) def test_get_activities_slug_filtered(self): from breathecode.services.google_cloud import Datastore as mock + mock.fetch.call_args_list = [] self.headers(academy=1) - cohort_kwargs = {'slug': 'miami-downtown-pt-xx'} - self.generate_models(authenticate=True, - profile_academy=True, - capability='classroom_activity', - role='potato', - cohort_kwargs=cohort_kwargs) - - url = reverse_lazy('activity:academy_cohort_id', kwargs={'cohort_id': 1}) + '?slug=breathecode_login' + cohort_kwargs = {"slug": "miami-downtown-pt-xx"} + self.generate_models( + authenticate=True, + profile_academy=True, + capability="classroom_activity", + role="potato", + cohort_kwargs=cohort_kwargs, + ) + + url = reverse_lazy("activity:academy_cohort_id", kwargs={"cohort_id": 1}) + "?slug=breathecode_login" response = self.client.get(url) json = response.json() expected = [ { - 'academy_id': 0, - 'cohort': None, - 'created_at': DATASTORE_SEED[0]['created_at'], - 'data': None, - 'day': 13, - 'email': 'konan@naruto.io', - 'slug': 'breathecode_login', - 'user_agent': 'bc/test', - 'user_id': 1, + "academy_id": 0, + "cohort": None, + "created_at": DATASTORE_SEED[0]["created_at"], + "data": None, + "day": 13, + "email": "konan@naruto.io", + "slug": "breathecode_login", + "user_agent": "bc/test", + "user_id": 1, }, ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(mock.fetch.call_args_list, [ - call(kind='student_activity', cohort='miami-downtown-pt-xx', slug='breathecode_login'), - ]) + self.assertEqual( + mock.fetch.call_args_list, + [ + call(kind="student_activity", cohort="miami-downtown-pt-xx", slug="breathecode_login"), + ], + ) """ 🔽🔽🔽 Without valid cohort """ - @patch.object(Datastore, '__init__', new=lambda x: None) - @patch.object(Datastore, 'fetch', new=datastore_fetch_mock(first_fetch=DATASTORE_SEED)) + @patch.object(Datastore, "__init__", new=lambda x: None) + @patch.object(Datastore, "fetch", new=datastore_fetch_mock(first_fetch=DATASTORE_SEED)) def test_get_activities_without_valid_cohort(self): from breathecode.services.google_cloud import Datastore as mock + mock.fetch.call_args_list = [] self.headers(academy=1) - cohort_kwargs = {'slug': 'miami-downtown-pt-xx'} - self.generate_models(authenticate=True, - profile_academy=True, - capability='classroom_activity', - role='potato', - 
cohort_kwargs=cohort_kwargs) - - url = reverse_lazy('activity:academy_cohort_id', kwargs={'cohort_id': 'potato300'}) + '?slug=breathecode_login' + cohort_kwargs = {"slug": "miami-downtown-pt-xx"} + self.generate_models( + authenticate=True, + profile_academy=True, + capability="classroom_activity", + role="potato", + cohort_kwargs=cohort_kwargs, + ) + + url = reverse_lazy("activity:academy_cohort_id", kwargs={"cohort_id": "potato300"}) + "?slug=breathecode_login" response = self.client.get(url) json = response.json() - self.assertEqual(json, {'detail': 'cohort-not-found', 'status_code': 400}) + self.assertEqual(json, {"detail": "cohort-not-found", "status_code": 400}) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) """ 🔽🔽🔽 Without valid slug activity """ - @patch.object(Datastore, '__init__', new=lambda x: None) - @patch.object(Datastore, 'fetch', new=datastore_fetch_mock(first_fetch=DATASTORE_SEED)) + @patch.object(Datastore, "__init__", new=lambda x: None) + @patch.object(Datastore, "fetch", new=datastore_fetch_mock(first_fetch=DATASTORE_SEED)) def test_get_activities_without_valid_activity(self): from breathecode.services.google_cloud import Datastore as mock + mock.fetch.call_args_list = [] self.headers(academy=1) - cohort_kwargs = {'slug': 'miami-downtown-pt-xx'} - self.generate_models(authenticate=True, - profile_academy=True, - capability='classroom_activity', - role='potato', - cohort_kwargs=cohort_kwargs) - - url = reverse_lazy('activity:academy_cohort_id', kwargs={'cohort_id': 1}) + '?slug=logout' + cohort_kwargs = {"slug": "miami-downtown-pt-xx"} + self.generate_models( + authenticate=True, + profile_academy=True, + capability="classroom_activity", + role="potato", + cohort_kwargs=cohort_kwargs, + ) + + url = reverse_lazy("activity:academy_cohort_id", kwargs={"cohort_id": 1}) + "?slug=logout" response = self.client.get(url) json = response.json() - self.assertEqual(json, {'detail': 'activity-not-found', 'status_code': 400}) + self.assertEqual(json, {"detail": "activity-not-found", "status_code": 400}) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) """ 🔽🔽🔽 Invalid user_id """ - @patch.object(Datastore, '__init__', new=lambda x: None) - @patch.object(Datastore, 'fetch', new=datastore_fetch_mock(first_fetch=DATASTORE_SEED)) + @patch.object(Datastore, "__init__", new=lambda x: None) + @patch.object(Datastore, "fetch", new=datastore_fetch_mock(first_fetch=DATASTORE_SEED)) def test_get_activities_invalid_user_id(self): from breathecode.services.google_cloud import Datastore as mock + mock.fetch.call_args_list = [] self.headers(academy=1) - cohort_kwargs = {'slug': 'miami-downtown-pt-xx'} - self.generate_models(authenticate=True, - profile_academy=True, - capability='classroom_activity', - role='potato', - cohort_kwargs=cohort_kwargs) - - url = reverse_lazy('activity:academy_cohort_id', kwargs={'cohort_id': 1 - }) + '?slug=breathecode_login' + '&user_id=batman' + cohort_kwargs = {"slug": "miami-downtown-pt-xx"} + self.generate_models( + authenticate=True, + profile_academy=True, + capability="classroom_activity", + role="potato", + cohort_kwargs=cohort_kwargs, + ) + + url = ( + reverse_lazy("activity:academy_cohort_id", kwargs={"cohort_id": 1}) + + "?slug=breathecode_login" + + "&user_id=batman" + ) response = self.client.get(url) json = response.json() - self.assertEqual(json, {'detail': 'bad-user-id', 'status_code': 400}) + self.assertEqual(json, {"detail": "bad-user-id", "status_code": 400}) self.assertEqual(response.status_code, 
status.HTTP_400_BAD_REQUEST) """ 🔽🔽🔽 User no exist """ - @patch.object(Datastore, '__init__', new=lambda x: None) - @patch.object(Datastore, 'fetch', new=datastore_fetch_mock(first_fetch=DATASTORE_SEED)) + @patch.object(Datastore, "__init__", new=lambda x: None) + @patch.object(Datastore, "fetch", new=datastore_fetch_mock(first_fetch=DATASTORE_SEED)) def test_get_activities_user_no_exists(self): from breathecode.services.google_cloud import Datastore as mock + mock.fetch.call_args_list = [] self.headers(academy=1) - cohort_kwargs = {'slug': 'miami-downtown-pt-xx'} - self.generate_models(authenticate=True, - profile_academy=True, - capability='classroom_activity', - role='potato', - cohort_kwargs=cohort_kwargs) - - url = reverse_lazy('activity:academy_cohort_id', kwargs={'cohort_id': 1 - }) + '?slug=breathecode_login' + '&user_id=300' + cohort_kwargs = {"slug": "miami-downtown-pt-xx"} + self.generate_models( + authenticate=True, + profile_academy=True, + capability="classroom_activity", + role="potato", + cohort_kwargs=cohort_kwargs, + ) + + url = ( + reverse_lazy("activity:academy_cohort_id", kwargs={"cohort_id": 1}) + + "?slug=breathecode_login" + + "&user_id=300" + ) response = self.client.get(url) json = response.json() - self.assertEqual(json, {'detail': 'user-not-exists', 'status_code': 400}) + self.assertEqual(json, {"detail": "user-not-exists", "status_code": 400}) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) diff --git a/breathecode/activity/tests/urls/v1/tests_cohort_id.py b/breathecode/activity/tests/urls/v1/tests_cohort_id.py index 141d54d74..9871df9a9 100644 --- a/breathecode/activity/tests/urls/v1/tests_cohort_id.py +++ b/breathecode/activity/tests/urls/v1/tests_cohort_id.py @@ -1,6 +1,7 @@ """ Test /answer """ + from breathecode.activity.models import StudentActivity from django.utils import timezone from datetime import timedelta @@ -17,15 +18,15 @@ DATASTORE_PRIVATE_SEED = [ { - 'academy_id': 1, - 'cohort': 'miami-downtown-pt-xx', - 'created_at': (timezone.now() + timedelta(days=2)).isoformat() + 'Z', - 'data': '{"cohort": "miami-downtown-pt-xx", "day": "13"}', - 'day': 13, - 'email': 'konan@naruto.io', - 'slug': 'classroom_attendance', - 'user_agent': 'bc/test', - 'user_id': 1, + "academy_id": 1, + "cohort": "miami-downtown-pt-xx", + "created_at": (timezone.now() + timedelta(days=2)).isoformat() + "Z", + "data": '{"cohort": "miami-downtown-pt-xx", "day": "13"}', + "day": 13, + "email": "konan@naruto.io", + "slug": "classroom_attendance", + "user_agent": "bc/test", + "user_id": 1, }, ] @@ -55,60 +56,62 @@ def count(query): class MediaTestSuite(MediaTestCase): """Test /answer""" + """ 🔽🔽🔽 Auth """ def test_cohort_id__without_auth(self): - url = reverse_lazy('activity:cohort_id', kwargs={'cohort_id': 1}) + url = reverse_lazy("activity:cohort_id", kwargs={"cohort_id": 1}) response = self.client.get(url) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) def test_cohort_id__wrong_academy(self): self.headers(academy=1) - url = reverse_lazy('activity:cohort_id', kwargs={'cohort_id': 1}) + url = reverse_lazy("activity:cohort_id", kwargs={"cohort_id": 1}) response = self.client.get(url) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) def test_cohort_id__without_capability(self): self.headers(academy=1) - url = reverse_lazy('activity:cohort_id', kwargs={'cohort_id': 1}) + url = reverse_lazy("activity:cohort_id", kwargs={"cohort_id": 1}) self.generate_models(authenticate=True) response = self.client.get(url) json = 
response.json() - self.assertEqual(json, { - 'detail': ("You (user: 1) don't have this capability: read_activity for " - 'academy 1'), - 'status_code': 403, - }) + self.assertEqual( + json, + { + "detail": ("You (user: 1) don't have this capability: read_activity for " "academy 1"), + "status_code": 403, + }, + ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) """ 🔽🔽🔽 Cohort not exists """ - @patch.object(NDB, '__init__', new=ndb_init_mock) - @patch.object(NDB, 'fetch', new=ndb_fetch_mock([])) + @patch.object(NDB, "__init__", new=ndb_init_mock) + @patch.object(NDB, "fetch", new=ndb_fetch_mock([])) def test_cohort_id__without_cohort(self): from breathecode.utils import NDB as mock + ndb_init_mock.call_args_list = [] mock.fetch.call_args_list = [] self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_activity', - role='potato', - skip_cohort=True) + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_activity", role="potato", skip_cohort=True + ) - url = reverse_lazy('activity:cohort_id', kwargs={'cohort_id': 1}) + url = reverse_lazy("activity:cohort_id", kwargs={"cohort_id": 1}) response = self.client.get(url) json = response.json() - expected = {'detail': 'cohort-not-found', 'status_code': 400} + expected = {"detail": "cohort-not-found", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -119,21 +122,20 @@ def test_cohort_id__without_cohort(self): 🔽🔽🔽 Without data """ - @patch.object(NDB, '__init__', new=ndb_init_mock) - @patch.object(NDB, 'fetch', new=ndb_fetch_mock([])) + @patch.object(NDB, "__init__", new=ndb_init_mock) + @patch.object(NDB, "fetch", new=ndb_fetch_mock([])) def test_cohort_id__without_data(self): from breathecode.utils import NDB as mock + ndb_init_mock.call_args_list = [] mock.fetch.call_args_list = [] self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_activity', - role='potato', - cohort=True) + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_activity", role="potato", cohort=True + ) - url = reverse_lazy('activity:cohort_id', kwargs={'cohort_id': 1}) + url = reverse_lazy("activity:cohort_id", kwargs={"cohort_id": 1}) response = self.client.get(url) json = response.json() @@ -143,29 +145,31 @@ def test_cohort_id__without_data(self): self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(ndb_init_mock.call_args_list, [call(StudentActivity)]) - self.assertEqual(mock.fetch.call_args_list, [ - call([FilterNode('cohort', '=', model.cohort.slug)], limit=None, offset=None), - ]) + self.assertEqual( + mock.fetch.call_args_list, + [ + call([FilterNode("cohort", "=", model.cohort.slug)], limit=None, offset=None), + ], + ) """ 🔽🔽🔽 With data """ - @patch.object(NDB, '__init__', new=ndb_init_mock) - @patch.object(NDB, 'fetch', new=ndb_fetch_mock([DATASTORE_PRIVATE_SEED])) + @patch.object(NDB, "__init__", new=ndb_init_mock) + @patch.object(NDB, "fetch", new=ndb_fetch_mock([DATASTORE_PRIVATE_SEED])) def test_cohort_id(self): from breathecode.utils import NDB as mock + ndb_init_mock.call_args_list = [] mock.fetch.call_args_list = [] self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_activity', - role='potato', - cohort=True) + model = self.generate_models( + authenticate=True, profile_academy=True, 
capability="read_activity", role="potato", cohort=True + ) - url = reverse_lazy('activity:cohort_id', kwargs={'cohort_id': 1}) + url = reverse_lazy("activity:cohort_id", kwargs={"cohort_id": 1}) response = self.client.get(url) json = response.json() @@ -175,29 +179,31 @@ def test_cohort_id(self): self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(ndb_init_mock.call_args_list, [call(StudentActivity)]) - self.assertEqual(mock.fetch.call_args_list, [ - call([FilterNode('cohort', '=', model.cohort.slug)], limit=None, offset=None), - ]) + self.assertEqual( + mock.fetch.call_args_list, + [ + call([FilterNode("cohort", "=", model.cohort.slug)], limit=None, offset=None), + ], + ) """ 🔽🔽🔽 With slug """ - @patch.object(NDB, '__init__', new=ndb_init_mock) - @patch.object(NDB, 'fetch', new=ndb_fetch_mock([])) + @patch.object(NDB, "__init__", new=ndb_init_mock) + @patch.object(NDB, "fetch", new=ndb_fetch_mock([])) def test_cohort_id__with_bad_slug_in_querystring(self): from breathecode.utils import NDB as mock + ndb_init_mock.call_args_list = [] mock.fetch.call_args_list = [] self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_activity', - role='potato', - cohort=True) + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_activity", role="potato", cohort=True + ) - url = reverse_lazy('activity:cohort_id', kwargs={'cohort_id': 1}) + '?slug=breathecode_login' + url = reverse_lazy("activity:cohort_id", kwargs={"cohort_id": 1}) + "?slug=breathecode_login" response = self.client.get(url) json = response.json() @@ -207,28 +213,31 @@ def test_cohort_id__with_bad_slug_in_querystring(self): self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(ndb_init_mock.call_args_list, [call(StudentActivity)]) - self.assertEqual(mock.fetch.call_args_list, [ - call([FilterNode('slug', '=', 'breathecode_login'), - FilterNode('cohort', '=', model.cohort.slug)], - limit=None, - offset=None) - ]) - - @patch.object(NDB, '__init__', new=ndb_init_mock) - @patch.object(NDB, 'fetch', new=ndb_fetch_mock([DATASTORE_PRIVATE_SEED])) + self.assertEqual( + mock.fetch.call_args_list, + [ + call( + [FilterNode("slug", "=", "breathecode_login"), FilterNode("cohort", "=", model.cohort.slug)], + limit=None, + offset=None, + ) + ], + ) + + @patch.object(NDB, "__init__", new=ndb_init_mock) + @patch.object(NDB, "fetch", new=ndb_fetch_mock([DATASTORE_PRIVATE_SEED])) def test_cohort_id__with_slug_in_querystring(self): from breathecode.utils import NDB as mock + ndb_init_mock.call_args_list = [] mock.fetch.call_args_list = [] self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_activity', - role='potato', - cohort=True) + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_activity", role="potato", cohort=True + ) - url = reverse_lazy('activity:cohort_id', kwargs={'cohort_id': 1}) + '?slug=classroom_attendance' + url = reverse_lazy("activity:cohort_id", kwargs={"cohort_id": 1}) + "?slug=classroom_attendance" response = self.client.get(url) json = response.json() @@ -238,124 +247,128 @@ def test_cohort_id__with_slug_in_querystring(self): self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(ndb_init_mock.call_args_list, [call(StudentActivity)]) - self.assertEqual(mock.fetch.call_args_list, [ - call([FilterNode('slug', '=', 'classroom_attendance'), - FilterNode('cohort', '=', 
model.cohort.slug)], - limit=None, - offset=None) - ]) + self.assertEqual( + mock.fetch.call_args_list, + [ + call( + [FilterNode("slug", "=", "classroom_attendance"), FilterNode("cohort", "=", model.cohort.slug)], + limit=None, + offset=None, + ) + ], + ) """ 🔽🔽🔽 With pagination """ - @patch.object(NDB, '__init__', new=ndb_init_mock) - @patch.object(NDB, 'fetch', new=ndb_fetch_mock([DATASTORE_PRIVATE_SEED])) - @patch.object(NDB, 'count', new=ndb_count_mock(15)) + @patch.object(NDB, "__init__", new=ndb_init_mock) + @patch.object(NDB, "fetch", new=ndb_fetch_mock([DATASTORE_PRIVATE_SEED])) + @patch.object(NDB, "count", new=ndb_count_mock(15)) def test_cohort_id__with_pagination__first_five(self): from breathecode.utils import NDB as mock + ndb_init_mock.call_args_list = [] mock.fetch.call_args_list = [] mock.count.call_args_list = [] self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_activity', - role='potato', - cohort=True) + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_activity", role="potato", cohort=True + ) - url = reverse_lazy('activity:cohort_id', kwargs={'cohort_id': 1}) + '?limit=5&offset=0' + url = reverse_lazy("activity:cohort_id", kwargs={"cohort_id": 1}) + "?limit=5&offset=0" response = self.client.get(url) json = response.json() expected = { - 'count': 15, - 'first': None, - 'next': 'http://testserver/v1/activity/cohort/1?limit=5&offset=5', - 'previous': None, - 'last': 'http://testserver/v1/activity/cohort/1?limit=5&offset=10', - 'results': [DATASTORE_PRIVATE_SEED] + "count": 15, + "first": None, + "next": "http://testserver/v1/activity/cohort/1?limit=5&offset=5", + "previous": None, + "last": "http://testserver/v1/activity/cohort/1?limit=5&offset=10", + "results": [DATASTORE_PRIVATE_SEED], } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(ndb_init_mock.call_args_list, [call(StudentActivity)]) - self.assertEqual(mock.fetch.call_args_list, - [call([FilterNode('cohort', '=', model.cohort.slug)], limit=5, offset=0)]) - self.assertEqual(mock.count.call_args_list, [call([FilterNode('cohort', '=', model.cohort.slug)])]) - - @patch.object(NDB, '__init__', new=ndb_init_mock) - @patch.object(NDB, 'fetch', new=ndb_fetch_mock([DATASTORE_PRIVATE_SEED])) - @patch.object(NDB, 'count', new=ndb_count_mock(15)) + self.assertEqual( + mock.fetch.call_args_list, [call([FilterNode("cohort", "=", model.cohort.slug)], limit=5, offset=0)] + ) + self.assertEqual(mock.count.call_args_list, [call([FilterNode("cohort", "=", model.cohort.slug)])]) + + @patch.object(NDB, "__init__", new=ndb_init_mock) + @patch.object(NDB, "fetch", new=ndb_fetch_mock([DATASTORE_PRIVATE_SEED])) + @patch.object(NDB, "count", new=ndb_count_mock(15)) def test_cohort_id__with_pagination__second_five(self): from breathecode.utils import NDB as mock + ndb_init_mock.call_args_list = [] mock.fetch.call_args_list = [] mock.count.call_args_list = [] self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_activity', - role='potato', - cohort=True) + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_activity", role="potato", cohort=True + ) - url = reverse_lazy('activity:cohort_id', kwargs={'cohort_id': 1}) + '?limit=5&offset=5' + url = reverse_lazy("activity:cohort_id", kwargs={"cohort_id": 1}) + "?limit=5&offset=5" response = self.client.get(url) json = 
response.json() expected = { - 'count': 15, - 'first': 'http://testserver/v1/activity/cohort/1?limit=5', - 'next': 'http://testserver/v1/activity/cohort/1?limit=5&offset=10', - 'previous': 'http://testserver/v1/activity/cohort/1?limit=5', - 'last': 'http://testserver/v1/activity/cohort/1?limit=5&offset=10', - 'results': [DATASTORE_PRIVATE_SEED] + "count": 15, + "first": "http://testserver/v1/activity/cohort/1?limit=5", + "next": "http://testserver/v1/activity/cohort/1?limit=5&offset=10", + "previous": "http://testserver/v1/activity/cohort/1?limit=5", + "last": "http://testserver/v1/activity/cohort/1?limit=5&offset=10", + "results": [DATASTORE_PRIVATE_SEED], } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(ndb_init_mock.call_args_list, [call(StudentActivity)]) - self.assertEqual(mock.fetch.call_args_list, - [call([FilterNode('cohort', '=', model.cohort.slug)], limit=5, offset=5)]) - self.assertEqual(mock.count.call_args_list, [call([FilterNode('cohort', '=', model.cohort.slug)])]) - - @patch.object(NDB, '__init__', new=ndb_init_mock) - @patch.object(NDB, 'fetch', new=ndb_fetch_mock([DATASTORE_PRIVATE_SEED])) - @patch.object(NDB, 'count', new=ndb_count_mock(15)) + self.assertEqual( + mock.fetch.call_args_list, [call([FilterNode("cohort", "=", model.cohort.slug)], limit=5, offset=5)] + ) + self.assertEqual(mock.count.call_args_list, [call([FilterNode("cohort", "=", model.cohort.slug)])]) + + @patch.object(NDB, "__init__", new=ndb_init_mock) + @patch.object(NDB, "fetch", new=ndb_fetch_mock([DATASTORE_PRIVATE_SEED])) + @patch.object(NDB, "count", new=ndb_count_mock(15)) def test_cohort_id__with_pagination__last_five(self): from breathecode.utils import NDB as mock + ndb_init_mock.call_args_list = [] mock.fetch.call_args_list = [] mock.count.call_args_list = [] self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_activity', - role='potato', - cohort=True) + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_activity", role="potato", cohort=True + ) - url = reverse_lazy('activity:cohort_id', kwargs={'cohort_id': 1}) + '?limit=5&offset=10' + url = reverse_lazy("activity:cohort_id", kwargs={"cohort_id": 1}) + "?limit=5&offset=10" response = self.client.get(url) json = response.json() expected = { - 'count': 15, - 'first': 'http://testserver/v1/activity/cohort/1?limit=5', - 'next': None, - 'previous': 'http://testserver/v1/activity/cohort/1?limit=5&offset=5', - 'last': None, - 'results': [DATASTORE_PRIVATE_SEED] + "count": 15, + "first": "http://testserver/v1/activity/cohort/1?limit=5", + "next": None, + "previous": "http://testserver/v1/activity/cohort/1?limit=5&offset=5", + "last": None, + "results": [DATASTORE_PRIVATE_SEED], } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(ndb_init_mock.call_args_list, [call(StudentActivity)]) - self.assertEqual(mock.fetch.call_args_list, - [call([FilterNode('cohort', '=', model.cohort.slug)], limit=5, offset=10)]) - self.assertEqual(mock.count.call_args_list, [call([FilterNode('cohort', '=', model.cohort.slug)])]) + self.assertEqual( + mock.fetch.call_args_list, [call([FilterNode("cohort", "=", model.cohort.slug)], limit=5, offset=10)] + ) + self.assertEqual(mock.count.call_args_list, [call([FilterNode("cohort", "=", model.cohort.slug)])]) diff --git a/breathecode/activity/tests/urls/v1/tests_root.py 
b/breathecode/activity/tests/urls/v1/tests_root.py index 3b9854ce4..a8f6ea16d 100644 --- a/breathecode/activity/tests/urls/v1/tests_root.py +++ b/breathecode/activity/tests/urls/v1/tests_root.py @@ -1,6 +1,7 @@ """ Test /answer """ + from django.utils import timezone from datetime import timedelta from unittest.mock import MagicMock, call, patch @@ -14,35 +15,35 @@ DATASTORE_SHARED_SEED = [ { - 'academy_id': 0, - 'cohort': None, - 'created_at': (timezone.now() + timedelta(days=1)).isoformat() + 'Z', - 'data': None, - 'day': 13, - 'email': 'konan@naruto.io', - 'slug': 'breathecode_login', - 'user_agent': 'bc/test', - 'user_id': 1, + "academy_id": 0, + "cohort": None, + "created_at": (timezone.now() + timedelta(days=1)).isoformat() + "Z", + "data": None, + "day": 13, + "email": "konan@naruto.io", + "slug": "breathecode_login", + "user_agent": "bc/test", + "user_id": 1, }, ] DATASTORE_PRIVATE_SEED = [ { - 'academy_id': 1, - 'cohort': 'miami-downtown-pt-xx', - 'created_at': (timezone.now() + timedelta(days=2)).isoformat() + 'Z', - 'data': '{"cohort": "miami-downtown-pt-xx", "day": "13"}', - 'day': 13, - 'email': 'konan@naruto.io', - 'slug': 'classroom_attendance', - 'user_agent': 'bc/test', - 'user_id': 1, + "academy_id": 1, + "cohort": "miami-downtown-pt-xx", + "created_at": (timezone.now() + timedelta(days=2)).isoformat() + "Z", + "data": '{"cohort": "miami-downtown-pt-xx", "day": "13"}', + "day": 13, + "email": "konan@naruto.io", + "slug": "classroom_attendance", + "user_agent": "bc/test", + "user_id": 1, }, ] def datastore_fetch_mock(first_fetch=[], second_fetch=[]): - class Vars(): + class Vars: fetch_call_counter = 0 fetch_call_one = first_fetch fetch_call_two = second_fetch @@ -73,51 +74,55 @@ def update(key: str, data: dict): class MediaTestSuite(MediaTestCase): """Test /answer""" + """ 🔽🔽🔽 Auth """ def test_type__without_auth(self): - url = reverse_lazy('activity:root') + url = reverse_lazy("activity:root") response = self.client.get(url) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) def test_type__wrong_academy(self): self.headers(academy=1) - url = reverse_lazy('activity:root') + url = reverse_lazy("activity:root") response = self.client.get(url) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) def test_type__without_capability(self): self.headers(academy=1) - url = reverse_lazy('activity:root') + url = reverse_lazy("activity:root") self.generate_models(authenticate=True) response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': ("You (user: 1) don't have this capability: read_activity for " - 'academy 1'), - 'status_code': 403, - }) + self.assertEqual( + json, + { + "detail": ("You (user: 1) don't have this capability: read_activity for " "academy 1"), + "status_code": 403, + }, + ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) """ 🔽🔽🔽 Without data """ - @patch.object(Datastore, '__init__', new=lambda x: None) - @patch.object(Datastore, 'fetch', new=datastore_fetch_mock(first_fetch=[], second_fetch=[])) + @patch.object(Datastore, "__init__", new=lambda x: None) + @patch.object(Datastore, "fetch", new=datastore_fetch_mock(first_fetch=[], second_fetch=[])) def test_type__without_data(self): from breathecode.services.google_cloud import Datastore as mock + mock.fetch.call_args_list = [] self.headers(academy=1) - self.generate_models(authenticate=True, profile_academy=True, capability='read_activity', role='potato') + self.generate_models(authenticate=True, profile_academy=True, 
capability="read_activity", role="potato") - url = reverse_lazy('activity:root') + url = reverse_lazy("activity:root") response = self.client.get(url) json = response.json() @@ -125,166 +130,185 @@ def test_type__without_data(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(mock.fetch.call_args_list, [ - call(kind='student_activity', academy_id=1), - call(kind='student_activity', academy_id=0), - ]) + self.assertEqual( + mock.fetch.call_args_list, + [ + call(kind="student_activity", academy_id=1), + call(kind="student_activity", academy_id=0), + ], + ) """ 🔽🔽🔽 With data """ - @patch.object(Datastore, '__init__', new=lambda x: None) - @patch.object(Datastore, 'fetch', new=datastore_fetch_mock(first_fetch=[], second_fetch=DATASTORE_SHARED_SEED)) + @patch.object(Datastore, "__init__", new=lambda x: None) + @patch.object(Datastore, "fetch", new=datastore_fetch_mock(first_fetch=[], second_fetch=DATASTORE_SHARED_SEED)) def test_type__just_have_public_activities(self): from breathecode.services.google_cloud import Datastore as mock + mock.fetch.call_args_list = [] self.headers(academy=1) - self.generate_models(authenticate=True, profile_academy=True, capability='read_activity', role='potato') + self.generate_models(authenticate=True, profile_academy=True, capability="read_activity", role="potato") - url = reverse_lazy('activity:root') + url = reverse_lazy("activity:root") response = self.client.get(url) json = response.json() expected = [ { - 'academy_id': 0, - 'cohort': None, - 'created_at': DATASTORE_SHARED_SEED[0]['created_at'], - 'data': None, - 'day': 13, - 'email': 'konan@naruto.io', - 'slug': 'breathecode_login', - 'user_agent': 'bc/test', - 'user_id': 1, + "academy_id": 0, + "cohort": None, + "created_at": DATASTORE_SHARED_SEED[0]["created_at"], + "data": None, + "day": 13, + "email": "konan@naruto.io", + "slug": "breathecode_login", + "user_agent": "bc/test", + "user_id": 1, }, ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(mock.fetch.call_args_list, [ - call(kind='student_activity', academy_id=1), - call(kind='student_activity', academy_id=0), - ]) - - @patch.object(Datastore, '__init__', new=lambda x: None) - @patch.object(Datastore, 'fetch', new=datastore_fetch_mock(first_fetch=DATASTORE_PRIVATE_SEED, second_fetch=[])) + self.assertEqual( + mock.fetch.call_args_list, + [ + call(kind="student_activity", academy_id=1), + call(kind="student_activity", academy_id=0), + ], + ) + + @patch.object(Datastore, "__init__", new=lambda x: None) + @patch.object(Datastore, "fetch", new=datastore_fetch_mock(first_fetch=DATASTORE_PRIVATE_SEED, second_fetch=[])) def test_type__just_have_activities_from_current_academy(self): from breathecode.services.google_cloud import Datastore as mock + mock.fetch.call_args_list = [] self.headers(academy=1) - self.generate_models(authenticate=True, profile_academy=True, capability='read_activity', role='potato') + self.generate_models(authenticate=True, profile_academy=True, capability="read_activity", role="potato") - url = reverse_lazy('activity:root') + url = reverse_lazy("activity:root") response = self.client.get(url) json = response.json() expected = [ { - 'academy_id': 1, - 'cohort': 'miami-downtown-pt-xx', - 'created_at': DATASTORE_PRIVATE_SEED[0]['created_at'], - 'data': '{"cohort": "miami-downtown-pt-xx", "day": "13"}', - 'day': 13, - 'email': 'konan@naruto.io', - 'slug': 'classroom_attendance', - 'user_agent': 'bc/test', - 
'user_id': 1, + "academy_id": 1, + "cohort": "miami-downtown-pt-xx", + "created_at": DATASTORE_PRIVATE_SEED[0]["created_at"], + "data": '{"cohort": "miami-downtown-pt-xx", "day": "13"}', + "day": 13, + "email": "konan@naruto.io", + "slug": "classroom_attendance", + "user_agent": "bc/test", + "user_id": 1, }, ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(mock.fetch.call_args_list, [ - call(kind='student_activity', academy_id=1), - call(kind='student_activity', academy_id=0), - ]) - - @patch.object(Datastore, '__init__', new=lambda x: None) - @patch.object(Datastore, - 'fetch', - new=datastore_fetch_mock(first_fetch=DATASTORE_PRIVATE_SEED, second_fetch=DATASTORE_SHARED_SEED)) + self.assertEqual( + mock.fetch.call_args_list, + [ + call(kind="student_activity", academy_id=1), + call(kind="student_activity", academy_id=0), + ], + ) + + @patch.object(Datastore, "__init__", new=lambda x: None) + @patch.object( + Datastore, + "fetch", + new=datastore_fetch_mock(first_fetch=DATASTORE_PRIVATE_SEED, second_fetch=DATASTORE_SHARED_SEED), + ) def test_type__have_activities_public_and_from_current_academy(self): from breathecode.services.google_cloud import Datastore as mock + mock.fetch.call_args_list = [] self.headers(academy=1) - self.generate_models(authenticate=True, profile_academy=True, capability='read_activity', role='potato') + self.generate_models(authenticate=True, profile_academy=True, capability="read_activity", role="potato") - url = reverse_lazy('activity:root') + url = reverse_lazy("activity:root") response = self.client.get(url) json = response.json() expected = [ { - 'academy_id': 1, - 'cohort': 'miami-downtown-pt-xx', - 'created_at': DATASTORE_PRIVATE_SEED[0]['created_at'], - 'data': '{"cohort": "miami-downtown-pt-xx", "day": "13"}', - 'day': 13, - 'email': 'konan@naruto.io', - 'slug': 'classroom_attendance', - 'user_agent': 'bc/test', - 'user_id': 1, + "academy_id": 1, + "cohort": "miami-downtown-pt-xx", + "created_at": DATASTORE_PRIVATE_SEED[0]["created_at"], + "data": '{"cohort": "miami-downtown-pt-xx", "day": "13"}', + "day": 13, + "email": "konan@naruto.io", + "slug": "classroom_attendance", + "user_agent": "bc/test", + "user_id": 1, }, { - 'academy_id': 0, - 'cohort': None, - 'created_at': DATASTORE_SHARED_SEED[0]['created_at'], - 'data': None, - 'day': 13, - 'email': 'konan@naruto.io', - 'slug': 'breathecode_login', - 'user_agent': 'bc/test', - 'user_id': 1, + "academy_id": 0, + "cohort": None, + "created_at": DATASTORE_SHARED_SEED[0]["created_at"], + "data": None, + "day": 13, + "email": "konan@naruto.io", + "slug": "breathecode_login", + "user_agent": "bc/test", + "user_id": 1, }, ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(mock.fetch.call_args_list, [ - call(kind='student_activity', academy_id=1), - call(kind='student_activity', academy_id=0), - ]) + self.assertEqual( + mock.fetch.call_args_list, + [ + call(kind="student_activity", academy_id=1), + call(kind="student_activity", academy_id=0), + ], + ) """ 🔽🔽🔽 Slug in querystring """ - @patch.object(Datastore, '__init__', new=lambda x: None) - @patch.object(Datastore, 'fetch', new=datastore_fetch_mock(first_fetch=[], second_fetch=[])) + @patch.object(Datastore, "__init__", new=lambda x: None) + @patch.object(Datastore, "fetch", new=datastore_fetch_mock(first_fetch=[], second_fetch=[])) def test_type__with_data__bad_slug_by_querystring(self): from breathecode.services.google_cloud import 
Datastore as mock + mock.fetch.call_args_list = [] self.headers(academy=1) - self.generate_models(authenticate=True, profile_academy=True, capability='read_activity', role='potato') + self.generate_models(authenticate=True, profile_academy=True, capability="read_activity", role="potato") - url = reverse_lazy('activity:root') + '?slug=asdasd' + url = reverse_lazy("activity:root") + "?slug=asdasd" response = self.client.get(url) json = response.json() expected = { - 'detail': 'activity-not-found', - 'status_code': 400, + "detail": "activity-not-found", + "status_code": 400, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual(mock.fetch.call_args_list, []) - @patch.object(Datastore, '__init__', new=lambda x: None) - @patch.object(Datastore, 'fetch', new=datastore_fetch_mock(first_fetch=[], second_fetch=[])) + @patch.object(Datastore, "__init__", new=lambda x: None) + @patch.object(Datastore, "fetch", new=datastore_fetch_mock(first_fetch=[], second_fetch=[])) def test_type__with_data__slug_by_querystring__its_not_exist(self): from breathecode.services.google_cloud import Datastore as mock + mock.fetch.call_args_list = [] self.headers(academy=1) - self.generate_models(authenticate=True, profile_academy=True, capability='read_activity', role='potato') + self.generate_models(authenticate=True, profile_academy=True, capability="read_activity", role="potato") - url = reverse_lazy('activity:root') + '?slug=lesson_opened' + url = reverse_lazy("activity:root") + "?slug=lesson_opened" response = self.client.get(url) json = response.json() @@ -292,87 +316,98 @@ def test_type__with_data__slug_by_querystring__its_not_exist(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(mock.fetch.call_args_list, [ - call(kind='student_activity', slug='lesson_opened', academy_id=1), - call(kind='student_activity', slug='lesson_opened', academy_id=0), - ]) - - @patch.object(Datastore, '__init__', new=lambda x: None) - @patch.object(Datastore, 'fetch', new=datastore_fetch_mock(first_fetch=[], second_fetch=DATASTORE_SHARED_SEED)) + self.assertEqual( + mock.fetch.call_args_list, + [ + call(kind="student_activity", slug="lesson_opened", academy_id=1), + call(kind="student_activity", slug="lesson_opened", academy_id=0), + ], + ) + + @patch.object(Datastore, "__init__", new=lambda x: None) + @patch.object(Datastore, "fetch", new=datastore_fetch_mock(first_fetch=[], second_fetch=DATASTORE_SHARED_SEED)) def test_type__with_data__slug_by_querystring__its_exist(self): from breathecode.services.google_cloud import Datastore as mock + mock.fetch.call_args_list = [] self.headers(academy=1) - self.generate_models(authenticate=True, profile_academy=True, capability='read_activity', role='potato') + self.generate_models(authenticate=True, profile_academy=True, capability="read_activity", role="potato") - url = reverse_lazy('activity:root') + '?slug=breathecode_login' + url = reverse_lazy("activity:root") + "?slug=breathecode_login" response = self.client.get(url) json = response.json() expected = [ { - 'academy_id': 0, - 'cohort': None, - 'created_at': DATASTORE_SHARED_SEED[0]['created_at'], - 'data': None, - 'day': 13, - 'email': 'konan@naruto.io', - 'slug': 'breathecode_login', - 'user_agent': 'bc/test', - 'user_id': 1, + "academy_id": 0, + "cohort": None, + "created_at": DATASTORE_SHARED_SEED[0]["created_at"], + "data": None, + "day": 13, + "email": "konan@naruto.io", + "slug": "breathecode_login", + 
"user_agent": "bc/test", + "user_id": 1, }, ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(mock.fetch.call_args_list, [ - call(kind='student_activity', slug='breathecode_login', academy_id=1), - call(kind='student_activity', slug='breathecode_login', academy_id=0), - ]) + self.assertEqual( + mock.fetch.call_args_list, + [ + call(kind="student_activity", slug="breathecode_login", academy_id=1), + call(kind="student_activity", slug="breathecode_login", academy_id=0), + ], + ) """ 🔽🔽🔽 Cohort in querystring """ - @patch.object(Datastore, '__init__', new=lambda x: None) - @patch.object(Datastore, 'fetch', new=datastore_fetch_mock(first_fetch=[], second_fetch=[])) + @patch.object(Datastore, "__init__", new=lambda x: None) + @patch.object(Datastore, "fetch", new=datastore_fetch_mock(first_fetch=[], second_fetch=[])) def test_type__with_data__bad_cohort_by_querystring(self): from breathecode.services.google_cloud import Datastore as mock + mock.fetch.call_args_list = [] self.headers(academy=1) - self.generate_models(authenticate=True, profile_academy=True, capability='read_activity', role='potato') + self.generate_models(authenticate=True, profile_academy=True, capability="read_activity", role="potato") - url = reverse_lazy('activity:root') + '?cohort=asdasd' + url = reverse_lazy("activity:root") + "?cohort=asdasd" response = self.client.get(url) json = response.json() expected = { - 'detail': 'cohort-not-found', - 'status_code': 400, + "detail": "cohort-not-found", + "status_code": 400, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual(mock.fetch.call_args_list, []) - @patch.object(Datastore, '__init__', new=lambda x: None) - @patch.object(Datastore, 'fetch', new=datastore_fetch_mock(first_fetch=[], second_fetch=[])) + @patch.object(Datastore, "__init__", new=lambda x: None) + @patch.object(Datastore, "fetch", new=datastore_fetch_mock(first_fetch=[], second_fetch=[])) def test_type__with_data__cohort_by_querystring__its_not_exist(self): from breathecode.services.google_cloud import Datastore as mock + mock.fetch.call_args_list = [] self.headers(academy=1) - cohort_kwargs = {'slug': 'miami-downtown-pt-xx'} - self.generate_models(authenticate=True, - profile_academy=True, - capability='read_activity', - role='potato', - cohort=True, - cohort_kwargs=cohort_kwargs) - - url = reverse_lazy('activity:root') + '?cohort=miami-downtown-pt-xx' + cohort_kwargs = {"slug": "miami-downtown-pt-xx"} + self.generate_models( + authenticate=True, + profile_academy=True, + capability="read_activity", + role="potato", + cohort=True, + cohort_kwargs=cohort_kwargs, + ) + + url = reverse_lazy("activity:root") + "?cohort=miami-downtown-pt-xx" response = self.client.get(url) json = response.json() @@ -380,115 +415,129 @@ def test_type__with_data__cohort_by_querystring__its_not_exist(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(mock.fetch.call_args_list, [ - call(kind='student_activity', cohort='miami-downtown-pt-xx', academy_id=1), - call(kind='student_activity', cohort='miami-downtown-pt-xx', academy_id=0), - ]) - - @patch.object(Datastore, '__init__', new=lambda x: None) - @patch.object(Datastore, 'fetch', new=datastore_fetch_mock(first_fetch=DATASTORE_PRIVATE_SEED, second_fetch=[])) + self.assertEqual( + mock.fetch.call_args_list, + [ + call(kind="student_activity", cohort="miami-downtown-pt-xx", academy_id=1), + 
call(kind="student_activity", cohort="miami-downtown-pt-xx", academy_id=0), + ], + ) + + @patch.object(Datastore, "__init__", new=lambda x: None) + @patch.object(Datastore, "fetch", new=datastore_fetch_mock(first_fetch=DATASTORE_PRIVATE_SEED, second_fetch=[])) def test_type__with_data__cohort_by_querystring__its_exist(self): from breathecode.services.google_cloud import Datastore as mock + mock.fetch.call_args_list = [] self.headers(academy=1) - cohort_kwargs = {'slug': 'miami-downtown-pt-xx'} - self.generate_models(authenticate=True, - profile_academy=True, - capability='read_activity', - role='potato', - cohort=True, - cohort_kwargs=cohort_kwargs) - - url = reverse_lazy('activity:root') + '?cohort=miami-downtown-pt-xx' + cohort_kwargs = {"slug": "miami-downtown-pt-xx"} + self.generate_models( + authenticate=True, + profile_academy=True, + capability="read_activity", + role="potato", + cohort=True, + cohort_kwargs=cohort_kwargs, + ) + + url = reverse_lazy("activity:root") + "?cohort=miami-downtown-pt-xx" response = self.client.get(url) json = response.json() expected = [ { - 'academy_id': 1, - 'cohort': 'miami-downtown-pt-xx', - 'created_at': DATASTORE_PRIVATE_SEED[0]['created_at'], - 'data': '{"cohort": "miami-downtown-pt-xx", "day": "13"}', - 'day': 13, - 'email': 'konan@naruto.io', - 'slug': 'classroom_attendance', - 'user_agent': 'bc/test', - 'user_id': 1, + "academy_id": 1, + "cohort": "miami-downtown-pt-xx", + "created_at": DATASTORE_PRIVATE_SEED[0]["created_at"], + "data": '{"cohort": "miami-downtown-pt-xx", "day": "13"}', + "day": 13, + "email": "konan@naruto.io", + "slug": "classroom_attendance", + "user_agent": "bc/test", + "user_id": 1, }, ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(mock.fetch.call_args_list, [ - call(kind='student_activity', cohort='miami-downtown-pt-xx', academy_id=1), - call(kind='student_activity', cohort='miami-downtown-pt-xx', academy_id=0), - ]) + self.assertEqual( + mock.fetch.call_args_list, + [ + call(kind="student_activity", cohort="miami-downtown-pt-xx", academy_id=1), + call(kind="student_activity", cohort="miami-downtown-pt-xx", academy_id=0), + ], + ) """ 🔽🔽🔽 User id in querystring """ - @patch.object(Datastore, '__init__', new=lambda x: None) - @patch.object(Datastore, 'fetch', new=datastore_fetch_mock(first_fetch=[], second_fetch=[])) + @patch.object(Datastore, "__init__", new=lambda x: None) + @patch.object(Datastore, "fetch", new=datastore_fetch_mock(first_fetch=[], second_fetch=[])) def test_type__with_data__bad_user_id_by_querystring(self): from breathecode.services.google_cloud import Datastore as mock + mock.fetch.call_args_list = [] self.headers(academy=1) - self.generate_models(authenticate=True, profile_academy=True, capability='read_activity', role='potato') + self.generate_models(authenticate=True, profile_academy=True, capability="read_activity", role="potato") - url = reverse_lazy('activity:root') + '?user_id=0' + url = reverse_lazy("activity:root") + "?user_id=0" response = self.client.get(url) json = response.json() expected = { - 'detail': 'user-not-exists', - 'status_code': 400, + "detail": "user-not-exists", + "status_code": 400, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual(mock.fetch.call_args_list, []) - @patch.object(Datastore, '__init__', new=lambda x: None) - @patch.object(Datastore, 'fetch', new=datastore_fetch_mock(first_fetch=[], second_fetch=[])) + @patch.object(Datastore, 
"__init__", new=lambda x: None) + @patch.object(Datastore, "fetch", new=datastore_fetch_mock(first_fetch=[], second_fetch=[])) def test_type__with_data__user_id_is_string_by_querystring(self): from breathecode.services.google_cloud import Datastore as mock + mock.fetch.call_args_list = [] self.headers(academy=1) - self.generate_models(authenticate=True, profile_academy=True, capability='read_activity', role='potato') + self.generate_models(authenticate=True, profile_academy=True, capability="read_activity", role="potato") - url = reverse_lazy('activity:root') + '?user_id=they-killed-kenny' + url = reverse_lazy("activity:root") + "?user_id=they-killed-kenny" response = self.client.get(url) json = response.json() expected = { - 'detail': 'bad-user-id', - 'status_code': 400, + "detail": "bad-user-id", + "status_code": 400, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual(mock.fetch.call_args_list, []) - @patch.object(Datastore, '__init__', new=lambda x: None) - @patch.object(Datastore, 'fetch', new=datastore_fetch_mock(first_fetch=[], second_fetch=[])) + @patch.object(Datastore, "__init__", new=lambda x: None) + @patch.object(Datastore, "fetch", new=datastore_fetch_mock(first_fetch=[], second_fetch=[])) def test_type__with_data__user_id_by_querystring__its_not_exist(self): from breathecode.services.google_cloud import Datastore as mock + mock.fetch.call_args_list = [] self.headers(academy=1) - cohort_kwargs = {'slug': 'miami-downtown-pt-xx'} - self.generate_models(authenticate=True, - profile_academy=True, - capability='read_activity', - role='potato', - cohort=True, - cohort_kwargs=cohort_kwargs) - - url = reverse_lazy('activity:root') + '?user_id=1' + cohort_kwargs = {"slug": "miami-downtown-pt-xx"} + self.generate_models( + authenticate=True, + profile_academy=True, + capability="read_activity", + role="potato", + cohort=True, + cohort_kwargs=cohort_kwargs, + ) + + url = reverse_lazy("activity:root") + "?user_id=1" response = self.client.get(url) json = response.json() @@ -496,95 +545,108 @@ def test_type__with_data__user_id_by_querystring__its_not_exist(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(mock.fetch.call_args_list, [ - call(kind='student_activity', user_id=1, academy_id=1), - call(kind='student_activity', user_id=1, academy_id=0), - ]) - - @patch.object(Datastore, '__init__', new=lambda x: None) - @patch.object(Datastore, 'fetch', new=datastore_fetch_mock(first_fetch=DATASTORE_PRIVATE_SEED, second_fetch=[])) + self.assertEqual( + mock.fetch.call_args_list, + [ + call(kind="student_activity", user_id=1, academy_id=1), + call(kind="student_activity", user_id=1, academy_id=0), + ], + ) + + @patch.object(Datastore, "__init__", new=lambda x: None) + @patch.object(Datastore, "fetch", new=datastore_fetch_mock(first_fetch=DATASTORE_PRIVATE_SEED, second_fetch=[])) def test_type__with_data__user_id_by_querystring__its_exist(self): from breathecode.services.google_cloud import Datastore as mock + mock.fetch.call_args_list = [] self.headers(academy=1) - cohort_kwargs = {'slug': 'miami-downtown-pt-xx'} - self.generate_models(authenticate=True, - profile_academy=True, - capability='read_activity', - role='potato', - cohort=True, - cohort_kwargs=cohort_kwargs) - - url = reverse_lazy('activity:root') + '?user_id=1' + cohort_kwargs = {"slug": "miami-downtown-pt-xx"} + self.generate_models( + authenticate=True, + profile_academy=True, + 
capability="read_activity", + role="potato", + cohort=True, + cohort_kwargs=cohort_kwargs, + ) + + url = reverse_lazy("activity:root") + "?user_id=1" response = self.client.get(url) json = response.json() expected = [ { - 'academy_id': 1, - 'cohort': 'miami-downtown-pt-xx', - 'created_at': DATASTORE_PRIVATE_SEED[0]['created_at'], - 'data': '{"cohort": "miami-downtown-pt-xx", "day": "13"}', - 'day': 13, - 'email': 'konan@naruto.io', - 'slug': 'classroom_attendance', - 'user_agent': 'bc/test', - 'user_id': 1, + "academy_id": 1, + "cohort": "miami-downtown-pt-xx", + "created_at": DATASTORE_PRIVATE_SEED[0]["created_at"], + "data": '{"cohort": "miami-downtown-pt-xx", "day": "13"}', + "day": 13, + "email": "konan@naruto.io", + "slug": "classroom_attendance", + "user_agent": "bc/test", + "user_id": 1, }, ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(mock.fetch.call_args_list, [ - call(kind='student_activity', user_id=1, academy_id=1), - call(kind='student_activity', user_id=1, academy_id=0), - ]) + self.assertEqual( + mock.fetch.call_args_list, + [ + call(kind="student_activity", user_id=1, academy_id=1), + call(kind="student_activity", user_id=1, academy_id=0), + ], + ) """ 🔽🔽🔽 Email in querystring """ - @patch.object(Datastore, '__init__', new=lambda x: None) - @patch.object(Datastore, 'fetch', new=datastore_fetch_mock(first_fetch=[], second_fetch=[])) + @patch.object(Datastore, "__init__", new=lambda x: None) + @patch.object(Datastore, "fetch", new=datastore_fetch_mock(first_fetch=[], second_fetch=[])) def test_type__with_data__bad_email_by_querystring(self): from breathecode.services.google_cloud import Datastore as mock + mock.fetch.call_args_list = [] self.headers(academy=1) - self.generate_models(authenticate=True, profile_academy=True, capability='read_activity', role='potato') + self.generate_models(authenticate=True, profile_academy=True, capability="read_activity", role="potato") - url = reverse_lazy('activity:root') + '?email=xyz' + url = reverse_lazy("activity:root") + "?email=xyz" response = self.client.get(url) json = response.json() expected = { - 'detail': 'user-not-exists', - 'status_code': 400, + "detail": "user-not-exists", + "status_code": 400, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual(mock.fetch.call_args_list, []) - @patch.object(Datastore, '__init__', new=lambda x: None) - @patch.object(Datastore, 'fetch', new=datastore_fetch_mock(first_fetch=[], second_fetch=[])) + @patch.object(Datastore, "__init__", new=lambda x: None) + @patch.object(Datastore, "fetch", new=datastore_fetch_mock(first_fetch=[], second_fetch=[])) def test_type__with_data__email_by_querystring__its_not_exist(self): from breathecode.services.google_cloud import Datastore as mock + mock.fetch.call_args_list = [] self.headers(academy=1) - cohort_kwargs = {'slug': 'miami-downtown-pt-xx'} - user_kwargs = {'email': 'konan@naruto.io'} - self.generate_models(authenticate=True, - profile_academy=True, - capability='read_activity', - role='potato', - cohort=True, - cohort_kwargs=cohort_kwargs, - user_kwargs=user_kwargs) - - url = reverse_lazy('activity:root') + '?email=konan@naruto.io' + cohort_kwargs = {"slug": "miami-downtown-pt-xx"} + user_kwargs = {"email": "konan@naruto.io"} + self.generate_models( + authenticate=True, + profile_academy=True, + capability="read_activity", + role="potato", + cohort=True, + cohort_kwargs=cohort_kwargs, + user_kwargs=user_kwargs, + ) + + 
url = reverse_lazy("activity:root") + "?email=konan@naruto.io" response = self.client.get(url) json = response.json() @@ -592,92 +654,103 @@ def test_type__with_data__email_by_querystring__its_not_exist(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(mock.fetch.call_args_list, [ - call(kind='student_activity', email='konan@naruto.io', academy_id=1), - call(kind='student_activity', email='konan@naruto.io', academy_id=0), - ]) - - @patch.object(Datastore, '__init__', new=lambda x: None) - @patch.object(Datastore, 'fetch', new=datastore_fetch_mock(first_fetch=DATASTORE_PRIVATE_SEED, second_fetch=[])) + self.assertEqual( + mock.fetch.call_args_list, + [ + call(kind="student_activity", email="konan@naruto.io", academy_id=1), + call(kind="student_activity", email="konan@naruto.io", academy_id=0), + ], + ) + + @patch.object(Datastore, "__init__", new=lambda x: None) + @patch.object(Datastore, "fetch", new=datastore_fetch_mock(first_fetch=DATASTORE_PRIVATE_SEED, second_fetch=[])) def test_type__with_data__email_by_querystring__its_exist(self): from breathecode.services.google_cloud import Datastore as mock + mock.fetch.call_args_list = [] self.headers(academy=1) - cohort_kwargs = {'slug': 'miami-downtown-pt-xx'} - user_kwargs = {'email': 'konan@naruto.io'} - self.generate_models(authenticate=True, - profile_academy=True, - capability='read_activity', - role='potato', - cohort=True, - cohort_kwargs=cohort_kwargs, - user_kwargs=user_kwargs) - - url = reverse_lazy('activity:root') + '?email=konan@naruto.io' + cohort_kwargs = {"slug": "miami-downtown-pt-xx"} + user_kwargs = {"email": "konan@naruto.io"} + self.generate_models( + authenticate=True, + profile_academy=True, + capability="read_activity", + role="potato", + cohort=True, + cohort_kwargs=cohort_kwargs, + user_kwargs=user_kwargs, + ) + + url = reverse_lazy("activity:root") + "?email=konan@naruto.io" response = self.client.get(url) json = response.json() expected = [ { - 'academy_id': 1, - 'cohort': 'miami-downtown-pt-xx', - 'created_at': DATASTORE_PRIVATE_SEED[0]['created_at'], - 'data': '{"cohort": "miami-downtown-pt-xx", "day": "13"}', - 'day': 13, - 'email': 'konan@naruto.io', - 'slug': 'classroom_attendance', - 'user_agent': 'bc/test', - 'user_id': 1, + "academy_id": 1, + "cohort": "miami-downtown-pt-xx", + "created_at": DATASTORE_PRIVATE_SEED[0]["created_at"], + "data": '{"cohort": "miami-downtown-pt-xx", "day": "13"}', + "day": 13, + "email": "konan@naruto.io", + "slug": "classroom_attendance", + "user_agent": "bc/test", + "user_id": 1, }, ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(mock.fetch.call_args_list, [ - call(kind='student_activity', email='konan@naruto.io', academy_id=1), - call(kind='student_activity', email='konan@naruto.io', academy_id=0), - ]) + self.assertEqual( + mock.fetch.call_args_list, + [ + call(kind="student_activity", email="konan@naruto.io", academy_id=1), + call(kind="student_activity", email="konan@naruto.io", academy_id=0), + ], + ) """ 🔽🔽🔽 Post missing fields """ - @patch.object(Datastore, '__init__', new=lambda x: None) - @patch.object(Datastore, 'update', new=datastore_update_mock()) + @patch.object(Datastore, "__init__", new=lambda x: None) + @patch.object(Datastore, "update", new=datastore_update_mock()) def test_user_id__post__missing_slug(self): from breathecode.services.google_cloud import Datastore as mock + mock.update.call_args_list = [] self.headers(academy=1) - 
self.generate_models(authenticate=True, profile_academy=True, capability='crud_activity', role='potato') + self.generate_models(authenticate=True, profile_academy=True, capability="crud_activity", role="potato") - url = reverse_lazy('activity:root') + url = reverse_lazy("activity:root") data = {} response = self.client.post(url, data) json = response.json() - expected = {'detail': 'missing-slug', 'status_code': 400} + expected = {"detail": "missing-slug", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual(mock.update.call_args_list, []) - @patch.object(Datastore, '__init__', new=lambda x: None) - @patch.object(Datastore, 'update', new=datastore_update_mock()) + @patch.object(Datastore, "__init__", new=lambda x: None) + @patch.object(Datastore, "update", new=datastore_update_mock()) def test_user_id__post__missing_user_agent(self): from breathecode.services.google_cloud import Datastore as mock + mock.update.call_args_list = [] self.headers(academy=1) - self.generate_models(authenticate=True, profile_academy=True, capability='crud_activity', role='potato') + self.generate_models(authenticate=True, profile_academy=True, capability="crud_activity", role="potato") - url = reverse_lazy('activity:root') - data = {'slug': 'they-killed-kenny'} + url = reverse_lazy("activity:root") + data = {"slug": "they-killed-kenny"} response = self.client.post(url, data) json = response.json() - expected = {'detail': 'missing-user-agent', 'status_code': 400} + expected = {"detail": "missing-user-agent", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -687,23 +760,24 @@ def test_user_id__post__missing_user_agent(self): 🔽🔽🔽 Post bad slug """ - @patch.object(Datastore, '__init__', new=lambda x: None) - @patch.object(Datastore, 'update', new=datastore_update_mock()) + @patch.object(Datastore, "__init__", new=lambda x: None) + @patch.object(Datastore, "update", new=datastore_update_mock()) def test_user_id__post__with_bad_slug(self): from breathecode.services.google_cloud import Datastore as mock + mock.update.call_args_list = [] self.headers(academy=1) - self.generate_models(authenticate=True, profile_academy=True, capability='crud_activity', role='potato') + self.generate_models(authenticate=True, profile_academy=True, capability="crud_activity", role="potato") - url = reverse_lazy('activity:root') + url = reverse_lazy("activity:root") data = { - 'slug': 'they-killed-kenny', - 'user_agent': 'bc/test', + "slug": "they-killed-kenny", + "user_agent": "bc/test", } - response = self.client.post(url, data, format='json') + response = self.client.post(url, data, format="json") json = response.json() - expected = {'detail': 'activity-not-found', 'status_code': 400} + expected = {"detail": "activity-not-found", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -713,73 +787,78 @@ def test_user_id__post__with_bad_slug(self): 🔽🔽🔽 Post with public slug """ - @patch.object(Datastore, '__init__', new=lambda x: None) - @patch.object(Datastore, 'update', new=datastore_update_mock()) + @patch.object(Datastore, "__init__", new=lambda x: None) + @patch.object(Datastore, "update", new=datastore_update_mock()) def test_user_id__post__with_public_slug(self): from breathecode.services.google_cloud import Datastore as mock + mock.update.call_args_list = [] self.headers(academy=1) - model = 
self.generate_models(authenticate=True, profile_academy=True, capability='crud_activity', role='potato') + model = self.generate_models(authenticate=True, profile_academy=True, capability="crud_activity", role="potato") - url = reverse_lazy('activity:root') + url = reverse_lazy("activity:root") data = { - 'slug': 'breathecode_login', - 'user_agent': 'bc/test', + "slug": "breathecode_login", + "user_agent": "bc/test", } - response = self.client.post(url, data, format='json') + response = self.client.post(url, data, format="json") json = response.json() - self.assertDatetime(json['created_at']) - created_at = json['created_at'] - del json['created_at'] + self.assertDatetime(json["created_at"]) + created_at = json["created_at"] + del json["created_at"] expected = { - 'academy_id': 0, - 'email': model.user.email, - 'slug': 'breathecode_login', - 'user_agent': 'bc/test', - 'user_id': 1, + "academy_id": 0, + "email": model.user.email, + "slug": "breathecode_login", + "user_agent": "bc/test", + "user_id": 1, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - self.assertEqual(mock.update.call_args_list, [ - call( - 'student_activity', - { - 'slug': 'breathecode_login', - 'user_agent': 'bc/test', - 'created_at': self.iso_to_datetime(created_at), - 'user_id': 1, - 'email': model.user.email, - 'academy_id': 0, - }, - ), - ]) + self.assertEqual( + mock.update.call_args_list, + [ + call( + "student_activity", + { + "slug": "breathecode_login", + "user_agent": "bc/test", + "created_at": self.iso_to_datetime(created_at), + "user_id": 1, + "email": model.user.email, + "academy_id": 0, + }, + ), + ], + ) """ 🔽🔽🔽 Post with private slug missing cohort """ - @patch.object(Datastore, '__init__', new=lambda x: None) - @patch.object(Datastore, 'update', new=datastore_update_mock()) + @patch.object(Datastore, "__init__", new=lambda x: None) + @patch.object(Datastore, "update", new=datastore_update_mock()) def test_user_id__post__missing_cohort(self): from breathecode.services.google_cloud import Datastore as mock + mock.update.call_args_list = [] self.headers(academy=1) - self.generate_models(authenticate=True, profile_academy=True, capability='crud_activity', role='potato') + self.generate_models(authenticate=True, profile_academy=True, capability="crud_activity", role="potato") - url = reverse_lazy('activity:root') + url = reverse_lazy("activity:root") data = { - 'slug': 'nps_survey_answered', - 'user_agent': 'bc/test', + "slug": "nps_survey_answered", + "user_agent": "bc/test", } - response = self.client.post(url, data, format='json') + response = self.client.post(url, data, format="json") json = response.json() - expected = {'detail': 'missing-cohort', 'status_code': 400} + expected = {"detail": "missing-cohort", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -789,24 +868,25 @@ def test_user_id__post__missing_cohort(self): 🔽🔽🔽 Post with private slug without cohort """ - @patch.object(Datastore, '__init__', new=lambda x: None) - @patch.object(Datastore, 'update', new=datastore_update_mock()) + @patch.object(Datastore, "__init__", new=lambda x: None) + @patch.object(Datastore, "update", new=datastore_update_mock()) def test_user_id__post__with_private_slug__slug_require_a_cohort(self): from breathecode.services.google_cloud import Datastore as mock + mock.update.call_args_list = [] self.headers(academy=1) - self.generate_models(authenticate=True, profile_academy=True, 
capability='crud_activity', role='potato') + self.generate_models(authenticate=True, profile_academy=True, capability="crud_activity", role="potato") - url = reverse_lazy('activity:root') + url = reverse_lazy("activity:root") data = { - 'cohort': 'they-killed-kenny', - 'slug': 'nps_survey_answered', - 'user_agent': 'bc/test', + "cohort": "they-killed-kenny", + "slug": "nps_survey_answered", + "user_agent": "bc/test", } - response = self.client.post(url, data, format='json') + response = self.client.post(url, data, format="json") json = response.json() - expected = {'detail': 'missing-data', 'status_code': 400} + expected = {"detail": "missing-data", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -816,25 +896,26 @@ def test_user_id__post__with_private_slug__slug_require_a_cohort(self): 🔽🔽🔽 Post with private slug without data """ - @patch.object(Datastore, '__init__', new=lambda x: None) - @patch.object(Datastore, 'update', new=datastore_update_mock()) + @patch.object(Datastore, "__init__", new=lambda x: None) + @patch.object(Datastore, "update", new=datastore_update_mock()) def test_user_id__post__with_private_slug__slug_require_a_data(self): from breathecode.services.google_cloud import Datastore as mock + mock.update.call_args_list = [] self.headers(academy=1) - self.generate_models(authenticate=True, profile_academy=True, capability='crud_activity', role='potato') + self.generate_models(authenticate=True, profile_academy=True, capability="crud_activity", role="potato") - url = reverse_lazy('activity:root') + url = reverse_lazy("activity:root") data = { - 'data': '', - 'cohort': 'they-killed-kenny', - 'slug': 'nps_survey_answered', - 'user_agent': 'bc/test', + "data": "", + "cohort": "they-killed-kenny", + "slug": "nps_survey_answered", + "user_agent": "bc/test", } - response = self.client.post(url, data, format='json') + response = self.client.post(url, data, format="json") json = response.json() - expected = {'detail': 'data-is-not-a-json', 'status_code': 400} + expected = {"detail": "data-is-not-a-json", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -844,25 +925,26 @@ def test_user_id__post__with_private_slug__slug_require_a_data(self): 🔽🔽🔽 Post with private slug bad cohort """ - @patch.object(Datastore, '__init__', new=lambda x: None) - @patch.object(Datastore, 'update', new=datastore_update_mock()) + @patch.object(Datastore, "__init__", new=lambda x: None) + @patch.object(Datastore, "update", new=datastore_update_mock()) def test_user_id__post__with_private_slug__cohort_not_exist(self): from breathecode.services.google_cloud import Datastore as mock + mock.update.call_args_list = [] self.headers(academy=1) - self.generate_models(authenticate=True, profile_academy=True, capability='crud_activity', role='potato') + self.generate_models(authenticate=True, profile_academy=True, capability="crud_activity", role="potato") - url = reverse_lazy('activity:root') + url = reverse_lazy("activity:root") data = { - 'data': '{"name": "Freyja"}', - 'cohort': 'they-killed-kenny', - 'slug': 'nps_survey_answered', - 'user_agent': 'bc/test', + "data": '{"name": "Freyja"}', + "cohort": "they-killed-kenny", + "slug": "nps_survey_answered", + "user_agent": "bc/test", } - response = self.client.post(url, data, format='json') + response = self.client.post(url, data, format="json") json = response.json() - expected = {'detail': 'cohort-not-exists', 
'status_code': 400} + expected = {"detail": "cohort-not-exists", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -872,30 +954,29 @@ def test_user_id__post__with_private_slug__cohort_not_exist(self): 🔽🔽🔽 Post with private slug field not allowed """ - @patch.object(Datastore, '__init__', new=lambda x: None) - @patch.object(Datastore, 'update', new=datastore_update_mock()) + @patch.object(Datastore, "__init__", new=lambda x: None) + @patch.object(Datastore, "update", new=datastore_update_mock()) def test_user_id__post__with_private_slug__field_not_allowed(self): from breathecode.services.google_cloud import Datastore as mock + mock.update.call_args_list = [] self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_activity', - role='potato', - cohort=True) + model = self.generate_models( + authenticate=True, profile_academy=True, capability="crud_activity", role="potato", cohort=True + ) - url = reverse_lazy('activity:root') + url = reverse_lazy("activity:root") data = { - 'data': '{"name": "Freyja"}', - 'cohort': model.cohort.slug, - 'slug': 'nps_survey_answered', - 'user_agent': 'bc/test', - 'id': 1, + "data": '{"name": "Freyja"}', + "cohort": model.cohort.slug, + "slug": "nps_survey_answered", + "user_agent": "bc/test", + "id": 1, } - response = self.client.post(url, data, format='json') + response = self.client.post(url, data, format="json") json = response.json() - expected = {'detail': 'id-not-allowed', 'status_code': 400} + expected = {"detail": "id-not-allowed", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -905,56 +986,58 @@ def test_user_id__post__with_private_slug__field_not_allowed(self): 🔽🔽🔽 Post with private slug """ - @patch.object(Datastore, '__init__', new=lambda x: None) - @patch.object(Datastore, 'update', new=datastore_update_mock()) + @patch.object(Datastore, "__init__", new=lambda x: None) + @patch.object(Datastore, "update", new=datastore_update_mock()) def test_user_id__post__with_private_slug__cohort_not_exist___(self): from breathecode.services.google_cloud import Datastore as mock + mock.update.call_args_list = [] self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_activity', - role='potato', - cohort=True) + model = self.generate_models( + authenticate=True, profile_academy=True, capability="crud_activity", role="potato", cohort=True + ) - url = reverse_lazy('activity:root') + url = reverse_lazy("activity:root") data = { - 'data': '{"name": "Freyja"}', - 'cohort': model.cohort.slug, - 'slug': 'nps_survey_answered', - 'user_agent': 'bc/test', + "data": '{"name": "Freyja"}', + "cohort": model.cohort.slug, + "slug": "nps_survey_answered", + "user_agent": "bc/test", } - response = self.client.post(url, data, format='json') + response = self.client.post(url, data, format="json") json = response.json() expected = { - 'academy_id': 1, - 'cohort': model.cohort.slug, - 'data': '{"name": "Freyja"}', - 'email': model.user.email, - 'slug': 'nps_survey_answered', - 'user_agent': 'bc/test', - 'user_id': 1, + "academy_id": 1, + "cohort": model.cohort.slug, + "data": '{"name": "Freyja"}', + "email": model.user.email, + "slug": "nps_survey_answered", + "user_agent": "bc/test", + "user_id": 1, } - self.assertDatetime(json['created_at']) - created_at = json['created_at'] - del json['created_at'] + 
self.assertDatetime(json["created_at"]) + created_at = json["created_at"] + del json["created_at"] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - self.assertEqual(mock.update.call_args_list, [ - call( - 'student_activity', - { - 'cohort': model.cohort.slug, - 'data': '{"name": "Freyja"}', - 'user_agent': 'bc/test', - 'created_at': self.iso_to_datetime(created_at), - 'slug': 'nps_survey_answered', - 'user_id': 1, - 'email': model.user.email, - 'academy_id': 1, - }, - ), - ]) + self.assertEqual( + mock.update.call_args_list, + [ + call( + "student_activity", + { + "cohort": model.cohort.slug, + "data": '{"name": "Freyja"}', + "user_agent": "bc/test", + "created_at": self.iso_to_datetime(created_at), + "slug": "nps_survey_answered", + "user_id": 1, + "email": model.user.email, + "academy_id": 1, + }, + ), + ], + ) diff --git a/breathecode/activity/tests/urls/v1/tests_type.py b/breathecode/activity/tests/urls/v1/tests_type.py index 5db0877d1..c4015f5e5 100644 --- a/breathecode/activity/tests/urls/v1/tests_type.py +++ b/breathecode/activity/tests/urls/v1/tests_type.py @@ -1,129 +1,135 @@ """ Test /answer """ + from unittest.mock import patch from django.urls.base import reverse_lazy from rest_framework import status -from breathecode.tests.mocks import (GOOGLE_CLOUD_PATH, apply_google_cloud_blob_mock, apply_google_cloud_bucket_mock, - apply_google_cloud_client_mock) +from breathecode.tests.mocks import ( + GOOGLE_CLOUD_PATH, + apply_google_cloud_blob_mock, + apply_google_cloud_bucket_mock, + apply_google_cloud_client_mock, +) from ...mixins import MediaTestCase class MediaTestSuite(MediaTestCase): """Test /answer""" + """ 🔽🔽🔽 Auth """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_type__without_auth(self): """Test /answer without auth""" - url = reverse_lazy('activity:type') + url = reverse_lazy("activity:type") response = self.client.get(url) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_type__wrong_academy(self): """Test /answer without auth""" self.headers(academy=1) - url = reverse_lazy('activity:type') + url = reverse_lazy("activity:type") response = self.client.get(url) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_type__without_capability(self): """Test 
/cohort/:id without auth""" self.headers(academy=1) - url = reverse_lazy('activity:type') + url = reverse_lazy("activity:type") self.generate_models(authenticate=True) response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': "You (user: 1) don't have this capability: read_activity for academy 1", - 'status_code': 403, - }) + self.assertEqual( + json, + { + "detail": "You (user: 1) don't have this capability: read_activity for academy 1", + "status_code": 403, + }, + ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) """ 🔽🔽🔽 Get """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_type(self): """Test /answer without auth""" self.headers(academy=1) - self.generate_models(authenticate=True, profile_academy=True, capability='read_activity', role='potato') + self.generate_models(authenticate=True, profile_academy=True, capability="read_activity", role="potato") - url = reverse_lazy('activity:type') + url = reverse_lazy("activity:type") response = self.client.get(url) json = response.json() expected = [ { - 'description': 'Every time it logs in', - 'slug': 'breathecode_login', - }, - { - 'description': 'First day using breathecode', - 'slug': 'online_platform_registration', + "description": "Every time it logs in", + "slug": "breathecode_login", }, { - 'description': 'Attendy on an eventbrite event', - 'slug': 'public_event_attendance', + "description": "First day using breathecode", + "slug": "online_platform_registration", }, { - 'description': 'When the student attent to class', - 'slug': 'classroom_attendance', + "description": "Attendy on an eventbrite event", + "slug": "public_event_attendance", }, { - 'description': 'When the student miss class', - 'slug': 'classroom_unattendance', + "description": "When the student attent to class", + "slug": "classroom_attendance", }, { - 'description': 'When a lessons is opened on the platform', - 'slug': 'lesson_opened', + "description": "When the student miss class", + "slug": "classroom_unattendance", }, { - 'description': ('When the office raspberry pi detects the student'), - 'slug': 'office_attendance', + "description": "When a lessons is opened on the platform", + "slug": "lesson_opened", }, { - 'description': 'When a nps survey is answered by the student', - 'slug': 'nps_survey_answered', + "description": ("When the office raspberry pi detects the student"), + "slug": "office_attendance", }, { - 'description': 'When student successfully tests exercise', - 'slug': 'exercise_success', + "description": "When a nps survey is answered by the student", + "slug": "nps_survey_answered", }, { - 'description': 'When student successfully joins breathecode', - 'slug': 'registration' + "description": "When student successfully tests exercise", + "slug": "exercise_success", }, + {"description": "When student successfully joins breathecode", "slug": "registration"}, { - 'description': 'Student cohort changes like: starts, drop, postpone, etc', - 'slug': 'educational_status_change' + "description": "Student cohort changes like: starts, drop, postpone, etc", + "slug": "educational_status_change", }, { - 'description': "Notes that 
can be added by teachers, TA's or anyone involved " - 'in the student education', - 'slug': 'educational_note' + "description": "Notes that can be added by teachers, TA's or anyone involved " + "in the student education", + "slug": "educational_note", }, { - 'description': 'Notes related to the student career', - 'slug': 'career_note', + "description": "Notes related to the student career", + "slug": "career_note", }, ] diff --git a/breathecode/activity/tests/urls/v1/tests_type_slug.py b/breathecode/activity/tests/urls/v1/tests_type_slug.py index 7319cd180..39f614b8a 100644 --- a/breathecode/activity/tests/urls/v1/tests_type_slug.py +++ b/breathecode/activity/tests/urls/v1/tests_type_slug.py @@ -1,77 +1,86 @@ """ Test /answer """ + from unittest.mock import patch from django.urls.base import reverse_lazy from rest_framework import status -from breathecode.tests.mocks import (GOOGLE_CLOUD_PATH, apply_google_cloud_blob_mock, apply_google_cloud_bucket_mock, - apply_google_cloud_client_mock) +from breathecode.tests.mocks import ( + GOOGLE_CLOUD_PATH, + apply_google_cloud_blob_mock, + apply_google_cloud_bucket_mock, + apply_google_cloud_client_mock, +) from ...mixins import MediaTestCase class MediaTestSuite(MediaTestCase): """Test /answer""" + """ 🔽🔽🔽 Auth """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_type_slug__without_auth(self): """Test /answer without auth""" - url = reverse_lazy('activity:type_slug', kwargs={'activity_slug': 'they-killed-kenny'}) + url = reverse_lazy("activity:type_slug", kwargs={"activity_slug": "they-killed-kenny"}) response = self.client.get(url) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_type_slug__wrong_academy(self): """Test /answer without auth""" self.headers(academy=1) - url = reverse_lazy('activity:type_slug', kwargs={'activity_slug': 'they-killed-kenny'}) + url = reverse_lazy("activity:type_slug", kwargs={"activity_slug": "they-killed-kenny"}) response = self.client.get(url) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_type_slug__without_capability(self): """Test /cohort/:id without auth""" self.headers(academy=1) - url = reverse_lazy('activity:type_slug', kwargs={'activity_slug': 'they-killed-kenny'}) + url = reverse_lazy("activity:type_slug", kwargs={"activity_slug": 
"they-killed-kenny"}) self.generate_models(authenticate=True) response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': "You (user: 1) don't have this capability: read_activity for academy 1", - 'status_code': 403, - }) + self.assertEqual( + json, + { + "detail": "You (user: 1) don't have this capability: read_activity for academy 1", + "status_code": 403, + }, + ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) """ 🔽🔽🔽 Bad slug """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_type_slug__without_data(self): """Test /answer without auth""" self.headers(academy=1) - self.generate_models(authenticate=True, profile_academy=True, capability='read_activity', role='potato') + self.generate_models(authenticate=True, profile_academy=True, capability="read_activity", role="potato") - url = reverse_lazy('activity:type_slug', kwargs={'activity_slug': 'they-killed-kenny'}) + url = reverse_lazy("activity:type_slug", kwargs={"activity_slug": "they-killed-kenny"}) response = self.client.get(url) json = response.json() - expected = {'detail': 'activity-not-found', 'status_code': 400} + expected = {"detail": "activity-not-found", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -80,20 +89,20 @@ def test_type_slug__without_data(self): 🔽🔽🔽 Get """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_type_slug(self): """Test /answer without auth""" self.headers(academy=1) - self.generate_models(authenticate=True, profile_academy=True, capability='read_activity', role='potato') + self.generate_models(authenticate=True, profile_academy=True, capability="read_activity", role="potato") - url = reverse_lazy('activity:type_slug', kwargs={'activity_slug': 'career_note'}) + url = reverse_lazy("activity:type_slug", kwargs={"activity_slug": "career_note"}) response = self.client.get(url) json = response.json() expected = { - 'description': 'Notes related to the student career', - 'slug': 'career_note', + "description": "Notes related to the student career", + "slug": "career_note", } self.assertEqual(json, expected) diff --git a/breathecode/activity/tests/urls/v2/tests_academy_activity.py b/breathecode/activity/tests/urls/v2/tests_academy_activity.py index 36956ec6b..c2388ddc7 100644 --- a/breathecode/activity/tests/urls/v2/tests_academy_activity.py +++ b/breathecode/activity/tests/urls/v2/tests_academy_activity.py @@ -1,6 +1,7 @@ """ Test /answer """ + import random from unittest.mock import MagicMock, call, patch from uuid import uuid4 @@ -18,22 +19,25 @@ def bigquery_client_mock(self, n=1, user_id=1, kind=None, date_start=None, date_end=None): - rows_to_insert = [{ - 'id': uuid4().hex, - 'user_id': user_id, - 'kind': kind if kind else 
self.bc.fake.slug(), - 'related': { - 'type': f'{self.bc.fake.slug()}.{self.bc.fake.slug()}', - 'id': random.randint(1, 100), - 'slug': self.bc.fake.slug(), - }, - 'meta': { - self.bc.fake.slug().replace('-', '_'): self.bc.fake.slug(), - self.bc.fake.slug().replace('-', '_'): self.bc.fake.slug(), - self.bc.fake.slug().replace('-', '_'): self.bc.fake.slug(), - }, - 'timestamp': timezone.now().isoformat(), - } for _ in range(n)] + rows_to_insert = [ + { + "id": uuid4().hex, + "user_id": user_id, + "kind": kind if kind else self.bc.fake.slug(), + "related": { + "type": f"{self.bc.fake.slug()}.{self.bc.fake.slug()}", + "id": random.randint(1, 100), + "slug": self.bc.fake.slug(), + }, + "meta": { + self.bc.fake.slug().replace("-", "_"): self.bc.fake.slug(), + self.bc.fake.slug().replace("-", "_"): self.bc.fake.slug(), + self.bc.fake.slug().replace("-", "_"): self.bc.fake.slug(), + }, + "timestamp": timezone.now().isoformat(), + } + for _ in range(n) + ] result_mock = MagicMock() result_mock.result.return_value = [AttrDict(**kwargs) for kwargs in rows_to_insert] @@ -41,8 +45,8 @@ def bigquery_client_mock(self, n=1, user_id=1, kind=None, date_start=None, date_ client_mock = MagicMock() client_mock.query.return_value = result_mock - project_id = 'test' - dataset = '4geeks' + project_id = "test" + dataset = "4geeks" query = f""" SELECT * @@ -63,58 +67,58 @@ def bigquery_client_mock(self, n=1, user_id=1, kind=None, date_start=None, date_ class MediaTestSuite(MediaTestCase): def test_no_auth(self): - url = reverse_lazy('v2:activity:academy_activity') + url = reverse_lazy("v2:activity:academy_activity") response = self.client.get(url) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_get_two(self): - model = self.bc.database.create(user=1, academy=1, profile_academy=1, capability='read_activity', role=1) + model = self.bc.database.create(user=1, academy=1, profile_academy=1, capability="read_activity", role=1) self.client.force_authenticate(model.user) self.bc.request.set_headers(academy=1) - url = reverse_lazy('v2:activity:academy_activity') + url = reverse_lazy("v2:activity:academy_activity") val = bigquery_client_mock(self, n=2, user_id=1) (client_mock, result_mock, query, project_id, dataset, expected) = val - with patch('breathecode.services.google_cloud.big_query.BigQuery.client') as mock: + with patch("breathecode.services.google_cloud.big_query.BigQuery.client") as mock: mock.return_value = (client_mock, project_id, dataset) response = self.client.get(url) json = response.json() self.bc.check.calls(BigQuery.client.call_args_list, [call()]) assert client_mock.query.call_args[0][0] == query - assert 'AND meta.kind = @kind' not in query + assert "AND meta.kind = @kind" not in query self.bc.check.calls(result_mock.result.call_args_list, [call()]) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_filter_by_kind(self): - model = self.bc.database.create(user=1, academy=1, profile_academy=1, capability='read_activity', role=1) + model = self.bc.database.create(user=1, academy=1, profile_academy=1, capability="read_activity", role=1) self.client.force_authenticate(model.user) self.bc.request.set_headers(academy=1) kind = self.bc.fake.slug() - url 
= reverse_lazy('v2:activity:academy_activity') + f'?kind={kind}' + url = reverse_lazy("v2:activity:academy_activity") + f"?kind={kind}" val = bigquery_client_mock(self, n=2, user_id=1, kind=kind) (client_mock, result_mock, query, project_id, dataset, expected) = val - with patch('breathecode.services.google_cloud.big_query.BigQuery.client') as mock: + with patch("breathecode.services.google_cloud.big_query.BigQuery.client") as mock: mock.return_value = (client_mock, project_id, dataset) response = self.client.get(url) json = response.json() self.bc.check.calls(BigQuery.client.call_args_list, [call()]) assert client_mock.query.call_args[0][0] == query - assert 'AND kind = @kind' in query + assert "AND kind = @kind" in query self.bc.check.calls(result_mock.result.call_args_list, [call()]) self.assertEqual(json, expected) diff --git a/breathecode/activity/tests/urls/v2/tests_academy_activity_id.py b/breathecode/activity/tests/urls/v2/tests_academy_activity_id.py index 9a801e3d0..4dfec52df 100644 --- a/breathecode/activity/tests/urls/v2/tests_academy_activity_id.py +++ b/breathecode/activity/tests/urls/v2/tests_academy_activity_id.py @@ -1,6 +1,7 @@ """ Test /answer """ + import random from unittest.mock import MagicMock, call, patch from uuid import uuid4 @@ -19,22 +20,25 @@ def bigquery_client_mock(self, user_id=1): n = 1 - rows_to_insert = [{ - 'id': uuid4().hex, - 'user_id': user_id, - 'kind': self.bc.fake.slug(), - 'related': { - 'type': f'{self.bc.fake.slug()}.{self.bc.fake.slug()}', - 'id': random.randint(1, 100), - 'slug': self.bc.fake.slug(), - }, - 'meta': { - self.bc.fake.slug().replace('-', '_'): self.bc.fake.slug(), - self.bc.fake.slug().replace('-', '_'): self.bc.fake.slug(), - self.bc.fake.slug().replace('-', '_'): self.bc.fake.slug(), - }, - 'timestamp': timezone.now().isoformat(), - } for _ in range(n)] + rows_to_insert = [ + { + "id": uuid4().hex, + "user_id": user_id, + "kind": self.bc.fake.slug(), + "related": { + "type": f"{self.bc.fake.slug()}.{self.bc.fake.slug()}", + "id": random.randint(1, 100), + "slug": self.bc.fake.slug(), + }, + "meta": { + self.bc.fake.slug().replace("-", "_"): self.bc.fake.slug(), + self.bc.fake.slug().replace("-", "_"): self.bc.fake.slug(), + self.bc.fake.slug().replace("-", "_"): self.bc.fake.slug(), + }, + "timestamp": timezone.now().isoformat(), + } + for _ in range(n) + ] result_mock = MagicMock() result_mock.result.return_value = iter([AttrDict(**kwargs) for kwargs in rows_to_insert]) @@ -42,8 +46,8 @@ def bigquery_client_mock(self, user_id=1): client_mock = MagicMock() client_mock.query.return_value = result_mock - project_id = 'test' - dataset = '4geeks' + project_id = "test" + dataset = "4geeks" query = f""" SELECT * @@ -61,23 +65,23 @@ def bigquery_client_mock(self, user_id=1): class MediaTestSuite(MediaTestCase): def test_no_auth(self): - url = reverse_lazy('v2:activity:academy_activity_id', kwargs={'activity_id': '1234'}) + url = reverse_lazy("v2:activity:academy_activity_id", kwargs={"activity_id": "1234"}) response = self.client.get(url) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) def test_get_one(self): - model = self.bc.database.create(user=1, academy=1, profile_academy=1, capability='read_activity', role=1) + model = self.bc.database.create(user=1, academy=1, profile_academy=1, capability="read_activity", role=1) self.client.force_authenticate(model.user) self.bc.request.set_headers(academy=1) - url = reverse_lazy('v2:activity:academy_activity_id', kwargs={'activity_id': '1234'}) + url = 
reverse_lazy("v2:activity:academy_activity_id", kwargs={"activity_id": "1234"}) val = bigquery_client_mock(self, user_id=1) (client_mock, result_mock, query, project_id, dataset, expected) = val - with patch('breathecode.services.google_cloud.big_query.BigQuery.client') as mock: + with patch("breathecode.services.google_cloud.big_query.BigQuery.client") as mock: mock.return_value = (client_mock, project_id, dataset) response = self.client.get(url) json = response.json() diff --git a/breathecode/activity/tests/urls/v2/tests_me_activity.py b/breathecode/activity/tests/urls/v2/tests_me_activity.py index 2eee8056c..ba134297a 100644 --- a/breathecode/activity/tests/urls/v2/tests_me_activity.py +++ b/breathecode/activity/tests/urls/v2/tests_me_activity.py @@ -1,6 +1,7 @@ """ Test /answer """ + import random from unittest.mock import MagicMock, call, patch from uuid import uuid4 @@ -18,22 +19,25 @@ def bigquery_client_mock(self, n=1, user_id=1, kind=None): - rows_to_insert = [{ - 'id': uuid4().hex, - 'user_id': user_id, - 'kind': kind if kind else self.bc.fake.slug(), - 'related': { - 'type': f'{self.bc.fake.slug()}.{self.bc.fake.slug()}', - 'id': random.randint(1, 100), - 'slug': self.bc.fake.slug(), - }, - 'meta': { - self.bc.fake.slug().replace('-', '_'): self.bc.fake.slug(), - self.bc.fake.slug().replace('-', '_'): self.bc.fake.slug(), - self.bc.fake.slug().replace('-', '_'): self.bc.fake.slug(), - }, - 'timestamp': timezone.now().isoformat(), - } for _ in range(n)] + rows_to_insert = [ + { + "id": uuid4().hex, + "user_id": user_id, + "kind": kind if kind else self.bc.fake.slug(), + "related": { + "type": f"{self.bc.fake.slug()}.{self.bc.fake.slug()}", + "id": random.randint(1, 100), + "slug": self.bc.fake.slug(), + }, + "meta": { + self.bc.fake.slug().replace("-", "_"): self.bc.fake.slug(), + self.bc.fake.slug().replace("-", "_"): self.bc.fake.slug(), + self.bc.fake.slug().replace("-", "_"): self.bc.fake.slug(), + }, + "timestamp": timezone.now().isoformat(), + } + for _ in range(n) + ] result_mock = MagicMock() result_mock.result.return_value = [AttrDict(**kwargs) for kwargs in rows_to_insert] @@ -41,8 +45,8 @@ def bigquery_client_mock(self, n=1, user_id=1, kind=None): client_mock = MagicMock() client_mock.query.return_value = result_mock - project_id = 'test' - dataset = '4geeks' + project_id = "test" + dataset = "4geeks" query = f""" SELECT * @@ -60,20 +64,20 @@ def bigquery_client_mock(self, n=1, user_id=1, kind=None): class MediaTestSuite(MediaTestCase): def test_no_auth(self): - url = reverse_lazy('v2:activity:me_activity') + url = reverse_lazy("v2:activity:me_activity") response = self.client.get(url) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_get_two(self): - url = reverse_lazy('v2:activity:me_activity') + url = reverse_lazy("v2:activity:me_activity") self.generate_models(authenticate=True) val = bigquery_client_mock(self, n=2, user_id=1) (client_mock, result_mock, query, project_id, dataset, expected) = val - with patch('breathecode.services.google_cloud.big_query.BigQuery.client') as mock: + with patch("breathecode.services.google_cloud.big_query.BigQuery.client") as mock: mock.return_value = (client_mock, project_id, dataset) response = self.client.get(url) json = response.json() @@ -85,26 +89,26 @@ def test_get_two(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, 
status.HTTP_200_OK) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_filter_by_kind(self): model = self.bc.database.create(user=1, academy=1) self.client.force_authenticate(model.user) kind = self.bc.fake.slug() - url = reverse_lazy('v2:activity:me_activity') + f'?kind={kind}' + url = reverse_lazy("v2:activity:me_activity") + f"?kind={kind}" val = bigquery_client_mock(self, n=2, user_id=1, kind=kind) (client_mock, result_mock, query, project_id, dataset, expected) = val - with patch('breathecode.services.google_cloud.big_query.BigQuery.client') as mock: + with patch("breathecode.services.google_cloud.big_query.BigQuery.client") as mock: mock.return_value = (client_mock, project_id, dataset) response = self.client.get(url) json = response.json() self.bc.check.calls(BigQuery.client.call_args_list, [call()]) assert client_mock.query.call_args[0][0] == query - assert 'AND kind = @kind' in query + assert "AND kind = @kind" in query self.bc.check.calls(result_mock.result.call_args_list, [call()]) self.assertEqual(json, expected) diff --git a/breathecode/activity/tests/urls/v2/tests_me_activity_id.py b/breathecode/activity/tests/urls/v2/tests_me_activity_id.py index 62c8c4176..3313a5d52 100644 --- a/breathecode/activity/tests/urls/v2/tests_me_activity_id.py +++ b/breathecode/activity/tests/urls/v2/tests_me_activity_id.py @@ -1,6 +1,7 @@ """ Test /answer """ + import random from uuid import uuid4 from django.utils import timezone @@ -20,22 +21,25 @@ def bigquery_client_mock(self, user_id=1): n = 1 - rows_to_insert = [{ - 'id': uuid4().hex, - 'user_id': user_id, - 'kind': self.bc.fake.slug(), - 'related': { - 'type': f'{self.bc.fake.slug()}.{self.bc.fake.slug()}', - 'id': random.randint(1, 100), - 'slug': self.bc.fake.slug(), - }, - 'meta': { - self.bc.fake.slug().replace('-', '_'): self.bc.fake.slug(), - self.bc.fake.slug().replace('-', '_'): self.bc.fake.slug(), - self.bc.fake.slug().replace('-', '_'): self.bc.fake.slug(), - }, - 'timestamp': timezone.now().isoformat(), - } for _ in range(n)] + rows_to_insert = [ + { + "id": uuid4().hex, + "user_id": user_id, + "kind": self.bc.fake.slug(), + "related": { + "type": f"{self.bc.fake.slug()}.{self.bc.fake.slug()}", + "id": random.randint(1, 100), + "slug": self.bc.fake.slug(), + }, + "meta": { + self.bc.fake.slug().replace("-", "_"): self.bc.fake.slug(), + self.bc.fake.slug().replace("-", "_"): self.bc.fake.slug(), + self.bc.fake.slug().replace("-", "_"): self.bc.fake.slug(), + }, + "timestamp": timezone.now().isoformat(), + } + for _ in range(n) + ] result_mock = MagicMock() result_mock.result.return_value = iter([AttrDict(**kwargs) for kwargs in rows_to_insert]) @@ -43,8 +47,8 @@ def bigquery_client_mock(self, user_id=1): client_mock = MagicMock() client_mock.query.return_value = result_mock - project_id = 'test' - dataset = '4geeks' + project_id = "test" + dataset = "4geeks" query = f""" SELECT * @@ -61,19 +65,19 @@ def bigquery_client_mock(self, user_id=1): class MediaTestSuite(MediaTestCase): def test_no_auth(self): - url = reverse_lazy('v2:activity:me_activity_id', kwargs={'activity_id': '1234'}) + url = reverse_lazy("v2:activity:me_activity_id", kwargs={"activity_id": "1234"}) response = self.client.get(url) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) def test_get_one(self): - url = reverse_lazy('v2:activity:me_activity_id', kwargs={'activity_id': '1234'}) + url = reverse_lazy("v2:activity:me_activity_id", 
kwargs={"activity_id": "1234"}) self.generate_models(authenticate=True) val = bigquery_client_mock(self, user_id=1) (client_mock, result_mock, query, project_id, dataset, expected) = val - with patch('breathecode.services.google_cloud.big_query.BigQuery.client') as mock: + with patch("breathecode.services.google_cloud.big_query.BigQuery.client") as mock: mock.return_value = (client_mock, project_id, dataset) response = self.client.get(url) json = response.json() diff --git a/breathecode/activity/tests/urls/v2/tests_report.py b/breathecode/activity/tests/urls/v2/tests_report.py index e4717952a..6ee6a718d 100644 --- a/breathecode/activity/tests/urls/v2/tests_report.py +++ b/breathecode/activity/tests/urls/v2/tests_report.py @@ -1,6 +1,7 @@ """ Test /report """ + import functools import random from unittest.mock import MagicMock, call, patch @@ -19,22 +20,25 @@ def bigquery_client_mock(self, n=1, user_id=1, kind=None): - rows_to_insert = [{ - 'id': uuid4().hex, - 'user_id': user_id, - 'kind': kind if kind else self.bc.fake.slug(), - 'related': { - 'type': f'{self.bc.fake.slug()}.{self.bc.fake.slug()}', - 'id': random.randint(1, 100), - 'slug': self.bc.fake.slug(), - }, - 'meta': { - self.bc.fake.slug().replace('-', '_'): self.bc.fake.slug(), - self.bc.fake.slug().replace('-', '_'): self.bc.fake.slug(), - self.bc.fake.slug().replace('-', '_'): self.bc.fake.slug(), - }, - 'timestamp': timezone.now().isoformat(), - } for _ in range(n)] + rows_to_insert = [ + { + "id": uuid4().hex, + "user_id": user_id, + "kind": kind if kind else self.bc.fake.slug(), + "related": { + "type": f"{self.bc.fake.slug()}.{self.bc.fake.slug()}", + "id": random.randint(1, 100), + "slug": self.bc.fake.slug(), + }, + "meta": { + self.bc.fake.slug().replace("-", "_"): self.bc.fake.slug(), + self.bc.fake.slug().replace("-", "_"): self.bc.fake.slug(), + self.bc.fake.slug().replace("-", "_"): self.bc.fake.slug(), + }, + "timestamp": timezone.now().isoformat(), + } + for _ in range(n) + ] result_mock = MagicMock() result_mock.result.return_value = [AttrDict(**kwargs) for kwargs in rows_to_insert] @@ -42,29 +46,32 @@ def bigquery_client_mock(self, n=1, user_id=1, kind=None): client_mock = MagicMock() client_mock.query.return_value = result_mock - project_id = 'test' - dataset = '4geeks' + project_id = "test" + dataset = "4geeks" return (client_mock, result_mock, project_id, dataset, rows_to_insert) -def bigquery_client_mock_group(self, n=1, user_id=1, kind=None, fields=[], by=''): - rows_to_insert = [{ - 'id': uuid4().hex, - 'user_id': user_id, - 'kind': kind if kind else self.bc.fake.slug(), - 'related': { - 'type': f'{self.bc.fake.slug()}.{self.bc.fake.slug()}', - 'id': random.randint(1, 100), - 'slug': self.bc.fake.slug(), - }, - 'meta': { - self.bc.fake.slug().replace('-', '_'): self.bc.fake.slug(), - self.bc.fake.slug().replace('-', '_'): self.bc.fake.slug(), - self.bc.fake.slug().replace('-', '_'): self.bc.fake.slug(), - }, - 'timestamp': timezone.now().isoformat(), - } for _ in range(n)] +def bigquery_client_mock_group(self, n=1, user_id=1, kind=None, fields=[], by=""): + rows_to_insert = [ + { + "id": uuid4().hex, + "user_id": user_id, + "kind": kind if kind else self.bc.fake.slug(), + "related": { + "type": f"{self.bc.fake.slug()}.{self.bc.fake.slug()}", + "id": random.randint(1, 100), + "slug": self.bc.fake.slug(), + }, + "meta": { + self.bc.fake.slug().replace("-", "_"): self.bc.fake.slug(), + self.bc.fake.slug().replace("-", "_"): self.bc.fake.slug(), + self.bc.fake.slug().replace("-", "_"): self.bc.fake.slug(), + 
}, + "timestamp": timezone.now().isoformat(), + } + for _ in range(n) + ] query_rows = [] grouped_data = [] @@ -82,39 +89,42 @@ def bigquery_client_mock_group(self, n=1, user_id=1, kind=None, fields=[], by='' client_mock = MagicMock() client_mock.query.return_value = result_mock - project_id = 'test' - dataset = '4geeks' + project_id = "test" + dataset = "4geeks" return (client_mock, result_mock, project_id, dataset, grouped_data) def bigquery_client_mock_filters(self, n=1, user_id=1, kind=None, filters={}): - rows_to_insert = [{ - 'id': uuid4().hex, - 'user_id': user_id, - 'kind': kind if kind else self.bc.fake.slug(), - 'related': { - 'type': f'{self.bc.fake.slug()}.{self.bc.fake.slug()}', - 'id': random.randint(1, 100), - 'slug': self.bc.fake.slug(), - }, - 'meta': { - self.bc.fake.slug().replace('-', '_'): self.bc.fake.slug(), - self.bc.fake.slug().replace('-', '_'): self.bc.fake.slug(), - self.bc.fake.slug().replace('-', '_'): self.bc.fake.slug(), - }, - 'timestamp': timezone.now().isoformat(), - } for _ in range(n)] + rows_to_insert = [ + { + "id": uuid4().hex, + "user_id": user_id, + "kind": kind if kind else self.bc.fake.slug(), + "related": { + "type": f"{self.bc.fake.slug()}.{self.bc.fake.slug()}", + "id": random.randint(1, 100), + "slug": self.bc.fake.slug(), + }, + "meta": { + self.bc.fake.slug().replace("-", "_"): self.bc.fake.slug(), + self.bc.fake.slug().replace("-", "_"): self.bc.fake.slug(), + self.bc.fake.slug().replace("-", "_"): self.bc.fake.slug(), + }, + "timestamp": timezone.now().isoformat(), + } + for _ in range(n) + ] for key in filters: literal = filters[key] - if key.endswith('__lte'): + if key.endswith("__lte"): rows_to_insert = list(filter(lambda x: x[key[:-5]] <= literal, rows_to_insert)) - elif key.endswith('__lt'): + elif key.endswith("__lt"): rows_to_insert = list(filter(lambda x: x[key[:-4]] < literal, rows_to_insert)) - elif key.endswith('__gte'): + elif key.endswith("__gte"): rows_to_insert = list(filter(lambda x: x[key[:-5]] >= literal, rows_to_insert)) - elif key.endswith('__gt'): + elif key.endswith("__gt"): rows_to_insert = list(filter(lambda x: x[key[:-4]] > literal, rows_to_insert)) else: rows_to_insert = list(filter(lambda x: x[key] == literal, rows_to_insert)) @@ -125,29 +135,32 @@ def bigquery_client_mock_filters(self, n=1, user_id=1, kind=None, filters={}): client_mock = MagicMock() client_mock.query.return_value = result_mock - project_id = 'test' - dataset = '4geeks' + project_id = "test" + dataset = "4geeks" return (client_mock, result_mock, project_id, dataset, rows_to_insert) def bigquery_client_mock_aggregation(self, n=1, user_id=1, kind=None, aggregation={}): - rows_to_insert = [{ - 'id': uuid4().hex, - 'user_id': user_id, - 'kind': kind if kind else self.bc.fake.slug(), - 'related': { - 'type': f'{self.bc.fake.slug()}.{self.bc.fake.slug()}', - 'id': random.randint(1, 100), - 'slug': self.bc.fake.slug(), - }, - 'meta': { - self.bc.fake.slug().replace('-', '_'): self.bc.fake.slug(), - self.bc.fake.slug().replace('-', '_'): self.bc.fake.slug(), - self.bc.fake.slug().replace('-', '_'): self.bc.fake.slug(), - }, - 'timestamp': timezone.now().isoformat(), - } for _ in range(n)] + rows_to_insert = [ + { + "id": uuid4().hex, + "user_id": user_id, + "kind": kind if kind else self.bc.fake.slug(), + "related": { + "type": f"{self.bc.fake.slug()}.{self.bc.fake.slug()}", + "id": random.randint(1, 100), + "slug": self.bc.fake.slug(), + }, + "meta": { + self.bc.fake.slug().replace("-", "_"): self.bc.fake.slug(), + self.bc.fake.slug().replace("-", 
"_"): self.bc.fake.slug(), + self.bc.fake.slug().replace("-", "_"): self.bc.fake.slug(), + }, + "timestamp": timezone.now().isoformat(), + } + for _ in range(n) + ] query_rows = [] grouped_data = [] @@ -156,19 +169,19 @@ def bigquery_client_mock_aggregation(self, n=1, user_id=1, kind=None, aggregatio query_rows.append(new_row) res_dict = {} - if 'sum' in aggregation: - val = aggregation['sum'] - new_key = f'sum__{val}' + if "sum" in aggregation: + val = aggregation["sum"] + new_key = f"sum__{val}" res_dict[new_key] = functools.reduce(lambda x, y: x[val] + y[val], query_rows) - if 'count' in aggregation: - val = aggregation['count'] - new_key = f'count__{val}' + if "count" in aggregation: + val = aggregation["count"] + new_key = f"count__{val}" res_dict[new_key] = len(query_rows) - if 'avg' in aggregation: - val = aggregation['avg'] - new_key = f'avg__{val}' + if "avg" in aggregation: + val = aggregation["avg"] + new_key = f"avg__{val}" res_dict[new_key] = functools.reduce(lambda x, y: x[val] + y[val], query_rows) / len(query_rows) grouped_data.append(res_dict) @@ -179,8 +192,8 @@ def bigquery_client_mock_aggregation(self, n=1, user_id=1, kind=None, aggregatio client_mock = MagicMock() client_mock.query.return_value = result_mock - project_id = 'test' - dataset = '4geeks' + project_id = "test" + dataset = "4geeks" return (client_mock, result_mock, project_id, dataset, grouped_data) @@ -188,16 +201,16 @@ def bigquery_client_mock_aggregation(self, n=1, user_id=1, kind=None, aggregatio class MediaTestSuite(MediaTestCase): def test_no_auth(self): - url = reverse_lazy('v2:activity:report') + url = reverse_lazy("v2:activity:report") response = self.client.get(url) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_get_all_fields(self): - expected_query = 'SELECT * FROM `test.4geeks.activity` ' - url = reverse_lazy('v2:activity:report') - model = self.bc.database.create(user=1, academy=1, profile_academy=1, capability='read_activity', role=1) + expected_query = "SELECT * FROM `test.4geeks.activity` " + url = reverse_lazy("v2:activity:report") + model = self.bc.database.create(user=1, academy=1, profile_academy=1, capability="read_activity", role=1) self.client.force_authenticate(model.user) self.bc.request.set_headers(academy=1) @@ -205,7 +218,7 @@ def test_get_all_fields(self): val = bigquery_client_mock(self) (client_mock, result_mock, project_id, dataset, expected) = val - with patch('breathecode.services.google_cloud.big_query.BigQuery.client') as mock: + with patch("breathecode.services.google_cloud.big_query.BigQuery.client") as mock: mock.return_value = (client_mock, project_id, dataset) response = self.client.get(url) json = response.json() @@ -217,11 +230,11 @@ def test_get_all_fields(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_get_all_fields_limit(self): - expected_query = 'SELECT * FROM `test.4geeks.activity` LIMIT 5' - url = reverse_lazy('v2:activity:report') + f'?limit=5' - model = self.bc.database.create(user=1, academy=1, profile_academy=1, capability='read_activity', role=1) + expected_query = "SELECT * FROM `test.4geeks.activity` LIMIT 5" + url = reverse_lazy("v2:activity:report") + f"?limit=5" + model = 
self.bc.database.create(user=1, academy=1, profile_academy=1, capability="read_activity", role=1) self.client.force_authenticate(model.user) self.bc.request.set_headers(academy=1) @@ -230,7 +243,7 @@ def test_get_all_fields_limit(self): (client_mock, result_mock, project_id, dataset, expected) = val expected = expected[0:5] - with patch('breathecode.services.google_cloud.big_query.BigQuery.client') as mock: + with patch("breathecode.services.google_cloud.big_query.BigQuery.client") as mock: mock.return_value = (client_mock, project_id, dataset) response = self.client.get(url) json = response.json() @@ -242,19 +255,19 @@ def test_get_all_fields_limit(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_get_group(self): - expected_query = 'SELECT kind FROM `test.4geeks.activity` GROUP BY kind' - url = reverse_lazy('v2:activity:report') + f'?by=kind&fields=kind' - model = self.bc.database.create(user=1, academy=1, profile_academy=1, capability='read_activity', role=1) + expected_query = "SELECT kind FROM `test.4geeks.activity` GROUP BY kind" + url = reverse_lazy("v2:activity:report") + f"?by=kind&fields=kind" + model = self.bc.database.create(user=1, academy=1, profile_academy=1, capability="read_activity", role=1) self.client.force_authenticate(model.user) self.bc.request.set_headers(academy=1) - val = bigquery_client_mock_group(self, by='kind', fields=['kind']) + val = bigquery_client_mock_group(self, by="kind", fields=["kind"]) (client_mock, result_mock, project_id, dataset, expected) = val - with patch('breathecode.services.google_cloud.big_query.BigQuery.client') as mock: + with patch("breathecode.services.google_cloud.big_query.BigQuery.client") as mock: mock.return_value = (client_mock, project_id, dataset) response = self.client.get(url) json = response.json() @@ -266,20 +279,20 @@ def test_get_group(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_get_filters_lte(self): json_query = '{ "filter": { "user_id__lte": 5 } }' - expected_query = 'SELECT * FROM `test.4geeks.activity` WHERE user_id <= @x__user_id' - url = reverse_lazy('v2:activity:report') + f'?query={json_query}' - model = self.bc.database.create(user=1, academy=1, profile_academy=1, capability='read_activity', role=1) + expected_query = "SELECT * FROM `test.4geeks.activity` WHERE user_id <= @x__user_id" + url = reverse_lazy("v2:activity:report") + f"?query={json_query}" + model = self.bc.database.create(user=1, academy=1, profile_academy=1, capability="read_activity", role=1) self.client.force_authenticate(model.user) self.bc.request.set_headers(academy=1) - val = bigquery_client_mock_filters(self, filters={'user_id__lte': 5}) + val = bigquery_client_mock_filters(self, filters={"user_id__lte": 5}) (client_mock, result_mock, project_id, dataset, expected) = val - with patch('breathecode.services.google_cloud.big_query.BigQuery.client') as mock: + with patch("breathecode.services.google_cloud.big_query.BigQuery.client") as mock: mock.return_value = (client_mock, project_id, dataset) response = self.client.get(url) json = response.json() @@ -291,20 +304,20 @@ def test_get_filters_lte(self): self.assertEqual(json, expected) 
self.assertEqual(response.status_code, status.HTTP_200_OK) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_get_filters_lt(self): json_query = '{ "filter": { "user_id__lt": 5 } }' - expected_query = 'SELECT * FROM `test.4geeks.activity` WHERE user_id < @x__user_id' - url = reverse_lazy('v2:activity:report') + f'?query={json_query}' - model = self.bc.database.create(user=1, academy=1, profile_academy=1, capability='read_activity', role=1) + expected_query = "SELECT * FROM `test.4geeks.activity` WHERE user_id < @x__user_id" + url = reverse_lazy("v2:activity:report") + f"?query={json_query}" + model = self.bc.database.create(user=1, academy=1, profile_academy=1, capability="read_activity", role=1) self.client.force_authenticate(model.user) self.bc.request.set_headers(academy=1) - val = bigquery_client_mock_filters(self, filters={'user_id__lt': 5}) + val = bigquery_client_mock_filters(self, filters={"user_id__lt": 5}) (client_mock, result_mock, project_id, dataset, expected) = val - with patch('breathecode.services.google_cloud.big_query.BigQuery.client') as mock: + with patch("breathecode.services.google_cloud.big_query.BigQuery.client") as mock: mock.return_value = (client_mock, project_id, dataset) response = self.client.get(url) json = response.json() @@ -316,20 +329,20 @@ def test_get_filters_lt(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_get_filters_gte(self): json_query = '{ "filter": { "user_id__gte": 5 } }' - expected_query = 'SELECT * FROM `test.4geeks.activity` WHERE user_id >= @x__user_id' - url = reverse_lazy('v2:activity:report') + f'?query={json_query}' - model = self.bc.database.create(user=1, academy=1, profile_academy=1, capability='read_activity', role=1) + expected_query = "SELECT * FROM `test.4geeks.activity` WHERE user_id >= @x__user_id" + url = reverse_lazy("v2:activity:report") + f"?query={json_query}" + model = self.bc.database.create(user=1, academy=1, profile_academy=1, capability="read_activity", role=1) self.client.force_authenticate(model.user) self.bc.request.set_headers(academy=1) - val = bigquery_client_mock_filters(self, filters={'user_id__gte': 5}) + val = bigquery_client_mock_filters(self, filters={"user_id__gte": 5}) (client_mock, result_mock, project_id, dataset, expected) = val - with patch('breathecode.services.google_cloud.big_query.BigQuery.client') as mock: + with patch("breathecode.services.google_cloud.big_query.BigQuery.client") as mock: mock.return_value = (client_mock, project_id, dataset) response = self.client.get(url) json = response.json() @@ -341,20 +354,20 @@ def test_get_filters_gte(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_get_filters_gt(self): json_query = '{ "filter": { "user_id__gt": 5 } }' - expected_query = 'SELECT * FROM `test.4geeks.activity` WHERE user_id > @x__user_id' - url = reverse_lazy('v2:activity:report') + f'?query={json_query}' - model = self.bc.database.create(user=1, academy=1, profile_academy=1, capability='read_activity', role=1) + expected_query = "SELECT * FROM `test.4geeks.activity` WHERE user_id > 
@x__user_id" + url = reverse_lazy("v2:activity:report") + f"?query={json_query}" + model = self.bc.database.create(user=1, academy=1, profile_academy=1, capability="read_activity", role=1) self.client.force_authenticate(model.user) self.bc.request.set_headers(academy=1) - val = bigquery_client_mock_filters(self, filters={'user_id__gt': 5}) + val = bigquery_client_mock_filters(self, filters={"user_id__gt": 5}) (client_mock, result_mock, project_id, dataset, expected) = val - with patch('breathecode.services.google_cloud.big_query.BigQuery.client') as mock: + with patch("breathecode.services.google_cloud.big_query.BigQuery.client") as mock: mock.return_value = (client_mock, project_id, dataset) response = self.client.get(url) json = response.json() @@ -366,20 +379,20 @@ def test_get_filters_gt(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_get_aggregation_sum(self): json_query = '{ "grouping_function": { "sum": ["id"] } }' - expected_query = 'SELECT SUM(id) AS sum__id FROM `test.4geeks.activity` ' - url = reverse_lazy('v2:activity:report') + f'?query={json_query}' - model = self.bc.database.create(user=1, academy=1, profile_academy=1, capability='read_activity', role=1) + expected_query = "SELECT SUM(id) AS sum__id FROM `test.4geeks.activity` " + url = reverse_lazy("v2:activity:report") + f"?query={json_query}" + model = self.bc.database.create(user=1, academy=1, profile_academy=1, capability="read_activity", role=1) self.client.force_authenticate(model.user) self.bc.request.set_headers(academy=1) - val = bigquery_client_mock_aggregation(self, aggregation={'sum': 'id'}) + val = bigquery_client_mock_aggregation(self, aggregation={"sum": "id"}) (client_mock, result_mock, project_id, dataset, expected) = val - with patch('breathecode.services.google_cloud.big_query.BigQuery.client') as mock: + with patch("breathecode.services.google_cloud.big_query.BigQuery.client") as mock: mock.return_value = (client_mock, project_id, dataset) response = self.client.get(url) json = response.json() @@ -391,20 +404,20 @@ def test_get_aggregation_sum(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_get_aggregation_count(self): json_query = '{ "grouping_function": { "count": ["kind"] } }' - expected_query = 'SELECT COUNT(kind) AS count__kind FROM `test.4geeks.activity` ' - url = reverse_lazy('v2:activity:report') + f'?query={json_query}' - model = self.bc.database.create(user=1, academy=1, profile_academy=1, capability='read_activity', role=1) + expected_query = "SELECT COUNT(kind) AS count__kind FROM `test.4geeks.activity` " + url = reverse_lazy("v2:activity:report") + f"?query={json_query}" + model = self.bc.database.create(user=1, academy=1, profile_academy=1, capability="read_activity", role=1) self.client.force_authenticate(model.user) self.bc.request.set_headers(academy=1) - val = bigquery_client_mock_aggregation(self, n=2, aggregation={'count': 'kind'}) + val = bigquery_client_mock_aggregation(self, n=2, aggregation={"count": "kind"}) (client_mock, result_mock, project_id, dataset, expected) = val - with patch('breathecode.services.google_cloud.big_query.BigQuery.client') as mock: + with 
patch("breathecode.services.google_cloud.big_query.BigQuery.client") as mock: mock.return_value = (client_mock, project_id, dataset) response = self.client.get(url) json = response.json() @@ -416,20 +429,20 @@ def test_get_aggregation_count(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_get_aggregation_avg(self): json_query = '{ "grouping_function": { "avg": ["user_id"] } }' - expected_query = 'SELECT AVG(user_id) AS avg__user_id FROM `test.4geeks.activity` ' - url = reverse_lazy('v2:activity:report') + f'?query={json_query}' - model = self.bc.database.create(user=1, academy=1, profile_academy=1, capability='read_activity', role=1) + expected_query = "SELECT AVG(user_id) AS avg__user_id FROM `test.4geeks.activity` " + url = reverse_lazy("v2:activity:report") + f"?query={json_query}" + model = self.bc.database.create(user=1, academy=1, profile_academy=1, capability="read_activity", role=1) self.client.force_authenticate(model.user) self.bc.request.set_headers(academy=1) - val = bigquery_client_mock_aggregation(self, n=2, aggregation={'avg': 'user_id'}) + val = bigquery_client_mock_aggregation(self, n=2, aggregation={"avg": "user_id"}) (client_mock, result_mock, project_id, dataset, expected) = val - with patch('breathecode.services.google_cloud.big_query.BigQuery.client') as mock: + with patch("breathecode.services.google_cloud.big_query.BigQuery.client") as mock: mock.return_value = (client_mock, project_id, dataset) response = self.client.get(url) json = response.json() diff --git a/breathecode/activity/urls/v1.py b/breathecode/activity/urls/v1.py index 699475ea8..42d5b247f 100644 --- a/breathecode/activity/urls/v1.py +++ b/breathecode/activity/urls/v1.py @@ -1,14 +1,14 @@ from django.urls import path -from ..views import (ActivityCohortView, ActivityTypeView, ActivityMeView, ActivityClassroomView, StudentActivityView) +from ..views import ActivityCohortView, ActivityTypeView, ActivityMeView, ActivityClassroomView, StudentActivityView -app_name = 'activity' +app_name = "activity" urlpatterns = [ - path('me', ActivityMeView.as_view(), name='root'), - path('type/', ActivityTypeView.as_view(), name='type'), - path('type/', ActivityTypeView.as_view(), name='type_slug'), - path('academy/cohort/', ActivityClassroomView.as_view(), name='academy_cohort_id'), - path('academy/student/', StudentActivityView.as_view(), name='academy_student_id'), - path('cohort/', ActivityCohortView.as_view(), name='cohort_id') + path("me", ActivityMeView.as_view(), name="root"), + path("type/", ActivityTypeView.as_view(), name="type"), + path("type/", ActivityTypeView.as_view(), name="type_slug"), + path("academy/cohort/", ActivityClassroomView.as_view(), name="academy_cohort_id"), + path("academy/student/", StudentActivityView.as_view(), name="academy_student_id"), + path("cohort/", ActivityCohortView.as_view(), name="cohort_id"), ] diff --git a/breathecode/activity/urls/v2.py b/breathecode/activity/urls/v2.py index 0a9dc74f1..b6768d1cc 100644 --- a/breathecode/activity/urls/v2.py +++ b/breathecode/activity/urls/v2.py @@ -5,17 +5,17 @@ from .v1 import urlpatterns as urlpatterns_v1 deprecation_list = [ - 'me', - 'type', - 'type/', + "me", + "type", + "type/", ] -app_name = 'activity' +app_name = "activity" urlpatterns = [ - path('me/activity', V2MeActivityView.as_view(), name='me_activity'), - path('me/activity/', 
V2MeActivityView.as_view(), name='me_activity_id'), - path('academy/activity', V2AcademyActivityView.as_view(), name='academy_activity'), - path('academy/activity/', V2AcademyActivityView.as_view(), name='academy_activity_id'), - path('report', V2AcademyActivityReportView.as_view(), name='report'), + path("me/activity", V2MeActivityView.as_view(), name="me_activity"), + path("me/activity/", V2MeActivityView.as_view(), name="me_activity_id"), + path("academy/activity", V2AcademyActivityView.as_view(), name="academy_activity"), + path("academy/activity/", V2AcademyActivityView.as_view(), name="academy_activity_id"), + path("report", V2AcademyActivityReportView.as_view(), name="report"), *[r for r in urlpatterns_v1 if r.pattern._route not in deprecation_list], ] diff --git a/breathecode/activity/utils.py b/breathecode/activity/utils.py index 659eac905..c4cfe9353 100644 --- a/breathecode/activity/utils.py +++ b/breathecode/activity/utils.py @@ -5,62 +5,62 @@ from capyc.rest_framework.exceptions import ValidationException ACTIVITY_FIELDS = [ - 'cohort', - 'data', - 'day', - 'slug', - 'user_agent', + "cohort", + "data", + "day", + "slug", + "user_agent", ] ACTIVITY_REQUIRED_FIELDS = [ - 'slug', - 'user_agent', + "slug", + "user_agent", ] ACTIVITY_TYPE_DONT_NEED_A_COHORT = [ - 'breathecode_login', - 'online_platform_registration', + "breathecode_login", + "online_platform_registration", ] ACTIVITY_TYPE_DONT_NEED_A_DATA = [ - 'breathecode_login', - 'online_platform_registration', + "breathecode_login", + "online_platform_registration", ] def validate_activity_fields(data): for field in data: if field not in ACTIVITY_FIELDS: - slug = field.replace('_', '-') - raise ValidationException(f'Field {field} is not allowed in the request', slug=f'{slug}-not-allowed') + slug = field.replace("_", "-") + raise ValidationException(f"Field {field} is not allowed in the request", slug=f"{slug}-not-allowed") def validate_require_activity_fields(data): for field in ACTIVITY_REQUIRED_FIELDS: if field not in data: - slug = field.replace('_', '-') - raise ValidationException(f'Missing {field} in the request', slug=f'missing-{slug}') + slug = field.replace("_", "-") + raise ValidationException(f"Missing {field} in the request", slug=f"missing-{slug}") def validate_if_activity_need_field_cohort(data): - slug = data.get('slug') - if 'cohort' not in data and slug not in ACTIVITY_TYPE_DONT_NEED_A_COHORT: - raise ValidationException('This activity type need a cohort in the request', slug='missing-cohort') + slug = data.get("slug") + if "cohort" not in data and slug not in ACTIVITY_TYPE_DONT_NEED_A_COHORT: + raise ValidationException("This activity type need a cohort in the request", slug="missing-cohort") def validate_if_activity_need_field_data(data): - slug = data.get('slug') - if 'data' not in data and slug not in ACTIVITY_TYPE_DONT_NEED_A_DATA: - raise ValidationException('This activity type need a data field in the request', slug='missing-data') + slug = data.get("slug") + if "data" not in data and slug not in ACTIVITY_TYPE_DONT_NEED_A_DATA: + raise ValidationException("This activity type need a data field in the request", slug="missing-data") def validate_activity_have_correct_data_field(data): - if 'data' in data and data['data'] is not None: + if "data" in data and data["data"] is not None: try: - json.loads(data['data']) + json.loads(data["data"]) except Exception: - raise ValidationException('Data is not a JSON: ' + str(data), slug='data-is-not-a-json') + raise ValidationException("Data is not a JSON: " + 
str(data), slug="data-is-not-a-json") def generate_created_at(): diff --git a/breathecode/activity/views.py b/breathecode/activity/views.py index a189aaae1..d5928d2cd 100644 --- a/breathecode/activity/views.py +++ b/breathecode/activity/views.py @@ -36,24 +36,24 @@ logger = getLogger(__name__) ACTIVITIES = { - 'breathecode_login': 'Every time it logs in', - 'online_platform_registration': 'First day using breathecode', - 'public_event_attendance': 'Attendy on an eventbrite event', - 'classroom_attendance': 'When the student attent to class', - 'classroom_unattendance': 'When the student miss class', - 'lesson_opened': 'When a lessons is opened on the platform', - 'office_attendance': 'When the office raspberry pi detects the student', - 'nps_survey_answered': 'When a nps survey is answered by the student', - 'exercise_success': 'When student successfully tests exercise', - 'registration': 'When student successfully joins breathecode', - 'educational_status_change': 'Student cohort changes like: starts, drop, postpone, etc', - 'educational_note': 'Notes that can be added by teachers, TA\'s or anyone involved in the student education', - 'career_note': 'Notes related to the student career', + "breathecode_login": "Every time it logs in", + "online_platform_registration": "First day using breathecode", + "public_event_attendance": "Attendy on an eventbrite event", + "classroom_attendance": "When the student attent to class", + "classroom_unattendance": "When the student miss class", + "lesson_opened": "When a lessons is opened on the platform", + "office_attendance": "When the office raspberry pi detects the student", + "nps_survey_answered": "When a nps survey is answered by the student", + "exercise_success": "When student successfully tests exercise", + "registration": "When student successfully joins breathecode", + "educational_status_change": "Student cohort changes like: starts, drop, postpone, etc", + "educational_note": "Notes that can be added by teachers, TA's or anyone involved in the student education", + "career_note": "Notes related to the student career", } ACTIVITY_PUBLIC_SLUGS = [ - 'breathecode_login', - 'online_platform_registration', + "breathecode_login", + "online_platform_registration", ] @@ -61,13 +61,13 @@ class ActivityViewMixin(APIView): queryargs = [] def filter_by_slugs(self): - slugs = self.request.GET.get('slug', []) + slugs = self.request.GET.get("slug", []) if slugs: - slugs = slugs.split(',') + slugs = slugs.split(",") for slug in slugs: if slug and slug not in ACTIVITIES: - raise ValidationException(f'Activity type {slug} not found', slug='activity-not-found') + raise ValidationException(f"Activity type {slug} not found", slug="activity-not-found") if len(slugs) > 1: self.queryargs.append(OR(*[StudentActivity.slug == slug for slug in slugs])) @@ -83,34 +83,34 @@ def filter_by_cohort(self, academy_id, cohort_id_or_slug): # this parse prevent a call to the db if the cohort slug doesn't exist cohort_id = int(cohort_id_or_slug) except Exception: - raise ValidationException('Cohort not found', slug='cohort-not-found') + raise ValidationException("Cohort not found", slug="cohort-not-found") - slug = Cohort.objects.filter(academy__id=academy_id, pk=cohort_id).values_list('slug', flat=True).first() + slug = Cohort.objects.filter(academy__id=academy_id, pk=cohort_id).values_list("slug", flat=True).first() if not slug: - raise ValidationException('Cohort not found', slug='cohort-not-found') + raise ValidationException("Cohort not found", slug="cohort-not-found") 
self.queryargs.append(StudentActivity.cohort == slug) def filter_by_cohorts(self, academy_id): - cohorts = self.request.GET.get('cohort', []) + cohorts = self.request.GET.get("cohort", []) slugs = [] if cohorts: - cohorts = cohorts.split(',') + cohorts = cohorts.split(",") for cohort in cohorts: - if (cohort and not Cohort.objects.filter(slug=cohort, academy__id=academy_id).exists()): + if cohort and not Cohort.objects.filter(slug=cohort, academy__id=academy_id).exists(): try: # this parse prevent a call to the db if the cohort slug doesn't exist cohort_id = int(cohort) c = Cohort.objects.filter(id=cohort_id, academy__id=academy_id).first() if not c: - raise ValidationException('Cohort not found', slug='cohort-not-found') + raise ValidationException("Cohort not found", slug="cohort-not-found") slugs.append(c.slug) except Exception: - raise ValidationException('Cohort not found', slug='cohort-not-found') + raise ValidationException("Cohort not found", slug="cohort-not-found") slugs.append(cohort) @@ -120,19 +120,19 @@ def filter_by_cohorts(self, academy_id): self.queryargs.append(StudentActivity.cohort == slugs[0]) def filter_by_user_ids(self): - user_ids = self.request.GET.get('user_id', []) + user_ids = self.request.GET.get("user_id", []) if user_ids: - user_ids = user_ids.split(',') + user_ids = user_ids.split(",") for user_id in user_ids: try: int(user_id) except ValueError: - raise ValidationException('user_id is not a integer', slug='bad-user-id') + raise ValidationException("user_id is not a integer", slug="bad-user-id") for user_id in user_ids: if not User.objects.filter(id=user_id).exists(): - raise ValidationException('User not exists', slug='user-not-exists') + raise ValidationException("User not exists", slug="user-not-exists") if len(user_ids) > 1: self.queryargs.append(OR(*[StudentActivity.user_id == int(user_id) for user_id in user_ids])) @@ -140,13 +140,13 @@ def filter_by_user_ids(self): self.queryargs.append(StudentActivity.user_id == int(user_ids[0])) def filter_by_emails(self): - emails = self.request.GET.get('email', []) + emails = self.request.GET.get("email", []) if emails: - emails = emails.split(',') + emails = emails.split(",") for email in emails: if not User.objects.filter(email=email).exists(): - raise ValidationException('User not exists', slug='user-not-exists') + raise ValidationException("User not exists", slug="user-not-exists") if len(emails) > 1: self.queryargs.append(OR(*[StudentActivity.email == email for email in emails])) @@ -154,7 +154,7 @@ def filter_by_emails(self): self.queryargs.append(StudentActivity.email == emails[0]) def get_limit_from_query(self): - limit = self.request.GET.get('limit') + limit = self.request.GET.get("limit") if limit is not None: limit = int(limit) @@ -162,7 +162,7 @@ def get_limit_from_query(self): return limit def get_offset_from_query(self): - offset = self.request.GET.get('offset') + offset = self.request.GET.get("offset") if offset is not None: offset = int(offset) @@ -173,13 +173,13 @@ def get_offset_from_query(self): class ActivityTypeView(APIView): def get_activity_object(self, slug): - return {'slug': slug, 'description': ACTIVITIES[slug]} + return {"slug": slug, "description": ACTIVITIES[slug]} - @capable_of('read_activity') + @capable_of("read_activity") def get(self, request, activity_slug=None, academy_id=None): if activity_slug: if activity_slug not in ACTIVITIES: - raise ValidationException(f'Activity type {activity_slug} not found', slug='activity-not-found') + raise ValidationException(f"Activity type 
{activity_slug} not found", slug="activity-not-found") res = self.get_activity_object(activity_slug) return Response(res) @@ -190,7 +190,7 @@ def get(self, request, activity_slug=None, academy_id=None): class ActivityCohortView(ActivityViewMixin, HeaderLimitOffsetPagination): - @capable_of('read_activity') + @capable_of("read_activity") def get(self, request, cohort_id=None, academy_id=None): self.queryargs = [] from breathecode.utils import NDB @@ -213,40 +213,40 @@ def get(self, request, cohort_id=None, academy_id=None): class ActivityMeView(APIView): - @capable_of('read_activity') + @capable_of("read_activity") def get(self, request, academy_id=None): from breathecode.services.google_cloud import Datastore - kwargs = {'kind': 'student_activity'} + kwargs = {"kind": "student_activity"} - slug = request.GET.get('slug') + slug = request.GET.get("slug") if slug: - kwargs['slug'] = slug + kwargs["slug"] = slug if slug and slug not in ACTIVITIES: - raise ValidationException(f'Activity type {slug} not found', slug='activity-not-found') + raise ValidationException(f"Activity type {slug} not found", slug="activity-not-found") - cohort = request.GET.get('cohort') + cohort = request.GET.get("cohort") if cohort: - kwargs['cohort'] = cohort + kwargs["cohort"] = cohort - if (cohort and not Cohort.objects.filter(slug=cohort, academy__id=academy_id).exists()): - raise ValidationException('Cohort not found', slug='cohort-not-found') + if cohort and not Cohort.objects.filter(slug=cohort, academy__id=academy_id).exists(): + raise ValidationException("Cohort not found", slug="cohort-not-found") - user_id = request.GET.get('user_id') + user_id = request.GET.get("user_id") if user_id: try: - kwargs['user_id'] = int(user_id) + kwargs["user_id"] = int(user_id) except ValueError: - raise ValidationException('user_id is not a integer', slug='bad-user-id') + raise ValidationException("user_id is not a integer", slug="bad-user-id") - email = request.GET.get('email') + email = request.GET.get("email") if email: - kwargs['email'] = email + kwargs["email"] = email user = User.objects.filter(Q(id=user_id) | Q(email=email)) if (user_id or email) and not user: - raise ValidationException('User not exists', slug='user-not-exists') + raise ValidationException("User not exists", slug="user-not-exists") datastore = Datastore() @@ -254,11 +254,11 @@ def get(self, request, academy_id=None): public_iter = datastore.fetch(**kwargs, academy_id=0) query_iter = academy_iter + public_iter - query_iter.sort(key=lambda x: x['created_at'], reverse=True) + query_iter.sort(key=lambda x: x["created_at"], reverse=True) return Response(query_iter) - @capable_of('crud_activity') + @capable_of("crud_activity") def post(self, request, academy_id=None): data = request.data @@ -271,10 +271,10 @@ def post(self, request, academy_id=None): class ActivityClassroomView(APIView, HeaderLimitOffsetPagination): - @capable_of('classroom_activity') + @capable_of("classroom_activity") def post(self, request, cohort_id=None, academy_id=None): - cu = CohortUser.objects.filter(user__id=request.user.id).filter(Q(role='TEACHER') | Q(role='ASSISTANT')) + cu = CohortUser.objects.filter(user__id=request.user.id).filter(Q(role="TEACHER") | Q(role="ASSISTANT")) if cohort_id.isnumeric(): cu = cu.filter(cohort__id=cohort_id) @@ -284,7 +284,8 @@ def post(self, request, cohort_id=None, academy_id=None): cu = cu.first() if cu is None: raise ValidationException( - 'Only teachers or assistants from this cohort can report classroom activities on the student timeline') + 
"Only teachers or assistants from this cohort can report classroom activities on the student timeline" + ) data = request.data if isinstance(data, list) == False: @@ -292,22 +293,23 @@ def post(self, request, cohort_id=None, academy_id=None): new_activities = [] for activity in data: - student_id = activity['user_id'] - del activity['user_id'] - cohort_user = CohortUser.objects.filter(role='STUDENT', user__id=student_id, - cohort__id=cu.cohort.id).first() + student_id = activity["user_id"] + del activity["user_id"] + cohort_user = CohortUser.objects.filter( + role="STUDENT", user__id=student_id, cohort__id=cu.cohort.id + ).first() if cohort_user is None: - raise ValidationException('Student not found in this cohort', slug='not-found-in-cohort') + raise ValidationException("Student not found in this cohort", slug="not-found-in-cohort") new_activities.append(add_student_activity(cohort_user.user, activity, academy_id)) return Response(new_activities, status=status.HTTP_201_CREATED) - @capable_of('classroom_activity') + @capable_of("classroom_activity") def get(self, request, cohort_id=None, academy_id=None): from breathecode.services.google_cloud import Datastore - kwargs = {'kind': 'student_activity'} + kwargs = {"kind": "student_activity"} # get the cohort cohort = Cohort.objects.filter(academy__id=academy_id) @@ -317,55 +319,56 @@ def get(self, request, cohort_id=None, academy_id=None): cohort = cohort.filter(slug=cohort_id) cohort = cohort.first() if cohort is None: - raise ValidationException(f'Cohort {cohort_id} not found at this academy {academy_id}', - slug='cohort-not-found') - kwargs['cohort'] = cohort.slug + raise ValidationException( + f"Cohort {cohort_id} not found at this academy {academy_id}", slug="cohort-not-found" + ) + kwargs["cohort"] = cohort.slug - slug = request.GET.get('slug') + slug = request.GET.get("slug") if slug: - kwargs['slug'] = slug + kwargs["slug"] = slug if slug and slug not in ACTIVITIES: - raise ValidationException(f'Activity type {slug} not found', slug='activity-not-found') + raise ValidationException(f"Activity type {slug} not found", slug="activity-not-found") - user_id = request.GET.get('user_id') + user_id = request.GET.get("user_id") if user_id: try: - kwargs['user_id'] = int(user_id) + kwargs["user_id"] = int(user_id) except ValueError: - raise ValidationException('user_id is not a integer', slug='bad-user-id') + raise ValidationException("user_id is not a integer", slug="bad-user-id") - email = request.GET.get('email') + email = request.GET.get("email") if email: - kwargs['email'] = email + kwargs["email"] = email user = User.objects.filter(Q(id=user_id) | Q(email=email)) if (user_id or email) and not user: - raise ValidationException('User not exists', slug='user-not-exists') + raise ValidationException("User not exists", slug="user-not-exists") datastore = Datastore() - #academy_iter = datastore.fetch(**kwargs, academy_id=int(academy_id)) + # academy_iter = datastore.fetch(**kwargs, academy_id=int(academy_id)) - limit = request.GET.get('limit') - offset = request.GET.get('offset') + limit = request.GET.get("limit") + offset = request.GET.get("offset") # get the the total entities on db by kind if limit is not None or offset is not None: count = datastore.count(**kwargs) if limit: - kwargs['limit'] = int(limit) + kwargs["limit"] = int(limit) if offset: - kwargs['offset'] = int(offset) + kwargs["offset"] = int(offset) public_iter = datastore.fetch( **kwargs ) # TODO: remove this in the future because the academy_id was not present brefore and 
students didn't have it # query_iter = academy_iter + public_iter - public_iter.sort(key=lambda x: x['created_at'], reverse=True) + public_iter.sort(key=lambda x: x["created_at"], reverse=True) page = self.paginate_queryset(public_iter, request) @@ -381,100 +384,103 @@ def add_student_activity(user, data, academy_id): validate_activity_fields(data) validate_require_activity_fields(data) - slug = data['slug'] + slug = data["slug"] academy_id = academy_id if slug not in ACTIVITY_PUBLIC_SLUGS else 0 if slug not in ACTIVITIES: - raise ValidationException(f'Activity type {slug} not found', slug='activity-not-found') + raise ValidationException(f"Activity type {slug} not found", slug="activity-not-found") validate_if_activity_need_field_cohort(data) validate_if_activity_need_field_data(data) validate_activity_have_correct_data_field(data) - if 'cohort' in data: + if "cohort" in data: _query = Cohort.objects.filter(academy__id=academy_id) - if data['cohort'].isnumeric(): - _query = _query.filter(id=data['cohort']) + if data["cohort"].isnumeric(): + _query = _query.filter(id=data["cohort"]) else: - _query = _query.filter(slug=data['cohort']) + _query = _query.filter(slug=data["cohort"]) if not _query.exists(): - raise ValidationException(f"Cohort {str(data['cohort'])} doesn't exist in this academy", - slug='cohort-not-exists') + raise ValidationException( + f"Cohort {str(data['cohort'])} doesn't exist in this academy", slug="cohort-not-exists" + ) fields = { **data, - 'created_at': generate_created_at(), - 'slug': slug, - 'user_id': user.id, - 'email': user.email, - 'academy_id': int(academy_id), + "created_at": generate_created_at(), + "slug": slug, + "user_id": user.id, + "email": user.email, + "academy_id": int(academy_id), } datastore = Datastore() - datastore.update('student_activity', fields) + datastore.update("student_activity", fields) return fields class StudentActivityView(APIView, HeaderLimitOffsetPagination): - @capable_of('read_activity') + @capable_of("read_activity") def get(self, request, student_id=None, academy_id=None): from breathecode.services.google_cloud import Datastore - cohort_user = CohortUser.objects.filter(role='STUDENT', user__id=student_id, - cohort__academy__id=academy_id).first() + cohort_user = CohortUser.objects.filter( + role="STUDENT", user__id=student_id, cohort__academy__id=academy_id + ).first() if cohort_user is None: raise ValidationException( - 'There is not student with that ID that belongs to any cohort within your academy', - slug='student-no-cohort') + "There is not student with that ID that belongs to any cohort within your academy", + slug="student-no-cohort", + ) - kwargs = {'kind': 'student_activity'} + kwargs = {"kind": "student_activity"} - slug = request.GET.get('slug') + slug = request.GET.get("slug") if slug: - kwargs['slug'] = slug + kwargs["slug"] = slug if slug and slug not in ACTIVITIES: - raise ValidationException(f'Activity type {slug} not found', slug='activity-not-found') + raise ValidationException(f"Activity type {slug} not found", slug="activity-not-found") if student_id: try: - kwargs['user_id'] = int(student_id) + kwargs["user_id"] = int(student_id) except ValueError: - raise ValidationException('student_id is not a integer', slug='bad-student-id') + raise ValidationException("student_id is not a integer", slug="bad-student-id") - email = request.GET.get('email') + email = request.GET.get("email") if email: - kwargs['email'] = email + kwargs["email"] = email user = User.objects.filter(Q(id=student_id) | Q(email=email)) if 
(student_id or email) and not user: - raise ValidationException('User not exists', slug='user-not-exists') + raise ValidationException("User not exists", slug="user-not-exists") datastore = Datastore() - #academy_iter = datastore.fetch(**kwargs, academy_id=int(academy_id)) + # academy_iter = datastore.fetch(**kwargs, academy_id=int(academy_id)) - limit = request.GET.get('limit') - offset = request.GET.get('offset') + limit = request.GET.get("limit") + offset = request.GET.get("offset") # get the the total entities on db by kind if limit is not None or offset is not None: count = datastore.count(**kwargs) if limit: - kwargs['limit'] = int(limit) + kwargs["limit"] = int(limit) if offset: - kwargs['offset'] = int(offset) + kwargs["offset"] = int(offset) public_iter = datastore.fetch( **kwargs ) # TODO: remove this in the future because the academy_id was not present before and students didn't have it # query_iter = academy_iter + public_iter - public_iter.sort(key=lambda x: x['created_at'], reverse=True) + public_iter.sort(key=lambda x: x["created_at"], reverse=True) page = self.paginate_queryset(public_iter, request) @@ -483,7 +489,7 @@ def get(self, request, student_id=None, academy_id=None): else: return Response(page, status=status.HTTP_200_OK) - @capable_of('crud_activity') + @capable_of("crud_activity") def post(self, request, student_id=None, academy_id=None): data = request.data @@ -493,19 +499,20 @@ def post(self, request, student_id=None, academy_id=None): new_activities = [] for activity in data: - if 'cohort' not in activity: - raise ValidationException('Every activity specified for each student must have a cohort (slug)', - slug='missing-cohort') - elif activity['cohort'].isnumeric(): - raise ValidationException('Cohort must be a slug, not a numeric ID', slug='invalid-cohort') - - student_id = activity['user_id'] - del activity['user_id'] - cohort_user = CohortUser.objects.filter(role='STUDENT', - user__id=student_id, - cohort__slug=activity['cohort']).first() + if "cohort" not in activity: + raise ValidationException( + "Every activity specified for each student must have a cohort (slug)", slug="missing-cohort" + ) + elif activity["cohort"].isnumeric(): + raise ValidationException("Cohort must be a slug, not a numeric ID", slug="invalid-cohort") + + student_id = activity["user_id"] + del activity["user_id"] + cohort_user = CohortUser.objects.filter( + role="STUDENT", user__id=student_id, cohort__slug=activity["cohort"] + ).first() if cohort_user is None: - raise ValidationException('Student not found in this cohort', slug='not-found-in-cohort') + raise ValidationException("Student not found in this cohort", slug="not-found-in-cohort") new_activities.append(add_student_activity(cohort_user.user, activity, academy_id)) @@ -529,10 +536,12 @@ def get(self, request, activity_id=None): LIMIT 1 """ - job_config = bigquery.QueryJobConfig(query_parameters=[ - bigquery.ScalarQueryParameter('activity_id', 'STRING', activity_id), - bigquery.ScalarQueryParameter('user_id', 'INT64', request.user.id), - ]) + job_config = bigquery.QueryJobConfig( + query_parameters=[ + bigquery.ScalarQueryParameter("activity_id", "STRING", activity_id), + bigquery.ScalarQueryParameter("user_id", "INT64", request.user.id), + ] + ) # Run the query query_job = client.query(query, job_config=job_config) @@ -540,18 +549,17 @@ def get(self, request, activity_id=None): result = next(results) if not result: - raise ValidationException(translation(lang, - en='activity not found', - es='actividad no encontrada', - 
slug='activity-not-found'), - code=404) + raise ValidationException( + translation(lang, en="activity not found", es="actividad no encontrada", slug="activity-not-found"), + code=404, + ) serializer = ActivitySerializer(result, many=False) return Response(serializer.data) - limit = int(request.GET.get('limit', 100)) - offset = (int(request.GET.get('page', 1)) - 1) * limit - kind = request.GET.get('kind', None) + limit = int(request.GET.get("limit", 100)) + offset = (int(request.GET.get("page", 1)) - 1) * limit + kind = request.GET.get("kind", None) query = f""" SELECT * @@ -564,13 +572,13 @@ def get(self, request, activity_id=None): """ data = [ - bigquery.ScalarQueryParameter('user_id', 'INT64', request.user.id), - bigquery.ScalarQueryParameter('limit', 'INT64', limit), - bigquery.ScalarQueryParameter('offset', 'INT64', offset), + bigquery.ScalarQueryParameter("user_id", "INT64", request.user.id), + bigquery.ScalarQueryParameter("limit", "INT64", limit), + bigquery.ScalarQueryParameter("offset", "INT64", offset), ] if kind: - data.append(bigquery.ScalarQueryParameter('kind', 'STRING', kind)) + data.append(bigquery.ScalarQueryParameter("kind", "STRING", kind)) job_config = bigquery.QueryJobConfig(query_parameters=data) @@ -584,12 +592,12 @@ def get(self, request, activity_id=None): class V2AcademyActivityView(APIView): - @capable_of('read_activity') + @capable_of("read_activity") def get(self, request, activity_id=None, academy_id=None): lang = get_user_language(request) client, project_id, dataset = BigQuery.client() - user_id = request.GET.get('user_id', None) + user_id = request.GET.get("user_id", None) if user_id is None: user_id = request.user.id @@ -605,11 +613,13 @@ def get(self, request, activity_id=None, academy_id=None): LIMIT 1 """ - job_config = bigquery.QueryJobConfig(query_parameters=[ - bigquery.ScalarQueryParameter('activity_id', 'STRING', activity_id), - bigquery.ScalarQueryParameter('academy_id', 'INT64', academy_id), - bigquery.ScalarQueryParameter('user_id', 'INT64', user_id), - ]) + job_config = bigquery.QueryJobConfig( + query_parameters=[ + bigquery.ScalarQueryParameter("activity_id", "STRING", activity_id), + bigquery.ScalarQueryParameter("academy_id", "INT64", academy_id), + bigquery.ScalarQueryParameter("user_id", "INT64", user_id), + ] + ) # Run the query query_job = client.query(query, job_config=job_config) @@ -617,20 +627,19 @@ def get(self, request, activity_id=None, academy_id=None): result = next(results) if not result: - raise ValidationException(translation(lang, - en='activity not found', - es='actividad no encontrada', - slug='activity-not-found'), - code=404) + raise ValidationException( + translation(lang, en="activity not found", es="actividad no encontrada", slug="activity-not-found"), + code=404, + ) serializer = ActivitySerializer(result, many=False) return Response(serializer.data) - limit = int(request.GET.get('limit', 100)) - offset = (int(request.GET.get('page', 1)) - 1) * limit - kind = request.GET.get('kind', None) - date_start = request.GET.get('date_start', None) - date_end = request.GET.get('date_end', None) + limit = int(request.GET.get("limit", 100)) + offset = (int(request.GET.get("page", 1)) - 1) * limit + kind = request.GET.get("kind", None) + date_start = request.GET.get("date_start", None) + date_end = request.GET.get("date_end", None) query = f""" SELECT * @@ -646,20 +655,20 @@ def get(self, request, activity_id=None, academy_id=None): """ data = [ - bigquery.ScalarQueryParameter('academy_id', 'INT64', int(academy_id)), - 
bigquery.ScalarQueryParameter('user_id', 'INT64', user_id), - bigquery.ScalarQueryParameter('limit', 'INT64', limit), - bigquery.ScalarQueryParameter('offset', 'INT64', offset), + bigquery.ScalarQueryParameter("academy_id", "INT64", int(academy_id)), + bigquery.ScalarQueryParameter("user_id", "INT64", user_id), + bigquery.ScalarQueryParameter("limit", "INT64", limit), + bigquery.ScalarQueryParameter("offset", "INT64", offset), ] if kind: - data.append(bigquery.ScalarQueryParameter('kind', 'STRING', kind)) + data.append(bigquery.ScalarQueryParameter("kind", "STRING", kind)) if date_start: - data.append(bigquery.ScalarQueryParameter('date_start', 'TIMESTAMP', date_start)) + data.append(bigquery.ScalarQueryParameter("date_start", "TIMESTAMP", date_start)) if date_end: - data.append(bigquery.ScalarQueryParameter('date_end', 'TIMESTAMP', date_end)) + data.append(bigquery.ScalarQueryParameter("date_end", "TIMESTAMP", date_end)) job_config = bigquery.QueryJobConfig(query_parameters=data) @@ -673,45 +682,45 @@ def get(self, request, activity_id=None, academy_id=None): class V2AcademyActivityReportView(APIView): - @capable_of('read_activity') + @capable_of("read_activity") def get(self, request, academy_id=None): - query = request.GET.get('query', '{}') + query = request.GET.get("query", "{}") query = json.loads(query) - result = BigQuery.table('activity') + result = BigQuery.table("activity") - fields = request.GET.get('fields', None) + fields = request.GET.get("fields", None) if fields is not None: - result = result.select(*fields.split(',')) + result = result.select(*fields.split(",")) - by = request.GET.get('by', None) + by = request.GET.get("by", None) if by is not None: - result = result.group_by(*by.split(',')) + result = result.group_by(*by.split(",")) - order = request.GET.get('order', None) + order = request.GET.get("order", None) if order is not None: - result = result.order_by(*order.split(',')) + result = result.order_by(*order.split(",")) - limit = request.GET.get('limit', None) + limit = request.GET.get("limit", None) if limit is not None: result = result.limit_by(limit) - if 'filter' in query: - result = result.filter(**query['filter']) + if "filter" in query: + result = result.filter(**query["filter"]) - if 'grouping_function' in query: - grouping_function = query['grouping_function'] + if "grouping_function" in query: + grouping_function = query["grouping_function"] aggs = [] - if 'sum' in grouping_function: - for value in grouping_function['sum']: + if "sum" in grouping_function: + for value in grouping_function["sum"]: aggs.append(Sum(value)) - if 'count' in grouping_function: - for value in grouping_function['count']: + if "count" in grouping_function: + for value in grouping_function["count"]: aggs.append(Count(value)) - if 'avg' in grouping_function: - for value in grouping_function['avg']: + if "avg" in grouping_function: + for value in grouping_function["avg"]: aggs.append(Avg(value)) result = result.aggregate(*aggs) diff --git a/breathecode/admissions/actions.py b/breathecode/admissions/actions.py index 7e49e1e6f..76acc08c6 100644 --- a/breathecode/admissions/actions.py +++ b/breathecode/admissions/actions.py @@ -11,7 +11,7 @@ from .models import Cohort, CohortUser, SyllabusScheduleTimeSlot, SyllabusVersion from .signals import syllabus_asset_slug_updated -BUCKET_NAME = 'admissions-breathecode' +BUCKET_NAME = "admissions-breathecode" logger = logging.getLogger(__name__) @@ -27,7 +27,7 @@ def haversine(lon1, lat1, lon2, lat2): # haversine formula dlon = lon2 - lon1 dlat = 
lat2 - lat1 - a = sin(dlat / 2)**2 + cos(lat1) * cos(lat2) * sin(dlon / 2)**2 + a = sin(dlat / 2) ** 2 + cos(lat1) * cos(lat2) * sin(dlon / 2) ** 2 c = 2 * asin(sqrt(a)) r = 6371 # Radius of earth in kilometers. Use 3956 for miles. Determines return value units. return c * r @@ -49,7 +49,7 @@ def __init__(self, cohort_id: int) -> None: self.cohort = Cohort.objects.filter(id=cohort_id).first() if not self.cohort: - logger.error(f'Cohort {cohort_id} not found') + logger.error(f"Cohort {cohort_id} not found") return def clean(self) -> None: @@ -66,29 +66,34 @@ def sync(self) -> None: timezone = self.cohort.timezone or self.cohort.academy.timezone if not timezone: slug = self.cohort.slug - logger.warning(f'Cohort `{slug}` was skipped because not have a timezone') + logger.warning(f"Cohort `{slug}` was skipped because not have a timezone") return - certificate_timeslots = SyllabusScheduleTimeSlot.objects.filter(Q(schedule__academy__id=self.cohort.academy.id) - | Q(schedule__syllabus__private=False), - schedule__id=self.cohort.schedule.id) + certificate_timeslots = SyllabusScheduleTimeSlot.objects.filter( + Q(schedule__academy__id=self.cohort.academy.id) | Q(schedule__syllabus__private=False), + schedule__id=self.cohort.schedule.id, + ) - timeslots = CohortTimeSlot.objects.bulk_create([ - self._fill_timeslot(certificate_timeslot, self.cohort.id, timezone) - for certificate_timeslot in certificate_timeslots - ]) + timeslots = CohortTimeSlot.objects.bulk_create( + [ + self._fill_timeslot(certificate_timeslot, self.cohort.id, timezone) + for certificate_timeslot in certificate_timeslots + ] + ) return [self._append_id_of_timeslot(x) for x in timeslots] def _fill_timeslot(self, certificate_timeslot: SyllabusScheduleTimeSlot, cohort_id: int, timezone: str) -> None: from breathecode.admissions.models import CohortTimeSlot - cohort_timeslot = CohortTimeSlot(cohort_id=cohort_id, - starting_at=certificate_timeslot.starting_at, - ending_at=certificate_timeslot.ending_at, - recurrent=certificate_timeslot.recurrent, - recurrency_type=certificate_timeslot.recurrency_type, - timezone=timezone) + cohort_timeslot = CohortTimeSlot( + cohort_id=cohort_id, + starting_at=certificate_timeslot.starting_at, + ending_at=certificate_timeslot.ending_at, + recurrent=certificate_timeslot.recurrent, + recurrency_type=certificate_timeslot.recurrency_type, + timezone=timezone, + ) return cohort_timeslot @@ -96,16 +101,20 @@ def _append_id_of_timeslot(self, cohort_timeslot): from breathecode.admissions.models import CohortTimeSlot if not cohort_timeslot.id: - cohort_timeslot.id = CohortTimeSlot.objects.filter( - created_at=cohort_timeslot.created_at, - updated_at=cohort_timeslot.updated_at, - cohort_id=cohort_timeslot.cohort_id, - starting_at=cohort_timeslot.starting_at, - ending_at=cohort_timeslot.ending_at, - recurrent=cohort_timeslot.recurrent, - recurrency_type=cohort_timeslot.recurrency_type, - timezone=cohort_timeslot.timezone, - ).values_list('id', flat=True).first() + cohort_timeslot.id = ( + CohortTimeSlot.objects.filter( + created_at=cohort_timeslot.created_at, + updated_at=cohort_timeslot.updated_at, + cohort_id=cohort_timeslot.cohort_id, + starting_at=cohort_timeslot.starting_at, + ending_at=cohort_timeslot.ending_at, + recurrent=cohort_timeslot.recurrent, + recurrency_type=cohort_timeslot.recurrency_type, + timezone=cohort_timeslot.timezone, + ) + .values_list("id", flat=True) + .first() + ) return cohort_timeslot @@ -113,18 +122,18 @@ def _append_id_of_timeslot(self, cohort_timeslot): def 
find_asset_on_json(asset_slug, asset_type=None): from breathecode.certificate.actions import syllabus_weeks_to_days - logger.debug(f'Searching slug {asset_slug} in all the syllabus and versions') + logger.debug(f"Searching slug {asset_slug} in all the syllabus and versions") syllabus_list = SyllabusVersion.objects.all() key_map = { - 'QUIZ': 'quizzes', - 'LESSON': 'lessons', - 'EXERCISE': 'replits', - 'PROJECT': 'assignments', + "QUIZ": "quizzes", + "LESSON": "lessons", + "EXERCISE": "replits", + "PROJECT": "assignments", } findings = [] for s in syllabus_list: - logger.debug(f'Starting with syllabus {s.syllabus.slug} version {str(s.version)}') + logger.debug(f"Starting with syllabus {s.syllabus.slug} version {str(s.version)}") module_index = -1 if isinstance(s.json, str): s.json = json.loads(s.json) @@ -132,7 +141,7 @@ def find_asset_on_json(asset_slug, asset_type=None): # in case the json contains "weeks" instead of "days" s.json = syllabus_weeks_to_days(s.json) - for day in s.json['days']: + for day in s.json["days"]: module_index += 1 asset_index = -1 @@ -144,21 +153,25 @@ def find_asset_on_json(asset_slug, asset_type=None): asset_index += 1 if isinstance(a, dict): - if a['slug'] == asset_slug: - findings.append({ - 'module': module_index, - 'version': s.version, - 'type': atype, - 'syllabus': s.syllabus.slug - }) + if a["slug"] == asset_slug: + findings.append( + { + "module": module_index, + "version": s.version, + "type": atype, + "syllabus": s.syllabus.slug, + } + ) else: if a == asset_slug: - findings.append({ - 'module': module_index, - 'version': s.version, - 'type': atype, - 'syllabus': s.syllabus.slug - }) + findings.append( + { + "module": module_index, + "version": s.version, + "type": atype, + "syllabus": s.syllabus.slug, + } + ) return findings @@ -167,18 +180,18 @@ def update_asset_on_json(from_slug, to_slug, asset_type, simulate=True): from breathecode.certificate.actions import syllabus_weeks_to_days asset_type = asset_type.upper() - logger.debug(f'Replacing {asset_type} slug {from_slug} with {to_slug} in all the syllabus and versions') + logger.debug(f"Replacing {asset_type} slug {from_slug} with {to_slug} in all the syllabus and versions") syllabus_list = SyllabusVersion.objects.all() key_map = { - 'QUIZ': 'quizzes', - 'LESSON': 'lessons', - 'EXERCISE': 'replits', - 'PROJECT': 'assignments', + "QUIZ": "quizzes", + "LESSON": "lessons", + "EXERCISE": "replits", + "PROJECT": "assignments", } findings = [] for s in syllabus_list: - logger.debug(f'Starting with syllabus {s.syllabus.slug} version {str(s.version)}') + logger.debug(f"Starting with syllabus {s.syllabus.slug} version {str(s.version)}") module_index = -1 if isinstance(s.json, str): s.json = json.loads(s.json) @@ -186,7 +199,7 @@ def update_asset_on_json(from_slug, to_slug, asset_type, simulate=True): # in case the json contains "weeks" instead of "days" s.json = syllabus_weeks_to_days(s.json) - for day in s.json['days']: + for day in s.json["days"]: module_index += 1 asset_index = -1 if key_map[asset_type] not in day: @@ -196,21 +209,20 @@ def update_asset_on_json(from_slug, to_slug, asset_type, simulate=True): asset_index += 1 if isinstance(a, dict): - if a['slug'] == from_slug: - findings.append({'module': module_index, 'version': s.version, 'syllabus': s.syllabus.slug}) - s.json['days'][module_index][key_map[asset_type]][asset_index]['slug'] = to_slug + if a["slug"] == from_slug: + findings.append({"module": module_index, "version": s.version, "syllabus": s.syllabus.slug}) + 
s.json["days"][module_index][key_map[asset_type]][asset_index]["slug"] = to_slug else: if a == from_slug: - findings.append({'module': module_index, 'version': s.version, 'syllabus': s.syllabus.slug}) - s.json['days'][module_index][key_map[asset_type]][asset_index] = to_slug + findings.append({"module": module_index, "version": s.version, "syllabus": s.syllabus.slug}) + s.json["days"][module_index][key_map[asset_type]][asset_index] = to_slug if not simulate: s.save() if not simulate and len(findings) > 0: - syllabus_asset_slug_updated.send_robust(sender=update_asset_on_json, - from_slug=from_slug, - to_slug=to_slug, - asset_type=asset_type) + syllabus_asset_slug_updated.send_robust( + sender=update_asset_on_json, from_slug=from_slug, to_slug=to_slug, asset_type=asset_type + ) return findings @@ -225,7 +237,7 @@ def __init__(self): def error(self, msg): if len(self.errors) == 10: - self.errors.append('Can only log first 10 errors on syllabus') + self.errors.append("Can only log first 10 errors on syllabus") if len(self.errors) > 10: return None @@ -239,7 +251,7 @@ def concat(self, log): self.warnings += log.warnings def serialize(self): - return {'errors': self.errors, 'warnings': self.warnings} + return {"errors": self.errors, "warnings": self.warnings} def http_status(self): if len(self.errors) == 0: @@ -257,67 +269,71 @@ def test_syllabus(syl, validate_assets=False, ignore=None): syl = json.loads(syl) syllabus_log = SyllabusLog() - if 'days' not in syl: + if "days" not in syl: syllabus_log.error("Syllabus must have a 'days' or 'modules' property") return syllabus_log def validate(_type, _log, day, index): if _type not in day: - _log.error(f'Missing {_type} property on module {index}') + _log.error(f"Missing {_type} property on module {index}") return False for a in day[_type]: - if 'slug' not in a: - _log.error(f'Missing slug on {_type} property on module {index}') - if not isinstance(a['slug'], str): - _log.error(f'Slug property must be a string for {_type} on module {index}') + if "slug" not in a: + _log.error(f"Missing slug on {_type} property on module {index}") + if not isinstance(a["slug"], str): + _log.error(f"Slug property must be a string for {_type} on module {index}") if validate_assets: - exists = AssetAlias.objects.filter(slug=a['slug']).first() - if exists is None and not ('target' in a and a['target'] == 'blank'): + exists = AssetAlias.objects.filter(slug=a["slug"]).first() + if exists is None and not ("target" in a and a["target"] == "blank"): _log.error(f'Missing {_type} with slug {a["slug"]} on module {index}') return True count = 0 - types_to_validate = ['lessons', 'quizzes', 'replits', 'assignments'] + types_to_validate = ["lessons", "quizzes", "replits", "assignments"] - #ignore: an array with types to ignore, for example: ['lessons'] + # ignore: an array with types to ignore, for example: ['lessons'] types_to_validate = [a for a in types_to_validate if a not in ignore] - for day in syl['days']: + for day in syl["days"]: count += 1 for _name in types_to_validate: validate(_name, syllabus_log, day, count) - if 'teacher_instructions' not in day or day['teacher_instructions'] == '': - syllabus_log.warn(f'Empty teacher instructions on module {count}') + if "teacher_instructions" not in day or day["teacher_instructions"] == "": + syllabus_log.warn(f"Empty teacher instructions on module {count}") if len(syllabus_log.errors) > 11: return syllabus_log return syllabus_log -def is_no_saas_student_up_to_date_in_any_cohort(user: User, - cohort: Optional[Cohort] = None, - 
academy: Optional[Cohort] = None) -> str: - no_available_as_saas = Q(cohort__available_as_saas=False) | Q(cohort__available_as_saas=None, - cohort__academy__available_as_saas=False) +def is_no_saas_student_up_to_date_in_any_cohort( + user: User, cohort: Optional[Cohort] = None, academy: Optional[Cohort] = None +) -> str: + no_available_as_saas = Q(cohort__available_as_saas=False) | Q( + cohort__available_as_saas=None, cohort__academy__available_as_saas=False + ) extra = {} if cohort: - extra['cohort'] = cohort + extra["cohort"] = cohort if academy: - extra['cohort__academy'] = academy - - if cohort is None and CohortUser.objects.filter( - no_available_as_saas, user=user, educational_status__in=['ACTIVE', 'GRADUATED'], ** - extra).exclude(finantial_status='LATE').exists(): + extra["cohort__academy"] = academy + + if ( + cohort is None + and CohortUser.objects.filter( + no_available_as_saas, user=user, educational_status__in=["ACTIVE", "GRADUATED"], **extra + ) + .exclude(finantial_status="LATE") + .exists() + ): return True - if CohortUser.objects.filter(no_available_as_saas, - user=user, - finantial_status='LATE', - educational_status='ACTIVE', - **extra).exists(): + if CohortUser.objects.filter( + no_available_as_saas, user=user, finantial_status="LATE", educational_status="ACTIVE", **extra + ).exists(): return False # if no cohorts were found, we assume that the user is up to date diff --git a/breathecode/admissions/admin.py b/breathecode/admissions/admin.py index d27d7d183..3568f017b 100644 --- a/breathecode/admissions/admin.py +++ b/breathecode/admissions/admin.py @@ -16,8 +16,19 @@ from breathecode.activity.tasks import get_attendancy_log from breathecode.marketing.tasks import add_cohort_slug_as_acp_tag, add_cohort_task_to_student -from .models import (Academy, SyllabusSchedule, Cohort, CohortUser, Country, City, SyllabusVersion, UserAdmissions, - Syllabus, CohortTimeSlot, SyllabusScheduleTimeSlot) +from .models import ( + Academy, + SyllabusSchedule, + Cohort, + CohortUser, + Country, + City, + SyllabusVersion, + UserAdmissions, + Syllabus, + CohortTimeSlot, + SyllabusScheduleTimeSlot, +) from .actions import ImportCohortTimeSlots, test_syllabus from .tasks import async_test_syllabus from breathecode.assignments.actions import sync_student_tasks @@ -27,31 +38,31 @@ logger = getLogger(__name__) # Register your models here. 
-admin.site.site_header = '4Geeks'
-admin.site.index_title = 'Administration Portal'
-admin.site.site_title = 'Administration Portal'
+admin.site.site_header = "4Geeks"
+admin.site.index_title = "Administration Portal"
+admin.site.site_title = "Administration Portal"
 
 timezones = [(x, x) for x in pytz.common_timezones]
 
 
 @admin.register(UserAdmissions)
 class UserAdmin(UserAdmin):
-    list_display = ('username', 'email', 'first_name', 'last_name', 'is_staff')
+    list_display = ("username", "email", "first_name", "last_name", "is_staff")
 
 
 class AcademyForm(forms.ModelForm):
 
     def __init__(self, *args, **kwargs):
         super(AcademyForm, self).__init__(*args, **kwargs)
-        self.fields['timezone'] = forms.ChoiceField(choices=timezones)
+        self.fields["timezone"] = forms.ChoiceField(choices=timezones)
 
 
-@admin.display(description='Mark as available as SAAS')
+@admin.display(description="Mark as available as SAAS")
 def mark_as_available_as_saas(modeladmin, request, queryset):
     queryset.update(available_as_saas=True)
 
 
-@admin.display(description='Mark as unavailable as SAAS')
+@admin.display(description="Mark as unavailable as SAAS")
 def mark_as_unavailable_as_saas(modeladmin, request, queryset):
     queryset.update(available_as_saas=False)
@@ -59,61 +70,61 @@ def mark_as_unavailable_as_saas(modeladmin, request, queryset):
 
 @admin.register(Academy)
 class AcademyAdmin(admin.ModelAdmin):
     form = AcademyForm
-    list_display = ('id', 'slug', 'name', 'city')
+    list_display = ("id", "slug", "name", "city")
     actions = [mark_as_available_as_saas, mark_as_unavailable_as_saas]
 
 
 @admin.register(Country)
 class CountryAdmin(admin.ModelAdmin):
-    list_display = ('code', 'name')
+    list_display = ("code", "name")
 
 
 @admin.register(City)
 class CityAdmin(admin.ModelAdmin):
-    list_display = ('name', 'country')
+    list_display = ("name", "country")
 
 
-@admin.display(description='Make him/her an ASSISTANT')
+@admin.display(description="Make him/her an ASSISTANT")
 def make_assistant(modeladmin, request, queryset):
     cohort_users = queryset.all()
     for cu in cohort_users:
-        cu.role = 'ASSISTANT'
+        cu.role = "ASSISTANT"
         cu.save()
 
 
-@admin.display(description='Make him/her a TEACHER')
+@admin.display(description="Make him/her a TEACHER")
 def make_teacher(modeladmin, request, queryset):
     cohort_users = queryset.all()
     for cu in cohort_users:
-        cu.role = 'TEACHER'
+        cu.role = "TEACHER"
         cu.save()
 
 
-@admin.display(description='Make him/her a STUDENT')
+@admin.display(description="Make him/her a STUDENT")
 def make_student(modeladmin, request, queryset):
     cohort_users = queryset.all()
     for cu in cohort_users:
-        cu.role = 'STUDENT'
+        cu.role = "STUDENT"
         cu.save()
 
 
-@admin.display(description='Educational_status = ACTIVE')
+@admin.display(description="Educational_status = ACTIVE")
 def make_edu_stat_active(modeladmin, request, queryset):
     cohort_users = queryset.all()
     for cu in cohort_users:
-        cu.educational_status = 'ACTIVE'
+        cu.educational_status = "ACTIVE"
         cu.save()
 
 
-@admin.display(description='Educational_status = GRADUATED')
+@admin.display(description="Educational_status = GRADUATED")
 def make_edu_stat_graduate(modeladmin, request, queryset):
     cohort_users = queryset.all()
     for cu in cohort_users:
-        cu.educational_status = 'GRADUATED'
+        cu.educational_status = "GRADUATED"
         cu.save()
 
 
-@admin.display(description='Add student tag to active campaign')
+@admin.display(description="Add student tag to active campaign")
 def add_student_tag_to_active_campaign(modeladmin, request, queryset):
     cohort_users = queryset.all()
     for v in cohort_users:
@@ -122,40 +133,40 @@ def 
add_student_tag_to_active_campaign(modeladmin, request, queryset): @admin.register(CohortUser) class CohortUserAdmin(admin.ModelAdmin): - search_fields = ['user__email', 'user__first_name', 'user__last_name', 'cohort__name', 'cohort__slug'] - list_display = ('get_student', 'cohort', 'role', 'educational_status', 'finantial_status', 'created_at') - list_filter = ['role', 'educational_status', 'finantial_status'] - raw_id_fields = ['user', 'cohort'] + search_fields = ["user__email", "user__first_name", "user__last_name", "cohort__name", "cohort__slug"] + list_display = ("get_student", "cohort", "role", "educational_status", "finantial_status", "created_at") + list_filter = ["role", "educational_status", "finantial_status"] + raw_id_fields = ["user", "cohort"] actions = [make_assistant, make_teacher, make_student, make_edu_stat_active, add_student_tag_to_active_campaign] def get_student(self, obj): - return obj.user.first_name + ' ' + obj.user.last_name + '(' + obj.user.email + ')' + return obj.user.first_name + " " + obj.user.last_name + "(" + obj.user.email + ")" -@admin.display(description='Sync Tasks') +@admin.display(description="Sync Tasks") def sync_tasks(modeladmin, request, queryset): - cohort_ids = queryset.values_list('id', flat=True) + cohort_ids = queryset.values_list("id", flat=True) cohort_user = CohortUser.objects.filter(cohort__id__in=[cohort_ids]) for cu in cohort_user: sync_student_tasks(cu.user) -@admin.display(description='Mark as ENDED') +@admin.display(description="Mark as ENDED") def mark_as_ended(modeladmin, request, queryset): - queryset.update(stage='ENDED') + queryset.update(stage="ENDED") -@admin.display(description='Mark as STARTED') +@admin.display(description="Mark as STARTED") def mark_as_started(modeladmin, request, queryset): - queryset.update(stage='STARTED') + queryset.update(stage="STARTED") -@admin.display(description='Mark as INACTIVE') +@admin.display(description="Mark as INACTIVE") def mark_as_inactive(modeladmin, request, queryset): - queryset.update(stage='INACTIVE') + queryset.update(stage="INACTIVE") -@admin.display(description='Sync Timeslots With Certificate') +@admin.display(description="Sync Timeslots With Certificate") def sync_timeslots(modeladmin, request, queryset): cohorts = queryset.all() count = 0 @@ -164,21 +175,21 @@ def sync_timeslots(modeladmin, request, queryset): x.clean() ids = x.sync() - logger.info(f'{len(ids)} timeslots created for cohort {str(c.slug)}') + logger.info(f"{len(ids)} timeslots created for cohort {str(c.slug)}") if len(ids) > 0: count += 1 - messages.add_message(request, messages.INFO, f'{count} of {cohorts.count()} cohorts timeslots were updated') + messages.add_message(request, messages.INFO, f"{count} of {cohorts.count()} cohorts timeslots were updated") class CohortForm(forms.ModelForm): def __init__(self, *args, **kwargs): super(CohortForm, self).__init__(*args, **kwargs) - self.fields['timezone'] = forms.ChoiceField(choices=timezones) + self.fields["timezone"] = forms.ChoiceField(choices=timezones) -@admin.display(description='Add cohort slug to active campaign') +@admin.display(description="Add cohort slug to active campaign") def add_cohort_slug_to_active_campaign(modeladmin, request, queryset): cohorts = queryset.all() for cohort in cohorts: @@ -191,15 +202,20 @@ def get_attendancy_logs(modeladmin, request, queryset): cohort_actions = [ - sync_tasks, mark_as_ended, mark_as_started, mark_as_inactive, sync_timeslots, add_cohort_slug_to_active_campaign, - get_attendancy_logs + sync_tasks, + mark_as_ended, + 
mark_as_started, + mark_as_inactive, + sync_timeslots, + add_cohort_slug_to_active_campaign, + get_attendancy_logs, ] -if os.getenv('ENVIRONMENT') == 'DEVELOPMENT': +if os.getenv("ENVIRONMENT") == "DEVELOPMENT": pass -@admin.display(description='Link randomly relations to cohorts') +@admin.display(description="Link randomly relations to cohorts") def link_randomly_relations_to_cohorts(modeladmin, request, queryset): academies_instances = {} schedules_instances = {} @@ -211,12 +227,15 @@ def link_randomly_relations_to_cohorts(modeladmin, request, queryset): for cohort in cohorts: if not cohort.syllabus_version: - if cohort.academy.id in academies_instances and 'syllabus_versions' in academies_instances[ - cohort.academy.id]: - syllabus_versions = academies_instances[cohort.academy.id]['syllabus_versions'] + if ( + cohort.academy.id in academies_instances + and "syllabus_versions" in academies_instances[cohort.academy.id] + ): + syllabus_versions = academies_instances[cohort.academy.id]["syllabus_versions"] else: syllabus_versions = SyllabusVersion.objects.filter( - Q(syllabus__academy_owner=cohort.academy) | Q(syllabus__private=False)) + Q(syllabus__academy_owner=cohort.academy) | Q(syllabus__private=False) + ) if not syllabus_versions: continue @@ -249,11 +268,11 @@ def link_randomly_relations_to_cohorts(modeladmin, request, queryset): @admin.register(Cohort) class CohortAdmin(admin.ModelAdmin): form = CohortForm - search_fields = ['slug', 'name', 'academy__city__name'] - list_display = ('id', 'slug', 'stage', 'name', 'kickoff_date', 'syllabus_version', 'schedule', 'academy') - list_filter = ['stage', 'academy__slug', 'schedule__name', 'syllabus_version__version'] + search_fields = ["slug", "name", "academy__city__name"] + list_display = ("id", "slug", "stage", "name", "kickoff_date", "syllabus_version", "schedule", "academy") + list_filter = ["stage", "academy__slug", "schedule__name", "syllabus_version__version"] - if os.getenv('ENV') == 'development': + if os.getenv("ENV") == "development": actions = cohort_actions + [link_randomly_relations_to_cohorts] else: actions = cohort_actions @@ -262,10 +281,10 @@ def academy_name(self, obj): return obj.academy.name def certificate_name(self, obj): - return obj.certificate.slug + '.v' + str(obj.version) + return obj.certificate.slug + ".v" + str(obj.version) -@admin.display(description='Sync from Github') +@admin.display(description="Sync from Github") def pull_from_github(modeladmin, request, queryset): all_syllabus = queryset.all() @@ -273,66 +292,84 @@ def pull_from_github(modeladmin, request, queryset): try: credentials = request.user.credentialsgithub except Exception: - logger.error('No github credentials found') - messages.error(request, 'No github credentials found') + logger.error("No github credentials found") + messages.error(request, "No github credentials found") else: for syl in all_syllabus: - #/repos/:owner/:repo/contents/:path - regex = r'github\.com\/([0-9a-zA-Z-]+)\/([0-9a-zA-Z-]+)\/blob\/([0-9a-zA-Z-]+)\/([0-9a-zA-Z-\/\.]+)' + # /repos/:owner/:repo/contents/:path + regex = r"github\.com\/([0-9a-zA-Z-]+)\/([0-9a-zA-Z-]+)\/blob\/([0-9a-zA-Z-]+)\/([0-9a-zA-Z-\/\.]+)" matches = re.findall(regex, syl.github_url) if matches is None: - logger.error('Invalid github url, make sure it follows this format: ' - 'https://github.com/:user/:repo/blob/:branch/:path') + logger.error( + "Invalid github url, make sure it follows this format: " + "https://github.com/:user/:repo/blob/:branch/:path" + ) messages.error( - request, 'Invalid github 
url, make sure it follows this format: ' - 'https://github.com/:user/:repo/blob/:branch/:path') + request, + "Invalid github url, make sure it follows this format: " + "https://github.com/:user/:repo/blob/:branch/:path", + ) continue - headers = {'Authorization': f'token {credentials.token}'} + headers = {"Authorization": f"token {credentials.token}"} response = requests.get( - f'https://api.github.com/repos/{matches[0][0]}/{matches[0][1]}/contents/{matches[0][3]}?ref=' + - matches[0][2], + f"https://api.github.com/repos/{matches[0][0]}/{matches[0][1]}/contents/{matches[0][3]}?ref=" + + matches[0][2], headers=headers, - timeout=2) + timeout=2, + ) if response.status_code == 200: _file = response.json() - syl.json = json.loads(base64.b64decode(_file['content']).decode()) + syl.json = json.loads(base64.b64decode(_file["content"]).decode()) syl.save() else: - logger.error(f'Error {response.status_code} updating syllabus from github, make sure you have the ' - 'correct access rights to the repository') + logger.error( + f"Error {response.status_code} updating syllabus from github, make sure you have the " + "correct access rights to the repository" + ) messages.error( - request, f'Error {response.status_code} updating syllabus from github, make sure you have the ' - 'correct access rights to the repository') + request, + f"Error {response.status_code} updating syllabus from github, make sure you have the " + "correct access rights to the repository", + ) @admin.register(Syllabus) class SyllabusAdmin(admin.ModelAdmin): - list_display = ('slug', 'name', 'academy_owner', 'private', 'github_url', 'duration_in_hours', 'duration_in_days', - 'week_hours', 'logo') + list_display = ( + "slug", + "name", + "academy_owner", + "private", + "github_url", + "duration_in_hours", + "duration_in_days", + "week_hours", + "logo", + ) actions = [pull_from_github] def test_syllabus_integrity(modeladmin, request, queryset): syllabus_versions = queryset.all() for version in syllabus_versions: - version.integrity_status = 'PENDING' + version.integrity_status = "PENDING" version.integrity_check_at = timezone.now() version.save() try: report = test_syllabus(version.json) version.integrity_report = report.serialize() if report.http_status() == 200: - version.integrity_status = 'OK' + version.integrity_status = "OK" else: - version.integrity_status = 'ERROR' + version.integrity_status = "ERROR" version.save() except Exception as e: - version.integrity_report = {'errors': [str(e)], 'warnings': []} - version.integrity_status = 'ERROR' + version.integrity_report = {"errors": [str(e)], "warnings": []} + version.integrity_status = "ERROR" version.save() raise e @@ -345,9 +382,9 @@ def async_test_syllabus_integrity(modeladmin, request, queryset): @admin.register(SyllabusVersion) class SyllabusVersionAdmin(admin.ModelAdmin): - list_display = ('version', 'syllabus', 'integrity', 'owner') - search_fields = ['syllabus__name', 'syllabus__slug'] - list_filter = ['syllabus__private', 'syllabus__academy_owner'] + list_display = ("version", "syllabus", "integrity", "owner") + search_fields = ["syllabus__name", "syllabus__slug"] + list_filter = ["syllabus__private", "syllabus__academy_owner"] actions = [ test_syllabus_integrity, async_test_syllabus_integrity, @@ -357,40 +394,42 @@ def owner(self, obj): if obj.syllabus.academy_owner is None: return format_html('No academy owner') - return format_html(f'{obj.syllabus.academy_owner.name}') + return format_html(f"{obj.syllabus.academy_owner.name}") def integrity(self, obj): colors = { - 
'PENDING': 'bg-warning', - 'OK': 'bg-success', - 'ERROR': 'bg-error', - 'WARNING': 'bg-warning', + "PENDING": "bg-warning", + "OK": "bg-success", + "ERROR": "bg-error", + "WARNING": "bg-warning", } - when = 'Never tested' + when = "Never tested" if obj.integrity_check_at is not None: - when = from_now(obj.integrity_check_at) + ' ago' - return format_html(f""" + when = from_now(obj.integrity_check_at) + " ago" + return format_html( + f"""

{obj.integrity_status}
{when} -
""") +""" + ) class CohortTimeSlotForm(forms.ModelForm): def __init__(self, *args, **kwargs): super(CohortTimeSlotForm, self).__init__(*args, **kwargs) - self.fields['timezone'] = forms.ChoiceField(choices=timezones) + self.fields["timezone"] = forms.ChoiceField(choices=timezones) @admin.register(CohortTimeSlot) class CohortTimeSlotAdmin(admin.ModelAdmin): form = CohortTimeSlotForm - list_display = ('cohort', 'timezone', 'starting_at', 'ending_at', 'recurrent', 'recurrency_type') - list_filter = ['cohort__academy__slug', 'timezone', 'recurrent', 'recurrency_type'] - search_fields = ['cohort__slug', 'timezone', 'cohort__name', 'cohort__academy__city__name'] + list_display = ("cohort", "timezone", "starting_at", "ending_at", "recurrent", "recurrency_type") + list_filter = ["cohort__academy__slug", "timezone", "recurrent", "recurrency_type"] + search_fields = ["cohort__slug", "timezone", "cohort__name", "cohort__academy__city__name"] -@admin.display(description='Replicate same timeslots in all academies') +@admin.display(description="Replicate same timeslots in all academies") def replicate_in_all(modeladmin, request, queryset: QuerySet[SyllabusSchedule]): from django.contrib import messages @@ -411,15 +450,15 @@ def replicate_in_all(modeladmin, request, queryset: QuerySet[SyllabusSchedule]): continue schedule_kwargs = { - 'academy': academy, - 'name': syllabus_schedule.name, - 'schedule_type': syllabus_schedule.schedule_type, - 'description': syllabus_schedule.description, - 'syllabus': syllabus_schedule.syllabus, + "academy": academy, + "name": syllabus_schedule.name, + "schedule_type": syllabus_schedule.schedule_type, + "description": syllabus_schedule.description, + "syllabus": syllabus_schedule.syllabus, } if SyllabusSchedule.objects.filter(**schedule_kwargs).first(): - already_exist_schedule_name.append(f'{academy.slug}/{syllabus_schedule.name}') + already_exist_schedule_name.append(f"{academy.slug}/{syllabus_schedule.name}") continue replica_of_schedule = SyllabusSchedule(**schedule_kwargs) @@ -428,42 +467,50 @@ def replicate_in_all(modeladmin, request, queryset: QuerySet[SyllabusSchedule]): timeslots = SyllabusScheduleTimeSlot.objects.filter(schedule=syllabus_schedule) for timeslot in timeslots: - replica_of_timeslot = SyllabusScheduleTimeSlot(recurrent=timeslot.recurrent, - starting_at=timeslot.starting_at, - ending_at=timeslot.ending_at, - schedule=replica_of_schedule, - timezone=academy.timezone) + replica_of_timeslot = SyllabusScheduleTimeSlot( + recurrent=timeslot.recurrent, + starting_at=timeslot.starting_at, + ending_at=timeslot.ending_at, + schedule=replica_of_schedule, + timezone=academy.timezone, + ) replica_of_timeslot.save(force_insert=True) if without_timezone_slugs and already_exist_schedule_name: messages.add_message( - request, messages.ERROR, + request, + messages.ERROR, f'The following academies ({", ".join(without_timezone_slugs)}) was skipped because it doesn\'t ' - 'have a timezone assigned and the following syllabus schedules ' - f'({", ".join(already_exist_schedule_name)}) was skipped because it already exists') + "have a timezone assigned and the following syllabus schedules " + f'({", ".join(already_exist_schedule_name)}) was skipped because it already exists', + ) elif without_timezone_slugs: messages.add_message( - request, messages.ERROR, + request, + messages.ERROR, f'The following academies ({", ".join(without_timezone_slugs)}) was skipped because it doesn\'t ' - 'have a timezone assigned') + "have a timezone assigned", + ) elif 
already_exist_schedule_name: messages.add_message( - request, messages.ERROR, + request, + messages.ERROR, f'The following syllabus schedules ({", ".join(already_exist_schedule_name)}) was skipped ' - 'because it already exists') + "because it already exists", + ) else: - messages.add_message(request, messages.INFO, 'All academies in sync with those syllabus schedules') + messages.add_message(request, messages.INFO, "All academies in sync with those syllabus schedules") @admin.register(SyllabusSchedule) class SyllabusScheduleAdmin(admin.ModelAdmin): - list_display = ('name', 'academy') - list_filter = ['name', 'academy__slug', 'schedule_type'] - search_fields = ['name', 'academy__slug', 'schedule_type'] + list_display = ("name", "academy") + list_filter = ["name", "academy__slug", "schedule_type"] + search_fields = ["name", "academy__slug", "schedule_type"] actions = [replicate_in_all] @@ -471,12 +518,12 @@ class SyllabusScheduleTimeSlotForm(forms.ModelForm): def __init__(self, *args, **kwargs): super(SyllabusScheduleTimeSlotForm, self).__init__(*args, **kwargs) - self.fields['timezone'] = forms.ChoiceField(choices=timezones) + self.fields["timezone"] = forms.ChoiceField(choices=timezones) @admin.register(SyllabusScheduleTimeSlot) class SyllabusScheduleTimeSlotAdmin(admin.ModelAdmin): form = SyllabusScheduleTimeSlotForm - list_display = ('id', 'schedule', 'timezone', 'starting_at', 'ending_at', 'recurrent', 'recurrency_type') - list_filter = ['schedule__name', 'timezone', 'recurrent', 'recurrency_type'] - search_fields = ['schedule__name', 'timezone', 'schedule__name'] + list_display = ("id", "schedule", "timezone", "starting_at", "ending_at", "recurrent", "recurrency_type") + list_filter = ["schedule__name", "timezone", "recurrent", "recurrency_type"] + search_fields = ["schedule__name", "timezone", "schedule__name"] diff --git a/breathecode/admissions/apps.py b/breathecode/admissions/apps.py index ab659e66c..4b064e492 100644 --- a/breathecode/admissions/apps.py +++ b/breathecode/admissions/apps.py @@ -2,7 +2,7 @@ class AcademyConfig(AppConfig): - name = 'breathecode.admissions' + name = "breathecode.admissions" def ready(self): from . 
import receivers # noqa: F401 diff --git a/breathecode/admissions/caches.py b/breathecode/admissions/caches.py index 9ae1736ab..edc1950e0 100644 --- a/breathecode/admissions/caches.py +++ b/breathecode/admissions/caches.py @@ -3,7 +3,7 @@ from breathecode.authenticate.models import ProfileAcademy from django.contrib.auth.models import User -MODULE = 'admissions' +MODULE = "admissions" class CohortCache(Cache): diff --git a/breathecode/admissions/management/commands/add_legacy_to_all_users.py b/breathecode/admissions/management/commands/add_legacy_to_all_users.py index 7be6ba0f6..e554c2116 100644 --- a/breathecode/admissions/management/commands/add_legacy_to_all_users.py +++ b/breathecode/admissions/management/commands/add_legacy_to_all_users.py @@ -3,13 +3,13 @@ class Command(BaseCommand): - help = 'Add Legacy group to all current users' + help = "Add Legacy group to all current users" def handle(self, *args, **options): try: - legacy_group = Group.objects.filter(name='Legacy').first() + legacy_group = Group.objects.filter(name="Legacy").first() for user in User.objects.all(): if legacy_group not in user.groups.all(): user.groups.add(legacy_group) except Exception: - self.stderr.write('Failed to add the Legacy group to all users') + self.stderr.write("Failed to add the Legacy group to all users") diff --git a/breathecode/admissions/management/commands/backup.py b/breathecode/admissions/management/commands/backup.py index 8d6064311..58a7189ac 100644 --- a/breathecode/admissions/management/commands/backup.py +++ b/breathecode/admissions/management/commands/backup.py @@ -12,34 +12,35 @@ from pathlib import Path from decimal import Decimal -PROJECT = 'breathecode' +PROJECT = "breathecode" MODULES = [ - x.replace('breathecode.', '') for x in INSTALLED_APPS - if x.startswith('breathecode.') and x != 'breathecode.admin_styles' + x.replace("breathecode.", "") + for x in INSTALLED_APPS + if x.startswith("breathecode.") and x != "breathecode.admin_styles" ] def db_backup_bucket(): - return os.getenv('DB_BACKUP_BUCKET') + return os.getenv("DB_BACKUP_BUCKET") class Command(BaseCommand, DatetimeMixin): - help = 'Backup models' + help = "Backup models" def add_arguments(self, parser): - parser.add_argument('mode', type=str, choices=['storage', 'console', 'bucket']) - parser.add_argument('module', nargs='?', type=str, default='') - parser.add_argument('model', nargs='?', type=str, default='') + parser.add_argument("mode", type=str, choices=["storage", "console", "bucket"]) + parser.add_argument("module", nargs="?", type=str, default="") + parser.add_argument("model", nargs="?", type=str, default="") def handle(self, *args, **options): self.all_model_names = [] - if not 'mode' in options: - return self.stderr.write(self.style.ERROR('missing mode arguments')) + if not "mode" in options: + return self.stderr.write(self.style.ERROR("missing mode arguments")) - module_name = options['module'] - model_name = options['model'] - self.mode = options['mode'] + module_name = options["module"] + model_name = options["model"] + self.mode = options["mode"] if module_name and model_name: self.backup(module_name, model_name) @@ -54,7 +55,7 @@ def handle(self, *args, **options): self.backup(module_name, model_name) def find_modules(self, module_name): - path = f'breathecode.{module_name}.models' + path = f"breathecode.{module_name}.models" module = importlib.import_module(path) models = [] @@ -66,10 +67,10 @@ def find_modules(self, module_name): if not issubclass(model_cls, Model): continue - if (hasattr(model_cls, 
'Meta') and hasattr(model_cls.Meta, 'abstract') and model_cls.__name__ != 'User'): + if hasattr(model_cls, "Meta") and hasattr(model_cls.Meta, "abstract") and model_cls.__name__ != "User": continue - if (hasattr(model_cls, 'Meta') and hasattr(model_cls.Meta, 'proxy') and model_cls.__name__ != 'User'): + if hasattr(model_cls, "Meta") and hasattr(model_cls.Meta, "proxy") and model_cls.__name__ != "User": continue if model_cls.__name__ in self.all_model_names: @@ -83,30 +84,32 @@ def find_modules(self, module_name): def backup(self, module_name, model_name): self.module_name = module_name self.model_name = model_name - path = f'breathecode.{self.module_name}.models' + path = f"breathecode.{self.module_name}.models" try: module = importlib.import_module(path) except ModuleNotFoundError: return self.stderr.write( - self.style.ERROR(f'module `{self.module_name}` not found or it not have models too')) + self.style.ERROR(f"module `{self.module_name}` not found or it not have models too") + ) if not hasattr(module, self.model_name): return self.stderr.write( - self.style.ERROR(f'module `{self.module_name}` not have a model called `{self.model_name}`')) + self.style.ERROR(f"module `{self.module_name}` not have a model called `{self.model_name}`") + ) model_cls = getattr(module, self.model_name) results = model_cls.objects.all() dicts = [self.prepare_data(x) for x in results] try: - if self.mode == 'storage': + if self.mode == "storage": self.backup_in_storage(json.dumps(dicts)) - elif self.mode == 'console': + elif self.mode == "console": self.backup_in_console(json.dumps(dicts)) - elif self.mode == 'bucket': + elif self.mode == "bucket": self.backup_in_bucket(json.dumps(dicts)) except Exception as e: @@ -116,7 +119,7 @@ def backup(self, module_name, model_name): def prepare_data(self, model): data = vars(model) - private_attrs = [x for x in data if x.startswith('_')] + private_attrs = [x for x in data if x.startswith("_")] datetime_attrs = [x for x in data if isinstance(data[x], datetime)] decimal_attrs = [x for x in data if isinstance(data[x], Decimal)] timedelta_attrs = [x for x in data if isinstance(data[x], timedelta)] @@ -137,13 +140,13 @@ def prepare_data(self, model): def backup_in_storage(self, data): current_path = Path(os.getcwd()) - backup_path = current_path / 'backup' - file_path = current_path / 'backup' / f'{self.module_name}.{self.model_name.lower()}.json' + backup_path = current_path / "backup" + file_path = current_path / "backup" / f"{self.module_name}.{self.model_name.lower()}.json" - if not os.path.exists(current_path / 'backup'): + if not os.path.exists(current_path / "backup"): os.mkdir(backup_path) - with open(file_path, 'w') as file: + with open(file_path, "w") as file: file.write(data) def backup_in_console(self, data): @@ -153,7 +156,7 @@ def backup_in_bucket(self, data): from ....services.google_cloud import Storage storage = Storage() - name = f'{self.module_name}.{self.model_name.lower()}.json' + name = f"{self.module_name}.{self.model_name.lower()}.json" cloud_file = storage.file(db_backup_bucket(), name) cloud_file.upload(data) diff --git a/breathecode/admissions/management/commands/delete_admissions.py b/breathecode/admissions/management/commands/delete_admissions.py index 5a27dbbb5..c4880fc82 100644 --- a/breathecode/admissions/management/commands/delete_admissions.py +++ b/breathecode/admissions/management/commands/delete_admissions.py @@ -2,23 +2,23 @@ from django.core.management.base import BaseCommand from ...models import User -HOST = 
os.environ.get('OLD_BREATHECODE_API') -DATETIME_FORMAT = '%Y-%m-%d' +HOST = os.environ.get("OLD_BREATHECODE_API") +DATETIME_FORMAT = "%Y-%m-%d" class Command(BaseCommand): - help = 'Sync academies from old breathecode' + help = "Sync academies from old breathecode" def add_arguments(self, parser): - parser.add_argument('entity', type=str) + parser.add_argument("entity", type=str) def handle(self, *args, **options): try: - func = getattr(self, options['entity'], 'entity_not_found') + func = getattr(self, options["entity"], "entity_not_found") except TypeError: print(f'Delete method for {options["entity"]} no Found!') func(options) def students(self, options): - User.objects.exclude(username='alesanchezr').delete() + User.objects.exclude(username="alesanchezr").delete() diff --git a/breathecode/admissions/management/commands/delete_duplicates.py b/breathecode/admissions/management/commands/delete_duplicates.py index 021708120..f2584d187 100644 --- a/breathecode/admissions/management/commands/delete_duplicates.py +++ b/breathecode/admissions/management/commands/delete_duplicates.py @@ -2,12 +2,12 @@ from django.core.management.base import BaseCommand from ...models import CohortUser -HOST = os.environ.get('OLD_BREATHECODE_API') -DATETIME_FORMAT = '%Y-%m-%d' +HOST = os.environ.get("OLD_BREATHECODE_API") +DATETIME_FORMAT = "%Y-%m-%d" class Command(BaseCommand): - help = 'Delete duplicate cohort users imported from old breathecode' + help = "Delete duplicate cohort users imported from old breathecode" def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) @@ -17,16 +17,17 @@ def handle(self, *args, **options): result = [] # collector - qs = CohortUser.objects.order_by('id') - for user_id, cohort_id in set(qs.values_list('user__id', 'cohort__id')): + qs = CohortUser.objects.order_by("id") + for user_id, cohort_id in set(qs.values_list("user__id", "cohort__id")): result.append( - qs.filter(user__id=user_id, cohort__id=cohort_id).values('id', 'user__id', 'cohort__id').first()) + qs.filter(user__id=user_id, cohort__id=cohort_id).values("id", "user__id", "cohort__id").first() + ) # remove dups for data in result: - id = data['id'] - user = data['user__id'] - cohort = data['cohort__id'] + id = data["id"] + user = data["user__id"] + cohort = data["cohort__id"] # # first graduated students # pref = CohortUser.objects.filter(user__id=user, cohort__id=cohort, @@ -55,4 +56,4 @@ def handle(self, *args, **options): # bulk delete but cohort user with that id (CohortUser.objects.filter(user__id=user, cohort__id=cohort).exclude(id=id).delete()) - self.stdout.write(self.style.SUCCESS('Remove duplicates from cohort users has ended')) + self.stdout.write(self.style.SUCCESS("Remove duplicates from cohort users has ended")) diff --git a/breathecode/admissions/management/commands/migrate_or_delete_syllabus_schedule.py b/breathecode/admissions/management/commands/migrate_or_delete_syllabus_schedule.py index 81317b274..3a80d189a 100644 --- a/breathecode/admissions/management/commands/migrate_or_delete_syllabus_schedule.py +++ b/breathecode/admissions/management/commands/migrate_or_delete_syllabus_schedule.py @@ -1,24 +1,24 @@ import os from django.core.management.base import BaseCommand from django.db.models import Q -from breathecode.admissions.models import (Cohort, SyllabusSchedule, SyllabusScheduleTimeSlot) +from breathecode.admissions.models import Cohort, SyllabusSchedule, SyllabusScheduleTimeSlot def db_backup_bucket(): - return os.getenv('DB_BACKUP_BUCKET') + return os.getenv("DB_BACKUP_BUCKET") 
class Command(BaseCommand): - help = 'Delete duplicate cohort users imported from old breathecode' + help = "Delete duplicate cohort users imported from old breathecode" def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) def handle(self, *args, **options): cache = {} - for cohort in Cohort.objects.filter(schedule__academy=None, - schedule__isnull=False, - academy__timezone__isnull=False): + for cohort in Cohort.objects.filter( + schedule__academy=None, schedule__isnull=False, academy__timezone__isnull=False + ): if cohort.schedule.id not in cache: cache[cohort.schedule.id] = cohort.schedule @@ -29,11 +29,11 @@ def handle(self, *args, **options): pass schedule_kwargs = { - 'academy': cohort.academy, - 'name': schedule.name, - 'schedule_type': schedule.schedule_type, - 'description': schedule.description, - 'syllabus': schedule.syllabus, + "academy": cohort.academy, + "name": schedule.name, + "schedule_type": schedule.schedule_type, + "description": schedule.description, + "syllabus": schedule.syllabus, } replica_of_schedule = SyllabusSchedule(**schedule_kwargs) @@ -42,11 +42,13 @@ def handle(self, *args, **options): timeslots = SyllabusScheduleTimeSlot.objects.filter(schedule=schedule) for timeslot in timeslots: - replica_of_timeslot = SyllabusScheduleTimeSlot(recurrent=timeslot.recurrent, - starting_at=timeslot.starting_at, - ending_at=timeslot.ending_at, - schedule=replica_of_schedule, - timezone=cohort.timezone or cohort.academy.timezone) + replica_of_timeslot = SyllabusScheduleTimeSlot( + recurrent=timeslot.recurrent, + starting_at=timeslot.starting_at, + ending_at=timeslot.ending_at, + schedule=replica_of_schedule, + timezone=cohort.timezone or cohort.academy.timezone, + ) replica_of_timeslot.save() @@ -60,9 +62,10 @@ def handle(self, *args, **options): schedule.save() SyllabusScheduleTimeSlot.objects.filter(schedule=schedule).update( - timezone=cohort.timezone or cohort.academy.timezone) + timezone=cohort.timezone or cohort.academy.timezone + ) cache[schedule.id] = schedule SyllabusSchedule.objects.filter(Q(academy=None) | Q(academy__timezone=None)).delete() - self.stdout.write(self.style.SUCCESS('Done!')) + self.stdout.write(self.style.SUCCESS("Done!")) diff --git a/breathecode/admissions/management/commands/migrate_syllabus_feature_models.py b/breathecode/admissions/management/commands/migrate_syllabus_feature_models.py index 605fb112f..c269742b9 100644 --- a/breathecode/admissions/management/commands/migrate_syllabus_feature_models.py +++ b/breathecode/admissions/management/commands/migrate_syllabus_feature_models.py @@ -7,11 +7,11 @@ def db_backup_bucket(): - return os.getenv('DB_BACKUP_BUCKET') + return os.getenv("DB_BACKUP_BUCKET") class Command(BaseCommand): - help = 'Delete duplicate cohort users imported from old breathecode' + help = "Delete duplicate cohort users imported from old breathecode" def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) @@ -27,44 +27,45 @@ def print_debug_info(self): from pprint import pprint if self.cohorts: - print('') - print('cohorts') + print("") + print("cohorts") pprint(self.cohorts[0]) if self.syllabus: - del self.syllabus[0]['json'] - print('') - print('syllabus') + del self.syllabus[0]["json"] + print("") + print("syllabus") pprint(self.syllabus[0]) if self.certificates: - print('') - print('certificates') + print("") + print("certificates") pprint(self.certificates[0]) if self.specialties: - print('') - print('specialties') + print("") + print("specialties") pprint(self.specialties[0]) def 
get_root_path(self): - if not hasattr(self, 'root_path'): + if not hasattr(self, "root_path"): self.root_path = Path(os.getcwd()) return self.root_path def get_backups(self): - self.cohorts = self.get_json_model_from_bucket('admissions', 'cohort') - self.syllabus = self.get_json_model_from_bucket('admissions', 'syllabus') - self.certificates = self.get_json_model_from_bucket('admissions', 'certificate') - self.specialties = self.get_json_model_from_bucket('certificate', 'specialty') + self.cohorts = self.get_json_model_from_bucket("admissions", "cohort") + self.syllabus = self.get_json_model_from_bucket("admissions", "syllabus") + self.certificates = self.get_json_model_from_bucket("admissions", "certificate") + self.specialties = self.get_json_model_from_bucket("certificate", "specialty") def get_json_model_from_bucket(self, module_name, model_name): from breathecode.services import Storage + storage = Storage() - file_name = f'{module_name}.{model_name.lower()}.json' - print('--->', db_backup_bucket(), file_name) + file_name = f"{module_name}.{model_name.lower()}.json" + print("--->", db_backup_bucket(), file_name) file = storage.file(db_backup_bucket(), file_name) content = file.download() @@ -80,7 +81,7 @@ def get_json_model_from_bucket(self, module_name, model_name): # cohort['id'] = new_cohort.id def get_json_model_from_backup(self, root_path, module_name, model_name): - with open(root_path / 'backup' / f'{module_name}.{model_name.lower()}.json', 'r') as file: + with open(root_path / "backup" / f"{module_name}.{model_name.lower()}.json", "r") as file: result = json.load(file) return result @@ -89,20 +90,20 @@ def fix_cohorts(self): cohort_instances = {} for cohort in self.cohorts: - if not cohort['syllabus_id']: + if not cohort["syllabus_id"]: continue - syllabus = [x for x in self.syllabus if x['id'] == cohort['syllabus_id']] + syllabus = [x for x in self.syllabus if x["id"] == cohort["syllabus_id"]] - syllabus[0]['certificate_id'] + syllabus[0]["certificate_id"] - certificates = [x for x in self.certificates if x['id'] == syllabus[0]['certificate_id']] + certificates = [x for x in self.certificates if x["id"] == syllabus[0]["certificate_id"]] - version = syllabus[0]['version'] - slug = certificates[0]['slug'] + version = syllabus[0]["version"] + slug = certificates[0]["slug"] - x = Cohort.objects.filter(id=cohort['id']).first() - x.syllabus_version = self.syllabus_version_instances[f'{slug}.v{version}'] + x = Cohort.objects.filter(id=cohort["id"]).first() + x.syllabus_version = self.syllabus_version_instances[f"{slug}.v{version}"] x.save() self.cohort_instances = cohort_instances @@ -112,7 +113,7 @@ def fix_specialties(self): for specialty in self.specialties: syllabus = [ - self.syllabus_instances[x['slug']] for x in self.certificates if x['id'] == specialty['certificate_id'] + self.syllabus_instances[x["slug"]] for x in self.certificates if x["id"] == specialty["certificate_id"] ] if syllabus: @@ -120,11 +121,11 @@ def fix_specialties(self): else: syllabus = None - specialty_instances[specialty['id']] = Specialty.objects.filter(id=specialty['id']).first() + specialty_instances[specialty["id"]] = Specialty.objects.filter(id=specialty["id"]).first() if syllabus: - specialty_instances[specialty['id']].syllabus = syllabus - specialty_instances[specialty['id']].save() + specialty_instances[specialty["id"]].syllabus = syllabus + specialty_instances[specialty["id"]].save() self.specialty_instances = specialty_instances @@ -135,31 +136,31 @@ def fix_certificates(self): syllabus_instances 
= {} for certificate in self.certificates: - syllabus_versions = [x for x in self.syllabus if certificate['id'] == x['certificate_id']] + syllabus_versions = [x for x in self.syllabus if certificate["id"] == x["certificate_id"]] kwargs = {} if syllabus_versions: kwargs = { - 'academy_owner': None, - 'private': syllabus_versions[0]['private'], - 'github_url': syllabus_versions[0]['github_url'], + "academy_owner": None, + "private": syllabus_versions[0]["private"], + "github_url": syllabus_versions[0]["github_url"], } - academy_id = syllabus_versions[0]['academy_owner_id'] + academy_id = syllabus_versions[0]["academy_owner_id"] if academy_id: - kwargs['academy_owner'] = Academy.objects.filter(id=academy_id).first() + kwargs["academy_owner"] = Academy.objects.filter(id=academy_id).first() syllabus = Syllabus( - slug=certificate['slug'], - name=certificate['name'], - duration_in_hours=certificate['duration_in_hours'], - duration_in_days=certificate['duration_in_days'], - week_hours=certificate['week_hours'], - logo=certificate['logo'], + slug=certificate["slug"], + name=certificate["name"], + duration_in_hours=certificate["duration_in_hours"], + duration_in_days=certificate["duration_in_days"], + week_hours=certificate["week_hours"], + logo=certificate["logo"], **kwargs, ) syllabus.save() - syllabus_instances[certificate['slug']] = syllabus + syllabus_instances[certificate["slug"]] = syllabus self.syllabus_instances = syllabus_instances @@ -170,13 +171,13 @@ def fix_syllabus(self): syllabus_version_instances = {} for certificate in self.certificates: - syllabus_versions = [x for x in self.syllabus if certificate['id'] == x['certificate_id']] + syllabus_versions = [x for x in self.syllabus if certificate["id"] == x["certificate_id"]] for syllabus_version in syllabus_versions: x = SyllabusVersion( - version=syllabus_version['version'], - json=syllabus_version['json'], - syllabus=self.syllabus_instances[certificate['slug']], + version=syllabus_version["version"], + json=syllabus_version["json"], + syllabus=self.syllabus_instances[certificate["slug"]], ) x.save() diff --git a/breathecode/admissions/management/commands/remove_ending_date.py b/breathecode/admissions/management/commands/remove_ending_date.py index aab0433fc..cd3342c32 100644 --- a/breathecode/admissions/management/commands/remove_ending_date.py +++ b/breathecode/admissions/management/commands/remove_ending_date.py @@ -4,7 +4,7 @@ class Command(BaseCommand): - help = 'Remove the ending_date from the never ending cohorts' + help = "Remove the ending_date from the never ending cohorts" def handle(self, *args, **options): for element in Cohort.objects.filter(~Q(ending_date=None), never_ends=True): diff --git a/breathecode/admissions/management/commands/sync_admissions.py b/breathecode/admissions/management/commands/sync_admissions.py index 97b3330f0..508b36351 100644 --- a/breathecode/admissions/management/commands/sync_admissions.py +++ b/breathecode/admissions/management/commands/sync_admissions.py @@ -5,67 +5,67 @@ from ...models import Academy, SyllabusSchedule, Cohort, User, CohortUser, Syllabus from breathecode.authenticate.models import Profile -HOST_ASSETS = 'https://assets.breatheco.de/apis' -API_URL = os.getenv('API_URL', '') -HOST_ASSETS = 'https://assets.breatheco.de/apis' -HOST = os.environ.get('OLD_BREATHECODE_API') -DATETIME_FORMAT = '%Y-%m-%d' +HOST_ASSETS = "https://assets.breatheco.de/apis" +API_URL = os.getenv("API_URL", "") +HOST_ASSETS = "https://assets.breatheco.de/apis" +HOST = os.environ.get("OLD_BREATHECODE_API") 
+DATETIME_FORMAT = "%Y-%m-%d" class Command(BaseCommand): - help = 'Sync academies from old breathecode' + help = "Sync academies from old breathecode" def add_arguments(self, parser): - parser.add_argument('entity', type=str) + parser.add_argument("entity", type=str) parser.add_argument( - '--override', - action='store_true', - help='Delete and add again', + "--override", + action="store_true", + help="Delete and add again", ) - parser.add_argument('--limit', action='store', dest='limit', type=int, default=0, help='How many to import') + parser.add_argument("--limit", action="store", dest="limit", type=int, default=0, help="How many to import") def handle(self, *args, **options): try: - func = getattr(self, options['entity'], 'entity_not_found') + func = getattr(self, options["entity"], "entity_not_found") except TypeError: print(f'Sync method for {options["entity"]} no Found!') func(options) def academies(self, options): - response = requests.get(f'{HOST}/locations/', timeout=2) + response = requests.get(f"{HOST}/locations/", timeout=2) locations = response.json() - for loc in locations['data']: - aca = Academy.objects.filter(slug=loc['slug']).first() + for loc in locations["data"]: + aca = Academy.objects.filter(slug=loc["slug"]).first() if aca is None: a = Academy( - slug=loc['slug'], - active_campaign_slug=loc['slug'], - name=loc['name'], - street_address=loc['address'], + slug=loc["slug"], + active_campaign_slug=loc["slug"], + name=loc["name"], + street_address=loc["address"], ) a.save() - self.stdout.write(self.style.SUCCESS(f'Academy {a.slug} added')) + self.stdout.write(self.style.SUCCESS(f"Academy {a.slug} added")) else: - self.stdout.write(self.style.NOTICE(f'Academy {aca.slug} skipped')) + self.stdout.write(self.style.NOTICE(f"Academy {aca.slug} skipped")) def certificates(self, options): - response = requests.get(f'{HOST}/profiles/', timeout=2) + response = requests.get(f"{HOST}/profiles/", timeout=2) profiles = response.json() - for pro in profiles['data']: - cert = SyllabusSchedule.objects.filter(slug=pro['slug']).first() + for pro in profiles["data"]: + cert = SyllabusSchedule.objects.filter(slug=pro["slug"]).first() if cert is None: cert = SyllabusSchedule( - slug=pro['slug'], - name=pro['name'], - description=pro['description'], - duration_in_hours=pro['duration_in_hours'], - week_hours=pro['week_hours'], - duration_in_days=pro['duration_in_days'], - logo=pro['logo'], + slug=pro["slug"], + name=pro["name"], + description=pro["description"], + duration_in_hours=pro["duration_in_hours"], + week_hours=pro["week_hours"], + duration_in_days=pro["duration_in_days"], + logo=pro["logo"], ) cert.save() self.stdout.write(self.style.SUCCESS(f"Certificate {pro['slug']} added")) @@ -74,29 +74,30 @@ def certificates(self, options): def syllabus(self, options): - response = requests.get(f'{HOST_ASSETS}/syllabus/all', timeout=2) + response = requests.get(f"{HOST_ASSETS}/syllabus/all", timeout=2) syllabus = response.json() for syl in syllabus: - certificate_slug, version = syl['slug'].split('.') + certificate_slug, version = syl["slug"].split(".") cert = SyllabusSchedule.objects.filter(slug=certificate_slug).first() if cert is None: self.stdout.write( self.style.NOTICE( - f'Certificate slug {certificate_slug} not found: skipping syllabus {certificate_slug}.{version}' - )) + f"Certificate slug {certificate_slug} not found: skipping syllabus {certificate_slug}.{version}" + ) + ) continue - #remove letter "v" at the beginning of version number - #FIXME: this will fail until the version 10 + 
# remove letter "v" at the beginning of version number + # FIXME: this will fail until the version 10 version = version[1:] if not version.isnumeric(): - self.stdout.write(self.style.NOTICE(f'Syllabus version {version} must be number: skipping')) + self.stdout.write(self.style.NOTICE(f"Syllabus version {version} must be number: skipping")) continue _syl = Syllabus.objects.filter(version=version, certificate=cert).first() if _syl is None: - response = requests.get(f'{HOST_ASSETS}/syllabus/{certificate_slug}?v={version}', timeout=2) + response = requests.get(f"{HOST_ASSETS}/syllabus/{certificate_slug}?v={version}", timeout=2) _syl = Syllabus( version=version, certificate=cert, @@ -104,22 +105,23 @@ def syllabus(self, options): private=False, ) _syl.save() - self.stdout.write(self.style.SUCCESS(f'Syllabus {certificate_slug}{version} added')) + self.stdout.write(self.style.SUCCESS(f"Syllabus {certificate_slug}{version} added")) else: - self.stdout.write(self.style.NOTICE(f'Syllabus {certificate_slug}{version} skipped')) + self.stdout.write(self.style.NOTICE(f"Syllabus {certificate_slug}{version} skipped")) def cohorts(self, options): - response = requests.get(f'{HOST}/cohorts/', timeout=2) + response = requests.get(f"{HOST}/cohorts/", timeout=2) cohorts = response.json() - for _cohort in cohorts['data']: - co = Cohort.objects.filter(slug=_cohort['slug']).first() + for _cohort in cohorts["data"]: + co = Cohort.objects.filter(slug=_cohort["slug"]).first() if co is None: try: self.add_cohort(_cohort) self.stdout.write( - self.style.SUCCESS(f"Cohort {_cohort['slug']} with syllabus {_cohort['slug']} added")) + self.style.SUCCESS(f"Cohort {_cohort['slug']} with syllabus {_cohort['slug']} added") + ) except Exception as e: self.stdout.write(self.style.NOTICE(f"Error adding cohort {_cohort['slug']}: {str(e)}")) # raise e @@ -133,27 +135,28 @@ def cohorts(self, options): def students(self, options): - if options['override']: - ids = CohortUser.objects.filter(role='STUDENT').values_list('user__id', flat=True) + if options["override"]: + ids = CohortUser.objects.filter(role="STUDENT").values_list("user__id", flat=True) User.objects.filter(id__in=ids).delete() limit = False - if 'limit' in options and options['limit']: - limit = options['limit'] + if "limit" in options and options["limit"]: + limit = options["limit"] - response = requests.get(f'{HOST}/students/', timeout=2) + response = requests.get(f"{HOST}/students/", timeout=2) students = response.json() total = 0 - for _student in students['data']: + for _student in students["data"]: total += 1 # if limited number of sync options if limit and limit > 0 and total > limit: self.stdout.write( - self.style.SUCCESS(f'Stopped at {total} because there was a limit on the command arguments')) + self.style.SUCCESS(f"Stopped at {total} because there was a limit on the command arguments") + ) return - user = User.objects.filter(email=_student['email']).first() + user = User.objects.filter(email=_student["email"]).first() if user is None: try: user = self.add_user(_student) @@ -175,36 +178,37 @@ def students(self, options): except Profile.DoesNotExist: avatar_number = randint(1, 21) profile = Profile(user=user) - profile.avatar_url = API_URL + f'/static/img/avatar-{avatar_number}.png' - profile.bio = _student['bio'] - profile.phone = _student['phone'] if _student['phone'] is not None else '' - profile.github_username = _student['github'] + profile.avatar_url = API_URL + f"/static/img/avatar-{avatar_number}.png" + profile.bio = _student["bio"] + profile.phone = 
_student["phone"] if _student["phone"] is not None else "" + profile.github_username = _student["github"] profile.save() def teachers(self, options): - if options['override']: - ids = CohortUser.objects.filter(role__in=['STUDENT', 'ASSISTANT']).values_list('user__id', flat=True) + if options["override"]: + ids = CohortUser.objects.filter(role__in=["STUDENT", "ASSISTANT"]).values_list("user__id", flat=True) User.objects.filter(id__in=ids).delete() limit = False - if 'limit' in options and options['limit']: - limit = options['limit'] + if "limit" in options and options["limit"]: + limit = options["limit"] - response = requests.get(f'{HOST}/teachers/', timeout=2) + response = requests.get(f"{HOST}/teachers/", timeout=2) teachers = response.json() total = 0 - for _teacher in teachers['data']: - _teacher['email'] = _teacher['username'] + for _teacher in teachers["data"]: + _teacher["email"] = _teacher["username"] total += 1 # if limited number of sync options if limit and limit > 0 and total > limit: self.stdout.write( - self.style.SUCCESS(f'Stopped at {total} because there was a limit on the command arguments')) + self.style.SUCCESS(f"Stopped at {total} because there was a limit on the command arguments") + ) return - user = User.objects.filter(email=_teacher['email']).first() + user = User.objects.filter(email=_teacher["email"]).first() if user is None: user = self.add_user(_teacher) @@ -216,64 +220,68 @@ def teachers(self, options): # self.stdout.write(self.style.SUCCESS(f"Error adding cohort {_cohort['slug']}: {str(e)}")) def add_cohort(self, _cohort): - academy = Academy.objects.filter(slug=_cohort['location_slug']).first() + academy = Academy.objects.filter(slug=_cohort["location_slug"]).first() if academy is None: raise CommandError(f"Academy {_cohort['location_slug']} does not exist") - syllabus = Syllabus.objects.filter(certificate__slug=_cohort['profile_slug']).order_by('-version').first() + syllabus = Syllabus.objects.filter(certificate__slug=_cohort["profile_slug"]).order_by("-version").first() if syllabus is None: raise CommandError(f"syllabus for certificate {_cohort['profile_slug']} does not exist") stages = { - 'finished': 'ENDED', - 'on-prework': 'PREWORK', - 'not-started': 'INACTIVE', - 'on-course': 'STARTED', - 'on-final-project': 'FINAL_PROJECT', + "finished": "ENDED", + "on-prework": "PREWORK", + "not-started": "INACTIVE", + "on-course": "STARTED", + "on-final-project": "FINAL_PROJECT", } - if _cohort['stage'] not in stages: + if _cohort["stage"] not in stages: raise CommandError(f"Invalid cohort stage {_cohort['stage']}") co = Cohort( - slug=_cohort['slug'], - name=_cohort['name'], - kickoff_date=datetime.strptime(_cohort['kickoff_date'], - DATETIME_FORMAT).replace(tzinfo=pytz.timezone('UTC')), - current_day=_cohort['current_day'], - stage=stages[_cohort['stage']], - language=_cohort['language'].lower(), + slug=_cohort["slug"], + name=_cohort["name"], + kickoff_date=datetime.strptime(_cohort["kickoff_date"], DATETIME_FORMAT).replace( + tzinfo=pytz.timezone("UTC") + ), + current_day=_cohort["current_day"], + stage=stages[_cohort["stage"]], + language=_cohort["language"].lower(), academy=academy, syllabus=syllabus, ) - if _cohort['ending_date'] is not None: - co.ending_date = datetime.strptime(_cohort['ending_date'], - DATETIME_FORMAT).replace(tzinfo=pytz.timezone('UTC')) + if _cohort["ending_date"] is not None: + co.ending_date = datetime.strptime(_cohort["ending_date"], DATETIME_FORMAT).replace( + tzinfo=pytz.timezone("UTC") + ) co.save() return co def update_cohort(self, 
cohort, data): # return stages = { - 'finished': 'ENDED', - 'on-prework': 'PREWORK', - 'not-started': 'INACTIVE', - 'on-course': 'STARTED', - 'on-final-project': 'FINAL_PROJECT', + "finished": "ENDED", + "on-prework": "PREWORK", + "not-started": "INACTIVE", + "on-course": "STARTED", + "on-final-project": "FINAL_PROJECT", } - if data['stage'] not in stages: + if data["stage"] not in stages: raise CommandError(f"Invalid cohort stage {data['stage']}") - cohort.name = data['name'] - if 'kickoff_date' in data and data['kickoff_date'] is not None: - cohort.kickoff_date = datetime.strptime(data['kickoff_date'], - DATETIME_FORMAT).replace(tzinfo=pytz.timezone('UTC')) - cohort.current_day = data['current_day'] - cohort.stage = stages[data['stage']] - cohort.language = data['language'].lower() - if 'kickoff_date' in data and data['ending_date'] is not None: - cohort.ending_date = datetime.strptime(data['ending_date'], - DATETIME_FORMAT).replace(tzinfo=pytz.timezone('UTC')) - - syllabus = Syllabus.objects.filter(certificate__slug=data['profile_slug']).order_by('-version').first() + cohort.name = data["name"] + if "kickoff_date" in data and data["kickoff_date"] is not None: + cohort.kickoff_date = datetime.strptime(data["kickoff_date"], DATETIME_FORMAT).replace( + tzinfo=pytz.timezone("UTC") + ) + cohort.current_day = data["current_day"] + cohort.stage = stages[data["stage"]] + cohort.language = data["language"].lower() + if "kickoff_date" in data and data["ending_date"] is not None: + cohort.ending_date = datetime.strptime(data["ending_date"], DATETIME_FORMAT).replace( + tzinfo=pytz.timezone("UTC") + ) + + syllabus = Syllabus.objects.filter(certificate__slug=data["profile_slug"]).order_by("-version").first() if syllabus is None: raise CommandError(f"syllabus for certificate {data['profile_slug']} does not exist") cohort.syllabus = syllabus @@ -282,54 +290,56 @@ def update_cohort(self, cohort, data): def add_user(self, _user): us = User( - email=_user['email'], - username=_user['email'], - first_name=_user['first_name'], + email=_user["email"], + username=_user["email"], + first_name=_user["first_name"], ) - if 'last_name' in _user and _user['last_name'] is not None and _user['last_name'] != '': - us.last_name = _user['last_name'] + if "last_name" in _user and _user["last_name"] is not None and _user["last_name"] != "": + us.last_name = _user["last_name"] us.save() return us def add_teacher_cohorts(self, _teacher, us): - for cohort_slug in _teacher['cohorts']: + for cohort_slug in _teacher["cohorts"]: cohort = Cohort.objects.filter(slug=cohort_slug).first() if cohort and not CohortUser.objects.filter(user=us, cohort=cohort).count(): cohort_user = CohortUser( user=us, cohort=cohort, - role='TEACHER', + role="TEACHER", ) cohort_user.save() def add_student_cohorts(self, _student, us): financial_status = { - 'late': 'LATE', - 'fully_paid': 'FULLY_PAID', - 'up_to_date': 'UP_TO_DATE', - 'uknown': None, + "late": "LATE", + "fully_paid": "FULLY_PAID", + "up_to_date": "UP_TO_DATE", + "uknown": None, } - if _student['financial_status'] not in financial_status: + if _student["financial_status"] not in financial_status: raise CommandError(f"Invalid finantial status {_student['financial_status']}") educational_status = { - 'under_review': 'ACTIVE', - 'currently_active': 'ACTIVE', - 'blocked': 'SUSPENDED', - 'postponed': 'POSTPONED', - 'studies_finished': 'GRADUATED', - 'student_dropped': 'DROPPED', + "under_review": "ACTIVE", + "currently_active": "ACTIVE", + "blocked": "SUSPENDED", + "postponed": "POSTPONED", + 
"studies_finished": "GRADUATED", + "student_dropped": "DROPPED", } - if _student['status'] not in educational_status: + if _student["status"] not in educational_status: raise CommandError(f"Invalid educational_status {_student['status']}") - for cohort_slug in _student['cohorts']: + for cohort_slug in _student["cohorts"]: cohort = Cohort.objects.filter(slug=cohort_slug).first() if cohort and not CohortUser.objects.filter(user=us, cohort=cohort).count(): - cohort_user = CohortUser(user=us, - cohort=cohort, - role='STUDENT', - finantial_status=financial_status[_student['financial_status']], - educational_status=educational_status[_student['status']]) + cohort_user = CohortUser( + user=us, + cohort=cohort, + role="STUDENT", + finantial_status=financial_status[_student["financial_status"]], + educational_status=educational_status[_student["status"]], + ) cohort_user.save() diff --git a/breathecode/admissions/migrations/0001_initial.py b/breathecode/admissions/migrations/0001_initial.py index fe073b78d..2f9d43118 100644 --- a/breathecode/admissions/migrations/0001_initial.py +++ b/breathecode/admissions/migrations/0001_initial.py @@ -15,97 +15,125 @@ class Migration(migrations.Migration): operations = [ migrations.CreateModel( - name='Academy', + name="Academy", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('slug', models.CharField(max_length=150, unique=True)), - ('name', models.CharField(max_length=150)), - ('street_address', models.CharField(max_length=250)), - ('country', models.CharField(max_length=30)), - ('city', models.CharField(max_length=30)), - ('latitude', models.DecimalField(decimal_places=6, max_digits=9)), - ('longitude', models.DecimalField(decimal_places=6, max_digits=9)), - ('state', models.CharField(max_length=30)), - ('zip_code', models.IntegerField()), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('logistical_information', models.CharField(max_length=150)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("slug", models.CharField(max_length=150, unique=True)), + ("name", models.CharField(max_length=150)), + ("street_address", models.CharField(max_length=250)), + ("country", models.CharField(max_length=30)), + ("city", models.CharField(max_length=30)), + ("latitude", models.DecimalField(decimal_places=6, max_digits=9)), + ("longitude", models.DecimalField(decimal_places=6, max_digits=9)), + ("state", models.CharField(max_length=30)), + ("zip_code", models.IntegerField()), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("logistical_information", models.CharField(max_length=150)), ], ), migrations.CreateModel( - name='Certificate', + name="Certificate", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('slug', models.CharField(max_length=150)), - ('name', models.CharField(max_length=150)), - ('logo', models.CharField(blank=True, max_length=250)), - ('duration_in_hours', models.IntegerField()), - ('duration_in_days', models.IntegerField()), - ('week_hours', models.IntegerField()), - ('description', models.TextField(max_length=450)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("slug", 
models.CharField(max_length=150)), + ("name", models.CharField(max_length=150)), + ("logo", models.CharField(blank=True, max_length=250)), + ("duration_in_hours", models.IntegerField()), + ("duration_in_days", models.IntegerField()), + ("week_hours", models.IntegerField()), + ("description", models.TextField(max_length=450)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), ], ), migrations.CreateModel( - name='Cohort', + name="Cohort", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('slug', models.CharField(max_length=150, unique=True)), - ('name', models.CharField(max_length=150)), - ('kickoff_date', models.DateTimeField()), - ('ending_date', models.DateTimeField()), - ('current_day', models.IntegerField()), - ('stage', - models.CharField(choices=[('INACTIVE', 'Inactive'), ('PREWORK', 'Prework'), ('STARTED', 'Started'), - ('FINAL_PROJECT', 'Final Project'), ('ENDED', 'Ended'), - ('DELETED', 'Deleted')], - default='INACTIVE', - max_length=15)), - ('language', models.CharField(default='en', max_length=2)), - ('online_room_url', models.CharField(blank=True, max_length=250, null=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('academy', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='admissions.Academy')), - ('certificate', - models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='admissions.Certificate')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("slug", models.CharField(max_length=150, unique=True)), + ("name", models.CharField(max_length=150)), + ("kickoff_date", models.DateTimeField()), + ("ending_date", models.DateTimeField()), + ("current_day", models.IntegerField()), + ( + "stage", + models.CharField( + choices=[ + ("INACTIVE", "Inactive"), + ("PREWORK", "Prework"), + ("STARTED", "Started"), + ("FINAL_PROJECT", "Final Project"), + ("ENDED", "Ended"), + ("DELETED", "Deleted"), + ], + default="INACTIVE", + max_length=15, + ), + ), + ("language", models.CharField(default="en", max_length=2)), + ("online_room_url", models.CharField(blank=True, max_length=250, null=True)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("academy", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="admissions.Academy")), + ( + "certificate", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="admissions.Certificate"), + ), ], ), migrations.CreateModel( - name='CohortUser', + name="CohortUser", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('role', - models.CharField(choices=[('TEACHER', 'Teacher'), ('ASSISTANT', 'Assistant'), ('STUDENT', 'Student')], - default='STUDENT', - max_length=9)), - ('finantial_status', - models.CharField(choices=[('FULLY_PAID', 'Fully Paid'), ('UP_TO_DATE', 'Up to date'), - ('LATE', 'Late')], - default=None, - max_length=15, - null=True)), - ('educational_status', - models.CharField(choices=[('ACTIVE', 'Active'), ('POSTPONED', 'Postponed'), ('GRADUATED', 'Graduated'), - ('DROPPED', 'Dropped')], - default=None, - max_length=15, - null=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('cohort', 
models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='admissions.Cohort')), - ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "role", + models.CharField( + choices=[("TEACHER", "Teacher"), ("ASSISTANT", "Assistant"), ("STUDENT", "Student")], + default="STUDENT", + max_length=9, + ), + ), + ( + "finantial_status", + models.CharField( + choices=[("FULLY_PAID", "Fully Paid"), ("UP_TO_DATE", "Up to date"), ("LATE", "Late")], + default=None, + max_length=15, + null=True, + ), + ), + ( + "educational_status", + models.CharField( + choices=[ + ("ACTIVE", "Active"), + ("POSTPONED", "Postponed"), + ("GRADUATED", "Graduated"), + ("DROPPED", "Dropped"), + ], + default=None, + max_length=15, + null=True, + ), + ), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("cohort", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="admissions.Cohort")), + ("user", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ], ), migrations.CreateModel( - name='AcademyCertificate', + name="AcademyCertificate", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('academy', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='admissions.Academy')), - ('certificate', - models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='admissions.Certificate')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("academy", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="admissions.Academy")), + ( + "certificate", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="admissions.Certificate"), + ), ], ), ] diff --git a/breathecode/admissions/migrations/0002_auto_20200703_1722.py b/breathecode/admissions/migrations/0002_auto_20200703_1722.py index b89a180af..d0641b5b5 100644 --- a/breathecode/admissions/migrations/0002_auto_20200703_1722.py +++ b/breathecode/admissions/migrations/0002_auto_20200703_1722.py @@ -6,33 +6,33 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0001_initial'), + ("admissions", "0001_initial"), ] operations = [ migrations.AlterField( - model_name='academy', - name='latitude', + model_name="academy", + name="latitude", field=models.DecimalField(blank=True, decimal_places=6, max_digits=9, null=True), ), migrations.AlterField( - model_name='academy', - name='logistical_information', + model_name="academy", + name="logistical_information", field=models.CharField(blank=True, max_length=150, null=True), ), migrations.AlterField( - model_name='academy', - name='longitude', + model_name="academy", + name="longitude", field=models.DecimalField(blank=True, decimal_places=6, max_digits=9, null=True), ), migrations.AlterField( - model_name='academy', - name='state', + model_name="academy", + name="state", field=models.CharField(blank=True, max_length=30, null=True), ), migrations.AlterField( - model_name='academy', - name='zip_code', + model_name="academy", + name="zip_code", 
field=models.IntegerField(blank=True, null=True), ), ] diff --git a/breathecode/admissions/migrations/0003_auto_20200703_1741.py b/breathecode/admissions/migrations/0003_auto_20200703_1741.py index dac68dd3a..ae2565f70 100644 --- a/breathecode/admissions/migrations/0003_auto_20200703_1741.py +++ b/breathecode/admissions/migrations/0003_auto_20200703_1741.py @@ -7,50 +7,50 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0002_auto_20200703_1722'), + ("admissions", "0002_auto_20200703_1722"), ] operations = [ migrations.CreateModel( - name='Country', + name="Country", fields=[ - ('code', models.CharField(max_length=3, primary_key=True, serialize=False)), - ('name', models.CharField(max_length=30)), + ("code", models.CharField(max_length=3, primary_key=True, serialize=False)), + ("name", models.CharField(max_length=30)), ], ), migrations.RemoveField( - model_name='academy', - name='state', + model_name="academy", + name="state", ), migrations.AddField( - model_name='academy', - name='status', - field=models.CharField(choices=[('INACTIVE', 'Inactive'), ('ACTIVE', 'Active'), ('DELETED', 'Deleted')], - default='ACTIVE', - max_length=15), + model_name="academy", + name="status", + field=models.CharField( + choices=[("INACTIVE", "Inactive"), ("ACTIVE", "Active"), ("DELETED", "Deleted")], + default="ACTIVE", + max_length=15, + ), ), migrations.CreateModel( - name='City', + name="City", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('name', models.CharField(max_length=30)), - ('country', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='admissions.Country')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("name", models.CharField(max_length=30)), + ("country", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="admissions.Country")), ], ), migrations.AlterField( - model_name='academy', - name='city', - field=models.ForeignKey(blank=True, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to='admissions.City'), + model_name="academy", + name="city", + field=models.ForeignKey( + blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to="admissions.City" + ), ), migrations.AlterField( - model_name='academy', - name='country', - field=models.ForeignKey(blank=True, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to='admissions.Country'), + model_name="academy", + name="country", + field=models.ForeignKey( + blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to="admissions.Country" + ), ), ] diff --git a/breathecode/admissions/migrations/0004_auto_20200703_1751.py b/breathecode/admissions/migrations/0004_auto_20200703_1751.py index d7557db7f..3cd62d824 100644 --- a/breathecode/admissions/migrations/0004_auto_20200703_1751.py +++ b/breathecode/admissions/migrations/0004_auto_20200703_1751.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0003_auto_20200703_1741'), + ("admissions", "0003_auto_20200703_1741"), ] operations = [ migrations.AlterField( - model_name='certificate', - name='logo', + model_name="certificate", + name="logo", field=models.CharField(blank=True, default=None, max_length=250, null=True), ), ] diff --git a/breathecode/admissions/migrations/0005_auto_20200703_1752.py b/breathecode/admissions/migrations/0005_auto_20200703_1752.py index ece723c0a..0235fb76d 100644 --- 
a/breathecode/admissions/migrations/0005_auto_20200703_1752.py +++ b/breathecode/admissions/migrations/0005_auto_20200703_1752.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0004_auto_20200703_1751'), + ("admissions", "0004_auto_20200703_1751"), ] operations = [ migrations.AlterField( - model_name='certificate', - name='week_hours', + model_name="certificate", + name="week_hours", field=models.IntegerField(default=None, null=True), ), ] diff --git a/breathecode/admissions/migrations/0006_auto_20200703_1951.py b/breathecode/admissions/migrations/0006_auto_20200703_1951.py index a4d6b94f4..10efe4857 100644 --- a/breathecode/admissions/migrations/0006_auto_20200703_1951.py +++ b/breathecode/admissions/migrations/0006_auto_20200703_1951.py @@ -6,17 +6,17 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0005_auto_20200703_1752'), + ("admissions", "0005_auto_20200703_1752"), ] operations = [ migrations.RemoveField( - model_name='cohort', - name='online_room_url', + model_name="cohort", + name="online_room_url", ), migrations.AlterField( - model_name='cohort', - name='ending_date', + model_name="cohort", + name="ending_date", field=models.DateTimeField(blank=True, null=True), ), ] diff --git a/breathecode/admissions/migrations/0007_auto_20200703_2205.py b/breathecode/admissions/migrations/0007_auto_20200703_2205.py index 4507645e4..b6b8c28dd 100644 --- a/breathecode/admissions/migrations/0007_auto_20200703_2205.py +++ b/breathecode/admissions/migrations/0007_auto_20200703_2205.py @@ -6,18 +6,24 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0006_auto_20200703_1951'), + ("admissions", "0006_auto_20200703_1951"), ] operations = [ migrations.AlterField( - model_name='cohortuser', - name='educational_status', - field=models.CharField(choices=[('ACTIVE', 'Active'), ('POSTPONED', 'Postponed'), - ('GRADUATED', 'Graduated'), ('SUSPENDED', 'Suspended'), - ('DROPPED', 'Dropped')], - default=None, - max_length=15, - null=True), + model_name="cohortuser", + name="educational_status", + field=models.CharField( + choices=[ + ("ACTIVE", "Active"), + ("POSTPONED", "Postponed"), + ("GRADUATED", "Graduated"), + ("SUSPENDED", "Suspended"), + ("DROPPED", "Dropped"), + ], + default=None, + max_length=15, + null=True, + ), ), ] diff --git a/breathecode/admissions/migrations/0008_auto_20200708_0049.py b/breathecode/admissions/migrations/0008_auto_20200708_0049.py index 0bc35b002..a332380ea 100644 --- a/breathecode/admissions/migrations/0008_auto_20200708_0049.py +++ b/breathecode/admissions/migrations/0008_auto_20200708_0049.py @@ -6,18 +6,18 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0007_auto_20200703_2205'), + ("admissions", "0007_auto_20200703_2205"), ] operations = [ migrations.AlterField( - model_name='academy', - name='slug', + model_name="academy", + name="slug", field=models.SlugField(max_length=100, unique=True), ), migrations.AlterField( - model_name='certificate', - name='slug', + model_name="certificate", + name="slug", field=models.SlugField(max_length=100), ), ] diff --git a/breathecode/admissions/migrations/0009_academy_logo_url.py b/breathecode/admissions/migrations/0009_academy_logo_url.py index 9ec2fa6a7..982c6f79d 100644 --- a/breathecode/admissions/migrations/0009_academy_logo_url.py +++ b/breathecode/admissions/migrations/0009_academy_logo_url.py @@ -6,14 +6,14 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', 
'0008_auto_20200708_0049'), + ("admissions", "0008_auto_20200708_0049"), ] operations = [ migrations.AddField( - model_name='academy', - name='logo_url', - field=models.CharField(default='', max_length=255), + model_name="academy", + name="logo_url", + field=models.CharField(default="", max_length=255), preserve_default=False, ), ] diff --git a/breathecode/admissions/migrations/0010_auto_20200929_1812.py b/breathecode/admissions/migrations/0010_auto_20200929_1812.py index d41e97a64..41d63d216 100644 --- a/breathecode/admissions/migrations/0010_auto_20200929_1812.py +++ b/breathecode/admissions/migrations/0010_auto_20200929_1812.py @@ -7,27 +7,27 @@ class Migration(migrations.Migration): dependencies = [ - ('auth', '0012_alter_user_first_name_max_length'), - ('admissions', '0009_academy_logo_url'), + ("auth", "0012_alter_user_first_name_max_length"), + ("admissions", "0009_academy_logo_url"), ] operations = [ migrations.CreateModel( - name='UserAdmissions', + name="UserAdmissions", fields=[], options={ - 'proxy': True, - 'indexes': [], - 'constraints': [], + "proxy": True, + "indexes": [], + "constraints": [], }, - bases=('auth.user', ), + bases=("auth.user",), managers=[ - ('objects', django.contrib.auth.models.UserManager()), + ("objects", django.contrib.auth.models.UserManager()), ], ), migrations.AddField( - model_name='academy', - name='website_url', + model_name="academy", + name="website_url", field=models.CharField(blank=True, default=None, max_length=255, null=True), ), ] diff --git a/breathecode/admissions/migrations/0011_auto_20201006_0058.py b/breathecode/admissions/migrations/0011_auto_20201006_0058.py index 0db324604..3407439a2 100644 --- a/breathecode/admissions/migrations/0011_auto_20201006_0058.py +++ b/breathecode/admissions/migrations/0011_auto_20201006_0058.py @@ -6,20 +6,20 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0010_auto_20200929_1812'), + ("admissions", "0010_auto_20200929_1812"), ] operations = [ migrations.AddField( - model_name='academy', - name='active_campaign_slug', + model_name="academy", + name="active_campaign_slug", field=models.SlugField(default=None, max_length=100, null=True), ), migrations.AddField( - model_name='certificate', - name='schedule_type', - field=models.CharField(choices=[('PART-TIME', 'Part-Time'), ('FULL-TIME', 'Full-Time')], - default='PART-TIME', - max_length=15), + model_name="certificate", + name="schedule_type", + field=models.CharField( + choices=[("PART-TIME", "Part-Time"), ("FULL-TIME", "Full-Time")], default="PART-TIME", max_length=15 + ), ), ] diff --git a/breathecode/admissions/migrations/0012_auto_20201124_1737.py b/breathecode/admissions/migrations/0012_auto_20201124_1737.py index d7d89b4bb..b5b4e91da 100644 --- a/breathecode/admissions/migrations/0012_auto_20201124_1737.py +++ b/breathecode/admissions/migrations/0012_auto_20201124_1737.py @@ -6,18 +6,18 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0011_auto_20201006_0058'), + ("admissions", "0011_auto_20201006_0058"), ] operations = [ migrations.AddField( - model_name='academy', - name='timezone', + model_name="academy", + name="timezone", field=models.CharField(default=None, max_length=50, null=True), ), migrations.AddField( - model_name='cohort', - name='timezone', + model_name="cohort", + name="timezone", field=models.CharField(default=None, max_length=50, null=True), ), ] diff --git a/breathecode/admissions/migrations/0013_auto_20201209_0216.py 
b/breathecode/admissions/migrations/0013_auto_20201209_0216.py index 3f4722835..c50c3bcec 100644 --- a/breathecode/admissions/migrations/0013_auto_20201209_0216.py +++ b/breathecode/admissions/migrations/0013_auto_20201209_0216.py @@ -7,18 +7,18 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0012_auto_20201124_1737'), + ("admissions", "0012_auto_20201124_1737"), ] operations = [ migrations.AlterField( - model_name='academy', - name='city', - field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='admissions.city'), + model_name="academy", + name="city", + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to="admissions.city"), ), migrations.AlterField( - model_name='academy', - name='country', - field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='admissions.country'), + model_name="academy", + name="country", + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to="admissions.country"), ), ] diff --git a/breathecode/admissions/migrations/0014_auto_20201218_0534.py b/breathecode/admissions/migrations/0014_auto_20201218_0534.py index 5bffd3b9e..a9a4a7360 100644 --- a/breathecode/admissions/migrations/0014_auto_20201218_0534.py +++ b/breathecode/admissions/migrations/0014_auto_20201218_0534.py @@ -7,38 +7,38 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0013_auto_20201209_0216'), + ("admissions", "0013_auto_20201209_0216"), ] operations = [ migrations.AddField( - model_name='academy', - name='facebook_handle', + model_name="academy", + name="facebook_handle", field=models.CharField(blank=True, default=None, max_length=30, null=True), ), migrations.AddField( - model_name='academy', - name='github_handle', + model_name="academy", + name="github_handle", field=models.CharField(blank=True, default=None, max_length=20, null=True), ), migrations.AddField( - model_name='academy', - name='instagram_handle', + model_name="academy", + name="instagram_handle", field=models.CharField(blank=True, default=None, max_length=30, null=True), ), migrations.AddField( - model_name='academy', - name='linkedin_url', + model_name="academy", + name="linkedin_url", field=models.URLField(blank=True, default=None, null=True), ), migrations.AddField( - model_name='academy', - name='marketing_email', + model_name="academy", + name="marketing_email", field=models.EmailField(blank=True, default=None, max_length=254, null=True), ), migrations.AddField( - model_name='academy', - name='marketing_phone', + model_name="academy", + name="marketing_phone", field=models.CharField( blank=True, default=None, @@ -47,17 +47,19 @@ class Migration(migrations.Migration): validators=[ django.core.validators.RegexValidator( message="Phone number must be entered in the format: '+999999999'. 
Up to 15 digits allowed.", - regex='^\\+?1?\\d{9,15}$') - ]), + regex="^\\+?1?\\d{9,15}$", + ) + ], + ), ), migrations.AddField( - model_name='academy', - name='twitter_handle', + model_name="academy", + name="twitter_handle", field=models.CharField(blank=True, default=None, max_length=15, null=True), ), migrations.AddField( - model_name='academy', - name='youtube_url', + model_name="academy", + name="youtube_url", field=models.URLField(blank=True, default=None, null=True), ), ] diff --git a/breathecode/admissions/migrations/0015_auto_20210302_0254.py b/breathecode/admissions/migrations/0015_auto_20210302_0254.py index 004d7b31d..8c6a6bd45 100644 --- a/breathecode/admissions/migrations/0015_auto_20210302_0254.py +++ b/breathecode/admissions/migrations/0015_auto_20210302_0254.py @@ -7,39 +7,41 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0014_auto_20201218_0534'), + ("admissions", "0014_auto_20201218_0534"), ] operations = [ migrations.RemoveField( - model_name='cohort', - name='certificate', + model_name="cohort", + name="certificate", ), migrations.CreateModel( - name='Syllabus', + name="Syllabus", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('version', models.PositiveSmallIntegerField()), - ('json', models.JSONField()), - ('github_url', models.URLField(blank=True, default=None, max_length=255, null=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('private', models.BooleanField(default=False)), - ('academy_owner', - models.ForeignKey(default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='admissions.academy')), - ('certificate', - models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='admissions.certificate')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("version", models.PositiveSmallIntegerField()), + ("json", models.JSONField()), + ("github_url", models.URLField(blank=True, default=None, max_length=255, null=True)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("private", models.BooleanField(default=False)), + ( + "academy_owner", + models.ForeignKey( + default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to="admissions.academy" + ), + ), + ( + "certificate", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="admissions.certificate"), + ), ], ), migrations.AddField( - model_name='cohort', - name='syllabus', - field=models.ForeignKey(default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='admissions.syllabus'), + model_name="cohort", + name="syllabus", + field=models.ForeignKey( + default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to="admissions.syllabus" + ), ), ] diff --git a/breathecode/admissions/migrations/0016_cohort_private.py b/breathecode/admissions/migrations/0016_cohort_private.py index 397e19ab8..275a40146 100644 --- a/breathecode/admissions/migrations/0016_cohort_private.py +++ b/breathecode/admissions/migrations/0016_cohort_private.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0015_auto_20210302_0254'), + ("admissions", "0015_auto_20210302_0254"), ] operations = [ migrations.AddField( - model_name='cohort', - name='private', + model_name="cohort", + name="private", field=models.BooleanField(default=False), ), ] 
diff --git a/breathecode/admissions/migrations/0017_cohort_never_ends.py b/breathecode/admissions/migrations/0017_cohort_never_ends.py index 5e17aed2e..48d4dd6e6 100644 --- a/breathecode/admissions/migrations/0017_cohort_never_ends.py +++ b/breathecode/admissions/migrations/0017_cohort_never_ends.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0016_cohort_private'), + ("admissions", "0016_cohort_private"), ] operations = [ migrations.AddField( - model_name='cohort', - name='never_ends', + model_name="cohort", + name="never_ends", field=models.BooleanField(default=False), ), ] diff --git a/breathecode/admissions/migrations/0018_alter_cohortuser_role.py b/breathecode/admissions/migrations/0018_alter_cohortuser_role.py index 50bd619c8..216d76463 100644 --- a/breathecode/admissions/migrations/0018_alter_cohortuser_role.py +++ b/breathecode/admissions/migrations/0018_alter_cohortuser_role.py @@ -6,16 +6,22 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0017_cohort_never_ends'), + ("admissions", "0017_cohort_never_ends"), ] operations = [ migrations.AlterField( - model_name='cohortuser', - name='role', - field=models.CharField(choices=[('TEACHER', 'Teacher'), ('ASSISTANT', 'Assistant'), - ('REVIEWER', 'Reviewer'), ('STUDENT', 'Student')], - default='STUDENT', - max_length=9), + model_name="cohortuser", + name="role", + field=models.CharField( + choices=[ + ("TEACHER", "Teacher"), + ("ASSISTANT", "Assistant"), + ("REVIEWER", "Reviewer"), + ("STUDENT", "Student"), + ], + default="STUDENT", + max_length=9, + ), ), ] diff --git a/breathecode/admissions/migrations/0019_certificatetimeslot_cohorttimeslot.py b/breathecode/admissions/migrations/0019_certificatetimeslot_cohorttimeslot.py index 534ef322d..0b03af879 100644 --- a/breathecode/admissions/migrations/0019_certificatetimeslot_cohorttimeslot.py +++ b/breathecode/admissions/migrations/0019_certificatetimeslot_cohorttimeslot.py @@ -7,48 +7,58 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0018_alter_cohortuser_role'), + ("admissions", "0018_alter_cohortuser_role"), ] operations = [ migrations.CreateModel( - name='CohortTimeSlot', + name="CohortTimeSlot", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('starting_at', models.DateTimeField()), - ('ending_at', models.DateTimeField()), - ('recurrent', models.BooleanField(default=True)), - ('recurrency_type', - models.CharField(choices=[('DAILY', 'Daily'), ('WEEKLY', 'Weekly'), ('MONTHLY', 'Monthly')], - default='WEEKLY', - max_length=10)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('cohort', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='admissions.cohort')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("starting_at", models.DateTimeField()), + ("ending_at", models.DateTimeField()), + ("recurrent", models.BooleanField(default=True)), + ( + "recurrency_type", + models.CharField( + choices=[("DAILY", "Daily"), ("WEEKLY", "Weekly"), ("MONTHLY", "Monthly")], + default="WEEKLY", + max_length=10, + ), + ), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("cohort", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="admissions.cohort")), ], options={ - 'abstract': False, + "abstract": False, }, ), 
migrations.CreateModel( - name='CertificateTimeSlot', + name="CertificateTimeSlot", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('starting_at', models.DateTimeField()), - ('ending_at', models.DateTimeField()), - ('recurrent', models.BooleanField(default=True)), - ('recurrency_type', - models.CharField(choices=[('DAILY', 'Daily'), ('WEEKLY', 'Weekly'), ('MONTHLY', 'Monthly')], - default='WEEKLY', - max_length=10)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('academy', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='admissions.academy')), - ('certificate', - models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='admissions.certificate')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("starting_at", models.DateTimeField()), + ("ending_at", models.DateTimeField()), + ("recurrent", models.BooleanField(default=True)), + ( + "recurrency_type", + models.CharField( + choices=[("DAILY", "Daily"), ("WEEKLY", "Weekly"), ("MONTHLY", "Monthly")], + default="WEEKLY", + max_length=10, + ), + ), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("academy", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="admissions.academy")), + ( + "certificate", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="admissions.certificate"), + ), ], options={ - 'abstract': False, + "abstract": False, }, ), ] diff --git a/breathecode/admissions/migrations/0020_auto_20210727_1106.py b/breathecode/admissions/migrations/0020_auto_20210727_1106.py index 281e99201..e649091ea 100644 --- a/breathecode/admissions/migrations/0020_auto_20210727_1106.py +++ b/breathecode/admissions/migrations/0020_auto_20210727_1106.py @@ -6,104 +6,114 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0019_certificatetimeslot_cohorttimeslot'), + ("admissions", "0019_certificatetimeslot_cohorttimeslot"), ] operations = [ migrations.CreateModel( - name='AcademySpecialtyMode', + name="AcademySpecialtyMode", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), ], ), migrations.CreateModel( - name='SpecialtyMode', + name="SpecialtyMode", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('slug', models.SlugField(max_length=100)), - ('name', models.CharField(max_length=150)), - ('schedule_type', - models.CharField(choices=[('PART-TIME', 'Part-Time'), ('FULL-TIME', 'Full-Time')], - default='PART-TIME', - max_length=15)), - ('description', models.TextField(max_length=450)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("slug", models.SlugField(max_length=100)), + ("name", models.CharField(max_length=150)), + ( + "schedule_type", + models.CharField( + choices=[("PART-TIME", "Part-Time"), ("FULL-TIME", 
"Full-Time")], + default="PART-TIME", + max_length=15, + ), + ), + ("description", models.TextField(max_length=450)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), ], ), migrations.CreateModel( - name='SpecialtyModeTimeSlot', + name="SpecialtyModeTimeSlot", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('starting_at', models.DateTimeField()), - ('ending_at', models.DateTimeField()), - ('recurrent', models.BooleanField(default=True)), - ('recurrency_type', - models.CharField(choices=[('DAILY', 'Daily'), ('WEEKLY', 'Weekly'), ('MONTHLY', 'Monthly')], - default='WEEKLY', - max_length=10)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("starting_at", models.DateTimeField()), + ("ending_at", models.DateTimeField()), + ("recurrent", models.BooleanField(default=True)), + ( + "recurrency_type", + models.CharField( + choices=[("DAILY", "Daily"), ("WEEKLY", "Weekly"), ("MONTHLY", "Monthly")], + default="WEEKLY", + max_length=10, + ), + ), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), ], options={ - 'abstract': False, + "abstract": False, }, ), migrations.CreateModel( - name='SyllabusVersion', + name="SyllabusVersion", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('json', models.JSONField()), - ('version', models.PositiveSmallIntegerField()), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("json", models.JSONField()), + ("version", models.PositiveSmallIntegerField()), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), ], ), migrations.RemoveField( - model_name='certificatetimeslot', - name='academy', + model_name="certificatetimeslot", + name="academy", ), migrations.RemoveField( - model_name='certificatetimeslot', - name='certificate', + model_name="certificatetimeslot", + name="certificate", ), migrations.RemoveField( - model_name='cohort', - name='syllabus', + model_name="cohort", + name="syllabus", ), migrations.RemoveField( - model_name='syllabus', - name='certificate', + model_name="syllabus", + name="certificate", ), migrations.RemoveField( - model_name='syllabus', - name='json', + model_name="syllabus", + name="json", ), migrations.RemoveField( - model_name='syllabus', - name='version', + model_name="syllabus", + name="version", ), migrations.AddField( - model_name='syllabus', - name='duration_in_days', + model_name="syllabus", + name="duration_in_days", field=models.IntegerField(default=None, null=True), ), migrations.AddField( - model_name='syllabus', - name='duration_in_hours', + model_name="syllabus", + name="duration_in_hours", field=models.IntegerField(default=None, null=True), ), migrations.AddField( - model_name='syllabus', - name='logo', + model_name="syllabus", + name="logo", field=models.CharField(blank=True, default=None, max_length=250, null=True), ), migrations.AddField( - model_name='syllabus', - name='week_hours', + model_name="syllabus", + name="week_hours", field=models.IntegerField(default=None, null=True), ), - 
migrations.DeleteModel(name='AcademyCertificate', ), + migrations.DeleteModel( + name="AcademyCertificate", + ), ] diff --git a/breathecode/admissions/migrations/0021_auto_20210727_1106.py b/breathecode/admissions/migrations/0021_auto_20210727_1106.py index 648b8cdea..b43e3a615 100644 --- a/breathecode/admissions/migrations/0021_auto_20210727_1106.py +++ b/breathecode/admissions/migrations/0021_auto_20210727_1106.py @@ -7,52 +7,54 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0020_auto_20210727_1106'), - ('certificate', '0012_auto_20210727_1106'), + ("admissions", "0020_auto_20210727_1106"), + ("certificate", "0012_auto_20210727_1106"), ] operations = [ - migrations.DeleteModel(name='Certificate', ), - migrations.DeleteModel(name='CertificateTimeSlot', ), + migrations.DeleteModel( + name="Certificate", + ), + migrations.DeleteModel( + name="CertificateTimeSlot", + ), migrations.AddField( - model_name='syllabusversion', - name='syllabus', - field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='admissions.syllabus'), + model_name="syllabusversion", + name="syllabus", + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="admissions.syllabus"), ), migrations.AddField( - model_name='specialtymodetimeslot', - name='academy', - field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='admissions.academy'), + model_name="specialtymodetimeslot", + name="academy", + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="admissions.academy"), ), migrations.AddField( - model_name='specialtymodetimeslot', - name='specialty_mode', - field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='admissions.specialtymode'), + model_name="specialtymodetimeslot", + name="specialty_mode", + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="admissions.specialtymode"), ), migrations.AddField( - model_name='academyspecialtymode', - name='academy', - field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='admissions.academy'), + model_name="academyspecialtymode", + name="academy", + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="admissions.academy"), ), migrations.AddField( - model_name='academyspecialtymode', - name='specialty_mode', - field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='admissions.specialtymode'), + model_name="academyspecialtymode", + name="specialty_mode", + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="admissions.specialtymode"), ), migrations.AddField( - model_name='cohort', - name='specialty_mode', - field=models.ForeignKey(default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='admissions.specialtymode'), + model_name="cohort", + name="specialty_mode", + field=models.ForeignKey( + default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to="admissions.specialtymode" + ), ), migrations.AddField( - model_name='cohort', - name='syllabus_version', - field=models.ForeignKey(default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='admissions.syllabusversion'), + model_name="cohort", + name="syllabus_version", + field=models.ForeignKey( + default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to="admissions.syllabusversion" + ), ), ] diff --git a/breathecode/admissions/migrations/0022_specialtymode_syllabus.py b/breathecode/admissions/migrations/0022_specialtymode_syllabus.py index 
e8d9f7538..67239d4c6 100644 --- a/breathecode/admissions/migrations/0022_specialtymode_syllabus.py +++ b/breathecode/admissions/migrations/0022_specialtymode_syllabus.py @@ -7,16 +7,15 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0021_auto_20210727_1106'), + ("admissions", "0021_auto_20210727_1106"), ] operations = [ migrations.AddField( - model_name='specialtymode', - name='syllabus', - field=models.ForeignKey(default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='admissions.syllabus'), + model_name="specialtymode", + name="syllabus", + field=models.ForeignKey( + default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to="admissions.syllabus" + ), ), ] diff --git a/breathecode/admissions/migrations/0023_auto_20210812_2153.py b/breathecode/admissions/migrations/0023_auto_20210812_2153.py index 0594e5a9f..a41e6c45c 100644 --- a/breathecode/admissions/migrations/0023_auto_20210812_2153.py +++ b/breathecode/admissions/migrations/0023_auto_20210812_2153.py @@ -6,18 +6,18 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0022_specialtymode_syllabus'), + ("admissions", "0022_specialtymode_syllabus"), ] operations = [ migrations.AddField( - model_name='syllabus', - name='name', + model_name="syllabus", + name="name", field=models.CharField(blank=True, default=None, max_length=150, null=True), ), migrations.AddField( - model_name='syllabus', - name='slug', + model_name="syllabus", + name="slug", field=models.SlugField(blank=True, default=None, max_length=100, null=True), ), ] diff --git a/breathecode/admissions/migrations/0024_academy_feedback_email.py b/breathecode/admissions/migrations/0024_academy_feedback_email.py index a4677673a..384c552e7 100644 --- a/breathecode/admissions/migrations/0024_academy_feedback_email.py +++ b/breathecode/admissions/migrations/0024_academy_feedback_email.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0023_auto_20210812_2153'), + ("admissions", "0023_auto_20210812_2153"), ] operations = [ migrations.AddField( - model_name='academy', - name='feedback_email', + model_name="academy", + name="feedback_email", field=models.EmailField(blank=True, default=None, max_length=254, null=True), ), ] diff --git a/breathecode/admissions/migrations/0024_auto_20211016_0015.py b/breathecode/admissions/migrations/0024_auto_20211016_0015.py index 7c0c18103..5d1c89287 100644 --- a/breathecode/admissions/migrations/0024_auto_20211016_0015.py +++ b/breathecode/admissions/migrations/0024_auto_20211016_0015.py @@ -6,19 +6,20 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0023_auto_20210812_2153'), + ("admissions", "0023_auto_20210812_2153"), ] operations = [ migrations.AddField( - model_name='cohort', - name='online_meeting_url', + model_name="cohort", + name="online_meeting_url", field=models.URLField(blank=True, default=None, max_length=255, null=True), ), migrations.AddField( - model_name='cohort', - name='remote_available', + model_name="cohort", + name="remote_available", field=models.BooleanField( - default=True, help_text='True (default) if the students from other cities can take it from home'), + default=True, help_text="True (default) if the students from other cities can take it from home" + ), ), ] diff --git a/breathecode/admissions/migrations/0025_merge_20211018_2259.py b/breathecode/admissions/migrations/0025_merge_20211018_2259.py index eacc6d075..fc009fa92 100644 --- 
a/breathecode/admissions/migrations/0025_merge_20211018_2259.py +++ b/breathecode/admissions/migrations/0025_merge_20211018_2259.py @@ -6,8 +6,8 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0024_academy_feedback_email'), - ('admissions', '0024_auto_20211016_0015'), + ("admissions", "0024_academy_feedback_email"), + ("admissions", "0024_auto_20211016_0015"), ] operations = [] diff --git a/breathecode/admissions/migrations/0026_remove_specialtymode_slug.py b/breathecode/admissions/migrations/0026_remove_specialtymode_slug.py index c1f868305..6c1139385 100644 --- a/breathecode/admissions/migrations/0026_remove_specialtymode_slug.py +++ b/breathecode/admissions/migrations/0026_remove_specialtymode_slug.py @@ -6,12 +6,12 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0025_merge_20211018_2259'), + ("admissions", "0025_merge_20211018_2259"), ] operations = [ migrations.RemoveField( - model_name='specialtymode', - name='slug', + model_name="specialtymode", + name="slug", ), ] diff --git a/breathecode/admissions/migrations/0027_auto_20211205_2249.py b/breathecode/admissions/migrations/0027_auto_20211205_2249.py index ee432d791..1fa9b04c2 100644 --- a/breathecode/admissions/migrations/0027_auto_20211205_2249.py +++ b/breathecode/admissions/migrations/0027_auto_20211205_2249.py @@ -6,18 +6,18 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0026_remove_specialtymode_slug'), + ("admissions", "0026_remove_specialtymode_slug"), ] operations = [ migrations.AddField( - model_name='cohorttimeslot', - name='timezone', - field=models.CharField(default='America/New_York', max_length=50), + model_name="cohorttimeslot", + name="timezone", + field=models.CharField(default="America/New_York", max_length=50), ), migrations.AddField( - model_name='specialtymodetimeslot', - name='timezone', - field=models.CharField(default='America/New_York', max_length=50), + model_name="specialtymodetimeslot", + name="timezone", + field=models.CharField(default="America/New_York", max_length=50), ), ] diff --git a/breathecode/admissions/migrations/0028_auto_20211217_0134.py b/breathecode/admissions/migrations/0028_auto_20211217_0134.py index d03e807ed..5a858a9a2 100644 --- a/breathecode/admissions/migrations/0028_auto_20211217_0134.py +++ b/breathecode/admissions/migrations/0028_auto_20211217_0134.py @@ -6,24 +6,24 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0027_auto_20211205_2249'), + ("admissions", "0027_auto_20211205_2249"), ] operations = [ migrations.RemoveField( - model_name='cohorttimeslot', - name='ending_at', + model_name="cohorttimeslot", + name="ending_at", ), migrations.RemoveField( - model_name='cohorttimeslot', - name='starting_at', + model_name="cohorttimeslot", + name="starting_at", ), migrations.RemoveField( - model_name='specialtymodetimeslot', - name='ending_at', + model_name="specialtymodetimeslot", + name="ending_at", ), migrations.RemoveField( - model_name='specialtymodetimeslot', - name='starting_at', + model_name="specialtymodetimeslot", + name="starting_at", ), ] diff --git a/breathecode/admissions/migrations/0029_auto_20211217_0248.py b/breathecode/admissions/migrations/0029_auto_20211217_0248.py index 543ad86e9..dd87bd7b2 100644 --- a/breathecode/admissions/migrations/0029_auto_20211217_0248.py +++ b/breathecode/admissions/migrations/0029_auto_20211217_0248.py @@ -7,56 +7,56 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0028_auto_20211217_0134'), + 
("admissions", "0028_auto_20211217_0134"), ] operations = [ migrations.AddField( - model_name='cohorttimeslot', - name='ending_at', + model_name="cohorttimeslot", + name="ending_at", field=models.BigIntegerField( default=202101010000, - help_text= - 'The first 4 number are year, the next 2 number are month, the next 2 number are day, the next 2 number are hour and the last 2 number are second', + help_text="The first 4 number are year, the next 2 number are month, the next 2 number are day, the next 2 number are hour and the last 2 number are second", validators=[ django.core.validators.MaxValueValidator(300000000000), - django.core.validators.MinValueValidator(202101010000) - ]), + django.core.validators.MinValueValidator(202101010000), + ], + ), ), migrations.AddField( - model_name='cohorttimeslot', - name='starting_at', + model_name="cohorttimeslot", + name="starting_at", field=models.BigIntegerField( default=202101010000, - help_text= - 'The first 4 number are year, the next 2 number are month, the next 2 number are day, the next 2 number are hour and the last 2 number are second', + help_text="The first 4 number are year, the next 2 number are month, the next 2 number are day, the next 2 number are hour and the last 2 number are second", validators=[ django.core.validators.MaxValueValidator(300000000000), - django.core.validators.MinValueValidator(202101010000) - ]), + django.core.validators.MinValueValidator(202101010000), + ], + ), ), migrations.AddField( - model_name='specialtymodetimeslot', - name='ending_at', + model_name="specialtymodetimeslot", + name="ending_at", field=models.BigIntegerField( default=202101010000, - help_text= - 'The first 4 number are year, the next 2 number are month, the next 2 number are day, the next 2 number are hour and the last 2 number are second', + help_text="The first 4 number are year, the next 2 number are month, the next 2 number are day, the next 2 number are hour and the last 2 number are second", validators=[ django.core.validators.MaxValueValidator(300000000000), - django.core.validators.MinValueValidator(202101010000) - ]), + django.core.validators.MinValueValidator(202101010000), + ], + ), ), migrations.AddField( - model_name='specialtymodetimeslot', - name='starting_at', + model_name="specialtymodetimeslot", + name="starting_at", field=models.BigIntegerField( default=202101010000, - help_text= - 'The first 4 number are year, the next 2 number are month, the next 2 number are day, the next 2 number are hour and the last 2 number are second', + help_text="The first 4 number are year, the next 2 number are month, the next 2 number are day, the next 2 number are hour and the last 2 number are second", validators=[ django.core.validators.MaxValueValidator(300000000000), - django.core.validators.MinValueValidator(202101010000) - ]), + django.core.validators.MinValueValidator(202101010000), + ], + ), ), ] diff --git a/breathecode/admissions/migrations/0030_auto_20220223_1953.py b/breathecode/admissions/migrations/0030_auto_20220223_1953.py index ff68f3200..c5783f1d3 100644 --- a/breathecode/admissions/migrations/0030_auto_20220223_1953.py +++ b/breathecode/admissions/migrations/0030_auto_20220223_1953.py @@ -6,23 +6,23 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0029_auto_20211217_0248'), + ("admissions", "0029_auto_20211217_0248"), ] operations = [ migrations.AddField( - model_name='cohort', - name='current_module', + model_name="cohort", + name="current_module", field=models.IntegerField( blank=True, 
default=None, - help_text= - 'The syllabus is separated by modules, from 1 to N and the teacher decides when to start a new mobule (after a couple of days)', - null=True), + help_text="The syllabus is separated by modules, from 1 to N and the teacher decides when to start a new mobule (after a couple of days)", + null=True, + ), ), migrations.AlterField( - model_name='cohort', - name='current_day', - field=models.IntegerField(help_text='Each day the teacher takes attendancy and increases the day in one'), + model_name="cohort", + name="current_day", + field=models.IntegerField(help_text="Each day the teacher takes attendancy and increases the day in one"), ), ] diff --git a/breathecode/admissions/migrations/0031_academy_icon_url.py b/breathecode/admissions/migrations/0031_academy_icon_url.py index 88d1d0196..c0f690c2d 100644 --- a/breathecode/admissions/migrations/0031_academy_icon_url.py +++ b/breathecode/admissions/migrations/0031_academy_icon_url.py @@ -6,15 +6,15 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0030_auto_20220223_1953'), + ("admissions", "0030_auto_20220223_1953"), ] operations = [ migrations.AddField( - model_name='academy', - name='icon_url', - field=models.CharField(default='/static/icons/picture.png', - help_text='It has to be a square', - max_length=255), + model_name="academy", + name="icon_url", + field=models.CharField( + default="/static/icons/picture.png", help_text="It has to be a square", max_length=255 + ), ), ] diff --git a/breathecode/admissions/migrations/0032_auto_20220304_1747.py b/breathecode/admissions/migrations/0032_auto_20220304_1747.py index 8c4823ff3..9862c98c0 100644 --- a/breathecode/admissions/migrations/0032_auto_20220304_1747.py +++ b/breathecode/admissions/migrations/0032_auto_20220304_1747.py @@ -7,21 +7,22 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0031_academy_icon_url'), + ("admissions", "0031_academy_icon_url"), ] operations = [ migrations.RemoveField( - model_name='specialtymodetimeslot', - name='academy', + model_name="specialtymodetimeslot", + name="academy", ), migrations.AddField( - model_name='specialtymode', - name='academy', - field=models.ForeignKey(default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='admissions.academy'), + model_name="specialtymode", + name="academy", + field=models.ForeignKey( + default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to="admissions.academy" + ), + ), + migrations.DeleteModel( + name="AcademySpecialtyMode", ), - migrations.DeleteModel(name='AcademySpecialtyMode', ), ] diff --git a/breathecode/admissions/migrations/0033_rename_specialtymode_syllabusschedule.py b/breathecode/admissions/migrations/0033_rename_specialtymode_syllabusschedule.py index 1d7e0a82f..97f738d79 100644 --- a/breathecode/admissions/migrations/0033_rename_specialtymode_syllabusschedule.py +++ b/breathecode/admissions/migrations/0033_rename_specialtymode_syllabusschedule.py @@ -6,12 +6,12 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0032_auto_20220304_1747'), + ("admissions", "0032_auto_20220304_1747"), ] operations = [ migrations.RenameModel( - old_name='SpecialtyMode', - new_name='SyllabusSchedule', + old_name="SpecialtyMode", + new_name="SyllabusSchedule", ), ] diff --git a/breathecode/admissions/migrations/0034_rename_specialtymodetimeslot_syllabusscheduletimeslot.py b/breathecode/admissions/migrations/0034_rename_specialtymodetimeslot_syllabusscheduletimeslot.py index 
ee326a801..144fd4f28 100644 --- a/breathecode/admissions/migrations/0034_rename_specialtymodetimeslot_syllabusscheduletimeslot.py +++ b/breathecode/admissions/migrations/0034_rename_specialtymodetimeslot_syllabusscheduletimeslot.py @@ -6,12 +6,12 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0033_rename_specialtymode_syllabusschedule'), + ("admissions", "0033_rename_specialtymode_syllabusschedule"), ] operations = [ migrations.RenameModel( - old_name='SpecialtyModeTimeSlot', - new_name='SyllabusScheduleTimeSlot', + old_name="SpecialtyModeTimeSlot", + new_name="SyllabusScheduleTimeSlot", ), ] diff --git a/breathecode/admissions/migrations/0035_rename_specialty_mode_syllabusscheduletimeslot_schedule.py b/breathecode/admissions/migrations/0035_rename_specialty_mode_syllabusscheduletimeslot_schedule.py index 17de3d400..dee612e7c 100644 --- a/breathecode/admissions/migrations/0035_rename_specialty_mode_syllabusscheduletimeslot_schedule.py +++ b/breathecode/admissions/migrations/0035_rename_specialty_mode_syllabusscheduletimeslot_schedule.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0034_rename_specialtymodetimeslot_syllabusscheduletimeslot'), + ("admissions", "0034_rename_specialtymodetimeslot_syllabusscheduletimeslot"), ] operations = [ migrations.RenameField( - model_name='syllabusscheduletimeslot', - old_name='specialty_mode', - new_name='schedule', + model_name="syllabusscheduletimeslot", + old_name="specialty_mode", + new_name="schedule", ), ] diff --git a/breathecode/admissions/migrations/0036_rename_specialty_mode_cohort_schedule.py b/breathecode/admissions/migrations/0036_rename_specialty_mode_cohort_schedule.py index 2f27e20a9..1c9365456 100644 --- a/breathecode/admissions/migrations/0036_rename_specialty_mode_cohort_schedule.py +++ b/breathecode/admissions/migrations/0036_rename_specialty_mode_cohort_schedule.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0035_rename_specialty_mode_syllabusscheduletimeslot_schedule'), + ("admissions", "0035_rename_specialty_mode_syllabusscheduletimeslot_schedule"), ] operations = [ migrations.RenameField( - model_name='cohort', - old_name='specialty_mode', - new_name='schedule', + model_name="cohort", + old_name="specialty_mode", + new_name="schedule", ), ] diff --git a/breathecode/admissions/migrations/0037_alter_cohort_schedule.py b/breathecode/admissions/migrations/0037_alter_cohort_schedule.py index 2a57d58e3..c58a8e948 100644 --- a/breathecode/admissions/migrations/0037_alter_cohort_schedule.py +++ b/breathecode/admissions/migrations/0037_alter_cohort_schedule.py @@ -7,16 +7,15 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0036_rename_specialty_mode_cohort_schedule'), + ("admissions", "0036_rename_specialty_mode_cohort_schedule"), ] operations = [ migrations.AlterField( - model_name='cohort', - name='schedule', - field=models.ForeignKey(default=None, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to='admissions.syllabusschedule'), + model_name="cohort", + name="schedule", + field=models.ForeignKey( + default=None, null=True, on_delete=django.db.models.deletion.SET_NULL, to="admissions.syllabusschedule" + ), ), ] diff --git a/breathecode/admissions/migrations/0038_alter_cohort_syllabus_version.py b/breathecode/admissions/migrations/0038_alter_cohort_syllabus_version.py index 613f67a08..4160b84a1 100644 --- 
a/breathecode/admissions/migrations/0038_alter_cohort_syllabus_version.py +++ b/breathecode/admissions/migrations/0038_alter_cohort_syllabus_version.py @@ -7,16 +7,15 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0037_alter_cohort_schedule'), + ("admissions", "0037_alter_cohort_schedule"), ] operations = [ migrations.AlterField( - model_name='cohort', - name='syllabus_version', - field=models.ForeignKey(default=None, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to='admissions.syllabusversion'), + model_name="cohort", + name="syllabus_version", + field=models.ForeignKey( + default=None, null=True, on_delete=django.db.models.deletion.SET_NULL, to="admissions.syllabusversion" + ), ), ] diff --git a/breathecode/admissions/migrations/0039_syllabus_main_technologies.py b/breathecode/admissions/migrations/0039_syllabus_main_technologies.py index 907cc8601..7132ece52 100644 --- a/breathecode/admissions/migrations/0039_syllabus_main_technologies.py +++ b/breathecode/admissions/migrations/0039_syllabus_main_technologies.py @@ -6,17 +6,19 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0038_alter_cohort_syllabus_version'), + ("admissions", "0038_alter_cohort_syllabus_version"), ] operations = [ migrations.AddField( - model_name='syllabus', - name='main_technologies', - field=models.CharField(blank=True, - default=None, - help_text='Coma separated, E.g: HTML, CSS, Javascript', - max_length=150, - null=True), + model_name="syllabus", + name="main_technologies", + field=models.CharField( + blank=True, + default=None, + help_text="Coma separated, E.g: HTML, CSS, Javascript", + max_length=150, + null=True, + ), ), ] diff --git a/breathecode/admissions/migrations/0040_auto_20220510_2208.py b/breathecode/admissions/migrations/0040_auto_20220510_2208.py index d5849a77d..18d8a1a10 100644 --- a/breathecode/admissions/migrations/0040_auto_20220510_2208.py +++ b/breathecode/admissions/migrations/0040_auto_20220510_2208.py @@ -6,20 +6,20 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0039_syllabus_main_technologies'), + ("admissions", "0039_syllabus_main_technologies"), ] operations = [ migrations.AddField( - model_name='syllabusversion', - name='change_log_details', + model_name="syllabusversion", + name="change_log_details", field=models.TextField(blank=True, default=None, max_length=450, null=True), ), migrations.AddField( - model_name='syllabusversion', - name='status', - field=models.CharField(choices=[('PUBLISHED', 'Published'), ('DRAFT', 'Draft')], - default='PUBLISHED', - max_length=15), + model_name="syllabusversion", + name="status", + field=models.CharField( + choices=[("PUBLISHED", "Published"), ("DRAFT", "Draft")], default="PUBLISHED", max_length=15 + ), ), ] diff --git a/breathecode/admissions/migrations/0041_cohortuser_watching.py b/breathecode/admissions/migrations/0041_cohortuser_watching.py index 8de0cc50e..3d67c9ca5 100644 --- a/breathecode/admissions/migrations/0041_cohortuser_watching.py +++ b/breathecode/admissions/migrations/0041_cohortuser_watching.py @@ -6,14 +6,15 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0040_auto_20220510_2208'), + ("admissions", "0040_auto_20220510_2208"), ] operations = [ migrations.AddField( - model_name='cohortuser', - name='watching', - field=models.BooleanField(default=False, - help_text='You can active students to the watch list and monitor them closely'), + model_name="cohortuser", + name="watching", + 
field=models.BooleanField( + default=False, help_text="You can active students to the watch list and monitor them closely" + ), ), ] diff --git a/breathecode/admissions/migrations/0042_academy_white_labeled.py b/breathecode/admissions/migrations/0042_academy_white_labeled.py index fac2eb020..290bc6246 100644 --- a/breathecode/admissions/migrations/0042_academy_white_labeled.py +++ b/breathecode/admissions/migrations/0042_academy_white_labeled.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0041_cohortuser_watching'), + ("admissions", "0041_cohortuser_watching"), ] operations = [ migrations.AddField( - model_name='academy', - name='white_labeled', + model_name="academy", + name="white_labeled", field=models.BooleanField(default=False), ), ] diff --git a/breathecode/admissions/migrations/0043_auto_20220830_1505.py b/breathecode/admissions/migrations/0043_auto_20220830_1505.py index 9a5495100..57cbbc2c6 100644 --- a/breathecode/admissions/migrations/0043_auto_20220830_1505.py +++ b/breathecode/admissions/migrations/0043_auto_20220830_1505.py @@ -6,23 +6,25 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0042_academy_white_labeled'), + ("admissions", "0042_academy_white_labeled"), ] operations = [ migrations.AddField( - model_name='cohort', - name='history_log', + model_name="cohort", + name="history_log", field=models.JSONField( blank=True, default=None, - help_text='The cohort history will save attendancy and information about progress on each class', - null=True), + help_text="The cohort history will save attendancy and information about progress on each class", + null=True, + ), ), migrations.AlterField( - model_name='cohort', - name='current_day', - field=models.IntegerField(default=1, - help_text='Each day the teacher takes attendancy and increases the day in one'), + model_name="cohort", + name="current_day", + field=models.IntegerField( + default=1, help_text="Each day the teacher takes attendancy and increases the day in one" + ), ), ] diff --git a/breathecode/admissions/migrations/0044_auto_20220912_1928.py b/breathecode/admissions/migrations/0044_auto_20220912_1928.py index 38540cb38..bd8bd201c 100644 --- a/breathecode/admissions/migrations/0044_auto_20220912_1928.py +++ b/breathecode/admissions/migrations/0044_auto_20220912_1928.py @@ -6,25 +6,25 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0043_auto_20220830_1505'), + ("admissions", "0043_auto_20220830_1505"), ] operations = [ migrations.AddField( - model_name='syllabusversion', - name='integrity_check_at', + model_name="syllabusversion", + name="integrity_check_at", field=models.DateTimeField(blank=True, default=None, null=True), ), migrations.AddField( - model_name='syllabusversion', - name='integrity_report', + model_name="syllabusversion", + name="integrity_report", field=models.JSONField(blank=True, default=None, null=True), ), migrations.AddField( - model_name='syllabusversion', - name='integrity_status', - field=models.CharField(choices=[('ERROR', 'Error'), ('PENDING', 'Pending'), ('OK', 'Ok')], - default='PENDING', - max_length=15), + model_name="syllabusversion", + name="integrity_status", + field=models.CharField( + choices=[("ERROR", "Error"), ("PENDING", "Pending"), ("OK", "Ok")], default="PENDING", max_length=15 + ), ), ] diff --git a/breathecode/admissions/migrations/0045_alter_syllabusversion_integrity_status.py b/breathecode/admissions/migrations/0045_alter_syllabusversion_integrity_status.py index 
47a772332..9cf8c705b 100644 --- a/breathecode/admissions/migrations/0045_alter_syllabusversion_integrity_status.py +++ b/breathecode/admissions/migrations/0045_alter_syllabusversion_integrity_status.py @@ -6,16 +6,17 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0044_auto_20220912_1928'), + ("admissions", "0044_auto_20220912_1928"), ] operations = [ migrations.AlterField( - model_name='syllabusversion', - name='integrity_status', - field=models.CharField(choices=[('ERROR', 'Error'), ('PENDING', 'Pending'), ('WARNING', 'Warning'), - ('OK', 'Ok')], - default='PENDING', - max_length=15), + model_name="syllabusversion", + name="integrity_status", + field=models.CharField( + choices=[("ERROR", "Error"), ("PENDING", "Pending"), ("WARNING", "Warning"), ("OK", "Ok")], + default="PENDING", + max_length=15, + ), ), ] diff --git a/breathecode/admissions/migrations/0046_academy_available_as_saas.py b/breathecode/admissions/migrations/0046_academy_available_as_saas.py index a201f8584..7f1113d96 100644 --- a/breathecode/admissions/migrations/0046_academy_available_as_saas.py +++ b/breathecode/admissions/migrations/0046_academy_available_as_saas.py @@ -6,14 +6,15 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0045_alter_syllabusversion_integrity_status'), + ("admissions", "0045_alter_syllabusversion_integrity_status"), ] operations = [ migrations.AddField( - model_name='academy', - name='available_as_saas', - field=models.BooleanField(default=False, - help_text='Academies available as SAAS will be sold thru 4Geeks.com'), + model_name="academy", + name="available_as_saas", + field=models.BooleanField( + default=False, help_text="Academies available as SAAS will be sold thru 4Geeks.com" + ), ), ] diff --git a/breathecode/admissions/migrations/0046_auto_20220920_1117.py b/breathecode/admissions/migrations/0046_auto_20220920_1117.py index 0e58d9dbd..80d1c53d5 100644 --- a/breathecode/admissions/migrations/0046_auto_20220920_1117.py +++ b/breathecode/admissions/migrations/0046_auto_20220920_1117.py @@ -7,62 +7,73 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0045_alter_syllabusversion_integrity_status'), + ("admissions", "0045_alter_syllabusversion_integrity_status"), ] operations = [ migrations.AlterField( - model_name='academy', - name='active_campaign_slug', + model_name="academy", + name="active_campaign_slug", field=models.SlugField(blank=True, default=None, max_length=100, null=True), ), migrations.AlterField( - model_name='academy', - name='timezone', + model_name="academy", + name="timezone", field=models.CharField(blank=True, default=None, max_length=50, null=True), ), migrations.AlterField( - model_name='cohort', - name='schedule', - field=models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to='admissions.syllabusschedule'), + model_name="cohort", + name="schedule", + field=models.ForeignKey( + blank=True, + default=None, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to="admissions.syllabusschedule", + ), ), migrations.AlterField( - model_name='cohort', - name='syllabus_version', - field=models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to='admissions.syllabusversion'), + model_name="cohort", + name="syllabus_version", + field=models.ForeignKey( + blank=True, + default=None, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to="admissions.syllabusversion", + ), ), 
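The hunks in this patch are purely mechanical: single quotes become double quotes and over-long calls are exploded onto one argument per line with a trailing comma. That is the shape of output an auto-formatter such as Black produces; the diff itself does not name the tool or its line-length setting, so the snippet below is only a sketch of how such a pass could be reproduced, assuming Black is installed and using an invented line length.

import black

# One of the pre-change field declarations from the migration above, collapsed onto a
# single line. Black only parses the source, so the undefined names are harmless here.
before = (
    "field = models.ForeignKey(blank=True, default=None, null=True, "
    "on_delete=django.db.models.deletion.SET_NULL, to='admissions.syllabusversion')"
)

# line_length=100 is an assumption for illustration; the real setting is not stated in the diff.
after = black.format_str(before, mode=black.Mode(line_length=100))
print(after)  # quotes normalized to double quotes, arguments wrapped one per line with a trailing comma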
migrations.AlterField( - model_name='cohort', - name='timezone', + model_name="cohort", + name="timezone", field=models.CharField(blank=True, default=None, max_length=50, null=True), ), migrations.AlterField( - model_name='cohortuser', - name='educational_status', - field=models.CharField(blank=True, - choices=[('ACTIVE', 'Active'), ('POSTPONED', 'Postponed'), - ('GRADUATED', 'Graduated'), ('SUSPENDED', 'Suspended'), - ('DROPPED', 'Dropped')], - default=None, - max_length=15, - null=True), + model_name="cohortuser", + name="educational_status", + field=models.CharField( + blank=True, + choices=[ + ("ACTIVE", "Active"), + ("POSTPONED", "Postponed"), + ("GRADUATED", "Graduated"), + ("SUSPENDED", "Suspended"), + ("DROPPED", "Dropped"), + ], + default=None, + max_length=15, + null=True, + ), ), migrations.AlterField( - model_name='cohortuser', - name='finantial_status', - field=models.CharField(blank=True, - choices=[('FULLY_PAID', 'Fully Paid'), ('UP_TO_DATE', 'Up to date'), - ('LATE', 'Late')], - default=None, - max_length=15, - null=True), + model_name="cohortuser", + name="finantial_status", + field=models.CharField( + blank=True, + choices=[("FULLY_PAID", "Fully Paid"), ("UP_TO_DATE", "Up to date"), ("LATE", "Late")], + default=None, + max_length=15, + null=True, + ), ), ] diff --git a/breathecode/admissions/migrations/0047_merge_20220924_0611.py b/breathecode/admissions/migrations/0047_merge_20220924_0611.py index 8d6e0edd9..36f8bfc0a 100644 --- a/breathecode/admissions/migrations/0047_merge_20220924_0611.py +++ b/breathecode/admissions/migrations/0047_merge_20220924_0611.py @@ -6,8 +6,8 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0046_academy_available_as_saas'), - ('admissions', '0046_auto_20220920_1117'), + ("admissions", "0046_academy_available_as_saas"), + ("admissions", "0046_auto_20220920_1117"), ] operations = [] diff --git a/breathecode/admissions/migrations/0048_academy_main_currency.py b/breathecode/admissions/migrations/0048_academy_main_currency.py index cf1bf0669..5b5735c23 100644 --- a/breathecode/admissions/migrations/0048_academy_main_currency.py +++ b/breathecode/admissions/migrations/0048_academy_main_currency.py @@ -7,18 +7,20 @@ class Migration(migrations.Migration): dependencies = [ - ('payments', '0001_initial'), - ('admissions', '0047_merge_20220924_0611'), + ("payments", "0001_initial"), + ("admissions", "0047_merge_20220924_0611"), ] operations = [ migrations.AddField( - model_name='academy', - name='main_currency', - field=models.ForeignKey(blank=True, - null=True, - on_delete=django.db.models.deletion.CASCADE, - related_name='+', - to='payments.currency'), + model_name="academy", + name="main_currency", + field=models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.CASCADE, + related_name="+", + to="payments.currency", + ), ), ] diff --git a/breathecode/admissions/migrations/0049_auto_20221229_1616.py b/breathecode/admissions/migrations/0049_auto_20221229_1616.py index b0194324c..80e509648 100644 --- a/breathecode/admissions/migrations/0049_auto_20221229_1616.py +++ b/breathecode/admissions/migrations/0049_auto_20221229_1616.py @@ -6,24 +6,26 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0048_academy_main_currency'), + ("admissions", "0048_academy_main_currency"), ] operations = [ migrations.AddField( - model_name='academy', - name='is_hidden_on_prework', + model_name="academy", + name="is_hidden_on_prework", field=models.BooleanField( default=True, - 
help_text="Determines if the cohorts will be shown in the dashboard if it's status is 'PREWORK'"), + help_text="Determines if the cohorts will be shown in the dashboard if it's status is 'PREWORK'", + ), ), migrations.AddField( - model_name='cohort', - name='is_hidden_on_prework', + model_name="cohort", + name="is_hidden_on_prework", field=models.BooleanField( blank=True, default=None, help_text="Determines if the cohort will be shown in the dashboard if it's status is 'PREWORK'", - null=True), + null=True, + ), ), ] diff --git a/breathecode/admissions/migrations/0050_alter_cohort_is_hidden_on_prework.py b/breathecode/admissions/migrations/0050_alter_cohort_is_hidden_on_prework.py index b7f1f8083..fcc70f0bf 100644 --- a/breathecode/admissions/migrations/0050_alter_cohort_is_hidden_on_prework.py +++ b/breathecode/admissions/migrations/0050_alter_cohort_is_hidden_on_prework.py @@ -6,17 +6,18 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0049_auto_20221229_1616'), + ("admissions", "0049_auto_20221229_1616"), ] operations = [ migrations.AlterField( - model_name='cohort', - name='is_hidden_on_prework', + model_name="cohort", + name="is_hidden_on_prework", field=models.BooleanField( blank=True, default=True, help_text="Determines if the cohort will be shown in the dashboard if it's status is 'PREWORK'", - null=True), + null=True, + ), ), ] diff --git a/breathecode/admissions/migrations/0051_auto_20230117_2219.py b/breathecode/admissions/migrations/0051_auto_20230117_2219.py index a6d5417e3..448714cbe 100644 --- a/breathecode/admissions/migrations/0051_auto_20230117_2219.py +++ b/breathecode/admissions/migrations/0051_auto_20230117_2219.py @@ -6,24 +6,22 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0050_alter_cohort_is_hidden_on_prework'), + ("admissions", "0050_alter_cohort_is_hidden_on_prework"), ] operations = [ migrations.AddField( - model_name='cohorttimeslot', - name='removed_at', - field=models.DateTimeField(blank=True, - default=None, - help_text='This will be available until this date', - null=True), + model_name="cohorttimeslot", + name="removed_at", + field=models.DateTimeField( + blank=True, default=None, help_text="This will be available until this date", null=True + ), ), migrations.AddField( - model_name='syllabusscheduletimeslot', - name='removed_at', - field=models.DateTimeField(blank=True, - default=None, - help_text='This will be available until this date', - null=True), + model_name="syllabusscheduletimeslot", + name="removed_at", + field=models.DateTimeField( + blank=True, default=None, help_text="This will be available until this date", null=True + ), ), ] diff --git a/breathecode/admissions/migrations/0052_alter_cohort_kickoff_date.py b/breathecode/admissions/migrations/0052_alter_cohort_kickoff_date.py index 9cccd8c12..64ba555dc 100644 --- a/breathecode/admissions/migrations/0052_alter_cohort_kickoff_date.py +++ b/breathecode/admissions/migrations/0052_alter_cohort_kickoff_date.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0051_auto_20230117_2219'), + ("admissions", "0051_auto_20230117_2219"), ] operations = [ migrations.AlterField( - model_name='cohort', - name='kickoff_date', + model_name="cohort", + name="kickoff_date", field=models.DateTimeField(blank=True, null=True), ), ] diff --git a/breathecode/admissions/migrations/0053_alter_cohort_kickoff_date.py b/breathecode/admissions/migrations/0053_alter_cohort_kickoff_date.py index 152721750..0c86336af 100644 --- 
a/breathecode/admissions/migrations/0053_alter_cohort_kickoff_date.py +++ b/breathecode/admissions/migrations/0053_alter_cohort_kickoff_date.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0052_alter_cohort_kickoff_date'), + ("admissions", "0052_alter_cohort_kickoff_date"), ] operations = [ migrations.AlterField( - model_name='cohort', - name='kickoff_date', + model_name="cohort", + name="kickoff_date", field=models.DateTimeField(), ), ] diff --git a/breathecode/admissions/migrations/0054_cohortuser_history_log.py b/breathecode/admissions/migrations/0054_cohortuser_history_log.py index 87f3d5599..c597538c9 100644 --- a/breathecode/admissions/migrations/0054_cohortuser_history_log.py +++ b/breathecode/admissions/migrations/0054_cohortuser_history_log.py @@ -6,16 +6,17 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0053_alter_cohort_kickoff_date'), + ("admissions", "0053_alter_cohort_kickoff_date"), ] operations = [ migrations.AddField( - model_name='cohortuser', - name='history_log', + model_name="cohortuser", + name="history_log", field=models.JSONField( blank=True, default={}, - help_text='The cohort user log will save attendancy and information about progress on each class'), + help_text="The cohort user log will save attendancy and information about progress on each class", + ), ), ] diff --git a/breathecode/admissions/migrations/0055_cohort_available_as_saas.py b/breathecode/admissions/migrations/0055_cohort_available_as_saas.py index 40b0a6109..3f1d2a77a 100644 --- a/breathecode/admissions/migrations/0055_cohort_available_as_saas.py +++ b/breathecode/admissions/migrations/0055_cohort_available_as_saas.py @@ -6,14 +6,15 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0054_cohortuser_history_log'), + ("admissions", "0054_cohortuser_history_log"), ] operations = [ migrations.AddField( - model_name='cohort', - name='available_as_saas', - field=models.BooleanField(default=False, - help_text='Cohorts available as SAAS will be sold through plans at 4Geeks.com'), + model_name="cohort", + name="available_as_saas", + field=models.BooleanField( + default=False, help_text="Cohorts available as SAAS will be sold through plans at 4Geeks.com" + ), ), ] diff --git a/breathecode/admissions/migrations/0056_auto_20230317_1657.py b/breathecode/admissions/migrations/0056_auto_20230317_1657.py index 72a997646..80957307a 100644 --- a/breathecode/admissions/migrations/0056_auto_20230317_1657.py +++ b/breathecode/admissions/migrations/0056_auto_20230317_1657.py @@ -6,22 +6,23 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0055_cohort_available_as_saas'), + ("admissions", "0055_cohort_available_as_saas"), ] operations = [ migrations.AddField( - model_name='cohort', - name='accepts_enrollment_suggestions', - field=models.BooleanField(default=True, - help_text='The system will suggest won leads to be added to this cohort'), + model_name="cohort", + name="accepts_enrollment_suggestions", + field=models.BooleanField( + default=True, help_text="The system will suggest won leads to be added to this cohort" + ), ), migrations.AlterField( - model_name='cohort', - name='private', + model_name="cohort", + name="private", field=models.BooleanField( default=False, - help_text= - 'It will not show on the public API endpoints but you will still be able to add people manually'), + help_text="It will not show on the public API endpoints but you will still be able to add people manually", + ), 
), ] diff --git a/breathecode/admissions/migrations/0057_cohort_intro_video.py b/breathecode/admissions/migrations/0057_cohort_intro_video.py index 3e9013652..c515cee5f 100644 --- a/breathecode/admissions/migrations/0057_cohort_intro_video.py +++ b/breathecode/admissions/migrations/0057_cohort_intro_video.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0056_auto_20230317_1657'), + ("admissions", "0056_auto_20230317_1657"), ] operations = [ migrations.AddField( - model_name='cohort', - name='intro_video', + model_name="cohort", + name="intro_video", field=models.URLField(blank=True, default=None, null=True), ), ] diff --git a/breathecode/admissions/migrations/0058_alter_cohort_available_as_saas.py b/breathecode/admissions/migrations/0058_alter_cohort_available_as_saas.py index b1349f5c5..1b88638ba 100644 --- a/breathecode/admissions/migrations/0058_alter_cohort_available_as_saas.py +++ b/breathecode/admissions/migrations/0058_alter_cohort_available_as_saas.py @@ -6,16 +6,18 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0057_cohort_intro_video'), + ("admissions", "0057_cohort_intro_video"), ] operations = [ migrations.AlterField( - model_name='cohort', - name='available_as_saas', - field=models.BooleanField(blank=True, - default=False, - help_text='Cohorts available as SAAS will be sold through plans at 4Geeks.com', - null=True), + model_name="cohort", + name="available_as_saas", + field=models.BooleanField( + blank=True, + default=False, + help_text="Cohorts available as SAAS will be sold through plans at 4Geeks.com", + null=True, + ), ), ] diff --git a/breathecode/admissions/migrations/0059_alter_cohortuser_history_log.py b/breathecode/admissions/migrations/0059_alter_cohortuser_history_log.py index ed2311d99..7b662e2c4 100644 --- a/breathecode/admissions/migrations/0059_alter_cohortuser_history_log.py +++ b/breathecode/admissions/migrations/0059_alter_cohortuser_history_log.py @@ -6,16 +6,17 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0058_alter_cohort_available_as_saas'), + ("admissions", "0058_alter_cohort_available_as_saas"), ] operations = [ migrations.AlterField( - model_name='cohortuser', - name='history_log', + model_name="cohortuser", + name="history_log", field=models.JSONField( blank=True, default=dict, - help_text='The cohort user log will save attendancy and information about progress on each class'), + help_text="The cohort user log will save attendancy and information about progress on each class", + ), ), ] diff --git a/breathecode/admissions/migrations/0060_alter_cohortuser_educational_status.py b/breathecode/admissions/migrations/0060_alter_cohortuser_educational_status.py index 262774f94..7978e46b3 100644 --- a/breathecode/admissions/migrations/0060_alter_cohortuser_educational_status.py +++ b/breathecode/admissions/migrations/0060_alter_cohortuser_educational_status.py @@ -6,19 +6,25 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0059_alter_cohortuser_history_log'), + ("admissions", "0059_alter_cohortuser_history_log"), ] operations = [ migrations.AlterField( - model_name='cohortuser', - name='educational_status', - field=models.CharField(blank=True, - choices=[('ACTIVE', 'Active'), ('POSTPONED', 'Postponed'), - ('GRADUATED', 'Graduated'), ('SUSPENDED', 'Suspended'), - ('DROPPED', 'Dropped')], - default='ACTIVE', - max_length=15, - null=True), + model_name="cohortuser", + name="educational_status", + field=models.CharField( + 
blank=True, + choices=[ + ("ACTIVE", "Active"), + ("POSTPONED", "Postponed"), + ("GRADUATED", "Graduated"), + ("SUSPENDED", "Suspended"), + ("DROPPED", "Dropped"), + ], + default="ACTIVE", + max_length=15, + null=True, + ), ), ] diff --git a/breathecode/admissions/migrations/0061_academy_white_label_url.py b/breathecode/admissions/migrations/0061_academy_white_label_url.py index f057086c0..60fd51f58 100644 --- a/breathecode/admissions/migrations/0061_academy_white_label_url.py +++ b/breathecode/admissions/migrations/0061_academy_white_label_url.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0060_alter_cohortuser_educational_status'), + ("admissions", "0060_alter_cohortuser_educational_status"), ] operations = [ migrations.AddField( - model_name='academy', - name='white_label_url', + model_name="academy", + name="white_label_url", field=models.CharField(blank=True, default=None, max_length=255, null=True), ), ] diff --git a/breathecode/admissions/migrations/0062_syllabus_is_documentation.py b/breathecode/admissions/migrations/0062_syllabus_is_documentation.py index f02b45e84..36e1edf3e 100644 --- a/breathecode/admissions/migrations/0062_syllabus_is_documentation.py +++ b/breathecode/admissions/migrations/0062_syllabus_is_documentation.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0061_academy_white_label_url'), + ("admissions", "0061_academy_white_label_url"), ] operations = [ migrations.AddField( - model_name='syllabus', - name='is_documentation', + model_name="syllabus", + name="is_documentation", field=models.BooleanField(default=False), ), ] diff --git a/breathecode/admissions/migrations/0063_auto_20231017_0605.py b/breathecode/admissions/migrations/0063_auto_20231017_0605.py index 314c7a24a..187817c2c 100644 --- a/breathecode/admissions/migrations/0063_auto_20231017_0605.py +++ b/breathecode/admissions/migrations/0063_auto_20231017_0605.py @@ -7,364 +7,398 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0062_syllabus_is_documentation'), + ("admissions", "0062_syllabus_is_documentation"), ] operations = [ migrations.AlterField( - model_name='academy', - name='available_as_saas', - field=models.BooleanField(db_index=True, - default=False, - help_text='Academies available as SAAS will be sold thru 4Geeks.com'), + model_name="academy", + name="available_as_saas", + field=models.BooleanField( + db_index=True, default=False, help_text="Academies available as SAAS will be sold thru 4Geeks.com" + ), ), migrations.AlterField( - model_name='academy', - name='is_hidden_on_prework', + model_name="academy", + name="is_hidden_on_prework", field=models.BooleanField( db_index=True, default=True, - help_text="Determines if the cohorts will be shown in the dashboard if it's status is 'PREWORK'"), + help_text="Determines if the cohorts will be shown in the dashboard if it's status is 'PREWORK'", + ), ), migrations.AlterField( - model_name='academy', - name='latitude', + model_name="academy", + name="latitude", field=models.DecimalField(blank=True, db_index=True, decimal_places=6, max_digits=9, null=True), ), migrations.AlterField( - model_name='academy', - name='longitude', + model_name="academy", + name="longitude", field=models.DecimalField(blank=True, db_index=True, decimal_places=6, max_digits=9, null=True), ), migrations.AlterField( - model_name='academy', - name='name', + model_name="academy", + name="name", field=models.CharField(db_index=True, max_length=150), ), 
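The long run of AlterField operations in this migration only re-renders existing fields (adding db_index=True and reflowing the choices lists); the keyword arguments shown are essentially what Django's migration writer gets back from Field.deconstruct(). A minimal standalone sketch, assuming only that a recent Django is installed — no project settings are needed just to instantiate and deconstruct a field:

from django.db import models

# Rebuild one of the fields from the 0063 migration in isolation (the academy status field).
field = models.CharField(
    choices=[("INACTIVE", "Inactive"), ("ACTIVE", "Active"), ("DELETED", "Deleted")],
    db_index=True,
    default="ACTIVE",
    max_length=15,
)

# deconstruct() is what makemigrations uses to serialize a field into the
# AlterField(...) keyword arguments seen throughout this diff.
name, path, args, kwargs = field.deconstruct()
print(path)    # django.db.models.CharField
print(kwargs)  # includes choices, db_index, default and max_length, as in the migration above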
migrations.AlterField( - model_name='academy', - name='status', - field=models.CharField(choices=[('INACTIVE', 'Inactive'), ('ACTIVE', 'Active'), ('DELETED', 'Deleted')], - db_index=True, - default='ACTIVE', - max_length=15), + model_name="academy", + name="status", + field=models.CharField( + choices=[("INACTIVE", "Inactive"), ("ACTIVE", "Active"), ("DELETED", "Deleted")], + db_index=True, + default="ACTIVE", + max_length=15, + ), ), migrations.AlterField( - model_name='academy', - name='timezone', + model_name="academy", + name="timezone", field=models.CharField(blank=True, db_index=True, default=None, max_length=50, null=True), ), migrations.AlterField( - model_name='academy', - name='zip_code', + model_name="academy", + name="zip_code", field=models.IntegerField(blank=True, db_index=True, null=True), ), migrations.AlterField( - model_name='city', - name='name', + model_name="city", + name="name", field=models.CharField(db_index=True, max_length=30), ), migrations.AlterField( - model_name='cohort', - name='accepts_enrollment_suggestions', - field=models.BooleanField(db_index=True, - default=True, - help_text='The system will suggest won leads to be added to this cohort'), + model_name="cohort", + name="accepts_enrollment_suggestions", + field=models.BooleanField( + db_index=True, default=True, help_text="The system will suggest won leads to be added to this cohort" + ), ), migrations.AlterField( - model_name='cohort', - name='available_as_saas', - field=models.BooleanField(blank=True, - db_index=True, - default=False, - help_text='Cohorts available as SAAS will be sold through plans at 4Geeks.com', - null=True), + model_name="cohort", + name="available_as_saas", + field=models.BooleanField( + blank=True, + db_index=True, + default=False, + help_text="Cohorts available as SAAS will be sold through plans at 4Geeks.com", + null=True, + ), ), migrations.AlterField( - model_name='cohort', - name='ending_date', + model_name="cohort", + name="ending_date", field=models.DateTimeField(blank=True, db_index=True, null=True), ), migrations.AlterField( - model_name='cohort', - name='is_hidden_on_prework', + model_name="cohort", + name="is_hidden_on_prework", field=models.BooleanField( blank=True, db_index=True, default=True, help_text="Determines if the cohort will be shown in the dashboard if it's status is 'PREWORK'", - null=True), + null=True, + ), ), migrations.AlterField( - model_name='cohort', - name='kickoff_date', + model_name="cohort", + name="kickoff_date", field=models.DateTimeField(db_index=True), ), migrations.AlterField( - model_name='cohort', - name='language', - field=models.CharField(db_index=True, default='en', max_length=2), + model_name="cohort", + name="language", + field=models.CharField(db_index=True, default="en", max_length=2), ), migrations.AlterField( - model_name='cohort', - name='name', + model_name="cohort", + name="name", field=models.CharField(db_index=True, max_length=150), ), migrations.AlterField( - model_name='cohort', - name='never_ends', + model_name="cohort", + name="never_ends", field=models.BooleanField(db_index=True, default=False), ), migrations.AlterField( - model_name='cohort', - name='private', + model_name="cohort", + name="private", field=models.BooleanField( db_index=True, default=False, - help_text= - 'It will not show on the public API endpoints but you will still be able to add people manually'), + help_text="It will not show on the public API endpoints but you will still be able to add people manually", + ), ), migrations.AlterField( - 
model_name='cohort', - name='remote_available', + model_name="cohort", + name="remote_available", field=models.BooleanField( db_index=True, default=True, - help_text='True (default) if the students from other cities can take it from home'), + help_text="True (default) if the students from other cities can take it from home", + ), ), migrations.AlterField( - model_name='cohort', - name='slug', + model_name="cohort", + name="slug", field=models.CharField(db_index=True, max_length=150, unique=True), ), migrations.AlterField( - model_name='cohort', - name='stage', - field=models.CharField(choices=[('INACTIVE', 'Inactive'), ('PREWORK', 'Prework'), ('STARTED', 'Started'), - ('FINAL_PROJECT', 'Final Project'), ('ENDED', 'Ended'), - ('DELETED', 'Deleted')], - db_index=True, - default='INACTIVE', - max_length=15), + model_name="cohort", + name="stage", + field=models.CharField( + choices=[ + ("INACTIVE", "Inactive"), + ("PREWORK", "Prework"), + ("STARTED", "Started"), + ("FINAL_PROJECT", "Final Project"), + ("ENDED", "Ended"), + ("DELETED", "Deleted"), + ], + db_index=True, + default="INACTIVE", + max_length=15, + ), ), migrations.AlterField( - model_name='cohort', - name='timezone', + model_name="cohort", + name="timezone", field=models.CharField(blank=True, db_index=True, default=None, max_length=50, null=True), ), migrations.AlterField( - model_name='cohorttimeslot', - name='ending_at', + model_name="cohorttimeslot", + name="ending_at", field=models.BigIntegerField( db_index=True, default=202101010000, - help_text= - 'The first 4 number are year, the next 2 number are month, the next 2 number are day, the next 2 number are hour and the last 2 number are second', + help_text="The first 4 number are year, the next 2 number are month, the next 2 number are day, the next 2 number are hour and the last 2 number are second", validators=[ django.core.validators.MaxValueValidator(300000000000), - django.core.validators.MinValueValidator(202101010000) - ]), + django.core.validators.MinValueValidator(202101010000), + ], + ), ), migrations.AlterField( - model_name='cohorttimeslot', - name='recurrency_type', - field=models.CharField(choices=[('DAILY', 'Daily'), ('WEEKLY', 'Weekly'), ('MONTHLY', 'Monthly')], - db_index=True, - default='WEEKLY', - max_length=10), + model_name="cohorttimeslot", + name="recurrency_type", + field=models.CharField( + choices=[("DAILY", "Daily"), ("WEEKLY", "Weekly"), ("MONTHLY", "Monthly")], + db_index=True, + default="WEEKLY", + max_length=10, + ), ), migrations.AlterField( - model_name='cohorttimeslot', - name='recurrent', + model_name="cohorttimeslot", + name="recurrent", field=models.BooleanField(db_index=True, default=True), ), migrations.AlterField( - model_name='cohorttimeslot', - name='removed_at', - field=models.DateTimeField(blank=True, - db_index=True, - default=None, - help_text='This will be available until this date', - null=True), + model_name="cohorttimeslot", + name="removed_at", + field=models.DateTimeField( + blank=True, db_index=True, default=None, help_text="This will be available until this date", null=True + ), ), migrations.AlterField( - model_name='cohorttimeslot', - name='starting_at', + model_name="cohorttimeslot", + name="starting_at", field=models.BigIntegerField( db_index=True, default=202101010000, - help_text= - 'The first 4 number are year, the next 2 number are month, the next 2 number are day, the next 2 number are hour and the last 2 number are second', + help_text="The first 4 number are year, the next 2 number are month, the next 2 number are 
day, the next 2 number are hour and the last 2 number are second", validators=[ django.core.validators.MaxValueValidator(300000000000), - django.core.validators.MinValueValidator(202101010000) - ]), + django.core.validators.MinValueValidator(202101010000), + ], + ), ), migrations.AlterField( - model_name='cohorttimeslot', - name='timezone', - field=models.CharField(db_index=True, default='America/New_York', max_length=50), + model_name="cohorttimeslot", + name="timezone", + field=models.CharField(db_index=True, default="America/New_York", max_length=50), ), migrations.AlterField( - model_name='cohortuser', - name='educational_status', - field=models.CharField(blank=True, - choices=[('ACTIVE', 'Active'), ('POSTPONED', 'Postponed'), - ('GRADUATED', 'Graduated'), ('SUSPENDED', 'Suspended'), - ('DROPPED', 'Dropped')], - db_index=True, - default='ACTIVE', - max_length=15, - null=True), - ), - migrations.AlterField( - model_name='cohortuser', - name='finantial_status', - field=models.CharField(blank=True, - choices=[('FULLY_PAID', 'Fully Paid'), ('UP_TO_DATE', 'Up to date'), - ('LATE', 'Late')], - db_index=True, - default=None, - max_length=15, - null=True), + model_name="cohortuser", + name="educational_status", + field=models.CharField( + blank=True, + choices=[ + ("ACTIVE", "Active"), + ("POSTPONED", "Postponed"), + ("GRADUATED", "Graduated"), + ("SUSPENDED", "Suspended"), + ("DROPPED", "Dropped"), + ], + db_index=True, + default="ACTIVE", + max_length=15, + null=True, + ), ), migrations.AlterField( - model_name='cohortuser', - name='role', - field=models.CharField(choices=[('TEACHER', 'Teacher'), ('ASSISTANT', 'Assistant'), - ('REVIEWER', 'Reviewer'), ('STUDENT', 'Student')], - db_index=True, - default='STUDENT', - max_length=9), + model_name="cohortuser", + name="finantial_status", + field=models.CharField( + blank=True, + choices=[("FULLY_PAID", "Fully Paid"), ("UP_TO_DATE", "Up to date"), ("LATE", "Late")], + db_index=True, + default=None, + max_length=15, + null=True, + ), + ), + migrations.AlterField( + model_name="cohortuser", + name="role", + field=models.CharField( + choices=[ + ("TEACHER", "Teacher"), + ("ASSISTANT", "Assistant"), + ("REVIEWER", "Reviewer"), + ("STUDENT", "Student"), + ], + db_index=True, + default="STUDENT", + max_length=9, + ), ), migrations.AlterField( - model_name='cohortuser', - name='watching', - field=models.BooleanField(db_index=True, - default=False, - help_text='You can active students to the watch list and monitor them closely'), + model_name="cohortuser", + name="watching", + field=models.BooleanField( + db_index=True, + default=False, + help_text="You can active students to the watch list and monitor them closely", + ), ), migrations.AlterField( - model_name='country', - name='code', + model_name="country", + name="code", field=models.CharField(db_index=True, max_length=3, primary_key=True, serialize=False), ), migrations.AlterField( - model_name='country', - name='name', + model_name="country", + name="name", field=models.CharField(db_index=True, max_length=30), ), migrations.AlterField( - model_name='syllabus', - name='is_documentation', + model_name="syllabus", + name="is_documentation", field=models.BooleanField(db_index=True, default=False), ), migrations.AlterField( - model_name='syllabus', - name='name', + model_name="syllabus", + name="name", field=models.CharField(blank=True, db_index=True, default=None, max_length=150, null=True), ), migrations.AlterField( - model_name='syllabus', - name='private', + model_name="syllabus", + name="private", 
field=models.BooleanField(db_index=True, default=False), ), migrations.AlterField( - model_name='syllabusschedule', - name='description', + model_name="syllabusschedule", + name="description", field=models.TextField(db_index=True, max_length=450), ), migrations.AlterField( - model_name='syllabusschedule', - name='name', + model_name="syllabusschedule", + name="name", field=models.CharField(db_index=True, max_length=150), ), migrations.AlterField( - model_name='syllabusschedule', - name='schedule_type', - field=models.CharField(choices=[('PART-TIME', 'Part-Time'), ('FULL-TIME', 'Full-Time')], - db_index=True, - default='PART-TIME', - max_length=15), + model_name="syllabusschedule", + name="schedule_type", + field=models.CharField( + choices=[("PART-TIME", "Part-Time"), ("FULL-TIME", "Full-Time")], + db_index=True, + default="PART-TIME", + max_length=15, + ), ), migrations.AlterField( - model_name='syllabusscheduletimeslot', - name='ending_at', + model_name="syllabusscheduletimeslot", + name="ending_at", field=models.BigIntegerField( db_index=True, default=202101010000, - help_text= - 'The first 4 number are year, the next 2 number are month, the next 2 number are day, the next 2 number are hour and the last 2 number are second', + help_text="The first 4 number are year, the next 2 number are month, the next 2 number are day, the next 2 number are hour and the last 2 number are second", validators=[ django.core.validators.MaxValueValidator(300000000000), - django.core.validators.MinValueValidator(202101010000) - ]), + django.core.validators.MinValueValidator(202101010000), + ], + ), ), migrations.AlterField( - model_name='syllabusscheduletimeslot', - name='recurrency_type', - field=models.CharField(choices=[('DAILY', 'Daily'), ('WEEKLY', 'Weekly'), ('MONTHLY', 'Monthly')], - db_index=True, - default='WEEKLY', - max_length=10), + model_name="syllabusscheduletimeslot", + name="recurrency_type", + field=models.CharField( + choices=[("DAILY", "Daily"), ("WEEKLY", "Weekly"), ("MONTHLY", "Monthly")], + db_index=True, + default="WEEKLY", + max_length=10, + ), ), migrations.AlterField( - model_name='syllabusscheduletimeslot', - name='recurrent', + model_name="syllabusscheduletimeslot", + name="recurrent", field=models.BooleanField(db_index=True, default=True), ), migrations.AlterField( - model_name='syllabusscheduletimeslot', - name='removed_at', - field=models.DateTimeField(blank=True, - db_index=True, - default=None, - help_text='This will be available until this date', - null=True), + model_name="syllabusscheduletimeslot", + name="removed_at", + field=models.DateTimeField( + blank=True, db_index=True, default=None, help_text="This will be available until this date", null=True + ), ), migrations.AlterField( - model_name='syllabusscheduletimeslot', - name='starting_at', + model_name="syllabusscheduletimeslot", + name="starting_at", field=models.BigIntegerField( db_index=True, default=202101010000, - help_text= - 'The first 4 number are year, the next 2 number are month, the next 2 number are day, the next 2 number are hour and the last 2 number are second', + help_text="The first 4 number are year, the next 2 number are month, the next 2 number are day, the next 2 number are hour and the last 2 number are second", validators=[ django.core.validators.MaxValueValidator(300000000000), - django.core.validators.MinValueValidator(202101010000) - ]), + django.core.validators.MinValueValidator(202101010000), + ], + ), ), migrations.AlterField( - model_name='syllabusscheduletimeslot', - name='timezone', - 
field=models.CharField(db_index=True, default='America/New_York', max_length=50), + model_name="syllabusscheduletimeslot", + name="timezone", + field=models.CharField(db_index=True, default="America/New_York", max_length=50), ), migrations.AlterField( - model_name='syllabusversion', - name='integrity_check_at', + model_name="syllabusversion", + name="integrity_check_at", field=models.DateTimeField(blank=True, db_index=True, default=None, null=True), ), migrations.AlterField( - model_name='syllabusversion', - name='integrity_status', - field=models.CharField(choices=[('ERROR', 'Error'), ('PENDING', 'Pending'), ('WARNING', 'Warning'), - ('OK', 'Ok')], - db_index=True, - default='PENDING', - max_length=15), + model_name="syllabusversion", + name="integrity_status", + field=models.CharField( + choices=[("ERROR", "Error"), ("PENDING", "Pending"), ("WARNING", "Warning"), ("OK", "Ok")], + db_index=True, + default="PENDING", + max_length=15, + ), ), migrations.AlterField( - model_name='syllabusversion', - name='status', - field=models.CharField(choices=[('PUBLISHED', 'Published'), ('DRAFT', 'Draft')], - db_index=True, - default='PUBLISHED', - max_length=15), + model_name="syllabusversion", + name="status", + field=models.CharField( + choices=[("PUBLISHED", "Published"), ("DRAFT", "Draft")], + db_index=True, + default="PUBLISHED", + max_length=15, + ), ), migrations.AlterField( - model_name='syllabusversion', - name='version', + model_name="syllabusversion", + name="version", field=models.PositiveSmallIntegerField(db_index=True), ), ] diff --git a/breathecode/admissions/migrations/0064_academy_legal_name.py b/breathecode/admissions/migrations/0064_academy_legal_name.py index 1f5e43832..b16a82b26 100644 --- a/breathecode/admissions/migrations/0064_academy_legal_name.py +++ b/breathecode/admissions/migrations/0064_academy_legal_name.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0063_auto_20231017_0605'), + ("admissions", "0063_auto_20231017_0605"), ] operations = [ migrations.AddField( - model_name='academy', - name='legal_name', + model_name="academy", + name="legal_name", field=models.CharField(blank=True, db_index=True, default=None, max_length=150, null=True), ), ] diff --git a/breathecode/admissions/models.py b/breathecode/admissions/models.py index 3f2276134..faa6b8cea 100644 --- a/breathecode/admissions/models.py +++ b/breathecode/admissions/models.py @@ -12,17 +12,17 @@ from . 
import signals from .signals import syllabus_version_json_updated -GOOGLE_APPLICATION_CREDENTIALS = os.getenv('GOOGLE_APPLICATION_CREDENTIALS', None) +GOOGLE_APPLICATION_CREDENTIALS = os.getenv("GOOGLE_APPLICATION_CREDENTIALS", None) logger = logging.getLogger(__name__) def get_user_label(self): - return f'{self.first_name} {self.last_name} ({self.email})' + return f"{self.first_name} {self.last_name} ({self.email})" -User.add_to_class('__str__', get_user_label) +User.add_to_class("__str__", get_user_label) -__all__ = ['UserAdmissions', 'Country', 'City', 'Academy', 'Syllabus', 'Cohort', 'CohortUser', 'CohortTimeSlot'] +__all__ = ["UserAdmissions", "Country", "City", "Academy", "Syllabus", "Cohort", "CohortUser", "CohortTimeSlot"] class UserAdmissions(User): @@ -36,7 +36,7 @@ class Country(models.Model): name = models.CharField(max_length=30, db_index=True) def __str__(self): - return f'{self.name} ({self.code})' + return f"{self.name} ({self.code})" class City(models.Model): @@ -47,13 +47,13 @@ def __str__(self): return self.name -INACTIVE = 'INACTIVE' -ACTIVE = 'ACTIVE' -DELETED = 'DELETED' +INACTIVE = "INACTIVE" +ACTIVE = "ACTIVE" +DELETED = "DELETED" ACADEMY_STATUS = ( - (INACTIVE, 'Inactive'), - (ACTIVE, 'Active'), - (DELETED, 'Deleted'), + (INACTIVE, "Inactive"), + (ACTIVE, "Active"), + (DELETED, "Deleted"), ) @@ -67,7 +67,7 @@ def __init__(self, *args, **kwargs): name = models.CharField(max_length=150, db_index=True) legal_name = models.CharField(max_length=150, db_index=True, default=None, null=True, blank=True) logo_url = models.CharField(max_length=255) - icon_url = models.CharField(max_length=255, help_text='It has to be a square', default='/static/icons/picture.png') + icon_url = models.CharField(max_length=255, help_text="It has to be a square", default="/static/icons/picture.png") website_url = models.CharField(max_length=255, blank=True, null=True, default=None) white_label_url = models.CharField(max_length=255, blank=True, null=True, default=None) @@ -77,10 +77,12 @@ def __init__(self, *args, **kwargs): feedback_email = models.EmailField(blank=True, null=True, default=None) phone_regex = RegexValidator( - regex=r'^\+?1?\d{9,15}$', - message="Phone number must be entered in the format: '+999999999'. Up to 15 digits allowed.") - marketing_phone = models.CharField(validators=[phone_regex], max_length=17, blank=True, null=True, - default=None) # validators should be a list + regex=r"^\+?1?\d{9,15}$", + message="Phone number must be entered in the format: '+999999999'. 
Up to 15 digits allowed.", + ) + marketing_phone = models.CharField( + validators=[phone_regex], max_length=17, blank=True, null=True, default=None + ) # validators should be a list twitter_handle = models.CharField(max_length=15, blank=True, null=True, default=None) facebook_handle = models.CharField(max_length=30, blank=True, null=True, default=None) @@ -96,30 +98,26 @@ def __init__(self, *args, **kwargs): zip_code = models.IntegerField(blank=True, null=True, db_index=True) white_labeled = models.BooleanField(default=False) - active_campaign_slug = models.SlugField(max_length=100, - unique=False, - null=True, - default=None, - blank=True, - db_index=True) + active_campaign_slug = models.SlugField( + max_length=100, unique=False, null=True, default=None, blank=True, db_index=True + ) - available_as_saas = models.BooleanField(default=False, - help_text='Academies available as SAAS will be sold thru 4Geeks.com', - db_index=True) + available_as_saas = models.BooleanField( + default=False, help_text="Academies available as SAAS will be sold thru 4Geeks.com", db_index=True + ) is_hidden_on_prework = models.BooleanField( default=True, null=False, blank=False, - help_text='Determines if the cohorts will be shown in the dashboard if it\'s status is \'PREWORK\'', - db_index=True) + help_text="Determines if the cohorts will be shown in the dashboard if it's status is 'PREWORK'", + db_index=True, + ) status = models.CharField(max_length=15, choices=ACADEMY_STATUS, default=ACTIVE, db_index=True) - main_currency = models.ForeignKey('payments.Currency', - on_delete=models.CASCADE, - null=True, - blank=True, - related_name='+') + main_currency = models.ForeignKey( + "payments.Currency", on_delete=models.CASCADE, null=True, blank=True, related_name="+" + ) timezone = models.CharField(max_length=50, null=True, default=None, blank=True, db_index=True) @@ -149,13 +147,13 @@ def save(self, *args, **kwargs): self.full_clean() created = not self.id - if os.getenv('ENV', '') == 'production': - obj = get_bucket_object(f'location-{self.slug}') + if os.getenv("ENV", "") == "production": + obj = get_bucket_object(f"location-{self.slug}") if obj is not None: self.logo_url = obj.public_url if not created and self.__old_slug != self.slug: - raise Exception('Academy slug cannot be updated') + raise Exception("Academy slug cannot be updated") super().save(*args, **kwargs) # Call the "real" save() method. 
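The phone_regex hunk earlier in this file's diff wraps a plain regular expression in Django's RegexValidator. The pattern can be sanity-checked on its own with the standard library; the sample numbers below are invented for illustration and are not taken from the codebase.

import re

# Same pattern as Academy.phone_regex above.
PHONE_RE = re.compile(r"^\+?1?\d{9,15}$")

# The first two match (optional "+", optional leading "1", then 9-15 digits); the last two do not.
for number in ["+999999999", "+15551234567", "555-123", ""]:
    print(repr(number), bool(PHONE_RE.match(number)))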
@@ -165,22 +163,20 @@ def save(self, *args, **kwargs): academy_saved.send_robust(instance=self, sender=self.__class__, created=created) -PARTIME = 'PART-TIME' -FULLTIME = 'FULL-TIME' +PARTIME = "PART-TIME" +FULLTIME = "FULL-TIME" SCHEDULE_TYPE = ( - (PARTIME, 'Part-Time'), - (FULLTIME, 'Full-Time'), + (PARTIME, "Part-Time"), + (FULLTIME, "Full-Time"), ) class Syllabus(models.Model): slug = models.SlugField(max_length=100, blank=True, null=True, default=None, db_index=True) name = models.CharField(max_length=150, blank=True, null=True, default=None, db_index=True) - main_technologies = models.CharField(max_length=150, - blank=True, - null=True, - default=None, - help_text='Coma separated, E.g: HTML, CSS, Javascript') + main_technologies = models.CharField( + max_length=150, blank=True, null=True, default=None, help_text="Coma separated, E.g: HTML, CSS, Javascript" + ) github_url = models.URLField(max_length=255, blank=True, null=True, default=None) duration_in_hours = models.IntegerField(null=True, default=None) @@ -199,25 +195,25 @@ class Syllabus(models.Model): academy_owner = models.ForeignKey(Academy, on_delete=models.CASCADE, null=True, default=None) def __str__(self): - return self.slug if self.slug else 'unknown' + return self.slug if self.slug else "unknown" -PUBLISHED = 'PUBLISHED' -DRAFT = 'DRAFT' +PUBLISHED = "PUBLISHED" +DRAFT = "DRAFT" VERSION_STATUS = ( - (PUBLISHED, 'Published'), - (DRAFT, 'Draft'), + (PUBLISHED, "Published"), + (DRAFT, "Draft"), ) -ERROR = 'ERROR' -OK = 'OK' -PENDING = 'PENDING' -WARNING = 'WARNING' +ERROR = "ERROR" +OK = "OK" +PENDING = "PENDING" +WARNING = "WARNING" INTEGRITY_STATUS = ( - (ERROR, 'Error'), - (PENDING, 'Pending'), - (WARNING, 'Warning'), - (OK, 'Ok'), + (ERROR, "Error"), + (PENDING, "Pending"), + (WARNING, "Warning"), + (OK, "Ok"), ) @@ -241,13 +237,13 @@ def __init__(self, *args, **kwargs): self.__json_hash = self.hashed_json() def __str__(self): - return f'{self.syllabus.slug}.v{self.version}' + return f"{self.syllabus.slug}.v{self.version}" def hashed_json(self): if self.json is None: - return '' + return "" - encoded = base64.b64encode(json.dumps(self.json, sort_keys=True).encode('utf-8')) + encoded = base64.b64encode(json.dumps(self.json, sort_keys=True).encode("utf-8")) return hashlib.sha256(encoded).hexdigest() def save(self, *args, **kwargs): @@ -259,13 +255,14 @@ def save(self, *args, **kwargs): super().save(*args, **kwargs) - if json_modified: syllabus_version_json_updated.send_robust(instance=self, sender=SyllabusVersion) + if json_modified: + syllabus_version_json_updated.send_robust(instance=self, sender=SyllabusVersion) class SyllabusSchedule(models.Model): name = models.CharField(max_length=150, db_index=True) - schedule_type = models.CharField(max_length=15, choices=SCHEDULE_TYPE, default='PART-TIME', db_index=True) + schedule_type = models.CharField(max_length=15, choices=SCHEDULE_TYPE, default="PART-TIME", db_index=True) description = models.TextField(max_length=450, db_index=True) syllabus = models.ForeignKey(Syllabus, on_delete=models.CASCADE, default=None, null=True) @@ -278,19 +275,19 @@ def __str__(self): return self.name -INACTIVE = 'INACTIVE' -PREWORK = 'PREWORK' -STARTED = 'STARTED' -FINAL_PROJECT = 'FINAL_PROJECT' -ENDED = 'ENDED' -DELETED = 'DELETED' +INACTIVE = "INACTIVE" +PREWORK = "PREWORK" +STARTED = "STARTED" +FINAL_PROJECT = "FINAL_PROJECT" +ENDED = "ENDED" +DELETED = "DELETED" COHORT_STAGE = ( - (INACTIVE, 'Inactive'), - (PREWORK, 'Prework'), - (STARTED, 'Started'), - (FINAL_PROJECT, 'Final Project'), - 
(ENDED, 'Ended'), - (DELETED, 'Deleted'), + (INACTIVE, "Inactive"), + (PREWORK, "Prework"), + (STARTED, "Started"), + (FINAL_PROJECT, "Final Project"), + (ENDED, "Ended"), + (DELETED, "Deleted"), ) @@ -303,27 +300,30 @@ class Cohort(models.Model): kickoff_date = models.DateTimeField(db_index=True) ending_date = models.DateTimeField(blank=True, null=True, db_index=True) - current_day = models.IntegerField(help_text='Each day the teacher takes attendancy and increases the day in one', - default=1) + current_day = models.IntegerField( + help_text="Each day the teacher takes attendancy and increases the day in one", default=1 + ) current_module = models.IntegerField( null=True, default=None, blank=True, - help_text= - 'The syllabus is separated by modules, from 1 to N and the teacher decides when to start a new mobule (after a couple of days)' + help_text="The syllabus is separated by modules, from 1 to N and the teacher decides when to start a new mobule (after a couple of days)", ) stage = models.CharField(max_length=15, choices=COHORT_STAGE, default=INACTIVE, db_index=True) private = models.BooleanField( default=False, - help_text='It will not show on the public API endpoints but you will still be able to add people manually', - db_index=True) + help_text="It will not show on the public API endpoints but you will still be able to add people manually", + db_index=True, + ) accepts_enrollment_suggestions = models.BooleanField( - default=True, help_text='The system will suggest won leads to be added to this cohort', db_index=True) + default=True, help_text="The system will suggest won leads to be added to this cohort", db_index=True + ) never_ends = models.BooleanField(default=False, db_index=True) remote_available = models.BooleanField( - default=True, help_text='True (default) if the students from other cities can take it from home', db_index=True) + default=True, help_text="True (default) if the students from other cities can take it from home", db_index=True + ) online_meeting_url = models.URLField(max_length=255, blank=True, default=None, null=True) timezone = models.CharField(max_length=50, null=True, default=None, blank=True, db_index=True) @@ -334,13 +334,12 @@ class Cohort(models.Model): default=None, blank=True, null=True, - help_text='The cohort history will save attendancy and information about progress on each class') + help_text="The cohort history will save attendancy and information about progress on each class", + ) - syllabus_version = models.ForeignKey(SyllabusVersion, - on_delete=models.SET_NULL, - default=None, - null=True, - blank=True) + syllabus_version = models.ForeignKey( + SyllabusVersion, on_delete=models.SET_NULL, default=None, null=True, blank=True + ) intro_video = models.URLField(null=True, blank=True, default=None) schedule = models.ForeignKey(SyllabusSchedule, on_delete=models.SET_NULL, default=None, null=True, blank=True) @@ -349,17 +348,19 @@ class Cohort(models.Model): default=True, null=True, blank=True, - help_text='Determines if the cohort will be shown in the dashboard if it\'s status is \'PREWORK\'', - db_index=True) + help_text="Determines if the cohort will be shown in the dashboard if it's status is 'PREWORK'", + db_index=True, + ) available_as_saas = models.BooleanField( default=False, null=True, blank=True, - help_text='Cohorts available as SAAS will be sold through plans at 4Geeks.com', - db_index=True) + help_text="Cohorts available as SAAS will be sold through plans at 4Geeks.com", + db_index=True, + ) - language = 
models.CharField(max_length=2, default='en', db_index=True) + language = models.CharField(max_length=2, default="en", db_index=True) created_at = models.DateTimeField(auto_now_add=True, editable=False) updated_at = models.DateTimeField(auto_now=True, editable=False) @@ -374,10 +375,10 @@ def clean(self): self.stage = self.stage.upper() if self.never_ends and self.ending_date: - raise forms.ValidationError('If the cohort never ends, it cannot have ending date') + raise forms.ValidationError("If the cohort never ends, it cannot have ending date") if not self.kickoff_date: - raise forms.ValidationError('Kickoff date is required') + raise forms.ValidationError("Kickoff date is required") def save_history_log(self, *args, **kwargs): """ @@ -417,40 +418,40 @@ def save(self, *args, **kwargs): self._current_history_log = self.history_log def __str__(self): - return self.name + '(' + self.slug + ')' + return self.name + "(" + self.slug + ")" -TEACHER = 'TEACHER' -ASSISTANT = 'ASSISTANT' -STUDENT = 'STUDENT' -REVIEWER = 'REVIEWER' +TEACHER = "TEACHER" +ASSISTANT = "ASSISTANT" +STUDENT = "STUDENT" +REVIEWER = "REVIEWER" COHORT_ROLE = ( - (TEACHER, 'Teacher'), - (ASSISTANT, 'Assistant'), - (REVIEWER, 'Reviewer'), - (STUDENT, 'Student'), + (TEACHER, "Teacher"), + (ASSISTANT, "Assistant"), + (REVIEWER, "Reviewer"), + (STUDENT, "Student"), ) -FULLY_PAID = 'FULLY_PAID' -UP_TO_DATE = 'UP_TO_DATE' -LATE = 'LATE' +FULLY_PAID = "FULLY_PAID" +UP_TO_DATE = "UP_TO_DATE" +LATE = "LATE" FINANTIAL_STATUS = ( - (FULLY_PAID, 'Fully Paid'), - (UP_TO_DATE, 'Up to date'), - (LATE, 'Late'), + (FULLY_PAID, "Fully Paid"), + (UP_TO_DATE, "Up to date"), + (LATE, "Late"), ) -ACTIVE = 'ACTIVE' -POSTPONED = 'POSTPONED' -SUSPENDED = 'SUSPENDED' -GRADUATED = 'GRADUATED' -DROPPED = 'DROPPED' +ACTIVE = "ACTIVE" +POSTPONED = "POSTPONED" +SUSPENDED = "SUSPENDED" +GRADUATED = "GRADUATED" +DROPPED = "DROPPED" EDU_STATUS = ( - (ACTIVE, 'Active'), - (POSTPONED, 'Postponed'), - (GRADUATED, 'Graduated'), - (SUSPENDED, 'Suspended'), - (DROPPED, 'Dropped'), + (ACTIVE, "Active"), + (POSTPONED, "Postponed"), + (GRADUATED, "Graduated"), + (SUSPENDED, "Suspended"), + (DROPPED, "Dropped"), ) @@ -464,30 +465,25 @@ def __init__(self, *args, **kwargs): cohort = models.ForeignKey(Cohort, on_delete=models.CASCADE) role = models.CharField(max_length=9, choices=COHORT_ROLE, default=STUDENT, db_index=True) - watching = models.BooleanField(default=False, - help_text='You can active students to the watch list and monitor them closely', - db_index=True) + watching = models.BooleanField( + default=False, help_text="You can active students to the watch list and monitor them closely", db_index=True + ) history_log = models.JSONField( default=dict, blank=True, null=False, - help_text='The cohort user log will save attendancy and information about progress on each class') - - #FIXME: this have a typo - finantial_status = models.CharField(max_length=15, - choices=FINANTIAL_STATUS, - default=None, - null=True, - blank=True, - db_index=True) - - educational_status = models.CharField(max_length=15, - choices=EDU_STATUS, - default=ACTIVE, - null=True, - blank=True, - db_index=True) + help_text="The cohort user log will save attendancy and information about progress on each class", + ) + + # FIXME: this have a typo + finantial_status = models.CharField( + max_length=15, choices=FINANTIAL_STATUS, default=None, null=True, blank=True, db_index=True + ) + + educational_status = models.CharField( + max_length=15, choices=EDU_STATUS, default=ACTIVE, null=True, blank=True, 
db_index=True + ) created_at = models.DateTimeField(auto_now_add=True, editable=False) updated_at = models.DateTimeField(auto_now=True, editable=False) @@ -530,20 +526,22 @@ def save(self, *args, **kwargs): return result -DAILY = 'DAILY' -WEEKLY = 'WEEKLY' -MONTHLY = 'MONTHLY' +DAILY = "DAILY" +WEEKLY = "WEEKLY" +MONTHLY = "MONTHLY" # YEARLY = 'YEARLY' RECURRENCY_TYPE = ( - (DAILY, 'Daily'), - (WEEKLY, 'Weekly'), - (MONTHLY, 'Monthly'), + (DAILY, "Daily"), + (WEEKLY, "Weekly"), + (MONTHLY, "Monthly"), # (YEARLY, 'Yearly'), ) # YYYYMMDDHHMM -date_integer_description = ('The first 4 number are year, the next 2 number are month, the next 2 number ' - 'are day, the next 2 number are hour and the last 2 number are second') +date_integer_description = ( + "The first 4 number are year, the next 2 number are month, the next 2 number " + "are day, the next 2 number are hour and the last 2 number are second" +) class TimeSlot(models.Model): @@ -554,7 +552,8 @@ class TimeSlot(models.Model): MaxValueValidator(300000000000), # year 3000 MinValueValidator(202101010000), # year 2021, month 1 and day 1 ], - db_index=True) + db_index=True, + ) ending_at = models.BigIntegerField( help_text=date_integer_description, @@ -563,17 +562,16 @@ class TimeSlot(models.Model): MaxValueValidator(300000000000), # year 3000 MinValueValidator(202101010000), # year 2021, month 1 and day 1 ], - db_index=True) + db_index=True, + ) - timezone = models.CharField(max_length=50, default='America/New_York', db_index=True) + timezone = models.CharField(max_length=50, default="America/New_York", db_index=True) recurrent = models.BooleanField(default=True, db_index=True) recurrency_type = models.CharField(max_length=10, choices=RECURRENCY_TYPE, default=WEEKLY, db_index=True) - removed_at = models.DateTimeField(null=True, - default=None, - blank=True, - help_text='This will be available until this date', - db_index=True) + removed_at = models.DateTimeField( + null=True, default=None, blank=True, help_text="This will be available until this date", db_index=True + ) created_at = models.DateTimeField(auto_now_add=True, editable=False) updated_at = models.DateTimeField(auto_now=True, editable=False) @@ -589,7 +587,7 @@ def clean(self): self.recurrency_type = self.recurrency_type.upper() if self.starting_at > self.ending_at: - raise forms.ValidationError('The starting date must be before the ending date') + raise forms.ValidationError("The starting date must be before the ending date") def save(self, *args, **kwargs): # created = not self.id @@ -608,7 +606,7 @@ def clean(self): self.recurrency_type = self.recurrency_type.upper() if self.starting_at > self.ending_at: - raise forms.ValidationError('The starting date must be before the ending date') + raise forms.ValidationError("The starting date must be before the ending date") def save(self, *args, **kwargs): created = not self.id diff --git a/breathecode/admissions/permissions/consumers.py b/breathecode/admissions/permissions/consumers.py index 702caf46a..c2b6d7fa9 100644 --- a/breathecode/admissions/permissions/consumers.py +++ b/breathecode/admissions/permissions/consumers.py @@ -4,23 +4,23 @@ def cohort_by_url_param(context: ServiceContext, args: tuple, kwargs: dict) -> tuple[dict, tuple, dict]: - context['consumables'] = context['consumables'].filter( - Q(cohort__id=kwargs.get('cohort_id')) - | Q(cohort__slug=kwargs.get('cohort_slug'))) + context["consumables"] = context["consumables"].filter( + Q(cohort__id=kwargs.get("cohort_id")) | Q(cohort__slug=kwargs.get("cohort_slug")) + ) 
return (context, args, kwargs) def cohort_by_header(context: ServiceContext, args: tuple, kwargs: dict) -> tuple[dict, tuple, dict]: - cohort = context['request'].META.get('HTTP_COHORT', '') + cohort = context["request"].META.get("HTTP_COHORT", "") kwargs = {} if cohort.isnumeric(): - kwargs['cohort__id'] = int(cohort) + kwargs["cohort__id"] = int(cohort) else: - kwargs['cohort__slug'] = cohort + kwargs["cohort__slug"] = cohort - context['consumables'] = context['consumables'].filter(**kwargs) + context["consumables"] = context["consumables"].filter(**kwargs) return (context, args, kwargs) diff --git a/breathecode/admissions/permissions/contexts.py b/breathecode/admissions/permissions/contexts.py index 5a67af0a1..cc3cbad55 100644 --- a/breathecode/admissions/permissions/contexts.py +++ b/breathecode/admissions/permissions/contexts.py @@ -3,16 +3,16 @@ def academy(client: LaunchDarkly, academy: Academy): - key = f'{academy.id}' - name = f'{academy.name} ({academy.slug})' - kind = 'academy' + key = f"{academy.id}" + name = f"{academy.name} ({academy.slug})" + kind = "academy" context = { - 'id': academy.id, - 'slug': academy.slug, - 'city': academy.city.name, - 'country': academy.country.name, - 'zip_code': academy.zip_code, - 'timezone': academy.timezone, + "id": academy.id, + "slug": academy.slug, + "city": academy.city.name, + "country": academy.country.name, + "zip_code": academy.zip_code, + "timezone": academy.timezone, } return client.context(key, name, kind, context) diff --git a/breathecode/admissions/permissions/flags.py b/breathecode/admissions/permissions/flags.py index b9e2626a9..4d5f36ff1 100644 --- a/breathecode/admissions/permissions/flags.py +++ b/breathecode/admissions/permissions/flags.py @@ -1,8 +1,7 @@ -__all__ = ['api'] +__all__ = ["api"] -class API: - ... +class API: ... 
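# --- Editor's note (illustrative sketch, not part of the patch) ----------------------
# The permission consumers reformatted above narrow context["consumables"] either by
# URL kwargs (cohort_id / cohort_slug) or by the "Cohort" request header. The snippet
# below is a minimal, self-contained approximation of the header branch only; the
# helper name pick_cohort_filter and the plain-string stand-in for request.META are
# hypothetical and exist purely for illustration of the id-vs-slug dispatch.

def pick_cohort_filter(header_value: str) -> dict:
    """Map the raw "Cohort" header to queryset filter kwargs.

    Numeric values are treated as a cohort id, anything else as a slug, mirroring the
    isnumeric() branch in cohort_by_header.
    """
    if header_value.isnumeric():
        return {"cohort__id": int(header_value)}
    return {"cohort__slug": header_value}


# Usage: the resulting kwargs would be applied to the consumables queryset, e.g.
# context["consumables"].filter(**pick_cohort_filter(meta.get("HTTP_COHORT", ""))).
assert pick_cohort_filter("42") == {"cohort__id": 42}
assert pick_cohort_filter("miami-pt-101") == {"cohort__slug": "miami-pt-101"}
# --------------------------------------------------------------------------------------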
api = API() diff --git a/breathecode/admissions/receivers.py b/breathecode/admissions/receivers.py index e4cb40ac3..c36398c0b 100644 --- a/breathecode/admissions/receivers.py +++ b/breathecode/admissions/receivers.py @@ -19,31 +19,33 @@ @receiver(cohort_log_saved, sender=Cohort) def process_cohort_history_log(sender: Type[Cohort], instance: Cohort, **kwargs: Any): - logger.info('Processing Cohort history log for cohort: ' + str(instance.id)) + logger.info("Processing Cohort history log for cohort: " + str(instance.id)) activity_tasks.get_attendancy_log.delay(instance.id) @receiver(cohort_user_created, sender=Cohort) async def new_cohort_user(sender: Type[Cohort], instance: Cohort, **kwargs: Any): - logger.info('Processing Cohort history log for cohort: ' + str(instance.id)) - - await authenticate_actions.send_webhook('rigobot', - 'cohort_user.created', - user=instance.user, - data={ - 'user': { - 'id': instance.user.id, - 'email': instance.user.email, - 'first_name': instance.user.first_name, - 'last_name': instance.user.last_name, - }, - }) + logger.info("Processing Cohort history log for cohort: " + str(instance.id)) + + await authenticate_actions.send_webhook( + "rigobot", + "cohort_user.created", + user=instance.user, + data={ + "user": { + "id": instance.user.id, + "email": instance.user.email, + "first_name": instance.user.first_name, + "last_name": instance.user.last_name, + }, + }, + ) @receiver(revision_status_updated, sender=Task, weak=False) def mark_saas_student_as_graduated(sender: Type[Task], instance: Task, **kwargs: Any): - logger.info('Processing available as saas student\'s tasks and marking as GRADUATED if it is') + logger.info("Processing available as saas student's tasks and marking as GRADUATED if it is") if instance.cohort is None: return @@ -53,12 +55,11 @@ def mark_saas_student_as_graduated(sender: Type[Task], instance: Task, **kwargs: if not cohort.available_as_saas: return - pending_tasks = how_many_pending_tasks(cohort.syllabus_version, - instance.user, - task_types=['PROJECT'], - only_mandatory=True) + pending_tasks = how_many_pending_tasks( + cohort.syllabus_version, instance.user, task_types=["PROJECT"], only_mandatory=True + ) if pending_tasks == 0: cohort_user = CohortUser.objects.filter(user=instance.user.id, cohort=cohort.id).first() - cohort_user.educational_status = 'GRADUATED' + cohort_user.educational_status = "GRADUATED" cohort_user.save() diff --git a/breathecode/admissions/schema.py b/breathecode/admissions/schema.py index 6d563980c..3065732d8 100644 --- a/breathecode/admissions/schema.py +++ b/breathecode/admissions/schema.py @@ -25,7 +25,7 @@ def fields_requested(info): def to_snake_case(name): - return re.sub(r'(? 
90 or latitude < -90: - raise GraphQLError('Bad latitude', slug='bad-latitude') + raise GraphQLError("Bad latitude", slug="bad-latitude") if longitude > 180 or longitude < -180: - raise GraphQLError('Bad longitude', slug='bad-longitude') + raise GraphQLError("Bad longitude", slug="bad-longitude") items = items.annotate(longitude=Value(longitude, FloatField()), latitude=Value(latitude, FloatField())) - saas = kwargs.get('saas', '').lower() - if saas == 'true': + saas = kwargs.get("saas", "").lower() + if saas == "true": items = items.filter(academy__available_as_saas=True) - elif saas == 'false': + elif saas == "false": items = items.filter(academy__available_as_saas=False) - syllabus_slug = kwargs.get('syllabus_slug', '') + syllabus_slug = kwargs.get("syllabus_slug", "") if syllabus_slug: items = items.filter(syllabus_version__syllabus__slug=syllabus_slug) - plan = kwargs.get('plan', '') - if plan == 'true': + plan = kwargs.get("plan", "") + if plan == "true": items = items.filter(academy__main_currency__isnull=False, cohortset__isnull=False).distinct() - elif plan == 'false': + elif plan == "false": items = items.filter().exclude(cohortset__isnull=True).distinct() elif plan: kwargs = {} if isinstance(plan, int) or plan.isnumeric(): - kwargs['cohortset__plan__id'] = plan + kwargs["cohortset__plan__id"] = plan else: - kwargs['cohortset__plan__slug'] = plan + kwargs["cohortset__plan__slug"] = plan items = items.filter(**kwargs).distinct() diff --git a/breathecode/admissions/serializers.py b/breathecode/admissions/serializers.py index a8ba24f72..9d82b16cf 100644 --- a/breathecode/admissions/serializers.py +++ b/breathecode/admissions/serializers.py @@ -29,6 +29,7 @@ class CountrySerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. code = serpy.Field() name = serpy.Field() @@ -36,6 +37,7 @@ class CountrySerializer(serpy.Serializer): class GetSyllabusSmallSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. slug = serpy.Field() name = serpy.Field() @@ -46,12 +48,14 @@ class GetSyllabusSmallSerializer(serpy.Serializer): class CitySerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. name = serpy.Field() class UserSmallSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. id = serpy.Field() email = serpy.Field() @@ -59,12 +63,14 @@ class UserSmallSerializer(serpy.Serializer): class PublicProfileSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. avatar_url = serpy.Field() class ProfileSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. id = serpy.Field() avatar_url = serpy.Field() @@ -74,6 +80,7 @@ class ProfileSerializer(serpy.Serializer): class GetSmallAcademySerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. id = serpy.Field() name = serpy.Field() @@ -85,6 +92,7 @@ class GetSmallAcademySerializer(serpy.Serializer): class GetProfileAcademySmallSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. 
id = serpy.Field() first_name = serpy.Field() @@ -95,6 +103,7 @@ class GetProfileAcademySmallSerializer(serpy.Serializer): class ProfileAcademySmallSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. academy = GetSmallAcademySerializer() role = serpy.MethodField() @@ -105,6 +114,7 @@ def get_role(self, obj): class UserPublicSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. id = serpy.Field() first_name = serpy.Field() @@ -114,6 +124,7 @@ class UserPublicSerializer(serpy.Serializer): class UserSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. id = serpy.Field() first_name = serpy.Field() @@ -148,6 +159,7 @@ class GetTinnyCertificateSerializer(serpy.Serializer): class GithubSmallSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. avatar_url = serpy.Field() name = serpy.Field() @@ -200,6 +212,7 @@ class GetBigAcademySerializer(serpy.Serializer): class SyllabusVersionSmallSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. version = serpy.Field() status = serpy.Field() @@ -243,6 +256,7 @@ def get_github_url(self, obj): class GetSyllabusVersionSerializer(serpy.Serializer): """The serializer schema definition.""" + json = serpy.Field() version = serpy.Field() status = serpy.Field() @@ -300,6 +314,7 @@ def get_github_url(self, obj): class SmallCohortTimeSlotSerializer(serpy.Serializer): """The serializer schema definition.""" + id = serpy.Field() starting_at = serpy.DatetimeIntegerField() ending_at = serpy.DatetimeIntegerField() @@ -309,6 +324,7 @@ class SmallCohortTimeSlotSerializer(serpy.Serializer): class GetCohortSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. id = serpy.Field() slug = serpy.Field() @@ -338,6 +354,7 @@ def get_timeslots(self, obj): class PublicCohortSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. id = serpy.Field() slug = serpy.Field() @@ -353,7 +370,7 @@ class PublicCohortSerializer(serpy.Serializer): distance = serpy.MethodField() timezone = serpy.Field() schedule = GetSmallSyllabusScheduleSerializer(required=False) - timeslots = serpy.ManyToManyField(SmallCohortTimeSlotSerializer(attr='cohorttimeslot_set', many=True)) + timeslots = serpy.ManyToManyField(SmallCohortTimeSlotSerializer(attr="cohorttimeslot_set", many=True)) def get_distance(self, obj): if not obj.latitude or not obj.longitude or not obj.academy.latitude or not obj.academy.longitude: @@ -364,6 +381,7 @@ def get_distance(self, obj): class GetSmallCohortSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. id = serpy.Field() slug = serpy.Field() @@ -376,6 +394,7 @@ class GetSmallCohortSerializer(serpy.Serializer): class GetTeacherAcademySmallSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. 
id = serpy.Field() first_name = serpy.Field() @@ -396,15 +415,18 @@ def get_cohorts(self, obj): if obj.user is None: return [] - return GetSmallCohortSerializer(Cohort.objects.filter( - cohortuser__user__id=obj.user.id, - cohortuser__role__in=['TEACHER', - 'ASSISTANT']).exclude(stage__iexact='DELETED').order_by('-ending_date').all(), - many=True).data + return GetSmallCohortSerializer( + Cohort.objects.filter(cohortuser__user__id=obj.user.id, cohortuser__role__in=["TEACHER", "ASSISTANT"]) + .exclude(stage__iexact="DELETED") + .order_by("-ending_date") + .all(), + many=True, + ).data class GetMeCohortSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. id = serpy.Field() slug = serpy.Field() @@ -439,6 +461,7 @@ class CohortUserHookSerializer(serpy.Serializer): class CohortHookSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. id = serpy.Field() slug = serpy.Field() @@ -461,6 +484,7 @@ class CohortHookSerializer(serpy.Serializer): class GetCohortUserSerializer(serpy.Serializer): """The serializer schema definition.""" + id = serpy.Field() user = UserSerializer() cohort = GetSmallCohortSerializer() @@ -478,6 +502,7 @@ def get_profile_academy(self, obj): class GetCohortUserTasksSerializer(GetCohortUserSerializer): """The serializer schema definition.""" + tasks = serpy.MethodField() def get_tasks(self, obj): @@ -487,6 +512,7 @@ def get_tasks(self, obj): class GETCohortTimeSlotSerializer(serpy.Serializer): """The serializer schema definition.""" + id = serpy.Field() cohort = serpy.MethodField() starting_at = serpy.DatetimeIntegerField() @@ -502,6 +528,7 @@ def get_cohort(self, obj): class GETSyllabusScheduleTimeSlotSerializer(serpy.Serializer): """The serializer schema definition.""" + id = serpy.Field() schedule = serpy.MethodField() starting_at = serpy.DatetimeIntegerField() @@ -517,6 +544,7 @@ def get_schedule(self, obj): class GETCohortUserSmallSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. cohort = GetMeCohortSerializer() role = serpy.Field() @@ -530,6 +558,7 @@ class GETCohortUserSmallSerializer(serpy.Serializer): class UserMeSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. id = serpy.Field() email = serpy.Field() @@ -552,14 +581,15 @@ def get_roles(self, obj): return ProfileAcademySmallSerializer(roles, many=True).data def get_cohorts(self, obj): - cohorts = CohortUser.objects.filter( - user__id=obj.id).exclude(Q(educational_status='DROPPED') - | Q(educational_status='SUSPENDED')) + cohorts = CohortUser.objects.filter(user__id=obj.id).exclude( + Q(educational_status="DROPPED") | Q(educational_status="SUSPENDED") + ) return GETCohortUserSmallSerializer(cohorts, many=True).data class GetSyllabusSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. 
id = serpy.Field() slug = serpy.Field() @@ -582,23 +612,23 @@ class GetSyllabusSerializer(serpy.Serializer): # ↓ EDIT SERIALIZERS ↓ class AcademySerializer(serializers.ModelSerializer): - status_fields = ['status'] + status_fields = ["status"] country = CountrySerializer(required=True) city = CitySerializer(required=True) class Meta: model = Academy - fields = ['id', 'slug', 'name', 'street_address', 'country', 'city', 'is_hidden_on_prework'] + fields = ["id", "slug", "name", "street_address", "country", "city", "is_hidden_on_prework"] def validate(self, data): - if 'slug' in data and data['slug'] != self.instance.slug: - raise ValidationException('Academy slug cannot be updated') + if "slug" in data and data["slug"] != self.instance.slug: + raise ValidationException("Academy slug cannot be updated") return data def update(self, instance, validated_data): - del validated_data['slug'] + del validated_data["slug"] return super().update(instance, validated_data) @@ -606,11 +636,11 @@ class SyllabusPOSTSerializer(serializers.ModelSerializer): class Meta: model = Syllabus - fields = ['id', 'slug'] + fields = ["id", "slug"] class CohortSerializerMixin(serializers.ModelSerializer): - status_fields = ['stage'] + status_fields = ["stage"] syllabus = serializers.CharField(required=False) syllabus_version = serializers.CharField(required=False) @@ -618,97 +648,116 @@ class CohortSerializerMixin(serializers.ModelSerializer): def validate(self, data): - kickoff_date = (data['kickoff_date'] if 'kickoff_date' in data else None) or (self.instance.kickoff_date - if self.instance else None) + kickoff_date = (data["kickoff_date"] if "kickoff_date" in data else None) or ( + self.instance.kickoff_date if self.instance else None + ) - ending_date = (data['ending_date'] if 'ending_date' in data else None) or (self.instance.ending_date - if self.instance else None) + ending_date = (data["ending_date"] if "ending_date" in data else None) or ( + self.instance.ending_date if self.instance else None + ) if kickoff_date and ending_date and kickoff_date > ending_date: - raise ValidationException('kickoff_date cannot be greather than ending_date', - slug='kickoff-date-greather-than-ending-date') + raise ValidationException( + "kickoff_date cannot be greather than ending_date", slug="kickoff-date-greather-than-ending-date" + ) - if 'stage' in data: + if "stage" in data: possible_stages = [stage_slug for stage_slug, stage_label in COHORT_STAGE] - if data['stage'] not in possible_stages: - raise ValidationException(f"Invalid cohort stage {data['stage']}", slug='invalid-cohort-stage') + if data["stage"] not in possible_stages: + raise ValidationException(f"Invalid cohort stage {data['stage']}", slug="invalid-cohort-stage") - if 'syllabus' in data: - strings = data['syllabus'].split('.v') + if "syllabus" in data: + strings = data["syllabus"].split(".v") if len(strings) != 2: - raise ValidationException('Syllabus field marformed(`${syllabus.slug}.v{syllabus_version.version}`)', - slug='syllabus-field-marformed') + raise ValidationException( + "Syllabus field marformed(`${syllabus.slug}.v{syllabus_version.version}`)", + slug="syllabus-field-marformed", + ) [syllabus_slug, syllabus_version_number] = strings syllabus_version = None - if syllabus_version_number == 'latest': - syllabus_version = SyllabusVersion.objects.filter( - Q(syllabus__academy_owner__id=self.context['academy'].id) | Q(syllabus__private=False), - syllabus__slug=syllabus_slug, - ).filter(status='PUBLISHED').order_by('-version').first() + if 
syllabus_version_number == "latest": + syllabus_version = ( + SyllabusVersion.objects.filter( + Q(syllabus__academy_owner__id=self.context["academy"].id) | Q(syllabus__private=False), + syllabus__slug=syllabus_slug, + ) + .filter(status="PUBLISHED") + .order_by("-version") + .first() + ) else: syllabus_version = SyllabusVersion.objects.filter( - Q(syllabus__private=False) | Q(syllabus__academy_owner__id=self.context['academy'].id), + Q(syllabus__private=False) | Q(syllabus__academy_owner__id=self.context["academy"].id), syllabus__slug=syllabus_slug, - version=syllabus_version_number).first() + version=syllabus_version_number, + ).first() if not syllabus_version: - raise ValidationException(f'Syllabus {syllabus_version} doesn\'t exist', - slug='syllabus-version-not-found') + raise ValidationException( + f"Syllabus {syllabus_version} doesn't exist", slug="syllabus-version-not-found" + ) - if syllabus_version_number == '1': + if syllabus_version_number == "1": raise ValidationException( - 'Syllabus version 1 is only used for marketing purposes and it cannot be assigned to ' - 'any cohort', - slug='assigning-a-syllabus-version-1') + "Syllabus version 1 is only used for marketing purposes and it cannot be assigned to " "any cohort", + slug="assigning-a-syllabus-version-1", + ) - data['syllabus_version'] = syllabus_version + data["syllabus_version"] = syllabus_version - if 'syllabus' in data: - del data['syllabus'] + if "syllabus" in data: + del data["syllabus"] - if 'slug' in data: - cohort = Cohort.objects.filter(slug=data['slug']).first() - if cohort is not None and self.instance.slug != data['slug']: - raise ValidationException('Slug already exists for another cohort', slug='slug-already-exists') + if "slug" in data: + cohort = Cohort.objects.filter(slug=data["slug"]).first() + if cohort is not None and self.instance.slug != data["slug"]: + raise ValidationException("Slug already exists for another cohort", slug="slug-already-exists") - if 'available_as_saas' not in data or data['available_as_saas'] is None: - data['available_as_saas'] = self.context['academy'].available_as_saas + if "available_as_saas" not in data or data["available_as_saas"] is None: + data["available_as_saas"] = self.context["academy"].available_as_saas if self.instance: - never_ends = (data['never_ends'] if 'never_ends' in data else self.instance.never_ends) + never_ends = data["never_ends"] if "never_ends" in data else self.instance.never_ends - ending_date = (data['ending_date'] if 'ending_date' in data else self.instance.ending_date) + ending_date = data["ending_date"] if "ending_date" in data else self.instance.ending_date else: - never_ends = 'never_ends' in data and data['never_ends'] - ending_date = 'ending_date' in data and data['ending_date'] + never_ends = "never_ends" in data and data["never_ends"] + ending_date = "ending_date" in data and data["ending_date"] if never_ends and ending_date: - raise ValidationException('A cohort that never ends cannot have ending date', - slug='cohort-with-ending-date-and-never-ends') + raise ValidationException( + "A cohort that never ends cannot have ending date", slug="cohort-with-ending-date-and-never-ends" + ) if not never_ends and not ending_date: - raise ValidationException('A cohort most have ending date or it should be marked as ever_ends', - slug='cohort-without-ending-date-and-never-ends') + raise ValidationException( + "A cohort most have ending date or it should be marked as ever_ends", + slug="cohort-without-ending-date-and-never-ends", + ) - if 'language' in data: 
- language = data['language'] + if "language" in data: + language = data["language"] if type(language) == str: - data['language'] = language.lower() + data["language"] = language.lower() else: - raise ValidationException(f'Language property should be a string not a {type(language)}') + raise ValidationException(f"Language property should be a string not a {type(language)}") # if cohort is being activated the online_meeting_url should not be null - if self.instance is not None and (self.instance.online_meeting_url is None - or self.instance.online_meeting_url == '') and self.instance.remote_available: - stage = (data['stage'] if 'stage' in data else self.instance.stage) - if stage in ['STARTED', 'FINAL_PROJECT'] and stage != self.instance.stage: + if ( + self.instance is not None + and (self.instance.online_meeting_url is None or self.instance.online_meeting_url == "") + and self.instance.remote_available + ): + stage = data["stage"] if "stage" in data else self.instance.stage + if stage in ["STARTED", "FINAL_PROJECT"] and stage != self.instance.stage: raise ValidationException( - 'This cohort has a remote option but no online meeting URL has been specified', - slug='remove-without-online-meeting') + "This cohort has a remote option but no online meeting URL has been specified", + slug="remove-without-online-meeting", + ) return data @@ -720,12 +769,31 @@ class CohortSerializer(CohortSerializerMixin): class Meta: model = Cohort - fields = ('id', 'slug', 'name', 'remote_available', 'kickoff_date', 'current_day', 'academy', 'syllabus', - 'schedule', 'syllabus_version', 'ending_date', 'stage', 'language', 'created_at', 'updated_at', - 'never_ends', 'online_meeting_url', 'timezone', 'is_hidden_on_prework', 'available_as_saas') + fields = ( + "id", + "slug", + "name", + "remote_available", + "kickoff_date", + "current_day", + "academy", + "syllabus", + "schedule", + "syllabus_version", + "ending_date", + "stage", + "language", + "created_at", + "updated_at", + "never_ends", + "online_meeting_url", + "timezone", + "is_hidden_on_prework", + "available_as_saas", + ) def create(self, validated_data): - del self.context['request'] + del self.context["request"] cohort = Cohort.objects.create(**validated_data, **self.context) if cohort.schedule: @@ -752,14 +820,32 @@ class CohortPUTSerializer(CohortSerializerMixin): class Meta: model = Cohort - fields = ('id', 'slug', 'name', 'kickoff_date', 'ending_date', 'remote_available', 'current_day', 'stage', - 'language', 'syllabus', 'syllabus_version', 'schedule', 'never_ends', 'private', 'online_meeting_url', - 'timezone', 'current_module', 'is_hidden_on_prework', 'available_as_saas') + fields = ( + "id", + "slug", + "name", + "kickoff_date", + "ending_date", + "remote_available", + "current_day", + "stage", + "language", + "syllabus", + "syllabus_version", + "schedule", + "never_ends", + "private", + "online_meeting_url", + "timezone", + "current_module", + "is_hidden_on_prework", + "available_as_saas", + ) def update(self, instance, validated_data): last_schedule = instance.schedule - update_timeslots = 'schedule' in validated_data and last_schedule != validated_data['schedule'] + update_timeslots = "schedule" in validated_data and last_schedule != validated_data["schedule"] cohort = super().update(instance, validated_data) if update_timeslots: @@ -772,6 +858,7 @@ def update(self, instance, validated_data): class UserDJangoRestSerializer(serializers.ModelSerializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need 
more validation. # id = serializers.IntegerField() # first_name = serializers.CharField() @@ -782,30 +869,32 @@ class UserDJangoRestSerializer(serializers.ModelSerializer): class Meta: model = User - fields = ['id', 'first_name', 'last_name', 'email'] + fields = ["id", "first_name", "last_name", "email"] class CohortUserSerializerMixin(serializers.ModelSerializer): - status_fields = ['role', 'finantial_status', 'educational_status'] + status_fields = ["role", "finantial_status", "educational_status"] def count_certificates_by_cohort(self, cohort, user_id): - return CohortUser.objects.filter(Q(educational_status='ACTIVE') | Q(educational_status__isnull=True), - user_id=user_id, - role='STUDENT', - cohort__schedule=cohort.schedule).count() + return CohortUser.objects.filter( + Q(educational_status="ACTIVE") | Q(educational_status__isnull=True), + user_id=user_id, + role="STUDENT", + cohort__schedule=cohort.schedule, + ).count() def validate(self, data: OrderedDict): - self.context['index'] += 1 - request = self.context['request'] + self.context["index"] += 1 + request = self.context["request"] is_post_method = not self.instance if isinstance(self.initial_data, list): - id = self.initial_data[self.context['index']].get('id') + id = self.initial_data[self.context["index"]].get("id") else: - id = self.initial_data.get('id') + id = self.initial_data.get("id") - user = data.get('user') - cohort = data.get('cohort') + user = data.get("user") + cohort = data.get("cohort") if isinstance(self.instance, CohortUser): instance = self.instance @@ -828,84 +917,97 @@ def validate(self, data: OrderedDict): cohorts = localize_query(cohorts, request).first() if not cohorts: - logger.debug('Cohort not be found in related academies') - raise ValidationException('Specified cohort not be found') + logger.debug("Cohort not be found in related academies") + raise ValidationException("Specified cohort not be found") - prohibited_stages = ['INACTIVE', 'DELETED', 'ENDED'] + prohibited_stages = ["INACTIVE", "DELETED", "ENDED"] - if is_post_method and 'cohort' in data and data['cohort'].stage in prohibited_stages: + if is_post_method and "cohort" in data and data["cohort"].stage in prohibited_stages: - stage = data['cohort'].stage + stage = data["cohort"].stage - raise ValidationException(f'You cannot add a student to a cohort that is {stage}.', - slug='adding-student-to-a-closed-cohort') + raise ValidationException( + f"You cannot add a student to a cohort that is {stage}.", slug="adding-student-to-a-closed-cohort" + ) - if cohort.stage == 'DELETED': - raise ValidationException('cannot add or edit a user to a cohort that has been deleted', - slug='cohort-with-stage-deleted', - code=400) + if cohort.stage == "DELETED": + raise ValidationException( + "cannot add or edit a user to a cohort that has been deleted", + slug="cohort-with-stage-deleted", + code=400, + ) count_cohort_users = CohortUser.objects.filter(user_id=user.id, cohort_id=cohort.id).count() if is_post_method and count_cohort_users: - raise ValidationException('That user already exists in this cohort') - - if ('role' in data and data['role'] != 'STUDENT' and not ProfileAcademy.objects.filter( - user_id=user.id, academy__id=cohort.academy.id).exclude(role__slug='student').exists()): - raise ValidationException('The user must be staff member to this academy before it can be a teacher') - - if (is_post_method and cohort.schedule and self.count_certificates_by_cohort(cohort, user.id) > 0): + raise ValidationException("That user already exists in this cohort") + 
+ if ( + "role" in data + and data["role"] != "STUDENT" + and not ProfileAcademy.objects.filter(user_id=user.id, academy__id=cohort.academy.id) + .exclude(role__slug="student") + .exists() + ): + raise ValidationException("The user must be staff member to this academy before it can be a teacher") + + if is_post_method and cohort.schedule and self.count_certificates_by_cohort(cohort, user.id) > 0: raise ValidationException( - 'This student is already in another cohort for the same certificate, please mark him/her hi ' - 'educational status on this prior cohort different than ACTIVE before cotinuing') + "This student is already in another cohort for the same certificate, please mark him/her hi " + "educational status on this prior cohort different than ACTIVE before cotinuing" + ) - role = data.get('role') + role = data.get("role") - exclude_params = {'id': instance.id} if instance else {} - if role == 'TEACHER' and (CohortUser.objects.filter(role=role, - cohort_id=cohort.id).exclude(**exclude_params).count()): - raise ValidationException('There can only be one main instructor in a cohort') + exclude_params = {"id": instance.id} if instance else {} + if role == "TEACHER" and ( + CohortUser.objects.filter(role=role, cohort_id=cohort.id).exclude(**exclude_params).count() + ): + raise ValidationException("There can only be one main instructor in a cohort") cohort_user = CohortUser.objects.filter(user__id=user.id, cohort__id=cohort.id).first() # move it in the view if not is_post_method and not cohort_user: - raise ValidationException('Cannot find CohortUser') + raise ValidationException("Cannot find CohortUser") - watching = data.get('watching') == True - if watching and cohort_user.educational_status != 'ACTIVE': - raise ValidationException('The student is not active in this cohort', slug='student-not-active') + watching = data.get("watching") == True + if watching and cohort_user.educational_status != "ACTIVE": + raise ValidationException("The student is not active in this cohort", slug="student-not-active") - is_graduated = ((data.get('educational_status') - or (instance and instance.educational_status or '')) == 'GRADUATED') + is_graduated = ( + data.get("educational_status") or (instance and instance.educational_status or "") + ) == "GRADUATED" - is_late = ((data.get('finantial_status') or (instance and instance.finantial_status or '')) == 'LATE') + is_late = (data.get("finantial_status") or (instance and instance.finantial_status or "")) == "LATE" if is_graduated and is_late: - raise ValidationException('Cannot be marked as `GRADUATED` if its financial ' - 'status is `LATE`') + raise ValidationException("Cannot be marked as `GRADUATED` if its financial " "status is `LATE`") - tasks_pending = Task.objects.filter(user_id=user.id, - task_status='PENDING', - task_type='PROJECT', - cohort__id=cohort.id).exclude(revision_status='IGNORED') + tasks_pending = Task.objects.filter( + user_id=user.id, task_status="PENDING", task_type="PROJECT", cohort__id=cohort.id + ).exclude(revision_status="IGNORED") mandatory_slugs = [] for task in tasks_pending: - if 'days' in task.cohort.syllabus_version.__dict__['json']: - for day in task.cohort.syllabus_version.__dict__['json']['days']: - for assignment in day['assignments']: - if 'mandatory' not in assignment or ('mandatory' in assignment - and assignment['mandatory'] == True): - mandatory_slugs.append(assignment['slug']) - - has_tasks = Task.objects.filter(associated_slug__in=mandatory_slugs).exclude( - revision_status__in=['APPROVED', 'IGNORED']).count() + 
if "days" in task.cohort.syllabus_version.__dict__["json"]: + for day in task.cohort.syllabus_version.__dict__["json"]["days"]: + for assignment in day["assignments"]: + if "mandatory" not in assignment or ( + "mandatory" in assignment and assignment["mandatory"] == True + ): + mandatory_slugs.append(assignment["slug"]) + + has_tasks = ( + Task.objects.filter(associated_slug__in=mandatory_slugs) + .exclude(revision_status__in=["APPROVED", "IGNORED"]) + .count() + ) if is_graduated and has_tasks: - raise ValidationException('User has tasks with status pending the educational status cannot be GRADUATED') + raise ValidationException("User has tasks with status pending the educational status cannot be GRADUATED") - return {**data, 'cohort': cohort, 'user': user, 'id': id} + return {**data, "cohort": cohort, "user": user, "id": id} class CohortUserListSerializer(serializers.ListSerializer): @@ -918,15 +1020,18 @@ def create(self, validated_data): for key in range(0, len(items)): item = items[key] - items[key].id = CohortUser.objects.filter(cohort__id=item.cohort_id, - user__id=item.user_id).values_list('id', flat=True).first() + items[key].id = ( + CohortUser.objects.filter(cohort__id=item.cohort_id, user__id=item.user_id) + .values_list("id", flat=True) + .first() + ) return items def update(self, instance, validated_data): # Maps for id->instance and id->data item. model_mapping = {model.id: model for model in instance} - data_mapping = {item['id']: item for item in validated_data} + data_mapping = {item["id"]: item for item in validated_data} # Perform creations and updates. ret = [] @@ -952,22 +1057,22 @@ class CohortUserSerializer(CohortUserSerializerMixin): class Meta: model = CohortUser - fields = ['id', 'user', 'cohort', 'role', 'educational_status', 'finantial_status'] + fields = ["id", "user", "cohort", "role", "educational_status", "finantial_status"] list_serializer_class = CohortUserListSerializer class CohortTimeSlotSerializer(serializers.ModelSerializer): - status_fields = ['recurrency_type'] + status_fields = ["recurrency_type"] starting_at = serializers.IntegerField(write_only=True) ending_at = serializers.IntegerField(write_only=True) class Meta: model = CohortTimeSlot - fields = ['id', 'cohort', 'starting_at', 'ending_at', 'recurrent', 'recurrency_type', 'timezone'] + fields = ["id", "cohort", "starting_at", "ending_at", "recurrent", "recurrency_type", "timezone"] class SyllabusScheduleSerializer(serializers.ModelSerializer): - status_fields = ['schedule_type'] + status_fields = ["schedule_type"] class Meta: model = SyllabusSchedule @@ -975,7 +1080,7 @@ class Meta: class SyllabusSchedulePUTSerializer(serializers.ModelSerializer): - status_fields = ['schedule_type'] + status_fields = ["schedule_type"] id = serializers.IntegerField(required=False) name = serializers.CharField(required=False) @@ -989,25 +1094,26 @@ class Meta: class SyllabusScheduleTimeSlotSerializer(serializers.ModelSerializer): - status_fields = ['recurrency_type'] + status_fields = ["recurrency_type"] starting_at = serializers.IntegerField(write_only=True) ending_at = serializers.IntegerField(write_only=True) class Meta: model = SyllabusScheduleTimeSlot fields = [ - 'id', - 'schedule', - 'starting_at', - 'ending_at', - 'recurrent', - 'recurrency_type', - 'timezone', + "id", + "schedule", + "starting_at", + "ending_at", + "recurrent", + "recurrency_type", + "timezone", ] class CohortUserPOSTSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need 
more validation. id = serpy.Field() cohort = serpy.Field() @@ -1020,7 +1126,7 @@ class CohortUserPUTSerializer(CohortUserSerializerMixin): class Meta: model = CohortUser - fields = ['id', 'user', 'cohort', 'role', 'educational_status', 'finantial_status', 'watching'] + fields = ["id", "user", "cohort", "role", "educational_status", "finantial_status", "watching"] list_serializer_class = CohortUserListSerializer @@ -1029,8 +1135,16 @@ class SyllabusSerializer(serializers.ModelSerializer): class Meta: model = Syllabus fields = [ - 'id', 'slug', 'name', 'academy_owner', 'duration_in_days', 'duration_in_hours', 'week_hours', 'github_url', - 'logo', 'private' + "id", + "slug", + "name", + "academy_owner", + "duration_in_days", + "duration_in_hours", + "week_hours", + "github_url", + "logo", + "private", ] exclude = () @@ -1040,48 +1154,47 @@ class SyllabusVersionSerializer(serializers.ModelSerializer): class Meta: model = SyllabusVersion - fields = ['json', 'version', 'syllabus', 'status', 'change_log_details'] + fields = ["json", "version", "syllabus", "status", "change_log_details"] exclude = () extra_kwargs = { - 'syllabus': { - 'read_only': True - }, - 'version': { - 'read_only': True - }, + "syllabus": {"read_only": True}, + "version": {"read_only": True}, } def validate(self, data): - request = self.context['request'] + request = self.context["request"] _data = super().validate(data) - if 'json' in data: + if "json" in data: try: - ignore = request.GET.get('ignore', '') - _log = test_syllabus(data['json'], ignore=ignore.lower().split(',')) + ignore = request.GET.get("ignore", "") + _log = test_syllabus(data["json"], ignore=ignore.lower().split(",")) if _log.http_status() != 200: raise ValidationException( - f'There are {len(_log.errors)} errors in your syllabus, please validate before submitting', - slug='syllabus-with-errors') + f"There are {len(_log.errors)} errors in your syllabus, please validate before submitting", + slug="syllabus-with-errors", + ) except Exception as e: - raise ValidationException(f'Error when testing the syllabus: {str(e)}', slug='syllabus-with-errors') + raise ValidationException(f"Error when testing the syllabus: {str(e)}", slug="syllabus-with-errors") return _data def create(self, validated_data): - syllabus = self.context['syllabus'] + syllabus = self.context["syllabus"] - previous_syllabus = SyllabusVersion.objects.filter(syllabus=syllabus).order_by('-version').first() + previous_syllabus = SyllabusVersion.objects.filter(syllabus=syllabus).order_by("-version").first() version = 1 if previous_syllabus is not None: version = previous_syllabus.version + 1 - return super(SyllabusVersionSerializer, self).create({ - **validated_data, - 'syllabus': syllabus, - 'version': version, - }) + return super(SyllabusVersionSerializer, self).create( + { + **validated_data, + "syllabus": syllabus, + "version": version, + } + ) class SyllabusVersionPutSerializer(serializers.ModelSerializer): @@ -1090,37 +1203,35 @@ class SyllabusVersionPutSerializer(serializers.ModelSerializer): class Meta: model = SyllabusVersion - fields = ['json', 'version', 'syllabus', 'status'] + fields = ["json", "version", "syllabus", "status"] exclude = () extra_kwargs = { - 'syllabus': { - 'read_only': True - }, - 'version': { - 'read_only': True - }, + "syllabus": {"read_only": True}, + "version": {"read_only": True}, } def validate(self, data): - request = self.context['request'] + request = self.context["request"] _data = super().validate(data) - if 'json' in data: + if "json" in data: try: - ignore 
= request.GET.get('ignore', '') - _log = test_syllabus(data['json'], ignore=ignore.lower().split(',')) + ignore = request.GET.get("ignore", "") + _log = test_syllabus(data["json"], ignore=ignore.lower().split(",")) if _log.http_status() != 200: raise ValidationException( - f'There are {len(_log.errors)} errors in your syllabus, please validate before submitting', - slug='syllabus-with-errors') + f"There are {len(_log.errors)} errors in your syllabus, please validate before submitting", + slug="syllabus-with-errors", + ) except Exception as e: - raise ValidationException(f'Error when testing the syllabus: {str(e)}', slug='syllabus-with-errors') + raise ValidationException(f"Error when testing the syllabus: {str(e)}", slug="syllabus-with-errors") return _data class AcademyReportSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. id = serpy.Field() name = serpy.Field() @@ -1136,29 +1247,29 @@ class AcademyReportSerializer(serpy.Serializer): def get_students(self, obj): - query = CohortUser.objects.filter(cohort__academy__id=obj.id, role='STUDENT') + query = CohortUser.objects.filter(cohort__academy__id=obj.id, role="STUDENT") return { - 'total': query.count(), - 'active': query.filter(educational_status='ACTIVE').count(), - 'suspended': query.filter(educational_status='SUSPENDED').count(), - 'graduated': query.filter(educational_status='GRADUATED').count(), - 'dropped': query.filter(educational_status='DROPPED').count(), + "total": query.count(), + "active": query.filter(educational_status="ACTIVE").count(), + "suspended": query.filter(educational_status="SUSPENDED").count(), + "graduated": query.filter(educational_status="GRADUATED").count(), + "dropped": query.filter(educational_status="DROPPED").count(), } teachers = serpy.MethodField() def get_teachers(self, obj): - query = CohortUser.objects.filter(cohort__academy__id=obj.id, cohort__stage__in=['STARTED', 'FINAL_PROJECT']) + query = CohortUser.objects.filter(cohort__academy__id=obj.id, cohort__stage__in=["STARTED", "FINAL_PROJECT"]) active = { - 'main': query.filter(role='TEACHER').count(), - 'assistant': query.filter(role='ASSISTANT').count(), - 'reviewer': query.filter(role='REVIEWER').count(), + "main": query.filter(role="TEACHER").count(), + "assistant": query.filter(role="ASSISTANT").count(), + "reviewer": query.filter(role="REVIEWER").count(), } - active['total'] = int(active['main']) + int(active['assistant']) + int(active['reviewer']) + active["total"] = int(active["main"]) + int(active["assistant"]) + int(active["reviewer"]) - total = ProfileAcademy.objects.filter(role__slug__in=['teacher', 'assistant']) + total = ProfileAcademy.objects.filter(role__slug__in=["teacher", "assistant"]) return { - 'total': total.count(), - 'active': active, + "total": total.count(), + "active": active, } diff --git a/breathecode/admissions/signals.py b/breathecode/admissions/signals.py index ae6660cc2..5e4e87f82 100644 --- a/breathecode/admissions/signals.py +++ b/breathecode/admissions/signals.py @@ -1,19 +1,19 @@ from task_manager.django.dispatch import Emisor -emisor = Emisor('breathecode.admissions') +emisor = Emisor("breathecode.admissions") # add your receives here -student_edu_status_updated = emisor.signal('student_edu_status_updated') -cohort_saved = emisor.signal('cohort_saved') -cohort_log_saved = emisor.signal('cohort_log_saved') -cohort_user_created = emisor.signal('cohort_user_created') -cohort_stage_updated = emisor.signal('cohort_stage_updated') 
+student_edu_status_updated = emisor.signal("student_edu_status_updated") +cohort_saved = emisor.signal("cohort_saved") +cohort_log_saved = emisor.signal("cohort_log_saved") +cohort_user_created = emisor.signal("cohort_user_created") +cohort_stage_updated = emisor.signal("cohort_stage_updated") -academy_saved = emisor.signal('academy_saved') +academy_saved = emisor.signal("academy_saved") # happens when any asset gets update inside the syllabus json for any version -syllabus_asset_slug_updated = emisor.signal('syllabus_asset_slug_updated') +syllabus_asset_slug_updated = emisor.signal("syllabus_asset_slug_updated") -syllabus_version_json_updated = emisor.signal('syllabus_version_json_updated') +syllabus_version_json_updated = emisor.signal("syllabus_version_json_updated") -timeslot_saved = emisor.signal('timeslot_saved') +timeslot_saved = emisor.signal("timeslot_saved") diff --git a/breathecode/admissions/tasks.py b/breathecode/admissions/tasks.py index 977d6e8a6..bb639e78f 100644 --- a/breathecode/admissions/tasks.py +++ b/breathecode/admissions/tasks.py @@ -15,123 +15,128 @@ from .actions import test_syllabus from .models import Academy, Cohort, CohortUser, SyllabusVersion -API_URL = os.getenv('API_URL', '') +API_URL = os.getenv("API_URL", "") logger = logging.getLogger(__name__) @shared_task(priority=TaskPriority.ACADEMY.value) def async_test_syllabus(syllabus_slug, syllabus_version) -> None: - logger.debug('Process async_test_syllabus') + logger.debug("Process async_test_syllabus") syl_version = SyllabusVersion.objects.filter(syllabus__slug=syllabus_slug, version=syllabus_version).first() if syl_version is None: - logger.error(f'Syllabus {syllabus_slug} v{syllabus_version} not found') + logger.error(f"Syllabus {syllabus_slug} v{syllabus_version} not found") - syl_version.integrity_status = 'PENDING' + syl_version.integrity_status = "PENDING" syl_version.integrity_check_at = timezone.now() try: report = test_syllabus(syl_version.json) syl_version.integrity_report = report.serialize() if report.http_status() == 200: - syl_version.integrity_status = 'OK' + syl_version.integrity_status = "OK" else: - syl_version.integrity_status = 'ERROR' + syl_version.integrity_status = "ERROR" except Exception as e: - syl_version.integrity_report = {'errors': [str(e)], 'warnings': []} - syl_version.integrity_status = 'ERROR' + syl_version.integrity_report = {"errors": [str(e)], "warnings": []} + syl_version.integrity_status = "ERROR" syl_version.save() @task(priority=TaskPriority.STUDENT.value) -def build_cohort_user(cohort_id: int, user_id: int, role: str = 'STUDENT', **_: Any) -> None: - logger.info(f'Starting build_cohort_user for cohort {cohort_id} and user {user_id}') +def build_cohort_user(cohort_id: int, user_id: int, role: str = "STUDENT", **_: Any) -> None: + logger.info(f"Starting build_cohort_user for cohort {cohort_id} and user {user_id}") - bad_stages = ['DELETED', 'ENDED', 'FINAL_PROJECT', 'STARTED'] + bad_stages = ["DELETED", "ENDED", "FINAL_PROJECT", "STARTED"] if not (cohort := Cohort.objects.filter(id=cohort_id).exclude(stage__in=bad_stages).first()): - raise AbortTask(f'Cohort with id {cohort_id} not found') + raise AbortTask(f"Cohort with id {cohort_id} not found") if not (user := User.objects.filter(id=user_id, is_active=True).first()): - raise AbortTask(f'User with id {user_id} not found') - - cohort_user, created = CohortUser.objects.get_or_create(cohort=cohort, - user=user, - role=role, - defaults={ - 'finantial_status': 'UP_TO_DATE', - 'educational_status': 'ACTIVE', - }) + raise 
AbortTask(f"User with id {user_id} not found") + + cohort_user, created = CohortUser.objects.get_or_create( + cohort=cohort, + user=user, + role=role, + defaults={ + "finantial_status": "UP_TO_DATE", + "educational_status": "ACTIVE", + }, + ) if created: - logger.info('User added to cohort') + logger.info("User added to cohort") - if role == 'TEACHER': - role = 'teacher' + if role == "TEACHER": + role = "teacher" - elif role == 'ASSISTANT': - role = 'assistant' + elif role == "ASSISTANT": + role = "assistant" - elif role == 'REVIEWER': - role = 'homework_reviewer' + elif role == "REVIEWER": + role = "homework_reviewer" else: - role = 'student' + role = "student" role = Role.objects.filter(slug=role).first() - profile, created = ProfileAcademy.objects.get_or_create(academy=cohort.academy, - user=user, - role=role, - defaults={ - 'email': user.email, - 'first_name': user.first_name, - 'last_name': user.last_name, - 'status': 'ACTIVE', - }) - - if profile.status != 'ACTIVE': - profile.status = 'ACTIVE' + profile, created = ProfileAcademy.objects.get_or_create( + academy=cohort.academy, + user=user, + role=role, + defaults={ + "email": user.email, + "first_name": user.first_name, + "last_name": user.last_name, + "status": "ACTIVE", + }, + ) + + if profile.status != "ACTIVE": + profile.status = "ACTIVE" profile.save() - logger.info('ProfileAcademy mark as active') + logger.info("ProfileAcademy mark as active") if created: - logger.info('ProfileAcademy added') + logger.info("ProfileAcademy added") - tasks_activity.add_activity.delay(user_id, - 'joined_cohort', - related_type='admissions.CohortUser', - related_id=cohort_user.id) + tasks_activity.add_activity.delay( + user_id, "joined_cohort", related_type="admissions.CohortUser", related_id=cohort_user.id + ) @task(priority=TaskPriority.STUDENT.value) -def build_profile_academy(academy_id: int, user_id: int, role: str = 'student', **_: Any) -> None: - logger.info(f'Starting build_profile_academy for cohort {academy_id} and user {user_id}') +def build_profile_academy(academy_id: int, user_id: int, role: str = "student", **_: Any) -> None: + logger.info(f"Starting build_profile_academy for cohort {academy_id} and user {user_id}") if not (user := User.objects.filter(id=user_id, is_active=True).first()): - raise AbortTask(f'User with id {user_id} not found') + raise AbortTask(f"User with id {user_id} not found") if not (academy := Academy.objects.filter(id=academy_id).first()): - raise AbortTask(f'Academy with id {academy_id} not found') + raise AbortTask(f"Academy with id {academy_id} not found") if not (role := Role.objects.filter(slug=role).first()): - raise AbortTask(f'Role with slug {role} not found') - - profile, created = ProfileAcademy.objects.get_or_create(academy=academy, - user=user, - role=role, - defaults={ - 'email': user.email, - 'first_name': user.first_name, - 'last_name': user.last_name, - 'status': 'ACTIVE', - }) - - if profile.status != 'ACTIVE': - profile.status = 'ACTIVE' + raise AbortTask(f"Role with slug {role} not found") + + profile, created = ProfileAcademy.objects.get_or_create( + academy=academy, + user=user, + role=role, + defaults={ + "email": user.email, + "first_name": user.first_name, + "last_name": user.last_name, + "status": "ACTIVE", + }, + ) + + if profile.status != "ACTIVE": + profile.status = "ACTIVE" profile.save() - logger.info('ProfileAcademy mark as active') + logger.info("ProfileAcademy mark as active") if created: - logger.info('ProfileAcademy added') + logger.info("ProfileAcademy added") diff --git 
a/breathecode/admissions/tests/actions/tests_update_asset_on_syllabus.py b/breathecode/admissions/tests/actions/tests_update_asset_on_syllabus.py index 91a64157b..4bf2a7a08 100644 --- a/breathecode/admissions/tests/actions/tests_update_asset_on_syllabus.py +++ b/breathecode/admissions/tests/actions/tests_update_asset_on_syllabus.py @@ -1,6 +1,7 @@ """ Test mentorhips """ + import json from breathecode.authenticate.models import Token from ..mixins import AdmissionsTestCase @@ -16,33 +17,27 @@ def test_create_session_mentor_first_no_previous_nothing(self): if should create a room with status 'pending' """ - data1 = json.load(open('breathecode/admissions/tests/actions/sample_syllabus1.json', 'r')) - data2 = json.load(open('breathecode/admissions/tests/actions/sample_syllabus2.json', 'r')) - models1 = self.bc.database.create(syllabus=True, - syllabus_version={'json': data1}, - authenticate=True, - capability='crud_syllabus') - models2 = self.bc.database.create(syllabus=True, - syllabus_version={'json': data2}, - authenticate=True, - capability='crud_syllabus') + data1 = json.load(open("breathecode/admissions/tests/actions/sample_syllabus1.json", "r")) + data2 = json.load(open("breathecode/admissions/tests/actions/sample_syllabus2.json", "r")) + models1 = self.bc.database.create( + syllabus=True, syllabus_version={"json": data1}, authenticate=True, capability="crud_syllabus" + ) + models2 = self.bc.database.create( + syllabus=True, syllabus_version={"json": data2}, authenticate=True, capability="crud_syllabus" + ) changes = { - 'QUIZ': { - 'from': 'html', - 'to': 'html-test' - }, - 'LESSON': { - 'from': 'learn-in-public', - 'to': 'learn-in-public-test' - } + "QUIZ": {"from": "html", "to": "html-test"}, + "LESSON": {"from": "learn-in-public", "to": "learn-in-public-test"}, } for asset_type in changes: - findings = update_asset_on_json(from_slug=changes[asset_type]['from'], - to_slug=changes[asset_type]['to'], - asset_type=asset_type, - simulate=False) + findings = update_asset_on_json( + from_slug=changes[asset_type]["from"], + to_slug=changes[asset_type]["to"], + asset_type=asset_type, + simulate=False, + ) self.assertEqual(len(findings), 2) @@ -50,15 +45,15 @@ def test_create_session_mentor_first_no_previous_nothing(self): results = {} for s in syllabus: for v in s.syllabusversion_set.all(): - for d in v.json['days']: + for d in v.json["days"]: for asset_type in d: - if asset_type in ['quizzes', 'assignments', 'projects', 'replits', 'lessons']: + if asset_type in ["quizzes", "assignments", "projects", "replits", "lessons"]: if asset_type not in results: results[asset_type] = {} for a in d[asset_type]: - if a['slug'] not in results[asset_type]: - results[asset_type][a['slug']] = 0 - results[asset_type][a['slug']] += 1 + if a["slug"] not in results[asset_type]: + results[asset_type][a["slug"]] = 0 + results[asset_type][a["slug"]] += 1 # test that new slugs are present in syllabus def test_for_existance(results, existance): @@ -70,18 +65,19 @@ def test_for_existance(results, existance): test_for_existance( results, { - 'quizzes': { + "quizzes": { # one html-test should now be found on each syllabus - 'html-test': 2, + "html-test": 2, }, - 'replits': { + "replits": { # replits should be the same, we replaced the "html" quiz, not the "html" replit - 'html': 2, + "html": 2, }, - 'lessons': { - 'learn-in-public-test': 2, + "lessons": { + "learn-in-public-test": 2, }, - }) + }, + ) # test that old slugs are gone from syllabus def test_for_removals(results, removals): @@ -89,7 +85,10 @@ def 
test_for_removals(results, removals): for slug in removals[asset_type]: self.assertEqual(hasattr(results[asset_type], slug), False) - test_for_removals(results, { - 'quizzes': ['html'], - 'lessons': ['learn-in-public'], - }) + test_for_removals( + results, + { + "quizzes": ["html"], + "lessons": ["learn-in-public"], + }, + ) diff --git a/breathecode/admissions/tests/admin/tests_add_cohort_slug_to_active_campaign.py b/breathecode/admissions/tests/admin/tests_add_cohort_slug_to_active_campaign.py index ecb315ab8..311f594e6 100644 --- a/breathecode/admissions/tests/admin/tests_add_cohort_slug_to_active_campaign.py +++ b/breathecode/admissions/tests/admin/tests_add_cohort_slug_to_active_campaign.py @@ -1,6 +1,7 @@ """ Test /cohort/user """ + from unittest.mock import MagicMock, call, patch from breathecode.admissions.models import Cohort from breathecode.admissions.admin import add_cohort_slug_to_active_campaign @@ -13,7 +14,7 @@ class CohortUserTestSuite(AdmissionsTestCase): 🔽🔽🔽 With zero Cohort """ - @patch('breathecode.marketing.tasks.add_cohort_slug_as_acp_tag.delay', MagicMock()) + @patch("breathecode.marketing.tasks.add_cohort_slug_as_acp_tag.delay", MagicMock()) def test_add_cohort_slug_to_active_campaign__zero_cohorts(self): from breathecode.marketing.tasks import add_cohort_slug_as_acp_tag @@ -28,7 +29,7 @@ def test_add_cohort_slug_to_active_campaign__zero_cohorts(self): 🔽🔽🔽 With one Cohort """ - @patch('breathecode.marketing.tasks.add_cohort_slug_as_acp_tag.delay', MagicMock()) + @patch("breathecode.marketing.tasks.add_cohort_slug_as_acp_tag.delay", MagicMock()) def test_add_cohort_slug_to_active_campaign__one_cohort(self): from breathecode.marketing.tasks import add_cohort_slug_as_acp_tag @@ -39,15 +40,18 @@ def test_add_cohort_slug_to_active_campaign__one_cohort(self): add_cohort_slug_to_active_campaign(None, request, queryset) - self.assertEqual(add_cohort_slug_as_acp_tag.delay.call_args_list, [ - call(model.cohort.id, model.cohort.academy.id), - ]) + self.assertEqual( + add_cohort_slug_as_acp_tag.delay.call_args_list, + [ + call(model.cohort.id, model.cohort.academy.id), + ], + ) """ 🔽🔽🔽 With two Cohort """ - @patch('breathecode.marketing.tasks.add_cohort_slug_as_acp_tag.delay', MagicMock()) + @patch("breathecode.marketing.tasks.add_cohort_slug_as_acp_tag.delay", MagicMock()) def test_add_cohort_slug_to_active_campaign__two_cohorts(self): from breathecode.marketing.tasks import add_cohort_slug_as_acp_tag @@ -59,7 +63,10 @@ def test_add_cohort_slug_to_active_campaign__two_cohorts(self): add_cohort_slug_to_active_campaign(None, request, queryset) - self.assertEqual(add_cohort_slug_as_acp_tag.delay.call_args_list, [ - call(model1.cohort.id, model1.cohort.academy.id), - call(model2.cohort.id, model2.cohort.academy.id), - ]) + self.assertEqual( + add_cohort_slug_as_acp_tag.delay.call_args_list, + [ + call(model1.cohort.id, model1.cohort.academy.id), + call(model2.cohort.id, model2.cohort.academy.id), + ], + ) diff --git a/breathecode/admissions/tests/admin/tests_add_student_tag_to_active_campaign.py b/breathecode/admissions/tests/admin/tests_add_student_tag_to_active_campaign.py index b97793d91..252892e0b 100644 --- a/breathecode/admissions/tests/admin/tests_add_student_tag_to_active_campaign.py +++ b/breathecode/admissions/tests/admin/tests_add_student_tag_to_active_campaign.py @@ -1,6 +1,7 @@ """ Test /cohort/user """ + from unittest.mock import MagicMock, call, patch from django.http.request import HttpRequest @@ -16,9 +17,9 @@ class CohortUserTestSuite(AdmissionsTestCase): 🔽🔽🔽 
With zero CohortUser """ - @patch('breathecode.marketing.tasks.add_cohort_task_to_student.delay', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.marketing.tasks.add_cohort_task_to_student.delay", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_add_student_tag_to_active_campaign__zero_cohort_users(self): from breathecode.marketing.tasks import add_cohort_task_to_student @@ -33,9 +34,9 @@ def test_add_student_tag_to_active_campaign__zero_cohort_users(self): 🔽🔽🔽 With one CohortUser """ - @patch('breathecode.marketing.tasks.add_cohort_task_to_student.delay', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.marketing.tasks.add_cohort_task_to_student.delay", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_add_student_tag_to_active_campaign__one_cohort_user(self): from breathecode.marketing.tasks import add_cohort_task_to_student @@ -46,17 +47,20 @@ def test_add_student_tag_to_active_campaign__one_cohort_user(self): add_student_tag_to_active_campaign(None, request, queryset) - self.assertEqual(add_cohort_task_to_student.delay.call_args_list, [ - call(model.user.id, model.cohort.id, model.cohort.academy.id), - ]) + self.assertEqual( + add_cohort_task_to_student.delay.call_args_list, + [ + call(model.user.id, model.cohort.id, model.cohort.academy.id), + ], + ) """ 🔽🔽🔽 With two CohortUser """ - @patch('breathecode.marketing.tasks.add_cohort_task_to_student.delay', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.marketing.tasks.add_cohort_task_to_student.delay", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_add_student_tag_to_active_campaign__two_cohort_users(self): from breathecode.marketing.tasks import add_cohort_task_to_student @@ -68,7 +72,10 @@ def test_add_student_tag_to_active_campaign__two_cohort_users(self): add_student_tag_to_active_campaign(None, request, queryset) - self.assertEqual(add_cohort_task_to_student.delay.call_args_list, [ - call(model1.user.id, model1.cohort.id, model1.cohort.academy.id), - call(model2.user.id, model2.cohort.id, model2.cohort.academy.id), - ]) + self.assertEqual( + add_cohort_task_to_student.delay.call_args_list, + [ + call(model1.user.id, model1.cohort.id, model1.cohort.academy.id), + call(model2.user.id, model2.cohort.id, model2.cohort.academy.id), + ], + ) diff --git a/breathecode/admissions/tests/admin/tests_get_attendancy_logs.py b/breathecode/admissions/tests/admin/tests_get_attendancy_logs.py index 50d3b9568..4c660f9d7 100644 --- 
a/breathecode/admissions/tests/admin/tests_get_attendancy_logs.py +++ b/breathecode/admissions/tests/admin/tests_get_attendancy_logs.py @@ -1,6 +1,7 @@ """ Test /cohort/user """ + import random from unittest.mock import MagicMock, call, patch from breathecode.admissions.models import Cohort @@ -15,7 +16,7 @@ class CohortUserTestSuite(AdmissionsTestCase): 🔽🔽🔽 With zero Cohort """ - @patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()) + @patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()) def test_without_cohorts(self): request = HttpRequest() queryset = Cohort.objects.all() @@ -24,10 +25,10 @@ def test_without_cohorts(self): self.assertEqual(tasks.get_attendancy_log.delay.call_args_list, []) - @patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()) + @patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()) def test_with_many_cohorts(self): how_many = random.randint(2, 5) - with patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()): + with patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()): self.bc.database.create(cohort=how_many) request = HttpRequest() queryset = Cohort.objects.all() diff --git a/breathecode/admissions/tests/admin/tests_link_randomly_relations_to_cohorts.py b/breathecode/admissions/tests/admin/tests_link_randomly_relations_to_cohorts.py index 5b0148354..d7d2d6de0 100644 --- a/breathecode/admissions/tests/admin/tests_link_randomly_relations_to_cohorts.py +++ b/breathecode/admissions/tests/admin/tests_link_randomly_relations_to_cohorts.py @@ -1,6 +1,7 @@ """ Test /cohort/user """ + from unittest.mock import patch from breathecode.tests.mocks.django_contrib import DJANGO_CONTRIB_PATH, apply_django_contrib_messages_mock from breathecode.admissions.models import Cohort @@ -11,24 +12,25 @@ class CohortUserTestSuite(AdmissionsTestCase): """Test /cohort/user""" + """ 🔽🔽🔽 With zero Cohort """ - @patch(DJANGO_CONTRIB_PATH['messages'], apply_django_contrib_messages_mock()) + @patch(DJANGO_CONTRIB_PATH["messages"], apply_django_contrib_messages_mock()) def test_link_randomly_relations_to_cohorts__with_zero_cohorts(self): request = HttpRequest() queryset = Cohort.objects.all() link_randomly_relations_to_cohorts(None, request, queryset) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) """ 🔽🔽🔽 With one Cohort """ - @patch(DJANGO_CONTRIB_PATH['messages'], apply_django_contrib_messages_mock()) + @patch(DJANGO_CONTRIB_PATH["messages"], apply_django_contrib_messages_mock()) def test_link_randomly_relations_to_cohorts__with_one_cohort(self): # self.generate_models(academy=True, skip_cohort=True) model = self.generate_models(academy=True, cohort=True) @@ -37,54 +39,62 @@ def test_link_randomly_relations_to_cohorts__with_one_cohort(self): queryset = Cohort.objects.all() link_randomly_relations_to_cohorts(None, request, queryset) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), [{**self.model_to_dict(model, 'cohort')}]) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), [{**self.model_to_dict(model, "cohort")}]) """ 🔽🔽🔽 With one Cohort and SyllabusVersion """ - @patch(DJANGO_CONTRIB_PATH['messages'], apply_django_contrib_messages_mock()) + @patch(DJANGO_CONTRIB_PATH["messages"], apply_django_contrib_messages_mock()) def test_link_randomly_relations_to_cohorts__with_one_cohort__with_syllabus_version(self): # self.generate_models(academy=True, 
skip_cohort=True) - cohort_kwargs = {'syllabus_version': None} - model = self.generate_models(academy=True, - cohort=True, - syllabus=True, - syllabus_version=True, - cohort_kwargs=cohort_kwargs) + cohort_kwargs = {"syllabus_version": None} + model = self.generate_models( + academy=True, cohort=True, syllabus=True, syllabus_version=True, cohort_kwargs=cohort_kwargs + ) request = HttpRequest() queryset = Cohort.objects.all() link_randomly_relations_to_cohorts(None, request, queryset) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), - [{ - **self.model_to_dict(model, 'cohort'), - 'syllabus_version_id': 1, - }]) + self.assertEqual( + self.bc.database.list_of("admissions.Cohort"), + [ + { + **self.model_to_dict(model, "cohort"), + "syllabus_version_id": 1, + } + ], + ) """ 🔽🔽🔽 With one Cohort, SyllabusVersion and SyllabusSchedule """ - @patch(DJANGO_CONTRIB_PATH['messages'], apply_django_contrib_messages_mock()) + @patch(DJANGO_CONTRIB_PATH["messages"], apply_django_contrib_messages_mock()) def test_link_randomly_relations_to_cohorts__with_one_cohort__with_schedule(self): # self.generate_models(academy=True, skip_cohort=True) - cohort_kwargs = {'syllabus_version': None, 'schedule': None} - model = self.generate_models(academy=True, - cohort=True, - syllabus=True, - syllabus_version=True, - syllabus_schedule=True, - cohort_kwargs=cohort_kwargs) + cohort_kwargs = {"syllabus_version": None, "schedule": None} + model = self.generate_models( + academy=True, + cohort=True, + syllabus=True, + syllabus_version=True, + syllabus_schedule=True, + cohort_kwargs=cohort_kwargs, + ) request = HttpRequest() queryset = Cohort.objects.all() link_randomly_relations_to_cohorts(None, request, queryset) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), - [{ - **self.model_to_dict(model, 'cohort'), - 'syllabus_version_id': 1, - 'schedule_id': 1, - }]) + self.assertEqual( + self.bc.database.list_of("admissions.Cohort"), + [ + { + **self.model_to_dict(model, "cohort"), + "syllabus_version_id": 1, + "schedule_id": 1, + } + ], + ) diff --git a/breathecode/admissions/tests/admin/tests_mark_as_available_as_sass.py b/breathecode/admissions/tests/admin/tests_mark_as_available_as_sass.py index cf7e70ce9..54f534897 100644 --- a/breathecode/admissions/tests/admin/tests_mark_as_available_as_sass.py +++ b/breathecode/admissions/tests/admin/tests_mark_as_available_as_sass.py @@ -1,6 +1,7 @@ """ Test replicate_in_all """ + from unittest.mock import MagicMock, call, patch from breathecode.admissions.admin import mark_as_available_as_saas from django.http.request import HttpRequest @@ -14,61 +15,61 @@ class CohortUserTestSuite(AdmissionsTestCase): def test__with_zero_academies(self): request = HttpRequest() - Academy = self.bc.database.get_model('admissions.Academy') + Academy = self.bc.database.get_model("admissions.Academy") queryset = Academy.objects.all() mark_as_available_as_saas(None, request, queryset) - self.assertEqual(self.bc.database.list_of('admissions.Academy'), []) + self.assertEqual(self.bc.database.list_of("admissions.Academy"), []) """ 🔽🔽🔽 With two Academy """ def test__with_two_academies__available_as_saas_is_initially_false(self): - academy = {'available_as_saas': False} + academy = {"available_as_saas": False} model = self.bc.database.create(academy=(2, academy)) request = HttpRequest() - Academy = self.bc.database.get_model('admissions.Academy') + Academy = self.bc.database.get_model("admissions.Academy") queryset = Academy.objects.all() mark_as_available_as_saas(None, request, 
queryset) self.assertEqual( - self.bc.database.list_of('admissions.Academy'), + self.bc.database.list_of("admissions.Academy"), [ { **self.bc.format.to_dict(model.academy[0]), - 'available_as_saas': True, + "available_as_saas": True, }, { **self.bc.format.to_dict(model.academy[1]), - 'available_as_saas': True, + "available_as_saas": True, }, ], ) def test__with_two_academies__available_as_saas_is_initially_true(self): - academy = {'available_as_saas': True} + academy = {"available_as_saas": True} model = self.bc.database.create(academy=(2, academy)) request = HttpRequest() - Academy = self.bc.database.get_model('admissions.Academy') + Academy = self.bc.database.get_model("admissions.Academy") queryset = Academy.objects.all() mark_as_available_as_saas(None, request, queryset) self.assertEqual( - self.bc.database.list_of('admissions.Academy'), + self.bc.database.list_of("admissions.Academy"), [ { **self.bc.format.to_dict(model.academy[0]), - 'available_as_saas': True, + "available_as_saas": True, }, { **self.bc.format.to_dict(model.academy[1]), - 'available_as_saas': True, + "available_as_saas": True, }, ], ) diff --git a/breathecode/admissions/tests/admin/tests_mark_as_unavailable_as_sass.py b/breathecode/admissions/tests/admin/tests_mark_as_unavailable_as_sass.py index 55f22fbd9..61fd63298 100644 --- a/breathecode/admissions/tests/admin/tests_mark_as_unavailable_as_sass.py +++ b/breathecode/admissions/tests/admin/tests_mark_as_unavailable_as_sass.py @@ -1,6 +1,7 @@ """ Test replicate_in_all """ + from unittest.mock import MagicMock, call, patch from breathecode.admissions.admin import mark_as_unavailable_as_saas from django.http.request import HttpRequest @@ -14,61 +15,61 @@ class CohortUserTestSuite(AdmissionsTestCase): def test__with_zero_academies(self): request = HttpRequest() - Academy = self.bc.database.get_model('admissions.Academy') + Academy = self.bc.database.get_model("admissions.Academy") queryset = Academy.objects.all() mark_as_unavailable_as_saas(None, request, queryset) - self.assertEqual(self.bc.database.list_of('admissions.Academy'), []) + self.assertEqual(self.bc.database.list_of("admissions.Academy"), []) """ 🔽🔽🔽 With two Academy """ def test__with_two_academies__available_as_saas_is_initially_false(self): - academy = {'available_as_saas': False} + academy = {"available_as_saas": False} model = self.bc.database.create(academy=(2, academy)) request = HttpRequest() - Academy = self.bc.database.get_model('admissions.Academy') + Academy = self.bc.database.get_model("admissions.Academy") queryset = Academy.objects.all() mark_as_unavailable_as_saas(None, request, queryset) self.assertEqual( - self.bc.database.list_of('admissions.Academy'), + self.bc.database.list_of("admissions.Academy"), [ { **self.bc.format.to_dict(model.academy[0]), - 'available_as_saas': False, + "available_as_saas": False, }, { **self.bc.format.to_dict(model.academy[1]), - 'available_as_saas': False, + "available_as_saas": False, }, ], ) def test__with_two_academies__available_as_saas_is_initially_true(self): - academy = {'available_as_saas': True} + academy = {"available_as_saas": True} model = self.bc.database.create(academy=(2, academy)) request = HttpRequest() - Academy = self.bc.database.get_model('admissions.Academy') + Academy = self.bc.database.get_model("admissions.Academy") queryset = Academy.objects.all() mark_as_unavailable_as_saas(None, request, queryset) self.assertEqual( - self.bc.database.list_of('admissions.Academy'), + self.bc.database.list_of("admissions.Academy"), [ { 
**self.bc.format.to_dict(model.academy[0]), - 'available_as_saas': False, + "available_as_saas": False, }, { **self.bc.format.to_dict(model.academy[1]), - 'available_as_saas': False, + "available_as_saas": False, }, ], ) diff --git a/breathecode/admissions/tests/admin/tests_replicate_in_all.py b/breathecode/admissions/tests/admin/tests_replicate_in_all.py index 3837dcd03..d96d2aaa3 100644 --- a/breathecode/admissions/tests/admin/tests_replicate_in_all.py +++ b/breathecode/admissions/tests/admin/tests_replicate_in_all.py @@ -1,6 +1,7 @@ """ Test replicate_in_all """ + from unittest.mock import MagicMock, call, patch from breathecode.admissions.models import SyllabusSchedule from breathecode.admissions.admin import replicate_in_all @@ -10,11 +11,12 @@ class CohortUserTestSuite(AdmissionsTestCase): """Test /cohort/user""" + """ 🔽🔽🔽 With zero Academy """ - @patch('django.contrib.messages.add_message', MagicMock()) + @patch("django.contrib.messages.add_message", MagicMock()) def test_replicate_in_all__with_zero_schedules(self): from django.contrib import messages @@ -23,16 +25,18 @@ def test_replicate_in_all__with_zero_schedules(self): replicate_in_all(None, request, queryset) - self.assertEqual(self.bc.database.list_of('admissions.SyllabusSchedule'), []) - self.assertEqual(self.bc.database.list_of('admissions.SyllabusScheduleTimeSlot'), []) - self.assertEqual(messages.add_message.call_args_list, - [call(request, 20, 'All academies in sync with those syllabus schedules')]) + self.assertEqual(self.bc.database.list_of("admissions.SyllabusSchedule"), []) + self.assertEqual(self.bc.database.list_of("admissions.SyllabusScheduleTimeSlot"), []) + self.assertEqual( + messages.add_message.call_args_list, + [call(request, 20, "All academies in sync with those syllabus schedules")], + ) """ 🔽🔽🔽 With one Academy and one SyllabusSchedule """ - @patch('django.contrib.messages.add_message', MagicMock()) + @patch("django.contrib.messages.add_message", MagicMock()) def test_replicate_in_all__with_one_schedule__just_the_same_academy(self): from django.contrib import messages @@ -43,16 +47,21 @@ def test_replicate_in_all__with_one_schedule__just_the_same_academy(self): replicate_in_all(None, request, queryset) - self.assertEqual(self.bc.database.list_of('admissions.SyllabusSchedule'), [ - self.bc.format.to_dict(model.syllabus_schedule), - ]) + self.assertEqual( + self.bc.database.list_of("admissions.SyllabusSchedule"), + [ + self.bc.format.to_dict(model.syllabus_schedule), + ], + ) - self.assertEqual(self.bc.database.list_of('admissions.SyllabusScheduleTimeSlot'), []) + self.assertEqual(self.bc.database.list_of("admissions.SyllabusScheduleTimeSlot"), []) - self.assertEqual(messages.add_message.call_args_list, - [call(request, 20, 'All academies in sync with those syllabus schedules')]) + self.assertEqual( + messages.add_message.call_args_list, + [call(request, 20, "All academies in sync with those syllabus schedules")], + ) - @patch('django.contrib.messages.add_message', MagicMock()) + @patch("django.contrib.messages.add_message", MagicMock()) def test_replicate_in_all__with_one_schedule__two_academies__without_timezone(self): from django.contrib import messages @@ -63,23 +72,32 @@ def test_replicate_in_all__with_one_schedule__two_academies__without_timezone(se replicate_in_all(None, request, queryset) - self.assertEqual(self.bc.database.list_of('admissions.SyllabusSchedule'), [ - self.bc.format.to_dict(model.syllabus_schedule), - ]) - - 
self.assertEqual(self.bc.database.list_of('admissions.SyllabusScheduleTimeSlot'), []) - - self.assertEqual(messages.add_message.call_args_list, [ - call( - request, 40, f'The following academies ({model.academy[1].slug}) was skipped ' - "because it doesn't have a timezone assigned") - ]) - - @patch('django.contrib.messages.add_message', MagicMock()) + self.assertEqual( + self.bc.database.list_of("admissions.SyllabusSchedule"), + [ + self.bc.format.to_dict(model.syllabus_schedule), + ], + ) + + self.assertEqual(self.bc.database.list_of("admissions.SyllabusScheduleTimeSlot"), []) + + self.assertEqual( + messages.add_message.call_args_list, + [ + call( + request, + 40, + f"The following academies ({model.academy[1].slug}) was skipped " + "because it doesn't have a timezone assigned", + ) + ], + ) + + @patch("django.contrib.messages.add_message", MagicMock()) def test_replicate_in_all__with_one_schedule__two_academies(self): from django.contrib import messages - academy = {'timezone': 'Pacific/Pago_Pago'} + academy = {"timezone": "Pacific/Pago_Pago"} model = self.bc.database.create(academy=(2, academy), syllabus_schedule=1) request = HttpRequest() @@ -87,28 +105,32 @@ def test_replicate_in_all__with_one_schedule__two_academies(self): replicate_in_all(None, request, queryset) - self.assertEqual(self.bc.database.list_of('admissions.SyllabusSchedule'), [ - { - **self.bc.format.to_dict(model.syllabus_schedule) - }, - { - **self.bc.format.to_dict(model.syllabus_schedule), - 'id': 2, - 'academy_id': 2, - }, - ]) - - self.assertEqual(self.bc.database.list_of('admissions.SyllabusScheduleTimeSlot'), []) - - self.assertEqual(messages.add_message.call_args_list, [ - call(request, 20, 'All academies in sync with those syllabus schedules'), - ]) - - @patch('django.contrib.messages.add_message', MagicMock()) + self.assertEqual( + self.bc.database.list_of("admissions.SyllabusSchedule"), + [ + {**self.bc.format.to_dict(model.syllabus_schedule)}, + { + **self.bc.format.to_dict(model.syllabus_schedule), + "id": 2, + "academy_id": 2, + }, + ], + ) + + self.assertEqual(self.bc.database.list_of("admissions.SyllabusScheduleTimeSlot"), []) + + self.assertEqual( + messages.add_message.call_args_list, + [ + call(request, 20, "All academies in sync with those syllabus schedules"), + ], + ) + + @patch("django.contrib.messages.add_message", MagicMock()) def test_replicate_in_all__with_one_schedule__two_academies__zero_timeslots(self): from django.contrib import messages - academy = {'timezone': 'Pacific/Pago_Pago'} + academy = {"timezone": "Pacific/Pago_Pago"} model = self.bc.database.create(academy=(2, academy), syllabus_schedule=1) request = HttpRequest() @@ -116,28 +138,32 @@ def test_replicate_in_all__with_one_schedule__two_academies__zero_timeslots(self replicate_in_all(None, request, queryset) - self.assertEqual(self.bc.database.list_of('admissions.SyllabusSchedule'), [ - { - **self.bc.format.to_dict(model.syllabus_schedule) - }, - { - **self.bc.format.to_dict(model.syllabus_schedule), - 'id': 2, - 'academy_id': 2, - }, - ]) - - self.assertEqual(self.bc.database.list_of('admissions.SyllabusScheduleTimeSlot'), []) - - self.assertEqual(messages.add_message.call_args_list, [ - call(request, 20, 'All academies in sync with those syllabus schedules'), - ]) - - @patch('django.contrib.messages.add_message', MagicMock()) + self.assertEqual( + self.bc.database.list_of("admissions.SyllabusSchedule"), + [ + {**self.bc.format.to_dict(model.syllabus_schedule)}, + { + **self.bc.format.to_dict(model.syllabus_schedule), + "id": 2, 
+ "academy_id": 2, + }, + ], + ) + + self.assertEqual(self.bc.database.list_of("admissions.SyllabusScheduleTimeSlot"), []) + + self.assertEqual( + messages.add_message.call_args_list, + [ + call(request, 20, "All academies in sync with those syllabus schedules"), + ], + ) + + @patch("django.contrib.messages.add_message", MagicMock()) def test_replicate_in_all__with_one_schedule__two_academies__one_timeslot(self): from django.contrib import messages - academy = {'timezone': 'Pacific/Pago_Pago'} + academy = {"timezone": "Pacific/Pago_Pago"} model = self.bc.database.create(academy=(2, academy), syllabus_schedule=1, syllabus_schedule_time_slot=1) request = HttpRequest() @@ -145,38 +171,43 @@ def test_replicate_in_all__with_one_schedule__two_academies__one_timeslot(self): replicate_in_all(None, request, queryset) - self.assertEqual(self.bc.database.list_of('admissions.SyllabusSchedule'), [ - { - **self.bc.format.to_dict(model.syllabus_schedule) - }, - { - **self.bc.format.to_dict(model.syllabus_schedule), - 'id': 2, - 'academy_id': 2, - }, - ]) - - self.assertEqual(self.bc.database.list_of('admissions.SyllabusScheduleTimeSlot'), [ - { - **self.bc.format.to_dict(model.syllabus_schedule_time_slot) - }, - { - **self.bc.format.to_dict(model.syllabus_schedule_time_slot), - 'id': 2, - 'schedule_id': 2, - 'timezone': model.academy[1].timezone, - }, - ]) - - self.assertEqual(messages.add_message.call_args_list, [ - call(request, 20, 'All academies in sync with those syllabus schedules'), - ]) - - @patch('django.contrib.messages.add_message', MagicMock()) + self.assertEqual( + self.bc.database.list_of("admissions.SyllabusSchedule"), + [ + {**self.bc.format.to_dict(model.syllabus_schedule)}, + { + **self.bc.format.to_dict(model.syllabus_schedule), + "id": 2, + "academy_id": 2, + }, + ], + ) + + self.assertEqual( + self.bc.database.list_of("admissions.SyllabusScheduleTimeSlot"), + [ + {**self.bc.format.to_dict(model.syllabus_schedule_time_slot)}, + { + **self.bc.format.to_dict(model.syllabus_schedule_time_slot), + "id": 2, + "schedule_id": 2, + "timezone": model.academy[1].timezone, + }, + ], + ) + + self.assertEqual( + messages.add_message.call_args_list, + [ + call(request, 20, "All academies in sync with those syllabus schedules"), + ], + ) + + @patch("django.contrib.messages.add_message", MagicMock()) def test_replicate_in_all__with_one_schedule__two_academies__two_timeslots(self): from django.contrib import messages - academy = {'timezone': 'Pacific/Pago_Pago'} + academy = {"timezone": "Pacific/Pago_Pago"} model = self.bc.database.create(academy=(2, academy), syllabus_schedule=1, syllabus_schedule_time_slot=2) request = HttpRequest() @@ -184,38 +215,41 @@ def test_replicate_in_all__with_one_schedule__two_academies__two_timeslots(self) replicate_in_all(None, request, queryset) - self.assertEqual(self.bc.database.list_of('admissions.SyllabusSchedule'), [ - { - **self.bc.format.to_dict(model.syllabus_schedule) - }, - { - **self.bc.format.to_dict(model.syllabus_schedule), - 'id': 2, - 'academy_id': 2, - }, - ]) - - self.assertEqual(self.bc.database.list_of('admissions.SyllabusScheduleTimeSlot'), [ - { - **self.bc.format.to_dict(model.syllabus_schedule_time_slot[0]) - }, - { - **self.bc.format.to_dict(model.syllabus_schedule_time_slot[1]) - }, - { - **self.bc.format.to_dict(model.syllabus_schedule_time_slot[0]), - 'id': 3, - 'schedule_id': 2, - 'timezone': model.academy[1].timezone, - }, - { - **self.bc.format.to_dict(model.syllabus_schedule_time_slot[1]), - 'id': 4, - 'schedule_id': 2, - 'timezone': 
model.academy[1].timezone, - }, - ]) - - self.assertEqual(messages.add_message.call_args_list, [ - call(request, 20, 'All academies in sync with those syllabus schedules'), - ]) + self.assertEqual( + self.bc.database.list_of("admissions.SyllabusSchedule"), + [ + {**self.bc.format.to_dict(model.syllabus_schedule)}, + { + **self.bc.format.to_dict(model.syllabus_schedule), + "id": 2, + "academy_id": 2, + }, + ], + ) + + self.assertEqual( + self.bc.database.list_of("admissions.SyllabusScheduleTimeSlot"), + [ + {**self.bc.format.to_dict(model.syllabus_schedule_time_slot[0])}, + {**self.bc.format.to_dict(model.syllabus_schedule_time_slot[1])}, + { + **self.bc.format.to_dict(model.syllabus_schedule_time_slot[0]), + "id": 3, + "schedule_id": 2, + "timezone": model.academy[1].timezone, + }, + { + **self.bc.format.to_dict(model.syllabus_schedule_time_slot[1]), + "id": 4, + "schedule_id": 2, + "timezone": model.academy[1].timezone, + }, + ], + ) + + self.assertEqual( + messages.add_message.call_args_list, + [ + call(request, 20, "All academies in sync with those syllabus schedules"), + ], + ) diff --git a/breathecode/admissions/tests/management/commands/tests_add_legacy_to_all_users.py b/breathecode/admissions/tests/management/commands/tests_add_legacy_to_all_users.py index 12431116a..716afe9db 100644 --- a/breathecode/admissions/tests/management/commands/tests_add_legacy_to_all_users.py +++ b/breathecode/admissions/tests/management/commands/tests_add_legacy_to_all_users.py @@ -1,6 +1,7 @@ """ Test /academy/cohort """ + from mixer.backend.django import mixer from ...mixins import AdmissionsTestCase @@ -18,17 +19,17 @@ def test_0_users(self): command = Command() self.assertEqual(command.handle(), None) - self.assertEqual(self.bc.database.list_of('auth.User'), []) + self.assertEqual(self.bc.database.list_of("auth.User"), []) # When: 2 User and 1 Group called Legacy # Then: link Legacy group to both users def test_2_users(self): """Test /academy/cohort without auth""" - user = {'groups': []} - model = self.bc.database.create(user=(2, user), group={'name': 'Legacy'}) + user = {"groups": []} + model = self.bc.database.create(user=(2, user), group={"name": "Legacy"}) command = Command() self.assertEqual(command.handle(), None) - self.assertEqual(self.bc.database.list_of('auth.User'), self.bc.format.to_dict(model.user)) + self.assertEqual(self.bc.database.list_of("auth.User"), self.bc.format.to_dict(model.user)) self.bc.check.queryset_with_pks(model.user[0].groups.all(), [1]) self.bc.check.queryset_with_pks(model.user[1].groups.all(), [1]) diff --git a/breathecode/admissions/tests/management/commands/tests_delete_duplicates.py b/breathecode/admissions/tests/management/commands/tests_delete_duplicates.py index 2462c0a1a..816648c9b 100644 --- a/breathecode/admissions/tests/management/commands/tests_delete_duplicates.py +++ b/breathecode/admissions/tests/management/commands/tests_delete_duplicates.py @@ -1,6 +1,7 @@ """ Test /academy/cohort """ + from unittest.mock import MagicMock, patch from mixer.backend.django import mixer @@ -12,20 +13,20 @@ class AcademyCohortTestSuite(AdmissionsTestCase): """Test /academy/cohort""" - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", 
MagicMock(return_value=None)) def test_delete_duplicates(self): """Test /academy/cohort without auth""" model = self.generate_models(cohort=True, user=True) models = [ - mixer.blend('admissions.CohortUser', user=model['user'], cohort=model['cohort']) for _ in range(0, 10) + mixer.blend("admissions.CohortUser", user=model["user"], cohort=model["cohort"]) for _ in range(0, 10) ] model_dict = self.remove_dinamics_fields(models[0].__dict__) command = Command() self.assertEqual(command.handle(), None) self.assertEqual(self.count_cohort_user(), 1) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), [model_dict]) + self.assertEqual(self.bc.database.list_of("admissions.CohortUser"), [model_dict]) # @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) # @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) diff --git a/breathecode/admissions/tests/management/commands/tests_migrate_or_delete_syllabus_schedule.py b/breathecode/admissions/tests/management/commands/tests_migrate_or_delete_syllabus_schedule.py index 2bcb247b8..ce786a78a 100644 --- a/breathecode/admissions/tests/management/commands/tests_migrate_or_delete_syllabus_schedule.py +++ b/breathecode/admissions/tests/management/commands/tests_migrate_or_delete_syllabus_schedule.py @@ -1,6 +1,7 @@ """ Test /academy/cohort """ + from unittest.mock import MagicMock, patch, call from mixer.backend.django import mixer @@ -11,7 +12,7 @@ class AcademyCohortTestSuite(AdmissionsTestCase): """Test /academy/cohort""" - @patch('django.core.management.base.OutputWrapper.write', MagicMock()) + @patch("django.core.management.base.OutputWrapper.write", MagicMock()) def test_migrate_or_delete_syllabus_schedule__without_schedules(self): from django.core.management.base import OutputWrapper @@ -21,11 +22,11 @@ def test_migrate_or_delete_syllabus_schedule__without_schedules(self): self.assertEqual(result, None) - self.assertEqual(self.bc.database.list_of('admissions.SyllabusSchedule'), []) - self.assertEqual(self.bc.database.list_of('admissions.SyllabusScheduleTimeSlot'), []) - self.assertEqual(OutputWrapper.write.call_args_list, [call('Done!')]) + self.assertEqual(self.bc.database.list_of("admissions.SyllabusSchedule"), []) + self.assertEqual(self.bc.database.list_of("admissions.SyllabusScheduleTimeSlot"), []) + self.assertEqual(OutputWrapper.write.call_args_list, [call("Done!")]) - @patch('django.core.management.base.OutputWrapper.write', MagicMock()) + @patch("django.core.management.base.OutputWrapper.write", MagicMock()) def test_migrate_or_delete_syllabus_schedule__without_academy(self): """ Descriptions of models are being generated: @@ -42,15 +43,18 @@ def test_migrate_or_delete_syllabus_schedule__without_academy(self): self.assertEqual(result, None) - self.assertEqual(self.bc.database.list_of('admissions.SyllabusSchedule'), []) - self.assertEqual(self.bc.database.list_of('admissions.SyllabusScheduleTimeSlot'), []) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) + self.assertEqual(self.bc.database.list_of("admissions.SyllabusSchedule"), []) + self.assertEqual(self.bc.database.list_of("admissions.SyllabusScheduleTimeSlot"), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) - self.assertEqual(OutputWrapper.write.call_args_list, [ - call('Done!'), - ]) + self.assertEqual( + OutputWrapper.write.call_args_list, + [ + call("Done!"), + ], + ) - @patch('django.core.management.base.OutputWrapper.write', MagicMock()) + 
@patch("django.core.management.base.OutputWrapper.write", MagicMock()) def test_migrate_or_delete_syllabus_schedule__without_cohort(self): """ Descriptions of models are being generated: @@ -71,13 +75,13 @@ def test_migrate_or_delete_syllabus_schedule__without_cohort(self): self.assertEqual(result, None) - self.assertEqual(self.bc.database.list_of('admissions.SyllabusSchedule'), []) - self.assertEqual(self.bc.database.list_of('admissions.SyllabusScheduleTimeSlot'), []) + self.assertEqual(self.bc.database.list_of("admissions.SyllabusSchedule"), []) + self.assertEqual(self.bc.database.list_of("admissions.SyllabusScheduleTimeSlot"), []) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) - self.assertEqual(OutputWrapper.write.call_args_list, [call('Done!')]) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) + self.assertEqual(OutputWrapper.write.call_args_list, [call("Done!")]) - @patch('django.core.management.base.OutputWrapper.write', MagicMock()) + @patch("django.core.management.base.OutputWrapper.write", MagicMock()) def test_migrate_or_delete_syllabus_schedule__without_other_academy__without_timezone(self): """ Descriptions of models are being generated: @@ -102,19 +106,22 @@ def test_migrate_or_delete_syllabus_schedule__without_other_academy__without_tim self.assertEqual(result, None) - self.assertEqual(self.bc.database.list_of('admissions.SyllabusSchedule'), []) + self.assertEqual(self.bc.database.list_of("admissions.SyllabusSchedule"), []) - self.assertEqual(self.bc.database.list_of('admissions.SyllabusScheduleTimeSlot'), []) + self.assertEqual(self.bc.database.list_of("admissions.SyllabusScheduleTimeSlot"), []) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), [ - { - **self.bc.format.to_dict(model.cohort), - 'schedule_id': None, - }, - ]) - self.assertEqual(OutputWrapper.write.call_args_list, [call('Done!')]) + self.assertEqual( + self.bc.database.list_of("admissions.Cohort"), + [ + { + **self.bc.format.to_dict(model.cohort), + "schedule_id": None, + }, + ], + ) + self.assertEqual(OutputWrapper.write.call_args_list, [call("Done!")]) - @patch('django.core.management.base.OutputWrapper.write', MagicMock()) + @patch("django.core.management.base.OutputWrapper.write", MagicMock()) def test_migrate_or_delete_syllabus_schedule__other_academy_without_schedules__without_timezone(self): """ Descriptions of models are being generated: @@ -135,7 +142,7 @@ def test_migrate_or_delete_syllabus_schedule__other_academy_without_schedules__w """ from django.core.management.base import OutputWrapper - cohorts = [{'schedule_id': 1}, {'schedule': None}] + cohorts = [{"schedule_id": 1}, {"schedule": None}] model = self.bc.database.create(syllabus_schedule=1, academy=2, cohort=cohorts, syllabus_schedule_time_slot=1) command = Command() @@ -143,67 +150,77 @@ def test_migrate_or_delete_syllabus_schedule__other_academy_without_schedules__w self.assertEqual(result, None) - self.assertEqual(self.bc.database.list_of('admissions.SyllabusSchedule'), []) + self.assertEqual(self.bc.database.list_of("admissions.SyllabusSchedule"), []) - self.assertEqual(self.bc.database.list_of('admissions.SyllabusScheduleTimeSlot'), []) + self.assertEqual(self.bc.database.list_of("admissions.SyllabusScheduleTimeSlot"), []) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), [ - { - **self.bc.format.to_dict(model.cohort[0]), - 'schedule_id': None, - }, - { - **self.bc.format.to_dict(model.cohort[1]), - 'schedule_id': None, - }, - ]) - 
self.assertEqual(OutputWrapper.write.call_args_list, [call('Done!')]) + self.assertEqual( + self.bc.database.list_of("admissions.Cohort"), + [ + { + **self.bc.format.to_dict(model.cohort[0]), + "schedule_id": None, + }, + { + **self.bc.format.to_dict(model.cohort[1]), + "schedule_id": None, + }, + ], + ) + self.assertEqual(OutputWrapper.write.call_args_list, [call("Done!")]) - @patch('django.core.management.base.OutputWrapper.write', MagicMock()) + @patch("django.core.management.base.OutputWrapper.write", MagicMock()) def test_migrate_or_delete_syllabus_schedule__other_academy_with_two_schedules__inferred_from_cohort(self): from django.core.management.base import OutputWrapper - cohorts = [{'schedule_id': 1, 'academy_id': 1}, {'schedule_id': 1, 'academy_id': 2}] - syllabus_schedule = {'academy': None} - syllabus_schedule_time_slots = (2, {'schedule_id': 1}) - model = self.bc.database.create(syllabus_schedule=syllabus_schedule, - academy=2, - cohort=cohorts, - syllabus_schedule_time_slot=syllabus_schedule_time_slots) + cohorts = [{"schedule_id": 1, "academy_id": 1}, {"schedule_id": 1, "academy_id": 2}] + syllabus_schedule = {"academy": None} + syllabus_schedule_time_slots = (2, {"schedule_id": 1}) + model = self.bc.database.create( + syllabus_schedule=syllabus_schedule, + academy=2, + cohort=cohorts, + syllabus_schedule_time_slot=syllabus_schedule_time_slots, + ) command = Command() result = command.handle() self.assertEqual(result, None) - self.assertEqual(self.bc.database.list_of('admissions.SyllabusSchedule'), []) + self.assertEqual(self.bc.database.list_of("admissions.SyllabusSchedule"), []) - self.assertEqual(self.bc.database.list_of('admissions.SyllabusScheduleTimeSlot'), []) + self.assertEqual(self.bc.database.list_of("admissions.SyllabusScheduleTimeSlot"), []) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), [ - { - **self.bc.format.to_dict(model.cohort[0]), - 'schedule_id': None, - }, - { - **self.bc.format.to_dict(model.cohort[1]), - 'schedule_id': None, - }, - ]) - self.assertEqual(OutputWrapper.write.call_args_list, [call('Done!')]) + self.assertEqual( + self.bc.database.list_of("admissions.Cohort"), + [ + { + **self.bc.format.to_dict(model.cohort[0]), + "schedule_id": None, + }, + { + **self.bc.format.to_dict(model.cohort[1]), + "schedule_id": None, + }, + ], + ) + self.assertEqual(OutputWrapper.write.call_args_list, [call("Done!")]) - @patch('django.core.management.base.OutputWrapper.write', MagicMock()) + @patch("django.core.management.base.OutputWrapper.write", MagicMock()) def test_migrate_or_delete_syllabus_schedule__one_per_academy(self): from django.core.management.base import OutputWrapper - cohorts = [{'schedule_id': 1, 'academy_id': 1}, {'schedule_id': 2, 'academy_id': 2}] - syllabus_schedule = {'academy': None} - academies = [{'timezone': 'America/New_York'}, {'timezone': 'Pacific/Pago_Pago'}] - syllabus_schedule_time_slots = [{'schedule_id': 1}, {'schedule_id': 2}] - model = self.bc.database.create(syllabus_schedule=(2, syllabus_schedule), - academy=academies, - cohort=cohorts, - syllabus_schedule_time_slot=syllabus_schedule_time_slots) + cohorts = [{"schedule_id": 1, "academy_id": 1}, {"schedule_id": 2, "academy_id": 2}] + syllabus_schedule = {"academy": None} + academies = [{"timezone": "America/New_York"}, {"timezone": "Pacific/Pago_Pago"}] + syllabus_schedule_time_slots = [{"schedule_id": 1}, {"schedule_id": 2}] + model = self.bc.database.create( + syllabus_schedule=(2, syllabus_schedule), + academy=academies, + cohort=cohorts, + 
syllabus_schedule_time_slot=syllabus_schedule_time_slots, + ) command = Command() result = command.handle() @@ -211,84 +228,72 @@ def test_migrate_or_delete_syllabus_schedule__one_per_academy(self): self.assertEqual(result, None) self.assertEqual( - self.bc.database.list_of('admissions.SyllabusSchedule'), + self.bc.database.list_of("admissions.SyllabusSchedule"), [ { **self.bc.format.to_dict(model.syllabus_schedule[0]), - 'academy_id': 1, + "academy_id": 1, }, { **self.bc.format.to_dict(model.syllabus_schedule[1]), - 'academy_id': 2, + "academy_id": 2, }, ], ) - self.assertEqual(self.bc.database.list_of('admissions.SyllabusScheduleTimeSlot'), [ - { - **self.bc.format.to_dict(model.syllabus_schedule_time_slot[0]), - 'schedule_id': 1, - 'timezone': 'America/New_York', - }, - { - **self.bc.format.to_dict(model.syllabus_schedule_time_slot[1]), - 'schedule_id': 2, - 'timezone': 'Pacific/Pago_Pago', - }, - ]) + self.assertEqual( + self.bc.database.list_of("admissions.SyllabusScheduleTimeSlot"), + [ + { + **self.bc.format.to_dict(model.syllabus_schedule_time_slot[0]), + "schedule_id": 1, + "timezone": "America/New_York", + }, + { + **self.bc.format.to_dict(model.syllabus_schedule_time_slot[1]), + "schedule_id": 2, + "timezone": "Pacific/Pago_Pago", + }, + ], + ) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), [ - { - **self.bc.format.to_dict(model.cohort[0]), - }, - { - **self.bc.format.to_dict(model.cohort[1]), - }, - ]) - self.assertEqual(OutputWrapper.write.call_args_list, [call('Done!')]) + self.assertEqual( + self.bc.database.list_of("admissions.Cohort"), + [ + { + **self.bc.format.to_dict(model.cohort[0]), + }, + { + **self.bc.format.to_dict(model.cohort[1]), + }, + ], + ) + self.assertEqual(OutputWrapper.write.call_args_list, [call("Done!")]) - @patch('django.core.management.base.OutputWrapper.write', MagicMock()) + @patch("django.core.management.base.OutputWrapper.write", MagicMock()) def test_migrate_or_delete_syllabus_schedule__two_schedule_related_to_the_first__two_academy(self): from django.core.management.base import OutputWrapper cohorts = [ - { - 'schedule_id': 1, - 'academy_id': 1 - }, - { - 'schedule_id': 2, - 'academy_id': 1 - }, - { - 'schedule_id': 1, - 'academy_id': 2 - }, - { - 'schedule_id': 2, - 'academy_id': 2 - }, + {"schedule_id": 1, "academy_id": 1}, + {"schedule_id": 2, "academy_id": 1}, + {"schedule_id": 1, "academy_id": 2}, + {"schedule_id": 2, "academy_id": 2}, ] - syllabus_schedule = {'academy': None} - academies = [{'timezone': 'America/New_York'}, {'timezone': 'Pacific/Pago_Pago'}] + syllabus_schedule = {"academy": None} + academies = [{"timezone": "America/New_York"}, {"timezone": "Pacific/Pago_Pago"}] syllabus_schedule_time_slots = [ - { - 'schedule_id': 1 - }, - { - 'schedule_id': 2 - }, - { - 'schedule_id': 3 - }, - { - 'schedule_id': 4 - }, + {"schedule_id": 1}, + {"schedule_id": 2}, + {"schedule_id": 3}, + {"schedule_id": 4}, ] - model = self.bc.database.create(syllabus_schedule=(4, syllabus_schedule), - academy=academies, - cohort=cohorts, - syllabus_schedule_time_slot=syllabus_schedule_time_slots) + model = self.bc.database.create( + syllabus_schedule=(4, syllabus_schedule), + academy=academies, + cohort=cohorts, + syllabus_schedule_time_slot=syllabus_schedule_time_slots, + ) command = Command() result = command.handle() @@ -296,121 +301,125 @@ def test_migrate_or_delete_syllabus_schedule__two_schedule_related_to_the_first_ self.assertEqual(result, None) self.assertEqual( - self.bc.database.list_of('admissions.SyllabusSchedule'), + 
self.bc.database.list_of("admissions.SyllabusSchedule"), [ { **self.bc.format.to_dict(model.syllabus_schedule[0]), - 'academy_id': 1, + "academy_id": 1, }, { **self.bc.format.to_dict(model.syllabus_schedule[1]), - 'academy_id': 1, + "academy_id": 1, }, { **self.bc.format.to_dict(model.syllabus_schedule[0]), - 'id': 5, - 'academy_id': 2, + "id": 5, + "academy_id": 2, }, { **self.bc.format.to_dict(model.syllabus_schedule[1]), - 'id': 6, - 'academy_id': 2, + "id": 6, + "academy_id": 2, }, ], ) - self.assertEqual(self.bc.database.list_of('admissions.SyllabusScheduleTimeSlot'), [ - { - **self.bc.format.to_dict(model.syllabus_schedule_time_slot[0]), - 'schedule_id': 1, - 'timezone': model.academy[0].timezone, - }, - { - **self.bc.format.to_dict(model.syllabus_schedule_time_slot[1]), - 'schedule_id': 2, - 'timezone': model.academy[0].timezone, - }, - { - **self.bc.format.to_dict(model.syllabus_schedule_time_slot[0]), - 'id': 5, - 'schedule_id': 5, - 'timezone': model.academy[1].timezone, - }, - { - **self.bc.format.to_dict(model.syllabus_schedule_time_slot[1]), - 'id': 6, - 'schedule_id': 6, - 'timezone': model.academy[1].timezone, - }, - ]) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), [ - { - **self.bc.format.to_dict(model.cohort[0]), - }, - { - **self.bc.format.to_dict(model.cohort[1]), - }, - { - **self.bc.format.to_dict(model.cohort[2]), - 'schedule_id': 5, - }, - { - **self.bc.format.to_dict(model.cohort[3]), - 'schedule_id': 6, - }, - ]) + self.assertEqual( + self.bc.database.list_of("admissions.SyllabusScheduleTimeSlot"), + [ + { + **self.bc.format.to_dict(model.syllabus_schedule_time_slot[0]), + "schedule_id": 1, + "timezone": model.academy[0].timezone, + }, + { + **self.bc.format.to_dict(model.syllabus_schedule_time_slot[1]), + "schedule_id": 2, + "timezone": model.academy[0].timezone, + }, + { + **self.bc.format.to_dict(model.syllabus_schedule_time_slot[0]), + "id": 5, + "schedule_id": 5, + "timezone": model.academy[1].timezone, + }, + { + **self.bc.format.to_dict(model.syllabus_schedule_time_slot[1]), + "id": 6, + "schedule_id": 6, + "timezone": model.academy[1].timezone, + }, + ], + ) + self.assertEqual( + self.bc.database.list_of("admissions.Cohort"), + [ + { + **self.bc.format.to_dict(model.cohort[0]), + }, + { + **self.bc.format.to_dict(model.cohort[1]), + }, + { + **self.bc.format.to_dict(model.cohort[2]), + "schedule_id": 5, + }, + { + **self.bc.format.to_dict(model.cohort[3]), + "schedule_id": 6, + }, + ], + ) - self.assertEqual(OutputWrapper.write.call_args_list, [ - call('Done!'), - ]) + self.assertEqual( + OutputWrapper.write.call_args_list, + [ + call("Done!"), + ], + ) - @patch('django.core.management.base.OutputWrapper.write', MagicMock()) + @patch("django.core.management.base.OutputWrapper.write", MagicMock()) def test_migrate_or_delete_syllabus_schedule__two_schedule_related_to_the_first__two_academy__cohort_with_timezone( - self): + self, + ): from django.core.management.base import OutputWrapper cohorts = [ { - 'schedule_id': 1, - 'academy_id': 1, - 'timezone': 'Europe/Madrid', + "schedule_id": 1, + "academy_id": 1, + "timezone": "Europe/Madrid", }, { - 'schedule_id': 2, - 'academy_id': 1, - 'timezone': 'America/Caracas', + "schedule_id": 2, + "academy_id": 1, + "timezone": "America/Caracas", }, { - 'schedule_id': 1, - 'academy_id': 2, - 'timezone': 'America/Bogota', + "schedule_id": 1, + "academy_id": 2, + "timezone": "America/Bogota", }, { - 'schedule_id': 2, - 'academy_id': 2, - 'timezone': 'America/Santiago', + "schedule_id": 2, + "academy_id": 
2, + "timezone": "America/Santiago", }, ] - syllabus_schedule = {'academy': None} - academies = [{'timezone': 'America/New_York'}, {'timezone': 'Pacific/Pago_Pago'}] + syllabus_schedule = {"academy": None} + academies = [{"timezone": "America/New_York"}, {"timezone": "Pacific/Pago_Pago"}] syllabus_schedule_time_slots = [ - { - 'schedule_id': 1 - }, - { - 'schedule_id': 2 - }, - { - 'schedule_id': 3 - }, - { - 'schedule_id': 4 - }, + {"schedule_id": 1}, + {"schedule_id": 2}, + {"schedule_id": 3}, + {"schedule_id": 4}, ] - model = self.bc.database.create(syllabus_schedule=(4, syllabus_schedule), - academy=academies, - cohort=cohorts, - syllabus_schedule_time_slot=syllabus_schedule_time_slots) + model = self.bc.database.create( + syllabus_schedule=(4, syllabus_schedule), + academy=academies, + cohort=cohorts, + syllabus_schedule_time_slot=syllabus_schedule_time_slots, + ) command = Command() result = command.handle() @@ -418,70 +427,79 @@ def test_migrate_or_delete_syllabus_schedule__two_schedule_related_to_the_first_ self.assertEqual(result, None) self.assertEqual( - self.bc.database.list_of('admissions.SyllabusSchedule'), + self.bc.database.list_of("admissions.SyllabusSchedule"), [ { **self.bc.format.to_dict(model.syllabus_schedule[0]), - 'academy_id': 1, + "academy_id": 1, }, { **self.bc.format.to_dict(model.syllabus_schedule[1]), - 'academy_id': 1, + "academy_id": 1, }, { **self.bc.format.to_dict(model.syllabus_schedule[0]), - 'id': 5, - 'academy_id': 2, + "id": 5, + "academy_id": 2, }, { **self.bc.format.to_dict(model.syllabus_schedule[1]), - 'id': 6, - 'academy_id': 2, + "id": 6, + "academy_id": 2, }, ], ) - self.assertEqual(self.bc.database.list_of('admissions.SyllabusScheduleTimeSlot'), [ - { - **self.bc.format.to_dict(model.syllabus_schedule_time_slot[0]), - 'schedule_id': 1, - 'timezone': model.cohort[0].timezone, - }, - { - **self.bc.format.to_dict(model.syllabus_schedule_time_slot[1]), - 'schedule_id': 2, - 'timezone': model.cohort[1].timezone, - }, - { - **self.bc.format.to_dict(model.syllabus_schedule_time_slot[0]), - 'id': 5, - 'schedule_id': 5, - 'timezone': model.cohort[2].timezone, - }, - { - **self.bc.format.to_dict(model.syllabus_schedule_time_slot[1]), - 'id': 6, - 'schedule_id': 6, - 'timezone': model.cohort[3].timezone, - }, - ]) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), [ - { - **self.bc.format.to_dict(model.cohort[0]), - }, - { - **self.bc.format.to_dict(model.cohort[1]), - }, - { - **self.bc.format.to_dict(model.cohort[2]), - 'schedule_id': 5, - }, - { - **self.bc.format.to_dict(model.cohort[3]), - 'schedule_id': 6, - }, - ]) + self.assertEqual( + self.bc.database.list_of("admissions.SyllabusScheduleTimeSlot"), + [ + { + **self.bc.format.to_dict(model.syllabus_schedule_time_slot[0]), + "schedule_id": 1, + "timezone": model.cohort[0].timezone, + }, + { + **self.bc.format.to_dict(model.syllabus_schedule_time_slot[1]), + "schedule_id": 2, + "timezone": model.cohort[1].timezone, + }, + { + **self.bc.format.to_dict(model.syllabus_schedule_time_slot[0]), + "id": 5, + "schedule_id": 5, + "timezone": model.cohort[2].timezone, + }, + { + **self.bc.format.to_dict(model.syllabus_schedule_time_slot[1]), + "id": 6, + "schedule_id": 6, + "timezone": model.cohort[3].timezone, + }, + ], + ) + self.assertEqual( + self.bc.database.list_of("admissions.Cohort"), + [ + { + **self.bc.format.to_dict(model.cohort[0]), + }, + { + **self.bc.format.to_dict(model.cohort[1]), + }, + { + **self.bc.format.to_dict(model.cohort[2]), + "schedule_id": 5, + }, + { + 
**self.bc.format.to_dict(model.cohort[3]), + "schedule_id": 6, + }, + ], + ) - self.assertEqual(OutputWrapper.write.call_args_list, [ - call('Done!'), - ]) + self.assertEqual( + OutputWrapper.write.call_args_list, + [ + call("Done!"), + ], + ) diff --git a/breathecode/admissions/tests/management/commands/tests_sync_admissions.py b/breathecode/admissions/tests/management/commands/tests_sync_admissions.py index 0035568ef..1b5a1eedd 100644 --- a/breathecode/admissions/tests/management/commands/tests_sync_admissions.py +++ b/breathecode/admissions/tests/management/commands/tests_sync_admissions.py @@ -1,6 +1,7 @@ """ Test /academy/cohort """ + import datetime import json import os @@ -23,296 +24,304 @@ # from ...utils import GenerateModels -HOST = os.environ.get('OLD_BREATHECODE_API') +HOST = os.environ.get("OLD_BREATHECODE_API") -with open(f'{os.getcwd()}/breathecode/admissions/fixtures/legacy_teachers.json', 'r') as file: +with open(f"{os.getcwd()}/breathecode/admissions/fixtures/legacy_teachers.json", "r") as file: legacy_teachers = json.load(file) -with open(f'{os.getcwd()}/breathecode/admissions/fixtures/legacy_students.json', 'r') as file: +with open(f"{os.getcwd()}/breathecode/admissions/fixtures/legacy_students.json", "r") as file: legacy_students = json.load(file) financial_status = { - 'late': 'LATE', - 'fully_paid': 'FULLY_PAID', - 'up_to_date': 'UP_TO_DATE', - 'uknown': None, + "late": "LATE", + "fully_paid": "FULLY_PAID", + "up_to_date": "UP_TO_DATE", + "uknown": None, } educational_status = { - 'under_review': 'ACTIVE', - 'currently_active': 'ACTIVE', - 'blocked': 'SUSPENDED', - 'postponed': 'POSTPONED', - 'studies_finished': 'GRADUATED', - 'student_dropped': 'DROPPED', + "under_review": "ACTIVE", + "currently_active": "ACTIVE", + "blocked": "SUSPENDED", + "postponed": "POSTPONED", + "studies_finished": "GRADUATED", + "student_dropped": "DROPPED", } class AcademyCohortTestSuite(AdmissionsTestCase): """Test /academy/cohort""" - @patch(LEGACY_API_PATH['get'], apply_screenshotmachine_requests_get_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(LEGACY_API_PATH["get"], apply_screenshotmachine_requests_get_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_students(self): """Test /academy/cohort without auth""" cohorts = set() count_cohorts = 0 - for student in legacy_students['data']: - for cohort in student['cohorts']: + for student in legacy_students["data"]: + for cohort in student["cohorts"]: cohorts.add(cohort) count_cohorts += 1 - models = [mixer.blend('admissions.Cohort', slug=slug) for slug in cohorts] + models = [mixer.blend("admissions.Cohort", slug=slug) for slug in cohorts] models_dict = [self.remove_dinamics_fields(model.__dict__) for model in models] command = Command() self.assertEqual(self.count_cohort_user(), 0) - self.assertEqual(command.students({'override': False}), None) + self.assertEqual(command.students({"override": False}), None) self.assertEqual(self.count_cohort(), len(cohorts)) self.assertEqual(self.count_user(), 10) self.assertEqual(self.count_cohort_user(), count_cohorts) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), models_dict) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), 
models_dict) cohort_user_acc = 0 - for student in legacy_students['data']: - for slug in student['cohorts']: - email = student['email'] - cohort = Cohort.objects.filter(slug=slug).values_list('id', flat=True).first() - user = User.objects.filter(email=email).values_list('id', flat=True).first() + for student in legacy_students["data"]: + for slug in student["cohorts"]: + email = student["email"] + cohort = Cohort.objects.filter(slug=slug).values_list("id", flat=True).first() + user = User.objects.filter(email=email).values_list("id", flat=True).first() filter = { - 'cohort_id': cohort, - 'user_id': user, + "cohort_id": cohort, + "user_id": user, } self.assertEqual(CohortUser.objects.filter(**filter).count(), 1) model = CohortUser.objects.filter(**filter).first().__dict__ - del model['_state'] - del model['_CohortUser__old_edu_status'] + del model["_state"] + del model["_CohortUser__old_edu_status"] - self.assertEqual(isinstance(model['created_at'], datetime.datetime), True) - del model['created_at'] + self.assertEqual(isinstance(model["created_at"], datetime.datetime), True) + del model["created_at"] - self.assertEqual(isinstance(model['updated_at'], datetime.datetime), True) - del model['updated_at'] + self.assertEqual(isinstance(model["updated_at"], datetime.datetime), True) + del model["updated_at"] cohort_user_acc += 1 self.assertEqual( - model, { - 'id': cohort_user_acc, - 'cohort_id': cohort, - 'user_id': user, - 'educational_status': educational_status[student['status']], - 'finantial_status': financial_status[student['financial_status']], - 'role': 'STUDENT', - 'watching': False, - 'history_log': {}, - }) + model, + { + "id": cohort_user_acc, + "cohort_id": cohort, + "user_id": user, + "educational_status": educational_status[student["status"]], + "finantial_status": financial_status[student["financial_status"]], + "role": "STUDENT", + "watching": False, + "history_log": {}, + }, + ) self.assertEqual(self.count_cohort_user(), cohort_user_acc) - @patch(LEGACY_API_PATH['get'], apply_screenshotmachine_requests_get_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(LEGACY_API_PATH["get"], apply_screenshotmachine_requests_get_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_students_twice(self): """Test /academy/cohort without auth""" cohorts = set() count_cohorts = 0 - for student in legacy_students['data']: - for cohort in student['cohorts']: + for student in legacy_students["data"]: + for cohort in student["cohorts"]: cohorts.add(cohort) count_cohorts += 1 - models = [mixer.blend('admissions.Cohort', slug=slug) for slug in cohorts] + models = [mixer.blend("admissions.Cohort", slug=slug) for slug in cohorts] models_dict = [self.remove_dinamics_fields(model.__dict__) for model in models] command = Command() self.assertEqual(self.count_cohort_user(), 0) - self.assertEqual(command.students({'override': False}), None) - self.assertEqual(command.students({'override': False}), None) # call twice + self.assertEqual(command.students({"override": False}), None) + self.assertEqual(command.students({"override": False}), None) # call twice self.assertEqual(self.count_cohort(), len(cohorts)) self.assertEqual(self.count_user(), 10) 
self.assertEqual(self.count_cohort_user(), count_cohorts) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), models_dict) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), models_dict) cohort_user_acc = 0 - for student in legacy_students['data']: - for slug in student['cohorts']: - email = student['email'] - cohort = Cohort.objects.filter(slug=slug).values_list('id', flat=True).first() - user = User.objects.filter(email=email).values_list('id', flat=True).first() + for student in legacy_students["data"]: + for slug in student["cohorts"]: + email = student["email"] + cohort = Cohort.objects.filter(slug=slug).values_list("id", flat=True).first() + user = User.objects.filter(email=email).values_list("id", flat=True).first() filter = { - 'cohort_id': cohort, - 'user_id': user, + "cohort_id": cohort, + "user_id": user, } self.assertEqual(CohortUser.objects.filter(**filter).count(), 1) model = CohortUser.objects.filter(**filter).first().__dict__ - del model['_state'] - del model['_CohortUser__old_edu_status'] + del model["_state"] + del model["_CohortUser__old_edu_status"] - self.assertEqual(isinstance(model['created_at'], datetime.datetime), True) - del model['created_at'] + self.assertEqual(isinstance(model["created_at"], datetime.datetime), True) + del model["created_at"] - self.assertEqual(isinstance(model['updated_at'], datetime.datetime), True) - del model['updated_at'] + self.assertEqual(isinstance(model["updated_at"], datetime.datetime), True) + del model["updated_at"] cohort_user_acc += 1 self.assertEqual( - model, { - 'id': cohort_user_acc, - 'cohort_id': cohort, - 'user_id': user, - 'educational_status': educational_status[student['status']], - 'finantial_status': financial_status[student['financial_status']], - 'role': 'STUDENT', - 'watching': False, - 'history_log': {}, - }) + model, + { + "id": cohort_user_acc, + "cohort_id": cohort, + "user_id": user, + "educational_status": educational_status[student["status"]], + "finantial_status": financial_status[student["financial_status"]], + "role": "STUDENT", + "watching": False, + "history_log": {}, + }, + ) self.assertEqual(self.count_cohort_user(), cohort_user_acc) - @patch(LEGACY_API_PATH['get'], apply_screenshotmachine_requests_get_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(LEGACY_API_PATH["get"], apply_screenshotmachine_requests_get_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_teachers(self): """Test /academy/cohort without auth""" cohorts = set() count_cohorts = 0 - for teacher in legacy_teachers['data']: - for cohort in teacher['cohorts']: + for teacher in legacy_teachers["data"]: + for cohort in teacher["cohorts"]: cohorts.add(cohort) count_cohorts += 1 - models = [mixer.blend('admissions.Cohort', slug=slug) for slug in cohorts] + models = [mixer.blend("admissions.Cohort", slug=slug) for slug in cohorts] models_dict = [self.remove_dinamics_fields(model.__dict__) for model in models] command = Command() self.assertEqual(self.count_cohort_user(), 0) - self.assertEqual(command.teachers({'override': False}), None) + self.assertEqual(command.teachers({"override": False}), None) self.assertEqual(self.count_cohort(), len(cohorts)) 
self.assertEqual(self.count_user(), 10) self.assertEqual(self.count_cohort_user(), count_cohorts) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), models_dict) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), models_dict) cohort_user_acc = 0 - for student in legacy_teachers['data']: - for slug in student['cohorts']: - email = student['username'] - cohort = Cohort.objects.filter(slug=slug).values_list('id', flat=True).first() - user = User.objects.filter(email=email).values_list('id', flat=True).first() + for student in legacy_teachers["data"]: + for slug in student["cohorts"]: + email = student["username"] + cohort = Cohort.objects.filter(slug=slug).values_list("id", flat=True).first() + user = User.objects.filter(email=email).values_list("id", flat=True).first() filter = { - 'cohort_id': cohort, - 'user_id': user, + "cohort_id": cohort, + "user_id": user, } self.assertEqual(CohortUser.objects.filter(**filter).count(), 1) model = CohortUser.objects.filter(**filter).first().__dict__ - del model['_state'] - del model['_CohortUser__old_edu_status'] + del model["_state"] + del model["_CohortUser__old_edu_status"] - self.assertEqual(isinstance(model['created_at'], datetime.datetime), True) - del model['created_at'] + self.assertEqual(isinstance(model["created_at"], datetime.datetime), True) + del model["created_at"] - self.assertEqual(isinstance(model['updated_at'], datetime.datetime), True) - del model['updated_at'] + self.assertEqual(isinstance(model["updated_at"], datetime.datetime), True) + del model["updated_at"] cohort_user_acc += 1 self.assertEqual( - model, { - 'id': cohort_user_acc, - 'cohort_id': cohort, - 'user_id': user, - 'educational_status': 'ACTIVE', - 'finantial_status': None, - 'role': 'TEACHER', - 'watching': False, - 'history_log': {}, - }) + model, + { + "id": cohort_user_acc, + "cohort_id": cohort, + "user_id": user, + "educational_status": "ACTIVE", + "finantial_status": None, + "role": "TEACHER", + "watching": False, + "history_log": {}, + }, + ) self.assertEqual(self.count_cohort_user(), cohort_user_acc) - @patch(LEGACY_API_PATH['get'], apply_screenshotmachine_requests_get_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(LEGACY_API_PATH["get"], apply_screenshotmachine_requests_get_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_teachers_twice(self): """Test /academy/cohort without auth""" cohorts = set() count_cohorts = 0 - for teacher in legacy_teachers['data']: - for cohort in teacher['cohorts']: + for teacher in legacy_teachers["data"]: + for cohort in teacher["cohorts"]: cohorts.add(cohort) count_cohorts += 1 - models = [mixer.blend('admissions.Cohort', slug=slug) for slug in cohorts] + models = [mixer.blend("admissions.Cohort", slug=slug) for slug in cohorts] models_dict = [self.remove_dinamics_fields(model.__dict__) for model in models] command = Command() self.assertEqual(self.count_cohort_user(), 0) - self.assertEqual(command.teachers({'override': False}), None) - self.assertEqual(command.teachers({'override': False}), None) # call twice + self.assertEqual(command.teachers({"override": False}), None) + self.assertEqual(command.teachers({"override": False}), None) # call twice 
self.assertEqual(self.count_cohort(), len(cohorts)) self.assertEqual(self.count_user(), 10) self.assertEqual(self.count_cohort_user(), count_cohorts) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), models_dict) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), models_dict) cohort_user_acc = 0 - for student in legacy_teachers['data']: - for slug in student['cohorts']: - email = student['username'] - cohort = Cohort.objects.filter(slug=slug).values_list('id', flat=True).first() - user = User.objects.filter(email=email).values_list('id', flat=True).first() + for student in legacy_teachers["data"]: + for slug in student["cohorts"]: + email = student["username"] + cohort = Cohort.objects.filter(slug=slug).values_list("id", flat=True).first() + user = User.objects.filter(email=email).values_list("id", flat=True).first() filter = { - 'cohort_id': cohort, - 'user_id': user, + "cohort_id": cohort, + "user_id": user, } self.assertEqual(CohortUser.objects.filter(**filter).count(), 1) model = CohortUser.objects.filter(**filter).first().__dict__ - del model['_state'] - del model['_CohortUser__old_edu_status'] + del model["_state"] + del model["_CohortUser__old_edu_status"] - self.assertEqual(isinstance(model['created_at'], datetime.datetime), True) - del model['created_at'] + self.assertEqual(isinstance(model["created_at"], datetime.datetime), True) + del model["created_at"] - self.assertEqual(isinstance(model['updated_at'], datetime.datetime), True) - del model['updated_at'] + self.assertEqual(isinstance(model["updated_at"], datetime.datetime), True) + del model["updated_at"] cohort_user_acc += 1 self.assertEqual( - model, { - 'id': cohort_user_acc, - 'cohort_id': cohort, - 'user_id': user, - 'educational_status': 'ACTIVE', - 'finantial_status': None, - 'role': 'TEACHER', - 'watching': False, - 'history_log': {}, - }) + model, + { + "id": cohort_user_acc, + "cohort_id": cohort, + "user_id": user, + "educational_status": "ACTIVE", + "finantial_status": None, + "role": "TEACHER", + "watching": False, + "history_log": {}, + }, + ) self.assertEqual(self.count_cohort_user(), cohort_user_acc) diff --git a/breathecode/admissions/tests/mixins/__init__.py b/breathecode/admissions/tests/mixins/__init__.py index 31c07f76b..d9856d6a0 100644 --- a/breathecode/admissions/tests/mixins/__init__.py +++ b/breathecode/admissions/tests/mixins/__init__.py @@ -1,4 +1,5 @@ """ Admissions mixins """ + from .admissions_test_case import AdmissionsTestCase # noqa: F401 diff --git a/breathecode/admissions/tests/mixins/admissions_test_case.py b/breathecode/admissions/tests/mixins/admissions_test_case.py index c83b244b0..6116b9eca 100644 --- a/breathecode/admissions/tests/mixins/admissions_test_case.py +++ b/breathecode/admissions/tests/mixins/admissions_test_case.py @@ -1,6 +1,7 @@ """ Collections of mixins used to login in authorize microservice """ + import re from unittest.mock import MagicMock, patch @@ -18,8 +19,9 @@ ) -class AdmissionsTestCase(APITestCase, GenerateModelsMixin, CacheMixin, GenerateQueriesMixin, DatetimeMixin, ICallMixin, - BreathecodeMixin): +class AdmissionsTestCase( + APITestCase, GenerateModelsMixin, CacheMixin, GenerateQueriesMixin, DatetimeMixin, ICallMixin, BreathecodeMixin +): """AdmissionsTestCase with auth methods""" def setUp(self): @@ -29,15 +31,15 @@ def setUp(self): def tearDown(self): self.clear_cache() - def fill_cohort_timeslot(self, id, cohort_id, certificate_timeslot, timezone='America/New_York'): + def fill_cohort_timeslot(self, id, cohort_id, 
certificate_timeslot, timezone="America/New_York"): return { - 'id': id, - 'cohort_id': cohort_id, - 'starting_at': certificate_timeslot.starting_at, - 'ending_at': certificate_timeslot.ending_at, - 'recurrent': certificate_timeslot.recurrent, - 'recurrency_type': certificate_timeslot.recurrency_type, - 'timezone': timezone, + "id": id, + "cohort_id": cohort_id, + "starting_at": certificate_timeslot.starting_at, + "ending_at": certificate_timeslot.ending_at, + "recurrent": certificate_timeslot.recurrent, + "recurrency_type": certificate_timeslot.recurrency_type, + "timezone": timezone, } def check_cohort_user_that_not_have_role_student_can_be_teacher(self, role, update=False, additional_data={}): @@ -45,72 +47,72 @@ def check_cohort_user_that_not_have_role_student_can_be_teacher(self, role, upda self.headers(academy=1) model_kwargs = { - 'authenticate': True, - 'cohort': True, - 'user': True, - 'profile_academy': True, - 'role': role, - 'capability': 'crud_cohort', + "authenticate": True, + "cohort": True, + "user": True, + "profile_academy": True, + "role": role, + "capability": "crud_cohort", } if update: - model_kwargs['cohort_user'] = True + model_kwargs["cohort_user"] = True model = self.generate_models(**model_kwargs) - reverse_name = 'academy_cohort_id_user_id' if update else 'cohort_id_user' - url_params = {'cohort_id': 1, 'user_id': 1} if update else {'cohort_id': 1} - url = reverse_lazy(f'admissions:{reverse_name}', kwargs=url_params) - data = {'user': model['user'].id, 'role': 'TEACHER'} + reverse_name = "academy_cohort_id_user_id" if update else "cohort_id_user" + url_params = {"cohort_id": 1, "user_id": 1} if update else {"cohort_id": 1} + url = reverse_lazy(f"admissions:{reverse_name}", kwargs=url_params) + data = {"user": model["user"].id, "role": "TEACHER"} request_func = self.client.put if update else self.client.post response = request_func(url, data) json = response.json() expected = { - 'id': 1, - 'role': 'TEACHER', - 'user': { - 'id': model['user'].id, - 'first_name': model['user'].first_name, - 'last_name': model['user'].last_name, - 'email': model['user'].email, + "id": 1, + "role": "TEACHER", + "user": { + "id": model["user"].id, + "first_name": model["user"].first_name, + "last_name": model["user"].last_name, + "email": model["user"].email, }, - 'cohort': { - 'id': model['cohort'].id, - 'slug': model['cohort'].slug, - 'name': model['cohort'].name, - 'never_ends': False, - 'remote_available': True, - 'kickoff_date': self.datetime_to_iso(model['cohort'].kickoff_date), - 'current_day': model['cohort'].current_day, - 'online_meeting_url': model['cohort'].online_meeting_url, - 'timezone': model['cohort'].timezone, - 'is_hidden_on_prework': model['cohort'].is_hidden_on_prework, - 'academy': { - 'id': model['cohort'].academy.id, - 'name': model['cohort'].academy.name, - 'slug': model['cohort'].academy.slug, - 'country': model['cohort'].academy.country.code, - 'city': model['cohort'].academy.city.id, - 'street_address': model['cohort'].academy.street_address, + "cohort": { + "id": model["cohort"].id, + "slug": model["cohort"].slug, + "name": model["cohort"].name, + "never_ends": False, + "remote_available": True, + "kickoff_date": self.datetime_to_iso(model["cohort"].kickoff_date), + "current_day": model["cohort"].current_day, + "online_meeting_url": model["cohort"].online_meeting_url, + "timezone": model["cohort"].timezone, + "is_hidden_on_prework": model["cohort"].is_hidden_on_prework, + "academy": { + "id": model["cohort"].academy.id, + "name": 
model["cohort"].academy.name, + "slug": model["cohort"].academy.slug, + "country": model["cohort"].academy.country.code, + "city": model["cohort"].academy.city.id, + "street_address": model["cohort"].academy.street_address, }, - 'schedule': None, - 'syllabus_version': None, - 'ending_date': model['cohort'].ending_date, - 'stage': model['cohort'].stage, - 'language': model['cohort'].language, - 'created_at': self.datetime_to_iso(model['cohort'].created_at), - 'updated_at': self.datetime_to_iso(model['cohort'].updated_at), + "schedule": None, + "syllabus_version": None, + "ending_date": model["cohort"].ending_date, + "stage": model["cohort"].stage, + "language": model["cohort"].language, + "created_at": self.datetime_to_iso(model["cohort"].created_at), + "updated_at": self.datetime_to_iso(model["cohort"].updated_at), }, **additional_data, } if update: - del expected['user'] - del expected['cohort'] + del expected["user"] + del expected["cohort"] - expected['educational_status'] = None - expected['finantial_status'] = None + expected["educational_status"] = None + expected["finantial_status"] = None self.assertEqual(json, expected) @@ -120,23 +122,32 @@ def check_cohort_user_that_not_have_role_student_can_be_teacher(self, role, upda self.assertEqual(response.status_code, status.HTTP_201_CREATED) if update: - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), - [{ - **self.model_to_dict(model, 'cohort_user'), - 'role': 'TEACHER', - }]) + self.assertEqual( + self.bc.database.list_of("admissions.CohortUser"), + [ + { + **self.model_to_dict(model, "cohort_user"), + "role": "TEACHER", + } + ], + ) else: - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), [{ - 'cohort_id': 1, - 'educational_status': None, - 'finantial_status': None, - 'id': 1, - 'role': 'TEACHER', - 'user_id': 1, - 'watching': False, - }]) - - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) + self.assertEqual( + self.bc.database.list_of("admissions.CohortUser"), + [ + { + "cohort_id": 1, + "educational_status": None, + "finantial_status": None, + "id": 1, + "role": "TEACHER", + "user_id": 1, + "watching": False, + } + ], + ) + + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) def check_academy_cohort__with_data(self, models=None, deleted=False): """Test /cohort without auth""" from breathecode.admissions.signals import cohort_saved @@ -148,20 +159,22 @@ def check_academy_cohort__with_data(self, models=None, deleted=False): cohort_time_slot = cohort_time_slots[0] if models is None: - syllabus_kwargs = {'slug': 'they-killed-kenny'} - academy_kwargs = {'timezone': 'America/Caracas'} + syllabus_kwargs = {"slug": "they-killed-kenny"} + academy_kwargs = {"timezone": "America/Caracas"} models = [ - self.generate_models(authenticate=True, - cohort=True, - profile_academy=True, - capability='read_all_cohort', - role='potato', - syllabus=True, - syllabus_version=True, - syllabus_schedule=True, - syllabus_schedule_time_slot=True, - syllabus_kwargs=syllabus_kwargs, - academy_kwargs=academy_kwargs) + self.generate_models( + authenticate=True, + cohort=True, + profile_academy=True, + capability="read_all_cohort", + role="potato", + syllabus=True, + syllabus_version=True, + syllabus_schedule=True, + syllabus_schedule_time_slot=True, + syllabus_kwargs=syllabus_kwargs, + academy_kwargs=academy_kwargs, + ) ] # reset because this call are coming from mixer @@ -169,7 +182,7 @@ def check_academy_cohort__with_data(self, models=None, deleted=False): 
models.sort(key=lambda x: x.cohort.kickoff_date, reverse=True) - url = reverse_lazy('admissions:academy_cohort') + url = reverse_lazy("admissions:academy_cohort") response = self.client.get(url) json = response.json() @@ -177,88 +190,84 @@ def check_academy_cohort__with_data(self, models=None, deleted=False): expected = [] else: - expected = [{ - 'id': - model['cohort'].id, - 'slug': - model['cohort'].slug, - 'name': - model['cohort'].name, - 'never_ends': - model['cohort'].never_ends, - 'remote_available': - model['cohort'].remote_available, - 'private': - model['cohort'].private, - 'kickoff_date': - re.sub(r'\+00:00$', 'Z', model['cohort'].kickoff_date.isoformat()), - 'ending_date': - model['cohort'].ending_date, - 'stage': - model['cohort'].stage, - 'language': - model['cohort'].language, - 'current_day': - model['cohort'].current_day, - 'current_module': - model['cohort'].current_module, - 'online_meeting_url': - model['cohort'].online_meeting_url, - 'timezone': - model['cohort'].timezone, - 'is_hidden_on_prework': - model['cohort'].is_hidden_on_prework, - 'available_as_saas': - model['cohort'].available_as_saas, - 'timeslots': - [{ - 'ending_at': self.integer_to_iso(cohort_time_slot['timezone'], cohort_time_slot['ending_at']), - 'id': cohort_time_slot['id'], - 'recurrency_type': cohort_time_slot['recurrency_type'], - 'recurrent': cohort_time_slot['recurrent'], - 'starting_at': self.integer_to_iso(cohort_time_slot['timezone'], cohort_time_slot['starting_at']), - }] if cohort_time_slots and model.cohort.id != 1 else [], - 'schedule': { - 'id': model['cohort'].schedule.id, - 'name': model['cohort'].schedule.name, - 'syllabus': model['cohort'].schedule.syllabus.id, - }, - 'syllabus_version': { - 'name': model.syllabus.name, - 'slug': model.syllabus.slug, - 'status': model['cohort'].syllabus_version.status, - 'version': model['cohort'].syllabus_version.version, - 'syllabus': model['cohort'].syllabus_version.syllabus.id, - 'duration_in_days': model.syllabus.duration_in_days, - 'duration_in_hours': model.syllabus.duration_in_hours, - 'github_url': model.syllabus.github_url, - 'logo': model.syllabus.logo, - 'private': model.syllabus.private, - 'week_hours': model.syllabus.week_hours, - }, - 'academy': { - 'id': model['cohort'].academy.id, - 'slug': model['cohort'].academy.slug, - 'name': model['cohort'].academy.name, - 'country': { - 'code': model['cohort'].academy.country.code, - 'name': model['cohort'].academy.country.name, + expected = [ + { + "id": model["cohort"].id, + "slug": model["cohort"].slug, + "name": model["cohort"].name, + "never_ends": model["cohort"].never_ends, + "remote_available": model["cohort"].remote_available, + "private": model["cohort"].private, + "kickoff_date": re.sub(r"\+00:00$", "Z", model["cohort"].kickoff_date.isoformat()), + "ending_date": model["cohort"].ending_date, + "stage": model["cohort"].stage, + "language": model["cohort"].language, + "current_day": model["cohort"].current_day, + "current_module": model["cohort"].current_module, + "online_meeting_url": model["cohort"].online_meeting_url, + "timezone": model["cohort"].timezone, + "is_hidden_on_prework": model["cohort"].is_hidden_on_prework, + "available_as_saas": model["cohort"].available_as_saas, + "timeslots": ( + [ + { + "ending_at": self.integer_to_iso( + cohort_time_slot["timezone"], cohort_time_slot["ending_at"] + ), + "id": cohort_time_slot["id"], + "recurrency_type": cohort_time_slot["recurrency_type"], + "recurrent": cohort_time_slot["recurrent"], + "starting_at": self.integer_to_iso( + 
cohort_time_slot["timezone"], cohort_time_slot["starting_at"] + ), + } + ] + if cohort_time_slots and model.cohort.id != 1 + else [] + ), + "schedule": { + "id": model["cohort"].schedule.id, + "name": model["cohort"].schedule.name, + "syllabus": model["cohort"].schedule.syllabus.id, }, - 'city': { - 'name': model['cohort'].academy.city.name, + "syllabus_version": { + "name": model.syllabus.name, + "slug": model.syllabus.slug, + "status": model["cohort"].syllabus_version.status, + "version": model["cohort"].syllabus_version.version, + "syllabus": model["cohort"].syllabus_version.syllabus.id, + "duration_in_days": model.syllabus.duration_in_days, + "duration_in_hours": model.syllabus.duration_in_hours, + "github_url": model.syllabus.github_url, + "logo": model.syllabus.logo, + "private": model.syllabus.private, + "week_hours": model.syllabus.week_hours, }, - 'logo_url': model['cohort'].academy.logo_url, - 'is_hidden_on_prework': model['cohort'].academy.is_hidden_on_prework - }, - } for model in models] + "academy": { + "id": model["cohort"].academy.id, + "slug": model["cohort"].academy.slug, + "name": model["cohort"].academy.name, + "country": { + "code": model["cohort"].academy.country.code, + "name": model["cohort"].academy.country.name, + }, + "city": { + "name": model["cohort"].academy.city.name, + }, + "logo_url": model["cohort"].academy.logo_url, + "is_hidden_on_prework": model["cohort"].academy.is_hidden_on_prework, + }, + } + for model in models + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), self.all_model_dict([x.cohort for x in models])) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), self.all_model_dict([x.cohort for x in models])) self.assertEqual(cohort_saved.send_robust.call_args_list, []) return models - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) def check_cohort_me__with_data(self, models=None, deleted=False): """Test /cohort without auth""" from breathecode.admissions.signals import cohort_saved @@ -270,21 +279,23 @@ def check_cohort_me__with_data(self, models=None, deleted=False): cohort_time_slot = cohort_time_slots[0] if models is None: - syllabus_kwargs = {'slug': 'they-killed-kenny'} - academy_kwargs = {'timezone': 'America/Caracas'} + syllabus_kwargs = {"slug": "they-killed-kenny"} + academy_kwargs = {"timezone": "America/Caracas"} models = [ - self.generate_models(authenticate=True, - cohort=True, - profile_academy=True, - capability='read_all_cohort', - role='potato', - syllabus=True, - cohort_user=1, - syllabus_version=True, - syllabus_schedule=True, - syllabus_schedule_time_slot=True, - syllabus_kwargs=syllabus_kwargs, - academy_kwargs=academy_kwargs) + self.generate_models( + authenticate=True, + cohort=True, + profile_academy=True, + capability="read_all_cohort", + role="potato", + syllabus=True, + cohort_user=1, + syllabus_version=True, + syllabus_schedule=True, + syllabus_schedule_time_slot=True, + syllabus_kwargs=syllabus_kwargs, + academy_kwargs=academy_kwargs, + ) ] # reset because this call are coming from mixer @@ -292,7 +303,7 @@ def check_cohort_me__with_data(self, models=None, deleted=False): models.sort(key=lambda x: x.cohort.kickoff_date, reverse=True) - url = reverse_lazy('admissions:academy_cohort') + url = reverse_lazy("admissions:academy_cohort") response = self.client.get(url) json = response.json() 
@@ -300,84 +311,83 @@ def check_cohort_me__with_data(self, models=None, deleted=False): expected = [] else: - expected = [{ - 'id': - model['cohort'].id, - 'slug': - model['cohort'].slug, - 'name': - model['cohort'].name, - 'never_ends': - model['cohort'].never_ends, - 'remote_available': - model['cohort'].remote_available, - 'private': - model['cohort'].private, - 'kickoff_date': - re.sub(r'\+00:00$', 'Z', model['cohort'].kickoff_date.isoformat()) - if model['cohort'].kickoff_date else model['cohort'].kickoff_date, - 'ending_date': - model['cohort'].ending_date, - 'stage': - model['cohort'].stage, - 'language': - model['cohort'].language, - 'current_day': - model['cohort'].current_day, - 'current_module': - model['cohort'].current_module, - 'online_meeting_url': - model['cohort'].online_meeting_url, - 'timezone': - model['cohort'].timezone, - 'is_hidden_on_prework': - model['cohort'].is_hidden_on_prework, - 'available_as_saas': - model['cohort'].available_as_saas, - 'timeslots': - [{ - 'ending_at': self.integer_to_iso(cohort_time_slot['timezone'], cohort_time_slot['ending_at']), - 'id': cohort_time_slot['id'], - 'recurrency_type': cohort_time_slot['recurrency_type'], - 'recurrent': cohort_time_slot['recurrent'], - 'starting_at': self.integer_to_iso(cohort_time_slot['timezone'], cohort_time_slot['starting_at']), - }] if cohort_time_slots and model.cohort.id != 1 else [], - 'schedule': { - 'id': model['cohort'].schedule.id, - 'name': model['cohort'].schedule.name, - 'syllabus': model['cohort'].schedule.syllabus.id, - }, - 'syllabus_version': { - 'name': model.syllabus.name, - 'slug': model.syllabus.slug, - 'status': model['cohort'].syllabus_version.status, - 'version': model['cohort'].syllabus_version.version, - 'syllabus': model['cohort'].syllabus_version.syllabus.id, - 'duration_in_days': model.syllabus.duration_in_days, - 'duration_in_hours': model.syllabus.duration_in_hours, - 'github_url': model.syllabus.github_url, - 'logo': model.syllabus.logo, - 'private': model.syllabus.private, - 'week_hours': model.syllabus.week_hours, - }, - 'academy': { - 'id': model['cohort'].academy.id, - 'slug': model['cohort'].academy.slug, - 'name': model['cohort'].academy.name, - 'country': { - 'code': model['cohort'].academy.country.code, - 'name': model['cohort'].academy.country.name, + expected = [ + { + "id": model["cohort"].id, + "slug": model["cohort"].slug, + "name": model["cohort"].name, + "never_ends": model["cohort"].never_ends, + "remote_available": model["cohort"].remote_available, + "private": model["cohort"].private, + "kickoff_date": ( + re.sub(r"\+00:00$", "Z", model["cohort"].kickoff_date.isoformat()) + if model["cohort"].kickoff_date + else model["cohort"].kickoff_date + ), + "ending_date": model["cohort"].ending_date, + "stage": model["cohort"].stage, + "language": model["cohort"].language, + "current_day": model["cohort"].current_day, + "current_module": model["cohort"].current_module, + "online_meeting_url": model["cohort"].online_meeting_url, + "timezone": model["cohort"].timezone, + "is_hidden_on_prework": model["cohort"].is_hidden_on_prework, + "available_as_saas": model["cohort"].available_as_saas, + "timeslots": ( + [ + { + "ending_at": self.integer_to_iso( + cohort_time_slot["timezone"], cohort_time_slot["ending_at"] + ), + "id": cohort_time_slot["id"], + "recurrency_type": cohort_time_slot["recurrency_type"], + "recurrent": cohort_time_slot["recurrent"], + "starting_at": self.integer_to_iso( + cohort_time_slot["timezone"], cohort_time_slot["starting_at"] + ), + } + ] + if 
cohort_time_slots and model.cohort.id != 1 + else [] + ), + "schedule": { + "id": model["cohort"].schedule.id, + "name": model["cohort"].schedule.name, + "syllabus": model["cohort"].schedule.syllabus.id, }, - 'city': { - 'name': model['cohort'].academy.city.name, + "syllabus_version": { + "name": model.syllabus.name, + "slug": model.syllabus.slug, + "status": model["cohort"].syllabus_version.status, + "version": model["cohort"].syllabus_version.version, + "syllabus": model["cohort"].syllabus_version.syllabus.id, + "duration_in_days": model.syllabus.duration_in_days, + "duration_in_hours": model.syllabus.duration_in_hours, + "github_url": model.syllabus.github_url, + "logo": model.syllabus.logo, + "private": model.syllabus.private, + "week_hours": model.syllabus.week_hours, }, - 'logo_url': model['cohort'].academy.logo_url, - 'is_hidden_on_prework': model['cohort'].academy.is_hidden_on_prework - }, - } for model in models] + "academy": { + "id": model["cohort"].academy.id, + "slug": model["cohort"].academy.slug, + "name": model["cohort"].academy.name, + "country": { + "code": model["cohort"].academy.country.code, + "name": model["cohort"].academy.country.name, + }, + "city": { + "name": model["cohort"].academy.city.name, + }, + "logo_url": model["cohort"].academy.logo_url, + "is_hidden_on_prework": model["cohort"].academy.is_hidden_on_prework, + }, + } + for model in models + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), self.all_model_dict([x.cohort for x in models])) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), self.all_model_dict([x.cohort for x in models])) self.assertEqual(cohort_saved.send_robust.call_args_list, []) return models diff --git a/breathecode/admissions/tests/mixins/authenticate_mixin.py b/breathecode/admissions/tests/mixins/authenticate_mixin.py index 808503e55..34b67ce48 100644 --- a/breathecode/admissions/tests/mixins/authenticate_mixin.py +++ b/breathecode/admissions/tests/mixins/authenticate_mixin.py @@ -1,6 +1,7 @@ """ Collections of mixins used to login in authorize microservice """ + from breathecode.authenticate.models import Capability, ProfileAcademy, Role from rest_framework.test import APITestCase from mixer.backend.django import mixer @@ -14,13 +15,13 @@ def remove_model_state(self, dict): result = None if dict: result = dict.copy() - del result['_state'] + del result["_state"] # remove any field starting with __ (double underscore) because it is considered private without_private_keys = result.copy() for key in result: - print('key', key) - if '__' in key: + print("key", key) + if "__" in key: del without_private_keys[key] return without_private_keys @@ -31,8 +32,8 @@ def remove_updated_at(self, dict): result = None if dict: result = dict.copy() - if 'updated_at' in result: - del result['updated_at'] + if "updated_at" in result: + del result["updated_at"] return result def remove_dinamics_fields(self, dict): @@ -68,7 +69,7 @@ def all_role_dict(self): def all_profile_academy_dict(self): return [self.remove_dinamics_fields(data.__dict__.copy()) for data in ProfileAcademy.objects.filter()] - def generate_credentials(self, profile_academy=False, capability='', role='', models=None, external_models=None): + def generate_credentials(self, profile_academy=False, capability="", role="", models=None, external_models=None): if models is None: models = {} @@ -84,42 +85,42 @@ def generate_credentials(self, profile_academy=False, 
capability='', role='', mo # if not 'capability' in models and capability: if capability: kargs = { - 'slug': capability, - 'description': capability, + "slug": capability, + "description": capability, } - models['capability'] = mixer.blend('authenticate.Capability', **kargs) + models["capability"] = mixer.blend("authenticate.Capability", **kargs) if role: kargs = { - 'slug': role, - 'name': role, + "slug": role, + "name": role, } - if not 'role' in models: + if not "role" in models: if capability: - kargs['capabilities'] = [models['capability']] + kargs["capabilities"] = [models["capability"]] - models['role'] = mixer.blend('authenticate.Role', **kargs) + models["role"] = mixer.blend("authenticate.Role", **kargs) else: role = Role.objects.filter(**kargs).first() - role.capabilities.add(models['capability ']) + role.capabilities.add(models["capability "]) role.save() # models['role'].capabilities.add(models['capability ']) # models['role'].save() - if not 'profile_academy' in models and profile_academy: + if not "profile_academy" in models and profile_academy: kargs = {} - if 'user' in models: - kargs['user'] = external_models['user'] + if "user" in models: + kargs["user"] = external_models["user"] - if 'certificate' in models: - kargs['certificate'] = external_models['certificate'] + if "certificate" in models: + kargs["certificate"] = external_models["certificate"] - if 'academy' in models: - kargs['academy'] = external_models['academy'] + if "academy" in models: + kargs["academy"] = external_models["academy"] - models['profile_academy'] = mixer.blend('authenticate.ProfileAcademy', **kargs) + models["profile_academy"] = mixer.blend("authenticate.ProfileAcademy", **kargs) return models diff --git a/breathecode/admissions/tests/mocks/__init__.py b/breathecode/admissions/tests/mocks/__init__.py index bd4442d5a..68ad9e888 100644 --- a/breathecode/admissions/tests/mocks/__init__.py +++ b/breathecode/admissions/tests/mocks/__init__.py @@ -1,5 +1,6 @@ """ Mocks """ + # flake8: noqa: F401 -from .legacy_api import (LEGACY_API_PATH, LEGACY_API_INSTANCES, apply_screenshotmachine_requests_get_mock) +from .legacy_api import LEGACY_API_PATH, LEGACY_API_INSTANCES, apply_screenshotmachine_requests_get_mock diff --git a/breathecode/admissions/tests/mocks/legacy_api/__init__.py b/breathecode/admissions/tests/mocks/legacy_api/__init__.py index 3400be58f..5d114c48f 100644 --- a/breathecode/admissions/tests/mocks/legacy_api/__init__.py +++ b/breathecode/admissions/tests/mocks/legacy_api/__init__.py @@ -1,16 +1,17 @@ """ Legacy breathecode API Mocks """ + from unittest.mock import Mock from .requests_mock import get_mock LEGACY_API_PATH = { - 'get': 'requests.get', + "get": "requests.get", } -LEGACY_API_INSTANCES = {'get': Mock(side_effect=get_mock)} +LEGACY_API_INSTANCES = {"get": Mock(side_effect=get_mock)} def apply_screenshotmachine_requests_get_mock(): """Apply Storage Blob Mock""" - return LEGACY_API_INSTANCES['get'] + return LEGACY_API_INSTANCES["get"] diff --git a/breathecode/admissions/tests/mocks/legacy_api/requests_mock.py b/breathecode/admissions/tests/mocks/legacy_api/requests_mock.py index 81be5fd64..6979b8e3c 100644 --- a/breathecode/admissions/tests/mocks/legacy_api/requests_mock.py +++ b/breathecode/admissions/tests/mocks/legacy_api/requests_mock.py @@ -1,24 +1,26 @@ """ Requests mock """ + import os, json -HOST = os.environ.get('OLD_BREATHECODE_API') +HOST = os.environ.get("OLD_BREATHECODE_API") -with open(f'{os.getcwd()}/breathecode/admissions/fixtures/legacy_teachers.json', 'r') as file: 
+with open(f"{os.getcwd()}/breathecode/admissions/fixtures/legacy_teachers.json", "r") as file: legacy_teachers = json.load(file) -with open(f'{os.getcwd()}/breathecode/admissions/fixtures/legacy_students.json', 'r') as file: +with open(f"{os.getcwd()}/breathecode/admissions/fixtures/legacy_students.json", "r") as file: legacy_students = json.load(file) -class ResponseMock(): +class ResponseMock: """Simutate Response to be used by mocks""" + status_code = None data = None content = None - def __init__(self, status_code=200, data=''): + def __init__(self, status_code=200, data=""): self.status_code = status_code if isinstance(data, str): @@ -33,8 +35,8 @@ def json(self) -> dict: def get_mock(url: str, stream=False, timeout=30): """Requests get mock""" - if url == f'{HOST}/students/' or url == f'{HOST}/students': + if url == f"{HOST}/students/" or url == f"{HOST}/students": return ResponseMock(data=legacy_students, status_code=200) - elif url == f'{HOST}/teachers/' or url == f'{HOST}/teachers': + elif url == f"{HOST}/teachers/" or url == f"{HOST}/teachers": return ResponseMock(data=legacy_teachers, status_code=200) - return ResponseMock(data='error', status_code=404) + return ResponseMock(data="error", status_code=404) diff --git a/breathecode/admissions/tests/permissions/contexts/tests_academy.py b/breathecode/admissions/tests/permissions/contexts/tests_academy.py index aa4544a7e..5863fdb1c 100644 --- a/breathecode/admissions/tests/permissions/contexts/tests_academy.py +++ b/breathecode/admissions/tests/permissions/contexts/tests_academy.py @@ -8,12 +8,12 @@ def serializer(academy): return { - 'id': academy.id, - 'slug': academy.slug, - 'city': academy.city.name, - 'country': academy.country.name, - 'zip_code': academy.zip_code, - 'timezone': academy.timezone, + "id": academy.id, + "slug": academy.slug, + "city": academy.city.name, + "country": academy.country.name, + "zip_code": academy.zip_code, + "timezone": academy.timezone, } @@ -22,22 +22,28 @@ def serializer(academy): class AcademyEventTestSuite(AdmissionsTestCase): - @patch('ldclient.get', MagicMock()) - @patch('breathecode.services.launch_darkly.client.LaunchDarkly.context', MagicMock(return_value=value)) + @patch("ldclient.get", MagicMock()) + @patch("breathecode.services.launch_darkly.client.LaunchDarkly.context", MagicMock(return_value=value)) def test_make_right_calls(self): model = self.bc.database.create(academy=1) ld = LaunchDarkly() result = academy(ld, model.academy) - self.assertEqual(self.bc.database.list_of('admissions.Academy'), [ - self.bc.format.to_dict(model.academy), - ]) + self.assertEqual( + self.bc.database.list_of("admissions.Academy"), + [ + self.bc.format.to_dict(model.academy), + ], + ) contexts = serializer(model.academy) - self.assertEqual(LaunchDarkly.context.call_args_list, [ - call('1', f'{model.academy.name} ({model.academy.slug})', 'academy', contexts), - ]) + self.assertEqual( + LaunchDarkly.context.call_args_list, + [ + call("1", f"{model.academy.name} ({model.academy.slug})", "academy", contexts), + ], + ) self.assertEqual(result, value) diff --git a/breathecode/admissions/tests/receivers/tests_mark_saas_student_as_graduated.py b/breathecode/admissions/tests/receivers/tests_mark_saas_student_as_graduated.py index be02b34a7..ec715ae9d 100644 --- a/breathecode/admissions/tests/receivers/tests_mark_saas_student_as_graduated.py +++ b/breathecode/admissions/tests/receivers/tests_mark_saas_student_as_graduated.py @@ -10,66 +10,68 @@ def arange(db, bc: Breathecode, fake): def wrapper(task=1, 
cohort_user=1, syllabus_version={}, cohort=1): syll = { - 'json': { - 'days': [ + "json": { + "days": [ { - 'quizzes': [ + "quizzes": [ { - 'slug': 'task-1', - 'mandatory': True, + "slug": "task-1", + "mandatory": True, }, { - 'slug': 'task-2', - 'mandatory': False, + "slug": "task-2", + "mandatory": False, }, ], - 'lessons': [ + "lessons": [ { - 'slug': 'task-3', - 'mandatory': True, + "slug": "task-3", + "mandatory": True, }, { - 'slug': 'task-4', - 'mandatory': False, + "slug": "task-4", + "mandatory": False, }, ], }, { - 'replits': [ + "replits": [ { - 'slug': 'task-5', - 'mandatory': True, + "slug": "task-5", + "mandatory": True, }, { - 'slug': 'task-6', - 'mandatory': False, + "slug": "task-6", + "mandatory": False, }, ], - 'assignments': [ + "assignments": [ { - 'slug': 'task-7', - 'mandatory': True, + "slug": "task-7", + "mandatory": True, }, { - 'slug': 'task-8', - 'mandatory': True, + "slug": "task-8", + "mandatory": True, }, { - 'slug': 'task-9', - 'mandatory': False, + "slug": "task-9", + "mandatory": False, }, ], }, ], }, } - return bc.database.create(task=task, - cohort_user=cohort_user, - cohort=cohort, - syllabus_version={ - **syll, - **syllabus_version, - }) + return bc.database.create( + task=task, + cohort_user=cohort_user, + cohort=cohort, + syllabus_version={ + **syll, + **syllabus_version, + }, + ) yield wrapper @@ -79,14 +81,14 @@ def test_no_updating_the_status(enable_signals, bc: Breathecode, arange): model = arange() - assert bc.database.list_of('assignments.Task') == [ + assert bc.database.list_of("assignments.Task") == [ bc.format.to_dict(model.task), ] - assert bc.database.list_of('admissions.CohortUser') == [ + assert bc.database.list_of("admissions.CohortUser") == [ { **bc.format.to_dict(model.cohort_user), - 'educational_status': 'ACTIVE', + "educational_status": "ACTIVE", }, ] @@ -94,85 +96,97 @@ def test_no_updating_the_status(enable_signals, bc: Breathecode, arange): def test_available_as_saas_false(enable_signals, bc: Breathecode, arange): enable_signals() - model = arange(task={'task_status': 'PENDING'}, cohort={'available_as_saas': False}) + model = arange(task={"task_status": "PENDING"}, cohort={"available_as_saas": False}) - model.task.task_status = 'DONE' + model.task.task_status = "DONE" model.task.save() - assert bc.database.list_of('assignments.Task') == [ + assert bc.database.list_of("assignments.Task") == [ bc.format.to_dict(model.task), ] - assert bc.database.list_of('admissions.CohortUser') == [ + assert bc.database.list_of("admissions.CohortUser") == [ { **bc.format.to_dict(model.cohort_user), - 'educational_status': 'ACTIVE', - 'history_log': { - 'delivered_assignments': [{ - 'id': 1, - 'type': model.task.task_type, - }], - 'pending_assignments': [], + "educational_status": "ACTIVE", + "history_log": { + "delivered_assignments": [ + { + "id": 1, + "type": model.task.task_type, + } + ], + "pending_assignments": [], }, }, ] -@pytest.mark.parametrize('revision_status1, revision_status2', [('PENDING', 'REJECTED'), ('REJECTED', 'PENDING')]) -def test_available_as_saas_true__no_mandatory_tasks__pending_tasks(enable_signals, bc: Breathecode, arange, - revision_status1, revision_status2): +@pytest.mark.parametrize("revision_status1, revision_status2", [("PENDING", "REJECTED"), ("REJECTED", "PENDING")]) +def test_available_as_saas_true__no_mandatory_tasks__pending_tasks( + enable_signals, bc: Breathecode, arange, revision_status1, revision_status2 +): enable_signals() task = { - 'associated_slug': 'task-9', - 'task_status': 'PENDING', - 
'revision_status': revision_status1, - 'task_type': 'PROJECT', + "associated_slug": "task-9", + "task_status": "PENDING", + "revision_status": revision_status1, + "task_type": "PROJECT", } - cohort = {'available_as_saas': True} + cohort = {"available_as_saas": True} model = arange(task=task, cohort=cohort) - model.task.task_status = 'DONE' + model.task.task_status = "DONE" model.task.revision_status = revision_status2 model.task.save() - assert bc.database.list_of('assignments.Task') == [ + assert bc.database.list_of("assignments.Task") == [ bc.format.to_dict(model.task), ] - assert bc.database.list_of('admissions.CohortUser') == [ + assert bc.database.list_of("admissions.CohortUser") == [ { **bc.format.to_dict(model.cohort_user), - 'educational_status': 'ACTIVE', - 'history_log': { - 'delivered_assignments': [{ - 'id': 1, - 'type': model.task.task_type, - }], - 'pending_assignments': [], + "educational_status": "ACTIVE", + "history_log": { + "delivered_assignments": [ + { + "id": 1, + "type": model.task.task_type, + } + ], + "pending_assignments": [], }, }, ] -@pytest.mark.parametrize('good_revision_status, bad_revision_status', [ - ('IGNORED', 'PENDING'), - ('APPROVED', 'REJECTED'), -]) -def test_available_as_saas_true__all_mandatory_tasks_but_one(enable_signals, bc: Breathecode, arange, - good_revision_status, bad_revision_status): +@pytest.mark.parametrize( + "good_revision_status, bad_revision_status", + [ + ("IGNORED", "PENDING"), + ("APPROVED", "REJECTED"), + ], +) +def test_available_as_saas_true__all_mandatory_tasks_but_one( + enable_signals, bc: Breathecode, arange, good_revision_status, bad_revision_status +): enable_signals() - tasks = [{ - 'task_status': 'PENDING', - 'associated_slug': f'task-{n}', - 'revision_status': 'PENDING', - 'task_type': 'PROJECT', - } for n in [1, 3, 5, 7, 8]] + tasks = [ + { + "task_status": "PENDING", + "associated_slug": f"task-{n}", + "revision_status": "PENDING", + "task_type": "PROJECT", + } + for n in [1, 3, 5, 7, 8] + ] exception = random.randint(0, 3) - model = arange(task=tasks, cohort={'available_as_saas': True}) + model = arange(task=tasks, cohort={"available_as_saas": True}) for n in range(0, 4): if n == exception: @@ -187,95 +201,112 @@ def test_available_as_saas_true__all_mandatory_tasks_but_one(enable_signals, bc: if n == exception: continue - model.task[n].task_status = 'DONE' + model.task[n].task_status = "DONE" model.task[n].save() - assert bc.database.list_of('assignments.Task') == bc.format.to_dict(model.task) + assert bc.database.list_of("assignments.Task") == bc.format.to_dict(model.task) - assert bc.database.list_of('admissions.CohortUser') == [ + assert bc.database.list_of("admissions.CohortUser") == [ { **bc.format.to_dict(model.cohort_user), - 'educational_status': 'ACTIVE', - 'history_log': { - 'delivered_assignments': [{ - 'id': n + 1, - 'type': model.task[n].task_type, - } for n in range(0, 4) if n != exception], - 'pending_assignments': [], + "educational_status": "ACTIVE", + "history_log": { + "delivered_assignments": [ + { + "id": n + 1, + "type": model.task[n].task_type, + } + for n in range(0, 4) + if n != exception + ], + "pending_assignments": [], }, }, ] -@pytest.mark.parametrize('revision_status', ['IGNORED', 'APPROVED']) +@pytest.mark.parametrize("revision_status", ["IGNORED", "APPROVED"]) def test_available_as_saas_true__all_mandatory_tasks(enable_signals, bc: Breathecode, arange, revision_status): enable_signals() - tasks = [{ - 'task_status': 'PENDING', - 'associated_slug': f'task-{n}', - 'revision_status': 
'PENDING', - 'task_type': 'PROJECT', - } for n in [1, 3, 5, 7, 8]] - cohort = {'available_as_saas': True} + tasks = [ + { + "task_status": "PENDING", + "associated_slug": f"task-{n}", + "revision_status": "PENDING", + "task_type": "PROJECT", + } + for n in [1, 3, 5, 7, 8] + ] + cohort = {"available_as_saas": True} model = arange(task=tasks, cohort=cohort) for n in range(0, 5): - model.task[n].task_status = 'DONE' + model.task[n].task_status = "DONE" model.task[n].revision_status = revision_status model.task[n].save() - assert bc.database.list_of('assignments.Task') == bc.format.to_dict(model.task) + assert bc.database.list_of("assignments.Task") == bc.format.to_dict(model.task) - assert bc.database.list_of('admissions.CohortUser') == [ + assert bc.database.list_of("admissions.CohortUser") == [ { **bc.format.to_dict(model.cohort_user), - 'educational_status': 'GRADUATED', - 'history_log': { - 'delivered_assignments': [{ - 'id': n + 1, - 'type': model.task[n].task_type, - } for n in range(0, 5)], - 'pending_assignments': [], + "educational_status": "GRADUATED", + "history_log": { + "delivered_assignments": [ + { + "id": n + 1, + "type": model.task[n].task_type, + } + for n in range(0, 5) + ], + "pending_assignments": [], }, }, ] -@pytest.mark.parametrize('task_type', ['QUIZ', 'LESSON', 'EXERCISE']) -@pytest.mark.parametrize('revision_status1, revision_status2', [('PENDING', 'REJECTED'), ('REJECTED', 'PENDING')]) +@pytest.mark.parametrize("task_type", ["QUIZ", "LESSON", "EXERCISE"]) +@pytest.mark.parametrize("revision_status1, revision_status2", [("PENDING", "REJECTED"), ("REJECTED", "PENDING")]) def test_available_as_saas_true__all_mandatory_tasks_pending__but_type_is_not_project( - enable_signals, bc: Breathecode, arange, revision_status1, revision_status2, task_type): + enable_signals, bc: Breathecode, arange, revision_status1, revision_status2, task_type +): enable_signals() - tasks = [{ - 'task_status': 'PENDING', - 'associated_slug': f'task-{n}', - 'revision_status': revision_status1, - 'task_type': task_type, - } for n in [1, 3, 5, 7, 8]] - cohort = {'available_as_saas': True} + tasks = [ + { + "task_status": "PENDING", + "associated_slug": f"task-{n}", + "revision_status": revision_status1, + "task_type": task_type, + } + for n in [1, 3, 5, 7, 8] + ] + cohort = {"available_as_saas": True} model = arange(task=tasks, cohort=cohort) for n in range(0, 4): - model.task[n].task_status = 'DONE' + model.task[n].task_status = "DONE" model.task[n].revision_status = revision_status2 model.task[n].save() - assert bc.database.list_of('assignments.Task') == bc.format.to_dict(model.task) + assert bc.database.list_of("assignments.Task") == bc.format.to_dict(model.task) - assert bc.database.list_of('admissions.CohortUser') == [ + assert bc.database.list_of("admissions.CohortUser") == [ { **bc.format.to_dict(model.cohort_user), - 'educational_status': 'ACTIVE', - 'history_log': { - 'delivered_assignments': [{ - 'id': n + 1, - 'type': model.task[n].task_type, - } for n in range(0, 4)], - 'pending_assignments': [], + "educational_status": "ACTIVE", + "history_log": { + "delivered_assignments": [ + { + "id": n + 1, + "type": model.task[n].task_type, + } + for n in range(0, 4) + ], + "pending_assignments": [], }, }, ] diff --git a/breathecode/admissions/tests/tasks/tests_build_profile_academy.py b/breathecode/admissions/tests/tasks/tests_build_profile_academy.py index 6ec832f8a..daab794cd 100644 --- a/breathecode/admissions/tests/tasks/tests_build_profile_academy.py +++ 
b/breathecode/admissions/tests/tasks/tests_build_profile_academy.py @@ -1,6 +1,7 @@ """ Test /academy """ + from unittest.mock import MagicMock, call, patch from logging import Logger @@ -10,16 +11,16 @@ def profile_academy_item(user, academy, data={}): return { - 'academy_id': academy.id, - 'address': None, - 'email': user.email, - 'first_name': user.first_name, - 'id': 0, - 'last_name': user.last_name, - 'phone': '', - 'role_id': '', - 'status': 'INVITED', - 'user_id': user.id, + "academy_id": academy.id, + "address": None, + "email": user.email, + "first_name": user.first_name, + "id": 0, + "last_name": user.last_name, + "phone": "", + "role_id": "", + "status": "INVITED", + "user_id": user.id, **data, } @@ -27,22 +28,28 @@ def profile_academy_item(user, academy, data={}): class AcademyActivateTestSuite(AdmissionsTestCase): """Test /academy/activate""" - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) def test_user_not_found(self): build_profile_academy.delay(1, 1) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), []) - self.bc.check.calls(Logger.info.call_args_list, [ - call('Starting build_profile_academy for cohort 1 and user 1'), - ]) - - self.bc.check.calls(Logger.error.call_args_list, [ - call('User with id 1 not found', exc_info=True), - ]) - - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) + self.assertEqual(self.bc.database.list_of("authenticate.ProfileAcademy"), []) + self.bc.check.calls( + Logger.info.call_args_list, + [ + call("Starting build_profile_academy for cohort 1 and user 1"), + ], + ) + + self.bc.check.calls( + Logger.error.call_args_list, + [ + call("User with id 1 not found", exc_info=True), + ], + ) + + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) def test_academy_not_found(self): self.bc.database.create(user=1) @@ -51,17 +58,23 @@ def test_academy_not_found(self): build_profile_academy.delay(1, 1) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), []) - self.bc.check.calls(Logger.info.call_args_list, [ - call('Starting build_profile_academy for cohort 1 and user 1'), - ]) - - self.bc.check.calls(Logger.error.call_args_list, [ - call('Academy with id 1 not found', exc_info=True), - ]) - - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) + self.assertEqual(self.bc.database.list_of("authenticate.ProfileAcademy"), []) + self.bc.check.calls( + Logger.info.call_args_list, + [ + call("Starting build_profile_academy for cohort 1 and user 1"), + ], + ) + + self.bc.check.calls( + Logger.error.call_args_list, + [ + call("Academy with id 1 not found", exc_info=True), + ], + ) + + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) def test_role_not_found(self): self.bc.database.create(user=1, academy=1) @@ -70,42 +83,58 @@ def test_role_not_found(self): build_profile_academy.delay(1, 1) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), []) - self.bc.check.calls(Logger.info.call_args_list, [ - call('Starting build_profile_academy for cohort 1 and user 1'), - ]) - - self.bc.check.calls(Logger.error.call_args_list, [ - call('Role with slug None not found', exc_info=True), - ]) - - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) + 
self.assertEqual(self.bc.database.list_of("authenticate.ProfileAcademy"), []) + self.bc.check.calls( + Logger.info.call_args_list, + [ + call("Starting build_profile_academy for cohort 1 and user 1"), + ], + ) + + self.bc.check.calls( + Logger.error.call_args_list, + [ + call("Role with slug None not found", exc_info=True), + ], + ) + + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) def test_role_student(self): - model = self.bc.database.create(user=1, academy=1, role='student') + model = self.bc.database.create(user=1, academy=1, role="student") Logger.info.call_args_list = [] Logger.error.call_args_list = [] build_profile_academy.delay(1, 1) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [ - profile_academy_item(model.user, model.academy, data={ - 'id': 1, - 'status': 'ACTIVE', - 'role_id': 'student', - }), - ]) - - self.bc.check.calls(Logger.info.call_args_list, [ - call('Starting build_profile_academy for cohort 1 and user 1'), - call('ProfileAcademy added'), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + profile_academy_item( + model.user, + model.academy, + data={ + "id": 1, + "status": "ACTIVE", + "role_id": "student", + }, + ), + ], + ) + + self.bc.check.calls( + Logger.info.call_args_list, + [ + call("Starting build_profile_academy for cohort 1 and user 1"), + call("ProfileAcademy added"), + ], + ) self.bc.check.calls(Logger.error.call_args_list, []) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) def test_random_role(self): model = self.bc.database.create(user=1, academy=1, role=1) @@ -114,24 +143,33 @@ def test_random_role(self): build_profile_academy.delay(1, 1, model.role.slug) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [ - profile_academy_item( - model.user, model.academy, data={ - 'id': 1, - 'status': 'ACTIVE', - 'role_id': model.role.slug, - }), - ]) - - self.bc.check.calls(Logger.info.call_args_list, [ - call('Starting build_profile_academy for cohort 1 and user 1'), - call('ProfileAcademy added'), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + profile_academy_item( + model.user, + model.academy, + data={ + "id": 1, + "status": "ACTIVE", + "role_id": model.role.slug, + }, + ), + ], + ) + + self.bc.check.calls( + Logger.info.call_args_list, + [ + call("Starting build_profile_academy for cohort 1 and user 1"), + call("ProfileAcademy added"), + ], + ) self.bc.check.calls(Logger.error.call_args_list, []) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) def test_profile_academy_exists(self): model = self.bc.database.create(user=1, academy=1, role=1, profile_academy=1) @@ -140,18 +178,24 @@ def test_profile_academy_exists(self): build_profile_academy.delay(1, 1, model.role.slug) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [ - { - **self.bc.format.to_dict(model.profile_academy), - 'id': 1, - 'status': 'ACTIVE', - 'role_id': model.role.slug, - }, - ]) - - self.bc.check.calls(Logger.info.call_args_list, [ - call('Starting build_profile_academy for cohort 1 and user 1'), - call('ProfileAcademy mark as active'), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + { + 
**self.bc.format.to_dict(model.profile_academy), + "id": 1, + "status": "ACTIVE", + "role_id": model.role.slug, + }, + ], + ) + + self.bc.check.calls( + Logger.info.call_args_list, + [ + call("Starting build_profile_academy for cohort 1 and user 1"), + call("ProfileAcademy mark as active"), + ], + ) self.bc.check.calls(Logger.error.call_args_list, []) diff --git a/breathecode/admissions/tests/urls/tests_academy.py b/breathecode/admissions/tests/urls/tests_academy.py index 9bf36f132..1de72521a 100644 --- a/breathecode/admissions/tests/urls/tests_academy.py +++ b/breathecode/admissions/tests/urls/tests_academy.py @@ -1,6 +1,7 @@ """ Test /academy """ + import random from django.urls.base import reverse_lazy from rest_framework import status @@ -9,13 +10,13 @@ def get_serializer(academy, country, city, data={}): return { - 'id': academy.id, - 'name': academy.name, - 'slug': academy.slug, - 'street_address': academy.street_address, - 'country': country.code, - 'city': city.id, - 'is_hidden_on_prework': academy.is_hidden_on_prework, + "id": academy.id, + "name": academy.name, + "slug": academy.slug, + "street_address": academy.street_address, + "country": country.code, + "city": city.id, + "is_hidden_on_prework": academy.is_hidden_on_prework, **data, } @@ -25,7 +26,7 @@ class academyTestSuite(AdmissionsTestCase): def test_without_auth_should_be_ok(self): """Test /academy without auth""" - url = reverse_lazy('admissions:academy') + url = reverse_lazy("admissions:academy") response = self.client.get(url) json = response.json() @@ -33,19 +34,19 @@ def test_without_auth_should_be_ok(self): def test_without_data(self): """Test /academy without auth""" - url = reverse_lazy('admissions:academy') + url = reverse_lazy("admissions:academy") self.generate_models(authenticate=True) response = self.client.get(url) json = response.json() self.assertEqual(json, []) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('admissions.Academy'), []) + self.assertEqual(self.bc.database.list_of("admissions.Academy"), []) def test_with_data(self): """Test /academy without auth""" model = self.bc.database.create(authenticate=True, academy=True) - url = reverse_lazy('admissions:academy') + url = reverse_lazy("admissions:academy") model_dict = self.remove_dinamics_fields(model.academy.__dict__) response = self.client.get(url) @@ -54,14 +55,17 @@ def test_with_data(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('admissions.Academy'), [ - self.bc.format.to_dict(model.academy), - ]) + self.assertEqual( + self.bc.database.list_of("admissions.Academy"), + [ + self.bc.format.to_dict(model.academy), + ], + ) def test_status_in_querystring__status_not_found(self): """Test /academy without auth""" model = self.generate_models(authenticate=True, academy=True) - url = reverse_lazy('admissions:academy') + '?status=asdsad' + url = reverse_lazy("admissions:academy") + "?status=asdsad" response = self.client.get(url) json = response.json() @@ -69,15 +73,19 @@ def test_status_in_querystring__status_not_found(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('admissions.Academy'), [ - self.bc.format.to_dict(model.academy), - ]) + self.assertEqual( + self.bc.database.list_of("admissions.Academy"), + [ + self.bc.format.to_dict(model.academy), + ], + ) def test_status_in_querystring__status_found(self): 
"""Test /academy without auth""" - statuses = ['INACTIVE', 'ACTIVE', 'DELETED'] - cases = [(x, x, random.choice([y for y in statuses if x != y])) - for x in statuses] + [(x, x.lower(), random.choice([y for y in statuses if x != y])) for x in statuses] + statuses = ["INACTIVE", "ACTIVE", "DELETED"] + cases = [(x, x, random.choice([y for y in statuses if x != y])) for x in statuses] + [ + (x, x.lower(), random.choice([y for y in statuses if x != y])) for x in statuses + ] model = self.generate_models(authenticate=True, academy=3) for current, query, bad_status in cases: @@ -90,7 +98,7 @@ def test_status_in_querystring__status_found(self): model.academy[2].status = bad_status model.academy[2].save() - url = reverse_lazy('admissions:academy') + f'?status={query}' + url = reverse_lazy("admissions:academy") + f"?status={query}" response = self.client.get(url) json = response.json() @@ -101,17 +109,20 @@ def test_status_in_querystring__status_found(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('admissions.Academy'), [ - { - **self.bc.format.to_dict(model.academy[0]), - 'status': current, - }, - { - **self.bc.format.to_dict(model.academy[1]), - 'status': current, - }, - { - **self.bc.format.to_dict(model.academy[2]), - 'status': bad_status, - }, - ]) + self.assertEqual( + self.bc.database.list_of("admissions.Academy"), + [ + { + **self.bc.format.to_dict(model.academy[0]), + "status": current, + }, + { + **self.bc.format.to_dict(model.academy[1]), + "status": current, + }, + { + **self.bc.format.to_dict(model.academy[2]), + "status": bad_status, + }, + ], + ) diff --git a/breathecode/admissions/tests/urls/tests_academy_activate.py b/breathecode/admissions/tests/urls/tests_academy_activate.py index 5e3d4e1e9..53991807a 100644 --- a/breathecode/admissions/tests/urls/tests_academy_activate.py +++ b/breathecode/admissions/tests/urls/tests_academy_activate.py @@ -1,6 +1,7 @@ """ Test /academy """ + from django.urls.base import reverse_lazy from rest_framework import status from ..mixins import AdmissionsTestCase @@ -11,11 +12,11 @@ class AcademyActivateTestSuite(AdmissionsTestCase): def test_academy_without_auth(self): """Test /academy/activate without auth""" - url = reverse_lazy('admissions:academy_activate') + url = reverse_lazy("admissions:academy_activate") data = {} response = self.client.put(url, data) json = response.json() - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) @@ -23,13 +24,13 @@ def test_academy_without_auth(self): def test_academy_without_academy_id(self): """Test /academy/activate without academy id in header""" model = self.generate_models(authenticate=True) - url = reverse_lazy('admissions:academy_activate') + url = reverse_lazy("admissions:academy_activate") data = {} response = self.client.put(url, data) json = response.json() expected = { - 'detail': "Missing academy_id parameter expected for the endpoint url or 'Academy' header", - 'status_code': 403 + "detail": "Missing academy_id parameter expected for the endpoint url or 'Academy' header", + "status_code": 403, } self.assertEqual(json, expected) @@ -42,13 +43,13 @@ def test_academy_without_capability(self): authenticate=True, profile_academy=True, ) - url = 
reverse_lazy('admissions:academy_activate') + url = reverse_lazy("admissions:academy_activate") data = {} response = self.client.put(url, data) json = response.json() expected = { - 'detail': "You (user: 1) don't have this capability: academy_activate for academy 1", - 'status_code': 403 + "detail": "You (user: 1) don't have this capability: academy_activate for academy 1", + "status_code": 403, } self.assertEqual(json, expected) @@ -57,94 +58,84 @@ def test_academy_without_capability(self): def test_academy_when_academy_deleted(self): """Test /academy/activate with academy status deleted""" self.headers(academy=1) - academy_kwargs = {'status': 'DELETED'} + academy_kwargs = {"status": "DELETED"} model = self.generate_models( authenticate=True, profile_academy=True, - capability='academy_activate', - role='potato', + capability="academy_activate", + role="potato", academy_kwargs=academy_kwargs, ) - url = reverse_lazy('admissions:academy_activate') + url = reverse_lazy("admissions:academy_activate") data = {} response = self.client.put(url, data) json = response.json() - expected = {'detail': 'This academy is deleted', 'status_code': 403} + expected = {"detail": "This academy is deleted", "status_code": 403} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) - self.assertEqual(self.bc.database.list_of('admissions.Academy'), [self.bc.format.to_dict(model.academy)]) + self.assertEqual(self.bc.database.list_of("admissions.Academy"), [self.bc.format.to_dict(model.academy)]) def test_academy_when_academy_inactive(self): """Test /academy/activate with capability""" self.headers(academy=1) - academy_kwargs = {'status': 'INACTIVE'} + academy_kwargs = {"status": "INACTIVE"} model = self.generate_models( authenticate=True, profile_academy=True, - capability='academy_activate', - role='potato', + capability="academy_activate", + role="potato", academy_kwargs=academy_kwargs, ) - url = reverse_lazy('admissions:academy_activate') + url = reverse_lazy("admissions:academy_activate") data = {} response = self.client.put(url, data) json = response.json() expected = { - 'id': model.academy.id, - 'slug': model.academy.slug, - 'name': model.academy.name, - 'status': 'ACTIVE', - 'country': { - 'code': model.academy.country.code, - 'name': model.academy.country.name - }, - 'city': { - 'name': model.academy.city.name - }, - 'logo_url': model.academy.logo_url + "id": model.academy.id, + "slug": model.academy.slug, + "name": model.academy.name, + "status": "ACTIVE", + "country": {"code": model.academy.country.code, "name": model.academy.country.name}, + "city": {"name": model.academy.city.name}, + "logo_url": model.academy.logo_url, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('admissions.Academy'), - [{ - **self.bc.format.to_dict(model.academy), 'status': 'ACTIVE' - }]) + self.assertEqual( + self.bc.database.list_of("admissions.Academy"), + [{**self.bc.format.to_dict(model.academy), "status": "ACTIVE"}], + ) def test_academy_when_academy_active(self): """Test /academy/activate with capability""" self.headers(academy=1) - academy_kwargs = {'status': 'ACTIVE'} + academy_kwargs = {"status": "ACTIVE"} model = self.generate_models( authenticate=True, profile_academy=True, - capability='academy_activate', - role='potato', + capability="academy_activate", + role="potato", academy_kwargs=academy_kwargs, ) - url = reverse_lazy('admissions:academy_activate') + url = 
reverse_lazy("admissions:academy_activate") data = {} response = self.client.put(url, data) json = response.json() expected = { - 'id': model.academy.id, - 'slug': model.academy.slug, - 'name': model.academy.name, - 'status': 'ACTIVE', - 'country': { - 'code': model.academy.country.code, - 'name': model.academy.country.name - }, - 'city': { - 'name': model.academy.city.name - }, - 'logo_url': model.academy.logo_url + "id": model.academy.id, + "slug": model.academy.slug, + "name": model.academy.name, + "status": "ACTIVE", + "country": {"code": model.academy.country.code, "name": model.academy.country.name}, + "city": {"name": model.academy.city.name}, + "logo_url": model.academy.logo_url, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('admissions.Academy'), - [{ - **self.bc.format.to_dict(model.academy), 'status': 'ACTIVE' - }]) + self.assertEqual( + self.bc.database.list_of("admissions.Academy"), + [{**self.bc.format.to_dict(model.academy), "status": "ACTIVE"}], + ) diff --git a/breathecode/admissions/tests/urls/tests_academy_cohort.py b/breathecode/admissions/tests/urls/tests_academy_cohort.py index 4a9bce277..9e9e928f0 100644 --- a/breathecode/admissions/tests/urls/tests_academy_cohort.py +++ b/breathecode/admissions/tests/urls/tests_academy_cohort.py @@ -1,6 +1,7 @@ """ Test /academy/cohort """ + import random import re from datetime import datetime, timedelta @@ -24,58 +25,58 @@ def post_serializer(self, academy, syllabus, syllabus_version, data={}): return { - 'id': 0, - 'slug': '', - 'name': '', - 'never_ends': True, - 'remote_available': True, - 'kickoff_date': self.datetime_to_iso(UTC_NOW), - 'current_day': 0, - 'schedule': 0, - 'online_meeting_url': None, - 'timezone': '', - 'is_hidden_on_prework': True, - 'academy': { - 'id': academy.id, - 'slug': academy.slug, - 'name': academy.name, - 'street_address': academy.street_address, - 'country': academy.country.code, - 'city': academy.city.id, - 'is_hidden_on_prework': True, + "id": 0, + "slug": "", + "name": "", + "never_ends": True, + "remote_available": True, + "kickoff_date": self.datetime_to_iso(UTC_NOW), + "current_day": 0, + "schedule": 0, + "online_meeting_url": None, + "timezone": "", + "is_hidden_on_prework": True, + "academy": { + "id": academy.id, + "slug": academy.slug, + "name": academy.name, + "street_address": academy.street_address, + "country": academy.country.code, + "city": academy.city.id, + "is_hidden_on_prework": True, }, - 'syllabus_version': syllabus.slug + '.v' + str(syllabus_version.version), - 'ending_date': None, - 'stage': 'INACTIVE', - 'language': 'en', - 'created_at': self.datetime_to_iso(UTC_NOW), - 'updated_at': self.datetime_to_iso(UTC_NOW), + "syllabus_version": syllabus.slug + ".v" + str(syllabus_version.version), + "ending_date": None, + "stage": "INACTIVE", + "language": "en", + "created_at": self.datetime_to_iso(UTC_NOW), + "updated_at": self.datetime_to_iso(UTC_NOW), **data, } def cohort_field(data={}): return { - 'academy_id': 1, - 'current_day': 0, - 'current_module': None, - 'ending_date': None, - 'history_log': None, - 'id': 1, - 'kickoff_date': ..., - 'language': 'en', - 'name': 'They killed kenny', - 'never_ends': True, - 'online_meeting_url': None, - 'private': False, - 'remote_available': True, - 'schedule_id': 1, - 'slug': 'they-killed-kenny', - 'stage': 'FINAL_PROJECT', - 'syllabus_version_id': 1, - 'timezone': 'America/Caracas', - 'is_hidden_on_prework': True, - 'available_as_saas': False, + 
"academy_id": 1, + "current_day": 0, + "current_module": None, + "ending_date": None, + "history_log": None, + "id": 1, + "kickoff_date": ..., + "language": "en", + "name": "They killed kenny", + "never_ends": True, + "online_meeting_url": None, + "private": False, + "remote_available": True, + "schedule_id": 1, + "slug": "they-killed-kenny", + "stage": "FINAL_PROJECT", + "syllabus_version_id": 1, + "timezone": "America/Caracas", + "is_hidden_on_prework": True, + "available_as_saas": False, **data, } @@ -88,34 +89,34 @@ class AcademyCohortTestSuite(AdmissionsTestCase): 🔽🔽🔽 Auth """ - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_cohort__post__without_authorization(self): """Test /academy/cohort without auth""" self.headers(academy=1) - url = reverse_lazy('admissions:academy_cohort') + url = reverse_lazy("admissions:academy_cohort") response = self.client.get(url) json = response.json() - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) self.assertEqual(self.all_cohort_time_slot_dict(), []) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_cohort__without_capability(self): """Test /cohort/:id without auth""" self.headers(academy=1) - url = reverse_lazy('admissions:academy_cohort') + url = reverse_lazy("admissions:academy_cohort") self.bc.database.create(authenticate=True) response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': "You (user: 1) don't have this capability: read_all_cohort for academy 1", - 'status_code': 403 - }) + self.assertEqual( + json, + {"detail": "You (user: 1) don't have this capability: read_all_cohort for academy 1", "status_code": 403}, + ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) self.assertEqual(self.all_cohort_time_slot_dict(), []) @@ -123,32 +124,34 @@ def test_academy_cohort__without_capability(self): 🔽🔽🔽 Post """ - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_cohort__post__without_profile_academy(self): """Test /academy/cohort without auth""" from breathecode.admissions.signals import cohort_saved 
self.headers(academy=1) - model = self.bc.database.create(authenticate=True, - cohort=True, - user=True, - profile_academy=True, - capability='crud_cohort', - role='potato', - syllabus=True) + model = self.bc.database.create( + authenticate=True, + cohort=True, + user=True, + profile_academy=True, + capability="crud_cohort", + role="potato", + syllabus=True, + ) # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] - url = reverse_lazy('admissions:academy_cohort') + url = reverse_lazy("admissions:academy_cohort") data = {} response = self.client.post(url, data) json = response.json() expected = { - 'detail': 'missing-syllabus-field', - 'status_code': status.HTTP_400_BAD_REQUEST, + "detail": "missing-syllabus-field", + "status_code": status.HTTP_400_BAD_REQUEST, } self.assertEqual(json, expected) @@ -156,39 +159,41 @@ def test_academy_cohort__post__without_profile_academy(self): self.assertEqual(self.all_cohort_time_slot_dict(), []) self.assertEqual(cohort_saved.send_robust.call_args_list, []) - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_cohort__post__with_bad_fields(self): """Test /academy/cohort without auth""" from breathecode.admissions.signals import cohort_saved self.headers(academy=1) - syllabus_kwargs = {'slug': 'they-killed-kenny'} - model = self.bc.database.create(authenticate=True, - cohort=True, - user=True, - profile_academy=True, - capability='crud_cohort', - role='potato', - syllabus_schedule=True, - syllabus=True, - syllabus_kwargs=syllabus_kwargs) + syllabus_kwargs = {"slug": "they-killed-kenny"} + model = self.bc.database.create( + authenticate=True, + cohort=True, + user=True, + profile_academy=True, + capability="crud_cohort", + role="potato", + syllabus_schedule=True, + syllabus=True, + syllabus_kwargs=syllabus_kwargs, + ) # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] - url = reverse_lazy('admissions:academy_cohort') + url = reverse_lazy("admissions:academy_cohort") data = { - 'syllabus': model['syllabus'].id, - 'schedule': 1, + "syllabus": model["syllabus"].id, + "schedule": 1, } response = self.client.post(url, data) json = response.json() expected = { - 'slug': ['This field is required.'], - 'name': ['This field is required.'], - 'kickoff_date': ['This field is required.'], + "slug": ["This field is required."], + "name": ["This field is required."], + "kickoff_date": ["This field is required."], } self.assertEqual(json, expected) @@ -196,226 +201,236 @@ def test_academy_cohort__post__with_bad_fields(self): self.assertEqual(self.all_cohort_time_slot_dict(), []) self.assertEqual(cohort_saved.send_robust.call_args_list, []) - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + 
@patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_cohort__post__with_bad_current_day(self): """Test /academy/cohort without auth""" from breathecode.admissions.signals import cohort_saved self.headers(academy=1) - syllabus_kwargs = {'slug': 'they-killed-kenny'} - model = self.bc.database.create(authenticate=True, - cohort=True, - user=True, - profile_academy=True, - capability='crud_cohort', - role='potato', - syllabus=True, - syllabus_schedule=True, - syllabus_kwargs=syllabus_kwargs) + syllabus_kwargs = {"slug": "they-killed-kenny"} + model = self.bc.database.create( + authenticate=True, + cohort=True, + user=True, + profile_academy=True, + capability="crud_cohort", + role="potato", + syllabus=True, + syllabus_schedule=True, + syllabus_kwargs=syllabus_kwargs, + ) # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] - url = reverse_lazy('admissions:academy_cohort') + url = reverse_lazy("admissions:academy_cohort") data = { - 'syllabus': model['syllabus'].id, - 'current_day': 999, - 'slug': 'they-killed-kenny', - 'name': 'They killed kenny', - 'kickoff_date': datetime.today().isoformat(), - 'schedule': 1, + "syllabus": model["syllabus"].id, + "current_day": 999, + "slug": "they-killed-kenny", + "name": "They killed kenny", + "kickoff_date": datetime.today().isoformat(), + "schedule": 1, } response = self.client.post(url, data) json = response.json() - expected = {'detail': 'current-day-not-allowed', 'status_code': 400} + expected = {"detail": "current-day-not-allowed", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual(self.all_cohort_time_slot_dict(), []) self.assertEqual(cohort_saved.send_robust.call_args_list, []) - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_cohort__post__without_ending_date_or_never_ends(self): """Test /academy/cohort without auth""" from breathecode.admissions.signals import cohort_saved self.headers(academy=1) - syllabus_kwargs = {'slug': 'they-killed-kenny'} - model = self.bc.database.create(authenticate=True, - user=True, - profile_academy=True, - capability='crud_cohort', - role='potato', - syllabus_schedule=True, - syllabus=True, - syllabus_version=True, - skip_cohort=True, - syllabus_schedule_time_slot=True, - syllabus_kwargs=syllabus_kwargs) + syllabus_kwargs = {"slug": "they-killed-kenny"} + model = self.bc.database.create( + authenticate=True, + user=True, + profile_academy=True, + capability="crud_cohort", + role="potato", + syllabus_schedule=True, + syllabus=True, + syllabus_version=True, + skip_cohort=True, + syllabus_schedule_time_slot=True, + syllabus_kwargs=syllabus_kwargs, + ) # reset because this call are coming from mixer 
cohort_saved.send_robust.call_args_list = [] - url = reverse_lazy('admissions:academy_cohort') + url = reverse_lazy("admissions:academy_cohort") data = { - 'syllabus': f'{model.syllabus.slug}.v{model.syllabus_version.version}', - 'slug': 'they-killed-kenny', - 'name': 'They killed kenny', - 'kickoff_date': datetime.today().isoformat(), - 'schedule': 1, + "syllabus": f"{model.syllabus.slug}.v{model.syllabus_version.version}", + "slug": "they-killed-kenny", + "name": "They killed kenny", + "kickoff_date": datetime.today().isoformat(), + "schedule": 1, } response = self.client.post(url, data) json = response.json() expected = { - 'detail': 'cohort-without-ending-date-and-never-ends', - 'status_code': 400, + "detail": "cohort-without-ending-date-and-never-ends", + "status_code": 400, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) self.assertEqual(self.all_cohort_time_slot_dict(), []) self.assertEqual(cohort_saved.send_robust.call_args_list, []) - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_cohort__post__with_ending_date_and_never_ends_true(self): """Test /academy/cohort without auth""" from breathecode.admissions.signals import cohort_saved self.headers(academy=1) - syllabus_kwargs = {'slug': 'they-killed-kenny'} - model = self.bc.database.create(authenticate=True, - user=True, - profile_academy=True, - capability='crud_cohort', - role='potato', - syllabus_schedule=True, - syllabus=True, - syllabus_version=True, - skip_cohort=True, - syllabus_schedule_time_slot=True, - syllabus_kwargs=syllabus_kwargs) + syllabus_kwargs = {"slug": "they-killed-kenny"} + model = self.bc.database.create( + authenticate=True, + user=True, + profile_academy=True, + capability="crud_cohort", + role="potato", + syllabus_schedule=True, + syllabus=True, + syllabus_version=True, + skip_cohort=True, + syllabus_schedule_time_slot=True, + syllabus_kwargs=syllabus_kwargs, + ) # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] - url = reverse_lazy('admissions:academy_cohort') + url = reverse_lazy("admissions:academy_cohort") data = { - 'syllabus': f'{model.syllabus.slug}.v{model.syllabus_version.version}', - 'slug': 'they-killed-kenny', - 'name': 'They killed kenny', - 'kickoff_date': datetime.today().isoformat(), - 'ending_date': datetime.today().isoformat(), - 'never_ends': True, - 'schedule': 1, + "syllabus": f"{model.syllabus.slug}.v{model.syllabus_version.version}", + "slug": "they-killed-kenny", + "name": "They killed kenny", + "kickoff_date": datetime.today().isoformat(), + "ending_date": datetime.today().isoformat(), + "never_ends": True, + "schedule": 1, } response = self.client.post(url, data) json = response.json() expected = { - 'detail': 'cohort-with-ending-date-and-never-ends', - 'status_code': 400, + "detail": 
"cohort-with-ending-date-and-never-ends", + "status_code": 400, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) self.assertEqual(self.all_cohort_time_slot_dict(), []) self.assertEqual(cohort_saved.send_robust.call_args_list, []) - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_cohort__post__without_ending_date_and_never_ends_false(self): """Test /academy/cohort without auth""" from breathecode.admissions.signals import cohort_saved self.headers(academy=1) - syllabus_kwargs = {'slug': 'they-killed-kenny'} - model = self.bc.database.create(authenticate=True, - user=True, - profile_academy=True, - capability='crud_cohort', - role='potato', - syllabus_schedule=True, - syllabus=True, - syllabus_version=True, - skip_cohort=True, - syllabus_schedule_time_slot=True, - syllabus_kwargs=syllabus_kwargs) + syllabus_kwargs = {"slug": "they-killed-kenny"} + model = self.bc.database.create( + authenticate=True, + user=True, + profile_academy=True, + capability="crud_cohort", + role="potato", + syllabus_schedule=True, + syllabus=True, + syllabus_version=True, + skip_cohort=True, + syllabus_schedule_time_slot=True, + syllabus_kwargs=syllabus_kwargs, + ) # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] - url = reverse_lazy('admissions:academy_cohort') + url = reverse_lazy("admissions:academy_cohort") data = { - 'syllabus': f'{model.syllabus.slug}.v{model.syllabus_version.version}', - 'slug': 'they-killed-kenny', - 'name': 'They killed kenny', - 'kickoff_date': datetime.today().isoformat(), - 'never_ends': False, - 'schedule': 1, + "syllabus": f"{model.syllabus.slug}.v{model.syllabus_version.version}", + "slug": "they-killed-kenny", + "name": "They killed kenny", + "kickoff_date": datetime.today().isoformat(), + "never_ends": False, + "schedule": 1, } response = self.client.post(url, data) json = response.json() expected = { - 'detail': 'cohort-without-ending-date-and-never-ends', - 'status_code': 400, + "detail": "cohort-without-ending-date-and-never-ends", + "status_code": 400, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) self.assertEqual(self.all_cohort_time_slot_dict(), []) self.assertEqual(cohort_saved.send_robust.call_args_list, []) - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", 
MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_cohort__post__with_kickoff_date_null(self): """Test /academy/cohort without auth""" from breathecode.admissions.signals import cohort_saved self.headers(academy=1) - syllabus_kwargs = {'slug': 'they-killed-kenny'} - model = self.bc.database.create(authenticate=True, - user=True, - profile_academy=True, - capability='crud_cohort', - role='potato', - syllabus_schedule=True, - syllabus=True, - syllabus_version=True, - skip_cohort=True, - syllabus_schedule_time_slot=True, - syllabus_kwargs=syllabus_kwargs) + syllabus_kwargs = {"slug": "they-killed-kenny"} + model = self.bc.database.create( + authenticate=True, + user=True, + profile_academy=True, + capability="crud_cohort", + role="potato", + syllabus_schedule=True, + syllabus=True, + syllabus_version=True, + skip_cohort=True, + syllabus_schedule_time_slot=True, + syllabus_kwargs=syllabus_kwargs, + ) # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] - url = reverse_lazy('admissions:academy_cohort') + url = reverse_lazy("admissions:academy_cohort") data = { - 'syllabus': f'{model.syllabus.slug}.v{model.syllabus_version.version}', - 'slug': 'they-killed-kenny', - 'name': 'They killed kenny', - 'kickoff_date': None, - 'never_ends': False, - 'schedule': 1, + "syllabus": f"{model.syllabus.slug}.v{model.syllabus_version.version}", + "slug": "they-killed-kenny", + "name": "They killed kenny", + "kickoff_date": None, + "never_ends": False, + "schedule": 1, } - response = self.client.post(url, data, format='json') + response = self.client.post(url, data, format="json") json = response.json() expected = { - 'kickoff_date': ['This field may not be null.'], + "kickoff_date": ["This field may not be null."], } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) self.assertEqual(self.all_cohort_time_slot_dict(), []) self.assertEqual(cohort_saved.send_robust.call_args_list, []) @@ -423,792 +438,900 @@ def test_academy_cohort__post__with_kickoff_date_null(self): 🔽🔽🔽 Put assigning the syllabus version 1 """ - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_cohort__post__assigning_syllabus_version_1(self): from breathecode.admissions.signals import cohort_saved self.headers(academy=1) - syllabus_kwargs = {'slug': 'they-killed-kenny'} - syllabus_version = {'version': 1} - model = self.bc.database.create(authenticate=True, - user=True, - profile_academy=True, - capability='crud_cohort', - role='potato', - syllabus_schedule=True, - syllabus=True, - syllabus_version=syllabus_version, - skip_cohort=True, - syllabus_schedule_time_slot=True, - syllabus_kwargs=syllabus_kwargs) + syllabus_kwargs = {"slug": "they-killed-kenny"} + syllabus_version = {"version": 1} + 
model = self.bc.database.create( + authenticate=True, + user=True, + profile_academy=True, + capability="crud_cohort", + role="potato", + syllabus_schedule=True, + syllabus=True, + syllabus_version=syllabus_version, + skip_cohort=True, + syllabus_schedule_time_slot=True, + syllabus_kwargs=syllabus_kwargs, + ) # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] - models_dict = self.bc.database.list_of('admissions.Cohort') - url = reverse_lazy('admissions:academy_cohort') + models_dict = self.bc.database.list_of("admissions.Cohort") + url = reverse_lazy("admissions:academy_cohort") data = { - 'syllabus': f'{model.syllabus.slug}.v{model.syllabus_version.version}', - 'slug': 'they-killed-kenny', - 'name': 'They killed kenny', - 'kickoff_date': datetime.today().isoformat(), - 'never_ends': True, - 'schedule': 1, + "syllabus": f"{model.syllabus.slug}.v{model.syllabus_version.version}", + "slug": "they-killed-kenny", + "name": "They killed kenny", + "kickoff_date": datetime.today().isoformat(), + "never_ends": True, + "schedule": 1, } response = self.client.post(url, data) json = response.json() - expected = {'detail': 'assigning-a-syllabus-version-1', 'status_code': 400} + expected = {"detail": "assigning-a-syllabus-version-1", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) self.assertEqual(self.all_cohort_time_slot_dict(), []) self.assertEqual(cohort_saved.send_robust.call_args_list, []) - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_cohort__post__without_timezone(self): """Test /academy/cohort without auth""" from breathecode.admissions.signals import cohort_saved self.headers(academy=1) - syllabus_kwargs = {'slug': 'they-killed-kenny'} - model = self.bc.database.create(authenticate=True, - user=True, - profile_academy=True, - capability='crud_cohort', - role='potato', - syllabus_schedule=True, - syllabus=True, - syllabus_version=True, - skip_cohort=True, - syllabus_schedule_time_slot=True, - syllabus_kwargs=syllabus_kwargs) + syllabus_kwargs = {"slug": "they-killed-kenny"} + model = self.bc.database.create( + authenticate=True, + user=True, + profile_academy=True, + capability="crud_cohort", + role="potato", + syllabus_schedule=True, + syllabus=True, + syllabus_version=True, + skip_cohort=True, + syllabus_schedule_time_slot=True, + syllabus_kwargs=syllabus_kwargs, + ) # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] - models_dict = self.bc.database.list_of('admissions.Cohort') - url = reverse_lazy('admissions:academy_cohort') + models_dict = self.bc.database.list_of("admissions.Cohort") + url = reverse_lazy("admissions:academy_cohort") data = { - 'syllabus': f'{model.syllabus.slug}.v{model.syllabus_version.version}', - 'slug': 'they-killed-kenny', - 'name': 'They killed 
kenny', - 'kickoff_date': datetime.today().isoformat(), - 'never_ends': True, - 'remote_available': True, - 'schedule': 1, + "syllabus": f"{model.syllabus.slug}.v{model.syllabus_version.version}", + "slug": "they-killed-kenny", + "name": "They killed kenny", + "kickoff_date": datetime.today().isoformat(), + "never_ends": True, + "remote_available": True, + "schedule": 1, } response = self.client.post(url, data) json = response.json() cohort = self.get_cohort(1) expected = { - 'id': cohort.id, - 'slug': cohort.slug, - 'name': cohort.name, - 'never_ends': True, - 'remote_available': True, - 'kickoff_date': self.datetime_to_iso(cohort.kickoff_date), - 'current_day': cohort.current_day, - 'schedule': cohort.schedule.id, - 'online_meeting_url': cohort.online_meeting_url, - 'timezone': cohort.timezone, - 'is_hidden_on_prework': cohort.is_hidden_on_prework, - 'available_as_saas': cohort.available_as_saas, - 'academy': { - 'id': cohort.academy.id, - 'slug': cohort.academy.slug, - 'name': cohort.academy.name, - 'street_address': cohort.academy.street_address, - 'country': cohort.academy.country.code, - 'city': cohort.academy.city.id, - 'is_hidden_on_prework': cohort.academy.is_hidden_on_prework + "id": cohort.id, + "slug": cohort.slug, + "name": cohort.name, + "never_ends": True, + "remote_available": True, + "kickoff_date": self.datetime_to_iso(cohort.kickoff_date), + "current_day": cohort.current_day, + "schedule": cohort.schedule.id, + "online_meeting_url": cohort.online_meeting_url, + "timezone": cohort.timezone, + "is_hidden_on_prework": cohort.is_hidden_on_prework, + "available_as_saas": cohort.available_as_saas, + "academy": { + "id": cohort.academy.id, + "slug": cohort.academy.slug, + "name": cohort.academy.name, + "street_address": cohort.academy.street_address, + "country": cohort.academy.country.code, + "city": cohort.academy.city.id, + "is_hidden_on_prework": cohort.academy.is_hidden_on_prework, }, - 'syllabus_version': model['syllabus'].slug + '.v' + str(model['syllabus_version'].version), - 'ending_date': cohort.ending_date, - 'stage': cohort.stage, - 'language': cohort.language.lower(), - 'created_at': self.datetime_to_iso(cohort.created_at), - 'updated_at': self.datetime_to_iso(cohort.updated_at), + "syllabus_version": model["syllabus"].slug + ".v" + str(model["syllabus_version"].version), + "ending_date": cohort.ending_date, + "stage": cohort.stage, + "language": cohort.language.lower(), + "created_at": self.datetime_to_iso(cohort.created_at), + "updated_at": self.datetime_to_iso(cohort.updated_at), } - del data['kickoff_date'] + del data["kickoff_date"] cohort_two = cohort.__dict__.copy() cohort_two.update(data) - del cohort_two['syllabus'] - del cohort_two['schedule'] + del cohort_two["syllabus"] + del cohort_two["schedule"] models_dict.append(self.remove_dinamics_fields({**cohort_two})) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), models_dict) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), models_dict) self.assertEqual(self.all_cohort_time_slot_dict(), []) - self.assertEqual(cohort_saved.send_robust.call_args_list, - [call(instance=cohort, sender=cohort.__class__, created=True)]) + self.assertEqual( + cohort_saved.send_robust.call_args_list, [call(instance=cohort, sender=cohort.__class__, created=True)] + ) - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - 
@patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_cohort__post__kickoff_date_greater_than_ending_date(self): """Test /academy/cohort without auth""" from breathecode.admissions.signals import cohort_saved self.headers(academy=1) - syllabus_kwargs = {'slug': 'they-killed-kenny'} - model = self.generate_models(authenticate=True, - user=True, - profile_academy=True, - capability='crud_cohort', - role='potato', - syllabus_schedule=True, - syllabus=True, - syllabus_version=True, - skip_cohort=True, - syllabus_schedule_time_slot=True, - syllabus_kwargs=syllabus_kwargs) + syllabus_kwargs = {"slug": "they-killed-kenny"} + model = self.generate_models( + authenticate=True, + user=True, + profile_academy=True, + capability="crud_cohort", + role="potato", + syllabus_schedule=True, + syllabus=True, + syllabus_version=True, + skip_cohort=True, + syllabus_schedule_time_slot=True, + syllabus_kwargs=syllabus_kwargs, + ) # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] - url = reverse_lazy('admissions:academy_cohort') + url = reverse_lazy("admissions:academy_cohort") utc_now = timezone.now() data = { - 'syllabus': f'{model.syllabus.slug}.v{model.syllabus_version.version}', - 'slug': 'they-killed-kenny', - 'name': 'They killed kenny', - 'kickoff_date': datetime.today().isoformat(), - 'schedule': 1, - 'kickoff_date': utc_now + timedelta(seconds=1), - 'ending_date': utc_now, + "syllabus": f"{model.syllabus.slug}.v{model.syllabus_version.version}", + "slug": "they-killed-kenny", + "name": "They killed kenny", + "kickoff_date": datetime.today().isoformat(), + "schedule": 1, + "kickoff_date": utc_now + timedelta(seconds=1), + "ending_date": utc_now, } response = self.client.post(url, data) json = response.json() - expected = {'detail': 'kickoff-date-greather-than-ending-date', 'status_code': 400} + expected = {"detail": "kickoff-date-greather-than-ending-date", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) self.assertEqual(self.all_cohort_time_slot_dict(), []) self.assertEqual(cohort_saved.send_robust.call_args_list, []) - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_cohort__post__with_timezone(self): """Test /academy/cohort without auth""" from breathecode.admissions.signals import cohort_saved self.headers(academy=1) - syllabus_kwargs = {'slug': 'they-killed-kenny'} - academy_kwargs = 
{'timezone': 'America/Caracas'} - model = self.bc.database.create(authenticate=True, - user=True, - profile_academy=True, - capability='crud_cohort', - role='potato', - syllabus_schedule=True, - syllabus=True, - syllabus_version=True, - skip_cohort=True, - syllabus_schedule_time_slot=True, - syllabus_kwargs=syllabus_kwargs, - academy_kwargs=academy_kwargs) + syllabus_kwargs = {"slug": "they-killed-kenny"} + academy_kwargs = {"timezone": "America/Caracas"} + model = self.bc.database.create( + authenticate=True, + user=True, + profile_academy=True, + capability="crud_cohort", + role="potato", + syllabus_schedule=True, + syllabus=True, + syllabus_version=True, + skip_cohort=True, + syllabus_schedule_time_slot=True, + syllabus_kwargs=syllabus_kwargs, + academy_kwargs=academy_kwargs, + ) # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] - models_dict = self.bc.database.list_of('admissions.Cohort') - url = reverse_lazy('admissions:academy_cohort') + models_dict = self.bc.database.list_of("admissions.Cohort") + url = reverse_lazy("admissions:academy_cohort") ending_date = datetime.today() + timedelta(days=18) data = { - 'syllabus': f'{model.syllabus.slug}.v{model.syllabus_version.version}', - 'slug': 'they-killed-kenny', - 'name': 'They killed kenny', - 'kickoff_date': datetime.today().isoformat(), - 'ending_date': ending_date.isoformat(), - 'never_ends': False, - 'remote_available': True, - 'schedule': 1, + "syllabus": f"{model.syllabus.slug}.v{model.syllabus_version.version}", + "slug": "they-killed-kenny", + "name": "They killed kenny", + "kickoff_date": datetime.today().isoformat(), + "ending_date": ending_date.isoformat(), + "never_ends": False, + "remote_available": True, + "schedule": 1, } response = self.client.post(url, data) json = response.json() cohort = self.get_cohort(1) expected = { - 'id': cohort.id, - 'slug': cohort.slug, - 'name': cohort.name, - 'never_ends': False, - 'remote_available': True, - 'kickoff_date': self.datetime_to_iso(cohort.kickoff_date), - 'current_day': cohort.current_day, - 'schedule': cohort.schedule.id, - 'online_meeting_url': cohort.online_meeting_url, - 'timezone': model.academy.timezone, - 'available_as_saas': cohort.available_as_saas, - 'is_hidden_on_prework': True, - 'academy': { - 'id': cohort.academy.id, - 'slug': cohort.academy.slug, - 'name': cohort.academy.name, - 'street_address': cohort.academy.street_address, - 'country': cohort.academy.country.code, - 'city': cohort.academy.city.id, - 'city': cohort.academy.city.id, - 'is_hidden_on_prework': cohort.academy.is_hidden_on_prework + "id": cohort.id, + "slug": cohort.slug, + "name": cohort.name, + "never_ends": False, + "remote_available": True, + "kickoff_date": self.datetime_to_iso(cohort.kickoff_date), + "current_day": cohort.current_day, + "schedule": cohort.schedule.id, + "online_meeting_url": cohort.online_meeting_url, + "timezone": model.academy.timezone, + "available_as_saas": cohort.available_as_saas, + "is_hidden_on_prework": True, + "academy": { + "id": cohort.academy.id, + "slug": cohort.academy.slug, + "name": cohort.academy.name, + "street_address": cohort.academy.street_address, + "country": cohort.academy.country.code, + "city": cohort.academy.city.id, + "city": cohort.academy.city.id, + "is_hidden_on_prework": cohort.academy.is_hidden_on_prework, }, - 'syllabus_version': model['syllabus'].slug + '.v' + str(model['syllabus_version'].version), - 'ending_date': self.datetime_to_iso(cohort.ending_date), - 'stage': cohort.stage, - 'language': 
cohort.language.lower(), - 'created_at': self.datetime_to_iso(cohort.created_at), - 'updated_at': self.datetime_to_iso(cohort.updated_at), + "syllabus_version": model["syllabus"].slug + ".v" + str(model["syllabus_version"].version), + "ending_date": self.datetime_to_iso(cohort.ending_date), + "stage": cohort.stage, + "language": cohort.language.lower(), + "created_at": self.datetime_to_iso(cohort.created_at), + "updated_at": self.datetime_to_iso(cohort.updated_at), } - del data['kickoff_date'] - del data['ending_date'] + del data["kickoff_date"] + del data["ending_date"] cohort_two = cohort.__dict__.copy() cohort_two.update(data) - del cohort_two['syllabus'] - del cohort_two['schedule'] + del cohort_two["syllabus"] + del cohort_two["schedule"] models_dict.append(self.remove_dinamics_fields({**cohort_two})) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), models_dict) - self.assertEqual(self.all_cohort_time_slot_dict(), [{ - 'id': 1, - 'cohort_id': 1, - 'removed_at': model.syllabus_schedule_time_slot.removed_at, - 'starting_at': model.syllabus_schedule_time_slot.starting_at, - 'ending_at': model.syllabus_schedule_time_slot.ending_at, - 'recurrent': model.syllabus_schedule_time_slot.recurrent, - 'recurrency_type': model.syllabus_schedule_time_slot.recurrency_type, - 'timezone': model.academy.timezone, - }]) - self.assertEqual(cohort_saved.send_robust.call_args_list, - [call(instance=cohort, sender=cohort.__class__, created=True)]) - - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), models_dict) + self.assertEqual( + self.all_cohort_time_slot_dict(), + [ + { + "id": 1, + "cohort_id": 1, + "removed_at": model.syllabus_schedule_time_slot.removed_at, + "starting_at": model.syllabus_schedule_time_slot.starting_at, + "ending_at": model.syllabus_schedule_time_slot.ending_at, + "recurrent": model.syllabus_schedule_time_slot.recurrent, + "recurrency_type": model.syllabus_schedule_time_slot.recurrency_type, + "timezone": model.academy.timezone, + } + ], + ) + self.assertEqual( + cohort_saved.send_robust.call_args_list, [call(instance=cohort, sender=cohort.__class__, created=True)] + ) + + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_cohort__post__with_timezone__passing_custom_timezone(self): """Test /academy/cohort without auth""" from breathecode.admissions.signals import cohort_saved self.headers(academy=1) - syllabus_kwargs = {'slug': 'they-killed-kenny'} - academy_kwargs = {'timezone': 'America/Caracas'} - model = self.bc.database.create(authenticate=True, - user=True, - profile_academy=True, - capability='crud_cohort', - role='potato', - syllabus_schedule=True, - syllabus=True, - syllabus_version=True, - skip_cohort=True, - syllabus_schedule_time_slot=True, - syllabus_kwargs=syllabus_kwargs, - academy_kwargs=academy_kwargs) + syllabus_kwargs = {"slug": "they-killed-kenny"} + academy_kwargs = {"timezone": "America/Caracas"} + model = 
self.bc.database.create( + authenticate=True, + user=True, + profile_academy=True, + capability="crud_cohort", + role="potato", + syllabus_schedule=True, + syllabus=True, + syllabus_version=True, + skip_cohort=True, + syllabus_schedule_time_slot=True, + syllabus_kwargs=syllabus_kwargs, + academy_kwargs=academy_kwargs, + ) # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] - models_dict = self.bc.database.list_of('admissions.Cohort') - url = reverse_lazy('admissions:academy_cohort') + models_dict = self.bc.database.list_of("admissions.Cohort") + url = reverse_lazy("admissions:academy_cohort") ending_date = datetime.today() + timedelta(days=18) data = { - 'syllabus': f'{model.syllabus.slug}.v{model.syllabus_version.version}', - 'slug': 'they-killed-kenny', - 'name': 'They killed kenny', - 'kickoff_date': datetime.today().isoformat(), - 'ending_date': ending_date.isoformat(), - 'never_ends': False, - 'remote_available': True, - 'schedule': 1, - 'timezone': 'Pacific/Pago_Pago', + "syllabus": f"{model.syllabus.slug}.v{model.syllabus_version.version}", + "slug": "they-killed-kenny", + "name": "They killed kenny", + "kickoff_date": datetime.today().isoformat(), + "ending_date": ending_date.isoformat(), + "never_ends": False, + "remote_available": True, + "schedule": 1, + "timezone": "Pacific/Pago_Pago", } response = self.client.post(url, data) json = response.json() cohort = self.get_cohort(1) expected = { - 'id': cohort.id, - 'slug': cohort.slug, - 'name': cohort.name, - 'never_ends': False, - 'remote_available': True, - 'kickoff_date': self.datetime_to_iso(cohort.kickoff_date), - 'current_day': cohort.current_day, - 'schedule': cohort.schedule.id, - 'online_meeting_url': cohort.online_meeting_url, - 'timezone': 'Pacific/Pago_Pago', - 'is_hidden_on_prework': cohort.is_hidden_on_prework, - 'available_as_saas': cohort.available_as_saas, - 'academy': { - 'id': cohort.academy.id, - 'slug': cohort.academy.slug, - 'name': cohort.academy.name, - 'street_address': cohort.academy.street_address, - 'country': cohort.academy.country.code, - 'city': cohort.academy.city.id, - 'is_hidden_on_prework': cohort.academy.is_hidden_on_prework, + "id": cohort.id, + "slug": cohort.slug, + "name": cohort.name, + "never_ends": False, + "remote_available": True, + "kickoff_date": self.datetime_to_iso(cohort.kickoff_date), + "current_day": cohort.current_day, + "schedule": cohort.schedule.id, + "online_meeting_url": cohort.online_meeting_url, + "timezone": "Pacific/Pago_Pago", + "is_hidden_on_prework": cohort.is_hidden_on_prework, + "available_as_saas": cohort.available_as_saas, + "academy": { + "id": cohort.academy.id, + "slug": cohort.academy.slug, + "name": cohort.academy.name, + "street_address": cohort.academy.street_address, + "country": cohort.academy.country.code, + "city": cohort.academy.city.id, + "is_hidden_on_prework": cohort.academy.is_hidden_on_prework, }, - 'syllabus_version': model['syllabus'].slug + '.v' + str(model['syllabus_version'].version), - 'ending_date': self.datetime_to_iso(cohort.ending_date), - 'stage': cohort.stage, - 'language': cohort.language.lower(), - 'created_at': self.datetime_to_iso(cohort.created_at), - 'updated_at': self.datetime_to_iso(cohort.updated_at), + "syllabus_version": model["syllabus"].slug + ".v" + str(model["syllabus_version"].version), + "ending_date": self.datetime_to_iso(cohort.ending_date), + "stage": cohort.stage, + "language": cohort.language.lower(), + "created_at": self.datetime_to_iso(cohort.created_at), + "updated_at": 
self.datetime_to_iso(cohort.updated_at), } - del data['kickoff_date'] - del data['ending_date'] + del data["kickoff_date"] + del data["ending_date"] cohort_two = cohort.__dict__.copy() cohort_two.update(data) - del cohort_two['syllabus'] - del cohort_two['schedule'] + del cohort_two["syllabus"] + del cohort_two["schedule"] models_dict.append(self.remove_dinamics_fields({**cohort_two})) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), models_dict) - self.assertEqual(self.all_cohort_time_slot_dict(), [{ - 'id': 1, - 'cohort_id': 1, - 'removed_at': model.syllabus_schedule_time_slot.removed_at, - 'starting_at': model.syllabus_schedule_time_slot.starting_at, - 'ending_at': model.syllabus_schedule_time_slot.ending_at, - 'recurrent': model.syllabus_schedule_time_slot.recurrent, - 'recurrency_type': model.syllabus_schedule_time_slot.recurrency_type, - 'timezone': 'Pacific/Pago_Pago', - }]) - self.assertEqual(cohort_saved.send_robust.call_args_list, - [call(instance=cohort, sender=cohort.__class__, created=True)]) - - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), models_dict) + self.assertEqual( + self.all_cohort_time_slot_dict(), + [ + { + "id": 1, + "cohort_id": 1, + "removed_at": model.syllabus_schedule_time_slot.removed_at, + "starting_at": model.syllabus_schedule_time_slot.starting_at, + "ending_at": model.syllabus_schedule_time_slot.ending_at, + "recurrent": model.syllabus_schedule_time_slot.recurrent, + "recurrency_type": model.syllabus_schedule_time_slot.recurrency_type, + "timezone": "Pacific/Pago_Pago", + } + ], + ) + self.assertEqual( + cohort_saved.send_robust.call_args_list, [call(instance=cohort, sender=cohort.__class__, created=True)] + ) + + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_cohort__post__passing_all_statuses__uppercase(self): from breathecode.admissions.signals import cohort_saved self.headers(academy=1) - syllabus_kwargs = {'slug': 'they-killed-kenny'} - academy_kwargs = {'timezone': 'America/Caracas'} - model = self.bc.database.create(authenticate=True, - user=True, - profile_academy=True, - capability='crud_cohort', - role='potato', - syllabus_schedule=True, - syllabus=True, - syllabus_version=True, - skip_cohort=True, - syllabus_schedule_time_slot=True, - syllabus_kwargs=syllabus_kwargs, - academy_kwargs=academy_kwargs) + syllabus_kwargs = {"slug": "they-killed-kenny"} + academy_kwargs = {"timezone": "America/Caracas"} + model = self.bc.database.create( + authenticate=True, + user=True, + profile_academy=True, + capability="crud_cohort", + role="potato", + syllabus_schedule=True, + syllabus=True, + syllabus_version=True, + skip_cohort=True, + syllabus_schedule_time_slot=True, + syllabus_kwargs=syllabus_kwargs, + academy_kwargs=academy_kwargs, + ) # reset because this call 
are coming from mixer cohort_saved.send_robust.call_args_list = [] - url = reverse_lazy('admissions:academy_cohort') + url = reverse_lazy("admissions:academy_cohort") - stage = random.choice(['INACTIVE', 'PREWORK', 'STARTED', 'FINAL_PROJECT', 'ENDED', 'DELETED']) + stage = random.choice(["INACTIVE", "PREWORK", "STARTED", "FINAL_PROJECT", "ENDED", "DELETED"]) data = { - 'syllabus': f'{model.syllabus.slug}.v{model.syllabus_version.version}', - 'slug': 'they-killed-kenny', - 'name': 'They killed kenny', - 'stage': stage, - 'kickoff_date': UTC_NOW.isoformat(), - 'never_ends': True, - 'remote_available': True, - 'schedule': 1, - 'available_as_saas': False + "syllabus": f"{model.syllabus.slug}.v{model.syllabus_version.version}", + "slug": "they-killed-kenny", + "name": "They killed kenny", + "stage": stage, + "kickoff_date": UTC_NOW.isoformat(), + "never_ends": True, + "remote_available": True, + "schedule": 1, + "available_as_saas": False, } - response = self.client.post(url, data, format='json') + response = self.client.post(url, data, format="json") json = response.json() - expected = post_serializer(self, - model.academy, - model.syllabus, - model.syllabus_version, - data={ - 'id': 1, - 'timezone': 'America/Caracas', - 'stage': stage, - 'slug': 'they-killed-kenny', - 'name': 'They killed kenny', - 'schedule': 1, - 'available_as_saas': False - }) + expected = post_serializer( + self, + model.academy, + model.syllabus, + model.syllabus_version, + data={ + "id": 1, + "timezone": "America/Caracas", + "stage": stage, + "slug": "they-killed-kenny", + "name": "They killed kenny", + "schedule": 1, + "available_as_saas": False, + }, + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), [ - cohort_field({ - 'stage': stage, - 'intro_video': None, - 'accepts_enrollment_suggestions': True, - 'kickoff_date': UTC_NOW, - 'available_as_saas': False - }), - ]) - self.assertEqual(self.all_cohort_time_slot_dict(), [{ - 'id': 1, - 'cohort_id': 1, - 'removed_at': model.syllabus_schedule_time_slot.removed_at, - 'starting_at': model.syllabus_schedule_time_slot.starting_at, - 'ending_at': model.syllabus_schedule_time_slot.ending_at, - 'recurrent': model.syllabus_schedule_time_slot.recurrent, - 'recurrency_type': model.syllabus_schedule_time_slot.recurrency_type, - 'timezone': model.academy.timezone, - }]) + self.assertEqual( + self.bc.database.list_of("admissions.Cohort"), + [ + cohort_field( + { + "stage": stage, + "intro_video": None, + "accepts_enrollment_suggestions": True, + "kickoff_date": UTC_NOW, + "available_as_saas": False, + } + ), + ], + ) + self.assertEqual( + self.all_cohort_time_slot_dict(), + [ + { + "id": 1, + "cohort_id": 1, + "removed_at": model.syllabus_schedule_time_slot.removed_at, + "starting_at": model.syllabus_schedule_time_slot.starting_at, + "ending_at": model.syllabus_schedule_time_slot.ending_at, + "recurrent": model.syllabus_schedule_time_slot.recurrent, + "recurrency_type": model.syllabus_schedule_time_slot.recurrency_type, + "timezone": model.academy.timezone, + } + ], + ) cohort = self.get_cohort(1) - self.assertEqual(cohort_saved.send_robust.call_args_list, [ - call(instance=cohort, sender=cohort.__class__, created=True), - ]) - - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - 
@patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + self.assertEqual( + cohort_saved.send_robust.call_args_list, + [ + call(instance=cohort, sender=cohort.__class__, created=True), + ], + ) + + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_cohort__post__passing_all_statuses__lowercase(self): from breathecode.admissions.signals import cohort_saved self.headers(academy=1) - syllabus_kwargs = {'slug': 'they-killed-kenny'} - academy_kwargs = {'timezone': 'America/Caracas'} - model = self.bc.database.create(authenticate=True, - user=True, - profile_academy=True, - capability='crud_cohort', - role='potato', - syllabus_schedule=True, - syllabus=True, - syllabus_version=True, - skip_cohort=True, - syllabus_schedule_time_slot=True, - syllabus_kwargs=syllabus_kwargs, - academy_kwargs=academy_kwargs) + syllabus_kwargs = {"slug": "they-killed-kenny"} + academy_kwargs = {"timezone": "America/Caracas"} + model = self.bc.database.create( + authenticate=True, + user=True, + profile_academy=True, + capability="crud_cohort", + role="potato", + syllabus_schedule=True, + syllabus=True, + syllabus_version=True, + skip_cohort=True, + syllabus_schedule_time_slot=True, + syllabus_kwargs=syllabus_kwargs, + academy_kwargs=academy_kwargs, + ) # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] - url = reverse_lazy('admissions:academy_cohort') + url = reverse_lazy("admissions:academy_cohort") - stage = random.choice(['INACTIVE', 'PREWORK', 'STARTED', 'FINAL_PROJECT', 'ENDED', 'DELETED']) + stage = random.choice(["INACTIVE", "PREWORK", "STARTED", "FINAL_PROJECT", "ENDED", "DELETED"]) data = { - 'syllabus': f'{model.syllabus.slug}.v{model.syllabus_version.version}', - 'slug': 'they-killed-kenny', - 'name': 'They killed kenny', - 'stage': stage.lower(), - 'kickoff_date': UTC_NOW.isoformat(), - 'never_ends': True, - 'remote_available': True, - 'schedule': 1, - 'available_as_saas': False + "syllabus": f"{model.syllabus.slug}.v{model.syllabus_version.version}", + "slug": "they-killed-kenny", + "name": "They killed kenny", + "stage": stage.lower(), + "kickoff_date": UTC_NOW.isoformat(), + "never_ends": True, + "remote_available": True, + "schedule": 1, + "available_as_saas": False, } - response = self.client.post(url, data, format='json') + response = self.client.post(url, data, format="json") json = response.json() - expected = post_serializer(self, - model.academy, - model.syllabus, - model.syllabus_version, - data={ - 'id': 1, - 'timezone': 'America/Caracas', - 'stage': stage, - 'slug': 'they-killed-kenny', - 'name': 'They killed kenny', - 'schedule': 1, - 'available_as_saas': False - }) + expected = post_serializer( + self, + model.academy, + model.syllabus, + model.syllabus_version, + data={ + "id": 1, + "timezone": "America/Caracas", + "stage": stage, + "slug": "they-killed-kenny", + "name": "They killed kenny", + "schedule": 1, + "available_as_saas": False, + }, + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), [ - cohort_field({ - 'stage': stage, - 'intro_video': None, - 
'accepts_enrollment_suggestions': True, - 'kickoff_date': UTC_NOW, - 'available_as_saas': False - }), - ]) - self.assertEqual(self.all_cohort_time_slot_dict(), [{ - 'id': 1, - 'cohort_id': 1, - 'removed_at': model.syllabus_schedule_time_slot.removed_at, - 'starting_at': model.syllabus_schedule_time_slot.starting_at, - 'ending_at': model.syllabus_schedule_time_slot.ending_at, - 'recurrent': model.syllabus_schedule_time_slot.recurrent, - 'recurrency_type': model.syllabus_schedule_time_slot.recurrency_type, - 'timezone': model.academy.timezone, - }]) + self.assertEqual( + self.bc.database.list_of("admissions.Cohort"), + [ + cohort_field( + { + "stage": stage, + "intro_video": None, + "accepts_enrollment_suggestions": True, + "kickoff_date": UTC_NOW, + "available_as_saas": False, + } + ), + ], + ) + self.assertEqual( + self.all_cohort_time_slot_dict(), + [ + { + "id": 1, + "cohort_id": 1, + "removed_at": model.syllabus_schedule_time_slot.removed_at, + "starting_at": model.syllabus_schedule_time_slot.starting_at, + "ending_at": model.syllabus_schedule_time_slot.ending_at, + "recurrent": model.syllabus_schedule_time_slot.recurrent, + "recurrency_type": model.syllabus_schedule_time_slot.recurrency_type, + "timezone": model.academy.timezone, + } + ], + ) cohort = self.get_cohort(1) - self.assertEqual(cohort_saved.send_robust.call_args_list, [ - call(instance=cohort, sender=cohort.__class__, created=True), - ]) - - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + self.assertEqual( + cohort_saved.send_robust.call_args_list, + [ + call(instance=cohort, sender=cohort.__class__, created=True), + ], + ) + + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_cohort__post__passing_available_as_saas_with__true(self): from breathecode.admissions.signals import cohort_saved self.headers(academy=1) - syllabus_kwargs = {'slug': 'they-killed-kenny'} - academy_kwargs = {'timezone': 'America/Caracas'} - model = self.bc.database.create(authenticate=True, - user=True, - profile_academy=True, - capability='crud_cohort', - role='potato', - syllabus_schedule=True, - syllabus=True, - syllabus_version=True, - skip_cohort=True, - syllabus_schedule_time_slot=True, - syllabus_kwargs=syllabus_kwargs, - academy_kwargs=academy_kwargs) + syllabus_kwargs = {"slug": "they-killed-kenny"} + academy_kwargs = {"timezone": "America/Caracas"} + model = self.bc.database.create( + authenticate=True, + user=True, + profile_academy=True, + capability="crud_cohort", + role="potato", + syllabus_schedule=True, + syllabus=True, + syllabus_version=True, + skip_cohort=True, + syllabus_schedule_time_slot=True, + syllabus_kwargs=syllabus_kwargs, + academy_kwargs=academy_kwargs, + ) # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] - url = reverse_lazy('admissions:academy_cohort') + url = reverse_lazy("admissions:academy_cohort") - stage = random.choice(['INACTIVE', 'PREWORK', 'STARTED', 
'FINAL_PROJECT', 'ENDED', 'DELETED']) + stage = random.choice(["INACTIVE", "PREWORK", "STARTED", "FINAL_PROJECT", "ENDED", "DELETED"]) data = { - 'syllabus': f'{model.syllabus.slug}.v{model.syllabus_version.version}', - 'slug': 'they-killed-kenny', - 'name': 'They killed kenny', - 'stage': stage.lower(), - 'kickoff_date': UTC_NOW.isoformat(), - 'never_ends': True, - 'remote_available': True, - 'schedule': 1, - 'available_as_saas': True + "syllabus": f"{model.syllabus.slug}.v{model.syllabus_version.version}", + "slug": "they-killed-kenny", + "name": "They killed kenny", + "stage": stage.lower(), + "kickoff_date": UTC_NOW.isoformat(), + "never_ends": True, + "remote_available": True, + "schedule": 1, + "available_as_saas": True, } - response = self.client.post(url, data, format='json') + response = self.client.post(url, data, format="json") json = response.json() - expected = post_serializer(self, - model.academy, - model.syllabus, - model.syllabus_version, - data={ - 'id': 1, - 'timezone': 'America/Caracas', - 'stage': stage, - 'slug': 'they-killed-kenny', - 'name': 'They killed kenny', - 'schedule': 1, - 'available_as_saas': True - }) + expected = post_serializer( + self, + model.academy, + model.syllabus, + model.syllabus_version, + data={ + "id": 1, + "timezone": "America/Caracas", + "stage": stage, + "slug": "they-killed-kenny", + "name": "They killed kenny", + "schedule": 1, + "available_as_saas": True, + }, + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), [ - cohort_field({ - 'stage': stage, - 'intro_video': None, - 'accepts_enrollment_suggestions': True, - 'kickoff_date': UTC_NOW, - 'available_as_saas': True - }), - ]) - self.assertEqual(self.all_cohort_time_slot_dict(), [{ - 'id': 1, - 'cohort_id': 1, - 'removed_at': model.syllabus_schedule_time_slot.removed_at, - 'starting_at': model.syllabus_schedule_time_slot.starting_at, - 'ending_at': model.syllabus_schedule_time_slot.ending_at, - 'recurrent': model.syllabus_schedule_time_slot.recurrent, - 'recurrency_type': model.syllabus_schedule_time_slot.recurrency_type, - 'timezone': model.academy.timezone, - }]) + self.assertEqual( + self.bc.database.list_of("admissions.Cohort"), + [ + cohort_field( + { + "stage": stage, + "intro_video": None, + "accepts_enrollment_suggestions": True, + "kickoff_date": UTC_NOW, + "available_as_saas": True, + } + ), + ], + ) + self.assertEqual( + self.all_cohort_time_slot_dict(), + [ + { + "id": 1, + "cohort_id": 1, + "removed_at": model.syllabus_schedule_time_slot.removed_at, + "starting_at": model.syllabus_schedule_time_slot.starting_at, + "ending_at": model.syllabus_schedule_time_slot.ending_at, + "recurrent": model.syllabus_schedule_time_slot.recurrent, + "recurrency_type": model.syllabus_schedule_time_slot.recurrency_type, + "timezone": model.academy.timezone, + } + ], + ) cohort = self.get_cohort(1) - self.assertEqual(cohort_saved.send_robust.call_args_list, [ - call(instance=cohort, sender=cohort.__class__, created=True), - ]) - - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + self.assertEqual( + cohort_saved.send_robust.call_args_list, + [ + call(instance=cohort, 
sender=cohort.__class__, created=True), + ], + ) + + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_cohort__post__passing_available_as_saas_with__false(self): from breathecode.admissions.signals import cohort_saved self.headers(academy=1) - syllabus_kwargs = {'slug': 'they-killed-kenny'} - academy_kwargs = {'timezone': 'America/Caracas'} - model = self.bc.database.create(authenticate=True, - user=True, - profile_academy=True, - capability='crud_cohort', - role='potato', - syllabus_schedule=True, - syllabus=True, - syllabus_version=True, - skip_cohort=True, - syllabus_schedule_time_slot=True, - syllabus_kwargs=syllabus_kwargs, - academy_kwargs=academy_kwargs) + syllabus_kwargs = {"slug": "they-killed-kenny"} + academy_kwargs = {"timezone": "America/Caracas"} + model = self.bc.database.create( + authenticate=True, + user=True, + profile_academy=True, + capability="crud_cohort", + role="potato", + syllabus_schedule=True, + syllabus=True, + syllabus_version=True, + skip_cohort=True, + syllabus_schedule_time_slot=True, + syllabus_kwargs=syllabus_kwargs, + academy_kwargs=academy_kwargs, + ) # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] - url = reverse_lazy('admissions:academy_cohort') + url = reverse_lazy("admissions:academy_cohort") - stage = random.choice(['INACTIVE', 'PREWORK', 'STARTED', 'FINAL_PROJECT', 'ENDED', 'DELETED']) + stage = random.choice(["INACTIVE", "PREWORK", "STARTED", "FINAL_PROJECT", "ENDED", "DELETED"]) data = { - 'syllabus': f'{model.syllabus.slug}.v{model.syllabus_version.version}', - 'slug': 'they-killed-kenny', - 'name': 'They killed kenny', - 'stage': stage.lower(), - 'kickoff_date': UTC_NOW.isoformat(), - 'never_ends': True, - 'remote_available': True, - 'schedule': 1, - 'available_as_saas': False + "syllabus": f"{model.syllabus.slug}.v{model.syllabus_version.version}", + "slug": "they-killed-kenny", + "name": "They killed kenny", + "stage": stage.lower(), + "kickoff_date": UTC_NOW.isoformat(), + "never_ends": True, + "remote_available": True, + "schedule": 1, + "available_as_saas": False, } - response = self.client.post(url, data, format='json') + response = self.client.post(url, data, format="json") json = response.json() - expected = post_serializer(self, - model.academy, - model.syllabus, - model.syllabus_version, - data={ - 'id': 1, - 'timezone': 'America/Caracas', - 'stage': stage, - 'slug': 'they-killed-kenny', - 'name': 'They killed kenny', - 'schedule': 1, - 'available_as_saas': False - }) + expected = post_serializer( + self, + model.academy, + model.syllabus, + model.syllabus_version, + data={ + "id": 1, + "timezone": "America/Caracas", + "stage": stage, + "slug": "they-killed-kenny", + "name": "They killed kenny", + "schedule": 1, + "available_as_saas": False, + }, + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), [ - cohort_field({ - 'stage': stage, - 'intro_video': None, - 'accepts_enrollment_suggestions': True, - 'kickoff_date': UTC_NOW, - 'available_as_saas': False - }), - ]) - self.assertEqual(self.all_cohort_time_slot_dict(), [{ - 'id': 1, - 'cohort_id': 1, - 'removed_at': 
model.syllabus_schedule_time_slot.removed_at, - 'starting_at': model.syllabus_schedule_time_slot.starting_at, - 'ending_at': model.syllabus_schedule_time_slot.ending_at, - 'recurrent': model.syllabus_schedule_time_slot.recurrent, - 'recurrency_type': model.syllabus_schedule_time_slot.recurrency_type, - 'timezone': model.academy.timezone, - }]) + self.assertEqual( + self.bc.database.list_of("admissions.Cohort"), + [ + cohort_field( + { + "stage": stage, + "intro_video": None, + "accepts_enrollment_suggestions": True, + "kickoff_date": UTC_NOW, + "available_as_saas": False, + } + ), + ], + ) + self.assertEqual( + self.all_cohort_time_slot_dict(), + [ + { + "id": 1, + "cohort_id": 1, + "removed_at": model.syllabus_schedule_time_slot.removed_at, + "starting_at": model.syllabus_schedule_time_slot.starting_at, + "ending_at": model.syllabus_schedule_time_slot.ending_at, + "recurrent": model.syllabus_schedule_time_slot.recurrent, + "recurrency_type": model.syllabus_schedule_time_slot.recurrency_type, + "timezone": model.academy.timezone, + } + ], + ) cohort = self.get_cohort(1) - self.assertEqual(cohort_saved.send_robust.call_args_list, [ - call(instance=cohort, sender=cohort.__class__, created=True), - ]) - - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + self.assertEqual( + cohort_saved.send_robust.call_args_list, + [ + call(instance=cohort, sender=cohort.__class__, created=True), + ], + ) + + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_cohort__post__not_passing_available_as_saas(self): from breathecode.admissions.signals import cohort_saved self.headers(academy=1) - syllabus_kwargs = {'slug': 'they-killed-kenny'} - academy_kwargs = {'timezone': 'America/Caracas'} - model = self.bc.database.create(authenticate=True, - user=True, - profile_academy=True, - capability='crud_cohort', - role='potato', - syllabus_schedule=True, - syllabus=True, - syllabus_version=True, - skip_cohort=True, - syllabus_schedule_time_slot=True, - syllabus_kwargs=syllabus_kwargs, - academy_kwargs=academy_kwargs) + syllabus_kwargs = {"slug": "they-killed-kenny"} + academy_kwargs = {"timezone": "America/Caracas"} + model = self.bc.database.create( + authenticate=True, + user=True, + profile_academy=True, + capability="crud_cohort", + role="potato", + syllabus_schedule=True, + syllabus=True, + syllabus_version=True, + skip_cohort=True, + syllabus_schedule_time_slot=True, + syllabus_kwargs=syllabus_kwargs, + academy_kwargs=academy_kwargs, + ) # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] - url = reverse_lazy('admissions:academy_cohort') + url = reverse_lazy("admissions:academy_cohort") - stage = random.choice(['INACTIVE', 'PREWORK', 'STARTED', 'FINAL_PROJECT', 'ENDED', 'DELETED']) + stage = random.choice(["INACTIVE", "PREWORK", "STARTED", "FINAL_PROJECT", "ENDED", "DELETED"]) data = { - 'syllabus': f'{model.syllabus.slug}.v{model.syllabus_version.version}', - 
'slug': 'they-killed-kenny', - 'name': 'They killed kenny', - 'stage': stage.lower(), - 'kickoff_date': UTC_NOW.isoformat(), - 'never_ends': True, - 'remote_available': True, - 'schedule': 1 + "syllabus": f"{model.syllabus.slug}.v{model.syllabus_version.version}", + "slug": "they-killed-kenny", + "name": "They killed kenny", + "stage": stage.lower(), + "kickoff_date": UTC_NOW.isoformat(), + "never_ends": True, + "remote_available": True, + "schedule": 1, } - response = self.client.post(url, data, format='json') + response = self.client.post(url, data, format="json") json = response.json() - expected = post_serializer(self, - model.academy, - model.syllabus, - model.syllabus_version, - data={ - 'id': 1, - 'timezone': 'America/Caracas', - 'stage': stage, - 'slug': 'they-killed-kenny', - 'name': 'They killed kenny', - 'schedule': 1, - 'available_as_saas': model.academy.available_as_saas - }) + expected = post_serializer( + self, + model.academy, + model.syllabus, + model.syllabus_version, + data={ + "id": 1, + "timezone": "America/Caracas", + "stage": stage, + "slug": "they-killed-kenny", + "name": "They killed kenny", + "schedule": 1, + "available_as_saas": model.academy.available_as_saas, + }, + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), [ - cohort_field({ - 'stage': stage, - 'intro_video': None, - 'accepts_enrollment_suggestions': True, - 'kickoff_date': UTC_NOW, - 'available_as_saas': model.academy.available_as_saas - }), - ]) - self.assertEqual(self.all_cohort_time_slot_dict(), [{ - 'id': 1, - 'cohort_id': 1, - 'removed_at': model.syllabus_schedule_time_slot.removed_at, - 'starting_at': model.syllabus_schedule_time_slot.starting_at, - 'ending_at': model.syllabus_schedule_time_slot.ending_at, - 'recurrent': model.syllabus_schedule_time_slot.recurrent, - 'recurrency_type': model.syllabus_schedule_time_slot.recurrency_type, - 'timezone': model.academy.timezone, - }]) + self.assertEqual( + self.bc.database.list_of("admissions.Cohort"), + [ + cohort_field( + { + "stage": stage, + "intro_video": None, + "accepts_enrollment_suggestions": True, + "kickoff_date": UTC_NOW, + "available_as_saas": model.academy.available_as_saas, + } + ), + ], + ) + self.assertEqual( + self.all_cohort_time_slot_dict(), + [ + { + "id": 1, + "cohort_id": 1, + "removed_at": model.syllabus_schedule_time_slot.removed_at, + "starting_at": model.syllabus_schedule_time_slot.starting_at, + "ending_at": model.syllabus_schedule_time_slot.ending_at, + "recurrent": model.syllabus_schedule_time_slot.recurrent, + "recurrency_type": model.syllabus_schedule_time_slot.recurrency_type, + "timezone": model.academy.timezone, + } + ], + ) cohort = self.get_cohort(1) - self.assertEqual(cohort_saved.send_robust.call_args_list, [ - call(instance=cohort, sender=cohort.__class__, created=True), - ]) + self.assertEqual( + cohort_saved.send_robust.call_args_list, + [ + call(instance=cohort, sender=cohort.__class__, created=True), + ], + ) # """ @@ -1217,21 +1340,23 @@ def test_academy_cohort__post__not_passing_available_as_saas(self): 🔽🔽🔽 Without data """ - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + 
@patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_cohort_without_data(self): """Test /cohort without auth""" from breathecode.admissions.signals import cohort_saved self.headers(academy=1) - url = reverse_lazy('admissions:academy_cohort') - model = self.bc.database.create(authenticate=True, - profile_academy=True, - capability='read_all_cohort', - role='potato', - syllabus=True, - skip_cohort=True) + url = reverse_lazy("admissions:academy_cohort") + model = self.bc.database.create( + authenticate=True, + profile_academy=True, + capability="read_all_cohort", + role="potato", + syllabus=True, + skip_cohort=True, + ) # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] @@ -1248,9 +1373,9 @@ def test_academy_cohort_without_data(self): 🔽🔽🔽 With data (this method is reusable) """ - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_cohort__with_data(self): """Test /cohort without auth""" self.check_academy_cohort__with_data() @@ -1259,20 +1384,18 @@ def test_academy_cohort__with_data(self): 🔽🔽🔽 Put """ - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_cohort__put__without_id(self): """Test /cohort without auth""" from breathecode.admissions.signals import cohort_saved self.headers(academy=1) - url = reverse_lazy('admissions:academy_cohort') - model = self.bc.database.create(authenticate=True, - profile_academy=True, - capability='crud_cohort', - role='potato', - syllabus=True) + url = reverse_lazy("admissions:academy_cohort") + model = self.bc.database.create( + authenticate=True, profile_academy=True, capability="crud_cohort", role="potato", syllabus=True + ) # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] @@ -1281,7 +1404,7 @@ def test_academy_cohort__put__without_id(self): response = self.client.put(url, data) json = response.json() - self.assertEqual(json, {'detail': 'Missing cohort_id', 'status_code': 400}) + self.assertEqual(json, {"detail": "Missing cohort_id", "status_code": 400}) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual(self.all_cohort_time_slot_dict(), []) self.assertEqual(cohort_saved.send_robust.call_args_list, []) @@ -1290,126 +1413,132 @@ def test_academy_cohort__put__without_id(self): 🔽🔽🔽 Get """ - 
@patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_cohort__with_data__with_upcoming_false(self): """Test /cohort without auth""" from breathecode.admissions.signals import cohort_saved cohort_kwargs = { - 'kickoff_date': datetime.today().isoformat(), - 'ending_date': future_date.isoformat(), + "kickoff_date": datetime.today().isoformat(), + "ending_date": future_date.isoformat(), } self.headers(academy=1) - model = self.bc.database.create(authenticate=True, - cohort=cohort_kwargs, - profile_academy=True, - capability='read_all_cohort', - role='potato', - syllabus=True, - syllabus_version=True, - syllabus_schedule=True) + model = self.bc.database.create( + authenticate=True, + cohort=cohort_kwargs, + profile_academy=True, + capability="read_all_cohort", + role="potato", + syllabus=True, + syllabus_version=True, + syllabus_schedule=True, + ) # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] - model_dict = self.remove_dinamics_fields(model['cohort'].__dict__) - del model_dict['ending_date'] - del model_dict['kickoff_date'] - base_url = reverse_lazy('admissions:academy_cohort') - url = f'{base_url}?upcoming=false' + model_dict = self.remove_dinamics_fields(model["cohort"].__dict__) + del model_dict["ending_date"] + del model_dict["kickoff_date"] + base_url = reverse_lazy("admissions:academy_cohort") + url = f"{base_url}?upcoming=false" response = self.client.get(url) json = response.json() for j in json: - del j['ending_date'] - del j['kickoff_date'] - expected = [{ - 'id': model['cohort'].id, - 'slug': model['cohort'].slug, - 'name': model['cohort'].name, - 'never_ends': model['cohort'].never_ends, - 'remote_available': model['cohort'].remote_available, - 'private': model['cohort'].private, - # 'kickoff_date': self.datetime_to_iso(model['cohort'].kickoff_date), - # 'ending_date': self.datetime_to_iso(model['cohort'].ending_date), - 'stage': model['cohort'].stage, - 'language': model['cohort'].language, - 'current_day': model['cohort'].current_day, - 'current_module': None, - 'online_meeting_url': model['cohort'].online_meeting_url, - 'timezone': model['cohort'].timezone, - 'timeslots': [], - 'is_hidden_on_prework': model['cohort'].is_hidden_on_prework, - 'available_as_saas': model['cohort'].available_as_saas, - 'schedule': { - 'id': model['cohort'].schedule.id, - 'name': model['cohort'].schedule.name, - 'syllabus': model['cohort'].schedule.syllabus.id, - }, - 'syllabus_version': { - 'name': model.syllabus.name, - 'slug': model.syllabus.slug, - 'status': model['cohort'].syllabus_version.status, - 'version': model['cohort'].syllabus_version.version, - 'syllabus': model['cohort'].syllabus_version.syllabus.id, - 'duration_in_days': model.syllabus.duration_in_days, - 'duration_in_hours': model.syllabus.duration_in_hours, - 'github_url': model.syllabus.github_url, - 'logo': model.syllabus.logo, - 'private': model.syllabus.private, - 'week_hours': model.syllabus.week_hours, - }, - 'academy': { - 'id': model['cohort'].academy.id, - 
'slug': model['cohort'].academy.slug, - 'name': model['cohort'].academy.name, - 'country': { - 'code': model['cohort'].academy.country.code, - 'name': model['cohort'].academy.country.name, + del j["ending_date"] + del j["kickoff_date"] + expected = [ + { + "id": model["cohort"].id, + "slug": model["cohort"].slug, + "name": model["cohort"].name, + "never_ends": model["cohort"].never_ends, + "remote_available": model["cohort"].remote_available, + "private": model["cohort"].private, + # 'kickoff_date': self.datetime_to_iso(model['cohort'].kickoff_date), + # 'ending_date': self.datetime_to_iso(model['cohort'].ending_date), + "stage": model["cohort"].stage, + "language": model["cohort"].language, + "current_day": model["cohort"].current_day, + "current_module": None, + "online_meeting_url": model["cohort"].online_meeting_url, + "timezone": model["cohort"].timezone, + "timeslots": [], + "is_hidden_on_prework": model["cohort"].is_hidden_on_prework, + "available_as_saas": model["cohort"].available_as_saas, + "schedule": { + "id": model["cohort"].schedule.id, + "name": model["cohort"].schedule.name, + "syllabus": model["cohort"].schedule.syllabus.id, }, - 'city': { - 'name': model['cohort'].academy.city.name, + "syllabus_version": { + "name": model.syllabus.name, + "slug": model.syllabus.slug, + "status": model["cohort"].syllabus_version.status, + "version": model["cohort"].syllabus_version.version, + "syllabus": model["cohort"].syllabus_version.syllabus.id, + "duration_in_days": model.syllabus.duration_in_days, + "duration_in_hours": model.syllabus.duration_in_hours, + "github_url": model.syllabus.github_url, + "logo": model.syllabus.logo, + "private": model.syllabus.private, + "week_hours": model.syllabus.week_hours, }, - 'logo_url': model['cohort'].academy.logo_url, - 'is_hidden_on_prework': model['cohort'].academy.is_hidden_on_prework - }, - }] + "academy": { + "id": model["cohort"].academy.id, + "slug": model["cohort"].academy.slug, + "name": model["cohort"].academy.name, + "country": { + "code": model["cohort"].academy.country.code, + "name": model["cohort"].academy.country.name, + }, + "city": { + "name": model["cohort"].academy.city.name, + }, + "logo_url": model["cohort"].academy.logo_url, + "is_hidden_on_prework": model["cohort"].academy.is_hidden_on_prework, + }, + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(self.count_cohort(), 1) cohort_dict = self.get_cohort_dict(1) - del cohort_dict['ending_date'] - del cohort_dict['kickoff_date'] + del cohort_dict["ending_date"] + del cohort_dict["kickoff_date"] self.assertEqual(cohort_dict, model_dict) self.assertEqual(self.all_cohort_time_slot_dict(), []) self.assertEqual(cohort_saved.send_robust.call_args_list, []) - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_cohort__with_data__with_upcoming_true__without_data(self): """Test /cohort without auth""" from breathecode.admissions.signals import cohort_saved self.headers(academy=1) self.clear_cache() - 
model = self.bc.database.create(authenticate=True, - cohort=True, - profile_academy=True, - capability='read_all_cohort', - role='potato', - syllabus=True, - syllabus_version=True, - syllabus_schedule=True) + model = self.bc.database.create( + authenticate=True, + cohort=True, + profile_academy=True, + capability="read_all_cohort", + role="potato", + syllabus=True, + syllabus_version=True, + syllabus_schedule=True, + ) # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] - model_dict = self.remove_dinamics_fields(model['cohort'].__dict__) - base_url = reverse_lazy('admissions:academy_cohort') - url = f'{base_url}?upcoming=true' + model_dict = self.remove_dinamics_fields(model["cohort"].__dict__) + base_url = reverse_lazy("admissions:academy_cohort") + url = f"{base_url}?upcoming=true" response = self.client.get(url) json = response.json() @@ -1420,85 +1549,89 @@ def test_academy_cohort__with_data__with_upcoming_true__without_data(self): self.assertEqual(self.all_cohort_time_slot_dict(), []) self.assertEqual(cohort_saved.send_robust.call_args_list, []) - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_cohort__with_data__with_upcoming_true(self): """Test /cohort without auth""" from breathecode.admissions.signals import cohort_saved self.headers(academy=1) cohort_kwargs = { - 'kickoff_date': timezone.now() + timedelta(days=1), + "kickoff_date": timezone.now() + timedelta(days=1), } - model = self.bc.database.create(authenticate=True, - profile_academy=True, - capability='read_all_cohort', - role='potato', - syllabus=True, - syllabus_version=True, - syllabus_schedule=True, - cohort=cohort_kwargs) + model = self.bc.database.create( + authenticate=True, + profile_academy=True, + capability="read_all_cohort", + role="potato", + syllabus=True, + syllabus_version=True, + syllabus_schedule=True, + cohort=cohort_kwargs, + ) # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] model_dict = self.get_cohort_dict(1) - base_url = reverse_lazy('admissions:academy_cohort') - url = f'{base_url}?upcoming=true' + base_url = reverse_lazy("admissions:academy_cohort") + url = f"{base_url}?upcoming=true" response = self.client.get(url) json = response.json() - expected = [{ - 'id': model['cohort'].id, - 'slug': model['cohort'].slug, - 'name': model['cohort'].name, - 'never_ends': model['cohort'].never_ends, - 'private': model['cohort'].private, - 'kickoff_date': self.datetime_to_iso(model['cohort'].kickoff_date), - 'ending_date': model['cohort'].ending_date, - 'remote_available': model['cohort'].remote_available, - 'stage': model['cohort'].stage, - 'language': model['cohort'].language, - 'current_day': model['cohort'].current_day, - 'current_module': None, - 'online_meeting_url': model['cohort'].online_meeting_url, - 'timezone': model['cohort'].timezone, - 'timeslots': [], - 'is_hidden_on_prework': model['cohort'].is_hidden_on_prework, - 'available_as_saas': model['cohort'].available_as_saas, - 'schedule': { - 
'id': model['cohort'].schedule.id, - 'name': model['cohort'].schedule.name, - 'syllabus': model['cohort'].schedule.syllabus.id, - }, - 'syllabus_version': { - 'name': model.syllabus.name, - 'slug': model.syllabus.slug, - 'status': model['cohort'].syllabus_version.status, - 'version': model['cohort'].syllabus_version.version, - 'syllabus': model['cohort'].syllabus_version.syllabus.id, - 'duration_in_days': model.syllabus.duration_in_days, - 'duration_in_hours': model.syllabus.duration_in_hours, - 'github_url': model.syllabus.github_url, - 'logo': model.syllabus.logo, - 'private': model.syllabus.private, - 'week_hours': model.syllabus.week_hours, - }, - 'academy': { - 'id': model['cohort'].academy.id, - 'slug': model['cohort'].academy.slug, - 'name': model['cohort'].academy.name, - 'country': { - 'code': model['cohort'].academy.country.code, - 'name': model['cohort'].academy.country.name, + expected = [ + { + "id": model["cohort"].id, + "slug": model["cohort"].slug, + "name": model["cohort"].name, + "never_ends": model["cohort"].never_ends, + "private": model["cohort"].private, + "kickoff_date": self.datetime_to_iso(model["cohort"].kickoff_date), + "ending_date": model["cohort"].ending_date, + "remote_available": model["cohort"].remote_available, + "stage": model["cohort"].stage, + "language": model["cohort"].language, + "current_day": model["cohort"].current_day, + "current_module": None, + "online_meeting_url": model["cohort"].online_meeting_url, + "timezone": model["cohort"].timezone, + "timeslots": [], + "is_hidden_on_prework": model["cohort"].is_hidden_on_prework, + "available_as_saas": model["cohort"].available_as_saas, + "schedule": { + "id": model["cohort"].schedule.id, + "name": model["cohort"].schedule.name, + "syllabus": model["cohort"].schedule.syllabus.id, }, - 'city': { - 'name': model['cohort'].academy.city.name, + "syllabus_version": { + "name": model.syllabus.name, + "slug": model.syllabus.slug, + "status": model["cohort"].syllabus_version.status, + "version": model["cohort"].syllabus_version.version, + "syllabus": model["cohort"].syllabus_version.syllabus.id, + "duration_in_days": model.syllabus.duration_in_days, + "duration_in_hours": model.syllabus.duration_in_hours, + "github_url": model.syllabus.github_url, + "logo": model.syllabus.logo, + "private": model.syllabus.private, + "week_hours": model.syllabus.week_hours, }, - 'logo_url': model['cohort'].academy.logo_url, - 'is_hidden_on_prework': model['cohort'].academy.is_hidden_on_prework - }, - }] + "academy": { + "id": model["cohort"].academy.id, + "slug": model["cohort"].academy.slug, + "name": model["cohort"].academy.name, + "country": { + "code": model["cohort"].academy.country.code, + "name": model["cohort"].academy.country.name, + }, + "city": { + "name": model["cohort"].academy.city.name, + }, + "logo_url": model["cohort"].academy.logo_url, + "is_hidden_on_prework": model["cohort"].academy.is_hidden_on_prework, + }, + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -1507,30 +1640,32 @@ def test_academy_cohort__with_data__with_upcoming_true(self): self.assertEqual(self.all_cohort_time_slot_dict(), []) self.assertEqual(cohort_saved.send_robust.call_args_list, []) - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + 
@patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_cohort__with_data__with_bad_academy(self): """Test /cohort without auth""" from breathecode.admissions.signals import cohort_saved self.headers(academy=1) - model = self.bc.database.create(authenticate=True, - cohort=True, - impossible_kickoff_date=True, - profile_academy=True, - capability='read_all_cohort', - role='potato', - syllabus=True, - syllabus_version=True, - syllabus_schedule=True) + model = self.bc.database.create( + authenticate=True, + cohort=True, + impossible_kickoff_date=True, + profile_academy=True, + capability="read_all_cohort", + role="potato", + syllabus=True, + syllabus_version=True, + syllabus_schedule=True, + ) # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] model_dict = self.get_cohort_dict(1) - base_url = reverse_lazy('admissions:academy_cohort') - url = f'{base_url}?academy=they-killed-kenny' + base_url = reverse_lazy("admissions:academy_cohort") + url = f"{base_url}?academy=they-killed-kenny" response = self.client.get(url) json = response.json() @@ -1541,86 +1676,90 @@ def test_academy_cohort__with_data__with_bad_academy(self): self.assertEqual(self.all_cohort_time_slot_dict(), []) self.assertEqual(cohort_saved.send_robust.call_args_list, []) - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_cohort_with_data_with_academy(self): """Test /cohort without auth""" from breathecode.admissions.signals import cohort_saved cohort_kwargs = { - 'kickoff_date': datetime.today().isoformat(), - 'ending_date': future_date.isoformat(), + "kickoff_date": datetime.today().isoformat(), + "ending_date": future_date.isoformat(), } self.headers(academy=1) - model = self.bc.database.create(authenticate=True, - cohort=cohort_kwargs, - profile_academy=True, - capability='read_all_cohort', - role='potato', - syllabus=True, - syllabus_version=True, - syllabus_schedule=True) + model = self.bc.database.create( + authenticate=True, + cohort=cohort_kwargs, + profile_academy=True, + capability="read_all_cohort", + role="potato", + syllabus=True, + syllabus_version=True, + syllabus_schedule=True, + ) # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] model_dict = self.get_cohort_dict(1) - base_url = reverse_lazy('admissions:academy_cohort') - url = f'{base_url}?academy=' + model['academy'].slug + base_url = reverse_lazy("admissions:academy_cohort") + url = f"{base_url}?academy=" + model["academy"].slug response = self.client.get(url) json = response.json() - expected = [{ - 'id': model['cohort'].id, - 'slug': model['cohort'].slug, - 'name': model['cohort'].name, - 'never_ends': model['cohort'].never_ends, - 'remote_available': model['cohort'].remote_available, - 'private': 
model['cohort'].private, - 'kickoff_date': self.datetime_to_iso(model['cohort'].kickoff_date), - 'ending_date': self.datetime_to_iso(model['cohort'].ending_date), - 'stage': model['cohort'].stage, - 'language': model['cohort'].language, - 'current_day': model['cohort'].current_day, - 'current_module': model['cohort'].current_module, - 'online_meeting_url': model['cohort'].online_meeting_url, - 'timezone': model['cohort'].timezone, - 'is_hidden_on_prework': model['cohort'].is_hidden_on_prework, - 'available_as_saas': model['cohort'].available_as_saas, - 'timeslots': [], - 'schedule': { - 'id': model['cohort'].schedule.id, - 'name': model['cohort'].schedule.name, - 'syllabus': model['cohort'].schedule.syllabus.id, - }, - 'syllabus_version': { - 'name': model.syllabus.name, - 'slug': model.syllabus.slug, - 'status': model['cohort'].syllabus_version.status, - 'version': model['cohort'].syllabus_version.version, - 'syllabus': model['cohort'].syllabus_version.syllabus.id, - 'duration_in_days': model.syllabus.duration_in_days, - 'duration_in_hours': model.syllabus.duration_in_hours, - 'github_url': model.syllabus.github_url, - 'logo': model.syllabus.logo, - 'private': model.syllabus.private, - 'week_hours': model.syllabus.week_hours, - }, - 'academy': { - 'id': model['cohort'].academy.id, - 'slug': model['cohort'].academy.slug, - 'name': model['cohort'].academy.name, - 'country': { - 'code': model['cohort'].academy.country.code, - 'name': model['cohort'].academy.country.name, + expected = [ + { + "id": model["cohort"].id, + "slug": model["cohort"].slug, + "name": model["cohort"].name, + "never_ends": model["cohort"].never_ends, + "remote_available": model["cohort"].remote_available, + "private": model["cohort"].private, + "kickoff_date": self.datetime_to_iso(model["cohort"].kickoff_date), + "ending_date": self.datetime_to_iso(model["cohort"].ending_date), + "stage": model["cohort"].stage, + "language": model["cohort"].language, + "current_day": model["cohort"].current_day, + "current_module": model["cohort"].current_module, + "online_meeting_url": model["cohort"].online_meeting_url, + "timezone": model["cohort"].timezone, + "is_hidden_on_prework": model["cohort"].is_hidden_on_prework, + "available_as_saas": model["cohort"].available_as_saas, + "timeslots": [], + "schedule": { + "id": model["cohort"].schedule.id, + "name": model["cohort"].schedule.name, + "syllabus": model["cohort"].schedule.syllabus.id, }, - 'city': { - 'name': model['cohort'].academy.city.name, + "syllabus_version": { + "name": model.syllabus.name, + "slug": model.syllabus.slug, + "status": model["cohort"].syllabus_version.status, + "version": model["cohort"].syllabus_version.version, + "syllabus": model["cohort"].syllabus_version.syllabus.id, + "duration_in_days": model.syllabus.duration_in_days, + "duration_in_hours": model.syllabus.duration_in_hours, + "github_url": model.syllabus.github_url, + "logo": model.syllabus.logo, + "private": model.syllabus.private, + "week_hours": model.syllabus.week_hours, }, - 'logo_url': model['cohort'].academy.logo_url, - 'is_hidden_on_prework': model['cohort'].academy.is_hidden_on_prework - }, - }] + "academy": { + "id": model["cohort"].academy.id, + "slug": model["cohort"].academy.slug, + "name": model["cohort"].academy.name, + "country": { + "code": model["cohort"].academy.country.code, + "name": model["cohort"].academy.country.name, + }, + "city": { + "name": model["cohort"].academy.city.name, + }, + "logo_url": model["cohort"].academy.logo_url, + "is_hidden_on_prework": 
model["cohort"].academy.is_hidden_on_prework, + }, + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -1629,86 +1768,90 @@ def test_academy_cohort_with_data_with_academy(self): self.assertEqual(self.all_cohort_time_slot_dict(), []) self.assertEqual(cohort_saved.send_robust.call_args_list, []) - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_cohort_with_data_with_academy_with_comma(self): """Test /cohort without auth""" from breathecode.admissions.signals import cohort_saved cohort_kwargs = { - 'kickoff_date': datetime.today().isoformat(), - 'ending_date': future_date.isoformat(), + "kickoff_date": datetime.today().isoformat(), + "ending_date": future_date.isoformat(), } self.headers(academy=1) - model = self.bc.database.create(authenticate=True, - cohort=cohort_kwargs, - profile_academy=True, - capability='read_all_cohort', - role='potato', - syllabus=True, - syllabus_version=True, - syllabus_schedule=True) + model = self.bc.database.create( + authenticate=True, + cohort=cohort_kwargs, + profile_academy=True, + capability="read_all_cohort", + role="potato", + syllabus=True, + syllabus_version=True, + syllabus_schedule=True, + ) # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] model_dict = self.get_cohort_dict(1) - base_url = reverse_lazy('admissions:academy_cohort') - url = f'{base_url}?academy=' + model['academy'].slug + ',they-killed-kenny' + base_url = reverse_lazy("admissions:academy_cohort") + url = f"{base_url}?academy=" + model["academy"].slug + ",they-killed-kenny" response = self.client.get(url) json = response.json() - expected = [{ - 'id': model['cohort'].id, - 'slug': model['cohort'].slug, - 'name': model['cohort'].name, - 'never_ends': model['cohort'].never_ends, - 'remote_available': model['cohort'].remote_available, - 'private': model['cohort'].private, - 'kickoff_date': self.datetime_to_iso(model['cohort'].kickoff_date), - 'ending_date': self.datetime_to_iso(model['cohort'].ending_date), - 'stage': model['cohort'].stage, - 'language': model['cohort'].language, - 'current_day': model['cohort'].current_day, - 'current_module': None, - 'online_meeting_url': model['cohort'].online_meeting_url, - 'timezone': model['cohort'].timezone, - 'timeslots': [], - 'is_hidden_on_prework': model['cohort'].is_hidden_on_prework, - 'available_as_saas': model['cohort'].available_as_saas, - 'schedule': { - 'id': model['cohort'].schedule.id, - 'name': model['cohort'].schedule.name, - 'syllabus': model['cohort'].schedule.syllabus.id, - }, - 'syllabus_version': { - 'name': model.syllabus.name, - 'slug': model.syllabus.slug, - 'status': model['cohort'].syllabus_version.status, - 'version': model['cohort'].syllabus_version.version, - 'syllabus': model['cohort'].syllabus_version.syllabus.id, - 'duration_in_days': model.syllabus.duration_in_days, - 'duration_in_hours': model.syllabus.duration_in_hours, - 'github_url': model.syllabus.github_url, - 'logo': model.syllabus.logo, - 
'private': model.syllabus.private, - 'week_hours': model.syllabus.week_hours, - }, - 'academy': { - 'id': model['cohort'].academy.id, - 'slug': model['cohort'].academy.slug, - 'name': model['cohort'].academy.name, - 'country': { - 'code': model['cohort'].academy.country.code, - 'name': model['cohort'].academy.country.name, + expected = [ + { + "id": model["cohort"].id, + "slug": model["cohort"].slug, + "name": model["cohort"].name, + "never_ends": model["cohort"].never_ends, + "remote_available": model["cohort"].remote_available, + "private": model["cohort"].private, + "kickoff_date": self.datetime_to_iso(model["cohort"].kickoff_date), + "ending_date": self.datetime_to_iso(model["cohort"].ending_date), + "stage": model["cohort"].stage, + "language": model["cohort"].language, + "current_day": model["cohort"].current_day, + "current_module": None, + "online_meeting_url": model["cohort"].online_meeting_url, + "timezone": model["cohort"].timezone, + "timeslots": [], + "is_hidden_on_prework": model["cohort"].is_hidden_on_prework, + "available_as_saas": model["cohort"].available_as_saas, + "schedule": { + "id": model["cohort"].schedule.id, + "name": model["cohort"].schedule.name, + "syllabus": model["cohort"].schedule.syllabus.id, }, - 'city': { - 'name': model['cohort'].academy.city.name, + "syllabus_version": { + "name": model.syllabus.name, + "slug": model.syllabus.slug, + "status": model["cohort"].syllabus_version.status, + "version": model["cohort"].syllabus_version.version, + "syllabus": model["cohort"].syllabus_version.syllabus.id, + "duration_in_days": model.syllabus.duration_in_days, + "duration_in_hours": model.syllabus.duration_in_hours, + "github_url": model.syllabus.github_url, + "logo": model.syllabus.logo, + "private": model.syllabus.private, + "week_hours": model.syllabus.week_hours, }, - 'logo_url': model['cohort'].academy.logo_url, - 'is_hidden_on_prework': model['cohort'].academy.is_hidden_on_prework - }, - }] + "academy": { + "id": model["cohort"].academy.id, + "slug": model["cohort"].academy.slug, + "name": model["cohort"].academy.name, + "country": { + "code": model["cohort"].academy.country.code, + "name": model["cohort"].academy.country.name, + }, + "city": { + "name": model["cohort"].academy.city.name, + }, + "logo_url": model["cohort"].academy.logo_url, + "is_hidden_on_prework": model["cohort"].academy.is_hidden_on_prework, + }, + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -1717,31 +1860,33 @@ def test_academy_cohort_with_data_with_academy_with_comma(self): self.assertEqual(self.all_cohort_time_slot_dict(), []) self.assertEqual(cohort_saved.send_robust.call_args_list, []) - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_cohort_with_ten_datas_with_academy_with_comma(self): """Test /cohort without auth""" from breathecode.admissions.signals import cohort_saved cohort_kwargs = { - 'kickoff_date': datetime.today().isoformat(), - 'ending_date': future_date.isoformat(), + "kickoff_date": 
datetime.today().isoformat(), + "ending_date": future_date.isoformat(), } self.headers(academy=1) models = [ - self.bc.database.create(authenticate=True, - cohort=cohort_kwargs, - profile_academy=True, - capability='read_all_cohort', - role='potato', - syllabus=True, - syllabus_version=True, - syllabus_schedule=True) + self.bc.database.create( + authenticate=True, + cohort=cohort_kwargs, + profile_academy=True, + capability="read_all_cohort", + role="potato", + syllabus=True, + syllabus_version=True, + syllabus_schedule=True, + ) ] base = models[0].copy() - del base['cohort'] + del base["cohort"] models = models + [self.bc.database.create(cohort=cohort_kwargs, models=base) for index in range(0, 9)] models.sort(key=lambda x: x.cohort.kickoff_date, reverse=True) @@ -1749,74 +1894,77 @@ def test_academy_cohort_with_ten_datas_with_academy_with_comma(self): # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] - self.client.force_authenticate(user=models[0]['user']) - base_url = reverse_lazy('admissions:academy_cohort') - params = ','.join([model['academy'].slug for model in models]) - url = f'{base_url}?academy={params}' + self.client.force_authenticate(user=models[0]["user"]) + base_url = reverse_lazy("admissions:academy_cohort") + params = ",".join([model["academy"].slug for model in models]) + url = f"{base_url}?academy={params}" response = self.client.get(url) json = response.json() - expected = [{ - 'id': model['cohort'].id, - 'slug': model['cohort'].slug, - 'name': model['cohort'].name, - 'never_ends': model['cohort'].never_ends, - 'remote_available': model['cohort'].remote_available, - 'private': model['cohort'].private, - 'language': model['cohort'].language, - 'kickoff_date': datetime_to_iso_format(model['cohort'].kickoff_date), - 'ending_date': datetime_to_iso_format(model['cohort'].ending_date), - 'stage': model['cohort'].stage, - 'current_day': model['cohort'].current_day, - 'current_module': None, - 'online_meeting_url': model['cohort'].online_meeting_url, - 'timezone': model['cohort'].timezone, - 'timeslots': [], - 'is_hidden_on_prework': model['cohort'].is_hidden_on_prework, - 'available_as_saas': model['cohort'].available_as_saas, - 'schedule': { - 'id': model['cohort'].schedule.id, - 'name': model['cohort'].schedule.name, - 'syllabus': model['cohort'].schedule.syllabus.id, - }, - 'syllabus_version': { - 'name': model.syllabus.name, - 'slug': model.syllabus.slug, - 'status': model['cohort'].syllabus_version.status, - 'version': model['cohort'].syllabus_version.version, - 'syllabus': model['cohort'].syllabus_version.syllabus.id, - 'duration_in_days': model.syllabus.duration_in_days, - 'duration_in_hours': model.syllabus.duration_in_hours, - 'github_url': model.syllabus.github_url, - 'logo': model.syllabus.logo, - 'private': model.syllabus.private, - 'week_hours': model.syllabus.week_hours, - }, - 'academy': { - 'id': model['cohort'].academy.id, - 'slug': model['cohort'].academy.slug, - 'name': model['cohort'].academy.name, - 'country': { - 'code': model['cohort'].academy.country.code, - 'name': model['cohort'].academy.country.name, + expected = [ + { + "id": model["cohort"].id, + "slug": model["cohort"].slug, + "name": model["cohort"].name, + "never_ends": model["cohort"].never_ends, + "remote_available": model["cohort"].remote_available, + "private": model["cohort"].private, + "language": model["cohort"].language, + "kickoff_date": datetime_to_iso_format(model["cohort"].kickoff_date), + "ending_date": 
datetime_to_iso_format(model["cohort"].ending_date), + "stage": model["cohort"].stage, + "current_day": model["cohort"].current_day, + "current_module": None, + "online_meeting_url": model["cohort"].online_meeting_url, + "timezone": model["cohort"].timezone, + "timeslots": [], + "is_hidden_on_prework": model["cohort"].is_hidden_on_prework, + "available_as_saas": model["cohort"].available_as_saas, + "schedule": { + "id": model["cohort"].schedule.id, + "name": model["cohort"].schedule.name, + "syllabus": model["cohort"].schedule.syllabus.id, }, - 'city': { - 'name': model['cohort'].academy.city.name, + "syllabus_version": { + "name": model.syllabus.name, + "slug": model.syllabus.slug, + "status": model["cohort"].syllabus_version.status, + "version": model["cohort"].syllabus_version.version, + "syllabus": model["cohort"].syllabus_version.syllabus.id, + "duration_in_days": model.syllabus.duration_in_days, + "duration_in_hours": model.syllabus.duration_in_hours, + "github_url": model.syllabus.github_url, + "logo": model.syllabus.logo, + "private": model.syllabus.private, + "week_hours": model.syllabus.week_hours, }, - 'logo_url': model['cohort'].academy.logo_url, - 'is_hidden_on_prework': model['cohort'].academy.is_hidden_on_prework - }, - } for model in models] + "academy": { + "id": model["cohort"].academy.id, + "slug": model["cohort"].academy.slug, + "name": model["cohort"].academy.name, + "country": { + "code": model["cohort"].academy.country.code, + "name": model["cohort"].academy.country.name, + }, + "city": { + "name": model["cohort"].academy.city.name, + }, + "logo_url": model["cohort"].academy.logo_url, + "is_hidden_on_prework": model["cohort"].academy.is_hidden_on_prework, + }, + } + for model in models + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self_all_cohort = self.bc.database.list_of('admissions.Cohort') + self_all_cohort = self.bc.database.list_of("admissions.Cohort") for j in self_all_cohort: - del j['ending_date'] - del j['kickoff_date'] + del j["ending_date"] + del j["kickoff_date"] self_all_model = self.all_model_dict([x.cohort for x in models]) for j in self_all_model: - del j['ending_date'] - del j['kickoff_date'] + del j["ending_date"] + del j["kickoff_date"] self.assertEqual(self_all_cohort, self_all_model) self.assertEqual(self.all_cohort_time_slot_dict(), []) self.assertEqual(cohort_saved.send_robust.call_args_list, []) @@ -1825,128 +1973,130 @@ def test_academy_cohort_with_ten_datas_with_academy_with_comma(self): 🔽🔽🔽 Sort in querystring """ - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_cohort__with_data__with_sort(self): """Test /cohort without auth""" from breathecode.admissions.signals import cohort_saved cohort_kwargs = { - 'kickoff_date': datetime.today().isoformat(), - 'ending_date': future_date.isoformat(), + "kickoff_date": datetime.today().isoformat(), + "ending_date": future_date.isoformat(), } self.headers(academy=1) - base = self.bc.database.create(authenticate=True, - 
profile_academy=True, - capability='read_all_cohort', - role='potato', - skip_cohort=True) + base = self.bc.database.create( + authenticate=True, profile_academy=True, capability="read_all_cohort", role="potato", skip_cohort=True + ) models = [ - self.bc.database.create(cohort=cohort_kwargs, - syllabus=True, - syllabus_version=True, - syllabus_schedule=True, - models=base) for _ in range(0, 2) + self.bc.database.create( + cohort=cohort_kwargs, syllabus=True, syllabus_version=True, syllabus_schedule=True, models=base + ) + for _ in range(0, 2) ] # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] - ordened_models = sorted(models, key=lambda x: x['cohort'].slug, reverse=True) + ordened_models = sorted(models, key=lambda x: x["cohort"].slug, reverse=True) - url = reverse_lazy('admissions:academy_cohort') + '?sort=-slug' + url = reverse_lazy("admissions:academy_cohort") + "?sort=-slug" response = self.client.get(url) json = response.json() - expected = [{ - 'id': model['cohort'].id, - 'slug': model['cohort'].slug, - 'name': model['cohort'].name, - 'current_day': model.cohort.current_day, - 'current_module': None, - 'never_ends': model['cohort'].never_ends, - 'remote_available': model['cohort'].remote_available, - 'private': model['cohort'].private, - 'kickoff_date': self.datetime_to_iso(model['cohort'].kickoff_date), - 'ending_date': self.datetime_to_iso(model['cohort'].ending_date), - 'stage': model['cohort'].stage, - 'language': model['cohort'].language, - 'online_meeting_url': model['cohort'].online_meeting_url, - 'timezone': model['cohort'].timezone, - 'timeslots': [], - 'is_hidden_on_prework': model['cohort'].is_hidden_on_prework, - 'available_as_saas': model['cohort'].available_as_saas, - 'schedule': { - 'id': model['cohort'].schedule.id, - 'name': model['cohort'].schedule.name, - 'syllabus': model['cohort'].schedule.syllabus.id, - }, - 'syllabus_version': { - 'name': model.syllabus.name, - 'slug': model.syllabus.slug, - 'status': model['cohort'].syllabus_version.status, - 'version': model['cohort'].syllabus_version.version, - 'syllabus': model['cohort'].syllabus_version.syllabus.id, - 'duration_in_days': model.syllabus.duration_in_days, - 'duration_in_hours': model.syllabus.duration_in_hours, - 'github_url': model.syllabus.github_url, - 'logo': model.syllabus.logo, - 'private': model.syllabus.private, - 'week_hours': model.syllabus.week_hours, - }, - 'academy': { - 'id': model['cohort'].academy.id, - 'slug': model['cohort'].academy.slug, - 'name': model['cohort'].academy.name, - 'country': { - 'code': model['cohort'].academy.country.code, - 'name': model['cohort'].academy.country.name, + expected = [ + { + "id": model["cohort"].id, + "slug": model["cohort"].slug, + "name": model["cohort"].name, + "current_day": model.cohort.current_day, + "current_module": None, + "never_ends": model["cohort"].never_ends, + "remote_available": model["cohort"].remote_available, + "private": model["cohort"].private, + "kickoff_date": self.datetime_to_iso(model["cohort"].kickoff_date), + "ending_date": self.datetime_to_iso(model["cohort"].ending_date), + "stage": model["cohort"].stage, + "language": model["cohort"].language, + "online_meeting_url": model["cohort"].online_meeting_url, + "timezone": model["cohort"].timezone, + "timeslots": [], + "is_hidden_on_prework": model["cohort"].is_hidden_on_prework, + "available_as_saas": model["cohort"].available_as_saas, + "schedule": { + "id": model["cohort"].schedule.id, + "name": model["cohort"].schedule.name, + 
"syllabus": model["cohort"].schedule.syllabus.id, }, - 'city': { - 'name': model['cohort'].academy.city.name, + "syllabus_version": { + "name": model.syllabus.name, + "slug": model.syllabus.slug, + "status": model["cohort"].syllabus_version.status, + "version": model["cohort"].syllabus_version.version, + "syllabus": model["cohort"].syllabus_version.syllabus.id, + "duration_in_days": model.syllabus.duration_in_days, + "duration_in_hours": model.syllabus.duration_in_hours, + "github_url": model.syllabus.github_url, + "logo": model.syllabus.logo, + "private": model.syllabus.private, + "week_hours": model.syllabus.week_hours, }, - 'logo_url': model['cohort'].academy.logo_url, - 'is_hidden_on_prework': model['cohort'].academy.is_hidden_on_prework - }, - } for model in ordened_models] + "academy": { + "id": model["cohort"].academy.id, + "slug": model["cohort"].academy.slug, + "name": model["cohort"].academy.name, + "country": { + "code": model["cohort"].academy.country.code, + "name": model["cohort"].academy.country.name, + }, + "city": { + "name": model["cohort"].academy.city.name, + }, + "logo_url": model["cohort"].academy.logo_url, + "is_hidden_on_prework": model["cohort"].academy.is_hidden_on_prework, + }, + } + for model in ordened_models + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - all_cohort_dict = self.bc.database.list_of('admissions.Cohort') + all_cohort_dict = self.bc.database.list_of("admissions.Cohort") for cohort_dict in all_cohort_dict: - del cohort_dict['ending_date'] - del cohort_dict['kickoff_date'] - models_dict = [{**self.model_to_dict(model, 'cohort')} for model in models] + del cohort_dict["ending_date"] + del cohort_dict["kickoff_date"] + models_dict = [{**self.model_to_dict(model, "cohort")} for model in models] for dict in models_dict: - del dict['ending_date'] - del dict['kickoff_date'] + del dict["ending_date"] + del dict["kickoff_date"] self.assertEqual(all_cohort_dict, models_dict) self.assertEqual(cohort_saved.send_robust.call_args_list, []) - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_cohort_with_data_with_bad_location(self): """Test /cohort without auth""" from breathecode.admissions.signals import cohort_saved self.headers(academy=1) - model = self.bc.database.create(authenticate=True, - cohort=True, - profile_academy=True, - capability='read_all_cohort', - role='potato', - syllabus=True, - syllabus_version=True, - syllabus_schedule=True) + model = self.bc.database.create( + authenticate=True, + cohort=True, + profile_academy=True, + capability="read_all_cohort", + role="potato", + syllabus=True, + syllabus_version=True, + syllabus_schedule=True, + ) # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] model_dict = self.get_cohort_dict(1) - base_url = reverse_lazy('admissions:academy_cohort') - url = f'{base_url}?location=they-killed-kenny' + base_url = reverse_lazy("admissions:academy_cohort") + url = f"{base_url}?location=they-killed-kenny" 
response = self.client.get(url) json = response.json() @@ -1957,86 +2107,90 @@ def test_academy_cohort_with_data_with_bad_location(self): self.assertEqual(self.all_cohort_time_slot_dict(), []) self.assertEqual(cohort_saved.send_robust.call_args_list, []) - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_cohort_with_data_with_location(self): """Test /cohort without auth""" from breathecode.admissions.signals import cohort_saved cohort_kwargs = { - 'kickoff_date': datetime.today().isoformat(), - 'ending_date': future_date.isoformat(), + "kickoff_date": datetime.today().isoformat(), + "ending_date": future_date.isoformat(), } self.headers(academy=1) - model = self.bc.database.create(authenticate=True, - cohort=cohort_kwargs, - profile_academy=True, - capability='read_all_cohort', - role='potato', - syllabus=True, - syllabus_version=True, - syllabus_schedule=True) + model = self.bc.database.create( + authenticate=True, + cohort=cohort_kwargs, + profile_academy=True, + capability="read_all_cohort", + role="potato", + syllabus=True, + syllabus_version=True, + syllabus_schedule=True, + ) # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] model_dict = self.get_cohort_dict(1) - base_url = reverse_lazy('admissions:academy_cohort') - url = f'{base_url}?location=' + model['academy'].slug + base_url = reverse_lazy("admissions:academy_cohort") + url = f"{base_url}?location=" + model["academy"].slug response = self.client.get(url) json = response.json() - expected = [{ - 'id': model['cohort'].id, - 'slug': model['cohort'].slug, - 'name': model['cohort'].name, - 'never_ends': model['cohort'].never_ends, - 'remote_available': model['cohort'].remote_available, - 'private': model['cohort'].private, - 'kickoff_date': self.datetime_to_iso(model['cohort'].kickoff_date), - 'ending_date': self.datetime_to_iso(model['cohort'].ending_date), - 'stage': model['cohort'].stage, - 'language': model['cohort'].language, - 'current_day': model['cohort'].current_day, - 'current_module': None, - 'online_meeting_url': model['cohort'].online_meeting_url, - 'timezone': model['cohort'].timezone, - 'timeslots': [], - 'is_hidden_on_prework': model['cohort'].is_hidden_on_prework, - 'available_as_saas': model['cohort'].available_as_saas, - 'schedule': { - 'id': model['cohort'].schedule.id, - 'name': model['cohort'].schedule.name, - 'syllabus': model['cohort'].schedule.syllabus.id, - }, - 'syllabus_version': { - 'name': model.syllabus.name, - 'slug': model.syllabus.slug, - 'status': model['cohort'].syllabus_version.status, - 'version': model['cohort'].syllabus_version.version, - 'syllabus': model['cohort'].syllabus_version.syllabus.id, - 'duration_in_days': model.syllabus.duration_in_days, - 'duration_in_hours': model.syllabus.duration_in_hours, - 'github_url': model.syllabus.github_url, - 'logo': model.syllabus.logo, - 'private': model.syllabus.private, - 'week_hours': model.syllabus.week_hours, - }, - 'academy': { - 'id': model['cohort'].academy.id, - 'slug': 
model['cohort'].academy.slug, - 'name': model['cohort'].academy.name, - 'country': { - 'code': model['cohort'].academy.country.code, - 'name': model['cohort'].academy.country.name, + expected = [ + { + "id": model["cohort"].id, + "slug": model["cohort"].slug, + "name": model["cohort"].name, + "never_ends": model["cohort"].never_ends, + "remote_available": model["cohort"].remote_available, + "private": model["cohort"].private, + "kickoff_date": self.datetime_to_iso(model["cohort"].kickoff_date), + "ending_date": self.datetime_to_iso(model["cohort"].ending_date), + "stage": model["cohort"].stage, + "language": model["cohort"].language, + "current_day": model["cohort"].current_day, + "current_module": None, + "online_meeting_url": model["cohort"].online_meeting_url, + "timezone": model["cohort"].timezone, + "timeslots": [], + "is_hidden_on_prework": model["cohort"].is_hidden_on_prework, + "available_as_saas": model["cohort"].available_as_saas, + "schedule": { + "id": model["cohort"].schedule.id, + "name": model["cohort"].schedule.name, + "syllabus": model["cohort"].schedule.syllabus.id, }, - 'city': { - 'name': model['cohort'].academy.city.name, + "syllabus_version": { + "name": model.syllabus.name, + "slug": model.syllabus.slug, + "status": model["cohort"].syllabus_version.status, + "version": model["cohort"].syllabus_version.version, + "syllabus": model["cohort"].syllabus_version.syllabus.id, + "duration_in_days": model.syllabus.duration_in_days, + "duration_in_hours": model.syllabus.duration_in_hours, + "github_url": model.syllabus.github_url, + "logo": model.syllabus.logo, + "private": model.syllabus.private, + "week_hours": model.syllabus.week_hours, }, - 'logo_url': model['cohort'].academy.logo_url, - 'is_hidden_on_prework': model['cohort'].academy.is_hidden_on_prework - }, - }] + "academy": { + "id": model["cohort"].academy.id, + "slug": model["cohort"].academy.slug, + "name": model["cohort"].academy.name, + "country": { + "code": model["cohort"].academy.country.code, + "name": model["cohort"].academy.country.name, + }, + "city": { + "name": model["cohort"].academy.city.name, + }, + "logo_url": model["cohort"].academy.logo_url, + "is_hidden_on_prework": model["cohort"].academy.is_hidden_on_prework, + }, + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -2045,86 +2199,90 @@ def test_academy_cohort_with_data_with_location(self): self.assertEqual(self.all_cohort_time_slot_dict(), []) self.assertEqual(cohort_saved.send_robust.call_args_list, []) - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_cohort_with_data_with_location_with_comma(self): """Test /cohort without auth""" from breathecode.admissions.signals import cohort_saved cohort_kwargs = { - 'kickoff_date': datetime.today().isoformat(), - 'ending_date': future_date.isoformat(), + "kickoff_date": datetime.today().isoformat(), + "ending_date": future_date.isoformat(), } self.headers(academy=1) - model = self.bc.database.create(authenticate=True, - 
cohort=cohort_kwargs, - profile_academy=True, - capability='read_all_cohort', - role='potato', - syllabus=True, - syllabus_version=True, - syllabus_schedule=True) + model = self.bc.database.create( + authenticate=True, + cohort=cohort_kwargs, + profile_academy=True, + capability="read_all_cohort", + role="potato", + syllabus=True, + syllabus_version=True, + syllabus_schedule=True, + ) # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] model_dict = self.get_cohort_dict(1) - base_url = reverse_lazy('admissions:academy_cohort') - url = f'{base_url}?location=' + model['academy'].slug + ',they-killed-kenny' + base_url = reverse_lazy("admissions:academy_cohort") + url = f"{base_url}?location=" + model["academy"].slug + ",they-killed-kenny" response = self.client.get(url) json = response.json() - expected = [{ - 'id': model['cohort'].id, - 'slug': model['cohort'].slug, - 'name': model['cohort'].name, - 'never_ends': model['cohort'].never_ends, - 'remote_available': model['cohort'].remote_available, - 'private': model['cohort'].private, - 'kickoff_date': self.datetime_to_iso(model['cohort'].kickoff_date), - 'ending_date': self.datetime_to_iso(model['cohort'].ending_date), - 'stage': model['cohort'].stage, - 'language': model['cohort'].language, - 'current_day': model['cohort'].current_day, - 'current_module': None, - 'online_meeting_url': model['cohort'].online_meeting_url, - 'timezone': model['cohort'].timezone, - 'timeslots': [], - 'is_hidden_on_prework': model['cohort'].is_hidden_on_prework, - 'available_as_saas': model['cohort'].available_as_saas, - 'schedule': { - 'id': model['cohort'].schedule.id, - 'name': model['cohort'].schedule.name, - 'syllabus': model['cohort'].schedule.syllabus.id, - }, - 'syllabus_version': { - 'name': model.syllabus.name, - 'slug': model.syllabus.slug, - 'status': model['cohort'].syllabus_version.status, - 'version': model['cohort'].syllabus_version.version, - 'syllabus': model['cohort'].syllabus_version.syllabus.id, - 'duration_in_days': model.syllabus.duration_in_days, - 'duration_in_hours': model.syllabus.duration_in_hours, - 'github_url': model.syllabus.github_url, - 'logo': model.syllabus.logo, - 'private': model.syllabus.private, - 'week_hours': model.syllabus.week_hours, - }, - 'academy': { - 'id': model['cohort'].academy.id, - 'slug': model['cohort'].academy.slug, - 'name': model['cohort'].academy.name, - 'country': { - 'code': model['cohort'].academy.country.code, - 'name': model['cohort'].academy.country.name, + expected = [ + { + "id": model["cohort"].id, + "slug": model["cohort"].slug, + "name": model["cohort"].name, + "never_ends": model["cohort"].never_ends, + "remote_available": model["cohort"].remote_available, + "private": model["cohort"].private, + "kickoff_date": self.datetime_to_iso(model["cohort"].kickoff_date), + "ending_date": self.datetime_to_iso(model["cohort"].ending_date), + "stage": model["cohort"].stage, + "language": model["cohort"].language, + "current_day": model["cohort"].current_day, + "current_module": None, + "online_meeting_url": model["cohort"].online_meeting_url, + "timezone": model["cohort"].timezone, + "timeslots": [], + "is_hidden_on_prework": model["cohort"].is_hidden_on_prework, + "available_as_saas": model["cohort"].available_as_saas, + "schedule": { + "id": model["cohort"].schedule.id, + "name": model["cohort"].schedule.name, + "syllabus": model["cohort"].schedule.syllabus.id, }, - 'city': { - 'name': model['cohort'].academy.city.name, + "syllabus_version": { + "name": 
model.syllabus.name, + "slug": model.syllabus.slug, + "status": model["cohort"].syllabus_version.status, + "version": model["cohort"].syllabus_version.version, + "syllabus": model["cohort"].syllabus_version.syllabus.id, + "duration_in_days": model.syllabus.duration_in_days, + "duration_in_hours": model.syllabus.duration_in_hours, + "github_url": model.syllabus.github_url, + "logo": model.syllabus.logo, + "private": model.syllabus.private, + "week_hours": model.syllabus.week_hours, }, - 'logo_url': model['cohort'].academy.logo_url, - 'is_hidden_on_prework': model['cohort'].academy.is_hidden_on_prework - }, - }] + "academy": { + "id": model["cohort"].academy.id, + "slug": model["cohort"].academy.slug, + "name": model["cohort"].academy.name, + "country": { + "code": model["cohort"].academy.country.code, + "name": model["cohort"].academy.country.name, + }, + "city": { + "name": model["cohort"].academy.city.name, + }, + "logo_url": model["cohort"].academy.logo_url, + "is_hidden_on_prework": model["cohort"].academy.is_hidden_on_prework, + }, + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -2133,34 +2291,36 @@ def test_academy_cohort_with_data_with_location_with_comma(self): self.assertEqual(self.all_cohort_time_slot_dict(), []) self.assertEqual(cohort_saved.send_robust.call_args_list, []) - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_cohort_with_ten_datas_with_location_with_comma(self): """Test /cohort without auth""" from breathecode.admissions.signals import cohort_saved cohort_kwargs = { - 'kickoff_date': datetime.today().isoformat(), - 'ending_date': future_date.isoformat(), + "kickoff_date": datetime.today().isoformat(), + "ending_date": future_date.isoformat(), } self.headers(academy=1) models = [ - self.bc.database.create(authenticate=True, - cohort={ - 'kickoff_date': datetime.today().isoformat(), - 'ending_date': future_date.isoformat(), - }, - profile_academy=True, - capability='read_all_cohort', - role='potato', - syllabus=True, - syllabus_version=True, - syllabus_schedule=True) + self.bc.database.create( + authenticate=True, + cohort={ + "kickoff_date": datetime.today().isoformat(), + "ending_date": future_date.isoformat(), + }, + profile_academy=True, + capability="read_all_cohort", + role="potato", + syllabus=True, + syllabus_version=True, + syllabus_schedule=True, + ) ] base = models[0].copy() - del base['cohort'] + del base["cohort"] models = models + [self.bc.database.create(cohort=cohort_kwargs, models=base) for index in range(0, 9)] models.sort(key=lambda x: x.cohort.kickoff_date, reverse=True) @@ -2168,124 +2328,131 @@ def test_academy_cohort_with_ten_datas_with_location_with_comma(self): # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] - self.client.force_authenticate(user=models[0]['user']) - base_url = reverse_lazy('admissions:academy_cohort') - params = ','.join([model['academy'].slug for model in models]) - url = f'{base_url}?location={params}' + 
self.client.force_authenticate(user=models[0]["user"]) + base_url = reverse_lazy("admissions:academy_cohort") + params = ",".join([model["academy"].slug for model in models]) + url = f"{base_url}?location={params}" response = self.client.get(url) json = response.json() - expected = [{ - 'id': model['cohort'].id, - 'slug': model['cohort'].slug, - 'name': model['cohort'].name, - 'never_ends': model['cohort'].never_ends, - 'remote_available': model['cohort'].remote_available, - 'private': model['cohort'].private, - 'language': model['cohort'].language, - 'kickoff_date': self.bc.datetime.to_iso_string(model['cohort'].kickoff_date), - 'ending_date': self.bc.datetime.to_iso_string(model['cohort'].ending_date), - 'stage': model['cohort'].stage, - 'current_day': model['cohort'].current_day, - 'current_module': None, - 'online_meeting_url': model['cohort'].online_meeting_url, - 'timezone': model['cohort'].timezone, - 'timeslots': [], - 'is_hidden_on_prework': model['cohort'].is_hidden_on_prework, - 'available_as_saas': model['cohort'].available_as_saas, - 'schedule': { - 'id': model['cohort'].schedule.id, - 'name': model['cohort'].schedule.name, - 'syllabus': model['cohort'].schedule.syllabus.id, - }, - 'syllabus_version': { - 'name': model.syllabus.name, - 'slug': model.syllabus.slug, - 'status': model['cohort'].syllabus_version.status, - 'version': model['cohort'].syllabus_version.version, - 'syllabus': model['cohort'].syllabus_version.syllabus.id, - 'duration_in_days': model.syllabus.duration_in_days, - 'duration_in_hours': model.syllabus.duration_in_hours, - 'github_url': model.syllabus.github_url, - 'logo': model.syllabus.logo, - 'private': model.syllabus.private, - 'week_hours': model.syllabus.week_hours, - }, - 'academy': { - 'id': model['cohort'].academy.id, - 'slug': model['cohort'].academy.slug, - 'name': model['cohort'].academy.name, - 'country': { - 'code': model['cohort'].academy.country.code, - 'name': model['cohort'].academy.country.name, + expected = [ + { + "id": model["cohort"].id, + "slug": model["cohort"].slug, + "name": model["cohort"].name, + "never_ends": model["cohort"].never_ends, + "remote_available": model["cohort"].remote_available, + "private": model["cohort"].private, + "language": model["cohort"].language, + "kickoff_date": self.bc.datetime.to_iso_string(model["cohort"].kickoff_date), + "ending_date": self.bc.datetime.to_iso_string(model["cohort"].ending_date), + "stage": model["cohort"].stage, + "current_day": model["cohort"].current_day, + "current_module": None, + "online_meeting_url": model["cohort"].online_meeting_url, + "timezone": model["cohort"].timezone, + "timeslots": [], + "is_hidden_on_prework": model["cohort"].is_hidden_on_prework, + "available_as_saas": model["cohort"].available_as_saas, + "schedule": { + "id": model["cohort"].schedule.id, + "name": model["cohort"].schedule.name, + "syllabus": model["cohort"].schedule.syllabus.id, }, - 'city': { - 'name': model['cohort'].academy.city.name, + "syllabus_version": { + "name": model.syllabus.name, + "slug": model.syllabus.slug, + "status": model["cohort"].syllabus_version.status, + "version": model["cohort"].syllabus_version.version, + "syllabus": model["cohort"].syllabus_version.syllabus.id, + "duration_in_days": model.syllabus.duration_in_days, + "duration_in_hours": model.syllabus.duration_in_hours, + "github_url": model.syllabus.github_url, + "logo": model.syllabus.logo, + "private": model.syllabus.private, + "week_hours": model.syllabus.week_hours, }, - 'logo_url': model['cohort'].academy.logo_url, - 
'is_hidden_on_prework': model['cohort'].academy.is_hidden_on_prework - }, - } for model in models] + "academy": { + "id": model["cohort"].academy.id, + "slug": model["cohort"].academy.slug, + "name": model["cohort"].academy.name, + "country": { + "code": model["cohort"].academy.country.code, + "name": model["cohort"].academy.country.name, + }, + "city": { + "name": model["cohort"].academy.city.name, + }, + "logo_url": model["cohort"].academy.logo_url, + "is_hidden_on_prework": model["cohort"].academy.is_hidden_on_prework, + }, + } + for model in models + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self_all_cohort = self.bc.database.list_of('admissions.Cohort') + self_all_cohort = self.bc.database.list_of("admissions.Cohort") for j in self_all_cohort: - del j['ending_date'] - del j['kickoff_date'] + del j["ending_date"] + del j["kickoff_date"] self_all_model = self.all_model_dict([x.cohort for x in models]) for j in self_all_model: - del j['ending_date'] - del j['kickoff_date'] + del j["ending_date"] + del j["kickoff_date"] self.assertEqual(self_all_cohort, self_all_model) self.assertEqual(self.all_cohort_time_slot_dict(), []) self.assertEqual(cohort_saved.send_robust.call_args_list, []) - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_cohort_delete_without_args_in_url_or_bulk(self): """Test /cohort/:id/user without auth""" from breathecode.admissions.signals import cohort_saved self.headers(academy=1) - model = self.bc.database.create(authenticate=True, - profile_academy=True, - capability='crud_cohort', - role='potato') + model = self.bc.database.create( + authenticate=True, profile_academy=True, capability="crud_cohort", role="potato" + ) # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] - url = reverse_lazy('admissions:academy_cohort') + url = reverse_lazy("admissions:academy_cohort") response = self.client.delete(url) json = response.json() - expected = {'detail': 'Missing cohort_id', 'status_code': 400} + expected = {"detail": "Missing cohort_id", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), [{ - **self.model_to_dict(model, 'cohort'), - }]) + self.assertEqual( + self.bc.database.list_of("admissions.Cohort"), + [ + { + **self.model_to_dict(model, "cohort"), + } + ], + ) self.assertEqual(self.all_cohort_time_slot_dict(), []) self.assertEqual(cohort_saved.send_robust.call_args_list, []) - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) 
+ @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_cohort_delete_in_bulk_with_students(self): """Test /cohort/:id/user without auth""" from breathecode.admissions.signals import cohort_saved self.headers(academy=1) - many_fields = ['id'] + many_fields = ["id"] - base = self.bc.database.create(academy=True, capability='crud_cohort', role='potato') + base = self.bc.database.create(academy=True, capability="crud_cohort", role="potato") expected = { - 'detail': 'cohort-has-students', - 'status_code': 400, + "detail": "cohort-has-students", + "status_code": 400, } for field in many_fields: @@ -2294,153 +2461,168 @@ def test_academy_cohort_delete_in_bulk_with_students(self): # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] - value = getattr(model['cohort'], field) + value = getattr(model["cohort"], field) - url = (reverse_lazy('admissions:academy_cohort') + f'?{field}=' + str(value)) + url = reverse_lazy("admissions:academy_cohort") + f"?{field}=" + str(value) response = self.client.delete(url) json = response.json() self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), [{**self.model_to_dict(model, 'cohort')}]) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), [{**self.model_to_dict(model, "cohort")}]) self.assertEqual(cohort_saved.send_robust.call_args_list, []) - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_cohort_delete_in_bulk_with_one(self): """Test /cohort/:id/user without auth""" from breathecode.admissions.signals import cohort_saved self.headers(academy=1) - many_fields = ['id'] - base = self.bc.database.create(academy=True, capability='crud_cohort', role='potato') + many_fields = ["id"] + base = self.bc.database.create(academy=True, capability="crud_cohort", role="potato") for field in many_fields: cohort_kwargs = { - 'kickoff_date': timezone.now(), - 'ending_date': timezone.now(), - 'timezone': choice(['-1', '-2', '-3', '-4', '-5']), + "kickoff_date": timezone.now(), + "ending_date": timezone.now(), + "timezone": choice(["-1", "-2", "-3", "-4", "-5"]), } model = self.bc.database.create(authenticate=True, profile_academy=True, cohort=cohort_kwargs, models=base) # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] - value = getattr(model['cohort'], field) + value = getattr(model["cohort"], field) - url = (reverse_lazy('admissions:academy_cohort') + f'?{field}=' + str(value)) + url = reverse_lazy("admissions:academy_cohort") + f"?{field}=" + str(value) response = self.client.delete(url) self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) self.assertEqual(self.count_cohort_user(), 0) - self.assertEqual(self.count_cohort_stage(model['cohort'].id), 'DELETED') - self.assertEqual(cohort_saved.send_robust.call_args_list, - [call(instance=model.cohort, 
sender=model.cohort.__class__, created=False)]) - - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + self.assertEqual(self.count_cohort_stage(model["cohort"].id), "DELETED") + self.assertEqual( + cohort_saved.send_robust.call_args_list, + [call(instance=model.cohort, sender=model.cohort.__class__, created=False)], + ) + + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_cohort_delete_in_bulk_with_two(self): """Test /cohort/:id/user without auth""" from breathecode.admissions.signals import cohort_saved self.headers(academy=1) - many_fields = ['id'] + many_fields = ["id"] - base = self.bc.database.create(academy=True, capability='crud_cohort', role='potato') + base = self.bc.database.create(academy=True, capability="crud_cohort", role="potato") for field in many_fields: cohort_kwargs = { - 'kickoff_date': timezone.now(), - 'ending_date': timezone.now(), - 'timezone': choice(['-1', '-2', '-3', '-4', '-5']), + "kickoff_date": timezone.now(), + "ending_date": timezone.now(), + "timezone": choice(["-1", "-2", "-3", "-4", "-5"]), } - model1 = self.bc.database.create(authenticate=True, - profile_academy=True, - syllabus=True, - cohort=cohort_kwargs, - models=base) + model1 = self.bc.database.create( + authenticate=True, profile_academy=True, syllabus=True, cohort=cohort_kwargs, models=base + ) cohort_kwargs = { - 'kickoff_date': timezone.now(), - 'ending_date': timezone.now(), - 'timezone': choice(['-1', '-2', '-3', '-4', '-5']), + "kickoff_date": timezone.now(), + "ending_date": timezone.now(), + "timezone": choice(["-1", "-2", "-3", "-4", "-5"]), } model2 = self.bc.database.create(profile_academy=True, syllabus=True, cohort=cohort_kwargs, models=base) # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] - value1 = getattr(model1['cohort'], field) + value1 = getattr(model1["cohort"], field) value1 = self.datetime_to_iso(value1) if isinstance(value1, datetime) else value1 - value2 = getattr(model2['cohort'], field) + value2 = getattr(model2["cohort"], field) value2 = self.datetime_to_iso(value2) if isinstance(value2, datetime) else value2 - url = (reverse_lazy('admissions:academy_cohort') + f'?{field}=' + str(value1) + ',' + str(value2)) + url = reverse_lazy("admissions:academy_cohort") + f"?{field}=" + str(value1) + "," + str(value2) response = self.client.delete(url) self.assertEqual(self.count_cohort_user(), 0) self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) - self.assertEqual(self.count_cohort_stage(model1['cohort'].id), 'DELETED') - self.assertEqual(self.count_cohort_stage(model2['cohort'].id), 'DELETED') - self.assertEqual(cohort_saved.send_robust.call_args_list, [ - call(instance=model1.cohort, sender=model1.cohort.__class__, created=False), - call(instance=model2.cohort, sender=model2.cohort.__class__, created=False), - ]) - - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch.object(APIViewExtensionHandlers, '_spy_extensions', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', 
MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + self.assertEqual(self.count_cohort_stage(model1["cohort"].id), "DELETED") + self.assertEqual(self.count_cohort_stage(model2["cohort"].id), "DELETED") + self.assertEqual( + cohort_saved.send_robust.call_args_list, + [ + call(instance=model1.cohort, sender=model1.cohort.__class__, created=False), + call(instance=model2.cohort, sender=model2.cohort.__class__, created=False), + ], + ) + + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch.object(APIViewExtensionHandlers, "_spy_extensions", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_cohort__spy_extensions(self): """Test /cohort without auth""" from breathecode.admissions.signals import cohort_saved self.headers(academy=1) - url = reverse_lazy('admissions:academy_cohort') - model = self.bc.database.create(authenticate=True, - profile_academy=True, - capability='read_all_cohort', - role='potato', - syllabus=True, - skip_cohort=True) + url = reverse_lazy("admissions:academy_cohort") + model = self.bc.database.create( + authenticate=True, + profile_academy=True, + capability="read_all_cohort", + role="potato", + syllabus=True, + skip_cohort=True, + ) # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] self.client.get(url) - self.assertEqual(APIViewExtensionHandlers._spy_extensions.call_args_list, [ - call(['CacheExtension', 'LanguageExtension', 'LookupExtension', 'PaginationExtension', 'SortExtension']), - ]) - - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch.object(APIViewExtensionHandlers, '_spy_extension_arguments', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + self.assertEqual( + APIViewExtensionHandlers._spy_extensions.call_args_list, + [ + call( + ["CacheExtension", "LanguageExtension", "LookupExtension", "PaginationExtension", "SortExtension"] + ), + ], + ) + + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch.object(APIViewExtensionHandlers, "_spy_extension_arguments", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_cohort__spy_extension_arguments(self): """Test /cohort without auth""" from breathecode.admissions.signals import cohort_saved self.headers(academy=1) - url = reverse_lazy('admissions:academy_cohort') - model = self.bc.database.create(authenticate=True, - profile_academy=True, - capability='read_all_cohort', - role='potato', - syllabus=True, - skip_cohort=True) + url = reverse_lazy("admissions:academy_cohort") + model = self.bc.database.create( + authenticate=True, + profile_academy=True, + capability="read_all_cohort", + role="potato", + syllabus=True, + skip_cohort=True, + ) # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] self.client.get(url) - self.assertEqual(APIViewExtensionHandlers._spy_extension_arguments.call_args_list, [ - call(cache=CohortCache, 
sort='-kickoff_date', paginate=True), - ]) + self.assertEqual( + APIViewExtensionHandlers._spy_extension_arguments.call_args_list, + [ + call(cache=CohortCache, sort="-kickoff_date", paginate=True), + ], + ) diff --git a/breathecode/admissions/tests/urls/tests_academy_cohort_id.py b/breathecode/admissions/tests/urls/tests_academy_cohort_id.py index 45cfbe6f9..0181e2461 100644 --- a/breathecode/admissions/tests/urls/tests_academy_cohort_id.py +++ b/breathecode/admissions/tests/urls/tests_academy_cohort_id.py @@ -1,6 +1,7 @@ """ Test /cohort """ + from datetime import timedelta from unittest.mock import MagicMock, call, patch @@ -23,56 +24,53 @@ class AcademyCohortIdTestSuite(AdmissionsTestCase): 🔽🔽🔽 Auth """ - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id__without_auth(self): """Test /cohort/:id without auth""" self.headers(academy=1) - url = reverse_lazy('admissions:academy_cohort_id', kwargs={'cohort_id': 1}) + url = reverse_lazy("admissions:academy_cohort_id", kwargs={"cohort_id": 1}) response = self.client.put(url, {}) json = response.json() - self.assertEqual(json, { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED - }) + self.assertEqual( + json, + {"detail": "Authentication credentials were not provided.", "status_code": status.HTTP_401_UNAUTHORIZED}, + ) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id_put__without_capability(self): """Test /cohort/:id without auth""" self.headers(academy=1) - url = reverse_lazy('admissions:academy_cohort_id', kwargs={'cohort_id': 1}) + url = reverse_lazy("admissions:academy_cohort_id", kwargs={"cohort_id": 1}) self.generate_models(authenticate=True) data = {} response = self.client.put(url, data) json = response.json() - self.assertEqual(json, { - 'detail': "You (user: 1) don't have this capability: crud_cohort for academy 1", - 'status_code': 403 - }) + self.assertEqual( + json, {"detail": "You (user: 1) don't have this capability: crud_cohort for academy 1", "status_code": 403} + ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) """ 🔽🔽🔽 Put without cohort """ - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id__put__without_cohort(self): """Test 
/cohort/:id without auth""" from breathecode.admissions.signals import cohort_saved self.headers(academy=1) - url = reverse_lazy('admissions:academy_cohort_id', kwargs={'cohort_id': 99999}) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_cohort', - role='potato', - syllabus=True) + url = reverse_lazy("admissions:academy_cohort_id", kwargs={"cohort_id": 99999}) + model = self.generate_models( + authenticate=True, profile_academy=True, capability="crud_cohort", role="potato", syllabus=True + ) # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] @@ -81,26 +79,26 @@ def test_cohort_id__put__without_cohort(self): response = self.client.put(url, data) json = response.json() - self.assertEqual(json, {'status_code': 400, 'detail': 'Specified cohort not be found'}) + self.assertEqual(json, {"status_code": 400, "detail": "Specified cohort not be found"}) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), [{**self.model_to_dict(model, 'cohort')}]) - self.assertEqual(self.bc.database.list_of('admissions.CohortTimeSlot'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), [{**self.model_to_dict(model, "cohort")}]) + self.assertEqual(self.bc.database.list_of("admissions.CohortTimeSlot"), []) self.assertEqual(cohort_saved.send_robust.call_args_list, []) """ 🔽🔽🔽 Put not have ending_date and never_ends """ - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id__put__without_ending_date_or_never_ends(self): """Test /cohort/:id without auth""" from breathecode.admissions.signals import cohort_saved self.headers(academy=1) - url = reverse_lazy('admissions:academy_cohort_id', kwargs={'cohort_id': 1}) - model = self.generate_models(authenticate=True, profile_academy=True, capability='crud_cohort', role='potato') + url = reverse_lazy("admissions:academy_cohort_id", kwargs={"cohort_id": 1}) + model = self.generate_models(authenticate=True, profile_academy=True, capability="crud_cohort", role="potato") # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] @@ -109,157 +107,161 @@ def test_cohort_id__put__without_ending_date_or_never_ends(self): response = self.client.put(url, data) json = response.json() expected = { - 'detail': 'cohort-without-ending-date-and-never-ends', - 'status_code': 400, + "detail": "cohort-without-ending-date-and-never-ends", + "status_code": 400, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), [{**self.model_to_dict(model, 'cohort')}]) - self.assertEqual(self.bc.database.list_of('admissions.CohortTimeSlot'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), [{**self.model_to_dict(model, "cohort")}]) + self.assertEqual(self.bc.database.list_of("admissions.CohortTimeSlot"), []) 
self.assertEqual(cohort_saved.send_robust.call_args_list, []) """ 🔽🔽🔽 Put with ending_date and never_ends=True """ - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id__put__with_ending_date_and_never_ends_true(self): """Test /cohort/:id without auth""" from breathecode.admissions.signals import cohort_saved self.headers(academy=1) - url = reverse_lazy('admissions:academy_cohort_id', kwargs={'cohort_id': 1}) - model = self.generate_models(authenticate=True, profile_academy=True, capability='crud_cohort', role='potato') + url = reverse_lazy("admissions:academy_cohort_id", kwargs={"cohort_id": 1}) + model = self.generate_models(authenticate=True, profile_academy=True, capability="crud_cohort", role="potato") # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] data = { - 'ending_date': timezone.now().isoformat(), - 'never_ends': True, + "ending_date": timezone.now().isoformat(), + "never_ends": True, } response = self.client.put(url, data) json = response.json() expected = { - 'detail': 'cohort-with-ending-date-and-never-ends', - 'status_code': 400, + "detail": "cohort-with-ending-date-and-never-ends", + "status_code": 400, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), [{**self.model_to_dict(model, 'cohort')}]) - self.assertEqual(self.bc.database.list_of('admissions.CohortTimeSlot'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), [{**self.model_to_dict(model, "cohort")}]) + self.assertEqual(self.bc.database.list_of("admissions.CohortTimeSlot"), []) self.assertEqual(cohort_saved.send_robust.call_args_list, []) """ 🔽🔽🔽 Put with date """ - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id__put(self): """Test /cohort/:id without auth""" from breathecode.admissions.signals import cohort_saved self.headers(academy=1) - url = reverse_lazy('admissions:academy_cohort_id', kwargs={'cohort_id': 1}) - model = self.generate_models(authenticate=True, profile_academy=True, capability='crud_cohort', role='potato') + url = reverse_lazy("admissions:academy_cohort_id", kwargs={"cohort_id": 1}) + model = self.generate_models(authenticate=True, profile_academy=True, capability="crud_cohort", role="potato") # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] data = { - 'never_ends': True, + "never_ends": True, } response = self.client.put(url, 
data) json = response.json() expected = { - 'id': model['cohort'].id, - 'slug': model['cohort'].slug, - 'name': model['cohort'].name, - 'never_ends': True, - 'remote_available': True, - 'private': False, - 'kickoff_date': self.datetime_to_iso(model['cohort'].kickoff_date), - 'ending_date': model['cohort'].ending_date, - 'current_day': model['cohort'].current_day, - 'current_module': model['cohort'].current_module, - 'stage': model['cohort'].stage, - 'language': model['cohort'].language, - 'syllabus_version': model['cohort'].syllabus_version, - 'schedule': model['cohort'].schedule, - 'online_meeting_url': model['cohort'].online_meeting_url, - 'timezone': model['cohort'].timezone, - 'timeslots': [], - 'is_hidden_on_prework': model['cohort'].is_hidden_on_prework, - 'available_as_saas': model['cohort'].available_as_saas, - 'academy': { - 'id': model.academy.id, - 'slug': model.academy.slug, - 'name': model.academy.name, - 'country': { - 'code': model.academy.country.code, - 'name': model.academy.country.name, + "id": model["cohort"].id, + "slug": model["cohort"].slug, + "name": model["cohort"].name, + "never_ends": True, + "remote_available": True, + "private": False, + "kickoff_date": self.datetime_to_iso(model["cohort"].kickoff_date), + "ending_date": model["cohort"].ending_date, + "current_day": model["cohort"].current_day, + "current_module": model["cohort"].current_module, + "stage": model["cohort"].stage, + "language": model["cohort"].language, + "syllabus_version": model["cohort"].syllabus_version, + "schedule": model["cohort"].schedule, + "online_meeting_url": model["cohort"].online_meeting_url, + "timezone": model["cohort"].timezone, + "timeslots": [], + "is_hidden_on_prework": model["cohort"].is_hidden_on_prework, + "available_as_saas": model["cohort"].available_as_saas, + "academy": { + "id": model.academy.id, + "slug": model.academy.slug, + "name": model.academy.name, + "country": { + "code": model.academy.country.code, + "name": model.academy.country.name, }, - 'city': { - 'name': model.academy.city.name, + "city": { + "name": model.academy.city.name, }, - 'logo_url': model.academy.logo_url, - 'is_hidden_on_prework': model.academy.is_hidden_on_prework - } + "logo_url": model.academy.logo_url, + "is_hidden_on_prework": model.academy.is_hidden_on_prework, + }, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), - [{ - **self.model_to_dict(model, 'cohort'), - 'never_ends': True, - }]) - self.assertEqual(self.bc.database.list_of('admissions.CohortTimeSlot'), []) - self.assertEqual(cohort_saved.send_robust.call_args_list, - [call(instance=model.cohort, sender=model.cohort.__class__, created=False)]) + self.assertEqual( + self.bc.database.list_of("admissions.Cohort"), + [ + { + **self.model_to_dict(model, "cohort"), + "never_ends": True, + } + ], + ) + self.assertEqual(self.bc.database.list_of("admissions.CohortTimeSlot"), []) + self.assertEqual( + cohort_saved.send_robust.call_args_list, + [call(instance=model.cohort, sender=model.cohort.__class__, created=False)], + ) """ 🔽🔽🔽 Put with date, kickoff_date greater than ending_date """ - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", 
MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id__put__kickoff_date_greater_than_ending_date(self): """Test /cohort/:id without auth""" from breathecode.admissions.signals import cohort_saved self.headers(academy=1) - url = reverse_lazy('admissions:academy_cohort_id', kwargs={'cohort_id': 1}) + url = reverse_lazy("admissions:academy_cohort_id", kwargs={"cohort_id": 1}) utc_now = timezone.now() - cohort = {'kickoff_date': utc_now, 'ending_date': utc_now + timedelta(seconds=1)} - model = self.generate_models(authenticate=True, - cohort=cohort, - profile_academy=True, - capability='crud_cohort', - role='potato') + cohort = {"kickoff_date": utc_now, "ending_date": utc_now + timedelta(seconds=1)} + model = self.generate_models( + authenticate=True, cohort=cohort, profile_academy=True, capability="crud_cohort", role="potato" + ) # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] cases = [ { - 'kickoff_date': utc_now + timedelta(seconds=2), + "kickoff_date": utc_now + timedelta(seconds=2), }, { - 'ending_date': utc_now - timedelta(seconds=1), + "ending_date": utc_now - timedelta(seconds=1), }, { - 'kickoff_date': utc_now + timedelta(seconds=1), - 'ending_date': utc_now, + "kickoff_date": utc_now + timedelta(seconds=1), + "ending_date": utc_now, }, ] @@ -267,260 +269,276 @@ def test_cohort_id__put__kickoff_date_greater_than_ending_date(self): response = self.client.put(url, data) json = response.json() - expected = {'detail': 'kickoff-date-greather-than-ending-date', 'status_code': 400} + expected = {"detail": "kickoff-date-greather-than-ending-date", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), [ - self.model_to_dict(model, 'cohort'), - ]) - self.assertEqual(self.bc.database.list_of('admissions.CohortTimeSlot'), []) + self.assertEqual( + self.bc.database.list_of("admissions.Cohort"), + [ + self.model_to_dict(model, "cohort"), + ], + ) + self.assertEqual(self.bc.database.list_of("admissions.CohortTimeSlot"), []) self.assertEqual(cohort_saved.send_robust.call_args_list, []) """ 🔽🔽🔽 Put syllabus with id instead of {slug}.v{id} """ - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id__put__with_id__with_bad_syllabus_version_malformed(self): """Test /cohort/:id without auth""" from breathecode.admissions.signals import cohort_saved self.headers(academy=1) - model = self.generate_models(authenticate=True, - cohort=True, - profile_academy=True, - capability='crud_cohort', - role='potato', - syllabus=True, - syllabus_version=True) + model = self.generate_models( + authenticate=True, + cohort=True, + profile_academy=True, + capability="crud_cohort", + role="potato", + syllabus=True, + syllabus_version=True, + ) # reset 
because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] - url = reverse_lazy('admissions:academy_cohort_id', kwargs={'cohort_id': model['cohort'].id}) + url = reverse_lazy("admissions:academy_cohort_id", kwargs={"cohort_id": model["cohort"].id}) data = { - 'syllabus': 1, - 'slug': 'they-killed-kenny', - 'name': 'They killed kenny', - 'current_day': model['cohort'].current_day + 1, - 'language': 'es', + "syllabus": 1, + "slug": "they-killed-kenny", + "name": "They killed kenny", + "current_day": model["cohort"].current_day + 1, + "language": "es", } - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() expected = { - 'detail': 'syllabus-field-marformed', - 'status_code': 400, + "detail": "syllabus-field-marformed", + "status_code": 400, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), [{**self.model_to_dict(model, 'cohort')}]) - self.assertEqual(self.bc.database.list_of('admissions.CohortTimeSlot'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), [{**self.model_to_dict(model, "cohort")}]) + self.assertEqual(self.bc.database.list_of("admissions.CohortTimeSlot"), []) self.assertEqual(cohort_saved.send_robust.call_args_list, []) """ 🔽🔽🔽 Put syllabus but it doesn't exists """ - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id__put__with_id__with_bad_syllabus_version(self): """Test /cohort/:id without auth""" from breathecode.admissions.signals import cohort_saved self.headers(academy=1) - model = self.generate_models(authenticate=True, - cohort=True, - profile_academy=True, - capability='crud_cohort', - role='potato', - syllabus=True) + model = self.generate_models( + authenticate=True, cohort=True, profile_academy=True, capability="crud_cohort", role="potato", syllabus=True + ) # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] - url = reverse_lazy('admissions:academy_cohort_id', kwargs={'cohort_id': model['cohort'].id}) + url = reverse_lazy("admissions:academy_cohort_id", kwargs={"cohort_id": model["cohort"].id}) data = { - 'syllabus': 'they-killed-kenny.v1', - 'slug': 'they-killed-kenny', - 'name': 'They killed kenny', - 'current_day': model['cohort'].current_day + 1, - 'language': 'es', + "syllabus": "they-killed-kenny.v1", + "slug": "they-killed-kenny", + "name": "They killed kenny", + "current_day": model["cohort"].current_day + 1, + "language": "es", } response = self.client.put(url, data) json = response.json() expected = { - 'detail': 'syllabus-version-not-found', - 'status_code': 400, + "detail": "syllabus-version-not-found", + "status_code": 400, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), [{**self.model_to_dict(model, 'cohort')}]) - 
self.assertEqual(self.bc.database.list_of('admissions.CohortTimeSlot'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), [{**self.model_to_dict(model, "cohort")}]) + self.assertEqual(self.bc.database.list_of("admissions.CohortTimeSlot"), []) self.assertEqual(cohort_saved.send_robust.call_args_list, []) """ 🔽🔽🔽 Put syllabus with bad slug {slug}.v{id} """ - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id__put__with_id__with_bad_syllabus_version__with_bad_slug(self): """Test /cohort/:id without auth""" from breathecode.admissions.signals import cohort_saved self.headers(academy=1) - syllabus_kwargs = {'slug': 'x'} - model = self.generate_models(authenticate=True, - cohort=True, - profile_academy=True, - capability='crud_cohort', - role='potato', - syllabus_schedule=True, - syllabus_version=True, - syllabus=True, - syllabus_kwargs=syllabus_kwargs) + syllabus_kwargs = {"slug": "x"} + model = self.generate_models( + authenticate=True, + cohort=True, + profile_academy=True, + capability="crud_cohort", + role="potato", + syllabus_schedule=True, + syllabus_version=True, + syllabus=True, + syllabus_kwargs=syllabus_kwargs, + ) # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] - url = reverse_lazy('admissions:academy_cohort_id', kwargs={'cohort_id': model['cohort'].id}) + url = reverse_lazy("admissions:academy_cohort_id", kwargs={"cohort_id": model["cohort"].id}) data = { - 'syllabus': f'they-killed-kenny.v{model.syllabus_version.version}', - 'slug': 'they-killed-kenny', - 'name': 'They killed kenny', - 'current_day': model['cohort'].current_day + 1, - 'language': 'es', + "syllabus": f"they-killed-kenny.v{model.syllabus_version.version}", + "slug": "they-killed-kenny", + "name": "They killed kenny", + "current_day": model["cohort"].current_day + 1, + "language": "es", } response = self.client.put(url, data) json = response.json() expected = { - 'detail': 'syllabus-version-not-found', - 'status_code': 400, + "detail": "syllabus-version-not-found", + "status_code": 400, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), [{**self.model_to_dict(model, 'cohort')}]) - self.assertEqual(self.bc.database.list_of('admissions.CohortTimeSlot'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), [{**self.model_to_dict(model, "cohort")}]) + self.assertEqual(self.bc.database.list_of("admissions.CohortTimeSlot"), []) self.assertEqual(cohort_saved.send_robust.call_args_list, []) """ 🔽🔽🔽 Put syllabus with bad version {slug}.v{id} """ - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + 
@patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id__put__with_id__with_bad_syllabus_version__with_bad_version(self): """Test /cohort/:id without auth""" from breathecode.admissions.signals import cohort_saved self.headers(academy=1) - cohort_kwargs = {'never_ends': True} - syllabus_kwargs = {'slug': 'they-killed-kenny'} - model = self.generate_models(authenticate=True, - cohort=True, - profile_academy=True, - capability='crud_cohort', - role='potato', - syllabus_schedule=True, - syllabus=True, - syllabus_kwargs=syllabus_kwargs, - cohort_kwargs=cohort_kwargs) + cohort_kwargs = {"never_ends": True} + syllabus_kwargs = {"slug": "they-killed-kenny"} + model = self.generate_models( + authenticate=True, + cohort=True, + profile_academy=True, + capability="crud_cohort", + role="potato", + syllabus_schedule=True, + syllabus=True, + syllabus_kwargs=syllabus_kwargs, + cohort_kwargs=cohort_kwargs, + ) # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] - url = reverse_lazy('admissions:academy_cohort_id', kwargs={'cohort_id': model['cohort'].id}) + url = reverse_lazy("admissions:academy_cohort_id", kwargs={"cohort_id": model["cohort"].id}) data = { - 'syllabus': model['syllabus'].slug + '.v999', - 'slug': 'they-killed-kenny', - 'name': 'They killed kenny', - 'current_day': model['cohort'].current_day + 1, - 'language': 'es', + "syllabus": model["syllabus"].slug + ".v999", + "slug": "they-killed-kenny", + "name": "They killed kenny", + "current_day": model["cohort"].current_day + 1, + "language": "es", } - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() expected = { - 'detail': 'syllabus-version-not-found', - 'status_code': 400, + "detail": "syllabus-version-not-found", + "status_code": 400, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), [{**self.model_to_dict(model, 'cohort')}]) - self.assertEqual(self.bc.database.list_of('admissions.CohortTimeSlot'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), [{**self.model_to_dict(model, "cohort")}]) + self.assertEqual(self.bc.database.list_of("admissions.CohortTimeSlot"), []) self.assertEqual(cohort_saved.send_robust.call_args_list, []) """ 🔽🔽🔽 Put assigning the syllabus version 1 """ - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id__put__with_id__assigning_syllabus_version_1(self): """Test /cohort/:id without auth""" from breathecode.admissions.signals import cohort_saved self.headers(academy=1) - cohort_kwargs = {'ending_date': timezone.now()} - syllabus_kwargs = {'slug': 'they-killed-kenny'} - academy = {'timezone': 'Pacific/Pago_Pago'} - timeslot = {'timezone': 'Pacific/Pago_Pago'} - syllabus_version = 
{'version': 1} - model = self.generate_models(authenticate=True, - cohort=True, - academy=academy, - profile_academy=True, - capability='crud_cohort', - role='potato', - syllabus=True, - syllabus_version=syllabus_version, - syllabus_schedule=True, - cohort_kwargs=cohort_kwargs, - syllabus_schedule_time_slot=True, - cohort_time_slot=True, - syllabus_kwargs=syllabus_kwargs) + cohort_kwargs = {"ending_date": timezone.now()} + syllabus_kwargs = {"slug": "they-killed-kenny"} + academy = {"timezone": "Pacific/Pago_Pago"} + timeslot = {"timezone": "Pacific/Pago_Pago"} + syllabus_version = {"version": 1} + model = self.generate_models( + authenticate=True, + cohort=True, + academy=academy, + profile_academy=True, + capability="crud_cohort", + role="potato", + syllabus=True, + syllabus_version=syllabus_version, + syllabus_schedule=True, + cohort_kwargs=cohort_kwargs, + syllabus_schedule_time_slot=True, + cohort_time_slot=True, + syllabus_kwargs=syllabus_kwargs, + ) # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] - model2 = self.generate_models(academy=model.academy, - skip_cohort=True, - syllabus_schedule=True, - syllabus=model.syllabus, - syllabus_schedule_time_slot=(2, timeslot)) - url = reverse_lazy('admissions:academy_cohort_id', kwargs={'cohort_id': model['cohort'].id}) + model2 = self.generate_models( + academy=model.academy, + skip_cohort=True, + syllabus_schedule=True, + syllabus=model.syllabus, + syllabus_schedule_time_slot=(2, timeslot), + ) + url = reverse_lazy("admissions:academy_cohort_id", kwargs={"cohort_id": model["cohort"].id}) data = { - 'syllabus': f'{model.syllabus.slug}.v{model.syllabus_version.version}', - 'slug': 'they-killed-kenny', - 'name': 'They killed kenny', - 'schedule': 2, - 'current_day': model['cohort'].current_day + 1, - 'language': 'es', + "syllabus": f"{model.syllabus.slug}.v{model.syllabus_version.version}", + "slug": "they-killed-kenny", + "name": "They killed kenny", + "schedule": 2, + "current_day": model["cohort"].current_day + 1, + "language": "es", } response = self.client.put(url, data) json = response.json() - expected = {'detail': 'assigning-a-syllabus-version-1', 'status_code': 400} + expected = {"detail": "assigning-a-syllabus-version-1", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), [ - self.bc.format.to_dict(model.cohort), - ]) - - self.assertEqual(self.bc.database.list_of('admissions.CohortTimeSlot'), [ - self.bc.format.to_dict(model.cohort_time_slot), - ]) + self.assertEqual( + self.bc.database.list_of("admissions.Cohort"), + [ + self.bc.format.to_dict(model.cohort), + ], + ) + + self.assertEqual( + self.bc.database.list_of("admissions.CohortTimeSlot"), + [ + self.bc.format.to_dict(model.cohort_time_slot), + ], + ) self.assertEqual(cohort_saved.send_robust.call_args_list, []) @@ -528,749 +546,760 @@ def test_cohort_id__put__with_id__assigning_syllabus_version_1(self): 🔽🔽🔽 Put with some data """ - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + 
@patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id__put__with_id__with_data_in_body(self): """Test /cohort/:id without auth""" from breathecode.admissions.signals import cohort_saved self.headers(academy=1) - cohort_kwargs = {'ending_date': timezone.now()} - syllabus_kwargs = {'slug': 'they-killed-kenny'} - academy = {'timezone': 'Pacific/Pago_Pago'} - timeslot = {'timezone': 'Europe/Amsterdam'} - model = self.generate_models(authenticate=True, - cohort=True, - academy=academy, - profile_academy=True, - capability='crud_cohort', - role='potato', - syllabus=True, - syllabus_version=True, - syllabus_schedule=True, - cohort_kwargs=cohort_kwargs, - syllabus_schedule_time_slot=True, - cohort_time_slot=True, - syllabus_kwargs=syllabus_kwargs) + cohort_kwargs = {"ending_date": timezone.now()} + syllabus_kwargs = {"slug": "they-killed-kenny"} + academy = {"timezone": "Pacific/Pago_Pago"} + timeslot = {"timezone": "Europe/Amsterdam"} + model = self.generate_models( + authenticate=True, + cohort=True, + academy=academy, + profile_academy=True, + capability="crud_cohort", + role="potato", + syllabus=True, + syllabus_version=True, + syllabus_schedule=True, + cohort_kwargs=cohort_kwargs, + syllabus_schedule_time_slot=True, + cohort_time_slot=True, + syllabus_kwargs=syllabus_kwargs, + ) # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] - model2 = self.generate_models(academy=model.academy, - skip_cohort=True, - syllabus_schedule=True, - syllabus=model.syllabus, - syllabus_schedule_time_slot=(2, timeslot)) - url = reverse_lazy('admissions:academy_cohort_id', kwargs={'cohort_id': model['cohort'].id}) + model2 = self.generate_models( + academy=model.academy, + skip_cohort=True, + syllabus_schedule=True, + syllabus=model.syllabus, + syllabus_schedule_time_slot=(2, timeslot), + ) + url = reverse_lazy("admissions:academy_cohort_id", kwargs={"cohort_id": model["cohort"].id}) data = { - 'syllabus': f'{model.syllabus.slug}.v{model.syllabus_version.version}', - 'slug': 'they-killed-kenny', - 'name': 'They killed kenny', - 'schedule': 2, - 'current_day': model['cohort'].current_day + 1, - 'language': 'es', + "syllabus": f"{model.syllabus.slug}.v{model.syllabus_version.version}", + "slug": "they-killed-kenny", + "name": "They killed kenny", + "schedule": 2, + "current_day": model["cohort"].current_day + 1, + "language": "es", } response = self.client.put(url, data) json = response.json() expected = { - 'id': - model['cohort'].id, - 'slug': - data['slug'], - 'name': - data['name'], - 'never_ends': - False, - 'remote_available': - True, - 'private': - False, - 'language': - data['language'], - 'kickoff_date': - self.datetime_to_iso(model['cohort'].kickoff_date) - if model['cohort'].kickoff_date else model['cohort'].kickoff_date, - 'ending_date': - self.datetime_to_iso(model['cohort'].ending_date), - 'current_day': - data['current_day'], - 'current_module': - None, - 'stage': - model['cohort'].stage, - 'online_meeting_url': - model['cohort'].online_meeting_url, - 'timezone': - model['cohort'].timezone, - 'is_hidden_on_prework': - model['cohort'].is_hidden_on_prework, - 'available_as_saas': - model['cohort'].available_as_saas, - 'timeslots': [{ - 'ending_at': - DatetimeInteger.to_iso_string(model.academy.timezone, syllabus_schedule_time_slot.ending_at), - 'id': - syllabus_schedule_time_slot.id, - 'recurrency_type': - syllabus_schedule_time_slot.recurrency_type, - 'recurrent': - 
syllabus_schedule_time_slot.recurrent, - 'starting_at': - DatetimeInteger.to_iso_string(model.academy.timezone, syllabus_schedule_time_slot.starting_at), - } for syllabus_schedule_time_slot in model2.syllabus_schedule_time_slot], - 'schedule': { - 'id': model2.syllabus_schedule.id, - 'name': model2.syllabus_schedule.name, - 'syllabus': model2.syllabus_schedule.syllabus.id, + "id": model["cohort"].id, + "slug": data["slug"], + "name": data["name"], + "never_ends": False, + "remote_available": True, + "private": False, + "language": data["language"], + "kickoff_date": ( + self.datetime_to_iso(model["cohort"].kickoff_date) + if model["cohort"].kickoff_date + else model["cohort"].kickoff_date + ), + "ending_date": self.datetime_to_iso(model["cohort"].ending_date), + "current_day": data["current_day"], + "current_module": None, + "stage": model["cohort"].stage, + "online_meeting_url": model["cohort"].online_meeting_url, + "timezone": model["cohort"].timezone, + "is_hidden_on_prework": model["cohort"].is_hidden_on_prework, + "available_as_saas": model["cohort"].available_as_saas, + "timeslots": [ + { + "ending_at": DatetimeInteger.to_iso_string( + model.academy.timezone, syllabus_schedule_time_slot.ending_at + ), + "id": syllabus_schedule_time_slot.id, + "recurrency_type": syllabus_schedule_time_slot.recurrency_type, + "recurrent": syllabus_schedule_time_slot.recurrent, + "starting_at": DatetimeInteger.to_iso_string( + model.academy.timezone, syllabus_schedule_time_slot.starting_at + ), + } + for syllabus_schedule_time_slot in model2.syllabus_schedule_time_slot + ], + "schedule": { + "id": model2.syllabus_schedule.id, + "name": model2.syllabus_schedule.name, + "syllabus": model2.syllabus_schedule.syllabus.id, }, - 'syllabus_version': { - 'name': model.syllabus.name, - 'slug': model.syllabus.slug, - 'status': model['cohort'].syllabus_version.status, - 'version': model['cohort'].syllabus_version.version, - 'syllabus': model['cohort'].syllabus_version.syllabus.id, - 'duration_in_days': model.syllabus.duration_in_days, - 'duration_in_hours': model.syllabus.duration_in_hours, - 'github_url': model.syllabus.github_url, - 'logo': model.syllabus.logo, - 'private': model.syllabus.private, - 'week_hours': model.syllabus.week_hours, + "syllabus_version": { + "name": model.syllabus.name, + "slug": model.syllabus.slug, + "status": model["cohort"].syllabus_version.status, + "version": model["cohort"].syllabus_version.version, + "syllabus": model["cohort"].syllabus_version.syllabus.id, + "duration_in_days": model.syllabus.duration_in_days, + "duration_in_hours": model.syllabus.duration_in_hours, + "github_url": model.syllabus.github_url, + "logo": model.syllabus.logo, + "private": model.syllabus.private, + "week_hours": model.syllabus.week_hours, }, - 'academy': { - 'id': model.academy.id, - 'slug': model.academy.slug, - 'name': model.academy.name, - 'country': { - 'code': model.academy.country.code, - 'name': model.academy.country.name, + "academy": { + "id": model.academy.id, + "slug": model.academy.slug, + "name": model.academy.name, + "country": { + "code": model.academy.country.code, + "name": model.academy.country.name, }, - 'city': { - 'name': model.academy.city.name, + "city": { + "name": model.academy.city.name, }, - 'logo_url': model.academy.logo_url, - 'is_hidden_on_prework': model.academy.is_hidden_on_prework - } + "logo_url": model.academy.logo_url, + "is_hidden_on_prework": model.academy.is_hidden_on_prework, + }, } self.assertEqual(json, expected) self.assertEqual(response.status_code, 
status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), - [{ - 'academy_id': 1, - 'intro_video': None, - 'accepts_enrollment_suggestions': True, - 'current_day': data['current_day'], - 'current_module': None, - 'ending_date': model['cohort'].ending_date, - 'id': model['cohort'].id, - 'kickoff_date': model['cohort'].kickoff_date, - 'remote_available': model['cohort'].remote_available, - 'online_meeting_url': model['cohort'].online_meeting_url, - 'language': data['language'], - 'name': data['name'], - 'never_ends': False, - 'private': False, - 'history_log': None, - 'slug': data['slug'], - 'stage': model['cohort'].stage, - 'syllabus_version_id': model['cohort'].syllabus_version.id, - 'schedule_id': model2.syllabus_schedule.id, - 'timezone': None, - 'is_hidden_on_prework': True, - 'available_as_saas': model['cohort'].available_as_saas - }]) - - self.assertEqual(self.bc.database.list_of('admissions.CohortTimeSlot'), - [{ - 'cohort_id': 1, - 'removed_at': syllabus_schedule_time_slot.removed_at, - 'ending_at': syllabus_schedule_time_slot.ending_at, - 'id': syllabus_schedule_time_slot.id, - 'timezone': model.academy.timezone, - 'recurrency_type': syllabus_schedule_time_slot.recurrency_type, - 'recurrent': syllabus_schedule_time_slot.recurrent, - 'starting_at': syllabus_schedule_time_slot.starting_at, - } for syllabus_schedule_time_slot in model2.syllabus_schedule_time_slot]) - - self.assertEqual(cohort_saved.send_robust.call_args_list, - [call(instance=model.cohort, sender=model.cohort.__class__, created=False)]) - - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + self.assertEqual( + self.bc.database.list_of("admissions.Cohort"), + [ + { + "academy_id": 1, + "intro_video": None, + "accepts_enrollment_suggestions": True, + "current_day": data["current_day"], + "current_module": None, + "ending_date": model["cohort"].ending_date, + "id": model["cohort"].id, + "kickoff_date": model["cohort"].kickoff_date, + "remote_available": model["cohort"].remote_available, + "online_meeting_url": model["cohort"].online_meeting_url, + "language": data["language"], + "name": data["name"], + "never_ends": False, + "private": False, + "history_log": None, + "slug": data["slug"], + "stage": model["cohort"].stage, + "syllabus_version_id": model["cohort"].syllabus_version.id, + "schedule_id": model2.syllabus_schedule.id, + "timezone": None, + "is_hidden_on_prework": True, + "available_as_saas": model["cohort"].available_as_saas, + } + ], + ) + + self.assertEqual( + self.bc.database.list_of("admissions.CohortTimeSlot"), + [ + { + "cohort_id": 1, + "removed_at": syllabus_schedule_time_slot.removed_at, + "ending_at": syllabus_schedule_time_slot.ending_at, + "id": syllabus_schedule_time_slot.id, + "timezone": model.academy.timezone, + "recurrency_type": syllabus_schedule_time_slot.recurrency_type, + "recurrent": syllabus_schedule_time_slot.recurrent, + "starting_at": syllabus_schedule_time_slot.starting_at, + } + for syllabus_schedule_time_slot in model2.syllabus_schedule_time_slot + ], + ) + + self.assertEqual( + cohort_saved.send_robust.call_args_list, + [call(instance=model.cohort, sender=model.cohort.__class__, created=False)], + ) + + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + 
@patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id__put__with_id__with_data_in_body__cohort_with_timezone(self): """Test /cohort/:id without auth""" from breathecode.admissions.signals import cohort_saved self.headers(academy=1) - cohort_kwargs = {'ending_date': timezone.now(), 'timezone': 'Europe/Monaco'} - syllabus_kwargs = {'slug': 'they-killed-kenny'} - academy = {'timezone': 'Pacific/Pago_Pago'} - timeslot = {'timezone': 'Europe/Amsterdam'} - model = self.generate_models(authenticate=True, - cohort=True, - academy=academy, - profile_academy=True, - capability='crud_cohort', - role='potato', - syllabus=True, - syllabus_version=True, - syllabus_schedule=True, - cohort_kwargs=cohort_kwargs, - syllabus_schedule_time_slot=True, - cohort_time_slot=True, - syllabus_kwargs=syllabus_kwargs) + cohort_kwargs = {"ending_date": timezone.now(), "timezone": "Europe/Monaco"} + syllabus_kwargs = {"slug": "they-killed-kenny"} + academy = {"timezone": "Pacific/Pago_Pago"} + timeslot = {"timezone": "Europe/Amsterdam"} + model = self.generate_models( + authenticate=True, + cohort=True, + academy=academy, + profile_academy=True, + capability="crud_cohort", + role="potato", + syllabus=True, + syllabus_version=True, + syllabus_schedule=True, + cohort_kwargs=cohort_kwargs, + syllabus_schedule_time_slot=True, + cohort_time_slot=True, + syllabus_kwargs=syllabus_kwargs, + ) # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] - model2 = self.generate_models(academy=model.academy, - skip_cohort=True, - syllabus_schedule=True, - syllabus=model.syllabus, - syllabus_schedule_time_slot=(2, timeslot)) - url = reverse_lazy('admissions:academy_cohort_id', kwargs={'cohort_id': model['cohort'].id}) + model2 = self.generate_models( + academy=model.academy, + skip_cohort=True, + syllabus_schedule=True, + syllabus=model.syllabus, + syllabus_schedule_time_slot=(2, timeslot), + ) + url = reverse_lazy("admissions:academy_cohort_id", kwargs={"cohort_id": model["cohort"].id}) data = { - 'syllabus': f'{model.syllabus.slug}.v{model.syllabus_version.version}', - 'slug': 'they-killed-kenny', - 'name': 'They killed kenny', - 'schedule': 2, - 'current_day': model['cohort'].current_day + 1, - 'language': 'es', + "syllabus": f"{model.syllabus.slug}.v{model.syllabus_version.version}", + "slug": "they-killed-kenny", + "name": "They killed kenny", + "schedule": 2, + "current_day": model["cohort"].current_day + 1, + "language": "es", } response = self.client.put(url, data) json = response.json() expected = { - 'id': - model['cohort'].id, - 'slug': - data['slug'], - 'name': - data['name'], - 'never_ends': - False, - 'remote_available': - True, - 'private': - False, - 'language': - data['language'], - 'kickoff_date': - self.datetime_to_iso(model['cohort'].kickoff_date) - if model['cohort'].kickoff_date else model['cohort'].kickoff_date, - 'ending_date': - self.datetime_to_iso(model['cohort'].ending_date), - 'current_day': - data['current_day'], - 'current_module': - None, - 'stage': - model['cohort'].stage, - 'online_meeting_url': - model['cohort'].online_meeting_url, - 'timezone': - model['cohort'].timezone, - 'is_hidden_on_prework': - model['cohort'].is_hidden_on_prework, - 'available_as_saas': - model['cohort'].available_as_saas, - 'timeslots': [{ - 'ending_at': - DatetimeInteger.to_iso_string(model.cohort.timezone, 
syllabus_schedule_time_slot.ending_at), - 'id': - syllabus_schedule_time_slot.id, - 'recurrency_type': - syllabus_schedule_time_slot.recurrency_type, - 'recurrent': - syllabus_schedule_time_slot.recurrent, - 'starting_at': - DatetimeInteger.to_iso_string(model.cohort.timezone, syllabus_schedule_time_slot.starting_at), - } for syllabus_schedule_time_slot in model2.syllabus_schedule_time_slot], - 'schedule': { - 'id': model2.syllabus_schedule.id, - 'name': model2.syllabus_schedule.name, - 'syllabus': model2.syllabus_schedule.syllabus.id, + "id": model["cohort"].id, + "slug": data["slug"], + "name": data["name"], + "never_ends": False, + "remote_available": True, + "private": False, + "language": data["language"], + "kickoff_date": ( + self.datetime_to_iso(model["cohort"].kickoff_date) + if model["cohort"].kickoff_date + else model["cohort"].kickoff_date + ), + "ending_date": self.datetime_to_iso(model["cohort"].ending_date), + "current_day": data["current_day"], + "current_module": None, + "stage": model["cohort"].stage, + "online_meeting_url": model["cohort"].online_meeting_url, + "timezone": model["cohort"].timezone, + "is_hidden_on_prework": model["cohort"].is_hidden_on_prework, + "available_as_saas": model["cohort"].available_as_saas, + "timeslots": [ + { + "ending_at": DatetimeInteger.to_iso_string( + model.cohort.timezone, syllabus_schedule_time_slot.ending_at + ), + "id": syllabus_schedule_time_slot.id, + "recurrency_type": syllabus_schedule_time_slot.recurrency_type, + "recurrent": syllabus_schedule_time_slot.recurrent, + "starting_at": DatetimeInteger.to_iso_string( + model.cohort.timezone, syllabus_schedule_time_slot.starting_at + ), + } + for syllabus_schedule_time_slot in model2.syllabus_schedule_time_slot + ], + "schedule": { + "id": model2.syllabus_schedule.id, + "name": model2.syllabus_schedule.name, + "syllabus": model2.syllabus_schedule.syllabus.id, }, - 'syllabus_version': { - 'name': model.syllabus.name, - 'slug': model.syllabus.slug, - 'status': model['cohort'].syllabus_version.status, - 'version': model['cohort'].syllabus_version.version, - 'syllabus': model['cohort'].syllabus_version.syllabus.id, - 'duration_in_days': model.syllabus.duration_in_days, - 'duration_in_hours': model.syllabus.duration_in_hours, - 'github_url': model.syllabus.github_url, - 'logo': model.syllabus.logo, - 'private': model.syllabus.private, - 'week_hours': model.syllabus.week_hours, + "syllabus_version": { + "name": model.syllabus.name, + "slug": model.syllabus.slug, + "status": model["cohort"].syllabus_version.status, + "version": model["cohort"].syllabus_version.version, + "syllabus": model["cohort"].syllabus_version.syllabus.id, + "duration_in_days": model.syllabus.duration_in_days, + "duration_in_hours": model.syllabus.duration_in_hours, + "github_url": model.syllabus.github_url, + "logo": model.syllabus.logo, + "private": model.syllabus.private, + "week_hours": model.syllabus.week_hours, }, - 'academy': { - 'id': model.academy.id, - 'slug': model.academy.slug, - 'name': model.academy.name, - 'country': { - 'code': model.academy.country.code, - 'name': model.academy.country.name, + "academy": { + "id": model.academy.id, + "slug": model.academy.slug, + "name": model.academy.name, + "country": { + "code": model.academy.country.code, + "name": model.academy.country.name, }, - 'city': { - 'name': model.academy.city.name, + "city": { + "name": model.academy.city.name, }, - 'logo_url': model.academy.logo_url, - 'is_hidden_on_prework': model.academy.is_hidden_on_prework - } + "logo_url": 
model.academy.logo_url, + "is_hidden_on_prework": model.academy.is_hidden_on_prework, + }, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), - [{ - 'academy_id': 1, - 'intro_video': None, - 'accepts_enrollment_suggestions': True, - 'current_day': data['current_day'], - 'current_module': None, - 'ending_date': model['cohort'].ending_date, - 'id': model['cohort'].id, - 'kickoff_date': model['cohort'].kickoff_date, - 'remote_available': model['cohort'].remote_available, - 'online_meeting_url': model['cohort'].online_meeting_url, - 'language': data['language'], - 'name': data['name'], - 'never_ends': False, - 'private': False, - 'history_log': None, - 'slug': data['slug'], - 'stage': model['cohort'].stage, - 'syllabus_version_id': model['cohort'].syllabus_version.id, - 'schedule_id': model2.syllabus_schedule.id, - 'timezone': 'Europe/Monaco', - 'is_hidden_on_prework': model['cohort'].is_hidden_on_prework, - 'available_as_saas': model['cohort'].available_as_saas - }]) - - self.assertEqual(self.bc.database.list_of('admissions.CohortTimeSlot'), - [{ - 'cohort_id': 1, - 'removed_at': syllabus_schedule_time_slot.removed_at, - 'ending_at': syllabus_schedule_time_slot.ending_at, - 'id': syllabus_schedule_time_slot.id, - 'timezone': model.cohort.timezone, - 'recurrency_type': syllabus_schedule_time_slot.recurrency_type, - 'recurrent': syllabus_schedule_time_slot.recurrent, - 'starting_at': syllabus_schedule_time_slot.starting_at, - } for syllabus_schedule_time_slot in model2.syllabus_schedule_time_slot]) - - self.assertEqual(cohort_saved.send_robust.call_args_list, - [call(instance=model.cohort, sender=model.cohort.__class__, created=False)]) + self.assertEqual( + self.bc.database.list_of("admissions.Cohort"), + [ + { + "academy_id": 1, + "intro_video": None, + "accepts_enrollment_suggestions": True, + "current_day": data["current_day"], + "current_module": None, + "ending_date": model["cohort"].ending_date, + "id": model["cohort"].id, + "kickoff_date": model["cohort"].kickoff_date, + "remote_available": model["cohort"].remote_available, + "online_meeting_url": model["cohort"].online_meeting_url, + "language": data["language"], + "name": data["name"], + "never_ends": False, + "private": False, + "history_log": None, + "slug": data["slug"], + "stage": model["cohort"].stage, + "syllabus_version_id": model["cohort"].syllabus_version.id, + "schedule_id": model2.syllabus_schedule.id, + "timezone": "Europe/Monaco", + "is_hidden_on_prework": model["cohort"].is_hidden_on_prework, + "available_as_saas": model["cohort"].available_as_saas, + } + ], + ) + + self.assertEqual( + self.bc.database.list_of("admissions.CohortTimeSlot"), + [ + { + "cohort_id": 1, + "removed_at": syllabus_schedule_time_slot.removed_at, + "ending_at": syllabus_schedule_time_slot.ending_at, + "id": syllabus_schedule_time_slot.id, + "timezone": model.cohort.timezone, + "recurrency_type": syllabus_schedule_time_slot.recurrency_type, + "recurrent": syllabus_schedule_time_slot.recurrent, + "starting_at": syllabus_schedule_time_slot.starting_at, + } + for syllabus_schedule_time_slot in model2.syllabus_schedule_time_slot + ], + ) + + self.assertEqual( + cohort_saved.send_robust.call_args_list, + [call(instance=model.cohort, sender=model.cohort.__class__, created=False)], + ) """ 🔽🔽🔽 Put with some data, of other academy, syllabus public """ - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - 
@patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id__put__with_id__schedule_related_to_syllabus_of_other_academy_public(self): """Test /cohort/:id without auth""" from breathecode.admissions.signals import cohort_saved self.headers(academy=1) - cohort_kwargs = {'ending_date': timezone.now()} - syllabus_kwargs = {'slug': 'they-killed-kenny'} - academy = {'timezone': 'Pacific/Pago_Pago'} - timeslot = {'timezone': 'Europe/Amsterdam'} - model = self.generate_models(authenticate=True, - cohort=True, - academy=academy, - profile_academy=True, - capability='crud_cohort', - role='potato', - syllabus=True, - syllabus_version=True, - syllabus_schedule=True, - cohort_kwargs=cohort_kwargs, - syllabus_schedule_time_slot=True, - cohort_time_slot=True, - syllabus_kwargs=syllabus_kwargs) + cohort_kwargs = {"ending_date": timezone.now()} + syllabus_kwargs = {"slug": "they-killed-kenny"} + academy = {"timezone": "Pacific/Pago_Pago"} + timeslot = {"timezone": "Europe/Amsterdam"} + model = self.generate_models( + authenticate=True, + cohort=True, + academy=academy, + profile_academy=True, + capability="crud_cohort", + role="potato", + syllabus=True, + syllabus_version=True, + syllabus_schedule=True, + cohort_kwargs=cohort_kwargs, + syllabus_schedule_time_slot=True, + cohort_time_slot=True, + syllabus_kwargs=syllabus_kwargs, + ) # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] - syllabus = {'private': False} - model2 = self.generate_models(academy=1, - skip_cohort=True, - syllabus=syllabus, - syllabus_schedule=True, - syllabus_schedule_time_slot=(2, timeslot)) - url = reverse_lazy('admissions:academy_cohort_id', kwargs={'cohort_id': model['cohort'].id}) + syllabus = {"private": False} + model2 = self.generate_models( + academy=1, + skip_cohort=True, + syllabus=syllabus, + syllabus_schedule=True, + syllabus_schedule_time_slot=(2, timeslot), + ) + url = reverse_lazy("admissions:academy_cohort_id", kwargs={"cohort_id": model["cohort"].id}) data = { - 'syllabus': f'{model.syllabus.slug}.v{model.syllabus_version.version}', - 'slug': 'they-killed-kenny', - 'name': 'They killed kenny', - 'schedule': 2, - 'current_day': model['cohort'].current_day + 1, - 'language': 'es', + "syllabus": f"{model.syllabus.slug}.v{model.syllabus_version.version}", + "slug": "they-killed-kenny", + "name": "They killed kenny", + "schedule": 2, + "current_day": model["cohort"].current_day + 1, + "language": "es", } response = self.client.put(url, data) json = response.json() expected = { - 'id': - model['cohort'].id, - 'slug': - data['slug'], - 'name': - data['name'], - 'never_ends': - False, - 'remote_available': - True, - 'private': - False, - 'language': - data['language'], - 'kickoff_date': - self.datetime_to_iso(model['cohort'].kickoff_date) - if model['cohort'].kickoff_date else model['cohort'].kickoff_date, - 'ending_date': - self.datetime_to_iso(model['cohort'].ending_date), - 'current_day': - data['current_day'], - 'current_module': - None, - 'stage': - model['cohort'].stage, - 'online_meeting_url': - model['cohort'].online_meeting_url, - 
'timezone': - model['cohort'].timezone, - 'is_hidden_on_prework': - model['cohort'].is_hidden_on_prework, - 'available_as_saas': - model['cohort'].available_as_saas, - 'timeslots': [{ - 'ending_at': - DatetimeInteger.to_iso_string(model.academy.timezone, syllabus_schedule_time_slot.ending_at), - 'id': - syllabus_schedule_time_slot.id, - 'recurrency_type': - syllabus_schedule_time_slot.recurrency_type, - 'recurrent': - syllabus_schedule_time_slot.recurrent, - 'starting_at': - DatetimeInteger.to_iso_string(model.academy.timezone, syllabus_schedule_time_slot.starting_at), - } for syllabus_schedule_time_slot in model2.syllabus_schedule_time_slot], - 'schedule': { - 'id': model2.syllabus_schedule.id, - 'name': model2.syllabus_schedule.name, - 'syllabus': model2.syllabus_schedule.syllabus.id, + "id": model["cohort"].id, + "slug": data["slug"], + "name": data["name"], + "never_ends": False, + "remote_available": True, + "private": False, + "language": data["language"], + "kickoff_date": ( + self.datetime_to_iso(model["cohort"].kickoff_date) + if model["cohort"].kickoff_date + else model["cohort"].kickoff_date + ), + "ending_date": self.datetime_to_iso(model["cohort"].ending_date), + "current_day": data["current_day"], + "current_module": None, + "stage": model["cohort"].stage, + "online_meeting_url": model["cohort"].online_meeting_url, + "timezone": model["cohort"].timezone, + "is_hidden_on_prework": model["cohort"].is_hidden_on_prework, + "available_as_saas": model["cohort"].available_as_saas, + "timeslots": [ + { + "ending_at": DatetimeInteger.to_iso_string( + model.academy.timezone, syllabus_schedule_time_slot.ending_at + ), + "id": syllabus_schedule_time_slot.id, + "recurrency_type": syllabus_schedule_time_slot.recurrency_type, + "recurrent": syllabus_schedule_time_slot.recurrent, + "starting_at": DatetimeInteger.to_iso_string( + model.academy.timezone, syllabus_schedule_time_slot.starting_at + ), + } + for syllabus_schedule_time_slot in model2.syllabus_schedule_time_slot + ], + "schedule": { + "id": model2.syllabus_schedule.id, + "name": model2.syllabus_schedule.name, + "syllabus": model2.syllabus_schedule.syllabus.id, }, - 'syllabus_version': { - 'name': model.syllabus.name, - 'slug': model.syllabus.slug, - 'status': model['cohort'].syllabus_version.status, - 'version': model['cohort'].syllabus_version.version, - 'syllabus': model['cohort'].syllabus_version.syllabus.id, - 'duration_in_days': model.syllabus.duration_in_days, - 'duration_in_hours': model.syllabus.duration_in_hours, - 'github_url': model.syllabus.github_url, - 'logo': model.syllabus.logo, - 'private': model.syllabus.private, - 'week_hours': model.syllabus.week_hours, + "syllabus_version": { + "name": model.syllabus.name, + "slug": model.syllabus.slug, + "status": model["cohort"].syllabus_version.status, + "version": model["cohort"].syllabus_version.version, + "syllabus": model["cohort"].syllabus_version.syllabus.id, + "duration_in_days": model.syllabus.duration_in_days, + "duration_in_hours": model.syllabus.duration_in_hours, + "github_url": model.syllabus.github_url, + "logo": model.syllabus.logo, + "private": model.syllabus.private, + "week_hours": model.syllabus.week_hours, }, - 'academy': { - 'id': model.academy.id, - 'slug': model.academy.slug, - 'name': model.academy.name, - 'country': { - 'code': model.academy.country.code, - 'name': model.academy.country.name, + "academy": { + "id": model.academy.id, + "slug": model.academy.slug, + "name": model.academy.name, + "country": { + "code": model.academy.country.code, + 
"name": model.academy.country.name, }, - 'city': { - 'name': model.academy.city.name, + "city": { + "name": model.academy.city.name, }, - 'logo_url': model.academy.logo_url, - 'is_hidden_on_prework': model.academy.is_hidden_on_prework - } + "logo_url": model.academy.logo_url, + "is_hidden_on_prework": model.academy.is_hidden_on_prework, + }, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), - [{ - 'academy_id': 1, - 'intro_video': None, - 'accepts_enrollment_suggestions': True, - 'current_day': data['current_day'], - 'current_module': None, - 'ending_date': model['cohort'].ending_date, - 'id': model['cohort'].id, - 'kickoff_date': model['cohort'].kickoff_date, - 'remote_available': model['cohort'].remote_available, - 'online_meeting_url': model['cohort'].online_meeting_url, - 'language': data['language'], - 'name': data['name'], - 'never_ends': False, - 'history_log': None, - 'private': False, - 'slug': data['slug'], - 'stage': model['cohort'].stage, - 'syllabus_version_id': model['cohort'].syllabus_version.id, - 'schedule_id': model2.syllabus_schedule.id, - 'timezone': None, - 'is_hidden_on_prework': model['cohort'].is_hidden_on_prework, - 'available_as_saas': model['cohort'].available_as_saas - }]) - - self.assertEqual(self.bc.database.list_of('admissions.CohortTimeSlot'), - [{ - 'cohort_id': 1, - 'removed_at': syllabus_schedule_time_slot.removed_at, - 'ending_at': syllabus_schedule_time_slot.ending_at, - 'id': syllabus_schedule_time_slot.id, - 'timezone': model.academy.timezone, - 'recurrency_type': syllabus_schedule_time_slot.recurrency_type, - 'recurrent': syllabus_schedule_time_slot.recurrent, - 'starting_at': syllabus_schedule_time_slot.starting_at, - } for syllabus_schedule_time_slot in model2.syllabus_schedule_time_slot]) - - self.assertEqual(cohort_saved.send_robust.call_args_list, - [call(instance=model.cohort, sender=model.cohort.__class__, created=False)]) + self.assertEqual( + self.bc.database.list_of("admissions.Cohort"), + [ + { + "academy_id": 1, + "intro_video": None, + "accepts_enrollment_suggestions": True, + "current_day": data["current_day"], + "current_module": None, + "ending_date": model["cohort"].ending_date, + "id": model["cohort"].id, + "kickoff_date": model["cohort"].kickoff_date, + "remote_available": model["cohort"].remote_available, + "online_meeting_url": model["cohort"].online_meeting_url, + "language": data["language"], + "name": data["name"], + "never_ends": False, + "history_log": None, + "private": False, + "slug": data["slug"], + "stage": model["cohort"].stage, + "syllabus_version_id": model["cohort"].syllabus_version.id, + "schedule_id": model2.syllabus_schedule.id, + "timezone": None, + "is_hidden_on_prework": model["cohort"].is_hidden_on_prework, + "available_as_saas": model["cohort"].available_as_saas, + } + ], + ) + + self.assertEqual( + self.bc.database.list_of("admissions.CohortTimeSlot"), + [ + { + "cohort_id": 1, + "removed_at": syllabus_schedule_time_slot.removed_at, + "ending_at": syllabus_schedule_time_slot.ending_at, + "id": syllabus_schedule_time_slot.id, + "timezone": model.academy.timezone, + "recurrency_type": syllabus_schedule_time_slot.recurrency_type, + "recurrent": syllabus_schedule_time_slot.recurrent, + "starting_at": syllabus_schedule_time_slot.starting_at, + } + for syllabus_schedule_time_slot in model2.syllabus_schedule_time_slot + ], + ) + + self.assertEqual( + cohort_saved.send_robust.call_args_list, + 
[call(instance=model.cohort, sender=model.cohort.__class__, created=False)], + ) """ 🔽🔽🔽 Put with some data, of other academy, syllabus private """ - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id__put__with_id__schedule_related_to_syllabus_of_other_academy_private(self): """Test /cohort/:id without auth""" from breathecode.admissions.signals import cohort_saved self.headers(academy=1) - cohort_kwargs = {'ending_date': timezone.now()} - syllabus_kwargs = {'slug': 'they-killed-kenny'} - academy = {'timezone': 'Pacific/Pago_Pago'} - timeslot = {'timezone': 'Europe/Amsterdam'} - model = self.generate_models(authenticate=True, - cohort=True, - academy=academy, - profile_academy=True, - capability='crud_cohort', - role='potato', - syllabus=True, - syllabus_version=True, - syllabus_schedule=True, - cohort_kwargs=cohort_kwargs, - syllabus_schedule_time_slot=True, - cohort_time_slot=True, - syllabus_kwargs=syllabus_kwargs) + cohort_kwargs = {"ending_date": timezone.now()} + syllabus_kwargs = {"slug": "they-killed-kenny"} + academy = {"timezone": "Pacific/Pago_Pago"} + timeslot = {"timezone": "Europe/Amsterdam"} + model = self.generate_models( + authenticate=True, + cohort=True, + academy=academy, + profile_academy=True, + capability="crud_cohort", + role="potato", + syllabus=True, + syllabus_version=True, + syllabus_schedule=True, + cohort_kwargs=cohort_kwargs, + syllabus_schedule_time_slot=True, + cohort_time_slot=True, + syllabus_kwargs=syllabus_kwargs, + ) # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] - syllabus = {'private': True} - model2 = self.generate_models(academy=1, - skip_cohort=True, - syllabus=syllabus, - syllabus_schedule=True, - syllabus_schedule_time_slot=(2, timeslot)) - url = reverse_lazy('admissions:academy_cohort_id', kwargs={'cohort_id': model['cohort'].id}) + syllabus = {"private": True} + model2 = self.generate_models( + academy=1, + skip_cohort=True, + syllabus=syllabus, + syllabus_schedule=True, + syllabus_schedule_time_slot=(2, timeslot), + ) + url = reverse_lazy("admissions:academy_cohort_id", kwargs={"cohort_id": model["cohort"].id}) data = { - 'syllabus': f'{model.syllabus.slug}.v{model.syllabus_version.version}', - 'slug': 'they-killed-kenny', - 'name': 'They killed kenny', - 'schedule': 2, - 'current_day': model['cohort'].current_day + 1, - 'language': 'es', + "syllabus": f"{model.syllabus.slug}.v{model.syllabus_version.version}", + "slug": "they-killed-kenny", + "name": "They killed kenny", + "schedule": 2, + "current_day": model["cohort"].current_day + 1, + "language": "es", } response = self.client.put(url, data) json = response.json() expected = { - 'id': - model['cohort'].id, - 'slug': - data['slug'], - 'name': - data['name'], - 'never_ends': - False, - 'remote_available': - True, - 'private': - False, - 'language': - data['language'], - 'kickoff_date': - self.datetime_to_iso(model['cohort'].kickoff_date) - if model['cohort'].kickoff_date else model['cohort'].kickoff_date, - 'ending_date': 
- self.datetime_to_iso(model['cohort'].ending_date), - 'current_day': - data['current_day'], - 'current_module': - None, - 'stage': - model['cohort'].stage, - 'online_meeting_url': - model['cohort'].online_meeting_url, - 'timezone': - model['cohort'].timezone, - 'timeslots': [], - 'is_hidden_on_prework': - model['cohort'].is_hidden_on_prework, - 'available_as_saas': - model['cohort'].available_as_saas, - 'schedule': { - 'id': model2.syllabus_schedule.id, - 'name': model2.syllabus_schedule.name, - 'syllabus': model2.syllabus_schedule.syllabus.id, + "id": model["cohort"].id, + "slug": data["slug"], + "name": data["name"], + "never_ends": False, + "remote_available": True, + "private": False, + "language": data["language"], + "kickoff_date": ( + self.datetime_to_iso(model["cohort"].kickoff_date) + if model["cohort"].kickoff_date + else model["cohort"].kickoff_date + ), + "ending_date": self.datetime_to_iso(model["cohort"].ending_date), + "current_day": data["current_day"], + "current_module": None, + "stage": model["cohort"].stage, + "online_meeting_url": model["cohort"].online_meeting_url, + "timezone": model["cohort"].timezone, + "timeslots": [], + "is_hidden_on_prework": model["cohort"].is_hidden_on_prework, + "available_as_saas": model["cohort"].available_as_saas, + "schedule": { + "id": model2.syllabus_schedule.id, + "name": model2.syllabus_schedule.name, + "syllabus": model2.syllabus_schedule.syllabus.id, }, - 'syllabus_version': { - 'name': model.syllabus.name, - 'slug': model.syllabus.slug, - 'status': model['cohort'].syllabus_version.status, - 'version': model['cohort'].syllabus_version.version, - 'syllabus': model['cohort'].syllabus_version.syllabus.id, - 'duration_in_days': model.syllabus.duration_in_days, - 'duration_in_hours': model.syllabus.duration_in_hours, - 'github_url': model.syllabus.github_url, - 'logo': model.syllabus.logo, - 'private': model.syllabus.private, - 'week_hours': model.syllabus.week_hours, + "syllabus_version": { + "name": model.syllabus.name, + "slug": model.syllabus.slug, + "status": model["cohort"].syllabus_version.status, + "version": model["cohort"].syllabus_version.version, + "syllabus": model["cohort"].syllabus_version.syllabus.id, + "duration_in_days": model.syllabus.duration_in_days, + "duration_in_hours": model.syllabus.duration_in_hours, + "github_url": model.syllabus.github_url, + "logo": model.syllabus.logo, + "private": model.syllabus.private, + "week_hours": model.syllabus.week_hours, }, - 'academy': { - 'id': model.academy.id, - 'slug': model.academy.slug, - 'name': model.academy.name, - 'country': { - 'code': model.academy.country.code, - 'name': model.academy.country.name, + "academy": { + "id": model.academy.id, + "slug": model.academy.slug, + "name": model.academy.name, + "country": { + "code": model.academy.country.code, + "name": model.academy.country.name, }, - 'city': { - 'name': model.academy.city.name, + "city": { + "name": model.academy.city.name, }, - 'logo_url': model.academy.logo_url, - 'is_hidden_on_prework': model.academy.is_hidden_on_prework - } + "logo_url": model.academy.logo_url, + "is_hidden_on_prework": model.academy.is_hidden_on_prework, + }, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), - [{ - 'academy_id': 1, - 'intro_video': None, - 'accepts_enrollment_suggestions': True, - 'current_day': data['current_day'], - 'current_module': None, - 'ending_date': model['cohort'].ending_date, - 'id': 
model['cohort'].id, - 'kickoff_date': model['cohort'].kickoff_date, - 'remote_available': model['cohort'].remote_available, - 'online_meeting_url': model['cohort'].online_meeting_url, - 'language': data['language'], - 'name': data['name'], - 'never_ends': False, - 'private': False, - 'history_log': None, - 'slug': data['slug'], - 'stage': model['cohort'].stage, - 'syllabus_version_id': model['cohort'].syllabus_version.id, - 'schedule_id': model2.syllabus_schedule.id, - 'timezone': None, - 'is_hidden_on_prework': model['cohort'].is_hidden_on_prework, - 'available_as_saas': model['cohort'].available_as_saas - }]) - - self.assertEqual(self.bc.database.list_of('admissions.CohortTimeSlot'), []) - self.assertEqual(cohort_saved.send_robust.call_args_list, - [call(instance=model.cohort, sender=model.cohort.__class__, created=False)]) + self.assertEqual( + self.bc.database.list_of("admissions.Cohort"), + [ + { + "academy_id": 1, + "intro_video": None, + "accepts_enrollment_suggestions": True, + "current_day": data["current_day"], + "current_module": None, + "ending_date": model["cohort"].ending_date, + "id": model["cohort"].id, + "kickoff_date": model["cohort"].kickoff_date, + "remote_available": model["cohort"].remote_available, + "online_meeting_url": model["cohort"].online_meeting_url, + "language": data["language"], + "name": data["name"], + "never_ends": False, + "private": False, + "history_log": None, + "slug": data["slug"], + "stage": model["cohort"].stage, + "syllabus_version_id": model["cohort"].syllabus_version.id, + "schedule_id": model2.syllabus_schedule.id, + "timezone": None, + "is_hidden_on_prework": model["cohort"].is_hidden_on_prework, + "available_as_saas": model["cohort"].available_as_saas, + } + ], + ) + + self.assertEqual(self.bc.database.list_of("admissions.CohortTimeSlot"), []) + self.assertEqual( + cohort_saved.send_robust.call_args_list, + [call(instance=model.cohort, sender=model.cohort.__class__, created=False)], + ) """ 🔽🔽🔽 Get data """ - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id__get__with_id(self): """Test /cohort/:id without auth""" from breathecode.admissions.signals import cohort_saved self.headers(academy=1) - model = self.generate_models(authenticate=True, - cohort=True, - profile_academy=True, - capability='read_all_cohort', - role='potato', - syllabus_schedule=True, - syllabus=True, - syllabus_version=True) + model = self.generate_models( + authenticate=True, + cohort=True, + profile_academy=True, + capability="read_all_cohort", + role="potato", + syllabus_schedule=True, + syllabus=True, + syllabus_version=True, + ) # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] - model_dict = self.remove_dinamics_fields(model['cohort'].__dict__) - url = reverse_lazy('admissions:academy_cohort_id', kwargs={'cohort_id': model['cohort'].id}) + model_dict = self.remove_dinamics_fields(model["cohort"].__dict__) + url = reverse_lazy("admissions:academy_cohort_id", kwargs={"cohort_id": model["cohort"].id}) 
response = self.client.get(url) json = response.json() expected = { - 'id': model['cohort'].id, - 'slug': model['cohort'].slug, - 'name': model['cohort'].name, - 'never_ends': model['cohort'].never_ends, - 'remote_available': model['cohort'].remote_available, - 'private': model['cohort'].private, - 'kickoff_date': self.datetime_to_iso(model['cohort'].kickoff_date), - 'ending_date': model['cohort'].ending_date, - 'stage': model['cohort'].stage, - 'language': model['cohort'].language, - 'current_day': model['cohort'].current_day, - 'current_module': model['cohort'].current_module, - 'online_meeting_url': model['cohort'].online_meeting_url, - 'timezone': model['cohort'].timezone, - 'timeslots': [], - 'is_hidden_on_prework': model['cohort'].is_hidden_on_prework, - 'available_as_saas': model['cohort'].available_as_saas, - 'schedule': { - 'id': model['cohort'].schedule.id, - 'name': model['cohort'].schedule.name, - 'syllabus': model['cohort'].schedule.syllabus.id, + "id": model["cohort"].id, + "slug": model["cohort"].slug, + "name": model["cohort"].name, + "never_ends": model["cohort"].never_ends, + "remote_available": model["cohort"].remote_available, + "private": model["cohort"].private, + "kickoff_date": self.datetime_to_iso(model["cohort"].kickoff_date), + "ending_date": model["cohort"].ending_date, + "stage": model["cohort"].stage, + "language": model["cohort"].language, + "current_day": model["cohort"].current_day, + "current_module": model["cohort"].current_module, + "online_meeting_url": model["cohort"].online_meeting_url, + "timezone": model["cohort"].timezone, + "timeslots": [], + "is_hidden_on_prework": model["cohort"].is_hidden_on_prework, + "available_as_saas": model["cohort"].available_as_saas, + "schedule": { + "id": model["cohort"].schedule.id, + "name": model["cohort"].schedule.name, + "syllabus": model["cohort"].schedule.syllabus.id, }, - 'syllabus_version': { - 'name': model.syllabus.name, - 'slug': model.syllabus.slug, - 'status': model['cohort'].syllabus_version.status, - 'version': model['cohort'].syllabus_version.version, - 'syllabus': model['cohort'].syllabus_version.syllabus.id, - 'duration_in_days': model.syllabus.duration_in_days, - 'duration_in_hours': model.syllabus.duration_in_hours, - 'github_url': model.syllabus.github_url, - 'logo': model.syllabus.logo, - 'private': model.syllabus.private, - 'week_hours': model.syllabus.week_hours, + "syllabus_version": { + "name": model.syllabus.name, + "slug": model.syllabus.slug, + "status": model["cohort"].syllabus_version.status, + "version": model["cohort"].syllabus_version.version, + "syllabus": model["cohort"].syllabus_version.syllabus.id, + "duration_in_days": model.syllabus.duration_in_days, + "duration_in_hours": model.syllabus.duration_in_hours, + "github_url": model.syllabus.github_url, + "logo": model.syllabus.logo, + "private": model.syllabus.private, + "week_hours": model.syllabus.week_hours, }, - 'academy': { - 'id': model['cohort'].academy.id, - 'slug': model['cohort'].academy.slug, - 'name': model['cohort'].academy.name, - 'country': { - 'code': model['cohort'].academy.country.code, - 'name': model['cohort'].academy.country.name, + "academy": { + "id": model["cohort"].academy.id, + "slug": model["cohort"].academy.slug, + "name": model["cohort"].academy.name, + "country": { + "code": model["cohort"].academy.country.code, + "name": model["cohort"].academy.country.name, }, - 'city': { - 'name': model['cohort'].academy.city.name, + "city": { + "name": model["cohort"].academy.city.name, }, - 'logo_url': 
model['cohort'].academy.logo_url, - 'is_hidden_on_prework': model['cohort'].academy.is_hidden_on_prework + "logo_url": model["cohort"].academy.logo_url, + "is_hidden_on_prework": model["cohort"].academy.is_hidden_on_prework, }, } @@ -1284,25 +1313,27 @@ def test_cohort_id__get__with_id(self): 🔽🔽🔽 Get with bad slug """ - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id__get__with_bad_slug(self): """Test /cohort/:id without auth""" from breathecode.admissions.signals import cohort_saved self.headers(academy=1) - self.generate_models(authenticate=True, - cohort=True, - profile_academy=True, - capability='read_all_cohort', - role='potato', - syllabus=True) + self.generate_models( + authenticate=True, + cohort=True, + profile_academy=True, + capability="read_all_cohort", + role="potato", + syllabus=True, + ) # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] - url = reverse_lazy('admissions:academy_cohort_id', kwargs={'cohort_id': 'they-killed-kenny'}) + url = reverse_lazy("admissions:academy_cohort_id", kwargs={"cohort_id": "they-killed-kenny"}) response = self.client.get(url) self.assertEqual(response.data, None) @@ -1313,80 +1344,82 @@ def test_cohort_id__get__with_bad_slug(self): 🔽🔽🔽 Get with slug """ - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id__get__with_slug(self): """Test /cohort/:id without auth""" from breathecode.admissions.signals import cohort_saved self.headers(academy=1) - model = self.generate_models(authenticate=True, - cohort=True, - profile_academy=True, - capability='read_all_cohort', - role='potato', - syllabus_schedule=True, - syllabus=True, - syllabus_version=True) + model = self.generate_models( + authenticate=True, + cohort=True, + profile_academy=True, + capability="read_all_cohort", + role="potato", + syllabus_schedule=True, + syllabus=True, + syllabus_version=True, + ) # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] - model_dict = self.remove_dinamics_fields(model['cohort'].__dict__) - url = reverse_lazy('admissions:academy_cohort_id', kwargs={'cohort_id': model['cohort'].slug}) + model_dict = self.remove_dinamics_fields(model["cohort"].__dict__) + url = reverse_lazy("admissions:academy_cohort_id", kwargs={"cohort_id": model["cohort"].slug}) response = self.client.get(url) json = response.json() expected = { - 'id': model['cohort'].id, - 'slug': model['cohort'].slug, - 'name': model['cohort'].name, - 'never_ends': 
model['cohort'].never_ends, - 'remote_available': model['cohort'].remote_available, - 'private': model['cohort'].private, - 'kickoff_date': self.datetime_to_iso(model['cohort'].kickoff_date), - 'ending_date': model['cohort'].ending_date, - 'language': model['cohort'].language, - 'stage': model['cohort'].stage, - 'current_day': model['cohort'].current_day, - 'current_module': model['cohort'].current_module, - 'online_meeting_url': model['cohort'].online_meeting_url, - 'timezone': model['cohort'].timezone, - 'timeslots': [], - 'is_hidden_on_prework': model['cohort'].is_hidden_on_prework, - 'available_as_saas': model['cohort'].available_as_saas, - 'schedule': { - 'id': model['cohort'].schedule.id, - 'name': model['cohort'].schedule.name, - 'syllabus': model['cohort'].schedule.syllabus.id, + "id": model["cohort"].id, + "slug": model["cohort"].slug, + "name": model["cohort"].name, + "never_ends": model["cohort"].never_ends, + "remote_available": model["cohort"].remote_available, + "private": model["cohort"].private, + "kickoff_date": self.datetime_to_iso(model["cohort"].kickoff_date), + "ending_date": model["cohort"].ending_date, + "language": model["cohort"].language, + "stage": model["cohort"].stage, + "current_day": model["cohort"].current_day, + "current_module": model["cohort"].current_module, + "online_meeting_url": model["cohort"].online_meeting_url, + "timezone": model["cohort"].timezone, + "timeslots": [], + "is_hidden_on_prework": model["cohort"].is_hidden_on_prework, + "available_as_saas": model["cohort"].available_as_saas, + "schedule": { + "id": model["cohort"].schedule.id, + "name": model["cohort"].schedule.name, + "syllabus": model["cohort"].schedule.syllabus.id, }, - 'syllabus_version': { - 'name': model.syllabus.name, - 'slug': model.syllabus.slug, - 'status': model['cohort'].syllabus_version.status, - 'version': model['cohort'].syllabus_version.version, - 'syllabus': model['cohort'].syllabus_version.syllabus.id, - 'duration_in_days': model.syllabus.duration_in_days, - 'duration_in_hours': model.syllabus.duration_in_hours, - 'github_url': model.syllabus.github_url, - 'logo': model.syllabus.logo, - 'private': model.syllabus.private, - 'week_hours': model.syllabus.week_hours, + "syllabus_version": { + "name": model.syllabus.name, + "slug": model.syllabus.slug, + "status": model["cohort"].syllabus_version.status, + "version": model["cohort"].syllabus_version.version, + "syllabus": model["cohort"].syllabus_version.syllabus.id, + "duration_in_days": model.syllabus.duration_in_days, + "duration_in_hours": model.syllabus.duration_in_hours, + "github_url": model.syllabus.github_url, + "logo": model.syllabus.logo, + "private": model.syllabus.private, + "week_hours": model.syllabus.week_hours, }, - 'academy': { - 'id': model['cohort'].academy.id, - 'slug': model['cohort'].academy.slug, - 'name': model['cohort'].academy.name, - 'country': model['cohort'].academy.country, - 'city': model['cohort'].academy.city, - 'logo_url': model['cohort'].academy.logo_url, - 'is_hidden_on_prework': model['cohort'].academy.is_hidden_on_prework, - 'country': { - 'code': model['cohort'].academy.country.code, - 'name': model['cohort'].academy.country.name, + "academy": { + "id": model["cohort"].academy.id, + "slug": model["cohort"].academy.slug, + "name": model["cohort"].academy.name, + "country": model["cohort"].academy.country, + "city": model["cohort"].academy.city, + "logo_url": model["cohort"].academy.logo_url, + "is_hidden_on_prework": model["cohort"].academy.is_hidden_on_prework, + "country": { + 
"code": model["cohort"].academy.country.code, + "name": model["cohort"].academy.country.name, }, - 'city': { - 'name': model['cohort'].academy.city.name, + "city": { + "name": model["cohort"].academy.city.name, }, }, } @@ -1401,154 +1434,170 @@ def test_cohort_id__get__with_slug(self): 🔽🔽🔽 Delete with bad id """ - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id__delete__with_bad_id(self): """Test /cohort/:id without auth""" from breathecode.admissions.signals import cohort_saved self.headers(academy=1) - model = self.generate_models(authenticate=True, - cohort=True, - user=True, - profile_academy=True, - capability='read_all_cohort', - role='potato', - syllabus=True, - cohort_user=True) + model = self.generate_models( + authenticate=True, + cohort=True, + user=True, + profile_academy=True, + capability="read_all_cohort", + role="potato", + syllabus=True, + cohort_user=True, + ) # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] - url = reverse_lazy('admissions:academy_cohort_id', kwargs={'cohort_id': 0}) + url = reverse_lazy("admissions:academy_cohort_id", kwargs={"cohort_id": 0}) self.assertEqual(self.count_cohort_user(), 1) response = self.client.delete(url) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) self.assertEqual(self.count_cohort_user(), 1) - self.assertEqual(self.count_cohort_stage(model['cohort'].id), 'INACTIVE') + self.assertEqual(self.count_cohort_stage(model["cohort"].id), "INACTIVE") self.assertEqual(cohort_saved.send_robust.call_args_list, []) """ 🔽🔽🔽 Delete with id """ - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id__delete__with_id(self): """Test /cohort/:id without auth""" from breathecode.admissions.signals import cohort_saved self.headers(academy=1) - model = self.generate_models(authenticate=True, - cohort=True, - user=True, - profile_academy=True, - capability='crud_cohort', - role='potato', - syllabus=True) + model = self.generate_models( + authenticate=True, + cohort=True, + user=True, + profile_academy=True, + capability="crud_cohort", + role="potato", + syllabus=True, + ) # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] - url = reverse_lazy('admissions:academy_cohort_id', kwargs={'cohort_id': model['cohort'].id}) - self.assertEqual(self.count_cohort_stage(model['cohort'].id), 'INACTIVE') + url = reverse_lazy("admissions:academy_cohort_id", kwargs={"cohort_id": model["cohort"].id}) + 
self.assertEqual(self.count_cohort_stage(model["cohort"].id), "INACTIVE") response = self.client.delete(url) self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) self.assertEqual(self.count_cohort_user(), 0) - self.assertEqual(self.count_cohort_stage(model['cohort'].id), 'DELETED') - self.assertEqual(cohort_saved.send_robust.call_args_list, - [call(instance=model.cohort, sender=model.cohort.__class__, created=False)]) - - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + self.assertEqual(self.count_cohort_stage(model["cohort"].id), "DELETED") + self.assertEqual( + cohort_saved.send_robust.call_args_list, + [call(instance=model.cohort, sender=model.cohort.__class__, created=False)], + ) + + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_cohort_id__delete__cohort_with_students(self): from breathecode.admissions.signals import cohort_saved self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_cohort', - role='potato', - cohort_user=True) + model = self.generate_models( + authenticate=True, profile_academy=True, capability="crud_cohort", role="potato", cohort_user=True + ) # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] - url = reverse_lazy('admissions:academy_cohort_id', kwargs={'cohort_id': 1}) + url = reverse_lazy("admissions:academy_cohort_id", kwargs={"cohort_id": 1}) response = self.client.delete(url) json = response.json() expected = { - 'detail': 'cohort-has-students', - 'status_code': 400, + "detail": "cohort-has-students", + "status_code": 400, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), [{**self.model_to_dict(model, 'cohort')}]) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), [{**self.model_to_dict(model, "cohort")}]) self.assertEqual(cohort_saved.send_robust.call_args_list, []) """ 🔽🔽🔽 Spy the extensions """ - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch.object(APIViewExtensionHandlers, '_spy_extensions', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch.object(APIViewExtensionHandlers, "_spy_extensions", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id__get__spy_extensions(self): """Test /cohort/:id without auth""" from breathecode.admissions.signals import cohort_saved self.headers(academy=1) - model = self.generate_models(authenticate=True, - cohort=True, - profile_academy=True, - capability='read_all_cohort', - role='potato', - syllabus_schedule=True, - syllabus=True, - 
syllabus_version=True) + model = self.generate_models( + authenticate=True, + cohort=True, + profile_academy=True, + capability="read_all_cohort", + role="potato", + syllabus_schedule=True, + syllabus=True, + syllabus_version=True, + ) # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] - url = reverse_lazy('admissions:academy_cohort_id', kwargs={'cohort_id': model['cohort'].id}) + url = reverse_lazy("admissions:academy_cohort_id", kwargs={"cohort_id": model["cohort"].id}) self.client.get(url) - self.assertEqual(APIViewExtensionHandlers._spy_extensions.call_args_list, [ - call(['CacheExtension', 'LanguageExtension', 'LookupExtension', 'PaginationExtension', 'SortExtension']), - ]) - - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch.object(APIViewExtensionHandlers, '_spy_extension_arguments', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + self.assertEqual( + APIViewExtensionHandlers._spy_extensions.call_args_list, + [ + call( + ["CacheExtension", "LanguageExtension", "LookupExtension", "PaginationExtension", "SortExtension"] + ), + ], + ) + + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch.object(APIViewExtensionHandlers, "_spy_extension_arguments", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id__get__spy_extension_arguments(self): """Test /cohort/:id without auth""" from breathecode.admissions.signals import cohort_saved self.headers(academy=1) - model = self.generate_models(authenticate=True, - cohort=True, - profile_academy=True, - capability='read_all_cohort', - role='potato', - syllabus_schedule=True, - syllabus=True, - syllabus_version=True) + model = self.generate_models( + authenticate=True, + cohort=True, + profile_academy=True, + capability="read_all_cohort", + role="potato", + syllabus_schedule=True, + syllabus=True, + syllabus_version=True, + ) # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] - url = reverse_lazy('admissions:academy_cohort_id', kwargs={'cohort_id': model['cohort'].id}) + url = reverse_lazy("admissions:academy_cohort_id", kwargs={"cohort_id": model["cohort"].id}) self.client.get(url) - self.assertEqual(APIViewExtensionHandlers._spy_extension_arguments.call_args_list, [ - call(cache=CohortCache, sort='-kickoff_date', paginate=True), - ]) + self.assertEqual( + APIViewExtensionHandlers._spy_extension_arguments.call_args_list, + [ + call(cache=CohortCache, sort="-kickoff_date", paginate=True), + ], + ) diff --git a/breathecode/admissions/tests/urls/tests_academy_cohort_id_timeslot.py b/breathecode/admissions/tests/urls/tests_academy_cohort_id_timeslot.py index d28cf57c9..4cae896de 100644 --- a/breathecode/admissions/tests/urls/tests_academy_cohort_id_timeslot.py +++ b/breathecode/admissions/tests/urls/tests_academy_cohort_id_timeslot.py @@ -1,6 +1,7 @@ """ Test /cohort/user """ + import random from unittest.mock import MagicMock, patch from django.urls.base import reverse_lazy @@ -11,62 +12,68 @@ def get_serializer(self, cohort_time_slot): return { - 'id': cohort_time_slot.id, - 'cohort': cohort_time_slot.cohort.id, - 'starting_at': 
self.integer_to_iso(cohort_time_slot.timezone, cohort_time_slot.starting_at), - 'ending_at': self.integer_to_iso(cohort_time_slot.timezone, cohort_time_slot.ending_at), - 'recurrent': cohort_time_slot.recurrent, - 'recurrency_type': cohort_time_slot.recurrency_type, - 'created_at': self.datetime_to_iso(cohort_time_slot.created_at), - 'updated_at': self.datetime_to_iso(cohort_time_slot.updated_at), + "id": cohort_time_slot.id, + "cohort": cohort_time_slot.cohort.id, + "starting_at": self.integer_to_iso(cohort_time_slot.timezone, cohort_time_slot.starting_at), + "ending_at": self.integer_to_iso(cohort_time_slot.timezone, cohort_time_slot.ending_at), + "recurrent": cohort_time_slot.recurrent, + "recurrency_type": cohort_time_slot.recurrency_type, + "created_at": self.datetime_to_iso(cohort_time_slot.created_at), + "updated_at": self.datetime_to_iso(cohort_time_slot.updated_at), } class CohortUserTestSuite(AdmissionsTestCase): """Test /cohort/user""" + """ 🔽🔽🔽 Auth """ - @patch('breathecode.events.tasks.build_live_classes_from_timeslot.delay', MagicMock()) + @patch("breathecode.events.tasks.build_live_classes_from_timeslot.delay", MagicMock()) def test__without_auth(self): - url = reverse_lazy('admissions:academy_cohort_id_timeslot', kwargs={'cohort_id': 1}) + url = reverse_lazy("admissions:academy_cohort_id_timeslot", kwargs={"cohort_id": 1}) response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED - }) + self.assertEqual( + json, + {"detail": "Authentication credentials were not provided.", "status_code": status.HTTP_401_UNAUTHORIZED}, + ) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) - @patch('breathecode.events.tasks.build_live_classes_from_timeslot.delay', MagicMock()) + @patch("breathecode.events.tasks.build_live_classes_from_timeslot.delay", MagicMock()) def test__without_academy_header(self): model = self.generate_models(authenticate=True) - url = reverse_lazy('admissions:academy_cohort_id_timeslot', kwargs={'cohort_id': 1}) + url = reverse_lazy("admissions:academy_cohort_id_timeslot", kwargs={"cohort_id": 1}) response = self.client.get(url) json = response.json() self.assertEqual( - json, { - 'detail': "Missing academy_id parameter expected for the endpoint url or 'Academy' header", - 'status_code': 403, - }) + json, + { + "detail": "Missing academy_id parameter expected for the endpoint url or 'Academy' header", + "status_code": 403, + }, + ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) self.assertEqual(self.all_cohort_time_slot_dict(), []) - @patch('breathecode.events.tasks.build_live_classes_from_timeslot.delay', MagicMock()) + @patch("breathecode.events.tasks.build_live_classes_from_timeslot.delay", MagicMock()) def test__without_capabilities(self): self.headers(academy=1) model = self.generate_models(authenticate=True) - url = reverse_lazy('admissions:academy_cohort_id_timeslot', kwargs={'cohort_id': 1}) + url = reverse_lazy("admissions:academy_cohort_id_timeslot", kwargs={"cohort_id": 1}) response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': "You (user: 1) don't have this capability: read_all_cohort for academy 1", - 'status_code': 403, - }) + self.assertEqual( + json, + { + "detail": "You (user: 1) don't have this capability: read_all_cohort for academy 1", + "status_code": 403, + }, + ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) 
self.assertEqual(self.all_cohort_time_slot_dict(), []) @@ -74,14 +81,13 @@ def test__without_capabilities(self): 🔽🔽🔽 Without data """ - @patch('breathecode.events.tasks.build_live_classes_from_timeslot.delay', MagicMock()) + @patch("breathecode.events.tasks.build_live_classes_from_timeslot.delay", MagicMock()) def test__without_data(self): self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_all_cohort', - role='potato') - url = reverse_lazy('admissions:academy_cohort_id_timeslot', kwargs={'cohort_id': 1}) + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_all_cohort", role="potato" + ) + url = reverse_lazy("admissions:academy_cohort_id_timeslot", kwargs={"cohort_id": 1}) response = self.client.get(url) json = response.json() @@ -93,62 +99,69 @@ def test__without_data(self): 🔽🔽🔽 With data """ - @patch('breathecode.events.tasks.build_live_classes_from_timeslot.delay', MagicMock()) + @patch("breathecode.events.tasks.build_live_classes_from_timeslot.delay", MagicMock()) def test__with_data(self): self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_all_cohort', - role='potato', - cohort_time_slot=True) - url = reverse_lazy('admissions:academy_cohort_id_timeslot', kwargs={'cohort_id': 1}) + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_all_cohort", role="potato", cohort_time_slot=True + ) + url = reverse_lazy("admissions:academy_cohort_id_timeslot", kwargs={"cohort_id": 1}) response = self.client.get(url) json = response.json() expected = [get_serializer(self, model.cohort_time_slot)] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_cohort_time_slot_dict(), [{ - **self.model_to_dict(model, 'cohort_time_slot'), - }]) + self.assertEqual( + self.all_cohort_time_slot_dict(), + [ + { + **self.model_to_dict(model, "cohort_time_slot"), + } + ], + ) """ 🔽🔽🔽 recurrency_type in querystring """ - @patch('breathecode.events.tasks.build_live_classes_from_timeslot.delay', MagicMock()) + @patch("breathecode.events.tasks.build_live_classes_from_timeslot.delay", MagicMock()) def test__recurrency_type_in_querystring__not_found(self): self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_all_cohort', - role='potato', - cohort_time_slot=True) - - url = (reverse_lazy('admissions:academy_cohort_id_timeslot', kwargs={'cohort_id': 1}) + - f'?recurrency_type=asdasdasd') + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_all_cohort", role="potato", cohort_time_slot=True + ) + + url = ( + reverse_lazy("admissions:academy_cohort_id_timeslot", kwargs={"cohort_id": 1}) + + f"?recurrency_type=asdasdasd" + ) response = self.client.get(url) json = response.json() expected = [] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_cohort_time_slot_dict(), [{ - **self.model_to_dict(model, 'cohort_time_slot'), - }]) + self.assertEqual( + self.all_cohort_time_slot_dict(), + [ + { + **self.model_to_dict(model, "cohort_time_slot"), + } + ], + ) - @patch('breathecode.events.tasks.build_live_classes_from_timeslot.delay', MagicMock()) + @patch("breathecode.events.tasks.build_live_classes_from_timeslot.delay", MagicMock()) def test__recurrency_type_in_querystring__found(self): - statuses = 
['DAILY', 'WEEKLY', 'MONTHLY'] - cases = [(x, x, random.choice([y for y in statuses if x != y])) - for x in statuses] + [(x, x.lower(), random.choice([y for y in statuses if x != y])) for x in statuses] + statuses = ["DAILY", "WEEKLY", "MONTHLY"] + cases = [(x, x, random.choice([y for y in statuses if x != y])) for x in statuses] + [ + (x, x.lower(), random.choice([y for y in statuses if x != y])) for x in statuses + ] self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_all_cohort', - role='potato', - cohort_time_slot=3) + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_all_cohort", role="potato", cohort_time_slot=3 + ) for current, query, bad_status in cases: model.cohort_time_slot[0].recurrency_type = current @@ -160,8 +173,10 @@ def test__recurrency_type_in_querystring__found(self): model.cohort_time_slot[2].recurrency_type = bad_status model.cohort_time_slot[2].save() - url = (reverse_lazy('admissions:academy_cohort_id_timeslot', kwargs={'cohort_id': 1}) + - f'?recurrency_type={query}') + url = ( + reverse_lazy("admissions:academy_cohort_id_timeslot", kwargs={"cohort_id": 1}) + + f"?recurrency_type={query}" + ) response = self.client.get(url) json = response.json() @@ -172,34 +187,37 @@ def test__recurrency_type_in_querystring__found(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_cohort_time_slot_dict(), [ - { - **self.bc.format.to_dict(model.cohort_time_slot[0]), - 'recurrency_type': current, - }, - { - **self.bc.format.to_dict(model.cohort_time_slot[1]), - 'recurrency_type': current, - }, - { - **self.bc.format.to_dict(model.cohort_time_slot[2]), - 'recurrency_type': bad_status, - }, - ]) + self.assertEqual( + self.all_cohort_time_slot_dict(), + [ + { + **self.bc.format.to_dict(model.cohort_time_slot[0]), + "recurrency_type": current, + }, + { + **self.bc.format.to_dict(model.cohort_time_slot[1]), + "recurrency_type": current, + }, + { + **self.bc.format.to_dict(model.cohort_time_slot[2]), + "recurrency_type": bad_status, + }, + ], + ) """ 🔽🔽🔽 Without timezone """ - @patch('breathecode.events.tasks.build_live_classes_from_timeslot.delay', MagicMock()) + @patch("breathecode.events.tasks.build_live_classes_from_timeslot.delay", MagicMock()) def test__post__without_timezone(self): self.headers(academy=1) - model = self.generate_models(authenticate=True, profile_academy=True, capability='crud_cohort', role='potato') - url = reverse_lazy('admissions:academy_cohort_id_timeslot', kwargs={'cohort_id': 1}) + model = self.generate_models(authenticate=True, profile_academy=True, capability="crud_cohort", role="potato") + url = reverse_lazy("admissions:academy_cohort_id_timeslot", kwargs={"cohort_id": 1}) data = {} - response = self.client.post(url, data, format='json') + response = self.client.post(url, data, format="json") json = response.json() - expected = {'detail': 'academy-without-timezone', 'status_code': 400} + expected = {"detail": "academy-without-timezone", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -209,112 +227,126 @@ def test__post__without_timezone(self): 🔽🔽🔽 Post """ - @patch('breathecode.events.tasks.build_live_classes_from_timeslot.delay', MagicMock()) + @patch("breathecode.events.tasks.build_live_classes_from_timeslot.delay", MagicMock()) def test__post__without_ending_at_and_starting_at(self): 
self.headers(academy=1) - academy_kwargs = {'timezone': 'America/Caracas'} - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_cohort', - role='potato', - academy_kwargs=academy_kwargs) - url = reverse_lazy('admissions:academy_cohort_id_timeslot', kwargs={'cohort_id': 1}) + academy_kwargs = {"timezone": "America/Caracas"} + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="crud_cohort", + role="potato", + academy_kwargs=academy_kwargs, + ) + url = reverse_lazy("admissions:academy_cohort_id_timeslot", kwargs={"cohort_id": 1}) data = {} - response = self.client.post(url, data, format='json') + response = self.client.post(url, data, format="json") json = response.json() expected = { - 'ending_at': ['This field is required.'], - 'starting_at': ['This field is required.'], + "ending_at": ["This field is required."], + "starting_at": ["This field is required."], } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual(self.all_cohort_time_slot_dict(), []) - @patch('breathecode.events.tasks.build_live_classes_from_timeslot.delay', MagicMock()) + @patch("breathecode.events.tasks.build_live_classes_from_timeslot.delay", MagicMock()) def test__post__passing_all_status__in_lowercase(self): self.headers(academy=1) - academy_kwargs = {'timezone': 'America/Caracas'} - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_cohort', - role='potato', - academy_kwargs=academy_kwargs) - url = reverse_lazy('admissions:academy_cohort_id_timeslot', kwargs={'cohort_id': 1}) + academy_kwargs = {"timezone": "America/Caracas"} + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="crud_cohort", + role="potato", + academy_kwargs=academy_kwargs, + ) + url = reverse_lazy("admissions:academy_cohort_id_timeslot", kwargs={"cohort_id": 1}) starting_at = self.datetime_now() ending_at = self.datetime_now() - recurrency_type = random.choice(['DAILY', 'WEEKLY', 'MONTHLY']) + recurrency_type = random.choice(["DAILY", "WEEKLY", "MONTHLY"]) data = { - 'ending_at': self.datetime_to_iso(ending_at), - 'starting_at': self.datetime_to_iso(starting_at), - 'recurrency_type': recurrency_type.lower(), + "ending_at": self.datetime_to_iso(ending_at), + "starting_at": self.datetime_to_iso(starting_at), + "recurrency_type": recurrency_type.lower(), } - response = self.client.post(url, data, format='json') + response = self.client.post(url, data, format="json") json = response.json() expected = { - 'cohort': 1, - 'id': 1, - 'recurrent': True, - 'timezone': 'America/Caracas', - 'recurrency_type': recurrency_type, + "cohort": 1, + "id": 1, + "recurrent": True, + "timezone": "America/Caracas", + "recurrency_type": recurrency_type, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - self.assertEqual(self.all_cohort_time_slot_dict(), - [{ - 'cohort_id': 1, - 'removed_at': None, - 'ending_at': DatetimeInteger.from_datetime(model.academy.timezone, ending_at), - 'id': 1, - 'recurrent': True, - 'starting_at': DatetimeInteger.from_datetime(model.academy.timezone, starting_at), - 'timezone': 'America/Caracas', - 'recurrency_type': recurrency_type, - }]) - - @patch('breathecode.events.tasks.build_live_classes_from_timeslot.delay', MagicMock()) + self.assertEqual( + self.all_cohort_time_slot_dict(), + [ + { + "cohort_id": 1, + "removed_at": None, + "ending_at": 
DatetimeInteger.from_datetime(model.academy.timezone, ending_at), + "id": 1, + "recurrent": True, + "starting_at": DatetimeInteger.from_datetime(model.academy.timezone, starting_at), + "timezone": "America/Caracas", + "recurrency_type": recurrency_type, + } + ], + ) + + @patch("breathecode.events.tasks.build_live_classes_from_timeslot.delay", MagicMock()) def test__post__passing_all_status__in_uppercase(self): self.headers(academy=1) - academy_kwargs = {'timezone': 'America/Caracas'} - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_cohort', - role='potato', - academy_kwargs=academy_kwargs) - url = reverse_lazy('admissions:academy_cohort_id_timeslot', kwargs={'cohort_id': 1}) + academy_kwargs = {"timezone": "America/Caracas"} + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="crud_cohort", + role="potato", + academy_kwargs=academy_kwargs, + ) + url = reverse_lazy("admissions:academy_cohort_id_timeslot", kwargs={"cohort_id": 1}) starting_at = self.datetime_now() ending_at = self.datetime_now() - recurrency_type = random.choice(['DAILY', 'WEEKLY', 'MONTHLY']) + recurrency_type = random.choice(["DAILY", "WEEKLY", "MONTHLY"]) data = { - 'ending_at': self.datetime_to_iso(ending_at), - 'starting_at': self.datetime_to_iso(starting_at), - 'recurrency_type': recurrency_type, + "ending_at": self.datetime_to_iso(ending_at), + "starting_at": self.datetime_to_iso(starting_at), + "recurrency_type": recurrency_type, } - response = self.client.post(url, data, format='json') + response = self.client.post(url, data, format="json") json = response.json() expected = { - 'cohort': 1, - 'id': 1, - 'recurrent': True, - 'timezone': 'America/Caracas', - 'recurrency_type': recurrency_type, + "cohort": 1, + "id": 1, + "recurrent": True, + "timezone": "America/Caracas", + "recurrency_type": recurrency_type, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - self.assertEqual(self.all_cohort_time_slot_dict(), - [{ - 'cohort_id': 1, - 'removed_at': None, - 'ending_at': DatetimeInteger.from_datetime(model.academy.timezone, ending_at), - 'id': 1, - 'recurrent': True, - 'starting_at': DatetimeInteger.from_datetime(model.academy.timezone, starting_at), - 'timezone': 'America/Caracas', - 'recurrency_type': recurrency_type, - }]) + self.assertEqual( + self.all_cohort_time_slot_dict(), + [ + { + "cohort_id": 1, + "removed_at": None, + "ending_at": DatetimeInteger.from_datetime(model.academy.timezone, ending_at), + "id": 1, + "recurrent": True, + "starting_at": DatetimeInteger.from_datetime(model.academy.timezone, starting_at), + "timezone": "America/Caracas", + "recurrency_type": recurrency_type, + } + ], + ) diff --git a/breathecode/admissions/tests/urls/tests_academy_cohort_id_timeslot_id.py b/breathecode/admissions/tests/urls/tests_academy_cohort_id_timeslot_id.py index bd5742bd6..a72c1f7f7 100644 --- a/breathecode/admissions/tests/urls/tests_academy_cohort_id_timeslot_id.py +++ b/breathecode/admissions/tests/urls/tests_academy_cohort_id_timeslot_id.py @@ -1,6 +1,7 @@ """ Test /cohort/user """ + import random from django.urls.base import reverse_lazy from rest_framework import status @@ -9,46 +10,52 @@ class CohortUserTestSuite(AdmissionsTestCase): """Test /cohort/user""" + """ 🔽🔽🔽 Auth """ def test_cohort_time_slot__without_auth(self): - url = reverse_lazy('admissions:academy_cohort_id_timeslot_id', kwargs={'cohort_id': 1, 'timeslot_id': 1}) + url = 
reverse_lazy("admissions:academy_cohort_id_timeslot_id", kwargs={"cohort_id": 1, "timeslot_id": 1}) response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED - }) + self.assertEqual( + json, + {"detail": "Authentication credentials were not provided.", "status_code": status.HTTP_401_UNAUTHORIZED}, + ) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) def test_cohort_time_slot__without_academy_header(self): model = self.generate_models(authenticate=True) - url = reverse_lazy('admissions:academy_cohort_id_timeslot_id', kwargs={'cohort_id': 1, 'timeslot_id': 1}) + url = reverse_lazy("admissions:academy_cohort_id_timeslot_id", kwargs={"cohort_id": 1, "timeslot_id": 1}) response = self.client.get(url) json = response.json() self.assertEqual( - json, { - 'detail': "Missing academy_id parameter expected for the endpoint url or 'Academy' header", - 'status_code': 403, - }) + json, + { + "detail": "Missing academy_id parameter expected for the endpoint url or 'Academy' header", + "status_code": 403, + }, + ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) self.assertEqual(self.all_cohort_time_slot_dict(), []) def test_cohort_time_slot__without_capabilities(self): self.headers(academy=1) model = self.generate_models(authenticate=True) - url = reverse_lazy('admissions:academy_cohort_id_timeslot_id', kwargs={'cohort_id': 1, 'timeslot_id': 1}) + url = reverse_lazy("admissions:academy_cohort_id_timeslot_id", kwargs={"cohort_id": 1, "timeslot_id": 1}) response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': "You (user: 1) don't have this capability: read_all_cohort for academy 1", - 'status_code': 403, - }) + self.assertEqual( + json, + { + "detail": "You (user: 1) don't have this capability: read_all_cohort for academy 1", + "status_code": 403, + }, + ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) self.assertEqual(self.all_cohort_time_slot_dict(), []) @@ -58,16 +65,15 @@ def test_cohort_time_slot__without_capabilities(self): def test_cohort_time_slot__without_data(self): self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_all_cohort', - role='potato') - url = reverse_lazy('admissions:academy_cohort_id_timeslot_id', kwargs={'cohort_id': 1, 'timeslot_id': 1}) + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_all_cohort", role="potato" + ) + url = reverse_lazy("admissions:academy_cohort_id_timeslot_id", kwargs={"cohort_id": 1, "timeslot_id": 1}) response = self.client.get(url) json = response.json() expected = { - 'detail': 'time-slot-not-found', - 'status_code': 404, + "detail": "time-slot-not-found", + "status_code": 404, } self.assertEqual(json, expected) @@ -80,30 +86,33 @@ def test_cohort_time_slot__without_data(self): def test_cohort_time_slot__with_data(self): self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_all_cohort', - role='potato', - cohort_time_slot=True) - url = reverse_lazy('admissions:academy_cohort_id_timeslot_id', kwargs={'cohort_id': 1, 'timeslot_id': 1}) + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_all_cohort", role="potato", cohort_time_slot=True + ) + url = reverse_lazy("admissions:academy_cohort_id_timeslot_id", kwargs={"cohort_id": 1, 
"timeslot_id": 1}) response = self.client.get(url) json = response.json() expected = { - 'id': model.cohort_time_slot.id, - 'cohort': model.cohort_time_slot.cohort.id, - 'starting_at': self.integer_to_iso(model.cohort_time_slot.timezone, model.cohort_time_slot.starting_at), - 'ending_at': self.integer_to_iso(model.cohort_time_slot.timezone, model.cohort_time_slot.ending_at), - 'recurrent': model.cohort_time_slot.recurrent, - 'recurrency_type': model.cohort_time_slot.recurrency_type, - 'created_at': self.datetime_to_iso(model.cohort_time_slot.created_at), - 'updated_at': self.datetime_to_iso(model.cohort_time_slot.updated_at), + "id": model.cohort_time_slot.id, + "cohort": model.cohort_time_slot.cohort.id, + "starting_at": self.integer_to_iso(model.cohort_time_slot.timezone, model.cohort_time_slot.starting_at), + "ending_at": self.integer_to_iso(model.cohort_time_slot.timezone, model.cohort_time_slot.ending_at), + "recurrent": model.cohort_time_slot.recurrent, + "recurrency_type": model.cohort_time_slot.recurrency_type, + "created_at": self.datetime_to_iso(model.cohort_time_slot.created_at), + "updated_at": self.datetime_to_iso(model.cohort_time_slot.updated_at), } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_cohort_time_slot_dict(), [{ - **self.model_to_dict(model, 'cohort_time_slot'), - }]) + self.assertEqual( + self.all_cohort_time_slot_dict(), + [ + { + **self.model_to_dict(model, "cohort_time_slot"), + } + ], + ) """ 🔽🔽🔽 Put @@ -111,14 +120,14 @@ def test_cohort_time_slot__with_data(self): def test_cohort_time_slot__put__without_time_slot(self): self.headers(academy=1) - model = self.generate_models(authenticate=True, profile_academy=True, capability='crud_cohort', role='potato') - url = reverse_lazy('admissions:academy_cohort_id_timeslot_id', kwargs={'cohort_id': 1, 'timeslot_id': 1}) + model = self.generate_models(authenticate=True, profile_academy=True, capability="crud_cohort", role="potato") + url = reverse_lazy("admissions:academy_cohort_id_timeslot_id", kwargs={"cohort_id": 1, "timeslot_id": 1}) data = {} - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() expected = { - 'detail': 'time-slot-not-found', - 'status_code': 404, + "detail": "time-slot-not-found", + "status_code": 404, } self.assertEqual(json, expected) @@ -127,134 +136,150 @@ def test_cohort_time_slot__put__without_time_slot(self): def test_cohort_time_slot__put__without_timezone(self): self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_cohort', - role='potato', - cohort_time_slot=True) - url = reverse_lazy('admissions:academy_cohort_id_timeslot_id', kwargs={'cohort_id': 1, 'timeslot_id': 1}) + model = self.generate_models( + authenticate=True, profile_academy=True, capability="crud_cohort", role="potato", cohort_time_slot=True + ) + url = reverse_lazy("admissions:academy_cohort_id_timeslot_id", kwargs={"cohort_id": 1, "timeslot_id": 1}) data = {} - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() - expected = {'detail': 'academy-without-timezone', 'status_code': 400} + expected = {"detail": "academy-without-timezone", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.all_cohort_time_slot_dict(), [{ - 
**self.model_to_dict(model, 'cohort_time_slot'), - }]) + self.assertEqual( + self.all_cohort_time_slot_dict(), + [ + { + **self.model_to_dict(model, "cohort_time_slot"), + } + ], + ) def test_cohort_time_slot__put__without_ending_at_and_starting_at(self): self.headers(academy=1) - academy_kwargs = {'timezone': 'America/Caracas'} - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_cohort', - role='potato', - cohort_time_slot=True, - academy_kwargs=academy_kwargs) - url = reverse_lazy('admissions:academy_cohort_id_timeslot_id', kwargs={'cohort_id': 1, 'timeslot_id': 1}) + academy_kwargs = {"timezone": "America/Caracas"} + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="crud_cohort", + role="potato", + cohort_time_slot=True, + academy_kwargs=academy_kwargs, + ) + url = reverse_lazy("admissions:academy_cohort_id_timeslot_id", kwargs={"cohort_id": 1, "timeslot_id": 1}) data = {} - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() expected = { - 'ending_at': ['This field is required.'], - 'starting_at': ['This field is required.'], + "ending_at": ["This field is required."], + "starting_at": ["This field is required."], } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.all_cohort_time_slot_dict(), [{ - **self.model_to_dict(model, 'cohort_time_slot'), - }]) + self.assertEqual( + self.all_cohort_time_slot_dict(), + [ + { + **self.model_to_dict(model, "cohort_time_slot"), + } + ], + ) def test_cohort_time_slot__put__in_lowercase(self): self.headers(academy=1) - academy_kwargs = {'timezone': 'America/Caracas'} - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_cohort', - role='potato', - cohort_time_slot=True, - academy_kwargs=academy_kwargs) - url = reverse_lazy('admissions:academy_cohort_id_timeslot_id', kwargs={'cohort_id': 1, 'timeslot_id': 1}) + academy_kwargs = {"timezone": "America/Caracas"} + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="crud_cohort", + role="potato", + cohort_time_slot=True, + academy_kwargs=academy_kwargs, + ) + url = reverse_lazy("admissions:academy_cohort_id_timeslot_id", kwargs={"cohort_id": 1, "timeslot_id": 1}) starting_at = self.datetime_now() ending_at = self.datetime_now() - recurrency_type = random.choice(['DAILY', 'WEEKLY', 'MONTHLY']) + recurrency_type = random.choice(["DAILY", "WEEKLY", "MONTHLY"]) data = { - 'ending_at': self.datetime_to_iso(ending_at), - 'starting_at': self.datetime_to_iso(starting_at), - 'recurrency_type': recurrency_type.lower(), + "ending_at": self.datetime_to_iso(ending_at), + "starting_at": self.datetime_to_iso(starting_at), + "recurrency_type": recurrency_type.lower(), } - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() expected = { - 'cohort': 1, - 'id': 1, - 'recurrent': True, - 'timezone': model.academy.timezone, - 'recurrency_type': recurrency_type, + "cohort": 1, + "id": 1, + "recurrent": True, + "timezone": model.academy.timezone, + "recurrency_type": recurrency_type, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_cohort_time_slot_dict(), [{ - **self.model_to_dict(model, 'cohort_time_slot'), - 'ending_at': - self.datetime_to_integer(model.academy.timezone, 
ending_at), - 'starting_at': - self.datetime_to_integer(model.academy.timezone, starting_at), - 'timezone': - model.academy.timezone, - 'recurrency_type': - recurrency_type, - }]) + self.assertEqual( + self.all_cohort_time_slot_dict(), + [ + { + **self.model_to_dict(model, "cohort_time_slot"), + "ending_at": self.datetime_to_integer(model.academy.timezone, ending_at), + "starting_at": self.datetime_to_integer(model.academy.timezone, starting_at), + "timezone": model.academy.timezone, + "recurrency_type": recurrency_type, + } + ], + ) def test_cohort_time_slot__put__in_uppercase(self): self.headers(academy=1) - academy_kwargs = {'timezone': 'America/Caracas'} - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_cohort', - role='potato', - cohort_time_slot=True, - academy_kwargs=academy_kwargs) - url = reverse_lazy('admissions:academy_cohort_id_timeslot_id', kwargs={'cohort_id': 1, 'timeslot_id': 1}) + academy_kwargs = {"timezone": "America/Caracas"} + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="crud_cohort", + role="potato", + cohort_time_slot=True, + academy_kwargs=academy_kwargs, + ) + url = reverse_lazy("admissions:academy_cohort_id_timeslot_id", kwargs={"cohort_id": 1, "timeslot_id": 1}) starting_at = self.datetime_now() ending_at = self.datetime_now() - recurrency_type = random.choice(['DAILY', 'WEEKLY', 'MONTHLY']) + recurrency_type = random.choice(["DAILY", "WEEKLY", "MONTHLY"]) data = { - 'ending_at': self.datetime_to_iso(ending_at), - 'starting_at': self.datetime_to_iso(starting_at), - 'recurrency_type': recurrency_type, + "ending_at": self.datetime_to_iso(ending_at), + "starting_at": self.datetime_to_iso(starting_at), + "recurrency_type": recurrency_type, } - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() expected = { - 'cohort': 1, - 'id': 1, - 'recurrent': True, - 'timezone': model.academy.timezone, - 'recurrency_type': recurrency_type, + "cohort": 1, + "id": 1, + "recurrent": True, + "timezone": model.academy.timezone, + "recurrency_type": recurrency_type, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_cohort_time_slot_dict(), [{ - **self.model_to_dict(model, 'cohort_time_slot'), - 'ending_at': - self.datetime_to_integer(model.academy.timezone, ending_at), - 'starting_at': - self.datetime_to_integer(model.academy.timezone, starting_at), - 'timezone': - model.academy.timezone, - 'recurrency_type': - recurrency_type, - }]) + self.assertEqual( + self.all_cohort_time_slot_dict(), + [ + { + **self.model_to_dict(model, "cohort_time_slot"), + "ending_at": self.datetime_to_integer(model.academy.timezone, ending_at), + "starting_at": self.datetime_to_integer(model.academy.timezone, starting_at), + "timezone": model.academy.timezone, + "recurrency_type": recurrency_type, + } + ], + ) """ 🔽🔽🔽 Delete @@ -262,13 +287,13 @@ def test_cohort_time_slot__put__in_uppercase(self): def test_cohort_time_slot__delete__without_time_slot(self): self.headers(academy=1) - model = self.generate_models(authenticate=True, profile_academy=True, capability='crud_cohort', role='potato') - url = reverse_lazy('admissions:academy_cohort_id_timeslot_id', kwargs={'cohort_id': 1, 'timeslot_id': 1}) + model = self.generate_models(authenticate=True, profile_academy=True, capability="crud_cohort", role="potato") + url = reverse_lazy("admissions:academy_cohort_id_timeslot_id", 
kwargs={"cohort_id": 1, "timeslot_id": 1}) response = self.client.delete(url) json = response.json() expected = { - 'detail': 'time-slot-not-found', - 'status_code': 404, + "detail": "time-slot-not-found", + "status_code": 404, } self.assertEqual(json, expected) @@ -277,12 +302,10 @@ def test_cohort_time_slot__delete__without_time_slot(self): def test_cohort_time_slot__delete(self): self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_cohort', - role='potato', - cohort_time_slot=True) - url = reverse_lazy('admissions:academy_cohort_id_timeslot_id', kwargs={'cohort_id': 1, 'timeslot_id': 1}) + model = self.generate_models( + authenticate=True, profile_academy=True, capability="crud_cohort", role="potato", cohort_time_slot=True + ) + url = reverse_lazy("admissions:academy_cohort_id_timeslot_id", kwargs={"cohort_id": 1, "timeslot_id": 1}) response = self.client.delete(url) self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) diff --git a/breathecode/admissions/tests/urls/tests_academy_cohort_id_user_id.py b/breathecode/admissions/tests/urls/tests_academy_cohort_id_user_id.py index 65ef47262..de5480446 100644 --- a/breathecode/admissions/tests/urls/tests_academy_cohort_id_user_id.py +++ b/breathecode/admissions/tests/urls/tests_academy_cohort_id_user_id.py @@ -1,6 +1,7 @@ """ Test /cohort/:id/user/:id """ + import re from unittest.mock import MagicMock, patch @@ -15,85 +16,95 @@ def post_serializer(self, cohort, user, profile_academy=None, data={}): return { - 'cohort': { - 'ending_date': cohort.ending_date, - 'id': cohort.id, - 'kickoff_date': - self.bc.datetime.to_iso_string(cohort.kickoff_date) if cohort.kickoff_date else cohort.kickoff_date, - 'name': cohort.name, - 'slug': cohort.slug, - 'stage': cohort.stage, - 'available_as_saas': cohort.available_as_saas, + "cohort": { + "ending_date": cohort.ending_date, + "id": cohort.id, + "kickoff_date": ( + self.bc.datetime.to_iso_string(cohort.kickoff_date) if cohort.kickoff_date else cohort.kickoff_date + ), + "name": cohort.name, + "slug": cohort.slug, + "stage": cohort.stage, + "available_as_saas": cohort.available_as_saas, }, - 'created_at': self.bc.datetime.to_iso_string(UTC_NOW), - 'educational_status': 'ACTIVE', - 'finantial_status': None, - 'id': 1, - 'profile_academy': { - 'email': profile_academy.email, - 'first_name': profile_academy.first_name, - 'id': profile_academy.id, - 'last_name': profile_academy.last_name, - 'phone': profile_academy.phone, - } if profile_academy else None, - 'role': 'STUDENT', - 'user': { - 'email': user.email, - 'first_name': user.first_name, - 'id': user.id, - 'last_name': user.last_name, - 'last_login': user.last_login, + "created_at": self.bc.datetime.to_iso_string(UTC_NOW), + "educational_status": "ACTIVE", + "finantial_status": None, + "id": 1, + "profile_academy": ( + { + "email": profile_academy.email, + "first_name": profile_academy.first_name, + "id": profile_academy.id, + "last_name": profile_academy.last_name, + "phone": profile_academy.phone, + } + if profile_academy + else None + ), + "role": "STUDENT", + "user": { + "email": user.email, + "first_name": user.first_name, + "id": user.id, + "last_name": user.last_name, + "last_login": user.last_login, }, - 'watching': False, + "watching": False, **data, } def cohort_user_field(data={}): return { - 'cohort_id': 0, - 'educational_status': 'ACTIVE', - 'finantial_status': None, - 'id': 0, - 'role': 'STUDENT', - 'user_id': 0, - 'watching': False, + "cohort_id": 0, + 
"educational_status": "ACTIVE", + "finantial_status": None, + "id": 0, + "role": "STUDENT", + "user_id": 0, + "watching": False, **data, } def put_serializer(self, cohort_user, cohort, user, profile_academy=None, data={}): return { - 'cohort': { - 'ending_date': cohort.ending_date, - 'id': cohort.id, - 'kickoff_date': - self.bc.datetime.to_iso_string(cohort.kickoff_date) if cohort.kickoff_date else cohort.kickoff_date, - 'name': cohort.name, - 'slug': cohort.slug, - 'stage': cohort.stage, - 'available_as_saas': cohort.available_as_saas, + "cohort": { + "ending_date": cohort.ending_date, + "id": cohort.id, + "kickoff_date": ( + self.bc.datetime.to_iso_string(cohort.kickoff_date) if cohort.kickoff_date else cohort.kickoff_date + ), + "name": cohort.name, + "slug": cohort.slug, + "stage": cohort.stage, + "available_as_saas": cohort.available_as_saas, }, - 'created_at': self.bc.datetime.to_iso_string(cohort_user.created_at), - 'educational_status': cohort_user.educational_status, - 'finantial_status': cohort_user.finantial_status, - 'id': cohort_user.id, - 'profile_academy': { - 'email': profile_academy.email, - 'first_name': profile_academy.first_name, - 'id': profile_academy.id, - 'last_name': profile_academy.last_name, - 'phone': profile_academy.phone, - } if profile_academy else None, - 'role': cohort_user.role, - 'user': { - 'email': user.email, - 'first_name': user.first_name, - 'id': user.id, - 'last_name': user.last_name, - 'last_login': user.last_login, + "created_at": self.bc.datetime.to_iso_string(cohort_user.created_at), + "educational_status": cohort_user.educational_status, + "finantial_status": cohort_user.finantial_status, + "id": cohort_user.id, + "profile_academy": ( + { + "email": profile_academy.email, + "first_name": profile_academy.first_name, + "id": profile_academy.id, + "last_name": profile_academy.last_name, + "phone": profile_academy.phone, + } + if profile_academy + else None + ), + "role": cohort_user.role, + "user": { + "email": user.email, + "first_name": user.first_name, + "id": user.id, + "last_name": user.last_name, + "last_login": user.last_login, }, - 'watching': cohort_user.watching, + "watching": cohort_user.watching, **data, } @@ -103,37 +114,39 @@ def check_cohort_user_that_not_have_role_student_can_be_teacher(self, role, upda self.headers(academy=1) model_kwargs = { - 'authenticate': True, - 'cohort': True, - 'user': True, - 'profile_academy': True, - 'role': role, - 'capability': 'crud_cohort', + "authenticate": True, + "cohort": True, + "user": True, + "profile_academy": True, + "role": role, + "capability": "crud_cohort", } if update: - model_kwargs['cohort_user'] = True + model_kwargs["cohort_user"] = True model = self.generate_models(**model_kwargs) - url = reverse_lazy('admissions:cohort_id_user', kwargs={'cohort_id': 1}) - data = {'user': model['user'].id, 'role': 'TEACHER'} + url = reverse_lazy("admissions:cohort_id_user", kwargs={"cohort_id": 1}) + data = {"user": model["user"].id, "role": "TEACHER"} request_func = self.client.put if update else self.client.post - response = request_func(url, data, format='json') + response = request_func(url, data, format="json") json = response.json() - expected = post_serializer(self, - model.cohort, - model.user, - model.profile_academy, - data={ - 'role': 'TEACHER', - **additional_data, - }) - - expected['educational_status'] = 'ACTIVE' - expected['finantial_status'] = None + expected = post_serializer( + self, + model.cohort, + model.user, + model.profile_academy, + data={ + "role": "TEACHER", + 
**additional_data, + }, + ) + + expected["educational_status"] = "ACTIVE" + expected["finantial_status"] = None self.assertEqual(json, expected) @@ -143,406 +156,421 @@ def check_cohort_user_that_not_have_role_student_can_be_teacher(self, role, upda self.assertEqual(response.status_code, status.HTTP_201_CREATED) if update: - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), - [{ - **self.model_to_dict(model, 'cohort_user'), - 'role': 'TEACHER', - }]) + self.assertEqual( + self.bc.database.list_of("admissions.CohortUser"), + [ + { + **self.model_to_dict(model, "cohort_user"), + "role": "TEACHER", + } + ], + ) else: - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), [{ - 'cohort_id': 1, - 'educational_status': 'ACTIVE', - 'finantial_status': None, - 'id': 1, - 'role': 'TEACHER', - 'user_id': 1, - 'watching': False, - }]) + self.assertEqual( + self.bc.database.list_of("admissions.CohortUser"), + [ + { + "cohort_id": 1, + "educational_status": "ACTIVE", + "finantial_status": None, + "id": 1, + "role": "TEACHER", + "user_id": 1, + "watching": False, + } + ], + ) class CohortUserTestSuite(AdmissionsTestCase): """Test /cohort/user""" + """ 🔽🔽🔽 Auth """ - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_cohort_user__without_auth(self): """Test /cohort/user without auth""" self.headers(academy=1) - url = reverse_lazy('admissions:academy_cohort_id_user_id', kwargs={'cohort_id': 1, 'user_id': 1}) + url = reverse_lazy("admissions:academy_cohort_id_user_id", kwargs={"cohort_id": 1, "user_id": 1}) response = self.client.post(url, {}) json = response.json() - self.assertEqual(json, { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED - }) + self.assertEqual( + json, + {"detail": "Authentication credentials were not provided.", "status_code": status.HTTP_401_UNAUTHORIZED}, + ) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) """ 🔽🔽🔽 Post method """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_cohort_user__post(self): """Test /cohort/:id/user without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - cohort={'stage': 'STARTED'}, - user=True, - profile_academy=True, - capability='crud_cohort', - role='potato') - - url = reverse_lazy('admissions:academy_cohort_id_user_id', kwargs={'cohort_id': 1, 'user_id': 1}) + model = self.generate_models( + authenticate=True, + cohort={"stage": "STARTED"}, + user=True, + profile_academy=True, + capability="crud_cohort", + role="potato", + ) + + url = reverse_lazy("admissions:academy_cohort_id_user_id", kwargs={"cohort_id": 1, "user_id": 1}) data = { - 
'user': model['user'].id, - 'cohort': model['cohort'].id, + "user": model["user"].id, + "cohort": model["cohort"].id, } - response = self.client.post(url, data, format='json') + response = self.client.post(url, data, format="json") json = response.json() - expected = post_serializer(self, - model.cohort, - model.user, - model.profile_academy, - data={ - 'id': 1, - 'role': 'STUDENT', - }) + expected = post_serializer( + self, + model.cohort, + model.user, + model.profile_academy, + data={ + "id": 1, + "role": "STUDENT", + }, + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), [{ - 'cohort_id': 1, - 'educational_status': 'ACTIVE', - 'finantial_status': None, - 'id': 1, - 'role': 'STUDENT', - 'user_id': 1, - 'watching': False, - 'history_log': {}, - }]) + self.assertEqual( + self.bc.database.list_of("admissions.CohortUser"), + [ + { + "cohort_id": 1, + "educational_status": "ACTIVE", + "finantial_status": None, + "id": 1, + "role": "STUDENT", + "user_id": 1, + "watching": False, + "history_log": {}, + } + ], + ) """ 🔽🔽🔽 Add the same teacher to two cohors """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_cohort_user__post__same_teacher_in_two_cohorts(self): """Test /cohort/:id/user without auth""" self.headers(academy=1) models = [ - self.generate_models(authenticate=True, - user=True, - cohort={'stage': 'STARTED'}, - profile_academy=True, - capability='crud_cohort', - role='staff') + self.generate_models( + authenticate=True, + user=True, + cohort={"stage": "STARTED"}, + profile_academy=True, + capability="crud_cohort", + role="staff", + ) ] base = models[0].copy() - del base['cohort'] + del base["cohort"] - models = models + [self.generate_models(cohort={'stage': 'STARTED'}, models=base)] - url = reverse_lazy('admissions:academy_cohort_id_user_id', kwargs={'cohort_id': 1, 'user_id': 1}) + models = models + [self.generate_models(cohort={"stage": "STARTED"}, models=base)] + url = reverse_lazy("admissions:academy_cohort_id_user_id", kwargs={"cohort_id": 1, "user_id": 1}) data = { - 'user': 1, - 'cohort': 1, - 'role': 'TEACHER', + "user": 1, + "cohort": 1, + "role": "TEACHER", } - response = self.client.post(url, data, format='json') + response = self.client.post(url, data, format="json") data = { - 'user': 1, - 'cohort': 2, - 'role': 'TEACHER', + "user": 1, + "cohort": 2, + "role": "TEACHER", } - url = reverse_lazy('admissions:academy_cohort_id_user_id', kwargs={'cohort_id': 2, 'user_id': 1}) - response = self.client.post(url, data, format='json') + url = reverse_lazy("admissions:academy_cohort_id_user_id", kwargs={"cohort_id": 2, "user_id": 1}) + response = self.client.post(url, data, format="json") json = response.json() model = models[1] - expected = post_serializer(self, - model.cohort, - model.user, - model.profile_academy, - data={ - 'id': 2, - 'role': 'TEACHER', - }) + expected = post_serializer( + self, + model.cohort, + model.user, + model.profile_academy, + 
data={ + "id": 2, + "role": "TEACHER", + }, + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), [{ - 'cohort_id': 1, - 'educational_status': 'ACTIVE', - 'finantial_status': None, - 'id': 1, - 'role': 'TEACHER', - 'user_id': 1, - 'watching': False, - 'history_log': {}, - }, { - 'cohort_id': 2, - 'educational_status': 'ACTIVE', - 'finantial_status': None, - 'id': 2, - 'role': 'TEACHER', - 'user_id': 1, - 'watching': False, - 'history_log': {}, - }]) + self.assertEqual( + self.bc.database.list_of("admissions.CohortUser"), + [ + { + "cohort_id": 1, + "educational_status": "ACTIVE", + "finantial_status": None, + "id": 1, + "role": "TEACHER", + "user_id": 1, + "watching": False, + "history_log": {}, + }, + { + "cohort_id": 2, + "educational_status": "ACTIVE", + "finantial_status": None, + "id": 2, + "role": "TEACHER", + "user_id": 1, + "watching": False, + "history_log": {}, + }, + ], + ) """ 🔽🔽🔽 Put """ - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_cohort_user__put(self): """Test /cohort/user without auth""" self.headers(academy=1) - url = reverse_lazy('admissions:academy_cohort_id_user_id', kwargs={'cohort_id': 1, 'user_id': 1}) - model = self.generate_models(authenticate=True, - cohort_user=True, - profile_academy=True, - capability='crud_cohort', - role='potato') + url = reverse_lazy("admissions:academy_cohort_id_user_id", kwargs={"cohort_id": 1, "user_id": 1}) + model = self.generate_models( + authenticate=True, cohort_user=True, profile_academy=True, capability="crud_cohort", role="potato" + ) data = { - 'id': model['cohort_user'].id, - 'user': 1, - 'cohort': 1, + "id": model["cohort_user"].id, + "user": 1, + "cohort": 1, } - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() - expected = put_serializer(self, - model.cohort_user, - model.cohort, - model.user, - model.profile_academy, - data={'role': 'STUDENT'}) + expected = put_serializer( + self, model.cohort_user, model.cohort, model.user, model.profile_academy, data={"role": "STUDENT"} + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), - [{ - **self.model_to_dict(model, 'cohort_user'), - 'role': 'STUDENT', - 'watching': False, - }]) + self.assertEqual( + self.bc.database.list_of("admissions.CohortUser"), + [ + { + **self.model_to_dict(model, "cohort_user"), + "role": "STUDENT", + "watching": False, + } + ], + ) """ 🔽🔽🔽 Put teacher """ - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_cohort_user__put__teacher_with_role_student(self): """Test /cohort/user without auth""" self.headers(academy=1) - 
url = reverse_lazy('admissions:academy_cohort_id_user_id', kwargs={'cohort_id': 1, 'user_id': 1}) - model = self.generate_models(authenticate=True, - cohort_user=True, - profile_academy=True, - capability='crud_cohort', - role='student') + url = reverse_lazy("admissions:academy_cohort_id_user_id", kwargs={"cohort_id": 1, "user_id": 1}) + model = self.generate_models( + authenticate=True, cohort_user=True, profile_academy=True, capability="crud_cohort", role="student" + ) data = { - 'id': model['cohort_user'].id, - 'role': 'TEACHER', - 'user': 1, - 'cohort': 1, + "id": model["cohort_user"].id, + "role": "TEACHER", + "user": 1, + "cohort": 1, } - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() expected = { - 'detail': 'The user must be staff member to this academy before it can be a teacher', - 'status_code': 400, + "detail": "The user must be staff member to this academy before it can be a teacher", + "status_code": 400, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), - [{ - **self.model_to_dict(model, 'cohort_user'), - 'role': 'STUDENT', - }]) - - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + self.assertEqual( + self.bc.database.list_of("admissions.CohortUser"), + [ + { + **self.model_to_dict(model, "cohort_user"), + "role": "STUDENT", + } + ], + ) + + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_cohort_user__put__teacher(self): """Test /cohort/user without auth""" self.headers(academy=1) - url = reverse_lazy('admissions:academy_cohort_id_user_id', kwargs={'cohort_id': 1, 'user_id': 1}) - model = self.generate_models(authenticate=True, - cohort_user=True, - profile_academy=True, - capability='crud_cohort', - role='staff') + url = reverse_lazy("admissions:academy_cohort_id_user_id", kwargs={"cohort_id": 1, "user_id": 1}) + model = self.generate_models( + authenticate=True, cohort_user=True, profile_academy=True, capability="crud_cohort", role="staff" + ) data = { - 'id': model['cohort_user'].id, - 'role': 'TEACHER', - 'user': 1, - 'cohort': 1, + "id": model["cohort_user"].id, + "role": "TEACHER", + "user": 1, + "cohort": 1, } - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() - expected = put_serializer(self, - model.cohort_user, - model.cohort, - model.user, - model.profile_academy, - data={'role': 'TEACHER'}) + expected = put_serializer( + self, model.cohort_user, model.cohort, model.user, model.profile_academy, data={"role": "TEACHER"} + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), - [{ - **self.model_to_dict(model, 'cohort_user'), - 'role': 'TEACHER', - 'watching': False, - }]) - - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + 
self.assertEqual( + self.bc.database.list_of("admissions.CohortUser"), + [ + { + **self.model_to_dict(model, "cohort_user"), + "role": "TEACHER", + "watching": False, + } + ], + ) + + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_cohort_id_user__post__one_teacher__with_role_staff(self): """Test /cohort/:id/user without auth""" - check_cohort_user_that_not_have_role_student_can_be_teacher(self, - 'staff', - update=True, - additional_data={'watching': False}) - - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + check_cohort_user_that_not_have_role_student_can_be_teacher( + self, "staff", update=True, additional_data={"watching": False} + ) + + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_cohort_id_user__post__one_teacher__with_role_teacher(self): """Test /cohort/:id/user without auth""" - check_cohort_user_that_not_have_role_student_can_be_teacher(self, - 'teacher', - update=True, - additional_data={'watching': False}) - - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + check_cohort_user_that_not_have_role_student_can_be_teacher( + self, "teacher", update=True, additional_data={"watching": False} + ) + + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_cohort_id_user__post__one_teacher__with_role_syllabus_coordinator(self): """Test /cohort/:id/user without auth""" - check_cohort_user_that_not_have_role_student_can_be_teacher(self, - 'syllabus_coordinator', - update=True, - additional_data={'watching': False}) - - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + check_cohort_user_that_not_have_role_student_can_be_teacher( + self, "syllabus_coordinator", update=True, additional_data={"watching": False} + ) + + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_cohort_id_user__post__one_teacher__with_role_homework_reviewer(self): """Test /cohort/:id/user without auth""" - check_cohort_user_that_not_have_role_student_can_be_teacher(self, - 'homework_reviewer', - update=True, - 
additional_data={'watching': False}) - - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + check_cohort_user_that_not_have_role_student_can_be_teacher( + self, "homework_reviewer", update=True, additional_data={"watching": False} + ) + + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_cohort_id_user__post__one_teacher__with_role_growth_manager(self): """Test /cohort/:id/user without auth""" - check_cohort_user_that_not_have_role_student_can_be_teacher(self, - 'growth_manager', - update=True, - additional_data={'watching': False}) - - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + check_cohort_user_that_not_have_role_student_can_be_teacher( + self, "growth_manager", update=True, additional_data={"watching": False} + ) + + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_cohort_id_user__post__one_teacher__with_role_culture_and_recruitment(self): """Test /cohort/:id/user without auth""" - check_cohort_user_that_not_have_role_student_can_be_teacher(self, - 'culture_and_recruitment', - update=True, - additional_data={'watching': False}) - - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + check_cohort_user_that_not_have_role_student_can_be_teacher( + self, "culture_and_recruitment", update=True, additional_data={"watching": False} + ) + + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_cohort_id_user__post__one_teacher__with_role_country_manager(self): """Test /cohort/:id/user without auth""" - check_cohort_user_that_not_have_role_student_can_be_teacher(self, - 'country_manager', - update=True, - additional_data={'watching': False}) - - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + check_cohort_user_that_not_have_role_student_can_be_teacher( + self, "country_manager", update=True, additional_data={"watching": False} + ) + + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + 
@patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_cohort_id_user__post__one_teacher__with_role_community_manager(self): """Test /cohort/:id/user without auth""" - check_cohort_user_that_not_have_role_student_can_be_teacher(self, - 'community_manager', - update=True, - additional_data={'watching': False}) - - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + check_cohort_user_that_not_have_role_student_can_be_teacher( + self, "community_manager", update=True, additional_data={"watching": False} + ) + + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_cohort_id_user__post__one_teacher__with_role_career_support(self): """Test /cohort/:id/user without auth""" - check_cohort_user_that_not_have_role_student_can_be_teacher(self, - 'career_support', - update=True, - additional_data={'watching': False}) - - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + check_cohort_user_that_not_have_role_student_can_be_teacher( + self, "career_support", update=True, additional_data={"watching": False} + ) + + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_cohort_id_user__post__one_teacher__with_role_assistant(self): """Test /cohort/:id/user without auth""" - check_cohort_user_that_not_have_role_student_can_be_teacher(self, - 'assistant', - update=True, - additional_data={'watching': False}) - - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + check_cohort_user_that_not_have_role_student_can_be_teacher( + self, "assistant", update=True, additional_data={"watching": False} + ) + + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_cohort_id_user__post__one_teacher__with_role_admissions_developer(self): """Test /cohort/:id/user without auth""" - check_cohort_user_that_not_have_role_student_can_be_teacher(self, - 'admissions_developer', - update=True, - additional_data={'watching': False}) - - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + 
check_cohort_user_that_not_have_role_student_can_be_teacher( + self, "admissions_developer", update=True, additional_data={"watching": False} + ) + + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_cohort_id_user__post__one_teacher__with_role_admin(self): """Test /cohort/:id/user without auth""" - check_cohort_user_that_not_have_role_student_can_be_teacher(self, - 'admin', - update=True, - additional_data={'watching': False}) - - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + check_cohort_user_that_not_have_role_student_can_be_teacher( + self, "admin", update=True, additional_data={"watching": False} + ) + + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_cohort_id_user__post__one_teacher__with_role_academy_token(self): """Test /cohort/:id/user without auth""" - check_cohort_user_that_not_have_role_student_can_be_teacher(self, - 'academy_token', - update=True, - additional_data={'watching': False}) - - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + check_cohort_user_that_not_have_role_student_can_be_teacher( + self, "academy_token", update=True, additional_data={"watching": False} + ) + + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_cohort_id_user__post__one_teacher__with_role_academy_coordinator(self): """Test /cohort/:id/user without auth""" - check_cohort_user_that_not_have_role_student_can_be_teacher(self, - 'academy_coordinator', - update=True, - additional_data={'watching': False}) + check_cohort_user_that_not_have_role_student_can_be_teacher( + self, "academy_coordinator", update=True, additional_data={"watching": False} + ) diff --git a/breathecode/admissions/tests/urls/tests_academy_cohort_me.py b/breathecode/admissions/tests/urls/tests_academy_cohort_me.py index b79d3c07d..f85b0ffd1 100644 --- a/breathecode/admissions/tests/urls/tests_academy_cohort_me.py +++ b/breathecode/admissions/tests/urls/tests_academy_cohort_me.py @@ -1,6 +1,7 @@ """ Test /academy/cohort """ + import re from datetime import timedelta from unittest.mock import MagicMock, call, patch @@ -23,34 +24,37 @@ class AcademyCohortTestSuite(AdmissionsTestCase): 🔽🔽🔽 Auth """ - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + 
@patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_me__post__without_authorization(self): """Test /academy/cohort without auth""" self.headers(academy=1) - url = reverse_lazy('admissions:academy_cohort_me') + url = reverse_lazy("admissions:academy_cohort_me") response = self.client.get(url) json = response.json() - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) self.assertEqual(self.all_cohort_time_slot_dict(), []) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_me__without_capability(self): """Test /cohort/:id without auth""" self.headers(academy=1) - url = reverse_lazy('admissions:academy_cohort_me') + url = reverse_lazy("admissions:academy_cohort_me") self.generate_models(authenticate=True) response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': "You (user: 1) don't have this capability: read_single_cohort for academy 1", - 'status_code': 403 - }) + self.assertEqual( + json, + { + "detail": "You (user: 1) don't have this capability: read_single_cohort for academy 1", + "status_code": 403, + }, + ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) self.assertEqual(self.all_cohort_time_slot_dict(), []) @@ -61,21 +65,23 @@ def test_cohort_me__without_capability(self): 🔽🔽🔽 Without data """ - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_me_without_data(self): """Test /cohort without auth""" from breathecode.admissions.signals import cohort_saved self.headers(academy=1) - url = reverse_lazy('admissions:academy_cohort_me') - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_single_cohort', - role='potato', - syllabus=True, - skip_cohort=True) + url = reverse_lazy("admissions:academy_cohort_me") + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="read_single_cohort", + role="potato", + syllabus=True, + skip_cohort=True, + ) # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] @@ -92,9 +98,9 @@ def test_cohort_me_without_data(self): 🔽🔽🔽 With data (this method is reusable) """ - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', 
MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_me__with_data(self): """Test /cohort without auth""" self.check_cohort_me__with_data() @@ -103,83 +109,87 @@ def test_cohort_me__with_data(self): 🔽🔽🔽 Get """ - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_me__with_data__with_upcoming_false(self): """Test /cohort without auth""" from breathecode.admissions.signals import cohort_saved self.headers(academy=1) - model = self.generate_models(authenticate=True, - cohort=True, - profile_academy=True, - capability='read_single_cohort', - role='potato', - cohort_user=1, - syllabus=True, - syllabus_version=True, - syllabus_schedule=True) + model = self.generate_models( + authenticate=True, + cohort=True, + profile_academy=True, + capability="read_single_cohort", + role="potato", + cohort_user=1, + syllabus=True, + syllabus_version=True, + syllabus_schedule=True, + ) # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] - model_dict = self.remove_dinamics_fields(model['cohort'].__dict__) - base_url = reverse_lazy('admissions:academy_cohort_me') - url = f'{base_url}?upcoming=false' + model_dict = self.remove_dinamics_fields(model["cohort"].__dict__) + base_url = reverse_lazy("admissions:academy_cohort_me") + url = f"{base_url}?upcoming=false" response = self.client.get(url) json = response.json() - expected = [{ - 'id': model['cohort'].id, - 'slug': model['cohort'].slug, - 'name': model['cohort'].name, - 'never_ends': model['cohort'].never_ends, - 'remote_available': model['cohort'].remote_available, - 'private': model['cohort'].private, - 'kickoff_date': re.sub(r'\+00:00$', 'Z', model['cohort'].kickoff_date.isoformat()), - 'ending_date': model['cohort'].ending_date, - 'stage': model['cohort'].stage, - 'language': model['cohort'].language, - 'current_day': model['cohort'].current_day, - 'current_module': None, - 'online_meeting_url': model['cohort'].online_meeting_url, - 'timezone': model['cohort'].timezone, - 'timeslots': [], - 'is_hidden_on_prework': model['cohort'].is_hidden_on_prework, - 'available_as_saas': model['cohort'].available_as_saas, - 'schedule': { - 'id': model['cohort'].schedule.id, - 'name': model['cohort'].schedule.name, - 'syllabus': model['cohort'].schedule.syllabus.id, - }, - 'syllabus_version': { - 'status': model['cohort'].syllabus_version.status, - 'name': model.syllabus.name, - 'slug': model.syllabus.slug, - 'version': model['cohort'].syllabus_version.version, - 'syllabus': model['cohort'].syllabus_version.syllabus.id, - 'duration_in_days': model.syllabus.duration_in_days, - 'duration_in_hours': model.syllabus.duration_in_hours, - 'github_url': model.syllabus.github_url, - 'logo': model.syllabus.logo, - 'private': model.syllabus.private, - 
'week_hours': model.syllabus.week_hours, - }, - 'academy': { - 'id': model['cohort'].academy.id, - 'slug': model['cohort'].academy.slug, - 'name': model['cohort'].academy.name, - 'country': { - 'code': model['cohort'].academy.country.code, - 'name': model['cohort'].academy.country.name, + expected = [ + { + "id": model["cohort"].id, + "slug": model["cohort"].slug, + "name": model["cohort"].name, + "never_ends": model["cohort"].never_ends, + "remote_available": model["cohort"].remote_available, + "private": model["cohort"].private, + "kickoff_date": re.sub(r"\+00:00$", "Z", model["cohort"].kickoff_date.isoformat()), + "ending_date": model["cohort"].ending_date, + "stage": model["cohort"].stage, + "language": model["cohort"].language, + "current_day": model["cohort"].current_day, + "current_module": None, + "online_meeting_url": model["cohort"].online_meeting_url, + "timezone": model["cohort"].timezone, + "timeslots": [], + "is_hidden_on_prework": model["cohort"].is_hidden_on_prework, + "available_as_saas": model["cohort"].available_as_saas, + "schedule": { + "id": model["cohort"].schedule.id, + "name": model["cohort"].schedule.name, + "syllabus": model["cohort"].schedule.syllabus.id, }, - 'city': { - 'name': model['cohort'].academy.city.name, + "syllabus_version": { + "status": model["cohort"].syllabus_version.status, + "name": model.syllabus.name, + "slug": model.syllabus.slug, + "version": model["cohort"].syllabus_version.version, + "syllabus": model["cohort"].syllabus_version.syllabus.id, + "duration_in_days": model.syllabus.duration_in_days, + "duration_in_hours": model.syllabus.duration_in_hours, + "github_url": model.syllabus.github_url, + "logo": model.syllabus.logo, + "private": model.syllabus.private, + "week_hours": model.syllabus.week_hours, }, - 'logo_url': model['cohort'].academy.logo_url, - 'is_hidden_on_prework': model['cohort'].academy.is_hidden_on_prework - }, - }] + "academy": { + "id": model["cohort"].academy.id, + "slug": model["cohort"].academy.slug, + "name": model["cohort"].academy.name, + "country": { + "code": model["cohort"].academy.country.code, + "name": model["cohort"].academy.country.name, + }, + "city": { + "name": model["cohort"].academy.city.name, + }, + "logo_url": model["cohort"].academy.logo_url, + "is_hidden_on_prework": model["cohort"].academy.is_hidden_on_prework, + }, + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -188,31 +198,33 @@ def test_cohort_me__with_data__with_upcoming_false(self): self.assertEqual(self.all_cohort_time_slot_dict(), []) self.assertEqual(cohort_saved.send_robust.call_args_list, []) - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_me__with_data__with_upcoming_true__without_data(self): """Test /cohort without auth""" from breathecode.admissions.signals import cohort_saved self.headers(academy=1) self.clear_cache() - model = self.generate_models(authenticate=True, - cohort=True, - profile_academy=True, - capability='read_single_cohort', - role='potato', 
- cohort_user=1, - syllabus=True, - syllabus_version=True, - syllabus_schedule=True) + model = self.generate_models( + authenticate=True, + cohort=True, + profile_academy=True, + capability="read_single_cohort", + role="potato", + cohort_user=1, + syllabus=True, + syllabus_version=True, + syllabus_schedule=True, + ) # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] - model_dict = self.remove_dinamics_fields(model['cohort'].__dict__) - base_url = reverse_lazy('admissions:academy_cohort_me') - url = f'{base_url}?upcoming=true' + model_dict = self.remove_dinamics_fields(model["cohort"].__dict__) + base_url = reverse_lazy("admissions:academy_cohort_me") + url = f"{base_url}?upcoming=true" response = self.client.get(url) json = response.json() @@ -223,87 +235,91 @@ def test_cohort_me__with_data__with_upcoming_true__without_data(self): self.assertEqual(self.all_cohort_time_slot_dict(), []) self.assertEqual(cohort_saved.send_robust.call_args_list, []) - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_me__with_data__with_upcoming_true(self): """Test /cohort without auth""" from breathecode.admissions.signals import cohort_saved self.headers(academy=1) cohort_kwargs = { - 'kickoff_date': timezone.now() + timedelta(days=1), + "kickoff_date": timezone.now() + timedelta(days=1), } - model = self.generate_models(authenticate=True, - cohort=True, - profile_academy=True, - capability='read_single_cohort', - role='potato', - cohort_user=1, - syllabus=True, - syllabus_version=True, - syllabus_schedule=True, - cohort_kwargs=cohort_kwargs) + model = self.generate_models( + authenticate=True, + cohort=True, + profile_academy=True, + capability="read_single_cohort", + role="potato", + cohort_user=1, + syllabus=True, + syllabus_version=True, + syllabus_schedule=True, + cohort_kwargs=cohort_kwargs, + ) # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] model_dict = self.get_cohort_dict(1) - base_url = reverse_lazy('admissions:academy_cohort_me') - url = f'{base_url}?upcoming=true' + base_url = reverse_lazy("admissions:academy_cohort_me") + url = f"{base_url}?upcoming=true" response = self.client.get(url) json = response.json() - expected = [{ - 'id': model['cohort'].id, - 'slug': model['cohort'].slug, - 'name': model['cohort'].name, - 'never_ends': model['cohort'].never_ends, - 'remote_available': model['cohort'].remote_available, - 'private': model['cohort'].private, - 'kickoff_date': self.datetime_to_iso(model['cohort'].kickoff_date), - 'ending_date': model['cohort'].ending_date, - 'stage': model['cohort'].stage, - 'language': model['cohort'].language, - 'current_day': model['cohort'].current_day, - 'current_module': None, - 'online_meeting_url': model['cohort'].online_meeting_url, - 'timezone': model['cohort'].timezone, - 'timeslots': [], - 'is_hidden_on_prework': model['cohort'].is_hidden_on_prework, - 'available_as_saas': model['cohort'].available_as_saas, - 'schedule': { - 'id': 
model['cohort'].schedule.id, - 'name': model['cohort'].schedule.name, - 'syllabus': model['cohort'].schedule.syllabus.id, - }, - 'syllabus_version': { - 'status': model['cohort'].syllabus_version.status, - 'name': model.syllabus.name, - 'slug': model.syllabus.slug, - 'version': model['cohort'].syllabus_version.version, - 'syllabus': model['cohort'].syllabus_version.syllabus.id, - 'duration_in_days': model.syllabus.duration_in_days, - 'duration_in_hours': model.syllabus.duration_in_hours, - 'github_url': model.syllabus.github_url, - 'logo': model.syllabus.logo, - 'private': model.syllabus.private, - 'week_hours': model.syllabus.week_hours, - }, - 'academy': { - 'id': model['cohort'].academy.id, - 'slug': model['cohort'].academy.slug, - 'name': model['cohort'].academy.name, - 'country': { - 'code': model['cohort'].academy.country.code, - 'name': model['cohort'].academy.country.name, + expected = [ + { + "id": model["cohort"].id, + "slug": model["cohort"].slug, + "name": model["cohort"].name, + "never_ends": model["cohort"].never_ends, + "remote_available": model["cohort"].remote_available, + "private": model["cohort"].private, + "kickoff_date": self.datetime_to_iso(model["cohort"].kickoff_date), + "ending_date": model["cohort"].ending_date, + "stage": model["cohort"].stage, + "language": model["cohort"].language, + "current_day": model["cohort"].current_day, + "current_module": None, + "online_meeting_url": model["cohort"].online_meeting_url, + "timezone": model["cohort"].timezone, + "timeslots": [], + "is_hidden_on_prework": model["cohort"].is_hidden_on_prework, + "available_as_saas": model["cohort"].available_as_saas, + "schedule": { + "id": model["cohort"].schedule.id, + "name": model["cohort"].schedule.name, + "syllabus": model["cohort"].schedule.syllabus.id, }, - 'city': { - 'name': model['cohort'].academy.city.name, + "syllabus_version": { + "status": model["cohort"].syllabus_version.status, + "name": model.syllabus.name, + "slug": model.syllabus.slug, + "version": model["cohort"].syllabus_version.version, + "syllabus": model["cohort"].syllabus_version.syllabus.id, + "duration_in_days": model.syllabus.duration_in_days, + "duration_in_hours": model.syllabus.duration_in_hours, + "github_url": model.syllabus.github_url, + "logo": model.syllabus.logo, + "private": model.syllabus.private, + "week_hours": model.syllabus.week_hours, }, - 'logo_url': model['cohort'].academy.logo_url, - 'is_hidden_on_prework': model['cohort'].academy.is_hidden_on_prework - }, - }] + "academy": { + "id": model["cohort"].academy.id, + "slug": model["cohort"].academy.slug, + "name": model["cohort"].academy.name, + "country": { + "code": model["cohort"].academy.country.code, + "name": model["cohort"].academy.country.name, + }, + "city": { + "name": model["cohort"].academy.city.name, + }, + "logo_url": model["cohort"].academy.logo_url, + "is_hidden_on_prework": model["cohort"].academy.is_hidden_on_prework, + }, + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -312,83 +328,87 @@ def test_cohort_me__with_data__with_upcoming_true(self): self.assertEqual(self.all_cohort_time_slot_dict(), []) self.assertEqual(cohort_saved.send_robust.call_args_list, []) - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + 
@patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_me_with_data_with_academy(self): """Test /cohort without auth""" from breathecode.admissions.signals import cohort_saved self.headers(academy=1) - model = self.generate_models(authenticate=True, - cohort=True, - profile_academy=True, - capability='read_single_cohort', - role='potato', - cohort_user=1, - syllabus=True, - syllabus_version=True, - syllabus_schedule=True) + model = self.generate_models( + authenticate=True, + cohort=True, + profile_academy=True, + capability="read_single_cohort", + role="potato", + cohort_user=1, + syllabus=True, + syllabus_version=True, + syllabus_schedule=True, + ) # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] model_dict = self.get_cohort_dict(1) - base_url = reverse_lazy('admissions:academy_cohort_me') - url = f'{base_url}?academy=' + model['academy'].slug + base_url = reverse_lazy("admissions:academy_cohort_me") + url = f"{base_url}?academy=" + model["academy"].slug response = self.client.get(url) json = response.json() - expected = [{ - 'id': model['cohort'].id, - 'slug': model['cohort'].slug, - 'name': model['cohort'].name, - 'never_ends': model['cohort'].never_ends, - 'remote_available': model['cohort'].remote_available, - 'private': model['cohort'].private, - 'kickoff_date': re.sub(r'\+00:00$', 'Z', model['cohort'].kickoff_date.isoformat()), - 'ending_date': model['cohort'].ending_date, - 'stage': model['cohort'].stage, - 'language': model['cohort'].language, - 'current_day': model['cohort'].current_day, - 'current_module': model['cohort'].current_module, - 'online_meeting_url': model['cohort'].online_meeting_url, - 'timezone': model['cohort'].timezone, - 'timeslots': [], - 'is_hidden_on_prework': model['cohort'].is_hidden_on_prework, - 'available_as_saas': model['cohort'].available_as_saas, - 'schedule': { - 'id': model['cohort'].schedule.id, - 'name': model['cohort'].schedule.name, - 'syllabus': model['cohort'].schedule.syllabus.id, - }, - 'syllabus_version': { - 'name': model.syllabus.name, - 'slug': model.syllabus.slug, - 'status': model['cohort'].syllabus_version.status, - 'version': model['cohort'].syllabus_version.version, - 'syllabus': model['cohort'].syllabus_version.syllabus.id, - 'duration_in_days': model.syllabus.duration_in_days, - 'duration_in_hours': model.syllabus.duration_in_hours, - 'github_url': model.syllabus.github_url, - 'logo': model.syllabus.logo, - 'private': model.syllabus.private, - 'week_hours': model.syllabus.week_hours, - }, - 'academy': { - 'id': model['cohort'].academy.id, - 'slug': model['cohort'].academy.slug, - 'name': model['cohort'].academy.name, - 'country': { - 'code': model['cohort'].academy.country.code, - 'name': model['cohort'].academy.country.name, + expected = [ + { + "id": model["cohort"].id, + "slug": model["cohort"].slug, + "name": model["cohort"].name, + "never_ends": model["cohort"].never_ends, + "remote_available": model["cohort"].remote_available, + "private": model["cohort"].private, + "kickoff_date": re.sub(r"\+00:00$", "Z", model["cohort"].kickoff_date.isoformat()), + "ending_date": model["cohort"].ending_date, + "stage": model["cohort"].stage, + "language": model["cohort"].language, + "current_day": model["cohort"].current_day, + "current_module": 
model["cohort"].current_module, + "online_meeting_url": model["cohort"].online_meeting_url, + "timezone": model["cohort"].timezone, + "timeslots": [], + "is_hidden_on_prework": model["cohort"].is_hidden_on_prework, + "available_as_saas": model["cohort"].available_as_saas, + "schedule": { + "id": model["cohort"].schedule.id, + "name": model["cohort"].schedule.name, + "syllabus": model["cohort"].schedule.syllabus.id, }, - 'city': { - 'name': model['cohort'].academy.city.name, + "syllabus_version": { + "name": model.syllabus.name, + "slug": model.syllabus.slug, + "status": model["cohort"].syllabus_version.status, + "version": model["cohort"].syllabus_version.version, + "syllabus": model["cohort"].syllabus_version.syllabus.id, + "duration_in_days": model.syllabus.duration_in_days, + "duration_in_hours": model.syllabus.duration_in_hours, + "github_url": model.syllabus.github_url, + "logo": model.syllabus.logo, + "private": model.syllabus.private, + "week_hours": model.syllabus.week_hours, }, - 'logo_url': model['cohort'].academy.logo_url, - 'is_hidden_on_prework': model['cohort'].academy.is_hidden_on_prework - }, - }] + "academy": { + "id": model["cohort"].academy.id, + "slug": model["cohort"].academy.slug, + "name": model["cohort"].academy.name, + "country": { + "code": model["cohort"].academy.country.code, + "name": model["cohort"].academy.country.name, + }, + "city": { + "name": model["cohort"].academy.city.name, + }, + "logo_url": model["cohort"].academy.logo_url, + "is_hidden_on_prework": model["cohort"].academy.is_hidden_on_prework, + }, + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -397,83 +417,87 @@ def test_cohort_me_with_data_with_academy(self): self.assertEqual(self.all_cohort_time_slot_dict(), []) self.assertEqual(cohort_saved.send_robust.call_args_list, []) - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_me_with_data_with_academy_with_comma(self): """Test /cohort without auth""" from breathecode.admissions.signals import cohort_saved self.headers(academy=1) - model = self.generate_models(authenticate=True, - cohort=True, - profile_academy=True, - capability='read_single_cohort', - role='potato', - cohort_user=1, - syllabus=True, - syllabus_version=True, - syllabus_schedule=True) + model = self.generate_models( + authenticate=True, + cohort=True, + profile_academy=True, + capability="read_single_cohort", + role="potato", + cohort_user=1, + syllabus=True, + syllabus_version=True, + syllabus_schedule=True, + ) # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] model_dict = self.get_cohort_dict(1) - base_url = reverse_lazy('admissions:academy_cohort_me') - url = f'{base_url}?academy=' + model['academy'].slug + ',they-killed-kenny' + base_url = reverse_lazy("admissions:academy_cohort_me") + url = f"{base_url}?academy=" + model["academy"].slug + ",they-killed-kenny" response = self.client.get(url) json = response.json() - expected = [{ - 'id': model['cohort'].id, - 
'slug': model['cohort'].slug, - 'name': model['cohort'].name, - 'never_ends': model['cohort'].never_ends, - 'remote_available': model['cohort'].remote_available, - 'private': model['cohort'].private, - 'kickoff_date': re.sub(r'\+00:00$', 'Z', model['cohort'].kickoff_date.isoformat()), - 'ending_date': model['cohort'].ending_date, - 'stage': model['cohort'].stage, - 'language': model['cohort'].language, - 'current_day': model['cohort'].current_day, - 'current_module': None, - 'online_meeting_url': model['cohort'].online_meeting_url, - 'timezone': model['cohort'].timezone, - 'timeslots': [], - 'is_hidden_on_prework': model['cohort'].is_hidden_on_prework, - 'available_as_saas': model['cohort'].available_as_saas, - 'schedule': { - 'id': model['cohort'].schedule.id, - 'name': model['cohort'].schedule.name, - 'syllabus': model['cohort'].schedule.syllabus.id, - }, - 'syllabus_version': { - 'name': model.syllabus.name, - 'slug': model.syllabus.slug, - 'status': model['cohort'].syllabus_version.status, - 'version': model['cohort'].syllabus_version.version, - 'syllabus': model['cohort'].syllabus_version.syllabus.id, - 'duration_in_days': model.syllabus.duration_in_days, - 'duration_in_hours': model.syllabus.duration_in_hours, - 'github_url': model.syllabus.github_url, - 'logo': model.syllabus.logo, - 'private': model.syllabus.private, - 'week_hours': model.syllabus.week_hours, - }, - 'academy': { - 'id': model['cohort'].academy.id, - 'slug': model['cohort'].academy.slug, - 'name': model['cohort'].academy.name, - 'country': { - 'code': model['cohort'].academy.country.code, - 'name': model['cohort'].academy.country.name, + expected = [ + { + "id": model["cohort"].id, + "slug": model["cohort"].slug, + "name": model["cohort"].name, + "never_ends": model["cohort"].never_ends, + "remote_available": model["cohort"].remote_available, + "private": model["cohort"].private, + "kickoff_date": re.sub(r"\+00:00$", "Z", model["cohort"].kickoff_date.isoformat()), + "ending_date": model["cohort"].ending_date, + "stage": model["cohort"].stage, + "language": model["cohort"].language, + "current_day": model["cohort"].current_day, + "current_module": None, + "online_meeting_url": model["cohort"].online_meeting_url, + "timezone": model["cohort"].timezone, + "timeslots": [], + "is_hidden_on_prework": model["cohort"].is_hidden_on_prework, + "available_as_saas": model["cohort"].available_as_saas, + "schedule": { + "id": model["cohort"].schedule.id, + "name": model["cohort"].schedule.name, + "syllabus": model["cohort"].schedule.syllabus.id, }, - 'city': { - 'name': model['cohort'].academy.city.name, + "syllabus_version": { + "name": model.syllabus.name, + "slug": model.syllabus.slug, + "status": model["cohort"].syllabus_version.status, + "version": model["cohort"].syllabus_version.version, + "syllabus": model["cohort"].syllabus_version.syllabus.id, + "duration_in_days": model.syllabus.duration_in_days, + "duration_in_hours": model.syllabus.duration_in_hours, + "github_url": model.syllabus.github_url, + "logo": model.syllabus.logo, + "private": model.syllabus.private, + "week_hours": model.syllabus.week_hours, }, - 'logo_url': model['cohort'].academy.logo_url, - 'is_hidden_on_prework': model['cohort'].academy.is_hidden_on_prework - }, - }] + "academy": { + "id": model["cohort"].academy.id, + "slug": model["cohort"].academy.slug, + "name": model["cohort"].academy.name, + "country": { + "code": model["cohort"].academy.country.code, + "name": model["cohort"].academy.country.name, + }, + "city": { + "name": 
model["cohort"].academy.city.name, + }, + "logo_url": model["cohort"].academy.logo_url, + "is_hidden_on_prework": model["cohort"].academy.is_hidden_on_prework, + }, + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -572,9 +596,9 @@ def test_cohort_me_with_data_with_academy_with_comma(self): 🔽🔽🔽 Sort in querystring """ - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_me__with_data__with_sort(self): """Test /cohort without auth""" from breathecode.admissions.signals import cohort_saved @@ -583,110 +607,114 @@ def test_cohort_me__with_data__with_sort(self): base = self.generate_models( authenticate=True, profile_academy=True, - capability='read_single_cohort', - role='potato', + capability="read_single_cohort", + role="potato", # cohort_user=1, - skip_cohort=True) + skip_cohort=True, + ) models = [ - self.generate_models(cohort=True, - syllabus=True, - cohort_user=1, - syllabus_version=True, - syllabus_schedule=True, - models=base) for _ in range(0, 2) + self.generate_models( + cohort=True, syllabus=True, cohort_user=1, syllabus_version=True, syllabus_schedule=True, models=base + ) + for _ in range(0, 2) ] # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] - ordened_models = sorted(models, key=lambda x: x['cohort'].slug, reverse=True) + ordened_models = sorted(models, key=lambda x: x["cohort"].slug, reverse=True) - url = reverse_lazy('admissions:academy_cohort_me') + '?sort=-slug' + url = reverse_lazy("admissions:academy_cohort_me") + "?sort=-slug" response = self.client.get(url) json = response.json() - expected = [{ - 'id': model['cohort'].id, - 'slug': model['cohort'].slug, - 'name': model['cohort'].name, - 'current_day': model.cohort.current_day, - 'current_module': None, - 'never_ends': model['cohort'].never_ends, - 'remote_available': model['cohort'].remote_available, - 'private': model['cohort'].private, - 'kickoff_date': re.sub(r'\+00:00$', 'Z', model['cohort'].kickoff_date.isoformat()), - 'ending_date': model['cohort'].ending_date, - 'stage': model['cohort'].stage, - 'language': model['cohort'].language, - 'online_meeting_url': model['cohort'].online_meeting_url, - 'timezone': model['cohort'].timezone, - 'timeslots': [], - 'is_hidden_on_prework': model['cohort'].is_hidden_on_prework, - 'available_as_saas': model['cohort'].available_as_saas, - 'schedule': { - 'id': model['cohort'].schedule.id, - 'name': model['cohort'].schedule.name, - 'syllabus': model['cohort'].schedule.syllabus.id, - }, - 'syllabus_version': { - 'name': model.syllabus.name, - 'slug': model.syllabus.slug, - 'status': model['cohort'].syllabus_version.status, - 'version': model['cohort'].syllabus_version.version, - 'syllabus': model['cohort'].syllabus_version.syllabus.id, - 'duration_in_days': model.syllabus.duration_in_days, - 'duration_in_hours': model.syllabus.duration_in_hours, - 'github_url': model.syllabus.github_url, - 'logo': model.syllabus.logo, - 'private': model.syllabus.private, - 
'week_hours': model.syllabus.week_hours, - }, - 'academy': { - 'id': model['cohort'].academy.id, - 'slug': model['cohort'].academy.slug, - 'name': model['cohort'].academy.name, - 'country': { - 'code': model['cohort'].academy.country.code, - 'name': model['cohort'].academy.country.name, + expected = [ + { + "id": model["cohort"].id, + "slug": model["cohort"].slug, + "name": model["cohort"].name, + "current_day": model.cohort.current_day, + "current_module": None, + "never_ends": model["cohort"].never_ends, + "remote_available": model["cohort"].remote_available, + "private": model["cohort"].private, + "kickoff_date": re.sub(r"\+00:00$", "Z", model["cohort"].kickoff_date.isoformat()), + "ending_date": model["cohort"].ending_date, + "stage": model["cohort"].stage, + "language": model["cohort"].language, + "online_meeting_url": model["cohort"].online_meeting_url, + "timezone": model["cohort"].timezone, + "timeslots": [], + "is_hidden_on_prework": model["cohort"].is_hidden_on_prework, + "available_as_saas": model["cohort"].available_as_saas, + "schedule": { + "id": model["cohort"].schedule.id, + "name": model["cohort"].schedule.name, + "syllabus": model["cohort"].schedule.syllabus.id, }, - 'city': { - 'name': model['cohort'].academy.city.name, + "syllabus_version": { + "name": model.syllabus.name, + "slug": model.syllabus.slug, + "status": model["cohort"].syllabus_version.status, + "version": model["cohort"].syllabus_version.version, + "syllabus": model["cohort"].syllabus_version.syllabus.id, + "duration_in_days": model.syllabus.duration_in_days, + "duration_in_hours": model.syllabus.duration_in_hours, + "github_url": model.syllabus.github_url, + "logo": model.syllabus.logo, + "private": model.syllabus.private, + "week_hours": model.syllabus.week_hours, }, - 'logo_url': model['cohort'].academy.logo_url, - 'is_hidden_on_prework': model['cohort'].academy.is_hidden_on_prework - }, - } for model in ordened_models] + "academy": { + "id": model["cohort"].academy.id, + "slug": model["cohort"].academy.slug, + "name": model["cohort"].academy.name, + "country": { + "code": model["cohort"].academy.country.code, + "name": model["cohort"].academy.country.name, + }, + "city": { + "name": model["cohort"].academy.city.name, + }, + "logo_url": model["cohort"].academy.logo_url, + "is_hidden_on_prework": model["cohort"].academy.is_hidden_on_prework, + }, + } + for model in ordened_models + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), [{ - **self.model_to_dict(model, 'cohort') - } for model in models]) + self.assertEqual( + self.bc.database.list_of("admissions.Cohort"), [{**self.model_to_dict(model, "cohort")} for model in models] + ) self.assertEqual(cohort_saved.send_robust.call_args_list, []) - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_me_with_data__ignore_cohort_then_student_is_not_registered(self): """Test /cohort without auth""" from breathecode.admissions.signals 
import cohort_saved self.headers(academy=1) - model = self.generate_models(authenticate=True, - cohort=True, - profile_academy=True, - capability='read_single_cohort', - role='potato', - syllabus=True, - syllabus_version=True, - syllabus_schedule=True) + model = self.generate_models( + authenticate=True, + cohort=True, + profile_academy=True, + capability="read_single_cohort", + role="potato", + syllabus=True, + syllabus_version=True, + syllabus_schedule=True, + ) # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] model_dict = self.get_cohort_dict(1) - base_url = reverse_lazy('admissions:academy_cohort_me') - url = f'{base_url}?location=they-killed-kenny' + base_url = reverse_lazy("admissions:academy_cohort_me") + url = f"{base_url}?location=they-killed-kenny" response = self.client.get(url) json = response.json() @@ -697,83 +725,87 @@ def test_cohort_me_with_data__ignore_cohort_then_student_is_not_registered(self) self.assertEqual(self.all_cohort_time_slot_dict(), []) self.assertEqual(cohort_saved.send_robust.call_args_list, []) - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_me_with_data_with_location(self): """Test /cohort without auth""" from breathecode.admissions.signals import cohort_saved self.headers(academy=1) - model = self.generate_models(authenticate=True, - cohort=True, - profile_academy=True, - capability='read_single_cohort', - role='potato', - cohort_user=1, - syllabus=True, - syllabus_version=True, - syllabus_schedule=True) + model = self.generate_models( + authenticate=True, + cohort=True, + profile_academy=True, + capability="read_single_cohort", + role="potato", + cohort_user=1, + syllabus=True, + syllabus_version=True, + syllabus_schedule=True, + ) # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] model_dict = self.get_cohort_dict(1) - base_url = reverse_lazy('admissions:academy_cohort_me') - url = f'{base_url}?location=' + model['academy'].slug + base_url = reverse_lazy("admissions:academy_cohort_me") + url = f"{base_url}?location=" + model["academy"].slug response = self.client.get(url) json = response.json() - expected = [{ - 'id': model['cohort'].id, - 'slug': model['cohort'].slug, - 'name': model['cohort'].name, - 'never_ends': model['cohort'].never_ends, - 'remote_available': model['cohort'].remote_available, - 'private': model['cohort'].private, - 'kickoff_date': re.sub(r'\+00:00$', 'Z', model['cohort'].kickoff_date.isoformat()), - 'ending_date': model['cohort'].ending_date, - 'stage': model['cohort'].stage, - 'language': model['cohort'].language, - 'current_day': model['cohort'].current_day, - 'current_module': None, - 'online_meeting_url': model['cohort'].online_meeting_url, - 'timezone': model['cohort'].timezone, - 'timeslots': [], - 'is_hidden_on_prework': model['cohort'].is_hidden_on_prework, - 'available_as_saas': model['cohort'].available_as_saas, - 'schedule': { - 'id': model['cohort'].schedule.id, - 'name': model['cohort'].schedule.name, - 
'syllabus': model['cohort'].schedule.syllabus.id, - }, - 'syllabus_version': { - 'name': model.syllabus.name, - 'slug': model.syllabus.slug, - 'status': model['cohort'].syllabus_version.status, - 'version': model['cohort'].syllabus_version.version, - 'syllabus': model['cohort'].syllabus_version.syllabus.id, - 'duration_in_days': model.syllabus.duration_in_days, - 'duration_in_hours': model.syllabus.duration_in_hours, - 'github_url': model.syllabus.github_url, - 'logo': model.syllabus.logo, - 'private': model.syllabus.private, - 'week_hours': model.syllabus.week_hours, - }, - 'academy': { - 'id': model['cohort'].academy.id, - 'slug': model['cohort'].academy.slug, - 'name': model['cohort'].academy.name, - 'country': { - 'code': model['cohort'].academy.country.code, - 'name': model['cohort'].academy.country.name, + expected = [ + { + "id": model["cohort"].id, + "slug": model["cohort"].slug, + "name": model["cohort"].name, + "never_ends": model["cohort"].never_ends, + "remote_available": model["cohort"].remote_available, + "private": model["cohort"].private, + "kickoff_date": re.sub(r"\+00:00$", "Z", model["cohort"].kickoff_date.isoformat()), + "ending_date": model["cohort"].ending_date, + "stage": model["cohort"].stage, + "language": model["cohort"].language, + "current_day": model["cohort"].current_day, + "current_module": None, + "online_meeting_url": model["cohort"].online_meeting_url, + "timezone": model["cohort"].timezone, + "timeslots": [], + "is_hidden_on_prework": model["cohort"].is_hidden_on_prework, + "available_as_saas": model["cohort"].available_as_saas, + "schedule": { + "id": model["cohort"].schedule.id, + "name": model["cohort"].schedule.name, + "syllabus": model["cohort"].schedule.syllabus.id, }, - 'city': { - 'name': model['cohort'].academy.city.name, + "syllabus_version": { + "name": model.syllabus.name, + "slug": model.syllabus.slug, + "status": model["cohort"].syllabus_version.status, + "version": model["cohort"].syllabus_version.version, + "syllabus": model["cohort"].syllabus_version.syllabus.id, + "duration_in_days": model.syllabus.duration_in_days, + "duration_in_hours": model.syllabus.duration_in_hours, + "github_url": model.syllabus.github_url, + "logo": model.syllabus.logo, + "private": model.syllabus.private, + "week_hours": model.syllabus.week_hours, }, - 'logo_url': model['cohort'].academy.logo_url, - 'is_hidden_on_prework': model['cohort'].academy.is_hidden_on_prework - }, - }] + "academy": { + "id": model["cohort"].academy.id, + "slug": model["cohort"].academy.slug, + "name": model["cohort"].academy.name, + "country": { + "code": model["cohort"].academy.country.code, + "name": model["cohort"].academy.country.name, + }, + "city": { + "name": model["cohort"].academy.city.name, + }, + "logo_url": model["cohort"].academy.logo_url, + "is_hidden_on_prework": model["cohort"].academy.is_hidden_on_prework, + }, + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -782,83 +814,87 @@ def test_cohort_me_with_data_with_location(self): self.assertEqual(self.all_cohort_time_slot_dict(), []) self.assertEqual(cohort_saved.send_robust.call_args_list, []) - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + 
@patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_me_with_data_with_location_with_comma(self): """Test /cohort without auth""" from breathecode.admissions.signals import cohort_saved self.headers(academy=1) - model = self.generate_models(authenticate=True, - cohort=True, - profile_academy=True, - capability='read_single_cohort', - role='potato', - cohort_user=1, - syllabus=True, - syllabus_version=True, - syllabus_schedule=True) + model = self.generate_models( + authenticate=True, + cohort=True, + profile_academy=True, + capability="read_single_cohort", + role="potato", + cohort_user=1, + syllabus=True, + syllabus_version=True, + syllabus_schedule=True, + ) # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] model_dict = self.get_cohort_dict(1) - base_url = reverse_lazy('admissions:academy_cohort_me') - url = f'{base_url}?location=' + model['academy'].slug + ',they-killed-kenny' + base_url = reverse_lazy("admissions:academy_cohort_me") + url = f"{base_url}?location=" + model["academy"].slug + ",they-killed-kenny" response = self.client.get(url) json = response.json() - expected = [{ - 'id': model['cohort'].id, - 'slug': model['cohort'].slug, - 'name': model['cohort'].name, - 'never_ends': model['cohort'].never_ends, - 'remote_available': model['cohort'].remote_available, - 'private': model['cohort'].private, - 'kickoff_date': re.sub(r'\+00:00$', 'Z', model['cohort'].kickoff_date.isoformat()), - 'ending_date': model['cohort'].ending_date, - 'stage': model['cohort'].stage, - 'language': model['cohort'].language, - 'current_day': model['cohort'].current_day, - 'current_module': None, - 'online_meeting_url': model['cohort'].online_meeting_url, - 'timezone': model['cohort'].timezone, - 'timeslots': [], - 'is_hidden_on_prework': model['cohort'].is_hidden_on_prework, - 'available_as_saas': model['cohort'].available_as_saas, - 'schedule': { - 'id': model['cohort'].schedule.id, - 'name': model['cohort'].schedule.name, - 'syllabus': model['cohort'].schedule.syllabus.id, - }, - 'syllabus_version': { - 'name': model.syllabus.name, - 'slug': model.syllabus.slug, - 'status': model['cohort'].syllabus_version.status, - 'version': model['cohort'].syllabus_version.version, - 'syllabus': model['cohort'].syllabus_version.syllabus.id, - 'duration_in_days': model.syllabus.duration_in_days, - 'duration_in_hours': model.syllabus.duration_in_hours, - 'github_url': model.syllabus.github_url, - 'logo': model.syllabus.logo, - 'private': model.syllabus.private, - 'week_hours': model.syllabus.week_hours, - }, - 'academy': { - 'id': model['cohort'].academy.id, - 'slug': model['cohort'].academy.slug, - 'name': model['cohort'].academy.name, - 'country': { - 'code': model['cohort'].academy.country.code, - 'name': model['cohort'].academy.country.name, + expected = [ + { + "id": model["cohort"].id, + "slug": model["cohort"].slug, + "name": model["cohort"].name, + "never_ends": model["cohort"].never_ends, + "remote_available": model["cohort"].remote_available, + "private": model["cohort"].private, + "kickoff_date": re.sub(r"\+00:00$", "Z", model["cohort"].kickoff_date.isoformat()), + "ending_date": model["cohort"].ending_date, + "stage": model["cohort"].stage, + "language": model["cohort"].language, + "current_day": model["cohort"].current_day, + "current_module": None, + "online_meeting_url": 
model["cohort"].online_meeting_url, + "timezone": model["cohort"].timezone, + "timeslots": [], + "is_hidden_on_prework": model["cohort"].is_hidden_on_prework, + "available_as_saas": model["cohort"].available_as_saas, + "schedule": { + "id": model["cohort"].schedule.id, + "name": model["cohort"].schedule.name, + "syllabus": model["cohort"].schedule.syllabus.id, }, - 'city': { - 'name': model['cohort'].academy.city.name, + "syllabus_version": { + "name": model.syllabus.name, + "slug": model.syllabus.slug, + "status": model["cohort"].syllabus_version.status, + "version": model["cohort"].syllabus_version.version, + "syllabus": model["cohort"].syllabus_version.syllabus.id, + "duration_in_days": model.syllabus.duration_in_days, + "duration_in_hours": model.syllabus.duration_in_hours, + "github_url": model.syllabus.github_url, + "logo": model.syllabus.logo, + "private": model.syllabus.private, + "week_hours": model.syllabus.week_hours, }, - 'logo_url': model['cohort'].academy.logo_url, - 'is_hidden_on_prework': model['cohort'].academy.is_hidden_on_prework - }, - }] + "academy": { + "id": model["cohort"].academy.id, + "slug": model["cohort"].academy.slug, + "name": model["cohort"].academy.name, + "country": { + "code": model["cohort"].academy.country.code, + "name": model["cohort"].academy.country.name, + }, + "city": { + "name": model["cohort"].academy.city.name, + }, + "logo_url": model["cohort"].academy.logo_url, + "is_hidden_on_prework": model["cohort"].academy.is_hidden_on_prework, + }, + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -867,63 +903,80 @@ def test_cohort_me_with_data_with_location_with_comma(self): self.assertEqual(self.all_cohort_time_slot_dict(), []) self.assertEqual(cohort_saved.send_robust.call_args_list, []) - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch.object(APIViewExtensionHandlers, '_spy_extensions', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch.object(APIViewExtensionHandlers, "_spy_extensions", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_me__spy_extensions(self): """Test /cohort without auth""" from breathecode.admissions.signals import cohort_saved self.headers(academy=1) - model = self.generate_models(authenticate=True, - cohort=True, - profile_academy=True, - capability='read_single_cohort', - role='potato', - cohort_user=1, - syllabus=True, - syllabus_version=True, - syllabus_schedule=True) + model = self.generate_models( + authenticate=True, + cohort=True, + profile_academy=True, + capability="read_single_cohort", + role="potato", + cohort_user=1, + syllabus=True, + syllabus_version=True, + syllabus_schedule=True, + ) # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] - url = reverse_lazy('admissions:academy_cohort_me') + f'?location={model["academy"].slug},they-killed-kenny' + url = reverse_lazy("admissions:academy_cohort_me") + f'?location={model["academy"].slug},they-killed-kenny' self.client.get(url) self.assertEqual( str(APIViewExtensionHandlers._spy_extensions.call_args_list), - 
str([ - call(['CacheExtension', 'LanguageExtension', 'LookupExtension', 'PaginationExtension', - 'SortExtension']), - ])) - - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch.object(APIViewExtensionHandlers, '_spy_extension_arguments', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + str( + [ + call( + [ + "CacheExtension", + "LanguageExtension", + "LookupExtension", + "PaginationExtension", + "SortExtension", + ] + ), + ] + ), + ) + + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch.object(APIViewExtensionHandlers, "_spy_extension_arguments", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_me__spy_extension_arguments(self): """Test /cohort without auth""" from breathecode.admissions.signals import cohort_saved self.headers(academy=1) - model = self.generate_models(authenticate=True, - cohort=True, - profile_academy=True, - capability='read_single_cohort', - role='potato', - cohort_user=1, - syllabus=True, - syllabus_version=True, - syllabus_schedule=True) + model = self.generate_models( + authenticate=True, + cohort=True, + profile_academy=True, + capability="read_single_cohort", + role="potato", + cohort_user=1, + syllabus=True, + syllabus_version=True, + syllabus_schedule=True, + ) # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] - url = reverse_lazy('admissions:academy_cohort_me') + f'?location={model["academy"].slug},they-killed-kenny' + url = reverse_lazy("admissions:academy_cohort_me") + f'?location={model["academy"].slug},they-killed-kenny' self.client.get(url) - self.assertEqual(APIViewExtensionHandlers._spy_extension_arguments.call_args_list, [ - call(cache=CohortCache, cache_per_user=True, sort='-kickoff_date', paginate=True), - ]) + self.assertEqual( + APIViewExtensionHandlers._spy_extension_arguments.call_args_list, + [ + call(cache=CohortCache, cache_per_user=True, sort="-kickoff_date", paginate=True), + ], + ) diff --git a/breathecode/admissions/tests/urls/tests_academy_cohort_sync_timeslot.py b/breathecode/admissions/tests/urls/tests_academy_cohort_sync_timeslot.py index 52e2266a9..d95188e89 100644 --- a/breathecode/admissions/tests/urls/tests_academy_cohort_sync_timeslot.py +++ b/breathecode/admissions/tests/urls/tests_academy_cohort_sync_timeslot.py @@ -1,6 +1,7 @@ """ Test /certificate """ + from django.urls.base import reverse_lazy from rest_framework import status from ..mixins import AdmissionsTestCase @@ -8,36 +9,36 @@ class CertificateTestSuite(AdmissionsTestCase): """Test /certificate""" + """ 🔽🔽🔽 Auth """ def test_academy_cohort_sync_timeslot__without_auth(self): """Test /certificate without auth""" - url = reverse_lazy('admissions:academy_cohort_sync_timeslot') + url = reverse_lazy("admissions:academy_cohort_sync_timeslot") response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED - }) + self.assertEqual( + json, + {"detail": "Authentication credentials were not provided.", "status_code": status.HTTP_401_UNAUTHORIZED}, + ) self.assertEqual(response.status_code, 
status.HTTP_401_UNAUTHORIZED) self.assertEqual(self.all_cohort_time_slot_dict(), []) def test_academy_cohort_sync_timeslot__without_capability(self): """Test /certificate without auth""" self.headers(academy=1) - url = reverse_lazy('admissions:academy_cohort_sync_timeslot') + url = reverse_lazy("admissions:academy_cohort_sync_timeslot") self.bc.database.create(authenticate=True) data = {} response = self.client.post(url, data) json = response.json() expected = { - 'status_code': 403, - 'detail': 'You (user: 1) don\'t have this capability: crud_certificate ' - 'for academy 1' + "status_code": 403, + "detail": "You (user: 1) don't have this capability: crud_certificate " "for academy 1", } self.assertEqual(json, expected) @@ -51,18 +52,17 @@ def test_academy_cohort_sync_timeslot__without_capability(self): def test_academy_cohort_sync_timeslot__without_cohort_in_querystring(self): """Test /certificate without auth""" self.headers(academy=1) - url = reverse_lazy('admissions:academy_cohort_sync_timeslot') - model = self.bc.database.create(authenticate=True, - profile_academy=True, - capability='crud_certificate', - role='potato') + url = reverse_lazy("admissions:academy_cohort_sync_timeslot") + model = self.bc.database.create( + authenticate=True, profile_academy=True, capability="crud_certificate", role="potato" + ) data = {} response = self.client.post(url, data) json = response.json() expected = { - 'status_code': 400, - 'detail': 'missing-cohort-in-querystring', + "status_code": 400, + "detail": "missing-cohort-in-querystring", } self.assertEqual(json, expected) @@ -72,18 +72,17 @@ def test_academy_cohort_sync_timeslot__without_cohort_in_querystring(self): def test_academy_cohort_sync_timeslot__with_cohort_in_querystring__without_certificate(self): """Test /certificate without auth""" self.headers(academy=1) - url = reverse_lazy('admissions:academy_cohort_sync_timeslot') + '?cohort=1' - model = self.bc.database.create(authenticate=True, - profile_academy=True, - capability='crud_certificate', - role='potato') + url = reverse_lazy("admissions:academy_cohort_sync_timeslot") + "?cohort=1" + model = self.bc.database.create( + authenticate=True, profile_academy=True, capability="crud_certificate", role="potato" + ) data = {} response = self.client.post(url, data) json = response.json() expected = { - 'status_code': 400, - 'detail': 'cohort-without-specialty-mode', + "status_code": 400, + "detail": "cohort-without-specialty-mode", } self.assertEqual(json, expected) @@ -93,16 +92,18 @@ def test_academy_cohort_sync_timeslot__with_cohort_in_querystring__without_certi def test_academy_cohort_sync_timeslot__with_cohort_in_querystring__with_certificate(self): """Test /certificate without auth""" self.headers(academy=1) - url = reverse_lazy('admissions:academy_cohort_sync_timeslot') + '?cohort=1' - - academy_kwargs = {'timezone': 'America/Caracas'} - model = self.bc.database.create(authenticate=True, - profile_academy=True, - capability='crud_certificate', - role='potato', - syllabus_schedule=True, - syllabus=True, - academy_kwargs=academy_kwargs) + url = reverse_lazy("admissions:academy_cohort_sync_timeslot") + "?cohort=1" + + academy_kwargs = {"timezone": "America/Caracas"} + model = self.bc.database.create( + authenticate=True, + profile_academy=True, + capability="crud_certificate", + role="potato", + syllabus_schedule=True, + syllabus=True, + academy_kwargs=academy_kwargs, + ) data = {} response = self.client.post(url, data) @@ -120,19 +121,21 @@ def 
test_academy_cohort_sync_timeslot__with_cohort_in_querystring__with_certific def test_academy_cohort_sync_timeslot__academy_without_timezone(self): """Test /certificate without auth""" self.headers(academy=1) - url = reverse_lazy('admissions:academy_cohort_sync_timeslot') + '?cohort=1' - model = self.bc.database.create(authenticate=True, - profile_academy=True, - capability='crud_certificate', - role='potato', - syllabus_schedule=True, - syllabus=True, - syllabus_schedule_time_slot=True) + url = reverse_lazy("admissions:academy_cohort_sync_timeslot") + "?cohort=1" + model = self.bc.database.create( + authenticate=True, + profile_academy=True, + capability="crud_certificate", + role="potato", + syllabus_schedule=True, + syllabus=True, + syllabus_schedule_time_slot=True, + ) data = {} response = self.client.post(url, data) json = response.json() - expected = {'detail': 'without-timezone', 'status_code': 400} + expected = {"detail": "without-timezone", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -145,82 +148,99 @@ def test_academy_cohort_sync_timeslot__academy_without_timezone(self): def test_academy_cohort_sync_timeslot__with_one_certificate_timeslot(self): """Test /certificate without auth""" self.headers(academy=1) - url = reverse_lazy('admissions:academy_cohort_sync_timeslot') + '?cohort=1' - - academy_kwargs = {'timezone': 'America/Caracas'} - model = self.bc.database.create(authenticate=True, - profile_academy=True, - capability='crud_certificate', - role='potato', - syllabus_schedule=True, - syllabus=True, - syllabus_schedule_time_slot=True, - academy_kwargs=academy_kwargs) + url = reverse_lazy("admissions:academy_cohort_sync_timeslot") + "?cohort=1" + + academy_kwargs = {"timezone": "America/Caracas"} + model = self.bc.database.create( + authenticate=True, + profile_academy=True, + capability="crud_certificate", + role="potato", + syllabus_schedule=True, + syllabus=True, + syllabus_schedule_time_slot=True, + academy_kwargs=academy_kwargs, + ) data = {} response = self.client.post(url, data) json = response.json() - expected = [{ - 'id': 1, - 'cohort': model.cohort.id, - 'recurrent': model.syllabus_schedule_time_slot.recurrent, - 'recurrency_type': model.syllabus_schedule_time_slot.recurrency_type, - 'timezone': 'America/Caracas' - }] + expected = [ + { + "id": 1, + "cohort": model.cohort.id, + "recurrent": model.syllabus_schedule_time_slot.recurrent, + "recurrency_type": model.syllabus_schedule_time_slot.recurrency_type, + "timezone": "America/Caracas", + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - self.assertEqual(self.all_cohort_time_slot_dict(), [{ - 'id': 1, - 'cohort_id': model.cohort.id, - 'removed_at': model.syllabus_schedule_time_slot.removed_at, - 'starting_at': model.syllabus_schedule_time_slot.starting_at, - 'ending_at': model.syllabus_schedule_time_slot.ending_at, - 'recurrent': model.syllabus_schedule_time_slot.recurrent, - 'recurrency_type': model.syllabus_schedule_time_slot.recurrency_type, - 'timezone': 'America/Caracas' - }]) + self.assertEqual( + self.all_cohort_time_slot_dict(), + [ + { + "id": 1, + "cohort_id": model.cohort.id, + "removed_at": model.syllabus_schedule_time_slot.removed_at, + "starting_at": model.syllabus_schedule_time_slot.starting_at, + "ending_at": model.syllabus_schedule_time_slot.ending_at, + "recurrent": model.syllabus_schedule_time_slot.recurrent, + "recurrency_type": 
model.syllabus_schedule_time_slot.recurrency_type, + "timezone": "America/Caracas", + } + ], + ) def test_academy_cohort_sync_timeslot__with_two_certificate_timeslot(self): """Test /certificate without auth""" self.headers(academy=1) - url = reverse_lazy('admissions:academy_cohort_sync_timeslot') + '?cohort=1' - academy_kwargs = {'timezone': 'America/Caracas'} - base = self.bc.database.create(authenticate=True, - profile_academy=True, - capability='crud_certificate', - role='potato', - syllabus_schedule=True, - syllabus=True, - academy_kwargs=academy_kwargs) + url = reverse_lazy("admissions:academy_cohort_sync_timeslot") + "?cohort=1" + academy_kwargs = {"timezone": "America/Caracas"} + base = self.bc.database.create( + authenticate=True, + profile_academy=True, + capability="crud_certificate", + role="potato", + syllabus_schedule=True, + syllabus=True, + academy_kwargs=academy_kwargs, + ) models = [self.bc.database.create(syllabus_schedule_time_slot=True, models=base) for _ in range(0, 2)] data = {} response = self.client.post(url, data) json = response.json() - expected = [{ - 'id': model.syllabus_schedule_time_slot.id, - 'cohort': model.cohort.id, - 'recurrent': model.syllabus_schedule_time_slot.recurrent, - 'recurrency_type': model.syllabus_schedule_time_slot.recurrency_type, - 'timezone': 'America/Caracas', - } for model in models] + expected = [ + { + "id": model.syllabus_schedule_time_slot.id, + "cohort": model.cohort.id, + "recurrent": model.syllabus_schedule_time_slot.recurrent, + "recurrency_type": model.syllabus_schedule_time_slot.recurrency_type, + "timezone": "America/Caracas", + } + for model in models + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) self.assertEqual( self.all_cohort_time_slot_dict(), - [{ - 'id': model.syllabus_schedule_time_slot.id, - 'cohort_id': model.cohort.id, - 'removed_at': model.syllabus_schedule_time_slot.removed_at, - 'starting_at': model.syllabus_schedule_time_slot.starting_at, - 'ending_at': model.syllabus_schedule_time_slot.ending_at, - 'recurrent': model.syllabus_schedule_time_slot.recurrent, - 'recurrency_type': model.syllabus_schedule_time_slot.recurrency_type, - 'timezone': 'America/Caracas', - } for model in models], + [ + { + "id": model.syllabus_schedule_time_slot.id, + "cohort_id": model.cohort.id, + "removed_at": model.syllabus_schedule_time_slot.removed_at, + "starting_at": model.syllabus_schedule_time_slot.starting_at, + "ending_at": model.syllabus_schedule_time_slot.ending_at, + "recurrent": model.syllabus_schedule_time_slot.recurrent, + "recurrency_type": model.syllabus_schedule_time_slot.recurrency_type, + "timezone": "America/Caracas", + } + for model in models + ], ) """ @@ -230,16 +250,18 @@ def test_academy_cohort_sync_timeslot__with_two_certificate_timeslot(self): def test_academy_cohort_sync_timeslot__with_two_certificate_timeslot__with_two_cohort(self): """Test /certificate without auth""" self.headers(academy=1) - url = reverse_lazy('admissions:academy_cohort_sync_timeslot') + '?cohort=1,2' - academy_kwargs = {'timezone': 'America/Caracas'} - base = self.bc.database.create(authenticate=True, - profile_academy=True, - capability='crud_certificate', - role='potato', - syllabus_schedule=True, - syllabus=True, - skip_cohort=True, - academy_kwargs=academy_kwargs) + url = reverse_lazy("admissions:academy_cohort_sync_timeslot") + "?cohort=1,2" + academy_kwargs = {"timezone": "America/Caracas"} + base = self.bc.database.create( + authenticate=True, + profile_academy=True, + 
capability="crud_certificate", + role="potato", + syllabus_schedule=True, + syllabus=True, + skip_cohort=True, + academy_kwargs=academy_kwargs, + ) cohorts = [self.bc.database.create(cohort=True, models=base).cohort for _ in range(0, 2)] @@ -253,41 +275,57 @@ def test_academy_cohort_sync_timeslot__with_two_certificate_timeslot__with_two_c json = response.json() # base = 0 - expected = [{ - 'id': schedule_time_slot.id, - 'cohort': 1, - 'recurrent': schedule_time_slot.recurrent, - 'recurrency_type': schedule_time_slot.recurrency_type, - 'timezone': 'America/Caracas', - } for schedule_time_slot in certificate_timeslots] + [{ - 'id': schedule_time_slot.id + 2, - 'cohort': 2, - 'recurrent': schedule_time_slot.recurrent, - 'recurrency_type': schedule_time_slot.recurrency_type, - 'timezone': 'America/Caracas', - } for schedule_time_slot in certificate_timeslots] + expected = [ + { + "id": schedule_time_slot.id, + "cohort": 1, + "recurrent": schedule_time_slot.recurrent, + "recurrency_type": schedule_time_slot.recurrency_type, + "timezone": "America/Caracas", + } + for schedule_time_slot in certificate_timeslots + ] + [ + { + "id": schedule_time_slot.id + 2, + "cohort": 2, + "recurrent": schedule_time_slot.recurrent, + "recurrency_type": schedule_time_slot.recurrency_type, + "timezone": "America/Caracas", + } + for schedule_time_slot in certificate_timeslots + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - self.assertEqual(self.all_cohort_time_slot_dict(), [{ - 'id': schedule_time_slot.id, - 'cohort_id': 1, - 'removed_at': schedule_time_slot.removed_at, - 'starting_at': schedule_time_slot.starting_at, - 'ending_at': schedule_time_slot.ending_at, - 'recurrent': schedule_time_slot.recurrent, - 'recurrency_type': schedule_time_slot.recurrency_type, - 'timezone': 'America/Caracas', - } for schedule_time_slot in certificate_timeslots] + [{ - 'id': schedule_time_slot.id + 2, - 'cohort_id': 2, - 'removed_at': schedule_time_slot.removed_at, - 'starting_at': schedule_time_slot.starting_at, - 'ending_at': schedule_time_slot.ending_at, - 'recurrent': schedule_time_slot.recurrent, - 'recurrency_type': schedule_time_slot.recurrency_type, - 'timezone': 'America/Caracas', - } for schedule_time_slot in certificate_timeslots]) + self.assertEqual( + self.all_cohort_time_slot_dict(), + [ + { + "id": schedule_time_slot.id, + "cohort_id": 1, + "removed_at": schedule_time_slot.removed_at, + "starting_at": schedule_time_slot.starting_at, + "ending_at": schedule_time_slot.ending_at, + "recurrent": schedule_time_slot.recurrent, + "recurrency_type": schedule_time_slot.recurrency_type, + "timezone": "America/Caracas", + } + for schedule_time_slot in certificate_timeslots + ] + + [ + { + "id": schedule_time_slot.id + 2, + "cohort_id": 2, + "removed_at": schedule_time_slot.removed_at, + "starting_at": schedule_time_slot.starting_at, + "ending_at": schedule_time_slot.ending_at, + "recurrent": schedule_time_slot.recurrent, + "recurrency_type": schedule_time_slot.recurrency_type, + "timezone": "America/Caracas", + } + for schedule_time_slot in certificate_timeslots + ], + ) """ 🔽🔽🔽 With cohort timeslot @@ -296,42 +334,48 @@ def test_academy_cohort_sync_timeslot__with_two_certificate_timeslot__with_two_c def test_academy_cohort_sync_timeslot__with_one_cohort_timeslot(self): """Test /certificate without auth""" self.headers(academy=1) - url = reverse_lazy('admissions:academy_cohort_sync_timeslot') + '?cohort=1' - - academy_kwargs = {'timezone': 'America/Caracas'} - model = 
self.bc.database.create(authenticate=True, - profile_academy=True, - capability='crud_certificate', - role='potato', - syllabus_schedule=True, - syllabus=True, - cohort_time_slot=True, - syllabus_schedule_time_slot=True, - academy_kwargs=academy_kwargs) + url = reverse_lazy("admissions:academy_cohort_sync_timeslot") + "?cohort=1" + + academy_kwargs = {"timezone": "America/Caracas"} + model = self.bc.database.create( + authenticate=True, + profile_academy=True, + capability="crud_certificate", + role="potato", + syllabus_schedule=True, + syllabus=True, + cohort_time_slot=True, + syllabus_schedule_time_slot=True, + academy_kwargs=academy_kwargs, + ) data = {} response = self.client.post(url, data) json = response.json() - expected = [{ - 'id': 2, - 'cohort': model.cohort.id, - 'recurrent': model.syllabus_schedule_time_slot.recurrent, - 'recurrency_type': model.syllabus_schedule_time_slot.recurrency_type, - 'timezone': 'America/Caracas', - }] + expected = [ + { + "id": 2, + "cohort": model.cohort.id, + "recurrent": model.syllabus_schedule_time_slot.recurrent, + "recurrency_type": model.syllabus_schedule_time_slot.recurrency_type, + "timezone": "America/Caracas", + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) self.assertEqual( self.all_cohort_time_slot_dict(), - [{ - 'id': 2, - 'cohort_id': model.cohort.id, - 'removed_at': model.syllabus_schedule_time_slot.removed_at, - 'starting_at': model.syllabus_schedule_time_slot.starting_at, - 'ending_at': model.syllabus_schedule_time_slot.ending_at, - 'recurrent': model.syllabus_schedule_time_slot.recurrent, - 'recurrency_type': model.syllabus_schedule_time_slot.recurrency_type, - 'timezone': 'America/Caracas', - }], + [ + { + "id": 2, + "cohort_id": model.cohort.id, + "removed_at": model.syllabus_schedule_time_slot.removed_at, + "starting_at": model.syllabus_schedule_time_slot.starting_at, + "ending_at": model.syllabus_schedule_time_slot.ending_at, + "recurrent": model.syllabus_schedule_time_slot.recurrent, + "recurrency_type": model.syllabus_schedule_time_slot.recurrency_type, + "timezone": "America/Caracas", + } + ], ) diff --git a/breathecode/admissions/tests/urls/tests_academy_cohort_user.py b/breathecode/admissions/tests/urls/tests_academy_cohort_user.py index bf2d0cf98..1a4f1d782 100644 --- a/breathecode/admissions/tests/urls/tests_academy_cohort_user.py +++ b/breathecode/admissions/tests/urls/tests_academy_cohort_user.py @@ -1,6 +1,7 @@ """ Test /cohort/user """ + import random import re from random import choice @@ -19,126 +20,136 @@ def cohort_user_item(data={}): return { - 'cohort_id': 0, - 'educational_status': 'ACTIVE', - 'finantial_status': None, - 'id': 0, - 'role': 'STUDENT', - 'user_id': 0, - 'watching': False, - 'history_log': {}, + "cohort_id": 0, + "educational_status": "ACTIVE", + "finantial_status": None, + "id": 0, + "role": "STUDENT", + "user_id": 0, + "watching": False, + "history_log": {}, **data, } def post_serializer(self, cohort, user, profile_academy=None, data={}): return { - 'cohort': { - 'ending_date': cohort.ending_date, - 'id': cohort.id, - 'kickoff_date': - self.bc.datetime.to_iso_string(cohort.kickoff_date) if cohort.kickoff_date else cohort.kickoff_date, - 'name': cohort.name, - 'slug': cohort.slug, - 'stage': cohort.stage, - 'available_as_saas': cohort.available_as_saas, + "cohort": { + "ending_date": cohort.ending_date, + "id": cohort.id, + "kickoff_date": ( + self.bc.datetime.to_iso_string(cohort.kickoff_date) if cohort.kickoff_date else 
cohort.kickoff_date + ), + "name": cohort.name, + "slug": cohort.slug, + "stage": cohort.stage, + "available_as_saas": cohort.available_as_saas, }, - 'created_at': self.bc.datetime.to_iso_string(UTC_NOW), - 'educational_status': 'ACTIVE', - 'finantial_status': None, - 'id': 1, - 'profile_academy': { - 'email': profile_academy.email, - 'first_name': profile_academy.first_name, - 'id': profile_academy.id, - 'last_name': profile_academy.last_name, - 'phone': profile_academy.phone, - } if profile_academy else None, - 'role': 'STUDENT', - 'user': { - 'email': user.email, - 'first_name': user.first_name, - 'id': user.id, - 'last_name': user.last_name, - 'last_login': user.last_login, + "created_at": self.bc.datetime.to_iso_string(UTC_NOW), + "educational_status": "ACTIVE", + "finantial_status": None, + "id": 1, + "profile_academy": ( + { + "email": profile_academy.email, + "first_name": profile_academy.first_name, + "id": profile_academy.id, + "last_name": profile_academy.last_name, + "phone": profile_academy.phone, + } + if profile_academy + else None + ), + "role": "STUDENT", + "user": { + "email": user.email, + "first_name": user.first_name, + "id": user.id, + "last_name": user.last_name, + "last_login": user.last_login, }, - 'watching': False, + "watching": False, **data, } def put_serializer(self, cohort_user, cohort, user, profile_academy=None, data={}): return { - 'cohort': { - 'ending_date': cohort.ending_date, - 'id': cohort.id, - 'kickoff_date': - self.bc.datetime.to_iso_string(cohort.kickoff_date) if cohort.kickoff_date else cohort.kickoff_date, - 'name': cohort.name, - 'slug': cohort.slug, - 'stage': cohort.stage, - 'available_as_saas': cohort.available_as_saas, + "cohort": { + "ending_date": cohort.ending_date, + "id": cohort.id, + "kickoff_date": ( + self.bc.datetime.to_iso_string(cohort.kickoff_date) if cohort.kickoff_date else cohort.kickoff_date + ), + "name": cohort.name, + "slug": cohort.slug, + "stage": cohort.stage, + "available_as_saas": cohort.available_as_saas, }, - 'created_at': self.bc.datetime.to_iso_string(cohort_user.created_at), - 'educational_status': cohort_user.educational_status, - 'finantial_status': cohort_user.finantial_status, - 'id': cohort_user.id, - 'profile_academy': { - 'email': profile_academy.email, - 'first_name': profile_academy.first_name, - 'id': profile_academy.id, - 'last_name': profile_academy.last_name, - 'phone': profile_academy.phone, - } if profile_academy else None, - 'role': cohort_user.role, - 'user': { - 'email': user.email, - 'first_name': user.first_name, - 'id': user.id, - 'last_name': user.last_name, - 'last_login': user.last_login, + "created_at": self.bc.datetime.to_iso_string(cohort_user.created_at), + "educational_status": cohort_user.educational_status, + "finantial_status": cohort_user.finantial_status, + "id": cohort_user.id, + "profile_academy": ( + { + "email": profile_academy.email, + "first_name": profile_academy.first_name, + "id": profile_academy.id, + "last_name": profile_academy.last_name, + "phone": profile_academy.phone, + } + if profile_academy + else None + ), + "role": cohort_user.role, + "user": { + "email": user.email, + "first_name": user.first_name, + "id": user.id, + "last_name": user.last_name, + "last_login": user.last_login, }, - 'watching': cohort_user.watching, + "watching": cohort_user.watching, **data, } class CohortUserTestSuite(AdmissionsTestCase): """Test /cohort/user""" + """ 🔽🔽🔽 Auth """ - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - 
@patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test__without_auth(self): """Test /cohort/user without auth""" self.headers(academy=1) - url = reverse_lazy('admissions:academy_cohort_user') + url = reverse_lazy("admissions:academy_cohort_user") response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED - }) + self.assertEqual( + json, + {"detail": "Authentication credentials were not provided.", "status_code": status.HTTP_401_UNAUTHORIZED}, + ) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) """ 🔽🔽🔽 Without data """ - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test__without_data(self): """Test /cohort/user without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_all_cohort', - role='potato') - url = reverse_lazy('admissions:academy_cohort_user') + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_all_cohort", role="potato" + ) + url = reverse_lazy("admissions:academy_cohort_user") response = self.client.get(url) json = response.json() @@ -150,52 +161,52 @@ def test__without_data(self): 🔽🔽🔽 With data """ - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test__with_data(self): """Test /cohort/user without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - cohort_user=True, - profile_academy=True, - capability='read_all_cohort', - role='potato') - model_dict = self.remove_dinamics_fields(model['cohort_user'].__dict__) - url = reverse_lazy('admissions:academy_cohort_user') + model = self.generate_models( + authenticate=True, cohort_user=True, profile_academy=True, capability="read_all_cohort", role="potato" + ) + model_dict = self.remove_dinamics_fields(model["cohort_user"].__dict__) + url = reverse_lazy("admissions:academy_cohort_user") response = self.client.get(url) json = response.json() - expected = [{ - 'id': model['cohort_user'].id, - 'role': model['cohort_user'].role, - 'finantial_status': model['cohort_user'].finantial_status, - 
'educational_status': model['cohort_user'].educational_status, - 'created_at': re.sub(r'\+00:00$', 'Z', model['cohort_user'].created_at.isoformat()), - 'cohort': { - 'id': model['cohort_user'].cohort.id, - 'slug': model['cohort_user'].cohort.slug, - 'name': model['cohort_user'].cohort.name, - 'kickoff_date': re.sub(r'\+00:00$', 'Z', model['cohort_user'].cohort.kickoff_date.isoformat()), - 'ending_date': model['cohort_user'].cohort.ending_date, - 'stage': model['cohort_user'].cohort.stage, - 'available_as_saas': model['cohort_user'].cohort.available_as_saas, - }, - 'user': { - 'id': model['cohort_user'].user.id, - 'first_name': model['cohort_user'].user.first_name, - 'last_name': model['cohort_user'].user.last_name, - 'email': model['cohort_user'].user.email, - 'last_login': model['cohort_user'].user.last_login, - }, - 'profile_academy': { - 'id': model['profile_academy'].id, - 'first_name': model['profile_academy'].first_name, - 'last_name': model['profile_academy'].last_name, - 'email': model['profile_academy'].email, - 'phone': model['profile_academy'].phone, - }, - 'watching': False, - }] + expected = [ + { + "id": model["cohort_user"].id, + "role": model["cohort_user"].role, + "finantial_status": model["cohort_user"].finantial_status, + "educational_status": model["cohort_user"].educational_status, + "created_at": re.sub(r"\+00:00$", "Z", model["cohort_user"].created_at.isoformat()), + "cohort": { + "id": model["cohort_user"].cohort.id, + "slug": model["cohort_user"].cohort.slug, + "name": model["cohort_user"].cohort.name, + "kickoff_date": re.sub(r"\+00:00$", "Z", model["cohort_user"].cohort.kickoff_date.isoformat()), + "ending_date": model["cohort_user"].cohort.ending_date, + "stage": model["cohort_user"].cohort.stage, + "available_as_saas": model["cohort_user"].cohort.available_as_saas, + }, + "user": { + "id": model["cohort_user"].user.id, + "first_name": model["cohort_user"].user.first_name, + "last_name": model["cohort_user"].user.last_name, + "email": model["cohort_user"].user.email, + "last_login": model["cohort_user"].user.last_login, + }, + "profile_academy": { + "id": model["profile_academy"].id, + "first_name": model["profile_academy"].first_name, + "last_name": model["profile_academy"].last_name, + "email": model["profile_academy"].email, + "phone": model["profile_academy"].phone, + }, + "watching": False, + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -206,20 +217,18 @@ def test__with_data(self): 🔽🔽🔽 Roles in querystring """ - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test__with_data__with_bad_roles(self): """Test /cohort/user without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - cohort_user=True, - profile_academy=True, - capability='read_all_cohort', - role='potato') - model_dict = self.remove_dinamics_fields(model['cohort_user'].__dict__) - base_url = reverse_lazy('admissions:academy_cohort_user') - url = 
f'{base_url}?roles=they-killed-kenny' + model = self.generate_models( + authenticate=True, cohort_user=True, profile_academy=True, capability="read_all_cohort", role="potato" + ) + model_dict = self.remove_dinamics_fields(model["cohort_user"].__dict__) + base_url = reverse_lazy("admissions:academy_cohort_user") + url = f"{base_url}?roles=they-killed-kenny" response = self.client.get(url) json = response.json() @@ -228,110 +237,110 @@ def test__with_data__with_bad_roles(self): self.assertEqual(self.count_cohort_user(), 1) self.assertEqual(self.get_cohort_user_dict(1), model_dict) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test__with_data__with_roles(self): """Test /cohort/user without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - cohort_user=True, - profile_academy=True, - capability='read_all_cohort', - role='potato') - model_dict = self.remove_dinamics_fields(model['cohort_user'].__dict__) - role = model['cohort_user'].role + model = self.generate_models( + authenticate=True, cohort_user=True, profile_academy=True, capability="read_all_cohort", role="potato" + ) + model_dict = self.remove_dinamics_fields(model["cohort_user"].__dict__) + role = model["cohort_user"].role if random.randint(0, 1): role = role.lower() - url = reverse_lazy('admissions:academy_cohort_user') + f'?roles={role}' + url = reverse_lazy("admissions:academy_cohort_user") + f"?roles={role}" response = self.client.get(url) json = response.json() - expected = [{ - 'id': model['cohort_user'].id, - 'role': model['cohort_user'].role, - 'finantial_status': model['cohort_user'].finantial_status, - 'educational_status': model['cohort_user'].educational_status, - 'created_at': re.sub(r'\+00:00$', 'Z', model['cohort_user'].created_at.isoformat()), - 'cohort': { - 'id': model['cohort_user'].cohort.id, - 'slug': model['cohort_user'].cohort.slug, - 'name': model['cohort_user'].cohort.name, - 'kickoff_date': re.sub(r'\+00:00$', 'Z', model['cohort_user'].cohort.kickoff_date.isoformat()), - 'ending_date': model['cohort_user'].cohort.ending_date, - 'stage': model['cohort_user'].cohort.stage, - 'available_as_saas': model['cohort_user'].cohort.available_as_saas, - }, - 'user': { - 'id': model['cohort_user'].user.id, - 'first_name': model['cohort_user'].user.first_name, - 'last_name': model['cohort_user'].user.last_name, - 'email': model['cohort_user'].user.email, - 'last_login': model['cohort_user'].user.last_login, - }, - 'profile_academy': { - 'id': model['profile_academy'].id, - 'first_name': model['profile_academy'].first_name, - 'last_name': model['profile_academy'].last_name, - 'email': model['profile_academy'].email, - 'phone': model['profile_academy'].phone, - }, - 'watching': False, - }] + expected = [ + { + "id": model["cohort_user"].id, + "role": model["cohort_user"].role, + "finantial_status": model["cohort_user"].finantial_status, + "educational_status": model["cohort_user"].educational_status, + "created_at": re.sub(r"\+00:00$", "Z", model["cohort_user"].created_at.isoformat()), + "cohort": { + "id": model["cohort_user"].cohort.id, + "slug": model["cohort_user"].cohort.slug, + "name": model["cohort_user"].cohort.name, + 
"kickoff_date": re.sub(r"\+00:00$", "Z", model["cohort_user"].cohort.kickoff_date.isoformat()), + "ending_date": model["cohort_user"].cohort.ending_date, + "stage": model["cohort_user"].cohort.stage, + "available_as_saas": model["cohort_user"].cohort.available_as_saas, + }, + "user": { + "id": model["cohort_user"].user.id, + "first_name": model["cohort_user"].user.first_name, + "last_name": model["cohort_user"].user.last_name, + "email": model["cohort_user"].user.email, + "last_login": model["cohort_user"].user.last_login, + }, + "profile_academy": { + "id": model["profile_academy"].id, + "first_name": model["profile_academy"].first_name, + "last_name": model["profile_academy"].last_name, + "email": model["profile_academy"].email, + "phone": model["profile_academy"].phone, + }, + "watching": False, + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(self.count_cohort_user(), 1) self.assertEqual(self.get_cohort_user_dict(1), model_dict) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test__with_data__with_roles__with_comma(self): """Test /cohort/user without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - cohort_user=True, - profile_academy=True, - capability='read_all_cohort', - role='potato') - model_dict = self.remove_dinamics_fields(model['cohort_user'].__dict__) - base_url = reverse_lazy('admissions:academy_cohort_user') - url = f'{base_url}?roles=' + model['cohort_user'].role + ',they-killed-kenny' + model = self.generate_models( + authenticate=True, cohort_user=True, profile_academy=True, capability="read_all_cohort", role="potato" + ) + model_dict = self.remove_dinamics_fields(model["cohort_user"].__dict__) + base_url = reverse_lazy("admissions:academy_cohort_user") + url = f"{base_url}?roles=" + model["cohort_user"].role + ",they-killed-kenny" response = self.client.get(url) json = response.json() - expected = [{ - 'id': model['cohort_user'].id, - 'user': { - 'id': model['cohort_user'].user.id, - 'first_name': model['cohort_user'].user.first_name, - 'last_name': model['cohort_user'].user.last_name, - 'email': model['cohort_user'].user.email, - 'last_login': model['cohort_user'].user.last_login, - }, - 'profile_academy': { - 'id': model['profile_academy'].id, - 'first_name': model['profile_academy'].first_name, - 'last_name': model['profile_academy'].last_name, - 'email': model['profile_academy'].email, - 'phone': model['profile_academy'].phone, - }, - 'cohort': { - 'id': model['cohort_user'].cohort.id, - 'slug': model['cohort_user'].cohort.slug, - 'name': model['cohort_user'].cohort.name, - 'kickoff_date': re.sub(r'\+00:00$', 'Z', model['cohort_user'].cohort.kickoff_date.isoformat()), - 'ending_date': model['cohort_user'].cohort.ending_date, - 'stage': model['cohort_user'].cohort.stage, - 'available_as_saas': model['cohort_user'].cohort.available_as_saas, - }, - 'role': model['cohort_user'].role, - 'finantial_status': 
model['cohort_user'].finantial_status, - 'educational_status': model['cohort_user'].educational_status, - 'created_at': re.sub(r'\+00:00$', 'Z', model['cohort_user'].created_at.isoformat()), - 'watching': False, - }] + expected = [ + { + "id": model["cohort_user"].id, + "user": { + "id": model["cohort_user"].user.id, + "first_name": model["cohort_user"].user.first_name, + "last_name": model["cohort_user"].user.last_name, + "email": model["cohort_user"].user.email, + "last_login": model["cohort_user"].user.last_login, + }, + "profile_academy": { + "id": model["profile_academy"].id, + "first_name": model["profile_academy"].first_name, + "last_name": model["profile_academy"].last_name, + "email": model["profile_academy"].email, + "phone": model["profile_academy"].phone, + }, + "cohort": { + "id": model["cohort_user"].cohort.id, + "slug": model["cohort_user"].cohort.slug, + "name": model["cohort_user"].cohort.name, + "kickoff_date": re.sub(r"\+00:00$", "Z", model["cohort_user"].cohort.kickoff_date.isoformat()), + "ending_date": model["cohort_user"].cohort.ending_date, + "stage": model["cohort_user"].cohort.stage, + "available_as_saas": model["cohort_user"].cohort.available_as_saas, + }, + "role": model["cohort_user"].role, + "finantial_status": model["cohort_user"].finantial_status, + "educational_status": model["cohort_user"].educational_status, + "created_at": re.sub(r"\+00:00$", "Z", model["cohort_user"].created_at.isoformat()), + "watching": False, + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -342,20 +351,18 @@ def test__with_data__with_roles__with_comma(self): 🔽🔽🔽 Finantial status in querystring """ - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test__with_data__with_bad_finantial_status(self): """Test /cohort/user without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - cohort_user=True, - profile_academy=True, - capability='read_all_cohort', - role='potato') - model_dict = self.remove_dinamics_fields(model['cohort_user'].__dict__) - base_url = reverse_lazy('admissions:academy_cohort_user') - url = f'{base_url}?finantial_status=they-killed-kenny' + model = self.generate_models( + authenticate=True, cohort_user=True, profile_academy=True, capability="read_all_cohort", role="potato" + ) + model_dict = self.remove_dinamics_fields(model["cohort_user"].__dict__) + base_url = reverse_lazy("admissions:academy_cohort_user") + url = f"{base_url}?finantial_status=they-killed-kenny" response = self.client.get(url) json = response.json() @@ -364,112 +371,120 @@ def test__with_data__with_bad_finantial_status(self): self.assertEqual(self.count_cohort_user(), 1) self.assertEqual(self.get_cohort_user_dict(1), model_dict) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - 
@patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test__with_data__with_finantial_status(self): """Test /cohort/user without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - cohort_user=True, - cohort_user_kwargs={'finantial_status': 'LATE'}, - profile_academy=True, - capability='read_all_cohort', - role='potato') - model_dict = self.remove_dinamics_fields(model['cohort_user'].__dict__) - role = model['cohort_user'].finantial_status + model = self.generate_models( + authenticate=True, + cohort_user=True, + cohort_user_kwargs={"finantial_status": "LATE"}, + profile_academy=True, + capability="read_all_cohort", + role="potato", + ) + model_dict = self.remove_dinamics_fields(model["cohort_user"].__dict__) + role = model["cohort_user"].finantial_status if random.randint(0, 1): role = role.lower() - url = reverse_lazy('admissions:academy_cohort_user') + f'?finantial_status={role}' + url = reverse_lazy("admissions:academy_cohort_user") + f"?finantial_status={role}" response = self.client.get(url) json = response.json() - expected = [{ - 'id': model['cohort_user'].id, - 'role': model['cohort_user'].role, - 'finantial_status': model['cohort_user'].finantial_status, - 'educational_status': model['cohort_user'].educational_status, - 'created_at': re.sub(r'\+00:00$', 'Z', model['cohort_user'].created_at.isoformat()), - 'cohort': { - 'id': model['cohort_user'].cohort.id, - 'slug': model['cohort_user'].cohort.slug, - 'name': model['cohort_user'].cohort.name, - 'kickoff_date': re.sub(r'\+00:00$', 'Z', model['cohort_user'].cohort.kickoff_date.isoformat()), - 'ending_date': model['cohort_user'].cohort.ending_date, - 'stage': model['cohort_user'].cohort.stage, - 'available_as_saas': model['cohort_user'].cohort.available_as_saas, - }, - 'user': { - 'id': model['cohort_user'].user.id, - 'first_name': model['cohort_user'].user.first_name, - 'last_name': model['cohort_user'].user.last_name, - 'email': model['cohort_user'].user.email, - 'last_login': model['cohort_user'].user.last_login, - }, - 'profile_academy': { - 'id': model['profile_academy'].id, - 'first_name': model['profile_academy'].first_name, - 'last_name': model['profile_academy'].last_name, - 'email': model['profile_academy'].email, - 'phone': model['profile_academy'].phone, - }, - 'watching': False, - }] + expected = [ + { + "id": model["cohort_user"].id, + "role": model["cohort_user"].role, + "finantial_status": model["cohort_user"].finantial_status, + "educational_status": model["cohort_user"].educational_status, + "created_at": re.sub(r"\+00:00$", "Z", model["cohort_user"].created_at.isoformat()), + "cohort": { + "id": model["cohort_user"].cohort.id, + "slug": model["cohort_user"].cohort.slug, + "name": model["cohort_user"].cohort.name, + "kickoff_date": re.sub(r"\+00:00$", "Z", model["cohort_user"].cohort.kickoff_date.isoformat()), + "ending_date": model["cohort_user"].cohort.ending_date, + "stage": model["cohort_user"].cohort.stage, + "available_as_saas": model["cohort_user"].cohort.available_as_saas, + }, + "user": { + "id": model["cohort_user"].user.id, + "first_name": model["cohort_user"].user.first_name, + "last_name": 
model["cohort_user"].user.last_name, + "email": model["cohort_user"].user.email, + "last_login": model["cohort_user"].user.last_login, + }, + "profile_academy": { + "id": model["profile_academy"].id, + "first_name": model["profile_academy"].first_name, + "last_name": model["profile_academy"].last_name, + "email": model["profile_academy"].email, + "phone": model["profile_academy"].phone, + }, + "watching": False, + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(self.count_cohort_user(), 1) self.assertEqual(self.get_cohort_user_dict(1), model_dict) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test__with_data__with_finantial_status__with_comma(self): """Test /cohort/user without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - cohort_user=True, - cohort_user_kwargs={'finantial_status': 'LATE'}, - profile_academy=True, - capability='read_all_cohort', - role='potato') - model_dict = self.remove_dinamics_fields(model['cohort_user'].__dict__) - base_url = reverse_lazy('admissions:academy_cohort_user') - url = (f'{base_url}?finantial_status=' + model['cohort_user'].finantial_status + ',they-killed-kenny') + model = self.generate_models( + authenticate=True, + cohort_user=True, + cohort_user_kwargs={"finantial_status": "LATE"}, + profile_academy=True, + capability="read_all_cohort", + role="potato", + ) + model_dict = self.remove_dinamics_fields(model["cohort_user"].__dict__) + base_url = reverse_lazy("admissions:academy_cohort_user") + url = f"{base_url}?finantial_status=" + model["cohort_user"].finantial_status + ",they-killed-kenny" response = self.client.get(url) json = response.json() - expected = [{ - 'id': model['cohort_user'].id, - 'role': model['cohort_user'].role, - 'finantial_status': model['cohort_user'].finantial_status, - 'educational_status': model['cohort_user'].educational_status, - 'created_at': re.sub(r'\+00:00$', 'Z', model['cohort_user'].created_at.isoformat()), - 'user': { - 'id': model['cohort_user'].user.id, - 'first_name': model['cohort_user'].user.first_name, - 'last_name': model['cohort_user'].user.last_name, - 'email': model['cohort_user'].user.email, - 'last_login': model['cohort_user'].user.last_login, - }, - 'profile_academy': { - 'id': model['profile_academy'].id, - 'first_name': model['profile_academy'].first_name, - 'last_name': model['profile_academy'].last_name, - 'email': model['profile_academy'].email, - 'phone': model['profile_academy'].phone, - }, - 'cohort': { - 'id': model['cohort_user'].cohort.id, - 'slug': model['cohort_user'].cohort.slug, - 'name': model['cohort_user'].cohort.name, - 'kickoff_date': re.sub(r'\+00:00$', 'Z', model['cohort_user'].cohort.kickoff_date.isoformat()), - 'ending_date': model['cohort_user'].cohort.ending_date, - 'stage': model['cohort_user'].cohort.stage, - 'available_as_saas': model['cohort_user'].cohort.available_as_saas, - }, - 'watching': False, - }] + expected = [ + { + "id": 
model["cohort_user"].id, + "role": model["cohort_user"].role, + "finantial_status": model["cohort_user"].finantial_status, + "educational_status": model["cohort_user"].educational_status, + "created_at": re.sub(r"\+00:00$", "Z", model["cohort_user"].created_at.isoformat()), + "user": { + "id": model["cohort_user"].user.id, + "first_name": model["cohort_user"].user.first_name, + "last_name": model["cohort_user"].user.last_name, + "email": model["cohort_user"].user.email, + "last_login": model["cohort_user"].user.last_login, + }, + "profile_academy": { + "id": model["profile_academy"].id, + "first_name": model["profile_academy"].first_name, + "last_name": model["profile_academy"].last_name, + "email": model["profile_academy"].email, + "phone": model["profile_academy"].phone, + }, + "cohort": { + "id": model["cohort_user"].cohort.id, + "slug": model["cohort_user"].cohort.slug, + "name": model["cohort_user"].cohort.name, + "kickoff_date": re.sub(r"\+00:00$", "Z", model["cohort_user"].cohort.kickoff_date.isoformat()), + "ending_date": model["cohort_user"].cohort.ending_date, + "stage": model["cohort_user"].cohort.stage, + "available_as_saas": model["cohort_user"].cohort.available_as_saas, + }, + "watching": False, + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -480,20 +495,18 @@ def test__with_data__with_finantial_status__with_comma(self): 🔽🔽🔽 Educational status in querystring """ - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test__with_data__with_bad_educational_status(self): """Test /cohort/user without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - cohort_user=True, - profile_academy=True, - capability='read_all_cohort', - role='potato') - model_dict = self.remove_dinamics_fields(model['cohort_user'].__dict__) - base_url = reverse_lazy('admissions:academy_cohort_user') - url = f'{base_url}?educational_status=they-killed-kenny' + model = self.generate_models( + authenticate=True, cohort_user=True, profile_academy=True, capability="read_all_cohort", role="potato" + ) + model_dict = self.remove_dinamics_fields(model["cohort_user"].__dict__) + base_url = reverse_lazy("admissions:academy_cohort_user") + url = f"{base_url}?educational_status=they-killed-kenny" response = self.client.get(url) json = response.json() @@ -502,113 +515,120 @@ def test__with_data__with_bad_educational_status(self): self.assertEqual(self.count_cohort_user(), 1) self.assertEqual(self.get_cohort_user_dict(1), model_dict) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", 
MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test__with_data__with_educational_status(self): """Test /cohort/user without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - cohort_user=True, - cohort_user_kwargs={'educational_status': 'GRADUATED'}, - profile_academy=True, - capability='read_all_cohort', - role='potato') - model_dict = self.remove_dinamics_fields(model['cohort_user'].__dict__) - role = model['cohort_user'].educational_status + model = self.generate_models( + authenticate=True, + cohort_user=True, + cohort_user_kwargs={"educational_status": "GRADUATED"}, + profile_academy=True, + capability="read_all_cohort", + role="potato", + ) + model_dict = self.remove_dinamics_fields(model["cohort_user"].__dict__) + role = model["cohort_user"].educational_status if random.randint(0, 1): role = role.lower() - url = reverse_lazy('admissions:academy_cohort_user') + f'?educational_status={role}' + url = reverse_lazy("admissions:academy_cohort_user") + f"?educational_status={role}" response = self.client.get(url) json = response.json() - expected = [{ - 'id': model['cohort_user'].id, - 'role': model['cohort_user'].role, - 'finantial_status': model['cohort_user'].finantial_status, - 'educational_status': model['cohort_user'].educational_status, - 'created_at': re.sub(r'\+00:00$', 'Z', model['cohort_user'].created_at.isoformat()), - 'user': { - 'id': model['cohort_user'].user.id, - 'first_name': model['cohort_user'].user.first_name, - 'last_name': model['cohort_user'].user.last_name, - 'email': model['cohort_user'].user.email, - 'last_login': model['cohort_user'].user.last_login, - }, - 'profile_academy': { - 'id': model['profile_academy'].id, - 'first_name': model['profile_academy'].first_name, - 'last_name': model['profile_academy'].last_name, - 'email': model['profile_academy'].email, - 'phone': model['profile_academy'].phone, - }, - 'cohort': { - 'id': model['cohort_user'].cohort.id, - 'slug': model['cohort_user'].cohort.slug, - 'name': model['cohort_user'].cohort.name, - 'kickoff_date': re.sub(r'\+00:00$', 'Z', model['cohort_user'].cohort.kickoff_date.isoformat()), - 'ending_date': model['cohort_user'].cohort.ending_date, - 'stage': model['cohort_user'].cohort.stage, - 'available_as_saas': model['cohort_user'].cohort.available_as_saas, - }, - 'watching': False, - }] + expected = [ + { + "id": model["cohort_user"].id, + "role": model["cohort_user"].role, + "finantial_status": model["cohort_user"].finantial_status, + "educational_status": model["cohort_user"].educational_status, + "created_at": re.sub(r"\+00:00$", "Z", model["cohort_user"].created_at.isoformat()), + "user": { + "id": model["cohort_user"].user.id, + "first_name": model["cohort_user"].user.first_name, + "last_name": model["cohort_user"].user.last_name, + "email": model["cohort_user"].user.email, + "last_login": model["cohort_user"].user.last_login, + }, + "profile_academy": { + "id": model["profile_academy"].id, + "first_name": model["profile_academy"].first_name, + "last_name": model["profile_academy"].last_name, + "email": model["profile_academy"].email, + "phone": model["profile_academy"].phone, + }, + "cohort": { + "id": model["cohort_user"].cohort.id, + "slug": model["cohort_user"].cohort.slug, + "name": model["cohort_user"].cohort.name, + "kickoff_date": re.sub(r"\+00:00$", "Z", model["cohort_user"].cohort.kickoff_date.isoformat()), + "ending_date": 
model["cohort_user"].cohort.ending_date, + "stage": model["cohort_user"].cohort.stage, + "available_as_saas": model["cohort_user"].cohort.available_as_saas, + }, + "watching": False, + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(self.count_cohort_user(), 1) self.assertEqual(self.get_cohort_user_dict(1), model_dict) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test__with_data__with_educational_status__with_comma(self): """Test /cohort/user without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - cohort_user=True, - cohort_user_kwargs={'educational_status': 'GRADUATED'}, - profile_academy=True, - capability='read_all_cohort', - role='potato') - model_dict = self.remove_dinamics_fields(model['cohort_user'].__dict__) - base_url = reverse_lazy('admissions:academy_cohort_user') - url = (f'{base_url}?educational_status=' + model['cohort_user'].educational_status + ',' - 'they-killed-kenny') + model = self.generate_models( + authenticate=True, + cohort_user=True, + cohort_user_kwargs={"educational_status": "GRADUATED"}, + profile_academy=True, + capability="read_all_cohort", + role="potato", + ) + model_dict = self.remove_dinamics_fields(model["cohort_user"].__dict__) + base_url = reverse_lazy("admissions:academy_cohort_user") + url = f"{base_url}?educational_status=" + model["cohort_user"].educational_status + "," "they-killed-kenny" response = self.client.get(url) json = response.json() - expected = [{ - 'id': model['cohort_user'].id, - 'role': model['cohort_user'].role, - 'finantial_status': model['cohort_user'].finantial_status, - 'educational_status': model['cohort_user'].educational_status, - 'created_at': re.sub(r'\+00:00$', 'Z', model['cohort_user'].created_at.isoformat()), - 'user': { - 'id': model['cohort_user'].user.id, - 'first_name': model['cohort_user'].user.first_name, - 'last_name': model['cohort_user'].user.last_name, - 'email': model['cohort_user'].user.email, - 'last_login': model['cohort_user'].user.last_login, - }, - 'profile_academy': { - 'id': model['profile_academy'].id, - 'first_name': model['profile_academy'].first_name, - 'last_name': model['profile_academy'].last_name, - 'email': model['profile_academy'].email, - 'phone': model['profile_academy'].phone, - }, - 'cohort': { - 'id': model['cohort_user'].cohort.id, - 'slug': model['cohort_user'].cohort.slug, - 'name': model['cohort_user'].cohort.name, - 'kickoff_date': re.sub(r'\+00:00$', 'Z', model['cohort_user'].cohort.kickoff_date.isoformat()), - 'ending_date': model['cohort_user'].cohort.ending_date, - 'stage': model['cohort_user'].cohort.stage, - 'available_as_saas': model['cohort_user'].cohort.available_as_saas, - }, - 'watching': False, - }] + expected = [ + { + "id": model["cohort_user"].id, + "role": model["cohort_user"].role, + "finantial_status": model["cohort_user"].finantial_status, + "educational_status": model["cohort_user"].educational_status, + "created_at": 
re.sub(r"\+00:00$", "Z", model["cohort_user"].created_at.isoformat()), + "user": { + "id": model["cohort_user"].user.id, + "first_name": model["cohort_user"].user.first_name, + "last_name": model["cohort_user"].user.last_name, + "email": model["cohort_user"].user.email, + "last_login": model["cohort_user"].user.last_login, + }, + "profile_academy": { + "id": model["profile_academy"].id, + "first_name": model["profile_academy"].first_name, + "last_name": model["profile_academy"].last_name, + "email": model["profile_academy"].email, + "phone": model["profile_academy"].phone, + }, + "cohort": { + "id": model["cohort_user"].cohort.id, + "slug": model["cohort_user"].cohort.slug, + "name": model["cohort_user"].cohort.name, + "kickoff_date": re.sub(r"\+00:00$", "Z", model["cohort_user"].cohort.kickoff_date.isoformat()), + "ending_date": model["cohort_user"].cohort.ending_date, + "stage": model["cohort_user"].cohort.stage, + "available_as_saas": model["cohort_user"].cohort.available_as_saas, + }, + "watching": False, + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -619,108 +639,116 @@ def test__with_data__with_educational_status__with_comma(self): 🔽🔽🔽 Academy in querystring """ - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test__with_data__with_academy(self): """Test /cohort/user without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - cohort_user=True, - cohort_user_kwargs={'educational_status': 'GRADUATED'}, - profile_academy=True, - capability='read_all_cohort', - role='potato') - model_dict = self.remove_dinamics_fields(model['cohort_user'].__dict__) - base_url = reverse_lazy('admissions:academy_cohort_user') - url = f'{base_url}?academy=' + model['cohort_user'].cohort.academy.slug + model = self.generate_models( + authenticate=True, + cohort_user=True, + cohort_user_kwargs={"educational_status": "GRADUATED"}, + profile_academy=True, + capability="read_all_cohort", + role="potato", + ) + model_dict = self.remove_dinamics_fields(model["cohort_user"].__dict__) + base_url = reverse_lazy("admissions:academy_cohort_user") + url = f"{base_url}?academy=" + model["cohort_user"].cohort.academy.slug response = self.client.get(url) json = response.json() - expected = [{ - 'id': model['cohort_user'].id, - 'role': model['cohort_user'].role, - 'finantial_status': model['cohort_user'].finantial_status, - 'educational_status': model['cohort_user'].educational_status, - 'created_at': re.sub(r'\+00:00$', 'Z', model['cohort_user'].created_at.isoformat()), - 'user': { - 'id': model['cohort_user'].user.id, - 'first_name': model['cohort_user'].user.first_name, - 'last_name': model['cohort_user'].user.last_name, - 'email': model['cohort_user'].user.email, - 'last_login': model['cohort_user'].user.last_login, - }, - 'profile_academy': { - 'id': model['profile_academy'].id, - 'first_name': model['profile_academy'].first_name, - 'last_name': model['profile_academy'].last_name, - 'email': 
model['profile_academy'].email, - 'phone': model['profile_academy'].phone, - }, - 'cohort': { - 'id': model['cohort_user'].cohort.id, - 'slug': model['cohort_user'].cohort.slug, - 'name': model['cohort_user'].cohort.name, - 'kickoff_date': re.sub(r'\+00:00$', 'Z', model['cohort_user'].cohort.kickoff_date.isoformat()), - 'ending_date': model['cohort_user'].cohort.ending_date, - 'stage': model['cohort_user'].cohort.stage, - 'available_as_saas': model['cohort_user'].cohort.available_as_saas, - }, - 'watching': False, - }] + expected = [ + { + "id": model["cohort_user"].id, + "role": model["cohort_user"].role, + "finantial_status": model["cohort_user"].finantial_status, + "educational_status": model["cohort_user"].educational_status, + "created_at": re.sub(r"\+00:00$", "Z", model["cohort_user"].created_at.isoformat()), + "user": { + "id": model["cohort_user"].user.id, + "first_name": model["cohort_user"].user.first_name, + "last_name": model["cohort_user"].user.last_name, + "email": model["cohort_user"].user.email, + "last_login": model["cohort_user"].user.last_login, + }, + "profile_academy": { + "id": model["profile_academy"].id, + "first_name": model["profile_academy"].first_name, + "last_name": model["profile_academy"].last_name, + "email": model["profile_academy"].email, + "phone": model["profile_academy"].phone, + }, + "cohort": { + "id": model["cohort_user"].cohort.id, + "slug": model["cohort_user"].cohort.slug, + "name": model["cohort_user"].cohort.name, + "kickoff_date": re.sub(r"\+00:00$", "Z", model["cohort_user"].cohort.kickoff_date.isoformat()), + "ending_date": model["cohort_user"].cohort.ending_date, + "stage": model["cohort_user"].cohort.stage, + "available_as_saas": model["cohort_user"].cohort.available_as_saas, + }, + "watching": False, + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(self.count_cohort_user(), 1) self.assertEqual(self.get_cohort_user_dict(1), model_dict) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test__with_data__with_academy__with_comma(self): """Test /cohort/user without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - cohort_user=True, - cohort_user_kwargs={'educational_status': 'GRADUATED'}, - profile_academy=True, - capability='read_all_cohort', - role='potato') - model_dict = self.remove_dinamics_fields(model['cohort_user'].__dict__) - base_url = reverse_lazy('admissions:academy_cohort_user') - url = f'{base_url}?academy=' + model['cohort_user'].cohort.academy.slug + ',they-killed-kenny' + model = self.generate_models( + authenticate=True, + cohort_user=True, + cohort_user_kwargs={"educational_status": "GRADUATED"}, + profile_academy=True, + capability="read_all_cohort", + role="potato", + ) + model_dict = self.remove_dinamics_fields(model["cohort_user"].__dict__) + base_url = reverse_lazy("admissions:academy_cohort_user") + url = f"{base_url}?academy=" + model["cohort_user"].cohort.academy.slug + 
",they-killed-kenny" response = self.client.get(url) json = response.json() - expected = [{ - 'id': model['cohort_user'].id, - 'role': model['cohort_user'].role, - 'finantial_status': model['cohort_user'].finantial_status, - 'educational_status': model['cohort_user'].educational_status, - 'created_at': re.sub(r'\+00:00$', 'Z', model['cohort_user'].created_at.isoformat()), - 'user': { - 'id': model['cohort_user'].user.id, - 'first_name': model['cohort_user'].user.first_name, - 'last_name': model['cohort_user'].user.last_name, - 'email': model['cohort_user'].user.email, - 'last_login': model['cohort_user'].user.last_login, - }, - 'profile_academy': { - 'id': model['profile_academy'].id, - 'first_name': model['profile_academy'].first_name, - 'last_name': model['profile_academy'].last_name, - 'email': model['profile_academy'].email, - 'phone': model['profile_academy'].phone, - }, - 'cohort': { - 'id': model['cohort_user'].cohort.id, - 'slug': model['cohort_user'].cohort.slug, - 'name': model['cohort_user'].cohort.name, - 'kickoff_date': re.sub(r'\+00:00$', 'Z', model['cohort_user'].cohort.kickoff_date.isoformat()), - 'ending_date': model['cohort_user'].cohort.ending_date, - 'stage': model['cohort_user'].cohort.stage, - 'available_as_saas': model['cohort_user'].cohort.available_as_saas, - }, - 'watching': False, - }] + expected = [ + { + "id": model["cohort_user"].id, + "role": model["cohort_user"].role, + "finantial_status": model["cohort_user"].finantial_status, + "educational_status": model["cohort_user"].educational_status, + "created_at": re.sub(r"\+00:00$", "Z", model["cohort_user"].created_at.isoformat()), + "user": { + "id": model["cohort_user"].user.id, + "first_name": model["cohort_user"].user.first_name, + "last_name": model["cohort_user"].user.last_name, + "email": model["cohort_user"].user.email, + "last_login": model["cohort_user"].user.last_login, + }, + "profile_academy": { + "id": model["profile_academy"].id, + "first_name": model["profile_academy"].first_name, + "last_name": model["profile_academy"].last_name, + "email": model["profile_academy"].email, + "phone": model["profile_academy"].phone, + }, + "cohort": { + "id": model["cohort_user"].cohort.id, + "slug": model["cohort_user"].cohort.slug, + "name": model["cohort_user"].cohort.name, + "kickoff_date": re.sub(r"\+00:00$", "Z", model["cohort_user"].cohort.kickoff_date.isoformat()), + "ending_date": model["cohort_user"].cohort.ending_date, + "stage": model["cohort_user"].cohort.stage, + "available_as_saas": model["cohort_user"].cohort.available_as_saas, + }, + "watching": False, + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -731,20 +759,18 @@ def test__with_data__with_academy__with_comma(self): 🔽🔽🔽 Cohorts in querystring """ - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test__with_data__with_bad_cohorts(self): """Test /cohort/user without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - 
cohort_user=True, - profile_academy=True, - capability='read_all_cohort', - role='potato') - model_dict = self.remove_dinamics_fields(model['cohort_user'].__dict__) - base_url = reverse_lazy('admissions:academy_cohort_user') - url = f'{base_url}?cohorts=they-killed-kenny' + model = self.generate_models( + authenticate=True, cohort_user=True, profile_academy=True, capability="read_all_cohort", role="potato" + ) + model_dict = self.remove_dinamics_fields(model["cohort_user"].__dict__) + base_url = reverse_lazy("admissions:academy_cohort_user") + url = f"{base_url}?cohorts=they-killed-kenny" response = self.client.get(url) json = response.json() @@ -753,197 +779,196 @@ def test__with_data__with_bad_cohorts(self): self.assertEqual(self.count_cohort_user(), 1) self.assertEqual(self.get_cohort_user_dict(1), model_dict) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test__with_data__with_cohorts(self): """Test /cohort/user without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - cohort_user=True, - cohort_user_kwargs={'educational_status': 'GRADUATED'}, - profile_academy=True, - capability='read_all_cohort', - role='potato') - model_dict = self.remove_dinamics_fields(model['cohort_user'].__dict__) - base_url = reverse_lazy('admissions:academy_cohort_user') - url = f'{base_url}?cohorts=' + model['cohort_user'].cohort.slug + model = self.generate_models( + authenticate=True, + cohort_user=True, + cohort_user_kwargs={"educational_status": "GRADUATED"}, + profile_academy=True, + capability="read_all_cohort", + role="potato", + ) + model_dict = self.remove_dinamics_fields(model["cohort_user"].__dict__) + base_url = reverse_lazy("admissions:academy_cohort_user") + url = f"{base_url}?cohorts=" + model["cohort_user"].cohort.slug response = self.client.get(url) json = response.json() - expected = [{ - 'id': model['cohort_user'].id, - 'role': model['cohort_user'].role, - 'finantial_status': model['cohort_user'].finantial_status, - 'educational_status': model['cohort_user'].educational_status, - 'created_at': re.sub(r'\+00:00$', 'Z', model['cohort_user'].created_at.isoformat()), - 'user': { - 'id': model['cohort_user'].user.id, - 'first_name': model['cohort_user'].user.first_name, - 'last_name': model['cohort_user'].user.last_name, - 'email': model['cohort_user'].user.email, - 'last_login': model['cohort_user'].user.last_login, - }, - 'profile_academy': { - 'id': model['profile_academy'].id, - 'first_name': model['profile_academy'].first_name, - 'last_name': model['profile_academy'].last_name, - 'email': model['profile_academy'].email, - 'phone': model['profile_academy'].phone, - }, - 'cohort': { - 'id': model['cohort_user'].cohort.id, - 'slug': model['cohort_user'].cohort.slug, - 'name': model['cohort_user'].cohort.name, - 'kickoff_date': re.sub(r'\+00:00$', 'Z', model['cohort_user'].cohort.kickoff_date.isoformat()), - 'ending_date': model['cohort_user'].cohort.ending_date, - 'stage': model['cohort_user'].cohort.stage, - 
'available_as_saas': model['cohort_user'].cohort.available_as_saas, - }, - 'watching': False, - }] + expected = [ + { + "id": model["cohort_user"].id, + "role": model["cohort_user"].role, + "finantial_status": model["cohort_user"].finantial_status, + "educational_status": model["cohort_user"].educational_status, + "created_at": re.sub(r"\+00:00$", "Z", model["cohort_user"].created_at.isoformat()), + "user": { + "id": model["cohort_user"].user.id, + "first_name": model["cohort_user"].user.first_name, + "last_name": model["cohort_user"].user.last_name, + "email": model["cohort_user"].user.email, + "last_login": model["cohort_user"].user.last_login, + }, + "profile_academy": { + "id": model["profile_academy"].id, + "first_name": model["profile_academy"].first_name, + "last_name": model["profile_academy"].last_name, + "email": model["profile_academy"].email, + "phone": model["profile_academy"].phone, + }, + "cohort": { + "id": model["cohort_user"].cohort.id, + "slug": model["cohort_user"].cohort.slug, + "name": model["cohort_user"].cohort.name, + "kickoff_date": re.sub(r"\+00:00$", "Z", model["cohort_user"].cohort.kickoff_date.isoformat()), + "ending_date": model["cohort_user"].cohort.ending_date, + "stage": model["cohort_user"].cohort.stage, + "available_as_saas": model["cohort_user"].cohort.available_as_saas, + }, + "watching": False, + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(self.count_cohort_user(), 1) self.assertEqual(self.get_cohort_user_dict(1), model_dict) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test__with_data__with_cohorts_get_tasks_with_no_tasks(self): """Test /cohort/user without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - cohort_user=True, - cohort_user_kwargs={'educational_status': 'GRADUATED'}, - profile_academy=True, - capability='read_all_cohort', - role='potato') - model_dict = self.remove_dinamics_fields(model['cohort_user'].__dict__) - base_url = reverse_lazy('admissions:academy_cohort_user') - url = f'{base_url}?cohorts=' + model['cohort_user'].cohort.slug + '&tasks=True' + model = self.generate_models( + authenticate=True, + cohort_user=True, + cohort_user_kwargs={"educational_status": "GRADUATED"}, + profile_academy=True, + capability="read_all_cohort", + role="potato", + ) + model_dict = self.remove_dinamics_fields(model["cohort_user"].__dict__) + base_url = reverse_lazy("admissions:academy_cohort_user") + url = f"{base_url}?cohorts=" + model["cohort_user"].cohort.slug + "&tasks=True" response = self.client.get(url) json = response.json() - expected = [{ - 'id': model['cohort_user'].id, - 'role': model['cohort_user'].role, - 'finantial_status': model['cohort_user'].finantial_status, - 'educational_status': model['cohort_user'].educational_status, - 'created_at': re.sub(r'\+00:00$', 'Z', model['cohort_user'].created_at.isoformat()), - 'user': { - 'id': model['cohort_user'].user.id, - 'first_name': 
model['cohort_user'].user.first_name, - 'last_name': model['cohort_user'].user.last_name, - 'email': model['cohort_user'].user.email, - 'last_login': model['cohort_user'].user.last_login, - }, - 'profile_academy': { - 'id': model['profile_academy'].id, - 'first_name': model['profile_academy'].first_name, - 'last_name': model['profile_academy'].last_name, - 'email': model['profile_academy'].email, - 'phone': model['profile_academy'].phone, - }, - 'tasks': [], - 'cohort': { - 'id': model['cohort_user'].cohort.id, - 'slug': model['cohort_user'].cohort.slug, - 'name': model['cohort_user'].cohort.name, - 'kickoff_date': re.sub(r'\+00:00$', 'Z', model['cohort_user'].cohort.kickoff_date.isoformat()), - 'ending_date': model['cohort_user'].cohort.ending_date, - 'stage': model['cohort_user'].cohort.stage, - 'available_as_saas': model['cohort_user'].cohort.available_as_saas, - }, - 'watching': False, - }] + expected = [ + { + "id": model["cohort_user"].id, + "role": model["cohort_user"].role, + "finantial_status": model["cohort_user"].finantial_status, + "educational_status": model["cohort_user"].educational_status, + "created_at": re.sub(r"\+00:00$", "Z", model["cohort_user"].created_at.isoformat()), + "user": { + "id": model["cohort_user"].user.id, + "first_name": model["cohort_user"].user.first_name, + "last_name": model["cohort_user"].user.last_name, + "email": model["cohort_user"].user.email, + "last_login": model["cohort_user"].user.last_login, + }, + "profile_academy": { + "id": model["profile_academy"].id, + "first_name": model["profile_academy"].first_name, + "last_name": model["profile_academy"].last_name, + "email": model["profile_academy"].email, + "phone": model["profile_academy"].phone, + }, + "tasks": [], + "cohort": { + "id": model["cohort_user"].cohort.id, + "slug": model["cohort_user"].cohort.slug, + "name": model["cohort_user"].cohort.name, + "kickoff_date": re.sub(r"\+00:00$", "Z", model["cohort_user"].cohort.kickoff_date.isoformat()), + "ending_date": model["cohort_user"].cohort.ending_date, + "stage": model["cohort_user"].cohort.stage, + "available_as_saas": model["cohort_user"].cohort.available_as_saas, + }, + "watching": False, + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(self.count_cohort_user(), 1) self.assertEqual(self.get_cohort_user_dict(1), model_dict) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test__with_data__with_cohorts_get_tasks_with_no_tasks(self): """Test /cohort/user without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - cohort_user=True, - task=True, - cohort_user_kwargs={'educational_status': 'GRADUATED'}, - profile_academy=True, - capability='read_all_cohort', - role='potato') - model_dict = self.remove_dinamics_fields(model['cohort_user'].__dict__) - base_url = reverse_lazy('admissions:academy_cohort_user') - url = f'{base_url}?cohorts=' + model['cohort_user'].cohort.slug + '&tasks=True' + model = 
self.generate_models( + authenticate=True, + cohort_user=True, + task=True, + cohort_user_kwargs={"educational_status": "GRADUATED"}, + profile_academy=True, + capability="read_all_cohort", + role="potato", + ) + model_dict = self.remove_dinamics_fields(model["cohort_user"].__dict__) + base_url = reverse_lazy("admissions:academy_cohort_user") + url = f"{base_url}?cohorts=" + model["cohort_user"].cohort.slug + "&tasks=True" response = self.client.get(url) json = response.json() - expected = [{ - 'id': - model['cohort_user'].id, - 'role': - model['cohort_user'].role, - 'finantial_status': - model['cohort_user'].finantial_status, - 'educational_status': - model['cohort_user'].educational_status, - 'created_at': - re.sub(r'\+00:00$', 'Z', model['cohort_user'].created_at.isoformat()), - 'user': { - 'id': model['cohort_user'].user.id, - 'first_name': model['cohort_user'].user.first_name, - 'last_name': model['cohort_user'].user.last_name, - 'email': model['cohort_user'].user.email, - 'last_login': model['cohort_user'].user.last_login, - }, - 'profile_academy': { - 'id': model['profile_academy'].id, - 'first_name': model['profile_academy'].first_name, - 'last_name': model['profile_academy'].last_name, - 'email': model['profile_academy'].email, - 'phone': model['profile_academy'].phone, - }, - 'tasks': [{ - 'id': - model['task'].id, - 'associated_slug': - model['task'].associated_slug, - 'description': - model['task'].description, - 'github_url': - model['task'].github_url, - 'live_url': - model['task'].live_url, - 'task_type': - model['task'].task_type, - 'task_status': - model['task'].task_status, - 'revision_status': - model['task'].revision_status, - 'created_at': - re.sub(r'\+00:00$', 'Z', model['task'].created_at.isoformat()), - 'updated_at': - re.sub(r'\+00:00$', 'Z', model['task'].updated_at.isoformat()), - 'delivered_at': - re.sub(r'\+00:00$', 'Z', model['task'].delivered_at.isoformat()) - if model['task'].delivered_at is not None else None, - 'title': - model['task'].title, - }], - 'cohort': { - 'id': model['cohort_user'].cohort.id, - 'slug': model['cohort_user'].cohort.slug, - 'name': model['cohort_user'].cohort.name, - 'kickoff_date': re.sub(r'\+00:00$', 'Z', model['cohort_user'].cohort.kickoff_date.isoformat()), - 'ending_date': model['cohort_user'].cohort.ending_date, - 'stage': model['cohort_user'].cohort.stage, - 'available_as_saas': model['cohort_user'].cohort.available_as_saas, - }, - 'watching': - False, - }] + expected = [ + { + "id": model["cohort_user"].id, + "role": model["cohort_user"].role, + "finantial_status": model["cohort_user"].finantial_status, + "educational_status": model["cohort_user"].educational_status, + "created_at": re.sub(r"\+00:00$", "Z", model["cohort_user"].created_at.isoformat()), + "user": { + "id": model["cohort_user"].user.id, + "first_name": model["cohort_user"].user.first_name, + "last_name": model["cohort_user"].user.last_name, + "email": model["cohort_user"].user.email, + "last_login": model["cohort_user"].user.last_login, + }, + "profile_academy": { + "id": model["profile_academy"].id, + "first_name": model["profile_academy"].first_name, + "last_name": model["profile_academy"].last_name, + "email": model["profile_academy"].email, + "phone": model["profile_academy"].phone, + }, + "tasks": [ + { + "id": model["task"].id, + "associated_slug": model["task"].associated_slug, + "description": model["task"].description, + "github_url": model["task"].github_url, + "live_url": model["task"].live_url, + "task_type": model["task"].task_type, + 
"task_status": model["task"].task_status, + "revision_status": model["task"].revision_status, + "created_at": re.sub(r"\+00:00$", "Z", model["task"].created_at.isoformat()), + "updated_at": re.sub(r"\+00:00$", "Z", model["task"].updated_at.isoformat()), + "delivered_at": ( + re.sub(r"\+00:00$", "Z", model["task"].delivered_at.isoformat()) + if model["task"].delivered_at is not None + else None + ), + "title": model["task"].title, + } + ], + "cohort": { + "id": model["cohort_user"].cohort.id, + "slug": model["cohort_user"].cohort.slug, + "name": model["cohort_user"].cohort.name, + "kickoff_date": re.sub(r"\+00:00$", "Z", model["cohort_user"].cohort.kickoff_date.isoformat()), + "ending_date": model["cohort_user"].cohort.ending_date, + "stage": model["cohort_user"].cohort.stage, + "available_as_saas": model["cohort_user"].cohort.available_as_saas, + }, + "watching": False, + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -954,482 +979,544 @@ def test__with_data__with_cohorts_get_tasks_with_no_tasks(self): 🔽🔽🔽 Put without id """ - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test__put__without_id(self): """Test /cohort/user without auth""" self.headers(academy=1) - url = reverse_lazy('admissions:academy_cohort_user') - model = self.generate_models(authenticate=True, profile_academy=True, capability='crud_cohort', role='potato') + url = reverse_lazy("admissions:academy_cohort_user") + model = self.generate_models(authenticate=True, profile_academy=True, capability="crud_cohort", role="potato") data = {} - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() - expected = {'status_code': 400, 'detail': 'Missing cohort_id, user_id and id'} + expected = {"status_code": 400, "detail": "Missing cohort_id, user_id and id"} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), []) + self.assertEqual(self.bc.database.list_of("admissions.CohortUser"), []) """ 🔽🔽🔽 Put bulk mode """ - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test__put__in_bulk__without_data(self): """Test /cohort/user without auth""" self.headers(academy=1) - url = reverse_lazy('admissions:academy_cohort_user') - model = self.generate_models(authenticate=True, profile_academy=True, capability='crud_cohort', 
role='potato') + url = reverse_lazy("admissions:academy_cohort_user") + model = self.generate_models(authenticate=True, profile_academy=True, capability="crud_cohort", role="potato") data = [] - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() expected = [] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), []) + self.assertEqual(self.bc.database.list_of("admissions.CohortUser"), []) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test__put__in_bulk__without_data__without_id(self): """Test /cohort/user without auth""" self.headers(academy=1) - url = reverse_lazy('admissions:academy_cohort_user') - model = self.generate_models(authenticate=True, profile_academy=True, capability='crud_cohort', role='potato') + url = reverse_lazy("admissions:academy_cohort_user") + model = self.generate_models(authenticate=True, profile_academy=True, capability="crud_cohort", role="potato") data = [{}] - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() - expected = {'detail': 'Missing cohort_id, user_id and id', 'status_code': 400} + expected = {"detail": "Missing cohort_id, user_id and id", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), []) + self.assertEqual(self.bc.database.list_of("admissions.CohortUser"), []) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test__put__in_bulk__without_data__with_bad_id(self): """Test /cohort/user without auth""" self.headers(academy=1) - url = reverse_lazy('admissions:academy_cohort_user') - model = self.generate_models(authenticate=True, profile_academy=True, capability='crud_cohort', role='potato') - data = [{'id': 1}] - response = self.client.put(url, data, format='json') + url = reverse_lazy("admissions:academy_cohort_user") + model = self.generate_models(authenticate=True, profile_academy=True, capability="crud_cohort", role="potato") + data = [{"id": 1}] + response = self.client.put(url, data, format="json") json = response.json() - expected = {'detail': 'Cannot determine CohortUser in index 0', 'status_code': 400} + expected = {"detail": "Cannot determine CohortUser in index 
0", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), []) + self.assertEqual(self.bc.database.list_of("admissions.CohortUser"), []) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test__put__in_bulk__with_one_item(self): """Test /cohort/user without auth""" self.headers(academy=1) - url = reverse_lazy('admissions:academy_cohort_user') - model = self.generate_models(authenticate=True, - cohort_user=True, - profile_academy=True, - capability='crud_cohort', - role='potato') - data = [{'id': model['cohort_user'].id}] - response = self.client.put(url, data, format='json') + url = reverse_lazy("admissions:academy_cohort_user") + model = self.generate_models( + authenticate=True, cohort_user=True, profile_academy=True, capability="crud_cohort", role="potato" + ) + data = [{"id": model["cohort_user"].id}] + response = self.client.put(url, data, format="json") json = response.json() expected = [ - put_serializer(self, - model.cohort_user, - model.cohort, - model.user, - model.profile_academy, - data={ - 'role': 'STUDENT', - }) + put_serializer( + self, + model.cohort_user, + model.cohort, + model.user, + model.profile_academy, + data={ + "role": "STUDENT", + }, + ) ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), [ - self.bc.format.to_dict(model.cohort_user), - ]) - - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + self.assertEqual( + self.bc.database.list_of("admissions.CohortUser"), + [ + self.bc.format.to_dict(model.cohort_user), + ], + ) + + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test__put__in_bulk__with_two_items(self): """Test /cohort/user without auth""" self.headers(academy=1) - url = reverse_lazy('admissions:academy_cohort_user') + url = reverse_lazy("admissions:academy_cohort_user") model = [ - self.generate_models(authenticate=True, - cohort_user=True, - profile_academy=True, - capability='crud_cohort', - role='potato') + self.generate_models( + authenticate=True, cohort_user=True, profile_academy=True, capability="crud_cohort", role="potato" + ) ] base = model[0].copy() - del base['user'] - del base['cohort'] - del base['cohort_user'] - del base['profile_academy'] + del base["user"] + del base["cohort"] + del base["cohort_user"] + del base["profile_academy"] model = model + 
[self.generate_models(cohort_user=True, profile_academy=True, models=base)] - data = [{ - 'id': 1, - 'finantial_status': 'LATE', - }, { - 'user': '2', - 'cohort': '2', - 'educational_status': 'GRADUATED' - }] - response = self.client.put(url, data, format='json') + data = [ + { + "id": 1, + "finantial_status": "LATE", + }, + {"user": "2", "cohort": "2", "educational_status": "GRADUATED"}, + ] + response = self.client.put(url, data, format="json") json = response.json() expected = [ - put_serializer(self, - m.cohort_user, - m.cohort, - m.user, - m.profile_academy, - data={ - 'educational_status': 'ACTIVE' if m.cohort.id == 1 else 'GRADUATED', - 'finantial_status': 'LATE' if m.cohort.id == 1 else None, - }) for m in model + put_serializer( + self, + m.cohort_user, + m.cohort, + m.user, + m.profile_academy, + data={ + "educational_status": "ACTIVE" if m.cohort.id == 1 else "GRADUATED", + "finantial_status": "LATE" if m.cohort.id == 1 else None, + }, + ) + for m in model ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), [ - { - **self.bc.format.to_dict(model[0].cohort_user), - 'finantial_status': 'LATE', - }, - { - **self.bc.format.to_dict(model[1].cohort_user), - 'educational_status': 'GRADUATED', - }, - ]) + self.assertEqual( + self.bc.database.list_of("admissions.CohortUser"), + [ + { + **self.bc.format.to_dict(model[0].cohort_user), + "finantial_status": "LATE", + }, + { + **self.bc.format.to_dict(model[1].cohort_user), + "educational_status": "GRADUATED", + }, + ], + ) """ 🔽🔽🔽 Post bulk mode """ - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test__post__in_bulk__0_items(self): """Test /cohort/:id/user without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - cohort=True, - profile_academy=True, - capability='crud_cohort', - role='potato') - url = reverse_lazy('admissions:academy_cohort_user') + model = self.generate_models( + authenticate=True, cohort=True, profile_academy=True, capability="crud_cohort", role="potato" + ) + url = reverse_lazy("admissions:academy_cohort_user") data = [] - response = self.client.post(url, data, format='json') + response = self.client.post(url, data, format="json") json = response.json() expected = [] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), []) + self.assertEqual(self.bc.database.list_of("admissions.CohortUser"), []) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) 
+ @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test__post__in_bulk__1_item(self): """Test /cohort/:id/user without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - cohort={'stage': 'STARTED'}, - user=True, - profile_academy=True, - capability='crud_cohort', - role='potato') - url = reverse_lazy('admissions:academy_cohort_user') - data = [{ - 'user': model['user'].id, - 'cohort': model['cohort'].id, - }] - response = self.client.post(url, data, format='json') + model = self.generate_models( + authenticate=True, + cohort={"stage": "STARTED"}, + user=True, + profile_academy=True, + capability="crud_cohort", + role="potato", + ) + url = reverse_lazy("admissions:academy_cohort_user") + data = [ + { + "user": model["user"].id, + "cohort": model["cohort"].id, + } + ] + response = self.client.post(url, data, format="json") json = response.json() expected = [ - post_serializer(self, model.cohort, model.user, model.profile_academy, data={ - 'id': 1, - 'role': 'STUDENT', - }), + post_serializer( + self, + model.cohort, + model.user, + model.profile_academy, + data={ + "id": 1, + "role": "STUDENT", + }, + ), ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), [ - cohort_user_item({ - 'cohort_id': 1, - 'id': 1, - 'user_id': 1, - }), - ]) - - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + self.assertEqual( + self.bc.database.list_of("admissions.CohortUser"), + [ + cohort_user_item( + { + "cohort_id": 1, + "id": 1, + "user_id": 1, + } + ), + ], + ) + + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test__post_in_bulk__2_items(self): """Test /cohort/:id/user without auth""" self.headers(academy=1) - base = self.generate_models(authenticate=True, - cohort={'stage': 'STARTED'}, - profile_academy=True, - capability='crud_cohort', - role='potato') - del base['user'] + base = self.generate_models( + authenticate=True, + cohort={"stage": "STARTED"}, + profile_academy=True, + capability="crud_cohort", + role="potato", + ) + del base["user"] models = [self.generate_models(user=True, models=base) for _ in range(0, 2)] - url = reverse_lazy('admissions:academy_cohort_user') - data = [{ - 'user': model['user'].id, - 'cohort': models[0]['cohort'].id, - } for model in models] - response = self.client.post(url, data, format='json') + url = reverse_lazy("admissions:academy_cohort_user") + data = [ + { + "user": model["user"].id, + "cohort": models[0]["cohort"].id, + } + for model in models + ] + response = self.client.post(url, data, format="json") json = response.json() expected = [ - 
post_serializer(self, model.cohort, model.user, None, data={ - 'id': model.user.id - 1, - 'role': 'STUDENT', - }) for model in models + post_serializer( + self, + model.cohort, + model.user, + None, + data={ + "id": model.user.id - 1, + "role": "STUDENT", + }, + ) + for model in models ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), [ - cohort_user_item({ - 'cohort_id': 1, - 'id': 1, - 'user_id': 2, - }), - cohort_user_item({ - 'cohort_id': 1, - 'id': 2, - 'user_id': 3, - }), - ]) + self.assertEqual( + self.bc.database.list_of("admissions.CohortUser"), + [ + cohort_user_item( + { + "cohort_id": 1, + "id": 1, + "user_id": 2, + } + ), + cohort_user_item( + { + "cohort_id": 1, + "id": 2, + "user_id": 3, + } + ), + ], + ) """ 🔽🔽🔽 Post in bulk, statuses in lowercase """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test__post__in_bulk__1_item__statuses_in_lowercase(self): """Test /cohort/:id/user without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - cohort={'stage': 'STARTED'}, - user=True, - profile_academy=True, - capability='crud_cohort', - role='potato') - url = reverse_lazy('admissions:academy_cohort_user') - role = random.choice(['TEACHER', 'ASSISTANT', 'REVIEWER', 'STUDENT']).lower() - finantial_status = random.choice(['FULLY_PAID', 'UP_TO_DATE', 'LATE']).lower() + model = self.generate_models( + authenticate=True, + cohort={"stage": "STARTED"}, + user=True, + profile_academy=True, + capability="crud_cohort", + role="potato", + ) + url = reverse_lazy("admissions:academy_cohort_user") + role = random.choice(["TEACHER", "ASSISTANT", "REVIEWER", "STUDENT"]).lower() + finantial_status = random.choice(["FULLY_PAID", "UP_TO_DATE", "LATE"]).lower() # don't put GRADUATED here - educational_status = random.choice(['ACTIVE', 'POSTPONED', 'SUSPENDED', 'DROPPED']).lower() - data = [{ - 'role': role, - 'finantial_status': finantial_status, - 'educational_status': educational_status, - 'user': model['user'].id, - 'cohort': model['cohort'].id, - }] - response = self.client.post(url, data, format='json') + educational_status = random.choice(["ACTIVE", "POSTPONED", "SUSPENDED", "DROPPED"]).lower() + data = [ + { + "role": role, + "finantial_status": finantial_status, + "educational_status": educational_status, + "user": model["user"].id, + "cohort": model["cohort"].id, + } + ] + response = self.client.post(url, data, format="json") json = response.json() - del data[0]['user'] - del data[0]['cohort'] + del data[0]["user"] + del data[0]["cohort"] expected = [ - post_serializer(self, - model.cohort, - model.user, - model.profile_academy, - data={ - **data[0], - 'role': role.upper(), - 'finantial_status': finantial_status.upper(), - 'educational_status': 
educational_status.upper(), - }), + post_serializer( + self, + model.cohort, + model.user, + model.profile_academy, + data={ + **data[0], + "role": role.upper(), + "finantial_status": finantial_status.upper(), + "educational_status": educational_status.upper(), + }, + ), ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), [ - cohort_user_item({ - 'cohort_id': 1, - 'role': role.upper(), - 'finantial_status': finantial_status.upper(), - 'educational_status': educational_status.upper(), - 'id': 1, - 'user_id': 1, - }), - ]) + self.assertEqual( + self.bc.database.list_of("admissions.CohortUser"), + [ + cohort_user_item( + { + "cohort_id": 1, + "role": role.upper(), + "finantial_status": finantial_status.upper(), + "educational_status": educational_status.upper(), + "id": 1, + "user_id": 1, + } + ), + ], + ) """ 🔽🔽🔽 Delete in bulk """ - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test__delete__without_args_in_url_or_bulk(self): """Test /cohort/:id/user without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, profile_academy=True, capability='crud_cohort', role='potato') - url = reverse_lazy('admissions:academy_cohort_user') + model = self.generate_models(authenticate=True, profile_academy=True, capability="crud_cohort", role="potato") + url = reverse_lazy("admissions:academy_cohort_user") response = self.client.delete(url) json = response.json() - expected = {'detail': 'Missing user_id or cohort_id', 'status_code': 400} + expected = {"detail": "Missing user_id or cohort_id", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), []) + self.assertEqual(self.bc.database.list_of("admissions.CohortUser"), []) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test__delete__in_bulk__with_one(self): """Test /cohort/:id/user without auth""" self.headers(academy=1) - many_fields = ['id'] + many_fields = ["id"] - base = self.generate_models(authenticate=True, profile_academy=True, capability='crud_cohort', role='potato') + base = self.generate_models(authenticate=True, profile_academy=True, capability="crud_cohort", role="potato") - del base['user'] - del base['cohort'] + del base["user"] + del base["cohort"] for field in many_fields: 
cohort_user_kwargs = { - 'role': choice(['STUDENT', 'ASSISTANT', 'TEACHER']), - 'finantial_status': choice(['FULLY_PAID', 'UP_TO_DATE', 'LATE']), - 'educational_status': choice(['ACTIVE', 'POSTPONED', 'SUSPENDED', 'GRADUATED', 'DROPPED']), + "role": choice(["STUDENT", "ASSISTANT", "TEACHER"]), + "finantial_status": choice(["FULLY_PAID", "UP_TO_DATE", "LATE"]), + "educational_status": choice(["ACTIVE", "POSTPONED", "SUSPENDED", "GRADUATED", "DROPPED"]), } model = self.generate_models(cohort_user=True, cohort_user_kwargs=cohort_user_kwargs, models=base) - url = (reverse_lazy('admissions:academy_cohort_user') + f'?{field}=' + - str(getattr(model['cohort_user'], field))) + url = ( + reverse_lazy("admissions:academy_cohort_user") + + f"?{field}=" + + str(getattr(model["cohort_user"], field)) + ) response = self.client.delete(url) self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), []) + self.assertEqual(self.bc.database.list_of("admissions.CohortUser"), []) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test__delete__in_bulk__with_two(self): """Test /cohort/:id/user without auth""" self.headers(academy=1) - many_fields = ['id'] + many_fields = ["id"] - base = self.generate_models(authenticate=True, profile_academy=True, capability='crud_cohort', role='potato') + base = self.generate_models(authenticate=True, profile_academy=True, capability="crud_cohort", role="potato") - del base['user'] - del base['cohort'] + del base["user"] + del base["cohort"] for field in many_fields: cohort_user_kwargs = { - 'role': choice(['STUDENT', 'ASSISTANT', 'TEACHER']), - 'finantial_status': choice(['FULLY_PAID', 'UP_TO_DATE', 'LATE']), - 'educational_status': choice(['ACTIVE', 'POSTPONED', 'SUSPENDED', 'GRADUATED', 'DROPPED']), + "role": choice(["STUDENT", "ASSISTANT", "TEACHER"]), + "finantial_status": choice(["FULLY_PAID", "UP_TO_DATE", "LATE"]), + "educational_status": choice(["ACTIVE", "POSTPONED", "SUSPENDED", "GRADUATED", "DROPPED"]), } model1 = self.generate_models(cohort_user=True, cohort_user_kwargs=cohort_user_kwargs, models=base) cohort_user_kwargs = { - 'role': choice(['STUDENT', 'ASSISTANT', 'TEACHER']), - 'finantial_status': choice(['FULLY_PAID', 'UP_TO_DATE', 'LATE']), - 'educational_status': choice(['ACTIVE', 'POSTPONED', 'SUSPENDED', 'GRADUATED', 'DROPPED']), + "role": choice(["STUDENT", "ASSISTANT", "TEACHER"]), + "finantial_status": choice(["FULLY_PAID", "UP_TO_DATE", "LATE"]), + "educational_status": choice(["ACTIVE", "POSTPONED", "SUSPENDED", "GRADUATED", "DROPPED"]), } model2 = self.generate_models(cohort_user=True, cohort_user_kwargs=cohort_user_kwargs, models=base) - url = (reverse_lazy('admissions:academy_cohort_user') + f'?{field}=' + - str(getattr(model1['cohort_user'], field)) + ',' + str(getattr(model2['cohort_user'], field))) + url = ( + reverse_lazy("admissions:academy_cohort_user") + + f"?{field}=" + + str(getattr(model1["cohort_user"], field)) + + "," + + 
str(getattr(model2["cohort_user"], field)) + ) response = self.client.delete(url) self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), []) + self.assertEqual(self.bc.database.list_of("admissions.CohortUser"), []) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_cohort_user__post__1_item(self): - prohibited_stages = ['INACTIVE', 'DELETED', 'ENDED'] + prohibited_stages = ["INACTIVE", "DELETED", "ENDED"] for stage in prohibited_stages: - model = self.generate_models(authenticate=True, - cohort={'stage': stage}, - user=True, - profile_academy=True, - capability='crud_cohort', - role='potato') + model = self.generate_models( + authenticate=True, + cohort={"stage": stage}, + user=True, + profile_academy=True, + capability="crud_cohort", + role="potato", + ) self.headers(academy=model.academy.id) - url = reverse_lazy('admissions:academy_cohort_user') + url = reverse_lazy("admissions:academy_cohort_user") data = { - 'user': model['user'].id, - 'cohort': model['cohort'].id, + "user": model["user"].id, + "cohort": model["cohort"].id, } - response = self.client.post(url, data, format='json') + response = self.client.post(url, data, format="json") json = response.json() - expected = {'detail': 'adding-student-to-a-closed-cohort', 'status_code': 400} + expected = {"detail": "adding-student-to-a-closed-cohort", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), []) + self.assertEqual(self.bc.database.list_of("admissions.CohortUser"), []) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_cohort_user__post__2_item(self): - #incomplete test - prohibited_stages = ['INACTIVE', 'DELETED', 'ENDED'] + # incomplete test + prohibited_stages = ["INACTIVE", "DELETED", "ENDED"] for stage in prohibited_stages: - model = self.generate_models(authenticate=True, - cohort=(2, { - 'stage': stage - }), - user=True, - profile_academy=True, - capability='crud_cohort', - role='potato') + model = self.generate_models( + authenticate=True, + cohort=(2, {"stage": stage}), + user=True, + profile_academy=True, + capability="crud_cohort", + role="potato", + ) self.headers(academy=model.academy.id) - url = reverse_lazy('admissions:academy_cohort_user') - data = [{ - 'user': model['user'].id, - 'cohort': 
model['cohort'][0].id, - }, { - 'user': model['user'].id, - 'cohort': model['cohort'][1].id, - }] - response = self.client.post(url, data, format='json') + url = reverse_lazy("admissions:academy_cohort_user") + data = [ + { + "user": model["user"].id, + "cohort": model["cohort"][0].id, + }, + { + "user": model["user"].id, + "cohort": model["cohort"][1].id, + }, + ] + response = self.client.post(url, data, format="json") json = response.json() - expected = {'detail': 'adding-student-to-a-closed-cohort', 'status_code': 400} + expected = {"detail": "adding-student-to-a-closed-cohort", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), []) + self.assertEqual(self.bc.database.list_of("admissions.CohortUser"), []) diff --git a/breathecode/admissions/tests/urls/tests_academy_id_syllabus.py b/breathecode/admissions/tests/urls/tests_academy_id_syllabus.py index cc184080b..7974a2056 100644 --- a/breathecode/admissions/tests/urls/tests_academy_id_syllabus.py +++ b/breathecode/admissions/tests/urls/tests_academy_id_syllabus.py @@ -1,6 +1,7 @@ """ Test /certificate """ + from django.urls.base import reverse_lazy from rest_framework import status from ..mixins import AdmissionsTestCase @@ -11,27 +12,27 @@ class CertificateTestSuite(AdmissionsTestCase): def test_syllabus_without_auth(self): """Test /certificate without auth""" - url = reverse_lazy('admissions:academy_id_syllabus', kwargs={'academy_id': 1}) + url = reverse_lazy("admissions:academy_id_syllabus", kwargs={"academy_id": 1}) response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED - }) + self.assertEqual( + json, + {"detail": "Authentication credentials were not provided.", "status_code": status.HTTP_401_UNAUTHORIZED}, + ) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) self.assertEqual(self.all_syllabus_schedule_dict(), []) def test_syllabus_without_capability(self): """Test /certificate without auth""" self.headers(academy=1) - url = reverse_lazy('admissions:academy_id_syllabus', kwargs={'academy_id': 1}) + url = reverse_lazy("admissions:academy_id_syllabus", kwargs={"academy_id": 1}) self.generate_models(authenticate=True) response = self.client.get(url) json = response.json() expected = { - 'status_code': 403, - 'detail': "You (user: 1) don't have this capability: read_syllabus for academy 1" + "status_code": 403, + "detail": "You (user: 1) don't have this capability: read_syllabus for academy 1", } self.assertEqual(json, expected) @@ -41,12 +42,10 @@ def test_syllabus_without_capability(self): def test_syllabus_without_syllabus(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - syllabus_schedule=True, - profile_academy=True, - capability='read_syllabus', - role='potato') - url = reverse_lazy('admissions:academy_id_syllabus', kwargs={'academy_id': 1}) + model = self.generate_models( + authenticate=True, syllabus_schedule=True, profile_academy=True, capability="read_syllabus", role="potato" + ) + url = reverse_lazy("admissions:academy_id_syllabus", kwargs={"academy_id": 1}) response = self.client.get(url) json = response.json() expected = [] @@ -58,54 +57,57 @@ def test_syllabus_without_syllabus(self): def test_syllabus(self): """Test /certificate without auth""" 
self.headers(academy=1) - model = self.generate_models(authenticate=True, - syllabus_schedule=True, - profile_academy=True, - capability='read_syllabus', - role='potato', - syllabus=True) - url = reverse_lazy('admissions:academy_id_syllabus', kwargs={'academy_id': 1}) + model = self.generate_models( + authenticate=True, + syllabus_schedule=True, + profile_academy=True, + capability="read_syllabus", + role="potato", + syllabus=True, + ) + url = reverse_lazy("admissions:academy_id_syllabus", kwargs={"academy_id": 1}) response = self.client.get(url) json = response.json() - expected = [{ - 'main_technologies': None, - 'slug': model.syllabus.slug, - 'name': model.syllabus.name, - 'academy_owner': { - 'id': model.syllabus.academy_owner.id, - 'name': model.syllabus.academy_owner.name, - 'slug': model.syllabus.academy_owner.slug, - 'white_labeled': model.syllabus.academy_owner.white_labeled, - 'icon_url': model.syllabus.academy_owner.icon_url, - 'available_as_saas': model.syllabus.academy_owner.available_as_saas, - }, - 'duration_in_days': model.syllabus.duration_in_days, - 'duration_in_hours': model.syllabus.duration_in_hours, - 'week_hours': model.syllabus.week_hours, - 'github_url': model.syllabus.github_url, - 'id': model.syllabus.id, - 'logo': model.syllabus.logo, - 'private': model.syllabus.private, - 'created_at': self.datetime_to_iso(model.syllabus.created_at), - 'updated_at': self.datetime_to_iso(model.syllabus.updated_at), - }] + expected = [ + { + "main_technologies": None, + "slug": model.syllabus.slug, + "name": model.syllabus.name, + "academy_owner": { + "id": model.syllabus.academy_owner.id, + "name": model.syllabus.academy_owner.name, + "slug": model.syllabus.academy_owner.slug, + "white_labeled": model.syllabus.academy_owner.white_labeled, + "icon_url": model.syllabus.academy_owner.icon_url, + "available_as_saas": model.syllabus.academy_owner.available_as_saas, + }, + "duration_in_days": model.syllabus.duration_in_days, + "duration_in_hours": model.syllabus.duration_in_hours, + "week_hours": model.syllabus.week_hours, + "github_url": model.syllabus.github_url, + "id": model.syllabus.id, + "logo": model.syllabus.logo, + "private": model.syllabus.private, + "created_at": self.datetime_to_iso(model.syllabus.created_at), + "updated_at": self.datetime_to_iso(model.syllabus.updated_at), + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, 'syllabus')}]) + self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, "syllabus")}]) def test_syllabus_post_without_capabilities(self): """Test /certificate without auth""" self.headers(academy=1) model = self.generate_models(authenticate=True) - url = reverse_lazy('admissions:academy_id_syllabus', kwargs={'academy_id': 1}) + url = reverse_lazy("admissions:academy_id_syllabus", kwargs={"academy_id": 1}) data = {} response = self.client.post(url, data) json = response.json() expected = { - 'detail': "You (user: 1) don't have this capability: crud_syllabus " - 'for academy 1', - 'status_code': 403 + "detail": "You (user: 1) don't have this capability: crud_syllabus " "for academy 1", + "status_code": 403, } self.assertEqual(json, expected) @@ -115,13 +117,13 @@ def test_syllabus_post_without_capabilities(self): def test_syllabus__post__missing_slug_in_request(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, profile_academy=True, 
capability='crud_syllabus', role='potato') - url = reverse_lazy('admissions:academy_id_syllabus', kwargs={'academy_id': 1}) + model = self.generate_models(authenticate=True, profile_academy=True, capability="crud_syllabus", role="potato") + url = reverse_lazy("admissions:academy_id_syllabus", kwargs={"academy_id": 1}) data = {} - response = self.client.post(url, data, format='json') + response = self.client.post(url, data, format="json") json = response.json() - expected = {'detail': 'missing-slug', 'status_code': 400} + expected = {"detail": "missing-slug", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -130,13 +132,13 @@ def test_syllabus__post__missing_slug_in_request(self): def test_syllabus__post__missing_name_in_request(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, profile_academy=True, capability='crud_syllabus', role='potato') - url = reverse_lazy('admissions:academy_id_syllabus', kwargs={'academy_id': 1}) - data = {'slug': 'they-killed-kenny'} - response = self.client.post(url, data, format='json') + model = self.generate_models(authenticate=True, profile_academy=True, capability="crud_syllabus", role="potato") + url = reverse_lazy("admissions:academy_id_syllabus", kwargs={"academy_id": 1}) + data = {"slug": "they-killed-kenny"} + response = self.client.post(url, data, format="json") json = response.json() - expected = {'detail': 'missing-name', 'status_code': 400} + expected = {"detail": "missing-name", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -145,39 +147,44 @@ def test_syllabus__post__missing_name_in_request(self): def test_syllabus__post(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, profile_academy=True, capability='crud_syllabus', role='potato') - url = reverse_lazy('admissions:academy_id_syllabus', kwargs={'academy_id': 1}) + model = self.generate_models(authenticate=True, profile_academy=True, capability="crud_syllabus", role="potato") + url = reverse_lazy("admissions:academy_id_syllabus", kwargs={"academy_id": 1}) data = { - 'slug': 'they-killed-kenny', - 'name': 'They killed kenny', + "slug": "they-killed-kenny", + "name": "They killed kenny", } - response = self.client.post(url, data, format='json') + response = self.client.post(url, data, format="json") json = response.json() expected = { - 'academy_owner': 1, - 'duration_in_days': None, - 'duration_in_hours': None, - 'github_url': None, - 'id': 1, - 'logo': None, - 'private': False, - 'week_hours': None, + "academy_owner": 1, + "duration_in_days": None, + "duration_in_hours": None, + "github_url": None, + "id": 1, + "logo": None, + "private": False, + "week_hours": None, **data, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - self.assertEqual(self.all_syllabus_dict(), [{ - 'main_technologies': None, - 'academy_owner_id': 1, - 'duration_in_days': None, - 'duration_in_hours': None, - 'github_url': None, - 'id': 1, - 'is_documentation': False, - 'logo': None, - 'private': False, - 'week_hours': None, - **data, - }]) + self.assertEqual( + self.all_syllabus_dict(), + [ + { + "main_technologies": None, + "academy_owner_id": 1, + "duration_in_days": None, + "duration_in_hours": None, + "github_url": None, + "id": 1, + "is_documentation": False, + "logo": None, + "private": 
False, + "week_hours": None, + **data, + } + ], + ) diff --git a/breathecode/admissions/tests/urls/tests_academy_id_syllabus_id.py b/breathecode/admissions/tests/urls/tests_academy_id_syllabus_id.py index 913366715..4594423a4 100644 --- a/breathecode/admissions/tests/urls/tests_academy_id_syllabus_id.py +++ b/breathecode/admissions/tests/urls/tests_academy_id_syllabus_id.py @@ -1,6 +1,7 @@ """ Test /certificate """ + from django.urls.base import reverse_lazy from rest_framework import status from ..mixins import AdmissionsTestCase @@ -11,33 +12,39 @@ class CertificateTestSuite(AdmissionsTestCase): def test_syllabus_id_without_auth(self): """Test /certificate without auth""" - url = reverse_lazy('admissions:academy_id_syllabus_id', kwargs={ - 'academy_id': 1, - 'syllabus_id': 1, - }) + url = reverse_lazy( + "admissions:academy_id_syllabus_id", + kwargs={ + "academy_id": 1, + "syllabus_id": 1, + }, + ) response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED - }) + self.assertEqual( + json, + {"detail": "Authentication credentials were not provided.", "status_code": status.HTTP_401_UNAUTHORIZED}, + ) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) self.assertEqual(self.all_syllabus_schedule_dict(), []) def test_syllabus_id_without_capability(self): """Test /certificate without auth""" self.headers(academy=1) - url = reverse_lazy('admissions:academy_id_syllabus_id', kwargs={ - 'academy_id': 1, - 'syllabus_id': 1, - }) + url = reverse_lazy( + "admissions:academy_id_syllabus_id", + kwargs={ + "academy_id": 1, + "syllabus_id": 1, + }, + ) self.generate_models(authenticate=True) response = self.client.get(url) json = response.json() expected = { - 'status_code': 403, - 'detail': "You (user: 1) don't have this capability: read_syllabus for academy 1" + "status_code": 403, + "detail": "You (user: 1) don't have this capability: read_syllabus for academy 1", } self.assertEqual(json, expected) @@ -47,14 +54,17 @@ def test_syllabus_id_without_capability(self): def test_syllabus_id_without_data(self): """Test /certificate without auth""" self.headers(academy=1) - url = reverse_lazy('admissions:academy_id_syllabus_id', kwargs={ - 'academy_id': 1, - 'syllabus_id': 1, - }) - model = self.generate_models(authenticate=True, profile_academy=True, capability='read_syllabus', role='potato') + url = reverse_lazy( + "admissions:academy_id_syllabus_id", + kwargs={ + "academy_id": 1, + "syllabus_id": 1, + }, + ) + model = self.generate_models(authenticate=True, profile_academy=True, capability="read_syllabus", role="potato") response = self.client.get(url) json = response.json() - expected = {'status_code': 404, 'detail': 'syllabus-not-found'} + expected = {"status_code": 404, "detail": "syllabus-not-found"} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) @@ -63,59 +73,63 @@ def test_syllabus_id_without_data(self): def test_syllabus_id(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - syllabus_schedule=True, - profile_academy=True, - capability='read_syllabus', - role='potato', - syllabus_version=True, - syllabus=True) - url = reverse_lazy('admissions:academy_id_syllabus_id', kwargs={'academy_id': 1, 'syllabus_id': 1}) + model = self.generate_models( + authenticate=True, + syllabus_schedule=True, + profile_academy=True, + 
capability="read_syllabus", + role="potato", + syllabus_version=True, + syllabus=True, + ) + url = reverse_lazy("admissions:academy_id_syllabus_id", kwargs={"academy_id": 1, "syllabus_id": 1}) response = self.client.get(url) json = response.json() expected = { - 'main_technologies': None, - 'slug': model.syllabus.slug, - 'name': model.syllabus.name, - 'academy_owner': { - 'id': model.syllabus.academy_owner.id, - 'name': model.syllabus.academy_owner.name, - 'slug': model.syllabus.academy_owner.slug, - 'white_labeled': model.syllabus.academy_owner.white_labeled, - 'icon_url': model.syllabus.academy_owner.icon_url, - 'available_as_saas': model.syllabus.academy_owner.available_as_saas, + "main_technologies": None, + "slug": model.syllabus.slug, + "name": model.syllabus.name, + "academy_owner": { + "id": model.syllabus.academy_owner.id, + "name": model.syllabus.academy_owner.name, + "slug": model.syllabus.academy_owner.slug, + "white_labeled": model.syllabus.academy_owner.white_labeled, + "icon_url": model.syllabus.academy_owner.icon_url, + "available_as_saas": model.syllabus.academy_owner.available_as_saas, }, - 'duration_in_days': model.syllabus.duration_in_days, - 'duration_in_hours': model.syllabus.duration_in_hours, - 'week_hours': model.syllabus.week_hours, - 'github_url': model.syllabus.github_url, - 'id': model.syllabus.id, - 'logo': model.syllabus.logo, - 'private': model.syllabus.private, - 'created_at': self.datetime_to_iso(model.syllabus.created_at), - 'updated_at': self.datetime_to_iso(model.syllabus.updated_at), + "duration_in_days": model.syllabus.duration_in_days, + "duration_in_hours": model.syllabus.duration_in_hours, + "week_hours": model.syllabus.week_hours, + "github_url": model.syllabus.github_url, + "id": model.syllabus.id, + "logo": model.syllabus.logo, + "private": model.syllabus.private, + "created_at": self.datetime_to_iso(model.syllabus.created_at), + "updated_at": self.datetime_to_iso(model.syllabus.updated_at), } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, 'syllabus')}]) + self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, "syllabus")}]) def test_syllabus_id__put__without_capabilities(self): """Test /certificate without auth""" self.headers(academy=1) model = self.generate_models(authenticate=True) - url = reverse_lazy('admissions:academy_id_syllabus_id', kwargs={ - 'academy_id': 1, - 'syllabus_id': 1, - }) + url = reverse_lazy( + "admissions:academy_id_syllabus_id", + kwargs={ + "academy_id": 1, + "syllabus_id": 1, + }, + ) data = {} response = self.client.put(url, data) json = response.json() expected = { - 'detail': "You (user: 1) don't have this capability: crud_syllabus " - 'for academy 1', - 'status_code': 403 + "detail": "You (user: 1) don't have this capability: crud_syllabus " "for academy 1", + "status_code": 403, } self.assertEqual(json, expected) @@ -125,15 +139,18 @@ def test_syllabus_id__put__without_capabilities(self): def test_syllabus_id__put__setting_slug_as_empty(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, profile_academy=True, capability='crud_syllabus', role='potato') - url = reverse_lazy('admissions:academy_id_syllabus_id', kwargs={ - 'academy_id': 1, - 'syllabus_id': 1, - }) - data = {'slug': ''} + model = self.generate_models(authenticate=True, profile_academy=True, capability="crud_syllabus", role="potato") + url = 
reverse_lazy( + "admissions:academy_id_syllabus_id", + kwargs={ + "academy_id": 1, + "syllabus_id": 1, + }, + ) + data = {"slug": ""} response = self.client.put(url, data) json = response.json() - expected = {'detail': 'empty-slug', 'status_code': 400} + expected = {"detail": "empty-slug", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -142,15 +159,18 @@ def test_syllabus_id__put__setting_slug_as_empty(self): def test_syllabus_id__put__setting_name_as_empty(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, profile_academy=True, capability='crud_syllabus', role='potato') - url = reverse_lazy('admissions:academy_id_syllabus_id', kwargs={ - 'academy_id': 1, - 'syllabus_id': 1, - }) - data = {'name': ''} + model = self.generate_models(authenticate=True, profile_academy=True, capability="crud_syllabus", role="potato") + url = reverse_lazy( + "admissions:academy_id_syllabus_id", + kwargs={ + "academy_id": 1, + "syllabus_id": 1, + }, + ) + data = {"name": ""} response = self.client.put(url, data) json = response.json() - expected = {'detail': 'empty-name', 'status_code': 400} + expected = {"detail": "empty-name", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -159,15 +179,18 @@ def test_syllabus_id__put__setting_name_as_empty(self): def test_syllabus_id__put__not_found(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, profile_academy=True, capability='crud_syllabus', role='potato') - url = reverse_lazy('admissions:academy_id_syllabus_id', kwargs={ - 'academy_id': 1, - 'syllabus_id': 1, - }) + model = self.generate_models(authenticate=True, profile_academy=True, capability="crud_syllabus", role="potato") + url = reverse_lazy( + "admissions:academy_id_syllabus_id", + kwargs={ + "academy_id": 1, + "syllabus_id": 1, + }, + ) data = {} response = self.client.put(url, data) json = response.json() - expected = {'detail': 'syllabus-not-found', 'status_code': 404} + expected = {"detail": "syllabus-not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) @@ -176,17 +199,19 @@ def test_syllabus_id__put__not_found(self): def test_syllabus_id__put__not_founds2(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_syllabus', - role='potato', - syllabus_schedule=True, - syllabus_schedule_time_slot=True) - url = reverse_lazy('admissions:academy_id_syllabus_id', kwargs={'academy_id': 1, 'syllabus_id': 1}) + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="crud_syllabus", + role="potato", + syllabus_schedule=True, + syllabus_schedule_time_slot=True, + ) + url = reverse_lazy("admissions:academy_id_syllabus_id", kwargs={"academy_id": 1, "syllabus_id": 1}) data = {} response = self.client.put(url, data) json = response.json() - expected = {'detail': 'syllabus-not-found', 'status_code': 404} + expected = {"detail": "syllabus-not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) @@ -195,66 +220,75 @@ def test_syllabus_id__put__not_founds2(self): def test_syllabus_id__put(self): """Test /certificate without auth""" self.headers(academy=1) - 
model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_syllabus', - role='potato', - syllabus=True, - syllabus_schedule=True, - syllabus_schedule_time_slot=True) - url = reverse_lazy('admissions:academy_id_syllabus_id', kwargs={'academy_id': 1, 'syllabus_id': 1}) + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="crud_syllabus", + role="potato", + syllabus=True, + syllabus_schedule=True, + syllabus_schedule_time_slot=True, + ) + url = reverse_lazy("admissions:academy_id_syllabus_id", kwargs={"academy_id": 1, "syllabus_id": 1}) data = {} response = self.client.put(url, data) json = response.json() expected = { - 'slug': model.syllabus.slug, - 'name': model.syllabus.name, - 'academy_owner': model.syllabus.academy_owner.id, - 'duration_in_days': model.syllabus.duration_in_days, - 'duration_in_hours': model.syllabus.duration_in_hours, - 'week_hours': model.syllabus.week_hours, - 'github_url': model.syllabus.github_url, - 'id': model.syllabus.id, - 'logo': model.syllabus.logo, - 'private': model.syllabus.private, + "slug": model.syllabus.slug, + "name": model.syllabus.name, + "academy_owner": model.syllabus.academy_owner.id, + "duration_in_days": model.syllabus.duration_in_days, + "duration_in_hours": model.syllabus.duration_in_hours, + "week_hours": model.syllabus.week_hours, + "github_url": model.syllabus.github_url, + "id": model.syllabus.id, + "logo": model.syllabus.logo, + "private": model.syllabus.private, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, 'syllabus')}]) + self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, "syllabus")}]) def test_syllabus_id__put__change_values(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_syllabus', - role='potato', - syllabus=True, - syllabus_schedule=True, - syllabus_schedule_time_slot=True) - url = reverse_lazy('admissions:academy_id_syllabus_id', kwargs={'academy_id': 1, 'syllabus_id': 1}) + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="crud_syllabus", + role="potato", + syllabus=True, + syllabus_schedule=True, + syllabus_schedule_time_slot=True, + ) + url = reverse_lazy("admissions:academy_id_syllabus_id", kwargs={"academy_id": 1, "syllabus_id": 1}) data = { - 'duration_in_days': 9, - 'duration_in_hours': 99, - 'week_hours': 999, - 'github_url': 'https://tierragamer.com/wp-content/uploads/2020/08/naruto-cosplay-konan.jpg', - 'logo': 'a', - 'private': not model.syllabus.private, + "duration_in_days": 9, + "duration_in_hours": 99, + "week_hours": 999, + "github_url": "https://tierragamer.com/wp-content/uploads/2020/08/naruto-cosplay-konan.jpg", + "logo": "a", + "private": not model.syllabus.private, } - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() expected = { - 'academy_owner': model.syllabus.academy_owner.id, - 'id': model.syllabus.id, - 'slug': model.syllabus.slug, - 'name': model.syllabus.name, + "academy_owner": model.syllabus.academy_owner.id, + "id": model.syllabus.id, + "slug": model.syllabus.slug, + "name": model.syllabus.name, **data, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_syllabus_dict(), [{ - 
**self.model_to_dict(model, 'syllabus'), - **data, - }]) + self.assertEqual( + self.all_syllabus_dict(), + [ + { + **self.model_to_dict(model, "syllabus"), + **data, + } + ], + ) diff --git a/breathecode/admissions/tests/urls/tests_academy_id_syllabus_id_version.py b/breathecode/admissions/tests/urls/tests_academy_id_syllabus_id_version.py index dc3b5b518..867788d1f 100644 --- a/breathecode/admissions/tests/urls/tests_academy_id_syllabus_id_version.py +++ b/breathecode/admissions/tests/urls/tests_academy_id_syllabus_id_version.py @@ -1,6 +1,7 @@ """ Test /certificate """ + import random from unittest.mock import MagicMock, patch from breathecode.services import datetime_to_iso_format @@ -28,23 +29,36 @@ def generate_syllabus_json(lesson_slug, quiz_slug=None, reply_slug=None, project n = random.randint(1, 10) return { - 'days': [{ - 'lessons': [{ - 'slug': lesson_slug, - }], - 'quizzes': [{ - 'slug': quiz_slug, - }], - 'replits': [{ - 'slug': reply_slug, - }], - 'projects': [{ - 'slug': project_slug, - }], - 'assignments': [{ - 'slug': assignment_slug, - }], - } for _ in range(n)] + "days": [ + { + "lessons": [ + { + "slug": lesson_slug, + } + ], + "quizzes": [ + { + "slug": quiz_slug, + } + ], + "replits": [ + { + "slug": reply_slug, + } + ], + "projects": [ + { + "slug": project_slug, + } + ], + "assignments": [ + { + "slug": assignment_slug, + } + ], + } + for _ in range(n) + ] } @@ -54,14 +68,14 @@ class CertificateTestSuite(AdmissionsTestCase): def test_academy_id_syllabus_id_version_without_auth(self): """Test /certificate without auth""" self.headers(academy=1) - url = reverse_lazy('admissions:academy_id_syllabus_id_version', kwargs={'academy_id': 1, 'syllabus_id': '1'}) + url = reverse_lazy("admissions:academy_id_syllabus_id_version", kwargs={"academy_id": 1, "syllabus_id": "1"}) response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED - }) + self.assertEqual( + json, + {"detail": "Authentication credentials were not provided.", "status_code": status.HTTP_401_UNAUTHORIZED}, + ) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) self.assertEqual(self.all_syllabus_schedule_dict(), []) self.assertEqual(self.all_cohort_time_slot_dict(), []) @@ -69,14 +83,13 @@ def test_academy_id_syllabus_id_version_without_auth(self): def test_academy_id_syllabus_id_version_without_capability(self): """Test /certificate without auth""" self.headers(academy=1) - url = reverse_lazy('admissions:academy_id_syllabus_id_version', kwargs={'academy_id': 1, 'syllabus_id': '1'}) + url = reverse_lazy("admissions:academy_id_syllabus_id_version", kwargs={"academy_id": 1, "syllabus_id": "1"}) self.generate_models(authenticate=True) response = self.client.get(url) json = response.json() expected = { - 'status_code': 403, - 'detail': 'You (user: 1) don\'t have this capability: read_syllabus ' - 'for academy 1' + "status_code": 403, + "detail": "You (user: 1) don't have this capability: read_syllabus " "for academy 1", } self.assertEqual(json, expected) @@ -87,12 +100,10 @@ def test_academy_id_syllabus_id_version_without_capability(self): def test_academy_id_syllabus_id_version_without_syllabus(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - syllabus_schedule=True, - profile_academy=True, - capability='read_syllabus', - role='potato') - url = reverse_lazy('admissions:academy_id_syllabus_id_version', 
kwargs={'academy_id': 1, 'syllabus_id': 1}) + model = self.generate_models( + authenticate=True, syllabus_schedule=True, profile_academy=True, capability="read_syllabus", role="potato" + ) + url = reverse_lazy("admissions:academy_id_syllabus_id_version", kwargs={"academy_id": 1, "syllabus_id": 1}) response = self.client.get(url) json = response.json() expected = [] @@ -104,74 +115,86 @@ def test_academy_id_syllabus_id_version_without_syllabus(self): self.assertEqual(self.all_syllabus_version_dict(), []) self.assertEqual(self.all_cohort_time_slot_dict(), []) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_academy_id_syllabus_id_version(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - syllabus_schedule=True, - profile_academy=True, - capability='read_syllabus', - role='potato', - syllabus=True, - syllabus_version=True) - url = reverse_lazy('admissions:academy_id_syllabus_id_version', kwargs={ - 'academy_id': 1, - 'syllabus_id': 1, - }) + model = self.generate_models( + authenticate=True, + syllabus_schedule=True, + profile_academy=True, + capability="read_syllabus", + role="potato", + syllabus=True, + syllabus_version=True, + ) + url = reverse_lazy( + "admissions:academy_id_syllabus_id_version", + kwargs={ + "academy_id": 1, + "syllabus_id": 1, + }, + ) response = self.client.get(url) json = response.json() - expected = [{ - 'json': model['syllabus_version'].json, - 'created_at': datetime_to_iso_format(model['syllabus_version'].created_at), - 'updated_at': datetime_to_iso_format(model['syllabus_version'].updated_at), - 'name': model.syllabus.name, - 'slug': model['syllabus'].slug, - 'syllabus': 1, - 'academy_owner': { - 'id': model['syllabus'].academy_owner.id, - 'name': model['syllabus'].academy_owner.name, - 'slug': model['syllabus'].academy_owner.slug, - 'white_labeled': model['syllabus'].academy_owner.white_labeled, - 'icon_url': model['syllabus'].academy_owner.icon_url, - 'available_as_saas': model['syllabus'].academy_owner.available_as_saas, - }, - 'version': model['syllabus_version'].version, - 'duration_in_days': model.syllabus.duration_in_days, - 'duration_in_hours': model.syllabus.duration_in_hours, - 'github_url': model.syllabus.github_url, - 'logo': model.syllabus.logo, - 'private': model.syllabus.private, - 'week_hours': model.syllabus.week_hours, - 'main_technologies': None, - 'change_log_details': None, - 'status': 'PUBLISHED', - }] + expected = [ + { + "json": model["syllabus_version"].json, + "created_at": datetime_to_iso_format(model["syllabus_version"].created_at), + "updated_at": datetime_to_iso_format(model["syllabus_version"].updated_at), + "name": model.syllabus.name, + "slug": model["syllabus"].slug, + "syllabus": 1, + "academy_owner": { + "id": model["syllabus"].academy_owner.id, + "name": model["syllabus"].academy_owner.name, + "slug": model["syllabus"].academy_owner.slug, + "white_labeled": model["syllabus"].academy_owner.white_labeled, + "icon_url": model["syllabus"].academy_owner.icon_url, + "available_as_saas": model["syllabus"].academy_owner.available_as_saas, + }, + "version": model["syllabus_version"].version, + "duration_in_days": model.syllabus.duration_in_days, + "duration_in_hours": model.syllabus.duration_in_hours, + "github_url": model.syllabus.github_url, + "logo": model.syllabus.logo, + "private": model.syllabus.private, + "week_hours": model.syllabus.week_hours, + 
"main_technologies": None, + "change_log_details": None, + "status": "PUBLISHED", + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, 'syllabus')}]) - self.assertEqual(self.all_syllabus_version_dict(), [{**self.model_to_dict(model, 'syllabus_version')}]) + self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, "syllabus")}]) + self.assertEqual(self.all_syllabus_version_dict(), [{**self.model_to_dict(model, "syllabus_version")}]) self.assertEqual(self.all_cohort_time_slot_dict(), []) def test_syllabus_id_version__post__bad_syllabus_id(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - syllabus_schedule=True, - profile_academy=True, - capability='crud_syllabus', - role='potato', - syllabus=True) - url = reverse_lazy('admissions:academy_id_syllabus_id_version', kwargs={ - 'academy_id': 1, - 'syllabus_id': 9999, - }) + model = self.generate_models( + authenticate=True, + syllabus_schedule=True, + profile_academy=True, + capability="crud_syllabus", + role="potato", + syllabus=True, + ) + url = reverse_lazy( + "admissions:academy_id_syllabus_id_version", + kwargs={ + "academy_id": 1, + "syllabus_id": 9999, + }, + ) data = {} - response = self.client.post(url, data, format='json') + response = self.client.post(url, data, format="json") json = response.json() - expected = {'detail': 'syllabus-not-found', 'status_code': 404} + expected = {"detail": "syllabus-not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) @@ -180,20 +203,25 @@ def test_syllabus_id_version__post__bad_syllabus_id(self): def test_syllabus_id_version__post__without_json_field(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - syllabus_schedule=True, - profile_academy=True, - capability='crud_syllabus', - role='potato', - syllabus=True) - url = reverse_lazy('admissions:academy_id_syllabus_id_version', kwargs={ - 'academy_id': 1, - 'syllabus_id': 1, - }) + model = self.generate_models( + authenticate=True, + syllabus_schedule=True, + profile_academy=True, + capability="crud_syllabus", + role="potato", + syllabus=True, + ) + url = reverse_lazy( + "admissions:academy_id_syllabus_id_version", + kwargs={ + "academy_id": 1, + "syllabus_id": 1, + }, + ) data = {} - response = self.client.post(url, data, format='json') + response = self.client.post(url, data, format="json") json = response.json() - expected = {'json': ['This field is required.']} + expected = {"json": ["This field is required."]} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -203,84 +231,103 @@ def test_syllabus_id_version__post(self): """Test /certificate without auth""" self.headers(academy=1) slug = self.bc.fake.slug() - asset_alias = {'slug': slug} - model = self.generate_models(authenticate=True, - syllabus_schedule=True, - profile_academy=True, - capability='crud_syllabus', - role='potato', - asset_alias=asset_alias, - syllabus=True) - url = reverse_lazy('admissions:academy_id_syllabus_id_version', kwargs={ - 'academy_id': 1, - 'syllabus_id': 1, - }) - data = {'json': generate_syllabus_json(slug)} - response = self.client.post(url, data, format='json') + asset_alias = {"slug": slug} + model = self.generate_models( + authenticate=True, + syllabus_schedule=True, 
+ profile_academy=True, + capability="crud_syllabus", + role="potato", + asset_alias=asset_alias, + syllabus=True, + ) + url = reverse_lazy( + "admissions:academy_id_syllabus_id_version", + kwargs={ + "academy_id": 1, + "syllabus_id": 1, + }, + ) + data = {"json": generate_syllabus_json(slug)} + response = self.client.post(url, data, format="json") json = response.json() expected = { - 'syllabus': 1, - 'change_log_details': None, - 'status': 'PUBLISHED', - 'version': 1, + "syllabus": 1, + "change_log_details": None, + "status": "PUBLISHED", + "version": 1, **data, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - self.assertEqual(self.all_syllabus_version_dict(), [{ - 'id': 1, - 'integrity_check_at': None, - 'integrity_report': None, - 'integrity_status': 'PENDING', - 'change_log_details': None, - 'status': 'PUBLISHED', - 'json': {}, - 'syllabus_id': 1, - 'version': 1, - **data, - }]) + self.assertEqual( + self.all_syllabus_version_dict(), + [ + { + "id": 1, + "integrity_check_at": None, + "integrity_report": None, + "integrity_status": "PENDING", + "change_log_details": None, + "status": "PUBLISHED", + "json": {}, + "syllabus_id": 1, + "version": 1, + **data, + } + ], + ) def test_syllabus_id_version__post__autoincrement_version(self): """Test /certificate without auth""" self.headers(academy=1) slug = self.bc.fake.slug() - asset_alias = {'slug': slug} - model = self.generate_models(authenticate=True, - syllabus_schedule=True, - profile_academy=True, - capability='crud_syllabus', - role='potato', - syllabus=True, - asset_alias=asset_alias, - syllabus_version=True) - url = reverse_lazy('admissions:academy_id_syllabus_id_version', kwargs={ - 'academy_id': 1, - 'syllabus_id': 1, - }) - data = {'json': generate_syllabus_json(slug)} - response = self.client.post(url, data, format='json') + asset_alias = {"slug": slug} + model = self.generate_models( + authenticate=True, + syllabus_schedule=True, + profile_academy=True, + capability="crud_syllabus", + role="potato", + syllabus=True, + asset_alias=asset_alias, + syllabus_version=True, + ) + url = reverse_lazy( + "admissions:academy_id_syllabus_id_version", + kwargs={ + "academy_id": 1, + "syllabus_id": 1, + }, + ) + data = {"json": generate_syllabus_json(slug)} + response = self.client.post(url, data, format="json") json = response.json() expected = { - 'syllabus': 1, - 'change_log_details': None, - 'status': 'PUBLISHED', - 'version': model.syllabus_version.version + 1, + "syllabus": 1, + "change_log_details": None, + "status": "PUBLISHED", + "version": model.syllabus_version.version + 1, **data, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - self.assertEqual(self.all_syllabus_version_dict(), [{ - **self.model_to_dict(model, 'syllabus_version') - }, { - 'id': 2, - 'integrity_check_at': None, - 'integrity_report': None, - 'integrity_status': 'PENDING', - 'change_log_details': None, - 'status': 'PUBLISHED', - 'syllabus_id': 1, - 'version': model.syllabus_version.version + 1, - **data, - }]) + self.assertEqual( + self.all_syllabus_version_dict(), + [ + {**self.model_to_dict(model, "syllabus_version")}, + { + "id": 2, + "integrity_check_at": None, + "integrity_report": None, + "integrity_status": "PENDING", + "change_log_details": None, + "status": "PUBLISHED", + "syllabus_id": 1, + "version": model.syllabus_version.version + 1, + **data, + }, + ], + ) diff --git a/breathecode/admissions/tests/urls/tests_academy_id_syllabus_id_version_version.py 
b/breathecode/admissions/tests/urls/tests_academy_id_syllabus_id_version_version.py index 177852cec..d96ff7d3d 100644 --- a/breathecode/admissions/tests/urls/tests_academy_id_syllabus_id_version_version.py +++ b/breathecode/admissions/tests/urls/tests_academy_id_syllabus_id_version_version.py @@ -1,6 +1,7 @@ """ Test /certificate """ + from breathecode.services import datetime_to_iso_format from django.urls.base import reverse_lazy from rest_framework import status @@ -18,19 +19,17 @@ class CertificateTestSuite(AdmissionsTestCase): def test_academy_id_syllabus_id_version_version_without_auth(self): """Test /certificate without auth""" self.headers(academy=1) - url = reverse_lazy('admissions:academy_id_syllabus_id_version_version', - kwargs={ - 'academy_id': 1, - 'syllabus_id': 1, - 'version': 1 - }) + url = reverse_lazy( + "admissions:academy_id_syllabus_id_version_version", + kwargs={"academy_id": 1, "syllabus_id": 1, "version": 1}, + ) response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED - }) + self.assertEqual( + json, + {"detail": "Authentication credentials were not provided.", "status_code": status.HTTP_401_UNAUTHORIZED}, + ) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) self.assertEqual(self.all_syllabus_schedule_dict(), []) self.assertEqual(self.all_cohort_time_slot_dict(), []) @@ -38,19 +37,16 @@ def test_academy_id_syllabus_id_version_version_without_auth(self): def test_academy_id_syllabus_id_version_version_without_capability(self): """Test /certificate without auth""" self.headers(academy=1) - url = reverse_lazy('admissions:academy_id_syllabus_id_version_version', - kwargs={ - 'academy_id': 1, - 'syllabus_id': 1, - 'version': 1 - }) + url = reverse_lazy( + "admissions:academy_id_syllabus_id_version_version", + kwargs={"academy_id": 1, "syllabus_id": 1, "version": 1}, + ) self.generate_models(authenticate=True) response = self.client.get(url) json = response.json() expected = { - 'status_code': 403, - 'detail': 'You (user: 1) don\'t have this capability: read_syllabus ' - 'for academy 1' + "status_code": 403, + "detail": "You (user: 1) don't have this capability: read_syllabus " "for academy 1", } self.assertEqual(json, expected) @@ -61,20 +57,16 @@ def test_academy_id_syllabus_id_version_version_without_capability(self): def test_academy_id_syllabus_id_version_version_without_syllabus(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - syllabus_schedule=True, - profile_academy=True, - capability='read_syllabus', - role='potato') - url = reverse_lazy('admissions:academy_id_syllabus_id_version_version', - kwargs={ - 'academy_id': 1, - 'syllabus_id': 1, - 'version': 1 - }) + model = self.generate_models( + authenticate=True, syllabus_schedule=True, profile_academy=True, capability="read_syllabus", role="potato" + ) + url = reverse_lazy( + "admissions:academy_id_syllabus_id_version_version", + kwargs={"academy_id": 1, "syllabus_id": 1, "version": 1}, + ) response = self.client.get(url) json = response.json() - expected = {'detail': 'syllabus-version-not-found', 'status_code': 404} + expected = {"detail": "syllabus-version-not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) @@ -83,55 +75,55 @@ def test_academy_id_syllabus_id_version_version_without_syllabus(self): 
self.assertEqual(self.all_syllabus_version_dict(), []) self.assertEqual(self.all_cohort_time_slot_dict(), []) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_academy_id_syllabus_id_version_version(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - syllabus_schedule=True, - profile_academy=True, - capability='read_syllabus', - role='potato', - syllabus=True, - syllabus_version=True) - url = reverse_lazy('admissions:academy_id_syllabus_id_version_version', - kwargs={ - 'academy_id': 1, - 'syllabus_id': 1, - 'version': model.syllabus_version.version - }) + model = self.generate_models( + authenticate=True, + syllabus_schedule=True, + profile_academy=True, + capability="read_syllabus", + role="potato", + syllabus=True, + syllabus_version=True, + ) + url = reverse_lazy( + "admissions:academy_id_syllabus_id_version_version", + kwargs={"academy_id": 1, "syllabus_id": 1, "version": model.syllabus_version.version}, + ) response = self.client.get(url) json = response.json() expected = { - 'json': model['syllabus_version'].json, - 'created_at': datetime_to_iso_format(model['syllabus_version'].created_at), - 'updated_at': datetime_to_iso_format(model['syllabus_version'].updated_at), - 'name': model.syllabus.name, - 'slug': model['syllabus'].slug, - 'syllabus': 1, - 'academy_owner': { - 'id': model['syllabus'].academy_owner.id, - 'name': model['syllabus'].academy_owner.name, - 'slug': model['syllabus'].academy_owner.slug, - 'white_labeled': model['syllabus'].academy_owner.white_labeled, - 'icon_url': model['syllabus'].academy_owner.icon_url, - 'available_as_saas': model['syllabus'].academy_owner.available_as_saas, + "json": model["syllabus_version"].json, + "created_at": datetime_to_iso_format(model["syllabus_version"].created_at), + "updated_at": datetime_to_iso_format(model["syllabus_version"].updated_at), + "name": model.syllabus.name, + "slug": model["syllabus"].slug, + "syllabus": 1, + "academy_owner": { + "id": model["syllabus"].academy_owner.id, + "name": model["syllabus"].academy_owner.name, + "slug": model["syllabus"].academy_owner.slug, + "white_labeled": model["syllabus"].academy_owner.white_labeled, + "icon_url": model["syllabus"].academy_owner.icon_url, + "available_as_saas": model["syllabus"].academy_owner.available_as_saas, }, - 'version': model['syllabus_version'].version, - 'duration_in_days': model.syllabus.duration_in_days, - 'duration_in_hours': model.syllabus.duration_in_hours, - 'github_url': model.syllabus.github_url, - 'logo': model.syllabus.logo, - 'change_log_details': model.syllabus_version.change_log_details, - 'status': model.syllabus_version.status, - 'private': model.syllabus.private, - 'week_hours': model.syllabus.week_hours, - 'main_technologies': None, + "version": model["syllabus_version"].version, + "duration_in_days": model.syllabus.duration_in_days, + "duration_in_hours": model.syllabus.duration_in_hours, + "github_url": model.syllabus.github_url, + "logo": model.syllabus.logo, + "change_log_details": model.syllabus_version.change_log_details, + "status": model.syllabus_version.status, + "private": model.syllabus.private, + "week_hours": model.syllabus.week_hours, + "main_technologies": None, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, 'syllabus')}]) - 
self.assertEqual(self.all_syllabus_version_dict(), [{**self.model_to_dict(model, 'syllabus_version')}]) + self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, "syllabus")}]) + self.assertEqual(self.all_syllabus_version_dict(), [{**self.model_to_dict(model, "syllabus_version")}]) self.assertEqual(self.all_cohort_time_slot_dict(), []) diff --git a/breathecode/admissions/tests/urls/tests_academy_id_syllabus_slug.py b/breathecode/admissions/tests/urls/tests_academy_id_syllabus_slug.py index 08a277109..9e59e3dba 100644 --- a/breathecode/admissions/tests/urls/tests_academy_id_syllabus_slug.py +++ b/breathecode/admissions/tests/urls/tests_academy_id_syllabus_slug.py @@ -1,6 +1,7 @@ """ Test /certificate """ + from django.urls.base import reverse_lazy from rest_framework import status from ..mixins import AdmissionsTestCase @@ -11,35 +12,39 @@ class CertificateTestSuite(AdmissionsTestCase): def test_syllabus_id_without_auth(self): """Test /certificate without auth""" - url = reverse_lazy('admissions:academy_id_syllabus_slug', - kwargs={ - 'academy_id': 1, - 'syllabus_slug': 'they_killed_kenny', - }) + url = reverse_lazy( + "admissions:academy_id_syllabus_slug", + kwargs={ + "academy_id": 1, + "syllabus_slug": "they_killed_kenny", + }, + ) response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED - }) + self.assertEqual( + json, + {"detail": "Authentication credentials were not provided.", "status_code": status.HTTP_401_UNAUTHORIZED}, + ) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) self.assertEqual(self.all_syllabus_schedule_dict(), []) def test_syllabus_id_without_capability(self): """Test /certificate without auth""" self.headers(academy=1) - url = reverse_lazy('admissions:academy_id_syllabus_slug', - kwargs={ - 'academy_id': 1, - 'syllabus_slug': 'they_killed_kenny', - }) + url = reverse_lazy( + "admissions:academy_id_syllabus_slug", + kwargs={ + "academy_id": 1, + "syllabus_slug": "they_killed_kenny", + }, + ) self.generate_models(authenticate=True) response = self.client.get(url) json = response.json() expected = { - 'status_code': 403, - 'detail': "You (user: 1) don't have this capability: read_syllabus for academy 1" + "status_code": 403, + "detail": "You (user: 1) don't have this capability: read_syllabus for academy 1", } self.assertEqual(json, expected) @@ -49,15 +54,17 @@ def test_syllabus_id_without_capability(self): def test_syllabus_id_without_data(self): """Test /certificate without auth""" self.headers(academy=1) - url = reverse_lazy('admissions:academy_id_syllabus_slug', - kwargs={ - 'academy_id': 1, - 'syllabus_slug': 'they_killed_kenny', - }) - model = self.generate_models(authenticate=True, profile_academy=True, capability='read_syllabus', role='potato') + url = reverse_lazy( + "admissions:academy_id_syllabus_slug", + kwargs={ + "academy_id": 1, + "syllabus_slug": "they_killed_kenny", + }, + ) + model = self.generate_models(authenticate=True, profile_academy=True, capability="read_syllabus", role="potato") response = self.client.get(url) json = response.json() - expected = {'status_code': 404, 'detail': 'syllabus-not-found'} + expected = {"status_code": 404, "detail": "syllabus-not-found"} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) @@ -66,66 +73,67 @@ def test_syllabus_id_without_data(self): def test_syllabus_id(self): """Test /certificate without 
auth""" self.headers(academy=1) - syllabus_kwargs = {'slug': 'they-killed-kenny'} - model = self.generate_models(authenticate=True, - syllabus_schedule=True, - profile_academy=True, - capability='read_syllabus', - role='potato', - syllabus_version=True, - syllabus=True, - syllabus_kwargs=syllabus_kwargs) - url = reverse_lazy('admissions:academy_id_syllabus_slug', - kwargs={ - 'academy_id': 1, - 'syllabus_slug': 'they-killed-kenny' - }) + syllabus_kwargs = {"slug": "they-killed-kenny"} + model = self.generate_models( + authenticate=True, + syllabus_schedule=True, + profile_academy=True, + capability="read_syllabus", + role="potato", + syllabus_version=True, + syllabus=True, + syllabus_kwargs=syllabus_kwargs, + ) + url = reverse_lazy( + "admissions:academy_id_syllabus_slug", kwargs={"academy_id": 1, "syllabus_slug": "they-killed-kenny"} + ) response = self.client.get(url) json = response.json() expected = { - 'slug': model.syllabus.slug, - 'name': model.syllabus.name, - 'academy_owner': { - 'id': model.syllabus.academy_owner.id, - 'name': model.syllabus.academy_owner.name, - 'slug': model.syllabus.academy_owner.slug, - 'white_labeled': model.syllabus.academy_owner.white_labeled, - 'icon_url': model.syllabus.academy_owner.icon_url, - 'available_as_saas': model.syllabus.academy_owner.available_as_saas, + "slug": model.syllabus.slug, + "name": model.syllabus.name, + "academy_owner": { + "id": model.syllabus.academy_owner.id, + "name": model.syllabus.academy_owner.name, + "slug": model.syllabus.academy_owner.slug, + "white_labeled": model.syllabus.academy_owner.white_labeled, + "icon_url": model.syllabus.academy_owner.icon_url, + "available_as_saas": model.syllabus.academy_owner.available_as_saas, }, - 'duration_in_days': model.syllabus.duration_in_days, - 'duration_in_hours': model.syllabus.duration_in_hours, - 'week_hours': model.syllabus.week_hours, - 'github_url': model.syllabus.github_url, - 'id': model.syllabus.id, - 'logo': model.syllabus.logo, - 'private': model.syllabus.private, - 'created_at': self.datetime_to_iso(model.syllabus.created_at), - 'updated_at': self.datetime_to_iso(model.syllabus.updated_at), - 'main_technologies': None, + "duration_in_days": model.syllabus.duration_in_days, + "duration_in_hours": model.syllabus.duration_in_hours, + "week_hours": model.syllabus.week_hours, + "github_url": model.syllabus.github_url, + "id": model.syllabus.id, + "logo": model.syllabus.logo, + "private": model.syllabus.private, + "created_at": self.datetime_to_iso(model.syllabus.created_at), + "updated_at": self.datetime_to_iso(model.syllabus.updated_at), + "main_technologies": None, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, 'syllabus')}]) + self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, "syllabus")}]) def test_syllabus_id__put__without_capabilities(self): """Test /certificate without auth""" self.headers(academy=1) model = self.generate_models(authenticate=True) - url = reverse_lazy('admissions:academy_id_syllabus_slug', - kwargs={ - 'academy_id': 1, - 'syllabus_slug': 'they_killed_kenny', - }) + url = reverse_lazy( + "admissions:academy_id_syllabus_slug", + kwargs={ + "academy_id": 1, + "syllabus_slug": "they_killed_kenny", + }, + ) data = {} response = self.client.put(url, data) json = response.json() expected = { - 'detail': "You (user: 1) don't have this capability: crud_syllabus " - 'for academy 1', - 'status_code': 403 + "detail": "You 
(user: 1) don't have this capability: crud_syllabus " "for academy 1", + "status_code": 403, } self.assertEqual(json, expected) @@ -135,16 +143,18 @@ def test_syllabus_id__put__without_capabilities(self): def test_syllabus_id__put__setting_slug_as_empty(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, profile_academy=True, capability='crud_syllabus', role='potato') - url = reverse_lazy('admissions:academy_id_syllabus_slug', - kwargs={ - 'academy_id': 1, - 'syllabus_slug': 'they_killed_kenny', - }) - data = {'slug': ''} + model = self.generate_models(authenticate=True, profile_academy=True, capability="crud_syllabus", role="potato") + url = reverse_lazy( + "admissions:academy_id_syllabus_slug", + kwargs={ + "academy_id": 1, + "syllabus_slug": "they_killed_kenny", + }, + ) + data = {"slug": ""} response = self.client.put(url, data) json = response.json() - expected = {'detail': 'empty-slug', 'status_code': 400} + expected = {"detail": "empty-slug", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -153,16 +163,18 @@ def test_syllabus_id__put__setting_slug_as_empty(self): def test_syllabus_id__put__setting_name_as_empty(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, profile_academy=True, capability='crud_syllabus', role='potato') - url = reverse_lazy('admissions:academy_id_syllabus_slug', - kwargs={ - 'academy_id': 1, - 'syllabus_slug': 'they_killed_kenny', - }) - data = {'name': ''} + model = self.generate_models(authenticate=True, profile_academy=True, capability="crud_syllabus", role="potato") + url = reverse_lazy( + "admissions:academy_id_syllabus_slug", + kwargs={ + "academy_id": 1, + "syllabus_slug": "they_killed_kenny", + }, + ) + data = {"name": ""} response = self.client.put(url, data) json = response.json() - expected = {'detail': 'empty-name', 'status_code': 400} + expected = {"detail": "empty-name", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -171,16 +183,18 @@ def test_syllabus_id__put__setting_name_as_empty(self): def test_syllabus_id__put__not_found(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, profile_academy=True, capability='crud_syllabus', role='potato') - url = reverse_lazy('admissions:academy_id_syllabus_slug', - kwargs={ - 'academy_id': 1, - 'syllabus_slug': 'they_killed_kenny', - }) + model = self.generate_models(authenticate=True, profile_academy=True, capability="crud_syllabus", role="potato") + url = reverse_lazy( + "admissions:academy_id_syllabus_slug", + kwargs={ + "academy_id": 1, + "syllabus_slug": "they_killed_kenny", + }, + ) data = {} response = self.client.put(url, data) json = response.json() - expected = {'detail': 'syllabus-not-found', 'status_code': 404} + expected = {"detail": "syllabus-not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) @@ -189,21 +203,21 @@ def test_syllabus_id__put__not_found(self): def test_syllabus_id__put__not_founds2(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_syllabus', - role='potato', - syllabus_schedule=True, - syllabus_schedule_time_slot=True) - url = 
reverse_lazy('admissions:academy_id_syllabus_slug', - kwargs={ - 'academy_id': 1, - 'syllabus_slug': 'they-killed-kenny' - }) + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="crud_syllabus", + role="potato", + syllabus_schedule=True, + syllabus_schedule_time_slot=True, + ) + url = reverse_lazy( + "admissions:academy_id_syllabus_slug", kwargs={"academy_id": 1, "syllabus_slug": "they-killed-kenny"} + ) data = {} response = self.client.put(url, data) json = response.json() - expected = {'detail': 'syllabus-not-found', 'status_code': 404} + expected = {"detail": "syllabus-not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) @@ -212,78 +226,83 @@ def test_syllabus_id__put__not_founds2(self): def test_syllabus_id__put(self): """Test /certificate without auth""" self.headers(academy=1) - syllabus_kwargs = {'slug': 'they-killed-kenny'} - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_syllabus', - role='potato', - syllabus=True, - syllabus_schedule=True, - syllabus_schedule_time_slot=True, - syllabus_kwargs=syllabus_kwargs) - url = reverse_lazy('admissions:academy_id_syllabus_slug', - kwargs={ - 'academy_id': 1, - 'syllabus_slug': 'they-killed-kenny' - }) + syllabus_kwargs = {"slug": "they-killed-kenny"} + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="crud_syllabus", + role="potato", + syllabus=True, + syllabus_schedule=True, + syllabus_schedule_time_slot=True, + syllabus_kwargs=syllabus_kwargs, + ) + url = reverse_lazy( + "admissions:academy_id_syllabus_slug", kwargs={"academy_id": 1, "syllabus_slug": "they-killed-kenny"} + ) data = {} response = self.client.put(url, data) json = response.json() expected = { - 'slug': model.syllabus.slug, - 'name': model.syllabus.name, - 'academy_owner': model.syllabus.academy_owner.id, - 'duration_in_days': model.syllabus.duration_in_days, - 'duration_in_hours': model.syllabus.duration_in_hours, - 'week_hours': model.syllabus.week_hours, - 'github_url': model.syllabus.github_url, - 'id': model.syllabus.id, - 'logo': model.syllabus.logo, - 'private': model.syllabus.private, + "slug": model.syllabus.slug, + "name": model.syllabus.name, + "academy_owner": model.syllabus.academy_owner.id, + "duration_in_days": model.syllabus.duration_in_days, + "duration_in_hours": model.syllabus.duration_in_hours, + "week_hours": model.syllabus.week_hours, + "github_url": model.syllabus.github_url, + "id": model.syllabus.id, + "logo": model.syllabus.logo, + "private": model.syllabus.private, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, 'syllabus')}]) + self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, "syllabus")}]) def test_syllabus_id__put__change_values(self): """Test /certificate without auth""" self.headers(academy=1) - syllabus_kwargs = {'slug': 'they-killed-kenny'} - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_syllabus', - role='potato', - syllabus=True, - syllabus_schedule=True, - syllabus_schedule_time_slot=True, - syllabus_kwargs=syllabus_kwargs) - url = reverse_lazy('admissions:academy_id_syllabus_slug', - kwargs={ - 'academy_id': 1, - 'syllabus_slug': 'they-killed-kenny' - }) + syllabus_kwargs = {"slug": "they-killed-kenny"} + model = self.generate_models( + authenticate=True, 
+ profile_academy=True, + capability="crud_syllabus", + role="potato", + syllabus=True, + syllabus_schedule=True, + syllabus_schedule_time_slot=True, + syllabus_kwargs=syllabus_kwargs, + ) + url = reverse_lazy( + "admissions:academy_id_syllabus_slug", kwargs={"academy_id": 1, "syllabus_slug": "they-killed-kenny"} + ) data = { - 'duration_in_days': 9, - 'duration_in_hours': 99, - 'week_hours': 999, - 'github_url': 'https://tierragamer.com/wp-content/uploads/2020/08/naruto-cosplay-konan.jpg', - 'logo': 'a', - 'private': not model.syllabus.private, + "duration_in_days": 9, + "duration_in_hours": 99, + "week_hours": 999, + "github_url": "https://tierragamer.com/wp-content/uploads/2020/08/naruto-cosplay-konan.jpg", + "logo": "a", + "private": not model.syllabus.private, } - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() expected = { - 'academy_owner': model.syllabus.academy_owner.id, - 'id': model.syllabus.id, - 'slug': model.syllabus.slug, - 'name': model.syllabus.name, + "academy_owner": model.syllabus.academy_owner.id, + "id": model.syllabus.id, + "slug": model.syllabus.slug, + "name": model.syllabus.name, **data, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_syllabus_dict(), [{ - **self.model_to_dict(model, 'syllabus'), - **data, - }]) + self.assertEqual( + self.all_syllabus_dict(), + [ + { + **self.model_to_dict(model, "syllabus"), + **data, + } + ], + ) diff --git a/breathecode/admissions/tests/urls/tests_academy_id_syllabus_slug_version.py b/breathecode/admissions/tests/urls/tests_academy_id_syllabus_slug_version.py index a6f733631..9cd228cd9 100644 --- a/breathecode/admissions/tests/urls/tests_academy_id_syllabus_slug_version.py +++ b/breathecode/admissions/tests/urls/tests_academy_id_syllabus_slug_version.py @@ -1,6 +1,7 @@ """ Test /certificate """ + import random from unittest.mock import patch, MagicMock from breathecode.services import datetime_to_iso_format @@ -29,23 +30,36 @@ def generate_syllabus_json(lesson_slug, quiz_slug=None, reply_slug=None, project n = random.randint(1, 10) return { - 'days': [{ - 'lessons': [{ - 'slug': lesson_slug, - }], - 'quizzes': [{ - 'slug': quiz_slug, - }], - 'replits': [{ - 'slug': reply_slug, - }], - 'projects': [{ - 'slug': project_slug, - }], - 'assignments': [{ - 'slug': assignment_slug, - }], - } for _ in range(n)] + "days": [ + { + "lessons": [ + { + "slug": lesson_slug, + } + ], + "quizzes": [ + { + "slug": quiz_slug, + } + ], + "replits": [ + { + "slug": reply_slug, + } + ], + "projects": [ + { + "slug": project_slug, + } + ], + "assignments": [ + { + "slug": assignment_slug, + } + ], + } + for _ in range(n) + ] } @@ -55,18 +69,17 @@ class CertificateTestSuite(AdmissionsTestCase): def test_academy_id_syllabus_slug_version_without_auth(self): """Test /certificate without auth""" self.headers(academy=1) - url = reverse_lazy('admissions:academy_id_syllabus_slug_version', - kwargs={ - 'academy_id': 1, - 'syllabus_slug': 'they-killed-kenny' - }) + url = reverse_lazy( + "admissions:academy_id_syllabus_slug_version", + kwargs={"academy_id": 1, "syllabus_slug": "they-killed-kenny"}, + ) response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED - }) + self.assertEqual( + json, + {"detail": "Authentication credentials were not provided.", "status_code": 
status.HTTP_401_UNAUTHORIZED}, + ) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) self.assertEqual(self.all_syllabus_schedule_dict(), []) self.assertEqual(self.all_cohort_time_slot_dict(), []) @@ -74,18 +87,16 @@ def test_academy_id_syllabus_slug_version_without_auth(self): def test_academy_id_syllabus_slug_version_without_capability(self): """Test /certificate without auth""" self.headers(academy=1) - url = reverse_lazy('admissions:academy_id_syllabus_slug_version', - kwargs={ - 'academy_id': 1, - 'syllabus_slug': 'they-killed-kenny' - }) + url = reverse_lazy( + "admissions:academy_id_syllabus_slug_version", + kwargs={"academy_id": 1, "syllabus_slug": "they-killed-kenny"}, + ) self.generate_models(authenticate=True) response = self.client.get(url) json = response.json() expected = { - 'status_code': 403, - 'detail': 'You (user: 1) don\'t have this capability: read_syllabus ' - 'for academy 1' + "status_code": 403, + "detail": "You (user: 1) don't have this capability: read_syllabus " "for academy 1", } self.assertEqual(json, expected) @@ -96,16 +107,13 @@ def test_academy_id_syllabus_slug_version_without_capability(self): def test_academy_id_syllabus_slug_version_without_syllabus(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - syllabus_schedule=True, - profile_academy=True, - capability='read_syllabus', - role='potato') - url = reverse_lazy('admissions:academy_id_syllabus_slug_version', - kwargs={ - 'academy_id': 1, - 'syllabus_slug': 'they-killed-kenny' - }) + model = self.generate_models( + authenticate=True, syllabus_schedule=True, profile_academy=True, capability="read_syllabus", role="potato" + ) + url = reverse_lazy( + "admissions:academy_id_syllabus_slug_version", + kwargs={"academy_id": 1, "syllabus_slug": "they-killed-kenny"}, + ) response = self.client.get(url) json = response.json() expected = [] @@ -117,78 +125,88 @@ def test_academy_id_syllabus_slug_version_without_syllabus(self): self.assertEqual(self.all_syllabus_version_dict(), []) self.assertEqual(self.all_cohort_time_slot_dict(), []) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_academy_id_syllabus_slug_version(self): """Test /certificate without auth""" self.headers(academy=1) - syllabus_kwargs = {'slug': 'they-killed-kenny'} - model = self.generate_models(authenticate=True, - syllabus_schedule=True, - profile_academy=True, - capability='read_syllabus', - role='potato', - syllabus=True, - syllabus_version=True, - syllabus_kwargs=syllabus_kwargs) - url = reverse_lazy('admissions:academy_id_syllabus_slug_version', - kwargs={ - 'academy_id': 1, - 'syllabus_slug': 'they-killed-kenny', - }) + syllabus_kwargs = {"slug": "they-killed-kenny"} + model = self.generate_models( + authenticate=True, + syllabus_schedule=True, + profile_academy=True, + capability="read_syllabus", + role="potato", + syllabus=True, + syllabus_version=True, + syllabus_kwargs=syllabus_kwargs, + ) + url = reverse_lazy( + "admissions:academy_id_syllabus_slug_version", + kwargs={ + "academy_id": 1, + "syllabus_slug": "they-killed-kenny", + }, + ) response = self.client.get(url) json = response.json() - expected = [{ - 'json': model['syllabus_version'].json, - 'created_at': datetime_to_iso_format(model['syllabus_version'].created_at), - 'updated_at': datetime_to_iso_format(model['syllabus_version'].updated_at), - 'name': model.syllabus.name, - 'slug': 
model['syllabus'].slug, - 'syllabus': 1, - 'academy_owner': { - 'id': model['syllabus'].academy_owner.id, - 'name': model['syllabus'].academy_owner.name, - 'slug': model['syllabus'].academy_owner.slug, - 'white_labeled': model['syllabus'].academy_owner.white_labeled, - 'icon_url': model['syllabus'].academy_owner.icon_url, - 'available_as_saas': model['syllabus'].academy_owner.available_as_saas, - }, - 'version': model['syllabus_version'].version, - 'duration_in_days': model.syllabus.duration_in_days, - 'duration_in_hours': model.syllabus.duration_in_hours, - 'github_url': model.syllabus.github_url, - 'logo': model.syllabus.logo, - 'private': model.syllabus.private, - 'week_hours': model.syllabus.week_hours, - 'change_log_details': model.syllabus_version.change_log_details, - 'status': model.syllabus_version.status, - 'main_technologies': None, - }] + expected = [ + { + "json": model["syllabus_version"].json, + "created_at": datetime_to_iso_format(model["syllabus_version"].created_at), + "updated_at": datetime_to_iso_format(model["syllabus_version"].updated_at), + "name": model.syllabus.name, + "slug": model["syllabus"].slug, + "syllabus": 1, + "academy_owner": { + "id": model["syllabus"].academy_owner.id, + "name": model["syllabus"].academy_owner.name, + "slug": model["syllabus"].academy_owner.slug, + "white_labeled": model["syllabus"].academy_owner.white_labeled, + "icon_url": model["syllabus"].academy_owner.icon_url, + "available_as_saas": model["syllabus"].academy_owner.available_as_saas, + }, + "version": model["syllabus_version"].version, + "duration_in_days": model.syllabus.duration_in_days, + "duration_in_hours": model.syllabus.duration_in_hours, + "github_url": model.syllabus.github_url, + "logo": model.syllabus.logo, + "private": model.syllabus.private, + "week_hours": model.syllabus.week_hours, + "change_log_details": model.syllabus_version.change_log_details, + "status": model.syllabus_version.status, + "main_technologies": None, + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, 'syllabus')}]) - self.assertEqual(self.all_syllabus_version_dict(), [{**self.model_to_dict(model, 'syllabus_version')}]) + self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, "syllabus")}]) + self.assertEqual(self.all_syllabus_version_dict(), [{**self.model_to_dict(model, "syllabus_version")}]) self.assertEqual(self.all_cohort_time_slot_dict(), []) def test_syllabus_slug_version__post__bad_syllabus_id(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - syllabus_schedule=True, - profile_academy=True, - capability='crud_syllabus', - role='potato', - syllabus=True) - url = reverse_lazy('admissions:academy_id_syllabus_slug_version', - kwargs={ - 'academy_id': 1, - 'syllabus_slug': 'they-killed-kenny', - }) + model = self.generate_models( + authenticate=True, + syllabus_schedule=True, + profile_academy=True, + capability="crud_syllabus", + role="potato", + syllabus=True, + ) + url = reverse_lazy( + "admissions:academy_id_syllabus_slug_version", + kwargs={ + "academy_id": 1, + "syllabus_slug": "they-killed-kenny", + }, + ) data = {} - response = self.client.post(url, data, format='json') + response = self.client.post(url, data, format="json") json = response.json() - expected = {'detail': 'syllabus-not-found', 'status_code': 404} + expected = {"detail": "syllabus-not-found", "status_code": 404} 
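Every hunk in these test files applies the same mechanical transformation: single-quoted strings are normalized to double quotes and the old hand-wrapped call sites are re-wrapped to a wider line length, which is what Black's string normalization and line-length settings produce. Below is a small, self-contained sketch of that transformation on one of the URL lookups above; the line length of 120 is an inference from the width of the reformatted lines, since the project's actual Black configuration is not shown in this diff.

import black

# Old-style call site, as it appears on the minus side of the hunks above.
OLD = """\
url = reverse_lazy('admissions:academy_id_syllabus_slug_version',
                   kwargs={
                       'academy_id': 1,
                       'syllabus_slug': 'they-killed-kenny'
                   })
"""

# string_normalization is on by default, so single quotes become double quotes;
# line_length controls whether the kwargs dict is collapsed onto a single line.
formatted = black.format_str(OLD, mode=black.Mode(line_length=120))
print(formatted)

Under that assumed configuration the output has the shape seen on the plus side of these hunks: a double-quoted route name and a kwargs dict folded onto one line where it fits within the line length.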
self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) @@ -197,23 +215,27 @@ def test_syllabus_slug_version__post__bad_syllabus_id(self): def test_syllabus_slug_version__post__without_json_field(self): """Test /certificate without auth""" self.headers(academy=1) - syllabus_kwargs = {'slug': 'they-killed-kenny'} - model = self.generate_models(authenticate=True, - syllabus_schedule=True, - profile_academy=True, - capability='crud_syllabus', - role='potato', - syllabus=True, - syllabus_kwargs=syllabus_kwargs) - url = reverse_lazy('admissions:academy_id_syllabus_slug_version', - kwargs={ - 'academy_id': 1, - 'syllabus_slug': 'they-killed-kenny', - }) + syllabus_kwargs = {"slug": "they-killed-kenny"} + model = self.generate_models( + authenticate=True, + syllabus_schedule=True, + profile_academy=True, + capability="crud_syllabus", + role="potato", + syllabus=True, + syllabus_kwargs=syllabus_kwargs, + ) + url = reverse_lazy( + "admissions:academy_id_syllabus_slug_version", + kwargs={ + "academy_id": 1, + "syllabus_slug": "they-killed-kenny", + }, + ) data = {} - response = self.client.post(url, data, format='json') + response = self.client.post(url, data, format="json") json = response.json() - expected = {'json': ['This field is required.']} + expected = {"json": ["This field is required."]} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -222,90 +244,107 @@ def test_syllabus_slug_version__post__without_json_field(self): def test_syllabus_slug_version__post(self): """Test /certificate without auth""" self.headers(academy=1) - syllabus_kwargs = {'slug': 'they-killed-kenny'} + syllabus_kwargs = {"slug": "they-killed-kenny"} slug = self.bc.fake.slug() - asset_alias = {'slug': slug} - model = self.generate_models(authenticate=True, - syllabus_schedule=True, - profile_academy=True, - capability='crud_syllabus', - role='potato', - syllabus=True, - asset_alias=asset_alias, - syllabus_kwargs=syllabus_kwargs) - url = reverse_lazy('admissions:academy_id_syllabus_slug_version', - kwargs={ - 'academy_id': 1, - 'syllabus_slug': 'they-killed-kenny', - }) - data = {'json': generate_syllabus_json(slug)} - response = self.client.post(url, data, format='json') + asset_alias = {"slug": slug} + model = self.generate_models( + authenticate=True, + syllabus_schedule=True, + profile_academy=True, + capability="crud_syllabus", + role="potato", + syllabus=True, + asset_alias=asset_alias, + syllabus_kwargs=syllabus_kwargs, + ) + url = reverse_lazy( + "admissions:academy_id_syllabus_slug_version", + kwargs={ + "academy_id": 1, + "syllabus_slug": "they-killed-kenny", + }, + ) + data = {"json": generate_syllabus_json(slug)} + response = self.client.post(url, data, format="json") json = response.json() expected = { - 'syllabus': 1, - 'version': 1, - 'change_log_details': None, - 'status': 'PUBLISHED', + "syllabus": 1, + "version": 1, + "change_log_details": None, + "status": "PUBLISHED", **data, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - self.assertEqual(self.all_syllabus_version_dict(), [{ - 'id': 1, - 'integrity_check_at': None, - 'integrity_report': None, - 'integrity_status': 'PENDING', - 'change_log_details': None, - 'status': 'PUBLISHED', - 'syllabus_id': 1, - 'version': 1, - **data, - }]) + self.assertEqual( + self.all_syllabus_version_dict(), + [ + { + "id": 1, + "integrity_check_at": None, + "integrity_report": None, + "integrity_status": "PENDING", + 
"change_log_details": None, + "status": "PUBLISHED", + "syllabus_id": 1, + "version": 1, + **data, + } + ], + ) def test_syllabus_slug_version__post__autoincrement_version(self): """Test /certificate without auth""" self.headers(academy=1) - syllabus_kwargs = {'slug': 'they-killed-kenny'} + syllabus_kwargs = {"slug": "they-killed-kenny"} slug = self.bc.fake.slug() - asset_alias = {'slug': slug} - model = self.generate_models(authenticate=True, - syllabus_schedule=True, - profile_academy=True, - capability='crud_syllabus', - role='potato', - syllabus=True, - syllabus_version=True, - asset_alias=asset_alias, - syllabus_kwargs=syllabus_kwargs) - url = reverse_lazy('admissions:academy_id_syllabus_slug_version', - kwargs={ - 'academy_id': 1, - 'syllabus_slug': 'they-killed-kenny', - }) - data = {'json': generate_syllabus_json(slug)} - response = self.client.post(url, data, format='json') + asset_alias = {"slug": slug} + model = self.generate_models( + authenticate=True, + syllabus_schedule=True, + profile_academy=True, + capability="crud_syllabus", + role="potato", + syllabus=True, + syllabus_version=True, + asset_alias=asset_alias, + syllabus_kwargs=syllabus_kwargs, + ) + url = reverse_lazy( + "admissions:academy_id_syllabus_slug_version", + kwargs={ + "academy_id": 1, + "syllabus_slug": "they-killed-kenny", + }, + ) + data = {"json": generate_syllabus_json(slug)} + response = self.client.post(url, data, format="json") json = response.json() expected = { - 'syllabus': 1, - 'change_log_details': None, - 'status': 'PUBLISHED', - 'version': model.syllabus_version.version + 1, + "syllabus": 1, + "change_log_details": None, + "status": "PUBLISHED", + "version": model.syllabus_version.version + 1, **data, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - self.assertEqual(self.all_syllabus_version_dict(), [{ - **self.model_to_dict(model, 'syllabus_version') - }, { - 'id': 2, - 'integrity_check_at': None, - 'integrity_report': None, - 'integrity_status': 'PENDING', - 'change_log_details': None, - 'status': 'PUBLISHED', - 'syllabus_id': 1, - 'version': model.syllabus_version.version + 1, - **data, - }]) + self.assertEqual( + self.all_syllabus_version_dict(), + [ + {**self.model_to_dict(model, "syllabus_version")}, + { + "id": 2, + "integrity_check_at": None, + "integrity_report": None, + "integrity_status": "PENDING", + "change_log_details": None, + "status": "PUBLISHED", + "syllabus_id": 1, + "version": model.syllabus_version.version + 1, + **data, + }, + ], + ) diff --git a/breathecode/admissions/tests/urls/tests_academy_id_syllabus_slug_version_version.py b/breathecode/admissions/tests/urls/tests_academy_id_syllabus_slug_version_version.py index a84e56751..362578add 100644 --- a/breathecode/admissions/tests/urls/tests_academy_id_syllabus_slug_version_version.py +++ b/breathecode/admissions/tests/urls/tests_academy_id_syllabus_slug_version_version.py @@ -1,6 +1,7 @@ """ Test /certificate """ + from unittest.mock import patch, MagicMock from breathecode.services import datetime_to_iso_format from django.urls.base import reverse_lazy @@ -18,19 +19,17 @@ class CertificateTestSuite(AdmissionsTestCase): def test_academy_id_syllabus_slug_version_version_without_auth(self): """Test /certificate without auth""" self.headers(academy=1) - url = reverse_lazy('admissions:academy_id_syllabus_slug_version_version', - kwargs={ - 'academy_id': 1, - 'syllabus_slug': 'they-killed-kenny', - 'version': 1 - }) + url = reverse_lazy( + 
"admissions:academy_id_syllabus_slug_version_version", + kwargs={"academy_id": 1, "syllabus_slug": "they-killed-kenny", "version": 1}, + ) response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED - }) + self.assertEqual( + json, + {"detail": "Authentication credentials were not provided.", "status_code": status.HTTP_401_UNAUTHORIZED}, + ) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) self.assertEqual(self.all_syllabus_schedule_dict(), []) self.assertEqual(self.all_cohort_time_slot_dict(), []) @@ -38,19 +37,16 @@ def test_academy_id_syllabus_slug_version_version_without_auth(self): def test_academy_id_syllabus_slug_version_version_without_capability(self): """Test /certificate without auth""" self.headers(academy=1) - url = reverse_lazy('admissions:academy_id_syllabus_slug_version_version', - kwargs={ - 'academy_id': 1, - 'syllabus_slug': 'they-killed-kenny', - 'version': 1 - }) + url = reverse_lazy( + "admissions:academy_id_syllabus_slug_version_version", + kwargs={"academy_id": 1, "syllabus_slug": "they-killed-kenny", "version": 1}, + ) self.generate_models(authenticate=True) response = self.client.get(url) json = response.json() expected = { - 'status_code': 403, - 'detail': 'You (user: 1) don\'t have this capability: read_syllabus ' - 'for academy 1' + "status_code": 403, + "detail": "You (user: 1) don't have this capability: read_syllabus " "for academy 1", } self.assertEqual(json, expected) @@ -61,20 +57,16 @@ def test_academy_id_syllabus_slug_version_version_without_capability(self): def test_academy_id_syllabus_slug_version_version_without_syllabus(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - syllabus_schedule=True, - profile_academy=True, - capability='read_syllabus', - role='potato') - url = reverse_lazy('admissions:academy_id_syllabus_slug_version_version', - kwargs={ - 'academy_id': 1, - 'syllabus_slug': 'they-killed-kenny', - 'version': 1 - }) + model = self.generate_models( + authenticate=True, syllabus_schedule=True, profile_academy=True, capability="read_syllabus", role="potato" + ) + url = reverse_lazy( + "admissions:academy_id_syllabus_slug_version_version", + kwargs={"academy_id": 1, "syllabus_slug": "they-killed-kenny", "version": 1}, + ) response = self.client.get(url) json = response.json() - expected = {'detail': 'syllabus-version-not-found', 'status_code': 404} + expected = {"detail": "syllabus-version-not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) @@ -83,57 +75,57 @@ def test_academy_id_syllabus_slug_version_version_without_syllabus(self): self.assertEqual(self.all_syllabus_version_dict(), []) self.assertEqual(self.all_cohort_time_slot_dict(), []) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_academy_id_syllabus_slug_version_version(self): """Test /certificate without auth""" self.headers(academy=1) - syllabus_kwargs = {'slug': 'they-killed-kenny'} - model = self.generate_models(authenticate=True, - syllabus_schedule=True, - profile_academy=True, - capability='read_syllabus', - role='potato', - syllabus=True, - syllabus_version=True, - syllabus_kwargs=syllabus_kwargs) - url = reverse_lazy('admissions:academy_id_syllabus_slug_version_version', - kwargs={ - 
'academy_id': 1, - 'syllabus_slug': 'they-killed-kenny', - 'version': model.syllabus_version.version - }) + syllabus_kwargs = {"slug": "they-killed-kenny"} + model = self.generate_models( + authenticate=True, + syllabus_schedule=True, + profile_academy=True, + capability="read_syllabus", + role="potato", + syllabus=True, + syllabus_version=True, + syllabus_kwargs=syllabus_kwargs, + ) + url = reverse_lazy( + "admissions:academy_id_syllabus_slug_version_version", + kwargs={"academy_id": 1, "syllabus_slug": "they-killed-kenny", "version": model.syllabus_version.version}, + ) response = self.client.get(url) json = response.json() expected = { - 'json': model['syllabus_version'].json, - 'created_at': datetime_to_iso_format(model['syllabus_version'].created_at), - 'updated_at': datetime_to_iso_format(model['syllabus_version'].updated_at), - 'name': model.syllabus.name, - 'slug': model['syllabus'].slug, - 'syllabus': 1, - 'academy_owner': { - 'id': model['syllabus'].academy_owner.id, - 'name': model['syllabus'].academy_owner.name, - 'slug': model['syllabus'].academy_owner.slug, - 'white_labeled': model['syllabus'].academy_owner.white_labeled, - 'icon_url': model['syllabus'].academy_owner.icon_url, - 'available_as_saas': model['syllabus'].academy_owner.available_as_saas, + "json": model["syllabus_version"].json, + "created_at": datetime_to_iso_format(model["syllabus_version"].created_at), + "updated_at": datetime_to_iso_format(model["syllabus_version"].updated_at), + "name": model.syllabus.name, + "slug": model["syllabus"].slug, + "syllabus": 1, + "academy_owner": { + "id": model["syllabus"].academy_owner.id, + "name": model["syllabus"].academy_owner.name, + "slug": model["syllabus"].academy_owner.slug, + "white_labeled": model["syllabus"].academy_owner.white_labeled, + "icon_url": model["syllabus"].academy_owner.icon_url, + "available_as_saas": model["syllabus"].academy_owner.available_as_saas, }, - 'version': model['syllabus_version'].version, - 'duration_in_days': model.syllabus.duration_in_days, - 'duration_in_hours': model.syllabus.duration_in_hours, - 'github_url': model.syllabus.github_url, - 'logo': model.syllabus.logo, - 'private': model.syllabus.private, - 'week_hours': model.syllabus.week_hours, - 'status': model.syllabus_version.status, - 'change_log_details': model.syllabus_version.change_log_details, - 'main_technologies': None, + "version": model["syllabus_version"].version, + "duration_in_days": model.syllabus.duration_in_days, + "duration_in_hours": model.syllabus.duration_in_hours, + "github_url": model.syllabus.github_url, + "logo": model.syllabus.logo, + "private": model.syllabus.private, + "week_hours": model.syllabus.week_hours, + "status": model.syllabus_version.status, + "change_log_details": model.syllabus_version.change_log_details, + "main_technologies": None, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, 'syllabus')}]) - self.assertEqual(self.all_syllabus_version_dict(), [{**self.model_to_dict(model, 'syllabus_version')}]) + self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, "syllabus")}]) + self.assertEqual(self.all_syllabus_version_dict(), [{**self.model_to_dict(model, "syllabus_version")}]) self.assertEqual(self.all_cohort_time_slot_dict(), []) diff --git a/breathecode/admissions/tests/urls/tests_academy_me.py b/breathecode/admissions/tests/urls/tests_academy_me.py index 4338d3c31..097074b94 100644 --- 
a/breathecode/admissions/tests/urls/tests_academy_me.py +++ b/breathecode/admissions/tests/urls/tests_academy_me.py @@ -1,6 +1,7 @@ """ Test /cohort """ + import random from datetime import timedelta from unittest.mock import MagicMock, call, patch @@ -18,13 +19,13 @@ def put_serializer(academy, country, city, data={}): return { - 'city': city.id, - 'country': country.code, - 'id': academy.id, - 'name': academy.name, - 'slug': academy.slug, - 'street_address': academy.street_address, - 'is_hidden_on_prework': academy.is_hidden_on_prework, + "city": city.id, + "country": country.code, + "id": academy.id, + "name": academy.name, + "slug": academy.slug, + "street_address": academy.street_address, + "is_hidden_on_prework": academy.is_hidden_on_prework, **data, } @@ -40,124 +41,136 @@ class AcademyCohortIdTestSuite(AdmissionsTestCase): def test__without_auth(self): """Test /cohort/:id without auth""" self.headers(academy=1) - url = reverse_lazy('admissions:academy_me') - response = self.client.put(url, {}, format='json') + url = reverse_lazy("admissions:academy_me") + response = self.client.put(url, {}, format="json") json = response.json() - self.assertEqual(json, { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED - }) + self.assertEqual( + json, + {"detail": "Authentication credentials were not provided.", "status_code": status.HTTP_401_UNAUTHORIZED}, + ) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) def test_put__without_capability(self): """Test /cohort/:id without auth""" self.headers(academy=1) - url = reverse_lazy('admissions:academy_me') + url = reverse_lazy("admissions:academy_me") self.generate_models(authenticate=True) data = {} - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() - self.assertEqual(json, { - 'detail': "You (user: 1) don't have this capability: crud_my_academy for academy 1", - 'status_code': 403 - }) + self.assertEqual( + json, + {"detail": "You (user: 1) don't have this capability: crud_my_academy for academy 1", "status_code": 403}, + ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) """ 🔽🔽🔽 Put without required fields """ - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) def test__put__without_required_fields(self): """Test /cohort/:id without auth""" from breathecode.admissions.signals import cohort_saved self.headers(academy=1) - url = reverse_lazy('admissions:academy_me') - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_my_academy', - role='potato', - skip_cohort=True, - syllabus=True) + url = reverse_lazy("admissions:academy_me") + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="crud_my_academy", + role="potato", + skip_cohort=True, + syllabus=True, + ) # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] data = {} - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() expected = { - 'name': ['This field is required.'], - 'slug': ['This field is required.'], - 'street_address': ['This field is required.'], + "name": ["This field is required."], + "slug": ["This field is required."], + "street_address": ["This field is required."], } self.assertEqual(json, expected) 
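The {"name": ["This field is required."], ...} bodies asserted in test__put__without_required_fields are standard Django REST Framework serializer validation errors rather than hand-built messages. Below is a minimal sketch of where that shape comes from; the field names mirror the test above, while the real academy serializer in breathecode.admissions is not reproduced here and may declare these fields differently.

import django
from django.conf import settings

# Minimal configuration so the sketch can run outside the breathecode project.
if not settings.configured:
    settings.configure()
    django.setup()

from rest_framework import serializers


class AcademyPutSketchSerializer(serializers.Serializer):
    """Sketch only: stands in for the project's real academy serializer."""

    # The three fields the test above expects to be reported as missing.
    name = serializers.CharField()
    slug = serializers.CharField()
    street_address = serializers.CharField()


serializer = AcademyPutSketchSerializer(data={})
serializer.is_valid()

# DRF reports each missing required field as a list of messages, which is the
# exact payload shape asserted by test__put__without_required_fields above.
assert serializer.errors == {
    "name": ["This field is required."],
    "slug": ["This field is required."],
    "street_address": ["This field is required."],
}

Because DRF's ErrorDetail subclasses str, comparing these errors against plain string literals, as the tests above do via response.json(), passes without any conversion.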
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('admissions.Academy'), [ - self.bc.format.to_dict(model.academy), - ]) + self.assertEqual( + self.bc.database.list_of("admissions.Academy"), + [ + self.bc.format.to_dict(model.academy), + ], + ) self.assertEqual(cohort_saved.send_robust.call_args_list, []) """ 🔽🔽🔽 Put with Academy, try to modify slug """ - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) def test__put__with_academy__try_to_modify_slug(self): """Test /cohort/:id without auth""" from breathecode.admissions.signals import cohort_saved self.headers(academy=1) - url = reverse_lazy('admissions:academy_me') - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_my_academy', - role='potato', - skip_cohort=True, - syllabus=True) + url = reverse_lazy("admissions:academy_me") + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="crud_my_academy", + role="potato", + skip_cohort=True, + syllabus=True, + ) # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] data = { - 'name': self.bc.fake.name(), - 'slug': self.bc.fake.slug(), - 'street_address': self.bc.fake.address(), + "name": self.bc.fake.name(), + "slug": self.bc.fake.slug(), + "street_address": self.bc.fake.address(), } - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() - expected = {'detail': 'Academy slug cannot be updated', 'status_code': 400} + expected = {"detail": "Academy slug cannot be updated", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('admissions.Academy'), [ - self.bc.format.to_dict(model.academy), - ]) + self.assertEqual( + self.bc.database.list_of("admissions.Academy"), + [ + self.bc.format.to_dict(model.academy), + ], + ) self.assertEqual(cohort_saved.send_robust.call_args_list, []) """ 🔽🔽🔽 Put with Academy, passing all the fields """ - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) def test__put__with_academy__passing_all_the_fields(self): """Test /cohort/:id without auth""" from breathecode.admissions.signals import cohort_saved self.headers(academy=1) - url = reverse_lazy('admissions:academy_me') - model = self.generate_models(authenticate=True, - profile_academy=True, - country=3, - city=3, - capability='crud_my_academy', - role='potato', - skip_cohort=True, - syllabus=True) + url = reverse_lazy("admissions:academy_me") + model = self.generate_models( + authenticate=True, + profile_academy=True, + country=3, + city=3, + capability="crud_my_academy", + role="potato", + skip_cohort=True, + syllabus=True, + ) # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] @@ -165,48 +178,55 @@ def test__put__with_academy__passing_all_the_fields(self): country = random.choice(model.country) city = random.choice(model.city) data = { - 'name': self.bc.fake.name(), - 'slug': model.academy.slug, - 'street_address': self.bc.fake.address(), - 'country': country.code, - 'city': city.id, + "name": self.bc.fake.name(), + "slug": model.academy.slug, + "street_address": self.bc.fake.address(), + 
"country": country.code, + "city": city.id, } - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() expected = put_serializer(model.academy, country, city, data=data) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - fields = ['country', 'city'] + fields = ["country", "city"] for field in fields: - data[f'{field}_id'] = data.pop(field) - - self.assertEqual(self.bc.database.list_of('admissions.Academy'), [{ - **self.bc.format.to_dict(model.academy), - **data, - }]) + data[f"{field}_id"] = data.pop(field) + + self.assertEqual( + self.bc.database.list_of("admissions.Academy"), + [ + { + **self.bc.format.to_dict(model.academy), + **data, + } + ], + ) self.assertEqual(cohort_saved.send_robust.call_args_list, []) """ 🔽🔽🔽 Put with Academy, passing all the wrong fields """ - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) def test__put__with_academy__passing_all_the_wrong_fields(self): """Test /cohort/:id without auth""" from breathecode.admissions.signals import cohort_saved self.headers(academy=1) - url = reverse_lazy('admissions:academy_me') - model = self.generate_models(authenticate=True, - profile_academy=True, - country=3, - city=3, - capability='crud_my_academy', - role='potato', - skip_cohort=True, - syllabus=True) + url = reverse_lazy("admissions:academy_me") + model = self.generate_models( + authenticate=True, + profile_academy=True, + country=3, + city=3, + capability="crud_my_academy", + role="potato", + skip_cohort=True, + syllabus=True, + ) # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] @@ -214,76 +234,83 @@ def test__put__with_academy__passing_all_the_wrong_fields(self): country = random.choice(model.country) city = random.choice(model.city) data = { - 'name': self.bc.fake.name(), - 'slug': model.academy.slug, - 'street_address': self.bc.fake.address(), - 'country': country.code, - 'city': city.id, + "name": self.bc.fake.name(), + "slug": model.academy.slug, + "street_address": self.bc.fake.address(), + "country": country.code, + "city": city.id, } incorrect_values = { - 'logo_url': self.bc.fake.url(), - 'icon_url': self.bc.fake.url(), - 'website_url': self.bc.fake.url(), - 'marketing_email': self.bc.fake.email(), - 'feedback_email': self.bc.fake.email(), - 'marketing_phone': self.bc.fake.phone_number(), - 'twitter_handle': self.bc.fake.user_name(), - 'facebook_handle': self.bc.fake.user_name(), - 'instagram_handle': self.bc.fake.user_name(), - 'github_handle': self.bc.fake.user_name(), - 'linkedin_url': self.bc.fake.url(), - 'youtube_url': self.bc.fake.url(), - 'latitude': random.random() * 90 * random.choice([1, -1]), # - 'longitude': random.random() * 90 * random.choice([1, -1]), - 'zip_code': str(random.randint(1, 1000)), - 'white_labeled': bool(random.randint(0, 1)), - 'active_campaign_slug': self.bc.fake.slug(), - 'available_as_saas': bool(random.randint(0, 1)), - 'status': random.choice(['INACTIVE', 'ACTIVE', 'DELETED']), - 'timezone': self.bc.fake.name(), - 'logistical_information': self.bc.fake.text()[:150] + "logo_url": self.bc.fake.url(), + "icon_url": self.bc.fake.url(), + "website_url": self.bc.fake.url(), + "marketing_email": self.bc.fake.email(), + "feedback_email": self.bc.fake.email(), + "marketing_phone": self.bc.fake.phone_number(), + "twitter_handle": self.bc.fake.user_name(), + 
"facebook_handle": self.bc.fake.user_name(), + "instagram_handle": self.bc.fake.user_name(), + "github_handle": self.bc.fake.user_name(), + "linkedin_url": self.bc.fake.url(), + "youtube_url": self.bc.fake.url(), + "latitude": random.random() * 90 * random.choice([1, -1]), # + "longitude": random.random() * 90 * random.choice([1, -1]), + "zip_code": str(random.randint(1, 1000)), + "white_labeled": bool(random.randint(0, 1)), + "active_campaign_slug": self.bc.fake.slug(), + "available_as_saas": bool(random.randint(0, 1)), + "status": random.choice(["INACTIVE", "ACTIVE", "DELETED"]), + "timezone": self.bc.fake.name(), + "logistical_information": self.bc.fake.text()[:150], } to_send = data.copy() to_send |= incorrect_values - response = self.client.put(url, to_send, format='json') + response = self.client.put(url, to_send, format="json") json = response.json() expected = put_serializer(model.academy, country, city, data=data) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - fields = ['country', 'city'] + fields = ["country", "city"] for field in fields: - data[f'{field}_id'] = data.pop(field) - - self.assertEqual(self.bc.database.list_of('admissions.Academy'), [{ - **self.bc.format.to_dict(model.academy), - **data, - }]) + data[f"{field}_id"] = data.pop(field) + + self.assertEqual( + self.bc.database.list_of("admissions.Academy"), + [ + { + **self.bc.format.to_dict(model.academy), + **data, + } + ], + ) self.assertEqual(cohort_saved.send_robust.call_args_list, []) """ 🔽🔽🔽 Put with Academy, passing all the fields """ - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) def test__put__with_academy__passing_all_the_status(self): """Test /cohort/:id without auth""" from breathecode.admissions.signals import cohort_saved self.headers(academy=1) - url = reverse_lazy('admissions:academy_me') - model = self.generate_models(authenticate=True, - profile_academy=True, - country=3, - city=3, - capability='crud_my_academy', - role='potato', - skip_cohort=True, - syllabus=True) + url = reverse_lazy("admissions:academy_me") + model = self.generate_models( + authenticate=True, + profile_academy=True, + country=3, + city=3, + capability="crud_my_academy", + role="potato", + skip_cohort=True, + syllabus=True, + ) # reset because this call are coming from mixer cohort_saved.send_robust.call_args_list = [] @@ -291,25 +318,30 @@ def test__put__with_academy__passing_all_the_status(self): country = random.choice(model.country) city = random.choice(model.city) data = { - 'name': self.bc.fake.name(), - 'slug': model.academy.slug, - 'street_address': self.bc.fake.address(), - 'country': country.code, - 'city': city.id, + "name": self.bc.fake.name(), + "slug": model.academy.slug, + "street_address": self.bc.fake.address(), + "country": country.code, + "city": city.id, } - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() expected = put_serializer(model.academy, country, city, data=data) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - fields = ['country', 'city'] + fields = ["country", "city"] for field in fields: - data[f'{field}_id'] = data.pop(field) - - self.assertEqual(self.bc.database.list_of('admissions.Academy'), [{ - **self.bc.format.to_dict(model.academy), - **data, - }]) + data[f"{field}_id"] = data.pop(field) + + self.assertEqual( + 
self.bc.database.list_of("admissions.Academy"), + [ + { + **self.bc.format.to_dict(model.academy), + **data, + } + ], + ) self.assertEqual(cohort_saved.send_robust.call_args_list, []) diff --git a/breathecode/admissions/tests/urls/tests_academy_schedule.py b/breathecode/admissions/tests/urls/tests_academy_schedule.py index 8e143e961..897eda006 100644 --- a/breathecode/admissions/tests/urls/tests_academy_schedule.py +++ b/breathecode/admissions/tests/urls/tests_academy_schedule.py @@ -1,6 +1,7 @@ """ Test /certificate """ + from random import choice, randint import random from django.urls.base import reverse_lazy @@ -10,10 +11,10 @@ def get_serializer(syllabus_schedule): return { - 'id': syllabus_schedule.id, - 'name': syllabus_schedule.name, - 'description': syllabus_schedule.description, - 'syllabus': syllabus_schedule.syllabus.id, + "id": syllabus_schedule.id, + "name": syllabus_schedule.name, + "description": syllabus_schedule.description, + "syllabus": syllabus_schedule.syllabus.id, } @@ -24,27 +25,27 @@ class CertificateTestSuite(AdmissionsTestCase): def test__without_auth(self): """Test /certificate without auth""" - url = reverse_lazy('admissions:academy_schedule') + url = reverse_lazy("admissions:academy_schedule") response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED - }) + self.assertEqual( + json, + {"detail": "Authentication credentials were not provided.", "status_code": status.HTTP_401_UNAUTHORIZED}, + ) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) self.assertEqual(self.all_syllabus_schedule_dict(), []) def test__without_capability(self): """Test /certificate without auth""" self.headers(academy=1) - url = reverse_lazy('admissions:academy_schedule') + url = reverse_lazy("admissions:academy_schedule") self.generate_models(authenticate=True) response = self.client.get(url) json = response.json() expected = { - 'status_code': 403, - 'detail': "You (user: 1) don't have this capability: read_certificate for academy 1" + "status_code": 403, + "detail": "You (user: 1) don't have this capability: read_certificate for academy 1", } self.assertEqual(json, expected) @@ -58,14 +59,16 @@ def test__without_capability(self): def test__with_schedule_of_other_academy(self): """Test /certificate without auth""" self.headers(academy=1) - syllabus_schedule = {'academy_id': 2} - model = self.generate_models(authenticate=True, - syllabus_schedule=syllabus_schedule, - academy=2, - profile_academy=True, - capability='read_certificate', - role='potato') - url = reverse_lazy('admissions:academy_schedule') + syllabus_schedule = {"academy_id": 2} + model = self.generate_models( + authenticate=True, + syllabus_schedule=syllabus_schedule, + academy=2, + profile_academy=True, + capability="read_certificate", + role="potato", + ) + url = reverse_lazy("admissions:academy_schedule") response = self.client.get(url) json = response.json() expected = [] @@ -81,20 +84,22 @@ def test__with_schedule_of_other_academy(self): def test_academy_schedule(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - syllabus_schedule=True, - profile_academy=True, - capability='read_certificate', - role='potato', - syllabus=True) - url = reverse_lazy('admissions:academy_schedule') + model = self.generate_models( + authenticate=True, + syllabus_schedule=True, + profile_academy=True, + 
capability="read_certificate", + role="potato", + syllabus=True, + ) + url = reverse_lazy("admissions:academy_schedule") response = self.client.get(url) json = response.json() expected = [get_serializer(model.syllabus_schedule)] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, 'syllabus')}]) + self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, "syllabus")}]) """ 🔽🔽🔽 Syllabus id in querystring @@ -103,38 +108,42 @@ def test_academy_schedule(self): def test__syllabus_id_in_querystring__bad_id(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - syllabus_schedule=True, - profile_academy=True, - capability='read_certificate', - role='potato', - syllabus=True) - url = reverse_lazy('admissions:academy_schedule') + '?syllabus_id=9999' + model = self.generate_models( + authenticate=True, + syllabus_schedule=True, + profile_academy=True, + capability="read_certificate", + role="potato", + syllabus=True, + ) + url = reverse_lazy("admissions:academy_schedule") + "?syllabus_id=9999" response = self.client.get(url) json = response.json() expected = [] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, 'syllabus')}]) + self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, "syllabus")}]) def test__syllabus_id_in_querystring(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - syllabus_schedule=True, - profile_academy=True, - capability='read_certificate', - role='potato', - syllabus=True) - url = reverse_lazy('admissions:academy_schedule') + '?syllabus_id=1' + model = self.generate_models( + authenticate=True, + syllabus_schedule=True, + profile_academy=True, + capability="read_certificate", + role="potato", + syllabus=True, + ) + url = reverse_lazy("admissions:academy_schedule") + "?syllabus_id=1" response = self.client.get(url) json = response.json() expected = [get_serializer(model.syllabus_schedule)] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, 'syllabus')}]) + self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, "syllabus")}]) """ 🔽🔽🔽 schedule_type in querystring @@ -143,33 +152,38 @@ def test__syllabus_id_in_querystring(self): def test__schedule_type_in_querystring__not_found(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - syllabus_schedule=True, - profile_academy=True, - capability='read_certificate', - role='potato', - syllabus=True) - url = reverse_lazy('admissions:academy_schedule') + '?schedule_type=asdasdasd' + model = self.generate_models( + authenticate=True, + syllabus_schedule=True, + profile_academy=True, + capability="read_certificate", + role="potato", + syllabus=True, + ) + url = reverse_lazy("admissions:academy_schedule") + "?schedule_type=asdasdasd" response = self.client.get(url) json = response.json() expected = [] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, 'syllabus')}]) + self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, 
"syllabus")}]) def test__schedule_type_in_querystring__found(self): """Test /certificate without auth""" - statuses = ['PARTIME', 'FULLTIME'] - cases = [(x, x, random.choice([y for y in statuses if x != y])) - for x in statuses] + [(x, x.lower(), random.choice([y for y in statuses if x != y])) for x in statuses] + statuses = ["PARTIME", "FULLTIME"] + cases = [(x, x, random.choice([y for y in statuses if x != y])) for x in statuses] + [ + (x, x.lower(), random.choice([y for y in statuses if x != y])) for x in statuses + ] self.headers(academy=1) - model = self.generate_models(authenticate=True, - syllabus_schedule=3, - profile_academy=True, - capability='read_certificate', - role='potato', - syllabus=True) + model = self.generate_models( + authenticate=True, + syllabus_schedule=3, + profile_academy=True, + capability="read_certificate", + role="potato", + syllabus=True, + ) for current, query, bad_status in cases: model.syllabus_schedule[0].schedule_type = current @@ -181,7 +195,7 @@ def test__schedule_type_in_querystring__found(self): model.syllabus_schedule[2].schedule_type = bad_status model.syllabus_schedule[2].save() - url = reverse_lazy('admissions:academy_schedule') + f'?schedule_type={query}' + url = reverse_lazy("admissions:academy_schedule") + f"?schedule_type={query}" response = self.client.get(url) json = response.json() expected = [ @@ -191,20 +205,23 @@ def test__schedule_type_in_querystring__found(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('admissions.SyllabusSchedule'), [ - { - **self.bc.format.to_dict(model.syllabus_schedule[0]), - 'schedule_type': current, - }, - { - **self.bc.format.to_dict(model.syllabus_schedule[1]), - 'schedule_type': current, - }, - { - **self.bc.format.to_dict(model.syllabus_schedule[2]), - 'schedule_type': bad_status, - }, - ]) + self.assertEqual( + self.bc.database.list_of("admissions.SyllabusSchedule"), + [ + { + **self.bc.format.to_dict(model.syllabus_schedule[0]), + "schedule_type": current, + }, + { + **self.bc.format.to_dict(model.syllabus_schedule[1]), + "schedule_type": current, + }, + { + **self.bc.format.to_dict(model.syllabus_schedule[2]), + "schedule_type": bad_status, + }, + ], + ) """ 🔽🔽🔽 Syllabus slug in querystring @@ -213,41 +230,45 @@ def test__schedule_type_in_querystring__found(self): def test__syllabus_slug_in_querystring__bad_id(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - syllabus_schedule=True, - profile_academy=True, - capability='read_certificate', - role='potato', - syllabus=True) - url = reverse_lazy('admissions:academy_schedule') + '?syllabus_slug=they-killed-kenny' + model = self.generate_models( + authenticate=True, + syllabus_schedule=True, + profile_academy=True, + capability="read_certificate", + role="potato", + syllabus=True, + ) + url = reverse_lazy("admissions:academy_schedule") + "?syllabus_slug=they-killed-kenny" response = self.client.get(url) json = response.json() expected = [] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, 'syllabus')}]) + self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, "syllabus")}]) def test__syllabus_slug_in_querystring(self): """Test /certificate without auth""" self.headers(academy=1) - syllabus_kwargs = {'slug': 'they-killed-kenny'} - model = 
self.generate_models(authenticate=True, - syllabus_schedule=True, - profile_academy=True, - capability='read_certificate', - role='potato', - syllabus=True, - syllabus_kwargs=syllabus_kwargs) - url = reverse_lazy('admissions:academy_schedule') + '?syllabus_slug=they-killed-kenny' + syllabus_kwargs = {"slug": "they-killed-kenny"} + model = self.generate_models( + authenticate=True, + syllabus_schedule=True, + profile_academy=True, + capability="read_certificate", + role="potato", + syllabus=True, + syllabus_kwargs=syllabus_kwargs, + ) + url = reverse_lazy("admissions:academy_schedule") + "?syllabus_slug=they-killed-kenny" response = self.client.get(url) json = response.json() expected = [get_serializer(model.syllabus_schedule)] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, 'syllabus')}]) + self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, "syllabus")}]) """ 🔽🔽🔽 Delete @@ -256,26 +277,31 @@ def test__syllabus_slug_in_querystring(self): def test_delete_in_bulk_with_one(self): """Test /cohort/:id/user without auth""" self.headers(academy=1) - many_fields = ['id'] + many_fields = ["id"] base = self.generate_models(academy=True) for field in many_fields: certificate_kwargs = { - 'logo': choice(['http://exampledot.com', 'http://exampledotdot.com', 'http://exampledotdotdot.com']), - 'week_hours': randint(0, 999999999), - 'schedule_type': choice(['PAR-TIME', 'FULL-TIME']), + "logo": choice(["http://exampledot.com", "http://exampledotdot.com", "http://exampledotdotdot.com"]), + "week_hours": randint(0, 999999999), + "schedule_type": choice(["PAR-TIME", "FULL-TIME"]), } - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_certificate', - role='potato', - certificate_kwargs=certificate_kwargs, - syllabus=True, - syllabus_schedule=True, - models=base) - url = (reverse_lazy('admissions:academy_schedule') + f'?{field}=' + - str(getattr(model['syllabus_schedule'], field))) + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="crud_certificate", + role="potato", + certificate_kwargs=certificate_kwargs, + syllabus=True, + syllabus_schedule=True, + models=base, + ) + url = ( + reverse_lazy("admissions:academy_schedule") + + f"?{field}=" + + str(getattr(model["syllabus_schedule"], field)) + ) response = self.client.delete(url) self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) @@ -283,10 +309,10 @@ def test_delete_in_bulk_with_one(self): def test_delete_without_auth(self): """Test /cohort/:id/user without auth""" - url = reverse_lazy('admissions:academy_schedule') + url = reverse_lazy("admissions:academy_schedule") response = self.client.delete(url) json = response.json() - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) @@ -295,14 +321,13 @@ def test_delete_without_auth(self): def test_delete_without_args_in_url_or_bulk(self): """Test /cohort/:id/user without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_certificate', - role='potato') - url = reverse_lazy('admissions:academy_schedule') + model = self.generate_models( + authenticate=True, profile_academy=True, 
capability="crud_certificate", role="potato" + ) + url = reverse_lazy("admissions:academy_schedule") response = self.client.delete(url) json = response.json() - expected = {'detail': 'Missing parameters in the querystring', 'status_code': 400} + expected = {"detail": "Missing parameters in the querystring", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -311,36 +336,44 @@ def test_delete_without_args_in_url_or_bulk(self): def test_delete_in_bulk_with_two(self): """Test /cohort/:id/user without auth""" self.headers(academy=1) - many_fields = ['id'] + many_fields = ["id"] base = self.generate_models(academy=True) for field in many_fields: certificate_kwargs = { - 'logo': choice(['http://exampledot.com', 'http://exampledotdot.com', 'http://exampledotdotdot.com']), - 'week_hours': randint(0, 999999999), - 'schedule_type': choice(['PAR-TIME', 'FULL-TIME']), + "logo": choice(["http://exampledot.com", "http://exampledotdot.com", "http://exampledotdotdot.com"]), + "week_hours": randint(0, 999999999), + "schedule_type": choice(["PAR-TIME", "FULL-TIME"]), } - model1 = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_certificate', - role='potato', - certificate_kwargs=certificate_kwargs, - syllabus=True, - syllabus_schedule=True, - models=base) - - model2 = self.generate_models(profile_academy=True, - capability='crud_certificate', - role='potato', - certificate_kwargs=certificate_kwargs, - syllabus=True, - syllabus_schedule=True, - models=base) - - url = (reverse_lazy('admissions:academy_schedule') + f'?{field}=' + - str(getattr(model1['syllabus_schedule'], field)) + ',' + - str(getattr(model2['syllabus_schedule'], field))) + model1 = self.generate_models( + authenticate=True, + profile_academy=True, + capability="crud_certificate", + role="potato", + certificate_kwargs=certificate_kwargs, + syllabus=True, + syllabus_schedule=True, + models=base, + ) + + model2 = self.generate_models( + profile_academy=True, + capability="crud_certificate", + role="potato", + certificate_kwargs=certificate_kwargs, + syllabus=True, + syllabus_schedule=True, + models=base, + ) + + url = ( + reverse_lazy("admissions:academy_schedule") + + f"?{field}=" + + str(getattr(model1["syllabus_schedule"], field)) + + "," + + str(getattr(model2["syllabus_schedule"], field)) + ) response = self.client.delete(url) self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) @@ -349,16 +382,15 @@ def test_delete_in_bulk_with_two(self): def test__post__without_syllabus(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_certificate', - role='potato') - url = reverse_lazy('admissions:academy_schedule') + model = self.generate_models( + authenticate=True, profile_academy=True, capability="crud_certificate", role="potato" + ) + url = reverse_lazy("admissions:academy_schedule") response = self.client.post(url) json = response.json() expected = { - 'detail': 'missing-syllabus-in-request', - 'status_code': 400, + "detail": "missing-syllabus-in-request", + "status_code": 400, } self.assertEqual(json, expected) @@ -368,17 +400,16 @@ def test__post__without_syllabus(self): def test__post__syllabus_not_found(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_certificate', - role='potato') - url = 
reverse_lazy('admissions:academy_schedule') - data = {'syllabus': 1} - response = self.client.post(url, data, format='json') + model = self.generate_models( + authenticate=True, profile_academy=True, capability="crud_certificate", role="potato" + ) + url = reverse_lazy("admissions:academy_schedule") + data = {"syllabus": 1} + response = self.client.post(url, data, format="json") json = response.json() expected = { - 'detail': 'syllabus-not-found', - 'status_code': 404, + "detail": "syllabus-not-found", + "status_code": 404, } self.assertEqual(json, expected) @@ -388,16 +419,14 @@ def test__post__syllabus_not_found(self): def test__post__without_academy(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - syllabus=True, - capability='crud_certificate', - role='potato') - url = reverse_lazy('admissions:academy_schedule') - data = {'syllabus': 1} - response = self.client.post(url, data, format='json') + model = self.generate_models( + authenticate=True, profile_academy=True, syllabus=True, capability="crud_certificate", role="potato" + ) + url = reverse_lazy("admissions:academy_schedule") + data = {"syllabus": 1} + response = self.client.post(url, data, format="json") json = response.json() - expected = {'detail': 'missing-academy-in-request', 'status_code': 400} + expected = {"detail": "missing-academy-in-request", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -406,16 +435,14 @@ def test__post__without_academy(self): def test__post__academy_not_found(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - syllabus=True, - capability='crud_certificate', - role='potato') - url = reverse_lazy('admissions:academy_schedule') - data = {'syllabus': 1, 'academy': 2} - response = self.client.post(url, data, format='json') + model = self.generate_models( + authenticate=True, profile_academy=True, syllabus=True, capability="crud_certificate", role="potato" + ) + url = reverse_lazy("admissions:academy_schedule") + data = {"syllabus": 1, "academy": 2} + response = self.client.post(url, data, format="json") json = response.json() - expected = {'detail': 'academy-not-found', 'status_code': 404} + expected = {"detail": "academy-not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) @@ -424,18 +451,16 @@ def test__post__academy_not_found(self): def test__post__without_body(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - syllabus=True, - capability='crud_certificate', - role='potato') - url = reverse_lazy('admissions:academy_schedule') - data = {'syllabus': 1, 'academy': 1} - response = self.client.post(url, data, format='json') + model = self.generate_models( + authenticate=True, profile_academy=True, syllabus=True, capability="crud_certificate", role="potato" + ) + url = reverse_lazy("admissions:academy_schedule") + data = {"syllabus": 1, "academy": 1} + response = self.client.post(url, data, format="json") json = response.json() expected = { - 'name': ['This field is required.'], - 'description': ['This field is required.'], + "name": ["This field is required."], + "description": ["This field is required."], } self.assertEqual(json, expected) @@ -445,133 +470,142 @@ def 
test__post__without_body(self): def test__post(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - syllabus=True, - capability='crud_certificate', - role='potato') - url = reverse_lazy('admissions:academy_schedule') + model = self.generate_models( + authenticate=True, profile_academy=True, syllabus=True, capability="crud_certificate", role="potato" + ) + url = reverse_lazy("admissions:academy_schedule") data = { - 'academy': 1, - 'syllabus': 1, - 'name': 'They killed kenny', - 'description': 'Oh my god!', + "academy": 1, + "syllabus": 1, + "name": "They killed kenny", + "description": "Oh my god!", } - response = self.client.post(url, data, format='json') + response = self.client.post(url, data, format="json") json = response.json() - self.assertDatetime(json['created_at']) - del json['created_at'] + self.assertDatetime(json["created_at"]) + del json["created_at"] - self.assertDatetime(json['updated_at']) - del json['updated_at'] + self.assertDatetime(json["updated_at"]) + del json["updated_at"] expected = { - 'id': 1, - 'schedule_type': 'PART-TIME', - 'syllabus': 1, + "id": 1, + "schedule_type": "PART-TIME", + "syllabus": 1, **data, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - self.assertEqual(self.all_syllabus_schedule_dict(), [{ - 'id': 1, - 'name': 'They killed kenny', - 'description': 'Oh my god!', - 'schedule_type': 'PART-TIME', - 'syllabus_id': 1, - 'academy_id': 1, - }]) + self.assertEqual( + self.all_syllabus_schedule_dict(), + [ + { + "id": 1, + "name": "They killed kenny", + "description": "Oh my god!", + "schedule_type": "PART-TIME", + "syllabus_id": 1, + "academy_id": 1, + } + ], + ) def test__post__passing_all_status__in_lowercase(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - syllabus=True, - capability='crud_certificate', - role='potato') - url = reverse_lazy('admissions:academy_schedule') - schedule_type = random.choice(['PART-TIME', 'FULL-TIME']) + model = self.generate_models( + authenticate=True, profile_academy=True, syllabus=True, capability="crud_certificate", role="potato" + ) + url = reverse_lazy("admissions:academy_schedule") + schedule_type = random.choice(["PART-TIME", "FULL-TIME"]) data = { - 'academy': 1, - 'syllabus': 1, - 'name': 'They killed kenny', - 'description': 'Oh my god!', - 'schedule_type': schedule_type.lower(), + "academy": 1, + "syllabus": 1, + "name": "They killed kenny", + "description": "Oh my god!", + "schedule_type": schedule_type.lower(), } - response = self.client.post(url, data, format='json') + response = self.client.post(url, data, format="json") json = response.json() - self.assertDatetime(json['created_at']) - del json['created_at'] + self.assertDatetime(json["created_at"]) + del json["created_at"] - self.assertDatetime(json['updated_at']) - del json['updated_at'] + self.assertDatetime(json["updated_at"]) + del json["updated_at"] expected = { - 'id': 1, - 'schedule_type': 'PART-TIME', - 'syllabus': 1, + "id": 1, + "schedule_type": "PART-TIME", + "syllabus": 1, **data, - 'schedule_type': schedule_type, + "schedule_type": schedule_type, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - self.assertEqual(self.all_syllabus_schedule_dict(), [{ - 'id': 1, - 'name': 'They killed kenny', - 'description': 'Oh my god!', - 'schedule_type': 'PART-TIME', - 
'syllabus_id': 1, - 'academy_id': 1, - 'schedule_type': schedule_type, - }]) + self.assertEqual( + self.all_syllabus_schedule_dict(), + [ + { + "id": 1, + "name": "They killed kenny", + "description": "Oh my god!", + "schedule_type": "PART-TIME", + "syllabus_id": 1, + "academy_id": 1, + "schedule_type": schedule_type, + } + ], + ) def test__post__passing_all_status__in_uppercase(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - syllabus=True, - capability='crud_certificate', - role='potato') - url = reverse_lazy('admissions:academy_schedule') - schedule_type = random.choice(['PART-TIME', 'FULL-TIME']) + model = self.generate_models( + authenticate=True, profile_academy=True, syllabus=True, capability="crud_certificate", role="potato" + ) + url = reverse_lazy("admissions:academy_schedule") + schedule_type = random.choice(["PART-TIME", "FULL-TIME"]) data = { - 'academy': 1, - 'syllabus': 1, - 'name': 'They killed kenny', - 'description': 'Oh my god!', - 'schedule_type': schedule_type, + "academy": 1, + "syllabus": 1, + "name": "They killed kenny", + "description": "Oh my god!", + "schedule_type": schedule_type, } - response = self.client.post(url, data, format='json') + response = self.client.post(url, data, format="json") json = response.json() - self.assertDatetime(json['created_at']) - del json['created_at'] + self.assertDatetime(json["created_at"]) + del json["created_at"] - self.assertDatetime(json['updated_at']) - del json['updated_at'] + self.assertDatetime(json["updated_at"]) + del json["updated_at"] expected = { - 'id': 1, - 'schedule_type': 'PART-TIME', - 'syllabus': 1, + "id": 1, + "schedule_type": "PART-TIME", + "syllabus": 1, **data, - 'schedule_type': schedule_type, + "schedule_type": schedule_type, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - self.assertEqual(self.all_syllabus_schedule_dict(), [{ - 'id': 1, - 'name': 'They killed kenny', - 'description': 'Oh my god!', - 'schedule_type': 'PART-TIME', - 'syllabus_id': 1, - 'academy_id': 1, - 'schedule_type': schedule_type, - }]) + self.assertEqual( + self.all_syllabus_schedule_dict(), + [ + { + "id": 1, + "name": "They killed kenny", + "description": "Oh my god!", + "schedule_type": "PART-TIME", + "syllabus_id": 1, + "academy_id": 1, + "schedule_type": schedule_type, + } + ], + ) diff --git a/breathecode/admissions/tests/urls/tests_academy_schedule_id.py b/breathecode/admissions/tests/urls/tests_academy_schedule_id.py index 2c99ccc33..2d3f0ce5d 100644 --- a/breathecode/admissions/tests/urls/tests_academy_schedule_id.py +++ b/breathecode/admissions/tests/urls/tests_academy_schedule_id.py @@ -1,6 +1,7 @@ """ Test /certificate """ + from random import choice, randint import random from django.urls.base import reverse_lazy @@ -15,27 +16,27 @@ class CertificateTestSuite(AdmissionsTestCase): def test_academy_schedule_id__without_auth(self): """Test /certificate without auth""" - url = reverse_lazy('admissions:academy_schedule_id', kwargs={'certificate_id': 1}) + url = reverse_lazy("admissions:academy_schedule_id", kwargs={"certificate_id": 1}) response = self.client.put(url) json = response.json() - self.assertEqual(json, { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED - }) + self.assertEqual( + json, + {"detail": "Authentication credentials were not provided.", "status_code": status.HTTP_401_UNAUTHORIZED}, + ) 
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) self.assertEqual(self.all_syllabus_schedule_dict(), []) def test_academy_schedule_id__without_capability(self): """Test /certificate without auth""" self.headers(academy=1) - url = reverse_lazy('admissions:academy_schedule_id', kwargs={'certificate_id': 1}) + url = reverse_lazy("admissions:academy_schedule_id", kwargs={"certificate_id": 1}) self.generate_models(authenticate=True) response = self.client.put(url) json = response.json() expected = { - 'status_code': 403, - 'detail': "You (user: 1) don't have this capability: crud_certificate for academy 1" + "status_code": 403, + "detail": "You (user: 1) don't have this capability: crud_certificate for academy 1", } self.assertEqual(json, expected) @@ -49,16 +50,15 @@ def test_academy_schedule_id__without_capability(self): def test_academy_schedule_id__not_found(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_certificate', - role='potato') - url = reverse_lazy('admissions:academy_schedule_id', kwargs={'certificate_id': 1}) + model = self.generate_models( + authenticate=True, profile_academy=True, capability="crud_certificate", role="potato" + ) + url = reverse_lazy("admissions:academy_schedule_id", kwargs={"certificate_id": 1}) response = self.client.put(url) json = response.json() expected = { - 'detail': 'specialty-mode-not-found', - 'status_code': 404, + "detail": "specialty-mode-not-found", + "status_code": 404, } self.assertEqual(json, expected) @@ -69,163 +69,198 @@ def test_academy_schedule_id__schedule_of_other_academy(self): """Test /certificate without auth""" self.headers(academy=1) - syllabus_schedule = {'academy_id': 2} - model = self.generate_models(authenticate=1, - syllabus_schedule=syllabus_schedule, - academy=2, - profile_academy=1, - capability='crud_certificate', - role='potato') + syllabus_schedule = {"academy_id": 2} + model = self.generate_models( + authenticate=1, + syllabus_schedule=syllabus_schedule, + academy=2, + profile_academy=1, + capability="crud_certificate", + role="potato", + ) - url = reverse_lazy('admissions:academy_schedule_id', kwargs={'certificate_id': 1}) + url = reverse_lazy("admissions:academy_schedule_id", kwargs={"certificate_id": 1}) data = { - 'slug': 'they-killed-kenny', - 'name': 'They killed kenny', - 'description': 'Oh my god!', - 'syllabus': 2, + "slug": "they-killed-kenny", + "name": "They killed kenny", + "description": "Oh my god!", + "syllabus": 2, } - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() - expected = {'detail': 'syllabus-schedule-of-other-academy', 'status_code': 404} + expected = {"detail": "syllabus-schedule-of-other-academy", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - self.assertEqual(self.all_syllabus_schedule_dict(), [{ - **self.model_to_dict(model, 'syllabus_schedule'), - }]) + self.assertEqual( + self.all_syllabus_schedule_dict(), + [ + { + **self.model_to_dict(model, "syllabus_schedule"), + } + ], + ) def test_academy_schedule_id__bad_syllabus(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=1, - syllabus_schedule=1, - academy=1, - profile_academy=1, - capability='crud_certificate', - role='potato') - url = reverse_lazy('admissions:academy_schedule_id', 
kwargs={'certificate_id': 1}) + model = self.generate_models( + authenticate=1, + syllabus_schedule=1, + academy=1, + profile_academy=1, + capability="crud_certificate", + role="potato", + ) + url = reverse_lazy("admissions:academy_schedule_id", kwargs={"certificate_id": 1}) data = { - 'slug': 'they-killed-kenny', - 'name': 'They killed kenny', - 'description': 'Oh my god!', - 'syllabus': 2, + "slug": "they-killed-kenny", + "name": "They killed kenny", + "description": "Oh my god!", + "syllabus": 2, } - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() - expected = {'detail': 'syllabus-not-found', 'status_code': 404} + expected = {"detail": "syllabus-not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - self.assertEqual(self.all_syllabus_schedule_dict(), [{ - **self.model_to_dict(model, 'syllabus_schedule'), - }]) + self.assertEqual( + self.all_syllabus_schedule_dict(), + [ + { + **self.model_to_dict(model, "syllabus_schedule"), + } + ], + ) def test_academy_schedule_id(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - syllabus_schedule=True, - profile_academy=True, - capability='crud_certificate', - role='potato') - url = reverse_lazy('admissions:academy_schedule_id', kwargs={'certificate_id': 1}) - data = {'name': 'They killed kenny', 'description': 'Oh my god!'} - response = self.client.put(url, data, format='json') + model = self.generate_models( + authenticate=True, + syllabus_schedule=True, + profile_academy=True, + capability="crud_certificate", + role="potato", + ) + url = reverse_lazy("admissions:academy_schedule_id", kwargs={"certificate_id": 1}) + data = {"name": "They killed kenny", "description": "Oh my god!"} + response = self.client.put(url, data, format="json") json = response.json() - self.assertDatetime(json['updated_at']) - del json['updated_at'] + self.assertDatetime(json["updated_at"]) + del json["updated_at"] expected = { - 'created_at': self.datetime_to_iso(model.syllabus_schedule.created_at), - 'id': model.syllabus_schedule.id, - 'schedule_type': model.syllabus_schedule.schedule_type, - 'syllabus': model.syllabus_schedule.syllabus, - 'academy': model.syllabus_schedule.academy.id, + "created_at": self.datetime_to_iso(model.syllabus_schedule.created_at), + "id": model.syllabus_schedule.id, + "schedule_type": model.syllabus_schedule.schedule_type, + "syllabus": model.syllabus_schedule.syllabus, + "academy": model.syllabus_schedule.academy.id, **data, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_syllabus_schedule_dict(), [{ - **self.model_to_dict(model, 'syllabus_schedule'), - **data, - }]) + self.assertEqual( + self.all_syllabus_schedule_dict(), + [ + { + **self.model_to_dict(model, "syllabus_schedule"), + **data, + } + ], + ) def test_academy_schedule_id__passing_all_status__in_lowercase(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - syllabus_schedule=True, - profile_academy=True, - capability='crud_certificate', - role='potato') - url = reverse_lazy('admissions:academy_schedule_id', kwargs={'certificate_id': 1}) - schedule_type = random.choice(['PART-TIME', 'FULL-TIME']) + model = self.generate_models( + authenticate=True, + syllabus_schedule=True, + profile_academy=True, + 
capability="crud_certificate", + role="potato", + ) + url = reverse_lazy("admissions:academy_schedule_id", kwargs={"certificate_id": 1}) + schedule_type = random.choice(["PART-TIME", "FULL-TIME"]) data = { - 'name': 'They killed kenny', - 'description': 'Oh my god!', - 'schedule_type': schedule_type.lower(), + "name": "They killed kenny", + "description": "Oh my god!", + "schedule_type": schedule_type.lower(), } - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() - self.assertDatetime(json['updated_at']) - del json['updated_at'] + self.assertDatetime(json["updated_at"]) + del json["updated_at"] expected = { - 'created_at': self.datetime_to_iso(model.syllabus_schedule.created_at), - 'id': model.syllabus_schedule.id, - 'schedule_type': model.syllabus_schedule.schedule_type, - 'syllabus': model.syllabus_schedule.syllabus, - 'academy': model.syllabus_schedule.academy.id, + "created_at": self.datetime_to_iso(model.syllabus_schedule.created_at), + "id": model.syllabus_schedule.id, + "schedule_type": model.syllabus_schedule.schedule_type, + "syllabus": model.syllabus_schedule.syllabus, + "academy": model.syllabus_schedule.academy.id, **data, - 'schedule_type': schedule_type, + "schedule_type": schedule_type, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_syllabus_schedule_dict(), [{ - **self.model_to_dict(model, 'syllabus_schedule'), - **data, - 'schedule_type': schedule_type, - }]) + self.assertEqual( + self.all_syllabus_schedule_dict(), + [ + { + **self.model_to_dict(model, "syllabus_schedule"), + **data, + "schedule_type": schedule_type, + } + ], + ) def test_academy_schedule_id__passing_all_status__in_uppercase(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - syllabus_schedule=True, - profile_academy=True, - capability='crud_certificate', - role='potato') - url = reverse_lazy('admissions:academy_schedule_id', kwargs={'certificate_id': 1}) - schedule_type = random.choice(['PART-TIME', 'FULL-TIME']) + model = self.generate_models( + authenticate=True, + syllabus_schedule=True, + profile_academy=True, + capability="crud_certificate", + role="potato", + ) + url = reverse_lazy("admissions:academy_schedule_id", kwargs={"certificate_id": 1}) + schedule_type = random.choice(["PART-TIME", "FULL-TIME"]) data = { - 'name': 'They killed kenny', - 'description': 'Oh my god!', - 'schedule_type': schedule_type, + "name": "They killed kenny", + "description": "Oh my god!", + "schedule_type": schedule_type, } - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() - self.assertDatetime(json['updated_at']) - del json['updated_at'] + self.assertDatetime(json["updated_at"]) + del json["updated_at"] expected = { - 'created_at': self.datetime_to_iso(model.syllabus_schedule.created_at), - 'id': model.syllabus_schedule.id, - 'schedule_type': model.syllabus_schedule.schedule_type, - 'syllabus': model.syllabus_schedule.syllabus, - 'academy': model.syllabus_schedule.academy.id, + "created_at": self.datetime_to_iso(model.syllabus_schedule.created_at), + "id": model.syllabus_schedule.id, + "schedule_type": model.syllabus_schedule.schedule_type, + "syllabus": model.syllabus_schedule.syllabus, + "academy": model.syllabus_schedule.academy.id, **data, - 'schedule_type': schedule_type, + "schedule_type": schedule_type, } 
self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_syllabus_schedule_dict(), [{ - **self.model_to_dict(model, 'syllabus_schedule'), - **data, - 'schedule_type': schedule_type, - }]) + self.assertEqual( + self.all_syllabus_schedule_dict(), + [ + { + **self.model_to_dict(model, "syllabus_schedule"), + **data, + "schedule_type": schedule_type, + } + ], + ) diff --git a/breathecode/admissions/tests/urls/tests_academy_schedule_id_timeslot.py b/breathecode/admissions/tests/urls/tests_academy_schedule_id_timeslot.py index 9db9a6c2e..7fa87ff6a 100644 --- a/breathecode/admissions/tests/urls/tests_academy_schedule_id_timeslot.py +++ b/breathecode/admissions/tests/urls/tests_academy_schedule_id_timeslot.py @@ -1,6 +1,7 @@ """ Test /cohort/user """ + import random from django.urls.base import reverse_lazy from rest_framework import status @@ -10,60 +11,67 @@ def get_serializer(self, syllabus_schedule_time_slot): return { - 'id': syllabus_schedule_time_slot.id, - 'schedule': syllabus_schedule_time_slot.schedule.id, - 'starting_at': self.integer_to_iso(syllabus_schedule_time_slot.timezone, - syllabus_schedule_time_slot.starting_at), - 'ending_at': self.integer_to_iso(syllabus_schedule_time_slot.timezone, syllabus_schedule_time_slot.ending_at), - 'recurrent': syllabus_schedule_time_slot.recurrent, - 'recurrency_type': syllabus_schedule_time_slot.recurrency_type, - 'created_at': self.datetime_to_iso(syllabus_schedule_time_slot.created_at), - 'updated_at': self.datetime_to_iso(syllabus_schedule_time_slot.updated_at), + "id": syllabus_schedule_time_slot.id, + "schedule": syllabus_schedule_time_slot.schedule.id, + "starting_at": self.integer_to_iso( + syllabus_schedule_time_slot.timezone, syllabus_schedule_time_slot.starting_at + ), + "ending_at": self.integer_to_iso(syllabus_schedule_time_slot.timezone, syllabus_schedule_time_slot.ending_at), + "recurrent": syllabus_schedule_time_slot.recurrent, + "recurrency_type": syllabus_schedule_time_slot.recurrency_type, + "created_at": self.datetime_to_iso(syllabus_schedule_time_slot.created_at), + "updated_at": self.datetime_to_iso(syllabus_schedule_time_slot.updated_at), } class CohortUserTestSuite(AdmissionsTestCase): """Test /cohort/user""" + """ 🔽🔽🔽 Auth """ def test__without_auth(self): - url = reverse_lazy('admissions:academy_schedule_id_timeslot', kwargs={'certificate_id': 1}) + url = reverse_lazy("admissions:academy_schedule_id_timeslot", kwargs={"certificate_id": 1}) response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED - }) + self.assertEqual( + json, + {"detail": "Authentication credentials were not provided.", "status_code": status.HTTP_401_UNAUTHORIZED}, + ) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) def test__without_academy_header(self): model = self.generate_models(authenticate=True) - url = reverse_lazy('admissions:academy_schedule_id_timeslot', kwargs={'certificate_id': 1}) + url = reverse_lazy("admissions:academy_schedule_id_timeslot", kwargs={"certificate_id": 1}) response = self.client.get(url) json = response.json() self.assertEqual( - json, { - 'detail': "Missing academy_id parameter expected for the endpoint url or 'Academy' header", - 'status_code': 403, - }) + json, + { + "detail": "Missing academy_id parameter expected for the endpoint url or 'Academy' header", + "status_code": 403, + }, + ) 
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) self.assertEqual(self.all_syllabus_schedule_time_slot_dict(), []) def test__without_capabilities(self): self.headers(academy=1) model = self.generate_models(authenticate=True) - url = reverse_lazy('admissions:academy_schedule_id_timeslot', kwargs={'certificate_id': 1}) + url = reverse_lazy("admissions:academy_schedule_id_timeslot", kwargs={"certificate_id": 1}) response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': "You (user: 1) don't have this capability: read_certificate for academy 1", - 'status_code': 403, - }) + self.assertEqual( + json, + { + "detail": "You (user: 1) don't have this capability: read_certificate for academy 1", + "status_code": 403, + }, + ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) self.assertEqual(self.all_syllabus_schedule_time_slot_dict(), []) @@ -73,12 +81,14 @@ def test__without_capabilities(self): def test__without_data(self): self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_certificate', - role='potato', - syllabus_schedule=True) - url = reverse_lazy('admissions:academy_schedule_id_timeslot', kwargs={'certificate_id': 1}) + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="read_certificate", + role="potato", + syllabus_schedule=True, + ) + url = reverse_lazy("admissions:academy_schedule_id_timeslot", kwargs={"certificate_id": 1}) response = self.client.get(url) json = response.json() @@ -92,21 +102,28 @@ def test__without_data(self): def test__with_data(self): self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_certificate', - role='potato', - syllabus_schedule_time_slot=True) - url = reverse_lazy('admissions:academy_schedule_id_timeslot', kwargs={'certificate_id': 1}) + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="read_certificate", + role="potato", + syllabus_schedule_time_slot=True, + ) + url = reverse_lazy("admissions:academy_schedule_id_timeslot", kwargs={"certificate_id": 1}) response = self.client.get(url) json = response.json() expected = [get_serializer(self, model.syllabus_schedule_time_slot)] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_syllabus_schedule_time_slot_dict(), [{ - **self.model_to_dict(model, 'syllabus_schedule_time_slot'), - }]) + self.assertEqual( + self.all_syllabus_schedule_time_slot_dict(), + [ + { + **self.model_to_dict(model, "syllabus_schedule_time_slot"), + } + ], + ) """ 🔽🔽🔽 recurrency_type in querystring @@ -114,34 +131,46 @@ def test__with_data(self): def test__recurrency_type_in_querystring__not_found(self): self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_certificate', - role='potato', - syllabus_schedule_time_slot=True) - url = (reverse_lazy('admissions:academy_schedule_id_timeslot', kwargs={'certificate_id': 1}) + - f'?recurrency_type=asdasdasd') + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="read_certificate", + role="potato", + syllabus_schedule_time_slot=True, + ) + url = ( + reverse_lazy("admissions:academy_schedule_id_timeslot", kwargs={"certificate_id": 1}) + + f"?recurrency_type=asdasdasd" + ) response = self.client.get(url) json = response.json() expected = [] self.assertEqual(json, 
expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_syllabus_schedule_time_slot_dict(), [{ - **self.model_to_dict(model, 'syllabus_schedule_time_slot'), - }]) + self.assertEqual( + self.all_syllabus_schedule_time_slot_dict(), + [ + { + **self.model_to_dict(model, "syllabus_schedule_time_slot"), + } + ], + ) def test__recurrency_type_in_querystring__found(self): - statuses = ['DAILY', 'WEEKLY', 'MONTHLY'] - cases = [(x, x, random.choice([y for y in statuses if x != y])) - for x in statuses] + [(x, x.lower(), random.choice([y for y in statuses if x != y])) for x in statuses] + statuses = ["DAILY", "WEEKLY", "MONTHLY"] + cases = [(x, x, random.choice([y for y in statuses if x != y])) for x in statuses] + [ + (x, x.lower(), random.choice([y for y in statuses if x != y])) for x in statuses + ] self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_certificate', - role='potato', - syllabus_schedule_time_slot=3) + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="read_certificate", + role="potato", + syllabus_schedule_time_slot=3, + ) for current, query, bad_status in cases: model.syllabus_schedule_time_slot[0].recurrency_type = current @@ -153,8 +182,10 @@ def test__recurrency_type_in_querystring__found(self): model.syllabus_schedule_time_slot[2].recurrency_type = bad_status model.syllabus_schedule_time_slot[2].save() - url = (reverse_lazy('admissions:academy_schedule_id_timeslot', kwargs={'certificate_id': 1}) + - f'?recurrency_type={query}') + url = ( + reverse_lazy("admissions:academy_schedule_id_timeslot", kwargs={"certificate_id": 1}) + + f"?recurrency_type={query}" + ) response = self.client.get(url) json = response.json() @@ -165,20 +196,23 @@ def test__recurrency_type_in_querystring__found(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_syllabus_schedule_time_slot_dict(), [ - { - **self.bc.format.to_dict(model.syllabus_schedule_time_slot[0]), - 'recurrency_type': current, - }, - { - **self.bc.format.to_dict(model.syllabus_schedule_time_slot[1]), - 'recurrency_type': current, - }, - { - **self.bc.format.to_dict(model.syllabus_schedule_time_slot[2]), - 'recurrency_type': bad_status, - }, - ]) + self.assertEqual( + self.all_syllabus_schedule_time_slot_dict(), + [ + { + **self.bc.format.to_dict(model.syllabus_schedule_time_slot[0]), + "recurrency_type": current, + }, + { + **self.bc.format.to_dict(model.syllabus_schedule_time_slot[1]), + "recurrency_type": current, + }, + { + **self.bc.format.to_dict(model.syllabus_schedule_time_slot[2]), + "recurrency_type": bad_status, + }, + ], + ) """ 🔽🔽🔽 Post @@ -186,17 +220,16 @@ def test__recurrency_type_in_querystring__found(self): def test__post__without_academy_certificate(self): self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_certificate', - role='potato') - url = reverse_lazy('admissions:academy_schedule_id_timeslot', kwargs={'certificate_id': 1}) + model = self.generate_models( + authenticate=True, profile_academy=True, capability="crud_certificate", role="potato" + ) + url = reverse_lazy("admissions:academy_schedule_id_timeslot", kwargs={"certificate_id": 1}) data = {} - response = self.client.post(url, data, format='json') + response = self.client.post(url, data, format="json") json = response.json() expected = { - 'detail': 'certificate-not-found', - 
'status_code': 404, + "detail": "certificate-not-found", + "status_code": 404, } self.assertEqual(json, expected) @@ -205,17 +238,19 @@ def test__post__without_academy_certificate(self): def test__post__without_timezone(self): self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_certificate', - role='potato', - syllabus=True, - syllabus_schedule=True) - url = reverse_lazy('admissions:academy_schedule_id_timeslot', kwargs={'certificate_id': 1}) + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="crud_certificate", + role="potato", + syllabus=True, + syllabus_schedule=True, + ) + url = reverse_lazy("admissions:academy_schedule_id_timeslot", kwargs={"certificate_id": 1}) data = {} - response = self.client.post(url, data, format='json') + response = self.client.post(url, data, format="json") json = response.json() - expected = {'detail': 'academy-without-timezone', 'status_code': 400} + expected = {"detail": "academy-without-timezone", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -223,21 +258,23 @@ def test__post__without_timezone(self): def test__post__without_ending_at_and_starting_at(self): self.headers(academy=1) - academy_kwargs = {'timezone': 'America/Caracas'} - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_certificate', - role='potato', - syllabus=True, - syllabus_schedule=True, - academy_kwargs=academy_kwargs) - url = reverse_lazy('admissions:academy_schedule_id_timeslot', kwargs={'certificate_id': 1}) + academy_kwargs = {"timezone": "America/Caracas"} + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="crud_certificate", + role="potato", + syllabus=True, + syllabus_schedule=True, + academy_kwargs=academy_kwargs, + ) + url = reverse_lazy("admissions:academy_schedule_id_timeslot", kwargs={"certificate_id": 1}) data = {} - response = self.client.post(url, data, format='json') + response = self.client.post(url, data, format="json") json = response.json() expected = { - 'ending_at': ['This field is required.'], - 'starting_at': ['This field is required.'], + "ending_at": ["This field is required."], + "starting_at": ["This field is required."], } self.assertEqual(json, expected) @@ -246,94 +283,102 @@ def test__post__without_ending_at_and_starting_at(self): def test__post__passing_all_status__in_lowercase(self): self.headers(academy=1) - academy_kwargs = {'timezone': 'America/Caracas'} - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_certificate', - role='potato', - syllabus=True, - syllabus_schedule=True, - academy_kwargs=academy_kwargs) - url = reverse_lazy('admissions:academy_schedule_id_timeslot', kwargs={'certificate_id': 1}) + academy_kwargs = {"timezone": "America/Caracas"} + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="crud_certificate", + role="potato", + syllabus=True, + syllabus_schedule=True, + academy_kwargs=academy_kwargs, + ) + url = reverse_lazy("admissions:academy_schedule_id_timeslot", kwargs={"certificate_id": 1}) starting_at = self.datetime_now() ending_at = self.datetime_now() - recurrency_type = random.choice(['DAILY', 'WEEKLY', 'MONTHLY']) + recurrency_type = random.choice(["DAILY", "WEEKLY", "MONTHLY"]) data = { - 'ending_at': self.datetime_to_iso(ending_at), - 'starting_at': self.datetime_to_iso(starting_at), - 
'recurrency_type': recurrency_type.lower(), + "ending_at": self.datetime_to_iso(ending_at), + "starting_at": self.datetime_to_iso(starting_at), + "recurrency_type": recurrency_type.lower(), } - response = self.client.post(url, data, format='json') + response = self.client.post(url, data, format="json") json = response.json() expected = { - 'schedule': 1, - 'id': 1, - 'recurrent': True, - 'timezone': model.academy.timezone, - 'recurrency_type': recurrency_type, + "schedule": 1, + "id": 1, + "recurrent": True, + "timezone": model.academy.timezone, + "recurrency_type": recurrency_type, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) self.assertEqual( self.all_syllabus_schedule_time_slot_dict(), - [{ - 'schedule_id': 1, - 'removed_at': None, - 'ending_at': DatetimeInteger.from_datetime(model.academy.timezone, ending_at), - 'id': 1, - 'recurrent': True, - 'starting_at': DatetimeInteger.from_datetime(model.academy.timezone, starting_at), - 'timezone': model.academy.timezone, - 'recurrency_type': recurrency_type, - }], + [ + { + "schedule_id": 1, + "removed_at": None, + "ending_at": DatetimeInteger.from_datetime(model.academy.timezone, ending_at), + "id": 1, + "recurrent": True, + "starting_at": DatetimeInteger.from_datetime(model.academy.timezone, starting_at), + "timezone": model.academy.timezone, + "recurrency_type": recurrency_type, + } + ], ) def test__post__passing_all_status__in_uppercase(self): self.headers(academy=1) - academy_kwargs = {'timezone': 'America/Caracas'} - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_certificate', - role='potato', - syllabus=True, - syllabus_schedule=True, - academy_kwargs=academy_kwargs) - url = reverse_lazy('admissions:academy_schedule_id_timeslot', kwargs={'certificate_id': 1}) + academy_kwargs = {"timezone": "America/Caracas"} + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="crud_certificate", + role="potato", + syllabus=True, + syllabus_schedule=True, + academy_kwargs=academy_kwargs, + ) + url = reverse_lazy("admissions:academy_schedule_id_timeslot", kwargs={"certificate_id": 1}) starting_at = self.datetime_now() ending_at = self.datetime_now() - recurrency_type = random.choice(['DAILY', 'WEEKLY', 'MONTHLY']) + recurrency_type = random.choice(["DAILY", "WEEKLY", "MONTHLY"]) data = { - 'ending_at': self.datetime_to_iso(ending_at), - 'starting_at': self.datetime_to_iso(starting_at), - 'recurrency_type': recurrency_type, + "ending_at": self.datetime_to_iso(ending_at), + "starting_at": self.datetime_to_iso(starting_at), + "recurrency_type": recurrency_type, } - response = self.client.post(url, data, format='json') + response = self.client.post(url, data, format="json") json = response.json() expected = { - 'schedule': 1, - 'id': 1, - 'recurrent': True, - 'timezone': model.academy.timezone, - 'recurrency_type': recurrency_type, + "schedule": 1, + "id": 1, + "recurrent": True, + "timezone": model.academy.timezone, + "recurrency_type": recurrency_type, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) self.assertEqual( self.all_syllabus_schedule_time_slot_dict(), - [{ - 'schedule_id': 1, - 'removed_at': None, - 'ending_at': DatetimeInteger.from_datetime(model.academy.timezone, ending_at), - 'id': 1, - 'recurrent': True, - 'starting_at': DatetimeInteger.from_datetime(model.academy.timezone, starting_at), - 'timezone': model.academy.timezone, - 'recurrency_type': 
recurrency_type, - }], + [ + { + "schedule_id": 1, + "removed_at": None, + "ending_at": DatetimeInteger.from_datetime(model.academy.timezone, ending_at), + "id": 1, + "recurrent": True, + "starting_at": DatetimeInteger.from_datetime(model.academy.timezone, starting_at), + "timezone": model.academy.timezone, + "recurrency_type": recurrency_type, + } + ], ) diff --git a/breathecode/admissions/tests/urls/tests_academy_schedule_id_timeslot_id.py b/breathecode/admissions/tests/urls/tests_academy_schedule_id_timeslot_id.py index 91981dd0c..09403ecb0 100644 --- a/breathecode/admissions/tests/urls/tests_academy_schedule_id_timeslot_id.py +++ b/breathecode/admissions/tests/urls/tests_academy_schedule_id_timeslot_id.py @@ -1,6 +1,7 @@ """ Test /cohort/user """ + import random import pytz from datetime import datetime, date, time @@ -13,46 +14,52 @@ class CohortUserTestSuite(AdmissionsTestCase): """Test /cohort/user""" + """ 🔽🔽🔽 Auth """ def test_schedule_time_slot__without_auth(self): - url = reverse_lazy('admissions:academy_schedule_id_timeslot_id', kwargs={'certificate_id': 1, 'timeslot_id': 1}) + url = reverse_lazy("admissions:academy_schedule_id_timeslot_id", kwargs={"certificate_id": 1, "timeslot_id": 1}) response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED - }) + self.assertEqual( + json, + {"detail": "Authentication credentials were not provided.", "status_code": status.HTTP_401_UNAUTHORIZED}, + ) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) def test_schedule_time_slot__without_academy_header(self): model = self.generate_models(authenticate=True) - url = reverse_lazy('admissions:academy_schedule_id_timeslot_id', kwargs={'certificate_id': 1, 'timeslot_id': 1}) + url = reverse_lazy("admissions:academy_schedule_id_timeslot_id", kwargs={"certificate_id": 1, "timeslot_id": 1}) response = self.client.get(url) json = response.json() self.assertEqual( - json, { - 'detail': "Missing academy_id parameter expected for the endpoint url or 'Academy' header", - 'status_code': 403, - }) + json, + { + "detail": "Missing academy_id parameter expected for the endpoint url or 'Academy' header", + "status_code": 403, + }, + ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) self.assertEqual(self.all_syllabus_schedule_time_slot_dict(), []) def test_schedule_time_slot__without_capabilities(self): self.headers(academy=1) model = self.generate_models(authenticate=True) - url = reverse_lazy('admissions:academy_schedule_id_timeslot_id', kwargs={'certificate_id': 1, 'timeslot_id': 1}) + url = reverse_lazy("admissions:academy_schedule_id_timeslot_id", kwargs={"certificate_id": 1, "timeslot_id": 1}) response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': "You (user: 1) don't have this capability: read_certificate for academy 1", - 'status_code': 403, - }) + self.assertEqual( + json, + { + "detail": "You (user: 1) don't have this capability: read_certificate for academy 1", + "status_code": 403, + }, + ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) self.assertEqual(self.all_syllabus_schedule_time_slot_dict(), []) @@ -62,16 +69,15 @@ def test_schedule_time_slot__without_capabilities(self): def test_schedule_time_slot__without_data(self): self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_certificate', - role='potato') - url = 
reverse_lazy('admissions:academy_schedule_id_timeslot_id', kwargs={'certificate_id': 1, 'timeslot_id': 1}) + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_certificate", role="potato" + ) + url = reverse_lazy("admissions:academy_schedule_id_timeslot_id", kwargs={"certificate_id": 1, "timeslot_id": 1}) response = self.client.get(url) json = response.json() expected = { - 'detail': 'time-slot-not-found', - 'status_code': 404, + "detail": "time-slot-not-found", + "status_code": 404, } self.assertEqual(json, expected) @@ -86,42 +92,50 @@ def test_schedule_time_slot__with_data(self): self.headers(academy=1) date = 202310301330 - iso_string = datetime(2023, 10, 30, 13, 30, tzinfo=gettz('Europe/Amsterdam')).astimezone( - pytz.UTC).isoformat()[:-6] + 'Z' + iso_string = ( + datetime(2023, 10, 30, 13, 30, tzinfo=gettz("Europe/Amsterdam")).astimezone(pytz.UTC).isoformat()[:-6] + "Z" + ) schedule_time_slot_kwargs = { - 'starting_at': date, - 'ending_at': date, - 'timezone': 'Europe/Amsterdam', + "starting_at": date, + "ending_at": date, + "timezone": "Europe/Amsterdam", } - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_certificate', - role='potato', - syllabus_schedule=True, - syllabus_schedule_time_slot=True, - syllabus_schedule_time_slot_kwargs=schedule_time_slot_kwargs) - - url = reverse_lazy('admissions:academy_schedule_id_timeslot_id', kwargs={'certificate_id': 1, 'timeslot_id': 1}) + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="read_certificate", + role="potato", + syllabus_schedule=True, + syllabus_schedule_time_slot=True, + syllabus_schedule_time_slot_kwargs=schedule_time_slot_kwargs, + ) + + url = reverse_lazy("admissions:academy_schedule_id_timeslot_id", kwargs={"certificate_id": 1, "timeslot_id": 1}) response = self.client.get(url) json = response.json() expected = { - 'id': model.syllabus_schedule_time_slot.id, - 'schedule': model.syllabus_schedule_time_slot.schedule.id, - 'starting_at': iso_string, - 'ending_at': iso_string, - 'recurrent': model.syllabus_schedule_time_slot.recurrent, - 'recurrency_type': model.syllabus_schedule_time_slot.recurrency_type, - 'created_at': self.datetime_to_iso(model.syllabus_schedule_time_slot.created_at), - 'updated_at': self.datetime_to_iso(model.syllabus_schedule_time_slot.updated_at), + "id": model.syllabus_schedule_time_slot.id, + "schedule": model.syllabus_schedule_time_slot.schedule.id, + "starting_at": iso_string, + "ending_at": iso_string, + "recurrent": model.syllabus_schedule_time_slot.recurrent, + "recurrency_type": model.syllabus_schedule_time_slot.recurrency_type, + "created_at": self.datetime_to_iso(model.syllabus_schedule_time_slot.created_at), + "updated_at": self.datetime_to_iso(model.syllabus_schedule_time_slot.updated_at), } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_syllabus_schedule_time_slot_dict(), [{ - **self.model_to_dict(model, 'syllabus_schedule_time_slot'), - }]) + self.assertEqual( + self.all_syllabus_schedule_time_slot_dict(), + [ + { + **self.model_to_dict(model, "syllabus_schedule_time_slot"), + } + ], + ) # assert False """ @@ -130,17 +144,16 @@ def test_schedule_time_slot__with_data(self): def test_schedule_time_slot__put__without_academy_certificate(self): self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_certificate', - role='potato') - url = 
reverse_lazy('admissions:academy_schedule_id_timeslot_id', kwargs={'certificate_id': 1, 'timeslot_id': 1}) + model = self.generate_models( + authenticate=True, profile_academy=True, capability="crud_certificate", role="potato" + ) + url = reverse_lazy("admissions:academy_schedule_id_timeslot_id", kwargs={"certificate_id": 1, "timeslot_id": 1}) data = {} - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() expected = { - 'detail': 'certificate-not-found', - 'status_code': 404, + "detail": "certificate-not-found", + "status_code": 404, } self.assertEqual(json, expected) @@ -149,19 +162,21 @@ def test_schedule_time_slot__put__without_academy_certificate(self): def test_schedule_time_slot__put__without_time_slot(self): self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_certificate', - role='potato', - syllabus=True, - syllabus_schedule=True) - url = reverse_lazy('admissions:academy_schedule_id_timeslot_id', kwargs={'certificate_id': 1, 'timeslot_id': 1}) + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="crud_certificate", + role="potato", + syllabus=True, + syllabus_schedule=True, + ) + url = reverse_lazy("admissions:academy_schedule_id_timeslot_id", kwargs={"certificate_id": 1, "timeslot_id": 1}) data = {} - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() expected = { - 'detail': 'time-slot-not-found', - 'status_code': 404, + "detail": "time-slot-not-found", + "status_code": 404, } self.assertEqual(json, expected) @@ -170,144 +185,172 @@ def test_schedule_time_slot__put__without_time_slot(self): def test_schedule_time_slot__put__without_timezone(self): self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_certificate', - role='potato', - syllabus=True, - syllabus_schedule=True, - syllabus_schedule_time_slot=True) - url = reverse_lazy('admissions:academy_schedule_id_timeslot_id', kwargs={'certificate_id': 1, 'timeslot_id': 1}) + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="crud_certificate", + role="potato", + syllabus=True, + syllabus_schedule=True, + syllabus_schedule_time_slot=True, + ) + url = reverse_lazy("admissions:academy_schedule_id_timeslot_id", kwargs={"certificate_id": 1, "timeslot_id": 1}) data = {} - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() - expected = {'detail': 'academy-without-timezone', 'status_code': 400} + expected = {"detail": "academy-without-timezone", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.all_syllabus_schedule_time_slot_dict(), [{ - **self.model_to_dict(model, 'syllabus_schedule_time_slot'), - }]) + self.assertEqual( + self.all_syllabus_schedule_time_slot_dict(), + [ + { + **self.model_to_dict(model, "syllabus_schedule_time_slot"), + } + ], + ) def test_schedule_time_slot__put__without_ending_at_and_starting_at(self): self.headers(academy=1) - academy_kwargs = {'timezone': 'America/Caracas'} - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_certificate', - role='potato', - syllabus=True, - syllabus_schedule=True, - syllabus_schedule_time_slot=True, - 
academy_kwargs=academy_kwargs) - url = reverse_lazy('admissions:academy_schedule_id_timeslot_id', kwargs={'certificate_id': 1, 'timeslot_id': 1}) + academy_kwargs = {"timezone": "America/Caracas"} + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="crud_certificate", + role="potato", + syllabus=True, + syllabus_schedule=True, + syllabus_schedule_time_slot=True, + academy_kwargs=academy_kwargs, + ) + url = reverse_lazy("admissions:academy_schedule_id_timeslot_id", kwargs={"certificate_id": 1, "timeslot_id": 1}) data = {} - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() expected = { - 'ending_at': ['This field is required.'], - 'starting_at': ['This field is required.'], + "ending_at": ["This field is required."], + "starting_at": ["This field is required."], } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.all_syllabus_schedule_time_slot_dict(), [{ - **self.model_to_dict(model, 'syllabus_schedule_time_slot'), - }]) + self.assertEqual( + self.all_syllabus_schedule_time_slot_dict(), + [ + { + **self.model_to_dict(model, "syllabus_schedule_time_slot"), + } + ], + ) def test_schedule_time_slot__put__passing_all_status__in_lowercase(self): self.headers(academy=1) - academy_kwargs = {'timezone': 'Europe/Amsterdam'} + academy_kwargs = {"timezone": "Europe/Amsterdam"} date = 202310301330 - iso_string = datetime(2023, 10, 30, 13, 30, tzinfo=gettz('Europe/Amsterdam')).astimezone( - pytz.UTC).isoformat()[:-6] + 'Z' - - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_certificate', - role='potato', - syllabus=True, - syllabus_schedule_time_slot=True, - syllabus_schedule=True, - academy_kwargs=academy_kwargs) - - url = reverse_lazy('admissions:academy_schedule_id_timeslot_id', kwargs={'certificate_id': 1, 'timeslot_id': 1}) - - recurrency_type = random.choice(['DAILY', 'WEEKLY', 'MONTHLY']) + iso_string = ( + datetime(2023, 10, 30, 13, 30, tzinfo=gettz("Europe/Amsterdam")).astimezone(pytz.UTC).isoformat()[:-6] + "Z" + ) + + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="crud_certificate", + role="potato", + syllabus=True, + syllabus_schedule_time_slot=True, + syllabus_schedule=True, + academy_kwargs=academy_kwargs, + ) + + url = reverse_lazy("admissions:academy_schedule_id_timeslot_id", kwargs={"certificate_id": 1, "timeslot_id": 1}) + + recurrency_type = random.choice(["DAILY", "WEEKLY", "MONTHLY"]) data = { - 'ending_at': iso_string, - 'starting_at': iso_string, - 'recurrency_type': recurrency_type.lower(), + "ending_at": iso_string, + "starting_at": iso_string, + "recurrency_type": recurrency_type.lower(), } - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() expected = { - 'schedule': 1, - 'id': 1, - 'recurrent': True, - 'timezone': model.academy.timezone, - 'recurrency_type': recurrency_type, + "schedule": 1, + "id": 1, + "recurrent": True, + "timezone": model.academy.timezone, + "recurrency_type": recurrency_type, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_syllabus_schedule_time_slot_dict(), - [{ - **self.model_to_dict(model, 'syllabus_schedule_time_slot'), - 'ending_at': date, - 'starting_at': date, - 'timezone': model.academy.timezone, - 'recurrency_type': 
recurrency_type, - }]) + self.assertEqual( + self.all_syllabus_schedule_time_slot_dict(), + [ + { + **self.model_to_dict(model, "syllabus_schedule_time_slot"), + "ending_at": date, + "starting_at": date, + "timezone": model.academy.timezone, + "recurrency_type": recurrency_type, + } + ], + ) def test_schedule_time_slot__put__passing_all_status__in_uppercase(self): self.headers(academy=1) - academy_kwargs = {'timezone': 'Europe/Amsterdam'} + academy_kwargs = {"timezone": "Europe/Amsterdam"} date = 202310301330 - iso_string = datetime(2023, 10, 30, 13, 30, tzinfo=gettz('Europe/Amsterdam')).astimezone( - pytz.UTC).isoformat()[:-6] + 'Z' - - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_certificate', - role='potato', - syllabus=True, - syllabus_schedule_time_slot=True, - syllabus_schedule=True, - academy_kwargs=academy_kwargs) - - url = reverse_lazy('admissions:academy_schedule_id_timeslot_id', kwargs={'certificate_id': 1, 'timeslot_id': 1}) - - recurrency_type = random.choice(['DAILY', 'WEEKLY', 'MONTHLY']) + iso_string = ( + datetime(2023, 10, 30, 13, 30, tzinfo=gettz("Europe/Amsterdam")).astimezone(pytz.UTC).isoformat()[:-6] + "Z" + ) + + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="crud_certificate", + role="potato", + syllabus=True, + syllabus_schedule_time_slot=True, + syllabus_schedule=True, + academy_kwargs=academy_kwargs, + ) + + url = reverse_lazy("admissions:academy_schedule_id_timeslot_id", kwargs={"certificate_id": 1, "timeslot_id": 1}) + + recurrency_type = random.choice(["DAILY", "WEEKLY", "MONTHLY"]) data = { - 'ending_at': iso_string, - 'starting_at': iso_string, - 'recurrency_type': recurrency_type, + "ending_at": iso_string, + "starting_at": iso_string, + "recurrency_type": recurrency_type, } - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() expected = { - 'schedule': 1, - 'id': 1, - 'recurrent': True, - 'timezone': model.academy.timezone, - 'recurrency_type': recurrency_type, + "schedule": 1, + "id": 1, + "recurrent": True, + "timezone": model.academy.timezone, + "recurrency_type": recurrency_type, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_syllabus_schedule_time_slot_dict(), - [{ - **self.model_to_dict(model, 'syllabus_schedule_time_slot'), - 'ending_at': date, - 'starting_at': date, - 'timezone': model.academy.timezone, - 'recurrency_type': recurrency_type, - }]) + self.assertEqual( + self.all_syllabus_schedule_time_slot_dict(), + [ + { + **self.model_to_dict(model, "syllabus_schedule_time_slot"), + "ending_at": date, + "starting_at": date, + "timezone": model.academy.timezone, + "recurrency_type": recurrency_type, + } + ], + ) """ 🔽🔽🔽 Delete @@ -315,16 +358,15 @@ def test_schedule_time_slot__put__passing_all_status__in_uppercase(self): def test_schedule_time_slot__delete__without_time_slot(self): self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_certificate', - role='potato') - url = reverse_lazy('admissions:academy_schedule_id_timeslot_id', kwargs={'certificate_id': 1, 'timeslot_id': 1}) + model = self.generate_models( + authenticate=True, profile_academy=True, capability="crud_certificate", role="potato" + ) + url = reverse_lazy("admissions:academy_schedule_id_timeslot_id", kwargs={"certificate_id": 1, "timeslot_id": 1}) response = self.client.delete(url) 
json = response.json() expected = { - 'detail': 'time-slot-not-found', - 'status_code': 404, + "detail": "time-slot-not-found", + "status_code": 404, } self.assertEqual(json, expected) @@ -333,12 +375,14 @@ def test_schedule_time_slot__delete__without_time_slot(self): def test_schedule_time_slot__delete(self): self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_certificate', - role='potato', - syllabus_schedule_time_slot=True) - url = reverse_lazy('admissions:academy_schedule_id_timeslot_id', kwargs={'certificate_id': 1, 'timeslot_id': 1}) + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="crud_certificate", + role="potato", + syllabus_schedule_time_slot=True, + ) + url = reverse_lazy("admissions:academy_schedule_id_timeslot_id", kwargs={"certificate_id": 1, "timeslot_id": 1}) response = self.client.delete(url) self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) diff --git a/breathecode/admissions/tests/urls/tests_cohort_all.py b/breathecode/admissions/tests/urls/tests_cohort_all.py index 97dd483cb..f99396f7c 100644 --- a/breathecode/admissions/tests/urls/tests_cohort_all.py +++ b/breathecode/admissions/tests/urls/tests_cohort_all.py @@ -1,6 +1,7 @@ """ Test /cohort/all """ + import random import re from datetime import datetime, timedelta @@ -14,47 +15,48 @@ def get_serializer(cohort, syllabus, syllabus_version, data={}): return { - 'id': cohort.id, - 'distance': None, - 'slug': cohort.slug, - 'name': cohort.name, - 'never_ends': cohort.never_ends, - 'private': cohort.private, - 'kickoff_date': - re.sub(r'\+00:00$', 'Z', cohort.kickoff_date.isoformat()) if cohort.kickoff_date else cohort.kickoff_date, - 'ending_date': cohort.ending_date, - 'language': cohort.language.lower(), - 'remote_available': cohort.remote_available, - 'syllabus_version': { - 'name': syllabus.name, - 'status': syllabus_version.status, - 'slug': syllabus.slug, - 'syllabus': syllabus_version.syllabus.id, - 'version': cohort.syllabus_version.version, - 'duration_in_days': syllabus.duration_in_days, - 'duration_in_hours': syllabus.duration_in_hours, - 'github_url': syllabus.github_url, - 'logo': syllabus.logo, - 'private': syllabus.private, - 'week_hours': syllabus.week_hours, + "id": cohort.id, + "distance": None, + "slug": cohort.slug, + "name": cohort.name, + "never_ends": cohort.never_ends, + "private": cohort.private, + "kickoff_date": ( + re.sub(r"\+00:00$", "Z", cohort.kickoff_date.isoformat()) if cohort.kickoff_date else cohort.kickoff_date + ), + "ending_date": cohort.ending_date, + "language": cohort.language.lower(), + "remote_available": cohort.remote_available, + "syllabus_version": { + "name": syllabus.name, + "status": syllabus_version.status, + "slug": syllabus.slug, + "syllabus": syllabus_version.syllabus.id, + "version": cohort.syllabus_version.version, + "duration_in_days": syllabus.duration_in_days, + "duration_in_hours": syllabus.duration_in_hours, + "github_url": syllabus.github_url, + "logo": syllabus.logo, + "private": syllabus.private, + "week_hours": syllabus.week_hours, }, - 'academy': { - 'id': cohort.academy.id, - 'slug': cohort.academy.slug, - 'name': cohort.academy.name, - 'country': { - 'code': cohort.academy.country.code, - 'name': cohort.academy.country.name, + "academy": { + "id": cohort.academy.id, + "slug": cohort.academy.slug, + "name": cohort.academy.name, + "country": { + "code": cohort.academy.country.code, + "name": cohort.academy.country.name, }, - 'city': { - 
'name': cohort.academy.city.name, + "city": { + "name": cohort.academy.city.name, }, - 'logo_url': cohort.academy.logo_url, - 'is_hidden_on_prework': cohort.academy.is_hidden_on_prework + "logo_url": cohort.academy.logo_url, + "is_hidden_on_prework": cohort.academy.is_hidden_on_prework, }, - 'schedule': None, - 'timeslots': [], - 'timezone': None, + "schedule": None, + "timeslots": [], + "timezone": None, **data, } @@ -64,7 +66,7 @@ class CohortAllTestSuite(AdmissionsTestCase): def test_without_auth(self): """Test /cohort/all without auth""" - url = reverse_lazy('admissions:cohort_all') + url = reverse_lazy("admissions:cohort_all") response = self.client.get(url) json = response.json() @@ -75,7 +77,7 @@ def test_without_auth(self): def test_without_data(self): """Test /cohort/all without auth""" model = self.generate_models(authenticate=True) - url = reverse_lazy('admissions:cohort_all') + url = reverse_lazy("admissions:cohort_all") response = self.client.get(url) json = response.json() @@ -92,163 +94,159 @@ def test__with_data__with_sort(self): base = self.generate_models(authenticate=True, profile_academy=True, skip_cohort=True, syllabus_version=True) models = [self.generate_models(cohort=True, syllabus=True, models=base) for _ in range(0, 2)] - ordened_models = sorted(models, key=lambda x: x['cohort'].slug, reverse=True) + ordened_models = sorted(models, key=lambda x: x["cohort"].slug, reverse=True) - url = reverse_lazy('admissions:cohort_all') + '?sort=-slug' + url = reverse_lazy("admissions:cohort_all") + "?sort=-slug" response = self.client.get(url) json = response.json() expected = [get_serializer(model.cohort, model.syllabus, model.syllabus_version) for model in ordened_models] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), [{ - **self.model_to_dict(model, 'cohort') - } for model in models]) + self.assertEqual( + self.bc.database.list_of("admissions.Cohort"), [{**self.model_to_dict(model, "cohort")} for model in models] + ) def test_with_data_with_bad_get_academy(self): """Test /cohort/all without auth""" model = self.generate_models(authenticate=True, cohort=True, profile_academy=True) - base_url = reverse_lazy('admissions:cohort_all') - url = f'{base_url}?academy=they-killed-kenny' + base_url = reverse_lazy("admissions:cohort_all") + url = f"{base_url}?academy=they-killed-kenny" response = self.client.get(url) json = response.json() self.assertEqual(json, []) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), [{**self.model_to_dict(model, 'cohort')}]) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), [{**self.model_to_dict(model, "cohort")}]) def test_with_data_with_get_academy(self): """Test /cohort/all without auth""" model = self.generate_models(authenticate=True, cohort=True, profile_academy=True, syllabus_version=True) - base_url = reverse_lazy('admissions:cohort_all') - url = f'{base_url}?academy={model.academy.slug}' + base_url = reverse_lazy("admissions:cohort_all") + url = f"{base_url}?academy={model.academy.slug}" response = self.client.get(url) json = response.json() expected = [get_serializer(model.cohort, model.syllabus, model.syllabus_version)] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), [{**self.model_to_dict(model, 'cohort')}]) + 
self.assertEqual(self.bc.database.list_of("admissions.Cohort"), [{**self.model_to_dict(model, "cohort")}]) def test_with_data_with_bad_get_location(self): """Test /cohort/all without auth""" model = self.generate_models(authenticate=True, cohort=True, profile_academy=True) - base_url = reverse_lazy('admissions:cohort_all') - url = f'{base_url}?location=they-killed-kenny' + base_url = reverse_lazy("admissions:cohort_all") + url = f"{base_url}?location=they-killed-kenny" response = self.client.get(url) json = response.json() self.assertEqual(json, []) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), [{**self.model_to_dict(model, 'cohort')}]) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), [{**self.model_to_dict(model, "cohort")}]) def test_with_data_with_get_location(self): """Test /cohort/all without auth""" model = self.generate_models(authenticate=True, cohort=True, profile_academy=True, syllabus_version=True) - base_url = reverse_lazy('admissions:cohort_all') - url = f'{base_url}?location={model.academy.slug}' + base_url = reverse_lazy("admissions:cohort_all") + url = f"{base_url}?location={model.academy.slug}" response = self.client.get(url) json = response.json() expected = [get_serializer(model.cohort, model.syllabus, model.syllabus_version)] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), [{**self.model_to_dict(model, 'cohort')}]) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), [{**self.model_to_dict(model, "cohort")}]) def test_with_data_with_get_location_with_comma(self): """Test /cohort/all without auth""" model = self.generate_models(authenticate=True, cohort=True, profile_academy=True, syllabus_version=True) - base_url = reverse_lazy('admissions:cohort_all') - url = f'{base_url}?location={model.academy.slug},they-killed-kenny' + base_url = reverse_lazy("admissions:cohort_all") + url = f"{base_url}?location={model.academy.slug},they-killed-kenny" response = self.client.get(url) json = response.json() expected = [get_serializer(model.cohort, model.syllabus, model.syllabus_version)] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), [{**self.model_to_dict(model, 'cohort')}]) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), [{**self.model_to_dict(model, "cohort")}]) def test_with_data_with_get_upcoming_false(self): """Test /cohort/all without auth""" model = self.generate_models(authenticate=True, cohort=True, profile_academy=True, syllabus_version=True) - base_url = reverse_lazy('admissions:cohort_all') - url = f'{base_url}?upcoming=false' + base_url = reverse_lazy("admissions:cohort_all") + url = f"{base_url}?upcoming=false" response = self.client.get(url) json = response.json() expected = [get_serializer(model.cohort, model.syllabus, model.syllabus_version)] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), [{**self.model_to_dict(model, 'cohort')}]) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), [{**self.model_to_dict(model, "cohort")}]) def test_with_data_with_get_upcoming_true_without_current_data(self): """Test /cohort/all without auth""" model = self.generate_models(authenticate=True, cohort=True, 
profile_academy=True) - base_url = reverse_lazy('admissions:cohort_all') - url = f'{base_url}?upcoming=true' + base_url = reverse_lazy("admissions:cohort_all") + url = f"{base_url}?upcoming=true" response = self.client.get(url) json = response.json() expected = [] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), [{**self.model_to_dict(model, 'cohort')}]) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), [{**self.model_to_dict(model, "cohort")}]) def test_with_data_with_get_upcoming_true_with_current_data(self): """Test /cohort/all without auth""" - cohort_kwargs = {'kickoff_date': timezone.now() + timedelta(days=365 * 2000)} - model = self.generate_models(authenticate=True, - cohort=True, - profile_academy=True, - syllabus_version=True, - cohort_kwargs=cohort_kwargs) - base_url = reverse_lazy('admissions:cohort_all') - url = f'{base_url}?upcoming=true' + cohort_kwargs = {"kickoff_date": timezone.now() + timedelta(days=365 * 2000)} + model = self.generate_models( + authenticate=True, cohort=True, profile_academy=True, syllabus_version=True, cohort_kwargs=cohort_kwargs + ) + base_url = reverse_lazy("admissions:cohort_all") + url = f"{base_url}?upcoming=true" response = self.client.get(url) json = response.json() expected = [get_serializer(model.cohort, model.syllabus, model.syllabus_version)] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), [{**self.model_to_dict(model, 'cohort')}]) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), [{**self.model_to_dict(model, "cohort")}]) def test_with_data(self): """Test /cohort/all without auth""" model = self.generate_models(authenticate=True, cohort=True, profile_academy=True, syllabus_version=True) - url = reverse_lazy('admissions:cohort_all') + url = reverse_lazy("admissions:cohort_all") response = self.client.get(url) json = response.json() expected = [get_serializer(model.cohort, model.syllabus, model.syllabus_version)] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), [{**self.model_to_dict(model, 'cohort')}]) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), [{**self.model_to_dict(model, "cohort")}]) def test_with_data_but_is_private(self): """Test /cohort/all without auth""" - cohort_kwargs = {'private': True} - model = self.generate_models(authenticate=True, - cohort=True, - profile_academy=True, - syllabus=True, - cohort_kwargs=cohort_kwargs) - - url = reverse_lazy('admissions:cohort_all') + cohort_kwargs = {"private": True} + model = self.generate_models( + authenticate=True, cohort=True, profile_academy=True, syllabus=True, cohort_kwargs=cohort_kwargs + ) + + url = reverse_lazy("admissions:cohort_all") response = self.client.get(url) json = response.json() expected = [] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), [{**self.model_to_dict(model, 'cohort')}]) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), [{**self.model_to_dict(model, "cohort")}]) """ 🔽🔽🔽 Sort querystring @@ -256,17 +254,17 @@ def test_with_data_but_is_private(self): def test_with_data__cohort_with_stage_deleted(self): """Test /cohort/all without auth""" - cohort = {'stage': 
'DELETED'} + cohort = {"stage": "DELETED"} model = self.generate_models(authenticate=True, cohort=cohort, profile_academy=True, syllabus_version=True) - url = reverse_lazy('admissions:cohort_all') + '?stage=asdasdasd' + url = reverse_lazy("admissions:cohort_all") + "?stage=asdasdasd" response = self.client.get(url) json = response.json() expected = [] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), [{**self.model_to_dict(model, 'cohort')}]) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), [{**self.model_to_dict(model, "cohort")}]) """ 🔽🔽🔽 Sort querystring @@ -276,22 +274,23 @@ def test_with_data__querystring_in_stage__not_found(self): """Test /cohort/all without auth""" model = self.generate_models(authenticate=True, cohort=True, profile_academy=True, syllabus_version=True) - url = reverse_lazy('admissions:cohort_all') + '?coordinates=a' + url = reverse_lazy("admissions:cohort_all") + "?coordinates=a" response = self.client.get(url) json = response.json() - expected = {'detail': 'bad-coordinates', 'status_code': 400} + expected = {"detail": "bad-coordinates", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), [{**self.model_to_dict(model, 'cohort')}]) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), [{**self.model_to_dict(model, "cohort")}]) def test_with_data__querystring_in_stage__found(self): """Test /cohort/all without auth""" - statuses = ['INACTIVE', 'PREWORK', 'STARTED', 'FINAL_PROJECT', 'ENDED', 'DELETED'] - cases = [(x, x, random.choice([y for y in statuses if x != y])) - for x in statuses] + [(x, x.lower(), random.choice([y for y in statuses if x != y])) for x in statuses] + statuses = ["INACTIVE", "PREWORK", "STARTED", "FINAL_PROJECT", "ENDED", "DELETED"] + cases = [(x, x, random.choice([y for y in statuses if x != y])) for x in statuses] + [ + (x, x.lower(), random.choice([y for y in statuses if x != y])) for x in statuses + ] - cohorts = [{'kickoff_date': datetime.today().isoformat()} for n in range(0, 3)] + cohorts = [{"kickoff_date": datetime.today().isoformat()} for n in range(0, 3)] model = self.generate_models(authenticate=True, cohort=cohorts, profile_academy=True, syllabus_version=True) for current, query, bad_status in cases: @@ -304,26 +303,28 @@ def test_with_data__querystring_in_stage__found(self): model.cohort[2].stage = bad_status model.cohort[2].save() - url = reverse_lazy('admissions:cohort_all') + f'?stage={query}' + url = reverse_lazy("admissions:cohort_all") + f"?stage={query}" response = self.client.get(url) json = response.json() - expected = sorted([ - get_serializer(model.cohort[0], model.syllabus, model.syllabus_version), - get_serializer(model.cohort[1], model.syllabus, model.syllabus_version), - ], - key=lambda x: self.bc.datetime.from_iso_string(x['kickoff_date']), - reverse=True) + expected = sorted( + [ + get_serializer(model.cohort[0], model.syllabus, model.syllabus_version), + get_serializer(model.cohort[1], model.syllabus, model.syllabus_version), + ], + key=lambda x: self.bc.datetime.from_iso_string(x["kickoff_date"]), + reverse=True, + ) for j in json: - del j['kickoff_date'] + del j["kickoff_date"] for i in expected: - del i['kickoff_date'] - list_of_cohorts = self.bc.database.list_of('admissions.Cohort') + del i["kickoff_date"] + list_of_cohorts = 
self.bc.database.list_of("admissions.Cohort") cohorts_dict = self.bc.format.to_dict(model.cohort) for j in list_of_cohorts: - del j['kickoff_date'] + del j["kickoff_date"] for i in cohorts_dict: - del i['kickoff_date'] + del i["kickoff_date"] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( @@ -334,13 +335,13 @@ def test_with_data__querystring_in_stage__found(self): def test_with_data__distance_is_none(self): """Test /cohort/all without auth""" cases = [ - ('', None, None), - ('', 1, None), - ('', None, 1), - ('', 1, 1), - ('1,1', None, None), - ('1,1', 1, None), - ('1,1', None, 1), + ("", None, None), + ("", 1, None), + ("", None, 1), + ("", 1, 1), + ("1,1", None, None), + ("1,1", 1, None), + ("1,1", None, 1), ] model = self.generate_models(cohort=True, syllabus_version=True) @@ -349,22 +350,22 @@ def test_with_data__distance_is_none(self): model.academy.longitude = longitude model.academy.save() - url = reverse_lazy('admissions:cohort_all') + '?coordinates=' + query + url = reverse_lazy("admissions:cohort_all") + "?coordinates=" + query response = self.client.get(url) json = response.json() - expected = [get_serializer(model.cohort, model.syllabus, model.syllabus_version, data={'distance': None})] + expected = [get_serializer(model.cohort, model.syllabus, model.syllabus_version, data={"distance": None})] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), [{**self.model_to_dict(model, 'cohort')}]) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), [{**self.model_to_dict(model, "cohort")}]) def test_with_data__bad_coordinates(self): """Test /cohort/all without auth""" cases = [ - ('1', None, None), - ('1,', None, None), - ('1,a', None, None), - ('a,1', None, None), + ("1", None, None), + ("1,", None, None), + ("1,a", None, None), + ("a,1", None, None), ] model = self.generate_models(cohort=True, syllabus_version=True) @@ -373,26 +374,26 @@ def test_with_data__bad_coordinates(self): model.academy.longitude = longitude model.academy.save() - url = reverse_lazy('admissions:cohort_all') + '?coordinates=' + query + url = reverse_lazy("admissions:cohort_all") + "?coordinates=" + query response = self.client.get(url) json = response.json() - expected = {'detail': 'bad-coordinates', 'status_code': 400} + expected = {"detail": "bad-coordinates", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), [{**self.model_to_dict(model, 'cohort')}]) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), [{**self.model_to_dict(model, "cohort")}]) def test_with_data__bad_coordinates__invalid_values(self): """Test /cohort/all without auth""" cases = [ - ('91,180', None, None, 'bad-latitude'), - ('-91,-180', None, None, 'bad-latitude'), - ('91,-180', None, None, 'bad-latitude'), - ('-91,180', None, None, 'bad-latitude'), - ('90,181', None, None, 'bad-longitude'), - ('-90,-181', None, None, 'bad-longitude'), - ('90,-181', None, None, 'bad-longitude'), - ('-90,181', None, None, 'bad-longitude'), + ("91,180", None, None, "bad-latitude"), + ("-91,-180", None, None, "bad-latitude"), + ("91,-180", None, None, "bad-latitude"), + ("-91,180", None, None, "bad-latitude"), + ("90,181", None, None, "bad-longitude"), + ("-90,-181", None, None, "bad-longitude"), + ("90,-181", None, None, 
"bad-longitude"), + ("-90,181", None, None, "bad-longitude"), ] model = self.generate_models(cohort=True, syllabus_version=True) @@ -401,26 +402,26 @@ def test_with_data__bad_coordinates__invalid_values(self): model.academy.longitude = longitude model.academy.save() - url = reverse_lazy('admissions:cohort_all') + '?coordinates=' + query + url = reverse_lazy("admissions:cohort_all") + "?coordinates=" + query response = self.client.get(url) json = response.json() - expected = {'detail': error, 'status_code': 400} + expected = {"detail": error, "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), [{**self.model_to_dict(model, 'cohort')}]) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), [{**self.model_to_dict(model, "cohort")}]) def test_with_data__good_coordinates__check_same_distance_with_different_symbols(self): """Test /cohort/all without auth""" cases = [ - ('90,180', 80, 180), - ('-90,-180', -80, -180), - ('90,-180', 80, -180), - ('-90,180', -80, 180), - ('90,180', 80, 180), - ('-90,-180', -80, -180), - ('90,-180', 80, -180), - ('-90,180', -80, 180), + ("90,180", 80, 180), + ("-90,-180", -80, -180), + ("90,-180", 80, -180), + ("-90,180", -80, 180), + ("90,180", 80, 180), + ("-90,-180", -80, -180), + ("90,-180", 80, -180), + ("-90,180", -80, 180), ] model = self.generate_models(cohort=True, syllabus_version=True) @@ -429,27 +430,26 @@ def test_with_data__good_coordinates__check_same_distance_with_different_symbols model.academy.longitude = longitude model.academy.save() - url = reverse_lazy('admissions:cohort_all') + '?coordinates=' + query + url = reverse_lazy("admissions:cohort_all") + "?coordinates=" + query response = self.client.get(url) json = response.json() expected = [ - get_serializer(model.cohort, - model.syllabus, - model.syllabus_version, - data={'distance': 1111.9492664455875}) + get_serializer( + model.cohort, model.syllabus, model.syllabus_version, data={"distance": 1111.9492664455875} + ) ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), [{**self.model_to_dict(model, 'cohort')}]) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), [{**self.model_to_dict(model, "cohort")}]) def test_with_data__good_coordinates__generating_correct_distance(self): """Test /cohort/all without auth""" cases = [ - ('76,90', 76, 130, 1055.5073456754758), - ('-54,-133', -60, -99, 2136.8610368766904), - ('33,-1', 90, -33, 6338.110818739848), - ('-56,167', 43, -165, 11318.400937786448), + ("76,90", 76, 130, 1055.5073456754758), + ("-54,-133", -60, -99, 2136.8610368766904), + ("33,-1", 90, -33, 6338.110818739848), + ("-56,167", 43, -165, 11318.400937786448), ] model = self.generate_models(cohort=True, syllabus_version=True) @@ -458,16 +458,16 @@ def test_with_data__good_coordinates__generating_correct_distance(self): model.academy.longitude = longitude model.academy.save() - url = reverse_lazy('admissions:cohort_all') + '?coordinates=' + query + url = reverse_lazy("admissions:cohort_all") + "?coordinates=" + query response = self.client.get(url) json = response.json() expected = [ - get_serializer(model.cohort, model.syllabus, model.syllabus_version, data={'distance': distance}) + get_serializer(model.cohort, model.syllabus, model.syllabus_version, data={"distance": distance}) ] self.assertEqual(json, expected) 
self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), [{**self.model_to_dict(model, 'cohort')}]) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), [{**self.model_to_dict(model, "cohort")}]) def test_with_data__good_coordinates__sorting_the_distances(self): """Test /cohort/all without auth""" @@ -477,85 +477,87 @@ def test_with_data__good_coordinates__sorting_the_distances(self): distance4 = 16234.459290105573 academies = [ { - 'latitude': -60, - 'longitude': -99, + "latitude": -60, + "longitude": -99, }, { - 'latitude': 76, - 'longitude': 130, + "latitude": 76, + "longitude": 130, }, { - 'latitude': 43, - 'longitude': -165, + "latitude": 43, + "longitude": -165, }, { - 'latitude': 90, - 'longitude': -33, + "latitude": 90, + "longitude": -33, }, ] - cohorts = [{'academy_id': n} for n in range(1, 5)] + cohorts = [{"academy_id": n} for n in range(1, 5)] model = self.generate_models(academy=academies, cohort=cohorts, syllabus_version=True) - url = reverse_lazy('admissions:cohort_all') + '?coordinates=-56,167' + url = reverse_lazy("admissions:cohort_all") + "?coordinates=-56,167" response = self.client.get(url) json = response.json() expected = [ - get_serializer(model.cohort[0], model.syllabus, model.syllabus_version, data={'distance': distance1}), - get_serializer(model.cohort[2], model.syllabus, model.syllabus_version, data={'distance': distance2}), - get_serializer(model.cohort[1], model.syllabus, model.syllabus_version, data={'distance': distance3}), - get_serializer(model.cohort[3], model.syllabus, model.syllabus_version, data={'distance': distance4}), + get_serializer(model.cohort[0], model.syllabus, model.syllabus_version, data={"distance": distance1}), + get_serializer(model.cohort[2], model.syllabus, model.syllabus_version, data={"distance": distance2}), + get_serializer(model.cohort[1], model.syllabus, model.syllabus_version, data={"distance": distance3}), + get_serializer(model.cohort[3], model.syllabus, model.syllabus_version, data={"distance": distance4}), ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), self.bc.format.to_dict(model.cohort)) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), self.bc.format.to_dict(model.cohort)) """ 🔽🔽🔽 saas in querystring """ def test_with_data__empty_and_random_saas_in_querystring(self): - cases = ['', self.bc.fake.slug()] + cases = ["", self.bc.fake.slug()] academies = [ { - 'available_as_saas': True, + "available_as_saas": True, }, { - 'available_as_saas': False, + "available_as_saas": False, }, { - 'available_as_saas': True, + "available_as_saas": True, }, { - 'available_as_saas': False, + "available_as_saas": False, }, ] - cohorts = [{'academy_id': n, 'kickoff_date': datetime.today().isoformat()} for n in range(1, 5)] + cohorts = [{"academy_id": n, "kickoff_date": datetime.today().isoformat()} for n in range(1, 5)] model = self.generate_models(academy=academies, cohort=cohorts, syllabus_version=True) for query in cases: - url = reverse_lazy('admissions:cohort_all') + f'?saas={query}' + url = reverse_lazy("admissions:cohort_all") + f"?saas={query}" response = self.client.get(url) json = response.json() - expected = sorted([ - get_serializer(model.cohort[0], model.syllabus, model.syllabus_version, data={'distance': None}), - get_serializer(model.cohort[1], model.syllabus, model.syllabus_version, data={'distance': None}), - 
get_serializer(model.cohort[2], model.syllabus, model.syllabus_version, data={'distance': None}), - get_serializer(model.cohort[3], model.syllabus, model.syllabus_version, data={'distance': None}), - ], - key=lambda x: self.bc.datetime.from_iso_string(x['kickoff_date']), - reverse=True) + expected = sorted( + [ + get_serializer(model.cohort[0], model.syllabus, model.syllabus_version, data={"distance": None}), + get_serializer(model.cohort[1], model.syllabus, model.syllabus_version, data={"distance": None}), + get_serializer(model.cohort[2], model.syllabus, model.syllabus_version, data={"distance": None}), + get_serializer(model.cohort[3], model.syllabus, model.syllabus_version, data={"distance": None}), + ], + key=lambda x: self.bc.datetime.from_iso_string(x["kickoff_date"]), + reverse=True, + ) for j in json: - del j['kickoff_date'] + del j["kickoff_date"] for i in expected: - del i['kickoff_date'] - list_of_cohorts = self.bc.database.list_of('admissions.Cohort') + del i["kickoff_date"] + list_of_cohorts = self.bc.database.list_of("admissions.Cohort") cohorts_dict = self.bc.format.to_dict(model.cohort) for j in list_of_cohorts: - del j['kickoff_date'] + del j["kickoff_date"] for i in cohorts_dict: - del i['kickoff_date'] + del i["kickoff_date"] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(list_of_cohorts, cohorts_dict) @@ -563,41 +565,43 @@ def test_with_data__empty_and_random_saas_in_querystring(self): def test_with_data__saas_is_false(self): academies = [ { - 'available_as_saas': True, + "available_as_saas": True, }, { - 'available_as_saas': False, + "available_as_saas": False, }, { - 'available_as_saas': True, + "available_as_saas": True, }, { - 'available_as_saas': False, + "available_as_saas": False, }, ] - cohorts = [{'academy_id': n, 'kickoff_date': datetime.today().isoformat()} for n in range(1, 5)] + cohorts = [{"academy_id": n, "kickoff_date": datetime.today().isoformat()} for n in range(1, 5)] model = self.generate_models(academy=academies, cohort=cohorts, syllabus_version=True) - url = reverse_lazy('admissions:cohort_all') + f'?saas=false' + url = reverse_lazy("admissions:cohort_all") + f"?saas=false" response = self.client.get(url) json = response.json() - expected = sorted([ - get_serializer(model.cohort[1], model.syllabus, model.syllabus_version, data={'distance': None}), - get_serializer(model.cohort[3], model.syllabus, model.syllabus_version, data={'distance': None}), - ], - key=lambda x: self.bc.datetime.from_iso_string(x['kickoff_date']), - reverse=True) + expected = sorted( + [ + get_serializer(model.cohort[1], model.syllabus, model.syllabus_version, data={"distance": None}), + get_serializer(model.cohort[3], model.syllabus, model.syllabus_version, data={"distance": None}), + ], + key=lambda x: self.bc.datetime.from_iso_string(x["kickoff_date"]), + reverse=True, + ) for j in json: - del j['kickoff_date'] + del j["kickoff_date"] for i in expected: - del i['kickoff_date'] - list_of_cohorts = self.bc.database.list_of('admissions.Cohort') + del i["kickoff_date"] + list_of_cohorts = self.bc.database.list_of("admissions.Cohort") cohorts_dict = self.bc.format.to_dict(model.cohort) for j in list_of_cohorts: - del j['kickoff_date'] + del j["kickoff_date"] for i in cohorts_dict: - del i['kickoff_date'] + del i["kickoff_date"] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(list_of_cohorts, cohorts_dict) @@ -605,41 +609,43 @@ def 
test_with_data__saas_is_false(self): def test_with_data__saas_is_true(self): academies = [ { - 'available_as_saas': True, + "available_as_saas": True, }, { - 'available_as_saas': False, + "available_as_saas": False, }, { - 'available_as_saas': True, + "available_as_saas": True, }, { - 'available_as_saas': False, + "available_as_saas": False, }, ] - cohorts = [{'academy_id': n, 'kickoff_date': datetime.today()} for n in range(1, 5)] + cohorts = [{"academy_id": n, "kickoff_date": datetime.today()} for n in range(1, 5)] model = self.generate_models(academy=academies, cohort=cohorts, syllabus_version=True) - url = reverse_lazy('admissions:cohort_all') + f'?saas=true' + url = reverse_lazy("admissions:cohort_all") + f"?saas=true" response = self.client.get(url) json = response.json() - expected = sorted([ - get_serializer(model.cohort[0], model.syllabus, model.syllabus_version, data={'distance': None}), - get_serializer(model.cohort[2], model.syllabus, model.syllabus_version, data={'distance': None}), - ], - key=lambda x: self.bc.datetime.from_iso_string(x['kickoff_date']), - reverse=True) + expected = sorted( + [ + get_serializer(model.cohort[0], model.syllabus, model.syllabus_version, data={"distance": None}), + get_serializer(model.cohort[2], model.syllabus, model.syllabus_version, data={"distance": None}), + ], + key=lambda x: self.bc.datetime.from_iso_string(x["kickoff_date"]), + reverse=True, + ) for j in json: - del j['kickoff_date'] + del j["kickoff_date"] for i in expected: - del i['kickoff_date'] - list_of_cohorts = self.bc.database.list_of('admissions.Cohort') + del i["kickoff_date"] + list_of_cohorts = self.bc.database.list_of("admissions.Cohort") cohorts_dict = self.bc.format.to_dict(model.cohort) for j in list_of_cohorts: - del j['kickoff_date'] + del j["kickoff_date"] for i in cohorts_dict: - del i['kickoff_date'] + del i["kickoff_date"] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(list_of_cohorts, cohorts_dict) @@ -650,45 +656,47 @@ def test_with_data__saas_is_true(self): def test_plan_true__without_scheduler(self): """Test /cohort/all without auth""" - cohort = {'available_as_saas': True} + cohort = {"available_as_saas": True} model = self.generate_models(authenticate=True, cohort=cohort, profile_academy=1) - base_url = reverse_lazy('admissions:cohort_all') - url = f'{base_url}?plan=true' + base_url = reverse_lazy("admissions:cohort_all") + url = f"{base_url}?plan=true" response = self.client.get(url) json = response.json() self.assertEqual(json, []) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), [{**self.model_to_dict(model, 'cohort')}]) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), [{**self.model_to_dict(model, "cohort")}]) def test_plan_true__with_scheduler(self): """Test /cohort/all without auth""" - plan = {'time_of_life': None, 'time_of_life_unit': None} - cohort = {'available_as_saas': True} - academy = {'available_as_saas': True} - model = self.generate_models(authenticate=True, - cohort=cohort, - cohort_set=1, - cohort_set_cohort=1, - profile_academy=1, - syllabus_version=1, - currency=1, - plan_service_item=1, - mentorship_service=1, - mentorship_service_set=1, - plan=plan, - academy=academy) - - base_url = reverse_lazy('admissions:cohort_all') - url = f'{base_url}?plan=true' + plan = {"time_of_life": None, "time_of_life_unit": None} + cohort = {"available_as_saas": True} + academy = {"available_as_saas": 
True} + model = self.generate_models( + authenticate=True, + cohort=cohort, + cohort_set=1, + cohort_set_cohort=1, + profile_academy=1, + syllabus_version=1, + currency=1, + plan_service_item=1, + mentorship_service=1, + mentorship_service_set=1, + plan=plan, + academy=academy, + ) + + base_url = reverse_lazy("admissions:cohort_all") + url = f"{base_url}?plan=true" response = self.client.get(url) json = response.json() expected = [get_serializer(model.cohort, model.syllabus, model.syllabus_version)] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), [{**self.model_to_dict(model, 'cohort')}]) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), [{**self.model_to_dict(model, "cohort")}]) """ 🔽🔽🔽 GET with plan=false in querystring @@ -696,42 +704,46 @@ def test_plan_true__with_scheduler(self): def test_plan_false__without_scheduler(self): """Test /cohort/all without auth""" - cohort = {'available_as_saas': True} - academy = {'available_as_saas': True} - model = self.generate_models(authenticate=True, - cohort=cohort, - cohort_set=1, - cohort_set_cohort=1, - profile_academy=1, - syllabus_version=1, - academy=academy) - - base_url = reverse_lazy('admissions:cohort_all') - url = f'{base_url}?plan=false' + cohort = {"available_as_saas": True} + academy = {"available_as_saas": True} + model = self.generate_models( + authenticate=True, + cohort=cohort, + cohort_set=1, + cohort_set_cohort=1, + profile_academy=1, + syllabus_version=1, + academy=academy, + ) + + base_url = reverse_lazy("admissions:cohort_all") + url = f"{base_url}?plan=false" response = self.client.get(url) json = response.json() expected = [get_serializer(model.cohort, model.syllabus, model.syllabus_version)] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), [{**self.model_to_dict(model, 'cohort')}]) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), [{**self.model_to_dict(model, "cohort")}]) def test_plan_false__with_scheduler(self): """Test /cohort/all without auth""" - plan = {'time_of_life': None, 'time_of_life_unit': None} - cohort = {'available_as_saas': True} - model = self.generate_models(authenticate=True, - cohort=cohort, - profile_academy=1, - syllabus_version=1, - currency=1, - plan_service_item=1, - mentorship_service=1, - mentorship_service_set=1, - plan=plan) - - base_url = reverse_lazy('admissions:cohort_all') - url = f'{base_url}?plan=false' + plan = {"time_of_life": None, "time_of_life_unit": None} + cohort = {"available_as_saas": True} + model = self.generate_models( + authenticate=True, + cohort=cohort, + profile_academy=1, + syllabus_version=1, + currency=1, + plan_service_item=1, + mentorship_service=1, + mentorship_service_set=1, + plan=plan, + ) + + base_url = reverse_lazy("admissions:cohort_all") + url = f"{base_url}?plan=false" response = self.client.get(url) json = response.json() expected = [] @@ -739,7 +751,7 @@ def test_plan_false__with_scheduler(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), [{**self.model_to_dict(model, 'cohort')}]) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), [{**self.model_to_dict(model, "cohort")}]) """ 🔽🔽🔽 GET with plan as slug in querystring @@ -747,40 +759,42 @@ def test_plan_false__with_scheduler(self): 
def test_plan_is_slug__without_scheduler(self): """Test /cohort/all without auth""" - cohort = {'available_as_saas': True} + cohort = {"available_as_saas": True} model = self.generate_models(authenticate=True, cohort=cohort, profile_academy=1) slug = self.bc.fake.slug() - url = reverse_lazy('admissions:cohort_all') + f'?plan={slug}' + url = reverse_lazy("admissions:cohort_all") + f"?plan={slug}" response = self.client.get(url) json = response.json() self.assertEqual(json, []) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), [{**self.model_to_dict(model, 'cohort')}]) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), [{**self.model_to_dict(model, "cohort")}]) def test_plan_is_slug__with_scheduler(self): """Test /cohort/all without auth""" slug = self.bc.fake.slug() - plan = {'slug': slug, 'time_of_life': None, 'time_of_life_unit': None} - cohort = {'available_as_saas': True} - academy = {'available_as_saas': True} - - model = self.generate_models(authenticate=True, - cohort=cohort, - cohort_set=1, - cohort_set_cohort=1, - profile_academy=1, - syllabus_version=1, - currency=1, - plan_service_item=1, - mentorship_service=1, - mentorship_service_set=1, - plan=plan, - academy=academy) - - url = reverse_lazy('admissions:cohort_all') + f'?plan={slug}' + plan = {"slug": slug, "time_of_life": None, "time_of_life_unit": None} + cohort = {"available_as_saas": True} + academy = {"available_as_saas": True} + + model = self.generate_models( + authenticate=True, + cohort=cohort, + cohort_set=1, + cohort_set_cohort=1, + profile_academy=1, + syllabus_version=1, + currency=1, + plan_service_item=1, + mentorship_service=1, + mentorship_service_set=1, + plan=plan, + academy=academy, + ) + + url = reverse_lazy("admissions:cohort_all") + f"?plan={slug}" response = self.client.get(url) json = response.json() @@ -789,4 +803,4 @@ def test_plan_is_slug__with_scheduler(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), [{**self.model_to_dict(model, 'cohort')}]) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), [{**self.model_to_dict(model, "cohort")}]) diff --git a/breathecode/admissions/tests/urls/tests_cohort_id_join.py b/breathecode/admissions/tests/urls/tests_cohort_id_join.py index 96f2b3339..38e6e24e1 100644 --- a/breathecode/admissions/tests/urls/tests_cohort_id_join.py +++ b/breathecode/admissions/tests/urls/tests_cohort_id_join.py @@ -15,65 +15,65 @@ def academy_serializer(academy): return { - 'id': academy.id, - 'name': academy.name, - 'slug': academy.slug, + "id": academy.id, + "name": academy.name, + "slug": academy.slug, } def cohort_serializer(cohort): return { - 'id': cohort.id, - 'name': cohort.name, - 'slug': cohort.slug, + "id": cohort.id, + "name": cohort.name, + "slug": cohort.slug, } def user_serializer(user): return { - 'email': user.email, - 'first_name': user.first_name, - 'last_name': user.last_name, + "email": user.email, + "first_name": user.first_name, + "last_name": user.last_name, } def cohort_set_serializer(cohort_set, cohorts, academy): return { - 'academy': academy_serializer(academy), - 'cohorts': [cohort_serializer(cohort) for cohort in cohorts], - 'id': cohort_set.id, - 'slug': cohort_set.slug, + "academy": academy_serializer(academy), + "cohorts": [cohort_serializer(cohort) for cohort in cohorts], + "id": cohort_set.id, + "slug": cohort_set.slug, } def 
post_serializer(self, i_owe_you, cohort_set, cohorts=[], academy=None, user=None, data={}): return { - 'academy': academy_serializer(academy), - 'id': i_owe_you.id, - 'invoices': [], - 'next_payment_at': self.bc.datetime.to_iso_string(i_owe_you.next_payment_at), - 'plans': [], - 'selected_cohort_set': cohort_set_serializer(cohort_set, cohorts, academy), - 'selected_event_type_set': i_owe_you.selected_event_type_set, - 'selected_mentorship_service_set': i_owe_you.selected_mentorship_service_set, - 'status': i_owe_you.status, - 'status_message': i_owe_you.status_message, - 'user': user_serializer(user), - 'valid_until': self.bc.datetime.to_iso_string(i_owe_you.valid_until) if i_owe_you.valid_until else None, + "academy": academy_serializer(academy), + "id": i_owe_you.id, + "invoices": [], + "next_payment_at": self.bc.datetime.to_iso_string(i_owe_you.next_payment_at), + "plans": [], + "selected_cohort_set": cohort_set_serializer(cohort_set, cohorts, academy), + "selected_event_type_set": i_owe_you.selected_event_type_set, + "selected_mentorship_service_set": i_owe_you.selected_mentorship_service_set, + "status": i_owe_you.status, + "status_message": i_owe_you.status_message, + "user": user_serializer(user), + "valid_until": self.bc.datetime.to_iso_string(i_owe_you.valid_until) if i_owe_you.valid_until else None, **data, } def cohort_user_field(data={}): return { - 'cohort_id': 0, - 'educational_status': 'ACTIVE', - 'finantial_status': None, - 'id': 0, - 'role': 'STUDENT', - 'user_id': 0, - 'watching': False, - 'history_log': {}, + "cohort_id": 0, + "educational_status": "ACTIVE", + "finantial_status": None, + "id": 0, + "role": "STUDENT", + "user_id": 0, + "watching": False, + "history_log": {}, **data, } @@ -81,136 +81,133 @@ def cohort_user_field(data={}): class CohortIdUserIdTestSuite(AdmissionsTestCase): # When: no auth # Then: should return 401 - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test__post__no_auth(self): """Test /cohort/:id/user without auth""" - url = reverse_lazy('admissions:cohort_id_join', kwargs={'cohort_id': 999}) + url = reverse_lazy("admissions:cohort_id_join", kwargs={"cohort_id": 999}) response = self.client.post(url) json = response.json() - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) # When: no cohort # Then: should return 404 - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.tasks.build_cohort_user.delay', MagicMock()) - @patch('breathecode.admissions.tasks.build_profile_academy.delay', MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.tasks.build_cohort_user.delay", MagicMock()) + 
@patch("breathecode.admissions.tasks.build_profile_academy.delay", MagicMock()) def test__post__no_cohort(self): """Test /cohort/:id/user without auth""" - url = reverse_lazy('admissions:cohort_id_join', kwargs={'cohort_id': 999}) + url = reverse_lazy("admissions:cohort_id_join", kwargs={"cohort_id": 999}) model = self.bc.database.create(user=1) self.client.force_authenticate(model.user) response = self.client.post(url) json = response.json() - expected = {'detail': 'not-found', 'status_code': 404} + expected = {"detail": "not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), []) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), []) - self.assertEqual(self.bc.database.list_of('payments.Subscription'), []) - self.assertEqual(self.bc.database.list_of('payments.PlanFinancing'), []) + self.assertEqual(self.bc.database.list_of("admissions.CohortUser"), []) + self.assertEqual(self.bc.database.list_of("authenticate.ProfileAcademy"), []) + self.assertEqual(self.bc.database.list_of("payments.Subscription"), []) + self.assertEqual(self.bc.database.list_of("payments.PlanFinancing"), []) self.bc.check.calls(tasks.build_cohort_user.delay.call_args_list, []) self.bc.check.calls(tasks.build_profile_academy.delay.call_args_list, []) # When: no have a PlanFinancing or Subscription belonging to the user # Then: should return 400 - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.tasks.build_cohort_user.delay', MagicMock()) - @patch('breathecode.admissions.tasks.build_profile_academy.delay', MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.tasks.build_cohort_user.delay", MagicMock()) + @patch("breathecode.admissions.tasks.build_profile_academy.delay", MagicMock()) def test__post__not_subscribed(self): if random.randint(0, 1): cohort = { - 'never_ends': True, - 'ending_date': None, - 'available_as_saas': True, + "never_ends": True, + "ending_date": None, + "available_as_saas": True, } else: cohort = { - 'never_ends': False, - 'ending_date': timezone.now() + timedelta(days=1), - 'available_as_saas': True, + "never_ends": False, + "ending_date": timezone.now() + timedelta(days=1), + "available_as_saas": True, } model = self.bc.database.create(user=1, cohort=cohort) self.client.force_authenticate(model.user) - url = reverse_lazy('admissions:cohort_id_join', kwargs={'cohort_id': 1}) + url = reverse_lazy("admissions:cohort_id_join", kwargs={"cohort_id": 1}) response = self.client.post(url) json = response.json() - expected = {'detail': 'not-subscribed', 'status_code': 400} + expected = {"detail": "not-subscribed", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), []) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), []) - self.assertEqual(self.bc.database.list_of('payments.Subscription'), []) - self.assertEqual(self.bc.database.list_of('payments.PlanFinancing'), []) + 
self.assertEqual(self.bc.database.list_of("admissions.CohortUser"), []) + self.assertEqual(self.bc.database.list_of("authenticate.ProfileAcademy"), []) + self.assertEqual(self.bc.database.list_of("payments.Subscription"), []) + self.assertEqual(self.bc.database.list_of("payments.PlanFinancing"), []) self.bc.check.calls(tasks.build_cohort_user.delay.call_args_list, []) self.bc.check.calls(tasks.build_profile_academy.delay.call_args_list, []) # When: have one of PlanFinancing or Subscription belonging to the user # Then: should return 400 - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.tasks.build_cohort_user.delay', MagicMock()) - @patch('breathecode.admissions.tasks.build_profile_academy.delay', MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.tasks.build_cohort_user.delay", MagicMock()) + @patch("breathecode.admissions.tasks.build_profile_academy.delay", MagicMock()) def test__post__have_a_subscription_or_a_plan_financing(self): if random.randint(0, 1): cohort = { - 'never_ends': True, - 'ending_date': None, - 'available_as_saas': True, + "never_ends": True, + "ending_date": None, + "available_as_saas": True, } else: cohort = { - 'never_ends': False, - 'ending_date': timezone.now() + timedelta(days=1), - 'available_as_saas': True, + "never_ends": False, + "ending_date": timezone.now() + timedelta(days=1), + "available_as_saas": True, } if is_a_subscription := random.randint(0, 1): extra = { - 'subscription': { - 'joined_cohorts': [], - 'valid_until': timezone.now() + timedelta(days=1) if random.randint(0, 1) else None, + "subscription": { + "joined_cohorts": [], + "valid_until": timezone.now() + timedelta(days=1) if random.randint(0, 1) else None, }, } else: extra = { - 'plan_financing': { - 'joined_cohorts': [], - 'valid_until': timezone.now() + timedelta(days=1), - 'plan_expires_at': timezone.now() + timedelta(days=1), - 'monthly_price': random.randint(1, 100), + "plan_financing": { + "joined_cohorts": [], + "valid_until": timezone.now() + timedelta(days=1), + "plan_expires_at": timezone.now() + timedelta(days=1), + "monthly_price": random.randint(1, 100), }, } - academy = {'available_as_saas': True} - - model = self.bc.database.create(user=1, - cohort=cohort, - cohort_set=1, - cohort_set_cohort=1, - academy=academy, - **extra) + academy = {"available_as_saas": True} + + model = self.bc.database.create( + user=1, cohort=cohort, cohort_set=1, cohort_set_cohort=1, academy=academy, **extra + ) self.client.force_authenticate(model.user) - url = reverse_lazy('admissions:cohort_id_join', kwargs={'cohort_id': 1}) + url = reverse_lazy("admissions:cohort_id_join", kwargs={"cohort_id": 1}) response = self.client.post(url) json = response.json() @@ -220,257 +217,274 @@ def test__post__have_a_subscription_or_a_plan_financing(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), []) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), []) + self.assertEqual(self.bc.database.list_of("admissions.CohortUser"), []) + 
self.assertEqual(self.bc.database.list_of("authenticate.ProfileAcademy"), []) if is_a_subscription: - self.assertEqual(self.bc.database.list_of('payments.Subscription'), [ - self.bc.format.to_dict(model.subscription), - ]) - self.assertEqual(self.bc.database.list_of('payments.PlanFinancing'), []) + self.assertEqual( + self.bc.database.list_of("payments.Subscription"), + [ + self.bc.format.to_dict(model.subscription), + ], + ) + self.assertEqual(self.bc.database.list_of("payments.PlanFinancing"), []) else: - self.assertEqual(self.bc.database.list_of('payments.Subscription'), []) - self.assertEqual(self.bc.database.list_of('payments.PlanFinancing'), [ - self.bc.format.to_dict(model.plan_financing), - ]) + self.assertEqual(self.bc.database.list_of("payments.Subscription"), []) + self.assertEqual( + self.bc.database.list_of("payments.PlanFinancing"), + [ + self.bc.format.to_dict(model.plan_financing), + ], + ) - self.bc.check.calls(tasks.build_cohort_user.delay.call_args_list, [call(1, 1, 'STUDENT')]) - self.bc.check.calls(tasks.build_profile_academy.delay.call_args_list, [call(1, 1, 'student')]) + self.bc.check.calls(tasks.build_cohort_user.delay.call_args_list, [call(1, 1, "STUDENT")]) + self.bc.check.calls(tasks.build_profile_academy.delay.call_args_list, [call(1, 1, "student")]) # When: joined to cohort externally to subscription # Then: should return 400 - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.tasks.build_cohort_user.delay', MagicMock()) - @patch('breathecode.admissions.tasks.build_profile_academy.delay', MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.tasks.build_cohort_user.delay", MagicMock()) + @patch("breathecode.admissions.tasks.build_profile_academy.delay", MagicMock()) def test__post__joined_to_cohort(self): if random.randint(0, 1): cohort = { - 'never_ends': True, - 'ending_date': None, - 'available_as_saas': True, + "never_ends": True, + "ending_date": None, + "available_as_saas": True, } else: cohort = { - 'never_ends': False, - 'ending_date': timezone.now() + timedelta(days=1), - 'available_as_saas': True, + "never_ends": False, + "ending_date": timezone.now() + timedelta(days=1), + "available_as_saas": True, } if is_a_subscription := random.randint(0, 1): extra = { - 'subscription': { - 'joined_cohorts': [], - 'valid_until': timezone.now() + timedelta(days=1) if random.randint(0, 1) else None, + "subscription": { + "joined_cohorts": [], + "valid_until": timezone.now() + timedelta(days=1) if random.randint(0, 1) else None, }, } else: extra = { - 'plan_financing': { - 'joined_cohorts': [], - 'valid_until': timezone.now() + timedelta(days=1), - 'plan_expires_at': timezone.now() + timedelta(days=1), - 'monthly_price': random.randint(1, 100), + "plan_financing": { + "joined_cohorts": [], + "valid_until": timezone.now() + timedelta(days=1), + "plan_expires_at": timezone.now() + timedelta(days=1), + "monthly_price": random.randint(1, 100), }, } - academy = {'available_as_saas': True} - model = self.bc.database.create(user=1, - cohort=cohort, - cohort_set=1, - cohort_set_cohort=1, - cohort_user=1, - academy=academy, - **extra) + academy = {"available_as_saas": True} + model = 
self.bc.database.create( + user=1, cohort=cohort, cohort_set=1, cohort_set_cohort=1, cohort_user=1, academy=academy, **extra + ) self.client.force_authenticate(model.user) - url = reverse_lazy('admissions:cohort_id_join', kwargs={'cohort_id': 1}) + url = reverse_lazy("admissions:cohort_id_join", kwargs={"cohort_id": 1}) response = self.client.post(url) json = response.json() - expected = {'detail': 'already-joined-to-cohort', 'status_code': 400} + expected = {"detail": "already-joined-to-cohort", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), [ - self.bc.format.to_dict(model.cohort_user), - ]) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), []) + self.assertEqual( + self.bc.database.list_of("admissions.CohortUser"), + [ + self.bc.format.to_dict(model.cohort_user), + ], + ) + self.assertEqual(self.bc.database.list_of("authenticate.ProfileAcademy"), []) if is_a_subscription: - self.assertEqual(self.bc.database.list_of('payments.Subscription'), [ - self.bc.format.to_dict(model.subscription), - ]) - self.assertEqual(self.bc.database.list_of('payments.PlanFinancing'), []) + self.assertEqual( + self.bc.database.list_of("payments.Subscription"), + [ + self.bc.format.to_dict(model.subscription), + ], + ) + self.assertEqual(self.bc.database.list_of("payments.PlanFinancing"), []) else: - self.assertEqual(self.bc.database.list_of('payments.Subscription'), []) - self.assertEqual(self.bc.database.list_of('payments.PlanFinancing'), [ - self.bc.format.to_dict(model.plan_financing), - ]) + self.assertEqual(self.bc.database.list_of("payments.Subscription"), []) + self.assertEqual( + self.bc.database.list_of("payments.PlanFinancing"), + [ + self.bc.format.to_dict(model.plan_financing), + ], + ) self.bc.check.calls(tasks.build_cohort_user.delay.call_args_list, []) self.bc.check.calls(tasks.build_profile_academy.delay.call_args_list, []) # When: rejoining to cohort from a subscription # Then: should return 400 - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.tasks.build_cohort_user.delay', MagicMock()) - @patch('breathecode.admissions.tasks.build_profile_academy.delay', MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.tasks.build_cohort_user.delay", MagicMock()) + @patch("breathecode.admissions.tasks.build_profile_academy.delay", MagicMock()) def test__post__rejoining_from_a_subscription(self): if random.randint(0, 1): cohort = { - 'never_ends': True, - 'ending_date': None, - 'available_as_saas': True, + "never_ends": True, + "ending_date": None, + "available_as_saas": True, } else: cohort = { - 'never_ends': False, - 'ending_date': timezone.now() + timedelta(days=1), - 'available_as_saas': True, + "never_ends": False, + "ending_date": timezone.now() + timedelta(days=1), + "available_as_saas": True, } if is_a_subscription := random.randint(0, 1): extra = { - 'subscription': { - 'joined_cohorts': [1], - 'valid_until': timezone.now() + timedelta(days=1) if random.randint(0, 1) else None, + "subscription": { + "joined_cohorts": [1], + "valid_until": timezone.now() + 
timedelta(days=1) if random.randint(0, 1) else None, }, } else: extra = { - 'plan_financing': { - 'joined_cohorts': [1], - 'valid_until': timezone.now() + timedelta(days=1), - 'plan_expires_at': timezone.now() + timedelta(days=1), - 'monthly_price': random.randint(1, 100), + "plan_financing": { + "joined_cohorts": [1], + "valid_until": timezone.now() + timedelta(days=1), + "plan_expires_at": timezone.now() + timedelta(days=1), + "monthly_price": random.randint(1, 100), }, } - academy = {'available_as_saas': True} - model = self.bc.database.create(user=1, - cohort=cohort, - cohort_set=1, - cohort_set_cohort=1, - academy=academy, - **extra) + academy = {"available_as_saas": True} + model = self.bc.database.create( + user=1, cohort=cohort, cohort_set=1, cohort_set_cohort=1, academy=academy, **extra + ) self.client.force_authenticate(model.user) - url = reverse_lazy('admissions:cohort_id_join', kwargs={'cohort_id': 1}) + url = reverse_lazy("admissions:cohort_id_join", kwargs={"cohort_id": 1}) response = self.client.post(url) json = response.json() - expected = {'detail': 'already-joined', 'status_code': 400} + expected = {"detail": "already-joined", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), []) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), []) + self.assertEqual(self.bc.database.list_of("admissions.CohortUser"), []) + self.assertEqual(self.bc.database.list_of("authenticate.ProfileAcademy"), []) if is_a_subscription: - self.assertEqual(self.bc.database.list_of('payments.Subscription'), [ - self.bc.format.to_dict(model.subscription), - ]) - self.assertEqual(self.bc.database.list_of('payments.PlanFinancing'), []) + self.assertEqual( + self.bc.database.list_of("payments.Subscription"), + [ + self.bc.format.to_dict(model.subscription), + ], + ) + self.assertEqual(self.bc.database.list_of("payments.PlanFinancing"), []) else: - self.assertEqual(self.bc.database.list_of('payments.Subscription'), []) - self.assertEqual(self.bc.database.list_of('payments.PlanFinancing'), [ - self.bc.format.to_dict(model.plan_financing), - ]) + self.assertEqual(self.bc.database.list_of("payments.Subscription"), []) + self.assertEqual( + self.bc.database.list_of("payments.PlanFinancing"), + [ + self.bc.format.to_dict(model.plan_financing), + ], + ) self.bc.check.calls(tasks.build_cohort_user.delay.call_args_list, []) self.bc.check.calls(tasks.build_profile_academy.delay.call_args_list, []) # When: joined to another endable cohort # Then: should return 400 - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.tasks.build_cohort_user.delay', MagicMock()) - @patch('breathecode.admissions.tasks.build_profile_academy.delay', MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.tasks.build_cohort_user.delay", MagicMock()) + @patch("breathecode.admissions.tasks.build_profile_academy.delay", MagicMock()) def test__post__joined_to_another_endable_cohort(self): cohort = { - 'never_ends': False, - 'ending_date': timezone.now() + timedelta(days=1), - 'available_as_saas': True, + 
"never_ends": False, + "ending_date": timezone.now() + timedelta(days=1), + "available_as_saas": True, } if is_a_subscription := random.randint(0, 1): extra = { - 'subscription': { - 'joined_cohorts': [2], - 'valid_until': timezone.now() + timedelta(days=1) if random.randint(0, 1) else None, + "subscription": { + "joined_cohorts": [2], + "valid_until": timezone.now() + timedelta(days=1) if random.randint(0, 1) else None, }, } else: extra = { - 'plan_financing': { - 'joined_cohorts': [2], - 'valid_until': timezone.now() + timedelta(days=1), - 'plan_expires_at': timezone.now() + timedelta(days=1), - 'monthly_price': random.randint(1, 100), + "plan_financing": { + "joined_cohorts": [2], + "valid_until": timezone.now() + timedelta(days=1), + "plan_expires_at": timezone.now() + timedelta(days=1), + "monthly_price": random.randint(1, 100), }, } - academy = {'available_as_saas': True} - model = self.bc.database.create(user=1, - cohort=(2, cohort), - cohort_set=1, - cohort_set_cohort=1, - academy=academy, - **extra) + academy = {"available_as_saas": True} + model = self.bc.database.create( + user=1, cohort=(2, cohort), cohort_set=1, cohort_set_cohort=1, academy=academy, **extra + ) self.client.force_authenticate(model.user) - url = reverse_lazy('admissions:cohort_id_join', kwargs={'cohort_id': 1}) + url = reverse_lazy("admissions:cohort_id_join", kwargs={"cohort_id": 1}) response = self.client.post(url) json = response.json() - expected = {'detail': 'already-joined-to-another-cohort', 'status_code': 400} + expected = {"detail": "already-joined-to-another-cohort", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), []) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), []) + self.assertEqual(self.bc.database.list_of("admissions.CohortUser"), []) + self.assertEqual(self.bc.database.list_of("authenticate.ProfileAcademy"), []) if is_a_subscription: - self.assertEqual(self.bc.database.list_of('payments.Subscription'), [ - self.bc.format.to_dict(model.subscription), - ]) - self.assertEqual(self.bc.database.list_of('payments.PlanFinancing'), []) + self.assertEqual( + self.bc.database.list_of("payments.Subscription"), + [ + self.bc.format.to_dict(model.subscription), + ], + ) + self.assertEqual(self.bc.database.list_of("payments.PlanFinancing"), []) else: - self.assertEqual(self.bc.database.list_of('payments.Subscription'), []) - self.assertEqual(self.bc.database.list_of('payments.PlanFinancing'), [ - self.bc.format.to_dict(model.plan_financing), - ]) + self.assertEqual(self.bc.database.list_of("payments.Subscription"), []) + self.assertEqual( + self.bc.database.list_of("payments.PlanFinancing"), + [ + self.bc.format.to_dict(model.plan_financing), + ], + ) self.bc.check.calls(tasks.build_cohort_user.delay.call_args_list, []) self.bc.check.calls(tasks.build_profile_academy.delay.call_args_list, []) # When: joined to another endable cohort # Then: should return 400 - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.tasks.build_cohort_user.delay', MagicMock()) - @patch('breathecode.admissions.tasks.build_profile_academy.delay', MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + 
@patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.tasks.build_cohort_user.delay", MagicMock()) + @patch("breathecode.admissions.tasks.build_profile_academy.delay", MagicMock()) def test__post__joined_to_another_endable_cohort(self): endable = { - 'never_ends': False, - 'ending_date': timezone.now() + timedelta(days=1), - 'available_as_saas': True, + "never_ends": False, + "ending_date": timezone.now() + timedelta(days=1), + "available_as_saas": True, } no_endable = { - 'never_ends': True, - 'ending_date': None, - 'available_as_saas': True, + "never_ends": True, + "ending_date": None, + "available_as_saas": True, } cohorts = [ @@ -483,77 +497,88 @@ def test__post__joined_to_another_endable_cohort(self): for cohort1, cohort2 in cohorts: if is_a_subscription := random.randint(0, 1): extra = { - 'subscription': { - 'joined_cohorts': [id + 2], - 'valid_until': timezone.now() + timedelta(days=1) if random.randint(0, 1) else None, + "subscription": { + "joined_cohorts": [id + 2], + "valid_until": timezone.now() + timedelta(days=1) if random.randint(0, 1) else None, }, } else: extra = { - 'plan_financing': { - 'joined_cohorts': [id + 2], - 'valid_until': timezone.now() + timedelta(days=1), - 'plan_expires_at': timezone.now() + timedelta(days=1), - 'monthly_price': random.randint(1, 100), + "plan_financing": { + "joined_cohorts": [id + 2], + "valid_until": timezone.now() + timedelta(days=1), + "plan_expires_at": timezone.now() + timedelta(days=1), + "monthly_price": random.randint(1, 100), }, } - cohort_set_cohorts = [{'cohort_id': x} for x in [id + 1, id + 2]] - academy = {'available_as_saas': True} - - model = self.bc.database.create(user=1, - cohort=[cohort1, cohort2], - cohort_set=1, - cohort_set_cohort=cohort_set_cohorts, - academy=academy, - **extra) + cohort_set_cohorts = [{"cohort_id": x} for x in [id + 1, id + 2]] + academy = {"available_as_saas": True} + + model = self.bc.database.create( + user=1, + cohort=[cohort1, cohort2], + cohort_set=1, + cohort_set_cohort=cohort_set_cohorts, + academy=academy, + **extra, + ) self.client.force_authenticate(model.user) - url = reverse_lazy('admissions:cohort_id_join', kwargs={'cohort_id': id + 1}) + url = reverse_lazy("admissions:cohort_id_join", kwargs={"cohort_id": id + 1}) response = self.client.post(url) json = response.json() resource = model.subscription if is_a_subscription else model.plan_financing - expected = post_serializer(self, - resource, - model.cohort_set, [model.cohort[0], model.cohort[1]], - model.academy, - model.user, - data={}) + expected = post_serializer( + self, resource, model.cohort_set, [model.cohort[0], model.cohort[1]], model.academy, model.user, data={} + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), []) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), []) + self.assertEqual(self.bc.database.list_of("admissions.CohortUser"), []) + self.assertEqual(self.bc.database.list_of("authenticate.ProfileAcademy"), []) if is_a_subscription: - self.assertEqual(self.bc.database.list_of('payments.Subscription'), [ - self.bc.format.to_dict(model.subscription), - ]) - self.assertEqual(self.bc.database.list_of('payments.PlanFinancing'), []) + self.assertEqual( + self.bc.database.list_of("payments.Subscription"), + [ + self.bc.format.to_dict(model.subscription), + ], + ) + 
self.assertEqual(self.bc.database.list_of("payments.PlanFinancing"), []) else: - self.assertEqual(self.bc.database.list_of('payments.Subscription'), []) - self.assertEqual(self.bc.database.list_of('payments.PlanFinancing'), [ - self.bc.format.to_dict(model.plan_financing), - ]) - - self.bc.check.calls(tasks.build_cohort_user.delay.call_args_list, [ - call(model.cohort[0].id, model.user.id, 'STUDENT'), - ]) - self.bc.check.calls(tasks.build_profile_academy.delay.call_args_list, [ - call(model.academy.id, model.user.id, 'student'), - ]) + self.assertEqual(self.bc.database.list_of("payments.Subscription"), []) + self.assertEqual( + self.bc.database.list_of("payments.PlanFinancing"), + [ + self.bc.format.to_dict(model.plan_financing), + ], + ) + + self.bc.check.calls( + tasks.build_cohort_user.delay.call_args_list, + [ + call(model.cohort[0].id, model.user.id, "STUDENT"), + ], + ) + self.bc.check.calls( + tasks.build_profile_academy.delay.call_args_list, + [ + call(model.academy.id, model.user.id, "student"), + ], + ) id += 2 # teardown - self.bc.database.delete('admissions.CohortUser') - self.bc.database.delete('authenticate.ProfileAcademy') - self.bc.database.delete('payments.Subscription') - self.bc.database.delete('payments.PlanFinancing') + self.bc.database.delete("admissions.CohortUser") + self.bc.database.delete("authenticate.ProfileAcademy") + self.bc.database.delete("payments.Subscription") + self.bc.database.delete("payments.PlanFinancing") tasks.build_cohort_user.delay.call_args_list = [] tasks.build_profile_academy.delay.call_args_list = [] diff --git a/breathecode/admissions/tests/urls/tests_cohort_id_user.py b/breathecode/admissions/tests/urls/tests_cohort_id_user.py index ad5408b20..3c92ea8df 100644 --- a/breathecode/admissions/tests/urls/tests_cohort_id_user.py +++ b/breathecode/admissions/tests/urls/tests_cohort_id_user.py @@ -1,6 +1,7 @@ """ Test /cohort/:id/user """ + import random from unittest.mock import MagicMock, patch @@ -22,50 +23,55 @@ def post_serializer(self, cohort, user, profile_academy=None, data={}): return { - 'cohort': { - 'ending_date': cohort.ending_date, - 'id': cohort.id, - 'kickoff_date': - self.bc.datetime.to_iso_string(cohort.kickoff_date) if cohort.kickoff_date else cohort.kickoff_date, - 'name': cohort.name, - 'slug': cohort.slug, - 'stage': cohort.stage, - 'available_as_saas': cohort.available_as_saas, + "cohort": { + "ending_date": cohort.ending_date, + "id": cohort.id, + "kickoff_date": ( + self.bc.datetime.to_iso_string(cohort.kickoff_date) if cohort.kickoff_date else cohort.kickoff_date + ), + "name": cohort.name, + "slug": cohort.slug, + "stage": cohort.stage, + "available_as_saas": cohort.available_as_saas, }, - 'created_at': self.bc.datetime.to_iso_string(UTC_NOW), - 'educational_status': 'ACTIVE', - 'finantial_status': None, - 'id': 1, - 'profile_academy': { - 'email': profile_academy.email, - 'first_name': profile_academy.first_name, - 'id': profile_academy.id, - 'last_name': profile_academy.last_name, - 'phone': profile_academy.phone, - } if profile_academy else None, - 'role': 'STUDENT', - 'user': { - 'email': user.email, - 'first_name': user.first_name, - 'id': user.id, - 'last_name': user.last_name, - 'last_login': user.last_login, + "created_at": self.bc.datetime.to_iso_string(UTC_NOW), + "educational_status": "ACTIVE", + "finantial_status": None, + "id": 1, + "profile_academy": ( + { + "email": profile_academy.email, + "first_name": profile_academy.first_name, + "id": profile_academy.id, + "last_name": profile_academy.last_name, + 
"phone": profile_academy.phone, + } + if profile_academy + else None + ), + "role": "STUDENT", + "user": { + "email": user.email, + "first_name": user.first_name, + "id": user.id, + "last_name": user.last_name, + "last_login": user.last_login, }, - 'watching': False, + "watching": False, **data, } def cohort_user_field(data={}): return { - 'cohort_id': 0, - 'educational_status': 'ACTIVE', - 'finantial_status': None, - 'id': 0, - 'role': 'STUDENT', - 'user_id': 0, - 'watching': False, - 'history_log': {}, + "cohort_id": 0, + "educational_status": "ACTIVE", + "finantial_status": None, + "id": 0, + "role": "STUDENT", + "user_id": 0, + "watching": False, + "history_log": {}, **data, } @@ -75,39 +81,39 @@ def check_cohort_user_that_not_have_role_student_can_be_teacher(self, role, upda self.headers(academy=1) model_kwargs = { - 'authenticate': True, - 'cohort': { - 'stage': 'STARTED' - }, - 'user': True, - 'profile_academy': True, - 'role': role, - 'capability': 'crud_cohort', + "authenticate": True, + "cohort": {"stage": "STARTED"}, + "user": True, + "profile_academy": True, + "role": role, + "capability": "crud_cohort", } if update: - model_kwargs['cohort_user'] = True + model_kwargs["cohort_user"] = True model = self.generate_models(**model_kwargs) - url = reverse_lazy('admissions:cohort_id_user', kwargs={'cohort_id': 1}) - data = {'user': model['user'].id, 'role': 'TEACHER'} + url = reverse_lazy("admissions:cohort_id_user", kwargs={"cohort_id": 1}) + data = {"user": model["user"].id, "role": "TEACHER"} request_func = self.client.put if update else self.client.post - response = request_func(url, data, format='json') + response = request_func(url, data, format="json") json = response.json() - expected = post_serializer(self, - model.cohort, - model.user, - model.profile_academy, - data={ - 'role': 'TEACHER', - **additional_data, - }) - - expected['educational_status'] = 'ACTIVE' - expected['finantial_status'] = None + expected = post_serializer( + self, + model.cohort, + model.user, + model.profile_academy, + data={ + "role": "TEACHER", + **additional_data, + }, + ) + + expected["educational_status"] = "ACTIVE" + expected["finantial_status"] = None self.assertEqual(json, expected) @@ -117,43 +123,53 @@ def check_cohort_user_that_not_have_role_student_can_be_teacher(self, role, upda self.assertEqual(response.status_code, status.HTTP_201_CREATED) if update: - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), - [{ - **self.model_to_dict(model, 'cohort_user'), - 'role': 'TEACHER', - }]) + self.assertEqual( + self.bc.database.list_of("admissions.CohortUser"), + [ + { + **self.model_to_dict(model, "cohort_user"), + "role": "TEACHER", + } + ], + ) else: - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), [{ - 'cohort_id': 1, - 'educational_status': 'ACTIVE', - 'finantial_status': None, - 'id': 1, - 'role': 'TEACHER', - 'user_id': 1, - 'watching': False, - 'history_log': {}, - }]) + self.assertEqual( + self.bc.database.list_of("admissions.CohortUser"), + [ + { + "cohort_id": 1, + "educational_status": "ACTIVE", + "finantial_status": None, + "id": 1, + "role": "TEACHER", + "user_id": 1, + "watching": False, + "history_log": {}, + } + ], + ) class CohortIdUserIdTestSuite(AdmissionsTestCase): """Test /cohort/:id/user""" + """ 🔽🔽🔽 Without cohord id in url """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - 
@patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id_user__post__with_bad_cohort_id(self): """Test /cohort/:id/user without auth""" - url = reverse_lazy('admissions:cohort_id_user', kwargs={'cohort_id': 999}) + url = reverse_lazy("admissions:cohort_id_user", kwargs={"cohort_id": 999}) model = self.generate_models(authenticate=True) data = {} - response = self.client.post(url, data, format='json') + response = self.client.post(url, data, format="json") json = response.json() - expected = {'detail': 'Missing cohort or user in the request', 'status_code': 400} + expected = {"detail": "Missing cohort or user in the request", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -162,19 +178,19 @@ def test_cohort_id_user__post__with_bad_cohort_id(self): 🔽🔽🔽 Bad user in body """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id_user__post__with_bad_user(self): """Test /cohort/:id/user without auth""" model = self.generate_models(authenticate=True, cohort=True) - url = reverse_lazy('admissions:cohort_id_user', kwargs={'cohort_id': model['cohort'].id}) - data = {'user': 999} - response = self.client.post(url, data, format='json') + url = reverse_lazy("admissions:cohort_id_user", kwargs={"cohort_id": model["cohort"].id}) + data = {"user": 999} + response = self.client.post(url, data, format="json") json = response.json() - expected = {'detail': 'User not found', 'status_code': 400} + expected = {"detail": "User not found", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -183,19 +199,19 @@ def test_cohort_id_user__post__with_bad_user(self): 🔽🔽🔽 Without user in body """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + 
@patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id_user__post__without_user(self): """Test /cohort/:id/user without auth""" model = self.generate_models(authenticate=True, cohort=True, user=True, profile_academy=True, cohort_user=True) - url = reverse_lazy('admissions:cohort_id_user', kwargs={'cohort_id': model['cohort'].id}) + url = reverse_lazy("admissions:cohort_id_user", kwargs={"cohort_id": model["cohort"].id}) data = {} - response = self.client.post(url, data, format='json') + response = self.client.post(url, data, format="json") json = response.json() - expected = {'detail': 'Missing cohort or user in the request', 'status_code': 400} + expected = {"detail": "Missing cohort or user in the request", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -204,71 +220,69 @@ def test_cohort_id_user__post__without_user(self): 🔽🔽🔽 Authenticate user is not staff in this academy """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id_user__post__without_profile_academy(self): """Test /cohort/:id/user without auth""" model = self.generate_models(authenticate=True, user=True, cohort=1) - url = reverse_lazy('admissions:cohort_id_user', kwargs={'cohort_id': model['cohort'].id}) + url = reverse_lazy("admissions:cohort_id_user", kwargs={"cohort_id": model["cohort"].id}) data = { - 'user': model['user'].id, + "user": model["user"].id, } - response = self.client.post(url, data, format='json') + response = self.client.post(url, data, format="json") json = response.json() - expected = {'detail': 'Specified cohort not be found', 'status_code': 400} + expected = {"detail": "Specified cohort not be found", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - ''' + """ post to a cohort with stage DELETED - ''' + """ - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id_user__post__stage_deleted(self): """Test /cohort/:id/user without auth""" - 
cohort_kwargs = {'stage': 'DELETED'} - model = self.generate_models(authenticate=True, - cohort=True, - user=True, - profile_academy=True, - cohort_kwargs=cohort_kwargs) - models_dict = self.bc.database.list_of('admissions.CohortUser') - url = reverse_lazy('admissions:cohort_id_user', kwargs={'cohort_id': model['cohort'].id}) + cohort_kwargs = {"stage": "DELETED"} + model = self.generate_models( + authenticate=True, cohort=True, user=True, profile_academy=True, cohort_kwargs=cohort_kwargs + ) + models_dict = self.bc.database.list_of("admissions.CohortUser") + url = reverse_lazy("admissions:cohort_id_user", kwargs={"cohort_id": model["cohort"].id}) data = { - 'user': model['user'].id, + "user": model["user"].id, } - response = self.client.post(url, data, format='json') + response = self.client.post(url, data, format="json") json = response.json() - expected = {'detail': 'adding-student-to-a-closed-cohort', 'status_code': 400} + expected = {"detail": "adding-student-to-a-closed-cohort", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), []) + self.assertEqual(self.bc.database.list_of("admissions.CohortUser"), []) """ 🔽🔽🔽 Post """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_cohort_id_user__post(self): """Test /cohort/:id/user without auth""" - model = self.generate_models(authenticate=True, cohort={'stage': 'STARTED'}, user=True, profile_academy=True) - models_dict = self.bc.database.list_of('admissions.CohortUser') - url = reverse_lazy('admissions:cohort_id_user', kwargs={'cohort_id': model['cohort'].id}) + model = self.generate_models(authenticate=True, cohort={"stage": "STARTED"}, user=True, profile_academy=True) + models_dict = self.bc.database.list_of("admissions.CohortUser") + url = reverse_lazy("admissions:cohort_id_user", kwargs={"cohort_id": model["cohort"].id}) data = { - 'user': model['user'].id, + "user": model["user"].id, } - response = self.client.post(url, data, format='json') + response = self.client.post(url, data, format="json") json = response.json() expected = post_serializer(self, model.cohort, model.user, model.profile_academy, data={}) @@ -278,228 +292,271 @@ def test_cohort_id_user__post(self): cohort_user = self.get_cohort_user(1) cohort_user_two = cohort_user.__dict__.copy() cohort_user_two.update(data) - cohort_user_two['user_id'] = cohort_user_two['user'] - cohort_user_two['cohort_id'] = model['cohort'].id - del cohort_user_two['user'] + cohort_user_two["user_id"] = cohort_user_two["user"] + 
cohort_user_two["cohort_id"] = model["cohort"].id + del cohort_user_two["user"] models_dict.append(self.remove_dinamics_fields(cohort_user_two)) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), models_dict) + self.assertEqual(self.bc.database.list_of("admissions.CohortUser"), models_dict) """ 🔽🔽🔽 Post """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id_user__post__status_in_upper_and_lower(self): """Test /cohort/:id/user without auth""" - model = self.generate_models(authenticate=True, cohort={'stage': 'STARTED'}, user=True, profile_academy=True) - url = reverse_lazy('admissions:cohort_id_user', kwargs={'cohort_id': model.cohort.id}) + model = self.generate_models(authenticate=True, cohort={"stage": "STARTED"}, user=True, profile_academy=True) + url = reverse_lazy("admissions:cohort_id_user", kwargs={"cohort_id": model.cohort.id}) - roles = ['TEACHER', 'ASSISTANT', 'STUDENT', 'REVIEWER'] - finantial_status = ['FULLY_PAID', 'UP_TO_DATE', 'LATE'] - educational_status = ['ACTIVE', 'POSTPONED', 'SUSPENDED', 'DROPPED'] # do not put GRADUATED here + roles = ["TEACHER", "ASSISTANT", "STUDENT", "REVIEWER"] + finantial_status = ["FULLY_PAID", "UP_TO_DATE", "LATE"] + educational_status = ["ACTIVE", "POSTPONED", "SUSPENDED", "DROPPED"] # do not put GRADUATED here data = { - 'user': model['user'].id, - 'role': random.choice(roles + [x.lower() for x in roles]), - 'finantial_status': random.choice(finantial_status + [x.lower() for x in finantial_status]), - 'educational_status': random.choice(educational_status + [x.lower() for x in educational_status]), + "user": model["user"].id, + "role": random.choice(roles + [x.lower() for x in roles]), + "finantial_status": random.choice(finantial_status + [x.lower() for x in finantial_status]), + "educational_status": random.choice(educational_status + [x.lower() for x in educational_status]), } - response = self.client.post(url, data, format='json') + response = self.client.post(url, data, format="json") json = response.json() - expected = post_serializer(self, - model.cohort, - model.user, - model.profile_academy, - data={ - 'role': data['role'].upper(), - 'finantial_status': data['finantial_status'].upper(), - 'educational_status': data['educational_status'].upper(), - }) + expected = post_serializer( + self, + model.cohort, + model.user, + model.profile_academy, + data={ + "role": data["role"].upper(), + "finantial_status": data["finantial_status"].upper(), + "educational_status": data["educational_status"].upper(), + }, + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, 
status.HTTP_201_CREATED) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), [ - cohort_user_field({ - 'id': 1, - 'cohort_id': 1, - 'user_id': 1, - 'role': data['role'].upper(), - 'finantial_status': data['finantial_status'].upper(), - 'educational_status': data['educational_status'].upper(), - }), - ]) + self.assertEqual( + self.bc.database.list_of("admissions.CohortUser"), + [ + cohort_user_field( + { + "id": 1, + "cohort_id": 1, + "user_id": 1, + "role": data["role"].upper(), + "finantial_status": data["finantial_status"].upper(), + "educational_status": data["educational_status"].upper(), + } + ), + ], + ) """ 🔽🔽🔽 Post in bulk mode """ - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id_user__post__in_bulk__cohort_with_stage_deleted(self): """Test /cohort/:id/user without auth""" model = self.generate_models( authenticate=True, - cohort={'stage': 'DELETED'}, + cohort={"stage": "DELETED"}, user=True, profile_academy=True, ) - url = reverse_lazy('admissions:cohort_id_user', kwargs={'cohort_id': model['cohort'].id}) - data = [{ - 'user': model['user'].id, - }] - response = self.client.post(url, data, format='json') + url = reverse_lazy("admissions:cohort_id_user", kwargs={"cohort_id": model["cohort"].id}) + data = [ + { + "user": model["user"].id, + } + ] + response = self.client.post(url, data, format="json") json = response.json() - expected = {'detail': 'adding-student-to-a-closed-cohort', 'status_code': 400} + expected = {"detail": "adding-student-to-a-closed-cohort", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), []) + self.assertEqual(self.bc.database.list_of("admissions.CohortUser"), []) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id_user__post__in_bulk__zero_items(self): """Test /cohort/:id/user without auth""" model = self.generate_models(authenticate=True, cohort=True) - url = reverse_lazy('admissions:cohort_id_user', kwargs={'cohort_id': model['cohort'].id}) + url = reverse_lazy("admissions:cohort_id_user", kwargs={"cohort_id": model["cohort"].id}) data = [] - response = self.client.post(url, data, format='json') + response = self.client.post(url, data, format="json") json = response.json() expected = [] self.assertEqual(json, expected) 
self.assertEqual(response.status_code, status.HTTP_201_CREATED) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), []) - - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + self.assertEqual(self.bc.database.list_of("admissions.CohortUser"), []) + + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id_user__post__in_bulk__with_one_item(self): """Test /cohort/:id/user without auth""" - model = self.generate_models(authenticate=True, cohort={'stage': 'STARTED'}, user=True, profile_academy=True) - url = reverse_lazy('admissions:cohort_id_user', kwargs={'cohort_id': model['cohort'].id}) - data = [{ - 'user': model['user'].id, - }] - response = self.client.post(url, data, format='json') + model = self.generate_models(authenticate=True, cohort={"stage": "STARTED"}, user=True, profile_academy=True) + url = reverse_lazy("admissions:cohort_id_user", kwargs={"cohort_id": model["cohort"].id}) + data = [ + { + "user": model["user"].id, + } + ] + response = self.client.post(url, data, format="json") json = response.json() - expected = [post_serializer(self, model.cohort, model.user, model.profile_academy, data={ - 'role': 'STUDENT', - })] + expected = [ + post_serializer( + self, + model.cohort, + model.user, + model.profile_academy, + data={ + "role": "STUDENT", + }, + ) + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), [{ - 'cohort_id': 1, - 'educational_status': 'ACTIVE', - 'finantial_status': None, - 'id': 1, - 'role': 'STUDENT', - 'user_id': 1, - 'watching': False, - 'history_log': {}, - }]) - - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + self.assertEqual( + self.bc.database.list_of("admissions.CohortUser"), + [ + { + "cohort_id": 1, + "educational_status": "ACTIVE", + "finantial_status": None, + "id": 1, + "role": "STUDENT", + "user_id": 1, + "watching": False, + "history_log": {}, + } + ], + ) + + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.utils.timezone.now", 
MagicMock(return_value=UTC_NOW)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id_user__post__in_bulk__with_two_items(self): """Test /cohort/:id/user without auth""" - base = self.generate_models(authenticate=True, cohort={'stage': 'STARTED'}, profile_academy=True) - del base['user'] + base = self.generate_models(authenticate=True, cohort={"stage": "STARTED"}, profile_academy=True) + del base["user"] models = [self.generate_models(user=True, models=base) for _ in range(0, 2)] - url = reverse_lazy('admissions:cohort_id_user', kwargs={'cohort_id': models[0]['cohort'].id}) - data = [{ - 'user': model['user'].id, - } for model in models] - response = self.client.post(url, data, format='json') + url = reverse_lazy("admissions:cohort_id_user", kwargs={"cohort_id": models[0]["cohort"].id}) + data = [ + { + "user": model["user"].id, + } + for model in models + ] + response = self.client.post(url, data, format="json") json = response.json() expected = [ - post_serializer(self, model.cohort, model.user, None, data={ - 'id': model.user.id - 1, - 'role': 'STUDENT', - }) for model in models + post_serializer( + self, + model.cohort, + model.user, + None, + data={ + "id": model.user.id - 1, + "role": "STUDENT", + }, + ) + for model in models ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), [{ - 'cohort_id': 1, - 'educational_status': 'ACTIVE', - 'finantial_status': None, - 'id': 1, - 'role': 'STUDENT', - 'user_id': 2, - 'watching': False, - 'history_log': {}, - }, { - 'cohort_id': 1, - 'educational_status': 'ACTIVE', - 'finantial_status': None, - 'id': 2, - 'role': 'STUDENT', - 'user_id': 3, - 'watching': False, - 'history_log': {}, - }]) + self.assertEqual( + self.bc.database.list_of("admissions.CohortUser"), + [ + { + "cohort_id": 1, + "educational_status": "ACTIVE", + "finantial_status": None, + "id": 1, + "role": "STUDENT", + "user_id": 2, + "watching": False, + "history_log": {}, + }, + { + "cohort_id": 1, + "educational_status": "ACTIVE", + "finantial_status": None, + "id": 2, + "role": "STUDENT", + "user_id": 3, + "watching": False, + "history_log": {}, + }, + ], + ) """ 🔽🔽🔽 User in two cohort with the same certificate """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id_user__post__with_two_cohort__with_the_same_certificate(self): """Test /cohort/:id/user without auth""" models = [ - self.generate_models(authenticate=True, - cohort={'stage': 'STARTED'}, - user=True, - profile_academy=True, - cohort_user=True, - syllabus=True, 
- syllabus_schedule=True) + self.generate_models( + authenticate=True, + cohort={"stage": "STARTED"}, + user=True, + profile_academy=True, + cohort_user=True, + syllabus=True, + syllabus_schedule=True, + ) ] base = models[0].copy() - del base['user'] - del base['cohort'] - del base['cohort_user'] + del base["user"] + del base["cohort"] + del base["cohort_user"] - models = models + [self.generate_models(cohort={'stage': 'STARTED'}, user=True, cohort_user=True, models=base)] - url = reverse_lazy('admissions:cohort_id_user', kwargs={'cohort_id': models[1]['cohort'].id}) + models = models + [self.generate_models(cohort={"stage": "STARTED"}, user=True, cohort_user=True, models=base)] + url = reverse_lazy("admissions:cohort_id_user", kwargs={"cohort_id": models[1]["cohort"].id}) data = { - 'user': models[0]['user'].id, + "user": models[0]["user"].id, } - response = self.client.post(url, data, format='json') + response = self.client.post(url, data, format="json") json = response.json() expected = { - 'detail': ('This student is already in another cohort for the same ' - 'certificate, please mark him/her hi educational status on ' - 'this prior cohort different than ACTIVE before cotinuing'), - 'status_code': - 400 + "detail": ( + "This student is already in another cohort for the same " + "certificate, please mark him/her hi educational status on " + "this prior cohort different than ACTIVE before cotinuing" + ), + "status_code": 400, } self.assertEqual(json, expected) @@ -509,26 +566,24 @@ def test_cohort_id_user__post__with_two_cohort__with_the_same_certificate(self): 🔽🔽🔽 Post adding the same user twice """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id_user__post__twice(self): """Test /cohort/:id/user without auth""" - model = self.generate_models(authenticate=True, - cohort={'stage': 'STARTED'}, - user=True, - profile_academy=True, - cohort_user=True) - url = reverse_lazy('admissions:cohort_id_user', kwargs={'cohort_id': model['cohort'].id}) + model = self.generate_models( + authenticate=True, cohort={"stage": "STARTED"}, user=True, profile_academy=True, cohort_user=True + ) + url = reverse_lazy("admissions:cohort_id_user", kwargs={"cohort_id": model["cohort"].id}) data = { - 'user': model['user'].id, + "user": model["user"].id, } # self.client.post(url, data) - response = self.client.post(url, data, format='json') + response = self.client.post(url, data, format="json") json = response.json() - expected = {'detail': 'That user already exists in this cohort', 'status_code': 400} + expected = {"detail": "That user already exists in this cohort", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -537,201 +592,199 @@ def 
test_cohort_id_user__post__twice(self): 🔽🔽🔽 Post one teacher """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id_user__post__one_teacher__role_student(self): """Test /cohort/:id/user without auth""" - model = self.generate_models(authenticate=True, - cohort={'stage': 'STARTED'}, - user=True, - profile_academy=True, - role='student') - models_dict = self.bc.database.list_of('admissions.CohortUser') - url = reverse_lazy('admissions:cohort_id_user', kwargs={'cohort_id': model['cohort'].id}) - data = {'user': model['user'].id, 'role': 'TEACHER'} - response = self.client.post(url, data, format='json') + model = self.generate_models( + authenticate=True, cohort={"stage": "STARTED"}, user=True, profile_academy=True, role="student" + ) + models_dict = self.bc.database.list_of("admissions.CohortUser") + url = reverse_lazy("admissions:cohort_id_user", kwargs={"cohort_id": model["cohort"].id}) + data = {"user": model["user"].id, "role": "TEACHER"} + response = self.client.post(url, data, format="json") json = response.json() expected = { - 'detail': 'The user must be staff member to this academy before it can be a teacher', - 'status_code': 400, + "detail": "The user must be staff member to this academy before it can be a teacher", + "status_code": 400, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), []) - - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + self.assertEqual(self.bc.database.list_of("admissions.CohortUser"), []) + + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id_user__post__one_teacher__with_role_staff(self): """Test /cohort/:id/user without auth""" - check_cohort_user_that_not_have_role_student_can_be_teacher(self, 'staff') - - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - 
@patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + check_cohort_user_that_not_have_role_student_can_be_teacher(self, "staff") + + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id_user__post__one_teacher__with_role_teacher(self): """Test /cohort/:id/user without auth""" - check_cohort_user_that_not_have_role_student_can_be_teacher(self, 'teacher') - - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + check_cohort_user_that_not_have_role_student_can_be_teacher(self, "teacher") + + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id_user__post__one_teacher__with_role_syllabus_coordinator(self): """Test /cohort/:id/user without auth""" - check_cohort_user_that_not_have_role_student_can_be_teacher(self, 'syllabus_coordinator') - - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + check_cohort_user_that_not_have_role_student_can_be_teacher(self, "syllabus_coordinator") + + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def 
test_cohort_id_user__post__one_teacher__with_role_homework_reviewer(self): """Test /cohort/:id/user without auth""" - check_cohort_user_that_not_have_role_student_can_be_teacher(self, 'homework_reviewer') - - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + check_cohort_user_that_not_have_role_student_can_be_teacher(self, "homework_reviewer") + + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id_user__post__one_teacher__with_role_growth_manager(self): """Test /cohort/:id/user without auth""" - check_cohort_user_that_not_have_role_student_can_be_teacher(self, 'growth_manager') - - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + check_cohort_user_that_not_have_role_student_can_be_teacher(self, "growth_manager") + + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id_user__post__one_teacher__with_role_culture_and_recruitment(self): """Test /cohort/:id/user without auth""" - check_cohort_user_that_not_have_role_student_can_be_teacher(self, 'culture_and_recruitment') - - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + check_cohort_user_that_not_have_role_student_can_be_teacher(self, "culture_and_recruitment") + + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + 
@patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id_user__post__one_teacher__with_role_country_manager(self): """Test /cohort/:id/user without auth""" - check_cohort_user_that_not_have_role_student_can_be_teacher(self, 'country_manager') - - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + check_cohort_user_that_not_have_role_student_can_be_teacher(self, "country_manager") + + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id_user__post__one_teacher__with_role_community_manager(self): """Test /cohort/:id/user without auth""" - check_cohort_user_that_not_have_role_student_can_be_teacher(self, 'community_manager') - - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + check_cohort_user_that_not_have_role_student_can_be_teacher(self, "community_manager") + + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id_user__post__one_teacher__with_role_career_support(self): """Test /cohort/:id/user without auth""" - check_cohort_user_that_not_have_role_student_can_be_teacher(self, 'career_support') - - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + 
check_cohort_user_that_not_have_role_student_can_be_teacher(self, "career_support") + + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id_user__post__one_teacher__with_role_assistant(self): """Test /cohort/:id/user without auth""" - check_cohort_user_that_not_have_role_student_can_be_teacher(self, 'assistant') - - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + check_cohort_user_that_not_have_role_student_can_be_teacher(self, "assistant") + + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id_user__post__one_teacher__with_role_admissions_developer(self): """Test /cohort/:id/user without auth""" - check_cohort_user_that_not_have_role_student_can_be_teacher(self, 'admissions_developer') - - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + check_cohort_user_that_not_have_role_student_can_be_teacher(self, "admissions_developer") + + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id_user__post__one_teacher__with_role_admin(self): """Test /cohort/:id/user without auth""" - check_cohort_user_that_not_have_role_student_can_be_teacher(self, 'admin') - - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.utils.timezone.now', 
MagicMock(return_value=UTC_NOW)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + check_cohort_user_that_not_have_role_student_can_be_teacher(self, "admin") + + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id_user__post__one_teacher__with_role_academy_token(self): """Test /cohort/:id/user without auth""" - check_cohort_user_that_not_have_role_student_can_be_teacher(self, 'academy_token') - - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + check_cohort_user_that_not_have_role_student_can_be_teacher(self, "academy_token") + + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id_user__post__one_teacher__with_role_academy_coordinator(self): """Test /cohort/:id/user without auth""" - check_cohort_user_that_not_have_role_student_can_be_teacher(self, 'academy_coordinator') + check_cohort_user_that_not_have_role_student_can_be_teacher(self, "academy_coordinator") """ 🔽🔽🔽 Post just one main teacher for cohort """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id_user__post__two_teacher(self): """Test /cohort/:id/user without auth""" models = [ - self.generate_models(authenticate=True, cohort={'stage': 'STARTED'}, profile_academy=True, role='staff') + self.generate_models(authenticate=True, cohort={"stage": "STARTED"}, profile_academy=True, 
role="staff") ] base = models[0].copy() - del base['user'] - del base['profile_academy'] + del base["user"] + del base["profile_academy"] models = models + [self.generate_models(user=True, models=base, profile_academy=True)] - url = reverse_lazy('admissions:cohort_id_user', kwargs={'cohort_id': models[0]['cohort'].id}) - data = {'user': models[0]['user'].id, 'role': 'TEACHER'} - self.client.post(url, data, format='json') + url = reverse_lazy("admissions:cohort_id_user", kwargs={"cohort_id": models[0]["cohort"].id}) + data = {"user": models[0]["user"].id, "role": "TEACHER"} + self.client.post(url, data, format="json") - data = {'user': models[1]['user'].id, 'role': 'TEACHER'} - response = self.client.post(url, data, format='json') + data = {"user": models[1]["user"].id, "role": "TEACHER"} + response = self.client.post(url, data, format="json") json = response.json() - expected = {'detail': 'There can only be one main instructor in a cohort', 'status_code': 400} + expected = {"detail": "There can only be one main instructor in a cohort", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -739,39 +792,45 @@ def test_cohort_id_user__post__two_teacher(self): 🔽🔽🔽 Student cannot be graduated if has pending tasks """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id_user__post__with_unsuccess_task(self): """Test /cohort/:id/user without auth""" - task = {'task_status': 'PENDING', 'task_type': 'PROJECT', 'associated_slug': 'testing-slug'} - model = self.generate_models(authenticate=True, - cohort={'stage': 'STARTED'}, - user=True, - profile_academy=True, - task=task, - syllabus_version={ - 'id': 1, - 'json': { - 'days': [{ - 'assignments': [{ - 'slug': 'testing-slug', - }] - }] - } - }) - url = reverse_lazy('admissions:cohort_id_user', kwargs={'cohort_id': model['cohort'].id}) + task = {"task_status": "PENDING", "task_type": "PROJECT", "associated_slug": "testing-slug"} + model = self.generate_models( + authenticate=True, + cohort={"stage": "STARTED"}, + user=True, + profile_academy=True, + task=task, + syllabus_version={ + "id": 1, + "json": { + "days": [ + { + "assignments": [ + { + "slug": "testing-slug", + } + ] + } + ] + }, + }, + ) + url = reverse_lazy("admissions:cohort_id_user", kwargs={"cohort_id": model["cohort"].id}) data = { - 'user': model['user'].id, - 'educational_status': 'GRADUATED', + "user": model["user"].id, + "educational_status": "GRADUATED", } - response = self.client.post(url, data, format='json') + response = self.client.post(url, data, format="json") json = response.json() expected = { - 'detail': 'User has tasks with status pending the educational status cannot be GRADUATED', - 'status_code': 400 + "detail": "User has 
tasks with status pending the educational status cannot be GRADUATED", + "status_code": 400, } self.assertEqual(json, expected) @@ -781,23 +840,23 @@ def test_cohort_id_user__post__with_unsuccess_task(self): 🔽🔽🔽 Student cannot graduated if its financial status is late """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id_user__post__with_unsuccess_finantial_status(self): """Test /cohort/:id/user without auth""" - model = self.generate_models(authenticate=True, cohort={'stage': 'STARTED'}, user=True, profile_academy=True) - url = reverse_lazy('admissions:cohort_id_user', kwargs={'cohort_id': model['cohort'].id}) + model = self.generate_models(authenticate=True, cohort={"stage": "STARTED"}, user=True, profile_academy=True) + url = reverse_lazy("admissions:cohort_id_user", kwargs={"cohort_id": model["cohort"].id}) data = { - 'user': model['user'].id, - 'educational_status': 'GRADUATED', - 'finantial_status': 'LATE', + "user": model["user"].id, + "educational_status": "GRADUATED", + "finantial_status": "LATE", } - response = self.client.post(url, data, format='json') + response = self.client.post(url, data, format="json") json = response.json() - expected = {'detail': 'Cannot be marked as `GRADUATED` if its financial status is `LATE`', 'status_code': 400} + expected = {"detail": "Cannot be marked as `GRADUATED` if its financial status is `LATE`", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) diff --git a/breathecode/admissions/tests/urls/tests_cohort_id_user_id.py b/breathecode/admissions/tests/urls/tests_cohort_id_user_id.py index 10fbe749f..28f2d60ef 100644 --- a/breathecode/admissions/tests/urls/tests_cohort_id_user_id.py +++ b/breathecode/admissions/tests/urls/tests_cohort_id_user_id.py @@ -1,6 +1,7 @@ """ Test /cohort/:id/user/:id """ + import re from unittest.mock import MagicMock, patch @@ -19,36 +20,41 @@ def put_serializer(self, cohort_user, cohort, user, profile_academy=None, data={}): return { - 'cohort': { - 'ending_date': cohort.ending_date, - 'id': cohort.id, - 'kickoff_date': - self.bc.datetime.to_iso_string(cohort.kickoff_date) if cohort.kickoff_date else cohort.kickoff_date, - 'name': cohort.name, - 'slug': cohort.slug, - 'stage': cohort.stage, - 'available_as_saas': cohort.available_as_saas, + "cohort": { + "ending_date": cohort.ending_date, + "id": cohort.id, + "kickoff_date": ( + self.bc.datetime.to_iso_string(cohort.kickoff_date) if cohort.kickoff_date else cohort.kickoff_date + ), + "name": cohort.name, + "slug": cohort.slug, + "stage": cohort.stage, + "available_as_saas": cohort.available_as_saas, }, - 'created_at': self.bc.datetime.to_iso_string(cohort_user.created_at), - 'educational_status': 
cohort_user.educational_status, - 'finantial_status': cohort_user.finantial_status, - 'id': cohort_user.id, - 'profile_academy': { - 'email': profile_academy.email, - 'first_name': profile_academy.first_name, - 'id': profile_academy.id, - 'last_name': profile_academy.last_name, - 'phone': profile_academy.phone, - } if profile_academy else None, - 'role': cohort_user.role, - 'user': { - 'email': user.email, - 'first_name': user.first_name, - 'id': user.id, - 'last_name': user.last_name, - 'last_login': user.last_login, + "created_at": self.bc.datetime.to_iso_string(cohort_user.created_at), + "educational_status": cohort_user.educational_status, + "finantial_status": cohort_user.finantial_status, + "id": cohort_user.id, + "profile_academy": ( + { + "email": profile_academy.email, + "first_name": profile_academy.first_name, + "id": profile_academy.id, + "last_name": profile_academy.last_name, + "phone": profile_academy.phone, + } + if profile_academy + else None + ), + "role": cohort_user.role, + "user": { + "email": user.email, + "first_name": user.first_name, + "id": user.id, + "last_name": user.last_name, + "last_login": user.last_login, }, - 'watching': cohort_user.watching, + "watching": cohort_user.watching, **data, } @@ -56,141 +62,130 @@ def put_serializer(self, cohort_user, cohort, user, profile_academy=None, data={ class CohortIdUserIdTestSuite(AdmissionsTestCase): """Test /cohort/:id/user/:id""" - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id_user_id_without_auth(self): """Test /cohort/:id/user/:id without auth""" - url = reverse_lazy('admissions:cohort_id_user_id', kwargs={'cohort_id': 1, 'user_id': 1}) + url = reverse_lazy("admissions:cohort_id_user_id", kwargs={"cohort_id": 1, "user_id": 1}) response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED - }) + self.assertEqual( + json, + {"detail": "Authentication credentials were not provided.", "status_code": status.HTTP_401_UNAUTHORIZED}, + ) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], 
apply_google_cloud_blob_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id_user_id_put_with_bad_cohort_id(self): """Test /cohort/:id/user/:id without auth""" model = self.generate_models(authenticate=True) - url = reverse_lazy('admissions:cohort_id_user_id', kwargs={'cohort_id': 1, 'user_id': 1}) + url = reverse_lazy("admissions:cohort_id_user_id", kwargs={"cohort_id": 1, "user_id": 1}) data = {} - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() - expected = {'status_code': 400, 'detail': 'Cannot determine CohortUser'} + expected = {"status_code": 400, "detail": "Cannot determine CohortUser"} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id_user_id_put_with_bad_user_id(self): """Test /cohort/:id/user/:id without auth""" model = self.generate_models(authenticate=True, cohort=True) - url = reverse_lazy('admissions:cohort_id_user_id', kwargs={'cohort_id': model.cohort.id, 'user_id': 999}) + url = reverse_lazy("admissions:cohort_id_user_id", kwargs={"cohort_id": model.cohort.id, "user_id": 999}) data = {} - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() - expected = {'status_code': 400, 'detail': 'Cannot determine CohortUser'} + expected = {"status_code": 400, "detail": "Cannot determine CohortUser"} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id_user_id_put_with_bad_id(self): """Test /cohort/:id/user/:id without auth""" model = self.generate_models(authenticate=True, cohort=True, 
user=True) - url = reverse_lazy('admissions:cohort_id_user_id', - kwargs={ - 'cohort_id': model.cohort.id, - 'user_id': model.user.id - }) + url = reverse_lazy( + "admissions:cohort_id_user_id", kwargs={"cohort_id": model.cohort.id, "user_id": model.user.id} + ) data = {} - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() - expected = {'status_code': 400, 'detail': 'Cannot determine CohortUser'} + expected = {"status_code": 400, "detail": "Cannot determine CohortUser"} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id_user_id_put_with_id_but_without_user(self): """Test /cohort/:id/user/:id without auth""" model = self.generate_models(authenticate=True, cohort=True) - url = reverse_lazy('admissions:cohort_id_user_id', - kwargs={ - 'cohort_id': model.cohort.id, - 'user_id': model.user.id - }) + url = reverse_lazy( + "admissions:cohort_id_user_id", kwargs={"cohort_id": model.cohort.id, "user_id": model.user.id} + ) data = {} - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() - expected = {'status_code': 400, 'detail': 'Cannot determine CohortUser'} + expected = {"status_code": 400, "detail": "Cannot determine CohortUser"} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id_user_id_put_with_id_but_with_user(self): """Test /cohort/:id/user/:id without auth""" model = self.generate_models(authenticate=True, cohort=True, user=True) - url = reverse_lazy('admissions:cohort_id_user_id', - kwargs={ - 'cohort_id': model.cohort.id, - 'user_id': model.user.id - }) + url = reverse_lazy( + "admissions:cohort_id_user_id", kwargs={"cohort_id": model.cohort.id, "user_id": model.user.id} + 
) data = {} - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() - expected = {'status_code': 400, 'detail': 'Cannot determine CohortUser'} + expected = {"status_code": 400, "detail": "Cannot determine CohortUser"} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id_user_id_put_with_id(self): """Test /cohort/:id/user/:id without auth""" - model = self.generate_models(authenticate=True, - cohort=True, - user=True, - syllabus_schedule=True, - profile_academy=True, - cohort_user=True) + model = self.generate_models( + authenticate=True, cohort=True, user=True, syllabus_schedule=True, profile_academy=True, cohort_user=True + ) model_dict = self.get_cohort_user_dict(1) - url = reverse_lazy('admissions:cohort_id_user_id', - kwargs={ - 'cohort_id': model.cohort.id, - 'user_id': model.user.id - }) - data = {'schedule': model.syllabus_schedule.id} - response = self.client.put(url, data, format='json') + url = reverse_lazy( + "admissions:cohort_id_user_id", kwargs={"cohort_id": model.cohort.id, "user_id": model.user.id} + ) + data = {"schedule": model.syllabus_schedule.id} + response = self.client.put(url, data, format="json") json = response.json() expected = put_serializer(self, model.cohort_user, model.cohort, model.user, model.profile_academy, data={}) @@ -199,160 +194,151 @@ def test_cohort_id_user_id_put_with_id(self): self.assertEqual(self.count_cohort_user(), 1) self.assertEqual(self.get_cohort_user_dict(1), model_dict) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id_user_id_delete_with_id_with_bad_user_id(self): """Test /cohort/:id/user/:id without auth""" - model = self.generate_models(authenticate=True, - cohort=True, - user=True, - syllabus_schedule=True, - profile_academy=True, - cohort_user=True) - url = 
reverse_lazy('admissions:cohort_id_user_id', kwargs={'cohort_id': model.cohort.id, 'user_id': 9999}) - data = {'schedule': model.syllabus_schedule.id} + model = self.generate_models( + authenticate=True, cohort=True, user=True, syllabus_schedule=True, profile_academy=True, cohort_user=True + ) + url = reverse_lazy("admissions:cohort_id_user_id", kwargs={"cohort_id": model.cohort.id, "user_id": 9999}) + data = {"schedule": model.syllabus_schedule.id} response = self.client.delete(url, data) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id_user_id_delete_with_id_with_bad_cohort_id(self): """Test /cohort/:id/user/:id without auth""" - model = self.generate_models(authenticate=True, - cohort=True, - user=True, - syllabus_schedule=True, - profile_academy=True, - cohort_user=True) - url = reverse_lazy('admissions:cohort_id_user_id', kwargs={'cohort_id': 9999, 'user_id': model.user.id}) - data = {'schedule': model.syllabus_schedule.id} + model = self.generate_models( + authenticate=True, cohort=True, user=True, syllabus_schedule=True, profile_academy=True, cohort_user=True + ) + url = reverse_lazy("admissions:cohort_id_user_id", kwargs={"cohort_id": 9999, "user_id": model.user.id}) + data = {"schedule": model.syllabus_schedule.id} response = self.client.delete(url, data) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id_user_id_delete_with_id(self): """Test /cohort/:id/user/:id without auth""" - model = self.generate_models(authenticate=True, - cohort=True, - user=True, - syllabus_schedule=True, - profile_academy=True, - cohort_user=True) - url = reverse_lazy('admissions:cohort_id_user_id', - kwargs={ - 'cohort_id': model.cohort.id, - 'user_id': model.user.id - }) - data = {'schedule': model.syllabus_schedule.id} + model = self.generate_models( + authenticate=True, cohort=True, user=True, 
syllabus_schedule=True, profile_academy=True, cohort_user=True + ) + url = reverse_lazy( + "admissions:cohort_id_user_id", kwargs={"cohort_id": model.cohort.id, "user_id": model.user.id} + ) + data = {"schedule": model.syllabus_schedule.id} response = self.client.delete(url, data) self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) self.assertEqual(self.count_cohort_user(), 0) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id_user_id_put_with_unsuccess_task(self): """Test /cohort/:id/user/:id without auth""" - task = {'task_status': 'PENDING', 'task_type': 'PROJECT', 'associated_slug': 'testing-slug'} - model = self.generate_models(authenticate=True, - cohort=True, - user=True, - profile_academy=True, - cohort_user=True, - task=task, - syllabus_version={ - 'id': 1, - 'json': { - 'days': [{ - 'assignments': [{ - 'slug': 'testing-slug', - }] - }] - } - }) - url = reverse_lazy('admissions:cohort_id_user_id', - kwargs={ - 'cohort_id': model.cohort.id, - 'user_id': model.user.id - }) + task = {"task_status": "PENDING", "task_type": "PROJECT", "associated_slug": "testing-slug"} + model = self.generate_models( + authenticate=True, + cohort=True, + user=True, + profile_academy=True, + cohort_user=True, + task=task, + syllabus_version={ + "id": 1, + "json": { + "days": [ + { + "assignments": [ + { + "slug": "testing-slug", + } + ] + } + ] + }, + }, + ) + url = reverse_lazy( + "admissions:cohort_id_user_id", kwargs={"cohort_id": model.cohort.id, "user_id": model.user.id} + ) data = { - 'educational_status': 'GRADUATED', + "educational_status": "GRADUATED", } - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() expected = { - 'status_code': 400, - 'detail': 'User has tasks with status pending the educational status cannot be GRADUATED', + "status_code": 400, + "detail": "User has tasks with status pending the educational status cannot be GRADUATED", } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", 
MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id_user_id_put_with_unsuccess_finantial_status(self): """Test /cohort/:id/user/:id without auth""" model = self.generate_models(authenticate=True, cohort=True, user=True, profile_academy=True, cohort_user=True) - url = reverse_lazy('admissions:cohort_id_user_id', - kwargs={ - 'cohort_id': model.cohort.id, - 'user_id': model.user.id - }) + url = reverse_lazy( + "admissions:cohort_id_user_id", kwargs={"cohort_id": model.cohort.id, "user_id": model.user.id} + ) data = { - 'educational_status': 'GRADUATED', - 'finantial_status': 'LATE', + "educational_status": "GRADUATED", + "finantial_status": "LATE", } - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() expected = { - 'status_code': 400, - 'detail': 'Cannot be marked as `GRADUATED` if its financial status is `LATE`', + "status_code": 400, + "detail": "Cannot be marked as `GRADUATED` if its financial status is `LATE`", } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id_user_id_put_with_stage_delete(self): """Test /cohort/:id/user/:id without auth""" - cohort_kwargs = {'stage': 'DELETED'} - model = self.generate_models(authenticate=True, - cohort=True, - user=True, - profile_academy=True, - cohort_user=True, - cohort_kwargs=cohort_kwargs) - - url = reverse_lazy('admissions:cohort_id_user_id', - kwargs={ - 'cohort_id': model.cohort.id, - 'user_id': model.user.id - }) + cohort_kwargs = {"stage": "DELETED"} + model = self.generate_models( + authenticate=True, + cohort=True, + user=True, + profile_academy=True, + cohort_user=True, + cohort_kwargs=cohort_kwargs, + ) + + url = reverse_lazy( + "admissions:cohort_id_user_id", kwargs={"cohort_id": model.cohort.id, "user_id": model.user.id} + ) data = { - 'educational_status': 'GRADUATED', - 'finantial_status': 'LATE', + "educational_status": "GRADUATED", + "finantial_status": "LATE", } - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() expected = { - 'status_code': 400, - 'detail': 'cohort-with-stage-deleted', + "status_code": 400, + "detail": "cohort-with-stage-deleted", } self.assertEqual(json, expected) diff --git a/breathecode/admissions/tests/urls/tests_cohort_user.py b/breathecode/admissions/tests/urls/tests_cohort_user.py index 05c0000d5..ea1a6f96f 100644 --- a/breathecode/admissions/tests/urls/tests_cohort_user.py +++ b/breathecode/admissions/tests/urls/tests_cohort_user.py @@ -1,6 +1,7 @@ """ Test /cohort/user """ + import re from random import choice from unittest.mock import MagicMock, call, patch @@ -22,36 +23,41 @@ def put_serializer(self, cohort_user, cohort, user, profile_academy=None, data={}): return { - 'cohort': { - 'ending_date': cohort.ending_date, - 'id': cohort.id, - 'kickoff_date': - self.bc.datetime.to_iso_string(cohort.kickoff_date) if cohort.kickoff_date else cohort.kickoff_date, - 'name': 
cohort.name, - 'slug': cohort.slug, - 'stage': cohort.stage, - 'available_as_saas': cohort.available_as_saas + "cohort": { + "ending_date": cohort.ending_date, + "id": cohort.id, + "kickoff_date": ( + self.bc.datetime.to_iso_string(cohort.kickoff_date) if cohort.kickoff_date else cohort.kickoff_date + ), + "name": cohort.name, + "slug": cohort.slug, + "stage": cohort.stage, + "available_as_saas": cohort.available_as_saas, }, - 'created_at': self.bc.datetime.to_iso_string(cohort_user.created_at), - 'educational_status': cohort_user.educational_status, - 'finantial_status': cohort_user.finantial_status, - 'id': cohort_user.id, - 'profile_academy': { - 'email': profile_academy.email, - 'first_name': profile_academy.first_name, - 'id': profile_academy.id, - 'last_name': profile_academy.last_name, - 'phone': profile_academy.phone, - } if profile_academy else None, - 'role': cohort_user.role, - 'user': { - 'email': user.email, - 'first_name': user.first_name, - 'id': user.id, - 'last_name': user.last_name, - 'last_login': user.last_login, + "created_at": self.bc.datetime.to_iso_string(cohort_user.created_at), + "educational_status": cohort_user.educational_status, + "finantial_status": cohort_user.finantial_status, + "id": cohort_user.id, + "profile_academy": ( + { + "email": profile_academy.email, + "first_name": profile_academy.first_name, + "id": profile_academy.id, + "last_name": profile_academy.last_name, + "phone": profile_academy.phone, + } + if profile_academy + else None + ), + "role": cohort_user.role, + "user": { + "email": user.email, + "first_name": user.first_name, + "id": user.id, + "last_name": user.last_name, + "last_login": user.last_login, }, - 'watching': cohort_user.watching, + "watching": cohort_user.watching, **data, } @@ -59,26 +65,26 @@ def put_serializer(self, cohort_user, cohort, user, profile_academy=None, data={ class CohortUserTestSuite(AdmissionsTestCase): """Test /cohort/user""" - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_without_auth(self): """Test /cohort/user without auth""" - url = reverse_lazy('admissions:cohort_user') + url = reverse_lazy("admissions:cohort_user") response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED - }) + self.assertEqual( + json, + {"detail": "Authentication credentials were not provided.", "status_code": status.HTTP_401_UNAUTHORIZED}, + ) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_without_data(self): """Test /cohort/user without auth""" model = self.generate_models(authenticate=True) - url = reverse_lazy('admissions:cohort_user') + url = reverse_lazy("admissions:cohort_user") response = self.client.get(url) json 
= response.json() @@ -86,60 +92,62 @@ def test_without_data(self): self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(self.count_cohort_user(), 0) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_with_data(self): """Test /cohort/user without auth""" model = self.generate_models(authenticate=True, cohort_user=True) - model_dict = self.remove_dinamics_fields(model['cohort_user'].__dict__) - url = reverse_lazy('admissions:cohort_user') + model_dict = self.remove_dinamics_fields(model["cohort_user"].__dict__) + url = reverse_lazy("admissions:cohort_user") response = self.client.get(url) json = response.json() - expected = [{ - 'id': model['cohort_user'].id, - 'role': model['cohort_user'].role, - 'finantial_status': model['cohort_user'].finantial_status, - 'educational_status': model['cohort_user'].educational_status, - 'created_at': re.sub(r'\+00:00$', 'Z', model['cohort_user'].created_at.isoformat()), - 'cohort': { - 'id': model['cohort_user'].cohort.id, - 'slug': model['cohort_user'].cohort.slug, - 'name': model['cohort_user'].cohort.name, - 'kickoff_date': re.sub(r'\+00:00$', 'Z', model['cohort_user'].cohort.kickoff_date.isoformat()), - 'ending_date': model['cohort_user'].cohort.ending_date, - 'stage': model['cohort_user'].cohort.stage, - 'available_as_saas': model['cohort_user'].cohort.available_as_saas, - }, - 'user': { - 'id': model['cohort_user'].user.id, - 'first_name': model['cohort_user'].user.first_name, - 'last_name': model['cohort_user'].user.last_name, - 'email': model['cohort_user'].user.email, - }, - 'profile_academy': None, - 'watching': False, - }] + expected = [ + { + "id": model["cohort_user"].id, + "role": model["cohort_user"].role, + "finantial_status": model["cohort_user"].finantial_status, + "educational_status": model["cohort_user"].educational_status, + "created_at": re.sub(r"\+00:00$", "Z", model["cohort_user"].created_at.isoformat()), + "cohort": { + "id": model["cohort_user"].cohort.id, + "slug": model["cohort_user"].cohort.slug, + "name": model["cohort_user"].cohort.name, + "kickoff_date": re.sub(r"\+00:00$", "Z", model["cohort_user"].cohort.kickoff_date.isoformat()), + "ending_date": model["cohort_user"].cohort.ending_date, + "stage": model["cohort_user"].cohort.stage, + "available_as_saas": model["cohort_user"].cohort.available_as_saas, + }, + "user": { + "id": model["cohort_user"].user.id, + "first_name": model["cohort_user"].user.first_name, + "last_name": model["cohort_user"].user.last_name, + "email": model["cohort_user"].user.email, + }, + "profile_academy": None, + "watching": False, + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(self.count_cohort_user(), 1) 
self.assertEqual(self.get_cohort_user_dict(1), model_dict) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_with_data_with_bad_roles(self): """Test /cohort/user without auth""" model = self.generate_models(authenticate=True, cohort_user=True) - model_dict = self.remove_dinamics_fields(model['cohort_user'].__dict__) - base_url = reverse_lazy('admissions:cohort_user') - url = f'{base_url}?roles=they-killed-kenny' + model_dict = self.remove_dinamics_fields(model["cohort_user"].__dict__) + base_url = reverse_lazy("admissions:cohort_user") + url = f"{base_url}?roles=they-killed-kenny" response = self.client.get(url) json = response.json() @@ -148,106 +156,110 @@ def test_with_data_with_bad_roles(self): self.assertEqual(self.count_cohort_user(), 1) self.assertEqual(self.get_cohort_user_dict(1), model_dict) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_with_data_with_roles(self): """Test /cohort/user without auth""" model = self.generate_models(authenticate=True, cohort_user=True) - model_dict = self.remove_dinamics_fields(model['cohort_user'].__dict__) - base_url = reverse_lazy('admissions:cohort_user') - url = f'{base_url}?roles=' + model['cohort_user'].role + model_dict = self.remove_dinamics_fields(model["cohort_user"].__dict__) + base_url = reverse_lazy("admissions:cohort_user") + url = f"{base_url}?roles=" + model["cohort_user"].role response = self.client.get(url) json = response.json() - expected = [{ - 'id': model['cohort_user'].id, - 'role': model['cohort_user'].role, - 'finantial_status': model['cohort_user'].finantial_status, - 'educational_status': model['cohort_user'].educational_status, - 'created_at': re.sub(r'\+00:00$', 'Z', model['cohort_user'].created_at.isoformat()), - 'cohort': { - 'id': model['cohort_user'].cohort.id, - 'slug': 
model['cohort_user'].cohort.slug, - 'name': model['cohort_user'].cohort.name, - 'kickoff_date': re.sub(r'\+00:00$', 'Z', model['cohort_user'].cohort.kickoff_date.isoformat()), - 'ending_date': model['cohort_user'].cohort.ending_date, - 'stage': model['cohort_user'].cohort.stage, - 'available_as_saas': model['cohort_user'].cohort.available_as_saas, - }, - 'user': { - 'id': model['cohort_user'].user.id, - 'first_name': model['cohort_user'].user.first_name, - 'last_name': model['cohort_user'].user.last_name, - 'email': model['cohort_user'].user.email, - 'last_login': model['cohort_user'].user.last_login, - }, - 'profile_academy': None, - 'watching': False, - }] + expected = [ + { + "id": model["cohort_user"].id, + "role": model["cohort_user"].role, + "finantial_status": model["cohort_user"].finantial_status, + "educational_status": model["cohort_user"].educational_status, + "created_at": re.sub(r"\+00:00$", "Z", model["cohort_user"].created_at.isoformat()), + "cohort": { + "id": model["cohort_user"].cohort.id, + "slug": model["cohort_user"].cohort.slug, + "name": model["cohort_user"].cohort.name, + "kickoff_date": re.sub(r"\+00:00$", "Z", model["cohort_user"].cohort.kickoff_date.isoformat()), + "ending_date": model["cohort_user"].cohort.ending_date, + "stage": model["cohort_user"].cohort.stage, + "available_as_saas": model["cohort_user"].cohort.available_as_saas, + }, + "user": { + "id": model["cohort_user"].user.id, + "first_name": model["cohort_user"].user.first_name, + "last_name": model["cohort_user"].user.last_name, + "email": model["cohort_user"].user.email, + "last_login": model["cohort_user"].user.last_login, + }, + "profile_academy": None, + "watching": False, + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(self.count_cohort_user(), 1) self.assertEqual(self.get_cohort_user_dict(1), model_dict) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_with_data_with_roles_with_comma(self): """Test /cohort/user without auth""" model = self.generate_models(authenticate=True, cohort_user=True) - model_dict = self.remove_dinamics_fields(model['cohort_user'].__dict__) - base_url = reverse_lazy('admissions:cohort_user') - url = f'{base_url}?roles=' + model['cohort_user'].role + ',they-killed-kenny' + model_dict = self.remove_dinamics_fields(model["cohort_user"].__dict__) + base_url = reverse_lazy("admissions:cohort_user") + url = f"{base_url}?roles=" + model["cohort_user"].role + ",they-killed-kenny" response = self.client.get(url) json = response.json() - expected = [{ - 'id': model['cohort_user'].id, - 'user': { - 'id': model['cohort_user'].user.id, - 'first_name': model['cohort_user'].user.first_name, - 'last_name': model['cohort_user'].user.last_name, - 
'email': model['cohort_user'].user.email, - 'last_login': model['cohort_user'].user.last_login, - }, - 'profile_academy': None, - 'cohort': { - 'id': model['cohort_user'].cohort.id, - 'slug': model['cohort_user'].cohort.slug, - 'name': model['cohort_user'].cohort.name, - 'kickoff_date': re.sub(r'\+00:00$', 'Z', model['cohort_user'].cohort.kickoff_date.isoformat()), - 'ending_date': model['cohort_user'].cohort.ending_date, - 'stage': model['cohort_user'].cohort.stage, - 'available_as_saas': model['cohort_user'].cohort.available_as_saas, - }, - 'role': model['cohort_user'].role, - 'finantial_status': model['cohort_user'].finantial_status, - 'educational_status': model['cohort_user'].educational_status, - 'created_at': re.sub(r'\+00:00$', 'Z', model['cohort_user'].created_at.isoformat()), - 'watching': False, - }] + expected = [ + { + "id": model["cohort_user"].id, + "user": { + "id": model["cohort_user"].user.id, + "first_name": model["cohort_user"].user.first_name, + "last_name": model["cohort_user"].user.last_name, + "email": model["cohort_user"].user.email, + "last_login": model["cohort_user"].user.last_login, + }, + "profile_academy": None, + "cohort": { + "id": model["cohort_user"].cohort.id, + "slug": model["cohort_user"].cohort.slug, + "name": model["cohort_user"].cohort.name, + "kickoff_date": re.sub(r"\+00:00$", "Z", model["cohort_user"].cohort.kickoff_date.isoformat()), + "ending_date": model["cohort_user"].cohort.ending_date, + "stage": model["cohort_user"].cohort.stage, + "available_as_saas": model["cohort_user"].cohort.available_as_saas, + }, + "role": model["cohort_user"].role, + "finantial_status": model["cohort_user"].finantial_status, + "educational_status": model["cohort_user"].educational_status, + "created_at": re.sub(r"\+00:00$", "Z", model["cohort_user"].created_at.isoformat()), + "watching": False, + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(self.count_cohort_user(), 1) self.assertEqual(self.get_cohort_user_dict(1), model_dict) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_user_with_data_with_bad_financial_status(self): """Test /cohort/user without auth""" model = self.generate_models(authenticate=True, cohort_user=True) - model_dict = self.remove_dinamics_fields(model['cohort_user'].__dict__) - base_url = reverse_lazy('admissions:cohort_user') - url = f'{base_url}?finantial_status=they-killed-kenny' + model_dict = self.remove_dinamics_fields(model["cohort_user"].__dict__) + base_url = reverse_lazy("admissions:cohort_user") + url = f"{base_url}?finantial_status=they-killed-kenny" response = self.client.get(url) json = response.json() @@ -256,109 +268,113 @@ def 
test_cohort_user_with_data_with_bad_financial_status(self): self.assertEqual(self.count_cohort_user(), 1) self.assertEqual(self.get_cohort_user_dict(1), model_dict) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_with_data_with_finantial_status(self): """Test /cohort/user without auth""" - model = self.generate_models(authenticate=True, - cohort_user=True, - cohort_user_kwargs={'finantial_status': 'LATE'}) - model_dict = self.remove_dinamics_fields(model['cohort_user'].__dict__) - base_url = reverse_lazy('admissions:cohort_user') - url = f'{base_url}?finantial_status=' + model['cohort_user'].finantial_status + model = self.generate_models( + authenticate=True, cohort_user=True, cohort_user_kwargs={"finantial_status": "LATE"} + ) + model_dict = self.remove_dinamics_fields(model["cohort_user"].__dict__) + base_url = reverse_lazy("admissions:cohort_user") + url = f"{base_url}?finantial_status=" + model["cohort_user"].finantial_status response = self.client.get(url) json = response.json() - expected = [{ - 'id': model['cohort_user'].id, - 'role': model['cohort_user'].role, - 'finantial_status': model['cohort_user'].finantial_status, - 'educational_status': model['cohort_user'].educational_status, - 'created_at': re.sub(r'\+00:00$', 'Z', model['cohort_user'].created_at.isoformat()), - 'cohort': { - 'id': model['cohort_user'].cohort.id, - 'slug': model['cohort_user'].cohort.slug, - 'name': model['cohort_user'].cohort.name, - 'kickoff_date': re.sub(r'\+00:00$', 'Z', model['cohort_user'].cohort.kickoff_date.isoformat()), - 'ending_date': model['cohort_user'].cohort.ending_date, - 'stage': model['cohort_user'].cohort.stage, - 'available_as_saas': model['cohort_user'].cohort.available_as_saas, - }, - 'user': { - 'id': model['cohort_user'].user.id, - 'first_name': model['cohort_user'].user.first_name, - 'last_name': model['cohort_user'].user.last_name, - 'email': model['cohort_user'].user.email, - 'last_login': model['cohort_user'].user.last_login, - }, - 'profile_academy': None, - 'watching': False, - }] + expected = [ + { + "id": model["cohort_user"].id, + "role": model["cohort_user"].role, + "finantial_status": model["cohort_user"].finantial_status, + "educational_status": model["cohort_user"].educational_status, + "created_at": re.sub(r"\+00:00$", "Z", model["cohort_user"].created_at.isoformat()), + "cohort": { + "id": model["cohort_user"].cohort.id, + "slug": model["cohort_user"].cohort.slug, + "name": model["cohort_user"].cohort.name, + "kickoff_date": re.sub(r"\+00:00$", "Z", model["cohort_user"].cohort.kickoff_date.isoformat()), + "ending_date": model["cohort_user"].cohort.ending_date, + "stage": model["cohort_user"].cohort.stage, + "available_as_saas": model["cohort_user"].cohort.available_as_saas, + }, + "user": { + 
"id": model["cohort_user"].user.id, + "first_name": model["cohort_user"].user.first_name, + "last_name": model["cohort_user"].user.last_name, + "email": model["cohort_user"].user.email, + "last_login": model["cohort_user"].user.last_login, + }, + "profile_academy": None, + "watching": False, + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(self.count_cohort_user(), 1) self.assertEqual(self.get_cohort_user_dict(1), model_dict) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_with_data_with_finantial_status_with_comma(self): """Test /cohort/user without auth""" - model = self.generate_models(authenticate=True, - cohort_user=True, - cohort_user_kwargs={'finantial_status': 'LATE'}) - model_dict = self.remove_dinamics_fields(model['cohort_user'].__dict__) - base_url = reverse_lazy('admissions:cohort_user') - url = (f'{base_url}?finantial_status=' + model['cohort_user'].finantial_status + ',they-killed-kenny') + model = self.generate_models( + authenticate=True, cohort_user=True, cohort_user_kwargs={"finantial_status": "LATE"} + ) + model_dict = self.remove_dinamics_fields(model["cohort_user"].__dict__) + base_url = reverse_lazy("admissions:cohort_user") + url = f"{base_url}?finantial_status=" + model["cohort_user"].finantial_status + ",they-killed-kenny" response = self.client.get(url) json = response.json() - expected = [{ - 'id': model['cohort_user'].id, - 'role': model['cohort_user'].role, - 'finantial_status': model['cohort_user'].finantial_status, - 'educational_status': model['cohort_user'].educational_status, - 'created_at': re.sub(r'\+00:00$', 'Z', model['cohort_user'].created_at.isoformat()), - 'user': { - 'id': model['cohort_user'].user.id, - 'first_name': model['cohort_user'].user.first_name, - 'last_name': model['cohort_user'].user.last_name, - 'email': model['cohort_user'].user.email, - 'last_login': model['cohort_user'].user.last_login, - }, - 'profile_academy': None, - 'cohort': { - 'id': model['cohort_user'].cohort.id, - 'slug': model['cohort_user'].cohort.slug, - 'name': model['cohort_user'].cohort.name, - 'kickoff_date': re.sub(r'\+00:00$', 'Z', model['cohort_user'].cohort.kickoff_date.isoformat()), - 'ending_date': model['cohort_user'].cohort.ending_date, - 'stage': model['cohort_user'].cohort.stage, - 'available_as_saas': model['cohort_user'].cohort.available_as_saas, - }, - 'watching': False, - }] + expected = [ + { + "id": model["cohort_user"].id, + "role": model["cohort_user"].role, + "finantial_status": model["cohort_user"].finantial_status, + "educational_status": model["cohort_user"].educational_status, + "created_at": re.sub(r"\+00:00$", "Z", model["cohort_user"].created_at.isoformat()), + "user": { + "id": 
model["cohort_user"].user.id, + "first_name": model["cohort_user"].user.first_name, + "last_name": model["cohort_user"].user.last_name, + "email": model["cohort_user"].user.email, + "last_login": model["cohort_user"].user.last_login, + }, + "profile_academy": None, + "cohort": { + "id": model["cohort_user"].cohort.id, + "slug": model["cohort_user"].cohort.slug, + "name": model["cohort_user"].cohort.name, + "kickoff_date": re.sub(r"\+00:00$", "Z", model["cohort_user"].cohort.kickoff_date.isoformat()), + "ending_date": model["cohort_user"].cohort.ending_date, + "stage": model["cohort_user"].cohort.stage, + "available_as_saas": model["cohort_user"].cohort.available_as_saas, + }, + "watching": False, + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(self.count_cohort_user(), 1) self.assertEqual(self.get_cohort_user_dict(1), model_dict) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_with_data_with_bad_educational_status(self): """Test /cohort/user without auth""" model = self.generate_models(authenticate=True, cohort_user=True) - model_dict = self.remove_dinamics_fields(model['cohort_user'].__dict__) - base_url = reverse_lazy('admissions:cohort_user') - url = f'{base_url}?educational_status=they-killed-kenny' + model_dict = self.remove_dinamics_fields(model["cohort_user"].__dict__) + base_url = reverse_lazy("admissions:cohort_user") + url = f"{base_url}?educational_status=they-killed-kenny" response = self.client.get(url) json = response.json() @@ -367,112 +383,115 @@ def test_with_data_with_bad_educational_status(self): self.assertEqual(self.count_cohort_user(), 1) self.assertEqual(self.get_cohort_user_dict(1), model_dict) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def 
test_with_data_with_educational_status(self): """Test /cohort/user without auth""" - model = self.generate_models(authenticate=True, - cohort_user=True, - cohort_user_kwargs={'educational_status': 'GRADUATED'}) - model_dict = self.remove_dinamics_fields(model['cohort_user'].__dict__) - base_url = reverse_lazy('admissions:cohort_user') - url = f'{base_url}?educational_status=' + model['cohort_user'].educational_status + model = self.generate_models( + authenticate=True, cohort_user=True, cohort_user_kwargs={"educational_status": "GRADUATED"} + ) + model_dict = self.remove_dinamics_fields(model["cohort_user"].__dict__) + base_url = reverse_lazy("admissions:cohort_user") + url = f"{base_url}?educational_status=" + model["cohort_user"].educational_status response = self.client.get(url) json = response.json() - expected = [{ - 'id': model['cohort_user'].id, - 'role': model['cohort_user'].role, - 'finantial_status': model['cohort_user'].finantial_status, - 'educational_status': model['cohort_user'].educational_status, - 'created_at': re.sub(r'\+00:00$', 'Z', model['cohort_user'].created_at.isoformat()), - 'user': { - 'id': model['cohort_user'].user.id, - 'first_name': model['cohort_user'].user.first_name, - 'last_name': model['cohort_user'].user.last_name, - 'email': model['cohort_user'].user.email, - 'last_login': model['cohort_user'].user.last_login, - }, - 'profile_academy': None, - 'cohort': { - 'id': model['cohort_user'].cohort.id, - 'slug': model['cohort_user'].cohort.slug, - 'name': model['cohort_user'].cohort.name, - 'kickoff_date': re.sub(r'\+00:00$', 'Z', model['cohort_user'].cohort.kickoff_date.isoformat()), - 'ending_date': model['cohort_user'].cohort.ending_date, - 'stage': model['cohort_user'].cohort.stage, - 'available_as_saas': model['cohort_user'].cohort.available_as_saas, - }, - 'watching': False, - }] + expected = [ + { + "id": model["cohort_user"].id, + "role": model["cohort_user"].role, + "finantial_status": model["cohort_user"].finantial_status, + "educational_status": model["cohort_user"].educational_status, + "created_at": re.sub(r"\+00:00$", "Z", model["cohort_user"].created_at.isoformat()), + "user": { + "id": model["cohort_user"].user.id, + "first_name": model["cohort_user"].user.first_name, + "last_name": model["cohort_user"].user.last_name, + "email": model["cohort_user"].user.email, + "last_login": model["cohort_user"].user.last_login, + }, + "profile_academy": None, + "cohort": { + "id": model["cohort_user"].cohort.id, + "slug": model["cohort_user"].cohort.slug, + "name": model["cohort_user"].cohort.name, + "kickoff_date": re.sub(r"\+00:00$", "Z", model["cohort_user"].cohort.kickoff_date.isoformat()), + "ending_date": model["cohort_user"].cohort.ending_date, + "stage": model["cohort_user"].cohort.stage, + "available_as_saas": model["cohort_user"].cohort.available_as_saas, + }, + "watching": False, + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(self.count_cohort_user(), 1) self.assertEqual(self.get_cohort_user_dict(1), model_dict) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - 
@patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_with_data_with_educational_status_with_comma(self): """Test /cohort/user without auth""" - model = self.generate_models(authenticate=True, - cohort_user=True, - cohort_user_kwargs={'educational_status': 'GRADUATED'}) - model_dict = self.remove_dinamics_fields(model['cohort_user'].__dict__) - base_url = reverse_lazy('admissions:cohort_user') - url = (f'{base_url}?educational_status=' + model['cohort_user'].educational_status + ',' - 'they-killed-kenny') + model = self.generate_models( + authenticate=True, cohort_user=True, cohort_user_kwargs={"educational_status": "GRADUATED"} + ) + model_dict = self.remove_dinamics_fields(model["cohort_user"].__dict__) + base_url = reverse_lazy("admissions:cohort_user") + url = f"{base_url}?educational_status=" + model["cohort_user"].educational_status + "," "they-killed-kenny" response = self.client.get(url) json = response.json() - expected = [{ - 'id': model['cohort_user'].id, - 'role': model['cohort_user'].role, - 'finantial_status': model['cohort_user'].finantial_status, - 'educational_status': model['cohort_user'].educational_status, - 'created_at': re.sub(r'\+00:00$', 'Z', model['cohort_user'].created_at.isoformat()), - 'user': { - 'id': model['cohort_user'].user.id, - 'first_name': model['cohort_user'].user.first_name, - 'last_name': model['cohort_user'].user.last_name, - 'email': model['cohort_user'].user.email, - 'last_login': model['cohort_user'].user.last_login, - }, - 'profile_academy': None, - 'cohort': { - 'id': model['cohort_user'].cohort.id, - 'slug': model['cohort_user'].cohort.slug, - 'name': model['cohort_user'].cohort.name, - 'kickoff_date': re.sub(r'\+00:00$', 'Z', model['cohort_user'].cohort.kickoff_date.isoformat()), - 'ending_date': model['cohort_user'].cohort.ending_date, - 'stage': model['cohort_user'].cohort.stage, - 'available_as_saas': model['cohort_user'].cohort.available_as_saas, - }, - 'watching': False, - }] + expected = [ + { + "id": model["cohort_user"].id, + "role": model["cohort_user"].role, + "finantial_status": model["cohort_user"].finantial_status, + "educational_status": model["cohort_user"].educational_status, + "created_at": re.sub(r"\+00:00$", "Z", model["cohort_user"].created_at.isoformat()), + "user": { + "id": model["cohort_user"].user.id, + "first_name": model["cohort_user"].user.first_name, + "last_name": model["cohort_user"].user.last_name, + "email": model["cohort_user"].user.email, + "last_login": model["cohort_user"].user.last_login, + }, + "profile_academy": None, + "cohort": { + "id": model["cohort_user"].cohort.id, + "slug": model["cohort_user"].cohort.slug, + "name": model["cohort_user"].cohort.name, + "kickoff_date": re.sub(r"\+00:00$", "Z", model["cohort_user"].cohort.kickoff_date.isoformat()), + "ending_date": model["cohort_user"].cohort.ending_date, + "stage": model["cohort_user"].cohort.stage, + "available_as_saas": model["cohort_user"].cohort.available_as_saas, + }, + 
"watching": False, + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(self.count_cohort_user(), 1) self.assertEqual(self.get_cohort_user_dict(1), model_dict) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_with_data_with_bad_academy(self): """Test /cohort/user without auth""" model = self.generate_models(authenticate=True, cohort_user=True) - model_dict = self.remove_dinamics_fields(model['cohort_user'].__dict__) - base_url = reverse_lazy('admissions:cohort_user') - url = f'{base_url}?academy=they-killed-kenny' + model_dict = self.remove_dinamics_fields(model["cohort_user"].__dict__) + base_url = reverse_lazy("admissions:cohort_user") + url = f"{base_url}?academy=they-killed-kenny" response = self.client.get(url) json = response.json() @@ -481,47 +500,49 @@ def test_with_data_with_bad_academy(self): self.assertEqual(self.count_cohort_user(), 1) self.assertEqual(self.get_cohort_user_dict(1), model_dict) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_with_data_with_academy(self): """Test /cohort/user without auth""" - model = self.generate_models(authenticate=True, - cohort_user=True, - cohort_user_kwargs={'educational_status': 'GRADUATED'}) - model_dict = self.remove_dinamics_fields(model['cohort_user'].__dict__) - base_url = reverse_lazy('admissions:cohort_user') - url = f'{base_url}?academy=' + model['cohort_user'].cohort.academy.slug + model = self.generate_models( + authenticate=True, cohort_user=True, cohort_user_kwargs={"educational_status": "GRADUATED"} + ) + model_dict = self.remove_dinamics_fields(model["cohort_user"].__dict__) + base_url = reverse_lazy("admissions:cohort_user") + url = f"{base_url}?academy=" + model["cohort_user"].cohort.academy.slug response = self.client.get(url) 
json = response.json() - expected = [{ - 'id': model['cohort_user'].id, - 'role': model['cohort_user'].role, - 'finantial_status': model['cohort_user'].finantial_status, - 'educational_status': model['cohort_user'].educational_status, - 'created_at': re.sub(r'\+00:00$', 'Z', model['cohort_user'].created_at.isoformat()), - 'user': { - 'id': model['cohort_user'].user.id, - 'first_name': model['cohort_user'].user.first_name, - 'last_name': model['cohort_user'].user.last_name, - 'email': model['cohort_user'].user.email, - 'last_login': model['cohort_user'].user.last_login, - }, - 'profile_academy': None, - 'cohort': { - 'id': model['cohort_user'].cohort.id, - 'slug': model['cohort_user'].cohort.slug, - 'name': model['cohort_user'].cohort.name, - 'kickoff_date': re.sub(r'\+00:00$', 'Z', model['cohort_user'].cohort.kickoff_date.isoformat()), - 'ending_date': model['cohort_user'].cohort.ending_date, - 'stage': model['cohort_user'].cohort.stage, - 'available_as_saas': model['cohort_user'].cohort.available_as_saas, - }, - 'watching': False, - }] + expected = [ + { + "id": model["cohort_user"].id, + "role": model["cohort_user"].role, + "finantial_status": model["cohort_user"].finantial_status, + "educational_status": model["cohort_user"].educational_status, + "created_at": re.sub(r"\+00:00$", "Z", model["cohort_user"].created_at.isoformat()), + "user": { + "id": model["cohort_user"].user.id, + "first_name": model["cohort_user"].user.first_name, + "last_name": model["cohort_user"].user.last_name, + "email": model["cohort_user"].user.email, + "last_login": model["cohort_user"].user.last_login, + }, + "profile_academy": None, + "cohort": { + "id": model["cohort_user"].cohort.id, + "slug": model["cohort_user"].cohort.slug, + "name": model["cohort_user"].cohort.name, + "kickoff_date": re.sub(r"\+00:00$", "Z", model["cohort_user"].cohort.kickoff_date.isoformat()), + "ending_date": model["cohort_user"].cohort.ending_date, + "stage": model["cohort_user"].cohort.stage, + "available_as_saas": model["cohort_user"].cohort.available_as_saas, + }, + "watching": False, + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -532,118 +553,124 @@ def test_with_data_with_academy(self): 🔽🔽🔽 With profile academy """ - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test__with_profile_academy(self): """Test /cohort/user without auth""" - model = self.generate_models(authenticate=True, - cohort_user=True, - cohort_user_kwargs={'educational_status': 'GRADUATED'}, - profile_academy=True) - - model_dict = 
self.remove_dinamics_fields(model['cohort_user'].__dict__) - url = reverse_lazy('admissions:cohort_user') + model = self.generate_models( + authenticate=True, + cohort_user=True, + cohort_user_kwargs={"educational_status": "GRADUATED"}, + profile_academy=True, + ) + + model_dict = self.remove_dinamics_fields(model["cohort_user"].__dict__) + url = reverse_lazy("admissions:cohort_user") response = self.client.get(url) json = response.json() - expected = [{ - 'id': model['cohort_user'].id, - 'role': model['cohort_user'].role, - 'finantial_status': model['cohort_user'].finantial_status, - 'educational_status': model['cohort_user'].educational_status, - 'created_at': re.sub(r'\+00:00$', 'Z', model['cohort_user'].created_at.isoformat()), - 'user': { - 'id': model['cohort_user'].user.id, - 'first_name': model['cohort_user'].user.first_name, - 'last_name': model['cohort_user'].user.last_name, - 'email': model['cohort_user'].user.email, - 'last_login': model['cohort_user'].user.last_login, - }, - 'profile_academy': { - 'id': model['profile_academy'].id, - 'first_name': model['profile_academy'].first_name, - 'last_name': model['profile_academy'].last_name, - 'email': model['profile_academy'].email, - 'phone': model['profile_academy'].phone, - }, - 'cohort': { - 'id': model['cohort_user'].cohort.id, - 'slug': model['cohort_user'].cohort.slug, - 'name': model['cohort_user'].cohort.name, - 'kickoff_date': re.sub(r'\+00:00$', 'Z', model['cohort_user'].cohort.kickoff_date.isoformat()), - 'ending_date': model['cohort_user'].cohort.ending_date, - 'stage': model['cohort_user'].cohort.stage, - 'available_as_saas': model['cohort_user'].cohort.available_as_saas, - }, - 'watching': False, - }] + expected = [ + { + "id": model["cohort_user"].id, + "role": model["cohort_user"].role, + "finantial_status": model["cohort_user"].finantial_status, + "educational_status": model["cohort_user"].educational_status, + "created_at": re.sub(r"\+00:00$", "Z", model["cohort_user"].created_at.isoformat()), + "user": { + "id": model["cohort_user"].user.id, + "first_name": model["cohort_user"].user.first_name, + "last_name": model["cohort_user"].user.last_name, + "email": model["cohort_user"].user.email, + "last_login": model["cohort_user"].user.last_login, + }, + "profile_academy": { + "id": model["profile_academy"].id, + "first_name": model["profile_academy"].first_name, + "last_name": model["profile_academy"].last_name, + "email": model["profile_academy"].email, + "phone": model["profile_academy"].phone, + }, + "cohort": { + "id": model["cohort_user"].cohort.id, + "slug": model["cohort_user"].cohort.slug, + "name": model["cohort_user"].cohort.name, + "kickoff_date": re.sub(r"\+00:00$", "Z", model["cohort_user"].cohort.kickoff_date.isoformat()), + "ending_date": model["cohort_user"].cohort.ending_date, + "stage": model["cohort_user"].cohort.stage, + "available_as_saas": model["cohort_user"].cohort.available_as_saas, + }, + "watching": False, + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(self.count_cohort_user(), 1) self.assertEqual(self.get_cohort_user_dict(1), model_dict) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', 
MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_with_data_with_academy_with_comma(self): """Test /cohort/user without auth""" - model = self.generate_models(authenticate=True, - cohort_user=True, - cohort_user_kwargs={'educational_status': 'GRADUATED'}) - model_dict = self.remove_dinamics_fields(model['cohort_user'].__dict__) - base_url = reverse_lazy('admissions:cohort_user') - url = f'{base_url}?academy=' + model['cohort_user'].cohort.academy.slug + ',they-killed-kenny' + model = self.generate_models( + authenticate=True, cohort_user=True, cohort_user_kwargs={"educational_status": "GRADUATED"} + ) + model_dict = self.remove_dinamics_fields(model["cohort_user"].__dict__) + base_url = reverse_lazy("admissions:cohort_user") + url = f"{base_url}?academy=" + model["cohort_user"].cohort.academy.slug + ",they-killed-kenny" response = self.client.get(url) json = response.json() - expected = [{ - 'id': model['cohort_user'].id, - 'role': model['cohort_user'].role, - 'finantial_status': model['cohort_user'].finantial_status, - 'educational_status': model['cohort_user'].educational_status, - 'created_at': re.sub(r'\+00:00$', 'Z', model['cohort_user'].created_at.isoformat()), - 'user': { - 'id': model['cohort_user'].user.id, - 'first_name': model['cohort_user'].user.first_name, - 'last_name': model['cohort_user'].user.last_name, - 'email': model['cohort_user'].user.email, - 'last_login': model['cohort_user'].user.last_login, - }, - 'profile_academy': None, - 'cohort': { - 'id': model['cohort_user'].cohort.id, - 'slug': model['cohort_user'].cohort.slug, - 'name': model['cohort_user'].cohort.name, - 'kickoff_date': re.sub(r'\+00:00$', 'Z', model['cohort_user'].cohort.kickoff_date.isoformat()), - 'ending_date': model['cohort_user'].cohort.ending_date, - 'stage': model['cohort_user'].cohort.stage, - 'available_as_saas': model['cohort_user'].cohort.available_as_saas, - }, - 'watching': False, - }] + expected = [ + { + "id": model["cohort_user"].id, + "role": model["cohort_user"].role, + "finantial_status": model["cohort_user"].finantial_status, + "educational_status": model["cohort_user"].educational_status, + "created_at": re.sub(r"\+00:00$", "Z", model["cohort_user"].created_at.isoformat()), + "user": { + "id": model["cohort_user"].user.id, + "first_name": model["cohort_user"].user.first_name, + "last_name": model["cohort_user"].user.last_name, + "email": model["cohort_user"].user.email, + "last_login": model["cohort_user"].user.last_login, + }, + "profile_academy": None, + "cohort": { + "id": model["cohort_user"].cohort.id, + "slug": model["cohort_user"].cohort.slug, + "name": model["cohort_user"].cohort.name, + "kickoff_date": re.sub(r"\+00:00$", "Z", model["cohort_user"].cohort.kickoff_date.isoformat()), + "ending_date": model["cohort_user"].cohort.ending_date, + "stage": model["cohort_user"].cohort.stage, + "available_as_saas": model["cohort_user"].cohort.available_as_saas, + }, + "watching": False, 
+ } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(self.count_cohort_user(), 1) self.assertEqual(self.get_cohort_user_dict(1), model_dict) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_with_data_with_bad_cohorts(self): """Test /cohort/user without auth""" model = self.generate_models(authenticate=True, cohort_user=True) - model_dict = self.remove_dinamics_fields(model['cohort_user'].__dict__) - base_url = reverse_lazy('admissions:cohort_user') - url = f'{base_url}?cohorts=they-killed-kenny' + model_dict = self.remove_dinamics_fields(model["cohort_user"].__dict__) + base_url = reverse_lazy("admissions:cohort_user") + url = f"{base_url}?cohorts=they-killed-kenny" response = self.client.get(url) json = response.json() @@ -652,414 +679,456 @@ def test_with_data_with_bad_cohorts(self): self.assertEqual(self.count_cohort_user(), 1) self.assertEqual(self.get_cohort_user_dict(1), model_dict) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_with_data_with_cohorts(self): """Test /cohort/user without auth""" - model = self.generate_models(authenticate=True, - cohort_user=True, - cohort_user_kwargs={'educational_status': 'GRADUATED'}) - model_dict = self.remove_dinamics_fields(model['cohort_user'].__dict__) - base_url = reverse_lazy('admissions:cohort_user') - url = f'{base_url}?cohorts=' + model['cohort_user'].cohort.slug + model = self.generate_models( + authenticate=True, cohort_user=True, cohort_user_kwargs={"educational_status": "GRADUATED"} + ) + model_dict = self.remove_dinamics_fields(model["cohort_user"].__dict__) + base_url = reverse_lazy("admissions:cohort_user") + url = f"{base_url}?cohorts=" + model["cohort_user"].cohort.slug response = self.client.get(url) json = response.json() - 
expected = [{ - 'id': model['cohort_user'].id, - 'role': model['cohort_user'].role, - 'finantial_status': model['cohort_user'].finantial_status, - 'educational_status': model['cohort_user'].educational_status, - 'created_at': re.sub(r'\+00:00$', 'Z', model['cohort_user'].created_at.isoformat()), - 'user': { - 'id': model['cohort_user'].user.id, - 'first_name': model['cohort_user'].user.first_name, - 'last_name': model['cohort_user'].user.last_name, - 'email': model['cohort_user'].user.email, - 'last_login': model['cohort_user'].user.last_login, - }, - 'profile_academy': None, - 'cohort': { - 'id': model['cohort_user'].cohort.id, - 'slug': model['cohort_user'].cohort.slug, - 'name': model['cohort_user'].cohort.name, - 'kickoff_date': re.sub(r'\+00:00$', 'Z', model['cohort_user'].cohort.kickoff_date.isoformat()), - 'ending_date': model['cohort_user'].cohort.ending_date, - 'stage': model['cohort_user'].cohort.stage, - 'available_as_saas': model['cohort_user'].cohort.available_as_saas, - }, - 'watching': False, - }] + expected = [ + { + "id": model["cohort_user"].id, + "role": model["cohort_user"].role, + "finantial_status": model["cohort_user"].finantial_status, + "educational_status": model["cohort_user"].educational_status, + "created_at": re.sub(r"\+00:00$", "Z", model["cohort_user"].created_at.isoformat()), + "user": { + "id": model["cohort_user"].user.id, + "first_name": model["cohort_user"].user.first_name, + "last_name": model["cohort_user"].user.last_name, + "email": model["cohort_user"].user.email, + "last_login": model["cohort_user"].user.last_login, + }, + "profile_academy": None, + "cohort": { + "id": model["cohort_user"].cohort.id, + "slug": model["cohort_user"].cohort.slug, + "name": model["cohort_user"].cohort.name, + "kickoff_date": re.sub(r"\+00:00$", "Z", model["cohort_user"].cohort.kickoff_date.isoformat()), + "ending_date": model["cohort_user"].cohort.ending_date, + "stage": model["cohort_user"].cohort.stage, + "available_as_saas": model["cohort_user"].cohort.available_as_saas, + }, + "watching": False, + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(self.count_cohort_user(), 1) self.assertEqual(self.get_cohort_user_dict(1), model_dict) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_put_without_id(self): """Test /cohort/user without auth""" - url = reverse_lazy('admissions:cohort_user') + url = reverse_lazy("admissions:cohort_user") model = self.generate_models(authenticate=True) data = {} response = self.client.put(url, data) json = response.json() - self.assertEqual(json, {'status_code': 400, 'detail': 'Missing cohort_id, user_id and id'}) + self.assertEqual(json, {"status_code": 400, "detail": "Missing cohort_id, user_id and 
id"}) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), []) - - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + self.assertEqual(self.bc.database.list_of("admissions.CohortUser"), []) + + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_put_in_bulk_without_data(self): """Test /cohort/user without auth""" - url = reverse_lazy('admissions:cohort_user') + url = reverse_lazy("admissions:cohort_user") model = self.generate_models(authenticate=True) data = [] - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() expected = [] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), []) + self.assertEqual(self.bc.database.list_of("admissions.CohortUser"), []) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_put_in_bulk_without_data__without_passing_attrs(self): """Test /cohort/user without auth""" - url = reverse_lazy('admissions:cohort_user') + url = reverse_lazy("admissions:cohort_user") model = self.generate_models(authenticate=True) data = [{}] - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() - expected = {'detail': 'Missing cohort_id, user_id and id', 'status_code': 400} + expected = {"detail": "Missing cohort_id, user_id and id", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), []) + self.assertEqual(self.bc.database.list_of("admissions.CohortUser"), []) - @patch(GOOGLE_CLOUD_PATH['client'], 
apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_put_in_bulk_without_data__cannot_determine_the_cohort_user(self): """Test /cohort/user without auth""" - url = reverse_lazy('admissions:cohort_user') + url = reverse_lazy("admissions:cohort_user") model = self.generate_models(authenticate=True) - data = [{'id': 1}] - response = self.client.put(url, data, format='json') + data = [{"id": 1}] + response = self.client.put(url, data, format="json") json = response.json() - expected = {'detail': 'Cannot determine CohortUser in index 0', 'status_code': 400} + expected = {"detail": "Cannot determine CohortUser in index 0", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), []) + self.assertEqual(self.bc.database.list_of("admissions.CohortUser"), []) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_put_in_bulk_without_profile_academy(self): """Test /cohort/user without auth""" - url = reverse_lazy('admissions:cohort_user') + url = reverse_lazy("admissions:cohort_user") model = self.generate_models(authenticate=True, cohort_user=True) - data = [{'id': model['cohort_user'].id}] - response = self.client.put(url, data, format='json') + data = [{"id": model["cohort_user"].id}] + response = self.client.put(url, data, format="json") json = response.json() - expected = {'detail': 'Specified cohort not be found', 'status_code': 400} + expected = {"detail": "Specified cohort not be found", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), [{ - 'id': 1, - 'user_id': 1, - 'cohort_id': 1, - 'role': 'STUDENT', - 'finantial_status': None, - 'educational_status': 'ACTIVE', - 'watching': False, - 'history_log': {}, - }]) - - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - 
@patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + self.assertEqual( + self.bc.database.list_of("admissions.CohortUser"), + [ + { + "id": 1, + "user_id": 1, + "cohort_id": 1, + "role": "STUDENT", + "finantial_status": None, + "educational_status": "ACTIVE", + "watching": False, + "history_log": {}, + } + ], + ) + + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_put_in_bulk_with_stage_delete(self): """Test /cohort/user without auth""" - cohort_kwargs = {'stage': 'DELETED'} - url = reverse_lazy('admissions:cohort_user') - model = self.generate_models(authenticate=True, - cohort_user=True, - profile_academy=True, - cohort_kwargs=cohort_kwargs) - data = [{'id': model['cohort_user'].id}] - response = self.client.put(url, data, format='json') + cohort_kwargs = {"stage": "DELETED"} + url = reverse_lazy("admissions:cohort_user") + model = self.generate_models( + authenticate=True, cohort_user=True, profile_academy=True, cohort_kwargs=cohort_kwargs + ) + data = [{"id": model["cohort_user"].id}] + response = self.client.put(url, data, format="json") json = response.json() - expected = {'detail': 'cohort-with-stage-deleted', 'status_code': 400} + expected = {"detail": "cohort-with-stage-deleted", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), [{ - 'id': 1, - 'user_id': 1, - 'cohort_id': 1, - 'role': 'STUDENT', - 'finantial_status': None, - 'educational_status': 'ACTIVE', - 'watching': False, - 'history_log': {}, - }]) - - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + self.assertEqual( + self.bc.database.list_of("admissions.CohortUser"), + [ + { + "id": 1, + "user_id": 1, + "cohort_id": 1, + "role": "STUDENT", + "finantial_status": None, + "educational_status": "ACTIVE", + "watching": False, + "history_log": {}, + } + ], + ) + + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_put_in_bulk_with_one_item(self): """Test /cohort/user without auth""" - url = reverse_lazy('admissions:cohort_user') + url = reverse_lazy("admissions:cohort_user") model = self.generate_models(authenticate=True, cohort_user=True, profile_academy=True) - data = [{'id': model['cohort_user'].id}] - response = self.client.put(url, data, format='json') + data = [{"id": model["cohort_user"].id}] + response = self.client.put(url, data, format="json") json = 
response.json() - expected = [{ - 'id': 1, - 'role': 'STUDENT', - 'educational_status': 'ACTIVE', - 'finantial_status': None, - 'watching': False, - }] + expected = [ + { + "id": 1, + "role": "STUDENT", + "educational_status": "ACTIVE", + "finantial_status": None, + "watching": False, + } + ] expected = [ - put_serializer(self, - model.cohort_user, - model.cohort, - model.user, - model.profile_academy, - data={ - 'role': 'STUDENT', - }) + put_serializer( + self, + model.cohort_user, + model.cohort, + model.user, + model.profile_academy, + data={ + "role": "STUDENT", + }, + ) ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), [{ - 'id': 1, - 'user_id': 1, - 'cohort_id': 1, - 'role': 'STUDENT', - 'finantial_status': None, - 'educational_status': 'ACTIVE', - 'watching': False, - 'history_log': {}, - }]) - - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + self.assertEqual( + self.bc.database.list_of("admissions.CohortUser"), + [ + { + "id": 1, + "user_id": 1, + "cohort_id": 1, + "role": "STUDENT", + "finantial_status": None, + "educational_status": "ACTIVE", + "watching": False, + "history_log": {}, + } + ], + ) + + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_put_in_bulk_with_two_items(self): """Test /cohort/user without auth""" - url = reverse_lazy('admissions:cohort_user') + url = reverse_lazy("admissions:cohort_user") model = [self.generate_models(authenticate=True, cohort_user=True, profile_academy=True)] base = model[0].copy() - del base['user'] - del base['cohort'] - del base['cohort_user'] - del base['profile_academy'] + del base["user"] + del base["cohort"] + del base["cohort_user"] + del base["profile_academy"] model = model + [self.generate_models(cohort_user=True, profile_academy=True, models=base)] - data = [{ - 'id': 1, - 'finantial_status': 'LATE', - }, { - 'user': '2', - 'cohort': '2', - 'educational_status': 'GRADUATED' - }] - response = self.client.put(url, data, format='json') + data = [ + { + "id": 1, + "finantial_status": "LATE", + }, + {"user": "2", "cohort": "2", "educational_status": "GRADUATED"}, + ] + response = self.client.put(url, data, format="json") json = response.json() - expected = [{ - 'id': 1, - 'role': 'STUDENT', - 'educational_status': None, - 'finantial_status': 'LATE', - 'watching': False, - }, { - 'id': 2, - 'role': 'STUDENT', - 'educational_status': 'GRADUATED', - 'finantial_status': None, - 'watching': False, - }] expected = [ - put_serializer(self, - m.cohort_user, - m.cohort, - m.user, - m.profile_academy, - data={ - 'educational_status': 'ACTIVE' if m.cohort.id == 1 else 'GRADUATED', - 'finantial_status': 'LATE' if m.cohort.id == 1 else None, - }) for m in model + { + "id": 1, + "role": "STUDENT", + 
"educational_status": None, + "finantial_status": "LATE", + "watching": False, + }, + { + "id": 2, + "role": "STUDENT", + "educational_status": "GRADUATED", + "finantial_status": None, + "watching": False, + }, + ] + expected = [ + put_serializer( + self, + m.cohort_user, + m.cohort, + m.user, + m.profile_academy, + data={ + "educational_status": "ACTIVE" if m.cohort.id == 1 else "GRADUATED", + "finantial_status": "LATE" if m.cohort.id == 1 else None, + }, + ) + for m in model ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), [{ - 'id': 1, - 'user_id': 1, - 'cohort_id': 1, - 'role': 'STUDENT', - 'finantial_status': 'LATE', - 'educational_status': 'ACTIVE', - 'watching': False, - 'history_log': {}, - }, { - 'id': 2, - 'user_id': 2, - 'cohort_id': 2, - 'role': 'STUDENT', - 'finantial_status': None, - 'educational_status': 'GRADUATED', - 'watching': False, - 'history_log': {}, - }]) + self.assertEqual( + self.bc.database.list_of("admissions.CohortUser"), + [ + { + "id": 1, + "user_id": 1, + "cohort_id": 1, + "role": "STUDENT", + "finantial_status": "LATE", + "educational_status": "ACTIVE", + "watching": False, + "history_log": {}, + }, + { + "id": 2, + "user_id": 2, + "cohort_id": 2, + "role": "STUDENT", + "finantial_status": None, + "educational_status": "GRADUATED", + "watching": False, + "history_log": {}, + }, + ], + ) # that's methods name is irrelevant because it's deprecated - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id_user_delete_without_auth(self): """Test /cohort/:id/user without auth""" - url = reverse_lazy('admissions:cohort_user') + url = reverse_lazy("admissions:cohort_user") response = self.client.delete(url) json = response.json() - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), []) + self.assertEqual(self.bc.database.list_of("admissions.CohortUser"), []) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], 
apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id_user_delete_without_args_in_url_or_bulk(self): """Test /cohort/:id/user without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, profile_academy=True, capability='crud_cohort', role='potato') - url = reverse_lazy('admissions:cohort_user') + model = self.generate_models(authenticate=True, profile_academy=True, capability="crud_cohort", role="potato") + url = reverse_lazy("admissions:cohort_user") response = self.client.delete(url) json = response.json() - expected = {'detail': 'Missing user_id or cohort_id', 'status_code': 400} + expected = {"detail": "Missing user_id or cohort_id", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), []) - - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + self.assertEqual(self.bc.database.list_of("admissions.CohortUser"), []) + + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_cohort_id_user_delete_in_bulk_with_one(self): """Test /cohort/:id/user without auth""" self.headers(academy=1) - many_fields = ['id'] + many_fields = ["id"] - base = self.generate_models(authenticate=True, profile_academy=True, capability='crud_cohort', role='potato') + base = self.generate_models(authenticate=True, profile_academy=True, capability="crud_cohort", role="potato") - del base['user'] - del base['cohort'] + del base["user"] + del base["cohort"] for field in many_fields: cohort_user_kwargs = { - 'role': choice(['STUDENT', 'ASSISTANT', 'TEACHER']), - 'finantial_status': choice(['FULLY_PAID', 'UP_TO_DATE', 'LATE']), - 'educational_status': choice(['ACTIVE', 'POSTPONED', 'SUSPENDED', 'GRADUATED', 'DROPPED']), + "role": choice(["STUDENT", "ASSISTANT", "TEACHER"]), + "finantial_status": choice(["FULLY_PAID", "UP_TO_DATE", "LATE"]), + "educational_status": choice(["ACTIVE", "POSTPONED", "SUSPENDED", "GRADUATED", "DROPPED"]), } model = self.generate_models(cohort_user=True, cohort_user_kwargs=cohort_user_kwargs, models=base) - url = (reverse_lazy('admissions:cohort_user') + f'?{field}=' + str(getattr(model['cohort_user'], field))) + url = reverse_lazy("admissions:cohort_user") + f"?{field}=" + str(getattr(model["cohort_user"], field)) response = self.client.delete(url) 
self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), []) + self.assertEqual(self.bc.database.list_of("admissions.CohortUser"), []) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_cohort_id_user_delete_in_bulk_with_two(self): """Test /cohort/:id/user without auth""" self.headers(academy=1) - many_fields = ['id'] + many_fields = ["id"] - base = self.generate_models(authenticate=True, profile_academy=True, capability='crud_cohort', role='potato') + base = self.generate_models(authenticate=True, profile_academy=True, capability="crud_cohort", role="potato") - del base['user'] - del base['cohort'] + del base["user"] + del base["cohort"] for field in many_fields: cohort_user_kwargs = { - 'role': choice(['STUDENT', 'ASSISTANT', 'TEACHER']), - 'finantial_status': choice(['FULLY_PAID', 'UP_TO_DATE', 'LATE']), - 'educational_status': choice(['ACTIVE', 'POSTPONED', 'SUSPENDED', 'GRADUATED', 'DROPPED']), + "role": choice(["STUDENT", "ASSISTANT", "TEACHER"]), + "finantial_status": choice(["FULLY_PAID", "UP_TO_DATE", "LATE"]), + "educational_status": choice(["ACTIVE", "POSTPONED", "SUSPENDED", "GRADUATED", "DROPPED"]), } model1 = self.generate_models(cohort_user=True, cohort_user_kwargs=cohort_user_kwargs, models=base) cohort_user_kwargs = { - 'role': choice(['STUDENT', 'ASSISTANT', 'TEACHER']), - 'finantial_status': choice(['FULLY_PAID', 'UP_TO_DATE', 'LATE']), - 'educational_status': choice(['ACTIVE', 'POSTPONED', 'SUSPENDED', 'GRADUATED', 'DROPPED']), + "role": choice(["STUDENT", "ASSISTANT", "TEACHER"]), + "finantial_status": choice(["FULLY_PAID", "UP_TO_DATE", "LATE"]), + "educational_status": choice(["ACTIVE", "POSTPONED", "SUSPENDED", "GRADUATED", "DROPPED"]), } model2 = self.generate_models(cohort_user=True, cohort_user_kwargs=cohort_user_kwargs, models=base) - url = (reverse_lazy('admissions:cohort_user') + f'?{field}=' + str(getattr(model1['cohort_user'], field)) + - ',' + str(getattr(model2['cohort_user'], field))) + url = ( + reverse_lazy("admissions:cohort_user") + + f"?{field}=" + + str(getattr(model1["cohort_user"], field)) + + "," + + str(getattr(model2["cohort_user"], field)) + ) response = self.client.delete(url) self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), []) + self.assertEqual(self.bc.database.list_of("admissions.CohortUser"), []) - @patch.object(APIViewExtensionHandlers, '_spy_extension_arguments', MagicMock()) - @patch.object(APIViewExtensionHandlers, '_spy_extensions', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - 
@patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch.object(APIViewExtensionHandlers, "_spy_extension_arguments", MagicMock()) + @patch.object(APIViewExtensionHandlers, "_spy_extensions", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_with_data(self): """Test /cohort/user without auth""" model = self.generate_models(authenticate=True, cohort_user=True) - url = reverse_lazy('admissions:cohort_user') + url = reverse_lazy("admissions:cohort_user") response = self.client.get(url) - self.assertEqual(APIViewExtensionHandlers._spy_extensions.call_args_list, [ - call(['CacheExtension', 'LanguageExtension', 'LookupExtension', 'PaginationExtension']), - ]) - self.assertEqual(APIViewExtensionHandlers._spy_extension_arguments.call_args_list, [ - call(cache=CohortUserCache, paginate=True), - ]) + self.assertEqual( + APIViewExtensionHandlers._spy_extensions.call_args_list, + [ + call(["CacheExtension", "LanguageExtension", "LookupExtension", "PaginationExtension"]), + ], + ) + self.assertEqual( + APIViewExtensionHandlers._spy_extension_arguments.call_args_list, + [ + call(cache=CohortUserCache, paginate=True), + ], + ) diff --git a/breathecode/admissions/tests/urls/tests_me_cohort_id_user_log.py b/breathecode/admissions/tests/urls/tests_me_cohort_id_user_log.py index 42b283109..e5650c204 100644 --- a/breathecode/admissions/tests/urls/tests_me_cohort_id_user_log.py +++ b/breathecode/admissions/tests/urls/tests_me_cohort_id_user_log.py @@ -1,6 +1,7 @@ """ Test /cohort/:id/user/:id """ + import re from unittest.mock import MagicMock, patch @@ -19,48 +20,48 @@ def get_serializer(cohort_user, cohort, data={}): return { - 'cohort': { - 'id': cohort.id, - 'slug': cohort.slug, + "cohort": { + "id": cohort.id, + "slug": cohort.slug, }, - 'history_log': cohort_user.history_log, + "history_log": cohort_user.history_log, } class CohortIdUserIdTestSuite(AdmissionsTestCase): """Test /cohort/:id/user/:id""" - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_no_auth(self): """Test /cohort/:id/user/:id without auth""" - url = reverse_lazy('admissions:me_cohort_id_user_log', kwargs={'cohort_id': 1}) + url = reverse_lazy("admissions:me_cohort_id_user_log", kwargs={"cohort_id": 1}) response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED - }) + self.assertEqual( + json, + {"detail": "Authentication credentials were not provided.", "status_code": status.HTTP_401_UNAUTHORIZED}, + ) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + 
@patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_zero_items(self): """Test /cohort/:id/user/:id without auth""" model = self.generate_models(user=1) - self.bc.request.authenticate(model['user']) - url = reverse_lazy('admissions:me_cohort_id_user_log', kwargs={'cohort_id': 1}) - response = self.client.get(url, format='json') + self.bc.request.authenticate(model["user"]) + url = reverse_lazy("admissions:me_cohort_id_user_log", kwargs={"cohort_id": 1}) + response = self.client.get(url, format="json") json = response.json() - expected = {'detail': 'cohort-user-not-found', 'status_code': 404} + expected = {"detail": "cohort-user-not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), []) + self.assertEqual(self.bc.database.list_of("admissions.CohortUser"), []) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_two_items(self): """Test /cohort/:id/user/:id without auth""" @@ -69,16 +70,19 @@ def test_two_items(self): self.bc.fake.slug(): self.bc.fake.slug(), self.bc.fake.slug(): self.bc.fake.slug(), } - cohort_user = {'history_log': history_log} + cohort_user = {"history_log": history_log} model = self.generate_models(user=1, cohort_user=cohort_user) - self.bc.request.authenticate(model['user']) - url = reverse_lazy('admissions:me_cohort_id_user_log', kwargs={'cohort_id': 1}) - response = self.client.get(url, format='json') + self.bc.request.authenticate(model["user"]) + url = reverse_lazy("admissions:me_cohort_id_user_log", kwargs={"cohort_id": 1}) + response = self.client.get(url, format="json") json = response.json() expected = get_serializer(model.cohort_user, model.cohort) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), [ - self.bc.format.to_dict(model.cohort_user), - ]) + self.assertEqual( + self.bc.database.list_of("admissions.CohortUser"), + [ + self.bc.format.to_dict(model.cohort_user), + ], + ) diff --git a/breathecode/admissions/tests/urls/tests_me_cohort_user_log.py b/breathecode/admissions/tests/urls/tests_me_cohort_user_log.py index 73dc63c06..feba85bb0 100644 --- a/breathecode/admissions/tests/urls/tests_me_cohort_user_log.py +++ b/breathecode/admissions/tests/urls/tests_me_cohort_user_log.py @@ -1,6 +1,7 @@ """ Test /cohort/:id/user/:id """ + import re from unittest.mock import MagicMock, patch @@ -19,48 +20,48 @@ def get_serializer(cohort_user, cohort, data={}): return { - 'cohort': { - 'id': cohort.id, - 'slug': cohort.slug, + "cohort": { + "id": cohort.id, + "slug": cohort.slug, }, - 'history_log': cohort_user.history_log, + "history_log": cohort_user.history_log, } class CohortIdUserIdTestSuite(AdmissionsTestCase): """Test /cohort/:id/user/:id""" - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + 
@patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_no_auth(self): """Test /cohort/:id/user/:id without auth""" - url = reverse_lazy('admissions:me_cohort_user_log') + url = reverse_lazy("admissions:me_cohort_user_log") response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED - }) + self.assertEqual( + json, + {"detail": "Authentication credentials were not provided.", "status_code": status.HTTP_401_UNAUTHORIZED}, + ) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_zero_items(self): """Test /cohort/:id/user/:id without auth""" model = self.generate_models(user=1) - self.bc.request.authenticate(model['user']) - url = reverse_lazy('admissions:me_cohort_user_log') - response = self.client.get(url, format='json') + self.bc.request.authenticate(model["user"]) + url = reverse_lazy("admissions:me_cohort_user_log") + response = self.client.get(url, format="json") json = response.json() expected = [] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), []) + self.assertEqual(self.bc.database.list_of("admissions.CohortUser"), []) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_two_items(self): """Test /cohort/:id/user/:id without auth""" @@ -69,14 +70,14 @@ def test_two_items(self): self.bc.fake.slug(): self.bc.fake.slug(), self.bc.fake.slug(): self.bc.fake.slug(), } - cohort_user = {'history_log': history_log} + cohort_user = {"history_log": history_log} model = self.generate_models(user=1, cohort_user=(2, cohort_user)) - self.bc.request.authenticate(model['user']) - url = reverse_lazy('admissions:me_cohort_user_log') - response = self.client.get(url, format='json') + self.bc.request.authenticate(model["user"]) + url = reverse_lazy("admissions:me_cohort_user_log") + response = self.client.get(url, format="json") json = response.json() expected = [get_serializer(cohort_user, model.cohort) for cohort_user in model.cohort_user] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), self.bc.format.to_dict(model.cohort_user)) + self.assertEqual(self.bc.database.list_of("admissions.CohortUser"), self.bc.format.to_dict(model.cohort_user)) diff --git a/breathecode/admissions/tests/urls/tests_public_cohort_user.py b/breathecode/admissions/tests/urls/tests_public_cohort_user.py index 5939b6d82..b82d3fb73 100644 --- 
a/breathecode/admissions/tests/urls/tests_public_cohort_user.py +++ b/breathecode/admissions/tests/urls/tests_public_cohort_user.py @@ -1,6 +1,7 @@ """ Test /cohort/all """ + import random import re from datetime import timedelta @@ -15,11 +16,11 @@ def get_serializer(cohort_user, user): return { - 'role': cohort_user.role, - 'user': { - 'id': user.id, - 'first_name': user.first_name, - 'last_name': user.last_name, + "role": cohort_user.role, + "user": { + "id": user.id, + "first_name": user.first_name, + "last_name": user.last_name, }, } @@ -29,7 +30,7 @@ class CohortAllTestSuite(AdmissionsTestCase): def test_without_auth(self): """Test /cohort/all without auth""" - url = reverse_lazy('admissions:public_cohort_user') + url = reverse_lazy("admissions:public_cohort_user") response = self.client.get(url) json = response.json() @@ -40,7 +41,7 @@ def test_without_auth(self): def test_without_data(self): """Test /cohort/all without auth""" model = self.generate_models(authenticate=True) - url = reverse_lazy('admissions:public_cohort_user') + url = reverse_lazy("admissions:public_cohort_user") response = self.client.get(url) json = response.json() @@ -52,33 +53,33 @@ def test_without_data(self): 🔽🔽🔽 With data """ - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) def test_with_data(self): """Test /cohort/all without auth""" model = self.generate_models(authenticate=True, cohort_user=True, profile_academy=True, syllabus_version=True) - url = reverse_lazy('admissions:public_cohort_user') + url = reverse_lazy("admissions:public_cohort_user") response = self.client.get(url) json = response.json() expected = [get_serializer(model.cohort_user, model.user)] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), [{ - **self.model_to_dict(model, 'cohort_user') - }]) + self.assertEqual( + self.bc.database.list_of("admissions.CohortUser"), [{**self.model_to_dict(model, "cohort_user")}] + ) """ 🔽🔽🔽 roles in querystring """ - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) def test_with_data_with_bad_roles(self): """Test /cohort/user without auth""" model = self.generate_models(authenticate=True, cohort_user=True) - model_dict = self.remove_dinamics_fields(model['cohort_user'].__dict__) - base_url = reverse_lazy('admissions:public_cohort_user') - url = f'{base_url}?roles=they-killed-kenny' + model_dict = self.remove_dinamics_fields(model["cohort_user"].__dict__) + base_url = reverse_lazy("admissions:public_cohort_user") + url = f"{base_url}?roles=they-killed-kenny" response = self.client.get(url) json = response.json() @@ -87,17 +88,17 @@ def test_with_data_with_bad_roles(self): self.assertEqual(self.count_cohort_user(), 1) self.assertEqual(self.get_cohort_user_dict(1), model_dict) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) def test_with_data_with_roles(self): """Test /cohort/user without auth""" model = self.generate_models(authenticate=True, cohort_user=True) - model_dict = self.remove_dinamics_fields(model['cohort_user'].__dict__) - role = model['cohort_user'].role + model_dict = 
self.remove_dinamics_fields(model["cohort_user"].__dict__) + role = model["cohort_user"].role if random.randint(0, 1): role = role.lower() - url = reverse_lazy('admissions:public_cohort_user') + f'?roles=' + role + url = reverse_lazy("admissions:public_cohort_user") + f"?roles=" + role response = self.client.get(url) json = response.json() expected = [get_serializer(model.cohort_user, model.user)] @@ -107,13 +108,13 @@ def test_with_data_with_roles(self): self.assertEqual(self.count_cohort_user(), 1) self.assertEqual(self.get_cohort_user_dict(1), model_dict) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) def test_with_data_with_roles_with_comma(self): """Test /cohort/user without auth""" model = self.generate_models(authenticate=True, cohort_user=True) - model_dict = self.remove_dinamics_fields(model['cohort_user'].__dict__) - base_url = reverse_lazy('admissions:public_cohort_user') - url = f'{base_url}?roles=' + model['cohort_user'].role + ',they-killed-kenny' + model_dict = self.remove_dinamics_fields(model["cohort_user"].__dict__) + base_url = reverse_lazy("admissions:public_cohort_user") + url = f"{base_url}?roles=" + model["cohort_user"].role + ",they-killed-kenny" response = self.client.get(url) json = response.json() @@ -128,13 +129,13 @@ def test_with_data_with_roles_with_comma(self): 🔽🔽🔽 finantial_status in querystring """ - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) def test_with_data_with_bad_finantial_status(self): """Test /cohort/user without auth""" model = self.generate_models(authenticate=True, cohort_user=True) - model_dict = self.remove_dinamics_fields(model['cohort_user'].__dict__) - base_url = reverse_lazy('admissions:public_cohort_user') - url = f'{base_url}?finantial_status=they-killed-kenny' + model_dict = self.remove_dinamics_fields(model["cohort_user"].__dict__) + base_url = reverse_lazy("admissions:public_cohort_user") + url = f"{base_url}?finantial_status=they-killed-kenny" response = self.client.get(url) json = response.json() @@ -143,19 +144,19 @@ def test_with_data_with_bad_finantial_status(self): self.assertEqual(self.count_cohort_user(), 1) self.assertEqual(self.get_cohort_user_dict(1), model_dict) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) def test_with_data_with_finantial_status(self): """Test /cohort/user without auth""" - model = self.generate_models(authenticate=True, - cohort_user=True, - cohort_user_kwargs={'finantial_status': 'LATE'}) - model_dict = self.remove_dinamics_fields(model['cohort_user'].__dict__) - role = model['cohort_user'].finantial_status + model = self.generate_models( + authenticate=True, cohort_user=True, cohort_user_kwargs={"finantial_status": "LATE"} + ) + model_dict = self.remove_dinamics_fields(model["cohort_user"].__dict__) + role = model["cohort_user"].finantial_status if random.randint(0, 1): role = role.lower() - url = reverse_lazy('admissions:public_cohort_user') + f'?finantial_status=' + role + url = reverse_lazy("admissions:public_cohort_user") + f"?finantial_status=" + role response = self.client.get(url) json = response.json() expected = [get_serializer(model.cohort_user, model.user)] @@ 
-165,15 +166,15 @@ def test_with_data_with_finantial_status(self): self.assertEqual(self.count_cohort_user(), 1) self.assertEqual(self.get_cohort_user_dict(1), model_dict) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) def test_with_data_with_finantial_status_with_comma(self): """Test /cohort/user without auth""" - model = self.generate_models(authenticate=True, - cohort_user=True, - cohort_user_kwargs={'finantial_status': 'LATE'}) - model_dict = self.remove_dinamics_fields(model['cohort_user'].__dict__) - base_url = reverse_lazy('admissions:public_cohort_user') - url = (f'{base_url}?finantial_status=' + model['cohort_user'].finantial_status + ',they-killed-kenny') + model = self.generate_models( + authenticate=True, cohort_user=True, cohort_user_kwargs={"finantial_status": "LATE"} + ) + model_dict = self.remove_dinamics_fields(model["cohort_user"].__dict__) + base_url = reverse_lazy("admissions:public_cohort_user") + url = f"{base_url}?finantial_status=" + model["cohort_user"].finantial_status + ",they-killed-kenny" response = self.client.get(url) json = response.json() expected = [get_serializer(model.cohort_user, model.user)] @@ -187,13 +188,13 @@ def test_with_data_with_finantial_status_with_comma(self): 🔽🔽🔽 educational_status in querystring """ - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) def test_with_data_with_bad_educational_status(self): """Test /cohort/user without auth""" model = self.generate_models(authenticate=True, cohort_user=True) - model_dict = self.remove_dinamics_fields(model['cohort_user'].__dict__) - base_url = reverse_lazy('admissions:public_cohort_user') - url = f'{base_url}?educational_status=they-killed-kenny' + model_dict = self.remove_dinamics_fields(model["cohort_user"].__dict__) + base_url = reverse_lazy("admissions:public_cohort_user") + url = f"{base_url}?educational_status=they-killed-kenny" response = self.client.get(url) json = response.json() @@ -202,20 +203,20 @@ def test_with_data_with_bad_educational_status(self): self.assertEqual(self.count_cohort_user(), 1) self.assertEqual(self.get_cohort_user_dict(1), model_dict) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) def test_with_data_with_educational_status(self): """Test /cohort/user without auth""" - model = self.generate_models(authenticate=True, - cohort_user=True, - cohort_user_kwargs={'educational_status': 'GRADUATED'}) - model_dict = self.remove_dinamics_fields(model['cohort_user'].__dict__) - base_url = reverse_lazy('admissions:public_cohort_user') - role = model['cohort_user'].educational_status + model = self.generate_models( + authenticate=True, cohort_user=True, cohort_user_kwargs={"educational_status": "GRADUATED"} + ) + model_dict = self.remove_dinamics_fields(model["cohort_user"].__dict__) + base_url = reverse_lazy("admissions:public_cohort_user") + role = model["cohort_user"].educational_status if random.randint(0, 1): role = role.lower() - url = reverse_lazy('admissions:public_cohort_user') + f'?educational_status=' + role + url = reverse_lazy("admissions:public_cohort_user") + f"?educational_status=" + role response = self.client.get(url) json = response.json() 
expected = [get_serializer(model.cohort_user, model.user)] @@ -225,16 +226,15 @@ def test_with_data_with_educational_status(self): self.assertEqual(self.count_cohort_user(), 1) self.assertEqual(self.get_cohort_user_dict(1), model_dict) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) def test_with_data_with_educational_status_with_comma(self): """Test /cohort/user without auth""" - model = self.generate_models(authenticate=True, - cohort_user=True, - cohort_user_kwargs={'educational_status': 'GRADUATED'}) - model_dict = self.remove_dinamics_fields(model['cohort_user'].__dict__) - base_url = reverse_lazy('admissions:public_cohort_user') - url = (f'{base_url}?educational_status=' + model['cohort_user'].educational_status + ',' - 'they-killed-kenny') + model = self.generate_models( + authenticate=True, cohort_user=True, cohort_user_kwargs={"educational_status": "GRADUATED"} + ) + model_dict = self.remove_dinamics_fields(model["cohort_user"].__dict__) + base_url = reverse_lazy("admissions:public_cohort_user") + url = f"{base_url}?educational_status=" + model["cohort_user"].educational_status + "," "they-killed-kenny" response = self.client.get(url) json = response.json() expected = [get_serializer(model.cohort_user, model.user)] diff --git a/breathecode/admissions/tests/urls/tests_schedule.py b/breathecode/admissions/tests/urls/tests_schedule.py index fa3699dd8..d2eba5548 100644 --- a/breathecode/admissions/tests/urls/tests_schedule.py +++ b/breathecode/admissions/tests/urls/tests_schedule.py @@ -1,6 +1,7 @@ """ Test /certificate """ + from unittest.mock import MagicMock, call, patch from django.urls.base import reverse_lazy from rest_framework import status @@ -15,19 +16,19 @@ class CertificateTestSuite(AdmissionsTestCase): def test_certificate_without_auth(self): """Test /certificate without auth""" - url = reverse_lazy('admissions:schedule') + url = reverse_lazy("admissions:schedule") response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED - }) + self.assertEqual( + json, + {"detail": "Authentication credentials were not provided.", "status_code": status.HTTP_401_UNAUTHORIZED}, + ) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) def test_certificate_without_data(self): """Test /certificate without auth""" - url = reverse_lazy('admissions:schedule') + url = reverse_lazy("admissions:schedule") self.bc.database.create(authenticate=True) response = self.client.get(url) json = response.json() @@ -39,21 +40,31 @@ def test_certificate_without_data(self): def test_certificate_with_data(self): """Test /certificate without auth""" model = self.bc.database.create(authenticate=True, syllabus=True, syllabus_schedule=True) - url = reverse_lazy('admissions:schedule') + url = reverse_lazy("admissions:schedule") response = self.client.get(url) json = response.json() - self.assertEqual(json, [{ - 'id': model['syllabus_schedule'].id, - 'name': model['syllabus_schedule'].name, - 'description': model['syllabus_schedule'].description, - 'syllabus': model['syllabus_schedule'].syllabus.id, - }]) + self.assertEqual( + json, + [ + { + "id": model["syllabus_schedule"].id, + "name": model["syllabus_schedule"].name, + "description": model["syllabus_schedule"].description, + "syllabus": model["syllabus_schedule"].syllabus.id, + } 
+ ], + ) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_syllabus_schedule_dict(), [{ - **self.model_to_dict(model, 'syllabus_schedule'), - }]) + self.assertEqual( + self.all_syllabus_schedule_dict(), + [ + { + **self.model_to_dict(model, "syllabus_schedule"), + } + ], + ) """ 🔽🔽🔽 Syllabus id in querystring @@ -62,43 +73,49 @@ def test_certificate_with_data(self): def test_academy_schedule__syllabus_id_in_querystring__bad_id(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.bc.database.create(authenticate=True, - syllabus_schedule=True, - profile_academy=True, - capability='read_certificate', - role='potato', - syllabus=True) - url = reverse_lazy('admissions:schedule') + '?syllabus_id=9999' + model = self.bc.database.create( + authenticate=True, + syllabus_schedule=True, + profile_academy=True, + capability="read_certificate", + role="potato", + syllabus=True, + ) + url = reverse_lazy("admissions:schedule") + "?syllabus_id=9999" response = self.client.get(url) json = response.json() expected = [] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, 'syllabus')}]) + self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, "syllabus")}]) def test_academy_schedule__syllabus_id_in_querystring(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.bc.database.create(authenticate=True, - syllabus_schedule=True, - profile_academy=True, - capability='read_certificate', - role='potato', - syllabus=True) - url = reverse_lazy('admissions:schedule') + '?syllabus_id=1' + model = self.bc.database.create( + authenticate=True, + syllabus_schedule=True, + profile_academy=True, + capability="read_certificate", + role="potato", + syllabus=True, + ) + url = reverse_lazy("admissions:schedule") + "?syllabus_id=1" response = self.client.get(url) json = response.json() - expected = [{ - 'id': model.syllabus_schedule.id, - 'name': model.syllabus_schedule.name, - 'description': model.syllabus_schedule.description, - 'syllabus': model.syllabus_schedule.syllabus.id, - }] + expected = [ + { + "id": model.syllabus_schedule.id, + "name": model.syllabus_schedule.name, + "description": model.syllabus_schedule.description, + "syllabus": model.syllabus_schedule.syllabus.id, + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, 'syllabus')}]) + self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, "syllabus")}]) """ 🔽🔽🔽 Syllabus slug in querystring @@ -107,46 +124,52 @@ def test_academy_schedule__syllabus_id_in_querystring(self): def test_academy_schedule__syllabus_slug_in_querystring__bad_id(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.bc.database.create(authenticate=True, - syllabus_schedule=True, - profile_academy=True, - capability='read_certificate', - role='potato', - syllabus=True) - url = reverse_lazy('admissions:schedule') + '?syllabus_slug=they-killed-kenny' + model = self.bc.database.create( + authenticate=True, + syllabus_schedule=True, + profile_academy=True, + capability="read_certificate", + role="potato", + syllabus=True, + ) + url = reverse_lazy("admissions:schedule") + "?syllabus_slug=they-killed-kenny" response = self.client.get(url) json = response.json() expected = [] self.assertEqual(json, expected) 
self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, 'syllabus')}]) + self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, "syllabus")}]) def test_academy_schedule__syllabus_slug_in_querystring(self): """Test /certificate without auth""" self.headers(academy=1) - syllabus_kwargs = {'slug': 'they-killed-kenny'} - model = self.bc.database.create(authenticate=True, - syllabus_schedule=True, - profile_academy=True, - capability='read_certificate', - role='potato', - syllabus=True, - syllabus_kwargs=syllabus_kwargs) - url = reverse_lazy('admissions:schedule') + '?syllabus_slug=they-killed-kenny' + syllabus_kwargs = {"slug": "they-killed-kenny"} + model = self.bc.database.create( + authenticate=True, + syllabus_schedule=True, + profile_academy=True, + capability="read_certificate", + role="potato", + syllabus=True, + syllabus_kwargs=syllabus_kwargs, + ) + url = reverse_lazy("admissions:schedule") + "?syllabus_slug=they-killed-kenny" response = self.client.get(url) json = response.json() - expected = [{ - 'id': model.syllabus_schedule.id, - 'name': model.syllabus_schedule.name, - 'description': model.syllabus_schedule.description, - 'syllabus': model.syllabus_schedule.syllabus.id, - }] + expected = [ + { + "id": model.syllabus_schedule.id, + "name": model.syllabus_schedule.name, + "description": model.syllabus_schedule.description, + "syllabus": model.syllabus_schedule.syllabus.id, + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, 'syllabus')}]) + self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, "syllabus")}]) """ 🔽🔽🔽 Academy id in querystring @@ -155,43 +178,49 @@ def test_academy_schedule__syllabus_slug_in_querystring(self): def test_academy_schedule__academy_id_in_querystring__bad_id(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.bc.database.create(authenticate=True, - syllabus_schedule=True, - profile_academy=True, - capability='read_certificate', - role='potato', - syllabus=True) - url = reverse_lazy('admissions:schedule') + '?academy_id=9999' + model = self.bc.database.create( + authenticate=True, + syllabus_schedule=True, + profile_academy=True, + capability="read_certificate", + role="potato", + syllabus=True, + ) + url = reverse_lazy("admissions:schedule") + "?academy_id=9999" response = self.client.get(url) json = response.json() expected = [] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, 'syllabus')}]) + self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, "syllabus")}]) def test_academy_schedule__academy_id_in_querystring(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.bc.database.create(authenticate=True, - syllabus_schedule=True, - profile_academy=True, - capability='read_certificate', - role='potato', - syllabus=True) - url = reverse_lazy('admissions:schedule') + '?academy_id=1' + model = self.bc.database.create( + authenticate=True, + syllabus_schedule=True, + profile_academy=True, + capability="read_certificate", + role="potato", + syllabus=True, + ) + url = reverse_lazy("admissions:schedule") + "?academy_id=1" response = self.client.get(url) json = response.json() - expected = [{ - 'id': model.syllabus_schedule.id, - 'name': 
model.syllabus_schedule.name, - 'description': model.syllabus_schedule.description, - 'syllabus': model.syllabus_schedule.syllabus.id, - }] + expected = [ + { + "id": model.syllabus_schedule.id, + "name": model.syllabus_schedule.name, + "description": model.syllabus_schedule.description, + "syllabus": model.syllabus_schedule.syllabus.id, + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, 'syllabus')}]) + self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, "syllabus")}]) """ 🔽🔽🔽 Academy slug in querystring @@ -200,71 +229,83 @@ def test_academy_schedule__academy_id_in_querystring(self): def test_academy_schedule__academy_slug_in_querystring__bad_id(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.bc.database.create(authenticate=True, - syllabus_schedule=True, - profile_academy=True, - capability='read_certificate', - role='potato', - syllabus=True) - url = reverse_lazy('admissions:schedule') + '?academy_slug=they-killed-kenny' + model = self.bc.database.create( + authenticate=True, + syllabus_schedule=True, + profile_academy=True, + capability="read_certificate", + role="potato", + syllabus=True, + ) + url = reverse_lazy("admissions:schedule") + "?academy_slug=they-killed-kenny" response = self.client.get(url) json = response.json() expected = [] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, 'syllabus')}]) + self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, "syllabus")}]) def test_academy_schedule__academy_slug_in_querystring(self): """Test /certificate without auth""" self.headers(academy=1) - academy = {'slug': 'they-killed-kenny'} - model = self.bc.database.create(authenticate=True, - syllabus_schedule=True, - profile_academy=True, - capability='read_certificate', - role='potato', - syllabus=True, - academy=academy) - url = reverse_lazy('admissions:schedule') + '?academy_slug=they-killed-kenny' + academy = {"slug": "they-killed-kenny"} + model = self.bc.database.create( + authenticate=True, + syllabus_schedule=True, + profile_academy=True, + capability="read_certificate", + role="potato", + syllabus=True, + academy=academy, + ) + url = reverse_lazy("admissions:schedule") + "?academy_slug=they-killed-kenny" response = self.client.get(url) json = response.json() - expected = [{ - 'id': model.syllabus_schedule.id, - 'name': model.syllabus_schedule.name, - 'description': model.syllabus_schedule.description, - 'syllabus': model.syllabus_schedule.syllabus.id, - }] + expected = [ + { + "id": model.syllabus_schedule.id, + "name": model.syllabus_schedule.name, + "description": model.syllabus_schedule.description, + "syllabus": model.syllabus_schedule.syllabus.id, + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, 'syllabus')}]) + self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, "syllabus")}]) """ 🔽🔽🔽 Spy the extensions """ - @patch.object(APIViewExtensionHandlers, '_spy_extensions', MagicMock()) + @patch.object(APIViewExtensionHandlers, "_spy_extensions", MagicMock()) def test_certificate__spy_extensions(self): """Test /certificate without auth""" - url = reverse_lazy('admissions:schedule') + url = reverse_lazy("admissions:schedule") 
self.bc.database.create(authenticate=True) self.client.get(url) - self.assertEqual(APIViewExtensionHandlers._spy_extensions.call_args_list, [ - call(['LanguageExtension', 'LookupExtension', 'PaginationExtension']), - ]) + self.assertEqual( + APIViewExtensionHandlers._spy_extensions.call_args_list, + [ + call(["LanguageExtension", "LookupExtension", "PaginationExtension"]), + ], + ) - @patch.object(APIViewExtensionHandlers, '_spy_extension_arguments', MagicMock()) + @patch.object(APIViewExtensionHandlers, "_spy_extension_arguments", MagicMock()) def test_certificate__spy_extension_arguments(self): """Test /certificate without auth""" - url = reverse_lazy('admissions:schedule') + url = reverse_lazy("admissions:schedule") self.bc.database.create(authenticate=True) self.client.get(url) - self.assertEqual(APIViewExtensionHandlers._spy_extension_arguments.call_args_list, [ - call(paginate=True), - ]) + self.assertEqual( + APIViewExtensionHandlers._spy_extension_arguments.call_args_list, + [ + call(paginate=True), + ], + ) diff --git a/breathecode/admissions/tests/urls/tests_schedule_id.py b/breathecode/admissions/tests/urls/tests_schedule_id.py index 1f81f3737..43c5ffe0a 100644 --- a/breathecode/admissions/tests/urls/tests_schedule_id.py +++ b/breathecode/admissions/tests/urls/tests_schedule_id.py @@ -1,6 +1,7 @@ """ Test /certificate """ + from unittest.mock import patch from django.urls.base import reverse_lazy from rest_framework import status @@ -18,47 +19,54 @@ class CertificateTestSuite(AdmissionsTestCase): def test_certificate_without_auth(self): """Test /certificate without auth""" - url = reverse_lazy('admissions:schedule_id', kwargs={'schedule_id': 1}) + url = reverse_lazy("admissions:schedule_id", kwargs={"schedule_id": 1}) response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED - }) + self.assertEqual( + json, + {"detail": "Authentication credentials were not provided.", "status_code": status.HTTP_401_UNAUTHORIZED}, + ) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) self.assertEqual(self.all_syllabus_schedule_dict(), []) def test_certificate_without_data(self): """Test /certificate without auth""" - url = reverse_lazy('admissions:schedule_id', kwargs={'schedule_id': 1}) + url = reverse_lazy("admissions:schedule_id", kwargs={"schedule_id": 1}) self.generate_models(authenticate=True) response = self.client.get(url) json = response.json() - expected = {'status_code': 404, 'detail': 'schedule-not-found'} + expected = {"status_code": 404, "detail": "schedule-not-found"} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) self.assertEqual(self.all_syllabus_schedule_dict(), []) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_certificate_with_data(self): """Test /certificate without auth""" model = self.generate_models(authenticate=True, syllabus_schedule=True, syllabus=True) - url = reverse_lazy('admissions:schedule_id', kwargs={'schedule_id': 1}) + url = reverse_lazy("admissions:schedule_id", 
kwargs={"schedule_id": 1}) response = self.client.get(url) json = response.json() self.assertEqual( - json, { - 'id': model['syllabus_schedule'].id, - 'name': model['syllabus_schedule'].name, - 'description': model['syllabus_schedule'].description, - 'syllabus': model['syllabus_schedule'].syllabus.id, - }) + json, + { + "id": model["syllabus_schedule"].id, + "name": model["syllabus_schedule"].name, + "description": model["syllabus_schedule"].description, + "syllabus": model["syllabus_schedule"].syllabus.id, + }, + ) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_syllabus_schedule_dict(), [{ - **self.model_to_dict(model, 'syllabus_schedule'), - }]) + self.assertEqual( + self.all_syllabus_schedule_dict(), + [ + { + **self.model_to_dict(model, "syllabus_schedule"), + } + ], + ) diff --git a/breathecode/admissions/tests/urls/tests_syllabus.py b/breathecode/admissions/tests/urls/tests_syllabus.py index 137b7ff5b..539caf625 100644 --- a/breathecode/admissions/tests/urls/tests_syllabus.py +++ b/breathecode/admissions/tests/urls/tests_syllabus.py @@ -1,6 +1,7 @@ """ Test /certificate """ + from unittest.mock import MagicMock, call, patch from django.urls.base import reverse_lazy from rest_framework import status @@ -14,27 +15,27 @@ class CertificateTestSuite(AdmissionsTestCase): def test_syllabus_without_auth(self): """Test /certificate without auth""" - url = reverse_lazy('admissions:syllabus') + url = reverse_lazy("admissions:syllabus") response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED - }) + self.assertEqual( + json, + {"detail": "Authentication credentials were not provided.", "status_code": status.HTTP_401_UNAUTHORIZED}, + ) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) self.assertEqual(self.all_syllabus_schedule_dict(), []) def test_syllabus_without_capability(self): """Test /certificate without auth""" self.headers(academy=1) - url = reverse_lazy('admissions:syllabus') + url = reverse_lazy("admissions:syllabus") self.generate_models(authenticate=True) response = self.client.get(url) json = response.json() expected = { - 'status_code': 403, - 'detail': "You (user: 1) don't have this capability: read_syllabus for academy 1" + "status_code": 403, + "detail": "You (user: 1) don't have this capability: read_syllabus for academy 1", } self.assertEqual(json, expected) @@ -44,12 +45,10 @@ def test_syllabus_without_capability(self): def test_syllabus_without_syllabus(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - syllabus_schedule=True, - profile_academy=True, - capability='read_syllabus', - role='potato') - url = reverse_lazy('admissions:syllabus') + model = self.generate_models( + authenticate=True, syllabus_schedule=True, profile_academy=True, capability="read_syllabus", role="potato" + ) + url = reverse_lazy("admissions:syllabus") response = self.client.get(url) json = response.json() expected = [] @@ -61,54 +60,57 @@ def test_syllabus_without_syllabus(self): def test_syllabus(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - syllabus_schedule=True, - profile_academy=True, - capability='read_syllabus', - role='potato', - syllabus=True) - url = reverse_lazy('admissions:syllabus') + model = self.generate_models( + authenticate=True, + syllabus_schedule=True, 
+ profile_academy=True, + capability="read_syllabus", + role="potato", + syllabus=True, + ) + url = reverse_lazy("admissions:syllabus") response = self.client.get(url) json = response.json() - expected = [{ - 'main_technologies': None, - 'slug': model.syllabus.slug, - 'name': model.syllabus.name, - 'academy_owner': { - 'id': model.syllabus.academy_owner.id, - 'name': model.syllabus.academy_owner.name, - 'slug': model.syllabus.academy_owner.slug, - 'white_labeled': model.syllabus.academy_owner.white_labeled, - 'icon_url': model.syllabus.academy_owner.icon_url, - 'available_as_saas': model.syllabus.academy_owner.available_as_saas, - }, - 'duration_in_days': model.syllabus.duration_in_days, - 'duration_in_hours': model.syllabus.duration_in_hours, - 'week_hours': model.syllabus.week_hours, - 'github_url': model.syllabus.github_url, - 'id': model.syllabus.id, - 'logo': model.syllabus.logo, - 'private': model.syllabus.private, - 'created_at': self.datetime_to_iso(model.syllabus.created_at), - 'updated_at': self.datetime_to_iso(model.syllabus.updated_at), - }] + expected = [ + { + "main_technologies": None, + "slug": model.syllabus.slug, + "name": model.syllabus.name, + "academy_owner": { + "id": model.syllabus.academy_owner.id, + "name": model.syllabus.academy_owner.name, + "slug": model.syllabus.academy_owner.slug, + "white_labeled": model.syllabus.academy_owner.white_labeled, + "icon_url": model.syllabus.academy_owner.icon_url, + "available_as_saas": model.syllabus.academy_owner.available_as_saas, + }, + "duration_in_days": model.syllabus.duration_in_days, + "duration_in_hours": model.syllabus.duration_in_hours, + "week_hours": model.syllabus.week_hours, + "github_url": model.syllabus.github_url, + "id": model.syllabus.id, + "logo": model.syllabus.logo, + "private": model.syllabus.private, + "created_at": self.datetime_to_iso(model.syllabus.created_at), + "updated_at": self.datetime_to_iso(model.syllabus.updated_at), + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, 'syllabus')}]) + self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, "syllabus")}]) def test_syllabus_post_without_capabilities(self): """Test /certificate without auth""" self.headers(academy=1) model = self.generate_models(authenticate=True) - url = reverse_lazy('admissions:syllabus') + url = reverse_lazy("admissions:syllabus") data = {} response = self.client.post(url, data) json = response.json() expected = { - 'detail': "You (user: 1) don't have this capability: crud_syllabus " - 'for academy 1', - 'status_code': 403 + "detail": "You (user: 1) don't have this capability: crud_syllabus " "for academy 1", + "status_code": 403, } self.assertEqual(json, expected) @@ -118,13 +120,13 @@ def test_syllabus_post_without_capabilities(self): def test_syllabus__post__missing_slug_in_request(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, profile_academy=True, capability='crud_syllabus', role='potato') - url = reverse_lazy('admissions:syllabus') + model = self.generate_models(authenticate=True, profile_academy=True, capability="crud_syllabus", role="potato") + url = reverse_lazy("admissions:syllabus") data = {} - response = self.client.post(url, data, format='json') + response = self.client.post(url, data, format="json") json = response.json() - expected = {'detail': 'missing-slug', 'status_code': 400} + expected = {"detail": 
"missing-slug", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -133,13 +135,13 @@ def test_syllabus__post__missing_slug_in_request(self): def test_syllabus__post__missing_name_in_request(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, profile_academy=True, capability='crud_syllabus', role='potato') - url = reverse_lazy('admissions:syllabus') - data = {'slug': 'they-killed-kenny'} - response = self.client.post(url, data, format='json') + model = self.generate_models(authenticate=True, profile_academy=True, capability="crud_syllabus", role="potato") + url = reverse_lazy("admissions:syllabus") + data = {"slug": "they-killed-kenny"} + response = self.client.post(url, data, format="json") json = response.json() - expected = {'detail': 'missing-name', 'status_code': 400} + expected = {"detail": "missing-name", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -148,73 +150,80 @@ def test_syllabus__post__missing_name_in_request(self): def test_syllabus__post(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, profile_academy=True, capability='crud_syllabus', role='potato') - url = reverse_lazy('admissions:syllabus') + model = self.generate_models(authenticate=True, profile_academy=True, capability="crud_syllabus", role="potato") + url = reverse_lazy("admissions:syllabus") data = { - 'slug': 'they-killed-kenny', - 'name': 'They killed kenny', + "slug": "they-killed-kenny", + "name": "They killed kenny", } - response = self.client.post(url, data, format='json') + response = self.client.post(url, data, format="json") json = response.json() expected = { - 'academy_owner': 1, - 'duration_in_days': None, - 'duration_in_hours': None, - 'github_url': None, - 'id': 1, - 'logo': None, - 'private': False, - 'week_hours': None, + "academy_owner": 1, + "duration_in_days": None, + "duration_in_hours": None, + "github_url": None, + "id": 1, + "logo": None, + "private": False, + "week_hours": None, **data, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - self.assertEqual(self.all_syllabus_dict(), [{ - 'main_technologies': None, - 'academy_owner_id': 1, - 'duration_in_days': None, - 'duration_in_hours': None, - 'github_url': None, - 'id': 1, - 'is_documentation': False, - 'logo': None, - 'private': False, - 'week_hours': None, - **data, - }]) - - @patch.object(APIViewExtensionHandlers, '_spy_extensions', MagicMock()) + self.assertEqual( + self.all_syllabus_dict(), + [ + { + "main_technologies": None, + "academy_owner_id": 1, + "duration_in_days": None, + "duration_in_hours": None, + "github_url": None, + "id": 1, + "is_documentation": False, + "logo": None, + "private": False, + "week_hours": None, + **data, + } + ], + ) + + @patch.object(APIViewExtensionHandlers, "_spy_extensions", MagicMock()) def test_syllabus__spy_extensions(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - syllabus_schedule=True, - profile_academy=True, - capability='read_syllabus', - role='potato') + model = self.generate_models( + authenticate=True, syllabus_schedule=True, profile_academy=True, capability="read_syllabus", role="potato" + ) - url = reverse_lazy('admissions:syllabus') + url = reverse_lazy("admissions:syllabus") 
self.client.get(url) - self.assertEqual(APIViewExtensionHandlers._spy_extensions.call_args_list, [ - call(['LanguageExtension', 'LookupExtension', 'PaginationExtension']), - ]) + self.assertEqual( + APIViewExtensionHandlers._spy_extensions.call_args_list, + [ + call(["LanguageExtension", "LookupExtension", "PaginationExtension"]), + ], + ) - @patch.object(APIViewExtensionHandlers, '_spy_extension_arguments', MagicMock()) + @patch.object(APIViewExtensionHandlers, "_spy_extension_arguments", MagicMock()) def test_syllabus__spy_extension_arguments(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - syllabus_schedule=True, - profile_academy=True, - capability='read_syllabus', - role='potato') + model = self.generate_models( + authenticate=True, syllabus_schedule=True, profile_academy=True, capability="read_syllabus", role="potato" + ) - url = reverse_lazy('admissions:syllabus') + url = reverse_lazy("admissions:syllabus") self.client.get(url) - self.assertEqual(APIViewExtensionHandlers._spy_extension_arguments.call_args_list, [ - call(paginate=True), - ]) + self.assertEqual( + APIViewExtensionHandlers._spy_extension_arguments.call_args_list, + [ + call(paginate=True), + ], + ) diff --git a/breathecode/admissions/tests/urls/tests_syllabus_id.py b/breathecode/admissions/tests/urls/tests_syllabus_id.py index 46030f7e5..bff2d4e4f 100644 --- a/breathecode/admissions/tests/urls/tests_syllabus_id.py +++ b/breathecode/admissions/tests/urls/tests_syllabus_id.py @@ -1,6 +1,7 @@ """ Test /certificate """ + from django.urls.base import reverse_lazy from rest_framework import status from ..mixins import AdmissionsTestCase @@ -11,31 +12,37 @@ class CertificateTestSuite(AdmissionsTestCase): def test_syllabus_id_without_auth(self): """Test /certificate without auth""" - url = reverse_lazy('admissions:syllabus_id', kwargs={ - 'syllabus_id': 1, - }) + url = reverse_lazy( + "admissions:syllabus_id", + kwargs={ + "syllabus_id": 1, + }, + ) response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED - }) + self.assertEqual( + json, + {"detail": "Authentication credentials were not provided.", "status_code": status.HTTP_401_UNAUTHORIZED}, + ) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) self.assertEqual(self.all_syllabus_schedule_dict(), []) def test_syllabus_id_without_capability(self): """Test /certificate without auth""" self.headers(academy=1) - url = reverse_lazy('admissions:syllabus_id', kwargs={ - 'syllabus_id': 1, - }) + url = reverse_lazy( + "admissions:syllabus_id", + kwargs={ + "syllabus_id": 1, + }, + ) self.generate_models(authenticate=True) response = self.client.get(url) json = response.json() expected = { - 'status_code': 403, - 'detail': "You (user: 1) don't have this capability: read_syllabus for academy 1" + "status_code": 403, + "detail": "You (user: 1) don't have this capability: read_syllabus for academy 1", } self.assertEqual(json, expected) @@ -45,13 +52,16 @@ def test_syllabus_id_without_capability(self): def test_syllabus_id_without_data(self): """Test /certificate without auth""" self.headers(academy=1) - url = reverse_lazy('admissions:syllabus_id', kwargs={ - 'syllabus_id': 1, - }) - model = self.generate_models(authenticate=True, profile_academy=True, capability='read_syllabus', role='potato') + url = reverse_lazy( + "admissions:syllabus_id", + kwargs={ + "syllabus_id": 
1, + }, + ) + model = self.generate_models(authenticate=True, profile_academy=True, capability="read_syllabus", role="potato") response = self.client.get(url) json = response.json() - expected = {'status_code': 404, 'detail': 'syllabus-not-found'} + expected = {"status_code": 404, "detail": "syllabus-not-found"} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) @@ -60,58 +70,62 @@ def test_syllabus_id_without_data(self): def test_syllabus_id(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - syllabus_schedule=True, - profile_academy=True, - capability='read_syllabus', - role='potato', - syllabus_version=True, - syllabus=True) - url = reverse_lazy('admissions:syllabus_id', kwargs={'syllabus_id': 1}) + model = self.generate_models( + authenticate=True, + syllabus_schedule=True, + profile_academy=True, + capability="read_syllabus", + role="potato", + syllabus_version=True, + syllabus=True, + ) + url = reverse_lazy("admissions:syllabus_id", kwargs={"syllabus_id": 1}) response = self.client.get(url) json = response.json() expected = { - 'slug': model.syllabus.slug, - 'name': model.syllabus.name, - 'academy_owner': { - 'id': model.syllabus.academy_owner.id, - 'name': model.syllabus.academy_owner.name, - 'slug': model.syllabus.academy_owner.slug, - 'white_labeled': model.syllabus.academy_owner.white_labeled, - 'icon_url': model.syllabus.academy_owner.icon_url, - 'available_as_saas': model.syllabus.academy_owner.available_as_saas, + "slug": model.syllabus.slug, + "name": model.syllabus.name, + "academy_owner": { + "id": model.syllabus.academy_owner.id, + "name": model.syllabus.academy_owner.name, + "slug": model.syllabus.academy_owner.slug, + "white_labeled": model.syllabus.academy_owner.white_labeled, + "icon_url": model.syllabus.academy_owner.icon_url, + "available_as_saas": model.syllabus.academy_owner.available_as_saas, }, - 'duration_in_days': model.syllabus.duration_in_days, - 'duration_in_hours': model.syllabus.duration_in_hours, - 'week_hours': model.syllabus.week_hours, - 'github_url': model.syllabus.github_url, - 'id': model.syllabus.id, - 'logo': model.syllabus.logo, - 'private': model.syllabus.private, - 'main_technologies': None, - 'created_at': self.datetime_to_iso(model.syllabus.created_at), - 'updated_at': self.datetime_to_iso(model.syllabus.updated_at), + "duration_in_days": model.syllabus.duration_in_days, + "duration_in_hours": model.syllabus.duration_in_hours, + "week_hours": model.syllabus.week_hours, + "github_url": model.syllabus.github_url, + "id": model.syllabus.id, + "logo": model.syllabus.logo, + "private": model.syllabus.private, + "main_technologies": None, + "created_at": self.datetime_to_iso(model.syllabus.created_at), + "updated_at": self.datetime_to_iso(model.syllabus.updated_at), } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, 'syllabus')}]) + self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, "syllabus")}]) def test_syllabus_id__put__without_capabilities(self): """Test /certificate without auth""" self.headers(academy=1) model = self.generate_models(authenticate=True) - url = reverse_lazy('admissions:syllabus_id', kwargs={ - 'syllabus_id': 1, - }) + url = reverse_lazy( + "admissions:syllabus_id", + kwargs={ + "syllabus_id": 1, + }, + ) data = {} response = self.client.put(url, data) json = response.json() 
expected = { - 'detail': "You (user: 1) don't have this capability: crud_syllabus " - 'for academy 1', - 'status_code': 403 + "detail": "You (user: 1) don't have this capability: crud_syllabus " "for academy 1", + "status_code": 403, } self.assertEqual(json, expected) @@ -121,14 +135,17 @@ def test_syllabus_id__put__without_capabilities(self): def test_syllabus_id__put__setting_slug_as_empty(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, profile_academy=True, capability='crud_syllabus', role='potato') - url = reverse_lazy('admissions:syllabus_id', kwargs={ - 'syllabus_id': 1, - }) - data = {'slug': ''} + model = self.generate_models(authenticate=True, profile_academy=True, capability="crud_syllabus", role="potato") + url = reverse_lazy( + "admissions:syllabus_id", + kwargs={ + "syllabus_id": 1, + }, + ) + data = {"slug": ""} response = self.client.put(url, data) json = response.json() - expected = {'detail': 'empty-slug', 'status_code': 400} + expected = {"detail": "empty-slug", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -137,14 +154,17 @@ def test_syllabus_id__put__setting_slug_as_empty(self): def test_syllabus_id__put__setting_name_as_empty(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, profile_academy=True, capability='crud_syllabus', role='potato') - url = reverse_lazy('admissions:syllabus_id', kwargs={ - 'syllabus_id': 1, - }) - data = {'name': ''} + model = self.generate_models(authenticate=True, profile_academy=True, capability="crud_syllabus", role="potato") + url = reverse_lazy( + "admissions:syllabus_id", + kwargs={ + "syllabus_id": 1, + }, + ) + data = {"name": ""} response = self.client.put(url, data) json = response.json() - expected = {'detail': 'empty-name', 'status_code': 400} + expected = {"detail": "empty-name", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -153,14 +173,17 @@ def test_syllabus_id__put__setting_name_as_empty(self): def test_syllabus_id__put__not_found(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, profile_academy=True, capability='crud_syllabus', role='potato') - url = reverse_lazy('admissions:syllabus_id', kwargs={ - 'syllabus_id': 1, - }) + model = self.generate_models(authenticate=True, profile_academy=True, capability="crud_syllabus", role="potato") + url = reverse_lazy( + "admissions:syllabus_id", + kwargs={ + "syllabus_id": 1, + }, + ) data = {} response = self.client.put(url, data) json = response.json() - expected = {'detail': 'syllabus-not-found', 'status_code': 404} + expected = {"detail": "syllabus-not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) @@ -169,17 +192,19 @@ def test_syllabus_id__put__not_found(self): def test_syllabus_id__put__not_founds2(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_syllabus', - role='potato', - syllabus_schedule=True, - syllabus_schedule_time_slot=True) - url = reverse_lazy('admissions:syllabus_id', kwargs={'syllabus_id': 1}) + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="crud_syllabus", + role="potato", + 
syllabus_schedule=True, + syllabus_schedule_time_slot=True, + ) + url = reverse_lazy("admissions:syllabus_id", kwargs={"syllabus_id": 1}) data = {} response = self.client.put(url, data) json = response.json() - expected = {'detail': 'syllabus-not-found', 'status_code': 404} + expected = {"detail": "syllabus-not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) @@ -188,66 +213,75 @@ def test_syllabus_id__put__not_founds2(self): def test_syllabus_id__put(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_syllabus', - role='potato', - syllabus=True, - syllabus_schedule=True, - syllabus_schedule_time_slot=True) - url = reverse_lazy('admissions:syllabus_id', kwargs={'syllabus_id': 1}) + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="crud_syllabus", + role="potato", + syllabus=True, + syllabus_schedule=True, + syllabus_schedule_time_slot=True, + ) + url = reverse_lazy("admissions:syllabus_id", kwargs={"syllabus_id": 1}) data = {} response = self.client.put(url, data) json = response.json() expected = { - 'slug': model.syllabus.slug, - 'name': model.syllabus.name, - 'academy_owner': model.syllabus.academy_owner.id, - 'duration_in_days': model.syllabus.duration_in_days, - 'duration_in_hours': model.syllabus.duration_in_hours, - 'week_hours': model.syllabus.week_hours, - 'github_url': model.syllabus.github_url, - 'id': model.syllabus.id, - 'logo': model.syllabus.logo, - 'private': model.syllabus.private, + "slug": model.syllabus.slug, + "name": model.syllabus.name, + "academy_owner": model.syllabus.academy_owner.id, + "duration_in_days": model.syllabus.duration_in_days, + "duration_in_hours": model.syllabus.duration_in_hours, + "week_hours": model.syllabus.week_hours, + "github_url": model.syllabus.github_url, + "id": model.syllabus.id, + "logo": model.syllabus.logo, + "private": model.syllabus.private, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, 'syllabus')}]) + self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, "syllabus")}]) def test_syllabus_id__put__change_values(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_syllabus', - role='potato', - syllabus=True, - syllabus_schedule=True, - syllabus_schedule_time_slot=True) - url = reverse_lazy('admissions:syllabus_id', kwargs={'syllabus_id': 1}) + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="crud_syllabus", + role="potato", + syllabus=True, + syllabus_schedule=True, + syllabus_schedule_time_slot=True, + ) + url = reverse_lazy("admissions:syllabus_id", kwargs={"syllabus_id": 1}) data = { - 'duration_in_days': 9, - 'duration_in_hours': 99, - 'week_hours': 999, - 'github_url': 'https://tierragamer.com/wp-content/uploads/2020/08/naruto-cosplay-konan.jpg', - 'logo': 'a', - 'private': not model.syllabus.private, + "duration_in_days": 9, + "duration_in_hours": 99, + "week_hours": 999, + "github_url": "https://tierragamer.com/wp-content/uploads/2020/08/naruto-cosplay-konan.jpg", + "logo": "a", + "private": not model.syllabus.private, } - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, 
format="json") json = response.json() expected = { - 'academy_owner': model.syllabus.academy_owner.id, - 'id': model.syllabus.id, - 'slug': model.syllabus.slug, - 'name': model.syllabus.name, + "academy_owner": model.syllabus.academy_owner.id, + "id": model.syllabus.id, + "slug": model.syllabus.slug, + "name": model.syllabus.name, **data, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_syllabus_dict(), [{ - **self.model_to_dict(model, 'syllabus'), - **data, - }]) + self.assertEqual( + self.all_syllabus_dict(), + [ + { + **self.model_to_dict(model, "syllabus"), + **data, + } + ], + ) diff --git a/breathecode/admissions/tests/urls/tests_syllabus_id_version.py b/breathecode/admissions/tests/urls/tests_syllabus_id_version.py index 5f066ca1d..f263b9363 100644 --- a/breathecode/admissions/tests/urls/tests_syllabus_id_version.py +++ b/breathecode/admissions/tests/urls/tests_syllabus_id_version.py @@ -1,6 +1,7 @@ """ Test /certificate """ + import random from unittest.mock import patch, MagicMock from breathecode.services import datetime_to_iso_format @@ -29,23 +30,36 @@ def generate_syllabus_json(lesson_slug, quiz_slug=None, reply_slug=None, project n = random.randint(1, 10) return { - 'days': [{ - 'lessons': [{ - 'slug': lesson_slug, - }], - 'quizzes': [{ - 'slug': quiz_slug, - }], - 'replits': [{ - 'slug': reply_slug, - }], - 'projects': [{ - 'slug': project_slug, - }], - 'assignments': [{ - 'slug': assignment_slug, - }], - } for _ in range(n)] + "days": [ + { + "lessons": [ + { + "slug": lesson_slug, + } + ], + "quizzes": [ + { + "slug": quiz_slug, + } + ], + "replits": [ + { + "slug": reply_slug, + } + ], + "projects": [ + { + "slug": project_slug, + } + ], + "assignments": [ + { + "slug": assignment_slug, + } + ], + } + for _ in range(n) + ] } @@ -55,28 +69,27 @@ class CertificateTestSuite(AdmissionsTestCase): def test_syllabus_id_version_without_auth(self): """Test /certificate without auth""" self.headers(academy=1) - url = reverse_lazy('admissions:syllabus_id_version', kwargs={'syllabus_id': '1'}) + url = reverse_lazy("admissions:syllabus_id_version", kwargs={"syllabus_id": "1"}) response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED - }) + self.assertEqual( + json, + {"detail": "Authentication credentials were not provided.", "status_code": status.HTTP_401_UNAUTHORIZED}, + ) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) self.assertEqual(self.all_syllabus_schedule_dict(), []) def test_syllabus_id_version_without_capability(self): """Test /certificate without auth""" self.headers(academy=1) - url = reverse_lazy('admissions:syllabus_id_version', kwargs={'syllabus_id': '1'}) + url = reverse_lazy("admissions:syllabus_id_version", kwargs={"syllabus_id": "1"}) self.generate_models(authenticate=True) response = self.client.get(url) json = response.json() expected = { - 'status_code': 403, - 'detail': 'You (user: 1) don\'t have this capability: read_syllabus ' - 'for academy 1' + "status_code": 403, + "detail": "You (user: 1) don't have this capability: read_syllabus " "for academy 1", } self.assertEqual(json, expected) @@ -86,12 +99,10 @@ def test_syllabus_id_version_without_capability(self): def test_syllabus_id_version_without_syllabus(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - 
syllabus_schedule=True, - profile_academy=True, - capability='read_syllabus', - role='potato') - url = reverse_lazy('admissions:syllabus_id_version', kwargs={'syllabus_id': 1}) + model = self.generate_models( + authenticate=True, syllabus_schedule=True, profile_academy=True, capability="read_syllabus", role="potato" + ) + url = reverse_lazy("admissions:syllabus_id_version", kwargs={"syllabus_id": 1}) response = self.client.get(url) json = response.json() expected = [] @@ -102,70 +113,82 @@ def test_syllabus_id_version_without_syllabus(self): self.assertEqual(self.all_syllabus_dict(), []) self.assertEqual(self.all_syllabus_version_dict(), []) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_syllabus_id_version(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - syllabus_schedule=True, - profile_academy=True, - capability='read_syllabus', - role='potato', - syllabus=True, - syllabus_version=True) - url = reverse_lazy('admissions:syllabus_id_version', kwargs={ - 'syllabus_id': 1, - }) + model = self.generate_models( + authenticate=True, + syllabus_schedule=True, + profile_academy=True, + capability="read_syllabus", + role="potato", + syllabus=True, + syllabus_version=True, + ) + url = reverse_lazy( + "admissions:syllabus_id_version", + kwargs={ + "syllabus_id": 1, + }, + ) response = self.client.get(url) json = response.json() - expected = [{ - 'json': model['syllabus_version'].json, - 'created_at': datetime_to_iso_format(model['syllabus_version'].created_at), - 'updated_at': datetime_to_iso_format(model['syllabus_version'].updated_at), - 'name': model['syllabus'].name, - 'slug': model['syllabus'].slug, - 'syllabus': 1, - 'academy_owner': { - 'id': model['syllabus'].academy_owner.id, - 'name': model['syllabus'].academy_owner.name, - 'slug': model['syllabus'].academy_owner.slug, - 'white_labeled': model['syllabus'].academy_owner.white_labeled, - 'icon_url': model['syllabus'].academy_owner.icon_url, - 'available_as_saas': model['syllabus'].academy_owner.available_as_saas, - }, - 'version': model['syllabus_version'].version, - 'duration_in_days': model.syllabus.duration_in_days, - 'duration_in_hours': model.syllabus.duration_in_hours, - 'github_url': model.syllabus.github_url, - 'logo': model.syllabus.logo, - 'private': model.syllabus.private, - 'main_technologies': None, - 'week_hours': model.syllabus.week_hours, - 'change_log_details': None, - 'status': 'PUBLISHED', - }] + expected = [ + { + "json": model["syllabus_version"].json, + "created_at": datetime_to_iso_format(model["syllabus_version"].created_at), + "updated_at": datetime_to_iso_format(model["syllabus_version"].updated_at), + "name": model["syllabus"].name, + "slug": model["syllabus"].slug, + "syllabus": 1, + "academy_owner": { + "id": model["syllabus"].academy_owner.id, + "name": model["syllabus"].academy_owner.name, + "slug": model["syllabus"].academy_owner.slug, + "white_labeled": model["syllabus"].academy_owner.white_labeled, + "icon_url": model["syllabus"].academy_owner.icon_url, + "available_as_saas": model["syllabus"].academy_owner.available_as_saas, + }, + "version": model["syllabus_version"].version, + "duration_in_days": model.syllabus.duration_in_days, + "duration_in_hours": model.syllabus.duration_in_hours, + "github_url": model.syllabus.github_url, + "logo": model.syllabus.logo, + "private": model.syllabus.private, + "main_technologies": None, + 
"week_hours": model.syllabus.week_hours, + "change_log_details": None, + "status": "PUBLISHED", + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_syllabus_version_dict(), [{**self.model_to_dict(model, 'syllabus_version')}]) + self.assertEqual(self.all_syllabus_version_dict(), [{**self.model_to_dict(model, "syllabus_version")}]) def test_syllabus_id_version__post__bad_syllabus_id(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - syllabus_schedule=True, - profile_academy=True, - capability='crud_syllabus', - role='potato', - syllabus=True) - url = reverse_lazy('admissions:syllabus_id_version', kwargs={ - 'syllabus_id': 9999, - }) + model = self.generate_models( + authenticate=True, + syllabus_schedule=True, + profile_academy=True, + capability="crud_syllabus", + role="potato", + syllabus=True, + ) + url = reverse_lazy( + "admissions:syllabus_id_version", + kwargs={ + "syllabus_id": 9999, + }, + ) data = {} - response = self.client.post(url, data, format='json') + response = self.client.post(url, data, format="json") json = response.json() - expected = {'detail': 'syllabus-not-found', 'status_code': 404} + expected = {"detail": "syllabus-not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) @@ -174,19 +197,24 @@ def test_syllabus_id_version__post__bad_syllabus_id(self): def test_syllabus_id_version__post__without_json_field(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - syllabus_schedule=True, - profile_academy=True, - capability='crud_syllabus', - role='potato', - syllabus=True) - url = reverse_lazy('admissions:syllabus_id_version', kwargs={ - 'syllabus_id': 1, - }) + model = self.generate_models( + authenticate=True, + syllabus_schedule=True, + profile_academy=True, + capability="crud_syllabus", + role="potato", + syllabus=True, + ) + url = reverse_lazy( + "admissions:syllabus_id_version", + kwargs={ + "syllabus_id": 1, + }, + ) data = {} - response = self.client.post(url, data, format='json') + response = self.client.post(url, data, format="json") json = response.json() - expected = {'json': ['This field is required.']} + expected = {"json": ["This field is required."]} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -196,83 +224,102 @@ def test_syllabus_id_version__post(self): """Test /certificate without auth""" self.headers(academy=1) slug = self.bc.fake.slug() - asset_alias = {'slug': slug} - model = self.generate_models(authenticate=True, - syllabus_schedule=True, - profile_academy=True, - capability='crud_syllabus', - role='potato', - asset_alias=asset_alias, - syllabus=True) - url = reverse_lazy('admissions:syllabus_id_version', kwargs={ - 'syllabus_id': 1, - }) - data = {'json': generate_syllabus_json(slug)} - response = self.client.post(url, data, format='json') + asset_alias = {"slug": slug} + model = self.generate_models( + authenticate=True, + syllabus_schedule=True, + profile_academy=True, + capability="crud_syllabus", + role="potato", + asset_alias=asset_alias, + syllabus=True, + ) + url = reverse_lazy( + "admissions:syllabus_id_version", + kwargs={ + "syllabus_id": 1, + }, + ) + data = {"json": generate_syllabus_json(slug)} + response = self.client.post(url, data, format="json") json = response.json() expected = { - 'syllabus': 
1, - 'version': 1, - 'change_log_details': None, - 'status': 'PUBLISHED', + "syllabus": 1, + "version": 1, + "change_log_details": None, + "status": "PUBLISHED", **data, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - self.assertEqual(self.all_syllabus_version_dict(), [{ - 'id': 1, - 'integrity_check_at': None, - 'integrity_report': None, - 'integrity_status': 'PENDING', - 'json': {}, - 'change_log_details': None, - 'status': 'PUBLISHED', - 'syllabus_id': 1, - 'version': 1, - **data, - }]) + self.assertEqual( + self.all_syllabus_version_dict(), + [ + { + "id": 1, + "integrity_check_at": None, + "integrity_report": None, + "integrity_status": "PENDING", + "json": {}, + "change_log_details": None, + "status": "PUBLISHED", + "syllabus_id": 1, + "version": 1, + **data, + } + ], + ) def test_syllabus_id_version__post__autoincrement_version(self): """Test /certificate without auth""" self.headers(academy=1) slug = self.bc.fake.slug() - asset_alias = {'slug': slug} - model = self.generate_models(authenticate=True, - syllabus_schedule=True, - profile_academy=True, - capability='crud_syllabus', - role='potato', - syllabus=True, - asset_alias=asset_alias, - syllabus_version=True) - url = reverse_lazy('admissions:syllabus_id_version', kwargs={ - 'syllabus_id': 1, - }) - data = {'json': generate_syllabus_json(slug)} - response = self.client.post(url, data, format='json') + asset_alias = {"slug": slug} + model = self.generate_models( + authenticate=True, + syllabus_schedule=True, + profile_academy=True, + capability="crud_syllabus", + role="potato", + syllabus=True, + asset_alias=asset_alias, + syllabus_version=True, + ) + url = reverse_lazy( + "admissions:syllabus_id_version", + kwargs={ + "syllabus_id": 1, + }, + ) + data = {"json": generate_syllabus_json(slug)} + response = self.client.post(url, data, format="json") json = response.json() expected = { - 'syllabus': 1, - 'change_log_details': None, - 'status': 'PUBLISHED', - 'version': model.syllabus_version.version + 1, + "syllabus": 1, + "change_log_details": None, + "status": "PUBLISHED", + "version": model.syllabus_version.version + 1, **data, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - self.assertEqual(self.all_syllabus_version_dict(), [{ - **self.model_to_dict(model, 'syllabus_version') - }, { - 'id': 2, - 'integrity_check_at': None, - 'integrity_report': None, - 'integrity_status': 'PENDING', - 'change_log_details': None, - 'status': 'PUBLISHED', - 'json': {}, - 'syllabus_id': 1, - 'version': model.syllabus_version.version + 1, - **data, - }]) + self.assertEqual( + self.all_syllabus_version_dict(), + [ + {**self.model_to_dict(model, "syllabus_version")}, + { + "id": 2, + "integrity_check_at": None, + "integrity_report": None, + "integrity_status": "PENDING", + "change_log_details": None, + "status": "PUBLISHED", + "json": {}, + "syllabus_id": 1, + "version": model.syllabus_version.version + 1, + **data, + }, + ], + ) diff --git a/breathecode/admissions/tests/urls/tests_syllabus_id_version_version.py b/breathecode/admissions/tests/urls/tests_syllabus_id_version_version.py index 29db9dbc0..1312c01d9 100644 --- a/breathecode/admissions/tests/urls/tests_syllabus_id_version_version.py +++ b/breathecode/admissions/tests/urls/tests_syllabus_id_version_version.py @@ -1,6 +1,7 @@ """ Test /certificate """ + from unittest.mock import patch, MagicMock from breathecode.services import datetime_to_iso_format from django.urls.base import 
reverse_lazy @@ -17,14 +18,14 @@ class CertificateTestSuite(AdmissionsTestCase): def test_syllabus_id_version_version_without_auth(self): """Test /certificate without auth""" self.headers(academy=1) - url = reverse_lazy('admissions:syllabus_id_version_version', kwargs={'syllabus_id': 1, 'version': 1}) + url = reverse_lazy("admissions:syllabus_id_version_version", kwargs={"syllabus_id": 1, "version": 1}) response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED - }) + self.assertEqual( + json, + {"detail": "Authentication credentials were not provided.", "status_code": status.HTTP_401_UNAUTHORIZED}, + ) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) self.assertEqual(self.all_syllabus_schedule_dict(), []) self.assertEqual(self.all_cohort_time_slot_dict(), []) @@ -32,14 +33,13 @@ def test_syllabus_id_version_version_without_auth(self): def test_syllabus_id_version_version_without_capability(self): """Test /certificate without auth""" self.headers(academy=1) - url = reverse_lazy('admissions:syllabus_id_version_version', kwargs={'syllabus_id': 1, 'version': 1}) + url = reverse_lazy("admissions:syllabus_id_version_version", kwargs={"syllabus_id": 1, "version": 1}) self.generate_models(authenticate=True) response = self.client.get(url) json = response.json() expected = { - 'status_code': 403, - 'detail': 'You (user: 1) don\'t have this capability: read_syllabus ' - 'for academy 1' + "status_code": 403, + "detail": "You (user: 1) don't have this capability: read_syllabus " "for academy 1", } self.assertEqual(json, expected) @@ -50,15 +50,13 @@ def test_syllabus_id_version_version_without_capability(self): def test_syllabus_id_version_version_without_syllabus(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - syllabus_schedule=True, - profile_academy=True, - capability='read_syllabus', - role='potato') - url = reverse_lazy('admissions:syllabus_id_version_version', kwargs={'syllabus_id': 1, 'version': 1}) + model = self.generate_models( + authenticate=True, syllabus_schedule=True, profile_academy=True, capability="read_syllabus", role="potato" + ) + url = reverse_lazy("admissions:syllabus_id_version_version", kwargs={"syllabus_id": 1, "version": 1}) response = self.client.get(url) json = response.json() - expected = {'detail': 'syllabus-version-not-found', 'status_code': 404} + expected = {"detail": "syllabus-version-not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) @@ -67,54 +65,55 @@ def test_syllabus_id_version_version_without_syllabus(self): self.assertEqual(self.all_syllabus_version_dict(), []) self.assertEqual(self.all_cohort_time_slot_dict(), []) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_syllabus_id_version_version(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - syllabus_schedule=True, - profile_academy=True, - capability='read_syllabus', - role='potato', - syllabus=True, - syllabus_version=True) - url = reverse_lazy('admissions:syllabus_id_version_version', - kwargs={ - 'syllabus_id': 1, - 'version': model.syllabus_version.version - }) + model = self.generate_models( + authenticate=True, + syllabus_schedule=True, + 
profile_academy=True, + capability="read_syllabus", + role="potato", + syllabus=True, + syllabus_version=True, + ) + url = reverse_lazy( + "admissions:syllabus_id_version_version", + kwargs={"syllabus_id": 1, "version": model.syllabus_version.version}, + ) response = self.client.get(url) json = response.json() expected = { - 'json': model['syllabus_version'].json, - 'created_at': datetime_to_iso_format(model['syllabus_version'].created_at), - 'updated_at': datetime_to_iso_format(model['syllabus_version'].updated_at), - 'name': model['syllabus'].name, - 'slug': model['syllabus'].slug, - 'syllabus': 1, - 'academy_owner': { - 'id': model['syllabus'].academy_owner.id, - 'name': model['syllabus'].academy_owner.name, - 'slug': model['syllabus'].academy_owner.slug, - 'white_labeled': model['syllabus'].academy_owner.white_labeled, - 'icon_url': model['syllabus'].academy_owner.icon_url, - 'available_as_saas': model['syllabus'].academy_owner.available_as_saas, + "json": model["syllabus_version"].json, + "created_at": datetime_to_iso_format(model["syllabus_version"].created_at), + "updated_at": datetime_to_iso_format(model["syllabus_version"].updated_at), + "name": model["syllabus"].name, + "slug": model["syllabus"].slug, + "syllabus": 1, + "academy_owner": { + "id": model["syllabus"].academy_owner.id, + "name": model["syllabus"].academy_owner.name, + "slug": model["syllabus"].academy_owner.slug, + "white_labeled": model["syllabus"].academy_owner.white_labeled, + "icon_url": model["syllabus"].academy_owner.icon_url, + "available_as_saas": model["syllabus"].academy_owner.available_as_saas, }, - 'version': model['syllabus_version'].version, - 'duration_in_days': model.syllabus.duration_in_days, - 'duration_in_hours': model.syllabus.duration_in_hours, - 'github_url': model.syllabus.github_url, - 'logo': model.syllabus.logo, - 'private': model.syllabus.private, - 'week_hours': model.syllabus.week_hours, - 'change_log_details': model.syllabus_version.change_log_details, - 'status': model.syllabus_version.status, - 'main_technologies': None, + "version": model["syllabus_version"].version, + "duration_in_days": model.syllabus.duration_in_days, + "duration_in_hours": model.syllabus.duration_in_hours, + "github_url": model.syllabus.github_url, + "logo": model.syllabus.logo, + "private": model.syllabus.private, + "week_hours": model.syllabus.week_hours, + "change_log_details": model.syllabus_version.change_log_details, + "status": model.syllabus_version.status, + "main_technologies": None, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, 'syllabus')}]) - self.assertEqual(self.all_syllabus_version_dict(), [{**self.model_to_dict(model, 'syllabus_version')}]) + self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, "syllabus")}]) + self.assertEqual(self.all_syllabus_version_dict(), [{**self.model_to_dict(model, "syllabus_version")}]) self.assertEqual(self.all_cohort_time_slot_dict(), []) diff --git a/breathecode/admissions/tests/urls/tests_syllabus_slug.py b/breathecode/admissions/tests/urls/tests_syllabus_slug.py index 7dc632cc5..eecd64021 100644 --- a/breathecode/admissions/tests/urls/tests_syllabus_slug.py +++ b/breathecode/admissions/tests/urls/tests_syllabus_slug.py @@ -1,6 +1,7 @@ """ Test /certificate """ + from django.urls.base import reverse_lazy from rest_framework import status from ..mixins import AdmissionsTestCase @@ -11,31 +12,37 @@ class 
CertificateTestSuite(AdmissionsTestCase): def test_syllabus_id_without_auth(self): """Test /certificate without auth""" - url = reverse_lazy('admissions:syllabus_slug', kwargs={ - 'syllabus_slug': 'they_killed_kenny', - }) + url = reverse_lazy( + "admissions:syllabus_slug", + kwargs={ + "syllabus_slug": "they_killed_kenny", + }, + ) response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED - }) + self.assertEqual( + json, + {"detail": "Authentication credentials were not provided.", "status_code": status.HTTP_401_UNAUTHORIZED}, + ) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) self.assertEqual(self.all_syllabus_schedule_dict(), []) def test_syllabus_id_without_capability(self): """Test /certificate without auth""" self.headers(academy=1) - url = reverse_lazy('admissions:syllabus_slug', kwargs={ - 'syllabus_slug': 'they_killed_kenny', - }) + url = reverse_lazy( + "admissions:syllabus_slug", + kwargs={ + "syllabus_slug": "they_killed_kenny", + }, + ) self.generate_models(authenticate=True) response = self.client.get(url) json = response.json() expected = { - 'status_code': 403, - 'detail': "You (user: 1) don't have this capability: read_syllabus for academy 1" + "status_code": 403, + "detail": "You (user: 1) don't have this capability: read_syllabus for academy 1", } self.assertEqual(json, expected) @@ -45,13 +52,16 @@ def test_syllabus_id_without_capability(self): def test_syllabus_id_without_data(self): """Test /certificate without auth""" self.headers(academy=1) - url = reverse_lazy('admissions:syllabus_slug', kwargs={ - 'syllabus_slug': 'they_killed_kenny', - }) - model = self.generate_models(authenticate=True, profile_academy=True, capability='read_syllabus', role='potato') + url = reverse_lazy( + "admissions:syllabus_slug", + kwargs={ + "syllabus_slug": "they_killed_kenny", + }, + ) + model = self.generate_models(authenticate=True, profile_academy=True, capability="read_syllabus", role="potato") response = self.client.get(url) json = response.json() - expected = {'status_code': 404, 'detail': 'syllabus-not-found'} + expected = {"status_code": 404, "detail": "syllabus-not-found"} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) @@ -60,60 +70,64 @@ def test_syllabus_id_without_data(self): def test_syllabus_id(self): """Test /certificate without auth""" self.headers(academy=1) - syllabus_kwargs = {'slug': 'they-killed-kenny'} - model = self.generate_models(authenticate=True, - syllabus_schedule=True, - profile_academy=True, - capability='read_syllabus', - role='potato', - syllabus_version=True, - syllabus=True, - syllabus_kwargs=syllabus_kwargs) - url = reverse_lazy('admissions:syllabus_slug', kwargs={'syllabus_slug': 'they-killed-kenny'}) + syllabus_kwargs = {"slug": "they-killed-kenny"} + model = self.generate_models( + authenticate=True, + syllabus_schedule=True, + profile_academy=True, + capability="read_syllabus", + role="potato", + syllabus_version=True, + syllabus=True, + syllabus_kwargs=syllabus_kwargs, + ) + url = reverse_lazy("admissions:syllabus_slug", kwargs={"syllabus_slug": "they-killed-kenny"}) response = self.client.get(url) json = response.json() expected = { - 'slug': model.syllabus.slug, - 'name': model.syllabus.name, - 'academy_owner': { - 'id': model.syllabus.academy_owner.id, - 'name': model.syllabus.academy_owner.name, - 'slug': model.syllabus.academy_owner.slug, - 
'white_labeled': model.syllabus.academy_owner.white_labeled, - 'icon_url': model.syllabus.academy_owner.icon_url, - 'available_as_saas': model.syllabus.academy_owner.available_as_saas, + "slug": model.syllabus.slug, + "name": model.syllabus.name, + "academy_owner": { + "id": model.syllabus.academy_owner.id, + "name": model.syllabus.academy_owner.name, + "slug": model.syllabus.academy_owner.slug, + "white_labeled": model.syllabus.academy_owner.white_labeled, + "icon_url": model.syllabus.academy_owner.icon_url, + "available_as_saas": model.syllabus.academy_owner.available_as_saas, }, - 'duration_in_days': model.syllabus.duration_in_days, - 'duration_in_hours': model.syllabus.duration_in_hours, - 'week_hours': model.syllabus.week_hours, - 'github_url': model.syllabus.github_url, - 'id': model.syllabus.id, - 'logo': model.syllabus.logo, - 'private': model.syllabus.private, - 'main_technologies': None, - 'created_at': self.datetime_to_iso(model.syllabus.created_at), - 'updated_at': self.datetime_to_iso(model.syllabus.updated_at), + "duration_in_days": model.syllabus.duration_in_days, + "duration_in_hours": model.syllabus.duration_in_hours, + "week_hours": model.syllabus.week_hours, + "github_url": model.syllabus.github_url, + "id": model.syllabus.id, + "logo": model.syllabus.logo, + "private": model.syllabus.private, + "main_technologies": None, + "created_at": self.datetime_to_iso(model.syllabus.created_at), + "updated_at": self.datetime_to_iso(model.syllabus.updated_at), } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, 'syllabus')}]) + self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, "syllabus")}]) def test_syllabus_id__put__without_capabilities(self): """Test /certificate without auth""" self.headers(academy=1) model = self.generate_models(authenticate=True) - url = reverse_lazy('admissions:syllabus_slug', kwargs={ - 'syllabus_slug': 'they_killed_kenny', - }) + url = reverse_lazy( + "admissions:syllabus_slug", + kwargs={ + "syllabus_slug": "they_killed_kenny", + }, + ) data = {} response = self.client.put(url, data) json = response.json() expected = { - 'detail': "You (user: 1) don't have this capability: crud_syllabus " - 'for academy 1', - 'status_code': 403 + "detail": "You (user: 1) don't have this capability: crud_syllabus " "for academy 1", + "status_code": 403, } self.assertEqual(json, expected) @@ -123,14 +137,17 @@ def test_syllabus_id__put__without_capabilities(self): def test_syllabus_id__put__setting_slug_as_empty(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, profile_academy=True, capability='crud_syllabus', role='potato') - url = reverse_lazy('admissions:syllabus_slug', kwargs={ - 'syllabus_slug': 'they_killed_kenny', - }) - data = {'slug': ''} + model = self.generate_models(authenticate=True, profile_academy=True, capability="crud_syllabus", role="potato") + url = reverse_lazy( + "admissions:syllabus_slug", + kwargs={ + "syllabus_slug": "they_killed_kenny", + }, + ) + data = {"slug": ""} response = self.client.put(url, data) json = response.json() - expected = {'detail': 'empty-slug', 'status_code': 400} + expected = {"detail": "empty-slug", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -139,14 +156,17 @@ def test_syllabus_id__put__setting_slug_as_empty(self): def 
test_syllabus_id__put__setting_name_as_empty(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, profile_academy=True, capability='crud_syllabus', role='potato') - url = reverse_lazy('admissions:syllabus_slug', kwargs={ - 'syllabus_slug': 'they_killed_kenny', - }) - data = {'name': ''} + model = self.generate_models(authenticate=True, profile_academy=True, capability="crud_syllabus", role="potato") + url = reverse_lazy( + "admissions:syllabus_slug", + kwargs={ + "syllabus_slug": "they_killed_kenny", + }, + ) + data = {"name": ""} response = self.client.put(url, data) json = response.json() - expected = {'detail': 'empty-name', 'status_code': 400} + expected = {"detail": "empty-name", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -155,14 +175,17 @@ def test_syllabus_id__put__setting_name_as_empty(self): def test_syllabus_id__put__not_found(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, profile_academy=True, capability='crud_syllabus', role='potato') - url = reverse_lazy('admissions:syllabus_slug', kwargs={ - 'syllabus_slug': 'they_killed_kenny', - }) + model = self.generate_models(authenticate=True, profile_academy=True, capability="crud_syllabus", role="potato") + url = reverse_lazy( + "admissions:syllabus_slug", + kwargs={ + "syllabus_slug": "they_killed_kenny", + }, + ) data = {} response = self.client.put(url, data) json = response.json() - expected = {'detail': 'syllabus-not-found', 'status_code': 404} + expected = {"detail": "syllabus-not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) @@ -171,17 +194,19 @@ def test_syllabus_id__put__not_found(self): def test_syllabus_id__put__not_founds2(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_syllabus', - role='potato', - syllabus_schedule=True, - syllabus_schedule_time_slot=True) - url = reverse_lazy('admissions:syllabus_slug', kwargs={'syllabus_slug': 'they-killed-kenny'}) + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="crud_syllabus", + role="potato", + syllabus_schedule=True, + syllabus_schedule_time_slot=True, + ) + url = reverse_lazy("admissions:syllabus_slug", kwargs={"syllabus_slug": "they-killed-kenny"}) data = {} response = self.client.put(url, data) json = response.json() - expected = {'detail': 'syllabus-not-found', 'status_code': 404} + expected = {"detail": "syllabus-not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) @@ -190,70 +215,79 @@ def test_syllabus_id__put__not_founds2(self): def test_syllabus_id__put(self): """Test /certificate without auth""" self.headers(academy=1) - syllabus_kwargs = {'slug': 'they-killed-kenny'} - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_syllabus', - role='potato', - syllabus=True, - syllabus_schedule=True, - syllabus_schedule_time_slot=True, - syllabus_kwargs=syllabus_kwargs) - url = reverse_lazy('admissions:syllabus_slug', kwargs={'syllabus_slug': 'they-killed-kenny'}) + syllabus_kwargs = {"slug": "they-killed-kenny"} + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="crud_syllabus", + 
role="potato", + syllabus=True, + syllabus_schedule=True, + syllabus_schedule_time_slot=True, + syllabus_kwargs=syllabus_kwargs, + ) + url = reverse_lazy("admissions:syllabus_slug", kwargs={"syllabus_slug": "they-killed-kenny"}) data = {} response = self.client.put(url, data) json = response.json() expected = { - 'slug': model.syllabus.slug, - 'name': model.syllabus.name, - 'academy_owner': model.syllabus.academy_owner.id, - 'duration_in_days': model.syllabus.duration_in_days, - 'duration_in_hours': model.syllabus.duration_in_hours, - 'week_hours': model.syllabus.week_hours, - 'github_url': model.syllabus.github_url, - 'id': model.syllabus.id, - 'logo': model.syllabus.logo, - 'private': model.syllabus.private, + "slug": model.syllabus.slug, + "name": model.syllabus.name, + "academy_owner": model.syllabus.academy_owner.id, + "duration_in_days": model.syllabus.duration_in_days, + "duration_in_hours": model.syllabus.duration_in_hours, + "week_hours": model.syllabus.week_hours, + "github_url": model.syllabus.github_url, + "id": model.syllabus.id, + "logo": model.syllabus.logo, + "private": model.syllabus.private, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, 'syllabus')}]) + self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, "syllabus")}]) def test_syllabus_id__put__change_values(self): """Test /certificate without auth""" self.headers(academy=1) - syllabus_kwargs = {'slug': 'they-killed-kenny'} - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_syllabus', - role='potato', - syllabus=True, - syllabus_schedule=True, - syllabus_schedule_time_slot=True, - syllabus_kwargs=syllabus_kwargs) - url = reverse_lazy('admissions:syllabus_slug', kwargs={'syllabus_slug': 'they-killed-kenny'}) + syllabus_kwargs = {"slug": "they-killed-kenny"} + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="crud_syllabus", + role="potato", + syllabus=True, + syllabus_schedule=True, + syllabus_schedule_time_slot=True, + syllabus_kwargs=syllabus_kwargs, + ) + url = reverse_lazy("admissions:syllabus_slug", kwargs={"syllabus_slug": "they-killed-kenny"}) data = { - 'duration_in_days': 9, - 'duration_in_hours': 99, - 'week_hours': 999, - 'github_url': 'https://tierragamer.com/wp-content/uploads/2020/08/naruto-cosplay-konan.jpg', - 'logo': 'a', - 'private': not model.syllabus.private, + "duration_in_days": 9, + "duration_in_hours": 99, + "week_hours": 999, + "github_url": "https://tierragamer.com/wp-content/uploads/2020/08/naruto-cosplay-konan.jpg", + "logo": "a", + "private": not model.syllabus.private, } - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() expected = { - 'academy_owner': model.syllabus.academy_owner.id, - 'id': model.syllabus.id, - 'slug': model.syllabus.slug, - 'name': model.syllabus.name, + "academy_owner": model.syllabus.academy_owner.id, + "id": model.syllabus.id, + "slug": model.syllabus.slug, + "name": model.syllabus.name, **data, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_syllabus_dict(), [{ - **self.model_to_dict(model, 'syllabus'), - **data, - }]) + self.assertEqual( + self.all_syllabus_dict(), + [ + { + **self.model_to_dict(model, "syllabus"), + **data, + } + ], + ) diff --git 
a/breathecode/admissions/tests/urls/tests_syllabus_slug_version.py b/breathecode/admissions/tests/urls/tests_syllabus_slug_version.py index 810238d1c..311a25979 100644 --- a/breathecode/admissions/tests/urls/tests_syllabus_slug_version.py +++ b/breathecode/admissions/tests/urls/tests_syllabus_slug_version.py @@ -1,6 +1,7 @@ """ Test /certificate """ + import random from unittest.mock import patch, MagicMock @@ -30,23 +31,36 @@ def generate_syllabus_json(lesson_slug, quiz_slug=None, reply_slug=None, project n = random.randint(1, 10) return { - 'days': [{ - 'lessons': [{ - 'slug': lesson_slug, - }], - 'quizzes': [{ - 'slug': quiz_slug, - }], - 'replits': [{ - 'slug': reply_slug, - }], - 'projects': [{ - 'slug': project_slug, - }], - 'assignments': [{ - 'slug': assignment_slug, - }], - } for _ in range(n)] + "days": [ + { + "lessons": [ + { + "slug": lesson_slug, + } + ], + "quizzes": [ + { + "slug": quiz_slug, + } + ], + "replits": [ + { + "slug": reply_slug, + } + ], + "projects": [ + { + "slug": project_slug, + } + ], + "assignments": [ + { + "slug": assignment_slug, + } + ], + } + for _ in range(n) + ] } @@ -56,28 +70,27 @@ class CertificateTestSuite(AdmissionsTestCase): def test_syllabus_slug_version_without_auth(self): """Test /certificate without auth""" self.headers(academy=1) - url = reverse_lazy('admissions:syllabus_slug_version', kwargs={'syllabus_slug': 'they-killed-kenny'}) + url = reverse_lazy("admissions:syllabus_slug_version", kwargs={"syllabus_slug": "they-killed-kenny"}) response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED - }) + self.assertEqual( + json, + {"detail": "Authentication credentials were not provided.", "status_code": status.HTTP_401_UNAUTHORIZED}, + ) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) self.assertEqual(self.all_syllabus_version_dict(), []) def test_syllabus_slug_version_without_capability(self): """Test /certificate without auth""" self.headers(academy=1) - url = reverse_lazy('admissions:syllabus_slug_version', kwargs={'syllabus_slug': 'they-killed-kenny'}) + url = reverse_lazy("admissions:syllabus_slug_version", kwargs={"syllabus_slug": "they-killed-kenny"}) self.generate_models(authenticate=True) response = self.client.get(url) json = response.json() expected = { - 'status_code': 403, - 'detail': 'You (user: 1) don\'t have this capability: read_syllabus ' - 'for academy 1' + "status_code": 403, + "detail": "You (user: 1) don't have this capability: read_syllabus " "for academy 1", } self.assertEqual(json, expected) @@ -87,12 +100,10 @@ def test_syllabus_slug_version_without_capability(self): def test_syllabus_slug_version_without_syllabus(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - syllabus_schedule=True, - profile_academy=True, - capability='read_syllabus', - role='potato') - url = reverse_lazy('admissions:syllabus_slug_version', kwargs={'syllabus_slug': 'they-killed-kenny'}) + model = self.generate_models( + authenticate=True, syllabus_schedule=True, profile_academy=True, capability="read_syllabus", role="potato" + ) + url = reverse_lazy("admissions:syllabus_slug_version", kwargs={"syllabus_slug": "they-killed-kenny"}) response = self.client.get(url) json = response.json() expected = [] @@ -102,72 +113,84 @@ def test_syllabus_slug_version_without_syllabus(self): self.assertEqual(self.all_syllabus_version_dict(), []) 
- @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_syllabus_slug_version(self): """Test /certificate without auth""" self.headers(academy=1) - syllabus_kwargs = {'slug': 'they-killed-kenny'} - model = self.generate_models(authenticate=True, - syllabus_schedule=True, - profile_academy=True, - capability='read_syllabus', - role='potato', - syllabus=True, - syllabus_version=True, - syllabus_kwargs=syllabus_kwargs) - url = reverse_lazy('admissions:syllabus_slug_version', kwargs={ - 'syllabus_slug': 'they-killed-kenny', - }) + syllabus_kwargs = {"slug": "they-killed-kenny"} + model = self.generate_models( + authenticate=True, + syllabus_schedule=True, + profile_academy=True, + capability="read_syllabus", + role="potato", + syllabus=True, + syllabus_version=True, + syllabus_kwargs=syllabus_kwargs, + ) + url = reverse_lazy( + "admissions:syllabus_slug_version", + kwargs={ + "syllabus_slug": "they-killed-kenny", + }, + ) response = self.client.get(url) json = response.json() - expected = [{ - 'json': model['syllabus_version'].json, - 'created_at': datetime_to_iso_format(model['syllabus_version'].created_at), - 'updated_at': datetime_to_iso_format(model['syllabus_version'].updated_at), - 'name': model['syllabus'].name, - 'slug': model['syllabus'].slug, - 'syllabus': 1, - 'version': model['syllabus_version'].version, - 'academy_owner': { - 'id': model['syllabus'].academy_owner.id, - 'name': model['syllabus'].academy_owner.name, - 'slug': model['syllabus'].academy_owner.slug, - 'white_labeled': model['syllabus'].academy_owner.white_labeled, - 'icon_url': model['syllabus'].academy_owner.icon_url, - 'available_as_saas': model['syllabus'].academy_owner.available_as_saas, - }, - 'duration_in_days': model.syllabus.duration_in_days, - 'duration_in_hours': model.syllabus.duration_in_hours, - 'github_url': model.syllabus.github_url, - 'logo': model.syllabus.logo, - 'private': model.syllabus.private, - 'main_technologies': None, - 'week_hours': model.syllabus.week_hours, - 'change_log_details': model.syllabus_version.change_log_details, - 'status': model.syllabus_version.status, - }] + expected = [ + { + "json": model["syllabus_version"].json, + "created_at": datetime_to_iso_format(model["syllabus_version"].created_at), + "updated_at": datetime_to_iso_format(model["syllabus_version"].updated_at), + "name": model["syllabus"].name, + "slug": model["syllabus"].slug, + "syllabus": 1, + "version": model["syllabus_version"].version, + "academy_owner": { + "id": model["syllabus"].academy_owner.id, + "name": model["syllabus"].academy_owner.name, + "slug": model["syllabus"].academy_owner.slug, + "white_labeled": model["syllabus"].academy_owner.white_labeled, + "icon_url": model["syllabus"].academy_owner.icon_url, + "available_as_saas": model["syllabus"].academy_owner.available_as_saas, + }, + "duration_in_days": model.syllabus.duration_in_days, + "duration_in_hours": model.syllabus.duration_in_hours, + "github_url": model.syllabus.github_url, + "logo": model.syllabus.logo, + "private": model.syllabus.private, + "main_technologies": None, + "week_hours": model.syllabus.week_hours, + "change_log_details": model.syllabus_version.change_log_details, + "status": model.syllabus_version.status, + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_syllabus_version_dict(), [{**self.model_to_dict(model, 'syllabus_version')}]) + 
self.assertEqual(self.all_syllabus_version_dict(), [{**self.model_to_dict(model, "syllabus_version")}]) def test_syllabus_slug_version__post__bad_syllabus_id(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - syllabus_schedule=True, - profile_academy=True, - capability='crud_syllabus', - role='potato', - syllabus=True) - url = reverse_lazy('admissions:syllabus_slug_version', kwargs={ - 'syllabus_slug': 'they-killed-kenny', - }) + model = self.generate_models( + authenticate=True, + syllabus_schedule=True, + profile_academy=True, + capability="crud_syllabus", + role="potato", + syllabus=True, + ) + url = reverse_lazy( + "admissions:syllabus_slug_version", + kwargs={ + "syllabus_slug": "they-killed-kenny", + }, + ) data = {} - response = self.client.post(url, data, format='json') + response = self.client.post(url, data, format="json") json = response.json() - expected = {'detail': 'syllabus-not-found', 'status_code': 404} + expected = {"detail": "syllabus-not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) @@ -176,21 +199,26 @@ def test_syllabus_slug_version__post__bad_syllabus_id(self): def test_syllabus_slug_version__post__without_json_field(self): """Test /certificate without auth""" self.headers(academy=1) - syllabus_kwargs = {'slug': 'they-killed-kenny'} - model = self.generate_models(authenticate=True, - syllabus_schedule=True, - profile_academy=True, - capability='crud_syllabus', - role='potato', - syllabus=True, - syllabus_kwargs=syllabus_kwargs) - url = reverse_lazy('admissions:syllabus_slug_version', kwargs={ - 'syllabus_slug': 'they-killed-kenny', - }) + syllabus_kwargs = {"slug": "they-killed-kenny"} + model = self.generate_models( + authenticate=True, + syllabus_schedule=True, + profile_academy=True, + capability="crud_syllabus", + role="potato", + syllabus=True, + syllabus_kwargs=syllabus_kwargs, + ) + url = reverse_lazy( + "admissions:syllabus_slug_version", + kwargs={ + "syllabus_slug": "they-killed-kenny", + }, + ) data = {} - response = self.client.post(url, data, format='json') + response = self.client.post(url, data, format="json") json = response.json() - expected = {'json': ['This field is required.']} + expected = {"json": ["This field is required."]} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -199,86 +227,105 @@ def test_syllabus_slug_version__post__without_json_field(self): def test_syllabus_slug_version__post(self): """Test /certificate without auth""" self.headers(academy=1) - syllabus_kwargs = {'slug': 'they-killed-kenny'} + syllabus_kwargs = {"slug": "they-killed-kenny"} slug = self.bc.fake.slug() - asset_alias = {'slug': slug} - model = self.generate_models(authenticate=True, - syllabus_schedule=True, - profile_academy=True, - capability='crud_syllabus', - role='potato', - syllabus=True, - asset_alias=asset_alias, - syllabus_kwargs=syllabus_kwargs) - url = reverse_lazy('admissions:syllabus_slug_version', kwargs={ - 'syllabus_slug': 'they-killed-kenny', - }) - data = {'json': generate_syllabus_json(slug)} - response = self.client.post(url, data, format='json') + asset_alias = {"slug": slug} + model = self.generate_models( + authenticate=True, + syllabus_schedule=True, + profile_academy=True, + capability="crud_syllabus", + role="potato", + syllabus=True, + asset_alias=asset_alias, + syllabus_kwargs=syllabus_kwargs, + ) + url = reverse_lazy( + 
"admissions:syllabus_slug_version", + kwargs={ + "syllabus_slug": "they-killed-kenny", + }, + ) + data = {"json": generate_syllabus_json(slug)} + response = self.client.post(url, data, format="json") json = response.json() expected = { - 'syllabus': 1, - 'change_log_details': None, - 'status': 'PUBLISHED', - 'version': 1, + "syllabus": 1, + "change_log_details": None, + "status": "PUBLISHED", + "version": 1, **data, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - self.assertEqual(self.all_syllabus_version_dict(), [{ - 'id': 1, - 'integrity_check_at': None, - 'integrity_report': None, - 'integrity_status': 'PENDING', - 'change_log_details': None, - 'status': 'PUBLISHED', - 'json': data['json'], - 'syllabus_id': 1, - 'version': 1, - }]) + self.assertEqual( + self.all_syllabus_version_dict(), + [ + { + "id": 1, + "integrity_check_at": None, + "integrity_report": None, + "integrity_status": "PENDING", + "change_log_details": None, + "status": "PUBLISHED", + "json": data["json"], + "syllabus_id": 1, + "version": 1, + } + ], + ) def test_syllabus_slug_version__post__autoincrement_version(self): """Test /certificate without auth""" self.headers(academy=1) - syllabus_kwargs = {'slug': 'they-killed-kenny'} + syllabus_kwargs = {"slug": "they-killed-kenny"} slug = self.bc.fake.slug() - asset_alias = {'slug': slug} - model = self.generate_models(authenticate=True, - syllabus_schedule=True, - profile_academy=True, - capability='crud_syllabus', - role='potato', - syllabus=syllabus_kwargs, - syllabus_version=1, - asset_alias=asset_alias) - url = reverse_lazy('admissions:syllabus_slug_version', kwargs={ - 'syllabus_slug': 'they-killed-kenny', - }) - data = {'json': generate_syllabus_json(slug)} - response = self.client.post(url, data, format='json') + asset_alias = {"slug": slug} + model = self.generate_models( + authenticate=True, + syllabus_schedule=True, + profile_academy=True, + capability="crud_syllabus", + role="potato", + syllabus=syllabus_kwargs, + syllabus_version=1, + asset_alias=asset_alias, + ) + url = reverse_lazy( + "admissions:syllabus_slug_version", + kwargs={ + "syllabus_slug": "they-killed-kenny", + }, + ) + data = {"json": generate_syllabus_json(slug)} + response = self.client.post(url, data, format="json") json = response.json() expected = { - 'syllabus': 1, - 'change_log_details': None, - 'status': 'PUBLISHED', - 'version': model.syllabus_version.version + 1, + "syllabus": 1, + "change_log_details": None, + "status": "PUBLISHED", + "version": model.syllabus_version.version + 1, **data, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - self.assertEqual(self.all_syllabus_version_dict(), [{ - **self.model_to_dict(model, 'syllabus_version') - }, { - 'id': 2, - 'integrity_check_at': None, - 'integrity_report': None, - 'integrity_status': 'PENDING', - 'change_log_details': None, - 'status': 'PUBLISHED', - 'json': {}, - 'syllabus_id': 1, - 'version': model.syllabus_version.version + 1, - **data, - }]) + self.assertEqual( + self.all_syllabus_version_dict(), + [ + {**self.model_to_dict(model, "syllabus_version")}, + { + "id": 2, + "integrity_check_at": None, + "integrity_report": None, + "integrity_status": "PENDING", + "change_log_details": None, + "status": "PUBLISHED", + "json": {}, + "syllabus_id": 1, + "version": model.syllabus_version.version + 1, + **data, + }, + ], + ) diff --git a/breathecode/admissions/tests/urls/tests_syllabus_slug_version_version.py 
b/breathecode/admissions/tests/urls/tests_syllabus_slug_version_version.py index e1b833331..34a638e77 100644 --- a/breathecode/admissions/tests/urls/tests_syllabus_slug_version_version.py +++ b/breathecode/admissions/tests/urls/tests_syllabus_slug_version_version.py @@ -1,6 +1,7 @@ """ Test /certificate """ + from unittest.mock import patch, MagicMock from breathecode.services import datetime_to_iso_format from django.urls.base import reverse_lazy @@ -17,18 +18,16 @@ class CertificateTestSuite(AdmissionsTestCase): def test_syllabus_slug_version_version_without_auth(self): """Test /certificate without auth""" self.headers(academy=1) - url = reverse_lazy('admissions:syllabus_slug_version_version', - kwargs={ - 'syllabus_slug': 'they-killed-kenny', - 'version': 1 - }) + url = reverse_lazy( + "admissions:syllabus_slug_version_version", kwargs={"syllabus_slug": "they-killed-kenny", "version": 1} + ) response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED - }) + self.assertEqual( + json, + {"detail": "Authentication credentials were not provided.", "status_code": status.HTTP_401_UNAUTHORIZED}, + ) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) self.assertEqual(self.all_syllabus_schedule_dict(), []) self.assertEqual(self.all_cohort_time_slot_dict(), []) @@ -36,18 +35,15 @@ def test_syllabus_slug_version_version_without_auth(self): def test_syllabus_slug_version_version_without_capability(self): """Test /certificate without auth""" self.headers(academy=1) - url = reverse_lazy('admissions:syllabus_slug_version_version', - kwargs={ - 'syllabus_slug': 'they-killed-kenny', - 'version': 1 - }) + url = reverse_lazy( + "admissions:syllabus_slug_version_version", kwargs={"syllabus_slug": "they-killed-kenny", "version": 1} + ) self.generate_models(authenticate=True) response = self.client.get(url) json = response.json() expected = { - 'status_code': 403, - 'detail': 'You (user: 1) don\'t have this capability: read_syllabus ' - 'for academy 1' + "status_code": 403, + "detail": "You (user: 1) don't have this capability: read_syllabus " "for academy 1", } self.assertEqual(json, expected) @@ -58,19 +54,15 @@ def test_syllabus_slug_version_version_without_capability(self): def test_syllabus_slug_version_version_without_syllabus(self): """Test /certificate without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - syllabus_schedule=True, - profile_academy=True, - capability='read_syllabus', - role='potato') - url = reverse_lazy('admissions:syllabus_slug_version_version', - kwargs={ - 'syllabus_slug': 'they-killed-kenny', - 'version': 1 - }) + model = self.generate_models( + authenticate=True, syllabus_schedule=True, profile_academy=True, capability="read_syllabus", role="potato" + ) + url = reverse_lazy( + "admissions:syllabus_slug_version_version", kwargs={"syllabus_slug": "they-killed-kenny", "version": 1} + ) response = self.client.get(url) json = response.json() - expected = {'detail': 'syllabus-version-not-found', 'status_code': 404} + expected = {"detail": "syllabus-version-not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) @@ -79,56 +71,57 @@ def test_syllabus_slug_version_version_without_syllabus(self): self.assertEqual(self.all_syllabus_version_dict(), []) self.assertEqual(self.all_cohort_time_slot_dict(), []) - 
@patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_syllabus_slug_version_version(self): """Test /certificate without auth""" self.headers(academy=1) - syllabus_kwargs = {'slug': 'they-killed-kenny'} - model = self.generate_models(authenticate=True, - syllabus_schedule=True, - profile_academy=True, - capability='read_syllabus', - role='potato', - syllabus=True, - syllabus_version=True, - syllabus_kwargs=syllabus_kwargs) - url = reverse_lazy('admissions:syllabus_slug_version_version', - kwargs={ - 'syllabus_slug': 'they-killed-kenny', - 'version': model.syllabus_version.version - }) + syllabus_kwargs = {"slug": "they-killed-kenny"} + model = self.generate_models( + authenticate=True, + syllabus_schedule=True, + profile_academy=True, + capability="read_syllabus", + role="potato", + syllabus=True, + syllabus_version=True, + syllabus_kwargs=syllabus_kwargs, + ) + url = reverse_lazy( + "admissions:syllabus_slug_version_version", + kwargs={"syllabus_slug": "they-killed-kenny", "version": model.syllabus_version.version}, + ) response = self.client.get(url) json = response.json() expected = { - 'json': model['syllabus_version'].json, - 'created_at': datetime_to_iso_format(model['syllabus_version'].created_at), - 'updated_at': datetime_to_iso_format(model['syllabus_version'].updated_at), - 'name': model['syllabus'].name, - 'slug': model['syllabus'].slug, - 'academy_owner': { - 'id': model['syllabus'].academy_owner.id, - 'name': model['syllabus'].academy_owner.name, - 'slug': model['syllabus'].academy_owner.slug, - 'white_labeled': model['syllabus'].academy_owner.white_labeled, - 'icon_url': model['syllabus'].academy_owner.icon_url, - 'available_as_saas': model['syllabus'].academy_owner.available_as_saas, + "json": model["syllabus_version"].json, + "created_at": datetime_to_iso_format(model["syllabus_version"].created_at), + "updated_at": datetime_to_iso_format(model["syllabus_version"].updated_at), + "name": model["syllabus"].name, + "slug": model["syllabus"].slug, + "academy_owner": { + "id": model["syllabus"].academy_owner.id, + "name": model["syllabus"].academy_owner.name, + "slug": model["syllabus"].academy_owner.slug, + "white_labeled": model["syllabus"].academy_owner.white_labeled, + "icon_url": model["syllabus"].academy_owner.icon_url, + "available_as_saas": model["syllabus"].academy_owner.available_as_saas, }, - 'syllabus': 1, - 'version': model['syllabus_version'].version, - 'duration_in_days': model.syllabus.duration_in_days, - 'duration_in_hours': model.syllabus.duration_in_hours, - 'github_url': model.syllabus.github_url, - 'logo': model.syllabus.logo, - 'private': model.syllabus.private, - 'status': 'PUBLISHED', - 'main_technologies': None, - 'change_log_details': None, - 'week_hours': model.syllabus.week_hours, + "syllabus": 1, + "version": model["syllabus_version"].version, + "duration_in_days": model.syllabus.duration_in_days, + "duration_in_hours": model.syllabus.duration_in_hours, + "github_url": model.syllabus.github_url, + "logo": model.syllabus.logo, + "private": model.syllabus.private, + "status": "PUBLISHED", + "main_technologies": None, + "change_log_details": None, + "week_hours": model.syllabus.week_hours, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, 'syllabus')}]) - self.assertEqual(self.all_syllabus_version_dict(), [{**self.model_to_dict(model, 
'syllabus_version')}]) + self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, "syllabus")}]) + self.assertEqual(self.all_syllabus_version_dict(), [{**self.model_to_dict(model, "syllabus_version")}]) self.assertEqual(self.all_cohort_time_slot_dict(), []) diff --git a/breathecode/admissions/tests/urls/tests_syllabus_version.py b/breathecode/admissions/tests/urls/tests_syllabus_version.py index 65bf50f14..c2fcf973f 100644 --- a/breathecode/admissions/tests/urls/tests_syllabus_version.py +++ b/breathecode/admissions/tests/urls/tests_syllabus_version.py @@ -1,6 +1,7 @@ """ Test /certificate """ + from unittest.mock import patch, MagicMock from breathecode.services import datetime_to_iso_format from django.urls.base import reverse_lazy @@ -13,93 +14,93 @@ def get_serializer(syllabus_version, syllabus): return { - 'json': syllabus_version.json, - 'created_at': datetime_to_iso_format(syllabus_version.created_at), - 'updated_at': datetime_to_iso_format(syllabus_version.updated_at), - 'name': syllabus.name, - 'slug': syllabus.slug, - 'academy_owner': { - 'id': syllabus.academy_owner.id, - 'name': syllabus.academy_owner.name, - 'slug': syllabus.academy_owner.slug, - 'white_labeled': syllabus.academy_owner.white_labeled, - 'icon_url': syllabus.academy_owner.icon_url, - 'available_as_saas': syllabus.academy_owner.available_as_saas, + "json": syllabus_version.json, + "created_at": datetime_to_iso_format(syllabus_version.created_at), + "updated_at": datetime_to_iso_format(syllabus_version.updated_at), + "name": syllabus.name, + "slug": syllabus.slug, + "academy_owner": { + "id": syllabus.academy_owner.id, + "name": syllabus.academy_owner.name, + "slug": syllabus.academy_owner.slug, + "white_labeled": syllabus.academy_owner.white_labeled, + "icon_url": syllabus.academy_owner.icon_url, + "available_as_saas": syllabus.academy_owner.available_as_saas, }, - 'syllabus': syllabus.id, - 'version': syllabus_version.version, - 'duration_in_days': syllabus.duration_in_days, - 'duration_in_hours': syllabus.duration_in_hours, - 'github_url': syllabus.github_url, - 'logo': syllabus.logo, - 'private': syllabus.private, - 'status': syllabus_version.status, - 'main_technologies': syllabus.main_technologies, - 'change_log_details': syllabus_version.change_log_details, - 'week_hours': syllabus.week_hours, + "syllabus": syllabus.id, + "version": syllabus_version.version, + "duration_in_days": syllabus.duration_in_days, + "duration_in_hours": syllabus.duration_in_hours, + "github_url": syllabus.github_url, + "logo": syllabus.logo, + "private": syllabus.private, + "status": syllabus_version.status, + "main_technologies": syllabus.main_technologies, + "change_log_details": syllabus_version.change_log_details, + "week_hours": syllabus.week_hours, } class SyllabusVersionTestSuite(AdmissionsTestCase): """Test /certificate""" - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_syllabus_slug_version(self): """Test /certificate without auth""" - syllabus_kwargs = {'slug': 'they-killed-kenny'} + syllabus_kwargs = {"slug": "they-killed-kenny"} model = self.generate_models(syllabus=True, syllabus_version=True, syllabus_kwargs=syllabus_kwargs) - url = reverse_lazy('admissions:syllabus_version') + url = reverse_lazy("admissions:syllabus_version") response = self.client.get(url) json = response.json() - expected = [get_serializer(model['syllabus_version'], model['syllabus'])] + expected = 
[get_serializer(model["syllabus_version"], model["syllabus"])] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, 'syllabus')}]) - self.assertEqual(self.all_syllabus_version_dict(), [{**self.model_to_dict(model, 'syllabus_version')}]) + self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, "syllabus")}]) + self.assertEqual(self.all_syllabus_version_dict(), [{**self.model_to_dict(model, "syllabus_version")}]) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_syllabus_slug_version_with_is_documentation_filter(self): """Test /certificate without auth""" - syllabus_kwargs = {'slug': 'they-killed-kenny', 'is_documentation': True} + syllabus_kwargs = {"slug": "they-killed-kenny", "is_documentation": True} model = self.generate_models(syllabus=True, syllabus_version=True, syllabus_kwargs=syllabus_kwargs) - base_url = reverse_lazy('admissions:syllabus_version') - url = f'{base_url}?is_documentation=True' + base_url = reverse_lazy("admissions:syllabus_version") + url = f"{base_url}?is_documentation=True" response = self.client.get(url) json = response.json() - expected = [get_serializer(model['syllabus_version'], model['syllabus'])] + expected = [get_serializer(model["syllabus_version"], model["syllabus"])] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, 'syllabus')}]) - self.assertEqual(self.all_syllabus_version_dict(), [{**self.model_to_dict(model, 'syllabus_version')}]) + self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, "syllabus")}]) + self.assertEqual(self.all_syllabus_version_dict(), [{**self.model_to_dict(model, "syllabus_version")}]) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_syllabus_slug_version_with_academy_filter(self): """Test /certificate without auth""" - syllabus_kwargs = {'slug': 'they-killed-kenny', 'is_documentation': True} + syllabus_kwargs = {"slug": "they-killed-kenny", "is_documentation": True} model = self.generate_models(syllabus=True, syllabus_version=True, syllabus_kwargs=syllabus_kwargs) - base_url = reverse_lazy('admissions:syllabus_version') - url = f'{base_url}?is_documentation=True&academy=1,2,3' + base_url = reverse_lazy("admissions:syllabus_version") + url = f"{base_url}?is_documentation=True&academy=1,2,3" response = self.client.get(url) json = response.json() - expected = [get_serializer(model['syllabus_version'], model['syllabus'])] + expected = [get_serializer(model["syllabus_version"], model["syllabus"])] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, 'syllabus')}]) - self.assertEqual(self.all_syllabus_version_dict(), [{**self.model_to_dict(model, 'syllabus_version')}]) + self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, "syllabus")}]) + self.assertEqual(self.all_syllabus_version_dict(), [{**self.model_to_dict(model, "syllabus_version")}]) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_syllabus_slug_version_with_wrong_academy_filter(self): 
"""Test /certificate without auth""" - syllabus_kwargs = {'slug': 'they-killed-kenny', 'is_documentation': True} + syllabus_kwargs = {"slug": "they-killed-kenny", "is_documentation": True} model = self.generate_models(syllabus=True, syllabus_version=True, syllabus_kwargs=syllabus_kwargs) - base_url = reverse_lazy('admissions:syllabus_version') - url = f'{base_url}?is_documentation=True&academy=2,3' + base_url = reverse_lazy("admissions:syllabus_version") + url = f"{base_url}?is_documentation=True&academy=2,3" response = self.client.get(url) json = response.json() expected = [] @@ -107,5 +108,5 @@ def test_syllabus_slug_version_with_wrong_academy_filter(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, 'syllabus')}]) - self.assertEqual(self.all_syllabus_version_dict(), [{**self.model_to_dict(model, 'syllabus_version')}]) + self.assertEqual(self.all_syllabus_dict(), [{**self.model_to_dict(model, "syllabus")}]) + self.assertEqual(self.all_syllabus_version_dict(), [{**self.model_to_dict(model, "syllabus_version")}]) diff --git a/breathecode/admissions/tests/urls/tests_user.py b/breathecode/admissions/tests/urls/tests_user.py index a2374fd34..b03736d80 100644 --- a/breathecode/admissions/tests/urls/tests_user.py +++ b/breathecode/admissions/tests/urls/tests_user.py @@ -1,6 +1,7 @@ """ Test /academy/cohort """ + import re from datetime import datetime from unittest.mock import patch @@ -18,36 +19,36 @@ class AcademyCohortTestSuite(AdmissionsTestCase): """Test /academy/cohort""" - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_user_post_without_authorization(self): """Test /academy/cohort without auth""" - url = reverse_lazy('admissions:user') + url = reverse_lazy("admissions:user") data = {} response = self.client.put(url, data) json = response.json() - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_user_post_without_data(self): """Test /academy/cohort without auth""" model = self.generate_models(authenticate=True) model_dict = self.get_user_dict(1) - url = reverse_lazy('admissions:user') + url = reverse_lazy("admissions:user") data = {} response = self.client.put(url, data) json = response.json() expected = { - 'id': model.user.id, - 'first_name': model.user.first_name, - 'last_name': model.user.last_name, - 'email': model.user.email, + "id": model.user.id, + "first_name": model.user.first_name, + "last_name": model.user.last_name, + 
"email": model.user.email, } self.assertEqual(json, expected) @@ -55,25 +56,25 @@ def test_user_post_without_data(self): self.assertEqual(self.count_user(), 1) self.assertEqual(self.get_user_dict(1), model_dict) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_user_post(self): """Test /academy/cohort without auth""" model = self.generate_models(authenticate=True, user=True) model_dict = self.get_user_dict(1) - url = reverse_lazy('admissions:user') + url = reverse_lazy("admissions:user") data = { - 'first_name': 'Socrates', - 'last_name': 'Aristoteles', + "first_name": "Socrates", + "last_name": "Aristoteles", } response = self.client.put(url, data) json = response.json() expected = { - 'id': model.user.id, - 'first_name': data['first_name'], - 'last_name': data['last_name'], - 'email': model.user.email, + "id": model.user.id, + "first_name": data["first_name"], + "last_name": data["last_name"], + "email": model.user.email, } model_dict.update(data) diff --git a/breathecode/admissions/urls.py b/breathecode/admissions/urls.py index 1a3309510..7baaea15f 100644 --- a/breathecode/admissions/urls.py +++ b/breathecode/admissions/urls.py @@ -33,102 +33,122 @@ handle_test_syllabus, ) -app_name = 'admissions' +app_name = "admissions" urlpatterns = [ # keep before that academy/cohort/:id - path('academy/cohort/me', CohortMeView.as_view(), name='academy_cohort_me'), - path('public/syllabus', get_public_syllabus), + path("academy/cohort/me", CohortMeView.as_view(), name="academy_cohort_me"), + path("public/syllabus", get_public_syllabus), # deprecated methods, soon to be deleted - path('cohort/all', PublicCohortView.as_view(), name='cohort_all'), - path('cohort/user', CohortUserView.as_view(), name='cohort_user'), - path('cohort//join', CohortJoinView.as_view(), name='cohort_id_join'), - path('cohort//user/', CohortUserView.as_view(), name='cohort_id_user_id'), - path('cohort//user', CohortUserView.as_view(), name='cohort_id_user'), - + path("cohort/all", PublicCohortView.as_view(), name="cohort_all"), + path("cohort/user", CohortUserView.as_view(), name="cohort_user"), + path("cohort//join", CohortJoinView.as_view(), name="cohort_id_join"), + path("cohort//user/", CohortUserView.as_view(), name="cohort_id_user_id"), + path("cohort//user", CohortUserView.as_view(), name="cohort_id_user"), # me - path('me/cohort/user/log', MeCohortUserHistoryView.as_view(), name='me_cohort_user_log'), - path('me/cohort//user/log', MeCohortUserHistoryView.as_view(), name='me_cohort_id_user_log'), - + path("me/cohort/user/log", MeCohortUserHistoryView.as_view(), name="me_cohort_user_log"), + path("me/cohort//user/log", MeCohortUserHistoryView.as_view(), name="me_cohort_id_user_log"), # new endpoints (replacing above) - path('academy/cohort/user', AcademyCohortUserView.as_view(), name='academy_cohort_user'), - path('academy/cohort//log', AcademyCohortHistoryView.as_view(), name='academy_cohort_id_history'), - path('academy/cohort/', AcademyCohortView.as_view(), name='academy_cohort_id'), - path('academy/cohort//user/', - AcademyCohortUserView.as_view(), - name='academy_cohort_id_user_id'), - path('academy/cohort//user', 
AcademyCohortUserView.as_view()), - path('academy/cohort//timeslot', - AcademyCohortTimeSlotView.as_view(), - name='academy_cohort_id_timeslot'), - path('academy/cohort//timeslot/', - AcademyCohortTimeSlotView.as_view(), - name='academy_cohort_id_timeslot_id'), - path('academy/cohort/sync/timeslot', AcademySyncCohortTimeSlotView.as_view(), name='academy_cohort_sync_timeslot'), + path("academy/cohort/user", AcademyCohortUserView.as_view(), name="academy_cohort_user"), + path("academy/cohort//log", AcademyCohortHistoryView.as_view(), name="academy_cohort_id_history"), + path("academy/cohort/", AcademyCohortView.as_view(), name="academy_cohort_id"), + path( + "academy/cohort//user/", + AcademyCohortUserView.as_view(), + name="academy_cohort_id_user_id", + ), + path("academy/cohort//user", AcademyCohortUserView.as_view()), + path( + "academy/cohort//timeslot", + AcademyCohortTimeSlotView.as_view(), + name="academy_cohort_id_timeslot", + ), + path( + "academy/cohort//timeslot/", + AcademyCohortTimeSlotView.as_view(), + name="academy_cohort_id_timeslot_id", + ), + path("academy/cohort/sync/timeslot", AcademySyncCohortTimeSlotView.as_view(), name="academy_cohort_sync_timeslot"), # 🔽 this endpoint is deprecated 🔽 - path('academy/certificate//timeslot', AcademySyllabusScheduleTimeSlotView.as_view()), + path("academy/certificate//timeslot", AcademySyllabusScheduleTimeSlotView.as_view()), # 🔽 this endpoint is deprecated 🔽 - path('academy/certificate//timeslot/', - AcademySyllabusScheduleTimeSlotView.as_view()), - path('academy/schedule//timeslot', - AcademySyllabusScheduleTimeSlotView.as_view(), - name='academy_schedule_id_timeslot'), - path('academy/schedule//timeslot/', - AcademySyllabusScheduleTimeSlotView.as_view(), - name='academy_schedule_id_timeslot_id'), - path('academy/teacher', AcademyTeacherView.as_view(), name='academy_teacher'), - path('academy/', get_all_academies, name='academy'), - path('academy/', get_single_academy, name='single_academy'), - path('academy/me', AcademyView.as_view(), name='academy_me'), - path('academy/cohort', AcademyCohortView.as_view(), name='academy_cohort'), - path('academy/activate', AcademyActivateView.as_view(), name='academy_activate'), - path('user/me', UserMeView.as_view(), name='user_me'), - path('user', UserView.as_view(), name='user'), + path( + "academy/certificate//timeslot/", + AcademySyllabusScheduleTimeSlotView.as_view(), + ), + path( + "academy/schedule//timeslot", + AcademySyllabusScheduleTimeSlotView.as_view(), + name="academy_schedule_id_timeslot", + ), + path( + "academy/schedule//timeslot/", + AcademySyllabusScheduleTimeSlotView.as_view(), + name="academy_schedule_id_timeslot_id", + ), + path("academy/teacher", AcademyTeacherView.as_view(), name="academy_teacher"), + path("academy/", get_all_academies, name="academy"), + path("academy/", get_single_academy, name="single_academy"), + path("academy/me", AcademyView.as_view(), name="academy_me"), + path("academy/cohort", AcademyCohortView.as_view(), name="academy_cohort"), + path("academy/activate", AcademyActivateView.as_view(), name="academy_activate"), + path("user/me", UserMeView.as_view(), name="user_me"), + path("user", UserView.as_view(), name="user"), # 🔽 this endpoint is deprecated 🔽 - path('certificate', SyllabusScheduleView.as_view()), + path("certificate", SyllabusScheduleView.as_view()), # 🔽 this endpoint is deprecated 🔽 - path('certificate//', get_schedule), - path('schedule', SyllabusScheduleView.as_view(), name='schedule'), - path('schedule//', get_schedule, name='schedule_id'), 
+ path("certificate//", get_schedule), + path("schedule", SyllabusScheduleView.as_view(), name="schedule"), + path("schedule//", get_schedule, name="schedule_id"), # 🔽 this endpoint is deprecated 🔽 - path('academy/certificate', AcademySyllabusScheduleView.as_view()), - path('academy/schedule', AcademySyllabusScheduleView.as_view(), name='academy_schedule'), - path('academy/schedule/', AcademySyllabusScheduleView.as_view(), name='academy_schedule_id'), - path('syllabus', SyllabusView.as_view(), name='syllabus'), - path('syllabus/test', handle_test_syllabus), - path('syllabus/', SyllabusView.as_view(), name='syllabus_id'), - path('syllabus//version', SyllabusVersionView.as_view(), name='syllabus_id_version'), - path('syllabus/version', AllSyllabusVersionsView.as_view(), name='syllabus_version'), - path('syllabus//version/', - SyllabusVersionView.as_view(), - name='syllabus_id_version_version'), - path('syllabus/', SyllabusView.as_view(), name='syllabus_slug'), - path('syllabus//version', SyllabusVersionView.as_view(), name='syllabus_slug_version'), - path('syllabus//version/', - SyllabusVersionView.as_view(), - name='syllabus_slug_version_version'), - path('academy//syllabus', SyllabusView.as_view(), name='academy_id_syllabus'), - path('academy//syllabus/', SyllabusView.as_view(), name='academy_id_syllabus_id'), - path('academy//syllabus/', - SyllabusView.as_view(), - name='academy_id_syllabus_slug'), - path('academy//syllabus//version', - SyllabusVersionView.as_view(), - name='academy_id_syllabus_id_version'), - path('academy//syllabus//version/', - SyllabusVersionView.as_view(), - name='academy_id_syllabus_id_version_version'), - path('academy//syllabus//version', - SyllabusVersionView.as_view(), - name='academy_id_syllabus_slug_version'), - path('academy//syllabus//version/', - SyllabusVersionView.as_view(), - name='academy_id_syllabus_slug_version_version'), - path('catalog/timezones', get_timezones, name='timezones_all'), - path('report', AcademyReportView.as_view(), name='report_admissions'), - - #replaces an asset slug in all syllabus versions - path('admin/syllabus/asset/', SyllabusAssetView.as_view(), name='syllabus_asset'), - + path("academy/certificate", AcademySyllabusScheduleView.as_view()), + path("academy/schedule", AcademySyllabusScheduleView.as_view(), name="academy_schedule"), + path("academy/schedule/", AcademySyllabusScheduleView.as_view(), name="academy_schedule_id"), + path("syllabus", SyllabusView.as_view(), name="syllabus"), + path("syllabus/test", handle_test_syllabus), + path("syllabus/", SyllabusView.as_view(), name="syllabus_id"), + path("syllabus//version", SyllabusVersionView.as_view(), name="syllabus_id_version"), + path("syllabus/version", AllSyllabusVersionsView.as_view(), name="syllabus_version"), + path( + "syllabus//version/", + SyllabusVersionView.as_view(), + name="syllabus_id_version_version", + ), + path("syllabus/", SyllabusView.as_view(), name="syllabus_slug"), + path("syllabus//version", SyllabusVersionView.as_view(), name="syllabus_slug_version"), + path( + "syllabus//version/", + SyllabusVersionView.as_view(), + name="syllabus_slug_version_version", + ), + path("academy//syllabus", SyllabusView.as_view(), name="academy_id_syllabus"), + path("academy//syllabus/", SyllabusView.as_view(), name="academy_id_syllabus_id"), + path( + "academy//syllabus/", SyllabusView.as_view(), name="academy_id_syllabus_slug" + ), + path( + "academy//syllabus//version", + SyllabusVersionView.as_view(), + name="academy_id_syllabus_id_version", + ), + path( + 
"academy//syllabus//version/", + SyllabusVersionView.as_view(), + name="academy_id_syllabus_id_version_version", + ), + path( + "academy//syllabus//version", + SyllabusVersionView.as_view(), + name="academy_id_syllabus_slug_version", + ), + path( + "academy//syllabus//version/", + SyllabusVersionView.as_view(), + name="academy_id_syllabus_slug_version_version", + ), + path("catalog/timezones", get_timezones, name="timezones_all"), + path("report", AcademyReportView.as_view(), name="report_admissions"), + # replaces an asset slug in all syllabus versions + path("admin/syllabus/asset/", SyllabusAssetView.as_view(), name="syllabus_asset"), # Public Endpoints anyone can call - path('public/cohort/user', PublicCohortUserView.as_view(), name='public_cohort_user'), + path("public/cohort/user", PublicCohortUserView.as_view(), name="public_cohort_user"), ] diff --git a/breathecode/admissions/utils/cohort_log.py b/breathecode/admissions/utils/cohort_log.py index 6a43ab745..a7dc50639 100644 --- a/breathecode/admissions/utils/cohort_log.py +++ b/breathecode/admissions/utils/cohort_log.py @@ -19,30 +19,32 @@ class CohortDayLog(object): attendance_ids = [] unattendance_ids = [] - def __init__(self, - current_module: str = None, - teacher_comments: str = None, - attendance_ids: list = None, - unattendance_ids: list = None, - updated_at: datetime = None, - allow_empty=False): + def __init__( + self, + current_module: str = None, + teacher_comments: str = None, + attendance_ids: list = None, + unattendance_ids: list = None, + updated_at: datetime = None, + allow_empty=False, + ): if not isinstance(current_module, str) and (not allow_empty and current_module is None): - raise Exception(f'Invalid current module value {str(current_module)}') + raise Exception(f"Invalid current module value {str(current_module)}") if teacher_comments is not None and not isinstance(teacher_comments, str): - raise Exception(f'Invalid teacher comments value {str(teacher_comments)}') + raise Exception(f"Invalid teacher comments value {str(teacher_comments)}") if not isinstance(attendance_ids, list): - raise Exception('Invalid attendance list, it must be an array of integer ids') + raise Exception("Invalid attendance list, it must be an array of integer ids") if not isinstance(unattendance_ids, list): - raise Exception('Invalid unattendance list, it must be an array of integer ids') + raise Exception("Invalid unattendance list, it must be an array of integer ids") if updated_at is None: updated_at = timezone.now() if has_duplicates(attendance_ids): - raise Exception('Attendance list has duplicate user ids') + raise Exception("Attendance list has duplicate user ids") if has_duplicates(unattendance_ids): - raise Exception('Unattendance list has duplicate user ids') + raise Exception("Unattendance list has duplicate user ids") self.current_module = current_module self.teacher_comments = teacher_comments @@ -54,21 +56,22 @@ def __init__(self, def empty(): return CohortDayLog( **{ - 'current_module': None, - 'teacher_comments': None, - 'updated_at': None, - 'attendance_ids': [], - 'unattendance_ids': [], - 'allow_empty': True - }) + "current_module": None, + "teacher_comments": None, + "updated_at": None, + "attendance_ids": [], + "unattendance_ids": [], + "allow_empty": True, + } + ) def serialize(self): return { - 'current_module': self.current_module, - 'teacher_comments': self.teacher_comments, - 'attendance_ids': self.attendance_ids, - 'unattendance_ids': self.unattendance_ids, - 'updated_at': str(self.updated_at), + 
"current_module": self.current_module, + "teacher_comments": self.teacher_comments, + "attendance_ids": self.attendance_ids, + "unattendance_ids": self.unattendance_ids, + "updated_at": str(self.updated_at), } @@ -92,7 +95,7 @@ def __init__(self, cohort): self.cohort.history_log = {} elif not isinstance(self.cohort.history_log, dict): - raise Exception('Cohort history json must be in dictionary format') + raise Exception("Cohort history json must be in dictionary format") for day in range(1, self.cohort.current_day + 1): if str(day) in self.cohort.history_log: @@ -103,24 +106,24 @@ def __init__(self, cohort): def log_day(self, payload, day=None): if not isinstance(payload, dict): - raise Exception('Entry log of cohort day must be a dictionary') + raise Exception("Entry log of cohort day must be a dictionary") if day is None: day = self.cohort.current_day if day == 0: - raise Exception('Invalid log for day index=0, cohort days start at 1') + raise Exception("Invalid log for day index=0, cohort days start at 1") elif day > self.cohort.current_day: raise Exception( - f'You cannot log activity for day {str(day)} because the cohort is currently at day {str(self.cohort.current_day)}' + f"You cannot log activity for day {str(day)} because the cohort is currently at day {str(self.cohort.current_day)}" ) try: self.days[day - 1] = CohortDayLog(**payload) - logger.debug(f'Replaced cohort {self.cohort.slug} log for day {day}') + logger.debug(f"Replaced cohort {self.cohort.slug} log for day {day}") except IndexError: - raise Exception(f'Error adding day {str(day-1)} log to cohort') + raise Exception(f"Error adding day {str(day-1)} log to cohort") def save(self): diff --git a/breathecode/admissions/views.py b/breathecode/admissions/views.py index 2f6194e09..8f3003565 100644 --- a/breathecode/admissions/views.py +++ b/breathecode/admissions/views.py @@ -77,36 +77,36 @@ logger = logging.getLogger(__name__) -@api_view(['GET']) +@api_view(["GET"]) @permission_classes([AllowAny]) def get_timezones(request, id=None): # timezones = [(x, x) for x in pytz.common_timezones] return Response(pytz.common_timezones) -@api_view(['GET']) +@api_view(["GET"]) @permission_classes([AllowAny]) def get_all_academies(request, id=None): items = Academy.objects.all() - status = request.GET.get('status') + status = request.GET.get("status") if status: - items = items.filter(status__in=status.upper().split(',')) + items = items.filter(status__in=status.upper().split(",")) - academy_ids = request.GET.get('academy_id') + academy_ids = request.GET.get("academy_id") if academy_ids: - items = items.filter(id__in=academy_ids.split(',')) + items = items.filter(id__in=academy_ids.split(",")) serializer = AcademySerializer(items, many=True) return Response(serializer.data) -@api_view(['GET']) +@api_view(["GET"]) @permission_classes([AllowAny]) def get_single_academy(request, academy_id=None): item = Academy.objects.filter(id=academy_id).first() if item is None: - raise ValidationException(f'Academy {academy_id} not found', slug='academy-not-found') + raise ValidationException(f"Academy {academy_id} not found", slug="academy-not-found") serializer = GetBigAcademySerializer(item) return Response(serializer.data) @@ -117,7 +117,7 @@ class AcademyTeacherView(APIView, GenerateLookupsMixin): extensions = APIViewExtensions(cache=TeacherCache, paginate=True) - @capable_of('read_member') + @capable_of("read_member") def get(self, request, academy_id): handler = self.extensions(request) @@ -125,37 +125,38 @@ def get(self, request, academy_id): if 
cache is not None: return cache - items = ProfileAcademy.objects.filter(academy__id=academy_id, - role__slug__in=['teacher', - 'assistant']).exclude(user__email__contains='@token.com') + items = ProfileAcademy.objects.filter(academy__id=academy_id, role__slug__in=["teacher", "assistant"]).exclude( + user__email__contains="@token.com" + ) - roles = request.GET.get('roles', None) + roles = request.GET.get("roles", None) if roles is not None: - items = items.filter(role__slug__in=roles.split(',')) + items = items.filter(role__slug__in=roles.split(",")) - _status = request.GET.get('status', None) + _status = request.GET.get("status", None) if _status is not None: items = items.filter(status__iexact=_status) - cohort_stage = request.GET.get('cohort_stage', None) + cohort_stage = request.GET.get("cohort_stage", None) no_sort = [] if cohort_stage is not None: - no_sort.append('cohort_stage') - items = items.filter(user__cohortuser__cohort__stage__iexact=cohort_stage).distinct('user') + no_sort.append("cohort_stage") + items = items.filter(user__cohortuser__cohort__stage__iexact=cohort_stage).distinct("user") - like = request.GET.get('like', None) + like = request.GET.get("like", None) if like is not None: items = query_like_by_full_name(like=like, items=items) - sort = request.GET.get('sort', None) - if (sort is None or sort == '') and len(no_sort) == 0: - sort = '-first_name' + sort = request.GET.get("sort", None) + if (sort is None or sort == "") and len(no_sort) == 0: + sort = "-first_name" if len(no_sort) > 0 and sort: - raise ValidationException('No sorting allowed when following filters are applied: ' + ','.join(no_sort), - slug='no-sorting-allowed') - elif 'latest_cohorts' in sort: - items = items.annotate(latest_cohorts=Max('user__cohortuser__cohort__ending_date')) + raise ValidationException( + "No sorting allowed when following filters are applied: " + ",".join(no_sort), slug="no-sorting-allowed" + ) + elif "latest_cohorts" in sort: + items = items.annotate(latest_cohorts=Max("user__cohortuser__cohort__ending_date")) if sort is not None: items = items.order_by(sort) @@ -165,27 +166,27 @@ def get(self, request, academy_id): return handler.response(serializer.data) -@api_view(['POST']) +@api_view(["POST"]) @permission_classes([AllowAny]) def handle_test_syllabus(request): try: - ignore = request.GET.get('ignore', '') - syllabus_log = test_syllabus(request.data, validate_assets=True, ignore=ignore.lower().split(',')) + ignore = request.GET.get("ignore", "") + syllabus_log = test_syllabus(request.data, validate_assets=True, ignore=ignore.lower().split(",")) return Response(syllabus_log.serialize(), status=syllabus_log.http_status()) except Exception as e: - return Response({'details': str(e)}, status=400) + return Response({"details": str(e)}, status=400) -@api_view(['GET']) +@api_view(["GET"]) @permission_classes([AllowAny]) def get_public_syllabus(request, id=None): items = Syllabus.objects.filter(private=False) - slug = request.GET.get('slug', None) + slug = request.GET.get("slug", None) if slug is not None: - items = items.filter(slug__in=slug.split(',')) + items = items.filter(slug__in=slug.split(",")) - like = request.GET.get('like', None) + like = request.GET.get("like", None) if like is not None: items = items.filter(Q(name__icontains=like) | Q(slug__icontains=like)) @@ -195,7 +196,7 @@ def get_public_syllabus(request, id=None): class PublicCohortView(APIView): permission_classes = [AllowAny] - extensions = APIViewExtensions(cache=CohortCache, paginate=True, sort='-kickoff_date') + 
extensions = APIViewExtensions(cache=CohortCache, paginate=True, sort="-kickoff_date") def get(self, request, id=None): handler = self.extensions(request) @@ -204,105 +205,106 @@ def get(self, request, id=None): if cache is not None: return cache - items = Cohort.objects.filter(private=False).select_related('syllabus_version__syllabus') + items = Cohort.objects.filter(private=False).select_related("syllabus_version__syllabus") - items = items.annotate(longitude=Value(None, output_field=FloatField()), - latitude=Value(None, output_field=FloatField())) + items = items.annotate( + longitude=Value(None, output_field=FloatField()), latitude=Value(None, output_field=FloatField()) + ) - upcoming = request.GET.get('upcoming', None) - if upcoming == 'true': + upcoming = request.GET.get("upcoming", None) + if upcoming == "true": now = timezone.now() items = items.filter(Q(kickoff_date__gte=now) | Q(never_ends=True)) - never_ends = request.GET.get('never_ends', None) - if never_ends == 'false': + never_ends = request.GET.get("never_ends", None) + if never_ends == "false": items = items.filter(never_ends=False) - academy = request.GET.get('academy', None) + academy = request.GET.get("academy", None) if academy is not None: - items = items.filter(academy__slug__in=academy.split(',')) + items = items.filter(academy__slug__in=academy.split(",")) - location = request.GET.get('location', None) + location = request.GET.get("location", None) if location is not None: - items = items.filter(academy__slug__in=location.split(',')) + items = items.filter(academy__slug__in=location.split(",")) - ids = request.GET.get('id', None) + ids = request.GET.get("id", None) if ids is not None: - items = items.filter(id__in=ids.split(',')) + items = items.filter(id__in=ids.split(",")) - slugs = request.GET.get('slug', None) + slugs = request.GET.get("slug", None) if slugs is not None: - items = items.filter(slug__in=slugs.split(',')) + items = items.filter(slug__in=slugs.split(",")) - stage = request.GET.get('stage') + stage = request.GET.get("stage") if stage: - items = items.filter(stage__in=stage.upper().split(',')) + items = items.filter(stage__in=stage.upper().split(",")) else: - items = items.exclude(stage='DELETED') + items = items.exclude(stage="DELETED") - if coordinates := request.GET.get('coordinates', ''): + if coordinates := request.GET.get("coordinates", ""): try: - latitude, longitude = coordinates.split(',') + latitude, longitude = coordinates.split(",") latitude = float(latitude) longitude = float(longitude) except Exception: - raise ValidationException('Bad coordinates, the format is latitude,longitude', slug='bad-coordinates') + raise ValidationException("Bad coordinates, the format is latitude,longitude", slug="bad-coordinates") if latitude > 90 or latitude < -90: - raise ValidationException('Bad latitude', slug='bad-latitude') + raise ValidationException("Bad latitude", slug="bad-latitude") if longitude > 180 or longitude < -180: - raise ValidationException('Bad longitude', slug='bad-longitude') + raise ValidationException("Bad longitude", slug="bad-longitude") items = items.annotate(longitude=Value(longitude, FloatField()), latitude=Value(latitude, FloatField())) - saas = request.GET.get('saas', '').lower() - if saas == 'true': + saas = request.GET.get("saas", "").lower() + if saas == "true": items = items.filter(academy__available_as_saas=True) - elif saas == 'false': + elif saas == "false": items = items.filter(academy__available_as_saas=False) - syllabus_slug = request.GET.get('syllabus_slug', '') + 
syllabus_slug = request.GET.get("syllabus_slug", "") if syllabus_slug: - items = items.filter(syllabus_version__syllabus__slug__in=syllabus_slug.split(',')) + items = items.filter(syllabus_version__syllabus__slug__in=syllabus_slug.split(",")) - syllabus_slug_like = request.GET.get('syllabus_slug_like', '') + syllabus_slug_like = request.GET.get("syllabus_slug_like", "") if syllabus_slug_like: items = items.filter(syllabus_version__syllabus__slug__icontains=syllabus_slug_like) - plan = request.GET.get('plan', '') - if plan == 'true': + plan = request.GET.get("plan", "") + if plan == "true": items = items.filter(academy__main_currency__isnull=False, cohortset__isnull=False).distinct() - elif plan == 'false': + elif plan == "false": items = items.filter().exclude(cohortset__isnull=True).distinct() elif plan: kwargs = {} if isinstance(plan, int) or plan.isnumeric(): - kwargs['cohortset__plan__id'] = plan + kwargs["cohortset__plan__id"] = plan else: - kwargs['cohortset__plan__slug'] = plan + kwargs["cohortset__plan__slug"] = plan items = items.filter(**kwargs).distinct() items = handler.queryset(items) serializer = PublicCohortSerializer(items, many=True) - data = sorted(serializer.data, key=lambda x: x['distance'] or float('inf')) if coordinates else serializer.data + data = sorted(serializer.data, key=lambda x: x["distance"] or float("inf")) if coordinates else serializer.data return handler.response(data) class AcademyReportView(APIView): - @capable_of('academy_reporting') + @capable_of("academy_reporting") def get(self, request, academy_id=None): academy = Academy.objects.filter(id=academy_id).first() if academy is None: - raise ValidationError('Academy not found', slug='academy-not-found') + raise ValidationError("Academy not found", slug="academy-not-found") users = AcademyReportSerializer(academy) return Response(users.data) @@ -310,12 +312,12 @@ def get(self, request, academy_id=None): class AcademyActivateView(APIView): - @capable_of('academy_activate') + @capable_of("academy_activate") def put(self, request, academy_id=None): academy = Academy.objects.filter(id=academy_id).first() - academy.status = 'ACTIVE' + academy.status = "ACTIVE" academy.save() serializer = GetAcademyWithStatusSerializer(academy) @@ -334,7 +336,7 @@ def get(self, request, format=None): try: if isinstance(request.user, AnonymousUser): - raise PermissionDenied('There is not user') + raise PermissionDenied("There is not user") except User.DoesNotExist: raise PermissionDenied("You don't have a user") @@ -349,13 +351,13 @@ def get(self, request, format=None): class AcademyView(APIView): """List all snippets, or create a new snippet.""" - @capable_of('read_my_academy') + @capable_of("read_my_academy") def get(self, request, format=None, academy_id=None): item = Academy.objects.get(id=academy_id) serializer = GetBigAcademySerializer(item) return Response(serializer.data) - @capable_of('crud_my_academy') + @capable_of("crud_my_academy") def put(self, request, format=None, academy_id=None): academy = Academy.objects.get(id=academy_id) data = {} @@ -375,7 +377,7 @@ class UserView(APIView): permission_classes = [IsAuthenticated] def put(self, request): - serializer = UserDJangoRestSerializer(request.user, data=request.data, context={'request': request}) + serializer = UserDJangoRestSerializer(request.user, data=request.data, context={"request": request}) if serializer.is_valid(): serializer.save() return Response(serializer.data, status=status.HTTP_200_OK) @@ -397,29 +399,29 @@ def get(self, request): items = 
CohortUser.objects.all() - roles = request.GET.get('roles', None) + roles = request.GET.get("roles", None) if roles is not None: - items = items.filter(role__in=roles.upper().split(',')) + items = items.filter(role__in=roles.upper().split(",")) - finantial_status = request.GET.get('finantial_status', None) + finantial_status = request.GET.get("finantial_status", None) if finantial_status is not None: - items = items.filter(finantial_status__in=finantial_status.upper().split(',')) + items = items.filter(finantial_status__in=finantial_status.upper().split(",")) - educational_status = request.GET.get('educational_status', None) + educational_status = request.GET.get("educational_status", None) if educational_status is not None: - items = items.filter(educational_status__in=educational_status.upper().split(',')) + items = items.filter(educational_status__in=educational_status.upper().split(",")) - academy = request.GET.get('academy', None) + academy = request.GET.get("academy", None) if academy is not None: - items = items.filter(cohort__academy__slug__in=academy.lower().split(',')) + items = items.filter(cohort__academy__slug__in=academy.lower().split(",")) - cohorts = request.GET.get('cohorts', None) + cohorts = request.GET.get("cohorts", None) if cohorts is not None: - items = items.filter(cohort__slug__in=cohorts.split(',')) + items = items.filter(cohort__slug__in=cohorts.split(",")) - users = request.GET.get('users', None) + users = request.GET.get("users", None) if users is not None: - items = items.filter(user__id__in=users.split(',')) + items = items.filter(user__id__in=users.split(",")) items = handler.queryset(items) serializer = GetCohortUserSerializer(items, many=True) @@ -429,36 +431,36 @@ def post(self, request, cohort_id=None, user_id=None): def validate_data(data): if user_id: - data['user'] = user_id + data["user"] = user_id if cohort_id: - data['cohort'] = cohort_id + data["cohort"] = cohort_id try: - data['user'] = int(data['user']) + data["user"] = int(data["user"]) except Exception: ... try: - data['cohort'] = int(data['cohort']) + data["cohort"] = int(data["cohort"]) except Exception: ... 
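# A minimal standalone illustration of the comma-separated querystring filtering used by the
# GET handlers in these views (e.g. items = items.filter(role__in=roles.upper().split(","))).
# The helper name apply_csv_filter is illustrative only, not breathecode's API:

def apply_csv_filter(queryset, request, param, field, transform=str):
    """Apply field__in=<values> when `param` is present as a comma-separated querystring value."""
    raw = request.GET.get(param)
    if raw is None:
        return queryset
    values = [transform(value) for value in raw.split(",") if value]
    return queryset.filter(**{f"{field}__in": values})

# Usage sketch mirroring CohortUserView.get:
#   items = apply_csv_filter(items, request, "roles", "role", transform=str.upper)
#   items = apply_csv_filter(items, request, "users", "user__id", transform=int)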
- if 'user' not in data or 'cohort' not in data: - raise ValidationException('Missing cohort or user in the request', code=400) + if "user" not in data or "cohort" not in data: + raise ValidationException("Missing cohort or user in the request", code=400) - if not User.objects.filter(id=int(data['user'])).exists(): - raise ValidationException('User not found', code=400) + if not User.objects.filter(id=int(data["user"])).exists(): + raise ValidationException("User not found", code=400) - if not Cohort.objects.filter(id=int(data['cohort'])).exists(): - raise ValidationException('Cohort not found', code=400) + if not Cohort.objects.filter(id=int(data["cohort"])).exists(): + raise ValidationException("Cohort not found", code=400) return data many = isinstance(request.data, list) context = { - 'request': request, - 'index': -1, + "request": request, + "index": -1, } data = [validate_data(data) for data in request.data] if many else validate_data(request.data) @@ -474,49 +476,49 @@ def validate_data(data): def put(self, request, cohort_id=None, user_id=None): def validate_data(data, many): - validate_data.__dict__['index'] += 1 + validate_data.__dict__["index"] += 1 instance = None if user_id: - data['user'] = user_id + data["user"] = user_id if cohort_id: - data['cohort'] = cohort_id + data["cohort"] = cohort_id - if 'user' not in data and 'cohort' not in data and 'id' not in data: - raise ValidationException('Missing cohort_id, user_id and id', code=400) + if "user" not in data and "cohort" not in data and "id" not in data: + raise ValidationException("Missing cohort_id, user_id and id", code=400) - id = data.get('id') + id = data.get("id") if isinstance(id, int) and (instance := CohortUser.objects.filter(id=id).first()): - data['id'] = instance.id - data['cohort'] = instance.cohort.id - data['user'] = instance.user.id + data["id"] = instance.id + data["cohort"] = instance.cohort.id + data["user"] = instance.user.id return data, instance - user = data.get('user') - cohort = data.get('cohort') + user = data.get("user") + cohort = data.get("cohort") if not id: try: user = int(user) - data['user'] = user + data["user"] = user except Exception: - raise ValidationException('invalid user_id', code=400) + raise ValidationException("invalid user_id", code=400) try: cohort = int(cohort) - data['cohort'] = cohort + data["cohort"] = cohort except Exception: - raise ValidationException('invalid cohort_id', code=400) + raise ValidationException("invalid cohort_id", code=400) if instance := CohortUser.objects.filter(cohort__id=cohort, user__id=user).first(): - data['id'] = instance.id - data['cohort'] = instance.cohort.id - data['user'] = instance.user.id + data["id"] = instance.id + data["cohort"] = instance.cohort.id + data["user"] = instance.user.id return data, instance - message = 'Cannot determine CohortUser' + message = "Cannot determine CohortUser" if many: message += f" in index {validate_data.__dict__['index']}" @@ -524,12 +526,12 @@ def validate_data(data, many): # many raise ValidationException(message) - validate_data.__dict__['index'] = -1 + validate_data.__dict__["index"] = -1 many = isinstance(request.data, list) context = { - 'request': request, - 'index': -1, + "request": request, + "index": -1, } if many: @@ -556,15 +558,15 @@ def validate_data(data, many): return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) def delete(self, request, cohort_id=None, user_id=None): - lookups = self.generate_lookups(request, many_fields=['id']) + lookups = self.generate_lookups(request, 
many_fields=["id"]) if lookups and (user_id or cohort_id): raise ValidationException( - 'user_id or cohort_id was provided in url ' - 'in bulk mode request, use querystring style instead', - code=400) + "user_id or cohort_id was provided in url " "in bulk mode request, use querystring style instead", + code=400, + ) - academy_ids = ProfileAcademy.objects.filter(user=request.user).values_list('academy__id', flat=True) + academy_ids = ProfileAcademy.objects.filter(user=request.user).values_list("academy__id", flat=True) if lookups: items = CohortUser.objects.filter(**lookups, cohort__academy__id__in=academy_ids) @@ -575,12 +577,13 @@ def delete(self, request, cohort_id=None, user_id=None): return Response(None, status=status.HTTP_204_NO_CONTENT) if cohort_id is None or user_id is None: - raise ValidationException('Missing user_id or cohort_id', code=400) + raise ValidationException("Missing user_id or cohort_id", code=400) - cu = CohortUser.objects.filter(user__id=user_id, cohort__id=cohort_id, - cohort__academy__id__in=academy_ids).first() + cu = CohortUser.objects.filter( + user__id=user_id, cohort__id=cohort_id, cohort__academy__id__in=academy_ids + ).first() if cu is None: - raise ValidationException('Specified cohort and user could not be found') + raise ValidationException("Specified cohort and user could not be found") cu.delete() return Response(None, status=status.HTTP_204_NO_CONTENT) @@ -589,7 +592,7 @@ def delete(self, request, cohort_id=None, user_id=None): class AcademyCohortUserView(APIView, GenerateLookupsMixin): extensions = APIViewExtensions(cache=CohortUserCache, paginate=True) - @capable_of('read_all_cohort') + @capable_of("read_all_cohort") def get(self, request, format=None, cohort_id=None, user_id=None, academy_id=None): handler = self.extensions(request) @@ -599,98 +602,104 @@ def get(self, request, format=None, cohort_id=None, user_id=None, academy_id=Non return cache if user_id is not None: - item = CohortUser.objects.filter(cohort__academy__id=academy_id, user__id=user_id, - cohort__id=cohort_id).first() + item = CohortUser.objects.filter( + cohort__academy__id=academy_id, user__id=user_id, cohort__id=cohort_id + ).first() if item is None: - raise ValidationException('Cohort user not found', 404) - tasks = request.GET.get('tasks', None) - serializer = GetCohortUserTasksSerializer( - item, many=False) if tasks is not None and tasks == 'True' else GetCohortUserSerializer(item, - many=False) + raise ValidationException("Cohort user not found", 404) + tasks = request.GET.get("tasks", None) + serializer = ( + GetCohortUserTasksSerializer(item, many=False) + if tasks is not None and tasks == "True" + else GetCohortUserSerializer(item, many=False) + ) return Response(serializer.data) items = CohortUser.objects.filter(cohort__academy__id=academy_id) try: - roles = request.GET.get('roles', None) + roles = request.GET.get("roles", None) if roles is not None: - items = items.filter(role__in=roles.upper().split(',')) + items = items.filter(role__in=roles.upper().split(",")) - finantial_status = request.GET.get('finantial_status', None) + finantial_status = request.GET.get("finantial_status", None) if finantial_status is not None: - items = items.filter(finantial_status__in=finantial_status.upper().split(',')) + items = items.filter(finantial_status__in=finantial_status.upper().split(",")) - educational_status = request.GET.get('educational_status', None) + educational_status = request.GET.get("educational_status", None) if educational_status is not None: - items = 
items.filter(educational_status__in=educational_status.upper().split(',')) + items = items.filter(educational_status__in=educational_status.upper().split(",")) - cohorts = request.GET.get('cohorts', None) + cohorts = request.GET.get("cohorts", None) if cohorts is not None: - items = items.filter(cohort__slug__in=cohorts.split(',')) + items = items.filter(cohort__slug__in=cohorts.split(",")) - watching = request.GET.get('watching', None) + watching = request.GET.get("watching", None) if watching is not None: - items = items.filter(watching=watching.lower() == 'true') + items = items.filter(watching=watching.lower() == "true") - if 'cohort' in self.request.GET: - param = self.request.GET.get('cohort') + if "cohort" in self.request.GET: + param = self.request.GET.get("cohort") items = items.filter(cohort__name__icontains=param) - like = request.GET.get('like', None) + like = request.GET.get("like", None) if like is not None: - items = query_like_by_full_name(like=like, items=items, prefix='user__') + items = query_like_by_full_name(like=like, items=items, prefix="user__") - syllabus = request.GET.get('syllabus', None) + syllabus = request.GET.get("syllabus", None) if syllabus is not None: - items = items.filter(cohort__syllabus_version__syllabus__slug__in=syllabus.split(',')) + items = items.filter(cohort__syllabus_version__syllabus__slug__in=syllabus.split(",")) - distinct = request.GET.get('distinct', None) - if distinct is not None and distinct == 'true': - ids = items.distinct('user__id').order_by('user__id').values_list('id', flat=True) + distinct = request.GET.get("distinct", None) + if distinct is not None and distinct == "true": + ids = items.distinct("user__id").order_by("user__id").values_list("id", flat=True) items = items.filter(id__in=ids) - users = request.GET.get('users', None) + users = request.GET.get("users", None) if users is not None: - items = items.filter(user__id__in=users.split(',')) + items = items.filter(user__id__in=users.split(",")) - items = items.order_by(request.GET.get('sort', '-created_at')) + items = items.order_by(request.GET.get("sort", "-created_at")) except Exception as e: raise ValidationException(str(e), 400) - tasks = request.GET.get('tasks', None) + tasks = request.GET.get("tasks", None) items = handler.queryset(items) - serializer = GetCohortUserTasksSerializer( - items, many=True) if tasks is not None and tasks == 'True' else GetCohortUserSerializer(items, many=True) + serializer = ( + GetCohortUserTasksSerializer(items, many=True) + if tasks is not None and tasks == "True" + else GetCohortUserSerializer(items, many=True) + ) return handler.response(serializer.data) - @capable_of('crud_cohort') + @capable_of("crud_cohort") def post(self, request, cohort_id=None, academy_id=None, user_id=None): def validate_data(data): if user_id: - data['user'] = user_id + data["user"] = user_id if cohort_id: - data['cohort'] = cohort_id + data["cohort"] = cohort_id - if 'user' not in data or 'cohort' not in data: - raise ValidationException('Missing cohort_id or user_id', code=400) + if "user" not in data or "cohort" not in data: + raise ValidationException("Missing cohort_id or user_id", code=400) - if not isinstance(data['user'], int) or not User.objects.filter(id=int(data['user'])).exists(): - raise ValidationException('invalid user_id', code=400) + if not isinstance(data["user"], int) or not User.objects.filter(id=int(data["user"])).exists(): + raise ValidationException("invalid user_id", code=400) - if (not isinstance(data['cohort'], int) or not 
Cohort.objects.filter(id=int(data['cohort'])).exists()): - raise ValidationException('invalid cohort_id', code=400) + if not isinstance(data["cohort"], int) or not Cohort.objects.filter(id=int(data["cohort"])).exists(): + raise ValidationException("invalid cohort_id", code=400) return data many = isinstance(request.data, list) context = { - 'request': request, - 'index': -1, + "request": request, + "index": -1, } data = [validate_data(data) for data in request.data] if many else validate_data(request.data) @@ -703,53 +712,53 @@ def validate_data(data): return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - @capable_of('crud_cohort') + @capable_of("crud_cohort") def put(self, request, cohort_id=None, user_id=None, academy_id=None): def validate_data(data, many): - validate_data.__dict__['index'] += 1 + validate_data.__dict__["index"] += 1 instance = None if user_id: - data['user'] = user_id + data["user"] = user_id if cohort_id: - data['cohort'] = cohort_id + data["cohort"] = cohort_id - if 'user' not in data and 'cohort' not in data and 'id' not in data: - raise ValidationException('Missing cohort_id, user_id and id', code=400) + if "user" not in data and "cohort" not in data and "id" not in data: + raise ValidationException("Missing cohort_id, user_id and id", code=400) - id = data.get('id') + id = data.get("id") if isinstance(id, int) and (instance := CohortUser.objects.filter(id=id).first()): - data['id'] = instance.id - data['cohort'] = instance.cohort.id - data['user'] = instance.user.id + data["id"] = instance.id + data["cohort"] = instance.cohort.id + data["user"] = instance.user.id return data, instance - user = data.get('user') - cohort = data.get('cohort') + user = data.get("user") + cohort = data.get("cohort") if not id: try: user = int(user) - data['user'] = user + data["user"] = user except Exception: - raise ValidationException('invalid user_id', code=400) + raise ValidationException("invalid user_id", code=400) try: cohort = int(cohort) - data['cohort'] = cohort + data["cohort"] = cohort except Exception: - raise ValidationException('invalid cohort_id', code=400) + raise ValidationException("invalid cohort_id", code=400) if instance := CohortUser.objects.filter(cohort__id=cohort, user__id=user).first(): - data['id'] = instance.id - data['cohort'] = instance.cohort.id - data['user'] = instance.user.id + data["id"] = instance.id + data["cohort"] = instance.cohort.id + data["user"] = instance.user.id return data, instance - message = 'Cannot determine CohortUser' + message = "Cannot determine CohortUser" if many: message += f" in index {validate_data.__dict__['index']}" @@ -757,12 +766,12 @@ def validate_data(data, many): # many raise ValidationException(message) - validate_data.__dict__['index'] = -1 + validate_data.__dict__["index"] = -1 many = isinstance(request.data, list) context = { - 'request': request, - 'index': -1, + "request": request, + "index": -1, } if many: @@ -788,15 +797,15 @@ def validate_data(data, many): return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - @capable_of('crud_cohort') + @capable_of("crud_cohort") def delete(self, request, cohort_id=None, user_id=None, academy_id=None): - lookups = self.generate_lookups(request, many_fields=['id']) + lookups = self.generate_lookups(request, many_fields=["id"]) if lookups and (user_id or cohort_id): raise ValidationException( - 'user_id or cohort_id 
was provided in url ' - 'in bulk mode request, use querystring style instead', - code=400) + "user_id or cohort_id was provided in url " "in bulk mode request, use querystring style instead", + code=400, + ) if lookups: items = CohortUser.objects.filter(**lookups, cohort__academy__id__in=academy_id) @@ -807,12 +816,13 @@ def delete(self, request, cohort_id=None, user_id=None, academy_id=None): return Response(None, status=status.HTTP_204_NO_CONTENT) if cohort_id is None or user_id is None: - raise ValidationException('Missing user_id or cohort_id', code=400) + raise ValidationException("Missing user_id or cohort_id", code=400) - cu = CohortUser.objects.filter(user__id=user_id, cohort__id=cohort_id, - cohort__academy__id__in=academy_id).first() + cu = CohortUser.objects.filter( + user__id=user_id, cohort__id=cohort_id, cohort__academy__id__in=academy_id + ).first() if cu is None: - raise ValidationException('Specified cohort and user could not be found') + raise ValidationException("Specified cohort and user could not be found") cu.delete() return Response(None, status=status.HTTP_204_NO_CONTENT) @@ -820,53 +830,54 @@ def delete(self, request, cohort_id=None, user_id=None, academy_id=None): class AcademyCohortTimeSlotView(APIView, GenerateLookupsMixin): - @capable_of('read_all_cohort') + @capable_of("read_all_cohort") def get(self, request, cohort_id=None, timeslot_id=None, academy_id=None): if timeslot_id is not None: - item = CohortTimeSlot.objects.filter(cohort__academy__id=academy_id, cohort__id=cohort_id, - id=timeslot_id).first() + item = CohortTimeSlot.objects.filter( + cohort__academy__id=academy_id, cohort__id=cohort_id, id=timeslot_id + ).first() if item is None: - raise ValidationException('Time slot not found', 404, slug='time-slot-not-found') + raise ValidationException("Time slot not found", 404, slug="time-slot-not-found") serializer = GETCohortTimeSlotSerializer(item, many=False) return Response(serializer.data) items = CohortTimeSlot.objects.filter(cohort__academy__id=academy_id, cohort__id=cohort_id) - recurrency_type = request.GET.get('recurrency_type') + recurrency_type = request.GET.get("recurrency_type") if recurrency_type: - items = items.filter(recurrency_type__in=recurrency_type.upper().split(',')) + items = items.filter(recurrency_type__in=recurrency_type.upper().split(",")) serializer = GETCohortTimeSlotSerializer(items, many=True) return Response(serializer.data) - @capable_of('crud_cohort') + @capable_of("crud_cohort") def post(self, request, cohort_id=None, academy_id=None): - if 'cohort' in request.data or 'cohort_id' in request.data: - raise ValidationException("Cohort can't be passed in the body", 400, slug='cohort-in-body') + if "cohort" in request.data or "cohort_id" in request.data: + raise ValidationException("Cohort can't be passed in the body", 400, slug="cohort-in-body") cohort = Cohort.objects.filter(id=cohort_id, academy__id=academy_id).first() if cohort_id and not cohort: - raise ValidationException('Cohort not found', 404, slug='cohort-not-found') + raise ValidationException("Cohort not found", 404, slug="cohort-not-found") - timezone = Academy.objects.filter(id=academy_id).values_list('timezone', flat=True).first() + timezone = Academy.objects.filter(id=academy_id).values_list("timezone", flat=True).first() if not timezone: - raise ValidationException('Academy doesn\'t have a timezone assigned', slug='academy-without-timezone') + raise ValidationException("Academy doesn't have a timezone assigned", slug="academy-without-timezone") data = { 
**request.data, - 'cohort': cohort.id, - 'timezone': timezone, + "cohort": cohort.id, + "timezone": timezone, } - if 'starting_at' in data: - data['starting_at'] = DatetimeInteger.from_iso_string(timezone, data['starting_at']) + if "starting_at" in data: + data["starting_at"] = DatetimeInteger.from_iso_string(timezone, data["starting_at"]) - if 'ending_at' in data: - data['ending_at'] = DatetimeInteger.from_iso_string(timezone, data['ending_at']) + if "ending_at" in data: + data["ending_at"] = DatetimeInteger.from_iso_string(timezone, data["ending_at"]) serializer = CohortTimeSlotSerializer(data=data, many=False) if serializer.is_valid(): @@ -874,38 +885,39 @@ def post(self, request, cohort_id=None, academy_id=None): return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - @capable_of('crud_cohort') + @capable_of("crud_cohort") def put(self, request, cohort_id=None, timeslot_id=None, academy_id=None): - if 'cohort' in request.data or 'cohort_id' in request.data: + if "cohort" in request.data or "cohort_id" in request.data: raise ValidationException("Cohort can't be passed in the body", 400) cohort = Cohort.objects.filter(id=cohort_id, academy__id=academy_id).first() if cohort_id and not cohort: - raise ValidationException('Cohort not found', 404, slug='cohort-not-found') + raise ValidationException("Cohort not found", 404, slug="cohort-not-found") - item = CohortTimeSlot.objects.filter(cohort__academy__id=academy_id, cohort__id=cohort_id, - id=timeslot_id).first() + item = CohortTimeSlot.objects.filter( + cohort__academy__id=academy_id, cohort__id=cohort_id, id=timeslot_id + ).first() if not item: - raise ValidationException('Time slot not found', 404, slug='time-slot-not-found') + raise ValidationException("Time slot not found", 404, slug="time-slot-not-found") - timezone = cohort.timezone or Academy.objects.filter(id=academy_id).values_list('timezone', flat=True).first() + timezone = cohort.timezone or Academy.objects.filter(id=academy_id).values_list("timezone", flat=True).first() if not timezone: - raise ValidationException('Academy doesn\'t have a timezone assigned', slug='academy-without-timezone') + raise ValidationException("Academy doesn't have a timezone assigned", slug="academy-without-timezone") data = { **request.data, - 'id': timeslot_id, - 'cohort': cohort.id, - 'timezone': timezone, + "id": timeslot_id, + "cohort": cohort.id, + "timezone": timezone, } - if 'starting_at' in data: - data['starting_at'] = DatetimeInteger.from_iso_string(timezone, data['starting_at']) + if "starting_at" in data: + data["starting_at"] = DatetimeInteger.from_iso_string(timezone, data["starting_at"]) - if 'ending_at' in data: - data['ending_at'] = DatetimeInteger.from_iso_string(timezone, data['ending_at']) + if "ending_at" in data: + data["ending_at"] = DatetimeInteger.from_iso_string(timezone, data["ending_at"]) serializer = CohortTimeSlotSerializer(item, data=data) if serializer.is_valid(): @@ -913,13 +925,14 @@ def put(self, request, cohort_id=None, timeslot_id=None, academy_id=None): return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - @capable_of('crud_cohort') + @capable_of("crud_cohort") def delete(self, request, cohort_id=None, timeslot_id=None, academy_id=None): - item = CohortTimeSlot.objects.filter(cohort__academy__id=academy_id, cohort__id=cohort_id, - id=timeslot_id).first() + item = CohortTimeSlot.objects.filter( + 
cohort__academy__id=academy_id, cohort__id=cohort_id, id=timeslot_id + ).first() if not item: - raise ValidationException('Time slot not found', 404, slug='time-slot-not-found') + raise ValidationException("Time slot not found", 404, slug="time-slot-not-found") item.delete() @@ -928,43 +941,46 @@ def delete(self, request, cohort_id=None, timeslot_id=None, academy_id=None): class AcademySyncCohortTimeSlotView(APIView, GenerateLookupsMixin): - @capable_of('crud_certificate') + @capable_of("crud_certificate") def post(self, request, academy_id=None): - cohort_ids = request.GET.get('cohort', '') + cohort_ids = request.GET.get("cohort", "") if not cohort_ids: - raise ValidationException('Missing cohort in querystring', 400, slug='missing-cohort-in-querystring') + raise ValidationException("Missing cohort in querystring", 400, slug="missing-cohort-in-querystring") - cohort_ids = cohort_ids.split(',') + cohort_ids = cohort_ids.split(",") cohorts = Cohort.objects.filter(id__in=cohort_ids) if len(cohorts) != len(cohort_ids): - raise ValidationException('Cohort not found', 404, slug='cohort-not-found') + raise ValidationException("Cohort not found", 404, slug="cohort-not-found") for cohort in cohorts: if not cohort.schedule: - raise ValidationException("Cohort doesn't have any schedule", 400, slug='cohort-without-specialty-mode') + raise ValidationException("Cohort doesn't have any schedule", 400, slug="cohort-without-specialty-mode") academy = Academy.objects.filter(id=academy_id).first() if not academy.timezone: - raise ValidationException('Academy doesn\'t have any timezone assigned', slug='without-timezone') + raise ValidationException("Academy doesn't have any timezone assigned", slug="without-timezone") CohortTimeSlot.objects.filter(cohort__id__in=cohort_ids).delete() data = [] for cohort in cohorts: certificate_id = cohort.schedule.id - certificate_timeslots = SyllabusScheduleTimeSlot.objects.filter(schedule__academy__id=academy_id, - schedule__id=certificate_id) + certificate_timeslots = SyllabusScheduleTimeSlot.objects.filter( + schedule__academy__id=academy_id, schedule__id=certificate_id + ) for certificate_timeslot in certificate_timeslots: - data.append({ - 'cohort': cohort.id, - 'starting_at': certificate_timeslot.starting_at, - 'ending_at': certificate_timeslot.ending_at, - 'recurrent': certificate_timeslot.recurrent, - 'recurrency_type': certificate_timeslot.recurrency_type, - 'timezone': cohort.timezone or academy.timezone, - }) + data.append( + { + "cohort": cohort.id, + "starting_at": certificate_timeslot.starting_at, + "ending_at": certificate_timeslot.ending_at, + "recurrent": certificate_timeslot.recurrent, + "recurrency_type": certificate_timeslot.recurrency_type, + "timezone": cohort.timezone or academy.timezone, + } + ) serializer = CohortTimeSlotSerializer(data=data, many=True) if serializer.is_valid(): @@ -975,54 +991,54 @@ def post(self, request, academy_id=None): class AcademySyllabusScheduleTimeSlotView(APIView, GenerateLookupsMixin): - @capable_of('read_certificate') + @capable_of("read_certificate") def get(self, request, certificate_id=None, timeslot_id=None, academy_id=None): if timeslot_id: - item = SyllabusScheduleTimeSlot.objects.filter(schedule__academy__id=academy_id, - schedule__id=certificate_id, - id=timeslot_id).first() + item = SyllabusScheduleTimeSlot.objects.filter( + schedule__academy__id=academy_id, schedule__id=certificate_id, id=timeslot_id + ).first() if item is None: - raise ValidationException('Time slot not found', 404, 
slug='time-slot-not-found') + raise ValidationException("Time slot not found", 404, slug="time-slot-not-found") serializer = GETSyllabusScheduleTimeSlotSerializer(item, many=False) return Response(serializer.data) items = SyllabusScheduleTimeSlot.objects.filter(schedule__academy__id=academy_id, schedule__id=certificate_id) - recurrency_type = request.GET.get('recurrency_type') + recurrency_type = request.GET.get("recurrency_type") if recurrency_type: - items = items.filter(recurrency_type__in=recurrency_type.upper().split(',')) + items = items.filter(recurrency_type__in=recurrency_type.upper().split(",")) serializer = GETSyllabusScheduleTimeSlotSerializer(items, many=True) return Response(serializer.data) - @capable_of('crud_certificate') + @capable_of("crud_certificate") def post(self, request, certificate_id=None, academy_id=None): - if 'certificate' in request.data or 'certificate_id' in request.data: - raise ValidationException("Certificate can't be passed is the body", 400, slug='certificate-in-body') + if "certificate" in request.data or "certificate_id" in request.data: + raise ValidationException("Certificate can't be passed is the body", 400, slug="certificate-in-body") certificate = SyllabusSchedule.objects.filter(id=certificate_id, academy__id=academy_id).first() if certificate_id and not certificate: - raise ValidationException('Schedule not found', 404, slug='certificate-not-found') + raise ValidationException("Schedule not found", 404, slug="certificate-not-found") - timezone = Academy.objects.filter(id=academy_id).values_list('timezone', flat=True).first() + timezone = Academy.objects.filter(id=academy_id).values_list("timezone", flat=True).first() if not timezone: - raise ValidationException('Academy doesn\'t have a timezone assigned', slug='academy-without-timezone') + raise ValidationException("Academy doesn't have a timezone assigned", slug="academy-without-timezone") data = { **request.data, - 'academy': academy_id, - 'schedule': certificate.id, - 'timezone': timezone, + "academy": academy_id, + "schedule": certificate.id, + "timezone": timezone, } - if 'starting_at' in data: - data['starting_at'] = DatetimeInteger.from_iso_string(timezone, data['starting_at']) + if "starting_at" in data: + data["starting_at"] = DatetimeInteger.from_iso_string(timezone, data["starting_at"]) - if 'ending_at' in data: - data['ending_at'] = DatetimeInteger.from_iso_string(timezone, data['ending_at']) + if "ending_at" in data: + data["ending_at"] = DatetimeInteger.from_iso_string(timezone, data["ending_at"]) serializer = SyllabusScheduleTimeSlotSerializer(data=data, many=False) if serializer.is_valid(): @@ -1030,38 +1046,38 @@ def post(self, request, certificate_id=None, academy_id=None): return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - @capable_of('crud_certificate') + @capable_of("crud_certificate") def put(self, request, certificate_id=None, timeslot_id=None, academy_id=None): - if 'certificate' in request.data or 'certificate_id' in request.data: + if "certificate" in request.data or "certificate_id" in request.data: raise ValidationException("Certificate can't be passed is the body", 400) certificate = SyllabusSchedule.objects.filter(id=certificate_id, academy__id=academy_id).first() if certificate_id and not certificate: - raise ValidationException('Certificate not found', 404, slug='certificate-not-found') + raise ValidationException("Certificate not found", 404, slug="certificate-not-found") 
item = SyllabusScheduleTimeSlot.objects.filter(schedule__id=certificate_id, id=timeslot_id).first() if not item: - raise ValidationException('Time slot not found', 404, slug='time-slot-not-found') + raise ValidationException("Time slot not found", 404, slug="time-slot-not-found") - timezone = Academy.objects.filter(id=academy_id).values_list('timezone', flat=True).first() + timezone = Academy.objects.filter(id=academy_id).values_list("timezone", flat=True).first() if not timezone: - raise ValidationException('Academy doesn\'t have a timezone assigned', slug='academy-without-timezone') + raise ValidationException("Academy doesn't have a timezone assigned", slug="academy-without-timezone") data = { **request.data, - 'id': timeslot_id, - 'academy': academy_id, - 'schedule': certificate.id, - 'timezone': timezone, + "id": timeslot_id, + "academy": academy_id, + "schedule": certificate.id, + "timezone": timezone, } - if 'starting_at' in data: - data['starting_at'] = DatetimeInteger.from_iso_string(timezone, data['starting_at']) + if "starting_at" in data: + data["starting_at"] = DatetimeInteger.from_iso_string(timezone, data["starting_at"]) - if 'ending_at' in data: - data['ending_at'] = DatetimeInteger.from_iso_string(timezone, data['ending_at']) + if "ending_at" in data: + data["ending_at"] = DatetimeInteger.from_iso_string(timezone, data["ending_at"]) serializer = SyllabusScheduleTimeSlotSerializer(item, data=data) if serializer.is_valid(): @@ -1069,14 +1085,14 @@ def put(self, request, certificate_id=None, timeslot_id=None, academy_id=None): return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - @capable_of('crud_certificate') + @capable_of("crud_certificate") def delete(self, request, certificate_id=None, timeslot_id=None, academy_id=None): - item = SyllabusScheduleTimeSlot.objects.filter(schedule__academy__id=academy_id, - schedule__id=certificate_id, - id=timeslot_id).first() + item = SyllabusScheduleTimeSlot.objects.filter( + schedule__academy__id=academy_id, schedule__id=certificate_id, id=timeslot_id + ).first() if not item: - raise ValidationException('Time slot not found', 404, slug='time-slot-not-found') + raise ValidationException("Time slot not found", 404, slug="time-slot-not-found") item.delete() @@ -1086,9 +1102,9 @@ def delete(self, request, certificate_id=None, timeslot_id=None, academy_id=None class CohortMeView(APIView, GenerateLookupsMixin): """List all snippets, or create a new snippet.""" - extensions = APIViewExtensions(cache=CohortCache, cache_per_user=True, sort='-kickoff_date', paginate=True) + extensions = APIViewExtensions(cache=CohortCache, cache_per_user=True, sort="-kickoff_date", paginate=True) - @capable_of('read_single_cohort') + @capable_of("read_single_cohort") def get(self, request, cohort_id=None, academy_id=None): handler = self.extensions(request) @@ -1098,12 +1114,13 @@ def get(self, request, cohort_id=None, academy_id=None): if cohort_id is not None: if cohort_id.isnumeric(): - cohort_user = CohortUser.objects.filter(user=request.user, academy__id=academy_id, - cohort__id=cohort_id).first() + cohort_user = CohortUser.objects.filter( + user=request.user, academy__id=academy_id, cohort__id=cohort_id + ).first() else: - cohort_user = CohortUser.objects.filter(user=request.user, - academy__id=academy_id, - cohort__slug=cohort_id).first() + cohort_user = CohortUser.objects.filter( + user=request.user, academy__id=academy_id, cohort__slug=cohort_id + ).first() if not cohort_user 
or not cohort_user.cohort: return Response(status=status.HTTP_404_NOT_FOUND) @@ -1111,21 +1128,21 @@ def get(self, request, cohort_id=None, academy_id=None): serializer = GetCohortSerializer(cohort_user.cohort, many=False) return Response(serializer.data, status=status.HTTP_200_OK) - cohorts_of_student = CohortUser.objects.filter(user=request.user).values_list('cohort__id', flat=True) + cohorts_of_student = CohortUser.objects.filter(user=request.user).values_list("cohort__id", flat=True) items = Cohort.objects.filter(academy__id=academy_id, id__in=cohorts_of_student) - upcoming = request.GET.get('upcoming', None) - if upcoming == 'true': + upcoming = request.GET.get("upcoming", None) + if upcoming == "true": now = timezone.now() items = items.filter(kickoff_date__gte=now) - stage = request.GET.get('stage', None) + stage = request.GET.get("stage", None) if stage is not None: - items = items.filter(stage__in=stage.upper().split(',')) + items = items.filter(stage__in=stage.upper().split(",")) else: - items = items.exclude(stage='DELETED') + items = items.exclude(stage="DELETED") - like = request.GET.get('like', None) + like = request.GET.get("like", None) if like is not None: items = items.filter(Q(name__icontains=like) | Q(slug__icontains=like)) @@ -1139,9 +1156,9 @@ class AcademyCohortView(APIView, GenerateLookupsMixin): """List all snippets, or create a new snippet.""" permission_classes = [IsAuthenticated] - extensions = APIViewExtensions(cache=CohortCache, sort='-kickoff_date', paginate=True) + extensions = APIViewExtensions(cache=CohortCache, sort="-kickoff_date", paginate=True) - @capable_of('read_all_cohort') + @capable_of("read_all_cohort") def get(self, request, cohort_id=None, academy_id=None): handler = self.extensions(request) @@ -1164,26 +1181,26 @@ def get(self, request, cohort_id=None, academy_id=None): items = Cohort.objects.filter(academy__id=academy_id) - upcoming = request.GET.get('upcoming', None) - if upcoming == 'true': + upcoming = request.GET.get("upcoming", None) + if upcoming == "true": now = timezone.now() items = items.filter(kickoff_date__gte=now) - academy = request.GET.get('academy', None) + academy = request.GET.get("academy", None) if academy is not None: - items = items.filter(academy__slug__in=academy.split(',')) + items = items.filter(academy__slug__in=academy.split(",")) - location = request.GET.get('location', None) + location = request.GET.get("location", None) if location is not None: - items = items.filter(academy__slug__in=location.split(',')) + items = items.filter(academy__slug__in=location.split(",")) - stage = request.GET.get('stage', None) + stage = request.GET.get("stage", None) if stage is not None: - items = items.filter(stage__in=stage.upper().split(',')) + items = items.filter(stage__in=stage.upper().split(",")) else: - items = items.exclude(stage='DELETED') + items = items.exclude(stage="DELETED") - like = request.GET.get('like', None) + like = request.GET.get("like", None) if like is not None: items = items.filter(Q(name__icontains=like) | Q(slug__icontains=like)) @@ -1192,81 +1209,83 @@ def get(self, request, cohort_id=None, academy_id=None): return handler.response(serializer.data) - @capable_of('crud_cohort') + @capable_of("crud_cohort") def post(self, request, academy_id=None): - if request.data.get('academy') or request.data.get('academy_id'): - raise ParseError(detail='academy and academy_id field is not allowed', slug='academy-in-body') + if request.data.get("academy") or request.data.get("academy_id"): + raise 
ParseError(detail="academy and academy_id field is not allowed", slug="academy-in-body") academy = Academy.objects.filter(id=academy_id).first() if academy is None: - raise ValidationException(f'Academy {academy_id} not found', slug='academy-not-found') + raise ValidationException(f"Academy {academy_id} not found", slug="academy-not-found") - syllabus = request.data.get('syllabus') + syllabus = request.data.get("syllabus") if syllabus is None: - raise ValidationException('syllabus field is missing', slug='missing-syllabus-field') + raise ValidationException("syllabus field is missing", slug="missing-syllabus-field") # schedule = request.data.get('schedule') # if schedule is None: # raise ValidationException('specialty mode field is missing', slug='specialty-mode-field') - if request.data.get('current_day'): - raise ValidationException('current_day field is not allowed', slug='current-day-not-allowed') + if request.data.get("current_day"): + raise ValidationException("current_day field is not allowed", slug="current-day-not-allowed") data = { - 'academy': academy, - 'current_day': 0, + "academy": academy, + "current_day": 0, } for key in request.data: data[key] = request.data.get(key) - if 'timezone' not in data: - data['timezone'] = academy.timezone + if "timezone" not in data: + data["timezone"] = academy.timezone - if 'syllabus_version' in data: - del data['syllabus_version'] + if "syllabus_version" in data: + del data["syllabus_version"] - serializer = CohortSerializer(data=data, context={'request': request, 'academy': academy}) + serializer = CohortSerializer(data=data, context={"request": request, "academy": academy}) if serializer.is_valid(): serializer.save() return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - @capable_of('crud_cohort') + @capable_of("crud_cohort") def put(self, request, cohort_id=None, academy_id=None): - if request.data.get('academy') or request.data.get('academy_id'): - raise ParseError(detail='academy and academy_id field is not allowed') + if request.data.get("academy") or request.data.get("academy_id"): + raise ParseError(detail="academy and academy_id field is not allowed") academy = Academy.objects.filter(id=academy_id).first() if academy is None: - raise ValidationError(f'Academy {academy_id} not found') + raise ValidationError(f"Academy {academy_id} not found") if cohort_id is None: - raise ValidationException('Missing cohort_id', code=400) + raise ValidationException("Missing cohort_id", code=400) cohort = Cohort.objects.filter(id=cohort_id, academy__id=academy_id) # only from this academy cohort = localize_query(cohort, request).first() if cohort is None: - logger.debug('Cohort not be found in related academies') - raise ValidationException('Specified cohort not be found') + logger.debug("Cohort not be found in related academies") + raise ValidationException("Specified cohort not be found") data = {} for key in request.data: data[key] = request.data.get(key) - if 'syllabus_version' in data: - del data['syllabus_version'] + if "syllabus_version" in data: + del data["syllabus_version"] - serializer = CohortPUTSerializer(cohort, - data=data, - context={ - 'request': request, - 'cohort_id': cohort_id, - 'academy': academy, - }) + serializer = CohortPUTSerializer( + cohort, + data=data, + context={ + "request": request, + "cohort_id": cohort_id, + "academy": academy, + }, + ) if serializer.is_valid(): serializer.save() @@ -1274,14 +1293,14 @@ def put(self, request, 
cohort_id=None, academy_id=None): return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - @capable_of('crud_cohort') + @capable_of("crud_cohort") def delete(self, request, cohort_id=None, academy_id=None): - lookups = self.generate_lookups(request, many_fields=['id']) + lookups = self.generate_lookups(request, many_fields=["id"]) if lookups and cohort_id: raise ValidationException( - 'cohort_id was provided in url ' - 'in bulk mode request, use querystring style instead', code=400) + "cohort_id was provided in url " "in bulk mode request, use querystring style instead", code=400 + ) if lookups: items = Cohort.objects.filter(**lookups, academy__id=academy_id) @@ -1290,8 +1309,9 @@ def delete(self, request, cohort_id=None, academy_id=None): item_users = CohortUser.objects.filter(role=STUDENT, cohort__id=item.id) if item_users.count() > 0: - raise ValidationException('Please remove all students before trying to delete cohort', - slug='cohort-has-students') + raise ValidationException( + "Please remove all students before trying to delete cohort", slug="cohort-has-students" + ) for item in items: item.stage = DELETED @@ -1300,7 +1320,7 @@ def delete(self, request, cohort_id=None, academy_id=None): return Response(None, status=status.HTTP_204_NO_CONTENT) if cohort_id is None: - raise ValidationException('Missing cohort_id', code=400) + raise ValidationException("Missing cohort_id", code=400) try: cohort = Cohort.objects.get(id=cohort_id, academy__id=academy_id) @@ -1312,8 +1332,9 @@ def delete(self, request, cohort_id=None, academy_id=None): # Check if cohort has students before deleting if cohort_users.count() > 0: - raise ValidationException('Please remove all students before trying to delete cohort', - slug='cohort-has-students') + raise ValidationException( + "Please remove all students before trying to delete cohort", slug="cohort-has-students" + ) cohort.stage = DELETED cohort.save() @@ -1329,21 +1350,21 @@ def get(self, request): items = SyllabusSchedule.objects.filter(academy__isnull=False) - syllabus_id = request.GET.get('syllabus_id') + syllabus_id = request.GET.get("syllabus_id") if syllabus_id: - items = items.filter(syllabus__id__in=syllabus_id.split(',')) + items = items.filter(syllabus__id__in=syllabus_id.split(",")) - syllabus_slug = request.GET.get('syllabus_slug') + syllabus_slug = request.GET.get("syllabus_slug") if syllabus_slug: - items = items.filter(syllabus__slug__in=syllabus_slug.split(',')) + items = items.filter(syllabus__slug__in=syllabus_slug.split(",")) - academy_id = request.GET.get('academy_id') + academy_id = request.GET.get("academy_id") if academy_id: - items = items.filter(academy__id__in=academy_id.split(',')) + items = items.filter(academy__id__in=academy_id.split(",")) - academy_slug = request.GET.get('academy_slug') + academy_slug = request.GET.get("academy_slug") if academy_slug: - items = items.filter(academy__slug__in=academy_slug.split(',')) + items = items.filter(academy__slug__in=academy_slug.split(",")) items = handler.queryset(items) serializer = GetSyllabusScheduleSerializer(items, many=True) @@ -1353,21 +1374,21 @@ def get(self, request): class AcademySyllabusScheduleView(APIView, HeaderLimitOffsetPagination, GenerateLookupsMixin): - @capable_of('read_certificate') + @capable_of("read_certificate") def get(self, request, academy_id=None): items = SyllabusSchedule.objects.filter(academy__id=academy_id) - syllabus_id = request.GET.get('syllabus_id') + syllabus_id = 
request.GET.get("syllabus_id") if syllabus_id: - items = items.filter(syllabus__id__in=syllabus_id.split(',')) + items = items.filter(syllabus__id__in=syllabus_id.split(",")) - syllabus_slug = request.GET.get('syllabus_slug') + syllabus_slug = request.GET.get("syllabus_slug") if syllabus_slug: - items = items.filter(syllabus__slug__in=syllabus_slug.split(',')) + items = items.filter(syllabus__slug__in=syllabus_slug.split(",")) - schedule_type = request.GET.get('schedule_type') + schedule_type = request.GET.get("schedule_type") if schedule_type: - items = items.filter(schedule_type__in=schedule_type.upper().split(',')) + items = items.filter(schedule_type__in=schedule_type.upper().split(",")) page = self.paginate_queryset(items, request) serializer = GetSyllabusScheduleSerializer(page, many=True) @@ -1377,21 +1398,21 @@ def get(self, request, academy_id=None): else: return Response(serializer.data, status=status.HTTP_200_OK) - @capable_of('crud_certificate') + @capable_of("crud_certificate") def post(self, request, academy_id=None): - if 'syllabus' not in request.data: - raise ValidationException('Missing syllabus in the request', slug='missing-syllabus-in-request') + if "syllabus" not in request.data: + raise ValidationException("Missing syllabus in the request", slug="missing-syllabus-in-request") - syllabus = Syllabus.objects.filter(id=request.data['syllabus']).exists() + syllabus = Syllabus.objects.filter(id=request.data["syllabus"]).exists() if not syllabus: - raise ValidationException('Syllabus not found', code=404, slug='syllabus-not-found') + raise ValidationException("Syllabus not found", code=404, slug="syllabus-not-found") - if 'academy' not in request.data: - raise ValidationException('Missing academy in the request', slug='missing-academy-in-request') + if "academy" not in request.data: + raise ValidationException("Missing academy in the request", slug="missing-academy-in-request") - academy = Academy.objects.filter(id=request.data['academy']).exists() + academy = Academy.objects.filter(id=request.data["academy"]).exists() if not academy: - raise ValidationException('Academy not found', code=404, slug='academy-not-found') + raise ValidationException("Academy not found", code=404, slug="academy-not-found") serializer = SyllabusScheduleSerializer(data=request.data) @@ -1400,22 +1421,25 @@ def post(self, request, academy_id=None): return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - @capable_of('crud_certificate') + @capable_of("crud_certificate") def put(self, request, certificate_id=None, academy_id=None): schedule = SyllabusSchedule.objects.filter(id=certificate_id).first() if not schedule: - raise ValidationException('Schedule not found', code=404, slug='specialty-mode-not-found') + raise ValidationException("Schedule not found", code=404, slug="specialty-mode-not-found") if schedule.academy.id != int(academy_id): - raise ValidationException('You can\'t edit a schedule of other academy', - code=404, - slug='syllabus-schedule-of-other-academy') + raise ValidationException( + "You can't edit a schedule of other academy", code=404, slug="syllabus-schedule-of-other-academy" + ) - if 'syllabus' in request.data and not Syllabus.objects.filter( + if ( + "syllabus" in request.data + and not Syllabus.objects.filter( Q(academy_owner__id=academy_id) | Q(private=False), - id=request.data['syllabus'], - ).exists(): - raise ValidationException('Syllabus not found', code=404, slug='syllabus-not-found') + 
id=request.data["syllabus"], + ).exists() + ): + raise ValidationException("Syllabus not found", code=404, slug="syllabus-not-found") serializer = SyllabusSchedulePUTSerializer(schedule, data=request.data) @@ -1424,15 +1448,15 @@ def put(self, request, certificate_id=None, academy_id=None): return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - @capable_of('crud_certificate') + @capable_of("crud_certificate") def delete(self, request, academy_id=None): # TODO: here i don't add one single delete, because i don't know if it is required - lookups = self.generate_lookups(request, many_fields=['id']) + lookups = self.generate_lookups(request, many_fields=["id"]) if not lookups: - raise ValidationException('Missing parameters in the querystring', code=400) + raise ValidationException("Missing parameters in the querystring", code=400) - ids = SyllabusSchedule.objects.filter(academy__id=academy_id).values_list('id', flat=True) + ids = SyllabusSchedule.objects.filter(academy__id=academy_id).values_list("id", flat=True) items = SyllabusSchedule.objects.filter(**lookups).filter(id__in=ids) @@ -1442,11 +1466,11 @@ def delete(self, request, academy_id=None): return Response(None, status=status.HTTP_204_NO_CONTENT) -@api_view(['GET']) +@api_view(["GET"]) def get_schedule(request, schedule_id): certificates = SyllabusSchedule.objects.filter(id=schedule_id).first() if certificates is None: - raise ValidationException('Schedule not found', slug='schedule-not-found', code=404) + raise ValidationException("Schedule not found", slug="schedule-not-found", code=404) serializer = GetSyllabusScheduleSerializer(certificates, many=False) return Response(serializer.data, status=status.HTTP_200_OK) @@ -1456,7 +1480,7 @@ class SyllabusView(APIView): extensions = APIViewExtensions(paginate=True) - @capable_of('read_syllabus') + @capable_of("read_syllabus") def get(self, request, syllabus_id=None, syllabus_slug=None, academy_id=None): handler = self.extensions(request) @@ -1467,7 +1491,7 @@ def get(self, request, syllabus_id=None, syllabus_slug=None, academy_id=None): ).first() if not syllabus: - raise ValidationException('Syllabus not found', code=404, slug='syllabus-not-found') + raise ValidationException("Syllabus not found", code=404, slug="syllabus-not-found") serializer = GetSyllabusSerializer(syllabus, many=False) return Response(serializer.data, status=status.HTTP_200_OK) @@ -1479,15 +1503,16 @@ def get(self, request, syllabus_id=None, syllabus_slug=None, academy_id=None): ).first() if not syllabus: - raise ValidationException('Syllabus not found', code=404, slug='syllabus-not-found') + raise ValidationException("Syllabus not found", code=404, slug="syllabus-not-found") serializer = GetSyllabusSerializer(syllabus, many=False) return Response(serializer.data, status=status.HTTP_200_OK) - items = Syllabus.objects.filter(Q(academy_owner__id=academy_id) - | Q(private=False)).exclude(academy_owner__isnull=True) + items = Syllabus.objects.filter(Q(academy_owner__id=academy_id) | Q(private=False)).exclude( + academy_owner__isnull=True + ) - like = request.GET.get('like', None) + like = request.GET.get("like", None) if like is not None: items = items.filter(Q(name__icontains=like) | Q(slug__icontains=like)) @@ -1496,17 +1521,17 @@ def get(self, request, syllabus_id=None, syllabus_slug=None, academy_id=None): return handler.response(serializer.data) - @capable_of('crud_syllabus') + @capable_of("crud_syllabus") def post(self, request, 
academy_id=None): - if not request.data.get('slug'): - raise ValidationException('Missing slug in request', slug='missing-slug') + if not request.data.get("slug"): + raise ValidationException("Missing slug in request", slug="missing-slug") - if not request.data.get('name'): - raise ValidationException('Missing name in request', slug='missing-name') + if not request.data.get("name"): + raise ValidationException("Missing name in request", slug="missing-name") data = { **request.data, - 'academy_owner': academy_id, + "academy_owner": academy_id, } serializer = SyllabusSerializer(data=data) @@ -1516,13 +1541,13 @@ def post(self, request, academy_id=None): return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - @capable_of('crud_syllabus') + @capable_of("crud_syllabus") def put(self, request, syllabus_id=None, syllabus_slug=None, academy_id=None): - if 'slug' in request.data and not request.data['slug']: - raise ValidationException('slug can\'t be empty', slug='empty-slug') + if "slug" in request.data and not request.data["slug"]: + raise ValidationException("slug can't be empty", slug="empty-slug") - if 'name' in request.data and not request.data['name']: - raise ValidationException('name can\'t be empty', slug='empty-name') + if "name" in request.data and not request.data["name"]: + raise ValidationException("name can't be empty", slug="empty-name") syllabus = Syllabus.objects.filter( Q(id=syllabus_id) | Q(slug=syllabus_slug), @@ -1530,11 +1555,11 @@ def put(self, request, syllabus_id=None, syllabus_slug=None, academy_id=None): ).first() data = { **request.data, - 'academy_owner': academy_id, + "academy_owner": academy_id, } if not syllabus: - raise ValidationException('Syllabus details not found', code=404, slug='syllabus-not-found') + raise ValidationException("Syllabus details not found", code=404, slug="syllabus-not-found") serializer = SyllabusSerializer(syllabus, data=data, many=False) if serializer.is_valid(): @@ -1548,32 +1573,31 @@ class SyllabusAssetView(APIView, HeaderLimitOffsetPagination): # TODO: @has_permission('superadmin') def get(self, request, asset_slug=None): - if asset_slug is None or asset_slug == '': - raise ValidationException('Please specify the asset slug you want to search', slug='invalid-asset-slug') + if asset_slug is None or asset_slug == "": + raise ValidationException("Please specify the asset slug you want to search", slug="invalid-asset-slug") - findings = find_asset_on_json(asset_slug=asset_slug, asset_type=request.GET.get('asset_type', None)) + findings = find_asset_on_json(asset_slug=asset_slug, asset_type=request.GET.get("asset_type", None)) return Response(findings, status=status.HTTP_200_OK) # TODO: @has_permission('superadmin') def put(self, request, asset_slug=None): - if asset_slug is None or asset_slug == '': - raise ValidationException('Please specify the asset slug you want to replace', slug='invalid-asset-slug') + if asset_slug is None or asset_slug == "": + raise ValidationException("Please specify the asset slug you want to replace", slug="invalid-asset-slug") asset = request.data - if 'slug' not in asset or asset['slug'] == '': - raise ValidationException('Missing or invalid slug', slug='invalid-asset-slug') - if 'type' not in asset or asset['type'] == '': - raise ValidationException('Missing or invalid asset type', slug='invalid-asset-type') + if "slug" not in asset or asset["slug"] == "": + raise ValidationException("Missing or invalid slug", 
slug="invalid-asset-slug") + if "type" not in asset or asset["type"] == "": + raise ValidationException("Missing or invalid asset type", slug="invalid-asset-type") simulate = True - if 'simulate' in asset and asset['simulate'] == False: + if "simulate" in asset and asset["simulate"] == False: simulate = False - findings = update_asset_on_json(from_slug=asset_slug, - to_slug=asset['slug'], - asset_type=asset['type'], - simulate=simulate) + findings = update_asset_on_json( + from_slug=asset_slug, to_slug=asset["slug"], asset_type=asset["type"], simulate=simulate + ) return Response(findings, status=status.HTTP_200_OK) @@ -1581,7 +1605,7 @@ def put(self, request, asset_slug=None): class SyllabusVersionView(APIView): extensions = APIViewExtensions(cache=SyllabusVersionCache, paginate=True) - @capable_of('read_syllabus') + @capable_of("read_syllabus") def get(self, request, syllabus_id=None, syllabus_slug=None, version=None, academy_id=None): handler = self.extensions(request) @@ -1590,17 +1614,22 @@ def get(self, request, syllabus_id=None, syllabus_slug=None, version=None, acade return cache if academy_id is None: - raise ValidationException('Missing academy id', slug='missing-academy-id') + raise ValidationException("Missing academy id", slug="missing-academy-id") if version is not None: syllabus_version = None - if version == 'latest': - syllabus_version = SyllabusVersion.objects.filter( - Q(syllabus__id=syllabus_id) | Q(syllabus__slug=syllabus_slug), - Q(syllabus__academy_owner__id=academy_id) | Q(syllabus__private=False), - ).filter(status='PUBLISHED').order_by('-version').first() - - if syllabus_version is None and version is not None and version != 'latest': + if version == "latest": + syllabus_version = ( + SyllabusVersion.objects.filter( + Q(syllabus__id=syllabus_id) | Q(syllabus__slug=syllabus_slug), + Q(syllabus__academy_owner__id=academy_id) | Q(syllabus__private=False), + ) + .filter(status="PUBLISHED") + .order_by("-version") + .first() + ) + + if syllabus_version is None and version is not None and version != "latest": syllabus_version = SyllabusVersion.objects.filter( Q(syllabus__id=syllabus_id) | Q(syllabus__slug=syllabus_slug), Q(syllabus__academy_owner__id=academy_id) | Q(syllabus__private=False), @@ -1608,9 +1637,9 @@ def get(self, request, syllabus_id=None, syllabus_slug=None, version=None, acade ).first() if syllabus_version is None: - raise ValidationException(f'Syllabus version "{version}" not found or is a draft', - code=404, - slug='syllabus-version-not-found') + raise ValidationException( + f'Syllabus version "{version}" not found or is a draft', code=404, slug="syllabus-version-not-found" + ) serializer = GetSyllabusVersionSerializer(syllabus_version, many=False) return handler.response(serializer.data) @@ -1618,61 +1647,60 @@ def get(self, request, syllabus_id=None, syllabus_slug=None, version=None, acade items = SyllabusVersion.objects.filter( Q(syllabus__id=syllabus_id) | Q(syllabus__slug=syllabus_slug), Q(syllabus__academy_owner__id=academy_id) | Q(syllabus__private=False), - ).order_by('version') + ).order_by("version") - _status = request.GET.get('status', None) + _status = request.GET.get("status", None) if _status is not None: - items = items.filter(status__in=_status.upper().split(',')) + items = items.filter(status__in=_status.upper().split(",")) items = handler.queryset(items) serializer = GetSyllabusVersionSerializer(items, many=True) return handler.response(serializer.data) - @capable_of('crud_syllabus') + @capable_of("crud_syllabus") def post(self, 
request, syllabus_id=None, syllabus_slug=None, academy_id=None): syllabus = None if syllabus_id or syllabus_slug: - syllabus = Syllabus.objects.filter(Q(id=syllabus_id) - | Q(slug=syllabus_slug, slug__isnull=False)).filter( - academy_owner__id=academy_id).first() + syllabus = ( + Syllabus.objects.filter(Q(id=syllabus_id) | Q(slug=syllabus_slug, slug__isnull=False)) + .filter(academy_owner__id=academy_id) + .first() + ) if not syllabus: - raise ValidationException('Syllabus not found for this academy', code=404, slug='syllabus-not-found') + raise ValidationException("Syllabus not found for this academy", code=404, slug="syllabus-not-found") - if not syllabus and 'syllabus' not in request.data: - raise ValidationException('Missing syllabus in the request', slug='missing-syllabus-in-request') + if not syllabus and "syllabus" not in request.data: + raise ValidationException("Missing syllabus in the request", slug="missing-syllabus-in-request") if not syllabus: - syllabus = Syllabus.objects.filter(id=request.data['syllabus']).first() + syllabus = Syllabus.objects.filter(id=request.data["syllabus"]).first() if not syllabus: - raise ValidationException('Syllabus not found for this academy', code=404, slug='syllabus-not-found') + raise ValidationException("Syllabus not found for this academy", code=404, slug="syllabus-not-found") academy = Academy.objects.filter(id=academy_id).first() if academy is None: - raise ValidationException(f'Invalid academy {str(academy_id)}') + raise ValidationException(f"Invalid academy {str(academy_id)}") if syllabus: - request.data['syllabus'] = syllabus.id + request.data["syllabus"] = syllabus.id - serializer = SyllabusVersionSerializer(data=request.data, - context={ - 'request': request, - 'academy': academy, - 'syllabus': syllabus - }) + serializer = SyllabusVersionSerializer( + data=request.data, context={"request": request, "academy": academy, "syllabus": syllabus} + ) if serializer.is_valid(): serializer.save() return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - @capable_of('crud_syllabus') + @capable_of("crud_syllabus") def put(self, request, syllabus_id=None, syllabus_slug=None, version=None, academy_id=None): if not version: - raise ValidationException('Missing syllabus version', code=400) + raise ValidationException("Missing syllabus version", code=400) elif not version.isnumeric(): raise ValidationException(f'Syllabus version must be a number, received "{version}"', code=400) @@ -1683,14 +1711,13 @@ def put(self, request, syllabus_id=None, syllabus_slug=None, version=None, acade ).first() if not syllabus_version: - raise ValidationException('Syllabus version not found for this academy', - code=400, - slug='syllabus-not-found') - - serializer = SyllabusVersionPutSerializer(syllabus_version, - data=request.data, - many=False, - context={'request': request}) + raise ValidationException( + "Syllabus version not found for this academy", code=400, slug="syllabus-not-found" + ) + + serializer = SyllabusVersionPutSerializer( + syllabus_version, data=request.data, many=False, context={"request": request} + ) if serializer.is_valid(): serializer.save() return Response(serializer.data, status=status.HTTP_200_OK) @@ -1707,24 +1734,24 @@ def get(self, request): items = SyllabusVersion.objects.all() lookup = {} - if 'version' in self.request.GET: - param = self.request.GET.get('version') - lookup['version'] = param + if "version" in self.request.GET: + param = 
self.request.GET.get("version") + lookup["version"] = param - if 'slug' in self.request.GET: - param = self.request.GET.get('slug') - lookup['syllabus__slug'] = param + if "slug" in self.request.GET: + param = self.request.GET.get("slug") + lookup["syllabus__slug"] = param - if 'academy' in self.request.GET: - param = self.request.GET.get('academy') - lookup['syllabus__academy_owner__id__in'] = [p for p in param.split(',')] + if "academy" in self.request.GET: + param = self.request.GET.get("academy") + lookup["syllabus__academy_owner__id__in"] = [p for p in param.split(",")] - if 'is_documentation' in self.request.GET: - param = self.request.GET.get('is_documentation') - if param == 'True': - lookup['syllabus__is_documentation'] = True + if "is_documentation" in self.request.GET: + param = self.request.GET.get("is_documentation") + if param == "True": + lookup["syllabus__is_documentation"] = True - items = items.filter(syllabus__private=False, **lookup).order_by('version') + items = items.filter(syllabus__private=False, **lookup).order_by("version") serializer = GetSyllabusVersionSerializer(items, many=True) return Response(serializer.data, status=status.HTTP_200_OK) @@ -1746,29 +1773,29 @@ def get(self, request): items = CohortUser.objects.all() - cohort_id = request.GET.get('cohort_id', None) + cohort_id = request.GET.get("cohort_id", None) if cohort_id is not None: items = items.filter(cohort__id=cohort_id) - roles = request.GET.get('roles', None) + roles = request.GET.get("roles", None) if roles is not None: - items = items.filter(role__in=roles.upper().split(',')) + items = items.filter(role__in=roles.upper().split(",")) - finantial_status = request.GET.get('finantial_status', None) + finantial_status = request.GET.get("finantial_status", None) if finantial_status is not None: - items = items.filter(finantial_status__in=finantial_status.upper().split(',')) + items = items.filter(finantial_status__in=finantial_status.upper().split(",")) - educational_status = request.GET.get('educational_status', None) + educational_status = request.GET.get("educational_status", None) if educational_status is not None: - items = items.filter(educational_status__in=educational_status.upper().split(',')) + items = items.filter(educational_status__in=educational_status.upper().split(",")) - syllabus = request.GET.get('syllabus', None) + syllabus = request.GET.get("syllabus", None) if syllabus is not None: - items = items.filter(cohort__syllabus_version__syllabus__slug__in=syllabus.split(',')) + items = items.filter(cohort__syllabus_version__syllabus__slug__in=syllabus.split(",")) - users = request.GET.get('users', None) + users = request.GET.get("users", None) if users is not None: - items = items.filter(user__id__in=users.split(',')) + items = items.filter(user__id__in=users.split(",")) items = handler.queryset(items) serializer = GetPublicCohortUserSerializer(items, many=True) @@ -1778,7 +1805,7 @@ def get(self, request): class AcademyCohortHistoryView(APIView): """List all snippets, or create a new snippet.""" - @capable_of('read_cohort_log') + @capable_of("read_cohort_log") def get(self, request, cohort_id, academy_id): item = None @@ -1788,11 +1815,11 @@ def get(self, request, cohort_id, academy_id): item = Cohort.objects.filter(slug=cohort_id, academy__id=academy_id).first() if item is None: - raise ValidationException('Cohort not found on this academy', code=404, slug='cohort-not-found') + raise ValidationException("Cohort not found on this academy", code=404, slug="cohort-not-found") return 
Response(CohortLog(item).serialize()) - @capable_of('crud_cohort_log') + @capable_of("crud_cohort_log") def put(self, request, cohort_id, academy_id): item = None @@ -1802,26 +1829,26 @@ def put(self, request, cohort_id, academy_id): item = Cohort.objects.filter(slug=cohort_id, academy__id=academy_id).first() if item is None: - raise ValidationException('Cohort not found on this academy', code=404, slug='cohort-not-found') + raise ValidationException("Cohort not found on this academy", code=404, slug="cohort-not-found") day = None try: payload = {**request.data} - if 'day' in payload: - day = payload['day'] - del payload['day'] + if "day" in payload: + day = payload["day"] + del payload["day"] cohort_log = CohortLog(item) cohort_log.log_day(payload, day) cohort_log.save() except Exception as e: if day is None: - day = 'current day' + day = "current day" else: - day = 'day ' + str(day) + day = "day " + str(day) - logger.exception(f'Error logging {day} into the cohort') + logger.exception(f"Error logging {day} into the cohort") raise ValidationException(str(e)) return Response(cohort_log.serialize()) @@ -1835,25 +1862,30 @@ def get(self, request, cohort_id=None): if cohort_id: cohort_user = CohortUser.objects.filter(user=request.user, cohort__id=cohort_id).first() if not cohort_user: - raise ValidationException('Cohort user not found', code=404, slug='cohort-user-not-found') - - return Response({ - 'cohort': { - 'id': cohort_user.cohort.id, - 'slug': cohort_user.cohort.slug, - }, - 'history_log': cohort_user.history_log, - }) + raise ValidationException("Cohort user not found", code=404, slug="cohort-user-not-found") + + return Response( + { + "cohort": { + "id": cohort_user.cohort.id, + "slug": cohort_user.cohort.slug, + }, + "history_log": cohort_user.history_log, + } + ) items = CohortUser.objects.filter(user=request.user) - data = [{ - 'cohort': { - 'id': cohort_user.cohort.id, - 'slug': cohort_user.cohort.slug, - }, - 'history_log': cohort_user.history_log, - } for cohort_user in items] + data = [ + { + "cohort": { + "id": cohort_user.cohort.id, + "slug": cohort_user.cohort.slug, + }, + "history_log": cohort_user.history_log, + } + for cohort_user in items + ] return Response(data) @@ -1867,64 +1899,78 @@ def post(self, request, cohort_id=None): lang = get_user_language(request) - cohort = Cohort.objects.filter(Q(ending_date=None, never_ends=True) - | Q(ending_date__gte=timezone.now(), never_ends=False), - id=cohort_id).first() + cohort = Cohort.objects.filter( + Q(ending_date=None, never_ends=True) | Q(ending_date__gte=timezone.now(), never_ends=False), id=cohort_id + ).first() if not cohort: - raise ValidationException(translation(lang, - en='Cohort not found', - es='Cohorte no encontrada', - slug='not-found'), - code=404) + raise ValidationException( + translation(lang, en="Cohort not found", es="Cohorte no encontrada", slug="not-found"), code=404 + ) if CohortUser.objects.filter(cohort__id=cohort_id, user__id=request.user.id).count(): - raise ValidationException(translation(lang, - en='You are already part of this cohort', - es='Ya eres parte de esta cohorte', - slug='already-joined-to-cohort'), - code=400) + raise ValidationException( + translation( + lang, + en="You are already part of this cohort", + es="Ya eres parte de esta cohorte", + slug="already-joined-to-cohort", + ), + code=400, + ) resource_available_now = Q(valid_until=None) | Q(valid_until__gte=timezone.now()) resource_belongs_to_user = Q(user__id=request.user.id, selected_cohort_set__cohorts=cohort) - excludes = 
Q(status='ERROR') | Q(status='CANCELED') | Q(status='PAYMENT_ISSUE') | Q(status='EXPIRED') + excludes = Q(status="ERROR") | Q(status="CANCELED") | Q(status="PAYMENT_ISSUE") | Q(status="EXPIRED") # it inherits from the an AbstractIOweYou, so you can apply polymorphism - resource = Subscription.objects.filter(resource_available_now, - resource_belongs_to_user).exclude(excludes).first() + resource = ( + Subscription.objects.filter(resource_available_now, resource_belongs_to_user).exclude(excludes).first() + ) if not resource: resource_available_now = Q(plan_expires_at__gte=timezone.now()) - resource = PlanFinancing.objects.filter(resource_available_now, - resource_belongs_to_user).exclude(excludes).first() + resource = ( + PlanFinancing.objects.filter(resource_available_now, resource_belongs_to_user).exclude(excludes).first() + ) if not resource: - raise ValidationException(translation( - lang, - en='Your current subscription does not include access to this cohort', - es='Tus subscripciones actuales no incluyen poder acceder a esta cohort', - slug='not-subscribed'), - code=400) + raise ValidationException( + translation( + lang, + en="Your current subscription does not include access to this cohort", + es="Tus subscripciones actuales no incluyen poder acceder a esta cohort", + slug="not-subscribed", + ), + code=400, + ) if resource.joined_cohorts.filter(id=cohort.id).count(): - raise ValidationException(translation(lang, - en='You have already joined to this cohort', - es='Ya te has unido a esta cohorte', - slug='already-joined'), - code=400) + raise ValidationException( + translation( + lang, + en="You have already joined to this cohort", + es="Ya te has unido a esta cohorte", + slug="already-joined", + ), + code=400, + ) if cohort.never_ends == False and resource.joined_cohorts.filter(never_ends=False).count() > 0: - raise ValidationException(translation( - lang, - en='You can\'t join to this cohort because you are already subscribed to another cohort', - es='No puedes unirte a esta cohorte porque ya estás suscrito a otra cohorte', - slug='already-joined-to-another-cohort'), - code=400) + raise ValidationException( + translation( + lang, + en="You can't join to this cohort because you are already subscribed to another cohort", + es="No puedes unirte a esta cohorte porque ya estás suscrito a otra cohorte", + slug="already-joined-to-another-cohort", + ), + code=400, + ) resource.joined_cohorts.add(cohort) - tasks.build_cohort_user.delay(cohort_id, request.user.id, 'STUDENT') - tasks.build_profile_academy.delay(cohort.academy.id, request.user.id, 'student') + tasks.build_cohort_user.delay(cohort_id, request.user.id, "STUDENT") + tasks.build_profile_academy.delay(cohort.academy.id, request.user.id, "student") serializer = GetAbstractIOweYouSerializer(resource, many=False) diff --git a/breathecode/asgi.py b/breathecode/asgi.py index ea2837143..b9bc862b6 100644 --- a/breathecode/asgi.py +++ b/breathecode/asgi.py @@ -12,6 +12,6 @@ from django.core.asgi import get_asgi_application -os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'breathecode.settings') +os.environ.setdefault("DJANGO_SETTINGS_MODULE", "breathecode.settings") application = get_asgi_application() diff --git a/breathecode/assessment/actions.py b/breathecode/assessment/actions.py index 59c680607..cdcb971dd 100644 --- a/breathecode/assessment/actions.py +++ b/breathecode/assessment/actions.py @@ -11,78 +11,95 @@ def validate_quiz_json(quiz, allow_override=False): - if 'info' not in quiz: - raise ValidationException('Quiz is missing info 
json property') + if "info" not in quiz: + raise ValidationException("Quiz is missing info json property") - if 'slug' not in quiz['info']: - raise ValidationException('Missing info.slug on quiz info') + if "slug" not in quiz["info"]: + raise ValidationException("Missing info.slug on quiz info") - _result = {'questions': []} + _result = {"questions": []} # We guarantee that "assessment" property will always be set to something (none or else) - _result['assessment'] = Assessment.objects.filter(slug=quiz['info']['slug']).first() - if not allow_override and _result['assessment']: + _result["assessment"] = Assessment.objects.filter(slug=quiz["info"]["slug"]).first() + if not allow_override and _result["assessment"]: raise ValidationException( f"There is already an assessment (maybe it's archived) with slug {quiz['info']['slug']}, rename your quiz info.slug or delete the previous assessment" ) - if 'id' in quiz: _result['id'] = quiz['id'] - elif 'id' in quiz['info']: _result['id'] = quiz['info']['id'] + if "id" in quiz: + _result["id"] = quiz["id"] + elif "id" in quiz["info"]: + _result["id"] = quiz["info"]["id"] - if 'questions' not in quiz: + if "questions" not in quiz: raise Exception('Missing "questions" property in quiz') - title = 'Untitled assessment' - if 'name' in quiz['info']: title = quiz['info']['name'] - if 'title' in quiz['info']: title = quiz['info']['title'] + title = "Untitled assessment" + if "name" in quiz["info"]: + title = quiz["info"]["name"] + if "title" in quiz["info"]: + title = quiz["info"]["title"] - _result['info'] = { - 'title': title, - 'slug': quiz['info']['slug'], + _result["info"] = { + "title": title, + "slug": quiz["info"]["slug"], } _index = 0 - for question in quiz['questions']: + for question in quiz["questions"]: _index += 1 - _question = {'id': question['id'] if 'id' in question else None} + _question = {"id": question["id"] if "id" in question else None} - title = '' - if 'q' in question: title = question['q'] - elif 'title' in question: title = question['title'] - else: raise Exception(f'Missing "title" property in quiz question #{_index}') + title = "" + if "q" in question: + title = question["q"] + elif "title" in question: + title = question["title"] + else: + raise Exception(f'Missing "title" property in quiz question #{_index}') - _question['title'] = title + _question["title"] = title options = [] - if 'a' in question: options = question['a'] - elif 'answers' in question: options = question['answers'] - elif 'options' in question: options = question['options'] - else: raise Exception('Missing "options" property in quiz question') + if "a" in question: + options = question["a"] + elif "answers" in question: + options = question["answers"] + elif "options" in question: + options = question["options"] + else: + raise Exception('Missing "options" property in quiz question') - _question['options'] = [] + _question["options"] = [] o_index = 0 for option in options: o_index += 1 _id = None - if 'id' in option: - _id = option['id'] + if "id" in option: + _id = option["id"] - title = 'Untitled option' - if 'option' in option: title = option['option'] - elif 'title' in option: title = option['title'] - else: raise Exception(f'Missing "title" property in option {str(o_index)}') + title = "Untitled option" + if "option" in option: + title = option["option"] + elif "title" in option: + title = option["title"] + else: + raise Exception(f'Missing "title" property in option {str(o_index)}') score = 0 - if 'correct' in option: score = option['correct'] - elif 
'score' in option: score = option['score']
-            else: raise Exception(f'Missing "score" property in option {str(o_index)}')
+            if "correct" in option:
+                score = option["correct"]
+            elif "score" in option:
+                score = option["score"]
+            else:
+                raise Exception(f'Missing "score" property in option {str(o_index)}')

-            _question['options'].append({'id': _id, 'title': title, 'score': int(score)})
+            _question["options"].append({"id": _id, "title": title, "score": int(score)})

-        _result['questions'].append(_question)
+        _result["questions"].append(_question)

     return _result


@@ -90,34 +107,38 @@ def validate_quiz_json(quiz, allow_override=False):
 def create_from_asset(asset, allow_override=False):

     if asset.academy is None:
-        raise ValidationException(f'Asset {asset.slug} has not academy associated')
+        raise ValidationException(f"Asset {asset.slug} has no academy associated")

     if asset.assessment is not None and asset.assessment.asset_set.count() > 1:
-        associated_assets = ','.join(asset.assessment.asset_set.all())
-        raise ValidationException('Assessment has more then one asset associated, please choose only one: ' +
-                                  associated_assets)
+        associated_assets = ",".join(asset.assessment.asset_set.all())
+        raise ValidationException(
+            "Assessment has more than one asset associated, please choose only one: " + associated_assets
+        )

     quiz = validate_quiz_json(asset.config, allow_override)

     if asset.assessment is None:

-        a = quiz['assessment']
+        a = quiz["assessment"]
         if not a:
-            a = Assessment.objects.create(title=quiz['info']['title'],
-                                          lang=asset.lang,
-                                          slug=quiz['info']['slug'],
-                                          academy=asset.academy,
-                                          author=asset.author)
+            a = Assessment.objects.create(
+                title=quiz["info"]["title"],
+                lang=asset.lang,
+                slug=quiz["info"]["slug"],
+                academy=asset.academy,
+                author=asset.author,
+            )

         if a is not None and a.question_set is not None and a.question_set.count() > 0:
             raise ValidationException(
-                'Assessment already has questions, only empty assessments can by created from an asset')
+                "Assessment already has questions, only empty assessments can be created from an asset"
+            )

         a.save()
-        for question in quiz['questions']:
+        for question in quiz["questions"]:
             q = None
-            if question['id']:
-                q = Question.filter(id=question['id']).first()
+            if question["id"]:
+                q = Question.filter(id=question["id"]).first()

                 if not q:
                     raise ValidationException(f"Question with id {question['id']} not found for quiz {q.id}")

@@ -125,24 +146,24 @@ def create_from_asset(asset, allow_override=False):
                 q = Question(
                     lang=asset.lang,
                     assessment=a,
-                    question_type='SELECT',
+                    question_type="SELECT",
                 )

-            q.title = question['title']
+            q.title = question["title"]
             q.save()

-            for option in question['options']:
+            for option in question["options"]:
                 o = None
-                if option['id']:
-                    o = Option.filter(id=option['id']).first()
+                if option["id"]:
+                    o = Option.filter(id=option["id"]).first()

                     if not o:
                         raise ValidationException(f"Option with id {option['id']} not found for question {q.id}")

                 if not o:
                     o = Option(question=q)

-                o.title = option['title']
-                o.score = option['score']
+                o.title = option["title"]
+                o.score = option["score"]
                 o.save()

     asset.assessment = a

@@ -155,14 +176,14 @@ def send_assestment(user_assessment):

     token, created = Token.get_or_create(user_assessment.user, hours_length=48)

     data = {
-        'SUBJECT': user_assessment.assessment.title,
-        'LINK': f'https://assessment.4geeks.com/{user_assessment.id}?token={token.key}'
+        "SUBJECT": user_assessment.assessment.title,
+        "LINK": f"https://assessment.4geeks.com/{user_assessment.id}?token={token.key}",
     }

-    send_email_message('assessment', user_assessment.user.email, data)
+    send_email_message("assessment", user_assessment.user.email, data)

-    logger.info(f'Assessment was sent for user: {str(user_assessment.user.id)}')
+    logger.info(f"Assessment was sent for user: {str(user_assessment.user.id)}")

-    user_assessment.status = 'SENT'
+    user_assessment.status = "SENT"
     user_assessment.save()

     return True
diff --git a/breathecode/assessment/admin.py b/breathecode/assessment/admin.py
index dabd290e8..34e93a682 100644
--- a/breathecode/assessment/admin.py
+++ b/breathecode/assessment/admin.py
@@ -22,13 +22,13 @@
 logger = logging.getLogger(__name__)


-@admin.display(description='Send General Assessment')
+@admin.display(description="Send General Assessment")
 def send_bulk_assesment(modeladmin, request, queryset):
     user = queryset.all()
     try:
         for u in user:
             send_assestment(u)
-        messages.success(request, message='Assessment was successfully sent')
+        messages.success(request, message="Assessment was successfully sent")
     except Exception as e:
         logger.fatal(str(e))
         messages.error(request, message=str(e))
@@ -36,15 +36,15 @@

 @admin.register(UserProxy)
 class UserAdmin(UserAdmin):
-    list_display = ('username', 'email', 'first_name', 'last_name')
+    list_display = ("username", "email", "first_name", "last_name")


 # Register your models here.
 @admin.register(Assessment)
 class AssessmentAdmin(admin.ModelAdmin):
-    search_fields = ['title', 'slug', 'academy__slug']
-    list_display = ('slug', 'lang', 'title', 'academy', 'created_at')
-    list_filter = ['private', 'academy__slug']
+    search_fields = ["title", "slug", "academy__slug"]
+    list_display = ("slug", "lang", "title", "academy", "created_at")
+    list_filter = ["private", "academy__slug"]

     # def entity(self, object):
     #     return f"{object.entity_slug} (id:{str(object.entity_id)})"
@@ -52,75 +52,77 @@ class AssessmentAdmin(admin.ModelAdmin):

 # Register your models here.
 @admin.register(Question)
 class QuestionAdmin(admin.ModelAdmin):
-    search_fields = ['title', 'assessment__title']
-    list_display = ['title', 'is_deleted', 'position', 'lang', 'assessment', 'question_type']
-    list_filter = ['lang', 'question_type', 'is_deleted']
+    search_fields = ["title", "assessment__title"]
+    list_display = ["title", "is_deleted", "position", "lang", "assessment", "question_type"]
+    list_filter = ["lang", "question_type", "is_deleted"]


 # Register your models here.
@admin.register(Option) class OptionAdmin(admin.ModelAdmin): - search_fields = ['title', 'question__assessment__title'] - list_display = ['id', 'title', 'is_deleted', 'position', 'lang', 'score', 'question'] - list_filter = ['lang', 'is_deleted'] + search_fields = ["title", "question__assessment__title"] + list_display = ["id", "title", "is_deleted", "position", "lang", "score", "question"] + list_filter = ["lang", "is_deleted"] def change_status_answered(modeladmin, request, queryset): items = queryset.all() for i in items: - i.status = 'ANSWERED' + i.status = "ANSWERED" i.save() @admin.register(UserAssessment) class UserAssessmentAdmin(admin.ModelAdmin): - search_fields = ['title', 'question__assessment__title'] - readonly_fields = ('token', ) - list_display = ['id', 'title', 'current_status', 'lang', 'owner', 'total_score', 'assessment', 'academy'] - list_filter = ['lang', 'status', 'academy'] - actions = [change_status_answered] + change_field(['DRAFT', 'SENT', 'ERROR', 'EXPIRED'], name='status') + search_fields = ["title", "question__assessment__title"] + readonly_fields = ("token",) + list_display = ["id", "title", "current_status", "lang", "owner", "total_score", "assessment", "academy"] + list_filter = ["lang", "status", "academy"] + actions = [change_status_answered] + change_field(["DRAFT", "SENT", "ERROR", "EXPIRED"], name="status") def current_status(self, obj): colors = { - 'DRAFT': 'bg-secondary', - 'SENT': 'bg-warning', - 'ANSWERED': 'bg-success', - 'ERROR': 'bg-error', - 'EXPIRED': 'bg-warning', - None: 'bg-error', + "DRAFT": "bg-secondary", + "SENT": "bg-warning", + "ANSWERED": "bg-success", + "ERROR": "bg-error", + "EXPIRED": "bg-warning", + None: "bg-error", } def from_status(s): if s in colors: return colors[s] - return '' + return "" - status = 'No status' + status = "No status" if obj.status_text is not None: - status = re.sub(r'[^\w\._\-]', ' ', obj.status_text) - return format_html(f""" + status = re.sub(r"[^\w\._\-]", " ", obj.status_text) + return format_html( + f"""
-
{obj.status}
{status}
""") + """ + ) @admin.register(AssessmentThreshold) class UserAssessmentThresholdAdmin(admin.ModelAdmin): - search_fields = ['assessment__slug', 'assessment__title', 'tags'] - list_display = ['id', 'title', 'score_threshold', 'assessment', 'tags'] - list_filter = ['assessment__slug'] + search_fields = ["assessment__slug", "assessment__title", "tags"] + list_display = ["id", "title", "score_threshold", "assessment", "tags"] + list_filter = ["assessment__slug"] actions = [] @admin.register(Answer) class AnswerAdmin(admin.ModelAdmin): - search_fields = ['user_assessment__owner', 'user_assessment__title'] - list_display = ['id', 'user_assessment', 'question', 'option', 'value'] - list_filter = ['user_assessment__assessment__slug'] + search_fields = ["user_assessment__owner", "user_assessment__title"] + list_display = ["id", "user_assessment", "question", "option", "value"] + list_filter = ["user_assessment__assessment__slug"] @admin.register(AssessmentLayout) class AssessmentLayoutAdmin(admin.ModelAdmin): - search_fields = ['slug'] - list_display = ['id', 'slug', 'academy'] - list_filter = ['academy'] + search_fields = ["slug"] + list_display = ["id", "slug", "academy"] + list_filter = ["academy"] diff --git a/breathecode/assessment/apps.py b/breathecode/assessment/apps.py index 2f37516c9..f931668d0 100644 --- a/breathecode/assessment/apps.py +++ b/breathecode/assessment/apps.py @@ -2,7 +2,7 @@ class AssessmentConfig(AppConfig): - name = 'breathecode.assessment' + name = "breathecode.assessment" def ready(self): from . import receivers # noqa: F401 diff --git a/breathecode/assessment/management/commands/close_open_user_assessments.py b/breathecode/assessment/management/commands/close_open_user_assessments.py index c69fc179a..ede9a4183 100644 --- a/breathecode/assessment/management/commands/close_open_user_assessments.py +++ b/breathecode/assessment/management/commands/close_open_user_assessments.py @@ -4,16 +4,16 @@ from breathecode.assessment.models import UserAssessment -HOST = os.environ.get('OLD_BREATHECODE_API') -DATETIME_FORMAT = '%Y-%m-%d' +HOST = os.environ.get("OLD_BREATHECODE_API") +DATETIME_FORMAT = "%Y-%m-%d" class Command(BaseCommand): - help = 'Close user assessments and totalize scores' + help = "Close user assessments and totalize scores" def handle(self, *args, **options): - unfinished_ua = UserAssessment.objects.filter(finished_at__isnull=True, status='SENT') + unfinished_ua = UserAssessment.objects.filter(finished_at__isnull=True, status="SENT") total = unfinished_ua.count() - unfinished_ua.update(status='ERROR', status_text='Unfinished user assessment') - self.stdout.write(self.style.SUCCESS(f'{total} user assessments automatically closed with error')) + unfinished_ua.update(status="ERROR", status_text="Unfinished user assessment") + self.stdout.write(self.style.SUCCESS(f"{total} user assessments automatically closed with error")) diff --git a/breathecode/assessment/migrations/0001_initial.py b/breathecode/assessment/migrations/0001_initial.py index 4fe55d7ff..1de3e012e 100644 --- a/breathecode/assessment/migrations/0001_initial.py +++ b/breathecode/assessment/migrations/0001_initial.py @@ -12,177 +12,241 @@ class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ('admissions', '0038_alter_cohort_syllabus_version'), - ('auth', '0012_alter_user_first_name_max_length'), + ("admissions", "0038_alter_cohort_syllabus_version"), + ("auth", "0012_alter_user_first_name_max_length"), ] operations = [ 
migrations.CreateModel( - name='Assessment', + name="Assessment", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('slug', models.SlugField(max_length=200, unique=True)), - ('title', models.CharField(blank=True, max_length=255)), - ('lang', models.CharField(blank=True, default='en', max_length=3)), - ('score_threshold', - models.IntegerField(blank=True, - default=None, - help_text='You can set a threshold to determine if the user score is successfull', - null=True)), - ('private', models.BooleanField(default=False)), - ('comment', models.CharField(blank=True, default=None, max_length=255, null=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('academy', - models.ForeignKey(blank=True, - default=None, - help_text='Not all assesments are triggered by academies', - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='admissions.academy')), - ('author', - models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to=settings.AUTH_USER_MODEL)), - ('original', - models.ForeignKey( - blank=True, - default=None, - help_text='The original translation (will only be set if the quiz is a translation of another one)', - null=True, - on_delete=django.db.models.deletion.CASCADE, - related_name='translations', - to='assessment.assessment')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("slug", models.SlugField(max_length=200, unique=True)), + ("title", models.CharField(blank=True, max_length=255)), + ("lang", models.CharField(blank=True, default="en", max_length=3)), + ( + "score_threshold", + models.IntegerField( + blank=True, + default=None, + help_text="You can set a threshold to determine if the user score is successfull", + null=True, + ), + ), + ("private", models.BooleanField(default=False)), + ("comment", models.CharField(blank=True, default=None, max_length=255, null=True)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ( + "academy", + models.ForeignKey( + blank=True, + default=None, + help_text="Not all assesments are triggered by academies", + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="admissions.academy", + ), + ), + ( + "author", + models.ForeignKey( + blank=True, + default=None, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to=settings.AUTH_USER_MODEL, + ), + ), + ( + "original", + models.ForeignKey( + blank=True, + default=None, + help_text="The original translation (will only be set if the quiz is a translation of another one)", + null=True, + on_delete=django.db.models.deletion.CASCADE, + related_name="translations", + to="assessment.assessment", + ), + ), ], ), migrations.CreateModel( - name='UserProxy', + name="UserProxy", fields=[], options={ - 'proxy': True, - 'indexes': [], - 'constraints': [], + "proxy": True, + "indexes": [], + "constraints": [], }, - bases=('auth.user', ), + bases=("auth.user",), managers=[ - ('objects', django.contrib.auth.models.UserManager()), + ("objects", django.contrib.auth.models.UserManager()), ], ), migrations.CreateModel( - name='UserAssessment', + name="UserAssessment", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('title', models.CharField(blank=True, max_length=200)), - ('lang', models.CharField(blank=True, default='en', 
max_length=3)), - ('total_score', models.FloatField(help_text='Total sum of all chosen options in the assesment')), - ('opened', models.BooleanField(default=False)), - ('status', - models.CharField(choices=[('DRAFT', 'DRAFT'), ('SENT', 'Sent'), ('EXPIRED', 'Expired')], - default='DRAFT', - max_length=15)), - ('comment', models.CharField(blank=True, default=None, max_length=255, null=True)), - ('started_at', models.DateTimeField(blank=True, default=None, null=True)), - ('finished_at', models.DateTimeField(blank=True, default=None, null=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('academy', - models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='admissions.academy')), - ('assessment', - models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='assessment.assessment')), - ('owner', - models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to=settings.AUTH_USER_MODEL)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("title", models.CharField(blank=True, max_length=200)), + ("lang", models.CharField(blank=True, default="en", max_length=3)), + ("total_score", models.FloatField(help_text="Total sum of all chosen options in the assesment")), + ("opened", models.BooleanField(default=False)), + ( + "status", + models.CharField( + choices=[("DRAFT", "DRAFT"), ("SENT", "Sent"), ("EXPIRED", "Expired")], + default="DRAFT", + max_length=15, + ), + ), + ("comment", models.CharField(blank=True, default=None, max_length=255, null=True)), + ("started_at", models.DateTimeField(blank=True, default=None, null=True)), + ("finished_at", models.DateTimeField(blank=True, default=None, null=True)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ( + "academy", + models.ForeignKey( + blank=True, + default=None, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="admissions.academy", + ), + ), + ( + "assessment", + models.ForeignKey( + blank=True, + default=None, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="assessment.assessment", + ), + ), + ( + "owner", + models.ForeignKey( + blank=True, + default=None, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to=settings.AUTH_USER_MODEL, + ), + ), ], ), migrations.CreateModel( - name='Question', + name="Question", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('title', models.TextField()), - ('help_text', models.CharField(blank=True, default=None, max_length=255, null=True)), - ('lang', models.CharField(blank=True, default='en', max_length=3)), - ('question_type', - models.CharField(choices=[('TEXT', 'Text'), ('NUMBER', 'Number'), ('SELECT', 'Select'), - ('SELECT_MULTIPLE', 'Select Multiple')], - default='SELECT', - max_length=15)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('assessment', - models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='assessment.assessment')), - ('author', - models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to=settings.AUTH_USER_MODEL)), + ("id", models.AutoField(auto_created=True, 
primary_key=True, serialize=False, verbose_name="ID")), + ("title", models.TextField()), + ("help_text", models.CharField(blank=True, default=None, max_length=255, null=True)), + ("lang", models.CharField(blank=True, default="en", max_length=3)), + ( + "question_type", + models.CharField( + choices=[ + ("TEXT", "Text"), + ("NUMBER", "Number"), + ("SELECT", "Select"), + ("SELECT_MULTIPLE", "Select Multiple"), + ], + default="SELECT", + max_length=15, + ), + ), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ( + "assessment", + models.ForeignKey( + blank=True, + default=None, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="assessment.assessment", + ), + ), + ( + "author", + models.ForeignKey( + blank=True, + default=None, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to=settings.AUTH_USER_MODEL, + ), + ), ], ), migrations.CreateModel( - name='Option', + name="Option", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('title', models.TextField()), - ('help_text', models.CharField(blank=True, default=None, max_length=255, null=True)), - ('lang', models.CharField(blank=True, default='en', max_length=3)), - ('score', - models.FloatField( - help_text= - 'If picked, this value will add up to the total score of the assesment, you can have negative or fractional values' - )), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('question', - models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='assessment.question')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("title", models.TextField()), + ("help_text", models.CharField(blank=True, default=None, max_length=255, null=True)), + ("lang", models.CharField(blank=True, default="en", max_length=3)), + ( + "score", + models.FloatField( + help_text="If picked, this value will add up to the total score of the assesment, you can have negative or fractional values" + ), + ), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ( + "question", + models.ForeignKey( + blank=True, + default=None, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="assessment.question", + ), + ), ], ), migrations.CreateModel( - name='Answer', + name="Answer", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('value', models.TextField()), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('option', - models.ForeignKey(blank=True, - default=None, - help_text='Will be null if open question, no options to pick', - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='assessment.option')), - ('question', - models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='assessment.question')), - ('user_assesment', - models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='assessment.userassessment')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("value", models.TextField()), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", 
models.DateTimeField(auto_now=True)), + ( + "option", + models.ForeignKey( + blank=True, + default=None, + help_text="Will be null if open question, no options to pick", + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="assessment.option", + ), + ), + ( + "question", + models.ForeignKey( + blank=True, + default=None, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="assessment.question", + ), + ), + ( + "user_assesment", + models.ForeignKey( + blank=True, + default=None, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="assessment.userassessment", + ), + ), ], ), ] diff --git a/breathecode/assessment/migrations/0002_auto_20221107_2155.py b/breathecode/assessment/migrations/0002_auto_20221107_2155.py index ec3034816..287c3c633 100644 --- a/breathecode/assessment/migrations/0002_auto_20221107_2155.py +++ b/breathecode/assessment/migrations/0002_auto_20221107_2155.py @@ -7,49 +7,58 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0047_merge_20220924_0611'), - ('assessment', '0001_initial'), + ("admissions", "0047_merge_20220924_0611"), + ("assessment", "0001_initial"), ] operations = [ migrations.RemoveField( - model_name='assessment', - name='score_threshold', + model_name="assessment", + name="score_threshold", ), migrations.AddField( - model_name='assessment', - name='next', + model_name="assessment", + name="next", field=models.URLField(default=False), ), migrations.CreateModel( - name='AssessmentThreshold', + name="AssessmentThreshold", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('score_threshold', - models.IntegerField( - help_text='You can set a threshold to determine if the user score is successfull')), - ('success_message', models.TextField(blank=True, default=None, null=True)), - ('fail_message', models.TextField(blank=True, default=None, null=True)), - ('success_next', models.URLField(blank=True, default=None, null=True)), - ('fail_next', models.URLField(blank=True, default=None, null=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('academy', - models.ForeignKey( - blank=True, - default=None, - help_text= - 'If null it will be default, but if specified, the only this academy will have this threshold', - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='admissions.academy')), - ('assessment', - models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - related_name='score_thresholds', - to='assessment.assessment')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "score_threshold", + models.IntegerField( + help_text="You can set a threshold to determine if the user score is successfull" + ), + ), + ("success_message", models.TextField(blank=True, default=None, null=True)), + ("fail_message", models.TextField(blank=True, default=None, null=True)), + ("success_next", models.URLField(blank=True, default=None, null=True)), + ("fail_next", models.URLField(blank=True, default=None, null=True)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ( + "academy", + models.ForeignKey( + blank=True, + default=None, + help_text="If null it will be default, but if specified, the only this academy will have this threshold", + null=True, + on_delete=django.db.models.deletion.CASCADE, + 
to="admissions.academy", + ), + ), + ( + "assessment", + models.ForeignKey( + blank=True, + default=None, + null=True, + on_delete=django.db.models.deletion.CASCADE, + related_name="score_thresholds", + to="assessment.assessment", + ), + ), ], ), ] diff --git a/breathecode/assessment/migrations/0003_auto_20221109_1529.py b/breathecode/assessment/migrations/0003_auto_20221109_1529.py index c711cd35d..90c2314a0 100644 --- a/breathecode/assessment/migrations/0003_auto_20221109_1529.py +++ b/breathecode/assessment/migrations/0003_auto_20221109_1529.py @@ -6,19 +6,20 @@ class Migration(migrations.Migration): dependencies = [ - ('assessment', '0002_auto_20221107_2155'), + ("assessment", "0002_auto_20221107_2155"), ] operations = [ migrations.AddField( - model_name='assessment', - name='is_instant_feedback', - field=models.BooleanField(default=True, - help_text='If true, users will know immediately if their answer was correct'), + model_name="assessment", + name="is_instant_feedback", + field=models.BooleanField( + default=True, help_text="If true, users will know immediately if their answer was correct" + ), ), migrations.AlterField( - model_name='question', - name='lang', - field=models.CharField(blank=True, default='us', max_length=3), + model_name="question", + name="lang", + field=models.CharField(blank=True, default="us", max_length=3), ), ] diff --git a/breathecode/assessment/migrations/0004_option_is_deleted_alter_assessment_next.py b/breathecode/assessment/migrations/0004_option_is_deleted_alter_assessment_next.py index c85682687..eac376cf7 100644 --- a/breathecode/assessment/migrations/0004_option_is_deleted_alter_assessment_next.py +++ b/breathecode/assessment/migrations/0004_option_is_deleted_alter_assessment_next.py @@ -6,20 +6,21 @@ class Migration(migrations.Migration): dependencies = [ - ('assessment', '0003_auto_20221109_1529'), + ("assessment", "0003_auto_20221109_1529"), ] operations = [ migrations.AddField( - model_name='option', - name='is_deleted', + model_name="option", + name="is_deleted", field=models.BooleanField( default=False, - help_text='Options with collected answers cannot not be deleted, they will have this bullet true'), + help_text="Options with collected answers cannot not be deleted, they will have this bullet true", + ), ), migrations.AlterField( - model_name='assessment', - name='next', + model_name="assessment", + name="next", field=models.URLField(blank=True, default=None, null=True), ), ] diff --git a/breathecode/assessment/migrations/0005_option_position_question_position.py b/breathecode/assessment/migrations/0005_option_position_question_position.py index 503aa30f6..6189bb7b3 100644 --- a/breathecode/assessment/migrations/0005_option_position_question_position.py +++ b/breathecode/assessment/migrations/0005_option_position_question_position.py @@ -6,18 +6,18 @@ class Migration(migrations.Migration): dependencies = [ - ('assessment', '0004_option_is_deleted_alter_assessment_next'), + ("assessment", "0004_option_is_deleted_alter_assessment_next"), ] operations = [ migrations.AddField( - model_name='option', - name='position', + model_name="option", + name="position", field=models.IntegerField(blank=True, default=None, null=True), ), migrations.AddField( - model_name='question', - name='position', + model_name="question", + name="position", field=models.IntegerField(blank=True, default=None, null=True), ), ] diff --git a/breathecode/assessment/migrations/0006_question_is_deleted.py b/breathecode/assessment/migrations/0006_question_is_deleted.py index 
8d0c1173a..8fe7f2c56 100644 --- a/breathecode/assessment/migrations/0006_question_is_deleted.py +++ b/breathecode/assessment/migrations/0006_question_is_deleted.py @@ -6,15 +6,16 @@ class Migration(migrations.Migration): dependencies = [ - ('assessment', '0005_option_position_question_position'), + ("assessment", "0005_option_position_question_position"), ] operations = [ migrations.AddField( - model_name='question', - name='is_deleted', + model_name="question", + name="is_deleted", field=models.BooleanField( default=False, - help_text='Question collected answers cannot not be deleted, they will have this bullet true'), + help_text="Question collected answers cannot not be deleted, they will have this bullet true", + ), ), ] diff --git a/breathecode/assessment/migrations/0007_rename_user_assesment_answer_user_assessment_and_more.py b/breathecode/assessment/migrations/0007_rename_user_assesment_answer_user_assessment_and_more.py index c23982597..25b2a22cd 100644 --- a/breathecode/assessment/migrations/0007_rename_user_assesment_answer_user_assessment_and_more.py +++ b/breathecode/assessment/migrations/0007_rename_user_assesment_answer_user_assessment_and_more.py @@ -10,115 +10,131 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0064_academy_legal_name'), - ('assessment', '0006_question_is_deleted'), + ("admissions", "0064_academy_legal_name"), + ("assessment", "0006_question_is_deleted"), migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.RenameField( - model_name='answer', - old_name='user_assesment', - new_name='user_assessment', + model_name="answer", + old_name="user_assesment", + new_name="user_assessment", ), migrations.AddField( - model_name='assessment', - name='is_archived', + model_name="assessment", + name="is_archived", field=models.BooleanField( default=False, - help_text='If assessments have answers, they cannot be deleted but will be archived instead'), + help_text="If assessments have answers, they cannot be deleted but will be archived instead", + ), ), migrations.AddField( - model_name='assessment', - name='max_session_duration', - field=models.DurationField(default=datetime.timedelta(seconds=1800), - help_text='No more answers will be accepted after X amount of minutes'), + model_name="assessment", + name="max_session_duration", + field=models.DurationField( + default=datetime.timedelta(seconds=1800), + help_text="No more answers will be accepted after X amount of minutes", + ), ), migrations.AddField( - model_name='userassessment', - name='conversion_info', - field=models.JSONField(blank=True, - default=None, - help_text='UTMs and other conversion information.', - null=True), + model_name="userassessment", + name="conversion_info", + field=models.JSONField( + blank=True, default=None, help_text="UTMs and other conversion information.", null=True + ), ), migrations.AddField( - model_name='userassessment', - name='has_marketing_consent', + model_name="userassessment", + name="has_marketing_consent", field=models.BooleanField(default=False), ), migrations.AddField( - model_name='userassessment', - name='owner_email', - field=models.CharField(blank=True, - default=None, - help_text='If there is not registered owner we can use the email as reference', - max_length=150, - null=True), + model_name="userassessment", + name="owner_email", + field=models.CharField( + blank=True, + default=None, + help_text="If there is not registered owner we can use the email as reference", + max_length=150, + null=True, + ), ), 
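# A quick orientation sketch of the schema these migrations build up, based only on the
# fields and foreign keys visible in this diff (model and related names as in the migrations):
#
#   Assessment --< Question --< Option          (Option.score feeds the totals)
#   Assessment --< UserAssessment --< Answer    (Answer references its Question and, for
#                                                non-text questions, the chosen Option)
#
# A hedged traversal example under those assumptions; open-text answers carry no Option,
# so they are skipped here just as they contribute no Option score.
def option_scores(user_assessment):
    """Collect the Option scores chosen in one user assessment session."""
    return [a.option.score for a in user_assessment.answer_set.all() if a.option is not None]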
migrations.AddField( - model_name='userassessment', - name='owner_phone', + model_name="userassessment", + name="owner_phone", field=models.CharField( blank=True, - default='', + default="", max_length=17, validators=[ django.core.validators.RegexValidator( message="Phone number must be entered in the format: '+999999999'. Up to 15 digits allowed.", - regex='^\\+?1?\\d{9,15}$') - ]), + regex="^\\+?1?\\d{9,15}$", + ) + ], + ), ), migrations.AddField( - model_name='userassessment', - name='status_text', + model_name="userassessment", + name="status_text", field=models.TextField(blank=True, default=None, null=True), ), migrations.AddField( - model_name='userassessment', - name='token', - field=models.CharField(default=None, - help_text='Auto-generated when a user assignment is created', - max_length=255, - unique=True), + model_name="userassessment", + name="token", + field=models.CharField( + default=None, help_text="Auto-generated when a user assignment is created", max_length=255, unique=True + ), preserve_default=False, ), migrations.AlterField( - model_name='userassessment', - name='owner', - field=models.ForeignKey(blank=True, - default=None, - help_text='How is answering the assessment', - null=True, - on_delete=django.db.models.deletion.CASCADE, - to=settings.AUTH_USER_MODEL), + model_name="userassessment", + name="owner", + field=models.ForeignKey( + blank=True, + default=None, + help_text="How is answering the assessment", + null=True, + on_delete=django.db.models.deletion.CASCADE, + to=settings.AUTH_USER_MODEL, + ), ), migrations.AlterField( - model_name='userassessment', - name='status', - field=models.CharField(choices=[('DRAFT', 'Draft'), ('SENT', 'Sent'), ('ERROR', 'Error'), - ('EXPIRED', 'Expired')], - default='DRAFT', - max_length=15), + model_name="userassessment", + name="status", + field=models.CharField( + choices=[("DRAFT", "Draft"), ("SENT", "Sent"), ("ERROR", "Error"), ("EXPIRED", "Expired")], + default="DRAFT", + max_length=15, + ), ), migrations.CreateModel( - name='AssessmentLayout', + name="AssessmentLayout", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('slug', models.SlugField(max_length=200, unique=True)), - ('additional_styles', - models.TextField(blank=True, - default=None, - help_text='This stylesheet will be included in the assessment if specified', - null=True)), - ('variables', - models.JSONField(blank=True, - default=None, - help_text='Additional params to be passed into the assessment content', - null=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('academy', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='admissions.academy')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("slug", models.SlugField(max_length=200, unique=True)), + ( + "additional_styles", + models.TextField( + blank=True, + default=None, + help_text="This stylesheet will be included in the assessment if specified", + null=True, + ), + ), + ( + "variables", + models.JSONField( + blank=True, + default=None, + help_text="Additional params to be passed into the assessment content", + null=True, + ), + ), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("academy", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="admissions.academy")), ], ), ] diff --git 
a/breathecode/assessment/migrations/0008_alter_answer_option_alter_userassessment_status.py b/breathecode/assessment/migrations/0008_alter_answer_option_alter_userassessment_status.py index f723506cf..6bccf0345 100644 --- a/breathecode/assessment/migrations/0008_alter_answer_option_alter_userassessment_status.py +++ b/breathecode/assessment/migrations/0008_alter_answer_option_alter_userassessment_status.py @@ -7,27 +7,35 @@ class Migration(migrations.Migration): dependencies = [ - ('assessment', '0007_rename_user_assesment_answer_user_assessment_and_more'), + ("assessment", "0007_rename_user_assesment_answer_user_assessment_and_more"), ] operations = [ migrations.AlterField( - model_name='answer', - name='option', + model_name="answer", + name="option", field=models.ForeignKey( blank=True, default=None, - help_text='Will be null if open question, no options to pick. Or if option was deleted historically', + help_text="Will be null if open question, no options to pick. Or if option was deleted historically", null=True, on_delete=django.db.models.deletion.SET_NULL, - to='assessment.option'), + to="assessment.option", + ), ), migrations.AlterField( - model_name='userassessment', - name='status', - field=models.CharField(choices=[('DRAFT', 'Draft'), ('SENT', 'Sent'), ('ANSWERED', 'Answered'), - ('ERROR', 'Error'), ('EXPIRED', 'Expired')], - default='DRAFT', - max_length=15), + model_name="userassessment", + name="status", + field=models.CharField( + choices=[ + ("DRAFT", "Draft"), + ("SENT", "Sent"), + ("ANSWERED", "Answered"), + ("ERROR", "Error"), + ("EXPIRED", "Expired"), + ], + default="DRAFT", + max_length=15, + ), ), ] diff --git a/breathecode/assessment/migrations/0009_assessmentthreshold_title.py b/breathecode/assessment/migrations/0009_assessmentthreshold_title.py index 89183db30..cb866a640 100644 --- a/breathecode/assessment/migrations/0009_assessmentthreshold_title.py +++ b/breathecode/assessment/migrations/0009_assessmentthreshold_title.py @@ -6,17 +6,15 @@ class Migration(migrations.Migration): dependencies = [ - ('assessment', '0008_alter_answer_option_alter_userassessment_status'), + ("assessment", "0008_alter_answer_option_alter_userassessment_status"), ] operations = [ migrations.AddField( - model_name='assessmentthreshold', - name='title', - field=models.CharField(blank=True, - default=None, - help_text='Title is good for internal use', - max_length=255, - null=True), + model_name="assessmentthreshold", + name="title", + field=models.CharField( + blank=True, default=None, help_text="Title is good for internal use", max_length=255, null=True + ), ), ] diff --git a/breathecode/assessment/migrations/0010_assessmentthreshold_tags.py b/breathecode/assessment/migrations/0010_assessmentthreshold_tags.py index 31ad23af3..58672a4b6 100644 --- a/breathecode/assessment/migrations/0010_assessmentthreshold_tags.py +++ b/breathecode/assessment/migrations/0010_assessmentthreshold_tags.py @@ -6,19 +6,19 @@ class Migration(migrations.Migration): dependencies = [ - ('assessment', '0009_assessmentthreshold_title'), + ("assessment", "0009_assessmentthreshold_title"), ] operations = [ migrations.AddField( - model_name='assessmentthreshold', - name='tags', + model_name="assessmentthreshold", + name="tags", field=models.CharField( blank=True, default=None, - help_text= - 'Ideal to group thresholds under a taxonomy, that way you can have several groups of thresholds for the same quiz', + help_text="Ideal to group thresholds under a taxonomy, that way you can have several groups of thresholds for the 
same quiz", max_length=255, - null=True), + null=True, + ), ), ] diff --git a/breathecode/assessment/models.py b/breathecode/assessment/models.py index a92cbb566..cc0fa6158 100644 --- a/breathecode/assessment/models.py +++ b/breathecode/assessment/models.py @@ -10,7 +10,7 @@ from . import signals -__all__ = ['UserProxy', 'Assessment', 'Question', 'Option', 'UserAssessment', 'Answer'] +__all__ = ["UserProxy", "Assessment", "Question", "Option", "UserAssessment", "Answer"] class UserProxy(User): @@ -27,37 +27,43 @@ def __init__(self, *args, **kwargs): slug = models.SlugField(max_length=200, unique=True) title = models.CharField(max_length=255, blank=True) - lang = models.CharField(max_length=3, blank=True, default='en') + lang = models.CharField(max_length=3, blank=True, default="en") - max_session_duration = models.DurationField(default=timedelta(minutes=30), - help_text='No more answers will be accepted after X amount of minutes') + max_session_duration = models.DurationField( + default=timedelta(minutes=30), help_text="No more answers will be accepted after X amount of minutes" + ) - academy = models.ForeignKey(Academy, - on_delete=models.CASCADE, - default=None, - blank=True, - null=True, - help_text='Not all assesments are triggered by academies') + academy = models.ForeignKey( + Academy, + on_delete=models.CASCADE, + default=None, + blank=True, + null=True, + help_text="Not all assesments are triggered by academies", + ) author = models.ForeignKey(User, on_delete=models.SET_NULL, default=None, blank=True, null=True) private = models.BooleanField(default=False) is_archived = models.BooleanField( - default=False, help_text='If assessments have answers, they cannot be deleted but will be archived instead') + default=False, help_text="If assessments have answers, they cannot be deleted but will be archived instead" + ) next = models.URLField(default=None, blank=True, null=True) is_instant_feedback = models.BooleanField( - default=True, help_text='If true, users will know immediately if their answer was correct') + default=True, help_text="If true, users will know immediately if their answer was correct" + ) # the original translation (will only be set if the quiz is a translation of another one) original = models.ForeignKey( - 'Assessment', + "Assessment", on_delete=models.CASCADE, - related_name='translations', + related_name="translations", default=None, blank=True, null=True, - help_text='The original translation (will only be set if the quiz is a translation of another one)') + help_text="The original translation (will only be set if the quiz is a translation of another one)", + ) comment = models.CharField(max_length=255, default=None, blank=True, null=True) @@ -65,7 +71,7 @@ def __init__(self, *args, **kwargs): updated_at = models.DateTimeField(auto_now=True, editable=False) def __str__(self): - return f'{self.slug} ({self.lang})' + return f"{self.slug} ({self.lang})" def save(self, *args, **kwargs): super().save(*args, **kwargs) @@ -82,27 +88,29 @@ def delete(self, *args, **kwargs): def to_json(self, *args, **kwargs): _json = { - 'info': { - 'id': self.id, - 'slug': self.slug, - 'title': self.title, - 'is_instant_feedback': self.is_instant_feedback, + "info": { + "id": self.id, + "slug": self.slug, + "title": self.title, + "is_instant_feedback": self.is_instant_feedback, }, - 'questions': [] + "questions": [], } _questions = self.question_set.all() for q in _questions: - _q = {'id': q.id, 'title': q.title, 'options': []} + _q = {"id": q.id, "title": q.title, "options": []} _options 
= q.option_set.all() for o in _options: - _q['options'].append({ - 'id': o.id, - 'title': o.title, - 'score': o.score, - }) + _q["options"].append( + { + "id": o.id, + "title": o.title, + "score": o.score, + } + ) - _json['questions'].append(_q) + _json["questions"].append(_q) return _json @@ -111,14 +119,12 @@ class AssessmentLayout(models.Model): academy = models.ForeignKey(Academy, on_delete=models.CASCADE) slug = models.SlugField(max_length=200, unique=True) - additional_styles = models.TextField(blank=True, - null=True, - default=None, - help_text='This stylesheet will be included in the assessment if specified') - variables = models.JSONField(default=None, - blank=True, - null=True, - help_text='Additional params to be passed into the assessment content') + additional_styles = models.TextField( + blank=True, null=True, default=None, help_text="This stylesheet will be included in the assessment if specified" + ) + variables = models.JSONField( + default=None, blank=True, null=True, help_text="Additional params to be passed into the assessment content" + ) created_at = models.DateTimeField(auto_now_add=True, editable=False) updated_at = models.DateTimeField(auto_now=True, editable=False) @@ -126,26 +132,20 @@ class AssessmentLayout(models.Model): class AssessmentThreshold(models.Model): - assessment = models.ForeignKey('Assessment', - on_delete=models.CASCADE, - related_name='score_thresholds', - default=None, - blank=True, - null=True) + assessment = models.ForeignKey( + "Assessment", on_delete=models.CASCADE, related_name="score_thresholds", default=None, blank=True, null=True + ) - title = models.CharField(max_length=255, - default=None, - blank=True, - null=True, - help_text='Title is good for internal use') + title = models.CharField( + max_length=255, default=None, blank=True, null=True, help_text="Title is good for internal use" + ) tags = models.CharField( max_length=255, default=None, blank=True, null=True, - help_text= - 'Ideal to group thresholds under a taxonomy, that way you can have several groups of thresholds for the same quiz' + help_text="Ideal to group thresholds under a taxonomy, that way you can have several groups of thresholds for the same quiz", ) academy = models.ForeignKey( @@ -154,10 +154,12 @@ class AssessmentThreshold(models.Model): default=None, blank=True, null=True, - help_text='If null it will be default, but if specified, the only this academy will have this threshold') + help_text="If null it will be default, but if specified, the only this academy will have this threshold", + ) score_threshold = models.IntegerField( - help_text='You can set a threshold to determine if the user score is successfull') + help_text="You can set a threshold to determine if the user score is successfull" + ) success_message = models.TextField(default=None, blank=True, null=True) fail_message = models.TextField(default=None, blank=True, null=True) @@ -169,15 +171,15 @@ class AssessmentThreshold(models.Model): updated_at = models.DateTimeField(auto_now=True, editable=False) -TEXT = 'TEXT' -NUMBER = 'NUMBER' -SELECT = 'SELECT' -SELECT_MULTIPLE = 'SELECT_MULTIPLE' +TEXT = "TEXT" +NUMBER = "NUMBER" +SELECT = "SELECT" +SELECT_MULTIPLE = "SELECT_MULTIPLE" QUESTION_TYPE = ( - (TEXT, 'Text'), - (NUMBER, 'Number'), - (SELECT, 'Select'), - (SELECT_MULTIPLE, 'Select Multiple'), + (TEXT, "Text"), + (NUMBER, "Number"), + (SELECT, "Select"), + (SELECT_MULTIPLE, "Select Multiple"), ) @@ -185,14 +187,15 @@ class Question(models.Model): title = models.TextField() help_text = 
models.CharField(max_length=255, default=None, blank=True, null=True) - lang = models.CharField(max_length=3, blank=True, default='us') + lang = models.CharField(max_length=3, blank=True, default="us") assessment = models.ForeignKey(Assessment, on_delete=models.CASCADE, default=None, blank=True, null=True) author = models.ForeignKey(User, on_delete=models.SET_NULL, default=None, blank=True, null=True) question_type = models.CharField(max_length=15, choices=QUESTION_TYPE, default=SELECT) is_deleted = models.BooleanField( - default=False, help_text='Question collected answers cannot not be deleted, they will have this bullet true') + default=False, help_text="Question collected answers cannot not be deleted, they will have this bullet true" + ) position = models.IntegerField(default=None, blank=True, null=True) @@ -200,43 +203,42 @@ class Question(models.Model): updated_at = models.DateTimeField(auto_now=True, editable=False) def __str__(self): - return f'Question {self.id}' + return f"Question {self.id}" class Option(models.Model): title = models.TextField() help_text = models.CharField(max_length=255, default=None, blank=True, null=True) - lang = models.CharField(max_length=3, blank=True, default='en') + lang = models.CharField(max_length=3, blank=True, default="en") is_deleted = models.BooleanField( - default=False, - help_text='Options with collected answers cannot not be deleted, they will have this bullet true') + default=False, help_text="Options with collected answers cannot not be deleted, they will have this bullet true" + ) position = models.IntegerField(default=None, blank=True, null=True) question = models.ForeignKey(Question, on_delete=models.CASCADE, default=None, blank=True, null=True) score = models.FloatField( - help_text= - 'If picked, this value will add up to the total score of the assesment, you can have negative or fractional values' + help_text="If picked, this value will add up to the total score of the assesment, you can have negative or fractional values" ) created_at = models.DateTimeField(auto_now_add=True, editable=False) updated_at = models.DateTimeField(auto_now=True, editable=False) def __str__(self): - return f'Option {self.id}' + return f"Option {self.id}" -DRAFT = 'DRAFT' -SENT = 'SENT' -ANSWERED = 'ANSWERED' -ERROR = 'ERROR' -EXPIRED = 'EXPIRED' +DRAFT = "DRAFT" +SENT = "SENT" +ANSWERED = "ANSWERED" +ERROR = "ERROR" +EXPIRED = "EXPIRED" SURVEY_STATUS = ( - (DRAFT, 'Draft'), - (SENT, 'Sent'), - (ANSWERED, 'Answered'), # If marked as 'ANSWERED' the total_score will be auto-calculated - (ERROR, 'Error'), - (EXPIRED, 'Expired'), + (DRAFT, "Draft"), + (SENT, "Sent"), + (ANSWERED, "Answered"), # If marked as 'ANSWERED' the total_score will be auto-calculated + (ERROR, "Error"), + (EXPIRED, "Expired"), ) @@ -244,40 +246,40 @@ class UserAssessment(models.Model): _old_status = None title = models.CharField(max_length=200, blank=True) - lang = models.CharField(max_length=3, blank=True, default='en') + lang = models.CharField(max_length=3, blank=True, default="en") academy = models.ForeignKey(Academy, on_delete=models.CASCADE, default=None, blank=True, null=True) assessment = models.ForeignKey(Assessment, on_delete=models.CASCADE, default=None, blank=True, null=True) - owner = models.ForeignKey(User, - on_delete=models.CASCADE, - default=None, - blank=True, - null=True, - help_text='How is answering the assessment') - owner_email = models.CharField(max_length=150, - default=None, - blank=True, - null=True, - help_text='If there is not registered owner we can use the 
email as reference') + owner = models.ForeignKey( + User, on_delete=models.CASCADE, default=None, blank=True, null=True, help_text="How is answering the assessment" + ) + owner_email = models.CharField( + max_length=150, + default=None, + blank=True, + null=True, + help_text="If there is not registered owner we can use the email as reference", + ) has_marketing_consent = models.BooleanField(default=False) - conversion_info = models.JSONField(default=None, - blank=True, - null=True, - help_text='UTMs and other conversion information.') + conversion_info = models.JSONField( + default=None, blank=True, null=True, help_text="UTMs and other conversion information." + ) phone_regex = RegexValidator( - regex=r'^\+?1?\d{9,15}$', - message="Phone number must be entered in the format: '+999999999'. Up to 15 digits allowed.") - owner_phone = models.CharField(validators=[phone_regex], max_length=17, blank=True, - default='') # validators should be a list + regex=r"^\+?1?\d{9,15}$", + message="Phone number must be entered in the format: '+999999999'. Up to 15 digits allowed.", + ) + owner_phone = models.CharField( + validators=[phone_regex], max_length=17, blank=True, default="" + ) # validators should be a list - total_score = models.FloatField(help_text='Total sum of all chosen options in the assesment') + total_score = models.FloatField(help_text="Total sum of all chosen options in the assesment") opened = models.BooleanField(default=False) status = models.CharField(max_length=15, choices=SURVEY_STATUS, default=DRAFT) status_text = models.TextField(default=None, blank=True, null=True) - token = models.CharField(max_length=255, unique=True, help_text='Auto-generated when a user assignment is created') + token = models.CharField(max_length=255, unique=True, help_text="Auto-generated when a user assignment is created") comment = models.CharField(max_length=255, default=None, blank=True, null=True) @@ -304,15 +306,16 @@ def save(self, *args, **kwargs): def get_score(self): total_score = 0 - answers = self.answer_set.all().order_by('created_at') + answers = self.answer_set.all().order_by("created_at") last_one = None for a in answers: last_one = a # Ignore open text questions - if a.question.question_type == 'TEXT': + if a.question.question_type == "TEXT": continue - if a.option: a.value = str(a.option.score) + if a.option: + a.value = str(a.option.score) try: total_score += float(a.value) @@ -336,7 +339,8 @@ class Answer(models.Model): default=None, blank=True, null=True, - help_text='Will be null if open question, no options to pick. Or if option was deleted historically') + help_text="Will be null if open question, no options to pick. 
Or if option was deleted historically", + ) question = models.ForeignKey(Question, on_delete=models.CASCADE, default=None, blank=True, null=True) value = models.TextField() diff --git a/breathecode/assessment/receivers.py b/breathecode/assessment/receivers.py index d81889f55..5ee751f4a 100644 --- a/breathecode/assessment/receivers.py +++ b/breathecode/assessment/receivers.py @@ -12,6 +12,6 @@ @receiver(userassessment_status_updated, sender=UserAssessment) def userassessment_status_updated(sender: Type[UserAssessment], instance: UserAssessment, **kwargs: Any): - logger.info('Processing userassessment_status_updated: ' + str(instance.id)) - if instance.status == 'ANSWERED': + logger.info("Processing userassessment_status_updated: " + str(instance.id)) + if instance.status == "ANSWERED": async_close_userassignment.delay(instance.id) diff --git a/breathecode/assessment/serializers.py b/breathecode/assessment/serializers.py index 867e014ef..c8f3f5a93 100644 --- a/breathecode/assessment/serializers.py +++ b/breathecode/assessment/serializers.py @@ -178,7 +178,7 @@ def get_summary(self, obj): if last_one is not None: last_answer = AnswerSmallSerializer(last_one).data - return {'last_answer': last_answer, 'live_score': total_score} + return {"last_answer": last_answer, "live_score": total_score} class GetAssessmentBigSerializer(GetAssessmentSerializer): @@ -186,22 +186,23 @@ class GetAssessmentBigSerializer(GetAssessmentSerializer): is_instant_feedback = serpy.Field() def get_questions(self, obj): - return GetQuestionSerializer(obj.question_set.filter(is_deleted=False).order_by('-position', 'id'), - many=True).data + return GetQuestionSerializer( + obj.question_set.filter(is_deleted=False).order_by("-position", "id"), many=True + ).data class OptionSerializer(serializers.ModelSerializer): class Meta: model = Option - exclude = ('created_at', 'updated_at') + exclude = ("created_at", "updated_at") class QuestionSerializer(serializers.ModelSerializer): class Meta: model = Question - exclude = ('created_at', 'updated_at', 'assessment') + exclude = ("created_at", "updated_at", "assessment") class AnswerSerializer(serializers.ModelSerializer): @@ -209,38 +210,44 @@ class AnswerSerializer(serializers.ModelSerializer): class Meta: model = Answer - exclude = ('created_at', 'updated_at') + exclude = ("created_at", "updated_at") def validate(self, data): - lang = self.context['lang'] + lang = self.context["lang"] validated_data = {**data} - del validated_data['token'] + del validated_data["token"] - uass = UserAssessment.objects.filter(token=data['token']).first() + uass = UserAssessment.objects.filter(token=data["token"]).first() if not uass: raise ValidationException( - translation(lang, - en='user assessment not found for this token', - es='No se han encontrado un user assessment con ese token', - slug='not-found')) - validated_data['user_assessment'] = uass + translation( + lang, + en="user assessment not found for this token", + es="No se han encontrado un user assessment con ese token", + slug="not-found", + ) + ) + validated_data["user_assessment"] = uass now = timezone.now() session_duration = uass.created_at max_duration = uass.created_at + uass.assessment.max_session_duration if now > max_duration: raise ValidationException( - f'User assessment session started {from_now(session_duration)} ago and it expires after {duration_to_str(uass.assessment.max_session_duration)}, no more updates can be made' + f"User assessment session started {from_now(session_duration)} ago and it expires after 
{duration_to_str(uass.assessment.max_session_duration)}, no more updates can be made" ) - if 'option' in data and data['option']: - if Answer.objects.filter(option=data['option'], user_assessment=uass).count() > 0: + if "option" in data and data["option"]: + if Answer.objects.filter(option=data["option"], user_assessment=uass).count() > 0: raise ValidationException( - translation(lang, - en='This answer has already been answered on this user assessment', - es='Esta opción ya fue respondida para este assessment', - slug='already-answered')) + translation( + lang, + en="This answer has already been answered on this user assessment", + es="Esta opción ya fue respondida para este assessment", + slug="already-answered", + ) + ) return super().validate(validated_data) @@ -249,11 +256,11 @@ def create(self, validated_data): # copy the validated data just to do small last minute corrections data = validated_data.copy() - if 'option' in data and data['option']: - data['question'] = data['option'].question + if "option" in data and data["option"]: + data["question"] = data["option"].question - if data['question'].question_type == 'SELECT': - data['value'] = data['option'].score + if data["question"].question_type == "SELECT": + data["value"] = data["option"].score return super().create({**data}) @@ -262,7 +269,7 @@ class AssessmentPUTSerializer(serializers.ModelSerializer): class Meta: model = Assessment - exclude = ('slug', 'academy', 'lang', 'author') + exclude = ("slug", "academy", "lang", "author") class PostUserAssessmentSerializer(serializers.ModelSerializer): @@ -270,73 +277,82 @@ class PostUserAssessmentSerializer(serializers.ModelSerializer): class Meta: model = UserAssessment - exclude = ('total_score', 'created_at', 'updated_at', 'token', 'owner') - read_only_fields = ['id'] + exclude = ("total_score", "created_at", "updated_at", "token", "owner") + read_only_fields = ["id"] def validate(self, data): - lang = self.context['lang'] - request = self.context['request'] + lang = self.context["lang"] + request = self.context["request"] - if 'status' in data and data['status'] not in ['DRAFT', 'SENT']: + if "status" in data and data["status"] not in ["DRAFT", "SENT"]: raise ValidationException( - translation(lang, - en=f'User assessment cannot be created with status {data["status"]}', - es=f'El user assessment no se puede crear con status {data["status"]}', - slug='invalid-status')) + translation( + lang, + en=f'User assessment cannot be created with status {data["status"]}', + es=f'El user assessment no se puede crear con status {data["status"]}', + slug="invalid-status", + ) + ) academy = None - if 'Academy' in request.headers: - academy_id = request.headers['Academy'] + if "Academy" in request.headers: + academy_id = request.headers["Academy"] academy = Academy.objects.filter(id=academy_id).first() - if not academy and 'academy' in data: - academy = data['academy'] + if not academy and "academy" in data: + academy = data["academy"] - if not academy and 'assessment' in data: - academy = data['assessment'].academy + if not academy and "assessment" in data: + academy = data["assessment"].academy if not academy: raise ValidationException( - translation(lang, - en='Could not determine academy ownership of this user assessment', - es='No se ha podido determinar a que academia pertenece este user assessment', - slug='not-academy-detected')) + translation( + lang, + en="Could not determine academy ownership of this user assessment", + es="No se ha podido determinar a que academia pertenece este user 
assessment", + slug="not-academy-detected", + ) + ) if not isinstance(request.user, AnonymousUser): - data['owner'] = request.user - elif 'owner_email' not in data or not data['owner_email']: + data["owner"] = request.user + elif "owner_email" not in data or not data["owner_email"]: raise ValidationException( - translation(lang, - en='User assessment cannot be tracked because its missing owner information', - es='Este user assessment no puede registrarse porque no tiene informacion del owner', - slug='no-owner-detected')) + translation( + lang, + en="User assessment cannot be tracked because its missing owner information", + es="Este user assessment no puede registrarse porque no tiene informacion del owner", + slug="no-owner-detected", + ) + ) - return super().validate({**data, 'academy': academy}) + return super().validate({**data, "academy": academy}) def create(self, validated_data): # copy the validated data just to do small last minute corrections data = validated_data.copy() - if data['academy'] is None: - data['status'] = 'ERROR' - data['status_text'] = 'Missing academy. Maybe the assessment.academy is null?' + if data["academy"] is None: + data["status"] = "ERROR" + data["status_text"] = "Missing academy. Maybe the assessment.academy is null?" # "us" language will become "en" language, its the right lang code - if 'lang' in data and data['lang'] == 'us': - data['lang'] = 'en' + if "lang" in data and data["lang"] == "us": + data["lang"] = "en" - if 'started_at' not in data or data['started_at'] is None: - data['started_at'] = timezone.now() + if "started_at" not in data or data["started_at"] is None: + data["started_at"] = timezone.now() - if 'title' not in data or not data['title']: - if 'owner_email' in data and data['owner_email']: - data['title'] = f"{data['assessment'].title} from {data['owner_email']}" - if 'owner' in data and data['owner']: - data['title'] = f"{data['assessment'].title} from {data['owner'].email}" + if "title" not in data or not data["title"]: + if "owner_email" in data and data["owner_email"]: + data["title"] = f"{data['assessment'].title} from {data['owner_email']}" + if "owner" in data and data["owner"]: + data["title"] = f"{data['assessment'].title} from {data['owner'].email}" - result = super().create({**data, 'total_score': 0, 'academy': validated_data['academy']}) + result = super().create({**data, "total_score": 0, "academy": validated_data["academy"]}) return result @@ -345,22 +361,25 @@ class PUTUserAssessmentSerializer(serializers.ModelSerializer): class Meta: model = UserAssessment - exclude = ('academy', 'assessment', 'lang', 'total_score', 'token', 'started_at', 'owner') + exclude = ("academy", "assessment", "lang", "total_score", "token", "started_at", "owner") read_only_fields = [ - 'id', - 'academy', + "id", + "academy", ] def validate(self, data): - lang = self.context['lang'] + lang = self.context["lang"] - if self.instance.status not in ['DRAFT', 'SENT', 'ERROR']: + if self.instance.status not in ["DRAFT", "SENT", "ERROR"]: raise ValidationException( - translation(lang, - en=f'User assessment cannot be updated because is {self.instance.status}', - es=f'El user assessment status no se puede editar mas porque esta {self.instance.status}', - slug='invalid-status')) + translation( + lang, + en=f"User assessment cannot be updated because is {self.instance.status}", + es=f"El user assessment status no se puede editar mas porque esta {self.instance.status}", + slug="invalid-status", + ) + ) return super().validate({**data}) @@ -371,21 +390,22 @@ 
def update(self, instance, validated_data): data = validated_data.copy() # If not being closed - if validated_data['status'] != 'ANSWERED' or instance.status == validated_data['status']: + if validated_data["status"] != "ANSWERED" or instance.status == validated_data["status"]: if now > (instance.created_at + instance.assessment.max_session_duration): raise ValidationException( - f'Session started {from_now(instance.created_at)} ago and it expires after {duration_to_str(instance.assessment.max_session_duration)}, no more updates can be made' + f"Session started {from_now(instance.created_at)} ago and it expires after {duration_to_str(instance.assessment.max_session_duration)}, no more updates can be made" ) # copy the validated data just to do small last minute corrections data = validated_data.copy() - if 'status_text' in data: del data['status_text'] + if "status_text" in data: + del data["status_text"] # "us" language will become "en" language, its the right lang code - if 'lang' in data and data['lang'] == 'us': - data['lang'] = 'en' + if "lang" in data and data["lang"] == "us": + data["lang"] = "en" - if 'started_at' not in data and instance.started_at is None: - data['started_at'] = now + if "started_at" not in data and instance.started_at is None: + data["started_at"] = now return super().update(instance, data) diff --git a/breathecode/assessment/signals.py b/breathecode/assessment/signals.py index 31234556e..4a7057fff 100644 --- a/breathecode/assessment/signals.py +++ b/breathecode/assessment/signals.py @@ -2,6 +2,7 @@ For each signal you want other apps to be able to receive, you have to declare a new variable here like this: """ + from django import dispatch assessment_updated = dispatch.Signal() diff --git a/breathecode/assessment/tasks.py b/breathecode/assessment/tasks.py index e36623e80..636b5e462 100644 --- a/breathecode/assessment/tasks.py +++ b/breathecode/assessment/tasks.py @@ -13,7 +13,7 @@ @shared_task(bind=True, priority=TaskPriority.ASSESSMENT.value) def async_close_userassignment(self, ua_id): """Notify if the task was change.""" - logger.info('Starting async_close_userassignment') + logger.info("Starting async_close_userassignment") ua = UserAssessment.objects.filter(id=ua_id).first() if not ua: @@ -23,14 +23,14 @@ def async_close_userassignment(self, ua_id): # Not one answer found for the user assessment if last_answer is None: - ua.status = 'ERROR' - ua.status_text = 'No answers found for this user assessment session' + ua.status = "ERROR" + ua.status_text = "No answers found for this user assessment session" ua.save() return True ua.total_score = score - ua.status = 'ANSWERED' - ua.status_text = '' + ua.status = "ANSWERED" + ua.status_text = "" ua.finished_at = last_answer.created_at ua.save() return True diff --git a/breathecode/assessment/urls.py b/breathecode/assessment/urls.py index 5b9add631..26b7624fc 100644 --- a/breathecode/assessment/urls.py +++ b/breathecode/assessment/urls.py @@ -1,25 +1,34 @@ from django.urls import path -from .views import (TrackAssessmentView, GetAssessmentView, GetThresholdView, AssessmentQuestionView, - AssessmentOptionView, AcademyUserAssessmentView, AssessmentLayoutView, AcademyAssessmentLayoutView, - AnswerView, AcademyAnswerView) +from .views import ( + TrackAssessmentView, + GetAssessmentView, + GetThresholdView, + AssessmentQuestionView, + AssessmentOptionView, + AcademyUserAssessmentView, + AssessmentLayoutView, + AcademyAssessmentLayoutView, + AnswerView, + AcademyAnswerView, +) -app_name = 'assessment' +app_name = 
"assessment" urlpatterns = [ # user assessments - path('user/assessment', TrackAssessmentView.as_view()), - path('user/assessment/', TrackAssessmentView.as_view()), - path('user/assessment//answer', AnswerView.as_view()), - path('user/assessment//answer/', AnswerView.as_view()), - path('academy/user/assessment//answer/', AcademyAnswerView.as_view()), - path('academy/user/assessment', AcademyUserAssessmentView.as_view()), - path('academy/user/assessment/', AcademyUserAssessmentView.as_view()), - path('', GetAssessmentView.as_view()), - path('layout/', AssessmentLayoutView.as_view()), - path('academy/layout', AcademyAssessmentLayoutView.as_view()), - path('academy/layout/', AcademyAssessmentLayoutView.as_view()), - path('/threshold', GetThresholdView.as_view()), - path('/threshold/', GetThresholdView.as_view()), - path('/question/', AssessmentQuestionView.as_view()), - path('/option/', AssessmentOptionView.as_view()), - path('', GetAssessmentView.as_view()), + path("user/assessment", TrackAssessmentView.as_view()), + path("user/assessment/", TrackAssessmentView.as_view()), + path("user/assessment//answer", AnswerView.as_view()), + path("user/assessment//answer/", AnswerView.as_view()), + path("academy/user/assessment//answer/", AcademyAnswerView.as_view()), + path("academy/user/assessment", AcademyUserAssessmentView.as_view()), + path("academy/user/assessment/", AcademyUserAssessmentView.as_view()), + path("", GetAssessmentView.as_view()), + path("layout/", AssessmentLayoutView.as_view()), + path("academy/layout", AcademyAssessmentLayoutView.as_view()), + path("academy/layout/", AcademyAssessmentLayoutView.as_view()), + path("/threshold", GetThresholdView.as_view()), + path("/threshold/", GetThresholdView.as_view()), + path("/question/", AssessmentQuestionView.as_view()), + path("/option/", AssessmentOptionView.as_view()), + path("", GetAssessmentView.as_view()), ] diff --git a/breathecode/assessment/views.py b/breathecode/assessment/views.py index 315032f8c..c1b57e8ab 100644 --- a/breathecode/assessment/views.py +++ b/breathecode/assessment/views.py @@ -38,6 +38,7 @@ class TrackAssessmentView(APIView, GenerateLookupsMixin): """ List all snippets, or create a new snippet. 
""" + permission_classes = [AllowAny] def get(self, request, ua_token): @@ -46,11 +47,15 @@ def get(self, request, ua_token): single = UserAssessment.objects.filter(token=ua_token).first() if single is None or now > single.created_at + single.assessment.max_session_duration: - raise ValidationException(translation(lang, - en='User assessment session does not exist or has already expired', - es='Esta sessión de evaluación no existe o ya ha expirado', - slug='not-found'), - code=404) + raise ValidationException( + translation( + lang, + en="User assessment session does not exist or has already expired", + es="Esta sessión de evaluación no existe o ya ha expirado", + slug="not-found", + ), + code=404, + ) serializer = PublicUserAssessmentSerializer(single, many=False) return Response(serializer.data, status=status.HTTP_200_OK) @@ -59,9 +64,9 @@ def put(self, request, ua_token): lang = get_user_language(request) ass = UserAssessment.objects.filter(token=ua_token).first() if not ass: - raise ValidationException('User Assessment not found', 404) + raise ValidationException("User Assessment not found", 404) - serializer = PUTUserAssessmentSerializer(ass, data=request.data, context={'request': request, 'lang': lang}) + serializer = PUTUserAssessmentSerializer(ass, data=request.data, context={"request": request, "lang": lang}) if serializer.is_valid(): serializer.save() serializer = GetUserAssessmentSerializer(serializer.instance) @@ -72,7 +77,7 @@ def post(self, request): lang = get_user_language(request) payload = request.data.copy() - serializer = PostUserAssessmentSerializer(data=payload, context={'request': request, 'lang': lang}) + serializer = PostUserAssessmentSerializer(data=payload, context={"request": request, "lang": lang}) if serializer.is_valid(): serializer.save() serializer = GetUserAssessmentSerializer(serializer.instance) @@ -84,18 +89,19 @@ class GetAssessmentView(APIView): """ List all snippets, or create a new snippet. 
""" + permission_classes = [AllowAny] def get(self, request, assessment_slug=None): if assessment_slug is not None: lang = None - if 'lang' in self.request.GET: - lang = self.request.GET.get('lang') + if "lang" in self.request.GET: + lang = self.request.GET.get("lang") item = Assessment.objects.filter(slug=assessment_slug, is_archived=False).first() if item is None: - raise ValidationException('Assessment not found or its archived', 404) + raise ValidationException("Assessment not found or its archived", 404) if lang is not None and item.lang != lang: item = item.translations.filter(lang=lang).first() @@ -109,31 +115,31 @@ def get(self, request, assessment_slug=None): items = Assessment.objects.all() lookup = {} - if 'academy' in self.request.GET: - param = self.request.GET.get('academy') - lookup['academy__id'] = param + if "academy" in self.request.GET: + param = self.request.GET.get("academy") + lookup["academy__id"] = param - if 'lang' in self.request.GET: - param = self.request.GET.get('lang') - lookup['lang'] = param + if "lang" in self.request.GET: + param = self.request.GET.get("lang") + lookup["lang"] = param # user can specify include_archived on querystring to include archived assessments - if not 'include_archived' in self.request.GET or self.request.GET.get('include_archived') != 'true': - lookup['is_archived'] = False + if not "include_archived" in self.request.GET or self.request.GET.get("include_archived") != "true": + lookup["is_archived"] = False - if 'no_asset' in self.request.GET and self.request.GET.get('no_asset').lower() == 'true': - lookup['asset__isnull'] = True + if "no_asset" in self.request.GET and self.request.GET.get("no_asset").lower() == "true": + lookup["asset__isnull"] = True - if 'author' in self.request.GET: - param = self.request.GET.get('author') - lookup['author__id'] = param + if "author" in self.request.GET: + param = self.request.GET.get("author") + lookup["author__id"] = param - items = items.filter(**lookup).order_by('-created_at') + items = items.filter(**lookup).order_by("-created_at") serializer = GetAssessmentSerializer(items, many=True) return Response(serializer.data, status=status.HTTP_200_OK) - @capable_of('crud_assessment') + @capable_of("crud_assessment") def put(self, request, assessment_slug=None, academy_id=None): lang = get_user_language(request) @@ -143,42 +149,44 @@ def put(self, request, assessment_slug=None, academy_id=None): raise ValidationException( translation( lang, - en=f'Assessment {assessment_slug} not found or its archived for academy {academy_id}', - es=f'La evaluación {assessment_slug} no se encontró o esta archivada para la academia {academy_id}', - slug='not-found')) + en=f"Assessment {assessment_slug} not found or its archived for academy {academy_id}", + es=f"La evaluación {assessment_slug} no se encontró o esta archivada para la academia {academy_id}", + slug="not-found", + ) + ) all_serializers = [] - assessment_serializer = AssessmentPUTSerializer(_assessment, - data=request.data, - context={ - 'request': request, - 'academy': academy_id, - 'lang': lang - }) + assessment_serializer = AssessmentPUTSerializer( + _assessment, data=request.data, context={"request": request, "academy": academy_id, "lang": lang} + ) if not assessment_serializer.is_valid(): return Response(assessment_serializer.errors, status=status.HTTP_400_BAD_REQUEST) all_serializers.append(assessment_serializer) question_index = 0 - if 'questions' in request.data: - for q in request.data['questions']: + if "questions" in request.data: + for q in 
request.data["questions"]: question_index += 1 q_serializer = None - if 'id' in q: - question = Question.objects.filter(id=q['id'], assessment=_assessment).first() + if "id" in q: + question = Question.objects.filter(id=q["id"], assessment=_assessment).first() if question is None: raise ValidationException( - translation(lang, - en=f'Question {q["id"]} not found for this assessment', - es=f'No se ha encontrado esta pregunta {q["id"]} dentro del assessment', - slug='not-found')) + translation( + lang, + en=f'Question {q["id"]} not found for this assessment', + es=f'No se ha encontrado esta pregunta {q["id"]} dentro del assessment', + slug="not-found", + ) + ) q_serializer = QuestionSerializer(question, data=q) - if 'title' in q and q_serializer is None: - question = Question.objects.filter(title=q['title'], assessment=_assessment).first() - if question is not None: q_serializer = QuestionSerializer(question, data=q) + if "title" in q and q_serializer is None: + question = Question.objects.filter(title=q["title"], assessment=_assessment).first() + if question is not None: + q_serializer = QuestionSerializer(question, data=q) if q_serializer is None: q_serializer = QuestionSerializer(data=q) @@ -188,24 +196,28 @@ def put(self, request, assessment_slug=None, academy_id=None): return Response(assessment_serializer.errors, status=status.HTTP_400_BAD_REQUEST) total_score = 0 - if 'options' in q: - for opt in q['options']: + if "options" in q: + for opt in q["options"]: opt_serializer = None - if 'id' in opt: - option = Option.objects.filter(id=opt['id'], question=question).first() + if "id" in opt: + option = Option.objects.filter(id=opt["id"], question=question).first() if option is None: raise ValidationException( - translation(lang, - en=f'Option {opt["id"]} not found on this question', - es=f'No se ha encontrado la opcion {opt["id"]} en esta pregunta', - slug='not-found')) + translation( + lang, + en=f'Option {opt["id"]} not found on this question', + es=f'No se ha encontrado la opcion {opt["id"]} en esta pregunta', + slug="not-found", + ) + ) opt_serializer = OptionSerializer(option, data=opt) - if 'title' in opt and opt_serializer is None: - option = Option.objects.filter(title=opt['title'], question=question).first() - if option is not None: opt_serializer = OptionSerializer(option, data=opt) + if "title" in opt and opt_serializer is None: + option = Option.objects.filter(title=opt["title"], question=question).first() + if option is not None: + opt_serializer = OptionSerializer(option, data=opt) if opt_serializer is None: opt_serializer = OptionSerializer(data=opt) @@ -214,15 +226,19 @@ def put(self, request, assessment_slug=None, academy_id=None): if not opt_serializer.is_valid(): return Response(opt_serializer.errors, status=status.HTTP_400_BAD_REQUEST) - score = float(opt['score']) if 'score' in opt else float(opt_serializer.data['score']) - if score > 0: total_score += score + score = float(opt["score"]) if "score" in opt else float(opt_serializer.data["score"]) + if score > 0: + total_score += score if total_score <= 0: raise ValidationException( - translation(lang, - en=f'Question {question_index} total score must be allowed to be bigger than 0', - es=f'El score de la pregunta {question_index} debe poder ser mayor a 0', - slug='bigger-than-cero')) + translation( + lang, + en=f"Question {question_index} total score must be allowed to be bigger than 0", + es=f"El score de la pregunta {question_index} debe poder ser mayor a 0", + slug="bigger-than-cero", + ) + ) first_instance = None 
question_to_assign = None @@ -230,7 +246,8 @@ def put(self, request, assessment_slug=None, academy_id=None): _ins = s.save() # lets save the assessment instance to return it to the front end - if first_instance is None: first_instance = _ins + if first_instance is None: + first_instance = _ins # Assign question to the nearest options if isinstance(_ins, Question): @@ -251,13 +268,14 @@ class AssessmentLayoutView(APIView): """ List all snippets, or create a new snippet. """ + permission_classes = [AllowAny] def get(self, request, layout_slug): item = AssessmentLayout.objects.filter(slug=layout_slug).first() if item is None: - raise ValidationException('Assessment layout not found', 404) + raise ValidationException("Assessment layout not found", 404) serializer = GetAssessmentLayoutSerializer(item, many=False) return Response(serializer.data, status=status.HTTP_200_OK) @@ -266,15 +284,16 @@ class AcademyAssessmentLayoutView(APIView): """ List all snippets, or create a new snippet. """ + permission_classes = [AllowAny] - @capable_of('read_assessment') + @capable_of("read_assessment") def get(self, request, academy_id, layout_slug=None): if layout_slug: item = AssessmentLayout.objects.filter(slug=layout_slug, academy__id=academy_id).first() if item is None: - raise ValidationException('Assessment layout not found for this academy', 404) + raise ValidationException("Assessment layout not found for this academy", 404) serializer = GetAssessmentLayoutSerializer(item) return Response(serializer.data, status=status.HTTP_200_OK) @@ -286,20 +305,20 @@ def get(self, request, academy_id, layout_slug=None): # param = self.request.GET.get('academy') # lookup['academy__isnull'] = True - items = items.filter(**lookup).order_by('-created_at') + items = items.filter(**lookup).order_by("-created_at") serializer = GetAssessmentLayoutSerializer(items, many=True) return Response(serializer.data, status=status.HTTP_200_OK) -@api_view(['POST']) +@api_view(["POST"]) @permission_classes([AllowAny]) def create_public_assessment(request): data = request.data.copy() # remove spaces from phone - if 'phone' in data: - data['phone'] = data['phone'].replace(' ', '') + if "phone" in data: + data["phone"] = data["phone"].replace(" ", "") serializer = PostFormEntrySerializer(data=data) if serializer.is_valid(): @@ -314,7 +333,7 @@ def create_public_assessment(request): class AssessmentOptionView(APIView): - @capable_of('crud_assessment') + @capable_of("crud_assessment") def delete(self, request, assessment_slug, option_id=None, academy_id=None): lang = get_user_language(request) @@ -322,17 +341,23 @@ def delete(self, request, assessment_slug, option_id=None, academy_id=None): option = Option.objects.filter(id=option_id, question__assessment__slug=assessment_slug).first() if option is None: raise ValidationException( - translation(lang, - en=f'Option {option_id} not found on assessment {assessment_slug}', - es=f'Option de pregunta {option_id} no encontrada para el assessment {assessment_slug}', - slug='not-found')) + translation( + lang, + en=f"Option {option_id} not found on assessment {assessment_slug}", + es=f"Option de pregunta {option_id} no encontrada para el assessment {assessment_slug}", + slug="not-found", + ) + ) if option.question.option_set.filter(is_deleted=False).count() <= 2: raise ValidationException( - translation(lang, - en=f'Question {option.question.id} needs at least 2 options', - es=f'La pregunta {option.question.id} necesita al menos dos opciones', - slug='at-least-two')) + translation( + lang, + 
en=f"Question {option.question.id} needs at least 2 options", + es=f"La pregunta {option.question.id} necesita al menos dos opciones", + slug="at-least-two", + ) + ) if option.answer_set.count() > 0: option.is_deleted = True @@ -345,7 +370,7 @@ def delete(self, request, assessment_slug, option_id=None, academy_id=None): class AssessmentQuestionView(APIView): - @capable_of('crud_assessment') + @capable_of("crud_assessment") def delete(self, request, assessment_slug, question_id=None, academy_id=None): lang = get_user_language(request) @@ -353,17 +378,23 @@ def delete(self, request, assessment_slug, question_id=None, academy_id=None): question = Question.objects.filter(id=question_id, assessment__slug=assessment_slug).first() if question is None: raise ValidationException( - translation(lang, - en=f'Question {question_id} not found on assessment {assessment_slug}', - es=f'La pregunta {question_id} no fue encontrada para el assessment {assessment_slug}', - slug='not-found')) + translation( + lang, + en=f"Question {question_id} not found on assessment {assessment_slug}", + es=f"La pregunta {question_id} no fue encontrada para el assessment {assessment_slug}", + slug="not-found", + ) + ) if question.assessment.question_set.filter(is_deleted=False).count() <= 2: raise ValidationException( - translation(lang, - en=f'Assessment {assessment_slug} needs at least 2 questions', - es=f'La evaluación {assessment_slug} necesita al menos dos preguntas', - slug='at-least-two')) + translation( + lang, + en=f"Assessment {assessment_slug} needs at least 2 questions", + es=f"La evaluación {assessment_slug} necesita al menos dos preguntas", + slug="at-least-two", + ) + ) if question.answer_set.count() > 0: question.is_deleted = True @@ -378,18 +409,19 @@ class GetThresholdView(APIView): """ List all snippets, or create a new snippet. 
""" + permission_classes = [AllowAny] def get(self, request, assessment_slug, threshold_id=None): item = Assessment.objects.filter(slug=assessment_slug).first() if item is None: - raise ValidationException('Assessment not found', 404) + raise ValidationException("Assessment not found", 404) if threshold_id is not None: single = AssessmentThreshold.objects.filter(id=threshold_id, assessment__slug=assessment_slug).first() if single is None: - raise ValidationException(f'Threshold {threshold_id} not found', 404, slug='threshold-not-found') + raise ValidationException(f"Threshold {threshold_id} not found", 404, slug="threshold-not-found") serializer = GetAssessmentThresholdSerializer(single, many=False) return Response(serializer.data, status=status.HTTP_200_OK) @@ -398,24 +430,24 @@ def get(self, request, assessment_slug, threshold_id=None): items = AssessmentThreshold.objects.filter(assessment__slug=assessment_slug) lookup = {} - if 'academy' in self.request.GET: - param = self.request.GET.get('academy') + if "academy" in self.request.GET: + param = self.request.GET.get("academy") if param.isnumeric(): - lookup['academy__id'] = int(param) + lookup["academy__id"] = int(param) else: - lookup['academy__slug'] = param + lookup["academy__slug"] = param else: - lookup['academy__isnull'] = True + lookup["academy__isnull"] = True - if 'tag' in self.request.GET: - param = self.request.GET.get('tag') - if param != 'all': - lookup['tags__icontains'] = param + if "tag" in self.request.GET: + param = self.request.GET.get("tag") + if param != "all": + lookup["tags__icontains"] = param else: - lookup['tags__in'] = ['', None] + lookup["tags__in"] = ["", None] - items = items.filter(**lookup).order_by('-created_at') + items = items.filter(**lookup).order_by("-created_at") serializer = GetAssessmentThresholdSerializer(items, many=True) return Response(serializer.data, status=status.HTTP_200_OK) @@ -426,16 +458,16 @@ class AcademyUserAssessmentView(APIView, GenerateLookupsMixin): List all snippets, or create a new snippet. 
""" - extensions = APIViewExtensions(sort='-created_at', paginate=True) + extensions = APIViewExtensions(sort="-created_at", paginate=True) - @capable_of('read_user_assessment') + @capable_of("read_user_assessment") def get(self, request, academy_id=None, ua_id=None): handler = self.extensions(request) if ua_id is not None: single = UserAssessment.objects.filter(id=ua_id, academy__id=academy_id).first() if single is None: - raise ValidationException(f'UserAssessment {ua_id} not found', 404, slug='user-assessment-not-found') + raise ValidationException(f"UserAssessment {ua_id} not found", 404, slug="user-assessment-not-found") serializer = GetUserAssessmentSerializer(single, many=False) return handler.response(serializer.data) @@ -443,38 +475,38 @@ def get(self, request, academy_id=None, ua_id=None): items = UserAssessment.objects.filter(academy__id=academy_id) lookup = {} - start = request.GET.get('started_at', None) + start = request.GET.get("started_at", None) if start is not None: - start_date = datetime.datetime.strptime(start, '%Y-%m-%d').date() - lookup['started_at__gte'] = start_date + start_date = datetime.datetime.strptime(start, "%Y-%m-%d").date() + lookup["started_at__gte"] = start_date - end = request.GET.get('finished_at', None) + end = request.GET.get("finished_at", None) if end is not None: - end_date = datetime.datetime.strptime(end, '%Y-%m-%d').date() - lookup['finished_at__lte'] = end_date + end_date = datetime.datetime.strptime(end, "%Y-%m-%d").date() + lookup["finished_at__lte"] = end_date - if 'status' in self.request.GET: - param = self.request.GET.get('status') - lookup['status'] = param + if "status" in self.request.GET: + param = self.request.GET.get("status") + lookup["status"] = param - if 'opened' in self.request.GET: - param = self.request.GET.get('opened') - lookup['opened'] = param == 'true' + if "opened" in self.request.GET: + param = self.request.GET.get("opened") + lookup["opened"] = param == "true" - if 'course' in self.request.GET: - param = self.request.GET.get('course') - lookup['course__in'] = [x.strip() for x in param.split(',')] + if "course" in self.request.GET: + param = self.request.GET.get("course") + lookup["course__in"] = [x.strip() for x in param.split(",")] - if 'owner' in self.request.GET: - param = self.request.GET.get('owner') - lookup['owner__id'] = param - elif 'owner_email' in self.request.GET: - param = self.request.GET.get('owner_email') - lookup['owner_email'] = param + if "owner" in self.request.GET: + param = self.request.GET.get("owner") + lookup["owner__id"] = param + elif "owner_email" in self.request.GET: + param = self.request.GET.get("owner_email") + lookup["owner_email"] = param - if 'lang' in self.request.GET: - param = self.request.GET.get('lang') - lookup['lang'] = param + if "lang" in self.request.GET: + param = self.request.GET.get("lang") + lookup["lang"] = param items = items.filter(**lookup) items = handler.queryset(items) @@ -482,17 +514,17 @@ def get(self, request, academy_id=None, ua_id=None): serializer = SmallUserAssessmentSerializer(items, many=True) return handler.response(serializer.data) - @capable_of('crud_user_assessment') + @capable_of("crud_user_assessment") def post(self, request, academy_id=None): academy = Academy.objects.filter(id=academy_id).first() if academy is None: - raise ValidationException(f'Academy {academy_id} not found', slug='academy-not-found') + raise ValidationException(f"Academy {academy_id} not found", slug="academy-not-found") # ignore the incoming location information and override 
with the session academy - data = {**request.data, 'location': academy.active_campaign_slug} + data = {**request.data, "location": academy.active_campaign_slug} - serializer = PostFormEntrySerializer(data=data, context={'request': request, 'academy': academy_id}) + serializer = PostFormEntrySerializer(data=data, context={"request": request, "academy": academy_id}) if serializer.is_valid(): serializer.save() big_serializer = FormEntryBigSerializer(serializer.instance) @@ -505,20 +537,20 @@ class AcademyAnswerView(APIView, GenerateLookupsMixin): List all snippets, or create a new snippet. """ - extensions = APIViewExtensions(sort='-created_at', paginate=True) + extensions = APIViewExtensions(sort="-created_at", paginate=True) - @capable_of('read_user_assessment') + @capable_of("read_user_assessment") def get(self, request, academy_id, ua_id=None, answer_id=None): handler = self.extensions(request) if answer_id is not None: - single = Answer.objects.filter(id=answer_id, - user_assessment__id=ua_id, - user_assessment__academy__id=academy_id).first() + single = Answer.objects.filter( + id=answer_id, user_assessment__id=ua_id, user_assessment__academy__id=academy_id + ).first() if single is None: - raise ValidationException(f'Answer {answer_id} not found on user assessment {ua_id}', - 404, - slug='answer-not-found') + raise ValidationException( + f"Answer {answer_id} not found on user assessment {ua_id}", 404, slug="answer-not-found" + ) serializer = AnswerSmallSerializer(single, many=False) return handler.response(serializer.data) @@ -526,43 +558,43 @@ def get(self, request, academy_id, ua_id=None, answer_id=None): items = Answer.objects.filter(user_assessment__id=ua_id, user_assessment__academy__id=academy_id) lookup = {} - start = request.GET.get('starting_at', None) + start = request.GET.get("starting_at", None) if start is not None: - start_date = datetime.strptime(start, '%Y-%m-%d').date() - lookup['created_at__gte'] = start_date + start_date = datetime.strptime(start, "%Y-%m-%d").date() + lookup["created_at__gte"] = start_date - end = request.GET.get('ending_at', None) + end = request.GET.get("ending_at", None) if end is not None: - end_date = datetime.strptime(end, '%Y-%m-%d').date() - lookup['created_at__lte'] = end_date + end_date = datetime.strptime(end, "%Y-%m-%d").date() + lookup["created_at__lte"] = end_date - if 'user_assessments' in self.request.GET: - param = self.request.GET.get('user_assessments') - lookup['user_assessment__id__in'] = [x.strip() for x in param.split(',')] + if "user_assessments" in self.request.GET: + param = self.request.GET.get("user_assessments") + lookup["user_assessment__id__in"] = [x.strip() for x in param.split(",")] - if 'assessments' in self.request.GET: - param = self.request.GET.get('assessments') - lookup['question__assessment__id__in'] = [x.strip() for x in param.split(',')] + if "assessments" in self.request.GET: + param = self.request.GET.get("assessments") + lookup["question__assessment__id__in"] = [x.strip() for x in param.split(",")] - if 'questions' in self.request.GET: - param = self.request.GET.get('questions') - lookup['question__id__in'] = [x.strip() for x in param.split(',')] + if "questions" in self.request.GET: + param = self.request.GET.get("questions") + lookup["question__id__in"] = [x.strip() for x in param.split(",")] - if 'options' in self.request.GET: - param = self.request.GET.get('options') - lookup['option__id__in'] = [x.strip() for x in param.split(',')] + if "options" in self.request.GET: + param = 
self.request.GET.get("options") + lookup["option__id__in"] = [x.strip() for x in param.split(",")] - if 'owner' in self.request.GET: - param = self.request.GET.get('owner') - lookup['user_assessments__owner__id__in'] = [x.strip() for x in param.split(',')] + if "owner" in self.request.GET: + param = self.request.GET.get("owner") + lookup["user_assessments__owner__id__in"] = [x.strip() for x in param.split(",")] - elif 'owner_email' in self.request.GET: - param = self.request.GET.get('owner_email') - lookup['owner_email'] = param + elif "owner_email" in self.request.GET: + param = self.request.GET.get("owner_email") + lookup["owner_email"] = param - if 'lang' in self.request.GET: - param = self.request.GET.get('lang') - lookup['user_assessments__lang'] = param + if "lang" in self.request.GET: + param = self.request.GET.get("lang") + lookup["user_assessments__lang"] = param items = items.filter(**lookup) items = handler.queryset(items) @@ -575,9 +607,10 @@ class AnswerView(APIView, GenerateLookupsMixin): """ List all snippets, or create a new snippet. """ + permission_classes = [AllowAny] - extensions = APIViewExtensions(sort='-created_at', paginate=True) + extensions = APIViewExtensions(sort="-created_at", paginate=True) def get(self, request, token, answer_id=None): handler = self.extensions(request) @@ -585,9 +618,9 @@ def get(self, request, token, answer_id=None): if answer_id is not None: single = Answer.objects.filter(id=answer_id, user_assessment__token=token).first() if single is None: - raise ValidationException(f'Answer {answer_id} not found on user assessment', - 404, - slug='answer-not-found') + raise ValidationException( + f"Answer {answer_id} not found on user assessment", 404, slug="answer-not-found" + ) serializer = AnswerSmallSerializer(single, many=False) return handler.response(serializer.data) @@ -595,13 +628,13 @@ def get(self, request, token, answer_id=None): items = Answer.objects.filter(user_assessment__token=token) lookup = {} - if 'questions' in self.request.GET: - param = self.request.GET.get('questions') - lookup['question__id__in'] = [x.strip() for x in param.split(',')] + if "questions" in self.request.GET: + param = self.request.GET.get("questions") + lookup["question__id__in"] = [x.strip() for x in param.split(",")] - if 'options' in self.request.GET: - param = self.request.GET.get('options') - lookup['option__id__in'] = [x.strip() for x in param.split(',')] + if "options" in self.request.GET: + param = self.request.GET.get("options") + lookup["option__id__in"] = [x.strip() for x in param.split(",")] items = items.filter(**lookup) items = handler.queryset(items) @@ -615,9 +648,9 @@ def post(self, request, token): data = { **request.data, - 'token': token, + "token": token, } - serializer = AnswerSerializer(data=data, context={'request': request, 'lang': lang}) + serializer = AnswerSerializer(data=data, context={"request": request, "lang": lang}) if serializer.is_valid(): serializer.save() big_serializer = AnswerSmallSerializer(serializer.instance) @@ -627,23 +660,28 @@ def post(self, request, token): def delete(self, request, token, answer_id=None): lang = get_user_language(request) - lookups = self.generate_lookups(request, many_fields=['id']) + lookups = self.generate_lookups(request, many_fields=["id"]) if lookups and answer_id: raise ValidationException( translation( lang, - en='answer_id must not be provided by url if deleting in bulk', - es='El answer_id no debe ser enviado como parte del path si se quiere una eliminacion masiva', - 
slug='bulk-querystring')) + en="answer_id must not be provided by url if deleting in bulk", + es="El answer_id no debe ser enviado como parte del path si se quiere una eliminacion masiva", + slug="bulk-querystring", + ) + ) uass = UserAssessment.objects.filter(token=token).first() if not uass: raise ValidationException( - translation(lang, - en='user assessment not found for this token', - es='No se han encontrado un user assessment con ese token', - slug='not-found')) + translation( + lang, + en="user assessment not found for this token", + es="No se han encontrado un user assessment con ese token", + slug="not-found", + ) + ) if lookups: items = Answer.objects.filter(**lookups, user_assessment=uass) @@ -654,11 +692,11 @@ def delete(self, request, token, answer_id=None): return Response(None, status=status.HTTP_204_NO_CONTENT) if answer_id is None: - raise ValidationException('Missing answer_id', code=400) + raise ValidationException("Missing answer_id", code=400) ans = Answer.objects.filter(id=answer_id, user_assessment=uass).first() if ans is None: - raise ValidationException('Specified answer and token could not be found') + raise ValidationException("Specified answer and token could not be found") ans.delete() return Response(None, status=status.HTTP_204_NO_CONTENT) diff --git a/breathecode/assignments/actions.py b/breathecode/assignments/actions.py index 35e1163f6..c296c71b2 100644 --- a/breathecode/assignments/actions.py +++ b/breathecode/assignments/actions.py @@ -10,33 +10,33 @@ logger = logging.getLogger(__name__) -HOST = os.environ.get('OLD_BREATHECODE_API') +HOST = os.environ.get("OLD_BREATHECODE_API") NOTIFICATION_STRINGS = { - 'en': { - 'teacher': { - 'subject': '{first_name} {last_name} send their task', - 'details': '{first_name} {last_name} send their task "{title}", you can review the task at {url}', + "en": { + "teacher": { + "subject": "{first_name} {last_name} send their task", + "details": '{first_name} {last_name} send their task "{title}", you can review the task at {url}', }, - 'student': { - 'subject': 'Your task "{title}" has been reviewed', - 'PENDING': 'Your task has been marked as pending', - 'APPROVED': 'Your task has been marked as approved', - 'REJECTED': 'Your task has been marked as rejected', - 'IGNORED': 'Your task has been marked as ignored', + "student": { + "subject": 'Your task "{title}" has been reviewed', + "PENDING": "Your task has been marked as pending", + "APPROVED": "Your task has been marked as approved", + "REJECTED": "Your task has been marked as rejected", + "IGNORED": "Your task has been marked as ignored", }, }, - 'es': { - 'teacher': { - 'subject': '{first_name} {last_name} envió su tarea', - 'details': '{first_name} {last_name} envió su tarea "{title}", puedes revisarla en {url}', + "es": { + "teacher": { + "subject": "{first_name} {last_name} envió su tarea", + "details": '{first_name} {last_name} envió su tarea "{title}", puedes revisarla en {url}', }, - 'student': { - 'subject': 'Tu tarea "{title}" ha sido revisada', - 'PENDING': 'Tu tarea se ha marcado como pendiente', - 'APPROVED': 'Tu tarea se ha marcado como aprobada', - 'REJECTED': 'Tu tarea se ha marcado como rechazada', - 'IGNORED': 'Tu tarea se ha marcado como ignorada', + "student": { + "subject": 'Tu tarea "{title}" ha sido revisada', + "PENDING": "Tu tarea se ha marcado como pendiente", + "APPROVED": "Tu tarea se ha marcado como aprobada", + "REJECTED": "Tu tarea se ha marcado como rechazada", + "IGNORED": "Tu tarea se ha marcado como ignorada", }, }, } @@ -47,81 +47,83 
@@ def deliver_task(github_url, live_url=None, task_id=None, task=None): if task is None: task = Task.objects.filter(id=task_id).first() if task is None: - raise ValidationException('Invalid or missing task id') + raise ValidationException("Invalid or missing task id") task.github_url = github_url task.live_url = live_url - task.task_status = 'DONE' - task.revision_status = 'PENDING' #we have to make it pending so the teachers reviews again + task.task_status = "DONE" + task.revision_status = "PENDING" # we have to make it pending so the teachers reviews again task.save() return task -#FIXME: this maybe is a deadcode +# FIXME: this maybe is a deadcode def sync_student_tasks(user, cohort=None): if cohort is None: - cu = CohortUser.objects.filter(user=user).exclude(cohort__slug__contains='prework').first() + cu = CohortUser.objects.filter(user=user).exclude(cohort__slug__contains="prework").first() if cu is not None: cohort = cu.cohort - response = requests.get(f'{HOST}/student/{user.email}/task/', timeout=2) + response = requests.get(f"{HOST}/student/{user.email}/task/", timeout=2) if response.status_code != 200: - raise Exception(f'Student {user.email} not found on the old API') + raise Exception(f"Student {user.email} not found on the old API") tasks = response.json() task_type = { - 'assignment': 'PROJECT', - 'quiz': 'QUIZ', - 'lesson': 'LESSON', - 'replit': 'EXERCISE', + "assignment": "PROJECT", + "quiz": "QUIZ", + "lesson": "LESSON", + "replit": "EXERCISE", } revision_status = { - 'None': 'PENDING', - 'pending': 'PENDING', - 'approved': 'APPROVED', - 'rejected': 'REJECTED', + "None": "PENDING", + "pending": "PENDING", + "approved": "APPROVED", + "rejected": "REJECTED", } task_status = { - 'pending': 'PENDING', - 'done': 'DONE', + "pending": "PENDING", + "done": "DONE", } syncronized = [] - for _task in tasks['data']: + for _task in tasks["data"]: - if _task['type'] not in task_type: + if _task["type"] not in task_type: raise Exception(f"Invalid task_type {_task['type']}") - if _task['status'] not in task_status: + if _task["status"] not in task_status: raise Exception(f"Invalid status {_task['status']}") - if str(_task['revision_status']) not in revision_status: + if str(_task["revision_status"]) not in revision_status: raise Exception(f"Invalid revision_status {_task['revision_status']}") - task = Task.objects.filter(user_id=user.id, associated_slug=_task['associated_slug']).first() + task = Task.objects.filter(user_id=user.id, associated_slug=_task["associated_slug"]).first() if task is None: - task = Task(user=user, ) - task.task_status = task_status[_task['status']] - task.live_url = _task['live_url'] - task.github_url = _task['github_url'] - task.associated_slug = _task['associated_slug'] - task.title = _task['title'] - task.task_type = task_type[_task['type']] - task.revision_status = revision_status[str(_task['revision_status'])] - task.description = _task['description'] + task = Task( + user=user, + ) + task.task_status = task_status[_task["status"]] + task.live_url = _task["live_url"] + task.github_url = _task["github_url"] + task.associated_slug = _task["associated_slug"] + task.title = _task["title"] + task.task_type = task_type[_task["type"]] + task.revision_status = revision_status[str(_task["revision_status"])] + task.description = _task["description"] task.cohort = cohort task.save() syncronized.append(task) - logger.debug(f'Added {len(syncronized)} tasks for student {user.email}') + logger.debug(f"Added {len(syncronized)} tasks for student {user.email}") return 
syncronized def sync_cohort_tasks(cohort): synchronized = [] - cohort_users = CohortUser.objects.filter(cohort__id=cohort.id, role='STUDENT', educational_status__in=['ACTIVE']) + cohort_users = CohortUser.objects.filter(cohort__id=cohort.id, role="STUDENT", educational_status__in=["ACTIVE"]) for cu in cohort_users: try: tasks = sync_student_tasks(cu.user, cohort=cohort) @@ -134,17 +136,17 @@ def sync_cohort_tasks(cohort): def task_is_valid_for_notifications(task: Task) -> bool: if not task: - logger.error('Task not found') + logger.error("Task not found") return False if not task.cohort: - logger.error('Can\'t determine the student cohort') + logger.error("Can't determine the student cohort") return False language = task.cohort.language.lower() if language not in NOTIFICATION_STRINGS: - logger.error(f'The language {language} is not implemented in teacher_task_notification') + logger.error(f"The language {language} is not implemented in teacher_task_notification") return False return True diff --git a/breathecode/assignments/admin.py b/breathecode/assignments/admin.py index 8d283d677..24e474b0e 100644 --- a/breathecode/assignments/admin.py +++ b/breathecode/assignments/admin.py @@ -16,7 +16,7 @@ logger = logging.getLogger(__name__) -@admin.display(description='Delete and sync Tasks') +@admin.display(description="Delete and sync Tasks") def sync_tasks(modeladmin, request, queryset): for u in queryset: @@ -24,16 +24,16 @@ def sync_tasks(modeladmin, request, queryset): Task.objects.filter(user_id=u.id).delete() sync_student_tasks(u) except Exception: - logger.exception(f'There was a problem syncronizing tassks for student {u.email}') + logger.exception(f"There was a problem syncronizing tassks for student {u.email}") @admin.register(UserProxy) class UserAdmin(UserAdmin): - list_display = ('username', 'email', 'first_name', 'last_name') + list_display = ("username", "email", "first_name", "last_name") actions = [sync_tasks] -@admin.display(description='Delete AND SYNC Tasks for all students of this cohort') +@admin.display(description="Delete AND SYNC Tasks for all students of this cohort") def sync_cohort_tasks(modeladmin, request, queryset): from .actions import sync_cohort_tasks @@ -45,7 +45,7 @@ def sync_cohort_tasks(modeladmin, request, queryset): pass -@admin.display(description='Delete tasks for all students of this cohort') +@admin.display(description="Delete tasks for all students of this cohort") def delete_cohort_tasks(modeladmin, request, queryset): for c in queryset: @@ -57,57 +57,57 @@ def delete_cohort_tasks(modeladmin, request, queryset): @admin.register(CohortProxy) class CohortAdmin(admin.ModelAdmin): - list_display = ('id', 'slug', 'stage', 'name', 'kickoff_date', 'syllabus_version', 'schedule') + list_display = ("id", "slug", "stage", "name", "kickoff_date", "syllabus_version", "schedule") actions = [sync_cohort_tasks, delete_cohort_tasks] -@admin.display(description='Mark task status as DONE') +@admin.display(description="Mark task status as DONE") def mark_as_delivered(modeladmin, request, queryset): - queryset.update(task_status='DONE') + queryset.update(task_status="DONE") -@admin.display(description='Mark revision status as APPROVED') +@admin.display(description="Mark revision status as APPROVED") def mark_as_approved(modeladmin, request, queryset): - queryset.update(revision_status='APPROVED') + queryset.update(revision_status="APPROVED") -@admin.display(description='Mark revision status as IGNORED') +@admin.display(description="Mark revision status as IGNORED") def 
mark_as_ignored(modeladmin, request, queryset): - queryset.update(revision_status='IGNORED') + queryset.update(revision_status="IGNORED") -@admin.display(description='Mark revision status as REJECTED') +@admin.display(description="Mark revision status as REJECTED") def mark_as_rejected(modeladmin, request, queryset): - queryset.update(revision_status='REJECTED') + queryset.update(revision_status="REJECTED") @admin.register(Task) class TaskAdmin(admin.ModelAdmin): - search_fields = ['title', 'associated_slug', 'user__first_name', 'user__last_name', 'user__email'] - list_display = ('title', 'task_type', 'associated_slug', 'task_status', 'revision_status', 'user', 'delivery_url') - list_filter = ['task_type', 'task_status', 'revision_status'] + search_fields = ["title", "associated_slug", "user__first_name", "user__last_name", "user__email"] + list_display = ("title", "task_type", "associated_slug", "task_status", "revision_status", "user", "delivery_url") + list_filter = ["task_type", "task_status", "revision_status"] actions = [mark_as_delivered, mark_as_approved, mark_as_rejected, mark_as_ignored] def delivery_url(self, obj): - token, created = Token.get_or_create(obj.user, token_type='temporal') - url = os.getenv('API_URL') + f'/v1/assignment/task/{str(obj.id)}/deliver/{token}' + token, created = Token.get_or_create(obj.user, token_type="temporal") + url = os.getenv("API_URL") + f"/v1/assignment/task/{str(obj.id)}/deliver/{token}" return format_html(f"deliver") @admin.register(UserAttachment) class UserAttachmentAdmin(admin.ModelAdmin): - search_fields = ['slug', 'name', 'user__first_name', 'user__last_name', 'user__email'] - list_display = ('slug', 'name', 'user', 'url', 'mime') - list_filter = ['mime'] + search_fields = ["slug", "name", "user__first_name", "user__last_name", "user__email"] + list_display = ("slug", "name", "user", "url", "mime") + list_filter = ["mime"] @admin.register(FinalProject) class FinalProjectAdmin(admin.ModelAdmin): - list_display = ['name', 'cohort', 'project_status', 'revision_status', 'visibility_status'] - search_fields = ('name', 'cohort__slug', 'repo_url', 'members__email') - filter_horizontal = ['members'] - raw_id_fields = ['cohort'] - list_filter = ['project_status', 'revision_status', 'visibility_status', 'cohort__academy__slug'] + list_display = ["name", "cohort", "project_status", "revision_status", "visibility_status"] + search_fields = ("name", "cohort__slug", "repo_url", "members__email") + filter_horizontal = ["members"] + raw_id_fields = ["cohort"] + list_filter = ["project_status", "revision_status", "visibility_status", "cohort__academy__slug"] # actions = [mark_as_delivered, mark_as_approved, mark_as_rejected, mark_as_ignored] # def delivery_url(self, obj): @@ -118,14 +118,14 @@ class FinalProjectAdmin(admin.ModelAdmin): def async_process_hook(modeladmin, request, queryset): # stay this here for use the poor mocking system - for hook in queryset.all().order_by('created_at'): + for hook in queryset.all().order_by("created_at"): async_learnpack_webhook.delay(hook.id) def process_hook(modeladmin, request, queryset): # stay this here for use the poor mocking system - for hook in queryset.all().order_by('created_at'): - print(f'Procesing hook: {hook.id}') + for hook in queryset.all().order_by("created_at"): + print(f"Procesing hook: {hook.id}") client = LearnPack() try: client.execute_action(hook.id) @@ -133,36 +133,36 @@ def process_hook(modeladmin, request, queryset): raise e pass - messages.success(request, message='Check each updated status 
on the webhook list for details') + messages.success(request, message="Check each updated status on the webhook list for details") @admin.register(AssignmentTelemetry) class AssignmentTelemetryAdmin(admin.ModelAdmin): - list_display = ('id', 'asset_slug', 'user', 'created_at') - search_fields = ['asset_slug', 'user__email', 'user__id'] - raw_id_fields = ['user'] + list_display = ("id", "asset_slug", "user", "created_at") + search_fields = ["asset_slug", "user__email", "user__id"] + raw_id_fields = ["user"] @admin.register(LearnPackWebhook) class LearnPackWebhookAdmin(admin.ModelAdmin): - list_display = ('id', 'event', 'status', 'student', 'created_at') - search_fields = ['telemetry__asset_slug', 'telemetry__user__email'] - list_filter = ['status', 'event'] - raw_id_fields = ['student', 'telemetry'] + list_display = ("id", "event", "status", "student", "created_at") + search_fields = ["telemetry__asset_slug", "telemetry__user__email"] + list_filter = ["status", "event"] + raw_id_fields = ["student", "telemetry"] actions = [process_hook, async_process_hook] def current_status(self, obj): colors = { - 'DONE': 'bg-success', - 'ERROR': 'bg-error', - 'PENDING': 'bg-warning', - 'webhook': 'bg-warning', - None: 'bg-warning', + "DONE": "bg-success", + "ERROR": "bg-error", + "PENDING": "bg-warning", + "webhook": "bg-warning", + None: "bg-warning", } def from_status(s): if s in colors: return colors[s] - return '' + return "" return format_html(f"
{obj.status}
{obj.status_text}") diff --git a/breathecode/assignments/apps.py b/breathecode/assignments/apps.py index 80e9e1305..4cab329cb 100644 --- a/breathecode/assignments/apps.py +++ b/breathecode/assignments/apps.py @@ -2,7 +2,7 @@ class TasksConfig(AppConfig): - name = 'breathecode.assignments' + name = "breathecode.assignments" def ready(self): from . import receivers # noqa: F401 diff --git a/breathecode/assignments/forms.py b/breathecode/assignments/forms.py index 445b8fcf0..8ac53f7d4 100644 --- a/breathecode/assignments/forms.py +++ b/breathecode/assignments/forms.py @@ -5,27 +5,37 @@ class DeliverAssigntmentForm(forms.Form): token = forms.CharField(widget=forms.HiddenInput()) task_id = forms.CharField(widget=forms.HiddenInput()) callback = forms.CharField(required=False, widget=forms.HiddenInput()) - task_name = forms.CharField(widget=forms.TextInput(attrs={ - 'class': 'form-control', - 'readonly': 'readonly', - })) - github_url = forms.URLField(widget=forms.TextInput( - attrs={ - 'placeholder': 'Please specify the github repository URL for this assignment.', - 'type': 'url', - 'class': 'form-control', - })) + task_name = forms.CharField( + widget=forms.TextInput( + attrs={ + "class": "form-control", + "readonly": "readonly", + } + ) + ) + github_url = forms.URLField( + widget=forms.TextInput( + attrs={ + "placeholder": "Please specify the github repository URL for this assignment.", + "type": "url", + "class": "form-control", + } + ) + ) live_url = forms.URLField( required=False, - widget=forms.TextInput(attrs={ - 'placeholder': 'Optionally you can also specify the live URL', - 'type': 'url', - 'class': 'form-control', - })) + widget=forms.TextInput( + attrs={ + "placeholder": "Optionally you can also specify the live URL", + "type": "url", + "class": "form-control", + } + ), + ) def __init__(self, params, *args, **kwargs): super(forms.Form, self).__init__(params, *args, **kwargs) - self.fields['token'].widget.attrs.update({'initial': params.get('token')}) - self.fields['callback'].widget.attrs.update({'initial': params.get('callback')}) - self.fields['task_name'].widget.attrs.update({'initial': params.get('task_name')}) - self.fields['task_id'].widget.attrs.update({'initial': params.get('task_id')}) + self.fields["token"].widget.attrs.update({"initial": params.get("token")}) + self.fields["callback"].widget.attrs.update({"initial": params.get("callback")}) + self.fields["task_name"].widget.attrs.update({"initial": params.get("task_name")}) + self.fields["task_id"].widget.attrs.update({"initial": params.get("task_id")}) diff --git a/breathecode/assignments/management/commands/assignments_garbage_collect.py b/breathecode/assignments/management/commands/assignments_garbage_collect.py index dcd62e402..d5f6e4161 100644 --- a/breathecode/assignments/management/commands/assignments_garbage_collect.py +++ b/breathecode/assignments/management/commands/assignments_garbage_collect.py @@ -3,7 +3,7 @@ class Command(BaseCommand): - help = 'Clean data from marketing module' + help = "Clean data from marketing module" def handle(self, *args, **options): @@ -11,7 +11,7 @@ def handle(self, *args, **options): def delete_old_webhooks(self): cursor = connection.cursor() - #status = 'ERROR' or status = 'PENDING' AND + # status = 'ERROR' or status = 'PENDING' AND cursor.execute("DELETE FROM assignments_learnpackwebhook WHERE created_at < NOW() - INTERVAL '30 days'") cursor.execute("DELETE FROM assignments_learnpackwebhook WHERE status <> 'ERROR' AND status <> 'PENDING'") diff --git 
a/breathecode/assignments/management/commands/delete_assignments.py b/breathecode/assignments/management/commands/delete_assignments.py index 4cc6490da..5f359761b 100644 --- a/breathecode/assignments/management/commands/delete_assignments.py +++ b/breathecode/assignments/management/commands/delete_assignments.py @@ -2,19 +2,19 @@ from django.core.management.base import BaseCommand from ...models import Task -HOST = os.environ.get('OLD_BREATHECODE_API') -DATETIME_FORMAT = '%Y-%m-%d' +HOST = os.environ.get("OLD_BREATHECODE_API") +DATETIME_FORMAT = "%Y-%m-%d" class Command(BaseCommand): - help = 'Sync academies from old breathecode' + help = "Sync academies from old breathecode" def add_arguments(self, parser): - parser.add_argument('entity', type=str) + parser.add_argument("entity", type=str) def handle(self, *args, **options): try: - func = getattr(self, options['entity'], 'entity_not_found') + func = getattr(self, options["entity"], "entity_not_found") except TypeError: print(f'Delete method for {options["entity"]} no Found!') @@ -30,23 +30,26 @@ def repeated(self, options): count += 1 b = Task.objects.filter(user__id=a.user.id, associated_slug=a.associated_slug).exclude(id=a.id).first() if b is not None: - if a.task_status == 'PENDING': + if a.task_status == "PENDING": a.delete() self.stdout.write( self.style.SUCCESS( - f'Student: {a.user.email} task {a.associated_slug} with status {a.task_status} was deleted') + f"Student: {a.user.email} task {a.associated_slug} with status {a.task_status} was deleted" + ) ) - elif b.task_status == 'PENDING': + elif b.task_status == "PENDING": b.delete() self.stdout.write( self.style.SUCCESS( - f'Student: {b.user.email} task {b.associated_slug} with status {b.task_status} was deleted') + f"Student: {b.user.email} task {b.associated_slug} with status {b.task_status} was deleted" + ) ) else: a.delete() self.stdout.write( self.style.SUCCESS( - f'Student: {a.user.email} task {a.associated_slug} with status {a.task_status} was deleted') + f"Student: {a.user.email} task {a.associated_slug} with status {a.task_status} was deleted" + ) ) - self.stdout.write(self.style.NOTICE(f'Ended with {str(count)} tasks evaluated.')) + self.stdout.write(self.style.NOTICE(f"Ended with {str(count)} tasks evaluated.")) diff --git a/breathecode/assignments/management/commands/schedule_repository_deletions.py b/breathecode/assignments/management/commands/schedule_repository_deletions.py index 3563d2812..3ab07ed6c 100644 --- a/breathecode/assignments/management/commands/schedule_repository_deletions.py +++ b/breathecode/assignments/management/commands/schedule_repository_deletions.py @@ -15,8 +15,8 @@ class Command(BaseCommand): - help = 'Clean data from marketing module' - github_url_pattern = re.compile(r'https:\/\/github\.com\/(?P<user>[^\/]+)\/(?P<repo>[^\/\s]+)\/?') + help = "Clean data from marketing module" + github_url_pattern = re.compile(r"https:\/\/github\.com\/(?P<user>[^\/]+)\/(?P<repo>[^\/\s]+)\/?") def handle(self, *args, **options): self.fill_whitelist() @@ -26,9 +26,11 @@ def handle(self, *args, **options): def github(self): processed = set() for settings in AcademyAuthSettings.objects.filter( - github_owner__isnull=False, github_owner__credentialsgithub__isnull=False).exclude(github_username=''): - self.github_client = Github(org=settings.github_username, - token=settings.github_owner.credentialsgithub.token) + github_owner__isnull=False, github_owner__credentialsgithub__isnull=False + ).exclude(github_username=""): + self.github_client = Github( + org=settings.github_username, 
token=settings.github_owner.credentialsgithub.token + ) key = (settings.github_username, settings.github_owner.id) if key in processed: @@ -37,8 +39,11 @@ def github(self): processed.add(key) last_check = None - last = RepositoryDeletionOrder.objects.filter( - provider=RepositoryDeletionOrder.Provider.GITHUB).only('created_at').last() + last = ( + RepositoryDeletionOrder.objects.filter(provider=RepositoryDeletionOrder.Provider.GITHUB) + .only("created_at") + .last() + ) if last: last_check = last.created_at @@ -50,15 +55,18 @@ def purge_deletion_orders(self): page = 0 while True: qs = RepositoryDeletionOrder.objects.filter( - status=RepositoryDeletionOrder.Status.PENDING, )[page * 100:(page + 1) * 100] + status=RepositoryDeletionOrder.Status.PENDING, + )[page * 100 : (page + 1) * 100] if len(qs) == 0: break for deletion_order in qs: - if RepositoryWhiteList.objects.filter(provider=deletion_order.provider, - repository_user__iexact=deletion_order.repository_user, - repository_name__iexact=deletion_order.repository_name).exists(): + if RepositoryWhiteList.objects.filter( + provider=deletion_order.provider, + repository_user__iexact=deletion_order.repository_user, + repository_name__iexact=deletion_order.repository_name, + ).exists(): deletion_order.delete() page += 1 @@ -66,17 +74,20 @@ def purge_deletion_orders(self): def delete_github_repositories(self): while True: - qs = RepositoryDeletionOrder.objects.filter(provider=RepositoryDeletionOrder.Provider.GITHUB, - status=RepositoryDeletionOrder.Status.PENDING, - created_at__lte=timezone.now() - relativedelta(months=2))[:100] + qs = RepositoryDeletionOrder.objects.filter( + provider=RepositoryDeletionOrder.Provider.GITHUB, + status=RepositoryDeletionOrder.Status.PENDING, + created_at__lte=timezone.now() - relativedelta(months=2), + )[:100] if qs.count() == 0: break for deletion_order in qs: try: - self.github_client.delete_org_repo(owner=deletion_order.repository_user, - repo=deletion_order.repository_name) + self.github_client.delete_org_repo( + owner=deletion_order.repository_user, repo=deletion_order.repository_name + ) deletion_order.status = RepositoryDeletionOrder.Status.DELETED deletion_order.save() @@ -90,16 +101,20 @@ def fill_whitelist(self): for asset in assets: options = [ - asset.url, asset.solution_url, asset.preview, asset.readme_url, asset.intro_video_url, - asset.solution_video_url + asset.url, + asset.solution_url, + asset.preview, + asset.readme_url, + asset.intro_video_url, + asset.solution_video_url, ] for url in [x for x in options if x]: match = self.github_url_pattern.search(url) if match: - user = match.group('user') - repo_name = match.group('repo') + user = match.group("user") + repo_name = match.group("repo") - self.add_to_whitelist('GITHUB', user, repo_name) + self.add_to_whitelist("GITHUB", user, repo_name) readme_raw = Asset.decode(asset.readme_raw) if readme_raw is None: @@ -110,7 +125,7 @@ def fill_whitelist(self): for match in urls: user, repo_name = match - self.add_to_whitelist('GITHUB', user, repo_name) + self.add_to_whitelist("GITHUB", user, repo_name) assets = Asset.objects.filter() @@ -118,43 +133,45 @@ def fill_whitelist(self): for subscription in subscriptions: match = self.github_url_pattern.search(subscription.repository) if match: - user = match.group('user') - repo_name = match.group('repo') + user = match.group("user") + repo_name = match.group("repo") - self.add_to_whitelist('GITHUB', user, repo_name) + self.add_to_whitelist("GITHUB", user, repo_name) def add_to_whitelist(self, provider: str, 
user: str, repo_name: str): - if RepositoryWhiteList.objects.filter(provider=provider, - repository_user__iexact=user, - repository_name__iexact=repo_name).exists() is False: - RepositoryWhiteList.objects.get_or_create(provider=provider, - repository_user=user, - repository_name=repo_name) + if ( + RepositoryWhiteList.objects.filter( + provider=provider, repository_user__iexact=user, repository_name__iexact=repo_name + ).exists() + is False + ): + RepositoryWhiteList.objects.get_or_create( + provider=provider, repository_user=user, repository_name=repo_name + ) def schedule_github_deletions(self, organization: str, last_check: Optional[datetime] = None): - for repos in self.github_client.get_org_repos(organization, - type='forks', - per_page=30, - direction='desc', - sort='created'): + for repos in self.github_client.get_org_repos( + organization, type="forks", per_page=30, direction="desc", sort="created" + ): for repo in repos: - created_at = parser.parse(repo['created_at']) + created_at = parser.parse(repo["created_at"]) if last_check and last_check > created_at: return - if repo['fork'] is True and repo['is_template'] is False and repo['allow_forking'] is True: - match = self.github_url_pattern.search(repo['html_url']) + if repo["fork"] is True and repo["is_template"] is False and repo["allow_forking"] is True: + match = self.github_url_pattern.search(repo["html_url"]) if match: - user = match.group('user') - repo_name = match.group('repo') - self.schedule_github_deletion('GITHUB', user, repo_name) + user = match.group("user") + repo_name = match.group("repo") + self.schedule_github_deletion("GITHUB", user, repo_name) def schedule_github_deletion(self, provider: str, user: str, repo_name: str): - if RepositoryWhiteList.objects.filter(provider=provider, repository_user=user, - repository_name=repo_name).exists(): + if RepositoryWhiteList.objects.filter( + provider=provider, repository_user=user, repository_name=repo_name + ).exists(): return - RepositoryDeletionOrder.objects.get_or_create(provider=provider, - repository_user=user, - repository_name=repo_name) + RepositoryDeletionOrder.objects.get_or_create( + provider=provider, repository_user=user, repository_name=repo_name + ) diff --git a/breathecode/assignments/management/commands/sync_assignments.py b/breathecode/assignments/management/commands/sync_assignments.py index 62dc9b4b2..8c6de5773 100644 --- a/breathecode/assignments/management/commands/sync_assignments.py +++ b/breathecode/assignments/management/commands/sync_assignments.py @@ -5,32 +5,32 @@ from breathecode.admissions.models import CohortUser from django.db.models import Count -HOST = os.environ.get('OLD_BREATHECODE_API') -DATETIME_FORMAT = '%Y-%m-%d' +HOST = os.environ.get("OLD_BREATHECODE_API") +DATETIME_FORMAT = "%Y-%m-%d" class Command(BaseCommand): - help = 'Sync academies from old breathecode' + help = "Sync academies from old breathecode" def add_arguments(self, parser): - parser.add_argument('entity', type=str) + parser.add_argument("entity", type=str) parser.add_argument( - '--cohorts', + "--cohorts", type=str, default=None, - help='Cohorts slugs to sync', + help="Cohorts slugs to sync", ) parser.add_argument( - '--students', + "--students", type=str, default=None, - help='Cohorts slugs to sync', + help="Cohorts slugs to sync", ) - parser.add_argument('--limit', action='store', dest='limit', type=int, default=0, help='How many to import') + parser.add_argument("--limit", action="store", dest="limit", type=int, default=0, help="How many to import") def 
handle(self, *args, **options): try: - func = getattr(self, options['entity'], 'entity_not_found') + func = getattr(self, options["entity"], "entity_not_found") except TypeError: print(f'Sync method for {options["entity"]} no Found!') func(options) @@ -39,41 +39,43 @@ def tasks(self, options): limit = False total = 0 - if 'limit' in options and options['limit']: - limit = options['limit'] + if "limit" in options and options["limit"]: + limit = options["limit"] - if options['students'] is not None: - emails = options['students'].split(',') + if options["students"] is not None: + emails = options["students"].split(",") for email in emails: total += 1 if limit and limit > 0 and total > limit: self.stdout.write( - self.style.SUCCESS(f'Stopped at {total} because there was a limit on the command arguments')) + self.style.SUCCESS(f"Stopped at {total} because there was a limit on the command arguments") + ) return user = User.objects.filter(email=email).first() if user is None: - raise CommandError(f'Student {email} not found new API') + raise CommandError(f"Student {email} not found new API") sync_student_tasks(user) else: - users = CohortUser.objects.filter(role='STUDENT').values('user').annotate(dcount=Count('user')) - self.stdout.write(self.style.NOTICE(f'Analyzing {users.count()} cohort users')) + users = CohortUser.objects.filter(role="STUDENT").values("user").annotate(dcount=Count("user")) + self.stdout.write(self.style.NOTICE(f"Analyzing {users.count()} cohort users")) for u in users: if limit and limit > 0 and total > limit: self.stdout.write( - self.style.SUCCESS(f'Stopped at {total} because there was a limit on the command arguments')) + self.style.SUCCESS(f"Stopped at {total} because there was a limit on the command arguments") + ) return - user = User.objects.get(id=u['user']) + user = User.objects.get(id=u["user"]) if user.task_set.count() == 0: - self.stdout.write(self.style.SUCCESS(f'Fetching tasks for student {user.email}')) + self.stdout.write(self.style.SUCCESS(f"Fetching tasks for student {user.email}")) else: - self.stdout.write(self.style.NOTICE(f'Tasks already fetched for {user.email}')) + self.stdout.write(self.style.NOTICE(f"Tasks already fetched for {user.email}")) continue total += 1 try: sync_student_tasks(user) except Exception: - self.stdout.write(self.style.NOTICE(f'Error synching student stasks for {user.email}')) + self.stdout.write(self.style.NOTICE(f"Error synching student stasks for {user.email}")) diff --git a/breathecode/assignments/migrations/0001_initial.py b/breathecode/assignments/migrations/0001_initial.py index f9539b603..af208287d 100644 --- a/breathecode/assignments/migrations/0001_initial.py +++ b/breathecode/assignments/migrations/0001_initial.py @@ -15,27 +15,37 @@ class Migration(migrations.Migration): operations = [ migrations.CreateModel( - name='Task', + name="Task", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('associated_slug', models.CharField(max_length=150, unique=True)), - ('title', models.CharField(max_length=150)), - ('task_status', - models.CharField(choices=[('PENDING', 'Pending'), ('DONE', 'Done')], default='PENDING', - max_length=15)), - ('revision_status', - models.CharField(choices=[('PENDING', 'Pending'), ('APPROVED', 'Approved'), ('REJECTED', 'Rejected')], - max_length=15)), - ('task_type', - models.CharField(choices=[('PROJECT', 'project'), ('QUIZ', 'quiz'), ('LESSON', 'lesson'), - ('REPLIT', 'replit')], - max_length=15)), - ('github_url', 
models.CharField(default=True, max_length=150, null=True)), - ('live_url', models.CharField(default=True, max_length=150, null=True)), - ('description', models.TextField(max_length=450)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("associated_slug", models.CharField(max_length=150, unique=True)), + ("title", models.CharField(max_length=150)), + ( + "task_status", + models.CharField( + choices=[("PENDING", "Pending"), ("DONE", "Done")], default="PENDING", max_length=15 + ), + ), + ( + "revision_status", + models.CharField( + choices=[("PENDING", "Pending"), ("APPROVED", "Approved"), ("REJECTED", "Rejected")], + max_length=15, + ), + ), + ( + "task_type", + models.CharField( + choices=[("PROJECT", "project"), ("QUIZ", "quiz"), ("LESSON", "lesson"), ("REPLIT", "replit")], + max_length=15, + ), + ), + ("github_url", models.CharField(default=True, max_length=150, null=True)), + ("live_url", models.CharField(default=True, max_length=150, null=True)), + ("description", models.TextField(max_length=450)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("user", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ], ), ] diff --git a/breathecode/assignments/migrations/0002_auto_20200703_1722.py b/breathecode/assignments/migrations/0002_auto_20200703_1722.py index 667fadd8c..1f4e7ea26 100644 --- a/breathecode/assignments/migrations/0002_auto_20200703_1722.py +++ b/breathecode/assignments/migrations/0002_auto_20200703_1722.py @@ -6,30 +6,32 @@ class Migration(migrations.Migration): dependencies = [ - ('assignments', '0001_initial'), + ("assignments", "0001_initial"), ] operations = [ migrations.AlterField( - model_name='task', - name='description', + model_name="task", + name="description", field=models.TextField(blank=True, max_length=450), ), migrations.AlterField( - model_name='task', - name='github_url', + model_name="task", + name="github_url", field=models.CharField(blank=True, default=None, max_length=150, null=True), ), migrations.AlterField( - model_name='task', - name='live_url', + model_name="task", + name="live_url", field=models.CharField(blank=True, default=None, max_length=150, null=True), ), migrations.AlterField( - model_name='task', - name='revision_status', - field=models.CharField(choices=[('PENDING', 'Pending'), ('APPROVED', 'Approved'), ('REJECTED', 'Rejected')], - default='PENDING', - max_length=15), + model_name="task", + name="revision_status", + field=models.CharField( + choices=[("PENDING", "Pending"), ("APPROVED", "Approved"), ("REJECTED", "Rejected")], + default="PENDING", + max_length=15, + ), ), ] diff --git a/breathecode/assignments/migrations/0003_auto_20200703_2205.py b/breathecode/assignments/migrations/0003_auto_20200703_2205.py index 9e738b026..ee394958f 100644 --- a/breathecode/assignments/migrations/0003_auto_20200703_2205.py +++ b/breathecode/assignments/migrations/0003_auto_20200703_2205.py @@ -6,20 +6,21 @@ class Migration(migrations.Migration): dependencies = [ - ('assignments', '0002_auto_20200703_1722'), + ("assignments", "0002_auto_20200703_1722"), ] operations = [ migrations.AlterField( - model_name='task', - name='associated_slug', + model_name="task", + 
name="associated_slug", field=models.CharField(max_length=150), ), migrations.AlterField( - model_name='task', - name='task_type', - field=models.CharField(choices=[('PROJECT', 'project'), ('QUIZ', 'quiz'), ('LESSON', 'lesson'), - ('EXERCISE', 'Exercise')], - max_length=15), + model_name="task", + name="task_type", + field=models.CharField( + choices=[("PROJECT", "project"), ("QUIZ", "quiz"), ("LESSON", "lesson"), ("EXERCISE", "Exercise")], + max_length=15, + ), ), ] diff --git a/breathecode/assignments/migrations/0004_auto_20200708_0049.py b/breathecode/assignments/migrations/0004_auto_20200708_0049.py index cbdfb03cf..215b14e29 100644 --- a/breathecode/assignments/migrations/0004_auto_20200708_0049.py +++ b/breathecode/assignments/migrations/0004_auto_20200708_0049.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('assignments', '0003_auto_20200703_2205'), + ("assignments", "0003_auto_20200703_2205"), ] operations = [ migrations.AlterField( - model_name='task', - name='associated_slug', + model_name="task", + name="associated_slug", field=models.SlugField(max_length=150), ), ] diff --git a/breathecode/assignments/migrations/0005_task_cohort.py b/breathecode/assignments/migrations/0005_task_cohort.py index 756c3d590..109c8c698 100644 --- a/breathecode/assignments/migrations/0005_task_cohort.py +++ b/breathecode/assignments/migrations/0005_task_cohort.py @@ -7,17 +7,16 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0013_auto_20201209_0216'), - ('assignments', '0004_auto_20200708_0049'), + ("admissions", "0013_auto_20201209_0216"), + ("assignments", "0004_auto_20200708_0049"), ] operations = [ migrations.AddField( - model_name='task', - name='cohort', - field=models.ForeignKey(blank=True, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='admissions.cohort'), + model_name="task", + name="cohort", + field=models.ForeignKey( + blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to="admissions.cohort" + ), ), ] diff --git a/breathecode/assignments/migrations/0006_cohortproxy_userproxy.py b/breathecode/assignments/migrations/0006_cohortproxy_userproxy.py index c4d0da347..c545cd6a7 100644 --- a/breathecode/assignments/migrations/0006_cohortproxy_userproxy.py +++ b/breathecode/assignments/migrations/0006_cohortproxy_userproxy.py @@ -7,33 +7,33 @@ class Migration(migrations.Migration): dependencies = [ - ('auth', '0012_alter_user_first_name_max_length'), - ('admissions', '0014_auto_20201218_0534'), - ('assignments', '0005_task_cohort'), + ("auth", "0012_alter_user_first_name_max_length"), + ("admissions", "0014_auto_20201218_0534"), + ("assignments", "0005_task_cohort"), ] operations = [ migrations.CreateModel( - name='CohortProxy', + name="CohortProxy", fields=[], options={ - 'proxy': True, - 'indexes': [], - 'constraints': [], + "proxy": True, + "indexes": [], + "constraints": [], }, - bases=('admissions.cohort', ), + bases=("admissions.cohort",), ), migrations.CreateModel( - name='UserProxy', + name="UserProxy", fields=[], options={ - 'proxy': True, - 'indexes': [], - 'constraints': [], + "proxy": True, + "indexes": [], + "constraints": [], }, - bases=('auth.user', ), + bases=("auth.user",), managers=[ - ('objects', django.contrib.auth.models.UserManager()), + ("objects", django.contrib.auth.models.UserManager()), ], ), ] diff --git a/breathecode/assignments/migrations/0007_finalproject.py b/breathecode/assignments/migrations/0007_finalproject.py index b58e2d086..2f89bf7ea 100644 --- 
a/breathecode/assignments/migrations/0007_finalproject.py +++ b/breathecode/assignments/migrations/0007_finalproject.py @@ -8,54 +8,71 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0041_cohortuser_watching'), + ("admissions", "0041_cohortuser_watching"), migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ('assignments', '0006_cohortproxy_userproxy'), + ("assignments", "0006_cohortproxy_userproxy"), ] operations = [ migrations.CreateModel( - name='FinalProject', + name="FinalProject", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('name', models.CharField(max_length=150)), - ('one_line_desc', models.CharField(max_length=150)), - ('description', models.TextField()), - ('project_status', - models.CharField(choices=[('PENDING', 'Pending'), ('DONE', 'Done')], - default='PENDING', - help_text='Done projects will be reviewed for publication', - max_length=15)), - ('revision_status', - models.CharField(choices=[('PENDING', 'Pending'), ('APPROVED', 'Approved'), ('REJECTED', 'Rejected')], - default='PENDING', - help_text='Only approved projects will display on the feature projects list', - max_length=15)), - ('revision_message', models.TextField(blank=True, default=None, null=True)), - ('visibility_status', - models.CharField(choices=[('PRIVATE', 'Private'), ('UNLISTED', 'Unlisted'), ('PUBLIC', 'Public')], - default='PRIVATE', - help_text='Public project will be visible to other users', - max_length=15)), - ('repo_url', models.URLField(blank=True, default=None, null=True)), - ('public_url', models.URLField(blank=True, default=None, null=True)), - ('logo_url', models.URLField(blank=True, default=None, null=True)), - ('slides_url', models.URLField(blank=True, default=None, null=True)), - ('video_demo_url', models.URLField(blank=True, default=None, null=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('cohort', - models.ForeignKey(blank=True, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='admissions.cohort')), - ('members', models.ManyToManyField(related_name='final_projects', to=settings.AUTH_USER_MODEL)), - ('repo_owner', - models.ForeignKey(blank=True, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - related_name='projects_owned', - to=settings.AUTH_USER_MODEL)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("name", models.CharField(max_length=150)), + ("one_line_desc", models.CharField(max_length=150)), + ("description", models.TextField()), + ( + "project_status", + models.CharField( + choices=[("PENDING", "Pending"), ("DONE", "Done")], + default="PENDING", + help_text="Done projects will be reviewed for publication", + max_length=15, + ), + ), + ( + "revision_status", + models.CharField( + choices=[("PENDING", "Pending"), ("APPROVED", "Approved"), ("REJECTED", "Rejected")], + default="PENDING", + help_text="Only approved projects will display on the feature projects list", + max_length=15, + ), + ), + ("revision_message", models.TextField(blank=True, default=None, null=True)), + ( + "visibility_status", + models.CharField( + choices=[("PRIVATE", "Private"), ("UNLISTED", "Unlisted"), ("PUBLIC", "Public")], + default="PRIVATE", + help_text="Public project will be visible to other users", + max_length=15, + ), + ), + ("repo_url", models.URLField(blank=True, default=None, null=True)), + ("public_url", models.URLField(blank=True, 
default=None, null=True)), + ("logo_url", models.URLField(blank=True, default=None, null=True)), + ("slides_url", models.URLField(blank=True, default=None, null=True)), + ("video_demo_url", models.URLField(blank=True, default=None, null=True)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ( + "cohort", + models.ForeignKey( + blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to="admissions.cohort" + ), + ), + ("members", models.ManyToManyField(related_name="final_projects", to=settings.AUTH_USER_MODEL)), + ( + "repo_owner", + models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + related_name="projects_owned", + to=settings.AUTH_USER_MODEL, + ), + ), ], ), ] diff --git a/breathecode/assignments/migrations/0008_auto_20220711_1823.py b/breathecode/assignments/migrations/0008_auto_20220711_1823.py index 80068f2b8..442464456 100644 --- a/breathecode/assignments/migrations/0008_auto_20220711_1823.py +++ b/breathecode/assignments/migrations/0008_auto_20220711_1823.py @@ -6,25 +6,37 @@ class Migration(migrations.Migration): dependencies = [ - ('assignments', '0007_finalproject'), + ("assignments", "0007_finalproject"), ] operations = [ migrations.AlterField( - model_name='finalproject', - name='revision_status', - field=models.CharField(choices=[('PENDING', 'Pending'), ('APPROVED', 'Approved'), ('REJECTED', 'Rejected'), - ('IGNORED', 'Ignored')], - default='PENDING', - help_text='Only approved projects will display on the feature projects list', - max_length=15), + model_name="finalproject", + name="revision_status", + field=models.CharField( + choices=[ + ("PENDING", "Pending"), + ("APPROVED", "Approved"), + ("REJECTED", "Rejected"), + ("IGNORED", "Ignored"), + ], + default="PENDING", + help_text="Only approved projects will display on the feature projects list", + max_length=15, + ), ), migrations.AlterField( - model_name='task', - name='revision_status', - field=models.CharField(choices=[('PENDING', 'Pending'), ('APPROVED', 'Approved'), ('REJECTED', 'Rejected'), - ('IGNORED', 'Ignored')], - default='PENDING', - max_length=15), + model_name="task", + name="revision_status", + field=models.CharField( + choices=[ + ("PENDING", "Pending"), + ("APPROVED", "Approved"), + ("REJECTED", "Rejected"), + ("IGNORED", "Ignored"), + ], + default="PENDING", + max_length=15, + ), ), ] diff --git a/breathecode/assignments/migrations/0009_task_subtasks.py b/breathecode/assignments/migrations/0009_task_subtasks.py index 0ac8fb294..45a16e365 100644 --- a/breathecode/assignments/migrations/0009_task_subtasks.py +++ b/breathecode/assignments/migrations/0009_task_subtasks.py @@ -6,18 +6,18 @@ class Migration(migrations.Migration): dependencies = [ - ('assignments', '0008_auto_20220711_1823'), + ("assignments", "0008_auto_20220711_1823"), ] operations = [ migrations.AddField( - model_name='task', - name='subtasks', + model_name="task", + name="subtasks", field=models.JSONField( blank=True, default=None, - help_text= - 'If readme contains checkboxes they will be converted into substasks and this json will kep track of completition', - null=True), + help_text="If readme contains checkboxes they will be converted into substasks and this json will kep track of completition", + null=True, + ), ), ] diff --git a/breathecode/assignments/migrations/0010_auto_20221026_0340.py b/breathecode/assignments/migrations/0010_auto_20221026_0340.py index 14855e639..8315eb8e0 100644 --- 
a/breathecode/assignments/migrations/0010_auto_20221026_0340.py +++ b/breathecode/assignments/migrations/0010_auto_20221026_0340.py @@ -9,27 +9,27 @@ class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ('assignments', '0009_task_subtasks'), + ("assignments", "0009_task_subtasks"), ] operations = [ migrations.CreateModel( - name='UserAttachment', + name="UserAttachment", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('slug', models.SlugField(max_length=150, unique=True)), - ('name', models.CharField(max_length=150)), - ('mime', models.CharField(max_length=60)), - ('url', models.URLField(max_length=255)), - ('hash', models.CharField(max_length=64)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("slug", models.SlugField(max_length=150, unique=True)), + ("name", models.CharField(max_length=150)), + ("mime", models.CharField(max_length=60)), + ("url", models.URLField(max_length=255)), + ("hash", models.CharField(max_length=64)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("user", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ], ), migrations.AddField( - model_name='task', - name='attachments', - field=models.ManyToManyField(to='assignments.UserAttachment'), + model_name="task", + name="attachments", + field=models.ManyToManyField(to="assignments.UserAttachment"), ), ] diff --git a/breathecode/assignments/migrations/0011_alter_task_attachments.py b/breathecode/assignments/migrations/0011_alter_task_attachments.py index 3f3a1d89f..318bb3677 100644 --- a/breathecode/assignments/migrations/0011_alter_task_attachments.py +++ b/breathecode/assignments/migrations/0011_alter_task_attachments.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('assignments', '0010_auto_20221026_0340'), + ("assignments", "0010_auto_20221026_0340"), ] operations = [ migrations.AlterField( - model_name='task', - name='attachments', - field=models.ManyToManyField(blank=True, to='assignments.UserAttachment'), + model_name="task", + name="attachments", + field=models.ManyToManyField(blank=True, to="assignments.UserAttachment"), ), ] diff --git a/breathecode/assignments/migrations/0012_finalproject_screenshot.py b/breathecode/assignments/migrations/0012_finalproject_screenshot.py index 96de36e46..89229c98f 100644 --- a/breathecode/assignments/migrations/0012_finalproject_screenshot.py +++ b/breathecode/assignments/migrations/0012_finalproject_screenshot.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('assignments', '0011_alter_task_attachments'), + ("assignments", "0011_alter_task_attachments"), ] operations = [ migrations.AddField( - model_name='finalproject', - name='screenshot', + model_name="finalproject", + name="screenshot", field=models.URLField(blank=True, default=None, null=True), ), ] diff --git a/breathecode/assignments/migrations/0013_task_opened_at.py b/breathecode/assignments/migrations/0013_task_opened_at.py index 901fbd8e1..4cc0d011e 100644 --- a/breathecode/assignments/migrations/0013_task_opened_at.py +++ 
b/breathecode/assignments/migrations/0013_task_opened_at.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('assignments', '0012_finalproject_screenshot'), + ("assignments", "0012_finalproject_screenshot"), ] operations = [ migrations.AddField( - model_name='task', - name='opened_at', + model_name="task", + name="opened_at", field=models.DateTimeField(blank=True, default=None, null=True), ), ] diff --git a/breathecode/assignments/migrations/0014_task_rigobot_repository_id.py b/breathecode/assignments/migrations/0014_task_rigobot_repository_id.py index 94a6c58da..372260c3c 100644 --- a/breathecode/assignments/migrations/0014_task_rigobot_repository_id.py +++ b/breathecode/assignments/migrations/0014_task_rigobot_repository_id.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('assignments', '0013_task_opened_at'), + ("assignments", "0013_task_opened_at"), ] operations = [ migrations.AddField( - model_name='task', - name='rigobot_repository_id', + model_name="task", + name="rigobot_repository_id", field=models.IntegerField(blank=True, default=None, null=True), ), ] diff --git a/breathecode/assignments/migrations/0015_auto_20231017_0605.py b/breathecode/assignments/migrations/0015_auto_20231017_0605.py index ca57ba72b..33ee86c1e 100644 --- a/breathecode/assignments/migrations/0015_auto_20231017_0605.py +++ b/breathecode/assignments/migrations/0015_auto_20231017_0605.py @@ -6,48 +6,54 @@ class Migration(migrations.Migration): dependencies = [ - ('assignments', '0014_task_rigobot_repository_id'), + ("assignments", "0014_task_rigobot_repository_id"), ] operations = [ migrations.AlterField( - model_name='task', - name='opened_at', + model_name="task", + name="opened_at", field=models.DateTimeField(blank=True, db_index=True, default=None, null=True), ), migrations.AlterField( - model_name='task', - name='revision_status', - field=models.CharField(choices=[('PENDING', 'Pending'), ('APPROVED', 'Approved'), ('REJECTED', 'Rejected'), - ('IGNORED', 'Ignored')], - db_index=True, - default='PENDING', - max_length=15), + model_name="task", + name="revision_status", + field=models.CharField( + choices=[ + ("PENDING", "Pending"), + ("APPROVED", "Approved"), + ("REJECTED", "Rejected"), + ("IGNORED", "Ignored"), + ], + db_index=True, + default="PENDING", + max_length=15, + ), ), migrations.AlterField( - model_name='task', - name='rigobot_repository_id', + model_name="task", + name="rigobot_repository_id", field=models.IntegerField(blank=True, db_index=True, default=None, null=True), ), migrations.AlterField( - model_name='task', - name='task_status', - field=models.CharField(choices=[('PENDING', 'Pending'), ('DONE', 'Done')], - db_index=True, - default='PENDING', - max_length=15), + model_name="task", + name="task_status", + field=models.CharField( + choices=[("PENDING", "Pending"), ("DONE", "Done")], db_index=True, default="PENDING", max_length=15 + ), ), migrations.AlterField( - model_name='task', - name='task_type', - field=models.CharField(choices=[('PROJECT', 'project'), ('QUIZ', 'quiz'), ('LESSON', 'lesson'), - ('EXERCISE', 'Exercise')], - db_index=True, - max_length=15), + model_name="task", + name="task_type", + field=models.CharField( + choices=[("PROJECT", "project"), ("QUIZ", "quiz"), ("LESSON", "lesson"), ("EXERCISE", "Exercise")], + db_index=True, + max_length=15, + ), ), migrations.AlterField( - model_name='task', - name='title', + model_name="task", + name="title", field=models.CharField(db_index=True, max_length=150), ), ] diff --git 
a/breathecode/assignments/migrations/0016_task_delivered_at.py b/breathecode/assignments/migrations/0016_task_delivered_at.py index 77e7823aa..1a93f3c98 100644 --- a/breathecode/assignments/migrations/0016_task_delivered_at.py +++ b/breathecode/assignments/migrations/0016_task_delivered_at.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('assignments', '0015_auto_20231017_0605'), + ("assignments", "0015_auto_20231017_0605"), ] operations = [ migrations.AddField( - model_name='task', - name='delivered_at', + model_name="task", + name="delivered_at", field=models.DateTimeField(blank=True, db_index=True, default=None, null=True), ), ] diff --git a/breathecode/assignments/migrations/0017_assignmenttelemetry_task_telemetry_learnpackwebhook.py b/breathecode/assignments/migrations/0017_assignmenttelemetry_task_telemetry_learnpackwebhook.py index 08dff7ec9..c55f63749 100644 --- a/breathecode/assignments/migrations/0017_assignmenttelemetry_task_telemetry_learnpackwebhook.py +++ b/breathecode/assignments/migrations/0017_assignmenttelemetry_task_telemetry_learnpackwebhook.py @@ -8,65 +8,83 @@ class Migration(migrations.Migration): dependencies = [ - ('assignments', '0016_task_delivered_at'), + ("assignments", "0016_task_delivered_at"), migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( - name='AssignmentTelemetry', + name="AssignmentTelemetry", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('asset_slug', models.CharField(max_length=200)), - ('telemetry', - models.JSONField(blank=True, - default=None, - help_text='Incoming JSON from LearnPack with detailed telemetry info', - null=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("asset_slug", models.CharField(max_length=200)), + ( + "telemetry", + models.JSONField( + blank=True, + default=None, + help_text="Incoming JSON from LearnPack with detailed telemetry info", + null=True, + ), + ), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("user", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ], ), migrations.AddField( - model_name='task', - name='telemetry', + model_name="task", + name="telemetry", field=models.ForeignKey( blank=True, default=None, - help_text= - 'Learnpack telemetry json will be stored and shared among all the assignments form the same associalted_slug', + help_text="Learnpack telemetry json will be stored and shared among all the assignments form the same associalted_slug", null=True, on_delete=django.db.models.deletion.CASCADE, - to='assignments.assignmenttelemetry'), + to="assignments.assignmenttelemetry", + ), ), migrations.CreateModel( - name='LearnPackWebhook', + name="LearnPackWebhook", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('is_streaming', models.BooleanField()), - ('event', models.CharField(max_length=15)), - ('payload', models.JSONField(blank=True, default=None, help_text='Will be set by learnpack', - null=True)), - ('status', - models.CharField(choices=[('PENDING', 'Pending'), ('DONE', 'Done'), ('ERROR', 'Error')], - 
default='PENDING', - max_length=9)), - ('status_text', models.TextField(blank=True, default=None, null=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('student', - models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to=settings.AUTH_USER_MODEL)), - ('telemetry', - models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='assignments.assignmenttelemetry')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("is_streaming", models.BooleanField()), + ("event", models.CharField(max_length=15)), + ( + "payload", + models.JSONField(blank=True, default=None, help_text="Will be set by learnpack", null=True), + ), + ( + "status", + models.CharField( + choices=[("PENDING", "Pending"), ("DONE", "Done"), ("ERROR", "Error")], + default="PENDING", + max_length=9, + ), + ), + ("status_text", models.TextField(blank=True, default=None, null=True)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ( + "student", + models.ForeignKey( + blank=True, + default=None, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to=settings.AUTH_USER_MODEL, + ), + ), + ( + "telemetry", + models.ForeignKey( + blank=True, + default=None, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="assignments.assignmenttelemetry", + ), + ), ], ), ] diff --git a/breathecode/assignments/migrations/0018_repositorydeletionorder_repositorywhitelist.py b/breathecode/assignments/migrations/0018_repositorydeletionorder_repositorywhitelist.py index e259983b4..25e09d159 100644 --- a/breathecode/assignments/migrations/0018_repositorydeletionorder_repositorywhitelist.py +++ b/breathecode/assignments/migrations/0018_repositorydeletionorder_repositorywhitelist.py @@ -6,35 +6,43 @@ class Migration(migrations.Migration): dependencies = [ - ('assignments', '0017_assignmenttelemetry_task_telemetry_learnpackwebhook'), + ("assignments", "0017_assignmenttelemetry_task_telemetry_learnpackwebhook"), ] operations = [ migrations.CreateModel( - name='RepositoryDeletionOrder', + name="RepositoryDeletionOrder", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('provider', models.CharField(choices=[('GITHUB', 'GitHub')], default='GITHUB', max_length=15)), - ('status', - models.CharField(choices=[('PENDING', 'Pending'), ('ERROR', 'Error'), ('DELETED', 'Deleted'), - ('CANCELLED', 'Cancelled')], - default='PENDING', - max_length=15)), - ('status_text', models.TextField(blank=True, default=None, null=True)), - ('repository_user', models.CharField(max_length=100)), - ('repository_name', models.CharField(max_length=100)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("provider", models.CharField(choices=[("GITHUB", "GitHub")], default="GITHUB", max_length=15)), + ( + "status", + models.CharField( + choices=[ + ("PENDING", "Pending"), + ("ERROR", "Error"), + ("DELETED", "Deleted"), + ("CANCELLED", "Cancelled"), + ], + default="PENDING", + max_length=15, + ), + ), + ("status_text", models.TextField(blank=True, default=None, null=True)), + ("repository_user", models.CharField(max_length=100)), + ("repository_name", 
models.CharField(max_length=100)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), ], ), migrations.CreateModel( - name='RepositoryWhiteList', + name="RepositoryWhiteList", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('provider', models.CharField(choices=[('GITHUB', 'GitHub')], default='GITHUB', max_length=15)), - ('repository_user', models.CharField(max_length=100)), - ('repository_name', models.CharField(max_length=100)), - ('created_at', models.DateTimeField(auto_now_add=True)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("provider", models.CharField(choices=[("GITHUB", "GitHub")], default="GITHUB", max_length=15)), + ("repository_user", models.CharField(max_length=100)), + ("repository_name", models.CharField(max_length=100)), + ("created_at", models.DateTimeField(auto_now_add=True)), ], ), ] diff --git a/breathecode/assignments/models.py b/breathecode/assignments/models.py index 12c8414d7..f5159a5f8 100644 --- a/breathecode/assignments/models.py +++ b/breathecode/assignments/models.py @@ -6,7 +6,7 @@ from . import signals -__all__ = ['UserProxy', 'CohortProxy', 'Task', 'UserAttachment'] +__all__ = ["UserProxy", "CohortProxy", "Task", "UserAttachment"] class UserAttachment(models.Model): @@ -22,48 +22,47 @@ class UserAttachment(models.Model): updated_at = models.DateTimeField(auto_now=True, editable=False) def __str__(self): - return f'{self.name} ({self.id})' + return f"{self.name} ({self.id})" class AssignmentTelemetry(models.Model): user = models.ForeignKey(User, on_delete=models.CASCADE) asset_slug = models.CharField(max_length=200) - telemetry = models.JSONField(null=True, - blank=True, - default=None, - help_text='Incoming JSON from LearnPack with detailed telemetry info') + telemetry = models.JSONField( + null=True, blank=True, default=None, help_text="Incoming JSON from LearnPack with detailed telemetry info" + ) created_at = models.DateTimeField(auto_now_add=True, editable=False) updated_at = models.DateTimeField(auto_now=True, editable=False) -PENDING = 'PENDING' -DONE = 'DONE' +PENDING = "PENDING" +DONE = "DONE" TASK_STATUS = ( - (PENDING, 'Pending'), - (DONE, 'Done'), + (PENDING, "Pending"), + (DONE, "Done"), ) -APPROVED = 'APPROVED' -REJECTED = 'REJECTED' -IGNORED = 'IGNORED' +APPROVED = "APPROVED" +REJECTED = "REJECTED" +IGNORED = "IGNORED" REVISION_STATUS = ( - (PENDING, 'Pending'), - (APPROVED, 'Approved'), - (REJECTED, 'Rejected'), - (IGNORED, 'Ignored'), + (PENDING, "Pending"), + (APPROVED, "Approved"), + (REJECTED, "Rejected"), + (IGNORED, "Ignored"), ) -PROJECT = 'PROJECT' -QUIZ = 'QUIZ' -LESSON = 'LESSON' -EXERCISE = 'EXERCISE' +PROJECT = "PROJECT" +QUIZ = "QUIZ" +LESSON = "LESSON" +EXERCISE = "EXERCISE" TASK_TYPE = ( - (PROJECT, 'project'), - (QUIZ, 'quiz'), - (LESSON, 'lesson'), - (EXERCISE, 'Exercise'), + (PROJECT, "project"), + (QUIZ, "quiz"), + (LESSON, "lesson"), + (EXERCISE, "Exercise"), ) @@ -80,8 +79,8 @@ class Task(models.Model): default=None, null=True, blank=True, - help_text= - 'Learnpack telemetry json will be stored and shared among all the assignments form the same associalted_slug') + help_text="Learnpack telemetry json will be stored and shared among all the assignments form the same associalted_slug", + ) associated_slug = models.SlugField(max_length=150, db_index=True) title = models.CharField(max_length=150, db_index=True) @@ -101,8 +100,7 @@ class 
Task(models.Model): default=None, blank=True, null=True, - help_text= - 'If readme contains checkboxes they will be converted into substasks and this json will kep track of completition' + help_text="If readme contains checkboxes they will be converted into substasks and this json will kep track of completition", ) cohort = models.ForeignKey(Cohort, on_delete=models.CASCADE, blank=True, null=True) @@ -154,42 +152,43 @@ class Meta: proxy = True -PRIVATE = 'PRIVATE' -UNLISTED = 'UNLISTED' -PUBLIC = 'PUBLIC' +PRIVATE = "PRIVATE" +UNLISTED = "UNLISTED" +PUBLIC = "PUBLIC" VISIBILITY_STATUS = ( - (PRIVATE, 'Private'), - (UNLISTED, 'Unlisted'), - (PUBLIC, 'Public'), + (PRIVATE, "Private"), + (UNLISTED, "Unlisted"), + (PUBLIC, "Public"), ) class FinalProject(models.Model): - repo_owner = models.ForeignKey(User, - on_delete=models.SET_NULL, - blank=True, - null=True, - related_name='projects_owned') + repo_owner = models.ForeignKey( + User, on_delete=models.SET_NULL, blank=True, null=True, related_name="projects_owned" + ) name = models.CharField(max_length=150) one_line_desc = models.CharField(max_length=150) description = models.TextField() - members = models.ManyToManyField(User, related_name='final_projects') + members = models.ManyToManyField(User, related_name="final_projects") - project_status = models.CharField(max_length=15, - choices=TASK_STATUS, - default=PENDING, - help_text='Done projects will be reviewed for publication') - revision_status = models.CharField(max_length=15, - choices=REVISION_STATUS, - default=PENDING, - help_text='Only approved projects will display on the feature projects list') + project_status = models.CharField( + max_length=15, choices=TASK_STATUS, default=PENDING, help_text="Done projects will be reviewed for publication" + ) + revision_status = models.CharField( + max_length=15, + choices=REVISION_STATUS, + default=PENDING, + help_text="Only approved projects will display on the feature projects list", + ) revision_message = models.TextField(null=True, blank=True, default=None) - visibility_status = models.CharField(max_length=15, - choices=VISIBILITY_STATUS, - default=PRIVATE, - help_text='Public project will be visible to other users') + visibility_status = models.CharField( + max_length=15, + choices=VISIBILITY_STATUS, + default=PRIVATE, + help_text="Public project will be visible to other users", + ) repo_url = models.URLField(blank=True, null=True, default=None) public_url = models.URLField(blank=True, null=True, default=None) @@ -206,11 +205,11 @@ class FinalProject(models.Model): # PENDING = 'PENDING' # DONE = 'DONE' -ERROR = 'ERROR' +ERROR = "ERROR" LEARNPACK_WEBHOOK_STATUS = ( - (PENDING, 'Pending'), - (DONE, 'Done'), - (ERROR, 'Error'), + (PENDING, "Pending"), + (DONE, "Done"), + (ERROR, "Error"), ) @@ -218,7 +217,7 @@ class LearnPackWebhook(models.Model): is_streaming = models.BooleanField() event = models.CharField(max_length=15) - payload = models.JSONField(blank=True, null=True, default=None, help_text='Will be set by learnpack') + payload = models.JSONField(blank=True, null=True, default=None, help_text="Will be set by learnpack") student = models.ForeignKey(User, on_delete=models.CASCADE, blank=True, null=True, default=None) telemetry = models.ForeignKey(AssignmentTelemetry, on_delete=models.CASCADE, blank=True, null=True, default=None) @@ -233,17 +232,17 @@ class LearnPackWebhook(models.Model): class Provider(models.TextChoices): - GITHUB = 'GITHUB', 'GitHub' + GITHUB = "GITHUB", "GitHub" class RepositoryDeletionOrder(models.Model): Provider = 
Provider class Status(models.TextChoices): - PENDING = 'PENDING', 'Pending' - ERROR = 'ERROR', 'Error' - DELETED = 'DELETED', 'Deleted' - CANCELLED = 'CANCELLED', 'Cancelled' + PENDING = "PENDING", "Pending" + ERROR = "ERROR", "Error" + DELETED = "DELETED", "Deleted" + CANCELLED = "CANCELLED", "Cancelled" provider = models.CharField(max_length=15, choices=Provider, default=Provider.GITHUB) status = models.CharField(max_length=15, choices=Status, default=Status.PENDING) @@ -276,8 +275,10 @@ def save(self, *args, **kwargs): self.full_clean() super().save(*args, **kwargs) - RepositoryDeletionOrder.objects.filter(provider=self.provider, - repository_user__iexact=self.repository_user, - repository_name__iexact=self.repository_name).exclude( - Q(status=RepositoryDeletionOrder.Status.DELETED) - | Q(status=RepositoryDeletionOrder.Status.CANCELLED)).delete() + RepositoryDeletionOrder.objects.filter( + provider=self.provider, + repository_user__iexact=self.repository_user, + repository_name__iexact=self.repository_name, + ).exclude( + Q(status=RepositoryDeletionOrder.Status.DELETED) | Q(status=RepositoryDeletionOrder.Status.CANCELLED) + ).delete() diff --git a/breathecode/assignments/permissions/consumers.py b/breathecode/assignments/permissions/consumers.py index d0023bc03..685ff2cd8 100644 --- a/breathecode/assignments/permissions/consumers.py +++ b/breathecode/assignments/permissions/consumers.py @@ -11,15 +11,18 @@ def code_revision_service(context: ServiceContext, args: tuple, kwargs: dict) -> tuple[dict, tuple, dict]: - request = context['request'] + request = context["request"] lang = get_user_language(request) - if is_no_saas_student_up_to_date_in_any_cohort(context['request'].user) is False: + if is_no_saas_student_up_to_date_in_any_cohort(context["request"].user) is False: raise PaymentException( - translation(lang, - en='You can\'t access this asset because your finantial status is not up to date', - es='No puedes acceder a este recurso porque tu estado financiero no está al dia', - slug='cohort-user-status-later')) + translation( + lang, + en="You can't access this asset because your finantial status is not up to date", + es="No puedes acceder a este recurso porque tu estado financiero no está al dia", + slug="cohort-user-status-later", + ) + ) - context['consumables'] = context['consumables'].filter(service_item__service__type=Service.Type.VOID) + context["consumables"] = context["consumables"].filter(service_item__service__type=Service.Type.VOID) return (context, args, kwargs) diff --git a/breathecode/assignments/permissions/flags.py b/breathecode/assignments/permissions/flags.py index b9e2626a9..4d5f36ff1 100644 --- a/breathecode/assignments/permissions/flags.py +++ b/breathecode/assignments/permissions/flags.py @@ -1,8 +1,7 @@ -__all__ = ['api'] +__all__ = ["api"] -class API: - ... +class API: ... 
api = API() diff --git a/breathecode/assignments/receivers.py b/breathecode/assignments/receivers.py index c0f00fcc7..58128a84a 100644 --- a/breathecode/assignments/receivers.py +++ b/breathecode/assignments/receivers.py @@ -15,17 +15,19 @@ @receiver(syllabus_asset_slug_updated) def process_syllabus_asset_slug_updated(sender, **kwargs): - from_slug = kwargs.pop('from_slug', None) - to_slug = kwargs.pop('to_slug', None) - asset_type = kwargs.pop('asset_type', None) + from_slug = kwargs.pop("from_slug", None) + to_slug = kwargs.pop("to_slug", None) + asset_type = kwargs.pop("asset_type", None) Task.objects.filter(associated_slug=from_slug, task_type=asset_type.upper()).update(associated_slug=to_slug) - logger.debug(f'{asset_type} slug {from_slug} was replaced with {to_slug} on all the syllabus, as a sideeffect ' - 'we are replacing the slug also on the student tasks') + logger.debug( + f"{asset_type} slug {from_slug} was replaced with {to_slug} on all the syllabus, as a sideeffect " + "we are replacing the slug also on the student tasks" + ) @receiver(assignment_status_updated, sender=Task) def process_cohort_history_log(sender: Type[Task], instance: Task, **kwargs: Any): - logger.info('Procesing Cohort history log for cohort: ' + str(instance.id)) + logger.info("Procesing Cohort history log for cohort: " + str(instance.id)) tasks.set_cohort_user_assignments.delay(instance.id) diff --git a/breathecode/assignments/serializers.py b/breathecode/assignments/serializers.py index 0d1fba69b..861a0e915 100644 --- a/breathecode/assignments/serializers.py +++ b/breathecode/assignments/serializers.py @@ -48,6 +48,7 @@ class TaskAttachmentSerializer(serpy.Serializer): class TaskGETSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. id = serpy.Field() title = serpy.Field() @@ -75,6 +76,7 @@ def get_assignment_telemetry(self, obj): class TaskGETSmallSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. id = serpy.Field() title = serpy.Field() @@ -93,12 +95,13 @@ class TaskGETSmallSerializer(serpy.Serializer): class TaskGETDeliverSerializer(TaskGETSerializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. 
delivery_url = serpy.MethodField() def get_delivery_url(self, obj): - token, created = Token.get_or_create(obj.user, token_type='temporal') - return os.getenv('API_URL') + f'/v1/assignment/task/{str(obj.id)}/deliver/{token}' + token, created = Token.get_or_create(obj.user, token_type="temporal") + return os.getenv("API_URL") + f"/v1/assignment/task/{str(obj.id)}/deliver/{token}" class PostTaskSerializer(serializers.ModelSerializer): @@ -107,32 +110,39 @@ class PostTaskSerializer(serializers.ModelSerializer): class Meta: model = Task - exclude = ('user', ) + exclude = ("user",) def validate(self, data): - user = User.objects.filter(id=self.context['user_id']).first() + user = User.objects.filter(id=self.context["user_id"]).first() if user is None: - raise ValidationException('User does not exists') + raise ValidationException("User does not exists") # the teacher shouldn't be allowed to approve a project that isn't done - if ('associated_slug' in data and 'task_status' in data and 'revision_status' in data - and data['task_status'] == 'PENDING' and data['revision_status'] == 'APPROVED'): - raise ValidationException('Only tasks that are DONE should be approved by the teacher') + if ( + "associated_slug" in data + and "task_status" in data + and "revision_status" in data + and data["task_status"] == "PENDING" + and data["revision_status"] == "APPROVED" + ): + raise ValidationException("Only tasks that are DONE should be approved by the teacher") - return super(PostTaskSerializer, self).validate({**data, 'user': user}) + return super(PostTaskSerializer, self).validate({**data, "user": user}) def create(self, validated_data): - _task = Task.objects.filter(associated_slug=validated_data['associated_slug'], - task_type=validated_data['task_type'], - user__id=validated_data['user'].id) + _task = Task.objects.filter( + associated_slug=validated_data["associated_slug"], + task_type=validated_data["task_type"], + user__id=validated_data["user"].id, + ) # optional cohort parameter - if 'cohort' not in validated_data: + if "cohort" not in validated_data: _task = _task.filter(cohort__isnull=True) else: - _task = _task.filter(cohort=validated_data['cohort']) + _task = _task.filter(cohort=validated_data["cohort"]) _task = _task.first() @@ -151,13 +161,13 @@ def update(self, instance, validated_data): ret = [] for data in validated_data: - item = [x for x in instance if 'id' in data and x.id == data['id']] + item = [x for x in instance if "id" in data and x.id == data["id"]] item = item[0] if len(item) else None - if 'id' in data and not data['id']: - del data['id'] + if "id" in data and not data["id"]: + del data["id"] - if 'id' in data: + if "id" in data: ret.append(self.child.update(item, data)) else: ret.append(self.child.create(data)) @@ -175,7 +185,7 @@ class UserAttachmentSerializer(serializers.ModelSerializer): class Meta: model = UserAttachment - fields = ('id', 'url', 'hash', 'slug', 'mime', 'name', 'user') + fields = ("id", "url", "hash", "slug", "mime", "name", "user") exclude = () list_serializer_class = AttachmentListSerializer @@ -188,80 +198,109 @@ class PUTTaskSerializer(serializers.ModelSerializer): class Meta: model = Task - exclude = ('user', ) + exclude = ("user",) def validate(self, data): - if self.instance.user.id != self.context['request'].user.id: - if 'task_status' in data and data['task_status'] != self.instance.task_status: - raise ValidationException(f'Only the task {self.instance.id} owner can modify its status', - slug='put-task-status-of-other-user') - if 'live_url' in data 
and data['live_url'] != self.instance.live_url: - raise ValidationException('Only the task owner can modify its live_url', - slug='put-live-url-of-other-user') - if 'github_url' in data and data['github_url'] != self.instance.github_url: - raise ValidationException('Only the task owner can modify its github_url', - slug='put-github-url-of-other-user') + if self.instance.user.id != self.context["request"].user.id: + if "task_status" in data and data["task_status"] != self.instance.task_status: + raise ValidationException( + f"Only the task {self.instance.id} owner can modify its status", + slug="put-task-status-of-other-user", + ) + if "live_url" in data and data["live_url"] != self.instance.live_url: + raise ValidationException( + "Only the task owner can modify its live_url", slug="put-live-url-of-other-user" + ) + if "github_url" in data and data["github_url"] != self.instance.github_url: + raise ValidationException( + "Only the task owner can modify its github_url", slug="put-github-url-of-other-user" + ) # the teacher shouldn't be allowed to approve a project that isn't done - if ('task_status' in data and 'revision_status' in data and data['task_status'] == 'PENDING' - and data['revision_status'] == 'APPROVED'): - raise ValidationException('Only tasks that are DONE should be approved by the teacher', - slug='task-marked-approved-when-pending') - if (self.instance.task_status == 'PENDING' and 'revision_status' in data - and data['revision_status'] == 'APPROVED'): - raise ValidationException('Only tasks that are DONE should be approved by the teacher', - slug='task-marked-approved-when-pending') - - if 'revision_status' in data and data['revision_status'] != self.instance.revision_status: - student_cohorts = CohortUser.objects.filter(user__id=self.instance.user.id, - role='STUDENT').values_list('cohort__id', flat=True) - student_academies = CohortUser.objects.filter(user__id=self.instance.user.id, - role='STUDENT').values_list('cohort__academy__id', flat=True) + if ( + "task_status" in data + and "revision_status" in data + and data["task_status"] == "PENDING" + and data["revision_status"] == "APPROVED" + ): + raise ValidationException( + "Only tasks that are DONE should be approved by the teacher", slug="task-marked-approved-when-pending" + ) + if ( + self.instance.task_status == "PENDING" + and "revision_status" in data + and data["revision_status"] == "APPROVED" + ): + raise ValidationException( + "Only tasks that are DONE should be approved by the teacher", slug="task-marked-approved-when-pending" + ) + + if "revision_status" in data and data["revision_status"] != self.instance.revision_status: + student_cohorts = CohortUser.objects.filter(user__id=self.instance.user.id, role="STUDENT").values_list( + "cohort__id", flat=True + ) + student_academies = CohortUser.objects.filter(user__id=self.instance.user.id, role="STUDENT").values_list( + "cohort__academy__id", flat=True + ) # the logged in user could be a teacher from the same cohort as the student - teacher = CohortUser.objects.filter(cohort__id__in=student_cohorts, - role__in=['TEACHER', 'ASSISTANT'], - user__id=self.context['request'].user.id).first() + teacher = CohortUser.objects.filter( + cohort__id__in=student_cohorts, + role__in=["TEACHER", "ASSISTANT"], + user__id=self.context["request"].user.id, + ).first() # the logged in user could be a staff member from the same academy that the student belongs - staff = ProfileAcademy.objects.filter(academy__id__in=student_academies, - user__id=self.context['request'].user.id).first() 
+ staff = ProfileAcademy.objects.filter( + academy__id__in=student_academies, user__id=self.context["request"].user.id + ).first() # task owner should only be able to mark revision status to PENDING - if data['revision_status'] != 'PENDING' and staff is None and teacher is None: + if data["revision_status"] != "PENDING" and staff is None and teacher is None: raise ValidationException( - 'Only staff members or teachers from the same academy as this student can update the ' - 'review status', - slug='editing-revision-status-but-is-not-teacher-or-assistant') + "Only staff members or teachers from the same academy as this student can update the " + "review status", + slug="editing-revision-status-but-is-not-teacher-or-assistant", + ) return data def update(self, instance, validated_data): - if 'opened_at' in validated_data and validated_data['opened_at'] is not None and ( - instance.opened_at is None or validated_data['opened_at'] > instance.opened_at): - tasks_activity.add_activity.delay(self.context['request'].user.id, - 'read_assignment', - related_type='assignments.Task', - related_id=instance.id) - - if 'revision_status' in validated_data and validated_data['revision_status'] != instance.revision_status: - tasks_activity.add_activity.delay(self.context['request'].user.id, - 'assignment_review_status_updated', - related_type='assignments.Task', - related_id=instance.id) - - if 'task_status' in validated_data and validated_data['task_status'] != instance.task_status: - tasks_activity.add_activity.delay(self.context['request'].user.id, - 'assignment_status_updated', - related_type='assignments.Task', - related_id=instance.id) + if ( + "opened_at" in validated_data + and validated_data["opened_at"] is not None + and (instance.opened_at is None or validated_data["opened_at"] > instance.opened_at) + ): + tasks_activity.add_activity.delay( + self.context["request"].user.id, + "read_assignment", + related_type="assignments.Task", + related_id=instance.id, + ) + + if "revision_status" in validated_data and validated_data["revision_status"] != instance.revision_status: + tasks_activity.add_activity.delay( + self.context["request"].user.id, + "assignment_review_status_updated", + related_type="assignments.Task", + related_id=instance.id, + ) + + if "task_status" in validated_data and validated_data["task_status"] != instance.task_status: + tasks_activity.add_activity.delay( + self.context["request"].user.id, + "assignment_status_updated", + related_type="assignments.Task", + related_id=instance.id, + ) return super().update(instance, validated_data) class FinalProjectGETSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. 
id = serpy.Field() repo_owner = UserSmallSerializer(required=False) @@ -298,40 +337,44 @@ class PostFinalProjectSerializer(serializers.ModelSerializer): class Meta: model = FinalProject - exclude = ('repo_owner', ) + exclude = ("repo_owner",) def validate(self, data): - user = User.objects.filter(id=self.context['user_id']).first() + user = User.objects.filter(id=self.context["user_id"]).first() if user is None: - raise ValidationException('User does not exists') + raise ValidationException("User does not exists") # the teacher shouldn't be allowed to approve a project that isn't done - if ('project_status' in data and 'revision_status' in data and data['project_status'] == 'PENDING' - and data['revision_status'] == 'APPROVED'): - raise ValidationException('Only projects that are DONE should be approved') - - if 'cohort' not in data or data['cohort'] is None: - raise ValidationException('Missing cohort id for this project') + if ( + "project_status" in data + and "revision_status" in data + and data["project_status"] == "PENDING" + and data["revision_status"] == "APPROVED" + ): + raise ValidationException("Only projects that are DONE should be approved") + + if "cohort" not in data or data["cohort"] is None: + raise ValidationException("Missing cohort id for this project") else: - total_students = CohortUser.objects.filter(user__id__in=[m.id for m in data['members']], - cohort__id=data['cohort'].id, - role='STUDENT').count() - if 'members' in data and len(data['members']) != total_students: + total_students = CohortUser.objects.filter( + user__id__in=[m.id for m in data["members"]], cohort__id=data["cohort"].id, role="STUDENT" + ).count() + if "members" in data and len(data["members"]) != total_students: raise ValidationException(f'Project members must be students on this cohort {data["cohort"].name}') - if 'repo_url' not in data: - raise ValidationException('Missing repository URL') + if "repo_url" not in data: + raise ValidationException("Missing repository URL") else: - proj = FinalProject.objects.filter(repo_url=data['repo_url']).first() + proj = FinalProject.objects.filter(repo_url=data["repo_url"]).first() if proj is not None: - raise ValidationException(f'There is another project already with this repository: {proj.name}') + raise ValidationException(f"There is another project already with this repository: {proj.name}") - return super(PostFinalProjectSerializer, self).validate({**data, 'repo_owner': user}) + return super(PostFinalProjectSerializer, self).validate({**data, "repo_owner": user}) def create(self, validated_data): - members = validated_data.pop('members') + members = validated_data.pop("members") project = FinalProject.objects.create(**validated_data) project.members.set(members) return project @@ -345,60 +388,80 @@ class PUTFinalProjectSerializer(serializers.ModelSerializer): class Meta: model = FinalProject - exclude = ('repo_owner', ) + exclude = ("repo_owner",) def validate(self, data): - user = self.context['request'].user + user = self.context["request"].user - if 'repo_url' in data and data['repo_url'] != self.instance.repo_url: - raise ValidationException('Repository URL cannot be updated, delete the project instead', - slug='put-update-repo-url') + if "repo_url" in data and data["repo_url"] != self.instance.repo_url: + raise ValidationException( + "Repository URL cannot be updated, delete the project instead", slug="put-update-repo-url" + ) exists = self.instance.members.filter(id=user.id).first() if exists is None: - for field_name in ['project_status']: + for 
field_name in ["project_status"]: if field_name in data and data[field_name] != getattr(self.instance, field_name): - raise ValidationException(f'Only the project members can modify its {field_name}', - slug='put-project-property-from-none-members') - - if 'members' in data: - total_students = CohortUser.objects.filter(user__id__in=[m.id for m in data['members']], - cohort__id=data['cohort'].id, - role='STUDENT').count() - if len(data['members']) != total_students: + raise ValidationException( + f"Only the project members can modify its {field_name}", + slug="put-project-property-from-none-members", + ) + + if "members" in data: + total_students = CohortUser.objects.filter( + user__id__in=[m.id for m in data["members"]], cohort__id=data["cohort"].id, role="STUDENT" + ).count() + if len(data["members"]) != total_students: raise ValidationException( - f'All members of this project must belong to the cohort {data["cohort"].name} - {total_students}') + f'All members of this project must belong to the cohort {data["cohort"].name} - {total_students}' + ) # the teacher shouldn't be allowed to approve a project that isn't done - if ('project_status' in data and 'revision_status' in data and data['project_status'] == 'PENDING' - and data['revision_status'] == 'APPROVED'): - raise ValidationException('Only projects that are DONE should be approved', - slug='project-marked-approved-when-pending') - if (self.instance.project_status == 'PENDING' and 'revision_status' in data - and data['revision_status'] == 'APPROVED'): - raise ValidationException('Only projects that are DONE should be approved by the teacher', - slug='project-marked-approved-when-pending') - - if 'revision_status' in data and data['revision_status'] != self.instance.revision_status: - student_cohorts = CohortUser.objects.filter(user__in=self.instance.members.all(), - role='STUDENT').values_list('cohort__id', flat=True) - student_academies = CohortUser.objects.filter(user__in=self.instance.members.all(), - role='STUDENT').values_list('cohort__academy__id', flat=True) + if ( + "project_status" in data + and "revision_status" in data + and data["project_status"] == "PENDING" + and data["revision_status"] == "APPROVED" + ): + raise ValidationException( + "Only projects that are DONE should be approved", slug="project-marked-approved-when-pending" + ) + if ( + self.instance.project_status == "PENDING" + and "revision_status" in data + and data["revision_status"] == "APPROVED" + ): + raise ValidationException( + "Only projects that are DONE should be approved by the teacher", + slug="project-marked-approved-when-pending", + ) + + if "revision_status" in data and data["revision_status"] != self.instance.revision_status: + student_cohorts = CohortUser.objects.filter( + user__in=self.instance.members.all(), role="STUDENT" + ).values_list("cohort__id", flat=True) + student_academies = CohortUser.objects.filter( + user__in=self.instance.members.all(), role="STUDENT" + ).values_list("cohort__academy__id", flat=True) # the logged in user could be a teacher from the same cohort as the student - teacher = CohortUser.objects.filter(cohort__id__in=student_cohorts, - role__in=['TEACHER', 'ASSISTANT'], - user__id=self.context['request'].user.id).first() + teacher = CohortUser.objects.filter( + cohort__id__in=student_cohorts, + role__in=["TEACHER", "ASSISTANT"], + user__id=self.context["request"].user.id, + ).first() # the logged in user could be a staff member from the same academy that the student belongs - staff = 
ProfileAcademy.objects.filter(academy__id__in=student_academies, - user__id=self.context['request'].user.id).first() + staff = ProfileAcademy.objects.filter( + academy__id__in=student_academies, user__id=self.context["request"].user.id + ).first() # task owner should only be able to mark revision status to PENDING - if data['revision_status'] != 'PENDING' and staff is None and teacher is None: + if data["revision_status"] != "PENDING" and staff is None and teacher is None: raise ValidationException( - 'Only staff members or teachers from the same academy as this student can update the ' - 'revision status', - slug='editing-revision-status-but-is-not-teacher-or-assistant') + "Only staff members or teachers from the same academy as this student can update the " + "revision status", + slug="editing-revision-status-but-is-not-teacher-or-assistant", + ) return data diff --git a/breathecode/assignments/signals.py b/breathecode/assignments/signals.py index ce62adbbc..e26e8e30c 100644 --- a/breathecode/assignments/signals.py +++ b/breathecode/assignments/signals.py @@ -2,6 +2,7 @@ For each signal you want other apps to be able to receive, you have to declare a new variable here like this: """ + from django import dispatch assignment_created = dispatch.Signal() diff --git a/breathecode/assignments/tasks.py b/breathecode/assignments/tasks.py index 8df746ba7..2f6c68e22 100644 --- a/breathecode/assignments/tasks.py +++ b/breathecode/assignments/tasks.py @@ -21,7 +21,7 @@ @shared_task(bind=True, priority=TaskPriority.NOTIFICATION.value) def student_task_notification(self, task_id): """Notify if the task was change.""" - logger.info('Starting student_task_notification') + logger.info("Starting student_task_notification") task = Task.objects.filter(id=task_id).first() if not task_is_valid_for_notifications(task): @@ -29,25 +29,28 @@ def student_task_notification(self, task_id): language = task.cohort.language.lower() revision_status = task.revision_status - subject = NOTIFICATION_STRINGS[language]['student']['subject'].format(title=task.title) - details = NOTIFICATION_STRINGS[language]['student'][revision_status] + subject = NOTIFICATION_STRINGS[language]["student"]["subject"].format(title=task.title) + details = NOTIFICATION_STRINGS[language]["student"][revision_status] academy = None if task.cohort: academy = task.cohort.academy - actions.send_email_message('diagnostic', - task.user.email, { - 'subject': subject, - 'details': details, - }, - academy=academy) + actions.send_email_message( + "diagnostic", + task.user.email, + { + "subject": subject, + "details": details, + }, + academy=academy, + ) @shared_task(bind=True, priority=TaskPriority.ACTIVITY.value) def async_learnpack_webhook(self, webhook_id): - logger.debug('Starting async_learnpack_webhook') - status = 'ok' + logger.debug("Starting async_learnpack_webhook") + status = "ok" webhook = LearnPackWebhook.objects.filter(id=webhook_id).first() if webhook: @@ -55,126 +58,135 @@ def async_learnpack_webhook(self, webhook_id): client = LearnPack() client.execute_action(webhook_id) except Exception as e: - logger.debug('LearnPack Telemetry exception') + logger.debug("LearnPack Telemetry exception") logger.debug(str(e)) - status = 'error' + status = "error" else: - message = f'Webhook {webhook_id} not found' - webhook.status = 'ERROR' + message = f"Webhook {webhook_id} not found" + webhook.status = "ERROR" webhook.status_text = message webhook.save() logger.debug(message) - status = 'error' + status = "error" - logger.debug(f'LearnPack telemetry status: 
{status}') + logger.debug(f"LearnPack telemetry status: {status}") @shared_task(bind=True, priority=TaskPriority.NOTIFICATION.value) def teacher_task_notification(self, task_id): """Notify if the task was change.""" - logger.info('Starting teacher_task_notification') + logger.info("Starting teacher_task_notification") - url = os.getenv('TEACHER_URL') + url = os.getenv("TEACHER_URL") if not url: - logger.error('TEACHER_URL is not set as environment variable') + logger.error("TEACHER_URL is not set as environment variable") return - url = re.sub('/$', '', url) + url = re.sub("/$", "", url) task = Task.objects.filter(id=task_id).first() if not task_is_valid_for_notifications(task): return language = task.cohort.language.lower() - subject = NOTIFICATION_STRINGS[language]['teacher']['subject'].format(first_name=task.user.first_name, - last_name=task.user.last_name) + subject = NOTIFICATION_STRINGS[language]["teacher"]["subject"].format( + first_name=task.user.first_name, last_name=task.user.last_name + ) - details = NOTIFICATION_STRINGS[language]['teacher']['details'].format( + details = NOTIFICATION_STRINGS[language]["teacher"]["details"].format( first_name=task.user.first_name, last_name=task.user.last_name, title=task.title, - url=f'{url}/cohort/{task.cohort.slug}/assignments') + url=f"{url}/cohort/{task.cohort.slug}/assignments", + ) academy = None if task.cohort: academy = task.cohort.academy - actions.send_email_message('diagnostic', - task.user.email, { - 'subject': subject, - 'details': details, - }, - academy=academy) + actions.send_email_message( + "diagnostic", + task.user.email, + { + "subject": subject, + "details": details, + }, + academy=academy, + ) @shared_task(bind=False, priority=TaskPriority.ACADEMY.value) def set_cohort_user_assignments(task_id: int): - logger.info('Executing set_cohort_user_assignments') + logger.info("Executing set_cohort_user_assignments") def serialize_task(task): return { - 'id': task.id, - 'type': task.task_type, + "id": task.id, + "type": task.task_type, } task = Task.objects.filter(id=task_id).first() if not task: - logger.error('Task not found') + logger.error("Task not found") return - cohort_user = CohortUser.objects.filter(cohort=task.cohort, user=task.user, role='STUDENT').first() + cohort_user = CohortUser.objects.filter(cohort=task.cohort, user=task.user, role="STUDENT").first() if not cohort_user: - logger.error('CohortUser not found') + logger.error("CohortUser not found") return user_history_log = cohort_user.history_log or {} - user_history_log['delivered_assignments'] = user_history_log.get('delivered_assignments', []) - user_history_log['pending_assignments'] = user_history_log.get('pending_assignments', []) + user_history_log["delivered_assignments"] = user_history_log.get("delivered_assignments", []) + user_history_log["pending_assignments"] = user_history_log.get("pending_assignments", []) - user_history_log['pending_assignments'] = [x for x in user_history_log['pending_assignments'] if x['id'] != task.id] + user_history_log["pending_assignments"] = [x for x in user_history_log["pending_assignments"] if x["id"] != task.id] - user_history_log['delivered_assignments'] = [ - x for x in user_history_log['delivered_assignments'] if x['id'] != task.id + user_history_log["delivered_assignments"] = [ + x for x in user_history_log["delivered_assignments"] if x["id"] != task.id ] - if task.task_status == 'PENDING': - user_history_log['pending_assignments'].append(serialize_task(task)) + if task.task_status == "PENDING": + 
user_history_log["pending_assignments"].append(serialize_task(task)) - if task.task_status == 'DONE': - user_history_log['delivered_assignments'].append(serialize_task(task)) + if task.task_status == "DONE": + user_history_log["delivered_assignments"].append(serialize_task(task)) cohort_user.history_log = user_history_log cohort_user.save() s = None try: - if hasattr(task.user, 'credentialsgithub') and task.github_url: - with Service('rigobot', task.user.id) as s: - if task.task_status == 'DONE': - response = s.post('/v1/finetuning/me/repository/', - json={ - 'url': task.github_url, - 'watchers': task.user.credentialsgithub.username, - }) + if hasattr(task.user, "credentialsgithub") and task.github_url: + with Service("rigobot", task.user.id) as s: + if task.task_status == "DONE": + response = s.post( + "/v1/finetuning/me/repository/", + json={ + "url": task.github_url, + "watchers": task.user.credentialsgithub.username, + }, + ) data = response.json() - task.rigobot_repository_id = data['id'] + task.rigobot_repository_id = data["id"] else: - response = s.put('/v1/finetuning/me/repository/', - json={ - 'url': task.github_url, - 'activity_status': 'INACTIVE', - }) + response = s.put( + "/v1/finetuning/me/repository/", + json={ + "url": task.github_url, + "activity_status": "INACTIVE", + }, + ) data = response.json() - task.rigobot_repository_id = data['id'] + task.rigobot_repository_id = data["id"] except Exception as e: logger.error(str(e)) - logger.info('History log saved') + logger.info("History log saved") diff --git a/breathecode/assignments/tests/management/commands/tests_schedule_repository_deletions.py b/breathecode/assignments/tests/management/commands/tests_schedule_repository_deletions.py index adbd9ef4c..6f8fa6657 100644 --- a/breathecode/assignments/tests/management/commands/tests_schedule_repository_deletions.py +++ b/breathecode/assignments/tests/management/commands/tests_schedule_repository_deletions.py @@ -1,6 +1,7 @@ """ Test /answer """ + from unittest.mock import MagicMock import pytest @@ -42,13 +43,13 @@ def handler(objs): def x(*args, **kwargs): nonlocal objs - res = [obj for obj in objs if obj['url'] == kwargs['url'] and obj['method'] == kwargs['method']] + res = [obj for obj in objs if obj["url"] == kwargs["url"] and obj["method"] == kwargs["method"]] if len(res) == 0: return ResponseMock({}, 404, {}) - return ResponseMock(res[0]['expected'], res[0]['code'], res[0]['headers']) + return ResponseMock(res[0]["expected"], res[0]["code"], res[0]["headers"]) - monkeypatch.setattr('requests.request', MagicMock(side_effect=x)) + monkeypatch.setattr("requests.request", MagicMock(side_effect=x)) yield handler @@ -57,752 +58,757 @@ def test_no_settings(database: capyc.Database): command = Command() command.handle() - assert database.list_of('assignments.RepositoryDeletionOrder') == [] - assert database.list_of('assignments.RepositoryWhiteList') == [] + assert database.list_of("assignments.RepositoryDeletionOrder") == [] + assert database.list_of("assignments.RepositoryWhiteList") == [] def test_no_repos(database: capyc.Database, patch_get): model = database.create(academy_auth_settings=1, city=1, country=1, user=1, credentials_github=1) - patch_get([ - { - 'method': 'GET', - 'url': - f'https://api.github.com/orgs/{model.academy_auth_settings.github_username}/repos?page=1&type=forks&per_page=30&sort=created&direction=desc', - 'expected': [], - 'code': 200, - 'headers': {} - }, - ]) + patch_get( + [ + { + "method": "GET", + "url": 
f"https://api.github.com/orgs/{model.academy_auth_settings.github_username}/repos?page=1&type=forks&per_page=30&sort=created&direction=desc", + "expected": [], + "code": 200, + "headers": {}, + }, + ] + ) command = Command() command.handle() - assert database.list_of('assignments.RepositoryDeletionOrder') == [] - assert database.list_of('assignments.RepositoryWhiteList') == [] + assert database.list_of("assignments.RepositoryDeletionOrder") == [] + assert database.list_of("assignments.RepositoryWhiteList") == [] def test_two_repos(database: capyc.Database, patch_get): model = database.create(academy_auth_settings=1, city=1, country=1, user=1, credentials_github=1) - patch_get([ - { - 'method': - 'GET', - 'url': - f'https://api.github.com/orgs/{model.academy_auth_settings.github_username}/repos?page=1&type=forks&per_page=30&sort=created&direction=desc', - 'expected': [ - { - 'private': False, - 'html_url': 'https://github.com/breatheco-de/curso-nodejs-4geeks', - 'fork': True, - 'created_at': '2024-04-05T19:22:39Z', - 'is_template': False, - 'allow_forking': True, - }, - { - 'private': False, - 'html_url': 'https://github.com/4GeeksAcademy/curso-nodejs-4geeks', - 'fork': True, - 'created_at': '2024-04-05T19:22:39Z', - 'is_template': False, - 'allow_forking': True, - }, - ], - 'code': - 200, - 'headers': {}, - }, - ]) + patch_get( + [ + { + "method": "GET", + "url": f"https://api.github.com/orgs/{model.academy_auth_settings.github_username}/repos?page=1&type=forks&per_page=30&sort=created&direction=desc", + "expected": [ + { + "private": False, + "html_url": "https://github.com/breatheco-de/curso-nodejs-4geeks", + "fork": True, + "created_at": "2024-04-05T19:22:39Z", + "is_template": False, + "allow_forking": True, + }, + { + "private": False, + "html_url": "https://github.com/4GeeksAcademy/curso-nodejs-4geeks", + "fork": True, + "created_at": "2024-04-05T19:22:39Z", + "is_template": False, + "allow_forking": True, + }, + ], + "code": 200, + "headers": {}, + }, + ] + ) command = Command() command.handle() - assert database.list_of('assignments.RepositoryDeletionOrder') == [ + assert database.list_of("assignments.RepositoryDeletionOrder") == [ { - 'id': 1, - 'provider': 'GITHUB', - 'repository_name': 'curso-nodejs-4geeks', - 'repository_user': 'breatheco-de', - 'status': 'PENDING', - 'status_text': None, + "id": 1, + "provider": "GITHUB", + "repository_name": "curso-nodejs-4geeks", + "repository_user": "breatheco-de", + "status": "PENDING", + "status_text": None, }, { - 'id': 2, - 'provider': 'GITHUB', - 'repository_name': 'curso-nodejs-4geeks', - 'repository_user': '4GeeksAcademy', - 'status': 'PENDING', - 'status_text': None, + "id": 2, + "provider": "GITHUB", + "repository_name": "curso-nodejs-4geeks", + "repository_user": "4GeeksAcademy", + "status": "PENDING", + "status_text": None, }, ] - assert database.list_of('assignments.RepositoryWhiteList') == [] + assert database.list_of("assignments.RepositoryWhiteList") == [] def test_two_repos__deleting_repositories(database: capyc.Database, patch_get, set_datetime, utc_now): from django.utils import timezone delta = relativedelta(months=2, hours=1) - model = database.create(academy_auth_settings=1, - city=1, - country=1, - user=1, - credentials_github=1, - repository_deletion_order=[ - { - 'provider': 'GITHUB', - 'repository_name': 'curso-nodejs-4geeks', - 'repository_user': 'breatheco-de', - 'status': 'PENDING', - 'status_text': None, - }, - { - 'provider': 'GITHUB', - 'repository_name': 'curso-nodejs-4geeks', - 'repository_user': 
'4GeeksAcademy', - 'status': 'PENDING', - 'status_text': None, - }, - ]) + model = database.create( + academy_auth_settings=1, + city=1, + country=1, + user=1, + credentials_github=1, + repository_deletion_order=[ + { + "provider": "GITHUB", + "repository_name": "curso-nodejs-4geeks", + "repository_user": "breatheco-de", + "status": "PENDING", + "status_text": None, + }, + { + "provider": "GITHUB", + "repository_name": "curso-nodejs-4geeks", + "repository_user": "4GeeksAcademy", + "status": "PENDING", + "status_text": None, + }, + ], + ) set_datetime(utc_now + delta) - patch_get([ - { - 'method': - 'GET', - 'url': - f'https://api.github.com/orgs/{model.academy_auth_settings.github_username}/repos?page=1&type=forks&per_page=30&sort=created&direction=desc', - 'expected': [ - { - 'private': False, - 'html_url': 'https://github.com/breatheco-de/curso-nodejs-4geeks', - 'fork': True, - 'created_at': '2024-04-05T19:22:39Z', - 'is_template': False, - 'allow_forking': True, - }, - { - 'private': False, - 'html_url': 'https://github.com/4GeeksAcademy/curso-nodejs-4geeks', - 'fork': True, - 'created_at': '2024-04-05T19:22:39Z', - 'is_template': False, - 'allow_forking': True, - }, - ], - 'code': - 200, - 'headers': {}, - }, - { - 'method': 'DELETE', - 'url': 'https://api.github.com/repos/breatheco-de/curso-nodejs-4geeks', - 'expected': None, - 'code': 204, - 'headers': {}, - }, - { - 'method': 'DELETE', - 'url': 'https://api.github.com/repos/4GeeksAcademy/curso-nodejs-4geeks', - 'expected': None, - 'code': 204, - 'headers': {}, - }, - ]) + patch_get( + [ + { + "method": "GET", + "url": f"https://api.github.com/orgs/{model.academy_auth_settings.github_username}/repos?page=1&type=forks&per_page=30&sort=created&direction=desc", + "expected": [ + { + "private": False, + "html_url": "https://github.com/breatheco-de/curso-nodejs-4geeks", + "fork": True, + "created_at": "2024-04-05T19:22:39Z", + "is_template": False, + "allow_forking": True, + }, + { + "private": False, + "html_url": "https://github.com/4GeeksAcademy/curso-nodejs-4geeks", + "fork": True, + "created_at": "2024-04-05T19:22:39Z", + "is_template": False, + "allow_forking": True, + }, + ], + "code": 200, + "headers": {}, + }, + { + "method": "DELETE", + "url": "https://api.github.com/repos/breatheco-de/curso-nodejs-4geeks", + "expected": None, + "code": 204, + "headers": {}, + }, + { + "method": "DELETE", + "url": "https://api.github.com/repos/4GeeksAcademy/curso-nodejs-4geeks", + "expected": None, + "code": 204, + "headers": {}, + }, + ] + ) command = Command() command.handle() - assert database.list_of('assignments.RepositoryDeletionOrder') == [ + assert database.list_of("assignments.RepositoryDeletionOrder") == [ { - 'id': 1, - 'provider': 'GITHUB', - 'repository_name': 'curso-nodejs-4geeks', - 'repository_user': 'breatheco-de', - 'status': 'DELETED', - 'status_text': None, + "id": 1, + "provider": "GITHUB", + "repository_name": "curso-nodejs-4geeks", + "repository_user": "breatheco-de", + "status": "DELETED", + "status_text": None, }, { - 'id': 2, - 'provider': 'GITHUB', - 'repository_name': 'curso-nodejs-4geeks', - 'repository_user': '4GeeksAcademy', - 'status': 'DELETED', - 'status_text': None, + "id": 2, + "provider": "GITHUB", + "repository_name": "curso-nodejs-4geeks", + "repository_user": "4GeeksAcademy", + "status": "DELETED", + "status_text": None, }, ] - assert database.list_of('assignments.RepositoryWhiteList') == [] + assert database.list_of("assignments.RepositoryWhiteList") == [] def 
test_two_repos__deleting_repositories__got_an_error(database: capyc.Database, patch_get, set_datetime, utc_now): from django.utils import timezone delta = relativedelta(months=2, hours=1) - model = database.create(academy_auth_settings=1, - city=1, - country=1, - user=1, - credentials_github=1, - repository_deletion_order=[ - { - 'provider': 'GITHUB', - 'repository_name': 'curso-nodejs-4geeks', - 'repository_user': 'breatheco-de', - 'status': 'PENDING', - 'status_text': None, - }, - { - 'provider': 'GITHUB', - 'repository_name': 'curso-nodejs-4geeks', - 'repository_user': '4GeeksAcademy', - 'status': 'PENDING', - 'status_text': None, - }, - ]) + model = database.create( + academy_auth_settings=1, + city=1, + country=1, + user=1, + credentials_github=1, + repository_deletion_order=[ + { + "provider": "GITHUB", + "repository_name": "curso-nodejs-4geeks", + "repository_user": "breatheco-de", + "status": "PENDING", + "status_text": None, + }, + { + "provider": "GITHUB", + "repository_name": "curso-nodejs-4geeks", + "repository_user": "4GeeksAcademy", + "status": "PENDING", + "status_text": None, + }, + ], + ) set_datetime(utc_now + delta) - patch_get([ - { - 'method': - 'GET', - 'url': - f'https://api.github.com/orgs/{model.academy_auth_settings.github_username}/repos?page=1&type=forks&per_page=30&sort=created&direction=desc', - 'expected': [ - { - 'private': False, - 'html_url': 'https://github.com/breatheco-de/curso-nodejs-4geeks', - 'fork': True, - 'created_at': '2024-04-05T19:22:39Z', - 'is_template': False, - 'allow_forking': True, - }, - { - 'private': False, - 'html_url': 'https://github.com/4GeeksAcademy/curso-nodejs-4geeks', - 'fork': True, - 'created_at': '2024-04-05T19:22:39Z', - 'is_template': False, - 'allow_forking': True, - }, - ], - 'code': - 200, - 'headers': {}, - }, - ]) + patch_get( + [ + { + "method": "GET", + "url": f"https://api.github.com/orgs/{model.academy_auth_settings.github_username}/repos?page=1&type=forks&per_page=30&sort=created&direction=desc", + "expected": [ + { + "private": False, + "html_url": "https://github.com/breatheco-de/curso-nodejs-4geeks", + "fork": True, + "created_at": "2024-04-05T19:22:39Z", + "is_template": False, + "allow_forking": True, + }, + { + "private": False, + "html_url": "https://github.com/4GeeksAcademy/curso-nodejs-4geeks", + "fork": True, + "created_at": "2024-04-05T19:22:39Z", + "is_template": False, + "allow_forking": True, + }, + ], + "code": 200, + "headers": {}, + }, + ] + ) command = Command() command.handle() - assert database.list_of('assignments.RepositoryDeletionOrder') == [ + assert database.list_of("assignments.RepositoryDeletionOrder") == [ { - 'id': 1, - 'provider': 'GITHUB', - 'repository_name': 'curso-nodejs-4geeks', - 'repository_user': 'breatheco-de', - 'status': 'ERROR', - 'status_text': - 'Unable to communicate with Github API for /repos/breatheco-de/curso-nodejs-4geeks, error: 404', + "id": 1, + "provider": "GITHUB", + "repository_name": "curso-nodejs-4geeks", + "repository_user": "breatheco-de", + "status": "ERROR", + "status_text": "Unable to communicate with Github API for /repos/breatheco-de/curso-nodejs-4geeks, error: 404", }, { - 'id': 2, - 'provider': 'GITHUB', - 'repository_name': 'curso-nodejs-4geeks', - 'repository_user': '4GeeksAcademy', - 'status': 'ERROR', - 'status_text': - 'Unable to communicate with Github API for /repos/4GeeksAcademy/curso-nodejs-4geeks, error: 404', + "id": 2, + "provider": "GITHUB", + "repository_name": "curso-nodejs-4geeks", + "repository_user": "4GeeksAcademy", + "status": 
"ERROR", + "status_text": "Unable to communicate with Github API for /repos/4GeeksAcademy/curso-nodejs-4geeks, error: 404", }, ] - assert database.list_of('assignments.RepositoryWhiteList') == [] + assert database.list_of("assignments.RepositoryWhiteList") == [] def test_two_repos_in_the_whitelist(database: capyc.Database, patch_get): - model = database.create(academy_auth_settings=1, - city=1, - country=1, - user=1, - credentials_github=1, - repository_white_list=[ - { - 'provider': 'GITHUB', - 'repository_name': 'curso-nodejs-4geeks', - 'repository_user': 'breatheco-de', - }, - { - 'provider': 'GITHUB', - 'repository_name': 'curso-nodejs-4geeks', - 'repository_user': '4GeeksAcademy', - }, - ]) - patch_get([ - { - 'method': - 'GET', - 'url': - f'https://api.github.com/orgs/{model.academy_auth_settings.github_username}/repos?page=1&type=forks&per_page=30&sort=created&direction=desc', - 'expected': [ - { - 'private': False, - 'html_url': 'https://github.com/breatheco-de/curso-nodejs-4geeks', - 'fork': True, - 'created_at': '2024-04-05T19:22:39Z', - 'is_template': False, - 'allow_forking': True, - }, - { - 'private': False, - 'html_url': 'https://github.com/4GeeksAcademy/curso-nodejs-4geeks', - 'fork': True, - 'created_at': '2024-04-05T19:22:39Z', - 'is_template': False, - 'allow_forking': True, - }, - ], - 'code': - 200, - 'headers': {}, - }, - ]) + model = database.create( + academy_auth_settings=1, + city=1, + country=1, + user=1, + credentials_github=1, + repository_white_list=[ + { + "provider": "GITHUB", + "repository_name": "curso-nodejs-4geeks", + "repository_user": "breatheco-de", + }, + { + "provider": "GITHUB", + "repository_name": "curso-nodejs-4geeks", + "repository_user": "4GeeksAcademy", + }, + ], + ) + patch_get( + [ + { + "method": "GET", + "url": f"https://api.github.com/orgs/{model.academy_auth_settings.github_username}/repos?page=1&type=forks&per_page=30&sort=created&direction=desc", + "expected": [ + { + "private": False, + "html_url": "https://github.com/breatheco-de/curso-nodejs-4geeks", + "fork": True, + "created_at": "2024-04-05T19:22:39Z", + "is_template": False, + "allow_forking": True, + }, + { + "private": False, + "html_url": "https://github.com/4GeeksAcademy/curso-nodejs-4geeks", + "fork": True, + "created_at": "2024-04-05T19:22:39Z", + "is_template": False, + "allow_forking": True, + }, + ], + "code": 200, + "headers": {}, + }, + ] + ) command = Command() command.handle() - assert database.list_of('assignments.RepositoryDeletionOrder') == [] - assert database.list_of('assignments.RepositoryWhiteList') == [ + assert database.list_of("assignments.RepositoryDeletionOrder") == [] + assert database.list_of("assignments.RepositoryWhiteList") == [ { - 'id': 1, - 'provider': 'GITHUB', - 'repository_name': 'curso-nodejs-4geeks', - 'repository_user': 'breatheco-de', + "id": 1, + "provider": "GITHUB", + "repository_name": "curso-nodejs-4geeks", + "repository_user": "breatheco-de", }, { - 'id': 2, - 'provider': 'GITHUB', - 'repository_name': 'curso-nodejs-4geeks', - 'repository_user': '4GeeksAcademy', + "id": 2, + "provider": "GITHUB", + "repository_name": "curso-nodejs-4geeks", + "repository_user": "4GeeksAcademy", }, ] def test_two_repos_scheduled_and_in_this_execution_was_added_to_the_whitelist(database: capyc.Database, patch_get): - model = database.create(academy_auth_settings=1, - city=1, - country=1, - user=1, - credentials_github=1, - repository_white_list=[ - { - 'provider': 'GITHUB', - 'repository_name': 'curso-nodejs-4geeks', - 'repository_user': 
'breatheco-de', - }, - { - 'provider': 'GITHUB', - 'repository_name': 'curso-nodejs-4geeks', - 'repository_user': '4GeeksAcademy', - }, - ], - repository_deletion_order=[ - { - 'provider': 'GITHUB', - 'repository_name': 'curso-nodejs-4geeks', - 'repository_user': 'breatheco-de', - 'status': 'PENDING', - 'status_text': None, - }, - { - 'provider': 'GITHUB', - 'repository_name': 'curso-nodejs-4geeks', - 'repository_user': '4GeeksAcademy', - 'status': 'PENDING', - 'status_text': None, - }, - ]) - patch_get([ - { - 'method': - 'GET', - 'url': - f'https://api.github.com/orgs/{model.academy_auth_settings.github_username}/repos?page=1&type=forks&per_page=30&sort=created&direction=desc', - 'expected': [ - { - 'private': False, - 'html_url': 'https://github.com/breatheco-de/curso-nodejs-4geeks', - 'fork': True, - 'created_at': '2024-04-05T19:22:39Z', - 'is_template': False, - 'allow_forking': True, - }, - { - 'private': False, - 'html_url': 'https://github.com/4GeeksAcademy/curso-nodejs-4geeks', - 'fork': True, - 'created_at': '2024-04-05T19:22:39Z', - 'is_template': False, - 'allow_forking': True, - }, - ], - 'code': - 200, - 'headers': {}, - }, - ]) + model = database.create( + academy_auth_settings=1, + city=1, + country=1, + user=1, + credentials_github=1, + repository_white_list=[ + { + "provider": "GITHUB", + "repository_name": "curso-nodejs-4geeks", + "repository_user": "breatheco-de", + }, + { + "provider": "GITHUB", + "repository_name": "curso-nodejs-4geeks", + "repository_user": "4GeeksAcademy", + }, + ], + repository_deletion_order=[ + { + "provider": "GITHUB", + "repository_name": "curso-nodejs-4geeks", + "repository_user": "breatheco-de", + "status": "PENDING", + "status_text": None, + }, + { + "provider": "GITHUB", + "repository_name": "curso-nodejs-4geeks", + "repository_user": "4GeeksAcademy", + "status": "PENDING", + "status_text": None, + }, + ], + ) + patch_get( + [ + { + "method": "GET", + "url": f"https://api.github.com/orgs/{model.academy_auth_settings.github_username}/repos?page=1&type=forks&per_page=30&sort=created&direction=desc", + "expected": [ + { + "private": False, + "html_url": "https://github.com/breatheco-de/curso-nodejs-4geeks", + "fork": True, + "created_at": "2024-04-05T19:22:39Z", + "is_template": False, + "allow_forking": True, + }, + { + "private": False, + "html_url": "https://github.com/4GeeksAcademy/curso-nodejs-4geeks", + "fork": True, + "created_at": "2024-04-05T19:22:39Z", + "is_template": False, + "allow_forking": True, + }, + ], + "code": 200, + "headers": {}, + }, + ] + ) command = Command() command.handle() - assert database.list_of('assignments.RepositoryDeletionOrder') == [] - assert database.list_of('assignments.RepositoryWhiteList') == [ + assert database.list_of("assignments.RepositoryDeletionOrder") == [] + assert database.list_of("assignments.RepositoryWhiteList") == [ { - 'id': 1, - 'provider': 'GITHUB', - 'repository_name': 'curso-nodejs-4geeks', - 'repository_user': 'breatheco-de', + "id": 1, + "provider": "GITHUB", + "repository_name": "curso-nodejs-4geeks", + "repository_user": "breatheco-de", }, { - 'id': 2, - 'provider': 'GITHUB', - 'repository_name': 'curso-nodejs-4geeks', - 'repository_user': '4GeeksAcademy', + "id": 2, + "provider": "GITHUB", + "repository_name": "curso-nodejs-4geeks", + "repository_user": "4GeeksAcademy", }, ] def test_two_repos_used_in_subscriptions(database: capyc.Database, patch_get): - model = database.create(academy_auth_settings=1, - city=1, - country=1, - user=1, - credentials_github=1, - 
repository_subscription=[ - { - 'repository': 'https://github.com/breatheco-de/curso-nodejs-4geeks', - }, - { - 'repository': 'https://github.com/4GeeksAcademy/curso-nodejs-4geeks', - }, - ]) - patch_get([ - { - 'method': - 'GET', - 'url': - f'https://api.github.com/orgs/{model.academy_auth_settings.github_username}/repos?page=1&type=forks&per_page=30&sort=created&direction=desc', - 'expected': [ - { - 'private': False, - 'html_url': 'https://github.com/breatheco-de/curso-nodejs-4geeks', - 'fork': True, - 'created_at': '2024-04-05T19:22:39Z', - 'is_template': False, - 'allow_forking': True, - }, - { - 'private': False, - 'html_url': 'https://github.com/4GeeksAcademy/curso-nodejs-4geeks', - 'fork': True, - 'created_at': '2024-04-05T19:22:39Z', - 'is_template': False, - 'allow_forking': True, - }, - ], - 'code': - 200, - 'headers': {}, - }, - ]) + model = database.create( + academy_auth_settings=1, + city=1, + country=1, + user=1, + credentials_github=1, + repository_subscription=[ + { + "repository": "https://github.com/breatheco-de/curso-nodejs-4geeks", + }, + { + "repository": "https://github.com/4GeeksAcademy/curso-nodejs-4geeks", + }, + ], + ) + patch_get( + [ + { + "method": "GET", + "url": f"https://api.github.com/orgs/{model.academy_auth_settings.github_username}/repos?page=1&type=forks&per_page=30&sort=created&direction=desc", + "expected": [ + { + "private": False, + "html_url": "https://github.com/breatheco-de/curso-nodejs-4geeks", + "fork": True, + "created_at": "2024-04-05T19:22:39Z", + "is_template": False, + "allow_forking": True, + }, + { + "private": False, + "html_url": "https://github.com/4GeeksAcademy/curso-nodejs-4geeks", + "fork": True, + "created_at": "2024-04-05T19:22:39Z", + "is_template": False, + "allow_forking": True, + }, + ], + "code": 200, + "headers": {}, + }, + ] + ) command = Command() command.handle() - assert database.list_of('assignments.RepositoryDeletionOrder') == [] - assert database.list_of('assignments.RepositoryWhiteList') == [ + assert database.list_of("assignments.RepositoryDeletionOrder") == [] + assert database.list_of("assignments.RepositoryWhiteList") == [ { - 'id': 1, - 'provider': 'GITHUB', - 'repository_name': 'curso-nodejs-4geeks', - 'repository_user': 'breatheco-de', + "id": 1, + "provider": "GITHUB", + "repository_name": "curso-nodejs-4geeks", + "repository_user": "breatheco-de", }, { - 'id': 2, - 'provider': 'GITHUB', - 'repository_name': 'curso-nodejs-4geeks', - 'repository_user': '4GeeksAcademy', + "id": 2, + "provider": "GITHUB", + "repository_name": "curso-nodejs-4geeks", + "repository_user": "4GeeksAcademy", }, ] def test_two_repos_scheduled_and_in_this_execution_was_added_to_the_subscriptions(database: capyc.Database, patch_get): - model = database.create(academy_auth_settings=1, - city=1, - country=1, - user=1, - credentials_github=1, - repository_subscription=[ - { - 'repository': 'https://github.com/breatheco-de/curso-nodejs-4geeks', - }, - { - 'repository': 'https://github.com/4GeeksAcademy/curso-nodejs-4geeks', - }, - ], - repository_deletion_order=[ - { - 'provider': 'GITHUB', - 'repository_name': 'curso-nodejs-4geeks', - 'repository_user': 'breatheco-de', - 'status': 'PENDING', - 'status_text': None, - }, - { - 'provider': 'GITHUB', - 'repository_name': 'curso-nodejs-4geeks', - 'repository_user': '4GeeksAcademy', - 'status': 'PENDING', - 'status_text': None, - }, - ]) - patch_get([ - { - 'method': - 'GET', - 'url': - 
f'https://api.github.com/orgs/{model.academy_auth_settings.github_username}/repos?page=1&type=forks&per_page=30&sort=created&direction=desc', - 'expected': [ - { - 'private': False, - 'html_url': 'https://github.com/breatheco-de/curso-nodejs-4geeks', - 'fork': True, - 'created_at': '2024-04-05T19:22:39Z', - 'is_template': False, - 'allow_forking': True, - }, - { - 'private': False, - 'html_url': 'https://github.com/4GeeksAcademy/curso-nodejs-4geeks', - 'fork': True, - 'created_at': '2024-04-05T19:22:39Z', - 'is_template': False, - 'allow_forking': True, - }, - ], - 'code': - 200, - 'headers': {}, - }, - ]) + model = database.create( + academy_auth_settings=1, + city=1, + country=1, + user=1, + credentials_github=1, + repository_subscription=[ + { + "repository": "https://github.com/breatheco-de/curso-nodejs-4geeks", + }, + { + "repository": "https://github.com/4GeeksAcademy/curso-nodejs-4geeks", + }, + ], + repository_deletion_order=[ + { + "provider": "GITHUB", + "repository_name": "curso-nodejs-4geeks", + "repository_user": "breatheco-de", + "status": "PENDING", + "status_text": None, + }, + { + "provider": "GITHUB", + "repository_name": "curso-nodejs-4geeks", + "repository_user": "4GeeksAcademy", + "status": "PENDING", + "status_text": None, + }, + ], + ) + patch_get( + [ + { + "method": "GET", + "url": f"https://api.github.com/orgs/{model.academy_auth_settings.github_username}/repos?page=1&type=forks&per_page=30&sort=created&direction=desc", + "expected": [ + { + "private": False, + "html_url": "https://github.com/breatheco-de/curso-nodejs-4geeks", + "fork": True, + "created_at": "2024-04-05T19:22:39Z", + "is_template": False, + "allow_forking": True, + }, + { + "private": False, + "html_url": "https://github.com/4GeeksAcademy/curso-nodejs-4geeks", + "fork": True, + "created_at": "2024-04-05T19:22:39Z", + "is_template": False, + "allow_forking": True, + }, + ], + "code": 200, + "headers": {}, + }, + ] + ) command = Command() command.handle() - assert database.list_of('assignments.RepositoryDeletionOrder') == [] - assert database.list_of('assignments.RepositoryWhiteList') == [ + assert database.list_of("assignments.RepositoryDeletionOrder") == [] + assert database.list_of("assignments.RepositoryWhiteList") == [ { - 'id': 1, - 'provider': 'GITHUB', - 'repository_name': 'curso-nodejs-4geeks', - 'repository_user': 'breatheco-de', + "id": 1, + "provider": "GITHUB", + "repository_name": "curso-nodejs-4geeks", + "repository_user": "breatheco-de", }, { - 'id': 2, - 'provider': 'GITHUB', - 'repository_name': 'curso-nodejs-4geeks', - 'repository_user': '4GeeksAcademy', + "id": 2, + "provider": "GITHUB", + "repository_name": "curso-nodejs-4geeks", + "repository_user": "4GeeksAcademy", }, ] -@pytest.mark.parametrize('attr, is_readme, is_joined', [ - ('url', False, False), - ('solution_url', False, False), - ('preview', False, False), - ('readme_url', False, False), - ('intro_video_url', False, False), - ('solution_video_url', False, False), - ('readme_raw', True, False), - ('readme_raw', True, True), -]) +@pytest.mark.parametrize( + "attr, is_readme, is_joined", + [ + ("url", False, False), + ("solution_url", False, False), + ("preview", False, False), + ("readme_url", False, False), + ("intro_video_url", False, False), + ("solution_video_url", False, False), + ("readme_raw", True, False), + ("readme_raw", True, True), + ], +) def test_two_repos_used_in_assets(database: capyc.Database, patch_get, attr, is_readme, is_joined): if is_readme and is_joined: assets = [ { - attr: - Asset.encode( - 
'https://github.com/breatheco-de/curso-nodejs-4geeks https://github.com/4GeeksAcademy/curso-nodejs-4geeks' + attr: Asset.encode( + "https://github.com/breatheco-de/curso-nodejs-4geeks https://github.com/4GeeksAcademy/curso-nodejs-4geeks" ), }, ] elif is_readme and is_joined is False: assets = [ { - attr: Asset.encode('https://github.com/breatheco-de/curso-nodejs-4geeks'), + attr: Asset.encode("https://github.com/breatheco-de/curso-nodejs-4geeks"), }, { - attr: Asset.encode('https://github.com/4GeeksAcademy/curso-nodejs-4geeks'), + attr: Asset.encode("https://github.com/4GeeksAcademy/curso-nodejs-4geeks"), }, ] else: assets = [ { - attr: 'https://github.com/breatheco-de/curso-nodejs-4geeks', + attr: "https://github.com/breatheco-de/curso-nodejs-4geeks", }, { - attr: 'https://github.com/4GeeksAcademy/curso-nodejs-4geeks', + attr: "https://github.com/4GeeksAcademy/curso-nodejs-4geeks", }, ] - model = database.create(academy_auth_settings=1, - city=1, - country=1, - user=1, - credentials_github=1, - asset=assets, - asset_category=1) - patch_get([ - { - 'method': - 'GET', - 'url': - f'https://api.github.com/orgs/{model.academy_auth_settings.github_username}/repos?page=1&type=forks&per_page=30&sort=created&direction=desc', - 'expected': [ - { - 'private': False, - 'html_url': 'https://github.com/breatheco-de/curso-nodejs-4geeks', - 'fork': True, - 'created_at': '2024-04-05T19:22:39Z', - 'is_template': False, - 'allow_forking': True, - }, - { - 'private': False, - 'html_url': 'https://github.com/4GeeksAcademy/curso-nodejs-4geeks', - 'fork': True, - 'created_at': '2024-04-05T19:22:39Z', - 'is_template': False, - 'allow_forking': True, - }, - ], - 'code': - 200, - 'headers': {}, - }, - ]) + model = database.create( + academy_auth_settings=1, city=1, country=1, user=1, credentials_github=1, asset=assets, asset_category=1 + ) + patch_get( + [ + { + "method": "GET", + "url": f"https://api.github.com/orgs/{model.academy_auth_settings.github_username}/repos?page=1&type=forks&per_page=30&sort=created&direction=desc", + "expected": [ + { + "private": False, + "html_url": "https://github.com/breatheco-de/curso-nodejs-4geeks", + "fork": True, + "created_at": "2024-04-05T19:22:39Z", + "is_template": False, + "allow_forking": True, + }, + { + "private": False, + "html_url": "https://github.com/4GeeksAcademy/curso-nodejs-4geeks", + "fork": True, + "created_at": "2024-04-05T19:22:39Z", + "is_template": False, + "allow_forking": True, + }, + ], + "code": 200, + "headers": {}, + }, + ] + ) command = Command() command.handle() - assert database.list_of('assignments.RepositoryDeletionOrder') == [] - assert database.list_of('assignments.RepositoryWhiteList') == [ + assert database.list_of("assignments.RepositoryDeletionOrder") == [] + assert database.list_of("assignments.RepositoryWhiteList") == [ { - 'id': 1, - 'provider': 'GITHUB', - 'repository_name': 'curso-nodejs-4geeks', - 'repository_user': 'breatheco-de', + "id": 1, + "provider": "GITHUB", + "repository_name": "curso-nodejs-4geeks", + "repository_user": "breatheco-de", }, { - 'id': 2, - 'provider': 'GITHUB', - 'repository_name': 'curso-nodejs-4geeks', - 'repository_user': '4GeeksAcademy', + "id": 2, + "provider": "GITHUB", + "repository_name": "curso-nodejs-4geeks", + "repository_user": "4GeeksAcademy", }, ] -@pytest.mark.parametrize('attr, is_readme, is_joined', [ - ('url', False, False), - ('solution_url', False, False), - ('preview', False, False), - ('readme_url', False, False), - ('intro_video_url', False, False), - ('solution_video_url', False, False), 
- ('readme_raw', True, False), - ('readme_raw', True, True), -]) -def test_two_repos_scheduled_and_in_this_execution_was_added_to_the_assets(database: capyc.Database, patch_get, attr, - is_readme, is_joined): +@pytest.mark.parametrize( + "attr, is_readme, is_joined", + [ + ("url", False, False), + ("solution_url", False, False), + ("preview", False, False), + ("readme_url", False, False), + ("intro_video_url", False, False), + ("solution_video_url", False, False), + ("readme_raw", True, False), + ("readme_raw", True, True), + ], +) +def test_two_repos_scheduled_and_in_this_execution_was_added_to_the_assets( + database: capyc.Database, patch_get, attr, is_readme, is_joined +): if is_readme and is_joined: assets = [ { - attr: - Asset.encode( - 'https://github.com/breatheco-de/curso-nodejs-4geeks https://github.com/4GeeksAcademy/curso-nodejs-4geeks' + attr: Asset.encode( + "https://github.com/breatheco-de/curso-nodejs-4geeks https://github.com/4GeeksAcademy/curso-nodejs-4geeks" ), }, ] elif is_readme and is_joined is False: assets = [ { - attr: Asset.encode('https://github.com/breatheco-de/curso-nodejs-4geeks'), + attr: Asset.encode("https://github.com/breatheco-de/curso-nodejs-4geeks"), }, { - attr: Asset.encode('https://github.com/4GeeksAcademy/curso-nodejs-4geeks'), + attr: Asset.encode("https://github.com/4GeeksAcademy/curso-nodejs-4geeks"), }, ] else: assets = [ { - attr: 'https://github.com/breatheco-de/curso-nodejs-4geeks', + attr: "https://github.com/breatheco-de/curso-nodejs-4geeks", }, { - attr: 'https://github.com/4GeeksAcademy/curso-nodejs-4geeks', + attr: "https://github.com/4GeeksAcademy/curso-nodejs-4geeks", }, ] - model = database.create(academy_auth_settings=1, - city=1, - country=1, - user=1, - credentials_github=1, - asset=assets, - asset_category=1, - repository_deletion_order=[ - { - 'provider': 'GITHUB', - 'repository_name': 'curso-nodejs-4geeks', - 'repository_user': 'breatheco-de', - 'status': 'PENDING', - 'status_text': None, - }, - { - 'provider': 'GITHUB', - 'repository_name': 'curso-nodejs-4geeks', - 'repository_user': '4GeeksAcademy', - 'status': 'PENDING', - 'status_text': None, - }, - ]) - patch_get([ - { - 'method': - 'GET', - 'url': - f'https://api.github.com/orgs/{model.academy_auth_settings.github_username}/repos?page=1&type=forks&per_page=30&sort=created&direction=desc', - 'expected': [ - { - 'private': False, - 'html_url': 'https://github.com/breatheco-de/curso-nodejs-4geeks', - 'fork': True, - 'created_at': '2024-04-05T19:22:39Z', - 'is_template': False, - 'allow_forking': True, - }, - { - 'private': False, - 'html_url': 'https://github.com/4GeeksAcademy/curso-nodejs-4geeks', - 'fork': True, - 'created_at': '2024-04-05T19:22:39Z', - 'is_template': False, - 'allow_forking': True, - }, - ], - 'code': - 200, - 'headers': {}, - }, - ]) + model = database.create( + academy_auth_settings=1, + city=1, + country=1, + user=1, + credentials_github=1, + asset=assets, + asset_category=1, + repository_deletion_order=[ + { + "provider": "GITHUB", + "repository_name": "curso-nodejs-4geeks", + "repository_user": "breatheco-de", + "status": "PENDING", + "status_text": None, + }, + { + "provider": "GITHUB", + "repository_name": "curso-nodejs-4geeks", + "repository_user": "4GeeksAcademy", + "status": "PENDING", + "status_text": None, + }, + ], + ) + patch_get( + [ + { + "method": "GET", + "url": f"https://api.github.com/orgs/{model.academy_auth_settings.github_username}/repos?page=1&type=forks&per_page=30&sort=created&direction=desc", + "expected": [ + { + "private": False, 
+ "html_url": "https://github.com/breatheco-de/curso-nodejs-4geeks", + "fork": True, + "created_at": "2024-04-05T19:22:39Z", + "is_template": False, + "allow_forking": True, + }, + { + "private": False, + "html_url": "https://github.com/4GeeksAcademy/curso-nodejs-4geeks", + "fork": True, + "created_at": "2024-04-05T19:22:39Z", + "is_template": False, + "allow_forking": True, + }, + ], + "code": 200, + "headers": {}, + }, + ] + ) command = Command() command.handle() - assert database.list_of('assignments.RepositoryDeletionOrder') == [] - assert database.list_of('assignments.RepositoryWhiteList') == [ + assert database.list_of("assignments.RepositoryDeletionOrder") == [] + assert database.list_of("assignments.RepositoryWhiteList") == [ { - 'id': 1, - 'provider': 'GITHUB', - 'repository_name': 'curso-nodejs-4geeks', - 'repository_user': 'breatheco-de', + "id": 1, + "provider": "GITHUB", + "repository_name": "curso-nodejs-4geeks", + "repository_user": "breatheco-de", }, { - 'id': 2, - 'provider': 'GITHUB', - 'repository_name': 'curso-nodejs-4geeks', - 'repository_user': '4GeeksAcademy', + "id": 2, + "provider": "GITHUB", + "repository_name": "curso-nodejs-4geeks", + "repository_user": "4GeeksAcademy", }, ] diff --git a/breathecode/assignments/tests/mixins/__init__.py b/breathecode/assignments/tests/mixins/__init__.py index 8f7d7e039..00dbd8a3d 100644 --- a/breathecode/assignments/tests/mixins/__init__.py +++ b/breathecode/assignments/tests/mixins/__init__.py @@ -1,4 +1,5 @@ """ Assignments mixins """ + from .assignments_test_case import AssignmentsTestCase # noqa: F401 diff --git a/breathecode/assignments/tests/mixins/assignments_test_case.py b/breathecode/assignments/tests/mixins/assignments_test_case.py index 9557af2f2..baa95df46 100644 --- a/breathecode/assignments/tests/mixins/assignments_test_case.py +++ b/breathecode/assignments/tests/mixins/assignments_test_case.py @@ -1,14 +1,22 @@ """ Collections of mixins used to login in authorize microservice """ + from rest_framework.test import APITestCase -from breathecode.tests.mixins import (GenerateModelsMixin, CacheMixin, GenerateQueriesMixin, HeadersMixin, - DatetimeMixin, BreathecodeMixin) +from breathecode.tests.mixins import ( + GenerateModelsMixin, + CacheMixin, + GenerateQueriesMixin, + HeadersMixin, + DatetimeMixin, + BreathecodeMixin, +) from breathecode.authenticate.models import Token -class AssignmentsTestCase(APITestCase, GenerateModelsMixin, CacheMixin, GenerateQueriesMixin, HeadersMixin, - DatetimeMixin, BreathecodeMixin): +class AssignmentsTestCase( + APITestCase, GenerateModelsMixin, CacheMixin, GenerateQueriesMixin, HeadersMixin, DatetimeMixin, BreathecodeMixin +): """AdmissionsTestCase with auth methods""" def setUp(self): @@ -21,109 +29,105 @@ def tearDown(self): def get_token_key(self, id=None): kwargs = {} if id: - kwargs['id'] = id - return Token.objects.filter(**kwargs).values_list('key', flat=True).first() + kwargs["id"] = id + return Token.objects.filter(**kwargs).values_list("key", flat=True).first() def check_email_contain_a_correct_token(self, lang, dicts, mock, model): token = self.get_token_key() - question = dicts[0]['title'] + question = dicts[0]["title"] link = f"https://nps.4geeks.com/{dicts[0]['id']}?token={token}" args_list = mock.call_args_list template = get_template_content( - 'nps', { - 'QUESTION': question, - 'HIGHEST': dicts[0]['highest'], - 'LOWEST': dicts[0]['lowest'], - 'SUBJECT': question, - 'ANSWER_ID': dicts[0]['id'], - 'BUTTON': strings[lang]['button_label'], - 'LINK': link, - }, ['email']) + 
"nps", + { + "QUESTION": question, + "HIGHEST": dicts[0]["highest"], + "LOWEST": dicts[0]["lowest"], + "SUBJECT": question, + "ANSWER_ID": dicts[0]["id"], + "BUTTON": strings[lang]["button_label"], + "LINK": link, + }, + ["email"], + ) - self.assertEqual(args_list, [ - call(f'https://api.mailgun.net/v3/{os.environ.get("MAILGUN_DOMAIN")}/messages', - auth=('api', os.environ.get('MAILGUN_API_KEY', '')), - data={ - 'from': f"4Geeks ", - 'to': model['user'].email, - 'subject': template['subject'], - 'text': template['text'], - 'html': template['html'] - }) - ]) + self.assertEqual( + args_list, + [ + call( + f'https://api.mailgun.net/v3/{os.environ.get("MAILGUN_DOMAIN")}/messages', + auth=("api", os.environ.get("MAILGUN_API_KEY", "")), + data={ + "from": f"4Geeks ", + "to": model["user"].email, + "subject": template["subject"], + "text": template["text"], + "html": template["html"], + }, + ) + ], + ) - html = template['html'] - del template['html'] + html = template["html"] + del template["html"] self.assertEqual( - template, { - 'SUBJECT': - question, - 'subject': - question, - 'text': - '\n' - '\n' - 'Please take 2 min to answer the following question:\n' - '\n' - f'{question}\n' - '\n' - 'Click here to vote: ' - f'{link}' - '\n' - '\n' - '\n' - '\n' - 'The 4Geeks Team' - }) + template, + { + "SUBJECT": question, + "subject": question, + "text": "\n" + "\n" + "Please take 2 min to answer the following question:\n" + "\n" + f"{question}\n" + "\n" + "Click here to vote: " + f"{link}" + "\n" + "\n" + "\n" + "\n" + "The 4Geeks Team", + }, + ) self.assertToken(token) self.assertTrue(link in html) def check_slack_contain_a_correct_token(self, lang, dicts, mock, model): token = self.get_token_key() - slack_token = model['slack_team'].owner.credentialsslack.token - slack_id = model['slack_user'].slack_id + slack_token = model["slack_team"].owner.credentialsslack.token + slack_id = model["slack_user"].slack_id args_list = mock.call_args_list - question = dicts[0]['title'] - answer = strings[lang]['button_label'] + question = dicts[0]["title"] + answer = strings[lang]["button_label"] expected = [ - call(method='POST', - url='https://slack.com/api/chat.postMessage', - headers={ - 'Authorization': f'Bearer {slack_token}', - 'Content-type': 'application/json' - }, - params=None, - json={ - 'channel': - slack_id, - 'private_metadata': - '', - 'blocks': [{ - 'type': 'header', - 'text': { - 'type': 'plain_text', - 'text': question, - 'emoji': True - } - }, { - 'type': - 'actions', - 'elements': [{ - 'type': 'button', - 'text': { - 'type': 'plain_text', - 'text': answer, - 'emoji': True - }, - 'url': f'https://nps.4geeks.com/1?token={token}' - }] - }], - 'parse': - 'full' - }) + call( + method="POST", + url="https://slack.com/api/chat.postMessage", + headers={"Authorization": f"Bearer {slack_token}", "Content-type": "application/json"}, + params=None, + json={ + "channel": slack_id, + "private_metadata": "", + "blocks": [ + {"type": "header", "text": {"type": "plain_text", "text": question, "emoji": True}}, + { + "type": "actions", + "elements": [ + { + "type": "button", + "text": {"type": "plain_text", "text": answer, "emoji": True}, + "url": f"https://nps.4geeks.com/1?token={token}", + } + ], + }, + ], + "parse": "full", + }, + ) ] self.assertEqual(args_list, expected) diff --git a/breathecode/assignments/tests/receivers/tests_post_save_cohort_time_slot.py b/breathecode/assignments/tests/receivers/tests_post_save_cohort_time_slot.py index b3a27a9b9..fb14a40c1 100644 --- 
a/breathecode/assignments/tests/receivers/tests_post_save_cohort_time_slot.py +++ b/breathecode/assignments/tests/receivers/tests_post_save_cohort_time_slot.py @@ -14,20 +14,20 @@ class TestMedia(LegacyAPITestCase): 🔽🔽🔽 With zero Cohort """ - @patch('breathecode.assignments.tasks.set_cohort_user_assignments.delay', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("breathecode.assignments.tasks.set_cohort_user_assignments.delay", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_without_cohorts(self, enable_signals): enable_signals() - self.assertEqual(self.bc.database.list_of('events.LiveClass'), []) + self.assertEqual(self.bc.database.list_of("events.LiveClass"), []) self.assertEqual(tasks.set_cohort_user_assignments.delay.call_args_list, []) """ 🔽🔽🔽 With two Task """ - @patch('breathecode.assignments.tasks.set_cohort_user_assignments.delay', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("breathecode.assignments.tasks.set_cohort_user_assignments.delay", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_with_two_tasks__without_change_task_status(self, enable_signals): enable_signals() @@ -39,23 +39,23 @@ def test_with_two_tasks__without_change_task_status(self, enable_signals): model.task[1].title = self.bc.fake.name()[:150] model.task[1].save() - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), self.bc.database.list_of('admissions.Cohort')) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), self.bc.database.list_of("admissions.Cohort")) self.assertEqual(tasks.set_cohort_user_assignments.delay.call_args_list, []) - @patch('breathecode.assignments.tasks.set_cohort_user_assignments.delay', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("breathecode.assignments.tasks.set_cohort_user_assignments.delay", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_with_two_tasks__changing_task_status(self, enable_signals): enable_signals() - statuses = ['PENDING', 'DONE'] - task = [{'task_status': random.choice(statuses)} for _ in range(2)] + statuses = ["PENDING", "DONE"] + task = [{"task_status": random.choice(statuses)} for _ in range(2)] model = self.bc.database.create(task=task, cohort=1) - model.task[0].task_status = 'DONE' if model.task[0].task_status == 'PENDING' else 'PENDING' + model.task[0].task_status = "DONE" if model.task[0].task_status == "PENDING" else "PENDING" model.task[0].save() - model.task[1].task_status = 'DONE' if model.task[1].task_status == 'PENDING' else 'PENDING' + model.task[1].task_status = "DONE" if model.task[1].task_status == "PENDING" else "PENDING" model.task[1].save() - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), self.bc.database.list_of('admissions.Cohort')) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), self.bc.database.list_of("admissions.Cohort")) self.assertEqual(tasks.set_cohort_user_assignments.delay.call_args_list, [call(1), call(2)]) diff --git a/breathecode/assignments/tests/tasks/tests_set_cohort_user_assignments.py b/breathecode/assignments/tests/tasks/tests_set_cohort_user_assignments.py index c6bfbceed..1e492649d 100644 --- a/breathecode/assignments/tests/tasks/tests_set_cohort_user_assignments.py +++ b/breathecode/assignments/tests/tasks/tests_set_cohort_user_assignments.py @@ -22,20 +22,21 @@ def x(db, monkeypatch): 
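+    # Fixture: stub out logging and detach signal receivers / activity tasks so the
+    # set_cohort_user_assignments task can be exercised without external side effects.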
reset_app_cache() - monkeypatch.setattr('logging.Logger.info', MagicMock()) - monkeypatch.setattr('logging.Logger.error', MagicMock()) + monkeypatch.setattr("logging.Logger.info", MagicMock()) + monkeypatch.setattr("logging.Logger.error", MagicMock()) - monkeypatch.setattr('breathecode.assignments.signals.assignment_created.send_robust', empty) - monkeypatch.setattr('breathecode.assignments.signals.assignment_status_updated.send_robust', empty) - monkeypatch.setattr('breathecode.activity.tasks.get_attendancy_log.delay', empty) - monkeypatch.setattr('django.db.models.signals.pre_delete.send_robust', empty) - monkeypatch.setattr('breathecode.admissions.signals.student_edu_status_updated.send_robust', empty) + monkeypatch.setattr("breathecode.assignments.signals.assignment_created.send_robust", empty) + monkeypatch.setattr("breathecode.assignments.signals.assignment_status_updated.send_robust", empty) + monkeypatch.setattr("breathecode.activity.tasks.get_attendancy_log.delay", empty) + monkeypatch.setattr("django.db.models.signals.pre_delete.send_robust", empty) + monkeypatch.setattr("breathecode.admissions.signals.student_edu_status_updated.send_robust", empty) yield class MediaTestSuite(AssignmentsTestCase): """Test /answer""" + """ 🔽🔽🔽 Without Task """ @@ -43,10 +44,10 @@ class MediaTestSuite(AssignmentsTestCase): def test__without_tasks(self): set_cohort_user_assignments.delay(1) - self.assertEqual(self.bc.database.list_of('assignments.Task'), []) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), []) - self.assertEqual(Logger.info.call_args_list, [call('Executing set_cohort_user_assignments')]) - self.assertEqual(Logger.error.call_args_list, [call('Task not found')]) + self.assertEqual(self.bc.database.list_of("assignments.Task"), []) + self.assertEqual(self.bc.database.list_of("admissions.CohortUser"), []) + self.assertEqual(Logger.info.call_args_list, [call("Executing set_cohort_user_assignments")]) + self.assertEqual(Logger.error.call_args_list, [call("Task not found")]) """ 🔽🔽🔽 One Task @@ -59,20 +60,20 @@ def test__with_one_task(self): set_cohort_user_assignments.delay(1) - self.assertEqual(self.bc.database.list_of('assignments.Task'), [self.bc.format.to_dict(model.task)]) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), []) - self.assertEqual(Logger.info.call_args_list, [call('Executing set_cohort_user_assignments')]) - self.assertEqual(Logger.error.call_args_list, [call('CohortUser not found')]) + self.assertEqual(self.bc.database.list_of("assignments.Task"), [self.bc.format.to_dict(model.task)]) + self.assertEqual(self.bc.database.list_of("admissions.CohortUser"), []) + self.assertEqual(Logger.info.call_args_list, [call("Executing set_cohort_user_assignments")]) + self.assertEqual(Logger.error.call_args_list, [call("CohortUser not found")]) """ 🔽🔽🔽 One Task """ def test__with_one_task__task_is_pending(self): - task_type = random.choice(['LESSON', 'QUIZ', 'PROJECT', 'EXERCISE']) + task_type = random.choice(["LESSON", "QUIZ", "PROJECT", "EXERCISE"]) task = { - 'task_status': 'PENDING', - 'task_type': task_type, + "task_status": "PENDING", + "task_type": task_type, } model = self.bc.database.create(task=task, cohort_user=1) @@ -80,32 +81,38 @@ def test__with_one_task__task_is_pending(self): set_cohort_user_assignments.delay(1) - self.assertEqual(self.bc.database.list_of('assignments.Task'), [self.bc.format.to_dict(model.task)]) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), [ - { - 
**self.bc.format.to_dict(model.cohort_user), - 'history_log': { - 'delivered_assignments': [], - 'pending_assignments': [ - { - 'id': 1, - 'type': task_type, - }, - ], + self.assertEqual(self.bc.database.list_of("assignments.Task"), [self.bc.format.to_dict(model.task)]) + self.assertEqual( + self.bc.database.list_of("admissions.CohortUser"), + [ + { + **self.bc.format.to_dict(model.cohort_user), + "history_log": { + "delivered_assignments": [], + "pending_assignments": [ + { + "id": 1, + "type": task_type, + }, + ], + }, }, - }, - ]) - self.assertEqual(Logger.info.call_args_list, [ - call('Executing set_cohort_user_assignments'), - call('History log saved'), - ]) + ], + ) + self.assertEqual( + Logger.info.call_args_list, + [ + call("Executing set_cohort_user_assignments"), + call("History log saved"), + ], + ) self.assertEqual(Logger.error.call_args_list, []) def test__with_one_task__task_is_done(self): - task_type = random.choice(['LESSON', 'QUIZ', 'PROJECT', 'EXERCISE']) + task_type = random.choice(["LESSON", "QUIZ", "PROJECT", "EXERCISE"]) task = { - 'task_status': 'DONE', - 'task_type': task_type, + "task_status": "DONE", + "task_type": task_type, } model = self.bc.database.create(task=task, cohort_user=1) @@ -113,25 +120,31 @@ def test__with_one_task__task_is_done(self): set_cohort_user_assignments.delay(1) - self.assertEqual(self.bc.database.list_of('assignments.Task'), [self.bc.format.to_dict(model.task)]) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), [ - { - **self.bc.format.to_dict(model.cohort_user), - 'history_log': { - 'delivered_assignments': [ - { - 'id': 1, - 'type': task_type, - }, - ], - 'pending_assignments': [], + self.assertEqual(self.bc.database.list_of("assignments.Task"), [self.bc.format.to_dict(model.task)]) + self.assertEqual( + self.bc.database.list_of("admissions.CohortUser"), + [ + { + **self.bc.format.to_dict(model.cohort_user), + "history_log": { + "delivered_assignments": [ + { + "id": 1, + "type": task_type, + }, + ], + "pending_assignments": [], + }, }, - }, - ]) - self.assertEqual(Logger.info.call_args_list, [ - call('Executing set_cohort_user_assignments'), - call('History log saved'), - ]) + ], + ) + self.assertEqual( + Logger.info.call_args_list, + [ + call("Executing set_cohort_user_assignments"), + call("History log saved"), + ], + ) self.assertEqual(Logger.error.call_args_list, []) """ @@ -139,18 +152,18 @@ def test__with_one_task__task_is_done(self): """ def test__with_one_task__task_is_pending__with_log__already_exists(self): - task_type = random.choice(['LESSON', 'QUIZ', 'PROJECT', 'EXERCISE']) + task_type = random.choice(["LESSON", "QUIZ", "PROJECT", "EXERCISE"]) task = { - 'task_status': 'PENDING', - 'task_type': task_type, + "task_status": "PENDING", + "task_type": task_type, } cohort_user = { - 'history_log': { - 'delivered_assignments': [], - 'pending_assignments': [ + "history_log": { + "delivered_assignments": [], + "pending_assignments": [ { - 'id': 1, - 'type': task_type, + "id": 1, + "type": task_type, }, ], } @@ -161,45 +174,51 @@ def test__with_one_task__task_is_pending__with_log__already_exists(self): set_cohort_user_assignments.delay(1) - self.assertEqual(self.bc.database.list_of('assignments.Task'), [self.bc.format.to_dict(model.task)]) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), [ - { - **self.bc.format.to_dict(model.cohort_user), - 'history_log': { - 'delivered_assignments': [], - 'pending_assignments': [ - { - 'id': 1, - 'type': task_type, - }, - ], + 
self.assertEqual(self.bc.database.list_of("assignments.Task"), [self.bc.format.to_dict(model.task)]) + self.assertEqual( + self.bc.database.list_of("admissions.CohortUser"), + [ + { + **self.bc.format.to_dict(model.cohort_user), + "history_log": { + "delivered_assignments": [], + "pending_assignments": [ + { + "id": 1, + "type": task_type, + }, + ], + }, }, - }, - ]) - self.assertEqual(Logger.info.call_args_list, [ - call('Executing set_cohort_user_assignments'), - call('History log saved'), - ]) + ], + ) + self.assertEqual( + Logger.info.call_args_list, + [ + call("Executing set_cohort_user_assignments"), + call("History log saved"), + ], + ) self.assertEqual(Logger.error.call_args_list, []) def test__with_one_task__task_is_pending__with_log__from_different_items(self): - task_type = random.choice(['LESSON', 'QUIZ', 'PROJECT', 'EXERCISE']) + task_type = random.choice(["LESSON", "QUIZ", "PROJECT", "EXERCISE"]) task = { - 'task_status': 'PENDING', - 'task_type': task_type, + "task_status": "PENDING", + "task_type": task_type, } cohort_user = { - 'history_log': { - 'delivered_assignments': [ + "history_log": { + "delivered_assignments": [ { - 'id': 3, - 'type': task_type, + "id": 3, + "type": task_type, }, ], - 'pending_assignments': [ + "pending_assignments": [ { - 'id': 2, - 'type': task_type, + "id": 2, + "type": task_type, }, ], } @@ -210,55 +229,61 @@ def test__with_one_task__task_is_pending__with_log__from_different_items(self): set_cohort_user_assignments.delay(1) - self.assertEqual(self.bc.database.list_of('assignments.Task'), [self.bc.format.to_dict(model.task)]) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), [ - { - **self.bc.format.to_dict(model.cohort_user), - 'history_log': { - 'delivered_assignments': [ - { - 'id': 3, - 'type': task_type, - }, - ], - 'pending_assignments': [ - { - 'id': 2, - 'type': task_type, - }, - { - 'id': 1, - 'type': task_type, - }, - ], + self.assertEqual(self.bc.database.list_of("assignments.Task"), [self.bc.format.to_dict(model.task)]) + self.assertEqual( + self.bc.database.list_of("admissions.CohortUser"), + [ + { + **self.bc.format.to_dict(model.cohort_user), + "history_log": { + "delivered_assignments": [ + { + "id": 3, + "type": task_type, + }, + ], + "pending_assignments": [ + { + "id": 2, + "type": task_type, + }, + { + "id": 1, + "type": task_type, + }, + ], + }, }, - }, - ]) - self.assertEqual(Logger.info.call_args_list, [ - call('Executing set_cohort_user_assignments'), - call('History log saved'), - ]) + ], + ) + self.assertEqual( + Logger.info.call_args_list, + [ + call("Executing set_cohort_user_assignments"), + call("History log saved"), + ], + ) self.assertEqual(Logger.error.call_args_list, []) def test__rigobot_not_found(self): - task_type = random.choice(['LESSON', 'QUIZ', 'PROJECT', 'EXERCISE']) + task_type = random.choice(["LESSON", "QUIZ", "PROJECT", "EXERCISE"]) task = { - 'task_status': 'PENDING', - 'task_type': task_type, - 'github_url': self.bc.fake.url(), + "task_status": "PENDING", + "task_type": task_type, + "github_url": self.bc.fake.url(), } cohort_user = { - 'history_log': { - 'delivered_assignments': [ + "history_log": { + "delivered_assignments": [ { - 'id': 3, - 'type': task_type, + "id": 3, + "type": task_type, }, ], - 'pending_assignments': [ + "pending_assignments": [ { - 'id': 2, - 'type': task_type, + "id": 2, + "type": task_type, }, ], } @@ -269,191 +294,203 @@ def test__rigobot_not_found(self): set_cohort_user_assignments.delay(1) - 
self.assertEqual(self.bc.database.list_of('assignments.Task'), [self.bc.format.to_dict(model.task)]) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), [ - { - **self.bc.format.to_dict(model.cohort_user), - 'history_log': { - 'delivered_assignments': [ - { - 'id': 3, - 'type': task_type, - }, - ], - 'pending_assignments': [ - { - 'id': 2, - 'type': task_type, - }, - { - 'id': 1, - 'type': task_type, - }, - ], + self.assertEqual(self.bc.database.list_of("assignments.Task"), [self.bc.format.to_dict(model.task)]) + self.assertEqual( + self.bc.database.list_of("admissions.CohortUser"), + [ + { + **self.bc.format.to_dict(model.cohort_user), + "history_log": { + "delivered_assignments": [ + { + "id": 3, + "type": task_type, + }, + ], + "pending_assignments": [ + { + "id": 2, + "type": task_type, + }, + { + "id": 1, + "type": task_type, + }, + ], + }, }, - }, - ]) - self.assertEqual(Logger.info.call_args_list, [ - call('Executing set_cohort_user_assignments'), - call('History log saved'), - ]) - self.assertEqual(Logger.error.call_args_list, [call('App rigobot not found')]) - - @patch.multiple('linked_services.django.service.Service', post=MagicMock(), put=MagicMock()) + ], + ) + self.assertEqual( + Logger.info.call_args_list, + [ + call("Executing set_cohort_user_assignments"), + call("History log saved"), + ], + ) + self.assertEqual(Logger.error.call_args_list, [call("App rigobot not found")]) + + @patch.multiple("linked_services.django.service.Service", post=MagicMock(), put=MagicMock()) def test__rigobot_cancelled_revision(self): - task_type = random.choice(['LESSON', 'QUIZ', 'PROJECT', 'EXERCISE']) + task_type = random.choice(["LESSON", "QUIZ", "PROJECT", "EXERCISE"]) task = { - 'task_status': 'PENDING', - 'task_type': task_type, - 'github_url': self.bc.fake.url(), + "task_status": "PENDING", + "task_type": task_type, + "github_url": self.bc.fake.url(), } cohort_user = { - 'history_log': { - 'delivered_assignments': [ + "history_log": { + "delivered_assignments": [ { - 'id': 3, - 'type': task_type, + "id": 3, + "type": task_type, }, ], - 'pending_assignments': [ + "pending_assignments": [ { - 'id': 2, - 'type': task_type, + "id": 2, + "type": task_type, }, ], } } - model = self.bc.database.create(task=task, - cohort_user=cohort_user, - credentials_github=1, - app={'slug': 'rigobot'}) + model = self.bc.database.create( + task=task, cohort_user=cohort_user, credentials_github=1, app={"slug": "rigobot"} + ) Logger.info.call_args_list = [] set_cohort_user_assignments.delay(1) - self.assertEqual(self.bc.database.list_of('assignments.Task'), [self.bc.format.to_dict(model.task)]) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), [ - { - **self.bc.format.to_dict(model.cohort_user), - 'history_log': { - 'delivered_assignments': [ - { - 'id': 3, - 'type': task_type, - }, - ], - 'pending_assignments': [ - { - 'id': 2, - 'type': task_type, - }, - { - 'id': 1, - 'type': task_type, - }, - ], + self.assertEqual(self.bc.database.list_of("assignments.Task"), [self.bc.format.to_dict(model.task)]) + self.assertEqual( + self.bc.database.list_of("admissions.CohortUser"), + [ + { + **self.bc.format.to_dict(model.cohort_user), + "history_log": { + "delivered_assignments": [ + { + "id": 3, + "type": task_type, + }, + ], + "pending_assignments": [ + { + "id": 2, + "type": task_type, + }, + { + "id": 1, + "type": task_type, + }, + ], + }, }, - }, - ]) - self.assertEqual(Logger.info.call_args_list, [ - call('Executing set_cohort_user_assignments'), - call('History log saved'), - 
]) + ], + ) + self.assertEqual( + Logger.info.call_args_list, + [ + call("Executing set_cohort_user_assignments"), + call("History log saved"), + ], + ) self.assertEqual(Logger.error.call_args_list, []) self.bc.check.calls(Service.post.call_args_list, []) self.bc.check.calls( Service.put.call_args_list, - [call('/v1/finetuning/me/repository/', json={ - 'url': model.task.github_url, - 'activity_status': 'INACTIVE' - })]) + [call("/v1/finetuning/me/repository/", json={"url": model.task.github_url, "activity_status": "INACTIVE"})], + ) - @patch.multiple('linked_services.core.service.Service', post=MagicMock(), put=MagicMock()) + @patch.multiple("linked_services.core.service.Service", post=MagicMock(), put=MagicMock()) def test__rigobot_schedule_revision(self): - task_type = random.choice(['LESSON', 'QUIZ', 'PROJECT', 'EXERCISE']) + task_type = random.choice(["LESSON", "QUIZ", "PROJECT", "EXERCISE"]) task = { - 'task_status': 'DONE', - 'task_type': task_type, - 'github_url': self.bc.fake.url(), + "task_status": "DONE", + "task_type": task_type, + "github_url": self.bc.fake.url(), } cohort_user = { - 'history_log': { - 'delivered_assignments': [ + "history_log": { + "delivered_assignments": [ { - 'id': 3, - 'type': task_type, + "id": 3, + "type": task_type, }, ], - 'pending_assignments': [ + "pending_assignments": [ { - 'id': 2, - 'type': task_type, + "id": 2, + "type": task_type, }, ], } } - model = self.bc.database.create(task=task, - cohort_user=cohort_user, - credentials_github=1, - app={'slug': 'rigobot'}) + model = self.bc.database.create( + task=task, cohort_user=cohort_user, credentials_github=1, app={"slug": "rigobot"} + ) Logger.info.call_args_list = [] set_cohort_user_assignments.delay(1) - self.assertEqual(self.bc.database.list_of('assignments.Task'), [self.bc.format.to_dict(model.task)]) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), [ - { - **self.bc.format.to_dict(model.cohort_user), - 'history_log': { - 'delivered_assignments': [ - { - 'id': 3, - 'type': task_type, - }, - { - 'id': 1, - 'type': task_type, - }, - ], - 'pending_assignments': [ - { - 'id': 2, - 'type': task_type, - }, - ], + self.assertEqual(self.bc.database.list_of("assignments.Task"), [self.bc.format.to_dict(model.task)]) + self.assertEqual( + self.bc.database.list_of("admissions.CohortUser"), + [ + { + **self.bc.format.to_dict(model.cohort_user), + "history_log": { + "delivered_assignments": [ + { + "id": 3, + "type": task_type, + }, + { + "id": 1, + "type": task_type, + }, + ], + "pending_assignments": [ + { + "id": 2, + "type": task_type, + }, + ], + }, }, - }, - ]) - self.assertEqual(Logger.info.call_args_list, [ - call('Executing set_cohort_user_assignments'), - call('History log saved'), - ]) + ], + ) + self.assertEqual( + Logger.info.call_args_list, + [ + call("Executing set_cohort_user_assignments"), + call("History log saved"), + ], + ) self.assertEqual(Logger.error.call_args_list, []) self.bc.check.calls( Service.post.call_args_list, - [call('/v1/finetuning/me/repository/', json={ - 'url': model.task.github_url, - 'watchers': None - })]) + [call("/v1/finetuning/me/repository/", json={"url": model.task.github_url, "watchers": None})], + ) self.bc.check.calls(Service.put.call_args_list, []) def test__with_one_task__task_is_done__with_log__already_exists(self): - task_type = random.choice(['LESSON', 'QUIZ', 'PROJECT', 'EXERCISE']) + task_type = random.choice(["LESSON", "QUIZ", "PROJECT", "EXERCISE"]) task = { - 'task_status': 'DONE', - 'task_type': task_type, + "task_status": "DONE", 
+ "task_type": task_type, } cohort_user = { - 'history_log': { - 'delivered_assignments': [ + "history_log": { + "delivered_assignments": [ { - 'id': 1, - 'type': task_type, + "id": 1, + "type": task_type, }, ], - 'pending_assignments': [], + "pending_assignments": [], } } model = self.bc.database.create(task=task, cohort_user=cohort_user) @@ -462,45 +499,51 @@ def test__with_one_task__task_is_done__with_log__already_exists(self): set_cohort_user_assignments.delay(1) - self.assertEqual(self.bc.database.list_of('assignments.Task'), [self.bc.format.to_dict(model.task)]) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), [ - { - **self.bc.format.to_dict(model.cohort_user), - 'history_log': { - 'delivered_assignments': [ - { - 'id': 1, - 'type': task_type, - }, - ], - 'pending_assignments': [], + self.assertEqual(self.bc.database.list_of("assignments.Task"), [self.bc.format.to_dict(model.task)]) + self.assertEqual( + self.bc.database.list_of("admissions.CohortUser"), + [ + { + **self.bc.format.to_dict(model.cohort_user), + "history_log": { + "delivered_assignments": [ + { + "id": 1, + "type": task_type, + }, + ], + "pending_assignments": [], + }, }, - }, - ]) - self.assertEqual(Logger.info.call_args_list, [ - call('Executing set_cohort_user_assignments'), - call('History log saved'), - ]) + ], + ) + self.assertEqual( + Logger.info.call_args_list, + [ + call("Executing set_cohort_user_assignments"), + call("History log saved"), + ], + ) self.assertEqual(Logger.error.call_args_list, []) def test__with_one_task__task_is_done__with_log__from_different_items(self): - task_type = random.choice(['LESSON', 'QUIZ', 'PROJECT', 'EXERCISE']) + task_type = random.choice(["LESSON", "QUIZ", "PROJECT", "EXERCISE"]) task = { - 'task_status': 'DONE', - 'task_type': task_type, + "task_status": "DONE", + "task_type": task_type, } cohort_user = { - 'history_log': { - 'delivered_assignments': [ + "history_log": { + "delivered_assignments": [ { - 'id': 3, - 'type': task_type, + "id": 3, + "type": task_type, }, ], - 'pending_assignments': [ + "pending_assignments": [ { - 'id': 2, - 'type': task_type, + "id": 2, + "type": task_type, }, ], } @@ -511,32 +554,38 @@ def test__with_one_task__task_is_done__with_log__from_different_items(self): set_cohort_user_assignments.delay(1) - self.assertEqual(self.bc.database.list_of('assignments.Task'), [self.bc.format.to_dict(model.task)]) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), [ - { - **self.bc.format.to_dict(model.cohort_user), - 'history_log': { - 'delivered_assignments': [ - { - 'id': 3, - 'type': task_type, - }, - { - 'id': 1, - 'type': task_type, - }, - ], - 'pending_assignments': [ - { - 'id': 2, - 'type': task_type, - }, - ], + self.assertEqual(self.bc.database.list_of("assignments.Task"), [self.bc.format.to_dict(model.task)]) + self.assertEqual( + self.bc.database.list_of("admissions.CohortUser"), + [ + { + **self.bc.format.to_dict(model.cohort_user), + "history_log": { + "delivered_assignments": [ + { + "id": 3, + "type": task_type, + }, + { + "id": 1, + "type": task_type, + }, + ], + "pending_assignments": [ + { + "id": 2, + "type": task_type, + }, + ], + }, }, - }, - ]) - self.assertEqual(Logger.info.call_args_list, [ - call('Executing set_cohort_user_assignments'), - call('History log saved'), - ]) + ], + ) + self.assertEqual( + Logger.info.call_args_list, + [ + call("Executing set_cohort_user_assignments"), + call("History log saved"), + ], + ) self.assertEqual(Logger.error.call_args_list, []) diff --git 
a/breathecode/assignments/tests/tasks/tests_student_task_notification.py b/breathecode/assignments/tests/tasks/tests_student_task_notification.py index e4db763c1..8b6b6ef58 100644 --- a/breathecode/assignments/tests/tasks/tests_student_task_notification.py +++ b/breathecode/assignments/tests/tasks/tests_student_task_notification.py @@ -12,14 +12,15 @@ class MediaTestSuite(AssignmentsTestCase): """Test /answer""" + """ 🔽🔽🔽 Without Task """ - @patch('breathecode.notify.actions.send_email_message', MagicMock()) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.assignments.signals.assignment_created', MagicMock()) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.assignments.signals.assignment_created", MagicMock()) def test_student_task_notification__without_tasks(self): from logging import Logger @@ -27,311 +28,383 @@ def test_student_task_notification__without_tasks(self): student_task_notification.delay(1) - self.assertEqual(self.bc.database.list_of('assignments.Task'), []) + self.assertEqual(self.bc.database.list_of("assignments.Task"), []) self.assertEqual(send_email_message.call_args_list, []) - self.assertEqual(Logger.info.call_args_list, [call('Starting student_task_notification')]) - self.assertEqual(Logger.error.call_args_list, [call('Task not found')]) + self.assertEqual(Logger.info.call_args_list, [call("Starting student_task_notification")]) + self.assertEqual(Logger.error.call_args_list, [call("Task not found")]) self.assertEqual(signals.assignment_created.send_robust.call_args_list, []) """ 🔽🔽🔽 With Task and Cohort revision_status PENDING """ - @patch('breathecode.notify.actions.send_email_message', MagicMock()) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.assignments.signals.assignment_created', MagicMock()) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.assignments.signals.assignment_created", MagicMock()) def test_student_task_notification__pending__with_task__with_cohort(self): from logging import Logger from breathecode.notify.actions import send_email_message - task = {'revision_status': 'PENDING'} - with patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()): + task = {"revision_status": "PENDING"} + with patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()): model = self.bc.database.create(task=task, cohort=1) Logger.info.call_args_list = [] student_task_notification.delay(1) - self.assertEqual(self.bc.database.list_of('assignments.Task'), [self.bc.format.to_dict(model.task)]) - self.assertEqual(send_email_message.call_args_list, [ - call('diagnostic', - model.user.email, { - 'subject': f'Your task "{model.task.title}" has been reviewed', - 'details': 'Your task has been marked as pending', - }, - academy=model.academy) - ]) - - self.assertEqual(Logger.info.call_args_list, [call('Starting student_task_notification')]) + self.assertEqual(self.bc.database.list_of("assignments.Task"), [self.bc.format.to_dict(model.task)]) + self.assertEqual( + send_email_message.call_args_list, + [ + call( + "diagnostic", + model.user.email, + { + "subject": f'Your task "{model.task.title}" has been reviewed', + "details": "Your task has been marked as 
pending", + }, + academy=model.academy, + ) + ], + ) + + self.assertEqual(Logger.info.call_args_list, [call("Starting student_task_notification")]) self.assertEqual(Logger.error.call_args_list, []) - self.assertEqual(signals.assignment_created.send_robust.call_args_list, - [call(instance=model.task, sender=model.task.__class__)]) - - @patch('breathecode.notify.actions.send_email_message', MagicMock()) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.assignments.signals.assignment_created', MagicMock()) + self.assertEqual( + signals.assignment_created.send_robust.call_args_list, + [call(instance=model.task, sender=model.task.__class__)], + ) + + @patch("breathecode.notify.actions.send_email_message", MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.assignments.signals.assignment_created", MagicMock()) def test_student_task_notification__with_task__pending__with_cohort__url_ends_with_slash(self): from logging import Logger from breathecode.notify.actions import send_email_message - task = {'revision_status': 'PENDING'} - with patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()): + task = {"revision_status": "PENDING"} + with patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()): model = self.bc.database.create(task=task, cohort=1) Logger.info.call_args_list = [] student_task_notification.delay(1) - self.assertEqual(self.bc.database.list_of('assignments.Task'), [self.bc.format.to_dict(model.task)]) - self.assertEqual(send_email_message.call_args_list, [ - call('diagnostic', - model.user.email, { - 'subject': f'Your task "{model.task.title}" has been reviewed', - 'details': 'Your task has been marked as pending', - }, - academy=model.academy) - ]) - - self.assertEqual(Logger.info.call_args_list, [call('Starting student_task_notification')]) + self.assertEqual(self.bc.database.list_of("assignments.Task"), [self.bc.format.to_dict(model.task)]) + self.assertEqual( + send_email_message.call_args_list, + [ + call( + "diagnostic", + model.user.email, + { + "subject": f'Your task "{model.task.title}" has been reviewed', + "details": "Your task has been marked as pending", + }, + academy=model.academy, + ) + ], + ) + + self.assertEqual(Logger.info.call_args_list, [call("Starting student_task_notification")]) self.assertEqual(Logger.error.call_args_list, []) - self.assertEqual(signals.assignment_created.send_robust.call_args_list, - [call(instance=model.task, sender=model.task.__class__)]) - - @patch('breathecode.notify.actions.send_email_message', MagicMock()) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.assignments.signals.assignment_created', MagicMock()) + self.assertEqual( + signals.assignment_created.send_robust.call_args_list, + [call(instance=model.task, sender=model.task.__class__)], + ) + + @patch("breathecode.notify.actions.send_email_message", MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.assignments.signals.assignment_created", MagicMock()) def test_student_task_notification__with_task__pending__with_cohort__lang_es(self): from logging import Logger from breathecode.notify.actions import send_email_message - task = {'revision_status': 'PENDING'} - cohort = {'language': 'es'} - with patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()): + task = 
{"revision_status": "PENDING"} + cohort = {"language": "es"} + with patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()): model = self.bc.database.create(task=task, cohort=cohort) Logger.info.call_args_list = [] student_task_notification.delay(1) - self.assertEqual(self.bc.database.list_of('assignments.Task'), [self.bc.format.to_dict(model.task)]) - self.assertEqual(send_email_message.call_args_list, [ - call('diagnostic', - model.user.email, { - 'subject': f'Tu tarea "{model.task.title}" ha sido revisada', - 'details': 'Tu tarea se ha marcado como pendiente', - }, - academy=model.academy) - ]) - - self.assertEqual(Logger.info.call_args_list, [call('Starting student_task_notification')]) + self.assertEqual(self.bc.database.list_of("assignments.Task"), [self.bc.format.to_dict(model.task)]) + self.assertEqual( + send_email_message.call_args_list, + [ + call( + "diagnostic", + model.user.email, + { + "subject": f'Tu tarea "{model.task.title}" ha sido revisada', + "details": "Tu tarea se ha marcado como pendiente", + }, + academy=model.academy, + ) + ], + ) + + self.assertEqual(Logger.info.call_args_list, [call("Starting student_task_notification")]) self.assertEqual(Logger.error.call_args_list, []) - self.assertEqual(signals.assignment_created.send_robust.call_args_list, - [call(instance=model.task, sender=model.task.__class__)]) + self.assertEqual( + signals.assignment_created.send_robust.call_args_list, + [call(instance=model.task, sender=model.task.__class__)], + ) """ 🔽🔽🔽 With Task and Cohort revision_status APPROVED """ - @patch('breathecode.notify.actions.send_email_message', MagicMock()) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.assignments.signals.assignment_created', MagicMock()) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.assignments.signals.assignment_created", MagicMock()) def test_student_task_notification__approved__with_task__with_cohort(self): from logging import Logger from breathecode.notify.actions import send_email_message - task = {'revision_status': 'APPROVED'} - with patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()): + task = {"revision_status": "APPROVED"} + with patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()): model = self.bc.database.create(task=task, cohort=1) Logger.info.call_args_list = [] student_task_notification.delay(1) - self.assertEqual(self.bc.database.list_of('assignments.Task'), [self.bc.format.to_dict(model.task)]) - self.assertEqual(send_email_message.call_args_list, [ - call('diagnostic', - model.user.email, { - 'subject': f'Your task "{model.task.title}" has been reviewed', - 'details': 'Your task has been marked as approved', - }, - academy=model.academy) - ]) - - self.assertEqual(Logger.info.call_args_list, [call('Starting student_task_notification')]) + self.assertEqual(self.bc.database.list_of("assignments.Task"), [self.bc.format.to_dict(model.task)]) + self.assertEqual( + send_email_message.call_args_list, + [ + call( + "diagnostic", + model.user.email, + { + "subject": f'Your task "{model.task.title}" has been reviewed', + "details": "Your task has been marked as approved", + }, + academy=model.academy, + ) + ], + ) + + self.assertEqual(Logger.info.call_args_list, [call("Starting student_task_notification")]) self.assertEqual(Logger.error.call_args_list, []) - 
self.assertEqual(signals.assignment_created.send_robust.call_args_list, - [call(instance=model.task, sender=model.task.__class__)]) - - @patch('breathecode.notify.actions.send_email_message', MagicMock()) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.assignments.signals.assignment_created', MagicMock()) + self.assertEqual( + signals.assignment_created.send_robust.call_args_list, + [call(instance=model.task, sender=model.task.__class__)], + ) + + @patch("breathecode.notify.actions.send_email_message", MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.assignments.signals.assignment_created", MagicMock()) def test_student_task_notification__with_task__approved__with_cohort__url_ends_with_slash(self): from logging import Logger from breathecode.notify.actions import send_email_message - task = {'revision_status': 'APPROVED'} - with patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()): + task = {"revision_status": "APPROVED"} + with patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()): model = self.bc.database.create(task=task, cohort=1) Logger.info.call_args_list = [] student_task_notification.delay(1) - self.assertEqual(self.bc.database.list_of('assignments.Task'), [self.bc.format.to_dict(model.task)]) - self.assertEqual(send_email_message.call_args_list, [ - call('diagnostic', - model.user.email, { - 'subject': f'Your task "{model.task.title}" has been reviewed', - 'details': 'Your task has been marked as approved', - }, - academy=model.academy) - ]) - - self.assertEqual(Logger.info.call_args_list, [call('Starting student_task_notification')]) + self.assertEqual(self.bc.database.list_of("assignments.Task"), [self.bc.format.to_dict(model.task)]) + self.assertEqual( + send_email_message.call_args_list, + [ + call( + "diagnostic", + model.user.email, + { + "subject": f'Your task "{model.task.title}" has been reviewed', + "details": "Your task has been marked as approved", + }, + academy=model.academy, + ) + ], + ) + + self.assertEqual(Logger.info.call_args_list, [call("Starting student_task_notification")]) self.assertEqual(Logger.error.call_args_list, []) - self.assertEqual(signals.assignment_created.send_robust.call_args_list, - [call(instance=model.task, sender=model.task.__class__)]) - - @patch('breathecode.notify.actions.send_email_message', MagicMock()) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.assignments.signals.assignment_created', MagicMock()) + self.assertEqual( + signals.assignment_created.send_robust.call_args_list, + [call(instance=model.task, sender=model.task.__class__)], + ) + + @patch("breathecode.notify.actions.send_email_message", MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.assignments.signals.assignment_created", MagicMock()) def test_student_task_notification__with_task__approved__with_cohort__lang_es(self): from logging import Logger from breathecode.notify.actions import send_email_message - task = {'revision_status': 'APPROVED'} - cohort = {'language': 'es'} - with patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()): + task = {"revision_status": "APPROVED"} + cohort = {"language": "es"} + with patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()): model = self.bc.database.create(task=task, 
cohort=cohort) Logger.info.call_args_list = [] student_task_notification.delay(1) - self.assertEqual(self.bc.database.list_of('assignments.Task'), [self.bc.format.to_dict(model.task)]) - self.assertEqual(send_email_message.call_args_list, [ - call('diagnostic', - model.user.email, { - 'subject': f'Tu tarea "{model.task.title}" ha sido revisada', - 'details': 'Tu tarea se ha marcado como aprobada', - }, - academy=model.academy) - ]) - - self.assertEqual(Logger.info.call_args_list, [call('Starting student_task_notification')]) + self.assertEqual(self.bc.database.list_of("assignments.Task"), [self.bc.format.to_dict(model.task)]) + self.assertEqual( + send_email_message.call_args_list, + [ + call( + "diagnostic", + model.user.email, + { + "subject": f'Tu tarea "{model.task.title}" ha sido revisada', + "details": "Tu tarea se ha marcado como aprobada", + }, + academy=model.academy, + ) + ], + ) + + self.assertEqual(Logger.info.call_args_list, [call("Starting student_task_notification")]) self.assertEqual(Logger.error.call_args_list, []) - self.assertEqual(signals.assignment_created.send_robust.call_args_list, - [call(instance=model.task, sender=model.task.__class__)]) + self.assertEqual( + signals.assignment_created.send_robust.call_args_list, + [call(instance=model.task, sender=model.task.__class__)], + ) """ 🔽🔽🔽 With Task and Cohort revision_status REJECTED """ - @patch('breathecode.notify.actions.send_email_message', MagicMock()) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.assignments.signals.assignment_created', MagicMock()) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.assignments.signals.assignment_created", MagicMock()) def test_student_task_notification__rejected__with_task__with_cohort(self): from logging import Logger from breathecode.notify.actions import send_email_message - task = {'revision_status': 'REJECTED'} - with patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()): + task = {"revision_status": "REJECTED"} + with patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()): model = self.bc.database.create(task=task, cohort=1) Logger.info.call_args_list = [] student_task_notification.delay(1) - self.assertEqual(self.bc.database.list_of('assignments.Task'), [self.bc.format.to_dict(model.task)]) - self.assertEqual(send_email_message.call_args_list, [ - call('diagnostic', - model.user.email, { - 'subject': f'Your task "{model.task.title}" has been reviewed', - 'details': 'Your task has been marked as rejected', - }, - academy=model.academy) - ]) - - self.assertEqual(Logger.info.call_args_list, [call('Starting student_task_notification')]) + self.assertEqual(self.bc.database.list_of("assignments.Task"), [self.bc.format.to_dict(model.task)]) + self.assertEqual( + send_email_message.call_args_list, + [ + call( + "diagnostic", + model.user.email, + { + "subject": f'Your task "{model.task.title}" has been reviewed', + "details": "Your task has been marked as rejected", + }, + academy=model.academy, + ) + ], + ) + + self.assertEqual(Logger.info.call_args_list, [call("Starting student_task_notification")]) self.assertEqual(Logger.error.call_args_list, []) - self.assertEqual(signals.assignment_created.send_robust.call_args_list, - [call(instance=model.task, sender=model.task.__class__)]) - - @patch('breathecode.notify.actions.send_email_message', 
MagicMock()) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.assignments.signals.assignment_created', MagicMock()) + self.assertEqual( + signals.assignment_created.send_robust.call_args_list, + [call(instance=model.task, sender=model.task.__class__)], + ) + + @patch("breathecode.notify.actions.send_email_message", MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.assignments.signals.assignment_created", MagicMock()) def test_student_task_notification__with_task__rejected__with_cohort__url_ends_with_slash(self): from logging import Logger from breathecode.notify.actions import send_email_message - task = {'revision_status': 'REJECTED'} - with patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()): + task = {"revision_status": "REJECTED"} + with patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()): model = self.bc.database.create(task=task, cohort=1) Logger.info.call_args_list = [] student_task_notification.delay(1) - self.assertEqual(self.bc.database.list_of('assignments.Task'), [self.bc.format.to_dict(model.task)]) - self.assertEqual(send_email_message.call_args_list, [ - call('diagnostic', - model.user.email, { - 'subject': f'Your task "{model.task.title}" has been reviewed', - 'details': 'Your task has been marked as rejected', - }, - academy=model.academy) - ]) - - self.assertEqual(str(Logger.info.call_args_list), str([call('Starting student_task_notification')])) + self.assertEqual(self.bc.database.list_of("assignments.Task"), [self.bc.format.to_dict(model.task)]) + self.assertEqual( + send_email_message.call_args_list, + [ + call( + "diagnostic", + model.user.email, + { + "subject": f'Your task "{model.task.title}" has been reviewed', + "details": "Your task has been marked as rejected", + }, + academy=model.academy, + ) + ], + ) + + self.assertEqual(str(Logger.info.call_args_list), str([call("Starting student_task_notification")])) self.assertEqual(Logger.error.call_args_list, []) - self.assertEqual(signals.assignment_created.send_robust.call_args_list, - [call(instance=model.task, sender=model.task.__class__)]) - - @patch('breathecode.notify.actions.send_email_message', MagicMock()) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.assignments.signals.assignment_created', MagicMock()) + self.assertEqual( + signals.assignment_created.send_robust.call_args_list, + [call(instance=model.task, sender=model.task.__class__)], + ) + + @patch("breathecode.notify.actions.send_email_message", MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.assignments.signals.assignment_created", MagicMock()) def test_student_task_notification__with_task__rejected__with_cohort__lang_es(self): from logging import Logger from breathecode.notify.actions import send_email_message - task = {'revision_status': 'REJECTED'} - cohort = {'language': 'es'} - with patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()): + task = {"revision_status": "REJECTED"} + cohort = {"language": "es"} + with patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()): model = self.bc.database.create(task=task, cohort=cohort) Logger.info.call_args_list = [] student_task_notification.delay(1) - self.assertEqual(self.bc.database.list_of('assignments.Task'), 
[self.bc.format.to_dict(model.task)]) - self.assertEqual(send_email_message.call_args_list, [ - call('diagnostic', - model.user.email, { - 'subject': f'Tu tarea "{model.task.title}" ha sido revisada', - 'details': 'Tu tarea se ha marcado como rechazada', - }, - academy=model.academy) - ]) - - self.assertEqual(Logger.info.call_args_list, [call('Starting student_task_notification')]) + self.assertEqual(self.bc.database.list_of("assignments.Task"), [self.bc.format.to_dict(model.task)]) + self.assertEqual( + send_email_message.call_args_list, + [ + call( + "diagnostic", + model.user.email, + { + "subject": f'Tu tarea "{model.task.title}" ha sido revisada', + "details": "Tu tarea se ha marcado como rechazada", + }, + academy=model.academy, + ) + ], + ) + + self.assertEqual(Logger.info.call_args_list, [call("Starting student_task_notification")]) self.assertEqual(Logger.error.call_args_list, []) - self.assertEqual(signals.assignment_created.send_robust.call_args_list, - [call(instance=model.task, sender=model.task.__class__)]) + self.assertEqual( + signals.assignment_created.send_robust.call_args_list, + [call(instance=model.task, sender=model.task.__class__)], + ) diff --git a/breathecode/assignments/tests/tasks/tests_teacher_task_notification.py b/breathecode/assignments/tests/tasks/tests_teacher_task_notification.py index 5afbf0cd6..aeda89747 100644 --- a/breathecode/assignments/tests/tasks/tests_teacher_task_notification.py +++ b/breathecode/assignments/tests/tasks/tests_teacher_task_notification.py @@ -1,6 +1,7 @@ """ Test /answer """ + from unittest.mock import MagicMock, call, patch from breathecode.assignments import signals @@ -11,15 +12,16 @@ class MediaTestSuite(AssignmentsTestCase): """Test /answer""" + """ 🔽🔽🔽 Without env """ - @patch('breathecode.notify.actions.send_email_message', MagicMock()) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('os.getenv', MagicMock(return_value=None)) - @patch('breathecode.assignments.signals.assignment_created', MagicMock()) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("os.getenv", MagicMock(return_value=None)) + @patch("breathecode.assignments.signals.assignment_created", MagicMock()) def test_teacher_task_notification__without_env(self): import os from logging import Logger @@ -28,22 +30,22 @@ def test_teacher_task_notification__without_env(self): teacher_task_notification.delay(1) - self.assertEqual(self.bc.database.list_of('assignments.Task'), []) + self.assertEqual(self.bc.database.list_of("assignments.Task"), []) self.assertEqual(send_email_message.call_args_list, []) - self.assertEqual(os.getenv.call_args_list, [call('TEACHER_URL')]) - self.assertEqual(Logger.info.call_args_list, [call('Starting teacher_task_notification')]) - self.assertEqual(Logger.error.call_args_list, [call('TEACHER_URL is not set as environment variable')]) + self.assertEqual(os.getenv.call_args_list, [call("TEACHER_URL")]) + self.assertEqual(Logger.info.call_args_list, [call("Starting teacher_task_notification")]) + self.assertEqual(Logger.error.call_args_list, [call("TEACHER_URL is not set as environment variable")]) self.assertEqual(signals.assignment_created.send_robust.call_args_list, []) """ 🔽🔽🔽 Without Task """ - @patch('breathecode.notify.actions.send_email_message', MagicMock()) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - 
@patch('os.getenv', MagicMock(return_value='https://hardcoded.url')) - @patch('breathecode.assignments.signals.assignment_created', MagicMock()) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("os.getenv", MagicMock(return_value="https://hardcoded.url")) + @patch("breathecode.assignments.signals.assignment_created", MagicMock()) def test_teacher_task_notification__without_tasks(self): import os from logging import Logger @@ -54,145 +56,173 @@ def test_teacher_task_notification__without_tasks(self): teacher_task_notification.delay(1) - self.assertEqual(self.bc.database.list_of('assignments.Task'), []) + self.assertEqual(self.bc.database.list_of("assignments.Task"), []) self.assertEqual(send_email_message.call_args_list, []) - self.assertEqual(os.getenv.call_args_list, [call('TEACHER_URL')]) - self.assertEqual(Logger.info.call_args_list, [call('Starting teacher_task_notification')]) - self.assertEqual(Logger.error.call_args_list, [call('Task not found')]) + self.assertEqual(os.getenv.call_args_list, [call("TEACHER_URL")]) + self.assertEqual(Logger.info.call_args_list, [call("Starting teacher_task_notification")]) + self.assertEqual(Logger.error.call_args_list, [call("Task not found")]) self.assertEqual(signals.assignment_created.send_robust.call_args_list, []) """ 🔽🔽🔽 With Task and Cohort """ - @patch('breathecode.notify.actions.send_email_message', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('os.getenv', MagicMock(return_value='https://hardcoded.url')) - @patch('breathecode.assignments.signals.assignment_created', MagicMock()) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("os.getenv", MagicMock(return_value="https://hardcoded.url")) + @patch("breathecode.assignments.signals.assignment_created", MagicMock()) def test_teacher_task_notification__with_task__with_cohort(self): import os from logging import Logger from breathecode.notify.actions import send_email_message - with patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()): + with patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()): model = self.bc.database.create(task=1, cohort=1) - with patch('logging.Logger.info', MagicMock()): + with patch("logging.Logger.info", MagicMock()): teacher_task_notification.delay(1) - self.assertEqual(Logger.info.call_args_list, [call('Starting teacher_task_notification')]) - - self.assertEqual(self.bc.database.list_of('assignments.Task'), [self.bc.format.to_dict(model.task)]) - self.assertEqual(send_email_message.call_args_list, [ - call('diagnostic', - model.user.email, { - 'subject': - f'{model.user.first_name} {model.user.last_name} send their task', - 'details': - (f'{model.user.first_name} {model.user.last_name} send their task "{model.task.title}", ' - 'you can review the task at ' - f'https://hardcoded.url/cohort/{model.cohort.slug}/assignments'), - }, - academy=model.academy) - ]) + self.assertEqual(Logger.info.call_args_list, [call("Starting teacher_task_notification")]) + + self.assertEqual(self.bc.database.list_of("assignments.Task"), [self.bc.format.to_dict(model.task)]) + self.assertEqual( + send_email_message.call_args_list, + [ + call( + "diagnostic", + model.user.email, + { + "subject": f"{model.user.first_name} {model.user.last_name} send their task", + "details": ( + f'{model.user.first_name} {model.user.last_name} send 
their task "{model.task.title}", ' + "you can review the task at " + f"https://hardcoded.url/cohort/{model.cohort.slug}/assignments" + ), + }, + academy=model.academy, + ) + ], + ) self.assertEqual( os.getenv.call_args_list, [ - call('ENV', ''), # this is coming from Academy.save - call('TEACHER_URL'), - ]) + call("ENV", ""), # this is coming from Academy.save + call("TEACHER_URL"), + ], + ) self.assertEqual(Logger.error.call_args_list, []) - self.assertEqual(signals.assignment_created.send_robust.call_args_list, - [call(instance=model.task, sender=model.task.__class__)]) + self.assertEqual( + signals.assignment_created.send_robust.call_args_list, + [call(instance=model.task, sender=model.task.__class__)], + ) """ 🔽🔽🔽 With Task and Cohort in spanish """ - @patch('breathecode.notify.actions.send_email_message', MagicMock()) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('os.getenv', MagicMock(return_value='https://hardcoded.url')) - @patch('breathecode.assignments.signals.assignment_created', MagicMock()) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("os.getenv", MagicMock(return_value="https://hardcoded.url")) + @patch("breathecode.assignments.signals.assignment_created", MagicMock()) def test_teacher_task_notification__with_task__with_cohort__lang_es(self): import os from logging import Logger from breathecode.notify.actions import send_email_message - cohort = {'language': 'es'} - with patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()): + cohort = {"language": "es"} + with patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()): model = self.bc.database.create(task=1, cohort=cohort) Logger.info.call_args_list = [] teacher_task_notification.delay(1) - self.assertEqual(self.bc.database.list_of('assignments.Task'), [self.bc.format.to_dict(model.task)]) - self.assertEqual(send_email_message.call_args_list, [ - call('diagnostic', - model.user.email, { - 'subject': - f'{model.user.first_name} {model.user.last_name} envió su tarea', - 'details': (f'{model.user.first_name} {model.user.last_name} envió su tarea "{model.task.title}", ' - 'puedes revisarla en ' - f'https://hardcoded.url/cohort/{model.cohort.slug}/assignments'), - }, - academy=model.academy) - ]) + self.assertEqual(self.bc.database.list_of("assignments.Task"), [self.bc.format.to_dict(model.task)]) + self.assertEqual( + send_email_message.call_args_list, + [ + call( + "diagnostic", + model.user.email, + { + "subject": f"{model.user.first_name} {model.user.last_name} envió su tarea", + "details": ( + f'{model.user.first_name} {model.user.last_name} envió su tarea "{model.task.title}", ' + "puedes revisarla en " + f"https://hardcoded.url/cohort/{model.cohort.slug}/assignments" + ), + }, + academy=model.academy, + ) + ], + ) self.assertEqual( os.getenv.call_args_list, [ - call('ENV', ''), # this is coming from Academy.save - call('TEACHER_URL'), - ]) - self.assertEqual(Logger.info.call_args_list, [call('Starting teacher_task_notification')]) + call("ENV", ""), # this is coming from Academy.save + call("TEACHER_URL"), + ], + ) + self.assertEqual(Logger.info.call_args_list, [call("Starting teacher_task_notification")]) self.assertEqual(Logger.error.call_args_list, []) - self.assertEqual(signals.assignment_created.send_robust.call_args_list, - [call(instance=model.task, sender=model.task.__class__)]) + self.assertEqual( + 
signals.assignment_created.send_robust.call_args_list, + [call(instance=model.task, sender=model.task.__class__)], + ) """ 🔽🔽🔽 With Task and Cohort, url ends with / """ - @patch('breathecode.notify.actions.send_email_message', MagicMock()) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('os.getenv', MagicMock(return_value='https://hardcoded.url/')) - @patch('breathecode.assignments.signals.assignment_created', MagicMock()) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("os.getenv", MagicMock(return_value="https://hardcoded.url/")) + @patch("breathecode.assignments.signals.assignment_created", MagicMock()) def test_teacher_task_notification__with_task__with_cohort__ends_with_slash(self): import os from logging import Logger from breathecode.notify.actions import send_email_message - with patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()): + with patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()): model = self.bc.database.create(task=1, cohort=1) Logger.info.call_args_list = [] teacher_task_notification.delay(1) - self.assertEqual(self.bc.database.list_of('assignments.Task'), [self.bc.format.to_dict(model.task)]) - self.assertEqual(send_email_message.call_args_list, [ - call('diagnostic', - model.user.email, { - 'subject': - f'{model.user.first_name} {model.user.last_name} send their task', - 'details': - (f'{model.user.first_name} {model.user.last_name} send their task "{model.task.title}", ' - 'you can review the task at ' - f'https://hardcoded.url/cohort/{model.cohort.slug}/assignments'), - }, - academy=model.academy) - ]) + self.assertEqual(self.bc.database.list_of("assignments.Task"), [self.bc.format.to_dict(model.task)]) + self.assertEqual( + send_email_message.call_args_list, + [ + call( + "diagnostic", + model.user.email, + { + "subject": f"{model.user.first_name} {model.user.last_name} send their task", + "details": ( + f'{model.user.first_name} {model.user.last_name} send their task "{model.task.title}", ' + "you can review the task at " + f"https://hardcoded.url/cohort/{model.cohort.slug}/assignments" + ), + }, + academy=model.academy, + ) + ], + ) self.assertEqual( os.getenv.call_args_list, [ - call('ENV', ''), # this is coming from Academy.save - call('TEACHER_URL'), - ]) - self.assertEqual(Logger.info.call_args_list, [call('Starting teacher_task_notification')]) + call("ENV", ""), # this is coming from Academy.save + call("TEACHER_URL"), + ], + ) + self.assertEqual(Logger.info.call_args_list, [call("Starting teacher_task_notification")]) self.assertEqual(Logger.error.call_args_list, []) - self.assertEqual(signals.assignment_created.send_robust.call_args_list, - [call(instance=model.task, sender=model.task.__class__)]) + self.assertEqual( + signals.assignment_created.send_robust.call_args_list, + [call(instance=model.task, sender=model.task.__class__)], + ) diff --git a/breathecode/assignments/tests/urls/tests_academy_coderevision.py b/breathecode/assignments/tests/urls/tests_academy_coderevision.py index 8520674ab..60ba656e8 100644 --- a/breathecode/assignments/tests/urls/tests_academy_coderevision.py +++ b/breathecode/assignments/tests/urls/tests_academy_coderevision.py @@ -1,6 +1,7 @@ """ Test /answer """ + import json import random from unittest.mock import MagicMock, call @@ -57,7 +58,7 @@ def patch_get(monkeypatch): def handler(expected, code, headers): 
reader = StreamReaderMock(json.dumps(expected).encode()) - monkeypatch.setattr('aiohttp.ClientSession.get', MagicMock(return_value=ResponseMock(reader, code, headers))) + monkeypatch.setattr("aiohttp.ClientSession.get", MagicMock(return_value=ResponseMock(reader, code, headers))) yield handler @@ -68,7 +69,7 @@ def patch_post(monkeypatch): def handler(expected, code, headers): reader = StreamReaderMock(json.dumps(expected).encode()) - monkeypatch.setattr('aiohttp.ClientSession.post', MagicMock(return_value=ResponseMock(reader, code, headers))) + monkeypatch.setattr("aiohttp.ClientSession.post", MagicMock(return_value=ResponseMock(reader, code, headers))) yield handler @@ -76,16 +77,31 @@ def handler(expected, code, headers): @pytest.fixture def get_jwt(bc: Breathecode, monkeypatch): token = bc.random.string(lower=True, upper=True, symbol=True, number=True, size=20) - monkeypatch.setattr('linked_services.django.actions.get_jwt', MagicMock(return_value=token)) + monkeypatch.setattr("linked_services.django.actions.get_jwt", MagicMock(return_value=token)) yield token -@pytest.fixture(params=[ - ('linked_services.core.service.Service.__aenter__', Exception, 'App rigobot not found', 'app-not-found', 404, True), - ('linked_services.core.service.Service.__aenter__', SynchronousOnlyOperation, - 'Async is not supported by the worker', 'no-async-support', 500, True), - ('aiohttp.ClientSession.get', Exception, 'random exc', 'unexpected-error', 500, False), -]) +@pytest.fixture( + params=[ + ( + "linked_services.core.service.Service.__aenter__", + Exception, + "App rigobot not found", + "app-not-found", + 404, + True, + ), + ( + "linked_services.core.service.Service.__aenter__", + SynchronousOnlyOperation, + "Async is not supported by the worker", + "no-async-support", + 500, + True, + ), + ("aiohttp.ClientSession.get", Exception, "random exc", "unexpected-error", 500, False), + ] +) def get_exc(request, monkeypatch): path, exc, message, slug, code, is_async = request.param if is_async: @@ -120,17 +136,32 @@ async def async_exc_mock(message): monkeypatch.setattr(path, ContextMock) yield { - 'slug': slug, - 'code': code, + "slug": slug, + "code": code, } -@pytest.fixture(params=[ - ('linked_services.core.service.Service.__aenter__', Exception, 'App rigobot not found', 'app-not-found', 404, True), - ('linked_services.core.service.Service.__aenter__', SynchronousOnlyOperation, - 'Async is not supported by the worker', 'no-async-support', 500, True), - ('aiohttp.ClientSession.post', Exception, 'random exc', 'unexpected-error', 500, False), -]) +@pytest.fixture( + params=[ + ( + "linked_services.core.service.Service.__aenter__", + Exception, + "App rigobot not found", + "app-not-found", + 404, + True, + ), + ( + "linked_services.core.service.Service.__aenter__", + SynchronousOnlyOperation, + "Async is not supported by the worker", + "no-async-support", + 500, + True, + ), + ("aiohttp.ClientSession.post", Exception, "random exc", "unexpected-error", 500, False), + ] +) def post_exc(request, monkeypatch): path, exc, message, slug, code, is_async = request.param if is_async: @@ -165,23 +196,23 @@ async def async_exc_mock(message): monkeypatch.setattr(path, ContextMock) yield { - 'slug': slug, - 'code': code, + "slug": slug, + "code": code, } # When: no auth # Then: response 401 def test_no_auth(bc: Breathecode, client: APIClient): - url = reverse_lazy('assignments:academy_coderevision') + url = reverse_lazy("assignments:academy_coderevision") response = client.get(url) json = response.json() - expected = 
{'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} assert json == expected assert response.status_code == status.HTTP_401_UNAUTHORIZED - assert bc.database.list_of('assignments.Task') == [] + assert bc.database.list_of("assignments.Task") == [] # When: no capability @@ -191,49 +222,48 @@ def test_no_capability(bc: Breathecode, client: APIClient): client.force_authenticate(model.user) - url = reverse_lazy('assignments:academy_coderevision') - response = client.get(url, headers={'academy': 1}) + url = reverse_lazy("assignments:academy_coderevision") + response = client.get(url, headers={"academy": 1}) json = response.json() expected = { - 'detail': 'You (user: 1) don\'t have this capability: read_assignment for academy 1', - 'status_code': 403, + "detail": "You (user: 1) don't have this capability: read_assignment for academy 1", + "status_code": 403, } assert json == expected assert response.status_code == status.HTTP_403_FORBIDDEN - assert bc.database.list_of('assignments.Task') == [] + assert bc.database.list_of("assignments.Task") == [] # When: raise an exception # Then: response 200 def test_raise_an_exception(bc: Breathecode, client: APIClient, get_exc): - expected = {'detail': get_exc['slug'], 'status_code': get_exc['code']} + expected = {"detail": get_exc["slug"], "status_code": get_exc["code"]} query = { bc.fake.slug(): bc.fake.slug(), bc.fake.slug(): bc.fake.slug(), bc.fake.slug(): bc.fake.slug(), } - task = {'github_url': bc.fake.url()} - model = bc.database.create(profile_academy=1, - task=task, - role=1, - capability='read_assignment', - app={ - 'slug': 'rigobot', - 'app_url': bc.fake.url() - }) + task = {"github_url": bc.fake.url()} + model = bc.database.create( + profile_academy=1, + task=task, + role=1, + capability="read_assignment", + app={"slug": "rigobot", "app_url": bc.fake.url()}, + ) client.force_authenticate(model.user) - url = reverse_lazy('assignments:academy_coderevision') + '?' + bc.format.querystring(query) + url = reverse_lazy("assignments:academy_coderevision") + "?" 
+ bc.format.querystring(query) - response = client.get(url, query, format='json', headers={'Academy': 1}) + response = client.get(url, query, format="json", headers={"Academy": 1}) json = response.json() assert json == expected - assert response.status_code == get_exc['code'] - assert bc.database.list_of('assignments.Task') == [bc.format.to_dict(model.task)] + assert response.status_code == get_exc["code"] + assert bc.database.list_of("assignments.Task") == [bc.format.to_dict(model.task)] # When: auth @@ -241,41 +271,42 @@ def test_raise_an_exception(bc: Breathecode, client: APIClient, get_exc): def test_auth(bc: Breathecode, client: APIClient, patch_get, get_jwt): # bc.request.set_headers(academy=1) - expected = {'data': {'getTask': {'id': random.randint(1, 100)}}} + expected = {"data": {"getTask": {"id": random.randint(1, 100)}}} query = { bc.fake.slug(): bc.fake.slug(), bc.fake.slug(): bc.fake.slug(), bc.fake.slug(): bc.fake.slug(), } - headers = {'Content-Type': 'application/json'} + headers = {"Content-Type": "application/json"} code = random.randint(200, 299) - task = {'github_url': bc.fake.url()} - model = bc.database.create(profile_academy=1, - task=task, - role=1, - capability='read_assignment', - app={ - 'slug': 'rigobot', - 'app_url': bc.fake.url() - }) + task = {"github_url": bc.fake.url()} + model = bc.database.create( + profile_academy=1, + task=task, + role=1, + capability="read_assignment", + app={"slug": "rigobot", "app_url": bc.fake.url()}, + ) client.force_authenticate(model.user) - url = reverse_lazy('assignments:academy_coderevision') + '?' + bc.format.querystring(query) + url = reverse_lazy("assignments:academy_coderevision") + "?" + bc.format.querystring(query) patch_get(expected, code, headers) - response = client.get(url, headers={'academy': 1}) + response = client.get(url, headers={"academy": 1}) assert aiohttp.ClientSession.get.call_args_list == [ - call(f'{model.app.app_url}/v1/finetuning/coderevision', - params=query, - headers={'Authorization': f'Link App=breathecode,Token={get_jwt}'}) + call( + f"{model.app.app_url}/v1/finetuning/coderevision", + params=query, + headers={"Authorization": f"Link App=breathecode,Token={get_jwt}"}, + ) ] - assert response.getvalue().decode('utf-8') == json.dumps(expected) + assert response.getvalue().decode("utf-8") == json.dumps(expected) assert response.status_code == code - assert bc.database.list_of('assignments.Task') == [bc.format.to_dict(model.task)] + assert bc.database.list_of("assignments.Task") == [bc.format.to_dict(model.task)] # When: no capability @@ -285,50 +316,49 @@ def test_post_no_capability(bc: Breathecode, client: APIClient): client.force_authenticate(model.user) - url = reverse_lazy('assignments:academy_coderevision') - response = client.post(url, headers={'academy': 1}) + url = reverse_lazy("assignments:academy_coderevision") + response = client.post(url, headers={"academy": 1}) json = response.json() expected = { - 'detail': 'You (user: 1) don\'t have this capability: crud_assignment for academy 1', - 'status_code': 403, + "detail": "You (user: 1) don't have this capability: crud_assignment for academy 1", + "status_code": 403, } assert json == expected assert response.status_code == status.HTTP_403_FORBIDDEN - assert bc.database.list_of('assignments.Task') == [] + assert bc.database.list_of("assignments.Task") == [] # When: raise an exception # Then: response 200 def test__post__raise_an_exception(bc: Breathecode, client: APIClient, post_exc): - expected = {'detail': post_exc['slug'], 'status_code': 
post_exc['code']} + expected = {"detail": post_exc["slug"], "status_code": post_exc["code"]} query = { bc.fake.slug(): bc.fake.slug(), bc.fake.slug(): bc.fake.slug(), bc.fake.slug(): bc.fake.slug(), } - task = {'github_url': bc.fake.url()} - model = bc.database.create(profile_academy=1, - task=task, - role=1, - capability='crud_assignment', - app={ - 'slug': 'rigobot', - 'app_url': bc.fake.url() - }) + task = {"github_url": bc.fake.url()} + model = bc.database.create( + profile_academy=1, + task=task, + role=1, + capability="crud_assignment", + app={"slug": "rigobot", "app_url": bc.fake.url()}, + ) client.force_authenticate(model.user) - url = reverse_lazy('assignments:academy_coderevision') + url = reverse_lazy("assignments:academy_coderevision") - response = client.post(url, query, format='json', headers={'Academy': 1}) + response = client.post(url, query, format="json", headers={"Academy": 1}) json = response.json() assert json == expected - assert response.status_code == post_exc['code'] - assert bc.database.list_of('assignments.Task') == [bc.format.to_dict(model.task)] + assert response.status_code == post_exc["code"] + assert bc.database.list_of("assignments.Task") == [bc.format.to_dict(model.task)] # When: auth @@ -336,41 +366,42 @@ def test__post__raise_an_exception(bc: Breathecode, client: APIClient, post_exc) def test_post_auth(bc: Breathecode, client: APIClient, patch_post, get_jwt): # bc.request.set_headers(academy=1) - expected = {'data': {'getTask': {'id': random.randint(1, 100)}}} + expected = {"data": {"getTask": {"id": random.randint(1, 100)}}} query = { bc.fake.slug(): bc.fake.slug(), bc.fake.slug(): bc.fake.slug(), bc.fake.slug(): bc.fake.slug(), } - headers = {'Content-Type': 'application/json'} + headers = {"Content-Type": "application/json"} code = random.randint(200, 299) - task = {'github_url': bc.fake.url()} - model = bc.database.create(profile_academy=1, - task=task, - role=1, - capability='crud_assignment', - app={ - 'slug': 'rigobot', - 'app_url': bc.fake.url() - }) + task = {"github_url": bc.fake.url()} + model = bc.database.create( + profile_academy=1, + task=task, + role=1, + capability="crud_assignment", + app={"slug": "rigobot", "app_url": bc.fake.url()}, + ) client.force_authenticate(model.user) - url = reverse_lazy('assignments:academy_coderevision') + url = reverse_lazy("assignments:academy_coderevision") patch_post(expected, code, headers) - response = client.post(url, query, headers={'academy': 1}, format='json') + response = client.post(url, query, headers={"academy": 1}, format="json") assert aiohttp.ClientSession.post.call_args_list == [ - call(f'{model.app.app_url}/v1/finetuning/coderevision', - data=query, - json=None, - params={}, - headers={'Authorization': f'Link App=breathecode,Token={get_jwt}'}) + call( + f"{model.app.app_url}/v1/finetuning/coderevision", + data=query, + json=None, + params={}, + headers={"Authorization": f"Link App=breathecode,Token={get_jwt}"}, + ) ] - assert response.getvalue().decode('utf-8') == json.dumps(expected) + assert response.getvalue().decode("utf-8") == json.dumps(expected) assert response.status_code == code - assert bc.database.list_of('assignments.Task') == [bc.format.to_dict(model.task)] + assert bc.database.list_of("assignments.Task") == [bc.format.to_dict(model.task)] diff --git a/breathecode/assignments/tests/urls/tests_academy_task_id_coderevision.py b/breathecode/assignments/tests/urls/tests_academy_task_id_coderevision.py index d81961183..632689c03 100644 --- 
a/breathecode/assignments/tests/urls/tests_academy_task_id_coderevision.py +++ b/breathecode/assignments/tests/urls/tests_academy_task_id_coderevision.py @@ -1,6 +1,7 @@ """ Test /answer """ + import json import random from unittest.mock import MagicMock, call, patch @@ -26,7 +27,7 @@ def setup(db): @pytest.fixture def get_jwt(bc: Breathecode, monkeypatch): token = bc.random.string(lower=True, upper=True, symbol=True, number=True, size=20) - monkeypatch.setattr('linked_services.django.actions.get_jwt', MagicMock(return_value=token)) + monkeypatch.setattr("linked_services.django.actions.get_jwt", MagicMock(return_value=token)) yield token @@ -65,7 +66,7 @@ def patch_get(monkeypatch): def handler(expected, code, headers): reader = StreamReaderMock(json.dumps(expected).encode()) - monkeypatch.setattr('aiohttp.ClientSession.get', MagicMock(return_value=ResponseMock(reader, code, headers))) + monkeypatch.setattr("aiohttp.ClientSession.get", MagicMock(return_value=ResponseMock(reader, code, headers))) yield handler @@ -76,17 +77,32 @@ def patch_post(monkeypatch): def handler(expected, code, headers): reader = StreamReaderMock(json.dumps(expected).encode()) - monkeypatch.setattr('aiohttp.ClientSession.post', MagicMock(return_value=ResponseMock(reader, code, headers))) + monkeypatch.setattr("aiohttp.ClientSession.post", MagicMock(return_value=ResponseMock(reader, code, headers))) yield handler -@pytest.fixture(params=[ - ('linked_services.core.service.Service.__aenter__', Exception, 'App rigobot not found', 'app-not-found', 404, True), - ('linked_services.core.service.Service.__aenter__', SynchronousOnlyOperation, - 'Async is not supported by the worker', 'no-async-support', 500, True), - ('aiohttp.ClientSession.get', Exception, 'random exc', 'unexpected-error', 500, False), -]) +@pytest.fixture( + params=[ + ( + "linked_services.core.service.Service.__aenter__", + Exception, + "App rigobot not found", + "app-not-found", + 404, + True, + ), + ( + "linked_services.core.service.Service.__aenter__", + SynchronousOnlyOperation, + "Async is not supported by the worker", + "no-async-support", + 500, + True, + ), + ("aiohttp.ClientSession.get", Exception, "random exc", "unexpected-error", 500, False), + ] +) def get_exc(request, monkeypatch): path, exc, message, slug, code, is_async = request.param if is_async: @@ -121,17 +137,32 @@ async def async_exc_mock(message): monkeypatch.setattr(path, ContextMock) yield { - 'slug': slug, - 'code': code, + "slug": slug, + "code": code, } -@pytest.fixture(params=[ - ('linked_services.core.service.Service.__aenter__', Exception, 'App rigobot not found', 'app-not-found', 404, True), - ('linked_services.core.service.Service.__aenter__', SynchronousOnlyOperation, - 'Async is not supported by the worker', 'no-async-support', 500, True), - ('aiohttp.ClientSession.post', Exception, 'random exc', 'unexpected-error', 500, False), -]) +@pytest.fixture( + params=[ + ( + "linked_services.core.service.Service.__aenter__", + Exception, + "App rigobot not found", + "app-not-found", + 404, + True, + ), + ( + "linked_services.core.service.Service.__aenter__", + SynchronousOnlyOperation, + "Async is not supported by the worker", + "no-async-support", + 500, + True, + ), + ("aiohttp.ClientSession.post", Exception, "random exc", "unexpected-error", 500, False), + ] +) def post_exc(request, monkeypatch): path, exc, message, slug, code, is_async = request.param if is_async: @@ -166,23 +197,23 @@ async def async_exc_mock(message): monkeypatch.setattr(path, ContextMock) yield { - 'slug': 
slug, - 'code': code, + "slug": slug, + "code": code, } # When: no auth # Then: response 401 def test_no_auth(bc: Breathecode, client: APIClient): - url = reverse_lazy('assignments:academy_task_id_coderevision', kwargs={'task_id': 1}) + url = reverse_lazy("assignments:academy_task_id_coderevision", kwargs={"task_id": 1}) response = client.get(url) json = response.json() - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} json == expected assert response.status_code == status.HTTP_401_UNAUTHORIZED - assert bc.database.list_of('assignments.Task') == [] + assert bc.database.list_of("assignments.Task") == [] # When: no capability @@ -192,24 +223,24 @@ def test_no_capability(bc: Breathecode, client: APIClient): client.force_authenticate(model.user) - url = reverse_lazy('assignments:academy_task_id_coderevision', kwargs={'task_id': 1}) + url = reverse_lazy("assignments:academy_task_id_coderevision", kwargs={"task_id": 1}) response = client.get(url) json = response.json() expected = { - 'detail': 'You (user: 1) don\'t have this capability: read_assignment for academy 1', - 'status_code': 403, + "detail": "You (user: 1) don't have this capability: read_assignment for academy 1", + "status_code": 403, } json == expected assert response.status_code == status.HTTP_403_FORBIDDEN - assert bc.database.list_of('assignments.Task') == [] + assert bc.database.list_of("assignments.Task") == [] # When: no tasks # Then: response 404 def test_no_tasks(bc: Breathecode, client: APIClient): - expected = {'data': {'getTask': {'id': random.randint(1, 100)}}} + expected = {"data": {"getTask": {"id": random.randint(1, 100)}}} query = { bc.fake.slug(): bc.fake.slug(), bc.fake.slug(): bc.fake.slug(), @@ -218,71 +249,72 @@ def test_no_tasks(bc: Breathecode, client: APIClient): mock = MagicMock() mock.raw = iter([json.dumps(expected).encode()]) - mock.headers = {'Content-Type': 'application/json'} + mock.headers = {"Content-Type": "application/json"} code = random.randint(200, 299) mock.status_code = code - mock.reason = 'OK' - - task = {'github_url': bc.fake.url()} - model = bc.database.create(profile_academy=1, - role=1, - capability='read_assignment', - app={ - 'slug': 'rigobot', - 'app_url': bc.fake.url() - }) + mock.reason = "OK" + + task = {"github_url": bc.fake.url()} + model = bc.database.create( + profile_academy=1, role=1, capability="read_assignment", app={"slug": "rigobot", "app_url": bc.fake.url()} + ) client.force_authenticate(model.user) - url = reverse_lazy('assignments:academy_task_id_coderevision', kwargs={'task_id': 1 - }) + '?' + bc.format.querystring(query) + url = ( + reverse_lazy("assignments:academy_task_id_coderevision", kwargs={"task_id": 1}) + + "?" 
+ + bc.format.querystring(query) + ) - with patch.multiple('linked_services.core.service.Service', - __init__=MagicMock(return_value=None), - get=MagicMock(return_value=mock)): - response = client.get(url, headers={'Academy': 1}) + with patch.multiple( + "linked_services.core.service.Service", __init__=MagicMock(return_value=None), get=MagicMock(return_value=mock) + ): + response = client.get(url, headers={"Academy": 1}) bc.check.calls(Service.get.call_args_list, []) - assert response.getvalue().decode('utf-8') == '{"detail":"task-not-found","status_code":404}' + assert response.getvalue().decode("utf-8") == '{"detail":"task-not-found","status_code":404}' assert response.status_code == 404 - assert bc.database.list_of('assignments.Task') == [] + assert bc.database.list_of("assignments.Task") == [] # When: raise an exception # Then: response 200 def test_raise_an_exception(bc: Breathecode, client: APIClient, get_exc): - expected = {'detail': get_exc['slug'], 'status_code': get_exc['code']} + expected = {"detail": get_exc["slug"], "status_code": get_exc["code"]} query = { bc.fake.slug(): bc.fake.slug(), bc.fake.slug(): bc.fake.slug(), bc.fake.slug(): bc.fake.slug(), } - task = {'github_url': bc.fake.url()} - model = bc.database.create(profile_academy=1, - task=task, - role=1, - capability='read_assignment', - app={ - 'slug': 'rigobot', - 'app_url': bc.fake.url() - }) + task = {"github_url": bc.fake.url()} + model = bc.database.create( + profile_academy=1, + task=task, + role=1, + capability="read_assignment", + app={"slug": "rigobot", "app_url": bc.fake.url()}, + ) client.force_authenticate(model.user) - url = reverse_lazy('assignments:academy_task_id_coderevision', kwargs={'task_id': 1 - }) + '?' + bc.format.querystring(query) + url = ( + reverse_lazy("assignments:academy_task_id_coderevision", kwargs={"task_id": 1}) + + "?" + + bc.format.querystring(query) + ) - response = client.get(url, query, format='json', headers={'Academy': 1}) + response = client.get(url, query, format="json", headers={"Academy": 1}) json = response.json() assert json == expected - assert response.status_code == get_exc['code'] - assert bc.database.list_of('assignments.Task') == [bc.format.to_dict(model.task)] + assert response.status_code == get_exc["code"] + assert bc.database.list_of("assignments.Task") == [bc.format.to_dict(model.task)] # When: auth # Then: response 200 def test_auth(bc: Breathecode, client: APIClient, patch_get, get_jwt): - expected = {'data': {'getTask': {'id': random.randint(1, 100)}}} + expected = {"data": {"getTask": {"id": random.randint(1, 100)}}} query = { bc.fake.slug(): bc.fake.slug(), bc.fake.slug(): bc.fake.slug(), @@ -290,39 +322,43 @@ def test_auth(bc: Breathecode, client: APIClient, patch_get, get_jwt): } code = random.randint(200, 299) - headers = {'Content-Type': 'application/json'} + headers = {"Content-Type": "application/json"} patch_get(expected, code, headers) - task = {'github_url': bc.fake.url()} - model = bc.database.create(profile_academy=1, - task=task, - role=1, - capability='read_assignment', - app={ - 'slug': 'rigobot', - 'app_url': bc.fake.url() - }) + task = {"github_url": bc.fake.url()} + model = bc.database.create( + profile_academy=1, + task=task, + role=1, + capability="read_assignment", + app={"slug": "rigobot", "app_url": bc.fake.url()}, + ) client.force_authenticate(model.user) - url = reverse_lazy('assignments:academy_task_id_coderevision', kwargs={'task_id': 1 - }) + '?' 
+ bc.format.querystring(query) + url = ( + reverse_lazy("assignments:academy_task_id_coderevision", kwargs={"task_id": 1}) + + "?" + + bc.format.querystring(query) + ) - response = client.get(url, headers={'Academy': 1}) + response = client.get(url, headers={"Academy": 1}) json = response.json() assert aiohttp.ClientSession.get.call_args_list == [ - call(f'{model.app.app_url}/v1/finetuning/coderevision', - params={ - **query, - 'repo': model.task.github_url, - }, - headers={'Authorization': f'Link App=breathecode,Token={get_jwt}'}) + call( + f"{model.app.app_url}/v1/finetuning/coderevision", + params={ + **query, + "repo": model.task.github_url, + }, + headers={"Authorization": f"Link App=breathecode,Token={get_jwt}"}, + ) ] assert json == expected assert response.status_code == code - assert bc.database.list_of('assignments.Task') == [bc.format.to_dict(model.task)] + assert bc.database.list_of("assignments.Task") == [bc.format.to_dict(model.task)] # When: no capability @@ -332,85 +368,80 @@ def test_post_no_capability(bc: Breathecode, client: APIClient): client.force_authenticate(model.user) - url = reverse_lazy('assignments:academy_task_id_coderevision', kwargs={'task_id': 1}) - response = client.post(url, headers={'academy': 1}) + url = reverse_lazy("assignments:academy_task_id_coderevision", kwargs={"task_id": 1}) + response = client.post(url, headers={"academy": 1}) json = response.json() expected = { - 'detail': 'You (user: 1) don\'t have this capability: crud_assignment for academy 1', - 'status_code': 403, + "detail": "You (user: 1) don't have this capability: crud_assignment for academy 1", + "status_code": 403, } assert json == expected assert response.status_code == status.HTTP_403_FORBIDDEN - assert bc.database.list_of('assignments.Task') == [] + assert bc.database.list_of("assignments.Task") == [] # When: raise an exception # Then: response 200 def test__post__raise_an_exception(bc: Breathecode, client: APIClient, post_exc): - expected = {'detail': post_exc['slug'], 'status_code': post_exc['code']} + expected = {"detail": post_exc["slug"], "status_code": post_exc["code"]} query = { bc.fake.slug(): bc.fake.slug(), bc.fake.slug(): bc.fake.slug(), bc.fake.slug(): bc.fake.slug(), } - task = {'github_url': bc.fake.url()} - model = bc.database.create(profile_academy=1, - task=task, - role=1, - capability='crud_assignment', - app={ - 'slug': 'rigobot', - 'app_url': bc.fake.url() - }) + task = {"github_url": bc.fake.url()} + model = bc.database.create( + profile_academy=1, + task=task, + role=1, + capability="crud_assignment", + app={"slug": "rigobot", "app_url": bc.fake.url()}, + ) client.force_authenticate(model.user) - url = reverse_lazy('assignments:academy_task_id_coderevision', kwargs={'task_id': 1}) + url = reverse_lazy("assignments:academy_task_id_coderevision", kwargs={"task_id": 1}) - response = client.post(url, query, format='json', headers={'Academy': 1}) + response = client.post(url, query, format="json", headers={"Academy": 1}) json = response.json() assert json == expected - assert response.status_code == post_exc['code'] - assert bc.database.list_of('assignments.Task') == [bc.format.to_dict(model.task)] + assert response.status_code == post_exc["code"] + assert bc.database.list_of("assignments.Task") == [bc.format.to_dict(model.task)] # When: auth # Then: response 200 def test_not_found(bc: Breathecode, client: APIClient, patch_post, get_jwt): - expected = {'data': {'getTask': {'id': random.randint(1, 100)}}} + expected = {"data": {"getTask": {"id": random.randint(1, 
100)}}} query = { bc.fake.slug(): bc.fake.slug(), bc.fake.slug(): bc.fake.slug(), bc.fake.slug(): bc.fake.slug(), } - headers = {'Content-Type': 'application/json'} + headers = {"Content-Type": "application/json"} code = random.randint(200, 299) - model = bc.database.create(profile_academy=1, - role=1, - capability='crud_assignment', - app={ - 'slug': 'rigobot', - 'app_url': bc.fake.url() - }) + model = bc.database.create( + profile_academy=1, role=1, capability="crud_assignment", app={"slug": "rigobot", "app_url": bc.fake.url()} + ) client.force_authenticate(model.user) - url = reverse_lazy('assignments:academy_task_id_coderevision', kwargs={'task_id': 1}) + url = reverse_lazy("assignments:academy_task_id_coderevision", kwargs={"task_id": 1}) patch_post(expected, code, headers) - response = client.post(url, query, headers={'academy': 1}, format='json') + response = client.post(url, query, headers={"academy": 1}, format="json") json = response.json() assert aiohttp.ClientSession.post.call_args_list == [] - assert json == {'detail': 'task-not-found', 'status_code': 404} + assert json == {"detail": "task-not-found", "status_code": 404} assert response.status_code == 404 - assert bc.database.list_of('assignments.Task') == [] + assert bc.database.list_of("assignments.Task") == [] # When: auth @@ -418,44 +449,45 @@ def test_not_found(bc: Breathecode, client: APIClient, patch_post, get_jwt): def test_post_auth(bc: Breathecode, client: APIClient, patch_post, get_jwt): # bc.request.set_headers(academy=1) - expected = {'data': {'getTask': {'id': random.randint(1, 100)}}} + expected = {"data": {"getTask": {"id": random.randint(1, 100)}}} query = { bc.fake.slug(): bc.fake.slug(), bc.fake.slug(): bc.fake.slug(), bc.fake.slug(): bc.fake.slug(), } - headers = {'Content-Type': 'application/json'} + headers = {"Content-Type": "application/json"} code = random.randint(200, 299) - task = {'github_url': bc.fake.url()} - model = bc.database.create(profile_academy=1, - task=task, - role=1, - capability='crud_assignment', - app={ - 'slug': 'rigobot', - 'app_url': bc.fake.url() - }) + task = {"github_url": bc.fake.url()} + model = bc.database.create( + profile_academy=1, + task=task, + role=1, + capability="crud_assignment", + app={"slug": "rigobot", "app_url": bc.fake.url()}, + ) client.force_authenticate(model.user) - url = reverse_lazy('assignments:academy_task_id_coderevision', kwargs={'task_id': 1}) + url = reverse_lazy("assignments:academy_task_id_coderevision", kwargs={"task_id": 1}) patch_post(expected, code, headers) - response = client.post(url, query, headers={'academy': 1}, format='json') + response = client.post(url, query, headers={"academy": 1}, format="json") json = response.json() assert aiohttp.ClientSession.post.call_args_list == [ - call(f'{model.app.app_url}/v1/finetuning/coderevision', - data=query, - json=None, - params={ - 'repo': model.task.github_url, - }, - headers={'Authorization': f'Link App=breathecode,Token={get_jwt}'}) + call( + f"{model.app.app_url}/v1/finetuning/coderevision", + data=query, + json=None, + params={ + "repo": model.task.github_url, + }, + headers={"Authorization": f"Link App=breathecode,Token={get_jwt}"}, + ) ] assert json == expected assert response.status_code == code - assert bc.database.list_of('assignments.Task') == [bc.format.to_dict(model.task)] + assert bc.database.list_of("assignments.Task") == [bc.format.to_dict(model.task)] diff --git a/breathecode/assignments/tests/urls/tests_academy_task_id_commitfile.py 
b/breathecode/assignments/tests/urls/tests_academy_task_id_commitfile.py index 7f0aa163a..c41ee28d8 100644 --- a/breathecode/assignments/tests/urls/tests_academy_task_id_commitfile.py +++ b/breathecode/assignments/tests/urls/tests_academy_task_id_commitfile.py @@ -1,6 +1,7 @@ """ Test /answer """ + import json import random from unittest.mock import MagicMock, call @@ -57,7 +58,7 @@ def patch_get(monkeypatch): def handler(expected, code, headers): reader = StreamReaderMock(json.dumps(expected).encode()) - monkeypatch.setattr('aiohttp.ClientSession.get', MagicMock(return_value=ResponseMock(reader, code, headers))) + monkeypatch.setattr("aiohttp.ClientSession.get", MagicMock(return_value=ResponseMock(reader, code, headers))) yield handler @@ -65,16 +66,31 @@ def handler(expected, code, headers): @pytest.fixture def get_jwt(bc: Breathecode, monkeypatch): token = bc.random.string(lower=True, upper=True, symbol=True, number=True, size=20) - monkeypatch.setattr('linked_services.django.actions.get_jwt', MagicMock(return_value=token)) + monkeypatch.setattr("linked_services.django.actions.get_jwt", MagicMock(return_value=token)) yield token -@pytest.fixture(params=[ - ('linked_services.core.service.Service.__aenter__', Exception, 'App rigobot not found', 'app-not-found', 404, True), - ('linked_services.core.service.Service.__aenter__', SynchronousOnlyOperation, - 'Async is not supported by the worker', 'no-async-support', 500, True), - ('aiohttp.ClientSession.get', Exception, 'random exc', 'unexpected-error', 500, False), -]) +@pytest.fixture( + params=[ + ( + "linked_services.core.service.Service.__aenter__", + Exception, + "App rigobot not found", + "app-not-found", + 404, + True, + ), + ( + "linked_services.core.service.Service.__aenter__", + SynchronousOnlyOperation, + "Async is not supported by the worker", + "no-async-support", + 500, + True, + ), + ("aiohttp.ClientSession.get", Exception, "random exc", "unexpected-error", 500, False), + ] +) def get_exc(request, monkeypatch): path, exc, message, slug, code, is_async = request.param if is_async: @@ -109,23 +125,23 @@ async def async_exc_mock(message): monkeypatch.setattr(path, ContextMock) yield { - 'slug': slug, - 'code': code, + "slug": slug, + "code": code, } # When: no auth # Then: response 401 def test_no_auth(bc: Breathecode, client: APIClient): - url = reverse_lazy('assignments:academy_task_id_commitfile', kwargs={'task_id': 1}) + url = reverse_lazy("assignments:academy_task_id_commitfile", kwargs={"task_id": 1}) response = client.get(url) json = response.json() - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} assert json == expected assert response.status_code == status.HTTP_401_UNAUTHORIZED - assert bc.database.list_of('assignments.Task') == [] + assert bc.database.list_of("assignments.Task") == [] # When: no capability @@ -135,86 +151,87 @@ def test_no_capability(bc: Breathecode, client: APIClient): client.force_authenticate(model.user) - url = reverse_lazy('assignments:academy_task_id_commitfile', kwargs={'task_id': 1}) - response = client.get(url, headers={'academy': 1, 'accept': 'application/json'}) + url = reverse_lazy("assignments:academy_task_id_commitfile", kwargs={"task_id": 1}) + response = client.get(url, headers={"academy": 1, "accept": "application/json"}) json = response.json() expected = { - 'detail': 'You (user: 1) don\'t have this capability: read_assignment for academy 1', - 
'status_code': 403, + "detail": "You (user: 1) don't have this capability: read_assignment for academy 1", + "status_code": 403, } assert json == expected assert response.status_code == status.HTTP_403_FORBIDDEN - assert bc.database.list_of('assignments.Task') == [] + assert bc.database.list_of("assignments.Task") == [] # When: raise an exception # Then: response 200 def test_raise_an_exception(bc: Breathecode, client: APIClient, get_exc): - expected = {'detail': get_exc['slug'], 'status_code': get_exc['code']} + expected = {"detail": get_exc["slug"], "status_code": get_exc["code"]} query = { bc.fake.slug(): bc.fake.slug(), bc.fake.slug(): bc.fake.slug(), bc.fake.slug(): bc.fake.slug(), } - task = {'github_url': bc.fake.url()} - model = bc.database.create(profile_academy=1, - task=task, - role=1, - capability='read_assignment', - app={ - 'slug': 'rigobot', - 'app_url': bc.fake.url() - }) + task = {"github_url": bc.fake.url()} + model = bc.database.create( + profile_academy=1, + task=task, + role=1, + capability="read_assignment", + app={"slug": "rigobot", "app_url": bc.fake.url()}, + ) client.force_authenticate(model.user) - url = reverse_lazy('assignments:academy_task_id_commitfile', kwargs={'task_id': 1 - }) + '?' + bc.format.querystring(query) + url = ( + reverse_lazy("assignments:academy_task_id_commitfile", kwargs={"task_id": 1}) + + "?" + + bc.format.querystring(query) + ) - response = client.get(url, query, format='json', headers={'Academy': 1, 'accept': 'application/json'}) + response = client.get(url, query, format="json", headers={"Academy": 1, "accept": "application/json"}) json = response.json() assert json == expected - assert response.status_code == get_exc['code'] - assert bc.database.list_of('assignments.Task') == [bc.format.to_dict(model.task)] + assert response.status_code == get_exc["code"] + assert bc.database.list_of("assignments.Task") == [bc.format.to_dict(model.task)] # When: auth # Then: response 200 def test_not_found(bc: Breathecode, client: APIClient, patch_get, get_jwt): - expected = {'data': {'getTask': {'id': random.randint(1, 100)}}} + expected = {"data": {"getTask": {"id": random.randint(1, 100)}}} query = { bc.fake.slug(): bc.fake.slug(), bc.fake.slug(): bc.fake.slug(), bc.fake.slug(): bc.fake.slug(), } - headers = {'Content-Type': 'application/json'} + headers = {"Content-Type": "application/json"} code = random.randint(200, 299) - model = bc.database.create(profile_academy=1, - role=1, - capability='read_assignment', - app={ - 'slug': 'rigobot', - 'app_url': bc.fake.url() - }) + model = bc.database.create( + profile_academy=1, role=1, capability="read_assignment", app={"slug": "rigobot", "app_url": bc.fake.url()} + ) client.force_authenticate(model.user) - url = reverse_lazy('assignments:academy_task_id_commitfile', kwargs={'task_id': 1 - }) + '?' + bc.format.querystring(query) + url = ( + reverse_lazy("assignments:academy_task_id_commitfile", kwargs={"task_id": 1}) + + "?" 
+ + bc.format.querystring(query) + ) patch_get(expected, code, headers) - response = client.get(url, headers={'academy': 1, 'accept': 'application/json'}) + response = client.get(url, headers={"academy": 1, "accept": "application/json"}) json = response.json() assert aiohttp.ClientSession.get.call_args_list == [] - assert json == {'detail': 'task-not-found', 'status_code': 404} + assert json == {"detail": "task-not-found", "status_code": 404} assert response.status_code == 404 - assert bc.database.list_of('assignments.Task') == [] + assert bc.database.list_of("assignments.Task") == [] # When: auth @@ -222,43 +239,47 @@ def test_not_found(bc: Breathecode, client: APIClient, patch_get, get_jwt): def test_auth(bc: Breathecode, client: APIClient, patch_get, get_jwt): # bc.request.set_headers(academy=1) - expected = {'data': {'getTask': {'id': random.randint(1, 100)}}} + expected = {"data": {"getTask": {"id": random.randint(1, 100)}}} query = { bc.fake.slug(): bc.fake.slug(), bc.fake.slug(): bc.fake.slug(), bc.fake.slug(): bc.fake.slug(), } - headers = {'Content-Type': 'application/json'} + headers = {"Content-Type": "application/json"} code = random.randint(200, 299) - task = {'github_url': bc.fake.url()} - model = bc.database.create(profile_academy=1, - task=task, - role=1, - capability='read_assignment', - app={ - 'slug': 'rigobot', - 'app_url': bc.fake.url() - }) + task = {"github_url": bc.fake.url()} + model = bc.database.create( + profile_academy=1, + task=task, + role=1, + capability="read_assignment", + app={"slug": "rigobot", "app_url": bc.fake.url()}, + ) client.force_authenticate(model.user) - url = reverse_lazy('assignments:academy_task_id_commitfile', kwargs={'task_id': 1 - }) + '?' + bc.format.querystring(query) + url = ( + reverse_lazy("assignments:academy_task_id_commitfile", kwargs={"task_id": 1}) + + "?" 
+ + bc.format.querystring(query) + ) patch_get(expected, code, headers) - response = client.get(url, headers={'academy': 1, 'accept': 'application/json'}) + response = client.get(url, headers={"academy": 1, "accept": "application/json"}) json = response.json() assert aiohttp.ClientSession.get.call_args_list == [ - call(f'{model.app.app_url}/v1/finetuning/commitfile', - params={ - **query, - 'repo': model.task.github_url, - }, - headers={'Authorization': f'Link App=breathecode,Token={get_jwt}'}) + call( + f"{model.app.app_url}/v1/finetuning/commitfile", + params={ + **query, + "repo": model.task.github_url, + }, + headers={"Authorization": f"Link App=breathecode,Token={get_jwt}"}, + ) ] assert json == expected assert response.status_code == code - assert bc.database.list_of('assignments.Task') == [bc.format.to_dict(model.task)] + assert bc.database.list_of("assignments.Task") == [bc.format.to_dict(model.task)] diff --git a/breathecode/assignments/tests/urls/tests_final_project_cohort.py b/breathecode/assignments/tests/urls/tests_final_project_cohort.py index 08d264fc0..aa0b3cbbd 100644 --- a/breathecode/assignments/tests/urls/tests_final_project_cohort.py +++ b/breathecode/assignments/tests/urls/tests_final_project_cohort.py @@ -1,6 +1,7 @@ """ Test /academy/cohort//final_project """ + import pytest from django.urls.base import reverse_lazy from linked_services.django.actions import reset_app_cache @@ -17,54 +18,56 @@ def setup(db): def get_serializer(final_project, data={}): return { - 'id': final_project.id, - 'repo_owner': { - 'id': final_project.repo_owner.id, - 'first_name': final_project.repo_owner.first_name, - 'last_name': final_project.repo_owner.last_name, - } if final_project.repo_owner else None, - 'name': final_project.name, - 'one_line_desc': final_project.one_line_desc, - 'description': final_project.description, - 'project_status': final_project.project_status, - 'revision_status': final_project.revision_status, - 'visibility_status': final_project.visibility_status, - 'repo_url': final_project.repo_url, - 'public_url': final_project.public_url, - 'logo_url': final_project.logo_url, - 'screenshot': final_project.screenshot, - 'slides_url': final_project.slides_url, - 'video_demo_url': final_project.video_demo_url, - 'cohort': { - 'id': final_project.cohort.id, - 'name': final_project.cohort.name, - 'slug': final_project.cohort.slug - } if final_project.cohort else None, - 'created_at': final_project.created_at, - 'updated_at': final_project.updated_at, - 'members': [members_serializer(member) for member in final_project.members.all()], - **data + "id": final_project.id, + "repo_owner": ( + { + "id": final_project.repo_owner.id, + "first_name": final_project.repo_owner.first_name, + "last_name": final_project.repo_owner.last_name, + } + if final_project.repo_owner + else None + ), + "name": final_project.name, + "one_line_desc": final_project.one_line_desc, + "description": final_project.description, + "project_status": final_project.project_status, + "revision_status": final_project.revision_status, + "visibility_status": final_project.visibility_status, + "repo_url": final_project.repo_url, + "public_url": final_project.public_url, + "logo_url": final_project.logo_url, + "screenshot": final_project.screenshot, + "slides_url": final_project.slides_url, + "video_demo_url": final_project.video_demo_url, + "cohort": ( + {"id": final_project.cohort.id, "name": final_project.cohort.name, "slug": final_project.cohort.slug} + if final_project.cohort + else None + ), + "created_at": 
final_project.created_at, + "updated_at": final_project.updated_at, + "members": [members_serializer(member) for member in final_project.members.all()], + **data, } def members_serializer(member, data={}): return { - 'id': member.id, - 'first_name': member.first_name, - 'last_name': member.last_name, - 'profile': { - 'avatar_url': member.profile.avatar_url - } if member.profile is not None else None + "id": member.id, + "first_name": member.first_name, + "last_name": member.last_name, + "profile": {"avatar_url": member.profile.avatar_url} if member.profile is not None else None, } def test_not_authenticated(bc: Breathecode, client: APIClient): - url = reverse_lazy('assignments:final_project_cohort', kwargs={'cohort_id': 1}) + url = reverse_lazy("assignments:final_project_cohort", kwargs={"cohort_id": 1}) - response = client.get(url, headers={'academy': 1}) + response = client.get(url, headers={"academy": 1}) - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} json = response.json() assert json == expected @@ -73,14 +76,14 @@ def test_not_authenticated(bc: Breathecode, client: APIClient): def test_no_capability(bc: Breathecode, client: APIClient): - url = reverse_lazy('assignments:final_project_cohort', kwargs={'cohort_id': 1}) + url = reverse_lazy("assignments:final_project_cohort", kwargs={"cohort_id": 1}) model = bc.database.create(user=1) client.force_authenticate(model.user) - response = client.get(url, headers={'academy': 1}) + response = client.get(url, headers={"academy": 1}) - expected = {'detail': "You (user: 1) don't have this capability: read_assignment for academy 1", 'status_code': 403} + expected = {"detail": "You (user: 1) don't have this capability: read_assignment for academy 1", "status_code": 403} json = response.json() assert json == expected @@ -92,14 +95,14 @@ def test_cohort_not_found(bc: Breathecode, client: APIClient): model = bc.database.create( profile_academy=1, role=1, - capability='read_assignment', + capability="read_assignment", ) client.force_authenticate(model.user) - url = reverse_lazy('assignments:final_project_cohort', kwargs={'cohort_id': 2}) - response = client.get(url, headers={'academy': 1}) + url = reverse_lazy("assignments:final_project_cohort", kwargs={"cohort_id": 2}) + response = client.get(url, headers={"academy": 1}) - expected = {'detail': 'cohort-not-found', 'status_code': 404} + expected = {"detail": "cohort-not-found", "status_code": 404} json = response.json() assert expected == json @@ -112,12 +115,12 @@ def test_with_no_projects(bc: Breathecode, client: APIClient): profile_academy=1, role=1, cohort=1, - capability='read_assignment', + capability="read_assignment", ) client.force_authenticate(model.user) - url = reverse_lazy('assignments:final_project_cohort', kwargs={'cohort_id': 1}) - response = client.get(url, headers={'academy': 1}) + url = reverse_lazy("assignments:final_project_cohort", kwargs={"cohort_id": 1}) + response = client.get(url, headers={"academy": 1}) expected = [] json = response.json() @@ -132,23 +135,27 @@ def test_with_projects(bc: Breathecode, client: APIClient): cohort=1, profile_academy=1, role=1, - capability='read_assignment', + capability="read_assignment", ) - model = bc.database.create(final_project={'cohort': model_cohort.cohort}, ) + model = bc.database.create( + final_project={"cohort": model_cohort.cohort}, + ) client.force_authenticate(model_cohort.user) - url = 
reverse_lazy('assignments:final_project_cohort', kwargs={'cohort_id': 1}) - response = client.get(url, headers={'academy': 1}) + url = reverse_lazy("assignments:final_project_cohort", kwargs={"cohort_id": 1}) + response = client.get(url, headers={"academy": 1}) project = model.final_project expected = [ get_serializer( - project, { - 'created_at': bc.datetime.to_iso_string(project.created_at), - 'updated_at': bc.datetime.to_iso_string(project.updated_at), - }) + project, + { + "created_at": bc.datetime.to_iso_string(project.created_at), + "updated_at": bc.datetime.to_iso_string(project.updated_at), + }, + ) ] json = response.json() diff --git a/breathecode/assignments/tests/urls/tests_final_project_cohort_update.py b/breathecode/assignments/tests/urls/tests_final_project_cohort_update.py index f2581c8f3..81fd8c81f 100644 --- a/breathecode/assignments/tests/urls/tests_final_project_cohort_update.py +++ b/breathecode/assignments/tests/urls/tests_final_project_cohort_update.py @@ -1,6 +1,7 @@ """ Test /academy/cohort//final_project """ + import pytest from django.urls.base import reverse_lazy from linked_services.django.actions import reset_app_cache @@ -17,34 +18,34 @@ def setup(db): def put_serializer(project, data={}): return { - 'id': project.id, - 'logo_url': project.logo_url, - 'name': project.name, - 'one_line_desc': project.one_line_desc, - 'public_url': project.public_url, - 'cohort': project.cohort.id if project.cohort else None, - 'created_at': project.created_at, - 'updated_at': project.created_at, - 'description': project.description, - 'screenshot': project.screenshot, - 'repo_url': project.repo_url, - 'slides_url': project.slides_url, - 'video_demo_url': project.video_demo_url, - 'visibility_status': project.visibility_status, - 'revision_status': project.revision_status, - 'project_status': project.project_status, - 'revision_message': project.revision_message, + "id": project.id, + "logo_url": project.logo_url, + "name": project.name, + "one_line_desc": project.one_line_desc, + "public_url": project.public_url, + "cohort": project.cohort.id if project.cohort else None, + "created_at": project.created_at, + "updated_at": project.created_at, + "description": project.description, + "screenshot": project.screenshot, + "repo_url": project.repo_url, + "slides_url": project.slides_url, + "video_demo_url": project.video_demo_url, + "visibility_status": project.visibility_status, + "revision_status": project.revision_status, + "project_status": project.project_status, + "revision_message": project.revision_message, **data, } def test_not_authenticated(bc: Breathecode, client: APIClient): - url = reverse_lazy('assignments:final_project_cohort_update', kwargs={'cohort_id': 1, 'final_project_id': 1}) + url = reverse_lazy("assignments:final_project_cohort_update", kwargs={"cohort_id": 1, "final_project_id": 1}) - response = client.put(url, headers={'academy': 1}) + response = client.put(url, headers={"academy": 1}) - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} json = response.json() assert json == expected @@ -53,14 +54,14 @@ def test_not_authenticated(bc: Breathecode, client: APIClient): def test_no_capability(bc: Breathecode, client: APIClient): - url = reverse_lazy('assignments:final_project_cohort_update', kwargs={'cohort_id': 1, 'final_project_id': 1}) + url = reverse_lazy("assignments:final_project_cohort_update", kwargs={"cohort_id": 1, 
"final_project_id": 1}) model = bc.database.create(user=1) client.force_authenticate(model.user) - response = client.put(url, headers={'academy': 1}) + response = client.put(url, headers={"academy": 1}) - expected = {'detail': "You (user: 1) don't have this capability: crud_assignment for academy 1", 'status_code': 403} + expected = {"detail": "You (user: 1) don't have this capability: crud_assignment for academy 1", "status_code": 403} json = response.json() assert json == expected @@ -72,14 +73,14 @@ def test_cohort_not_found(bc: Breathecode, client: APIClient): model = bc.database.create( profile_academy=1, role=1, - capability='crud_assignment', + capability="crud_assignment", ) client.force_authenticate(model.user) - url = reverse_lazy('assignments:final_project_cohort_update', kwargs={'cohort_id': 2, 'final_project_id': 1}) - response = client.put(url, headers={'academy': 1}) + url = reverse_lazy("assignments:final_project_cohort_update", kwargs={"cohort_id": 2, "final_project_id": 1}) + response = client.put(url, headers={"academy": 1}) - expected = {'detail': 'cohort-not-found', 'status_code': 404} + expected = {"detail": "cohort-not-found", "status_code": 404} json = response.json() assert expected == json @@ -91,14 +92,14 @@ def test_project_not_found(bc: Breathecode, client: APIClient): model = bc.database.create( profile_academy=1, role=1, - capability='crud_assignment', + capability="crud_assignment", ) client.force_authenticate(model.user) - url = reverse_lazy('assignments:final_project_cohort_update', kwargs={'cohort_id': 1, 'final_project_id': 1}) - response = client.put(url, headers={'academy': 1}) + url = reverse_lazy("assignments:final_project_cohort_update", kwargs={"cohort_id": 1, "final_project_id": 1}) + response = client.put(url, headers={"academy": 1}) - expected = {'detail': 'project-not-found', 'status_code': 404} + expected = {"detail": "project-not-found", "status_code": 404} json = response.json() assert expected == json @@ -108,26 +109,28 @@ def test_project_not_found(bc: Breathecode, client: APIClient): def test_put_undone_project(bc: Breathecode, client: APIClient): model = bc.database.create( - cohort_user={'role': 'STUDENT'}, + cohort_user={"role": "STUDENT"}, cohort=1, profile_academy=1, role=1, - capability='crud_assignment', + capability="crud_assignment", user=1, - final_project={'members': [1]}, + final_project={"members": [1]}, ) client.force_authenticate(model.user) - url = reverse_lazy('assignments:final_project_cohort_update', kwargs={'cohort_id': 1, 'final_project_id': 1}) - response = client.put(url, - headers={'academy': 1}, - data={ - 'revision_status': 'APPROVED', - 'members': [1], - 'cohort': 1, - }) + url = reverse_lazy("assignments:final_project_cohort_update", kwargs={"cohort_id": 1, "final_project_id": 1}) + response = client.put( + url, + headers={"academy": 1}, + data={ + "revision_status": "APPROVED", + "members": [1], + "cohort": 1, + }, + ) - expected = {'detail': 'project-marked-approved-when-pending', 'status_code': 400} + expected = {"detail": "project-marked-approved-when-pending", "status_code": 400} json = response.json() assert expected == json @@ -140,31 +143,29 @@ def test_no_github_members(bc: Breathecode, client: APIClient): cohort=1, profile_academy=1, role=1, - capability='crud_assignment', + capability="crud_assignment", ) - cohort_user_model = bc.database.create(cohort_user={'cohort': model_cohort.cohort, 'role': 'STUDENT'}) + cohort_user_model = bc.database.create(cohort_user={"cohort": model_cohort.cohort, "role": 
"STUDENT"}) model = bc.database.create( user=1, - final_project={ - 'project_status': 'DONE', - 'cohort': model_cohort.cohort, - 'members': [cohort_user_model.user] - }, + final_project={"project_status": "DONE", "cohort": model_cohort.cohort, "members": [cohort_user_model.user]}, ) client.force_authenticate(model_cohort.user) - url = reverse_lazy('assignments:final_project_cohort_update', kwargs={'cohort_id': 1, 'final_project_id': 1}) - response = client.put(url, - headers={'academy': 1}, - data={ - 'project_status': 'PENDING', - 'members': [2], - 'cohort': 1, - }) + url = reverse_lazy("assignments:final_project_cohort_update", kwargs={"cohort_id": 1, "final_project_id": 1}) + response = client.put( + url, + headers={"academy": 1}, + data={ + "project_status": "PENDING", + "members": [2], + "cohort": 1, + }, + ) - expected = {'detail': 'put-project-property-from-none-members', 'status_code': 400} + expected = {"detail": "put-project-property-from-none-members", "status_code": 400} json = response.json() assert expected == json @@ -177,35 +178,34 @@ def test_put_project(bc: Breathecode, client: APIClient): cohort=1, profile_academy=1, role=1, - capability='crud_assignment', - cohort_user=[{ - 'user_id': n + 1, - 'role': 'STUDENT' - } for n in range(2)], + capability="crud_assignment", + cohort_user=[{"user_id": n + 1, "role": "STUDENT"} for n in range(2)], final_project={ - 'members': [1], - 'project_status': 'DONE', + "members": [1], + "project_status": "DONE", }, ) client.force_authenticate(model.user[0]) - url = reverse_lazy('assignments:final_project_cohort_update', kwargs={'cohort_id': 1, 'final_project_id': 1}) + url = reverse_lazy("assignments:final_project_cohort_update", kwargs={"cohort_id": 1, "final_project_id": 1}) payload = { - 'project_status': 'DONE', - 'revision_status': 'APPROVED', - 'members': [2], - 'cohort': 1, + "project_status": "DONE", + "revision_status": "APPROVED", + "members": [2], + "cohort": 1, } - response = client.put(url, headers={'academy': 1}, data=payload) + response = client.put(url, headers={"academy": 1}, data=payload) json = response.json() final_project = model.final_project - expected = put_serializer(final_project, - data={ - 'created_at': bc.datetime.to_iso_string(final_project.created_at), - 'updated_at': json['updated_at'], - **payload - }) + expected = put_serializer( + final_project, + data={ + "created_at": bc.datetime.to_iso_string(final_project.created_at), + "updated_at": json["updated_at"], + **payload, + }, + ) assert expected == json assert response.status_code == 200 diff --git a/breathecode/assignments/tests/urls/tests_me_coderevision.py b/breathecode/assignments/tests/urls/tests_me_coderevision.py index 5efbf6b7f..542e191da 100644 --- a/breathecode/assignments/tests/urls/tests_me_coderevision.py +++ b/breathecode/assignments/tests/urls/tests_me_coderevision.py @@ -1,6 +1,7 @@ """ Test /answer """ + import json import random from unittest.mock import MagicMock, call, patch @@ -58,7 +59,7 @@ def patch_get(monkeypatch): def handler(expected, code, headers): reader = StreamReaderMock(json.dumps(expected).encode()) - monkeypatch.setattr('aiohttp.ClientSession.get', MagicMock(return_value=ResponseMock(reader, code, headers))) + monkeypatch.setattr("aiohttp.ClientSession.get", MagicMock(return_value=ResponseMock(reader, code, headers))) yield handler @@ -66,21 +67,21 @@ def handler(expected, code, headers): # When: no auth # Then: response 401 def test_no_auth(bc: Breathecode, client: APIClient): - url = 
reverse_lazy('assignments:me_coderevision') + url = reverse_lazy("assignments:me_coderevision") response = client.get(url) json = response.json() - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} assert json == expected assert response.status_code == status.HTTP_401_UNAUTHORIZED - assert bc.database.list_of('assignments.Task') == [] + assert bc.database.list_of("assignments.Task") == [] # When: no github account # Then: response 200 def test__no_github_account(bc: Breathecode, client: APIClient, patch_get): - expected = {'data': {'getTask': {'id': random.randint(1, 100)}}} + expected = {"data": {"getTask": {"id": random.randint(1, 100)}}} query = { bc.fake.slug(): bc.fake.slug(), bc.fake.slug(): bc.fake.slug(), @@ -88,28 +89,28 @@ def test__no_github_account(bc: Breathecode, client: APIClient, patch_get): } code = random.randint(200, 299) - headers = {'Content-Type': 'application/json'} + headers = {"Content-Type": "application/json"} patch_get(expected, code, headers) - task = {'github_url': bc.fake.url()} + task = {"github_url": bc.fake.url()} model = bc.database.create(profile_academy=1, task=task) client.force_authenticate(model.user) - url = reverse_lazy('assignments:me_coderevision') + '?' + bc.format.querystring(query) + url = reverse_lazy("assignments:me_coderevision") + "?" + bc.format.querystring(query) response = client.get(url) assert ClientSession.get.call_args_list == [] - assert response.getvalue().decode('utf-8') == '{"detail":"github-account-not-connected","status_code":400}' + assert response.getvalue().decode("utf-8") == '{"detail":"github-account-not-connected","status_code":400}' assert response.status_code == 400 - assert bc.database.list_of('assignments.Task') == [bc.format.to_dict(model.task)] + assert bc.database.list_of("assignments.Task") == [bc.format.to_dict(model.task)] # When: auth in get # Then: response 200 def test__get__auth(bc: Breathecode, client: APIClient, patch_get): - expected = {'data': {'getTask': {'id': random.randint(1, 100)}}} + expected = {"data": {"getTask": {"id": random.randint(1, 100)}}} query = { bc.fake.slug(): bc.fake.slug(), bc.fake.slug(): bc.fake.slug(), @@ -117,32 +118,33 @@ def test__get__auth(bc: Breathecode, client: APIClient, patch_get): } code = random.randint(200, 299) - headers = {'Content-Type': 'application/json'} + headers = {"Content-Type": "application/json"} patch_get(expected, code, headers) - task = {'github_url': bc.fake.url()} - credentials_github = {'username': bc.fake.slug()} - model = bc.database.create(profile_academy=1, - task=task, - credentials_github=credentials_github, - app={'slug': 'rigobot'}) + task = {"github_url": bc.fake.url()} + credentials_github = {"username": bc.fake.slug()} + model = bc.database.create( + profile_academy=1, task=task, credentials_github=credentials_github, app={"slug": "rigobot"} + ) client.force_authenticate(model.user) - url = reverse_lazy('assignments:me_coderevision') + '?' + bc.format.querystring(query) + url = reverse_lazy("assignments:me_coderevision") + "?" 
+ bc.format.querystring(query) token = bc.random.string(lower=True, upper=True, symbol=True, number=True, size=20) - with patch('linked_services.django.actions.get_jwt', MagicMock(return_value=token)): + with patch("linked_services.django.actions.get_jwt", MagicMock(return_value=token)): response = client.get(url) assert ClientSession.get.call_args_list == [ - call(model.app.app_url + '/v1/finetuning/me/coderevision', - params={ - **query, - 'github_username': model.credentials_github.username, - }, - headers={'Authorization': f'Link App=breathecode,Token={token}'}), + call( + model.app.app_url + "/v1/finetuning/me/coderevision", + params={ + **query, + "github_username": model.credentials_github.username, + }, + headers={"Authorization": f"Link App=breathecode,Token={token}"}, + ), ] - assert response.getvalue().decode('utf-8') == json.dumps(expected) + assert response.getvalue().decode("utf-8") == json.dumps(expected) assert response.status_code == code - assert bc.database.list_of('assignments.Task') == [bc.format.to_dict(model.task)] + assert bc.database.list_of("assignments.Task") == [bc.format.to_dict(model.task)] diff --git a/breathecode/assignments/tests/urls/tests_me_coderevision_id_rate.py b/breathecode/assignments/tests/urls/tests_me_coderevision_id_rate.py index 4c8a10300..b53815226 100644 --- a/breathecode/assignments/tests/urls/tests_me_coderevision_id_rate.py +++ b/breathecode/assignments/tests/urls/tests_me_coderevision_id_rate.py @@ -1,6 +1,7 @@ """ Test /answer """ + import json import random from unittest.mock import AsyncMock, MagicMock, call, patch @@ -57,7 +58,7 @@ def patch_post(monkeypatch): def handler(expected, code, headers): reader = StreamReaderMock(json.dumps(expected).encode()) - monkeypatch.setattr('aiohttp.ClientSession.post', MagicMock(return_value=ResponseMock(reader, code, headers))) + monkeypatch.setattr("aiohttp.ClientSession.post", MagicMock(return_value=ResponseMock(reader, code, headers))) yield handler @@ -65,16 +66,31 @@ def handler(expected, code, headers): @pytest.fixture def get_jwt(bc: Breathecode, monkeypatch): token = bc.random.string(lower=True, upper=True, symbol=True, number=True, size=20) - monkeypatch.setattr('linked_services.django.actions.get_jwt', MagicMock(return_value=token)) + monkeypatch.setattr("linked_services.django.actions.get_jwt", MagicMock(return_value=token)) yield token -@pytest.fixture(params=[ - ('linked_services.core.service.Service.__aenter__', Exception, 'App rigobot not found', 'app-not-found', 404, True), - ('linked_services.core.service.Service.__aenter__', SynchronousOnlyOperation, - 'Async is not supported by the worker', 'no-async-support', 500, True), - ('aiohttp.ClientSession.post', Exception, 'random exc', 'unexpected-error', 500, False), -]) +@pytest.fixture( + params=[ + ( + "linked_services.core.service.Service.__aenter__", + Exception, + "App rigobot not found", + "app-not-found", + 404, + True, + ), + ( + "linked_services.core.service.Service.__aenter__", + SynchronousOnlyOperation, + "Async is not supported by the worker", + "no-async-support", + 500, + True, + ), + ("aiohttp.ClientSession.post", Exception, "random exc", "unexpected-error", 500, False), + ] +) def post_exc(request, monkeypatch): path, exc, message, slug, code, is_async = request.param if is_async: @@ -109,53 +125,53 @@ async def async_exc_mock(message): monkeypatch.setattr(path, ContextMock) yield { - 'slug': slug, - 'code': code, + "slug": slug, + "code": code, } # When: no auth # Then: response 401 def test_no_auth(bc: Breathecode, 
client: APIClient): - url = reverse_lazy('assignments:me_coderevision_id_rate', kwargs={'coderevision_id': 1}) + url = reverse_lazy("assignments:me_coderevision_id_rate", kwargs={"coderevision_id": 1}) response = client.post(url) json = response.json() - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} assert json == expected assert response.status_code == status.HTTP_401_UNAUTHORIZED - assert bc.database.list_of('assignments.Task') == [] + assert bc.database.list_of("assignments.Task") == [] # When: raise an exception # Then: response 200 def test_raise_an_exception(bc: Breathecode, client: APIClient, post_exc): - expected = {'detail': post_exc['slug'], 'status_code': post_exc['code']} + expected = {"detail": post_exc["slug"], "status_code": post_exc["code"]} query = { bc.fake.slug(): bc.fake.slug(), bc.fake.slug(): bc.fake.slug(), bc.fake.slug(): bc.fake.slug(), } - task = {'github_url': bc.fake.url()} - model = bc.database.create(profile_academy=1, task=task, app={'slug': 'rigobot', 'app_url': bc.fake.url()}) + task = {"github_url": bc.fake.url()} + model = bc.database.create(profile_academy=1, task=task, app={"slug": "rigobot", "app_url": bc.fake.url()}) client.force_authenticate(model.user) - url = reverse_lazy('assignments:me_coderevision_id_rate', kwargs={'coderevision_id': 1}) + url = reverse_lazy("assignments:me_coderevision_id_rate", kwargs={"coderevision_id": 1}) - response = client.post(url, query, format='json') + response = client.post(url, query, format="json") json = response.json() assert json == expected - assert response.status_code == post_exc['code'] - assert bc.database.list_of('assignments.Task') == [bc.format.to_dict(model.task)] + assert response.status_code == post_exc["code"] + assert bc.database.list_of("assignments.Task") == [bc.format.to_dict(model.task)] # When: auth # Then: response 200 def test_auth(bc: Breathecode, client: APIClient, patch_post, get_jwt): - expected = {'data': {'getTask': {'id': random.randint(1, 100)}}} + expected = {"data": {"getTask": {"id": random.randint(1, 100)}}} query = { bc.fake.slug(): bc.fake.slug(), bc.fake.slug(): bc.fake.slug(), @@ -163,25 +179,27 @@ def test_auth(bc: Breathecode, client: APIClient, patch_post, get_jwt): } code = random.randint(200, 299) - headers = {'Content-Type': 'application/json'} + headers = {"Content-Type": "application/json"} patch_post(expected, code, headers) - task = {'github_url': bc.fake.url()} - model = bc.database.create(profile_academy=1, task=task, app={'slug': 'rigobot', 'app_url': bc.fake.url()}) + task = {"github_url": bc.fake.url()} + model = bc.database.create(profile_academy=1, task=task, app={"slug": "rigobot", "app_url": bc.fake.url()}) client.force_authenticate(model.user) - url = reverse_lazy('assignments:me_coderevision_id_rate', kwargs={'coderevision_id': 1}) + url = reverse_lazy("assignments:me_coderevision_id_rate", kwargs={"coderevision_id": 1}) - response = client.post(url, query, format='json') + response = client.post(url, query, format="json") assert aiohttp.ClientSession.post.call_args_list == [ - call(f'{model.app.app_url}/v1/finetuning/rate/coderevision/1', - json=None, - data=query, - headers={'Authorization': f'Link App=breathecode,Token={get_jwt}'}) + call( + f"{model.app.app_url}/v1/finetuning/rate/coderevision/1", + json=None, + data=query, + headers={"Authorization": f"Link App=breathecode,Token={get_jwt}"}, + ) ] - assert 
response.getvalue().decode('utf-8') == json.dumps(expected) + assert response.getvalue().decode("utf-8") == json.dumps(expected) assert response.status_code == code - assert bc.database.list_of('assignments.Task') == [bc.format.to_dict(model.task)] + assert bc.database.list_of("assignments.Task") == [bc.format.to_dict(model.task)] diff --git a/breathecode/assignments/tests/urls/tests_me_commitfile_id.py b/breathecode/assignments/tests/urls/tests_me_commitfile_id.py index ddcf46a99..ebf765f2d 100644 --- a/breathecode/assignments/tests/urls/tests_me_commitfile_id.py +++ b/breathecode/assignments/tests/urls/tests_me_commitfile_id.py @@ -1,6 +1,7 @@ """ Test /answer """ + import json import random from unittest.mock import MagicMock, call, patch @@ -25,20 +26,20 @@ class MediaTestSuite(AssignmentsTestCase): # When: no auth # Then: response 401 def test_no_auth(self): - url = reverse_lazy('assignments:me_commitfile_id', kwargs={'commitfile_id': 1}) + url = reverse_lazy("assignments:me_commitfile_id", kwargs={"commitfile_id": 1}) response = self.client.get(url) json = response.json() - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) - self.assertEqual(self.bc.database.list_of('assignments.Task'), []) + self.assertEqual(self.bc.database.list_of("assignments.Task"), []) # When: auth in get # Then: response 200 def test__get__auth(self): - expected = {'data': {'getTask': {'id': random.randint(1, 100)}}} + expected = {"data": {"getTask": {"id": random.randint(1, 100)}}} query = { self.bc.fake.slug(): self.bc.fake.slug(), self.bc.fake.slug(): self.bc.fake.slug(), @@ -47,29 +48,37 @@ def test__get__auth(self): mock = MagicMock() mock.raw = iter([json.dumps(expected).encode()]) - mock.headers = {'Content-Type': 'application/json'} + mock.headers = {"Content-Type": "application/json"} code = random.randint(200, 299) mock.status_code = code - mock.reason = 'OK' + mock.reason = "OK" - task = {'github_url': self.bc.fake.url()} - model = self.bc.database.create(profile_academy=1, task=task, app={'slug': 'rigobot'}) + task = {"github_url": self.bc.fake.url()} + model = self.bc.database.create(profile_academy=1, task=task, app={"slug": "rigobot"}) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:me_commitfile_id', kwargs={'commitfile_id': 1 - }) + '?' + self.bc.format.querystring(query) + url = ( + reverse_lazy("assignments:me_commitfile_id", kwargs={"commitfile_id": 1}) + + "?" 
+ + self.bc.format.querystring(query) + ) token = self.bc.random.string(lower=True, upper=True, symbol=True, number=True, size=20) - with patch('linked_services.django.actions.get_jwt', MagicMock(return_value=token)): - with patch.multiple('requests', get=MagicMock(return_value=mock)): + with patch("linked_services.django.actions.get_jwt", MagicMock(return_value=token)): + with patch.multiple("requests", get=MagicMock(return_value=mock)): response = self.client.get(url) - self.bc.check.calls(requests.get.call_args_list, [ - call(model.app.app_url + '/v1/finetuning/commitfile/1', - params=query, - stream=True, - headers={'Authorization': f'Link App=breathecode,Token={token}'}), - ]) - - self.assertEqual(response.getvalue().decode('utf-8'), json.dumps(expected)) + self.bc.check.calls( + requests.get.call_args_list, + [ + call( + model.app.app_url + "/v1/finetuning/commitfile/1", + params=query, + stream=True, + headers={"Authorization": f"Link App=breathecode,Token={token}"}, + ), + ], + ) + + self.assertEqual(response.getvalue().decode("utf-8"), json.dumps(expected)) self.assertEqual(response.status_code, code) - self.assertEqual(self.bc.database.list_of('assignments.Task'), [self.bc.format.to_dict(model.task)]) + self.assertEqual(self.bc.database.list_of("assignments.Task"), [self.bc.format.to_dict(model.task)]) diff --git a/breathecode/assignments/tests/urls/tests_me_task_id_coderevision.py b/breathecode/assignments/tests/urls/tests_me_task_id_coderevision.py index 8dffd7d56..dfd935093 100644 --- a/breathecode/assignments/tests/urls/tests_me_task_id_coderevision.py +++ b/breathecode/assignments/tests/urls/tests_me_task_id_coderevision.py @@ -1,6 +1,7 @@ """ Test /answer """ + import json import random from unittest.mock import AsyncMock, MagicMock, call, patch @@ -57,7 +58,7 @@ def patch_get(monkeypatch): def handler(expected, code, headers): reader = StreamReaderMock(json.dumps(expected).encode()) - monkeypatch.setattr('aiohttp.ClientSession.get', MagicMock(return_value=ResponseMock(reader, code, headers))) + monkeypatch.setattr("aiohttp.ClientSession.get", MagicMock(return_value=ResponseMock(reader, code, headers))) yield handler @@ -68,7 +69,7 @@ def patch_post(monkeypatch): def handler(expected, code, headers): reader = StreamReaderMock(json.dumps(expected).encode()) - monkeypatch.setattr('aiohttp.ClientSession.post', MagicMock(return_value=ResponseMock(reader, code, headers))) + monkeypatch.setattr("aiohttp.ClientSession.post", MagicMock(return_value=ResponseMock(reader, code, headers))) yield handler @@ -76,21 +77,21 @@ def handler(expected, code, headers): # When: no auth # Then: response 401 def test_no_auth(bc: Breathecode, client: APIClient): - url = reverse_lazy('assignments:me_task_id_coderevision', kwargs={'task_id': 1}) + url = reverse_lazy("assignments:me_task_id_coderevision", kwargs={"task_id": 1}) response = client.get(url) json = response.json() - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} assert json == expected assert response.status_code == status.HTTP_401_UNAUTHORIZED - assert bc.database.list_of('assignments.Task') == [] + assert bc.database.list_of("assignments.Task") == [] # When: no tasks # Then: response 404 def test__get__no_tasks(bc: Breathecode, client: APIClient, patch_get): - expected = {'data': {'getTask': {'id': random.randint(1, 100)}}} + expected = {"data": {"getTask": {"id": random.randint(1, 100)}}} query = { 
bc.fake.slug(): bc.fake.slug(), bc.fake.slug(): bc.fake.slug(), @@ -98,28 +99,29 @@ def test__get__no_tasks(bc: Breathecode, client: APIClient, patch_get): } code = random.randint(200, 299) - headers = {'Content-Type': 'application/json'} + headers = {"Content-Type": "application/json"} patch_get(expected, code, headers) - model = bc.database.create(profile_academy=1, app={'slug': 'rigobot', 'app_url': bc.fake.url()}) + model = bc.database.create(profile_academy=1, app={"slug": "rigobot", "app_url": bc.fake.url()}) client.force_authenticate(model.user) - url = reverse_lazy('assignments:me_task_id_coderevision', kwargs={'task_id': 1 - }) + '?' + bc.format.querystring(query) + url = ( + reverse_lazy("assignments:me_task_id_coderevision", kwargs={"task_id": 1}) + "?" + bc.format.querystring(query) + ) response = client.get(url) assert ClientSession.get.call_args_list == [] - assert response.getvalue().decode('utf-8') == '{"detail":"task-not-found","status_code":404}' + assert response.getvalue().decode("utf-8") == '{"detail":"task-not-found","status_code":404}' assert response.status_code == 404 - assert bc.database.list_of('assignments.Task') == [] + assert bc.database.list_of("assignments.Task") == [] # When: no github accounts # Then: response 200 def test__get__no_github_accounts(bc: Breathecode, client: APIClient, patch_get): - expected = {'data': {'getTask': {'id': random.randint(1, 100)}}} + expected = {"data": {"getTask": {"id": random.randint(1, 100)}}} query = { bc.fake.slug(): bc.fake.slug(), bc.fake.slug(): bc.fake.slug(), @@ -127,29 +129,30 @@ def test__get__no_github_accounts(bc: Breathecode, client: APIClient, patch_get) } code = random.randint(200, 299) - headers = {'Content-Type': 'application/json'} + headers = {"Content-Type": "application/json"} patch_get(expected, code, headers) - task = {'github_url': bc.fake.url()} - model = bc.database.create(profile_academy=1, task=task, app={'slug': 'rigobot', 'app_url': bc.fake.url()}) + task = {"github_url": bc.fake.url()} + model = bc.database.create(profile_academy=1, task=task, app={"slug": "rigobot", "app_url": bc.fake.url()}) client.force_authenticate(model.user) - url = reverse_lazy('assignments:me_task_id_coderevision', kwargs={'task_id': 1 - }) + '?' + bc.format.querystring(query) + url = ( + reverse_lazy("assignments:me_task_id_coderevision", kwargs={"task_id": 1}) + "?" 
+ bc.format.querystring(query) + ) response = client.get(url) assert ClientSession.get.call_args_list == [] - assert response.getvalue().decode('utf-8') == '{"detail":"github-account-not-connected","status_code":400}' + assert response.getvalue().decode("utf-8") == '{"detail":"github-account-not-connected","status_code":400}' assert response.status_code == 400 - assert bc.database.list_of('assignments.Task') == [bc.format.to_dict(model.task)] + assert bc.database.list_of("assignments.Task") == [bc.format.to_dict(model.task)] # When: auth # Then: response 200 def test__get__auth(bc: Breathecode, client: APIClient, patch_get): - expected = {'data': {'getTask': {'id': random.randint(1, 100)}}} + expected = {"data": {"getTask": {"id": random.randint(1, 100)}}} query = { bc.fake.slug(): bc.fake.slug(), bc.fake.slug(): bc.fake.slug(), @@ -157,49 +160,49 @@ def test__get__auth(bc: Breathecode, client: APIClient, patch_get): } code = random.randint(200, 299) - headers = {'Content-Type': 'application/json'} + headers = {"Content-Type": "application/json"} patch_get(expected, code, headers) - task = {'github_url': bc.fake.url()} - credentials_github = {'username': bc.fake.slug()} - model = bc.database.create(profile_academy=1, - task=task, - credentials_github=credentials_github, - app={ - 'slug': 'rigobot', - 'app_url': bc.fake.url() - }) + task = {"github_url": bc.fake.url()} + credentials_github = {"username": bc.fake.slug()} + model = bc.database.create( + profile_academy=1, + task=task, + credentials_github=credentials_github, + app={"slug": "rigobot", "app_url": bc.fake.url()}, + ) client.force_authenticate(model.user) - url = reverse_lazy('assignments:me_task_id_coderevision', kwargs={'task_id': 1 - }) + '?' + bc.format.querystring(query) + url = ( + reverse_lazy("assignments:me_task_id_coderevision", kwargs={"task_id": 1}) + "?" 
+ bc.format.querystring(query) + ) token = bc.random.string(lower=True, upper=True, symbol=True, number=True, size=20) - with patch('linked_services.django.actions.get_jwt', MagicMock(return_value=token)): + with patch("linked_services.django.actions.get_jwt", MagicMock(return_value=token)): response = client.get(url) assert ClientSession.get.call_args_list == [ call( - model.app.app_url + '/v1/finetuning/me/coderevision', + model.app.app_url + "/v1/finetuning/me/coderevision", params={ **query, - 'repo': model.task.github_url, - 'github_username': model.credentials_github.username, + "repo": model.task.github_url, + "github_username": model.credentials_github.username, }, - headers={'Authorization': f'Link App=breathecode,Token={token}'}, + headers={"Authorization": f"Link App=breathecode,Token={token}"}, ), ] - assert response.getvalue().decode('utf-8') == json.dumps(expected) + assert response.getvalue().decode("utf-8") == json.dumps(expected) assert response.status_code == code - assert bc.database.list_of('assignments.Task') == [bc.format.to_dict(model.task)] + assert bc.database.list_of("assignments.Task") == [bc.format.to_dict(model.task)] # When: no tasks # Then: response 404 -@pytest.mark.skip('Temporarily disabled') +@pytest.mark.skip("Temporarily disabled") def test__post__no_consumables(bc: Breathecode, client: APIClient, patch_post): - expected = {'data': {'getTask': {'id': random.randint(1, 100)}}} + expected = {"data": {"getTask": {"id": random.randint(1, 100)}}} query = { bc.fake.slug(): bc.fake.slug(), bc.fake.slug(): bc.fake.slug(), @@ -207,28 +210,29 @@ def test__post__no_consumables(bc: Breathecode, client: APIClient, patch_post): } code = random.randint(200, 299) - headers = {'Content-Type': 'application/json'} + headers = {"Content-Type": "application/json"} patch_post(expected, code, headers) - model = bc.database.create(profile_academy=1, app={'slug': 'rigobot', 'app_url': bc.fake.url()}) + model = bc.database.create(profile_academy=1, app={"slug": "rigobot", "app_url": bc.fake.url()}) client.force_authenticate(model.user) - url = reverse_lazy('assignments:me_task_id_coderevision', kwargs={'task_id': 1 - }) + '?' + bc.format.querystring(query) + url = ( + reverse_lazy("assignments:me_task_id_coderevision", kwargs={"task_id": 1}) + "?" 
+ bc.format.querystring(query) + ) response = client.post(url) assert ClientSession.post.call_args_list == [] - assert response.getvalue().decode('utf-8') == '{"detail":"not-enough-consumables","status_code":402}' + assert response.getvalue().decode("utf-8") == '{"detail":"not-enough-consumables","status_code":402}' assert response.status_code == 402 - assert bc.database.list_of('assignments.Task') == [] + assert bc.database.list_of("assignments.Task") == [] # When: no tasks # Then: response 404 def test__post__no_consumables(bc: Breathecode, client: APIClient, patch_post): - expected = {'data': {'getTask': {'id': random.randint(1, 100)}}} + expected = {"data": {"getTask": {"id": random.randint(1, 100)}}} query = { bc.fake.slug(): bc.fake.slug(), bc.fake.slug(): bc.fake.slug(), @@ -236,36 +240,39 @@ def test__post__no_consumables(bc: Breathecode, client: APIClient, patch_post): } code = random.randint(200, 299) - headers = {'Content-Type': 'application/json'} + headers = {"Content-Type": "application/json"} patch_post(expected, code, headers) - model = bc.database.create(profile_academy=1, - app={ - 'slug': 'rigobot', - 'app_url': bc.fake.url(), - }, - service={ - 'type': 'VOID', - 'slug': 'add_code_review', - }) + model = bc.database.create( + profile_academy=1, + app={ + "slug": "rigobot", + "app_url": bc.fake.url(), + }, + service={ + "type": "VOID", + "slug": "add_code_review", + }, + ) client.force_authenticate(model.user) - url = reverse_lazy('assignments:me_task_id_coderevision', kwargs={'task_id': 1 - }) + '?' + bc.format.querystring(query) + url = ( + reverse_lazy("assignments:me_task_id_coderevision", kwargs={"task_id": 1}) + "?" + bc.format.querystring(query) + ) response = client.post(url) assert ClientSession.post.call_args_list == [] - assert response.getvalue().decode('utf-8') == '{"detail":"with-consumer-not-enough-consumables","status_code":402}' + assert response.getvalue().decode("utf-8") == '{"detail":"with-consumer-not-enough-consumables","status_code":402}' assert response.status_code == 402 - assert bc.database.list_of('assignments.Task') == [] + assert bc.database.list_of("assignments.Task") == [] # When: no tasks # Then: response 404 def test__post__no_tasks(bc: Breathecode, client: APIClient, patch_post): - expected = {'data': {'getTask': {'id': random.randint(1, 100)}}} + expected = {"data": {"getTask": {"id": random.randint(1, 100)}}} query = { bc.fake.slug(): bc.fake.slug(), bc.fake.slug(): bc.fake.slug(), @@ -273,37 +280,37 @@ def test__post__no_tasks(bc: Breathecode, client: APIClient, patch_post): } code = random.randint(200, 299) - headers = {'Content-Type': 'application/json'} + headers = {"Content-Type": "application/json"} patch_post(expected, code, headers) - model = bc.database.create(profile_academy=1, - consumable=1, - app={ - 'slug': 'rigobot', - 'app_url': bc.fake.url() - }, - service={ - 'type': 'VOID', - 'slug': 'add_code_review', - }) + model = bc.database.create( + profile_academy=1, + consumable=1, + app={"slug": "rigobot", "app_url": bc.fake.url()}, + service={ + "type": "VOID", + "slug": "add_code_review", + }, + ) client.force_authenticate(model.user) - url = reverse_lazy('assignments:me_task_id_coderevision', kwargs={'task_id': 1 - }) + '?' + bc.format.querystring(query) + url = ( + reverse_lazy("assignments:me_task_id_coderevision", kwargs={"task_id": 1}) + "?" 
+ bc.format.querystring(query) + ) response = client.post(url) assert ClientSession.post.call_args_list == [] - assert response.getvalue().decode('utf-8') == '{"detail":"task-not-found","status_code":404}' + assert response.getvalue().decode("utf-8") == '{"detail":"task-not-found","status_code":404}' assert response.status_code == 404 - assert bc.database.list_of('assignments.Task') == [] + assert bc.database.list_of("assignments.Task") == [] # When: no github accounts # Then: response 200 def test__post__no_github_accounts(bc: Breathecode, client: APIClient, patch_post): - expected = {'data': {'getTask': {'id': random.randint(1, 100)}}} + expected = {"data": {"getTask": {"id": random.randint(1, 100)}}} query = { bc.fake.slug(): bc.fake.slug(), bc.fake.slug(): bc.fake.slug(), @@ -311,39 +318,42 @@ def test__post__no_github_accounts(bc: Breathecode, client: APIClient, patch_pos } code = random.randint(200, 299) - headers = {'Content-Type': 'application/json'} + headers = {"Content-Type": "application/json"} patch_post(expected, code, headers) - task = {'github_url': bc.fake.url()} - model = bc.database.create(profile_academy=1, - task=task, - consumable=1, - app={ - 'slug': 'rigobot', - 'app_url': bc.fake.url(), - }, - service={ - 'type': 'VOID', - 'slug': 'add_code_review', - }) + task = {"github_url": bc.fake.url()} + model = bc.database.create( + profile_academy=1, + task=task, + consumable=1, + app={ + "slug": "rigobot", + "app_url": bc.fake.url(), + }, + service={ + "type": "VOID", + "slug": "add_code_review", + }, + ) client.force_authenticate(model.user) - url = reverse_lazy('assignments:me_task_id_coderevision', kwargs={'task_id': 1 - }) + '?' + bc.format.querystring(query) + url = ( + reverse_lazy("assignments:me_task_id_coderevision", kwargs={"task_id": 1}) + "?" 
+ bc.format.querystring(query) + ) response = client.post(url) assert ClientSession.post.call_args_list == [] - assert response.getvalue().decode('utf-8') == '{"detail":"github-account-not-connected","status_code":400}' + assert response.getvalue().decode("utf-8") == '{"detail":"github-account-not-connected","status_code":400}' assert response.status_code == 400 - assert bc.database.list_of('assignments.Task') == [bc.format.to_dict(model.task)] + assert bc.database.list_of("assignments.Task") == [bc.format.to_dict(model.task)] # When: auth # Then: response 200 def test__post__auth(bc: Breathecode, client: APIClient, patch_post): - expected = {'data': {'getTask': {'id': random.randint(1, 100)}}} + expected = {"data": {"getTask": {"id": random.randint(1, 100)}}} query = { bc.fake.slug(): bc.fake.slug(), bc.fake.slug(): bc.fake.slug(), @@ -351,93 +361,95 @@ def test__post__auth(bc: Breathecode, client: APIClient, patch_post): } code = random.randint(200, 299) - headers = {'Content-Type': 'application/json'} + headers = {"Content-Type": "application/json"} patch_post(expected, code, headers) - task = {'github_url': bc.fake.url()} - credentials_github = {'username': bc.fake.slug()} - model = bc.database.create(profile_academy=1, - task=task, - credentials_github=credentials_github, - app={ - 'slug': 'rigobot', - 'app_url': bc.fake.url(), - }, - consumable=1, - service={ - 'type': 'VOID', - 'slug': 'add_code_review', - }) + task = {"github_url": bc.fake.url()} + credentials_github = {"username": bc.fake.slug()} + model = bc.database.create( + profile_academy=1, + task=task, + credentials_github=credentials_github, + app={ + "slug": "rigobot", + "app_url": bc.fake.url(), + }, + consumable=1, + service={ + "type": "VOID", + "slug": "add_code_review", + }, + ) client.force_authenticate(model.user) - url = reverse_lazy('assignments:me_task_id_coderevision', kwargs={'task_id': 1 - }) + '?' + bc.format.querystring(query) + url = ( + reverse_lazy("assignments:me_task_id_coderevision", kwargs={"task_id": 1}) + "?" 
+ bc.format.querystring(query) + ) token = bc.random.string(lower=True, upper=True, symbol=True, number=True, size=20) - with patch('linked_services.django.actions.get_jwt', MagicMock(return_value=token)): - response = client.post(url, query, format='json') + with patch("linked_services.django.actions.get_jwt", MagicMock(return_value=token)): + response = client.post(url, query, format="json") assert ClientSession.post.call_args_list == [ call( - model.app.app_url + '/v1/finetuning/coderevision/', + model.app.app_url + "/v1/finetuning/coderevision/", data=query, json=None, params={ **query, - 'repo': model.task.github_url, - 'github_username': model.credentials_github.username, + "repo": model.task.github_url, + "github_username": model.credentials_github.username, }, - headers={'Authorization': f'Link App=breathecode,Token={token}'}, + headers={"Authorization": f"Link App=breathecode,Token={token}"}, ), ] - assert response.getvalue().decode('utf-8') == json.dumps(expected) + assert response.getvalue().decode("utf-8") == json.dumps(expected) assert response.status_code == code - assert bc.database.list_of('assignments.Task') == [bc.format.to_dict(model.task)] + assert bc.database.list_of("assignments.Task") == [bc.format.to_dict(model.task)] # Given: A no SAAS student who has paid # When: auth # Then: response 200 -@pytest.mark.parametrize('cohort_user', [ - { - 'finantial_status': 'FULLY_PAID', - 'educational_status': 'ACTIVE', - }, - { - 'finantial_status': 'UP_TO_DATE', - 'educational_status': 'ACTIVE', - }, - { - 'finantial_status': 'FULLY_PAID', - 'educational_status': 'GRADUATED', - }, - { - 'finantial_status': 'UP_TO_DATE', - 'educational_status': 'GRADUATED', - }, -]) -@pytest.mark.parametrize('academy, cohort', [ - ( +@pytest.mark.parametrize( + "cohort_user", + [ { - 'available_as_saas': True + "finantial_status": "FULLY_PAID", + "educational_status": "ACTIVE", }, { - 'available_as_saas': False + "finantial_status": "UP_TO_DATE", + "educational_status": "ACTIVE", }, - ), - ( { - 'available_as_saas': False + "finantial_status": "FULLY_PAID", + "educational_status": "GRADUATED", }, { - 'available_as_saas': None + "finantial_status": "UP_TO_DATE", + "educational_status": "GRADUATED", }, - ), -]) -def test__post__auth__no_saas__finantial_status_no_late(bc: Breathecode, client: APIClient, academy, cohort, - cohort_user, patch_post): - expected = {'data': {'getTask': {'id': random.randint(1, 100)}}} + ], +) +@pytest.mark.parametrize( + "academy, cohort", + [ + ( + {"available_as_saas": True}, + {"available_as_saas": False}, + ), + ( + {"available_as_saas": False}, + {"available_as_saas": None}, + ), + ], +) +def test__post__auth__no_saas__finantial_status_no_late( + bc: Breathecode, client: APIClient, academy, cohort, cohort_user, patch_post +): + expected = {"data": {"getTask": {"id": random.randint(1, 100)}}} query = { bc.fake.slug(): bc.fake.slug(), bc.fake.slug(): bc.fake.slug(), @@ -445,77 +457,75 @@ def test__post__auth__no_saas__finantial_status_no_late(bc: Breathecode, client: } code = random.randint(200, 299) - headers = {'Content-Type': 'application/json'} + headers = {"Content-Type": "application/json"} patch_post(expected, code, headers) - task = {'github_url': bc.fake.url()} - credentials_github = {'username': bc.fake.slug()} - model = bc.database.create(profile_academy=1, - task=task, - credentials_github=credentials_github, - app={ - 'slug': 'rigobot', - 'app_url': bc.fake.url(), - }, - consumable=1, - service={ - 'type': 'VOID', - 'slug': 'add_code_review', - }, - 
academy=academy, - cohort=cohort, - cohort_user=cohort_user) + task = {"github_url": bc.fake.url()} + credentials_github = {"username": bc.fake.slug()} + model = bc.database.create( + profile_academy=1, + task=task, + credentials_github=credentials_github, + app={ + "slug": "rigobot", + "app_url": bc.fake.url(), + }, + consumable=1, + service={ + "type": "VOID", + "slug": "add_code_review", + }, + academy=academy, + cohort=cohort, + cohort_user=cohort_user, + ) client.force_authenticate(model.user) - url = reverse_lazy('assignments:me_task_id_coderevision', kwargs={'task_id': 1 - }) + '?' + bc.format.querystring(query) + url = ( + reverse_lazy("assignments:me_task_id_coderevision", kwargs={"task_id": 1}) + "?" + bc.format.querystring(query) + ) token = bc.random.string(lower=True, upper=True, symbol=True, number=True, size=20) - with patch('linked_services.django.actions.get_jwt', MagicMock(return_value=token)): - response = client.post(url, query, format='json') + with patch("linked_services.django.actions.get_jwt", MagicMock(return_value=token)): + response = client.post(url, query, format="json") assert ClientSession.post.call_args_list == [ call( - model.app.app_url + '/v1/finetuning/coderevision/', + model.app.app_url + "/v1/finetuning/coderevision/", data=query, json=None, params={ **query, - 'repo': model.task.github_url, - 'github_username': model.credentials_github.username, + "repo": model.task.github_url, + "github_username": model.credentials_github.username, }, - headers={'Authorization': f'Link App=breathecode,Token={token}'}, + headers={"Authorization": f"Link App=breathecode,Token={token}"}, ), ] - assert response.getvalue().decode('utf-8') == json.dumps(expected) + assert response.getvalue().decode("utf-8") == json.dumps(expected) assert response.status_code == code - assert bc.database.list_of('assignments.Task') == [bc.format.to_dict(model.task)] + assert bc.database.list_of("assignments.Task") == [bc.format.to_dict(model.task)] # Given: A no SAAS student who hasn't paid # When: auth # Then: response 402 -@pytest.mark.parametrize('academy, cohort', [ - ( - { - 'available_as_saas': True - }, - { - 'available_as_saas': False - }, - ), - ( - { - 'available_as_saas': False - }, - { - 'available_as_saas': None - }, - ), -]) +@pytest.mark.parametrize( + "academy, cohort", + [ + ( + {"available_as_saas": True}, + {"available_as_saas": False}, + ), + ( + {"available_as_saas": False}, + {"available_as_saas": None}, + ), + ], +) def test__post__auth__no_saas__finantial_status_late(bc: Breathecode, client: APIClient, academy, cohort, patch_post): - expected = {'data': {'getTask': {'id': random.randint(1, 100)}}} + expected = {"data": {"getTask": {"id": random.randint(1, 100)}}} query = { bc.fake.slug(): bc.fake.slug(), bc.fake.slug(): bc.fake.slug(), @@ -523,41 +533,41 @@ def test__post__auth__no_saas__finantial_status_late(bc: Breathecode, client: AP } code = random.randint(200, 299) - headers = {'Content-Type': 'application/json'} + headers = {"Content-Type": "application/json"} patch_post(expected, code, headers) - task = {'github_url': bc.fake.url()} - credentials_github = {'username': bc.fake.slug()} - cohort_user = {'finantial_status': 'LATE', 'educational_status': 'ACTIVE'} - model = bc.database.create(profile_academy=1, - task=task, - credentials_github=credentials_github, - app={ - 'slug': 'rigobot', - 'app_url': bc.fake.url() - }, - consumable=1, - service={ - 'type': 'VOID', - 'slug': 'add_code_review', - }, - cohort_user=cohort_user, - cohort=cohort, - academy=academy) 
+ task = {"github_url": bc.fake.url()} + credentials_github = {"username": bc.fake.slug()} + cohort_user = {"finantial_status": "LATE", "educational_status": "ACTIVE"} + model = bc.database.create( + profile_academy=1, + task=task, + credentials_github=credentials_github, + app={"slug": "rigobot", "app_url": bc.fake.url()}, + consumable=1, + service={ + "type": "VOID", + "slug": "add_code_review", + }, + cohort_user=cohort_user, + cohort=cohort, + academy=academy, + ) client.force_authenticate(model.user) - url = reverse_lazy('assignments:me_task_id_coderevision', kwargs={'task_id': 1 - }) + '?' + bc.format.querystring(query) + url = ( + reverse_lazy("assignments:me_task_id_coderevision", kwargs={"task_id": 1}) + "?" + bc.format.querystring(query) + ) token = bc.random.string(lower=True, upper=True, symbol=True, number=True, size=20) - with patch('linked_services.django.actions.get_jwt', MagicMock(return_value=token)): - response = client.post(url, query, format='json') + with patch("linked_services.django.actions.get_jwt", MagicMock(return_value=token)): + response = client.post(url, query, format="json") assert ClientSession.post.call_args_list == [] x = response.json() - expected = {'detail': 'cohort-user-status-later', 'status_code': 402} + expected = {"detail": "cohort-user-status-later", "status_code": 402} assert x == expected assert response.status_code == 402 - assert bc.database.list_of('assignments.Task') == [bc.format.to_dict(model.task)] + assert bc.database.list_of("assignments.Task") == [bc.format.to_dict(model.task)] diff --git a/breathecode/assignments/tests/urls/tests_task.py b/breathecode/assignments/tests/urls/tests_task.py index 90dbb5db8..9c9c84a6a 100644 --- a/breathecode/assignments/tests/urls/tests_task.py +++ b/breathecode/assignments/tests/urls/tests_task.py @@ -1,6 +1,7 @@ """ Test /answer """ + from datetime import timedelta from unittest.mock import MagicMock, call, patch @@ -15,81 +16,78 @@ def get_serializer(self, task, user): return { - 'associated_slug': task.associated_slug, - 'created_at': self.bc.datetime.to_iso_string(task.created_at), - 'updated_at': self.bc.datetime.to_iso_string(task.updated_at), - 'github_url': task.github_url, - 'id': task.id, - 'live_url': task.live_url, - 'revision_status': task.revision_status, - 'task_status': task.task_status, - 'task_type': task.task_type, - 'title': task.title, - 'description': task.description, - 'assignment_telemetry': task.telemetry.telemetry if task.telemetry else None, - 'opened_at': self.bc.datetime.to_iso_string(task.opened_at) if task.opened_at else task.opened_at, - 'delivered_at': self.bc.datetime.to_iso_string(task.delivered_at) if task.delivered_at else task.delivered_at, - 'user': { - 'first_name': user.first_name, - 'id': user.id, - 'last_name': user.last_name - } + "associated_slug": task.associated_slug, + "created_at": self.bc.datetime.to_iso_string(task.created_at), + "updated_at": self.bc.datetime.to_iso_string(task.updated_at), + "github_url": task.github_url, + "id": task.id, + "live_url": task.live_url, + "revision_status": task.revision_status, + "task_status": task.task_status, + "task_type": task.task_type, + "title": task.title, + "description": task.description, + "assignment_telemetry": task.telemetry.telemetry if task.telemetry else None, + "opened_at": self.bc.datetime.to_iso_string(task.opened_at) if task.opened_at else task.opened_at, + "delivered_at": self.bc.datetime.to_iso_string(task.delivered_at) if task.delivered_at else task.delivered_at, + "user": {"first_name": 
user.first_name, "id": user.id, "last_name": user.last_name}, } class MediaTestSuite(AssignmentsTestCase): """Test /answer""" + """ 🔽🔽🔽 Auth """ - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_task__without_auth(self): - url = reverse_lazy('assignments:task') + url = reverse_lazy("assignments:task") response = self.client.get(url) json = response.json() - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) - self.assertEqual(self.bc.database.list_of('assignments.Task'), []) + self.assertEqual(self.bc.database.list_of("assignments.Task"), []) """ 🔽🔽🔽 Get without ProfileAcademy """ - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_task__without_profile_academy(self): model = self.bc.database.create(user=1) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:task') + url = reverse_lazy("assignments:task") response = self.client.get(url) json = response.json() expected = { - 'detail': 'without-profile-academy', - 'status_code': 400, + "detail": "without-profile-academy", + "status_code": 400, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('assignments.Task'), []) + self.assertEqual(self.bc.database.list_of("assignments.Task"), []) """ 🔽🔽🔽 Get without Task """ - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_task__without_data(self): model = self.bc.database.create(user=1, profile_academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:task') + url = reverse_lazy("assignments:task") response = self.client.get(url) json = response.json() @@ -97,19 +95,19 @@ def test_task__without_data(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('assignments.Task'), []) + self.assertEqual(self.bc.database.list_of("assignments.Task"), []) """ 🔽🔽🔽 Get with Task """ - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + 
@patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_task__one_task__cohort_null(self): model = self.bc.database.create(profile_academy=1, task=1) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:task') + url = reverse_lazy("assignments:task") response = self.client.get(url) json = response.json() @@ -117,19 +115,19 @@ def test_task__one_task__cohort_null(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('assignments.Task'), [self.bc.format.to_dict(model.task)]) + self.assertEqual(self.bc.database.list_of("assignments.Task"), [self.bc.format.to_dict(model.task)]) """ 🔽🔽🔽 Get with two Task """ - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_task__two_tasks(self): model = self.bc.database.create(profile_academy=1, task=2) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:task') + url = reverse_lazy("assignments:task") response = self.client.get(url) json = response.json() @@ -137,19 +135,19 @@ def test_task__two_tasks(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('assignments.Task'), self.bc.format.to_dict(model.task)) + self.assertEqual(self.bc.database.list_of("assignments.Task"), self.bc.format.to_dict(model.task)) """ 🔽🔽🔽 Query academy """ - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_task__query_academy__found_zero__academy_not_exists(self): model = self.bc.database.create(profile_academy=1, task=1, cohort=1) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:task') + '?academy=they-killed-kenny' + url = reverse_lazy("assignments:task") + "?academy=they-killed-kenny" response = self.client.get(url) json = response.json() @@ -157,15 +155,15 @@ def test_task__query_academy__found_zero__academy_not_exists(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('assignments.Task'), [self.bc.format.to_dict(model.task)]) + self.assertEqual(self.bc.database.list_of("assignments.Task"), [self.bc.format.to_dict(model.task)]) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_task__query_academy__found_one(self): model = self.bc.database.create(profile_academy=1, task=1, cohort=1) 
self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:task') + f'?academy={model.academy.slug}' + url = reverse_lazy("assignments:task") + f"?academy={model.academy.slug}" response = self.client.get(url) json = response.json() @@ -173,15 +171,15 @@ def test_task__query_academy__found_one(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('assignments.Task'), [self.bc.format.to_dict(model.task)]) + self.assertEqual(self.bc.database.list_of("assignments.Task"), [self.bc.format.to_dict(model.task)]) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_task__query_academy__found_two(self): model = self.bc.database.create(profile_academy=1, task=2, cohort=1) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:task') + f'?academy={model.academy.slug}' + url = reverse_lazy("assignments:task") + f"?academy={model.academy.slug}" response = self.client.get(url) json = response.json() @@ -189,19 +187,19 @@ def test_task__query_academy__found_two(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('assignments.Task'), self.bc.format.to_dict(model.task)) + self.assertEqual(self.bc.database.list_of("assignments.Task"), self.bc.format.to_dict(model.task)) """ 🔽🔽🔽 Query user """ - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_task__query_user__found_zero__user_not_exists(self): model = self.bc.database.create(profile_academy=1, task=1) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:task') + '?user=2' + url = reverse_lazy("assignments:task") + "?user=2" response = self.client.get(url) json = response.json() @@ -209,15 +207,15 @@ def test_task__query_user__found_zero__user_not_exists(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('assignments.Task'), [self.bc.format.to_dict(model.task)]) + self.assertEqual(self.bc.database.list_of("assignments.Task"), [self.bc.format.to_dict(model.task)]) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_task__query_user__found_one(self): model = self.bc.database.create(profile_academy=1, task=1) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:task') + '?user=1' + url = reverse_lazy("assignments:task") + "?user=1" response = 
self.client.get(url) json = response.json() @@ -225,15 +223,15 @@ def test_task__query_user__found_one(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('assignments.Task'), [self.bc.format.to_dict(model.task)]) + self.assertEqual(self.bc.database.list_of("assignments.Task"), [self.bc.format.to_dict(model.task)]) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_task__query_user__found_two(self): model = self.bc.database.create(profile_academy=1, task=2, cohort=1) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:task') + '?user=1' + url = reverse_lazy("assignments:task") + "?user=1" response = self.client.get(url) json = response.json() @@ -241,16 +239,16 @@ def test_task__query_user__found_two(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('assignments.Task'), self.bc.format.to_dict(model.task)) + self.assertEqual(self.bc.database.list_of("assignments.Task"), self.bc.format.to_dict(model.task)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_task__query_user__found_two__related_to_two_users(self): - tasks = [{'user_id': 1}, {'user_id': 2}] + tasks = [{"user_id": 1}, {"user_id": 2}] model = self.bc.database.create(profile_academy=1, user=2, task=tasks, cohort=1) self.bc.request.authenticate(model.user[0]) - url = reverse_lazy('assignments:task') + '?user=1,2' + url = reverse_lazy("assignments:task") + "?user=1,2" response = self.client.get(url) json = response.json() @@ -258,19 +256,19 @@ def test_task__query_user__found_two__related_to_two_users(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('assignments.Task'), self.bc.format.to_dict(model.task)) + self.assertEqual(self.bc.database.list_of("assignments.Task"), self.bc.format.to_dict(model.task)) """ 🔽🔽🔽 Query cohort """ - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_task__query_cohort__id__found_zero__cohort_not_exists(self): model = self.bc.database.create(profile_academy=1, task=1) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:task') + '?cohort=2' + url = reverse_lazy("assignments:task") + "?cohort=2" response = self.client.get(url) json = response.json() @@ -278,15 +276,15 @@ def 
test_task__query_cohort__id__found_zero__cohort_not_exists(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('assignments.Task'), [self.bc.format.to_dict(model.task)]) + self.assertEqual(self.bc.database.list_of("assignments.Task"), [self.bc.format.to_dict(model.task)]) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_task__query_cohort__slug__found_zero__cohort_not_exists(self): model = self.bc.database.create(profile_academy=1, task=1) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:task') + '?cohort=they-killed-kenny' + url = reverse_lazy("assignments:task") + "?cohort=they-killed-kenny" response = self.client.get(url) json = response.json() @@ -294,15 +292,15 @@ def test_task__query_cohort__slug__found_zero__cohort_not_exists(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('assignments.Task'), [self.bc.format.to_dict(model.task)]) + self.assertEqual(self.bc.database.list_of("assignments.Task"), [self.bc.format.to_dict(model.task)]) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_task__query_cohort__id__found_one(self): model = self.bc.database.create(profile_academy=1, task=1, cohort=1) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:task') + '?cohort=1' + url = reverse_lazy("assignments:task") + "?cohort=1" response = self.client.get(url) json = response.json() @@ -310,15 +308,15 @@ def test_task__query_cohort__id__found_one(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('assignments.Task'), [self.bc.format.to_dict(model.task)]) + self.assertEqual(self.bc.database.list_of("assignments.Task"), [self.bc.format.to_dict(model.task)]) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_task__query_cohort__slug__found_one(self): model = self.bc.database.create(profile_academy=1, task=1, cohort=1) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:task') + f'?cohort={model.cohort.slug}' + url = reverse_lazy("assignments:task") + f"?cohort={model.cohort.slug}" response = self.client.get(url) json = response.json() @@ -326,15 +324,15 @@ def test_task__query_cohort__slug__found_one(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, 
status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('assignments.Task'), [self.bc.format.to_dict(model.task)]) + self.assertEqual(self.bc.database.list_of("assignments.Task"), [self.bc.format.to_dict(model.task)]) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_task__query_cohort__id__found_two(self): model = self.bc.database.create(profile_academy=1, task=2, cohort=1) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:task') + '?cohort=1' + url = reverse_lazy("assignments:task") + "?cohort=1" response = self.client.get(url) json = response.json() @@ -342,15 +340,15 @@ def test_task__query_cohort__id__found_two(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('assignments.Task'), self.bc.format.to_dict(model.task)) + self.assertEqual(self.bc.database.list_of("assignments.Task"), self.bc.format.to_dict(model.task)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_task__query_cohort__slug__found_two(self): model = self.bc.database.create(profile_academy=1, task=2, cohort=1) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:task') + f'?cohort={model.cohort.slug}' + url = reverse_lazy("assignments:task") + f"?cohort={model.cohort.slug}" response = self.client.get(url) json = response.json() @@ -358,16 +356,16 @@ def test_task__query_cohort__slug__found_two(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('assignments.Task'), self.bc.format.to_dict(model.task)) + self.assertEqual(self.bc.database.list_of("assignments.Task"), self.bc.format.to_dict(model.task)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_task__query_cohort__id__found_two__related_to_two_users(self): - tasks = [{'cohort_id': 1}, {'cohort_id': 2}] + tasks = [{"cohort_id": 1}, {"cohort_id": 2}] model = self.bc.database.create(profile_academy=1, user=1, task=tasks, cohort=2) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:task') + '?cohort=1,2' + url = reverse_lazy("assignments:task") + "?cohort=1,2" response = self.client.get(url) json = response.json() @@ -375,16 +373,16 @@ def test_task__query_cohort__id__found_two__related_to_two_users(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - 
self.assertEqual(self.bc.database.list_of('assignments.Task'), self.bc.format.to_dict(model.task)) + self.assertEqual(self.bc.database.list_of("assignments.Task"), self.bc.format.to_dict(model.task)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_task__query_cohort__slug__found_two__related_to_two_users(self): - tasks = [{'cohort_id': 1}, {'cohort_id': 2}] + tasks = [{"cohort_id": 1}, {"cohort_id": 2}] model = self.bc.database.create(profile_academy=1, user=1, task=tasks, cohort=2) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:task') + f'?cohort={model.cohort[0].slug},{model.cohort[1].slug}' + url = reverse_lazy("assignments:task") + f"?cohort={model.cohort[0].slug},{model.cohort[1].slug}" response = self.client.get(url) json = response.json() @@ -392,19 +390,19 @@ def test_task__query_cohort__slug__found_two__related_to_two_users(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('assignments.Task'), self.bc.format.to_dict(model.task)) + self.assertEqual(self.bc.database.list_of("assignments.Task"), self.bc.format.to_dict(model.task)) """ 🔽🔽🔽 Query stu_cohort """ - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_task__query_stu_cohort__id__found_zero__cohort_not_exists(self): model = self.bc.database.create(profile_academy=1, task=1) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:task') + '?stu_cohort=2' + url = reverse_lazy("assignments:task") + "?stu_cohort=2" response = self.client.get(url) json = response.json() @@ -412,15 +410,15 @@ def test_task__query_stu_cohort__id__found_zero__cohort_not_exists(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('assignments.Task'), [self.bc.format.to_dict(model.task)]) + self.assertEqual(self.bc.database.list_of("assignments.Task"), [self.bc.format.to_dict(model.task)]) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_task__query_stu_cohort__slug__found_zero__cohort_not_exists(self): model = self.bc.database.create(profile_academy=1, task=1) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:task') + '?stu_cohort=they-killed-kenny' + url = reverse_lazy("assignments:task") + "?stu_cohort=they-killed-kenny" response = self.client.get(url) json = response.json() @@ -428,16 +426,16 @@ def 
test_task__query_stu_cohort__slug__found_zero__cohort_not_exists(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('assignments.Task'), [self.bc.format.to_dict(model.task)]) + self.assertEqual(self.bc.database.list_of("assignments.Task"), [self.bc.format.to_dict(model.task)]) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_task__query_stu_cohort__id__found_one(self): model = self.bc.database.create(profile_academy=1, task=1, cohort=1, cohort_user=1) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:task') + '?stu_cohort=1' + url = reverse_lazy("assignments:task") + "?stu_cohort=1" response = self.client.get(url) json = response.json() @@ -445,16 +443,16 @@ def test_task__query_stu_cohort__id__found_one(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('assignments.Task'), [self.bc.format.to_dict(model.task)]) + self.assertEqual(self.bc.database.list_of("assignments.Task"), [self.bc.format.to_dict(model.task)]) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_task__query_stu_cohort__slug__found_one(self): model = self.bc.database.create(profile_academy=1, task=1, cohort=1, cohort_user=1) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:task') + f'?stu_cohort={model.cohort.slug}' + url = reverse_lazy("assignments:task") + f"?stu_cohort={model.cohort.slug}" response = self.client.get(url) json = response.json() @@ -462,16 +460,16 @@ def test_task__query_stu_cohort__slug__found_one(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('assignments.Task'), [self.bc.format.to_dict(model.task)]) + self.assertEqual(self.bc.database.list_of("assignments.Task"), [self.bc.format.to_dict(model.task)]) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + 
@patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_task__query_stu_cohort__id__found_two(self): model = self.bc.database.create(profile_academy=1, task=2, cohort=1, cohort_user=1) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:task') + '?stu_cohort=1' + url = reverse_lazy("assignments:task") + "?stu_cohort=1" response = self.client.get(url) json = response.json() @@ -479,16 +477,16 @@ def test_task__query_stu_cohort__id__found_two(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('assignments.Task'), self.bc.format.to_dict(model.task)) + self.assertEqual(self.bc.database.list_of("assignments.Task"), self.bc.format.to_dict(model.task)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_task__query_stu_cohort__slug__found_two(self): model = self.bc.database.create(profile_academy=1, task=2, cohort=1, cohort_user=1) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:task') + f'?stu_cohort={model.cohort.slug}' + url = reverse_lazy("assignments:task") + f"?stu_cohort={model.cohort.slug}" response = self.client.get(url) json = response.json() @@ -496,18 +494,18 @@ def test_task__query_stu_cohort__slug__found_two(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('assignments.Task'), self.bc.format.to_dict(model.task)) + self.assertEqual(self.bc.database.list_of("assignments.Task"), self.bc.format.to_dict(model.task)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_task__query_stu_cohort__id__found_two__related_to_two_users(self): - tasks = [{'cohort_id': 1, 'user_id': 1}, {'cohort_id': 2, 'user_id': 2}] - cohort_users = [{'cohort_id': 1, 'user_id': 1}, {'cohort_id': 2, 'user_id': 2}] + tasks = [{"cohort_id": 1, "user_id": 1}, {"cohort_id": 2, "user_id": 2}] + cohort_users = [{"cohort_id": 1, "user_id": 1}, {"cohort_id": 2, "user_id": 2}] model = self.bc.database.create(profile_academy=1, user=2, task=tasks, cohort=2, cohort_user=cohort_users) self.bc.request.authenticate(model.user[0]) - url = reverse_lazy('assignments:task') + '?stu_cohort=1,2' + url = reverse_lazy("assignments:task") + "?stu_cohort=1,2" response = self.client.get(url) json = response.json() @@ -515,18 +513,18 @@ def 
test_task__query_stu_cohort__id__found_two__related_to_two_users(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('assignments.Task'), self.bc.format.to_dict(model.task)) + self.assertEqual(self.bc.database.list_of("assignments.Task"), self.bc.format.to_dict(model.task)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_task__query_stu_cohort__slug__found_two__related_to_two_users(self): - tasks = [{'cohort_id': 1, 'user_id': 1}, {'cohort_id': 2, 'user_id': 2}] - cohort_users = [{'cohort_id': 1, 'user_id': 1}, {'cohort_id': 2, 'user_id': 2}] + tasks = [{"cohort_id": 1, "user_id": 1}, {"cohort_id": 2, "user_id": 2}] + cohort_users = [{"cohort_id": 1, "user_id": 1}, {"cohort_id": 2, "user_id": 2}] model = self.bc.database.create(profile_academy=1, user=2, task=tasks, cohort=2, cohort_user=cohort_users) self.bc.request.authenticate(model.user[0]) - url = reverse_lazy('assignments:task') + f'?stu_cohort={model.cohort[0].slug},{model.cohort[1].slug}' + url = reverse_lazy("assignments:task") + f"?stu_cohort={model.cohort[0].slug},{model.cohort[1].slug}" response = self.client.get(url) json = response.json() @@ -534,19 +532,19 @@ def test_task__query_stu_cohort__slug__found_two__related_to_two_users(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('assignments.Task'), self.bc.format.to_dict(model.task)) + self.assertEqual(self.bc.database.list_of("assignments.Task"), self.bc.format.to_dict(model.task)) """ 🔽🔽🔽 Query edu_status """ - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_task__query_edu_status__found_zero__edu_status_not_exists(self): model = self.bc.database.create(profile_academy=1, task=1) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:task') + '?edu_status=ACTIVE' + url = reverse_lazy("assignments:task") + "?edu_status=ACTIVE" response = self.client.get(url) json = response.json() @@ -554,17 +552,17 @@ def test_task__query_edu_status__found_zero__edu_status_not_exists(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('assignments.Task'), [self.bc.format.to_dict(model.task)]) + self.assertEqual(self.bc.database.list_of("assignments.Task"), [self.bc.format.to_dict(model.task)]) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - 
@patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_task__query_edu_status__found_one(self): - cohort_user = {'user_id': 1, 'educational_status': 'ACTIVE'} + cohort_user = {"user_id": 1, "educational_status": "ACTIVE"} model = self.bc.database.create(profile_academy=1, task=1, cohort=1, cohort_user=cohort_user) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:task') + f'?edu_status={model.cohort_user.educational_status}' + url = reverse_lazy("assignments:task") + f"?edu_status={model.cohort_user.educational_status}" response = self.client.get(url) json = response.json() @@ -572,17 +570,17 @@ def test_task__query_edu_status__found_one(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('assignments.Task'), [self.bc.format.to_dict(model.task)]) + self.assertEqual(self.bc.database.list_of("assignments.Task"), [self.bc.format.to_dict(model.task)]) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_task__query_edu_status__found_two(self): - cohort_user = {'user_id': 1, 'educational_status': 'ACTIVE'} + cohort_user = {"user_id": 1, "educational_status": "ACTIVE"} model = self.bc.database.create(profile_academy=1, task=2, cohort=1, cohort_user=cohort_user) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:task') + f'?edu_status={model.cohort_user.educational_status}' + url = reverse_lazy("assignments:task") + f"?edu_status={model.cohort_user.educational_status}" response = self.client.get(url) json = response.json() @@ -590,27 +588,27 @@ def test_task__query_edu_status__found_two(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('assignments.Task'), self.bc.format.to_dict(model.task)) + self.assertEqual(self.bc.database.list_of("assignments.Task"), self.bc.format.to_dict(model.task)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_task__query_edu_status__found_two__related_to_two_edu_status(self): - tasks = [{'user_id': 
1, 'cohort_id': 1}, {'user_id': 2, 'cohort_id': 2}] + tasks = [{"user_id": 1, "cohort_id": 1}, {"user_id": 2, "cohort_id": 2}] cohort_users = [ { - 'user_id': 1, - 'educational_status': 'ACTIVE', + "user_id": 1, + "educational_status": "ACTIVE", }, { - 'user_id': 2, - 'educational_status': 'DROPPED', + "user_id": 2, + "educational_status": "DROPPED", }, ] model = self.bc.database.create(profile_academy=1, user=2, task=tasks, cohort=2, cohort_user=cohort_users) self.bc.request.authenticate(model.user[0]) - url = reverse_lazy('assignments:task') + f'?edu_status=ACTIVE,DROPPED' + url = reverse_lazy("assignments:task") + f"?edu_status=ACTIVE,DROPPED" response = self.client.get(url) json = response.json() @@ -618,19 +616,19 @@ def test_task__query_edu_status__found_two__related_to_two_edu_status(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('assignments.Task'), self.bc.format.to_dict(model.task)) + self.assertEqual(self.bc.database.list_of("assignments.Task"), self.bc.format.to_dict(model.task)) """ 🔽🔽🔽 Query teacher """ - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_task__query_teacher__found_zero__academy_not_exists(self): model = self.bc.database.create(profile_academy=1, task=1, cohort=1) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:task') + '?teacher=1' + url = reverse_lazy("assignments:task") + "?teacher=1" response = self.client.get(url) json = response.json() @@ -638,27 +636,27 @@ def test_task__query_teacher__found_zero__academy_not_exists(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('assignments.Task'), [self.bc.format.to_dict(model.task)]) + self.assertEqual(self.bc.database.list_of("assignments.Task"), [self.bc.format.to_dict(model.task)]) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_task__query_teacher__found_one(self): cohort_users = [ { - 'role': 'STUDENT', - 'user_id': 1, - 'cohort_id': 1, + "role": "STUDENT", + "user_id": 1, + "cohort_id": 1, }, { - 'role': 'TEACHER', - 'user_id': 1, - 'cohort_id': 1, + "role": "TEACHER", + "user_id": 1, + "cohort_id": 1, }, ] model = self.bc.database.create(profile_academy=1, task=1, cohort=1, cohort_user=cohort_users) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:task') + f'?teacher=1' + url = reverse_lazy("assignments:task") + f"?teacher=1" response = self.client.get(url) json = response.json() @@ -666,38 +664,38 @@ def test_task__query_teacher__found_one(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('assignments.Task'), [self.bc.format.to_dict(model.task)]) + 
self.assertEqual(self.bc.database.list_of("assignments.Task"), [self.bc.format.to_dict(model.task)]) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_task__query_teacher__found_two(self): - tasks = [{'user_id': 1, 'cohort_id': 1}, {'user_id': 1, 'cohort_id': 2}] + tasks = [{"user_id": 1, "cohort_id": 1}, {"user_id": 1, "cohort_id": 2}] cohort_users = [ { - 'role': 'STUDENT', - 'user_id': 1, - 'cohort_id': 1, + "role": "STUDENT", + "user_id": 1, + "cohort_id": 1, }, { - 'role': 'STUDENT', - 'user_id': 1, - 'cohort_id': 2, + "role": "STUDENT", + "user_id": 1, + "cohort_id": 2, }, { - 'role': 'TEACHER', - 'user_id': 1, - 'cohort_id': 1, + "role": "TEACHER", + "user_id": 1, + "cohort_id": 1, }, { - 'role': 'TEACHER', - 'user_id': 1, - 'cohort_id': 2, + "role": "TEACHER", + "user_id": 1, + "cohort_id": 2, }, ] model = self.bc.database.create(profile_academy=1, task=tasks, user=1, cohort=2, cohort_user=cohort_users) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:task') + f'?teacher=1' + url = reverse_lazy("assignments:task") + f"?teacher=1" response = self.client.get(url) json = response.json() @@ -705,20 +703,20 @@ def test_task__query_teacher__found_two(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('assignments.Task'), self.bc.format.to_dict(model.task)) + self.assertEqual(self.bc.database.list_of("assignments.Task"), self.bc.format.to_dict(model.task)) """ 🔽🔽🔽 Query task_status """ - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_task__query_task_status__found_zero__task_status_not_exists(self): - task = {'task_status': 'PENDING'} + task = {"task_status": "PENDING"} model = self.bc.database.create(profile_academy=1, task=task) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:task') + '?task_status=DONE' + url = reverse_lazy("assignments:task") + "?task_status=DONE" response = self.client.get(url) json = response.json() @@ -726,16 +724,16 @@ def test_task__query_task_status__found_zero__task_status_not_exists(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('assignments.Task'), [self.bc.format.to_dict(model.task)]) + self.assertEqual(self.bc.database.list_of("assignments.Task"), [self.bc.format.to_dict(model.task)]) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def 
test_task__query_task_status__found_one(self): - task = {'user_id': 1, 'task_status': 'DONE'} + task = {"user_id": 1, "task_status": "DONE"} model = self.bc.database.create(profile_academy=1, task=task) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:task') + '?task_status=DONE' + url = reverse_lazy("assignments:task") + "?task_status=DONE" response = self.client.get(url) json = response.json() @@ -743,17 +741,17 @@ def test_task__query_task_status__found_one(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('assignments.Task'), [self.bc.format.to_dict(model.task)]) + self.assertEqual(self.bc.database.list_of("assignments.Task"), [self.bc.format.to_dict(model.task)]) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_task__query_task_status__found_two(self): - tasks = [{'user_id': 1, 'task_status': 'DONE'}, {'user_id': 1, 'task_status': 'DONE'}] + tasks = [{"user_id": 1, "task_status": "DONE"}, {"user_id": 1, "task_status": "DONE"}] model = self.bc.database.create(profile_academy=1, task=tasks) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:task') + '?task_status=DONE' + url = reverse_lazy("assignments:task") + "?task_status=DONE" response = self.client.get(url) json = response.json() @@ -761,16 +759,16 @@ def test_task__query_task_status__found_two(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('assignments.Task'), self.bc.format.to_dict(model.task)) + self.assertEqual(self.bc.database.list_of("assignments.Task"), self.bc.format.to_dict(model.task)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_task__query_task_status__found_two__related_to_two_task_status(self): - tasks = [{'task_status': 'DONE'}, {'task_status': 'PENDING'}] + tasks = [{"task_status": "DONE"}, {"task_status": "PENDING"}] model = self.bc.database.create(profile_academy=1, user=1, task=tasks) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:task') + f'?task_status=DONE,PENDING' + url = reverse_lazy("assignments:task") + f"?task_status=DONE,PENDING" response = self.client.get(url) json = response.json() @@ -778,20 +776,20 @@ def test_task__query_task_status__found_two__related_to_two_task_status(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('assignments.Task'), self.bc.format.to_dict(model.task)) + self.assertEqual(self.bc.database.list_of("assignments.Task"), self.bc.format.to_dict(model.task)) """ 🔽🔽🔽 Query revision_status """ - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - 
@patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_task__query_revision_status__found_zero__revision_status_not_exists(self): - task = {'revision_status': 'PENDING'} + task = {"revision_status": "PENDING"} model = self.bc.database.create(profile_academy=1, task=task) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:task') + '?revision_status=APPROVED' + url = reverse_lazy("assignments:task") + "?revision_status=APPROVED" response = self.client.get(url) json = response.json() @@ -799,16 +797,16 @@ def test_task__query_revision_status__found_zero__revision_status_not_exists(sel self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('assignments.Task'), [self.bc.format.to_dict(model.task)]) + self.assertEqual(self.bc.database.list_of("assignments.Task"), [self.bc.format.to_dict(model.task)]) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_task__query_revision_status__found_one(self): - task = {'user_id': 1, 'revision_status': 'APPROVED'} + task = {"user_id": 1, "revision_status": "APPROVED"} model = self.bc.database.create(profile_academy=1, task=task) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:task') + '?revision_status=APPROVED' + url = reverse_lazy("assignments:task") + "?revision_status=APPROVED" response = self.client.get(url) json = response.json() @@ -816,16 +814,16 @@ def test_task__query_revision_status__found_one(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('assignments.Task'), [self.bc.format.to_dict(model.task)]) + self.assertEqual(self.bc.database.list_of("assignments.Task"), [self.bc.format.to_dict(model.task)]) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_task__query_revision_status__found_two(self): - tasks = [{'user_id': 1, 'revision_status': 'APPROVED'}, {'user_id': 1, 'revision_status': 'APPROVED'}] + tasks = [{"user_id": 1, "revision_status": "APPROVED"}, {"user_id": 1, "revision_status": "APPROVED"}] model = self.bc.database.create(profile_academy=1, task=tasks) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:task') + '?revision_status=APPROVED' + url = reverse_lazy("assignments:task") + "?revision_status=APPROVED" response = self.client.get(url) json = response.json() @@ -833,16 +831,16 @@ def test_task__query_revision_status__found_two(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, 
status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('assignments.Task'), self.bc.format.to_dict(model.task)) + self.assertEqual(self.bc.database.list_of("assignments.Task"), self.bc.format.to_dict(model.task)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_task__query_revision_status__found_two__related_to_two_revision_status(self): - tasks = [{'revision_status': 'APPROVED'}, {'revision_status': 'PENDING'}] + tasks = [{"revision_status": "APPROVED"}, {"revision_status": "PENDING"}] model = self.bc.database.create(profile_academy=1, user=1, task=tasks) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:task') + f'?revision_status=APPROVED,PENDING' + url = reverse_lazy("assignments:task") + f"?revision_status=APPROVED,PENDING" response = self.client.get(url) json = response.json() @@ -850,20 +848,20 @@ def test_task__query_revision_status__found_two__related_to_two_revision_status( self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('assignments.Task'), self.bc.format.to_dict(model.task)) + self.assertEqual(self.bc.database.list_of("assignments.Task"), self.bc.format.to_dict(model.task)) """ 🔽🔽🔽 Query task_type """ - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_task__query_task_type__found_zero__task_type_not_exists(self): - task = {'task_type': 'QUIZ'} + task = {"task_type": "QUIZ"} model = self.bc.database.create(profile_academy=1, task=task) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:task') + '?task_type=PROJECT' + url = reverse_lazy("assignments:task") + "?task_type=PROJECT" response = self.client.get(url) json = response.json() @@ -871,16 +869,16 @@ def test_task__query_task_type__found_zero__task_type_not_exists(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('assignments.Task'), [self.bc.format.to_dict(model.task)]) + self.assertEqual(self.bc.database.list_of("assignments.Task"), [self.bc.format.to_dict(model.task)]) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_task__query_task_type__found_one(self): - task = {'user_id': 1, 'task_type': 'PROJECT'} + task = {"user_id": 1, "task_type": "PROJECT"} model = self.bc.database.create(profile_academy=1, task=task) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:task') + '?task_type=PROJECT' 
+ url = reverse_lazy("assignments:task") + "?task_type=PROJECT" response = self.client.get(url) json = response.json() @@ -888,16 +886,16 @@ def test_task__query_task_type__found_one(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('assignments.Task'), [self.bc.format.to_dict(model.task)]) + self.assertEqual(self.bc.database.list_of("assignments.Task"), [self.bc.format.to_dict(model.task)]) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_task__query_task_type__found_two(self): - tasks = [{'user_id': 1, 'task_type': 'PROJECT'}, {'user_id': 1, 'task_type': 'PROJECT'}] + tasks = [{"user_id": 1, "task_type": "PROJECT"}, {"user_id": 1, "task_type": "PROJECT"}] model = self.bc.database.create(profile_academy=1, task=tasks) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:task') + '?task_type=PROJECT' + url = reverse_lazy("assignments:task") + "?task_type=PROJECT" response = self.client.get(url) json = response.json() @@ -905,16 +903,16 @@ def test_task__query_task_type__found_two(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('assignments.Task'), self.bc.format.to_dict(model.task)) + self.assertEqual(self.bc.database.list_of("assignments.Task"), self.bc.format.to_dict(model.task)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_task__query_revision_status__found_two__related_to_two_revision_status(self): - tasks = [{'task_type': 'PROJECT'}, {'task_type': 'QUIZ'}] + tasks = [{"task_type": "PROJECT"}, {"task_type": "QUIZ"}] model = self.bc.database.create(profile_academy=1, user=1, task=tasks) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:task') + f'?task_type=PROJECT,QUIZ' + url = reverse_lazy("assignments:task") + f"?task_type=PROJECT,QUIZ" response = self.client.get(url) json = response.json() @@ -922,4 +920,4 @@ def test_task__query_revision_status__found_two__related_to_two_revision_status( self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('assignments.Task'), self.bc.format.to_dict(model.task)) + self.assertEqual(self.bc.database.list_of("assignments.Task"), self.bc.format.to_dict(model.task)) diff --git a/breathecode/assignments/tests/urls/tests_task_id.py b/breathecode/assignments/tests/urls/tests_task_id.py index 0690486b1..cae53940d 100644 --- a/breathecode/assignments/tests/urls/tests_task_id.py +++ b/breathecode/assignments/tests/urls/tests_task_id.py @@ -1,6 +1,7 @@ """ Test /answer """ + from unittest.mock import MagicMock, call, patch import pytest @@ -14,133 +15,130 @@ def get_serializer(self, task, user): return { - 'associated_slug': 
task.associated_slug, - 'created_at': self.bc.datetime.to_iso_string(task.created_at), - 'updated_at': self.bc.datetime.to_iso_string(task.updated_at), - 'github_url': task.github_url, - 'id': task.id, - 'live_url': task.live_url, - 'revision_status': task.revision_status, - 'task_status': task.task_status, - 'task_type': task.task_type, - 'title': task.title, - 'assignment_telemetry': task.telemetry.telemetry if task.telemetry else None, - 'description': task.description, - 'opened_at': self.bc.datetime.to_iso_string(task.opened_at) if task.opened_at else task.opened_at, - 'delivered_at': self.bc.datetime.to_iso_string(task.delivered_at) if task.delivered_at else task.delivered_at, - 'user': { - 'first_name': user.first_name, - 'id': user.id, - 'last_name': user.last_name - } + "associated_slug": task.associated_slug, + "created_at": self.bc.datetime.to_iso_string(task.created_at), + "updated_at": self.bc.datetime.to_iso_string(task.updated_at), + "github_url": task.github_url, + "id": task.id, + "live_url": task.live_url, + "revision_status": task.revision_status, + "task_status": task.task_status, + "task_type": task.task_type, + "title": task.title, + "assignment_telemetry": task.telemetry.telemetry if task.telemetry else None, + "description": task.description, + "opened_at": self.bc.datetime.to_iso_string(task.opened_at) if task.opened_at else task.opened_at, + "delivered_at": self.bc.datetime.to_iso_string(task.delivered_at) if task.delivered_at else task.delivered_at, + "user": {"first_name": user.first_name, "id": user.id, "last_name": user.last_name}, } def put_serializer(self, task, data={}): return { - 'github_url': task.github_url, - 'created_at': self.bc.datetime.to_iso_string(task.created_at), - 'cohort': task.cohort.id if task.cohort else None, - 'id': task.id, - 'description': task.description, - 'live_url': task.live_url, - 'task_type': task.task_type, - 'associated_slug': task.associated_slug, - 'revision_status': task.revision_status, - 'task_status': task.task_status, - 'associated_slug': task.associated_slug, - 'task_type': task.task_type, - 'attachments': [], - 'subtasks': task.subtasks, - 'title': task.title, - 'telemetry': task.telemetry, - 'rigobot_repository_id': task.rigobot_repository_id, - 'opened_at': self.bc.datetime.to_iso_string(task.opened_at) if task.opened_at else task.opened_at, - 'delivered_at': self.bc.datetime.to_iso_string(task.delivered_at) if task.delivered_at else task.delivered_at, + "github_url": task.github_url, + "created_at": self.bc.datetime.to_iso_string(task.created_at), + "cohort": task.cohort.id if task.cohort else None, + "id": task.id, + "description": task.description, + "live_url": task.live_url, + "task_type": task.task_type, + "associated_slug": task.associated_slug, + "revision_status": task.revision_status, + "task_status": task.task_status, + "associated_slug": task.associated_slug, + "task_type": task.task_type, + "attachments": [], + "subtasks": task.subtasks, + "title": task.title, + "telemetry": task.telemetry, + "rigobot_repository_id": task.rigobot_repository_id, + "opened_at": self.bc.datetime.to_iso_string(task.opened_at) if task.opened_at else task.opened_at, + "delivered_at": self.bc.datetime.to_iso_string(task.delivered_at) if task.delivered_at else task.delivered_at, **data, } def task_row(self, task, data={}): return { - 'id': task.id, - 'associated_slug': task.associated_slug, - 'title': task.title, - 'task_status': task.task_status, - 'revision_status': task.revision_status, - 'task_type': task.task_type, - 
'github_url': task.github_url, - 'live_url': task.live_url, - 'description': task.description, - 'cohort_id': task.cohort.id if task.cohort else None, - 'user_id': task.user.id, - 'subtasks': task.subtasks, - 'opened_at': self.bc.datetime.to_iso_string(task.opened_at) if task.opened_at else task.opened_at, - 'delivered_at': self.bc.datetime.to_iso_string(task.delivered_at) if task.delivered_at else task.delivered_at, - 'rigobot_repository_id': task.rigobot_repository_id, - 'telemetry_id': task.telemetry, + "id": task.id, + "associated_slug": task.associated_slug, + "title": task.title, + "task_status": task.task_status, + "revision_status": task.revision_status, + "task_type": task.task_type, + "github_url": task.github_url, + "live_url": task.live_url, + "description": task.description, + "cohort_id": task.cohort.id if task.cohort else None, + "user_id": task.user.id, + "subtasks": task.subtasks, + "opened_at": self.bc.datetime.to_iso_string(task.opened_at) if task.opened_at else task.opened_at, + "delivered_at": self.bc.datetime.to_iso_string(task.delivered_at) if task.delivered_at else task.delivered_at, + "rigobot_repository_id": task.rigobot_repository_id, + "telemetry_id": task.telemetry, **data, } @pytest.fixture(autouse=True) def setup(monkeypatch): - monkeypatch.setattr(activity_tasks.add_activity, 'delay', MagicMock()) + monkeypatch.setattr(activity_tasks.add_activity, "delay", MagicMock()) yield class MediaTestSuite(AssignmentsTestCase): """Test /answer""" + """ 🔽🔽🔽 Auth """ - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_task_id__without_auth(self): - url = reverse_lazy('assignments:task_id', kwargs={'task_id': 1}) + url = reverse_lazy("assignments:task_id", kwargs={"task_id": 1}) response = self.client.get(url) json = response.json() - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) - self.assertEqual(self.bc.database.list_of('assignments.Task'), []) + self.assertEqual(self.bc.database.list_of("assignments.Task"), []) self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, []) """ 🔽🔽🔽 Get without Task """ - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_task_id__without_task(self): model = self.bc.database.create(user=1) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:task_id', kwargs={'task_id': 1}) + url = reverse_lazy("assignments:task_id", kwargs={"task_id": 1}) response = self.client.get(url) json = response.json() - expected = {'detail': 'task-not-found', 'status_code': 404} + expected = {"detail": "task-not-found", "status_code": 404} self.assertEqual(json, expected) 
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - self.assertEqual(self.bc.database.list_of('assignments.Task'), []) + self.assertEqual(self.bc.database.list_of("assignments.Task"), []) self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, []) """ 🔽🔽🔽 Get with Task """ - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_task_id__with_one_task(self): model = self.bc.database.create(user=1, task=1, cohort=1) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:task_id', kwargs={'task_id': 1}) + url = reverse_lazy("assignments:task_id", kwargs={"task_id": 1}) response = self.client.get(url) json = response.json() @@ -148,54 +146,54 @@ def test_task_id__with_one_task(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('assignments.Task'), [self.bc.format.to_dict(model.task)]) + self.assertEqual(self.bc.database.list_of("assignments.Task"), [self.bc.format.to_dict(model.task)]) self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, []) """ 🔽🔽🔽 Get with Task but the other user """ - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_task_id__with_one_task__but_the_other_user(self): - task = {'user_id': 2} + task = {"user_id": 2} model = self.bc.database.create(user=2, task=task, cohort=1) self.bc.request.authenticate(model.user[0]) - url = reverse_lazy('assignments:task_id', kwargs={'task_id': 1}) + url = reverse_lazy("assignments:task_id", kwargs={"task_id": 1}) response = self.client.get(url) json = response.json() - expected = {'detail': 'task-not-found', 'status_code': 404} + expected = {"detail": "task-not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - self.assertEqual(self.bc.database.list_of('assignments.Task'), [self.bc.format.to_dict(model.task)]) + self.assertEqual(self.bc.database.list_of("assignments.Task"), [self.bc.format.to_dict(model.task)]) self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, []) """ 🔽🔽🔽 Put without Task """ - @patch('breathecode.assignments.tasks.student_task_notification', MagicMock()) - @patch('breathecode.assignments.tasks.teacher_task_notification', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.assignments.tasks.student_task_notification", MagicMock()) + @patch("breathecode.assignments.tasks.teacher_task_notification", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", 
MagicMock(return_value=None)) def test_task_id__put__without_tasks(self): from breathecode.assignments.tasks import student_task_notification, teacher_task_notification model = self.bc.database.create(user=1) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:task_id', kwargs={'task_id': 1}) + url = reverse_lazy("assignments:task_id", kwargs={"task_id": 1}) response = self.client.put(url) json = response.json() - expected = {'detail': 'task-not-found', 'status_code': 404} + expected = {"detail": "task-not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - self.assertEqual(self.bc.database.list_of('assignments.Task'), []) + self.assertEqual(self.bc.database.list_of("assignments.Task"), []) self.assertEqual(student_task_notification.delay.call_args_list, []) self.assertEqual(teacher_task_notification.delay.call_args_list, []) @@ -205,34 +203,34 @@ def test_task_id__put__without_tasks(self): 🔽🔽🔽 Put with Task """ - @patch('breathecode.assignments.tasks.student_task_notification', MagicMock()) - @patch('breathecode.assignments.tasks.teacher_task_notification', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.assignments.tasks.student_task_notification", MagicMock()) + @patch("breathecode.assignments.tasks.teacher_task_notification", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_task_id__put__with_one_task(self): from breathecode.assignments.tasks import student_task_notification, teacher_task_notification model = self.bc.database.create(user=1, task=1, cohort=1) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:task_id', kwargs={'task_id': 1}) - data = {'title': 'They killed kennyy'} + url = reverse_lazy("assignments:task_id", kwargs={"task_id": 1}) + data = {"title": "They killed kennyy"} start = self.bc.datetime.now() - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") end = self.bc.datetime.now() json = response.json() - updated_at = self.bc.datetime.from_iso_string(json['updated_at']) + updated_at = self.bc.datetime.from_iso_string(json["updated_at"]) self.bc.check.datetime_in_range(start, end, updated_at) - del json['updated_at'] + del json["updated_at"] expected = put_serializer(self, model.task, data=data) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('assignments.Task'), [task_row(self, model.task, data=data)]) + self.assertEqual(self.bc.database.list_of("assignments.Task"), [task_row(self, model.task, data=data)]) self.assertEqual(student_task_notification.delay.call_args_list, []) self.assertEqual(teacher_task_notification.delay.call_args_list, []) @@ -242,31 +240,31 @@ def test_task_id__put__with_one_task(self): 🔽🔽🔽 Put with Task of other user passing task_status """ - @patch('breathecode.assignments.tasks.student_task_notification', MagicMock()) - @patch('breathecode.assignments.tasks.teacher_task_notification', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - 
@patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.assignments.tasks.student_task_notification", MagicMock()) + @patch("breathecode.assignments.tasks.teacher_task_notification", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_task_id__put__with_one_task__with_task_status(self): from breathecode.assignments.tasks import student_task_notification, teacher_task_notification - task = {'task_status': 'PENDING', 'user_id': 2} + task = {"task_status": "PENDING", "user_id": 2} model = self.bc.database.create(user=2, task=task, cohort=1) self.bc.request.authenticate(model.user[0]) - url = reverse_lazy('assignments:task_id', kwargs={'task_id': 1}) + url = reverse_lazy("assignments:task_id", kwargs={"task_id": 1}) data = { - 'associated_slug': 'they-killed-kenny', - 'title': 'They killed kenny', - 'task_status': 'DONE', + "associated_slug": "they-killed-kenny", + "title": "They killed kenny", + "task_status": "DONE", } - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() - expected = {'detail': 'put-task-status-of-other-user', 'status_code': 400} + expected = {"detail": "put-task-status-of-other-user", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('assignments.Task'), [self.bc.format.to_dict(model.task)]) + self.assertEqual(self.bc.database.list_of("assignments.Task"), [self.bc.format.to_dict(model.task)]) self.assertEqual(student_task_notification.delay.call_args_list, []) self.assertEqual(teacher_task_notification.delay.call_args_list, []) @@ -276,31 +274,31 @@ def test_task_id__put__with_one_task__with_task_status(self): 🔽🔽🔽 Put with Task of other user passing live_url """ - @patch('breathecode.assignments.tasks.student_task_notification', MagicMock()) - @patch('breathecode.assignments.tasks.teacher_task_notification', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.assignments.tasks.student_task_notification", MagicMock()) + @patch("breathecode.assignments.tasks.teacher_task_notification", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_task_id__put__with_one_task__with_live_url(self): from breathecode.assignments.tasks import student_task_notification, teacher_task_notification - task = {'live_url': 'PENDING', 'user_id': 2} + task = {"live_url": "PENDING", "user_id": 2} model = self.bc.database.create(user=2, task=task, cohort=1) self.bc.request.authenticate(model.user[0]) - url = reverse_lazy('assignments:task_id', kwargs={'task_id': 1}) + url = reverse_lazy("assignments:task_id", kwargs={"task_id": 1}) data = { - 'associated_slug': 'they-killed-kenny', - 'title': 'They killed kenny', - 'live_url': 'DONE', + "associated_slug": "they-killed-kenny", + "title": "They killed kenny", + "live_url": "DONE", } - response = self.client.put(url, data, format='json') + response = self.client.put(url, 
data, format="json") json = response.json() - expected = {'detail': 'put-live-url-of-other-user', 'status_code': 400} + expected = {"detail": "put-live-url-of-other-user", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('assignments.Task'), [self.bc.format.to_dict(model.task)]) + self.assertEqual(self.bc.database.list_of("assignments.Task"), [self.bc.format.to_dict(model.task)]) self.assertEqual(student_task_notification.delay.call_args_list, []) self.assertEqual(teacher_task_notification.delay.call_args_list, []) @@ -310,31 +308,31 @@ def test_task_id__put__with_one_task__with_live_url(self): 🔽🔽🔽 Put with Task of other user passing github_url """ - @patch('breathecode.assignments.tasks.student_task_notification', MagicMock()) - @patch('breathecode.assignments.tasks.teacher_task_notification', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.assignments.tasks.student_task_notification", MagicMock()) + @patch("breathecode.assignments.tasks.teacher_task_notification", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_task_id__put__with_one_task__with_github_url(self): from breathecode.assignments.tasks import student_task_notification, teacher_task_notification - task = {'github_url': 'PENDING', 'user_id': 2} + task = {"github_url": "PENDING", "user_id": 2} model = self.bc.database.create(user=2, task=task, cohort=1) self.bc.request.authenticate(model.user[0]) - url = reverse_lazy('assignments:task_id', kwargs={'task_id': 1}) + url = reverse_lazy("assignments:task_id", kwargs={"task_id": 1}) data = { - 'associated_slug': 'they-killed-kenny', - 'title': 'They killed kenny', - 'github_url': 'DONE', + "associated_slug": "they-killed-kenny", + "title": "They killed kenny", + "github_url": "DONE", } - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() - expected = {'detail': 'put-github-url-of-other-user', 'status_code': 400} + expected = {"detail": "put-github-url-of-other-user", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('assignments.Task'), [self.bc.format.to_dict(model.task)]) + self.assertEqual(self.bc.database.list_of("assignments.Task"), [self.bc.format.to_dict(model.task)]) self.assertEqual(student_task_notification.delay.call_args_list, []) self.assertEqual(teacher_task_notification.delay.call_args_list, []) @@ -344,149 +342,155 @@ def test_task_id__put__with_one_task__with_github_url(self): 🔽🔽🔽 Put with Task of other user passing revision_status """ - @patch('breathecode.assignments.tasks.student_task_notification', MagicMock()) - @patch('breathecode.assignments.tasks.teacher_task_notification', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.assignments.tasks.student_task_notification", MagicMock()) + 
@patch("breathecode.assignments.tasks.teacher_task_notification", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_task_id__put__with_one_task__with_revision_status(self): from breathecode.assignments.tasks import student_task_notification, teacher_task_notification - task = {'revision_status': 'PENDING', 'user_id': 2, 'task_status': 'DONE'} + task = {"revision_status": "PENDING", "user_id": 2, "task_status": "DONE"} model = self.bc.database.create(user=2, task=task, cohort=1) self.bc.request.authenticate(model.user[0]) - url = reverse_lazy('assignments:task_id', kwargs={'task_id': 1}) + url = reverse_lazy("assignments:task_id", kwargs={"task_id": 1}) data = { - 'title': 'They killed kenny', - 'revision_status': 'APPROVED', + "title": "They killed kenny", + "revision_status": "APPROVED", } - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() - expected = {'detail': 'editing-revision-status-but-is-not-teacher-or-assistant', 'status_code': 400} + expected = {"detail": "editing-revision-status-but-is-not-teacher-or-assistant", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('assignments.Task'), [self.bc.format.to_dict(model.task)]) + self.assertEqual(self.bc.database.list_of("assignments.Task"), [self.bc.format.to_dict(model.task)]) self.assertEqual(student_task_notification.delay.call_args_list, []) self.assertEqual(teacher_task_notification.delay.call_args_list, []) self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, []) - @patch('breathecode.assignments.tasks.student_task_notification', MagicMock()) - @patch('breathecode.assignments.tasks.teacher_task_notification', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.assignments.tasks.student_task_notification", MagicMock()) + @patch("breathecode.assignments.tasks.teacher_task_notification", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_task_id__put__with_one_task__with_revision_status__teacher_auth(self): from breathecode.assignments.tasks import student_task_notification, teacher_task_notification - statuses = ['PENDING', 'APPROVED', 'REJECTED', 'IGNORED'] + statuses = ["PENDING", "APPROVED", "REJECTED", "IGNORED"] for index in range(0, 4): current_status = statuses[index] next_status = statuses[index - 1 if index > 0 else 3] - task = {'revision_status': current_status, 'user_id': (index * 2) + 1, 'task_status': 'DONE'} + task = {"revision_status": current_status, "user_id": (index * 2) + 1, "task_status": "DONE"} cohort_users = [ { - 'role': 'STUDENT', - 'user_id': (index * 2) + 1, + "role": "STUDENT", + "user_id": (index * 2) + 1, }, { - 'role': 'TEACHER', - 'user_id': (index * 2) + 2, + "role": "TEACHER", + "user_id": (index * 2) + 2, }, ] model = self.bc.database.create(user=2, task=task, cohort=1, cohort_user=cohort_users) model2 = self.bc.database.create(cohort=1) 
self.bc.request.authenticate(model.user[1]) - url = reverse_lazy('assignments:task_id', kwargs={'task_id': index + 1}) + url = reverse_lazy("assignments:task_id", kwargs={"task_id": index + 1}) data = { - 'title': 'They killed kenny', - 'revision_status': next_status, + "title": "They killed kenny", + "revision_status": next_status, } start = self.bc.datetime.now() - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") end = self.bc.datetime.now() json = response.json() - updated_at = self.bc.datetime.from_iso_string(json['updated_at']) + updated_at = self.bc.datetime.from_iso_string(json["updated_at"]) self.bc.check.datetime_in_range(start, end, updated_at) - del json['updated_at'] + del json["updated_at"] expected = put_serializer(self, model.task, data=data) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('assignments.Task'), [task_row(self, model.task, data=data)]) + self.assertEqual(self.bc.database.list_of("assignments.Task"), [task_row(self, model.task, data=data)]) self.assertEqual(student_task_notification.delay.call_args_list, [call(index + 1)]) self.assertEqual(teacher_task_notification.delay.call_args_list, []) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call((index * 2) + 2, - 'assignment_review_status_updated', - related_type='assignments.Task', - related_id=index + 1), - ]) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call( + (index * 2) + 2, + "assignment_review_status_updated", + related_type="assignments.Task", + related_id=index + 1, + ), + ], + ) # teardown - self.bc.database.delete('assignments.Task') + self.bc.database.delete("assignments.Task") student_task_notification.delay.call_args_list = [] activity_tasks.add_activity.delay.call_args_list = [] - @patch('breathecode.assignments.tasks.student_task_notification', MagicMock()) - @patch('breathecode.assignments.tasks.teacher_task_notification', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.assignments.tasks.student_task_notification", MagicMock()) + @patch("breathecode.assignments.tasks.teacher_task_notification", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_task_id__put__with_one_task__with_revision_status__staff_auth(self): from breathecode.assignments.tasks import student_task_notification, teacher_task_notification - task = {'revision_status': 'PENDING', 'user_id': 1, 'task_status': 'DONE'} - cohort_user = {'role': 'STUDENT', 'user_id': 1} - profile_academy = {'user_id': 2} - model = self.bc.database.create(user=2, - task=task, - cohort=1, - cohort_user=cohort_user, - profile_academy=profile_academy) + task = {"revision_status": "PENDING", "user_id": 1, "task_status": "DONE"} + cohort_user = {"role": "STUDENT", "user_id": 1} + profile_academy = {"user_id": 2} + model = self.bc.database.create( + user=2, task=task, cohort=1, cohort_user=cohort_user, profile_academy=profile_academy + ) self.bc.request.authenticate(model.user[1]) - url = reverse_lazy('assignments:task_id', kwargs={'task_id': 1}) + url = reverse_lazy("assignments:task_id", 
kwargs={"task_id": 1}) data = { - 'title': 'They killed kenny', - 'revision_status': 'APPROVED', + "title": "They killed kenny", + "revision_status": "APPROVED", } start = self.bc.datetime.now() - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") end = self.bc.datetime.now() json = response.json() - updated_at = self.bc.datetime.from_iso_string(json['updated_at']) + updated_at = self.bc.datetime.from_iso_string(json["updated_at"]) self.bc.check.datetime_in_range(start, end, updated_at) - del json['updated_at'] + del json["updated_at"] expected = put_serializer(self, model.task, data=data) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('assignments.Task'), [task_row(self, model.task, data=data)]) + self.assertEqual(self.bc.database.list_of("assignments.Task"), [task_row(self, model.task, data=data)]) self.assertEqual(student_task_notification.delay.call_args_list, [call(1)]) self.assertEqual(teacher_task_notification.delay.call_args_list, []) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(2, 'assignment_review_status_updated', related_type='assignments.Task', related_id=1), - ]) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(2, "assignment_review_status_updated", related_type="assignments.Task", related_id=1), + ], + ) """ 🔽🔽🔽 Put prevent mark task as done if it is not delivered """ - @patch('breathecode.assignments.tasks.student_task_notification', MagicMock()) - @patch('breathecode.assignments.tasks.teacher_task_notification', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.assignments.tasks.student_task_notification", MagicMock()) + @patch("breathecode.assignments.tasks.teacher_task_notification", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_task_status_pending_and_revision_status_pending(self): """Test /task with task_status = pending and revision_status = pending should pass""" @@ -495,32 +499,35 @@ def test_task_status_pending_and_revision_status_pending(self): model = self.bc.database.create(task=1, user=1, cohort=1) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:task_id', kwargs={ - 'task_id': model.task.id, - }) - data = {'cohort': model['cohort'].id, 'title': 'hello'} + url = reverse_lazy( + "assignments:task_id", + kwargs={ + "task_id": model.task.id, + }, + ) + data = {"cohort": model["cohort"].id, "title": "hello"} response = self.client.put(url, data) json = response.json() - self.assertDatetime(json['created_at']) - self.assertDatetime(json['updated_at']) + self.assertDatetime(json["created_at"]) + self.assertDatetime(json["updated_at"]) - del json['updated_at'] + del json["updated_at"] expected = put_serializer(self, model.task, data=data) self.assertEqual(json, expected) - data['cohort_id'] = data.pop('cohort') - self.assertEqual(self.bc.database.list_of('assignments.Task'), [task_row(self, model.task, data=data)]) + data["cohort_id"] = data.pop("cohort") + self.assertEqual(self.bc.database.list_of("assignments.Task"), [task_row(self, model.task, data=data)]) 
self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(student_task_notification.delay.call_args_list, []) self.assertEqual(teacher_task_notification.delay.call_args_list, []) self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, []) - @patch('breathecode.assignments.tasks.student_task_notification', MagicMock()) - @patch('breathecode.assignments.tasks.teacher_task_notification', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.assignments.tasks.student_task_notification", MagicMock()) + @patch("breathecode.assignments.tasks.teacher_task_notification", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_task_status_pending_and_revision_status_approved(self): """Test /task with task_status = pending and revision_status = approved should fail""" @@ -529,33 +536,36 @@ def test_task_status_pending_and_revision_status_approved(self): model = self.bc.database.create(task=1, user=1, cohort=1) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:task_id', kwargs={ - 'task_id': model.task.id, - }) + url = reverse_lazy( + "assignments:task_id", + kwargs={ + "task_id": model.task.id, + }, + ) data = { - 'associated_slug': 'hello', - 'title': 'hello', - 'revision_status': 'APPROVED', + "associated_slug": "hello", + "title": "hello", + "revision_status": "APPROVED", } response = self.client.put(url, data) json = response.json() - expected = {'detail': 'task-marked-approved-when-pending', 'status_code': 400} + expected = {"detail": "task-marked-approved-when-pending", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('assignments.Task'), [self.bc.format.to_dict(model.task)]) + self.assertEqual(self.bc.database.list_of("assignments.Task"), [self.bc.format.to_dict(model.task)]) self.assertEqual(student_task_notification.delay.call_args_list, []) self.assertEqual(teacher_task_notification.delay.call_args_list, []) self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, []) - @patch('breathecode.assignments.tasks.student_task_notification', MagicMock()) - @patch('breathecode.assignments.tasks.teacher_task_notification', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.assignments.tasks.student_task_notification", MagicMock()) + @patch("breathecode.assignments.tasks.teacher_task_notification", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_task_status_pending_and_revision_status_approved_both(self): """Test /task with task_status = pending and revision_status = approved should fail""" @@ -564,72 +574,75 @@ def test_task_status_pending_and_revision_status_approved_both(self): model = self.bc.database.create(task=1, user=1, cohort=1) self.client.force_authenticate(model.user) - url = 
reverse_lazy('assignments:task_id', kwargs={ - 'task_id': model.task.id, - }) + url = reverse_lazy( + "assignments:task_id", + kwargs={ + "task_id": model.task.id, + }, + ) - data = {'associated_slug': 'hello', 'title': 'hello', 'task_status': 'DONE', 'revision_status': 'APPROVED'} + data = {"associated_slug": "hello", "title": "hello", "task_status": "DONE", "revision_status": "APPROVED"} response = self.client.put(url, data) json = response.json() - expected = {'detail': 'task-marked-approved-when-pending', 'status_code': 400} + expected = {"detail": "task-marked-approved-when-pending", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('assignments.Task'), [self.bc.format.to_dict(model.task)]) + self.assertEqual(self.bc.database.list_of("assignments.Task"), [self.bc.format.to_dict(model.task)]) self.assertEqual(student_task_notification.delay.call_args_list, []) self.assertEqual(teacher_task_notification.delay.call_args_list, []) self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, []) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_delete_task_not_found(self): model = self.bc.database.create(user=1) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:task_id', kwargs={'task_id': 1}) + url = reverse_lazy("assignments:task_id", kwargs={"task_id": 1}) response = self.client.delete(url) json = response.json() - expected = {'detail': 'task-not-found', 'status_code': 404} + expected = {"detail": "task-not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - self.assertEqual(self.bc.database.list_of('assignments.Task'), []) + self.assertEqual(self.bc.database.list_of("assignments.Task"), []) self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, []) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_delete_task_found_and_deleted(self): model = self.bc.database.create(user=1, task=1, cohort=1) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:task_id', kwargs={'task_id': 1}) + url = reverse_lazy("assignments:task_id", kwargs={"task_id": 1}) response = self.client.delete(url) self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) - self.assertEqual(self.bc.database.list_of('assignments.Task'), []) + self.assertEqual(self.bc.database.list_of("assignments.Task"), []) self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, []) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", 
MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_delete_task_associated_with_another_user(self): model = self.bc.database.create(user=2, task=1, cohort=1) self.bc.request.authenticate(model.user[1]) - url = reverse_lazy('assignments:task_id', kwargs={'task_id': 1}) + url = reverse_lazy("assignments:task_id", kwargs={"task_id": 1}) response = self.client.delete(url) json = response.json() - expected = {'detail': 'task-not-found-for-this-user', 'status_code': 400} + expected = {"detail": "task-not-found-for-this-user", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('assignments.Task'), [self.bc.format.to_dict(model.task)]) + self.assertEqual(self.bc.database.list_of("assignments.Task"), [self.bc.format.to_dict(model.task)]) self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, []) diff --git a/breathecode/assignments/tests/urls/tests_task_id_me_commitfile.py b/breathecode/assignments/tests/urls/tests_task_id_me_commitfile.py index 1f9a18550..1991f1147 100644 --- a/breathecode/assignments/tests/urls/tests_task_id_me_commitfile.py +++ b/breathecode/assignments/tests/urls/tests_task_id_me_commitfile.py @@ -1,6 +1,7 @@ """ Test /answer """ + import json import random from unittest.mock import MagicMock, call, patch @@ -17,20 +18,20 @@ class MediaTestSuite(AssignmentsTestCase): # When: no auth # Then: response 401 def test_no_auth(self): - url = reverse_lazy('assignments:me_task_id_commitfile', kwargs={'task_id': 1}) + url = reverse_lazy("assignments:me_task_id_commitfile", kwargs={"task_id": 1}) response = self.client.get(url) json = response.json() - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) - self.assertEqual(self.bc.database.list_of('assignments.Task'), []) + self.assertEqual(self.bc.database.list_of("assignments.Task"), []) # When: no tasks # Then: response 404 def test__no_tasks(self): - expected = {'data': {'getTask': {'id': random.randint(1, 100)}}} + expected = {"data": {"getTask": {"id": random.randint(1, 100)}}} query = { self.bc.fake.slug(): self.bc.fake.slug(), self.bc.fake.slug(): self.bc.fake.slug(), @@ -39,30 +40,33 @@ def test__no_tasks(self): mock = MagicMock() mock.raw = iter([json.dumps(expected).encode()]) - mock.headers = {'Content-Type': 'application/json'} + mock.headers = {"Content-Type": "application/json"} code = random.randint(200, 299) mock.status_code = code - mock.reason = 'OK' + mock.reason = "OK" - task = {'github_url': self.bc.fake.url()} + task = {"github_url": self.bc.fake.url()} model = self.bc.database.create(profile_academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:me_task_id_commitfile', kwargs={'task_id': 1 - }) + '?' + self.bc.format.querystring(query) + url = ( + reverse_lazy("assignments:me_task_id_commitfile", kwargs={"task_id": 1}) + + "?" 
+ + self.bc.format.querystring(query) + ) - with patch.multiple('requests', get=MagicMock(return_value=mock)): + with patch.multiple("requests", get=MagicMock(return_value=mock)): response = self.client.get(url) self.bc.check.calls(requests.get.call_args_list, []) - self.assertEqual(response.getvalue().decode('utf-8'), '{"detail":"task-not-found","status_code":404}') + self.assertEqual(response.getvalue().decode("utf-8"), '{"detail":"task-not-found","status_code":404}') self.assertEqual(response.status_code, 404) - self.assertEqual(self.bc.database.list_of('assignments.Task'), []) + self.assertEqual(self.bc.database.list_of("assignments.Task"), []) # When: no github credentials # Then: response 404 def test__no_github_Credentials(self): - expected = {'data': {'getTask': {'id': random.randint(1, 100)}}} + expected = {"data": {"getTask": {"id": random.randint(1, 100)}}} query = { self.bc.fake.slug(): self.bc.fake.slug(), self.bc.fake.slug(): self.bc.fake.slug(), @@ -71,31 +75,35 @@ def test__no_github_Credentials(self): mock = MagicMock() mock.raw = iter([json.dumps(expected).encode()]) - mock.headers = {'Content-Type': 'application/json'} + mock.headers = {"Content-Type": "application/json"} code = random.randint(200, 299) mock.status_code = code - mock.reason = 'OK' + mock.reason = "OK" - task = {'github_url': self.bc.fake.url()} + task = {"github_url": self.bc.fake.url()} model = self.bc.database.create(profile_academy=1, task=task) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:me_task_id_commitfile', kwargs={'task_id': 1 - }) + '?' + self.bc.format.querystring(query) + url = ( + reverse_lazy("assignments:me_task_id_commitfile", kwargs={"task_id": 1}) + + "?" + + self.bc.format.querystring(query) + ) - with patch.multiple('requests', get=MagicMock(return_value=mock)): + with patch.multiple("requests", get=MagicMock(return_value=mock)): response = self.client.get(url) self.bc.check.calls(requests.get.call_args_list, []) - self.assertEqual(response.getvalue().decode('utf-8'), - '{"detail":"github-account-not-connected","status_code":400}') + self.assertEqual( + response.getvalue().decode("utf-8"), '{"detail":"github-account-not-connected","status_code":400}' + ) self.assertEqual(response.status_code, 400) - self.assertEqual(self.bc.database.list_of('assignments.Task'), [self.bc.format.to_dict(model.task)]) + self.assertEqual(self.bc.database.list_of("assignments.Task"), [self.bc.format.to_dict(model.task)]) # When: auth in get # Then: response 200 def test__get__auth(self): - expected = {'data': {'getTask': {'id': random.randint(1, 100)}}} + expected = {"data": {"getTask": {"id": random.randint(1, 100)}}} query = { self.bc.fake.slug(): self.bc.fake.slug(), self.bc.fake.slug(): self.bc.fake.slug(), @@ -104,33 +112,41 @@ def test__get__auth(self): mock = MagicMock() mock.raw = iter([json.dumps(expected).encode()]) - mock.headers = {'Content-Type': 'application/json'} + mock.headers = {"Content-Type": "application/json"} code = random.randint(200, 299) mock.status_code = code - mock.reason = 'OK' + mock.reason = "OK" - task = {'github_url': self.bc.fake.url()} - model = self.bc.database.create(profile_academy=1, task=task, credentials_github=1, app={'slug': 'rigobot'}) + task = {"github_url": self.bc.fake.url()} + model = self.bc.database.create(profile_academy=1, task=task, credentials_github=1, app={"slug": "rigobot"}) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:me_task_id_commitfile', kwargs={'task_id': 1 - }) + '?' 
+ self.bc.format.querystring(query) + url = ( + reverse_lazy("assignments:me_task_id_commitfile", kwargs={"task_id": 1}) + + "?" + + self.bc.format.querystring(query) + ) token = self.bc.random.string(lower=True, upper=True, symbol=True, number=True, size=20) - with patch('linked_services.django.actions.get_jwt', MagicMock(return_value=token)): - with patch.multiple('requests', get=MagicMock(return_value=mock)): + with patch("linked_services.django.actions.get_jwt", MagicMock(return_value=token)): + with patch.multiple("requests", get=MagicMock(return_value=mock)): response = self.client.get(url) - self.bc.check.calls(requests.get.call_args_list, [ - call(model.app.app_url + '/v1/finetuning/commitfile', - params={ - **query, - 'repo': model.task.github_url, - 'watcher': model.credentials_github.username, - }, - stream=True, - headers={'Authorization': f'Link App=breathecode,Token={token}'}), - ]) - - self.assertEqual(response.getvalue().decode('utf-8'), json.dumps(expected)) + self.bc.check.calls( + requests.get.call_args_list, + [ + call( + model.app.app_url + "/v1/finetuning/commitfile", + params={ + **query, + "repo": model.task.github_url, + "watcher": model.credentials_github.username, + }, + stream=True, + headers={"Authorization": f"Link App=breathecode,Token={token}"}, + ), + ], + ) + + self.assertEqual(response.getvalue().decode("utf-8"), json.dumps(expected)) self.assertEqual(response.status_code, code) - self.assertEqual(self.bc.database.list_of('assignments.Task'), [self.bc.format.to_dict(model.task)]) + self.assertEqual(self.bc.database.list_of("assignments.Task"), [self.bc.format.to_dict(model.task)]) diff --git a/breathecode/assignments/tests/urls/tests_user_me_project.py b/breathecode/assignments/tests/urls/tests_user_me_project.py index 44071c473..4835e6986 100644 --- a/breathecode/assignments/tests/urls/tests_user_me_project.py +++ b/breathecode/assignments/tests/urls/tests_user_me_project.py @@ -1,6 +1,7 @@ """ Test /final_project/ """ + from datetime import timedelta from unittest.mock import MagicMock, call, patch @@ -13,32 +14,33 @@ def put_serializer(self, project, data={}): return { - 'id': project.id, - 'logo_url': project.logo_url, - 'name': project.name, - 'one_line_desc': project.one_line_desc, - 'public_url': project.public_url, - 'cohort': project.cohort.id if project.cohort else None, - 'created_at': self.bc.datetime.to_iso_string(project.created_at), - 'description': project.description, - 'revision_status': project.revision_status, - 'revision_message': project.revision_message, + "id": project.id, + "logo_url": project.logo_url, + "name": project.name, + "one_line_desc": project.one_line_desc, + "public_url": project.public_url, + "cohort": project.cohort.id if project.cohort else None, + "created_at": self.bc.datetime.to_iso_string(project.created_at), + "description": project.description, + "revision_status": project.revision_status, + "revision_message": project.revision_message, **data, } class FinalProjectTestSuite(AssignmentsTestCase): """Test /final_project""" + """ 🔽🔽🔽 Auth """ def test_final_project_with_no_auth(self): - url = reverse_lazy('assignments:user_me_project', kwargs={'project_id': 1}) + url = reverse_lazy("assignments:user_me_project", kwargs={"project_id": 1}) response = self.client.put(url) json = response.json() - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} self.assertEqual(json, expected) 
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) @@ -51,11 +53,11 @@ def test_final_project_with_wrong_id(self): profile_academy=1, role=1, ) - url = reverse_lazy('assignments:user_me_project', kwargs={'project_id': 1}) + url = reverse_lazy("assignments:user_me_project", kwargs={"project_id": 1}) response = self.client.put(url) json = response.json() - expected = {'detail': 'project-not-found', 'status_code': 400} + expected = {"detail": "project-not-found", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -63,43 +65,42 @@ def test_final_project_with_wrong_id(self): def test_final_project_when_not_a_member(self): self.bc.request.set_headers(academy=1) - helper_models = self.bc.database.create(user=[{'id': 1}, {'id': 2}], cohort=1) - project_cohort = helper_models['cohort'] + helper_models = self.bc.database.create(user=[{"id": 1}, {"id": 2}], cohort=1) + project_cohort = helper_models["cohort"] - models = self.bc.database.create(final_project={'members': [2]}) - self.bc.request.authenticate(helper_models['user'][0]) - url = reverse_lazy('assignments:user_me_project', kwargs={'project_id': 1}) - data = {'name': 'Facebook', 'cohort': project_cohort.id} + models = self.bc.database.create(final_project={"members": [2]}) + self.bc.request.authenticate(helper_models["user"][0]) + url = reverse_lazy("assignments:user_me_project", kwargs={"project_id": 1}) + data = {"name": "Facebook", "cohort": project_cohort.id} response = self.client.put(url, data) json = response.json() - expected = {'detail': 'not-a-member', 'status_code': 400} + expected = {"detail": "not-a-member", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_final_project_members_from_different_cohort(self): self.bc.request.set_headers(academy=1) - helper_models = self.bc.database.create(user=[{'id': 1}, {'id': 2}], cohort=2) - self.bc.request.authenticate(helper_models['user'][1]) - - project_cohort = helper_models['cohort'][0] - models = self.bc.database.create(cohort_user={ - 'user': helper_models['user'][1], - 'cohort': helper_models['cohort'][1] - }, - final_project={'members': [1, 2]}) - url = reverse_lazy('assignments:user_me_project', kwargs={'project_id': 1}) - - data = {'name': 'Facebook', 'members': [1, 2], 'cohort': project_cohort.id} + helper_models = self.bc.database.create(user=[{"id": 1}, {"id": 2}], cohort=2) + self.bc.request.authenticate(helper_models["user"][1]) + + project_cohort = helper_models["cohort"][0] + models = self.bc.database.create( + cohort_user={"user": helper_models["user"][1], "cohort": helper_models["cohort"][1]}, + final_project={"members": [1, 2]}, + ) + url = reverse_lazy("assignments:user_me_project", kwargs={"project_id": 1}) + + data = {"name": "Facebook", "members": [1, 2], "cohort": project_cohort.id} response = self.client.put(url, data) json = response.json() expected = { - 'detail': f'All members of this project must belong to the cohort {project_cohort.name} - 0', - 'status_code': 400 + "detail": f"All members of this project must belong to the cohort {project_cohort.name} - 0", + "status_code": 400, } self.assertEqual(json, expected) @@ -108,47 +109,43 @@ def 
test_final_project_members_from_different_cohort(self): def test_final_project_without_cohort(self): self.bc.request.set_headers(academy=1) - helper_models = self.bc.database.create(user=[{'id': 1}, {'id': 2}]) - self.bc.request.authenticate(helper_models['user'][1]) + helper_models = self.bc.database.create(user=[{"id": 1}, {"id": 2}]) + self.bc.request.authenticate(helper_models["user"][1]) - models = self.bc.database.create(final_project={'members': [1, 2]}) - url = reverse_lazy('assignments:user_me_project', kwargs={'project_id': 1}) + models = self.bc.database.create(final_project={"members": [1, 2]}) + url = reverse_lazy("assignments:user_me_project", kwargs={"project_id": 1}) - data = {'name': 'Facebook', 'members': [1, 2]} + data = {"name": "Facebook", "members": [1, 2]} response = self.client.put(url, data) json = response.json() - expected = {'detail': 'cohort-missing', 'status_code': 400} + expected = {"detail": "cohort-missing", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_final_project(self): self.bc.request.set_headers(academy=1) helper_models = self.bc.database.create(user=1, cohort=1) - cohort_user = self.bc.database.create(cohort_user={ - 'user': helper_models['user'], - 'cohort': helper_models['cohort'] - }) + cohort_user = self.bc.database.create( + cohort_user={"user": helper_models["user"], "cohort": helper_models["cohort"]} + ) - project_cohort = helper_models['cohort'] + project_cohort = helper_models["cohort"] - models = self.bc.database.create(final_project={'members': [helper_models['user']], 'cohort': project_cohort}) + models = self.bc.database.create(final_project={"members": [helper_models["user"]], "cohort": project_cohort}) - self.bc.request.authenticate(helper_models['user']) - url = reverse_lazy('assignments:user_me_project', kwargs={'project_id': 1}) - data = {'name': 'Facebook', 'cohort': project_cohort.id, 'members': [1]} + self.bc.request.authenticate(helper_models["user"]) + url = reverse_lazy("assignments:user_me_project", kwargs={"project_id": 1}) + data = {"name": "Facebook", "cohort": project_cohort.id, "members": [1]} response = self.client.put(url, data) json = response.json() self.assertEqual( json, - put_serializer(self, models['final_project'], { - 'name': 'Facebook', - 'cohort': project_cohort.id, - **json - })) + put_serializer(self, models["final_project"], {"name": "Facebook", "cohort": project_cohort.id, **json}), + ) self.assertEqual(response.status_code, status.HTTP_200_OK) diff --git a/breathecode/assignments/tests/urls/tests_user_me_task.py b/breathecode/assignments/tests/urls/tests_user_me_task.py index 85c52374b..08fd6d8f9 100644 --- a/breathecode/assignments/tests/urls/tests_user_me_task.py +++ b/breathecode/assignments/tests/urls/tests_user_me_task.py @@ -1,6 +1,7 @@ """ Test /answer """ + import random from datetime import datetime from unittest.mock import MagicMock, call, patch @@ -23,156 +24,153 @@ def db_item(data={}): return { - 'associated_slug': 'live-ball-onto-go', - 'cohort_id': 1, - 'delivered_at': None, - 'description': '', - 'github_url': None, - 'id': 1, - 'live_url': None, - 'opened_at': None, - 'revision_status': 'PENDING', - 'rigobot_repository_id': None, - 'subtasks': None, - 'task_status': 'PENDING', - 
'task_type': 'EXERCISE', - 'telemetry_id': None, - 'title': 'Richard Stephens', - 'user_id': 1, - **data + "associated_slug": "live-ball-onto-go", + "cohort_id": 1, + "delivered_at": None, + "description": "", + "github_url": None, + "id": 1, + "live_url": None, + "opened_at": None, + "revision_status": "PENDING", + "rigobot_repository_id": None, + "subtasks": None, + "task_status": "PENDING", + "task_type": "EXERCISE", + "telemetry_id": None, + "title": "Richard Stephens", + "user_id": 1, + **data, } def put_serializer(self, task, data={}): return { - 'associated_slug': task.associated_slug, - 'cohort': task.cohort, - 'created_at': self.bc.datetime.to_iso_string(task.created_at), - 'description': task.description, - 'github_url': task.github_url, - 'id': task.id, - 'live_url': task.live_url, - 'revision_status': task.revision_status, - 'task_status': task.task_status, - 'task_type': task.task_type, - 'title': task.title, - 'updated_at': self.bc.datetime.to_iso_string(task.updated_at), - 'opened_at': self.bc.datetime.to_iso_string(task.opened_at) if task.opened_at else task.opened_at, - **data + "associated_slug": task.associated_slug, + "cohort": task.cohort, + "created_at": self.bc.datetime.to_iso_string(task.created_at), + "description": task.description, + "github_url": task.github_url, + "id": task.id, + "live_url": task.live_url, + "revision_status": task.revision_status, + "task_status": task.task_status, + "task_type": task.task_type, + "title": task.title, + "updated_at": self.bc.datetime.to_iso_string(task.updated_at), + "opened_at": self.bc.datetime.to_iso_string(task.opened_at) if task.opened_at else task.opened_at, + **data, } def get_serializer(self, task, user): return { - 'associated_slug': task.associated_slug, - 'created_at': self.bc.datetime.to_iso_string(task.created_at), - 'updated_at': self.bc.datetime.to_iso_string(task.updated_at), - 'github_url': task.github_url, - 'id': task.id, - 'live_url': task.live_url, - 'revision_status': task.revision_status, - 'task_status': task.task_status, - 'task_type': task.task_type, - 'title': task.title, - 'assignment_telemetry': task.telemetry.telemetry if task.telemetry else None, - 'description': task.description, - 'opened_at': self.bc.datetime.to_iso_string(task.opened_at) if task.opened_at else task.opened_at, - 'delivered_at': self.bc.datetime.to_iso_string(task.delivered_at) if task.delivered_at else task.delivered_at, - 'user': { - 'first_name': user.first_name, - 'id': user.id, - 'last_name': user.last_name - } + "associated_slug": task.associated_slug, + "created_at": self.bc.datetime.to_iso_string(task.created_at), + "updated_at": self.bc.datetime.to_iso_string(task.updated_at), + "github_url": task.github_url, + "id": task.id, + "live_url": task.live_url, + "revision_status": task.revision_status, + "task_status": task.task_status, + "task_type": task.task_type, + "title": task.title, + "assignment_telemetry": task.telemetry.telemetry if task.telemetry else None, + "description": task.description, + "opened_at": self.bc.datetime.to_iso_string(task.opened_at) if task.opened_at else task.opened_at, + "delivered_at": self.bc.datetime.to_iso_string(task.delivered_at) if task.delivered_at else task.delivered_at, + "user": {"first_name": user.first_name, "id": user.id, "last_name": user.last_name}, } def post_serializer(data={}): return { - 'associated_slug': 'growth-purpose', - 'attachments': [], - 'cohort': 1, - 'created_at': '2024-04-25T01:09:44.447234Z', - 'delivered_at': None, - 'description': '', - 'github_url': None, 
- 'id': 1, - 'live_url': None, - 'opened_at': None, - 'revision_status': 'PENDING', - 'rigobot_repository_id': None, - 'subtasks': None, - 'task_status': 'PENDING', - 'task_type': 'EXERCISE', - 'telemetry': None, - 'title': 'Isabella Duffy', - 'updated_at': '2024-04-25T01:09:44.447254Z', + "associated_slug": "growth-purpose", + "attachments": [], + "cohort": 1, + "created_at": "2024-04-25T01:09:44.447234Z", + "delivered_at": None, + "description": "", + "github_url": None, + "id": 1, + "live_url": None, + "opened_at": None, + "revision_status": "PENDING", + "rigobot_repository_id": None, + "subtasks": None, + "task_status": "PENDING", + "task_type": "EXERCISE", + "telemetry": None, + "title": "Isabella Duffy", + "updated_at": "2024-04-25T01:09:44.447254Z", **data, } def put_serializer(self, task, data={}): return { - 'associated_slug': task.associated_slug, - 'cohort': task.cohort.id if task.cohort else None, - 'created_at': self.bc.datetime.to_iso_string(task.created_at), - 'description': task.description, - 'github_url': task.github_url, - 'id': task.id, - 'live_url': task.live_url, - 'revision_status': task.revision_status, - 'task_status': task.task_status, - 'task_type': task.task_type, - 'title': task.title, - 'rigobot_repository_id': task.rigobot_repository_id, - 'attachments': [], - 'subtasks': task.subtasks, - 'telemetry': task.telemetry, - 'opened_at': self.bc.datetime.to_iso_string(task.opened_at) if task.opened_at else task.opened_at, - 'delivered_at': self.bc.datetime.to_iso_string(task.delivered_at) if task.delivered_at else task.delivered_at, + "associated_slug": task.associated_slug, + "cohort": task.cohort.id if task.cohort else None, + "created_at": self.bc.datetime.to_iso_string(task.created_at), + "description": task.description, + "github_url": task.github_url, + "id": task.id, + "live_url": task.live_url, + "revision_status": task.revision_status, + "task_status": task.task_status, + "task_type": task.task_type, + "title": task.title, + "rigobot_repository_id": task.rigobot_repository_id, + "attachments": [], + "subtasks": task.subtasks, + "telemetry": task.telemetry, + "opened_at": self.bc.datetime.to_iso_string(task.opened_at) if task.opened_at else task.opened_at, + "delivered_at": self.bc.datetime.to_iso_string(task.delivered_at) if task.delivered_at else task.delivered_at, **data, } @pytest.fixture(autouse=True) def setup(db, monkeypatch): - monkeypatch.setattr(activity_tasks.add_activity, 'delay', MagicMock()) + monkeypatch.setattr(activity_tasks.add_activity, "delay", MagicMock()) yield class MediaTestSuite(AssignmentsTestCase): """Test /answer""" + """ 🔽🔽🔽 Auth """ - @patch('breathecode.assignments.signals.assignment_status_updated.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.assignments.signals.assignment_status_updated.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_user_me_task__without_auth(self): - url = reverse_lazy('assignments:user_me_task') + url = reverse_lazy("assignments:user_me_task") response = self.client.get(url) json = response.json() - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": 
"Authentication credentials were not provided.", "status_code": 401} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) - self.assertEqual(self.bc.database.list_of('assignments.Task'), []) + self.assertEqual(self.bc.database.list_of("assignments.Task"), []) self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, []) """ 🔽🔽🔽 Get without Task """ - @patch('breathecode.assignments.signals.assignment_status_updated.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.assignments.signals.assignment_status_updated.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_user_me_task__without_task(self): model = self.bc.database.create(user=1) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:user_me_task') + url = reverse_lazy("assignments:user_me_task") response = self.client.get(url) json = response.json() @@ -180,21 +178,21 @@ def test_user_me_task__without_task(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('assignments.Task'), []) + self.assertEqual(self.bc.database.list_of("assignments.Task"), []) self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, []) """ 🔽🔽🔽 Get with one Task """ - @patch('breathecode.assignments.signals.assignment_status_updated.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.assignments.signals.assignment_status_updated.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_user_me_task__with_one_task(self): model = self.bc.database.create(user=1, task=1, cohort=1) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:user_me_task') + url = reverse_lazy("assignments:user_me_task") response = self.client.get(url) json = response.json() @@ -202,21 +200,21 @@ def test_user_me_task__with_one_task(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('assignments.Task'), [self.bc.format.to_dict(model.task)]) + self.assertEqual(self.bc.database.list_of("assignments.Task"), [self.bc.format.to_dict(model.task)]) self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, []) """ 🔽🔽🔽 Get with two Task """ - @patch('breathecode.assignments.signals.assignment_status_updated.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.assignments.signals.assignment_status_updated.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + 
@patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_user_me_task__with_two_task(self): model = self.bc.database.create(user=1, task=2, cohort=1) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:user_me_task') + url = reverse_lazy("assignments:user_me_task") response = self.client.get(url) json = response.json() @@ -224,27 +222,23 @@ def test_user_me_task__with_two_task(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('assignments.Task'), self.bc.format.to_dict(model.task)) + self.assertEqual(self.bc.database.list_of("assignments.Task"), self.bc.format.to_dict(model.task)) self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, []) """ 🔽🔽🔽 Get with querystring assets """ - @patch('breathecode.assignments.signals.assignment_status_updated.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.assignments.signals.assignment_status_updated.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_user_me_task__with_query_string_assets(self): - model = self.bc.database.create(user=1, - task=[{ - 'associated_slug': 'fine' - }, { - 'associated_slug': 'super' - }], - cohort=1) + model = self.bc.database.create( + user=1, task=[{"associated_slug": "fine"}, {"associated_slug": "super"}], cohort=1 + ) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:user_me_task') + '?associated_slug=fine,super' + url = reverse_lazy("assignments:user_me_task") + "?associated_slug=fine,super" response = self.client.get(url) json = response.json() @@ -252,27 +246,23 @@ def test_user_me_task__with_query_string_assets(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('assignments.Task'), self.bc.format.to_dict(model.task)) + self.assertEqual(self.bc.database.list_of("assignments.Task"), self.bc.format.to_dict(model.task)) self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, []) """ 🔽🔽🔽 Get with querystring assets no results """ - @patch('breathecode.assignments.signals.assignment_status_updated.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.assignments.signals.assignment_status_updated.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_user_me_task__with_query_string_assets_no_results(self): - model = self.bc.database.create(user=1, - task=[{ - 'associated_slug': 'fine' - }, { - 'associated_slug': 'super' - }], - cohort=1) + model = self.bc.database.create( + user=1, task=[{"associated_slug": "fine"}, {"associated_slug": "super"}], cohort=1 + ) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:user_me_task') 
+ '?associated_slug=kenny' + url = reverse_lazy("assignments:user_me_task") + "?associated_slug=kenny" response = self.client.get(url) json = response.json() @@ -280,22 +270,22 @@ def test_user_me_task__with_query_string_assets_no_results(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('assignments.Task'), self.bc.format.to_dict(model.task)) + self.assertEqual(self.bc.database.list_of("assignments.Task"), self.bc.format.to_dict(model.task)) self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, []) """ 🔽🔽🔽 Get with one Task but the other user """ - @patch('breathecode.assignments.signals.assignment_status_updated.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.assignments.signals.assignment_status_updated.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_user_me_task__with_one_task__but_the_other_user(self): - task = {'user_id': 2} + task = {"user_id": 2} model = self.bc.database.create(user=2, task=task, cohort=1) self.bc.request.authenticate(model.user[0]) - url = reverse_lazy('assignments:user_me_task') + url = reverse_lazy("assignments:user_me_task") response = self.client.get(url) json = response.json() @@ -303,22 +293,22 @@ def test_user_me_task__with_one_task__but_the_other_user(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('assignments.Task'), [self.bc.format.to_dict(model.task)]) + self.assertEqual(self.bc.database.list_of("assignments.Task"), [self.bc.format.to_dict(model.task)]) self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, []) """ 🔽🔽🔽 Get with two Task but the other user """ - @patch('breathecode.assignments.signals.assignment_status_updated.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.assignments.signals.assignment_status_updated.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_user_me_task__with_two_tasks__but_the_other_user(self): - task = {'user_id': 2} + task = {"user_id": 2} model = self.bc.database.create(user=2, task=(2, task), cohort=1) self.bc.request.authenticate(model.user[0]) - url = reverse_lazy('assignments:user_me_task') + url = reverse_lazy("assignments:user_me_task") response = self.client.get(url) json = response.json() @@ -326,401 +316,422 @@ def test_user_me_task__with_two_tasks__but_the_other_user(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('assignments.Task'), self.bc.format.to_dict(model.task)) + self.assertEqual(self.bc.database.list_of("assignments.Task"), self.bc.format.to_dict(model.task)) 
self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, []) """ 🔽🔽🔽 Delete """ - @patch('breathecode.assignments.signals.assignment_status_updated.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.assignments.signals.assignment_status_updated.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_delete_tasks_in_bulk_found_and_deleted(self): model = self.bc.database.create(user=1, task=2, cohort=1) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:user_me_task') + '?id=1,2' + url = reverse_lazy("assignments:user_me_task") + "?id=1,2" response = self.client.delete(url) self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) - self.assertEqual(self.bc.database.list_of('assignments.Task'), []) + self.assertEqual(self.bc.database.list_of("assignments.Task"), []) self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, []) - @patch('breathecode.assignments.signals.assignment_status_updated.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.assignments.signals.assignment_status_updated.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_delete_tasks_in_bulk_tasks_not_found(self): model = self.bc.database.create(user=1) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:user_me_task') + '?id=1,2' + url = reverse_lazy("assignments:user_me_task") + "?id=1,2" response = self.client.delete(url) json = response.json() expected = { - 'failure': [{ - 'detail': - 'task-not-found', - 'resources': [{ - 'display_field': 'pk', - 'display_value': 1, - 'pk': 1 - }, { - 'display_field': 'pk', - 'display_value': 2, - 'pk': 2 - }], - 'status_code': - 404 - }], - 'success': [] + "failure": [ + { + "detail": "task-not-found", + "resources": [ + {"display_field": "pk", "display_value": 1, "pk": 1}, + {"display_field": "pk", "display_value": 2, "pk": 2}, + ], + "status_code": 404, + } + ], + "success": [], } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_207_MULTI_STATUS) - self.assertEqual(self.bc.database.list_of('assignments.Task'), []) + self.assertEqual(self.bc.database.list_of("assignments.Task"), []) self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, []) - @patch('breathecode.assignments.signals.assignment_status_updated.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.assignments.signals.assignment_status_updated.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", 
MagicMock(return_value=None)) def test_delete_task_in_bulk_associated_with_another_user(self): model = self.bc.database.create(user=2, task=2, cohort=1) self.bc.request.authenticate(model.user[1]) - url = reverse_lazy('assignments:user_me_task') + '?id=1,2' + url = reverse_lazy("assignments:user_me_task") + "?id=1,2" response = self.client.delete(url) json = response.json() expected = { - 'failure': [{ - 'detail': - 'task-not-found-for-this-user', - 'resources': [{ - 'display_field': 'associated_slug', - 'display_value': x.associated_slug, - 'pk': x.pk - } for x in model.task], - 'status_code': - 400 - }], - 'success': [] + "failure": [ + { + "detail": "task-not-found-for-this-user", + "resources": [ + {"display_field": "associated_slug", "display_value": x.associated_slug, "pk": x.pk} + for x in model.task + ], + "status_code": 400, + } + ], + "success": [], } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_207_MULTI_STATUS) - self.assertEqual(self.bc.database.list_of('assignments.Task'), self.bc.format.to_dict(model.task)) + self.assertEqual(self.bc.database.list_of("assignments.Task"), self.bc.format.to_dict(model.task)) self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, []) """ 🔽🔽🔽 Put """ - @patch('breathecode.assignments.tasks.student_task_notification', MagicMock()) - @patch('breathecode.assignments.tasks.teacher_task_notification', MagicMock()) - @patch('breathecode.assignments.signals.assignment_status_updated.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.assignments.tasks.student_task_notification", MagicMock()) + @patch("breathecode.assignments.tasks.teacher_task_notification", MagicMock()) + @patch("breathecode.assignments.signals.assignment_status_updated.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_put__without_task(self): model = self.bc.database.create(user=1) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:user_me_task') + url = reverse_lazy("assignments:user_me_task") data = {} - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() - expected = {'detail': 'update-whout-list', 'status_code': 400} + expected = {"detail": "update-whout-list", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('assignments.Task'), []) + self.assertEqual(self.bc.database.list_of("assignments.Task"), []) self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, []) - @patch('breathecode.assignments.tasks.student_task_notification', MagicMock()) - @patch('breathecode.assignments.tasks.teacher_task_notification', MagicMock()) - @patch('breathecode.assignments.signals.assignment_status_updated.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.assignments.tasks.student_task_notification", MagicMock()) + 
@patch("breathecode.assignments.tasks.teacher_task_notification", MagicMock()) + @patch("breathecode.assignments.signals.assignment_status_updated.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test__put__without_task__passing_list(self): model = self.bc.database.create(user=1) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:user_me_task') - response = self.client.put(url, [], format='json') + url = reverse_lazy("assignments:user_me_task") + response = self.client.put(url, [], format="json") json = response.json() expected = [] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('assignments.Task'), []) + self.assertEqual(self.bc.database.list_of("assignments.Task"), []) self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, []) """ 🔽🔽🔽 Put without Task, one item in body """ - @patch('breathecode.assignments.tasks.student_task_notification', MagicMock()) - @patch('breathecode.assignments.tasks.teacher_task_notification', MagicMock()) - @patch('breathecode.assignments.signals.assignment_status_updated.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.assignments.tasks.student_task_notification", MagicMock()) + @patch("breathecode.assignments.tasks.teacher_task_notification", MagicMock()) + @patch("breathecode.assignments.signals.assignment_status_updated.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test__put__without_task__one_item_in_body(self): model = self.bc.database.create(user=1) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:user_me_task') + url = reverse_lazy("assignments:user_me_task") data = [{}] - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() - expected = {'detail': 'missing=task-id', 'status_code': 400} + expected = {"detail": "missing=task-id", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('assignments.Task'), []) + self.assertEqual(self.bc.database.list_of("assignments.Task"), []) self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, []) """ 🔽🔽🔽 Put without Task, one item in body, with id """ - @patch('breathecode.assignments.tasks.student_task_notification', MagicMock()) - @patch('breathecode.assignments.tasks.teacher_task_notification', MagicMock()) - @patch('breathecode.assignments.signals.assignment_status_updated.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.assignments.tasks.student_task_notification", MagicMock()) + @patch("breathecode.assignments.tasks.teacher_task_notification", MagicMock()) + 
@patch("breathecode.assignments.signals.assignment_status_updated.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test__put__without_task__one_item_in_body__with_id(self): model = self.bc.database.create(user=1) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:user_me_task') - data = [{'id': 1}] - response = self.client.put(url, data, format='json') + url = reverse_lazy("assignments:user_me_task") + data = [{"id": 1}] + response = self.client.put(url, data, format="json") json = response.json() - expected = {'detail': 'task-not-found', 'status_code': 404} + expected = {"detail": "task-not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - self.assertEqual(self.bc.database.list_of('assignments.Task'), []) + self.assertEqual(self.bc.database.list_of("assignments.Task"), []) self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, []) """ 🔽🔽🔽 Put with Task, one item in body, with id """ - @patch('breathecode.assignments.tasks.student_task_notification', MagicMock()) - @patch('breathecode.assignments.tasks.teacher_task_notification', MagicMock()) - @patch('breathecode.assignments.signals.assignment_status_updated.send_robust', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.assignments.tasks.student_task_notification", MagicMock()) + @patch("breathecode.assignments.tasks.teacher_task_notification", MagicMock()) + @patch("breathecode.assignments.signals.assignment_status_updated.send_robust", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_put_passing_taks_id(self): model = self.bc.database.create(user=1, task=2, cohort=1) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:user_me_task') - response = self.client.put(url, [{'id': 1}, {'id': 2}], format='json') + url = reverse_lazy("assignments:user_me_task") + response = self.client.put(url, [{"id": 1}, {"id": 2}], format="json") json = response.json() expected = [ - put_serializer(self, x, {'updated_at': self.bc.datetime.to_iso_string(UTC_NOW)}) for x in model.task + put_serializer(self, x, {"updated_at": self.bc.datetime.to_iso_string(UTC_NOW)}) for x in model.task ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('assignments.Task'), self.bc.format.to_dict(model.task)) + self.assertEqual(self.bc.database.list_of("assignments.Task"), self.bc.format.to_dict(model.task)) self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, []) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.assignments.signals.assignment_status_updated.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - 
@patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.assignments.signals.assignment_status_updated.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_put_passing_random_values_to_update_task(self): - with patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()): + with patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()): model = self.bc.database.create(user=1, task=2, cohort=2) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:user_me_task') - - data = [{ - 'id': n, - 'title': self.bc.fake.name(), - 'task_status': random.choice(['PENDING', 'DONE']), - 'revision_status': 'PENDING', - 'github_url': self.bc.fake.url(), - 'live_url': self.bc.fake.url(), - 'description': self.bc.fake.text()[:450], - 'cohort': random.randint(1, 2) - } for n in range(1, 3)] - with patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()): - response = self.client.put(url, data, format='json') + url = reverse_lazy("assignments:user_me_task") + + data = [ + { + "id": n, + "title": self.bc.fake.name(), + "task_status": random.choice(["PENDING", "DONE"]), + "revision_status": "PENDING", + "github_url": self.bc.fake.url(), + "live_url": self.bc.fake.url(), + "description": self.bc.fake.text()[:450], + "cohort": random.randint(1, 2), + } + for n in range(1, 3) + ] + with patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()): + response = self.client.put(url, data, format="json") json = response.json() expected = [ - put_serializer(self, model.task[x], { - 'updated_at': self.bc.datetime.to_iso_string(UTC_NOW), - **data[x] - }) for x in range(0, 2) + put_serializer(self, model.task[x], {"updated_at": self.bc.datetime.to_iso_string(UTC_NOW), **data[x]}) + for x in range(0, 2) ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) for x in range(0, 2): - data[x]['cohort_id'] = data[x].pop('cohort') - self.assertEqual(self.bc.database.list_of('assignments.Task'), [{ - **self.bc.format.to_dict(model.task[x]), - **data[x] - } for x in range(0, 2)]) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(model.user.id, 'assignment_status_updated', related_type='assignments.Task', related_id=x.id) - for x in model.task if data[x.id - 1]['task_status'] != x.task_status - ]) + data[x]["cohort_id"] = data[x].pop("cohort") + self.assertEqual( + self.bc.database.list_of("assignments.Task"), + [{**self.bc.format.to_dict(model.task[x]), **data[x]} for x in range(0, 2)], + ) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(model.user.id, "assignment_status_updated", related_type="assignments.Task", related_id=x.id) + for x in model.task + if data[x.id - 1]["task_status"] != x.task_status + ], + ) """ 🔽🔽🔽 Put with Task, one item in body, passing revision_status """ - @patch('breathecode.assignments.tasks.student_task_notification', MagicMock()) - @patch('breathecode.assignments.tasks.teacher_task_notification', MagicMock()) - @patch('breathecode.assignments.signals.assignment_status_updated.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - 
@patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.assignments.tasks.student_task_notification", MagicMock()) + @patch("breathecode.assignments.tasks.teacher_task_notification", MagicMock()) + @patch("breathecode.assignments.signals.assignment_status_updated.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test__put__with_task__one_item_in_body__passing_revision_status(self): - statuses = ['APPROVED', 'REJECTED', 'IGNORED'] + statuses = ["APPROVED", "REJECTED", "IGNORED"] for index in range(0, 3): current_status = statuses[index] next_status = statuses[index - 1 if index > 0 else 2] - task = {'revision_status': current_status, 'task_status': 'DONE'} + task = {"revision_status": current_status, "task_status": "DONE"} model = self.bc.database.create(user=1, task=task, cohort=1) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:user_me_task') - data = [{ - 'id': index + 1, - 'revision_status': next_status, - }] - response = self.client.put(url, data, format='json') + url = reverse_lazy("assignments:user_me_task") + data = [ + { + "id": index + 1, + "revision_status": next_status, + } + ] + response = self.client.put(url, data, format="json") json = response.json() expected = { - 'detail': 'editing-revision-status-but-is-not-teacher-or-assistant', - 'status_code': 400, + "detail": "editing-revision-status-but-is-not-teacher-or-assistant", + "status_code": 400, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('assignments.Task'), [ - self.bc.format.to_dict(model.task), - ]) + self.assertEqual( + self.bc.database.list_of("assignments.Task"), + [ + self.bc.format.to_dict(model.task), + ], + ) # teardown - self.bc.database.delete('assignments.Task') + self.bc.database.delete("assignments.Task") self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, []) """ 🔽🔽🔽 Put with Task, one item in body, passing revision_status, teacher is auth """ - @patch('breathecode.assignments.tasks.student_task_notification', MagicMock()) - @patch('breathecode.assignments.tasks.teacher_task_notification', MagicMock()) - @patch('breathecode.assignments.signals.assignment_status_updated.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.assignments.tasks.student_task_notification", MagicMock()) + @patch("breathecode.assignments.tasks.teacher_task_notification", MagicMock()) + @patch("breathecode.assignments.signals.assignment_status_updated.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test__put__with_task__one_item_in_body__passing_revision_status__teacher_token(self): - statuses = ['APPROVED', 'REJECTED', 'IGNORED'] + statuses = ["APPROVED", "REJECTED", "IGNORED"] for index in range(0, 3): current_status = statuses[index] next_status = statuses[index - 1 if index > 0 else 2] - task = {'revision_status': current_status, 'task_status': 'DONE'} + 
task = {"revision_status": current_status, "task_status": "DONE"} cohort_users = [ { - 'role': 'STUDENT', - 'user_id': (index * 2) + 1, + "role": "STUDENT", + "user_id": (index * 2) + 1, }, { - 'role': 'TEACHER', - 'user_id': (index * 2) + 2, + "role": "TEACHER", + "user_id": (index * 2) + 2, }, ] - with patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()): + with patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()): model = self.bc.database.create(user=2, task=task, cohort_user=cohort_users, cohort=1) self.bc.request.authenticate(model.user[1]) - url = reverse_lazy('assignments:user_me_task') - data = [{ - 'id': index + 1, - 'revision_status': next_status, - }] + url = reverse_lazy("assignments:user_me_task") + data = [ + { + "id": index + 1, + "revision_status": next_status, + } + ] start = timezone.now() - with patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()): - response = self.client.put(url, data, format='json') + with patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()): + response = self.client.put(url, data, format="json") end = timezone.now() json = response.json() json = [ - x for x in json - if self.bc.check.datetime_in_range(start, end, self.bc.datetime.from_iso_string(x['updated_at'])) - or x.pop('updated_at') + x + for x in json + if self.bc.check.datetime_in_range(start, end, self.bc.datetime.from_iso_string(x["updated_at"])) + or x.pop("updated_at") ] - expected = [put_serializer(self, model.task, data={'revision_status': next_status})] + expected = [put_serializer(self, model.task, data={"revision_status": next_status})] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('assignments.Task'), [ - { - **self.bc.format.to_dict(model.task), - 'revision_status': next_status, - }, - ]) + self.assertEqual( + self.bc.database.list_of("assignments.Task"), + [ + { + **self.bc.format.to_dict(model.task), + "revision_status": next_status, + }, + ], + ) self.assertEqual(tasks.student_task_notification.delay.call_args_list, [call(index + 1)]) self.assertEqual(tasks.teacher_task_notification.delay.call_args_list, []) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(model.user[1].id, - 'assignment_review_status_updated', - related_type='assignments.Task', - related_id=model.task.id), - ]) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call( + model.user[1].id, + "assignment_review_status_updated", + related_type="assignments.Task", + related_id=model.task.id, + ), + ], + ) # teardown - self.bc.database.delete('assignments.Task') + self.bc.database.delete("assignments.Task") tasks.student_task_notification.delay.call_args_list = [] activity_tasks.add_activity.delay.call_args_list = [] - @patch.object(APIViewExtensionHandlers, '_spy_extension_arguments', MagicMock()) - @patch.object(APIViewExtensionHandlers, '_spy_extensions', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch.object(APIViewExtensionHandlers, "_spy_extension_arguments", MagicMock()) + @patch.object(APIViewExtensionHandlers, "_spy_extensions", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + 
@patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_with_data(self): - with patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()): + with patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()): model = self.bc.database.create(user=1, task=2, cohort=2) self.client.force_authenticate(model.user) - url = reverse_lazy('assignments:user_me_task') + url = reverse_lazy("assignments:user_me_task") self.client.get(url) - self.bc.check.calls(APIViewExtensionHandlers._spy_extensions.call_args_list, [ - call(['CacheExtension', 'LanguageExtension', 'LookupExtension', 'PaginationExtension']), - ]) - - self.bc.check.calls(APIViewExtensionHandlers._spy_extension_arguments.call_args_list, [ - call(cache=TaskCache, cache_per_user=True, paginate=True), - ]) + self.bc.check.calls( + APIViewExtensionHandlers._spy_extensions.call_args_list, + [ + call(["CacheExtension", "LanguageExtension", "LookupExtension", "PaginationExtension"]), + ], + ) + + self.bc.check.calls( + APIViewExtensionHandlers._spy_extension_arguments.call_args_list, + [ + call(cache=TaskCache, cache_per_user=True, paginate=True), + ], + ) self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, []) def test_post__no_required_fields(client: capy.Client, database: capy.Database): - url = reverse_lazy('assignments:user_me_task') + url = reverse_lazy("assignments:user_me_task") model = database.create(user=1) client.force_authenticate(model.user) @@ -728,52 +739,59 @@ def test_post__no_required_fields(client: capy.Client, database: capy.Database): response = client.post(url) json = response.json() - expected = [{ - 'associated_slug': ['This field is required.'], - 'title': ['This field is required.'], - 'task_type': ['This field is required.'], - }] + expected = [ + { + "associated_slug": ["This field is required."], + "title": ["This field is required."], + "task_type": ["This field is required."], + } + ] assert json == expected assert response.status_code == status.HTTP_400_BAD_REQUEST - assert database.list_of('assignments.Task') == [] - - -@pytest.mark.parametrize('task_type', [ - 'PROJECT', - 'QUIZ', - 'LESSON', - 'EXERCISE', -]) -def test_post__created(client: capy.Client, database: capy.Database, fake: capy.Fake, task_type: str, - utc_now: datetime): - url = reverse_lazy('assignments:user_me_task') + assert database.list_of("assignments.Task") == [] + + +@pytest.mark.parametrize( + "task_type", + [ + "PROJECT", + "QUIZ", + "LESSON", + "EXERCISE", + ], +) +def test_post__created( + client: capy.Client, database: capy.Database, fake: capy.Fake, task_type: str, utc_now: datetime +): + url = reverse_lazy("assignments:user_me_task") model = database.create(user=1, cohort=1, city=1, country=1) client.force_authenticate(model.user) data = { - 'associated_slug': fake.slug(), - 'title': fake.name(), - 'task_type': task_type, - 'cohort': 1, + "associated_slug": fake.slug(), + "title": fake.name(), + "task_type": task_type, + "cohort": 1, } - response = client.post(url, data, format='json') + response = client.post(url, data, format="json") json = response.json() - del data['cohort'] + del data["cohort"] expected = [ post_serializer( data={ - 'created_at': utc_now.isoformat().replace('+00:00', 'Z'), - 'updated_at': utc_now.isoformat().replace('+00:00', 'Z'), + "created_at": utc_now.isoformat().replace("+00:00", "Z"), + "updated_at": utc_now.isoformat().replace("+00:00", "Z"), **data, - }), + } + ), ] assert json == expected assert 
response.status_code == status.HTTP_201_CREATED - assert database.list_of('assignments.Task') == [ + assert database.list_of("assignments.Task") == [ db_item(data), ] diff --git a/breathecode/assignments/urls.py b/breathecode/assignments/urls.py index dd5ec1e17..58f45bd11 100644 --- a/breathecode/assignments/urls.py +++ b/breathecode/assignments/urls.py @@ -20,46 +20,55 @@ FinalProjectCohortView, ) -app_name = 'assignments' +app_name = "assignments" urlpatterns = [ - path('task/', TaskTeacherView.as_view(), name='task'), - path('user/me/task', TaskMeView.as_view(), name='user_me_task'), - path('user/me/final_project', FinalProjectMeView.as_view(), name='user_me_final_project'), - path('user/me/final_project/screenshot', - FinalProjectScreenshotView.as_view(), - name='user_me_final_project_screenshot'), - path('user/me/final_project/', FinalProjectMeView.as_view(), name='user_me_project'), - path('user/me/task/', TaskMeView.as_view(), name='user_me_task_id'), - path('user/me/task//subtasks', SubtaskMeView.as_view(), name='user_me_task_id'), - path('me/telemetry', AssignmentTelemetryView.as_view(), name='me_telemetry'), - path('me/task//commitfile', MeCommitFileView.as_view(), name='me_task_id_commitfile'), - path('me/commitfile/', MeCommitFileView.as_view(), name='me_commitfile_id'), - path('academy/task//commitfile', AcademyCommitFileView.as_view(), name='academy_task_id_commitfile'), - path('academy/task//commitfile/', - AcademyCommitFileView.as_view(), - name='academy_commitfile_id'), - path('me/coderevision', MeCodeRevisionView.as_view(), name='me_coderevision'), - path('me/task//coderevision', MeCodeRevisionView.as_view(), name='me_task_id_coderevision'), - path('me/coderevision//rate', MeCodeRevisionRateView.as_view(), - name='me_coderevision_id_rate'), - path('academy/coderevision', AcademyCodeRevisionView.as_view(), name='academy_coderevision'), - path('academy/task//coderevision', - AcademyCodeRevisionView.as_view(), - name='academy_task_id_coderevision'), - path('academy/coderevision/', - AcademyCodeRevisionView.as_view(), - name='academy_coderevision_id'), - path('user//task', TaskMeView.as_view(), name='user_id_task'), - path('user//task/', TaskMeView.as_view(), name='user_id_task_id'), - path('academy/cohort//task', CohortTaskView.as_view()), - path('academy/cohort//final_project', FinalProjectCohortView.as_view(), name='final_project_cohort'), - path('academy/cohort//final_project/', - FinalProjectCohortView.as_view(), - name='final_project_cohort_update'), - path('academy/user//task', TaskMeView.as_view(), name='academy_user_id_task'), - path('task//deliver/', deliver_assignment_view, name='task_id_deliver_token'), - path('task//deliver', TaskMeDeliverView.as_view(), name='task_id_deliver'), - path('task//attachment', TaskMeAttachmentView.as_view(), name='task_id_attachment'), - path('task/', TaskMeView.as_view(), name='task_id'), - path('sync/cohort//task', sync_cohort_tasks_view, name='sync_cohort_id_task'), + path("task/", TaskTeacherView.as_view(), name="task"), + path("user/me/task", TaskMeView.as_view(), name="user_me_task"), + path("user/me/final_project", FinalProjectMeView.as_view(), name="user_me_final_project"), + path( + "user/me/final_project/screenshot", + FinalProjectScreenshotView.as_view(), + name="user_me_final_project_screenshot", + ), + path("user/me/final_project/", FinalProjectMeView.as_view(), name="user_me_project"), + path("user/me/task/", TaskMeView.as_view(), name="user_me_task_id"), + path("user/me/task//subtasks", SubtaskMeView.as_view(), 
name="user_me_task_id"), + path("me/telemetry", AssignmentTelemetryView.as_view(), name="me_telemetry"), + path("me/task//commitfile", MeCommitFileView.as_view(), name="me_task_id_commitfile"), + path("me/commitfile/", MeCommitFileView.as_view(), name="me_commitfile_id"), + path("academy/task//commitfile", AcademyCommitFileView.as_view(), name="academy_task_id_commitfile"), + path( + "academy/task//commitfile/", + AcademyCommitFileView.as_view(), + name="academy_commitfile_id", + ), + path("me/coderevision", MeCodeRevisionView.as_view(), name="me_coderevision"), + path("me/task//coderevision", MeCodeRevisionView.as_view(), name="me_task_id_coderevision"), + path( + "me/coderevision//rate", MeCodeRevisionRateView.as_view(), name="me_coderevision_id_rate" + ), + path("academy/coderevision", AcademyCodeRevisionView.as_view(), name="academy_coderevision"), + path( + "academy/task//coderevision", + AcademyCodeRevisionView.as_view(), + name="academy_task_id_coderevision", + ), + path( + "academy/coderevision/", AcademyCodeRevisionView.as_view(), name="academy_coderevision_id" + ), + path("user//task", TaskMeView.as_view(), name="user_id_task"), + path("user//task/", TaskMeView.as_view(), name="user_id_task_id"), + path("academy/cohort//task", CohortTaskView.as_view()), + path("academy/cohort//final_project", FinalProjectCohortView.as_view(), name="final_project_cohort"), + path( + "academy/cohort//final_project/", + FinalProjectCohortView.as_view(), + name="final_project_cohort_update", + ), + path("academy/user//task", TaskMeView.as_view(), name="academy_user_id_task"), + path("task//deliver/", deliver_assignment_view, name="task_id_deliver_token"), + path("task//deliver", TaskMeDeliverView.as_view(), name="task_id_deliver"), + path("task//attachment", TaskMeAttachmentView.as_view(), name="task_id_attachment"), + path("task/", TaskMeView.as_view(), name="task_id"), + path("sync/cohort//task", sync_cohort_tasks_view, name="sync_cohort_id_task"), ] diff --git a/breathecode/assignments/views.py b/breathecode/assignments/views.py index 0d91b7e8e..71478de4a 100644 --- a/breathecode/assignments/views.py +++ b/breathecode/assignments/views.py @@ -51,99 +51,110 @@ logger = logging.getLogger(__name__) MIME_ALLOW = [ - 'image/png', 'image/svg+xml', 'image/jpeg', 'image/gif', 'video/quicktime', 'video/mp4', 'audio/mpeg', - 'application/pdf', 'image/jpg', 'application/octet-stream' + "image/png", + "image/svg+xml", + "image/jpeg", + "image/gif", + "video/quicktime", + "video/mp4", + "audio/mpeg", + "application/pdf", + "image/jpg", + "application/octet-stream", ] -IMAGES_MIME_ALLOW = ['image/png', 'image/svg+xml', 'image/jpeg', 'image/jpg'] +IMAGES_MIME_ALLOW = ["image/png", "image/svg+xml", "image/jpeg", "image/jpg"] -USER_ASSIGNMENTS_BUCKET = os.getenv('USER_ASSIGNMENTS_BUCKET', None) +USER_ASSIGNMENTS_BUCKET = os.getenv("USER_ASSIGNMENTS_BUCKET", None) class TaskTeacherView(APIView): def get(self, request, task_id=None, user_id=None): items = Task.objects.all() - logger.debug(f'Found {items.count()} tasks') + logger.debug(f"Found {items.count()} tasks") - profile_ids = ProfileAcademy.objects.filter(user=request.user.id).values_list('academy__id', flat=True) + profile_ids = ProfileAcademy.objects.filter(user=request.user.id).values_list("academy__id", flat=True) if not profile_ids: raise ValidationException( - 'The quest user must belong to at least one academy to be able to request student tasks', + "The quest user must belong to at least one academy to be able to request student tasks", code=400, - 
slug='without-profile-academy') + slug="without-profile-academy", + ) items = items.filter(Q(cohort__academy__id__in=profile_ids) | Q(cohort__isnull=True)) - academy = request.GET.get('academy', None) + academy = request.GET.get("academy", None) if academy is not None: - items = items.filter(Q(cohort__academy__slug__in=academy.split(',')) | Q(cohort__isnull=True)) + items = items.filter(Q(cohort__academy__slug__in=academy.split(",")) | Q(cohort__isnull=True)) - user = request.GET.get('user', None) + user = request.GET.get("user", None) if user is not None: - items = items.filter(user__id__in=user.split(',')) + items = items.filter(user__id__in=user.split(",")) # tasks these cohorts (not the users, but the tasks belong to the cohort) - cohort = request.GET.get('cohort', None) + cohort = request.GET.get("cohort", None) if cohort is not None: - cohorts = cohort.split(',') + cohorts = cohort.split(",") ids = [x for x in cohorts if x.isnumeric()] slugs = [x for x in cohorts if not x.isnumeric()] items = items.filter(Q(cohort__slug__in=slugs) | Q(cohort__id__in=ids)) # tasks from users that belong to these cohort - stu_cohort = request.GET.get('stu_cohort', None) + stu_cohort = request.GET.get("stu_cohort", None) if stu_cohort is not None: - ids = stu_cohort.split(',') + ids = stu_cohort.split(",") - stu_cohorts = stu_cohort.split(',') + stu_cohorts = stu_cohort.split(",") ids = [x for x in stu_cohorts if x.isnumeric()] slugs = [x for x in stu_cohorts if not x.isnumeric()] items = items.filter( Q(user__cohortuser__cohort__id__in=ids) | Q(user__cohortuser__cohort__slug__in=slugs), - user__cohortuser__role='STUDENT', + user__cohortuser__role="STUDENT", ) - edu_status = request.GET.get('edu_status', None) + edu_status = request.GET.get("edu_status", None) if edu_status is not None: - items = items.filter(user__cohortuser__educational_status__in=edu_status.split(',')) + items = items.filter(user__cohortuser__educational_status__in=edu_status.split(",")) # tasks from users that belong to these cohort - teacher = request.GET.get('teacher', None) + teacher = request.GET.get("teacher", None) if teacher is not None: - teacher_cohorts = CohortUser.objects.filter(user__id__in=teacher.split(','), - role='TEACHER').values_list('cohort__id', flat=True) - items = items.filter(user__cohortuser__cohort__id__in=teacher_cohorts, - user__cohortuser__role='STUDENT').distinct() + teacher_cohorts = CohortUser.objects.filter(user__id__in=teacher.split(","), role="TEACHER").values_list( + "cohort__id", flat=True + ) + items = items.filter( + user__cohortuser__cohort__id__in=teacher_cohorts, user__cohortuser__role="STUDENT" + ).distinct() - task_status = request.GET.get('task_status', None) + task_status = request.GET.get("task_status", None) if task_status is not None: - items = items.filter(task_status__in=task_status.split(',')) + items = items.filter(task_status__in=task_status.split(",")) - revision_status = request.GET.get('revision_status', None) + revision_status = request.GET.get("revision_status", None) if revision_status is not None: - items = items.filter(revision_status__in=revision_status.split(',')) + items = items.filter(revision_status__in=revision_status.split(",")) - task_type = request.GET.get('task_type', None) + task_type = request.GET.get("task_type", None) if task_type is not None: - items = items.filter(task_type__in=task_type.split(',')) + items = items.filter(task_type__in=task_type.split(",")) - items = items.order_by('created_at') + items = items.order_by("created_at") serializer = 
TaskGETSerializer(items, many=True)
         return Response(serializer.data)
-@api_view(['POST'])
+@api_view(["POST"])
 def sync_cohort_tasks_view(request, cohort_id=None):
     item = Cohort.objects.filter(id=cohort_id).first()
     if item is None:
-        raise ValidationException('Cohort not found')
+        raise ValidationException("Cohort not found")
     syncronized = sync_cohort_tasks(item)
     if len(syncronized) == 0:
-        raise ValidationException('No tasks updated')
+        raise ValidationException("No tasks updated")
     serializer = TaskGETSerializer(syncronized, many=True)
     return Response(serializer.data, status=status.HTTP_200_OK)
@@ -164,7 +175,7 @@ class AssignmentTelemetryView(APIView, GenerateLookupsMixin):
         #     serializer = AcademyCommentSerializer(items, many=True)
         #     return handler.response(serializer.data)
-    @has_permission('upload_assignment_telemetry')
+    @has_permission("upload_assignment_telemetry")
     def post(self, request, academy_id=None):
         webhook = LearnPack.add_webhook_to_log(request.data)
@@ -172,14 +183,13 @@ def post(self, request, academy_id=None):
         if webhook:
             tasks.async_learnpack_webhook.delay(webhook.id)
         else:
-            logger.debug('A request cannot be parsed, maybe you should update `LearnPack'
-                         '.add_webhook_to_log`')
+            logger.debug("A request cannot be parsed, maybe you should update `LearnPack" ".add_webhook_to_log`")
             logger.debug(request.data)
-            return Response('this request couldn\'t no be processed',
-                            status=status.HTTP_400_BAD_REQUEST,
-                            content_type='text/plain')
+            return Response(
+                "this request couldn't be processed", status=status.HTTP_400_BAD_REQUEST, content_type="text/plain"
+            )
-        return Response('ok', content_type='text/plain')
+        return Response("ok", content_type="text/plain")
 class FinalProjectScreenshotView(APIView):
@@ -189,17 +199,18 @@ def upload(self, request, update=False):
         lang = get_user_language(request)
-        files = request.data.getlist('file')
-        names = request.data.getlist('name')
+        files = request.data.getlist("file")
+        names = request.data.getlist("name")
-        file = request.data.get('file')
+        file = request.data.get("file")
         slugs = []
         for index in range(0, len(files)):
             file = files[index]
             if file.content_type not in IMAGES_MIME_ALLOW:
                 raise ValidationException(
-                    f'You can upload only files on the following formats: {",".join(IMAGES_MIME_ALLOW)}')
+                    f'You can upload only files on the following formats: {",".join(IMAGES_MIME_ALLOW)}'
+                )
         for index in range(0, len(files)):
             file = files[index]
@@ -210,8 +221,8 @@ def upload(self, request, update=False):
             slugs.append(slug)
             data = {
-                'hash': hash,
-                'mime': file.content_type,
+                "hash": hash,
+                "mime": file.content_type,
             }
             # upload file section
@@ -219,18 +230,21 @@ def upload(self, request, update=False):
                 storage = Storage()
                 cloud_file = storage.file(USER_ASSIGNMENTS_BUCKET, hash)
                 cloud_file.upload(file, content_type=file.content_type)
-                data['url'] = cloud_file.url()
+                data["url"] = cloud_file.url()
             except CircuitBreakerError:
-                raise ValidationException(translation(
-                    lang,
-                    en='The circuit breaker is open due to an error, please try again later',
-                    es='El circuit breaker está abierto debido a un error, por favor intente más tarde',
-                    slug='circuit-breaker-open'),
-                                          slug='circuit-breaker-open',
-                                          data={'service': 'Google Cloud Storage'},
-                                          silent=True,
-                                          code=503)
+                raise ValidationException(
+                    translation(
+                        lang,
+                        en="The circuit breaker is open due to an error, please try again later",
+                        es="El circuit breaker está abierto debido a un
slug="circuit-breaker-open", + data={"service": "Google Cloud Storage"}, + silent=True, + code=503, + ) return data @@ -249,37 +263,37 @@ def get(self, request, project_id=None, user_id=None): if project_id is not None: item = FinalProject.objects.filter(id=project_id, user__id=user_id).first() if item is None: - raise ValidationException('Project not found', code=404, slug='project-not-found') + raise ValidationException("Project not found", code=404, slug="project-not-found") serializer = FinalProjectGETSerializer(item, many=False) return Response(serializer.data) items = FinalProject.objects.filter(members__id=user_id) - project_status = request.GET.get('project_status', None) + project_status = request.GET.get("project_status", None) if project_status is not None: - items = items.filter(project_status__in=project_status.split(',')) + items = items.filter(project_status__in=project_status.split(",")) - members = request.GET.get('members', None) + members = request.GET.get("members", None) if members is not None and isinstance(members, list): items = items.filter(members__id__in=members) - revision_status = request.GET.get('revision_status', None) + revision_status = request.GET.get("revision_status", None) if revision_status is not None: - items = items.filter(revision_status__in=revision_status.split(',')) + items = items.filter(revision_status__in=revision_status.split(",")) - visibility_status = request.GET.get('visibility_status', None) + visibility_status = request.GET.get("visibility_status", None) if visibility_status is not None: - items = items.filter(visibility_status__in=visibility_status.split(',')) + items = items.filter(visibility_status__in=visibility_status.split(",")) else: - items = items.filter(visibility_status='PUBLIC') + items = items.filter(visibility_status="PUBLIC") - cohort = request.GET.get('cohort', None) + cohort = request.GET.get("cohort", None) if cohort is not None: - if cohort == 'null': + if cohort == "null": items = items.filter(cohort__isnull=True) else: - cohorts = cohort.split(',') + cohorts = cohort.split(",") ids = [x for x in cohorts if x.isnumeric()] slugs = [x for x in cohorts if not x.isnumeric()] items = items.filter(Q(cohort__slug__in=slugs) | Q(cohort__id__in=ids)) @@ -297,16 +311,13 @@ def post(self, request, user_id=None): if isinstance(request.data, list) == False: payload = [request.data] - members_set = set(payload[0]['members']) + members_set = set(payload[0]["members"]) members_set.add(user_id) - payload[0]['members'] = list(members_set) - - serializer = PostFinalProjectSerializer(data=payload, - context={ - 'request': request, - 'user_id': user_id - }, - many=True) + payload[0]["members"] = list(members_set) + + serializer = PostFinalProjectSerializer( + data=payload, context={"request": request, "user_id": user_id}, many=True + ) if serializer.is_valid(): serializer.save() # tasks.teacher_task_notification.delay(serializer.data['id']) @@ -319,31 +330,37 @@ def update(_req, data, _id=None, only_validate=True): lang = get_user_language(request) if _id is None: - raise ValidationException('Missing project id to update', slug='missing-project-id') + raise ValidationException("Missing project id to update", slug="missing-project-id") item = FinalProject.objects.filter(id=_id).first() if item is None: - raise ValidationException('Final Project not found', slug='project-not-found') + raise ValidationException("Final Project not found", slug="project-not-found") - if 'cohort' not in data: + if "cohort" not in data: raise 
ValidationException( - translation(lang, - en='Final project cohort missing', - es='Falta la cohorte del proyecto final', - slug='cohort-missing')) - project_cohort = Cohort.objects.filter(id=data['cohort']).first() - staff = ProfileAcademy.objects.filter(~Q(role__slug='student'), - academy__id=project_cohort.academy.id, - user__id=request.user.id).first() + translation( + lang, + en="Final project cohort missing", + es="Falta la cohorte del proyecto final", + slug="cohort-missing", + ) + ) + project_cohort = Cohort.objects.filter(id=data["cohort"]).first() + staff = ProfileAcademy.objects.filter( + ~Q(role__slug="student"), academy__id=project_cohort.academy.id, user__id=request.user.id + ).first() if not item.members.filter(id=request.user.id).exists() and staff is None: raise ValidationException( - translation(lang, - en='You are not a member of this project', - es='No eres miembro de este proyecto', - slug='not-a-member')) + translation( + lang, + en="You are not a member of this project", + es="No eres miembro de este proyecto", + slug="not-a-member", + ) + ) - serializer = PUTFinalProjectSerializer(item, data=data, context={'request': _req}) + serializer = PUTFinalProjectSerializer(item, data=data, context={"request": _req}) if serializer.is_valid(): if not only_validate: serializer.save() @@ -358,19 +375,20 @@ def update(_req, data, _id=None, only_validate=True): if isinstance(request.data, list) == False: raise ValidationException( - 'You are trying to update many project at once but you didn\'t provide a list on the payload', - slug='update-without-list') + "You are trying to update many project at once but you didn't provide a list on the payload", + slug="update-without-list", + ) for item in request.data: - if 'id' not in item: - item['id'] = None - code, data = update(request, item, item['id'], only_validate=True) + if "id" not in item: + item["id"] = None + code, data = update(request, item, item["id"], only_validate=True) if code != status.HTTP_200_OK: return Response(data, status=code) updated_projects = [] for item in request.data: - code, data = update(request, item, item['id'], only_validate=False) + code, data = update(request, item, item["id"], only_validate=False) if code == status.HTTP_200_OK: updated_projects.append(data) @@ -379,45 +397,40 @@ def update(_req, data, _id=None, only_validate=True): class FinalProjectCohortView(APIView): - @capable_of('read_assignment') + @capable_of("read_assignment") def get(self, request, academy_id, cohort_id): lang = get_user_language(request) cohort = Cohort.objects.filter(id=cohort_id).first() if cohort is None: - raise ValidationException(translation(lang, - en='Cohort not found', - es='Cohorte no encontrada', - slug='cohort-not-found'), - code=404) + raise ValidationException( + translation(lang, en="Cohort not found", es="Cohorte no encontrada", slug="cohort-not-found"), code=404 + ) items = FinalProject.objects.filter(cohort__id=cohort.id) serializer = FinalProjectGETSerializer(items, many=True) return Response(serializer.data) - @capable_of('crud_assignment') + @capable_of("crud_assignment") def put(self, request, academy_id, cohort_id, final_project_id): lang = get_user_language(request) cohort = Cohort.objects.filter(id=cohort_id).first() if cohort is None: - raise ValidationException(translation(lang, - en='Cohort not found', - es='Cohorte no encontrada', - slug='cohort-not-found'), - code=404) + raise ValidationException( + translation(lang, en="Cohort not found", es="Cohorte no encontrada", slug="cohort-not-found"), 
code=404 + ) item = FinalProject.objects.filter(id=final_project_id).first() if item is None: - raise ValidationException(translation(lang, - en='Project not found', - es='Proyecto no encontrado', - slug='project-not-found'), - code=404) + raise ValidationException( + translation(lang, en="Project not found", es="Proyecto no encontrado", slug="project-not-found"), + code=404, + ) - serializer = PUTFinalProjectSerializer(item, data=request.data, context={'request': request}) + serializer = PUTFinalProjectSerializer(item, data=request.data, context={"request": request}) if serializer.is_valid(): serializer.save() return Response(serializer.data, status=status.HTTP_200_OK) @@ -425,9 +438,9 @@ def put(self, request, academy_id, cohort_id, final_project_id): class CohortTaskView(APIView, GenerateLookupsMixin): - extensions = APIViewExtensions(cache=TaskCache, sort='-created_at', paginate=True) + extensions = APIViewExtensions(cache=TaskCache, sort="-created_at", paginate=True) - @capable_of('read_assignment') + @capable_of("read_assignment") def get(self, request, cohort_id, academy_id): handler = self.extensions(request) @@ -439,35 +452,35 @@ def get(self, request, cohort_id, academy_id): lookup = {} if isinstance(cohort_id, int) or cohort_id.isnumeric(): - lookup['cohort__id'] = cohort_id + lookup["cohort__id"] = cohort_id else: - lookup['cohort__slug'] = cohort_id + lookup["cohort__slug"] = cohort_id - task_type = request.GET.get('task_type', None) + task_type = request.GET.get("task_type", None) if task_type is not None: - lookup['task_type__in'] = task_type.split(',') + lookup["task_type__in"] = task_type.split(",") - task_status = request.GET.get('task_status', None) + task_status = request.GET.get("task_status", None) if task_status is not None: - lookup['task_status__in'] = task_status.split(',') + lookup["task_status__in"] = task_status.split(",") - revision_status = request.GET.get('revision_status', None) + revision_status = request.GET.get("revision_status", None) if revision_status is not None: - lookup['revision_status__in'] = revision_status.split(',') + lookup["revision_status__in"] = revision_status.split(",") - educational_status = request.GET.get('educational_status', None) + educational_status = request.GET.get("educational_status", None) if educational_status is not None: - lookup['user__cohortuser__educational_status__in'] = educational_status.split(',') + lookup["user__cohortuser__educational_status__in"] = educational_status.split(",") - like = request.GET.get('like', None) - if like is not None and like != 'undefined' and like != '': + like = request.GET.get("like", None) + if like is not None and like != "undefined" and like != "": items = items.filter(Q(associated_slug__icontains=like) | Q(title__icontains=like)) # tasks from users that belong to these cohort - student = request.GET.get('student', None) + student = request.GET.get("student", None) if student is not None: - lookup['user__cohortuser__user__id__in'] = student.split(',') - lookup['user__cohortuser__role'] = 'STUDENT' + lookup["user__cohortuser__user__id__in"] = student.split(",") + lookup["user__cohortuser__role"] = "STUDENT" if educational_status is not None or student is not None: items = items.distinct() @@ -481,12 +494,12 @@ def get(self, request, cohort_id, academy_id): class TaskMeAttachmentView(APIView): - @capable_of('read_assignment') + @capable_of("read_assignment") def get(self, request, task_id, academy_id): item = Task.objects.filter(id=task_id).first() if item is None: - raise 
ValidationException('Task not found', code=404, slug='task-not-found') + raise ValidationException("Task not found", code=404, slug="task-not-found") allowed = item.user.id == request.user.id if not allowed: @@ -495,7 +508,7 @@ def get(self, request, task_id, academy_id): if not allowed: raise PermissionDenied( - 'Attachments can only be reviewed by their authors or the academy staff with read_assignment capability' + "Attachments can only be reviewed by their authors or the academy staff with read_assignment capability" ) serializer = TaskAttachmentSerializer(item.attachments.all(), many=True) @@ -506,28 +519,28 @@ def upload(self, request, update=False, mime_allow=None): lang = get_user_language(request) - files = request.data.getlist('file') - names = request.data.getlist('name') + files = request.data.getlist("file") + names = request.data.getlist("name") result = { - 'data': [], - 'instance': [], + "data": [], + "instance": [], } - file = request.data.get('file') + file = request.data.get("file") slugs = [] if not file: - raise ValidationException('Missing file in request', code=400) + raise ValidationException("Missing file in request", code=400) if not len(files): - raise ValidationException('empty files in request') + raise ValidationException("empty files in request") if not len(names): for file in files: names.append(file.name) elif len(files) != len(names): - raise ValidationException('numbers of files and names not match') + raise ValidationException("numbers of files and names not match") if mime_allow is None: mime_allow = MIME_ALLOW @@ -545,31 +558,31 @@ def upload(self, request, update=False, mime_allow=None): name = names[index] if len(names) else file.name file_bytes = file.read() hash = hashlib.sha256(file_bytes).hexdigest() - slug = str(request.user.id) + '-' + slugify(name) + slug = str(request.user.id) + "-" + slugify(name) slug_number = UserAttachment.objects.filter(slug__startswith=slug).exclude(hash=hash).count() + 1 if slug_number > 1: while True: roman_number = num_to_roman(slug_number, lower=True) - slug = f'{slug}-{roman_number}' + slug = f"{slug}-{roman_number}" if not slug in slugs: break slug_number = slug_number + 1 slugs.append(slug) data = { - 'hash': hash, - 'slug': slug, - 'mime': file.content_type, - 'name': name, - 'categories': [], - 'user': request.user.id, + "hash": hash, + "slug": slug, + "mime": file.content_type, + "name": name, + "categories": [], + "user": request.user.id, } media = UserAttachment.objects.filter(hash=hash, user__id=request.user.id).first() if media: - data['id'] = media.id - data['url'] = media.url + data["id"] = media.id + data["url"] = media.url else: # upload file section @@ -577,32 +590,36 @@ def upload(self, request, update=False, mime_allow=None): storage = Storage() cloud_file = storage.file(USER_ASSIGNMENTS_BUCKET, hash) cloud_file.upload(file, content_type=file.content_type) - data['url'] = cloud_file.url() + data["url"] = cloud_file.url() except CircuitBreakerError: - raise ValidationException(translation( - lang, - en='The circuit breaker is open due to an error, please try again later', - es='El circuit breaker está abierto debido a un error, por favor intente más tarde', - slug='circuit-breaker-open'), - slug='circuit-breaker-open', - data={'service': 'Google Cloud Storage'}, - silent=True, - code=503) - - result['data'].append(data) + raise ValidationException( + translation( + lang, + en="The circuit breaker is open due to an error, please try again later", + es="El circuit breaker está abierto debido a un 
error, por favor intente más tarde", + slug="circuit-breaker-open", + ), + slug="circuit-breaker-open", + data={"service": "Google Cloud Storage"}, + silent=True, + code=503, + ) + + result["data"].append(data) from django.db.models import Q + query = None - datas_with_id = [x for x in result['data'] if 'id' in x] + datas_with_id = [x for x in result["data"] if "id" in x] for x in datas_with_id: if query: - query = query | Q(id=x['id']) + query = query | Q(id=x["id"]) else: - query = Q(id=x['id']) + query = Q(id=x["id"]) if query: - result['instance'] = UserAttachment.objects.filter(query) + result["instance"] = UserAttachment.objects.filter(query) return result @@ -610,14 +627,13 @@ def put(self, request, task_id): item = Task.objects.filter(id=task_id, user__id=request.user.id).first() if item is None: - raise ValidationException('Task not found', code=404, slug='task-not-found') + raise ValidationException("Task not found", code=404, slug="task-not-found") # TODO: mime types are not being validated on the backend upload = self.upload(request, update=True, mime_allow=None) - serializer = UserAttachmentSerializer(upload['instance'], - data=upload['data'], - context=upload['data'], - many=True) + serializer = UserAttachmentSerializer( + upload["instance"], data=upload["data"], context=upload["data"], many=True + ) if serializer.is_valid(): serializer.save() @@ -646,38 +662,38 @@ def get(self, request, task_id=None, user_id=None): if task_id is not None: item = Task.objects.filter(id=task_id, user__id=user_id).first() if item is None: - raise ValidationException('Task not found', code=404, slug='task-not-found') + raise ValidationException("Task not found", code=404, slug="task-not-found") serializer = TaskGETSerializer(item, many=False) return Response(serializer.data) items = Task.objects.filter(user__id=user_id) - task_type = request.GET.get('task_type', None) + task_type = request.GET.get("task_type", None) if task_type is not None: - items = items.filter(task_type__in=task_type.split(',')) + items = items.filter(task_type__in=task_type.split(",")) - task_status = request.GET.get('task_status', None) + task_status = request.GET.get("task_status", None) if task_status is not None: - items = items.filter(task_status__in=task_status.split(',')) + items = items.filter(task_status__in=task_status.split(",")) - revision_status = request.GET.get('revision_status', None) + revision_status = request.GET.get("revision_status", None) if revision_status is not None: - items = items.filter(revision_status__in=revision_status.split(',')) + items = items.filter(revision_status__in=revision_status.split(",")) - cohort = request.GET.get('cohort', None) + cohort = request.GET.get("cohort", None) if cohort is not None: - if cohort == 'null': + if cohort == "null": items = items.filter(cohort__isnull=True) else: - cohorts = cohort.split(',') + cohorts = cohort.split(",") ids = [x for x in cohorts if x.isnumeric()] slugs = [x for x in cohorts if not x.isnumeric()] items = items.filter(Q(cohort__slug__in=slugs) | Q(cohort__id__in=ids)) - a_slug = request.GET.get('associated_slug', None) + a_slug = request.GET.get("associated_slug", None) if a_slug is not None: - items = items.filter(associated_slug__in=[p.lower() for p in a_slug.split(',')]) + items = items.filter(associated_slug__in=[p.lower() for p in a_slug.split(",")]) items = handler.queryset(items) @@ -688,13 +704,13 @@ def put(self, request, task_id=None): def update(_req, data, _id=None, only_validate=True): if _id is None: - raise 
ValidationException('Missing task id to update', slug='missing=task-id')
+                raise ValidationException("Missing task id to update", slug="missing=task-id")
             item = Task.objects.filter(id=_id).first()
             if item is None:
-                raise ValidationException('Task not found', slug='task-not-found', code=404)
+                raise ValidationException("Task not found", slug="task-not-found", code=404)
-            serializer = PUTTaskSerializer(item, data=data, context={'request': _req})
+            serializer = PUTTaskSerializer(item, data=data, context={"request": _req})
             if serializer.is_valid():
                 if not only_validate:
                     serializer.save()
@@ -711,19 +727,20 @@ def update(_req, data, _id=None, only_validate=True):
         if isinstance(request.data, list) == False:
             raise ValidationException(
-                'You are trying to update many tasks at once but you didn\'t provide a list on the payload',
-                slug='update-whout-list')
+                "You are trying to update many tasks at once but you didn't provide a list on the payload",
+                slug="update-whout-list",
+            )
         for item in request.data:
-            if 'id' not in item:
-                item['id'] = None
-            code, data = update(request, item, item['id'], only_validate=True)
+            if "id" not in item:
+                item["id"] = None
+            code, data = update(request, item, item["id"], only_validate=True)
             if code != status.HTTP_200_OK:
                 return Response(data, status=code)
         updated_tasks = []
         for item in request.data:
-            code, data = update(request, item, item['id'], only_validate=False)
+            code, data = update(request, item, item["id"], only_validate=False)
             if code == status.HTTP_200_OK:
                 updated_tasks.append(data)
@@ -740,14 +757,13 @@ def post(self, request, user_id=None):
         if isinstance(request.data, list) == False:
             payload = [request.data]
-        serializer = PostTaskSerializer(data=payload, context={'request': request, 'user_id': user_id}, many=True)
+        serializer = PostTaskSerializer(data=payload, context={"request": request, "user_id": user_id}, many=True)
         if serializer.is_valid():
             tasks = serializer.save()
             # tasks.teacher_task_notification.delay(serializer.data['id'])
-            tasks_activity.add_activity.delay(request.user.id,
-                                              'open_syllabus_module',
-                                              related_type='assignments.Task',
-                                              related_id=tasks[0].id)
+            tasks_activity.add_activity.delay(
+                request.user.id, "open_syllabus_module", related_type="assignments.Task", related_id=tasks[0].id
+            )
             return Response(serializer.data, status=status.HTTP_201_CREATED)
         return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@@ -757,19 +773,19 @@ def delete(self, request, task_id=None):
         if task_id is not None:
             item = Task.objects.filter(id=task_id).first()
             if item is None:
-                raise ValidationException('Task not found', code=404, slug='task-not-found')
+                raise ValidationException("Task not found", code=404, slug="task-not-found")
             if item.user.id != request.user.id:
-                raise ValidationException('Task not found for this user', code=400, slug='task-not-found-for-this-user')
+                raise ValidationException("Task not found for this user", code=400, slug="task-not-found-for-this-user")
             item.delete()
         else:  # task_id is None:
-            ids = request.GET.get('id', '')
-            if ids == '':
-                raise ValidationException('Missing querystring propery id for bulk delete tasks', slug='missing-id')
+            ids = request.GET.get("id", "")
+            if ids == "":
+                raise ValidationException("Missing querystring property id for bulk delete tasks", slug="missing-id")
-            ids_to_delete = [int(id.strip()) if id.strip().isnumeric() else id.strip() for id in ids.split(',')]
+            ids_to_delete = [int(id.strip()) if id.strip().isnumeric() else id.strip() for id in ids.split(",")]
         all =
Task.objects.filter(id__in=ids_to_delete) do_not_belong = all.exclude(user__id=request.user.id) @@ -786,17 +802,21 @@ def delete(self, request, task_id=None): if do_not_belong: responses.append( - MultiStatusResponse('Task not found for this user', - code=400, - slug='task-not-found-for-this-user', - queryset=do_not_belong)) + MultiStatusResponse( + "Task not found for this user", + code=400, + slug="task-not-found-for-this-user", + queryset=do_not_belong, + ) + ) if ids_to_delete: responses.append( - MultiStatusResponse('Task not found', code=404, slug='task-not-found', queryset=ids_to_delete)) + MultiStatusResponse("Task not found", code=404, slug="task-not-found", queryset=ids_to_delete) + ) if do_not_belong or ids_to_delete: - response = response_207(responses, 'associated_slug') + response = response_207(responses, "associated_slug") belong.delete() return response @@ -807,12 +827,12 @@ def delete(self, request, task_id=None): class TaskMeDeliverView(APIView): - @capable_of('task_delivery_details') + @capable_of("task_delivery_details") def get(self, request, task_id, academy_id): item = Task.objects.filter(id=task_id).first() if item is None: - raise ValidationException('Task not found') + raise ValidationException("Task not found") serializer = TaskGETDeliverSerializer(item, many=False) return Response(serializer.data) @@ -820,79 +840,84 @@ def get(self, request, task_id, academy_id): def deliver_assignment_view(request, task_id, token): - if request.method == 'POST': + if request.method == "POST": _dict = request.POST.copy() form = DeliverAssigntmentForm(_dict) - if 'github_url' not in _dict or _dict['github_url'] == '': - messages.error(request, 'Github URL is required') - return render(request, 'form.html', {'form': form}) + if "github_url" not in _dict or _dict["github_url"] == "": + messages.error(request, "Github URL is required") + return render(request, "form.html", {"form": form}) - token = Token.objects.filter(key=_dict['token']).first() + token = Token.objects.filter(key=_dict["token"]).first() if token is None or token.expires_at < timezone.now(): messages.error(request, f'Invalid or expired deliver token {_dict["token"]}') - return render(request, 'form.html', {'form': form}) + return render(request, "form.html", {"form": form}) - task = Task.objects.filter(id=_dict['task_id']).first() + task = Task.objects.filter(id=_dict["task_id"]).first() if task is None: - messages.error(request, 'Invalid task id') - return render(request, 'form.html', {'form': form}) + messages.error(request, "Invalid task id") + return render(request, "form.html", {"form": form}) deliver_task( task=task, - github_url=_dict['github_url'], - live_url=_dict['live_url'], + github_url=_dict["github_url"], + live_url=_dict["live_url"], ) - if 'callback' in _dict and _dict['callback'] != '': - return HttpResponseRedirect(redirect_to=_dict['callback'] + '?msg=The task has been delivered') + if "callback" in _dict and _dict["callback"] != "": + return HttpResponseRedirect(redirect_to=_dict["callback"] + "?msg=The task has been delivered") else: obj = {} if task.cohort: - obj['COMPANY_INFO_EMAIL'] = task.cohort.academy.feedback_email - obj['COMPANY_LEGAL_NAME'] = task.cohort.academy.legal_name or task.cohort.academy.name - obj['COMPANY_LOGO'] = task.cohort.academy.logo_url - obj['COMPANY_NAME'] = task.cohort.academy.name + obj["COMPANY_INFO_EMAIL"] = task.cohort.academy.feedback_email + obj["COMPANY_LEGAL_NAME"] = task.cohort.academy.legal_name or task.cohort.academy.name + obj["COMPANY_LOGO"] = 
task.cohort.academy.logo_url + obj["COMPANY_NAME"] = task.cohort.academy.name - if 'heading' not in obj: - obj['heading'] = task.cohort.academy.name + if "heading" not in obj: + obj["heading"] = task.cohort.academy.name - return render(request, 'message.html', {'message': 'The task has been delivered', **obj}) + return render(request, "message.html", {"message": "The task has been delivered", **obj}) else: task = Task.objects.filter(id=task_id).first() if task is None: - return render(request, 'message.html', { - 'message': f'Invalid assignment id {str(task_id)}', - }) + return render( + request, + "message.html", + { + "message": f"Invalid assignment id {str(task_id)}", + }, + ) _dict = request.GET.copy() - _dict['callback'] = request.GET.get('callback', '') - _dict['token'] = token - _dict['task_name'] = task.title - _dict['task_id'] = task.id + _dict["callback"] = request.GET.get("callback", "") + _dict["token"] = token + _dict["task_name"] = task.title + _dict["task_id"] = task.id form = DeliverAssigntmentForm(_dict) data = {} if task.cohort: - data['COMPANY_INFO_EMAIL'] = task.cohort.academy.feedback_email - data['COMPANY_LEGAL_NAME'] = task.cohort.academy.legal_name or task.cohort.academy.name - data['COMPANY_LOGO'] = task.cohort.academy.logo_url - data['COMPANY_NAME'] = task.cohort.academy.name + data["COMPANY_INFO_EMAIL"] = task.cohort.academy.feedback_email + data["COMPANY_LEGAL_NAME"] = task.cohort.academy.legal_name or task.cohort.academy.name + data["COMPANY_LOGO"] = task.cohort.academy.logo_url + data["COMPANY_NAME"] = task.cohort.academy.name - if 'heading' not in data: - data['heading'] = task.cohort.academy.name + if "heading" not in data: + data["heading"] = task.cohort.academy.name return render( request, - 'form.html', + "form.html", { - 'form': form, + "form": form, # 'heading': 'Deliver project assignment', - 'intro': 'Please fill the following information to deliver your assignment', - 'btn_lable': 'Deliver Assignment', + "intro": "Please fill the following information to deliver your assignment", + "btn_lable": "Deliver Assignment", **data, - }) + }, + ) class SubtaskMeView(APIView): @@ -901,7 +926,7 @@ def get(self, request, task_id): item = Task.objects.filter(id=task_id, user__id=request.user.id).first() if item is None: - raise ValidationException('Task not found', code=404, slug='task-not-found') + raise ValidationException("Task not found", code=404, slug="task-not-found") return Response(item.subtasks) @@ -909,33 +934,35 @@ def put(self, request, task_id): item = Task.objects.filter(id=task_id, user__id=request.user.id).first() if item is None: - raise ValidationException('Task not found', code=404, slug='task-not-found') + raise ValidationException("Task not found", code=404, slug="task-not-found") if not isinstance(request.data, list): - raise ValidationException('Subtasks json must be an array of tasks', code=404, slug='json-as-array') + raise ValidationException("Subtasks json must be an array of tasks", code=404, slug="json-as-array") subtasks_ids = [] for t in request.data: - if not 'id' in t: - raise ValidationException('All substasks must have a unique id', - code=404, - slug='missing-subtask-unique-id') + if not "id" in t: + raise ValidationException( + "All substasks must have a unique id", code=404, slug="missing-subtask-unique-id" + ) else: try: - found = subtasks_ids.index(t['id']) - raise ValidationException(f'Duplicated subtask id {t["id"]} for the assignment on position {found}', - code=404, - slug='duplicated-subtask-unique-id') + found = 
subtasks_ids.index(t["id"]) + raise ValidationException( + f'Duplicated subtask id {t["id"]} for the assignment on position {found}', + code=404, + slug="duplicated-subtask-unique-id", + ) except Exception: - subtasks_ids.append(t['id']) + subtasks_ids.append(t["id"]) - if not 'status' in t: - raise ValidationException('All substasks must have a status', code=404, slug='missing-subtask-status') - elif t['status'] not in ['DONE', 'PENDING']: - raise ValidationException('Subtask status must be DONE or PENDING, received: ' + t['status']) + if not "status" in t: + raise ValidationException("All substasks must have a status", code=404, slug="missing-subtask-status") + elif t["status"] not in ["DONE", "PENDING"]: + raise ValidationException("Subtask status must be DONE or PENDING, received: " + t["status"]) - if not 'label' in t: - raise ValidationException('All substasks must have a label', code=404, slug='missing-task-label') + if not "label" in t: + raise ValidationException("All substasks must have a label", code=404, slug="missing-task-label") item.subtasks = request.data item.save() @@ -953,14 +980,14 @@ def get_user(self): def get_github_credentials(self): res = None - if hasattr(self.request.user, 'credentialsgithub'): + if hasattr(self.request.user, "credentialsgithub"): res = self.request.user.credentialsgithub return res @sync_to_async def has_github_credentials(self, user): - return hasattr(user, 'credentialsgithub') + return hasattr(user, "credentialsgithub") async def get(self, request, task_id=None): lang = await aget_user_language(request) @@ -970,27 +997,32 @@ async def get(self, request, task_id=None): user = await self.get_user() - if task_id and not (task := await Task.objects.filter(id=task_id, user__id=user.id).exclude(github_url=None - ).afirst()): - raise ValidationException('Task not found', code=404, slug='task-not-found') + if task_id and not ( + task := await Task.objects.filter(id=task_id, user__id=user.id).exclude(github_url=None).afirst() + ): + raise ValidationException("Task not found", code=404, slug="task-not-found") github_credentials = await self.get_github_credentials() if github_credentials is None: - raise ValidationException(translation(lang, - en='You need to connect your Github account first', - es='Necesitas conectar tu cuenta de Github primero', - slug='github-account-not-connected'), - code=400) + raise ValidationException( + translation( + lang, + en="You need to connect your Github account first", + es="Necesitas conectar tu cuenta de Github primero", + slug="github-account-not-connected", + ), + code=400, + ) if task_id and task and task.github_url: - params['repo'] = task.github_url + params["repo"] = task.github_url - params['github_username'] = github_credentials.username + params["github_username"] = github_credentials.username - async with Service('rigobot', user.id, proxy=True) as s: - return await s.get('/v1/finetuning/me/coderevision', params=params) + async with Service("rigobot", user.id, proxy=True) as s: + return await s.get("/v1/finetuning/me/coderevision", params=params) - @consume('add_code_review', consumer=code_revision_service) + @consume("add_code_review", consumer=code_revision_service) async def post(self, request, task_id): lang = await aget_user_language(request) params = {} @@ -1001,21 +1033,25 @@ async def post(self, request, task_id): item = await Task.objects.filter(id=task_id, user__id=user.id).afirst() if item is None: - raise ValidationException('Task not found', code=404, slug='task-not-found') + raise 
ValidationException("Task not found", code=404, slug="task-not-found") github_credentials = await self.get_github_credentials() if github_credentials is None: - raise ValidationException(translation(lang, - en='You need to connect your Github account first', - es='Necesitas conectar tu cuenta de Github primero', - slug='github-account-not-connected'), - code=400) + raise ValidationException( + translation( + lang, + en="You need to connect your Github account first", + es="Necesitas conectar tu cuenta de Github primero", + slug="github-account-not-connected", + ), + code=400, + ) - params['github_username'] = github_credentials.username - params['repo'] = item.github_url + params["github_username"] = github_credentials.username + params["repo"] = item.github_url - async with Service('rigobot', request.user.id, proxy=True) as s: - return await s.post('/v1/finetuning/coderevision/', data=request.data, params=params) + async with Service("rigobot", request.user.id, proxy=True) as s: + return await s.post("/v1/finetuning/coderevision/", data=request.data, params=params) class AcademyCodeRevisionView(APIView): @@ -1024,11 +1060,15 @@ class AcademyCodeRevisionView(APIView): def get_user(self): return self.request.user - @acapable_of('read_assignment') + @acapable_of("read_assignment") async def get(self, request, academy_id=None, task_id=None, coderevision_id=None): - if task_id and not (task := await Task.objects.filter( - id=task_id, cohort__academy__id=academy_id).exclude(github_url=None).prefetch_related('user').afirst()): - raise ValidationException('Task not found', code=404, slug='task-not-found') + if task_id and not ( + task := await Task.objects.filter(id=task_id, cohort__academy__id=academy_id) + .exclude(github_url=None) + .prefetch_related("user") + .afirst() + ): + raise ValidationException("Task not found", code=404, slug="task-not-found") user = await self.get_user() @@ -1037,21 +1077,24 @@ async def get(self, request, academy_id=None, task_id=None, coderevision_id=None params[key] = request.GET.get(key) if task_id and task and task.github_url: - params['repo'] = task.github_url + params["repo"] = task.github_url - url = '/v1/finetuning/coderevision' + url = "/v1/finetuning/coderevision" if coderevision_id is not None: - url = f'{url}/{coderevision_id}' + url = f"{url}/{coderevision_id}" - async with Service('rigobot', user.id, proxy=True) as s: + async with Service("rigobot", user.id, proxy=True) as s: return await s.get(url, params=params) - @acapable_of('crud_assignment') + @acapable_of("crud_assignment") async def post(self, request, academy_id, task_id=None): - if task_id and not (task := await Task.objects.filter( - id=task_id, cohort__academy__id=academy_id).select_related('user').afirst()): - raise ValidationException('Task not found', code=404, slug='task-not-found') + if task_id and not ( + task := await Task.objects.filter(id=task_id, cohort__academy__id=academy_id) + .select_related("user") + .afirst() + ): + raise ValidationException("Task not found", code=404, slug="task-not-found") user = await self.get_user() @@ -1060,40 +1103,40 @@ async def post(self, request, academy_id, task_id=None): params[key] = request.GET.get(key) if task_id and task and task.github_url: - params['repo'] = task.github_url + params["repo"] = task.github_url - async with Service('rigobot', user.id, proxy=True) as s: - return await s.post('/v1/finetuning/coderevision', data=request.data, params=params) + async with Service("rigobot", user.id, proxy=True) as s: + return await 
s.post("/v1/finetuning/coderevision", data=request.data, params=params) class AcademyCommitFileView(APIView): - @acapable_of('read_assignment') + @acapable_of("read_assignment") async def get(self, request, academy_id, task_id=None, commitfile_id=None): if task_id and not (task := await Task.objects.filter(id=task_id, cohort__academy__id=academy_id).afirst()): - raise ValidationException('Task not found', code=404, slug='task-not-found') + raise ValidationException("Task not found", code=404, slug="task-not-found") params = {} for key in request.GET.keys(): params[key] = request.GET.get(key) if task_id and task and task.github_url: - params['repo'] = task.github_url + params["repo"] = task.github_url - url = '/v1/finetuning/commitfile' + url = "/v1/finetuning/commitfile" if commitfile_id is not None: - url = f'{url}/{commitfile_id}' + url = f"{url}/{commitfile_id}" - async with Service('rigobot', proxy=True) as s: + async with Service("rigobot", proxy=True) as s: return await s.get(url, params=params) class MeCodeRevisionRateView(APIView): async def post(self, request, coderevision_id): - async with Service('rigobot', request.user.id, proxy=True) as s: - return await s.post(f'/v1/finetuning/rate/coderevision/{coderevision_id}', data=request.data) + async with Service("rigobot", request.user.id, proxy=True) as s: + return await s.post(f"/v1/finetuning/rate/coderevision/{coderevision_id}", data=request.data) class MeCommitFileView(APIView): @@ -1104,28 +1147,30 @@ def get(self, request, commitfile_id=None, task_id=None): for key in request.GET.keys(): params[key] = request.GET.get(key) - url = '/v1/finetuning/commitfile' + url = "/v1/finetuning/commitfile" task = None if commitfile_id is not None: - url = f'{url}/{commitfile_id}' + url = f"{url}/{commitfile_id}" elif not (task := Task.objects.filter(id=task_id, user__id=request.user.id).first()): - raise ValidationException(translation(lang, - en='Task not found', - es='Tarea no encontrada', - slug='task-not-found'), - code=404) - - elif not hasattr(task.user, 'credentialsgithub'): - raise ValidationException(translation(lang, - en='You need to connect your Github account first', - es='Necesitas conectar tu cuenta de Github primero', - slug='github-account-not-connected'), - code=400) + raise ValidationException( + translation(lang, en="Task not found", es="Tarea no encontrada", slug="task-not-found"), code=404 + ) + + elif not hasattr(task.user, "credentialsgithub"): + raise ValidationException( + translation( + lang, + en="You need to connect your Github account first", + es="Necesitas conectar tu cuenta de Github primero", + slug="github-account-not-connected", + ), + code=400, + ) else: - params['repo'] = task.github_url - params['watcher'] = task.user.credentialsgithub.username + params["repo"] = task.github_url + params["watcher"] = task.user.credentialsgithub.username - with Service('rigobot', request.user.id, proxy=True) as s: + with Service("rigobot", request.user.id, proxy=True) as s: return s.get(url, params=params, stream=True) diff --git a/breathecode/authenticate/actions.py b/breathecode/authenticate/actions.py index ac6ad3c73..20f7da6c0 100644 --- a/breathecode/authenticate/actions.py +++ b/breathecode/authenticate/actions.py @@ -37,8 +37,8 @@ def get_app_url(): - url = os.getenv('APP_URL', 'https://4geeks.com') - if url and url[-1] == '/': + url = os.getenv("APP_URL", "https://4geeks.com") + if url and url[-1] == "/": url = url[:-1] return url @@ -46,17 +46,17 @@ def get_app_url(): def get_github_scopes(user): - scopes = 
['user'] + scopes = ["user"] belongs_to_academy = ProfileAcademy.objects.filter(user=user).exists() if belongs_to_academy: - scopes.append('repo') + scopes.append("repo") owns_github_organization = AcademyAuthSettings.objects.filter(github_owner=user).exists() if owns_github_organization: - scopes.append('admin:org') + scopes.append("admin:org") - return ' '.join(scopes) + return " ".join(scopes) def get_user(github_id=None, email=None): @@ -73,13 +73,13 @@ def create_user(github_id=None, email=None): return user -def delete_tokens(users=None, status='expired'): +def delete_tokens(users=None, status="expired"): now = timezone.now() tokens = Token.objects.all() if users is not None: tokens = tokens.filter(user__id__in=[users]) - if status == 'expired': + if status == "expired": tokens = Token.objects.filter(expires_at__lt=now) count = len(tokens) @@ -94,19 +94,22 @@ def reset_password(users=None, extra=None, academy=None): extra = {} if users is None or len(users) == 0: - raise Exception('Missing users') + raise Exception("Missing users") for user in users: - token, created = Token.get_or_create(user, token_type='temporal') + token, created = Token.get_or_create(user, token_type="temporal") # returns true or false if the email was send - return send_email_message('pick_password', - user.email, { - 'SUBJECT': 'You asked to reset your password at 4Geeks', - 'LINK': os.getenv('API_URL', '') + f'/v1/auth/password/{token}', - **extra, - }, - academy=academy) + return send_email_message( + "pick_password", + user.email, + { + "SUBJECT": "You asked to reset your password at 4Geeks", + "LINK": os.getenv("API_URL", "") + f"/v1/auth/password/{token}", + **extra, + }, + academy=academy, + ) return True @@ -115,22 +118,25 @@ def resend_invite(token=None, email=None, first_name=None, extra=None, academy=N if extra is None: extra = {} - params = {'callback': 'https://admin.4geeks.com'} + params = {"callback": "https://admin.4geeks.com"} querystr = urllib.parse.urlencode(params) - url = os.getenv('API_URL', '') + '/v1/auth/member/invite/' + str(token) + '?' + querystr - notify_actions.send_email_message('welcome_academy', - email, { - 'email': email, - 'subject': 'Invitation to join 4Geeks', - 'LINK': url, - 'FIST_NAME': first_name, - **extra, - }, - academy=academy) + url = os.getenv("API_URL", "") + "/v1/auth/member/invite/" + str(token) + "?" 
+ querystr + notify_actions.send_email_message( + "welcome_academy", + email, + { + "email": email, + "subject": "Invitation to join 4Geeks", + "LINK": url, + "FIST_NAME": first_name, + **extra, + }, + academy=academy, + ) def server_id(): - key = DeviceId.objects.filter(name='server').values_list('key', flat=True).first() + key = DeviceId.objects.filter(name="server").values_list("key", flat=True).first() if key: return key @@ -140,13 +146,13 @@ def server_id(): n3 = str(randint(0, 100)) letters = string.ascii_lowercase - s1 = ''.join(random.choice(letters) for i in range(2)) - s2 = ''.join(random.choice(letters) for i in range(2)) - s3 = ''.join(random.choice(letters) for i in range(2)) + s1 = "".join(random.choice(letters) for i in range(2)) + s2 = "".join(random.choice(letters) for i in range(2)) + s3 = "".join(random.choice(letters) for i in range(2)) - key = f'{n1}{s1}.{n2}{s2}.{n3}{s3}' + key = f"{n1}{s1}.{n2}{s2}.{n3}{s3}" - device = DeviceId(name='server', key=key) + device = DeviceId(name="server", key=key) device.save() return key @@ -157,13 +163,13 @@ def generate_academy_token(academy_id, force=False): academy = Academy.objects.get(id=academy_id) academy_user = User.objects.filter(username=academy.slug).first() if academy_user is None: - academy_user = User(username=academy.slug, email=f'{academy.slug}@token.com') + academy_user = User(username=academy.slug, email=f"{academy.slug}@token.com") academy_user.save() - role = Role.objects.get(slug='academy_token') + role = Role.objects.get(slug="academy_token") # this profile is for tokens, that is why we need no email validation status=ACTIVE, role must be academy_token # and the email is empty - profile_academy = ProfileAcademy(user=academy_user, academy=academy, role=role, status='ACTIVE') + profile_academy = ProfileAcademy(user=academy_user, academy=academy, role=role, status="ACTIVE") profile_academy.save() if force: @@ -171,7 +177,7 @@ def generate_academy_token(academy_id, force=False): token = Token.objects.filter(user=academy_user).first() if token is None: - token = Token.objects.create(user=academy_user, token_type='permanent') + token = Token.objects.create(user=academy_user, token_type="permanent") token.save() return token @@ -181,13 +187,13 @@ def set_gitpod_user_expiration(gitpoduser_id): gitpod_user = GitpodUser.objects.filter(id=gitpoduser_id).first() if gitpod_user is None: - raise Exception(f'Invalid gitpod user id: {gitpoduser_id}') + raise Exception(f"Invalid gitpod user id: {gitpoduser_id}") # reset status, i don't want to override this value if already set in this function - gitpod_user.delete_status = '' + gitpod_user.delete_status = "" gitpod_user.target_cohort = None - logger.debug(f'Gitpod user: {gitpod_user.id}') + logger.debug(f"Gitpod user: {gitpod_user.id}") # If no user is connected, find the user on breathecode by searching the github credentials if gitpod_user.user is None: github_user = CredentialsGithub.objects.filter(username=gitpod_user.github_username).first() @@ -196,28 +202,42 @@ def set_gitpod_user_expiration(gitpoduser_id): if gitpod_user.user is not None: # find last cohort - cu = gitpod_user.user.cohortuser_set.filter(educational_status__in=['ACTIVE'], - cohort__never_ends=False, - cohort__stage__in=['PREWORK', 'STARTED', 'FINAL_PROJECT' - ]).order_by('-cohort__ending_date').first() + cu = ( + gitpod_user.user.cohortuser_set.filter( + educational_status__in=["ACTIVE"], + cohort__never_ends=False, + cohort__stage__in=["PREWORK", "STARTED", "FINAL_PROJECT"], + ) + 
.order_by("-cohort__ending_date") + .first() + ) if cu is not None: - gitpod_user.expires_at = cu.cohort.ending_date + datetime.timedelta( - days=14) if cu.cohort.ending_date is not None else None + gitpod_user.expires_at = ( + cu.cohort.ending_date + datetime.timedelta(days=14) if cu.cohort.ending_date is not None else None + ) gitpod_user.academy = cu.cohort.academy gitpod_user.target_cohort = cu.cohort - gitpod_user.delete_status = f'User will be deleted 14 days after cohort {cu.cohort.name} finishes on {cu.cohort.ending_date}' + gitpod_user.delete_status = ( + f"User will be deleted 14 days after cohort {cu.cohort.name} finishes on {cu.cohort.ending_date}" + ) else: # if no active academy was found, at least we can retreive the latest one to asociate the user to an academy - last_cohort = gitpod_user.user.cohortuser_set.filter( - cohort__never_ends=False).order_by('-cohort__ending_date').first() + last_cohort = ( + gitpod_user.user.cohortuser_set.filter(cohort__never_ends=False) + .order_by("-cohort__ending_date") + .first() + ) if last_cohort is not None: gitpod_user.academy = last_cohort.cohort.academy gitpod_user.target_cohort = last_cohort.cohort - gitpod_user.delete_status = 'It will be deleted soon because no active cohort was found, the last one it had active was ' + last_cohort.cohort.name + gitpod_user.delete_status = ( + "It will be deleted soon because no active cohort was found, the last one it had active was " + + last_cohort.cohort.name + ) - if (gitpod_user.user is None or gitpod_user.expires_at is None) and gitpod_user.delete_status == '': + if (gitpod_user.user is None or gitpod_user.expires_at is None) and gitpod_user.delete_status == "": gitpod_user.expires_at = timezone.now() + datetime.timedelta(days=3) - gitpod_user.delete_status = 'User will be deleted because no active cohort could be associated to it, please set a cohort if you want to avoid deletion' + gitpod_user.delete_status = "User will be deleted because no active cohort could be associated to it, please set a cohort if you want to avoid deletion" if gitpod_user.user is not None: conflict = GitpodUser.objects.filter(user=gitpod_user.user).first() @@ -244,11 +264,11 @@ def update_gitpod_users(html): position = 0 while len(findings) > 0: position += 1 - user = {'position': position} + user = {"position": position} match = findings.pop(0) - input_html = html[match.start():match.end()] + input_html = html[match.start() : match.end()] - matches = list(re.finditer(r'>Reactivate<', input_html)) + matches = list(re.finditer(r">Reactivate<", input_html)) if len(matches) > 0: all_inactive_users.append(user) continue @@ -256,35 +276,36 @@ def update_gitpod_users(html): matches = list(re.finditer(r'"assignee-([\w\-]+)"', input_html)) if len(matches) > 0: match = matches.pop(0) - user['assignee'] = match.group(1) + user["assignee"] = match.group(1) matches = list(re.finditer(r'github\.com\/([\w\-]+)"', input_html)) if len(matches) > 0: match = matches.pop(0) - user['github'] = match.group(1) + user["github"] = match.group(1) - logger.debug('Found active user ' + user['github']) + logger.debug("Found active user " + user["github"]) - if user['github'] == 'username' or user['github'] == '': + if user["github"] == "username" or user["github"] == "": continue - if user['github'] in all_usernames: + if user["github"] in all_usernames: raise ValidationException( f"Error: user '{user['github']}' seems to be duplicated on the incoming list from Gitpod", - slug='duplicated-user') + slug="duplicated-user", + ) - 
all_usernames.append(user['github']) + all_usernames.append(user["github"]) all_active_users.append(user) GitpodUser.objects.exclude(github_username__in=all_usernames).delete() for user in all_active_users: # create if not exists - gitpod_user = GitpodUser.objects.filter(github_username=user['github']).first() + gitpod_user = GitpodUser.objects.filter(github_username=user["github"]).first() if gitpod_user is None: - gitpod_user = GitpodUser(github_username=user['github'], - position_in_gitpod_team=user['position'], - assignee_id=user['assignee']) + gitpod_user = GitpodUser( + github_username=user["github"], position_in_gitpod_team=user["position"], assignee_id=user["assignee"] + ) gitpod_user.save() if set_gitpod_user_expiration(gitpod_user.id) is None: @@ -292,7 +313,7 @@ def update_gitpod_users(html): f'Gitpod user {user["github"]} could not be processed, maybe its duplicated or another user is incorrectly assigned to the Gitpod account' ) - return {'active': all_active_users, 'inactive': all_inactive_users} + return {"active": all_active_users, "inactive": all_inactive_users} def get_user_settings(user_id: int) -> UserSetting: @@ -308,13 +329,14 @@ def get_user_settings(user_id: int) -> UserSetting: # race condition settings, created = UserSetting.objects.get_or_create(user_id=user_id) - if created and (cohort_user := CohortUser.objects.filter(user__id=user_id).exclude(cohort__language='').first()): + if created and (cohort_user := CohortUser.objects.filter(user__id=user_id).exclude(cohort__language="").first()): created = False settings.lang = cohort_user.cohort.language settings.save() - if created and (lead := FormEntry.objects.filter(email=user_id, - browser_lang__isnull=False).exclude(browser_lang='').first()): + if created and ( + lead := FormEntry.objects.filter(email=user_id, browser_lang__isnull=False).exclude(browser_lang="").first() + ): try: settings.lang = lead.browser_lang settings.save() @@ -328,30 +350,33 @@ def get_user_settings(user_id: int) -> UserSetting: # settings.lang = contact.language # settings.save() - if created and (assessment := Assessment.objects.filter(author__id=user_id, - lang__isnull=False).exclude(lang='').first()): + if created and ( + assessment := Assessment.objects.filter(author__id=user_id, lang__isnull=False).exclude(lang="").first() + ): created = False settings.lang = assessment.lang settings.save() - if created and (answer := Answer.objects.filter(user__id=user_id, lang__isnull=False).exclude(lang='').first()): + if created and (answer := Answer.objects.filter(user__id=user_id, lang__isnull=False).exclude(lang="").first()): created = False settings.lang = answer.lang settings.save() - if created and (event := Event.objects.filter(author__id=user_id, lang__isnull=False).exclude(lang='').first()): + if created and (event := Event.objects.filter(author__id=user_id, lang__isnull=False).exclude(lang="").first()): created = False settings.lang = event.lang settings.save() - if created and (user_assessment := UserAssessment.objects.filter(owner__id=user_id, - lang__isnull=False).exclude(lang='').first()): + if created and ( + user_assessment := UserAssessment.objects.filter(owner__id=user_id, lang__isnull=False).exclude(lang="").first() + ): created = False settings.lang = user_assessment.lang settings.save() - if created and (question := Question.objects.filter(author__id=user_id, - lang__isnull=False).exclude(lang='').first()): + if created and ( + question := Question.objects.filter(author__id=user_id, lang__isnull=False).exclude(lang="").first() 
+ ): created = False settings.lang = question.lang settings.save() @@ -365,14 +390,14 @@ def aget_user_settings(user_id: int) -> UserSetting: def get_user_language(request: WSGIRequest | AsyncRequest) -> str: - lang = request.META.get('HTTP_ACCEPT_LANGUAGE') + lang = request.META.get("HTTP_ACCEPT_LANGUAGE") if not lang and request.user.id: settings = get_user_settings(request.user.id) lang = settings.lang if not lang: - lang = 'en' + lang = "en" return lang @@ -386,9 +411,13 @@ def add_to_organization(cohort_id, user_id): cohort_user = CohortUser.objects.filter(cohort__id=cohort_id, user__id=user_id).first() if cohort_user is None: - raise ValidationException(translation(en=f'User {user_id} does not belong to cohort {cohort_id}', - es=f'El usuario {user_id} no pertenece a esta cohort {cohort_id}'), - slug='invalid-cohort-user') + raise ValidationException( + translation( + en=f"User {user_id} does not belong to cohort {cohort_id}", + es=f"El usuario {user_id} no pertenece a esta cohort {cohort_id}", + ), + slug="invalid-cohort-user", + ) academy = cohort_user.cohort.academy user = cohort_user.user @@ -396,21 +425,23 @@ def add_to_organization(cohort_id, user_id): try: github_user = GithubAcademyUser.objects.filter(user=user, academy=academy).first() if github_user is None: - github_user = GithubAcademyUser(academy=academy, - user=user, - storage_status='PENDING', - storage_action='ADD', - storage_synch_at=timezone.now()) + github_user = GithubAcademyUser( + academy=academy, + user=user, + storage_status="PENDING", + storage_action="ADD", + storage_synch_at=timezone.now(), + ) github_user.save() - if github_user.storage_status == 'SYNCHED' and github_user.storage_action == 'ADD': + if github_user.storage_status == "SYNCHED" and github_user.storage_action == "ADD": # user already added - github_user.log('User was already added') + github_user.log("User was already added") return True - github_user.storage_status = 'PENDING' - github_user.storage_action = 'ADD' - github_user.log(f'Scheduled to add to organization because in cohort={cohort_user.cohort.slug}') + github_user.storage_status = "PENDING" + github_user.storage_action = "ADD" + github_user.log(f"Scheduled to add to organization because in cohort={cohort_user.cohort.slug}") github_user.save() return True except Exception as e: @@ -421,43 +452,46 @@ def add_to_organization(cohort_id, user_id): def remove_from_organization(cohort_id, user_id, force=False): - logger.debug(f'Removing user {user_id} from organization') + logger.debug(f"Removing user {user_id} from organization") cohort_user = CohortUser.objects.filter(cohort__id=cohort_id, user__id=user_id).first() if cohort_user is None: - raise ValidationException(translation(en=f'User {user_id} does not belong to cohort {cohort_id}', - es=f'El usuario {user_id} no pertenece a esta cohort {cohort_id}'), - slug='invalid-cohort-user') + raise ValidationException( + translation( + en=f"User {user_id} does not belong to cohort {cohort_id}", + es=f"El usuario {user_id} no pertenece a esta cohort {cohort_id}", + ), + slug="invalid-cohort-user", + ) academy = cohort_user.cohort.academy user = cohort_user.user github_user = GithubAcademyUser.objects.filter(user=user, academy=academy).first() try: - active_cohorts_in_academy = CohortUser.objects.filter(user=user, - cohort__academy=academy, - cohort__never_ends=False, - educational_status='ACTIVE').first() + active_cohorts_in_academy = CohortUser.objects.filter( + user=user, cohort__academy=academy, cohort__never_ends=False, 
educational_status="ACTIVE" + ).first() if active_cohorts_in_academy is not None and not force: - raise ValidationException(translation( - en= - f'Cannot remove user={user.id} from organization because edu_status is ACTIVE in {active_cohorts_in_academy.cohort.slug}', - es= - f'No se pudo remover usuario id={user.id} de la organization su edu_status=ACTIVE en cohort={active_cohorts_in_academy.cohort.slug}' - ), - slug='still-active') + raise ValidationException( + translation( + en=f"Cannot remove user={user.id} from organization because edu_status is ACTIVE in {active_cohorts_in_academy.cohort.slug}", + es=f"No se pudo remover usuario id={user.id} de la organization su edu_status=ACTIVE en cohort={active_cohorts_in_academy.cohort.slug}", + ), + slug="still-active", + ) if github_user is None: - raise ValidationException(translation( - en= - f'Cannot remove user id={user.id} from organization because it was not found on its list of current members', - es= - f'No se pudo remover usuario id={user.id} de la organization porque no se encontro en su lista de miembros' - ), - slug='user-not-found-in-org') + raise ValidationException( + translation( + en=f"Cannot remove user id={user.id} from organization because it was not found on its list of current members", + es=f"No se pudo remover usuario id={user.id} de la organization porque no se encontro en su lista de miembros", + ), + slug="user-not-found-in-org", + ) - github_user.storage_status = 'PENDING' - github_user.storage_action = 'DELETE' + github_user.storage_status = "PENDING" + github_user.storage_action = "DELETE" github_user.log( - f'Scheduled to remove from organization because edu_status={cohort_user.educational_status} in cohort={cohort_user.cohort.slug}' + f"Scheduled to remove from organization because edu_status={cohort_user.educational_status} in cohort={cohort_user.cohort.slug}" ) github_user.save() return True @@ -476,10 +510,10 @@ def delete_from_github(github_user: GithubAcademyUser): gb = Github(org=settings.github_username, token=settings.github_owner.credentialsgithub.token) gb.delete_org_member(github_user.username) - github_user.log('Successfully deleted in github organization') + github_user.log("Successfully deleted in github organization") return True except Exception as e: - github_user.log('Error calling github API while deleting member from org: ' + str(e)) + github_user.log("Error calling github API while deleting member from org: " + str(e)) return False @@ -495,44 +529,49 @@ def sync_organization_members(academy_id, only_status=None): return False siblings = AcademyAuthSettings.objects.filter(github_username=settings.github_username) - without_sync_active = list(siblings.filter(github_is_sync=False).values_list('academy__slug', flat=True)) - academy_slugs = list(siblings.values_list('academy__slug', flat=True)) + without_sync_active = list(siblings.filter(github_is_sync=False).values_list("academy__slug", flat=True)) + academy_slugs = list(siblings.values_list("academy__slug", flat=True)) if len(without_sync_active) > 0: - raise ValidationException(translation( - en= - f"All organizations with the same username '{settings.github_username}' must activate with github synch before starting to sync members: {', '.join(without_sync_active)}", - es= - f"Todas las organizaciones con el mismo username '{settings.github_username}' deben tener github_synch activo para poder empezar la sincronizacion: {','.join(without_sync_active)}" - ), - slug='not-everyone-in-synch') + raise ValidationException( + translation( + en=f"All 
organizations with the same username '{settings.github_username}' must activate with github synch before starting to sync members: {', '.join(without_sync_active)}", + es=f"Todas las organizaciones con el mismo username '{settings.github_username}' deben tener github_synch activo para poder empezar la sincronizacion: {','.join(without_sync_active)}", + ), + slug="not-everyone-in-synch", + ) credentials = CredentialsGithub.objects.filter(user=settings.github_owner).first() if settings.github_owner is None or credentials is None: - raise ValidationException(translation( - en='Organization has no owner or it has no github credentials', - es='La organizacion no tiene dueño o no este tiene credenciales para github'), - slug='invalid-owner') + raise ValidationException( + translation( + en="Organization has no owner or it has no github credentials", + es="La organizacion no tiene dueño o no este tiene credenciales para github", + ), + slug="invalid-owner", + ) # retry errored users only from this academy being synched - GithubAcademyUser.objects.filter(academy=settings.academy, - storage_status='ERROR')\ - .update(storage_status='PENDING', storage_synch_at=None) + GithubAcademyUser.objects.filter(academy=settings.academy, storage_status="ERROR").update( + storage_status="PENDING", storage_synch_at=None + ) # users without github credentials are marked as error - no_github_credentials = GithubAcademyUser.objects.filter(academy=settings.academy, - user__credentialsgithub__isnull=True) - no_github_credentials.update(storage_status='ERROR', - storage_log=[GithubAcademyUser.create_log('This user needs connect to github')]) + no_github_credentials = GithubAcademyUser.objects.filter( + academy=settings.academy, user__credentialsgithub__isnull=True + ) + no_github_credentials.update( + storage_status="ERROR", storage_log=[GithubAcademyUser.create_log("This user needs connect to github")] + ) gb = Github(org=settings.github_username, token=settings.github_owner.credentialsgithub.token) try: members = gb.get_org_members() except Exception as e: - settings.add_error('Error fetching members from org: ' + str(e)) + settings.add_error("Error fetching members from org: " + str(e)) raise e - remaining_usernames = set([m['login'] for m in members]) + remaining_usernames = set([m["login"] for m in members]) # only from this academy because we want to duplicate the users on the other academies org_users = GithubAcademyUser.objects.filter(academy=settings.academy) @@ -543,43 +582,46 @@ def sync_organization_members(academy_id, only_status=None): for _member in org_users: github = CredentialsGithub.objects.filter(user=_member.user).first() - if _member.storage_status in ['PENDING'] and _member.storage_action in ['ADD', 'INVITE']: + if _member.storage_status in ["PENDING"] and _member.storage_action in ["ADD", "INVITE"]: if github.username in remaining_usernames: - _member.log('User was already added to github') - _member.storage_status = 'SYNCHED' + _member.log("User was already added to github") + _member.storage_status = "SYNCHED" # change action to ADD just in case it was INVITE (its a confirmation) - _member.storage_action = 'ADD' + _member.storage_action = "ADD" _member.storage_synch_at = now _member.save() else: teams = [] - if settings.github_default_team_ids != '': - teams = [int(id) for id in settings.github_default_team_ids.split(',')] + if settings.github_default_team_ids != "": + teams = [int(id) for id in settings.github_default_team_ids.split(",")] try: gb.invite_org_member(github.email, 
team_ids=teams) except Exception as e: - settings.add_error('Error inviting member ' + str(github.email) + ' to org: ' + str(e)) + settings.add_error("Error inviting member " + str(github.email) + " to org: " + str(e)) raise e - _member.storage_status = 'SYNCHED' - _member.log(f'Sent invitation to {github.email}') - _member.storage_action = 'INVITE' + _member.storage_status = "SYNCHED" + _member.log(f"Sent invitation to {github.email}") + _member.storage_action = "INVITE" _member.storage_synch_at = now _member.save() - if _member.storage_status in ['PENDING'] and _member.storage_action == 'DELETE': + if _member.storage_status in ["PENDING"] and _member.storage_action == "DELETE": if github.username not in remaining_usernames: - _member.log('User was already deleted from github') - _member.storage_status = 'SYNCHED' + _member.log("User was already deleted from github") + _member.storage_status = "SYNCHED" _member.storage_synch_at = now _member.save() else: # we should not delete if another academy from the same org wants to keep it - added_elsewhere = GithubAcademyUser.objects.filter( - Q(user=_member.user) - | Q(username=github.username)).filter(academy__slug__in=academy_slugs).exclude( - storage_action__in=['DELETE', 'IGNORE']).exclude(id=_member.id).first() + added_elsewhere = ( + GithubAcademyUser.objects.filter(Q(user=_member.user) | Q(username=github.username)) + .filter(academy__slug__in=academy_slugs) + .exclude(storage_action__in=["DELETE", "IGNORE"]) + .exclude(id=_member.id) + .first() + ) if added_elsewhere is None: try: logger.debug( @@ -587,18 +629,18 @@ def sync_organization_members(academy_id, only_status=None): ) gb.delete_org_member(github.username) except Exception as e: - settings.add_error('Error deleting member from org: ' + str(e)) + settings.add_error("Error deleting member from org: " + str(e)) raise e - _member.log('Successfully deleted in github organization') + _member.log("Successfully deleted in github organization") else: _member.log( f"User belongs to another academy '{added_elsewhere.academy.slug}', it will have to be marked as deleted there before it can be deleted from github organization" ) - _member.storage_status = 'SYNCHED' + _member.storage_status = "SYNCHED" _member.storage_synch_at = now _member.save() - github_username = github.username if github is not None else '' + github_username = github.username if github is not None else "" remaining_usernames = set([username for username in remaining_usernames if username != github_username]) # there are some users from github we could not find in THIS academy cohorts @@ -617,20 +659,23 @@ def sync_organization_members(academy_id, only_status=None): unknown_user = _query.first() if unknown_user is None: - unknown_user = GithubAcademyUser(academy=settings.academy, - user=_user, - username=u, - storage_status='UNKNOWN', - storage_action='IGNORE', - storage_synch_at=now) + unknown_user = GithubAcademyUser( + academy=settings.academy, + user=_user, + username=u, + storage_status="UNKNOWN", + storage_action="IGNORE", + storage_synch_at=now, + ) unknown_user.save() - unknown_user.storage_status = 'UNKNOWN' - unknown_user.storage_action = 'IGNORE' + unknown_user.storage_status = "UNKNOWN" + unknown_user.storage_action = "IGNORE" unknown_user.storage_synch_at = now unknown_user.log( "This user is coming from github, we don't know if its a student from your academy or if it should be added or deleted, keep it as IGNORED to avoid deletion", - reset=True) + reset=True, + ) unknown_user.save() return True @@ -638,9 
+683,9 @@ def sync_organization_members(academy_id, only_status=None): def accept_invite(accepting_ids=None, user=None): if accepting_ids is not None: - invites = UserInvite.objects.filter(id__in=accepting_ids.split(','), email=user.email, status='PENDING') + invites = UserInvite.objects.filter(id__in=accepting_ids.split(","), email=user.email, status="PENDING") else: - invites = UserInvite.objects.filter(email=user.email, status='PENDING') + invites = UserInvite.objects.filter(email=user.email, status="PENDING") for invite in invites: if invite.academy is not None: @@ -649,40 +694,42 @@ def accept_invite(accepting_ids=None, user=None): if profile is None: role = invite.role if not role: - role = Role.objects.filter(slug='student').first() + role = Role.objects.filter(slug="student").first() # is better generate a role without capability that have a exception in this case if not role: - role = Role(slug='student', name='Student') + role = Role(slug="student", name="Student") role.save() - profile = ProfileAcademy(email=invite.email, - academy=invite.academy, - role=role, - first_name=user.first_name, - last_name=user.last_name) + profile = ProfileAcademy( + email=invite.email, + academy=invite.academy, + role=role, + first_name=user.first_name, + last_name=user.last_name, + ) profile.user = user - profile.status = 'ACTIVE' + profile.status = "ACTIVE" profile.save() if invite.cohort is not None: - role = 'student' - if invite.role is not None and invite.role.slug != 'student': + role = "student" + if invite.role is not None and invite.role.slug != "student": role = invite.role.slug.upper() cu = CohortUser.objects.filter(user=user, cohort=invite.cohort).first() - if cu is None and (role := role.upper()) in ['TEACHER', 'ASSISTANT', 'REVIEWER', 'STUDENT']: - cu = CohortUser(user=user, cohort=invite.cohort, role=role, educational_status='ACTIVE') + if cu is None and (role := role.upper()) in ["TEACHER", "ASSISTANT", "REVIEWER", "STUDENT"]: + cu = CohortUser(user=user, cohort=invite.cohort, role=role, educational_status="ACTIVE") cu.save() elif cu is None: - cu = CohortUser(user=user, cohort=invite.cohort, role='STUDENT', educational_status='ACTIVE') + cu = CohortUser(user=user, cohort=invite.cohort, role="STUDENT", educational_status="ACTIVE") cu.save() if user is not None and invite.user is None: invite.user = user - invite.status = 'ACCEPTED' + invite.status = "ACCEPTED" invite.save() @@ -730,33 +777,36 @@ def accept_invite(accepting_ids=None, user=None): JWT_LIFETIME = 10 -def accept_invite_action(data=None, token=None, lang='en'): +def accept_invite_action(data=None, token=None, lang="en"): from breathecode.payments import tasks as payments_tasks from breathecode.payments.models import Bag, Invoice, Plan if data is None: data = {} - password1 = data.get('password', None) - password2 = data.get('repeat_password', None) + password1 = data.get("password", None) + password2 = data.get("repeat_password", None) - invite = UserInvite.objects.filter(token=str(token), status='PENDING', email__isnull=False).first() + invite = UserInvite.objects.filter(token=str(token), status="PENDING", email__isnull=False).first() if invite is None: raise Exception( - translation(lang, - en='Invalid or expired invitation ' + str(token), - es='Invitación inválida o expirada ' + str(token))) + translation( + lang, + en="Invalid or expired invitation " + str(token), + es="Invitación inválida o expirada " + str(token), + ) + ) - first_name = data.get('first_name', None) - last_name = data.get('last_name', None) - if 
first_name is None or first_name == '' or last_name is None or last_name == '': - raise Exception(translation(lang, en='Invalid first or last name', es='Nombre o apellido inválido')) + first_name = data.get("first_name", None) + last_name = data.get("last_name", None) + if first_name is None or first_name == "" or last_name is None or last_name == "": + raise Exception(translation(lang, en="Invalid first or last name", es="Nombre o apellido inválido")) if password1 != password2: - raise Exception(translation(lang, en='Passwords don\'t match', es='Las contraseñas no coinciden')) + raise Exception(translation(lang, en="Passwords don't match", es="Las contraseñas no coinciden")) if not password1: - raise Exception(translation(lang, en='Password is empty', es='La contraseña está vacía')) + raise Exception(translation(lang, en="Password is empty", es="La contraseña está vacía")) user = User.objects.filter(email=invite.email).first() if user is None: @@ -770,34 +820,33 @@ def accept_invite_action(data=None, token=None, lang='en'): if profile is None: role = invite.role if not role: - role = Role.objects.filter(slug='student').first() + role = Role.objects.filter(slug="student").first() if not role: raise Exception( - translation(lang, - en='Unexpected error occurred with invite, please contact the ' - 'staff of 4geeks', - es='Ocurrió un error inesperado con la invitación, por favor ' - 'contacta al staff de 4geeks')) - - profile = ProfileAcademy(email=invite.email, - academy=invite.academy, - role=role, - first_name=first_name, - last_name=last_name) - - if invite.first_name is not None and invite.first_name != '': + translation( + lang, + en="Unexpected error occurred with invite, please contact the " "staff of 4geeks", + es="Ocurrió un error inesperado con la invitación, por favor " "contacta al staff de 4geeks", + ) + ) + + profile = ProfileAcademy( + email=invite.email, academy=invite.academy, role=role, first_name=first_name, last_name=last_name + ) + + if invite.first_name is not None and invite.first_name != "": profile.first_name = invite.first_name - if invite.last_name is not None and invite.last_name != '': + if invite.last_name is not None and invite.last_name != "": profile.last_name = invite.last_name profile.user = user - profile.status = 'ACTIVE' + profile.status = "ACTIVE" profile.save() if invite.cohort is not None: - role = 'student' - if invite.role is not None and invite.role.slug != 'student': + role = "student" + if invite.role is not None and invite.role.slug != "student": role = invite.role.slug.upper() cu = CohortUser.objects.filter(user=user, cohort=invite.cohort).first() @@ -807,15 +856,21 @@ def accept_invite_action(data=None, token=None, lang='en'): plan = Plan.objects.filter(cohort_set__cohorts=invite.cohort, invites=invite).first() - if plan and invite.user and invite.cohort.academy.main_currency and ( - invite.cohort.available_as_saas == True or - (invite.cohort.available_as_saas == None and invite.cohort.academy.available_as_saas == True)): + if ( + plan + and invite.user + and invite.cohort.academy.main_currency + and ( + invite.cohort.available_as_saas == True + or (invite.cohort.available_as_saas == None and invite.cohort.academy.available_as_saas == True) + ) + ): utc_now = timezone.now() bag = Bag() - bag.chosen_period = 'NO_SET' - bag.status = 'PAID' - bag.type = 'INVITED' + bag.chosen_period = "NO_SET" + bag.status = "PAID" + bag.type = "INVITED" bag.how_many_installments = 1 bag.academy = invite.cohort.academy bag.user = user @@ -830,18 +885,20 @@ def 
accept_invite_action(data=None, token=None, lang='en'): bag.plans.add(plan) bag.selected_cohorts.add(invite.cohort) - invoice = Invoice(amount=0, - paid_at=utc_now, - user=invite.user, - bag=bag, - academy=bag.academy, - status='FULFILLED', - currency=bag.academy.main_currency) + invoice = Invoice( + amount=0, + paid_at=utc_now, + user=invite.user, + bag=bag, + academy=bag.academy, + status="FULFILLED", + currency=bag.academy.main_currency, + ) invoice.save() payments_tasks.build_plan_financing.delay(bag.id, invoice.id, is_free=True) - invite.status = 'ACCEPTED' + invite.status = "ACCEPTED" invite.is_email_validated = True invite.save() diff --git a/breathecode/authenticate/admin.py b/breathecode/authenticate/admin.py index b5e447803..986c36d5a 100644 --- a/breathecode/authenticate/admin.py +++ b/breathecode/authenticate/admin.py @@ -48,101 +48,111 @@ logger = logging.getLogger(__name__) -@admin.display(description='Delete all tokens') +@admin.display(description="Delete all tokens") def clean_all_tokens(modeladmin, request, queryset): - user_ids = queryset.values_list('id', flat=True) - delete_tokens(users=user_ids, status='all') + user_ids = queryset.values_list("id", flat=True) + delete_tokens(users=user_ids, status="all") -@admin.display(description='Delete EXPIRED tokens') +@admin.display(description="Delete EXPIRED tokens") def clean_expired_tokens(modeladmin, request, queryset): - user_ids = queryset.values_list('id', flat=True) - delete_tokens(users=user_ids, status='expired') + user_ids = queryset.values_list("id", flat=True) + delete_tokens(users=user_ids, status="expired") -@admin.display(description='Send reset password link') +@admin.display(description="Send reset password link") def send_reset_password(modeladmin, request, queryset): reset_password(users=queryset) @admin.register(CredentialsGithub) class CredentialsGithubAdmin(admin.ModelAdmin): - list_display = ('github_id', 'user_id', 'email', 'token') - search_fields = ['user__first_name', 'user__last_name', 'user__email', 'email'] - raw_id_fields = ['user'] + list_display = ("github_id", "user_id", "email", "token") + search_fields = ["user__first_name", "user__last_name", "user__email", "email"] + raw_id_fields = ["user"] @admin.register(CredentialsGoogle) class CredentialsGoogleAdmin(admin.ModelAdmin): - list_display = ('user', 'token', 'expires_at') + list_display = ("user", "token", "expires_at") search_fields = [ - 'user__first_name', - 'user__last_name', - 'user__email', + "user__first_name", + "user__last_name", + "user__email", ] - raw_id_fields = ['user'] + raw_id_fields = ["user"] @admin.register(CredentialsSlack) class CredentialsSlackAdmin(admin.ModelAdmin): - list_display = ('user', 'app_id', 'bot_user_id', 'team_id', 'team_name') - search_fields = ['user__first_name', 'user__last_name', 'user__email'] - raw_id_fields = ['user'] + list_display = ("user", "app_id", "bot_user_id", "team_id", "team_name") + search_fields = ["user__first_name", "user__last_name", "user__email"] + raw_id_fields = ["user"] @admin.register(CredentialsFacebook) class CredentialsFacebookAdmin(admin.ModelAdmin): - list_display = ('facebook_id', 'user', 'email', 'academy', 'expires_at') + list_display = ("facebook_id", "user", "email", "academy", "expires_at") @admin.register(Token) class TokenAdmin(admin.ModelAdmin): - list_display = ('key', 'token_type', 'expires_at', 'user') - search_fields = ('user__email', 'user__first_name', 'user__last_name') - list_filter = ['token_type'] - raw_id_fields = ['user'] + list_display = ("key", 
"token_type", "expires_at", "user") + search_fields = ("user__email", "user__first_name", "user__last_name") + list_filter = ["token_type"] + raw_id_fields = ["user"] def get_readonly_fields(self, request, obj=None): - return ['key'] + return ["key"] def accept_selected_users_from_waiting_list(modeladmin, request, queryset: QuerySet[UserInvite]): - queryset = queryset.exclude(process_status='DONE').order_by('id') + queryset = queryset.exclude(process_status="DONE").order_by("id") for x in queryset: tasks.async_accept_user_from_waiting_list.delay(x.id) def accept_all_users_from_waiting_list(modeladmin, request, queryset: QuerySet[UserInvite]): - queryset = UserInvite.objects.all().exclude(process_status='DONE').order_by('id') + queryset = UserInvite.objects.all().exclude(process_status="DONE").order_by("id") for x in queryset: tasks.async_accept_user_from_waiting_list.delay(x.id) def validate_email(modeladmin, request, queryset: QuerySet[UserInvite]): for x in queryset: - email_status = marketing_actions.validate_email(x.email, 'en') - x.email_quality = email_status['score'] + email_status = marketing_actions.validate_email(x.email, "en") + x.email_quality = email_status["score"] x.email_status = email_status x.save() @admin.register(UserInvite) class UserInviteAdmin(admin.ModelAdmin): - search_fields = ['email', 'first_name', 'last_name', 'user__email'] - raw_id_fields = ['user', 'author', 'cohort'] - list_filter = ['academy', 'status', 'is_email_validated', 'process_status', 'role', 'country'] - list_display = ('email', 'is_email_validated', 'first_name', 'last_name', 'status', 'academy', 'token', - 'created_at', 'invite_url', 'country') + search_fields = ["email", "first_name", "last_name", "user__email"] + raw_id_fields = ["user", "author", "cohort"] + list_filter = ["academy", "status", "is_email_validated", "process_status", "role", "country"] + list_display = ( + "email", + "is_email_validated", + "first_name", + "last_name", + "status", + "academy", + "token", + "created_at", + "invite_url", + "country", + ) actions = [accept_selected_users_from_waiting_list, accept_all_users_from_waiting_list, validate_email] def invite_url(self, obj): - params = {'callback': 'https://4geeks.com'} + params = {"callback": "https://4geeks.com"} querystr = urllib.parse.urlencode(params) - url = os.getenv('API_URL') + '/v1/auth/member/invite/' + str(obj.token) + '?' + querystr + url = os.getenv("API_URL") + "/v1/auth/member/invite/" + str(obj.token) + "?" 
+ querystr return format_html(f"invite url") -@admin.display(description='Clear user password') +@admin.display(description="Clear user password") def clear_user_password(modeladmin, request, queryset): for u in queryset: u.set_unusable_password() @@ -151,13 +161,13 @@ def clear_user_password(modeladmin, request, queryset): @admin.register(UserProxy) class UserAdmin(UserAdmin): - list_display = ('username', 'email', 'first_name', 'last_name', 'is_staff', 'github_login') + list_display = ("username", "email", "first_name", "last_name", "is_staff", "github_login") actions = [clean_all_tokens, clean_expired_tokens, send_reset_password, clear_user_password] def get_queryset(self, request): - self.github_callback = 'https://4geeks.com' - self.github_callback = str(base64.urlsafe_b64encode(self.github_callback.encode('utf-8')), 'utf-8') + self.github_callback = "https://4geeks.com" + self.github_callback = str(base64.urlsafe_b64encode(self.github_callback.encode("utf-8")), "utf-8") return super(UserAdmin, self).get_queryset(request) def github_login(self, obj): @@ -168,34 +178,34 @@ def github_login(self, obj): @admin.register(Role) class RoleAdmin(admin.ModelAdmin): - list_display = ('slug', 'name') - filter_horizontal = ('capabilities', ) + list_display = ("slug", "name") + filter_horizontal = ("capabilities",) @admin.register(Capability) class CapabilityAdmin(admin.ModelAdmin): - list_display = ('slug', 'description') + list_display = ("slug", "description") @admin.register(ProfileAcademy) class ProfileAcademyAdmin(admin.ModelAdmin): - list_display = ('user', 'stats', 'email', 'academy', 'role', 'created_at', 'slack', 'facebook') - search_fields = ['user__first_name', 'user__last_name', 'user__email'] - list_filter = ['academy__slug', 'status', 'role__slug'] - actions = change_field(['ACTIVE', 'INVITED'], name='status') - raw_id_fields = ['user'] + list_display = ("user", "stats", "email", "academy", "role", "created_at", "slack", "facebook") + search_fields = ["user__first_name", "user__last_name", "user__email"] + list_filter = ["academy__slug", "status", "role__slug"] + actions = change_field(["ACTIVE", "INVITED"], name="status") + raw_id_fields = ["user"] def get_queryset(self, request): - self.slack_callback = 'https://4geeks.com' - self.slack_callback = str(base64.urlsafe_b64encode(self.slack_callback.encode('utf-8')), 'utf-8') + self.slack_callback = "https://4geeks.com" + self.slack_callback = str(base64.urlsafe_b64encode(self.slack_callback.encode("utf-8")), "utf-8") return super(ProfileAcademyAdmin, self).get_queryset(request) def stats(self, obj): colors = { - 'ACTIVE': 'bg-success', - 'INVITED': 'bg-error', + "ACTIVE": "bg-success", + "INVITED": "bg-error", } return format_html( @@ -208,7 +218,7 @@ def slack(self, obj): f"slack login" ) else: - return 'Pending invite response' + return "Pending invite response" def facebook(self, obj): if obj.user is not None: @@ -216,34 +226,34 @@ def facebook(self, obj): f"facebook login" ) else: - return 'Pending invite response' + return "Pending invite response" @admin.register(Profile) class ProfileAdmin(admin.ModelAdmin): - list_display = ('user', 'phone', 'github_username', 'avatar_url') - search_fields = ['user__first_name', 'user__last_name', 'user__email'] - raw_id_fields = ['user'] + list_display = ("user", "phone", "github_username", "avatar_url") + search_fields = ["user__first_name", "user__last_name", "user__email"] + raw_id_fields = ["user"] # actions = [clean_all_tokens, clean_expired_tokens, send_reset_password] 
@admin.register(UserSetting) class UserSettingAdmin(admin.ModelAdmin): - list_display = ('user', 'lang', 'main_currency') - search_fields = ['user__first_name', 'user__last_name', 'user__email', 'user__id'] - raw_id_fields = ['user'] - list_filter = ('lang', 'main_currency') + list_display = ("user", "lang", "main_currency") + search_fields = ["user__first_name", "user__last_name", "user__email", "user__id"] + raw_id_fields = ["user"] + list_filter = ("lang", "main_currency") # actions = [clean_all_tokens, clean_expired_tokens, send_reset_password] -@admin.display(description='Generate academy token') +@admin.display(description="Generate academy token") def generate_token(modeladmin, request, queryset): academies = queryset.all() for a in academies: generate_academy_token(a.id) -@admin.display(description='RESET academy token') +@admin.display(description="RESET academy token") def reset_token(modeladmin, request, queryset): academies = queryset.all() for a in academies: @@ -252,7 +262,7 @@ def reset_token(modeladmin, request, queryset): @admin.register(AcademyProxy) class AcademyAdmin(admin.ModelAdmin): - list_display = ('slug', 'name', 'token') + list_display = ("slug", "name", "token") actions = [generate_token, reset_token] def token(self, obj): @@ -261,8 +271,8 @@ def token(self, obj): @admin.register(DeviceId) class DeviceIdAdmin(admin.ModelAdmin): - list_display = ('name', 'key') - search_fields = ['name'] + list_display = ("name", "key") + search_fields = ["name"] def recalculate_expiration(modeladmin, request, queryset): @@ -271,12 +281,17 @@ def recalculate_expiration(modeladmin, request, queryset): for gpu in gp_users: gpu = set_gitpod_user_expiration(gpu.id) if gpu is None: - messages.add_message(request, messages.ERROR, - f'Error: Gitpod user {gpu.github_username} {gpu.assignee_id} could not be processed') + messages.add_message( + request, + messages.ERROR, + f"Error: Gitpod user {gpu.github_username} {gpu.assignee_id} could not be processed", + ) else: messages.add_message( - request, messages.INFO, - f'Success: Gitpod user {gpu.github_username} {gpu.assignee_id} was successfully processed') + request, + messages.INFO, + f"Success: Gitpod user {gpu.github_username} {gpu.assignee_id} was successfully processed", + ) def async_recalculate_expiration(modeladmin, request, queryset): @@ -290,36 +305,39 @@ def extend_expiration_2_weeks(modeladmin, request, queryset): gp_users = queryset.all() for gpu in gp_users: gpu.expires_at = gpu.expires_at + datetime.timedelta(days=17) - gpu.delete_status = gpu.delete_status + '. The expiration date was extend for 2 weeks days' + gpu.delete_status = gpu.delete_status + ". The expiration date was extend for 2 weeks days" gpu.save() - messages.add_message(request, messages.INFO, 'Success: Expiration was successfully extended') + messages.add_message(request, messages.INFO, "Success: Expiration was successfully extended") def extend_expiration_4_months(modeladmin, request, queryset): gp_users = queryset.all() for gpu in gp_users: gpu.expires_at = gpu.expires_at + datetime.timedelta(days=120) - gpu.delete_status = gpu.delete_status + '. The expiration date was extend for 4 months' + gpu.delete_status = gpu.delete_status + ". 
The expiration date was extend for 4 months" gpu.save() - messages.add_message(request, messages.INFO, 'Success: Expiration was successfully extended') + messages.add_message(request, messages.INFO, "Success: Expiration was successfully extended") def mark_as_expired(modeladmin, request, queryset): gp_users = queryset.all() for gpu in gp_users: gpu.expires_at = timezone.now() - gpu.delete_status = gpu.delete_status + '. The user was expired by force.' + gpu.delete_status = gpu.delete_status + ". The user was expired by force." gpu.save() - messages.add_message(request, messages.INFO, 'Success: Gitpod user was expired') + messages.add_message(request, messages.INFO, "Success: Gitpod user was expired") @admin.register(GitpodUser) class GitpodUserAdmin(admin.ModelAdmin): - list_display = ('github_username', 'expiration', 'user', 'assignee_id', 'expires_at') - search_fields = ['github_username', 'user__email', 'user__first_name', 'user__last_name', 'assignee_id'] + list_display = ("github_username", "expiration", "user", "assignee_id", "expires_at") + search_fields = ["github_username", "user__email", "user__first_name", "user__last_name", "assignee_id"] actions = [ - async_recalculate_expiration, recalculate_expiration, extend_expiration_2_weeks, extend_expiration_4_months, - mark_as_expired + async_recalculate_expiration, + recalculate_expiration, + extend_expiration_2_weeks, + extend_expiration_4_months, + mark_as_expired, ] def expiration(self, obj): @@ -331,22 +349,24 @@ def expiration(self, obj): return format_html("EXPIRED") elif now > (obj.expires_at + datetime.timedelta(days=3)): return format_html( - f"In {from_now(obj.expires_at, include_days=True)}") + f"In {from_now(obj.expires_at, include_days=True)}" + ) else: return format_html( - f"In {from_now(obj.expires_at, include_days=True)}") + f"In {from_now(obj.expires_at, include_days=True)}" + ) def mark_as_pending_delete(modeladmin, request, queryset): - queryset.all().update(storage_status='PENDING', storage_action='DELETE') + queryset.all().update(storage_status="PENDING", storage_action="DELETE") def mark_as_pending_add(modeladmin, request, queryset): - queryset.all().update(storage_status='PENDING', storage_action='ADD') + queryset.all().update(storage_status="PENDING", storage_action="ADD") def mark_as_ignore(modeladmin, request, queryset): - queryset.all().update(storage_status='SYNCHED', storage_action='IGNORE') + queryset.all().update(storage_status="SYNCHED", storage_action="IGNORE") def clear_storage_log(modeladmin, request, queryset): @@ -365,46 +385,59 @@ def look_for_github_credentials(modeladmin, request, queryset): class UsernameFilter(SimpleListFilter): - title = 'username_type' - parameter_name = 'username_type' + title = "username_type" + parameter_name = "username_type" def lookups(self, request, model_admin): - return [('NONE', 'Without username'), ('FULL', 'With Username')] + return [("NONE", "Without username"), ("FULL", "With Username")] def queryset(self, request, queryset): - if self.value() == 'NONE': - return queryset.filter(Q(username__isnull=True) | Q(username='')) - if self.value() == 'FULL': - return queryset.filter(username__isnull=False).exclude(username='') + if self.value() == "NONE": + return queryset.filter(Q(username__isnull=True) | Q(username="")) + if self.value() == "FULL": + return queryset.filter(username__isnull=False).exclude(username="") @admin.register(GithubAcademyUser) class GithubAcademyUserAdmin(admin.ModelAdmin): - list_display = ('id', 'academy', 'user', 'github', 
'storage_status', 'storage_action', 'created_at', 'updated_at') - search_fields = ['username', 'user__email', 'user__first_name', 'user__last_name'] + list_display = ("id", "academy", "user", "github", "storage_status", "storage_action", "created_at", "updated_at") + search_fields = ["username", "user__email", "user__first_name", "user__last_name"] actions = [ - mark_as_pending_delete, mark_as_pending_add, mark_as_ignore, clear_storage_log, look_for_github_credentials + mark_as_pending_delete, + mark_as_pending_add, + mark_as_ignore, + clear_storage_log, + look_for_github_credentials, ] - list_filter = ('academy', 'storage_status', 'storage_action', UsernameFilter) - raw_id_fields = ['user'] + list_filter = ("academy", "storage_status", "storage_action", UsernameFilter) + raw_id_fields = ["user"] def github(self, obj): if obj.username is None: - return 'missing github connect' + return "missing github connect" else: return obj.username @admin.register(GithubAcademyUserLog) class GithubAcademyUserLogAdmin(admin.ModelAdmin): - list_display = ('academy_user', 'academy_name', 'storage_status', 'storage_action', 'created_at', 'valid_until', - 'updated_at') + list_display = ( + "academy_user", + "academy_name", + "storage_status", + "storage_action", + "created_at", + "valid_until", + "updated_at", + ) search_fields = [ - 'academy_user__username', 'academy_user__user__email', 'academy_user__user__first_name', - 'academy_user__user__last_name' + "academy_user__username", + "academy_user__user__email", + "academy_user__user__first_name", + "academy_user__user__last_name", ] # actions = [mark_as_deleted, mark_as_add, mark_as_ignore] - list_filter = ('academy_user__academy__name', 'storage_status', 'storage_action') + list_filter = ("academy_user__academy__name", "storage_status", "storage_action") def academy_name(self, obj): return obj.academy_user.academy.name @@ -421,8 +454,8 @@ def sync_github_members(modeladmin, request, queryset): try: sync_organization_members(s.academy.id) except Exception as e: - logger.error(f'Error while syncing organization members for {s.academy.name}: ' + str(e)) - messages.error(request, f'Error while syncing organization members for {s.academy.name}: ' + str(e)) + logger.error(f"Error while syncing organization members for {s.academy.name}: " + str(e)) + messages.error(request, f"Error while syncing organization members for {s.academy.name}: " + str(e)) def activate_github_sync(modeladmin, request, queryset): @@ -439,15 +472,15 @@ def clean_errors(modeladmin, request, queryset): @admin.register(AcademyAuthSettings) class AcademyAuthSettingsAdmin(admin.ModelAdmin): - list_display = ('academy', 'github_is_sync', 'github_errors', 'github_username', 'github_owner', 'authenticate') - search_fields = ['academy__slug', 'academy__name', 'github__username', 'academy__id'] + list_display = ("academy", "github_is_sync", "github_errors", "github_username", "github_owner", "authenticate") + search_fields = ["academy__slug", "academy__name", "github__username", "academy__id"] actions = (clean_errors, activate_github_sync, deactivate_github_sync, sync_github_members) - raw_id_fields = ['github_owner'] + raw_id_fields = ["github_owner"] def get_queryset(self, request): - self.github_callback = 'https://4geeks.com' - self.github_callback = str(base64.urlsafe_b64encode(self.github_callback.encode('utf-8')), 'utf-8') + self.github_callback = "https://4geeks.com" + self.github_callback = str(base64.urlsafe_b64encode(self.github_callback.encode("utf-8")), "utf-8") return 
super(AcademyAuthSettingsAdmin, self).get_queryset(request) def github_errors(self, obj): @@ -459,9 +492,9 @@ def github_errors(self, obj): def authenticate(self, obj): settings = AcademyAuthSettings.objects.get(id=obj.id) if settings.github_owner is None: - return format_html('no owner') + return format_html("no owner") - scopes = str(base64.urlsafe_b64encode(b'user repo admin:org'), 'utf-8') + scopes = str(base64.urlsafe_b64encode(b"user repo admin:org"), "utf-8") return format_html( f"connect owner" ) diff --git a/breathecode/authenticate/apps.py b/breathecode/authenticate/apps.py index 5d738ac97..29576ae1a 100644 --- a/breathecode/authenticate/apps.py +++ b/breathecode/authenticate/apps.py @@ -2,7 +2,7 @@ class AcademyConfig(AppConfig): - name = 'breathecode.authenticate' + name = "breathecode.authenticate" def ready(self): from . import receivers # noqa: F401 diff --git a/breathecode/authenticate/authentication.py b/breathecode/authenticate/authentication.py index 459724349..c5ca23d0b 100644 --- a/breathecode/authenticate/authentication.py +++ b/breathecode/authenticate/authentication.py @@ -6,7 +6,7 @@ from rest_framework.authentication import TokenAuthentication from rest_framework.exceptions import AuthenticationFailed -HTTP_HEADER_ENCODING = 'iso-8859-1' +HTTP_HEADER_ENCODING = "iso-8859-1" def get_authorization_header(request): @@ -15,7 +15,7 @@ def get_authorization_header(request): Hide some test client ickyness where the header can be unicode. """ - auth = request.META.get('HTTP_AUTHORIZATION', b'') + auth = request.META.get("HTTP_AUTHORIZATION", b"") if isinstance(auth, str): # Work around django test client oddness auth = auth.encode(HTTP_HEADER_ENCODING) @@ -33,19 +33,18 @@ class ExpiringTokenAuthentication(TokenAuthentication): def authenticate_credentials(self, key, request=None): from .models import Token - token = Token.objects.select_related('user').filter(key=key).first() + token = Token.objects.select_related("user").filter(key=key).first() if token is None: - raise AuthenticationFailed({'error': 'Invalid or Inactive Token', 'is_authenticated': False}) + raise AuthenticationFailed({"error": "Invalid or Inactive Token", "is_authenticated": False}) if not token.user.is_active: - raise AuthenticationFailed({'error': 'Invalid or inactive user', 'is_authenticated': False}) + raise AuthenticationFailed({"error": "Invalid or inactive user", "is_authenticated": False}) now = timezone.now() if token.expires_at is not None and token.expires_at < now: - raise AuthenticationFailed({ - 'error': 'Token expired at ' + str(token.expires_at), - 'is_authenticated': False - }) + raise AuthenticationFailed( + {"error": "Token expired at " + str(token.expires_at), "is_authenticated": False} + ) return token.user, token @@ -65,16 +64,16 @@ async def authenticate(self, request): return None if len(auth) == 1: - msg = _('Invalid token header. No credentials provided.') + msg = _("Invalid token header. No credentials provided.") raise AuthenticationFailed(msg) elif len(auth) > 2: - msg = _('Invalid token header. Token string should not contain spaces.') + msg = _("Invalid token header. Token string should not contain spaces.") raise AuthenticationFailed(msg) try: token = auth[1].decode() except UnicodeError: - msg = _('Invalid token header. Token string should not contain invalid characters.') + msg = _("Invalid token header. 
Token string should not contain invalid characters.") raise AuthenticationFailed(msg) return await sync_to_async(self.authenticate_credentials)(token) diff --git a/breathecode/authenticate/exceptions.py b/breathecode/authenticate/exceptions.py index d95db9e88..949c2c60a 100644 --- a/breathecode/authenticate/exceptions.py +++ b/breathecode/authenticate/exceptions.py @@ -9,7 +9,7 @@ def __init__(self, *args: Any): class TokenNotFound(Exception): - def __init__(self, error: str = 'Token not found', *args: Any): + def __init__(self, error: str = "Token not found", *args: Any): super().__init__(error, *args) diff --git a/breathecode/authenticate/forms.py b/breathecode/authenticate/forms.py index d2b949542..0762bfc4d 100644 --- a/breathecode/authenticate/forms.py +++ b/breathecode/authenticate/forms.py @@ -3,48 +3,56 @@ class SyncGithubUsersForm(forms.Form): - html = forms.CharField(widget=forms.Textarea(attrs={ - 'name': 'html', - 'rows': '5', - 'cols': '5', - 'class': 'form-control' - })) + html = forms.CharField( + widget=forms.Textarea(attrs={"name": "html", "rows": "5", "cols": "5", "class": "form-control"}) + ) class ResetPasswordForm(forms.Form): callback = forms.CharField(required=False, widget=forms.HiddenInput()) - email = forms.EmailField(widget=forms.EmailInput(attrs={ - 'label': 'email', - 'class': 'form-control', - }), ) + email = forms.EmailField( + widget=forms.EmailInput( + attrs={ + "label": "email", + "class": "form-control", + } + ), + ) def __init__(self, params, *args, **kwargs): super(forms.Form, self).__init__(params, *args, **kwargs) - self.fields['callback'].widget.attrs.update({'initial': params.get('callback')}) + self.fields["callback"].widget.attrs.update({"initial": params.get("callback")}) self.meta = { - 'heading': 'Forgot password', - 'intro': - 'Fill out your email and we will send a password reset link if we find your email in our database.', - 'btn_label': 'Get password reset link', + "heading": "Forgot password", + "intro": "Fill out your email and we will send a password reset link if we find your email in our database.", + "btn_label": "Get password reset link", } class LoginForm(forms.Form): url = forms.CharField(required=False, widget=forms.HiddenInput()) - email = forms.EmailField(widget=forms.EmailInput(attrs={ - 'label': 'email', - 'class': 'form-control', - }), ) - password = forms.CharField(min_length=8, - widget=forms.PasswordInput(attrs={ - 'type': 'password', - 'class': 'form-control', - })) + email = forms.EmailField( + widget=forms.EmailInput( + attrs={ + "label": "email", + "class": "form-control", + } + ), + ) + password = forms.CharField( + min_length=8, + widget=forms.PasswordInput( + attrs={ + "type": "password", + "class": "form-control", + } + ), + ) def __init__(self, params, *args, **kwargs): super(forms.Form, self).__init__(params, *args, **kwargs) - self.fields['url'].widget.attrs.update({'initial': params.get('url')}) + self.fields["url"].widget.attrs.update({"initial": params.get("url")}) class PickPasswordForm(forms.Form): @@ -52,28 +60,34 @@ class PickPasswordForm(forms.Form): callback = forms.CharField(required=False, widget=forms.HiddenInput()) password1 = forms.CharField( min_length=8, - label='New password', - widget=forms.PasswordInput(attrs={ - 'type': 'password', - 'class': 'form-control', - }), + label="New password", + widget=forms.PasswordInput( + attrs={ + "type": "password", + "class": "form-control", + } + ), + ) + password2 = forms.CharField( + min_length=8, + label="Repeat your new password", + 
widget=forms.PasswordInput( + attrs={ + "type": "password", + "class": "form-control", + } + ), ) - password2 = forms.CharField(min_length=8, - label='Repeat your new password', - widget=forms.PasswordInput(attrs={ - 'type': 'password', - 'class': 'form-control', - })) def __init__(self, params, *args, **kwargs): super(forms.Form, self).__init__(params, *args, **kwargs) - self.fields['token'].widget.attrs.update({'initial': params.get('token')}) - self.fields['callback'].widget.attrs.update({'initial': params.get('callback')}) + self.fields["token"].widget.attrs.update({"initial": params.get("token")}) + self.fields["callback"].widget.attrs.update({"initial": params.get("callback")}) self.meta = { - 'heading': 'Choose a password for your account', - 'intro': 'Fill out both of the password fields with the same value', - 'btn_label': 'Set my password', + "heading": "Choose a password for your account", + "intro": "Fill out both of the password fields with the same value", + "btn_label": "Set my password", } @@ -82,54 +96,66 @@ class InviteForm(forms.Form): callback = forms.CharField(required=False, widget=forms.HiddenInput()) first_name = forms.CharField( min_length=2, - widget=forms.TextInput(attrs={ - 'type': 'text', - 'label': 'first_name', - 'class': 'form-control', - }), + widget=forms.TextInput( + attrs={ + "type": "text", + "label": "first_name", + "class": "form-control", + } + ), ) last_name = forms.CharField( min_length=2, - widget=forms.TextInput(attrs={ - 'type': 'text', - 'label': 'last_name', - 'class': 'form-control', - }), + widget=forms.TextInput( + attrs={ + "type": "text", + "label": "last_name", + "class": "form-control", + } + ), ) phone = forms.CharField( min_length=8, - widget=forms.TextInput(attrs={ - 'type': 'text', - 'label': 'phone', - 'class': 'form-control', - }), + widget=forms.TextInput( + attrs={ + "type": "text", + "label": "phone", + "class": "form-control", + } + ), ) password = forms.CharField( min_length=8, - widget=forms.PasswordInput(attrs={ - 'type': 'password', - 'label': 'hello', - 'class': 'form-control', - }), + widget=forms.PasswordInput( + attrs={ + "type": "password", + "label": "hello", + "class": "form-control", + } + ), + ) + repeat_password = forms.CharField( + min_length=8, + widget=forms.PasswordInput( + attrs={ + "type": "password", + "class": "form-control", + } + ), ) - repeat_password = forms.CharField(min_length=8, - widget=forms.PasswordInput(attrs={ - 'type': 'password', - 'class': 'form-control', - })) def __init__(self, params, *args, **kwargs): super(forms.Form, self).__init__(params, *args, **kwargs) - token = params['token'] - if len(params['token']) > 0: - token = params['token'][0] - callback = params['callback'] - if len(params['callback']) > 0: - callback = params['callback'][0] + token = params["token"] + if len(params["token"]) > 0: + token = params["token"][0] + callback = params["callback"] + if len(params["callback"]) > 0: + callback = params["callback"][0] - self.fields['token'].widget.attrs.update({'initial': token}) - self.fields['callback'].widget.attrs.update({'initial': callback}) + self.fields["token"].widget.attrs.update({"initial": token}) + self.fields["callback"].widget.attrs.update({"initial": callback}) class PasswordChangeCustomForm(PasswordChangeForm): @@ -137,4 +163,4 @@ class PasswordChangeCustomForm(PasswordChangeForm): def __init__(self, user, *args, **kwargs): super(PasswordChangeCustomForm, self).__init__(user, *args, **kwargs) for field in self.fields: - self.fields[field].widget.attrs['class'] = 
'form-control' + self.fields[field].widget.attrs["class"] = "form-control" diff --git a/breathecode/authenticate/management/commands/clean_expired_tokens.py b/breathecode/authenticate/management/commands/clean_expired_tokens.py index e6e42ae3e..09be17920 100644 --- a/breathecode/authenticate/management/commands/clean_expired_tokens.py +++ b/breathecode/authenticate/management/commands/clean_expired_tokens.py @@ -4,8 +4,8 @@ class Command(BaseCommand): - help = 'Delete expired temporal and login tokens' + help = "Delete expired temporal and login tokens" def handle(self, *args, **options): count = delete_tokens() - print(f'{count} tokens were deleted') + print(f"{count} tokens were deleted") diff --git a/breathecode/authenticate/management/commands/confirm_no_saas_emails.py b/breathecode/authenticate/management/commands/confirm_no_saas_emails.py index 15356a1cb..67f64ad25 100644 --- a/breathecode/authenticate/management/commands/confirm_no_saas_emails.py +++ b/breathecode/authenticate/management/commands/confirm_no_saas_emails.py @@ -4,14 +4,14 @@ class Command(BaseCommand): - help = 'Confirm all the emails that are not from a saas academy' + help = "Confirm all the emails that are not from a saas academy" def handle(self, *args, **options): - invites = UserInvite.objects.filter(Q(academy__available_as_saas=False) - | Q(cohort__academy__available_as_saas=False), - is_email_validated=False) + invites = UserInvite.objects.filter( + Q(academy__available_as_saas=False) | Q(cohort__academy__available_as_saas=False), is_email_validated=False + ) n = invites.count() invites.update(is_email_validated=True) - self.stdout.write(self.style.SUCCESS(f'Successfully confirmed {n} invites')) + self.stdout.write(self.style.SUCCESS(f"Successfully confirmed {n} invites")) diff --git a/breathecode/authenticate/management/commands/create_academy_roles.py b/breathecode/authenticate/management/commands/create_academy_roles.py index 509a4f802..5472d186b 100644 --- a/breathecode/authenticate/management/commands/create_academy_roles.py +++ b/breathecode/authenticate/management/commands/create_academy_roles.py @@ -3,576 +3,302 @@ from ...models import Capability, Role CAPABILITIES = [ - { - 'slug': 'read_my_academy', - 'description': 'Read your academy information' - }, - { - 'slug': 'crud_my_academy', - 'description': 'Read, or update your academy information (very high level, almost the academy admin)' - }, - { - 'slug': 'crud_member', - 'description': 'Create, update or delete academy members (very high level, almost the academy admin)' - }, - { - 'slug': 'read_member', - 'description': 'Read academy staff member information' - }, - { - 'slug': 'crud_student', - 'description': 'Create, update or delete students' - }, - { - 'slug': 'read_student', - 'description': 'Read student information' - }, - { - 'slug': 'read_invite', - 'description': 'Read invites from users' - }, - { - 'slug': 'crud_invite', - 'description': 'Create, update or delete invites from users' - }, - { - 'slug': 'invite_resend', - 'description': 'Resent invites for user academies' - }, - { - 'slug': 'read_assignment', - 'description': 'Read assignment information' - }, - { - 'slug': 'read_assignment_sensitive_details', - 'description': - 'The mentor in residence is allowed to see aditional info about the task, like the "delivery url"' - }, - { - 'slug': 'read_shortlink', - 'description': 'Access the list of marketing shortlinks' - }, - { - 'slug': 'crud_shortlink', - 'description': 'Create, update and delete marketing short links' - }, - { - 
'slug': 'crud_assignment', - 'description': 'Update assignments' - }, - { - 'slug': 'task_delivery_details', - 'description': 'Get delivery URL for a task, that url can be sent to students for delivery' - }, - { - 'slug': 'read_certificate', - 'description': 'List and read all academy certificates' - }, - { - 'slug': 'crud_certificate', - 'description': 'Create, update or delete student certificates' - }, - { - 'slug': 'read_layout', - 'description': 'Read layouts to generate new certificates' - }, - { - 'slug': 'read_syllabus', - 'description': 'List and read syllabus information' - }, - { - 'slug': 'crud_syllabus', - 'description': 'Create, update or delete syllabus versions' - }, - { - 'slug': 'read_organization', - 'description': 'Read academy organization details' - }, - { - 'slug': 'crud_organization', - 'description': 'Update, create or delete academy organization details' - }, - { - 'slug': 'read_event', - 'description': 'List and retrieve event information' - }, - { - 'slug': 'crud_event', - 'description': 'Create, update or delete event information' - }, - { - 'slug': 'read_event_type', - 'description': 'List and retrieve event type information' - }, - { - 'slug': 'crud_event_type', - 'description': 'Create, update or delete event type information' - }, - { - 'slug': 'read_all_cohort', - 'description': 'List all the cohorts or single cohort information' - }, - { - 'slug': 'read_single_cohort', - 'description': 'single cohort information related to a user' - }, - { - 'slug': 'crud_cohort', - 'description': 'Create, update or delete cohort info' - }, - { - 'slug': 'read_eventcheckin', - 'description': 'List and read all the event_checkins' - }, - { - 'slug': 'read_survey', - 'description': 'List all the nps answers' - }, - { - 'slug': 'crud_survey', - 'description': 'Create, update or delete surveys' - }, - { - 'slug': 'read_nps_answers', - 'description': 'List all the nps answers' - }, - { - 'slug': 'read_lead', - 'description': 'List all the leads' - }, - { - 'slug': 'read_won_lead', - 'description': 'List all the won leads' - }, - { - 'slug': 'crud_lead', - 'description': 'Create, update or delete academy leads' - }, - { - 'slug': 'read_review', - 'description': 'Read review for a particular academy' - }, - { - 'slug': 'crud_review', - 'description': 'Create, update or delete academy reviews' - }, - { - 'slug': 'read_media', - 'description': 'List all the medias' - }, - { - 'slug': 'crud_media', - 'description': 'Create, update or delete academy medias' - }, - { - 'slug': 'read_media_resolution', - 'description': 'List all the medias resolutions' - }, - { - 'slug': 'crud_media_resolution', - 'description': 'Create, update or delete academy media resolutions' - }, - { - 'slug': 'read_cohort_activity', - 'description': 'Read low level activity in a cohort (attendancy, etc.)' - }, - { - 'slug': 'generate_academy_token', - 'description': 'Create a new token only to be used by the academy' - }, - { - 'slug': 'get_academy_token', - 'description': 'Read the academy token' - }, - { - 'slug': 'send_reset_password', - 'description': 'Generate a temporal token and resend forgot password link' - }, - { - 'slug': 'read_activity', - 'description': 'List all the user activities' - }, - { - 'slug': 'crud_activity', - 'description': 'Create, update or delete a user activities' - }, - { - 'slug': 'read_assignment', - 'description': 'List all the assignments' - }, - { - 'slug': 'crud_assignment', - 'description': 'Create, update or delete a assignment' - }, - { - 'slug': 'classroom_activity', - 
'description': 'To report student activities during the classroom or cohorts (Specially meant for teachers)' - }, - { - 'slug': 'academy_reporting', - 'description': 'Get detailed reports about the academy activity' - }, - { - 'slug': 'generate_temporal_token', - 'description': 'Generate a temporal token to reset github credential or forgot password' - }, - { - 'slug': 'read_mentorship_service', - 'description': 'Get all mentorship services from one academy' - }, - { - 'slug': 'crud_mentorship_service', - 'description': 'Create, delete or update all mentorship services from one academy' - }, - { - 'slug': 'read_mentorship_agent', - 'description': 'Get all mentorship agents from one academy' - }, - { - 'slug': 'read_mentorship_mentor', - 'description': 'Get all mentorship mentors from one academy' - }, - { - 'slug': 'crud_mentorship_mentor', - 'description': 'Create, delete or update all mentorship mentors from one academy' - }, - { - 'slug': 'read_mentorship_session', - 'description': 'Get all session from one academy' - }, - { - 'slug': 'crud_mentorship_session', - 'description': 'Create, delete or update all session from one academy' - }, - { - 'slug': 'crud_freelancer_bill', - 'description': 'Create, delete or update all freelancer bills from one academy' - }, - { - 'slug': 'read_freelancer_bill', - 'description': 'Read all all freelancer bills from one academy' - }, - { - 'slug': 'crud_mentorship_bill', - 'description': 'Create, delete or update all mentroship bills from one academy' - }, - { - 'slug': 'read_mentorship_bill', - 'description': 'Read all mentroship bills from one academy' - }, - { - 'slug': 'read_asset', - 'description': 'Read all academy registry assets' - }, - { - 'slug': 'crud_asset', - 'description': 'Update, create and delete registry assets' - }, - { - 'slug': 'read_content_variables', - 'description': 'Read all academy content variables used in the asset markdowns' - }, - { - 'slug': 'crud_content_variables', - 'description': 'Update, create and delete content variables used in the asset markdowns' - }, - { - 'slug': 'read_tag', - 'description': 'Read marketing tags and their details' - }, - { - 'slug': 'crud_tag', - 'description': 'Update, create and delete a marketing tag and its details' - }, - { - 'slug': 'get_gitpod_user', - 'description': 'List gitpod user the academy is consuming' - }, - { - 'slug': 'update_gitpod_user', - 'description': 'Update gitpod user expiration based on available information' - }, - { - 'slug': 'get_github_user', - 'description': 'List github user the academy is consuming' - }, - { - 'slug': 'update_github_user', - 'description': 'Update github user expiration based on available information' - }, - { - 'slug': 'sync_organization_users', - 'description': 'Calls for the github API and brings all org users, then tries to synch them' - }, - { - 'slug': 'read_technology', - 'description': 'Read asset technologies' - }, - { - 'slug': 'crud_technology', - 'description': 'Update, create and delete asset technologies' - }, - { - 'slug': 'read_keyword', - 'description': 'Read SEO keywords' - }, - { - 'slug': 'crud_keyword', - 'description': 'Update, create and delete SEO keywords' - }, - { - 'slug': 'read_keywordcluster', - 'description': 'Update, create and delete asset technologies' - }, - { - 'slug': 'crud_keywordcluster', - 'description': 'Update, create and delete asset technologies' - }, - { - 'slug': 'read_cohort_log', - 'description': 'Read the cohort logo that contains attendance and other info logged each day' - }, - { - 'slug': 
'crud_cohort_log', - 'description': 'Update and delete things like the cohort attendance, teacher comments, etc' - }, - { - 'slug': 'read_category', - 'description': 'Read categories from the content registry' - }, - { - 'slug': 'crud_category', - 'description': 'Update and delete categories from the content registry' - }, - { - 'slug': 'read_project_invoice', - 'description': 'Read the financial status of a project and invoices' - }, - { - 'slug': 'crud_project_invoice', - 'description': 'Create, Update and delete project invoices' - }, - { - 'slug': 'read_freelance_projects', - 'description': 'Read project details without financials' - }, - { - 'slug': 'read_lead_gen_app', - 'description': 'Read lead generation apps' - }, - { - 'slug': 'chatbot_message', - 'description': 'Speak with a chatbot' - }, - { - 'slug': 'start_or_end_class', - 'description': 'start or end a class' - }, - { - 'slug': 'get_academy_auth_settings', - 'description': 'Settings related to authentication, for example the github auth integration' - }, - { - 'slug': 'crud_academy_auth_settings', - 'description': 'Settings related to authentication, for example the github auth integration' - }, - { - 'slug': 'start_or_end_event', - 'description': 'Start or end event' - }, - { - 'slug': 'read_provisioning_bill', - 'description': 'Read provisioning activities and bills' - }, - { - 'slug': 'crud_provisioning_activity', - 'description': 'Create, update or delete provisioning activities' - }, - { - 'slug': 'read_service', - 'description': 'Read service details' - }, - { - 'slug': 'read_academyservice', - 'description': 'Read Academy Service details' - }, - { - 'slug': 'crud_academyservice', - 'description': 'Crud Academy Service details' - }, - { - 'slug': 'crud_provisioning_bill', - 'description': 'Crud Provisioning Bills', - }, - { - 'slug': 'read_calendly_organization', - 'description': 'Access info about the calendly integration' - }, - { - 'slug': 'create_calendly_organization', - 'description': 'Add a new calendly integration' - }, - { - 'slug': 'reset_calendly_organization', - 'description': 'Reset the calendly token' - }, - { - 'slug': 'delete_calendly_organization', - 'description': 'Delete calendly integration' - }, - { - 'slug': 'crud_assessment', - 'description': 'Manage student quizzes and assessments' - }, - { - 'slug': 'read_user_assessment', - 'description': 'Read user assessment submissions' - }, + {"slug": "read_my_academy", "description": "Read your academy information"}, + { + "slug": "crud_my_academy", + "description": "Read, or update your academy information (very high level, almost the academy admin)", + }, + { + "slug": "crud_member", + "description": "Create, update or delete academy members (very high level, almost the academy admin)", + }, + {"slug": "read_member", "description": "Read academy staff member information"}, + {"slug": "crud_student", "description": "Create, update or delete students"}, + {"slug": "read_student", "description": "Read student information"}, + {"slug": "read_invite", "description": "Read invites from users"}, + {"slug": "crud_invite", "description": "Create, update or delete invites from users"}, + {"slug": "invite_resend", "description": "Resent invites for user academies"}, + {"slug": "read_assignment", "description": "Read assignment information"}, + { + "slug": "read_assignment_sensitive_details", + "description": 'The mentor in residence is allowed to see aditional info about the task, like the "delivery url"', + }, + {"slug": "read_shortlink", "description": "Access 
the list of marketing shortlinks"}, + {"slug": "crud_shortlink", "description": "Create, update and delete marketing short links"}, + {"slug": "crud_assignment", "description": "Update assignments"}, + { + "slug": "task_delivery_details", + "description": "Get delivery URL for a task, that url can be sent to students for delivery", + }, + {"slug": "read_certificate", "description": "List and read all academy certificates"}, + {"slug": "crud_certificate", "description": "Create, update or delete student certificates"}, + {"slug": "read_layout", "description": "Read layouts to generate new certificates"}, + {"slug": "read_syllabus", "description": "List and read syllabus information"}, + {"slug": "crud_syllabus", "description": "Create, update or delete syllabus versions"}, + {"slug": "read_organization", "description": "Read academy organization details"}, + {"slug": "crud_organization", "description": "Update, create or delete academy organization details"}, + {"slug": "read_event", "description": "List and retrieve event information"}, + {"slug": "crud_event", "description": "Create, update or delete event information"}, + {"slug": "read_event_type", "description": "List and retrieve event type information"}, + {"slug": "crud_event_type", "description": "Create, update or delete event type information"}, + {"slug": "read_all_cohort", "description": "List all the cohorts or single cohort information"}, + {"slug": "read_single_cohort", "description": "single cohort information related to a user"}, + {"slug": "crud_cohort", "description": "Create, update or delete cohort info"}, + {"slug": "read_eventcheckin", "description": "List and read all the event_checkins"}, + {"slug": "read_survey", "description": "List all the nps answers"}, + {"slug": "crud_survey", "description": "Create, update or delete surveys"}, + {"slug": "read_nps_answers", "description": "List all the nps answers"}, + {"slug": "read_lead", "description": "List all the leads"}, + {"slug": "read_won_lead", "description": "List all the won leads"}, + {"slug": "crud_lead", "description": "Create, update or delete academy leads"}, + {"slug": "read_review", "description": "Read review for a particular academy"}, + {"slug": "crud_review", "description": "Create, update or delete academy reviews"}, + {"slug": "read_media", "description": "List all the medias"}, + {"slug": "crud_media", "description": "Create, update or delete academy medias"}, + {"slug": "read_media_resolution", "description": "List all the medias resolutions"}, + {"slug": "crud_media_resolution", "description": "Create, update or delete academy media resolutions"}, + {"slug": "read_cohort_activity", "description": "Read low level activity in a cohort (attendancy, etc.)"}, + {"slug": "generate_academy_token", "description": "Create a new token only to be used by the academy"}, + {"slug": "get_academy_token", "description": "Read the academy token"}, + {"slug": "send_reset_password", "description": "Generate a temporal token and resend forgot password link"}, + {"slug": "read_activity", "description": "List all the user activities"}, + {"slug": "crud_activity", "description": "Create, update or delete a user activities"}, + {"slug": "read_assignment", "description": "List all the assignments"}, + {"slug": "crud_assignment", "description": "Create, update or delete a assignment"}, + { + "slug": "classroom_activity", + "description": "To report student activities during the classroom or cohorts (Specially meant for teachers)", + }, + {"slug": "academy_reporting", 
"description": "Get detailed reports about the academy activity"}, + { + "slug": "generate_temporal_token", + "description": "Generate a temporal token to reset github credential or forgot password", + }, + {"slug": "read_mentorship_service", "description": "Get all mentorship services from one academy"}, + { + "slug": "crud_mentorship_service", + "description": "Create, delete or update all mentorship services from one academy", + }, + {"slug": "read_mentorship_agent", "description": "Get all mentorship agents from one academy"}, + {"slug": "read_mentorship_mentor", "description": "Get all mentorship mentors from one academy"}, + { + "slug": "crud_mentorship_mentor", + "description": "Create, delete or update all mentorship mentors from one academy", + }, + {"slug": "read_mentorship_session", "description": "Get all session from one academy"}, + {"slug": "crud_mentorship_session", "description": "Create, delete or update all session from one academy"}, + {"slug": "crud_freelancer_bill", "description": "Create, delete or update all freelancer bills from one academy"}, + {"slug": "read_freelancer_bill", "description": "Read all all freelancer bills from one academy"}, + {"slug": "crud_mentorship_bill", "description": "Create, delete or update all mentroship bills from one academy"}, + {"slug": "read_mentorship_bill", "description": "Read all mentroship bills from one academy"}, + {"slug": "read_asset", "description": "Read all academy registry assets"}, + {"slug": "crud_asset", "description": "Update, create and delete registry assets"}, + {"slug": "read_content_variables", "description": "Read all academy content variables used in the asset markdowns"}, + { + "slug": "crud_content_variables", + "description": "Update, create and delete content variables used in the asset markdowns", + }, + {"slug": "read_tag", "description": "Read marketing tags and their details"}, + {"slug": "crud_tag", "description": "Update, create and delete a marketing tag and its details"}, + {"slug": "get_gitpod_user", "description": "List gitpod user the academy is consuming"}, + {"slug": "update_gitpod_user", "description": "Update gitpod user expiration based on available information"}, + {"slug": "get_github_user", "description": "List github user the academy is consuming"}, + {"slug": "update_github_user", "description": "Update github user expiration based on available information"}, + { + "slug": "sync_organization_users", + "description": "Calls for the github API and brings all org users, then tries to synch them", + }, + {"slug": "read_technology", "description": "Read asset technologies"}, + {"slug": "crud_technology", "description": "Update, create and delete asset technologies"}, + {"slug": "read_keyword", "description": "Read SEO keywords"}, + {"slug": "crud_keyword", "description": "Update, create and delete SEO keywords"}, + {"slug": "read_keywordcluster", "description": "Update, create and delete asset technologies"}, + {"slug": "crud_keywordcluster", "description": "Update, create and delete asset technologies"}, + { + "slug": "read_cohort_log", + "description": "Read the cohort logo that contains attendance and other info logged each day", + }, + { + "slug": "crud_cohort_log", + "description": "Update and delete things like the cohort attendance, teacher comments, etc", + }, + {"slug": "read_category", "description": "Read categories from the content registry"}, + {"slug": "crud_category", "description": "Update and delete categories from the content registry"}, + {"slug": "read_project_invoice", 
"description": "Read the financial status of a project and invoices"}, + {"slug": "crud_project_invoice", "description": "Create, Update and delete project invoices"}, + {"slug": "read_freelance_projects", "description": "Read project details without financials"}, + {"slug": "read_lead_gen_app", "description": "Read lead generation apps"}, + {"slug": "chatbot_message", "description": "Speak with a chatbot"}, + {"slug": "start_or_end_class", "description": "start or end a class"}, + { + "slug": "get_academy_auth_settings", + "description": "Settings related to authentication, for example the github auth integration", + }, + { + "slug": "crud_academy_auth_settings", + "description": "Settings related to authentication, for example the github auth integration", + }, + {"slug": "start_or_end_event", "description": "Start or end event"}, + {"slug": "read_provisioning_bill", "description": "Read provisioning activities and bills"}, + {"slug": "crud_provisioning_activity", "description": "Create, update or delete provisioning activities"}, + {"slug": "read_service", "description": "Read service details"}, + {"slug": "read_academyservice", "description": "Read Academy Service details"}, + {"slug": "crud_academyservice", "description": "Crud Academy Service details"}, + { + "slug": "crud_provisioning_bill", + "description": "Crud Provisioning Bills", + }, + {"slug": "read_calendly_organization", "description": "Access info about the calendly integration"}, + {"slug": "create_calendly_organization", "description": "Add a new calendly integration"}, + {"slug": "reset_calendly_organization", "description": "Reset the calendly token"}, + {"slug": "delete_calendly_organization", "description": "Delete calendly integration"}, + {"slug": "crud_assessment", "description": "Manage student quizzes and assessments"}, + {"slug": "read_user_assessment", "description": "Read user assessment submissions"}, ] ROLES = [ { - 'slug': 'admin', - 'name': 'Admin', - 'caps': [c['slug'] for c in CAPABILITIES], - }, - { - 'slug': - 'academy_token', - 'name': - 'Academy Token', - 'caps': [ - 'read_member', - 'read_syllabus', - 'read_student', - 'read_all_cohort', - 'read_media', - 'read_my_academy', - 'read_invite', - 'read_lead', - 'crud_lead', - 'crud_tag', - 'read_tag', - 'read_technology', - 'read_review', - 'read_shortlink', - 'read_nps_answers', - 'read_won_lead', - 'read_asset', - 'read_category', - 'read_cohort_log', - 'read_lead_gen_app', - 'read_mentorship_service', - 'read_mentorship_mentor', - 'read_freelancer_bill', - 'read_keywordcluster', - 'crud_academyservice', - 'crud_event', - 'crud_mentorship_session', - 'read_calendly_organization', + "slug": "admin", + "name": "Admin", + "caps": [c["slug"] for c in CAPABILITIES], + }, + { + "slug": "academy_token", + "name": "Academy Token", + "caps": [ + "read_member", + "read_syllabus", + "read_student", + "read_all_cohort", + "read_media", + "read_my_academy", + "read_invite", + "read_lead", + "crud_lead", + "crud_tag", + "read_tag", + "read_technology", + "read_review", + "read_shortlink", + "read_nps_answers", + "read_won_lead", + "read_asset", + "read_category", + "read_cohort_log", + "read_lead_gen_app", + "read_mentorship_service", + "read_mentorship_mentor", + "read_freelancer_bill", + "read_keywordcluster", + "crud_academyservice", + "crud_event", + "crud_mentorship_session", + "read_calendly_organization", ], }, { - 'slug': - 'basic', - 'name': - 'Basic (Base)', - 'caps': [ - 'read_media', - 'read_my_academy', - 'read_invite', - 'crud_activity', - 
'read_tag', - 'academy_reporting', - 'read_activity', - 'read_technology', - 'read_academyservice', + "slug": "basic", + "name": "Basic (Base)", + "caps": [ + "read_media", + "read_my_academy", + "read_invite", + "crud_activity", + "read_tag", + "academy_reporting", + "read_activity", + "read_technology", + "read_academyservice", ], }, { - 'slug': - 'read_only', - 'name': - 'Read Only (Base)', - 'caps': [ - 'read_academyservice', - 'read_member', - 'read_syllabus', - 'read_student', - 'read_all_cohort', - 'read_media', - 'read_my_academy', - 'read_invite', - 'read_survey', - 'read_tag', - 'read_layout', - 'read_event', - 'read_event_type', - 'read_certificate', - 'read_won_lead', - 'read_eventcheckin', - 'read_review', - 'read_activity', - 'read_shortlink', - 'read_mentorship_service', - 'read_mentorship_mentor', - 'read_lead_gen_app', - 'read_technology', - 'read_service', + "slug": "read_only", + "name": "Read Only (Base)", + "caps": [ + "read_academyservice", + "read_member", + "read_syllabus", + "read_student", + "read_all_cohort", + "read_media", + "read_my_academy", + "read_invite", + "read_survey", + "read_tag", + "read_layout", + "read_event", + "read_event_type", + "read_certificate", + "read_won_lead", + "read_eventcheckin", + "read_review", + "read_activity", + "read_shortlink", + "read_mentorship_service", + "read_mentorship_mentor", + "read_lead_gen_app", + "read_technology", + "read_service", ], }, { - 'slug': - 'staff', - 'name': - 'Staff (Base)', - 'caps': [ - 'chatbot_message', - 'read_member', - 'read_syllabus', - 'read_student', - 'read_all_cohort', - 'read_media', - 'read_my_academy', - 'read_invite', - 'get_academy_token', - 'crud_activity', - 'read_survey', - 'read_tag', - 'read_layout', - 'read_event', - 'read_event_type', - 'read_certificate', - 'academy_reporting', - 'crud_media', - 'read_won_lead', - 'read_eventcheckin', - 'read_review', - 'read_activity', - 'read_shortlink', - 'read_mentorship_service', - 'read_mentorship_mentor', - 'read_lead_gen_app', - 'read_technology', - 'read_service', + "slug": "staff", + "name": "Staff (Base)", + "caps": [ + "chatbot_message", + "read_member", + "read_syllabus", + "read_student", + "read_all_cohort", + "read_media", + "read_my_academy", + "read_invite", + "get_academy_token", + "crud_activity", + "read_survey", + "read_tag", + "read_layout", + "read_event", + "read_event_type", + "read_certificate", + "academy_reporting", + "crud_media", + "read_won_lead", + "read_eventcheckin", + "read_review", + "read_activity", + "read_shortlink", + "read_mentorship_service", + "read_mentorship_mentor", + "read_lead_gen_app", + "read_technology", + "read_service", ], }, { - 'slug': - 'student', - 'name': - 'Student', - 'caps': [ - 'crud_assignment', 'chatbot_message', 'read_syllabus', 'read_assignment', 'read_single_cohort', - 'read_my_academy', 'read_all_cohort', 'crud_activity', 'read_mentorship_service', 'read_mentorship_mentor', - 'read_cohort_log', 'read_service', 'read_academyservice' + "slug": "student", + "name": "Student", + "caps": [ + "crud_assignment", + "chatbot_message", + "read_syllabus", + "read_assignment", + "read_single_cohort", + "read_my_academy", + "read_all_cohort", + "crud_activity", + "read_mentorship_service", + "read_mentorship_mentor", + "read_cohort_log", + "read_service", + "read_academyservice", ], }, ] def extend(roles, slugs): - caps_groups = [item['caps'] for item in roles if item['slug'] in slugs] + caps_groups = [item["caps"] for item in roles if item["slug"] in slugs] inhered_caps = [] for roles in 
caps_groups: inhered_caps = inhered_caps + roles @@ -608,209 +334,263 @@ def extend_roles(roles: list[RoleType]) -> None: you can extend from more than one role but also add additional capabilities at the end. """ - roles.append({ - 'slug': - 'content_writer', - 'name': - 'Content Writer', - 'caps': - extend(roles, ['basic']) + [ - 'read_keywordcluster', 'read_member', 'read_media', 'read_keyword', 'read_my_academy', 'read_asset', - 'crud_asset', 'read_category', 'crud_category', 'read_content_variables', 'crud_content_variables', - 'crud_assessment' - ] - }) + roles.append( + { + "slug": "content_writer", + "name": "Content Writer", + "caps": extend(roles, ["basic"]) + + [ + "read_keywordcluster", + "read_member", + "read_media", + "read_keyword", + "read_my_academy", + "read_asset", + "crud_asset", + "read_category", + "crud_category", + "read_content_variables", + "crud_content_variables", + "crud_assessment", + ], + } + ) - roles.append({ - 'slug': - 'assistant', - 'name': - 'Teacher Assistant', - 'caps': - extend(roles, ['staff']) + [ - 'read_assignment', 'crud_assignment', 'read_cohort_activity', 'read_nps_answers', 'classroom_activity', - 'read_event', 'read_event_type', 'task_delivery_details', 'crud_cohort', 'read_cohort_log', - 'crud_cohort_log', 'start_or_end_class', 'start_or_end_event', 'read_user_assessment' - ] - }) - roles.append({ - 'slug': - 'career_support', - 'name': - 'Career Support Specialist', - 'caps': - extend(roles, ['staff']) + [ - 'read_certificate', - 'crud_certificate', - 'crud_shortlink', - 'read_mentorship_mentor', - 'crud_mentorship_mentor', - 'read_mentorship_service', - 'crud_mentorship_service', - 'read_mentorship_session', - 'crud_mentorship_session', - 'read_assignment', - 'crud_assignment', - 'crud_mentorship_bill', - 'read_mentorship_bill', - 'classroom_activity', - 'read_asset', - 'task_delivery_details', - ] - }) - roles.append({ - 'slug': 'career_support_head', - 'name': 'Career Support Head', - 'caps': extend(roles, ['career_support', 'content_writer']) + ['crud_syllabus'] - }) - roles.append({ - 'slug': - 'admissions_developer', - 'name': - 'Admissions Developer', - 'caps': - extend(roles, ['staff']) + - ['crud_lead', 'crud_student', 'crud_cohort', 'read_all_cohort', 'read_lead', 'read_activity', 'invite_resend'] - }) - roles.append({ - 'slug': - 'syllabus_coordinator', - 'name': - 'Syllabus Coordinator', - 'caps': - extend(roles, ['staff', 'content_writer']) + - ['crud_syllabus', 'crud_media', 'crud_technology', 'read_freelancer_bill', 'crud_freelancer_bill'] - }) - roles.append({ - 'slug': 'culture_and_recruitment', - 'name': 'Culture and Recruitment', - 'caps': extend(roles, ['staff']) + ['crud_member', 'crud_media'] - }) - roles.append({ - 'slug': - 'graphic_designer', - 'name': - 'Graphic Designer', - 'caps': - extend(roles, ['staff']) + ['read_event', 'read_event_type', 'crud_media', 'read_asset', 'read_media'] - }) - roles.append({ - 'slug': - 'community_manager', - 'name': - 'Manage Syllabus, Exercises and all academy content', - 'caps': - extend(roles, ['staff', 'graphic_designer']) + [ - 'crud_lead', 'crud_event', 'crud_event_type', 'read_eventcheckin', 'read_nps_answers', 'read_lead', - 'read_all_cohort', 'crud_asset', 'read_keywordcluster', 'read_keyword' - ] - }) - roles.append({ - 'slug': - 'growth_manager', - 'name': - 'Growth Manager', - 'caps': - extend(roles, ['staff', 'community_manager']) + [ - 'crud_media', - 'read_activity', - 'read_lead', - 'read_user_assessment', - 'read_won_lead', - 'crud_review', - 'crud_shortlink', - 
'crud_tag', - 'crud_keyword', - 'crud_keywordcluster', - 'crud_asset', - 'read_category', - ] - }) - roles.append({ - 'slug': - 'accountant', - 'name': - 'Accountant', - 'caps': - extend(roles, ['staff']) + [ - 'read_freelancer_bill', 'crud_freelancer_bill', 'crud_mentorship_bill', 'read_mentorship_bill', - 'read_project_invoice', 'crud_project_invoice', 'get_github_user', 'read_provisioning_bill', - 'crud_provisioning_bill' - ] - }) - roles.append({ - 'slug': 'homework_reviewer', - 'name': 'Homework Reviewer', - 'caps': extend(roles, ['assistant']) + ['crud_student'] - }) - roles.append({'slug': 'teacher', 'name': 'Teacher', 'caps': extend(roles, ['assistant']) + ['crud_cohort']}) - roles.append({ - 'slug': - 'academy_coordinator', - 'name': - 'Mentor in residence', - 'caps': - extend(roles, ['teacher']) + [ - 'crud_syllabus', - 'crud_cohort', - 'crud_student', - 'crud_survey', - 'read_won_lead', - 'crud_member', - 'send_reset_password', - 'generate_temporal_token', - 'crud_certificate', - 'crud_review', - 'read_assignment_sensitive_details', - 'crud_shortlink', - 'invite_resend', - 'crud_invite', - 'crud_mentorship_mentor', - 'read_mentorship_mentor', - 'read_mentorship_service', - 'crud_mentorship_service', - 'read_mentorship_session', - 'crud_mentorship_session', - 'crud_mentorship_bill', - 'read_mentorship_bill', - 'crud_freelancer_bill', - 'get_gitpod_user', - 'update_gitpod_user', - 'get_github_user', - 'update_github_user', - 'read_project_invoice', - 'read_freelance_projects', - 'sync_organization_users', - 'read_provisioning_bill', - 'read_calendly_organization', - 'reset_calendly_organization', - 'create_calendly_organization', - 'delete_calendly_organization', - ] - }) - roles.append({ - 'slug': - 'country_manager', - 'name': - 'Country Manager', - 'caps': - extend(roles, [ - 'academy_coordinator', - 'student', - 'career_support', - 'growth_manager', - 'admissions_developer', - 'syllabus_coordinator', - 'accountant', - ]) + [ - 'crud_my_academy', 'crud_organization', 'generate_academy_token', 'send_reset_password', - 'generate_temporal_token', 'read_organization', 'crud_provisioning_bill' - ] - }) + roles.append( + { + "slug": "assistant", + "name": "Teacher Assistant", + "caps": extend(roles, ["staff"]) + + [ + "read_assignment", + "crud_assignment", + "read_cohort_activity", + "read_nps_answers", + "classroom_activity", + "read_event", + "read_event_type", + "task_delivery_details", + "crud_cohort", + "read_cohort_log", + "crud_cohort_log", + "start_or_end_class", + "start_or_end_event", + "read_user_assessment", + ], + } + ) + roles.append( + { + "slug": "career_support", + "name": "Career Support Specialist", + "caps": extend(roles, ["staff"]) + + [ + "read_certificate", + "crud_certificate", + "crud_shortlink", + "read_mentorship_mentor", + "crud_mentorship_mentor", + "read_mentorship_service", + "crud_mentorship_service", + "read_mentorship_session", + "crud_mentorship_session", + "read_assignment", + "crud_assignment", + "crud_mentorship_bill", + "read_mentorship_bill", + "classroom_activity", + "read_asset", + "task_delivery_details", + ], + } + ) + roles.append( + { + "slug": "career_support_head", + "name": "Career Support Head", + "caps": extend(roles, ["career_support", "content_writer"]) + ["crud_syllabus"], + } + ) + roles.append( + { + "slug": "admissions_developer", + "name": "Admissions Developer", + "caps": extend(roles, ["staff"]) + + [ + "crud_lead", + "crud_student", + "crud_cohort", + "read_all_cohort", + "read_lead", + "read_activity", + "invite_resend", 
+ ], + } + ) + roles.append( + { + "slug": "syllabus_coordinator", + "name": "Syllabus Coordinator", + "caps": extend(roles, ["staff", "content_writer"]) + + ["crud_syllabus", "crud_media", "crud_technology", "read_freelancer_bill", "crud_freelancer_bill"], + } + ) + roles.append( + { + "slug": "culture_and_recruitment", + "name": "Culture and Recruitment", + "caps": extend(roles, ["staff"]) + ["crud_member", "crud_media"], + } + ) + roles.append( + { + "slug": "graphic_designer", + "name": "Graphic Designer", + "caps": extend(roles, ["staff"]) + + ["read_event", "read_event_type", "crud_media", "read_asset", "read_media"], + } + ) + roles.append( + { + "slug": "community_manager", + "name": "Manage Syllabus, Exercises and all academy content", + "caps": extend(roles, ["staff", "graphic_designer"]) + + [ + "crud_lead", + "crud_event", + "crud_event_type", + "read_eventcheckin", + "read_nps_answers", + "read_lead", + "read_all_cohort", + "crud_asset", + "read_keywordcluster", + "read_keyword", + ], + } + ) + roles.append( + { + "slug": "growth_manager", + "name": "Growth Manager", + "caps": extend(roles, ["staff", "community_manager"]) + + [ + "crud_media", + "read_activity", + "read_lead", + "read_user_assessment", + "read_won_lead", + "crud_review", + "crud_shortlink", + "crud_tag", + "crud_keyword", + "crud_keywordcluster", + "crud_asset", + "read_category", + ], + } + ) + roles.append( + { + "slug": "accountant", + "name": "Accountant", + "caps": extend(roles, ["staff"]) + + [ + "read_freelancer_bill", + "crud_freelancer_bill", + "crud_mentorship_bill", + "read_mentorship_bill", + "read_project_invoice", + "crud_project_invoice", + "get_github_user", + "read_provisioning_bill", + "crud_provisioning_bill", + ], + } + ) + roles.append( + { + "slug": "homework_reviewer", + "name": "Homework Reviewer", + "caps": extend(roles, ["assistant"]) + ["crud_student"], + } + ) + roles.append({"slug": "teacher", "name": "Teacher", "caps": extend(roles, ["assistant"]) + ["crud_cohort"]}) + roles.append( + { + "slug": "academy_coordinator", + "name": "Mentor in residence", + "caps": extend(roles, ["teacher"]) + + [ + "crud_syllabus", + "crud_cohort", + "crud_student", + "crud_survey", + "read_won_lead", + "crud_member", + "send_reset_password", + "generate_temporal_token", + "crud_certificate", + "crud_review", + "read_assignment_sensitive_details", + "crud_shortlink", + "invite_resend", + "crud_invite", + "crud_mentorship_mentor", + "read_mentorship_mentor", + "read_mentorship_service", + "crud_mentorship_service", + "read_mentorship_session", + "crud_mentorship_session", + "crud_mentorship_bill", + "read_mentorship_bill", + "crud_freelancer_bill", + "get_gitpod_user", + "update_gitpod_user", + "get_github_user", + "update_github_user", + "read_project_invoice", + "read_freelance_projects", + "sync_organization_users", + "read_provisioning_bill", + "read_calendly_organization", + "reset_calendly_organization", + "create_calendly_organization", + "delete_calendly_organization", + ], + } + ) + roles.append( + { + "slug": "country_manager", + "name": "Country Manager", + "caps": extend( + roles, + [ + "academy_coordinator", + "student", + "career_support", + "growth_manager", + "admissions_developer", + "syllabus_coordinator", + "accountant", + ], + ) + + [ + "crud_my_academy", + "crud_organization", + "generate_academy_token", + "send_reset_password", + "generate_temporal_token", + "read_organization", + "crud_provisioning_bill", + ], + } + ) class Command(BaseCommand): - help = 'Create default system 
capabilities' + help = "Create default system capabilities" def handle(self, *args, **options): @@ -818,12 +598,12 @@ def handle(self, *args, **options): caps = get_capabilities() for c in caps: - _cap = Capability.objects.filter(slug=c['slug']).first() + _cap = Capability.objects.filter(slug=c["slug"]).first() if _cap is None: _cap = Capability(**c) _cap.save() else: - _cap.description = c['description'] + _cap.description = c["description"] _cap.save() # These are the MAIN roles, they cannot be deleted by anyone at the academy. @@ -834,12 +614,12 @@ def handle(self, *args, **options): extend_roles(roles) for r in roles: - _r = Role.objects.filter(slug=r['slug']).first() + _r = Role.objects.filter(slug=r["slug"]).first() if _r is None: - _r = Role(slug=r['slug'], name=r['name']) + _r = Role(slug=r["slug"], name=r["name"]) _r.save() _r.capabilities.clear() - r['caps'] = remove_duplicates(r['caps']) - for c in r['caps']: + r["caps"] = remove_duplicates(r["caps"]) + for c in r["caps"]: _r.capabilities.add(c) diff --git a/breathecode/authenticate/management/commands/delete_expired_githubusers.py b/breathecode/authenticate/management/commands/delete_expired_githubusers.py index 049d7d316..4a911ecf2 100644 --- a/breathecode/authenticate/management/commands/delete_expired_githubusers.py +++ b/breathecode/authenticate/management/commands/delete_expired_githubusers.py @@ -5,7 +5,7 @@ class Command(BaseCommand): - help = 'Delete expired temporal and login tokens' + help = "Delete expired temporal and login tokens" def handle(self, *args, **options): self.update_inactive_github_users() @@ -13,35 +13,40 @@ def handle(self, *args, **options): def delete_from_github_org(self): academies: dict[str, Github] = {} - deleted_users = GithubAcademyUser.objects.filter(storage_action='DELETE', storage_status='SYNCHED') + deleted_users = GithubAcademyUser.objects.filter(storage_action="DELETE", storage_status="SYNCHED") for github in deleted_users: if github.academy.id not in academies: settings = AcademyAuthSettings.objects.filter(academy__id=github.academy.id).first() - academies[github.academy.id] = Github(org=settings.github_username, - token=settings.github_owner.credentialsgithub.token) + academies[github.academy.id] = Github( + org=settings.github_username, token=settings.github_owner.credentialsgithub.token + ) gb = academies[github.academy.id] try: gb.delete_org_member(github.username) - github.log('Successfully deleted in github organization') - print('Deleted github user: ' + github.username) + github.log("Successfully deleted in github organization") + print("Deleted github user: " + github.username) except Exception as e: - github.log('Error calling github API while deleting member from org: ' + str(e)) - print('Error deleting github user: ' + github.username) + github.log("Error calling github API while deleting member from org: " + str(e)) + print("Error deleting github user: " + github.username) def is_user_active_in_other_cohorts(self, user, current_cohort, academy): - active_cohorts_count = CohortUser.objects.filter( - user=user, - cohort__academy=academy, - cohort__never_ends=False, - educational_status='ACTIVE', - ).exclude(cohort__id__in=[current_cohort.id]).count() + active_cohorts_count = ( + CohortUser.objects.filter( + user=user, + cohort__academy=academy, + cohort__never_ends=False, + educational_status="ACTIVE", + ) + .exclude(cohort__id__in=[current_cohort.id]) + .count() + ) return active_cohorts_count > 0 def update_inactive_github_users(self): - added_github_users = 
GithubAcademyUser.objects.filter(storage_action='ADD') - print(str(added_github_users.count()) + ' users found') + added_github_users = GithubAcademyUser.objects.filter(storage_action="ADD") + print(str(added_github_users.count()) + " users found") for github_user in added_github_users: user = github_user.user academy = github_user.academy @@ -49,16 +54,21 @@ def update_inactive_github_users(self): cohort_user = CohortUser.objects.filter( user=user, cohort__never_ends=False, - educational_status__in=['POSTPONED', 'SUSPENDED', 'GRADUATED', 'DROPPED'], - cohort__academy=academy).first() + educational_status__in=["POSTPONED", "SUSPENDED", "GRADUATED", "DROPPED"], + cohort__academy=academy, + ).first() if cohort_user is None: continue cohort = cohort_user.cohort if not self.is_user_active_in_other_cohorts(user, cohort, academy): - github_user.storage_action = 'DELETE' - github_user.storage_status = 'PENDING' + github_user.storage_action = "DELETE" + github_user.storage_status = "PENDING" github_user.save() - print('Schedule the following github user for deletion in Academy ' + github_user.academy.name + - '. User: ' + github_user.user.email) + print( + "Schedule the following github user for deletion in Academy " + + github_user.academy.name + + ". User: " + + github_user.user.email + ) diff --git a/breathecode/authenticate/management/commands/fix_avatars.py b/breathecode/authenticate/management/commands/fix_avatars.py index f057288a0..9dda61afe 100644 --- a/breathecode/authenticate/management/commands/fix_avatars.py +++ b/breathecode/authenticate/management/commands/fix_avatars.py @@ -8,17 +8,17 @@ class Command(BaseCommand): - help = 'Sync academies from old breathecode' + help = "Sync academies from old breathecode" def handle(self, *args, **options): - api_url = os.getenv('API_URL', '') - current_avatar_url = api_url + '/static/img/avatar.png' + api_url = os.getenv("API_URL", "") + current_avatar_url = api_url + "/static/img/avatar.png" pending = Profile.objects.filter(avatar_url=current_avatar_url) for profile in pending: avatar_number = random.randint(1, 21) - avatar_url = api_url + f'/static/img/avatar-{avatar_number}.png' + avatar_url = api_url + f"/static/img/avatar-{avatar_number}.png" profile.avatar_url = avatar_url profile.save() - logger.info(f'Fixing {pending.count()} avatars') + logger.info(f"Fixing {pending.count()} avatars") diff --git a/breathecode/authenticate/management/commands/fix_empty_names.py b/breathecode/authenticate/management/commands/fix_empty_names.py index 4a8aec065..855416d3e 100644 --- a/breathecode/authenticate/management/commands/fix_empty_names.py +++ b/breathecode/authenticate/management/commands/fix_empty_names.py @@ -8,38 +8,39 @@ class Command(BaseCommand): - help = 'Sync ProfileAcademy first and last name with User.first_name or last_name' + help = "Sync ProfileAcademy first and last name with User.first_name or last_name" def handle(self, *args, **options): - CharField.register_lookup(Length, 'length') - students_to_sync = ProfileAcademy.objects.filter(Q(first_name__isnull=True) | Q( - first_name='')).exclude(Q(user__first_name__isnull=True) | Q(user__first_name='')) - logger.debug(f'Found {students_to_sync.count()} ProfileAcademy\'s to sync') + CharField.register_lookup(Length, "length") + students_to_sync = ProfileAcademy.objects.filter(Q(first_name__isnull=True) | Q(first_name="")).exclude( + Q(user__first_name__isnull=True) | Q(user__first_name="") + ) + logger.debug(f"Found {students_to_sync.count()} ProfileAcademy's to sync") for stu in 
students_to_sync: if stu.user is None: continue - if stu.user.first_name != '': - logger.debug(f'Updating student first name for {stu.user.first_name}') + if stu.user.first_name != "": + logger.debug(f"Updating student first name for {stu.user.first_name}") stu.first_name = stu.user.first_name - if stu.user.last_name != '': + if stu.user.last_name != "": stu.last_name = stu.user.last_name stu.save() - students_to_sync = ProfileAcademy.objects.filter(Q(user__first_name__isnull=True) - | Q(user__first_name='')).exclude( - Q(first_name__isnull=True) | Q(first_name='')) - logger.debug(f'Found {students_to_sync.count()} User\'s to sync') + students_to_sync = ProfileAcademy.objects.filter( + Q(user__first_name__isnull=True) | Q(user__first_name="") + ).exclude(Q(first_name__isnull=True) | Q(first_name="")) + logger.debug(f"Found {students_to_sync.count()} User's to sync") for stu in students_to_sync: if stu.user is None: - logger.debug(f'Skip {stu.first_name} {stu.last_name} because it has not user object') + logger.debug(f"Skip {stu.first_name} {stu.last_name} because it has not user object") continue if stu.first_name is not None and len(stu.first_name) > 0: - logger.debug(f'Updating student first name for {stu.first_name}') + logger.debug(f"Updating student first name for {stu.first_name}") stu.user.first_name = stu.first_name if stu.first_name is not None and len(stu.last_name) > 0: stu.user.last_name = stu.last_name stu.user.save() - logger.debug('Finished.') + logger.debug("Finished.") diff --git a/breathecode/authenticate/management/commands/fix_github_academy_user_logs.py b/breathecode/authenticate/management/commands/fix_github_academy_user_logs.py index 6bf554482..c72b8d8ad 100644 --- a/breathecode/authenticate/management/commands/fix_github_academy_user_logs.py +++ b/breathecode/authenticate/management/commands/fix_github_academy_user_logs.py @@ -5,7 +5,7 @@ class Command(BaseCommand): - help = 'Fix github academy user logs valid_until' + help = "Fix github academy user logs valid_until" def handle(self, *args, **options): @@ -14,8 +14,9 @@ def handle(self, *args, **options): for user in users: for academy in academies: - logs = GithubAcademyUserLog.objects.filter(academy_user__user=user, - academy_user__academy=academy).order_by('created_at') + logs = GithubAcademyUserLog.objects.filter( + academy_user__user=user, academy_user__academy=academy + ).order_by("created_at") prev = None for log in logs: diff --git a/breathecode/authenticate/management/commands/generate_student_profiles.py b/breathecode/authenticate/management/commands/generate_student_profiles.py index 209ef1ba7..38d19301c 100644 --- a/breathecode/authenticate/management/commands/generate_student_profiles.py +++ b/breathecode/authenticate/management/commands/generate_student_profiles.py @@ -4,23 +4,25 @@ class Command(BaseCommand): - help = 'Delete expired temporal and login tokens' + help = "Delete expired temporal and login tokens" def handle(self, *args, **options): - student_role = Role.objects.get(slug='student') - cus = CohortUser.objects.filter(role='STUDENT') + student_role = Role.objects.get(slug="student") + cus = CohortUser.objects.filter(role="STUDENT") count = 0 for cu in cus: profile = ProfileAcademy.objects.filter(user=cu.user, academy=cu.cohort.academy).first() if profile is None: count = count + 1 - profile = ProfileAcademy(user=cu.user, - academy=cu.cohort.academy, - role=student_role, - email=cu.user.email, - first_name=cu.user.first_name, - last_name=cu.user.last_name, - status='ACTIVE') + profile = 
ProfileAcademy( + user=cu.user, + academy=cu.cohort.academy, + role=student_role, + email=cu.user.email, + first_name=cu.user.first_name, + last_name=cu.user.last_name, + status="ACTIVE", + ) profile.save() - print(f'{count} student AcademyProfiles were created') + print(f"{count} student AcademyProfiles were created") diff --git a/breathecode/authenticate/management/commands/run_authenticate.py b/breathecode/authenticate/management/commands/run_authenticate.py index 2c2486259..8891a1e6b 100644 --- a/breathecode/authenticate/management/commands/run_authenticate.py +++ b/breathecode/authenticate/management/commands/run_authenticate.py @@ -6,54 +6,54 @@ logger = logging.getLogger(__name__) -API_URL = os.getenv('API_URL', '') -HOST = os.environ.get('OLD_BREATHECODE_API') -DATETIME_FORMAT = '%Y-%m-%d' +API_URL = os.getenv("API_URL", "") +HOST = os.environ.get("OLD_BREATHECODE_API") +DATETIME_FORMAT = "%Y-%m-%d" class Command(BaseCommand): - help = 'Commands for authenticate app' + help = "Commands for authenticate app" def add_arguments(self, parser): - parser.add_argument('command', type=str) + parser.add_argument("command", type=str) parser.add_argument( - '--cohorts', + "--cohorts", type=str, default=None, - help='Cohorts slugs to sync', + help="Cohorts slugs to sync", ) parser.add_argument( - '--students', + "--students", type=str, default=None, - help='Cohorts slugs to sync', + help="Cohorts slugs to sync", ) - parser.add_argument('--limit', action='store', dest='limit', type=int, default=0, help='How many to import') + parser.add_argument("--limit", action="store", dest="limit", type=int, default=0, help="How many to import") def handle(self, *args, **options): try: - func = getattr(self, options['command'], 'command_not_found') + func = getattr(self, options["command"], "command_not_found") except TypeError: print(f'Command method for {options["command"]} no Found!') func(options) def clean_expired_tokens(self, options): count = delete_tokens() - print(f'{count} tokens were deleted') + print(f"{count} tokens were deleted") def sanitize_profiles(self, options): profile = Profile.objects.all() for p in profile: - logger.debug('Sanitizing ' + p.user.email) - if p.avatar_url is None or p.avatar_url == '': + logger.debug("Sanitizing " + p.user.email) + if p.avatar_url is None or p.avatar_url == "": avatar_number = randint(1, 21) - p.avatar_url = API_URL + f'/static/img/avatar-{avatar_number}.png' + p.avatar_url = API_URL + f"/static/img/avatar-{avatar_number}.png" if p.github_username is None: - p.github_username = '' + p.github_username = "" else: - matches = re.findall(r'github.com\/(\w+)', p.github_username) + matches = re.findall(r"github.com\/(\w+)", p.github_username) if len(matches) > 0: p.github_username = matches[0] diff --git a/breathecode/authenticate/management/commands/seed_groups.py b/breathecode/authenticate/management/commands/seed_groups.py index 0d9edec1f..f1a482873 100644 --- a/breathecode/authenticate/management/commands/seed_groups.py +++ b/breathecode/authenticate/management/commands/seed_groups.py @@ -5,43 +5,45 @@ class Command(BaseCommand): - help = 'Create default system capabilities' + help = "Create default system capabilities" def handle(self, *args, **options): - student = Group.objects.filter(name='Student').first() - teacher = Group.objects.filter(name='Teacher').first() - default = Group.objects.filter(name='Default').first() - mentor = Group.objects.filter(name='Mentor').first() + student = Group.objects.filter(name="Student").first() + teacher = 
Group.objects.filter(name="Teacher").first() + default = Group.objects.filter(name="Default").first() + mentor = Group.objects.filter(name="Mentor").first() if not default: - default = Group(name='Default') + default = Group(name="Default") default.save() if not mentor: - mentor = Group(name='Mentor') + mentor = Group(name="Mentor") mentor.save() if not student: - student = Group(name='Student') + student = Group(name="Student") student.save() if not teacher: - teacher = Group(name='Teacher') + teacher = Group(name="Teacher") teacher.save() users = User.objects.filter() default.user_set.set(users) - mentor_ids = MentorProfile.objects.filter().values_list('user__id', flat=True) + mentor_ids = MentorProfile.objects.filter().values_list("user__id", flat=True) mentors = User.objects.filter(id__in=mentor_ids) mentor.user_set.set(mentors) - profile_ids = ProfileAcademy.objects.filter(user__isnull=False, role__slug='student').values_list('user__id', - flat=True) + profile_ids = ProfileAcademy.objects.filter(user__isnull=False, role__slug="student").values_list( + "user__id", flat=True + ) students = User.objects.filter(id__in=profile_ids) student.user_set.set(students) - profile_ids = ProfileAcademy.objects.filter(user__isnull=False, role__slug='teacher').values_list('user__id', - flat=True) + profile_ids = ProfileAcademy.objects.filter(user__isnull=False, role__slug="teacher").values_list( + "user__id", flat=True + ) teachers = User.objects.filter(id__in=profile_ids) teacher.user_set.set(teachers) diff --git a/breathecode/authenticate/management/commands/set_permissions.py b/breathecode/authenticate/management/commands/set_permissions.py index 2e86d7b4b..d9a0a67fb 100644 --- a/breathecode/authenticate/management/commands/set_permissions.py +++ b/breathecode/authenticate/management/commands/set_permissions.py @@ -4,8 +4,8 @@ import itertools CONTENT_TYPE_PROPS = { - 'app_label': 'breathecode', - 'model': 'SortingHat', # the team of django use models name in lower case, use upper case instead + "app_label": "breathecode", + "model": "SortingHat", # the team of django use models name in lower case, use upper case instead } # examples permissions autogenerated by django @@ -16,114 +16,86 @@ PERMISSIONS = [ { - 'name': 'Get my profile', - 'description': 'Get my profile', - 'codename': 'get_my_profile', + "name": "Get my profile", + "description": "Get my profile", + "codename": "get_my_profile", }, { - 'name': 'Create my profile', - 'description': 'Create my profile', - 'codename': 'create_my_profile', + "name": "Create my profile", + "description": "Create my profile", + "codename": "create_my_profile", }, { - 'name': 'Update my profile', - 'description': 'Update my profile', - 'codename': 'update_my_profile', + "name": "Update my profile", + "description": "Update my profile", + "codename": "update_my_profile", }, { - 'name': 'Get my certificate', - 'description': 'Get my certificate', - 'codename': 'get_my_certificate', + "name": "Get my certificate", + "description": "Get my certificate", + "codename": "get_my_certificate", }, { - 'name': 'Get my mentoring sessions', - 'description': 'Get my mentoring sessions', - 'codename': 'get_my_mentoring_sessions', + "name": "Get my mentoring sessions", + "description": "Get my mentoring sessions", + "codename": "get_my_mentoring_sessions", }, { - 'name': 'Join mentorship', - 'description': 'Join mentorship', - 'codename': 'join_mentorship', + "name": "Join mentorship", + "description": "Join mentorship", + "codename": "join_mentorship", }, { - 'name': 
'Join live class', - 'description': 'Join live class', - 'codename': 'live_class_join', + "name": "Join live class", + "description": "Join live class", + "codename": "live_class_join", }, { - 'name': 'Join event', - 'description': 'Join event', - 'codename': 'event_join', + "name": "Join event", + "description": "Join event", + "codename": "event_join", }, { - 'name': 'Get my containers', - 'description': 'Get provisioning containers', - 'codename': 'get_containers', + "name": "Get my containers", + "description": "Get provisioning containers", + "codename": "get_containers", }, { - 'name': 'Add code reviews', - 'description': 'Add code reviews', - 'codename': 'add_code_review', + "name": "Add code reviews", + "description": "Add code reviews", + "codename": "add_code_review", }, { - 'name': 'Upload provisioning activity', - 'description': 'Upload provisioning activity', - 'codename': 'upload_provisioning_activity', + "name": "Upload provisioning activity", + "description": "Upload provisioning activity", + "codename": "upload_provisioning_activity", }, { - 'name': 'Upload assignment telemetry', - 'description': 'Upload assignment telementry from learnpack', - 'codename': 'upload_assignment_telemetry', + "name": "Upload assignment telemetry", + "description": "Upload assignment telementry from learnpack", + "codename": "upload_assignment_telemetry", }, ] GROUPS = [ - { - 'name': 'Admin', - 'permissions': [x['codename'] for x in PERMISSIONS], - 'inherit': [] - }, - { - 'name': 'Default', - 'permissions': ['get_my_profile', 'create_my_profile', 'update_my_profile'], - 'inherit': [] - }, - { - 'name': 'Student', - 'permissions': - ['get_my_certificate', 'get_containers', 'get_my_mentoring_sessions', 'upload_assignment_telemetry'], - 'inherit': [] - }, - { - 'name': 'Teacher', - 'permissions': ['add_code_review'], - 'inherit': [] - }, - { - 'name': 'Mentor', - 'permissions': ['join_mentorship', 'get_my_mentoring_sessions'], - 'inherit': [] - }, - { - 'name': 'Mentorships', - 'permissions': ['join_mentorship', 'get_my_mentoring_sessions'], - 'inherit': [] - }, - { - 'name': 'Events', - 'permissions': ['event_join'], - 'inherit': [] - }, - { - 'name': 'Classes', - 'permissions': ['live_class_join'], - 'inherit': [] - }, - { - 'name': 'Legacy', - 'permissions': ['get_my_certificate'], - 'inherit': ['Classes', 'Events', 'Mentorships'] - }, + {"name": "Admin", "permissions": [x["codename"] for x in PERMISSIONS], "inherit": []}, + {"name": "Default", "permissions": ["get_my_profile", "create_my_profile", "update_my_profile"], "inherit": []}, + { + "name": "Student", + "permissions": [ + "get_my_certificate", + "get_containers", + "get_my_mentoring_sessions", + "upload_assignment_telemetry", + ], + "inherit": [], + }, + {"name": "Teacher", "permissions": ["add_code_review"], "inherit": []}, + {"name": "Mentor", "permissions": ["join_mentorship", "get_my_mentoring_sessions"], "inherit": []}, + {"name": "Mentorships", "permissions": ["join_mentorship", "get_my_mentoring_sessions"], "inherit": []}, + {"name": "Events", "permissions": ["event_join"], "inherit": []}, + {"name": "Classes", "permissions": ["live_class_join"], "inherit": []}, + {"name": "Legacy", "permissions": ["get_my_certificate"], "inherit": ["Classes", "Events", "Mentorships"]}, ] @@ -140,7 +112,7 @@ def get_groups(): class Command(BaseCommand): - help = 'Create default system capabilities' + help = "Create default system capabilities" def handle(self, *args, **options): content_type = 
ContentType.objects.filter(**CONTENT_TYPE_PROPS).first() @@ -157,35 +129,37 @@ def handle(self, *args, **options): permission_instances = {} for permission in permissions: # it can use a django permissions - instance = Permission.objects.filter(codename=permission['codename']).first() + instance = Permission.objects.filter(codename=permission["codename"]).first() # it can create their own permissions if not instance: - instance = Permission(name=permission['name'], - codename=permission['codename'], - content_type=content_type) + instance = Permission( + name=permission["name"], codename=permission["codename"], content_type=content_type + ) instance.save() - permission_instances[permission['codename']] = instance + permission_instances[permission["codename"]] = instance for group in groups: - instance = Group.objects.filter(name=group['name']).first() + instance = Group.objects.filter(name=group["name"]).first() # reset permissions if instance: instance.permissions.clear() else: - instance = Group(name=group['name']) + instance = Group(name=group["name"]) instance.save() # the admin have all the permissions - if group['name'] == 'Admin': + if group["name"] == "Admin": instance.permissions.set(Permission.objects.filter().exclude(content_type=content_type)) permissions = list( - itertools.chain.from_iterable([group['permissions']] + - [x['permissions'] for x in groups if x['name'] in group['inherit']])) + itertools.chain.from_iterable( + [group["permissions"]] + [x["permissions"] for x in groups if x["name"] in group["inherit"]] + ) + ) for permission in permissions: instance.permissions.add(permission_instances[permission]) diff --git a/breathecode/authenticate/management/commands/set_scopes.py b/breathecode/authenticate/management/commands/set_scopes.py index fe0fff646..d8c2df51b 100644 --- a/breathecode/authenticate/management/commands/set_scopes.py +++ b/breathecode/authenticate/management/commands/set_scopes.py @@ -4,35 +4,35 @@ # if it does not require an agreement, add scopes is not necessary APPS = [ { - 'name': 'Rigobot', - 'slug': 'rigobot', - 'require_an_agreement': False, - 'required_scopes': [], - 'optional_scopes': [], + "name": "Rigobot", + "slug": "rigobot", + "require_an_agreement": False, + "required_scopes": [], + "optional_scopes": [], }, ] SCOPES = [ { - 'name': 'Read user', - 'slug': 'read:user', - 'description': 'Can read user information', + "name": "Read user", + "slug": "read:user", + "description": "Can read user information", }, { - 'name': 'Webhook', - 'slug': 'webhook', - 'description': 'Can receive updates from 4Geeks', + "name": "Webhook", + "slug": "webhook", + "description": "Can receive updates from 4Geeks", }, ] class Command(BaseCommand): - help = 'Create default system capabilities' + help = "Create default system capabilities" def handle(self, *args, **options): cache = {} for scope in SCOPES: - slug = scope['slug'] + slug = scope["slug"] x, created = Scope.objects.get_or_create(slug=slug, defaults=scope) if not created: @@ -43,16 +43,16 @@ def handle(self, *args, **options): cache[slug] = x for app in APPS: - slug = app['slug'] - required_scopes = app['required_scopes'] - optional_scopes = app['optional_scopes'] + slug = app["slug"] + required_scopes = app["required_scopes"] + optional_scopes = app["optional_scopes"] x = App.objects.filter(slug=slug).first() if not x: continue for key, value in app.items(): - if key == 'required_scopes' or key == 'optional_scopes': + if key == "required_scopes" or key == "optional_scopes": continue setattr(x, key, 
value) diff --git a/breathecode/authenticate/management/commands/sync_github_organization.py b/breathecode/authenticate/management/commands/sync_github_organization.py index 884536792..3c3d76f94 100644 --- a/breathecode/authenticate/management/commands/sync_github_organization.py +++ b/breathecode/authenticate/management/commands/sync_github_organization.py @@ -4,14 +4,14 @@ class Command(BaseCommand): - help = 'Delete expired temporal and login tokens' + help = "Delete expired temporal and login tokens" def handle(self, *args, **options): aca_settings = AcademyAuthSettings.objects.filter(github_is_sync=True) for settings in aca_settings: - print(f'Synching academy {settings.academy.name} organization users') + print(f"Synching academy {settings.academy.name} organization users") try: sync_organization_members(settings.academy.id) except Exception as e: - print(f'Error synching members for academy {settings.academy.id}: ' + str(e)) + print(f"Error synching members for academy {settings.academy.id}: " + str(e)) diff --git a/breathecode/authenticate/management/commands/sync_names.py b/breathecode/authenticate/management/commands/sync_names.py index 6fe95d680..ed32bca29 100644 --- a/breathecode/authenticate/management/commands/sync_names.py +++ b/breathecode/authenticate/management/commands/sync_names.py @@ -4,46 +4,48 @@ class Command(BaseCommand): - help = 'Delete expired temporal and login tokens' + help = "Delete expired temporal and login tokens" def handle(self, *args, **options): empty_profiles = ProfileAcademy.objects.filter( - Q(first_name__isnull=True) | Q(first_name=''), Q(user__first_name__isnull=False)) - print(f'Found {str(empty_profiles.count())} profiles out of sync') + Q(first_name__isnull=True) | Q(first_name=""), Q(user__first_name__isnull=False) + ) + print(f"Found {str(empty_profiles.count())} profiles out of sync") save = False for profile in empty_profiles: if profile.user is None: continue - if profile.first_name is None or profile.first_name == '': - if profile.user.first_name is not None and profile.user.first_name != '': + if profile.first_name is None or profile.first_name == "": + if profile.user.first_name is not None and profile.user.first_name != "": save = True profile.first_name = profile.user.first_name - if profile.last_name is None or profile.last_name == '': - if profile.user.last_name is not None and profile.user.last_name != '': + if profile.last_name is None or profile.last_name == "": + if profile.user.last_name is not None and profile.user.last_name != "": save = True profile.last_name = profile.user.last_name if save: profile.save() - profiles = ProfileAcademy.objects.filter(Q(first_name__isnull=False), - Q(user__first_name__isnull=True) | Q(user__first_name='')) - print(f'Found {str(profiles.count())} users out of sync') + profiles = ProfileAcademy.objects.filter( + Q(first_name__isnull=False), Q(user__first_name__isnull=True) | Q(user__first_name="") + ) + print(f"Found {str(profiles.count())} users out of sync") for p in profiles: if p.user is None: continue - if p.user.first_name is None or p.user.first_name == '': - if p.first_name is not None and p.first_name != '': + if p.user.first_name is None or p.user.first_name == "": + if p.first_name is not None and p.first_name != "": save = True p.user.first_name = p.first_name - if p.user.last_name is None or p.user.last_name == '': - if p.last_name is not None and p.last_name != '': + if p.user.last_name is None or p.user.last_name == "": + if p.last_name is not None and p.last_name != "": save = 
True p.user.last_name = p.last_name diff --git a/breathecode/authenticate/migrations/0001_initial.py b/breathecode/authenticate/migrations/0001_initial.py index 5d6316815..26c19aba0 100644 --- a/breathecode/authenticate/migrations/0001_initial.py +++ b/breathecode/authenticate/migrations/0001_initial.py @@ -15,23 +15,25 @@ class Migration(migrations.Migration): operations = [ migrations.CreateModel( - name='CredentialsGithub', + name="CredentialsGithub", fields=[ - ('github_id', models.IntegerField(primary_key=True, serialize=False)), - ('token', models.CharField(max_length=255)), - ('email', models.CharField(max_length=150, unique=True)), - ('avatar_url', models.CharField(max_length=255)), - ('name', models.CharField(max_length=150)), - ('blog', models.CharField(max_length=150)), - ('bio', models.CharField(max_length=255)), - ('company', models.CharField(max_length=150)), - ('twitter_username', models.CharField(blank=True, max_length=50, null=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('user', - models.OneToOneField(blank=True, - on_delete=django.db.models.deletion.CASCADE, - to=settings.AUTH_USER_MODEL)), + ("github_id", models.IntegerField(primary_key=True, serialize=False)), + ("token", models.CharField(max_length=255)), + ("email", models.CharField(max_length=150, unique=True)), + ("avatar_url", models.CharField(max_length=255)), + ("name", models.CharField(max_length=150)), + ("blog", models.CharField(max_length=150)), + ("bio", models.CharField(max_length=255)), + ("company", models.CharField(max_length=150)), + ("twitter_username", models.CharField(blank=True, max_length=50, null=True)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ( + "user", + models.OneToOneField( + blank=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL + ), + ), ], ), ] diff --git a/breathecode/authenticate/migrations/0002_credentialsquickbooks.py b/breathecode/authenticate/migrations/0002_credentialsquickbooks.py index 6ed4a7675..4d395aeac 100644 --- a/breathecode/authenticate/migrations/0002_credentialsquickbooks.py +++ b/breathecode/authenticate/migrations/0002_credentialsquickbooks.py @@ -9,21 +9,23 @@ class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ('authenticate', '0001_initial'), + ("authenticate", "0001_initial"), ] operations = [ migrations.CreateModel( - name='CredentialsQuickBooks', + name="CredentialsQuickBooks", fields=[ - ('quibooks_code', models.CharField(max_length=255, primary_key=True, serialize=False)), - ('quibooks_realmid', models.CharField(max_length=255)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('user', - models.OneToOneField(blank=True, - on_delete=django.db.models.deletion.CASCADE, - to=settings.AUTH_USER_MODEL)), + ("quibooks_code", models.CharField(max_length=255, primary_key=True, serialize=False)), + ("quibooks_realmid", models.CharField(max_length=255)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ( + "user", + models.OneToOneField( + blank=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL + ), + ), ], ), ] diff --git a/breathecode/authenticate/migrations/0003_auto_20200728_2129.py b/breathecode/authenticate/migrations/0003_auto_20200728_2129.py index 
db27a732d..f94522ab8 100644 --- a/breathecode/authenticate/migrations/0003_auto_20200728_2129.py +++ b/breathecode/authenticate/migrations/0003_auto_20200728_2129.py @@ -6,33 +6,33 @@ class Migration(migrations.Migration): dependencies = [ - ('authenticate', '0002_credentialsquickbooks'), + ("authenticate", "0002_credentialsquickbooks"), ] operations = [ migrations.AlterField( - model_name='credentialsgithub', - name='avatar_url', + model_name="credentialsgithub", + name="avatar_url", field=models.CharField(blank=True, max_length=255, null=True), ), migrations.AlterField( - model_name='credentialsgithub', - name='bio', + model_name="credentialsgithub", + name="bio", field=models.CharField(blank=True, max_length=255, null=True), ), migrations.AlterField( - model_name='credentialsgithub', - name='blog', + model_name="credentialsgithub", + name="blog", field=models.CharField(blank=True, max_length=150, null=True), ), migrations.AlterField( - model_name='credentialsgithub', - name='company', + model_name="credentialsgithub", + name="company", field=models.CharField(blank=True, max_length=150, null=True), ), migrations.AlterField( - model_name='credentialsgithub', - name='name', + model_name="credentialsgithub", + name="name", field=models.CharField(blank=True, max_length=150, null=True), ), ] diff --git a/breathecode/authenticate/migrations/0004_token.py b/breathecode/authenticate/migrations/0004_token.py index 8d1d5f33f..e25f88fc7 100644 --- a/breathecode/authenticate/migrations/0004_token.py +++ b/breathecode/authenticate/migrations/0004_token.py @@ -9,26 +9,30 @@ class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ('authenticate', '0003_auto_20200728_2129'), + ("authenticate", "0003_auto_20200728_2129"), ] operations = [ migrations.CreateModel( - name='Token', + name="Token", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('created', models.DateTimeField(auto_now_add=True, verbose_name='Created')), - ('key', models.CharField(db_index=True, max_length=40, unique=True)), - ('token_type', models.CharField(default='temporal', max_length=64)), - ('expires_at', models.CharField(max_length=64)), - ('user', - models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, - related_name='auth_token', - to=settings.AUTH_USER_MODEL, - verbose_name='User')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("created", models.DateTimeField(auto_now_add=True, verbose_name="Created")), + ("key", models.CharField(db_index=True, max_length=40, unique=True)), + ("token_type", models.CharField(default="temporal", max_length=64)), + ("expires_at", models.CharField(max_length=64)), + ( + "user", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="auth_token", + to=settings.AUTH_USER_MODEL, + verbose_name="User", + ), + ), ], options={ - 'unique_together': {('user', 'token_type')}, + "unique_together": {("user", "token_type")}, }, ), ] diff --git a/breathecode/authenticate/migrations/0005_auto_20200814_0428.py b/breathecode/authenticate/migrations/0005_auto_20200814_0428.py index c8b55f204..7b99aa560 100644 --- a/breathecode/authenticate/migrations/0005_auto_20200814_0428.py +++ b/breathecode/authenticate/migrations/0005_auto_20200814_0428.py @@ -7,13 +7,13 @@ class Migration(migrations.Migration): dependencies = [ - ('authenticate', '0004_token'), + ("authenticate", "0004_token"), ] operations = [ 
migrations.AlterField( - model_name='token', - name='expires_at', + model_name="token", + name="expires_at", field=models.DateTimeField(), ), ] diff --git a/breathecode/authenticate/migrations/0006_auto_20200907_2320.py b/breathecode/authenticate/migrations/0006_auto_20200907_2320.py index ffdaec540..0221ed2f9 100644 --- a/breathecode/authenticate/migrations/0006_auto_20200907_2320.py +++ b/breathecode/authenticate/migrations/0006_auto_20200907_2320.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('authenticate', '0005_auto_20200814_0428'), + ("authenticate", "0005_auto_20200814_0428"), ] operations = [ migrations.AlterField( - model_name='token', - name='expires_at', + model_name="token", + name="expires_at", field=models.DateTimeField(blank=True, default=None, null=True), ), ] diff --git a/breathecode/authenticate/migrations/0007_profile_userautentication.py b/breathecode/authenticate/migrations/0007_profile_userautentication.py index 27835a4fa..0ef43b6aa 100644 --- a/breathecode/authenticate/migrations/0007_profile_userautentication.py +++ b/breathecode/authenticate/migrations/0007_profile_userautentication.py @@ -10,31 +10,33 @@ class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ('auth', '0012_alter_user_first_name_max_length'), - ('authenticate', '0006_auto_20200907_2320'), + ("auth", "0012_alter_user_first_name_max_length"), + ("authenticate", "0006_auto_20200907_2320"), ] operations = [ migrations.CreateModel( - name='UserAutentication', + name="UserAutentication", fields=[], options={ - 'proxy': True, - 'indexes': [], - 'constraints': [], + "proxy": True, + "indexes": [], + "constraints": [], }, - bases=('auth.user', ), + bases=("auth.user",), managers=[ - ('objects', django.contrib.auth.models.UserManager()), + ("objects", django.contrib.auth.models.UserManager()), ], ), migrations.CreateModel( - name='Profile', + name="Profile", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('avatar_url', models.CharField(blank=True, default=None, max_length=255, null=True)), - ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, - to=settings.AUTH_USER_MODEL)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("avatar_url", models.CharField(blank=True, default=None, max_length=255, null=True)), + ( + "user", + models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL), + ), ], ), ] diff --git a/breathecode/authenticate/migrations/0008_auto_20201005_2253.py b/breathecode/authenticate/migrations/0008_auto_20201005_2253.py index 90aea9bc7..5d48343d1 100644 --- a/breathecode/authenticate/migrations/0008_auto_20201005_2253.py +++ b/breathecode/authenticate/migrations/0008_auto_20201005_2253.py @@ -6,23 +6,23 @@ class Migration(migrations.Migration): dependencies = [ - ('authenticate', '0007_profile_userautentication'), + ("authenticate", "0007_profile_userautentication"), ] operations = [ migrations.AddField( - model_name='profile', - name='bio', + model_name="profile", + name="bio", field=models.CharField(blank=True, max_length=255, null=True), ), migrations.AddField( - model_name='profile', - name='blog', + model_name="profile", + name="blog", field=models.CharField(blank=True, max_length=150, null=True), ), migrations.AddField( - model_name='profile', - name='twitter_username', + model_name="profile", + name="twitter_username", 
field=models.CharField(blank=True, max_length=50, null=True), ), ] diff --git a/breathecode/authenticate/migrations/0009_auto_20201006_0022.py b/breathecode/authenticate/migrations/0009_auto_20201006_0022.py index 0107fa488..74b68b17f 100644 --- a/breathecode/authenticate/migrations/0009_auto_20201006_0022.py +++ b/breathecode/authenticate/migrations/0009_auto_20201006_0022.py @@ -8,12 +8,12 @@ class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ('authenticate', '0008_auto_20201005_2253'), + ("authenticate", "0008_auto_20201005_2253"), ] operations = [ migrations.AlterUniqueTogether( - name='token', - unique_together={('user', 'key')}, + name="token", + unique_together={("user", "key")}, ), ] diff --git a/breathecode/authenticate/migrations/0010_auto_20201105_0531.py b/breathecode/authenticate/migrations/0010_auto_20201105_0531.py index e41218fb4..f7f4ee43f 100644 --- a/breathecode/authenticate/migrations/0010_auto_20201105_0531.py +++ b/breathecode/authenticate/migrations/0010_auto_20201105_0531.py @@ -9,71 +9,75 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0011_auto_20201006_0058'), - ('auth', '0012_alter_user_first_name_max_length'), + ("admissions", "0011_auto_20201006_0058"), + ("auth", "0012_alter_user_first_name_max_length"), migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ('authenticate', '0009_auto_20201006_0022'), + ("authenticate", "0009_auto_20201006_0022"), ] operations = [ migrations.CreateModel( - name='CredentialsSlack', + name="CredentialsSlack", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('token', models.CharField(max_length=255)), - ('bot_user_id', models.CharField(max_length=50)), - ('app_id', models.CharField(max_length=50)), - ('authed_user', models.CharField(max_length=50)), - ('team_id', models.CharField(max_length=50)), - ('team_name', models.CharField(max_length=100)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('user', - models.OneToOneField(blank=True, - on_delete=django.db.models.deletion.CASCADE, - to=settings.AUTH_USER_MODEL)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("token", models.CharField(max_length=255)), + ("bot_user_id", models.CharField(max_length=50)), + ("app_id", models.CharField(max_length=50)), + ("authed_user", models.CharField(max_length=50)), + ("team_id", models.CharField(max_length=50)), + ("team_name", models.CharField(max_length=100)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ( + "user", + models.OneToOneField( + blank=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL + ), + ), ], ), migrations.CreateModel( - name='ProfileAcademy', + name="ProfileAcademy", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('academy', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='admissions.academy')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("academy", 
models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="admissions.academy")), ], ), migrations.CreateModel( - name='Role', + name="Role", fields=[ - ('slug', models.SlugField(max_length=25, primary_key=True, serialize=False)), - ('name', models.CharField(blank=True, default=None, max_length=255, null=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), + ("slug", models.SlugField(max_length=25, primary_key=True, serialize=False)), + ("name", models.CharField(blank=True, default=None, max_length=255, null=True)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), ], ), - migrations.DeleteModel(name='UserAutentication', ), + migrations.DeleteModel( + name="UserAutentication", + ), migrations.CreateModel( - name='UserProxy', + name="UserProxy", fields=[], options={ - 'proxy': True, - 'indexes': [], - 'constraints': [], + "proxy": True, + "indexes": [], + "constraints": [], }, - bases=('auth.user', ), + bases=("auth.user",), managers=[ - ('objects', django.contrib.auth.models.UserManager()), + ("objects", django.contrib.auth.models.UserManager()), ], ), migrations.AddField( - model_name='profileacademy', - name='role', - field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='authenticate.role'), + model_name="profileacademy", + name="role", + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="authenticate.role"), ), migrations.AddField( - model_name='profileacademy', - name='user', + model_name="profileacademy", + name="user", field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL), ), ] diff --git a/breathecode/authenticate/migrations/0011_credentialsfacebook.py b/breathecode/authenticate/migrations/0011_credentialsfacebook.py index 06ba1b2bd..632c41907 100644 --- a/breathecode/authenticate/migrations/0011_credentialsfacebook.py +++ b/breathecode/authenticate/migrations/0011_credentialsfacebook.py @@ -9,26 +9,31 @@ class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ('admissions', '0011_auto_20201006_0058'), - ('authenticate', '0010_auto_20201105_0531'), + ("admissions", "0011_auto_20201006_0058"), + ("authenticate", "0010_auto_20201105_0531"), ] operations = [ migrations.CreateModel( - name='CredentialsFacebook', + name="CredentialsFacebook", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('token', models.CharField(max_length=255)), - ('expires_at', models.DateTimeField()), - ('facebook_id', models.BigIntegerField(default=None, null=True)), - ('email', models.CharField(default=None, max_length=150, null=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('academy', - models.OneToOneField(blank=True, on_delete=django.db.models.deletion.CASCADE, - to='admissions.academy')), - ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, - to=settings.AUTH_USER_MODEL)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("token", models.CharField(max_length=255)), + ("expires_at", models.DateTimeField()), + ("facebook_id", models.BigIntegerField(default=None, null=True)), + ("email", models.CharField(default=None, max_length=150, null=True)), + ("created_at", models.DateTimeField(auto_now_add=True)), + 
("updated_at", models.DateTimeField(auto_now=True)), + ( + "academy", + models.OneToOneField( + blank=True, on_delete=django.db.models.deletion.CASCADE, to="admissions.academy" + ), + ), + ( + "user", + models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL), + ), ], ), ] diff --git a/breathecode/authenticate/migrations/0012_auto_20201110_0727.py b/breathecode/authenticate/migrations/0012_auto_20201110_0727.py index 91d922372..c716fb4a9 100644 --- a/breathecode/authenticate/migrations/0012_auto_20201110_0727.py +++ b/breathecode/authenticate/migrations/0012_auto_20201110_0727.py @@ -7,36 +7,38 @@ class Migration(migrations.Migration): dependencies = [ - ('authenticate', '0011_credentialsfacebook'), + ("authenticate", "0011_credentialsfacebook"), ] operations = [ migrations.AddField( - model_name='profile', - name='github_username', + model_name="profile", + name="github_username", field=models.CharField(blank=True, max_length=50, null=True), ), migrations.AddField( - model_name='profile', - name='linkedin_url', + model_name="profile", + name="linkedin_url", field=models.CharField(blank=True, max_length=50, null=True), ), migrations.AddField( - model_name='profile', - name='phone', + model_name="profile", + name="phone", field=models.CharField( blank=True, - default='', + default="", max_length=17, validators=[ django.core.validators.RegexValidator( message="Phone number must be entered in the format: '+999999999'. Up to 15 digits allowed.", - regex='^\\+?1?\\d{9,15}$') - ]), + regex="^\\+?1?\\d{9,15}$", + ) + ], + ), ), migrations.AddField( - model_name='profile', - name='portfolio_url', + model_name="profile", + name="portfolio_url", field=models.CharField(blank=True, max_length=50, null=True), ), ] diff --git a/breathecode/authenticate/migrations/0013_auto_20201216_0512.py b/breathecode/authenticate/migrations/0013_auto_20201216_0512.py index 321075ec6..e29533c13 100644 --- a/breathecode/authenticate/migrations/0013_auto_20201216_0512.py +++ b/breathecode/authenticate/migrations/0013_auto_20201216_0512.py @@ -7,48 +7,50 @@ class Migration(migrations.Migration): dependencies = [ - ('authenticate', '0012_auto_20201110_0727'), + ("authenticate", "0012_auto_20201110_0727"), ] operations = [ migrations.CreateModel( - name='Capability', + name="Capability", fields=[ - ('slug', models.SlugField(max_length=25, primary_key=True, serialize=False)), - ('description', models.CharField(blank=True, default=None, max_length=255, null=True)), + ("slug", models.SlugField(max_length=25, primary_key=True, serialize=False)), + ("description", models.CharField(blank=True, default=None, max_length=255, null=True)), ], ), migrations.AddField( - model_name='profileacademy', - name='address', + model_name="profileacademy", + name="address", field=models.CharField(default=None, max_length=255, null=True), ), migrations.AddField( - model_name='profileacademy', - name='first_name', + model_name="profileacademy", + name="first_name", field=models.CharField(default=None, max_length=100, null=True), ), migrations.AddField( - model_name='profileacademy', - name='last_name', + model_name="profileacademy", + name="last_name", field=models.CharField(default=None, max_length=100, null=True), ), migrations.AddField( - model_name='profileacademy', - name='phone', + model_name="profileacademy", + name="phone", field=models.CharField( blank=True, - default='', + default="", max_length=17, validators=[ django.core.validators.RegexValidator( message="Phone number must be entered in the 
format: '+999999999'. Up to 15 digits allowed.", - regex='^\\+?1?\\d{9,15}$') - ]), + regex="^\\+?1?\\d{9,15}$", + ) + ], + ), ), migrations.AddField( - model_name='role', - name='capabilities', - field=models.ManyToManyField(to='authenticate.Capability'), + model_name="role", + name="capabilities", + field=models.ManyToManyField(to="authenticate.Capability"), ), ] diff --git a/breathecode/authenticate/migrations/0014_auto_20201218_0534.py b/breathecode/authenticate/migrations/0014_auto_20201218_0534.py index 88b39142a..c95ac5f88 100644 --- a/breathecode/authenticate/migrations/0014_auto_20201218_0534.py +++ b/breathecode/authenticate/migrations/0014_auto_20201218_0534.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('authenticate', '0013_auto_20201216_0512'), + ("authenticate", "0013_auto_20201216_0512"), ] operations = [ migrations.AlterField( - model_name='profileacademy', - name='address', + model_name="profileacademy", + name="address", field=models.CharField(blank=True, default=None, max_length=255, null=True), ), ] diff --git a/breathecode/authenticate/migrations/0015_profile_show_tutorial.py b/breathecode/authenticate/migrations/0015_profile_show_tutorial.py index 2a5b60f24..fe81f9cb2 100644 --- a/breathecode/authenticate/migrations/0015_profile_show_tutorial.py +++ b/breathecode/authenticate/migrations/0015_profile_show_tutorial.py @@ -6,14 +6,15 @@ class Migration(migrations.Migration): dependencies = [ - ('authenticate', '0014_auto_20201218_0534'), + ("authenticate", "0014_auto_20201218_0534"), ] operations = [ migrations.AddField( - model_name='profile', - name='show_tutorial', - field=models.BooleanField(default=True, - help_text='Set true if you want to show the tutorial on the user UI/UX'), + model_name="profile", + name="show_tutorial", + field=models.BooleanField( + default=True, help_text="Set true if you want to show the tutorial on the user UI/UX" + ), ), ] diff --git a/breathecode/authenticate/migrations/0016_auto_20210113_0359.py b/breathecode/authenticate/migrations/0016_auto_20210113_0359.py index 3db0d4532..cc103052d 100644 --- a/breathecode/authenticate/migrations/0016_auto_20210113_0359.py +++ b/breathecode/authenticate/migrations/0016_auto_20210113_0359.py @@ -10,72 +10,79 @@ class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ('admissions', '0014_auto_20201218_0534'), - ('authenticate', '0015_profile_show_tutorial'), + ("admissions", "0014_auto_20201218_0534"), + ("authenticate", "0015_profile_show_tutorial"), ] operations = [ migrations.AddField( - model_name='profileacademy', - name='email', + model_name="profileacademy", + name="email", field=models.CharField(default=None, max_length=150, null=True), ), migrations.AddField( - model_name='profileacademy', - name='status', - field=models.CharField(choices=[('INVITED', 'Invited'), ('ACTIVE', 'Active')], - default='INVITED', - max_length=15), + model_name="profileacademy", + name="status", + field=models.CharField( + choices=[("INVITED", "Invited"), ("ACTIVE", "Active")], default="INVITED", max_length=15 + ), ), migrations.AlterField( - model_name='profileacademy', - name='user', - field=models.ForeignKey(default=None, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to=settings.AUTH_USER_MODEL), + model_name="profileacademy", + name="user", + field=models.ForeignKey( + default=None, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL + ), ), migrations.CreateModel( - 
name='UserInvite', + name="UserInvite", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('email', models.CharField(default=None, max_length=150, null=True)), - ('first_name', models.CharField(default=None, max_length=100, null=True)), - ('last_name', models.CharField(default=None, max_length=100, null=True)), - ('token', models.CharField(max_length=255)), - ('status', - models.CharField(choices=[('PENDING', 'Pending'), ('ACCEPTED', 'Accepted')], - default='PENDING', - max_length=15)), - ('phone', - models.CharField( - blank=True, - default='', - max_length=17, - validators=[ - django.core.validators.RegexValidator( - message= - "Phone number must be entered in the format: '+999999999'. Up to 15 digits allowed.", - regex='^\\+?1?\\d{9,15}$') - ])), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('academy', - models.ForeignKey(default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='admissions.academy')), - ('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), - ('cohort', - models.ForeignKey(default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='admissions.cohort')), - ('role', - models.ForeignKey(default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='authenticate.role')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("email", models.CharField(default=None, max_length=150, null=True)), + ("first_name", models.CharField(default=None, max_length=100, null=True)), + ("last_name", models.CharField(default=None, max_length=100, null=True)), + ("token", models.CharField(max_length=255)), + ( + "status", + models.CharField( + choices=[("PENDING", "Pending"), ("ACCEPTED", "Accepted")], default="PENDING", max_length=15 + ), + ), + ( + "phone", + models.CharField( + blank=True, + default="", + max_length=17, + validators=[ + django.core.validators.RegexValidator( + message="Phone number must be entered in the format: '+999999999'. 
Up to 15 digits allowed.", + regex="^\\+?1?\\d{9,15}$", + ) + ], + ), + ), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ( + "academy", + models.ForeignKey( + default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to="admissions.academy" + ), + ), + ("author", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), + ( + "cohort", + models.ForeignKey( + default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to="admissions.cohort" + ), + ), + ( + "role", + models.ForeignKey( + default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to="authenticate.role" + ), + ), ], ), ] diff --git a/breathecode/authenticate/migrations/0017_auto_20210113_0644.py b/breathecode/authenticate/migrations/0017_auto_20210113_0644.py index ec2c30a0e..af375bc45 100644 --- a/breathecode/authenticate/migrations/0017_auto_20210113_0644.py +++ b/breathecode/authenticate/migrations/0017_auto_20210113_0644.py @@ -7,41 +7,39 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0014_auto_20201218_0534'), - ('authenticate', '0016_auto_20210113_0359'), + ("admissions", "0014_auto_20201218_0534"), + ("authenticate", "0016_auto_20210113_0359"), ] operations = [ migrations.AlterField( - model_name='userinvite', - name='academy', - field=models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='admissions.academy'), + model_name="userinvite", + name="academy", + field=models.ForeignKey( + blank=True, + default=None, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="admissions.academy", + ), ), migrations.AlterField( - model_name='userinvite', - name='cohort', - field=models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='admissions.cohort'), + model_name="userinvite", + name="cohort", + field=models.ForeignKey( + blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to="admissions.cohort" + ), ), migrations.AlterField( - model_name='userinvite', - name='role', - field=models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='authenticate.role'), + model_name="userinvite", + name="role", + field=models.ForeignKey( + blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to="authenticate.role" + ), ), migrations.AlterField( - model_name='userinvite', - name='token', + model_name="userinvite", + name="token", field=models.CharField(max_length=255, unique=True), ), ] diff --git a/breathecode/authenticate/migrations/0018_credentialsgithub_username.py b/breathecode/authenticate/migrations/0018_credentialsgithub_username.py index 8c5bd1672..8cca44df9 100644 --- a/breathecode/authenticate/migrations/0018_credentialsgithub_username.py +++ b/breathecode/authenticate/migrations/0018_credentialsgithub_username.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('authenticate', '0017_auto_20210113_0644'), + ("authenticate", "0017_auto_20210113_0644"), ] operations = [ migrations.AddField( - model_name='credentialsgithub', - name='username', + model_name="credentialsgithub", + name="username", field=models.CharField(blank=True, max_length=35, null=True), ), ] diff --git a/breathecode/authenticate/migrations/0019_auto_20210121_2142.py b/breathecode/authenticate/migrations/0019_auto_20210121_2142.py index 
5ba1b8dd0..f3642c490 100644 --- a/breathecode/authenticate/migrations/0019_auto_20210121_2142.py +++ b/breathecode/authenticate/migrations/0019_auto_20210121_2142.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('authenticate', '0018_credentialsgithub_username'), + ("authenticate", "0018_credentialsgithub_username"), ] operations = [ migrations.AlterField( - model_name='capability', - name='slug', + model_name="capability", + name="slug", field=models.SlugField(max_length=40, primary_key=True, serialize=False), ), ] diff --git a/breathecode/authenticate/migrations/0020_userinvite_sent_at.py b/breathecode/authenticate/migrations/0020_userinvite_sent_at.py index 47b103c18..be9cb100f 100644 --- a/breathecode/authenticate/migrations/0020_userinvite_sent_at.py +++ b/breathecode/authenticate/migrations/0020_userinvite_sent_at.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('authenticate', '0019_auto_20210121_2142'), + ("authenticate", "0019_auto_20210121_2142"), ] operations = [ migrations.AddField( - model_name='userinvite', - name='sent_at', + model_name="userinvite", + name="sent_at", field=models.DateTimeField(blank=True, default=None, null=True), ), ] diff --git a/breathecode/authenticate/migrations/0021_deviceid.py b/breathecode/authenticate/migrations/0021_deviceid.py index 92c45bcf8..7a18ab660 100644 --- a/breathecode/authenticate/migrations/0021_deviceid.py +++ b/breathecode/authenticate/migrations/0021_deviceid.py @@ -6,16 +6,16 @@ class Migration(migrations.Migration): dependencies = [ - ('authenticate', '0020_userinvite_sent_at'), + ("authenticate", "0020_userinvite_sent_at"), ] operations = [ migrations.CreateModel( - name='DeviceId', + name="DeviceId", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('name', models.CharField(max_length=40)), - ('key', models.CharField(max_length=64)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("name", models.CharField(max_length=40)), + ("key", models.CharField(max_length=64)), ], ), ] diff --git a/breathecode/authenticate/migrations/0022_credentialsgoogle.py b/breathecode/authenticate/migrations/0022_credentialsgoogle.py index 343af840e..f08ee51e0 100644 --- a/breathecode/authenticate/migrations/0022_credentialsgoogle.py +++ b/breathecode/authenticate/migrations/0022_credentialsgoogle.py @@ -9,21 +9,23 @@ class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ('authenticate', '0021_deviceid'), + ("authenticate", "0021_deviceid"), ] operations = [ migrations.CreateModel( - name='CredentialsGoogle', + name="CredentialsGoogle", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('token', models.CharField(max_length=255)), - ('refresh_token', models.CharField(max_length=255)), - ('expires_at', models.DateTimeField()), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, - to=settings.AUTH_USER_MODEL)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("token", models.CharField(max_length=255)), + ("refresh_token", models.CharField(max_length=255)), + ("expires_at", models.DateTimeField()), + ("created_at", models.DateTimeField(auto_now_add=True)), + 
("updated_at", models.DateTimeField(auto_now=True)), + ( + "user", + models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL), + ), ], ), ] diff --git a/breathecode/authenticate/migrations/0023_academyproxy.py b/breathecode/authenticate/migrations/0023_academyproxy.py index 7a0dabc34..08e32ff74 100644 --- a/breathecode/authenticate/migrations/0023_academyproxy.py +++ b/breathecode/authenticate/migrations/0023_academyproxy.py @@ -5,19 +5,19 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0023_auto_20210812_2153'), - ('authenticate', '0022_credentialsgoogle'), + ("admissions", "0023_auto_20210812_2153"), + ("authenticate", "0022_credentialsgoogle"), ] operations = [ migrations.CreateModel( - name='AcademyProxy', + name="AcademyProxy", fields=[], options={ - 'proxy': True, - 'indexes': [], - 'constraints': [], + "proxy": True, + "indexes": [], + "constraints": [], }, - bases=('admissions.academy', ), + bases=("admissions.academy",), ), ] diff --git a/breathecode/authenticate/migrations/0024_auto_20220124_2225.py b/breathecode/authenticate/migrations/0024_auto_20220124_2225.py index 885fb01be..fbecfb78f 100644 --- a/breathecode/authenticate/migrations/0024_auto_20220124_2225.py +++ b/breathecode/authenticate/migrations/0024_auto_20220124_2225.py @@ -9,24 +9,24 @@ class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ('authenticate', '0023_academyproxy'), + ("authenticate", "0023_academyproxy"), ] operations = [ migrations.AlterField( - model_name='userinvite', - name='author', - field=models.ForeignKey(default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to=settings.AUTH_USER_MODEL), + model_name="userinvite", + name="author", + field=models.ForeignKey( + default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL + ), ), migrations.AlterField( - model_name='userinvite', - name='status', - field=models.CharField(choices=[('PENDING', 'Pending'), ('ACCEPTED', 'Accepted'), - ('WAITING_LIST', 'Waiting list')], - default='PENDING', - max_length=15), + model_name="userinvite", + name="status", + field=models.CharField( + choices=[("PENDING", "Pending"), ("ACCEPTED", "Accepted"), ("WAITING_LIST", "Waiting list")], + default="PENDING", + max_length=15, + ), ), ] diff --git a/breathecode/authenticate/migrations/0025_alter_userinvite_status.py b/breathecode/authenticate/migrations/0025_alter_userinvite_status.py index 2fe81bdc5..ee2e2ba42 100644 --- a/breathecode/authenticate/migrations/0025_alter_userinvite_status.py +++ b/breathecode/authenticate/migrations/0025_alter_userinvite_status.py @@ -6,16 +6,22 @@ class Migration(migrations.Migration): dependencies = [ - ('authenticate', '0024_auto_20220124_2225'), + ("authenticate", "0024_auto_20220124_2225"), ] operations = [ migrations.AlterField( - model_name='userinvite', - name='status', - field=models.CharField(choices=[('PENDING', 'Pending'), ('REJECTED', 'Rejected'), ('ACCEPTED', 'Accepted'), - ('WAITING_LIST', 'Waiting list')], - default='PENDING', - max_length=15), + model_name="userinvite", + name="status", + field=models.CharField( + choices=[ + ("PENDING", "Pending"), + ("REJECTED", "Rejected"), + ("ACCEPTED", "Accepted"), + ("WAITING_LIST", "Waiting list"), + ], + default="PENDING", + max_length=15, + ), ), ] diff --git a/breathecode/authenticate/migrations/0026_gitpoduser.py b/breathecode/authenticate/migrations/0026_gitpoduser.py index 
c5693fac6..a92bb1548 100644 --- a/breathecode/authenticate/migrations/0026_gitpoduser.py +++ b/breathecode/authenticate/migrations/0026_gitpoduser.py @@ -8,41 +8,51 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0040_auto_20220510_2208'), + ("admissions", "0040_auto_20220510_2208"), migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ('authenticate', '0025_alter_userinvite_status'), + ("authenticate", "0025_alter_userinvite_status"), ] operations = [ migrations.CreateModel( - name='GitpodUser', + name="GitpodUser", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('github_username', models.CharField(max_length=40)), - ('assignee_id', models.CharField(max_length=64)), - ('position_in_gitpod_team', models.PositiveSmallIntegerField()), - ('delete_status', models.TextField(blank=True, default=None, null=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('expires_at', - models.DateTimeField( - blank=True, - default=None, - help_text= - 'If a gitpod user is not connected to a real user and academy in the database, it will be deleted ASAP', - null=True)), - ('academy', - models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to='admissions.academy')), - ('user', - models.OneToOneField(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to=settings.AUTH_USER_MODEL)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("github_username", models.CharField(max_length=40)), + ("assignee_id", models.CharField(max_length=64)), + ("position_in_gitpod_team", models.PositiveSmallIntegerField()), + ("delete_status", models.TextField(blank=True, default=None, null=True)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ( + "expires_at", + models.DateTimeField( + blank=True, + default=None, + help_text="If a gitpod user is not connected to a real user and academy in the database, it will be deleted ASAP", + null=True, + ), + ), + ( + "academy", + models.ForeignKey( + blank=True, + default=None, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to="admissions.academy", + ), + ), + ( + "user", + models.OneToOneField( + blank=True, + default=None, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to=settings.AUTH_USER_MODEL, + ), + ), ], ), ] diff --git a/breathecode/authenticate/migrations/0027_gitpoduser_target_cohort.py b/breathecode/authenticate/migrations/0027_gitpoduser_target_cohort.py index 40303db65..215506e63 100644 --- a/breathecode/authenticate/migrations/0027_gitpoduser_target_cohort.py +++ b/breathecode/authenticate/migrations/0027_gitpoduser_target_cohort.py @@ -7,18 +7,20 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0040_auto_20220510_2208'), - ('authenticate', '0026_gitpoduser'), + ("admissions", "0040_auto_20220510_2208"), + ("authenticate", "0026_gitpoduser"), ] operations = [ migrations.AddField( - model_name='gitpoduser', - name='target_cohort', - field=models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to='admissions.cohort'), + model_name="gitpoduser", + name="target_cohort", + field=models.ForeignKey( + blank=True, + default=None, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + 
to="admissions.cohort", + ), ), ] diff --git a/breathecode/authenticate/migrations/0028_auto_20220704_0322.py b/breathecode/authenticate/migrations/0028_auto_20220704_0322.py index 47794e9d9..d0b267527 100644 --- a/breathecode/authenticate/migrations/0028_auto_20220704_0322.py +++ b/breathecode/authenticate/migrations/0028_auto_20220704_0322.py @@ -6,20 +6,20 @@ class Migration(migrations.Migration): dependencies = [ - ('authenticate', '0027_gitpoduser_target_cohort'), + ("authenticate", "0027_gitpoduser_target_cohort"), ] operations = [ migrations.AddField( - model_name='userinvite', - name='process_message', - field=models.CharField(default='', max_length=150), + model_name="userinvite", + name="process_message", + field=models.CharField(default="", max_length=150), ), migrations.AddField( - model_name='userinvite', - name='process_status', - field=models.CharField(choices=[('PENDING', 'Pending'), ('DONE', 'Done'), ('ERROR', 'Error')], - default='PENDING', - max_length=7), + model_name="userinvite", + name="process_status", + field=models.CharField( + choices=[("PENDING", "Pending"), ("DONE", "Done"), ("ERROR", "Error")], default="PENDING", max_length=7 + ), ), ] diff --git a/breathecode/authenticate/migrations/0029_usersetting.py b/breathecode/authenticate/migrations/0029_usersetting.py index 87ec70915..a815bba80 100644 --- a/breathecode/authenticate/migrations/0029_usersetting.py +++ b/breathecode/authenticate/migrations/0029_usersetting.py @@ -7,18 +7,22 @@ class Migration(migrations.Migration): dependencies = [ - ('authenticate', '0028_auto_20220704_0322'), + ("authenticate", "0028_auto_20220704_0322"), ] operations = [ migrations.CreateModel( - name='UserSetting', + name="UserSetting", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('lang', - models.CharField(default='en', - max_length=5, - validators=[breathecode.utils.validators.language.validate_language_code])), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "lang", + models.CharField( + default="en", + max_length=5, + validators=[breathecode.utils.validators.language.validate_language_code], + ), + ), ], ), ] diff --git a/breathecode/authenticate/migrations/0030_auto_20221129_2354.py b/breathecode/authenticate/migrations/0030_auto_20221129_2354.py index db752c2af..12b326ae5 100644 --- a/breathecode/authenticate/migrations/0030_auto_20221129_2354.py +++ b/breathecode/authenticate/migrations/0030_auto_20221129_2354.py @@ -8,25 +8,24 @@ class Migration(migrations.Migration): dependencies = [ - ('payments', '0001_initial'), + ("payments", "0001_initial"), migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ('authenticate', '0029_usersetting'), + ("authenticate", "0029_usersetting"), ] operations = [ migrations.AddField( - model_name='usersetting', - name='main_currency', - field=models.ForeignKey(blank=True, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to='payments.currency'), + model_name="usersetting", + name="main_currency", + field=models.ForeignKey( + blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to="payments.currency" + ), ), migrations.AddField( - model_name='usersetting', - name='user', - field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, - related_name='settings', - to=settings.AUTH_USER_MODEL), + model_name="usersetting", + name="user", + field=models.OneToOneField( + on_delete=django.db.models.deletion.CASCADE, 
related_name="settings", to=settings.AUTH_USER_MODEL + ), ), ] diff --git a/breathecode/authenticate/migrations/0031_userinvite_syllabus.py b/breathecode/authenticate/migrations/0031_userinvite_syllabus.py index 234ed7507..3485b09a3 100644 --- a/breathecode/authenticate/migrations/0031_userinvite_syllabus.py +++ b/breathecode/authenticate/migrations/0031_userinvite_syllabus.py @@ -7,18 +7,20 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0048_academy_main_currency'), - ('authenticate', '0030_auto_20221129_2354'), + ("admissions", "0048_academy_main_currency"), + ("authenticate", "0030_auto_20221129_2354"), ] operations = [ migrations.AddField( - model_name='userinvite', - name='syllabus', - field=models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='admissions.syllabus'), + model_name="userinvite", + name="syllabus", + field=models.ForeignKey( + blank=True, + default=None, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="admissions.syllabus", + ), ), ] diff --git a/breathecode/authenticate/migrations/0032_academyauthsettings_githubacademyuser.py b/breathecode/authenticate/migrations/0032_academyauthsettings_githubacademyuser.py index 6271b1418..be833e115 100644 --- a/breathecode/authenticate/migrations/0032_academyauthsettings_githubacademyuser.py +++ b/breathecode/authenticate/migrations/0032_academyauthsettings_githubacademyuser.py @@ -8,61 +8,89 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0054_cohortuser_history_log'), + ("admissions", "0054_cohortuser_history_log"), migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ('authenticate', '0031_userinvite_syllabus'), + ("authenticate", "0031_userinvite_syllabus"), ] operations = [ migrations.CreateModel( - name='GithubAcademyUser', + name="GithubAcademyUser", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('username', - models.SlugField(default=None, - help_text='Only used when the username has not been found on 4Geeks', - max_length=40, - null=True)), - ('storage_status', - models.CharField(choices=[('PENDING', 'Pending'), ('SYNCHED', 'Synched'), ('ERROR', 'Error'), - ('UNKNOWN', 'Unknown')], - default='PENDING', - max_length=20)), - ('storage_action', - models.CharField(choices=[('ADD', 'Add'), ('DELETE', 'Delete'), ('INVITE', 'Invite'), - ('IGNORE', 'Ignore')], - default='ADD', - max_length=20)), - ('storage_log', models.JSONField(blank=True, default=None, null=True)), - ('storage_synch_at', models.DateTimeField()), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('academy', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='admissions.academy')), - ('user', - models.ForeignKey(default=None, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to=settings.AUTH_USER_MODEL)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "username", + models.SlugField( + default=None, + help_text="Only used when the username has not been found on 4Geeks", + max_length=40, + null=True, + ), + ), + ( + "storage_status", + models.CharField( + choices=[ + ("PENDING", "Pending"), + ("SYNCHED", "Synched"), + ("ERROR", "Error"), + ("UNKNOWN", "Unknown"), + ], + default="PENDING", + max_length=20, + ), + ), + ( + "storage_action", + models.CharField( + choices=[("ADD", "Add"), ("DELETE", "Delete"), 
("INVITE", "Invite"), ("IGNORE", "Ignore")], + default="ADD", + max_length=20, + ), + ), + ("storage_log", models.JSONField(blank=True, default=None, null=True)), + ("storage_synch_at", models.DateTimeField()), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("academy", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="admissions.academy")), + ( + "user", + models.ForeignKey( + default=None, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to=settings.AUTH_USER_MODEL, + ), + ), ], ), migrations.CreateModel( - name='AcademyAuthSettings', + name="AcademyAuthSettings", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('github_username', models.SlugField(max_length=40, unique=True)), - ('github_default_team_ids', - models.SlugField( - default='', - help_text='User will be invited to this github team ID when joining the github organization', - max_length=40)), - ('github_is_sync', - models.BooleanField(default=False, help_text='If true, will try synching every few hours')), - ('academy', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='admissions.academy')), - ('github_owner', - models.ForeignKey(default=None, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to=settings.AUTH_USER_MODEL)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("github_username", models.SlugField(max_length=40, unique=True)), + ( + "github_default_team_ids", + models.SlugField( + default="", + help_text="User will be invited to this github team ID when joining the github organization", + max_length=40, + ), + ), + ( + "github_is_sync", + models.BooleanField(default=False, help_text="If true, will try synching every few hours"), + ), + ("academy", models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to="admissions.academy")), + ( + "github_owner", + models.ForeignKey( + default=None, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to=settings.AUTH_USER_MODEL, + ), + ), ], ), ] diff --git a/breathecode/authenticate/migrations/0033_auto_20230303_0306.py b/breathecode/authenticate/migrations/0033_auto_20230303_0306.py index 7d92d4b0c..3b6102e3f 100644 --- a/breathecode/authenticate/migrations/0033_auto_20230303_0306.py +++ b/breathecode/authenticate/migrations/0033_auto_20230303_0306.py @@ -9,45 +9,50 @@ class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ('authenticate', '0032_academyauthsettings_githubacademyuser'), + ("authenticate", "0032_academyauthsettings_githubacademyuser"), ] operations = [ migrations.AlterField( - model_name='academyauthsettings', - name='github_default_team_ids', + model_name="academyauthsettings", + name="github_default_team_ids", field=models.SlugField( blank=True, - default='', - help_text='User will be invited to this github team ID when joining the github organization', - max_length=40), + default="", + help_text="User will be invited to this github team ID when joining the github organization", + max_length=40, + ), ), migrations.AlterField( - model_name='academyauthsettings', - name='github_owner', - field=models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to=settings.AUTH_USER_MODEL), + model_name="academyauthsettings", + name="github_owner", + field=models.ForeignKey( + blank=True, + 
default=None, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to=settings.AUTH_USER_MODEL, + ), ), migrations.AlterField( - model_name='academyauthsettings', - name='github_username', + model_name="academyauthsettings", + name="github_username", field=models.SlugField(blank=True, max_length=40), ), migrations.AlterField( - model_name='githubacademyuser', - name='storage_synch_at', + model_name="githubacademyuser", + name="storage_synch_at", field=models.DateTimeField(blank=True, default=None, null=True), ), migrations.AlterField( - model_name='githubacademyuser', - name='username', - field=models.SlugField(blank=True, - default=None, - help_text='Only used when the username has not been found on 4Geeks', - max_length=40, - null=True), + model_name="githubacademyuser", + name="username", + field=models.SlugField( + blank=True, + default=None, + help_text="Only used when the username has not been found on 4Geeks", + max_length=40, + null=True, + ), ), ] diff --git a/breathecode/authenticate/migrations/0034_academyauthsettings_github_error_log.py b/breathecode/authenticate/migrations/0034_academyauthsettings_github_error_log.py index 0edc747b6..e8ba0d7e0 100644 --- a/breathecode/authenticate/migrations/0034_academyauthsettings_github_error_log.py +++ b/breathecode/authenticate/migrations/0034_academyauthsettings_github_error_log.py @@ -6,16 +6,15 @@ class Migration(migrations.Migration): dependencies = [ - ('authenticate', '0033_auto_20230303_0306'), + ("authenticate", "0033_auto_20230303_0306"), ] operations = [ migrations.AddField( - model_name='academyauthsettings', - name='github_error_log', - field=models.JSONField(blank=True, - default=None, - help_text='Error trace log for github API communication', - null=True), + model_name="academyauthsettings", + name="github_error_log", + field=models.JSONField( + blank=True, default=None, help_text="Error trace log for github API communication", null=True + ), ), ] diff --git a/breathecode/authenticate/migrations/0035_alter_academyauthsettings_github_default_team_ids.py b/breathecode/authenticate/migrations/0035_alter_academyauthsettings_github_default_team_ids.py index 5b0d04d48..82df514d0 100644 --- a/breathecode/authenticate/migrations/0035_alter_academyauthsettings_github_default_team_ids.py +++ b/breathecode/authenticate/migrations/0035_alter_academyauthsettings_github_default_team_ids.py @@ -6,17 +6,18 @@ class Migration(migrations.Migration): dependencies = [ - ('authenticate', '0034_academyauthsettings_github_error_log'), + ("authenticate", "0034_academyauthsettings_github_error_log"), ] operations = [ migrations.AlterField( - model_name='academyauthsettings', - name='github_default_team_ids', + model_name="academyauthsettings", + name="github_default_team_ids", field=models.CharField( blank=True, - default='', - help_text='User will be invited to this github team ID when joining the github organization', - max_length=40), + default="", + help_text="User will be invited to this github team ID when joining the github organization", + max_length=40, + ), ), ] diff --git a/breathecode/authenticate/migrations/0036_githubacademyuserlog.py b/breathecode/authenticate/migrations/0036_githubacademyuserlog.py index 1bd553777..044d55e5a 100644 --- a/breathecode/authenticate/migrations/0036_githubacademyuserlog.py +++ b/breathecode/authenticate/migrations/0036_githubacademyuserlog.py @@ -7,28 +7,41 @@ class Migration(migrations.Migration): dependencies = [ - ('authenticate', '0035_alter_academyauthsettings_github_default_team_ids'), + 
("authenticate", "0035_alter_academyauthsettings_github_default_team_ids"), ] operations = [ migrations.CreateModel( - name='GithubAcademyUserLog', + name="GithubAcademyUserLog", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('storage_status', - models.CharField(choices=[('PENDING', 'Pending'), ('SYNCHED', 'Synched'), ('ERROR', 'Error'), - ('UNKNOWN', 'Unknown')], - default='PENDING', - max_length=20)), - ('storage_action', - models.CharField(choices=[('ADD', 'Add'), ('DELETE', 'Delete'), ('INVITE', 'Invite'), - ('IGNORE', 'Ignore')], - default='ADD', - max_length=20)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('academy_user', - models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='authenticate.githubacademyuser')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "storage_status", + models.CharField( + choices=[ + ("PENDING", "Pending"), + ("SYNCHED", "Synched"), + ("ERROR", "Error"), + ("UNKNOWN", "Unknown"), + ], + default="PENDING", + max_length=20, + ), + ), + ( + "storage_action", + models.CharField( + choices=[("ADD", "Add"), ("DELETE", "Delete"), ("INVITE", "Invite"), ("IGNORE", "Ignore")], + default="ADD", + max_length=20, + ), + ), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ( + "academy_user", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="authenticate.githubacademyuser"), + ), ], ), ] diff --git a/breathecode/authenticate/migrations/0037_auto_20230509_0801.py b/breathecode/authenticate/migrations/0037_auto_20230509_0801.py index 130dd64a8..5aba576c9 100644 --- a/breathecode/authenticate/migrations/0037_auto_20230509_0801.py +++ b/breathecode/authenticate/migrations/0037_auto_20230509_0801.py @@ -8,38 +8,46 @@ class Migration(migrations.Migration): dependencies = [ - ('authenticate', '0036_githubacademyuserlog'), + ("authenticate", "0036_githubacademyuserlog"), ] operations = [ migrations.AlterField( - model_name='profile', - name='bio', + model_name="profile", + name="bio", field=models.CharField( blank=True, - help_text= - 'User biography, this will be used the bio in the lang of the user, otherwise frontend will usethe Profile translation', + help_text="User biography, this will be used the bio in the lang of the user, otherwise frontend will usethe Profile translation", max_length=255, - null=True), + null=True, + ), ), migrations.CreateModel( - name='ProfileTranslation', + name="ProfileTranslation", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('lang', - models.CharField(help_text='ISO 639-1 language code + ISO 3166-1 alpha-2 country code, e.g. en-US', - max_length=5, - unique=True, - validators=[breathecode.utils.validators.language.validate_language_code])), - ('bio', - models.CharField( - help_text= - 'User biography, this will be used the bio in the lang of the user, otherwise frontend will usethe Profile translation', - max_length=255)), - ('profile', - models.ForeignKey(help_text='Profile', - on_delete=django.db.models.deletion.CASCADE, - to='authenticate.profile')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "lang", + models.CharField( + help_text="ISO 639-1 language code + ISO 3166-1 alpha-2 country code, e.g. 
en-US", + max_length=5, + unique=True, + validators=[breathecode.utils.validators.language.validate_language_code], + ), + ), + ( + "bio", + models.CharField( + help_text="User biography, this will be used the bio in the lang of the user, otherwise frontend will usethe Profile translation", + max_length=255, + ), + ), + ( + "profile", + models.ForeignKey( + help_text="Profile", on_delete=django.db.models.deletion.CASCADE, to="authenticate.profile" + ), + ), ], ), ] diff --git a/breathecode/authenticate/migrations/0038_auto_20230513_0007.py b/breathecode/authenticate/migrations/0038_auto_20230513_0007.py index 4af6223db..dbd0b033a 100644 --- a/breathecode/authenticate/migrations/0038_auto_20230513_0007.py +++ b/breathecode/authenticate/migrations/0038_auto_20230513_0007.py @@ -6,22 +6,23 @@ class Migration(migrations.Migration): dependencies = [ - ('authenticate', '0037_auto_20230509_0801'), + ("authenticate", "0037_auto_20230509_0801"), ] operations = [ migrations.AlterField( - model_name='profile', - name='bio', + model_name="profile", + name="bio", field=models.CharField( blank=True, help_text="User biography in user's language. Will be used if there are no ProfileTranslations.", max_length=255, - null=True), + null=True, + ), ), migrations.AlterField( - model_name='profiletranslation', - name='bio', + model_name="profiletranslation", + name="bio", field=models.CharField(max_length=255), ), ] diff --git a/breathecode/authenticate/migrations/0039_auto_20230611_0534.py b/breathecode/authenticate/migrations/0039_auto_20230611_0534.py index e6d7987a3..c572aef98 100644 --- a/breathecode/authenticate/migrations/0039_auto_20230611_0534.py +++ b/breathecode/authenticate/migrations/0039_auto_20230611_0534.py @@ -6,29 +6,43 @@ class Migration(migrations.Migration): dependencies = [ - ('authenticate', '0038_auto_20230513_0007'), + ("authenticate", "0038_auto_20230513_0007"), ] operations = [ migrations.AddField( - model_name='githubacademyuserlog', - name='valid_until', + model_name="githubacademyuserlog", + name="valid_until", field=models.DateTimeField(blank=True, default=None, null=True), ), migrations.AlterField( - model_name='githubacademyuser', - name='storage_status', - field=models.CharField(choices=[('PENDING', 'Pending'), ('SYNCHED', 'Synched'), ('ERROR', 'Error'), - ('UNKNOWN', 'Unknown'), ('PAYMENT_CONFLICT', 'Payment conflict')], - default='PENDING', - max_length=20), + model_name="githubacademyuser", + name="storage_status", + field=models.CharField( + choices=[ + ("PENDING", "Pending"), + ("SYNCHED", "Synched"), + ("ERROR", "Error"), + ("UNKNOWN", "Unknown"), + ("PAYMENT_CONFLICT", "Payment conflict"), + ], + default="PENDING", + max_length=20, + ), ), migrations.AlterField( - model_name='githubacademyuserlog', - name='storage_status', - field=models.CharField(choices=[('PENDING', 'Pending'), ('SYNCHED', 'Synched'), ('ERROR', 'Error'), - ('UNKNOWN', 'Unknown'), ('PAYMENT_CONFLICT', 'Payment conflict')], - default='PENDING', - max_length=20), + model_name="githubacademyuserlog", + name="storage_status", + field=models.CharField( + choices=[ + ("PENDING", "Pending"), + ("SYNCHED", "Synched"), + ("ERROR", "Error"), + ("UNKNOWN", "Unknown"), + ("PAYMENT_CONFLICT", "Payment conflict"), + ], + default="PENDING", + max_length=20, + ), ), ] diff --git a/breathecode/authenticate/migrations/0040_userinvite_is_email_validated.py b/breathecode/authenticate/migrations/0040_userinvite_is_email_validated.py index 38ace10e1..8c0aed78f 100644 --- 
a/breathecode/authenticate/migrations/0040_userinvite_is_email_validated.py +++ b/breathecode/authenticate/migrations/0040_userinvite_is_email_validated.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('authenticate', '0039_auto_20230611_0534'), + ("authenticate", "0039_auto_20230611_0534"), ] operations = [ migrations.AddField( - model_name='userinvite', - name='is_email_validated', + model_name="userinvite", + name="is_email_validated", field=models.BooleanField(default=False), ), ] diff --git a/breathecode/authenticate/migrations/0041_auto_20230725_0322.py b/breathecode/authenticate/migrations/0041_auto_20230725_0322.py index 2ad486e61..932fbb83a 100644 --- a/breathecode/authenticate/migrations/0041_auto_20230725_0322.py +++ b/breathecode/authenticate/migrations/0041_auto_20230725_0322.py @@ -9,27 +9,31 @@ class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ('authenticate', '0040_userinvite_is_email_validated'), + ("authenticate", "0040_userinvite_is_email_validated"), ] operations = [ migrations.AddField( - model_name='userinvite', - name='user', - field=models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - related_name='invites', - to=settings.AUTH_USER_MODEL), + model_name="userinvite", + name="user", + field=models.ForeignKey( + blank=True, + default=None, + null=True, + on_delete=django.db.models.deletion.CASCADE, + related_name="invites", + to=settings.AUTH_USER_MODEL, + ), ), migrations.AlterField( - model_name='userinvite', - name='author', - field=models.ForeignKey(default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - related_name='invites_by_author', - to=settings.AUTH_USER_MODEL), + model_name="userinvite", + name="author", + field=models.ForeignKey( + default=None, + null=True, + on_delete=django.db.models.deletion.CASCADE, + related_name="invites_by_author", + to=settings.AUTH_USER_MODEL, + ), ), ] diff --git a/breathecode/authenticate/migrations/0042_auto_20230805_0323.py b/breathecode/authenticate/migrations/0042_auto_20230805_0323.py index e688e01ee..afbb55741 100644 --- a/breathecode/authenticate/migrations/0042_auto_20230805_0323.py +++ b/breathecode/authenticate/migrations/0042_auto_20230805_0323.py @@ -9,155 +9,222 @@ class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ('authenticate', '0041_auto_20230725_0322'), + ("authenticate", "0041_auto_20230725_0322"), ] operations = [ migrations.CreateModel( - name='App', + name="App", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('name', models.CharField(help_text='Descriptive and unique name of the app', - max_length=25, - unique=True)), - ('slug', - models.SlugField( - help_text='Unique slug for the app, it must be url friendly and please avoid to change it', - unique=True)), - ('description', - models.CharField(help_text='Description of the app, it will appear on the authorize UI', - max_length=255)), - ('algorithm', - models.CharField(choices=[('HMAC_SHA256', 'HMAC-SHA256'), ('HMAC_SHA512', 'HMAC_SHA512'), - ('ED25519', 'ED25519')], - default='HMAC_SHA512', - max_length=11)), - ('strategy', - models.CharField(choices=[('JWT', 'Json Web Token'), ('SIGNATURE', 'Signature')], - default='JWT', - max_length=9)), - ('schema', - models.CharField( - choices=[('LINK', 'Link')], - default='LINK', - help_text='Schema to use for the 
auth process to r2epresent how the apps will communicate', - max_length=4)), - ('agreement_version', - models.IntegerField(default=1, help_text='Version of the agreement, based in the scopes')), - ('private_key', models.CharField(blank=True, max_length=255)), - ('public_key', models.CharField(blank=True, default=None, max_length=255, null=True)), - ('require_an_agreement', - models.BooleanField(default=True, - help_text='If true, the user will be required to accept an agreement')), - ('webhook_url', models.URLField()), - ('redirect_url', models.URLField()), - ('app_url', models.URLField()), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "name", + models.CharField(help_text="Descriptive and unique name of the app", max_length=25, unique=True), + ), + ( + "slug", + models.SlugField( + help_text="Unique slug for the app, it must be url friendly and please avoid to change it", + unique=True, + ), + ), + ( + "description", + models.CharField( + help_text="Description of the app, it will appear on the authorize UI", max_length=255 + ), + ), + ( + "algorithm", + models.CharField( + choices=[ + ("HMAC_SHA256", "HMAC-SHA256"), + ("HMAC_SHA512", "HMAC_SHA512"), + ("ED25519", "ED25519"), + ], + default="HMAC_SHA512", + max_length=11, + ), + ), + ( + "strategy", + models.CharField( + choices=[("JWT", "Json Web Token"), ("SIGNATURE", "Signature")], default="JWT", max_length=9 + ), + ), + ( + "schema", + models.CharField( + choices=[("LINK", "Link")], + default="LINK", + help_text="Schema to use for the auth process to r2epresent how the apps will communicate", + max_length=4, + ), + ), + ( + "agreement_version", + models.IntegerField(default=1, help_text="Version of the agreement, based in the scopes"), + ), + ("private_key", models.CharField(blank=True, max_length=255)), + ("public_key", models.CharField(blank=True, default=None, max_length=255, null=True)), + ( + "require_an_agreement", + models.BooleanField( + default=True, help_text="If true, the user will be required to accept an agreement" + ), + ), + ("webhook_url", models.URLField()), + ("redirect_url", models.URLField()), + ("app_url", models.URLField()), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), ], ), migrations.CreateModel( - name='Scope', + name="Scope", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('name', - models.CharField(help_text='Descriptive and unique name that appears on the authorize UI', - max_length=25, - unique=True)), - ('slug', models.CharField(help_text='{action}:{data} for example read:repo', max_length=15, - unique=True)), - ('description', models.CharField(help_text='Description of the scope', max_length=255)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "name", + models.CharField( + help_text="Descriptive and unique name that appears on the authorize UI", + max_length=25, + unique=True, + ), + ), + ( + "slug", + models.CharField(help_text="{action}:{data} for example read:repo", max_length=15, unique=True), + ), + ("description", models.CharField(help_text="Description of the scope", max_length=255)), ], ), migrations.CreateModel( - name='OptionalScopeSet', + name="OptionalScopeSet", fields=[ - ('id', models.AutoField(auto_created=True, 
primary_key=True, serialize=False, verbose_name='ID')), - ('optional_scopes', models.ManyToManyField(blank=True, to='authenticate.Scope')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("optional_scopes", models.ManyToManyField(blank=True, to="authenticate.Scope")), ], ), migrations.CreateModel( - name='LegacyKey', + name="LegacyKey", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('algorithm', - models.CharField(choices=[('HMAC_SHA256', 'HMAC-SHA256'), ('HMAC_SHA512', 'HMAC_SHA512'), - ('ED25519', 'ED25519')], - max_length=11)), - ('strategy', - models.CharField(choices=[('JWT', 'Json Web Token'), ('SIGNATURE', 'Signature')], max_length=9)), - ('schema', models.CharField(choices=[('LINK', 'Link')], max_length=4)), - ('private_key', models.CharField(blank=True, max_length=255)), - ('public_key', models.CharField(blank=True, default=None, max_length=255, null=True)), - ('webhook_url', models.URLField()), - ('redirect_url', models.URLField()), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('app', - models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, - related_name='legacy_key', - to='authenticate.app')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "algorithm", + models.CharField( + choices=[ + ("HMAC_SHA256", "HMAC-SHA256"), + ("HMAC_SHA512", "HMAC_SHA512"), + ("ED25519", "ED25519"), + ], + max_length=11, + ), + ), + ( + "strategy", + models.CharField(choices=[("JWT", "Json Web Token"), ("SIGNATURE", "Signature")], max_length=9), + ), + ("schema", models.CharField(choices=[("LINK", "Link")], max_length=4)), + ("private_key", models.CharField(blank=True, max_length=255)), + ("public_key", models.CharField(blank=True, default=None, max_length=255, null=True)), + ("webhook_url", models.URLField()), + ("redirect_url", models.URLField()), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ( + "app", + models.OneToOneField( + on_delete=django.db.models.deletion.CASCADE, related_name="legacy_key", to="authenticate.app" + ), + ), ], ), migrations.CreateModel( - name='AppUserAgreement', + name="AppUserAgreement", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('agreement_version', - models.IntegerField(default=1, help_text='Version of the agreement that was accepted')), - ('agreed_at', models.DateTimeField()), - ('app', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='authenticate.app')), - ('optional_scope_set', - models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, - related_name='app_user_agreement', - to='authenticate.optionalscopeset')), - ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "agreement_version", + models.IntegerField(default=1, help_text="Version of the agreement that was accepted"), + ), + ("agreed_at", models.DateTimeField()), + ("app", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="authenticate.app")), + ( + "optional_scope_set", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="app_user_agreement", + to="authenticate.optionalscopeset", + ), + ), + 
("user", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ], ), migrations.CreateModel( - name='AppRequiredScope', + name="AppRequiredScope", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('agreed_at', models.DateTimeField()), - ('app', - models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, - related_name='m2m_required_scopes', - to='authenticate.app')), - ('scope', - models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, - related_name='m2m_required_scopes', - to='authenticate.scope')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("agreed_at", models.DateTimeField()), + ( + "app", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="m2m_required_scopes", + to="authenticate.app", + ), + ), + ( + "scope", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="m2m_required_scopes", + to="authenticate.scope", + ), + ), ], ), migrations.CreateModel( - name='AppOptionalScope', + name="AppOptionalScope", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('agreed_at', models.DateTimeField()), - ('app', - models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, - related_name='m2m_optional_scopes', - to='authenticate.app')), - ('scope', - models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, - related_name='m2m_optional_scopes', - to='authenticate.scope')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("agreed_at", models.DateTimeField()), + ( + "app", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="m2m_optional_scopes", + to="authenticate.app", + ), + ), + ( + "scope", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="m2m_optional_scopes", + to="authenticate.scope", + ), + ), ], ), migrations.AddField( - model_name='app', - name='optional_scopes', - field=models.ManyToManyField(blank=True, - related_name='app_optional_scopes', - through='authenticate.AppOptionalScope', - to='authenticate.Scope'), + model_name="app", + name="optional_scopes", + field=models.ManyToManyField( + blank=True, + related_name="app_optional_scopes", + through="authenticate.AppOptionalScope", + to="authenticate.Scope", + ), ), migrations.AddField( - model_name='app', - name='required_scopes', - field=models.ManyToManyField(blank=True, - related_name='app_required_scopes', - through='authenticate.AppRequiredScope', - to='authenticate.Scope'), + model_name="app", + name="required_scopes", + field=models.ManyToManyField( + blank=True, + related_name="app_required_scopes", + through="authenticate.AppRequiredScope", + to="authenticate.Scope", + ), ), ] diff --git a/breathecode/authenticate/migrations/0043_auto_20230817_0837.py b/breathecode/authenticate/migrations/0043_auto_20230817_0837.py index 1c3d92122..5b0834878 100644 --- a/breathecode/authenticate/migrations/0043_auto_20230817_0837.py +++ b/breathecode/authenticate/migrations/0043_auto_20230817_0837.py @@ -6,18 +6,18 @@ class Migration(migrations.Migration): dependencies = [ - ('authenticate', '0042_auto_20230805_0323'), + ("authenticate", "0042_auto_20230805_0323"), ] operations = [ migrations.AlterField( - model_name='appoptionalscope', - name='agreed_at', + model_name="appoptionalscope", + name="agreed_at", 
field=models.DateTimeField(auto_now_add=True), ), migrations.AlterField( - model_name='apprequiredscope', - name='agreed_at', + model_name="apprequiredscope", + name="agreed_at", field=models.DateTimeField(auto_now_add=True), ), ] diff --git a/breathecode/authenticate/migrations/0044_auto_20230908_1941.py b/breathecode/authenticate/migrations/0044_auto_20230908_1941.py index 243caf631..ae2e30d87 100644 --- a/breathecode/authenticate/migrations/0044_auto_20230908_1941.py +++ b/breathecode/authenticate/migrations/0044_auto_20230908_1941.py @@ -6,37 +6,34 @@ class Migration(migrations.Migration): dependencies = [ - ('authenticate', '0043_auto_20230817_0837'), + ("authenticate", "0043_auto_20230817_0837"), ] operations = [ migrations.AddField( - model_name='userinvite', - name='asset_slug', - field=models.SlugField(blank=True, - help_text='If set, the user signed up because of an Asset', - max_length=40, - null=True), + model_name="userinvite", + name="asset_slug", + field=models.SlugField( + blank=True, help_text="If set, the user signed up because of an Asset", max_length=40, null=True + ), ), migrations.AddField( - model_name='userinvite', - name='conversion_info', - field=models.JSONField(blank=True, - default=None, - help_text='UTMs and other conversion information.', - null=True), + model_name="userinvite", + name="conversion_info", + field=models.JSONField( + blank=True, default=None, help_text="UTMs and other conversion information.", null=True + ), ), migrations.AddField( - model_name='userinvite', - name='event_slug', - field=models.SlugField(blank=True, - help_text='If set, the user signed up because of an Event', - max_length=40, - null=True), + model_name="userinvite", + name="event_slug", + field=models.SlugField( + blank=True, help_text="If set, the user signed up because of an Event", max_length=40, null=True + ), ), migrations.AddField( - model_name='userinvite', - name='has_marketing_consent', + model_name="userinvite", + name="has_marketing_consent", field=models.BooleanField(default=False), ), ] diff --git a/breathecode/authenticate/migrations/0044_auto_20231017_0605.py b/breathecode/authenticate/migrations/0044_auto_20231017_0605.py index b512ddf11..10241d5c5 100644 --- a/breathecode/authenticate/migrations/0044_auto_20231017_0605.py +++ b/breathecode/authenticate/migrations/0044_auto_20231017_0605.py @@ -6,48 +6,47 @@ class Migration(migrations.Migration): dependencies = [ - ('authenticate', '0043_auto_20230817_0837'), + ("authenticate", "0043_auto_20230817_0837"), ] operations = [ migrations.AlterField( - model_name='profile', - name='show_tutorial', - field=models.BooleanField(db_index=True, - default=True, - help_text='Set true if you want to show the tutorial on the user UI/UX'), + model_name="profile", + name="show_tutorial", + field=models.BooleanField( + db_index=True, default=True, help_text="Set true if you want to show the tutorial on the user UI/UX" + ), ), migrations.AlterField( - model_name='profileacademy', - name='email', + model_name="profileacademy", + name="email", field=models.CharField(db_index=True, default=None, max_length=150, null=True), ), migrations.AlterField( - model_name='profileacademy', - name='first_name', + model_name="profileacademy", + name="first_name", field=models.CharField(db_index=True, default=None, max_length=100, null=True), ), migrations.AlterField( - model_name='profileacademy', - name='last_name', + model_name="profileacademy", + name="last_name", field=models.CharField(db_index=True, default=None, max_length=100, null=True), 
), migrations.AlterField( - model_name='profileacademy', - name='status', - field=models.CharField(choices=[('INVITED', 'Invited'), ('ACTIVE', 'Active')], - db_index=True, - default='INVITED', - max_length=15), + model_name="profileacademy", + name="status", + field=models.CharField( + choices=[("INVITED", "Invited"), ("ACTIVE", "Active")], db_index=True, default="INVITED", max_length=15 + ), ), migrations.AlterField( - model_name='token', - name='expires_at', + model_name="token", + name="expires_at", field=models.DateTimeField(blank=True, db_index=True, default=None, null=True), ), migrations.AlterField( - model_name='token', - name='token_type', - field=models.CharField(db_index=True, default='temporal', max_length=64), + model_name="token", + name="token_type", + field=models.CharField(db_index=True, default="temporal", max_length=64), ), ] diff --git a/breathecode/authenticate/migrations/0045_merge_0044_auto_20230908_1941_0044_auto_20231017_0605.py b/breathecode/authenticate/migrations/0045_merge_0044_auto_20230908_1941_0044_auto_20231017_0605.py index 4a47eb720..a153696cd 100644 --- a/breathecode/authenticate/migrations/0045_merge_0044_auto_20230908_1941_0044_auto_20231017_0605.py +++ b/breathecode/authenticate/migrations/0045_merge_0044_auto_20230908_1941_0044_auto_20231017_0605.py @@ -6,8 +6,8 @@ class Migration(migrations.Migration): dependencies = [ - ('authenticate', '0044_auto_20230908_1941'), - ('authenticate', '0044_auto_20231017_0605'), + ("authenticate", "0044_auto_20230908_1941"), + ("authenticate", "0044_auto_20231017_0605"), ] operations = [] diff --git a/breathecode/authenticate/migrations/0046_auto_20231024_1958.py b/breathecode/authenticate/migrations/0046_auto_20231024_1958.py index fc7ad03ea..a55c4bbb5 100644 --- a/breathecode/authenticate/migrations/0046_auto_20231024_1958.py +++ b/breathecode/authenticate/migrations/0046_auto_20231024_1958.py @@ -6,28 +6,28 @@ class Migration(migrations.Migration): dependencies = [ - ('authenticate', '0045_merge_0044_auto_20230908_1941_0044_auto_20231017_0605'), + ("authenticate", "0045_merge_0044_auto_20230908_1941_0044_auto_20231017_0605"), ] operations = [ migrations.AddField( - model_name='userinvite', - name='city', + model_name="userinvite", + name="city", field=models.CharField(blank=True, default=None, max_length=30, null=True), ), migrations.AddField( - model_name='userinvite', - name='country', + model_name="userinvite", + name="country", field=models.CharField(blank=True, default=None, max_length=30, null=True), ), migrations.AddField( - model_name='userinvite', - name='latitude', + model_name="userinvite", + name="latitude", field=models.DecimalField(blank=True, decimal_places=15, default=None, max_digits=30, null=True), ), migrations.AddField( - model_name='userinvite', - name='longitude', + model_name="userinvite", + name="longitude", field=models.DecimalField(blank=True, decimal_places=15, default=None, max_digits=30, null=True), ), ] diff --git a/breathecode/authenticate/migrations/0047_auto_20231026_2210.py b/breathecode/authenticate/migrations/0047_auto_20231026_2210.py index 3292392ec..03840a759 100644 --- a/breathecode/authenticate/migrations/0047_auto_20231026_2210.py +++ b/breathecode/authenticate/migrations/0047_auto_20231026_2210.py @@ -6,18 +6,18 @@ class Migration(migrations.Migration): dependencies = [ - ('authenticate', '0046_auto_20231024_1958'), + ("authenticate", "0046_auto_20231024_1958"), ] operations = [ migrations.AddField( - model_name='userinvite', - name='email_quality', + 
model_name="userinvite",
+            name="email_quality",
             field=models.FloatField(blank=True, default=None, null=True),
         ),
         migrations.AddField(
-            model_name='userinvite',
-            name='email_status',
+            model_name="userinvite",
+            name="email_status",
             field=models.JSONField(blank=True, default=None, null=True),
         ),
     ]
diff --git a/breathecode/authenticate/migrations/0048_auto_20231128_1224.py b/breathecode/authenticate/migrations/0048_auto_20231128_1224.py
index de7cc29fa..97b9dfe78 100644
--- a/breathecode/authenticate/migrations/0048_auto_20231128_1224.py
+++ b/breathecode/authenticate/migrations/0048_auto_20231128_1224.py
@@ -6,24 +6,22 @@
 class Migration(migrations.Migration):
 
     dependencies = [
-        ('authenticate', '0047_auto_20231026_2210'),
+        ("authenticate", "0047_auto_20231026_2210"),
     ]
 
     operations = [
         migrations.AlterField(
-            model_name='userinvite',
-            name='asset_slug',
-            field=models.SlugField(blank=True,
-                                   help_text='If set, the user signed up because of an Asset',
-                                   max_length=60,
-                                   null=True),
+            model_name="userinvite",
+            name="asset_slug",
+            field=models.SlugField(
+                blank=True, help_text="If set, the user signed up because of an Asset", max_length=60, null=True
+            ),
         ),
         migrations.AlterField(
-            model_name='userinvite',
-            name='event_slug',
-            field=models.SlugField(blank=True,
-                                   help_text='If set, the user signed up because of an Event',
-                                   max_length=120,
-                                   null=True),
+            model_name="userinvite",
+            name="event_slug",
+            field=models.SlugField(
+                blank=True, help_text="If set, the user signed up because of an Event", max_length=120, null=True
+            ),
         ),
     ]
diff --git a/breathecode/authenticate/migrations/0049_delete_appuseragreement.py b/breathecode/authenticate/migrations/0049_delete_appuseragreement.py
index 16d8c9fed..738f00aee 100644
--- a/breathecode/authenticate/migrations/0049_delete_appuseragreement.py
+++ b/breathecode/authenticate/migrations/0049_delete_appuseragreement.py
@@ -6,9 +6,11 @@
 class Migration(migrations.Migration):
 
     dependencies = [
-        ('authenticate', '0048_auto_20231128_1224'),
+        ("authenticate", "0048_auto_20231128_1224"),
     ]
 
     operations = [
-        migrations.DeleteModel(name='AppUserAgreement', ),
+        migrations.DeleteModel(
+            name="AppUserAgreement",
+        ),
     ]
diff --git a/breathecode/authenticate/migrations/0050_remove_app_optional_scopes_and_more.py b/breathecode/authenticate/migrations/0050_remove_app_optional_scopes_and_more.py
index a21d29e05..cf374dc2a 100644
--- a/breathecode/authenticate/migrations/0050_remove_app_optional_scopes_and_more.py
+++ b/breathecode/authenticate/migrations/0050_remove_app_optional_scopes_and_more.py
@@ -6,31 +6,37 @@
 class Migration(migrations.Migration):
 
     dependencies = [
-        ('authenticate', '0049_delete_appuseragreement'),
+        ("authenticate", "0049_delete_appuseragreement"),
     ]
 
     operations = [
         migrations.RemoveField(
-            model_name='app',
-            name='optional_scopes',
+            model_name="app",
+            name="optional_scopes",
         ),
         migrations.RemoveField(
-            model_name='apprequiredscope',
-            name='app',
+            model_name="apprequiredscope",
+            name="app",
         ),
         migrations.RemoveField(
-            model_name='apprequiredscope',
-            name='scope',
+            model_name="apprequiredscope",
+            name="scope",
         ),
         migrations.RemoveField(
-            model_name='app',
-            name='required_scopes',
+            model_name="app",
+            name="required_scopes",
         ),
         migrations.RemoveField(
-            model_name='optionalscopeset',
-            name='optional_scopes',
+            model_name="optionalscopeset",
+            name="optional_scopes",
+        ),
+        migrations.DeleteModel(
+            name="AppOptionalScope",
+        ),
+        migrations.DeleteModel(
+            name="AppRequiredScope",
+        ),
+        migrations.DeleteModel(
+            name="OptionalScopeSet",
         ),
-        migrations.DeleteModel(name='AppOptionalScope', ),
-        migrations.DeleteModel(name='AppRequiredScope', ),
-        migrations.DeleteModel(name='OptionalScopeSet', ),
     ]
diff --git a/breathecode/authenticate/migrations/0051_delete_legacykey.py b/breathecode/authenticate/migrations/0051_delete_legacykey.py
index 7c20da39b..8d04f28a5 100644
--- a/breathecode/authenticate/migrations/0051_delete_legacykey.py
+++ b/breathecode/authenticate/migrations/0051_delete_legacykey.py
@@ -6,9 +6,11 @@
 class Migration(migrations.Migration):
 
     dependencies = [
-        ('authenticate', '0050_remove_app_optional_scopes_and_more'),
+        ("authenticate", "0050_remove_app_optional_scopes_and_more"),
     ]
 
     operations = [
-        migrations.DeleteModel(name='LegacyKey', ),
+        migrations.DeleteModel(
+            name="LegacyKey",
+        ),
     ]
diff --git a/breathecode/authenticate/migrations/0052_delete_scope_remove_app_agreement_version_and_more.py b/breathecode/authenticate/migrations/0052_delete_scope_remove_app_agreement_version_and_more.py
index 324f278a3..a723a7e30 100644
--- a/breathecode/authenticate/migrations/0052_delete_scope_remove_app_agreement_version_and_more.py
+++ b/breathecode/authenticate/migrations/0052_delete_scope_remove_app_agreement_version_and_more.py
@@ -6,61 +6,63 @@
 class Migration(migrations.Migration):
 
     dependencies = [
-        ('authenticate', '0051_delete_legacykey'),
+        ("authenticate", "0051_delete_legacykey"),
     ]
 
     operations = [
-        migrations.DeleteModel(name='Scope', ),
+        migrations.DeleteModel(
+            name="Scope",
+        ),
         migrations.RemoveField(
-            model_name='app',
-            name='agreement_version',
+            model_name="app",
+            name="agreement_version",
         ),
         migrations.RemoveField(
-            model_name='app',
-            name='algorithm',
+            model_name="app",
+            name="algorithm",
         ),
         migrations.RemoveField(
-            model_name='app',
-            name='app_url',
+            model_name="app",
+            name="app_url",
         ),
         migrations.RemoveField(
-            model_name='app',
-            name='created_at',
+            model_name="app",
+            name="created_at",
         ),
         migrations.RemoveField(
-            model_name='app',
-            name='description',
+            model_name="app",
+            name="description",
         ),
         migrations.RemoveField(
-            model_name='app',
-            name='private_key',
+            model_name="app",
+            name="private_key",
         ),
         migrations.RemoveField(
-            model_name='app',
-            name='public_key',
+            model_name="app",
+            name="public_key",
         ),
         migrations.RemoveField(
-            model_name='app',
-            name='redirect_url',
+            model_name="app",
+            name="redirect_url",
        ),
         migrations.RemoveField(
-            model_name='app',
-            name='require_an_agreement',
+            model_name="app",
+            name="require_an_agreement",
         ),
         migrations.RemoveField(
-            model_name='app',
-            name='schema',
+            model_name="app",
+            name="schema",
         ),
         migrations.RemoveField(
-            model_name='app',
-            name='slug',
+            model_name="app",
+            name="slug",
         ),
         migrations.RemoveField(
-            model_name='app',
-            name='strategy',
+            model_name="app",
+            name="strategy",
         ),
         migrations.RemoveField(
-            model_name='app',
-            name='webhook_url',
+            model_name="app",
+            name="webhook_url",
         ),
     ]
diff --git a/breathecode/authenticate/migrations/0053_remove_app_updated_at.py b/breathecode/authenticate/migrations/0053_remove_app_updated_at.py
index 056091b43..2bedda786 100644
--- a/breathecode/authenticate/migrations/0053_remove_app_updated_at.py
+++ b/breathecode/authenticate/migrations/0053_remove_app_updated_at.py
@@ -6,12 +6,12 @@
 class Migration(migrations.Migration):
 
     dependencies = [
-        ('authenticate', '0052_delete_scope_remove_app_agreement_version_and_more'),
+        ("authenticate", "0052_delete_scope_remove_app_agreement_version_and_more"),
     ]
 
     operations = [
         migrations.RemoveField(
-
model_name='app', - name='updated_at', + model_name="app", + name="updated_at", ), ] diff --git a/breathecode/authenticate/migrations/0054_academyauthsettings_auto_sync_content_and_more.py b/breathecode/authenticate/migrations/0054_academyauthsettings_auto_sync_content_and_more.py index a2c30e4e0..ddce042f9 100644 --- a/breathecode/authenticate/migrations/0054_academyauthsettings_auto_sync_content_and_more.py +++ b/breathecode/authenticate/migrations/0054_academyauthsettings_auto_sync_content_and_more.py @@ -6,19 +6,20 @@ class Migration(migrations.Migration): dependencies = [ - ('authenticate', '0053_remove_app_updated_at'), + ("authenticate", "0053_remove_app_updated_at"), ] operations = [ migrations.AddField( - model_name='academyauthsettings', - name='auto_sync_content', + model_name="academyauthsettings", + name="auto_sync_content", field=models.BooleanField( - default=False, help_text='If true, will attempt to create WebhookSubscription on each asset repo'), + default=False, help_text="If true, will attempt to create WebhookSubscription on each asset repo" + ), ), migrations.AlterField( - model_name='academyauthsettings', - name='github_is_sync', - field=models.BooleanField(default=False, help_text='If true, will try synching users every few hours'), + model_name="academyauthsettings", + name="github_is_sync", + field=models.BooleanField(default=False, help_text="If true, will try synching users every few hours"), ), ] diff --git a/breathecode/authenticate/models.py b/breathecode/authenticate/models.py index 18a66eeba..5051811c7 100644 --- a/breathecode/authenticate/models.py +++ b/breathecode/authenticate/models.py @@ -26,12 +26,26 @@ from .signals import academy_invite_accepted __all__ = [ - 'User', 'Group', 'ContentType', 'Permission', 'UserProxy', 'Profile', 'Capability', 'Role', 'UserInvite', - 'ProfileAcademy', 'CredentialsGithub', 'CredentialsSlack', 'CredentialsFacebook', 'CredentialsQuickBooks', - 'CredentialsGoogle', 'DeviceId', 'Token' + "User", + "Group", + "ContentType", + "Permission", + "UserProxy", + "Profile", + "Capability", + "Role", + "UserInvite", + "ProfileAcademy", + "CredentialsGithub", + "CredentialsSlack", + "CredentialsFacebook", + "CredentialsQuickBooks", + "CredentialsGoogle", + "DeviceId", + "Token", ] -TOKEN_TYPE = ['login', 'one_time', 'temporal', 'permanent'] +TOKEN_TYPE = ["login", "one_time", "temporal", "permanent"] LOGIN_TOKEN_LIFETIME = timezone.timedelta(days=1) TEMPORAL_TOKEN_LIFETIME = timezone.timedelta(minutes=10) @@ -55,17 +69,20 @@ class Profile(models.Model): max_length=255, blank=True, null=True, - help_text='User biography in user\'s language. Will be used if there are no ProfileTranslations.') + help_text="User biography in user's language. Will be used if there are no ProfileTranslations.", + ) phone_regex = RegexValidator( - regex=r'^\+?1?\d{9,15}$', - message="Phone number must be entered in the format: '+999999999'. Up to 15 digits allowed.") - phone = models.CharField(validators=[phone_regex], max_length=17, blank=True, - default='') # validators should be a list + regex=r"^\+?1?\d{9,15}$", + message="Phone number must be entered in the format: '+999999999'. 
Up to 15 digits allowed.", + ) + phone = models.CharField( + validators=[phone_regex], max_length=17, blank=True, default="" + ) # validators should be a list - show_tutorial = models.BooleanField(default=True, - help_text='Set true if you want to show the tutorial on the user UI/UX', - db_index=True) + show_tutorial = models.BooleanField( + default=True, help_text="Set true if you want to show the tutorial on the user UI/UX", db_index=True + ) twitter_username = models.CharField(max_length=50, blank=True, null=True) github_username = models.CharField(max_length=50, blank=True, null=True) @@ -76,22 +93,24 @@ class Profile(models.Model): class ProfileTranslation(models.Model): - profile = models.ForeignKey(Profile, on_delete=models.CASCADE, help_text='Profile') - lang = models.CharField(max_length=5, - validators=[validate_language_code], - unique=True, - help_text='ISO 639-1 language code + ISO 3166-1 alpha-2 country code, e.g. en-US') + profile = models.ForeignKey(Profile, on_delete=models.CASCADE, help_text="Profile") + lang = models.CharField( + max_length=5, + validators=[validate_language_code], + unique=True, + help_text="ISO 639-1 language code + ISO 3166-1 alpha-2 country code, e.g. en-US", + ) bio = models.CharField(max_length=255) def __str__(self) -> str: - return f'{self.lang}: {self.profile.user.email}' + return f"{self.lang}: {self.profile.user.email}" class UserSetting(models.Model): - user = models.OneToOneField(User, on_delete=models.CASCADE, related_name='settings') - lang = models.CharField(max_length=5, default='en', validators=[validate_language_code]) - main_currency = models.ForeignKey('payments.Currency', on_delete=models.SET_NULL, null=True, blank=True) + user = models.OneToOneField(User, on_delete=models.CASCADE, related_name="settings") + lang = models.CharField(max_length=5, default="en", validators=[validate_language_code]) + main_currency = models.ForeignKey("payments.Currency", on_delete=models.SET_NULL, null=True, blank=True) def save(self, *args, **kwargs): self.full_clean() @@ -103,7 +122,7 @@ class Capability(models.Model): description = models.CharField(max_length=255, blank=True, null=True, default=None) def __str__(self): - return f'{self.slug}' + return f"{self.slug}" class Role(models.Model): @@ -115,27 +134,27 @@ class Role(models.Model): updated_at = models.DateTimeField(auto_now=True, editable=False) def __str__(self): - return f'{self.name} ({self.slug})' + return f"{self.name} ({self.slug})" -PENDING = 'PENDING' -ACCEPTED = 'ACCEPTED' -REJECTED = 'REJECTED' -WAITING_LIST = 'WAITING_LIST' +PENDING = "PENDING" +ACCEPTED = "ACCEPTED" +REJECTED = "REJECTED" +WAITING_LIST = "WAITING_LIST" INVITE_STATUS = ( - (PENDING, 'Pending'), - (REJECTED, 'Rejected'), - (ACCEPTED, 'Accepted'), - (WAITING_LIST, 'Waiting list'), + (PENDING, "Pending"), + (REJECTED, "Rejected"), + (ACCEPTED, "Accepted"), + (WAITING_LIST, "Waiting list"), ) -PENDING = 'PENDING' -DONE = 'DONE' -ERROR = 'ERROR' +PENDING = "PENDING" +DONE = "DONE" +ERROR = "ERROR" PROCESS_STATUS = ( - (PENDING, 'Pending'), - (DONE, 'Done'), - (ERROR, 'Error'), + (PENDING, "Pending"), + (DONE, "Done"), + (ERROR, "Error"), ) @@ -153,46 +172,41 @@ def __init__(self, *args, **kwargs): is_email_validated = models.BooleanField(default=False) has_marketing_consent = models.BooleanField(default=False) - user = models.ForeignKey(User, - on_delete=models.CASCADE, - null=True, - default=None, - blank=True, - related_name='invites') + user = models.ForeignKey( + User, on_delete=models.CASCADE, null=True, 
default=None, blank=True, related_name="invites" + ) academy = models.ForeignKey(Academy, on_delete=models.CASCADE, null=True, default=None, blank=True) - syllabus = models.ForeignKey('admissions.Syllabus', on_delete=models.CASCADE, null=True, default=None, blank=True) + syllabus = models.ForeignKey("admissions.Syllabus", on_delete=models.CASCADE, null=True, default=None, blank=True) cohort = models.ForeignKey(Cohort, on_delete=models.CASCADE, null=True, default=None, blank=True) role = models.ForeignKey(Role, on_delete=models.CASCADE, null=True, default=None, blank=True) - event_slug = models.SlugField(max_length=120, - blank=True, - null=True, - help_text='If set, the user signed up because of an Event') - asset_slug = models.SlugField(max_length=60, - blank=True, - null=True, - help_text='If set, the user signed up because of an Asset') + event_slug = models.SlugField( + max_length=120, blank=True, null=True, help_text="If set, the user signed up because of an Event" + ) + asset_slug = models.SlugField( + max_length=60, blank=True, null=True, help_text="If set, the user signed up because of an Asset" + ) first_name = models.CharField(max_length=100, default=None, null=True) last_name = models.CharField(max_length=100, default=None, null=True) token = models.CharField(max_length=255, unique=True) - author = models.ForeignKey(User, - on_delete=models.CASCADE, - null=True, - default=None, - related_name='invites_by_author') + author = models.ForeignKey( + User, on_delete=models.CASCADE, null=True, default=None, related_name="invites_by_author" + ) status = models.CharField(max_length=15, choices=INVITE_STATUS, default=PENDING) process_status = models.CharField(max_length=7, choices=PROCESS_STATUS, default=PENDING) - process_message = models.CharField(max_length=150, default='') + process_message = models.CharField(max_length=150, default="") phone_regex = RegexValidator( - regex=r'^\+?1?\d{9,15}$', - message="Phone number must be entered in the format: '+999999999'. Up to 15 digits allowed.") - phone = models.CharField(validators=[phone_regex], max_length=17, blank=True, - default='') # validators should be a list + regex=r"^\+?1?\d{9,15}$", + message="Phone number must be entered in the format: '+999999999'. Up to 15 digits allowed.", + ) + phone = models.CharField( + validators=[phone_regex], max_length=17, blank=True, default="" + ) # validators should be a list created_at = models.DateTimeField(auto_now_add=True, editable=False) updated_at = models.DateTimeField(auto_now=True, editable=False) @@ -203,16 +217,15 @@ def __init__(self, *args, **kwargs): latitude = models.DecimalField(max_digits=30, decimal_places=15, null=True, default=None, blank=True) longitude = models.DecimalField(max_digits=30, decimal_places=15, null=True, default=None, blank=True) - conversion_info = models.JSONField(default=None, - blank=True, - null=True, - help_text='UTMs and other conversion information.') + conversion_info = models.JSONField( + default=None, blank=True, null=True, help_text="UTMs and other conversion information." 
+ ) email_quality = models.FloatField(default=None, blank=True, null=True) email_status = models.JSONField(default=None, blank=True, null=True) def __str__(self): - return f'Invite for {self.email}' + return f"Invite for {self.email}" def save(self, *args, **kwargs): import breathecode.authenticate.tasks as tasks_authenticate @@ -224,7 +237,7 @@ def save(self, *args, **kwargs): status_updated = True if self.pk and self._email and self.email != self._email: - raise forms.ValidationError('Email is readonly') + raise forms.ValidationError("Email is readonly") super().save(*args, **kwargs) # Call the "real" save() method. @@ -239,11 +252,11 @@ def save(self, *args, **kwargs): self._old_status = self.status -INVITED = 'INVITED' -ACTIVE = 'ACTIVE' +INVITED = "INVITED" +ACTIVE = "ACTIVE" PROFILE_ACADEMY_STATUS = ( - (INVITED, 'Invited'), - (ACTIVE, 'Active'), + (INVITED, "Invited"), + (ACTIVE, "Active"), ) @@ -264,10 +277,12 @@ def __init__(self, *args, **kwargs): address = models.CharField(max_length=255, blank=True, default=None, null=True) phone_regex = RegexValidator( - regex=r'^\+?1?\d{9,15}$', - message="Phone number must be entered in the format: '+999999999'. Up to 15 digits allowed.") - phone = models.CharField(validators=[phone_regex], max_length=17, blank=True, - default='') # validators should be a list + regex=r"^\+?1?\d{9,15}$", + message="Phone number must be entered in the format: '+999999999'. Up to 15 digits allowed.", + ) + phone = models.CharField( + validators=[phone_regex], max_length=17, blank=True, default="" + ) # validators should be a list status = models.CharField(max_length=15, choices=PROFILE_ACADEMY_STATUS, default=INVITED, db_index=True) @@ -275,11 +290,11 @@ def __init__(self, *args, **kwargs): updated_at = models.DateTimeField(auto_now=True, editable=False) def __str__(self): - return f'{self.email} for academy ({self.academy.name})' + return f"{self.email} for academy ({self.academy.name})" def save(self, *args, **kwargs): - if self.__old_status != self.status and self.status == 'ACTIVE': + if self.__old_status != self.status and self.status == "ACTIVE": academy_invite_accepted.send_robust(instance=self, sender=ProfileAcademy) super().save(*args, **kwargs) # Call the "real" save() method. 
@@ -303,7 +318,7 @@ class CredentialsGithub(models.Model): updated_at = models.DateTimeField(auto_now=True, editable=False) def __str__(self): - return f'{self.email} ({self.user.id})' + return f"{self.email} ({self.user.id})" def save(self, *args, **kwargs): if self.email: @@ -319,17 +334,18 @@ class AcademyAuthSettings(models.Model): github_default_team_ids = models.CharField( max_length=40, blank=True, - default='', - help_text='User will be invited to this github team ID when joining the github organization') - github_is_sync = models.BooleanField(default=False, - blank=False, - help_text='If true, will try synching users every few hours') - github_error_log = models.JSONField(default=None, - blank=True, - null=True, - help_text='Error trace log for github API communication') + default="", + help_text="User will be invited to this github team ID when joining the github organization", + ) + github_is_sync = models.BooleanField( + default=False, blank=False, help_text="If true, will try synching users every few hours" + ) + github_error_log = models.JSONField( + default=None, blank=True, null=True, help_text="Error trace log for github API communication" + ) auto_sync_content = models.BooleanField( - default=False, help_text='If true, will attempt to create WebhookSubscription on each asset repo') + default=False, help_text="If true, will attempt to create WebhookSubscription on each asset repo" + ) def add_error(self, msg): if self.github_error_log is None: @@ -340,9 +356,9 @@ def add_error(self, msg): def to_datetime(date_str): return datetime.fromisoformat(date_str) - self.github_error_log = [e for e in self.github_error_log if thirty_days_old < to_datetime(e['at'])] + self.github_error_log = [e for e in self.github_error_log if thirty_days_old < to_datetime(e["at"])] - self.github_error_log.append({'msg': msg, 'at': str(timezone.now())}) + self.github_error_log.append({"msg": msg, "at": str(timezone.now())}) self.save() return self.github_error_log @@ -352,27 +368,27 @@ def clean_errors(self, msg): return self.github_error_log -PENDING = 'PENDING' -SYNCHED = 'SYNCHED' -UNKNOWN = 'UNKNOWN' -PAYMENT_CONFLICT = 'PAYMENT_CONFLICT' +PENDING = "PENDING" +SYNCHED = "SYNCHED" +UNKNOWN = "UNKNOWN" +PAYMENT_CONFLICT = "PAYMENT_CONFLICT" STORAGE_STATUS = ( - (PENDING, 'Pending'), - (SYNCHED, 'Synched'), - (ERROR, 'Error'), - (UNKNOWN, 'Unknown'), - (PAYMENT_CONFLICT, 'Payment conflict'), + (PENDING, "Pending"), + (SYNCHED, "Synched"), + (ERROR, "Error"), + (UNKNOWN, "Unknown"), + (PAYMENT_CONFLICT, "Payment conflict"), ) -ADD = 'ADD' -INVITE = 'INVITE' -DELETE = 'DELETE' -IGNORE = 'IGNORE' +ADD = "ADD" +INVITE = "INVITE" +DELETE = "DELETE" +IGNORE = "IGNORE" STORAGE_ACTION = ( - (ADD, 'Add'), - (DELETE, 'Delete'), - (INVITE, 'Invite'), - (IGNORE, 'Ignore'), + (ADD, "Add"), + (DELETE, "Delete"), + (INVITE, "Invite"), + (IGNORE, "Ignore"), ) @@ -385,11 +401,13 @@ def __init__(self, *args, **kwargs): academy = models.ForeignKey(Academy, on_delete=models.CASCADE) user = models.ForeignKey(User, on_delete=models.SET_NULL, default=None, null=True) - username = models.SlugField(max_length=40, - default=None, - null=True, - blank=True, - help_text='Only used when the username has not been found on 4Geeks') + username = models.SlugField( + max_length=40, + default=None, + null=True, + blank=True, + help_text="Only used when the username has not been found on 4Geeks", + ) storage_status = models.CharField(max_length=20, choices=STORAGE_STATUS, default=PENDING) storage_action = 
models.CharField(max_length=20, choices=STORAGE_ACTION, default=ADD) storage_log = models.JSONField(default=None, null=True, blank=True) @@ -401,13 +419,13 @@ def __init__(self, *args, **kwargs): def __str__(self): if self.user is None: - return str(self.id) + ' ' + str(self.username) + return str(self.id) + " " + str(self.username) else: - return str(self.user.email) + ' ' + str(self.username) + return str(self.user.email) + " " + str(self.username) @staticmethod def create_log(msg): - return {'msg': msg, 'at': str(timezone.now())} + return {"msg": msg, "at": str(timezone.now())} def log(self, msg, reset=True): @@ -429,8 +447,8 @@ def save(self, *args, **kwargs): exit_op = super().save(*args, **kwargs) - if has_mutated and self.storage_status == 'SYNCHED': - prev = GithubAcademyUserLog.objects.filter(academy_user=self).order_by('-created_at').first() + if has_mutated and self.storage_status == "SYNCHED": + prev = GithubAcademyUserLog.objects.filter(academy_user=self).order_by("-created_at").first() user_log = GithubAcademyUserLog( academy_user=self, @@ -472,7 +490,7 @@ class CredentialsSlack(models.Model): updated_at = models.DateTimeField(auto_now=True, editable=False) def __str__(self): - return f'{self.user.email} ({self.authed_user})' + return f"{self.user.email} ({self.authed_user})" class CredentialsFacebook(models.Model): @@ -489,7 +507,7 @@ class CredentialsFacebook(models.Model): updated_at = models.DateTimeField(auto_now=True, editable=False) def __str__(self): - return f'Team {str(self.user)}' + return f"Team {str(self.user)}" class CredentialsQuickBooks(models.Model): @@ -516,25 +534,24 @@ class Token(rest_framework.authtoken.models.Token): """Bearer Token that support different types like `'login'`, `'temporal'` or `'permanent'`.""" key = models.CharField(max_length=40, db_index=True, unique=True) - #Foreign key relationship to user for many-to-one relationship - user = models.ForeignKey(settings.AUTH_USER_MODEL, - related_name='auth_token', - on_delete=models.CASCADE, - verbose_name=_('User')) - token_type = models.CharField(max_length=64, default='temporal', db_index=True) + # Foreign key relationship to user for many-to-one relationship + user = models.ForeignKey( + settings.AUTH_USER_MODEL, related_name="auth_token", on_delete=models.CASCADE, verbose_name=_("User") + ) + token_type = models.CharField(max_length=64, default="temporal", db_index=True) expires_at = models.DateTimeField(default=None, blank=True, null=True, db_index=True) def save(self, *args, **kwargs): without_expire_at = not self.expires_at - if without_expire_at and self.token_type == 'login': + if without_expire_at and self.token_type == "login": utc_now = timezone.now() self.expires_at = utc_now + LOGIN_TOKEN_LIFETIME - if without_expire_at and self.token_type == 'temporal': + if without_expire_at and self.token_type == "temporal": utc_now = timezone.now() self.expires_at = utc_now + TEMPORAL_TOKEN_LIFETIME - if self.token_type == 'one_time' or self.token_type == 'permanent': + if self.token_type == "one_time" or self.token_type == "permanent": self.expires_at = None super().save(*args, **kwargs) @@ -548,32 +565,33 @@ def delete_expired_tokens() -> None: @classmethod def get_or_create(cls, user, token_type: str, **kwargs: Any): utc_now = timezone.now() - kwargs['token_type'] = token_type + kwargs["token_type"] = token_type cls.delete_expired_tokens() if token_type not in TOKEN_TYPE: raise InvalidTokenType(f'Invalid token_type, correct values are {", ".join(TOKEN_TYPE)}') - has_hours_length = 
'hours_length' in kwargs - has_expires_at = 'expires_at' in kwargs + has_hours_length = "hours_length" in kwargs + has_expires_at = "expires_at" in kwargs - if (token_type == 'one_time' or token_type == 'permanent') and (has_hours_length or has_expires_at): - raise BadArguments(f'You can\'t provide token_type=\'{token_type}\' and ' - 'has_hours_length or has_expires_at together') + if (token_type == "one_time" or token_type == "permanent") and (has_hours_length or has_expires_at): + raise BadArguments( + f"You can't provide token_type='{token_type}' and " "has_hours_length or has_expires_at together" + ) if has_hours_length and has_expires_at: - raise BadArguments('You can\'t provide hours_length and expires_at argument together') + raise BadArguments("You can't provide hours_length and expires_at argument together") if has_hours_length: - kwargs['expires_at'] = utc_now + timezone.timedelta(hours=kwargs['hours_length']) - del kwargs['hours_length'] + kwargs["expires_at"] = utc_now + timezone.timedelta(hours=kwargs["hours_length"]) + del kwargs["hours_length"] token = None created = False try: - if token_type == 'one_time': + if token_type == "one_time": raise TryToGetOrCreateAOneTimeToken() token, created = Token.objects.get_or_create(user=user, **kwargs) @@ -597,7 +615,7 @@ def get_valid(cls, token: str): @classmethod def validate_and_destroy(cls, hash: str) -> User: - token = Token.objects.filter(key=hash, token_type='one_time').first() + token = Token.objects.filter(key=hash, token_type="one_time").first() if not token: raise TokenNotFound() @@ -608,7 +626,7 @@ def validate_and_destroy(cls, hash: str) -> User: class Meta: # ensure user and name are unique - unique_together = (('user', 'key'), ) + unique_together = (("user", "key"),) class DeviceId(models.Model): @@ -632,7 +650,7 @@ class GitpodUser(models.Model): default=None, null=True, blank=True, - help_text='If a gitpod user is not connected to a real user and academy in the database, it will be deleted ASAP' + help_text="If a gitpod user is not connected to a real user and academy in the database, it will be deleted ASAP", ) @@ -644,6 +662,6 @@ class App(models.Model): """ def __init__(self, *args, **kwargs): - raise DeprecationWarning('authenticate.App was deprecated, use linked_services.App instead') + raise DeprecationWarning("authenticate.App was deprecated, use linked_services.App instead") - name = models.CharField(max_length=25, unique=True, help_text='Descriptive and unique name of the app') + name = models.CharField(max_length=25, unique=True, help_text="Descriptive and unique name of the app") diff --git a/breathecode/authenticate/permissions/contexts.py b/breathecode/authenticate/permissions/contexts.py index 306d9a706..8e8366533 100644 --- a/breathecode/authenticate/permissions/contexts.py +++ b/breathecode/authenticate/permissions/contexts.py @@ -3,15 +3,15 @@ def user(client: LaunchDarkly, user: User): - key = f'{user.id}' - name = f'{user.first_name} {user.last_name} ({user.email})' - kind = 'user' + key = f"{user.id}" + name = f"{user.first_name} {user.last_name} ({user.email})" + kind = "user" context = { - 'id': user.id, - 'email': user.email, - 'username': user.username, - 'date_joined': user.date_joined, - 'groups': [x.name for x in user.groups.all()], + "id": user.id, + "email": user.email, + "username": user.username, + "date_joined": user.date_joined, + "groups": [x.name for x in user.groups.all()], } return client.context(key, name, kind, context) diff --git a/breathecode/authenticate/permissions/flags.py 
b/breathecode/authenticate/permissions/flags.py index b9e2626a9..4d5f36ff1 100644 --- a/breathecode/authenticate/permissions/flags.py +++ b/breathecode/authenticate/permissions/flags.py @@ -1,8 +1,7 @@ -__all__ = ['api'] +__all__ = ["api"] -class API: - ... +class API: ... api = API() diff --git a/breathecode/authenticate/receivers.py b/breathecode/authenticate/receivers.py index 27c184d7b..d668054bf 100644 --- a/breathecode/authenticate/receivers.py +++ b/breathecode/authenticate/receivers.py @@ -41,20 +41,20 @@ def set_user_group(sender, instance, created: bool, **_): # prevent errors with migrations try: if sender == User: - group = Group.objects.filter(name='Default').first() + group = Group.objects.filter(name="Default").first() groups = instance.groups - is_valid_profile_academy = sender == ProfileAcademy and instance.user and instance.status == 'ACTIVE' - if is_valid_profile_academy and instance.role.slug == 'student': - group = Group.objects.filter(name='Student').first() + is_valid_profile_academy = sender == ProfileAcademy and instance.user and instance.status == "ACTIVE" + if is_valid_profile_academy and instance.role.slug == "student": + group = Group.objects.filter(name="Student").first() groups = instance.user.groups - if is_valid_profile_academy and instance.role.slug == 'teacher': - group = Group.objects.filter(name='Teacher').first() + if is_valid_profile_academy and instance.role.slug == "teacher": + group = Group.objects.filter(name="Teacher").first() groups = instance.user.groups if sender == MentorProfile: - group = Group.objects.filter(name='Mentor').first() + group = Group.objects.filter(name="Mentor").first() groups = instance.user.groups if groups and group: @@ -77,24 +77,26 @@ def unset_user_group(sender, instance, **_): group = None groups = None - is_valid_profile_academy = sender == ProfileAcademy and instance.user and instance.status == 'ACTIVE' - if is_valid_profile_academy and instance.role.slug == 'student': - should_be_deleted = not ProfileAcademy.objects.filter(user=instance.user, role__slug='student', - status='ACTIVE').exists() + is_valid_profile_academy = sender == ProfileAcademy and instance.user and instance.status == "ACTIVE" + if is_valid_profile_academy and instance.role.slug == "student": + should_be_deleted = not ProfileAcademy.objects.filter( + user=instance.user, role__slug="student", status="ACTIVE" + ).exists() - group = Group.objects.filter(name='Student').first() + group = Group.objects.filter(name="Student").first() groups = instance.user.groups - if is_valid_profile_academy and instance.role.slug == 'teacher': - should_be_deleted = not ProfileAcademy.objects.filter(user=instance.user, role__slug='teacher', - status='ACTIVE').exists() + if is_valid_profile_academy and instance.role.slug == "teacher": + should_be_deleted = not ProfileAcademy.objects.filter( + user=instance.user, role__slug="teacher", status="ACTIVE" + ).exists() - group = Group.objects.filter(name='Teacher').first() + group = Group.objects.filter(name="Teacher").first() groups = instance.user.groups if sender == MentorProfile: should_be_deleted = not MentorProfile.objects.filter(user=instance.user).exists() - group = Group.objects.filter(name='Mentor').first() + group = Group.objects.filter(name="Mentor").first() groups = instance.user.groups if should_be_deleted and groups and group: @@ -113,11 +115,11 @@ def post_delete_cohort_user(sender, instance, **_): if instance.cohort.never_ends: return None - logger.debug('Cohort user deleted, removing from organization') + 
logger.debug("Cohort user deleted, removing from organization") args = (instance.cohort.id, instance.user.id) - kwargs = {'force': True} + kwargs = {"force": True} - manager = schedule_task(async_remove_from_organization, '3w') + manager = schedule_task(async_remove_from_organization, "3w") if not manager.exists(*args, **kwargs): manager.call(*args, **kwargs) @@ -125,8 +127,8 @@ def post_delete_cohort_user(sender, instance, **_): @receiver(student_edu_status_updated, sender=CohortUser) def post_save_cohort_user(sender, instance, **_): - logger.debug('User educational status updated to: ' + str(instance.educational_status)) - if instance.educational_status == 'ACTIVE': + logger.debug("User educational status updated to: " + str(instance.educational_status)) + if instance.educational_status == "ACTIVE": # never ending cohorts cannot be in synch with github if instance.cohort.never_ends: @@ -136,13 +138,16 @@ def post_save_cohort_user(sender, instance, **_): else: args = (instance.cohort.id, instance.user.id) - manager = schedule_task(async_remove_from_organization, '3w') + manager = schedule_task(async_remove_from_organization, "3w") if not manager.exists(*args): manager.call(*args) @receiver(invite_status_updated, sender=UserInvite) def handle_invite_accepted(sender: Type[UserInvite], instance: UserInvite, **_): - if instance.status == 'ACCEPTED' and not instance.user and User.objects.filter( - email=instance.email).exists() is False: + if ( + instance.status == "ACCEPTED" + and not instance.user + and User.objects.filter(email=instance.email).exists() is False + ): tasks.create_user_from_invite.apply_async(args=[instance.id], countdown=60) diff --git a/breathecode/authenticate/serializers.py b/breathecode/authenticate/serializers.py index 0f4180521..405177cb4 100644 --- a/breathecode/authenticate/serializers.py +++ b/breathecode/authenticate/serializers.py @@ -37,6 +37,7 @@ class GetSmallCohortSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. id = serpy.Field() slug = serpy.Field() @@ -47,6 +48,7 @@ class GetSmallCohortSerializer(serpy.Serializer): class GetSmallAcademySerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. id = serpy.Field() name = serpy.Field() @@ -55,6 +57,7 @@ class GetSmallAcademySerializer(serpy.Serializer): class UserTinySerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. id = serpy.Field() username = serpy.Field() @@ -65,6 +68,7 @@ class UserTinySerializer(serpy.Serializer): class UserBigSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. id = serpy.Field() email = serpy.Field() @@ -73,6 +77,7 @@ class UserBigSerializer(serpy.Serializer): class GitpodUserSmallSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. id = serpy.Field() github_username = serpy.Field() @@ -87,6 +92,7 @@ class GitpodUserSmallSerializer(serpy.Serializer): class AcademyTinySerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. 
id = serpy.Field() slug = serpy.Field() @@ -96,6 +102,7 @@ class AcademyTinySerializer(serpy.Serializer): class CohortTinySerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. slug = serpy.Field() name = serpy.Field() @@ -103,6 +110,7 @@ class CohortTinySerializer(serpy.Serializer): class TokenSmallSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. user = UserTinySerializer() key = serpy.Field() @@ -110,14 +118,15 @@ class TokenSmallSerializer(serpy.Serializer): reset_github_url = serpy.MethodField() def get_reset_password_url(self, obj): - return os.getenv('API_URL') + '/v1/auth/password/' + str(obj.key) + return os.getenv("API_URL") + "/v1/auth/password/" + str(obj.key) def get_reset_github_url(self, obj): - return os.getenv('API_URL') + '/v1/auth/github/' + str(obj.key) + return os.getenv("API_URL") + "/v1/auth/github/" + str(obj.key) class RoleSmallSerializer(serpy.Serializer): """The serializer schema definition.""" + id = serpy.MethodField() slug = serpy.Field() name = serpy.Field() @@ -128,6 +137,7 @@ def get_id(self, obj): class RoleBigSerializer(serpy.Serializer): """The serializer schema definition.""" + id = serpy.MethodField() slug = serpy.Field() name = serpy.Field() @@ -138,11 +148,12 @@ def get_id(self, obj): return obj.slug def get_capabilities(self, obj): - return obj.capabilities.all().values_list('slug', flat=True) + return obj.capabilities.all().values_list("slug", flat=True) class GithubSmallSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. avatar_url = serpy.Field() name = serpy.Field() @@ -151,6 +162,7 @@ class GithubSmallSerializer(serpy.Serializer): class GithubUserSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. id = serpy.Field() academy = GetSmallAcademySerializer(required=False) @@ -213,11 +225,12 @@ class UserInviteSerializer(UserInviteNoUrlSerializer): def get_invite_url(self, _invite): if _invite.token is None: return None - return os.getenv('API_URL') + '/v1/auth/member/invite/' + str(_invite.token) + return os.getenv("API_URL") + "/v1/auth/member/invite/" + str(_invite.token) class AcademySerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. id = serpy.Field() name = serpy.Field() @@ -244,6 +257,7 @@ def get_cohort(self, obj): class ProfileAcademySmallSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. id = serpy.Field() academy = AcademySerializer() @@ -256,6 +270,7 @@ def get_role(self, obj): class AcademySmallSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. id = serpy.Field() name = serpy.Field() @@ -264,6 +279,7 @@ class AcademySmallSerializer(serpy.Serializer): class UserSmallSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. 
id = serpy.Field() email = serpy.Field() @@ -273,13 +289,13 @@ class UserSmallSerializer(serpy.Serializer): profile = serpy.MethodField() def get_github(self, obj): - if not hasattr(obj, 'credentialsgithub'): + if not hasattr(obj, "credentialsgithub"): return None return GithubSmallSerializer(obj.credentialsgithub).data def get_profile(self, obj): - if not hasattr(obj, 'profile'): + if not hasattr(obj, "profile"): return None return GetProfileSmallSerializer(obj.profile).data @@ -287,6 +303,7 @@ def get_profile(self, obj): class UserSuperSmallSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. id = serpy.Field() email = serpy.Field() @@ -295,7 +312,7 @@ class UserSuperSmallSerializer(serpy.Serializer): profile = serpy.MethodField() def get_profile(self, obj): - if not hasattr(obj, 'profile'): + if not hasattr(obj, "profile"): return None return GetProfileSmallSerializer(obj.profile).data @@ -303,6 +320,7 @@ def get_profile(self, obj): class GetProfileAcademySerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. id = serpy.Field() first_name = serpy.Field() @@ -320,11 +338,12 @@ class GetProfileAcademySerializer(serpy.Serializer): invite_url = serpy.MethodField() def get_invite_url(self, _invite): - return os.getenv('API_URL') + '/v1/auth/academy/html/invite' + return os.getenv("API_URL") + "/v1/auth/academy/html/invite" class GetProfileAcademySmallSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. id = serpy.Field() first_name = serpy.Field() @@ -343,6 +362,7 @@ class GetProfileAcademySmallSerializer(serpy.Serializer): class GetProfileAcademyTinySerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. id = serpy.Field() first_name = serpy.Field() @@ -373,7 +393,7 @@ class AppUserSerializer(serpy.Serializer): profile = serpy.MethodField() def get_profile(self, obj): - if not hasattr(obj, 'profile'): + if not hasattr(obj, "profile"): return None return GetProfileSmallSerializer(obj.profile).data @@ -400,6 +420,7 @@ def get_up_to_date(self, obj): class SettingsSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. lang = serpy.Field() main_currency = serpy.Field() @@ -407,6 +428,7 @@ class SettingsSerializer(serpy.Serializer): class UserSerializer(AppUserSerializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. roles = serpy.MethodField() @@ -419,7 +441,7 @@ def get_permissions(self, obj): for group in obj.groups.all(): permissions |= group.permissions.all() - return GetPermissionSmallSerializer(permissions.distinct().order_by('-id'), many=True).data + return GetPermissionSmallSerializer(permissions.distinct().order_by("-id"), many=True).data def get_roles(self, obj): roles = ProfileAcademy.objects.filter(user=obj.id) @@ -432,6 +454,7 @@ def get_settings(self, obj): class GroupSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. 
id = serpy.Field() name = serpy.Field() @@ -439,6 +462,7 @@ class GroupSerializer(serpy.Serializer): class AuthSettingsBigSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. id = serpy.Field() academy = AcademyTinySerializer() @@ -458,18 +482,18 @@ class AcademyAuthSettingsSerializer(serializers.ModelSerializer): class Meta: model = AcademyAuthSettings - exclude = ('academy', 'github_error_log') + exclude = ("academy", "github_error_log") def create(self, validated_data): - return super().create({**validated_data, 'academy': Academy.filter(id=self.context['academy_id']).first()}) + return super().create({**validated_data, "academy": Academy.filter(id=self.context["academy_id"]).first()}) class StaffSerializer(serializers.ModelSerializer): class Meta: model = ProfileAcademy - fields = ('user', 'role', 'academy', 'first_name', 'last_name', 'address', 'phone', 'status') + fields = ("user", "role", "academy", "first_name", "last_name", "address", "phone", "status") class UserMeProfileSerializer(serializers.ModelSerializer): @@ -477,7 +501,7 @@ class UserMeProfileSerializer(serializers.ModelSerializer): class Meta: model = Profile exclude = () - read_only_fields = ('user', ) + read_only_fields = ("user",) class UserMeSerializer(serializers.ModelSerializer): @@ -485,27 +509,26 @@ class UserMeSerializer(serializers.ModelSerializer): class Meta: model = User - exclude = ('is_active', 'is_staff', 'password', 'username') + exclude = ("is_active", "is_staff", "password", "username") # def create(self, validated_data): def update(self, instance, validated_data): - profile_data = validated_data.pop('profile', None) + profile_data = validated_data.pop("profile", None) if profile_data: serializer = None try: - serializer = UserMeProfileSerializer(self.instance.profile, - data={ - **profile_data, 'user': self.instance.id - }) + serializer = UserMeProfileSerializer( + self.instance.profile, data={**profile_data, "user": self.instance.id} + ) except Profile.DoesNotExist: - serializer = UserMeProfileSerializer(data={**profile_data, 'user': self.instance.id}) + serializer = UserMeProfileSerializer(data={**profile_data, "user": self.instance.id}) if serializer and serializer.is_valid(): serializer.save() else: - raise ValidationException('Error saving user profile') + raise ValidationException("Error saving user profile") return super().update(self.instance, validated_data) @@ -514,215 +537,249 @@ class UserSettingsSerializer(serializers.ModelSerializer): class Meta: model = UserSetting - exclude = ('user', ) + exclude = ("user",) class MemberPOSTSerializer(serializers.ModelSerializer): invite = serializers.BooleanField(write_only=True, required=False) - cohort = serializers.ListField(child=serializers.IntegerField(write_only=True, required=False), - write_only=True, - required=False) + cohort = serializers.ListField( + child=serializers.IntegerField(write_only=True, required=False), write_only=True, required=False + ) user = serializers.IntegerField(write_only=True, required=False) status = serializers.CharField(read_only=True) class Meta: model = ProfileAcademy - fields = ('email', 'role', 'user', 'first_name', 'last_name', 'address', 'phone', 'invite', 'cohort', 'status') + fields = ("email", "role", "user", "first_name", "last_name", "address", "phone", "invite", "cohort", "status") def validate(self, data): - lang = data.get('lang', 'en') + lang = data.get("lang", "en") - if 'email' in data and data['email']: - 
data['email'] = data['email'].lower() - user = User.objects.filter(email=data['email']).first() + if "email" in data and data["email"]: + data["email"] = data["email"].lower() + user = User.objects.filter(email=data["email"]).first() if user: - data['user'] = user.id + data["user"] = user.id - if 'user' not in data: - if 'invite' not in data or data['invite'] != True: - raise ValidationException('User does not exists, do you want to invite it?', slug='user-not-found') + if "user" not in data: + if "invite" not in data or data["invite"] != True: + raise ValidationException("User does not exists, do you want to invite it?", slug="user-not-found") - elif 'email' not in data: - raise ValidationException('Please specify user id or member email', slug='no-email-or-id') + elif "email" not in data: + raise ValidationException("Please specify user id or member email", slug="no-email-or-id") - already = ProfileAcademy.objects.filter(email=data['email'], academy=self.context['academy_id']).exists() + already = ProfileAcademy.objects.filter(email=data["email"], academy=self.context["academy_id"]).exists() if already: raise ValidationException( - 'There is a member already in this academy with this email, or with invitation to this ' - 'email pending', + "There is a member already in this academy with this email, or with invitation to this " + "email pending", code=400, - slug='already-exists-with-this-email') + slug="already-exists-with-this-email", + ) - elif 'user' in data: - student_role = Role.objects.filter(slug='student').first() + elif "user" in data: + student_role = Role.objects.filter(slug="student").first() - already = ProfileAcademy.objects.filter( - user=data['user'], academy=self.context['academy_id']).exclude(role=student_role).first() + already = ( + ProfileAcademy.objects.filter(user=data["user"], academy=self.context["academy_id"]) + .exclude(role=student_role) + .first() + ) if already: - raise ValidationException(f'This user is already a member of this academy as {str(already.role)}', - slug='already-exists') + raise ValidationException( + f"This user is already a member of this academy as {str(already.role)}", slug="already-exists" + ) - academy_id = data['academy'] if 'academy' in data else self.context['academy_id'] - if 'user' in data: - user = User.objects.filter(id=data['user']).first() + academy_id = data["academy"] if "academy" in data else self.context["academy_id"] + if "user" in data: + user = User.objects.filter(id=data["user"]).first() else: - user = User.objects.filter(email=data['email']).first() - if 'user' in data: - profile_academy = ProfileAcademy.objects.filter(user__id=data['user'], - academy__id=academy_id, first_name__isnull=False, \ - last_name__isnull=False).exclude(first_name='', last_name='').first() + user = User.objects.filter(email=data["email"]).first() + if "user" in data: + profile_academy = ( + ProfileAcademy.objects.filter( + user__id=data["user"], academy__id=academy_id, first_name__isnull=False, last_name__isnull=False + ) + .exclude(first_name="", last_name="") + .first() + ) else: - profile_academy = ProfileAcademy.objects.filter(email=data['email'], - academy__id=academy_id, first_name__isnull=False, \ - last_name__isnull=False).exclude(first_name='', last_name='').first() - if 'first_name' not in data: - data['first_name'] = '' - if not data['first_name'] and profile_academy: + profile_academy = ( + ProfileAcademy.objects.filter( + email=data["email"], academy__id=academy_id, first_name__isnull=False, last_name__isnull=False + ) + 
.exclude(first_name="", last_name="") + .first() + ) + if "first_name" not in data: + data["first_name"] = "" + if not data["first_name"] and profile_academy: - data['first_name'] = profile_academy.first_name - if not data['first_name'] and user: + data["first_name"] = profile_academy.first_name + if not data["first_name"] and user: - data['first_name'] = user.first_name - if not data['first_name']: - raise ValidationException(translation(lang, - en='Unable to find first name on this user', - es='Imposible encontrar el nombre en este usuario', - slug='first-name-not-found'), - code=400) + data["first_name"] = user.first_name + if not data["first_name"]: + raise ValidationException( + translation( + lang, + en="Unable to find first name on this user", + es="Imposible encontrar el nombre en este usuario", + slug="first-name-not-found", + ), + code=400, + ) - if 'last_name' not in data: - data['last_name'] = '' + if "last_name" not in data: + data["last_name"] = "" - if not data['last_name'] and profile_academy: + if not data["last_name"] and profile_academy: - data['last_name'] = profile_academy.last_name + data["last_name"] = profile_academy.last_name - if not data['last_name'] and user: + if not data["last_name"] and user: - data['last_name'] = user.last_name + data["last_name"] = user.last_name - if not data['last_name']: - raise ValidationException(translation(lang, - en='Unable to find last name on this user', - es='Imposible encontrar el apellido en este usuario', - slug='last-name-not-found'), - code=400) + if not data["last_name"]: + raise ValidationException( + translation( + lang, + en="Unable to find last name on this user", + es="Imposible encontrar el apellido en este usuario", + slug="last-name-not-found", + ), + code=400, + ) - event = data.get('event', None) + event = data.get("event", None) if event is not None: try: args = {} if isinstance(event, int): - args['id'] = event + args["id"] = event else: - args['slug'] = event + args["slug"] = event event = Event.objects.filter(**args).get() - data['event_slug'] = event.slug + data["event_slug"] = event.slug except Exception: - raise ValidationException(translation(lang, - en='Unable to find the given Event', - es='Imposible encontrar el Evento dado', - slug='event-not-found'), - code=400) + raise ValidationException( + translation( + lang, + en="Unable to find the given Event", + es="Imposible encontrar el Evento dado", + slug="event-not-found", + ), + code=400, + ) - asset = data.get('asset', None) + asset = data.get("asset", None) if asset is not None: try: args = {} if isinstance(asset, int): - args['id'] = asset + args["id"] = asset else: - args['slug'] = asset + args["slug"] = asset asset = Asset.objects.filter(**args).get() - data['asset_slug'] = asset.slug + data["asset_slug"] = asset.slug except Exception: - raise ValidationException(translation(lang, - en='Unable to find the given Asset', - es='Imposible encontrar el Asset dado', - slug='asset-not-found'), - code=400) + raise ValidationException( + translation( + lang, + en="Unable to find the given Asset", + es="Imposible encontrar el Asset dado", + slug="asset-not-found", + ), + code=400, + ) - conversion_info = data.get('conversion_info', None) + conversion_info = data.get("conversion_info", None) validate_conversion_info(conversion_info, lang) return data def create(self, validated_data): - academy = Academy.objects.filter(id=self.context.get('academy_id')).first() + academy = Academy.objects.filter(id=self.context.get("academy_id")).first() if academy is None: - 
raise ValidationException('Academy not found') + raise ValidationException("Academy not found") - role = validated_data['role'] + role = validated_data["role"] cohort = [] - if 'cohort' in validated_data: + if "cohort" in validated_data: - cohort_list = validated_data.pop('cohort') + cohort_list = validated_data.pop("cohort") for cohort_id in cohort_list: cohort_search = Cohort.objects.filter(id=cohort_id).first() if cohort_search is None: - raise ValidationException('Cohort not found', slug='cohort-not-found') + raise ValidationException("Cohort not found", slug="cohort-not-found") cohort.append(cohort_search) user = None email = None - status = 'INVITED' + status = "INVITED" # if the user already exists, we don't consider it and invite, we add the user immediately to the academy. - if 'user' in validated_data: - user = User.objects.filter(id=validated_data['user']).first() + if "user" in validated_data: + user = User.objects.filter(id=validated_data["user"]).first() if user is None: - raise ValidationException('User not found', code=400, slug='user-not-found') + raise ValidationException("User not found", code=400, slug="user-not-found") email = user.email - status = 'ACTIVE' + status = "ACTIVE" - student_role = Role.objects.filter(slug='student').first() + student_role = Role.objects.filter(slug="student").first() already_as_student = ProfileAcademy.objects.filter(user=user, academy=academy.id, role=student_role).first() # avoid double students on the same academy and cohort if already_as_student is not None: - return super().update(already_as_student, { - **validated_data, - 'email': email, - 'user': user, - 'academy': academy, - 'role': role, - 'status': status, - }) + return super().update( + already_as_student, + { + **validated_data, + "email": email, + "user": user, + "academy": academy, + "role": role, + "status": status, + }, + ) # if there is not user (first time) it will be considere an invite - if 'user' not in validated_data: - validated_data.pop('invite') # the front end sends invite=true so we need to remove it - email = validated_data['email'].lower() + if "user" not in validated_data: + validated_data.pop("invite") # the front end sends invite=true so we need to remove it + email = validated_data["email"].lower() - if (len(cohort) == 0): + if len(cohort) == 0: cohort = [None] for single_cohort in cohort: query = { - 'cohort': single_cohort, - 'email__iexact': email, - 'author': self.context.get('request').user, + "cohort": single_cohort, + "email__iexact": email, + "author": self.context.get("request").user, } # if the cohort is not specified, process to find if the user was invite ignoring the cohort if not single_cohort: - del query['cohort'] + del query["cohort"] invite = UserInvite.objects.filter(**query).first() # avoid double invite if invite is not None: - raise ValidationException('You already invited this user, check for previous invites and resend', - code=400, - slug='already-invited') + raise ValidationException( + "You already invited this user, check for previous invites and resend", + code=400, + slug="already-invited", + ) for single_cohort in cohort: # prevent duplicate token (very low probability) @@ -731,41 +788,47 @@ def create(self, validated_data): if not UserInvite.objects.filter(token=token).exists(): break - invite = UserInvite(email=email, - first_name=validated_data['first_name'], - last_name=validated_data['last_name'], - academy=academy, - cohort=single_cohort, - role=role, - author=self.context.get('request').user, - token=token) + invite = 
UserInvite( + email=email, + first_name=validated_data["first_name"], + last_name=validated_data["last_name"], + academy=academy, + cohort=single_cohort, + role=role, + author=self.context.get("request").user, + token=token, + ) invite.save() - logger.debug('Sending invite email to ' + email) + logger.debug("Sending invite email to " + email) - params = {'callback': 'https://admin.4geeks.com'} + params = {"callback": "https://admin.4geeks.com"} querystr = urllib.parse.urlencode(params) - url = os.getenv('API_URL') + '/v1/auth/member/invite/' + \ - str(invite.token) + '?' + querystr - - notify_actions.send_email_message('welcome_academy', - email, { - 'email': email, - 'subject': 'Welcome to ' + academy.name, - 'LINK': url, - 'FIST_NAME': validated_data['first_name'], - }, - academy=academy) + url = os.getenv("API_URL") + "/v1/auth/member/invite/" + str(invite.token) + "?" + querystr + + notify_actions.send_email_message( + "welcome_academy", + email, + { + "email": email, + "subject": "Welcome to " + academy.name, + "LINK": url, + "FIST_NAME": validated_data["first_name"], + }, + academy=academy, + ) # add member to the academy (the cohort is inside validated_data - return super().create({ - **validated_data, - 'email': email, - 'user': user, - 'academy': academy, - 'role': role, - 'status': status, - }) + return super().create( + { + **validated_data, + "email": email, + "user": user, + "academy": academy, + "role": role, + "status": status, + } + ) # This method is almost repeated but now for students instead of academy members. @@ -786,141 +849,148 @@ def create(self, validated_data): # This method is almost repeated but now for students instead of academy memebers. class StudentPOSTSerializer(serializers.ModelSerializer): invite = serializers.BooleanField(write_only=True, required=False) - cohort = serializers.ListField(child=serializers.IntegerField(write_only=True, required=False), - write_only=True, - required=False) - plans = serializers.ListField(child=serializers.IntegerField(write_only=True, required=False), - write_only=True, - required=False) + cohort = serializers.ListField( + child=serializers.IntegerField(write_only=True, required=False), write_only=True, required=False + ) + plans = serializers.ListField( + child=serializers.IntegerField(write_only=True, required=False), write_only=True, required=False + ) user = serializers.IntegerField(write_only=True, required=False) status = serializers.CharField(read_only=True) class Meta: model = ProfileAcademy - fields = ('email', 'user', 'first_name', 'last_name', 'address', 'phone', 'invite', 'cohort', 'status', 'plans') + fields = ("email", "user", "first_name", "last_name", "address", "phone", "invite", "cohort", "status", "plans") list_serializer_class = StudentPOSTListSerializer def validate(self, data): - if 'email' in data and data['email']: - data['email'] = data['email'].lower() - user = User.objects.filter(email=data['email']).first() + if "email" in data and data["email"]: + data["email"] = data["email"].lower() + user = User.objects.filter(email=data["email"]).first() if user: - data['user'] = user.id + data["user"] = user.id - if 'user' not in data: - if 'invite' not in data or data['invite'] != True: - raise ValidationException('User does not exists, do you want to invite it?', slug='user-not-found') - elif 'email' not in data: - raise ValidationException('Please specify user id or student email', slug='no-email-or-id') + if "user" not in data: + if "invite" not in data or data["invite"] != True: + raise 
ValidationException("User does not exists, do you want to invite it?", slug="user-not-found") + elif "email" not in data: + raise ValidationException("Please specify user id or student email", slug="no-email-or-id") - already = ProfileAcademy.objects.filter(email=data['email'], academy=self.context['academy_id']).first() + already = ProfileAcademy.objects.filter(email=data["email"], academy=self.context["academy_id"]).first() if already: - raise ValidationException('There is a student already in this academy, or with invitation pending', - slug='already-exists-with-this-email') + raise ValidationException( + "There is a student already in this academy, or with invitation pending", + slug="already-exists-with-this-email", + ) - elif 'user' in data: - already = ProfileAcademy.objects.filter(user=data['user'], academy=self.context['academy_id']).first() + elif "user" in data: + already = ProfileAcademy.objects.filter(user=data["user"], academy=self.context["academy_id"]).first() if already: - raise ValidationException('This user is already a member of this academy staff', - code=400, - slug='already-exists') + raise ValidationException( + "This user is already a member of this academy staff", code=400, slug="already-exists" + ) return data def create(self, validated_data): from breathecode.payments.models import Plan - academy = Academy.objects.filter(id=self.context.get('academy_id')).first() + academy = Academy.objects.filter(id=self.context.get("academy_id")).first() if academy is None: - raise ValidationException('Academy not found') + raise ValidationException("Academy not found") - role = Role.objects.filter(slug='student').first() + role = Role.objects.filter(slug="student").first() if role is None: - raise ValidationException('Role student not found', slug='role-not-found') + raise ValidationException("Role student not found", slug="role-not-found") cohort = [] - if 'cohort' in validated_data: + if "cohort" in validated_data: - cohort_list = validated_data.pop('cohort') + cohort_list = validated_data.pop("cohort") for cohort_id in cohort_list: cohort_search = Cohort.objects.filter(id=cohort_id).first() if cohort_search is None: - raise ValidationException('Cohort not found', slug='cohort-not-found') + raise ValidationException("Cohort not found", slug="cohort-not-found") cohort.append(cohort_search) user = None email = None - status = 'INVITED' - if 'user' in validated_data: - user = User.objects.filter(id=validated_data['user']).first() + status = "INVITED" + if "user" in validated_data: + user = User.objects.filter(id=validated_data["user"]).first() if user is None: - raise ValidationException('User not found', slug='user-not-found') + raise ValidationException("User not found", slug="user-not-found") email = user.email - token, created = Token.get_or_create(user, token_type='temporal') - querystr = urllib.parse.urlencode({'callback': get_app_url(), 'token': token}) - url = os.getenv('API_URL') + '/v1/auth/academy/html/invite?' + querystr + token, created = Token.get_or_create(user, token_type="temporal") + querystr = urllib.parse.urlencode({"callback": get_app_url(), "token": token}) + url = os.getenv("API_URL") + "/v1/auth/academy/html/invite?" 
+ querystr - if 'invite' in validated_data: - del validated_data['invite'] + if "invite" in validated_data: + del validated_data["invite"] - if 'plans' in validated_data: - del validated_data['plans'] + if "plans" in validated_data: + del validated_data["plans"] - profile_academy = ProfileAcademy.objects.create(**{ - **validated_data, - 'email': email, - 'user': user, - 'academy': academy, - 'role': role, - 'status': status, - }) + profile_academy = ProfileAcademy.objects.create( + **{ + **validated_data, + "email": email, + "user": user, + "academy": academy, + "role": role, + "status": status, + } + ) profile_academy.save() - notify_actions.send_email_message('academy_invite', - email, { - 'subject': f'Invitation to study at {academy.name}', - 'invites': [ProfileAcademySmallSerializer(profile_academy).data], - 'user': UserSmallSerializer(user).data, - 'LINK': url, - }, - academy=academy) + notify_actions.send_email_message( + "academy_invite", + email, + { + "subject": f"Invitation to study at {academy.name}", + "invites": [ProfileAcademySmallSerializer(profile_academy).data], + "user": UserSmallSerializer(user).data, + "LINK": url, + }, + academy=academy, + ) return profile_academy plans: list[Plan] = [] - if 'plans' in validated_data: - plan_list = validated_data.pop('plans') + if "plans" in validated_data: + plan_list = validated_data.pop("plans") for plan_id in plan_list: plan = Plan.objects.filter(id=plan_id).first() if plan is None: - raise ValidationException('Plan not found', slug='plan-not-found') + raise ValidationException("Plan not found", slug="plan-not-found") plans.append(plan) - if 'user' not in validated_data: - validated_data.pop('invite') # the front end sends invite=true so we need to remove it - email = validated_data['email'].lower() + if "user" not in validated_data: + validated_data.pop("invite") # the front end sends invite=true so we need to remove it + email = validated_data["email"].lower() - if (len(cohort) == 0): + if len(cohort) == 0: cohort = [None] for single_cohort in cohort: query = { - 'cohort': single_cohort, - 'email__iexact': email, - 'author': self.context.get('request').user, + "cohort": single_cohort, + "email__iexact": email, + "author": self.context.get("request").user, } # if the cohort is not specified, process to find if the user was invite ignoring the cohort if not single_cohort: - del query['cohort'] + del query["cohort"] invite = UserInvite.objects.filter(**query).first() if invite is not None: - raise ValidationException('You already invited this user', code=400, slug='already-invited') + raise ValidationException("You already invited this user", code=400, slug="already-invited") - if (len(cohort) == 0): + if len(cohort) == 0: cohort = [None] for single_cohort in cohort: @@ -930,104 +1000,118 @@ def create(self, validated_data): if not UserInvite.objects.filter(token=token).exists(): break - invite = UserInvite(email=email, - first_name=validated_data['first_name'], - last_name=validated_data['last_name'], - academy=academy, - cohort=single_cohort, - role=role, - author=self.context.get('request').user, - token=token) + invite = UserInvite( + email=email, + first_name=validated_data["first_name"], + last_name=validated_data["last_name"], + academy=academy, + cohort=single_cohort, + role=role, + author=self.context.get("request").user, + token=token, + ) invite.save() - logger.debug('Sending invite email to ' + email) + logger.debug("Sending invite email to " + email) - querystr = urllib.parse.urlencode({'callback': get_app_url()}) - url 
= os.getenv('API_URL') + '/v1/auth/member/invite/' + \ - str(invite.token) + '?' + querystr + querystr = urllib.parse.urlencode({"callback": get_app_url()}) + url = os.getenv("API_URL") + "/v1/auth/member/invite/" + str(invite.token) + "?" + querystr - notify_actions.send_email_message('welcome_academy', - email, { - 'email': email, - 'subject': 'Welcome to ' + academy.name, - 'LINK': url, - 'FIST_NAME': validated_data['first_name'] - }, - academy=academy) + notify_actions.send_email_message( + "welcome_academy", + email, + { + "email": email, + "subject": "Welcome to " + academy.name, + "LINK": url, + "FIST_NAME": validated_data["first_name"], + }, + academy=academy, + ) for plan in plans: plan.invites.add(invite) - if 'plans' in validated_data: - del validated_data['plans'] + if "plans" in validated_data: + del validated_data["plans"] - return ProfileAcademy.objects.create(**{ - **validated_data, - 'email': email, - 'user': user, - 'academy': academy, - 'role': role, - 'status': status, - }) + return ProfileAcademy.objects.create( + **{ + **validated_data, + "email": email, + "user": user, + "academy": academy, + "role": role, + "status": status, + } + ) class MemberPUTSerializer(serializers.ModelSerializer): class Meta: model = ProfileAcademy - fields = ('user', 'role', 'academy', 'first_name', 'last_name', 'phone', 'address') + fields = ("user", "role", "academy", "first_name", "last_name", "phone", "address") def validate(self, data): - lang = data.get('lang', 'en') + lang = data.get("lang", "en") - profile_academy = ProfileAcademy.objects.filter(user=data['user'], academy=data['academy']).first() + profile_academy = ProfileAcademy.objects.filter(user=data["user"], academy=data["academy"]).first() if not profile_academy: - raise ValidationError('User not found on this particular academy') + raise ValidationError("User not found on this particular academy") - if 'first_name' not in data: - data['first_name'] = '' + if "first_name" not in data: + data["first_name"] = "" - if not data['first_name'] and profile_academy: + if not data["first_name"] and profile_academy: - data['first_name'] = profile_academy.first_name + data["first_name"] = profile_academy.first_name - if not data['first_name']: + if not data["first_name"]: - data['first_name'] = data['user'].first_name + data["first_name"] = data["user"].first_name - if not data['first_name']: - raise ValidationException(translation(lang, - en='Unable to find first name on this user', - es='Imposible encontrar el nombre en este usuario', - slug='first-name-not-founded'), - code=400) + if not data["first_name"]: + raise ValidationException( + translation( + lang, + en="Unable to find first name on this user", + es="Imposible encontrar el nombre en este usuario", + slug="first-name-not-founded", + ), + code=400, + ) - if 'last_name' not in data: - data['last_name'] = '' + if "last_name" not in data: + data["last_name"] = "" - if not data['last_name'] and profile_academy: + if not data["last_name"] and profile_academy: - data['last_name'] = profile_academy.last_name + data["last_name"] = profile_academy.last_name - if not data['last_name']: + if not data["last_name"]: - data['last_name'] = data['user'].last_name + data["last_name"] = data["user"].last_name - if not data['last_name']: - raise ValidationException(translation(lang, - en='Unable to find last name on this user', - es='Imposible encontrar el apellido en este usuario', - slug='last-name-not-founded'), - code=400) + if not data["last_name"]: + raise ValidationException( + 
translation( + lang, + en="Unable to find last name on this user", + es="Imposible encontrar el apellido en este usuario", + slug="last-name-not-founded", + ), + code=400, + ) return data def update(self, instance, validated_data): - if instance.user.first_name is None or instance.user.first_name == '': - instance.user.first_name = instance.first_name or '' - if instance.user.last_name is None or instance.user.last_name == '': - instance.user.last_name = instance.last_name or '' + if instance.user.first_name is None or instance.user.first_name == "": + instance.user.first_name = instance.first_name or "" + if instance.user.last_name is None or instance.user.last_name == "": + instance.user.last_name = instance.last_name or "" instance.user.save() return super().update(instance, validated_data) @@ -1037,7 +1121,7 @@ class PUTGithubUserSerializer(serializers.ModelSerializer): class Meta: model = GithubAcademyUser - exclude = ('storage_status', 'user', 'academy', 'storage_log', 'storage_synch_at', 'username') + exclude = ("storage_status", "user", "academy", "storage_log", "storage_synch_at", "username") # def validate(self, data): @@ -1049,15 +1133,15 @@ class Meta: def update(self, instance, validated_data): - if instance.storage_action != validated_data['storage_action'] or instance.storage_action == 'ADD': + if instance.storage_action != validated_data["storage_action"] or instance.storage_action == "ADD": # manually ignoring a contact is synched immediately - if validated_data['storage_action'] == 'IGNORE': - validated_data['storage_status'] = 'SYNCHED' + if validated_data["storage_action"] == "IGNORE": + validated_data["storage_status"] = "SYNCHED" # anything else has to be processed later else: - validated_data['storage_status'] = 'PENDING' - validated_data['storage_log'] = [ - GithubAcademyUser.create_log('User was manually scheduled to be ' + validated_data['storage_action']) + validated_data["storage_status"] = "PENDING" + validated_data["storage_log"] = [ + GithubAcademyUser.create_log("User was manually scheduled to be " + validated_data["storage_action"]) ] return super().update(instance, validated_data) @@ -1067,49 +1151,49 @@ class POSTGithubUserSerializer(serializers.ModelSerializer): class Meta: model = GithubAcademyUser - exclude = ('storage_status', 'academy', 'storage_log', 'storage_synch_at', 'username') + exclude = ("storage_status", "academy", "storage_log", "storage_synch_at", "username") def validate(self, data): - academy_id = self.context.get('academy_id') - already = GithubAcademyUser.objects.filter(user=data['user'], academy=academy_id).first() + academy_id = self.context.get("academy_id") + already = GithubAcademyUser.objects.filter(user=data["user"], academy=academy_id).first() if already: - raise ValidationError('User already belongs to the organization') + raise ValidationError("User already belongs to the organization") - github = CredentialsGithub.objects.filter(user=data['user']).first() + github = CredentialsGithub.objects.filter(user=data["user"]).first() if github is None: - raise ValidationError('No github credentials found for user') + raise ValidationError("No github credentials found for user") - return {**data, 'username': github.username} + return {**data, "username": github.username} def create(self, validated_data): # anything else has to be processed later - validated_data['storage_action'] = 'ADD' - validated_data['storage_status'] = 'PENDING' - validated_data['storage_log'] = [GithubAcademyUser.create_log('User was manually added')] + 
validated_data["storage_action"] = "ADD" + validated_data["storage_status"] = "PENDING" + validated_data["storage_log"] = [GithubAcademyUser.create_log("User was manually added")] - return super().create({ - **validated_data, 'academy': Academy.objects.filter(id=self.context['academy_id']).first() - }) + return super().create( + {**validated_data, "academy": Academy.objects.filter(id=self.context["academy_id"]).first()} + ) class AuthSerializer(serializers.Serializer): - email = serializers.EmailField(label='Email') - password = serializers.CharField(label='Password', style={'input_type': 'password'}, trim_whitespace=False) + email = serializers.EmailField(label="Email") + password = serializers.CharField(label="Password", style={"input_type": "password"}, trim_whitespace=False) def validate(self, attrs): - email = attrs.get('email') - password = attrs.get('password') + email = attrs.get("email") + password = attrs.get("password") if email and password: email = email.lower() user = User.objects.filter(Q(email__iexact=email) | Q(username=email)).first() if not user: - msg = 'Unable to log in with provided credentials.' + msg = "Unable to log in with provided credentials." raise serializers.ValidationError(msg, code=403) if user.check_password(password) != True: - msg = 'Unable to log in with provided credentials.' + msg = "Unable to log in with provided credentials." raise serializers.ValidationError(msg, code=403) # The authenticate call simply returns None for is_active=False # users. (Assuming the default ModelBackend authentication @@ -1118,19 +1202,20 @@ def validate(self, attrs): msg = 'Must include "username" and "password".' raise serializers.ValidationError(msg, code=403) - if user and not UserInvite.objects.filter(email__iexact=email, status='ACCEPTED', - is_email_validated=True).exists(): - invites = UserInvite.objects.filter(email__iexact=email, status='ACCEPTED', - is_email_validated=False).order_by('-id') + if ( + user + and not UserInvite.objects.filter(email__iexact=email, status="ACCEPTED", is_email_validated=True).exists() + ): + invites = UserInvite.objects.filter( + email__iexact=email, status="ACCEPTED", is_email_validated=False + ).order_by("-id") data = UserInviteNoUrlSerializer(invites, many=True).data - raise ValidationException('You need to validate your email first', - slug='email-not-validated', - silent=True, - code=403, - data=data) + raise ValidationException( + "You need to validate your email first", slug="email-not-validated", silent=True, code=403, data=data + ) - attrs['user'] = user + attrs["user"] = user return attrs @@ -1138,12 +1223,12 @@ class UserInvitePUTSerializer(serializers.ModelSerializer): class Meta: model = UserInvite - fields = ('status', 'id') + fields = ("status", "id") def validate(self, data): - if 'status' not in data: - raise ValidationException('Missing status on invite') + if "status" not in data: + raise ValidationException("Missing status on invite") return data @@ -1152,8 +1237,16 @@ class GetGitpodUserSerializer(serializers.ModelSerializer): class Meta: model = GitpodUser - exclude = ('updated_at', 'created_at', 'user', 'academy', 'assignee_id', 'github_username', - 'position_in_gitpod_team', 'delete_status') + exclude = ( + "updated_at", + "created_at", + "user", + "academy", + "assignee_id", + "github_username", + "position_in_gitpod_team", + "delete_status", + ) class ProfileSerializer(serializers.ModelSerializer): @@ -1172,216 +1265,266 @@ class UserInviteWaitingListSerializer(serializers.ModelSerializer): class Meta: model 
= UserInvite - fields = ('id', 'email', 'first_name', 'last_name', 'phone', 'cohort', 'syllabus', 'access_token', 'plan', - 'plans', 'user', 'country', 'city', 'latitude', 'longitude', 'status', 'conversion_info', - 'asset_slug', 'event_slug') + fields = ( + "id", + "email", + "first_name", + "last_name", + "phone", + "cohort", + "syllabus", + "access_token", + "plan", + "plans", + "user", + "country", + "city", + "latitude", + "longitude", + "status", + "conversion_info", + "asset_slug", + "event_slug", + ) def validate(self, data: dict[str, str]): from breathecode.marketing.models import Course from breathecode.payments.models import Plan - country = data['country'] if 'country' in data else None - forbidden_countries = ['spain'] + country = data["country"] if "country" in data else None + forbidden_countries = ["spain"] - lang = self.context.get('lang', 'en') - if 'email' not in data: + lang = self.context.get("lang", "en") + if "email" not in data: raise ValidationException( - translation(lang, en='Email is required', es='El email es requerido', slug='without-email')) + translation(lang, en="Email is required", es="El email es requerido", slug="without-email") + ) - data['email'] = data['email'].lower() + data["email"] = data["email"].lower() extra = {} plan = None - if plan_pk := self.context.get('plan'): + if plan_pk := self.context.get("plan"): try: kwargs = {} if isinstance(plan_pk, int): - kwargs['id'] = plan_pk + kwargs["id"] = plan_pk else: - kwargs['slug'] = plan_pk + kwargs["slug"] = plan_pk plan = Plan.objects.filter(**kwargs).get() - extra['plans'] = plan + extra["plans"] = plan except Exception: raise ValidationException( - translation(lang, en='Plan not found', es='Plan no encontrado', slug='plan-not-found')) + translation(lang, en="Plan not found", es="Plan no encontrado", slug="plan-not-found") + ) course = None - if course_pk := self.context.get('course'): + if course_pk := self.context.get("course"): try: kwargs = {} if isinstance(course_pk, int): - kwargs['id'] = course_pk + kwargs["id"] = course_pk else: - kwargs['slug'] = course_pk + kwargs["slug"] = course_pk course = Course.objects.filter(**kwargs).get() - extra['courses'] = course + extra["courses"] = course except Exception: raise ValidationException( - translation(lang, en='Course not found', es='Curso no encontrado', slug='course-not-found')) + translation(lang, en="Course not found", es="Curso no encontrado", slug="course-not-found") + ) - if cohort := data.get('cohort'): - extra['cohort'] = cohort + if cohort := data.get("cohort"): + extra["cohort"] = cohort - if syllabus := data.get('syllabus'): - extra['syllabus'] = syllabus + if syllabus := data.get("syllabus"): + extra["syllabus"] = syllabus - have_pending_invites = UserInvite.objects.filter(Q(academy__available_as_saas=False) - | Q(cohort__academy__available_as_saas=False), - email=data['email'], - status='PENDING') - have_accepted_invites = UserInvite.objects.filter(email=data['email'], status='ACCEPTED').first() + have_pending_invites = UserInvite.objects.filter( + Q(academy__available_as_saas=False) | Q(cohort__academy__available_as_saas=False), + email=data["email"], + status="PENDING", + ) + have_accepted_invites = UserInvite.objects.filter(email=data["email"], status="ACCEPTED").first() if not have_accepted_invites and have_pending_invites: names = [x.academy.name if x.academy else x.cohort.academy.name for x in have_pending_invites] - raise ValidationException(translation( - lang, - en=f'You have a pending invites from {", ".join(names)} that you 
need to accept before ' - 'being able to log in. Check your email inbox to accept it or speak to your program ' - 'manager.', - es=f'Tienes una invitación pendiente de parte de {", ".join(names)} que debes aceptar ' - 'antes de poder registrarte. Revisa tu buzón de correo electrónico o habla con el ' - 'coordinador del curso para conseguir el link a la invitación.'), - slug='invite-exists') + raise ValidationException( + translation( + lang, + en=f'You have a pending invites from {", ".join(names)} that you need to accept before ' + "being able to log in. Check your email inbox to accept it or speak to your program " + "manager.", + es=f'Tienes una invitación pendiente de parte de {", ".join(names)} que debes aceptar ' + "antes de poder registrarte. Revisa tu buzón de correo electrónico o habla con el " + "coordinador del curso para conseguir el link a la invitación.", + ), + slug="invite-exists", + ) - invites = UserInvite.objects.filter(email=data['email'], **extra) + invites = UserInvite.objects.filter(email=data["email"], **extra) - if not self.instance and invites.filter(status='WAITING_LIST').exists(): + if not self.instance and invites.filter(status="WAITING_LIST").exists(): raise ValidationException( - translation(lang, - en='User already exists in the waiting list', - es='El usuario ya existe en la lista de espera', - slug='user-invite-exists')) + translation( + lang, + en="User already exists in the waiting list", + es="El usuario ya existe en la lista de espera", + slug="user-invite-exists", + ) + ) - if not self.instance and invites.filter(status='PENDING').exists(): + if not self.instance and invites.filter(status="PENDING").exists(): raise ValidationException( - translation(lang, - en='Check your email! You have a pending invitation link that needs to be accepted', - es='¡Revisa tu correo! Tienes un link con una invitación pendiente que debe ser aceptada', - slug='user-invite-exists-status-pending')) + translation( + lang, + en="Check your email! You have a pending invitation link that needs to be accepted", + es="¡Revisa tu correo! 
Tienes un link con una invitación pendiente que debe ser aceptada", + slug="user-invite-exists-status-pending", + ) + ) - if not self.instance and invites.filter(status='ACCEPTED').exists(): - raise ValidationException(translation(lang, - en='You are already a member of 4Geeks.com, go ahead and log in', - es='Ya eres miembro de 4Geeks.com, inicia sesión en su lugar'), - silent=True, - slug='user-invite-exists-status-accepted') + if not self.instance and invites.filter(status="ACCEPTED").exists(): + raise ValidationException( + translation( + lang, + en="You are already a member of 4Geeks.com, go ahead and log in", + es="Ya eres miembro de 4Geeks.com, inicia sesión en su lugar", + ), + silent=True, + slug="user-invite-exists-status-accepted", + ) - user = User.objects.filter(email=data['email']).first() + user = User.objects.filter(email=data["email"]).first() if user: - for i in UserInvite.objects.filter(user__isnull=True, email=data['email'], status='ACCEPTED'): + for i in UserInvite.objects.filter(user__isnull=True, email=data["email"], status="ACCEPTED"): i.user = user i.save() if not self.instance and user: - raise ValidationException(translation(lang, - en='User already exists, go ahead and log in instead.', - es='El usuario ya existe, inicie sesión en su lugar.'), - slug='user-exists', - silent=True) + raise ValidationException( + translation( + lang, + en="User already exists, go ahead and log in instead.", + es="El usuario ya existe, inicie sesión en su lugar.", + ), + slug="user-exists", + silent=True, + ) self.user = user - self.email = data['email'] + self.email = data["email"] self.plan = plan self.course = course if course and syllabus and not course.syllabus.filter(id=syllabus.id).exists(): raise ValidationException( - translation(lang, - en='The syllabus does not belong to the course', - es='El syllabus no pertenece al curso', - slug='syllabus-not-belong-to-course')) + translation( + lang, + en="The syllabus does not belong to the course", + es="El syllabus no pertenece al curso", + slug="syllabus-not-belong-to-course", + ) + ) if country is not None and country.lower() in forbidden_countries: - data['status'] = 'WAITING_LIST' - data['process_status'] = 'PENDING' + data["status"] = "WAITING_LIST" + data["process_status"] = "PENDING" elif plan and plan.has_waiting_list == True: - data['status'] = 'WAITING_LIST' - data['process_status'] = 'PENDING' + data["status"] = "WAITING_LIST" + data["process_status"] = "PENDING" elif plan and plan.has_waiting_list == False: - data['status'] = 'ACCEPTED' - data['process_status'] = 'DONE' + data["status"] = "ACCEPTED" + data["process_status"] = "DONE" elif course and course.has_waiting_list == True: - data['academy'] = course.academy - data['syllabus'] = syllabus - data['status'] = 'WAITING_LIST' - data['process_status'] = 'PENDING' + data["academy"] = course.academy + data["syllabus"] = syllabus + data["status"] = "WAITING_LIST" + data["process_status"] = "PENDING" elif course and course.has_waiting_list == False: - data['academy'] = course.academy - data['syllabus'] = syllabus - data['status'] = 'ACCEPTED' - data['process_status'] = 'DONE' + data["academy"] = course.academy + data["syllabus"] = syllabus + data["status"] = "ACCEPTED" + data["process_status"] = "DONE" elif cohort: - data['academy'] = cohort.academy - data['cohort'] = cohort - data['status'] = 'ACCEPTED' - data['process_status'] = 'DONE' + data["academy"] = cohort.academy + data["cohort"] = cohort + data["status"] = "ACCEPTED" + data["process_status"] = "DONE" elif syllabus 
and Cohort.objects.filter(syllabus_version__syllabus=syllabus).exists(): - data['syllabus'] = syllabus - data['status'] = 'ACCEPTED' - data['process_status'] = 'DONE' + data["syllabus"] = syllabus + data["status"] = "ACCEPTED" + data["process_status"] = "DONE" else: - data['status'] = 'ACCEPTED' - data['process_status'] = 'DONE' + data["status"] = "ACCEPTED" + data["process_status"] = "DONE" self.cohort = cohort self.syllabus = syllabus if not self.instance: - data['token'] = hashlib.sha512((data['email']).encode('UTF-8') + os.urandom(64)).hexdigest() + data["token"] = hashlib.sha512((data["email"]).encode("UTF-8") + os.urandom(64)).hexdigest() - event = data.get('event', None) + event = data.get("event", None) if event is not None: try: args = {} if isinstance(event, int): - args['id'] = event + args["id"] = event else: - args['slug'] = event + args["slug"] = event event = Event.objects.filter(**args).get() - data['event_slug'] = event.slug + data["event_slug"] = event.slug except Exception: - raise ValidationException(translation(lang, - en='Unable to find the given Event', - es='Imposible encontrar el Evento dado', - slug='event-not-found'), - code=400) + raise ValidationException( + translation( + lang, + en="Unable to find the given Event", + es="Imposible encontrar el Evento dado", + slug="event-not-found", + ), + code=400, + ) - asset = data.get('asset', None) + asset = data.get("asset", None) if asset is not None: try: args = {} if isinstance(asset, int): - args['id'] = asset + args["id"] = asset else: - args['slug'] = asset + args["slug"] = asset asset = Asset.objects.filter(**args).get() - data['asset_slug'] = asset.slug + data["asset_slug"] = asset.slug except Exception: - raise ValidationException(translation(lang, - en='Unable to find the given Asset', - es='Imposible encontrar el Asset dado', - slug='asset-not-found'), - code=400) + raise ValidationException( + translation( + lang, + en="Unable to find the given Asset", + es="Imposible encontrar el Asset dado", + slug="asset-not-found", + ), + code=400, + ) - conversion_info = data.get('conversion_info', None) + conversion_info = data.get("conversion_info", None) validate_conversion_info(conversion_info, lang) return data @@ -1415,9 +1558,9 @@ def update(self, *args, **kwargs): return instance def get_access_token(self, obj: UserInvite): - lang = self.context.get('lang', 'en') + lang = self.context.get("lang", "en") - if obj.status != 'ACCEPTED': + if obj.status != "ACCEPTED": return None # if should be created within the signal @@ -1425,12 +1568,14 @@ def get_access_token(self, obj: UserInvite): self.user = User.objects.filter(email=self.email).first() if not self.user: - self.user = User(email=obj.email, - username=obj.email, - first_name=obj.first_name, - last_name=obj.last_name, - is_staff=False, - is_active=True) + self.user = User( + email=obj.email, + username=obj.email, + first_name=obj.first_name, + last_name=obj.last_name, + is_staff=False, + is_active=True, + ) self.user.save() # create default settings for user @@ -1440,22 +1585,25 @@ def get_access_token(self, obj: UserInvite): subject = translation( lang, - en='4Geeks - Validate account', - es='4Geeks - Valida tu cuenta', + en="4Geeks - Validate account", + es="4Geeks - Valida tu cuenta", ) - notify_actions.send_email_message('verify_email', - self.user.email, { - 'SUBJECT': subject, - 'LANG': lang, - 'LINK': os.getenv('API_URL', '') + f'/v1/auth/password/{obj.token}', - }, - academy=obj.academy) + notify_actions.send_email_message( + "verify_email", + 
self.user.email, + { + "SUBJECT": subject, + "LANG": lang, + "LINK": os.getenv("API_URL", "") + f"/v1/auth/password/{obj.token}", + }, + academy=obj.academy, + ) self.instance.user = self.user self.instance.save() - token, _ = Token.get_or_create(user=self.user, token_type='login') + token, _ = Token.get_or_create(user=self.user, token_type="login") return token.key def get_plans(self, obj: UserInvite): diff --git a/breathecode/authenticate/tasks.py b/breathecode/authenticate/tasks.py index 1cf7d3ca4..1138564c7 100644 --- a/breathecode/authenticate/tasks.py +++ b/breathecode/authenticate/tasks.py @@ -14,36 +14,36 @@ from .actions import add_to_organization, remove_from_organization, set_gitpod_user_expiration -API_URL = os.getenv('API_URL', '') +API_URL = os.getenv("API_URL", "") logger = logging.getLogger(__name__) @task(priority=TaskPriority.REALTIME.value) def async_validate_email_invite(invite_id, **_): - logger.debug(f'Validating email for invite {invite_id}') + logger.debug(f"Validating email for invite {invite_id}") user_invite = UserInvite.objects.filter(id=invite_id).first() if user_invite is None: - raise RetryTask(f'UserInvite {invite_id} not found') + raise RetryTask(f"UserInvite {invite_id} not found") try: - email_status = validate_email(user_invite.email, 'en') - if email_status['score'] <= 0.60: - user_invite.status = 'REJECTED' - user_invite.process_status = 'ERROR' - user_invite.process_message = 'Your email is invalid' - user_invite.email_quality = email_status['score'] + email_status = validate_email(user_invite.email, "en") + if email_status["score"] <= 0.60: + user_invite.status = "REJECTED" + user_invite.process_status = "ERROR" + user_invite.process_message = "Your email is invalid" + user_invite.email_quality = email_status["score"] user_invite.email_status = email_status except ValidationException as e: - user_invite.status = 'REJECTED' - user_invite.process_status = 'ERROR' + user_invite.status = "REJECTED" + user_invite.process_status = "ERROR" user_invite.process_message = str(e) except Exception: - raise RetryTask(f'Retrying email validation for invite {invite_id}') + raise RetryTask(f"Retrying email validation for invite {invite_id}") user_invite.save() @@ -52,7 +52,7 @@ def async_validate_email_invite(invite_id, **_): @shared_task(priority=TaskPriority.ACADEMY.value) def async_set_gitpod_user_expiration(gitpoduser_id): - logger.debug(f'Recalculate gitpoduser expiration for {gitpoduser_id}') + logger.debug(f"Recalculate gitpoduser expiration for {gitpoduser_id}") return set_gitpod_user_expiration(gitpoduser_id) is not None @@ -70,80 +70,87 @@ def async_remove_from_organization(cohort_id, user_id, force=False): def async_accept_user_from_waiting_list(user_invite_id: int) -> None: from .models import UserInvite - logger.debug(f'Process to accept UserInvite {user_invite_id}') + logger.debug(f"Process to accept UserInvite {user_invite_id}") if not (invite := UserInvite.objects.filter(id=user_invite_id).first()): - logger.error(f'UserInvite {user_invite_id} not found') + logger.error(f"UserInvite {user_invite_id} not found") return if not invite.email: - invite.status = 'ACCEPTED' - invite.process_status = 'ERROR' + invite.status = "ACCEPTED" + invite.process_status = "ERROR" invite.process_message = "Can't determine the user email" invite.save() return if user := User.objects.filter(email=invite.email).first(): - invite.status = 'ACCEPTED' - invite.process_status = 'DONE' - invite.process_message = f'User already exists with the id {user.id}' + invite.status = 
"ACCEPTED" + invite.process_status = "DONE" + invite.process_message = f"User already exists with the id {user.id}" invite.save() return - user = User(username=invite.email, - email=invite.email, - first_name=invite.first_name or '', - last_name=invite.last_name or '') + user = User( + username=invite.email, email=invite.email, first_name=invite.first_name or "", last_name=invite.last_name or "" + ) user.save() invite.user = user - invite.status = 'ACCEPTED' - invite.process_status = 'DONE' - invite.process_message = f'Registered as User with id {user.id}' + invite.status = "ACCEPTED" + invite.process_status = "DONE" + invite.process_message = f"Registered as User with id {user.id}" invite.save() - notify_actions.send_email_message('pick_password', - user.email, { - 'SUBJECT': 'Set your password at 4Geeks', - 'LINK': os.getenv('API_URL', '') + f'/v1/auth/password/{invite.token}', - }, - academy=invite.academy) + notify_actions.send_email_message( + "pick_password", + user.email, + { + "SUBJECT": "Set your password at 4Geeks", + "LINK": os.getenv("API_URL", "") + f"/v1/auth/password/{invite.token}", + }, + academy=invite.academy, + ) @task(priority=TaskPriority.STUDENT.value) def create_user_from_invite(user_invite_id: int, **_): - logger.info('Running create_user_from_invite task') + logger.info("Running create_user_from_invite task") - if not (user_invite := UserInvite.objects.filter(id=user_invite_id).only( - 'email', 'first_name', 'last_name', 'status', 'user_id', 'token', 'academy__id').first()): - raise RetryTask('User invite not found') + if not ( + user_invite := UserInvite.objects.filter(id=user_invite_id) + .only("email", "first_name", "last_name", "status", "user_id", "token", "academy__id") + .first() + ): + raise RetryTask("User invite not found") - if user_invite.status != 'ACCEPTED': - raise AbortTask('User invite is not accepted') + if user_invite.status != "ACCEPTED": + raise AbortTask("User invite is not accepted") - if user_invite.user or (user := User.objects.filter(email=user_invite.email).only('id').first()): + if user_invite.user or (user := User.objects.filter(email=user_invite.email).only("id").first()): if not user_invite.user: user_invite.user = user user_invite.save() - raise AbortTask('User invite is already associated to a user') + raise AbortTask("User invite is already associated to a user") if not user_invite.email: - raise AbortTask('No email found') + raise AbortTask("No email found") user = User() user.username = user_invite.email user.email = user_invite.email - user.first_name = user_invite.first_name or '' - user.last_name = user_invite.last_name or '' + user.first_name = user_invite.first_name or "" + user.last_name = user_invite.last_name or "" user.save() if user_invite.token: notify_actions.send_email_message( - 'pick_password', - user.email, { - 'SUBJECT': 'Set your password at 4Geeks', - 'LINK': os.getenv('API_URL', '') + f'/v1/auth/password/{user_invite.token}' + "pick_password", + user.email, + { + "SUBJECT": "Set your password at 4Geeks", + "LINK": os.getenv("API_URL", "") + f"/v1/auth/password/{user_invite.token}", }, - academy=user_invite.academy) + academy=user_invite.academy, + ) diff --git a/breathecode/authenticate/tests/actions/tests_github_sync.py b/breathecode/authenticate/tests/actions/tests_github_sync.py index 8077f8e11..8db424d13 100644 --- a/breathecode/authenticate/tests/actions/tests_github_sync.py +++ b/breathecode/authenticate/tests/actions/tests_github_sync.py @@ -1,6 +1,7 @@ """ Test mentorhips """ + from unittest.mock 
import MagicMock, patch from capyc.rest_framework.exceptions import ValidationException @@ -11,7 +12,7 @@ def get_org_members(): - return [{'login': 'some-github-username'}, {'login': 'second-username'}] + return [{"login": "some-github-username"}, {"login": "second-username"}] class SyncGithubUsersTestSuite(AuthTestCase): @@ -25,9 +26,9 @@ def test_add_to_organization_no_cohort(self): with self.assertRaises(ValidationException) as context: add_to_organization(models.cohort.id, models.user.id) - self.assertEqual(context.exception.slug, 'invalid-cohort-user') + self.assertEqual(context.exception.slug, "invalid-cohort-user") - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) def test_add_to_organization_success(self): """ When a student enters into a cohort, but he was not in the academy @@ -37,47 +38,45 @@ def test_add_to_organization_success(self): result = add_to_organization(models.cohort.id, models.user.id) self.assertEqual(result, True) - users = self.bc.database.list_of('authenticate.GithubAcademyUser') + users = self.bc.database.list_of("authenticate.GithubAcademyUser") self.assertEqual(len(users), 1) - self.assertEqual(models.user.id, users[0]['id']) - self.assertEqual('PENDING', users[0]['storage_status']) - self.assertEqual('ADD', users[0]['storage_action']) + self.assertEqual(models.user.id, users[0]["id"]) + self.assertEqual("PENDING", users[0]["storage_status"]) + self.assertEqual("ADD", users[0]["storage_action"]) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) def test_add_to_organization_already_added(self): """ No need to double add student if it was already added previously """ - models = self.bc.database.create(user=True, - cohort=True, - cohort_user=True, - github_academy_user={'storage_status': 'SYNCHED'}) + models = self.bc.database.create( + user=True, cohort=True, cohort_user=True, github_academy_user={"storage_status": "SYNCHED"} + ) result = add_to_organization(models.cohort.id, models.user.id) self.assertEqual(result, True) - users = self.bc.database.list_of('authenticate.GithubAcademyUser') - self.assertEqual('SYNCHED', users[0]['storage_status']) - self.assertEqual('ADD', users[0]['storage_action']) + users = self.bc.database.list_of("authenticate.GithubAcademyUser") + self.assertEqual("SYNCHED", users[0]["storage_status"]) + self.assertEqual("ADD", users[0]["storage_action"]) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) def test_add_to_organization_success_previously_errored(self): """ It there was a previous error, we should still try and re-attempt """ - models = self.bc.database.create(user=True, - cohort=True, - cohort_user=True, - github_academy_user={'storage_status': 'ERROR'}) + models = self.bc.database.create( + user=True, cohort=True, cohort_user=True, github_academy_user={"storage_status": "ERROR"} + ) result = add_to_organization(models.cohort.id, models.user.id) self.assertEqual(result, True) - users = self.bc.database.list_of('authenticate.GithubAcademyUser') - self.assertEqual('PENDING', users[0]['storage_status']) - self.assertEqual('ADD', users[0]['storage_action']) + users = 
self.bc.database.list_of("authenticate.GithubAcademyUser") + self.assertEqual("PENDING", users[0]["storage_status"]) + self.assertEqual("ADD", users[0]["storage_action"]) def test_remove_from_organization__no_cohort(self): @@ -91,9 +90,9 @@ def test_remove_from_organization__no_cohort(self): with self.assertRaises(ValidationException) as context: remove_from_organization(models.cohort.id, models.user.id) - self.assertEqual(context.exception.slug, 'invalid-cohort-user') + self.assertEqual(context.exception.slug, "invalid-cohort-user") - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) def test_remove_from_organization__no_org_user(self): """ If user its not part of an organization, it cannot be removed @@ -101,7 +100,7 @@ def test_remove_from_organization__no_org_user(self): models = self.bc.database.create( user=True, - cohort={'never_ends': True}, + cohort={"never_ends": True}, cohort_user=True, # github_academy_user={ 'storage_status': 'ERROR'} ) @@ -109,9 +108,9 @@ def test_remove_from_organization__no_org_user(self): with self.assertRaises(ValidationException) as context: remove_from_organization(models.cohort.id, models.user.id) - self.assertEqual(context.exception.slug, 'user-not-found-in-org') + self.assertEqual(context.exception.slug, "user-not-found-in-org") - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) def test_remove_from_organization__still_active(self): """ Trying to remove someone that its still active in any cohort @@ -122,16 +121,16 @@ def test_remove_from_organization__still_active(self): user=True, cohort=True, cohort_user=True, - cohort_user_kwargs={'educational_status': 'ACTIVE'}, + cohort_user_kwargs={"educational_status": "ACTIVE"}, ) with self.assertRaises(ValidationException) as context: remove_from_organization(models.cohort.id, models.user.id) - self.assertEqual(context.exception.slug, 'still-active') + self.assertEqual(context.exception.slug, "still-active") - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('django.db.models.signals.post_save.send_robust', MagicMock()) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("django.db.models.signals.post_save.send_robust", MagicMock()) def test_remove_from_organization__still_active_another_org(self): """ Trying to remove someone that its still active in another @@ -142,88 +141,83 @@ def test_remove_from_organization__still_active_another_org(self): user=True, cohort=True, cohort_user=True, - cohort_user_kwargs={'educational_status': 'ACTIVE'}, + cohort_user_kwargs={"educational_status": "ACTIVE"}, ) models2 = self.bc.database.create( cohort=True, cohort_user=True, - cohort_kwargs={'academy': models.academy}, + cohort_kwargs={"academy": models.academy}, cohort_user_kwargs={ - 'user': models.user, + "user": models.user, }, ) with self.assertRaises(ValidationException) as context: remove_from_organization(models2.cohort.id, models.user.id) - self.assertEqual(context.exception.slug, 'still-active') + self.assertEqual(context.exception.slug, "still-active") - @patch('breathecode.services.github.Github.get_org_members', MagicMock(side_effect=get_org_members)) + @patch("breathecode.services.github.Github.get_org_members", 
MagicMock(side_effect=get_org_members)) def test_sync_organization_members__no_sync(self): - """ - - """ + """ """ models = self.bc.database.create(academy=True) - models2 = self.bc.database.create(academy_auth_settings=True, - github_academy_user=True, - academy_auth_settings_kwargs={ - 'academy': models.academy, - 'github_is_sync': False, - }, - github_academy_user_kwargs={'academy': models.academy}) + models2 = self.bc.database.create( + academy_auth_settings=True, + github_academy_user=True, + academy_auth_settings_kwargs={ + "academy": models.academy, + "github_is_sync": False, + }, + github_academy_user_kwargs={"academy": models.academy}, + ) result = sync_organization_members(models.academy.id) self.assertEqual(result, False) - @patch('breathecode.services.github.Github.get_org_members', MagicMock(side_effect=get_org_members)) + @patch("breathecode.services.github.Github.get_org_members", MagicMock(side_effect=get_org_members)) def test_sync_organization_members__no_settings(self): - """ - - """ + """ """ models = self.bc.database.create(academy=True) - models2 = self.bc.database.create(github_academy_user=True, - github_academy_user_kwargs={'academy': models.academy}) + models2 = self.bc.database.create( + github_academy_user=True, github_academy_user_kwargs={"academy": models.academy} + ) result = sync_organization_members(models.academy.id) self.assertEqual(result, False) - @patch('breathecode.services.github.Github.get_org_members', MagicMock(side_effect=get_org_members)) + @patch("breathecode.services.github.Github.get_org_members", MagicMock(side_effect=get_org_members)) def test_sync_organization_members__all_must_be_sync(self): """ If all organizations with the same user dont are not in sync, we will not sync """ - models = self.bc.database.create(academy=True, - academy_auth_settings=True, - github_academy_user=True, - academy_auth_settings_kwargs={ - 'github_is_sync': True, - 'github_username': 'some-username' - }) - models2 = self.bc.database.create(academy=True, - academy_auth_settings=True, - github_academy_user=True, - academy_auth_settings_kwargs={ - 'github_is_sync': False, - 'github_username': 'some-username' - }) + models = self.bc.database.create( + academy=True, + academy_auth_settings=True, + github_academy_user=True, + academy_auth_settings_kwargs={"github_is_sync": True, "github_username": "some-username"}, + ) + models2 = self.bc.database.create( + academy=True, + academy_auth_settings=True, + github_academy_user=True, + academy_auth_settings_kwargs={"github_is_sync": False, "github_username": "some-username"}, + ) with self.assertRaises(ValidationException) as context: sync_organization_members(models.academy.id) - self.assertEqual(context.exception.slug, 'not-everyone-in-synch') + self.assertEqual(context.exception.slug, "not-everyone-in-synch") - @patch('breathecode.services.github.Github.get_org_members', MagicMock(side_effect=get_org_members)) + @patch("breathecode.services.github.Github.get_org_members", MagicMock(side_effect=get_org_members)) def test_sync_organization_members_invalid_owner_no_githubcredentials(self): - """ - - """ + """ """ models = self.bc.database.create( user=True, @@ -231,230 +225,244 @@ def test_sync_organization_members_invalid_owner_no_githubcredentials(self): academy=True, academy_auth_settings=True, github_academy_user=True, - academy_auth_settings_kwargs={ - 'github_is_sync': True, - 'github_username': 'some-username' - }) + academy_auth_settings_kwargs={"github_is_sync": True, "github_username": "some-username"}, + ) with 
self.assertRaises(ValidationException) as context: sync_organization_members(models.academy.id) - self.assertEqual(context.exception.slug, 'invalid-owner') + self.assertEqual(context.exception.slug, "invalid-owner") - @patch('breathecode.services.github.Github.get_org_members', MagicMock(side_effect=get_org_members)) - @patch('breathecode.services.github.Github.invite_org_member', MagicMock()) - @patch('breathecode.services.github.Github.delete_org_member', MagicMock()) + @patch("breathecode.services.github.Github.get_org_members", MagicMock(side_effect=get_org_members)) + @patch("breathecode.services.github.Github.invite_org_member", MagicMock()) + @patch("breathecode.services.github.Github.delete_org_member", MagicMock()) def test_sync_organization_members__no_githubcredentials_marked_as_error(self): """ Any AcademyUser that has not github connection must be marked as error """ - models = self.bc.database.create(user=True, - credentials_github=True, - academy=True, - academy_auth_settings=True, - academy_auth_settings_kwargs={ - 'github_is_sync': True, - 'github_username': 'some-username' - }) - - models2 = self.bc.database.create(user=True, - credentials_github=False, - github_academy_user=True, - github_academy_user_kwargs={ - 'storage_status': 'PENDING', - 'storage_action': 'ADD', - 'academy': models.academy - }) - - models3 = self.bc.database.create(user=True, - credentials_github=True, - github_academy_user=True, - github_academy_user_kwargs={ - 'storage_status': 'PENDING', - 'storage_action': 'REMOVE', - 'academy': models.academy - }) + models = self.bc.database.create( + user=True, + credentials_github=True, + academy=True, + academy_auth_settings=True, + academy_auth_settings_kwargs={"github_is_sync": True, "github_username": "some-username"}, + ) + + models2 = self.bc.database.create( + user=True, + credentials_github=False, + github_academy_user=True, + github_academy_user_kwargs={ + "storage_status": "PENDING", + "storage_action": "ADD", + "academy": models.academy, + }, + ) + + models3 = self.bc.database.create( + user=True, + credentials_github=True, + github_academy_user=True, + github_academy_user_kwargs={ + "storage_status": "PENDING", + "storage_action": "REMOVE", + "academy": models.academy, + }, + ) sync_organization_members(models.academy.id) - GithubAcademyUser = self.bc.database.get_model('authenticate.GithubAcademyUser') + GithubAcademyUser = self.bc.database.get_model("authenticate.GithubAcademyUser") users = GithubAcademyUser.objects.filter(user__credentialsgithub__isnull=True).exclude(user__isnull=True) self.assertEqual(users.count(), 1) no_credentials_user = users.first() - self.assertEqual(no_credentials_user.storage_status, 'ERROR') - self.assertEqual([l['msg'] for l in no_credentials_user.storage_log], - [GithubAcademyUser.create_log('This user needs connect to github')['msg']]) + self.assertEqual(no_credentials_user.storage_status, "ERROR") + self.assertEqual( + [l["msg"] for l in no_credentials_user.storage_log], + [GithubAcademyUser.create_log("This user needs connect to github")["msg"]], + ) - @patch('breathecode.services.github.Github.get_org_members', MagicMock(side_effect=get_org_members)) - @patch('breathecode.services.github.Github.invite_org_member', MagicMock()) - @patch('breathecode.services.github.Github.delete_org_member', MagicMock()) + @patch("breathecode.services.github.Github.get_org_members", MagicMock(side_effect=get_org_members)) + @patch("breathecode.services.github.Github.invite_org_member", MagicMock()) + 
@patch("breathecode.services.github.Github.delete_org_member", MagicMock()) def test_sync_organization_members_sync_pending_add(self): """ Users with pending add and delete sync should be invited using the gitpod API """ - github_email = 'sam@mail.com' - models = self.bc.database.create(user=True, - credentials_github=True, - academy=True, - academy_auth_settings=True, - academy_auth_settings_kwargs={ - 'github_is_sync': True, - 'github_username': 'some-username' - }) - - models2 = self.bc.database.create(user=True, - credentials_github=True, - credentials_github_kwargs={'email': github_email}, - github_academy_user=True, - github_academy_user_kwargs={ - 'storage_status': 'PENDING', - 'storage_action': 'ADD', - 'academy': models.academy - }) + github_email = "sam@mail.com" + models = self.bc.database.create( + user=True, + credentials_github=True, + academy=True, + academy_auth_settings=True, + academy_auth_settings_kwargs={"github_is_sync": True, "github_username": "some-username"}, + ) + + models2 = self.bc.database.create( + user=True, + credentials_github=True, + credentials_github_kwargs={"email": github_email}, + github_academy_user=True, + github_academy_user_kwargs={ + "storage_status": "PENDING", + "storage_action": "ADD", + "academy": models.academy, + }, + ) models3 = self.bc.database.create( user=True, credentials_github=True, # this username is coming from the mock on the top of the file - credentials_github_kwargs={'username': 'some-github-username'}, + credentials_github_kwargs={"username": "some-github-username"}, github_academy_user=True, github_academy_user_kwargs={ - 'storage_status': 'PENDING', - 'storage_action': 'DELETE', - 'academy': models.academy - }) + "storage_status": "PENDING", + "storage_action": "DELETE", + "academy": models.academy, + }, + ) sync_organization_members(models.academy.id) # test for add - GithubAcademyUser = self.bc.database.get_model('authenticate.GithubAcademyUser') + GithubAcademyUser = self.bc.database.get_model("authenticate.GithubAcademyUser") user = GithubAcademyUser.objects.get(id=models2.github_academy_user.id) - self.assertEqual(user.storage_status, 'SYNCHED') - self.assertEqual(user.storage_action, 'INVITE') - self.assertEqual([l['msg'] for l in user.storage_log], - [GithubAcademyUser.create_log(f'Sent invitation to {github_email}')['msg']]) + self.assertEqual(user.storage_status, "SYNCHED") + self.assertEqual(user.storage_action, "INVITE") + self.assertEqual( + [l["msg"] for l in user.storage_log], + [GithubAcademyUser.create_log(f"Sent invitation to {github_email}")["msg"]], + ) # test for success user = GithubAcademyUser.objects.get(id=models3.github_academy_user.id) - self.assertEqual(user.storage_status, 'SYNCHED') - self.assertEqual(user.storage_action, 'DELETE') - self.assertEqual([l['msg'] for l in user.storage_log], - [GithubAcademyUser.create_log(f'Successfully deleted in github organization')['msg']]) - - @patch('breathecode.services.github.Github.get_org_members', MagicMock(side_effect=get_org_members)) - @patch('breathecode.services.github.Github.invite_org_member', MagicMock()) - @patch('breathecode.services.github.Github.delete_org_member', MagicMock()) + self.assertEqual(user.storage_status, "SYNCHED") + self.assertEqual(user.storage_action, "DELETE") + self.assertEqual( + [l["msg"] for l in user.storage_log], + [GithubAcademyUser.create_log(f"Successfully deleted in github organization")["msg"]], + ) + + @patch("breathecode.services.github.Github.get_org_members", MagicMock(side_effect=get_org_members)) + 
@patch("breathecode.services.github.Github.invite_org_member", MagicMock()) + @patch("breathecode.services.github.Github.delete_org_member", MagicMock()) def test_sync_organization_members_succesfully_deleted(self): """ User was found in github and deleted from both, organiztion and github """ - github_email = 'sam@mail.com' - models = self.bc.database.create(user=True, - credentials_github=True, - academy=True, - academy_auth_settings=True, - academy_auth_settings_kwargs={ - 'github_is_sync': True, - 'github_username': 'academy-username' - }) + github_email = "sam@mail.com" + models = self.bc.database.create( + user=True, + credentials_github=True, + academy=True, + academy_auth_settings=True, + academy_auth_settings_kwargs={"github_is_sync": True, "github_username": "academy-username"}, + ) models3 = self.bc.database.create( user=True, credentials_github=True, credentials_github_kwargs={ # this has to match the mocked response from github api - 'username': 'some-github-username' + "username": "some-github-username" }, github_academy_user=True, github_academy_user_kwargs={ - 'storage_status': 'PENDING', - 'storage_action': 'DELETE', - 'academy': models.academy - }) + "storage_status": "PENDING", + "storage_action": "DELETE", + "academy": models.academy, + }, + ) sync_organization_members(models.academy.id) # already deleted from github user = GithubAcademyUser.objects.get(id=models3.github_academy_user.id) - self.assertEqual(user.storage_status, 'SYNCHED') - self.assertEqual(user.storage_action, 'DELETE') - self.assertEqual([l['msg'] for l in user.storage_log], - [GithubAcademyUser.create_log(f'Successfully deleted in github organization')['msg']]) - - @patch('breathecode.services.github.Github.get_org_members', MagicMock(side_effect=get_org_members)) - @patch('breathecode.services.github.Github.invite_org_member', MagicMock()) - @patch('breathecode.services.github.Github.delete_org_member', MagicMock()) + self.assertEqual(user.storage_status, "SYNCHED") + self.assertEqual(user.storage_action, "DELETE") + self.assertEqual( + [l["msg"] for l in user.storage_log], + [GithubAcademyUser.create_log(f"Successfully deleted in github organization")["msg"]], + ) + + @patch("breathecode.services.github.Github.get_org_members", MagicMock(side_effect=get_org_members)) + @patch("breathecode.services.github.Github.invite_org_member", MagicMock()) + @patch("breathecode.services.github.Github.delete_org_member", MagicMock()) def test_sync_organization_members_already_deleted(self): """ User was already deleted because its github username was not included in the github API incoming usernames """ - github_email = 'sam@mail.com' - models = self.bc.database.create(user=True, - credentials_github=True, - academy=True, - academy_auth_settings=True, - academy_auth_settings_kwargs={ - 'github_is_sync': True, - 'github_username': 'academy-username' - }) + github_email = "sam@mail.com" + models = self.bc.database.create( + user=True, + credentials_github=True, + academy=True, + academy_auth_settings=True, + academy_auth_settings_kwargs={"github_is_sync": True, "github_username": "academy-username"}, + ) models3 = self.bc.database.create( user=True, credentials_github=True, credentials_github_kwargs={ # username dont match the mocked github api response on purpose - 'username': 'some-github-username-dont-match' + "username": "some-github-username-dont-match" }, github_academy_user=True, github_academy_user_kwargs={ - 'storage_status': 'PENDING', - 'storage_action': 'DELETE', - 'academy': models.academy - }) + 
"storage_status": "PENDING", + "storage_action": "DELETE", + "academy": models.academy, + }, + ) sync_organization_members(models.academy.id) # already deleted from github user = GithubAcademyUser.objects.get(id=models3.github_academy_user.id) - self.assertEqual(user.storage_status, 'SYNCHED') - self.assertEqual(user.storage_action, 'DELETE') - self.assertEqual([l['msg'] for l in user.storage_log], - [GithubAcademyUser.create_log(f'User was already deleted from github')['msg']]) - - @patch('breathecode.services.github.Github.get_org_members', MagicMock(side_effect=get_org_members)) - @patch('breathecode.services.github.Github.invite_org_member', MagicMock()) - @patch('breathecode.services.github.Github.delete_org_member', MagicMock()) + self.assertEqual(user.storage_status, "SYNCHED") + self.assertEqual(user.storage_action, "DELETE") + self.assertEqual( + [l["msg"] for l in user.storage_log], + [GithubAcademyUser.create_log(f"User was already deleted from github")["msg"]], + ) + + @patch("breathecode.services.github.Github.get_org_members", MagicMock(side_effect=get_org_members)) + @patch("breathecode.services.github.Github.invite_org_member", MagicMock()) + @patch("breathecode.services.github.Github.delete_org_member", MagicMock()) def test_sync_organization__unknown_github_users(self): """ Some other users found on github added as unknown on the organization """ - github_email = 'sam@mail.com' - models = self.bc.database.create(user=True, - credentials_github=True, - academy=True, - academy_auth_settings=True, - academy_auth_settings_kwargs={ - 'github_is_sync': True, - 'github_username': 'academy-username' - }) - - models2 = self.bc.database.create(user=True, - credentials_github=True, - credentials_github_kwargs={'username': 'second-username'}, - academy=True, - academy_auth_settings=True, - academy_auth_settings_kwargs={ - 'github_is_sync': True, - 'github_username': models.academy_auth_settings.github_username - }) + github_email = "sam@mail.com" + models = self.bc.database.create( + user=True, + credentials_github=True, + academy=True, + academy_auth_settings=True, + academy_auth_settings_kwargs={"github_is_sync": True, "github_username": "academy-username"}, + ) + + models2 = self.bc.database.create( + user=True, + credentials_github=True, + credentials_github_kwargs={"username": "second-username"}, + academy=True, + academy_auth_settings=True, + academy_auth_settings_kwargs={ + "github_is_sync": True, + "github_username": models.academy_auth_settings.github_username, + }, + ) sync_organization_members(models.academy.id) # already deleted from github - unknown = GithubAcademyUser.objects.filter(academy__id=models.academy.id, storage_status='UNKNOWN') + unknown = GithubAcademyUser.objects.filter(academy__id=models.academy.id, storage_status="UNKNOWN") self.assertEqual(unknown.count(), 2) diff --git a/breathecode/authenticate/tests/admin/tests_accept_all_users_from_waiting_list.py b/breathecode/authenticate/tests/admin/tests_accept_all_users_from_waiting_list.py index 4116fcb89..2faea0124 100644 --- a/breathecode/authenticate/tests/admin/tests_accept_all_users_from_waiting_list.py +++ b/breathecode/authenticate/tests/admin/tests_accept_all_users_from_waiting_list.py @@ -10,9 +10,9 @@ class ModelProfileAcademyTestSuite(AuthTestCase): 🔽🔽🔽 With zero UserInvite """ - @patch('breathecode.authenticate.tasks.async_accept_user_from_waiting_list.delay', MagicMock()) + @patch("breathecode.authenticate.tasks.async_accept_user_from_waiting_list.delay", MagicMock()) def 
test_with_zero_user_invite(self): - UserInvite = self.bc.database.get_model('authenticate.UserInvite') + UserInvite = self.bc.database.get_model("authenticate.UserInvite") queryset = UserInvite.objects.filter() result = accept_all_users_from_waiting_list(None, None, queryset) @@ -24,11 +24,11 @@ def test_with_zero_user_invite(self): 🔽🔽🔽 With two UserInvite """ - @patch('breathecode.authenticate.tasks.async_accept_user_from_waiting_list.delay', MagicMock()) + @patch("breathecode.authenticate.tasks.async_accept_user_from_waiting_list.delay", MagicMock()) def test_with_two_user_invites(self): self.bc.database.create(user_invite=2) - UserInvite = self.bc.database.get_model('authenticate.UserInvite') + UserInvite = self.bc.database.get_model("authenticate.UserInvite") queryset = UserInvite.objects.filter() result = accept_all_users_from_waiting_list(None, None, queryset) @@ -40,7 +40,7 @@ def test_with_two_user_invites(self): 🔽🔽🔽 With four UserInvite, selecting just two items """ - @patch('breathecode.authenticate.tasks.async_accept_user_from_waiting_list.delay', MagicMock()) + @patch("breathecode.authenticate.tasks.async_accept_user_from_waiting_list.delay", MagicMock()) def test_with_four_user_invites__selecting_just_two_items(self): options = {1, 2, 3, 4} self.bc.database.create(user_invite=4) @@ -53,29 +53,32 @@ def test_with_four_user_invites__selecting_just_two_items(self): options.discard(selected) ids.append(selected) - UserInvite = self.bc.database.get_model('authenticate.UserInvite') + UserInvite = self.bc.database.get_model("authenticate.UserInvite") queryset = UserInvite.objects.filter(id__in=ids) result = accept_all_users_from_waiting_list(None, None, queryset) self.assertEqual(result, None) - self.assertEqual(tasks.async_accept_user_from_waiting_list.delay.call_args_list, [ - call(1), - call(2), - call(3), - call(4), - ]) + self.assertEqual( + tasks.async_accept_user_from_waiting_list.delay.call_args_list, + [ + call(1), + call(2), + call(3), + call(4), + ], + ) """ 🔽🔽🔽 With three UserInvite, passing all the valid statuses """ - @patch('breathecode.authenticate.tasks.async_accept_user_from_waiting_list.delay', MagicMock()) + @patch("breathecode.authenticate.tasks.async_accept_user_from_waiting_list.delay", MagicMock()) def test_with_three_user_invites__passing_all_the_valid_statuses(self): - user_invites = [{'process_status': x} for x in ['PENDING', 'DONE', 'ERROR']] + user_invites = [{"process_status": x} for x in ["PENDING", "DONE", "ERROR"]] self.bc.database.create(user_invite=user_invites) - UserInvite = self.bc.database.get_model('authenticate.UserInvite') + UserInvite = self.bc.database.get_model("authenticate.UserInvite") queryset = UserInvite.objects.filter() result = accept_all_users_from_waiting_list(None, None, queryset) diff --git a/breathecode/authenticate/tests/admin/tests_accept_selected_users_from_waiting_list.py b/breathecode/authenticate/tests/admin/tests_accept_selected_users_from_waiting_list.py index 530baac20..481c15208 100644 --- a/breathecode/authenticate/tests/admin/tests_accept_selected_users_from_waiting_list.py +++ b/breathecode/authenticate/tests/admin/tests_accept_selected_users_from_waiting_list.py @@ -10,9 +10,9 @@ class ModelProfileAcademyTestSuite(AuthTestCase): 🔽🔽🔽 With zero UserInvite """ - @patch('breathecode.authenticate.tasks.async_accept_user_from_waiting_list.delay', MagicMock()) + @patch("breathecode.authenticate.tasks.async_accept_user_from_waiting_list.delay", MagicMock()) def test_with_zero_user_invite(self): - UserInvite = 
self.bc.database.get_model('authenticate.UserInvite') + UserInvite = self.bc.database.get_model("authenticate.UserInvite") queryset = UserInvite.objects.filter() result = accept_selected_users_from_waiting_list(None, None, queryset) @@ -24,11 +24,11 @@ def test_with_zero_user_invite(self): 🔽🔽🔽 With two UserInvite """ - @patch('breathecode.authenticate.tasks.async_accept_user_from_waiting_list.delay', MagicMock()) + @patch("breathecode.authenticate.tasks.async_accept_user_from_waiting_list.delay", MagicMock()) def test_with_two_user_invites(self): self.bc.database.create(user_invite=2) - UserInvite = self.bc.database.get_model('authenticate.UserInvite') + UserInvite = self.bc.database.get_model("authenticate.UserInvite") queryset = UserInvite.objects.filter() result = accept_selected_users_from_waiting_list(None, None, queryset) @@ -40,7 +40,7 @@ def test_with_two_user_invites(self): 🔽🔽🔽 With four UserInvite, selecting just two items """ - @patch('breathecode.authenticate.tasks.async_accept_user_from_waiting_list.delay', MagicMock()) + @patch("breathecode.authenticate.tasks.async_accept_user_from_waiting_list.delay", MagicMock()) def test_with_four_user_invites__selecting_just_two_items(self): options = {1, 2, 3, 4} self.bc.database.create(user_invite=4) @@ -55,27 +55,30 @@ def test_with_four_user_invites__selecting_just_two_items(self): ids.sort() - UserInvite = self.bc.database.get_model('authenticate.UserInvite') + UserInvite = self.bc.database.get_model("authenticate.UserInvite") queryset = UserInvite.objects.filter(id__in=ids) result = accept_selected_users_from_waiting_list(None, None, queryset) self.assertEqual(result, None) - self.assertEqual(tasks.async_accept_user_from_waiting_list.delay.call_args_list, [ - call(ids[0]), - call(ids[1]), - ]) + self.assertEqual( + tasks.async_accept_user_from_waiting_list.delay.call_args_list, + [ + call(ids[0]), + call(ids[1]), + ], + ) """ 🔽🔽🔽 With three UserInvite, passing all the valid statuses """ - @patch('breathecode.authenticate.tasks.async_accept_user_from_waiting_list.delay', MagicMock()) + @patch("breathecode.authenticate.tasks.async_accept_user_from_waiting_list.delay", MagicMock()) def test_with_three_user_invites__passing_all_the_valid_statuses(self): - user_invites = [{'process_status': x} for x in ['PENDING', 'DONE', 'ERROR']] + user_invites = [{"process_status": x} for x in ["PENDING", "DONE", "ERROR"]] self.bc.database.create(user_invite=user_invites) - UserInvite = self.bc.database.get_model('authenticate.UserInvite') + UserInvite = self.bc.database.get_model("authenticate.UserInvite") queryset = UserInvite.objects.filter() result = accept_selected_users_from_waiting_list(None, None, queryset) diff --git a/breathecode/authenticate/tests/management/commands/tests_confirm_no_saas_emails.py b/breathecode/authenticate/tests/management/commands/tests_confirm_no_saas_emails.py index 9ef4f6773..b2bcef413 100644 --- a/breathecode/authenticate/tests/management/commands/tests_confirm_no_saas_emails.py +++ b/breathecode/authenticate/tests/management/commands/tests_confirm_no_saas_emails.py @@ -1,6 +1,7 @@ """ Test /academy/cohort """ + from unittest.mock import MagicMock, call, patch from django.core.management.base import OutputWrapper @@ -16,27 +17,33 @@ class AcademyCohortTestSuite(AuthTestCase): # When: No invites # Then: Shouldn't do anything - @patch('django.core.management.base.OutputWrapper.write', MagicMock()) + @patch("django.core.management.base.OutputWrapper.write", MagicMock()) def test_0_invites(self): command = Command() 
result = command.handle() self.assertEqual(result, None) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), []) - self.assertEqual(OutputWrapper.write.call_args_list, [ - call('Successfully confirmed 0 invites'), - ]) + self.assertEqual(self.bc.database.list_of("authenticate.UserInvite"), []) + self.assertEqual( + OutputWrapper.write.call_args_list, + [ + call("Successfully confirmed 0 invites"), + ], + ) # Given: 2 UserInvite, 1 Academy # When: email is not validated and academy is not available as saas # Then: validate all emails - @patch('django.core.management.base.OutputWrapper.write', MagicMock()) + @patch("django.core.management.base.OutputWrapper.write", MagicMock()) def test_2_invites__no_saas__academy(self): - academy = {'available_as_saas': False} - user_invites = [{ - 'email': self.bc.fake.email(), - 'is_email_validated': False, - } for _ in range(2)] + academy = {"available_as_saas": False} + user_invites = [ + { + "email": self.bc.fake.email(), + "is_email_validated": False, + } + for _ in range(2) + ] model = self.bc.database.create(user_invite=user_invites, academy=academy) @@ -44,31 +51,40 @@ def test_2_invites__no_saas__academy(self): result = command.handle() self.assertEqual(result, None) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), [ - { - **self.bc.format.to_dict(model.user_invite[0]), - 'is_email_validated': True, - }, - { - **self.bc.format.to_dict(model.user_invite[1]), - 'is_email_validated': True, - }, - ]) - self.assertEqual(OutputWrapper.write.call_args_list, [ - call('Successfully confirmed 2 invites'), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.UserInvite"), + [ + { + **self.bc.format.to_dict(model.user_invite[0]), + "is_email_validated": True, + }, + { + **self.bc.format.to_dict(model.user_invite[1]), + "is_email_validated": True, + }, + ], + ) + self.assertEqual( + OutputWrapper.write.call_args_list, + [ + call("Successfully confirmed 2 invites"), + ], + ) # Given: 2 UserInvite, 1 Academy, 1 Cohort # When: email is not validated and cohort from an academy is not available as saas # Then: validate all emails - @patch('django.core.management.base.OutputWrapper.write', MagicMock()) + @patch("django.core.management.base.OutputWrapper.write", MagicMock()) def test_2_invites__no_saas__cohort(self): - academy = {'available_as_saas': False} - user_invites = [{ - 'academy_id': None, - 'email': self.bc.fake.email(), - 'is_email_validated': False, - } for _ in range(2)] + academy = {"available_as_saas": False} + user_invites = [ + { + "academy_id": None, + "email": self.bc.fake.email(), + "is_email_validated": False, + } + for _ in range(2) + ] model = self.bc.database.create(user_invite=user_invites, academy=academy, cohort=1) @@ -76,30 +92,39 @@ def test_2_invites__no_saas__cohort(self): result = command.handle() self.assertEqual(result, None) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), [ - { - **self.bc.format.to_dict(model.user_invite[0]), - 'is_email_validated': True, - }, - { - **self.bc.format.to_dict(model.user_invite[1]), - 'is_email_validated': True, - }, - ]) - self.assertEqual(OutputWrapper.write.call_args_list, [ - call('Successfully confirmed 2 invites'), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.UserInvite"), + [ + { + **self.bc.format.to_dict(model.user_invite[0]), + "is_email_validated": True, + }, + { + **self.bc.format.to_dict(model.user_invite[1]), + "is_email_validated": True, + }, + ], + ) + self.assertEqual( + 
OutputWrapper.write.call_args_list, + [ + call("Successfully confirmed 2 invites"), + ], + ) # Given: 2 UserInvite, 1 Academy # When: email is not validated and academy is not available as saas # Then: Shouldn't do anything - @patch('django.core.management.base.OutputWrapper.write', MagicMock()) + @patch("django.core.management.base.OutputWrapper.write", MagicMock()) def test_2_invites__saas__academy(self): - academy = {'available_as_saas': True} - user_invites = [{ - 'email': self.bc.fake.email(), - 'is_email_validated': False, - } for _ in range(2)] + academy = {"available_as_saas": True} + user_invites = [ + { + "email": self.bc.fake.email(), + "is_email_validated": False, + } + for _ in range(2) + ] model = self.bc.database.create(user_invite=user_invites, academy=academy) @@ -107,26 +132,35 @@ def test_2_invites__saas__academy(self): result = command.handle() self.assertEqual(result, None) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), [ - self.bc.format.to_dict(model.user_invite[0]), - self.bc.format.to_dict(model.user_invite[1]), - ]) - - self.assertEqual(OutputWrapper.write.call_args_list, [ - call('Successfully confirmed 0 invites'), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.UserInvite"), + [ + self.bc.format.to_dict(model.user_invite[0]), + self.bc.format.to_dict(model.user_invite[1]), + ], + ) + + self.assertEqual( + OutputWrapper.write.call_args_list, + [ + call("Successfully confirmed 0 invites"), + ], + ) # Given: 2 UserInvite, 1 Academy, 1 Cohort # When: email is not validated and cohort from an academy is not available as saas # Then: Shouldn't do anything - @patch('django.core.management.base.OutputWrapper.write', MagicMock()) + @patch("django.core.management.base.OutputWrapper.write", MagicMock()) def test_2_invites__saas__cohort(self): - academy = {'available_as_saas': True} - user_invites = [{ - 'academy_id': None, - 'email': self.bc.fake.email(), - 'is_email_validated': False, - } for _ in range(2)] + academy = {"available_as_saas": True} + user_invites = [ + { + "academy_id": None, + "email": self.bc.fake.email(), + "is_email_validated": False, + } + for _ in range(2) + ] model = self.bc.database.create(user_invite=user_invites, academy=academy, cohort=1) @@ -134,25 +168,34 @@ def test_2_invites__saas__cohort(self): result = command.handle() self.assertEqual(result, None) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), [ - self.bc.format.to_dict(model.user_invite[0]), - self.bc.format.to_dict(model.user_invite[1]), - ]) - - self.assertEqual(OutputWrapper.write.call_args_list, [ - call('Successfully confirmed 0 invites'), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.UserInvite"), + [ + self.bc.format.to_dict(model.user_invite[0]), + self.bc.format.to_dict(model.user_invite[1]), + ], + ) + + self.assertEqual( + OutputWrapper.write.call_args_list, + [ + call("Successfully confirmed 0 invites"), + ], + ) # Given: 2 UserInvite, 1 Academy # When: email is validated and academy is not available as saas # Then: Shouldn't do anything - @patch('django.core.management.base.OutputWrapper.write', MagicMock()) + @patch("django.core.management.base.OutputWrapper.write", MagicMock()) def test_2_invites__email_already_validated__no_saas__academy(self): - academy = {'available_as_saas': False} - user_invites = [{ - 'email': self.bc.fake.email(), - 'is_email_validated': True, - } for _ in range(2)] + academy = {"available_as_saas": False} + user_invites = [ + { + "email": self.bc.fake.email(), 
+ "is_email_validated": True, + } + for _ in range(2) + ] model = self.bc.database.create(user_invite=user_invites, academy=academy) @@ -160,26 +203,35 @@ def test_2_invites__email_already_validated__no_saas__academy(self): result = command.handle() self.assertEqual(result, None) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), [ - self.bc.format.to_dict(model.user_invite[0]), - self.bc.format.to_dict(model.user_invite[1]), - ]) - - self.assertEqual(OutputWrapper.write.call_args_list, [ - call('Successfully confirmed 0 invites'), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.UserInvite"), + [ + self.bc.format.to_dict(model.user_invite[0]), + self.bc.format.to_dict(model.user_invite[1]), + ], + ) + + self.assertEqual( + OutputWrapper.write.call_args_list, + [ + call("Successfully confirmed 0 invites"), + ], + ) # Given: 2 UserInvite, 1 Academy, 1 Cohort # When: email is validated and cohort from an academy is not available as saas # Then: Shouldn't do anything - @patch('django.core.management.base.OutputWrapper.write', MagicMock()) + @patch("django.core.management.base.OutputWrapper.write", MagicMock()) def test_2_invites__email_already_validated__no_saas__cohort(self): - academy = {'available_as_saas': False} - user_invites = [{ - 'academy_id': None, - 'email': self.bc.fake.email(), - 'is_email_validated': True, - } for _ in range(2)] + academy = {"available_as_saas": False} + user_invites = [ + { + "academy_id": None, + "email": self.bc.fake.email(), + "is_email_validated": True, + } + for _ in range(2) + ] model = self.bc.database.create(user_invite=user_invites, academy=academy, cohort=1) @@ -187,11 +239,17 @@ def test_2_invites__email_already_validated__no_saas__cohort(self): result = command.handle() self.assertEqual(result, None) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), [ - self.bc.format.to_dict(model.user_invite[0]), - self.bc.format.to_dict(model.user_invite[1]), - ]) - - self.assertEqual(OutputWrapper.write.call_args_list, [ - call('Successfully confirmed 0 invites'), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.UserInvite"), + [ + self.bc.format.to_dict(model.user_invite[0]), + self.bc.format.to_dict(model.user_invite[1]), + ], + ) + + self.assertEqual( + OutputWrapper.write.call_args_list, + [ + call("Successfully confirmed 0 invites"), + ], + ) diff --git a/breathecode/authenticate/tests/management/commands/tests_fix_avatars.py b/breathecode/authenticate/tests/management/commands/tests_fix_avatars.py index 656386c9e..8ec5e08b0 100644 --- a/breathecode/authenticate/tests/management/commands/tests_fix_avatars.py +++ b/breathecode/authenticate/tests/management/commands/tests_fix_avatars.py @@ -1,6 +1,7 @@ """ Test /academy/cohort """ + import os import random import logging @@ -22,20 +23,20 @@ class AcademyCohortTestSuite(AuthTestCase): 🔽🔽🔽 With zero Profile """ - @patch('logging.Logger.info', MagicMock()) + @patch("logging.Logger.info", MagicMock()) def test_with_zero_profiles(self): command = Command() result = command.handle() self.assertEqual(result, None) - self.assertEqual(self.bc.database.list_of('authenticate.Profile'), []) - self.assertEqual(logging.Logger.info.call_args_list, [call('Fixing 0 avatars')]) + self.assertEqual(self.bc.database.list_of("authenticate.Profile"), []) + self.assertEqual(logging.Logger.info.call_args_list, [call("Fixing 0 avatars")]) """ 🔽🔽🔽 With two Profile, avatar_url is null """ - @patch('logging.Logger.info', MagicMock()) + 
@patch("logging.Logger.info", MagicMock()) def test_with_two_profiles__avatar_url_is_null(self): model = self.bc.database.create(profile=2) logging.Logger.info.call_args_list = [] @@ -45,18 +46,18 @@ def test_with_two_profiles__avatar_url_is_null(self): self.assertEqual(result, None) self.assertEqual( - self.bc.database.list_of('authenticate.Profile'), + self.bc.database.list_of("authenticate.Profile"), self.bc.format.to_dict(model.profile), ) - self.assertEqual(logging.Logger.info.call_args_list, [call('Fixing 0 avatars')]) + self.assertEqual(logging.Logger.info.call_args_list, [call("Fixing 0 avatars")]) """ 🔽🔽🔽 With two Profile, avatar_url is set, does'nt match with API_URL """ - @patch('logging.Logger.info', MagicMock()) + @patch("logging.Logger.info", MagicMock()) def test_with_two_profiles__avatar_url_is_set__does_not_match_with_api_url(self): - profiles = [{'avatar_url': self.bc.fake.url()[0:-1]} for _ in range(0, 2)] + profiles = [{"avatar_url": self.bc.fake.url()[0:-1]} for _ in range(0, 2)] model = self.bc.database.create(profile=profiles) logging.Logger.info.call_args_list = [] @@ -65,52 +66,50 @@ def test_with_two_profiles__avatar_url_is_set__does_not_match_with_api_url(self) self.assertEqual(result, None) self.assertEqual( - self.bc.database.list_of('authenticate.Profile'), + self.bc.database.list_of("authenticate.Profile"), self.bc.format.to_dict(model.profile), ) - self.assertEqual(logging.Logger.info.call_args_list, [call('Fixing 0 avatars')]) + self.assertEqual(logging.Logger.info.call_args_list, [call("Fixing 0 avatars")]) """ 🔽🔽🔽 With two Profile, avatar_url is set, match with API_URL """ - @patch('logging.Logger.info', MagicMock()) + @patch("logging.Logger.info", MagicMock()) def test_with_two_profiles__avatar_url_is_set__match_with_api_url(self): api_url = self.bc.fake.url()[0:-1] - latest_avatar_url = api_url + '/static/img/avatar.png' - profiles = [{'avatar_url': latest_avatar_url} for _ in range(0, 2)] + latest_avatar_url = api_url + "/static/img/avatar.png" + profiles = [{"avatar_url": latest_avatar_url} for _ in range(0, 2)] model = self.bc.database.create(profile=profiles) logging.Logger.info.call_args_list = [] random_numbers = [random.randint(1, 21) for _ in range(0, 2)] - with patch('random.randint') as randint_mock: + with patch("random.randint") as randint_mock: randint_mock.side_effect = random_numbers - with patch('os.getenv') as getenv_mock: - getenv_mock.side_effect = apply_get_env({'API_URL': api_url}) + with patch("os.getenv") as getenv_mock: + getenv_mock.side_effect = apply_get_env({"API_URL": api_url}) command = Command() result = command.handle() - self.assertEqual(os.getenv.call_args_list, [call('API_URL', '')]) + self.assertEqual(os.getenv.call_args_list, [call("API_URL", "")]) self.assertEqual(random.randint.call_args_list, [call(1, 21), call(1, 21)]) self.assertEqual(result, None) self.assertEqual( - self.bc.database.list_of('authenticate.Profile'), + self.bc.database.list_of("authenticate.Profile"), [ { **self.bc.format.to_dict(model.profile[0]), - 'avatar_url': - api_url + f'/static/img/avatar-{random_numbers[0]}.png', + "avatar_url": api_url + f"/static/img/avatar-{random_numbers[0]}.png", }, { **self.bc.format.to_dict(model.profile[1]), - 'avatar_url': - api_url + f'/static/img/avatar-{random_numbers[1]}.png', + "avatar_url": api_url + f"/static/img/avatar-{random_numbers[1]}.png", }, ], ) - self.assertEqual(logging.Logger.info.call_args_list, [call('Fixing 2 avatars')]) + self.assertEqual(logging.Logger.info.call_args_list, [call("Fixing 2 
avatars")]) diff --git a/breathecode/authenticate/tests/management/commands/tests_fix_github_academy_user_logs.py b/breathecode/authenticate/tests/management/commands/tests_fix_github_academy_user_logs.py index 001fa593d..f14e3db2e 100644 --- a/breathecode/authenticate/tests/management/commands/tests_fix_github_academy_user_logs.py +++ b/breathecode/authenticate/tests/management/commands/tests_fix_github_academy_user_logs.py @@ -10,8 +10,8 @@ T3 = T2 + timezone.timedelta(days=1) T4 = T3 + timezone.timedelta(days=1) -storage_statuses = ['PENDING', 'SYNCHED', 'ERROR', 'UNKNOWN', 'PAYMENT_CONFLICT'] -storage_actions = ['ADD', 'INVITE', 'DELETE', 'IGNORE'] +storage_statuses = ["PENDING", "SYNCHED", "ERROR", "UNKNOWN", "PAYMENT_CONFLICT"] +storage_actions = ["ADD", "INVITE", "DELETE", "IGNORE"] @pytest.fixture(autouse=True) @@ -25,14 +25,14 @@ def test__nothing_to_migrate(bc: Breathecode): command = Command() command.handle() - assert bc.database.list_of('authenticate.GithubAcademyUserLog') == [] + assert bc.database.list_of("authenticate.GithubAcademyUserLog") == [] # When: Changes the storage_status # Then: it should link the previous log with the new one -@pytest.mark.parametrize('storage_status', storage_statuses) +@pytest.mark.parametrize("storage_status", storage_statuses) @pytest.mark.parametrize( - 'storage_action1,storage_action2,storage_action3', + "storage_action1,storage_action2,storage_action3", [ (storage_actions[0], storage_actions[1], storage_actions[2]), # +1 (storage_actions[0], storage_actions[2], storage_actions[0]), # +2 @@ -41,19 +41,19 @@ def test__nothing_to_migrate(bc: Breathecode): (storage_actions[1], storage_actions[2], storage_actions[3]), # +1 (storage_actions[1], storage_actions[3], storage_actions[1]), # +2 (storage_actions[1], storage_actions[0], storage_actions[3]), # +3 - # (storage_actions[2], storage_actions[3], storage_actions[0]), # +1 (storage_actions[2], storage_actions[0], storage_actions[2]), # +2 (storage_actions[2], storage_actions[1], storage_actions[0]), # +3 - # (storage_actions[3], storage_actions[0], storage_actions[1]), # +1 (storage_actions[3], storage_actions[1], storage_actions[3]), # +2 (storage_actions[3], storage_actions[2], storage_actions[1]), # +3 - ]) -def test__storage_action_does_not_avoid_the_indexation(bc: Breathecode, monkeypatch, storage_status, storage_action1, - storage_action2, storage_action3): + ], +) +def test__storage_action_does_not_avoid_the_indexation( + bc: Breathecode, monkeypatch, storage_status, storage_action1, storage_action2, storage_action3 +): delta = timezone.timedelta(hours=random.randint(1, 24)) base_user1 = bc.database.create(user=1, github_academy_user=1) @@ -61,92 +61,98 @@ def test__storage_action_does_not_avoid_the_indexation(bc: Breathecode, monkeypa # first status github_academy_user_log = { - 'valid_until': None, - 'storage_status': storage_status, - 'storage_action': storage_action1, + "valid_until": None, + "storage_status": storage_status, + "storage_action": storage_action1, } - monkeypatch.setattr('django.utils.timezone.now', lambda: T1) + monkeypatch.setattr("django.utils.timezone.now", lambda: T1) user1__github_academy_user_log1 = bc.database.create( user=base_user1.user, academy=base_user1.academy, github_academy_user=base_user1.github_academy_user, - github_academy_user_log=github_academy_user_log).github_academy_user_log + github_academy_user_log=github_academy_user_log, + ).github_academy_user_log - monkeypatch.setattr('django.utils.timezone.now', lambda: T1 + delta) + 
monkeypatch.setattr("django.utils.timezone.now", lambda: T1 + delta) user2__github_academy_user_log1 = bc.database.create( user=base_user2.user, github_academy_user=base_user2.github_academy_user, - github_academy_user_log=github_academy_user_log).github_academy_user_log + github_academy_user_log=github_academy_user_log, + ).github_academy_user_log # second status - github_academy_user_log['storage_action'] = storage_action2 + github_academy_user_log["storage_action"] = storage_action2 - monkeypatch.setattr('django.utils.timezone.now', lambda: T2) + monkeypatch.setattr("django.utils.timezone.now", lambda: T2) user1__github_academy_user_log2 = bc.database.create( user=base_user1.user, academy=base_user1.academy, github_academy_user=base_user1.github_academy_user, - github_academy_user_log=github_academy_user_log).github_academy_user_log + github_academy_user_log=github_academy_user_log, + ).github_academy_user_log - monkeypatch.setattr('django.utils.timezone.now', lambda: T2 + delta) + monkeypatch.setattr("django.utils.timezone.now", lambda: T2 + delta) user2__github_academy_user_log2 = bc.database.create( user=base_user2.user, github_academy_user=base_user2.github_academy_user, - github_academy_user_log=github_academy_user_log).github_academy_user_log + github_academy_user_log=github_academy_user_log, + ).github_academy_user_log # third status - github_academy_user_log['storage_action'] = storage_action3 + github_academy_user_log["storage_action"] = storage_action3 - monkeypatch.setattr('django.utils.timezone.now', lambda: T3) + monkeypatch.setattr("django.utils.timezone.now", lambda: T3) user1__github_academy_user_log3 = bc.database.create( user=base_user1.user, academy=base_user1.academy, github_academy_user=base_user1.github_academy_user, - github_academy_user_log=github_academy_user_log).github_academy_user_log + github_academy_user_log=github_academy_user_log, + ).github_academy_user_log - monkeypatch.setattr('django.utils.timezone.now', lambda: T3 + delta) + monkeypatch.setattr("django.utils.timezone.now", lambda: T3 + delta) user2__github_academy_user_log3 = bc.database.create( user=base_user2.user, github_academy_user=base_user2.github_academy_user, - github_academy_user_log=github_academy_user_log).github_academy_user_log + github_academy_user_log=github_academy_user_log, + ).github_academy_user_log command = Command() command.handle() - assert bc.database.list_of('authenticate.GithubAcademyUserLog') == [ + assert bc.database.list_of("authenticate.GithubAcademyUserLog") == [ { **bc.format.to_dict(user1__github_academy_user_log1), - 'valid_until': T2, + "valid_until": T2, }, { **bc.format.to_dict(user2__github_academy_user_log1), - 'valid_until': T2 + delta, + "valid_until": T2 + delta, }, { **bc.format.to_dict(user1__github_academy_user_log2), - 'valid_until': T3, + "valid_until": T3, }, { **bc.format.to_dict(user2__github_academy_user_log2), - 'valid_until': T3 + delta, + "valid_until": T3 + delta, }, { **bc.format.to_dict(user1__github_academy_user_log3), - 'valid_until': None, + "valid_until": None, }, { **bc.format.to_dict(user2__github_academy_user_log3), - 'valid_until': None, + "valid_until": None, }, ] # When: Changes the storage_action # Then: it should link the previous log with the new one -@pytest.mark.parametrize('storage_action', storage_actions) +@pytest.mark.parametrize("storage_action", storage_actions) @pytest.mark.parametrize( - 'storage_status1,storage_status2,storage_status3', + "storage_status1,storage_status2,storage_status3", [ (storage_statuses[0], 
storage_statuses[1], storage_statuses[2]), # +1 (storage_statuses[0], storage_statuses[2], storage_statuses[4]), # +2 @@ -157,109 +163,114 @@ def test__storage_action_does_not_avoid_the_indexation(bc: Breathecode, monkeypa (storage_statuses[1], storage_statuses[3], storage_statuses[0]), # +2 (storage_statuses[1], storage_statuses[4], storage_statuses[3]), # +3 (storage_statuses[1], storage_statuses[0], storage_statuses[4]), # +4 - # (storage_statuses[2], storage_statuses[3], storage_statuses[4]), # +1 (storage_statuses[2], storage_statuses[4], storage_statuses[1]), # +2 (storage_statuses[2], storage_statuses[0], storage_statuses[3]), # +3 (storage_statuses[2], storage_statuses[1], storage_statuses[0]), # +4 - # (storage_statuses[3], storage_statuses[4], storage_statuses[0]), # +1 (storage_statuses[3], storage_statuses[0], storage_statuses[2]), # +2 (storage_statuses[3], storage_statuses[1], storage_statuses[4]), # +3 (storage_statuses[3], storage_statuses[2], storage_statuses[1]), # +4 - # (storage_statuses[4], storage_statuses[0], storage_statuses[1]), # +1 (storage_statuses[4], storage_statuses[1], storage_statuses[3]), # +2 (storage_statuses[4], storage_statuses[2], storage_statuses[0]), # +3 (storage_statuses[4], storage_statuses[3], storage_statuses[2]), # +4 - ]) -def test__storage_status_does_not_avoid_the_indexation(bc: Breathecode, monkeypatch, storage_action, storage_status1, - storage_status2, storage_status3): + ], +) +def test__storage_status_does_not_avoid_the_indexation( + bc: Breathecode, monkeypatch, storage_action, storage_status1, storage_status2, storage_status3 +): delta = timezone.timedelta(hours=random.randint(1, 24)) base_user1 = bc.database.create(user=1, github_academy_user=1) base_user2 = bc.database.create(user=1, github_academy_user=1) # first status github_academy_user_log = { - 'valid_until': None, - 'storage_status': storage_status1, - 'storage_action': storage_action, + "valid_until": None, + "storage_status": storage_status1, + "storage_action": storage_action, } - monkeypatch.setattr('django.utils.timezone.now', lambda: T1) + monkeypatch.setattr("django.utils.timezone.now", lambda: T1) user1__github_academy_user_log1 = bc.database.create( user=base_user1.user, academy=base_user1.academy, github_academy_user=base_user1.github_academy_user, - github_academy_user_log=github_academy_user_log).github_academy_user_log + github_academy_user_log=github_academy_user_log, + ).github_academy_user_log - monkeypatch.setattr('django.utils.timezone.now', lambda: T1 + delta) + monkeypatch.setattr("django.utils.timezone.now", lambda: T1 + delta) user2__github_academy_user_log1 = bc.database.create( user=base_user2.user, github_academy_user=base_user2.github_academy_user, - github_academy_user_log=github_academy_user_log).github_academy_user_log + github_academy_user_log=github_academy_user_log, + ).github_academy_user_log # second status - github_academy_user_log['storage_status'] = storage_status2 + github_academy_user_log["storage_status"] = storage_status2 - monkeypatch.setattr('django.utils.timezone.now', lambda: T2) + monkeypatch.setattr("django.utils.timezone.now", lambda: T2) user1__github_academy_user_log2 = bc.database.create( user=base_user1.user, academy=base_user1.academy, github_academy_user=base_user1.github_academy_user, - github_academy_user_log=github_academy_user_log).github_academy_user_log + github_academy_user_log=github_academy_user_log, + ).github_academy_user_log - monkeypatch.setattr('django.utils.timezone.now', lambda: T2 + delta) + 
monkeypatch.setattr("django.utils.timezone.now", lambda: T2 + delta) user2__github_academy_user_log2 = bc.database.create( user=base_user2.user, github_academy_user=base_user2.github_academy_user, - github_academy_user_log=github_academy_user_log).github_academy_user_log + github_academy_user_log=github_academy_user_log, + ).github_academy_user_log # third status - github_academy_user_log['storage_status'] = storage_status3 + github_academy_user_log["storage_status"] = storage_status3 - monkeypatch.setattr('django.utils.timezone.now', lambda: T3) + monkeypatch.setattr("django.utils.timezone.now", lambda: T3) user1__github_academy_user_log3 = bc.database.create( user=base_user1.user, academy=base_user1.academy, github_academy_user=base_user1.github_academy_user, - github_academy_user_log=github_academy_user_log).github_academy_user_log + github_academy_user_log=github_academy_user_log, + ).github_academy_user_log - monkeypatch.setattr('django.utils.timezone.now', lambda: T3 + delta) + monkeypatch.setattr("django.utils.timezone.now", lambda: T3 + delta) user2__github_academy_user_log3 = bc.database.create( user=base_user2.user, github_academy_user=base_user2.github_academy_user, - github_academy_user_log=github_academy_user_log).github_academy_user_log + github_academy_user_log=github_academy_user_log, + ).github_academy_user_log command = Command() command.handle() - assert bc.database.list_of('authenticate.GithubAcademyUserLog') == [ + assert bc.database.list_of("authenticate.GithubAcademyUserLog") == [ { **bc.format.to_dict(user1__github_academy_user_log1), - 'valid_until': T2, + "valid_until": T2, }, { **bc.format.to_dict(user2__github_academy_user_log1), - 'valid_until': T2 + delta, + "valid_until": T2 + delta, }, { **bc.format.to_dict(user1__github_academy_user_log2), - 'valid_until': T3, + "valid_until": T3, }, { **bc.format.to_dict(user2__github_academy_user_log2), - 'valid_until': T3 + delta, + "valid_until": T3 + delta, }, { **bc.format.to_dict(user1__github_academy_user_log3), - 'valid_until': None, + "valid_until": None, }, { **bc.format.to_dict(user2__github_academy_user_log3), - 'valid_until': None, + "valid_until": None, }, ] diff --git a/breathecode/authenticate/tests/management/commands/tests_seed_groups.py b/breathecode/authenticate/tests/management/commands/tests_seed_groups.py index 510029f10..83a61cd81 100644 --- a/breathecode/authenticate/tests/management/commands/tests_seed_groups.py +++ b/breathecode/authenticate/tests/management/commands/tests_seed_groups.py @@ -13,7 +13,7 @@ def sort_by_id(items): - return sorted(items, key=lambda x: x['id']) + return sorted(items, key=lambda x: x["id"]) class TokenTestSuite(AuthTestCase): @@ -27,358 +27,250 @@ def test__execute__with_three_users(self): command.handle() # the rest of elements are generated by django, is better ignored it - self.assertEqual(self.bc.database.list_of('auth.Group'), [ - { - 'id': 1, - 'name': 'Default' - }, - { - 'id': 2, - 'name': 'Mentor' - }, - { - 'id': 3, - 'name': 'Student' - }, - { - 'id': 4, - 'name': 'Teacher' - }, - ]) + self.assertEqual( + self.bc.database.list_of("auth.Group"), + [ + {"id": 1, "name": "Default"}, + {"id": 2, "name": "Mentor"}, + {"id": 3, "name": "Student"}, + {"id": 4, "name": "Teacher"}, + ], + ) for user in model.user: - self.assertEqual(self.bc.format.table(user.groups.all()), [{'id': 1, 'name': 'Default'}]) + self.assertEqual(self.bc.format.table(user.groups.all()), [{"id": 1, "name": "Default"}]) """ 🔽🔽🔽 With three User and MentorProfile """ def 
test__execute__with_three_users_and_three_mentor_profiles(self): - mentor_profiles = [{'user_id': n} for n in range(1, 4)] + mentor_profiles = [{"user_id": n} for n in range(1, 4)] model = self.bc.database.create(user=3, mentor_profile=mentor_profiles) command = Command() command.handle() # the rest of elements are generated by django, is better ignored it - self.assertEqual(self.bc.database.list_of('auth.Group'), [ - { - 'id': 1, - 'name': 'Default' - }, - { - 'id': 2, - 'name': 'Mentor' - }, - { - 'id': 3, - 'name': 'Student' - }, - { - 'id': 4, - 'name': 'Teacher' - }, - ]) + self.assertEqual( + self.bc.database.list_of("auth.Group"), + [ + {"id": 1, "name": "Default"}, + {"id": 2, "name": "Mentor"}, + {"id": 3, "name": "Student"}, + {"id": 4, "name": "Teacher"}, + ], + ) for user in model.user: - self.assertEqual(self.bc.format.table(user.groups.all()), [ - { - 'id': 1, - 'name': 'Default' - }, - { - 'id': 2, - 'name': 'Mentor' - }, - ]) + self.assertEqual( + self.bc.format.table(user.groups.all()), + [ + {"id": 1, "name": "Default"}, + {"id": 2, "name": "Mentor"}, + ], + ) """ 🔽🔽🔽 With three User and MentorProfile """ def test__execute__with_three_users__with_three_profile_academies__other_role(self): - profile_academies = [{'user_id': n} for n in range(1, 4)] + profile_academies = [{"user_id": n} for n in range(1, 4)] model = self.bc.database.create(user=3, profile_academies=profile_academies, role=1) command = Command() command.handle() # the rest of elements are generated by django, is better ignored it - self.assertEqual(self.bc.database.list_of('auth.Group'), [ - { - 'id': 1, - 'name': 'Default' - }, - { - 'id': 2, - 'name': 'Mentor' - }, - { - 'id': 3, - 'name': 'Student' - }, - { - 'id': 4, - 'name': 'Teacher' - }, - ]) + self.assertEqual( + self.bc.database.list_of("auth.Group"), + [ + {"id": 1, "name": "Default"}, + {"id": 2, "name": "Mentor"}, + {"id": 3, "name": "Student"}, + {"id": 4, "name": "Teacher"}, + ], + ) for user in model.user: - self.assertEqual(self.bc.format.table(user.groups.all()), [ - { - 'id': 1, - 'name': 'Default' - }, - ]) + self.assertEqual( + self.bc.format.table(user.groups.all()), + [ + {"id": 1, "name": "Default"}, + ], + ) """ 🔽🔽🔽 With three User and MentorProfile, role student """ def test__execute__with_three_users__with_three_profile_academies__role_student(self): - profile_academies = [{'user_id': n} for n in range(1, 4)] - model = self.bc.database.create(user=3, profile_academy=profile_academies, role='student') + profile_academies = [{"user_id": n} for n in range(1, 4)] + model = self.bc.database.create(user=3, profile_academy=profile_academies, role="student") command = Command() command.handle() # the rest of elements are generated by django, is better ignored it - self.assertEqual(self.bc.database.list_of('auth.Group'), [ - { - 'id': 1, - 'name': 'Default' - }, - { - 'id': 2, - 'name': 'Mentor' - }, - { - 'id': 3, - 'name': 'Student' - }, - { - 'id': 4, - 'name': 'Teacher' - }, - ]) + self.assertEqual( + self.bc.database.list_of("auth.Group"), + [ + {"id": 1, "name": "Default"}, + {"id": 2, "name": "Mentor"}, + {"id": 3, "name": "Student"}, + {"id": 4, "name": "Teacher"}, + ], + ) for user in model.user: - self.assertEqual(self.bc.format.table(user.groups.all()), [ - { - 'id': 1, - 'name': 'Default' - }, - { - 'id': 3, - 'name': 'Student' - }, - ]) + self.assertEqual( + self.bc.format.table(user.groups.all()), + [ + {"id": 1, "name": "Default"}, + {"id": 3, "name": "Student"}, + ], + ) """ 🔽🔽🔽 With three User and MentorProfile, role 
teacher """ def test__execute__with_three_users__with_three_profile_academies__role_teacher(self): - profile_academies = [{'user_id': n} for n in range(1, 4)] - model = self.bc.database.create(user=3, profile_academy=profile_academies, role='teacher') + profile_academies = [{"user_id": n} for n in range(1, 4)] + model = self.bc.database.create(user=3, profile_academy=profile_academies, role="teacher") command = Command() command.handle() # the rest of elements are generated by django, is better ignored it - self.assertEqual(self.bc.database.list_of('auth.Group'), [ - { - 'id': 1, - 'name': 'Default' - }, - { - 'id': 2, - 'name': 'Mentor' - }, - { - 'id': 3, - 'name': 'Student' - }, - { - 'id': 4, - 'name': 'Teacher' - }, - ]) + self.assertEqual( + self.bc.database.list_of("auth.Group"), + [ + {"id": 1, "name": "Default"}, + {"id": 2, "name": "Mentor"}, + {"id": 3, "name": "Student"}, + {"id": 4, "name": "Teacher"}, + ], + ) for user in model.user: - self.assertEqual(self.bc.format.table(user.groups.all()), [ - { - 'id': 1, - 'name': 'Default' - }, - { - 'id': 4, - 'name': 'Teacher' - }, - ]) + self.assertEqual( + self.bc.format.table(user.groups.all()), + [ + {"id": 1, "name": "Default"}, + {"id": 4, "name": "Teacher"}, + ], + ) """ 🔽🔽🔽 With three User, MentorProfile and Profile Academy, role student """ def test__execute__with_three_users_of_all_tables__role_student(self): - profile_academies = [{'user_id': n} for n in range(1, 4)] - mentor_profiles = [{'user_id': n} for n in range(1, 4)] - model = self.bc.database.create(user=3, - profile_academy=profile_academies, - mentor_profile=mentor_profiles, - role='student') + profile_academies = [{"user_id": n} for n in range(1, 4)] + mentor_profiles = [{"user_id": n} for n in range(1, 4)] + model = self.bc.database.create( + user=3, profile_academy=profile_academies, mentor_profile=mentor_profiles, role="student" + ) command = Command() command.handle() # the rest of elements are generated by django, is better ignored it - self.assertEqual(self.bc.database.list_of('auth.Group'), [ - { - 'id': 1, - 'name': 'Default' - }, - { - 'id': 2, - 'name': 'Mentor' - }, - { - 'id': 3, - 'name': 'Student' - }, - { - 'id': 4, - 'name': 'Teacher' - }, - ]) + self.assertEqual( + self.bc.database.list_of("auth.Group"), + [ + {"id": 1, "name": "Default"}, + {"id": 2, "name": "Mentor"}, + {"id": 3, "name": "Student"}, + {"id": 4, "name": "Teacher"}, + ], + ) for user in model.user: - self.assertEqual(self.bc.format.table(user.groups.all()), [ - { - 'id': 1, - 'name': 'Default' - }, - { - 'id': 2, - 'name': 'Mentor' - }, - { - 'id': 3, - 'name': 'Student' - }, - ]) + self.assertEqual( + self.bc.format.table(user.groups.all()), + [ + {"id": 1, "name": "Default"}, + {"id": 2, "name": "Mentor"}, + {"id": 3, "name": "Student"}, + ], + ) """ 🔽🔽🔽 With three User, MentorProfile and Profile Academy, role teacher """ def test__execute__with_three_users_of_all_tables__role_teacher(self): - profile_academies = [{'user_id': n} for n in range(1, 4)] - mentor_profiles = [{'user_id': n} for n in range(1, 4)] - model = self.bc.database.create(user=3, - profile_academy=profile_academies, - mentor_profile=mentor_profiles, - role='teacher') + profile_academies = [{"user_id": n} for n in range(1, 4)] + mentor_profiles = [{"user_id": n} for n in range(1, 4)] + model = self.bc.database.create( + user=3, profile_academy=profile_academies, mentor_profile=mentor_profiles, role="teacher" + ) command = Command() command.handle() # the rest of elements are generated by django, is better 
ignored it - self.assertEqual(self.bc.database.list_of('auth.Group'), [ - { - 'id': 1, - 'name': 'Default' - }, - { - 'id': 2, - 'name': 'Mentor' - }, - { - 'id': 3, - 'name': 'Student' - }, - { - 'id': 4, - 'name': 'Teacher' - }, - ]) + self.assertEqual( + self.bc.database.list_of("auth.Group"), + [ + {"id": 1, "name": "Default"}, + {"id": 2, "name": "Mentor"}, + {"id": 3, "name": "Student"}, + {"id": 4, "name": "Teacher"}, + ], + ) for user in model.user: - self.assertEqual(self.bc.format.table(user.groups.all()), [ - { - 'id': 1, - 'name': 'Default' - }, - { - 'id': 2, - 'name': 'Mentor' - }, - { - 'id': 4, - 'name': 'Teacher' - }, - ]) + self.assertEqual( + self.bc.format.table(user.groups.all()), + [ + {"id": 1, "name": "Default"}, + {"id": 2, "name": "Mentor"}, + {"id": 4, "name": "Teacher"}, + ], + ) """ 🔽🔽🔽 With three User, MentorProfile and Profile Academy, three different cases """ def test__execute__with_three_users_of_all_tables__role_student__three_different_cases(self): - profile_academies = [{'user_id': 1, 'role_id': 'student'}, {'user_id': 2, 'role_id': 'teacher'}] - mentor_profiles = [{'user_id': 2}, {'user_id': 3}] - roles = [{'slug': x, 'name': x} for x in ['student', 'teacher']] - model = self.bc.database.create(user=3, - profile_academy=profile_academies, - mentor_profile=mentor_profiles, - role=roles) + profile_academies = [{"user_id": 1, "role_id": "student"}, {"user_id": 2, "role_id": "teacher"}] + mentor_profiles = [{"user_id": 2}, {"user_id": 3}] + roles = [{"slug": x, "name": x} for x in ["student", "teacher"]] + model = self.bc.database.create( + user=3, profile_academy=profile_academies, mentor_profile=mentor_profiles, role=roles + ) command = Command() command.handle() # the rest of elements are generated by django, is better ignored it - self.assertEqual(self.bc.database.list_of('auth.Group'), [ - { - 'id': 1, - 'name': 'Default' - }, - { - 'id': 2, - 'name': 'Mentor' - }, - { - 'id': 3, - 'name': 'Student' - }, - { - 'id': 4, - 'name': 'Teacher' - }, - ]) - - self.assertEqual(self.bc.format.table(model.user[0].groups.all()), [ - { - 'id': 1, - 'name': 'Default' - }, - { - 'id': 3, - 'name': 'Student' - }, - ]) - self.assertEqual(self.bc.format.table(model.user[1].groups.all()), [ - { - 'id': 1, - 'name': 'Default' - }, - { - 'id': 2, - 'name': 'Mentor' - }, - { - 'id': 4, - 'name': 'Teacher' - }, - ]) - self.assertEqual(self.bc.format.table(model.user[2].groups.all()), [ - { - 'id': 1, - 'name': 'Default' - }, - { - 'id': 2, - 'name': 'Mentor' - }, - ]) + self.assertEqual( + self.bc.database.list_of("auth.Group"), + [ + {"id": 1, "name": "Default"}, + {"id": 2, "name": "Mentor"}, + {"id": 3, "name": "Student"}, + {"id": 4, "name": "Teacher"}, + ], + ) + + self.assertEqual( + self.bc.format.table(model.user[0].groups.all()), + [ + {"id": 1, "name": "Default"}, + {"id": 3, "name": "Student"}, + ], + ) + self.assertEqual( + self.bc.format.table(model.user[1].groups.all()), + [ + {"id": 1, "name": "Default"}, + {"id": 2, "name": "Mentor"}, + {"id": 4, "name": "Teacher"}, + ], + ) + self.assertEqual( + self.bc.format.table(model.user[2].groups.all()), + [ + {"id": 1, "name": "Default"}, + {"id": 2, "name": "Mentor"}, + ], + ) diff --git a/breathecode/authenticate/tests/management/commands/tests_set_permissions.py b/breathecode/authenticate/tests/management/commands/tests_set_permissions.py index c2ffe8ed6..a37294ebe 100644 --- a/breathecode/authenticate/tests/management/commands/tests_set_permissions.py +++ 
b/breathecode/authenticate/tests/management/commands/tests_set_permissions.py @@ -8,53 +8,45 @@ PERMISSIONS = [ { - 'name': 'Can delete job', - 'description': 'Can delete job', - 'codename': 'delete_job', + "name": "Can delete job", + "description": "Can delete job", + "codename": "delete_job", }, { - 'name': 'Get my profile', - 'description': 'Get my profile', - 'codename': 'get_my_profile', + "name": "Get my profile", + "description": "Get my profile", + "codename": "get_my_profile", }, { - 'name': 'Create my profile', - 'description': 'Create my profile', - 'codename': 'create_my_profile', + "name": "Create my profile", + "description": "Create my profile", + "codename": "create_my_profile", }, { - 'name': 'Update my profile', - 'description': 'Update my profile', - 'codename': 'update_my_profile', + "name": "Update my profile", + "description": "Update my profile", + "codename": "update_my_profile", }, ] GROUPS = [ + {"name": "Admin", "permissions": [x["codename"] for x in PERMISSIONS], "inherit": []}, { - 'name': 'Admin', - 'permissions': [x['codename'] for x in PERMISSIONS], - 'inherit': [] + "name": "Default", + "permissions": ["delete_job", "get_my_profile", "create_my_profile", "update_my_profile"], + "inherit": [], }, { - 'name': 'Default', - 'permissions': ['delete_job', 'get_my_profile', 'create_my_profile', 'update_my_profile'], - 'inherit': [] - }, - { - 'name': 'Student', - 'permissions': ['delete_job', 'get_my_profile', 'create_my_profile', 'update_my_profile'], - 'inherit': [] - }, - { - 'name': 'Legacy', - 'permissions': [], - 'inherit': ['Default', 'Student', 'Legacy'] + "name": "Student", + "permissions": ["delete_job", "get_my_profile", "create_my_profile", "update_my_profile"], + "inherit": [], }, + {"name": "Legacy", "permissions": [], "inherit": ["Default", "Student", "Legacy"]}, ] def sort_by_id(items): - return sorted(items, key=lambda x: x['id']) + return sorted(items, key=lambda x: x["id"]) class TokenTestSuite(AuthTestCase): @@ -62,11 +54,11 @@ class TokenTestSuite(AuthTestCase): def setUp(self): super().setUp() - ContentType = self.bc.database.get_model('contenttypes.ContentType') - Permission = self.bc.database.get_model('auth.Permission') + ContentType = self.bc.database.get_model("contenttypes.ContentType") + Permission = self.bc.database.get_model("auth.Permission") - content_type = ContentType.objects.filter().order_by('-id').first() - permission = Permission.objects.filter().order_by('-id').first() + content_type = ContentType.objects.filter().order_by("-id").first() + permission = Permission.objects.filter().order_by("-id").first() # the behavior of permissions is not exact, this changes every time you add a model self.latest_content_type_id = content_type.id @@ -82,9 +74,9 @@ def test__format__permissions(self): from breathecode.authenticate.management.commands.set_permissions import PERMISSIONS for permission in PERMISSIONS: - self.assertRegex(permission['name'], r'^[a-zA-Z ]+$') - self.assertRegex(permission['description'], r'^[a-zA-Z,. _()"]+$') - self.assertRegex(permission['codename'], r'^[a-z_]+$') + self.assertRegex(permission["name"], r"^[a-zA-Z ]+$") + self.assertRegex(permission["description"], r'^[a-zA-Z,. 
_()"]+$') + self.assertRegex(permission["codename"], r"^[a-z_]+$") self.assertEqual(len(permission), 3) """ @@ -95,13 +87,13 @@ def test__format__groups(self): from breathecode.authenticate.management.commands.set_permissions import GROUPS for group in GROUPS: - self.assertRegex(group['name'], r'^[a-zA-Z ]+$') + self.assertRegex(group["name"], r"^[a-zA-Z ]+$") - for permission in group['permissions']: - self.assertRegex(permission, r'^[a-z_]+$') + for permission in group["permissions"]: + self.assertRegex(permission, r"^[a-z_]+$") - for g in group['inherit']: - self.assertTrue(g in [x['name'] for x in GROUPS]) + for g in group["inherit"]: + self.assertTrue(g in [x["name"] for x in GROUPS]) self.assertEqual(len(group), 3) @@ -109,11 +101,13 @@ def test__format__groups(self): 🔽🔽🔽 execute successfully """ - @patch('breathecode.authenticate.management.commands.set_permissions.get_permissions', - MagicMock(return_value=PERMISSIONS)) - @patch('breathecode.authenticate.management.commands.set_permissions.get_groups', MagicMock(return_value=GROUPS)) + @patch( + "breathecode.authenticate.management.commands.set_permissions.get_permissions", + MagicMock(return_value=PERMISSIONS), + ) + @patch("breathecode.authenticate.management.commands.set_permissions.get_groups", MagicMock(return_value=GROUPS)) def test__execute__ends_successfully(self): - Permission = self.bc.database.get_model('auth.Permission') + Permission = self.bc.database.get_model("auth.Permission") permissions = self.bc.format.to_dict(Permission.objects.all()) command = Command() @@ -121,164 +115,167 @@ def test__execute__ends_successfully(self): # the rest of elements are generated by django, is better ignored it self.assertEqual( - self.bc.database.list_of('contenttypes.ContentType')[-1:], [ + self.bc.database.list_of("contenttypes.ContentType")[-1:], + [ { - 'app_label': 'breathecode', - 'id': self.latest_content_type_id + 1, - 'model': 'SortingHat', + "app_label": "breathecode", + "id": self.latest_content_type_id + 1, + "model": "SortingHat", }, - ]) + ], + ) # the rest of elements are generated by django, is better ignored it self.assertEqual( - sort_by_id(self.bc.database.list_of('auth.Permission'))[-3:], [ + sort_by_id(self.bc.database.list_of("auth.Permission"))[-3:], + [ { - 'codename': 'get_my_profile', - 'content_type_id': self.latest_content_type_id + 1, - 'id': self.latest_permission_id + 1, - 'name': 'Get my profile' + "codename": "get_my_profile", + "content_type_id": self.latest_content_type_id + 1, + "id": self.latest_permission_id + 1, + "name": "Get my profile", }, { - 'codename': 'create_my_profile', - 'content_type_id': self.latest_content_type_id + 1, - 'id': self.latest_permission_id + 2, - 'name': 'Create my profile' + "codename": "create_my_profile", + "content_type_id": self.latest_content_type_id + 1, + "id": self.latest_permission_id + 2, + "name": "Create my profile", }, { - 'codename': 'update_my_profile', - 'content_type_id': self.latest_content_type_id + 1, - 'id': self.latest_permission_id + 3, - 'name': 'Update my profile', + "codename": "update_my_profile", + "content_type_id": self.latest_content_type_id + 1, + "id": self.latest_permission_id + 3, + "name": "Update my profile", }, - ]) + ], + ) - self.assertEqual(self.bc.database.list_of('auth.Group'), [ - { - 'id': 1, - 'name': 'Admin' - }, - { - 'id': 2, - 'name': 'Default' - }, - { - 'id': 3, - 'name': 'Student' - }, - { - 'id': 4, - 'name': 'Legacy' - }, - ]) + self.assertEqual( + self.bc.database.list_of("auth.Group"), + [ + {"id": 1, "name": 
"Admin"}, + {"id": 2, "name": "Default"}, + {"id": 3, "name": "Student"}, + {"id": 4, "name": "Legacy"}, + ], + ) self.assertEqual( - sort_by_id(self.bc.format.to_dict(Group.objects.filter(name='Admin').first().permissions.all())), [ + sort_by_id(self.bc.format.to_dict(Group.objects.filter(name="Admin").first().permissions.all())), + [ *sort_by_id(permissions), { - 'codename': 'get_my_profile', - 'content_type_id': self.latest_content_type_id + 1, - 'id': self.latest_permission_id + 1, - 'name': 'Get my profile' + "codename": "get_my_profile", + "content_type_id": self.latest_content_type_id + 1, + "id": self.latest_permission_id + 1, + "name": "Get my profile", }, { - 'codename': 'create_my_profile', - 'content_type_id': self.latest_content_type_id + 1, - 'id': self.latest_permission_id + 2, - 'name': 'Create my profile' + "codename": "create_my_profile", + "content_type_id": self.latest_content_type_id + 1, + "id": self.latest_permission_id + 2, + "name": "Create my profile", }, { - 'codename': 'update_my_profile', - 'content_type_id': self.latest_content_type_id + 1, - 'id': self.latest_permission_id + 3, - 'name': 'Update my profile' + "codename": "update_my_profile", + "content_type_id": self.latest_content_type_id + 1, + "id": self.latest_permission_id + 3, + "name": "Update my profile", }, - ]) + ], + ) self.assertEqual( - sort_by_id(self.bc.format.to_dict(Group.objects.filter(name='Default').first().permissions.all())), [ + sort_by_id(self.bc.format.to_dict(Group.objects.filter(name="Default").first().permissions.all())), + [ { - 'codename': 'delete_job', - 'content_type_id': self.job_content_type_id, - 'id': self.can_delete_job_permission_id, - 'name': 'Can delete job' + "codename": "delete_job", + "content_type_id": self.job_content_type_id, + "id": self.can_delete_job_permission_id, + "name": "Can delete job", }, { - 'codename': 'get_my_profile', - 'content_type_id': self.latest_content_type_id + 1, - 'id': self.latest_permission_id + 1, - 'name': 'Get my profile' + "codename": "get_my_profile", + "content_type_id": self.latest_content_type_id + 1, + "id": self.latest_permission_id + 1, + "name": "Get my profile", }, { - 'codename': 'create_my_profile', - 'content_type_id': self.latest_content_type_id + 1, - 'id': self.latest_permission_id + 2, - 'name': 'Create my profile' + "codename": "create_my_profile", + "content_type_id": self.latest_content_type_id + 1, + "id": self.latest_permission_id + 2, + "name": "Create my profile", }, { - 'codename': 'update_my_profile', - 'content_type_id': self.latest_content_type_id + 1, - 'id': self.latest_permission_id + 3, - 'name': 'Update my profile' + "codename": "update_my_profile", + "content_type_id": self.latest_content_type_id + 1, + "id": self.latest_permission_id + 3, + "name": "Update my profile", }, - ]) + ], + ) self.assertEqual( - sort_by_id(self.bc.format.to_dict(Group.objects.filter(name='Student').first().permissions.all())), [ + sort_by_id(self.bc.format.to_dict(Group.objects.filter(name="Student").first().permissions.all())), + [ { - 'codename': 'delete_job', - 'content_type_id': self.job_content_type_id, - 'id': self.can_delete_job_permission_id, - 'name': 'Can delete job' + "codename": "delete_job", + "content_type_id": self.job_content_type_id, + "id": self.can_delete_job_permission_id, + "name": "Can delete job", }, { - 'codename': 'get_my_profile', - 'content_type_id': self.latest_content_type_id + 1, - 'id': self.latest_permission_id + 1, - 'name': 'Get my profile' + "codename": "get_my_profile", + "content_type_id": 
self.latest_content_type_id + 1, + "id": self.latest_permission_id + 1, + "name": "Get my profile", }, { - 'codename': 'create_my_profile', - 'content_type_id': self.latest_content_type_id + 1, - 'id': self.latest_permission_id + 2, - 'name': 'Create my profile' + "codename": "create_my_profile", + "content_type_id": self.latest_content_type_id + 1, + "id": self.latest_permission_id + 2, + "name": "Create my profile", }, { - 'codename': 'update_my_profile', - 'content_type_id': self.latest_content_type_id + 1, - 'id': self.latest_permission_id + 3, - 'name': 'Update my profile' + "codename": "update_my_profile", + "content_type_id": self.latest_content_type_id + 1, + "id": self.latest_permission_id + 3, + "name": "Update my profile", }, - ]) + ], + ) """ 🔽🔽🔽 execute successfully, all the elements exists previously """ - @patch('breathecode.authenticate.management.commands.set_permissions.get_permissions', - MagicMock(return_value=PERMISSIONS)) - @patch('breathecode.authenticate.management.commands.set_permissions.get_groups', MagicMock(return_value=GROUPS)) + @patch( + "breathecode.authenticate.management.commands.set_permissions.get_permissions", + MagicMock(return_value=PERMISSIONS), + ) + @patch("breathecode.authenticate.management.commands.set_permissions.get_groups", MagicMock(return_value=GROUPS)) def test__execute__ends_successfully__run_second_time(self): num_permissions_was_deleted = 3 permission = [ { - 'name': 'Get my profile', - 'codename': 'get_my_profile', - 'content_type_id': self.latest_content_type_id + 1 + "name": "Get my profile", + "codename": "get_my_profile", + "content_type_id": self.latest_content_type_id + 1, }, { - 'name': 'Create my profile', - 'codename': 'create_my_profile', - 'content_type_id': self.latest_content_type_id + 1 + "name": "Create my profile", + "codename": "create_my_profile", + "content_type_id": self.latest_content_type_id + 1, }, { - 'name': 'Update my profile', - 'codename': 'update_my_profile', - 'content_type_id': self.latest_content_type_id + 1 + "name": "Update my profile", + "codename": "update_my_profile", + "content_type_id": self.latest_content_type_id + 1, }, ] content_type = { - 'app_label': 'breathecode', - 'model': 'SortingHat', + "app_label": "breathecode", + "model": "SortingHat", } permission_ids = [ self.latest_permission_id + 1, @@ -287,20 +284,20 @@ def test__execute__ends_successfully__run_second_time(self): ] groups = [ { - 'name': 'Admin', - 'permissions': permission_ids, + "name": "Admin", + "permissions": permission_ids, }, { - 'name': 'Default', - 'permissions': permission_ids, + "name": "Default", + "permissions": permission_ids, }, { - 'name': 'Student', - 'permissions': permission_ids, + "name": "Student", + "permissions": permission_ids, }, ] - Permission = self.bc.database.get_model('auth.Permission') + Permission = self.bc.database.get_model("auth.Permission") permissions = self.bc.format.to_dict(Permission.objects.all()) model = self.bc.database.create(permission=permission, content_type=content_type, group=groups) @@ -309,162 +306,165 @@ def test__execute__ends_successfully__run_second_time(self): # the rest of elements are generated by django, is better ignored it self.assertEqual( - self.bc.database.list_of('contenttypes.ContentType')[-1:], [ + self.bc.database.list_of("contenttypes.ContentType")[-1:], + [ { - 'app_label': 'breathecode', - 'id': self.latest_content_type_id + 1, - 'model': 'SortingHat', + "app_label": "breathecode", + "id": self.latest_content_type_id + 1, + "model": "SortingHat", }, - ]) + ], 
+ ) # the rest of elements are generated by django, is better ignored it self.assertEqual( - sort_by_id(self.bc.database.list_of('auth.Permission'))[-3:], [ + sort_by_id(self.bc.database.list_of("auth.Permission"))[-3:], + [ { - 'codename': 'get_my_profile', - 'content_type_id': self.latest_content_type_id + 1, - 'id': self.latest_permission_id + num_permissions_was_deleted + 1, - 'name': 'Get my profile' + "codename": "get_my_profile", + "content_type_id": self.latest_content_type_id + 1, + "id": self.latest_permission_id + num_permissions_was_deleted + 1, + "name": "Get my profile", }, { - 'codename': 'create_my_profile', - 'content_type_id': self.latest_content_type_id + 1, - 'id': self.latest_permission_id + num_permissions_was_deleted + 2, - 'name': 'Create my profile' + "codename": "create_my_profile", + "content_type_id": self.latest_content_type_id + 1, + "id": self.latest_permission_id + num_permissions_was_deleted + 2, + "name": "Create my profile", }, { - 'codename': 'update_my_profile', - 'content_type_id': self.latest_content_type_id + 1, - 'id': self.latest_permission_id + num_permissions_was_deleted + 3, - 'name': 'Update my profile', + "codename": "update_my_profile", + "content_type_id": self.latest_content_type_id + 1, + "id": self.latest_permission_id + num_permissions_was_deleted + 3, + "name": "Update my profile", }, - ]) + ], + ) - self.assertEqual(sort_by_id(self.bc.database.list_of('auth.Group')), [ - { - 'id': 1, - 'name': 'Admin' - }, - { - 'id': 2, - 'name': 'Default' - }, - { - 'id': 3, - 'name': 'Student' - }, - { - 'id': 4, - 'name': 'Legacy' - }, - ]) + self.assertEqual( + sort_by_id(self.bc.database.list_of("auth.Group")), + [ + {"id": 1, "name": "Admin"}, + {"id": 2, "name": "Default"}, + {"id": 3, "name": "Student"}, + {"id": 4, "name": "Legacy"}, + ], + ) self.assertEqual( - sort_by_id(self.bc.format.to_dict(Group.objects.filter(name='Admin').first().permissions.all())), [ + sort_by_id(self.bc.format.to_dict(Group.objects.filter(name="Admin").first().permissions.all())), + [ *sort_by_id(permissions), { - 'codename': 'get_my_profile', - 'content_type_id': self.latest_content_type_id + 1, - 'id': self.latest_permission_id + num_permissions_was_deleted + 1, - 'name': 'Get my profile' + "codename": "get_my_profile", + "content_type_id": self.latest_content_type_id + 1, + "id": self.latest_permission_id + num_permissions_was_deleted + 1, + "name": "Get my profile", }, { - 'codename': 'create_my_profile', - 'content_type_id': self.latest_content_type_id + 1, - 'id': self.latest_permission_id + num_permissions_was_deleted + 2, - 'name': 'Create my profile' + "codename": "create_my_profile", + "content_type_id": self.latest_content_type_id + 1, + "id": self.latest_permission_id + num_permissions_was_deleted + 2, + "name": "Create my profile", }, { - 'codename': 'update_my_profile', - 'content_type_id': self.latest_content_type_id + 1, - 'id': self.latest_permission_id + num_permissions_was_deleted + 3, - 'name': 'Update my profile' + "codename": "update_my_profile", + "content_type_id": self.latest_content_type_id + 1, + "id": self.latest_permission_id + num_permissions_was_deleted + 3, + "name": "Update my profile", }, - ]) + ], + ) self.assertEqual( - sort_by_id(self.bc.format.to_dict(Group.objects.filter(name='Default').first().permissions.all())), [ + sort_by_id(self.bc.format.to_dict(Group.objects.filter(name="Default").first().permissions.all())), + [ { - 'codename': 'delete_job', - 'content_type_id': self.job_content_type_id, - 'id': 
self.can_delete_job_permission_id, - 'name': 'Can delete job' + "codename": "delete_job", + "content_type_id": self.job_content_type_id, + "id": self.can_delete_job_permission_id, + "name": "Can delete job", }, { - 'codename': 'get_my_profile', - 'content_type_id': self.latest_content_type_id + 1, - 'id': self.latest_permission_id + num_permissions_was_deleted + 1, - 'name': 'Get my profile' + "codename": "get_my_profile", + "content_type_id": self.latest_content_type_id + 1, + "id": self.latest_permission_id + num_permissions_was_deleted + 1, + "name": "Get my profile", }, { - 'codename': 'create_my_profile', - 'content_type_id': self.latest_content_type_id + 1, - 'id': self.latest_permission_id + num_permissions_was_deleted + 2, - 'name': 'Create my profile' + "codename": "create_my_profile", + "content_type_id": self.latest_content_type_id + 1, + "id": self.latest_permission_id + num_permissions_was_deleted + 2, + "name": "Create my profile", }, { - 'codename': 'update_my_profile', - 'content_type_id': self.latest_content_type_id + 1, - 'id': self.latest_permission_id + num_permissions_was_deleted + 3, - 'name': 'Update my profile' + "codename": "update_my_profile", + "content_type_id": self.latest_content_type_id + 1, + "id": self.latest_permission_id + num_permissions_was_deleted + 3, + "name": "Update my profile", }, - ]) + ], + ) self.assertEqual( - sort_by_id(self.bc.format.to_dict(Group.objects.filter(name='Student').first().permissions.all())), [ + sort_by_id(self.bc.format.to_dict(Group.objects.filter(name="Student").first().permissions.all())), + [ { - 'codename': 'delete_job', - 'content_type_id': self.job_content_type_id, - 'id': self.can_delete_job_permission_id, - 'name': 'Can delete job' + "codename": "delete_job", + "content_type_id": self.job_content_type_id, + "id": self.can_delete_job_permission_id, + "name": "Can delete job", }, { - 'codename': 'get_my_profile', - 'content_type_id': self.latest_content_type_id + 1, - 'id': self.latest_permission_id + num_permissions_was_deleted + 1, - 'name': 'Get my profile' + "codename": "get_my_profile", + "content_type_id": self.latest_content_type_id + 1, + "id": self.latest_permission_id + num_permissions_was_deleted + 1, + "name": "Get my profile", }, { - 'codename': 'create_my_profile', - 'content_type_id': self.latest_content_type_id + 1, - 'id': self.latest_permission_id + num_permissions_was_deleted + 2, - 'name': 'Create my profile' + "codename": "create_my_profile", + "content_type_id": self.latest_content_type_id + 1, + "id": self.latest_permission_id + num_permissions_was_deleted + 2, + "name": "Create my profile", }, { - 'codename': 'update_my_profile', - 'content_type_id': self.latest_content_type_id + 1, - 'id': self.latest_permission_id + num_permissions_was_deleted + 3, - 'name': 'Update my profile' + "codename": "update_my_profile", + "content_type_id": self.latest_content_type_id + 1, + "id": self.latest_permission_id + num_permissions_was_deleted + 3, + "name": "Update my profile", }, - ]) + ], + ) self.assertEqual( - sort_by_id(self.bc.format.to_dict(Group.objects.filter(name='Legacy').first().permissions.all())), [ + sort_by_id(self.bc.format.to_dict(Group.objects.filter(name="Legacy").first().permissions.all())), + [ { - 'codename': 'delete_job', - 'content_type_id': self.job_content_type_id, - 'id': self.can_delete_job_permission_id, - 'name': 'Can delete job' + "codename": "delete_job", + "content_type_id": self.job_content_type_id, + "id": self.can_delete_job_permission_id, + "name": "Can delete job", }, { 
- 'codename': 'get_my_profile', - 'content_type_id': self.latest_content_type_id + 1, - 'id': self.latest_permission_id + num_permissions_was_deleted + 1, - 'name': 'Get my profile' + "codename": "get_my_profile", + "content_type_id": self.latest_content_type_id + 1, + "id": self.latest_permission_id + num_permissions_was_deleted + 1, + "name": "Get my profile", }, { - 'codename': 'create_my_profile', - 'content_type_id': self.latest_content_type_id + 1, - 'id': self.latest_permission_id + num_permissions_was_deleted + 2, - 'name': 'Create my profile' + "codename": "create_my_profile", + "content_type_id": self.latest_content_type_id + 1, + "id": self.latest_permission_id + num_permissions_was_deleted + 2, + "name": "Create my profile", }, { - 'codename': 'update_my_profile', - 'content_type_id': self.latest_content_type_id + 1, - 'id': self.latest_permission_id + num_permissions_was_deleted + 3, - 'name': 'Update my profile' + "codename": "update_my_profile", + "content_type_id": self.latest_content_type_id + 1, + "id": self.latest_permission_id + num_permissions_was_deleted + 3, + "name": "Update my profile", }, - ]) + ], + ) """ 🔽🔽🔽 execute, if it emit a exception, this test fail diff --git a/breathecode/authenticate/tests/management/commands/tests_set_scopes.py b/breathecode/authenticate/tests/management/commands/tests_set_scopes.py index b55f878a1..68d2da555 100644 --- a/breathecode/authenticate/tests/management/commands/tests_set_scopes.py +++ b/breathecode/authenticate/tests/management/commands/tests_set_scopes.py @@ -1,6 +1,7 @@ """ Test /academy/cohort """ + import random from unittest.mock import MagicMock, call, patch @@ -17,146 +18,167 @@ class AcademyCohortTestSuite(AuthTestCase): def test__apps_format(self): for app in APPS: - self.assertRegex(app['slug'], r'^[a-z:_]+$') - self.assertRegex(app['name'], r'^[a-zA-Z ]+$') - self.assertTrue(app['require_an_agreement'] in [True, False]) + self.assertRegex(app["slug"], r"^[a-z:_]+$") + self.assertRegex(app["name"], r"^[a-zA-Z ]+$") + self.assertTrue(app["require_an_agreement"] in [True, False]) - for scope in app['required_scopes']: - self.assertRegex(scope, r'^[a-z:_]+$') + for scope in app["required_scopes"]: + self.assertRegex(scope, r"^[a-z:_]+$") - for scope in app['optional_scopes']: - self.assertRegex(scope, r'^[a-z:-]+$') + for scope in app["optional_scopes"]: + self.assertRegex(scope, r"^[a-z:-]+$") def test__scopes_format(self): for scope in SCOPES: - self.assertRegex(scope['slug'], r'^[a-z:-]+$') - self.assertRegex(scope['name'], r'^[a-zA-Z ]+$') - self.assertTrue('description' in scope) + self.assertRegex(scope["slug"], r"^[a-z:-]+$") + self.assertRegex(scope["name"], r"^[a-zA-Z ]+$") + self.assertTrue("description" in scope) # When: No apps # Then: Shouldn't made any app def test_no_apps(self): - SCOPES = [{ - 'name': self.bc.fake.name(), - 'slug': self.bc.fake.slug()[:15].replace('-', '_'), - 'description': self.bc.fake.text()[:255], - } for _ in range(4)] + SCOPES = [ + { + "name": self.bc.fake.name(), + "slug": self.bc.fake.slug()[:15].replace("-", "_"), + "description": self.bc.fake.text()[:255], + } + for _ in range(4) + ] APPS = [ { - 'name': self.bc.fake.name(), - 'slug': self.bc.fake.slug()[:15].replace('-', '_'), - 'require_an_agreement': bool(random.randint(0, 1)), - 'required_scopes': [SCOPES[0]['slug'], SCOPES[1]['slug']], - 'optional_scopes': [SCOPES[2]['slug'], SCOPES[3]['slug']], + "name": self.bc.fake.name(), + "slug": self.bc.fake.slug()[:15].replace("-", "_"), + "require_an_agreement": 
bool(random.randint(0, 1)), + "required_scopes": [SCOPES[0]["slug"], SCOPES[1]["slug"]], + "optional_scopes": [SCOPES[2]["slug"], SCOPES[3]["slug"]], }, ] - with patch('breathecode.authenticate.management.commands.set_scopes.APPS', APPS): - with patch('breathecode.authenticate.management.commands.set_scopes.SCOPES', SCOPES): + with patch("breathecode.authenticate.management.commands.set_scopes.APPS", APPS): + with patch("breathecode.authenticate.management.commands.set_scopes.SCOPES", SCOPES): command = Command() result = command.handle() self.assertEqual(result, None) - self.assertEqual(self.bc.database.list_of('linked_services.Scope'), [ - { - **SCOPES[0], - 'id': 1, - }, - { - **SCOPES[1], - 'id': 2, - }, - { - **SCOPES[2], - 'id': 3, - }, - { - **SCOPES[3], - 'id': 4, - }, - ]) - self.assertEqual(self.bc.database.list_of('linked_services.App'), []) + self.assertEqual( + self.bc.database.list_of("linked_services.Scope"), + [ + { + **SCOPES[0], + "id": 1, + }, + { + **SCOPES[1], + "id": 2, + }, + { + **SCOPES[2], + "id": 3, + }, + { + **SCOPES[3], + "id": 4, + }, + ], + ) + self.assertEqual(self.bc.database.list_of("linked_services.App"), []) # When: 1 app # Then: Must updated it - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_1_app(self): - SCOPES = [{ - 'name': self.bc.fake.name(), - 'slug': self.bc.fake.slug()[:15].replace('-', '_'), - 'description': self.bc.fake.text()[:255], - } for _ in range(4)] + SCOPES = [ + { + "name": self.bc.fake.name(), + "slug": self.bc.fake.slug()[:15].replace("-", "_"), + "description": self.bc.fake.text()[:255], + } + for _ in range(4) + ] APPS = [ { - 'name': self.bc.fake.name(), - 'slug': self.bc.fake.slug()[:15].replace('-', '_'), - 'require_an_agreement': bool(random.randint(0, 1)), - 'required_scopes': [SCOPES[0]['slug'], SCOPES[1]['slug']], - 'optional_scopes': [SCOPES[2]['slug'], SCOPES[3]['slug']], + "name": self.bc.fake.name(), + "slug": self.bc.fake.slug()[:15].replace("-", "_"), + "require_an_agreement": bool(random.randint(0, 1)), + "required_scopes": [SCOPES[0]["slug"], SCOPES[1]["slug"]], + "optional_scopes": [SCOPES[2]["slug"], SCOPES[3]["slug"]], }, ] - app = {'slug': APPS[0]['slug']} + app = {"slug": APPS[0]["slug"]} model = self.bc.database.create(app=app) - with patch('breathecode.authenticate.management.commands.set_scopes.APPS', APPS): - with patch('breathecode.authenticate.management.commands.set_scopes.SCOPES', SCOPES): + with patch("breathecode.authenticate.management.commands.set_scopes.APPS", APPS): + with patch("breathecode.authenticate.management.commands.set_scopes.SCOPES", SCOPES): command = Command() result = command.handle() self.assertEqual(result, None) - self.assertEqual(self.bc.database.list_of('linked_services.Scope'), [ - { - **SCOPES[0], - 'id': 1, - }, - { - **SCOPES[1], - 'id': 2, - }, - { - **SCOPES[2], - 'id': 3, - }, - { - **SCOPES[3], - 'id': 4, - }, - ]) - self.assertEqual(self.bc.database.list_of('linked_services.App'), [ - { - **self.bc.format.to_dict(model.app), - 'name': APPS[0]['name'], - 'require_an_agreement': APPS[0]['require_an_agreement'], - }, - ]) - self.assertEqual(self.bc.database.list_of('linked_services.AppRequiredScope'), [ - { - 'agreed_at': UTC_NOW, - 'app_id': 1, - 'id': 1, - 'scope_id': 1, - }, - { - 'agreed_at': UTC_NOW, - 'app_id': 1, - 'id': 2, - 'scope_id': 2, - }, - ]) - self.assertEqual(self.bc.database.list_of('linked_services.AppOptionalScope'), [ - { - 'agreed_at': UTC_NOW, - 
'app_id': 1, - 'id': 1, - 'scope_id': 3, - }, - { - 'agreed_at': UTC_NOW, - 'app_id': 1, - 'id': 2, - 'scope_id': 4, - }, - ]) + self.assertEqual( + self.bc.database.list_of("linked_services.Scope"), + [ + { + **SCOPES[0], + "id": 1, + }, + { + **SCOPES[1], + "id": 2, + }, + { + **SCOPES[2], + "id": 3, + }, + { + **SCOPES[3], + "id": 4, + }, + ], + ) + self.assertEqual( + self.bc.database.list_of("linked_services.App"), + [ + { + **self.bc.format.to_dict(model.app), + "name": APPS[0]["name"], + "require_an_agreement": APPS[0]["require_an_agreement"], + }, + ], + ) + self.assertEqual( + self.bc.database.list_of("linked_services.AppRequiredScope"), + [ + { + "agreed_at": UTC_NOW, + "app_id": 1, + "id": 1, + "scope_id": 1, + }, + { + "agreed_at": UTC_NOW, + "app_id": 1, + "id": 2, + "scope_id": 2, + }, + ], + ) + self.assertEqual( + self.bc.database.list_of("linked_services.AppOptionalScope"), + [ + { + "agreed_at": UTC_NOW, + "app_id": 1, + "id": 1, + "scope_id": 3, + }, + { + "agreed_at": UTC_NOW, + "app_id": 1, + "id": 2, + "scope_id": 4, + }, + ], + ) diff --git a/breathecode/authenticate/tests/mixins/__init__.py b/breathecode/authenticate/tests/mixins/__init__.py index 450d982b5..29481c330 100644 --- a/breathecode/authenticate/tests/mixins/__init__.py +++ b/breathecode/authenticate/tests/mixins/__init__.py @@ -1,5 +1,6 @@ """ Admissions mixins """ + from .auth_test_case import AuthTestCase # noqa: F401 from .slack_test_case import SlackTestCase # noqa: F401 diff --git a/breathecode/authenticate/tests/mixins/auth_test_case.py b/breathecode/authenticate/tests/mixins/auth_test_case.py index 218054611..4272df163 100644 --- a/breathecode/authenticate/tests/mixins/auth_test_case.py +++ b/breathecode/authenticate/tests/mixins/auth_test_case.py @@ -1,6 +1,7 @@ """ Collections of mixins used to login in authorize microservice """ + from breathecode.authenticate.models import ProfileAcademy from unittest.mock import patch from django.contrib.auth.models import User @@ -9,23 +10,28 @@ from mixer.backend.django import mixer from django.core.cache import cache from breathecode.tests.mixins import ModelsMixin -from breathecode.tests.mocks import (GOOGLE_CLOUD_PATH, apply_google_cloud_client_mock, apply_google_cloud_bucket_mock, - apply_google_cloud_blob_mock) +from breathecode.tests.mocks import ( + GOOGLE_CLOUD_PATH, + apply_google_cloud_client_mock, + apply_google_cloud_bucket_mock, + apply_google_cloud_blob_mock, +) class AuthTestCase(APITestCase, ModelsMixin): """APITestCase with auth methods""" + # token = None user = None email = None - password = 'pass1234' + password = "pass1234" token = None def setUp(self): """Before each test""" cache.clear() - user = mixer.blend('auth.User') + user = mixer.blend("auth.User") user.set_password(self.password) user.save() @@ -33,29 +39,29 @@ def setUp(self): self.email = user.email self.client = APIClient() - params = {'user': user} - github = mixer.blend('authenticate.CredentialsGithub', **params) + params = {"user": user} + github = mixer.blend("authenticate.CredentialsGithub", **params) github.save() - def create_user(self, email='', password=''): + def create_user(self, email="", password=""): """Get login response""" - if email == '': + if email == "": email = self.email - if password == '': + if password == "": password = self.password - url = reverse_lazy('authenticate:login') - data = {'email': email, 'password': password} + url = reverse_lazy("authenticate:login") + data = {"email": email, "password": password} return self.client.post(url, data) - 
def login(self, email='', password=''): + def login(self, email="", password=""): """Login""" response = self.create_user(email=email, password=password) - if 'token' in response.data.keys(): - self.token = str(response.data['token']) - self.client.credentials(HTTP_AUTHORIZATION=f'Token {self.token}') + if "token" in response.data.keys(): + self.token = str(response.data["token"]) + self.client.credentials(HTTP_AUTHORIZATION=f"Token {self.token}") return response @@ -69,89 +75,92 @@ def headers(self, **kargs): headers = {} items = [ - index for index in kargs + index + for index in kargs if kargs[index] and (isinstance(kargs[index], str) or isinstance(kargs[index], int)) ] for index in items: - headers[f'HTTP_{index.upper()}'] = str(kargs[index]) + headers[f"HTTP_{index.upper()}"] = str(kargs[index]) self.client.credentials(**headers) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - def generate_models(self, - authenticate=False, - user=False, - academy=False, - profile_academy=False, - role='', - capability='', - profile_academy_status='', - credentials_github=False, - profile=False, - profile_kwargs={}, - github_academy_user={}, - models={}): + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + def generate_models( + self, + authenticate=False, + user=False, + academy=False, + profile_academy=False, + role="", + capability="", + profile_academy_status="", + credentials_github=False, + profile=False, + profile_kwargs={}, + github_academy_user={}, + models={}, + ): """Generate models""" # TODO: rewrite authenticate tests to use the global generate_models self.maxDiff = None models = models.copy() - if not 'user' in models and (user or authenticate or profile_academy or credentials_github): - models['user'] = mixer.blend('auth.User') - models['user'].set_password(self.password) - models['user'].save() + if not "user" in models and (user or authenticate or profile_academy or credentials_github): + models["user"] = mixer.blend("auth.User") + models["user"].set_password(self.password) + models["user"].save() - if not 'profile' in models and profile: + if not "profile" in models and profile: kargs = {} - if 'user' in models: - kargs['user'] = models['user'] + if "user" in models: + kargs["user"] = models["user"] kargs = {**kargs, **profile_kwargs} - models['profile'] = mixer.blend('authenticate.Profile', **kargs) + models["profile"] = mixer.blend("authenticate.Profile", **kargs) - if not 'credentials_github' in models and credentials_github: - kargs = {'user': models['user']} + if not "credentials_github" in models and credentials_github: + kargs = {"user": models["user"]} - models['credentials_github'] = mixer.blend('authenticate.CredentialsGithub', **kargs) + models["credentials_github"] = mixer.blend("authenticate.CredentialsGithub", **kargs) if authenticate: - self.client.force_authenticate(user=models['user']) + self.client.force_authenticate(user=models["user"]) - if not 'academy' in models and (academy or profile_academy): - models['academy'] = mixer.blend('admissions.Academy') + if not "academy" in models and (academy or profile_academy): + models["academy"] = mixer.blend("admissions.Academy") - if not 'capability' in models and capability: + if not "capability" in 
models and capability: kargs = { - 'slug': capability, - 'description': capability, + "slug": capability, + "description": capability, } - models['capability'] = mixer.blend('authenticate.Capability', **kargs) + models["capability"] = mixer.blend("authenticate.Capability", **kargs) - if not 'role' in models and role: + if not "role" in models and role: kargs = { - 'slug': role, - 'name': role, - 'capabilities': [models['capability']], + "slug": role, + "name": role, + "capabilities": [models["capability"]], } - models['role'] = mixer.blend('authenticate.Role', **kargs) + models["role"] = mixer.blend("authenticate.Role", **kargs) - if not 'profile_academy' in models and profile_academy: + if not "profile_academy" in models and profile_academy: kargs = {} if user or authenticate: - kargs['user'] = models['user'] - kargs['academy'] = models['academy'] - kargs['role'] = models['role'] + kargs["user"] = models["user"] + kargs["academy"] = models["academy"] + kargs["role"] = models["role"] if profile_academy_status: - kargs['status'] = profile_academy_status + kargs["status"] = profile_academy_status - models['profile_academy'] = mixer.blend('authenticate.ProfileAcademy', **kargs) + models["profile_academy"] = mixer.blend("authenticate.ProfileAcademy", **kargs) return models diff --git a/breathecode/authenticate/tests/mixins/development_environment.py b/breathecode/authenticate/tests/mixins/development_environment.py index ad0c09d3d..8f01b4a32 100644 --- a/breathecode/authenticate/tests/mixins/development_environment.py +++ b/breathecode/authenticate/tests/mixins/development_environment.py @@ -1,10 +1,11 @@ """ Collections of mixins used to login in authorize microservice """ + import os -class DevelopmentEnvironment(): +class DevelopmentEnvironment: def __init__(self): - os.environ['ENV'] = 'development' + os.environ["ENV"] = "development" diff --git a/breathecode/authenticate/tests/mixins/new_auth_test_case.py b/breathecode/authenticate/tests/mixins/new_auth_test_case.py index 8b9f0f54c..f93f06634 100644 --- a/breathecode/authenticate/tests/mixins/new_auth_test_case.py +++ b/breathecode/authenticate/tests/mixins/new_auth_test_case.py @@ -1,15 +1,30 @@ import os from rest_framework.test import APITestCase -from breathecode.tests.mixins import (GenerateModelsMixin, CacheMixin, GenerateQueriesMixin, HeadersMixin, - DatetimeMixin, TokenMixin, BreathecodeMixin) +from breathecode.tests.mixins import ( + GenerateModelsMixin, + CacheMixin, + GenerateQueriesMixin, + HeadersMixin, + DatetimeMixin, + TokenMixin, + BreathecodeMixin, +) -class AuthTestCase(APITestCase, GenerateModelsMixin, CacheMixin, GenerateQueriesMixin, HeadersMixin, DatetimeMixin, - TokenMixin, BreathecodeMixin): +class AuthTestCase( + APITestCase, + GenerateModelsMixin, + CacheMixin, + GenerateQueriesMixin, + HeadersMixin, + DatetimeMixin, + TokenMixin, + BreathecodeMixin, +): """AdmissionsTestCase with auth methods""" def setUp(self): - os.environ['API_URL'] = 'http://localhost:8000' + os.environ["API_URL"] = "http://localhost:8000" self.generate_queries() self.set_test_instance(self) diff --git a/breathecode/authenticate/tests/mixins/slack_test_case.py b/breathecode/authenticate/tests/mixins/slack_test_case.py index eb74fbe1c..0f29878b9 100644 --- a/breathecode/authenticate/tests/mixins/slack_test_case.py +++ b/breathecode/authenticate/tests/mixins/slack_test_case.py @@ -1,6 +1,7 @@ """ Collections of mixins used to login in authorize microservice """ + import base64 import urllib from django.urls.base import reverse_lazy @@ -11,16 
+12,17 @@ # class SlackTestCase(APITestCase, DevelopmentEnvironment): class SlackTestCase(APITestCase): """APITestCase with Slack methods""" - url_callback = 'https://google.co.ve' + + url_callback = "https://google.co.ve" academy = None def slack(self): """Get /slack""" - url = reverse_lazy('authenticate:slack') - params = {'url': base64.b64encode(self.url_callback.encode('utf-8')), 'user': 1, 'a': self.academy} - return self.client.get(f'{url}?{urllib.parse.urlencode(params)}') + url = reverse_lazy("authenticate:slack") + params = {"url": base64.b64encode(self.url_callback.encode("utf-8")), "user": 1, "a": self.academy} + return self.client.get(f"{url}?{urllib.parse.urlencode(params)}") def get_academy(self): """Generate a academy with mixer""" - academy = mixer.blend('admissions.Academy') + academy = mixer.blend("admissions.Academy") self.academy = academy diff --git a/breathecode/authenticate/tests/mocks/__init__.py b/breathecode/authenticate/tests/mocks/__init__.py index 5efeefc78..bd334d675 100644 --- a/breathecode/authenticate/tests/mocks/__init__.py +++ b/breathecode/authenticate/tests/mocks/__init__.py @@ -1,6 +1,7 @@ """ Admissions mixins """ + from .slack_requests_mock import SlackRequestsMock # noqa: F401 from .github_requests_mock import GithubRequestsMock # noqa: F401 from .google_cloud_storage_mock import GoogleCloudStorageMock # noqa: F401 diff --git a/breathecode/authenticate/tests/mocks/fake_response.py b/breathecode/authenticate/tests/mocks/fake_response.py index 7527bd884..ba5136a55 100644 --- a/breathecode/authenticate/tests/mocks/fake_response.py +++ b/breathecode/authenticate/tests/mocks/fake_response.py @@ -1,5 +1,6 @@ -class FakeResponse(): +class FakeResponse: """Simutate Response to be used by mocks""" + status_code = 200 data = {} diff --git a/breathecode/authenticate/tests/mocks/github_requests_mock.py b/breathecode/authenticate/tests/mocks/github_requests_mock.py index 237d99c30..952786922 100644 --- a/breathecode/authenticate/tests/mocks/github_requests_mock.py +++ b/breathecode/authenticate/tests/mocks/github_requests_mock.py @@ -1,103 +1,95 @@ """ Collections of mocks used to login in authorize microservice """ + from .fake_response import FakeResponse from .requests_mock import requests_mock -class GithubRequestsMock(): +class GithubRequestsMock: """Github requests mock""" - token = 'e72e16c7e42f292c6912e7710c838347ae178b4a' + + token = "e72e16c7e42f292c6912e7710c838347ae178b4a" @staticmethod def user(): """Static https://api.github.com/user""" - return FakeResponse(status_code=200, - data={ - 'login': 'jefer94', - 'id': 3018142, - 'node_id': 'MDQ6VXNlcjMwMTgxNDI=', - 'avatar_url': 'https://avatars2.githubusercontent.com/u/3018142?v=4', - 'gravatar_id': '', - 'url': 'https://api.github.com/users/jefer94', - 'html_url': 'https://github.com/jefer94', - 'followers_url': 'https://api.github.com/users/jefer94/followers', - 'following_url': 'https://api.github.com/users/jefer94/following{/other_user}', - 'gists_url': 'https://api.github.com/users/jefer94/gists{/gist_id}', - 'starred_url': 'https://api.github.com/users/jefer94/starred{/owner}{/repo}', - 'subscriptions_url': 'https://api.github.com/users/jefer94/subscriptions', - 'organizations_url': 'https://api.github.com/users/jefer94/orgs', - 'repos_url': 'https://api.github.com/users/jefer94/repos', - 'events_url': 'https://api.github.com/users/jefer94/events{/privacy}', - 'received_events_url': 'https://api.github.com/users/jefer94/received_events', - 'type': 'User', - 'site_admin': False, - 'name': 
'Jeferson De Freitas', - 'company': '@chocoland ', - 'blog': 'https://www.facebook.com/chocoland.framework', - 'location': 'Colombia, Magdalena, Santa Marta, Gaira', - 'email': 'jdefreitaspinto@gmail.com', - 'hireable': True, - 'bio': 'I am an Computer engineer, Full-stack Developer and React Developer, I likes' + - ' an API good, the clean code, the good programming practices', - 'twitter_username': None, - 'public_repos': 70, - 'public_gists': 1, - 'followers': 9, - 'following': 5, - 'created_at': '2012-12-11T17:00:30Z', - 'updated_at': '2020-10-29T19:15:13Z', - 'private_gists': 0, - 'total_private_repos': 2, - 'owned_private_repos': 1, - 'disk_usage': 211803, - 'collaborators': 0, - 'two_factor_authentication': False, - 'plan': { - 'name': 'free', - 'space': 976562499, - 'collaborators': 0, - 'private_repos': 10000 - } - }) + return FakeResponse( + status_code=200, + data={ + "login": "jefer94", + "id": 3018142, + "node_id": "MDQ6VXNlcjMwMTgxNDI=", + "avatar_url": "https://avatars2.githubusercontent.com/u/3018142?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/jefer94", + "html_url": "https://github.com/jefer94", + "followers_url": "https://api.github.com/users/jefer94/followers", + "following_url": "https://api.github.com/users/jefer94/following{/other_user}", + "gists_url": "https://api.github.com/users/jefer94/gists{/gist_id}", + "starred_url": "https://api.github.com/users/jefer94/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/jefer94/subscriptions", + "organizations_url": "https://api.github.com/users/jefer94/orgs", + "repos_url": "https://api.github.com/users/jefer94/repos", + "events_url": "https://api.github.com/users/jefer94/events{/privacy}", + "received_events_url": "https://api.github.com/users/jefer94/received_events", + "type": "User", + "site_admin": False, + "name": "Jeferson De Freitas", + "company": "@chocoland ", + "blog": "https://www.facebook.com/chocoland.framework", + "location": "Colombia, Magdalena, Santa Marta, Gaira", + "email": "jdefreitaspinto@gmail.com", + "hireable": True, + "bio": "I am an Computer engineer, Full-stack Developer and React Developer, I likes" + + " an API good, the clean code, the good programming practices", + "twitter_username": None, + "public_repos": 70, + "public_gists": 1, + "followers": 9, + "following": 5, + "created_at": "2012-12-11T17:00:30Z", + "updated_at": "2020-10-29T19:15:13Z", + "private_gists": 0, + "total_private_repos": 2, + "owned_private_repos": 1, + "disk_usage": 211803, + "collaborators": 0, + "two_factor_authentication": False, + "plan": {"name": "free", "space": 976562499, "collaborators": 0, "private_repos": 10000}, + }, + ) @staticmethod def user_emails(): """Static https://api.github.com/user/emails""" - return FakeResponse(status_code=200, - data=[{ - 'email': 'jeferson-94@hotmail.com', - 'primary': False, - 'verified': True, - 'visibility': None - }, { - 'email': 'jdefreitaspinto@gmail.com', - 'primary': True, - 'verified': True, - 'visibility': 'public' - }]) + return FakeResponse( + status_code=200, + data=[ + {"email": "jeferson-94@hotmail.com", "primary": False, "verified": True, "visibility": None}, + {"email": "jdefreitaspinto@gmail.com", "primary": True, "verified": True, "visibility": "public"}, + ], + ) @staticmethod def access_token(): """Static https://github.com/login/oauth/access_token""" - return FakeResponse(status_code=200, - data={ - 'access_token': GithubRequestsMock.token, - 'scope': 'repo,gist', - 'token_type': 'bearer' - }) + return 
FakeResponse( + status_code=200, + data={"access_token": GithubRequestsMock.token, "scope": "repo,gist", "token_type": "bearer"}, + ) @staticmethod def apply_get_requests_mock(): """Apply get requests mock""" routes = { - 'https://api.github.com/user': GithubRequestsMock.user(), - 'https://api.github.com/user/emails': GithubRequestsMock.user_emails() + "https://api.github.com/user": GithubRequestsMock.user(), + "https://api.github.com/user/emails": GithubRequestsMock.user_emails(), } return requests_mock(routes) @staticmethod def apply_post_requests_mock(): """Apply post requests mock""" - routes = {'https://github.com/login/oauth/access_token': GithubRequestsMock.access_token()} - return requests_mock(routes, method='post') + routes = {"https://github.com/login/oauth/access_token": GithubRequestsMock.access_token()} + return requests_mock(routes, method="post") diff --git a/breathecode/authenticate/tests/mocks/google_cloud_storage_mock.py b/breathecode/authenticate/tests/mocks/google_cloud_storage_mock.py index 595d33a6d..d41e54951 100644 --- a/breathecode/authenticate/tests/mocks/google_cloud_storage_mock.py +++ b/breathecode/authenticate/tests/mocks/google_cloud_storage_mock.py @@ -1,10 +1,11 @@ """ Collections of mocks used to login in authorize microservice """ + from unittest.mock import Mock -class GoogleCloudStorageMock(): +class GoogleCloudStorageMock: @staticmethod def get_bucket_object(): diff --git a/breathecode/authenticate/tests/mocks/mocks.py b/breathecode/authenticate/tests/mocks/mocks.py index 279ae948c..641b4b717 100644 --- a/breathecode/authenticate/tests/mocks/mocks.py +++ b/breathecode/authenticate/tests/mocks/mocks.py @@ -1,10 +1,11 @@ """ Collections of mocks used to login in authorize microservice """ + from unittest.mock import Mock -class GoogleCloudStorageMock(): +class GoogleCloudStorageMock: @staticmethod def get_bucket_object(): @@ -15,8 +16,9 @@ def side_effect(): return Mock(side_effect=side_effect) -class FakeResponse(): +class FakeResponse: """Simutate Response to be used by mocks""" + status_code = 200 data = {} @@ -29,152 +31,142 @@ def json(self): return self.data -def requests_mock(routes: dict, method='get'): +def requests_mock(routes: dict, method="get"): """Arequests mock""" - if method == 'get': + if method == "get": def side_effect(url, headers=None): - return routes.get(url, f'unhandled request {url}') - elif method == 'post': + return routes.get(url, f"unhandled request {url}") + + elif method == "post": def side_effect(url, data=None, headers=None): - return routes.get(url, f'unhandled request {url}') + return routes.get(url, f"unhandled request {url}") + else: - raise Exception(f'{method} are not implemented too') + raise Exception(f"{method} are not implemented too") return Mock(side_effect=side_effect) -class GithubRequestsMock(): +class GithubRequestsMock: """Github requests mock""" - token = 'e72e16c7e42f292c6912e7710c838347ae178b4a' + + token = "e72e16c7e42f292c6912e7710c838347ae178b4a" @staticmethod def user(): """Static https://api.github.com/user""" - return FakeResponse(status_code=200, - data={ - 'login': 'jefer94', - 'id': 3018142, - 'node_id': 'MDQ6VXNlcjMwMTgxNDI=', - 'avatar_url': 'https://avatars2.githubusercontent.com/u/3018142?v=4', - 'gravatar_id': '', - 'url': 'https://api.github.com/users/jefer94', - 'html_url': 'https://github.com/jefer94', - 'followers_url': 'https://api.github.com/users/jefer94/followers', - 'following_url': 'https://api.github.com/users/jefer94/following{/other_user}', - 'gists_url': 
'https://api.github.com/users/jefer94/gists{/gist_id}', - 'starred_url': 'https://api.github.com/users/jefer94/starred{/owner}{/repo}', - 'subscriptions_url': 'https://api.github.com/users/jefer94/subscriptions', - 'organizations_url': 'https://api.github.com/users/jefer94/orgs', - 'repos_url': 'https://api.github.com/users/jefer94/repos', - 'events_url': 'https://api.github.com/users/jefer94/events{/privacy}', - 'received_events_url': 'https://api.github.com/users/jefer94/received_events', - 'type': 'User', - 'site_admin': False, - 'name': 'Jeferson De Freitas', - 'company': '@chocoland ', - 'blog': 'https://www.facebook.com/chocoland.framework', - 'location': 'Colombia, Magdalena, Santa Marta, Gaira', - 'email': 'jdefreitaspinto@gmail.com', - 'hireable': True, - 'bio': 'I am an Computer engineer, Full-stack Developer and React Developer, I likes' + - ' an API good, the clean code, the good programming practices', - 'twitter_username': None, - 'public_repos': 70, - 'public_gists': 1, - 'followers': 9, - 'following': 5, - 'created_at': '2012-12-11T17:00:30Z', - 'updated_at': '2020-10-29T19:15:13Z', - 'private_gists': 0, - 'total_private_repos': 2, - 'owned_private_repos': 1, - 'disk_usage': 211803, - 'collaborators': 0, - 'two_factor_authentication': False, - 'plan': { - 'name': 'free', - 'space': 976562499, - 'collaborators': 0, - 'private_repos': 10000 - } - }) + return FakeResponse( + status_code=200, + data={ + "login": "jefer94", + "id": 3018142, + "node_id": "MDQ6VXNlcjMwMTgxNDI=", + "avatar_url": "https://avatars2.githubusercontent.com/u/3018142?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/jefer94", + "html_url": "https://github.com/jefer94", + "followers_url": "https://api.github.com/users/jefer94/followers", + "following_url": "https://api.github.com/users/jefer94/following{/other_user}", + "gists_url": "https://api.github.com/users/jefer94/gists{/gist_id}", + "starred_url": "https://api.github.com/users/jefer94/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/jefer94/subscriptions", + "organizations_url": "https://api.github.com/users/jefer94/orgs", + "repos_url": "https://api.github.com/users/jefer94/repos", + "events_url": "https://api.github.com/users/jefer94/events{/privacy}", + "received_events_url": "https://api.github.com/users/jefer94/received_events", + "type": "User", + "site_admin": False, + "name": "Jeferson De Freitas", + "company": "@chocoland ", + "blog": "https://www.facebook.com/chocoland.framework", + "location": "Colombia, Magdalena, Santa Marta, Gaira", + "email": "jdefreitaspinto@gmail.com", + "hireable": True, + "bio": "I am an Computer engineer, Full-stack Developer and React Developer, I likes" + + " an API good, the clean code, the good programming practices", + "twitter_username": None, + "public_repos": 70, + "public_gists": 1, + "followers": 9, + "following": 5, + "created_at": "2012-12-11T17:00:30Z", + "updated_at": "2020-10-29T19:15:13Z", + "private_gists": 0, + "total_private_repos": 2, + "owned_private_repos": 1, + "disk_usage": 211803, + "collaborators": 0, + "two_factor_authentication": False, + "plan": {"name": "free", "space": 976562499, "collaborators": 0, "private_repos": 10000}, + }, + ) @staticmethod def user_emails(): """Static https://api.github.com/user/emails""" - return FakeResponse(status_code=200, - data=[{ - 'email': 'jeferson-94@hotmail.com', - 'primary': False, - 'verified': True, - 'visibility': None - }, { - 'email': 'jdefreitaspinto@gmail.com', - 'primary': True, - 'verified': 
True, - 'visibility': 'public' - }]) + return FakeResponse( + status_code=200, + data=[ + {"email": "jeferson-94@hotmail.com", "primary": False, "verified": True, "visibility": None}, + {"email": "jdefreitaspinto@gmail.com", "primary": True, "verified": True, "visibility": "public"}, + ], + ) @staticmethod def access_token(): """Static https://github.com/login/oauth/access_token""" - return FakeResponse(status_code=200, - data={ - 'access_token': GithubRequestsMock.token, - 'scope': 'repo,gist', - 'token_type': 'bearer' - }) + return FakeResponse( + status_code=200, + data={"access_token": GithubRequestsMock.token, "scope": "repo,gist", "token_type": "bearer"}, + ) @staticmethod def apply_get_requests_mock(): """Apply get requests mock""" routes = { - 'https://api.github.com/user': GithubRequestsMock.user(), - 'https://api.github.com/user/emails': GithubRequestsMock.user_emails() + "https://api.github.com/user": GithubRequestsMock.user(), + "https://api.github.com/user/emails": GithubRequestsMock.user_emails(), } return requests_mock(routes) @staticmethod def apply_post_requests_mock(): """Apply post requests mock""" - routes = {'https://github.com/login/oauth/access_token': GithubRequestsMock.access_token()} - return requests_mock(routes, method='post') + routes = {"https://github.com/login/oauth/access_token": GithubRequestsMock.access_token()} + return requests_mock(routes, method="post") -class SlackRequestsMock(): +class SlackRequestsMock: """Github requests mock""" - token = 'e72e16c7e42f292c6912e7710c838347ae178b4a' + + token = "e72e16c7e42f292c6912e7710c838347ae178b4a" @staticmethod def access(): """Static https://slack.com/api/oauth.v2.access""" - return FakeResponse(status_code=200, - data={ - 'ok': True, - 'access_token': 'xoxb-17653672481-19874698323-pdFZKVeTuE8sk7oOcBrzbqgy', - 'token_type': 'bot', - 'scope': 'commands,incoming-webhook', - 'bot_user_id': 'U0KRQLJ9H', - 'app_id': 'A0KRD7HC3', - 'team': { - 'name': 'Slack Softball Team', - 'id': 'T9TK3CUKW' - }, - 'enterprise': { - 'name': 'slack-sports', - 'id': 'E12345678' - }, - 'authed_user': { - 'id': 'U1234', - 'scope': 'chat:write', - 'access_token': 'xoxp-1234', - 'token_type': 'user' - } - }) + return FakeResponse( + status_code=200, + data={ + "ok": True, + "access_token": "xoxb-17653672481-19874698323-pdFZKVeTuE8sk7oOcBrzbqgy", + "token_type": "bot", + "scope": "commands,incoming-webhook", + "bot_user_id": "U0KRQLJ9H", + "app_id": "A0KRD7HC3", + "team": {"name": "Slack Softball Team", "id": "T9TK3CUKW"}, + "enterprise": {"name": "slack-sports", "id": "E12345678"}, + "authed_user": { + "id": "U1234", + "scope": "chat:write", + "access_token": "xoxp-1234", + "token_type": "user", + }, + }, + ) @staticmethod def apply_post_requests_mock(): """Apply get requests mock""" - routes = {'https://slack.com/api/oauth.v2.access': SlackRequestsMock.access()} - return requests_mock(routes, method='post') + routes = {"https://slack.com/api/oauth.v2.access": SlackRequestsMock.access()} + return requests_mock(routes, method="post") diff --git a/breathecode/authenticate/tests/mocks/requests_mock.py b/breathecode/authenticate/tests/mocks/requests_mock.py index e2837e2c1..e1cc696c5 100644 --- a/breathecode/authenticate/tests/mocks/requests_mock.py +++ b/breathecode/authenticate/tests/mocks/requests_mock.py @@ -1,16 +1,18 @@ from unittest.mock import Mock -def requests_mock(routes: dict, method='get'): +def requests_mock(routes: dict, method="get"): """Requests mock""" - if method == 'get': + if method == "get": def side_effect(url, 
headers=None, timeout=30): - return routes.get(url, f'unhandled request {url}') - elif method == 'post': + return routes.get(url, f"unhandled request {url}") + + elif method == "post": def side_effect(url, data=None, json=None, headers=None, timeout=30): - return routes.get(url, f'unhandled request {url}') + return routes.get(url, f"unhandled request {url}") + else: - raise Exception(f'{method} are not implemented too') + raise Exception(f"{method} are not implemented too") return Mock(side_effect=side_effect) diff --git a/breathecode/authenticate/tests/mocks/slack_requests_mock.py b/breathecode/authenticate/tests/mocks/slack_requests_mock.py index 8505d5cf1..2c0ada5c0 100644 --- a/breathecode/authenticate/tests/mocks/slack_requests_mock.py +++ b/breathecode/authenticate/tests/mocks/slack_requests_mock.py @@ -1,43 +1,41 @@ """ Collections of mocks used to login in authorize microservice """ + from .fake_response import FakeResponse from .requests_mock import requests_mock -class SlackRequestsMock(): +class SlackRequestsMock: """Github requests mock""" - token = 'e72e16c7e42f292c6912e7710c838347ae178b4a' + + token = "e72e16c7e42f292c6912e7710c838347ae178b4a" @staticmethod def access(): """Static https://slack.com/api/oauth.v2.access""" - return FakeResponse(status_code=200, - data={ - 'ok': True, - 'access_token': 'xoxb-17653672481-19874698323-pdFZKVeTuE8sk7oOcBrzbqgy', - 'token_type': 'bot', - 'scope': 'commands,incoming-webhook', - 'bot_user_id': 'U0KRQLJ9H', - 'app_id': 'A0KRD7HC3', - 'team': { - 'name': 'Slack Softball Team', - 'id': 'T9TK3CUKW' - }, - 'enterprise': { - 'name': 'slack-sports', - 'id': 'E12345678' - }, - 'authed_user': { - 'id': 'U1234', - 'scope': 'chat:write', - 'access_token': 'xoxp-1234', - 'token_type': 'user' - } - }) + return FakeResponse( + status_code=200, + data={ + "ok": True, + "access_token": "xoxb-17653672481-19874698323-pdFZKVeTuE8sk7oOcBrzbqgy", + "token_type": "bot", + "scope": "commands,incoming-webhook", + "bot_user_id": "U0KRQLJ9H", + "app_id": "A0KRD7HC3", + "team": {"name": "Slack Softball Team", "id": "T9TK3CUKW"}, + "enterprise": {"name": "slack-sports", "id": "E12345678"}, + "authed_user": { + "id": "U1234", + "scope": "chat:write", + "access_token": "xoxp-1234", + "token_type": "user", + }, + }, + ) @staticmethod def apply_post_requests_mock(): """Apply get requests mock""" - routes = {'https://slack.com/api/oauth.v2.access': SlackRequestsMock.access()} - return requests_mock(routes, method='post') + routes = {"https://slack.com/api/oauth.v2.access": SlackRequestsMock.access()} + return requests_mock(routes, method="post") diff --git a/breathecode/authenticate/tests/models/tests_token.py b/breathecode/authenticate/tests/models/tests_token.py index f125d614b..2711f8b7c 100644 --- a/breathecode/authenticate/tests/models/tests_token.py +++ b/breathecode/authenticate/tests/models/tests_token.py @@ -1,6 +1,7 @@ """ Test cases for /academy/:id/member/:id """ + from datetime import timedelta from time import sleep from django.utils import timezone @@ -25,7 +26,7 @@ def test_get_or_create__bad_arguments(self): def test_get_or_create__bad_user(self): with self.assertRaises(InvalidTokenType) as _: - Token.get_or_create(None, 'they-killed-kenny') + Token.get_or_create(None, "they-killed-kenny") self.assertEqual(self.all_token_dict(), []) @@ -45,13 +46,13 @@ def test_get_or_create__token_type_login(self): start = timezone.now() model = self.generate_models(user=True) - Token.get_or_create(model.user, token_type='login') + Token.get_or_create(model.user, 
token_type="login") end = timezone.now() db = self.all_token_dict() - created = db[0]['created'] - expires_at = db[0]['expires_at'] - token = db[0]['key'] + created = db[0]["created"] + expires_at = db[0]["expires_at"] + token = db[0]["key"] self.assertGreater(created, start) self.assertLess(created, end) @@ -61,23 +62,23 @@ def test_get_or_create__token_type_login(self): self.assertGreater(expires_at, end + timedelta(days=1) - timedelta(seconds=10)) self.assertToken(token) - del db[0]['created'] - del db[0]['expires_at'] - del db[0]['key'] + del db[0]["created"] + del db[0]["expires_at"] + del db[0]["key"] - self.assertEqual(db, [{'id': 1, 'token_type': 'login', 'user_id': 1}]) + self.assertEqual(db, [{"id": 1, "token_type": "login", "user_id": 1}]) def test_get_or_create__token_type_login__passing_hours_length(self): start = timezone.now() model = self.generate_models(user=True) - Token.get_or_create(model.user, token_type='login', hours_length=2) + Token.get_or_create(model.user, token_type="login", hours_length=2) end = timezone.now() db = self.all_token_dict() - created = db[0]['created'] - expires_at = db[0]['expires_at'] - token = db[0]['key'] + created = db[0]["created"] + expires_at = db[0]["expires_at"] + token = db[0]["key"] self.assertGreater(created, start) self.assertLess(created, end) @@ -87,37 +88,42 @@ def test_get_or_create__token_type_login__passing_hours_length(self): self.assertGreater(expires_at, end + timedelta(hours=2) - timedelta(seconds=10)) self.assertToken(token) - del db[0]['created'] - del db[0]['expires_at'] - del db[0]['key'] + del db[0]["created"] + del db[0]["expires_at"] + del db[0]["key"] - self.assertEqual(db, [{'id': 1, 'token_type': 'login', 'user_id': 1}]) + self.assertEqual(db, [{"id": 1, "token_type": "login", "user_id": 1}]) def test_get_or_create__token_type_login__passing_expires_at(self): expires_at = timezone.now() + timedelta(days=7) start = timezone.now() model = self.generate_models(user=True) - Token.get_or_create(model.user, token_type='login', expires_at=expires_at) + Token.get_or_create(model.user, token_type="login", expires_at=expires_at) end = timezone.now() db = self.all_token_dict() - created = db[0]['created'] - token = db[0]['key'] + created = db[0]["created"] + token = db[0]["key"] self.assertGreater(created, start) self.assertLess(created, end) self.assertToken(token) - del db[0]['created'] - del db[0]['key'] + del db[0]["created"] + del db[0]["key"] - self.assertEqual(db, [{ - 'id': 1, - 'token_type': 'login', - 'user_id': 1, - 'expires_at': expires_at, - }]) + self.assertEqual( + db, + [ + { + "id": 1, + "token_type": "login", + "user_id": 1, + "expires_at": expires_at, + } + ], + ) """ 🔽🔽🔽 get_or_create token_type one_time @@ -127,33 +133,38 @@ def test_get_or_create__token_type_one_time(self): start = timezone.now() model = self.generate_models(user=True) - Token.get_or_create(model.user, token_type='one_time') + Token.get_or_create(model.user, token_type="one_time") end = timezone.now() db = self.all_token_dict() - created = db[0]['created'] - token = db[0]['key'] + created = db[0]["created"] + token = db[0]["key"] self.assertGreater(created, start) self.assertLess(created, end) self.assertToken(token) - del db[0]['created'] - del db[0]['key'] + del db[0]["created"] + del db[0]["key"] - self.assertEqual(db, [{ - 'id': 1, - 'token_type': 'one_time', - 'user_id': 1, - 'expires_at': None, - }]) + self.assertEqual( + db, + [ + { + "id": 1, + "token_type": "one_time", + "user_id": 1, + "expires_at": None, + } + ], + ) def 
test_get_or_create__token_type_one_time__passing_hours_length(self): model = self.generate_models(user=True) with self.assertRaises(BadArguments) as _: - Token.get_or_create(model.user, token_type='one_time', hours_length=2) + Token.get_or_create(model.user, token_type="one_time", hours_length=2) self.assertEqual(self.all_token_dict(), []) @@ -162,7 +173,7 @@ def test_get_or_create__token_type_one_time__passing_expires_at(self): model = self.generate_models(user=True) with self.assertRaises(BadArguments) as _: - Token.get_or_create(model.user, token_type='one_time', expires_at=expires_at) + Token.get_or_create(model.user, token_type="one_time", expires_at=expires_at) self.assertEqual(self.all_token_dict(), []) @@ -174,33 +185,38 @@ def test_get_or_create__token_type_permanent(self): start = timezone.now() model = self.generate_models(user=True) - Token.get_or_create(model.user, token_type='permanent') + Token.get_or_create(model.user, token_type="permanent") end = timezone.now() db = self.all_token_dict() - created = db[0]['created'] - token = db[0]['key'] + created = db[0]["created"] + token = db[0]["key"] self.assertGreater(created, start) self.assertLess(created, end) self.assertToken(token) - del db[0]['created'] - del db[0]['key'] + del db[0]["created"] + del db[0]["key"] - self.assertEqual(db, [{ - 'id': 1, - 'token_type': 'permanent', - 'user_id': 1, - 'expires_at': None, - }]) + self.assertEqual( + db, + [ + { + "id": 1, + "token_type": "permanent", + "user_id": 1, + "expires_at": None, + } + ], + ) def test_get_or_create__token_type_permanent__passing_hours_length(self): model = self.generate_models(user=True) with self.assertRaises(BadArguments) as _: - Token.get_or_create(model.user, token_type='permanent', hours_length=2) + Token.get_or_create(model.user, token_type="permanent", hours_length=2) self.assertEqual(self.all_token_dict(), []) @@ -209,7 +225,7 @@ def test_get_or_create__token_type_permanent__passing_expires_at(self): model = self.generate_models(user=True) with self.assertRaises(BadArguments) as _: - Token.get_or_create(model.user, token_type='permanent', expires_at=expires_at) + Token.get_or_create(model.user, token_type="permanent", expires_at=expires_at) self.assertEqual(self.all_token_dict(), []) @@ -221,13 +237,13 @@ def test_get_or_create__token_type_temporal(self): start = timezone.now() model = self.generate_models(user=True) - Token.get_or_create(model.user, token_type='temporal') + Token.get_or_create(model.user, token_type="temporal") end = timezone.now() db = self.all_token_dict() - created = db[0]['created'] - expires_at = db[0]['expires_at'] - token = db[0]['key'] + created = db[0]["created"] + expires_at = db[0]["expires_at"] + token = db[0]["key"] self.assertGreater(created, start) self.assertLess(created, end) @@ -237,23 +253,23 @@ def test_get_or_create__token_type_temporal(self): self.assertGreater(expires_at, end + timedelta(minutes=10) - timedelta(seconds=10)) self.assertToken(token) - del db[0]['created'] - del db[0]['expires_at'] - del db[0]['key'] + del db[0]["created"] + del db[0]["expires_at"] + del db[0]["key"] - self.assertEqual(db, [{'id': 1, 'token_type': 'temporal', 'user_id': 1}]) + self.assertEqual(db, [{"id": 1, "token_type": "temporal", "user_id": 1}]) def test_get_or_create__token_type_temporal__passing_hours_length(self): start = timezone.now() model = self.generate_models(user=True) - Token.get_or_create(model.user, token_type='temporal', hours_length=2) + Token.get_or_create(model.user, token_type="temporal", hours_length=2) end 
= timezone.now() db = self.all_token_dict() - created = db[0]['created'] - expires_at = db[0]['expires_at'] - token = db[0]['key'] + created = db[0]["created"] + expires_at = db[0]["expires_at"] + token = db[0]["key"] self.assertGreater(created, start) self.assertLess(created, end) @@ -263,37 +279,42 @@ def test_get_or_create__token_type_temporal__passing_hours_length(self): self.assertGreater(expires_at, end + timedelta(hours=2) - timedelta(seconds=10)) self.assertToken(token) - del db[0]['created'] - del db[0]['expires_at'] - del db[0]['key'] + del db[0]["created"] + del db[0]["expires_at"] + del db[0]["key"] - self.assertEqual(db, [{'id': 1, 'token_type': 'temporal', 'user_id': 1}]) + self.assertEqual(db, [{"id": 1, "token_type": "temporal", "user_id": 1}]) def test_get_or_create__token_type_temporal__passing_expires_at(self): expires_at = timezone.now() + timedelta(days=7) start = timezone.now() model = self.generate_models(user=True) - Token.get_or_create(model.user, token_type='temporal', expires_at=expires_at) + Token.get_or_create(model.user, token_type="temporal", expires_at=expires_at) end = timezone.now() db = self.all_token_dict() - created = db[0]['created'] - token = db[0]['key'] + created = db[0]["created"] + token = db[0]["key"] self.assertGreater(created, start) self.assertLess(created, end) self.assertToken(token) - del db[0]['created'] - del db[0]['key'] + del db[0]["created"] + del db[0]["key"] - self.assertEqual(db, [{ - 'id': 1, - 'token_type': 'temporal', - 'user_id': 1, - 'expires_at': expires_at, - }]) + self.assertEqual( + db, + [ + { + "id": 1, + "token_type": "temporal", + "user_id": 1, + "expires_at": expires_at, + } + ], + ) """ 🔽🔽🔽 get_or_create hours_length and expires_at together @@ -304,7 +325,7 @@ def test_get_or_create__token_type_login__hours_length_and_expires_at_together(s expires_at = timezone.now() with self.assertRaises(BadArguments) as _: - Token.get_or_create(model.user, token_type='login', hours_length=2, expires_at=expires_at) + Token.get_or_create(model.user, token_type="login", hours_length=2, expires_at=expires_at) self.assertEqual(self.all_token_dict(), []) @@ -313,7 +334,7 @@ def test_get_or_create__token_type_one_time__hours_length_and_expires_at_togethe expires_at = timezone.now() with self.assertRaises(BadArguments) as _: - Token.get_or_create(model.user, token_type='one_time', hours_length=2, expires_at=expires_at) + Token.get_or_create(model.user, token_type="one_time", hours_length=2, expires_at=expires_at) self.assertEqual(self.all_token_dict(), []) @@ -322,7 +343,7 @@ def test_get_or_create__token_type_permanent__hours_length_and_expires_at_togeth expires_at = timezone.now() with self.assertRaises(BadArguments) as _: - Token.get_or_create(model.user, token_type='permanent', hours_length=2, expires_at=expires_at) + Token.get_or_create(model.user, token_type="permanent", hours_length=2, expires_at=expires_at) self.assertEqual(self.all_token_dict(), []) @@ -331,7 +352,7 @@ def test_get_or_create__token_type_temporal__hours_length_and_expires_at_togethe expires_at = timezone.now() with self.assertRaises(BadArguments) as _: - Token.get_or_create(model.user, token_type='temporal', hours_length=2, expires_at=expires_at) + Token.get_or_create(model.user, token_type="temporal", hours_length=2, expires_at=expires_at) self.assertEqual(self.all_token_dict(), []) @@ -342,16 +363,16 @@ def test_get_or_create__token_type_temporal__hours_length_and_expires_at_togethe def test_get_or_create__token_type_login__token_exists(self): start = 
timezone.now() expires_at = timezone.now() - timedelta(days=1, seconds=1) - token_kwargs = {'expires_at': expires_at, 'token_type': 'login'} + token_kwargs = {"expires_at": expires_at, "token_type": "login"} model = self.generate_models(user=True, token=True, token_kwargs=token_kwargs) - Token.get_or_create(model.user, token_type='login') + Token.get_or_create(model.user, token_type="login") end = timezone.now() db = self.all_token_dict() - created = db[0]['created'] - expires_at = db[0]['expires_at'] - token = db[0]['key'] + created = db[0]["created"] + expires_at = db[0]["expires_at"] + token = db[0]["key"] self.assertGreater(created, start) self.assertLess(created, end) @@ -361,25 +382,25 @@ def test_get_or_create__token_type_login__token_exists(self): self.assertGreater(expires_at, end + timedelta(days=1) - timedelta(seconds=10)) self.assertToken(token) - del db[0]['created'] - del db[0]['expires_at'] - del db[0]['key'] + del db[0]["created"] + del db[0]["expires_at"] + del db[0]["key"] - self.assertEqual(db, [{'id': 2, 'token_type': 'login', 'user_id': 1}]) + self.assertEqual(db, [{"id": 2, "token_type": "login", "user_id": 1}]) def test_get_or_create__token_type_temporal__token_exists(self): start = timezone.now() expires_at = timezone.now() - timedelta(days=1, seconds=1) - token_kwargs = {'expires_at': expires_at, 'token_type': 'temporal'} + token_kwargs = {"expires_at": expires_at, "token_type": "temporal"} model = self.generate_models(user=True, token=True, token_kwargs=token_kwargs) - Token.get_or_create(model.user, token_type='temporal') + Token.get_or_create(model.user, token_type="temporal") end = timezone.now() db = self.all_token_dict() - created = db[0]['created'] - expires_at = db[0]['expires_at'] - token = db[0]['key'] + created = db[0]["created"] + expires_at = db[0]["expires_at"] + token = db[0]["key"] self.assertGreater(created, start) self.assertLess(created, end) @@ -389,64 +410,60 @@ def test_get_or_create__token_type_temporal__token_exists(self): self.assertGreater(expires_at, end + timedelta(minutes=10) - timedelta(seconds=10)) self.assertToken(token) - del db[0]['created'] - del db[0]['expires_at'] - del db[0]['key'] + del db[0]["created"] + del db[0]["expires_at"] + del db[0]["key"] - self.assertEqual(db, [{'id': 2, 'token_type': 'temporal', 'user_id': 1}]) + self.assertEqual(db, [{"id": 2, "token_type": "temporal", "user_id": 1}]) def test_get_or_create__token_type_one_time__token_exists(self): start = timezone.now() expires_at = None - token_kwargs = {'expires_at': expires_at, 'token_type': 'one_time'} + token_kwargs = {"expires_at": expires_at, "token_type": "one_time"} model = self.generate_models(user=True, token=True, token_kwargs=token_kwargs) - Token.get_or_create(model.user, token_type='one_time') + Token.get_or_create(model.user, token_type="one_time") end = timezone.now() db = self.all_token_dict() - created = db[1]['created'] - token = db[1]['key'] + created = db[1]["created"] + token = db[1]["key"] self.assertGreater(created, start) self.assertLess(created, end) self.assertToken(token) - del db[1]['created'] - del db[1]['key'] + del db[1]["created"] + del db[1]["key"] self.assertEqual( db, - [self.model_to_dict(model, 'token'), { - 'id': 2, - 'token_type': 'one_time', - 'user_id': 1, - 'expires_at': None - }]) + [self.model_to_dict(model, "token"), {"id": 2, "token_type": "one_time", "user_id": 1, "expires_at": None}], + ) def test_get_or_create__token_type_permanent__token_exists(self): start = timezone.now() expires_at = None - token_kwargs = 
{'expires_at': expires_at, 'token_type': 'permanent'} + token_kwargs = {"expires_at": expires_at, "token_type": "permanent"} model = self.generate_models(user=True, token=True, token_kwargs=token_kwargs) - Token.get_or_create(model.user, token_type='permanent') + Token.get_or_create(model.user, token_type="permanent") end = timezone.now() db = self.all_token_dict() - created = db[0]['created'] - token = db[0]['key'] + created = db[0]["created"] + token = db[0]["key"] self.assertGreater(created, start) self.assertLess(created, end) self.assertToken(token) - del db[0]['created'] - del db[0]['key'] + del db[0]["created"] + del db[0]["key"] - self.assertEqual(db, [{'id': 1, 'token_type': 'permanent', 'user_id': 1, 'expires_at': None}]) + self.assertEqual(db, [{"id": 1, "token_type": "permanent", "user_id": 1, "expires_at": None}]) """ 🔽🔽🔽 get_or_create two Token exists and this are expired @@ -455,17 +472,17 @@ def test_get_or_create__token_type_permanent__token_exists(self): def test_get_or_create__token_type_login__token_exists__token_expired(self): start = timezone.now() expires_at = timezone.now() - timedelta(days=1, seconds=1) - token_kwargs = {'expires_at': expires_at, 'token_type': 'login'} + token_kwargs = {"expires_at": expires_at, "token_type": "login"} base = self.generate_models(user=True) models = [self.generate_models(token=True, token_kwargs=token_kwargs, models=base) for _ in range(0, 2)] - Token.get_or_create(base.user, token_type='login') + Token.get_or_create(base.user, token_type="login") end = timezone.now() db = self.all_token_dict() - created = db[0]['created'] - expires_at = db[0]['expires_at'] - token = db[0]['key'] + created = db[0]["created"] + expires_at = db[0]["expires_at"] + token = db[0]["key"] self.assertGreater(created, start) self.assertLess(created, end) @@ -475,26 +492,26 @@ def test_get_or_create__token_type_login__token_exists__token_expired(self): self.assertGreater(expires_at, end + timedelta(days=1) - timedelta(seconds=10)) self.assertToken(token) - del db[0]['created'] - del db[0]['expires_at'] - del db[0]['key'] + del db[0]["created"] + del db[0]["expires_at"] + del db[0]["key"] - self.assertEqual(db, [{'id': 3, 'token_type': 'login', 'user_id': 1}]) + self.assertEqual(db, [{"id": 3, "token_type": "login", "user_id": 1}]) def test_get_or_create__token_type_temporal__token_exists__token_expired(self): start = timezone.now() expires_at = timezone.now() - timedelta(days=1, seconds=1) - token_kwargs = {'expires_at': expires_at, 'token_type': 'temporal'} + token_kwargs = {"expires_at": expires_at, "token_type": "temporal"} base = self.generate_models(user=True) models = [self.generate_models(token=True, token_kwargs=token_kwargs, models=base) for _ in range(0, 2)] - Token.get_or_create(base.user, token_type='temporal') + Token.get_or_create(base.user, token_type="temporal") end = timezone.now() db = self.all_token_dict() - created = db[0]['created'] - expires_at = db[0]['expires_at'] - token = db[0]['key'] + created = db[0]["created"] + expires_at = db[0]["expires_at"] + token = db[0]["key"] self.assertGreater(created, start) self.assertLess(created, end) @@ -504,11 +521,11 @@ def test_get_or_create__token_type_temporal__token_exists__token_expired(self): self.assertGreater(expires_at, end + timedelta(minutes=10) - timedelta(seconds=10)) self.assertToken(token) - del db[0]['created'] - del db[0]['expires_at'] - del db[0]['key'] + del db[0]["created"] + del db[0]["expires_at"] + del db[0]["key"] - self.assertEqual(db, [{'id': 3, 'token_type': 'temporal', 
'user_id': 1}]) + self.assertEqual(db, [{"id": 3, "token_type": "temporal", "user_id": 1}]) """ 🔽🔽🔽 validate_and_destroy bad arguments @@ -522,7 +539,7 @@ def test_validate_and_destroy__bad_arguments(self): def test_validate_and_destroy__bad_user(self): with self.assertRaises(TokenNotFound) as _: - Token.validate_and_destroy('they-killed-kenny') + Token.validate_and_destroy("they-killed-kenny") self.assertEqual(self.all_token_dict(), []) @@ -539,38 +556,38 @@ def test_validate_and_destroy__bad_hash(self): """ def test_validate_and_destroy__type_login(self): - token_kwargs = {'token_type': 'login'} + token_kwargs = {"token_type": "login"} model = self.generate_models(user=True, token=True, token_kwargs=token_kwargs) with self.assertRaises(TokenNotFound) as _: Token.validate_and_destroy(model.token.key) - self.assertEqual(self.all_token_dict(), [self.model_to_dict(model, 'token')]) + self.assertEqual(self.all_token_dict(), [self.model_to_dict(model, "token")]) def test_validate_and_destroy__type_temporal(self): - token_kwargs = {'token_type': 'temporal'} + token_kwargs = {"token_type": "temporal"} model = self.generate_models(user=True, token=True, token_kwargs=token_kwargs) with self.assertRaises(TokenNotFound) as _: Token.validate_and_destroy(model.token.key) - self.assertEqual(self.all_token_dict(), [self.model_to_dict(model, 'token')]) + self.assertEqual(self.all_token_dict(), [self.model_to_dict(model, "token")]) def test_validate_and_destroy__type_permanent(self): - token_kwargs = {'token_type': 'permanent'} + token_kwargs = {"token_type": "permanent"} model = self.generate_models(user=True, token=True, token_kwargs=token_kwargs) with self.assertRaises(TokenNotFound) as _: Token.validate_and_destroy(model.token.key) - self.assertEqual(self.all_token_dict(), [self.model_to_dict(model, 'token')]) + self.assertEqual(self.all_token_dict(), [self.model_to_dict(model, "token")]) """ 🔽🔽🔽 validate_and_destroy token_type is one_time """ def test_validate_and_destroy__type_one_time(self): - token_kwargs = {'token_type': 'one_time'} + token_kwargs = {"token_type": "one_time"} model = self.generate_models(user=True, token=True, token_kwargs=token_kwargs) result = Token.validate_and_destroy(model.token.key) @@ -582,7 +599,7 @@ def test_validate_and_destroy__type_one_time(self): """ def test_validate_and_destroy__token_not_exists(self): - result = Token.get_valid('they-killed-kenny') + result = Token.get_valid("they-killed-kenny") self.assertEqual(result, None) self.assertEqual(self.all_token_dict(), []) @@ -593,10 +610,10 @@ def test_validate_and_destroy__token_not_exists(self): def test_validate_and_destroy__token_not_found(self): model = self.generate_models(token=True) - result = Token.get_valid('they-killed-kenny') + result = Token.get_valid("they-killed-kenny") self.assertEqual(result, None) - self.assertEqual(self.all_token_dict(), [self.model_to_dict(model, 'token')]) + self.assertEqual(self.all_token_dict(), [self.model_to_dict(model, "token")]) """ 🔽🔽🔽 get_valid Token exists @@ -607,7 +624,7 @@ def test_validate_and_destroy__token_exists(self): result = Token.get_valid(model.token.key) self.assertEqual(result, model.token) - self.assertEqual(self.all_token_dict(), [self.model_to_dict(model, 'token')]) + self.assertEqual(self.all_token_dict(), [self.model_to_dict(model, "token")]) """ 🔽🔽🔽 delete_expired_tokens Token not exists @@ -631,7 +648,7 @@ def test_validate_and_destroy__token_not_exists__with_arg(self): def test_validate_and_destroy__token_exists_but_are_expired(self): now = 
timezone.now() - token_kwargs = {'expires_at': now - timedelta(seconds=1)} + token_kwargs = {"expires_at": now - timedelta(seconds=1)} self.generate_models(token=True, token_kwargs=token_kwargs) result = Token.delete_expired_tokens() @@ -640,7 +657,7 @@ def test_validate_and_destroy__token_exists_but_are_expired(self): def test_validate_and_destroy__token_exists_but_are_expired__with_arg(self): now = timezone.now() - token_kwargs = {'expires_at': now - timedelta(seconds=1)} + token_kwargs = {"expires_at": now - timedelta(seconds=1)} self.generate_models(token=True, token_kwargs=token_kwargs) result = Token.delete_expired_tokens() @@ -653,18 +670,18 @@ def test_validate_and_destroy__token_exists_but_are_expired__with_arg(self): def test_validate_and_destroy__token_exists_and_is_valid(self): now = timezone.now() - token_kwargs = {'expires_at': now + timedelta(minutes=1)} + token_kwargs = {"expires_at": now + timedelta(minutes=1)} model = self.generate_models(token=True, token_kwargs=token_kwargs) result = Token.delete_expired_tokens() self.assertEqual(result, None) - self.assertEqual(self.all_token_dict(), [self.model_to_dict(model, 'token')]) + self.assertEqual(self.all_token_dict(), [self.model_to_dict(model, "token")]) def test_validate_and_destroy__token_exists_and_is_valid__with_arg(self): now = timezone.now() - token_kwargs = {'expires_at': now + timedelta(minutes=1)} + token_kwargs = {"expires_at": now + timedelta(minutes=1)} model = self.generate_models(token=True, token_kwargs=token_kwargs) result = Token.delete_expired_tokens() self.assertEqual(result, None) - self.assertEqual(self.all_token_dict(), [self.model_to_dict(model, 'token')]) + self.assertEqual(self.all_token_dict(), [self.model_to_dict(model, "token")]) diff --git a/breathecode/authenticate/tests/permissions/contexts/tests_user.py b/breathecode/authenticate/tests/permissions/contexts/tests_user.py index 44d945eeb..c16479ef1 100644 --- a/breathecode/authenticate/tests/permissions/contexts/tests_user.py +++ b/breathecode/authenticate/tests/permissions/contexts/tests_user.py @@ -8,11 +8,11 @@ def serializer(user): return { - 'id': user.id, - 'email': user.email, - 'username': user.username, - 'date_joined': user.date_joined, - 'groups': [x.name for x in user.groups.all()], + "id": user.id, + "email": user.email, + "username": user.username, + "date_joined": user.date_joined, + "groups": [x.name for x in user.groups.all()], } @@ -21,24 +21,30 @@ def serializer(user): class AcademyEventTestSuite(AuthTestCase): - @patch('ldclient.get', MagicMock()) - @patch('breathecode.services.launch_darkly.client.LaunchDarkly.context', MagicMock(return_value=value)) + @patch("ldclient.get", MagicMock()) + @patch("breathecode.services.launch_darkly.client.LaunchDarkly.context", MagicMock(return_value=value)) def test_make_right_calls(self): model = self.bc.database.create(user=1) ld = LaunchDarkly() result = user(ld, model.user) - self.assertEqual(self.bc.database.list_of('auth.User'), [ - self.bc.format.to_dict(model.user), - ]) + self.assertEqual( + self.bc.database.list_of("auth.User"), + [ + self.bc.format.to_dict(model.user), + ], + ) contexts = serializer(model.user) print(LaunchDarkly.context.call_args_list) - self.assertEqual(LaunchDarkly.context.call_args_list, [ - call('1', f'{model.user.first_name} {model.user.last_name} ({model.user.email})', 'user', contexts), - ]) + self.assertEqual( + LaunchDarkly.context.call_args_list, + [ + call("1", f"{model.user.first_name} {model.user.last_name} ({model.user.email})", "user", contexts), 
+ ], + ) self.assertEqual(result, value) diff --git a/breathecode/authenticate/tests/receivers/tests_set_user_group.py b/breathecode/authenticate/tests/receivers/tests_set_user_group.py index 2e69aad76..c1dba6da7 100644 --- a/breathecode/authenticate/tests/receivers/tests_set_user_group.py +++ b/breathecode/authenticate/tests/receivers/tests_set_user_group.py @@ -11,7 +11,7 @@ def test_adding_a_profile_academy(self, enable_signals): model = self.bc.database.create(profile_academy=1) - self.assertEqual(self.bc.database.list_of('auth.Group'), []) + self.assertEqual(self.bc.database.list_of("auth.Group"), []) self.assertEqual(self.bc.format.table(model.profile_academy.user.groups.all()), []) """ @@ -25,7 +25,7 @@ def test_adding_a_profile_academy__the_group_name_does_not_match(self, enable_si model1 = self.bc.database.create(group=1) # keep before user model2 = self.bc.database.create(profile_academy=1) - self.assertEqual(self.bc.database.list_of('auth.Group'), [self.bc.format.to_dict(model1.group)]) + self.assertEqual(self.bc.database.list_of("auth.Group"), [self.bc.format.to_dict(model1.group)]) self.assertEqual(self.bc.format.table(model2.profile_academy.user.groups.all()), []) """ @@ -36,11 +36,11 @@ def test_adding_a_profile_academy__the_role_slug_does_not_match(self, enable_sig enable_signals() # keep separated - group = {'name': 'Student'} + group = {"name": "Student"} model1 = self.bc.database.create(group=group) # keep before user model2 = self.bc.database.create(profile_academy=1) - self.assertEqual(self.bc.database.list_of('auth.Group'), [self.bc.format.to_dict(model1.group)]) + self.assertEqual(self.bc.database.list_of("auth.Group"), [self.bc.format.to_dict(model1.group)]) self.assertEqual(self.bc.format.table(model2.profile_academy.user.groups.all()), []) """ @@ -51,11 +51,11 @@ def test_adding_a_profile_academy__the_group_name_and_role_slug_match__status_in enable_signals() # keep separated - group = {'name': 'Student'} + group = {"name": "Student"} model1 = self.bc.database.create(group=group) # keep before user - model2 = self.bc.database.create(profile_academy=1, role='student') + model2 = self.bc.database.create(profile_academy=1, role="student") - self.assertEqual(self.bc.database.list_of('auth.Group'), [self.bc.format.to_dict(model1.group)]) + self.assertEqual(self.bc.database.list_of("auth.Group"), [self.bc.format.to_dict(model1.group)]) self.assertEqual(self.bc.format.table(model2.profile_academy.user.groups.all()), []) """ @@ -66,15 +66,18 @@ def test_adding_a_profile_academy__the_group_name_and_role_slug_match__status_ac enable_signals() # keep separated - group = {'name': 'Student'} - profile_academy = {'status': 'ACTIVE'} + group = {"name": "Student"} + profile_academy = {"status": "ACTIVE"} model1 = self.bc.database.create(group=group) # keep before user - model2 = self.bc.database.create(profile_academy=profile_academy, role='student') + model2 = self.bc.database.create(profile_academy=profile_academy, role="student") - self.assertEqual(self.bc.database.list_of('auth.Group'), [self.bc.format.to_dict(model1.group)]) - self.assertEqual(self.bc.format.table(model2.profile_academy.user.groups.all()), [ - self.bc.format.to_dict(model1.group), - ]) + self.assertEqual(self.bc.database.list_of("auth.Group"), [self.bc.format.to_dict(model1.group)]) + self.assertEqual( + self.bc.format.table(model2.profile_academy.user.groups.all()), + [ + self.bc.format.to_dict(model1.group), + ], + ) class TestModelUser(LegacyAPITestCase): @@ -87,7 +90,7 @@ def test_adding_a_user(self, 
enable_signals): model = self.bc.database.create(user=1) - self.assertEqual(self.bc.database.list_of('auth.Group'), []) + self.assertEqual(self.bc.database.list_of("auth.Group"), []) self.assertEqual(self.bc.format.table(model.user.groups.all()), []) """ @@ -101,7 +104,7 @@ def test_adding_a_user__the_group_name_does_not_match(self, enable_signals): model1 = self.bc.database.create(group=1) # keep before user model2 = self.bc.database.create(user=1) - self.assertEqual(self.bc.database.list_of('auth.Group'), [self.bc.format.to_dict(model1.group)]) + self.assertEqual(self.bc.database.list_of("auth.Group"), [self.bc.format.to_dict(model1.group)]) self.assertEqual(self.bc.format.table(model2.user.groups.all()), []) """ @@ -111,12 +114,12 @@ def test_adding_a_user__the_group_name_does_not_match(self, enable_signals): def test_adding_a_user__the_group_name_match(self, enable_signals): enable_signals() - group = {'name': 'Default'} + group = {"name": "Default"} model1 = self.bc.database.create(group=group) # keep before user model2 = self.bc.database.create(user=1) - self.assertEqual(self.bc.database.list_of('auth.Group'), [self.bc.format.to_dict(model1.group)]) - self.assertEqual(self.bc.format.table(model2.user.groups.all()), [{'id': 1, 'name': 'Default'}]) + self.assertEqual(self.bc.database.list_of("auth.Group"), [self.bc.format.to_dict(model1.group)]) + self.assertEqual(self.bc.format.table(model2.user.groups.all()), [{"id": 1, "name": "Default"}]) class TestModelMentorProfile(LegacyAPITestCase): @@ -129,7 +132,7 @@ def test_adding_a_mentor_profile(self, enable_signals): model = self.bc.database.create(mentor_profile=1) - self.assertEqual(self.bc.database.list_of('auth.Group'), []) + self.assertEqual(self.bc.database.list_of("auth.Group"), []) self.assertEqual(self.bc.format.table(model.mentor_profile.user.groups.all()), []) """ @@ -143,7 +146,7 @@ def test_adding_a_mentor_profile__the_group_name_does_not_match(self, enable_sig model1 = self.bc.database.create(group=1) # keep before user model2 = self.bc.database.create(mentor_profile=1) - self.assertEqual(self.bc.database.list_of('auth.Group'), [self.bc.format.to_dict(model1.group)]) + self.assertEqual(self.bc.database.list_of("auth.Group"), [self.bc.format.to_dict(model1.group)]) self.assertEqual(self.bc.format.table(model2.mentor_profile.user.groups.all()), []) """ @@ -154,11 +157,14 @@ def test_adding_a_mentor_profile__the_group_name_match(self, enable_signals): enable_signals() # keep separated - group = {'name': 'Mentor'} + group = {"name": "Mentor"} model1 = self.bc.database.create(group=group) # keep before user model2 = self.bc.database.create(mentor_profile=1) - self.assertEqual(self.bc.database.list_of('auth.Group'), [self.bc.format.to_dict(model1.group)]) - self.assertEqual(self.bc.format.table(model2.mentor_profile.user.groups.all()), [ - self.bc.format.to_dict(model1.group), - ]) + self.assertEqual(self.bc.database.list_of("auth.Group"), [self.bc.format.to_dict(model1.group)]) + self.assertEqual( + self.bc.format.table(model2.mentor_profile.user.groups.all()), + [ + self.bc.format.to_dict(model1.group), + ], + ) diff --git a/breathecode/authenticate/tests/receivers/tests_unset_user_group.py b/breathecode/authenticate/tests/receivers/tests_unset_user_group.py index 3ed8fbe79..bf5a9ff8d 100644 --- a/breathecode/authenticate/tests/receivers/tests_unset_user_group.py +++ b/breathecode/authenticate/tests/receivers/tests_unset_user_group.py @@ -12,85 +12,91 @@ class TestModelProfileAcademy(LegacyAPITestCase): 🔽🔽🔽 Adding a 
ProfileAcademy, with bad role """ - @patch('django.db.models.signals.post_save.send_robust', MagicMock()) + @patch("django.db.models.signals.post_save.send_robust", MagicMock()) def test_adding_a_profile_academy__with_bad_role(self, enable_signals): enable_signals() - group = {'name': 'Student'} + group = {"name": "Student"} model = self.bc.database.create(profile_academy=1, group=group) model.profile_academy.delete() - self.assertEqual(self.bc.database.list_of('auth.Group'), [{'id': 1, 'name': 'Student'}]) - self.assertEqual(self.bc.format.table(model.user.groups.all()), [{'id': 1, 'name': 'Student'}]) + self.assertEqual(self.bc.database.list_of("auth.Group"), [{"id": 1, "name": "Student"}]) + self.assertEqual(self.bc.format.table(model.user.groups.all()), [{"id": 1, "name": "Student"}]) """ 🔽🔽🔽 Adding a ProfileAcademy, with right role, status INVITED """ - @patch('django.db.models.signals.post_save.send_robust', MagicMock()) + @patch("django.db.models.signals.post_save.send_robust", MagicMock()) def test_adding_a_profile_academy__with_right_role__status_invited(self, enable_signals): enable_signals() - cases = ['student', 'teacher'] + cases = ["student", "teacher"] for case in cases: group_name = capitalize(case) - group = {'name': group_name} + group = {"name": group_name} model = self.bc.database.create(profile_academy=1, group=group, role=case) model.profile_academy.delete() - self.assertEqual(self.bc.database.list_of('auth.Group'), [self.bc.format.to_dict(model.group)]) - self.assertEqual(self.bc.format.table(model.user.groups.all()), [ - self.bc.format.to_dict(model.group), - ]) + self.assertEqual(self.bc.database.list_of("auth.Group"), [self.bc.format.to_dict(model.group)]) + self.assertEqual( + self.bc.format.table(model.user.groups.all()), + [ + self.bc.format.to_dict(model.group), + ], + ) # teardown - self.bc.database.delete('auth.Group') + self.bc.database.delete("auth.Group") """ 🔽🔽🔽 Adding a ProfileAcademy, with right role, status ACTIVE """ - @patch('django.db.models.signals.post_save.send_robust', MagicMock()) + @patch("django.db.models.signals.post_save.send_robust", MagicMock()) def test_adding_a_profile_academy__with_right_role__status_active(self, enable_signals): enable_signals() - cases = ['student', 'teacher'] + cases = ["student", "teacher"] for case in cases: group_name = capitalize(case) - group = {'name': group_name} - profile_academy = {'status': 'ACTIVE'} + group = {"name": group_name} + profile_academy = {"status": "ACTIVE"} model = self.bc.database.create(profile_academy=profile_academy, group=group, role=case) model.profile_academy.delete() - self.assertEqual(self.bc.database.list_of('auth.Group'), [self.bc.format.to_dict(model.group)]) + self.assertEqual(self.bc.database.list_of("auth.Group"), [self.bc.format.to_dict(model.group)]) self.assertEqual(self.bc.format.table(model.user.groups.all()), []) # teardown - self.bc.database.delete('auth.Group') + self.bc.database.delete("auth.Group") """ 🔽🔽🔽 Adding two ProfileAcademy, with right role, status ACTIVE """ - @patch('django.db.models.signals.post_save.send_robust', MagicMock()) + @patch("django.db.models.signals.post_save.send_robust", MagicMock()) def test_adding_two_profile_academy__with_right_role__status_active(self, enable_signals): enable_signals() - cases = ['student', 'teacher'] + cases = ["student", "teacher"] for case in cases: group_name = capitalize(case) - group = {'name': group_name} - profile_academy = {'status': 'ACTIVE'} + group = {"name": group_name} + profile_academy = {"status": 
"ACTIVE"} model = self.bc.database.create(profile_academy=(2, profile_academy), group=group, role=case) model.profile_academy[0].delete() - self.assertEqual(self.bc.database.list_of('auth.Group'), [self.bc.format.to_dict(model.group)]) - self.assertEqual(self.bc.format.table(model.user.groups.all()), [ - self.bc.format.to_dict(model.group), - ]) + self.assertEqual(self.bc.database.list_of("auth.Group"), [self.bc.format.to_dict(model.group)]) + self.assertEqual( + self.bc.format.table(model.user.groups.all()), + [ + self.bc.format.to_dict(model.group), + ], + ) # teardown - self.bc.database.delete('auth.Group') + self.bc.database.delete("auth.Group") class TestModelMentorProfile(LegacyAPITestCase): @@ -101,12 +107,12 @@ class TestModelMentorProfile(LegacyAPITestCase): def test_adding_a_mentor_profile(self, enable_signals): enable_signals() - group = {'name': 'Mentor'} + group = {"name": "Mentor"} model = self.bc.database.create(mentor_profile=1, group=group) model.mentor_profile.delete() - self.assertEqual(self.bc.database.list_of('auth.Group'), [self.bc.format.to_dict(model.group)]) + self.assertEqual(self.bc.database.list_of("auth.Group"), [self.bc.format.to_dict(model.group)]) self.assertEqual(self.bc.format.table(model.user.groups.all()), []) """ @@ -116,12 +122,15 @@ def test_adding_a_mentor_profile(self, enable_signals): def test_adding_two_mentor_profile(self, enable_signals): enable_signals() - group = {'name': 'Mentor'} + group = {"name": "Mentor"} model = self.bc.database.create(mentor_profile=2, group=group) model.mentor_profile[0].delete() - self.assertEqual(self.bc.database.list_of('auth.Group'), [self.bc.format.to_dict(model.group)]) - self.assertEqual(self.bc.format.table(model.user.groups.all()), [ - self.bc.format.to_dict(model.group), - ]) + self.assertEqual(self.bc.database.list_of("auth.Group"), [self.bc.format.to_dict(model.group)]) + self.assertEqual( + self.bc.format.table(model.user.groups.all()), + [ + self.bc.format.to_dict(model.group), + ], + ) diff --git a/breathecode/authenticate/tests/signals/tests_cohort_user_deleted.py b/breathecode/authenticate/tests/signals/tests_cohort_user_deleted.py index 264466651..e78fef8eb 100644 --- a/breathecode/authenticate/tests/signals/tests_cohort_user_deleted.py +++ b/breathecode/authenticate/tests/signals/tests_cohort_user_deleted.py @@ -8,7 +8,7 @@ @pytest.fixture(autouse=True) def setup(db, enable_signals): - enable_signals('breathecode.authenticate.signals.cohort_user_deleted', 'django.db.models.signals.pre_delete') + enable_signals("breathecode.authenticate.signals.cohort_user_deleted", "django.db.models.signals.pre_delete") yield @@ -22,23 +22,23 @@ def test_same_call_two_times(bc: Breathecode, set_datetime): for x in model.cohort_user: x.delete() - assert bc.database.list_of('task_manager.ScheduledTask') == [ + assert bc.database.list_of("task_manager.ScheduledTask") == [ { - 'arguments': { - 'args': [ + "arguments": { + "args": [ 1, 1, ], - 'kwargs': { - 'force': True, + "kwargs": { + "force": True, }, }, - 'duration': delta, - 'eta': now + delta, - 'id': 1, - 'status': 'PENDING', - 'task_module': 'breathecode.authenticate.tasks', - 'task_name': 'async_remove_from_organization', + "duration": delta, + "eta": now + delta, + "id": 1, + "status": "PENDING", + "task_module": "breathecode.authenticate.tasks", + "task_name": "async_remove_from_organization", }, ] @@ -48,44 +48,44 @@ def test_different_calls(bc: Breathecode, set_datetime): now = timezone.now() set_datetime(now) - model = 
bc.database.create(cohort_user=[{'user_id': n + 1} for n in range(2)], user=2) + model = bc.database.create(cohort_user=[{"user_id": n + 1} for n in range(2)], user=2) for x in model.cohort_user: x.delete() - assert bc.database.list_of('task_manager.ScheduledTask') == [ + assert bc.database.list_of("task_manager.ScheduledTask") == [ { - 'arguments': { - 'args': [ + "arguments": { + "args": [ 1, 1, ], - 'kwargs': { - 'force': True, + "kwargs": { + "force": True, }, }, - 'duration': delta, - 'eta': now + delta, - 'id': 1, - 'status': 'PENDING', - 'task_module': 'breathecode.authenticate.tasks', - 'task_name': 'async_remove_from_organization', + "duration": delta, + "eta": now + delta, + "id": 1, + "status": "PENDING", + "task_module": "breathecode.authenticate.tasks", + "task_name": "async_remove_from_organization", }, { - 'arguments': { - 'args': [ + "arguments": { + "args": [ 1, 2, ], - 'kwargs': { - 'force': True, + "kwargs": { + "force": True, }, }, - 'duration': delta, - 'eta': now + delta, - 'id': 2, - 'status': 'PENDING', - 'task_module': 'breathecode.authenticate.tasks', - 'task_name': 'async_remove_from_organization', + "duration": delta, + "eta": now + delta, + "id": 2, + "status": "PENDING", + "task_module": "breathecode.authenticate.tasks", + "task_name": "async_remove_from_organization", }, ] diff --git a/breathecode/authenticate/tests/signals/tests_invite_status_updated.py b/breathecode/authenticate/tests/signals/tests_invite_status_updated.py index e43c8741c..e73b3e446 100644 --- a/breathecode/authenticate/tests/signals/tests_invite_status_updated.py +++ b/breathecode/authenticate/tests/signals/tests_invite_status_updated.py @@ -7,24 +7,24 @@ @pytest.fixture(autouse=True) def setup(db, enable_signals, monkeypatch): - monkeypatch.setattr('breathecode.authenticate.tasks.create_user_from_invite.apply_async', MagicMock()) - enable_signals('breathecode.authenticate.signals.invite_status_updated') + monkeypatch.setattr("breathecode.authenticate.tasks.create_user_from_invite.apply_async", MagicMock()) + enable_signals("breathecode.authenticate.signals.invite_status_updated") yield -@pytest.mark.parametrize('user_invite_with_user', [True, False]) -@pytest.mark.parametrize('user_amount', [0, 2]) -@pytest.mark.parametrize('status', ['PENDING', 'WAITING_LIST', 'REJECTED']) +@pytest.mark.parametrize("user_invite_with_user", [True, False]) +@pytest.mark.parametrize("user_amount", [0, 2]) +@pytest.mark.parametrize("status", ["PENDING", "WAITING_LIST", "REJECTED"]) def test_the_requirements_are_not_met(bc: Breathecode, fake, user_amount, status, user_invite_with_user): if user_invite_with_user and user_amount == 0: return email = fake.email() - user_invite = {'status': status, 'email': email, 'user_id': None} + user_invite = {"status": status, "email": email, "user_id": None} if user_invite_with_user: - user_invite['user_id'] = 1 + user_invite["user_id"] = 1 - user = {'email': email} + user = {"email": email} if user_amount: user = (user_amount, user) @@ -34,24 +34,24 @@ def test_the_requirements_are_not_met(bc: Breathecode, fake, user_amount, status model = bc.database.create(user_invite=user_invite, user=user) - assert bc.database.list_of('authenticate.UserInvite') == [bc.format.to_dict(model.user_invite)] + assert bc.database.list_of("authenticate.UserInvite") == [bc.format.to_dict(model.user_invite)] if user_amount == 0: - assert bc.database.list_of('auth.User') == [] + assert bc.database.list_of("auth.User") == [] else: - assert bc.database.list_of('auth.User') == 
bc.format.to_dict(model.user) + assert bc.database.list_of("auth.User") == bc.format.to_dict(model.user) assert tasks.create_user_from_invite.apply_async.call_args_list == [] def test_the_requirements_are_met(bc: Breathecode, fake): - user_invites = [{'status': 'ACCEPTED', 'email': fake.email()} for _ in range(0, 2)] + user_invites = [{"status": "ACCEPTED", "email": fake.email()} for _ in range(0, 2)] model = bc.database.create(user_invite=user_invites) - assert bc.database.list_of('authenticate.UserInvite') == bc.format.to_dict(model.user_invite) - assert bc.database.list_of('auth.User') == [] + assert bc.database.list_of("authenticate.UserInvite") == bc.format.to_dict(model.user_invite) + assert bc.database.list_of("auth.User") == [] assert tasks.create_user_from_invite.apply_async.call_args_list == [ call(args=[1], countdown=60), call(args=[2], countdown=60), diff --git a/breathecode/authenticate/tests/signals/tests_student_edu_status_updated.py b/breathecode/authenticate/tests/signals/tests_student_edu_status_updated.py index deebdc31d..56a073f6e 100644 --- a/breathecode/authenticate/tests/signals/tests_student_edu_status_updated.py +++ b/breathecode/authenticate/tests/signals/tests_student_edu_status_updated.py @@ -11,24 +11,24 @@ @pytest.fixture(autouse=True) def patch(db, enable_signals, monkeypatch): - enable_signals('breathecode.admissions.signals.student_edu_status_updated') - monkeypatch.setattr('breathecode.certificate.tasks.async_generate_certificate.delay', MagicMock()) - monkeypatch.setattr('breathecode.feedback.tasks.process_student_graduation.delay', MagicMock()) - monkeypatch.setattr('breathecode.marketing.tasks.add_cohort_task_to_student.delay', MagicMock()) - monkeypatch.setattr('breathecode.authenticate.tasks.async_add_to_organization.delay', MagicMock()) + enable_signals("breathecode.admissions.signals.student_edu_status_updated") + monkeypatch.setattr("breathecode.certificate.tasks.async_generate_certificate.delay", MagicMock()) + monkeypatch.setattr("breathecode.feedback.tasks.process_student_graduation.delay", MagicMock()) + monkeypatch.setattr("breathecode.marketing.tasks.add_cohort_task_to_student.delay", MagicMock()) + monkeypatch.setattr("breathecode.authenticate.tasks.async_add_to_organization.delay", MagicMock()) yield -ACTIVE = 'ACTIVE' -POSTPONED = 'POSTPONED' -SUSPENDED = 'SUSPENDED' -GRADUATED = 'GRADUATED' -DROPPED = 'DROPPED' +ACTIVE = "ACTIVE" +POSTPONED = "POSTPONED" +SUSPENDED = "SUSPENDED" +GRADUATED = "GRADUATED" +DROPPED = "DROPPED" def all_edutational_statuses_but_active(): - statuses = ['POSTPONED', 'SUSPENDED', 'GRADUATED', 'DROPPED'] + statuses = ["POSTPONED", "SUSPENDED", "GRADUATED", "DROPPED"] for old_status in statuses: for new_status in statuses: @@ -42,69 +42,70 @@ class TestAsyncRemoveFromOrganization: def test_the_requirements_are_not_met(self, bc: Breathecode): - status = 'ACTIVE' - bc.database.create(cohort_user={'educational_status': status}) + status = "ACTIVE" + bc.database.create(cohort_user={"educational_status": status}) - assert bc.database.list_of('task_manager.ScheduledTask') == [] + assert bc.database.list_of("task_manager.ScheduledTask") == [] assert async_add_to_organization.delay.call_args_list == [call(1, 1)] - @pytest.mark.parametrize('status', ['POSTPONED', 'SUSPENDED', 'GRADUATED', 'DROPPED']) + @pytest.mark.parametrize("status", ["POSTPONED", "SUSPENDED", "GRADUATED", "DROPPED"]) def test_the_requirements_are_met(self, bc: Breathecode, status, set_datetime): delta = timedelta(days=21) now = timezone.now() 
set_datetime(now) - bc.database.create(cohort_user={'educational_status': status}) + bc.database.create(cohort_user={"educational_status": status}) - assert bc.database.list_of('task_manager.ScheduledTask') == [ + assert bc.database.list_of("task_manager.ScheduledTask") == [ { - 'arguments': { - 'args': [ + "arguments": { + "args": [ 1, 1, ], - 'kwargs': {}, + "kwargs": {}, }, - 'duration': delta, - 'eta': now + delta, - 'id': 1, - 'status': 'PENDING', - 'task_module': 'breathecode.authenticate.tasks', - 'task_name': 'async_remove_from_organization', + "duration": delta, + "eta": now + delta, + "id": 1, + "status": "PENDING", + "task_module": "breathecode.authenticate.tasks", + "task_name": "async_remove_from_organization", }, ] assert async_add_to_organization.delay.call_args_list == [] - @pytest.mark.parametrize('old_status, new_status', [*all_edutational_statuses_but_active()]) - def test_the_requirements_are_met__it_is_not_triggered_twice(self, bc: Breathecode, set_datetime, old_status, - new_status): + @pytest.mark.parametrize("old_status, new_status", [*all_edutational_statuses_but_active()]) + def test_the_requirements_are_met__it_is_not_triggered_twice( + self, bc: Breathecode, set_datetime, old_status, new_status + ): delta = timedelta(days=21) now = timezone.now() set_datetime(now) - model = bc.database.create(cohort_user=(2, {'educational_status': old_status})) + model = bc.database.create(cohort_user=(2, {"educational_status": old_status})) for x in model.cohort_user: x.educational_status = new_status x.save() - assert bc.database.list_of('task_manager.ScheduledTask') == [ + assert bc.database.list_of("task_manager.ScheduledTask") == [ { - 'arguments': { - 'args': [ + "arguments": { + "args": [ 1, 1, ], - 'kwargs': {}, + "kwargs": {}, }, - 'duration': delta, - 'eta': now + delta, - 'id': 1, - 'status': 'PENDING', - 'task_module': 'breathecode.authenticate.tasks', - 'task_name': 'async_remove_from_organization', + "duration": delta, + "eta": now + delta, + "id": 1, + "status": "PENDING", + "task_module": "breathecode.authenticate.tasks", + "task_name": "async_remove_from_organization", }, ] assert async_add_to_organization.delay.call_args_list == [] @@ -112,20 +113,24 @@ def test_the_requirements_are_met__it_is_not_triggered_twice(self, bc: Breatheco class TestGenerateCertificate: - @pytest.mark.parametrize('available_as_saas, educational_status', [ - (False, 'GRADUATED'), - (True, 'ACTIVE'), - (True, 'POSTPONED'), - (True, 'SUSPENDED'), - (True, 'DROPPED'), - ]) + @pytest.mark.parametrize( + "available_as_saas, educational_status", + [ + (False, "GRADUATED"), + (True, "ACTIVE"), + (True, "POSTPONED"), + (True, "SUSPENDED"), + (True, "DROPPED"), + ], + ) def test_the_requirements_are_not_met(self, bc: Breathecode, available_as_saas, educational_status): - bc.database.create(cohort_user={'educational_status': educational_status}, - cohort={'available_as_saas': available_as_saas}) + bc.database.create( + cohort_user={"educational_status": educational_status}, cohort={"available_as_saas": available_as_saas} + ) assert async_generate_certificate.delay.call_args_list == [] def test_the_requirements_met(self, bc: Breathecode): - bc.database.create(cohort_user={'educational_status': 'GRADUATED'}, cohort={'available_as_saas': True}) + bc.database.create(cohort_user={"educational_status": "GRADUATED"}, cohort={"available_as_saas": True}) assert async_generate_certificate.delay.call_args_list == [call(1, 1)] diff --git 
a/breathecode/authenticate/tests/tasks/tests_async_accept_user_from_waiting_list.py b/breathecode/authenticate/tests/tasks/tests_async_accept_user_from_waiting_list.py index 40b0adeaa..85cc7aefb 100644 --- a/breathecode/authenticate/tests/tasks/tests_async_accept_user_from_waiting_list.py +++ b/breathecode/authenticate/tests/tasks/tests_async_accept_user_from_waiting_list.py @@ -15,9 +15,9 @@ @pytest.fixture(autouse=True) def setup(monkeypatch: pytest.MonkeyPatch): - monkeypatch.setattr('breathecode.authenticate.tasks.create_user_from_invite.apply_async', MagicMock()) - monkeypatch.setattr('breathecode.authenticate.tasks.create_user_from_invite.delay', MagicMock()) - monkeypatch.setattr('breathecode.authenticate.tasks.async_validate_email_invite.delay', MagicMock()) + monkeypatch.setattr("breathecode.authenticate.tasks.create_user_from_invite.apply_async", MagicMock()) + monkeypatch.setattr("breathecode.authenticate.tasks.create_user_from_invite.delay", MagicMock()) + monkeypatch.setattr("breathecode.authenticate.tasks.async_validate_email_invite.delay", MagicMock()) yield @@ -27,23 +27,23 @@ class ModelProfileAcademyTestSuite(AuthTestCase): 🔽🔽🔽 With zero UserInvite """ - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.notify.actions.send_email_message', MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) def test_with_zero_user_invite(self): result = async_accept_user_from_waiting_list(1) self.assertEqual(result, None) - self.assertEqual(Logger.error.call_args_list, [call('UserInvite 1 not found')]) - self.assertEqual(self.bc.database.list_of('auth.User'), []) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), []) + self.assertEqual(Logger.error.call_args_list, [call("UserInvite 1 not found")]) + self.assertEqual(self.bc.database.list_of("auth.User"), []) + self.assertEqual(self.bc.database.list_of("authenticate.UserInvite"), []) self.assertEqual(notify_actions.send_email_message.call_args_list, []) """ 🔽🔽🔽 With one UserInvite """ - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.notify.actions.send_email_message', MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) def test_with_one_user_invite(self): model = self.bc.database.create(user_invite=1) @@ -51,15 +51,18 @@ def test_with_one_user_invite(self): self.assertEqual(result, None) self.assertEqual(Logger.error.call_args_list, []) - self.assertEqual(self.bc.database.list_of('auth.User'), []) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), [ - { - **self.bc.format.to_dict(model.user_invite), - 'process_message': "Can't determine the user email", - 'process_status': 'ERROR', - 'status': 'ACCEPTED', - }, - ]) + self.assertEqual(self.bc.database.list_of("auth.User"), []) + self.assertEqual( + self.bc.database.list_of("authenticate.UserInvite"), + [ + { + **self.bc.format.to_dict(model.user_invite), + "process_message": "Can't determine the user email", + "process_status": "ERROR", + "status": "ACCEPTED", + }, + ], + ) self.assertEqual(notify_actions.send_email_message.call_args_list, []) @@ -67,10 +70,10 @@ def test_with_one_user_invite(self): 🔽🔽🔽 With one UserInvite, with email """ - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.notify.actions.send_email_message', MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.notify.actions.send_email_message", 
MagicMock()) def test_with_one_user_invite__with_email(self): - user_invite = {'email': self.bc.fake.email()} + user_invite = {"email": self.bc.fake.email()} model = self.bc.database.create(user_invite=user_invite) start = timezone.now() @@ -81,30 +84,39 @@ def test_with_one_user_invite__with_email(self): self.assertEqual(Logger.error.call_args_list, []) users = [ - x for x in self.bc.database.list_of('auth.User') - if self.bc.check.datetime_in_range(start, end, x['date_joined']) or x.pop('date_joined') + x + for x in self.bc.database.list_of("auth.User") + if self.bc.check.datetime_in_range(start, end, x["date_joined"]) or x.pop("date_joined") ] - self.assertEqual(users, [{ - 'email': model.user_invite.email, - 'first_name': '', - 'id': 1, - 'is_active': True, - 'is_staff': False, - 'is_superuser': False, - 'last_login': None, - 'last_name': '', - 'password': '', - 'username': model.user_invite.email, - }]) - - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), [ - { - **self.bc.format.to_dict(model.user_invite), - 'process_message': 'Registered as User with id 1', - 'process_status': 'DONE', - }, - ]) + self.assertEqual( + users, + [ + { + "email": model.user_invite.email, + "first_name": "", + "id": 1, + "is_active": True, + "is_staff": False, + "is_superuser": False, + "last_login": None, + "last_name": "", + "password": "", + "username": model.user_invite.email, + } + ], + ) + + self.assertEqual( + self.bc.database.list_of("authenticate.UserInvite"), + [ + { + **self.bc.format.to_dict(model.user_invite), + "process_message": "Registered as User with id 1", + "process_status": "DONE", + }, + ], + ) self.assertEqual(notify_actions.send_email_message.call_args_list, []) @@ -112,10 +124,10 @@ def test_with_one_user_invite__with_email(self): 🔽🔽🔽 With one UserInvite, with email """ - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.notify.actions.send_email_message', MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) def test_with_one_user_invite__with_email(self): - user_invite = {'email': self.bc.fake.email()} + user_invite = {"email": self.bc.fake.email()} model = self.bc.database.create(user_invite=user_invite) start = timezone.now() @@ -126,56 +138,71 @@ def test_with_one_user_invite__with_email(self): self.assertEqual(Logger.error.call_args_list, []) users = [ - x for x in self.bc.database.list_of('auth.User') - if self.bc.check.datetime_in_range(start, end, x['date_joined']) or x.pop('date_joined') + x + for x in self.bc.database.list_of("auth.User") + if self.bc.check.datetime_in_range(start, end, x["date_joined"]) or x.pop("date_joined") ] - self.assertEqual(users, [{ - 'email': model.user_invite.email, - 'first_name': '', - 'id': 1, - 'is_active': True, - 'is_staff': False, - 'is_superuser': False, - 'last_login': None, - 'last_name': '', - 'password': '', - 'username': model.user_invite.email, - }]) - - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), [ - { - **self.bc.format.to_dict(model.user_invite), - 'process_message': 'Registered as User with id 1', - 'process_status': 'DONE', - 'status': 'ACCEPTED', - 'user_id': 1, - }, - ]) - - token = self.bc.database.get('authenticate.Token', 1, dict=False) + self.assertEqual( + users, + [ + { + "email": model.user_invite.email, + "first_name": "", + "id": 1, + "is_active": True, + "is_staff": False, + "is_superuser": False, + "last_login": None, + "last_name": "", + "password": "", + "username": 
model.user_invite.email, + } + ], + ) + + self.assertEqual( + self.bc.database.list_of("authenticate.UserInvite"), + [ + { + **self.bc.format.to_dict(model.user_invite), + "process_message": "Registered as User with id 1", + "process_status": "DONE", + "status": "ACCEPTED", + "user_id": 1, + }, + ], + ) + + token = self.bc.database.get("authenticate.Token", 1, dict=False) self.assertEqual( str(notify_actions.send_email_message.call_args_list), - str([ - call('pick_password', - model.user_invite.email, { - 'SUBJECT': 'Set your password at 4Geeks', - 'LINK': f'http://localhost:8000/v1/auth/password/{model.user_invite.token}' - }, - academy=None) - ])) + str( + [ + call( + "pick_password", + model.user_invite.email, + { + "SUBJECT": "Set your password at 4Geeks", + "LINK": f"http://localhost:8000/v1/auth/password/{model.user_invite.token}", + }, + academy=None, + ) + ] + ), + ) """ 🔽🔽🔽 With one UserInvite, with email """ - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.notify.actions.send_email_message', MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) def test_with_one_user_invite__with_email__user_already_exists(self): email = self.bc.fake.email() - user = {'email': email} - user_invite = {'email': email} + user = {"email": email} + user_invite = {"email": email} model = self.bc.database.create(user=user, user_invite=user_invite) start = timezone.now() @@ -185,17 +212,23 @@ def test_with_one_user_invite__with_email__user_already_exists(self): self.assertEqual(result, None) self.assertEqual(Logger.error.call_args_list, []) - self.assertEqual(self.bc.database.list_of('auth.User'), [ - self.bc.format.to_dict(model.user), - ]) - - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), [ - { - **self.bc.format.to_dict(model.user_invite), - 'process_message': 'User already exists with the id 1', - 'process_status': 'DONE', - 'status': 'ACCEPTED', - }, - ]) + self.assertEqual( + self.bc.database.list_of("auth.User"), + [ + self.bc.format.to_dict(model.user), + ], + ) + + self.assertEqual( + self.bc.database.list_of("authenticate.UserInvite"), + [ + { + **self.bc.format.to_dict(model.user_invite), + "process_message": "User already exists with the id 1", + "process_status": "DONE", + "status": "ACCEPTED", + }, + ], + ) self.assertEqual(str(notify_actions.send_email_message.call_args_list), str([])) diff --git a/breathecode/authenticate/tests/tasks/tests_async_validate_email_invite.py b/breathecode/authenticate/tests/tasks/tests_async_validate_email_invite.py index 412b1b0a4..fe86a1ef0 100644 --- a/breathecode/authenticate/tests/tasks/tests_async_validate_email_invite.py +++ b/breathecode/authenticate/tests/tasks/tests_async_validate_email_invite.py @@ -10,46 +10,50 @@ @pytest.fixture(autouse=True) def setup(db, monkeypatch: pytest.MonkeyPatch): - monkeypatch.setattr('logging.Logger.error', MagicMock()) + monkeypatch.setattr("logging.Logger.error", MagicMock()) yield @pytest.fixture def validation_res(patch_request): validation_res = { - 'quality_score': (random.random() * 0.4) + 0.6, - 'email_quality': (random.random() * 0.4) + 0.6, - 'is_valid_format': { - 'value': True, + "quality_score": (random.random() * 0.4) + 0.6, + "email_quality": (random.random() * 0.4) + 0.6, + "is_valid_format": { + "value": True, }, - 'is_mx_found': { - 'value': True, + "is_mx_found": { + "value": True, }, - 'is_smtp_valid': { - 'value': True, + "is_smtp_valid": { + "value": True, }, - 'is_catchall_email': 
{ - 'value': True, + "is_catchall_email": { + "value": True, }, - 'is_role_email': { - 'value': True, + "is_role_email": { + "value": True, }, - 'is_disposable_email': { - 'value': False, + "is_disposable_email": { + "value": False, }, - 'is_free_email': { - 'value': True, + "is_free_email": { + "value": True, }, } - patch_request([ - ( - call('get', - 'https://emailvalidation.abstractapi.com/v1/?api_key=None&email=pokemon@potato.io', - params=None, - timeout=10), - validation_res, - ), - ]) + patch_request( + [ + ( + call( + "get", + "https://emailvalidation.abstractapi.com/v1/?api_key=None&email=pokemon@potato.io", + params=None, + timeout=10, + ), + validation_res, + ), + ] + ) return validation_res @@ -58,40 +62,44 @@ def custom_res(patch_request): def wrapper(data={}): validation_res = { - 'quality_score': (random.random() * 0.4) + 0.6, - 'email_quality': (random.random() * 0.4) + 0.6, - 'is_valid_format': { - 'value': True, + "quality_score": (random.random() * 0.4) + 0.6, + "email_quality": (random.random() * 0.4) + 0.6, + "is_valid_format": { + "value": True, }, - 'is_mx_found': { - 'value': True, + "is_mx_found": { + "value": True, }, - 'is_smtp_valid': { - 'value': True, + "is_smtp_valid": { + "value": True, }, - 'is_catchall_email': { - 'value': True, + "is_catchall_email": { + "value": True, }, - 'is_role_email': { - 'value': True, + "is_role_email": { + "value": True, }, - 'is_disposable_email': { - 'value': False, + "is_disposable_email": { + "value": False, }, - 'is_free_email': { - 'value': True, + "is_free_email": { + "value": True, }, **data, } - patch_request([ - ( - call('get', - 'https://emailvalidation.abstractapi.com/v1/?api_key=None&email=pokemon@potato.io', - params=None, - timeout=10), - validation_res, - ), - ]) + patch_request( + [ + ( + call( + "get", + "https://emailvalidation.abstractapi.com/v1/?api_key=None&email=pokemon@potato.io", + params=None, + timeout=10, + ), + validation_res, + ), + ] + ) return validation_res return wrapper @@ -100,130 +108,146 @@ def wrapper(data={}): @pytest.fixture def error_res(patch_request): validation_res = { - 'success': False, - 'error': { - 'code': 210, - 'type': 'no_email_address_supplied', - 'info': 'Please specify an email address. [Example: support@apilayer.com]', + "success": False, + "error": { + "code": 210, + "type": "no_email_address_supplied", + "info": "Please specify an email address. 
[Example: support@apilayer.com]", }, } - patch_request([ - ( - call('get', - 'https://emailvalidation.abstractapi.com/v1/?api_key=None&email=pokemon@potato.io', - params=None, - timeout=10), - validation_res, - ), - ]) + patch_request( + [ + ( + call( + "get", + "https://emailvalidation.abstractapi.com/v1/?api_key=None&email=pokemon@potato.io", + params=None, + timeout=10, + ), + validation_res, + ), + ] + ) return validation_res @pytest.fixture def exception_res(monkeypatch: pytest.MonkeyPatch): - monkeypatch.setattr('requests.get', MagicMock(side_effect=Exception('random error'))) + monkeypatch.setattr("requests.get", MagicMock(side_effect=Exception("random error"))) yield def test_no_invites(database: capy.Database): async_validate_email_invite.delay(1) - assert database.list_of('authenticate.UserInvite') == [] - assert database.list_of('auth.User') == [] + assert database.list_of("authenticate.UserInvite") == [] + assert database.list_of("auth.User") == [] assert logging.Logger.error.call_args_list == [ - call('UserInvite 1 not found', exc_info=True), + call("UserInvite 1 not found", exc_info=True), ] def test_throws_exception(database: capy.Database, format: capy.Format, exception_res): - model = database.create(user_invite={'email': 'pokemon@potato.io'}) + model = database.create(user_invite={"email": "pokemon@potato.io"}) async_validate_email_invite.delay(1) - assert database.list_of('authenticate.UserInvite') == [ + assert database.list_of("authenticate.UserInvite") == [ format.to_obj_repr(model.user_invite), ] - assert database.list_of('auth.User') == [] + assert database.list_of("auth.User") == [] assert logging.Logger.error.call_args_list == [ - call('Retrying email validation for invite 1', exc_info=True), - call('Retrying email validation for invite 1', exc_info=True), + call("Retrying email validation for invite 1", exc_info=True), + call("Retrying email validation for invite 1", exc_info=True), ] def test_bad_response(database: capy.Database, format: capy.Format, error_res): - model = database.create(user_invite={'email': 'pokemon@potato.io'}) + model = database.create(user_invite={"email": "pokemon@potato.io"}) async_validate_email_invite.delay(1) - assert database.list_of('authenticate.UserInvite') == [ + assert database.list_of("authenticate.UserInvite") == [ { **format.to_obj_repr(model.user_invite), - 'process_message': 'email-validation-error', - 'process_status': 'ERROR', - 'status': 'REJECTED', + "process_message": "email-validation-error", + "process_status": "ERROR", + "status": "REJECTED", }, ] - assert database.list_of('auth.User') == [] + assert database.list_of("auth.User") == [] assert logging.Logger.error.call_args_list == [] -@pytest.mark.parametrize('data, error', [ - ({ - 'is_disposable_email': { - 'value': True, - }, - }, 'disposable-email'), - ({ - 'is_mx_found': { - 'value': False, - }, - }, 'invalid-email'), - ({ - 'quality_score': 0.59, - }, 'poor-quality-email'), -]) +@pytest.mark.parametrize( + "data, error", + [ + ( + { + "is_disposable_email": { + "value": True, + }, + }, + "disposable-email", + ), + ( + { + "is_mx_found": { + "value": False, + }, + }, + "invalid-email", + ), + ( + { + "quality_score": 0.59, + }, + "poor-quality-email", + ), + ], +) def test_invalid_email_response(database: capy.Database, format: capy.Format, custom_res, data, error): custom_res(data) - model = database.create(user_invite={'email': 'pokemon@potato.io'}) + model = database.create(user_invite={"email": "pokemon@potato.io"}) async_validate_email_invite.delay(1) - 
assert database.list_of('authenticate.UserInvite') == [ + assert database.list_of("authenticate.UserInvite") == [ { **format.to_obj_repr(model.user_invite), - 'process_message': error, - 'process_status': 'ERROR', - 'status': 'REJECTED', + "process_message": error, + "process_status": "ERROR", + "status": "REJECTED", }, ] - assert database.list_of('auth.User') == [] + assert database.list_of("auth.User") == [] assert logging.Logger.error.call_args_list == [] def test_good_response(database: capy.Database, format: capy.Format, validation_res): - model = database.create(user_invite={'email': 'pokemon@potato.io'}) + model = database.create(user_invite={"email": "pokemon@potato.io"}) async_validate_email_invite.delay(1) - assert database.list_of('authenticate.UserInvite') == [ + assert database.list_of("authenticate.UserInvite") == [ { **format.to_obj_repr(model.user_invite), - 'email_quality': validation_res['quality_score'], - 'email_status': { - 'catch_all': validation_res['is_catchall_email']['value'], - 'disposable': validation_res['is_disposable_email']['value'], - 'domain': 'potato.io', - 'email': 'pokemon@potato.io', - 'format_valid': validation_res['is_valid_format']['value'], - 'free': validation_res['is_free_email']['value'], - 'mx_found': validation_res['is_mx_found']['value'], - 'role': validation_res['is_role_email']['value'], - 'smtp_check': validation_res['is_smtp_valid']['value'], - 'score': validation_res['quality_score'], - 'user': 'pokemon' + "email_quality": validation_res["quality_score"], + "email_status": { + "catch_all": validation_res["is_catchall_email"]["value"], + "disposable": validation_res["is_disposable_email"]["value"], + "domain": "potato.io", + "email": "pokemon@potato.io", + "format_valid": validation_res["is_valid_format"]["value"], + "free": validation_res["is_free_email"]["value"], + "mx_found": validation_res["is_mx_found"]["value"], + "role": validation_res["is_role_email"]["value"], + "smtp_check": validation_res["is_smtp_valid"]["value"], + "score": validation_res["quality_score"], + "user": "pokemon", }, }, ] - assert database.list_of('auth.User') == [] + assert database.list_of("auth.User") == [] assert logging.Logger.error.call_args_list == [] diff --git a/breathecode/authenticate/tests/tasks/tests_create_user_from_invite.py b/breathecode/authenticate/tests/tasks/tests_create_user_from_invite.py index cf4496853..82a888a88 100644 --- a/breathecode/authenticate/tests/tasks/tests_create_user_from_invite.py +++ b/breathecode/authenticate/tests/tasks/tests_create_user_from_invite.py @@ -11,30 +11,30 @@ @pytest.fixture(autouse=True) def setup(db, monkeypatch: pytest.MonkeyPatch): - monkeypatch.setattr('breathecode.authenticate.tasks.async_validate_email_invite.delay', MagicMock()) - monkeypatch.setattr('logging.Logger.error', MagicMock()) - monkeypatch.setattr('breathecode.notify.actions.send_email_message', MagicMock()) - monkeypatch.setattr('breathecode.authenticate.signals.invite_status_updated.send_robust', MagicMock()) + monkeypatch.setattr("breathecode.authenticate.tasks.async_validate_email_invite.delay", MagicMock()) + monkeypatch.setattr("logging.Logger.error", MagicMock()) + monkeypatch.setattr("breathecode.notify.actions.send_email_message", MagicMock()) + monkeypatch.setattr("breathecode.authenticate.signals.invite_status_updated.send_robust", MagicMock()) yield def prepare(db: list[dict]): - return [x for x in db if isinstance(x.pop('date_joined'), datetime)] + return [x for x in db if isinstance(x.pop("date_joined"), datetime)] def 
user_serializer(data={}): return { - 'email': '', - 'first_name': '', - 'id': 1, - 'is_active': True, - 'is_staff': False, - 'is_superuser': False, - 'last_login': None, - 'last_name': '', - 'password': '', - 'username': '', + "email": "", + "first_name": "", + "id": 1, + "is_active": True, + "is_staff": False, + "is_superuser": False, + "last_login": None, + "last_name": "", + "password": "", + "username": "", **data, } @@ -42,79 +42,79 @@ def user_serializer(data={}): def test_no_invites(bc: Breathecode): create_user_from_invite.delay(1) - assert bc.database.list_of('authenticate.UserInvite') == [] - assert bc.database.list_of('auth.User') == [] + assert bc.database.list_of("authenticate.UserInvite") == [] + assert bc.database.list_of("auth.User") == [] - assert logging.Logger.error.call_args_list == [call('User invite not found', exc_info=True)] + assert logging.Logger.error.call_args_list == [call("User invite not found", exc_info=True)] -@pytest.mark.parametrize('status', ['PENDING', 'WAITING_LIST', 'REJECTED']) +@pytest.mark.parametrize("status", ["PENDING", "WAITING_LIST", "REJECTED"]) def test_invite_not_accepted(bc: Breathecode, status): - model = bc.database.create(user_invite={'status': status}) + model = bc.database.create(user_invite={"status": status}) create_user_from_invite.delay(1) - assert bc.database.list_of('authenticate.UserInvite') == [bc.format.to_dict(model.user_invite)] - assert bc.database.list_of('auth.User') == [] + assert bc.database.list_of("authenticate.UserInvite") == [bc.format.to_dict(model.user_invite)] + assert bc.database.list_of("auth.User") == [] - assert logging.Logger.error.call_args_list == [call('User invite is not accepted', exc_info=True)] + assert logging.Logger.error.call_args_list == [call("User invite is not accepted", exc_info=True)] def test_no_email(bc: Breathecode): - model = bc.database.create(user_invite={'status': 'ACCEPTED'}) + model = bc.database.create(user_invite={"status": "ACCEPTED"}) create_user_from_invite.delay(1) - assert bc.database.list_of('authenticate.UserInvite') == [bc.format.to_dict(model.user_invite)] - assert bc.database.list_of('auth.User') == [] + assert bc.database.list_of("authenticate.UserInvite") == [bc.format.to_dict(model.user_invite)] + assert bc.database.list_of("auth.User") == [] - assert logging.Logger.error.call_args_list == [call('No email found', exc_info=True)] + assert logging.Logger.error.call_args_list == [call("No email found", exc_info=True)] -@pytest.mark.parametrize('is_linked_the_user', [True, False]) +@pytest.mark.parametrize("is_linked_the_user", [True, False]) def test_user_exists(bc: Breathecode, fake, is_linked_the_user): email = fake.email() - user = {'email': email} - user_invite = {'status': 'ACCEPTED', 'email': email, 'user_id': None} + user = {"email": email} + user_invite = {"status": "ACCEPTED", "email": email, "user_id": None} if is_linked_the_user: - user_invite['user_id'] = 1 + user_invite["user_id"] = 1 model = bc.database.create(user_invite=user_invite, user=user) create_user_from_invite.delay(1) - assert bc.database.list_of('authenticate.UserInvite') == [ + assert bc.database.list_of("authenticate.UserInvite") == [ { **bc.format.to_dict(model.user_invite), - 'user_id': 1, + "user_id": 1, }, ] - assert bc.database.list_of('auth.User') == [bc.format.to_dict(model.user)] + assert bc.database.list_of("auth.User") == [bc.format.to_dict(model.user)] - assert logging.Logger.error.call_args_list == [call('User invite is already associated to a user', exc_info=True)] + assert 
logging.Logger.error.call_args_list == [call("User invite is already associated to a user", exc_info=True)] def test_invite_accepted(bc: Breathecode, fake): email = fake.email() - user_invite = {'status': 'ACCEPTED', 'email': email, 'first_name': fake.first_name(), 'last_name': fake.last_name()} + user_invite = {"status": "ACCEPTED", "email": email, "first_name": fake.first_name(), "last_name": fake.last_name()} model = bc.database.create(user_invite=user_invite) create_user_from_invite.delay(1) - assert bc.database.list_of('authenticate.UserInvite') == [bc.format.to_dict(model.user_invite)] + assert bc.database.list_of("authenticate.UserInvite") == [bc.format.to_dict(model.user_invite)] - del user_invite['status'] - user_invite['username'] = email + del user_invite["status"] + user_invite["username"] = email - assert prepare(bc.database.list_of('auth.User')) == [user_serializer({'id': 1, **user_invite})] + assert prepare(bc.database.list_of("auth.User")) == [user_serializer({"id": 1, **user_invite})] assert logging.Logger.error.call_args_list == [] assert notify_actions.send_email_message.call_args_list == [ - call('pick_password', - email, { - 'SUBJECT': 'Set your password at 4Geeks', - 'LINK': '/v1/auth/password/' + model.user_invite.token - }, - academy=None) + call( + "pick_password", + email, + {"SUBJECT": "Set your password at 4Geeks", "LINK": "/v1/auth/password/" + model.user_invite.token}, + academy=None, + ) ] diff --git a/breathecode/authenticate/tests/urls/tests_academy_html_invite.py b/breathecode/authenticate/tests/urls/tests_academy_html_invite.py index c52bd43cb..2261e487a 100644 --- a/breathecode/authenticate/tests/urls/tests_academy_html_invite.py +++ b/breathecode/authenticate/tests/urls/tests_academy_html_invite.py @@ -1,6 +1,7 @@ """ Test cases for /academy/:id/member/:id """ + import os import urllib.parse @@ -14,73 +15,82 @@ # IMPORTANT: the loader.render_to_string in a function is inside of function render def render_page_without_invites(): request = None - APP_URL = os.getenv('APP_URL', '')[:-1] + APP_URL = os.getenv("APP_URL", "")[:-1] return loader.render_to_string( - 'message.html', { - 'MESSAGE': f'You don\'t have any more pending invites', - 'BUTTON': 'Continue to 4Geeks', - 'BUTTON_TARGET': '_blank', - 'LINK': APP_URL - }, request) + "message.html", + { + "MESSAGE": f"You don't have any more pending invites", + "BUTTON": "Continue to 4Geeks", + "BUTTON_TARGET": "_blank", + "LINK": APP_URL, + }, + request, + ) def render_page_with_pending_invites(model): request = None - APP_URL = os.getenv('APP_URL', '')[:-1] + APP_URL = os.getenv("APP_URL", "")[:-1] profile_academies = [] - if 'profile_academy' in model: - profile_academies = model.profile_academy if isinstance(model.profile_academy, - list) else [model.profile_academy] + if "profile_academy" in model: + profile_academies = ( + model.profile_academy if isinstance(model.profile_academy, list) else [model.profile_academy] + ) # excluding the accepted invited - profile_academies = [x for x in profile_academies if x.status != 'ACTIVE'] + profile_academies = [x for x in profile_academies if x.status != "ACTIVE"] - querystr = urllib.parse.urlencode({'callback': APP_URL, 'token': model.token.key}) - url = os.getenv('API_URL') + '/v1/auth/academy/html/invite?' + querystr + querystr = urllib.parse.urlencode({"callback": APP_URL, "token": model.token.key}) + url = os.getenv("API_URL") + "/v1/auth/academy/html/invite?" 
+ querystr return loader.render_to_string( - 'academy_invite.html', { - 'subject': - f'Invitation to study at 4Geeks.com', - 'invites': [{ - 'id': profile_academy.id, - 'academy': { - 'id': profile_academy.academy.id, - 'name': profile_academy.academy.name, - 'slug': profile_academy.academy.slug, - 'timezone': profile_academy.academy.timezone, - }, - 'role': profile_academy.role.slug, - 'created_at': profile_academy.created_at, - } for profile_academy in profile_academies], - 'LINK': - url, - 'user': { - 'id': model.user.id, - 'email': model.user.email, - 'first_name': model.user.first_name, - } - }, request) + "academy_invite.html", + { + "subject": f"Invitation to study at 4Geeks.com", + "invites": [ + { + "id": profile_academy.id, + "academy": { + "id": profile_academy.academy.id, + "name": profile_academy.academy.name, + "slug": profile_academy.academy.slug, + "timezone": profile_academy.academy.timezone, + }, + "role": profile_academy.role.slug, + "created_at": profile_academy.created_at, + } + for profile_academy in profile_academies + ], + "LINK": url, + "user": { + "id": model.user.id, + "email": model.user.email, + "first_name": model.user.first_name, + }, + }, + request, + ) class AuthenticateTestSuite(AuthTestCase): """Authentication test suite""" + """ 🔽🔽🔽 Auth """ def test_academy_html_invite__without_auth(self): - url = reverse_lazy('authenticate:academy_html_invite') + url = reverse_lazy("authenticate:academy_html_invite") response = self.client.get(url) - hash = self.bc.format.to_base64('/v1/auth/academy/html/invite') + hash = self.bc.format.to_base64("/v1/auth/academy/html/invite") content = self.bc.format.from_bytes(response.content) - expected = '' + expected = "" self.assertEqual(content, expected) - self.assertEqual(response.url, f'/v1/auth/view/login?attempt=1&url={hash}') + self.assertEqual(response.url, f"/v1/auth/view/login?attempt=1&url={hash}") self.assertEqual(response.status_code, status.HTTP_302_FOUND) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), []) + self.assertEqual(self.bc.database.list_of("authenticate.ProfileAcademy"), []) """ 🔽🔽🔽 GET without ProfileAcademy @@ -89,8 +99,8 @@ def test_academy_html_invite__without_auth(self): def test_academy_html_invite__without_profile_academy(self): model = self.bc.database.create(user=1, token=1) - querystring = self.bc.format.to_querystring({'token': model.token.key}) - url = reverse_lazy('authenticate:academy_html_invite') + f'?{querystring}' + querystring = self.bc.format.to_querystring({"token": model.token.key}) + url = reverse_lazy("authenticate:academy_html_invite") + f"?{querystring}" response = self.client.get(url) content = self.bc.format.from_bytes(response.content) @@ -98,15 +108,15 @@ def test_academy_html_invite__without_profile_academy(self): # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), []) + self.assertEqual(self.bc.database.list_of("authenticate.ProfileAcademy"), []) """ 🔽🔽🔽 GET with one ProfileAcademy @@ -115,8 +125,8 @@ def test_academy_html_invite__without_profile_academy(self): def test_academy_html_invite__with_one_profile_academy(self): model = self.bc.database.create(user=1, token=1, 
profile_academy=1) - querystring = self.bc.format.to_querystring({'token': model.token.key}) - url = reverse_lazy('authenticate:academy_html_invite') + f'?{querystring}' + querystring = self.bc.format.to_querystring({"token": model.token.key}) + url = reverse_lazy("authenticate:academy_html_invite") + f"?{querystring}" response = self.client.get(url) content = self.bc.format.from_bytes(response.content) @@ -124,17 +134,20 @@ def test_academy_html_invite__with_one_profile_academy(self): # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [ - self.bc.format.to_dict(model.profile_academy), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + self.bc.format.to_dict(model.profile_academy), + ], + ) """ 🔽🔽🔽 GET with two ProfileAcademy @@ -143,8 +156,8 @@ def test_academy_html_invite__with_one_profile_academy(self): def test_academy_html_invite__with_two_profile_academy(self): model = self.bc.database.create(user=1, token=1, profile_academy=2) - querystring = self.bc.format.to_querystring({'token': model.token.key}) - url = reverse_lazy('authenticate:academy_html_invite') + f'?{querystring}' + querystring = self.bc.format.to_querystring({"token": model.token.key}) + url = reverse_lazy("authenticate:academy_html_invite") + f"?{querystring}" response = self.client.get(url) content = self.bc.format.from_bytes(response.content) @@ -152,16 +165,17 @@ def test_academy_html_invite__with_two_profile_academy(self): # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), - self.bc.format.to_dict(model.profile_academy)) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), self.bc.format.to_dict(model.profile_academy) + ) """ 🔽🔽🔽 GET with two ProfileAcademy, accepting both @@ -170,8 +184,8 @@ def test_academy_html_invite__with_two_profile_academy(self): def test_academy_html_invite__with_two_profile_academy__accepting_both(self): model = self.bc.database.create(user=1, token=1, profile_academy=2) - querystring = self.bc.format.to_querystring({'token': model.token.key, 'accepting': '1,2'}) - url = reverse_lazy('authenticate:academy_html_invite') + f'?{querystring}' + querystring = self.bc.format.to_querystring({"token": model.token.key, "accepting": "1,2"}) + url = reverse_lazy("authenticate:academy_html_invite") + f"?{querystring}" response = self.client.get(url) content = self.bc.format.from_bytes(response.content) @@ -179,19 +193,24 @@ def test_academy_html_invite__with_two_profile_academy__accepting_both(self): # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, 
status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), - [{ - **self.bc.format.to_dict(profile_academy), - 'status': 'ACTIVE', - } for profile_academy in model.profile_academy]) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + { + **self.bc.format.to_dict(profile_academy), + "status": "ACTIVE", + } + for profile_academy in model.profile_academy + ], + ) """ 🔽🔽🔽 GET with two ProfileAcademy, rejecting both @@ -200,8 +219,8 @@ def test_academy_html_invite__with_two_profile_academy__accepting_both(self): def test_academy_html_invite__with_two_profile_academy__rejecting_both(self): model = self.bc.database.create(user=1, token=1, profile_academy=2) - querystring = self.bc.format.to_querystring({'token': model.token.key, 'rejecting': '1,2'}) - url = reverse_lazy('authenticate:academy_html_invite') + f'?{querystring}' + querystring = self.bc.format.to_querystring({"token": model.token.key, "rejecting": "1,2"}) + url = reverse_lazy("authenticate:academy_html_invite") + f"?{querystring}" response = self.client.get(url) content = self.bc.format.from_bytes(response.content) @@ -209,12 +228,12 @@ def test_academy_html_invite__with_two_profile_academy__rejecting_both(self): # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), []) + self.assertEqual(self.bc.database.list_of("authenticate.ProfileAcademy"), []) diff --git a/breathecode/authenticate/tests/urls/tests_academy_id_member.py b/breathecode/authenticate/tests/urls/tests_academy_id_member.py index 80acee906..f62e1c005 100644 --- a/breathecode/authenticate/tests/urls/tests_academy_id_member.py +++ b/breathecode/authenticate/tests/urls/tests_academy_id_member.py @@ -1,6 +1,7 @@ """ Test cases for /academy/:id/member """ + from unittest.mock import MagicMock, patch from django.urls.base import reverse_lazy @@ -12,9 +13,9 @@ from ..mixins.new_auth_test_case import AuthTestCase -@capable_of('read_member') +@capable_of("read_member") def view_method_mock(request, *args, **kwargs): - response = {'args': args, 'kwargs': kwargs} + response = {"args": args, "kwargs": kwargs} return Response(response, status=200) @@ -26,62 +27,65 @@ class MemberGetDuckTestSuite(AuthTestCase): def test_academy_id_member_without_auth(self): """Test /academy/:id/member without auth""" - url = reverse_lazy('authenticate:academy_id_member', kwargs={'academy_id': 1}) + url = reverse_lazy("authenticate:academy_id_member", kwargs={"academy_id": 1}) response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED, - }) + self.assertEqual( + json, + { + "detail": "Authentication credentials were not provided.", + "status_code": status.HTTP_401_UNAUTHORIZED, + }, + ) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) def test_academy_id_member__without_capabilities(self): self.bc.request.set_headers(academy=1) model = self.bc.database.create(authenticate=True) - url = reverse_lazy('authenticate:academy_id_member', kwargs={'academy_id': 1}) + url = reverse_lazy("authenticate:academy_id_member", 
kwargs={"academy_id": 1}) response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': "You (user: 1) don't have this capability: read_member for academy 1", - 'status_code': 403, - }) + self.assertEqual( + json, + { + "detail": "You (user: 1) don't have this capability: read_member for academy 1", + "status_code": 403, + }, + ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) self.assertEqual(self.all_cohort_time_slot_dict(), []) def test_academy_id_member__with_auth(self): for n in range(1, 4): self.bc.request.set_headers(academy=n) - model = self.bc.database.create(authenticate=True, capability='read_member', role='role', profile_academy=1) - url = reverse_lazy('authenticate:academy_id_member', kwargs={'academy_id': n}) + model = self.bc.database.create(authenticate=True, capability="read_member", role="role", profile_academy=1) + url = reverse_lazy("authenticate:academy_id_member", kwargs={"academy_id": n}) response = self.client.get(url) json = response.json() - self.bc.check.partial_equality(json, [{'academy': {'id': n}}]) + self.bc.check.partial_equality(json, [{"academy": {"id": n}}]) self.assertEqual(response.status_code, status.HTTP_200_OK) """ 🔽🔽🔽 Check the param is being passed """ - @patch('breathecode.authenticate.views.MemberView.get', MagicMock(side_effect=view_method_mock)) + @patch("breathecode.authenticate.views.MemberView.get", MagicMock(side_effect=view_method_mock)) def test_academy_id_member__with_auth___mock_view(self): - model = self.bc.database.create(academy=3, - capability='read_member', - role='role', - profile_academy=[{ - 'academy_id': id - } for id in range(1, 4)]) + model = self.bc.database.create( + academy=3, capability="read_member", role="role", profile_academy=[{"academy_id": id} for id in range(1, 4)] + ) for n in range(1, 4): self.client.force_authenticate(model.user) self.bc.request.set_headers(academy=1) - url = reverse_lazy('authenticate:academy_id_member', kwargs={'academy_id': n}) + url = reverse_lazy("authenticate:academy_id_member", kwargs={"academy_id": n}) response = self.client.get(url) json = response.json() - expected = {'args': [], 'kwargs': {'academy_id': n}} + expected = {"args": [], "kwargs": {"academy_id": n}} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -95,40 +99,46 @@ class MemberPostDuckTestSuite(AuthTestCase): def test_academy_id_member_without_auth(self): """Test /academy/:id/member without auth""" - url = reverse_lazy('authenticate:academy_id_member', kwargs={'academy_id': 1}) + url = reverse_lazy("authenticate:academy_id_member", kwargs={"academy_id": 1}) response = self.client.post(url) json = response.json() - self.assertEqual(json, { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED, - }) + self.assertEqual( + json, + { + "detail": "Authentication credentials were not provided.", + "status_code": status.HTTP_401_UNAUTHORIZED, + }, + ) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) def test_academy_id_member__without_capabilities(self): self.bc.request.set_headers(academy=1) model = self.bc.database.create(authenticate=True) - url = reverse_lazy('authenticate:academy_id_member', kwargs={'academy_id': 1}) + url = reverse_lazy("authenticate:academy_id_member", kwargs={"academy_id": 1}) response = self.client.post(url) json = response.json() - self.assertEqual(json, { - 'detail': "You (user: 1) don't have this capability: crud_member for academy 1", - 
'status_code': 403, - }) + self.assertEqual( + json, + { + "detail": "You (user: 1) don't have this capability: crud_member for academy 1", + "status_code": 403, + }, + ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) self.assertEqual(self.all_cohort_time_slot_dict(), []) def test_academy_id_member__with_auth(self): for n in range(1, 4): self.bc.request.set_headers(academy=n) - model = self.bc.database.create(authenticate=True, capability='crud_member', role='role', profile_academy=1) + model = self.bc.database.create(authenticate=True, capability="crud_member", role="role", profile_academy=1) - url = reverse_lazy('authenticate:academy_id_member', kwargs={'academy_id': n}) + url = reverse_lazy("authenticate:academy_id_member", kwargs={"academy_id": n}) response = self.client.post(url) json = response.json() - expected = {'role': ['This field is required.']} + expected = {"role": ["This field is required."]} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -137,23 +147,20 @@ def test_academy_id_member__with_auth(self): 🔽🔽🔽 Check the param is being passed """ - @patch('breathecode.authenticate.views.MemberView.post', MagicMock(side_effect=view_method_mock)) + @patch("breathecode.authenticate.views.MemberView.post", MagicMock(side_effect=view_method_mock)) def test_academy_id_member__with_auth___mock_view(self): - model = self.bc.database.create(academy=3, - capability='read_member', - role='role', - profile_academy=[{ - 'academy_id': id - } for id in range(1, 4)]) + model = self.bc.database.create( + academy=3, capability="read_member", role="role", profile_academy=[{"academy_id": id} for id in range(1, 4)] + ) for n in range(1, 4): self.client.force_authenticate(model.user) self.bc.request.set_headers(academy=1) - url = reverse_lazy('authenticate:academy_id_member', kwargs={'academy_id': n}) + url = reverse_lazy("authenticate:academy_id_member", kwargs={"academy_id": n}) response = self.client.post(url) json = response.json() - expected = {'args': [], 'kwargs': {'academy_id': n}} + expected = {"args": [], "kwargs": {"academy_id": n}} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -167,40 +174,46 @@ class MemberDeleteDuckTestSuite(AuthTestCase): def test_academy_id_member_without_auth(self): """Test /academy/:id/member without auth""" - url = reverse_lazy('authenticate:academy_id_member', kwargs={'academy_id': 1}) + url = reverse_lazy("authenticate:academy_id_member", kwargs={"academy_id": 1}) response = self.client.delete(url) json = response.json() - self.assertEqual(json, { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED, - }) + self.assertEqual( + json, + { + "detail": "Authentication credentials were not provided.", + "status_code": status.HTTP_401_UNAUTHORIZED, + }, + ) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) def test_academy_id_member__without_capabilities(self): self.bc.request.set_headers(academy=1) model = self.bc.database.create(authenticate=True) - url = reverse_lazy('authenticate:academy_id_member', kwargs={'academy_id': 1}) + url = reverse_lazy("authenticate:academy_id_member", kwargs={"academy_id": 1}) response = self.client.delete(url) json = response.json() - self.assertEqual(json, { - 'detail': "You (user: 1) don't have this capability: crud_member for academy 1", - 'status_code': 403, - }) + self.assertEqual( + json, + { + "detail": "You (user: 1) don't have this 
capability: crud_member for academy 1", + "status_code": 403, + }, + ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) self.assertEqual(self.all_cohort_time_slot_dict(), []) def test_academy_id_member__with_auth(self): for n in range(1, 4): self.bc.request.set_headers(academy=n) - model = self.bc.database.create(authenticate=True, capability='crud_member', role='role', profile_academy=1) + model = self.bc.database.create(authenticate=True, capability="crud_member", role="role", profile_academy=1) - url = reverse_lazy('authenticate:academy_id_member', kwargs={'academy_id': n}) + url = reverse_lazy("authenticate:academy_id_member", kwargs={"academy_id": n}) response = self.client.delete(url) json = response.json() - expected = {'detail': 'delete-is-forbidden', 'status_code': 403} + expected = {"detail": "delete-is-forbidden", "status_code": 403} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) @@ -209,23 +222,20 @@ def test_academy_id_member__with_auth(self): 🔽🔽🔽 Check the param is being passed """ - @patch('breathecode.authenticate.views.MemberView.delete', MagicMock(side_effect=view_method_mock)) + @patch("breathecode.authenticate.views.MemberView.delete", MagicMock(side_effect=view_method_mock)) def test_academy_id_member__with_auth___mock_view(self): - model = self.bc.database.create(academy=3, - capability='read_member', - role='role', - profile_academy=[{ - 'academy_id': id - } for id in range(1, 4)]) + model = self.bc.database.create( + academy=3, capability="read_member", role="role", profile_academy=[{"academy_id": id} for id in range(1, 4)] + ) for n in range(1, 4): self.client.force_authenticate(model.user) self.bc.request.set_headers(academy=1) - url = reverse_lazy('authenticate:academy_id_member', kwargs={'academy_id': n}) + url = reverse_lazy("authenticate:academy_id_member", kwargs={"academy_id": n}) response = self.client.delete(url) json = response.json() - expected = {'args': [], 'kwargs': {'academy_id': n}} + expected = {"args": [], "kwargs": {"academy_id": n}} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) diff --git a/breathecode/authenticate/tests/urls/tests_academy_id_member_id.py b/breathecode/authenticate/tests/urls/tests_academy_id_member_id.py index ecf3cec42..000c2e253 100644 --- a/breathecode/authenticate/tests/urls/tests_academy_id_member_id.py +++ b/breathecode/authenticate/tests/urls/tests_academy_id_member_id.py @@ -1,6 +1,7 @@ """ Test cases for /academy/:id/member/:id """ + from unittest.mock import MagicMock, patch from django.urls.base import reverse_lazy @@ -13,9 +14,9 @@ from ..mixins.new_auth_test_case import AuthTestCase -@capable_of('read_member') +@capable_of("read_member") def view_method_mock(request, *args, **kwargs): - response = {'args': args, 'kwargs': kwargs} + response = {"args": args, "kwargs": kwargs} return Response(response, status=200) @@ -27,70 +28,69 @@ class MemberGetDuckTestSuite(AuthTestCase): def test_academy_id_member_id_without_auth(self): """Test /academy/:id/member without auth""" - url = reverse_lazy('authenticate:academy_id_member_id', kwargs={'academy_id': 1, 'user_id_or_email': '1'}) + url = reverse_lazy("authenticate:academy_id_member_id", kwargs={"academy_id": 1, "user_id_or_email": "1"}) response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED, - }) + self.assertEqual( + json, + 
{ + "detail": "Authentication credentials were not provided.", + "status_code": status.HTTP_401_UNAUTHORIZED, + }, + ) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) def test_academy_id_member_id__without_capabilities(self): self.bc.request.set_headers(academy=1) model = self.bc.database.create(authenticate=True) - url = reverse_lazy('authenticate:academy_id_member_id', kwargs={'academy_id': 1, 'user_id_or_email': '1'}) + url = reverse_lazy("authenticate:academy_id_member_id", kwargs={"academy_id": 1, "user_id_or_email": "1"}) response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': "You (user: 1) don't have this capability: read_member for academy 1", - 'status_code': 403, - }) + self.assertEqual( + json, + { + "detail": "You (user: 1) don't have this capability: read_member for academy 1", + "status_code": 403, + }, + ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) self.assertEqual(self.all_cohort_time_slot_dict(), []) def test_academy_id_member_id__with_auth(self): for n in range(1, 4): self.bc.request.set_headers(academy=n) - model = self.bc.database.create(authenticate=True, capability='read_member', role='role', profile_academy=1) - url = reverse_lazy('authenticate:academy_id_member_id', - kwargs={ - 'academy_id': n, - 'user_id_or_email': f'{n}' - }) + model = self.bc.database.create(authenticate=True, capability="read_member", role="role", profile_academy=1) + url = reverse_lazy( + "authenticate:academy_id_member_id", kwargs={"academy_id": n, "user_id_or_email": f"{n}"} + ) response = self.client.get(url) json = response.json() - self.bc.check.partial_equality(json, {'academy': {'id': n}}) + self.bc.check.partial_equality(json, {"academy": {"id": n}}) self.assertEqual(response.status_code, status.HTTP_200_OK) """ 🔽🔽🔽 Check the param is being passed """ - @patch('breathecode.authenticate.views.MemberView.get', MagicMock(side_effect=view_method_mock)) + @patch("breathecode.authenticate.views.MemberView.get", MagicMock(side_effect=view_method_mock)) def test_academy_id_member_id__with_auth___mock_view(self): - model = self.bc.database.create(academy=3, - capability='read_member', - role='role', - profile_academy=[{ - 'academy_id': id - } for id in range(1, 4)]) + model = self.bc.database.create( + academy=3, capability="read_member", role="role", profile_academy=[{"academy_id": id} for id in range(1, 4)] + ) for n in range(1, 4): self.client.force_authenticate(model.user) self.bc.request.set_headers(academy=1) - url = reverse_lazy('authenticate:academy_id_member_id', - kwargs={ - 'academy_id': n, - 'user_id_or_email': f'{n}' - }) + url = reverse_lazy( + "authenticate:academy_id_member_id", kwargs={"academy_id": n, "user_id_or_email": f"{n}"} + ) response = self.client.get(url) json = response.json() - expected = {'args': [], 'kwargs': {'academy_id': n, 'user_id_or_email': f'{n}'}} + expected = {"args": [], "kwargs": {"academy_id": n, "user_id_or_email": f"{n}"}} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -104,63 +104,68 @@ class MemberPutDuckTestSuite(AuthTestCase): def test_academy_id_member_id_without_auth(self): """Test /academy/:id/member without auth""" - url = reverse_lazy('authenticate:academy_id_member_id', kwargs={'academy_id': 1, 'user_id_or_email': '1'}) + url = reverse_lazy("authenticate:academy_id_member_id", kwargs={"academy_id": 1, "user_id_or_email": "1"}) response = self.client.put(url) json = response.json() - self.assertEqual(json, { - 
'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED, - }) + self.assertEqual( + json, + { + "detail": "Authentication credentials were not provided.", + "status_code": status.HTTP_401_UNAUTHORIZED, + }, + ) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) def test_academy_id_member_id__without_capabilities(self): self.bc.request.set_headers(academy=1) model = self.bc.database.create(authenticate=True) - url = reverse_lazy('authenticate:academy_id_member_id', kwargs={'academy_id': 1, 'user_id_or_email': '1'}) + url = reverse_lazy("authenticate:academy_id_member_id", kwargs={"academy_id": 1, "user_id_or_email": "1"}) response = self.client.put(url) json = response.json() - self.assertEqual(json, { - 'detail': "You (user: 1) don't have this capability: crud_member for academy 1", - 'status_code': 403, - }) + self.assertEqual( + json, + { + "detail": "You (user: 1) don't have this capability: crud_member for academy 1", + "status_code": 403, + }, + ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) self.assertEqual(self.all_cohort_time_slot_dict(), []) def test_academy_id_member_id__with_auth(self): profile_academy = { - 'first_name': self.bc.fake.first_name(), - 'last_name': self.bc.fake.last_name(), - 'email': self.bc.fake.email(), - 'phone': self.bc.fake.phone_number() + "first_name": self.bc.fake.first_name(), + "last_name": self.bc.fake.last_name(), + "email": self.bc.fake.email(), + "phone": self.bc.fake.phone_number(), } for n in range(1, 4): self.bc.request.set_headers(academy=n) - model = self.bc.database.create(authenticate=True, - capability='crud_member', - role='role', - profile_academy=profile_academy) + model = self.bc.database.create( + authenticate=True, capability="crud_member", role="role", profile_academy=profile_academy + ) - url = reverse_lazy('authenticate:academy_id_member_id', kwargs={'academy_id': n, 'user_id_or_email': '1'}) + url = reverse_lazy("authenticate:academy_id_member_id", kwargs={"academy_id": n, "user_id_or_email": "1"}) response = self.client.put(url) json = response.json() - expected = {'role': ['This field is required.']} + expected = {"role": ["This field is required."]} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) def test_academy_id_member_id__with_wrong_required_fields(self): - model = self.bc.database.create(authenticate=True, capability='crud_member', role='role', profile_academy=1) + model = self.bc.database.create(authenticate=True, capability="crud_member", role="role", profile_academy=1) self.bc.request.set_headers(academy=1) - url = reverse_lazy('authenticate:academy_id_member_id', kwargs={'academy_id': 1, 'user_id_or_email': '1'}) + url = reverse_lazy("authenticate:academy_id_member_id", kwargs={"academy_id": 1, "user_id_or_email": "1"}) response = self.client.put(url) json = response.json() - expected = {'detail': 'email-not-found', 'status_code': 400} + expected = {"detail": "email-not-found", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -169,27 +174,22 @@ def test_academy_id_member_id__with_wrong_required_fields(self): 🔽🔽🔽 Check the param is being passed """ - @patch('breathecode.authenticate.views.MemberView.put', MagicMock(side_effect=view_method_mock)) + @patch("breathecode.authenticate.views.MemberView.put", MagicMock(side_effect=view_method_mock)) def test_academy_id_member_id__with_auth___mock_view(self): - 
model = self.bc.database.create(academy=3, - capability='read_member', - role='role', - profile_academy=[{ - 'academy_id': id - } for id in range(1, 4)]) + model = self.bc.database.create( + academy=3, capability="read_member", role="role", profile_academy=[{"academy_id": id} for id in range(1, 4)] + ) for n in range(1, 4): self.client.force_authenticate(model.user) self.bc.request.set_headers(academy=1) - url = reverse_lazy('authenticate:academy_id_member_id', - kwargs={ - 'academy_id': n, - 'user_id_or_email': f'{n}' - }) + url = reverse_lazy( + "authenticate:academy_id_member_id", kwargs={"academy_id": n, "user_id_or_email": f"{n}"} + ) response = self.client.put(url) json = response.json() - expected = {'args': [], 'kwargs': {'academy_id': n, 'user_id_or_email': f'{n}'}} + expected = {"args": [], "kwargs": {"academy_id": n, "user_id_or_email": f"{n}"}} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -203,40 +203,44 @@ class MemberDeleteDuckTestSuite(AuthTestCase): def test_academy_id_member_id_without_auth(self): """Test /academy/:id/member without auth""" - url = reverse_lazy('authenticate:academy_id_member_id', kwargs={'academy_id': 1, 'user_id_or_email': '1'}) + url = reverse_lazy("authenticate:academy_id_member_id", kwargs={"academy_id": 1, "user_id_or_email": "1"}) response = self.client.delete(url) json = response.json() - self.assertEqual(json, { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED, - }) + self.assertEqual( + json, + { + "detail": "Authentication credentials were not provided.", + "status_code": status.HTTP_401_UNAUTHORIZED, + }, + ) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) def test_academy_id_member_id__without_capabilities(self): self.bc.request.set_headers(academy=1) model = self.bc.database.create(authenticate=True) - url = reverse_lazy('authenticate:academy_id_member_id', kwargs={'academy_id': 1, 'user_id_or_email': '1'}) + url = reverse_lazy("authenticate:academy_id_member_id", kwargs={"academy_id": 1, "user_id_or_email": "1"}) response = self.client.delete(url) json = response.json() - self.assertEqual(json, { - 'detail': "You (user: 1) don't have this capability: crud_member for academy 1", - 'status_code': 403, - }) + self.assertEqual( + json, + { + "detail": "You (user: 1) don't have this capability: crud_member for academy 1", + "status_code": 403, + }, + ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) self.assertEqual(self.all_cohort_time_slot_dict(), []) def test_academy_id_member_id__with_auth(self): for n in range(1, 4): self.bc.request.set_headers(academy=n) - model = self.bc.database.create(authenticate=True, capability='crud_member', role='role', profile_academy=1) + model = self.bc.database.create(authenticate=True, capability="crud_member", role="role", profile_academy=1) - url = reverse_lazy('authenticate:academy_id_member_id', - kwargs={ - 'academy_id': n, - 'user_id_or_email': f'{n}' - }) + url = reverse_lazy( + "authenticate:academy_id_member_id", kwargs={"academy_id": n, "user_id_or_email": f"{n}"} + ) response = self.client.delete(url) expected = b'{"detail":"delete-is-forbidden","status_code":403}' @@ -247,27 +251,22 @@ def test_academy_id_member_id__with_auth(self): 🔽🔽🔽 Check the param is being passed """ - @patch('breathecode.authenticate.views.MemberView.delete', MagicMock(side_effect=view_method_mock)) + @patch("breathecode.authenticate.views.MemberView.delete", 
MagicMock(side_effect=view_method_mock)) def test_academy_id_member_id__with_auth___mock_view(self): - model = self.bc.database.create(academy=3, - capability='read_member', - role='role', - profile_academy=[{ - 'academy_id': id - } for id in range(1, 4)]) + model = self.bc.database.create( + academy=3, capability="read_member", role="role", profile_academy=[{"academy_id": id} for id in range(1, 4)] + ) for n in range(1, 4): self.client.force_authenticate(model.user) self.bc.request.set_headers(academy=1) - url = reverse_lazy('authenticate:academy_id_member_id', - kwargs={ - 'academy_id': n, - 'user_id_or_email': f'{n}' - }) + url = reverse_lazy( + "authenticate:academy_id_member_id", kwargs={"academy_id": n, "user_id_or_email": f"{n}"} + ) response = self.client.delete(url) json = response.json() - expected = {'args': [], 'kwargs': {'academy_id': n, 'user_id_or_email': f'{n}'}} + expected = {"args": [], "kwargs": {"academy_id": n, "user_id_or_email": f"{n}"}} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) diff --git a/breathecode/authenticate/tests/urls/tests_academy_invite_id.py b/breathecode/authenticate/tests/urls/tests_academy_invite_id.py index 072180895..8ece39fe1 100644 --- a/breathecode/authenticate/tests/urls/tests_academy_invite_id.py +++ b/breathecode/authenticate/tests/urls/tests_academy_invite_id.py @@ -1,6 +1,7 @@ """ This file just can contains duck tests refert to AcademyInviteView """ + from unittest.mock import MagicMock, patch from django.urls.base import reverse_lazy @@ -12,9 +13,9 @@ from ..mixins.new_auth_test_case import AuthTestCase -@capable_of('invite_resend') +@capable_of("invite_resend") def view_method_mock(request, *args, **kwargs): - response = {'args': args, 'kwargs': kwargs} + response = {"args": args, "kwargs": kwargs} return Response(response, status=200) @@ -26,40 +27,46 @@ class MemberGetDuckTestSuite(AuthTestCase): def test_duck_test__without_auth(self): """Test /academy/:id/member without auth""" - url = reverse_lazy('authenticate:academy_invite_id', kwargs={'invite_id': 1}) + url = reverse_lazy("authenticate:academy_invite_id", kwargs={"invite_id": 1}) response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED, - }) + self.assertEqual( + json, + { + "detail": "Authentication credentials were not provided.", + "status_code": status.HTTP_401_UNAUTHORIZED, + }, + ) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) def test_duck_test__without_capabilities(self): self.bc.request.set_headers(academy=1) model = self.bc.database.create(authenticate=True) - url = reverse_lazy('authenticate:academy_invite_id', kwargs={'invite_id': 1}) + url = reverse_lazy("authenticate:academy_invite_id", kwargs={"invite_id": 1}) response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': "You (user: 1) don't have this capability: read_invite for academy 1", - 'status_code': 403, - }) + self.assertEqual( + json, + { + "detail": "You (user: 1) don't have this capability: read_invite for academy 1", + "status_code": 403, + }, + ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) self.assertEqual(self.all_cohort_time_slot_dict(), []) def test_duck_test__with_auth(self): for n in range(1, 4): self.bc.request.set_headers(academy=n) - model = self.bc.database.create(authenticate=True, capability='read_invite', role='role', profile_academy=1) 
+ model = self.bc.database.create(authenticate=True, capability="read_invite", role="role", profile_academy=1) - url = reverse_lazy('authenticate:academy_invite_id', kwargs={'invite_id': 1}) + url = reverse_lazy("authenticate:academy_invite_id", kwargs={"invite_id": 1}) response = self.client.get(url) json = response.json() - expected = {'detail': 'user-invite-not-found', 'status_code': 404} + expected = {"detail": "user-invite-not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) @@ -68,23 +75,23 @@ def test_duck_test__with_auth(self): 🔽🔽🔽 Check the param is being passed """ - @patch('breathecode.authenticate.views.AcademyInviteView.get', MagicMock(side_effect=view_method_mock)) + @patch("breathecode.authenticate.views.AcademyInviteView.get", MagicMock(side_effect=view_method_mock)) def test_duck_test__with_auth___mock_view(self): - model = self.bc.database.create(academy=3, - capability='invite_resend', - role='role', - profile_academy=[{ - 'academy_id': id - } for id in range(1, 4)]) + model = self.bc.database.create( + academy=3, + capability="invite_resend", + role="role", + profile_academy=[{"academy_id": id} for id in range(1, 4)], + ) for n in range(1, 4): self.client.force_authenticate(model.user) self.bc.request.set_headers(academy=1) - url = reverse_lazy('authenticate:academy_invite_id', kwargs={'invite_id': n}) + url = reverse_lazy("authenticate:academy_invite_id", kwargs={"invite_id": n}) response = self.client.get(url) json = response.json() - expected = {'args': [], 'kwargs': {'academy_id': '1', 'invite_id': n}} + expected = {"args": [], "kwargs": {"academy_id": "1", "invite_id": n}} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -98,43 +105,48 @@ class MemberPutDuckTestSuite(AuthTestCase): def test_academy_id_member_id_without_auth(self): """Test /academy/:id/member without auth""" - url = reverse_lazy('authenticate:academy_invite_id', kwargs={'invite_id': 1}) + url = reverse_lazy("authenticate:academy_invite_id", kwargs={"invite_id": 1}) response = self.client.put(url) json = response.json() - self.assertEqual(json, { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED, - }) + self.assertEqual( + json, + { + "detail": "Authentication credentials were not provided.", + "status_code": status.HTTP_401_UNAUTHORIZED, + }, + ) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) def test_academy_id_member_id__without_capabilities(self): self.bc.request.set_headers(academy=1) model = self.bc.database.create(authenticate=True) - url = reverse_lazy('authenticate:academy_invite_id', kwargs={'invite_id': 1}) + url = reverse_lazy("authenticate:academy_invite_id", kwargs={"invite_id": 1}) response = self.client.put(url) json = response.json() - self.assertEqual(json, { - 'detail': "You (user: 1) don't have this capability: invite_resend for academy 1", - 'status_code': 403, - }) + self.assertEqual( + json, + { + "detail": "You (user: 1) don't have this capability: invite_resend for academy 1", + "status_code": 403, + }, + ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) self.assertEqual(self.all_cohort_time_slot_dict(), []) def test_academy_id_member_id__with_auth(self): for n in range(1, 4): self.bc.request.set_headers(academy=n) - model = self.bc.database.create(authenticate=True, - capability='invite_resend', - role='role', - profile_academy=1) + model = 
self.bc.database.create( + authenticate=True, capability="invite_resend", role="role", profile_academy=1 + ) - url = reverse_lazy('authenticate:academy_invite_id', kwargs={'invite_id': 1}) + url = reverse_lazy("authenticate:academy_invite_id", kwargs={"invite_id": 1}) response = self.client.put(url) json = response.json() - expected = {'detail': 'user-invite-not-found', 'status_code': 404} + expected = {"detail": "user-invite-not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) @@ -143,23 +155,23 @@ def test_academy_id_member_id__with_auth(self): 🔽🔽🔽 Check the param is being passed """ - @patch('breathecode.authenticate.views.AcademyInviteView.put', MagicMock(side_effect=view_method_mock)) + @patch("breathecode.authenticate.views.AcademyInviteView.put", MagicMock(side_effect=view_method_mock)) def test_academy_id_member_id__with_auth___mock_view(self): - model = self.bc.database.create(academy=3, - capability='invite_resend', - role='role', - profile_academy=[{ - 'academy_id': id - } for id in range(1, 4)]) + model = self.bc.database.create( + academy=3, + capability="invite_resend", + role="role", + profile_academy=[{"academy_id": id} for id in range(1, 4)], + ) for n in range(1, 4): self.client.force_authenticate(model.user) self.bc.request.set_headers(academy=1) - url = reverse_lazy('authenticate:academy_invite_id', kwargs={'invite_id': n}) + url = reverse_lazy("authenticate:academy_invite_id", kwargs={"invite_id": n}) response = self.client.put(url) json = response.json() - expected = {'args': [], 'kwargs': {'academy_id': '1', 'invite_id': n}} + expected = {"args": [], "kwargs": {"academy_id": "1", "invite_id": n}} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) diff --git a/breathecode/authenticate/tests/urls/tests_academy_member.py b/breathecode/authenticate/tests/urls/tests_academy_member.py index 4a3674b00..2fab6befa 100644 --- a/breathecode/authenticate/tests/urls/tests_academy_member.py +++ b/breathecode/authenticate/tests/urls/tests_academy_member.py @@ -1,6 +1,7 @@ """ Test cases for /academy/member """ + import os import urllib.parse from random import choice @@ -20,14 +21,14 @@ # the test have too must lines, that's split in many test suite PROFILE_ACADEMY_STATUS = [ - 'INVITED', - 'ACTIVE', + "INVITED", + "ACTIVE", ] -@capable_of('read_member') +@capable_of("read_member") def view_method_mock(request, *args, **kwargs): - response = {'args': args, 'kwargs': kwargs} + response = {"args": args, "kwargs": kwargs} return Response(response, status=200) @@ -40,62 +41,58 @@ def getrandbits(n): def format_profile_academy(self, profile_academy, role, academy): return { - 'academy': { - 'id': academy.id, - 'name': academy.name, - 'slug': academy.slug + "academy": {"id": academy.id, "name": academy.name, "slug": academy.slug}, + "address": profile_academy.address, + "created_at": self.datetime_to_iso(profile_academy.created_at), + "email": profile_academy.email, + "first_name": profile_academy.first_name, + "id": profile_academy.id, + "last_name": profile_academy.last_name, + "phone": profile_academy.phone, + "role": { + "id": role.slug, + "name": role.name, + "slug": role.slug, }, - 'address': profile_academy.address, - 'created_at': self.datetime_to_iso(profile_academy.created_at), - 'email': profile_academy.email, - 'first_name': profile_academy.first_name, - 'id': profile_academy.id, - 'last_name': profile_academy.last_name, - 'phone': profile_academy.phone, - 
'role': { - 'id': role.slug, - 'name': role.name, - 'slug': role.slug, + "status": profile_academy.status, + "user": { + "email": profile_academy.user.email, + "first_name": profile_academy.user.first_name, + "profile": None, + "id": profile_academy.user.id, + "last_name": profile_academy.user.last_name, }, - 'status': profile_academy.status, - 'user': { - 'email': profile_academy.user.email, - 'first_name': profile_academy.user.first_name, - 'profile': None, - 'id': profile_academy.user.id, - 'last_name': profile_academy.user.last_name - } } def generate_user_invite(data: dict) -> dict: return { - 'academy_id': None, - 'author_id': None, - 'cohort_id': None, - 'email': None, - 'first_name': None, - 'id': 0, - 'last_name': None, - 'phone': '', - 'role_id': None, - 'sent_at': None, - 'status': 'PENDING', - 'is_email_validated': False, - 'conversion_info': None, - 'has_marketing_consent': False, - 'event_slug': None, - 'asset_slug': None, - 'token': '', - 'process_message': '', - 'process_status': 'PENDING', - 'user_id': None, - 'city': None, - 'country': None, - 'latitude': None, - 'longitude': None, - 'email_quality': None, - 'email_status': None, + "academy_id": None, + "author_id": None, + "cohort_id": None, + "email": None, + "first_name": None, + "id": 0, + "last_name": None, + "phone": "", + "role_id": None, + "sent_at": None, + "status": "PENDING", + "is_email_validated": False, + "conversion_info": None, + "has_marketing_consent": False, + "event_slug": None, + "asset_slug": None, + "token": "", + "process_message": "", + "process_status": "PENDING", + "user_id": None, + "city": None, + "country": None, + "latitude": None, + "longitude": None, + "email_quality": None, + "email_status": None, **data, } @@ -106,22 +103,21 @@ class MemberSetOfDuckTestSuite(AuthTestCase): 🔽🔽🔽 GET check the param is being passed """ - @patch('breathecode.authenticate.views.MemberView.get', MagicMock(side_effect=view_method_mock)) + @patch("breathecode.authenticate.views.MemberView.get", MagicMock(side_effect=view_method_mock)) def test_academy_member__get__with_auth___mock_view(self): - profile_academies = [{'academy_id': id} for id in range(1, 4)] - model = self.bc.database.create(academy=3, - capability='read_member', - role='role', - profile_academy=profile_academies) + profile_academies = [{"academy_id": id} for id in range(1, 4)] + model = self.bc.database.create( + academy=3, capability="read_member", role="role", profile_academy=profile_academies + ) for n in range(1, 4): self.client.force_authenticate(model.user) - url = reverse_lazy('authenticate:academy_member') - response = self.client.get(url, headers={'academy': str(n)}) + url = reverse_lazy("authenticate:academy_member") + response = self.client.get(url, headers={"academy": str(n)}) json = response.json() - expected = {'args': [], 'kwargs': {'academy_id': str(n)}} + expected = {"args": [], "kwargs": {"academy_id": str(n)}} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -130,22 +126,21 @@ def test_academy_member__get__with_auth___mock_view(self): 🔽🔽🔽 POST check the param is being passed """ - @patch('breathecode.authenticate.views.MemberView.post', MagicMock(side_effect=view_method_mock)) + @patch("breathecode.authenticate.views.MemberView.post", MagicMock(side_effect=view_method_mock)) def test_academy_member__post__with_auth___mock_view(self): - profile_academies = [{'academy_id': id} for id in range(1, 4)] - model = self.bc.database.create(academy=3, - capability='read_member', - 
role='role', - profile_academy=profile_academies) + profile_academies = [{"academy_id": id} for id in range(1, 4)] + model = self.bc.database.create( + academy=3, capability="read_member", role="role", profile_academy=profile_academies + ) for n in range(1, 4): self.client.force_authenticate(model.user) - url = reverse_lazy('authenticate:academy_member') - response = self.client.post(url, headers={'academy': str(n)}) + url = reverse_lazy("authenticate:academy_member") + response = self.client.post(url, headers={"academy": str(n)}) json = response.json() - expected = {'args': [], 'kwargs': {'academy_id': str(n)}} + expected = {"args": [], "kwargs": {"academy_id": str(n)}} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -154,22 +149,21 @@ def test_academy_member__post__with_auth___mock_view(self): 🔽🔽🔽 DELETE check the param is being passed """ - @patch('breathecode.authenticate.views.MemberView.delete', MagicMock(side_effect=view_method_mock)) + @patch("breathecode.authenticate.views.MemberView.delete", MagicMock(side_effect=view_method_mock)) def test_academy_member__delete__with_auth___mock_view(self): - profile_academies = [{'academy_id': id} for id in range(1, 4)] - model = self.bc.database.create(academy=3, - capability='read_member', - role='role', - profile_academy=profile_academies) + profile_academies = [{"academy_id": id} for id in range(1, 4)] + model = self.bc.database.create( + academy=3, capability="read_member", role="role", profile_academy=profile_academies + ) for n in range(1, 4): self.client.force_authenticate(model.user) - url = reverse_lazy('authenticate:academy_member') - response = self.client.delete(url, headers={'academy': str(n)}) + url = reverse_lazy("authenticate:academy_member") + response = self.client.delete(url, headers={"academy": str(n)}) json = response.json() - expected = {'args': [], 'kwargs': {'academy_id': str(n)}} + expected = {"args": [], "kwargs": {"academy_id": str(n)}} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -182,78 +176,83 @@ class MemberGetTestSuite(AuthTestCase): def test_academy_member_without_auth(self): """Test /academy/member without auth""" - url = reverse_lazy('authenticate:academy_member') + url = reverse_lazy("authenticate:academy_member") response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED, - }) + self.assertEqual( + json, + { + "detail": "Authentication credentials were not provided.", + "status_code": status.HTTP_401_UNAUTHORIZED, + }, + ) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) def test_academy_member_without_capability(self): self.bc.database.create(authenticate=True) - url = reverse_lazy('authenticate:academy_member') - response = self.client.get(url, headers={'academy': 1}) + url = reverse_lazy("authenticate:academy_member") + response = self.client.get(url, headers={"academy": 1}) json = response.json() - self.assertEqual(json, { - 'detail': "You (user: 1) don't have this capability: read_member " - 'for academy 1', - 'status_code': 403 - }) + self.assertEqual( + json, + {"detail": "You (user: 1) don't have this capability: read_member " "for academy 1", "status_code": 403}, + ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) def test_academy_member(self): - role = 'hitman' - model = self.bc.database.create(authenticate=True, role=role, 
capability='read_member', profile_academy=True) - url = reverse_lazy('authenticate:academy_member') - response = self.client.get(url, headers={'academy': 1}) + role = "hitman" + model = self.bc.database.create(authenticate=True, role=role, capability="read_member", profile_academy=True) + url = reverse_lazy("authenticate:academy_member") + response = self.client.get(url, headers={"academy": 1}) json = response.json() - expected = [{ - 'academy': { - 'id': model['profile_academy'].academy.id, - 'name': model['profile_academy'].academy.name, - 'slug': model['profile_academy'].academy.slug - }, - 'address': model['profile_academy'].address, - 'created_at': self.datetime_to_iso(model['profile_academy'].created_at), - 'email': model['profile_academy'].email, - 'first_name': model['profile_academy'].first_name, - 'id': model['profile_academy'].id, - 'last_name': model['profile_academy'].last_name, - 'phone': model['profile_academy'].phone, - 'role': { - 'id': 'hitman', - 'name': 'hitman', - 'slug': 'hitman' - }, - 'status': 'INVITED', - 'user': { - 'email': model['profile_academy'].user.email, - 'first_name': model['profile_academy'].user.first_name, - 'profile': None, - 'id': model['profile_academy'].user.id, - 'last_name': model['profile_academy'].user.last_name + expected = [ + { + "academy": { + "id": model["profile_academy"].academy.id, + "name": model["profile_academy"].academy.name, + "slug": model["profile_academy"].academy.slug, + }, + "address": model["profile_academy"].address, + "created_at": self.datetime_to_iso(model["profile_academy"].created_at), + "email": model["profile_academy"].email, + "first_name": model["profile_academy"].first_name, + "id": model["profile_academy"].id, + "last_name": model["profile_academy"].last_name, + "phone": model["profile_academy"].phone, + "role": {"id": "hitman", "name": "hitman", "slug": "hitman"}, + "status": "INVITED", + "user": { + "email": model["profile_academy"].user.email, + "first_name": model["profile_academy"].user.first_name, + "profile": None, + "id": model["profile_academy"].user.id, + "last_name": model["profile_academy"].user.last_name, + }, } - }] + ] self.assertEqual(json, expected) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [{ - 'academy_id': 1, - 'address': None, - 'email': None, - 'first_name': None, - 'id': 1, - 'last_name': None, - 'phone': '', - 'role_id': 'hitman', - 'status': 'INVITED', - 'user_id': 1 - }]) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + { + "academy_id": 1, + "address": None, + "email": None, + "first_name": None, + "id": 1, + "last_name": None, + "phone": "", + "role_id": "hitman", + "status": "INVITED", + "user_id": 1, + } + ], + ) """ 🔽🔽🔽 GET with profile @@ -261,58 +260,57 @@ def test_academy_member(self): def test_academy_member__with_profile(self): - role = 'hitman' - model = self.bc.database.create(authenticate=True, - role=role, - capability='read_member', - profile_academy=True, - profile=True) - url = reverse_lazy('authenticate:academy_member') - response = self.client.get(url, headers={'academy': 1}) + role = "hitman" + model = self.bc.database.create( + authenticate=True, role=role, capability="read_member", profile_academy=True, profile=True + ) + url = reverse_lazy("authenticate:academy_member") + response = self.client.get(url, headers={"academy": 1}) json = response.json() - expected = [{ - 'academy': { - 'id': model['profile_academy'].academy.id, - 'name': model['profile_academy'].academy.name, - 'slug': 
model['profile_academy'].academy.slug - }, - 'address': model['profile_academy'].address, - 'created_at': self.datetime_to_iso(model['profile_academy'].created_at), - 'email': model['profile_academy'].email, - 'first_name': model['profile_academy'].first_name, - 'id': model['profile_academy'].id, - 'last_name': model['profile_academy'].last_name, - 'phone': model['profile_academy'].phone, - 'role': { - 'id': 'hitman', - 'name': 'hitman', - 'slug': 'hitman' - }, - 'status': 'INVITED', - 'user': { - 'email': model['profile_academy'].user.email, - 'first_name': model['profile_academy'].user.first_name, - 'profile': { - 'avatar_url': None + expected = [ + { + "academy": { + "id": model["profile_academy"].academy.id, + "name": model["profile_academy"].academy.name, + "slug": model["profile_academy"].academy.slug, + }, + "address": model["profile_academy"].address, + "created_at": self.datetime_to_iso(model["profile_academy"].created_at), + "email": model["profile_academy"].email, + "first_name": model["profile_academy"].first_name, + "id": model["profile_academy"].id, + "last_name": model["profile_academy"].last_name, + "phone": model["profile_academy"].phone, + "role": {"id": "hitman", "name": "hitman", "slug": "hitman"}, + "status": "INVITED", + "user": { + "email": model["profile_academy"].user.email, + "first_name": model["profile_academy"].user.first_name, + "profile": {"avatar_url": None}, + "id": model["profile_academy"].user.id, + "last_name": model["profile_academy"].user.last_name, }, - 'id': model['profile_academy'].user.id, - 'last_name': model['profile_academy'].user.last_name } - }] + ] self.assertEqual(json, expected) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [{ - 'academy_id': 1, - 'address': None, - 'email': None, - 'first_name': None, - 'id': 1, - 'last_name': None, - 'phone': '', - 'role_id': 'hitman', - 'status': 'INVITED', - 'user_id': 1 - }]) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + { + "academy_id": 1, + "address": None, + "email": None, + "first_name": None, + "id": 1, + "last_name": None, + "phone": "", + "role_id": "hitman", + "status": "INVITED", + "user_id": 1, + } + ], + ) """ 🔽🔽🔽 GET with github @@ -320,56 +318,57 @@ def test_academy_member__with_profile(self): def test_academy_member__with_github(self): - role = 'hitman' - model = self.bc.database.create(authenticate=True, - role=role, - capability='read_member', - profile_academy=True, - credentials_github=True) - url = reverse_lazy('authenticate:academy_member') - response = self.client.get(url, headers={'academy': 1}) + role = "hitman" + model = self.bc.database.create( + authenticate=True, role=role, capability="read_member", profile_academy=True, credentials_github=True + ) + url = reverse_lazy("authenticate:academy_member") + response = self.client.get(url, headers={"academy": 1}) json = response.json() - expected = [{ - 'academy': { - 'id': model['profile_academy'].academy.id, - 'name': model['profile_academy'].academy.name, - 'slug': model['profile_academy'].academy.slug - }, - 'address': model['profile_academy'].address, - 'created_at': self.datetime_to_iso(model['profile_academy'].created_at), - 'email': model['profile_academy'].email, - 'first_name': model['profile_academy'].first_name, - 'id': model['profile_academy'].id, - 'last_name': model['profile_academy'].last_name, - 'phone': model['profile_academy'].phone, - 'role': { - 'id': 'hitman', - 'name': 'hitman', - 'slug': 'hitman' - }, - 'status': 'INVITED', - 'user': { - 
'email': model['profile_academy'].user.email, - 'first_name': model['profile_academy'].user.first_name, - 'profile': None, - 'id': model['profile_academy'].user.id, - 'last_name': model['profile_academy'].user.last_name + expected = [ + { + "academy": { + "id": model["profile_academy"].academy.id, + "name": model["profile_academy"].academy.name, + "slug": model["profile_academy"].academy.slug, + }, + "address": model["profile_academy"].address, + "created_at": self.datetime_to_iso(model["profile_academy"].created_at), + "email": model["profile_academy"].email, + "first_name": model["profile_academy"].first_name, + "id": model["profile_academy"].id, + "last_name": model["profile_academy"].last_name, + "phone": model["profile_academy"].phone, + "role": {"id": "hitman", "name": "hitman", "slug": "hitman"}, + "status": "INVITED", + "user": { + "email": model["profile_academy"].user.email, + "first_name": model["profile_academy"].user.first_name, + "profile": None, + "id": model["profile_academy"].user.id, + "last_name": model["profile_academy"].user.last_name, + }, } - }] + ] self.assertEqual(json, expected) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [{ - 'academy_id': 1, - 'address': None, - 'email': None, - 'first_name': None, - 'id': 1, - 'last_name': None, - 'phone': '', - 'role_id': 'hitman', - 'status': 'INVITED', - 'user_id': 1 - }]) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + { + "academy_id": 1, + "address": None, + "email": None, + "first_name": None, + "id": 1, + "last_name": None, + "phone": "", + "role_id": "hitman", + "status": "INVITED", + "user_id": 1, + } + ], + ) """ 🔽🔽🔽 GET query like @@ -377,87 +376,90 @@ def test_academy_member__with_github(self): def test_academy_member_query_like_full_name_status_active(self): - role = 'hitman' - base = self.bc.database.create(authenticate=True, role=role, capability='read_member') + role = "hitman" + base = self.bc.database.create(authenticate=True, role=role, capability="read_member") profile_academy_kwargs = { - 'email': 'b@b.com', - 'first_name': 'Rene', - 'last_name': 'Descartes', - 'status': 'ACTIVE' + "email": "b@b.com", + "first_name": "Rene", + "last_name": "Descartes", + "status": "ACTIVE", } profile_academy_kwargs_2 = { - 'email': 'a@a.com', - 'first_name': 'Michael', - 'last_name': 'Jordan', - 'status': 'ACTIVE' + "email": "a@a.com", + "first_name": "Michael", + "last_name": "Jordan", + "status": "ACTIVE", } - model_1 = self.bc.database.create(profile_academy=True, - profile_academy_kwargs=profile_academy_kwargs, - models=base) - model_2 = self.bc.database.create(profile_academy=True, - profile_academy_kwargs=profile_academy_kwargs_2, - models=base) + model_1 = self.bc.database.create( + profile_academy=True, profile_academy_kwargs=profile_academy_kwargs, models=base + ) + model_2 = self.bc.database.create( + profile_academy=True, profile_academy_kwargs=profile_academy_kwargs_2, models=base + ) - base_url = reverse_lazy('authenticate:academy_member') - url = f'{base_url}?like=Rene Descartes' + base_url = reverse_lazy("authenticate:academy_member") + url = f"{base_url}?like=Rene Descartes" - response = self.client.get(url, headers={'academy': 1}) + response = self.client.get(url, headers={"academy": 1}) json = response.json() - expected = [{ - 'academy': { - 'id': model_1['profile_academy'].academy.id, - 'name': model_1['profile_academy'].academy.name, - 'slug': model_1['profile_academy'].academy.slug - }, - 'address': model_1['profile_academy'].address, 
- 'created_at': self.datetime_to_iso(model_1['profile_academy'].created_at), - 'email': model_1['profile_academy'].email, - 'first_name': model_1['profile_academy'].first_name, - 'id': model_1['profile_academy'].id, - 'last_name': model_1['profile_academy'].last_name, - 'phone': model_1['profile_academy'].phone, - 'role': { - 'id': 'hitman', - 'name': 'hitman', - 'slug': 'hitman' - }, - 'status': 'ACTIVE', - 'user': { - 'email': model_1['profile_academy'].user.email, - 'first_name': model_1['profile_academy'].user.first_name, - 'profile': None, - 'id': model_1['profile_academy'].user.id, - 'last_name': model_1['profile_academy'].user.last_name + expected = [ + { + "academy": { + "id": model_1["profile_academy"].academy.id, + "name": model_1["profile_academy"].academy.name, + "slug": model_1["profile_academy"].academy.slug, + }, + "address": model_1["profile_academy"].address, + "created_at": self.datetime_to_iso(model_1["profile_academy"].created_at), + "email": model_1["profile_academy"].email, + "first_name": model_1["profile_academy"].first_name, + "id": model_1["profile_academy"].id, + "last_name": model_1["profile_academy"].last_name, + "phone": model_1["profile_academy"].phone, + "role": {"id": "hitman", "name": "hitman", "slug": "hitman"}, + "status": "ACTIVE", + "user": { + "email": model_1["profile_academy"].user.email, + "first_name": model_1["profile_academy"].user.first_name, + "profile": None, + "id": model_1["profile_academy"].user.id, + "last_name": model_1["profile_academy"].user.last_name, + }, } - }] + ] self.assertEqual(json, expected) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), - [{ - 'academy_id': 1, - 'address': model_1['profile_academy'].address, - 'email': model_1['profile_academy'].email, - 'first_name': model_1['profile_academy'].first_name, - 'id': 1, - 'last_name': model_1['profile_academy'].last_name, - 'phone': model_1['profile_academy'].phone, - 'role_id': 'hitman', - 'status': 'ACTIVE', - 'user_id': 1 - }, { - 'academy_id': 2, - 'address': model_2['profile_academy'].address, - 'email': model_2['profile_academy'].email, - 'first_name': model_2['profile_academy'].first_name, - 'id': 2, - 'last_name': model_2['profile_academy'].last_name, - 'phone': model_2['profile_academy'].phone, - 'role_id': 'hitman', - 'status': 'ACTIVE', - 'user_id': 1 - }]) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + { + "academy_id": 1, + "address": model_1["profile_academy"].address, + "email": model_1["profile_academy"].email, + "first_name": model_1["profile_academy"].first_name, + "id": 1, + "last_name": model_1["profile_academy"].last_name, + "phone": model_1["profile_academy"].phone, + "role_id": "hitman", + "status": "ACTIVE", + "user_id": 1, + }, + { + "academy_id": 2, + "address": model_2["profile_academy"].address, + "email": model_2["profile_academy"].email, + "first_name": model_2["profile_academy"].first_name, + "id": 2, + "last_name": model_2["profile_academy"].last_name, + "phone": model_2["profile_academy"].phone, + "role_id": "hitman", + "status": "ACTIVE", + "user_id": 1, + }, + ], + ) """ 🔽🔽🔽 GET query allow_students @@ -465,691 +467,715 @@ def test_academy_member_query_like_full_name_status_active(self): def test_academy_member_query_allow_students(self): - role = 'student' - base = self.bc.database.create(authenticate=True, role=role, capability='read_member') + role = "student" + base = self.bc.database.create(authenticate=True, role=role, capability="read_member") profile_academy_kwargs = { - 
'email': 'b@b.com', - 'first_name': 'Rene', - 'last_name': 'Descartes', - 'status': 'ACTIVE' + "email": "b@b.com", + "first_name": "Rene", + "last_name": "Descartes", + "status": "ACTIVE", } - model_1 = self.bc.database.create(profile_academy=True, - profile_academy_kwargs=profile_academy_kwargs, - models=base) + model_1 = self.bc.database.create( + profile_academy=True, profile_academy_kwargs=profile_academy_kwargs, models=base + ) - base_url = reverse_lazy('authenticate:academy_member') - url = f'{base_url}?include=student' + base_url = reverse_lazy("authenticate:academy_member") + url = f"{base_url}?include=student" - response = self.client.get(url, headers={'academy': 1}) + response = self.client.get(url, headers={"academy": 1}) json = response.json() - expected = [{ - 'academy': { - 'id': model_1['profile_academy'].academy.id, - 'name': model_1['profile_academy'].academy.name, - 'slug': model_1['profile_academy'].academy.slug - }, - 'address': model_1['profile_academy'].address, - 'created_at': self.datetime_to_iso(model_1['profile_academy'].created_at), - 'email': model_1['profile_academy'].email, - 'first_name': model_1['profile_academy'].first_name, - 'id': model_1['profile_academy'].id, - 'last_name': model_1['profile_academy'].last_name, - 'phone': model_1['profile_academy'].phone, - 'role': { - 'id': 'student', - 'name': 'student', - 'slug': 'student' - }, - 'status': 'ACTIVE', - 'user': { - 'email': model_1['profile_academy'].user.email, - 'first_name': model_1['profile_academy'].user.first_name, - 'profile': None, - 'id': model_1['profile_academy'].user.id, - 'last_name': model_1['profile_academy'].user.last_name + expected = [ + { + "academy": { + "id": model_1["profile_academy"].academy.id, + "name": model_1["profile_academy"].academy.name, + "slug": model_1["profile_academy"].academy.slug, + }, + "address": model_1["profile_academy"].address, + "created_at": self.datetime_to_iso(model_1["profile_academy"].created_at), + "email": model_1["profile_academy"].email, + "first_name": model_1["profile_academy"].first_name, + "id": model_1["profile_academy"].id, + "last_name": model_1["profile_academy"].last_name, + "phone": model_1["profile_academy"].phone, + "role": {"id": "student", "name": "student", "slug": "student"}, + "status": "ACTIVE", + "user": { + "email": model_1["profile_academy"].user.email, + "first_name": model_1["profile_academy"].user.first_name, + "profile": None, + "id": model_1["profile_academy"].user.id, + "last_name": model_1["profile_academy"].user.last_name, + }, } - }] + ] self.assertEqual(json, expected) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), - [{ - 'academy_id': 1, - 'address': model_1['profile_academy'].address, - 'email': model_1['profile_academy'].email, - 'first_name': model_1['profile_academy'].first_name, - 'id': 1, - 'last_name': model_1['profile_academy'].last_name, - 'phone': model_1['profile_academy'].phone, - 'role_id': 'student', - 'status': 'ACTIVE', - 'user_id': 1 - }]) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + { + "academy_id": 1, + "address": model_1["profile_academy"].address, + "email": model_1["profile_academy"].email, + "first_name": model_1["profile_academy"].first_name, + "id": 1, + "last_name": model_1["profile_academy"].last_name, + "phone": model_1["profile_academy"].phone, + "role_id": "student", + "status": "ACTIVE", + "user_id": 1, + } + ], + ) def test_academy_member_query_like_first_name_status_active(self): - role = 'hitman' - base = 
self.bc.database.create(authenticate=True, role=role, capability='read_member') + role = "hitman" + base = self.bc.database.create(authenticate=True, role=role, capability="read_member") profile_academy_kwargs = { - 'email': 'b@b.com', - 'first_name': 'Rene', - 'last_name': 'Descartes', - 'status': 'ACTIVE' + "email": "b@b.com", + "first_name": "Rene", + "last_name": "Descartes", + "status": "ACTIVE", } profile_academy_kwargs_2 = { - 'email': 'a@a.com', - 'first_name': 'Michael', - 'last_name': 'Jordan', - 'status': 'ACTIVE' + "email": "a@a.com", + "first_name": "Michael", + "last_name": "Jordan", + "status": "ACTIVE", } - model_1 = self.bc.database.create(profile_academy=True, - profile_academy_kwargs=profile_academy_kwargs, - models=base) - model_2 = self.bc.database.create(profile_academy=True, - profile_academy_kwargs=profile_academy_kwargs_2, - models=base) + model_1 = self.bc.database.create( + profile_academy=True, profile_academy_kwargs=profile_academy_kwargs, models=base + ) + model_2 = self.bc.database.create( + profile_academy=True, profile_academy_kwargs=profile_academy_kwargs_2, models=base + ) - base_url = reverse_lazy('authenticate:academy_member') - url = f'{base_url}?like=Rene' + base_url = reverse_lazy("authenticate:academy_member") + url = f"{base_url}?like=Rene" - response = self.client.get(url, headers={'academy': 1}) + response = self.client.get(url, headers={"academy": 1}) json = response.json() - expected = [{ - 'academy': { - 'id': model_1['profile_academy'].academy.id, - 'name': model_1['profile_academy'].academy.name, - 'slug': model_1['profile_academy'].academy.slug - }, - 'address': model_1['profile_academy'].address, - 'created_at': self.datetime_to_iso(model_1['profile_academy'].created_at), - 'email': model_1['profile_academy'].email, - 'first_name': model_1['profile_academy'].first_name, - 'id': model_1['profile_academy'].id, - 'last_name': model_1['profile_academy'].last_name, - 'phone': model_1['profile_academy'].phone, - 'role': { - 'id': 'hitman', - 'name': 'hitman', - 'slug': 'hitman' - }, - 'status': 'ACTIVE', - 'user': { - 'email': model_1['profile_academy'].user.email, - 'first_name': model_1['profile_academy'].user.first_name, - 'profile': None, - 'id': model_1['profile_academy'].user.id, - 'last_name': model_1['profile_academy'].user.last_name + expected = [ + { + "academy": { + "id": model_1["profile_academy"].academy.id, + "name": model_1["profile_academy"].academy.name, + "slug": model_1["profile_academy"].academy.slug, + }, + "address": model_1["profile_academy"].address, + "created_at": self.datetime_to_iso(model_1["profile_academy"].created_at), + "email": model_1["profile_academy"].email, + "first_name": model_1["profile_academy"].first_name, + "id": model_1["profile_academy"].id, + "last_name": model_1["profile_academy"].last_name, + "phone": model_1["profile_academy"].phone, + "role": {"id": "hitman", "name": "hitman", "slug": "hitman"}, + "status": "ACTIVE", + "user": { + "email": model_1["profile_academy"].user.email, + "first_name": model_1["profile_academy"].user.first_name, + "profile": None, + "id": model_1["profile_academy"].user.id, + "last_name": model_1["profile_academy"].user.last_name, + }, } - }] + ] self.assertEqual(json, expected) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), - [{ - 'academy_id': 1, - 'address': model_1['profile_academy'].address, - 'email': model_1['profile_academy'].email, - 'first_name': model_1['profile_academy'].first_name, - 'id': 1, - 'last_name': 
model_1['profile_academy'].last_name, - 'phone': model_1['profile_academy'].phone, - 'role_id': 'hitman', - 'status': 'ACTIVE', - 'user_id': 1 - }, { - 'academy_id': 2, - 'address': model_2['profile_academy'].address, - 'email': model_2['profile_academy'].email, - 'first_name': model_2['profile_academy'].first_name, - 'id': 2, - 'last_name': model_2['profile_academy'].last_name, - 'phone': model_2['profile_academy'].phone, - 'role_id': 'hitman', - 'status': 'ACTIVE', - 'user_id': 1 - }]) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + { + "academy_id": 1, + "address": model_1["profile_academy"].address, + "email": model_1["profile_academy"].email, + "first_name": model_1["profile_academy"].first_name, + "id": 1, + "last_name": model_1["profile_academy"].last_name, + "phone": model_1["profile_academy"].phone, + "role_id": "hitman", + "status": "ACTIVE", + "user_id": 1, + }, + { + "academy_id": 2, + "address": model_2["profile_academy"].address, + "email": model_2["profile_academy"].email, + "first_name": model_2["profile_academy"].first_name, + "id": 2, + "last_name": model_2["profile_academy"].last_name, + "phone": model_2["profile_academy"].phone, + "role_id": "hitman", + "status": "ACTIVE", + "user_id": 1, + }, + ], + ) def test_academy_member_query_like_last_name_status_active(self): - role = 'hitman' - base = self.bc.database.create(authenticate=True, role=role, capability='read_member') + role = "hitman" + base = self.bc.database.create(authenticate=True, role=role, capability="read_member") profile_academy_kwargs = { - 'email': 'b@b.com', - 'first_name': 'Rene', - 'last_name': 'Descartes', - 'status': 'ACTIVE' + "email": "b@b.com", + "first_name": "Rene", + "last_name": "Descartes", + "status": "ACTIVE", } profile_academy_kwargs_2 = { - 'email': 'a@a.com', - 'first_name': 'Michael', - 'last_name': 'Jordan', - 'status': 'ACTIVE' + "email": "a@a.com", + "first_name": "Michael", + "last_name": "Jordan", + "status": "ACTIVE", } - model_1 = self.bc.database.create(profile_academy=True, - profile_academy_kwargs=profile_academy_kwargs, - models=base) - model_2 = self.bc.database.create(profile_academy=True, - profile_academy_kwargs=profile_academy_kwargs_2, - models=base) + model_1 = self.bc.database.create( + profile_academy=True, profile_academy_kwargs=profile_academy_kwargs, models=base + ) + model_2 = self.bc.database.create( + profile_academy=True, profile_academy_kwargs=profile_academy_kwargs_2, models=base + ) - base_url = reverse_lazy('authenticate:academy_member') - url = f'{base_url}?like=Descartes' + base_url = reverse_lazy("authenticate:academy_member") + url = f"{base_url}?like=Descartes" - response = self.client.get(url, headers={'academy': 1}) + response = self.client.get(url, headers={"academy": 1}) json = response.json() - expected = [{ - 'academy': { - 'id': model_1['profile_academy'].academy.id, - 'name': model_1['profile_academy'].academy.name, - 'slug': model_1['profile_academy'].academy.slug - }, - 'address': model_1['profile_academy'].address, - 'created_at': self.datetime_to_iso(model_1['profile_academy'].created_at), - 'email': model_1['profile_academy'].email, - 'first_name': model_1['profile_academy'].first_name, - 'id': model_1['profile_academy'].id, - 'last_name': model_1['profile_academy'].last_name, - 'phone': model_1['profile_academy'].phone, - 'role': { - 'id': 'hitman', - 'name': 'hitman', - 'slug': 'hitman' - }, - 'status': 'ACTIVE', - 'user': { - 'email': model_1['profile_academy'].user.email, - 'first_name': 
model_1['profile_academy'].user.first_name, - 'profile': None, - 'id': model_1['profile_academy'].user.id, - 'last_name': model_1['profile_academy'].user.last_name + expected = [ + { + "academy": { + "id": model_1["profile_academy"].academy.id, + "name": model_1["profile_academy"].academy.name, + "slug": model_1["profile_academy"].academy.slug, + }, + "address": model_1["profile_academy"].address, + "created_at": self.datetime_to_iso(model_1["profile_academy"].created_at), + "email": model_1["profile_academy"].email, + "first_name": model_1["profile_academy"].first_name, + "id": model_1["profile_academy"].id, + "last_name": model_1["profile_academy"].last_name, + "phone": model_1["profile_academy"].phone, + "role": {"id": "hitman", "name": "hitman", "slug": "hitman"}, + "status": "ACTIVE", + "user": { + "email": model_1["profile_academy"].user.email, + "first_name": model_1["profile_academy"].user.first_name, + "profile": None, + "id": model_1["profile_academy"].user.id, + "last_name": model_1["profile_academy"].user.last_name, + }, } - }] + ] self.assertEqual(json, expected) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), - [{ - 'academy_id': 1, - 'address': model_1['profile_academy'].address, - 'email': model_1['profile_academy'].email, - 'first_name': model_1['profile_academy'].first_name, - 'id': 1, - 'last_name': model_1['profile_academy'].last_name, - 'phone': model_1['profile_academy'].phone, - 'role_id': 'hitman', - 'status': 'ACTIVE', - 'user_id': 1 - }, { - 'academy_id': 2, - 'address': model_2['profile_academy'].address, - 'email': model_2['profile_academy'].email, - 'first_name': model_2['profile_academy'].first_name, - 'id': 2, - 'last_name': model_2['profile_academy'].last_name, - 'phone': model_2['profile_academy'].phone, - 'role_id': 'hitman', - 'status': 'ACTIVE', - 'user_id': 1 - }]) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + { + "academy_id": 1, + "address": model_1["profile_academy"].address, + "email": model_1["profile_academy"].email, + "first_name": model_1["profile_academy"].first_name, + "id": 1, + "last_name": model_1["profile_academy"].last_name, + "phone": model_1["profile_academy"].phone, + "role_id": "hitman", + "status": "ACTIVE", + "user_id": 1, + }, + { + "academy_id": 2, + "address": model_2["profile_academy"].address, + "email": model_2["profile_academy"].email, + "first_name": model_2["profile_academy"].first_name, + "id": 2, + "last_name": model_2["profile_academy"].last_name, + "phone": model_2["profile_academy"].phone, + "role_id": "hitman", + "status": "ACTIVE", + "user_id": 1, + }, + ], + ) def test_academy_member_query_like_email_status_active(self): - role = 'hitman' - base = self.bc.database.create(authenticate=True, role=role, capability='read_member') + role = "hitman" + base = self.bc.database.create(authenticate=True, role=role, capability="read_member") profile_academy_kwargs = { - 'email': 'b@b.com', - 'first_name': 'Rene', - 'last_name': 'Descartes', - 'status': 'ACTIVE' + "email": "b@b.com", + "first_name": "Rene", + "last_name": "Descartes", + "status": "ACTIVE", } profile_academy_kwargs_2 = { - 'email': 'a@a.com', - 'first_name': 'Michael', - 'last_name': 'Jordan', - 'status': 'ACTIVE' + "email": "a@a.com", + "first_name": "Michael", + "last_name": "Jordan", + "status": "ACTIVE", } - model_1 = self.bc.database.create(profile_academy=True, - profile_academy_kwargs=profile_academy_kwargs, - models=base) - model_2 = self.bc.database.create(profile_academy=True, - 
profile_academy_kwargs=profile_academy_kwargs_2, - models=base) + model_1 = self.bc.database.create( + profile_academy=True, profile_academy_kwargs=profile_academy_kwargs, models=base + ) + model_2 = self.bc.database.create( + profile_academy=True, profile_academy_kwargs=profile_academy_kwargs_2, models=base + ) - base_url = reverse_lazy('authenticate:academy_member') - url = f'{base_url}?like=b@b.com' + base_url = reverse_lazy("authenticate:academy_member") + url = f"{base_url}?like=b@b.com" - response = self.client.get(url, headers={'academy': 1}) + response = self.client.get(url, headers={"academy": 1}) json = response.json() - expected = [{ - 'academy': { - 'id': model_1['profile_academy'].academy.id, - 'name': model_1['profile_academy'].academy.name, - 'slug': model_1['profile_academy'].academy.slug - }, - 'address': model_1['profile_academy'].address, - 'created_at': self.datetime_to_iso(model_1['profile_academy'].created_at), - 'email': model_1['profile_academy'].email, - 'first_name': model_1['profile_academy'].first_name, - 'id': model_1['profile_academy'].id, - 'last_name': model_1['profile_academy'].last_name, - 'phone': model_1['profile_academy'].phone, - 'role': { - 'id': 'hitman', - 'name': 'hitman', - 'slug': 'hitman' - }, - 'status': 'ACTIVE', - 'user': { - 'email': model_1['profile_academy'].user.email, - 'first_name': model_1['profile_academy'].user.first_name, - 'profile': None, - 'id': model_1['profile_academy'].user.id, - 'last_name': model_1['profile_academy'].user.last_name + expected = [ + { + "academy": { + "id": model_1["profile_academy"].academy.id, + "name": model_1["profile_academy"].academy.name, + "slug": model_1["profile_academy"].academy.slug, + }, + "address": model_1["profile_academy"].address, + "created_at": self.datetime_to_iso(model_1["profile_academy"].created_at), + "email": model_1["profile_academy"].email, + "first_name": model_1["profile_academy"].first_name, + "id": model_1["profile_academy"].id, + "last_name": model_1["profile_academy"].last_name, + "phone": model_1["profile_academy"].phone, + "role": {"id": "hitman", "name": "hitman", "slug": "hitman"}, + "status": "ACTIVE", + "user": { + "email": model_1["profile_academy"].user.email, + "first_name": model_1["profile_academy"].user.first_name, + "profile": None, + "id": model_1["profile_academy"].user.id, + "last_name": model_1["profile_academy"].user.last_name, + }, } - }] + ] self.assertEqual(json, expected) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), - [{ - 'academy_id': 1, - 'address': model_1['profile_academy'].address, - 'email': model_1['profile_academy'].email, - 'first_name': model_1['profile_academy'].first_name, - 'id': 1, - 'last_name': model_1['profile_academy'].last_name, - 'phone': model_1['profile_academy'].phone, - 'role_id': 'hitman', - 'status': 'ACTIVE', - 'user_id': 1 - }, { - 'academy_id': 2, - 'address': model_2['profile_academy'].address, - 'email': model_2['profile_academy'].email, - 'first_name': model_2['profile_academy'].first_name, - 'id': 2, - 'last_name': model_2['profile_academy'].last_name, - 'phone': model_2['profile_academy'].phone, - 'role_id': 'hitman', - 'status': 'ACTIVE', - 'user_id': 1 - }]) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + { + "academy_id": 1, + "address": model_1["profile_academy"].address, + "email": model_1["profile_academy"].email, + "first_name": model_1["profile_academy"].first_name, + "id": 1, + "last_name": model_1["profile_academy"].last_name, + "phone": 
model_1["profile_academy"].phone, + "role_id": "hitman", + "status": "ACTIVE", + "user_id": 1, + }, + { + "academy_id": 2, + "address": model_2["profile_academy"].address, + "email": model_2["profile_academy"].email, + "first_name": model_2["profile_academy"].first_name, + "id": 2, + "last_name": model_2["profile_academy"].last_name, + "phone": model_2["profile_academy"].phone, + "role_id": "hitman", + "status": "ACTIVE", + "user_id": 1, + }, + ], + ) def test_academy_member_query_like_full_name_status_invited(self): - role = 'hitman' - base = self.bc.database.create(authenticate=True, role=role, capability='read_member') + role = "hitman" + base = self.bc.database.create(authenticate=True, role=role, capability="read_member") profile_academy_kwargs = { - 'email': 'b@b.com', - 'first_name': 'Rene', - 'last_name': 'Descartes', - 'status': 'INVITED' + "email": "b@b.com", + "first_name": "Rene", + "last_name": "Descartes", + "status": "INVITED", } profile_academy_kwargs_2 = { - 'email': 'a@a.com', - 'first_name': 'Michael', - 'last_name': 'Jordan', - 'status': 'INVITED' + "email": "a@a.com", + "first_name": "Michael", + "last_name": "Jordan", + "status": "INVITED", } - model_1 = self.bc.database.create(profile_academy=True, - profile_academy_kwargs=profile_academy_kwargs, - models=base) - model_2 = self.bc.database.create(profile_academy=True, - profile_academy_kwargs=profile_academy_kwargs_2, - models=base) + model_1 = self.bc.database.create( + profile_academy=True, profile_academy_kwargs=profile_academy_kwargs, models=base + ) + model_2 = self.bc.database.create( + profile_academy=True, profile_academy_kwargs=profile_academy_kwargs_2, models=base + ) - base_url = reverse_lazy('authenticate:academy_member') - url = f'{base_url}?like=Rene Descartes' + base_url = reverse_lazy("authenticate:academy_member") + url = f"{base_url}?like=Rene Descartes" - response = self.client.get(url, headers={'academy': 1}) + response = self.client.get(url, headers={"academy": 1}) json = response.json() - expected = [{ - 'academy': { - 'id': model_1['profile_academy'].academy.id, - 'name': model_1['profile_academy'].academy.name, - 'slug': model_1['profile_academy'].academy.slug - }, - 'address': model_1['profile_academy'].address, - 'created_at': self.datetime_to_iso(model_1['profile_academy'].created_at), - 'email': model_1['profile_academy'].email, - 'first_name': model_1['profile_academy'].first_name, - 'id': model_1['profile_academy'].id, - 'last_name': model_1['profile_academy'].last_name, - 'phone': model_1['profile_academy'].phone, - 'role': { - 'id': 'hitman', - 'name': 'hitman', - 'slug': 'hitman' - }, - 'status': 'INVITED', - 'user': { - 'email': model_1['profile_academy'].user.email, - 'first_name': model_1['profile_academy'].user.first_name, - 'profile': None, - 'id': model_1['profile_academy'].user.id, - 'last_name': model_1['profile_academy'].user.last_name + expected = [ + { + "academy": { + "id": model_1["profile_academy"].academy.id, + "name": model_1["profile_academy"].academy.name, + "slug": model_1["profile_academy"].academy.slug, + }, + "address": model_1["profile_academy"].address, + "created_at": self.datetime_to_iso(model_1["profile_academy"].created_at), + "email": model_1["profile_academy"].email, + "first_name": model_1["profile_academy"].first_name, + "id": model_1["profile_academy"].id, + "last_name": model_1["profile_academy"].last_name, + "phone": model_1["profile_academy"].phone, + "role": {"id": "hitman", "name": "hitman", "slug": "hitman"}, + "status": "INVITED", + "user": { + 
"email": model_1["profile_academy"].user.email, + "first_name": model_1["profile_academy"].user.first_name, + "profile": None, + "id": model_1["profile_academy"].user.id, + "last_name": model_1["profile_academy"].user.last_name, + }, } - }] + ] self.assertEqual(json, expected) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), - [{ - 'academy_id': 1, - 'address': model_1['profile_academy'].address, - 'email': model_1['profile_academy'].email, - 'first_name': model_1['profile_academy'].first_name, - 'id': 1, - 'last_name': model_1['profile_academy'].last_name, - 'phone': model_1['profile_academy'].phone, - 'role_id': 'hitman', - 'status': 'INVITED', - 'user_id': 1 - }, { - 'academy_id': 2, - 'address': model_2['profile_academy'].address, - 'email': model_2['profile_academy'].email, - 'first_name': model_2['profile_academy'].first_name, - 'id': 2, - 'last_name': model_2['profile_academy'].last_name, - 'phone': model_2['profile_academy'].phone, - 'role_id': 'hitman', - 'status': 'INVITED', - 'user_id': 1 - }]) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + { + "academy_id": 1, + "address": model_1["profile_academy"].address, + "email": model_1["profile_academy"].email, + "first_name": model_1["profile_academy"].first_name, + "id": 1, + "last_name": model_1["profile_academy"].last_name, + "phone": model_1["profile_academy"].phone, + "role_id": "hitman", + "status": "INVITED", + "user_id": 1, + }, + { + "academy_id": 2, + "address": model_2["profile_academy"].address, + "email": model_2["profile_academy"].email, + "first_name": model_2["profile_academy"].first_name, + "id": 2, + "last_name": model_2["profile_academy"].last_name, + "phone": model_2["profile_academy"].phone, + "role_id": "hitman", + "status": "INVITED", + "user_id": 1, + }, + ], + ) def test_academy_member_query_like_first_name_status_invited(self): - role = 'hitman' - base = self.bc.database.create(authenticate=True, role=role, capability='read_member') + role = "hitman" + base = self.bc.database.create(authenticate=True, role=role, capability="read_member") profile_academy_kwargs = { - 'email': 'b@b.com', - 'first_name': 'Rene', - 'last_name': 'Descartes', - 'status': 'INVITED' + "email": "b@b.com", + "first_name": "Rene", + "last_name": "Descartes", + "status": "INVITED", } profile_academy_kwargs_2 = { - 'email': 'a@a.com', - 'first_name': 'Michael', - 'last_name': 'Jordan', - 'status': 'INVITED' + "email": "a@a.com", + "first_name": "Michael", + "last_name": "Jordan", + "status": "INVITED", } - model_1 = self.bc.database.create(profile_academy=True, - profile_academy_kwargs=profile_academy_kwargs, - models=base) - model_2 = self.bc.database.create(profile_academy=True, - profile_academy_kwargs=profile_academy_kwargs_2, - models=base) + model_1 = self.bc.database.create( + profile_academy=True, profile_academy_kwargs=profile_academy_kwargs, models=base + ) + model_2 = self.bc.database.create( + profile_academy=True, profile_academy_kwargs=profile_academy_kwargs_2, models=base + ) - base_url = reverse_lazy('authenticate:academy_member') - url = f'{base_url}?like=Rene' + base_url = reverse_lazy("authenticate:academy_member") + url = f"{base_url}?like=Rene" - response = self.client.get(url, headers={'academy': 1}) + response = self.client.get(url, headers={"academy": 1}) json = response.json() - expected = [{ - 'academy': { - 'id': model_1['profile_academy'].academy.id, - 'name': model_1['profile_academy'].academy.name, - 'slug': 
model_1['profile_academy'].academy.slug - }, - 'address': model_1['profile_academy'].address, - 'created_at': self.datetime_to_iso(model_1['profile_academy'].created_at), - 'email': model_1['profile_academy'].email, - 'first_name': model_1['profile_academy'].first_name, - 'id': model_1['profile_academy'].id, - 'last_name': model_1['profile_academy'].last_name, - 'phone': model_1['profile_academy'].phone, - 'role': { - 'id': 'hitman', - 'name': 'hitman', - 'slug': 'hitman' - }, - 'status': 'INVITED', - 'user': { - 'email': model_1['profile_academy'].user.email, - 'first_name': model_1['profile_academy'].user.first_name, - 'profile': None, - 'id': model_1['profile_academy'].user.id, - 'last_name': model_1['profile_academy'].user.last_name + expected = [ + { + "academy": { + "id": model_1["profile_academy"].academy.id, + "name": model_1["profile_academy"].academy.name, + "slug": model_1["profile_academy"].academy.slug, + }, + "address": model_1["profile_academy"].address, + "created_at": self.datetime_to_iso(model_1["profile_academy"].created_at), + "email": model_1["profile_academy"].email, + "first_name": model_1["profile_academy"].first_name, + "id": model_1["profile_academy"].id, + "last_name": model_1["profile_academy"].last_name, + "phone": model_1["profile_academy"].phone, + "role": {"id": "hitman", "name": "hitman", "slug": "hitman"}, + "status": "INVITED", + "user": { + "email": model_1["profile_academy"].user.email, + "first_name": model_1["profile_academy"].user.first_name, + "profile": None, + "id": model_1["profile_academy"].user.id, + "last_name": model_1["profile_academy"].user.last_name, + }, } - }] + ] self.assertEqual(json, expected) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), - [{ - 'academy_id': 1, - 'address': model_1['profile_academy'].address, - 'email': model_1['profile_academy'].email, - 'first_name': model_1['profile_academy'].first_name, - 'id': 1, - 'last_name': model_1['profile_academy'].last_name, - 'phone': model_1['profile_academy'].phone, - 'role_id': 'hitman', - 'status': 'INVITED', - 'user_id': 1 - }, { - 'academy_id': 2, - 'address': model_2['profile_academy'].address, - 'email': model_2['profile_academy'].email, - 'first_name': model_2['profile_academy'].first_name, - 'id': 2, - 'last_name': model_2['profile_academy'].last_name, - 'phone': model_2['profile_academy'].phone, - 'role_id': 'hitman', - 'status': 'INVITED', - 'user_id': 1 - }]) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + { + "academy_id": 1, + "address": model_1["profile_academy"].address, + "email": model_1["profile_academy"].email, + "first_name": model_1["profile_academy"].first_name, + "id": 1, + "last_name": model_1["profile_academy"].last_name, + "phone": model_1["profile_academy"].phone, + "role_id": "hitman", + "status": "INVITED", + "user_id": 1, + }, + { + "academy_id": 2, + "address": model_2["profile_academy"].address, + "email": model_2["profile_academy"].email, + "first_name": model_2["profile_academy"].first_name, + "id": 2, + "last_name": model_2["profile_academy"].last_name, + "phone": model_2["profile_academy"].phone, + "role_id": "hitman", + "status": "INVITED", + "user_id": 1, + }, + ], + ) def test_academy_member_query_like_last_name_status_invited(self): - role = 'hitman' - base = self.bc.database.create(authenticate=True, role=role, capability='read_member') + role = "hitman" + base = self.bc.database.create(authenticate=True, role=role, capability="read_member") profile_academy_kwargs = { - 'email': 
'b@b.com', - 'first_name': 'Rene', - 'last_name': 'Descartes', - 'status': 'INVITED' + "email": "b@b.com", + "first_name": "Rene", + "last_name": "Descartes", + "status": "INVITED", } profile_academy_kwargs_2 = { - 'email': 'a@a.com', - 'first_name': 'Michael', - 'last_name': 'Jordan', - 'status': 'INVITED' + "email": "a@a.com", + "first_name": "Michael", + "last_name": "Jordan", + "status": "INVITED", } - model_1 = self.bc.database.create(profile_academy=True, - profile_academy_kwargs=profile_academy_kwargs, - models=base) - model_2 = self.bc.database.create(profile_academy=True, - profile_academy_kwargs=profile_academy_kwargs_2, - models=base) + model_1 = self.bc.database.create( + profile_academy=True, profile_academy_kwargs=profile_academy_kwargs, models=base + ) + model_2 = self.bc.database.create( + profile_academy=True, profile_academy_kwargs=profile_academy_kwargs_2, models=base + ) - base_url = reverse_lazy('authenticate:academy_member') - url = f'{base_url}?like=Descartes' + base_url = reverse_lazy("authenticate:academy_member") + url = f"{base_url}?like=Descartes" - response = self.client.get(url, headers={'academy': 1}) + response = self.client.get(url, headers={"academy": 1}) json = response.json() - expected = [{ - 'academy': { - 'id': model_1['profile_academy'].academy.id, - 'name': model_1['profile_academy'].academy.name, - 'slug': model_1['profile_academy'].academy.slug - }, - 'address': model_1['profile_academy'].address, - 'created_at': self.datetime_to_iso(model_1['profile_academy'].created_at), - 'email': model_1['profile_academy'].email, - 'first_name': model_1['profile_academy'].first_name, - 'id': model_1['profile_academy'].id, - 'last_name': model_1['profile_academy'].last_name, - 'phone': model_1['profile_academy'].phone, - 'role': { - 'id': 'hitman', - 'name': 'hitman', - 'slug': 'hitman' - }, - 'status': 'INVITED', - 'user': { - 'email': model_1['profile_academy'].user.email, - 'first_name': model_1['profile_academy'].user.first_name, - 'profile': None, - 'id': model_1['profile_academy'].user.id, - 'last_name': model_1['profile_academy'].user.last_name + expected = [ + { + "academy": { + "id": model_1["profile_academy"].academy.id, + "name": model_1["profile_academy"].academy.name, + "slug": model_1["profile_academy"].academy.slug, + }, + "address": model_1["profile_academy"].address, + "created_at": self.datetime_to_iso(model_1["profile_academy"].created_at), + "email": model_1["profile_academy"].email, + "first_name": model_1["profile_academy"].first_name, + "id": model_1["profile_academy"].id, + "last_name": model_1["profile_academy"].last_name, + "phone": model_1["profile_academy"].phone, + "role": {"id": "hitman", "name": "hitman", "slug": "hitman"}, + "status": "INVITED", + "user": { + "email": model_1["profile_academy"].user.email, + "first_name": model_1["profile_academy"].user.first_name, + "profile": None, + "id": model_1["profile_academy"].user.id, + "last_name": model_1["profile_academy"].user.last_name, + }, } - }] + ] self.assertEqual(json, expected) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), - [{ - 'academy_id': 1, - 'address': model_1['profile_academy'].address, - 'email': model_1['profile_academy'].email, - 'first_name': model_1['profile_academy'].first_name, - 'id': 1, - 'last_name': model_1['profile_academy'].last_name, - 'phone': model_1['profile_academy'].phone, - 'role_id': 'hitman', - 'status': 'INVITED', - 'user_id': 1 - }, { - 'academy_id': 2, - 'address': model_2['profile_academy'].address, - 'email': 
model_2['profile_academy'].email, - 'first_name': model_2['profile_academy'].first_name, - 'id': 2, - 'last_name': model_2['profile_academy'].last_name, - 'phone': model_2['profile_academy'].phone, - 'role_id': 'hitman', - 'status': 'INVITED', - 'user_id': 1 - }]) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + { + "academy_id": 1, + "address": model_1["profile_academy"].address, + "email": model_1["profile_academy"].email, + "first_name": model_1["profile_academy"].first_name, + "id": 1, + "last_name": model_1["profile_academy"].last_name, + "phone": model_1["profile_academy"].phone, + "role_id": "hitman", + "status": "INVITED", + "user_id": 1, + }, + { + "academy_id": 2, + "address": model_2["profile_academy"].address, + "email": model_2["profile_academy"].email, + "first_name": model_2["profile_academy"].first_name, + "id": 2, + "last_name": model_2["profile_academy"].last_name, + "phone": model_2["profile_academy"].phone, + "role_id": "hitman", + "status": "INVITED", + "user_id": 1, + }, + ], + ) def test_academy_member_query_like_email_status_invited(self): - role = 'hitman' - base = self.bc.database.create(authenticate=True, role=role, capability='read_member') + role = "hitman" + base = self.bc.database.create(authenticate=True, role=role, capability="read_member") profile_academy_kwargs = { - 'email': 'b@b.com', - 'first_name': 'Rene', - 'last_name': 'Descartes', - 'status': 'INVITED' + "email": "b@b.com", + "first_name": "Rene", + "last_name": "Descartes", + "status": "INVITED", } profile_academy_kwargs_2 = { - 'email': 'a@a.com', - 'first_name': 'Michael', - 'last_name': 'Jordan', - 'status': 'INVITED' + "email": "a@a.com", + "first_name": "Michael", + "last_name": "Jordan", + "status": "INVITED", } - model_1 = self.bc.database.create(profile_academy=True, - profile_academy_kwargs=profile_academy_kwargs, - models=base) - model_2 = self.bc.database.create(profile_academy=True, - profile_academy_kwargs=profile_academy_kwargs_2, - models=base) + model_1 = self.bc.database.create( + profile_academy=True, profile_academy_kwargs=profile_academy_kwargs, models=base + ) + model_2 = self.bc.database.create( + profile_academy=True, profile_academy_kwargs=profile_academy_kwargs_2, models=base + ) - base_url = reverse_lazy('authenticate:academy_member') - url = f'{base_url}?like=b@b.com' + base_url = reverse_lazy("authenticate:academy_member") + url = f"{base_url}?like=b@b.com" - response = self.client.get(url, headers={'academy': 1}) + response = self.client.get(url, headers={"academy": 1}) json = response.json() - expected = [{ - 'academy': { - 'id': model_1['profile_academy'].academy.id, - 'name': model_1['profile_academy'].academy.name, - 'slug': model_1['profile_academy'].academy.slug - }, - 'address': model_1['profile_academy'].address, - 'created_at': self.datetime_to_iso(model_1['profile_academy'].created_at), - 'email': model_1['profile_academy'].email, - 'first_name': model_1['profile_academy'].first_name, - 'id': model_1['profile_academy'].id, - 'last_name': model_1['profile_academy'].last_name, - 'phone': model_1['profile_academy'].phone, - 'role': { - 'id': 'hitman', - 'name': 'hitman', - 'slug': 'hitman' - }, - 'status': 'INVITED', - 'user': { - 'email': model_1['profile_academy'].user.email, - 'first_name': model_1['profile_academy'].user.first_name, - 'profile': None, - 'id': model_1['profile_academy'].user.id, - 'last_name': model_1['profile_academy'].user.last_name + expected = [ + { + "academy": { + "id": 
model_1["profile_academy"].academy.id, + "name": model_1["profile_academy"].academy.name, + "slug": model_1["profile_academy"].academy.slug, + }, + "address": model_1["profile_academy"].address, + "created_at": self.datetime_to_iso(model_1["profile_academy"].created_at), + "email": model_1["profile_academy"].email, + "first_name": model_1["profile_academy"].first_name, + "id": model_1["profile_academy"].id, + "last_name": model_1["profile_academy"].last_name, + "phone": model_1["profile_academy"].phone, + "role": {"id": "hitman", "name": "hitman", "slug": "hitman"}, + "status": "INVITED", + "user": { + "email": model_1["profile_academy"].user.email, + "first_name": model_1["profile_academy"].user.first_name, + "profile": None, + "id": model_1["profile_academy"].user.id, + "last_name": model_1["profile_academy"].user.last_name, + }, } - }] + ] self.assertEqual(json, expected) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), - [{ - 'academy_id': 1, - 'address': model_1['profile_academy'].address, - 'email': model_1['profile_academy'].email, - 'first_name': model_1['profile_academy'].first_name, - 'id': 1, - 'last_name': model_1['profile_academy'].last_name, - 'phone': model_1['profile_academy'].phone, - 'role_id': 'hitman', - 'status': 'INVITED', - 'user_id': 1 - }, { - 'academy_id': 2, - 'address': model_2['profile_academy'].address, - 'email': model_2['profile_academy'].email, - 'first_name': model_2['profile_academy'].first_name, - 'id': 2, - 'last_name': model_2['profile_academy'].last_name, - 'phone': model_2['profile_academy'].phone, - 'role_id': 'hitman', - 'status': 'INVITED', - 'user_id': 1 - }]) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + { + "academy_id": 1, + "address": model_1["profile_academy"].address, + "email": model_1["profile_academy"].email, + "first_name": model_1["profile_academy"].first_name, + "id": 1, + "last_name": model_1["profile_academy"].last_name, + "phone": model_1["profile_academy"].phone, + "role_id": "hitman", + "status": "INVITED", + "user_id": 1, + }, + { + "academy_id": 2, + "address": model_2["profile_academy"].address, + "email": model_2["profile_academy"].email, + "first_name": model_2["profile_academy"].first_name, + "id": 2, + "last_name": model_2["profile_academy"].last_name, + "phone": model_2["profile_academy"].phone, + "role_id": "hitman", + "status": "INVITED", + "user_id": 1, + }, + ], + ) """ 🔽🔽🔽 GET query status """ def test_academy_member__query_status__bad_status(self): - base = self.bc.database.create(user=1, role=1, capability='read_member') + base = self.bc.database.create(user=1, role=1, capability="read_member") for status in PROFILE_ACADEMY_STATUS: bad_status = [x for x in PROFILE_ACADEMY_STATUS if status != x][0] - profile_academy = {'status': status} + profile_academy = {"status": status} model = self.bc.database.create(profile_academy=(2, profile_academy), models=base) self.client.force_authenticate(model.user) - url = reverse_lazy('authenticate:academy_member') + f'?status={bad_status}' - response = self.client.get(url, headers={'academy': model.academy.id}) + url = reverse_lazy("authenticate:academy_member") + f"?status={bad_status}" + response = self.client.get(url, headers={"academy": model.academy.id}) json = response.json() expected = [] self.assertEqual(json, expected) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), - self.bc.format.to_dict(model.profile_academy)) + self.assertEqual( + 
self.bc.database.list_of("authenticate.ProfileAcademy"), self.bc.format.to_dict(model.profile_academy) + ) - self.bc.database.delete('authenticate.ProfileAcademy') + self.bc.database.delete("authenticate.ProfileAcademy") def test_academy_member__query_status__one_status__uppercase(self): - base = self.bc.database.create(user=1, role=1, capability='read_member') + base = self.bc.database.create(user=1, role=1, capability="read_member") for status in PROFILE_ACADEMY_STATUS: - profile_academy = {'status': status} + profile_academy = {"status": status} model = self.bc.database.create(profile_academy=(2, profile_academy), models=base) self.client.force_authenticate(model.user) - url = reverse_lazy('authenticate:academy_member') + f'?status={status.upper()}' - response = self.client.get(url, headers={'academy': model.academy.id}) + url = reverse_lazy("authenticate:academy_member") + f"?status={status.upper()}" + response = self.client.get(url, headers={"academy": model.academy.id}) json = response.json() expected = [ @@ -1158,21 +1184,22 @@ def test_academy_member__query_status__one_status__uppercase(self): ] self.assertEqual(json, expected) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), - self.bc.format.to_dict(model.profile_academy)) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), self.bc.format.to_dict(model.profile_academy) + ) - self.bc.database.delete('authenticate.ProfileAcademy') + self.bc.database.delete("authenticate.ProfileAcademy") def test_academy_member__query_status__one_status__lowercase(self): - base = self.bc.database.create(user=1, role=1, capability='read_member') + base = self.bc.database.create(user=1, role=1, capability="read_member") for status in PROFILE_ACADEMY_STATUS: - profile_academy = {'status': status} + profile_academy = {"status": status} model = self.bc.database.create(profile_academy=(2, profile_academy), models=base) self.client.force_authenticate(model.user) - url = reverse_lazy('authenticate:academy_member') + f'?status={status.lower()}' - response = self.client.get(url, headers={'academy': model.academy.id}) + url = reverse_lazy("authenticate:academy_member") + f"?status={status.lower()}" + response = self.client.get(url, headers={"academy": model.academy.id}) json = response.json() expected = [ @@ -1181,10 +1208,11 @@ def test_academy_member__query_status__one_status__lowercase(self): ] self.assertEqual(json, expected) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), - self.bc.format.to_dict(model.profile_academy)) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), self.bc.format.to_dict(model.profile_academy) + ) - self.bc.database.delete('authenticate.ProfileAcademy') + self.bc.database.delete("authenticate.ProfileAcademy") """ 🔽🔽🔽 GET query roles @@ -1192,762 +1220,876 @@ def test_academy_member__query_status__one_status__lowercase(self): def test_academy_member_with_zero_roles(self): """Test /academy/:id/member""" - role = 'konan' + role = "konan" - model = self.bc.database.create(authenticate=True, role=role, capability='read_member', profile_academy=True) - url = reverse_lazy('authenticate:academy_member') - url = f'{url}?roles=' - response = self.client.get(url, headers={'academy': 1}) + model = self.bc.database.create(authenticate=True, role=role, capability="read_member", profile_academy=True) + url = reverse_lazy("authenticate:academy_member") + url = f"{url}?roles=" + response = self.client.get(url, headers={"academy": 1}) json = 
response.json() - self.assertEqual(json, [ - format_profile_academy(self, model.profile_academy, model.role, model.academy), - ]) + self.assertEqual( + json, + [ + format_profile_academy(self, model.profile_academy, model.role, model.academy), + ], + ) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [{ - 'academy_id': 1, - 'address': None, - 'email': None, - 'first_name': None, - 'id': 1, - 'last_name': None, - 'phone': '', - 'role_id': role, - 'status': 'INVITED', - 'user_id': 1, - }]) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + { + "academy_id": 1, + "address": None, + "email": None, + "first_name": None, + "id": 1, + "last_name": None, + "phone": "", + "role_id": role, + "status": "INVITED", + "user_id": 1, + } + ], + ) def test_academy_member_with_one_roles(self): """Test /academy/:id/member""" - role = 'konan' + role = "konan" - model = self.bc.database.create(authenticate=True, role=role, capability='read_member', profile_academy=True) - url = reverse_lazy('authenticate:academy_member') - url = f'{url}?roles={role}' - response = self.client.get(url, headers={'academy': 1}) + model = self.bc.database.create(authenticate=True, role=role, capability="read_member", profile_academy=True) + url = reverse_lazy("authenticate:academy_member") + url = f"{url}?roles={role}" + response = self.client.get(url, headers={"academy": 1}) json = response.json() profile_academy = self.get_profile_academy(1) - self.assertEqual(json, [{ - 'academy': { - 'id': model['academy'].id, - 'name': model['academy'].name, - 'slug': model['academy'].slug, - }, - 'address': None, - 'created_at': self.bc.datetime.to_iso_string(profile_academy.created_at), - 'email': None, - 'first_name': None, - 'id': model['profile_academy'].id, - 'last_name': None, - 'phone': '', - 'role': { - 'id': role, - 'name': role, - 'slug': role, - }, - 'status': 'INVITED', - 'user': { - 'email': model['user'].email, - 'first_name': model['user'].first_name, - 'id': model['user'].id, - 'last_name': model['user'].last_name, - 'profile': None, - }, - }]) + self.assertEqual( + json, + [ + { + "academy": { + "id": model["academy"].id, + "name": model["academy"].name, + "slug": model["academy"].slug, + }, + "address": None, + "created_at": self.bc.datetime.to_iso_string(profile_academy.created_at), + "email": None, + "first_name": None, + "id": model["profile_academy"].id, + "last_name": None, + "phone": "", + "role": { + "id": role, + "name": role, + "slug": role, + }, + "status": "INVITED", + "user": { + "email": model["user"].email, + "first_name": model["user"].first_name, + "id": model["user"].id, + "last_name": model["user"].last_name, + "profile": None, + }, + } + ], + ) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [{ - 'academy_id': 1, - 'address': None, - 'email': None, - 'first_name': None, - 'id': 1, - 'last_name': None, - 'phone': '', - 'role_id': role, - 'status': 'INVITED', - 'user_id': 1, - }]) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + { + "academy_id": 1, + "address": None, + "email": None, + "first_name": None, + "id": 1, + "last_name": None, + "phone": "", + "role_id": role, + "status": "INVITED", + "user_id": 1, + } + ], + ) def test_academy_member_with_two_roles(self): """Test /academy/:id/member""" - roles = ['konan', 'pain'] + roles = ["konan", "pain"] models = [ - 
self.bc.database.create(authenticate=True, role=roles[0], capability='read_member', profile_academy=True) + self.bc.database.create(authenticate=True, role=roles[0], capability="read_member", profile_academy=True) ] models = models + [ - self.bc.database.create(authenticate=True, - role=roles[1], - capability='read_member', - profile_academy=True, - models={'academy': models[0]['academy']}) + self.bc.database.create( + authenticate=True, + role=roles[1], + capability="read_member", + profile_academy=True, + models={"academy": models[0]["academy"]}, + ) ] - url = reverse_lazy('authenticate:academy_member') - args = ','.join(roles) - url = f'{url}?roles={args}' - response = self.client.get(url, headers={'academy': 1}) + url = reverse_lazy("authenticate:academy_member") + args = ",".join(roles) + url = f"{url}?roles={args}" + response = self.client.get(url, headers={"academy": 1}) json = response.json() - self.assertEqual(json, [{ - 'academy': { - 'id': model['academy'].id, - 'name': model['academy'].name, - 'slug': model['academy'].slug, - }, - 'address': - None, - 'created_at': - self.bc.datetime.to_iso_string(self.get_profile_academy(model['profile_academy'].id).created_at), - 'email': - None, - 'first_name': - None, - 'id': - model['profile_academy'].id, - 'last_name': - None, - 'phone': - '', - 'role': { - 'id': roles[model['profile_academy'].id - 1], - 'name': roles[model['profile_academy'].id - 1], - 'slug': roles[model['profile_academy'].id - 1], - }, - 'status': - 'INVITED', - 'user': { - 'email': model['user'].email, - 'first_name': model['user'].first_name, - 'id': model['user'].id, - 'last_name': model['user'].last_name, - 'profile': None, - }, - } for model in models]) + self.assertEqual( + json, + [ + { + "academy": { + "id": model["academy"].id, + "name": model["academy"].name, + "slug": model["academy"].slug, + }, + "address": None, + "created_at": self.bc.datetime.to_iso_string( + self.get_profile_academy(model["profile_academy"].id).created_at + ), + "email": None, + "first_name": None, + "id": model["profile_academy"].id, + "last_name": None, + "phone": "", + "role": { + "id": roles[model["profile_academy"].id - 1], + "name": roles[model["profile_academy"].id - 1], + "slug": roles[model["profile_academy"].id - 1], + }, + "status": "INVITED", + "user": { + "email": model["user"].email, + "first_name": model["user"].first_name, + "id": model["user"].id, + "last_name": model["user"].last_name, + "profile": None, + }, + } + for model in models + ], + ) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [{ - 'academy_id': 1, - 'address': None, - 'email': None, - 'first_name': None, - 'id': 1 + index, - 'last_name': None, - 'phone': '', - 'role_id': roles[index], - 'status': 'INVITED', - 'user_id': 1 + index, - } for index in range(0, 2)]) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + { + "academy_id": 1, + "address": None, + "email": None, + "first_name": None, + "id": 1 + index, + "last_name": None, + "phone": "", + "role_id": roles[index], + "status": "INVITED", + "user_id": 1 + index, + } + for index in range(0, 2) + ], + ) """ 🔽🔽🔽 Spy the extensions """ - @patch.object(APIViewExtensionHandlers, '_spy_extensions', MagicMock()) + @patch.object(APIViewExtensionHandlers, "_spy_extensions", MagicMock()) def test_academy_member__spy_extensions(self): - role = 'hitman' - model = self.bc.database.create(authenticate=True, role=role, capability='read_member', 
profile_academy=True) - url = reverse_lazy('authenticate:academy_member') - self.client.get(url, headers={'academy': 1}) + role = "hitman" + model = self.bc.database.create(authenticate=True, role=role, capability="read_member", profile_academy=True) + url = reverse_lazy("authenticate:academy_member") + self.client.get(url, headers={"academy": 1}) - self.assertEqual(APIViewExtensionHandlers._spy_extensions.call_args_list, [ - call(['LanguageExtension', 'LookupExtension', 'PaginationExtension']), - ]) + self.assertEqual( + APIViewExtensionHandlers._spy_extensions.call_args_list, + [ + call(["LanguageExtension", "LookupExtension", "PaginationExtension"]), + ], + ) - @patch.object(APIViewExtensionHandlers, '_spy_extension_arguments', MagicMock()) + @patch.object(APIViewExtensionHandlers, "_spy_extension_arguments", MagicMock()) def test_academy_member__spy_extension_arguments(self): - role = 'hitman' - model = self.bc.database.create(authenticate=True, role=role, capability='read_member', profile_academy=True) - url = reverse_lazy('authenticate:academy_member') - self.client.get(url, headers={'academy': 1}) + role = "hitman" + model = self.bc.database.create(authenticate=True, role=role, capability="read_member", profile_academy=True) + url = reverse_lazy("authenticate:academy_member") + self.client.get(url, headers={"academy": 1}) - self.assertEqual(APIViewExtensionHandlers._spy_extension_arguments.call_args_list, [ - call(paginate=True), - ]) + self.assertEqual( + APIViewExtensionHandlers._spy_extension_arguments.call_args_list, + [ + call(paginate=True), + ], + ) class MemberPostTestSuite(AuthTestCase): """Authentication test suite""" - @patch('breathecode.notify.actions.send_email_message', MagicMock()) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) def test_academy_member__post__no_data(self): """Test /academy/:id/member""" - role = 'konan' + role = "konan" - model = self.bc.database.create(authenticate=True, role=role, capability='crud_member', profile_academy=True) - url = reverse_lazy('authenticate:academy_member') + model = self.bc.database.create(authenticate=True, role=role, capability="crud_member", profile_academy=True) + url = reverse_lazy("authenticate:academy_member") data = {} - response = self.client.post(url, data, headers={'academy': 1}) + response = self.client.post(url, data, headers={"academy": 1}) json = response.json() - expected = {'role': ['This field is required.']} + expected = {"role": ["This field is required."]} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [{ - 'academy_id': 1, - 'address': None, - 'email': None, - 'first_name': None, - 'id': 1, - 'last_name': None, - 'phone': '', - 'role_id': role, - 'status': 'INVITED', - 'user_id': 1, - }]) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + { + "academy_id": 1, + "address": None, + "email": None, + "first_name": None, + "id": 1, + "last_name": None, + "phone": "", + "role_id": role, + "status": "INVITED", + "user_id": 1, + } + ], + ) self.assertEqual(actions.send_email_message.call_args_list, []) - @patch('breathecode.notify.actions.send_email_message', MagicMock()) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) def test_academy_member__post__no_user__invite_is_false(self): """Test /academy/:id/member""" - role = 'konan' + role = "konan" - model = self.bc.database.create(authenticate=True, 
role=role, capability='crud_member', profile_academy=True) - url = reverse_lazy('authenticate:academy_member') - data = {'role': role, 'invite': False} - response = self.client.post(url, data, headers={'academy': 1}) + model = self.bc.database.create(authenticate=True, role=role, capability="crud_member", profile_academy=True) + url = reverse_lazy("authenticate:academy_member") + data = {"role": role, "invite": False} + response = self.client.post(url, data, headers={"academy": 1}) json = response.json() - expected = {'detail': 'user-not-found', 'status_code': 400} + expected = {"detail": "user-not-found", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [{ - 'academy_id': 1, - 'address': None, - 'email': None, - 'first_name': None, - 'id': 1, - 'last_name': None, - 'phone': '', - 'role_id': role, - 'status': 'INVITED', - 'user_id': 1, - }]) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + { + "academy_id": 1, + "address": None, + "email": None, + "first_name": None, + "id": 1, + "last_name": None, + "phone": "", + "role_id": role, + "status": "INVITED", + "user_id": 1, + } + ], + ) self.assertEqual(actions.send_email_message.call_args_list, []) - @patch('breathecode.notify.actions.send_email_message', MagicMock()) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) def test_academy_member__post__no_invite(self): """Test /academy/:id/member""" - role = 'konan' + role = "konan" - model = self.bc.database.create(authenticate=True, role=role, capability='crud_member', profile_academy=True) - url = reverse_lazy('authenticate:academy_member') - data = {'role': role, 'invite': True} - response = self.client.post(url, data, headers={'academy': 1}) + model = self.bc.database.create(authenticate=True, role=role, capability="crud_member", profile_academy=True) + url = reverse_lazy("authenticate:academy_member") + data = {"role": role, "invite": True} + response = self.client.post(url, data, headers={"academy": 1}) json = response.json() - expected = {'detail': 'no-email-or-id', 'status_code': 400} + expected = {"detail": "no-email-or-id", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [{ - 'academy_id': 1, - 'address': None, - 'email': None, - 'first_name': None, - 'id': 1, - 'last_name': None, - 'phone': '', - 'role_id': role, - 'status': 'INVITED', - 'user_id': 1, - }]) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + { + "academy_id": 1, + "address": None, + "email": None, + "first_name": None, + "id": 1, + "last_name": None, + "phone": "", + "role_id": role, + "status": "INVITED", + "user_id": 1, + } + ], + ) self.assertEqual(actions.send_email_message.call_args_list, []) - @patch('breathecode.notify.actions.send_email_message', MagicMock()) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) def test_academy_member__post__exists_profile_academy_with_this_email__is_none(self): """Test /academy/:id/member""" - role = 'student' + role = "student" - profile_academy = {'email': None} - model = self.bc.database.create(authenticate=True, - role=role, - capability='crud_member', - profile_academy=profile_academy) - url = reverse_lazy('authenticate:academy_member') + profile_academy = {"email": None} + 
model = self.bc.database.create( + authenticate=True, role=role, capability="crud_member", profile_academy=profile_academy + ) + url = reverse_lazy("authenticate:academy_member") data = { - 'role': 'student', - 'first_name': 'Kenny', - 'last_name': 'McKornick', - 'invite': True, - 'email': model.profile_academy.email, + "role": "student", + "first_name": "Kenny", + "last_name": "McKornick", + "invite": True, + "email": model.profile_academy.email, } - response = self.client.post(url, data, format='json', headers={'academy': 1}) + response = self.client.post(url, data, format="json", headers={"academy": 1}) json = response.json() - expected = {'detail': 'already-exists-with-this-email', 'status_code': 400} + expected = {"detail": "already-exists-with-this-email", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), - [self.bc.format.to_dict(model.profile_academy)]) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), [self.bc.format.to_dict(model.profile_academy)] + ) self.assertEqual(actions.send_email_message.call_args_list, []) - @patch('breathecode.notify.actions.send_email_message', MagicMock()) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) def test_academy_member__post__exists_profile_academy_with_this_email__with_email(self): """Test /academy/:id/member""" - role = 'student' + role = "student" - profile_academy = {'email': 'dude@dude.dude'} - model = self.bc.database.create(authenticate=True, - role=role, - capability='crud_member', - profile_academy=profile_academy) - url = reverse_lazy('authenticate:academy_member') + profile_academy = {"email": "dude@dude.dude"} + model = self.bc.database.create( + authenticate=True, role=role, capability="crud_member", profile_academy=profile_academy + ) + url = reverse_lazy("authenticate:academy_member") data = { - 'role': 'student', - 'first_name': 'Kenny', - 'last_name': 'McKornick', - 'invite': True, - 'email': model.profile_academy.email, + "role": "student", + "first_name": "Kenny", + "last_name": "McKornick", + "invite": True, + "email": model.profile_academy.email, } - response = self.client.post(url, data, format='json', headers={'academy': 1}) + response = self.client.post(url, data, format="json", headers={"academy": 1}) json = response.json() - expected = {'detail': 'already-exists-with-this-email', 'status_code': 400} + expected = {"detail": "already-exists-with-this-email", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), - [self.bc.format.to_dict(model.profile_academy)]) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), [self.bc.format.to_dict(model.profile_academy)] + ) self.assertEqual(actions.send_email_message.call_args_list, []) - @patch('breathecode.notify.actions.send_email_message', MagicMock()) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) def test_academy_member__post__user_with_not_student_role(self): """Test /academy/:id/member""" - role = 'konan' + role = "konan" - model = self.bc.database.create(authenticate=True, role=role, capability='crud_member', profile_academy=True) - url = reverse_lazy('authenticate:academy_member') - data = {'role': role, 'user': model['user'].id, 'first_name': 'Kenny', 'last_name': 'McKornick'} - 
response = self.client.post(url, data, headers={'academy': 1}) + model = self.bc.database.create(authenticate=True, role=role, capability="crud_member", profile_academy=True) + url = reverse_lazy("authenticate:academy_member") + data = {"role": role, "user": model["user"].id, "first_name": "Kenny", "last_name": "McKornick"} + response = self.client.post(url, data, headers={"academy": 1}) json = response.json() - expected = {'detail': 'already-exists', 'status_code': 400} + expected = {"detail": "already-exists", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [{ - 'academy_id': 1, - 'address': None, - 'email': None, - 'first_name': None, - 'id': 1, - 'last_name': None, - 'phone': '', - 'role_id': role, - 'status': 'INVITED', - 'user_id': 1, - }]) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + { + "academy_id": 1, + "address": None, + "email": None, + "first_name": None, + "id": 1, + "last_name": None, + "phone": "", + "role_id": role, + "status": "INVITED", + "user_id": 1, + } + ], + ) self.assertEqual(actions.send_email_message.call_args_list, []) - @patch('breathecode.notify.actions.send_email_message', MagicMock()) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) def test_academy_member__post__user_with_student_role(self): """Test /academy/:id/member""" - role = 'student' + role = "student" - model = self.bc.database.create(authenticate=True, role=role, capability='crud_member', profile_academy=True) - url = reverse_lazy('authenticate:academy_member') - data = {'role': role, 'user': model['user'].id, 'first_name': 'Kenny', 'last_name': 'McKornick'} - response = self.client.post(url, data, headers={'academy': 1}) + model = self.bc.database.create(authenticate=True, role=role, capability="crud_member", profile_academy=True) + url = reverse_lazy("authenticate:academy_member") + data = {"role": role, "user": model["user"].id, "first_name": "Kenny", "last_name": "McKornick"} + response = self.client.post(url, data, headers={"academy": 1}) json = response.json() profile_academy = self.get_profile_academy(1) self.assertEqual( - json, { - 'address': None, - 'email': profile_academy.email, - 'first_name': 'Kenny', - 'last_name': 'McKornick', - 'phone': '', - 'role': role, - 'status': 'ACTIVE', - }) + json, + { + "address": None, + "email": profile_academy.email, + "first_name": "Kenny", + "last_name": "McKornick", + "phone": "", + "role": role, + "status": "ACTIVE", + }, + ) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [{ - 'academy_id': 1, - 'address': None, - 'email': profile_academy.email, - 'first_name': 'Kenny', - 'id': 1, - 'last_name': 'McKornick', - 'phone': '', - 'role_id': role, - 'status': 'ACTIVE', - 'user_id': 1, - }]) - - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), []) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + { + "academy_id": 1, + "address": None, + "email": profile_academy.email, + "first_name": "Kenny", + "id": 1, + "last_name": "McKornick", + "phone": "", + "role_id": role, + "status": "ACTIVE", + "user_id": 1, + } + ], + ) + + self.assertEqual(self.bc.database.list_of("authenticate.UserInvite"), []) self.assertEqual(actions.send_email_message.call_args_list, []) - 
@patch('breathecode.notify.actions.send_email_message', MagicMock()) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) def test_academy_member__post__teacher_with_student_role(self): """Test /academy/:id/member""" - role = 'student' + role = "student" - model = self.bc.database.create(authenticate=True, role=role, capability='crud_member', profile_academy=True) - model2 = self.bc.database.create(role='teacher', capability='crud_member') - url = reverse_lazy('authenticate:academy_member') - data = {'role': 'teacher', 'user': model['user'].id, 'first_name': 'Kenny', 'last_name': 'McKornick'} - response = self.client.post(url, data, headers={'academy': 1}) + model = self.bc.database.create(authenticate=True, role=role, capability="crud_member", profile_academy=True) + model2 = self.bc.database.create(role="teacher", capability="crud_member") + url = reverse_lazy("authenticate:academy_member") + data = {"role": "teacher", "user": model["user"].id, "first_name": "Kenny", "last_name": "McKornick"} + response = self.client.post(url, data, headers={"academy": 1}) json = response.json() profile_academy = self.get_profile_academy(1) self.assertEqual( - json, { - 'address': None, - 'email': profile_academy.email, - 'first_name': 'Kenny', - 'last_name': 'McKornick', - 'phone': '', - 'role': 'teacher', - 'status': 'ACTIVE', - }) + json, + { + "address": None, + "email": profile_academy.email, + "first_name": "Kenny", + "last_name": "McKornick", + "phone": "", + "role": "teacher", + "status": "ACTIVE", + }, + ) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [{ - 'academy_id': 1, - 'address': None, - 'email': profile_academy.email, - 'first_name': 'Kenny', - 'id': 1, - 'last_name': 'McKornick', - 'phone': '', - 'role_id': 'teacher', - 'status': 'ACTIVE', - 'user_id': 1, - }]) - - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), []) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + { + "academy_id": 1, + "address": None, + "email": profile_academy.email, + "first_name": "Kenny", + "id": 1, + "last_name": "McKornick", + "phone": "", + "role_id": "teacher", + "status": "ACTIVE", + "user_id": 1, + } + ], + ) + + self.assertEqual(self.bc.database.list_of("authenticate.UserInvite"), []) self.assertEqual(actions.send_email_message.call_args_list, []) - @patch('breathecode.notify.actions.send_email_message', MagicMock()) - @patch('random.getrandbits', MagicMock(side_effect=getrandbits)) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) + @patch("random.getrandbits", MagicMock(side_effect=getrandbits)) def test_academy_member__post__without_user_in_data(self): """Test /academy/:id/member""" - role = 'student' + role = "student" - model = self.bc.database.create(authenticate=True, role=role, capability='crud_member', profile_academy=1) + model = self.bc.database.create(authenticate=True, role=role, capability="crud_member", profile_academy=1) - url = reverse_lazy('authenticate:academy_member') + url = reverse_lazy("authenticate:academy_member") data = { - 'role': 'student', - 'first_name': 'Kenny', - 'last_name': 'McKornick', - 'invite': True, - 'email': 'dude@dude.dude', + "role": "student", + "first_name": "Kenny", + "last_name": "McKornick", + "invite": True, + "email": "dude@dude.dude", } - response = self.client.post(url, data, format='json', headers={'academy': 1}) + response = self.client.post(url, data, format="json", 
headers={"academy": 1}) json = response.json() expected = { - 'address': None, - 'email': 'dude@dude.dude', - 'first_name': 'Kenny', - 'last_name': 'McKornick', - 'phone': '', - 'role': 'student', - 'status': 'INVITED', + "address": None, + "email": "dude@dude.dude", + "first_name": "Kenny", + "last_name": "McKornick", + "phone": "", + "role": "student", + "status": "INVITED", } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [ - self.bc.format.to_dict(model.profile_academy), { - 'academy_id': 1, - 'address': None, - 'email': 'dude@dude.dude', - 'first_name': 'Kenny', - 'id': 2, - 'last_name': 'McKornick', - 'phone': '', - 'role_id': 'student', - 'status': 'INVITED', - 'user_id': None, - } - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + self.bc.format.to_dict(model.profile_academy), + { + "academy_id": 1, + "address": None, + "email": "dude@dude.dude", + "first_name": "Kenny", + "id": 2, + "last_name": "McKornick", + "phone": "", + "role_id": "student", + "status": "INVITED", + "user_id": None, + }, + ], + ) - invite = self.bc.database.get('authenticate.UserInvite', 1, dict=False) - params = {'callback': 'https://admin.4geeks.com'} + invite = self.bc.database.get("authenticate.UserInvite", 1, dict=False) + params = {"callback": "https://admin.4geeks.com"} querystr = urllib.parse.urlencode(params) - url = os.getenv('API_URL') + '/v1/auth/member/invite/' + \ - str(TOKEN) + '?' + querystr - - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), [ - generate_user_invite({ - 'id': 1, - 'academy_id': 1, - 'author_id': 1, - 'email': 'dude@dude.dude', - 'first_name': 'Kenny', - 'last_name': 'McKornick', - 'role_id': 'student', - 'token': TOKEN, - 'syllabus_id': None, - }), - ]) - self.assertEqual(actions.send_email_message.call_args_list, [ - call('welcome_academy', - 'dude@dude.dude', { - 'email': 'dude@dude.dude', - 'subject': 'Welcome to ' + model.academy.name, - 'LINK': url, - 'FIST_NAME': 'Kenny' - }, - academy=model.academy) - ]) - - @patch('breathecode.notify.actions.send_email_message', MagicMock()) - @patch('random.getrandbits', MagicMock(side_effect=getrandbits)) + url = os.getenv("API_URL") + "/v1/auth/member/invite/" + str(TOKEN) + "?" 
+ querystr + + self.assertEqual( + self.bc.database.list_of("authenticate.UserInvite"), + [ + generate_user_invite( + { + "id": 1, + "academy_id": 1, + "author_id": 1, + "email": "dude@dude.dude", + "first_name": "Kenny", + "last_name": "McKornick", + "role_id": "student", + "token": TOKEN, + "syllabus_id": None, + } + ), + ], + ) + self.assertEqual( + actions.send_email_message.call_args_list, + [ + call( + "welcome_academy", + "dude@dude.dude", + { + "email": "dude@dude.dude", + "subject": "Welcome to " + model.academy.name, + "LINK": url, + "FIST_NAME": "Kenny", + }, + academy=model.academy, + ) + ], + ) + + @patch("breathecode.notify.actions.send_email_message", MagicMock()) + @patch("random.getrandbits", MagicMock(side_effect=getrandbits)) def test_academy_member__post__cohort_not_found(self): """Test /academy/:id/member""" - role = 'god' - - user = {'email': 'dude@dude.dude'} - user_invite = {'email': 'dude2@dude.dude'} - model = self.bc.database.create(authenticate=True, - user=user, - user_invite=user_invite, - role=role, - skip_cohort=True, - capability='crud_member', - profile_academy=1) - - url = reverse_lazy('authenticate:academy_member') + role = "god" + + user = {"email": "dude@dude.dude"} + user_invite = {"email": "dude2@dude.dude"} + model = self.bc.database.create( + authenticate=True, + user=user, + user_invite=user_invite, + role=role, + skip_cohort=True, + capability="crud_member", + profile_academy=1, + ) + + url = reverse_lazy("authenticate:academy_member") data = { - 'role': 'god', - 'first_name': 'Kenny', - 'last_name': 'McKornick', - 'cohort': [1], - 'invite': True, - 'email': 'dude2@dude.dude', + "role": "god", + "first_name": "Kenny", + "last_name": "McKornick", + "cohort": [1], + "invite": True, + "email": "dude2@dude.dude", } - response = self.client.post(url, data, format='json', headers={'academy': 1}) + response = self.client.post(url, data, format="json", headers={"academy": 1}) json = response.json() - expected = {'detail': 'cohort-not-found', 'status_code': 400} + expected = {"detail": "cohort-not-found", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [ - self.bc.format.to_dict(model.profile_academy), - ]) - - invite = self.bc.database.get('authenticate.UserInvite', 1, dict=False) - params = {'callback': 'https://admin.4geeks.com'} + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + self.bc.format.to_dict(model.profile_academy), + ], + ) + + invite = self.bc.database.get("authenticate.UserInvite", 1, dict=False) + params = {"callback": "https://admin.4geeks.com"} querystr = urllib.parse.urlencode(params) - url = os.getenv('API_URL') + '/v1/auth/member/invite/' + \ - str(TOKEN) + '?' + querystr + url = os.getenv("API_URL") + "/v1/auth/member/invite/" + str(TOKEN) + "?" 
+ querystr - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), [ - self.bc.format.to_dict(model.user_invite), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.UserInvite"), + [ + self.bc.format.to_dict(model.user_invite), + ], + ) self.assertEqual(actions.send_email_message.call_args_list, []) - @patch('breathecode.notify.actions.send_email_message', MagicMock()) - @patch('random.getrandbits', MagicMock(side_effect=getrandbits)) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) + @patch("random.getrandbits", MagicMock(side_effect=getrandbits)) def test_academy_member__post__without_user_in_data__invite_already_exists__cohort_none_in_data(self): """Test /academy/:id/member""" - role = 'god' + role = "god" - user = {'email': 'dude@dude.dude'} - user_invite = {'email': 'dude2@dude.dude'} - model = self.bc.database.create(authenticate=True, - user=user, - user_invite=user_invite, - role=role, - capability='crud_member', - profile_academy=1) + user = {"email": "dude@dude.dude"} + user_invite = {"email": "dude2@dude.dude"} + model = self.bc.database.create( + authenticate=True, + user=user, + user_invite=user_invite, + role=role, + capability="crud_member", + profile_academy=1, + ) - url = reverse_lazy('authenticate:academy_member') + url = reverse_lazy("authenticate:academy_member") data = { - 'role': 'god', - 'first_name': 'Kenny', - 'last_name': 'McKornick', - 'invite': True, - 'email': 'dude2@dude.dude', + "role": "god", + "first_name": "Kenny", + "last_name": "McKornick", + "invite": True, + "email": "dude2@dude.dude", } - response = self.client.post(url, data, format='json', headers={'academy': 1}) + response = self.client.post(url, data, format="json", headers={"academy": 1}) json = response.json() - expected = {'detail': 'already-invited', 'status_code': 400} + expected = {"detail": "already-invited", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [ - self.bc.format.to_dict(model.profile_academy), - ]) - - invite = self.bc.database.get('authenticate.UserInvite', 1, dict=False) - params = {'callback': 'https://admin.4geeks.com'} + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + self.bc.format.to_dict(model.profile_academy), + ], + ) + + invite = self.bc.database.get("authenticate.UserInvite", 1, dict=False) + params = {"callback": "https://admin.4geeks.com"} querystr = urllib.parse.urlencode(params) - url = os.getenv('API_URL') + '/v1/auth/member/invite/' + \ - str(TOKEN) + '?' + querystr + url = os.getenv("API_URL") + "/v1/auth/member/invite/" + str(TOKEN) + "?" 
+ querystr - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), [ - self.bc.format.to_dict(model.user_invite), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.UserInvite"), + [ + self.bc.format.to_dict(model.user_invite), + ], + ) self.assertEqual(actions.send_email_message.call_args_list, []) - @patch('breathecode.notify.actions.send_email_message', MagicMock()) - @patch('random.getrandbits', MagicMock(side_effect=getrandbits)) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) + @patch("random.getrandbits", MagicMock(side_effect=getrandbits)) def test_academy_member__post__without_user_in_data__invite_already_exists__diff_cohort_in_data(self): """Test /academy/:id/member""" - role = 'god' - - user = {'email': 'dude@dude.dude'} - user_invite = {'email': 'dude2@dude.dude'} - model = self.bc.database.create(authenticate=True, - user=user, - user_invite=user_invite, - cohort=2, - role=role, - capability='crud_member', - profile_academy=1) - - url = reverse_lazy('authenticate:academy_member') + role = "god" + + user = {"email": "dude@dude.dude"} + user_invite = {"email": "dude2@dude.dude"} + model = self.bc.database.create( + authenticate=True, + user=user, + user_invite=user_invite, + cohort=2, + role=role, + capability="crud_member", + profile_academy=1, + ) + + url = reverse_lazy("authenticate:academy_member") data = { - 'role': 'god', - 'first_name': 'Kenny', - 'last_name': 'McKornick', - 'cohort': [2], - 'invite': True, - 'email': 'dude2@dude.dude', + "role": "god", + "first_name": "Kenny", + "last_name": "McKornick", + "cohort": [2], + "invite": True, + "email": "dude2@dude.dude", } - response = self.client.post(url, data, format='json', headers={'academy': 1}) + response = self.client.post(url, data, format="json", headers={"academy": 1}) json = response.json() expected = { - 'address': None, - 'email': 'dude2@dude.dude', - 'first_name': 'Kenny', - 'last_name': 'McKornick', - 'phone': '', - 'role': role, - 'status': 'INVITED', + "address": None, + "email": "dude2@dude.dude", + "first_name": "Kenny", + "last_name": "McKornick", + "phone": "", + "role": role, + "status": "INVITED", } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [ - self.bc.format.to_dict(model.profile_academy), { - 'academy_id': 1, - 'address': None, - 'email': 'dude2@dude.dude', - 'first_name': 'Kenny', - 'id': 2, - 'last_name': 'McKornick', - 'phone': '', - 'role_id': role, - 'status': 'INVITED', - 'user_id': None, - } - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + self.bc.format.to_dict(model.profile_academy), + { + "academy_id": 1, + "address": None, + "email": "dude2@dude.dude", + "first_name": "Kenny", + "id": 2, + "last_name": "McKornick", + "phone": "", + "role_id": role, + "status": "INVITED", + "user_id": None, + }, + ], + ) - invite = self.bc.database.get('authenticate.UserInvite', 1, dict=False) - params = {'callback': 'https://admin.4geeks.com'} + invite = self.bc.database.get("authenticate.UserInvite", 1, dict=False) + params = {"callback": "https://admin.4geeks.com"} querystr = urllib.parse.urlencode(params) - url = os.getenv('API_URL') + '/v1/auth/member/invite/' + \ - str(TOKEN) + '?' 
+ querystr - - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), [ - generate_user_invite({ - 'id': 1, - 'cohort_id': 1, - 'academy_id': 1, - 'author_id': 1, - 'email': 'dude2@dude.dude', - 'role_id': role, - 'token': model.user_invite.token, - 'syllabus_id': None, - }), - generate_user_invite({ - 'id': 2, - 'cohort_id': 2, - 'academy_id': 1, - 'author_id': 1, - 'email': 'dude2@dude.dude', - 'first_name': 'Kenny', - 'last_name': 'McKornick', - 'role_id': role, - 'token': TOKEN, - 'syllabus_id': None, - }), - ]) - - self.assertEqual(actions.send_email_message.call_args_list, [ - call('welcome_academy', - 'dude2@dude.dude', { - 'email': 'dude2@dude.dude', - 'subject': 'Welcome to ' + model.academy.name, - 'LINK': url, - 'FIST_NAME': 'Kenny', - }, - academy=model.academy) - ]) - - @patch('breathecode.notify.actions.send_email_message', MagicMock()) - @patch('random.getrandbits', MagicMock(side_effect=getrandbits)) + url = os.getenv("API_URL") + "/v1/auth/member/invite/" + str(TOKEN) + "?" + querystr + + self.assertEqual( + self.bc.database.list_of("authenticate.UserInvite"), + [ + generate_user_invite( + { + "id": 1, + "cohort_id": 1, + "academy_id": 1, + "author_id": 1, + "email": "dude2@dude.dude", + "role_id": role, + "token": model.user_invite.token, + "syllabus_id": None, + } + ), + generate_user_invite( + { + "id": 2, + "cohort_id": 2, + "academy_id": 1, + "author_id": 1, + "email": "dude2@dude.dude", + "first_name": "Kenny", + "last_name": "McKornick", + "role_id": role, + "token": TOKEN, + "syllabus_id": None, + } + ), + ], + ) + + self.assertEqual( + actions.send_email_message.call_args_list, + [ + call( + "welcome_academy", + "dude2@dude.dude", + { + "email": "dude2@dude.dude", + "subject": "Welcome to " + model.academy.name, + "LINK": url, + "FIST_NAME": "Kenny", + }, + academy=model.academy, + ) + ], + ) + + @patch("breathecode.notify.actions.send_email_message", MagicMock()) + @patch("random.getrandbits", MagicMock(side_effect=getrandbits)) def test_academy_member__post__without_user_in_data__user_already_exists(self): """Test /academy/:id/member""" - role = 'god' + role = "god" - user = {'email': 'dude@dude.dude'} - model = self.bc.database.create(authenticate=True, - user=user, - user_invite=user, - role=role, - capability='crud_member', - profile_academy=1) + user = {"email": "dude@dude.dude"} + model = self.bc.database.create( + authenticate=True, user=user, user_invite=user, role=role, capability="crud_member", profile_academy=1 + ) - url = reverse_lazy('authenticate:academy_member') + url = reverse_lazy("authenticate:academy_member") data = { - 'role': 'god', - 'first_name': 'Kenny', - 'last_name': 'McKornick', - 'invite': True, - 'email': model.user.email, + "role": "god", + "first_name": "Kenny", + "last_name": "McKornick", + "invite": True, + "email": model.user.email, } - response = self.client.post(url, data, format='json', headers={'academy': 1}) + response = self.client.post(url, data, format="json", headers={"academy": 1}) json = response.json() - expected = {'detail': 'already-exists', 'status_code': 400} + expected = {"detail": "already-exists", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [ - self.bc.format.to_dict(model.profile_academy), - ]) - - invite = self.bc.database.get('authenticate.UserInvite', 1, dict=False) - params = {'callback': 'https://admin.4geeks.com'} + self.assertEqual( + 
self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + self.bc.format.to_dict(model.profile_academy), + ], + ) + + invite = self.bc.database.get("authenticate.UserInvite", 1, dict=False) + params = {"callback": "https://admin.4geeks.com"} querystr = urllib.parse.urlencode(params) - url = os.getenv('API_URL') + '/v1/auth/member/invite/' + \ - str(TOKEN) + '?' + querystr + url = os.getenv("API_URL") + "/v1/auth/member/invite/" + str(TOKEN) + "?" + querystr - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), [ - self.bc.format.to_dict(model.user_invite), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.UserInvite"), + [ + self.bc.format.to_dict(model.user_invite), + ], + ) self.assertEqual(actions.send_email_message.call_args_list, []) @@ -1960,17 +2102,21 @@ class MemberDeleteTestSuite(AuthTestCase): def test_academy_member_delete_without_args_in_url_or_bulk(self): """Test /cohort/:id/user without auth""" - model = self.bc.database.create(authenticate=True, - profile_academy=True, - capability='crud_member', - role='potato') - url = reverse_lazy('authenticate:academy_member') - response = self.client.delete(url, headers={'academy': 1}) + model = self.bc.database.create( + authenticate=True, profile_academy=True, capability="crud_member", role="potato" + ) + url = reverse_lazy("authenticate:academy_member") + response = self.client.delete(url, headers={"academy": 1}) json = response.json() - expected = {'detail': 'delete-is-forbidden', 'status_code': 403} + expected = {"detail": "delete-is-forbidden", "status_code": 403} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [{ - **self.model_to_dict(model, 'profile_academy'), - }]) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + { + **self.model_to_dict(model, "profile_academy"), + } + ], + ) diff --git a/breathecode/authenticate/tests/urls/tests_academy_member_id.py b/breathecode/authenticate/tests/urls/tests_academy_member_id.py index cfc389310..2d803a01a 100644 --- a/breathecode/authenticate/tests/urls/tests_academy_member_id.py +++ b/breathecode/authenticate/tests/urls/tests_academy_member_id.py @@ -14,9 +14,9 @@ from ..mixins.new_auth_test_case import AuthTestCase -@capable_of('read_member') +@capable_of("read_member") def view_method_mock(request, *args, **kwargs): - response = {'args': args, 'kwargs': kwargs} + response = {"args": args, "kwargs": kwargs} return Response(response, status=200) @@ -26,23 +26,22 @@ class MemberSetOfDuckTestSuite(AuthTestCase): 🔽🔽🔽 GET check the param is being passed """ - @patch('breathecode.authenticate.views.MemberView.get', MagicMock(side_effect=view_method_mock)) + @patch("breathecode.authenticate.views.MemberView.get", MagicMock(side_effect=view_method_mock)) def test_academy_member__get__with_auth___mock_view(self): - profile_academies = [{'academy_id': id} for id in range(1, 4)] - model = self.bc.database.create(academy=3, - capability='read_member', - role='role', - profile_academy=profile_academies) + profile_academies = [{"academy_id": id} for id in range(1, 4)] + model = self.bc.database.create( + academy=3, capability="read_member", role="role", profile_academy=profile_academies + ) for n in range(1, 4): self.client.force_authenticate(model.user) self.bc.request.set_headers(academy=n) - url = reverse_lazy('authenticate:academy_member_id', kwargs={'user_id_or_email': f'{n}'}) + url = 
reverse_lazy("authenticate:academy_member_id", kwargs={"user_id_or_email": f"{n}"}) response = self.client.get(url) json = response.json() - expected = {'args': [], 'kwargs': {'academy_id': str(n), 'user_id_or_email': f'{n}'}} + expected = {"args": [], "kwargs": {"academy_id": str(n), "user_id_or_email": f"{n}"}} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -51,23 +50,22 @@ def test_academy_member__get__with_auth___mock_view(self): 🔽🔽🔽 PUT check the param is being passed """ - @patch('breathecode.authenticate.views.MemberView.put', MagicMock(side_effect=view_method_mock)) + @patch("breathecode.authenticate.views.MemberView.put", MagicMock(side_effect=view_method_mock)) def test_academy_member__put__with_auth___mock_view(self): - profile_academies = [{'academy_id': id} for id in range(1, 4)] - model = self.bc.database.create(academy=3, - capability='read_member', - role='role', - profile_academy=profile_academies) + profile_academies = [{"academy_id": id} for id in range(1, 4)] + model = self.bc.database.create( + academy=3, capability="read_member", role="role", profile_academy=profile_academies + ) for n in range(1, 4): self.client.force_authenticate(model.user) self.bc.request.set_headers(academy=n) - url = reverse_lazy('authenticate:academy_member_id', kwargs={'user_id_or_email': f'{n}'}) + url = reverse_lazy("authenticate:academy_member_id", kwargs={"user_id_or_email": f"{n}"}) response = self.client.put(url) json = response.json() - expected = {'args': [], 'kwargs': {'academy_id': str(n), 'user_id_or_email': f'{n}'}} + expected = {"args": [], "kwargs": {"academy_id": str(n), "user_id_or_email": f"{n}"}} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -76,23 +74,22 @@ def test_academy_member__put__with_auth___mock_view(self): 🔽🔽🔽 DELETE check the param is being passed """ - @patch('breathecode.authenticate.views.MemberView.delete', MagicMock(side_effect=view_method_mock)) + @patch("breathecode.authenticate.views.MemberView.delete", MagicMock(side_effect=view_method_mock)) def test_academy_member__delete__with_auth___mock_view(self): - profile_academies = [{'academy_id': id} for id in range(1, 4)] - model = self.bc.database.create(academy=3, - capability='read_member', - role='role', - profile_academy=profile_academies) + profile_academies = [{"academy_id": id} for id in range(1, 4)] + model = self.bc.database.create( + academy=3, capability="read_member", role="role", profile_academy=profile_academies + ) for n in range(1, 4): self.client.force_authenticate(model.user) self.bc.request.set_headers(academy=n) - url = reverse_lazy('authenticate:academy_member_id', kwargs={'user_id_or_email': f'{n}'}) + url = reverse_lazy("authenticate:academy_member_id", kwargs={"user_id_or_email": f"{n}"}) response = self.client.delete(url) json = response.json() - expected = {'args': [], 'kwargs': {'academy_id': str(n), 'user_id_or_email': f'{n}'}} + expected = {"args": [], "kwargs": {"academy_id": str(n), "user_id_or_email": f"{n}"}} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -100,663 +97,698 @@ def test_academy_member__delete__with_auth___mock_view(self): class AuthenticateTestSuite(AuthTestCase): """Authentication test suite""" + """ 🔽🔽🔽 Auth """ - @patch('os.getenv', MagicMock(return_value='https://dotdotdotdotdot.dot')) + @patch("os.getenv", MagicMock(return_value="https://dotdotdotdotdot.dot")) def test_academy_member_id_without_auth(self): """Test 
/academy/:id/member/:id without auth""" self.bc.request.set_headers(academy=1) - url = reverse_lazy('authenticate:academy_member_id', kwargs={'user_id_or_email': '1'}) + url = reverse_lazy("authenticate:academy_member_id", kwargs={"user_id_or_email": "1"}) response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED, - }) + self.assertEqual( + json, + { + "detail": "Authentication credentials were not provided.", + "status_code": status.HTTP_401_UNAUTHORIZED, + }, + ) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) - @patch('os.getenv', MagicMock(return_value='https://dotdotdotdotdot.dot')) + @patch("os.getenv", MagicMock(return_value="https://dotdotdotdotdot.dot")) def test_academy_member_id_without_capability(self): """Test /academy/:id/member/:id""" self.bc.request.set_headers(academy=1) self.generate_models(authenticate=True) - url = reverse_lazy('authenticate:academy_member_id', kwargs={'user_id_or_email': '1'}) + url = reverse_lazy("authenticate:academy_member_id", kwargs={"user_id_or_email": "1"}) response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': "You (user: 1) don't have this capability: read_member " - 'for academy 1', - 'status_code': 403 - }) + self.assertEqual( + json, + {"detail": "You (user: 1) don't have this capability: read_member " "for academy 1", "status_code": 403}, + ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) - @patch('os.getenv', MagicMock(return_value='https://dotdotdotdotdot.dot')) + @patch("os.getenv", MagicMock(return_value="https://dotdotdotdotdot.dot")) def test_academy_member_id_without_academy(self): """Test /academy/:id/member/:id""" - role = 'konan' + role = "konan" self.bc.request.set_headers(academy=1) - self.generate_models(authenticate=True, role=role, capability='read_member') - url = reverse_lazy('authenticate:academy_member_id', kwargs={'user_id_or_email': '1'}) + self.generate_models(authenticate=True, role=role, capability="read_member") + url = reverse_lazy("authenticate:academy_member_id", kwargs={"user_id_or_email": "1"}) response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': "You (user: 1) don't have this capability: read_member " - 'for academy 1', - 'status_code': 403 - }) + self.assertEqual( + json, + {"detail": "You (user: 1) don't have this capability: read_member " "for academy 1", "status_code": 403}, + ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) """ 🔽🔽🔽 GET without data, passing id """ - @patch('os.getenv', MagicMock(return_value='https://dotdotdotdotdot.dot')) + @patch("os.getenv", MagicMock(return_value="https://dotdotdotdotdot.dot")) def test_academy_member_id__passing_id__not_found(self): """Test /academy/:id/member/:id""" - role = 'konan' + role = "konan" self.bc.request.set_headers(academy=1) - model = self.generate_models(authenticate=True, role=role, capability='read_member', profile_academy=True) - url = reverse_lazy('authenticate:academy_member_id', kwargs={'user_id_or_email': '2'}) + model = self.generate_models(authenticate=True, role=role, capability="read_member", profile_academy=True) + url = reverse_lazy("authenticate:academy_member_id", kwargs={"user_id_or_email": "2"}) response = self.client.get(url) json = response.json() - expected = {'detail': 'profile-academy-not-found', 'status_code': 404} + expected = {"detail": 
"profile-academy-not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [{ - 'academy_id': 1, - 'address': None, - 'email': None, - 'first_name': None, - 'id': 1, - 'last_name': None, - 'phone': '', - 'role_id': role, - 'status': 'INVITED', - 'user_id': 1, - }]) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + { + "academy_id": 1, + "address": None, + "email": None, + "first_name": None, + "id": 1, + "last_name": None, + "phone": "", + "role_id": role, + "status": "INVITED", + "user_id": 1, + } + ], + ) """ 🔽🔽🔽 GET with data, passing id """ - @patch('os.getenv', MagicMock(return_value='https://dotdotdotdotdot.dot')) + @patch("os.getenv", MagicMock(return_value="https://dotdotdotdotdot.dot")) def test_academy_member_id__passing_id(self): """Test /academy/:id/member/:id""" - role = 'konan' + role = "konan" self.bc.request.set_headers(academy=1) - model = self.generate_models(authenticate=True, role=role, capability='read_member', profile_academy=True) - url = reverse_lazy('authenticate:academy_member_id', kwargs={'user_id_or_email': '1'}) + model = self.generate_models(authenticate=True, role=role, capability="read_member", profile_academy=True) + url = reverse_lazy("authenticate:academy_member_id", kwargs={"user_id_or_email": "1"}) response = self.client.get(url) json = response.json() - del json['invite_url'] # removing this because i will not hardcode it on the test + del json["invite_url"] # removing this because i will not hardcode it on the test profile_academy = self.get_profile_academy(1) self.assertEqual( - json, { - 'academy': { - 'id': model['academy'].id, - 'name': model['academy'].name, - 'slug': model['academy'].slug, + json, + { + "academy": { + "id": model["academy"].id, + "name": model["academy"].name, + "slug": model["academy"].slug, }, - 'address': None, - 'created_at': datetime_to_iso_format(profile_academy.created_at), - 'email': None, - 'first_name': None, - 'id': 1, - 'last_name': None, - 'phone': '', - 'role': { - 'id': role, - 'name': role, - 'slug': role, + "address": None, + "created_at": datetime_to_iso_format(profile_academy.created_at), + "email": None, + "first_name": None, + "id": 1, + "last_name": None, + "phone": "", + "role": { + "id": role, + "name": role, + "slug": role, }, - 'status': 'INVITED', - 'user': { - 'email': model['user'].email, - 'first_name': model['user'].first_name, - 'id': model['user'].id, - 'last_name': model['user'].last_name, - 'github': None, - 'profile': None, + "status": "INVITED", + "user": { + "email": model["user"].email, + "first_name": model["user"].first_name, + "id": model["user"].id, + "last_name": model["user"].last_name, + "github": None, + "profile": None, }, - }) + }, + ) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [{ - 'academy_id': 1, - 'address': None, - 'email': None, - 'first_name': None, - 'id': 1, - 'last_name': None, - 'phone': '', - 'role_id': role, - 'status': 'INVITED', - 'user_id': 1, - }]) - - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), []) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + { + "academy_id": 1, + "address": None, + "email": None, + "first_name": None, + "id": 1, + "last_name": None, + "phone": "", + "role_id": role, + "status": "INVITED", + "user_id": 1, + } 
+ ], + ) + + self.assertEqual(self.bc.database.list_of("authenticate.UserInvite"), []) """ 🔽🔽🔽 GET without data, passing email """ - @patch('os.getenv', MagicMock(return_value='https://dotdotdotdotdot.dot')) + @patch("os.getenv", MagicMock(return_value="https://dotdotdotdotdot.dot")) def test_academy_member_id__passing_email__not_found(self): """Test /academy/:id/member/:id""" - role = 'konan' + role = "konan" self.bc.request.set_headers(academy=1) - model = self.generate_models(authenticate=True, role=role, capability='read_member', profile_academy=True) - url = reverse_lazy('authenticate:academy_member_id', kwargs={'user_id_or_email': 'dude@dude.dude'}) + model = self.generate_models(authenticate=True, role=role, capability="read_member", profile_academy=True) + url = reverse_lazy("authenticate:academy_member_id", kwargs={"user_id_or_email": "dude@dude.dude"}) response = self.client.get(url) json = response.json() - expected = {'detail': 'profile-academy-not-found', 'status_code': 404} + expected = {"detail": "profile-academy-not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [{ - 'academy_id': 1, - 'address': None, - 'email': None, - 'first_name': None, - 'id': 1, - 'last_name': None, - 'phone': '', - 'role_id': role, - 'status': 'INVITED', - 'user_id': 1, - }]) - - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), []) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + { + "academy_id": 1, + "address": None, + "email": None, + "first_name": None, + "id": 1, + "last_name": None, + "phone": "", + "role_id": role, + "status": "INVITED", + "user_id": 1, + } + ], + ) + + self.assertEqual(self.bc.database.list_of("authenticate.UserInvite"), []) """ 🔽🔽🔽 GET with data, passing email """ - @patch('os.getenv', MagicMock(return_value='https://dotdotdotdotdot.dot')) + @patch("os.getenv", MagicMock(return_value="https://dotdotdotdotdot.dot")) def test_academy_member_id__passing_id(self): """Test /academy/:id/member/:id""" - role = 'konan' + role = "konan" self.bc.request.set_headers(academy=1) - email = 'dude@dude.dude' - user = {'email': email} - model = self.generate_models(authenticate=True, - user=user, - role=role, - capability='read_member', - profile_academy=True) - url = reverse_lazy('authenticate:academy_member_id', kwargs={'user_id_or_email': email}) + email = "dude@dude.dude" + user = {"email": email} + model = self.generate_models( + authenticate=True, user=user, role=role, capability="read_member", profile_academy=True + ) + url = reverse_lazy("authenticate:academy_member_id", kwargs={"user_id_or_email": email}) response = self.client.get(url) json = response.json() - del json['invite_url'] # removing this because i will not hardcode it on the test + del json["invite_url"] # removing this because i will not hardcode it on the test profile_academy = self.get_profile_academy(1) self.assertEqual( - json, { - 'academy': { - 'id': model['academy'].id, - 'name': model['academy'].name, - 'slug': model['academy'].slug, + json, + { + "academy": { + "id": model["academy"].id, + "name": model["academy"].name, + "slug": model["academy"].slug, }, - 'address': None, - 'created_at': datetime_to_iso_format(profile_academy.created_at), - 'email': None, - 'first_name': None, - 'id': 1, - 'last_name': None, - 'phone': '', - 'role': { - 'id': role, - 'name': role, - 'slug': role, + "address": None, + 
"created_at": datetime_to_iso_format(profile_academy.created_at), + "email": None, + "first_name": None, + "id": 1, + "last_name": None, + "phone": "", + "role": { + "id": role, + "name": role, + "slug": role, }, - 'status': 'INVITED', - 'user': { - 'email': model['user'].email, - 'first_name': model['user'].first_name, - 'id': model['user'].id, - 'last_name': model['user'].last_name, - 'github': None, - 'profile': None, + "status": "INVITED", + "user": { + "email": model["user"].email, + "first_name": model["user"].first_name, + "id": model["user"].id, + "last_name": model["user"].last_name, + "github": None, + "profile": None, }, - }) + }, + ) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [{ - 'academy_id': 1, - 'address': None, - 'email': None, - 'first_name': None, - 'id': 1, - 'last_name': None, - 'phone': '', - 'role_id': role, - 'status': 'INVITED', - 'user_id': 1, - }]) - - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), []) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + { + "academy_id": 1, + "address": None, + "email": None, + "first_name": None, + "id": 1, + "last_name": None, + "phone": "", + "role_id": role, + "status": "INVITED", + "user_id": 1, + } + ], + ) + + self.assertEqual(self.bc.database.list_of("authenticate.UserInvite"), []) """ 🔽🔽🔽 GET with profile and github """ - @patch('os.getenv', MagicMock(return_value='https://dotdotdotdotdot.dot')) + @patch("os.getenv", MagicMock(return_value="https://dotdotdotdotdot.dot")) def test_academy_member_id__with_profile__with_github(self): """Test /academy/:id/member/:id""" - role = 'konan' + role = "konan" self.bc.request.set_headers(academy=1) - model = self.generate_models(authenticate=True, - role=role, - capability='read_member', - profile_academy=True, - credentials_github=True, - profile=True) - url = reverse_lazy('authenticate:academy_member_id', kwargs={'user_id_or_email': '1'}) + model = self.generate_models( + authenticate=True, + role=role, + capability="read_member", + profile_academy=True, + credentials_github=True, + profile=True, + ) + url = reverse_lazy("authenticate:academy_member_id", kwargs={"user_id_or_email": "1"}) response = self.client.get(url) json = response.json() - del json['invite_url'] # removing this because i will not hardcode it on the test + del json["invite_url"] # removing this because i will not hardcode it on the test profile_academy = self.get_profile_academy(1) self.assertEqual( - json, { - 'academy': { - 'id': model['academy'].id, - 'name': model['academy'].name, - 'slug': model['academy'].slug, + json, + { + "academy": { + "id": model["academy"].id, + "name": model["academy"].name, + "slug": model["academy"].slug, }, - 'address': None, - 'created_at': datetime_to_iso_format(profile_academy.created_at), - 'email': None, - 'first_name': None, - 'id': 1, - 'last_name': None, - 'phone': '', - 'role': { - 'id': role, - 'name': role, - 'slug': role, + "address": None, + "created_at": datetime_to_iso_format(profile_academy.created_at), + "email": None, + "first_name": None, + "id": 1, + "last_name": None, + "phone": "", + "role": { + "id": role, + "name": role, + "slug": role, }, - 'status': 'INVITED', - 'user': { - 'email': model['user'].email, - 'first_name': model['user'].first_name, - 'id': model['user'].id, - 'last_name': model['user'].last_name, - 'github': { - 'avatar_url': model['user'].credentialsgithub.avatar_url, - 'name': 
model['user'].credentialsgithub.name, - 'username': model['user'].credentialsgithub.username, - }, - 'profile': { - 'avatar_url': model['user'].profile.avatar_url + "status": "INVITED", + "user": { + "email": model["user"].email, + "first_name": model["user"].first_name, + "id": model["user"].id, + "last_name": model["user"].last_name, + "github": { + "avatar_url": model["user"].credentialsgithub.avatar_url, + "name": model["user"].credentialsgithub.name, + "username": model["user"].credentialsgithub.username, }, + "profile": {"avatar_url": model["user"].profile.avatar_url}, }, - }) + }, + ) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [{ - 'academy_id': 1, - 'address': None, - 'email': None, - 'first_name': None, - 'id': 1, - 'last_name': None, - 'phone': '', - 'role_id': role, - 'status': 'INVITED', - 'user_id': 1, - }]) - - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), []) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + { + "academy_id": 1, + "address": None, + "email": None, + "first_name": None, + "id": 1, + "last_name": None, + "phone": "", + "role_id": role, + "status": "INVITED", + "user_id": 1, + } + ], + ) + + self.assertEqual(self.bc.database.list_of("authenticate.UserInvite"), []) """ 🔽🔽🔽 GET with github """ - @patch('os.getenv', MagicMock(return_value='https://dotdotdotdotdot.dot')) + @patch("os.getenv", MagicMock(return_value="https://dotdotdotdotdot.dot")) def test_academy_member_id_with_github(self): """Test /academy/:id/member/:id""" - role = 'konan' + role = "konan" self.bc.request.set_headers(academy=1) - model = self.generate_models(authenticate=True, - role=role, - capability='read_member', - profile_academy=True, - credentials_github=True) - url = reverse_lazy('authenticate:academy_member_id', kwargs={'user_id_or_email': '1'}) + model = self.generate_models( + authenticate=True, role=role, capability="read_member", profile_academy=True, credentials_github=True + ) + url = reverse_lazy("authenticate:academy_member_id", kwargs={"user_id_or_email": "1"}) response = self.client.get(url) json = response.json() - del json['invite_url'] # removing this because i will not hardcode it on the test + del json["invite_url"] # removing this because i will not hardcode it on the test profile_academy = self.get_profile_academy(1) self.assertEqual( - json, { - 'academy': { - 'id': model['academy'].id, - 'name': model['academy'].name, - 'slug': model['academy'].slug, + json, + { + "academy": { + "id": model["academy"].id, + "name": model["academy"].name, + "slug": model["academy"].slug, }, - 'address': None, - 'created_at': datetime_to_iso_format(profile_academy.created_at), - 'email': None, - 'first_name': None, - 'id': 1, - 'last_name': None, - 'phone': '', - 'role': { - 'id': role, - 'name': role, - 'slug': role, + "address": None, + "created_at": datetime_to_iso_format(profile_academy.created_at), + "email": None, + "first_name": None, + "id": 1, + "last_name": None, + "phone": "", + "role": { + "id": role, + "name": role, + "slug": role, }, - 'status': 'INVITED', - 'user': { - 'email': model['user'].email, - 'first_name': model['user'].first_name, - 'id': model['user'].id, - 'last_name': model['user'].last_name, - 'github': { - 'avatar_url': None, - 'name': None, - 'username': None - }, - 'profile': None, + "status": "INVITED", + "user": { + "email": model["user"].email, + "first_name": model["user"].first_name, + "id": 
model["user"].id, + "last_name": model["user"].last_name, + "github": {"avatar_url": None, "name": None, "username": None}, + "profile": None, }, - }) + }, + ) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [{ - 'academy_id': 1, - 'address': None, - 'email': None, - 'first_name': None, - 'id': 1, - 'last_name': None, - 'phone': '', - 'role_id': role, - 'status': 'INVITED', - 'user_id': 1, - }]) - - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), []) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + { + "academy_id": 1, + "address": None, + "email": None, + "first_name": None, + "id": 1, + "last_name": None, + "phone": "", + "role_id": role, + "status": "INVITED", + "user_id": 1, + } + ], + ) + + self.assertEqual(self.bc.database.list_of("authenticate.UserInvite"), []) """ 🔽🔽🔽 PUT capability """ - @patch('os.getenv', MagicMock(return_value='https://dotdotdotdotdot.dot')) + @patch("os.getenv", MagicMock(return_value="https://dotdotdotdotdot.dot")) def test_academy_member_id__put__without_capability(self): """Test /academy/:id/member/:id""" self.bc.request.set_headers(academy=1) self.generate_models(authenticate=True) - url = reverse_lazy('authenticate:academy_member_id', kwargs={'user_id_or_email': '1'}) + url = reverse_lazy("authenticate:academy_member_id", kwargs={"user_id_or_email": "1"}) response = self.client.put(url) json = response.json() - self.assertEqual(json, { - 'detail': "You (user: 1) don't have this capability: crud_member " - 'for academy 1', - 'status_code': 403 - }) + self.assertEqual( + json, + {"detail": "You (user: 1) don't have this capability: crud_member " "for academy 1", "status_code": 403}, + ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) """ 🔽🔽🔽 POST without required fields """ - @patch('os.getenv', MagicMock(return_value='https://dotdotdotdotdot.dot')) + @patch("os.getenv", MagicMock(return_value="https://dotdotdotdotdot.dot")) def test_academy_member_id__post__without__first_name(self): """Test /academy/:id/member/:id""" profile_academy = { - 'first_name': self.bc.fake.first_name(), - 'last_name': self.bc.fake.last_name(), - 'email': self.bc.fake.email() + "first_name": self.bc.fake.first_name(), + "last_name": self.bc.fake.last_name(), + "email": self.bc.fake.email(), } - role = 'konan' + role = "konan" self.bc.request.set_headers(academy=1) model = self.generate_models( authenticate=True, role=role, - capability='crud_member', + capability="crud_member", profile_academy=profile_academy, ) - url = reverse_lazy('authenticate:academy_member') + url = reverse_lazy("authenticate:academy_member") - data = {'role': role, 'invite': True, 'last_name': self.bc.fake.last_name(), 'email': self.bc.fake.email()} + data = {"role": role, "invite": True, "last_name": self.bc.fake.last_name(), "email": self.bc.fake.email()} response = self.client.post(url, data) json = response.json() - expected = {'detail': 'first-name-not-found', 'status_code': 400} + expected = {"detail": "first-name-not-found", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [ - self.bc.format.to_dict(model.profile_academy), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + self.bc.format.to_dict(model.profile_academy), + ], + ) - @patch('os.getenv', 
MagicMock(return_value='https://dotdotdotdotdot.dot')) + @patch("os.getenv", MagicMock(return_value="https://dotdotdotdotdot.dot")) def test_academy_member_id__post__without__last_name(self): """Test /academy/:id/member/:id""" profile_academy = { - 'first_name': self.bc.fake.first_name(), - 'last_name': self.bc.fake.last_name(), - 'email': self.bc.fake.email() + "first_name": self.bc.fake.first_name(), + "last_name": self.bc.fake.last_name(), + "email": self.bc.fake.email(), } - role = 'konan' + role = "konan" self.bc.request.set_headers(academy=1) model = self.generate_models( authenticate=True, role=role, - capability='crud_member', + capability="crud_member", profile_academy=profile_academy, ) - url = reverse_lazy('authenticate:academy_member') + url = reverse_lazy("authenticate:academy_member") - data = {'role': role, 'invite': True, 'first_name': self.bc.fake.first_name(), 'email': self.bc.fake.email()} + data = {"role": role, "invite": True, "first_name": self.bc.fake.first_name(), "email": self.bc.fake.email()} response = self.client.post(url, data) json = response.json() - expected = {'detail': 'last-name-not-found', 'status_code': 400} + expected = {"detail": "last-name-not-found", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [ - self.bc.format.to_dict(model.profile_academy), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + self.bc.format.to_dict(model.profile_academy), + ], + ) - @patch('os.getenv', MagicMock(return_value='https://dotdotdotdotdot.dot')) + @patch("os.getenv", MagicMock(return_value="https://dotdotdotdotdot.dot")) def test_academy_member_id__post__without__email(self): """Test /academy/:id/member/:id""" profile_academy = { - 'first_name': self.bc.fake.first_name(), - 'last_name': self.bc.fake.last_name(), - 'email': self.bc.fake.email() + "first_name": self.bc.fake.first_name(), + "last_name": self.bc.fake.last_name(), + "email": self.bc.fake.email(), } - role = 'konan' + role = "konan" self.bc.request.set_headers(academy=1) model = self.generate_models( authenticate=True, role=role, - capability='crud_member', + capability="crud_member", profile_academy=profile_academy, ) - url = reverse_lazy('authenticate:academy_member') + url = reverse_lazy("authenticate:academy_member") data = { - 'role': role, - 'invite': True, - 'last_name': self.bc.fake.last_name(), - 'first_name': self.bc.fake.first_name() + "role": role, + "invite": True, + "last_name": self.bc.fake.last_name(), + "first_name": self.bc.fake.first_name(), } response = self.client.post(url, data) json = response.json() - expected = {'detail': 'no-email-or-id', 'status_code': 400} + expected = {"detail": "no-email-or-id", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [ - self.bc.format.to_dict(model.profile_academy), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + self.bc.format.to_dict(model.profile_academy), + ], + ) """ 🔽🔽🔽 PUT without required fields """ - @patch('os.getenv', MagicMock(return_value='https://dotdotdotdotdot.dot')) + @patch("os.getenv", MagicMock(return_value="https://dotdotdotdotdot.dot")) def test_academy_member_id__put__without_any_required_fields(self): """Test /academy/:id/member/:id""" - role = 'konan' + 
role = "konan" self.bc.request.set_headers(academy=1) model = self.generate_models( authenticate=True, role=role, - capability='crud_member', + capability="crud_member", profile_academy=1, ) - url = reverse_lazy('authenticate:academy_member_id', kwargs={'user_id_or_email': 'dude@dude.dude'}) + url = reverse_lazy("authenticate:academy_member_id", kwargs={"user_id_or_email": "dude@dude.dude"}) response = self.client.put(url) json = response.json() - expected = {'detail': 'email-not-found', 'status_code': 400} + expected = {"detail": "email-not-found", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [ - self.bc.format.to_dict(model.profile_academy), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + self.bc.format.to_dict(model.profile_academy), + ], + ) def test_academy_member_id_put_without__first_name(self): profile_academy = { - 'last_name': self.bc.fake.last_name(), - 'email': self.bc.fake.email(), - 'phone': self.bc.fake.phone_number(), + "last_name": self.bc.fake.last_name(), + "email": self.bc.fake.email(), + "phone": self.bc.fake.phone_number(), } - user = {'first_name': '', 'last_name': self.bc.fake.last_name(), 'email': self.bc.fake.email()} + user = {"first_name": "", "last_name": self.bc.fake.last_name(), "email": self.bc.fake.email()} self.bc.request.set_headers(academy=1) - model = self.bc.database.create(user=user, - authenticate=True, - capability='crud_member', - role='role', - profile_academy=profile_academy) + model = self.bc.database.create( + user=user, authenticate=True, capability="crud_member", role="role", profile_academy=profile_academy + ) - url = reverse_lazy('authenticate:academy_member_id', kwargs={'user_id_or_email': model.user.id}) + url = reverse_lazy("authenticate:academy_member_id", kwargs={"user_id_or_email": model.user.id}) data = { - 'role': 'role', - 'invite': True, + "role": "role", + "invite": True, } - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() - expected = {'detail': 'first-name-not-founded', 'status_code': 400} + expected = {"detail": "first-name-not-founded", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) def test_academy_member_id_put_without__last_name(self): profile_academy = { - 'first_name': self.bc.fake.first_name(), - 'email': self.bc.fake.email(), - 'phone': self.bc.fake.phone_number(), + "first_name": self.bc.fake.first_name(), + "email": self.bc.fake.email(), + "phone": self.bc.fake.phone_number(), } - user = {'first_name': self.bc.fake.first_name(), 'last_name': '', 'email': self.bc.fake.email()} + user = {"first_name": self.bc.fake.first_name(), "last_name": "", "email": self.bc.fake.email()} self.bc.request.set_headers(academy=1) - model = self.bc.database.create(user=user, - authenticate=True, - capability='crud_member', - role='role', - profile_academy=profile_academy) + model = self.bc.database.create( + user=user, authenticate=True, capability="crud_member", role="role", profile_academy=profile_academy + ) - url = reverse_lazy('authenticate:academy_member_id', kwargs={'user_id_or_email': model.user.id}) + url = reverse_lazy("authenticate:academy_member_id", kwargs={"user_id_or_email": model.user.id}) data = { - 'role': 'role', - 'invite': True, + "role": "role", + "invite": True, } - 
response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() - expected = {'detail': 'last-name-not-founded', 'status_code': 400} + expected = {"detail": "last-name-not-founded", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) def test_academy_member_id_put_without__email(self): profile_academy = { - 'last_name': self.bc.fake.last_name(), - 'first_name': self.bc.fake.first_name(), - 'phone': self.bc.fake.phone_number(), + "last_name": self.bc.fake.last_name(), + "first_name": self.bc.fake.first_name(), + "phone": self.bc.fake.phone_number(), } for n in range(1, 4): self.bc.request.set_headers(academy=n) - model = self.bc.database.create(authenticate=True, - capability='crud_member', - role='role', - profile_academy=profile_academy) + model = self.bc.database.create( + authenticate=True, capability="crud_member", role="role", profile_academy=profile_academy + ) - url = reverse_lazy('authenticate:academy_member_id', kwargs={'user_id_or_email': n}) + url = reverse_lazy("authenticate:academy_member_id", kwargs={"user_id_or_email": n}) response = self.client.put(url) json = response.json() - expected = {'detail': 'email-not-found', 'status_code': 400} + expected = {"detail": "email-not-found", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - @patch('os.getenv', MagicMock(return_value='https://dotdotdotdotdot.dot')) + @patch("os.getenv", MagicMock(return_value="https://dotdotdotdotdot.dot")) def test_academy_member_id__put__without_email(self): """Test /academy/:id/member/:id""" profile_academy = { - 'first_name': self.bc.fake.first_name(), - 'last_name': self.bc.fake.last_name(), - 'phone': self.bc.fake.phone_number(), - 'email': self.bc.fake.email() + "first_name": self.bc.fake.first_name(), + "last_name": self.bc.fake.last_name(), + "phone": self.bc.fake.phone_number(), + "email": self.bc.fake.email(), } - role = 'konan' + role = "konan" self.bc.request.set_headers(academy=1) - model = self.bc.database.create(authenticate=True, - role=role, - capability='crud_member', - profile_academy=profile_academy) + model = self.bc.database.create( + authenticate=True, role=role, capability="crud_member", profile_academy=profile_academy + ) - url = reverse_lazy('authenticate:academy_member_id', kwargs={'user_id_or_email': '2'}) + url = reverse_lazy("authenticate:academy_member_id", kwargs={"user_id_or_email": "2"}) - data = {'role': role, 'last_name': self.bc.fake.last_name(), 'first_name': self.bc.fake.first_name()} - response = self.client.put(url, data, format='json') + data = {"role": role, "last_name": self.bc.fake.last_name(), "first_name": self.bc.fake.first_name()} + response = self.client.put(url, data, format="json") json = response.json() - expected = {'detail': 'user-not-found', 'status_code': 400} + expected = {"detail": "user-not-found", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) def test_academy_member_id_put_without__phone(self): profile_academy = { - 'first_name': self.bc.fake.first_name(), - 'last_name': self.bc.fake.last_name(), - 'email': self.bc.fake.email(), - 'phone': '', + "first_name": self.bc.fake.first_name(), + "last_name": self.bc.fake.last_name(), + "email": self.bc.fake.email(), + "phone": "", } - user = {'first_name': self.bc.fake.first_name(), 'phone': '', 'email': 
self.bc.fake.email()} + user = {"first_name": self.bc.fake.first_name(), "phone": "", "email": self.bc.fake.email()} self.bc.request.set_headers(academy=1) - model = self.bc.database.create(user=user, - authenticate=True, - capability='crud_member', - role='role', - profile_academy=profile_academy) + model = self.bc.database.create( + user=user, authenticate=True, capability="crud_member", role="role", profile_academy=profile_academy + ) - url = reverse_lazy('authenticate:academy_member_id', kwargs={'user_id_or_email': model.user.id}) + url = reverse_lazy("authenticate:academy_member_id", kwargs={"user_id_or_email": model.user.id}) data = { - 'role': 'role', - 'invite': True, - 'phone': None, + "role": "role", + "invite": True, + "phone": None, } - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() - expected = {'phone': ['This field may not be null.']} + expected = {"phone": ["This field may not be null."]} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -765,276 +797,294 @@ def test_academy_member_id_put_without__phone(self): 🔽🔽🔽 PUT role does not exists """ - @patch('os.getenv', MagicMock(return_value='https://dotdotdotdotdot.dot')) + @patch("os.getenv", MagicMock(return_value="https://dotdotdotdotdot.dot")) def test_academy_member_id__put__role_does_not_exists(self): """Test /academy/:id/member/:id""" profile_academy = { - 'first_name': self.bc.fake.first_name(), - 'last_name': self.bc.fake.last_name(), - 'phone': self.bc.fake.phone_number(), - 'email': self.bc.fake.email() + "first_name": self.bc.fake.first_name(), + "last_name": self.bc.fake.last_name(), + "phone": self.bc.fake.phone_number(), + "email": self.bc.fake.email(), } - role = 'konan' + role = "konan" self.bc.request.set_headers(academy=1) - model = self.generate_models(authenticate=True, - role=role, - capability='crud_member', - profile_academy=profile_academy) - url = reverse_lazy('authenticate:academy_member_id', kwargs={'user_id_or_email': '2'}) + model = self.generate_models( + authenticate=True, role=role, capability="crud_member", profile_academy=profile_academy + ) + url = reverse_lazy("authenticate:academy_member_id", kwargs={"user_id_or_email": "2"}) - data = {'role': 'mirai-nikki'} - response = self.client.put(url, data, format='json') + data = {"role": "mirai-nikki"} + response = self.client.put(url, data, format="json") json = response.json() - expected = {'role': ['Invalid pk "mirai-nikki" - object does not exist.']} + expected = {"role": ['Invalid pk "mirai-nikki" - object does not exist.']} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [ - self.bc.format.to_dict(model.profile_academy), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + self.bc.format.to_dict(model.profile_academy), + ], + ) """ 🔽🔽🔽 PUT user not exists, it's use the post serializer """ - @patch('os.getenv', MagicMock(return_value='https://dotdotdotdotdot.dot')) + @patch("os.getenv", MagicMock(return_value="https://dotdotdotdotdot.dot")) def test_academy_member_id__put__user_does_not_exists(self): """Test /academy/:id/member/:id""" profile_academy = { - 'first_name': self.bc.fake.first_name(), - 'last_name': self.bc.fake.last_name(), - 'phone': self.bc.fake.phone_number(), - 'email': self.bc.fake.email() + "first_name": 
self.bc.fake.first_name(), + "last_name": self.bc.fake.last_name(), + "phone": self.bc.fake.phone_number(), + "email": self.bc.fake.email(), } - role = 'konan' + role = "konan" self.bc.request.set_headers(academy=1) - model = self.generate_models(authenticate=True, - role=role, - capability='crud_member', - profile_academy=profile_academy) - url = reverse_lazy('authenticate:academy_member_id', kwargs={'user_id_or_email': '2'}) + model = self.generate_models( + authenticate=True, role=role, capability="crud_member", profile_academy=profile_academy + ) + url = reverse_lazy("authenticate:academy_member_id", kwargs={"user_id_or_email": "2"}) - data = {'role': role} - response = self.client.put(url, data, format='json') + data = {"role": role} + response = self.client.put(url, data, format="json") json = response.json() - expected = {'detail': 'first-name-not-found', 'status_code': 400} + expected = {"detail": "first-name-not-found", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [ - self.bc.format.to_dict(model.profile_academy), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + self.bc.format.to_dict(model.profile_academy), + ], + ) """ 🔽🔽🔽 PUT User exists but without a ProfileAcademy, it's use the post serializer """ - @patch('os.getenv', MagicMock(return_value='https://dotdotdotdotdot.dot')) + @patch("os.getenv", MagicMock(return_value="https://dotdotdotdotdot.dot")) def test_academy_member_id__put__user_exists_but_without_profile_academy(self): """Test /academy/:id/member/:id""" phone = self.bc.fake.phone_number() profile_academy = { - 'first_name': self.bc.fake.first_name(), - 'last_name': self.bc.fake.last_name(), - 'phone': phone, - 'email': self.bc.fake.email() + "first_name": self.bc.fake.first_name(), + "last_name": self.bc.fake.last_name(), + "phone": phone, + "email": self.bc.fake.email(), } - role = 'konan' + role = "konan" self.bc.request.set_headers(academy=1) - model = self.generate_models(role=role, user=2, capability='crud_member', profile_academy=profile_academy) + model = self.generate_models(role=role, user=2, capability="crud_member", profile_academy=profile_academy) self.bc.request.authenticate(model.user[0]) - url = reverse_lazy('authenticate:academy_member_id', kwargs={'user_id_or_email': '2'}) + url = reverse_lazy("authenticate:academy_member_id", kwargs={"user_id_or_email": "2"}) - data = {'role': role} - response = self.client.put(url, data, format='json') + data = {"role": role} + response = self.client.put(url, data, format="json") json = response.json() expected = { - 'address': None, - 'email': model.user[1].email, - 'first_name': model.user[1].first_name, - 'last_name': model.user[1].last_name, - 'phone': '', - 'role': role, - 'status': 'ACTIVE', + "address": None, + "email": model.user[1].email, + "first_name": model.user[1].first_name, + "last_name": model.user[1].last_name, + "phone": "", + "role": role, + "status": "ACTIVE", } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [ - self.bc.format.to_dict(model.profile_academy), - { - **self.bc.format.to_dict(model.profile_academy), - 'id': 2, - 'email': model.user[1].email, - 'first_name': model.user[1].first_name, - 'last_name': model.user[1].last_name, - 'phone': '', - 'role_id': role, - 'status': 
'ACTIVE', - 'user_id': 2, - }, - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + self.bc.format.to_dict(model.profile_academy), + { + **self.bc.format.to_dict(model.profile_academy), + "id": 2, + "email": model.user[1].email, + "first_name": model.user[1].first_name, + "last_name": model.user[1].last_name, + "phone": "", + "role_id": role, + "status": "ACTIVE", + "user_id": 2, + }, + ], + ) """ 🔽🔽🔽 PUT with data """ - @patch('os.getenv', MagicMock(return_value='https://dotdotdotdotdot.dot')) + @patch("os.getenv", MagicMock(return_value="https://dotdotdotdotdot.dot")) def test_academy_member_id__put__with_data(self): """Test /academy/:id/member/:id""" profile_academy = { - 'first_name': self.bc.fake.first_name(), - 'last_name': self.bc.fake.last_name(), - 'phone': self.bc.fake.phone_number(), - 'email': self.bc.fake.email() + "first_name": self.bc.fake.first_name(), + "last_name": self.bc.fake.last_name(), + "phone": self.bc.fake.phone_number(), + "email": self.bc.fake.email(), } - role = 'konan' + role = "konan" self.bc.request.set_headers(academy=1) - model = self.generate_models(authenticate=True, - role=role, - capability='crud_member', - profile_academy=profile_academy) - url = reverse_lazy('authenticate:academy_member_id', kwargs={'user_id_or_email': '1'}) + model = self.generate_models( + authenticate=True, role=role, capability="crud_member", profile_academy=profile_academy + ) + url = reverse_lazy("authenticate:academy_member_id", kwargs={"user_id_or_email": "1"}) - data = {'role': role} - response = self.client.put(url, data, format='json') + data = {"role": role} + response = self.client.put(url, data, format="json") json = response.json() expected = { - 'academy': model.academy.id, - 'address': model.profile_academy.address, - 'first_name': model.profile_academy.first_name, - 'last_name': model.profile_academy.last_name, - 'phone': model.profile_academy.phone, - 'role': role, - 'user': model.user.id, + "academy": model.academy.id, + "address": model.profile_academy.address, + "first_name": model.profile_academy.first_name, + "last_name": model.profile_academy.last_name, + "phone": model.profile_academy.phone, + "role": role, + "user": model.user.id, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [ - self.bc.format.to_dict(model.profile_academy), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + self.bc.format.to_dict(model.profile_academy), + ], + ) - @patch('os.getenv', MagicMock(return_value='https://dotdotdotdotdot.dot')) + @patch("os.getenv", MagicMock(return_value="https://dotdotdotdotdot.dot")) def test_academy_member_id__put__with_null_names(self): """Test /academy/:id/member/:id""" profile_academy = { - 'first_name': self.bc.fake.first_name(), - 'last_name': self.bc.fake.last_name(), - 'phone': self.bc.fake.phone_number(), - 'email': self.bc.fake.email() + "first_name": self.bc.fake.first_name(), + "last_name": self.bc.fake.last_name(), + "phone": self.bc.fake.phone_number(), + "email": self.bc.fake.email(), } - role = 'konan' + role = "konan" self.bc.request.set_headers(academy=1) - model = self.generate_models(authenticate=True, - user={ - 'first_name': '', - 'last_name': '' - }, - role=role, - capability='crud_member', - profile_academy=profile_academy) - url = reverse_lazy('authenticate:academy_member_id', kwargs={'user_id_or_email': '1'}) - - data = {'role': role, 
'first_name': None, 'last_name': None} - response = self.client.put(url, data, format='json') + model = self.generate_models( + authenticate=True, + user={"first_name": "", "last_name": ""}, + role=role, + capability="crud_member", + profile_academy=profile_academy, + ) + url = reverse_lazy("authenticate:academy_member_id", kwargs={"user_id_or_email": "1"}) + + data = {"role": role, "first_name": None, "last_name": None} + response = self.client.put(url, data, format="json") json = response.json() expected = { - 'academy': model.academy.id, - 'address': model.profile_academy.address, - 'first_name': model.profile_academy.first_name, - 'last_name': model.profile_academy.last_name, - 'phone': model.profile_academy.phone, - 'role': role, - 'user': model.user.id, + "academy": model.academy.id, + "address": model.profile_academy.address, + "first_name": model.profile_academy.first_name, + "last_name": model.profile_academy.last_name, + "phone": model.profile_academy.phone, + "role": role, + "user": model.user.id, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [ - self.bc.format.to_dict(model.profile_academy), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + self.bc.format.to_dict(model.profile_academy), + ], + ) """ 🔽🔽🔽 PUT with data, changing values """ - @patch('os.getenv', MagicMock(return_value='https://dotdotdotdotdot.dot')) + @patch("os.getenv", MagicMock(return_value="https://dotdotdotdotdot.dot")) def test_academy_member_id__put__with_data__changing_values(self): """Test /academy/:id/member/:id""" profile_academy = { - 'first_name': self.bc.fake.first_name(), - 'last_name': self.bc.fake.last_name(), - 'phone': self.bc.fake.phone_number(), - 'email': self.bc.fake.email() + "first_name": self.bc.fake.first_name(), + "last_name": self.bc.fake.last_name(), + "phone": self.bc.fake.phone_number(), + "email": self.bc.fake.email(), } - role = 'konan' + role = "konan" self.bc.request.set_headers(academy=1) - model = self.generate_models(authenticate=True, - role=role, - capability='crud_member', - profile_academy=profile_academy) - url = reverse_lazy('authenticate:academy_member_id', kwargs={'user_id_or_email': '1'}) + model = self.generate_models( + authenticate=True, role=role, capability="crud_member", profile_academy=profile_academy + ) + url = reverse_lazy("authenticate:academy_member_id", kwargs={"user_id_or_email": "1"}) - data = {'role': role, 'first_name': 'Lord', 'last_name': 'Valdomero'} - response = self.client.put(url, data, format='json') + data = {"role": role, "first_name": "Lord", "last_name": "Valdomero"} + response = self.client.put(url, data, format="json") json = response.json() expected = { - 'academy': model.academy.id, - 'address': model.profile_academy.address, - 'first_name': 'Lord', - 'last_name': 'Valdomero', - 'phone': model.profile_academy.phone, - 'role': role, - 'user': model.user.id, + "academy": model.academy.id, + "address": model.profile_academy.address, + "first_name": "Lord", + "last_name": "Valdomero", + "phone": model.profile_academy.phone, + "role": role, + "user": model.user.id, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [ - { - **self.bc.format.to_dict(model.profile_academy), - 'first_name': 'Lord', - 'last_name': 'Valdomero', - }, - ]) + self.assertEqual( + 
self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + { + **self.bc.format.to_dict(model.profile_academy), + "first_name": "Lord", + "last_name": "Valdomero", + }, + ], + ) """ 🔽🔽🔽 DELETE with data, passing email """ - @patch('os.getenv', MagicMock(return_value='https://dotdotdotdotdot.dot')) + @patch("os.getenv", MagicMock(return_value="https://dotdotdotdotdot.dot")) def test_academy_member_id__delete__passing_email(self): """Test /academy/:id/member/:id""" - role = 'konan' + role = "konan" self.bc.request.set_headers(academy=1) - model = self.generate_models(authenticate=True, role=role, capability='crud_member', profile_academy=True) - url = reverse_lazy('authenticate:academy_member_id', kwargs={'user_id_or_email': 'dude@dude.dude'}) + model = self.generate_models(authenticate=True, role=role, capability="crud_member", profile_academy=True) + url = reverse_lazy("authenticate:academy_member_id", kwargs={"user_id_or_email": "dude@dude.dude"}) response = self.client.delete(url) json = response.json() - expected = {'detail': 'delete-is-forbidden', 'status_code': 403} + expected = {"detail": "delete-is-forbidden", "status_code": 403} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [{ - 'academy_id': 1, - 'address': None, - 'email': None, - 'first_name': None, - 'id': 1, - 'last_name': None, - 'phone': '', - 'role_id': role, - 'status': 'INVITED', - 'user_id': 1, - }]) - - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), []) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + { + "academy_id": 1, + "address": None, + "email": None, + "first_name": None, + "id": 1, + "last_name": None, + "phone": "", + "role_id": role, + "status": "INVITED", + "user_id": 1, + } + ], + ) + + self.assertEqual(self.bc.database.list_of("authenticate.UserInvite"), []) diff --git a/breathecode/authenticate/tests/urls/tests_academy_member_id_invite.py b/breathecode/authenticate/tests/urls/tests_academy_member_id_invite.py index 7bcde65f1..bb4ca4574 100644 --- a/breathecode/authenticate/tests/urls/tests_academy_member_id_invite.py +++ b/breathecode/authenticate/tests/urls/tests_academy_member_id_invite.py @@ -14,63 +14,63 @@ def generate_user_invite(self, model, user_invite, arguments={}): return { - 'academy': { - 'id': model.academy.id, - 'name': model.academy.name, - 'slug': model.academy.slug, - 'logo_url': model.academy.logo_url, + "academy": { + "id": model.academy.id, + "name": model.academy.name, + "slug": model.academy.slug, + "logo_url": model.academy.logo_url, }, - 'cohort': { - 'name': model.cohort.name, - 'slug': model.cohort.slug, + "cohort": { + "name": model.cohort.name, + "slug": model.cohort.slug, }, - 'created_at': self.bc.datetime.to_iso_string(user_invite.created_at), - 'email': user_invite.email, - 'first_name': user_invite.first_name, - 'id': user_invite.id, - 'invite_url': f'http://localhost:8000/v1/auth/member/invite/{user_invite.token}', - 'last_name': user_invite.last_name, - 'role': { - 'id': model.role.slug, - 'name': model.role.name, - 'slug': model.role.slug, + "created_at": self.bc.datetime.to_iso_string(user_invite.created_at), + "email": user_invite.email, + "first_name": user_invite.first_name, + "id": user_invite.id, + "invite_url": f"http://localhost:8000/v1/auth/member/invite/{user_invite.token}", + "last_name": user_invite.last_name, + "role": { + "id": model.role.slug, + "name": 
model.role.name, + "slug": model.role.slug, }, - 'sent_at': user_invite.sent_at, - 'status': user_invite.status, - 'token': user_invite.token, + "sent_at": user_invite.sent_at, + "status": user_invite.status, + "token": user_invite.token, **arguments, } def generate_profile_academy(self, model, profile_academy, arguments={}): return { - 'academy': { - 'id': model.academy.id, - 'name': model.academy.name, - 'slug': model.academy.slug, + "academy": { + "id": model.academy.id, + "name": model.academy.name, + "slug": model.academy.slug, }, - 'address': profile_academy.address, - 'created_at': self.bc.datetime.to_iso_string(profile_academy.created_at), - 'email': profile_academy.email, - 'phone': profile_academy.phone, - 'first_name': profile_academy.first_name, - 'id': profile_academy.id, - 'invite_url': 'http://localhost:8000/v1/auth/academy/html/invite', - 'last_name': profile_academy.last_name, - 'role': { - 'id': model.role.slug, - 'name': model.role.name, - 'slug': model.role.slug, + "address": profile_academy.address, + "created_at": self.bc.datetime.to_iso_string(profile_academy.created_at), + "email": profile_academy.email, + "phone": profile_academy.phone, + "first_name": profile_academy.first_name, + "id": profile_academy.id, + "invite_url": "http://localhost:8000/v1/auth/academy/html/invite", + "last_name": profile_academy.last_name, + "role": { + "id": model.role.slug, + "name": model.role.name, + "slug": model.role.slug, }, - 'user': { - 'email': model.user.email, - 'first_name': model.user.first_name, - 'github': None, - 'id': model.user.id, - 'last_name': model.user.last_name, - 'profile': None, + "user": { + "email": model.user.email, + "first_name": model.user.first_name, + "github": None, + "id": model.user.id, + "last_name": model.user.last_name, + "profile": None, }, - 'status': profile_academy.status, + "status": profile_academy.status, **arguments, } @@ -79,76 +79,79 @@ def generate_send_email_message(self, model): email = None academy = None - if 'profile_academy' in model: + if "profile_academy" in model: email = model.profile_academy.user.email - elif 'user_invite' in model: + elif "user_invite" in model: email = model.user_invite.email - if 'academy' in model: + if "academy" in model: academy = model.academy return [ - call('academy_invite', - email, { - 'subject': - f'Invitation to study at {model.academy.name}', - 'invites': [{ - 'id': model.profile_academy.id, - 'academy': { - 'id': model.academy.id, - 'name': model.academy.name, - 'slug': model.academy.slug, - 'timezone': model.academy.timezone, - }, - 'role': model.role.slug, - 'created_at': model.profile_academy.created_at, - }], - 'user': { - 'id': model.user.id, - 'email': model.user.email, - 'first_name': model.user.first_name, - 'last_name': model.user.last_name, - 'github': None, - 'profile': None - }, - 'LINK': - 'http://localhost:8000/v1/auth/academy/html/invite', - }, - academy=academy), + call( + "academy_invite", + email, + { + "subject": f"Invitation to study at {model.academy.name}", + "invites": [ + { + "id": model.profile_academy.id, + "academy": { + "id": model.academy.id, + "name": model.academy.name, + "slug": model.academy.slug, + "timezone": model.academy.timezone, + }, + "role": model.role.slug, + "created_at": model.profile_academy.created_at, + } + ], + "user": { + "id": model.user.id, + "email": model.user.email, + "first_name": model.user.first_name, + "last_name": model.user.last_name, + "github": None, + "profile": None, + }, + "LINK": 
"http://localhost:8000/v1/auth/academy/html/invite", + }, + academy=academy, + ), ] class AuthenticateTestSuite(AuthTestCase): """Authentication test suite""" + """ 🔽🔽🔽 Auth """ def test_resend_invite__no_auth(self): - """Test """ + """Test""" self.headers(academy=1) - url = reverse_lazy('authenticate:academy_member_id_invite', kwargs={'profileacademy_id': 1}) + url = reverse_lazy("authenticate:academy_member_id_invite", kwargs={"profileacademy_id": 1}) response = self.client.put(url) json = response.json() - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} self.assertEqual(json, expected) self.assertEqual(response.status_code, 401) def test_resend_invite__no_capability(self): - """Test """ + """Test""" self.headers(academy=1) model = self.generate_models(authenticate=True, profile_academy=True, syllabus=True) - url = reverse_lazy('authenticate:academy_member_id_invite', kwargs={'profileacademy_id': 1}) + url = reverse_lazy("authenticate:academy_member_id_invite", kwargs={"profileacademy_id": 1}) response = self.client.put(url) json = response.json() expected = { - 'detail': "You (user: 1) don't have this capability: invite_resend for " - 'academy 1', - 'status_code': 403 + "detail": "You (user: 1) don't have this capability: invite_resend for " "academy 1", + "status_code": 403, } self.assertEqual(json, expected) self.assertEqual(response.status_code, 403) @@ -158,17 +161,17 @@ def test_resend_invite__no_capability(self): """ def test_resend_invite__get__with_capability(self): - """Test """ + """Test""" self.headers(academy=1) model = self.generate_models(authenticate=True, syllabus=True) - url = reverse_lazy('authenticate:academy_member_id_invite', kwargs={'profileacademy_id': 1}) + url = reverse_lazy("authenticate:academy_member_id_invite", kwargs={"profileacademy_id": 1}) response = self.client.get(url) json = response.json() expected = { - 'detail': "You (user: 1) don't have this capability: read_invite for academy 1", - 'status_code': 403, + "detail": "You (user: 1) don't have this capability: read_invite for academy 1", + "status_code": 403, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) @@ -178,19 +181,17 @@ def test_resend_invite__get__with_capability(self): """ def test_resend_invite__get__profile_academy_and_user_invite_not_found(self): - """Test """ + """Test""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_invite', - role='potato', - syllabus=True) + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_invite", role="potato", syllabus=True + ) - url = reverse_lazy('authenticate:academy_member_id_invite', kwargs={'profileacademy_id': 2}) + url = reverse_lazy("authenticate:academy_member_id_invite", kwargs={"profileacademy_id": 2}) response = self.client.get(url) json = response.json() - expected = {'detail': 'profile-academy-not-found', 'status_code': 404} + expected = {"detail": "profile-academy-not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, 404) @@ -199,20 +200,18 @@ def test_resend_invite__get__profile_academy_and_user_invite_not_found(self): """ def test_resend_invite__get__profile_academy_with_status_active(self): - """Test """ + """Test""" self.headers(academy=1) - profile_academy = {'status': 'ACTIVE'} - model = 
self.generate_models(authenticate=True, - profile_academy=profile_academy, - capability='read_invite', - role=1, - syllabus=1) + profile_academy = {"status": "ACTIVE"} + model = self.generate_models( + authenticate=True, profile_academy=profile_academy, capability="read_invite", role=1, syllabus=1 + ) - url = reverse_lazy('authenticate:academy_member_id_invite', kwargs={'profileacademy_id': 1}) + url = reverse_lazy("authenticate:academy_member_id_invite", kwargs={"profileacademy_id": 1}) response = self.client.get(url) json = response.json() - expected = {'detail': 'user-invite-and-profile-academy-with-status-invited-not-found', 'status_code': 404} + expected = {"detail": "user-invite-and-profile-academy-with-status-invited-not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, 404) @@ -221,15 +220,13 @@ def test_resend_invite__get__profile_academy_with_status_active(self): """ def test_resend_invite__put__profile_academy_with_status_invited(self): - """Test """ + """Test""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=1, - capability='read_invite', - role='potato', - syllabus=1) + model = self.generate_models( + authenticate=True, profile_academy=1, capability="read_invite", role="potato", syllabus=1 + ) - url = reverse_lazy('authenticate:academy_member_id_invite', kwargs={'profileacademy_id': 1}) + url = reverse_lazy("authenticate:academy_member_id_invite", kwargs={"profileacademy_id": 1}) response = self.client.get(url) json = response.json() @@ -244,16 +241,13 @@ def test_resend_invite__put__profile_academy_with_status_invited(self): """ def test_resend_invite__put__profile_academy_with_status_invited__(self): - """Test """ + """Test""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=1, - capability='read_invite', - role='potato', - user_invite=1, - syllabus=1) - - url = reverse_lazy('authenticate:academy_member_id_invite', kwargs={'profileacademy_id': 1}) + model = self.generate_models( + authenticate=True, profile_academy=1, capability="read_invite", role="potato", user_invite=1, syllabus=1 + ) + + url = reverse_lazy("authenticate:academy_member_id_invite", kwargs={"profileacademy_id": 1}) response = self.client.get(url) json = response.json() @@ -267,17 +261,17 @@ def test_resend_invite__put__profile_academy_with_status_invited__(self): """ def test_resend_invite__put__with_capability(self): - """Test """ + """Test""" self.headers(academy=1) model = self.generate_models(authenticate=True) - url = reverse_lazy('authenticate:academy_member_id_invite', kwargs={'profileacademy_id': 1359}) + url = reverse_lazy("authenticate:academy_member_id_invite", kwargs={"profileacademy_id": 1359}) response = self.client.put(url) json = response.json() expected = { - 'detail': "You (user: 1) don't have this capability: invite_resend for academy 1", - 'status_code': 403, + "detail": "You (user: 1) don't have this capability: invite_resend for academy 1", + "status_code": 403, } self.assertEqual(json, expected) @@ -287,17 +281,17 @@ def test_resend_invite__put__with_capability(self): 🔽🔽🔽 PUT ProfileAcademy not found """ - @patch('breathecode.notify.actions.send_email_message', MagicMock()) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) def test_resend_invite__put__profile_academy_not_found(self): - """Test """ + """Test""" self.headers(academy=1) - model = self.generate_models(authenticate=True, profile_academy=1, role=1, 
capability='invite_resend') - url = reverse_lazy('authenticate:academy_member_id_invite', kwargs={'profileacademy_id': 2}) + model = self.generate_models(authenticate=True, profile_academy=1, role=1, capability="invite_resend") + url = reverse_lazy("authenticate:academy_member_id_invite", kwargs={"profileacademy_id": 2}) response = self.client.put(url) json = response.json() - expected = {'detail': 'profile-academy-not-found', 'status_code': 400} + expected = {"detail": "profile-academy-not-found", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -307,13 +301,13 @@ def test_resend_invite__put__profile_academy_not_found(self): 🔽🔽🔽 PUT with ProfileAcademy """ - @patch('breathecode.notify.actions.send_email_message', MagicMock()) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) def test_resend_invite__put__with_profile_academy(self): - """Test """ + """Test""" self.headers(academy=1) - model = self.generate_models(authenticate=True, profile_academy=1, role=1, capability='invite_resend') - url = reverse_lazy('authenticate:academy_member_id_invite', kwargs={'profileacademy_id': 1}) + model = self.generate_models(authenticate=True, profile_academy=1, role=1, capability="invite_resend") + url = reverse_lazy("authenticate:academy_member_id_invite", kwargs={"profileacademy_id": 1}) response = self.client.put(url) json = response.json() @@ -324,107 +318,119 @@ def test_resend_invite__put__with_profile_academy(self): self.assertEqual(actions.send_email_message.call_args_list, generate_send_email_message(self, model)) @patch( - 'requests.post', - apply_requests_post_mock([(201, f"https://api.mailgun.net/v3/{os.environ.get('MAILGUN_DOMAIN')}/messages", {}) - ])) + "requests.post", + apply_requests_post_mock( + [(201, f"https://api.mailgun.net/v3/{os.environ.get('MAILGUN_DOMAIN')}/messages", {})] + ), + ) def test_resend_invite_with_invitation(self): - """Test """ + """Test""" self.headers(academy=1) - profile_academy_kwargs = {'email': 'email@dotdotdotdot.dot'} - user_invite_kwargs = {'email': 'email@dotdotdotdot.dot'} - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='invite_resend', - role='potato', - syllabus=True, - user_invite=True, - profile_academy_kwargs=profile_academy_kwargs, - user_invite_kwargs=user_invite_kwargs) - url = reverse_lazy('authenticate:academy_member_id_invite', kwargs={'profileacademy_id': 1}) + profile_academy_kwargs = {"email": "email@dotdotdotdot.dot"} + user_invite_kwargs = {"email": "email@dotdotdotdot.dot"} + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="invite_resend", + role="potato", + syllabus=True, + user_invite=True, + profile_academy_kwargs=profile_academy_kwargs, + user_invite_kwargs=user_invite_kwargs, + ) + url = reverse_lazy("authenticate:academy_member_id_invite", kwargs={"profileacademy_id": 1}) response = self.client.put(url) json = response.json() - created = json['created_at'] - sent = json['sent_at'] - del json['sent_at'] - del json['created_at'] + created = json["created_at"] + sent = json["sent_at"] + del json["sent_at"] + del json["created_at"] expected = { - 'id': 1, - 'status': 'PENDING', - 'email': 'email@dotdotdotdot.dot', - 'first_name': None, - 'last_name': None, - 'token': model.user_invite.token, - 'invite_url': f'http://localhost:8000/v1/auth/member/invite/{model.user_invite.token}', - 'academy': { - 'id': model['academy'].id, - 'slug': model['academy'].slug, - 'name': 
model['academy'].name, - 'logo_url': model['academy'].logo_url, - }, - 'role': { - 'id': 'potato', - 'name': 'potato', - 'slug': 'potato' + "id": 1, + "status": "PENDING", + "email": "email@dotdotdotdot.dot", + "first_name": None, + "last_name": None, + "token": model.user_invite.token, + "invite_url": f"http://localhost:8000/v1/auth/member/invite/{model.user_invite.token}", + "academy": { + "id": model["academy"].id, + "slug": model["academy"].slug, + "name": model["academy"].name, + "logo_url": model["academy"].logo_url, }, - 'cohort': { - 'slug': model['cohort'].slug, - 'name': model['cohort'].name, + "role": {"id": "potato", "name": "potato", "slug": "potato"}, + "cohort": { + "slug": model["cohort"].slug, + "name": model["cohort"].name, }, } self.assertEqual(json, expected) self.assertEqual(response.status_code, 200) - all_user_invite = [x for x in self.all_user_invite_dict() if x.pop('sent_at')] - self.assertEqual(all_user_invite, [{ - 'user_id': None, - 'id': model['user_invite'].id, - 'email': model['user_invite'].email, - 'academy_id': model['user_invite'].academy_id, - 'cohort_id': model['user_invite'].cohort_id, - 'role_id': model['user_invite'].role_id, - 'first_name': model['user_invite'].first_name, - 'last_name': model['user_invite'].last_name, - 'is_email_validated': model['user_invite'].is_email_validated, - 'conversion_info': None, - 'has_marketing_consent': False, - 'event_slug': None, - 'asset_slug': None, - 'token': model['user_invite'].token, - 'author_id': model['user_invite'].author_id, - 'status': model['user_invite'].status, - 'phone': model['user_invite'].phone, - 'process_message': model['user_invite'].process_message, - 'process_status': model['user_invite'].process_status, - 'syllabus_id': None, - 'city': None, - 'country': None, - 'latitude': None, - 'longitude': None, - 'email_quality': None, - 'email_status': None, - }]) + all_user_invite = [x for x in self.all_user_invite_dict() if x.pop("sent_at")] + self.assertEqual( + all_user_invite, + [ + { + "user_id": None, + "id": model["user_invite"].id, + "email": model["user_invite"].email, + "academy_id": model["user_invite"].academy_id, + "cohort_id": model["user_invite"].cohort_id, + "role_id": model["user_invite"].role_id, + "first_name": model["user_invite"].first_name, + "last_name": model["user_invite"].last_name, + "is_email_validated": model["user_invite"].is_email_validated, + "conversion_info": None, + "has_marketing_consent": False, + "event_slug": None, + "asset_slug": None, + "token": model["user_invite"].token, + "author_id": model["user_invite"].author_id, + "status": model["user_invite"].status, + "phone": model["user_invite"].phone, + "process_message": model["user_invite"].process_message, + "process_status": model["user_invite"].process_status, + "syllabus_id": None, + "city": None, + "country": None, + "latitude": None, + "longitude": None, + "email_quality": None, + "email_status": None, + } + ], + ) def test_resend_invite_recently(self): - """Test """ + """Test""" self.headers(academy=1) past_time = timezone.now() - timedelta(seconds=100) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='invite_resend', - role='potato', - syllabus=True, - user_invite=True, - token=True, - user_invite_kwargs={'sent_at': past_time}) - url = reverse_lazy('authenticate:academy_member_id_invite', kwargs={'profileacademy_id': 1}) + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="invite_resend", + role="potato", + syllabus=True, + 
user_invite=True, + token=True, + user_invite_kwargs={"sent_at": past_time}, + ) + url = reverse_lazy("authenticate:academy_member_id_invite", kwargs={"profileacademy_id": 1}) response = self.client.put(url) json = response.json() - expected = {'detail': 'sent-at-diff-less-two-minutes', 'status_code': 400} + expected = {"detail": "sent-at-diff-less-two-minutes", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, 400) - self.assertEqual(self.all_user_invite_dict(), [{ - **self.model_to_dict(model, 'user_invite'), - 'sent_at': past_time, - }]) + self.assertEqual( + self.all_user_invite_dict(), + [ + { + **self.model_to_dict(model, "user_invite"), + "sent_at": past_time, + } + ], + ) diff --git a/breathecode/authenticate/tests/urls/tests_academy_student.py b/breathecode/authenticate/tests/urls/tests_academy_student.py index b7c4036ce..a5c11da1d 100644 --- a/breathecode/authenticate/tests/urls/tests_academy_student.py +++ b/breathecode/authenticate/tests/urls/tests_academy_student.py @@ -1,6 +1,7 @@ """ Test cases for /academy/student """ + import os import urllib.parse from random import choice @@ -17,46 +18,46 @@ from ..mixins.new_auth_test_case import AuthTestCase PROFILE_ACADEMY_STATUS = [ - 'INVITED', - 'ACTIVE', + "INVITED", + "ACTIVE", ] def generate_user_invite(data: dict) -> dict: return { - 'academy_id': None, - 'author_id': None, - 'cohort_id': None, - 'email': None, - 'is_email_validated': False, - 'conversion_info': None, - 'has_marketing_consent': False, - 'event_slug': None, - 'asset_slug': None, - 'first_name': None, - 'id': 0, - 'last_name': None, - 'phone': '', - 'role_id': None, - 'sent_at': None, - 'status': 'PENDING', - 'token': '', - 'process_message': '', - 'process_status': 'PENDING', - 'user_id': None, - 'city': None, - 'country': None, - 'latitude': None, - 'longitude': None, - 'email_quality': None, - 'email_status': None, + "academy_id": None, + "author_id": None, + "cohort_id": None, + "email": None, + "is_email_validated": False, + "conversion_info": None, + "has_marketing_consent": False, + "event_slug": None, + "asset_slug": None, + "first_name": None, + "id": 0, + "last_name": None, + "phone": "", + "role_id": None, + "sent_at": None, + "status": "PENDING", + "token": "", + "process_message": "", + "process_status": "PENDING", + "user_id": None, + "city": None, + "country": None, + "latitude": None, + "longitude": None, + "email_quality": None, + "email_status": None, **data, } UTC_NOW = timezone.now() -A_YEAR_AGO = dateparse.parse_date('2014-01-01') +A_YEAR_AGO = dateparse.parse_date("2014-01-01") def getrandbits(n): @@ -68,31 +69,27 @@ def getrandbits(n): def format_profile_academy(self, profile_academy, role, academy): return { - 'academy': { - 'id': academy.id, - 'name': academy.name, - 'slug': academy.slug + "academy": {"id": academy.id, "name": academy.name, "slug": academy.slug}, + "address": profile_academy.address, + "created_at": self.datetime_to_iso(profile_academy.created_at), + "email": profile_academy.email, + "first_name": profile_academy.first_name, + "id": profile_academy.id, + "last_name": profile_academy.last_name, + "phone": profile_academy.phone, + "role": { + "id": role.slug, + "name": role.name, + "slug": role.slug, }, - 'address': profile_academy.address, - 'created_at': self.datetime_to_iso(profile_academy.created_at), - 'email': profile_academy.email, - 'first_name': profile_academy.first_name, - 'id': profile_academy.id, - 'last_name': profile_academy.last_name, - 'phone': 
profile_academy.phone, - 'role': { - 'id': role.slug, - 'name': role.name, - 'slug': role.slug, + "status": profile_academy.status, + "user": { + "email": profile_academy.user.email, + "first_name": profile_academy.user.first_name, + "profile": None, + "id": profile_academy.user.id, + "last_name": profile_academy.user.last_name, }, - 'status': profile_academy.status, - 'user': { - 'email': profile_academy.user.email, - 'first_name': profile_academy.user.first_name, - 'profile': None, - 'id': profile_academy.user.id, - 'last_name': profile_academy.user.last_name - } } @@ -101,119 +98,128 @@ class StudentGetTestSuite(AuthTestCase): def test_academy_student_without_auth(self): """Test /academy/student without auth""" - url = reverse_lazy('authenticate:academy_student') + url = reverse_lazy("authenticate:academy_student") response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED, - }) + self.assertEqual( + json, + { + "detail": "Authentication credentials were not provided.", + "status_code": status.HTTP_401_UNAUTHORIZED, + }, + ) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) def test_academy_student_without_capability(self): """Test /academy/student""" self.headers(academy=1) self.bc.database.create(authenticate=True) - url = reverse_lazy('authenticate:academy_student') + url = reverse_lazy("authenticate:academy_student") response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': "You (user: 1) don't have this capability: read_student " - 'for academy 1', - 'status_code': 403 - }) + self.assertEqual( + json, + {"detail": "You (user: 1) don't have this capability: read_student " "for academy 1", "status_code": 403}, + ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) def test_academy_student_without_academy(self): """Test /academy/student""" self.headers(academy=1) - role = 'konan' - self.bc.database.create(authenticate=True, role=role, capability='read_student') - url = reverse_lazy('authenticate:academy_student') + role = "konan" + self.bc.database.create(authenticate=True, role=role, capability="read_student") + url = reverse_lazy("authenticate:academy_student") response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': "You (user: 1) don't have this capability: read_student " - 'for academy 1', - 'status_code': 403 - }) + self.assertEqual( + json, + {"detail": "You (user: 1) don't have this capability: read_student " "for academy 1", "status_code": 403}, + ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) def test_academy_student_without_student(self): """Test /academy/student""" self.headers(academy=1) - role = 'konan' - model = self.bc.database.create(authenticate=True, role=role, capability='read_student', profile_academy=True) - url = reverse_lazy('authenticate:academy_student') + role = "konan" + model = self.bc.database.create(authenticate=True, role=role, capability="read_student", profile_academy=True) + url = reverse_lazy("authenticate:academy_student") response = self.client.get(url) json = response.json() self.assertEqual(json, []) - self.assertEqual(self.all_profile_academy_dict(), [{ - 'academy_id': 1, - 'address': None, - 'email': None, - 'first_name': None, - 'id': 1, - 'last_name': None, - 'phone': '', - 'role_id': 'konan', - 'status': 'INVITED', - 'user_id': 1 - }]) + self.assertEqual( + 
self.all_profile_academy_dict(), + [ + { + "academy_id": 1, + "address": None, + "email": None, + "first_name": None, + "id": 1, + "last_name": None, + "phone": "", + "role_id": "konan", + "status": "INVITED", + "user_id": 1, + } + ], + ) def test_academy_student(self): """Test /academy/student""" self.headers(academy=1) - role = 'student' - model = self.bc.database.create(authenticate=True, role=role, capability='read_student', profile_academy=True) - url = reverse_lazy('authenticate:academy_student') + role = "student" + model = self.bc.database.create(authenticate=True, role=role, capability="read_student", profile_academy=True) + url = reverse_lazy("authenticate:academy_student") response = self.client.get(url) json = response.json() - expected = [{ - 'academy': { - 'id': model['profile_academy'].academy.id, - 'name': model['profile_academy'].academy.name, - 'slug': model['profile_academy'].academy.slug - }, - 'address': model['profile_academy'].address, - 'created_at': self.datetime_to_iso(model['profile_academy'].created_at), - 'email': model['profile_academy'].email, - 'first_name': model['profile_academy'].first_name, - 'id': model['profile_academy'].id, - 'last_name': model['profile_academy'].last_name, - 'phone': model['profile_academy'].phone, - 'role': { - 'id': 'student', - 'name': 'student', - 'slug': 'student' - }, - 'status': 'INVITED', - 'user': { - 'email': model['profile_academy'].user.email, - 'first_name': model['profile_academy'].user.first_name, - 'profile': None, - 'id': model['profile_academy'].user.id, - 'last_name': model['profile_academy'].user.last_name + expected = [ + { + "academy": { + "id": model["profile_academy"].academy.id, + "name": model["profile_academy"].academy.name, + "slug": model["profile_academy"].academy.slug, + }, + "address": model["profile_academy"].address, + "created_at": self.datetime_to_iso(model["profile_academy"].created_at), + "email": model["profile_academy"].email, + "first_name": model["profile_academy"].first_name, + "id": model["profile_academy"].id, + "last_name": model["profile_academy"].last_name, + "phone": model["profile_academy"].phone, + "role": {"id": "student", "name": "student", "slug": "student"}, + "status": "INVITED", + "user": { + "email": model["profile_academy"].user.email, + "first_name": model["profile_academy"].user.first_name, + "profile": None, + "id": model["profile_academy"].user.id, + "last_name": model["profile_academy"].user.last_name, + }, } - }] + ] self.assertEqual(json, expected) - self.assertEqual(self.all_profile_academy_dict(), [{ - 'academy_id': 1, - 'address': None, - 'email': None, - 'first_name': None, - 'id': 1, - 'last_name': None, - 'phone': '', - 'role_id': 'student', - 'status': 'INVITED', - 'user_id': 1 - }]) + self.assertEqual( + self.all_profile_academy_dict(), + [ + { + "academy_id": 1, + "address": None, + "email": None, + "first_name": None, + "id": 1, + "last_name": None, + "phone": "", + "role_id": "student", + "status": "INVITED", + "user_id": 1, + } + ], + ) """ 🔽🔽🔽 With profile @@ -222,58 +228,57 @@ def test_academy_student(self): def test_academy_student__with_profile(self): """Test /academy/student""" self.headers(academy=1) - role = 'student' - model = self.bc.database.create(authenticate=True, - role=role, - capability='read_student', - profile_academy=True, - profile=True) - url = reverse_lazy('authenticate:academy_student') + role = "student" + model = self.bc.database.create( + authenticate=True, role=role, capability="read_student", profile_academy=True, profile=True + 
) + url = reverse_lazy("authenticate:academy_student") response = self.client.get(url) json = response.json() - expected = [{ - 'academy': { - 'id': model['profile_academy'].academy.id, - 'name': model['profile_academy'].academy.name, - 'slug': model['profile_academy'].academy.slug - }, - 'address': model['profile_academy'].address, - 'created_at': self.datetime_to_iso(model['profile_academy'].created_at), - 'email': model['profile_academy'].email, - 'first_name': model['profile_academy'].first_name, - 'id': model['profile_academy'].id, - 'last_name': model['profile_academy'].last_name, - 'phone': model['profile_academy'].phone, - 'role': { - 'id': 'student', - 'name': 'student', - 'slug': 'student' - }, - 'status': 'INVITED', - 'user': { - 'email': model['profile_academy'].user.email, - 'first_name': model['profile_academy'].user.first_name, - 'profile': { - 'avatar_url': None + expected = [ + { + "academy": { + "id": model["profile_academy"].academy.id, + "name": model["profile_academy"].academy.name, + "slug": model["profile_academy"].academy.slug, + }, + "address": model["profile_academy"].address, + "created_at": self.datetime_to_iso(model["profile_academy"].created_at), + "email": model["profile_academy"].email, + "first_name": model["profile_academy"].first_name, + "id": model["profile_academy"].id, + "last_name": model["profile_academy"].last_name, + "phone": model["profile_academy"].phone, + "role": {"id": "student", "name": "student", "slug": "student"}, + "status": "INVITED", + "user": { + "email": model["profile_academy"].user.email, + "first_name": model["profile_academy"].user.first_name, + "profile": {"avatar_url": None}, + "id": model["profile_academy"].user.id, + "last_name": model["profile_academy"].user.last_name, }, - 'id': model['profile_academy'].user.id, - 'last_name': model['profile_academy'].user.last_name } - }] + ] self.assertEqual(json, expected) - self.assertEqual(self.all_profile_academy_dict(), [{ - 'academy_id': 1, - 'address': None, - 'email': None, - 'first_name': None, - 'id': 1, - 'last_name': None, - 'phone': '', - 'role_id': 'student', - 'status': 'INVITED', - 'user_id': 1 - }]) + self.assertEqual( + self.all_profile_academy_dict(), + [ + { + "academy_id": 1, + "address": None, + "email": None, + "first_name": None, + "id": 1, + "last_name": None, + "phone": "", + "role_id": "student", + "status": "INVITED", + "user_id": 1, + } + ], + ) """ 🔽🔽🔽 GET query like @@ -282,363 +287,380 @@ def test_academy_student__with_profile(self): def test_academy_student_query_like_full_name(self): """Test /academy/student""" self.headers(academy=1) - base = self.bc.database.create(authenticate=True, role='student', capability='read_student') + base = self.bc.database.create(authenticate=True, role="student", capability="read_student") profile_academy_kwargs = { - 'email': 'b@b.com', - 'first_name': 'Rene', - 'last_name': 'Descartes', + "email": "b@b.com", + "first_name": "Rene", + "last_name": "Descartes", } profile_academy_kwargs_2 = { - 'email': 'a@a.com', - 'first_name': 'Rene', - 'last_name': 'Lopez', + "email": "a@a.com", + "first_name": "Rene", + "last_name": "Lopez", } - model_1 = self.bc.database.create(profile_academy=True, - profile_academy_kwargs=profile_academy_kwargs, - models=base) + model_1 = self.bc.database.create( + profile_academy=True, profile_academy_kwargs=profile_academy_kwargs, models=base + ) - model_2 = self.bc.database.create(profile_academy=True, - profile_academy_kwargs=profile_academy_kwargs_2, - models=base) + model_2 = 
self.bc.database.create( + profile_academy=True, profile_academy_kwargs=profile_academy_kwargs_2, models=base + ) - base_url = reverse_lazy('authenticate:academy_student') - url = f'{base_url}?like=Rene Descartes' + base_url = reverse_lazy("authenticate:academy_student") + url = f"{base_url}?like=Rene Descartes" response = self.client.get(url) json = response.json() - expected = [{ - 'academy': { - 'id': model_1['profile_academy'].academy.id, - 'name': model_1['profile_academy'].academy.name, - 'slug': model_1['profile_academy'].academy.slug - }, - 'address': model_1['profile_academy'].address, - 'created_at': self.datetime_to_iso(model_1['profile_academy'].created_at), - 'email': model_1['profile_academy'].email, - 'first_name': model_1['profile_academy'].first_name, - 'id': model_1['profile_academy'].id, - 'last_name': model_1['profile_academy'].last_name, - 'phone': model_1['profile_academy'].phone, - 'role': { - 'id': 'student', - 'name': 'student', - 'slug': 'student' - }, - 'status': 'INVITED', - 'user': { - 'email': model_1['profile_academy'].user.email, - 'first_name': model_1['profile_academy'].user.first_name, - 'profile': None, - 'id': model_1['profile_academy'].user.id, - 'last_name': model_1['profile_academy'].user.last_name + expected = [ + { + "academy": { + "id": model_1["profile_academy"].academy.id, + "name": model_1["profile_academy"].academy.name, + "slug": model_1["profile_academy"].academy.slug, + }, + "address": model_1["profile_academy"].address, + "created_at": self.datetime_to_iso(model_1["profile_academy"].created_at), + "email": model_1["profile_academy"].email, + "first_name": model_1["profile_academy"].first_name, + "id": model_1["profile_academy"].id, + "last_name": model_1["profile_academy"].last_name, + "phone": model_1["profile_academy"].phone, + "role": {"id": "student", "name": "student", "slug": "student"}, + "status": "INVITED", + "user": { + "email": model_1["profile_academy"].user.email, + "first_name": model_1["profile_academy"].user.first_name, + "profile": None, + "id": model_1["profile_academy"].user.id, + "last_name": model_1["profile_academy"].user.last_name, + }, } - }] + ] self.assertEqual(json, expected) - self.assertEqual(self.all_profile_academy_dict(), [{ - 'academy_id': 1, - 'address': model_1['profile_academy'].address, - 'email': model_1['profile_academy'].email, - 'first_name': model_1['profile_academy'].first_name, - 'id': 1, - 'last_name': model_1['profile_academy'].last_name, - 'phone': model_1['profile_academy'].phone, - 'role_id': 'student', - 'status': 'INVITED', - 'user_id': 1 - }, { - 'academy_id': 2, - 'address': model_2['profile_academy'].address, - 'email': model_2['profile_academy'].email, - 'first_name': model_2['profile_academy'].first_name, - 'id': 2, - 'last_name': model_2['profile_academy'].last_name, - 'phone': model_2['profile_academy'].phone, - 'role_id': 'student', - 'status': 'INVITED', - 'user_id': 1 - }]) + self.assertEqual( + self.all_profile_academy_dict(), + [ + { + "academy_id": 1, + "address": model_1["profile_academy"].address, + "email": model_1["profile_academy"].email, + "first_name": model_1["profile_academy"].first_name, + "id": 1, + "last_name": model_1["profile_academy"].last_name, + "phone": model_1["profile_academy"].phone, + "role_id": "student", + "status": "INVITED", + "user_id": 1, + }, + { + "academy_id": 2, + "address": model_2["profile_academy"].address, + "email": model_2["profile_academy"].email, + "first_name": model_2["profile_academy"].first_name, + "id": 2, + "last_name": 
model_2["profile_academy"].last_name, + "phone": model_2["profile_academy"].phone, + "role_id": "student", + "status": "INVITED", + "user_id": 1, + }, + ], + ) def test_academy_student_query_like_first_name(self): """Test /academy/student""" self.headers(academy=1) - base = self.bc.database.create(authenticate=True, role='student', capability='read_student') + base = self.bc.database.create(authenticate=True, role="student", capability="read_student") profile_academy_kwargs = { - 'email': 'b@b.com', - 'first_name': 'Rene', - 'last_name': 'Descartes', + "email": "b@b.com", + "first_name": "Rene", + "last_name": "Descartes", } profile_academy_kwargs_2 = { - 'email': 'a@a.com', - 'first_name': 'Michael', - 'last_name': 'Jordan', + "email": "a@a.com", + "first_name": "Michael", + "last_name": "Jordan", } - model_1 = self.bc.database.create(profile_academy=True, - profile_academy_kwargs=profile_academy_kwargs, - models=base) + model_1 = self.bc.database.create( + profile_academy=True, profile_academy_kwargs=profile_academy_kwargs, models=base + ) - model_2 = self.bc.database.create(profile_academy=True, - profile_academy_kwargs=profile_academy_kwargs_2, - models=base) + model_2 = self.bc.database.create( + profile_academy=True, profile_academy_kwargs=profile_academy_kwargs_2, models=base + ) - base_url = reverse_lazy('authenticate:academy_student') - url = f'{base_url}?like=Rene' + base_url = reverse_lazy("authenticate:academy_student") + url = f"{base_url}?like=Rene" response = self.client.get(url) json = response.json() - expected = [{ - 'academy': { - 'id': model_1['profile_academy'].academy.id, - 'name': model_1['profile_academy'].academy.name, - 'slug': model_1['profile_academy'].academy.slug - }, - 'address': model_1['profile_academy'].address, - 'created_at': self.datetime_to_iso(model_1['profile_academy'].created_at), - 'email': model_1['profile_academy'].email, - 'first_name': model_1['profile_academy'].first_name, - 'id': model_1['profile_academy'].id, - 'last_name': model_1['profile_academy'].last_name, - 'phone': model_1['profile_academy'].phone, - 'role': { - 'id': 'student', - 'name': 'student', - 'slug': 'student' - }, - 'status': 'INVITED', - 'user': { - 'email': model_1['profile_academy'].user.email, - 'first_name': model_1['profile_academy'].user.first_name, - 'profile': None, - 'id': model_1['profile_academy'].user.id, - 'last_name': model_1['profile_academy'].user.last_name + expected = [ + { + "academy": { + "id": model_1["profile_academy"].academy.id, + "name": model_1["profile_academy"].academy.name, + "slug": model_1["profile_academy"].academy.slug, + }, + "address": model_1["profile_academy"].address, + "created_at": self.datetime_to_iso(model_1["profile_academy"].created_at), + "email": model_1["profile_academy"].email, + "first_name": model_1["profile_academy"].first_name, + "id": model_1["profile_academy"].id, + "last_name": model_1["profile_academy"].last_name, + "phone": model_1["profile_academy"].phone, + "role": {"id": "student", "name": "student", "slug": "student"}, + "status": "INVITED", + "user": { + "email": model_1["profile_academy"].user.email, + "first_name": model_1["profile_academy"].user.first_name, + "profile": None, + "id": model_1["profile_academy"].user.id, + "last_name": model_1["profile_academy"].user.last_name, + }, } - }] + ] self.assertEqual(json, expected) - self.assertEqual(self.all_profile_academy_dict(), [{ - 'academy_id': 1, - 'address': model_1['profile_academy'].address, - 'email': model_1['profile_academy'].email, - 'first_name': 
model_1['profile_academy'].first_name, - 'id': 1, - 'last_name': model_1['profile_academy'].last_name, - 'phone': model_1['profile_academy'].phone, - 'role_id': 'student', - 'status': 'INVITED', - 'user_id': 1 - }, { - 'academy_id': 2, - 'address': model_2['profile_academy'].address, - 'email': model_2['profile_academy'].email, - 'first_name': model_2['profile_academy'].first_name, - 'id': 2, - 'last_name': model_2['profile_academy'].last_name, - 'phone': model_2['profile_academy'].phone, - 'role_id': 'student', - 'status': 'INVITED', - 'user_id': 1 - }]) + self.assertEqual( + self.all_profile_academy_dict(), + [ + { + "academy_id": 1, + "address": model_1["profile_academy"].address, + "email": model_1["profile_academy"].email, + "first_name": model_1["profile_academy"].first_name, + "id": 1, + "last_name": model_1["profile_academy"].last_name, + "phone": model_1["profile_academy"].phone, + "role_id": "student", + "status": "INVITED", + "user_id": 1, + }, + { + "academy_id": 2, + "address": model_2["profile_academy"].address, + "email": model_2["profile_academy"].email, + "first_name": model_2["profile_academy"].first_name, + "id": 2, + "last_name": model_2["profile_academy"].last_name, + "phone": model_2["profile_academy"].phone, + "role_id": "student", + "status": "INVITED", + "user_id": 1, + }, + ], + ) def test_academy_student_query_like_last_name(self): """Test /academy/student""" self.headers(academy=1) - base = self.bc.database.create(authenticate=True, role='student', capability='read_student') + base = self.bc.database.create(authenticate=True, role="student", capability="read_student") profile_academy_kwargs = { - 'email': 'b@b.com', - 'first_name': 'Rene', - 'last_name': 'Descartes', + "email": "b@b.com", + "first_name": "Rene", + "last_name": "Descartes", } profile_academy_kwargs_2 = { - 'email': 'a@a.com', - 'first_name': 'Michael', - 'last_name': 'Jordan', + "email": "a@a.com", + "first_name": "Michael", + "last_name": "Jordan", } - model_1 = self.bc.database.create(profile_academy=True, - profile_academy_kwargs=profile_academy_kwargs, - models=base) + model_1 = self.bc.database.create( + profile_academy=True, profile_academy_kwargs=profile_academy_kwargs, models=base + ) - model_2 = self.bc.database.create(profile_academy=True, - profile_academy_kwargs=profile_academy_kwargs_2, - models=base) + model_2 = self.bc.database.create( + profile_academy=True, profile_academy_kwargs=profile_academy_kwargs_2, models=base + ) - base_url = reverse_lazy('authenticate:academy_student') - url = f'{base_url}?like=Descartes' + base_url = reverse_lazy("authenticate:academy_student") + url = f"{base_url}?like=Descartes" response = self.client.get(url) json = response.json() - expected = [{ - 'academy': { - 'id': model_1['profile_academy'].academy.id, - 'name': model_1['profile_academy'].academy.name, - 'slug': model_1['profile_academy'].academy.slug - }, - 'address': model_1['profile_academy'].address, - 'created_at': self.datetime_to_iso(model_1['profile_academy'].created_at), - 'email': model_1['profile_academy'].email, - 'first_name': model_1['profile_academy'].first_name, - 'id': model_1['profile_academy'].id, - 'last_name': model_1['profile_academy'].last_name, - 'phone': model_1['profile_academy'].phone, - 'role': { - 'id': 'student', - 'name': 'student', - 'slug': 'student' - }, - 'status': 'INVITED', - 'user': { - 'email': model_1['profile_academy'].user.email, - 'first_name': model_1['profile_academy'].user.first_name, - 'profile': None, - 'id': model_1['profile_academy'].user.id, - 
'last_name': model_1['profile_academy'].user.last_name + expected = [ + { + "academy": { + "id": model_1["profile_academy"].academy.id, + "name": model_1["profile_academy"].academy.name, + "slug": model_1["profile_academy"].academy.slug, + }, + "address": model_1["profile_academy"].address, + "created_at": self.datetime_to_iso(model_1["profile_academy"].created_at), + "email": model_1["profile_academy"].email, + "first_name": model_1["profile_academy"].first_name, + "id": model_1["profile_academy"].id, + "last_name": model_1["profile_academy"].last_name, + "phone": model_1["profile_academy"].phone, + "role": {"id": "student", "name": "student", "slug": "student"}, + "status": "INVITED", + "user": { + "email": model_1["profile_academy"].user.email, + "first_name": model_1["profile_academy"].user.first_name, + "profile": None, + "id": model_1["profile_academy"].user.id, + "last_name": model_1["profile_academy"].user.last_name, + }, } - }] + ] self.assertEqual(json, expected) - self.assertEqual(self.all_profile_academy_dict(), [{ - 'academy_id': 1, - 'address': model_1['profile_academy'].address, - 'email': model_1['profile_academy'].email, - 'first_name': model_1['profile_academy'].first_name, - 'id': 1, - 'last_name': model_1['profile_academy'].last_name, - 'phone': model_1['profile_academy'].phone, - 'role_id': 'student', - 'status': 'INVITED', - 'user_id': 1 - }, { - 'academy_id': 2, - 'address': model_2['profile_academy'].address, - 'email': model_2['profile_academy'].email, - 'first_name': model_2['profile_academy'].first_name, - 'id': 2, - 'last_name': model_2['profile_academy'].last_name, - 'phone': model_2['profile_academy'].phone, - 'role_id': 'student', - 'status': 'INVITED', - 'user_id': 1 - }]) + self.assertEqual( + self.all_profile_academy_dict(), + [ + { + "academy_id": 1, + "address": model_1["profile_academy"].address, + "email": model_1["profile_academy"].email, + "first_name": model_1["profile_academy"].first_name, + "id": 1, + "last_name": model_1["profile_academy"].last_name, + "phone": model_1["profile_academy"].phone, + "role_id": "student", + "status": "INVITED", + "user_id": 1, + }, + { + "academy_id": 2, + "address": model_2["profile_academy"].address, + "email": model_2["profile_academy"].email, + "first_name": model_2["profile_academy"].first_name, + "id": 2, + "last_name": model_2["profile_academy"].last_name, + "phone": model_2["profile_academy"].phone, + "role_id": "student", + "status": "INVITED", + "user_id": 1, + }, + ], + ) def test_academy_student_query_like_email(self): """Test /academy/student""" self.headers(academy=1) - base = self.bc.database.create(authenticate=True, role='student', capability='read_student') + base = self.bc.database.create(authenticate=True, role="student", capability="read_student") profile_academy_kwargs = { - 'email': 'b@b.com', - 'first_name': 'Rene', - 'last_name': 'Descartes', + "email": "b@b.com", + "first_name": "Rene", + "last_name": "Descartes", } profile_academy_kwargs_2 = { - 'email': 'a@a.com', - 'first_name': 'Michael', - 'last_name': 'Jordan', + "email": "a@a.com", + "first_name": "Michael", + "last_name": "Jordan", } - model_1 = self.bc.database.create(profile_academy=True, - profile_academy_kwargs=profile_academy_kwargs, - models=base) + model_1 = self.bc.database.create( + profile_academy=True, profile_academy_kwargs=profile_academy_kwargs, models=base + ) - model_2 = self.bc.database.create(profile_academy=True, - profile_academy_kwargs=profile_academy_kwargs_2, - models=base) + model_2 = self.bc.database.create( 
+ profile_academy=True, profile_academy_kwargs=profile_academy_kwargs_2, models=base + ) - base_url = reverse_lazy('authenticate:academy_student') - url = f'{base_url}?like=b@b.com' + base_url = reverse_lazy("authenticate:academy_student") + url = f"{base_url}?like=b@b.com" response = self.client.get(url) json = response.json() - expected = [{ - 'academy': { - 'id': model_1['profile_academy'].academy.id, - 'name': model_1['profile_academy'].academy.name, - 'slug': model_1['profile_academy'].academy.slug - }, - 'address': model_1['profile_academy'].address, - 'created_at': self.datetime_to_iso(model_1['profile_academy'].created_at), - 'email': model_1['profile_academy'].email, - 'first_name': model_1['profile_academy'].first_name, - 'id': model_1['profile_academy'].id, - 'last_name': model_1['profile_academy'].last_name, - 'phone': model_1['profile_academy'].phone, - 'role': { - 'id': 'student', - 'name': 'student', - 'slug': 'student' - }, - 'status': 'INVITED', - 'user': { - 'email': model_1['profile_academy'].user.email, - 'first_name': model_1['profile_academy'].user.first_name, - 'profile': None, - 'id': model_1['profile_academy'].user.id, - 'last_name': model_1['profile_academy'].user.last_name + expected = [ + { + "academy": { + "id": model_1["profile_academy"].academy.id, + "name": model_1["profile_academy"].academy.name, + "slug": model_1["profile_academy"].academy.slug, + }, + "address": model_1["profile_academy"].address, + "created_at": self.datetime_to_iso(model_1["profile_academy"].created_at), + "email": model_1["profile_academy"].email, + "first_name": model_1["profile_academy"].first_name, + "id": model_1["profile_academy"].id, + "last_name": model_1["profile_academy"].last_name, + "phone": model_1["profile_academy"].phone, + "role": {"id": "student", "name": "student", "slug": "student"}, + "status": "INVITED", + "user": { + "email": model_1["profile_academy"].user.email, + "first_name": model_1["profile_academy"].user.first_name, + "profile": None, + "id": model_1["profile_academy"].user.id, + "last_name": model_1["profile_academy"].user.last_name, + }, } - }] + ] self.assertEqual(json, expected) - self.assertEqual(self.all_profile_academy_dict(), [{ - 'academy_id': 1, - 'address': model_1['profile_academy'].address, - 'email': model_1['profile_academy'].email, - 'first_name': model_1['profile_academy'].first_name, - 'id': 1, - 'last_name': model_1['profile_academy'].last_name, - 'phone': model_1['profile_academy'].phone, - 'role_id': 'student', - 'status': 'INVITED', - 'user_id': 1 - }, { - 'academy_id': 2, - 'address': model_2['profile_academy'].address, - 'email': model_2['profile_academy'].email, - 'first_name': model_2['profile_academy'].first_name, - 'id': 2, - 'last_name': model_2['profile_academy'].last_name, - 'phone': model_2['profile_academy'].phone, - 'role_id': 'student', - 'status': 'INVITED', - 'user_id': 1 - }]) + self.assertEqual( + self.all_profile_academy_dict(), + [ + { + "academy_id": 1, + "address": model_1["profile_academy"].address, + "email": model_1["profile_academy"].email, + "first_name": model_1["profile_academy"].first_name, + "id": 1, + "last_name": model_1["profile_academy"].last_name, + "phone": model_1["profile_academy"].phone, + "role_id": "student", + "status": "INVITED", + "user_id": 1, + }, + { + "academy_id": 2, + "address": model_2["profile_academy"].address, + "email": model_2["profile_academy"].email, + "first_name": model_2["profile_academy"].first_name, + "id": 2, + "last_name": model_2["profile_academy"].last_name, + "phone": 
model_2["profile_academy"].phone, + "role_id": "student", + "status": "INVITED", + "user_id": 1, + }, + ], + ) """ 🔽🔽🔽 GET query status """ def test_academy_student__query_status__bad_status(self): - base = self.bc.database.create(user=1, role='student', capability='read_student') + base = self.bc.database.create(user=1, role="student", capability="read_student") for status in PROFILE_ACADEMY_STATUS: bad_status = [x for x in PROFILE_ACADEMY_STATUS if status != x][0] - profile_academy = {'status': status} + profile_academy = {"status": status} model = self.bc.database.create(profile_academy=(2, profile_academy), models=base) self.bc.request.set_headers(academy=model.academy.id) self.client.force_authenticate(model.user) - url = reverse_lazy('authenticate:academy_student') + f'?status={bad_status}' + url = reverse_lazy("authenticate:academy_student") + f"?status={bad_status}" response = self.client.get(url) json = response.json() expected = [] self.assertEqual(json, expected) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), - self.bc.format.to_dict(model.profile_academy)) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), self.bc.format.to_dict(model.profile_academy) + ) - self.bc.database.delete('authenticate.ProfileAcademy') + self.bc.database.delete("authenticate.ProfileAcademy") def test_academy_student__query_status__one_status__uppercase(self): - base = self.bc.database.create(user=1, role='student', capability='read_student') + base = self.bc.database.create(user=1, role="student", capability="read_student") for status in PROFILE_ACADEMY_STATUS: - profile_academy = {'status': status} + profile_academy = {"status": status} model = self.bc.database.create(profile_academy=(2, profile_academy), models=base) self.client.force_authenticate(model.user) - url = reverse_lazy('authenticate:academy_student') + f'?status={status.upper()}' - response = self.client.get(url, headers={'academy': model.academy.id}) + url = reverse_lazy("authenticate:academy_student") + f"?status={status.upper()}" + response = self.client.get(url, headers={"academy": model.academy.id}) json = response.json() expected = [ @@ -647,21 +669,22 @@ def test_academy_student__query_status__one_status__uppercase(self): ] self.assertEqual(json, expected) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), - self.bc.format.to_dict(model.profile_academy)) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), self.bc.format.to_dict(model.profile_academy) + ) - self.bc.database.delete('authenticate.ProfileAcademy') + self.bc.database.delete("authenticate.ProfileAcademy") def test_academy_student__query_status__one_status__lowercase(self): - base = self.bc.database.create(user=1, role='student', capability='read_student') + base = self.bc.database.create(user=1, role="student", capability="read_student") for status in PROFILE_ACADEMY_STATUS: - profile_academy = {'status': status} + profile_academy = {"status": status} model = self.bc.database.create(profile_academy=(2, profile_academy), models=base) self.client.force_authenticate(model.user) - url = reverse_lazy('authenticate:academy_student') + f'?status={status.lower()}' - response = self.client.get(url, headers={'academy': model.academy.id}) + url = reverse_lazy("authenticate:academy_student") + f"?status={status.lower()}" + response = self.client.get(url, headers={"academy": model.academy.id}) json = response.json() expected = [ @@ -670,901 +693,993 @@ def 
test_academy_student__query_status__one_status__lowercase(self): ] self.assertEqual(json, expected) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), - self.bc.format.to_dict(model.profile_academy)) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), self.bc.format.to_dict(model.profile_academy) + ) - self.bc.database.delete('authenticate.ProfileAcademy') + self.bc.database.delete("authenticate.ProfileAcademy") """ 🔽🔽🔽 Spy the extensions """ - @patch.object(APIViewExtensionHandlers, '_spy_extensions', MagicMock()) + @patch.object(APIViewExtensionHandlers, "_spy_extensions", MagicMock()) def test_academy_student__spy_extensions(self): """Test /academy/student""" self.headers(academy=1) - role = 'konan' - model = self.bc.database.create(authenticate=True, role=role, capability='read_student', profile_academy=True) + role = "konan" + model = self.bc.database.create(authenticate=True, role=role, capability="read_student", profile_academy=True) - url = reverse_lazy('authenticate:academy_student') + url = reverse_lazy("authenticate:academy_student") self.client.get(url) - self.assertEqual(APIViewExtensionHandlers._spy_extensions.call_args_list, [ - call(['LanguageExtension', 'LookupExtension', 'PaginationExtension', 'SortExtension']), - ]) + self.assertEqual( + APIViewExtensionHandlers._spy_extensions.call_args_list, + [ + call(["LanguageExtension", "LookupExtension", "PaginationExtension", "SortExtension"]), + ], + ) - @patch.object(APIViewExtensionHandlers, '_spy_extension_arguments', MagicMock()) + @patch.object(APIViewExtensionHandlers, "_spy_extension_arguments", MagicMock()) def test_academy_student__spy_extension_arguments(self): """Test /academy/student""" self.headers(academy=1) - role = 'konan' - model = self.bc.database.create(authenticate=True, role=role, capability='read_student', profile_academy=True) + role = "konan" + model = self.bc.database.create(authenticate=True, role=role, capability="read_student", profile_academy=True) - url = reverse_lazy('authenticate:academy_student') + url = reverse_lazy("authenticate:academy_student") self.client.get(url) - self.assertEqual(APIViewExtensionHandlers._spy_extension_arguments.call_args_list, [ - call(paginate=True, sort='-created_at'), - ]) + self.assertEqual( + APIViewExtensionHandlers._spy_extension_arguments.call_args_list, + [ + call(paginate=True, sort="-created_at"), + ], + ) class StudentPostTestSuite(AuthTestCase): - @patch('breathecode.notify.actions.send_email_message', MagicMock()) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) def test_academy_student__post__no_user__invite_is_false(self): """Test /academy/:id/member""" - role = 'konan' + role = "konan" - model = self.bc.database.create(authenticate=True, role=role, capability='crud_student', profile_academy=True) - url = reverse_lazy('authenticate:academy_student') - data = {'role': role, 'invite': False} - response = self.client.post(url, data, headers={'academy': 1}) + model = self.bc.database.create(authenticate=True, role=role, capability="crud_student", profile_academy=True) + url = reverse_lazy("authenticate:academy_student") + data = {"role": role, "invite": False} + response = self.client.post(url, data, headers={"academy": 1}) json = response.json() - expected = {'detail': 'user-not-found', 'status_code': 400} + expected = {"detail": "user-not-found", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - 
self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [{ - 'academy_id': 1, - 'address': None, - 'email': None, - 'first_name': None, - 'id': 1, - 'last_name': None, - 'phone': '', - 'role_id': role, - 'status': 'INVITED', - 'user_id': 1, - }]) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + { + "academy_id": 1, + "address": None, + "email": None, + "first_name": None, + "id": 1, + "last_name": None, + "phone": "", + "role_id": role, + "status": "INVITED", + "user_id": 1, + } + ], + ) assert actions.send_email_message.call_args_list == [] - self.assertEqual(self.bc.database.list_of('payments.Plan'), []) + self.assertEqual(self.bc.database.list_of("payments.Plan"), []) - @patch('breathecode.notify.actions.send_email_message', MagicMock()) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) def test_academy_student__post__no_invite(self): """Test /academy/:id/member""" - role = 'konan' + role = "konan" - model = self.bc.database.create(authenticate=True, role=role, capability='crud_student', profile_academy=True) - url = reverse_lazy('authenticate:academy_student') - data = {'role': role, 'invite': True} - response = self.client.post(url, data, headers={'academy': 1}) + model = self.bc.database.create(authenticate=True, role=role, capability="crud_student", profile_academy=True) + url = reverse_lazy("authenticate:academy_student") + data = {"role": role, "invite": True} + response = self.client.post(url, data, headers={"academy": 1}) json = response.json() - expected = {'detail': 'no-email-or-id', 'status_code': 400} + expected = {"detail": "no-email-or-id", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [{ - 'academy_id': 1, - 'address': None, - 'email': None, - 'first_name': None, - 'id': 1, - 'last_name': None, - 'phone': '', - 'role_id': role, - 'status': 'INVITED', - 'user_id': 1, - }]) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + { + "academy_id": 1, + "address": None, + "email": None, + "first_name": None, + "id": 1, + "last_name": None, + "phone": "", + "role_id": role, + "status": "INVITED", + "user_id": 1, + } + ], + ) assert actions.send_email_message.call_args_list == [] - self.assertEqual(self.bc.database.list_of('payments.Plan'), []) + self.assertEqual(self.bc.database.list_of("payments.Plan"), []) - @patch('breathecode.notify.actions.send_email_message', MagicMock()) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) def test_academy_student__post__exists_profile_academy_with_this_email__is_none(self): """Test /academy/:id/member""" - role = 'student' + role = "student" - profile_academy = {'email': None} - model = self.bc.database.create(authenticate=True, - role=role, - capability='crud_student', - profile_academy=profile_academy) - url = reverse_lazy('authenticate:academy_student') + profile_academy = {"email": None} + model = self.bc.database.create( + authenticate=True, role=role, capability="crud_student", profile_academy=profile_academy + ) + url = reverse_lazy("authenticate:academy_student") data = { - 'first_name': 'Kenny', - 'last_name': 'McKornick', - 'invite': True, - 'email': model.profile_academy.email, + "first_name": "Kenny", + "last_name": "McKornick", + "invite": True, + "email": model.profile_academy.email, } - response = self.client.post(url, data, format='json', 
headers={'academy': 1}) + response = self.client.post(url, data, format="json", headers={"academy": 1}) json = response.json() - expected = {'detail': 'already-exists-with-this-email', 'status_code': 400} + expected = {"detail": "already-exists-with-this-email", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), - [self.bc.format.to_dict(model.profile_academy)]) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), [self.bc.format.to_dict(model.profile_academy)] + ) assert actions.send_email_message.call_args_list == [] - self.assertEqual(self.bc.database.list_of('payments.Plan'), []) + self.assertEqual(self.bc.database.list_of("payments.Plan"), []) - @patch('breathecode.notify.actions.send_email_message', MagicMock()) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) def test_academy_student__post__exists_profile_academy_with_this_email__with_email(self): """Test /academy/:id/member""" - role = 'student' + role = "student" - profile_academy = {'email': 'dude@dude.dude'} - model = self.bc.database.create(authenticate=True, - role=role, - capability='crud_student', - profile_academy=profile_academy) - url = reverse_lazy('authenticate:academy_student') + profile_academy = {"email": "dude@dude.dude"} + model = self.bc.database.create( + authenticate=True, role=role, capability="crud_student", profile_academy=profile_academy + ) + url = reverse_lazy("authenticate:academy_student") data = { - 'first_name': 'Kenny', - 'last_name': 'McKornick', - 'invite': True, - 'email': model.profile_academy.email, + "first_name": "Kenny", + "last_name": "McKornick", + "invite": True, + "email": model.profile_academy.email, } - response = self.client.post(url, data, format='json', headers={'academy': 1}) + response = self.client.post(url, data, format="json", headers={"academy": 1}) json = response.json() - expected = {'detail': 'already-exists-with-this-email', 'status_code': 400} + expected = {"detail": "already-exists-with-this-email", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), - [self.bc.format.to_dict(model.profile_academy)]) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), [self.bc.format.to_dict(model.profile_academy)] + ) assert actions.send_email_message.call_args_list == [] - self.assertEqual(self.bc.database.list_of('payments.Plan'), []) + self.assertEqual(self.bc.database.list_of("payments.Plan"), []) - @patch('breathecode.notify.actions.send_email_message', MagicMock()) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) def test_academy_student__post__user_with_not_student_role(self): """Test /academy/:id/member""" - role = 'konan' + role = "konan" - model = self.bc.database.create(authenticate=True, role=role, capability='crud_student', profile_academy=True) - url = reverse_lazy('authenticate:academy_student') - data = {'role': role, 'user': model['user'].id, 'first_name': 'Kenny', 'last_name': 'McKornick'} - response = self.client.post(url, data, headers={'academy': 1}) + model = self.bc.database.create(authenticate=True, role=role, capability="crud_student", profile_academy=True) + url = reverse_lazy("authenticate:academy_student") + data = {"role": role, "user": model["user"].id, "first_name": "Kenny", 
"last_name": "McKornick"} + response = self.client.post(url, data, headers={"academy": 1}) json = response.json() - expected = {'detail': 'already-exists', 'status_code': 400} + expected = {"detail": "already-exists", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [{ - 'academy_id': 1, - 'address': None, - 'email': None, - 'first_name': None, - 'id': 1, - 'last_name': None, - 'phone': '', - 'role_id': role, - 'status': 'INVITED', - 'user_id': 1, - }]) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + { + "academy_id": 1, + "address": None, + "email": None, + "first_name": None, + "id": 1, + "last_name": None, + "phone": "", + "role_id": role, + "status": "INVITED", + "user_id": 1, + } + ], + ) assert actions.send_email_message.call_args_list == [] - self.assertEqual(self.bc.database.list_of('payments.Plan'), []) + self.assertEqual(self.bc.database.list_of("payments.Plan"), []) """ 🔽🔽🔽 Without Role student """ - @patch('breathecode.notify.actions.send_email_message', MagicMock()) - @patch('random.getrandbits', MagicMock(side_effect=getrandbits)) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) + @patch("random.getrandbits", MagicMock(side_effect=getrandbits)) def test_academy_student__post__without_role_student(self): """Test /academy/:id/member""" - role = 'hitman' + role = "hitman" - model = self.bc.database.create(authenticate=True, role=role, capability='crud_student', profile_academy=1) + model = self.bc.database.create(authenticate=True, role=role, capability="crud_student", profile_academy=1) - url = reverse_lazy('authenticate:academy_student') + url = reverse_lazy("authenticate:academy_student") data = { - 'first_name': 'Kenny', - 'last_name': 'McKornick', - 'invite': True, - 'email': 'dude@dude.dude', + "first_name": "Kenny", + "last_name": "McKornick", + "invite": True, + "email": "dude@dude.dude", } - response = self.client.post(url, data, format='json', headers={'academy': 1}) + response = self.client.post(url, data, format="json", headers={"academy": 1}) json = response.json() - expected = {'detail': 'role-not-found', 'status_code': 400} + expected = {"detail": "role-not-found", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [ - self.bc.format.to_dict(model.profile_academy), - ]) - - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), []) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + self.bc.format.to_dict(model.profile_academy), + ], + ) + + self.assertEqual(self.bc.database.list_of("authenticate.UserInvite"), []) assert actions.send_email_message.call_args_list == [] - self.assertEqual(self.bc.database.list_of('payments.Plan'), []) + self.assertEqual(self.bc.database.list_of("payments.Plan"), []) """ 🔽🔽🔽 POST with Cohort in body """ - @patch('breathecode.notify.actions.send_email_message', MagicMock()) - @patch('random.getrandbits', MagicMock(side_effect=getrandbits)) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) + @patch("random.getrandbits", MagicMock(side_effect=getrandbits)) def test_academy_student__post__with_cohort_in_body(self): """Test /academy/:id/member""" - role = 'student' + role = "student" - model = 
self.bc.database.create(authenticate=True, - role=role, - skip_cohort=True, - capability='crud_student', - profile_academy=1) + model = self.bc.database.create( + authenticate=True, role=role, skip_cohort=True, capability="crud_student", profile_academy=1 + ) - url = reverse_lazy('authenticate:academy_student') + url = reverse_lazy("authenticate:academy_student") data = { - 'first_name': 'Kenny', - 'last_name': 'McKornick', - 'invite': True, - 'email': 'dude@dude.dude', - 'cohort': [1], + "first_name": "Kenny", + "last_name": "McKornick", + "invite": True, + "email": "dude@dude.dude", + "cohort": [1], } - response = self.client.post(url, data, format='json', headers={'academy': 1}) + response = self.client.post(url, data, format="json", headers={"academy": 1}) json = response.json() - expected = {'detail': 'cohort-not-found', 'status_code': 400} + expected = {"detail": "cohort-not-found", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [ - self.bc.format.to_dict(model.profile_academy), - ]) - - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), []) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + self.bc.format.to_dict(model.profile_academy), + ], + ) + + self.assertEqual(self.bc.database.list_of("authenticate.UserInvite"), []) assert actions.send_email_message.call_args_list == [] - self.assertEqual(self.bc.database.list_of('payments.Plan'), []) + self.assertEqual(self.bc.database.list_of("payments.Plan"), []) """ 🔽🔽🔽 POST data with user but not found """ - @patch('breathecode.notify.actions.send_email_message', MagicMock()) - @patch('random.getrandbits', MagicMock(side_effect=getrandbits)) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) + @patch("random.getrandbits", MagicMock(side_effect=getrandbits)) def test_academy_student__post__with_user_but_not_found(self): """Test /academy/:id/member""" - role = 'student' + role = "student" - model = self.bc.database.create(authenticate=True, role=role, capability='crud_student', profile_academy=1) + model = self.bc.database.create(authenticate=True, role=role, capability="crud_student", profile_academy=1) - url = reverse_lazy('authenticate:academy_student') + url = reverse_lazy("authenticate:academy_student") data = { - 'first_name': 'Kenny', - 'last_name': 'McKornick', - 'invite': True, - 'email': 'dude@dude.dude', - 'user': 2, + "first_name": "Kenny", + "last_name": "McKornick", + "invite": True, + "email": "dude@dude.dude", + "user": 2, } - response = self.client.post(url, data, format='json', headers={'academy': 1}) + response = self.client.post(url, data, format="json", headers={"academy": 1}) json = response.json() - expected = {'detail': 'user-not-found', 'status_code': 400} + expected = {"detail": "user-not-found", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [ - self.bc.format.to_dict(model.profile_academy), - ]) - - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), []) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + self.bc.format.to_dict(model.profile_academy), + ], + ) + + self.assertEqual(self.bc.database.list_of("authenticate.UserInvite"), []) assert actions.send_email_message.call_args_list == [] - 
self.assertEqual(self.bc.database.list_of('payments.Plan'), []) + self.assertEqual(self.bc.database.list_of("payments.Plan"), []) """ 🔽🔽🔽 POST data with User and Cohort in body """ - @patch('breathecode.notify.actions.send_email_message', MagicMock()) - @patch('random.getrandbits', MagicMock(side_effect=getrandbits)) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) + @patch("random.getrandbits", MagicMock(side_effect=getrandbits)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_academy_student__post__with_user_and_cohort_in_data(self): """Test /academy/:id/member""" - roles = [{'name': 'konan', 'slug': 'konan'}, {'name': 'student', 'slug': 'student'}] + roles = [{"name": "konan", "slug": "konan"}, {"name": "student", "slug": "student"}] - model = self.bc.database.create(role=roles, user=2, cohort=1, capability='crud_student', profile_academy=1) + model = self.bc.database.create(role=roles, user=2, cohort=1, capability="crud_student", profile_academy=1) self.bc.request.authenticate(model.user[0]) - url = reverse_lazy('authenticate:academy_student') + url = reverse_lazy("authenticate:academy_student") data = { - 'first_name': 'Kenny', - 'last_name': 'McKornick', - 'invite': True, - 'email': 'dude@dude.dude', - 'user': 2, - 'cohort': [1], + "first_name": "Kenny", + "last_name": "McKornick", + "invite": True, + "email": "dude@dude.dude", + "user": 2, + "cohort": [1], } - response = self.client.post(url, data, format='json', headers={'academy': 1}) + response = self.client.post(url, data, format="json", headers={"academy": 1}) json = response.json() expected = { - 'address': None, - 'email': model.user[1].email, - 'first_name': 'Kenny', - 'last_name': 'McKornick', - 'phone': '', - 'status': 'INVITED', + "address": None, + "email": model.user[1].email, + "first_name": "Kenny", + "last_name": "McKornick", + "phone": "", + "status": "INVITED", } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [ - self.bc.format.to_dict(model.profile_academy), { - 'academy_id': 1, - 'address': None, - 'email': model.user[1].email, - 'first_name': 'Kenny', - 'id': 2, - 'last_name': 'McKornick', - 'phone': '', - 'role_id': 'student', - 'status': 'INVITED', - 'user_id': 2, - } - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + self.bc.format.to_dict(model.profile_academy), + { + "academy_id": 1, + "address": None, + "email": model.user[1].email, + "first_name": "Kenny", + "id": 2, + "last_name": "McKornick", + "phone": "", + "role_id": "student", + "status": "INVITED", + "user_id": 2, + }, + ], + ) - token = self.bc.database.get('authenticate.Token', 1, dict=False) - querystr = urllib.parse.urlencode({'callback': os.getenv('APP_URL', '')[:-1], 'token': token}) - url = os.getenv('API_URL') + '/v1/auth/academy/html/invite?' + querystr + token = self.bc.database.get("authenticate.Token", 1, dict=False) + querystr = urllib.parse.urlencode({"callback": os.getenv("APP_URL", "")[:-1], "token": token}) + url = os.getenv("API_URL") + "/v1/auth/academy/html/invite?" 
+ querystr - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), []) + self.assertEqual(self.bc.database.list_of("authenticate.UserInvite"), []) assert actions.send_email_message.call_args_list == [ - call('academy_invite', - model.user[1].email, { - 'subject': - f'Invitation to study at {model.academy.name}', - 'invites': [{ - 'id': 2, - 'academy': { - 'id': 1, - 'name': model.academy.name, - 'slug': model.academy.slug, - 'timezone': None - }, - 'role': 'student', - 'created_at': UTC_NOW - }], - 'user': { - 'id': 2, - 'email': model.user[1].email, - 'first_name': model.user[1].first_name, - 'last_name': model.user[1].last_name, - 'github': None, - 'profile': None - }, - 'LINK': - url, - }, - academy=model.academy), + call( + "academy_invite", + model.user[1].email, + { + "subject": f"Invitation to study at {model.academy.name}", + "invites": [ + { + "id": 2, + "academy": { + "id": 1, + "name": model.academy.name, + "slug": model.academy.slug, + "timezone": None, + }, + "role": "student", + "created_at": UTC_NOW, + } + ], + "user": { + "id": 2, + "email": model.user[1].email, + "first_name": model.user[1].first_name, + "last_name": model.user[1].last_name, + "github": None, + "profile": None, + }, + "LINK": url, + }, + academy=model.academy, + ), ] - self.assertEqual(self.bc.database.list_of('payments.Plan'), []) + self.assertEqual(self.bc.database.list_of("payments.Plan"), []) """ 🔽🔽🔽 POST data with User and Cohort in body """ - @patch('breathecode.notify.actions.send_email_message', MagicMock()) - @patch('random.getrandbits', MagicMock(side_effect=getrandbits)) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) + @patch("random.getrandbits", MagicMock(side_effect=getrandbits)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_academy_student__post__with_user__it_ignore_the_param_plans(self): """Test /academy/:id/member""" - roles = [{'name': 'konan', 'slug': 'konan'}, {'name': 'student', 'slug': 'student'}] + roles = [{"name": "konan", "slug": "konan"}, {"name": "student", "slug": "student"}] - model = self.bc.database.create(role=roles, user=2, cohort=1, capability='crud_student', profile_academy=1) + model = self.bc.database.create(role=roles, user=2, cohort=1, capability="crud_student", profile_academy=1) self.bc.request.authenticate(model.user[0]) - url = reverse_lazy('authenticate:academy_student') + url = reverse_lazy("authenticate:academy_student") data = { - 'first_name': 'Kenny', - 'last_name': 'McKornick', - 'invite': True, - 'email': 'dude@dude.dude', - 'user': 2, - 'cohort': [1], - 'plans': [1], + "first_name": "Kenny", + "last_name": "McKornick", + "invite": True, + "email": "dude@dude.dude", + "user": 2, + "cohort": [1], + "plans": [1], } - response = self.client.post(url, data, format='json', headers={'academy': 1}) + response = self.client.post(url, data, format="json", headers={"academy": 1}) json = response.json() expected = { - 'address': None, - 'email': model.user[1].email, - 'first_name': 'Kenny', - 'last_name': 'McKornick', - 'phone': '', - 'status': 'INVITED', + "address": None, + "email": model.user[1].email, + "first_name": "Kenny", + "last_name": "McKornick", + "phone": "", + "status": "INVITED", } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [ - self.bc.format.to_dict(model.profile_academy), { - 
'academy_id': 1, - 'address': None, - 'email': model.user[1].email, - 'first_name': 'Kenny', - 'id': 2, - 'last_name': 'McKornick', - 'phone': '', - 'role_id': 'student', - 'status': 'INVITED', - 'user_id': 2, - } - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + self.bc.format.to_dict(model.profile_academy), + { + "academy_id": 1, + "address": None, + "email": model.user[1].email, + "first_name": "Kenny", + "id": 2, + "last_name": "McKornick", + "phone": "", + "role_id": "student", + "status": "INVITED", + "user_id": 2, + }, + ], + ) - token = self.bc.database.get('authenticate.Token', 1, dict=False) - querystr = urllib.parse.urlencode({'callback': os.getenv('APP_URL', '')[:-1], 'token': token}) - url = os.getenv('API_URL') + '/v1/auth/academy/html/invite?' + querystr + token = self.bc.database.get("authenticate.Token", 1, dict=False) + querystr = urllib.parse.urlencode({"callback": os.getenv("APP_URL", "")[:-1], "token": token}) + url = os.getenv("API_URL") + "/v1/auth/academy/html/invite?" + querystr - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), []) + self.assertEqual(self.bc.database.list_of("authenticate.UserInvite"), []) assert actions.send_email_message.call_args_list == [ - call('academy_invite', - model.user[1].email, { - 'subject': - f'Invitation to study at {model.academy.name}', - 'invites': [{ - 'id': 2, - 'academy': { - 'id': 1, - 'name': model.academy.name, - 'slug': model.academy.slug, - 'timezone': None - }, - 'role': 'student', - 'created_at': UTC_NOW - }], - 'user': { - 'id': 2, - 'email': model.user[1].email, - 'first_name': model.user[1].first_name, - 'last_name': model.user[1].last_name, - 'github': None, - 'profile': None - }, - 'LINK': - url, - }, - academy=model.academy), + call( + "academy_invite", + model.user[1].email, + { + "subject": f"Invitation to study at {model.academy.name}", + "invites": [ + { + "id": 2, + "academy": { + "id": 1, + "name": model.academy.name, + "slug": model.academy.slug, + "timezone": None, + }, + "role": "student", + "created_at": UTC_NOW, + } + ], + "user": { + "id": 2, + "email": model.user[1].email, + "first_name": model.user[1].first_name, + "last_name": model.user[1].last_name, + "github": None, + "profile": None, + }, + "LINK": url, + }, + academy=model.academy, + ), ] - self.assertEqual(self.bc.database.list_of('payments.Plan'), []) + self.assertEqual(self.bc.database.list_of("payments.Plan"), []) """ 🔽🔽🔽 POST data without user """ - @patch('breathecode.notify.actions.send_email_message', MagicMock()) - @patch('random.getrandbits', MagicMock(side_effect=getrandbits)) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) + @patch("random.getrandbits", MagicMock(side_effect=getrandbits)) def test_academy_student__post__without_user_in_data(self): """Test /academy/:id/member""" - role = 'student' + role = "student" - model = self.bc.database.create(authenticate=True, role=role, capability='crud_student', profile_academy=1) + model = self.bc.database.create(authenticate=True, role=role, capability="crud_student", profile_academy=1) - url = reverse_lazy('authenticate:academy_student') + url = reverse_lazy("authenticate:academy_student") data = { - 'first_name': 'Kenny', - 'last_name': 'McKornick', - 'invite': True, - 'email': 'dude@dude.dude', + "first_name": "Kenny", + "last_name": "McKornick", + "invite": True, + "email": "dude@dude.dude", } - response = self.client.post(url, data, format='json', headers={'academy': 1}) + response = 
self.client.post(url, data, format="json", headers={"academy": 1}) json = response.json() expected = { - 'address': None, - 'email': 'dude@dude.dude', - 'first_name': 'Kenny', - 'last_name': 'McKornick', - 'phone': '', - 'status': 'INVITED', + "address": None, + "email": "dude@dude.dude", + "first_name": "Kenny", + "last_name": "McKornick", + "phone": "", + "status": "INVITED", } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [ - self.bc.format.to_dict(model.profile_academy), { - 'academy_id': 1, - 'address': None, - 'email': 'dude@dude.dude', - 'first_name': 'Kenny', - 'id': 2, - 'last_name': 'McKornick', - 'phone': '', - 'role_id': 'student', - 'status': 'INVITED', - 'user_id': None, - } - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + self.bc.format.to_dict(model.profile_academy), + { + "academy_id": 1, + "address": None, + "email": "dude@dude.dude", + "first_name": "Kenny", + "id": 2, + "last_name": "McKornick", + "phone": "", + "role_id": "student", + "status": "INVITED", + "user_id": None, + }, + ], + ) - invite = self.bc.database.get('authenticate.UserInvite', 1, dict=False) - params = {'callback': os.getenv('APP_URL', '')[:-1]} + invite = self.bc.database.get("authenticate.UserInvite", 1, dict=False) + params = {"callback": os.getenv("APP_URL", "")[:-1]} querystr = urllib.parse.urlencode(params) - url = os.getenv('API_URL') + '/v1/auth/member/invite/' + \ - str(TOKEN) + '?' + querystr - - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), [ - generate_user_invite({ - 'id': 1, - 'academy_id': 1, - 'author_id': 1, - 'email': 'dude@dude.dude', - 'first_name': 'Kenny', - 'last_name': 'McKornick', - 'role_id': 'student', - 'token': TOKEN, - 'syllabus_id': None, - }), - ]) + url = os.getenv("API_URL") + "/v1/auth/member/invite/" + str(TOKEN) + "?" 
+ querystr + + self.assertEqual( + self.bc.database.list_of("authenticate.UserInvite"), + [ + generate_user_invite( + { + "id": 1, + "academy_id": 1, + "author_id": 1, + "email": "dude@dude.dude", + "first_name": "Kenny", + "last_name": "McKornick", + "role_id": "student", + "token": TOKEN, + "syllabus_id": None, + } + ), + ], + ) assert actions.send_email_message.call_args_list == [ - call('welcome_academy', - 'dude@dude.dude', { - 'email': 'dude@dude.dude', - 'subject': 'Welcome to ' + model.academy.name, - 'LINK': url, - 'FIST_NAME': 'Kenny' - }, - academy=model.academy) + call( + "welcome_academy", + "dude@dude.dude", + { + "email": "dude@dude.dude", + "subject": "Welcome to " + model.academy.name, + "LINK": url, + "FIST_NAME": "Kenny", + }, + academy=model.academy, + ) ] - self.assertEqual(self.bc.database.list_of('payments.Plan'), []) + self.assertEqual(self.bc.database.list_of("payments.Plan"), []) """ 🔽🔽🔽 POST data without user, provided plan not found """ - @patch('breathecode.notify.actions.send_email_message', MagicMock()) - @patch('random.getrandbits', MagicMock(side_effect=getrandbits)) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) + @patch("random.getrandbits", MagicMock(side_effect=getrandbits)) def test_academy_student__post__without_user_in_data__plan_not_found(self): """Test /academy/:id/member""" - role = 'student' + role = "student" - model = self.bc.database.create(authenticate=True, role=role, capability='crud_student', profile_academy=1) + model = self.bc.database.create(authenticate=True, role=role, capability="crud_student", profile_academy=1) - url = reverse_lazy('authenticate:academy_student') + url = reverse_lazy("authenticate:academy_student") data = { - 'first_name': 'Kenny', - 'last_name': 'McKornick', - 'invite': True, - 'email': 'dude@dude.dude', - 'plans': [1], + "first_name": "Kenny", + "last_name": "McKornick", + "invite": True, + "email": "dude@dude.dude", + "plans": [1], } - response = self.client.post(url, data, format='json', headers={'academy': 1}) + response = self.client.post(url, data, format="json", headers={"academy": 1}) json = response.json() - expected = {'detail': 'plan-not-found', 'status_code': 400} + expected = {"detail": "plan-not-found", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [ - self.bc.format.to_dict(model.profile_academy), - ]) - - invite = self.bc.database.get('authenticate.UserInvite', 1, dict=False) - params = {'callback': ''} + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + self.bc.format.to_dict(model.profile_academy), + ], + ) + + invite = self.bc.database.get("authenticate.UserInvite", 1, dict=False) + params = {"callback": ""} querystr = urllib.parse.urlencode(params) - url = os.getenv('API_URL') + '/v1/auth/member/invite/' + \ - str(TOKEN) + '?' + querystr + url = os.getenv("API_URL") + "/v1/auth/member/invite/" + str(TOKEN) + "?" 
+ querystr - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), []) + self.assertEqual(self.bc.database.list_of("authenticate.UserInvite"), []) assert actions.send_email_message.call_args_list == [] - self.assertEqual(self.bc.database.list_of('payments.Plan'), []) + self.assertEqual(self.bc.database.list_of("payments.Plan"), []) """ 🔽🔽🔽 POST data without user, provided plan not found """ - @patch('breathecode.notify.actions.send_email_message', MagicMock()) - @patch('random.getrandbits', MagicMock(side_effect=getrandbits)) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) + @patch("random.getrandbits", MagicMock(side_effect=getrandbits)) def test_academy_student__post__without_user_in_data__with_plan(self): """Test /academy/:id/member""" - role = 'student' + role = "student" - plan = {'time_of_life': None, 'time_of_life_unit': None} - model = self.bc.database.create(authenticate=True, - role=role, - capability='crud_student', - profile_academy=1, - plan=plan) + plan = {"time_of_life": None, "time_of_life_unit": None} + model = self.bc.database.create( + authenticate=True, role=role, capability="crud_student", profile_academy=1, plan=plan + ) - url = reverse_lazy('authenticate:academy_student') + url = reverse_lazy("authenticate:academy_student") data = { - 'first_name': 'Kenny', - 'last_name': 'McKornick', - 'invite': True, - 'email': 'dude@dude.dude', - 'plans': [1], + "first_name": "Kenny", + "last_name": "McKornick", + "invite": True, + "email": "dude@dude.dude", + "plans": [1], } - response = self.client.post(url, data, format='json', headers={'academy': 1}) + response = self.client.post(url, data, format="json", headers={"academy": 1}) json = response.json() expected = { - 'address': None, - 'email': 'dude@dude.dude', - 'first_name': 'Kenny', - 'last_name': 'McKornick', - 'phone': '', - 'status': 'INVITED', + "address": None, + "email": "dude@dude.dude", + "first_name": "Kenny", + "last_name": "McKornick", + "phone": "", + "status": "INVITED", } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [ - self.bc.format.to_dict(model.profile_academy), { - 'academy_id': 1, - 'address': None, - 'email': 'dude@dude.dude', - 'first_name': 'Kenny', - 'id': 2, - 'last_name': 'McKornick', - 'phone': '', - 'role_id': 'student', - 'status': 'INVITED', - 'user_id': None, - } - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + self.bc.format.to_dict(model.profile_academy), + { + "academy_id": 1, + "address": None, + "email": "dude@dude.dude", + "first_name": "Kenny", + "id": 2, + "last_name": "McKornick", + "phone": "", + "role_id": "student", + "status": "INVITED", + "user_id": None, + }, + ], + ) - invite = self.bc.database.get('authenticate.UserInvite', 1, dict=False) - params = {'callback': os.getenv('APP_URL', '')[:-1]} + invite = self.bc.database.get("authenticate.UserInvite", 1, dict=False) + params = {"callback": os.getenv("APP_URL", "")[:-1]} querystr = urllib.parse.urlencode(params) - url = os.getenv('API_URL') + '/v1/auth/member/invite/' + \ - str(TOKEN) + '?' 
+ querystr - - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), [ - generate_user_invite({ - 'id': 1, - 'academy_id': 1, - 'author_id': 1, - 'email': 'dude@dude.dude', - 'first_name': 'Kenny', - 'last_name': 'McKornick', - 'role_id': 'student', - 'token': TOKEN, - 'syllabus_id': None, - 'city': None, - 'country': None, - 'latitude': None, - 'longitude': None, - }), - ]) + url = os.getenv("API_URL") + "/v1/auth/member/invite/" + str(TOKEN) + "?" + querystr + + self.assertEqual( + self.bc.database.list_of("authenticate.UserInvite"), + [ + generate_user_invite( + { + "id": 1, + "academy_id": 1, + "author_id": 1, + "email": "dude@dude.dude", + "first_name": "Kenny", + "last_name": "McKornick", + "role_id": "student", + "token": TOKEN, + "syllabus_id": None, + "city": None, + "country": None, + "latitude": None, + "longitude": None, + } + ), + ], + ) assert actions.send_email_message.call_args_list == [ - call('welcome_academy', - 'dude@dude.dude', { - 'email': 'dude@dude.dude', - 'subject': 'Welcome to ' + model.academy.name, - 'LINK': url, - 'FIST_NAME': 'Kenny' - }, - academy=model.academy), + call( + "welcome_academy", + "dude@dude.dude", + { + "email": "dude@dude.dude", + "subject": "Welcome to " + model.academy.name, + "LINK": url, + "FIST_NAME": "Kenny", + }, + academy=model.academy, + ), ] - self.assertEqual(self.bc.database.list_of('payments.Plan'), [ - self.bc.format.to_dict(model.plan), - ]) - - plan = self.bc.database.get('payments.Plan', 1, dict=False) + self.assertEqual( + self.bc.database.list_of("payments.Plan"), + [ + self.bc.format.to_dict(model.plan), + ], + ) + + plan = self.bc.database.get("payments.Plan", 1, dict=False) self.bc.check.queryset_with_pks(plan.invites.all(), [1]) - @patch('breathecode.notify.actions.send_email_message', MagicMock()) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) def test_academy_student__post__without_user_in_data__invite_already_exists__cohort_none_in_data(self): """Test /academy/:id/member""" - role = 'student' + role = "student" - user = {'email': 'dude@dude.dude'} - user_invite = {'email': 'dude2@dude.dude'} - model = self.bc.database.create(authenticate=True, - user=user, - user_invite=user_invite, - role=role, - capability='crud_student', - profile_academy=1) + user = {"email": "dude@dude.dude"} + user_invite = {"email": "dude2@dude.dude"} + model = self.bc.database.create( + authenticate=True, + user=user, + user_invite=user_invite, + role=role, + capability="crud_student", + profile_academy=1, + ) - url = reverse_lazy('authenticate:academy_student') + url = reverse_lazy("authenticate:academy_student") data = { - 'first_name': 'Kenny', - 'last_name': 'McKornick', - 'invite': True, - 'email': 'dude2@dude.dude', + "first_name": "Kenny", + "last_name": "McKornick", + "invite": True, + "email": "dude2@dude.dude", } - response = self.client.post(url, data, format='json', headers={'academy': 1}) + response = self.client.post(url, data, format="json", headers={"academy": 1}) json = response.json() - expected = {'detail': 'already-invited', 'status_code': 400} + expected = {"detail": "already-invited", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [ - self.bc.format.to_dict(model.profile_academy), - ]) - - invite = self.bc.database.get('authenticate.UserInvite', 1, dict=False) - params = {'callback': ''} + self.assertEqual( + 
self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + self.bc.format.to_dict(model.profile_academy), + ], + ) + + invite = self.bc.database.get("authenticate.UserInvite", 1, dict=False) + params = {"callback": ""} querystr = urllib.parse.urlencode(params) - url = os.getenv('API_URL') + '/v1/auth/member/invite/' + \ - str(TOKEN) + '?' + querystr + url = os.getenv("API_URL") + "/v1/auth/member/invite/" + str(TOKEN) + "?" + querystr - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), [ - self.bc.format.to_dict(model.user_invite), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.UserInvite"), + [ + self.bc.format.to_dict(model.user_invite), + ], + ) assert actions.send_email_message.call_args_list == [] - @patch('breathecode.notify.actions.send_email_message', MagicMock()) - @patch('random.getrandbits', MagicMock(side_effect=getrandbits)) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) + @patch("random.getrandbits", MagicMock(side_effect=getrandbits)) def test_academy_student__post__without_user_in_data__invite_already_exists__diff_cohort_in_data(self): """Test /academy/:id/member""" - role = 'student' - - user = {'email': 'dude@dude.dude'} - user_invite = {'email': 'dude2@dude.dude'} - model = self.bc.database.create(authenticate=True, - user=user, - user_invite=user_invite, - cohort=2, - role=role, - capability='crud_student', - profile_academy=1) - - url = reverse_lazy('authenticate:academy_student') + role = "student" + + user = {"email": "dude@dude.dude"} + user_invite = {"email": "dude2@dude.dude"} + model = self.bc.database.create( + authenticate=True, + user=user, + user_invite=user_invite, + cohort=2, + role=role, + capability="crud_student", + profile_academy=1, + ) + + url = reverse_lazy("authenticate:academy_student") data = { - 'first_name': 'Kenny', - 'last_name': 'McKornick', - 'cohort': [2], - 'invite': True, - 'email': 'dude2@dude.dude', + "first_name": "Kenny", + "last_name": "McKornick", + "cohort": [2], + "invite": True, + "email": "dude2@dude.dude", } - response = self.client.post(url, data, format='json', headers={'academy': 1}) + response = self.client.post(url, data, format="json", headers={"academy": 1}) json = response.json() expected = { - 'address': None, - 'email': 'dude2@dude.dude', - 'first_name': 'Kenny', - 'last_name': 'McKornick', - 'phone': '', - 'status': 'INVITED', + "address": None, + "email": "dude2@dude.dude", + "first_name": "Kenny", + "last_name": "McKornick", + "phone": "", + "status": "INVITED", } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [ - self.bc.format.to_dict(model.profile_academy), { - 'academy_id': 1, - 'address': None, - 'email': 'dude2@dude.dude', - 'first_name': 'Kenny', - 'id': 2, - 'last_name': 'McKornick', - 'phone': '', - 'role_id': 'student', - 'status': 'INVITED', - 'user_id': None, - } - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + self.bc.format.to_dict(model.profile_academy), + { + "academy_id": 1, + "address": None, + "email": "dude2@dude.dude", + "first_name": "Kenny", + "id": 2, + "last_name": "McKornick", + "phone": "", + "role_id": "student", + "status": "INVITED", + "user_id": None, + }, + ], + ) - params = {'callback': os.getenv('APP_URL', '')[:-1]} + params = {"callback": os.getenv("APP_URL", "")[:-1]} querystr = urllib.parse.urlencode(params) - url = os.getenv('API_URL') + 
'/v1/auth/member/invite/' + \ - str(TOKEN) + '?' + querystr - - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), [ - generate_user_invite({ - 'id': 1, - 'cohort_id': 1, - 'academy_id': 1, - 'author_id': 1, - 'email': 'dude2@dude.dude', - 'role_id': 'student', - 'token': model.user_invite.token, - 'syllabus_id': None, - }), - generate_user_invite({ - 'id': 2, - 'cohort_id': 2, - 'academy_id': 1, - 'author_id': 1, - 'email': 'dude2@dude.dude', - 'first_name': 'Kenny', - 'last_name': 'McKornick', - 'role_id': 'student', - 'token': TOKEN, - 'syllabus_id': None, - }), - ]) + url = os.getenv("API_URL") + "/v1/auth/member/invite/" + str(TOKEN) + "?" + querystr + + self.assertEqual( + self.bc.database.list_of("authenticate.UserInvite"), + [ + generate_user_invite( + { + "id": 1, + "cohort_id": 1, + "academy_id": 1, + "author_id": 1, + "email": "dude2@dude.dude", + "role_id": "student", + "token": model.user_invite.token, + "syllabus_id": None, + } + ), + generate_user_invite( + { + "id": 2, + "cohort_id": 2, + "academy_id": 1, + "author_id": 1, + "email": "dude2@dude.dude", + "first_name": "Kenny", + "last_name": "McKornick", + "role_id": "student", + "token": TOKEN, + "syllabus_id": None, + } + ), + ], + ) assert actions.send_email_message.call_args_list == [ - call('welcome_academy', - 'dude2@dude.dude', { - 'email': 'dude2@dude.dude', - 'subject': 'Welcome to ' + model.academy.name, - 'LINK': url, - 'FIST_NAME': 'Kenny' - }, - academy=model.academy) + call( + "welcome_academy", + "dude2@dude.dude", + { + "email": "dude2@dude.dude", + "subject": "Welcome to " + model.academy.name, + "LINK": url, + "FIST_NAME": "Kenny", + }, + academy=model.academy, + ) ] - self.assertEqual(self.bc.database.list_of('payments.Plan'), []) + self.assertEqual(self.bc.database.list_of("payments.Plan"), []) - @patch('breathecode.notify.actions.send_email_message', MagicMock()) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) def test_academy_student__post__without_user_in_data__user_already_exists(self): """Test /academy/:id/member""" - role = 'student' + role = "student" - user = {'email': 'dude@dude.dude'} - model = self.bc.database.create(authenticate=True, - user=user, - user_invite=user, - role=role, - capability='crud_student', - profile_academy=1) + user = {"email": "dude@dude.dude"} + model = self.bc.database.create( + authenticate=True, user=user, user_invite=user, role=role, capability="crud_student", profile_academy=1 + ) - url = reverse_lazy('authenticate:academy_student') + url = reverse_lazy("authenticate:academy_student") data = { - 'first_name': 'Kenny', - 'last_name': 'McKornick', - 'invite': True, - 'email': 'dude@dude.dude', + "first_name": "Kenny", + "last_name": "McKornick", + "invite": True, + "email": "dude@dude.dude", } - response = self.client.post(url, data, format='json', headers={'academy': 1}) + response = self.client.post(url, data, format="json", headers={"academy": 1}) json = response.json() - expected = {'detail': 'already-exists', 'status_code': 400} + expected = {"detail": "already-exists", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [ - self.bc.format.to_dict(model.profile_academy), - ]) - - invite = self.bc.database.get('authenticate.UserInvite', 1, dict=False) - params = {'callback': ''} + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + 
self.bc.format.to_dict(model.profile_academy), + ], + ) + + invite = self.bc.database.get("authenticate.UserInvite", 1, dict=False) + params = {"callback": ""} querystr = urllib.parse.urlencode(params) - url = os.getenv('API_URL') + '/v1/auth/member/invite/' + \ - str(TOKEN) + '?' + querystr + url = os.getenv("API_URL") + "/v1/auth/member/invite/" + str(TOKEN) + "?" + querystr - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), [ - self.bc.format.to_dict(model.user_invite), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.UserInvite"), + [ + self.bc.format.to_dict(model.user_invite), + ], + ) assert actions.send_email_message.call_args_list == [] - self.assertEqual(self.bc.database.list_of('payments.Plan'), []) + self.assertEqual(self.bc.database.list_of("payments.Plan"), []) class StudentDeleteTestSuite(AuthTestCase): def test_academy_student_delete_without_auth(self): """Test /cohort/:id/user without auth""" - url = reverse_lazy('authenticate:academy_student') + url = reverse_lazy("authenticate:academy_student") response = self.client.delete(url) json = response.json() - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) @@ -1573,12 +1688,12 @@ def test_academy_student_delete_without_auth(self): def test_academy_student_delete_without_header(self): """Test /cohort/:id/user without auth""" model = self.bc.database.create(authenticate=True) - url = reverse_lazy('authenticate:academy_student') + url = reverse_lazy("authenticate:academy_student") response = self.client.delete(url) json = response.json() expected = { - 'detail': 'Missing academy_id parameter expected for the endpoint url or \'Academy\' header', - 'status_code': 403 + "detail": "Missing academy_id parameter expected for the endpoint url or 'Academy' header", + "status_code": 403, } self.assertEqual(json, expected) @@ -1589,12 +1704,12 @@ def test_academy_student_delete_without_capability(self): """Test /cohort/:id/user without auth""" self.headers(academy=1) model = self.bc.database.create(authenticate=True) - url = reverse_lazy('authenticate:academy_student') + url = reverse_lazy("authenticate:academy_student") response = self.client.delete(url) json = response.json() expected = { - 'detail': "You (user: 1) don't have this capability: crud_student for academy 1", - 'status_code': 403 + "detail": "You (user: 1) don't have this capability: crud_student for academy 1", + "status_code": 403, } self.assertEqual(json, expected) @@ -1604,17 +1719,21 @@ def test_academy_student_delete_without_capability(self): def test_academy_student_delete_without_args_in_url_or_bulk(self): """Test /cohort/:id/user without auth""" self.headers(academy=1) - model = self.bc.database.create(authenticate=True, - profile_academy=True, - capability='crud_student', - role='student') - url = reverse_lazy('authenticate:academy_student') + model = self.bc.database.create( + authenticate=True, profile_academy=True, capability="crud_student", role="student" + ) + url = reverse_lazy("authenticate:academy_student") response = self.client.delete(url) json = response.json() - expected = {'detail': 'delete-is-forbidden', 'status_code': 403} + expected = {"detail": "delete-is-forbidden", "status_code": 403} self.assertEqual(json, expected) self.assertEqual(response.status_code, 
status.HTTP_403_FORBIDDEN) - self.assertEqual(self.all_profile_academy_dict(), [{ - **self.model_to_dict(model, 'profile_academy'), - }]) + self.assertEqual( + self.all_profile_academy_dict(), + [ + { + **self.model_to_dict(model, "profile_academy"), + } + ], + ) diff --git a/breathecode/authenticate/tests/urls/tests_academy_student_id.py b/breathecode/authenticate/tests/urls/tests_academy_student_id.py index 956416837..9376e014b 100644 --- a/breathecode/authenticate/tests/urls/tests_academy_student_id.py +++ b/breathecode/authenticate/tests/urls/tests_academy_student_id.py @@ -1,6 +1,7 @@ """ Test cases for /academy/:id/member/:id """ + from unittest.mock import MagicMock, patch from django.urls.base import reverse_lazy @@ -14,637 +15,697 @@ class AuthenticateTestSuite(AuthTestCase): """Authentication test suite""" - @patch('os.getenv', MagicMock(return_value='https://dotdotdotdotdot.dot')) + @patch("os.getenv", MagicMock(return_value="https://dotdotdotdotdot.dot")) def test_academy_student_id_without_auth(self): """Test /academy/:id/member/:id without auth""" - url = reverse_lazy('authenticate:academy_student_id', kwargs={'user_id_or_email': '1'}) + url = reverse_lazy("authenticate:academy_student_id", kwargs={"user_id_or_email": "1"}) response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED, - }) + self.assertEqual( + json, + { + "detail": "Authentication credentials were not provided.", + "status_code": status.HTTP_401_UNAUTHORIZED, + }, + ) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) - @patch('os.getenv', MagicMock(return_value='https://dotdotdotdotdot.dot')) + @patch("os.getenv", MagicMock(return_value="https://dotdotdotdotdot.dot")) def test_academy_student_id_without_capability(self): """Test /academy/:id/member/:id""" self.headers(academy=1) self.generate_models(authenticate=True) - url = reverse_lazy('authenticate:academy_student_id', kwargs={'user_id_or_email': '1'}) + url = reverse_lazy("authenticate:academy_student_id", kwargs={"user_id_or_email": "1"}) response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': "You (user: 1) don't have this capability: read_student " - 'for academy 1', - 'status_code': 403 - }) + self.assertEqual( + json, + {"detail": "You (user: 1) don't have this capability: read_student " "for academy 1", "status_code": 403}, + ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) """ 🔽🔽🔽 GET without data, passing id """ - @patch('os.getenv', MagicMock(return_value='https://dotdotdotdotdot.dot')) + @patch("os.getenv", MagicMock(return_value="https://dotdotdotdotdot.dot")) def test_academy_student_id__passing_id__not_found(self): """Test /academy/:id/member/:id""" - role = 'konan' + role = "konan" self.bc.request.set_headers(academy=1) - model = self.generate_models(authenticate=True, role=role, capability='read_student', profile_academy=True) - url = reverse_lazy('authenticate:academy_student_id', kwargs={'user_id_or_email': '2'}) + model = self.generate_models(authenticate=True, role=role, capability="read_student", profile_academy=True) + url = reverse_lazy("authenticate:academy_student_id", kwargs={"user_id_or_email": "2"}) response = self.client.get(url) json = response.json() - expected = {'detail': 'profile-academy-not-found', 'status_code': 404} + expected = {"detail": "profile-academy-not-found", "status_code": 404} self.assertEqual(json, 
expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [{ - 'academy_id': 1, - 'address': None, - 'email': None, - 'first_name': None, - 'id': 1, - 'last_name': None, - 'phone': '', - 'role_id': role, - 'status': 'INVITED', - 'user_id': 1, - }]) - - @patch('os.getenv', MagicMock(return_value='https://dotdotdotdotdot.dot')) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + { + "academy_id": 1, + "address": None, + "email": None, + "first_name": None, + "id": 1, + "last_name": None, + "phone": "", + "role_id": role, + "status": "INVITED", + "user_id": 1, + } + ], + ) + + @patch("os.getenv", MagicMock(return_value="https://dotdotdotdotdot.dot")) def test_academy_student_id__passing_id(self): """Test /academy/:id/member/:id""" self.headers(academy=1) - role = 'konan' - model = self.generate_models(authenticate=True, role=role, capability='read_student', profile_academy=True) - url = reverse_lazy('authenticate:academy_student_id', kwargs={'user_id_or_email': '1'}) + role = "konan" + model = self.generate_models(authenticate=True, role=role, capability="read_student", profile_academy=True) + url = reverse_lazy("authenticate:academy_student_id", kwargs={"user_id_or_email": "1"}) response = self.client.get(url) json = response.json() profile_academy = self.get_profile_academy(1) self.assertEqual( - json, { - 'invite_url': 'https://dotdotdotdotdot.dot/v1/auth/academy/html/invite', - 'academy': { - 'id': model['academy'].id, - 'name': model['academy'].name, - 'slug': model['academy'].slug, + json, + { + "invite_url": "https://dotdotdotdotdot.dot/v1/auth/academy/html/invite", + "academy": { + "id": model["academy"].id, + "name": model["academy"].name, + "slug": model["academy"].slug, }, - 'address': None, - 'created_at': datetime_to_iso_format(profile_academy.created_at), - 'email': None, - 'first_name': None, - 'id': 1, - 'last_name': None, - 'phone': '', - 'role': { - 'id': role, - 'name': role, - 'slug': role, + "address": None, + "created_at": datetime_to_iso_format(profile_academy.created_at), + "email": None, + "first_name": None, + "id": 1, + "last_name": None, + "phone": "", + "role": { + "id": role, + "name": role, + "slug": role, }, - 'status': 'INVITED', - 'user': { - 'email': model['user'].email, - 'first_name': model['user'].first_name, - 'id': model['user'].id, - 'last_name': model['user'].last_name, - 'github': None, - 'profile': None, + "status": "INVITED", + "user": { + "email": model["user"].email, + "first_name": model["user"].first_name, + "id": model["user"].id, + "last_name": model["user"].last_name, + "github": None, + "profile": None, }, - }) + }, + ) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_profile_academy_dict(), [{ - 'academy_id': 1, - 'address': None, - 'email': None, - 'first_name': None, - 'id': 1, - 'last_name': None, - 'phone': '', - 'role_id': role, - 'status': 'INVITED', - 'user_id': 1, - }]) - - @patch('os.getenv', MagicMock(return_value='https://dotdotdotdotdot.dot')) + self.assertEqual( + self.all_profile_academy_dict(), + [ + { + "academy_id": 1, + "address": None, + "email": None, + "first_name": None, + "id": 1, + "last_name": None, + "phone": "", + "role_id": role, + "status": "INVITED", + "user_id": 1, + } + ], + ) + + @patch("os.getenv", MagicMock(return_value="https://dotdotdotdotdot.dot")) def test_academy_student_id__passing_email(self): """Test /academy/:id/member/:id""" 
self.headers(academy=1) - role = 'konan' - model = self.generate_models(authenticate=True, role=role, capability='read_student', profile_academy=True) - url = reverse_lazy('authenticate:academy_student_id', kwargs={'user_id_or_email': model.user.email}) + role = "konan" + model = self.generate_models(authenticate=True, role=role, capability="read_student", profile_academy=True) + url = reverse_lazy("authenticate:academy_student_id", kwargs={"user_id_or_email": model.user.email}) response = self.client.get(url) json = response.json() profile_academy = self.get_profile_academy(1) self.assertEqual( - json, { - 'invite_url': 'https://dotdotdotdotdot.dot/v1/auth/academy/html/invite', - 'academy': { - 'id': model['academy'].id, - 'name': model['academy'].name, - 'slug': model['academy'].slug, + json, + { + "invite_url": "https://dotdotdotdotdot.dot/v1/auth/academy/html/invite", + "academy": { + "id": model["academy"].id, + "name": model["academy"].name, + "slug": model["academy"].slug, }, - 'address': None, - 'created_at': datetime_to_iso_format(profile_academy.created_at), - 'email': None, - 'first_name': None, - 'id': 1, - 'last_name': None, - 'phone': '', - 'role': { - 'id': role, - 'name': role, - 'slug': role, + "address": None, + "created_at": datetime_to_iso_format(profile_academy.created_at), + "email": None, + "first_name": None, + "id": 1, + "last_name": None, + "phone": "", + "role": { + "id": role, + "name": role, + "slug": role, }, - 'status': 'INVITED', - 'user': { - 'email': model['user'].email, - 'first_name': model['user'].first_name, - 'id': model['user'].id, - 'last_name': model['user'].last_name, - 'github': None, - 'profile': None, + "status": "INVITED", + "user": { + "email": model["user"].email, + "first_name": model["user"].first_name, + "id": model["user"].id, + "last_name": model["user"].last_name, + "github": None, + "profile": None, }, - }) + }, + ) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_profile_academy_dict(), [{ - 'academy_id': 1, - 'address': None, - 'email': None, - 'first_name': None, - 'id': 1, - 'last_name': None, - 'phone': '', - 'role_id': role, - 'status': 'INVITED', - 'user_id': 1, - }]) + self.assertEqual( + self.all_profile_academy_dict(), + [ + { + "academy_id": 1, + "address": None, + "email": None, + "first_name": None, + "id": 1, + "last_name": None, + "phone": "", + "role_id": role, + "status": "INVITED", + "user_id": 1, + } + ], + ) """ 🔽🔽🔽 GET with profile ans github """ - @patch('os.getenv', MagicMock(return_value='https://dotdotdotdotdot.dot')) + @patch("os.getenv", MagicMock(return_value="https://dotdotdotdotdot.dot")) def test_academy_student_id__with_profile__with_github(self): """Test /academy/:id/member/:id""" self.headers(academy=1) - role = 'konan' - model = self.generate_models(authenticate=True, - role=role, - capability='read_student', - profile_academy=True, - credentials_github=True, - profile=True) - url = reverse_lazy('authenticate:academy_student_id', kwargs={'user_id_or_email': '1'}) + role = "konan" + model = self.generate_models( + authenticate=True, + role=role, + capability="read_student", + profile_academy=True, + credentials_github=True, + profile=True, + ) + url = reverse_lazy("authenticate:academy_student_id", kwargs={"user_id_or_email": "1"}) response = self.client.get(url) json = response.json() profile_academy = self.get_profile_academy(1) self.assertEqual( - json, { - 'invite_url': 'https://dotdotdotdotdot.dot/v1/auth/academy/html/invite', - 'academy': { - 'id': 
model['academy'].id, - 'name': model['academy'].name, - 'slug': model['academy'].slug, + json, + { + "invite_url": "https://dotdotdotdotdot.dot/v1/auth/academy/html/invite", + "academy": { + "id": model["academy"].id, + "name": model["academy"].name, + "slug": model["academy"].slug, }, - 'address': None, - 'created_at': datetime_to_iso_format(profile_academy.created_at), - 'email': None, - 'first_name': None, - 'id': 1, - 'last_name': None, - 'phone': '', - 'role': { - 'id': role, - 'name': role, - 'slug': role, + "address": None, + "created_at": datetime_to_iso_format(profile_academy.created_at), + "email": None, + "first_name": None, + "id": 1, + "last_name": None, + "phone": "", + "role": { + "id": role, + "name": role, + "slug": role, }, - 'status': 'INVITED', - 'user': { - 'email': model['user'].email, - 'first_name': model['user'].first_name, - 'id': model['user'].id, - 'last_name': model['user'].last_name, - 'github': { - 'avatar_url': model['user'].credentialsgithub.avatar_url, - 'name': model['user'].credentialsgithub.name, - 'username': model['user'].credentialsgithub.username, - }, - 'profile': { - 'avatar_url': model['user'].profile.avatar_url + "status": "INVITED", + "user": { + "email": model["user"].email, + "first_name": model["user"].first_name, + "id": model["user"].id, + "last_name": model["user"].last_name, + "github": { + "avatar_url": model["user"].credentialsgithub.avatar_url, + "name": model["user"].credentialsgithub.name, + "username": model["user"].credentialsgithub.username, }, + "profile": {"avatar_url": model["user"].profile.avatar_url}, }, - }) + }, + ) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_profile_academy_dict(), [{ - 'academy_id': 1, - 'address': None, - 'email': None, - 'first_name': None, - 'id': 1, - 'last_name': None, - 'phone': '', - 'role_id': role, - 'status': 'INVITED', - 'user_id': 1, - }]) - - @patch('os.getenv', MagicMock(return_value='https://dotdotdotdotdot.dot')) + self.assertEqual( + self.all_profile_academy_dict(), + [ + { + "academy_id": 1, + "address": None, + "email": None, + "first_name": None, + "id": 1, + "last_name": None, + "phone": "", + "role_id": role, + "status": "INVITED", + "user_id": 1, + } + ], + ) + + @patch("os.getenv", MagicMock(return_value="https://dotdotdotdotdot.dot")) def test_academy_student_id_with_github(self): """Test /academy/:id/member/:id""" self.headers(academy=1) - role = 'konan' - model = self.generate_models(authenticate=True, - role=role, - capability='read_student', - profile_academy=True, - credentials_github=True) - url = reverse_lazy('authenticate:academy_student_id', kwargs={'user_id_or_email': '1'}) + role = "konan" + model = self.generate_models( + authenticate=True, role=role, capability="read_student", profile_academy=True, credentials_github=True + ) + url = reverse_lazy("authenticate:academy_student_id", kwargs={"user_id_or_email": "1"}) response = self.client.get(url) json = response.json() profile_academy = self.get_profile_academy(1) self.assertEqual( - json, { - 'invite_url': 'https://dotdotdotdotdot.dot/v1/auth/academy/html/invite', - 'academy': { - 'id': model['academy'].id, - 'name': model['academy'].name, - 'slug': model['academy'].slug, + json, + { + "invite_url": "https://dotdotdotdotdot.dot/v1/auth/academy/html/invite", + "academy": { + "id": model["academy"].id, + "name": model["academy"].name, + "slug": model["academy"].slug, }, - 'address': None, - 'created_at': datetime_to_iso_format(profile_academy.created_at), - 'email': None, - 
'first_name': None, - 'id': 1, - 'last_name': None, - 'phone': '', - 'role': { - 'id': role, - 'name': role, - 'slug': role, + "address": None, + "created_at": datetime_to_iso_format(profile_academy.created_at), + "email": None, + "first_name": None, + "id": 1, + "last_name": None, + "phone": "", + "role": { + "id": role, + "name": role, + "slug": role, }, - 'status': 'INVITED', - 'user': { - 'email': model['user'].email, - 'first_name': model['user'].first_name, - 'id': model['user'].id, - 'last_name': model['user'].last_name, - 'github': { - 'avatar_url': None, - 'name': None, - 'username': None - }, - 'profile': None, + "status": "INVITED", + "user": { + "email": model["user"].email, + "first_name": model["user"].first_name, + "id": model["user"].id, + "last_name": model["user"].last_name, + "github": {"avatar_url": None, "name": None, "username": None}, + "profile": None, }, - }) + }, + ) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_profile_academy_dict(), [{ - 'academy_id': 1, - 'address': None, - 'email': None, - 'first_name': None, - 'id': 1, - 'last_name': None, - 'phone': '', - 'role_id': role, - 'status': 'INVITED', - 'user_id': 1, - }]) + self.assertEqual( + self.all_profile_academy_dict(), + [ + { + "academy_id": 1, + "address": None, + "email": None, + "first_name": None, + "id": 1, + "last_name": None, + "phone": "", + "role_id": role, + "status": "INVITED", + "user_id": 1, + } + ], + ) """ 🔽🔽🔽 PUT capability """ - @patch('os.getenv', MagicMock(return_value='https://dotdotdotdotdot.dot')) + @patch("os.getenv", MagicMock(return_value="https://dotdotdotdotdot.dot")) def test_academy_student_id__put__without_capability(self): """Test /academy/:id/member/:id""" self.bc.request.set_headers(academy=1) self.generate_models(authenticate=True) - url = reverse_lazy('authenticate:academy_student_id', kwargs={'user_id_or_email': '1'}) + url = reverse_lazy("authenticate:academy_student_id", kwargs={"user_id_or_email": "1"}) response = self.client.put(url) json = response.json() - self.assertEqual(json, { - 'detail': "You (user: 1) don't have this capability: crud_student " - 'for academy 1', - 'status_code': 403 - }) + self.assertEqual( + json, + {"detail": "You (user: 1) don't have this capability: crud_student " "for academy 1", "status_code": 403}, + ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) """ 🔽🔽🔽 PUT passing email """ - @patch('os.getenv', MagicMock(return_value='https://dotdotdotdotdot.dot')) + @patch("os.getenv", MagicMock(return_value="https://dotdotdotdotdot.dot")) def test_academy_student_id__put__passing_email(self): """Test /academy/:id/member/:id""" - role = 'student' + role = "student" self.bc.request.set_headers(academy=1) - model = self.generate_models(authenticate=True, role=role, capability='crud_student', profile_academy=True) - url = reverse_lazy('authenticate:academy_student_id', kwargs={'user_id_or_email': 'dude@dude.dude'}) + model = self.generate_models(authenticate=True, role=role, capability="crud_student", profile_academy=True) + url = reverse_lazy("authenticate:academy_student_id", kwargs={"user_id_or_email": "dude@dude.dude"}) response = self.client.put(url) json = response.json() - expected = {'detail': 'user-id-is-not-numeric', 'status_code': 404} + expected = {"detail": "user-id-is-not-numeric", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [ 
- self.bc.format.to_dict(model.profile_academy), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + self.bc.format.to_dict(model.profile_academy), + ], + ) """ 🔽🔽🔽 PUT user not exists """ - @patch('os.getenv', MagicMock(return_value='https://dotdotdotdotdot.dot')) + @patch("os.getenv", MagicMock(return_value="https://dotdotdotdotdot.dot")) def test_academy_student_id__put__user_does_not_exists(self): """Test /academy/:id/member/:id""" - role = 'student' + role = "student" self.bc.request.set_headers(academy=1) - model = self.generate_models(authenticate=True, role=role, capability='crud_student', profile_academy=True) - url = reverse_lazy('authenticate:academy_student_id', kwargs={'user_id_or_email': '2'}) + model = self.generate_models(authenticate=True, role=role, capability="crud_student", profile_academy=True) + url = reverse_lazy("authenticate:academy_student_id", kwargs={"user_id_or_email": "2"}) response = self.client.put(url) json = response.json() - expected = {'detail': 'profile-academy-not-found', 'status_code': 404} + expected = {"detail": "profile-academy-not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [ - self.bc.format.to_dict(model.profile_academy), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + self.bc.format.to_dict(model.profile_academy), + ], + ) """ 🔽🔽🔽 PUT changing role """ - @patch('os.getenv', MagicMock(return_value='https://dotdotdotdotdot.dot')) + @patch("os.getenv", MagicMock(return_value="https://dotdotdotdotdot.dot")) def test_academy_student_id__put__changing_role(self): """Test /academy/:id/member/:id""" - role = 'student' + role = "student" self.bc.request.set_headers(academy=1) - model = self.generate_models(role=role, user=1, capability='crud_student', profile_academy=True) + model = self.generate_models(role=role, user=1, capability="crud_student", profile_academy=True) self.client.force_authenticate(model.user) - url = reverse_lazy('authenticate:academy_student_id', kwargs={'user_id_or_email': '1'}) + url = reverse_lazy("authenticate:academy_student_id", kwargs={"user_id_or_email": "1"}) - data = {'role': 'nut'} - response = self.client.put(url, data, format='json') + data = {"role": "nut"} + response = self.client.put(url, data, format="json") json = response.json() - expected = {'detail': 'trying-to-change-role', 'status_code': 400} + expected = {"detail": "trying-to-change-role", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [ - self.bc.format.to_dict(model.profile_academy), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + self.bc.format.to_dict(model.profile_academy), + ], + ) """ 🔽🔽🔽 PUT changing a staff """ - @patch('os.getenv', MagicMock(return_value='https://dotdotdotdotdot.dot')) + @patch("os.getenv", MagicMock(return_value="https://dotdotdotdotdot.dot")) def test_academy_student_id__put__changing_a_staff(self): """Test /academy/:id/member/:id""" - role = 'konan' + role = "konan" self.bc.request.set_headers(academy=1) - model = self.generate_models(role=role, user=1, capability='crud_student', profile_academy=True) + model = self.generate_models(role=role, user=1, capability="crud_student", profile_academy=True) 
self.client.force_authenticate(model.user) - url = reverse_lazy('authenticate:academy_student_id', kwargs={'user_id_or_email': '1'}) + url = reverse_lazy("authenticate:academy_student_id", kwargs={"user_id_or_email": "1"}) data = {} - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() - expected = {'detail': 'trying-to-change-a-staff', 'status_code': 400} + expected = {"detail": "trying-to-change-a-staff", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [ - self.bc.format.to_dict(model.profile_academy), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + self.bc.format.to_dict(model.profile_academy), + ], + ) """ 🔽🔽🔽 PUT User exists but without a ProfileAcademy """ - @patch('os.getenv', MagicMock(return_value='https://dotdotdotdotdot.dot')) + @patch("os.getenv", MagicMock(return_value="https://dotdotdotdotdot.dot")) def test_academy_student_id__put__user_exists_but_without_profile_academy(self): """Test /academy/:id/member/:id""" - role = 'student' + role = "student" self.bc.request.set_headers(academy=1) - model = self.generate_models(role=role, user=2, capability='crud_student', profile_academy=True) + model = self.generate_models(role=role, user=2, capability="crud_student", profile_academy=True) self.bc.request.authenticate(model.user[0]) - url = reverse_lazy('authenticate:academy_student_id', kwargs={'user_id_or_email': '2'}) + url = reverse_lazy("authenticate:academy_student_id", kwargs={"user_id_or_email": "2"}) response = self.client.put(url) json = response.json() - expected = {'detail': 'profile-academy-not-found', 'status_code': 404} + expected = {"detail": "profile-academy-not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [ - self.bc.format.to_dict(model.profile_academy), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + self.bc.format.to_dict(model.profile_academy), + ], + ) """ 🔽🔽🔽 PUT with data """ - @patch('os.getenv', MagicMock(return_value='https://dotdotdotdotdot.dot')) + @patch("os.getenv", MagicMock(return_value="https://dotdotdotdotdot.dot")) def test_academy_student_id__put__with_data(self): """Test /academy/:id/member/:id""" profile_academy = { - 'first_name': self.bc.fake.first_name(), - 'last_name': self.bc.fake.last_name(), - 'email': self.bc.fake.email(), + "first_name": self.bc.fake.first_name(), + "last_name": self.bc.fake.last_name(), + "email": self.bc.fake.email(), } - role = 'student' + role = "student" self.bc.request.set_headers(academy=1) - model = self.generate_models(authenticate=True, - role=role, - capability='crud_student', - profile_academy=profile_academy) - url = reverse_lazy('authenticate:academy_student_id', kwargs={'user_id_or_email': '1'}) + model = self.generate_models( + authenticate=True, role=role, capability="crud_student", profile_academy=profile_academy + ) + url = reverse_lazy("authenticate:academy_student_id", kwargs={"user_id_or_email": "1"}) response = self.client.put(url) json = response.json() expected = { - 'academy': model.academy.id, - 'address': model.profile_academy.address, - 'first_name': model.profile_academy.first_name, - 'last_name': 
model.profile_academy.last_name, - 'phone': model.profile_academy.phone, - 'role': role, - 'user': model.user.id, + "academy": model.academy.id, + "address": model.profile_academy.address, + "first_name": model.profile_academy.first_name, + "last_name": model.profile_academy.last_name, + "phone": model.profile_academy.phone, + "role": role, + "user": model.user.id, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [ - self.bc.format.to_dict(model.profile_academy), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + self.bc.format.to_dict(model.profile_academy), + ], + ) - @patch('os.getenv', MagicMock(return_value='https://dotdotdotdotdot.dot')) + @patch("os.getenv", MagicMock(return_value="https://dotdotdotdotdot.dot")) def test_academy_student_id__put__without__first_name(self): """Test /academy/:id/member/:id""" profile_academy = { - 'first_name': self.bc.fake.first_name(), - 'last_name': self.bc.fake.last_name(), - 'email': self.bc.fake.email(), + "first_name": self.bc.fake.first_name(), + "last_name": self.bc.fake.last_name(), + "email": self.bc.fake.email(), } - role = 'student' + role = "student" self.bc.request.set_headers(academy=1) - model = self.generate_models(authenticate=True, - role=role, - capability='crud_student', - profile_academy=profile_academy) - url = reverse_lazy('authenticate:academy_student_id', kwargs={'user_id_or_email': '1'}) + model = self.generate_models( + authenticate=True, role=role, capability="crud_student", profile_academy=profile_academy + ) + url = reverse_lazy("authenticate:academy_student_id", kwargs={"user_id_or_email": "1"}) data = { - 'first_name': '', - 'user': model.user.id, + "first_name": "", + "user": model.user.id, } - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() - expected = {'first_name': ['This field may not be blank.']} + expected = {"first_name": ["This field may not be blank."]} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [ - self.bc.format.to_dict(model.profile_academy), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + self.bc.format.to_dict(model.profile_academy), + ], + ) - @patch('os.getenv', MagicMock(return_value='https://dotdotdotdotdot.dot')) + @patch("os.getenv", MagicMock(return_value="https://dotdotdotdotdot.dot")) def test_academy_student_id__put__without__last_name(self): """Test /academy/:id/member/:id""" profile_academy = { - 'first_name': self.bc.fake.first_name(), - 'last_name': self.bc.fake.last_name(), - 'email': self.bc.fake.email(), + "first_name": self.bc.fake.first_name(), + "last_name": self.bc.fake.last_name(), + "email": self.bc.fake.email(), } - role = 'student' + role = "student" self.bc.request.set_headers(academy=1) - model = self.generate_models(authenticate=True, - role=role, - capability='crud_student', - profile_academy=profile_academy) - url = reverse_lazy('authenticate:academy_student_id', kwargs={'user_id_or_email': '1'}) + model = self.generate_models( + authenticate=True, role=role, capability="crud_student", profile_academy=profile_academy + ) + url = reverse_lazy("authenticate:academy_student_id", kwargs={"user_id_or_email": "1"}) data = { - 'last_name': '', - 'user': model.user.id, + 
"last_name": "", + "user": model.user.id, } - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() - expected = {'last_name': ['This field may not be blank.']} + expected = {"last_name": ["This field may not be blank."]} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [ - self.bc.format.to_dict(model.profile_academy), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + self.bc.format.to_dict(model.profile_academy), + ], + ) - @patch('os.getenv', MagicMock(return_value='https://dotdotdotdotdot.dot')) + @patch("os.getenv", MagicMock(return_value="https://dotdotdotdotdot.dot")) def test_academy_student_id__put__without_required_fields(self): """Test /academy/:id/member/:id""" - role = 'crud_student' + role = "crud_student" self.bc.request.set_headers(academy=1) - model = self.generate_models(authenticate=True, role=role, capability='crud_student', profile_academy=1) - url = reverse_lazy('authenticate:academy_student_id', kwargs={'user_id_or_email': '1'}) + model = self.generate_models(authenticate=True, role=role, capability="crud_student", profile_academy=1) + url = reverse_lazy("authenticate:academy_student_id", kwargs={"user_id_or_email": "1"}) response = self.client.put(url) json = response.json() - expected = {'detail': 'trying-to-change-a-staff', 'status_code': 400} + expected = {"detail": "trying-to-change-a-staff", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [ - self.bc.format.to_dict(model.profile_academy), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + self.bc.format.to_dict(model.profile_academy), + ], + ) """ 🔽🔽🔽 PUT with data, changing values """ - @patch('os.getenv', MagicMock(return_value='https://dotdotdotdotdot.dot')) + @patch("os.getenv", MagicMock(return_value="https://dotdotdotdotdot.dot")) def test_academy_student_id__put__with_data__changing_values(self): """Test /academy/:id/member/:id""" - role = 'student' + role = "student" self.bc.request.set_headers(academy=1) - model = self.generate_models(authenticate=True, role=role, capability='crud_student', profile_academy=True) - url = reverse_lazy('authenticate:academy_student_id', kwargs={'user_id_or_email': '1'}) + model = self.generate_models(authenticate=True, role=role, capability="crud_student", profile_academy=True) + url = reverse_lazy("authenticate:academy_student_id", kwargs={"user_id_or_email": "1"}) - data = {'first_name': 'Lord', 'last_name': 'Valdomero'} - response = self.client.put(url, data, format='json') + data = {"first_name": "Lord", "last_name": "Valdomero"} + response = self.client.put(url, data, format="json") json = response.json() expected = { - 'academy': model.academy.id, - 'address': model.profile_academy.address, - 'first_name': 'Lord', - 'last_name': 'Valdomero', - 'phone': model.profile_academy.phone, - 'role': role, - 'user': model.user.id, + "academy": model.academy.id, + "address": model.profile_academy.address, + "first_name": "Lord", + "last_name": "Valdomero", + "phone": model.profile_academy.phone, + "role": role, + "user": model.user.id, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - 
self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [ - { - **self.bc.format.to_dict(model.profile_academy), - 'first_name': 'Lord', - 'last_name': 'Valdomero', - }, - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + { + **self.bc.format.to_dict(model.profile_academy), + "first_name": "Lord", + "last_name": "Valdomero", + }, + ], + ) """ 🔽🔽🔽 DELETE with data, passing email """ - @patch('os.getenv', MagicMock(return_value='https://dotdotdotdotdot.dot')) + @patch("os.getenv", MagicMock(return_value="https://dotdotdotdotdot.dot")) def test_academy_student_id__delete__passing_email(self): """Test /academy/:id/member/:id""" - role = 'student' + role = "student" self.bc.request.set_headers(academy=1) - model = self.generate_models(authenticate=True, role=role, capability='crud_student', profile_academy=True) - url = reverse_lazy('authenticate:academy_student_id', kwargs={'user_id_or_email': 'dude@dude.dude'}) + model = self.generate_models(authenticate=True, role=role, capability="crud_student", profile_academy=True) + url = reverse_lazy("authenticate:academy_student_id", kwargs={"user_id_or_email": "dude@dude.dude"}) response = self.client.delete(url) json = response.json() - expected = {'detail': 'delete-is-forbidden', 'status_code': 403} + expected = {"detail": "delete-is-forbidden", "status_code": 403} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [{ - 'academy_id': 1, - 'address': None, - 'email': None, - 'first_name': None, - 'id': 1, - 'last_name': None, - 'phone': '', - 'role_id': role, - 'status': 'INVITED', - 'user_id': 1, - }]) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + { + "academy_id": 1, + "address": None, + "email": None, + "first_name": None, + "id": 1, + "last_name": None, + "phone": "", + "role_id": role, + "status": "INVITED", + "user_id": 1, + } + ], + ) diff --git a/breathecode/authenticate/tests/urls/tests_academy_token.py b/breathecode/authenticate/tests/urls/tests_academy_token.py index e7022b31d..fff2bbf44 100644 --- a/breathecode/authenticate/tests/urls/tests_academy_token.py +++ b/breathecode/authenticate/tests/urls/tests_academy_token.py @@ -10,61 +10,68 @@ class AuthenticateTestSuite(AuthTestCase): def test_academy_token_without_auth(self): """Test /academy/:id/member/:id without auth""" - url = reverse_lazy('authenticate:academy_token') + url = reverse_lazy("authenticate:academy_token") response = self.client.post(url) json = response.json() - self.assertEqual(json, { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED, - }) + self.assertEqual( + json, + { + "detail": "Authentication credentials were not provided.", + "status_code": status.HTTP_401_UNAUTHORIZED, + }, + ) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) def test_academy_token_get_without_capability(self): """Test /academy/:id/member/:id without auth""" self.headers(academy=1) self.generate_models(authenticate=True) - url = reverse_lazy('authenticate:academy_token') + url = reverse_lazy("authenticate:academy_token") response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': "You (user: 1) don't have this capability: get_academy_token " - 'for academy 1', - 'status_code': 403 - }) + self.assertEqual( + json, + { + "detail": "You (user: 1) don't have this capability: 
get_academy_token " "for academy 1", + "status_code": 403, + }, + ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) def test_academy_token_get_without_user(self): """Test /academy/:id/member/:id without auth""" - role = 'konan' + role = "konan" self.headers(academy=1) - self.generate_models(authenticate=True, role=role, capability='get_academy_token', profile_academy=True) - url = reverse_lazy('authenticate:academy_token') + self.generate_models(authenticate=True, role=role, capability="get_academy_token", profile_academy=True) + url = reverse_lazy("authenticate:academy_token") response = self.client.get(url) json = response.json() - expected = {'detail': 'academy-token-not-found', 'status_code': 400} + expected = {"detail": "academy-token-not-found", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) def test_academy_token_get_without_token(self): """Test /academy/:id/member/:id without auth""" - role = 'konan' + role = "konan" self.headers(academy=1) - user_kwargs = {'username': 'kenny'} - academy_kwargs = {'slug': 'kenny'} - self.generate_models(authenticate=True, - role=role, - user=True, - capability='get_academy_token', - profile_academy=True, - user_kwargs=user_kwargs, - academy_kwargs=academy_kwargs) - url = reverse_lazy('authenticate:academy_token') + user_kwargs = {"username": "kenny"} + academy_kwargs = {"slug": "kenny"} + self.generate_models( + authenticate=True, + role=role, + user=True, + capability="get_academy_token", + profile_academy=True, + user_kwargs=user_kwargs, + academy_kwargs=academy_kwargs, + ) + url = reverse_lazy("authenticate:academy_token") response = self.client.get(url) json = response.json() - expected = {'detail': 'academy-token-not-found', 'status_code': 400} + expected = {"detail": "academy-token-not-found", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -73,242 +80,289 @@ def test_academy_token_post_without_capability(self): """Test /academy/:id/member/:id without auth""" self.headers(academy=1) self.generate_models(authenticate=True) - url = reverse_lazy('authenticate:academy_token') + url = reverse_lazy("authenticate:academy_token") response = self.client.post(url) json = response.json() self.assertEqual( - json, { - 'detail': "You (user: 1) don't have this capability: generate_academy_token " - 'for academy 1', - 'status_code': 403 - }) + json, + { + "detail": "You (user: 1) don't have this capability: generate_academy_token " "for academy 1", + "status_code": 403, + }, + ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) def test_academy_token_post(self): """Test /academy/:id/member/:id without auth""" - role = 'academy_token' + role = "academy_token" self.headers(academy=1) - model = self.generate_models(authenticate=True, - role=role, - capability='generate_academy_token', - profile_academy=True) - url = reverse_lazy('authenticate:academy_token') + model = self.generate_models( + authenticate=True, role=role, capability="generate_academy_token", profile_academy=True + ) + url = reverse_lazy("authenticate:academy_token") response = self.client.post(url) json = response.json() - token_pattern = re.compile(r'[0-9a-zA-Z]{,40}$') - expected = {'token_type': 'permanent', 'expires_at': None} + token_pattern = re.compile(r"[0-9a-zA-Z]{,40}$") + expected = {"token_type": "permanent", "expires_at": None} token = self.get_token(1) user = self.get_user(2) - 
self.assertEqual(self.all_token_dict(), [{ - 'created': token.created, - 'expires_at': json['expires_at'], - 'id': 1, - 'key': json['token'], - 'token_type': json['token_type'], - 'user_id': 2 - }]) - self.assertEqual(bool(token_pattern.match(json['token'])), True) - del json['token'] + self.assertEqual( + self.all_token_dict(), + [ + { + "created": token.created, + "expires_at": json["expires_at"], + "id": 1, + "key": json["token"], + "token_type": json["token_type"], + "user_id": 2, + } + ], + ) + self.assertEqual(bool(token_pattern.match(json["token"])), True) + del json["token"] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_user_dict(), [{ - **self.model_to_dict(model, 'user'), - }, { - 'date_joined': user.date_joined, - 'username': model['academy'].slug, - 'email': f"{model['academy'].slug}@token.com", - 'first_name': '', - 'id': 2, - 'is_staff': False, - 'is_superuser': False, - 'last_login': None, - 'last_name': '', - 'password': '', - 'is_active': True - }]) - self.assertEqual(self.all_profile_academy_dict(), [{ - **self.model_to_dict(model, 'profile_academy'), - }, { - 'academy_id': model['academy'].id, - 'address': None, - 'email': None, - 'first_name': None, - 'last_name': None, - 'id': 2, - 'phone': '', - 'role_id': role, - 'status': 'ACTIVE', - 'user_id': 2 - }]) - self.assertEqual(self.all_role_dict(), [{ - **self.model_to_dict(model, 'role'), - }]) + self.assertEqual( + self.all_user_dict(), + [ + { + **self.model_to_dict(model, "user"), + }, + { + "date_joined": user.date_joined, + "username": model["academy"].slug, + "email": f"{model['academy'].slug}@token.com", + "first_name": "", + "id": 2, + "is_staff": False, + "is_superuser": False, + "last_login": None, + "last_name": "", + "password": "", + "is_active": True, + }, + ], + ) + self.assertEqual( + self.all_profile_academy_dict(), + [ + { + **self.model_to_dict(model, "profile_academy"), + }, + { + "academy_id": model["academy"].id, + "address": None, + "email": None, + "first_name": None, + "last_name": None, + "id": 2, + "phone": "", + "role_id": role, + "status": "ACTIVE", + "user_id": 2, + }, + ], + ) + self.assertEqual( + self.all_role_dict(), + [ + { + **self.model_to_dict(model, "role"), + } + ], + ) def test_academy_token_post_refresh_token(self): """Test /academy/:id/member/:id without auth""" - role = 'academy_token' + role = "academy_token" self.headers(academy=1) - academy_kwargs = {'slug': 'academy-a'} - user_kwargs = {'username': 'academy-a'} + academy_kwargs = {"slug": "academy-a"} + user_kwargs = {"username": "academy-a"} model = self.generate_models( authenticate=True, role=role, user=True, academy_kwargs=academy_kwargs, - capability='generate_academy_token', + capability="generate_academy_token", profile_academy=True, token=True, user_kwargs=user_kwargs, ) - url = reverse_lazy('authenticate:academy_token') + url = reverse_lazy("authenticate:academy_token") response = self.client.post(url) - expected = {'token_type': 'permanent', 'expires_at': None} + expected = {"token_type": "permanent", "expires_at": None} json = response.json() - token_pattern = re.compile(r'[0-9a-zA-Z]{,40}$') + token_pattern = re.compile(r"[0-9a-zA-Z]{,40}$") token = self.get_token(2) - self.assertEqual(self.all_token_dict(), [{ - 'created': token.created, - 'expires_at': json['expires_at'], - 'id': 2, - 'key': json['token'], - 'token_type': json['token_type'], - 'user_id': model['user'].id - }]) - self.assertEqual(bool(token_pattern.match(json['token'])), 
True) - del json['token'] + self.assertEqual( + self.all_token_dict(), + [ + { + "created": token.created, + "expires_at": json["expires_at"], + "id": 2, + "key": json["token"], + "token_type": json["token_type"], + "user_id": model["user"].id, + } + ], + ) + self.assertEqual(bool(token_pattern.match(json["token"])), True) + del json["token"] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_user_dict(), [{ - **self.model_to_dict(model, 'user'), - }]) - self.assertEqual(self.all_profile_academy_dict(), [{ - **self.model_to_dict(model, 'profile_academy'), - }]) + self.assertEqual( + self.all_user_dict(), + [ + { + **self.model_to_dict(model, "user"), + } + ], + ) + self.assertEqual( + self.all_profile_academy_dict(), + [ + { + **self.model_to_dict(model, "profile_academy"), + } + ], + ) def test_academy_token_with_other_endpoints(self): """Test /academy/:id/member/:id without auth""" - role = 'academy_token' + role = "academy_token" self.headers(academy=1) - model = self.generate_models(authenticate=True, - role=role, - academy=True, - capability='generate_academy_token', - profile_academy=True, - form_entry=True) - url = reverse_lazy('authenticate:academy_token') + model = self.generate_models( + authenticate=True, + role=role, + academy=True, + capability="generate_academy_token", + profile_academy=True, + form_entry=True, + ) + url = reverse_lazy("authenticate:academy_token") response = self.client.post(url) json = response.json() - token_pattern = re.compile(r'[0-9a-zA-Z]{,40}$') + token_pattern = re.compile(r"[0-9a-zA-Z]{,40}$") - self.assertEqual(bool(token_pattern.match(json['token'])), True) + self.assertEqual(bool(token_pattern.match(json["token"])), True) self.assertEqual(response.status_code, status.HTTP_200_OK) self.client.logout() - self.client.credentials(HTTP_AUTHORIZATION='Token ' + json['token']) + self.client.credentials(HTTP_AUTHORIZATION="Token " + json["token"]) - url = reverse_lazy('marketing:lead_all') + url = reverse_lazy("marketing:lead_all") response = self.client.get(url) json = response.json() - self.assertDatetime(json[0]['created_at']) - del json[0]['created_at'] - - expected = [{ - 'academy': { - 'id': model.form_entry.academy.id, - 'name': model.form_entry.academy.name, - 'slug': model.form_entry.academy.slug - }, - 'country': model.form_entry.country, - 'course': model.form_entry.course, - 'email': model.form_entry.email, - 'client_comments': model.form_entry.client_comments, - 'first_name': model.form_entry.first_name, - 'gclid': model.form_entry.gclid, - 'id': model.form_entry.id, - 'language': model.form_entry.language, - 'last_name': model.form_entry.last_name, - 'lead_type': model.form_entry.lead_type, - 'location': model.form_entry.location, - 'storage_status': model.form_entry.storage_status, - 'tags': model.form_entry.tags, - 'utm_campaign': model.form_entry.utm_campaign, - 'utm_medium': model.form_entry.utm_medium, - 'utm_source': model.form_entry.utm_source, - 'utm_url': model.form_entry.utm_url, - 'sex': model.form_entry.sex, - 'custom_fields': model.form_entry.custom_fields, - 'utm_placement': model.form_entry.utm_placement, - 'utm_plan': model.form_entry.utm_plan, - 'utm_term': model.form_entry.utm_term, - }] + self.assertDatetime(json[0]["created_at"]) + del json[0]["created_at"] + + expected = [ + { + "academy": { + "id": model.form_entry.academy.id, + "name": model.form_entry.academy.name, + "slug": model.form_entry.academy.slug, + }, + "country": model.form_entry.country, 
+ "course": model.form_entry.course, + "email": model.form_entry.email, + "client_comments": model.form_entry.client_comments, + "first_name": model.form_entry.first_name, + "gclid": model.form_entry.gclid, + "id": model.form_entry.id, + "language": model.form_entry.language, + "last_name": model.form_entry.last_name, + "lead_type": model.form_entry.lead_type, + "location": model.form_entry.location, + "storage_status": model.form_entry.storage_status, + "tags": model.form_entry.tags, + "utm_campaign": model.form_entry.utm_campaign, + "utm_medium": model.form_entry.utm_medium, + "utm_source": model.form_entry.utm_source, + "utm_url": model.form_entry.utm_url, + "sex": model.form_entry.sex, + "custom_fields": model.form_entry.custom_fields, + "utm_placement": model.form_entry.utm_placement, + "utm_plan": model.form_entry.utm_plan, + "utm_term": model.form_entry.utm_term, + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_form_entry_dict(), [{**self.model_to_dict(model, 'form_entry')}]) + self.assertEqual(self.all_form_entry_dict(), [{**self.model_to_dict(model, "form_entry")}]) def test_academy_no_token_with_other_endpoints(self): """Test /academy/:id/member/:id without auth""" - role = 'academy_token' + role = "academy_token" self.headers(academy=1) - model = self.generate_models(authenticate=True, - role=role, - academy=True, - capability='generate_academy_token', - profile_academy=True, - form_entry=True) - url = reverse_lazy('authenticate:academy_token') + model = self.generate_models( + authenticate=True, + role=role, + academy=True, + capability="generate_academy_token", + profile_academy=True, + form_entry=True, + ) + url = reverse_lazy("authenticate:academy_token") response = self.client.post(url) json = response.json() - token_pattern = re.compile(r'[0-9a-zA-Z]{,40}$') + token_pattern = re.compile(r"[0-9a-zA-Z]{,40}$") - self.assertEqual(bool(token_pattern.match(json['token'])), True) + self.assertEqual(bool(token_pattern.match(json["token"])), True) self.assertEqual(response.status_code, status.HTTP_200_OK) self.client.logout() - url = reverse_lazy('marketing:lead_all') + url = reverse_lazy("marketing:lead_all") response = self.client.get(url) json = response.json() - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) - self.assertEqual(self.all_form_entry_dict(), [{**self.model_to_dict(model, 'form_entry')}]) + self.assertEqual(self.all_form_entry_dict(), [{**self.model_to_dict(model, "form_entry")}]) def test_academy_token_showing_on_other_endpoints(self): """Test /academy/:id/member/:id without auth""" - role = 'academy_token' + role = "academy_token" self.headers(academy=1) - model = self.generate_models(authenticate=True, - role=role, - capability='generate_academy_token', - profile_academy=True) - url = reverse_lazy('authenticate:academy_token') + model = self.generate_models( + authenticate=True, role=role, capability="generate_academy_token", profile_academy=True + ) + url = reverse_lazy("authenticate:academy_token") response = self.client.post(url) json = response.json() - token_pattern = re.compile(r'[0-9a-zA-Z]{,40}$') + token_pattern = re.compile(r"[0-9a-zA-Z]{,40}$") - self.assertEqual(bool(token_pattern.match(json['token'])), True) + 
self.assertEqual(bool(token_pattern.match(json["token"])), True) self.assertEqual(response.status_code, status.HTTP_200_OK) - url = reverse_lazy('authenticate:user') + url = reverse_lazy("authenticate:user") response = self.client.get(url) json = response.json() - self.assertEqual(json, [{ - 'id': model['user'].id, - 'email': model['user'].email, - 'first_name': model['user'].first_name, - 'last_name': model['user'].last_name, - 'github': None, - 'profile': None, - }]) + self.assertEqual( + json, + [ + { + "id": model["user"].id, + "email": model["user"].email, + "first_name": model["user"].first_name, + "last_name": model["user"].last_name, + "github": None, + "profile": None, + } + ], + ) diff --git a/breathecode/authenticate/tests/urls/tests_academy_user_invite.py b/breathecode/authenticate/tests/urls/tests_academy_user_invite.py index 401012338..182ebce5f 100644 --- a/breathecode/authenticate/tests/urls/tests_academy_user_invite.py +++ b/breathecode/authenticate/tests/urls/tests_academy_user_invite.py @@ -1,6 +1,7 @@ """ Test cases for /academy/user/me/invite && academy/user/invite """ + from random import choice from django.urls.base import reverse_lazy @@ -11,39 +12,39 @@ from ..mixins.new_auth_test_case import AuthTestCase STATUSES = [ - 'PENDING', - 'REJECTED', - 'ACCEPTED', - 'WAITING_LIST', + "PENDING", + "REJECTED", + "ACCEPTED", + "WAITING_LIST", ] def generate_user_invite(self, model, user_invite, arguments={}): return { - 'academy': { - 'id': model.academy.id, - 'name': model.academy.name, - 'slug': model.academy.slug, - 'logo_url': model.academy.logo_url, + "academy": { + "id": model.academy.id, + "name": model.academy.name, + "slug": model.academy.slug, + "logo_url": model.academy.logo_url, }, - 'cohort': { - 'name': model.cohort.name, - 'slug': model.cohort.slug, + "cohort": { + "name": model.cohort.name, + "slug": model.cohort.slug, }, - 'created_at': self.bc.datetime.to_iso_string(user_invite.created_at), - 'email': user_invite.email, - 'first_name': user_invite.first_name, - 'id': user_invite.id, - 'invite_url': f'http://localhost:8000/v1/auth/member/invite/{user_invite.token}', - 'last_name': user_invite.last_name, - 'role': { - 'id': model.role.slug, - 'name': model.role.name, - 'slug': model.role.slug, + "created_at": self.bc.datetime.to_iso_string(user_invite.created_at), + "email": user_invite.email, + "first_name": user_invite.first_name, + "id": user_invite.id, + "invite_url": f"http://localhost:8000/v1/auth/member/invite/{user_invite.token}", + "last_name": user_invite.last_name, + "role": { + "id": model.role.slug, + "name": model.role.name, + "slug": model.role.slug, }, - 'sent_at': user_invite.sent_at, - 'status': user_invite.status, - 'token': user_invite.token, + "sent_at": user_invite.sent_at, + "status": user_invite.status, + "token": user_invite.token, **arguments, } @@ -54,19 +55,19 @@ class AuthenticateTestSuite(AuthTestCase): """ def test_academy_user_invite__without_auth(self): - url = reverse_lazy('authenticate:academy_user_invite') + url = reverse_lazy("authenticate:academy_user_invite") response = self.client.delete(url) json = response.json() - self.assertEqual(json, { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED - }) + self.assertEqual( + json, + {"detail": "Authentication credentials were not provided.", "status_code": status.HTTP_401_UNAUTHORIZED}, + ) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) def test_academy_user_invite__wrong_academy(self): 
self.bc.request.set_headers(academy=1) - url = reverse_lazy('authenticate:academy_user_invite') + url = reverse_lazy("authenticate:academy_user_invite") response = self.client.delete(url) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) @@ -80,13 +81,16 @@ def test_academy_user_invite__get__without_capability(self): self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('authenticate:academy_user_invite') + url = reverse_lazy("authenticate:academy_user_invite") response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': "You (user: 1) don't have this capability: read_invite for academy 1", - 'status_code': 403, - }) + self.assertEqual( + json, + { + "detail": "You (user: 1) don't have this capability: read_invite for academy 1", + "status_code": 403, + }, + ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) """ @@ -94,12 +98,12 @@ def test_academy_user_invite__get__without_capability(self): """ def test_academy_user_invite__get__without_data(self): - model = self.generate_models(user=1, profile_academy=1, role=1, capability='read_invite') + model = self.generate_models(user=1, profile_academy=1, role=1, capability="read_invite") self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('authenticate:academy_user_invite') + url = reverse_lazy("authenticate:academy_user_invite") response = self.client.get(url) json = response.json() @@ -113,17 +117,15 @@ def test_academy_user_invite__get__without_data(self): """ def test_academy_user_invite__get__with_two_user_invite__status_invite(self): - user_invite = {'status': 'PENDING'} - model = self.generate_models(user=1, - profile_academy=1, - role=1, - capability='read_invite', - user_invite=(2, user_invite)) + user_invite = {"status": "PENDING"} + model = self.generate_models( + user=1, profile_academy=1, role=1, capability="read_invite", user_invite=(2, user_invite) + ) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('authenticate:academy_user_invite') + url = reverse_lazy("authenticate:academy_user_invite") response = self.client.get(url) json = response.json() @@ -131,26 +133,24 @@ def test_academy_user_invite__get__with_two_user_invite__status_invite(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), self.bc.format.to_dict(model.user_invite)) + self.assertEqual(self.bc.database.list_of("authenticate.UserInvite"), self.bc.format.to_dict(model.user_invite)) """ 🔽🔽🔽 GET with two UserInvite with statuses will be ignored """ def test_academy_user_invite__get__with_two_user_invite__statuses_will_be_ignored(self): - statuses_will_be_ignored = [x for x in STATUSES if x != 'PENDING'] + statuses_will_be_ignored = [x for x in STATUSES if x != "PENDING"] for x in statuses_will_be_ignored: - user_invite = {'status': x} - model = self.generate_models(user=1, - profile_academy=1, - role=1, - capability='read_invite', - user_invite=(2, user_invite)) + user_invite = {"status": x} + model = self.generate_models( + user=1, profile_academy=1, role=1, capability="read_invite", user_invite=(2, user_invite) + ) self.bc.request.set_headers(academy=model.academy.id) self.client.force_authenticate(model.user) - url = reverse_lazy('authenticate:academy_user_invite') + url = reverse_lazy("authenticate:academy_user_invite") response = 
self.client.get(url) json = response.json() @@ -158,11 +158,12 @@ def test_academy_user_invite__get__with_two_user_invite__statuses_will_be_ignore self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), - self.bc.format.to_dict(model.user_invite)) + self.assertEqual( + self.bc.database.list_of("authenticate.UserInvite"), self.bc.format.to_dict(model.user_invite) + ) # teardown - self.bc.database.delete('authenticate.UserInvite') + self.bc.database.delete("authenticate.UserInvite") """ 🔽🔽🔽 GET with two UserInvite passing one status @@ -170,32 +171,31 @@ def test_academy_user_invite__get__with_two_user_invite__statuses_will_be_ignore def test_academy_user_invite__get__with_two_user_invite__passing_one_status(self): for x in STATUSES: - user_invite = {'status': x} - model = self.generate_models(user=1, - profile_academy=1, - role=1, - capability='read_invite', - user_invite=(2, user_invite)) + user_invite = {"status": x} + model = self.generate_models( + user=1, profile_academy=1, role=1, capability="read_invite", user_invite=(2, user_invite) + ) self.bc.request.set_headers(academy=model.academy.id) self.client.force_authenticate(model.user) - url = reverse_lazy('authenticate:academy_user_invite') + f'?status={x}' + url = reverse_lazy("authenticate:academy_user_invite") + f"?status={x}" response = self.client.get(url) json = response.json() expected = [ - generate_user_invite(self, model, user_invite, arguments={'status': x}) + generate_user_invite(self, model, user_invite, arguments={"status": x}) for user_invite in reversed(model.user_invite) ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), - self.bc.format.to_dict(model.user_invite)) + self.assertEqual( + self.bc.database.list_of("authenticate.UserInvite"), self.bc.format.to_dict(model.user_invite) + ) # teardown - self.bc.database.delete('authenticate.UserInvite') + self.bc.database.delete("authenticate.UserInvite") """ 🔽🔽🔽 GET with two UserInvite passing two statuses @@ -206,42 +206,43 @@ def test_academy_user_invite__get__with_two_user_invite__passing_two_statuses(se current = n # is possible bin function return a string start with 'b' - binary = ''.join(bin(n + 1)[-2:].split('b')) + binary = "".join(bin(n + 1)[-2:].split("b")) # 4 = '100', it take '00' = 0 next = int(binary, 2) first_status = STATUSES[current] second_status = STATUSES[next] - user_invites = [{'status': first_status}, {'status': second_status}] - model = self.generate_models(user=1, - profile_academy=1, - role=1, - capability='read_invite', - user_invite=user_invites) + user_invites = [{"status": first_status}, {"status": second_status}] + model = self.generate_models( + user=1, profile_academy=1, role=1, capability="read_invite", user_invite=user_invites + ) self.bc.request.set_headers(academy=model.academy.id) self.client.force_authenticate(model.user) - url = reverse_lazy('authenticate:academy_user_invite') + f'?status={first_status},{second_status}' + url = reverse_lazy("authenticate:academy_user_invite") + f"?status={first_status},{second_status}" response = self.client.get(url) json = response.json() expected = [ - generate_user_invite(self, - model, - user_invite, - arguments={'status': first_status if user_invite.id % 2 == 1 else second_status}) + generate_user_invite( + self, + model, + user_invite, + arguments={"status": first_status if 
user_invite.id % 2 == 1 else second_status}, + ) for user_invite in reversed(model.user_invite) ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), - self.bc.format.to_dict(model.user_invite)) + self.assertEqual( + self.bc.database.list_of("authenticate.UserInvite"), self.bc.format.to_dict(model.user_invite) + ) # teardown - self.bc.database.delete('authenticate.UserInvite') + self.bc.database.delete("authenticate.UserInvite") """ 🔽🔽🔽 DELETE auth capability @@ -253,37 +254,37 @@ def test_academy_user_invite__delete__without_capability(self): self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('authenticate:academy_user_invite') + url = reverse_lazy("authenticate:academy_user_invite") response = self.client.delete(url) json = response.json() - self.assertEqual(json, { - 'detail': "You (user: 1) don't have this capability: crud_invite for academy 1", - 'status_code': 403, - }) + self.assertEqual( + json, + { + "detail": "You (user: 1) don't have this capability: crud_invite for academy 1", + "status_code": 403, + }, + ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), []) + self.assertEqual(self.bc.database.list_of("authenticate.UserInvite"), []) """ 🔽🔽🔽 DELETE providing two id in the params and two UserInvite """ def test_academy_user_invite__delete__in_bulk_with_two_invites(self): - user_invites = [{'email': choice(['a@a.com', 'b@b.com', 'c@c.com'])} for _ in range(0, 2)] - model = self.generate_models(academy=1, - capability='crud_invite', - role=1, - user=1, - profile_academy=1, - user_invite=user_invites) + user_invites = [{"email": choice(["a@a.com", "b@b.com", "c@c.com"])} for _ in range(0, 2)] + model = self.generate_models( + academy=1, capability="crud_invite", role=1, user=1, profile_academy=1, user_invite=user_invites + ) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('authenticate:academy_user_invite') + '?id=1,2' + url = reverse_lazy("authenticate:academy_user_invite") + "?id=1,2" response = self.client.delete(url) self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), []) + self.assertEqual(self.bc.database.list_of("authenticate.UserInvite"), []) """ 🔽🔽🔽 DELETE not providing the id param and one UserInvite @@ -291,27 +292,27 @@ def test_academy_user_invite__delete__in_bulk_with_two_invites(self): def test_academy_user_invite__delete__without_passing_ids(self): - invite_kwargs = {'email': choice(['a@a.com', 'b@b.com', 'c@c.com'])} - model = self.generate_models(academy=1, - capability='crud_invite', - user=1, - role='potato', - user_invite=invite_kwargs, - profile_academy=1) + invite_kwargs = {"email": choice(["a@a.com", "b@b.com", "c@c.com"])} + model = self.generate_models( + academy=1, capability="crud_invite", user=1, role="potato", user_invite=invite_kwargs, profile_academy=1 + ) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('authenticate:academy_user_invite') + url = reverse_lazy("authenticate:academy_user_invite") response = self.client.delete(url) json = response.json() - self.bc.check.partial_equality(json, {'detail': 'missing_ids'}) + self.bc.check.partial_equality(json, {"detail": "missing_ids"}) 
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), [ - self.bc.format.to_dict(model.user_invite), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.UserInvite"), + [ + self.bc.format.to_dict(model.user_invite), + ], + ) """ 🔽🔽🔽 DELETE providing the id param and one UserInvite but from another academy @@ -319,22 +320,22 @@ def test_academy_user_invite__delete__without_passing_ids(self): def test_academy_user_invite__delete__passing_ids__deleting_invite_of_another_academy(self): - invite_kwargs = {'email': choice(['a@a.com', 'b@b.com', 'c@c.com']), 'academy_id': 2} - model = self.generate_models(academy=2, - capability='crud_invite', - user=1, - role='potato', - user_invite=invite_kwargs, - profile_academy=1) + invite_kwargs = {"email": choice(["a@a.com", "b@b.com", "c@c.com"]), "academy_id": 2} + model = self.generate_models( + academy=2, capability="crud_invite", user=1, role="potato", user_invite=invite_kwargs, profile_academy=1 + ) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('authenticate:academy_user_invite') + '?id=1' + url = reverse_lazy("authenticate:academy_user_invite") + "?id=1" response = self.client.delete(url) self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), [ - self.bc.format.to_dict(model.user_invite), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.UserInvite"), + [ + self.bc.format.to_dict(model.user_invite), + ], + ) diff --git a/breathecode/authenticate/tests/urls/tests_academy_user_me_invite.py b/breathecode/authenticate/tests/urls/tests_academy_user_me_invite.py index 55e21da77..81ce58017 100644 --- a/breathecode/authenticate/tests/urls/tests_academy_user_me_invite.py +++ b/breathecode/authenticate/tests/urls/tests_academy_user_me_invite.py @@ -1,6 +1,7 @@ """ Set of tests for MeInviteView, this include duck tests """ + from random import choice from unittest.mock import MagicMock, patch @@ -12,7 +13,7 @@ def view_method_mock(request, *args, **kwargs): - response = {'args': args, 'kwargs': kwargs} + response = {"args": args, "kwargs": kwargs} return Response(response, status=200) @@ -24,20 +25,23 @@ class MemberGetDuckTestSuite(AuthTestCase): def test_duck_test__without_auth(self): """Test /academy/:id/member without auth""" - url = reverse_lazy('authenticate:academy_user_me_invite') + url = reverse_lazy("authenticate:academy_user_me_invite") response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED, - }) + self.assertEqual( + json, + { + "detail": "Authentication credentials were not provided.", + "status_code": status.HTTP_401_UNAUTHORIZED, + }, + ) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) def test_duck_test__with_auth(self): model = self.bc.database.create(authenticate=True) - url = reverse_lazy('authenticate:academy_user_me_invite') + url = reverse_lazy("authenticate:academy_user_me_invite") response = self.client.get(url) json = response.json() @@ -50,16 +54,16 @@ def test_duck_test__with_auth(self): 🔽🔽🔽 Check the param is being passed """ - @patch('breathecode.authenticate.views.MeInviteView.get', MagicMock(side_effect=view_method_mock)) + @patch("breathecode.authenticate.views.MeInviteView.get", MagicMock(side_effect=view_method_mock)) def 
test_duck_test__with_auth___mock_view(self): model = self.bc.database.create(user=1) self.client.force_authenticate(model.user) - url = reverse_lazy('authenticate:academy_user_me_invite') + url = reverse_lazy("authenticate:academy_user_me_invite") response = self.client.get(url) json = response.json() - expected = {'args': [], 'kwargs': {}} + expected = {"args": [], "kwargs": {}} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -73,24 +77,27 @@ class MemberPutDuckTestSuite(AuthTestCase): def test_duck_test__without_auth(self): """Test /academy/:id/member without auth""" - url = reverse_lazy('authenticate:academy_user_me_invite') + url = reverse_lazy("authenticate:academy_user_me_invite") response = self.client.put(url) json = response.json() - self.assertEqual(json, { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED, - }) + self.assertEqual( + json, + { + "detail": "Authentication credentials were not provided.", + "status_code": status.HTTP_401_UNAUTHORIZED, + }, + ) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) def test_duck_test__with_auth(self): model = self.bc.database.create(authenticate=True) - url = reverse_lazy('authenticate:academy_user_me_invite') + url = reverse_lazy("authenticate:academy_user_me_invite") response = self.client.put(url) json = response.json() - expected = {'detail': 'missing-status', 'status_code': 400} + expected = {"detail": "missing-status", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -99,16 +106,16 @@ def test_duck_test__with_auth(self): 🔽🔽🔽 Check the param is being passed """ - @patch('breathecode.authenticate.views.MeInviteView.put', MagicMock(side_effect=view_method_mock)) + @patch("breathecode.authenticate.views.MeInviteView.put", MagicMock(side_effect=view_method_mock)) def test_duck_test__with_auth___mock_view(self): model = self.bc.database.create(user=1) self.client.force_authenticate(model.user) - url = reverse_lazy('authenticate:academy_user_me_invite') + url = reverse_lazy("authenticate:academy_user_me_invite") response = self.client.put(url) json = response.json() - expected = {'args': [], 'kwargs': {}} + expected = {"args": [], "kwargs": {}} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) diff --git a/breathecode/authenticate/tests/urls/tests_academy_user_me_invite_status.py b/breathecode/authenticate/tests/urls/tests_academy_user_me_invite_status.py index f00bec7e9..104703125 100644 --- a/breathecode/authenticate/tests/urls/tests_academy_user_me_invite_status.py +++ b/breathecode/authenticate/tests/urls/tests_academy_user_me_invite_status.py @@ -1,6 +1,7 @@ """ Set of tests for MeInviteView, this include duck tests """ + from random import choice from unittest.mock import MagicMock, patch @@ -12,7 +13,7 @@ def view_method_mock(request, *args, **kwargs): - response = {'args': args, 'kwargs': kwargs} + response = {"args": args, "kwargs": kwargs} return Response(response, status=200) @@ -24,24 +25,27 @@ class MemberPutDuckTestSuite(AuthTestCase): def test_duck_test__without_auth(self): """Test /academy/:id/member without auth""" - url = reverse_lazy('authenticate:academy_user_me_invite_status', kwargs={'new_status': 'pending'}) + url = reverse_lazy("authenticate:academy_user_me_invite_status", kwargs={"new_status": "pending"}) response = self.client.put(url) json = response.json() - 
self.assertEqual(json, { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED, - }) + self.assertEqual( + json, + { + "detail": "Authentication credentials were not provided.", + "status_code": status.HTTP_401_UNAUTHORIZED, + }, + ) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) def test_duck_test__with_auth(self): model = self.bc.database.create(authenticate=True) - url = reverse_lazy('authenticate:academy_user_me_invite_status', kwargs={'new_status': 'pending'}) + url = reverse_lazy("authenticate:academy_user_me_invite_status", kwargs={"new_status": "pending"}) response = self.client.put(url) json = response.json() - expected = {'detail': 'invalid-status', 'status_code': 400} + expected = {"detail": "invalid-status", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -50,23 +54,23 @@ def test_duck_test__with_auth(self): 🔽🔽🔽 Check the param is being passed """ - @patch('breathecode.authenticate.views.MeInviteView.put', MagicMock(side_effect=view_method_mock)) + @patch("breathecode.authenticate.views.MeInviteView.put", MagicMock(side_effect=view_method_mock)) def test_duck_test__with_auth___mock_view(self): statuses = [ - 'pending', - 'rejected', - 'accepted', - 'waiting_list', + "pending", + "rejected", + "accepted", + "waiting_list", ] model = self.bc.database.create(user=1) for x in statuses: self.client.force_authenticate(model.user) - url = reverse_lazy('authenticate:academy_user_me_invite_status', kwargs={'new_status': x}) + url = reverse_lazy("authenticate:academy_user_me_invite_status", kwargs={"new_status": x}) response = self.client.put(url) json = response.json() - expected = {'args': [], 'kwargs': {'new_status': x}} + expected = {"args": [], "kwargs": {"new_status": x}} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) diff --git a/breathecode/authenticate/tests/urls/tests_app_user.py b/breathecode/authenticate/tests/urls/tests_app_user.py index c64a95bfa..51284a072 100644 --- a/breathecode/authenticate/tests/urls/tests_app_user.py +++ b/breathecode/authenticate/tests/urls/tests_app_user.py @@ -1,6 +1,7 @@ """ Test cases for /user """ + from unittest.mock import MagicMock, patch import pytest @@ -12,27 +13,27 @@ def credentials_github_serializer(credentials_github): return { - 'avatar_url': credentials_github.avatar_url, - 'name': credentials_github.name, - 'username': credentials_github.username, + "avatar_url": credentials_github.avatar_url, + "name": credentials_github.name, + "username": credentials_github.username, } def profile_serializer(credentials_github): return { - 'avatar_url': credentials_github.avatar_url, + "avatar_url": credentials_github.avatar_url, } def get_serializer(user, credentials_github=None, profile=None, **data): return { - 'email': user.email, - 'username': user.username, - 'first_name': user.first_name, - 'github': credentials_github_serializer(credentials_github) if credentials_github else None, - 'id': user.id, - 'last_name': user.last_name, - 'profile': profile_serializer(profile) if profile else None, + "email": user.email, + "username": user.username, + "first_name": user.first_name, + "github": credentials_github_serializer(credentials_github) if credentials_github else None, + "id": user.id, + "last_name": user.last_name, + "profile": profile_serializer(profile) if profile else None, **data, } @@ -40,8 +41,9 @@ def get_serializer(user, 
credentials_github=None, profile=None, **data): @pytest.fixture(autouse=True) def setup(monkeypatch): from linked_services.django.actions import reset_app_cache + reset_app_cache() - monkeypatch.setattr('linked_services.django.tasks.check_credentials.delay', MagicMock()) + monkeypatch.setattr("linked_services.django.tasks.check_credentials.delay", MagicMock()) yield @@ -50,13 +52,13 @@ class AuthenticateTestSuite(AuthTestCase): # When: no auth # Then: return 401 def test_no_auth(self): - url = reverse_lazy('authenticate:app_user') + url = reverse_lazy("authenticate:app_user") response = self.client.get(url) json = response.json() expected = { - 'detail': 'no-authorization-header', - 'status_code': status.HTTP_401_UNAUTHORIZED, + "detail": "no-authorization-header", + "status_code": status.HTTP_401_UNAUTHORIZED, } self.assertEqual(json, expected) @@ -65,100 +67,108 @@ def test_no_auth(self): # When: Sign with an user # Then: return 200 def test_sign_with_user__get_own_info(self): - app = {'require_an_agreement': False, 'slug': 'rigobot'} - credentials_githubs = [{'user_id': x + 1} for x in range(2)] - profiles = [{'user_id': x + 1} for x in range(2)] - model = self.bc.database.create(user=2, - app=app, - profile=profiles, - credentials_github=credentials_githubs, - first_party_credentials={ - 'app': { - 'rigobot': 1, - }, - }) + app = {"require_an_agreement": False, "slug": "rigobot"} + credentials_githubs = [{"user_id": x + 1} for x in range(2)] + profiles = [{"user_id": x + 1} for x in range(2)] + model = self.bc.database.create( + user=2, + app=app, + profile=profiles, + credentials_github=credentials_githubs, + first_party_credentials={ + "app": { + "rigobot": 1, + }, + }, + ) self.bc.request.sign_jwt_link(model.app, 1) - url = reverse_lazy('authenticate:app_user') + url = reverse_lazy("authenticate:app_user") response = self.client.get(url) json = response.json() expected = [ - get_serializer(model.user[0], - model.credentials_github[0], - model.profile[0], - date_joined=self.bc.datetime.to_iso_string(model.user[0].date_joined)) + get_serializer( + model.user[0], + model.credentials_github[0], + model.profile[0], + date_joined=self.bc.datetime.to_iso_string(model.user[0].date_joined), + ) ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('auth.User'), self.bc.format.to_dict(model.user)) + self.assertEqual(self.bc.database.list_of("auth.User"), self.bc.format.to_dict(model.user)) # When: Sign with an user # Then: return 200 def test_sign_with_user__get_info_from_another(self): - app = {'require_an_agreement': False, 'slug': 'rigobot'} - credentials_githubs = [{'user_id': x + 1} for x in range(2)] - profiles = [{'user_id': x + 1} for x in range(2)] - model = self.bc.database.create(user=2, - app=app, - profile=profiles, - credentials_github=credentials_githubs, - first_party_credentials={ - 'app': { - 'rigobot': 1, - }, - }) + app = {"require_an_agreement": False, "slug": "rigobot"} + credentials_githubs = [{"user_id": x + 1} for x in range(2)] + profiles = [{"user_id": x + 1} for x in range(2)] + model = self.bc.database.create( + user=2, + app=app, + profile=profiles, + credentials_github=credentials_githubs, + first_party_credentials={ + "app": { + "rigobot": 1, + }, + }, + ) self.bc.request.sign_jwt_link(model.app, 1) - url = reverse_lazy('authenticate:app_user_id', kwargs={'user_id': 2}) + url = reverse_lazy("authenticate:app_user_id", kwargs={"user_id": 2}) response = self.client.get(url) json = 
response.json() expected = { - 'detail': 'user-with-no-access', - 'silent': True, - 'silent_code': 'user-with-no-access', - 'status_code': 403, + "detail": "user-with-no-access", + "silent": True, + "silent_code": "user-with-no-access", + "status_code": 403, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) - self.assertEqual(self.bc.database.list_of('auth.User'), self.bc.format.to_dict(model.user)) + self.assertEqual(self.bc.database.list_of("auth.User"), self.bc.format.to_dict(model.user)) # When: Sign without an user # Then: return 200 def test_sign_without_user(self): - app = {'require_an_agreement': False} - credentials_githubs = [{'user_id': x + 1} for x in range(2)] - profiles = [{'user_id': x + 1} for x in range(2)] + app = {"require_an_agreement": False} + credentials_githubs = [{"user_id": x + 1} for x in range(2)] + profiles = [{"user_id": x + 1} for x in range(2)] model = self.bc.database.create(user=2, app=app, profile=profiles, credentials_github=credentials_githubs) self.bc.request.sign_jwt_link(model.app) for user in model.user: - url = reverse_lazy('authenticate:app_user_id', kwargs={'user_id': user.id}) + url = reverse_lazy("authenticate:app_user_id", kwargs={"user_id": user.id}) response = self.client.get(url) json = response.json() - expected = get_serializer(user, - model.credentials_github[user.id - 1], - model.profile[user.id - 1], - date_joined=self.bc.datetime.to_iso_string(user.date_joined)) + expected = get_serializer( + user, + model.credentials_github[user.id - 1], + model.profile[user.id - 1], + date_joined=self.bc.datetime.to_iso_string(user.date_joined), + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('auth.User'), self.bc.format.to_dict(model.user)) + self.assertEqual(self.bc.database.list_of("auth.User"), self.bc.format.to_dict(model.user)) # When: Sign user with no agreement # Then: return 200 def test_user_with_no_agreement(self): - app = {'require_an_agreement': False, 'require_an_agreement': True} - credentials_github = {'user_id': 1} - profile = {'user_id': 1} + app = {"require_an_agreement": False, "require_an_agreement": True} + credentials_github = {"user_id": 1} + profile = {"user_id": 1} model = self.bc.database.create(user=1, app=app, profile=profile, credentials_github=credentials_github) self.bc.request.sign_jwt_link(model.app) - url = reverse_lazy('authenticate:app_user') + url = reverse_lazy("authenticate:app_user") response = self.client.get(url) json = response.json() @@ -166,32 +176,32 @@ def test_user_with_no_agreement(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('auth.User'), [self.bc.format.to_dict(model.user)]) + self.assertEqual(self.bc.database.list_of("auth.User"), [self.bc.format.to_dict(model.user)]) # When: Sign user with agreement # Then: return 200 def test_user_with_agreement(self): - app = {'require_an_agreement': False, 'require_an_agreement': True} - credentials_github = {'user_id': 1} - profile = {'user_id': 1} - model = self.bc.database.create(user=1, - app=app, - profile=profile, - credentials_github=credentials_github, - app_user_agreement=1) + app = {"require_an_agreement": False, "require_an_agreement": True} + credentials_github = {"user_id": 1} + profile = {"user_id": 1} + model = self.bc.database.create( + user=1, app=app, profile=profile, credentials_github=credentials_github, 
app_user_agreement=1 + ) self.bc.request.sign_jwt_link(model.app) - url = reverse_lazy('authenticate:app_user') + url = reverse_lazy("authenticate:app_user") response = self.client.get(url) json = response.json() expected = [ - get_serializer(model.user, - model.credentials_github, - model.profile, - date_joined=self.bc.datetime.to_iso_string(model.user.date_joined)) + get_serializer( + model.user, + model.credentials_github, + model.profile, + date_joined=self.bc.datetime.to_iso_string(model.user.date_joined), + ) ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('auth.User'), [self.bc.format.to_dict(model.user)]) + self.assertEqual(self.bc.database.list_of("auth.User"), [self.bc.format.to_dict(model.user)]) diff --git a/breathecode/authenticate/tests/urls/tests_app_user_id.py b/breathecode/authenticate/tests/urls/tests_app_user_id.py index f6102407d..664dbe277 100644 --- a/breathecode/authenticate/tests/urls/tests_app_user_id.py +++ b/breathecode/authenticate/tests/urls/tests_app_user_id.py @@ -1,6 +1,7 @@ """ Test cases for /user """ + from unittest.mock import MagicMock import pytest @@ -12,37 +13,38 @@ def credentials_github_serializer(credentials_github): return { - 'avatar_url': credentials_github.avatar_url, - 'name': credentials_github.name, - 'username': credentials_github.username, + "avatar_url": credentials_github.avatar_url, + "name": credentials_github.name, + "username": credentials_github.username, } def profile_serializer(credentials_github): return { - 'avatar_url': credentials_github.avatar_url, + "avatar_url": credentials_github.avatar_url, } def get_serializer(user, credentials_github=None, profile=None, **data): return { - 'email': user.email, - 'username': user.username, - 'first_name': user.first_name, - 'github': credentials_github_serializer(credentials_github) if credentials_github else None, - 'id': user.id, - 'last_name': user.last_name, - 'date_joined': user.date_joined, - 'profile': profile_serializer(profile) if profile else None, - **data + "email": user.email, + "username": user.username, + "first_name": user.first_name, + "github": credentials_github_serializer(credentials_github) if credentials_github else None, + "id": user.id, + "last_name": user.last_name, + "date_joined": user.date_joined, + "profile": profile_serializer(profile) if profile else None, + **data, } @pytest.fixture(autouse=True) def setup(monkeypatch): from linked_services.django.actions import reset_app_cache + reset_app_cache() - monkeypatch.setattr('linked_services.django.tasks.check_credentials.delay', MagicMock()) + monkeypatch.setattr("linked_services.django.tasks.check_credentials.delay", MagicMock()) yield @@ -51,13 +53,13 @@ class AuthenticateTestSuite(AuthTestCase): # When: no auth # Then: return 401 def test_no_auth(self): - url = reverse_lazy('authenticate:app_user_id', kwargs={'user_id': 1}) + url = reverse_lazy("authenticate:app_user_id", kwargs={"user_id": 1}) response = self.client.get(url) json = response.json() expected = { - 'detail': 'no-authorization-header', - 'status_code': status.HTTP_401_UNAUTHORIZED, + "detail": "no-authorization-header", + "status_code": status.HTTP_401_UNAUTHORIZED, } self.assertEqual(json, expected) @@ -66,134 +68,142 @@ def test_no_auth(self): # When: Sign with an user # Then: return 200 def test_sign_with_user__get_own_info(self): - app = {'require_an_agreement': False, 'slug': 'rigobot'} - credentials_githubs = [{'user_id': x + 1} for x in range(2)] - 
profiles = [{'user_id': x + 1} for x in range(2)] - model = self.bc.database.create(user=2, - app=app, - profile=profiles, - credentials_github=credentials_githubs, - first_party_credentials={ - 'app': { - 'rigobot': 1, - }, - }) + app = {"require_an_agreement": False, "slug": "rigobot"} + credentials_githubs = [{"user_id": x + 1} for x in range(2)] + profiles = [{"user_id": x + 1} for x in range(2)] + model = self.bc.database.create( + user=2, + app=app, + profile=profiles, + credentials_github=credentials_githubs, + first_party_credentials={ + "app": { + "rigobot": 1, + }, + }, + ) self.bc.request.sign_jwt_link(model.app, 1) - url = reverse_lazy('authenticate:app_user_id', kwargs={'user_id': 1}) + url = reverse_lazy("authenticate:app_user_id", kwargs={"user_id": 1}) response = self.client.get(url) json = response.json() - expected = get_serializer(model.user[0], - model.credentials_github[0], - model.profile[0], - date_joined=self.bc.datetime.to_iso_string(model.user[0].date_joined)) + expected = get_serializer( + model.user[0], + model.credentials_github[0], + model.profile[0], + date_joined=self.bc.datetime.to_iso_string(model.user[0].date_joined), + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('auth.User'), self.bc.format.to_dict(model.user)) + self.assertEqual(self.bc.database.list_of("auth.User"), self.bc.format.to_dict(model.user)) # When: Sign with an user # Then: return 200 def test_sign_with_user__get_info_from_another(self): - app = {'require_an_agreement': False, 'slug': 'rigobot'} - credentials_githubs = [{'user_id': x + 1} for x in range(2)] - profiles = [{'user_id': x + 1} for x in range(2)] - model = self.bc.database.create(user=2, - app=app, - profile=profiles, - credentials_github=credentials_githubs, - first_party_credentials={ - 'app': { - 'rigobot': 1, - }, - }) + app = {"require_an_agreement": False, "slug": "rigobot"} + credentials_githubs = [{"user_id": x + 1} for x in range(2)] + profiles = [{"user_id": x + 1} for x in range(2)] + model = self.bc.database.create( + user=2, + app=app, + profile=profiles, + credentials_github=credentials_githubs, + first_party_credentials={ + "app": { + "rigobot": 1, + }, + }, + ) self.bc.request.sign_jwt_link(model.app, 1) - url = reverse_lazy('authenticate:app_user_id', kwargs={'user_id': 2}) + url = reverse_lazy("authenticate:app_user_id", kwargs={"user_id": 2}) response = self.client.get(url) json = response.json() expected = { - 'detail': 'user-with-no-access', - 'silent': True, - 'silent_code': 'user-with-no-access', - 'status_code': 403, + "detail": "user-with-no-access", + "silent": True, + "silent_code": "user-with-no-access", + "status_code": 403, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) - self.assertEqual(self.bc.database.list_of('auth.User'), self.bc.format.to_dict(model.user)) + self.assertEqual(self.bc.database.list_of("auth.User"), self.bc.format.to_dict(model.user)) # When: Sign without an user # Then: return 200 def test_sign_without_user(self): - app = {'require_an_agreement': False} - credentials_githubs = [{'user_id': x + 1} for x in range(2)] - profiles = [{'user_id': x + 1} for x in range(2)] + app = {"require_an_agreement": False} + credentials_githubs = [{"user_id": x + 1} for x in range(2)] + profiles = [{"user_id": x + 1} for x in range(2)] model = self.bc.database.create(user=2, app=app, profile=profiles, credentials_github=credentials_githubs) 
self.bc.request.sign_jwt_link(model.app) for user in model.user: - url = reverse_lazy('authenticate:app_user_id', kwargs={'user_id': user.id}) + url = reverse_lazy("authenticate:app_user_id", kwargs={"user_id": user.id}) response = self.client.get(url) json = response.json() - expected = get_serializer(user, - model.credentials_github[user.id - 1], - model.profile[user.id - 1], - date_joined=self.bc.datetime.to_iso_string(user.date_joined)) + expected = get_serializer( + user, + model.credentials_github[user.id - 1], + model.profile[user.id - 1], + date_joined=self.bc.datetime.to_iso_string(user.date_joined), + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('auth.User'), self.bc.format.to_dict(model.user)) + self.assertEqual(self.bc.database.list_of("auth.User"), self.bc.format.to_dict(model.user)) # When: Sign user with no agreement # Then: return 200 def test_user_with_no_agreement(self): - app = {'require_an_agreement': False, 'require_an_agreement': True} - credentials_github = {'user_id': 1} - profile = {'user_id': 1} + app = {"require_an_agreement": False, "require_an_agreement": True} + credentials_github = {"user_id": 1} + profile = {"user_id": 1} model = self.bc.database.create(user=1, app=app, profile=profile, credentials_github=credentials_github) self.bc.request.sign_jwt_link(model.app) - url = reverse_lazy('authenticate:app_user_id', kwargs={'user_id': 1}) + url = reverse_lazy("authenticate:app_user_id", kwargs={"user_id": 1}) response = self.client.get(url) json = response.json() expected = { - 'detail': 'user-not-found', - 'silent': True, - 'silent_code': 'user-not-found', - 'status_code': 404, + "detail": "user-not-found", + "silent": True, + "silent_code": "user-not-found", + "status_code": 404, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - self.assertEqual(self.bc.database.list_of('auth.User'), [self.bc.format.to_dict(model.user)]) + self.assertEqual(self.bc.database.list_of("auth.User"), [self.bc.format.to_dict(model.user)]) # When: Sign user with agreement # Then: return 200 def test_user_with_agreement(self): - app = {'require_an_agreement': False, 'require_an_agreement': True} - credentials_github = {'user_id': 1} - profile = {'user_id': 1} - model = self.bc.database.create(user=1, - app=app, - profile=profile, - credentials_github=credentials_github, - app_user_agreement=1) + app = {"require_an_agreement": False, "require_an_agreement": True} + credentials_github = {"user_id": 1} + profile = {"user_id": 1} + model = self.bc.database.create( + user=1, app=app, profile=profile, credentials_github=credentials_github, app_user_agreement=1 + ) self.bc.request.sign_jwt_link(model.app) - url = reverse_lazy('authenticate:app_user_id', kwargs={'user_id': 1}) + url = reverse_lazy("authenticate:app_user_id", kwargs={"user_id": 1}) response = self.client.get(url) json = response.json() - expected = get_serializer(model.user, - model.credentials_github, - model.profile, - date_joined=self.bc.datetime.to_iso_string(model.user.date_joined)) + expected = get_serializer( + model.user, + model.credentials_github, + model.profile, + date_joined=self.bc.datetime.to_iso_string(model.user.date_joined), + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('auth.User'), [self.bc.format.to_dict(model.user)]) + 
self.assertEqual(self.bc.database.list_of("auth.User"), [self.bc.format.to_dict(model.user)]) diff --git a/breathecode/authenticate/tests/urls/tests_app_webhook.py b/breathecode/authenticate/tests/urls/tests_app_webhook.py index f753f1cfc..1560c8624 100644 --- a/breathecode/authenticate/tests/urls/tests_app_webhook.py +++ b/breathecode/authenticate/tests/urls/tests_app_webhook.py @@ -1,6 +1,7 @@ """ Test cases for /user """ + import pytest from django.urls.base import reverse_lazy from linked_services.django.actions import reset_app_cache @@ -17,13 +18,13 @@ def setup(db): def test_user_without_auth(bc: Breathecode, client: APIClient): - url = reverse_lazy('authenticate:app_webhook') - response = client.post(url, {}, format='json') + url = reverse_lazy("authenticate:app_webhook") + response = client.post(url, {}, format="json") json = response.json() expected = { - 'detail': 'no-authorization-header', - 'status_code': status.HTTP_401_UNAUTHORIZED, + "detail": "no-authorization-header", + "status_code": status.HTTP_401_UNAUTHORIZED, } assert json == expected @@ -32,166 +33,186 @@ def test_user_without_auth(bc: Breathecode, client: APIClient): def db_item(data={}): return { - 'id': 0, - 'app_id': 0, - 'type': 'unknown', - 'user_id': 0, - 'external_id': None, - 'url': None, - 'data': {}, - 'processed': False, - 'attempts': 0, - 'status': 'PENDING', - 'status_text': None, + "id": 0, + "app_id": 0, + "type": "unknown", + "user_id": 0, + "external_id": None, + "url": None, + "data": {}, + "processed": False, + "attempts": 0, + "status": "PENDING", + "status_text": None, **data, } -@pytest.mark.parametrize('how_many,webhook_id,webhook_type,user_id,data', [ - (1, None, None, None, None), - (2, 1, 'user.created', 1, { - 'random': 'data' - }), - (2, 1, 'user.updated', 1, [ - { - 'random': 'data1', - }, - { - 'random': 'data2', - }, - ]), -]) +@pytest.mark.parametrize( + "how_many,webhook_id,webhook_type,user_id,data", + [ + (1, None, None, None, None), + (2, 1, "user.created", 1, {"random": "data"}), + ( + 2, + 1, + "user.updated", + 1, + [ + { + "random": "data1", + }, + { + "random": "data2", + }, + ], + ), + ], +) def test_webhook_not_registered(bc: Breathecode, client: APIClient, how_many, webhook_id, webhook_type, user_id, data): - app = {'require_an_agreement': False} + app = {"require_an_agreement": False} model = bc.database.create(user=2, app=app) input = { - 'id': webhook_id, - 'data': data, - 'type': webhook_type, + "id": webhook_id, + "data": data, + "type": webhook_type, } if how_many != 1: input = [ input, { - 'id': webhook_id + 1 if webhook_id else None, - 'data': data, - 'type': webhook_type, + "id": webhook_id + 1 if webhook_id else None, + "data": data, + "type": webhook_type, }, ] bc.request.sign_jwt_link(model.app, user_id, client=client) - url = reverse_lazy('authenticate:app_webhook') - response = client.post(url, input, format='json') + url = reverse_lazy("authenticate:app_webhook") + response = client.post(url, input, format="json") - assert response.content == b'' + assert response.content == b"" assert response.status_code == status.HTTP_204_NO_CONTENT db = [ - db_item({ - 'id': 1, - 'app_id': 1, - 'user_id': user_id, - 'status': 'PENDING', - 'external_id': webhook_id, - 'type': webhook_type or 'unknown', - 'data': data, - }), + db_item( + { + "id": 1, + "app_id": 1, + "user_id": user_id, + "status": "PENDING", + "external_id": webhook_id, + "type": webhook_type or "unknown", + "data": data, + } + ), ] if how_many != 1: db += [ - db_item({ - 'id': 2, - 'app_id': 1, - 
'user_id': user_id, - 'status': 'PENDING', - 'external_id': webhook_id + 1 if webhook_id else None, - 'type': webhook_type or 'unknown', - 'data': data, - }), + db_item( + { + "id": 2, + "app_id": 1, + "user_id": user_id, + "status": "PENDING", + "external_id": webhook_id + 1 if webhook_id else None, + "type": webhook_type or "unknown", + "data": data, + } + ), ] - assert bc.database.list_of('linked_services.FirstPartyWebhookLog') == db + assert bc.database.list_of("linked_services.FirstPartyWebhookLog") == db @pytest.mark.parametrize( - 'how_many,webhook_id,webhook_type,user_id,data', + "how_many,webhook_id,webhook_type,user_id,data", [ # (1, None, None, None, None), - (2, 1, 'user.created', 1, { - 'random': 'data' - }), - (2, 1, 'user.updated', 1, [ - { - 'random': 'data1', - }, - { - 'random': 'data2', - }, - ]), - ]) + (2, 1, "user.created", 1, {"random": "data"}), + ( + 2, + 1, + "user.updated", + 1, + [ + { + "random": "data1", + }, + { + "random": "data2", + }, + ], + ), + ], +) def test_webhook_registered(bc: Breathecode, client: APIClient, how_many, webhook_id, webhook_type, user_id, data): - app = {'require_an_agreement': False} + app = {"require_an_agreement": False} first_party_webhook_log = { - 'external_id': webhook_id, - 'type': webhook_type, - 'user_id': user_id, + "external_id": webhook_id, + "type": webhook_type, + "user_id": user_id, } if how_many != 1: first_party_webhook_log = [ first_party_webhook_log, { - 'external_id': webhook_id + 1 if webhook_id else None, - 'type': webhook_type, - 'user_id': user_id, + "external_id": webhook_id + 1 if webhook_id else None, + "type": webhook_type, + "user_id": user_id, }, ] model = bc.database.create(user=2, app=app, first_party_webhook_log=first_party_webhook_log) input = { - 'id': webhook_id, - 'data': data, - 'type': webhook_type, + "id": webhook_id, + "data": data, + "type": webhook_type, } if how_many != 1: input = [ input, { - 'id': webhook_id + 1 if webhook_id else None, - 'data': data, - 'type': webhook_type, + "id": webhook_id + 1 if webhook_id else None, + "data": data, + "type": webhook_type, }, ] bc.request.sign_jwt_link(model.app, user_id, client=client) - url = reverse_lazy('authenticate:app_webhook') - response = client.post(url, input, format='json') + url = reverse_lazy("authenticate:app_webhook") + response = client.post(url, input, format="json") - assert response.content == b'' + assert response.content == b"" assert response.status_code == status.HTTP_204_NO_CONTENT db = [ - db_item({ - 'id': 1, - 'app_id': 1, - 'user_id': user_id, - 'status': 'PENDING', - 'external_id': webhook_id, - 'type': webhook_type or 'unknown', - 'data': data, - }), + db_item( + { + "id": 1, + "app_id": 1, + "user_id": user_id, + "status": "PENDING", + "external_id": webhook_id, + "type": webhook_type or "unknown", + "data": data, + } + ), ] if how_many != 1: db += [ - db_item({ - 'id': 2, - 'app_id': 1, - 'user_id': user_id, - 'status': 'PENDING', - 'external_id': webhook_id + 1 if webhook_id else None, - 'type': webhook_type or 'unknown', - 'data': data, - }), + db_item( + { + "id": 2, + "app_id": 1, + "user_id": user_id, + "status": "PENDING", + "external_id": webhook_id + 1 if webhook_id else None, + "type": webhook_type or "unknown", + "data": data, + } + ), ] - assert bc.database.list_of('linked_services.FirstPartyWebhookLog') == db + assert bc.database.list_of("linked_services.FirstPartyWebhookLog") == db diff --git a/breathecode/authenticate/tests/urls/tests_appuseragreement.py 
b/breathecode/authenticate/tests/urls/tests_appuseragreement.py index 4dcd1e035..d550e509d 100644 --- a/breathecode/authenticate/tests/urls/tests_appuseragreement.py +++ b/breathecode/authenticate/tests/urls/tests_appuseragreement.py @@ -1,6 +1,7 @@ """ Test cases for /user """ + import random from datetime import timedelta from unittest.mock import MagicMock, patch @@ -19,8 +20,8 @@ def get_serializer(app, data={}): return { - 'app': app.slug, - 'up_to_date': True, + "app": app.slug, + "up_to_date": True, **data, } @@ -32,20 +33,20 @@ class AuthenticateTestSuite(AuthTestCase): # Then: return 401 def test__auth__without_auth(self): """Test /logout without auth""" - url = reverse_lazy('authenticate:appuseragreement') + url = reverse_lazy("authenticate:appuseragreement") response = self.client.get(url) json = response.json() - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) - self.assertEqual(self.bc.database.list_of('linked_services.AppUserAgreement'), []) + self.assertEqual(self.bc.database.list_of("linked_services.AppUserAgreement"), []) # When: no agreements # Then: return empty list def test__no_agreements(self): - url = reverse_lazy('authenticate:appuseragreement') + url = reverse_lazy("authenticate:appuseragreement") model = self.bc.database.create(user=1) self.client.force_authenticate(model.user) @@ -55,58 +56,58 @@ def test__no_agreements(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('linked_services.AppUserAgreement'), []) + self.assertEqual(self.bc.database.list_of("linked_services.AppUserAgreement"), []) # teardown - self.bc.database.delete('authenticate.Token') + self.bc.database.delete("authenticate.Token") # When: have agreements, agreement_version match # Then: return list of agreements def test__have_agreements__version_match(self): - url = reverse_lazy('authenticate:appuseragreement') + url = reverse_lazy("authenticate:appuseragreement") version = random.randint(1, 100) - app = {'agreement_version': version} - app_user_agreements = [{'agreement_version': version, 'app_id': x + 1} for x in range(2)] + app = {"agreement_version": version} + app_user_agreements = [{"agreement_version": version, "app_id": x + 1} for x in range(2)] model = self.bc.database.create(user=1, app=(2, app), app_user_agreement=app_user_agreements) self.client.force_authenticate(model.user) response = self.client.get(url) json = response.json() expected = [ - get_serializer(model.app[0], {'up_to_date': True}), - get_serializer(model.app[1], {'up_to_date': True}), + get_serializer(model.app[0], {"up_to_date": True}), + get_serializer(model.app[1], {"up_to_date": True}), ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('linked_services.AppUserAgreement'), + self.bc.database.list_of("linked_services.AppUserAgreement"), self.bc.format.to_dict(model.app_user_agreement), ) # When: have agreements, agreement_version match # Then: return list of agreements def test__have_agreements__version_does_not_match(self): - url = reverse_lazy('authenticate:appuseragreement') + url = reverse_lazy("authenticate:appuseragreement") version1 = random.randint(1, 100) version2 = random.randint(1, 100) while 
version1 == version2: version2 = random.randint(1, 100) - app = {'agreement_version': version1} - app_user_agreements = [{'agreement_version': version2, 'app_id': x + 1} for x in range(2)] + app = {"agreement_version": version1} + app_user_agreements = [{"agreement_version": version2, "app_id": x + 1} for x in range(2)] model = self.bc.database.create(user=1, app=(2, app), app_user_agreement=app_user_agreements) self.client.force_authenticate(model.user) response = self.client.get(url) json = response.json() expected = [ - get_serializer(model.app[0], {'up_to_date': False}), - get_serializer(model.app[1], {'up_to_date': False}), + get_serializer(model.app[0], {"up_to_date": False}), + get_serializer(model.app[1], {"up_to_date": False}), ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('linked_services.AppUserAgreement'), + self.bc.database.list_of("linked_services.AppUserAgreement"), self.bc.format.to_dict(model.app_user_agreement), ) diff --git a/breathecode/authenticate/tests/urls/tests_confirmation_token.py b/breathecode/authenticate/tests/urls/tests_confirmation_token.py index 643a74355..4127405fa 100644 --- a/breathecode/authenticate/tests/urls/tests_confirmation_token.py +++ b/breathecode/authenticate/tests/urls/tests_confirmation_token.py @@ -1,6 +1,7 @@ """ This file contains test over AcademyInviteView, if it change, the duck tests will deleted """ + import os import random import re @@ -25,18 +26,18 @@ # IMPORTANT: the loader.render_to_string in a function is inside of function render def render_message(message): request = None - context = {'MESSAGE': message, 'BUTTON': None, 'LINK': os.getenv('APP_URL', '')} + context = {"MESSAGE": message, "BUTTON": None, "LINK": os.getenv("APP_URL", "")} - return loader.render_to_string('message.html', context, request) + return loader.render_to_string("message.html", context, request) def post_serializer(self, user_invite, data={}): return { - 'created_at': self.bc.datetime.to_iso_string(user_invite.created_at), - 'email': user_invite.email, - 'id': user_invite.id, - 'sent_at': user_invite.sent_at, - 'status': user_invite.status, + "created_at": self.bc.datetime.to_iso_string(user_invite.created_at), + "email": user_invite.email, + "id": user_invite.id, + "sent_at": user_invite.sent_at, + "status": user_invite.status, **data, } @@ -45,269 +46,293 @@ class AuthenticateJSONTestSuite(AuthTestCase): # When: No invites # Then: Return 404 def test_not_found(self): - """Test """ + """Test""" - url = reverse_lazy('authenticate:confirmation_token', kwargs={'token': 'hash'}) + url = reverse_lazy("authenticate:confirmation_token", kwargs={"token": "hash"}) - response = self.client.get(url, format='json', headers={'accept': 'application/json'}) + response = self.client.get(url, format="json", headers={"accept": "application/json"}) json = response.json() - expected = {'detail': 'user-invite-not-found', 'status_code': 404} + expected = {"detail": "user-invite-not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, 404) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), []) + self.assertEqual(self.bc.database.list_of("authenticate.UserInvite"), []) # Given: 1 UserInvite # When: No email # Then: Return 400 def test_no_email(self): - """Test """ + """Test""" model = self.bc.database.create(user_invite=1) - url = reverse_lazy('authenticate:confirmation_token', kwargs={'token': model.user_invite.token}) 
+ url = reverse_lazy("authenticate:confirmation_token", kwargs={"token": model.user_invite.token}) - response = self.client.get(url, format='json', headers={'accept': 'application/json'}) + response = self.client.get(url, format="json", headers={"accept": "application/json"}) json = response.json() - expected = {'detail': 'without-email', 'status_code': 400} + expected = {"detail": "without-email", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, 400) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), [ - self.bc.format.to_dict(model.user_invite), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.UserInvite"), + [ + self.bc.format.to_dict(model.user_invite), + ], + ) # Given: 1 UserInvite # When: Email already validated # Then: Return 400 def test_already_validated(self): - """Test """ + """Test""" user_invite = { - 'email': self.bc.fake.email(), - 'is_email_validated': True, + "email": self.bc.fake.email(), + "is_email_validated": True, } model = self.bc.database.create(user_invite=user_invite) - url = reverse_lazy('authenticate:confirmation_token', kwargs={'token': model.user_invite.token}) + url = reverse_lazy("authenticate:confirmation_token", kwargs={"token": model.user_invite.token}) - response = self.client.get(url, format='json', headers={'accept': 'application/json'}) + response = self.client.get(url, format="json", headers={"accept": "application/json"}) json = response.json() - expected = {'detail': 'email-already-validated', 'status_code': 400} + expected = {"detail": "email-already-validated", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, 400) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), [ - self.bc.format.to_dict(model.user_invite), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.UserInvite"), + [ + self.bc.format.to_dict(model.user_invite), + ], + ) # Given: 1 UserInvite # When: email and email is not validated # Then: Return 200 def test_done(self): - """Test """ + """Test""" - user_invite = {'email': self.bc.fake.email()} + user_invite = {"email": self.bc.fake.email()} model = self.bc.database.create(user_invite=user_invite) - url = reverse_lazy('authenticate:confirmation_token', kwargs={'token': model.user_invite.token}) + url = reverse_lazy("authenticate:confirmation_token", kwargs={"token": model.user_invite.token}) - response = self.client.get(url, format='json', headers={'accept': 'application/json'}) + response = self.client.get(url, format="json", headers={"accept": "application/json"}) json = response.json() expected = post_serializer(self, model.user_invite) self.assertEqual(json, expected) self.assertEqual(response.status_code, 200) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), [ - { - **self.bc.format.to_dict(model.user_invite), - 'is_email_validated': True, - }, - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.UserInvite"), + [ + { + **self.bc.format.to_dict(model.user_invite), + "is_email_validated": True, + }, + ], + ) # Given: 1 UserInvite # When: Email and email already validated # Then: Return 400 def test_2_errors(self): - """Test """ + """Test""" user_invite = { - 'is_email_validated': True, + "is_email_validated": True, } model = self.bc.database.create(user_invite=user_invite) - url = reverse_lazy('authenticate:confirmation_token', kwargs={'token': model.user_invite.token}) + url = reverse_lazy("authenticate:confirmation_token", 
kwargs={"token": model.user_invite.token}) - response = self.client.get(url, format='json', headers={'accept': 'application/json'}) + response = self.client.get(url, format="json", headers={"accept": "application/json"}) json = response.json() expected = [ { - 'detail': 'without-email', - 'status_code': 400, + "detail": "without-email", + "status_code": 400, }, { - 'detail': 'email-already-validated', - 'status_code': 400, + "detail": "email-already-validated", + "status_code": 400, }, ] self.assertEqual(json, expected) self.assertEqual(response.status_code, 400) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), [ - self.bc.format.to_dict(model.user_invite), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.UserInvite"), + [ + self.bc.format.to_dict(model.user_invite), + ], + ) class AuthenticateHTMLTestSuite(AuthTestCase): # When: No invites # Then: Return 404 def test_not_found(self): - """Test """ + """Test""" - url = reverse_lazy('authenticate:confirmation_token', kwargs={'token': 'hash'}) + url = reverse_lazy("authenticate:confirmation_token", kwargs={"token": "hash"}) response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - expected = render_message('user-invite-not-found') + expected = render_message("user-invite-not-found") # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, 404) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), []) + self.assertEqual(self.bc.database.list_of("authenticate.UserInvite"), []) # Given: 1 UserInvite # When: No email # Then: Return 400 def test_no_email(self): - """Test """ + """Test""" model = self.bc.database.create(user_invite=1) - url = reverse_lazy('authenticate:confirmation_token', kwargs={'token': model.user_invite.token}) + url = reverse_lazy("authenticate:confirmation_token", kwargs={"token": model.user_invite.token}) response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - expected = render_message('without-email.') + expected = render_message("without-email.") # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, 400) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), [ - self.bc.format.to_dict(model.user_invite), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.UserInvite"), + [ + self.bc.format.to_dict(model.user_invite), + ], + ) # Given: 1 UserInvite # When: Email already validated # Then: Return 400 def test_already_validated(self): - """Test """ + """Test""" user_invite = { - 'email': self.bc.fake.email(), - 'is_email_validated': True, + "email": self.bc.fake.email(), + "is_email_validated": True, } model = self.bc.database.create(user_invite=user_invite) - url = reverse_lazy('authenticate:confirmation_token', kwargs={'token': model.user_invite.token}) + url = reverse_lazy("authenticate:confirmation_token", kwargs={"token": model.user_invite.token}) response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - expected = 
render_message('email-already-validated.') + expected = render_message("email-already-validated.") # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, 400) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), [ - self.bc.format.to_dict(model.user_invite), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.UserInvite"), + [ + self.bc.format.to_dict(model.user_invite), + ], + ) # Given: 1 UserInvite # When: email and email is not validated # Then: Return 200 def test_done(self): - """Test """ + """Test""" - user_invite = {'email': self.bc.fake.email()} + user_invite = {"email": self.bc.fake.email()} model = self.bc.database.create(user_invite=user_invite) - url = reverse_lazy('authenticate:confirmation_token', kwargs={'token': model.user_invite.token}) + url = reverse_lazy("authenticate:confirmation_token", kwargs={"token": model.user_invite.token}) response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - expected = render_message('Your email was validated, you can close this page.') + expected = render_message("Your email was validated, you can close this page.") # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, 200) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), [ - { - **self.bc.format.to_dict(model.user_invite), - 'is_email_validated': True, - }, - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.UserInvite"), + [ + { + **self.bc.format.to_dict(model.user_invite), + "is_email_validated": True, + }, + ], + ) # Given: 1 UserInvite # When: Email and email already validated # Then: Return 400 def test_2_errors(self): - """Test """ + """Test""" user_invite = { - 'is_email_validated': True, + "is_email_validated": True, } model = self.bc.database.create(user_invite=user_invite) - url = reverse_lazy('authenticate:confirmation_token', kwargs={'token': model.user_invite.token}) + url = reverse_lazy("authenticate:confirmation_token", kwargs={"token": model.user_invite.token}) response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - expected = render_message('without-email. email-already-validated.') + expected = render_message("without-email. 
email-already-validated.") # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, 400) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), [ - self.bc.format.to_dict(model.user_invite), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.UserInvite"), + [ + self.bc.format.to_dict(model.user_invite), + ], + ) diff --git a/breathecode/authenticate/tests/urls/tests_github.py b/breathecode/authenticate/tests/urls/tests_github.py index 4dcf03067..7a224666e 100644 --- a/breathecode/authenticate/tests/urls/tests_github.py +++ b/breathecode/authenticate/tests/urls/tests_github.py @@ -1,11 +1,13 @@ """ Test cases for /user """ + import urllib import os from django.urls.base import reverse_lazy from rest_framework import status from ..mixins import AuthTestCase + # from ..mocks import GithubRequestsMock @@ -14,29 +16,29 @@ class AuthenticateTestSuite(AuthTestCase): def test_github_without_url(self): """Test /github without auth""" - url = reverse_lazy('authenticate:github') + url = reverse_lazy("authenticate:github") response = self.client.get(url) data = response.data - expected = {'detail': 'no-callback-url', 'status_code': 400} + expected = {"detail": "no-callback-url", "status_code": 400} self.assertEqual(data, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) def test_github(self): """Test /github""" - original_url_callback = 'https://google.co.ve' - url = reverse_lazy('authenticate:github') - params = {'url': 'https://google.co.ve'} - response = self.client.get(f'{url}?{urllib.parse.urlencode(params)}') + original_url_callback = "https://google.co.ve" + url = reverse_lazy("authenticate:github") + params = {"url": "https://google.co.ve"} + response = self.client.get(f"{url}?{urllib.parse.urlencode(params)}") params = { - 'client_id': os.getenv('GITHUB_CLIENT_ID', ''), - 'redirect_uri': os.getenv('GITHUB_REDIRECT_URL', '') + '?url=' + original_url_callback, - 'scope': 'user repo read:org', + "client_id": os.getenv("GITHUB_CLIENT_ID", ""), + "redirect_uri": os.getenv("GITHUB_REDIRECT_URL", "") + "?url=" + original_url_callback, + "scope": "user repo read:org", } - redirect = f'https://github.com/login/oauth/authorize?{urllib.parse.urlencode(params)}+admin%3Aorg' + redirect = f"https://github.com/login/oauth/authorize?{urllib.parse.urlencode(params)}+admin%3Aorg" self.assertEqual(response.status_code, status.HTTP_302_FOUND) self.assertEqual(response.url, redirect) diff --git a/breathecode/authenticate/tests/urls/tests_github_callback.py b/breathecode/authenticate/tests/urls/tests_github_callback.py index 256ba842f..b539ef37a 100644 --- a/breathecode/authenticate/tests/urls/tests_github_callback.py +++ b/breathecode/authenticate/tests/urls/tests_github_callback.py @@ -1,6 +1,7 @@ """ Test cases for /user """ + import re import urllib from unittest import mock @@ -22,38 +23,30 @@ @pytest.fixture(autouse=True) def setup(db, monkeypatch): routes = { - 'https://github.com/login/oauth/access_token': - FakeResponse(status_code=200, - data={ - 'access_token': GithubRequestsMock.token, - 'scope': 'repo,gist', - 'token_type': 'bearer' - }), - 'https://rigobot.herokuapp.com/v1/auth/invite': - FakeResponse(status_code=200, data={}), + "https://github.com/login/oauth/access_token": 
FakeResponse( + status_code=200, + data={"access_token": GithubRequestsMock.token, "scope": "repo,gist", "token_type": "bearer"}, + ), + "https://rigobot.herokuapp.com/v1/auth/invite": FakeResponse(status_code=200, data={}), } def post_mock(url, *args, **kwargs): - return routes.get(url, FakeResponse(status_code=404, data={'status': 'fake request, not found'})) + return routes.get(url, FakeResponse(status_code=404, data={"status": "fake request, not found"})) - monkeypatch.setattr('requests.get', GithubRequestsMock.apply_get_requests_mock()) - monkeypatch.setattr('requests.post', post_mock) - monkeypatch.setattr('django.db.models.signals.pre_delete.send_robust', mock.MagicMock(return_value=None)) - monkeypatch.setattr('breathecode.admissions.signals.student_edu_status_updated.send_robust', - mock.MagicMock(return_value=None)) + monkeypatch.setattr("requests.get", GithubRequestsMock.apply_get_requests_mock()) + monkeypatch.setattr("requests.post", post_mock) + monkeypatch.setattr("django.db.models.signals.pre_delete.send_robust", mock.MagicMock(return_value=None)) + monkeypatch.setattr( + "breathecode.admissions.signals.student_edu_status_updated.send_robust", mock.MagicMock(return_value=None) + ) yield def render(message): request = None return loader.render_to_string( - 'message.html', - { - 'MESSAGE': message, - 'BUTTON': None, - 'BUTTON_TARGET': '_blank', - 'LINK': None - }, + "message.html", + {"MESSAGE": message, "BUTTON": None, "BUTTON_TARGET": "_blank", "LINK": None}, request, using=None, ) @@ -61,478 +54,480 @@ def render(message): def get_profile_fields(data={}): return { - 'id': 1, - 'user_id': 1, - 'avatar_url': 'https://avatars2.githubusercontent.com/u/3018142?v=4', - 'bio': - 'I am an Computer engineer, Full-stack Developer\xa0and React Developer, I likes an API good, the clean code, the good programming practices', - 'phone': '', - 'show_tutorial': True, - 'twitter_username': None, - 'github_username': None, - 'portfolio_url': None, - 'linkedin_url': None, - 'blog': 'https://www.facebook.com/chocoland.framework', + "id": 1, + "user_id": 1, + "avatar_url": "https://avatars2.githubusercontent.com/u/3018142?v=4", + "bio": "I am an Computer engineer, Full-stack Developer\xa0and React Developer, I likes an API good, the clean code, the good programming practices", + "phone": "", + "show_tutorial": True, + "twitter_username": None, + "github_username": None, + "portfolio_url": None, + "linkedin_url": None, + "blog": "https://www.facebook.com/chocoland.framework", **data, } def get_credentials_github_fields(data={}): - bio = ('I am an Computer engineer, Full-stack Developer\xa0and React ' - 'Developer, I likes an API good, the clean code, the good programming ' - 'practices') + bio = ( + "I am an Computer engineer, Full-stack Developer\xa0and React " + "Developer, I likes an API good, the clean code, the good programming " + "practices" + ) return { - 'avatar_url': 'https://avatars2.githubusercontent.com/u/3018142?v=4', - 'bio': bio, - 'blog': 'https://www.facebook.com/chocoland.framework', - 'company': '@chocoland ', - 'email': 'jdefreitaspinto@gmail.com', - 'github_id': 3018142, - 'name': 'Jeferson De Freitas', - 'token': 'e72e16c7e42f292c6912e7710c838347ae178b4a', - 'twitter_username': None, - 'user_id': 1, - 'username': 'jefer94', + "avatar_url": "https://avatars2.githubusercontent.com/u/3018142?v=4", + "bio": bio, + "blog": "https://www.facebook.com/chocoland.framework", + "company": "@chocoland ", + "email": "jdefreitaspinto@gmail.com", + "github_id": 3018142, + "name": 
"Jeferson De Freitas", + "token": "e72e16c7e42f292c6912e7710c838347ae178b4a", + "twitter_username": None, + "user_id": 1, + "username": "jefer94", **data, } def test_github_callback__without_code(bc: Breathecode, client: APIClient): """Test /github/callback without auth""" - url = reverse_lazy('authenticate:github_callback') - params = {'url': 'https://google.co.ve'} - response = client.get(f'{url}?{urllib.parse.urlencode(params)}') + url = reverse_lazy("authenticate:github_callback") + params = {"url": "https://google.co.ve"} + response = client.get(f"{url}?{urllib.parse.urlencode(params)}") data = response.json() - expected = {'detail': 'no-code', 'status_code': 400} + expected = {"detail": "no-code", "status_code": 400} assert data == expected assert response.status_code == status.HTTP_400_BAD_REQUEST - assert bc.database.list_of('auth.User') == [] - assert bc.database.list_of('authenticate.Profile') == [] - assert bc.database.list_of('authenticate.CredentialsGithub') == [] - assert bc.database.list_of('authenticate.ProfileAcademy') == [] + assert bc.database.list_of("auth.User") == [] + assert bc.database.list_of("authenticate.Profile") == [] + assert bc.database.list_of("authenticate.CredentialsGithub") == [] + assert bc.database.list_of("authenticate.ProfileAcademy") == [] def test_github_callback__user_not_exist(bc: Breathecode, client: APIClient): """Test /github/callback""" - original_url_callback = 'https://google.co.ve' - code = 'Konan' + original_url_callback = "https://google.co.ve" + code = "Konan" - url = reverse_lazy('authenticate:github_callback') - params = {'url': original_url_callback, 'code': code} + url = reverse_lazy("authenticate:github_callback") + params = {"url": original_url_callback, "code": code} - response = client.get(f'{url}?{urllib.parse.urlencode(params)}') + response = client.get(f"{url}?{urllib.parse.urlencode(params)}") content = bc.format.from_bytes(response.content) - expected = render('We could not find in our records the email associated to this github account, ' - 'perhaps you want to signup to the platform first? Back to 4Geeks.com') + expected = render( + "We could not find in our records the email associated to this github account, " + 'perhaps you want to signup to the platform first? 
Back to 4Geeks.com' + ) # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) assert content == expected assert response.status_code == status.HTTP_200_OK - assert bc.database.list_of('auth.User') == [] - assert bc.database.list_of('authenticate.Profile') == [] - assert bc.database.list_of('authenticate.CredentialsGithub') == [] - assert bc.database.list_of('authenticate.ProfileAcademy') == [] + assert bc.database.list_of("auth.User") == [] + assert bc.database.list_of("authenticate.Profile") == [] + assert bc.database.list_of("authenticate.CredentialsGithub") == [] + assert bc.database.list_of("authenticate.ProfileAcademy") == [] def test_github_callback__user_not_exist_but_waiting_list(bc: Breathecode, client: APIClient): """Test /github/callback""" - user_invite = {'status': 'WAITING_LIST', 'email': 'jdefreitaspinto@gmail.com'} + user_invite = {"status": "WAITING_LIST", "email": "jdefreitaspinto@gmail.com"} bc.database.create(user_invite=user_invite) - original_url_callback = 'https://google.co.ve' - code = 'Konan' + original_url_callback = "https://google.co.ve" + code = "Konan" - url = reverse_lazy('authenticate:github_callback') - params = {'url': original_url_callback, 'code': code} + url = reverse_lazy("authenticate:github_callback") + params = {"url": original_url_callback, "code": code} - response = client.get(f'{url}?{urllib.parse.urlencode(params)}') + response = client.get(f"{url}?{urllib.parse.urlencode(params)}") content = bc.format.from_bytes(response.content) - expected = render('You are still number 1 on the waiting list, we will email you once you are given access ' - f'Back to 4Geeks.com') + expected = render( + "You are still number 1 on the waiting list, we will email you once you are given access " + f'Back to 4Geeks.com' + ) # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) assert content == expected assert response.status_code == status.HTTP_200_OK - assert bc.database.list_of('auth.User') == [] - assert bc.database.list_of('authenticate.Profile') == [] - assert bc.database.list_of('authenticate.CredentialsGithub') == [] - assert bc.database.list_of('authenticate.ProfileAcademy') == [] + assert bc.database.list_of("auth.User") == [] + assert bc.database.list_of("authenticate.Profile") == [] + assert bc.database.list_of("authenticate.CredentialsGithub") == [] + assert bc.database.list_of("authenticate.ProfileAcademy") == [] def test_github_callback__with_user(bc: Breathecode, client: APIClient): """Test /github/callback""" - user_kwargs = {'email': 'JDEFREITASPINTO@GMAIL.COM'} - role_kwargs = {'slug': 'student', 'name': 'Student'} + user_kwargs = {"email": "JDEFREITASPINTO@GMAIL.COM"} + role_kwargs = {"slug": "student", "name": "Student"} model = bc.database.create(role=True, user=True, user_kwargs=user_kwargs, role_kwargs=role_kwargs) - original_url_callback = 'https://google.co.ve' - token_pattern = re.compile('^' + original_url_callback.replace('.', r'\.') + r'\?token=[0-9a-zA-Z]{,40}$') - code = 'Konan' + original_url_callback = "https://google.co.ve" + token_pattern = re.compile("^" + original_url_callback.replace(".", r"\.") + r"\?token=[0-9a-zA-Z]{,40}$") + code = "Konan" - 
url = reverse_lazy('authenticate:github_callback') - params = {'url': original_url_callback, 'code': code} - response = client.get(f'{url}?{urllib.parse.urlencode(params)}') + url = reverse_lazy("authenticate:github_callback") + params = {"url": original_url_callback, "code": code} + response = client.get(f"{url}?{urllib.parse.urlencode(params)}") assert response.status_code == status.HTTP_302_FOUND assert bool(token_pattern.match(response.url)) == True - assert bc.database.list_of('auth.User') == [{**bc.format.to_dict(model.user)}] + assert bc.database.list_of("auth.User") == [{**bc.format.to_dict(model.user)}] - assert bc.database.list_of('authenticate.Profile') == [] - assert bc.database.list_of('authenticate.CredentialsGithub') == [ + assert bc.database.list_of("authenticate.Profile") == [] + assert bc.database.list_of("authenticate.CredentialsGithub") == [ get_credentials_github_fields(), ] - assert bc.database.list_of('authenticate.ProfileAcademy') == [ + assert bc.database.list_of("authenticate.ProfileAcademy") == [ bc.format.to_dict(model.profile_academy), ] def test_github_callback__with_user__with_email_in_uppercase(bc: Breathecode, client: APIClient): """Test /github/callback""" - user_kwargs = {'email': 'JDEFREITASPINTO@GMAIL.COM'} - role_kwargs = {'slug': 'student', 'name': 'Student'} + user_kwargs = {"email": "JDEFREITASPINTO@GMAIL.COM"} + role_kwargs = {"slug": "student", "name": "Student"} model = bc.database.create(role=True, user=True, user_kwargs=user_kwargs, role_kwargs=role_kwargs) - original_url_callback = 'https://google.co.ve' - token_pattern = re.compile('^' + original_url_callback.replace('.', r'\.') + r'\?token=[0-9a-zA-Z]{,40}$') - code = 'Konan' + original_url_callback = "https://google.co.ve" + token_pattern = re.compile("^" + original_url_callback.replace(".", r"\.") + r"\?token=[0-9a-zA-Z]{,40}$") + code = "Konan" - url = reverse_lazy('authenticate:github_callback') - params = {'url': original_url_callback, 'code': code} - response = client.get(f'{url}?{urllib.parse.urlencode(params)}') + url = reverse_lazy("authenticate:github_callback") + params = {"url": original_url_callback, "code": code} + response = client.get(f"{url}?{urllib.parse.urlencode(params)}") assert response.status_code == status.HTTP_302_FOUND assert bool(token_pattern.match(response.url)) == True - assert bc.database.list_of('auth.User') == [{**bc.format.to_dict(model.user)}] + assert bc.database.list_of("auth.User") == [{**bc.format.to_dict(model.user)}] - assert bc.database.list_of('authenticate.Profile') == [get_profile_fields(data={})] - assert bc.database.list_of('authenticate.CredentialsGithub') == [ + assert bc.database.list_of("authenticate.Profile") == [get_profile_fields(data={})] + assert bc.database.list_of("authenticate.CredentialsGithub") == [ get_credentials_github_fields(), ] - assert bc.database.list_of('authenticate.ProfileAcademy') == [] + assert bc.database.list_of("authenticate.ProfileAcademy") == [] def test_github_callback__with_bad_user_in_querystring(bc: Breathecode, client: APIClient): """Test /github/callback""" - user_kwargs = {'email': 'JDEFREITASPINTO@GMAIL.COM'} - role_kwargs = {'slug': 'student', 'name': 'Student'} - model = bc.database.create(role=True, - user=True, - profile_academy=True, - user_kwargs=user_kwargs, - role_kwargs=role_kwargs, - token=True) - - original_url_callback = 'https://google.co.ve' - code = 'Konan' - - url = reverse_lazy('authenticate:github_callback') - params = {'url': original_url_callback, 'code': code, 'user': 'b14f'} - 
response = client.get(f'{url}?{urllib.parse.urlencode(params)}') + user_kwargs = {"email": "JDEFREITASPINTO@GMAIL.COM"} + role_kwargs = {"slug": "student", "name": "Student"} + model = bc.database.create( + role=True, user=True, profile_academy=True, user_kwargs=user_kwargs, role_kwargs=role_kwargs, token=True + ) + + original_url_callback = "https://google.co.ve" + code = "Konan" + + url = reverse_lazy("authenticate:github_callback") + params = {"url": original_url_callback, "code": code, "user": "b14f"} + response = client.get(f"{url}?{urllib.parse.urlencode(params)}") json = response.json() - expected = {'detail': 'token-not-found', 'status_code': 404} + expected = {"detail": "token-not-found", "status_code": 404} assert json == expected assert response.status_code == status.HTTP_404_NOT_FOUND - assert bc.database.list_of('auth.User') == [{**bc.format.to_dict(model.user)}] - assert bc.database.list_of('authenticate.Profile') == [] - assert bc.database.list_of('authenticate.CredentialsGithub') == [] - assert bc.database.list_of('authenticate.ProfileAcademy') == [ + assert bc.database.list_of("auth.User") == [{**bc.format.to_dict(model.user)}] + assert bc.database.list_of("authenticate.Profile") == [] + assert bc.database.list_of("authenticate.CredentialsGithub") == [] + assert bc.database.list_of("authenticate.ProfileAcademy") == [ bc.format.to_dict(model.profile_academy), ] def test_github_callback__with_user(bc: Breathecode, client: APIClient): """Test /github/callback""" - user_kwargs = {'email': 'JDEFREITASPINTO@GMAIL.COM'} - role_kwargs = {'slug': 'student', 'name': 'Student'} - model = bc.database.create(role=True, - user=True, - profile_academy=True, - user_kwargs=user_kwargs, - role_kwargs=role_kwargs, - token=True) - - original_url_callback = 'https://google.co.ve' - token_pattern = re.compile('^' + original_url_callback.replace('.', r'\.') + r'\?token=[0-9a-zA-Z]{,40}$') - code = 'Konan' + user_kwargs = {"email": "JDEFREITASPINTO@GMAIL.COM"} + role_kwargs = {"slug": "student", "name": "Student"} + model = bc.database.create( + role=True, user=True, profile_academy=True, user_kwargs=user_kwargs, role_kwargs=role_kwargs, token=True + ) + + original_url_callback = "https://google.co.ve" + token_pattern = re.compile("^" + original_url_callback.replace(".", r"\.") + r"\?token=[0-9a-zA-Z]{,40}$") + code = "Konan" token = model.token - url = reverse_lazy('authenticate:github_callback') - params = {'url': original_url_callback, 'code': code, 'user': token} - response = client.get(f'{url}?{urllib.parse.urlencode(params)}') + url = reverse_lazy("authenticate:github_callback") + params = {"url": original_url_callback, "code": code, "user": token} + response = client.get(f"{url}?{urllib.parse.urlencode(params)}") assert response.status_code == status.HTTP_302_FOUND assert bool(token_pattern.match(response.url)) == True - assert bc.database.list_of('auth.User') == [{**bc.format.to_dict(model.user)}] + assert bc.database.list_of("auth.User") == [{**bc.format.to_dict(model.user)}] - assert bc.database.list_of('authenticate.Profile') == [get_profile_fields(data={})] - assert bc.database.list_of('authenticate.CredentialsGithub') == [ + assert bc.database.list_of("authenticate.Profile") == [get_profile_fields(data={})] + assert bc.database.list_of("authenticate.CredentialsGithub") == [ get_credentials_github_fields(), ] - assert bc.database.list_of('authenticate.ProfileAcademy') == [ + assert bc.database.list_of("authenticate.ProfileAcademy") == [ bc.format.to_dict(model.profile_academy), ] def 
test_github_callback__with_user__profile_without_avatar_url(bc: Breathecode, client: APIClient): """Test /github/callback""" - user_kwargs = {'email': 'JDEFREITASPINTO@GMAIL.COM'} - role_kwargs = {'slug': 'student', 'name': 'Student'} - model = bc.database.create(role=True, - user=True, - profile_academy=True, - user_kwargs=user_kwargs, - role_kwargs=role_kwargs, - profile=1, - token=True) - - original_url_callback = 'https://google.co.ve' - token_pattern = re.compile('^' + original_url_callback.replace('.', r'\.') + r'\?token=[0-9a-zA-Z]{,40}$') - code = 'Konan' + user_kwargs = {"email": "JDEFREITASPINTO@GMAIL.COM"} + role_kwargs = {"slug": "student", "name": "Student"} + model = bc.database.create( + role=True, + user=True, + profile_academy=True, + user_kwargs=user_kwargs, + role_kwargs=role_kwargs, + profile=1, + token=True, + ) + + original_url_callback = "https://google.co.ve" + token_pattern = re.compile("^" + original_url_callback.replace(".", r"\.") + r"\?token=[0-9a-zA-Z]{,40}$") + code = "Konan" token = model.token - url = reverse_lazy('authenticate:github_callback') - params = {'url': original_url_callback, 'code': code, 'user': token} - response = client.get(f'{url}?{urllib.parse.urlencode(params)}') + url = reverse_lazy("authenticate:github_callback") + params = {"url": original_url_callback, "code": code, "user": token} + response = client.get(f"{url}?{urllib.parse.urlencode(params)}") assert response.status_code == status.HTTP_302_FOUND assert bool(token_pattern.match(response.url)) == True - assert bc.database.list_of('auth.User') == [{**bc.format.to_dict(model.user)}] + assert bc.database.list_of("auth.User") == [{**bc.format.to_dict(model.user)}] - assert bc.database.list_of('authenticate.Profile') == [ - get_profile_fields(data={ - 'bio': None, - 'blog': None - }), + assert bc.database.list_of("authenticate.Profile") == [ + get_profile_fields(data={"bio": None, "blog": None}), ] - assert bc.database.list_of('authenticate.CredentialsGithub') == [ + assert bc.database.list_of("authenticate.CredentialsGithub") == [ get_credentials_github_fields(), ] - assert bc.database.list_of('authenticate.ProfileAcademy') == [ + assert bc.database.list_of("authenticate.ProfileAcademy") == [ bc.format.to_dict(model.profile_academy), ] def test_github_callback__with_user__profile_with_avatar_url(bc: Breathecode, client: APIClient): """Test /github/callback""" - user_kwargs = {'email': 'JDEFREITASPINTO@GMAIL.COM'} - role_kwargs = {'slug': 'student', 'name': 'Student'} - profile = {'avatar_url': bc.fake.url()} - model = bc.database.create(role=True, - user=True, - profile_academy=True, - user_kwargs=user_kwargs, - role_kwargs=role_kwargs, - profile=profile, - token=True) - - original_url_callback = 'https://google.co.ve' - token_pattern = re.compile('^' + original_url_callback.replace('.', r'\.') + r'\?token=[0-9a-zA-Z]{,40}$') - code = 'Konan' + user_kwargs = {"email": "JDEFREITASPINTO@GMAIL.COM"} + role_kwargs = {"slug": "student", "name": "Student"} + profile = {"avatar_url": bc.fake.url()} + model = bc.database.create( + role=True, + user=True, + profile_academy=True, + user_kwargs=user_kwargs, + role_kwargs=role_kwargs, + profile=profile, + token=True, + ) + + original_url_callback = "https://google.co.ve" + token_pattern = re.compile("^" + original_url_callback.replace(".", r"\.") + r"\?token=[0-9a-zA-Z]{,40}$") + code = "Konan" token = model.token - url = reverse_lazy('authenticate:github_callback') - params = {'url': original_url_callback, 'code': code, 'user': token} - response = 
client.get(f'{url}?{urllib.parse.urlencode(params)}') + url = reverse_lazy("authenticate:github_callback") + params = {"url": original_url_callback, "code": code, "user": token} + response = client.get(f"{url}?{urllib.parse.urlencode(params)}") assert response.status_code == status.HTTP_302_FOUND assert bool(token_pattern.match(response.url)) == True - assert bc.database.list_of('auth.User') == [{**bc.format.to_dict(model.user)}] + assert bc.database.list_of("auth.User") == [{**bc.format.to_dict(model.user)}] - assert bc.database.list_of('authenticate.Profile') == [ - get_profile_fields(data={ - 'bio': None, - 'blog': None, - **profile - }), + assert bc.database.list_of("authenticate.Profile") == [ + get_profile_fields(data={"bio": None, "blog": None, **profile}), ] - assert bc.database.list_of('authenticate.CredentialsGithub') == [ + assert bc.database.list_of("authenticate.CredentialsGithub") == [ get_credentials_github_fields(), ] - assert bc.database.list_of('authenticate.ProfileAcademy') == [ + assert bc.database.list_of("authenticate.ProfileAcademy") == [ bc.format.to_dict(model.profile_academy), ] def test_github_callback__with_user_different_email__without_credetials_of_github__without_cohort_user( - bc: Breathecode, client: APIClient): + bc: Breathecode, client: APIClient +): """Test /github/callback""" - user = {'email': 'FJOSE123@GMAIL.COM'} - role = {'slug': 'student', 'name': 'Student'} + user = {"email": "FJOSE123@GMAIL.COM"} + role = {"slug": "student", "name": "Student"} model = bc.database.create(role=role, user=user, profile_academy=True, token=True) - original_url_callback = 'https://google.co.ve' - token_pattern = re.compile('^' + original_url_callback.replace('.', r'\.') + r'\?token=[0-9a-zA-Z]{,40}$') - code = 'Konan' + original_url_callback = "https://google.co.ve" + token_pattern = re.compile("^" + original_url_callback.replace(".", r"\.") + r"\?token=[0-9a-zA-Z]{,40}$") + code = "Konan" token = model.token - url = reverse_lazy('authenticate:github_callback') - params = {'url': original_url_callback, 'code': code, 'user': token} - response = client.get(f'{url}?{urllib.parse.urlencode(params)}') + url = reverse_lazy("authenticate:github_callback") + params = {"url": original_url_callback, "code": code, "user": token} + response = client.get(f"{url}?{urllib.parse.urlencode(params)}") assert response.status_code == status.HTTP_302_FOUND assert bool(token_pattern.match(response.url)) == True - assert bc.database.list_of('auth.User') == [{**bc.format.to_dict(model.user)}] + assert bc.database.list_of("auth.User") == [{**bc.format.to_dict(model.user)}] - assert bc.database.list_of('authenticate.Profile') == [get_profile_fields(data={})] - assert bc.database.list_of('authenticate.CredentialsGithub') == [ + assert bc.database.list_of("authenticate.Profile") == [get_profile_fields(data={})] + assert bc.database.list_of("authenticate.CredentialsGithub") == [ get_credentials_github_fields(), ] - assert bc.database.list_of('authenticate.ProfileAcademy') == [ + assert bc.database.list_of("authenticate.ProfileAcademy") == [ bc.format.to_dict(model.profile_academy), ] def test_github_callback__with_user_different_email__without_credetials_of_github__with_cohort_user( - bc: Breathecode, client: APIClient): + bc: Breathecode, client: APIClient +): """Test /github/callback""" - user = {'email': 'FJOSE123@GMAIL.COM'} - role = {'slug': 'student', 'name': 'Student'} + user = {"email": "FJOSE123@GMAIL.COM"} + role = {"slug": "student", "name": "Student"} model = 
bc.database.create(role=role, user=user, profile_academy=True, cohort_user=1, token=True) - original_url_callback = 'https://google.co.ve' - token_pattern = re.compile('^' + original_url_callback.replace('.', r'\.') + r'\?token=[0-9a-zA-Z]{,40}$') - code = 'Konan' + original_url_callback = "https://google.co.ve" + token_pattern = re.compile("^" + original_url_callback.replace(".", r"\.") + r"\?token=[0-9a-zA-Z]{,40}$") + code = "Konan" token = model.token - url = reverse_lazy('authenticate:github_callback') - params = {'url': original_url_callback, 'code': code, 'user': token} - response = client.get(f'{url}?{urllib.parse.urlencode(params)}') + url = reverse_lazy("authenticate:github_callback") + params = {"url": original_url_callback, "code": code, "user": token} + response = client.get(f"{url}?{urllib.parse.urlencode(params)}") assert response.status_code == status.HTTP_302_FOUND assert bool(token_pattern.match(response.url)) == True - assert bc.database.list_of('auth.User') == [{**bc.format.to_dict(model.user)}] + assert bc.database.list_of("auth.User") == [{**bc.format.to_dict(model.user)}] - assert bc.database.list_of('authenticate.Profile') == [get_profile_fields(data={})] - assert bc.database.list_of('authenticate.CredentialsGithub') == [ + assert bc.database.list_of("authenticate.Profile") == [get_profile_fields(data={})] + assert bc.database.list_of("authenticate.CredentialsGithub") == [ get_credentials_github_fields(), ] - assert bc.database.list_of('authenticate.ProfileAcademy') == [ + assert bc.database.list_of("authenticate.ProfileAcademy") == [ bc.format.to_dict(model.profile_academy), ] def test_github_callback__with_user_different_email__with_credentials_of_github__without_cohort_user( - bc: Breathecode, client: APIClient): + bc: Breathecode, client: APIClient +): """Test /github/callback""" - users = [{'email': 'FJOSE123@GMAIL.COM'}, {'email': 'jdefreitaspinto@gmail.com'}] - role = {'slug': 'student', 'name': 'Student'} - credentials_github = {'github_id': 3018142} - token = {'user_id': 2} - model = bc.database.create(role=role, - user=users, - profile_academy=True, - credentials_github=credentials_github, - token=token) - - original_url_callback = 'https://google.co.ve' - token_pattern = re.compile('^' + original_url_callback.replace('.', r'\.') + r'\?token=[0-9a-zA-Z]{,40}$') - code = 'Konan' + users = [{"email": "FJOSE123@GMAIL.COM"}, {"email": "jdefreitaspinto@gmail.com"}] + role = {"slug": "student", "name": "Student"} + credentials_github = {"github_id": 3018142} + token = {"user_id": 2} + model = bc.database.create( + role=role, user=users, profile_academy=True, credentials_github=credentials_github, token=token + ) + + original_url_callback = "https://google.co.ve" + token_pattern = re.compile("^" + original_url_callback.replace(".", r"\.") + r"\?token=[0-9a-zA-Z]{,40}$") + code = "Konan" token = model.token - url = reverse_lazy('authenticate:github_callback') - params = {'url': original_url_callback, 'code': code, 'user': token} - response = client.get(f'{url}?{urllib.parse.urlencode(params)}') + url = reverse_lazy("authenticate:github_callback") + params = {"url": original_url_callback, "code": code, "user": token} + response = client.get(f"{url}?{urllib.parse.urlencode(params)}") assert response.status_code == status.HTTP_302_FOUND assert bool(token_pattern.match(response.url)) == True - assert bc.database.list_of('auth.User') == bc.format.to_dict(model.user) + assert bc.database.list_of("auth.User") == bc.format.to_dict(model.user) - assert 
bc.database.list_of('authenticate.Profile') == [ - get_profile_fields(data={'user_id': 2}), + assert bc.database.list_of("authenticate.Profile") == [ + get_profile_fields(data={"user_id": 2}), ] - assert bc.database.list_of('authenticate.CredentialsGithub') == [ - get_credentials_github_fields(data={'user_id': 2}), + assert bc.database.list_of("authenticate.CredentialsGithub") == [ + get_credentials_github_fields(data={"user_id": 2}), ] - assert bc.database.list_of('authenticate.ProfileAcademy') == [ + assert bc.database.list_of("authenticate.ProfileAcademy") == [ bc.format.to_dict(model.profile_academy), ] def test_github_callback__with_user_different_email__with_credentials_of_github__with_cohort_user( - bc: Breathecode, client: APIClient): + bc: Breathecode, client: APIClient +): """Test /github/callback""" - users = [{'email': 'FJOSE123@GMAIL.COM'}, {'email': 'jdefreitaspinto@gmail.com'}] - role = {'slug': 'student', 'name': 'Student'} - credentials_github = {'github_id': 3018142} - token = {'user_id': 2} - cohort_user = {'user_id': 2} - model = bc.database.create(role=role, - user=users, - cohort_user=cohort_user, - profile_academy=True, - credentials_github=credentials_github, - token=token) - - original_url_callback = 'https://google.co.ve' - token_pattern = re.compile('^' + original_url_callback.replace('.', r'\.') + r'\?token=[0-9a-zA-Z]{,40}$') - code = 'Konan' + users = [{"email": "FJOSE123@GMAIL.COM"}, {"email": "jdefreitaspinto@gmail.com"}] + role = {"slug": "student", "name": "Student"} + credentials_github = {"github_id": 3018142} + token = {"user_id": 2} + cohort_user = {"user_id": 2} + model = bc.database.create( + role=role, + user=users, + cohort_user=cohort_user, + profile_academy=True, + credentials_github=credentials_github, + token=token, + ) + + original_url_callback = "https://google.co.ve" + token_pattern = re.compile("^" + original_url_callback.replace(".", r"\.") + r"\?token=[0-9a-zA-Z]{,40}$") + code = "Konan" token = model.token - url = reverse_lazy('authenticate:github_callback') - params = {'url': original_url_callback, 'code': code, 'user': token} - response = client.get(f'{url}?{urllib.parse.urlencode(params)}') + url = reverse_lazy("authenticate:github_callback") + params = {"url": original_url_callback, "code": code, "user": token} + response = client.get(f"{url}?{urllib.parse.urlencode(params)}") assert response.status_code == status.HTTP_302_FOUND assert bool(token_pattern.match(response.url)) == True - assert bc.database.list_of('auth.User') == bc.format.to_dict(model.user) + assert bc.database.list_of("auth.User") == bc.format.to_dict(model.user) - assert bc.database.list_of('authenticate.Profile') == [ - get_profile_fields(data={'user_id': 2}), + assert bc.database.list_of("authenticate.Profile") == [ + get_profile_fields(data={"user_id": 2}), ] - assert bc.database.list_of('authenticate.CredentialsGithub') == [ - get_credentials_github_fields(data={'user_id': 2}), + assert bc.database.list_of("authenticate.CredentialsGithub") == [ + get_credentials_github_fields(data={"user_id": 2}), ] - assert bc.database.list_of('authenticate.ProfileAcademy') == [ - bc.format.to_dict(model.profile_academy), { - 'academy_id': 1, - 'address': None, - 'email': 'jdefreitaspinto@gmail.com', - 'first_name': model.user[1].first_name, - 'id': 2, - 'last_name': model.user[1].last_name, - 'phone': '', - 'role_id': 'student', - 'status': 'ACTIVE', - 'user_id': 2 - } + assert bc.database.list_of("authenticate.ProfileAcademy") == [ + bc.format.to_dict(model.profile_academy), 
+ { + "academy_id": 1, + "address": None, + "email": "jdefreitaspinto@gmail.com", + "first_name": model.user[1].first_name, + "id": 2, + "last_name": model.user[1].last_name, + "phone": "", + "role_id": "student", + "status": "ACTIVE", + "user_id": 2, + }, ] diff --git a/breathecode/authenticate/tests/urls/tests_github_me.py b/breathecode/authenticate/tests/urls/tests_github_me.py index 93aedf8ab..6563abcf7 100644 --- a/breathecode/authenticate/tests/urls/tests_github_me.py +++ b/breathecode/authenticate/tests/urls/tests_github_me.py @@ -1,6 +1,7 @@ """ Test cases for /user """ + from django.urls.base import reverse_lazy from rest_framework import status @@ -13,11 +14,11 @@ class AuthenticateTestSuite(AuthTestCase): """ def test_not_auth(self): - url = reverse_lazy('authenticate:github_me') + url = reverse_lazy("authenticate:github_me") response = self.client.delete(url) json = response.json() - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) @@ -30,36 +31,39 @@ def test__delete__not_found(self): model = self.bc.database.create(user=1) self.client.force_authenticate(model.user) - url = reverse_lazy('authenticate:github_me') + url = reverse_lazy("authenticate:github_me") response = self.client.delete(url) json = response.json() - expected = {'detail': 'not-found', 'status_code': 404} + expected = {"detail": "not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - self.assertEqual(self.bc.database.list_of('authenticate.CredentialsGithub'), []) + self.assertEqual(self.bc.database.list_of("authenticate.CredentialsGithub"), []) """ 🔽🔽🔽 DELETE not found, trying to delete a CredentialsGithub from another User """ def test__delete__not_found__trying_to_delete_credentials_of_other_user(self): - credentials_github = {'user_id': 2} + credentials_github = {"user_id": 2} model = self.bc.database.create(user=2, credentials_github=credentials_github) self.bc.request.authenticate(model.user[0]) - url = reverse_lazy('authenticate:github_me') + url = reverse_lazy("authenticate:github_me") response = self.client.delete(url) json = response.json() - expected = {'detail': 'not-found', 'status_code': 404} + expected = {"detail": "not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - self.assertEqual(self.bc.database.list_of('authenticate.CredentialsGithub'), [ - self.bc.format.to_dict(model.credentials_github), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.CredentialsGithub"), + [ + self.bc.format.to_dict(model.credentials_github), + ], + ) """ 🔽🔽🔽 DELETE found, it's deleted @@ -69,26 +73,29 @@ def test__delete__found(self): model = self.bc.database.create(user=1, credentials_github=1) self.client.force_authenticate(model.user) - url = reverse_lazy('authenticate:github_me') + url = reverse_lazy("authenticate:github_me") response = self.client.delete(url) self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) - self.assertEqual(self.bc.database.list_of('authenticate.CredentialsGithub'), []) + self.assertEqual(self.bc.database.list_of("authenticate.CredentialsGithub"), []) """ 🔽🔽🔽 DELETE found, it's deleted, with Profile, the image keep """ def 
test__delete__found__with_profile__keep_the_image(self): - profile = {'avatar_url': self.bc.fake.url()} + profile = {"avatar_url": self.bc.fake.url()} model = self.bc.database.create(user=1, credentials_github=1, profile=profile) self.client.force_authenticate(model.user) - url = reverse_lazy('authenticate:github_me') + url = reverse_lazy("authenticate:github_me") response = self.client.delete(url) self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) - self.assertEqual(self.bc.database.list_of('authenticate.CredentialsGithub'), []) - self.assertEqual(self.bc.database.list_of('authenticate.Profile'), [ - self.bc.format.to_dict(model.profile), - ]) + self.assertEqual(self.bc.database.list_of("authenticate.CredentialsGithub"), []) + self.assertEqual( + self.bc.database.list_of("authenticate.Profile"), + [ + self.bc.format.to_dict(model.profile), + ], + ) diff --git a/breathecode/authenticate/tests/urls/tests_github_token.py b/breathecode/authenticate/tests/urls/tests_github_token.py index 7cec7a831..4d2eac2b0 100644 --- a/breathecode/authenticate/tests/urls/tests_github_token.py +++ b/breathecode/authenticate/tests/urls/tests_github_token.py @@ -1,11 +1,13 @@ """ Test cases for /user """ + import urllib import os from django.urls.base import reverse_lazy from rest_framework import status from ..mixins.new_auth_test_case import AuthTestCase + # from ..mocks import GithubRequestsMock @@ -14,13 +16,13 @@ class AuthenticateTestSuite(AuthTestCase): def test_github_id_without_url(self): """Test /github without auth""" - url = reverse_lazy('authenticate:github_token', kwargs={'token': None}) - url = urllib.parse.quote(url.encode('utf-8')) + url = reverse_lazy("authenticate:github_token", kwargs={"token": None}) + url = urllib.parse.quote(url.encode("utf-8")) response = self.client.get(url) data = response.data - expected = {'detail': 'no-callback-url', 'status_code': 400} + expected = {"detail": "no-callback-url", "status_code": 400} self.assertEqual(2, len(data)) self.assertEqual(data, expected) @@ -28,32 +30,35 @@ def test_github_id_without_url(self): def test_github_id_with_args_no_invalid_token(self): """Test /github""" - url = reverse_lazy('authenticate:github_token', kwargs={'token': 'asdasd'}) - url = urllib.parse.quote(url.encode('utf-8')) - params = {'url': 'https://google.co.ve'} - response = self.client.get(f'{url}?{urllib.parse.urlencode(params)}') + url = reverse_lazy("authenticate:github_token", kwargs={"token": "asdasd"}) + url = urllib.parse.quote(url.encode("utf-8")) + params = {"url": "https://google.co.ve"} + response = self.client.get(f"{url}?{urllib.parse.urlencode(params)}") json = response.json() - expected = {'detail': 'invalid-token', 'status_code': 400} + expected = {"detail": "invalid-token", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) def test_github_with_auth(self): """Test /github""" - original_url_callback = 'https://google.co.ve' + original_url_callback = "https://google.co.ve" model = self.generate_models(authenticate=True, token=True) token = self.get_token(1) - url = reverse_lazy('authenticate:github_token', kwargs={ - 'token': token, - }) - url = urllib.parse.quote(url.encode('utf-8')) - params = {'url': 'https://google.co.ve'} - response = self.client.get(f'{url}?{urllib.parse.urlencode(params)}') + url = reverse_lazy( + "authenticate:github_token", + kwargs={ + "token": token, + }, + ) + url = urllib.parse.quote(url.encode("utf-8")) + params = {"url": 
"https://google.co.ve"} + response = self.client.get(f"{url}?{urllib.parse.urlencode(params)}") params = { - 'client_id': os.getenv('GITHUB_CLIENT_ID', ''), - 'redirect_uri': os.getenv('GITHUB_REDIRECT_URL', '') + f'?url={original_url_callback}&user={token}', - 'scope': 'user repo read:org', + "client_id": os.getenv("GITHUB_CLIENT_ID", ""), + "redirect_uri": os.getenv("GITHUB_REDIRECT_URL", "") + f"?url={original_url_callback}&user={token}", + "scope": "user repo read:org", } - redirect = f'https://github.com/login/oauth/authorize?{urllib.parse.urlencode(params)}+admin%3Aorg' + redirect = f"https://github.com/login/oauth/authorize?{urllib.parse.urlencode(params)}+admin%3Aorg" self.assertEqual(response.status_code, status.HTTP_302_FOUND) self.assertEqual(response.url, redirect) diff --git a/breathecode/authenticate/tests/urls/tests_invite_resend_id.py b/breathecode/authenticate/tests/urls/tests_invite_resend_id.py index ae034eb4c..cf33406f2 100644 --- a/breathecode/authenticate/tests/urls/tests_invite_resend_id.py +++ b/breathecode/authenticate/tests/urls/tests_invite_resend_id.py @@ -1,6 +1,7 @@ """ This file contains test over AcademyInviteView, if it change, the duck tests will deleted """ + import os import random import re @@ -23,11 +24,11 @@ def post_serializer(self, user_invite, data={}): return { - 'created_at': self.bc.datetime.to_iso_string(user_invite.created_at), - 'email': user_invite.email, - 'id': user_invite.id, - 'sent_at': user_invite.sent_at, - 'status': user_invite.status, + "created_at": self.bc.datetime.to_iso_string(user_invite.created_at), + "email": user_invite.email, + "id": user_invite.id, + "sent_at": user_invite.sent_at, + "status": user_invite.status, **data, } @@ -35,291 +36,348 @@ def post_serializer(self, user_invite, data={}): class AuthenticateTestSuite(AuthTestCase): # When: No invites # Then: Return 404 - @patch('breathecode.notify.actions.send_email_message', MagicMock()) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) def test_not_found(self): - """Test """ - url = reverse_lazy('authenticate:invite_resend_id', kwargs={'invite_id': 1}) + """Test""" + url = reverse_lazy("authenticate:invite_resend_id", kwargs={"invite_id": 1}) response = self.client.put(url) json = response.json() - expected = {'detail': 'user-invite-not-found', 'status_code': 404} + expected = {"detail": "user-invite-not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, 404) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), []) + self.assertEqual(self.bc.database.list_of("authenticate.UserInvite"), []) self.bc.check.calls(actions.send_email_message.call_args_list, []) # Given: 1 UserInvite # When: No email # Then: Return 400 - @patch('breathecode.notify.actions.send_email_message', MagicMock()) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) def test_no_email(self): - """Test """ + """Test""" model = self.bc.database.create(user_invite=1) - url = reverse_lazy('authenticate:invite_resend_id', kwargs={'invite_id': 1}) + url = reverse_lazy("authenticate:invite_resend_id", kwargs={"invite_id": 1}) response = self.client.put(url) json = response.json() - expected = {'detail': 'without-email', 'status_code': 400} + expected = {"detail": "without-email", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, 400) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), [ - self.bc.format.to_dict(model.user_invite), - ]) + 
self.assertEqual( + self.bc.database.list_of("authenticate.UserInvite"), + [ + self.bc.format.to_dict(model.user_invite), + ], + ) self.bc.check.calls(actions.send_email_message.call_args_list, []) # Given: 1 UserInvite # When: email, status PENDING and sent_at is None # Then: Return 200 - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.notify.actions.send_email_message', MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) def test_sent_at_none(self): - """Test """ + """Test""" - user_invite = {'email': self.bc.fake.email()} + user_invite = {"email": self.bc.fake.email()} model = self.bc.database.create(user_invite=user_invite) - url = reverse_lazy('authenticate:invite_resend_id', kwargs={'invite_id': 1}) + url = reverse_lazy("authenticate:invite_resend_id", kwargs={"invite_id": 1}) response = self.client.put(url) json = response.json() - expected = post_serializer(self, model.user_invite, data={ - 'sent_at': self.bc.datetime.to_iso_string(UTC_NOW), - }) + expected = post_serializer( + self, + model.user_invite, + data={ + "sent_at": self.bc.datetime.to_iso_string(UTC_NOW), + }, + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, 200) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), [ - { - **self.bc.format.to_dict(model.user_invite), - 'sent_at': UTC_NOW, - }, - ]) - - self.bc.check.calls(actions.send_email_message.call_args_list, [ - call('welcome_academy', - model.user_invite.email, { - 'email': - model.user_invite.email, - 'subject': - 'Invitation to join 4Geeks', - 'LINK': (os.getenv('API_URL', '') + '/v1/auth/member/invite/' + model.user_invite.token + - '?callback=https%3A%2F%2Fadmin.4geeks.com'), - 'FIST_NAME': - model.user_invite.first_name - }, - academy=None) - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.UserInvite"), + [ + { + **self.bc.format.to_dict(model.user_invite), + "sent_at": UTC_NOW, + }, + ], + ) + + self.bc.check.calls( + actions.send_email_message.call_args_list, + [ + call( + "welcome_academy", + model.user_invite.email, + { + "email": model.user_invite.email, + "subject": "Invitation to join 4Geeks", + "LINK": ( + os.getenv("API_URL", "") + + "/v1/auth/member/invite/" + + model.user_invite.token + + "?callback=https%3A%2F%2Fadmin.4geeks.com" + ), + "FIST_NAME": model.user_invite.first_name, + }, + academy=None, + ) + ], + ) # Given: 1 UserInvite # When: email, status PENDING and sent_at gt 1 day from now # Then: Return 200 - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.notify.actions.send_email_message', MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) def test_sent_at_gt_a_day(self): - """Test """ + """Test""" user_invite = { - 'email': self.bc.fake.email(), - 'sent_at': UTC_NOW - timedelta(days=1, seconds=random.randint(1, 3600 * 24 * 7)), + "email": self.bc.fake.email(), + "sent_at": UTC_NOW - timedelta(days=1, seconds=random.randint(1, 3600 * 24 * 7)), } model = self.bc.database.create(user_invite=user_invite) - url = reverse_lazy('authenticate:invite_resend_id', kwargs={'invite_id': 1}) + url = reverse_lazy("authenticate:invite_resend_id", kwargs={"invite_id": 1}) response = self.client.put(url) json = response.json() - expected = post_serializer(self, model.user_invite, data={ - 'sent_at': 
self.bc.datetime.to_iso_string(UTC_NOW), - }) + expected = post_serializer( + self, + model.user_invite, + data={ + "sent_at": self.bc.datetime.to_iso_string(UTC_NOW), + }, + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, 200) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), [ - { - **self.bc.format.to_dict(model.user_invite), - 'sent_at': UTC_NOW, - }, - ]) - - self.bc.check.calls(actions.send_email_message.call_args_list, [ - call('welcome_academy', - model.user_invite.email, { - 'email': - model.user_invite.email, - 'subject': - 'Invitation to join 4Geeks', - 'LINK': (os.getenv('API_URL', '') + '/v1/auth/member/invite/' + model.user_invite.token + - '?callback=https%3A%2F%2Fadmin.4geeks.com'), - 'FIST_NAME': - model.user_invite.first_name - }, - academy=None) - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.UserInvite"), + [ + { + **self.bc.format.to_dict(model.user_invite), + "sent_at": UTC_NOW, + }, + ], + ) + + self.bc.check.calls( + actions.send_email_message.call_args_list, + [ + call( + "welcome_academy", + model.user_invite.email, + { + "email": model.user_invite.email, + "subject": "Invitation to join 4Geeks", + "LINK": ( + os.getenv("API_URL", "") + + "/v1/auth/member/invite/" + + model.user_invite.token + + "?callback=https%3A%2F%2Fadmin.4geeks.com" + ), + "FIST_NAME": model.user_invite.first_name, + }, + academy=None, + ) + ], + ) # Given: 1 UserInvite # When: email, status PENDING and sent_at lt 1 day from now # Then: Return 400 - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.notify.actions.send_email_message', MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) def test_sent_at_lt_a_day(self): - """Test """ + """Test""" user_invite = { - 'email': self.bc.fake.email(), - 'sent_at': UTC_NOW - timedelta(seconds=random.randint(0, (60 * 10) - 1)), + "email": self.bc.fake.email(), + "sent_at": UTC_NOW - timedelta(seconds=random.randint(0, (60 * 10) - 1)), } model = self.bc.database.create(user_invite=user_invite) - url = reverse_lazy('authenticate:invite_resend_id', kwargs={'invite_id': 1}) + url = reverse_lazy("authenticate:invite_resend_id", kwargs={"invite_id": 1}) response = self.client.put(url) json = response.json() - expected = {'detail': 'sent-at-diff-less-10-minutes', 'status_code': 400} + expected = {"detail": "sent-at-diff-less-10-minutes", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, 400) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), [ - self.bc.format.to_dict(model.user_invite), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.UserInvite"), + [ + self.bc.format.to_dict(model.user_invite), + ], + ) self.bc.check.calls(actions.send_email_message.call_args_list, []) # Given: 1 UserInvite # When: email, invite answered and sent_at is None, email validated # Then: Return 400 - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.notify.actions.send_email_message', MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) def test_invite_answered__email_validated(self): - """Test """ + """Test""" user_invite = { - 'email': self.bc.fake.email(), - 'status': random.choice(['ACCEPTED', 'REJECTED']), - 'is_email_validated': True, + "email": 
self.bc.fake.email(), + "status": random.choice(["ACCEPTED", "REJECTED"]), + "is_email_validated": True, } model = self.bc.database.create(user_invite=user_invite) - url = reverse_lazy('authenticate:invite_resend_id', kwargs={'invite_id': 1}) + url = reverse_lazy("authenticate:invite_resend_id", kwargs={"invite_id": 1}) response = self.client.put(url) json = response.json() - expected = {'detail': f'user-already-{user_invite["status"].lower()}', 'status_code': 400} + expected = {"detail": f'user-already-{user_invite["status"].lower()}', "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, 400) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), [ - self.bc.format.to_dict(model.user_invite), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.UserInvite"), + [ + self.bc.format.to_dict(model.user_invite), + ], + ) self.bc.check.calls(actions.send_email_message.call_args_list, []) # Given: 1 UserInvite # When: email, invite answered and sent_at is None, email not validated # Then: Return 400 - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.notify.actions.send_email_message', MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) def test_invite_answered__email_not_validated(self): - """Test """ + """Test""" user_invite = { - 'email': self.bc.fake.email(), - 'status': random.choice(['ACCEPTED', 'REJECTED']), - 'is_email_validated': False, + "email": self.bc.fake.email(), + "status": random.choice(["ACCEPTED", "REJECTED"]), + "is_email_validated": False, } model = self.bc.database.create(user_invite=user_invite) - url = reverse_lazy('authenticate:invite_resend_id', kwargs={'invite_id': 1}) + url = reverse_lazy("authenticate:invite_resend_id", kwargs={"invite_id": 1}) response = self.client.put(url) json = response.json() - expected = post_serializer(self, model.user_invite, data={ - 'sent_at': self.bc.datetime.to_iso_string(UTC_NOW), - }) + expected = post_serializer( + self, + model.user_invite, + data={ + "sent_at": self.bc.datetime.to_iso_string(UTC_NOW), + }, + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, 200) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), - [{ - **self.bc.format.to_dict(model.user_invite), - 'sent_at': UTC_NOW, - }]) - - self.bc.check.calls(actions.send_email_message.call_args_list, [ - call('verify_email', - model.user_invite.email, { - 'SUBJECT': 'Verify your 4Geeks account', - 'LINK': os.getenv('API_URL', '') + f'/v1/auth/confirmation/{model.user_invite.token}' - }, - academy=None), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.UserInvite"), + [ + { + **self.bc.format.to_dict(model.user_invite), + "sent_at": UTC_NOW, + } + ], + ) + + self.bc.check.calls( + actions.send_email_message.call_args_list, + [ + call( + "verify_email", + model.user_invite.email, + { + "SUBJECT": "Verify your 4Geeks account", + "LINK": os.getenv("API_URL", "") + f"/v1/auth/confirmation/{model.user_invite.token}", + }, + academy=None, + ), + ], + ) # Given: 1 UserInvite # When: email, status is WAITING_LIST and sent_at is None # Then: Return 400 - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.notify.actions.send_email_message', MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.notify.actions.send_email_message", 
MagicMock()) def test_invite_in_waiting_list(self): - """Test """ + """Test""" user_invite = { - 'email': self.bc.fake.email(), - 'status': 'WAITING_LIST', + "email": self.bc.fake.email(), + "status": "WAITING_LIST", } model = self.bc.database.create(user_invite=user_invite) - url = reverse_lazy('authenticate:invite_resend_id', kwargs={'invite_id': 1}) + url = reverse_lazy("authenticate:invite_resend_id", kwargs={"invite_id": 1}) response = self.client.put(url) json = response.json() - expected = {'detail': f'user-already-{user_invite["status"].lower()}', 'status_code': 400} + expected = {"detail": f'user-already-{user_invite["status"].lower()}', "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, 400) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), [ - self.bc.format.to_dict(model.user_invite), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.UserInvite"), + [ + self.bc.format.to_dict(model.user_invite), + ], + ) self.bc.check.calls(actions.send_email_message.call_args_list, []) # Given: 1 UserInvite # When: 3 errors at the same time # Then: Return 400 - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.notify.actions.send_email_message', MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) def test_return_3_errors(self): - """Test """ + """Test""" user_invite = { - 'sent_at': UTC_NOW - timedelta(seconds=random.randint(0, (60 * 10) - 1)), - 'status': random.choice(['ACCEPTED', 'REJECTED']), - 'is_email_validated': True, + "sent_at": UTC_NOW - timedelta(seconds=random.randint(0, (60 * 10) - 1)), + "status": random.choice(["ACCEPTED", "REJECTED"]), + "is_email_validated": True, } model = self.bc.database.create(user_invite=user_invite) - url = reverse_lazy('authenticate:invite_resend_id', kwargs={'invite_id': 1}) + url = reverse_lazy("authenticate:invite_resend_id", kwargs={"invite_id": 1}) response = self.client.put(url) json = response.json() expected = [ { - 'detail': f'user-already-{model.user_invite.status.lower()}', - 'status_code': 400, + "detail": f"user-already-{model.user_invite.status.lower()}", + "status_code": 400, }, { - 'detail': 'without-email', - 'status_code': 400, + "detail": "without-email", + "status_code": 400, }, { - 'detail': 'sent-at-diff-less-10-minutes', - 'status_code': 400, + "detail": "sent-at-diff-less-10-minutes", + "status_code": 400, }, ] self.assertEqual(json, expected) self.assertEqual(response.status_code, 400) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), [ - self.bc.format.to_dict(model.user_invite), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.UserInvite"), + [ + self.bc.format.to_dict(model.user_invite), + ], + ) self.bc.check.calls(actions.send_email_message.call_args_list, []) diff --git a/breathecode/authenticate/tests/urls/tests_login.py b/breathecode/authenticate/tests/urls/tests_login.py index de727b4c7..80b4d08d6 100644 --- a/breathecode/authenticate/tests/urls/tests_login.py +++ b/breathecode/authenticate/tests/urls/tests_login.py @@ -1,6 +1,7 @@ """ Test cases for /user """ + from unittest.mock import MagicMock, call, patch from rest_framework import status from django.urls.base import reverse_lazy @@ -11,145 +12,141 @@ def user_invite_serializer(self, user_invite, academy=None, cohort=None): return { - 'academy': academy, - 'cohort': cohort, - 'created_at': 
self.bc.datetime.to_iso_string(user_invite.created_at), - 'email': user_invite.email, - 'first_name': user_invite.first_name, - 'id': user_invite.id, - 'last_name': user_invite.last_name, - 'role': user_invite.role, - 'sent_at': user_invite.sent_at, - 'status': user_invite.status, - 'token': user_invite.token, + "academy": academy, + "cohort": cohort, + "created_at": self.bc.datetime.to_iso_string(user_invite.created_at), + "email": user_invite.email, + "first_name": user_invite.first_name, + "id": user_invite.id, + "last_name": user_invite.last_name, + "role": user_invite.role, + "sent_at": user_invite.sent_at, + "status": user_invite.status, + "token": user_invite.token, } class AuthenticateTestSuite(AuthTestCase): """Authentication test suite""" - @patch('breathecode.activity.tasks.add_activity.delay', MagicMock()) + @patch("breathecode.activity.tasks.add_activity.delay", MagicMock()) def test_login_with_bad_credentials(self): """Test /login with incorrect credentials""" - url = reverse_lazy('authenticate:login') - data = {'email': 'Konan@naruto.io', 'password': 'Pain!$%'} + url = reverse_lazy("authenticate:login") + data = {"email": "Konan@naruto.io", "password": "Pain!$%"} response = self.client.post(url, data) json = response.json() expected = { - 'non_field_errors': ['Unable to log in with provided credentials.'], - 'status_code': 400, + "non_field_errors": ["Unable to log in with provided credentials."], + "status_code": 400, } self.assertEqual(json, expected) self.assertEqual(status.HTTP_400_BAD_REQUEST, response.status_code) self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, []) - @patch('breathecode.activity.tasks.add_activity.delay', MagicMock()) + @patch("breathecode.activity.tasks.add_activity.delay", MagicMock()) def test_login_without_email(self): """Test /login with incorrect credentials""" - url = reverse_lazy('authenticate:login') - data = {'password': 'Pain!$%'} + url = reverse_lazy("authenticate:login") + data = {"password": "Pain!$%"} response = self.client.post(url, data) json = response.json() expected = { - 'email': ['This field is required.'], - 'status_code': 400, + "email": ["This field is required."], + "status_code": 400, } self.assertEqual(json, expected) self.assertEqual(status.HTTP_400_BAD_REQUEST, response.status_code) self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, []) - @patch('breathecode.activity.tasks.add_activity.delay', MagicMock()) + @patch("breathecode.activity.tasks.add_activity.delay", MagicMock()) def test_login_without_password(self): """Test /login with incorrect credentials""" - url = reverse_lazy('authenticate:login') - data = {'email': 'Konan@naruto.io'} + url = reverse_lazy("authenticate:login") + data = {"email": "Konan@naruto.io"} response = self.client.post(url, data) json = response.json() expected = { - 'password': ['This field is required.'], - 'status_code': 400, + "password": ["This field is required."], + "status_code": 400, } self.assertEqual(json, expected) self.assertEqual(status.HTTP_400_BAD_REQUEST, response.status_code) self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, []) - @patch('breathecode.activity.tasks.add_activity.delay', MagicMock()) + @patch("breathecode.activity.tasks.add_activity.delay", MagicMock()) def test_login_email_not_verified__no_invites(self): """Test /login""" - password = 'Pain!$%' - user = {'email': 'Konan@naruto.io', 'password': make_password(password)} + password = "Pain!$%" + user = {"email": "Konan@naruto.io", "password": 
make_password(password)} model = self.bc.database.create(user=user) - url = reverse_lazy('authenticate:login') - data = {'email': model.user.email.lower(), 'password': password} + url = reverse_lazy("authenticate:login") + data = {"email": model.user.email.lower(), "password": password} response = self.client.post(url, data) json = response.json() - expected = {'detail': 'email-not-validated', 'status_code': 403, 'data': []} + expected = {"detail": "email-not-validated", "status_code": 403, "data": []} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, []) - @patch('breathecode.activity.tasks.add_activity.delay', MagicMock()) + @patch("breathecode.activity.tasks.add_activity.delay", MagicMock()) def test_login_email_not_verified__no_invites(self): """Test /login""" - password = 'Pain!$%' - user = {'email': 'Konan@naruto.io', 'password': make_password(password)} + password = "Pain!$%" + user = {"email": "Konan@naruto.io", "password": make_password(password)} model = self.bc.database.create(user=user) - url = reverse_lazy('authenticate:login') - data = {'email': model.user.email.lower(), 'password': password} + url = reverse_lazy("authenticate:login") + data = {"email": model.user.email.lower(), "password": password} response = self.client.post(url, data) json = response.json() expected = { - 'detail': 'email-not-validated', - 'status_code': 403, - 'silent': True, - 'silent_code': 'email-not-validated', - 'data': [], + "detail": "email-not-validated", + "status_code": 403, + "silent": True, + "silent_code": "email-not-validated", + "data": [], } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, []) - @patch('breathecode.activity.tasks.add_activity.delay', MagicMock()) + @patch("breathecode.activity.tasks.add_activity.delay", MagicMock()) def test_login_email_not_verified__with_invites(self): """Test /login""" - password = 'Pain!$%' - user = {'email': 'Konan@naruto.io', 'password': make_password(password)} - user_invite = {'email': 'Konan@naruto.io', 'status': 'ACCEPTED', 'is_email_validated': False} + password = "Pain!$%" + user = {"email": "Konan@naruto.io", "password": make_password(password)} + user_invite = {"email": "Konan@naruto.io", "status": "ACCEPTED", "is_email_validated": False} model = self.bc.database.create(user=user, user_invite=(2, user_invite)) - url = reverse_lazy('authenticate:login') - data = {'email': model.user.email.lower(), 'password': password} + url = reverse_lazy("authenticate:login") + data = {"email": model.user.email.lower(), "password": password} response = self.client.post(url, data) json = response.json() expected = { - 'detail': - 'email-not-validated', - 'status_code': - 403, - 'silent': - True, - 'silent_code': - 'email-not-validated', - 'data': [ + "detail": "email-not-validated", + "status_code": 403, + "silent": True, + "silent_code": "email-not-validated", + "data": [ user_invite_serializer(self, model.user_invite[1]), user_invite_serializer(self, model.user_invite[0]), ], @@ -159,58 +156,64 @@ def test_login_email_not_verified__with_invites(self): self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, []) - @patch('breathecode.activity.tasks.add_activity.delay', MagicMock()) + @patch("breathecode.activity.tasks.add_activity.delay", 
MagicMock()) def test_login_lowercase_email(self): """Test /login""" - password = 'Pain!$%' - user = {'email': 'Konan@naruto.io', 'password': make_password(password)} - user_invite = {'email': 'Konan@naruto.io', 'status': 'ACCEPTED', 'is_email_validated': True} + password = "Pain!$%" + user = {"email": "Konan@naruto.io", "password": make_password(password)} + user_invite = {"email": "Konan@naruto.io", "status": "ACCEPTED", "is_email_validated": True} model = self.bc.database.create(user=user, user_invite=user_invite) - url = reverse_lazy('authenticate:login') - data = {'email': model.user.email.lower(), 'password': password} + url = reverse_lazy("authenticate:login") + data = {"email": model.user.email.lower(), "password": password} response = self.client.post(url, data) json = response.json() - token = self.bc.database.get('authenticate.Token', 1, dict=False) + token = self.bc.database.get("authenticate.Token", 1, dict=False) expected = { - 'email': model.user.email, - 'expires_at': self.bc.datetime.to_iso_string(token.expires_at), - 'token': token.key, - 'user_id': 1 + "email": model.user.email, + "expires_at": self.bc.datetime.to_iso_string(token.expires_at), + "token": token.key, + "user_id": 1, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'login', related_type='auth.User', related_id=1), - ]) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "login", related_type="auth.User", related_id=1), + ], + ) - @patch('breathecode.activity.tasks.add_activity.delay', MagicMock()) + @patch("breathecode.activity.tasks.add_activity.delay", MagicMock()) def test_login_uppercase_email(self): """Test /login""" - password = 'Pain!$%' - user = {'email': 'Konan@naruto.io', 'password': make_password(password)} - user_invite = {'email': 'Konan@naruto.io', 'status': 'ACCEPTED', 'is_email_validated': True} + password = "Pain!$%" + user = {"email": "Konan@naruto.io", "password": make_password(password)} + user_invite = {"email": "Konan@naruto.io", "status": "ACCEPTED", "is_email_validated": True} model = self.bc.database.create(user=user, user_invite=user_invite) - url = reverse_lazy('authenticate:login') - data = {'email': model.user.email.upper(), 'password': password} + url = reverse_lazy("authenticate:login") + data = {"email": model.user.email.upper(), "password": password} response = self.client.post(url, data) json = response.json() - token = self.bc.database.get('authenticate.Token', 1, dict=False) + token = self.bc.database.get("authenticate.Token", 1, dict=False) expected = { - 'email': model.user.email, - 'expires_at': self.bc.datetime.to_iso_string(token.expires_at), - 'token': token.key, - 'user_id': 1 + "email": model.user.email, + "expires_at": self.bc.datetime.to_iso_string(token.expires_at), + "token": token.key, + "user_id": 1, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'login', related_type='auth.User', related_id=1), - ]) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "login", related_type="auth.User", related_id=1), + ], + ) diff --git a/breathecode/authenticate/tests/urls/tests_logout.py b/breathecode/authenticate/tests/urls/tests_logout.py index cf905d17a..61919454e 100644 --- a/breathecode/authenticate/tests/urls/tests_logout.py +++ 
b/breathecode/authenticate/tests/urls/tests_logout.py @@ -1,6 +1,7 @@ """ Test cases for /user """ + import re from django.urls.base import reverse_lazy from rest_framework import status @@ -14,14 +15,14 @@ def test_logout_without_token(self): """Test /logout without token""" self.create_user() - url = reverse_lazy('authenticate:logout') + url = reverse_lazy("authenticate:logout") response = self.client.get(url) - detail = str(response.data['detail']) - status_code = int(response.data['status_code']) + detail = str(response.data["detail"]) + status_code = int(response.data["status_code"]) self.assertEqual(len(response.data), 2) - self.assertEqual(detail, 'Authentication credentials were not provided.') + self.assertEqual(detail, "Authentication credentials were not provided.") self.assertEqual(status_code, 401) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) diff --git a/breathecode/authenticate/tests/urls/tests_me_app_slug_sync.py b/breathecode/authenticate/tests/urls/tests_me_app_slug_sync.py index 68b4bd928..4dd56a93c 100644 --- a/breathecode/authenticate/tests/urls/tests_me_app_slug_sync.py +++ b/breathecode/authenticate/tests/urls/tests_me_app_slug_sync.py @@ -1,6 +1,7 @@ """ Test /answer """ + import json import random from unittest.mock import AsyncMock, MagicMock, call, patch @@ -57,7 +58,7 @@ def patch_post(monkeypatch): def handler(expected, code, headers): reader = StreamReaderMock(json.dumps(expected).encode()) - monkeypatch.setattr('aiohttp.ClientSession.post', MagicMock(return_value=ResponseMock(reader, code, headers))) + monkeypatch.setattr("aiohttp.ClientSession.post", MagicMock(return_value=ResponseMock(reader, code, headers))) yield handler @@ -65,16 +66,31 @@ def handler(expected, code, headers): @pytest.fixture def get_jwt(bc: Breathecode, monkeypatch): token = bc.random.string(lower=True, upper=True, symbol=True, number=True, size=20) - monkeypatch.setattr('linked_services.django.actions.get_jwt', MagicMock(return_value=token)) + monkeypatch.setattr("linked_services.django.actions.get_jwt", MagicMock(return_value=token)) yield token -@pytest.fixture(params=[ - ('linked_services.core.service.Service.__aenter__', Exception, 'App rigobot not found', 'app-not-found', 404, True), - ('linked_services.core.service.Service.__aenter__', SynchronousOnlyOperation, - 'Async is not supported by the worker', 'no-async-support', 500, True), - ('aiohttp.ClientSession.post', Exception, 'random exc', 'unexpected-error', 500, False), -]) +@pytest.fixture( + params=[ + ( + "linked_services.core.service.Service.__aenter__", + Exception, + "App rigobot not found", + "app-not-found", + 404, + True, + ), + ( + "linked_services.core.service.Service.__aenter__", + SynchronousOnlyOperation, + "Async is not supported by the worker", + "no-async-support", + 500, + True, + ), + ("aiohttp.ClientSession.post", Exception, "random exc", "unexpected-error", 500, False), + ] +) def post_exc(request, monkeypatch): path, exc, message, slug, code, is_async = request.param if is_async: @@ -109,46 +125,46 @@ async def async_exc_mock(message): monkeypatch.setattr(path, ContextMock) yield { - 'slug': slug, - 'code': code, + "slug": slug, + "code": code, } def post_serializer(user, credentials_github=None, profile=None): data = { - 'username': user.username, - 'first_name': user.first_name, - 'last_name': user.last_name, - 'email': user.email, - 'profile': None, - 'credentialsgithub': None, + "username": user.username, + "first_name": user.first_name, + "last_name": user.last_name, + 
"email": user.email, + "profile": None, + "credentialsgithub": None, } if profile: - data['profile'] = { - 'avatar_url': profile.avatar_url, - 'bio': profile.bio, - 'phone': profile.phone, - 'show_tutorial': profile.show_tutorial, - 'twitter_username': profile.twitter_username, - 'github_username': profile.github_username, - 'portfolio_url': profile.portfolio_url, - 'linkedin_url': profile.linkedin_url, - 'blog': profile.blog, + data["profile"] = { + "avatar_url": profile.avatar_url, + "bio": profile.bio, + "phone": profile.phone, + "show_tutorial": profile.show_tutorial, + "twitter_username": profile.twitter_username, + "github_username": profile.github_username, + "portfolio_url": profile.portfolio_url, + "linkedin_url": profile.linkedin_url, + "blog": profile.blog, } if credentials_github: - data['credentialsgithub'] = { - 'github_id': credentials_github.github_id, - 'token': credentials_github.token, - 'email': credentials_github.email, - 'avatar_url': credentials_github.avatar_url, - 'name': credentials_github.name, - 'username': credentials_github.username, - 'blog': credentials_github.blog, - 'bio': credentials_github.bio, - 'company': credentials_github.company, - 'twitter_username': credentials_github.twitter_username, + data["credentialsgithub"] = { + "github_id": credentials_github.github_id, + "token": credentials_github.token, + "email": credentials_github.email, + "avatar_url": credentials_github.avatar_url, + "name": credentials_github.name, + "username": credentials_github.username, + "blog": credentials_github.blog, + "bio": credentials_github.bio, + "company": credentials_github.company, + "twitter_username": credentials_github.twitter_username, } return data @@ -157,15 +173,15 @@ def post_serializer(user, credentials_github=None, profile=None): # When: no auth # Then: response 401 def test_no_auth(bc: Breathecode, client: APIClient): - url = reverse_lazy('authenticate:me_app_slug_sync', kwargs={'app_slug': 'rigobot'}) + url = reverse_lazy("authenticate:me_app_slug_sync", kwargs={"app_slug": "rigobot"}) response = client.post(url) json = response.json() - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} assert json == expected assert response.status_code == status.HTTP_401_UNAUTHORIZED - assert bc.database.list_of('auth.User') == [] + assert bc.database.list_of("auth.User") == [] # When: external app @@ -177,61 +193,65 @@ def test_external_app(bc: Breathecode, client: APIClient): bc.fake.slug(): bc.fake.slug(), } - task = {'github_url': bc.fake.url()} - model = bc.database.create(profile_academy=1, - task=task, - app={ - 'slug': 'rigobot', - 'app_url': bc.fake.url(), - 'require_an_agreement': True, - }) + task = {"github_url": bc.fake.url()} + model = bc.database.create( + profile_academy=1, + task=task, + app={ + "slug": "rigobot", + "app_url": bc.fake.url(), + "require_an_agreement": True, + }, + ) client.force_authenticate(model.user) - url = reverse_lazy('authenticate:me_app_slug_sync', kwargs={'app_slug': 'rigobot'}) - response = client.post(url, query, format='json') + url = reverse_lazy("authenticate:me_app_slug_sync", kwargs={"app_slug": "rigobot"}) + response = client.post(url, query, format="json") - expected = {'detail': 'external-app', 'silent': True, 'silent_code': 'external-app', 'status_code': 400} + expected = {"detail": "external-app", "silent": True, "silent_code": "external-app", "status_code": 400} json = 
response.json() assert json == expected assert response.status_code == 400 - assert bc.database.list_of('auth.User') == [bc.format.to_dict(model.user)] + assert bc.database.list_of("auth.User") == [bc.format.to_dict(model.user)] # When: raise an exception # Then: response 200 def test_raise_an_exception(bc: Breathecode, client: APIClient, post_exc): - expected = {'detail': post_exc['slug'], 'status_code': post_exc['code']} + expected = {"detail": post_exc["slug"], "status_code": post_exc["code"]} query = { bc.fake.slug(): bc.fake.slug(), bc.fake.slug(): bc.fake.slug(), bc.fake.slug(): bc.fake.slug(), } - task = {'github_url': bc.fake.url()} - model = bc.database.create(profile_academy=1, - task=task, - app={ - 'slug': 'rigobot', - 'app_url': bc.fake.url(), - 'require_an_agreement': False, - }) + task = {"github_url": bc.fake.url()} + model = bc.database.create( + profile_academy=1, + task=task, + app={ + "slug": "rigobot", + "app_url": bc.fake.url(), + "require_an_agreement": False, + }, + ) client.force_authenticate(model.user) - url = reverse_lazy('authenticate:me_app_slug_sync', kwargs={'app_slug': 'rigobot'}) + url = reverse_lazy("authenticate:me_app_slug_sync", kwargs={"app_slug": "rigobot"}) - response = client.post(url, query, format='json') + response = client.post(url, query, format="json") json = response.json() assert json == expected - assert response.status_code == post_exc['code'] - assert bc.database.list_of('auth.User') == [bc.format.to_dict(model.user)] + assert response.status_code == post_exc["code"] + assert bc.database.list_of("auth.User") == [bc.format.to_dict(model.user)] # When: auth # Then: response 200 def test_auth(bc: Breathecode, client: APIClient, patch_post, get_jwt): - expected = {'data': {'getTask': {'id': random.randint(1, 100)}}} + expected = {"data": {"getTask": {"id": random.randint(1, 100)}}} query = { bc.fake.slug(): bc.fake.slug(), bc.fake.slug(): bc.fake.slug(), @@ -239,40 +259,44 @@ def test_auth(bc: Breathecode, client: APIClient, patch_post, get_jwt): } code = random.randint(200, 299) - headers = {'Content-Type': 'application/json'} + headers = {"Content-Type": "application/json"} patch_post(expected, code, headers) - task = {'github_url': bc.fake.url()} - model = bc.database.create(profile_academy=1, - task=task, - app={ - 'slug': 'rigobot', - 'app_url': bc.fake.url(), - 'require_an_agreement': False, - }) + task = {"github_url": bc.fake.url()} + model = bc.database.create( + profile_academy=1, + task=task, + app={ + "slug": "rigobot", + "app_url": bc.fake.url(), + "require_an_agreement": False, + }, + ) client.force_authenticate(model.user) - url = reverse_lazy('authenticate:me_app_slug_sync', kwargs={'app_slug': 'rigobot'}) + url = reverse_lazy("authenticate:me_app_slug_sync", kwargs={"app_slug": "rigobot"}) - response = client.post(url, query, format='json') + response = client.post(url, query, format="json") assert aiohttp.ClientSession.post.call_args_list == [ - call(f'{model.app.app_url}/v1/auth/app/user', - json=None, - data=post_serializer(model.user), - headers={'Authorization': f'Link App=breathecode,Token={get_jwt}'}) + call( + f"{model.app.app_url}/v1/auth/app/user", + json=None, + data=post_serializer(model.user), + headers={"Authorization": f"Link App=breathecode,Token={get_jwt}"}, + ) ] - assert response.getvalue().decode('utf-8') == json.dumps(expected) + assert response.getvalue().decode("utf-8") == json.dumps(expected) assert response.status_code == code - assert bc.database.list_of('auth.User') == 
[bc.format.to_dict(model.user)] + assert bc.database.list_of("auth.User") == [bc.format.to_dict(model.user)] # When: auth, with profile # Then: response 200 def test_auth__with_profile(bc: Breathecode, client: APIClient, patch_post, get_jwt): - expected = {'data': {'getTask': {'id': random.randint(1, 100)}}} + expected = {"data": {"getTask": {"id": random.randint(1, 100)}}} query = { bc.fake.slug(): bc.fake.slug(), bc.fake.slug(): bc.fake.slug(), @@ -280,41 +304,45 @@ def test_auth__with_profile(bc: Breathecode, client: APIClient, patch_post, get_ } code = random.randint(200, 299) - headers = {'Content-Type': 'application/json'} + headers = {"Content-Type": "application/json"} patch_post(expected, code, headers) - task = {'github_url': bc.fake.url()} - model = bc.database.create(profile_academy=1, - profile=1, - task=task, - app={ - 'slug': 'rigobot', - 'app_url': bc.fake.url(), - 'require_an_agreement': False, - }) + task = {"github_url": bc.fake.url()} + model = bc.database.create( + profile_academy=1, + profile=1, + task=task, + app={ + "slug": "rigobot", + "app_url": bc.fake.url(), + "require_an_agreement": False, + }, + ) client.force_authenticate(model.user) - url = reverse_lazy('authenticate:me_app_slug_sync', kwargs={'app_slug': 'rigobot'}) + url = reverse_lazy("authenticate:me_app_slug_sync", kwargs={"app_slug": "rigobot"}) - response = client.post(url, query, format='json') + response = client.post(url, query, format="json") assert aiohttp.ClientSession.post.call_args_list == [ - call(f'{model.app.app_url}/v1/auth/app/user', - json=None, - data=post_serializer(model.user, profile=model.profile), - headers={'Authorization': f'Link App=breathecode,Token={get_jwt}'}) + call( + f"{model.app.app_url}/v1/auth/app/user", + json=None, + data=post_serializer(model.user, profile=model.profile), + headers={"Authorization": f"Link App=breathecode,Token={get_jwt}"}, + ) ] - assert response.getvalue().decode('utf-8') == json.dumps(expected) + assert response.getvalue().decode("utf-8") == json.dumps(expected) assert response.status_code == code - assert bc.database.list_of('auth.User') == [bc.format.to_dict(model.user)] + assert bc.database.list_of("auth.User") == [bc.format.to_dict(model.user)] # When: auth, with credentials github # Then: response 200 def test_auth__with_credentials_github(bc: Breathecode, client: APIClient, patch_post, get_jwt): - expected = {'data': {'getTask': {'id': random.randint(1, 100)}}} + expected = {"data": {"getTask": {"id": random.randint(1, 100)}}} query = { bc.fake.slug(): bc.fake.slug(), bc.fake.slug(): bc.fake.slug(), @@ -322,32 +350,36 @@ def test_auth__with_credentials_github(bc: Breathecode, client: APIClient, patch } code = random.randint(200, 299) - headers = {'Content-Type': 'application/json'} + headers = {"Content-Type": "application/json"} patch_post(expected, code, headers) - task = {'github_url': bc.fake.url()} - model = bc.database.create(profile_academy=1, - credentials_github=1, - task=task, - app={ - 'slug': 'rigobot', - 'app_url': bc.fake.url(), - 'require_an_agreement': False, - }) + task = {"github_url": bc.fake.url()} + model = bc.database.create( + profile_academy=1, + credentials_github=1, + task=task, + app={ + "slug": "rigobot", + "app_url": bc.fake.url(), + "require_an_agreement": False, + }, + ) client.force_authenticate(model.user) - url = reverse_lazy('authenticate:me_app_slug_sync', kwargs={'app_slug': 'rigobot'}) + url = reverse_lazy("authenticate:me_app_slug_sync", kwargs={"app_slug": "rigobot"}) - response = client.post(url, 
query, format='json') + response = client.post(url, query, format="json") assert aiohttp.ClientSession.post.call_args_list == [ - call(f'{model.app.app_url}/v1/auth/app/user', - json=None, - data=post_serializer(model.user, credentials_github=model.credentials_github), - headers={'Authorization': f'Link App=breathecode,Token={get_jwt}'}) + call( + f"{model.app.app_url}/v1/auth/app/user", + json=None, + data=post_serializer(model.user, credentials_github=model.credentials_github), + headers={"Authorization": f"Link App=breathecode,Token={get_jwt}"}, + ) ] - assert response.getvalue().decode('utf-8') == json.dumps(expected) + assert response.getvalue().decode("utf-8") == json.dumps(expected) assert response.status_code == code - assert bc.database.list_of('auth.User') == [bc.format.to_dict(model.user)] + assert bc.database.list_of("auth.User") == [bc.format.to_dict(model.user)] diff --git a/breathecode/authenticate/tests/urls/tests_member_invite.py b/breathecode/authenticate/tests/urls/tests_member_invite.py index 14912031f..49768c0ef 100644 --- a/breathecode/authenticate/tests/urls/tests_member_invite.py +++ b/breathecode/authenticate/tests/urls/tests_member_invite.py @@ -1,6 +1,7 @@ """ Test cases for /academy/:id/member/:id """ + import os import urllib.parse @@ -14,87 +15,101 @@ # IMPORTANT: the loader.render_to_string in a function is inside of function render def render_page_without_invites(): request = None - APP_URL = os.getenv('APP_URL', '')[:-1] + APP_URL = os.getenv("APP_URL", "")[:-1] return loader.render_to_string( - 'message.html', { - 'MESSAGE': f'You don\'t have any more pending invites', - 'BUTTON': 'Continue to 4Geeks', - 'BUTTON_TARGET': '_blank', - 'LINK': APP_URL - }, request) + "message.html", + { + "MESSAGE": f"You don't have any more pending invites", + "BUTTON": "Continue to 4Geeks", + "BUTTON_TARGET": "_blank", + "LINK": APP_URL, + }, + request, + ) def render_page_with_pending_invites(self, model): request = None - APP_URL = os.getenv('APP_URL', '')[:-1] + APP_URL = os.getenv("APP_URL", "")[:-1] user_invites = [] - if 'user_invite' in model: + if "user_invite" in model: user_invites = model.user_invite if isinstance(model.user_invite, list) else [model.user_invite] # excluding the accepted invited - user_invites = [x for x in user_invites if x.status != 'ACCEPTED'] + user_invites = [x for x in user_invites if x.status != "ACCEPTED"] - querystr = urllib.parse.urlencode({'callback': APP_URL, 'token': model.token.key}) - url = os.getenv('API_URL') + '/v1/auth/member/invite?' + querystr + querystr = urllib.parse.urlencode({"callback": APP_URL, "token": model.token.key}) + url = os.getenv("API_URL") + "/v1/auth/member/invite?" 
+ querystr return loader.render_to_string( - 'user_invite.html', { - 'subject': - f'Invitation to study at 4Geeks.com', - 'invites': [{ - 'id': user_invite.id, - 'academy': { - 'id': user_invite.academy.id, - 'name': user_invite.academy.name, - 'slug': user_invite.academy.slug, - 'timezone': user_invite.academy.timezone, - } if user_invite.academy else None, - 'cohort': { - 'id': - user_invite.cohort.id, - 'name': - user_invite.cohort.name, - 'slug': - user_invite.cohort.slug, - 'ending_date': - self.bc.datetime.to_iso_string(user_invite.cohort.ending_date) - if user_invite.cohort.ending_date else None, - 'stage': - user_invite.cohort.stage, - } if user_invite.cohort else None, - 'role': user_invite.role.slug if user_invite.role else None, - 'created_at': user_invite.created_at, - } for user_invite in user_invites], - 'LINK': - url, - 'user': { - 'id': model.user.id, - 'email': model.user.email, - 'first_name': model.user.first_name, - } - }, request) + "user_invite.html", + { + "subject": f"Invitation to study at 4Geeks.com", + "invites": [ + { + "id": user_invite.id, + "academy": ( + { + "id": user_invite.academy.id, + "name": user_invite.academy.name, + "slug": user_invite.academy.slug, + "timezone": user_invite.academy.timezone, + } + if user_invite.academy + else None + ), + "cohort": ( + { + "id": user_invite.cohort.id, + "name": user_invite.cohort.name, + "slug": user_invite.cohort.slug, + "ending_date": ( + self.bc.datetime.to_iso_string(user_invite.cohort.ending_date) + if user_invite.cohort.ending_date + else None + ), + "stage": user_invite.cohort.stage, + } + if user_invite.cohort + else None + ), + "role": user_invite.role.slug if user_invite.role else None, + "created_at": user_invite.created_at, + } + for user_invite in user_invites + ], + "LINK": url, + "user": { + "id": model.user.id, + "email": model.user.email, + "first_name": model.user.first_name, + }, + }, + request, + ) class AuthenticateTestSuite(AuthTestCase): """Authentication test suite""" + """ 🔽🔽🔽 Auth """ def test_member_invite__without_auth(self): - url = reverse_lazy('authenticate:member_invite') + url = reverse_lazy("authenticate:member_invite") response = self.client.get(url) - hash = self.bc.format.to_base64('/v1/auth/member/invite') + hash = self.bc.format.to_base64("/v1/auth/member/invite") content = self.bc.format.from_bytes(response.content) - expected = '' + expected = "" self.assertEqual(content, expected) - self.assertEqual(response.url, f'/v1/auth/view/login?attempt=1&url={hash}') + self.assertEqual(response.url, f"/v1/auth/view/login?attempt=1&url={hash}") self.assertEqual(response.status_code, status.HTTP_302_FOUND) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), []) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), []) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), []) + self.assertEqual(self.bc.database.list_of("authenticate.UserInvite"), []) + self.assertEqual(self.bc.database.list_of("authenticate.ProfileAcademy"), []) + self.assertEqual(self.bc.database.list_of("admissions.CohortUser"), []) """ 🔽🔽🔽 GET without UserInvite @@ -103,8 +118,8 @@ def test_member_invite__without_auth(self): def test_member_invite__without_profile_academy(self): model = self.bc.database.create(user=1, token=1) - querystring = self.bc.format.to_querystring({'token': model.token.key}) - url = reverse_lazy('authenticate:member_invite') + f'?{querystring}' + querystring = self.bc.format.to_querystring({"token": model.token.key}) + url = 
reverse_lazy("authenticate:member_invite") + f"?{querystring}" response = self.client.get(url) content = self.bc.format.from_bytes(response.content) @@ -112,29 +127,29 @@ def test_member_invite__without_profile_academy(self): # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), []) + self.assertEqual(self.bc.database.list_of("authenticate.UserInvite"), []) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), []) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), []) + self.assertEqual(self.bc.database.list_of("authenticate.ProfileAcademy"), []) + self.assertEqual(self.bc.database.list_of("admissions.CohortUser"), []) """ 🔽🔽🔽 GET with one UserInvite """ def test_member_invite__with_one_profile_academy(self): - user = {'email': 'dr-goku@yt.com'} + user = {"email": "dr-goku@yt.com"} model = self.bc.database.create(user=user, token=1, user_invite=user) - querystring = self.bc.format.to_querystring({'token': model.token.key}) - url = reverse_lazy('authenticate:member_invite') + f'?{querystring}' + querystring = self.bc.format.to_querystring({"token": model.token.key}) + url = reverse_lazy("authenticate:member_invite") + f"?{querystring}" response = self.client.get(url) content = self.bc.format.from_bytes(response.content) @@ -142,31 +157,34 @@ def test_member_invite__with_one_profile_academy(self): # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), [ - self.bc.format.to_dict(model.user_invite), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.UserInvite"), + [ + self.bc.format.to_dict(model.user_invite), + ], + ) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), []) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), []) + self.assertEqual(self.bc.database.list_of("authenticate.ProfileAcademy"), []) + self.assertEqual(self.bc.database.list_of("admissions.CohortUser"), []) """ 🔽🔽🔽 GET with two UserInvite without Academy, Cohort and Role """ def test_member_invite__with_two_profile_academy(self): - user = {'email': 'dr-goku@yt.com'} + user = {"email": "dr-goku@yt.com"} model = self.bc.database.create(user=user, token=1, user_invite=(2, user)) - querystring = self.bc.format.to_querystring({'token': model.token.key}) - url = reverse_lazy('authenticate:member_invite') + f'?{querystring}' + querystring = self.bc.format.to_querystring({"token": model.token.key}) + url = reverse_lazy("authenticate:member_invite") + f"?{querystring}" response = self.client.get(url) content = self.bc.format.from_bytes(response.content) @@ -174,29 +192,29 @@ def test_member_invite__with_two_profile_academy(self): # dump error in external files if content != expected or 1: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + 
with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), self.bc.format.to_dict(model.user_invite)) + self.assertEqual(self.bc.database.list_of("authenticate.UserInvite"), self.bc.format.to_dict(model.user_invite)) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), []) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), []) + self.assertEqual(self.bc.database.list_of("authenticate.ProfileAcademy"), []) + self.assertEqual(self.bc.database.list_of("admissions.CohortUser"), []) """ 🔽🔽🔽 GET with two UserInvite with Academy and Role, without Cohort """ def test_member_invite__with_two_profile_academy__with_academy_and_role(self): - user = {'email': 'dr-goku@yt.com'} + user = {"email": "dr-goku@yt.com"} model = self.bc.database.create(user=user, token=1, user_invite=(2, user), academy=1, role=1) - querystring = self.bc.format.to_querystring({'token': model.token.key}) - url = reverse_lazy('authenticate:member_invite') + f'?{querystring}' + querystring = self.bc.format.to_querystring({"token": model.token.key}) + url = reverse_lazy("authenticate:member_invite") + f"?{querystring}" response = self.client.get(url) content = self.bc.format.from_bytes(response.content) @@ -204,29 +222,29 @@ def test_member_invite__with_two_profile_academy__with_academy_and_role(self): # dump error in external files if content != expected or 1: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), self.bc.format.to_dict(model.user_invite)) + self.assertEqual(self.bc.database.list_of("authenticate.UserInvite"), self.bc.format.to_dict(model.user_invite)) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), []) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), []) + self.assertEqual(self.bc.database.list_of("authenticate.ProfileAcademy"), []) + self.assertEqual(self.bc.database.list_of("admissions.CohortUser"), []) """ 🔽🔽🔽 GET with two UserInvite without Academy and Role, with Cohort """ def test_member_invite__with_two_profile_academy__with_cohort(self): - user = {'email': 'dr-goku@yt.com'} + user = {"email": "dr-goku@yt.com"} model = self.bc.database.create(user=user, token=1, user_invite=(2, user), cohort=1) - querystring = self.bc.format.to_querystring({'token': model.token.key}) - url = reverse_lazy('authenticate:member_invite') + f'?{querystring}' + querystring = self.bc.format.to_querystring({"token": model.token.key}) + url = reverse_lazy("authenticate:member_invite") + f"?{querystring}" response = self.client.get(url) content = self.bc.format.from_bytes(response.content) @@ -234,29 +252,29 @@ def test_member_invite__with_two_profile_academy__with_cohort(self): # dump error in external files if content != expected or 1: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - 
self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), self.bc.format.to_dict(model.user_invite)) + self.assertEqual(self.bc.database.list_of("authenticate.UserInvite"), self.bc.format.to_dict(model.user_invite)) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), []) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), []) + self.assertEqual(self.bc.database.list_of("authenticate.ProfileAcademy"), []) + self.assertEqual(self.bc.database.list_of("admissions.CohortUser"), []) """ 🔽🔽🔽 GET with two UserInvite, accepting both """ def test_member_invite__with_two_profile_academy__accepting_both(self): - user = {'email': 'dr-goku@yt.com'} + user = {"email": "dr-goku@yt.com"} model = self.bc.database.create(user=user, token=1, user_invite=(2, user)) - querystring = self.bc.format.to_querystring({'token': model.token.key, 'accepting': '1,2'}) - url = reverse_lazy('authenticate:member_invite') + f'?{querystring}' + querystring = self.bc.format.to_querystring({"token": model.token.key, "accepting": "1,2"}) + url = reverse_lazy("authenticate:member_invite") + f"?{querystring}" response = self.client.get(url) content = self.bc.format.from_bytes(response.content) @@ -264,34 +282,39 @@ def test_member_invite__with_two_profile_academy__accepting_both(self): # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), - [{ - **self.bc.format.to_dict(user_invite), - 'status': 'ACCEPTED', - 'user_id': 1, - } for user_invite in model.user_invite]) - - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), []) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), []) + self.assertEqual( + self.bc.database.list_of("authenticate.UserInvite"), + [ + { + **self.bc.format.to_dict(user_invite), + "status": "ACCEPTED", + "user_id": 1, + } + for user_invite in model.user_invite + ], + ) + + self.assertEqual(self.bc.database.list_of("authenticate.ProfileAcademy"), []) + self.assertEqual(self.bc.database.list_of("admissions.CohortUser"), []) """ 🔽🔽🔽 GET with two UserInvite, rejecting both """ def test_member_invite__with_two_profile_academy__rejecting_both(self): - user = {'email': 'dr-goku@yt.com'} + user = {"email": "dr-goku@yt.com"} model = self.bc.database.create(user=user, token=1, user_invite=(2, user)) - querystring = self.bc.format.to_querystring({'token': model.token.key, 'rejecting': '1,2'}) - url = reverse_lazy('authenticate:member_invite') + f'?{querystring}' + querystring = self.bc.format.to_querystring({"token": model.token.key, "rejecting": "1,2"}) + url = reverse_lazy("authenticate:member_invite") + f"?{querystring}" response = self.client.get(url) content = self.bc.format.from_bytes(response.content) @@ -299,19 +322,24 @@ def test_member_invite__with_two_profile_academy__rejecting_both(self): # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - 
self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), - [{ - **self.bc.format.to_dict(user_invite), - 'status': 'REJECTED', - } for user_invite in model.user_invite]) - - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), []) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), []) + self.assertEqual( + self.bc.database.list_of("authenticate.UserInvite"), + [ + { + **self.bc.format.to_dict(user_invite), + "status": "REJECTED", + } + for user_invite in model.user_invite + ], + ) + + self.assertEqual(self.bc.database.list_of("authenticate.ProfileAcademy"), []) + self.assertEqual(self.bc.database.list_of("admissions.CohortUser"), []) diff --git a/breathecode/authenticate/tests/urls/tests_member_invite_resend_id.py b/breathecode/authenticate/tests/urls/tests_member_invite_resend_id.py index b704990fa..3158d907e 100644 --- a/breathecode/authenticate/tests/urls/tests_member_invite_resend_id.py +++ b/breathecode/authenticate/tests/urls/tests_member_invite_resend_id.py @@ -1,6 +1,7 @@ """ This file contains test over AcademyInviteView, if it change, the duck tests will deleted """ + import os import re from datetime import timedelta @@ -17,38 +18,38 @@ from ..mixins.new_auth_test_case import AuthTestCase -@capable_of('invite_resend') +@capable_of("invite_resend") def view_method_mock(request, *args, **kwargs): - response = {'args': args, 'kwargs': kwargs} + response = {"args": args, "kwargs": kwargs} return Response(response, status=200) def generate_user_invite(self, model, user_invite, arguments={}): return { - 'academy': { - 'id': model.academy.id, - 'name': model.academy.name, - 'slug': model.academy.slug, - 'logo_url': model.academy.logo_url, + "academy": { + "id": model.academy.id, + "name": model.academy.name, + "slug": model.academy.slug, + "logo_url": model.academy.logo_url, }, - 'cohort': { - 'name': model.cohort.name, - 'slug': model.cohort.slug, + "cohort": { + "name": model.cohort.name, + "slug": model.cohort.slug, }, - 'created_at': self.bc.datetime.to_iso_string(user_invite.created_at), - 'email': user_invite.email, - 'first_name': user_invite.first_name, - 'id': user_invite.id, - 'invite_url': f'http://localhost:8000/v1/auth/member/invite/{user_invite.token}', - 'last_name': user_invite.last_name, - 'role': { - 'id': model.role.slug, - 'name': model.role.name, - 'slug': model.role.slug, + "created_at": self.bc.datetime.to_iso_string(user_invite.created_at), + "email": user_invite.email, + "first_name": user_invite.first_name, + "id": user_invite.id, + "invite_url": f"http://localhost:8000/v1/auth/member/invite/{user_invite.token}", + "last_name": user_invite.last_name, + "role": { + "id": model.role.slug, + "name": model.role.name, + "slug": model.role.slug, }, - 'sent_at': user_invite.sent_at, - 'status': user_invite.status, - 'token': user_invite.token, + "sent_at": user_invite.sent_at, + "status": user_invite.status, + "token": user_invite.token, **arguments, } @@ -61,40 +62,46 @@ class MemberGetDuckTestSuite(AuthTestCase): def test_duck_test__without_auth(self): """Test /academy/:id/member without auth""" - url = reverse_lazy('authenticate:member_invite_resend_id', kwargs={'invite_id': 1}) + url = reverse_lazy("authenticate:member_invite_resend_id", kwargs={"invite_id": 1}) response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED, - }) + self.assertEqual( + json, + { + "detail": 
"Authentication credentials were not provided.", + "status_code": status.HTTP_401_UNAUTHORIZED, + }, + ) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) def test_duck_test__without_capabilities(self): self.bc.request.set_headers(academy=1) model = self.bc.database.create(authenticate=True) - url = reverse_lazy('authenticate:member_invite_resend_id', kwargs={'invite_id': 1}) + url = reverse_lazy("authenticate:member_invite_resend_id", kwargs={"invite_id": 1}) response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': "You (user: 1) don't have this capability: read_invite for academy 1", - 'status_code': 403, - }) + self.assertEqual( + json, + { + "detail": "You (user: 1) don't have this capability: read_invite for academy 1", + "status_code": 403, + }, + ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) self.assertEqual(self.all_cohort_time_slot_dict(), []) def test_duck_test__with_auth(self): for n in range(1, 4): self.bc.request.set_headers(academy=n) - model = self.bc.database.create(authenticate=True, capability='read_invite', role='role', profile_academy=1) + model = self.bc.database.create(authenticate=True, capability="read_invite", role="role", profile_academy=1) - url = reverse_lazy('authenticate:member_invite_resend_id', kwargs={'invite_id': 1}) + url = reverse_lazy("authenticate:member_invite_resend_id", kwargs={"invite_id": 1}) response = self.client.get(url) json = response.json() - expected = {'detail': 'user-invite-not-found', 'status_code': 404} + expected = {"detail": "user-invite-not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) @@ -103,23 +110,23 @@ def test_duck_test__with_auth(self): 🔽🔽🔽 Check the param is being passed """ - @patch('breathecode.authenticate.views.AcademyInviteView.get', MagicMock(side_effect=view_method_mock)) + @patch("breathecode.authenticate.views.AcademyInviteView.get", MagicMock(side_effect=view_method_mock)) def test_duck_test__with_auth___mock_view(self): - model = self.bc.database.create(academy=3, - capability='invite_resend', - role='role', - profile_academy=[{ - 'academy_id': id - } for id in range(1, 4)]) + model = self.bc.database.create( + academy=3, + capability="invite_resend", + role="role", + profile_academy=[{"academy_id": id} for id in range(1, 4)], + ) for n in range(1, 4): self.client.force_authenticate(model.user) self.bc.request.set_headers(academy=1) - url = reverse_lazy('authenticate:member_invite_resend_id', kwargs={'invite_id': n}) + url = reverse_lazy("authenticate:member_invite_resend_id", kwargs={"invite_id": n}) response = self.client.get(url) json = response.json() - expected = {'args': [], 'kwargs': {'academy_id': '1', 'invite_id': n}} + expected = {"args": [], "kwargs": {"academy_id": "1", "invite_id": n}} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -133,43 +140,48 @@ class MemberPutDuckTestSuite(AuthTestCase): def test_duck_test__without_auth(self): """Test /academy/:id/member without auth""" - url = reverse_lazy('authenticate:member_invite_resend_id', kwargs={'invite_id': 1}) + url = reverse_lazy("authenticate:member_invite_resend_id", kwargs={"invite_id": 1}) response = self.client.put(url) json = response.json() - self.assertEqual(json, { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED, - }) + self.assertEqual( + json, + { + "detail": "Authentication 
credentials were not provided.", + "status_code": status.HTTP_401_UNAUTHORIZED, + }, + ) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) def test_duck_test__without_capabilities(self): self.bc.request.set_headers(academy=1) model = self.bc.database.create(authenticate=True) - url = reverse_lazy('authenticate:member_invite_resend_id', kwargs={'invite_id': 1}) + url = reverse_lazy("authenticate:member_invite_resend_id", kwargs={"invite_id": 1}) response = self.client.put(url) json = response.json() - self.assertEqual(json, { - 'detail': "You (user: 1) don't have this capability: invite_resend for academy 1", - 'status_code': 403, - }) + self.assertEqual( + json, + { + "detail": "You (user: 1) don't have this capability: invite_resend for academy 1", + "status_code": 403, + }, + ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) self.assertEqual(self.all_cohort_time_slot_dict(), []) def test_duck_test__with_auth(self): for n in range(1, 4): self.bc.request.set_headers(academy=n) - model = self.bc.database.create(authenticate=True, - capability='invite_resend', - role='role', - profile_academy=1) + model = self.bc.database.create( + authenticate=True, capability="invite_resend", role="role", profile_academy=1 + ) - url = reverse_lazy('authenticate:member_invite_resend_id', kwargs={'invite_id': 1}) + url = reverse_lazy("authenticate:member_invite_resend_id", kwargs={"invite_id": 1}) response = self.client.put(url) json = response.json() - expected = {'detail': 'user-invite-not-found', 'status_code': 404} + expected = {"detail": "user-invite-not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) @@ -178,23 +190,23 @@ def test_duck_test__with_auth(self): 🔽🔽🔽 Check the param is being passed """ - @patch('breathecode.authenticate.views.AcademyInviteView.put', MagicMock(side_effect=view_method_mock)) + @patch("breathecode.authenticate.views.AcademyInviteView.put", MagicMock(side_effect=view_method_mock)) def test_duck_test__with_auth___mock_view(self): - model = self.bc.database.create(academy=3, - capability='invite_resend', - role='role', - profile_academy=[{ - 'academy_id': id - } for id in range(1, 4)]) + model = self.bc.database.create( + academy=3, + capability="invite_resend", + role="role", + profile_academy=[{"academy_id": id} for id in range(1, 4)], + ) for n in range(1, 4): self.client.force_authenticate(model.user) self.bc.request.set_headers(academy=1) - url = reverse_lazy('authenticate:member_invite_resend_id', kwargs={'invite_id': n}) + url = reverse_lazy("authenticate:member_invite_resend_id", kwargs={"invite_id": n}) response = self.client.put(url) json = response.json() - expected = {'args': [], 'kwargs': {'academy_id': '1', 'invite_id': n}} + expected = {"args": [], "kwargs": {"academy_id": "1", "invite_id": n}} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -202,34 +214,34 @@ def test_duck_test__with_auth___mock_view(self): class AuthenticateTestSuite(AuthTestCase): """Authentication test suite""" + """ 🔽🔽🔽 Auth """ def test_resend_invite__no_auth(self): - """Test """ + """Test""" self.headers(academy=1) - url = reverse_lazy('authenticate:member_invite_resend_id', kwargs={'invite_id': 1}) + url = reverse_lazy("authenticate:member_invite_resend_id", kwargs={"invite_id": 1}) response = self.client.put(url) json = response.json() - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + 
expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} self.assertEqual(json, expected) self.assertEqual(response.status_code, 401) def test_resend_invite__no_capability(self): - """Test """ + """Test""" self.headers(academy=1) model = self.generate_models(authenticate=True, profile_academy=True, syllabus=True) - url = reverse_lazy('authenticate:member_invite_resend_id', kwargs={'invite_id': 1}) + url = reverse_lazy("authenticate:member_invite_resend_id", kwargs={"invite_id": 1}) response = self.client.put(url) json = response.json() expected = { - 'detail': "You (user: 1) don't have this capability: invite_resend for " - 'academy 1', - 'status_code': 403 + "detail": "You (user: 1) don't have this capability: invite_resend for " "academy 1", + "status_code": 403, } self.assertEqual(json, expected) self.assertEqual(response.status_code, 403) @@ -239,17 +251,17 @@ def test_resend_invite__no_capability(self): """ def test_resend_invite__get__with_capability(self): - """Test """ + """Test""" self.headers(academy=1) model = self.generate_models(authenticate=True, syllabus=True) - url = reverse_lazy('authenticate:member_invite_resend_id', kwargs={'invite_id': 1}) + url = reverse_lazy("authenticate:member_invite_resend_id", kwargs={"invite_id": 1}) response = self.client.get(url) json = response.json() expected = { - 'detail': "You (user: 1) don't have this capability: read_invite for academy 1", - 'status_code': 403, + "detail": "You (user: 1) don't have this capability: read_invite for academy 1", + "status_code": 403, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) @@ -259,19 +271,17 @@ def test_resend_invite__get__with_capability(self): """ def test_resend_invite__get__without_data(self): - """Test """ + """Test""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_invite', - role='potato', - syllabus=True) + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_invite", role="potato", syllabus=True + ) - url = reverse_lazy('authenticate:member_invite_resend_id', kwargs={'invite_id': 1}) + url = reverse_lazy("authenticate:member_invite_resend_id", kwargs={"invite_id": 1}) response = self.client.get(url) json = response.json() - expected = {'detail': 'user-invite-not-found', 'status_code': 404} + expected = {"detail": "user-invite-not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, 404) @@ -280,16 +290,13 @@ def test_resend_invite__get__without_data(self): """ def test_resend_invite__put__with_data(self): - """Test """ + """Test""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=1, - capability='read_invite', - role='potato', - user_invite=1, - syllabus=1) - - url = reverse_lazy('authenticate:member_invite_resend_id', kwargs={'invite_id': 1}) + model = self.generate_models( + authenticate=True, profile_academy=1, capability="read_invite", role="potato", user_invite=1, syllabus=1 + ) + + url = reverse_lazy("authenticate:member_invite_resend_id", kwargs={"invite_id": 1}) response = self.client.get(url) json = response.json() @@ -303,147 +310,158 @@ def test_resend_invite__put__with_data(self): """ def test_resend_invite__put__with_capability(self): - """Test """ + """Test""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='invite_resend', - 
role='potato', - syllabus=True) + model = self.generate_models( + authenticate=True, profile_academy=True, capability="invite_resend", role="potato", syllabus=True + ) - url = reverse_lazy('authenticate:member_invite_resend_id', kwargs={'invite_id': 1359}) + url = reverse_lazy("authenticate:member_invite_resend_id", kwargs={"invite_id": 1359}) response = self.client.put(url) json = response.json() - expected = {'detail': 'user-invite-not-found', 'status_code': 404} + expected = {"detail": "user-invite-not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, 404) - @patch('breathecode.notify.actions.send_email_message', MagicMock()) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) def test_resend_invite_no_invitation(self): - """Test """ + """Test""" self.headers(academy=1) model = self.generate_models( authenticate=True, profile_academy=True, - capability='invite_resend', + capability="invite_resend", # user_invite=1, - role='potato', - syllabus=True) + role="potato", + syllabus=True, + ) - url = reverse_lazy('authenticate:member_invite_resend_id', kwargs={'invite_id': 1}) + url = reverse_lazy("authenticate:member_invite_resend_id", kwargs={"invite_id": 1}) response = self.client.put(url) json = response.json() - expected = {'detail': 'user-invite-not-found', 'status_code': 404} + expected = {"detail": "user-invite-not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, 404) - all_user_invite = [x for x in self.all_user_invite_dict() if x.pop('sent_at')] + all_user_invite = [x for x in self.all_user_invite_dict() if x.pop("sent_at")] self.assertEqual(all_user_invite, []) @patch( - 'requests.post', - apply_requests_post_mock([(201, f"https://api.mailgun.net/v3/{os.environ.get('MAILGUN_DOMAIN')}/messages", {}) - ])) + "requests.post", + apply_requests_post_mock( + [(201, f"https://api.mailgun.net/v3/{os.environ.get('MAILGUN_DOMAIN')}/messages", {})] + ), + ) def test_resend_invite_with_invitation(self): - """Test """ + """Test""" self.headers(academy=1) - profile_academy_kwargs = {'email': 'email@dotdotdotdot.dot'} - user_invite_kwargs = {'email': 'email@dotdotdotdot.dot'} - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='invite_resend', - role='potato', - syllabus=True, - user_invite=True, - profile_academy_kwargs=profile_academy_kwargs, - user_invite_kwargs=user_invite_kwargs) - url = reverse_lazy('authenticate:member_invite_resend_id', kwargs={'invite_id': 1}) + profile_academy_kwargs = {"email": "email@dotdotdotdot.dot"} + user_invite_kwargs = {"email": "email@dotdotdotdot.dot"} + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="invite_resend", + role="potato", + syllabus=True, + user_invite=True, + profile_academy_kwargs=profile_academy_kwargs, + user_invite_kwargs=user_invite_kwargs, + ) + url = reverse_lazy("authenticate:member_invite_resend_id", kwargs={"invite_id": 1}) response = self.client.put(url) json = response.json() - created = json['created_at'] - sent = json['sent_at'] - del json['sent_at'] - del json['created_at'] + created = json["created_at"] + sent = json["sent_at"] + del json["sent_at"] + del json["created_at"] expected = { - 'id': 1, - 'status': 'PENDING', - 'email': 'email@dotdotdotdot.dot', - 'first_name': None, - 'last_name': None, - 'token': model.user_invite.token, - 'invite_url': f'http://localhost:8000/v1/auth/member/invite/{model.user_invite.token}', - 'academy': { - 
'id': model['academy'].id, - 'slug': model['academy'].slug, - 'name': model['academy'].name, - 'logo_url': model['academy'].logo_url, + "id": 1, + "status": "PENDING", + "email": "email@dotdotdotdot.dot", + "first_name": None, + "last_name": None, + "token": model.user_invite.token, + "invite_url": f"http://localhost:8000/v1/auth/member/invite/{model.user_invite.token}", + "academy": { + "id": model["academy"].id, + "slug": model["academy"].slug, + "name": model["academy"].name, + "logo_url": model["academy"].logo_url, }, - 'role': { - 'id': 'potato', - 'name': 'potato', - 'slug': 'potato' - }, - 'cohort': { - 'slug': model['cohort'].slug, - 'name': model['cohort'].name, + "role": {"id": "potato", "name": "potato", "slug": "potato"}, + "cohort": { + "slug": model["cohort"].slug, + "name": model["cohort"].name, }, } self.assertEqual(json, expected) self.assertEqual(response.status_code, 200) - all_user_invite = [x for x in self.all_user_invite_dict() if x.pop('sent_at')] - self.assertEqual(all_user_invite, [{ - 'id': model['user_invite'].id, - 'email': model['user_invite'].email, - 'academy_id': model['user_invite'].academy_id, - 'cohort_id': model['user_invite'].cohort_id, - 'role_id': model['user_invite'].role_id, - 'is_email_validated': model['user_invite'].is_email_validated, - 'conversion_info': None, - 'has_marketing_consent': False, - 'event_slug': None, - 'asset_slug': None, - 'first_name': model['user_invite'].first_name, - 'last_name': model['user_invite'].last_name, - 'token': model['user_invite'].token, - 'author_id': model['user_invite'].author_id, - 'status': model['user_invite'].status, - 'phone': model['user_invite'].phone, - 'process_message': model['user_invite'].process_message, - 'process_status': model['user_invite'].process_status, - 'syllabus_id': None, - 'user_id': None, - 'city': None, - 'country': None, - 'latitude': None, - 'longitude': None, - 'email_quality': None, - 'email_status': None, - }]) + all_user_invite = [x for x in self.all_user_invite_dict() if x.pop("sent_at")] + self.assertEqual( + all_user_invite, + [ + { + "id": model["user_invite"].id, + "email": model["user_invite"].email, + "academy_id": model["user_invite"].academy_id, + "cohort_id": model["user_invite"].cohort_id, + "role_id": model["user_invite"].role_id, + "is_email_validated": model["user_invite"].is_email_validated, + "conversion_info": None, + "has_marketing_consent": False, + "event_slug": None, + "asset_slug": None, + "first_name": model["user_invite"].first_name, + "last_name": model["user_invite"].last_name, + "token": model["user_invite"].token, + "author_id": model["user_invite"].author_id, + "status": model["user_invite"].status, + "phone": model["user_invite"].phone, + "process_message": model["user_invite"].process_message, + "process_status": model["user_invite"].process_status, + "syllabus_id": None, + "user_id": None, + "city": None, + "country": None, + "latitude": None, + "longitude": None, + "email_quality": None, + "email_status": None, + } + ], + ) def test_resend_invite_recently(self): - """Test """ + """Test""" self.headers(academy=1) past_time = timezone.now() - timedelta(seconds=100) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='invite_resend', - role='potato', - syllabus=True, - user_invite=True, - token=True, - user_invite_kwargs={'sent_at': past_time}) - url = reverse_lazy('authenticate:member_invite_resend_id', kwargs={'invite_id': 1}) + model = self.generate_models( + authenticate=True, + profile_academy=True, + 
capability="invite_resend", + role="potato", + syllabus=True, + user_invite=True, + token=True, + user_invite_kwargs={"sent_at": past_time}, + ) + url = reverse_lazy("authenticate:member_invite_resend_id", kwargs={"invite_id": 1}) response = self.client.put(url) json = response.json() - expected = {'detail': 'sent-at-diff-less-two-minutes', 'status_code': 400} + expected = {"detail": "sent-at-diff-less-two-minutes", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, 400) - self.assertEqual(self.all_user_invite_dict(), [{ - **self.model_to_dict(model, 'user_invite'), - 'sent_at': past_time, - }]) + self.assertEqual( + self.all_user_invite_dict(), + [ + { + **self.model_to_dict(model, "user_invite"), + "sent_at": past_time, + } + ], + ) diff --git a/breathecode/authenticate/tests/urls/tests_member_invite_token.py b/breathecode/authenticate/tests/urls/tests_member_invite_token.py index d54c082f3..3d7c1415f 100644 --- a/breathecode/authenticate/tests/urls/tests_member_invite_token.py +++ b/breathecode/authenticate/tests/urls/tests_member_invite_token.py @@ -1,6 +1,7 @@ """ Test cases for /academy/:id/member/:id """ + import os from random import randint from unittest.mock import MagicMock, patch @@ -36,90 +37,95 @@ def __new__(cls, _, *args, **kwargs): # IMPORTANT: the loader.render_to_string in a function is inside of function render def render_page_without_invites(): request = None - APP_URL = os.getenv('APP_URL', '') + APP_URL = os.getenv("APP_URL", "") return loader.render_to_string( - 'message.html', { - 'MESSAGE': 'Invitation not found or it was already accepted', - 'BUTTON': None, - 'BUTTON_TARGET': '_blank', - 'LINK': None, - }, request) + "message.html", + { + "MESSAGE": "Invitation not found or it was already accepted", + "BUTTON": None, + "BUTTON_TARGET": "_blank", + "LINK": None, + }, + request, + ) def render_page_with_user_invite(model, arguments={}): environ = { - 'HTTP_COOKIE': '', - 'PATH_INFO': f'/v1/auth/member/invite/{model.user_invite.token}', - 'REMOTE_ADDR': '127.0.0.1', - 'REQUEST_METHOD': 'GET', - 'SCRIPT_NAME': '', - 'SERVER_NAME': 'testserver', - 'SERVER_PORT': '80', - 'SERVER_PROTOCOL': 'HTTP/1.1', - 'wsgi.version': (1, 0), - 'wsgi.url_scheme': 'http', - 'wsgi.input': FakePayload(b''), - 'wsgi.errors': {}, - 'wsgi.multiprocess': True, - 'wsgi.multithread': False, - 'wsgi.run_once': False, - 'QUERY_STRING': '', - 'CONTENT_TYPE': 'application/octet-stream' + "HTTP_COOKIE": "", + "PATH_INFO": f"/v1/auth/member/invite/{model.user_invite.token}", + "REMOTE_ADDR": "127.0.0.1", + "REQUEST_METHOD": "GET", + "SCRIPT_NAME": "", + "SERVER_NAME": "testserver", + "SERVER_PORT": "80", + "SERVER_PROTOCOL": "HTTP/1.1", + "wsgi.version": (1, 0), + "wsgi.url_scheme": "http", + "wsgi.input": FakePayload(b""), + "wsgi.errors": {}, + "wsgi.multiprocess": True, + "wsgi.multithread": False, + "wsgi.run_once": False, + "QUERY_STRING": "", + "CONTENT_TYPE": "application/octet-stream", } request = WSGIRequest(environ) - data = {'callback': [''], 'token': [model.user_invite.token], **arguments} - form = InviteForm({ - **data, - 'first_name': model.user_invite.first_name, - 'last_name': model.user_invite.last_name, - 'phone': model.user_invite.phone, - }) + data = {"callback": [""], "token": [model.user_invite.token], **arguments} + form = InviteForm( + { + **data, + "first_name": model.user_invite.first_name, + "last_name": model.user_invite.last_name, + "phone": model.user_invite.phone, + } + ) - return loader.render_to_string('form_invite.html', 
{'form': form, 'csrf_token': CSRF_TOKEN}, request) + return loader.render_to_string("form_invite.html", {"form": form, "csrf_token": CSRF_TOKEN}, request) def render_page_post_form(token, academy=None, arguments={}, messages=[]): request = HttpRequest() environ = { - 'HTTP_COOKIE': '', - 'PATH_INFO': f'/v1/auth/member/invite/{token}', - 'REMOTE_ADDR': '127.0.0.1', - 'REQUEST_METHOD': 'POST', - 'SCRIPT_NAME': '', - 'SERVER_NAME': 'testserver', - 'SERVER_PORT': '80', - 'SERVER_PROTOCOL': 'HTTP/1.1', - 'wsgi.version': (1, 0), - 'wsgi.url_scheme': 'http', - 'wsgi.input': FakePayload(b''), - 'wsgi.errors': {}, - 'wsgi.multiprocess': True, - 'wsgi.multithread': False, - 'wsgi.run_once': False, - 'QUERY_STRING': '', - 'CONTENT_TYPE': 'multipart/form-data; boundary=BoUnDaRyStRiNg; charset=utf-8' + "HTTP_COOKIE": "", + "PATH_INFO": f"/v1/auth/member/invite/{token}", + "REMOTE_ADDR": "127.0.0.1", + "REQUEST_METHOD": "POST", + "SCRIPT_NAME": "", + "SERVER_NAME": "testserver", + "SERVER_PORT": "80", + "SERVER_PROTOCOL": "HTTP/1.1", + "wsgi.version": (1, 0), + "wsgi.url_scheme": "http", + "wsgi.input": FakePayload(b""), + "wsgi.errors": {}, + "wsgi.multiprocess": True, + "wsgi.multithread": False, + "wsgi.run_once": False, + "QUERY_STRING": "", + "CONTENT_TYPE": "multipart/form-data; boundary=BoUnDaRyStRiNg; charset=utf-8", } request = WSGIRequest(environ) - data = {'callback': '', 'token': token, **arguments} + data = {"callback": "", "token": token, **arguments} form = InviteForm(data) context = { - 'form': form, - 'csrf_token': CSRF_TOKEN, + "form": form, + "csrf_token": CSRF_TOKEN, } if academy: - context['COMPANY_LOGO'] = academy.logo_url - context['COMPANY_NAME'] = academy.name - context['heading'] = academy.name + context["COMPANY_LOGO"] = academy.logo_url + context["COMPANY_NAME"] = academy.name + context["heading"] = academy.name if messages: - context['messages'] = messages + context["messages"] = messages - return loader.render_to_string('form_invite.html', context, request) + return loader.render_to_string("form_invite.html", context, request) def render_page_post_successfully(academy=None): @@ -127,46 +133,49 @@ def render_page_post_successfully(academy=None): obj = {} if academy: - obj['COMPANY_INFO_EMAIL'] = academy.feedback_email - obj['COMPANY_LEGAL_NAME'] = academy.legal_name or academy.name - obj['COMPANY_LOGO'] = academy.logo_url - obj['COMPANY_NAME'] = academy.name + obj["COMPANY_INFO_EMAIL"] = academy.feedback_email + obj["COMPANY_LEGAL_NAME"] = academy.legal_name or academy.name + obj["COMPANY_LOGO"] = academy.logo_url + obj["COMPANY_NAME"] = academy.name - if 'heading' not in obj: - obj['heading'] = academy.name + if "heading" not in obj: + obj["heading"] = academy.name - return loader.render_to_string('message.html', { - 'MESSAGE': 'Welcome to 4Geeks, you can go ahead and log in', - **obj, - }, request) + return loader.render_to_string( + "message.html", + { + "MESSAGE": "Welcome to 4Geeks, you can go ahead and log in", + **obj, + }, + request, + ) def render_to_string_mock(*args, **kwargs): new_args = list(args) base = new_args[1] if new_args[1] else {} - new_args[1] = {**base, 'csrf_token': CSRF_TOKEN} + new_args[1] = {**base, "csrf_token": CSRF_TOKEN} return render_to_string(*new_args, **kwargs) class GetHasherMock: - def __init__(self, *args, **kwargs): - ... + def __init__(self, *args, **kwargs): ... 
def encode(self, password, salt): return CSRF_TOKEN def salt(self): - return 'salt' + return "salt" def post_serializer(data={}): return { - 'created_at': ..., - 'email': None, - 'id': 0, - 'sent_at': None, - 'status': 'PENDING', + "created_at": ..., + "email": None, + "id": 0, + "sent_at": None, + "status": "PENDING", **data, } @@ -178,20 +187,21 @@ def post_serializer(data={}): def setup(monkeypatch, utc_now): global created_at created_at = utc_now - monkeypatch.setattr('django.utils.timezone.now', MagicMock(return_value=utc_now)) + monkeypatch.setattr("django.utils.timezone.now", MagicMock(return_value=utc_now)) yield class AuthenticateTestSuite(AuthTestCase): """Authentication test suite""" + """ 🔽🔽🔽 GET without UserInvite """ - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_member_invite_token__without_user_invite(self): - url = reverse_lazy('authenticate:member_invite_token', kwargs={'token': 'invalid'}) + url = reverse_lazy("authenticate:member_invite_token", kwargs={"token": "invalid"}) response = self.client.get(url) content = self.bc.format.from_bytes(response.content) @@ -199,27 +209,27 @@ def test_member_invite_token__without_user_invite(self): # dump error in external files if content != expected or True: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) assert content == expected self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), []) + self.assertEqual(self.bc.database.list_of("authenticate.UserInvite"), []) """ 🔽🔽🔽 GET with UserInvite """ - @patch('django.template.loader.render_to_string', MagicMock(side_effect=render_to_string_mock)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.template.loader.render_to_string", MagicMock(side_effect=render_to_string_mock)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_member_invite_token__with_user_invite(self): model = self.bc.database.create(user_invite=1) - url = reverse_lazy('authenticate:member_invite_token', kwargs={'token': model.user_invite.token}) + url = reverse_lazy("authenticate:member_invite_token", kwargs={"token": model.user_invite.token}) response = self.client.get(url) content = self.bc.format.from_bytes(response.content) @@ -227,64 +237,70 @@ def test_member_invite_token__with_user_invite(self): # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), [ - 
self.bc.format.to_dict(model.user_invite), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.UserInvite"), + [ + self.bc.format.to_dict(model.user_invite), + ], + ) """ 🔽🔽🔽 GET with UserInvite but this user is already authenticate """ - @patch('django.template.loader.render_to_string', MagicMock(side_effect=render_to_string_mock)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.template.loader.render_to_string", MagicMock(side_effect=render_to_string_mock)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_member_invite_token__with_user_invite__already_as_user(self): - user = {'email': 'user@dotdotdotdot.dot'} + user = {"email": "user@dotdotdotdot.dot"} model = self.bc.database.create(user_invite=user, user=user) - url = reverse_lazy('authenticate:member_invite_token', kwargs={'token': model.user_invite.token}) + url = reverse_lazy("authenticate:member_invite_token", kwargs={"token": model.user_invite.token}) response = self.client.get(url) - redirect = os.getenv('API_URL') + '/v1/auth/member/invite' + redirect = os.getenv("API_URL") + "/v1/auth/member/invite" content = self.bc.format.from_bytes(response.content) - expected = '' + expected = "" # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_302_FOUND) self.assertEqual(response.url, redirect) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), [ - self.bc.format.to_dict(model.user_invite), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.UserInvite"), + [ + self.bc.format.to_dict(model.user_invite), + ], + ) """ 🔽🔽🔽 GET with UserInvite and User with another email """ - @patch('django.template.loader.render_to_string', MagicMock(side_effect=render_to_string_mock)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.template.loader.render_to_string", MagicMock(side_effect=render_to_string_mock)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_member_invite_token__with_user_invite__user_with_another_email(self): - user = {'email': 'user1@dotdotdotdot.dot'} - user_invite = {'email': 'user2@dotdotdotdot.dot'} + user = {"email": "user1@dotdotdotdot.dot"} + user_invite = {"email": "user2@dotdotdotdot.dot"} model = self.bc.database.create(user_invite=user_invite, user=user) - url = reverse_lazy('authenticate:member_invite_token', kwargs={'token': model.user_invite.token}) + url = reverse_lazy("authenticate:member_invite_token", kwargs={"token": model.user_invite.token}) response = self.client.get(url) content = self.bc.format.from_bytes(response.content) @@ -292,27 +308,30 @@ def test_member_invite_token__with_user_invite__user_with_another_email(self): # dump 
error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), [ - self.bc.format.to_dict(model.user_invite), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.UserInvite"), + [ + self.bc.format.to_dict(model.user_invite), + ], + ) """ 🔽🔽🔽 POST bad token, UserInvite without email """ - @patch('django.template.loader.render_to_string', MagicMock(side_effect=render_to_string_mock)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.template.loader.render_to_string", MagicMock(side_effect=render_to_string_mock)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_member_invite_token__post__bad_token(self): - url = reverse_lazy('authenticate:member_invite_token', kwargs={'token': 'invalid'}) + url = reverse_lazy("authenticate:member_invite_token", kwargs={"token": "invalid"}) data = {} response = self.client.post(url, data) @@ -321,154 +340,165 @@ def test_member_invite_token__post__bad_token(self): # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), []) - self.assertEqual(self.bc.database.list_of('auth.User'), []) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), []) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), []) + self.assertEqual(self.bc.database.list_of("authenticate.UserInvite"), []) + self.assertEqual(self.bc.database.list_of("auth.User"), []) + self.assertEqual(self.bc.database.list_of("authenticate.ProfileAcademy"), []) + self.assertEqual(self.bc.database.list_of("admissions.CohortUser"), []) """ 🔽🔽🔽 POST bad first and last name, UserInvite with email """ - @patch('django.template.loader.render_to_string', MagicMock(side_effect=render_to_string_mock)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.template.loader.render_to_string", MagicMock(side_effect=render_to_string_mock)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_member_invite_token__post__bad_first_and_last_name(self): - user_invite = {'email': 'user@dotdotdotdot.dot'} + user_invite = {"email": "user@dotdotdotdot.dot"} model = self.bc.database.create(user_invite=user_invite) - url = reverse_lazy('authenticate:member_invite_token', kwargs={'token': model.user_invite.token}) + url = 
reverse_lazy("authenticate:member_invite_token", kwargs={"token": model.user_invite.token}) data = {} response = self.client.post(url, data) content = self.bc.format.from_bytes(response.content) - expected = render_page_post_form(token=model.user_invite.token, - messages=[Message('alert-danger', 'Invalid first or last name')], - arguments={}) + expected = render_page_post_form( + token=model.user_invite.token, + messages=[Message("alert-danger", "Invalid first or last name")], + arguments={}, + ) # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), [ - self.bc.format.to_dict(model.user_invite), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.UserInvite"), + [ + self.bc.format.to_dict(model.user_invite), + ], + ) - self.assertEqual(self.bc.database.list_of('auth.User'), []) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), []) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), []) + self.assertEqual(self.bc.database.list_of("auth.User"), []) + self.assertEqual(self.bc.database.list_of("authenticate.ProfileAcademy"), []) + self.assertEqual(self.bc.database.list_of("admissions.CohortUser"), []) """ 🔽🔽🔽 POST password is empty, UserInvite with email """ - @patch('django.template.loader.render_to_string', MagicMock(side_effect=render_to_string_mock)) - @patch('django.contrib.auth.hashers.get_hasher', MagicMock(side_effect=GetHasherMock)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.template.loader.render_to_string", MagicMock(side_effect=render_to_string_mock)) + @patch("django.contrib.auth.hashers.get_hasher", MagicMock(side_effect=GetHasherMock)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_member_invite_token__post__password_is_empty(self): - user_invite = {'email': 'user@dotdotdotdot.dot'} + user_invite = {"email": "user@dotdotdotdot.dot"} model = self.bc.database.create(user_invite=user_invite) - url = reverse_lazy('authenticate:member_invite_token', kwargs={'token': model.user_invite.token}) - data = {'first_name': 'abc', 'last_name': 'xyz'} + url = reverse_lazy("authenticate:member_invite_token", kwargs={"token": model.user_invite.token}) + data = {"first_name": "abc", "last_name": "xyz"} response = self.client.post(url, data) content = self.bc.format.from_bytes(response.content) - expected = render_page_post_form(token=model.user_invite.token, - messages=[Message('alert-danger', 'Password is empty')], - arguments=data) + expected = render_page_post_form( + token=model.user_invite.token, messages=[Message("alert-danger", "Password is empty")], arguments=data + ) # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, 
expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), [ - self.bc.format.to_dict(model.user_invite), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.UserInvite"), + [ + self.bc.format.to_dict(model.user_invite), + ], + ) - self.assertEqual(self.bc.database.list_of('auth.User'), []) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), []) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), []) + self.assertEqual(self.bc.database.list_of("auth.User"), []) + self.assertEqual(self.bc.database.list_of("authenticate.ProfileAcademy"), []) + self.assertEqual(self.bc.database.list_of("admissions.CohortUser"), []) """ 🔽🔽🔽 POST passwords doesn't not match, UserInvite with email """ - @patch('django.template.loader.render_to_string', MagicMock(side_effect=render_to_string_mock)) - @patch('django.contrib.auth.hashers.get_hasher', MagicMock(side_effect=GetHasherMock)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.template.loader.render_to_string", MagicMock(side_effect=render_to_string_mock)) + @patch("django.contrib.auth.hashers.get_hasher", MagicMock(side_effect=GetHasherMock)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_member_invite_token__post__passwords_does_not_match(self): - user_invite = {'email': 'user@dotdotdotdot.dot'} + user_invite = {"email": "user@dotdotdotdot.dot"} model = self.bc.database.create(user_invite=user_invite) - url = reverse_lazy('authenticate:member_invite_token', kwargs={'token': model.user_invite.token}) + url = reverse_lazy("authenticate:member_invite_token", kwargs={"token": model.user_invite.token}) data = { - 'first_name': 'abc', - 'last_name': 'xyz', - 'password': '^3^3uUppppp1', - 'repeat_password': '^3^3uUppppp2', + "first_name": "abc", + "last_name": "xyz", + "password": "^3^3uUppppp1", + "repeat_password": "^3^3uUppppp2", } response = self.client.post(url, data) content = self.bc.format.from_bytes(response.content) - expected = render_page_post_form(token=model.user_invite.token, - messages=[Message('alert-danger', "Passwords don't match")], - arguments=data) + expected = render_page_post_form( + token=model.user_invite.token, messages=[Message("alert-danger", "Passwords don't match")], arguments=data + ) # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), [ - self.bc.format.to_dict(model.user_invite), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.UserInvite"), + [ + self.bc.format.to_dict(model.user_invite), + ], + ) - self.assertEqual(self.bc.database.list_of('auth.User'), []) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), []) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), []) + self.assertEqual(self.bc.database.list_of("auth.User"), []) + 
self.assertEqual(self.bc.database.list_of("authenticate.ProfileAcademy"), []) + self.assertEqual(self.bc.database.list_of("admissions.CohortUser"), []) """ 🔽🔽🔽 POST with first name, last name and passwords, UserInvite with email """ - @patch('django.template.loader.render_to_string', MagicMock(side_effect=render_to_string_mock)) - @patch('django.contrib.auth.hashers.get_hasher', MagicMock(side_effect=GetHasherMock)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.template.loader.render_to_string", MagicMock(side_effect=render_to_string_mock)) + @patch("django.contrib.auth.hashers.get_hasher", MagicMock(side_effect=GetHasherMock)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_member_invite_token__post__with_first_name_last_name_and_passwords(self): - user_invite = {'email': 'user@dotdotdotdot.dot'} + user_invite = {"email": "user@dotdotdotdot.dot"} model = self.bc.database.create(user_invite=user_invite) - url = reverse_lazy('authenticate:member_invite_token', kwargs={'token': model.user_invite.token}) + url = reverse_lazy("authenticate:member_invite_token", kwargs={"token": model.user_invite.token}) data = { - 'first_name': 'abc', - 'last_name': 'xyz', - 'password': '^3^3uUppppp', - 'repeat_password': '^3^3uUppppp', + "first_name": "abc", + "last_name": "xyz", + "password": "^3^3uUppppp", + "repeat_password": "^3^3uUppppp", } response = self.client.post(url, data) @@ -477,114 +507,132 @@ def test_member_invite_token__post__with_first_name_last_name_and_passwords(self # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), - [{ - **self.bc.format.to_dict(model.user_invite), - 'status': 'ACCEPTED', - 'is_email_validated': True, - }]) - - user_db = [x for x in self.bc.database.list_of('auth.User') if x['date_joined'] and x.pop('date_joined')] - self.assertEqual(user_db, [{ - 'email': 'user@dotdotdotdot.dot', - 'first_name': 'abc', - 'id': 1, - 'is_active': True, - 'is_staff': False, - 'is_superuser': False, - 'last_login': None, - 'last_name': 'xyz', - 'password': CSRF_TOKEN, - 'username': 'user@dotdotdotdot.dot' - }]) - - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), []) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), []) + self.assertEqual( + self.bc.database.list_of("authenticate.UserInvite"), + [ + { + **self.bc.format.to_dict(model.user_invite), + "status": "ACCEPTED", + "is_email_validated": True, + } + ], + ) + + user_db = [x for x in self.bc.database.list_of("auth.User") if x["date_joined"] and x.pop("date_joined")] + self.assertEqual( + user_db, + [ + { + "email": "user@dotdotdotdot.dot", + "first_name": "abc", + "id": 1, + "is_active": True, + "is_staff": False, + "is_superuser": False, + "last_login": None, + "last_name": "xyz", + "password": CSRF_TOKEN, + "username": "user@dotdotdotdot.dot", + } + ], + ) + + 
self.assertEqual(self.bc.database.list_of("authenticate.ProfileAcademy"), []) + self.assertEqual(self.bc.database.list_of("admissions.CohortUser"), []) """ 🔽🔽🔽 POST with first name, last name and passwords, UserInvite with email, providing callback url """ - @patch('django.template.loader.render_to_string', MagicMock(side_effect=render_to_string_mock)) - @patch('django.contrib.auth.hashers.get_hasher', MagicMock(side_effect=GetHasherMock)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.template.loader.render_to_string", MagicMock(side_effect=render_to_string_mock)) + @patch("django.contrib.auth.hashers.get_hasher", MagicMock(side_effect=GetHasherMock)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_member_invite_token__post__with_first_name_last_name_and_passwords__with_callback(self): - user_invite = {'email': 'user@dotdotdotdot.dot'} + user_invite = {"email": "user@dotdotdotdot.dot"} model = self.bc.database.create(user_invite=user_invite) - url = reverse_lazy('authenticate:member_invite_token', kwargs={'token': model.user_invite.token}) + url = reverse_lazy("authenticate:member_invite_token", kwargs={"token": model.user_invite.token}) data = { - 'first_name': 'abc', - 'last_name': 'xyz', - 'password': '^3^3uUppppp', - 'repeat_password': '^3^3uUppppp', - 'callback': '/1337' + "first_name": "abc", + "last_name": "xyz", + "password": "^3^3uUppppp", + "repeat_password": "^3^3uUppppp", + "callback": "/1337", } - response = self.client.post(url, data, format='multipart') + response = self.client.post(url, data, format="multipart") content = self.bc.format.from_bytes(response.content) - expected = '' + expected = "" # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_302_FOUND) - self.assertEqual(response.url, '/1337') - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), - [{ - **self.bc.format.to_dict(model.user_invite), - 'status': 'ACCEPTED', - 'is_email_validated': True, - }]) - - user_db = [x for x in self.bc.database.list_of('auth.User') if x['date_joined'] and x.pop('date_joined')] - self.assertEqual(user_db, [{ - 'email': 'user@dotdotdotdot.dot', - 'first_name': 'abc', - 'id': 1, - 'is_active': True, - 'is_staff': False, - 'is_superuser': False, - 'last_login': None, - 'last_name': 'xyz', - 'password': CSRF_TOKEN, - 'username': 'user@dotdotdotdot.dot' - }]) - - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), []) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), []) + self.assertEqual(response.url, "/1337") + self.assertEqual( + self.bc.database.list_of("authenticate.UserInvite"), + [ + { + **self.bc.format.to_dict(model.user_invite), + "status": "ACCEPTED", + "is_email_validated": True, + } + ], + ) + + user_db = [x for x in self.bc.database.list_of("auth.User") if x["date_joined"] and x.pop("date_joined")] + self.assertEqual( + user_db, + [ + { + "email": "user@dotdotdotdot.dot", + "first_name": "abc", + "id": 1, 
+ "is_active": True, + "is_staff": False, + "is_superuser": False, + "last_login": None, + "last_name": "xyz", + "password": CSRF_TOKEN, + "username": "user@dotdotdotdot.dot", + } + ], + ) + + self.assertEqual(self.bc.database.list_of("authenticate.ProfileAcademy"), []) + self.assertEqual(self.bc.database.list_of("admissions.CohortUser"), []) """ 🔽🔽🔽 POST with first name, last name and passwords, UserInvite and User with email and ProfileAcademy """ - @patch('django.template.loader.render_to_string', MagicMock(side_effect=render_to_string_mock)) - @patch('django.contrib.auth.hashers.get_hasher', MagicMock(side_effect=GetHasherMock)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.template.loader.render_to_string", MagicMock(side_effect=render_to_string_mock)) + @patch("django.contrib.auth.hashers.get_hasher", MagicMock(side_effect=GetHasherMock)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_member_invite_token__post__with_first_name_last_name_and_passwords__with_profile_academy(self): - user = {'email': 'user@dotdotdotdot.dot', 'first_name': 'Lord', 'last_name': 'Valdomero'} - model = self.bc.database.create(user=user, user_invite=user, profile_academy=user, role='reviewer') - url = reverse_lazy('authenticate:member_invite_token', kwargs={'token': model.user_invite.token}) + user = {"email": "user@dotdotdotdot.dot", "first_name": "Lord", "last_name": "Valdomero"} + model = self.bc.database.create(user=user, user_invite=user, profile_academy=user, role="reviewer") + url = reverse_lazy("authenticate:member_invite_token", kwargs={"token": model.user_invite.token}) data = { - 'first_name': 'abc', - 'last_name': 'xyz', - 'password': '^3^3uUppppp', - 'repeat_password': '^3^3uUppppp', + "first_name": "abc", + "last_name": "xyz", + "password": "^3^3uUppppp", + "repeat_password": "^3^3uUppppp", } response = self.client.post(url, data) @@ -593,70 +641,82 @@ def test_member_invite_token__post__with_first_name_last_name_and_passwords__wit # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), - [{ - **self.bc.format.to_dict(model.user_invite), - 'status': 'ACCEPTED', - 'is_email_validated': True, - }]) - - self.assertEqual(self.bc.database.list_of('auth.User'), [ - self.bc.format.to_dict(model.user), - ]) - - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), - [{ - **self.bc.format.to_dict(model.profile_academy), - 'first_name': model.user.first_name, - 'last_name': model.user.last_name, - 'status': 'ACTIVE', - }]) - - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), [{ - 'cohort_id': 1, - 'educational_status': 'ACTIVE', - 'finantial_status': None, - 'id': 1, - 'role': 'REVIEWER', - 'user_id': 1, - 'watching': False, - 'history_log': {}, - }]) + self.assertEqual( + self.bc.database.list_of("authenticate.UserInvite"), + [ + { + **self.bc.format.to_dict(model.user_invite), + 
"status": "ACCEPTED", + "is_email_validated": True, + } + ], + ) + + self.assertEqual( + self.bc.database.list_of("auth.User"), + [ + self.bc.format.to_dict(model.user), + ], + ) + + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + { + **self.bc.format.to_dict(model.profile_academy), + "first_name": model.user.first_name, + "last_name": model.user.last_name, + "status": "ACTIVE", + } + ], + ) + + self.assertEqual( + self.bc.database.list_of("admissions.CohortUser"), + [ + { + "cohort_id": 1, + "educational_status": "ACTIVE", + "finantial_status": None, + "id": 1, + "role": "REVIEWER", + "user_id": 1, + "watching": False, + "history_log": {}, + } + ], + ) """ 🔽🔽🔽 POST Cohort saas """ - @patch('django.template.loader.render_to_string', MagicMock(side_effect=render_to_string_mock)) - @patch('django.contrib.auth.hashers.get_hasher', MagicMock(side_effect=GetHasherMock)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) - @patch('breathecode.payments.tasks.build_plan_financing.delay', MagicMock(return_value=None)) + @patch("django.template.loader.render_to_string", MagicMock(side_effect=render_to_string_mock)) + @patch("django.contrib.auth.hashers.get_hasher", MagicMock(side_effect=GetHasherMock)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) + @patch("breathecode.payments.tasks.build_plan_financing.delay", MagicMock(return_value=None)) def test__post__cohort_saas(self): - user = {'email': 'user@dotdotdotdot.dot', 'first_name': 'Lord', 'last_name': 'Valdomero'} - plan = {'time_of_life': None, 'time_of_life_unit': None} - cohort = {'available_as_saas': True} - model = self.bc.database.create(user=user, - user_invite=user, - profile_academy=user, - role='reviewer', - plan=plan, - currency=1, - cohort=cohort) - url = reverse_lazy('authenticate:member_invite_token', kwargs={'token': model.user_invite.token}) + user = {"email": "user@dotdotdotdot.dot", "first_name": "Lord", "last_name": "Valdomero"} + plan = {"time_of_life": None, "time_of_life_unit": None} + cohort = {"available_as_saas": True} + model = self.bc.database.create( + user=user, user_invite=user, profile_academy=user, role="reviewer", plan=plan, currency=1, cohort=cohort + ) + url = reverse_lazy("authenticate:member_invite_token", kwargs={"token": model.user_invite.token}) data = { - 'first_name': 'abc', - 'last_name': 'xyz', - 'password': '^3^3uUppppp', - 'repeat_password': '^3^3uUppppp', + "first_name": "abc", + "last_name": "xyz", + "password": "^3^3uUppppp", + "repeat_password": "^3^3uUppppp", } response = self.client.post(url, data) @@ -665,73 +725,91 @@ def test__post__cohort_saas(self): # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), - [{ - **self.bc.format.to_dict(model.user_invite), - 'status': 'ACCEPTED', - 'is_email_validated': True, - }]) - - self.assertEqual(self.bc.database.list_of('auth.User'), [ - 
self.bc.format.to_dict(model.user), - ]) - - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), - [{ - **self.bc.format.to_dict(model.profile_academy), - 'first_name': model.user.first_name, - 'last_name': model.user.last_name, - 'status': 'ACTIVE', - }]) - - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), [{ - 'cohort_id': 1, - 'educational_status': 'ACTIVE', - 'finantial_status': None, - 'id': 1, - 'role': 'REVIEWER', - 'user_id': 1, - 'watching': False, - 'history_log': {}, - }]) + self.assertEqual( + self.bc.database.list_of("authenticate.UserInvite"), + [ + { + **self.bc.format.to_dict(model.user_invite), + "status": "ACCEPTED", + "is_email_validated": True, + } + ], + ) + + self.assertEqual( + self.bc.database.list_of("auth.User"), + [ + self.bc.format.to_dict(model.user), + ], + ) + + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + { + **self.bc.format.to_dict(model.profile_academy), + "first_name": model.user.first_name, + "last_name": model.user.last_name, + "status": "ACTIVE", + } + ], + ) + + self.assertEqual( + self.bc.database.list_of("admissions.CohortUser"), + [ + { + "cohort_id": 1, + "educational_status": "ACTIVE", + "finantial_status": None, + "id": 1, + "role": "REVIEWER", + "user_id": 1, + "watching": False, + "history_log": {}, + } + ], + ) self.bc.check.calls(build_plan_financing.delay.call_args_list, []) """ 🔽🔽🔽 POST Academy saas """ - @patch('django.template.loader.render_to_string', MagicMock(side_effect=render_to_string_mock)) - @patch('django.contrib.auth.hashers.get_hasher', MagicMock(side_effect=GetHasherMock)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) - @patch('breathecode.payments.tasks.build_plan_financing.delay', MagicMock(return_value=None)) + @patch("django.template.loader.render_to_string", MagicMock(side_effect=render_to_string_mock)) + @patch("django.contrib.auth.hashers.get_hasher", MagicMock(side_effect=GetHasherMock)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) + @patch("breathecode.payments.tasks.build_plan_financing.delay", MagicMock(return_value=None)) def test__post__academy_saas(self): - user = {'email': 'user@dotdotdotdot.dot', 'first_name': 'Lord', 'last_name': 'Valdomero'} - plan = {'time_of_life': None, 'time_of_life_unit': None} - cohort = {'available_as_saas': None} - academy = {'available_as_saas': True} - model = self.bc.database.create(user=user, - user_invite=user, - profile_academy=user, - role='reviewer', - plan=plan, - currency=1, - cohort=cohort, - academy=academy) - url = reverse_lazy('authenticate:member_invite_token', kwargs={'token': model.user_invite.token}) + user = {"email": "user@dotdotdotdot.dot", "first_name": "Lord", "last_name": "Valdomero"} + plan = {"time_of_life": None, "time_of_life_unit": None} + cohort = {"available_as_saas": None} + academy = {"available_as_saas": True} + model = self.bc.database.create( + user=user, + user_invite=user, + profile_academy=user, + role="reviewer", + plan=plan, + currency=1, + cohort=cohort, + academy=academy, + ) + url = reverse_lazy("authenticate:member_invite_token", kwargs={"token": model.user_invite.token}) data = { - 'first_name': 'abc', - 'last_name': 'xyz', - 'password': '^3^3uUppppp', 
- 'repeat_password': '^3^3uUppppp', + "first_name": "abc", + "last_name": "xyz", + "password": "^3^3uUppppp", + "repeat_password": "^3^3uUppppp", } response = self.client.post(url, data) @@ -740,43 +818,59 @@ def test__post__academy_saas(self): # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), - [{ - **self.bc.format.to_dict(model.user_invite), - 'status': 'ACCEPTED', - 'is_email_validated': True, - }]) - - self.assertEqual(self.bc.database.list_of('auth.User'), [ - self.bc.format.to_dict(model.user), - ]) - - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), - [{ - **self.bc.format.to_dict(model.profile_academy), - 'first_name': model.user.first_name, - 'last_name': model.user.last_name, - 'status': 'ACTIVE', - }]) - - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), [{ - 'cohort_id': 1, - 'educational_status': 'ACTIVE', - 'finantial_status': None, - 'id': 1, - 'role': 'REVIEWER', - 'user_id': 1, - 'watching': False, - 'history_log': {}, - }]) + self.assertEqual( + self.bc.database.list_of("authenticate.UserInvite"), + [ + { + **self.bc.format.to_dict(model.user_invite), + "status": "ACCEPTED", + "is_email_validated": True, + } + ], + ) + + self.assertEqual( + self.bc.database.list_of("auth.User"), + [ + self.bc.format.to_dict(model.user), + ], + ) + + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + { + **self.bc.format.to_dict(model.profile_academy), + "first_name": model.user.first_name, + "last_name": model.user.last_name, + "status": "ACTIVE", + } + ], + ) + + self.assertEqual( + self.bc.database.list_of("admissions.CohortUser"), + [ + { + "cohort_id": 1, + "educational_status": "ACTIVE", + "finantial_status": None, + "id": 1, + "role": "REVIEWER", + "user_id": 1, + "watching": False, + "history_log": {}, + } + ], + ) self.bc.check.calls(build_plan_financing.delay.call_args_list, []) """ @@ -784,19 +878,19 @@ def test__post__academy_saas(self): with Role """ - @patch('django.template.loader.render_to_string', MagicMock(side_effect=render_to_string_mock)) - @patch('django.contrib.auth.hashers.get_hasher', MagicMock(side_effect=GetHasherMock)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.template.loader.render_to_string", MagicMock(side_effect=render_to_string_mock)) + @patch("django.contrib.auth.hashers.get_hasher", MagicMock(side_effect=GetHasherMock)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_member_invite_token__post__with_cohort__with_role(self): - user = {'email': 'user@dotdotdotdot.dot', 'first_name': 'Lord', 'last_name': 'Valdomero'} - model = self.bc.database.create(user=user, user_invite=user, cohort=1, role='student') - url = reverse_lazy('authenticate:member_invite_token', kwargs={'token': model.user_invite.token}) + user = {"email": "user@dotdotdotdot.dot", "first_name": "Lord", "last_name": 
"Valdomero"} + model = self.bc.database.create(user=user, user_invite=user, cohort=1, role="student") + url = reverse_lazy("authenticate:member_invite_token", kwargs={"token": model.user_invite.token}) data = { - 'first_name': 'abc', - 'last_name': 'xyz', - 'password': '^3^3uUppppp', - 'repeat_password': '^3^3uUppppp', + "first_name": "abc", + "last_name": "xyz", + "password": "^3^3uUppppp", + "repeat_password": "^3^3uUppppp", } response = self.client.post(url, data) @@ -805,56 +899,68 @@ def test_member_invite_token__post__with_cohort__with_role(self): # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), - [{ - **self.bc.format.to_dict(model.user_invite), - 'status': 'ACCEPTED', - 'is_email_validated': True, - }]) - - self.assertEqual(self.bc.database.list_of('auth.User'), [ - self.bc.format.to_dict(model.user), - ]) - - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), [{ - 'cohort_id': 1, - 'educational_status': 'ACTIVE', - 'finantial_status': None, - 'id': 1, - 'role': model.role.slug.upper(), - 'user_id': 1, - 'watching': False, - 'history_log': {}, - }]) + self.assertEqual( + self.bc.database.list_of("authenticate.UserInvite"), + [ + { + **self.bc.format.to_dict(model.user_invite), + "status": "ACCEPTED", + "is_email_validated": True, + } + ], + ) + + self.assertEqual( + self.bc.database.list_of("auth.User"), + [ + self.bc.format.to_dict(model.user), + ], + ) + + self.assertEqual( + self.bc.database.list_of("admissions.CohortUser"), + [ + { + "cohort_id": 1, + "educational_status": "ACTIVE", + "finantial_status": None, + "id": 1, + "role": model.role.slug.upper(), + "user_id": 1, + "watching": False, + "history_log": {}, + } + ], + ) """ 🔽🔽🔽 POST with first name, last name and passwords, UserInvite and User with email and two Cohort with Role """ - @patch('django.template.loader.render_to_string', MagicMock(side_effect=render_to_string_mock)) - @patch('django.contrib.auth.hashers.get_hasher', MagicMock(side_effect=GetHasherMock)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.template.loader.render_to_string", MagicMock(side_effect=render_to_string_mock)) + @patch("django.contrib.auth.hashers.get_hasher", MagicMock(side_effect=GetHasherMock)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_member_invite_token__post__with_cohort__with_role__accept_first_invite(self): - user = {'email': 'user@dotdotdotdot.dot', 'first_name': 'Lord', 'last_name': 'Valdomero'} - user_invites = [{**user, 'cohort_id': 1}, {**user, 'cohort_id': 2}] - model = self.bc.database.create(user=user, user_invite=user_invites, cohort=2, role='student') + user = {"email": "user@dotdotdotdot.dot", "first_name": "Lord", "last_name": "Valdomero"} + user_invites = [{**user, "cohort_id": 1}, {**user, "cohort_id": 2}] + model = self.bc.database.create(user=user, user_invite=user_invites, cohort=2, role="student") - url = 
reverse_lazy('authenticate:member_invite_token', kwargs={'token': model.user_invite[0].token}) + url = reverse_lazy("authenticate:member_invite_token", kwargs={"token": model.user_invite[0].token}) data = { - 'first_name': 'abc', - 'last_name': 'xyz', - 'password': '^3^3uUppppp', - 'repeat_password': '^3^3uUppppp', + "first_name": "abc", + "last_name": "xyz", + "password": "^3^3uUppppp", + "repeat_password": "^3^3uUppppp", } response = self.client.post(url, data) @@ -863,53 +969,64 @@ def test_member_invite_token__post__with_cohort__with_role__accept_first_invite( # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), [ - { - **self.bc.format.to_dict(model.user_invite[0]), - 'status': 'ACCEPTED', - 'is_email_validated': True, - }, - self.bc.format.to_dict(model.user_invite[1]), - ]) - - self.assertEqual(self.bc.database.list_of('auth.User'), [ - self.bc.format.to_dict(model.user), - ]) - - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), [{ - 'cohort_id': 1, - 'educational_status': 'ACTIVE', - 'finantial_status': None, - 'id': 1, - 'role': model.role.slug.upper(), - 'user_id': 1, - 'watching': False, - 'history_log': {}, - }]) - - @patch('django.template.loader.render_to_string', MagicMock(side_effect=render_to_string_mock)) - @patch('django.contrib.auth.hashers.get_hasher', MagicMock(side_effect=GetHasherMock)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + self.assertEqual( + self.bc.database.list_of("authenticate.UserInvite"), + [ + { + **self.bc.format.to_dict(model.user_invite[0]), + "status": "ACCEPTED", + "is_email_validated": True, + }, + self.bc.format.to_dict(model.user_invite[1]), + ], + ) + + self.assertEqual( + self.bc.database.list_of("auth.User"), + [ + self.bc.format.to_dict(model.user), + ], + ) + + self.assertEqual( + self.bc.database.list_of("admissions.CohortUser"), + [ + { + "cohort_id": 1, + "educational_status": "ACTIVE", + "finantial_status": None, + "id": 1, + "role": model.role.slug.upper(), + "user_id": 1, + "watching": False, + "history_log": {}, + } + ], + ) + + @patch("django.template.loader.render_to_string", MagicMock(side_effect=render_to_string_mock)) + @patch("django.contrib.auth.hashers.get_hasher", MagicMock(side_effect=GetHasherMock)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_member_invite_token__post__with_cohort__with_role__accept_second_invite(self): - user = {'email': 'user@dotdotdotdot.dot', 'first_name': 'Lord', 'last_name': 'Valdomero'} - user_invites = [{**user, 'cohort_id': 1}, {**user, 'cohort_id': 2}] - model = self.bc.database.create(user=user, user_invite=user_invites, cohort=2, role='student') + user = {"email": "user@dotdotdotdot.dot", "first_name": "Lord", "last_name": "Valdomero"} + user_invites = [{**user, "cohort_id": 1}, {**user, "cohort_id": 2}] + model = self.bc.database.create(user=user, user_invite=user_invites, cohort=2, role="student") 
- url = reverse_lazy('authenticate:member_invite_token', kwargs={'token': model.user_invite[1].token}) + url = reverse_lazy("authenticate:member_invite_token", kwargs={"token": model.user_invite[1].token}) data = { - 'first_name': 'abc', - 'last_name': 'xyz', - 'password': '^3^3uUppppp', - 'repeat_password': '^3^3uUppppp', + "first_name": "abc", + "last_name": "xyz", + "password": "^3^3uUppppp", + "repeat_password": "^3^3uUppppp", } response = self.client.post(url, data) @@ -918,57 +1035,68 @@ def test_member_invite_token__post__with_cohort__with_role__accept_second_invite # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), [ - self.bc.format.to_dict(model.user_invite[0]), - { - **self.bc.format.to_dict(model.user_invite[1]), - 'status': 'ACCEPTED', - 'is_email_validated': True, - }, - ]) - - self.assertEqual(self.bc.database.list_of('auth.User'), [ - self.bc.format.to_dict(model.user), - ]) - - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), [{ - 'cohort_id': 2, - 'educational_status': 'ACTIVE', - 'finantial_status': None, - 'id': 1, - 'role': model.role.slug.upper(), - 'user_id': 1, - 'watching': False, - 'history_log': {}, - }]) + self.assertEqual( + self.bc.database.list_of("authenticate.UserInvite"), + [ + self.bc.format.to_dict(model.user_invite[0]), + { + **self.bc.format.to_dict(model.user_invite[1]), + "status": "ACCEPTED", + "is_email_validated": True, + }, + ], + ) + + self.assertEqual( + self.bc.database.list_of("auth.User"), + [ + self.bc.format.to_dict(model.user), + ], + ) + + self.assertEqual( + self.bc.database.list_of("admissions.CohortUser"), + [ + { + "cohort_id": 2, + "educational_status": "ACTIVE", + "finantial_status": None, + "id": 1, + "role": model.role.slug.upper(), + "user_id": 1, + "watching": False, + "history_log": {}, + } + ], + ) """ 🔽🔽🔽 POST with first name, last name and passwords, UserInvite and User with email and Cohort with Role """ - @patch('django.template.loader.render_to_string', MagicMock(side_effect=render_to_string_mock)) - @patch('django.contrib.auth.hashers.get_hasher', MagicMock(side_effect=GetHasherMock)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.template.loader.render_to_string", MagicMock(side_effect=render_to_string_mock)) + @patch("django.contrib.auth.hashers.get_hasher", MagicMock(side_effect=GetHasherMock)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_member_invite_token__post__with_cohort__without_role_in_the_invite__role_student_exists(self): - user = {'email': 'user@dotdotdotdot.dot', 'first_name': 'Lord', 'last_name': 'Valdomero'} - user_invite = {**user, 'role_id': None} - model = self.bc.database.create(user=user, user_invite=user_invite, cohort=1, role='student') - url = reverse_lazy('authenticate:member_invite_token', kwargs={'token': model.user_invite.token}) + user = {"email": "user@dotdotdotdot.dot", 
"first_name": "Lord", "last_name": "Valdomero"} + user_invite = {**user, "role_id": None} + model = self.bc.database.create(user=user, user_invite=user_invite, cohort=1, role="student") + url = reverse_lazy("authenticate:member_invite_token", kwargs={"token": model.user_invite.token}) data = { - 'first_name': 'abc', - 'last_name': 'xyz', - 'password': '^3^3uUppppp', - 'repeat_password': '^3^3uUppppp', + "first_name": "abc", + "last_name": "xyz", + "password": "^3^3uUppppp", + "repeat_password": "^3^3uUppppp", } response = self.client.post(url, data) @@ -977,54 +1105,66 @@ def test_member_invite_token__post__with_cohort__without_role_in_the_invite__rol # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), - [{ - **self.bc.format.to_dict(model.user_invite), - 'status': 'ACCEPTED', - 'is_email_validated': True, - }]) - - self.assertEqual(self.bc.database.list_of('auth.User'), [ - self.bc.format.to_dict(model.user), - ]) - - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), [{ - 'cohort_id': 1, - 'educational_status': 'ACTIVE', - 'finantial_status': None, - 'id': 1, - 'role': 'STUDENT', - 'user_id': 1, - 'watching': False, - 'history_log': {}, - }]) + self.assertEqual( + self.bc.database.list_of("authenticate.UserInvite"), + [ + { + **self.bc.format.to_dict(model.user_invite), + "status": "ACCEPTED", + "is_email_validated": True, + } + ], + ) + + self.assertEqual( + self.bc.database.list_of("auth.User"), + [ + self.bc.format.to_dict(model.user), + ], + ) + + self.assertEqual( + self.bc.database.list_of("admissions.CohortUser"), + [ + { + "cohort_id": 1, + "educational_status": "ACTIVE", + "finantial_status": None, + "id": 1, + "role": "STUDENT", + "user_id": 1, + "watching": False, + "history_log": {}, + } + ], + ) """ 🔽🔽🔽 POST with first name, last name and passwords, UserInvite and User with email and Cohort without Role in the invite and Role student not exists """ - @patch('django.template.loader.render_to_string', MagicMock(side_effect=render_to_string_mock)) - @patch('django.contrib.auth.hashers.get_hasher', MagicMock(side_effect=GetHasherMock)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.template.loader.render_to_string", MagicMock(side_effect=render_to_string_mock)) + @patch("django.contrib.auth.hashers.get_hasher", MagicMock(side_effect=GetHasherMock)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_member_invite_token__post__with_cohort__without_role(self): - user = {'email': 'user@dotdotdotdot.dot', 'first_name': 'Lord', 'last_name': 'Valdomero'} + user = {"email": "user@dotdotdotdot.dot", "first_name": "Lord", "last_name": "Valdomero"} model = self.bc.database.create(user=user, user_invite=user, cohort=1) - url = reverse_lazy('authenticate:member_invite_token', kwargs={'token': model.user_invite.token}) + url = reverse_lazy("authenticate:member_invite_token", kwargs={"token": 
model.user_invite.token}) data = { - 'first_name': 'abc', - 'last_name': 'xyz', - 'password': '^3^3uUppppp', - 'repeat_password': '^3^3uUppppp', + "first_name": "abc", + "last_name": "xyz", + "password": "^3^3uUppppp", + "repeat_password": "^3^3uUppppp", } response = self.client.post(url, data) @@ -1033,109 +1173,130 @@ def test_member_invite_token__post__with_cohort__without_role(self): token=model.user_invite.token, academy=model.academy, messages=[ - Message('alert-danger', 'Unexpected error occurred with invite, please contact the staff of 4geeks') + Message("alert-danger", "Unexpected error occurred with invite, please contact the staff of 4geeks") ], - arguments=data) + arguments=data, + ) # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), [ - self.bc.format.to_dict(model.user_invite), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.UserInvite"), + [ + self.bc.format.to_dict(model.user_invite), + ], + ) - self.assertEqual(self.bc.database.list_of('auth.User'), [ - self.bc.format.to_dict(model.user), - ]) + self.assertEqual( + self.bc.database.list_of("auth.User"), + [ + self.bc.format.to_dict(model.user), + ], + ) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), []) + self.assertEqual(self.bc.database.list_of("admissions.CohortUser"), []) """ 🔽🔽🔽 POST JSON password is empty, UserInvite with email """ - @patch('django.template.loader.render_to_string', MagicMock(side_effect=render_to_string_mock)) - @patch('django.contrib.auth.hashers.get_hasher', MagicMock(side_effect=GetHasherMock)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.template.loader.render_to_string", MagicMock(side_effect=render_to_string_mock)) + @patch("django.contrib.auth.hashers.get_hasher", MagicMock(side_effect=GetHasherMock)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test__post__json__password_is_empty(self): - user_invite = {'email': 'user@dotdotdotdot.dot'} + user_invite = {"email": "user@dotdotdotdot.dot"} model = self.bc.database.create(user_invite=user_invite) - url = reverse_lazy('authenticate:member_invite_token', kwargs={'token': model.user_invite.token}) + url = reverse_lazy("authenticate:member_invite_token", kwargs={"token": model.user_invite.token}) - data = {'first_name': 'abc', 'last_name': 'xyz'} - response = self.client.post(url, data, format='json') + data = {"first_name": "abc", "last_name": "xyz"} + response = self.client.post(url, data, format="json") json = response.json() - expected = {'detail': 'Password is empty', 'status_code': 400} + expected = {"detail": "Password is empty", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), [ - self.bc.format.to_dict(model.user_invite), - ]) + self.assertEqual( + 
self.bc.database.list_of("authenticate.UserInvite"), + [ + self.bc.format.to_dict(model.user_invite), + ], + ) - self.assertEqual(self.bc.database.list_of('auth.User'), []) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), []) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), []) + self.assertEqual(self.bc.database.list_of("auth.User"), []) + self.assertEqual(self.bc.database.list_of("authenticate.ProfileAcademy"), []) + self.assertEqual(self.bc.database.list_of("admissions.CohortUser"), []) """ 🔽🔽🔽 POST JSON with first name, last name and passwords, UserInvite with email """ - @patch('django.template.loader.render_to_string', MagicMock(side_effect=render_to_string_mock)) - @patch('django.contrib.auth.hashers.get_hasher', MagicMock(side_effect=GetHasherMock)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.template.loader.render_to_string", MagicMock(side_effect=render_to_string_mock)) + @patch("django.contrib.auth.hashers.get_hasher", MagicMock(side_effect=GetHasherMock)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test__post__json__with_first_name_last_name_and_passwords(self): - user_invite = {'email': 'user@dotdotdotdot.dot'} + user_invite = {"email": "user@dotdotdotdot.dot"} model = self.bc.database.create(user_invite=user_invite) - url = reverse_lazy('authenticate:member_invite_token', kwargs={'token': model.user_invite.token}) + url = reverse_lazy("authenticate:member_invite_token", kwargs={"token": model.user_invite.token}) data = { - 'first_name': 'abc', - 'last_name': 'xyz', - 'password': '^3^3uUppppp', - 'repeat_password': '^3^3uUppppp', + "first_name": "abc", + "last_name": "xyz", + "password": "^3^3uUppppp", + "repeat_password": "^3^3uUppppp", } - response = self.client.post(url, data, format='json') + response = self.client.post(url, data, format="json") json = response.json() - expected = post_serializer({ - 'id': 1, - 'created_at': self.bc.datetime.to_iso_string(created_at), - 'status': 'ACCEPTED', - 'email': 'user@dotdotdotdot.dot', - }) + expected = post_serializer( + { + "id": 1, + "created_at": self.bc.datetime.to_iso_string(created_at), + "status": "ACCEPTED", + "email": "user@dotdotdotdot.dot", + } + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), - [{ - **self.bc.format.to_dict(model.user_invite), - 'status': 'ACCEPTED', - 'is_email_validated': True, - }]) - - user_db = [x for x in self.bc.database.list_of('auth.User') if x['date_joined'] and x.pop('date_joined')] - self.assertEqual(user_db, [{ - 'email': 'user@dotdotdotdot.dot', - 'first_name': 'abc', - 'id': 1, - 'is_active': True, - 'is_staff': False, - 'is_superuser': False, - 'last_login': None, - 'last_name': 'xyz', - 'password': CSRF_TOKEN, - 'username': 'user@dotdotdotdot.dot' - }]) - - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), []) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), []) + self.assertEqual( + self.bc.database.list_of("authenticate.UserInvite"), + [ + { + **self.bc.format.to_dict(model.user_invite), + "status": "ACCEPTED", + "is_email_validated": True, + } + 
], + ) + + user_db = [x for x in self.bc.database.list_of("auth.User") if x["date_joined"] and x.pop("date_joined")] + self.assertEqual( + user_db, + [ + { + "email": "user@dotdotdotdot.dot", + "first_name": "abc", + "id": 1, + "is_active": True, + "is_staff": False, + "is_superuser": False, + "last_login": None, + "last_name": "xyz", + "password": CSRF_TOKEN, + "username": "user@dotdotdotdot.dot", + } + ], + ) + + self.assertEqual(self.bc.database.list_of("authenticate.ProfileAcademy"), []) + self.assertEqual(self.bc.database.list_of("admissions.CohortUser"), []) diff --git a/breathecode/authenticate/tests/urls/tests_password_reset.py b/breathecode/authenticate/tests/urls/tests_password_reset.py index f15e369aa..ab0c8f1f1 100644 --- a/breathecode/authenticate/tests/urls/tests_password_reset.py +++ b/breathecode/authenticate/tests/urls/tests_password_reset.py @@ -1,6 +1,7 @@ """ Test cases for /user """ + import os from unittest.mock import call, patch @@ -15,229 +16,269 @@ class AuthenticateTestSuite(AuthTestCase): """Authentication test suite""" - @patch('breathecode.notify.actions.send_email_message') + @patch("breathecode.notify.actions.send_email_message") def test_password_reset__post__without_data(self, mock): """Test /cohort/:id without auth""" self.headers(academy=1) - url = reverse_lazy('authenticate:password_reset') + url = reverse_lazy("authenticate:password_reset") model = self.generate_models() data = {} response = self.client.post(url, data) - content = response.content.decode('utf-8') + content = response.content.decode("utf-8") - self.assertNotEqual(content.find(''), -1) - self.assertNotEqual(content.find('Email is required'), -1) + self.assertNotEqual(content.find("<title>"), -1) + self.assertNotEqual(content.find("Email is required"), -1) self.assertNotEqual( - content.find('<ul class="errorlist"><li>This field is required.</li></ul>\n' - '<input type="password" name="password1"'), -1) + content.find( + '<ul class="errorlist"><li>This field is required.</li></ul>\n' + '<input type="password" name="password1"' + ), + -1, + ) self.assertNotEqual( - content.find('<ul class="errorlist"><li>This field is required.</li></ul>\n' - '<input type="password" name="password2"'), -1) + content.find( + '<ul class="errorlist"><li>This field is required.</li></ul>\n' + '<input type="password" name="password2"' + ), + -1, + ) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(self.all_user_dict(), []) self.assertEqual(mock.call_args_list, []) - @patch('breathecode.notify.actions.send_email_message') + @patch("breathecode.notify.actions.send_email_message") def test_password_reset__post__with_bad_passwords(self, mock): """Test /cohort/:id without auth""" self.headers(academy=1) - url = reverse_lazy('authenticate:password_reset') + url = reverse_lazy("authenticate:password_reset") model = self.generate_models() data = { - 'password1': 'pass1', - 'password2': 'pass2', + "password1": "pass1", + "password2": "pass2", } response = self.client.post(url, data) - content = response.content.decode('utf-8') + content = response.content.decode("utf-8") - self.assertNotEqual(content.find('<title>'), -1) + self.assertNotEqual(content.find("<title>"), -1) self.assertNotEqual( - content.find('<ul class="errorlist"><li>Ensure this value has at least 8 characters (it has 5).</li></ul>\n' - '<input type="password" name="password1"'), -1) + content.find( + '<ul class="errorlist"><li>Ensure this value has at least 8 characters (it has 5).</li></ul>\n' + '<input type="password" 
name="password1"' + ), + -1, + ) self.assertNotEqual( - content.find('<ul class="errorlist"><li>Ensure this value has at least 8 characters (it has 5).</li></ul>\n' - '<input type="password" name="password2"'), -1) + content.find( + '<ul class="errorlist"><li>Ensure this value has at least 8 characters (it has 5).</li></ul>\n' + '<input type="password" name="password2"' + ), + -1, + ) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(self.all_user_dict(), []) self.assertEqual(mock.call_args_list, []) - @patch('breathecode.notify.actions.send_email_message') + @patch("breathecode.notify.actions.send_email_message") def test_password_reset__post__passwords_dont_match(self, mock): """Test /cohort/:id without auth""" self.headers(academy=1) - url = reverse_lazy('authenticate:password_reset') + url = reverse_lazy("authenticate:password_reset") model = self.generate_models() data = { - 'password1': 'pass12341', - 'password2': 'pass12342', + "password1": "pass12341", + "password2": "pass12342", } response = self.client.post(url, data) - content = response.content.decode('utf-8') + content = response.content.decode("utf-8") - self.assertNotEqual(content.find('<title>'), -1) - self.assertNotEqual(content.find('Email is required'), -1) + self.assertNotEqual(content.find("<title>"), -1) + self.assertNotEqual(content.find("Email is required"), -1) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(self.all_user_dict(), []) self.assertEqual(mock.call_args_list, []) - @patch('breathecode.notify.actions.send_email_message') + @patch("breathecode.notify.actions.send_email_message") def test_password_reset__post__passwords_dont_match___(self, mock): """Test /cohort/:id without auth""" self.headers(academy=1) - url = reverse_lazy('authenticate:password_reset') + url = reverse_lazy("authenticate:password_reset") model = self.generate_models() data = { - 'password1': 'pass1234', - 'password2': 'pass1234', + "password1": "pass1234", + "password2": "pass1234", } response = self.client.post(url, data) - content = response.content.decode('utf-8') + content = response.content.decode("utf-8") - self.assertNotEqual(content.find('<title>'), -1) - self.assertNotEqual(content.find('Email is required'), -1) + self.assertNotEqual(content.find("<title>"), -1) + self.assertNotEqual(content.find("Email is required"), -1) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(self.all_user_dict(), []) self.assertEqual(mock.call_args_list, []) - @patch('breathecode.notify.actions.send_email_message') + @patch("breathecode.notify.actions.send_email_message") def test_password_reset__post__with_email(self, mock): """Test /cohort/:id without auth""" self.headers(academy=1) - url = reverse_lazy('authenticate:password_reset') + url = reverse_lazy("authenticate:password_reset") model = self.generate_models() data = { - 'email': 'konan@naturo.io', + "email": "konan@naturo.io", } response = self.client.post(url, data) - content = response.content.decode('utf-8') + content = response.content.decode("utf-8") - self.assertNotEqual(content.find('Check your email for a password reset!'), -1) + self.assertNotEqual(content.find("Check your email for a password reset!"), -1) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(self.all_user_dict(), []) self.assertEqual(mock.call_args_list, []) - @patch('breathecode.notify.actions.send_email_message') + @patch("breathecode.notify.actions.send_email_message") def 
test_password_reset__post__with_email__with_user(self, mock): """Test /cohort/:id without auth""" self.headers(academy=1) - url = reverse_lazy('authenticate:password_reset') + url = reverse_lazy("authenticate:password_reset") model = self.generate_models(user=True) - data = {'email': model['user'].email} + data = {"email": model["user"].email} response = self.client.post(url, data) - content = response.content.decode('utf-8') - token, created = Token.get_or_create(model['user'], token_type='temporal') + content = response.content.decode("utf-8") + token, created = Token.get_or_create(model["user"], token_type="temporal") - self.assertNotEqual(content.find('Check your email for a password reset!'), -1) + self.assertNotEqual(content.find("Check your email for a password reset!"), -1) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_user_dict(), [{**self.model_to_dict(model, 'user')}]) - - self.assertEqual(mock.call_args_list, [ - call('pick_password', - model['user'].email, { - 'SUBJECT': 'You asked to reset your password at 4Geeks', - 'LINK': os.getenv('API_URL', '') + f'/v1/auth/password/{token}' - }, - academy=None) - ]) - - @patch('breathecode.notify.actions.send_email_message') + self.assertEqual(self.all_user_dict(), [{**self.model_to_dict(model, "user")}]) + + self.assertEqual( + mock.call_args_list, + [ + call( + "pick_password", + model["user"].email, + { + "SUBJECT": "You asked to reset your password at 4Geeks", + "LINK": os.getenv("API_URL", "") + f"/v1/auth/password/{token}", + }, + academy=None, + ) + ], + ) + + @patch("breathecode.notify.actions.send_email_message") def test_password_reset__post__with_email_in_uppercase__with_user(self, mock): """Test /cohort/:id without auth""" self.headers(academy=1) - url = reverse_lazy('authenticate:password_reset') + url = reverse_lazy("authenticate:password_reset") model = self.generate_models(user=True) data = { - 'email': model['user'].email.upper(), + "email": model["user"].email.upper(), } response = self.client.post(url, data) - content = response.content.decode('utf-8') - token, created = Token.get_or_create(model['user'], token_type='temporal') + content = response.content.decode("utf-8") + token, created = Token.get_or_create(model["user"], token_type="temporal") - self.assertNotEqual(content.find('Check your email for a password reset!'), -1) + self.assertNotEqual(content.find("Check your email for a password reset!"), -1) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_user_dict(), [{**self.model_to_dict(model, 'user')}]) - - self.assertEqual(mock.call_args_list, [ - call('pick_password', - model['user'].email, { - 'SUBJECT': 'You asked to reset your password at 4Geeks', - 'LINK': os.getenv('API_URL', '') + f'/v1/auth/password/{token}' - }, - academy=None) - ]) - - @patch('breathecode.notify.actions.send_email_message') + self.assertEqual(self.all_user_dict(), [{**self.model_to_dict(model, "user")}]) + + self.assertEqual( + mock.call_args_list, + [ + call( + "pick_password", + model["user"].email, + { + "SUBJECT": "You asked to reset your password at 4Geeks", + "LINK": os.getenv("API_URL", "") + f"/v1/auth/password/{token}", + }, + academy=None, + ) + ], + ) + + @patch("breathecode.notify.actions.send_email_message") def test_password_reset__post__with_callback__with_email(self, mock): """Test /cohort/:id without auth""" self.headers(academy=1) - url = reverse_lazy('authenticate:password_reset') + url = reverse_lazy("authenticate:password_reset") model 
= self.generate_models() data = { - 'email': 'konan@naturo.io', - 'callback': 'https://naturo.io/', + "email": "konan@naturo.io", + "callback": "https://naturo.io/", } response = self.client.post(url, data) - self.assertEqual(response.url, 'https://naturo.io/?msg=Check%20your%20email%20for%20a%20password%20reset!') + self.assertEqual(response.url, "https://naturo.io/?msg=Check%20your%20email%20for%20a%20password%20reset!") self.assertEqual(response.status_code, status.HTTP_302_FOUND) self.assertEqual(self.all_user_dict(), []) self.assertEqual(mock.call_args_list, []) - @patch('breathecode.notify.actions.send_email_message') + @patch("breathecode.notify.actions.send_email_message") def test_password_reset__post__with_callback__with_email__with_user(self, mock): """Test /cohort/:id without auth""" self.headers(academy=1) - url = reverse_lazy('authenticate:password_reset') + url = reverse_lazy("authenticate:password_reset") model = self.generate_models(user=True) data = { - 'email': model['user'].email, - 'callback': 'https://naturo.io/', + "email": model["user"].email, + "callback": "https://naturo.io/", } response = self.client.post(url, data) - token, created = Token.get_or_create(model['user'], token_type='temporal') + token, created = Token.get_or_create(model["user"], token_type="temporal") - self.assertEqual(response.url, 'https://naturo.io/?msg=Check%20your%20email%20for%20a%20password%20reset!') + self.assertEqual(response.url, "https://naturo.io/?msg=Check%20your%20email%20for%20a%20password%20reset!") self.assertEqual(response.status_code, status.HTTP_302_FOUND) - self.assertEqual(self.all_user_dict(), [{**self.model_to_dict(model, 'user')}]) - - self.assertEqual(mock.call_args_list, [ - call('pick_password', - model['user'].email, { - 'SUBJECT': 'You asked to reset your password at 4Geeks', - 'LINK': os.getenv('API_URL', '') + f'/v1/auth/password/{token}' - }, - academy=None) - ]) - - @patch('breathecode.notify.actions.send_email_message') + self.assertEqual(self.all_user_dict(), [{**self.model_to_dict(model, "user")}]) + + self.assertEqual( + mock.call_args_list, + [ + call( + "pick_password", + model["user"].email, + { + "SUBJECT": "You asked to reset your password at 4Geeks", + "LINK": os.getenv("API_URL", "") + f"/v1/auth/password/{token}", + }, + academy=None, + ) + ], + ) + + @patch("breathecode.notify.actions.send_email_message") def test_password_reset__post__with_callback__with_email_in_uppercase__with_user(self, mock): """Test /cohort/:id without auth""" self.headers(academy=1) - url = reverse_lazy('authenticate:password_reset') + url = reverse_lazy("authenticate:password_reset") model = self.generate_models(user=True) data = { - 'email': model['user'].email.upper(), - 'callback': 'https://naturo.io/', + "email": model["user"].email.upper(), + "callback": "https://naturo.io/", } response = self.client.post(url, data) - token, created = Token.get_or_create(model['user'], token_type='temporal') + token, created = Token.get_or_create(model["user"], token_type="temporal") - self.assertEqual(response.url, 'https://naturo.io/?msg=Check%20your%20email%20for%20a%20password%20reset!') + self.assertEqual(response.url, "https://naturo.io/?msg=Check%20your%20email%20for%20a%20password%20reset!") self.assertEqual(response.status_code, status.HTTP_302_FOUND) - self.assertEqual(self.all_user_dict(), [{**self.model_to_dict(model, 'user')}]) - - self.assertEqual(mock.call_args_list, [ - call('pick_password', - model['user'].email, { - 'SUBJECT': 'You asked to reset your password at 
4Geeks', - 'LINK': os.getenv('API_URL', '') + f'/v1/auth/password/{token}' - }, - academy=None) - ]) + self.assertEqual(self.all_user_dict(), [{**self.model_to_dict(model, "user")}]) + + self.assertEqual( + mock.call_args_list, + [ + call( + "pick_password", + model["user"].email, + { + "SUBJECT": "You asked to reset your password at 4Geeks", + "LINK": os.getenv("API_URL", "") + f"/v1/auth/password/{token}", + }, + academy=None, + ) + ], + ) diff --git a/breathecode/authenticate/tests/urls/tests_password_token.py b/breathecode/authenticate/tests/urls/tests_password_token.py index 154159e06..617274def 100644 --- a/breathecode/authenticate/tests/urls/tests_password_token.py +++ b/breathecode/authenticate/tests/urls/tests_password_token.py @@ -1,6 +1,7 @@ """ Test cases for /academy/:id/member/:id """ + import os import random import string @@ -34,16 +35,11 @@ def __new__(cls, _, *args, **kwargs): return super().__new__(cls, *args, **kwargs) -def render(message, button=None, button_target='_blank', link=None): +def render(message, button=None, button_target="_blank", link=None): request = None return loader.render_to_string( - 'message.html', - { - 'MESSAGE': message, - 'BUTTON': button, - 'BUTTON_TARGET': button_target, - 'LINK': link - }, + "message.html", + {"MESSAGE": message, "BUTTON": button, "BUTTON_TARGET": button_target, "LINK": link}, request, using=None, ) @@ -52,46 +48,46 @@ def render(message, button=None, button_target='_blank', link=None): # IMPORTANT: the loader.render_to_string in a function is inside of function render def render_message(message): request = None - context = {'MESSAGE': message, 'BUTTON': None, 'BUTTON_TARGET': '_blank', 'LINK': None} + context = {"MESSAGE": message, "BUTTON": None, "BUTTON_TARGET": "_blank", "LINK": None} - return loader.render_to_string('message.html', context, request) + return loader.render_to_string("message.html", context, request) def render_pick_password(self, method, token, data, messages=[]): environ = { - 'HTTP_COOKIE': '', - 'PATH_INFO': f'/v1/auth/password/{token}', - 'REMOTE_ADDR': '127.0.0.1', - 'REQUEST_METHOD': method, - 'SCRIPT_NAME': '', - 'SERVER_NAME': 'testserver', - 'SERVER_PORT': '80', - 'SERVER_PROTOCOL': 'HTTP/1.1', - 'wsgi.version': (1, 0), - 'wsgi.url_scheme': 'http', - 'wsgi.input': FakePayload(b''), - 'wsgi.errors': None, - 'wsgi.multiprocess': True, - 'wsgi.multithread': False, - 'wsgi.run_once': False, - 'QUERY_STRING': '', - 'CONTENT_TYPE': 'application/octet-stream' + "HTTP_COOKIE": "", + "PATH_INFO": f"/v1/auth/password/{token}", + "REMOTE_ADDR": "127.0.0.1", + "REQUEST_METHOD": method, + "SCRIPT_NAME": "", + "SERVER_NAME": "testserver", + "SERVER_PORT": "80", + "SERVER_PROTOCOL": "HTTP/1.1", + "wsgi.version": (1, 0), + "wsgi.url_scheme": "http", + "wsgi.input": FakePayload(b""), + "wsgi.errors": None, + "wsgi.multiprocess": True, + "wsgi.multithread": False, + "wsgi.run_once": False, + "QUERY_STRING": "", + "CONTENT_TYPE": "application/octet-stream", } request = WSGIRequest(environ) querystring = self.bc.format.to_querystring(data) data = QueryDict(querystring, mutable=True) - data['token'] = token - data['callback'] = '' + data["token"] = token + data["callback"] = "" form = PickPasswordForm(data) - context = {'form': form} + context = {"form": form} if messages: - context['messages'] = messages + context["messages"] = messages - return loader.render_to_string('form.html', context, request) + return loader.render_to_string("form.html", context, request) class AuthenticateTestSuite(AuthTestCase): @@ 
-100,361 +96,371 @@ class AuthenticateTestSuite(AuthTestCase): """ def test__get__bad_token(self): - url = reverse_lazy('authenticate:password_token', kwargs={'token': 'token'}) + url = reverse_lazy("authenticate:password_token", kwargs={"token": "token"}) response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - expected = render_message('The link has expired.') + expected = render_message("The link has expired.") # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('auth.User'), []) + self.assertEqual(self.bc.database.list_of("auth.User"), []) """ 🔽🔽🔽 GET with token """ - @patch('django.template.context_processors.get_token', MagicMock(return_value='predicabletoken')) + @patch("django.template.context_processors.get_token", MagicMock(return_value="predicabletoken")) def test__get__with_token(self): email = self.bc.fake.email() - user = {'password': '', 'email': email} - token = {'key': 'xyz'} - user_invite = {'token': 'abc', 'email': email} + user = {"password": "", "email": email} + token = {"key": "xyz"} + user_invite = {"token": "abc", "email": email} - cases = [({'user': user, 'token': token}, 'xyz'), ({'user': user, 'user_invite': user_invite}, 'abc')] + cases = [({"user": user, "token": token}, "xyz"), ({"user": user, "user_invite": user_invite}, "abc")] for kwargs, token in cases: model = self.bc.database.create(**kwargs) - url = reverse_lazy('authenticate:password_token', kwargs={'token': token}) + url = reverse_lazy("authenticate:password_token", kwargs={"token": token}) response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - expected = render_pick_password(self, 'GET', token, data={}) + expected = render_pick_password(self, "GET", token, data={}) # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('auth.User'), [self.bc.format.to_dict(model.user)]) + self.assertEqual(self.bc.database.list_of("auth.User"), [self.bc.format.to_dict(model.user)]) # teardown - self.bc.database.delete('auth.User') - self.bc.database.delete('authenticate.UserInvite') + self.bc.database.delete("auth.User") + self.bc.database.delete("authenticate.UserInvite") """ 🔽🔽🔽 GET with token and empty fields """ - @patch('django.template.context_processors.get_token', MagicMock(return_value='predicabletoken')) + @patch("django.template.context_processors.get_token", MagicMock(return_value="predicabletoken")) def test__get__with_token__with_empty_fields(self): email = self.bc.fake.email() - user = {'password': '', 'email': email} - token = {'key': 'xyz'} - user_invite = {'token': 'abc', 'email': email} + user = {"password": "", "email": email} + token = {"key": "xyz"} + user_invite = {"token": "abc", "email": email} - cases = [({'user': user, 'token': token}, 'xyz'), ({'user': user, 'user_invite': user_invite}, 'abc')] + cases = [({"user": user, "token": token}, "xyz"), ({"user": user, 
"user_invite": user_invite}, "abc")] for kwargs, token in cases: model = self.bc.database.create(**kwargs) - url = reverse_lazy('authenticate:password_token', kwargs={'token': token}) + url = reverse_lazy("authenticate:password_token", kwargs={"token": token}) response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - expected = render_pick_password(self, 'GET', token, data={'password1': '', 'password2': ''}) + expected = render_pick_password(self, "GET", token, data={"password1": "", "password2": ""}) # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('auth.User'), [self.bc.format.to_dict(model.user)]) + self.assertEqual(self.bc.database.list_of("auth.User"), [self.bc.format.to_dict(model.user)]) # teardown - self.bc.database.delete('auth.User') - self.bc.database.delete('authenticate.UserInvite') + self.bc.database.delete("auth.User") + self.bc.database.delete("authenticate.UserInvite") """ 🔽🔽🔽 POST with bad token """ def test__post__bad_token(self): - url = reverse_lazy('authenticate:password_token', kwargs={'token': 'token'}) + url = reverse_lazy("authenticate:password_token", kwargs={"token": "token"}) response = self.client.post(url) content = self.bc.format.from_bytes(response.content) - expected = render_message('The link has expired.') + expected = render_message("The link has expired.") # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('auth.User'), []) + self.assertEqual(self.bc.database.list_of("auth.User"), []) """ 🔽🔽🔽 POST with token, password is empty """ - @patch('django.template.context_processors.get_token', MagicMock(return_value='predicabletoken')) + @patch("django.template.context_processors.get_token", MagicMock(return_value="predicabletoken")) def test__post__with_token__password_is_empty(self): email = self.bc.fake.email() - user = {'password': '', 'email': email} - token = {'key': 'xyz'} - user_invite = {'token': 'abc', 'email': email} + user = {"password": "", "email": email} + token = {"key": "xyz"} + user_invite = {"token": "abc", "email": email} - cases = [({'user': user, 'token': token}, 'xyz'), ({'user': user, 'user_invite': user_invite}, 'abc')] + cases = [({"user": user, "token": token}, "xyz"), ({"user": user, "user_invite": user_invite}, "abc")] for kwargs, token in cases: model = self.bc.database.create(**kwargs) - url = reverse_lazy('authenticate:password_token', kwargs={'token': token}) - data = {'password1': '', 'password2': ''} + url = reverse_lazy("authenticate:password_token", kwargs={"token": token}) + data = {"password1": "", "password2": ""} response = self.client.post(url, data) content = self.bc.format.from_bytes(response.content) - expected = render_pick_password(self, - 'POST', - token, - data=data, - messages=[Message('alert-danger', "Password can't be empty")]) + expected = render_pick_password( + self, "POST", token, data=data, 
messages=[Message("alert-danger", "Password can't be empty")] + ) # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('auth.User'), [self.bc.format.to_dict(model.user)]) + self.assertEqual(self.bc.database.list_of("auth.User"), [self.bc.format.to_dict(model.user)]) # teardown - self.bc.database.delete('auth.User') - self.bc.database.delete('authenticate.UserInvite') + self.bc.database.delete("auth.User") + self.bc.database.delete("authenticate.UserInvite") """ 🔽🔽🔽 POST with token, password don't match """ - @patch('django.template.context_processors.get_token', MagicMock(return_value='predicabletoken')) + @patch("django.template.context_processors.get_token", MagicMock(return_value="predicabletoken")) def test__post__with_token__password_does_not_match(self): email = self.bc.fake.email() - user = {'password': '', 'email': email} - token = {'key': 'xyz'} - user_invite = {'token': 'abc', 'email': email} + user = {"password": "", "email": email} + token = {"key": "xyz"} + user_invite = {"token": "abc", "email": email} - cases = [({'user': user, 'token': token}, 'xyz'), ({'user': user, 'user_invite': user_invite}, 'abc')] + cases = [({"user": user, "token": token}, "xyz"), ({"user": user, "user_invite": user_invite}, "abc")] for kwargs, token in cases: model = self.bc.database.create(**kwargs) - url = reverse_lazy('authenticate:password_token', kwargs={'token': token}) - data = {'password1': self.bc.fake.password(), 'password2': self.bc.fake.password()} + url = reverse_lazy("authenticate:password_token", kwargs={"token": token}) + data = {"password1": self.bc.fake.password(), "password2": self.bc.fake.password()} response = self.client.post(url, data) content = self.bc.format.from_bytes(response.content) - expected = render_pick_password(self, - 'POST', - token, - data=data, - messages=[Message('alert-danger', "Passwords don't match")]) + expected = render_pick_password( + self, "POST", token, data=data, messages=[Message("alert-danger", "Passwords don't match")] + ) # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('auth.User'), [self.bc.format.to_dict(model.user)]) + self.assertEqual(self.bc.database.list_of("auth.User"), [self.bc.format.to_dict(model.user)]) # teardown - self.bc.database.delete('auth.User') - self.bc.database.delete('authenticate.UserInvite') + self.bc.database.delete("auth.User") + self.bc.database.delete("authenticate.UserInvite") """ 🔽🔽🔽 POST with token, invalid password """ - @patch('django.template.context_processors.get_token', MagicMock(return_value='predicabletoken')) + @patch("django.template.context_processors.get_token", MagicMock(return_value="predicabletoken")) def test__post__with_token__invalid_password(self): email = self.bc.fake.email() - user = {'password': '', 'email': email} - token = {'key': 'xyz'} - user_invite = {'token': 'abc', 'email': email} + user = {"password": "", "email": email} + 
token = {"key": "xyz"} + user_invite = {"token": "abc", "email": email} - cases = [({'user': user, 'token': token}, 'xyz'), ({'user': user, 'user_invite': user_invite}, 'abc')] + cases = [({"user": user, "token": token}, "xyz"), ({"user": user, "user_invite": user_invite}, "abc")] for kwargs, token in cases: passwords = [ - ''.join(random.choices(string.ascii_lowercase, k=random.randint(1, 7))), - ''.join(random.choices(string.ascii_uppercase, k=random.randint(1, 7))), - ''.join(random.choices(string.punctuation, k=random.randint(1, 7))), + "".join(random.choices(string.ascii_lowercase, k=random.randint(1, 7))), + "".join(random.choices(string.ascii_uppercase, k=random.randint(1, 7))), + "".join(random.choices(string.punctuation, k=random.randint(1, 7))), ] for password in passwords: model = self.bc.database.create(**kwargs) - url = reverse_lazy('authenticate:password_token', kwargs={'token': token}) - data = {'password1': password, 'password2': password} + url = reverse_lazy("authenticate:password_token", kwargs={"token": token}) + data = {"password1": password, "password2": password} response = self.client.post(url, data) content = self.bc.format.from_bytes(response.content) expected = render_pick_password( self, - 'POST', + "POST", token, data=data, messages=[ - Message('alert-danger', - 'Password must contain 8 characters with lowercase, uppercase and symbols') - ]) + Message( + "alert-danger", "Password must contain 8 characters with lowercase, uppercase and symbols" + ) + ], + ) # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('auth.User'), [self.bc.format.to_dict(model.user)]) + self.assertEqual(self.bc.database.list_of("auth.User"), [self.bc.format.to_dict(model.user)]) # teardown - self.bc.database.delete('auth.User') - self.bc.database.delete('authenticate.UserInvite') + self.bc.database.delete("auth.User") + self.bc.database.delete("authenticate.UserInvite") """ 🔽🔽🔽 POST with token, right password """ - @patch('django.template.context_processors.get_token', MagicMock(return_value='predicabletoken')) - @patch('django.contrib.auth.models.User.set_password', set_password) + @patch("django.template.context_processors.get_token", MagicMock(return_value="predicabletoken")) + @patch("django.contrib.auth.models.User.set_password", set_password) def test__post__with_token__right_password(self): email = self.bc.fake.email() - user = {'password': '', 'email': email} - token = {'key': 'xyz'} - user_invite = {'token': 'abc', 'email': email} + user = {"password": "", "email": email} + token = {"key": "xyz"} + user_invite = {"token": "abc", "email": email} - cases = [({'user': user, 'token': token}, 'xyz'), ({'user': user, 'user_invite': user_invite}, 'abc')] + cases = [({"user": user, "token": token}, "xyz"), ({"user": user, "user_invite": user_invite}, "abc")] for kwargs, token in cases: - password_characters = (random.choices(string.ascii_lowercase, k=3) + - random.choices(string.ascii_uppercase, k=3) + - random.choices(string.punctuation, k=2)) + password_characters = ( + random.choices(string.ascii_lowercase, k=3) + + random.choices(string.ascii_uppercase, k=3) + + random.choices(string.punctuation, k=2) + ) random.shuffle(password_characters) - password = 
''.join(password_characters) + password = "".join(password_characters) model = self.bc.database.create(**kwargs) - url = reverse_lazy('authenticate:password_token', kwargs={'token': token}) - data = {'password1': password, 'password2': password} + url = reverse_lazy("authenticate:password_token", kwargs={"token": token}) + data = {"password1": password, "password2": password} response = self.client.post(url, data) content = self.bc.format.from_bytes(response.content) - expected = render('You password has been successfully set.', - button='Continue to sign in', - button_target='_self', - link=os.getenv('APP_URL', 'https://4geeks.com') + '/login') + expected = render( + "You password has been successfully set.", + button="Continue to sign in", + button_target="_self", + link=os.getenv("APP_URL", "https://4geeks.com") + "/login", + ) # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) assert content == expected self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('auth.User'), [ - { - **self.bc.format.to_dict(model.user), - 'password': password, - }, - ]) + self.assertEqual( + self.bc.database.list_of("auth.User"), + [ + { + **self.bc.format.to_dict(model.user), + "password": password, + }, + ], + ) # teardown - self.bc.database.delete('auth.User') - self.bc.database.delete('authenticate.UserInvite') + self.bc.database.delete("auth.User") + self.bc.database.delete("authenticate.UserInvite") """ 🔽🔽🔽 POST with token, right password, passing callback """ - @patch('django.template.context_processors.get_token', MagicMock(return_value='predicabletoken')) - @patch('django.contrib.auth.models.User.set_password', set_password) + @patch("django.template.context_processors.get_token", MagicMock(return_value="predicabletoken")) + @patch("django.contrib.auth.models.User.set_password", set_password) def test__post__with_token__right_password__passing_callback(self): email = self.bc.fake.email() - user = {'password': '', 'email': email} - token = {'key': 'xyz'} - user_invite = {'token': 'abc', 'email': email} + user = {"password": "", "email": email} + token = {"key": "xyz"} + user_invite = {"token": "abc", "email": email} - cases = [({'user': user, 'token': token}, 'xyz'), ({'user': user, 'user_invite': user_invite}, 'abc')] + cases = [({"user": user, "token": token}, "xyz"), ({"user": user, "user_invite": user_invite}, "abc")] for kwargs, token in cases: - password_characters = (random.choices(string.ascii_lowercase, k=3) + - random.choices(string.ascii_uppercase, k=3) + - random.choices(string.punctuation, k=2)) + password_characters = ( + random.choices(string.ascii_lowercase, k=3) + + random.choices(string.ascii_uppercase, k=3) + + random.choices(string.punctuation, k=2) + ) random.shuffle(password_characters) - password = ''.join(password_characters) + password = "".join(password_characters) model = self.bc.database.create(**kwargs) redirect_url = self.bc.fake.url() - url = reverse_lazy('authenticate:password_token', kwargs={'token': token}) - data = {'password1': password, 'password2': password, 'callback': redirect_url} + url = reverse_lazy("authenticate:password_token", kwargs={"token": token}) + data = {"password1": password, "password2": password, "callback": redirect_url} response = self.client.post(url, data) content = 
self.bc.format.from_bytes(response.content) - expected = '' + expected = "" # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_302_FOUND) self.assertEqual(response.url, redirect_url) - self.assertEqual(self.bc.database.list_of('auth.User'), [ - { - **self.bc.format.to_dict(model.user), - 'password': password, - }, - ]) + self.assertEqual( + self.bc.database.list_of("auth.User"), + [ + { + **self.bc.format.to_dict(model.user), + "password": password, + }, + ], + ) # teardown - self.bc.database.delete('auth.User') - self.bc.database.delete('authenticate.UserInvite') + self.bc.database.delete("auth.User") + self.bc.database.delete("authenticate.UserInvite") diff --git a/breathecode/authenticate/tests/urls/tests_profile_academy_github_reset_link.py b/breathecode/authenticate/tests/urls/tests_profile_academy_github_reset_link.py index b7e1ef73e..d63845c5b 100644 --- a/breathecode/authenticate/tests/urls/tests_profile_academy_github_reset_link.py +++ b/breathecode/authenticate/tests/urls/tests_profile_academy_github_reset_link.py @@ -11,74 +11,82 @@ class AuthenticateTestSuite(AuthTestCase): def test_github_reset_link_without_auth(self): """Test /auth/member/<profile_academy_id>/token""" - url = reverse_lazy('authenticate:profile_academy_reset_github_link', kwargs={'profile_academy_id': 3}) + url = reverse_lazy("authenticate:profile_academy_reset_github_link", kwargs={"profile_academy_id": 3}) response = self.client.post(url) json = response.json() - self.assertEqual(json, { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED, - }) + self.assertEqual( + json, + { + "detail": "Authentication credentials were not provided.", + "status_code": status.HTTP_401_UNAUTHORIZED, + }, + ) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) def test_github_reset_link_without_capability(self): """Test /auth/member/<profile_academy_id>/token""" self.headers(academy=1) self.generate_models(authenticate=True) - url = reverse_lazy('authenticate:profile_academy_reset_github_link', kwargs={'profile_academy_id': 3}) + url = reverse_lazy("authenticate:profile_academy_reset_github_link", kwargs={"profile_academy_id": 3}) response = self.client.post(url) json = response.json() self.assertEqual( - json, { - 'detail': "You (user: 1) don't have this capability: generate_temporal_token " - 'for academy 1', - 'status_code': 403 - }) + json, + { + "detail": "You (user: 1) don't have this capability: generate_temporal_token " "for academy 1", + "status_code": 403, + }, + ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) def test_github_reset_link_without_user(self): """Test /auth/member/<profile_academy_id>/token""" - role = 'pikachu' + role = "pikachu" self.headers(academy=1) - self.generate_models(authenticate=True, capability='generate_temporal_token', profile_academy=True, role=role) - url = reverse_lazy('authenticate:profile_academy_reset_github_link', kwargs={'profile_academy_id': 3}) + self.generate_models(authenticate=True, capability="generate_temporal_token", profile_academy=True, role=role) + url = reverse_lazy("authenticate:profile_academy_reset_github_link", kwargs={"profile_academy_id": 3}) response = self.client.post(url) json = 
response.json() - self.assertEqual(json, {'detail': 'member-not-found', 'status_code': 404}) + self.assertEqual(json, {"detail": "member-not-found", "status_code": 404}) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) def test_github_reset_link_ok(self): """Test /auth/member/<profile_academy_id>/token""" - role = 'academy_token' + role = "academy_token" self.headers(academy=1) - profile_academy_kwargs = {'id': 3} - self.generate_models(authenticate=True, - user=True, - capability='generate_temporal_token', - profile_academy=True, - role=role, - profile_academy_kwargs=profile_academy_kwargs) - url = reverse_lazy('authenticate:profile_academy_reset_github_link', kwargs={'profile_academy_id': 3}) + profile_academy_kwargs = {"id": 3} + self.generate_models( + authenticate=True, + user=True, + capability="generate_temporal_token", + profile_academy=True, + role=role, + profile_academy_kwargs=profile_academy_kwargs, + ) + url = reverse_lazy("authenticate:profile_academy_reset_github_link", kwargs={"profile_academy_id": 3}) response = self.client.post(url) json = response.json() - profile_academy = ProfileAcademy.objects.filter(id=profile_academy_kwargs['id']).first() + profile_academy = ProfileAcademy.objects.filter(id=profile_academy_kwargs["id"]).first() - token, created = Token.get_or_create(user=profile_academy.user, token_type='temporal') + token, created = Token.get_or_create(user=profile_academy.user, token_type="temporal") self.assertEqual( - json, { - 'user': { - 'id': 1, - 'email': profile_academy.user.email, - 'first_name': profile_academy.user.first_name, - 'last_name': profile_academy.user.last_name, - 'username': profile_academy.user.username, + json, + { + "user": { + "id": 1, + "email": profile_academy.user.email, + "first_name": profile_academy.user.first_name, + "last_name": profile_academy.user.last_name, + "username": profile_academy.user.username, }, - 'key': f'{token}', - 'reset_password_url': f'http://localhost:8000/v1/auth/password/{token}', - 'reset_github_url': f'http://localhost:8000/v1/auth/github/{token}' - }) + "key": f"{token}", + "reset_password_url": f"http://localhost:8000/v1/auth/password/{token}", + "reset_github_url": f"http://localhost:8000/v1/auth/github/{token}", + }, + ) self.assertEqual(response.status_code, status.HTTP_200_OK) diff --git a/breathecode/authenticate/tests/urls/tests_profile_invite_me.py b/breathecode/authenticate/tests/urls/tests_profile_invite_me.py index 5180075ba..3898c0053 100644 --- a/breathecode/authenticate/tests/urls/tests_profile_invite_me.py +++ b/breathecode/authenticate/tests/urls/tests_profile_invite_me.py @@ -1,6 +1,7 @@ """ Test cases for /academy/:id/member/:id """ + from unittest.mock import MagicMock, patch from django.urls.base import reverse_lazy @@ -13,75 +14,57 @@ def get_serializer(self, mentor_profile, academy, mentorship_service, user): return { - 'invites': [], - 'mentor_profiles': [{ - 'booking_url': - mentor_profile.booking_url, - 'online_meeting_url': - None, - 'created_at': - self.bc.datetime.to_iso_string(mentor_profile.created_at), - 'email': - mentor_profile.email, - 'id': - mentor_profile.id, - 'one_line_bio': - mentor_profile.one_line_bio, - 'price_per_hour': - mentor_profile.price_per_hour, - 'rating': - mentor_profile.rating, - 'services': [{ - 'academy': { - 'icon_url': '/static/icons/picture.png', - 'id': academy.id, - 'logo_url': academy.logo_url, - 'name': academy.name, - 'slug': academy.slug, + "invites": [], + "mentor_profiles": [ + { + "booking_url": mentor_profile.booking_url, 
+ "online_meeting_url": None, + "created_at": self.bc.datetime.to_iso_string(mentor_profile.created_at), + "email": mentor_profile.email, + "id": mentor_profile.id, + "one_line_bio": mentor_profile.one_line_bio, + "price_per_hour": mentor_profile.price_per_hour, + "rating": mentor_profile.rating, + "services": [ + { + "academy": { + "icon_url": "/static/icons/picture.png", + "id": academy.id, + "logo_url": academy.logo_url, + "name": academy.name, + "slug": academy.slug, + }, + "allow_mentee_to_extend": mentorship_service.allow_mentee_to_extend, + "allow_mentors_to_extend": mentorship_service.allow_mentors_to_extend, + "created_at": self.bc.datetime.to_iso_string(mentorship_service.created_at), + "duration": self.bc.datetime.from_timedelta(mentorship_service.duration), + "id": mentorship_service.id, + "language": mentorship_service.language, + "logo_url": mentorship_service.logo_url, + "max_duration": self.bc.datetime.from_timedelta(mentorship_service.max_duration), + "missed_meeting_duration": self.bc.datetime.from_timedelta( + mentorship_service.missed_meeting_duration + ), + "name": mentorship_service.name, + "slug": mentorship_service.slug, + "status": mentorship_service.status, + "updated_at": self.bc.datetime.to_iso_string(mentorship_service.updated_at), + } + ], + "slug": mentor_profile.slug, + "status": mentor_profile.status, + "timezone": mentor_profile.timezone, + "updated_at": self.bc.datetime.to_iso_string(mentor_profile.updated_at), + "user": { + "email": user.email, + "first_name": user.first_name, + "id": user.id, + "last_name": user.last_name, }, - 'allow_mentee_to_extend': - mentorship_service.allow_mentee_to_extend, - 'allow_mentors_to_extend': - mentorship_service.allow_mentors_to_extend, - 'created_at': - self.bc.datetime.to_iso_string(mentorship_service.created_at), - 'duration': - self.bc.datetime.from_timedelta(mentorship_service.duration), - 'id': - mentorship_service.id, - 'language': - mentorship_service.language, - 'logo_url': - mentorship_service.logo_url, - 'max_duration': - self.bc.datetime.from_timedelta(mentorship_service.max_duration), - 'missed_meeting_duration': - self.bc.datetime.from_timedelta(mentorship_service.missed_meeting_duration), - 'name': - mentorship_service.name, - 'slug': - mentorship_service.slug, - 'status': - mentorship_service.status, - 'updated_at': - self.bc.datetime.to_iso_string(mentorship_service.updated_at), - }], - 'slug': - mentor_profile.slug, - 'status': - mentor_profile.status, - 'timezone': - mentor_profile.timezone, - 'updated_at': - self.bc.datetime.to_iso_string(mentor_profile.updated_at), - 'user': { - 'email': user.email, - 'first_name': user.first_name, - 'id': user.id, - 'last_name': user.last_name, } - } for mentor_profile in (mentor_profile if isinstance(mentor_profile, list) else [mentor_profile])], - 'profile_academies': [], + for mentor_profile in (mentor_profile if isinstance(mentor_profile, list) else [mentor_profile]) + ], + "profile_academies": [], } @@ -90,32 +73,35 @@ class AuthenticateTestSuite(AuthTestCase): 🔽🔽🔽 Auth """ - @patch('os.getenv', MagicMock(return_value='https://dot.dot')) + @patch("os.getenv", MagicMock(return_value="https://dot.dot")) def test_profile_invite_me__without_auth(self): - url = reverse_lazy('authenticate:profile_invite_me') + url = reverse_lazy("authenticate:profile_invite_me") response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED, - }) + 
self.assertEqual( + json, + { + "detail": "Authentication credentials were not provided.", + "status_code": status.HTTP_401_UNAUTHORIZED, + }, + ) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) """ 🔽🔽🔽 Without data """ - @patch('os.getenv', MagicMock(return_value='https://dot.dot')) + @patch("os.getenv", MagicMock(return_value="https://dot.dot")) def test_profile_invite_me__without_data(self): model = self.bc.database.create(user=1) self.client.force_authenticate(model.user) - url = reverse_lazy('authenticate:profile_invite_me') + url = reverse_lazy("authenticate:profile_invite_me") response = self.client.get(url) json = response.json() - expected = {'invites': [], 'mentor_profiles': [], 'profile_academies': []} + expected = {"invites": [], "mentor_profiles": [], "profile_academies": []} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -124,82 +110,90 @@ def test_profile_invite_me__without_data(self): 🔽🔽🔽 With one UserInvite """ - @patch('os.getenv', MagicMock(return_value='https://dot.dot')) + @patch("os.getenv", MagicMock(return_value="https://dot.dot")) def test_profile_invite_me__with_one_user_invite(self): - user_invite = {'email': 'user@dotdotdotdot.dot'} - user = {'email': 'user@dotdotdotdot.dot'} + user_invite = {"email": "user@dotdotdotdot.dot"} + user = {"email": "user@dotdotdotdot.dot"} model = self.bc.database.create(user=user, user_invite=user_invite) self.client.force_authenticate(model.user) - url = reverse_lazy('authenticate:profile_invite_me') + url = reverse_lazy("authenticate:profile_invite_me") response = self.client.get(url) json = response.json() expected = { - 'invites': [{ - 'academy': model.user_invite.academy, - 'cohort': model.user_invite.cohort, - 'created_at': self.bc.datetime.to_iso_string(model.user_invite.created_at), - 'email': model.user_invite.email, - 'first_name': model.user_invite.first_name, - 'id': model.user_invite.id, - 'invite_url': f'https://dot.dot/v1/auth/member/invite/{model.user_invite.token}', - 'last_name': model.user_invite.last_name, - 'role': model.user_invite.role, - 'sent_at': model.user_invite.sent_at, - 'status': model.user_invite.status, - 'token': model.user_invite.token, - }], - 'mentor_profiles': [], - 'profile_academies': [], + "invites": [ + { + "academy": model.user_invite.academy, + "cohort": model.user_invite.cohort, + "created_at": self.bc.datetime.to_iso_string(model.user_invite.created_at), + "email": model.user_invite.email, + "first_name": model.user_invite.first_name, + "id": model.user_invite.id, + "invite_url": f"https://dot.dot/v1/auth/member/invite/{model.user_invite.token}", + "last_name": model.user_invite.last_name, + "role": model.user_invite.role, + "sent_at": model.user_invite.sent_at, + "status": model.user_invite.status, + "token": model.user_invite.token, + } + ], + "mentor_profiles": [], + "profile_academies": [], } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), [ - self.bc.format.to_dict(model.user_invite), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.UserInvite"), + [ + self.bc.format.to_dict(model.user_invite), + ], + ) """ 🔽🔽🔽 With two UserInvite """ - @patch('os.getenv', MagicMock(return_value='https://dot.dot')) + @patch("os.getenv", MagicMock(return_value="https://dot.dot")) def test_profile_invite_me__with_two_user_invites(self): - user_invite = {'email': 'user@dotdotdotdot.dot'} - user = 
{'email': 'user@dotdotdotdot.dot'} + user_invite = {"email": "user@dotdotdotdot.dot"} + user = {"email": "user@dotdotdotdot.dot"} model = self.bc.database.create(user=user, user_invite=(2, user_invite)) self.client.force_authenticate(model.user) - url = reverse_lazy('authenticate:profile_invite_me') + url = reverse_lazy("authenticate:profile_invite_me") response = self.client.get(url) json = response.json() expected = { - 'invites': [{ - 'academy': user_invite.academy, - 'cohort': user_invite.cohort, - 'created_at': self.bc.datetime.to_iso_string(user_invite.created_at), - 'email': user_invite.email, - 'first_name': user_invite.first_name, - 'id': user_invite.id, - 'invite_url': f'https://dot.dot/v1/auth/member/invite/{user_invite.token}', - 'last_name': user_invite.last_name, - 'role': user_invite.role, - 'sent_at': user_invite.sent_at, - 'status': user_invite.status, - 'token': user_invite.token, - } for user_invite in model.user_invite], - 'mentor_profiles': [], - 'profile_academies': [], + "invites": [ + { + "academy": user_invite.academy, + "cohort": user_invite.cohort, + "created_at": self.bc.datetime.to_iso_string(user_invite.created_at), + "email": user_invite.email, + "first_name": user_invite.first_name, + "id": user_invite.id, + "invite_url": f"https://dot.dot/v1/auth/member/invite/{user_invite.token}", + "last_name": user_invite.last_name, + "role": user_invite.role, + "sent_at": user_invite.sent_at, + "status": user_invite.status, + "token": user_invite.token, + } + for user_invite in model.user_invite + ], + "mentor_profiles": [], + "profile_academies": [], } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('authenticate.UserInvite'), + self.bc.database.list_of("authenticate.UserInvite"), self.bc.format.to_dict(model.user_invite), ) @@ -207,12 +201,12 @@ def test_profile_invite_me__with_two_user_invites(self): 🔽🔽🔽 With one MentorProfile """ - @patch('os.getenv', MagicMock(return_value='https://dot.dot')) + @patch("os.getenv", MagicMock(return_value="https://dot.dot")) def test_profile_invite_me__with_one_mentor_profile(self): model = self.bc.database.create(user=1, mentor_profile=1, mentorship_service=1) self.client.force_authenticate(model.user) - url = reverse_lazy('authenticate:profile_invite_me') + url = reverse_lazy("authenticate:profile_invite_me") response = self.client.get(url) json = response.json() @@ -221,20 +215,23 @@ def test_profile_invite_me__with_one_mentor_profile(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), [ - self.bc.format.to_dict(model.mentor_profile), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorProfile"), + [ + self.bc.format.to_dict(model.mentor_profile), + ], + ) """ 🔽🔽🔽 With two MentorProfile """ - @patch('os.getenv', MagicMock(return_value='https://dot.dot')) + @patch("os.getenv", MagicMock(return_value="https://dot.dot")) def test_profile_invite_me__with_two_mentor_profiles(self): model = self.bc.database.create(user=1, mentor_profile=2, mentorship_service=1) self.client.force_authenticate(model.user) - url = reverse_lazy('authenticate:profile_invite_me') + url = reverse_lazy("authenticate:profile_invite_me") response = self.client.get(url) json = response.json() @@ -244,7 +241,7 @@ def test_profile_invite_me__with_two_mentor_profiles(self): self.assertEqual(response.status_code, status.HTTP_200_OK) 
self.assertEqual( - self.bc.database.list_of('mentorship.MentorProfile'), + self.bc.database.list_of("mentorship.MentorProfile"), self.bc.format.to_dict(model.mentor_profile), ) @@ -252,107 +249,115 @@ def test_profile_invite_me__with_two_mentor_profiles(self): 🔽🔽🔽 With one ProfileAcademy """ - @patch('os.getenv', MagicMock(return_value='https://dot.dot')) + @patch("os.getenv", MagicMock(return_value="https://dot.dot")) def test_profile_invite_me__with_one_profile_academy(self): - model = self.bc.database.create(user=1, profile_academy=1, role='potato') + model = self.bc.database.create(user=1, profile_academy=1, role="potato") self.client.force_authenticate(model.user) - url = reverse_lazy('authenticate:profile_invite_me') + url = reverse_lazy("authenticate:profile_invite_me") response = self.client.get(url) json = response.json() expected = { - 'invites': [], - 'mentor_profiles': [], - 'profile_academies': [{ - 'academy': { - 'id': model.academy.id, - 'name': model.academy.name, - 'slug': model.academy.slug, - }, - 'address': model.profile_academy.address, - 'created_at': self.bc.datetime.to_iso_string(model.profile_academy.created_at), - 'email': model.profile_academy.email, - 'first_name': model.profile_academy.first_name, - 'id': model.profile_academy.id, - 'invite_url': 'https://dot.dot/v1/auth/academy/html/invite', - 'last_name': model.profile_academy.last_name, - 'phone': model.profile_academy.phone, - 'role': { - 'id': 'potato', - 'name': 'potato', - 'slug': 'potato', - }, - 'status': model.profile_academy.status, - 'user': { - 'email': model.user.email, - 'first_name': model.user.first_name, - 'github': None, - 'id': model.user.id, - 'last_name': model.user.last_name, - 'profile': None, + "invites": [], + "mentor_profiles": [], + "profile_academies": [ + { + "academy": { + "id": model.academy.id, + "name": model.academy.name, + "slug": model.academy.slug, + }, + "address": model.profile_academy.address, + "created_at": self.bc.datetime.to_iso_string(model.profile_academy.created_at), + "email": model.profile_academy.email, + "first_name": model.profile_academy.first_name, + "id": model.profile_academy.id, + "invite_url": "https://dot.dot/v1/auth/academy/html/invite", + "last_name": model.profile_academy.last_name, + "phone": model.profile_academy.phone, + "role": { + "id": "potato", + "name": "potato", + "slug": "potato", + }, + "status": model.profile_academy.status, + "user": { + "email": model.user.email, + "first_name": model.user.first_name, + "github": None, + "id": model.user.id, + "last_name": model.user.last_name, + "profile": None, + }, } - }], + ], } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), [ - self.bc.format.to_dict(model.profile_academy), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.ProfileAcademy"), + [ + self.bc.format.to_dict(model.profile_academy), + ], + ) """ 🔽🔽🔽 With two ProfileAcademy """ - @patch('os.getenv', MagicMock(return_value='https://dot.dot')) + @patch("os.getenv", MagicMock(return_value="https://dot.dot")) def test_profile_invite_me__with_two_profile_academies(self): - model = self.bc.database.create(user=1, profile_academy=2, role='potato') + model = self.bc.database.create(user=1, profile_academy=2, role="potato") self.client.force_authenticate(model.user) - url = reverse_lazy('authenticate:profile_invite_me') + url = reverse_lazy("authenticate:profile_invite_me") response = self.client.get(url) json = 
response.json() expected = { - 'invites': [], - 'mentor_profiles': [], - 'profile_academies': [{ - 'academy': { - 'id': model.academy.id, - 'name': model.academy.name, - 'slug': model.academy.slug, - }, - 'address': profile_academy.address, - 'created_at': self.bc.datetime.to_iso_string(profile_academy.created_at), - 'email': profile_academy.email, - 'first_name': profile_academy.first_name, - 'id': profile_academy.id, - 'invite_url': 'https://dot.dot/v1/auth/academy/html/invite', - 'last_name': profile_academy.last_name, - 'phone': profile_academy.phone, - 'role': { - 'id': 'potato', - 'name': 'potato', - 'slug': 'potato', - }, - 'status': profile_academy.status, - 'user': { - 'email': model.user.email, - 'first_name': model.user.first_name, - 'github': None, - 'id': model.user.id, - 'last_name': model.user.last_name, - 'profile': None, + "invites": [], + "mentor_profiles": [], + "profile_academies": [ + { + "academy": { + "id": model.academy.id, + "name": model.academy.name, + "slug": model.academy.slug, + }, + "address": profile_academy.address, + "created_at": self.bc.datetime.to_iso_string(profile_academy.created_at), + "email": profile_academy.email, + "first_name": profile_academy.first_name, + "id": profile_academy.id, + "invite_url": "https://dot.dot/v1/auth/academy/html/invite", + "last_name": profile_academy.last_name, + "phone": profile_academy.phone, + "role": { + "id": "potato", + "name": "potato", + "slug": "potato", + }, + "status": profile_academy.status, + "user": { + "email": model.user.email, + "first_name": model.user.first_name, + "github": None, + "id": model.user.id, + "last_name": model.user.last_name, + "profile": None, + }, } - } for profile_academy in model.profile_academy], + for profile_academy in model.profile_academy + ], } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('authenticate.ProfileAcademy'), + self.bc.database.list_of("authenticate.ProfileAcademy"), self.bc.format.to_dict(model.profile_academy), ) diff --git a/breathecode/authenticate/tests/urls/tests_profile_me.py b/breathecode/authenticate/tests/urls/tests_profile_me.py index 02da58b9f..0b8977ddc 100644 --- a/breathecode/authenticate/tests/urls/tests_profile_me.py +++ b/breathecode/authenticate/tests/urls/tests_profile_me.py @@ -1,6 +1,7 @@ """ Test cases for /user """ + import random from django.urls.base import reverse_lazy from rest_framework import status @@ -9,42 +10,42 @@ def get_serializer(profile, user): return { - 'avatar_url': profile.avatar_url, - 'bio': profile.bio, - 'blog': profile.blog, - 'github_username': profile.github_username, - 'linkedin_url': profile.linkedin_url, - 'phone': profile.phone, - 'portfolio_url': profile.portfolio_url, - 'show_tutorial': profile.show_tutorial, - 'twitter_username': profile.twitter_username, - 'user': { - 'email': user.email, - 'first_name': user.first_name, - 'id': user.id, - 'last_name': user.last_name, - 'username': user.username, + "avatar_url": profile.avatar_url, + "bio": profile.bio, + "blog": profile.blog, + "github_username": profile.github_username, + "linkedin_url": profile.linkedin_url, + "phone": profile.phone, + "portfolio_url": profile.portfolio_url, + "show_tutorial": profile.show_tutorial, + "twitter_username": profile.twitter_username, + "user": { + "email": user.email, + "first_name": user.first_name, + "id": user.id, + "last_name": user.last_name, + "username": user.username, }, } def post_serializer(user, data={}): return { - 
'avatar_url': None, - 'bio': None, - 'blog': None, - 'github_username': None, - 'linkedin_url': None, - 'phone': '', - 'portfolio_url': None, - 'show_tutorial': True, - 'twitter_username': None, - 'user': { - 'email': user.email, - 'first_name': user.first_name, - 'id': user.id, - 'last_name': user.last_name, - 'username': user.username, + "avatar_url": None, + "bio": None, + "blog": None, + "github_username": None, + "linkedin_url": None, + "phone": "", + "portfolio_url": None, + "show_tutorial": True, + "twitter_username": None, + "user": { + "email": user.email, + "first_name": user.first_name, + "id": user.id, + "last_name": user.last_name, + "username": user.username, }, **data, } @@ -52,21 +53,21 @@ def post_serializer(user, data={}): def put_serializer(profile, user, data={}): return { - 'avatar_url': profile.avatar_url, - 'bio': profile.bio, - 'blog': profile.blog, - 'github_username': profile.github_username, - 'linkedin_url': profile.linkedin_url, - 'phone': profile.phone, - 'portfolio_url': profile.portfolio_url, - 'show_tutorial': profile.show_tutorial, - 'twitter_username': profile.twitter_username, - 'user': { - 'email': user.email, - 'first_name': user.first_name, - 'id': user.id, - 'last_name': user.last_name, - 'username': user.username, + "avatar_url": profile.avatar_url, + "bio": profile.bio, + "blog": profile.blog, + "github_username": profile.github_username, + "linkedin_url": profile.linkedin_url, + "phone": profile.phone, + "portfolio_url": profile.portfolio_url, + "show_tutorial": profile.show_tutorial, + "twitter_username": profile.twitter_username, + "user": { + "email": user.email, + "first_name": user.first_name, + "id": user.id, + "last_name": user.last_name, + "username": user.username, }, **data, } @@ -74,36 +75,37 @@ def put_serializer(profile, user, data={}): def profile_fields(data={}): return { - 'avatar_url': None, - 'bio': None, - 'blog': None, - 'github_username': None, - 'id': 0, - 'linkedin_url': None, - 'phone': '', - 'portfolio_url': None, - 'show_tutorial': True, - 'twitter_username': None, - 'user_id': 0, + "avatar_url": None, + "bio": None, + "blog": None, + "github_username": None, + "id": 0, + "linkedin_url": None, + "phone": "", + "portfolio_url": None, + "show_tutorial": True, + "twitter_username": None, + "user_id": 0, **data, } class AuthenticateTestSuite(AuthTestCase): """Authentication test suite""" + """ 🔽🔽🔽 Auth """ def test__get__without_auth(self): """Test /user/me without auth""" - url = reverse_lazy('authenticate:profile_me') + url = reverse_lazy("authenticate:profile_me") response = self.client.get(url) json = response.json() expected = { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED, + "detail": "Authentication credentials were not provided.", + "status_code": status.HTTP_401_UNAUTHORIZED, } self.assertEqual(json, expected) @@ -117,11 +119,11 @@ def test__get__without_permission(self): """Test /user/me""" model = self.generate_models(authenticate=True) - url = reverse_lazy('authenticate:profile_me') + url = reverse_lazy("authenticate:profile_me") response = self.client.get(url) json = response.json() - expected = {'detail': 'without-permission', 'status_code': 403} + expected = {"detail": "without-permission", "status_code": 403} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) @@ -133,18 +135,18 @@ def test__get__without_permission(self): def test__get__without_profile(self): """Test /user/me""" - permission = 
{'codename': 'get_my_profile'} + permission = {"codename": "get_my_profile"} model = self.generate_models(authenticate=True, permission=permission) - url = reverse_lazy('authenticate:profile_me') + url = reverse_lazy("authenticate:profile_me") response = self.client.get(url) json = response.json() - expected = {'detail': 'profile-not-found', 'status_code': 404} + expected = {"detail": "profile-not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - self.assertEqual(self.bc.database.list_of('authenticate.Profile'), []) + self.assertEqual(self.bc.database.list_of("authenticate.Profile"), []) """ 🔽🔽🔽 GET with one Profile @@ -153,10 +155,10 @@ def test__get__without_profile(self): def test__get__with_profile(self): """Test /user/me""" - permission = {'codename': 'get_my_profile'} + permission = {"codename": "get_my_profile"} model = self.generate_models(authenticate=True, permission=permission, profile=1) - url = reverse_lazy('authenticate:profile_me') + url = reverse_lazy("authenticate:profile_me") response = self.client.get(url) json = response.json() @@ -164,9 +166,12 @@ def test__get__with_profile(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('authenticate.Profile'), [ - self.bc.format.to_dict(model.profile), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.Profile"), + [ + self.bc.format.to_dict(model.profile), + ], + ) """ 🔽🔽🔽 POST without permission @@ -176,11 +181,11 @@ def test__post__without_permission(self): """Test /user/me""" model = self.generate_models(authenticate=True) - url = reverse_lazy('authenticate:profile_me') + url = reverse_lazy("authenticate:profile_me") response = self.client.post(url) json = response.json() - expected = {'detail': 'without-permission', 'status_code': 403} + expected = {"detail": "without-permission", "status_code": 403} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) @@ -192,10 +197,10 @@ def test__post__without_permission(self): def test__post__without_body(self): """Test /user/me""" - permission = {'codename': 'create_my_profile'} + permission = {"codename": "create_my_profile"} model = self.generate_models(authenticate=True, permission=permission) - url = reverse_lazy('authenticate:profile_me') + url = reverse_lazy("authenticate:profile_me") response = self.client.post(url) json = response.json() @@ -203,12 +208,17 @@ def test__post__without_body(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - self.assertEqual(self.bc.database.list_of('authenticate.Profile'), [ - profile_fields({ - 'id': 1, - 'user_id': 1, - }), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.Profile"), + [ + profile_fields( + { + "id": 1, + "user_id": 1, + } + ), + ], + ) """ 🔽🔽🔽 POST passing the arguments @@ -217,21 +227,21 @@ def test__post__without_body(self): def test__post__passing_all_the_fields(self): """Test /user/me""" - permission = {'codename': 'create_my_profile'} + permission = {"codename": "create_my_profile"} model = self.generate_models(authenticate=True, permission=permission) - url = reverse_lazy('authenticate:profile_me') - phone = f'+{random.randint(100000000, 999999999)}' + url = reverse_lazy("authenticate:profile_me") + phone = f"+{random.randint(100000000, 999999999)}" data = { - 'avatar_url': self.bc.fake.url(), - 'bio': self.bc.fake.text(), - 
'phone': phone, - 'show_tutorial': bool(random.getrandbits(1)), - 'twitter_username': self.bc.fake.name().replace(' ', '-'), - 'github_username': self.bc.fake.name().replace(' ', '-'), - 'portfolio_url': self.bc.fake.url(), - 'linkedin_url': self.bc.fake.url(), - 'blog': self.bc.fake.text()[:150].strip(), + "avatar_url": self.bc.fake.url(), + "bio": self.bc.fake.text(), + "phone": phone, + "show_tutorial": bool(random.getrandbits(1)), + "twitter_username": self.bc.fake.name().replace(" ", "-"), + "github_username": self.bc.fake.name().replace(" ", "-"), + "portfolio_url": self.bc.fake.url(), + "linkedin_url": self.bc.fake.url(), + "blog": self.bc.fake.text()[:150].strip(), } response = self.client.post(url, data) @@ -241,13 +251,18 @@ def test__post__passing_all_the_fields(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - self.assertEqual(self.bc.database.list_of('authenticate.Profile'), [ - profile_fields({ - 'id': 1, - 'user_id': 1, - **data, - }), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.Profile"), + [ + profile_fields( + { + "id": 1, + "user_id": 1, + **data, + } + ), + ], + ) """ 🔽🔽🔽 PUT without permission @@ -257,11 +272,11 @@ def test__put__without_permission(self): """Test /user/me""" model = self.generate_models(authenticate=True) - url = reverse_lazy('authenticate:profile_me') + url = reverse_lazy("authenticate:profile_me") response = self.client.put(url) json = response.json() - expected = {'detail': 'without-permission', 'status_code': 403} + expected = {"detail": "without-permission", "status_code": 403} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) @@ -273,18 +288,18 @@ def test__put__without_permission(self): def test__put__not_found(self): """Test /user/me""" - permission = {'codename': 'update_my_profile'} + permission = {"codename": "update_my_profile"} model = self.generate_models(authenticate=True, permission=permission) - url = reverse_lazy('authenticate:profile_me') + url = reverse_lazy("authenticate:profile_me") response = self.client.put(url) json = response.json() - expected = {'detail': 'profile-not-found', 'status_code': 404} + expected = {"detail": "profile-not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - self.assertEqual(self.bc.database.list_of('authenticate.Profile'), []) + self.assertEqual(self.bc.database.list_of("authenticate.Profile"), []) """ 🔽🔽🔽 PUT with one Profile @@ -293,10 +308,10 @@ def test__put__not_found(self): def test__put__with_one_profile__without_body(self): """Test /user/me""" - permission = {'codename': 'update_my_profile'} + permission = {"codename": "update_my_profile"} model = self.generate_models(authenticate=True, permission=permission, profile=1) - url = reverse_lazy('authenticate:profile_me') + url = reverse_lazy("authenticate:profile_me") response = self.client.put(url) json = response.json() @@ -304,12 +319,17 @@ def test__put__with_one_profile__without_body(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('authenticate.Profile'), [ - profile_fields({ - 'id': 1, - 'user_id': 1, - }), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.Profile"), + [ + profile_fields( + { + "id": 1, + "user_id": 1, + } + ), + ], + ) """ 🔽🔽🔽 PUT with one Profile passing all the fields @@ -318,21 +338,21 @@ def 
test__put__with_one_profile__without_body(self): def test__put__with_one_profile__passing_all_the_fields(self): """Test /user/me""" - permission = {'codename': 'update_my_profile'} + permission = {"codename": "update_my_profile"} model = self.generate_models(authenticate=True, permission=permission, profile=1) - url = reverse_lazy('authenticate:profile_me') - phone = f'+{random.randint(100000000, 999999999)}' + url = reverse_lazy("authenticate:profile_me") + phone = f"+{random.randint(100000000, 999999999)}" data = { - 'avatar_url': self.bc.fake.url(), - 'bio': self.bc.fake.text(), - 'phone': phone, - 'show_tutorial': bool(random.getrandbits(1)), - 'twitter_username': self.bc.fake.name().replace(' ', '-'), - 'github_username': self.bc.fake.name().replace(' ', '-'), - 'portfolio_url': self.bc.fake.url(), - 'linkedin_url': self.bc.fake.url(), - 'blog': self.bc.fake.text()[:150].strip(), + "avatar_url": self.bc.fake.url(), + "bio": self.bc.fake.text(), + "phone": phone, + "show_tutorial": bool(random.getrandbits(1)), + "twitter_username": self.bc.fake.name().replace(" ", "-"), + "github_username": self.bc.fake.name().replace(" ", "-"), + "portfolio_url": self.bc.fake.url(), + "linkedin_url": self.bc.fake.url(), + "blog": self.bc.fake.text()[:150].strip(), } response = self.client.put(url, data) @@ -341,11 +361,18 @@ def test__put__with_one_profile__passing_all_the_fields(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('authenticate.Profile'), [{ - **self.bc.format.to_dict(model.profile), - **profile_fields({ - 'id': 1, - 'user_id': 1, - **data, - }), - }]) + self.assertEqual( + self.bc.database.list_of("authenticate.Profile"), + [ + { + **self.bc.format.to_dict(model.profile), + **profile_fields( + { + "id": 1, + "user_id": 1, + **data, + } + ), + } + ], + ) diff --git a/breathecode/authenticate/tests/urls/tests_profile_me_picture.py b/breathecode/authenticate/tests/urls/tests_profile_me_picture.py index b62dc61f9..e14f6b744 100644 --- a/breathecode/authenticate/tests/urls/tests_profile_me_picture.py +++ b/breathecode/authenticate/tests/urls/tests_profile_me_picture.py @@ -1,6 +1,7 @@ """ Test cases for /academy/:id/member/:id """ + import os import random from unittest.mock import MagicMock, PropertyMock, call, patch @@ -14,7 +15,7 @@ from ..mixins.new_auth_test_case import AuthTestCase -SHAPE_OF_URL = 'https://us-central1-labor-day-story.cloudfunctions.net/shape-of-image' +SHAPE_OF_URL = "https://us-central1-labor-day-story.cloudfunctions.net/shape-of-image" def apply_get_env(configuration={}): @@ -27,21 +28,21 @@ def get_env(key, value=None): def put_serializer_creating(user, data={}): return { - 'avatar_url': '', - 'bio': None, - 'blog': None, - 'github_username': None, - 'linkedin_url': None, - 'phone': '', - 'portfolio_url': None, - 'show_tutorial': True, - 'twitter_username': None, - 'user': { - 'email': user.email, - 'first_name': user.first_name, - 'id': user.id, - 'last_name': user.last_name, - 'username': user.username, + "avatar_url": "", + "bio": None, + "blog": None, + "github_username": None, + "linkedin_url": None, + "phone": "", + "portfolio_url": None, + "show_tutorial": True, + "twitter_username": None, + "user": { + "email": user.email, + "first_name": user.first_name, + "id": user.id, + "last_name": user.last_name, + "username": user.username, }, **data, } @@ -49,21 +50,21 @@ def put_serializer_creating(user, data={}): def put_serializer_updating(profile, user, data={}): return 
{ - 'avatar_url': profile.avatar_url, - 'bio': profile.bio, - 'blog': profile.blog, - 'github_username': profile.github_username, - 'linkedin_url': profile.linkedin_url, - 'phone': profile.phone, - 'portfolio_url': profile.portfolio_url, - 'show_tutorial': profile.show_tutorial, - 'twitter_username': profile.twitter_username, - 'user': { - 'email': user.email, - 'first_name': user.first_name, - 'id': user.id, - 'last_name': user.last_name, - 'username': user.username, + "avatar_url": profile.avatar_url, + "bio": profile.bio, + "blog": profile.blog, + "github_username": profile.github_username, + "linkedin_url": profile.linkedin_url, + "phone": profile.phone, + "portfolio_url": profile.portfolio_url, + "show_tutorial": profile.show_tutorial, + "twitter_username": profile.twitter_username, + "user": { + "email": user.email, + "first_name": user.first_name, + "id": user.id, + "last_name": user.last_name, + "username": user.username, }, **data, } @@ -71,17 +72,17 @@ def put_serializer_updating(profile, user, data={}): def profile_row(data={}): return { - 'avatar_url': None, - 'bio': None, - 'blog': None, - 'github_username': None, - 'id': 0, - 'linkedin_url': None, - 'phone': '', - 'portfolio_url': None, - 'show_tutorial': True, - 'twitter_username': None, - 'user_id': 0, + "avatar_url": None, + "bio": None, + "blog": None, + "github_username": None, + "id": 0, + "linkedin_url": None, + "phone": "", + "portfolio_url": None, + "show_tutorial": True, + "twitter_username": None, + "user_id": 0, **data, } @@ -91,97 +92,108 @@ class AuthenticateTestSuite(AuthTestCase): 🔽🔽🔽 Auth """ - filename = '' + filename = "" def tearDown(self): self.bc.garbage_collector.collect() super().tearDown() @patch( - 'os.getenv', - MagicMock(side_effect=apply_get_env({ - 'PROFILE_BUCKET': 'https://dot.dot', - 'GCLOUD_SHAPE_OF_IMAGE': SHAPE_OF_URL - }))) + "os.getenv", + MagicMock( + side_effect=apply_get_env({"PROFILE_BUCKET": "https://dot.dot", "GCLOUD_SHAPE_OF_IMAGE": SHAPE_OF_URL}) + ), + ) def test__without_auth(self): - url = reverse_lazy('authenticate:profile_me_picture') + url = reverse_lazy("authenticate:profile_me_picture") response = self.client.put(url) json = response.json() - self.assertEqual(json, { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED, - }) + self.assertEqual( + json, + { + "detail": "Authentication credentials were not provided.", + "status_code": status.HTTP_401_UNAUTHORIZED, + }, + ) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) - self.assertEqual(self.bc.database.list_of('authenticate.Profile'), []) + self.assertEqual(self.bc.database.list_of("authenticate.Profile"), []) """ 🔽🔽🔽 Put without permission """ @patch( - 'os.getenv', - MagicMock(side_effect=apply_get_env({ - 'PROFILE_BUCKET': 'https://dot.dot', - 'GCLOUD_SHAPE_OF_IMAGE': SHAPE_OF_URL - }))) + "os.getenv", + MagicMock( + side_effect=apply_get_env({"PROFILE_BUCKET": "https://dot.dot", "GCLOUD_SHAPE_OF_IMAGE": SHAPE_OF_URL}) + ), + ) def test__without_permission(self): model = self.bc.database.create(user=1) self.client.force_authenticate(model.user) - url = reverse_lazy('authenticate:profile_me_picture') + url = reverse_lazy("authenticate:profile_me_picture") response = self.client.put(url) json = response.json() - expected = {'detail': 'without-permission', 'status_code': 403} + expected = {"detail": "without-permission", "status_code": 403} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) - 
self.assertEqual(self.bc.database.list_of('authenticate.Profile'), []) + self.assertEqual(self.bc.database.list_of("authenticate.Profile"), []) """ 🔽🔽🔽 Put without passing file """ - @patch.multiple('breathecode.services.google_cloud.Storage', - __init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) @patch.multiple( - 'breathecode.services.google_cloud.File', + "breathecode.services.google_cloud.Storage", + __init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, + ) + @patch.multiple( + "breathecode.services.google_cloud.File", __init__=MagicMock(return_value=None), bucket=PropertyMock(), file_name=PropertyMock(), upload=MagicMock(), exists=MagicMock(return_value=True), - url=MagicMock(return_value='https://storage.cloud.google.com/media-breathecode/hardcoded_url-100x100'), - create=True) + url=MagicMock(return_value="https://storage.cloud.google.com/media-breathecode/hardcoded_url-100x100"), + create=True, + ) @patch( - 'os.getenv', - MagicMock(side_effect=apply_get_env({ - 'PROFILE_BUCKET': 'https://dot.dot', - 'GCLOUD_SHAPE_OF_IMAGE': SHAPE_OF_URL - }))) + "os.getenv", + MagicMock( + side_effect=apply_get_env({"PROFILE_BUCKET": "https://dot.dot", "GCLOUD_SHAPE_OF_IMAGE": SHAPE_OF_URL}) + ), + ) def test__without_passing_file(self): - permission = {'codename': 'update_my_profile'} + permission = {"codename": "update_my_profile"} model = self.bc.database.create(user=1, permission=permission) self.client.force_authenticate(model.user) - url = reverse_lazy('authenticate:profile_me_picture') + url = reverse_lazy("authenticate:profile_me_picture") data = {} response = self.client.put(url, data) json = response.json() - expected = {'detail': 'missing-file', 'status_code': 400} + expected = {"detail": "missing-file", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('authenticate.Profile'), [ - profile_row({ - 'user_id': 1, - 'id': 1, - }), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.Profile"), + [ + profile_row( + { + "user_id": 1, + "id": 1, + } + ), + ], + ) self.assertEqual(Storage.__init__.call_args_list, []) self.assertEqual(File.upload.call_args_list, []) @@ -192,47 +204,55 @@ def test__without_passing_file(self): 🔽🔽🔽 Put passing file and exists in google cloud """ - @patch.multiple('breathecode.services.google_cloud.Storage', - __init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) @patch.multiple( - 'breathecode.services.google_cloud.File', + "breathecode.services.google_cloud.Storage", + __init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, + ) + @patch.multiple( + "breathecode.services.google_cloud.File", __init__=MagicMock(return_value=None), bucket=PropertyMock(), file_name=PropertyMock(), upload=MagicMock(), exists=MagicMock(return_value=True), - url=MagicMock(return_value='https://storage.cloud.google.com/media-breathecode/hardcoded_url-100x100'), - create=True) + url=MagicMock(return_value="https://storage.cloud.google.com/media-breathecode/hardcoded_url-100x100"), + create=True, + ) @patch( - 'os.getenv', - MagicMock(side_effect=apply_get_env({ - 'PROFILE_BUCKET': 'https://dot.dot', - 'GCLOUD_SHAPE_OF_IMAGE': SHAPE_OF_URL - }))) + "os.getenv", + MagicMock( + side_effect=apply_get_env({"PROFILE_BUCKET": "https://dot.dot", "GCLOUD_SHAPE_OF_IMAGE": SHAPE_OF_URL}) + ), + ) def test__passing_file__file_exists(self): # random_image file, self.filename = 
self.bc.random.file() - permission = {'codename': 'update_my_profile'} + permission = {"codename": "update_my_profile"} model = self.bc.database.create(user=1, permission=permission) self.client.force_authenticate(model.user) - url = reverse_lazy('authenticate:profile_me_picture') - response = self.client.put(url, {'name': self.filename, 'file': file}) + url = reverse_lazy("authenticate:profile_me_picture") + response = self.client.put(url, {"name": self.filename, "file": file}) json = response.json() - expected = {'detail': 'bad-file-format', 'status_code': 400} + expected = {"detail": "bad-file-format", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('authenticate.Profile'), [ - profile_row({ - 'user_id': 1, - 'id': 1, - }), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.Profile"), + [ + profile_row( + { + "user_id": 1, + "id": 1, + } + ), + ], + ) self.assertEqual(Storage.__init__.call_args_list, []) self.assertEqual(File.upload.call_args_list, []) @@ -243,28 +263,31 @@ def test__passing_file__file_exists(self): 🔽🔽🔽 Put with Profile, passing file and exists in google cloud """ - @patch.multiple('breathecode.services.google_cloud.Storage', - __init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) @patch.multiple( - 'breathecode.services.google_cloud.File', + "breathecode.services.google_cloud.Storage", + __init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, + ) + @patch.multiple( + "breathecode.services.google_cloud.File", __init__=MagicMock(return_value=None), bucket=PropertyMock(), file_name=PropertyMock(), upload=MagicMock(), exists=MagicMock(return_value=True), - url=MagicMock(return_value='https://storage.cloud.google.com/media-breathecode/hardcoded_url-100x100'), - create=True) + url=MagicMock(return_value="https://storage.cloud.google.com/media-breathecode/hardcoded_url-100x100"), + create=True, + ) @patch( - 'os.getenv', - MagicMock(side_effect=apply_get_env({ - 'PROFILE_BUCKET': 'https://dot.dot', - 'GCLOUD_SHAPE_OF_IMAGE': SHAPE_OF_URL - }))) + "os.getenv", + MagicMock( + side_effect=apply_get_env({"PROFILE_BUCKET": "https://dot.dot", "GCLOUD_SHAPE_OF_IMAGE": SHAPE_OF_URL}) + ), + ) def test__passing_file__with_profile__file_exists(self): - exts = ['png', 'jpg', 'jpeg'] - permission = {'codename': 'update_my_profile'} + exts = ["png", "jpg", "jpeg"] + permission = {"codename": "update_my_profile"} base = self.bc.database.create(permission=permission) for ext in exts: @@ -273,26 +296,32 @@ def test__passing_file__with_profile__file_exists(self): model = self.bc.database.create(user=1, permission=base.permission, profile=1) self.client.force_authenticate(model.user) - url = reverse_lazy('authenticate:profile_me_picture') - response = self.client.put(url, {'name': 'filename.lbs', 'file': file}) + url = reverse_lazy("authenticate:profile_me_picture") + response = self.client.put(url, {"name": "filename.lbs", "file": file}) json = response.json() expected = put_serializer_updating( model.profile, model.user, data={ - 'avatar_url': 'https://storage.cloud.google.com/media-breathecode/hardcoded_url-100x100', - }) + "avatar_url": "https://storage.cloud.google.com/media-breathecode/hardcoded_url-100x100", + }, + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('authenticate.Profile'), [ - profile_row({ - 'user_id': 
model.user.id, - 'id': model.profile.id, - 'avatar_url': 'https://storage.cloud.google.com/media-breathecode/hardcoded_url-100x100', - }), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.Profile"), + [ + profile_row( + { + "user_id": model.user.id, + "id": model.profile.id, + "avatar_url": "https://storage.cloud.google.com/media-breathecode/hardcoded_url-100x100", + } + ), + ], + ) self.assertEqual(Storage.__init__.call_args_list, [call()]) self.assertEqual(File.upload.call_args_list, []) @@ -300,7 +329,7 @@ def test__passing_file__with_profile__file_exists(self): self.assertEqual(File.url.call_args_list, [call()]) # teardown - self.bc.database.delete('authenticate.Profile') + self.bc.database.delete("authenticate.Profile") Storage.__init__.call_args_list = [] File.upload.call_args_list = [] File.exists.call_args_list = [] @@ -310,57 +339,66 @@ def test__passing_file__with_profile__file_exists(self): 🔽🔽🔽 Put with Profile, passing file and does'nt exists in google cloud, shape is square """ - @patch.multiple('breathecode.services.google_cloud.Storage', - __init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) @patch.multiple( - 'breathecode.services.google_cloud.File', + "breathecode.services.google_cloud.Storage", + __init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, + ) + @patch.multiple( + "breathecode.services.google_cloud.File", __init__=MagicMock(return_value=None), bucket=PropertyMock(), file_name=PropertyMock(), delete=MagicMock(), upload=MagicMock(), exists=MagicMock(return_value=False), - url=MagicMock(return_value='https://storage.cloud.google.com/media-breathecode/hardcoded_url-100x100'), - create=True) + url=MagicMock(return_value="https://storage.cloud.google.com/media-breathecode/hardcoded_url-100x100"), + create=True, + ) @patch( - 'os.getenv', - MagicMock(side_effect=apply_get_env({ - 'PROFILE_BUCKET': 'https://dot.dot', - 'GCLOUD_SHAPE_OF_IMAGE': SHAPE_OF_URL - }))) - @patch('google.oauth2.id_token.fetch_id_token', MagicMock(return_value='blablabla')) - @patch('requests.request', apply_requests_request_mock([(200, SHAPE_OF_URL, {'shape': 'Square'})])) - @patch('breathecode.services.google_cloud.credentials.resolve_credentials', MagicMock()) + "os.getenv", + MagicMock( + side_effect=apply_get_env({"PROFILE_BUCKET": "https://dot.dot", "GCLOUD_SHAPE_OF_IMAGE": SHAPE_OF_URL}) + ), + ) + @patch("google.oauth2.id_token.fetch_id_token", MagicMock(return_value="blablabla")) + @patch("requests.request", apply_requests_request_mock([(200, SHAPE_OF_URL, {"shape": "Square"})])) + @patch("breathecode.services.google_cloud.credentials.resolve_credentials", MagicMock()) def test__passing_file__with_profile__file_does_not_exists__shape_is_square(self): file, self.filename = self.bc.random.image(2, 2) - permission = {'codename': 'update_my_profile'} - profile = {'avatar_url': f'https://blabla.bla/{self.bc.random.string(size=64, lower=True)}-100x100'} + permission = {"codename": "update_my_profile"} + profile = {"avatar_url": f"https://blabla.bla/{self.bc.random.string(size=64, lower=True)}-100x100"} model = self.bc.database.create(user=1, permission=permission, profile=profile) self.client.force_authenticate(model.user) - url = reverse_lazy('authenticate:profile_me_picture') - response = self.client.put(url, {'name': 'filename.lbs', 'file': file}) + url = reverse_lazy("authenticate:profile_me_picture") + response = self.client.put(url, {"name": "filename.lbs", "file": file}) json = response.json() expected = 
put_serializer_updating( model.profile, model.user, data={ - 'avatar_url': 'https://storage.cloud.google.com/media-breathecode/hardcoded_url-100x100', - }) + "avatar_url": "https://storage.cloud.google.com/media-breathecode/hardcoded_url-100x100", + }, + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('authenticate.Profile'), [ - profile_row({ - 'user_id': 1, - 'id': 1, - 'avatar_url': 'https://storage.cloud.google.com/media-breathecode/hardcoded_url-100x100', - }), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.Profile"), + [ + profile_row( + { + "user_id": 1, + "id": 1, + "avatar_url": "https://storage.cloud.google.com/media-breathecode/hardcoded_url-100x100", + } + ), + ], + ) self.assertEqual(Storage.__init__.call_args_list, [call()]) self.assertEqual(len(File.upload.call_args_list), 1) @@ -372,29 +410,32 @@ def test__passing_file__with_profile__file_does_not_exists__shape_is_square(self 🔽🔽🔽 Put with Profile, passing file and does'nt exists in google cloud, shape is not square """ - @patch.multiple('breathecode.services.google_cloud.Storage', - __init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) @patch.multiple( - 'breathecode.services.google_cloud.File', + "breathecode.services.google_cloud.Storage", + __init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, + ) + @patch.multiple( + "breathecode.services.google_cloud.File", __init__=MagicMock(return_value=None), bucket=PropertyMock(), file_name=PropertyMock(), delete=MagicMock(), upload=MagicMock(), exists=MagicMock(return_value=False), - url=MagicMock(return_value='https://storage.cloud.google.com/media-breathecode/hardcoded_url-100x100'), - create=True) + url=MagicMock(return_value="https://storage.cloud.google.com/media-breathecode/hardcoded_url-100x100"), + create=True, + ) @patch( - 'os.getenv', - MagicMock(side_effect=apply_get_env({ - 'PROFILE_BUCKET': 'https://dot.dot', - 'GCLOUD_SHAPE_OF_IMAGE': SHAPE_OF_URL - }))) - @patch('google.oauth2.id_token.fetch_id_token', MagicMock(return_value='blablabla')) - @patch('requests.request', apply_requests_request_mock([(200, SHAPE_OF_URL, {'shape': 'Rectangle'})])) - @patch('breathecode.services.google_cloud.credentials.resolve_credentials', MagicMock()) + "os.getenv", + MagicMock( + side_effect=apply_get_env({"PROFILE_BUCKET": "https://dot.dot", "GCLOUD_SHAPE_OF_IMAGE": SHAPE_OF_URL}) + ), + ) + @patch("google.oauth2.id_token.fetch_id_token", MagicMock(return_value="blablabla")) + @patch("requests.request", apply_requests_request_mock([(200, SHAPE_OF_URL, {"shape": "Rectangle"})])) + @patch("breathecode.services.google_cloud.credentials.resolve_credentials", MagicMock()) def test__passing_file__with_profile__file_does_not_exists__shape_is_not_square(self): options = [2, 1] @@ -404,22 +445,25 @@ def test__passing_file__with_profile__file_does_not_exists__shape_is_not_square( file, self.filename = self.bc.random.image(width, height) - permission = {'codename': 'update_my_profile'} - profile = {'avatar_url': f'https://blabla.bla/{self.bc.random.string(size=64, lower=True)}-100x100'} + permission = {"codename": "update_my_profile"} + profile = {"avatar_url": f"https://blabla.bla/{self.bc.random.string(size=64, lower=True)}-100x100"} model = self.bc.database.create(user=1, permission=permission, profile=profile) self.client.force_authenticate(model.user) - url = reverse_lazy('authenticate:profile_me_picture') - response = 
self.client.put(url, {'name': 'filename.lbs', 'file': file}) + url = reverse_lazy("authenticate:profile_me_picture") + response = self.client.put(url, {"name": "filename.lbs", "file": file}) json = response.json() - expected = {'detail': 'not-square-image', 'status_code': 400} + expected = {"detail": "not-square-image", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('authenticate.Profile'), [ - self.bc.format.to_dict(model.profile), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.Profile"), + [ + self.bc.format.to_dict(model.profile), + ], + ) self.assertEqual(Storage.__init__.call_args_list, [call()]) self.assertEqual(len(File.upload.call_args_list), 1) diff --git a/breathecode/authenticate/tests/urls/tests_slack.py b/breathecode/authenticate/tests/urls/tests_slack.py index 0d12eb077..2386d7377 100644 --- a/breathecode/authenticate/tests/urls/tests_slack.py +++ b/breathecode/authenticate/tests/urls/tests_slack.py @@ -1,6 +1,7 @@ """ Test cases for /user """ + import base64 import os import urllib @@ -16,77 +17,93 @@ class AuthenticateTestSuite(AuthTestCase): def test_slack_without_url(self): """Test /slack without auth""" - url = reverse_lazy('authenticate:slack') + url = reverse_lazy("authenticate:slack") response = self.client.get(url) data = response.data - details = data['details'] - status_code = data['status_code'] + details = data["details"] + status_code = data["status_code"] self.assertEqual(2, len(data)) - self.assertEqual(details, 'No callback URL specified') + self.assertEqual(details, "No callback URL specified") self.assertEqual(status_code, status.HTTP_500_INTERNAL_SERVER_ERROR) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) def test_slack_without_user(self): """Test /slack without auth""" - original_url_callback = 'https://google.co.ve' - url = reverse_lazy('authenticate:slack') + original_url_callback = "https://google.co.ve" + url = reverse_lazy("authenticate:slack") params = { - 'url': base64.b64encode(original_url_callback.encode('utf-8')), + "url": base64.b64encode(original_url_callback.encode("utf-8")), } - response = self.client.get(f'{url}?{urllib.parse.urlencode(params)}') + response = self.client.get(f"{url}?{urllib.parse.urlencode(params)}") data = response.data - details = data['details'] - status_code = data['status_code'] + details = data["details"] + status_code = data["status_code"] self.assertEqual(2, len(data)) - self.assertEqual(details, 'No user specified on the URL') + self.assertEqual(details, "No user specified on the URL") self.assertEqual(status_code, status.HTTP_500_INTERNAL_SERVER_ERROR) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) def test_slack_without_a(self): """Test /slack without auth""" - original_url_callback = 'https://google.co.ve' - url = reverse_lazy('authenticate:slack') - user = '1234567890' - params = {'url': base64.b64encode(original_url_callback.encode('utf-8')), 'user': user} - response = self.client.get(f'{url}?{urllib.parse.urlencode(params)}') + original_url_callback = "https://google.co.ve" + url = reverse_lazy("authenticate:slack") + user = "1234567890" + params = {"url": base64.b64encode(original_url_callback.encode("utf-8")), "user": user} + response = self.client.get(f"{url}?{urllib.parse.urlencode(params)}") data = response.data - details = data['details'] - status_code = data['status_code'] + details = data["details"] + status_code = 
data["status_code"] self.assertEqual(2, len(data)) - self.assertEqual(details, 'No academy specified on the URL') + self.assertEqual(details, "No academy specified on the URL") self.assertEqual(status_code, status.HTTP_500_INTERNAL_SERVER_ERROR) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) def test_slack(self): """Test /slack""" - original_url_callback = 'https://google.co.ve' - url = reverse_lazy('authenticate:slack') - academy = 'Team 7' - user = '1234567890' - params = {'url': base64.b64encode(original_url_callback.encode('utf-8')), 'user': user, 'a': academy} - response = self.client.get(f'{url}?{urllib.parse.urlencode(params)}') - - query_string = f'a={academy}&url={original_url_callback}&user={user}'.encode('utf-8') - payload = str(base64.urlsafe_b64encode(query_string), 'utf-8') - scopes = ('app_mentions:read', 'channels:history', 'channels:join', 'channels:read', 'chat:write', - 'chat:write.customize', 'commands', 'files:read', 'files:write', 'groups:history', 'groups:read', - 'groups:write', 'incoming-webhook', 'team:read', 'users:read', 'users:read.email', - 'users.profile:read', 'users:read') + original_url_callback = "https://google.co.ve" + url = reverse_lazy("authenticate:slack") + academy = "Team 7" + user = "1234567890" + params = {"url": base64.b64encode(original_url_callback.encode("utf-8")), "user": user, "a": academy} + response = self.client.get(f"{url}?{urllib.parse.urlencode(params)}") + + query_string = f"a={academy}&url={original_url_callback}&user={user}".encode("utf-8") + payload = str(base64.urlsafe_b64encode(query_string), "utf-8") + scopes = ( + "app_mentions:read", + "channels:history", + "channels:join", + "channels:read", + "chat:write", + "chat:write.customize", + "commands", + "files:read", + "files:write", + "groups:history", + "groups:read", + "groups:write", + "incoming-webhook", + "team:read", + "users:read", + "users:read.email", + "users.profile:read", + "users:read", + ) params = { - 'client_id': os.getenv('SLACK_CLIENT_ID', ''), - 'redirect_uri': os.getenv('SLACK_REDIRECT_URL', '') + '?payload=' + payload, - 'scope': ','.join(scopes) + "client_id": os.getenv("SLACK_CLIENT_ID", ""), + "redirect_uri": os.getenv("SLACK_REDIRECT_URL", "") + "?payload=" + payload, + "scope": ",".join(scopes), } - redirect = 'https://slack.com/oauth/v2/authorize?' + redirect = "https://slack.com/oauth/v2/authorize?" 
for key in params: - redirect += f'{key}={params[key]}&' + redirect += f"{key}={params[key]}&" self.assertEqual(response.status_code, status.HTTP_302_FOUND) self.assertEqual(response.url, redirect) diff --git a/breathecode/authenticate/tests/urls/tests_slack_callback.py b/breathecode/authenticate/tests/urls/tests_slack_callback.py index 5a06074c6..5242af105 100644 --- a/breathecode/authenticate/tests/urls/tests_slack_callback.py +++ b/breathecode/authenticate/tests/urls/tests_slack_callback.py @@ -1,6 +1,7 @@ """ Test cases for /user """ + import base64 import urllib from unittest import mock @@ -15,45 +16,45 @@ class AuthenticateTestSuite(AuthTestCase, SlackTestCase): def test_slack_callback_with_error(self): """Test /slack/callback without auth""" - url = reverse_lazy('authenticate:slack_callback') - params = {'error': 'Oh my god', 'error_description': 'They killed kenny'} - response = self.client.get(f'{url}?{urllib.parse.urlencode(params)}') + url = reverse_lazy("authenticate:slack_callback") + params = {"error": "Oh my god", "error_description": "They killed kenny"} + response = self.client.get(f"{url}?{urllib.parse.urlencode(params)}") data = response.data - detail = str(data['detail']) - status_code = data['status_code'] + detail = str(data["detail"]) + status_code = data["status_code"] self.assertEqual(2, len(data)) - self.assertEqual(detail, 'Slack: They killed kenny') + self.assertEqual(detail, "Slack: They killed kenny") self.assertEqual(status_code, status.HTTP_500_INTERNAL_SERVER_ERROR) self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR) def test_slack_callback_without_callback(self): """Test /slack/callback without auth""" - url = reverse_lazy('authenticate:slack_callback') + url = reverse_lazy("authenticate:slack_callback") response = self.client.get(url) data = response.data - details = str(data['details']) - status_code = data['status_code'] + details = str(data["details"]) + status_code = data["status_code"] self.assertEqual(2, len(data)) - self.assertEqual(details, 'No payload specified') + self.assertEqual(details, "No payload specified") self.assertEqual(status_code, status.HTTP_500_INTERNAL_SERVER_ERROR) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) def test_slack_callback_with_bad_callback(self): """Test /slack/callback without auth""" - url = reverse_lazy('authenticate:slack_callback') - params = {'payload': 'They killed kenny'} - response = self.client.get(f'{url}?{urllib.parse.urlencode(params)}') + url = reverse_lazy("authenticate:slack_callback") + params = {"payload": "They killed kenny"} + response = self.client.get(f"{url}?{urllib.parse.urlencode(params)}") data = response.data - details = str(data['details']) - status_code = data['status_code'] + details = str(data["details"]) + status_code = data["status_code"] self.assertEqual(2, len(data)) - self.assertEqual(details, 'Cannot decode payload in base64') + self.assertEqual(details, "Cannot decode payload in base64") self.assertEqual(status_code, status.HTTP_500_INTERNAL_SERVER_ERROR) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -61,19 +62,19 @@ def test_slack_callback_without_url_in_payload(self): """Test /slack/callback without auth""" self.slack() self.get_academy() - url = reverse_lazy('authenticate:slack_callback') + url = reverse_lazy("authenticate:slack_callback") - query_string = ''.encode('utf-8') - payload = str(base64.urlsafe_b64encode(query_string), 'utf-8') - params = {'payload': payload} - response = 
self.client.get(f'{url}?{urllib.parse.urlencode(params)}') + query_string = "".encode("utf-8") + payload = str(base64.urlsafe_b64encode(query_string), "utf-8") + params = {"payload": payload} + response = self.client.get(f"{url}?{urllib.parse.urlencode(params)}") data = response.data - details = str(data['details']) - status_code = data['status_code'] + details = str(data["details"]) + status_code = data["status_code"] self.assertEqual(2, len(data)) - self.assertEqual(details, 'No url specified from the slack payload') + self.assertEqual(details, "No url specified from the slack payload") self.assertEqual(status_code, status.HTTP_500_INTERNAL_SERVER_ERROR) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -82,20 +83,20 @@ def test_slack_callback_without_user_in_payload(self): self.slack() self.get_academy() original_url_callback = self.url_callback - url = reverse_lazy('authenticate:slack_callback') + url = reverse_lazy("authenticate:slack_callback") academy = 2 - query_string = f'a={academy}&url={original_url_callback}'.encode('utf-8') - payload = str(base64.urlsafe_b64encode(query_string), 'utf-8') - params = {'payload': payload} - response = self.client.get(f'{url}?{urllib.parse.urlencode(params)}') + query_string = f"a={academy}&url={original_url_callback}".encode("utf-8") + payload = str(base64.urlsafe_b64encode(query_string), "utf-8") + params = {"payload": payload} + response = self.client.get(f"{url}?{urllib.parse.urlencode(params)}") data = response.data - details = str(data['details']) - status_code = data['status_code'] + details = str(data["details"]) + status_code = data["status_code"] self.assertEqual(2, len(data)) - self.assertEqual(details, 'No user id specified from the slack payload') + self.assertEqual(details, "No user id specified from the slack payload") self.assertEqual(status_code, status.HTTP_500_INTERNAL_SERVER_ERROR) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -104,20 +105,20 @@ def test_slack_callback_without_a_in_payload(self): self.slack() self.get_academy() original_url_callback = self.url_callback - url = reverse_lazy('authenticate:slack_callback') + url = reverse_lazy("authenticate:slack_callback") user = 1 - query_string = f'user={user}&url={original_url_callback}'.encode('utf-8') - payload = str(base64.urlsafe_b64encode(query_string), 'utf-8') - params = {'payload': payload} - response = self.client.get(f'{url}?{urllib.parse.urlencode(params)}') + query_string = f"user={user}&url={original_url_callback}".encode("utf-8") + payload = str(base64.urlsafe_b64encode(query_string), "utf-8") + params = {"payload": payload} + response = self.client.get(f"{url}?{urllib.parse.urlencode(params)}") data = response.data - details = str(data['details']) - status_code = data['status_code'] + details = str(data["details"]) + status_code = data["status_code"] self.assertEqual(2, len(data)) - self.assertEqual(details, 'No academy id specified from the slack payload') + self.assertEqual(details, "No academy id specified from the slack payload") self.assertEqual(status_code, status.HTTP_500_INTERNAL_SERVER_ERROR) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -126,21 +127,21 @@ def test_slack_callback_with_user_in_payload_but_not_exist(self): self.slack() self.get_academy() original_url_callback = self.url_callback - url = reverse_lazy('authenticate:slack_callback') + url = reverse_lazy("authenticate:slack_callback") academy = 2 user = 1 - query_string = 
f'user={user}&a={academy}&url={original_url_callback}'.encode('utf-8') - payload = str(base64.urlsafe_b64encode(query_string), 'utf-8') - params = {'payload': payload} - response = self.client.get(f'{url}?{urllib.parse.urlencode(params)}') + query_string = f"user={user}&a={academy}&url={original_url_callback}".encode("utf-8") + payload = str(base64.urlsafe_b64encode(query_string), "utf-8") + params = {"payload": payload} + response = self.client.get(f"{url}?{urllib.parse.urlencode(params)}") data = response.data - details = str(data['details']) - status_code = data['status_code'] + details = str(data["details"]) + status_code = data["status_code"] self.assertEqual(2, len(data)) - self.assertEqual(details, 'Not exist academy with that id') + self.assertEqual(details, "Not exist academy with that id") self.assertEqual(status_code, status.HTTP_500_INTERNAL_SERVER_ERROR) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -149,21 +150,21 @@ def test_slack_callback_with_a_in_payload_but_not_exist(self): self.slack() self.get_academy() original_url_callback = self.url_callback - url = reverse_lazy('authenticate:slack_callback') + url = reverse_lazy("authenticate:slack_callback") academy = 1 user = 2 - query_string = f'user={user}&a={academy}&url={original_url_callback}'.encode('utf-8') - payload = str(base64.urlsafe_b64encode(query_string), 'utf-8') - params = {'payload': payload} - response = self.client.get(f'{url}?{urllib.parse.urlencode(params)}') + query_string = f"user={user}&a={academy}&url={original_url_callback}".encode("utf-8") + payload = str(base64.urlsafe_b64encode(query_string), "utf-8") + params = {"payload": payload} + response = self.client.get(f"{url}?{urllib.parse.urlencode(params)}") data = response.data - details = str(data['details']) - status_code = data['status_code'] + details = str(data["details"]) + status_code = data["status_code"] self.assertEqual(2, len(data)) - self.assertEqual(details, 'Not exist user with that id') + self.assertEqual(details, "Not exist user with that id") self.assertEqual(status_code, status.HTTP_500_INTERNAL_SERVER_ERROR) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -172,38 +173,38 @@ def test_slack_callback_without_code(self): self.slack() self.get_academy() original_url_callback = self.url_callback - url = reverse_lazy('authenticate:slack_callback') + url = reverse_lazy("authenticate:slack_callback") academy = 1 user = 1 - query_string = f'user={user}&a={academy}&url={original_url_callback}'.encode('utf-8') - payload = str(base64.urlsafe_b64encode(query_string), 'utf-8') - params = {'payload': payload} - response = self.client.get(f'{url}?{urllib.parse.urlencode(params)}') + query_string = f"user={user}&a={academy}&url={original_url_callback}".encode("utf-8") + payload = str(base64.urlsafe_b64encode(query_string), "utf-8") + params = {"payload": payload} + response = self.client.get(f"{url}?{urllib.parse.urlencode(params)}") data = response.data - details = str(data['details']) - status_code = data['status_code'] + details = str(data["details"]) + status_code = data["status_code"] self.assertEqual(2, len(data)) - self.assertEqual(details, 'No slack code specified') + self.assertEqual(details, "No slack code specified") self.assertEqual(status_code, status.HTTP_500_INTERNAL_SERVER_ERROR) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - @mock.patch('requests.post', SlackRequestsMock.apply_post_requests_mock()) + @mock.patch("requests.post", 
SlackRequestsMock.apply_post_requests_mock()) def test_slack_callback_without_code2(self): """Test /slack/callback without auth""" self.slack() self.get_academy() original_url_callback = self.url_callback - url = reverse_lazy('authenticate:slack_callback') + url = reverse_lazy("authenticate:slack_callback") academy = 1 user = 1 - query_string = f'user={user}&a={academy}&url={original_url_callback}'.encode('utf-8') - payload = str(base64.urlsafe_b64encode(query_string), 'utf-8') - params = {'payload': payload, 'code': 'haha'} - response = self.client.get(f'{url}?{urllib.parse.urlencode(params)}') + query_string = f"user={user}&a={academy}&url={original_url_callback}".encode("utf-8") + payload = str(base64.urlsafe_b64encode(query_string), "utf-8") + params = {"payload": payload, "code": "haha"} + response = self.client.get(f"{url}?{urllib.parse.urlencode(params)}") self.assertEqual(response.url, original_url_callback) self.assertEqual(response.status_code, status.HTTP_302_FOUND) diff --git a/breathecode/authenticate/tests/urls/tests_subscribe.py b/breathecode/authenticate/tests/urls/tests_subscribe.py index 4815d2780..b5e480a78 100644 --- a/breathecode/authenticate/tests/urls/tests_subscribe.py +++ b/breathecode/authenticate/tests/urls/tests_subscribe.py @@ -1,6 +1,7 @@ """ Test /v1/auth/subscribe """ + import hashlib import os import random @@ -23,134 +24,134 @@ def user_db_item(data={}): return { - 'email': '', - 'first_name': '', - 'id': 0, - 'is_active': True, - 'is_staff': False, - 'is_superuser': False, - 'last_login': None, - 'last_name': '', - 'password': '', - 'username': '', + "email": "", + "first_name": "", + "id": 0, + "is_active": True, + "is_staff": False, + "is_superuser": False, + "last_login": None, + "last_name": "", + "password": "", + "username": "", **data, } def plan_db_item(plan, data={}): return { - 'id': plan.id, - 'event_type_set_id': plan.event_type_set.id if plan.event_type_set else None, - 'mentorship_service_set_id': plan.mentorship_service_set.id if plan.mentorship_service_set else None, - 'cohort_set_id': plan.cohort_set.id if plan.cohort_set else None, - 'currency_id': plan.currency.id, - 'slug': plan.slug, - 'status': plan.status, - 'has_waiting_list': plan.has_waiting_list, - 'is_onboarding': plan.is_onboarding, - 'time_of_life': plan.time_of_life, - 'time_of_life_unit': plan.time_of_life_unit, - 'trial_duration': plan.trial_duration, - 'trial_duration_unit': plan.trial_duration_unit, - 'is_renewable': plan.is_renewable, - 'owner_id': plan.owner.id if plan.owner else None, - 'price_per_half': plan.price_per_half, - 'price_per_month': plan.price_per_month, - 'price_per_quarter': plan.price_per_quarter, - 'price_per_year': plan.price_per_year, + "id": plan.id, + "event_type_set_id": plan.event_type_set.id if plan.event_type_set else None, + "mentorship_service_set_id": plan.mentorship_service_set.id if plan.mentorship_service_set else None, + "cohort_set_id": plan.cohort_set.id if plan.cohort_set else None, + "currency_id": plan.currency.id, + "slug": plan.slug, + "status": plan.status, + "has_waiting_list": plan.has_waiting_list, + "is_onboarding": plan.is_onboarding, + "time_of_life": plan.time_of_life, + "time_of_life_unit": plan.time_of_life_unit, + "trial_duration": plan.trial_duration, + "trial_duration_unit": plan.trial_duration_unit, + "is_renewable": plan.is_renewable, + "owner_id": plan.owner.id if plan.owner else None, + "price_per_half": plan.price_per_half, + "price_per_month": plan.price_per_month, + "price_per_quarter": plan.price_per_quarter, 
+ "price_per_year": plan.price_per_year, **data, } def user_invite_db_item(data={}): return { - 'academy_id': None, - 'author_id': None, - 'cohort_id': None, - 'id': 1, - 'role_id': None, - 'sent_at': None, - 'status': 'PENDING', - 'conversion_info': None, - 'has_marketing_consent': False, - 'event_slug': None, - 'asset_slug': None, - 'is_email_validated': False, - 'token': '', - 'process_message': '', - 'process_status': 'PENDING', - 'syllabus_id': None, - 'user_id': None, - 'city': None, - 'email': 'pokemon@potato.io', - 'email_quality': None, - 'email_status': None, - 'country': None, - 'first_name': None, - 'last_name': None, - 'latitude': None, - 'longitude': None, - 'phone': '', + "academy_id": None, + "author_id": None, + "cohort_id": None, + "id": 1, + "role_id": None, + "sent_at": None, + "status": "PENDING", + "conversion_info": None, + "has_marketing_consent": False, + "event_slug": None, + "asset_slug": None, + "is_email_validated": False, + "token": "", + "process_message": "", + "process_status": "PENDING", + "syllabus_id": None, + "user_id": None, + "city": None, + "email": "pokemon@potato.io", + "email_quality": None, + "email_status": None, + "country": None, + "first_name": None, + "last_name": None, + "latitude": None, + "longitude": None, + "phone": "", **data, } def plan_serializer(plan): return { - 'financing_options': [], - 'service_items': [], - 'has_available_cohorts': bool(plan.cohort_set), - 'slug': plan.slug, - 'status': plan.status, - 'time_of_life': plan.time_of_life, - 'time_of_life_unit': plan.time_of_life_unit, - 'trial_duration': plan.trial_duration, - 'trial_duration_unit': plan.trial_duration_unit, + "financing_options": [], + "service_items": [], + "has_available_cohorts": bool(plan.cohort_set), + "slug": plan.slug, + "status": plan.status, + "time_of_life": plan.time_of_life, + "time_of_life_unit": plan.time_of_life_unit, + "trial_duration": plan.trial_duration, + "trial_duration_unit": plan.trial_duration_unit, } def post_serializer(plans=[], data={}): return { - 'id': 0, - 'access_token': None, - 'cohort': None, - 'syllabus': None, - 'email': '', - 'first_name': '', - 'last_name': '', - 'phone': '', - 'plans': [plan_serializer(plan) for plan in plans], - 'city': None, - 'country': None, - 'latitude': None, - 'longitude': None, - 'conversion_info': None, - 'asset_slug': None, - 'event_slug': None, + "id": 0, + "access_token": None, + "cohort": None, + "syllabus": None, + "email": "", + "first_name": "", + "last_name": "", + "phone": "", + "plans": [plan_serializer(plan) for plan in plans], + "city": None, + "country": None, + "latitude": None, + "longitude": None, + "conversion_info": None, + "asset_slug": None, + "event_slug": None, **data, } def put_serializer(user_invite, cohort=None, syllabus=None, user=None, plans=[], data={}): return { - 'id': user_invite.id, - 'access_token': None, - 'cohort': cohort.id if cohort else None, - 'syllabus': syllabus.id if syllabus else None, - 'email': user_invite.email, - 'first_name': user_invite.first_name, - 'last_name': user_invite.last_name, - 'phone': user_invite.phone, - 'user': user.id if user else None, - 'plans': [plan_serializer(plan) for plan in plans], - 'city': None, - 'country': None, - 'latitude': None, - 'longitude': None, - 'conversion_info': None, - 'asset_slug': None, - 'event_slug': None, - 'status': user_invite.status, + "id": user_invite.id, + "access_token": None, + "cohort": cohort.id if cohort else None, + "syllabus": syllabus.id if syllabus else None, + "email": user_invite.email, + 
"first_name": user_invite.first_name, + "last_name": user_invite.last_name, + "phone": user_invite.phone, + "user": user.id if user else None, + "plans": [plan_serializer(plan) for plan in plans], + "city": None, + "country": None, + "latitude": None, + "longitude": None, + "conversion_info": None, + "asset_slug": None, + "event_slug": None, + "status": user_invite.status, **data, } @@ -160,9 +161,9 @@ def put_serializer(user_invite, cohort=None, syllabus=None, user=None, plans=[], @pytest.fixture(autouse=True) def setup(monkeypatch: pytest.MonkeyPatch, db): - monkeypatch.setattr('os.urandom', lambda _: b) - monkeypatch.setattr('breathecode.authenticate.tasks.create_user_from_invite.delay', MagicMock()) - monkeypatch.setattr('breathecode.authenticate.tasks.async_validate_email_invite.delay', MagicMock()) + monkeypatch.setattr("os.urandom", lambda _: b) + monkeypatch.setattr("breathecode.authenticate.tasks.create_user_from_invite.delay", MagicMock()) + monkeypatch.setattr("breathecode.authenticate.tasks.async_validate_email_invite.delay", MagicMock()) yield @@ -170,55 +171,59 @@ def setup(monkeypatch: pytest.MonkeyPatch, db): @pytest.fixture def validation_res(patch_request): validation_res = { - 'quality_score': (random.random() * 0.4) + 0.6, - 'email_quality': (random.random() * 0.4) + 0.6, - 'is_valid_format': { - 'value': True, + "quality_score": (random.random() * 0.4) + 0.6, + "email_quality": (random.random() * 0.4) + 0.6, + "is_valid_format": { + "value": True, }, - 'is_mx_found': { - 'value': True, + "is_mx_found": { + "value": True, }, - 'is_smtp_valid': { - 'value': True, + "is_smtp_valid": { + "value": True, }, - 'is_catchall_email': { - 'value': True, + "is_catchall_email": { + "value": True, }, - 'is_role_email': { - 'value': True, + "is_role_email": { + "value": True, }, - 'is_disposable_email': { - 'value': False, + "is_disposable_email": { + "value": False, }, - 'is_free_email': { - 'value': True, + "is_free_email": { + "value": True, }, } - patch_request([ - ( - call('get', - 'https://emailvalidation.abstractapi.com/v1/?api_key=None&email=pokemon@potato.io', - params=None, - timeout=10), - validation_res, - ), - ]) + patch_request( + [ + ( + call( + "get", + "https://emailvalidation.abstractapi.com/v1/?api_key=None&email=pokemon@potato.io", + params=None, + timeout=10, + ), + validation_res, + ), + ] + ) return validation_res -@patch('django.utils.timezone.now', MagicMock(return_value=now)) +@patch("django.utils.timezone.now", MagicMock(return_value=now)) def test_task__post__without_email(bc: Breathecode, client: APIClient): - url = reverse_lazy('authenticate:subscribe') + url = reverse_lazy("authenticate:subscribe") response = client.post(url) json = response.json() - expected = {'detail': 'without-email', 'status_code': 400} + expected = {"detail": "without-email", "status_code": 400} assert json == expected assert response.status_code == status.HTTP_400_BAD_REQUEST - assert bc.database.list_of('authenticate.UserInvite') == [] - assert bc.database.list_of('marketing.Course') == [] - assert bc.database.list_of('payments.Plan') == [] + assert bc.database.list_of("authenticate.UserInvite") == [] + assert bc.database.list_of("marketing.Course") == [] + assert bc.database.list_of("payments.Plan") == [] # """ @@ -226,43 +231,46 @@ def test_task__post__without_email(bc: Breathecode, client: APIClient): # """ -@patch('django.utils.timezone.now', MagicMock(return_value=now)) +@patch("django.utils.timezone.now", MagicMock(return_value=now)) def 
test_task__post__without_user_invite(bc: Breathecode, client: APIClient, validation_res): - url = reverse_lazy('authenticate:subscribe') - data = {'email': 'pokemon@potato.io', 'first_name': 'lord', 'last_name': 'valdomero', 'phone': '+123123123'} + url = reverse_lazy("authenticate:subscribe") + data = {"email": "pokemon@potato.io", "first_name": "lord", "last_name": "valdomero", "phone": "+123123123"} access_token = bc.random.string(lower=True, upper=True, number=True, size=40) - with patch('binascii.hexlify', MagicMock(return_value=bytes(access_token, 'utf-8'))): - response = client.post(url, data, format='json') + with patch("binascii.hexlify", MagicMock(return_value=bytes(access_token, "utf-8"))): + response = client.post(url, data, format="json") json = response.json() - expected = post_serializer(data={ - 'id': 1, - 'access_token': access_token, - 'user': 1, - **data, - 'status': 'ACCEPTED', - }) + expected = post_serializer( + data={ + "id": 1, + "access_token": access_token, + "user": 1, + **data, + "status": "ACCEPTED", + } + ) assert json == expected assert response.status_code == status.HTTP_201_CREATED - assert bc.database.list_of('authenticate.UserInvite') == [ + assert bc.database.list_of("authenticate.UserInvite") == [ user_invite_db_item( data={ - 'token': hashlib.sha512(('pokemon@potato.io').encode('UTF-8') + b).hexdigest(), - 'process_status': 'DONE', - 'status': 'ACCEPTED', - 'user_id': 1, - 'city': None, - 'country': None, - 'latitude': None, - 'longitude': None, + "token": hashlib.sha512(("pokemon@potato.io").encode("UTF-8") + b).hexdigest(), + "process_status": "DONE", + "status": "ACCEPTED", + "user_id": 1, + "city": None, + "country": None, + "latitude": None, + "longitude": None, **data, - }), + } + ), ] - assert bc.database.list_of('marketing.Course') == [] - assert bc.database.list_of('payments.Plan') == [] + assert bc.database.list_of("marketing.Course") == [] + assert bc.database.list_of("payments.Plan") == [] assert async_validate_email_invite.delay.call_args_list == [call(1)] @@ -271,49 +279,52 @@ def test_task__post__without_user_invite(bc: Breathecode, client: APIClient, val # """ -@patch('django.utils.timezone.now', MagicMock(return_value=now)) +@patch("django.utils.timezone.now", MagicMock(return_value=now)) def test_task__post__without_user_invite_with_asset_slug(bc: Breathecode, client: APIClient, validation_res): - url = reverse_lazy('authenticate:subscribe') + url = reverse_lazy("authenticate:subscribe") data = { - 'email': 'pokemon@potato.io', - 'first_name': 'lord', - 'last_name': 'valdomero', - 'phone': '+123123123', - 'asset_slug': 'pokemon_exercise', + "email": "pokemon@potato.io", + "first_name": "lord", + "last_name": "valdomero", + "phone": "+123123123", + "asset_slug": "pokemon_exercise", } access_token = bc.random.string(lower=True, upper=True, number=True, size=40) - with patch('binascii.hexlify', MagicMock(return_value=bytes(access_token, 'utf-8'))): - response = client.post(url, data, format='json') + with patch("binascii.hexlify", MagicMock(return_value=bytes(access_token, "utf-8"))): + response = client.post(url, data, format="json") json = response.json() - expected = post_serializer(data={ - 'id': 1, - 'access_token': access_token, - 'user': 1, - **data, - 'status': 'ACCEPTED', - }) + expected = post_serializer( + data={ + "id": 1, + "access_token": access_token, + "user": 1, + **data, + "status": "ACCEPTED", + } + ) assert json == expected assert response.status_code == status.HTTP_201_CREATED - assert 
bc.database.list_of('authenticate.UserInvite') == [ + assert bc.database.list_of("authenticate.UserInvite") == [ user_invite_db_item( data={ - 'token': hashlib.sha512(('pokemon@potato.io').encode('UTF-8') + b).hexdigest(), - 'process_status': 'DONE', - 'status': 'ACCEPTED', - 'user_id': 1, - 'city': None, - 'country': None, - 'latitude': None, - 'longitude': None, + "token": hashlib.sha512(("pokemon@potato.io").encode("UTF-8") + b).hexdigest(), + "process_status": "DONE", + "status": "ACCEPTED", + "user_id": 1, + "city": None, + "country": None, + "latitude": None, + "longitude": None, **data, - }), + } + ), ] - assert bc.database.list_of('marketing.Course') == [] - assert bc.database.list_of('payments.Plan') == [] + assert bc.database.list_of("marketing.Course") == [] + assert bc.database.list_of("payments.Plan") == [] assert async_validate_email_invite.delay.call_args_list == [call(1)] @@ -322,49 +333,52 @@ def test_task__post__without_user_invite_with_asset_slug(bc: Breathecode, client # """ -@patch('django.utils.timezone.now', MagicMock(return_value=now)) +@patch("django.utils.timezone.now", MagicMock(return_value=now)) def test_task__post__without_user_invite_with_event_slug(bc: Breathecode, client: APIClient, validation_res): - url = reverse_lazy('authenticate:subscribe') + url = reverse_lazy("authenticate:subscribe") data = { - 'email': 'pokemon@potato.io', - 'first_name': 'lord', - 'last_name': 'valdomero', - 'phone': '+123123123', - 'event_slug': 'pokemon_event', + "email": "pokemon@potato.io", + "first_name": "lord", + "last_name": "valdomero", + "phone": "+123123123", + "event_slug": "pokemon_event", } access_token = bc.random.string(lower=True, upper=True, number=True, size=40) - with patch('binascii.hexlify', MagicMock(return_value=bytes(access_token, 'utf-8'))): - response = client.post(url, data, format='json') + with patch("binascii.hexlify", MagicMock(return_value=bytes(access_token, "utf-8"))): + response = client.post(url, data, format="json") json = response.json() - expected = post_serializer(data={ - 'id': 1, - 'access_token': access_token, - 'user': 1, - **data, - 'status': 'ACCEPTED', - }) + expected = post_serializer( + data={ + "id": 1, + "access_token": access_token, + "user": 1, + **data, + "status": "ACCEPTED", + } + ) assert json == expected assert response.status_code == status.HTTP_201_CREATED - assert bc.database.list_of('authenticate.UserInvite') == [ + assert bc.database.list_of("authenticate.UserInvite") == [ user_invite_db_item( data={ - 'token': hashlib.sha512(('pokemon@potato.io').encode('UTF-8') + b).hexdigest(), - 'process_status': 'DONE', - 'status': 'ACCEPTED', - 'user_id': 1, - 'city': None, - 'country': None, - 'latitude': None, - 'longitude': None, + "token": hashlib.sha512(("pokemon@potato.io").encode("UTF-8") + b).hexdigest(), + "process_status": "DONE", + "status": "ACCEPTED", + "user_id": 1, + "city": None, + "country": None, + "latitude": None, + "longitude": None, **data, - }), + } + ), ] - assert bc.database.list_of('marketing.Course') == [] - assert bc.database.list_of('payments.Plan') == [] + assert bc.database.list_of("marketing.Course") == [] + assert bc.database.list_of("payments.Plan") == [] assert async_validate_email_invite.delay.call_args_list == [call(1)] @@ -373,7 +387,7 @@ def test_task__post__without_user_invite_with_event_slug(bc: Breathecode, client # """ -@patch('django.utils.timezone.now', MagicMock(return_value=now)) +@patch("django.utils.timezone.now", MagicMock(return_value=now)) def 
test_task__post__with_user_invite__already_exists__status_waiting_list(bc: Breathecode, client: APIClient): """ Descriptions of models are being generated: @@ -381,87 +395,89 @@ def test_task__post__with_user_invite__already_exists__status_waiting_list(bc: B UserInvite(id=1): {} """ - user_invite = {'email': 'pokemon@potato.io', 'status': 'WAITING_LIST'} + user_invite = {"email": "pokemon@potato.io", "status": "WAITING_LIST"} model = bc.database.create(user_invite=user_invite) - url = reverse_lazy('authenticate:subscribe') - data = {'email': 'pokemon@potato.io'} - response = client.post(url, data, format='json') + url = reverse_lazy("authenticate:subscribe") + data = {"email": "pokemon@potato.io"} + response = client.post(url, data, format="json") json = response.json() - expected = {'detail': 'user-invite-exists', 'status_code': 400} + expected = {"detail": "user-invite-exists", "status_code": 400} assert json == expected assert response.status_code == status.HTTP_400_BAD_REQUEST - assert bc.database.list_of('authenticate.UserInvite') == [ + assert bc.database.list_of("authenticate.UserInvite") == [ bc.format.to_dict(model.user_invite), ] - assert bc.database.list_of('marketing.Course') == [] - assert bc.database.list_of('payments.Plan') == [] + assert bc.database.list_of("marketing.Course") == [] + assert bc.database.list_of("payments.Plan") == [] -@patch('django.utils.timezone.now', MagicMock(return_value=now)) +@patch("django.utils.timezone.now", MagicMock(return_value=now)) def test_task__post__with_user_invite__already_exists__status_pending__academy_no_saas( - bc: Breathecode, client: APIClient): + bc: Breathecode, client: APIClient +): """ Descriptions of models are being generated: UserInvite(id=1): {} """ - user_invite = {'email': 'pokemon@potato.io', 'status': 'PENDING'} - academy = {'available_as_saas': False} + user_invite = {"email": "pokemon@potato.io", "status": "PENDING"} + academy = {"available_as_saas": False} model = bc.database.create(user_invite=user_invite, academy=academy) - url = reverse_lazy('authenticate:subscribe') - data = {'email': 'pokemon@potato.io'} - response = client.post(url, data, format='json') + url = reverse_lazy("authenticate:subscribe") + data = {"email": "pokemon@potato.io"} + response = client.post(url, data, format="json") json = response.json() - expected = {'detail': 'invite-exists', 'status_code': 400} + expected = {"detail": "invite-exists", "status_code": 400} assert json == expected assert response.status_code == status.HTTP_400_BAD_REQUEST - assert bc.database.list_of('authenticate.UserInvite') == [ + assert bc.database.list_of("authenticate.UserInvite") == [ bc.format.to_dict(model.user_invite), ] - assert bc.database.list_of('marketing.Course') == [] - assert bc.database.list_of('payments.Plan') == [] + assert bc.database.list_of("marketing.Course") == [] + assert bc.database.list_of("payments.Plan") == [] -@patch('django.utils.timezone.now', MagicMock(return_value=now)) +@patch("django.utils.timezone.now", MagicMock(return_value=now)) def test_task__post__with_user_invite__already_exists__status_pending__academy_no_saas__from_cohort( - bc: Breathecode, client: APIClient): + bc: Breathecode, client: APIClient +): """ Descriptions of models are being generated: UserInvite(id=1): {} """ - user_invite = {'email': 'pokemon@potato.io', 'status': 'PENDING', 'academy_id': None} - academy = {'available_as_saas': False} + user_invite = {"email": "pokemon@potato.io", "status": "PENDING", "academy_id": None} + academy = {"available_as_saas": 
False} model = bc.database.create(user_invite=user_invite, academy=academy, cohort=1) - url = reverse_lazy('authenticate:subscribe') - data = {'email': 'pokemon@potato.io'} - response = client.post(url, data, format='json') + url = reverse_lazy("authenticate:subscribe") + data = {"email": "pokemon@potato.io"} + response = client.post(url, data, format="json") json = response.json() - expected = {'detail': 'invite-exists', 'status_code': 400} + expected = {"detail": "invite-exists", "status_code": 400} assert json == expected assert response.status_code == status.HTTP_400_BAD_REQUEST - assert bc.database.list_of('authenticate.UserInvite') == [ + assert bc.database.list_of("authenticate.UserInvite") == [ bc.format.to_dict(model.user_invite), ] - assert bc.database.list_of('marketing.Course') == [] - assert bc.database.list_of('payments.Plan') == [] + assert bc.database.list_of("marketing.Course") == [] + assert bc.database.list_of("payments.Plan") == [] -@patch('django.utils.timezone.now', MagicMock(return_value=now)) +@patch("django.utils.timezone.now", MagicMock(return_value=now)) def test_task__post__with_user_invite__already_exists__status_pending(bc: Breathecode, client: APIClient): """ Descriptions of models are being generated: @@ -469,25 +485,25 @@ def test_task__post__with_user_invite__already_exists__status_pending(bc: Breath UserInvite(id=1): {} """ - user_invites = [{'email': 'pokemon@potato.io', 'status': x} for x in ['PENDING', 'ACCEPTED']] + user_invites = [{"email": "pokemon@potato.io", "status": x} for x in ["PENDING", "ACCEPTED"]] model = bc.database.create(user_invite=user_invites) - url = reverse_lazy('authenticate:subscribe') - data = {'email': 'pokemon@potato.io'} - response = client.post(url, data, format='json') + url = reverse_lazy("authenticate:subscribe") + data = {"email": "pokemon@potato.io"} + response = client.post(url, data, format="json") json = response.json() - expected = {'detail': 'user-invite-exists-status-pending', 'status_code': 400} + expected = {"detail": "user-invite-exists-status-pending", "status_code": 400} assert json == expected assert response.status_code == status.HTTP_400_BAD_REQUEST - assert bc.database.list_of('authenticate.UserInvite') == bc.format.to_dict(model.user_invite) + assert bc.database.list_of("authenticate.UserInvite") == bc.format.to_dict(model.user_invite) - assert bc.database.list_of('marketing.Course') == [] - assert bc.database.list_of('payments.Plan') == [] + assert bc.database.list_of("marketing.Course") == [] + assert bc.database.list_of("payments.Plan") == [] -@patch('django.utils.timezone.now', MagicMock(return_value=now)) +@patch("django.utils.timezone.now", MagicMock(return_value=now)) def test_task__post__with_user_invite__already_exists__status_accepted(bc: Breathecode, client: APIClient): """ Descriptions of models are being generated: @@ -495,29 +511,29 @@ def test_task__post__with_user_invite__already_exists__status_accepted(bc: Breat UserInvite(id=1): {} """ - user_invite = {'email': 'pokemon@potato.io', 'status': 'ACCEPTED'} + user_invite = {"email": "pokemon@potato.io", "status": "ACCEPTED"} model = bc.database.create(user_invite=user_invite) - url = reverse_lazy('authenticate:subscribe') - data = {'email': 'pokemon@potato.io'} - response = client.post(url, data, format='json') + url = reverse_lazy("authenticate:subscribe") + data = {"email": "pokemon@potato.io"} + response = client.post(url, data, format="json") json = response.json() expected = { - 'detail': 'user-invite-exists-status-accepted', - 
'status_code': 400, - 'silent': True, - 'silent_code': 'user-invite-exists-status-accepted', + "detail": "user-invite-exists-status-accepted", + "status_code": 400, + "silent": True, + "silent_code": "user-invite-exists-status-accepted", } assert json == expected assert response.status_code == status.HTTP_400_BAD_REQUEST - assert bc.database.list_of('authenticate.UserInvite') == [ + assert bc.database.list_of("authenticate.UserInvite") == [ bc.format.to_dict(model.user_invite), ] - assert bc.database.list_of('marketing.Course') == [] - assert bc.database.list_of('payments.Plan') == [] + assert bc.database.list_of("marketing.Course") == [] + assert bc.database.list_of("payments.Plan") == [] # """ @@ -525,7 +541,7 @@ def test_task__post__with_user_invite__already_exists__status_accepted(bc: Breat # """ -@patch('django.utils.timezone.now', MagicMock(return_value=now)) +@patch("django.utils.timezone.now", MagicMock(return_value=now)) def test_task__post__with_user_invite__user_exists(bc: Breathecode, client: APIClient): """ Descriptions of models are being generated: @@ -535,26 +551,26 @@ def test_task__post__with_user_invite__user_exists(bc: Breathecode, client: APIC user_permissions: [] """ - user = {'email': 'pokemon@potato.io'} + user = {"email": "pokemon@potato.io"} model = bc.database.create(user=user) - url = reverse_lazy('authenticate:subscribe') - data = {'email': 'pokemon@potato.io'} - response = client.post(url, data, format='json') + url = reverse_lazy("authenticate:subscribe") + data = {"email": "pokemon@potato.io"} + response = client.post(url, data, format="json") json = response.json() expected = { - 'detail': 'user-exists', - 'silent': True, - 'silent_code': 'user-exists', - 'status_code': 400, + "detail": "user-exists", + "silent": True, + "silent_code": "user-exists", + "status_code": 400, } assert json == expected assert response.status_code == status.HTTP_400_BAD_REQUEST - assert bc.database.list_of('authenticate.UserInvite') == [] - assert bc.database.list_of('marketing.Course') == [] - assert bc.database.list_of('payments.Plan') == [] + assert bc.database.list_of("authenticate.UserInvite") == [] + assert bc.database.list_of("marketing.Course") == [] + assert bc.database.list_of("payments.Plan") == [] # """ @@ -562,9 +578,9 @@ def test_task__post__with_user_invite__user_exists(bc: Breathecode, client: APIC # """ -@patch('django.utils.timezone.now', MagicMock(return_value=now)) -@patch('breathecode.notify.actions.send_email_message', MagicMock(return_value=None)) -@patch('breathecode.authenticate.models.Token.get_or_create', MagicMock(wraps=Token.get_or_create)) +@patch("django.utils.timezone.now", MagicMock(return_value=now)) +@patch("breathecode.notify.actions.send_email_message", MagicMock(return_value=None)) +@patch("breathecode.authenticate.models.Token.get_or_create", MagicMock(wraps=Token.get_or_create)) def test_task__post__with_user_invite(bc: Breathecode, client: APIClient, validation_res): """ Descriptions of models are being generated: @@ -572,79 +588,87 @@ def test_task__post__with_user_invite(bc: Breathecode, client: APIClient, valida UserInvite(id=1): {} """ - user_invite = {'email': 'henrrieta@horseman.io', 'status': 'WAITING_LIST'} + user_invite = {"email": "henrrieta@horseman.io", "status": "WAITING_LIST"} model = bc.database.create(user_invite=user_invite) - url = reverse_lazy('authenticate:subscribe') - data = {'email': 'pokemon@potato.io', 'first_name': 'lord', 'last_name': 'valdomero', 'phone': '+123123123'} + url = 
reverse_lazy("authenticate:subscribe") + data = {"email": "pokemon@potato.io", "first_name": "lord", "last_name": "valdomero", "phone": "+123123123"} access_token = bc.random.string(lower=True, upper=True, number=True, size=40) - with patch('binascii.hexlify', MagicMock(return_value=bytes(access_token, 'utf-8'))): - response = client.post(url, data, format='json') + with patch("binascii.hexlify", MagicMock(return_value=bytes(access_token, "utf-8"))): + response = client.post(url, data, format="json") json = response.json() - expected = post_serializer(data={ - 'id': 2, - 'access_token': access_token, - 'user': 1, - **data, - 'status': 'ACCEPTED', - }) + expected = post_serializer( + data={ + "id": 2, + "access_token": access_token, + "user": 1, + **data, + "status": "ACCEPTED", + } + ) assert json == expected assert response.status_code == status.HTTP_201_CREATED - assert bc.database.list_of('authenticate.UserInvite') == [ + assert bc.database.list_of("authenticate.UserInvite") == [ bc.format.to_dict(model.user_invite), user_invite_db_item( data={ - 'id': 2, - 'token': hashlib.sha512(('pokemon@potato.io').encode('UTF-8') + b).hexdigest(), - 'process_status': 'DONE', - 'status': 'ACCEPTED', - 'user_id': 1, - 'city': None, - 'country': None, - 'latitude': None, - 'longitude': None, + "id": 2, + "token": hashlib.sha512(("pokemon@potato.io").encode("UTF-8") + b).hexdigest(), + "process_status": "DONE", + "status": "ACCEPTED", + "user_id": 1, + "city": None, + "country": None, + "latitude": None, + "longitude": None, **data, - }), + } + ), ] - user_db = bc.database.list_of('auth.User') + user_db = bc.database.list_of("auth.User") for item in user_db: - assert isinstance(item['date_joined'], datetime) - del item['date_joined'] - - assert user_db == [{ - 'email': 'pokemon@potato.io', - 'first_name': 'lord', - 'id': 1, - 'is_active': True, - 'is_staff': False, - 'is_superuser': False, - 'last_login': None, - 'last_name': 'valdomero', - 'password': '', - 'username': 'pokemon@potato.io', - }] - - assert bc.database.list_of('marketing.Course') == [] - assert bc.database.list_of('payments.Plan') == [] + assert isinstance(item["date_joined"], datetime) + del item["date_joined"] + + assert user_db == [ + { + "email": "pokemon@potato.io", + "first_name": "lord", + "id": 1, + "is_active": True, + "is_staff": False, + "is_superuser": False, + "last_login": None, + "last_name": "valdomero", + "password": "", + "username": "pokemon@potato.io", + } + ] + + assert bc.database.list_of("marketing.Course") == [] + assert bc.database.list_of("payments.Plan") == [] assert async_validate_email_invite.delay.call_args_list == [call(1), call(2)] assert notify_actions.send_email_message.call_args_list == [ - call('verify_email', - 'pokemon@potato.io', { - 'LANG': 'en', - 'SUBJECT': '4Geeks - Validate account', - 'LINK': ('/v1/auth/password/' + hashlib.sha512('pokemon@potato.io'.encode('UTF-8') + b).hexdigest()) - }, - academy=None) - ] - - user = bc.database.get('auth.User', 1, dict=False) + call( + "verify_email", + "pokemon@potato.io", + { + "LANG": "en", + "SUBJECT": "4Geeks - Validate account", + "LINK": ("/v1/auth/password/" + hashlib.sha512("pokemon@potato.io".encode("UTF-8") + b).hexdigest()), + }, + academy=None, + ) + ] + + user = bc.database.get("auth.User", 1, dict=False) assert Token.get_or_create.call_args_list == [ - call(user=user, token_type='login'), + call(user=user, token_type="login"), ] @@ -653,9 +677,9 @@ def test_task__post__with_user_invite(bc: Breathecode, client: APIClient, valida # """ 
-@patch('django.utils.timezone.now', MagicMock(return_value=now)) -@patch('breathecode.notify.actions.send_email_message', MagicMock(return_value=None)) -@patch('breathecode.authenticate.models.Token.get_or_create', MagicMock(wraps=Token.get_or_create)) +@patch("django.utils.timezone.now", MagicMock(return_value=now)) +@patch("breathecode.notify.actions.send_email_message", MagicMock(return_value=None)) +@patch("breathecode.authenticate.models.Token.get_or_create", MagicMock(wraps=Token.get_or_create)) def test_task__post__does_not_get_in_waiting_list_using_a_plan(bc: Breathecode, client: APIClient, validation_res): """ Descriptions of models are being generated: @@ -663,52 +687,57 @@ def test_task__post__does_not_get_in_waiting_list_using_a_plan(bc: Breathecode, UserInvite(id=1): {} """ - user_invite = {'email': 'henrrieta@horseman.io', 'status': 'WAITING_LIST'} - plan = {'time_of_life': None, 'time_of_life_unit': None, 'has_waiting_list': True, 'invites': []} + user_invite = {"email": "henrrieta@horseman.io", "status": "WAITING_LIST"} + plan = {"time_of_life": None, "time_of_life_unit": None, "has_waiting_list": True, "invites": []} model = bc.database.create(user_invite=user_invite, plan=plan) - url = reverse_lazy('authenticate:subscribe') + url = reverse_lazy("authenticate:subscribe") data = { - 'email': 'pokemon@potato.io', - 'first_name': 'lord', - 'last_name': 'valdomero', - 'phone': '+123123123', - 'plan': 1, + "email": "pokemon@potato.io", + "first_name": "lord", + "last_name": "valdomero", + "phone": "+123123123", + "plan": 1, } access_token = bc.random.string(lower=True, upper=True, number=True, size=40) - with patch('binascii.hexlify', MagicMock(return_value=bytes(access_token, 'utf-8'))): - response = client.post(url, data, format='json') + with patch("binascii.hexlify", MagicMock(return_value=bytes(access_token, "utf-8"))): + response = client.post(url, data, format="json") - del data['plan'] + del data["plan"] json = response.json() - expected = put_serializer(model.user_invite, plans=[model.plan], data={ - 'id': 2, - **data, - }) + expected = put_serializer( + model.user_invite, + plans=[model.plan], + data={ + "id": 2, + **data, + }, + ) assert json == expected assert response.status_code == status.HTTP_201_CREATED - assert bc.database.list_of('authenticate.UserInvite') == [ + assert bc.database.list_of("authenticate.UserInvite") == [ bc.format.to_dict(model.user_invite), user_invite_db_item( data={ - 'id': 2, - 'token': hashlib.sha512(('pokemon@potato.io').encode('UTF-8') + b).hexdigest(), - 'process_status': 'PENDING', - 'status': 'WAITING_LIST', - 'city': None, - 'country': None, - 'latitude': None, - 'longitude': None, - 'email_quality': None, - 'email_status': None, + "id": 2, + "token": hashlib.sha512(("pokemon@potato.io").encode("UTF-8") + b).hexdigest(), + "process_status": "PENDING", + "status": "WAITING_LIST", + "city": None, + "country": None, + "latitude": None, + "longitude": None, + "email_quality": None, + "email_status": None, **data, - }), + } + ), ] - assert bc.database.list_of('auth.User') == [] - assert bc.database.list_of('marketing.Course') == [] - assert bc.database.list_of('payments.Plan') == [plan_db_item(model.plan, data={})] + assert bc.database.list_of("auth.User") == [] + assert bc.database.list_of("marketing.Course") == [] + assert bc.database.list_of("payments.Plan") == [plan_db_item(model.plan, data={})] bc.check.queryset_with_pks(model.plan.invites.all(), [2]) assert notify_actions.send_email_message.call_args_list == [] assert 
Token.get_or_create.call_args_list == [] @@ -719,9 +748,9 @@ def test_task__post__does_not_get_in_waiting_list_using_a_plan(bc: Breathecode, # """ -@patch('django.utils.timezone.now', MagicMock(return_value=now)) -@patch('breathecode.notify.actions.send_email_message', MagicMock(return_value=None)) -@patch('breathecode.authenticate.models.Token.get_or_create', MagicMock(wraps=Token.get_or_create)) +@patch("django.utils.timezone.now", MagicMock(return_value=now)) +@patch("breathecode.notify.actions.send_email_message", MagicMock(return_value=None)) +@patch("breathecode.authenticate.models.Token.get_or_create", MagicMock(wraps=Token.get_or_create)) def test_task__post__get_in_waiting_list_using_a_plan(bc: Breathecode, client: APIClient, validation_res): """ Descriptions of models are being generated: @@ -729,135 +758,141 @@ def test_task__post__get_in_waiting_list_using_a_plan(bc: Breathecode, client: A UserInvite(id=1): {} """ - user_invite = {'email': 'henrrieta@horseman.io', 'status': 'WAITING_LIST'} - plan = {'time_of_life': None, 'time_of_life_unit': None, 'has_waiting_list': False, 'invites': []} + user_invite = {"email": "henrrieta@horseman.io", "status": "WAITING_LIST"} + plan = {"time_of_life": None, "time_of_life_unit": None, "has_waiting_list": False, "invites": []} model = bc.database.create(user_invite=user_invite, plan=plan) - url = reverse_lazy('authenticate:subscribe') + url = reverse_lazy("authenticate:subscribe") data = { - 'email': 'pokemon@potato.io', - 'first_name': 'lord', - 'last_name': 'valdomero', - 'phone': '+123123123', - 'plan': 1, + "email": "pokemon@potato.io", + "first_name": "lord", + "last_name": "valdomero", + "phone": "+123123123", + "plan": 1, } access_token = bc.random.string(lower=True, upper=True, number=True, size=40) - with patch('binascii.hexlify', MagicMock(return_value=bytes(access_token, 'utf-8'))): - response = client.post(url, data, format='json') + with patch("binascii.hexlify", MagicMock(return_value=bytes(access_token, "utf-8"))): + response = client.post(url, data, format="json") - del data['plan'] + del data["plan"] json = response.json() - expected = post_serializer(plans=[model.plan], - data={ - 'id': 2, - 'access_token': access_token, - 'user': 1, - **data, - 'status': 'ACCEPTED', - }) + expected = post_serializer( + plans=[model.plan], + data={ + "id": 2, + "access_token": access_token, + "user": 1, + **data, + "status": "ACCEPTED", + }, + ) assert json == expected assert response.status_code == status.HTTP_201_CREATED - assert bc.database.list_of('authenticate.UserInvite') == [ + assert bc.database.list_of("authenticate.UserInvite") == [ bc.format.to_dict(model.user_invite), user_invite_db_item( data={ - 'id': 2, - 'token': hashlib.sha512(('pokemon@potato.io').encode('UTF-8') + b).hexdigest(), - 'process_status': 'DONE', - 'status': 'ACCEPTED', - 'user_id': 1, - 'city': None, - 'country': None, - 'latitude': None, - 'longitude': None, + "id": 2, + "token": hashlib.sha512(("pokemon@potato.io").encode("UTF-8") + b).hexdigest(), + "process_status": "DONE", + "status": "ACCEPTED", + "user_id": 1, + "city": None, + "country": None, + "latitude": None, + "longitude": None, **data, - }), + } + ), ] - assert bc.database.list_of('marketing.Course') == [] - assert bc.database.list_of('payments.Plan') == [plan_db_item(model.plan, data={})] + assert bc.database.list_of("marketing.Course") == [] + assert bc.database.list_of("payments.Plan") == [plan_db_item(model.plan, data={})] bc.check.queryset_with_pks(model.plan.invites.all(), [2]) - token = 
hashlib.sha512('pokemon@potato.io'.encode('UTF-8') + b).hexdigest() + token = hashlib.sha512("pokemon@potato.io".encode("UTF-8") + b).hexdigest() assert async_validate_email_invite.delay.call_args_list == [call(1), call(2)] assert notify_actions.send_email_message.call_args_list == [ - call('verify_email', - 'pokemon@potato.io', { - 'LANG': 'en', - 'SUBJECT': '4Geeks - Validate account', - 'LINK': f'/v1/auth/password/{token}', - }, - academy=None) + call( + "verify_email", + "pokemon@potato.io", + { + "LANG": "en", + "SUBJECT": "4Geeks - Validate account", + "LINK": f"/v1/auth/password/{token}", + }, + academy=None, + ) ] - User = bc.database.get_model('auth.User') - user = User.objects.get(email=data['email']) + User = bc.database.get_model("auth.User") + user = User.objects.get(email=data["email"]) assert Token.get_or_create.call_args_list == [ - call(user=user, token_type='login'), + call(user=user, token_type="login"), ] # When: Syllabus is passed and does not exist # Then: It should return 400 -@patch('django.utils.timezone.now', MagicMock(return_value=now)) -@patch('breathecode.notify.actions.send_email_message', MagicMock(return_value=None)) -@patch('breathecode.authenticate.models.Token.get_or_create', MagicMock(wraps=Token.get_or_create)) +@patch("django.utils.timezone.now", MagicMock(return_value=now)) +@patch("breathecode.notify.actions.send_email_message", MagicMock(return_value=None)) +@patch("breathecode.authenticate.models.Token.get_or_create", MagicMock(wraps=Token.get_or_create)) def test__post__syllabus_does_not_exists(bc: Breathecode, client: APIClient): - url = reverse_lazy('authenticate:subscribe') + url = reverse_lazy("authenticate:subscribe") data = { - 'email': 'pokemon@potato.io', - 'first_name': 'lord', - 'last_name': 'valdomero', - 'phone': '+123123123', - 'syllabus': random.choice([bc.fake.slug(), random.randint(1, 100)]), + "email": "pokemon@potato.io", + "first_name": "lord", + "last_name": "valdomero", + "phone": "+123123123", + "syllabus": random.choice([bc.fake.slug(), random.randint(1, 100)]), } access_token = bc.random.string(lower=True, upper=True, number=True, size=40) - with patch('binascii.hexlify', MagicMock(return_value=bytes(access_token, 'utf-8'))): - response = client.post(url, data, format='json') + with patch("binascii.hexlify", MagicMock(return_value=bytes(access_token, "utf-8"))): + response = client.post(url, data, format="json") - del data['syllabus'] + del data["syllabus"] json = response.json() - expected = {'detail': 'syllabus-not-found', 'status_code': 400} + expected = {"detail": "syllabus-not-found", "status_code": 400} assert json == expected assert response.status_code == status.HTTP_400_BAD_REQUEST - assert bc.database.list_of('authenticate.UserInvite') == [] + assert bc.database.list_of("authenticate.UserInvite") == [] - assert bc.database.list_of('auth.User') == [] + assert bc.database.list_of("auth.User") == [] assert notify_actions.send_email_message.call_args_list == [] assert Token.get_or_create.call_args_list == [] # When: Course is passed and does not exist # Then: It should return 400 -@patch('django.utils.timezone.now', MagicMock(return_value=now)) -@patch('breathecode.notify.actions.send_email_message', MagicMock(return_value=None)) -@patch('breathecode.authenticate.models.Token.get_or_create', MagicMock(wraps=Token.get_or_create)) +@patch("django.utils.timezone.now", MagicMock(return_value=now)) +@patch("breathecode.notify.actions.send_email_message", MagicMock(return_value=None)) 
+@patch("breathecode.authenticate.models.Token.get_or_create", MagicMock(wraps=Token.get_or_create)) def test__post__course_does_not_exists(bc: Breathecode, client: APIClient): - url = reverse_lazy('authenticate:subscribe') + url = reverse_lazy("authenticate:subscribe") data = { - 'email': 'pokemon@potato.io', - 'first_name': 'lord', - 'last_name': 'valdomero', - 'phone': '+123123123', - 'course': random.choice([bc.fake.slug(), random.randint(1, 100)]), + "email": "pokemon@potato.io", + "first_name": "lord", + "last_name": "valdomero", + "phone": "+123123123", + "course": random.choice([bc.fake.slug(), random.randint(1, 100)]), } access_token = bc.random.string(lower=True, upper=True, number=True, size=40) - with patch('binascii.hexlify', MagicMock(return_value=bytes(access_token, 'utf-8'))): - response = client.post(url, data, format='json') + with patch("binascii.hexlify", MagicMock(return_value=bytes(access_token, "utf-8"))): + response = client.post(url, data, format="json") - del data['course'] + del data["course"] json = response.json() - expected = {'detail': 'course-not-found', 'status_code': 400} + expected = {"detail": "course-not-found", "status_code": 400} assert json == expected assert response.status_code == status.HTTP_400_BAD_REQUEST - assert bc.database.list_of('authenticate.UserInvite') == [] + assert bc.database.list_of("authenticate.UserInvite") == [] - assert bc.database.list_of('auth.User') == [] + assert bc.database.list_of("auth.User") == [] assert notify_actions.send_email_message.call_args_list == [] assert Token.get_or_create.call_args_list == [] @@ -865,222 +900,239 @@ def test__post__course_does_not_exists(bc: Breathecode, client: APIClient): # Given: 1 Course # When: Course is passed as slug and exists # Then: It should return 400 -@patch('django.utils.timezone.now', MagicMock(return_value=now)) -@patch('breathecode.notify.actions.send_email_message', MagicMock(return_value=None)) -@patch('breathecode.authenticate.models.Token.get_or_create', MagicMock(wraps=Token.get_or_create)) +@patch("django.utils.timezone.now", MagicMock(return_value=now)) +@patch("breathecode.notify.actions.send_email_message", MagicMock(return_value=None)) +@patch("breathecode.authenticate.models.Token.get_or_create", MagicMock(wraps=Token.get_or_create)) def test__post__course_without_syllabus(bc: Breathecode, client: APIClient, validation_res): model = bc.database.create(course=1) - url = reverse_lazy('authenticate:subscribe') + url = reverse_lazy("authenticate:subscribe") data = { - 'email': 'pokemon@potato.io', - 'first_name': 'lord', - 'last_name': 'valdomero', - 'phone': '+123123123', - 'course': random.choice([model.course.id, model.course.slug]), + "email": "pokemon@potato.io", + "first_name": "lord", + "last_name": "valdomero", + "phone": "+123123123", + "course": random.choice([model.course.id, model.course.slug]), } access_token = bc.random.string(lower=True, upper=True, number=True, size=40) - with patch('binascii.hexlify', MagicMock(return_value=bytes(access_token, 'utf-8'))): - response = client.post(url, data, format='json') + with patch("binascii.hexlify", MagicMock(return_value=bytes(access_token, "utf-8"))): + response = client.post(url, data, format="json") - del data['course'] + del data["course"] json = response.json() - expected = post_serializer(plans=[], - data={ - 'id': 1, - 'access_token': access_token, - 'user': 1, - **data, - 'status': 'ACCEPTED', - }) + expected = post_serializer( + plans=[], + data={ + "id": 1, + "access_token": access_token, + "user": 1, + 
**data, + "status": "ACCEPTED", + }, + ) assert json == expected assert response.status_code == status.HTTP_201_CREATED - assert bc.database.list_of('authenticate.UserInvite') == [ + assert bc.database.list_of("authenticate.UserInvite") == [ user_invite_db_item( data={ - 'token': hashlib.sha512(('pokemon@potato.io').encode('UTF-8') + b).hexdigest(), - 'process_status': 'DONE', - 'status': 'ACCEPTED', - 'academy_id': 1, - 'user_id': 1, - 'city': None, - 'country': None, - 'latitude': None, - 'longitude': None, + "token": hashlib.sha512(("pokemon@potato.io").encode("UTF-8") + b).hexdigest(), + "process_status": "DONE", + "status": "ACCEPTED", + "academy_id": 1, + "user_id": 1, + "city": None, + "country": None, + "latitude": None, + "longitude": None, **data, - }), + } + ), ] - del data['phone'] - users = [x for x in bc.database.list_of('auth.User') if x.pop('date_joined')] + del data["phone"] + users = [x for x in bc.database.list_of("auth.User") if x.pop("date_joined")] users == [ - user_db_item(data={ - **data, - 'id': 1, - 'username': 'pokemon@potato.io', - }), + user_db_item( + data={ + **data, + "id": 1, + "username": "pokemon@potato.io", + } + ), ] - assert bc.database.list_of('marketing.Course') == [ + assert bc.database.list_of("marketing.Course") == [ bc.format.to_dict(model.course), ] bc.check.queryset_with_pks(model.course.invites.all(), [1]) - assert bc.database.list_of('payments.Plan') == [] + assert bc.database.list_of("payments.Plan") == [] - token = hashlib.sha512('pokemon@potato.io'.encode('UTF-8') + b).hexdigest() + token = hashlib.sha512("pokemon@potato.io".encode("UTF-8") + b).hexdigest() assert async_validate_email_invite.delay.call_args_list == [call(1)] assert notify_actions.send_email_message.call_args_list == [ - call('verify_email', - 'pokemon@potato.io', { - 'LANG': 'en', - 'SUBJECT': '4Geeks - Validate account', - 'LINK': f'/v1/auth/password/{token}', - }, - academy=model.academy) + call( + "verify_email", + "pokemon@potato.io", + { + "LANG": "en", + "SUBJECT": "4Geeks - Validate account", + "LINK": f"/v1/auth/password/{token}", + }, + academy=model.academy, + ) ] - User = bc.database.get_model('auth.User') - user = User.objects.get(email=data['email']) + User = bc.database.get_model("auth.User") + user = User.objects.get(email=data["email"]) assert Token.get_or_create.call_args_list == [ - call(user=user, token_type='login'), + call(user=user, token_type="login"), ] # Given: 1 Course # When: Course is passed as slug and exists # Then: It should return 400 -@patch('django.utils.timezone.now', MagicMock(return_value=now)) -@patch('breathecode.notify.actions.send_email_message', MagicMock(return_value=None)) -@patch('breathecode.authenticate.models.Token.get_or_create', MagicMock(wraps=Token.get_or_create)) +@patch("django.utils.timezone.now", MagicMock(return_value=now)) +@patch("breathecode.notify.actions.send_email_message", MagicMock(return_value=None)) +@patch("breathecode.authenticate.models.Token.get_or_create", MagicMock(wraps=Token.get_or_create)) def test__post__course_and_syllabus(bc: Breathecode, client: APIClient, validation_res): model = bc.database.create(course=1, syllabus=1) - url = reverse_lazy('authenticate:subscribe') + url = reverse_lazy("authenticate:subscribe") data = { - 'email': 'pokemon@potato.io', - 'first_name': 'lord', - 'last_name': 'valdomero', - 'phone': '+123123123', - 'course': random.choice([model.course.id, model.course.slug]), - 'syllabus': random.choice([model.syllabus.id, model.syllabus.slug]), + "email": "pokemon@potato.io", 
+ "first_name": "lord", + "last_name": "valdomero", + "phone": "+123123123", + "course": random.choice([model.course.id, model.course.slug]), + "syllabus": random.choice([model.syllabus.id, model.syllabus.slug]), } access_token = bc.random.string(lower=True, upper=True, number=True, size=40) - with patch('binascii.hexlify', MagicMock(return_value=bytes(access_token, 'utf-8'))): - response = client.post(url, data, format='json') + with patch("binascii.hexlify", MagicMock(return_value=bytes(access_token, "utf-8"))): + response = client.post(url, data, format="json") - del data['course'] + del data["course"] json = response.json() - expected = post_serializer(plans=[], - data={ - 'id': 1, - 'access_token': access_token, - 'user': 1, - **data, - 'status': 'ACCEPTED', - }) + expected = post_serializer( + plans=[], + data={ + "id": 1, + "access_token": access_token, + "user": 1, + **data, + "status": "ACCEPTED", + }, + ) assert json == expected assert response.status_code == status.HTTP_201_CREATED - token = hashlib.sha512('pokemon@potato.io'.encode('UTF-8') + b).hexdigest() + token = hashlib.sha512("pokemon@potato.io".encode("UTF-8") + b).hexdigest() - data['syllabus_id'] = data.pop('syllabus') - assert bc.database.list_of('authenticate.UserInvite') == [ - user_invite_db_item({ - 'academy_id': 1, - 'process_status': 'DONE', - 'status': 'ACCEPTED', - 'user_id': 1, - 'token': token, - **data, - }) + data["syllabus_id"] = data.pop("syllabus") + assert bc.database.list_of("authenticate.UserInvite") == [ + user_invite_db_item( + { + "academy_id": 1, + "process_status": "DONE", + "status": "ACCEPTED", + "user_id": 1, + "token": token, + **data, + } + ) ] - del data['phone'] - del data['syllabus_id'] - users = [x for x in bc.database.list_of('auth.User') if x.pop('date_joined')] + del data["phone"] + del data["syllabus_id"] + users = [x for x in bc.database.list_of("auth.User") if x.pop("date_joined")] users == [ - user_db_item(data={ - **data, - 'id': 1, - 'username': 'pokemon@potato.io', - }), + user_db_item( + data={ + **data, + "id": 1, + "username": "pokemon@potato.io", + } + ), ] - assert bc.database.list_of('marketing.Course') == [ + assert bc.database.list_of("marketing.Course") == [ bc.format.to_dict(model.course), ] bc.check.queryset_with_pks(model.course.invites.all(), [1]) - assert bc.database.list_of('payments.Plan') == [] + assert bc.database.list_of("payments.Plan") == [] assert async_validate_email_invite.delay.call_args_list == [call(1)] assert notify_actions.send_email_message.call_args_list == [ - call('verify_email', - 'pokemon@potato.io', { - 'LANG': 'en', - 'SUBJECT': '4Geeks - Validate account', - 'LINK': f'/v1/auth/password/{token}', - }, - academy=model.academy) + call( + "verify_email", + "pokemon@potato.io", + { + "LANG": "en", + "SUBJECT": "4Geeks - Validate account", + "LINK": f"/v1/auth/password/{token}", + }, + academy=model.academy, + ) ] - User = bc.database.get_model('auth.User') - user = User.objects.get(email=data['email']) + User = bc.database.get_model("auth.User") + user = User.objects.get(email=data["email"]) assert Token.get_or_create.call_args_list == [ - call(user=user, token_type='login'), + call(user=user, token_type="login"), ] # Given: 1 Course and 1 Syllabus # When: Course is passed as slug and exists, course is not associated to syllabus # Then: It should return 400 -@patch('django.utils.timezone.now', MagicMock(return_value=now)) -@patch('breathecode.notify.actions.send_email_message', MagicMock(return_value=None)) 
-@patch('breathecode.authenticate.models.Token.get_or_create', MagicMock(wraps=Token.get_or_create)) +@patch("django.utils.timezone.now", MagicMock(return_value=now)) +@patch("breathecode.notify.actions.send_email_message", MagicMock(return_value=None)) +@patch("breathecode.authenticate.models.Token.get_or_create", MagicMock(wraps=Token.get_or_create)) def test__post__course_and_syllabus__syllabus_not_associated_to_course(bc: Breathecode, client: APIClient): - course = {'syllabus': []} - syllabus = {'slug': bc.fake.slug()} + course = {"syllabus": []} + syllabus = {"slug": bc.fake.slug()} model = bc.database.create(course=course, syllabus=syllabus) - url = reverse_lazy('authenticate:subscribe') + url = reverse_lazy("authenticate:subscribe") data = { - 'email': 'pokemon@potato.io', - 'first_name': 'lord', - 'last_name': 'valdomero', - 'phone': '+123123123', - 'course': random.choice([model.course.id, model.course.slug]), - 'syllabus': random.choice([model.syllabus.id, model.syllabus.slug]), + "email": "pokemon@potato.io", + "first_name": "lord", + "last_name": "valdomero", + "phone": "+123123123", + "course": random.choice([model.course.id, model.course.slug]), + "syllabus": random.choice([model.syllabus.id, model.syllabus.slug]), # 'token': token, } access_token = bc.random.string(lower=True, upper=True, number=True, size=40) - with patch('binascii.hexlify', MagicMock(return_value=bytes(access_token, 'utf-8'))): - response = client.post(url, data, format='json') + with patch("binascii.hexlify", MagicMock(return_value=bytes(access_token, "utf-8"))): + response = client.post(url, data, format="json") - del data['course'] + del data["course"] json = response.json() - expected = {'detail': 'syllabus-not-belong-to-course', 'status_code': 400} + expected = {"detail": "syllabus-not-belong-to-course", "status_code": 400} assert json == expected assert response.status_code == status.HTTP_400_BAD_REQUEST - del data['syllabus'] + del data["syllabus"] - assert bc.database.list_of('authenticate.UserInvite') == [] + assert bc.database.list_of("authenticate.UserInvite") == [] - assert bc.database.list_of('auth.User') == [] - assert bc.database.list_of('marketing.Course') == [ + assert bc.database.list_of("auth.User") == [] + assert bc.database.list_of("marketing.Course") == [ bc.format.to_dict(model.course), ] bc.check.queryset_with_pks(model.course.invites.all(), []) - assert bc.database.list_of('payments.Plan') == [] + assert bc.database.list_of("payments.Plan") == [] assert notify_actions.send_email_message.call_args_list == [] assert Token.get_or_create.call_args_list == [] @@ -1089,67 +1141,70 @@ def test__post__course_and_syllabus__syllabus_not_associated_to_course(bc: Breat # Given: 1 Course, 1 Syllabus # When: Course is passed as slug and exists, course with waiting list # Then: It should return 400 -@patch('django.utils.timezone.now', MagicMock(return_value=now)) -@patch('breathecode.notify.actions.send_email_message', MagicMock(return_value=None)) -@patch('breathecode.authenticate.models.Token.get_or_create', MagicMock(wraps=Token.get_or_create)) +@patch("django.utils.timezone.now", MagicMock(return_value=now)) +@patch("breathecode.notify.actions.send_email_message", MagicMock(return_value=None)) +@patch("breathecode.authenticate.models.Token.get_or_create", MagicMock(wraps=Token.get_or_create)) def test__post__course_and_syllabus__waiting_list(bc: Breathecode, client: APIClient): - course = {'has_waiting_list': True, 'invites': []} + course = {"has_waiting_list": True, "invites": []} model = 
bc.database.create(course=course, syllabus=1) - url = reverse_lazy('authenticate:subscribe') + url = reverse_lazy("authenticate:subscribe") data = { - 'email': 'pokemon@potato.io', - 'first_name': 'lord', - 'last_name': 'valdomero', - 'phone': '+123123123', - 'course': random.choice([model.course.id, model.course.slug]), - 'syllabus': random.choice([model.syllabus.id, model.syllabus.slug]), + "email": "pokemon@potato.io", + "first_name": "lord", + "last_name": "valdomero", + "phone": "+123123123", + "course": random.choice([model.course.id, model.course.slug]), + "syllabus": random.choice([model.syllabus.id, model.syllabus.slug]), } access_token = bc.random.string(lower=True, upper=True, number=True, size=40) - with patch('binascii.hexlify', MagicMock(return_value=bytes(access_token, 'utf-8'))): - response = client.post(url, data, format='json') + with patch("binascii.hexlify", MagicMock(return_value=bytes(access_token, "utf-8"))): + response = client.post(url, data, format="json") - del data['course'] + del data["course"] json = response.json() - expected = post_serializer(plans=[], - data={ - 'id': 1, - 'access_token': None, - 'user': None, - **data, - 'status': 'WAITING_LIST', - }) + expected = post_serializer( + plans=[], + data={ + "id": 1, + "access_token": None, + "user": None, + **data, + "status": "WAITING_LIST", + }, + ) assert json == expected assert response.status_code == status.HTTP_201_CREATED - data['syllabus_id'] = data.pop('syllabus') - assert bc.database.list_of('authenticate.UserInvite') == [ + data["syllabus_id"] = data.pop("syllabus") + assert bc.database.list_of("authenticate.UserInvite") == [ user_invite_db_item( data={ - 'token': hashlib.sha512(('pokemon@potato.io').encode('UTF-8') + b).hexdigest(), - 'process_status': 'PENDING', - 'status': 'WAITING_LIST', - 'academy_id': 1, - 'city': None, - 'country': None, - 'latitude': None, - 'longitude': None, + "token": hashlib.sha512(("pokemon@potato.io").encode("UTF-8") + b).hexdigest(), + "process_status": "PENDING", + "status": "WAITING_LIST", + "academy_id": 1, + "city": None, + "country": None, + "latitude": None, + "longitude": None, **data, - }), + } + ), ] - del data['phone'] - del data['syllabus_id'] + del data["phone"] + del data["syllabus_id"] - assert bc.database.list_of('auth.User') == [] - assert bc.database.list_of('marketing.Course') == [ + assert bc.database.list_of("auth.User") == [] + assert bc.database.list_of("marketing.Course") == [ bc.format.to_dict(model.course), ] bc.check.queryset_with_pks(model.course.invites.all(), [1]) - assert bc.database.list_of('payments.Plan') == [] + assert bc.database.list_of("payments.Plan") == [] assert notify_actions.send_email_message.call_args_list == [] assert Token.get_or_create.call_args_list == [] @@ -1158,70 +1213,73 @@ def test__post__course_and_syllabus__waiting_list(bc: Breathecode, client: APICl # Given: 1 Course, 1 UserInvite and 1 Syllabus # When: Course is passed as slug and exists, course with waiting list # Then: It should return 400 -@patch('django.utils.timezone.now', MagicMock(return_value=now)) -@patch('breathecode.notify.actions.send_email_message', MagicMock(return_value=None)) -@patch('breathecode.authenticate.models.Token.get_or_create', MagicMock(wraps=Token.get_or_create)) +@patch("django.utils.timezone.now", MagicMock(return_value=now)) +@patch("breathecode.notify.actions.send_email_message", MagicMock(return_value=None)) +@patch("breathecode.authenticate.models.Token.get_or_create", MagicMock(wraps=Token.get_or_create)) def 
test__post__with_other_invite__course_and_syllabus__waiting_list(bc: Breathecode, client: APIClient): - course = {'has_waiting_list': True, 'invites': []} - user_invite = {'email': 'pokemon@potato.io', 'status': 'WAITING_LIST'} + course = {"has_waiting_list": True, "invites": []} + user_invite = {"email": "pokemon@potato.io", "status": "WAITING_LIST"} model = bc.database.create(course=course, syllabus=1, user_invite=user_invite) - url = reverse_lazy('authenticate:subscribe') + url = reverse_lazy("authenticate:subscribe") data = { - 'email': 'pokemon@potato.io', - 'first_name': 'lord', - 'last_name': 'valdomero', - 'phone': '+123123123', - 'course': random.choice([model.course.id, model.course.slug]), - 'syllabus': random.choice([model.syllabus.id, model.syllabus.slug]), + "email": "pokemon@potato.io", + "first_name": "lord", + "last_name": "valdomero", + "phone": "+123123123", + "course": random.choice([model.course.id, model.course.slug]), + "syllabus": random.choice([model.syllabus.id, model.syllabus.slug]), } access_token = bc.random.string(lower=True, upper=True, number=True, size=40) - with patch('binascii.hexlify', MagicMock(return_value=bytes(access_token, 'utf-8'))): - response = client.post(url, data, format='json') + with patch("binascii.hexlify", MagicMock(return_value=bytes(access_token, "utf-8"))): + response = client.post(url, data, format="json") - del data['course'] + del data["course"] json = response.json() - expected = post_serializer(plans=[], - data={ - 'id': 2, - 'access_token': None, - 'user': None, - **data, - 'status': 'WAITING_LIST', - }) + expected = post_serializer( + plans=[], + data={ + "id": 2, + "access_token": None, + "user": None, + **data, + "status": "WAITING_LIST", + }, + ) assert json == expected assert response.status_code == status.HTTP_201_CREATED - data['syllabus_id'] = data.pop('syllabus') - assert bc.database.list_of('authenticate.UserInvite') == [ + data["syllabus_id"] = data.pop("syllabus") + assert bc.database.list_of("authenticate.UserInvite") == [ bc.format.to_dict(model.user_invite), user_invite_db_item( data={ - 'token': hashlib.sha512(('pokemon@potato.io').encode('UTF-8') + b).hexdigest(), - 'process_status': 'PENDING', - 'status': 'WAITING_LIST', - 'academy_id': 1, - 'city': None, - 'country': None, - 'latitude': None, - 'longitude': None, + "token": hashlib.sha512(("pokemon@potato.io").encode("UTF-8") + b).hexdigest(), + "process_status": "PENDING", + "status": "WAITING_LIST", + "academy_id": 1, + "city": None, + "country": None, + "latitude": None, + "longitude": None, **data, - 'id': 2, - }), + "id": 2, + } + ), ] - del data['phone'] - del data['syllabus_id'] + del data["phone"] + del data["syllabus_id"] - assert bc.database.list_of('auth.User') == [] - assert bc.database.list_of('marketing.Course') == [ + assert bc.database.list_of("auth.User") == [] + assert bc.database.list_of("marketing.Course") == [ bc.format.to_dict(model.course), ] bc.check.queryset_with_pks(model.course.invites.all(), [2]) - assert bc.database.list_of('payments.Plan') == [] + assert bc.database.list_of("payments.Plan") == [] assert notify_actions.send_email_message.call_args_list == [] assert Token.get_or_create.call_args_list == [] @@ -1230,68 +1288,71 @@ def test__post__with_other_invite__course_and_syllabus__waiting_list(bc: Breathe # Given: 1 Plan and 1 UserInvite # When: Course is passed as slug and exists, course with waiting list # Then: It should return 400 -@patch('django.utils.timezone.now', MagicMock(return_value=now)) 
-@patch('breathecode.notify.actions.send_email_message', MagicMock(return_value=None)) -@patch('breathecode.authenticate.models.Token.get_or_create', MagicMock(wraps=Token.get_or_create)) +@patch("django.utils.timezone.now", MagicMock(return_value=now)) +@patch("breathecode.notify.actions.send_email_message", MagicMock(return_value=None)) +@patch("breathecode.authenticate.models.Token.get_or_create", MagicMock(wraps=Token.get_or_create)) def test__post__with_other_invite__plan__waiting_list(bc: Breathecode, client: APIClient): - plan = {'has_waiting_list': True, 'invites': [], 'time_of_life': None, 'time_of_life_unit': None} - user_invite = {'email': 'pokemon@potato.io', 'status': 'WAITING_LIST'} + plan = {"has_waiting_list": True, "invites": [], "time_of_life": None, "time_of_life_unit": None} + user_invite = {"email": "pokemon@potato.io", "status": "WAITING_LIST"} model = bc.database.create(plan=plan, user_invite=user_invite) - url = reverse_lazy('authenticate:subscribe') + url = reverse_lazy("authenticate:subscribe") data = { - 'email': 'pokemon@potato.io', - 'first_name': 'lord', - 'last_name': 'valdomero', - 'phone': '+123123123', - 'plan': random.choice([model.plan.id, model.plan.slug]), + "email": "pokemon@potato.io", + "first_name": "lord", + "last_name": "valdomero", + "phone": "+123123123", + "plan": random.choice([model.plan.id, model.plan.slug]), } access_token = bc.random.string(lower=True, upper=True, number=True, size=40) - with patch('binascii.hexlify', MagicMock(return_value=bytes(access_token, 'utf-8'))): - response = client.post(url, data, format='json') + with patch("binascii.hexlify", MagicMock(return_value=bytes(access_token, "utf-8"))): + response = client.post(url, data, format="json") - del data['plan'] + del data["plan"] json = response.json() - expected = post_serializer(plans=[model.plan], - data={ - 'id': 2, - 'access_token': None, - 'user': None, - **data, - 'status': 'WAITING_LIST', - }) + expected = post_serializer( + plans=[model.plan], + data={ + "id": 2, + "access_token": None, + "user": None, + **data, + "status": "WAITING_LIST", + }, + ) assert json == expected assert response.status_code == status.HTTP_201_CREATED # data['syllabus_id'] = data.pop('syllabus') - assert bc.database.list_of('authenticate.UserInvite') == [ + assert bc.database.list_of("authenticate.UserInvite") == [ bc.format.to_dict(model.user_invite), user_invite_db_item( data={ - 'token': hashlib.sha512(('pokemon@potato.io').encode('UTF-8') + b).hexdigest(), - 'process_status': 'PENDING', - 'status': 'WAITING_LIST', - 'academy_id': None, - 'city': None, - 'country': None, - 'latitude': None, - 'longitude': None, + "token": hashlib.sha512(("pokemon@potato.io").encode("UTF-8") + b).hexdigest(), + "process_status": "PENDING", + "status": "WAITING_LIST", + "academy_id": None, + "city": None, + "country": None, + "latitude": None, + "longitude": None, **data, - 'id': 2, - }), + "id": 2, + } + ), ] - del data['phone'] + del data["phone"] - assert bc.database.list_of('auth.User') == [] - assert bc.database.list_of('payments.Plan') == [ + assert bc.database.list_of("auth.User") == [] + assert bc.database.list_of("payments.Plan") == [ bc.format.to_dict(model.plan), ] bc.check.queryset_with_pks(model.plan.invites.all(), [2]) - assert bc.database.list_of('marketing.Course') == [] + assert bc.database.list_of("marketing.Course") == [] assert notify_actions.send_email_message.call_args_list == [] assert Token.get_or_create.call_args_list == [] @@ -1300,187 +1361,205 @@ def 
test__post__with_other_invite__plan__waiting_list(bc: Breathecode, client: A # Given: 1 Cohort and 1 UserInvite # When: Course is passed as slug and exists, course with waiting list # Then: It should return 400 -@patch('django.utils.timezone.now', MagicMock(return_value=now)) -@patch('breathecode.notify.actions.send_email_message', MagicMock(return_value=None)) -@patch('breathecode.authenticate.models.Token.get_or_create', MagicMock(wraps=Token.get_or_create)) +@patch("django.utils.timezone.now", MagicMock(return_value=now)) +@patch("breathecode.notify.actions.send_email_message", MagicMock(return_value=None)) +@patch("breathecode.authenticate.models.Token.get_or_create", MagicMock(wraps=Token.get_or_create)) def test__post__with_other_invite__cohort__waiting_list(bc: Breathecode, client: APIClient, validation_res): - user_invite = {'email': 'pokemon@potato.io', 'status': 'WAITING_LIST', 'cohort_id': None, 'syllabus_id': None} + user_invite = {"email": "pokemon@potato.io", "status": "WAITING_LIST", "cohort_id": None, "syllabus_id": None} model = bc.database.create(cohort=1, user_invite=user_invite) - url = reverse_lazy('authenticate:subscribe') + url = reverse_lazy("authenticate:subscribe") data = { - 'email': 'pokemon@potato.io', - 'first_name': 'lord', - 'last_name': 'valdomero', - 'phone': '+123123123', - 'cohort': model.cohort.id, + "email": "pokemon@potato.io", + "first_name": "lord", + "last_name": "valdomero", + "phone": "+123123123", + "cohort": model.cohort.id, } access_token = bc.random.string(lower=True, upper=True, number=True, size=40) - with patch('binascii.hexlify', MagicMock(return_value=bytes(access_token, 'utf-8'))): - response = client.post(url, data, format='json') + with patch("binascii.hexlify", MagicMock(return_value=bytes(access_token, "utf-8"))): + response = client.post(url, data, format="json") - del data['cohort'] + del data["cohort"] json = response.json() - expected = post_serializer(plans=[], - data={ - 'id': 2, - 'access_token': access_token, - 'cohort': 1, - 'user': 1, - **data, - 'status': 'ACCEPTED', - }) + expected = post_serializer( + plans=[], + data={ + "id": 2, + "access_token": access_token, + "cohort": 1, + "user": 1, + **data, + "status": "ACCEPTED", + }, + ) assert json == expected assert response.status_code == status.HTTP_201_CREATED - token = hashlib.sha512(('pokemon@potato.io').encode('UTF-8') + b).hexdigest() - assert bc.database.list_of('authenticate.UserInvite') == [ + token = hashlib.sha512(("pokemon@potato.io").encode("UTF-8") + b).hexdigest() + assert bc.database.list_of("authenticate.UserInvite") == [ bc.format.to_dict(model.user_invite), - user_invite_db_item({ - 'id': 2, - 'academy_id': 1, - 'process_status': 'DONE', - 'status': 'ACCEPTED', - 'user_id': 1, - 'token': token, - 'cohort_id': 1, - **data, - }), + user_invite_db_item( + { + "id": 2, + "academy_id": 1, + "process_status": "DONE", + "status": "ACCEPTED", + "user_id": 1, + "token": token, + "cohort_id": 1, + **data, + } + ), ] - del data['phone'] + del data["phone"] - assert bc.database.list_of('payments.Plan') == [] + assert bc.database.list_of("payments.Plan") == [] - assert bc.database.list_of('marketing.Course') == [] + assert bc.database.list_of("marketing.Course") == [] - user_db = bc.database.list_of('auth.User') + user_db = bc.database.list_of("auth.User") for item in user_db: - assert isinstance(item['date_joined'], datetime) - del item['date_joined'] - - assert user_db == [{ - 'email': 'pokemon@potato.io', - 'first_name': 'lord', - 'id': 1, - 'is_active': True, - 
'is_staff': False, - 'is_superuser': False, - 'last_login': None, - 'last_name': 'valdomero', - 'password': '', - 'username': 'pokemon@potato.io', - }] - assert async_validate_email_invite.delay.call_args_list == [call(1), call(2)] + assert isinstance(item["date_joined"], datetime) + del item["date_joined"] - assert notify_actions.send_email_message.call_args_list == [ - call('verify_email', - 'pokemon@potato.io', { - 'LANG': 'en', - 'SUBJECT': '4Geeks - Validate account', - 'LINK': f'/v1/auth/password/{token}', - }, - academy=model.academy) + assert user_db == [ + { + "email": "pokemon@potato.io", + "first_name": "lord", + "id": 1, + "is_active": True, + "is_staff": False, + "is_superuser": False, + "last_login": None, + "last_name": "valdomero", + "password": "", + "username": "pokemon@potato.io", + } ] + assert async_validate_email_invite.delay.call_args_list == [call(1), call(2)] - user = bc.database.get('auth.User', 1, dict=False) + assert notify_actions.send_email_message.call_args_list == [ + call( + "verify_email", + "pokemon@potato.io", + { + "LANG": "en", + "SUBJECT": "4Geeks - Validate account", + "LINK": f"/v1/auth/password/{token}", + }, + academy=model.academy, + ) + ] + + user = bc.database.get("auth.User", 1, dict=False) assert Token.get_or_create.call_args_list == [ - call(user=user, token_type='login'), + call(user=user, token_type="login"), ] # Given: 1 Syllabus and 1 UserInvite # When: Course is passed as slug and exists, course with waiting list # Then: It should return 400 -@patch('django.utils.timezone.now', MagicMock(return_value=now)) -@patch('breathecode.notify.actions.send_email_message', MagicMock(return_value=None)) -@patch('breathecode.authenticate.models.Token.get_or_create', MagicMock(wraps=Token.get_or_create)) +@patch("django.utils.timezone.now", MagicMock(return_value=now)) +@patch("breathecode.notify.actions.send_email_message", MagicMock(return_value=None)) +@patch("breathecode.authenticate.models.Token.get_or_create", MagicMock(wraps=Token.get_or_create)) def test__post__with_other_invite__syllabus__waiting_list(bc: Breathecode, client: APIClient, validation_res): - user_invite = {'email': 'pokemon@potato.io', 'status': 'WAITING_LIST', 'cohort_id': None, 'syllabus_id': None} + user_invite = {"email": "pokemon@potato.io", "status": "WAITING_LIST", "cohort_id": None, "syllabus_id": None} model = bc.database.create(syllabus=1, user_invite=user_invite) - url = reverse_lazy('authenticate:subscribe') + url = reverse_lazy("authenticate:subscribe") data = { - 'email': 'pokemon@potato.io', - 'first_name': 'lord', - 'last_name': 'valdomero', - 'phone': '+123123123', - 'syllabus': model.syllabus.id, + "email": "pokemon@potato.io", + "first_name": "lord", + "last_name": "valdomero", + "phone": "+123123123", + "syllabus": model.syllabus.id, } access_token = bc.random.string(lower=True, upper=True, number=True, size=40) - with patch('binascii.hexlify', MagicMock(return_value=bytes(access_token, 'utf-8'))): - response = client.post(url, data, format='json') + with patch("binascii.hexlify", MagicMock(return_value=bytes(access_token, "utf-8"))): + response = client.post(url, data, format="json") - del data['syllabus'] + del data["syllabus"] json = response.json() - expected = post_serializer(plans=[], - data={ - 'id': 2, - 'access_token': access_token, - 'syllabus': 1, - 'user': 1, - **data, - 'status': 'ACCEPTED', - }) + expected = post_serializer( + plans=[], + data={ + "id": 2, + "access_token": access_token, + "syllabus": 1, + "user": 1, + **data, + "status": 
"ACCEPTED", + }, + ) assert json == expected assert response.status_code == status.HTTP_201_CREATED - token = hashlib.sha512(('pokemon@potato.io').encode('UTF-8') + b).hexdigest() - assert bc.database.list_of('authenticate.UserInvite') == [ + token = hashlib.sha512(("pokemon@potato.io").encode("UTF-8") + b).hexdigest() + assert bc.database.list_of("authenticate.UserInvite") == [ bc.format.to_dict(model.user_invite), - user_invite_db_item({ - 'id': 2, - 'process_status': 'DONE', - 'status': 'ACCEPTED', - 'user_id': 1, - 'token': token, - 'syllabus_id': 1, - **data, - }), + user_invite_db_item( + { + "id": 2, + "process_status": "DONE", + "status": "ACCEPTED", + "user_id": 1, + "token": token, + "syllabus_id": 1, + **data, + } + ), ] - del data['phone'] + del data["phone"] - assert bc.database.list_of('payments.Plan') == [] + assert bc.database.list_of("payments.Plan") == [] - assert bc.database.list_of('marketing.Course') == [] + assert bc.database.list_of("marketing.Course") == [] - user_db = bc.database.list_of('auth.User') + user_db = bc.database.list_of("auth.User") for item in user_db: - assert isinstance(item['date_joined'], datetime) - del item['date_joined'] - - assert user_db == [{ - 'email': 'pokemon@potato.io', - 'first_name': 'lord', - 'id': 1, - 'is_active': True, - 'is_staff': False, - 'is_superuser': False, - 'last_login': None, - 'last_name': 'valdomero', - 'password': '', - 'username': 'pokemon@potato.io', - }] - assert async_validate_email_invite.delay.call_args_list == [call(1), call(2)] + assert isinstance(item["date_joined"], datetime) + del item["date_joined"] - assert notify_actions.send_email_message.call_args_list == [ - call('verify_email', - 'pokemon@potato.io', { - 'LANG': 'en', - 'SUBJECT': '4Geeks - Validate account', - 'LINK': f'/v1/auth/password/{token}', - }, - academy=None) + assert user_db == [ + { + "email": "pokemon@potato.io", + "first_name": "lord", + "id": 1, + "is_active": True, + "is_staff": False, + "is_superuser": False, + "last_login": None, + "last_name": "valdomero", + "password": "", + "username": "pokemon@potato.io", + } ] + assert async_validate_email_invite.delay.call_args_list == [call(1), call(2)] - user = bc.database.get('auth.User', 1, dict=False) + assert notify_actions.send_email_message.call_args_list == [ + call( + "verify_email", + "pokemon@potato.io", + { + "LANG": "en", + "SUBJECT": "4Geeks - Validate account", + "LINK": f"/v1/auth/password/{token}", + }, + academy=None, + ) + ] + + user = bc.database.get("auth.User", 1, dict=False) assert Token.get_or_create.call_args_list == [ - call(user=user, token_type='login'), + call(user=user, token_type="login"), ] @@ -1490,22 +1569,22 @@ def test__post__with_other_invite__syllabus__waiting_list(bc: Breathecode, clien # """ -@patch('django.utils.timezone.now', MagicMock(return_value=now)) -@patch('breathecode.notify.actions.send_email_message', MagicMock(return_value=None)) -@patch('breathecode.authenticate.models.Token.get_or_create', MagicMock(wraps=Token.get_or_create)) +@patch("django.utils.timezone.now", MagicMock(return_value=now)) +@patch("breathecode.notify.actions.send_email_message", MagicMock(return_value=None)) +@patch("breathecode.authenticate.models.Token.get_or_create", MagicMock(wraps=Token.get_or_create)) def test_task__put__without_email(bc: Breathecode, client: APIClient): - url = reverse_lazy('authenticate:subscribe') + url = reverse_lazy("authenticate:subscribe") response = client.put(url) json = response.json() - expected = {'detail': 'not-found', 'status_code': 
404} + expected = {"detail": "not-found", "status_code": 404} assert json == expected assert response.status_code == status.HTTP_404_NOT_FOUND - assert bc.database.list_of('authenticate.UserInvite') == [] - assert bc.database.list_of('auth.User') == [] - assert bc.database.list_of('marketing.Course') == [] - assert bc.database.list_of('payments.Plan') == [] + assert bc.database.list_of("authenticate.UserInvite") == [] + assert bc.database.list_of("auth.User") == [] + assert bc.database.list_of("marketing.Course") == [] + assert bc.database.list_of("payments.Plan") == [] assert notify_actions.send_email_message.call_args_list == [] assert Token.get_or_create.call_args_list == [] @@ -1516,90 +1595,99 @@ def test_task__put__without_email(bc: Breathecode, client: APIClient): # """ -@patch('django.utils.timezone.now', MagicMock(return_value=now)) -@patch('breathecode.notify.actions.send_email_message', MagicMock(return_value=None)) -@patch('breathecode.authenticate.models.Token.get_or_create', MagicMock(wraps=Token.get_or_create)) +@patch("django.utils.timezone.now", MagicMock(return_value=now)) +@patch("breathecode.notify.actions.send_email_message", MagicMock(return_value=None)) +@patch("breathecode.authenticate.models.Token.get_or_create", MagicMock(wraps=Token.get_or_create)) def test_task__put__with_user_invite__cohort_as_none(bc: Breathecode, client: APIClient, validation_res): token = bc.random.string(lower=True, upper=True, number=True, size=40) user_invite = { - 'email': 'pokemon@potato.io', - 'status': 'WAITING_LIST', - 'token': token, + "email": "pokemon@potato.io", + "status": "WAITING_LIST", + "token": token, } model = bc.database.create(user_invite=user_invite) - url = reverse_lazy('authenticate:subscribe') + url = reverse_lazy("authenticate:subscribe") data = { - 'email': 'pokemon@potato.io', - 'first_name': 'lord', - 'last_name': 'valdomero', - 'phone': '+123123123', - 'token': token, + "email": "pokemon@potato.io", + "first_name": "lord", + "last_name": "valdomero", + "phone": "+123123123", + "token": token, } access_token = bc.random.string(lower=True, upper=True, number=True, size=40) - with patch('binascii.hexlify', MagicMock(return_value=bytes(access_token, 'utf-8'))): - response = client.put(url, data, format='json') + with patch("binascii.hexlify", MagicMock(return_value=bytes(access_token, "utf-8"))): + response = client.put(url, data, format="json") - del data['token'] + del data["token"] json = response.json() - expected = put_serializer(model.user_invite, - data={ - 'id': 1, - 'access_token': access_token, - 'user': 1, - **data, - 'status': 'ACCEPTED', - }) + expected = put_serializer( + model.user_invite, + data={ + "id": 1, + "access_token": access_token, + "user": 1, + **data, + "status": "ACCEPTED", + }, + ) assert json == expected assert response.status_code == status.HTTP_200_OK - assert bc.database.list_of('authenticate.UserInvite') == [ - user_invite_db_item({ - 'process_status': 'DONE', - 'status': 'ACCEPTED', - 'user_id': 1, - 'token': token, - **data, - }) + assert bc.database.list_of("authenticate.UserInvite") == [ + user_invite_db_item( + { + "process_status": "DONE", + "status": "ACCEPTED", + "user_id": 1, + "token": token, + **data, + } + ) ] - user_db = bc.database.list_of('auth.User') + user_db = bc.database.list_of("auth.User") for item in user_db: - assert isinstance(item['date_joined'], datetime) - del item['date_joined'] - - assert user_db == [{ - 'email': 'pokemon@potato.io', - 'first_name': 'lord', - 'id': 1, - 'is_active': True, - 'is_staff': 
False, - 'is_superuser': False, - 'last_login': None, - 'last_name': 'valdomero', - 'password': '', - 'username': 'pokemon@potato.io', - }] - - assert bc.database.list_of('marketing.Course') == [] - assert bc.database.list_of('payments.Plan') == [] + assert isinstance(item["date_joined"], datetime) + del item["date_joined"] + + assert user_db == [ + { + "email": "pokemon@potato.io", + "first_name": "lord", + "id": 1, + "is_active": True, + "is_staff": False, + "is_superuser": False, + "last_login": None, + "last_name": "valdomero", + "password": "", + "username": "pokemon@potato.io", + } + ] + + assert bc.database.list_of("marketing.Course") == [] + assert bc.database.list_of("payments.Plan") == [] assert async_validate_email_invite.delay.call_args_list == [call(1)] assert notify_actions.send_email_message.call_args_list == [ - call('verify_email', - 'pokemon@potato.io', { - 'LANG': 'en', - 'SUBJECT': '4Geeks - Validate account', - 'LINK': f'/v1/auth/password/{token}', - }, - academy=None) - ] - - user = bc.database.get('auth.User', 1, dict=False) + call( + "verify_email", + "pokemon@potato.io", + { + "LANG": "en", + "SUBJECT": "4Geeks - Validate account", + "LINK": f"/v1/auth/password/{token}", + }, + academy=None, + ) + ] + + user = bc.database.get("auth.User", 1, dict=False) assert Token.get_or_create.call_args_list == [ - call(user=user, token_type='login'), + call(user=user, token_type="login"), ] @@ -1608,45 +1696,47 @@ def test_task__put__with_user_invite__cohort_as_none(bc: Breathecode, client: AP # """ -@patch('django.utils.timezone.now', MagicMock(return_value=now)) -@patch('breathecode.notify.actions.send_email_message', MagicMock(return_value=None)) -@patch('breathecode.authenticate.models.Token.get_or_create', MagicMock(wraps=Token.get_or_create)) +@patch("django.utils.timezone.now", MagicMock(return_value=now)) +@patch("breathecode.notify.actions.send_email_message", MagicMock(return_value=None)) +@patch("breathecode.authenticate.models.Token.get_or_create", MagicMock(wraps=Token.get_or_create)) def test_task__put__with_user_invite__cohort_not_found(bc: Breathecode, client: APIClient): token = bc.random.string(lower=True, upper=True, number=True, size=40) user_invite = { - 'email': 'pokemon@potato.io', - 'status': 'WAITING_LIST', - 'token': token, + "email": "pokemon@potato.io", + "status": "WAITING_LIST", + "token": token, } bc.database.create(user_invite=user_invite) - url = reverse_lazy('authenticate:subscribe') + url = reverse_lazy("authenticate:subscribe") data = { - 'email': 'pokemon@potato.io', - 'first_name': 'lord', - 'last_name': 'valdomero', - 'phone': '+123123123', - 'cohort': 1, - 'token': token, + "email": "pokemon@potato.io", + "first_name": "lord", + "last_name": "valdomero", + "phone": "+123123123", + "cohort": 1, + "token": token, } - response = client.put(url, data, format='json') + response = client.put(url, data, format="json") - del data['token'] + del data["token"] json = response.json() - expected = {'cohort': ['Invalid pk "1" - object does not exist.']} + expected = {"cohort": ['Invalid pk "1" - object does not exist.']} assert json == expected assert response.status_code == status.HTTP_400_BAD_REQUEST - assert bc.database.list_of('authenticate.UserInvite') == [ - user_invite_db_item({ - 'status': 'WAITING_LIST', - 'token': token, - }), + assert bc.database.list_of("authenticate.UserInvite") == [ + user_invite_db_item( + { + "status": "WAITING_LIST", + "token": token, + } + ), ] - assert bc.database.list_of('marketing.Course') == [] - assert 
bc.database.list_of('payments.Plan') == [] - assert bc.database.list_of('auth.User') == [] + assert bc.database.list_of("marketing.Course") == [] + assert bc.database.list_of("payments.Plan") == [] + assert bc.database.list_of("auth.User") == [] assert notify_actions.send_email_message.call_args_list == [] assert Token.get_or_create.call_args_list == [] @@ -1657,94 +1747,103 @@ def test_task__put__with_user_invite__cohort_not_found(bc: Breathecode, client: # """ -@patch('django.utils.timezone.now', MagicMock(return_value=now)) -@patch('breathecode.notify.actions.send_email_message', MagicMock(return_value=None)) -@patch('breathecode.authenticate.models.Token.get_or_create', MagicMock(wraps=Token.get_or_create)) +@patch("django.utils.timezone.now", MagicMock(return_value=now)) +@patch("breathecode.notify.actions.send_email_message", MagicMock(return_value=None)) +@patch("breathecode.authenticate.models.Token.get_or_create", MagicMock(wraps=Token.get_or_create)) def test_task__put__with_user_invite__cohort_found(bc: Breathecode, client: APIClient, validation_res): token = bc.random.string(lower=True, upper=True, number=True, size=40) user_invite = { - 'email': 'pokemon@potato.io', - 'status': 'WAITING_LIST', - 'token': token, + "email": "pokemon@potato.io", + "status": "WAITING_LIST", + "token": token, } model = bc.database.create(user_invite=user_invite, cohort=1) - url = reverse_lazy('authenticate:subscribe') + url = reverse_lazy("authenticate:subscribe") data = { - 'email': 'pokemon@potato.io', - 'first_name': 'lord', - 'last_name': 'valdomero', - 'phone': '+123123123', - 'cohort': 1, - 'token': token, + "email": "pokemon@potato.io", + "first_name": "lord", + "last_name": "valdomero", + "phone": "+123123123", + "cohort": 1, + "token": token, } access_token = bc.random.string(lower=True, upper=True, number=True, size=40) - with patch('binascii.hexlify', MagicMock(return_value=bytes(access_token, 'utf-8'))): - response = client.put(url, data, format='json') + with patch("binascii.hexlify", MagicMock(return_value=bytes(access_token, "utf-8"))): + response = client.put(url, data, format="json") - del data['token'] + del data["token"] json = response.json() - expected = put_serializer(model.user_invite, - data={ - 'id': 1, - 'access_token': access_token, - 'user': 1, - **data, - 'status': 'ACCEPTED', - }) + expected = put_serializer( + model.user_invite, + data={ + "id": 1, + "access_token": access_token, + "user": 1, + **data, + "status": "ACCEPTED", + }, + ) assert json == expected assert response.status_code == status.HTTP_200_OK - del data['cohort'] - assert bc.database.list_of('authenticate.UserInvite') == [ - user_invite_db_item({ - 'academy_id': 1, - 'process_status': 'DONE', - 'status': 'ACCEPTED', - 'user_id': 1, - 'token': token, - 'cohort_id': 1, - **data, - }) + del data["cohort"] + assert bc.database.list_of("authenticate.UserInvite") == [ + user_invite_db_item( + { + "academy_id": 1, + "process_status": "DONE", + "status": "ACCEPTED", + "user_id": 1, + "token": token, + "cohort_id": 1, + **data, + } + ) ] - assert bc.database.list_of('marketing.Course') == [] - assert bc.database.list_of('payments.Plan') == [] + assert bc.database.list_of("marketing.Course") == [] + assert bc.database.list_of("payments.Plan") == [] - user_db = bc.database.list_of('auth.User') + user_db = bc.database.list_of("auth.User") for item in user_db: - assert isinstance(item['date_joined'], datetime) - del item['date_joined'] - - assert user_db == [{ - 'email': 'pokemon@potato.io', - 'first_name': 'lord', 
- 'id': 1, - 'is_active': True, - 'is_staff': False, - 'is_superuser': False, - 'last_login': None, - 'last_name': 'valdomero', - 'password': '', - 'username': 'pokemon@potato.io', - }] - assert async_validate_email_invite.delay.call_args_list == [call(1)] + assert isinstance(item["date_joined"], datetime) + del item["date_joined"] - assert notify_actions.send_email_message.call_args_list == [ - call('verify_email', - 'pokemon@potato.io', { - 'LANG': 'en', - 'SUBJECT': '4Geeks - Validate account', - 'LINK': f'/v1/auth/password/{token}', - }, - academy=model.academy) + assert user_db == [ + { + "email": "pokemon@potato.io", + "first_name": "lord", + "id": 1, + "is_active": True, + "is_staff": False, + "is_superuser": False, + "last_login": None, + "last_name": "valdomero", + "password": "", + "username": "pokemon@potato.io", + } ] + assert async_validate_email_invite.delay.call_args_list == [call(1)] - user = bc.database.get('auth.User', 1, dict=False) + assert notify_actions.send_email_message.call_args_list == [ + call( + "verify_email", + "pokemon@potato.io", + { + "LANG": "en", + "SUBJECT": "4Geeks - Validate account", + "LINK": f"/v1/auth/password/{token}", + }, + academy=model.academy, + ) + ] + + user = bc.database.get("auth.User", 1, dict=False) assert Token.get_or_create.call_args_list == [ - call(user=user, token_type='login'), + call(user=user, token_type="login"), ] @@ -1753,95 +1852,105 @@ def test_task__put__with_user_invite__cohort_found(bc: Breathecode, client: APIC # """ -@patch('django.utils.timezone.now', MagicMock(return_value=now)) -@patch('breathecode.notify.actions.send_email_message', MagicMock(return_value=None)) -@patch('breathecode.authenticate.models.Token.get_or_create', MagicMock(wraps=Token.get_or_create)) +@patch("django.utils.timezone.now", MagicMock(return_value=now)) +@patch("breathecode.notify.actions.send_email_message", MagicMock(return_value=None)) +@patch("breathecode.authenticate.models.Token.get_or_create", MagicMock(wraps=Token.get_or_create)) def test_task__put__with_user_invite__cohort_found__academy_available_as_saas__user_does_not_exists( - bc: Breathecode, client: APIClient, validation_res): + bc: Breathecode, client: APIClient, validation_res +): token = bc.random.string(lower=True, upper=True, number=True, size=40) user_invite = { - 'email': 'pokemon@potato.io', - 'status': 'WAITING_LIST', - 'token': token, + "email": "pokemon@potato.io", + "status": "WAITING_LIST", + "token": token, } - academy = {'available_as_saas': True} + academy = {"available_as_saas": True} model = bc.database.create(user_invite=user_invite, cohort=1, academy=academy) - url = reverse_lazy('authenticate:subscribe') + url = reverse_lazy("authenticate:subscribe") data = { - 'email': 'pokemon@potato.io', - 'first_name': 'lord', - 'last_name': 'valdomero', - 'phone': '+123123123', - 'cohort': 1, - 'token': token, + "email": "pokemon@potato.io", + "first_name": "lord", + "last_name": "valdomero", + "phone": "+123123123", + "cohort": 1, + "token": token, } access_token = bc.random.string(lower=True, upper=True, number=True, size=40) - with patch('binascii.hexlify', MagicMock(return_value=bytes(access_token, 'utf-8'))): - response = client.put(url, data, format='json') + with patch("binascii.hexlify", MagicMock(return_value=bytes(access_token, "utf-8"))): + response = client.put(url, data, format="json") - del data['token'] + del data["token"] json = response.json() - expected = put_serializer(model.user_invite, - data={ - 'id': 1, - 'access_token': access_token, - 'user': 1, 
- **data, - 'status': 'ACCEPTED', - }) + expected = put_serializer( + model.user_invite, + data={ + "id": 1, + "access_token": access_token, + "user": 1, + **data, + "status": "ACCEPTED", + }, + ) assert json == expected assert response.status_code == status.HTTP_200_OK - del data['cohort'] - assert bc.database.list_of('authenticate.UserInvite') == [ - user_invite_db_item({ - 'academy_id': 1, - 'process_status': 'DONE', - 'status': 'ACCEPTED', - 'user_id': 1, - 'token': token, - 'cohort_id': 1, - **data, - }) + del data["cohort"] + assert bc.database.list_of("authenticate.UserInvite") == [ + user_invite_db_item( + { + "academy_id": 1, + "process_status": "DONE", + "status": "ACCEPTED", + "user_id": 1, + "token": token, + "cohort_id": 1, + **data, + } + ) ] - user_db = bc.database.list_of('auth.User') + user_db = bc.database.list_of("auth.User") for item in user_db: - assert isinstance(item['date_joined'], datetime) - del item['date_joined'] - - assert user_db == [{ - 'email': 'pokemon@potato.io', - 'first_name': 'lord', - 'id': 1, - 'is_active': True, - 'is_staff': False, - 'is_superuser': False, - 'last_login': None, - 'last_name': 'valdomero', - 'password': '', - 'username': 'pokemon@potato.io', - }] - - assert bc.database.list_of('marketing.Course') == [] - assert bc.database.list_of('payments.Plan') == [] + assert isinstance(item["date_joined"], datetime) + del item["date_joined"] + + assert user_db == [ + { + "email": "pokemon@potato.io", + "first_name": "lord", + "id": 1, + "is_active": True, + "is_staff": False, + "is_superuser": False, + "last_login": None, + "last_name": "valdomero", + "password": "", + "username": "pokemon@potato.io", + } + ] + + assert bc.database.list_of("marketing.Course") == [] + assert bc.database.list_of("payments.Plan") == [] assert async_validate_email_invite.delay.call_args_list == [call(1)] assert notify_actions.send_email_message.call_args_list == [ - call('verify_email', - 'pokemon@potato.io', { - 'LANG': 'en', - 'SUBJECT': '4Geeks - Validate account', - 'LINK': f'/v1/auth/password/{token}', - }, - academy=model.academy) - ] - - user = bc.database.get('auth.User', 1, dict=False) + call( + "verify_email", + "pokemon@potato.io", + { + "LANG": "en", + "SUBJECT": "4Geeks - Validate account", + "LINK": f"/v1/auth/password/{token}", + }, + academy=model.academy, + ) + ] + + user = bc.database.get("auth.User", 1, dict=False) assert Token.get_or_create.call_args_list == [ - call(user=user, token_type='login'), + call(user=user, token_type="login"), ] @@ -1850,70 +1959,75 @@ def test_task__put__with_user_invite__cohort_found__academy_available_as_saas__u # """ -@patch('django.utils.timezone.now', MagicMock(return_value=now)) -@patch('breathecode.notify.actions.send_email_message', MagicMock(return_value=None)) -@patch('breathecode.authenticate.models.Token.get_or_create', MagicMock(wraps=Token.get_or_create)) +@patch("django.utils.timezone.now", MagicMock(return_value=now)) +@patch("breathecode.notify.actions.send_email_message", MagicMock(return_value=None)) +@patch("breathecode.authenticate.models.Token.get_or_create", MagicMock(wraps=Token.get_or_create)) def test_task__put__with_user_invite__cohort_found__academy_available_as_saas__user_exists( - bc: Breathecode, client: APIClient, validation_res): + bc: Breathecode, client: APIClient, validation_res +): token = bc.random.string(lower=True, upper=True, number=True, size=40) user_invite = { - 'email': 'pokemon@potato.io', - 'status': 'WAITING_LIST', - 'token': token, + "email": "pokemon@potato.io", + 
"status": "WAITING_LIST", + "token": token, } - academy = {'available_as_saas': True} - user = {'email': 'pokemon@potato.io'} + academy = {"available_as_saas": True} + user = {"email": "pokemon@potato.io"} model = bc.database.create(user_invite=user_invite, cohort=1, academy=academy, user=user) - url = reverse_lazy('authenticate:subscribe') + url = reverse_lazy("authenticate:subscribe") data = { - 'email': 'pokemon@potato.io', - 'first_name': 'lord', - 'last_name': 'valdomero', - 'phone': '+123123123', - 'cohort': 1, - 'token': token, + "email": "pokemon@potato.io", + "first_name": "lord", + "last_name": "valdomero", + "phone": "+123123123", + "cohort": 1, + "token": token, } access_token = bc.random.string(lower=True, upper=True, number=True, size=40) - with patch('binascii.hexlify', MagicMock(return_value=bytes(access_token, 'utf-8'))): - response = client.put(url, data, format='json') + with patch("binascii.hexlify", MagicMock(return_value=bytes(access_token, "utf-8"))): + response = client.put(url, data, format="json") - del data['token'] + del data["token"] json = response.json() - expected = put_serializer(model.user_invite, - user=model.user, - data={ - 'id': 1, - 'access_token': access_token, - **data, - 'status': 'ACCEPTED', - }) + expected = put_serializer( + model.user_invite, + user=model.user, + data={ + "id": 1, + "access_token": access_token, + **data, + "status": "ACCEPTED", + }, + ) assert json == expected assert response.status_code == status.HTTP_200_OK - del data['cohort'] - assert bc.database.list_of('authenticate.UserInvite') == [ - user_invite_db_item({ - 'academy_id': 1, - 'process_status': 'DONE', - 'status': 'ACCEPTED', - 'user_id': 1, - 'token': token, - 'author_id': 1, - 'cohort_id': 1, - **data, - }) + del data["cohort"] + assert bc.database.list_of("authenticate.UserInvite") == [ + user_invite_db_item( + { + "academy_id": 1, + "process_status": "DONE", + "status": "ACCEPTED", + "user_id": 1, + "token": token, + "author_id": 1, + "cohort_id": 1, + **data, + } + ) ] - assert bc.database.list_of('marketing.Course') == [] - assert bc.database.list_of('payments.Plan') == [] - assert bc.database.list_of('auth.User') == [bc.format.to_dict(model.user)] + assert bc.database.list_of("marketing.Course") == [] + assert bc.database.list_of("payments.Plan") == [] + assert bc.database.list_of("auth.User") == [bc.format.to_dict(model.user)] assert async_validate_email_invite.delay.call_args_list == [call(1)] assert notify_actions.send_email_message.call_args_list == [] assert Token.get_or_create.call_args_list == [ - call(user=model.user, token_type='login'), + call(user=model.user, token_type="login"), ] @@ -1922,45 +2036,47 @@ def test_task__put__with_user_invite__cohort_found__academy_available_as_saas__u # """ -@patch('django.utils.timezone.now', MagicMock(return_value=now)) -@patch('breathecode.notify.actions.send_email_message', MagicMock(return_value=None)) -@patch('breathecode.authenticate.models.Token.get_or_create', MagicMock(wraps=Token.get_or_create)) +@patch("django.utils.timezone.now", MagicMock(return_value=now)) +@patch("breathecode.notify.actions.send_email_message", MagicMock(return_value=None)) +@patch("breathecode.authenticate.models.Token.get_or_create", MagicMock(wraps=Token.get_or_create)) def test_task__put__with_user_invite__syllabus_not_found(bc: Breathecode, client: APIClient): token = bc.random.string(lower=True, upper=True, number=True, size=40) user_invite = { - 'email': 'pokemon@potato.io', - 'status': 'WAITING_LIST', - 'token': token, + 
"email": "pokemon@potato.io", + "status": "WAITING_LIST", + "token": token, } bc.database.create(user_invite=user_invite) - url = reverse_lazy('authenticate:subscribe') + url = reverse_lazy("authenticate:subscribe") data = { - 'email': 'pokemon@potato.io', - 'first_name': 'lord', - 'last_name': 'valdomero', - 'phone': '+123123123', - 'syllabus': 1, - 'token': token, + "email": "pokemon@potato.io", + "first_name": "lord", + "last_name": "valdomero", + "phone": "+123123123", + "syllabus": 1, + "token": token, } - response = client.put(url, data, format='json') + response = client.put(url, data, format="json") - del data['token'] + del data["token"] json = response.json() - expected = {'detail': 'syllabus-not-found', 'status_code': 400} + expected = {"detail": "syllabus-not-found", "status_code": 400} assert json == expected assert response.status_code == status.HTTP_400_BAD_REQUEST - assert bc.database.list_of('authenticate.UserInvite') == [ - user_invite_db_item({ - 'status': 'WAITING_LIST', - 'token': token, - }), + assert bc.database.list_of("authenticate.UserInvite") == [ + user_invite_db_item( + { + "status": "WAITING_LIST", + "token": token, + } + ), ] - assert bc.database.list_of('marketing.Course') == [] - assert bc.database.list_of('payments.Plan') == [] - assert bc.database.list_of('auth.User') == [] + assert bc.database.list_of("marketing.Course") == [] + assert bc.database.list_of("payments.Plan") == [] + assert bc.database.list_of("auth.User") == [] assert notify_actions.send_email_message.call_args_list == [] assert Token.get_or_create.call_args_list == [] @@ -1971,96 +2087,105 @@ def test_task__put__with_user_invite__syllabus_not_found(bc: Breathecode, client # """ -@patch('django.utils.timezone.now', MagicMock(return_value=now)) -@patch('breathecode.notify.actions.send_email_message', MagicMock(return_value=None)) -@patch('breathecode.authenticate.models.Token.get_or_create', MagicMock(wraps=Token.get_or_create)) +@patch("django.utils.timezone.now", MagicMock(return_value=now)) +@patch("breathecode.notify.actions.send_email_message", MagicMock(return_value=None)) +@patch("breathecode.authenticate.models.Token.get_or_create", MagicMock(wraps=Token.get_or_create)) def test_task__put__with_user_invite__syllabus_found(bc: Breathecode, client: APIClient, validation_res): token = bc.random.string(lower=True, upper=True, number=True, size=40) user_invite = { - 'email': 'pokemon@potato.io', - 'status': 'WAITING_LIST', - 'token': token, - 'cohort_id': None, + "email": "pokemon@potato.io", + "status": "WAITING_LIST", + "token": token, + "cohort_id": None, } model = bc.database.create(user_invite=user_invite, cohort=1, syllabus_version=1) - url = reverse_lazy('authenticate:subscribe') + url = reverse_lazy("authenticate:subscribe") data = { - 'email': 'pokemon@potato.io', - 'first_name': 'lord', - 'last_name': 'valdomero', - 'phone': '+123123123', - 'syllabus': 1, - 'token': token, + "email": "pokemon@potato.io", + "first_name": "lord", + "last_name": "valdomero", + "phone": "+123123123", + "syllabus": 1, + "token": token, } access_token = bc.random.string(lower=True, upper=True, number=True, size=40) - with patch('binascii.hexlify', MagicMock(return_value=bytes(access_token, 'utf-8'))): - response = client.put(url, data, format='json') + with patch("binascii.hexlify", MagicMock(return_value=bytes(access_token, "utf-8"))): + response = client.put(url, data, format="json") - del data['token'] + del data["token"] json = response.json() - expected = put_serializer(model.user_invite, - 
data={ - 'id': 1, - 'access_token': access_token, - 'user': 1, - **data, - 'status': 'ACCEPTED', - }) + expected = put_serializer( + model.user_invite, + data={ + "id": 1, + "access_token": access_token, + "user": 1, + **data, + "status": "ACCEPTED", + }, + ) assert json == expected assert response.status_code == status.HTTP_200_OK - del data['syllabus'] - assert bc.database.list_of('authenticate.UserInvite') == [ - user_invite_db_item({ - 'academy_id': 1, - 'process_status': 'DONE', - 'status': 'ACCEPTED', - 'syllabus_id': 1, - 'user_id': 1, - 'token': token, - **data, - }) + del data["syllabus"] + assert bc.database.list_of("authenticate.UserInvite") == [ + user_invite_db_item( + { + "academy_id": 1, + "process_status": "DONE", + "status": "ACCEPTED", + "syllabus_id": 1, + "user_id": 1, + "token": token, + **data, + } + ) ] - assert bc.database.list_of('marketing.Course') == [] - assert bc.database.list_of('payments.Plan') == [] + assert bc.database.list_of("marketing.Course") == [] + assert bc.database.list_of("payments.Plan") == [] - user_db = bc.database.list_of('auth.User') + user_db = bc.database.list_of("auth.User") for item in user_db: - assert isinstance(item['date_joined'], datetime) - del item['date_joined'] - - assert user_db == [{ - 'email': 'pokemon@potato.io', - 'first_name': 'lord', - 'id': 1, - 'is_active': True, - 'is_staff': False, - 'is_superuser': False, - 'last_login': None, - 'last_name': 'valdomero', - 'password': '', - 'username': 'pokemon@potato.io', - }] - assert async_validate_email_invite.delay.call_args_list == [call(1)] + assert isinstance(item["date_joined"], datetime) + del item["date_joined"] - assert notify_actions.send_email_message.call_args_list == [ - call('verify_email', - 'pokemon@potato.io', { - 'LANG': 'en', - 'SUBJECT': '4Geeks - Validate account', - 'LINK': f'/v1/auth/password/{token}', - }, - academy=model.academy) + assert user_db == [ + { + "email": "pokemon@potato.io", + "first_name": "lord", + "id": 1, + "is_active": True, + "is_staff": False, + "is_superuser": False, + "last_login": None, + "last_name": "valdomero", + "password": "", + "username": "pokemon@potato.io", + } ] + assert async_validate_email_invite.delay.call_args_list == [call(1)] - user = bc.database.get('auth.User', 1, dict=False) + assert notify_actions.send_email_message.call_args_list == [ + call( + "verify_email", + "pokemon@potato.io", + { + "LANG": "en", + "SUBJECT": "4Geeks - Validate account", + "LINK": f"/v1/auth/password/{token}", + }, + academy=model.academy, + ) + ] + + user = bc.database.get("auth.User", 1, dict=False) assert Token.get_or_create.call_args_list == [ - call(user=user, token_type='login'), + call(user=user, token_type="login"), ] @@ -2069,96 +2194,106 @@ def test_task__put__with_user_invite__syllabus_found(bc: Breathecode, client: AP # """ -@patch('django.utils.timezone.now', MagicMock(return_value=now)) -@patch('breathecode.notify.actions.send_email_message', MagicMock(return_value=None)) -@patch('breathecode.authenticate.models.Token.get_or_create', MagicMock(wraps=Token.get_or_create)) +@patch("django.utils.timezone.now", MagicMock(return_value=now)) +@patch("breathecode.notify.actions.send_email_message", MagicMock(return_value=None)) +@patch("breathecode.authenticate.models.Token.get_or_create", MagicMock(wraps=Token.get_or_create)) def test_task__put__with_user_invite__syllabus_found__academy_available_as_saas__user_does_not_exists( - bc: Breathecode, client: APIClient, validation_res): + bc: Breathecode, client: APIClient, validation_res 
+): token = bc.random.string(lower=True, upper=True, number=True, size=40) user_invite = { - 'email': 'pokemon@potato.io', - 'status': 'WAITING_LIST', - 'token': token, - 'cohort_id': None, + "email": "pokemon@potato.io", + "status": "WAITING_LIST", + "token": token, + "cohort_id": None, } - academy = {'available_as_saas': True} + academy = {"available_as_saas": True} model = bc.database.create(user_invite=user_invite, cohort=1, syllabus_version=1, academy=academy) - url = reverse_lazy('authenticate:subscribe') + url = reverse_lazy("authenticate:subscribe") data = { - 'email': 'pokemon@potato.io', - 'first_name': 'lord', - 'last_name': 'valdomero', - 'phone': '+123123123', - 'syllabus': 1, - 'token': token, + "email": "pokemon@potato.io", + "first_name": "lord", + "last_name": "valdomero", + "phone": "+123123123", + "syllabus": 1, + "token": token, } access_token = bc.random.string(lower=True, upper=True, number=True, size=40) - with patch('binascii.hexlify', MagicMock(return_value=bytes(access_token, 'utf-8'))): - response = client.put(url, data, format='json') + with patch("binascii.hexlify", MagicMock(return_value=bytes(access_token, "utf-8"))): + response = client.put(url, data, format="json") - del data['token'] + del data["token"] json = response.json() - expected = put_serializer(model.user_invite, - data={ - 'id': 1, - 'access_token': access_token, - 'user': 1, - **data, - 'status': 'ACCEPTED', - }) + expected = put_serializer( + model.user_invite, + data={ + "id": 1, + "access_token": access_token, + "user": 1, + **data, + "status": "ACCEPTED", + }, + ) assert json == expected assert response.status_code == status.HTTP_200_OK - del data['syllabus'] - assert bc.database.list_of('authenticate.UserInvite') == [ - user_invite_db_item({ - 'academy_id': 1, - 'process_status': 'DONE', - 'status': 'ACCEPTED', - 'syllabus_id': 1, - 'user_id': 1, - 'token': token, - **data, - }) + del data["syllabus"] + assert bc.database.list_of("authenticate.UserInvite") == [ + user_invite_db_item( + { + "academy_id": 1, + "process_status": "DONE", + "status": "ACCEPTED", + "syllabus_id": 1, + "user_id": 1, + "token": token, + **data, + } + ) ] - user_db = bc.database.list_of('auth.User') + user_db = bc.database.list_of("auth.User") for item in user_db: - assert isinstance(item['date_joined'], datetime) - del item['date_joined'] - - assert user_db == [{ - 'email': 'pokemon@potato.io', - 'first_name': 'lord', - 'id': 1, - 'is_active': True, - 'is_staff': False, - 'is_superuser': False, - 'last_login': None, - 'last_name': 'valdomero', - 'password': '', - 'username': 'pokemon@potato.io', - }] - - assert bc.database.list_of('marketing.Course') == [] - assert bc.database.list_of('payments.Plan') == [] + assert isinstance(item["date_joined"], datetime) + del item["date_joined"] + + assert user_db == [ + { + "email": "pokemon@potato.io", + "first_name": "lord", + "id": 1, + "is_active": True, + "is_staff": False, + "is_superuser": False, + "last_login": None, + "last_name": "valdomero", + "password": "", + "username": "pokemon@potato.io", + } + ] + + assert bc.database.list_of("marketing.Course") == [] + assert bc.database.list_of("payments.Plan") == [] assert async_validate_email_invite.delay.call_args_list == [call(1)] assert notify_actions.send_email_message.call_args_list == [ - call('verify_email', - 'pokemon@potato.io', { - 'LANG': 'en', - 'SUBJECT': '4Geeks - Validate account', - 'LINK': f'/v1/auth/password/{token}', - }, - academy=model.academy) - ] - - user = bc.database.get('auth.User', 1, dict=False) 
+ call( + "verify_email", + "pokemon@potato.io", + { + "LANG": "en", + "SUBJECT": "4Geeks - Validate account", + "LINK": f"/v1/auth/password/{token}", + }, + academy=model.academy, + ) + ] + + user = bc.database.get("auth.User", 1, dict=False) assert Token.get_or_create.call_args_list == [ - call(user=user, token_type='login'), + call(user=user, token_type="login"), ] @@ -2167,76 +2302,78 @@ def test_task__put__with_user_invite__syllabus_found__academy_available_as_saas_ # """ -@patch('django.utils.timezone.now', MagicMock(return_value=now)) -@patch('breathecode.notify.actions.send_email_message', MagicMock(return_value=None)) -@patch('breathecode.authenticate.models.Token.get_or_create', MagicMock(wraps=Token.get_or_create)) +@patch("django.utils.timezone.now", MagicMock(return_value=now)) +@patch("breathecode.notify.actions.send_email_message", MagicMock(return_value=None)) +@patch("breathecode.authenticate.models.Token.get_or_create", MagicMock(wraps=Token.get_or_create)) def test_task__put__with_user_invite__syllabus_found__academy_available_as_saas__user_exists( - bc: Breathecode, client: APIClient, validation_res): + bc: Breathecode, client: APIClient, validation_res +): token = bc.random.string(lower=True, upper=True, number=True, size=40) user_invite = { - 'email': 'pokemon@potato.io', - 'status': 'WAITING_LIST', - 'token': token, - 'cohort_id': None, + "email": "pokemon@potato.io", + "status": "WAITING_LIST", + "token": token, + "cohort_id": None, } - academy = {'available_as_saas': True} - user = {'email': 'pokemon@potato.io'} - model = bc.database.create(user_invite=user_invite, - cohort=1, - syllabus_version=1, - syllabus=1, - academy=academy, - user=user) - url = reverse_lazy('authenticate:subscribe') + academy = {"available_as_saas": True} + user = {"email": "pokemon@potato.io"} + model = bc.database.create( + user_invite=user_invite, cohort=1, syllabus_version=1, syllabus=1, academy=academy, user=user + ) + url = reverse_lazy("authenticate:subscribe") data = { - 'email': 'pokemon@potato.io', - 'first_name': 'lord', - 'last_name': 'valdomero', - 'phone': '+123123123', - 'syllabus': 1, - 'token': token, + "email": "pokemon@potato.io", + "first_name": "lord", + "last_name": "valdomero", + "phone": "+123123123", + "syllabus": 1, + "token": token, } access_token = bc.random.string(lower=True, upper=True, number=True, size=40) - with patch('binascii.hexlify', MagicMock(return_value=bytes(access_token, 'utf-8'))): - response = client.put(url, data, format='json') + with patch("binascii.hexlify", MagicMock(return_value=bytes(access_token, "utf-8"))): + response = client.put(url, data, format="json") - del data['token'] + del data["token"] json = response.json() - expected = put_serializer(model.user_invite, - user=model.user, - data={ - 'id': 1, - 'access_token': access_token, - **data, - 'status': 'ACCEPTED', - }) + expected = put_serializer( + model.user_invite, + user=model.user, + data={ + "id": 1, + "access_token": access_token, + **data, + "status": "ACCEPTED", + }, + ) assert json == expected assert response.status_code == status.HTTP_200_OK - del data['syllabus'] - assert bc.database.list_of('authenticate.UserInvite') == [ - user_invite_db_item({ - 'academy_id': 1, - 'author_id': 1, - 'process_status': 'DONE', - 'status': 'ACCEPTED', - 'syllabus_id': 1, - 'user_id': 1, - 'token': token, - **data, - }) + del data["syllabus"] + assert bc.database.list_of("authenticate.UserInvite") == [ + user_invite_db_item( + { + "academy_id": 1, + "author_id": 1, + "process_status": "DONE", 
+ "status": "ACCEPTED", + "syllabus_id": 1, + "user_id": 1, + "token": token, + **data, + } + ) ] - assert bc.database.list_of('marketing.Course') == [] - assert bc.database.list_of('payments.Plan') == [] - assert bc.database.list_of('auth.User') == [bc.format.to_dict(model.user)] + assert bc.database.list_of("marketing.Course") == [] + assert bc.database.list_of("payments.Plan") == [] + assert bc.database.list_of("auth.User") == [bc.format.to_dict(model.user)] assert async_validate_email_invite.delay.call_args_list == [call(1)] assert notify_actions.send_email_message.call_args_list == [] assert Token.get_or_create.call_args_list == [ - call(user=model.user, token_type='login'), + call(user=model.user, token_type="login"), ] @@ -2246,54 +2383,56 @@ def test_task__put__with_user_invite__syllabus_found__academy_available_as_saas_ # """ -@patch('django.utils.timezone.now', MagicMock(return_value=now)) -@patch('breathecode.notify.actions.send_email_message', MagicMock(return_value=None)) -@patch('breathecode.authenticate.models.Token.get_or_create', MagicMock(wraps=Token.get_or_create)) +@patch("django.utils.timezone.now", MagicMock(return_value=now)) +@patch("breathecode.notify.actions.send_email_message", MagicMock(return_value=None)) +@patch("breathecode.authenticate.models.Token.get_or_create", MagicMock(wraps=Token.get_or_create)) def test_task__put__plan_does_not_exist(bc: Breathecode, client: APIClient): token = bc.random.string(lower=True, upper=True, number=True, size=40) user_invite = { - 'email': 'pokemon@potato.io', - 'status': 'WAITING_LIST', - 'token': token, + "email": "pokemon@potato.io", + "status": "WAITING_LIST", + "token": token, } - academy = {'available_as_saas': True} + academy = {"available_as_saas": True} bc.database.create(user_invite=user_invite, cohort=1, academy=academy) - url = reverse_lazy('authenticate:subscribe') + url = reverse_lazy("authenticate:subscribe") data = { - 'email': 'pokemon@potato.io', - 'first_name': 'lord', - 'last_name': 'valdomero', - 'phone': '+123123123', + "email": "pokemon@potato.io", + "first_name": "lord", + "last_name": "valdomero", + "phone": "+123123123", # 'cohort': 1, - 'token': token, - 'plan': 1, + "token": token, + "plan": 1, } access_token = bc.random.string(lower=True, upper=True, number=True, size=40) - with patch('binascii.hexlify', MagicMock(return_value=bytes(access_token, 'utf-8'))): - response = client.put(url, data, format='json') + with patch("binascii.hexlify", MagicMock(return_value=bytes(access_token, "utf-8"))): + response = client.put(url, data, format="json") - del data['token'] + del data["token"] json = response.json() - expected = {'detail': 'plan-not-found', 'status_code': 400} + expected = {"detail": "plan-not-found", "status_code": 400} assert json == expected assert response.status_code == status.HTTP_400_BAD_REQUEST - assert bc.database.list_of('authenticate.UserInvite') == [ - user_invite_db_item({ - 'status': 'WAITING_LIST', - 'academy_id': 1, - 'token': token, - 'cohort_id': 1, - }), - ] - - assert bc.database.list_of('marketing.Course') == [] - assert bc.database.list_of('payments.Plan') == [] - user_db = bc.database.list_of('auth.User') + assert bc.database.list_of("authenticate.UserInvite") == [ + user_invite_db_item( + { + "status": "WAITING_LIST", + "academy_id": 1, + "token": token, + "cohort_id": 1, + } + ), + ] + + assert bc.database.list_of("marketing.Course") == [] + assert bc.database.list_of("payments.Plan") == [] + user_db = bc.database.list_of("auth.User") for item in user_db: - assert 
isinstance(item['date_joined'], datetime) - del item['date_joined'] + assert isinstance(item["date_joined"], datetime) + del item["date_joined"] assert user_db == [] @@ -2307,58 +2446,64 @@ def test_task__put__plan_does_not_exist(bc: Breathecode, client: APIClient): # """ -@patch('django.utils.timezone.now', MagicMock(return_value=now)) -@patch('breathecode.notify.actions.send_email_message', MagicMock(return_value=None)) -@patch('breathecode.authenticate.models.Token.get_or_create', MagicMock(wraps=Token.get_or_create)) +@patch("django.utils.timezone.now", MagicMock(return_value=now)) +@patch("breathecode.notify.actions.send_email_message", MagicMock(return_value=None)) +@patch("breathecode.authenticate.models.Token.get_or_create", MagicMock(wraps=Token.get_or_create)) def test_task__put__plan_has_waiting_list(bc: Breathecode, client: APIClient): token = bc.random.string(lower=True, upper=True, number=True, size=40) user_invite = { - 'email': 'pokemon@potato.io', - 'status': 'WAITING_LIST', - 'token': token, + "email": "pokemon@potato.io", + "status": "WAITING_LIST", + "token": token, } - academy = {'available_as_saas': True} - plan = {'time_of_life': None, 'time_of_life_unit': None, 'has_waiting_list': True, 'invites': []} + academy = {"available_as_saas": True} + plan = {"time_of_life": None, "time_of_life_unit": None, "has_waiting_list": True, "invites": []} model = bc.database.create(user_invite=user_invite, academy=academy, plan=plan) - url = reverse_lazy('authenticate:subscribe') + url = reverse_lazy("authenticate:subscribe") data = { - 'email': 'pokemon@potato.io', - 'first_name': 'lord', - 'last_name': 'valdomero', - 'phone': '+123123123', - 'token': token, - 'plan': 1, + "email": "pokemon@potato.io", + "first_name": "lord", + "last_name": "valdomero", + "phone": "+123123123", + "token": token, + "plan": 1, } access_token = bc.random.string(lower=True, upper=True, number=True, size=40) - with patch('binascii.hexlify', MagicMock(return_value=bytes(access_token, 'utf-8'))): - response = client.put(url, data, format='json') + with patch("binascii.hexlify", MagicMock(return_value=bytes(access_token, "utf-8"))): + response = client.put(url, data, format="json") - del data['token'] - del data['plan'] + del data["token"] + del data["plan"] json = response.json() - expected = put_serializer(model.user_invite, plans=[model.plan], data={ - 'id': 1, - 'user': None, - **data, - }) + expected = put_serializer( + model.user_invite, + plans=[model.plan], + data={ + "id": 1, + "user": None, + **data, + }, + ) assert json == expected assert response.status_code == status.HTTP_200_OK - assert bc.database.list_of('authenticate.UserInvite') == [ - user_invite_db_item({ - **data, - 'status': 'WAITING_LIST', - 'academy_id': 1, - 'token': token, - }), + assert bc.database.list_of("authenticate.UserInvite") == [ + user_invite_db_item( + { + **data, + "status": "WAITING_LIST", + "academy_id": 1, + "token": token, + } + ), ] - assert bc.database.list_of('auth.User') == [] + assert bc.database.list_of("auth.User") == [] - assert bc.database.list_of('marketing.Course') == [] - assert bc.database.list_of('payments.Plan') == [plan_db_item(model.plan, data={})] + assert bc.database.list_of("marketing.Course") == [] + assert bc.database.list_of("payments.Plan") == [plan_db_item(model.plan, data={})] bc.check.queryset_with_pks(model.plan.invites.all(), [1]) assert notify_actions.send_email_message.call_args_list == [] @@ -2371,157 +2516,164 @@ def test_task__put__plan_has_waiting_list(bc: Breathecode, client: 
APIClient): # """ -@patch('django.utils.timezone.now', MagicMock(return_value=now)) -@patch('breathecode.notify.actions.send_email_message', MagicMock(return_value=None)) -@patch('breathecode.authenticate.models.Token.get_or_create', MagicMock(wraps=Token.get_or_create)) +@patch("django.utils.timezone.now", MagicMock(return_value=now)) +@patch("breathecode.notify.actions.send_email_message", MagicMock(return_value=None)) +@patch("breathecode.authenticate.models.Token.get_or_create", MagicMock(wraps=Token.get_or_create)) def test_task__put__plan_has_not_waiting_list(bc: Breathecode, client: APIClient, validation_res): token = bc.random.string(lower=True, upper=True, number=True, size=40) user_invite = { - 'email': 'pokemon@potato.io', - 'status': 'WAITING_LIST', - 'token': token, - 'cohort_id': None, + "email": "pokemon@potato.io", + "status": "WAITING_LIST", + "token": token, + "cohort_id": None, } - academy = {'available_as_saas': True} - plan = {'time_of_life': None, 'time_of_life_unit': None, 'has_waiting_list': False, 'invites': []} + academy = {"available_as_saas": True} + plan = {"time_of_life": None, "time_of_life_unit": None, "has_waiting_list": False, "invites": []} model = bc.database.create(user_invite=user_invite, cohort=1, academy=academy, plan=plan) - url = reverse_lazy('authenticate:subscribe') + url = reverse_lazy("authenticate:subscribe") data = { - 'email': 'pokemon@potato.io', - 'first_name': 'lord', - 'last_name': 'valdomero', - 'phone': '+123123123', - 'token': token, - 'plan': 1, + "email": "pokemon@potato.io", + "first_name": "lord", + "last_name": "valdomero", + "phone": "+123123123", + "token": token, + "plan": 1, } access_token = bc.random.string(lower=True, upper=True, number=True, size=40) - with patch('binascii.hexlify', MagicMock(return_value=bytes(access_token, 'utf-8'))): - response = client.put(url, data, format='json') + with patch("binascii.hexlify", MagicMock(return_value=bytes(access_token, "utf-8"))): + response = client.put(url, data, format="json") - del data['token'] - del data['plan'] + del data["token"] + del data["plan"] json = response.json() - expected = put_serializer(model.user_invite, - plans=[model.plan], - data={ - 'id': 1, - 'access_token': access_token, - 'user': 1, - **data, - 'status': 'ACCEPTED', - }) + expected = put_serializer( + model.user_invite, + plans=[model.plan], + data={ + "id": 1, + "access_token": access_token, + "user": 1, + **data, + "status": "ACCEPTED", + }, + ) assert json == expected assert response.status_code == status.HTTP_200_OK - assert bc.database.list_of('authenticate.UserInvite') == [ + assert bc.database.list_of("authenticate.UserInvite") == [ { - 'user_id': 1, - 'academy_id': 1, - 'author_id': None, - 'cohort_id': None, - 'id': 1, - 'is_email_validated': False, - 'conversion_info': None, - 'has_marketing_consent': False, - 'event_slug': None, - 'asset_slug': None, - 'role_id': None, - 'sent_at': None, - 'status': 'ACCEPTED', - 'process_message': '', - 'process_status': 'DONE', - 'token': token, - 'syllabus_id': None, - 'city': None, - 'country': None, - 'latitude': None, - 'longitude': None, - 'email_quality': None, - 'email_status': None, + "user_id": 1, + "academy_id": 1, + "author_id": None, + "cohort_id": None, + "id": 1, + "is_email_validated": False, + "conversion_info": None, + "has_marketing_consent": False, + "event_slug": None, + "asset_slug": None, + "role_id": None, + "sent_at": None, + "status": "ACCEPTED", + "process_message": "", + "process_status": "DONE", + "token": token, + "syllabus_id": 
None, + "city": None, + "country": None, + "latitude": None, + "longitude": None, + "email_quality": None, + "email_status": None, **data, }, ] - user_db = bc.database.list_of('auth.User') + user_db = bc.database.list_of("auth.User") for item in user_db: - assert isinstance(item['date_joined'], datetime) - del item['date_joined'] + assert isinstance(item["date_joined"], datetime) + del item["date_joined"] - assert bc.database.list_of('marketing.Course') == [] - assert bc.database.list_of('payments.Plan') == [plan_db_item(model.plan, data={})] + assert bc.database.list_of("marketing.Course") == [] + assert bc.database.list_of("payments.Plan") == [plan_db_item(model.plan, data={})] bc.check.queryset_with_pks(model.plan.invites.all(), [1]) - assert user_db == [{ - 'email': 'pokemon@potato.io', - 'first_name': 'lord', - 'id': 1, - 'is_active': True, - 'is_staff': False, - 'is_superuser': False, - 'last_login': None, - 'last_name': 'valdomero', - 'password': '', - 'username': 'pokemon@potato.io', - }] + assert user_db == [ + { + "email": "pokemon@potato.io", + "first_name": "lord", + "id": 1, + "is_active": True, + "is_staff": False, + "is_superuser": False, + "last_login": None, + "last_name": "valdomero", + "password": "", + "username": "pokemon@potato.io", + } + ] assert async_validate_email_invite.delay.call_args_list == [call(1)] assert notify_actions.send_email_message.call_args_list == [ - call('verify_email', - 'pokemon@potato.io', { - 'LANG': 'en', - 'SUBJECT': '4Geeks - Validate account', - 'LINK': f'/v1/auth/password/{token}', - }, - academy=model.academy) - ] - - user = bc.database.get('auth.User', 1, dict=False) + call( + "verify_email", + "pokemon@potato.io", + { + "LANG": "en", + "SUBJECT": "4Geeks - Validate account", + "LINK": f"/v1/auth/password/{token}", + }, + academy=model.academy, + ) + ] + + user = bc.database.get("auth.User", 1, dict=False) assert Token.get_or_create.call_args_list == [ - call(user=user, token_type='login'), + call(user=user, token_type="login"), ] # When: Course is passed and does not exist # Then: It should return 400 -@patch('django.utils.timezone.now', MagicMock(return_value=now)) -@patch('breathecode.notify.actions.send_email_message', MagicMock(return_value=None)) -@patch('breathecode.authenticate.models.Token.get_or_create', MagicMock(wraps=Token.get_or_create)) +@patch("django.utils.timezone.now", MagicMock(return_value=now)) +@patch("breathecode.notify.actions.send_email_message", MagicMock(return_value=None)) +@patch("breathecode.authenticate.models.Token.get_or_create", MagicMock(wraps=Token.get_or_create)) def test__put__course_does_not_exists(bc: Breathecode, client: APIClient): token = bc.random.string(lower=True, upper=True, number=True, size=40) user_invite = { - 'email': 'pokemon@potato.io', - 'status': 'WAITING_LIST', - 'token': token, - 'cohort_id': None, + "email": "pokemon@potato.io", + "status": "WAITING_LIST", + "token": token, + "cohort_id": None, } model = bc.database.create(user_invite=user_invite) - url = reverse_lazy('authenticate:subscribe') + url = reverse_lazy("authenticate:subscribe") data = { - 'email': 'pokemon@potato.io', - 'first_name': 'lord', - 'last_name': 'valdomero', - 'phone': '+123123123', - 'course': random.choice([bc.fake.slug(), random.randint(1, 100)]), - 'token': token, + "email": "pokemon@potato.io", + "first_name": "lord", + "last_name": "valdomero", + "phone": "+123123123", + "course": random.choice([bc.fake.slug(), random.randint(1, 100)]), + "token": token, } access_token = bc.random.string(lower=True, 
upper=True, number=True, size=40) - with patch('binascii.hexlify', MagicMock(return_value=bytes(access_token, 'utf-8'))): - response = client.put(url, data, format='json') + with patch("binascii.hexlify", MagicMock(return_value=bytes(access_token, "utf-8"))): + response = client.put(url, data, format="json") - del data['course'] + del data["course"] json = response.json() - expected = {'detail': 'course-not-found', 'status_code': 400} + expected = {"detail": "course-not-found", "status_code": 400} assert json == expected assert response.status_code == status.HTTP_400_BAD_REQUEST - assert bc.database.list_of('authenticate.UserInvite') == [ + assert bc.database.list_of("authenticate.UserInvite") == [ bc.format.to_dict(model.user_invite), ] - assert bc.database.list_of('auth.User') == [] + assert bc.database.list_of("auth.User") == [] assert notify_actions.send_email_message.call_args_list == [] assert Token.get_or_create.call_args_list == [] @@ -2529,252 +2681,268 @@ def test__put__course_does_not_exists(bc: Breathecode, client: APIClient): # Given: 1 Course # When: Course is passed as slug and exists # Then: It should return 400 -@patch('django.utils.timezone.now', MagicMock(return_value=now)) -@patch('breathecode.notify.actions.send_email_message', MagicMock(return_value=None)) -@patch('breathecode.authenticate.models.Token.get_or_create', MagicMock(wraps=Token.get_or_create)) +@patch("django.utils.timezone.now", MagicMock(return_value=now)) +@patch("breathecode.notify.actions.send_email_message", MagicMock(return_value=None)) +@patch("breathecode.authenticate.models.Token.get_or_create", MagicMock(wraps=Token.get_or_create)) def test__put__course_without_syllabus(bc: Breathecode, client: APIClient, validation_res): token = bc.random.string(lower=True, upper=True, number=True, size=40) user_invite = { - 'email': 'pokemon@potato.io', - 'status': 'WAITING_LIST', - 'token': token, - 'cohort_id': None, + "email": "pokemon@potato.io", + "status": "WAITING_LIST", + "token": token, + "cohort_id": None, } model = bc.database.create(user_invite=user_invite, course=1) - url = reverse_lazy('authenticate:subscribe') + url = reverse_lazy("authenticate:subscribe") data = { - 'email': 'pokemon@potato.io', - 'first_name': 'lord', - 'last_name': 'valdomero', - 'phone': '+123123123', - 'course': random.choice([model.course.id, model.course.slug]), - 'token': token, + "email": "pokemon@potato.io", + "first_name": "lord", + "last_name": "valdomero", + "phone": "+123123123", + "course": random.choice([model.course.id, model.course.slug]), + "token": token, } access_token = bc.random.string(lower=True, upper=True, number=True, size=40) - with patch('binascii.hexlify', MagicMock(return_value=bytes(access_token, 'utf-8'))): - response = client.put(url, data, format='json') + with patch("binascii.hexlify", MagicMock(return_value=bytes(access_token, "utf-8"))): + response = client.put(url, data, format="json") - del data['token'] - del data['course'] + del data["token"] + del data["course"] json = response.json() - expected = post_serializer(plans=[], - data={ - 'id': 1, - 'access_token': access_token, - 'user': 1, - **data, - 'status': 'ACCEPTED', - }) + expected = post_serializer( + plans=[], + data={ + "id": 1, + "access_token": access_token, + "user": 1, + **data, + "status": "ACCEPTED", + }, + ) assert json == expected assert response.status_code == status.HTTP_200_OK - assert bc.database.list_of('authenticate.UserInvite') == [ + assert bc.database.list_of("authenticate.UserInvite") == [ user_invite_db_item( 
data={ - 'token': token, - 'process_status': 'DONE', - 'status': 'ACCEPTED', - 'academy_id': 1, - 'user_id': 1, - 'city': None, - 'country': None, - 'latitude': None, - 'longitude': None, + "token": token, + "process_status": "DONE", + "status": "ACCEPTED", + "academy_id": 1, + "user_id": 1, + "city": None, + "country": None, + "latitude": None, + "longitude": None, **data, - }), + } + ), ] - del data['phone'] - users = [x for x in bc.database.list_of('auth.User') if x.pop('date_joined')] + del data["phone"] + users = [x for x in bc.database.list_of("auth.User") if x.pop("date_joined")] users == [ - user_db_item(data={ - **data, - 'id': 1, - 'username': 'pokemon@potato.io', - }), + user_db_item( + data={ + **data, + "id": 1, + "username": "pokemon@potato.io", + } + ), ] - assert bc.database.list_of('marketing.Course') == [ + assert bc.database.list_of("marketing.Course") == [ bc.format.to_dict(model.course), ] bc.check.queryset_with_pks(model.course.invites.all(), [1]) - assert bc.database.list_of('payments.Plan') == [] + assert bc.database.list_of("payments.Plan") == [] assert async_validate_email_invite.delay.call_args_list == [call(1)] assert notify_actions.send_email_message.call_args_list == [ - call('verify_email', - 'pokemon@potato.io', { - 'LANG': 'en', - 'SUBJECT': '4Geeks - Validate account', - 'LINK': f'/v1/auth/password/{token}', - }, - academy=model.academy) + call( + "verify_email", + "pokemon@potato.io", + { + "LANG": "en", + "SUBJECT": "4Geeks - Validate account", + "LINK": f"/v1/auth/password/{token}", + }, + academy=model.academy, + ) ] - User = bc.database.get_model('auth.User') - user = User.objects.get(email=data['email']) + User = bc.database.get_model("auth.User") + user = User.objects.get(email=data["email"]) assert Token.get_or_create.call_args_list == [ - call(user=user, token_type='login'), + call(user=user, token_type="login"), ] # Given: 1 Course # When: Course is passed as slug and exists # Then: It should return 400 -@patch('django.utils.timezone.now', MagicMock(return_value=now)) -@patch('breathecode.notify.actions.send_email_message', MagicMock(return_value=None)) -@patch('breathecode.authenticate.models.Token.get_or_create', MagicMock(wraps=Token.get_or_create)) +@patch("django.utils.timezone.now", MagicMock(return_value=now)) +@patch("breathecode.notify.actions.send_email_message", MagicMock(return_value=None)) +@patch("breathecode.authenticate.models.Token.get_or_create", MagicMock(wraps=Token.get_or_create)) def test__put__course_and_syllabus(bc: Breathecode, client: APIClient, validation_res): token = bc.random.string(lower=True, upper=True, number=True, size=40) user_invite = { - 'email': 'pokemon@potato.io', - 'status': 'WAITING_LIST', - 'token': token, - 'cohort_id': None, + "email": "pokemon@potato.io", + "status": "WAITING_LIST", + "token": token, + "cohort_id": None, } model = bc.database.create(user_invite=user_invite, course=1, syllabus=1) - url = reverse_lazy('authenticate:subscribe') + url = reverse_lazy("authenticate:subscribe") data = { - 'email': 'pokemon@potato.io', - 'first_name': 'lord', - 'last_name': 'valdomero', - 'phone': '+123123123', - 'course': random.choice([model.course.id, model.course.slug]), - 'syllabus': random.choice([model.syllabus.id, model.syllabus.slug]), - 'token': token, + "email": "pokemon@potato.io", + "first_name": "lord", + "last_name": "valdomero", + "phone": "+123123123", + "course": random.choice([model.course.id, model.course.slug]), + "syllabus": random.choice([model.syllabus.id, model.syllabus.slug]), + 
"token": token, } access_token = bc.random.string(lower=True, upper=True, number=True, size=40) - with patch('binascii.hexlify', MagicMock(return_value=bytes(access_token, 'utf-8'))): - response = client.put(url, data, format='json') + with patch("binascii.hexlify", MagicMock(return_value=bytes(access_token, "utf-8"))): + response = client.put(url, data, format="json") - del data['token'] - del data['course'] + del data["token"] + del data["course"] json = response.json() - expected = post_serializer(plans=[], - data={ - 'id': 1, - 'access_token': access_token, - 'user': 1, - 'status': 'ACCEPTED', - **data, - }) + expected = post_serializer( + plans=[], + data={ + "id": 1, + "access_token": access_token, + "user": 1, + "status": "ACCEPTED", + **data, + }, + ) assert json == expected assert response.status_code == status.HTTP_200_OK - data['syllabus_id'] = data.pop('syllabus') - assert bc.database.list_of('authenticate.UserInvite') == [ + data["syllabus_id"] = data.pop("syllabus") + assert bc.database.list_of("authenticate.UserInvite") == [ user_invite_db_item( data={ - 'token': token, - 'process_status': 'DONE', - 'status': 'ACCEPTED', - 'academy_id': 1, - 'user_id': 1, - 'city': None, - 'country': None, - 'latitude': None, - 'longitude': None, + "token": token, + "process_status": "DONE", + "status": "ACCEPTED", + "academy_id": 1, + "user_id": 1, + "city": None, + "country": None, + "latitude": None, + "longitude": None, **data, - }), + } + ), ] - del data['phone'] - del data['syllabus_id'] - users = [x for x in bc.database.list_of('auth.User') if x.pop('date_joined')] + del data["phone"] + del data["syllabus_id"] + users = [x for x in bc.database.list_of("auth.User") if x.pop("date_joined")] users == [ - user_db_item(data={ - **data, - 'id': 1, - 'username': 'pokemon@potato.io', - }), + user_db_item( + data={ + **data, + "id": 1, + "username": "pokemon@potato.io", + } + ), ] - assert bc.database.list_of('marketing.Course') == [ + assert bc.database.list_of("marketing.Course") == [ bc.format.to_dict(model.course), ] bc.check.queryset_with_pks(model.course.invites.all(), [1]) - assert bc.database.list_of('payments.Plan') == [] + assert bc.database.list_of("payments.Plan") == [] assert async_validate_email_invite.delay.call_args_list == [call(1)] assert notify_actions.send_email_message.call_args_list == [ - call('verify_email', - 'pokemon@potato.io', { - 'LANG': 'en', - 'SUBJECT': '4Geeks - Validate account', - 'LINK': f'/v1/auth/password/{token}', - }, - academy=model.academy) + call( + "verify_email", + "pokemon@potato.io", + { + "LANG": "en", + "SUBJECT": "4Geeks - Validate account", + "LINK": f"/v1/auth/password/{token}", + }, + academy=model.academy, + ) ] - User = bc.database.get_model('auth.User') - user = User.objects.get(email=data['email']) + User = bc.database.get_model("auth.User") + user = User.objects.get(email=data["email"]) assert Token.get_or_create.call_args_list == [ - call(user=user, token_type='login'), + call(user=user, token_type="login"), ] # Given: 1 Course and 1 Syllabus # When: Course is passed as slug and exists, course is not associated to syllabus # Then: It should return 400 -@patch('django.utils.timezone.now', MagicMock(return_value=now)) -@patch('breathecode.notify.actions.send_email_message', MagicMock(return_value=None)) -@patch('breathecode.authenticate.models.Token.get_or_create', MagicMock(wraps=Token.get_or_create)) +@patch("django.utils.timezone.now", MagicMock(return_value=now)) +@patch("breathecode.notify.actions.send_email_message", 
MagicMock(return_value=None)) +@patch("breathecode.authenticate.models.Token.get_or_create", MagicMock(wraps=Token.get_or_create)) def test__put__course_and_syllabus__syllabus_not_associated_to_course(bc: Breathecode, client: APIClient): token = bc.random.string(lower=True, upper=True, number=True, size=40) user_invite = { - 'email': 'pokemon@potato.io', - 'status': 'WAITING_LIST', - 'token': token, - 'cohort_id': None, + "email": "pokemon@potato.io", + "status": "WAITING_LIST", + "token": token, + "cohort_id": None, } - course = {'syllabus': []} - syllabus = {'slug': bc.fake.slug()} + course = {"syllabus": []} + syllabus = {"slug": bc.fake.slug()} model = bc.database.create(user_invite=user_invite, course=course, syllabus=syllabus) - url = reverse_lazy('authenticate:subscribe') + url = reverse_lazy("authenticate:subscribe") data = { - 'email': 'pokemon@potato.io', - 'first_name': 'lord', - 'last_name': 'valdomero', - 'phone': '+123123123', - 'course': random.choice([model.course.id, model.course.slug]), - 'syllabus': random.choice([model.syllabus.id, model.syllabus.slug]), - 'token': token, + "email": "pokemon@potato.io", + "first_name": "lord", + "last_name": "valdomero", + "phone": "+123123123", + "course": random.choice([model.course.id, model.course.slug]), + "syllabus": random.choice([model.syllabus.id, model.syllabus.slug]), + "token": token, } access_token = bc.random.string(lower=True, upper=True, number=True, size=40) - with patch('binascii.hexlify', MagicMock(return_value=bytes(access_token, 'utf-8'))): - response = client.put(url, data, format='json') + with patch("binascii.hexlify", MagicMock(return_value=bytes(access_token, "utf-8"))): + response = client.put(url, data, format="json") - del data['token'] - del data['course'] + del data["token"] + del data["course"] json = response.json() - expected = {'detail': 'syllabus-not-belong-to-course', 'status_code': 400} + expected = {"detail": "syllabus-not-belong-to-course", "status_code": 400} assert json == expected assert response.status_code == status.HTTP_400_BAD_REQUEST - del data['syllabus'] + del data["syllabus"] - assert bc.database.list_of('authenticate.UserInvite') == [ + assert bc.database.list_of("authenticate.UserInvite") == [ bc.format.to_dict(model.user_invite), ] - assert bc.database.list_of('auth.User') == [] - assert bc.database.list_of('marketing.Course') == [ + assert bc.database.list_of("auth.User") == [] + assert bc.database.list_of("marketing.Course") == [ bc.format.to_dict(model.course), ] bc.check.queryset_with_pks(model.course.invites.all(), []) - assert bc.database.list_of('payments.Plan') == [] + assert bc.database.list_of("payments.Plan") == [] assert notify_actions.send_email_message.call_args_list == [] assert Token.get_or_create.call_args_list == [] @@ -2783,76 +2951,79 @@ def test__put__course_and_syllabus__syllabus_not_associated_to_course(bc: Breath # Given: 1 Course, 1 UserInvite and 1 Syllabus # When: Course is passed as slug and exists, course with waiting list # Then: It should return 400 -@patch('django.utils.timezone.now', MagicMock(return_value=now)) -@patch('breathecode.notify.actions.send_email_message', MagicMock(return_value=None)) -@patch('breathecode.authenticate.models.Token.get_or_create', MagicMock(wraps=Token.get_or_create)) +@patch("django.utils.timezone.now", MagicMock(return_value=now)) +@patch("breathecode.notify.actions.send_email_message", MagicMock(return_value=None)) +@patch("breathecode.authenticate.models.Token.get_or_create", MagicMock(wraps=Token.get_or_create)) def 
test__put__course_and_syllabus__waiting_list(bc: Breathecode, client: APIClient): token = bc.random.string(lower=True, upper=True, number=True, size=40) user_invite = { - 'email': 'pokemon@potato.io', - 'status': 'WAITING_LIST', - 'token': token, - 'cohort_id': None, + "email": "pokemon@potato.io", + "status": "WAITING_LIST", + "token": token, + "cohort_id": None, } - course = {'has_waiting_list': True, 'invites': []} + course = {"has_waiting_list": True, "invites": []} model = bc.database.create(user_invite=user_invite, course=course, syllabus=1) - url = reverse_lazy('authenticate:subscribe') + url = reverse_lazy("authenticate:subscribe") data = { - 'email': 'pokemon@potato.io', - 'first_name': 'lord', - 'last_name': 'valdomero', - 'phone': '+123123123', - 'course': random.choice([model.course.id, model.course.slug]), - 'syllabus': random.choice([model.syllabus.id, model.syllabus.slug]), - 'token': token, + "email": "pokemon@potato.io", + "first_name": "lord", + "last_name": "valdomero", + "phone": "+123123123", + "course": random.choice([model.course.id, model.course.slug]), + "syllabus": random.choice([model.syllabus.id, model.syllabus.slug]), + "token": token, } access_token = bc.random.string(lower=True, upper=True, number=True, size=40) - with patch('binascii.hexlify', MagicMock(return_value=bytes(access_token, 'utf-8'))): - response = client.put(url, data, format='json') + with patch("binascii.hexlify", MagicMock(return_value=bytes(access_token, "utf-8"))): + response = client.put(url, data, format="json") - del data['token'] - del data['course'] + del data["token"] + del data["course"] json = response.json() - expected = post_serializer(plans=[], - data={ - 'id': 1, - 'access_token': None, - 'user': None, - **data, - 'status': 'WAITING_LIST', - }) + expected = post_serializer( + plans=[], + data={ + "id": 1, + "access_token": None, + "user": None, + **data, + "status": "WAITING_LIST", + }, + ) assert json == expected assert response.status_code == status.HTTP_200_OK - data['syllabus_id'] = data.pop('syllabus') - assert bc.database.list_of('authenticate.UserInvite') == [ + data["syllabus_id"] = data.pop("syllabus") + assert bc.database.list_of("authenticate.UserInvite") == [ user_invite_db_item( data={ - 'token': token, - 'process_status': 'PENDING', - 'status': 'WAITING_LIST', - 'academy_id': 1, - 'city': None, - 'country': None, - 'latitude': None, - 'longitude': None, + "token": token, + "process_status": "PENDING", + "status": "WAITING_LIST", + "academy_id": 1, + "city": None, + "country": None, + "latitude": None, + "longitude": None, **data, - }), + } + ), ] - del data['phone'] - del data['syllabus_id'] + del data["phone"] + del data["syllabus_id"] - assert bc.database.list_of('auth.User') == [] - assert bc.database.list_of('marketing.Course') == [ + assert bc.database.list_of("auth.User") == [] + assert bc.database.list_of("marketing.Course") == [ bc.format.to_dict(model.course), ] bc.check.queryset_with_pks(model.course.invites.all(), [1]) - assert bc.database.list_of('payments.Plan') == [] + assert bc.database.list_of("payments.Plan") == [] assert notify_actions.send_email_message.call_args_list == [] assert Token.get_or_create.call_args_list == [] diff --git a/breathecode/authenticate/tests/urls/tests_token_me.py b/breathecode/authenticate/tests/urls/tests_token_me.py index f71e54846..33a494b73 100644 --- a/breathecode/authenticate/tests/urls/tests_token_me.py +++ b/breathecode/authenticate/tests/urls/tests_token_me.py @@ -1,6 +1,7 @@ """ Test cases for /user """ + from 
datetime import timedelta from unittest.mock import MagicMock, patch @@ -21,90 +22,95 @@ class AuthenticateTestSuite(AuthTestCase): def test__auth__without_auth(self): """Test /logout without auth""" - url = reverse_lazy('authenticate:token_me') + url = reverse_lazy("authenticate:token_me") self.bc.database.create(user=1) response = self.client.post(url) json = response.json() - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) - self.assertEqual(self.bc.database.list_of('authenticate.Token'), []) - - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('rest_framework.authtoken.models.Token.generate_key', - MagicMock(side_effect=[ - TOKEN + '1', - TOKEN + '2', - TOKEN + '3', - ])) + self.assertEqual(self.bc.database.list_of("authenticate.Token"), []) + + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch( + "rest_framework.authtoken.models.Token.generate_key", + MagicMock( + side_effect=[ + TOKEN + "1", + TOKEN + "2", + TOKEN + "3", + ] + ), + ) def test__generate_tokens(self): """Test /token""" cases = [ - (None, 'temporal', UTC_NOW + timedelta(minutes=10), 1), - ({ - 'token_type': 'temporal' - }, 'temporal', UTC_NOW + timedelta(minutes=10), 2), - ({ - 'token_type': 'one_time' - }, 'one_time', None, 3), + (None, "temporal", UTC_NOW + timedelta(minutes=10), 1), + ({"token_type": "temporal"}, "temporal", UTC_NOW + timedelta(minutes=10), 2), + ({"token_type": "one_time"}, "one_time", None, 3), ] for data, token_type, expires_at, index in cases: - url = reverse_lazy('authenticate:token_me') + url = reverse_lazy("authenticate:token_me") model = self.bc.database.create(user=1) self.client.force_authenticate(model.user) response = self.client.post(url, data) json = response.json() expected = { - 'email': model.user.email, - 'expires_at': self.bc.datetime.to_iso_string(expires_at) if expires_at else None, - 'token': TOKEN + str(index), - 'token_type': token_type, - 'user_id': index, + "email": model.user.email, + "expires_at": self.bc.datetime.to_iso_string(expires_at) if expires_at else None, + "token": TOKEN + str(index), + "token_type": token_type, + "user_id": index, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('authenticate.Token'), [{ - 'created': UTC_NOW, - 'expires_at': expires_at, - 'id': index, - 'key': TOKEN + str(index), - 'token_type': token_type, - 'user_id': index, - }]) + self.assertEqual( + self.bc.database.list_of("authenticate.Token"), + [ + { + "created": UTC_NOW, + "expires_at": expires_at, + "id": index, + "key": TOKEN + str(index), + "token_type": token_type, + "user_id": index, + } + ], + ) # teardown - self.bc.database.delete('authenticate.Token') - - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('rest_framework.authtoken.models.Token.generate_key', - MagicMock(side_effect=[ - TOKEN + '1', - TOKEN + '2', - TOKEN + '3', - ])) + self.bc.database.delete("authenticate.Token") + + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch( + "rest_framework.authtoken.models.Token.generate_key", + MagicMock( + side_effect=[ + TOKEN + "1", + TOKEN + "2", + TOKEN + "3", + ] + ), + ) def test__generate_tokens__bad_token_type(self): """Test /token""" cases 
= [ - { - 'token_type': 'login' - }, - { - 'token_type': 'permanent' - }, + {"token_type": "login"}, + {"token_type": "permanent"}, ] for data in cases: - url = reverse_lazy('authenticate:token_me') + url = reverse_lazy("authenticate:token_me") model = self.bc.database.create(user=1) self.client.force_authenticate(model.user) response = self.client.post(url, data) json = response.json() - expected = {'detail': 'token-type-invalid-or-not-allowed', 'status_code': 400} + expected = {"detail": "token-type-invalid-or-not-allowed", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('authenticate.Token'), []) + self.assertEqual(self.bc.database.list_of("authenticate.Token"), []) diff --git a/breathecode/authenticate/tests/urls/tests_user.py b/breathecode/authenticate/tests/urls/tests_user.py index 7c271ae6c..1a149bfbf 100644 --- a/breathecode/authenticate/tests/urls/tests_user.py +++ b/breathecode/authenticate/tests/urls/tests_user.py @@ -1,6 +1,7 @@ """ Test cases for /user """ + from django.urls.base import reverse_lazy from rest_framework import status from ..mixins import AuthTestCase @@ -11,36 +12,37 @@ class AuthenticateTestSuite(AuthTestCase): def test_user_without_auth(self): """Test /user without auth""" - url = reverse_lazy('authenticate:user') - data = {'email': self.email, 'password': self.password} + url = reverse_lazy("authenticate:user") + data = {"email": self.email, "password": self.password} # return client.post(url, data) response = self.client.post(url, data) - detail = str(response.data['detail']) - status_code = int(response.data['status_code']) + detail = str(response.data["detail"]) + status_code = int(response.data["status_code"]) self.assertEqual(len(response.data), 2) - self.assertEqual(detail, 'Authentication credentials were not provided.') + self.assertEqual(detail, "Authentication credentials were not provided.") self.assertEqual(status_code, 401) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) def test_user(self): """Test /user""" # self.login() - url = reverse_lazy('authenticate:user') + url = reverse_lazy("authenticate:user") self.client.force_authenticate(user=self.user) response = self.client.get(url) json = response.json() - self.assertEqual(json, [{ - 'id': self.user.id, - 'email': self.user.email, - 'first_name': self.user.first_name, - 'last_name': self.user.last_name, - 'github': { - 'avatar_url': None, - 'name': None, - 'username': None - }, - 'profile': None, - }]) + self.assertEqual( + json, + [ + { + "id": self.user.id, + "email": self.user.email, + "first_name": self.user.first_name, + "last_name": self.user.last_name, + "github": {"avatar_url": None, "name": None, "username": None}, + "profile": None, + } + ], + ) diff --git a/breathecode/authenticate/tests/urls/tests_user_me.py b/breathecode/authenticate/tests/urls/tests_user_me.py index 07a5e09ea..047d28452 100644 --- a/breathecode/authenticate/tests/urls/tests_user_me.py +++ b/breathecode/authenticate/tests/urls/tests_user_me.py @@ -1,6 +1,7 @@ """ Test cases for /user """ + import datetime import pytz @@ -12,63 +13,61 @@ def get_permission_serializer(permission): return { - 'codename': permission.codename, - 'name': permission.name, + "codename": permission.codename, + "name": permission.name, } def user_setting_serializer(user_setting): return { - 'lang': user_setting.lang, - 'main_currency': user_setting.main_currency, + "lang": user_setting.lang, + 
"main_currency": user_setting.main_currency, } -def get_serializer(self, - user, - credentials_github=None, - profile_academies=[], - profile=None, - permissions=[], - user_setting=None, - data={}): +def get_serializer( + self, user, credentials_github=None, profile_academies=[], profile=None, permissions=[], user_setting=None, data={} +): return { - 'id': - user.id, - 'email': - user.email, - 'username': - user.username, - 'first_name': - user.first_name, - 'last_name': - user.last_name, - 'date_joined': - self.bc.datetime.to_iso_string(user.date_joined), - 'username': - user.username, - 'settings': - user_setting_serializer(user_setting) if user_setting else None, - 'permissions': [get_permission_serializer(x) for x in permissions], - 'github': { - 'avatar_url': credentials_github.avatar_url, - 'name': credentials_github.name, - 'username': credentials_github.username, - } if credentials_github else None, - 'profile': { - 'avatar_url': profile.avatar_url, - } if profile else None, - 'roles': [{ - 'academy': { - 'id': profile_academy.academy.id, - 'name': profile_academy.academy.name, - 'slug': profile_academy.academy.slug, - 'timezone': profile_academy.academy.timezone, - }, - 'created_at': self.bc.datetime.to_iso_string(profile_academy.created_at), - 'id': profile_academy.id, - 'role': profile_academy.role.slug, - } for profile_academy in profile_academies], + "id": user.id, + "email": user.email, + "username": user.username, + "first_name": user.first_name, + "last_name": user.last_name, + "date_joined": self.bc.datetime.to_iso_string(user.date_joined), + "username": user.username, + "settings": user_setting_serializer(user_setting) if user_setting else None, + "permissions": [get_permission_serializer(x) for x in permissions], + "github": ( + { + "avatar_url": credentials_github.avatar_url, + "name": credentials_github.name, + "username": credentials_github.username, + } + if credentials_github + else None + ), + "profile": ( + { + "avatar_url": profile.avatar_url, + } + if profile + else None + ), + "roles": [ + { + "academy": { + "id": profile_academy.academy.id, + "name": profile_academy.academy.name, + "slug": profile_academy.academy.slug, + "timezone": profile_academy.academy.timezone, + }, + "created_at": self.bc.datetime.to_iso_string(profile_academy.created_at), + "id": profile_academy.id, + "role": profile_academy.role.slug, + } + for profile_academy in profile_academies + ], **data, } @@ -78,8 +77,8 @@ class AuthenticateTestSuite(AuthTestCase): def setUp(self): super().setUp() - Permission = self.bc.database.get_model('auth.Permission') - permission = Permission.objects.filter().order_by('-id').first() + Permission = self.bc.database.get_model("auth.Permission") + permission = Permission.objects.filter().order_by("-id").first() self.latest_permission_id = permission.id """ @@ -88,13 +87,13 @@ def setUp(self): def test_user_me__without_auth(self): """Test /user/me without auth""" - url = reverse_lazy('authenticate:user_me') + url = reverse_lazy("authenticate:user_me") response = self.client.get(url) json = response.json() expected = { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED, + "detail": "Authentication credentials were not provided.", + "status_code": status.HTTP_401_UNAUTHORIZED, } self.assertEqual(json, expected) @@ -108,14 +107,20 @@ def test_user_me(self): """Test /user/me""" model = self.generate_models(authenticate=True) - url = reverse_lazy('authenticate:user_me') + url = 
reverse_lazy("authenticate:user_me") response = self.client.get(url) json = response.json() - expected = get_serializer(self, model.user, data={'settings': { - 'lang': 'en', - 'main_currency': None, - }}) + expected = get_serializer( + self, + model.user, + data={ + "settings": { + "lang": "en", + "main_currency": None, + } + }, + ) self.assertEqual(json, expected) @@ -127,17 +132,21 @@ def test_user_me__with_github_credentials(self): """Test /user/me""" model = self.generate_models(authenticate=True, credentials_github=True) - url = reverse_lazy('authenticate:user_me') + url = reverse_lazy("authenticate:user_me") response = self.client.get(url) json = response.json() - expected = get_serializer(self, - model.user, - credentials_github=model.credentials_github, - data={'settings': { - 'lang': 'en', - 'main_currency': None, - }}) + expected = get_serializer( + self, + model.user, + credentials_github=model.credentials_github, + data={ + "settings": { + "lang": "en", + "main_currency": None, + } + }, + ) self.assertEqual(json, expected) @@ -149,17 +158,21 @@ def test_user_me__with_profile_academy(self): """Test /user/me""" model = self.generate_models(authenticate=True, profile_academy=True) - url = reverse_lazy('authenticate:user_me') + url = reverse_lazy("authenticate:user_me") response = self.client.get(url) json = response.json() - expected = get_serializer(self, - model.user, - profile_academies=[model.profile_academy], - data={'settings': { - 'lang': 'en', - 'main_currency': None, - }}) + expected = get_serializer( + self, + model.user, + profile_academies=[model.profile_academy], + data={ + "settings": { + "lang": "en", + "main_currency": None, + } + }, + ) self.assertEqual(json, expected) @@ -171,17 +184,21 @@ def test_user_me__with_profile(self): """Test /user/me""" model = self.generate_models(authenticate=True, profile=True) - url = reverse_lazy('authenticate:user_me') + url = reverse_lazy("authenticate:user_me") response = self.client.get(url) json = response.json() - expected = get_serializer(self, - model.user, - profile=model.profile, - data={'settings': { - 'lang': 'en', - 'main_currency': None, - }}) + expected = get_serializer( + self, + model.user, + profile=model.profile, + data={ + "settings": { + "lang": "en", + "main_currency": None, + } + }, + ) self.assertEqual(json, expected) @@ -193,18 +210,22 @@ def test_user_me__with_profile__with_permission(self): """Test /user/me""" model = self.generate_models(authenticate=True, profile=True, permission=1) - url = reverse_lazy('authenticate:user_me') + url = reverse_lazy("authenticate:user_me") response = self.client.get(url) json = response.json() - expected = get_serializer(self, - model.user, - profile=model.profile, - permissions=[], - data={'settings': { - 'lang': 'en', - 'main_currency': None, - }}) + expected = get_serializer( + self, + model.user, + profile=model.profile, + permissions=[], + data={ + "settings": { + "lang": "en", + "main_currency": None, + } + }, + ) self.assertEqual(json, expected) @@ -216,18 +237,22 @@ def test_user_me__with_profile__one_group_with_one_permission(self): """Test /user/me""" model = self.generate_models(authenticate=True, profile=True, permission=1, group=1) - url = reverse_lazy('authenticate:user_me') + url = reverse_lazy("authenticate:user_me") response = self.client.get(url) json = response.json() - expected = get_serializer(self, - model.user, - profile=model.profile, - permissions=[model.permission], - data={'settings': { - 'lang': 'en', - 'main_currency': None, - }}) + expected = 
get_serializer( + self, + model.user, + profile=model.profile, + permissions=[model.permission], + data={ + "settings": { + "lang": "en", + "main_currency": None, + } + }, + ) self.assertEqual(json, expected) @@ -239,18 +264,22 @@ def test_user_me__with_profile__three_groups_with_one_permission(self): """Test /user/me""" model = self.generate_models(authenticate=True, profile=True, permission=1, group=3) - url = reverse_lazy('authenticate:user_me') + url = reverse_lazy("authenticate:user_me") response = self.client.get(url) json = response.json() - expected = get_serializer(self, - model.user, - profile=model.profile, - permissions=[model.permission], - data={'settings': { - 'lang': 'en', - 'main_currency': None, - }}) + expected = get_serializer( + self, + model.user, + profile=model.profile, + permissions=[model.permission], + data={ + "settings": { + "lang": "en", + "main_currency": None, + } + }, + ) self.assertEqual(json, expected) @@ -263,30 +292,34 @@ def test_user_me__with_profile__two_groups_with_four_permissions(self): groups = [ { - 'permissions': [self.latest_permission_id + 1, self.latest_permission_id + 2], + "permissions": [self.latest_permission_id + 1, self.latest_permission_id + 2], }, { - 'permissions': [self.latest_permission_id + 3, self.latest_permission_id + 4], + "permissions": [self.latest_permission_id + 3, self.latest_permission_id + 4], }, ] model = self.generate_models(authenticate=True, profile=True, permission=4, group=groups) - url = reverse_lazy('authenticate:user_me') + url = reverse_lazy("authenticate:user_me") response = self.client.get(url) json = response.json() - expected = get_serializer(self, - model.user, - profile=model.profile, - permissions=[ - model.permission[3], - model.permission[2], - model.permission[1], - model.permission[0], - ], - data={'settings': { - 'lang': 'en', - 'main_currency': None, - }}) + expected = get_serializer( + self, + model.user, + profile=model.profile, + permissions=[ + model.permission[3], + model.permission[2], + model.permission[1], + model.permission[0], + ], + data={ + "settings": { + "lang": "en", + "main_currency": None, + } + }, + ) self.assertEqual(json, expected) diff --git a/breathecode/authenticate/tests/urls/tests_user_me_invite.py b/breathecode/authenticate/tests/urls/tests_user_me_invite.py index 651d83800..c2de2568b 100644 --- a/breathecode/authenticate/tests/urls/tests_user_me_invite.py +++ b/breathecode/authenticate/tests/urls/tests_user_me_invite.py @@ -1,6 +1,7 @@ """ Set of tests for MeInviteView, this include duck tests """ + from random import choice from unittest.mock import MagicMock, patch @@ -12,24 +13,24 @@ def view_method_mock(request, *args, **kwargs): - response = {'args': args, 'kwargs': kwargs} + response = {"args": args, "kwargs": kwargs} return Response(response, status=200) def generate_user_invite(self, model, user_invite, arguments={}): return { - 'academy': None, - 'cohort': None, - 'created_at': self.bc.datetime.to_iso_string(user_invite.created_at), - 'email': user_invite.email, - 'first_name': user_invite.first_name, - 'id': user_invite.id, - 'invite_url': f'http://localhost:8000/v1/auth/member/invite/{user_invite.token}', - 'last_name': user_invite.last_name, - 'role': user_invite.role, - 'sent_at': user_invite.sent_at, - 'status': user_invite.status, - 'token': user_invite.token, + "academy": None, + "cohort": None, + "created_at": self.bc.datetime.to_iso_string(user_invite.created_at), + "email": user_invite.email, + "first_name": user_invite.first_name, + "id": 
user_invite.id, + "invite_url": f"http://localhost:8000/v1/auth/member/invite/{user_invite.token}", + "last_name": user_invite.last_name, + "role": user_invite.role, + "sent_at": user_invite.sent_at, + "status": user_invite.status, + "token": user_invite.token, **arguments, } @@ -40,18 +41,18 @@ class MemberSetOfDuckTestSuite(AuthTestCase): 🔽🔽🔽 GET check the param is being passed """ - @patch('breathecode.authenticate.views.MeInviteView.get', MagicMock(side_effect=view_method_mock)) + @patch("breathecode.authenticate.views.MeInviteView.get", MagicMock(side_effect=view_method_mock)) def test_duck_test__get__with_auth___mock_view(self): model = self.bc.database.create(user=3) for n in range(0, 3): self.bc.request.authenticate(model.user[n]) - url = reverse_lazy('authenticate:user_me_invite') + url = reverse_lazy("authenticate:user_me_invite") response = self.client.get(url) json = response.json() - expected = {'args': [], 'kwargs': {}} + expected = {"args": [], "kwargs": {}} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -60,18 +61,18 @@ def test_duck_test__get__with_auth___mock_view(self): 🔽🔽🔽 PUT check the param is being passed """ - @patch('breathecode.authenticate.views.MeInviteView.put', MagicMock(side_effect=view_method_mock)) + @patch("breathecode.authenticate.views.MeInviteView.put", MagicMock(side_effect=view_method_mock)) def test_duck_test__put__with_auth___mock_view(self): model = self.bc.database.create(user=3) for n in range(0, 3): self.bc.request.authenticate(model.user[n]) - url = reverse_lazy('authenticate:user_me_invite') + url = reverse_lazy("authenticate:user_me_invite") response = self.client.put(url) json = response.json() - expected = {'args': [], 'kwargs': {}} + expected = {"args": [], "kwargs": {}} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -81,20 +82,20 @@ class AuthenticateTestSuite(AuthTestCase): def test_user_me_invite__without_auth(self): """Test /academy/user/invite without auth""" - url = reverse_lazy('authenticate:user_me_invite') + url = reverse_lazy("authenticate:user_me_invite") response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED - }) + self.assertEqual( + json, + {"detail": "Authentication credentials were not provided.", "status_code": status.HTTP_401_UNAUTHORIZED}, + ) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) def test_user_me_invite__wrong_academy(self): self.bc.request.set_headers(academy=1) - url = reverse_lazy('authenticate:user_me_invite') + url = reverse_lazy("authenticate:user_me_invite") response = self.client.get(url) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) @@ -108,7 +109,7 @@ def test_user_me_invite__get__without_user_invites(self): self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('authenticate:user_me_invite') + url = reverse_lazy("authenticate:user_me_invite") response = self.client.get(url) json = response.json() @@ -116,20 +117,20 @@ def test_user_me_invite__get__without_user_invites(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), []) + self.assertEqual(self.bc.database.list_of("authenticate.UserInvite"), []) """ 🔽🔽🔽 GET with 1 UserInvite, email not match between User and 
UserInvite """ def test_user_me_invite__get__with_one_user_invite__email_not_match(self): - user_invite = {'email': 'eeeeeeee@eeeeeeee.eeeeeeee'} + user_invite = {"email": "eeeeeeee@eeeeeeee.eeeeeeee"} model = self.bc.database.create(user=1, user_invite=user_invite) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('authenticate:user_me_invite') + url = reverse_lazy("authenticate:user_me_invite") response = self.client.get(url) json = response.json() @@ -137,22 +138,23 @@ def test_user_me_invite__get__with_one_user_invite__email_not_match(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), - [self.bc.format.to_dict(model.user_invite)]) + self.assertEqual( + self.bc.database.list_of("authenticate.UserInvite"), [self.bc.format.to_dict(model.user_invite)] + ) """ 🔽🔽🔽 GET with 1 UserInvite with status INVITED, email match between User and UserInvite """ def test_user_me_invite__get__with_one_user_invite__email_match__status_pending(self): - user = {'email': 'eeeeeeee@eeeeeeee.eeeeeeee'} - user_invite = {'email': 'eeeeeeee@eeeeeeee.eeeeeeee', 'status': 'PENDING'} + user = {"email": "eeeeeeee@eeeeeeee.eeeeeeee"} + user_invite = {"email": "eeeeeeee@eeeeeeee.eeeeeeee", "status": "PENDING"} model = self.bc.database.create(user=user, user_invite=user_invite) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('authenticate:user_me_invite') + url = reverse_lazy("authenticate:user_me_invite") response = self.client.get(url) json = response.json() @@ -160,23 +162,24 @@ def test_user_me_invite__get__with_one_user_invite__email_match__status_pending( self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), - [self.bc.format.to_dict(model.user_invite)]) + self.assertEqual( + self.bc.database.list_of("authenticate.UserInvite"), [self.bc.format.to_dict(model.user_invite)] + ) """ 🔽🔽🔽 GET with 3 UserInvite with bad statuses, email match between User and UserInvite """ def test_user_me_invite__get__with_three_user_invite__email_match__bad_statuses(self): - bad_statuses = ['ACCEPTED', 'REJECTED', 'WAITING_LIST'] - user = {'email': 'eeeeeeee@eeeeeeee.eeeeeeee'} - user_invites = [{'email': 'eeeeeeee@eeeeeeee.eeeeeeee', 'status': x} for x in bad_statuses] + bad_statuses = ["ACCEPTED", "REJECTED", "WAITING_LIST"] + user = {"email": "eeeeeeee@eeeeeeee.eeeeeeee"} + user_invites = [{"email": "eeeeeeee@eeeeeeee.eeeeeeee", "status": x} for x in bad_statuses] model = self.bc.database.create(user=user, user_invite=user_invites) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('authenticate:user_me_invite') + url = reverse_lazy("authenticate:user_me_invite") response = self.client.get(url) json = response.json() @@ -184,22 +187,22 @@ def test_user_me_invite__get__with_three_user_invite__email_match__bad_statuses( self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), self.bc.format.to_dict(model.user_invite)) + self.assertEqual(self.bc.database.list_of("authenticate.UserInvite"), self.bc.format.to_dict(model.user_invite)) """ 🔽🔽🔽 GET with 4 UserInvite with different statuses, email match between User and UserInvite """ def 
test_user_me_invite__get__with_four_user_invite__email_match__different_statuses(self): - statuses = ['PENDING', 'ACCEPTED', 'REJECTED', 'WAITING_LIST'] - user = {'email': 'eeeeeeee@eeeeeeee.eeeeeeee'} - user_invites = [{'email': 'eeeeeeee@eeeeeeee.eeeeeeee', 'status': x} for x in statuses] + statuses = ["PENDING", "ACCEPTED", "REJECTED", "WAITING_LIST"] + user = {"email": "eeeeeeee@eeeeeeee.eeeeeeee"} + user_invites = [{"email": "eeeeeeee@eeeeeeee.eeeeeeee", "status": x} for x in statuses] model = self.bc.database.create(user=user, user_invite=user_invites) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('authenticate:user_me_invite') + f'?status={",".join(statuses)}' + url = reverse_lazy("authenticate:user_me_invite") + f'?status={",".join(statuses)}' response = self.client.get(url) json = response.json() @@ -207,7 +210,7 @@ def test_user_me_invite__get__with_four_user_invite__email_match__different_stat self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), self.bc.format.to_dict(model.user_invite)) + self.assertEqual(self.bc.database.list_of("authenticate.UserInvite"), self.bc.format.to_dict(model.user_invite)) """ 🔽🔽🔽 PUT without new status in the url @@ -218,22 +221,24 @@ def test_user_me_invite__put__without_passing_ids(self): self.bc.request.set_headers(academy=1) invite_kwargs = { - 'email': choice(['a@a.com', 'b@b.com', 'c@c.com']), + "email": choice(["a@a.com", "b@b.com", "c@c.com"]), } - slug = 'missing-status' + slug = "missing-status" - model = self.generate_models(academy=True, - capability='crud_invite', - authenticate=True, - role='potato', - invite_kwargs=invite_kwargs, - profile_academy=True) + model = self.generate_models( + academy=True, + capability="crud_invite", + authenticate=True, + role="potato", + invite_kwargs=invite_kwargs, + profile_academy=True, + ) - url = reverse_lazy('authenticate:user_me_invite') + url = reverse_lazy("authenticate:user_me_invite") response = self.client.put(url) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(response.json()['detail'], slug) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), []) + self.assertEqual(response.json()["detail"], slug) + self.assertEqual(self.bc.database.list_of("authenticate.UserInvite"), []) diff --git a/breathecode/authenticate/tests/urls/tests_user_me_invite_status.py b/breathecode/authenticate/tests/urls/tests_user_me_invite_status.py index de96355d0..c5571708d 100644 --- a/breathecode/authenticate/tests/urls/tests_user_me_invite_status.py +++ b/breathecode/authenticate/tests/urls/tests_user_me_invite_status.py @@ -1,6 +1,7 @@ """ Set of tests for MeInviteView, this include duck tests """ + from random import choice from unittest.mock import MagicMock, PropertyMock, patch @@ -12,7 +13,7 @@ def view_method_mock(request, *args, **kwargs): - response = {'args': args, 'kwargs': kwargs} + response = {"args": args, "kwargs": kwargs} return Response(response, status=200) @@ -22,19 +23,19 @@ class MemberSetOfDuckTestSuite(AuthTestCase): 🔽🔽🔽 GET check the param is being passed """ - @patch('breathecode.authenticate.views.MeInviteView.get', MagicMock(side_effect=view_method_mock)) - @patch('breathecode.authenticate.signals.invite_status_updated.send_robust', MagicMock()) + @patch("breathecode.authenticate.views.MeInviteView.get", MagicMock(side_effect=view_method_mock)) + 
@patch("breathecode.authenticate.signals.invite_status_updated.send_robust", MagicMock()) def test_duck_test__get__with_auth___mock_view(self): model = self.bc.database.create(user=3) for n in range(0, 3): self.bc.request.authenticate(model.user[n]) - url = reverse_lazy('authenticate:user_me_invite_status', kwargs={'new_status': 'accepted'}) + url = reverse_lazy("authenticate:user_me_invite_status", kwargs={"new_status": "accepted"}) response = self.client.get(url) json = response.json() - expected = {'args': [], 'kwargs': {'new_status': 'accepted'}} + expected = {"args": [], "kwargs": {"new_status": "accepted"}} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -43,19 +44,19 @@ def test_duck_test__get__with_auth___mock_view(self): 🔽🔽🔽 PUT check the param is being passed """ - @patch('breathecode.authenticate.views.MeInviteView.put', MagicMock(side_effect=view_method_mock)) - @patch('breathecode.authenticate.signals.invite_status_updated.send_robust', MagicMock()) + @patch("breathecode.authenticate.views.MeInviteView.put", MagicMock(side_effect=view_method_mock)) + @patch("breathecode.authenticate.signals.invite_status_updated.send_robust", MagicMock()) def test_duck_test__put__with_auth___mock_view(self): model = self.bc.database.create(user=3) for n in range(0, 3): self.bc.request.authenticate(model.user[n]) - url = reverse_lazy('authenticate:user_me_invite_status', kwargs={'new_status': 'accepted'}) + url = reverse_lazy("authenticate:user_me_invite_status", kwargs={"new_status": "accepted"}) response = self.client.put(url) json = response.json() - expected = {'args': [], 'kwargs': {'new_status': 'accepted'}} + expected = {"args": [], "kwargs": {"new_status": "accepted"}} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -66,24 +67,24 @@ class AuthenticateTestSuite(AuthTestCase): 🔽🔽🔽 Auth """ - @patch('breathecode.authenticate.signals.invite_status_updated.send_robust', MagicMock()) + @patch("breathecode.authenticate.signals.invite_status_updated.send_robust", MagicMock()) def test_user_me_invite_status__without_auth(self): """Test /academy/user/invite without auth""" - url = reverse_lazy('authenticate:user_me_invite_status', kwargs={'new_status': 'pending'}) + url = reverse_lazy("authenticate:user_me_invite_status", kwargs={"new_status": "pending"}) response = self.client.put(url) json = response.json() - self.assertEqual(json, { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED - }) + self.assertEqual( + json, + {"detail": "Authentication credentials were not provided.", "status_code": status.HTTP_401_UNAUTHORIZED}, + ) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) - @patch('breathecode.authenticate.signals.invite_status_updated.send_robust', MagicMock()) + @patch("breathecode.authenticate.signals.invite_status_updated.send_robust", MagicMock()) def test_user_me_invite_status__wrong_academy(self): self.bc.request.set_headers(academy=1) - url = reverse_lazy('authenticate:user_me_invite_status', kwargs={'new_status': 'pending'}) + url = reverse_lazy("authenticate:user_me_invite_status", kwargs={"new_status": "pending"}) response = self.client.put(url) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) @@ -91,189 +92,186 @@ def test_user_me_invite_status__wrong_academy(self): 🔽🔽🔽 PUT passing status is not allowed or invalid through of the url """ - 
@patch('breathecode.authenticate.signals.invite_status_updated.send_robust', MagicMock()) + @patch("breathecode.authenticate.signals.invite_status_updated.send_robust", MagicMock()) def test_user_me_invite_status__passing_invalid_status(self): """Test academy/user/me/invite""" self.bc.request.set_headers(academy=1) - statuses_upper = ['WAITING_LIST', 'PENDING'] + statuses_upper = ["WAITING_LIST", "PENDING"] statuses_lower = [x.lower() for x in statuses_upper] statuses = statuses_upper + statuses_lower - model = self.generate_models(academy=True, - capability='crud_invite', - authenticate=True, - role='potato', - profile_academy=True) + model = self.generate_models( + academy=True, capability="crud_invite", authenticate=True, role="potato", profile_academy=True + ) for x in statuses: - url = reverse_lazy('authenticate:user_me_invite_status', kwargs={'new_status': x}) + url = reverse_lazy("authenticate:user_me_invite_status", kwargs={"new_status": x}) response = self.client.put(url) json = response.json() - expected = {'detail': 'invalid-status', 'status_code': 400} + expected = {"detail": "invalid-status", "status_code": 400} self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual(json, expected) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), []) + self.assertEqual(self.bc.database.list_of("authenticate.UserInvite"), []) """ 🔽🔽🔽 PUT passing valid statuses through of the url """ - @patch('breathecode.authenticate.signals.invite_status_updated.send_robust', MagicMock()) + @patch("breathecode.authenticate.signals.invite_status_updated.send_robust", MagicMock()) def test_user_me_invite_status__passing_valid_status__without_bulk_mode(self): """Test academy/user/me/invite""" self.bc.request.set_headers(academy=1) - statuses_upper = ['ACCEPTED', 'REJECTED'] + statuses_upper = ["ACCEPTED", "REJECTED"] statuses_lower = [x.lower() for x in statuses_upper] statuses = statuses_upper + statuses_lower - model = self.generate_models(academy=True, - capability='crud_invite', - authenticate=True, - role='potato', - profile_academy=True) + model = self.generate_models( + academy=True, capability="crud_invite", authenticate=True, role="potato", profile_academy=True + ) for x in statuses: - url = reverse_lazy('authenticate:user_me_invite_status', kwargs={'new_status': x}) + url = reverse_lazy("authenticate:user_me_invite_status", kwargs={"new_status": x}) response = self.client.put(url) json = response.json() - expected = {'detail': 'missing-ids', 'status_code': 400} + expected = {"detail": "missing-ids", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), []) + self.assertEqual(self.bc.database.list_of("authenticate.UserInvite"), []) """ 🔽🔽🔽 PUT bulk mode """ - @patch('breathecode.authenticate.signals.invite_status_updated.send_robust', MagicMock()) + @patch("breathecode.authenticate.signals.invite_status_updated.send_robust", MagicMock()) def test_user_me_invite_status__to_accepted_in_bulk_with_ids(self): """Test academy/user/me/invite""" self.bc.request.set_headers(academy=1) - base = self.generate_models(academy=True, - capability='crud_invite', - authenticate=True, - role='potato', - skip_cohort=True, - user_kwargs={'email': 'a@a.com'}) + base = self.generate_models( + academy=True, + capability="crud_invite", + authenticate=True, + role="potato", + skip_cohort=True, + user_kwargs={"email": "a@a.com"}, + ) 
invite_kwargs = { - 'status': 'PENDING', - 'email': 'a@a.com', - 'id': 1, + "status": "PENDING", + "email": "a@a.com", + "id": 1, } - model1 = self.generate_models(authenticate=True, - profile_academy=True, - user_invite=True, - user_invite_kwargs=invite_kwargs, - models=base) - invite_kwargs['id'] = 2 + model1 = self.generate_models( + authenticate=True, profile_academy=True, user_invite=True, user_invite_kwargs=invite_kwargs, models=base + ) + invite_kwargs["id"] = 2 - model2 = self.generate_models(authenticate=True, - profile_academy=True, - user_invite=True, - user_invite_kwargs=invite_kwargs, - models=base) + model2 = self.generate_models( + authenticate=True, profile_academy=True, user_invite=True, user_invite_kwargs=invite_kwargs, models=base + ) - url = reverse_lazy('authenticate:user_me_invite_status', kwargs={'new_status': 'accepted'}) + '?id=1,2' + url = reverse_lazy("authenticate:user_me_invite_status", kwargs={"new_status": "accepted"}) + "?id=1,2" response = self.client.put(url) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.json(), []) - self.assertEqual(self.bc.database.list_of('authenticate.UserInvite'), [{ - 'user_id': 1, - 'academy_id': 1, - 'author_id': 1, - 'cohort_id': 1, - 'email': 'a@a.com', - 'first_name': None, - 'conversion_info': None, - 'has_marketing_consent': False, - 'event_slug': None, - 'asset_slug': None, - 'id': 1, - 'is_email_validated': False, - 'last_name': None, - 'phone': '', - 'role_id': 'potato', - 'sent_at': None, - 'status': 'ACCEPTED', - 'token': model1['user_invite'].token, - 'process_message': '', - 'process_status': 'PENDING', - 'syllabus_id': None, - 'city': None, - 'country': None, - 'latitude': None, - 'longitude': None, - 'email_quality': None, - 'email_status': None, - }, { - 'user_id': 1, - 'academy_id': 1, - 'author_id': 1, - 'cohort_id': 2, - 'email': 'a@a.com', - 'first_name': None, - 'conversion_info': None, - 'has_marketing_consent': False, - 'event_slug': None, - 'asset_slug': None, - 'id': 2, - 'is_email_validated': False, - 'last_name': None, - 'phone': '', - 'role_id': 'potato', - 'sent_at': None, - 'status': 'ACCEPTED', - 'token': model2['user_invite'].token, - 'process_message': '', - 'process_status': 'PENDING', - 'syllabus_id': None, - 'city': None, - 'country': None, - 'latitude': None, - 'longitude': None, - 'email_quality': None, - 'email_status': None, - }]) - - self.assertEqual(self.bc.database.list_of('auth.User'), [ - self.bc.format.to_dict(model1.user), - ]) - - @patch('breathecode.authenticate.signals.invite_status_updated.send_robust', MagicMock()) + self.assertEqual( + self.bc.database.list_of("authenticate.UserInvite"), + [ + { + "user_id": 1, + "academy_id": 1, + "author_id": 1, + "cohort_id": 1, + "email": "a@a.com", + "first_name": None, + "conversion_info": None, + "has_marketing_consent": False, + "event_slug": None, + "asset_slug": None, + "id": 1, + "is_email_validated": False, + "last_name": None, + "phone": "", + "role_id": "potato", + "sent_at": None, + "status": "ACCEPTED", + "token": model1["user_invite"].token, + "process_message": "", + "process_status": "PENDING", + "syllabus_id": None, + "city": None, + "country": None, + "latitude": None, + "longitude": None, + "email_quality": None, + "email_status": None, + }, + { + "user_id": 1, + "academy_id": 1, + "author_id": 1, + "cohort_id": 2, + "email": "a@a.com", + "first_name": None, + "conversion_info": None, + "has_marketing_consent": False, + "event_slug": None, + "asset_slug": None, + "id": 2, + 
"is_email_validated": False, + "last_name": None, + "phone": "", + "role_id": "potato", + "sent_at": None, + "status": "ACCEPTED", + "token": model2["user_invite"].token, + "process_message": "", + "process_status": "PENDING", + "syllabus_id": None, + "city": None, + "country": None, + "latitude": None, + "longitude": None, + "email_quality": None, + "email_status": None, + }, + ], + ) + + self.assertEqual( + self.bc.database.list_of("auth.User"), + [ + self.bc.format.to_dict(model1.user), + ], + ) + + @patch("breathecode.authenticate.signals.invite_status_updated.send_robust", MagicMock()) def test_user_me_invite_status__to_accepted_invitations_not_matched(self): """Test academy/user/me/invite""" self.bc.request.set_headers(academy=1) - base = self.generate_models(academy=True, - capability='crud_invite', - authenticate=True, - role='potato', - user_kwards={'email': 'a@a.com'}) + base = self.generate_models( + academy=True, capability="crud_invite", authenticate=True, role="potato", user_kwards={"email": "a@a.com"} + ) - invite_kwargs = {'status': 'ACCEPTED', 'email': 'a@a.com'} + invite_kwargs = {"status": "ACCEPTED", "email": "a@a.com"} - model1 = self.generate_models(authenticate=True, - profile_academy=True, - user_invite=True, - user_invite_kwargs=invite_kwargs, - models=base) + model1 = self.generate_models( + authenticate=True, profile_academy=True, user_invite=True, user_invite_kwargs=invite_kwargs, models=base + ) - model2 = self.generate_models(authenticate=True, - profile_academy=True, - user_invite=True, - user_invite_kwargs=invite_kwargs, - models=base) + model2 = self.generate_models( + authenticate=True, profile_academy=True, user_invite=True, user_invite_kwargs=invite_kwargs, models=base + ) - url = reverse_lazy('authenticate:user_me_invite_status', kwargs={'new_status': 'accepted'}) + '?id=1,2' + url = reverse_lazy("authenticate:user_me_invite_status", kwargs={"new_status": "accepted"}) + "?id=1,2" response = self.client.put(url) self.assertEqual(response.status_code, status.HTTP_200_OK) diff --git a/breathecode/authenticate/tests/urls/tests_user_profile.py b/breathecode/authenticate/tests/urls/tests_user_profile.py index db2b047b4..5e8d5df59 100644 --- a/breathecode/authenticate/tests/urls/tests_user_profile.py +++ b/breathecode/authenticate/tests/urls/tests_user_profile.py @@ -1,6 +1,7 @@ """ Test cases for /profile/<int:user_id> """ + import pytz, datetime from django.urls.base import reverse_lazy from rest_framework import status @@ -9,21 +10,21 @@ def get_serializer(self, profile, data={}): return { - 'avatar_url': profile.avatar_url, - 'bio': profile.bio, - 'blog': profile.blog, - 'github_username': profile.github_username, - 'linkedin_url': profile.linkedin_url, - 'phone': profile.phone, - 'portfolio_url': profile.portfolio_url, - 'show_tutorial': profile.show_tutorial, - 'twitter_username': profile.twitter_username, - 'user': { - 'id': profile.user.id, - 'email': profile.user.email, - 'username': profile.user.username, - 'first_name': profile.user.first_name, - 'last_name': profile.user.last_name, + "avatar_url": profile.avatar_url, + "bio": profile.bio, + "blog": profile.blog, + "github_username": profile.github_username, + "linkedin_url": profile.linkedin_url, + "phone": profile.phone, + "portfolio_url": profile.portfolio_url, + "show_tutorial": profile.show_tutorial, + "twitter_username": profile.twitter_username, + "user": { + "id": profile.user.id, + "email": profile.user.email, + "username": profile.user.username, + "first_name": profile.user.first_name, + 
"last_name": profile.user.last_name, }, **data, } @@ -31,32 +32,32 @@ def get_serializer(self, profile, data={}): def put_serializer(self, profile, data={}): return { - 'id': profile.id, - 'avatar_url': profile.avatar_url, - 'bio': profile.bio, - 'blog': profile.blog, - 'github_username': profile.github_username, - 'linkedin_url': profile.linkedin_url, - 'phone': profile.phone, - 'portfolio_url': profile.portfolio_url, - 'show_tutorial': profile.show_tutorial, - 'twitter_username': profile.twitter_username, - 'user': profile.user.id, + "id": profile.id, + "avatar_url": profile.avatar_url, + "bio": profile.bio, + "blog": profile.blog, + "github_username": profile.github_username, + "linkedin_url": profile.linkedin_url, + "phone": profile.phone, + "portfolio_url": profile.portfolio_url, + "show_tutorial": profile.show_tutorial, + "twitter_username": profile.twitter_username, + "user": profile.user.id, **data, } def profile_row(data={}): return { - 'avatar_url': None, - 'bio': None, - 'blog': None, - 'github_username': None, - 'linkedin_url': None, - 'phone': '', - 'portfolio_url': None, - 'show_tutorial': True, - 'twitter_username': None, + "avatar_url": None, + "bio": None, + "blog": None, + "github_username": None, + "linkedin_url": None, + "phone": "", + "portfolio_url": None, + "show_tutorial": True, + "twitter_username": None, **data, } @@ -68,15 +69,18 @@ class ProfileTestSuite(AuthTestCase): def test_user_profile__without_auth(self): """Test /profile/id without auth""" - url = reverse_lazy('authenticate:user_profile', kwargs={ - 'user_id': 1, - }) + url = reverse_lazy( + "authenticate:user_profile", + kwargs={ + "user_id": 1, + }, + ) response = self.client.get(url) json = response.json() expected = { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED, + "detail": "Authentication credentials were not provided.", + "status_code": status.HTTP_401_UNAUTHORIZED, } self.assertEqual(json, expected) @@ -85,43 +89,50 @@ def test_user_profile__without_auth(self): def test_user_profile__wrong_academy(self): """Test /profile/id with wrong academy""" self.bc.request.set_headers(academy=1) - url = reverse_lazy('authenticate:user_profile', kwargs={ - 'user_id': 1, - }) + url = reverse_lazy( + "authenticate:user_profile", + kwargs={ + "user_id": 1, + }, + ) response = self.client.get(url) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) def test_user_profile__wrong_user_id(self): - """Test /profile/id """ + """Test /profile/id""" self.bc.request.set_headers(academy=1) - model = self.bc.database.create(authenticate=True, capability='crud_event', role='role', profile_academy=1) - - url = reverse_lazy('authenticate:user_profile', kwargs={ - 'user_id': 1, - }) + model = self.bc.database.create(authenticate=True, capability="crud_event", role="role", profile_academy=1) + + url = reverse_lazy( + "authenticate:user_profile", + kwargs={ + "user_id": 1, + }, + ) response = self.client.get(url) json = response.json() expected = { - 'detail': 'profile-not-found', - 'status_code': status.HTTP_404_NOT_FOUND, + "detail": "profile-not-found", + "status_code": status.HTTP_404_NOT_FOUND, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) def test_user_profile(self): - """Test /profile/id """ + """Test /profile/id""" self.bc.request.set_headers(academy=1) - model = self.bc.database.create(authenticate=True, - profile=1, - capability='crud_event', - role='role', - profile_academy=1) - - url = 
reverse_lazy('authenticate:user_profile', kwargs={ - 'user_id': 1, - }) + model = self.bc.database.create( + authenticate=True, profile=1, capability="crud_event", role="role", profile_academy=1 + ) + + url = reverse_lazy( + "authenticate:user_profile", + kwargs={ + "user_id": 1, + }, + ) response = self.client.get(url) json = response.json() @@ -131,59 +142,66 @@ def test_user_profile(self): self.assertEqual(response.status_code, status.HTTP_200_OK) def test_put_user_profile__wrong_user_id(self): - """Test put /profile/id """ + """Test put /profile/id""" self.bc.request.set_headers(academy=1) - model = self.bc.database.create(authenticate=True, capability='crud_event', role='role', profile_academy=1) - - url = reverse_lazy('authenticate:user_profile', kwargs={ - 'user_id': 1, - }) + model = self.bc.database.create(authenticate=True, capability="crud_event", role="role", profile_academy=1) + + url = reverse_lazy( + "authenticate:user_profile", + kwargs={ + "user_id": 1, + }, + ) response = self.client.put(url) json = response.json() expected = { - 'detail': 'profile-not-found', - 'status_code': status.HTTP_404_NOT_FOUND, + "detail": "profile-not-found", + "status_code": status.HTTP_404_NOT_FOUND, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) def test_put_user_profile(self): - """Test /profile/id """ + """Test /profile/id""" self.bc.request.set_headers(academy=1) - model = self.bc.database.create(authenticate=True, - profile=1, - capability='crud_event', - role='role', - profile_academy=1) + model = self.bc.database.create( + authenticate=True, profile=1, capability="crud_event", role="role", profile_academy=1 + ) - url = reverse_lazy('authenticate:user_profile', kwargs={ - 'user_id': 1, - }) + url = reverse_lazy( + "authenticate:user_profile", + kwargs={ + "user_id": 1, + }, + ) data = { - 'user': 1, - 'avatar_url': 'https://google.com', - 'bio': 'blablabla', - 'phone': '+1555555555', - 'show_tutorial': False, - 'twitter_username': 'Kenny', - 'github_username': 'Kenny', - 'portfolio_url': 'Kenny', - 'linkedin_url': 'Kenny', - 'blog': 'Kenny', + "user": 1, + "avatar_url": "https://google.com", + "bio": "blablabla", + "phone": "+1555555555", + "show_tutorial": False, + "twitter_username": "Kenny", + "github_username": "Kenny", + "portfolio_url": "Kenny", + "linkedin_url": "Kenny", + "blog": "Kenny", } response = self.client.put(url, data) json = response.json() expected = put_serializer(self, model.profile, data=data) - bc_data = {**data, 'user_id': 1, 'id': 1} - bc_data.pop('user', None) + bc_data = {**data, "user_id": 1, "id": 1} + bc_data.pop("user", None) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('authenticate.Profile'), [ - profile_row(data={**bc_data}), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.Profile"), + [ + profile_row(data={**bc_data}), + ], + ) diff --git a/breathecode/authenticate/urls.py b/breathecode/authenticate/urls.py index 267f79fb6..4fe028076 100644 --- a/breathecode/authenticate/urls.py +++ b/breathecode/authenticate/urls.py @@ -13,6 +13,7 @@ 1. Import the include() function: from django.urls import include, path 2. 
Add a URL to urlpatterns: path('blog/', include('blog.urls')) """ + import os from django.urls import path @@ -70,100 +71,98 @@ ) # avoiding issues on test environment due that the fixture are loaded after this app -ENV = os.getenv('ENV') -TEST_ENV = (ENV == 'test' or ENV not in ['development', 'staging', 'production']) -LOGIN_URL = '/v1/auth/view/login' -app_url = os.getenv('APP_URL') +ENV = os.getenv("ENV") +TEST_ENV = ENV == "test" or ENV not in ["development", "staging", "production"] +LOGIN_URL = "/v1/auth/view/login" +app_url = os.getenv("APP_URL") if TEST_ENV and not app_url: import faker fake = faker.Faker() - app_url = fake.url().replace('http://', 'https://') + app_url = fake.url().replace("http://", "https://") -app_name = 'authenticate' +app_name = "authenticate" urlpatterns = [ - path('confirmation/<str:token>', ConfirmEmailView.as_view(), name='confirmation_token'), - path('invite/resend/<int:invite_id>', ResendInviteView.as_view(), name='invite_resend_id'), - path('member/invite/resend/<int:invite_id>', AcademyInviteView.as_view(), name='member_invite_resend_id'), - path('subscribe/', WaitingListView.as_view(), name='subscribe'), - path('user/', get_users, name='user'), - path('user/me', UserMeView.as_view(), name='user_me'), - path('user/me/settings', UserSettingsView.as_view(), name='user_me_settings'), - path('user/<str:id_or_email>', get_user_by_id_or_email), - path('role', get_roles, name='role'), - path('role/<str:role_slug>', get_roles, name='role_slug'), - path('profile/<int:user_id>', ProfileView.as_view(), name='user_profile'), - path('profile/me', ProfileMeView.as_view(), name='profile_me'), - path('profile/me/picture', ProfileMePictureView.as_view(), name='profile_me_picture'), - path('profile/invite/me', ProfileInviteMeView.as_view(), name='profile_invite_me'), - path('member/invite', render_user_invite, name='member_invite'), - path('member/invite/<str:token>', render_invite, name='member_invite_token'), - path('member/<int:profile_academy_id>/token', TokenTemporalView.as_view(), - name='profile_academy_reset_github_link'), - path('academy/member', MemberView.as_view(), name='academy_member'), - path('academy/member/<int:profileacademy_id>/invite', AcademyInviteView.as_view(), name='academy_member_id_invite'), - path('academy/<int:academy_id>/member', MemberView.as_view(), name='academy_id_member'), - path('academy/<int:academy_id>/member/<str:user_id_or_email>', MemberView.as_view(), name='academy_id_member_id'), - path('academy/member/<str:user_id_or_email>', MemberView.as_view(), name='academy_member_id'), - path('academy/student', StudentView.as_view(), name='academy_student'), - path('academy/student/<str:user_id_or_email>', StudentView.as_view(), name='academy_student_id'), + path("confirmation/<str:token>", ConfirmEmailView.as_view(), name="confirmation_token"), + path("invite/resend/<int:invite_id>", ResendInviteView.as_view(), name="invite_resend_id"), + path("member/invite/resend/<int:invite_id>", AcademyInviteView.as_view(), name="member_invite_resend_id"), + path("subscribe/", WaitingListView.as_view(), name="subscribe"), + path("user/", get_users, name="user"), + path("user/me", UserMeView.as_view(), name="user_me"), + path("user/me/settings", UserSettingsView.as_view(), name="user_me_settings"), + path("user/<str:id_or_email>", get_user_by_id_or_email), + path("role", get_roles, name="role"), + path("role/<str:role_slug>", get_roles, name="role_slug"), + path("profile/<int:user_id>", ProfileView.as_view(), name="user_profile"), + 
path("profile/me", ProfileMeView.as_view(), name="profile_me"), + path("profile/me/picture", ProfileMePictureView.as_view(), name="profile_me_picture"), + path("profile/invite/me", ProfileInviteMeView.as_view(), name="profile_invite_me"), + path("member/invite", render_user_invite, name="member_invite"), + path("member/invite/<str:token>", render_invite, name="member_invite_token"), + path( + "member/<int:profile_academy_id>/token", TokenTemporalView.as_view(), name="profile_academy_reset_github_link" + ), + path("academy/member", MemberView.as_view(), name="academy_member"), + path("academy/member/<int:profileacademy_id>/invite", AcademyInviteView.as_view(), name="academy_member_id_invite"), + path("academy/<int:academy_id>/member", MemberView.as_view(), name="academy_id_member"), + path("academy/<int:academy_id>/member/<str:user_id_or_email>", MemberView.as_view(), name="academy_id_member_id"), + path("academy/member/<str:user_id_or_email>", MemberView.as_view(), name="academy_member_id"), + path("academy/student", StudentView.as_view(), name="academy_student"), + path("academy/student/<str:user_id_or_email>", StudentView.as_view(), name="academy_student_id"), # TODO: these endpoints starts with academy but actually they are related to the user, not to the academy - path('academy/user/me/invite', MeInviteView.as_view(), name='academy_user_me_invite'), - path('academy/user/me/invite/<slug:new_status>', MeInviteView.as_view(), name='academy_user_me_invite_status'), + path("academy/user/me/invite", MeInviteView.as_view(), name="academy_user_me_invite"), + path("academy/user/me/invite/<slug:new_status>", MeInviteView.as_view(), name="academy_user_me_invite_status"), # 🔼🔼🔼 - path('academy/invite/<int:invite_id>', AcademyInviteView.as_view(), name='academy_invite_id'), - path('academy/user/invite', AcademyInviteView.as_view(), name='academy_user_invite'), - path('academy/html/invite', render_academy_invite, name='academy_html_invite'), + path("academy/invite/<int:invite_id>", AcademyInviteView.as_view(), name="academy_invite_id"), + path("academy/user/invite", AcademyInviteView.as_view(), name="academy_user_invite"), + path("academy/html/invite", render_academy_invite, name="academy_html_invite"), # path('group/', get_groups, name="group"), - path('view/login', login_html_view, name='login_view'), # html login form + path("view/login", login_html_view, name="login_view"), # html login form # get token from email and password - path('login/', LoginView.as_view(), name='login'), - path('logout/', LogoutView.as_view(), name='logout'), + path("login/", LoginView.as_view(), name="login"), + path("logout/", LogoutView.as_view(), name="logout"), # get an another token (temporal), from a logged in user - path('academy/token/', AcademyTokenView.as_view(), name='academy_token'), - path('token/me', TemporalTokenView.as_view(), name='token_me'), - path('token/<str:token>', get_token_info, name='token'), # get token information - path('password/reset', reset_password_view, name='password_reset'), - path('member/<int:profileacademy_id>/password/reset', PasswordResetView.as_view(), name='member_password_reset'), - path('password/<str:token>', pick_password, name='password_token'), - path('github/', get_github_token, name='github'), - path('github/me', GithubMeView.as_view(), name='github_me'), - path('github/<str:token>', get_github_token, name='github_token'), - path('github/callback/', save_github_token, name='github_callback'), - path('slack/', get_slack_token, name='slack'), - path('slack/callback/', 
save_slack_token, name='slack_callback'), - path('facebook/', get_facebook_token, name='facebook'), - path('facebook/callback/', save_facebook_token, name='facebook_callback'), - path('user/me', UserMeView.as_view(), name='user_me'), - path('user/me/invite', MeInviteView.as_view(), name='user_me_invite'), - path('user/me/invite/<slug:new_status>', MeInviteView.as_view(), name='user_me_invite_status'), - path('academy/settings', AcademyAuthSettingsView.as_view(), name='academy_me_settings'), - + path("academy/token/", AcademyTokenView.as_view(), name="academy_token"), + path("token/me", TemporalTokenView.as_view(), name="token_me"), + path("token/<str:token>", get_token_info, name="token"), # get token information + path("password/reset", reset_password_view, name="password_reset"), + path("member/<int:profileacademy_id>/password/reset", PasswordResetView.as_view(), name="member_password_reset"), + path("password/<str:token>", pick_password, name="password_token"), + path("github/", get_github_token, name="github"), + path("github/me", GithubMeView.as_view(), name="github_me"), + path("github/<str:token>", get_github_token, name="github_token"), + path("github/callback/", save_github_token, name="github_callback"), + path("slack/", get_slack_token, name="slack"), + path("slack/callback/", save_slack_token, name="slack_callback"), + path("facebook/", get_facebook_token, name="facebook"), + path("facebook/callback/", save_facebook_token, name="facebook_callback"), + path("user/me", UserMeView.as_view(), name="user_me"), + path("user/me/invite", MeInviteView.as_view(), name="user_me_invite"), + path("user/me/invite/<slug:new_status>", MeInviteView.as_view(), name="user_me_invite_status"), + path("academy/settings", AcademyAuthSettingsView.as_view(), name="academy_me_settings"), # google authentication oath2.0 - path('google/<str:token>', get_google_token, name='google_token'), - path('google/callback/', save_google_token, name='google_callback'), - path('gitpod/sync', sync_gitpod_users_view, name='sync_gitpod_users'), - + path("google/<str:token>", get_google_token, name="google_token"), + path("google/callback/", save_google_token, name="google_callback"), + path("gitpod/sync", sync_gitpod_users_view, name="sync_gitpod_users"), # sync with gitHUB - path('academy/github/user', GithubUserView.as_view(), name='github_user'), - path('academy/github/user/sync', AcademyGithubSyncView.as_view(), name='github_user_sync'), - path('academy/github/user/<int:githubuser_id>', GithubUserView.as_view(), name='github_user_id'), - + path("academy/github/user", GithubUserView.as_view(), name="github_user"), + path("academy/github/user/sync", AcademyGithubSyncView.as_view(), name="github_user_sync"), + path("academy/github/user/<int:githubuser_id>", GithubUserView.as_view(), name="github_user_id"), # sync with gitPOD - path('academy/gitpod/user', GitpodUserView.as_view(), name='gitpod_user'), - path('academy/gitpod/user/<int:gitpoduser_id>', GitpodUserView.as_view(), name='gitpod_user_id'), - + path("academy/gitpod/user", GitpodUserView.as_view(), name="gitpod_user"), + path("academy/gitpod/user/<int:gitpoduser_id>", GitpodUserView.as_view(), name="gitpod_user_id"), # authorize - path('authorize/<str:app_slug>', - authorize_view(login_url=LOGIN_URL, app_url=app_url, get_language=get_user_language), - name='authorize_slug'), - + path( + "authorize/<str:app_slug>", + authorize_view(login_url=LOGIN_URL, app_url=app_url, get_language=get_user_language), + name="authorize_slug", + ), # apps - 
path('appuseragreement', AppUserAgreementView.as_view(), name='appuseragreement'), - path('app/user', AppUserView.as_view(), name='app_user'), - path('app/user/<int:user_id>', AppUserView.as_view(), name='app_user_id'), - path('app/webhook', app_webhook, name='app_webhook'), - path('me/app/<str:app_slug>/sync', AppSync.as_view(), name='me_app_slug_sync'), + path("appuseragreement", AppUserAgreementView.as_view(), name="appuseragreement"), + path("app/user", AppUserView.as_view(), name="app_user"), + path("app/user/<int:user_id>", AppUserView.as_view(), name="app_user_id"), + path("app/webhook", app_webhook, name="app_webhook"), + path("me/app/<str:app_slug>/sync", AppSync.as_view(), name="me_app_slug_sync"), ] diff --git a/breathecode/authenticate/views.py b/breathecode/authenticate/views.py index 0a7ee5c2a..0c8f7739e 100644 --- a/breathecode/authenticate/views.py +++ b/breathecode/authenticate/views.py @@ -124,24 +124,24 @@ logger = logging.getLogger(__name__) PATTERNS = { - 'CONTAINS_LOWERCASE': r'[a-z]', - 'CONTAINS_UPPERCASE': r'[A-Z]', - 'CONTAINS_SYMBOLS': r'[^a-zA-Z]', + "CONTAINS_LOWERCASE": r"[a-z]", + "CONTAINS_UPPERCASE": r"[A-Z]", + "CONTAINS_SYMBOLS": r"[^a-zA-Z]", } -PROFILE_MIME_ALLOWED = ['image/png', 'image/jpeg'] +PROFILE_MIME_ALLOWED = ["image/png", "image/jpeg"] def get_profile_bucket(): - return os.getenv('PROFILE_BUCKET', '') + return os.getenv("PROFILE_BUCKET", "") def get_shape_of_image_url(): - return os.getenv('GCLOUD_SHAPE_OF_IMAGE', '') + return os.getenv("GCLOUD_SHAPE_OF_IMAGE", "") def get_google_project_id(): - return os.getenv('GOOGLE_PROJECT_ID', '') + return os.getenv("GOOGLE_PROJECT_ID", "") class TemporalTokenView(ObtainAuthToken): @@ -150,53 +150,61 @@ class TemporalTokenView(ObtainAuthToken): def post(self, request): - token_type = request.data.get('token_type', 'temporal') + token_type = request.data.get("token_type", "temporal") - allowed_token_types = ['temporal', 'one_time'] + allowed_token_types = ["temporal", "one_time"] if token_type not in allowed_token_types: - raise ValidationException(f'The token type must be one of {", ".join(allowed_token_types)}', - slug='token-type-invalid-or-not-allowed') + raise ValidationException( + f'The token type must be one of {", ".join(allowed_token_types)}', + slug="token-type-invalid-or-not-allowed", + ) token, created = Token.get_or_create(user=request.user, token_type=token_type) - return Response({ - 'token': token.key, - 'token_type': token.token_type, - 'expires_at': token.expires_at, - 'user_id': token.user.pk, - 'email': token.user.email - }) + return Response( + { + "token": token.key, + "token_type": token.token_type, + "expires_at": token.expires_at, + "user_id": token.user.pk, + "email": token.user.email, + } + ) class AcademyTokenView(ObtainAuthToken): schema = AutoSchema() permission_classes = [IsAuthenticated] - @capable_of('get_academy_token') + @capable_of("get_academy_token") def get(self, request, academy_id): academy = Academy.objects.get(id=academy_id) academy_user = User.objects.filter(username=academy.slug).first() if academy_user is None: - raise ValidationException('No academy token has been generated yet', slug='academy-token-not-found') + raise ValidationException("No academy token has been generated yet", slug="academy-token-not-found") - token = Token.objects.filter(user=academy_user, token_type='permanent').first() + token = Token.objects.filter(user=academy_user, token_type="permanent").first() if token is None: - raise ValidationException('No academy token has been generated 
yet', slug='academy-token-not-found') + raise ValidationException("No academy token has been generated yet", slug="academy-token-not-found") - return Response({ - 'token': token.key, - 'token_type': token.token_type, - 'expires_at': token.expires_at, - }) + return Response( + { + "token": token.key, + "token_type": token.token_type, + "expires_at": token.expires_at, + } + ) - @capable_of('generate_academy_token') + @capable_of("generate_academy_token") def post(self, request, academy_id): token = generate_academy_token(academy_id, True) - return Response({ - 'token': token.key, - 'token_type': token.token_type, - 'expires_at': token.expires_at, - }) + return Response( + { + "token": token.key, + "token_type": token.token_type, + "expires_at": token.expires_at, + } + ) class LogoutView(APIView): @@ -204,11 +212,13 @@ class LogoutView(APIView): permission_classes = [IsAuthenticated] def get(self, request): - Token.objects.filter(token_type='login').delete() + Token.objects.filter(token_type="login").delete() request.auth.delete() - return Response({ - 'message': 'User tokens successfully deleted', - }) + return Response( + { + "message": "User tokens successfully deleted", + } + ) class WaitingListView(APIView, HeaderLimitOffsetPagination, GenerateLookupsMixin): @@ -219,33 +229,35 @@ def post(self, request): lang = get_user_language(request) syllabus = None - if (v := data.pop('syllabus', None)): + if v := data.pop("syllabus", None): try: args = {} if isinstance(v, int): - args['id'] = v + args["id"] = v else: - args['slug'] = v + args["slug"] = v syllabus = Syllabus.objects.filter(**args).get() except Exception: raise ValidationException( - translation(lang, - en='The syllabus does not exist', - es='El syllabus no existe', - slug='syllabus-not-found')) + translation( + lang, en="The syllabus does not exist", es="El syllabus no existe", slug="syllabus-not-found" + ) + ) if syllabus: - data['syllabus'] = syllabus.id - - serializer = UserInviteWaitingListSerializer(data=data, - context={ - 'lang': lang, - 'plan': data.get('plan'), - 'course': data.get('course'), - 'syllabus': syllabus, - }) + data["syllabus"] = syllabus.id + + serializer = UserInviteWaitingListSerializer( + data=data, + context={ + "lang": lang, + "plan": data.get("plan"), + "course": data.get("course"), + "syllabus": syllabus, + }, + ) if serializer.is_valid(): serializer.save() return Response(serializer.data, status=status.HTTP_201_CREATED) @@ -254,47 +266,53 @@ def post(self, request): def put(self, request): lang = get_user_language(request) - invite = UserInvite.objects.filter(email=request.data.get('email'), - status='WAITING_LIST', - token=request.data.get('token', 'empty')).first() + invite = UserInvite.objects.filter( + email=request.data.get("email"), status="WAITING_LIST", token=request.data.get("token", "empty") + ).first() if not invite: - raise ValidationException(translation(lang, - en='The email does not exist in the waiting list', - es='El email no existe en la lista de espera', - slug='not-found'), - code=404) + raise ValidationException( + translation( + lang, + en="The email does not exist in the waiting list", + es="El email no existe en la lista de espera", + slug="not-found", + ), + code=404, + ) data = {**request.data} syllabus = None - if (v := data.pop('syllabus', None)): + if v := data.pop("syllabus", None): try: args = {} if isinstance(v, int): - args['id'] = v + args["id"] = v else: - args['slug'] = v + args["slug"] = v syllabus = Syllabus.objects.filter(**args).get() except Exception: raise 
ValidationException( - translation(lang, - en='The syllabus does not exist', - es='El syllabus no existe', - slug='syllabus-not-found')) + translation( + lang, en="The syllabus does not exist", es="El syllabus no existe", slug="syllabus-not-found" + ) + ) if syllabus: - data['syllabus'] = syllabus.id - - serializer = UserInviteWaitingListSerializer(invite, - data=data, - context={ - 'lang': lang, - 'plan': data.get('plan'), - 'course': data.get('course'), - 'syllabus': syllabus, - }) + data["syllabus"] = syllabus.id + + serializer = UserInviteWaitingListSerializer( + invite, + data=data, + context={ + "lang": lang, + "plan": data.get("plan"), + "course": data.get("course"), + "syllabus": syllabus, + }, + ) if serializer.is_valid(): serializer.save() return Response(serializer.data, status=status.HTTP_200_OK) @@ -304,7 +322,7 @@ def put(self, request): class MemberView(APIView, GenerateLookupsMixin): extensions = APIViewExtensions(paginate=True) - @capable_of('read_member') + @capable_of("read_member") def get(self, request, academy_id, user_id_or_email=None): handler = self.extensions(request) @@ -316,57 +334,59 @@ def get(self, request, academy_id, user_id_or_email=None): item = ProfileAcademy.objects.filter(user__email=user_id_or_email, academy_id=academy_id).first() if item is None: - raise ValidationException('Profile not found for this user and academy', - code=404, - slug='profile-academy-not-found') + raise ValidationException( + "Profile not found for this user and academy", code=404, slug="profile-academy-not-found" + ) serializer = GetProfileAcademySerializer(item, many=False) return Response(serializer.data) items = ProfileAcademy.objects.filter(academy__id=academy_id) - include = request.GET.get('include', '').split() - if not 'student' in include: - items = items.exclude(role__slug='student') + include = request.GET.get("include", "").split() + if not "student" in include: + items = items.exclude(role__slug="student") - roles = request.GET.get('roles', '') - if roles != '': - items = items.filter(role__in=roles.lower().split(',')) + roles = request.GET.get("roles", "") + if roles != "": + items = items.filter(role__in=roles.lower().split(",")) - status = request.GET.get('status', None) + status = request.GET.get("status", None) if status is not None: items = items.filter(status__iexact=status) - like = request.GET.get('like', None) + like = request.GET.get("like", None) if like is not None: items = query_like_by_full_name(like=like, items=items) - items = items.exclude(user__email__contains='@token.com') + items = items.exclude(user__email__contains="@token.com") items = handler.queryset(items) serializer = GetProfileAcademySmallSerializer(items, many=True) return handler.response(serializer.data) - @capable_of('crud_member') + @capable_of("crud_member") def post(self, request, academy_id=None): - serializer = MemberPOSTSerializer(data=request.data, context={'academy_id': academy_id, 'request': request}) + serializer = MemberPOSTSerializer(data=request.data, context={"academy_id": academy_id, "request": request}) if serializer.is_valid(): serializer.save() return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - @capable_of('crud_member') + @capable_of("crud_member") def put(self, request, academy_id=None, user_id_or_email=None): lang = get_user_language(request) user = ProfileAcademy.objects.filter(user__id=request.user.id, academy__id=academy_id).first() - if user.email is None or 
user.email.strip() == '': + if user.email is None or user.email.strip() == "": raise ValidationException( - translation(lang, - en='This mentor does not have an email address', - es='Este mentor no tiene una dirección de correo electrónico', - slug='email-not-found'), + translation( + lang, + en="This mentor does not have an email address", + es="Este mentor no tiene una dirección de correo electrónico", + slug="email-not-found", + ), code=400, ) @@ -374,9 +394,9 @@ def put(self, request, academy_id=None, user_id_or_email=None): if user_id_or_email.isnumeric(): already = ProfileAcademy.objects.filter(user__id=user_id_or_email, academy_id=academy_id).first() else: - raise ValidationException('User id must be a numeric value', code=400, slug='user-id-is-not-numeric') + raise ValidationException("User id must be a numeric value", code=400, slug="user-id-is-not-numeric") - request_data = {**request.data, 'user': user_id_or_email, 'academy': academy_id} + request_data = {**request.data, "user": user_id_or_email, "academy": academy_id} if already: serializer = MemberPUTSerializer(already, data=request_data) if serializer.is_valid(): @@ -384,17 +404,17 @@ def put(self, request, academy_id=None, user_id_or_email=None): return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) else: - serializer = MemberPOSTSerializer(data=request_data, context={'academy_id': academy_id, 'request': request}) + serializer = MemberPOSTSerializer(data=request_data, context={"academy_id": academy_id, "request": request}) if serializer.is_valid(): serializer.save() return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - @capable_of('crud_member') + @capable_of("crud_member") def delete(self, request, academy_id=None, user_id_or_email=None): - raise ValidationException('This functionality is under maintenance and it\'s not working', - code=403, - slug='delete-is-forbidden') + raise ValidationException( + "This functionality is under maintenance and it's not working", code=403, slug="delete-is-forbidden" + ) class MeInviteView(APIView, HeaderLimitOffsetPagination, GenerateLookupsMixin): @@ -402,28 +422,28 @@ class MeInviteView(APIView, HeaderLimitOffsetPagination, GenerateLookupsMixin): def get(self, request): invites = UserInvite.objects.filter(email=request.user.email) - status = request.GET.get('status', '') - if status != '': - invites = invites.filter(status__in=status.split(',')) + status = request.GET.get("status", "") + if status != "": + invites = invites.filter(status__in=status.split(",")) else: - invites = invites.filter(status='PENDING') + invites = invites.filter(status="PENDING") serializer = UserInviteSerializer(invites, many=True) return Response(serializer.data) def put(self, request, new_status=None): - lookups = self.generate_lookups(request, many_fields=['id']) + lookups = self.generate_lookups(request, many_fields=["id"]) accept_invite(user=request.user) if new_status is None: - raise ValidationException('Please specify new status for the invites', slug='missing-status') + raise ValidationException("Please specify new status for the invites", slug="missing-status") - if new_status.upper() not in ['ACCEPTED', 'REJECTED']: - raise ValidationException(f'Invalid invite status {new_status}', slug='invalid-status') + if new_status.upper() not in ["ACCEPTED", "REJECTED"]: + raise ValidationException(f"Invalid invite status {new_status}", 
slug="invalid-status") if lookups: - items = UserInvite.objects.filter(**lookups, email=request.user.email, status='PENDING') + items = UserInvite.objects.filter(**lookups, email=request.user.email, status="PENDING") for item in items: @@ -433,15 +453,17 @@ def put(self, request, new_status=None): exists = ProfileAcademy.objects.filter(email=item.email, academy__id=item.academy.id) if exists.count() == 0: - profile_academy = ProfileAcademy(academy=item.academy, - role=item.role, - status='ACTIVE', - email=item.email, - first_name=item.first_name, - last_name=item.last_name) + profile_academy = ProfileAcademy( + academy=item.academy, + role=item.role, + status="ACTIVE", + email=item.email, + first_name=item.first_name, + last_name=item.last_name, + ) profile_academy.save() - if new_status.upper() == 'ACCEPTED' and User.objects.filter(email=item.email).count() == 0: + if new_status.upper() == "ACCEPTED" and User.objects.filter(email=item.email).count() == 0: user = User() user.email = item.email user.username = item.email @@ -451,16 +473,15 @@ def put(self, request, new_status=None): serializer = UserInviteSerializer(item) if serializer.is_valid(): - tasks_activity.add_activity.delay(request.user.id, - 'invite_status_updated', - related_type='auth.UserInvite', - related_id=item.id) + tasks_activity.add_activity.delay( + request.user.id, "invite_status_updated", related_type="auth.UserInvite", related_id=item.id + ) serializer = UserInviteSerializer(items, many=True) return Response(serializer.data, status=status.HTTP_200_OK) else: - raise ValidationException('Invite ids were not provided', code=400, slug='missing-ids') + raise ValidationException("Invite ids were not provided", code=400, slug="missing-ids") class ConfirmEmailView(APIView): @@ -472,21 +493,21 @@ def get(self, request, token=None): invite = UserInvite.objects.filter(token=token).first() if invite is None: - e = ValidationException('Invite not found', code=404, slug='user-invite-not-found') - if request.META.get('HTTP_ACCEPT') == 'application/json': + e = ValidationException("Invite not found", code=404, slug="user-invite-not-found") + if request.META.get("HTTP_ACCEPT") == "application/json": raise e return render_message(request, e.get_message(), status=404) if not invite.email: - errors.append(C('This invite don\'t have email, contact to admin', slug='without-email')) + errors.append(C("This invite don't have email, contact to admin", slug="without-email")) if invite.is_email_validated: - errors.append(C('Email already validated', slug='email-already-validated')) + errors.append(C("Email already validated", slug="email-already-validated")) if errors: e = ValidationException(errors, code=400) - if request.META.get('HTTP_ACCEPT') == 'application/json': + if request.META.get("HTTP_ACCEPT") == "application/json": raise e return render_message(request, e.get_message(), status=400, academy=invite.academy) @@ -494,13 +515,13 @@ def get(self, request, token=None): invite.is_email_validated = True invite.save() - if request.META.get('HTTP_ACCEPT') == 'application/json': + if request.META.get("HTTP_ACCEPT") == "application/json": # If it's JSON, use your serializer and return a JSON response. serializer = UserInviteShortSerializer(invite, many=False) return Response(serializer.data) # If not JSON, return your HTML message. 
- return render_message(request, 'Your email was validated, you can close this page.', academy=invite.academy) + return render_message(request, "Your email was validated, you can close this page.", academy=invite.academy) class ResendInviteView(APIView): @@ -512,40 +533,45 @@ def put(self, request, invite_id=None): invite = UserInvite.objects.filter(id=invite_id).first() if invite is None: - raise ValidationException('Invite not found', code=404, slug='user-invite-not-found') + raise ValidationException("Invite not found", code=404, slug="user-invite-not-found") - invite_answered = invite.status not in ['PENDING', 'WAITING_LIST'] + invite_answered = invite.status not in ["PENDING", "WAITING_LIST"] if invite.is_email_validated and invite_answered: status = invite.status.lower() - errors.append(C(f'You already {status} this invite', slug=f'user-already-{status}')) + errors.append(C(f"You already {status} this invite", slug=f"user-already-{status}")) - if invite.status == 'WAITING_LIST': + if invite.status == "WAITING_LIST": status = invite.status.lower() - errors.append(C('You are in the waiting list, ', slug=f'user-already-{status}')) + errors.append(C("You are in the waiting list, ", slug=f"user-already-{status}")) if not invite.email: status = invite.status.lower() - errors.append(C('This invite don\'t have email, contact to admin', slug='without-email')) + errors.append(C("This invite don't have email, contact to admin", slug="without-email")) now = timezone.now() minutes = 10 if invite.sent_at and invite.sent_at + timedelta(minutes=minutes) > now: errors.append( - C(f'You have a pending invitation sent less than {minutes} minutes ago, check your email', - slug=f'sent-at-diff-less-{minutes}-minutes')) + C( + f"You have a pending invitation sent less than {minutes} minutes ago, check your email", + slug=f"sent-at-diff-less-{minutes}-minutes", + ) + ) if errors: raise ValidationException(errors, code=400) if not invite.is_email_validated and invite_answered: notify_actions.send_email_message( - 'verify_email', - invite.email, { - 'SUBJECT': 'Verify your 4Geeks account', - 'LINK': os.getenv('API_URL', '') + f'/v1/auth/confirmation/{invite.token}', + "verify_email", + invite.email, + { + "SUBJECT": "Verify your 4Geeks account", + "LINK": os.getenv("API_URL", "") + f"/v1/auth/confirmation/{invite.token}", }, - academy=invite.academy) + academy=invite.academy, + ) if not invite_answered: resend_invite(invite.token, invite.email, invite.first_name, academy=invite.academy) @@ -558,15 +584,15 @@ def put(self, request, invite_id=None): class AcademyInviteView(APIView, HeaderLimitOffsetPagination, GenerateLookupsMixin): - @capable_of('read_invite') + @capable_of("read_invite") def get(self, request, academy_id=None, profileacademy_id=None, invite_id=None): if invite_id is not None: - invite = UserInvite.objects.filter(academy__id=academy_id, id=invite_id, status='PENDING').first() + invite = UserInvite.objects.filter(academy__id=academy_id, id=invite_id, status="PENDING").first() if invite is None: - raise ValidationException('No pending invite was found for this user and academy', - code=404, - slug='user-invite-not-found') + raise ValidationException( + "No pending invite was found for this user and academy", code=404, slug="user-invite-not-found" + ) serializer = UserInviteSerializer(invite, many=False) return Response(serializer.data) @@ -574,41 +600,43 @@ def get(self, request, academy_id=None, profileacademy_id=None, invite_id=None): if profileacademy_id is not None: profile = 
ProfileAcademy.objects.filter(academy__id=academy_id, id=profileacademy_id).first() if profile is None: - raise ValidationException('Profile not found', code=404, slug='profile-academy-not-found') + raise ValidationException("Profile not found", code=404, slug="profile-academy-not-found") - invite = UserInvite.objects.filter(academy__id=academy_id, email=profile.email, status='PENDING').first() + invite = UserInvite.objects.filter(academy__id=academy_id, email=profile.email, status="PENDING").first() - if invite is None and profile.status != 'INVITED': - raise ValidationException('No pending invite was found for this user and academy', - code=404, - slug='user-invite-and-profile-academy-with-status-invited-not-found') + if invite is None and profile.status != "INVITED": + raise ValidationException( + "No pending invite was found for this user and academy", + code=404, + slug="user-invite-and-profile-academy-with-status-invited-not-found", + ) # IMPORTANT: both serializers need to include "invite_url" property to have a consistent response if invite is not None: serializer = UserInviteSerializer(invite, many=False) return Response(serializer.data) - if profile.status == 'INVITED': + if profile.status == "INVITED": serializer = GetProfileAcademySerializer(profile, many=False) return Response(serializer.data) invites = UserInvite.objects.filter(academy__id=academy_id) - status = request.GET.get('status', '') - if status != '': - invites = invites.filter(status__in=status.split(',')) + status = request.GET.get("status", "") + if status != "": + invites = invites.filter(status__in=status.split(",")) else: - invites = invites.filter(status='PENDING') + invites = invites.filter(status="PENDING") - if 'role' in self.request.GET: - param = self.request.GET.get('role') + if "role" in self.request.GET: + param = self.request.GET.get("role") invites = invites.filter(role__name__icontains=param) - like = request.GET.get('like', None) + like = request.GET.get("like", None) if like is not None: invites = query_like_by_full_name(like=like, items=invites) - invites = invites.order_by(request.GET.get('sort', '-created_at')) + invites = invites.order_by(request.GET.get("sort", "-created_at")) page = self.paginate_queryset(invites, request) serializer = UserInviteSerializer(page, many=True) @@ -618,9 +646,9 @@ def get(self, request, academy_id=None, profileacademy_id=None, invite_id=None): else: return Response(serializer.data, status=200) - @capable_of('crud_invite') + @capable_of("crud_invite") def delete(self, request, academy_id=None): - lookups = self.generate_lookups(request, many_fields=['id']) + lookups = self.generate_lookups(request, many_fields=["id"]) if lookups: items = UserInvite.objects.filter(**lookups, academy__id=academy_id) @@ -628,55 +656,62 @@ def delete(self, request, academy_id=None): item.delete() return Response(None, status=status.HTTP_204_NO_CONTENT) else: - raise ValidationException('Invite ids were not provided', 404, slug='missing_ids') + raise ValidationException("Invite ids were not provided", 404, slug="missing_ids") - @capable_of('invite_resend') + @capable_of("invite_resend") def put(self, request, invite_id=None, profileacademy_id=None, academy_id=None): invite = None profile_academy = None if invite_id is not None: - invite = UserInvite.objects.filter(academy__id=academy_id, id=invite_id, status='PENDING').first() + invite = UserInvite.objects.filter(academy__id=academy_id, id=invite_id, status="PENDING").first() if invite is None: - raise ValidationException('No pending 
invite was found for this user and academy', - code=404, - slug='user-invite-not-found') + raise ValidationException( + "No pending invite was found for this user and academy", code=404, slug="user-invite-not-found" + ) elif profileacademy_id is not None: profile_academy = ProfileAcademy.objects.filter(id=profileacademy_id).first() if profile_academy is None: - raise ValidationException('Member not found', code=400, slug='profile-academy-not-found') + raise ValidationException("Member not found", code=400, slug="profile-academy-not-found") invite = UserInvite.objects.filter(academy__id=academy_id, email=profile_academy.email).first() - if (invite is None and profile_academy is not None and profile_academy.status == 'INVITED' - and (profile_academy.user.email or invite.email)): - notify_actions.send_email_message('academy_invite', - profile_academy.user.email or invite.email, { - 'subject': f'Invitation to study at {profile_academy.academy.name}', - 'invites': [ProfileAcademySmallSerializer(profile_academy).data], - 'user': UserSmallSerializer(profile_academy.user).data, - 'LINK': os.getenv('API_URL') + '/v1/auth/academy/html/invite', - }, - academy=profile_academy.academy) + if ( + invite is None + and profile_academy is not None + and profile_academy.status == "INVITED" + and (profile_academy.user.email or invite.email) + ): + notify_actions.send_email_message( + "academy_invite", + profile_academy.user.email or invite.email, + { + "subject": f"Invitation to study at {profile_academy.academy.name}", + "invites": [ProfileAcademySmallSerializer(profile_academy).data], + "user": UserSmallSerializer(profile_academy.user).data, + "LINK": os.getenv("API_URL") + "/v1/auth/academy/html/invite", + }, + academy=profile_academy.academy, + ) serializer = GetProfileAcademySerializer(profile_academy) return Response(serializer.data) if invite is None: - raise ValidationException('Invite not found', code=400, slug='user-invite-not-found') + raise ValidationException("Invite not found", code=400, slug="user-invite-not-found") if invite.sent_at is not None: now = timezone.now() minutes_diff = (now - invite.sent_at).total_seconds() / 60.0 if minutes_diff < 2: - raise ValidationException('Impossible to resend invitation', - code=400, - slug='sent-at-diff-less-two-minutes') + raise ValidationException( + "Impossible to resend invitation", code=400, slug="sent-at-diff-less-two-minutes" + ) email = (profile_academy and profile_academy.user and profile_academy.user.email) or invite.email if not email: - raise ValidationException('Impossible to determine the email of user', code=400, slug='without-email') + raise ValidationException("Impossible to determine the email of user", code=400, slug="without-email") resend_invite(invite.token, email, invite.first_name, academy=invite.academy) @@ -687,9 +722,9 @@ def put(self, request, invite_id=None, profileacademy_id=None, academy_id=None): class StudentView(APIView, GenerateLookupsMixin): - extensions = APIViewExtensions(paginate=True, sort='-created_at') + extensions = APIViewExtensions(paginate=True, sort="-created_at") - @capable_of('read_student') + @capable_of("read_student") def get(self, request, academy_id=None, user_id_or_email=None): handler = self.extensions(request) @@ -701,63 +736,67 @@ def get(self, request, academy_id=None, user_id_or_email=None): profile = ProfileAcademy.objects.filter(academy__id=academy_id, user__email=user_id_or_email).first() if profile is None: - raise ValidationException('Profile not found', code=404, 
slug='profile-academy-not-found') + raise ValidationException("Profile not found", code=404, slug="profile-academy-not-found") serializer = GetProfileAcademySerializer(profile, many=False) return Response(serializer.data) - items = ProfileAcademy.objects.filter(role__slug='student', academy__id=academy_id) + items = ProfileAcademy.objects.filter(role__slug="student", academy__id=academy_id) - like = request.GET.get('like', None) + like = request.GET.get("like", None) if like is not None: items = query_like_by_full_name(like=like, items=items) - status = request.GET.get('status', None) + status = request.GET.get("status", None) if status is not None: items = items.filter(status__iexact=status) - cohort = request.GET.get('cohort', None) + cohort = request.GET.get("cohort", None) if cohort is not None: - lookups = self.generate_lookups(request, many_fields=['cohort']) - items = items.filter(user__cohortuser__cohort__slug__in=lookups['cohort__in']) + lookups = self.generate_lookups(request, many_fields=["cohort"]) + items = items.filter(user__cohortuser__cohort__slug__in=lookups["cohort__in"]) items = handler.queryset(items) serializer = GetProfileAcademySmallSerializer(items, many=True) return handler.response(serializer.data) - @capable_of('crud_student') + @capable_of("crud_student") def post(self, request, academy_id=None): - serializer = StudentPOSTSerializer(data=request.data, context={'academy_id': academy_id, 'request': request}) + serializer = StudentPOSTSerializer(data=request.data, context={"academy_id": academy_id, "request": request}) if serializer.is_valid(): serializer.save() return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - @capable_of('crud_student') + @capable_of("crud_student") def put(self, request, academy_id=None, user_id_or_email=None): if not user_id_or_email.isnumeric(): - raise ValidationException('User id must be a numeric value', code=404, slug='user-id-is-not-numeric') + raise ValidationException("User id must be a numeric value", code=404, slug="user-id-is-not-numeric") student = ProfileAcademy.objects.filter(user__id=user_id_or_email, academy__id=academy_id).first() - if student and student.role.slug != 'student': - raise ValidationException(f'This endpoint can only update student profiles (not {student.role.slug})', - code=400, - slug='trying-to-change-a-staff') + if student and student.role.slug != "student": + raise ValidationException( + f"This endpoint can only update student profiles (not {student.role.slug})", + code=400, + slug="trying-to-change-a-staff", + ) - request_data = {**request.data, 'user': user_id_or_email, 'academy': academy_id, 'role': 'student'} - if 'role' in request.data: - raise ValidationException('The student role cannot be updated with this endpoint, user /member instead.', - code=400, - slug='trying-to-change-role') + request_data = {**request.data, "user": user_id_or_email, "academy": academy_id, "role": "student"} + if "role" in request.data: + raise ValidationException( + "The student role cannot be updated with this endpoint, user /member instead.", + code=400, + slug="trying-to-change-role", + ) if not student: - raise ValidationException('The user is not a student in this academy', - code=404, - slug='profile-academy-not-found') + raise ValidationException( + "The user is not a student in this academy", code=404, slug="profile-academy-not-found" + ) serializer = MemberPUTSerializer(student, data=request_data) if serializer.is_valid(): @@ 
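The StudentView.put hunk above guards the payload before it reaches the serializer: staff profiles cannot be edited through this endpoint and a `role` key in the body is rejected outright. A rough sketch of that validation step, using plain exceptions instead of `ValidationException` (helper name and return shape are illustrative only):

```python
def validate_student_update(payload: dict, current_role: str) -> dict:
    """Reject staff edits and role changes before building the PUT data."""
    if current_role != "student":
        raise ValueError(f"This endpoint can only update student profiles (not {current_role})")
    if "role" in payload:
        raise ValueError("The student role cannot be updated with this endpoint, use /member instead")
    # The view also pins "user" and "academy" from the URL; omitted here for brevity.
    return {**payload, "role": "student"}


print(validate_student_update({"first_name": "Ada"}, "student"))
```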
-765,11 +804,11 @@ def put(self, request, academy_id=None, user_id_or_email=None): return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - @capable_of('crud_student') + @capable_of("crud_student") def delete(self, request, academy_id=None, user_id_or_email=None): - raise ValidationException('This functionality is under maintenance and it\'s not working', - code=403, - slug='delete-is-forbidden') + raise ValidationException( + "This functionality is under maintenance and it's not working", code=403, slug="delete-is-forbidden" + ) class LoginView(ObtainAuthToken): @@ -777,31 +816,28 @@ class LoginView(ObtainAuthToken): def post(self, request, *args, **kwargs): - serializer = AuthSerializer(data=request.data, context={'request': request}) + serializer = AuthSerializer(data=request.data, context={"request": request}) serializer.is_valid(raise_exception=True) - user = serializer.validated_data['user'] - token, created = Token.get_or_create(user=user, token_type='login') + user = serializer.validated_data["user"] + token, created = Token.get_or_create(user=user, token_type="login") - tasks_activity.add_activity.delay(user.id, 'login', related_type='auth.User', related_id=user.id) + tasks_activity.add_activity.delay(user.id, "login", related_type="auth.User", related_id=user.id) - return Response({'token': token.key, 'user_id': user.pk, 'email': user.email, 'expires_at': token.expires_at}) + return Response({"token": token.key, "user_id": user.pk, "email": user.email, "expires_at": token.expires_at}) -@api_view(['GET']) +@api_view(["GET"]) @permission_classes([AllowAny]) def get_token_info(request, token): token = Token.objects.filter(key=token).first() if token is None or token.expires_at < timezone.now(): - raise PermissionDenied('Expired or invalid token') + raise PermissionDenied("Expired or invalid token") - return Response({ - 'token': token.key, - 'token_type': token.token_type, - 'expires_at': token.expires_at, - 'user_id': token.user.pk - }) + return Response( + {"token": token.key, "token_type": token.token_type, "expires_at": token.expires_at, "user_id": token.user.pk} + ) class UserMeView(APIView): @@ -810,10 +846,10 @@ def get(self, request, format=None): # TODO: This should be not accessible because this endpoint require auth try: if isinstance(request.user, AnonymousUser): - raise ValidationException('There is not user', slug='without-auth', code=403) + raise ValidationException("There is not user", slug="without-auth", code=403) except User.DoesNotExist: - raise ValidationException('You don\'t have a user', slug='user-not-found', code=403) + raise ValidationException("You don't have a user", slug="user-not-found", code=403) users = UserSerializer(request.user) return Response(users.data) @@ -822,12 +858,12 @@ def put(self, request): # TODO: This should be not accessible because this endpoint require auth try: if isinstance(request.user, AnonymousUser): - raise ValidationException('There is not user', slug='without-auth', code=403) + raise ValidationException("There is not user", slug="without-auth", code=403) except User.DoesNotExist: - raise ValidationException('You don\'t have a user', slug='user-not-found', code=403) + raise ValidationException("You don't have a user", slug="user-not-found", code=403) - serializer = UserMeSerializer(request.user, data=request.data, context={'request': request}) + serializer = UserMeSerializer(request.user, data=request.data, context={"request": request}) if 
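`get_token_info` above rejects unknown or expired tokens before echoing the token metadata back. A self-contained approximation with a dataclass standing in for the real `Token` model (field names mirror the response the view builds; `PermissionError` replaces DRF's `PermissionDenied`):

```python
from dataclasses import dataclass
from datetime import datetime, timedelta, timezone
from typing import Optional


@dataclass
class Token:  # simplified stand-in for the real Token model
    key: str
    token_type: str
    expires_at: datetime
    user_id: int


def token_info(token: Optional[Token]) -> dict:
    """Reject missing or expired tokens, otherwise expose the same fields as the view."""
    if token is None or token.expires_at < datetime.now(timezone.utc):
        raise PermissionError("Expired or invalid token")
    return {
        "token": token.key,
        "token_type": token.token_type,
        "expires_at": token.expires_at,
        "user_id": token.user_id,
    }


t = Token("abc", "login", datetime.now(timezone.utc) + timedelta(hours=1), 42)
print(token_info(t)["token_type"])  # login
```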
serializer.is_valid(): serializer.save() return Response(serializer.data, status=status.HTTP_200_OK) @@ -839,10 +875,10 @@ class UserSettingsView(APIView): def get(self, request, format=None): try: if isinstance(request.user, AnonymousUser): - raise ValidationException('There is not user', slug='without-auth', code=403) + raise ValidationException("There is not user", slug="without-auth", code=403) except User.DoesNotExist: - raise ValidationException('You don\'t have a user', slug='user-not-found', code=403) + raise ValidationException("You don't have a user", slug="user-not-found", code=403) settings = get_user_settings(request.user.id) serializer = SettingsSerializer(settings) @@ -851,14 +887,14 @@ def get(self, request, format=None): def put(self, request): try: if isinstance(request.user, AnonymousUser): - raise ValidationException('Invalid or unauthenticated user', slug='without-auth', code=403) + raise ValidationException("Invalid or unauthenticated user", slug="without-auth", code=403) except User.DoesNotExist: - raise ValidationException('You don\'t have a user', slug='user-not-found', code=403) + raise ValidationException("You don't have a user", slug="user-not-found", code=403) settings, created = UserSetting.objects.get_or_create(user_id=request.user.id) - serializer = UserSettingsSerializer(settings, data=request.data, context={'request': request}) + serializer = UserSettingsSerializer(settings, data=request.data, context={"request": request}) if serializer.is_valid(): serializer.save() return Response(serializer.data, status=status.HTTP_200_OK) @@ -868,7 +904,7 @@ def put(self, request): # Create your views here. -@api_view(['GET']) +@api_view(["GET"]) def get_users(request): query = User.objects.all() @@ -878,24 +914,24 @@ def find_user_by_name(query_name, qs): qs = qs.filter(Q(first_name__icontains=term) | Q(last_name__icontains=term)) return qs - name = request.GET.get('name', None) + name = request.GET.get("name", None) if name is not None: query = find_user_by_name(name, query) - like = request.GET.get('like', None) + like = request.GET.get("like", None) if like is not None: - if '@' in like: + if "@" in like: query = query.filter(Q(email__icontains=like)) else: query = find_user_by_name(like, query) - query = query.exclude(email__contains='@token.com') - query = query.order_by('-date_joined') + query = query.exclude(email__contains="@token.com") + query = query.order_by("-date_joined") users = UserSmallSerializer(query, many=True) return Response(users.data) -@api_view(['GET']) +@api_view(["GET"]) def get_user_by_id_or_email(request, id_or_email): query = None @@ -905,20 +941,20 @@ def get_user_by_id_or_email(request, id_or_email): query = User.objects.filter(email=id_or_email).first() if query is None: - raise ValidationException('User with that id or email does not exists', slug='user-dont-exists', code=404) + raise ValidationException("User with that id or email does not exists", slug="user-dont-exists", code=404) users = UserSmallSerializer(query, many=False) return Response(users.data) -@api_view(['GET']) +@api_view(["GET"]) @permission_classes([AllowAny]) def get_roles(request, role_slug=None): if role_slug is not None: role = Role.objects.filter(slug=role_slug).first() if role is None: - raise ValidationException('Role not found', code=404) + raise ValidationException("Role not found", code=404) serializer = RoleBigSerializer(role) return Response(serializer.data) @@ -931,36 +967,36 @@ def get_roles(request, role_slug=None): # Create your views here. 
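The `get_users` hunk above switches between an email match and a name match depending on whether the `like` term contains an `@`, and the name search appears to require every whitespace-separated term to hit either name field. A sketch of that behaviour over an in-memory list (the per-term AND semantics are an assumption read from the `find_user_by_name` helper):

```python
from typing import Dict, Iterable, List


def search_users(users: Iterable[Dict], like: str) -> List[Dict]:
    """Search by email when the term looks like one, otherwise by name terms."""
    if "@" in like:
        return [u for u in users if like.lower() in u["email"].lower()]

    # Every whitespace-separated term must match either name field (icontains).
    terms = like.lower().split()
    return [
        u
        for u in users
        if all(t in u["first_name"].lower() or t in u["last_name"].lower() for t in terms)
    ]


users = [
    {"first_name": "Ada", "last_name": "Lovelace", "email": "ada@example.com"},
    {"first_name": "Alan", "last_name": "Turing", "email": "alan@example.com"},
]
print(search_users(users, "ada love"))   # matches Ada
print(search_users(users, "@example"))   # matches both by email
```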
-@api_view(['GET']) +@api_view(["GET"]) @permission_classes([AllowAny]) def get_github_token(request, token=None): - url = request.query_params.get('url', None) + url = request.query_params.get("url", None) if url == None: - raise ValidationException('No callback URL specified', slug='no-callback-url') + raise ValidationException("No callback URL specified", slug="no-callback-url") if token is not None: if Token.get_valid(token) is None: - raise ValidationException('Invalid or missing token', slug='invalid-token') + raise ValidationException("Invalid or missing token", slug="invalid-token") else: - url = url + f'&user={token}' + url = url + f"&user={token}" - scope = request.query_params.get('scope', 'user repo read:org admin:org') + scope = request.query_params.get("scope", "user repo read:org admin:org") try: - scope = base64.b64decode(scope.encode('utf-8')).decode('utf-8') + scope = base64.b64decode(scope.encode("utf-8")).decode("utf-8") except Exception: pass params = { - 'client_id': os.getenv('GITHUB_CLIENT_ID', ''), - 'redirect_uri': os.getenv('GITHUB_REDIRECT_URL', '') + f'?url={url}', - 'scope': scope, + "client_id": os.getenv("GITHUB_CLIENT_ID", ""), + "redirect_uri": os.getenv("GITHUB_REDIRECT_URL", "") + f"?url={url}", + "scope": scope, } - logger.debug('Redirecting to github') + logger.debug("Redirecting to github") logger.debug(params) - redirect = f'https://github.com/login/oauth/authorize?{urlencode(params)}' + redirect = f"https://github.com/login/oauth/authorize?{urlencode(params)}" if settings.DEBUG: return HttpResponse(f"Redirect to: <a href='{redirect}'>{redirect}</a>") @@ -971,82 +1007,84 @@ def get_github_token(request, token=None): # Create your views here. -@api_view(['GET']) +@api_view(["GET"]) @permission_classes([AllowAny]) def save_github_token(request): - logger.debug('Github callback just landed') + logger.debug("Github callback just landed") logger.debug(request.query_params) - error = request.query_params.get('error', False) - error_description = request.query_params.get('error_description', '') + error = request.query_params.get("error", False) + error_description = request.query_params.get("error_description", "") if error: - raise APIException('Github: ' + error_description) + raise APIException("Github: " + error_description) - url = request.query_params.get('url', None) + url = request.query_params.get("url", None) if url == None: - raise ValidationException('No callback URL specified', slug='no-callback-url') + raise ValidationException("No callback URL specified", slug="no-callback-url") # the url may or may not be encoded try: - url = base64.b64decode(url.encode('utf-8')).decode('utf-8') + url = base64.b64decode(url.encode("utf-8")).decode("utf-8") except Exception: pass - code = request.query_params.get('code', None) + code = request.query_params.get("code", None) if code == None: - raise ValidationException('No github code specified', slug='no-code') + raise ValidationException("No github code specified", slug="no-code") - token = request.query_params.get('user', None) + token = request.query_params.get("user", None) payload = { - 'client_id': os.getenv('GITHUB_CLIENT_ID', ''), - 'client_secret': os.getenv('GITHUB_SECRET', ''), - 'redirect_uri': os.getenv('GITHUB_REDIRECT_URL', ''), - 'code': code, + "client_id": os.getenv("GITHUB_CLIENT_ID", ""), + "client_secret": os.getenv("GITHUB_SECRET", ""), + "redirect_uri": os.getenv("GITHUB_REDIRECT_URL", ""), + "code": code, } - headers = {'Accept': 'application/json'} - resp = 
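`get_github_token` above assembles the GitHub authorize redirect: the requested scope may arrive base64-encoded, and the callback URL is tucked into the `redirect_uri`. A minimal sketch of that construction using only the standard library (environment variable names copied from the view; error handling omitted):

```python
import base64
import os
from urllib.parse import urlencode


def github_authorize_url(callback_url: str, scope: str = "user repo read:org admin:org") -> str:
    """Build the GitHub OAuth authorize redirect, mirroring the view's parameters."""
    # The scope may arrive base64-encoded; fall back to the raw value if it does not decode.
    try:
        scope = base64.b64decode(scope.encode("utf-8")).decode("utf-8")
    except Exception:
        pass

    params = {
        "client_id": os.getenv("GITHUB_CLIENT_ID", ""),
        "redirect_uri": os.getenv("GITHUB_REDIRECT_URL", "") + f"?url={callback_url}",
        "scope": scope,
    }
    return "https://github.com/login/oauth/authorize?" + urlencode(params)


print(github_authorize_url("https://4geeks.com/login"))
```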
requests.post('https://github.com/login/oauth/access_token', data=payload, headers=headers, timeout=2) + headers = {"Accept": "application/json"} + resp = requests.post("https://github.com/login/oauth/access_token", data=payload, headers=headers, timeout=2) if resp.status_code == 200: - logger.debug('Github responded with 200') + logger.debug("Github responded with 200") body = resp.json() - if 'access_token' not in body: - raise APIException(body['error_description']) + if "access_token" not in body: + raise APIException(body["error_description"]) - github_token = body['access_token'] - resp = requests.get('https://api.github.com/user', - headers={'Authorization': 'token ' + github_token}, - timeout=2) + github_token = body["access_token"] + resp = requests.get( + "https://api.github.com/user", headers={"Authorization": "token " + github_token}, timeout=2 + ) if resp.status_code == 200: github_user = resp.json() logger.debug(github_user) - if github_user['email'] is None: - resp = requests.get('https://api.github.com/user/emails', - headers={'Authorization': 'token ' + github_token}, - timeout=2) + if github_user["email"] is None: + resp = requests.get( + "https://api.github.com/user/emails", headers={"Authorization": "token " + github_token}, timeout=2 + ) if resp.status_code == 200: emails = resp.json() - primary_emails = [x for x in emails if x['primary'] == True] + primary_emails = [x for x in emails if x["primary"] == True] if len(primary_emails) > 0: - github_user['email'] = primary_emails[0]['email'] + github_user["email"] = primary_emails[0]["email"] elif len(emails) > 0: - github_user['email'] = emails[0]['email'] + github_user["email"] = emails[0]["email"] - if github_user['email'] is None: - raise ValidationError('Impossible to retrieve user email') + if github_user["email"] is None: + raise ValidationError("Impossible to retrieve user email") user = None # assuming by default that its a new user # is a valid token??? 
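The callback hunk above exchanges the OAuth `code` for an access token and then falls back to `GET /user/emails` when the profile hides its email. A condensed sketch of that flow with `requests` (the `raise_for_status` call and the helper name are additions, not what the view does):

```python
import os

import requests


def exchange_github_code(code: str) -> dict:
    """Swap the OAuth code for a token, then resolve the user's primary email."""
    resp = requests.post(
        "https://github.com/login/oauth/access_token",
        data={
            "client_id": os.getenv("GITHUB_CLIENT_ID", ""),
            "client_secret": os.getenv("GITHUB_SECRET", ""),
            "redirect_uri": os.getenv("GITHUB_REDIRECT_URL", ""),
            "code": code,
        },
        headers={"Accept": "application/json"},
        timeout=2,
    )
    resp.raise_for_status()
    body = resp.json()
    if "access_token" not in body:
        raise RuntimeError(body.get("error_description", "GitHub did not return a token"))

    auth = {"Authorization": "token " + body["access_token"]}
    user = requests.get("https://api.github.com/user", headers=auth, timeout=2).json()

    # Accounts with a private email return null here, so ask the emails endpoint instead.
    if user.get("email") is None:
        emails = requests.get("https://api.github.com/user/emails", headers=auth, timeout=2).json()
        primary = [e for e in emails if e.get("primary")]
        user["email"] = (primary or emails)[0]["email"] if emails else None

    return user
```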
if not valid it will become None - if token is not None and token != '': + if token is not None and token != "": token = Token.get_valid(token) if not token: - logger.debug('Token not found or is expired') - raise ValidationException('Token was not found or is expired, please use a different token', - code=404, - slug='token-not-found') + logger.debug("Token not found or is expired") + raise ValidationException( + "Token was not found or is expired, please use a different token", + code=404, + slug="token-not-found", + ) user = User.objects.filter(auth_token=token.id).first() else: # for the token to become null for easier management @@ -1054,36 +1092,40 @@ def save_github_token(request): # user can't be found thru token, lets try thru the github credentials if token is None and user is None: - user = User.objects.filter(credentialsgithub__github_id=github_user['id']).first() + user = User.objects.filter(credentialsgithub__github_id=github_user["id"]).first() if user is None: - user = User.objects.filter(email__iexact=github_user['email'], - credentialsgithub__isnull=True).first() + user = User.objects.filter( + email__iexact=github_user["email"], credentialsgithub__isnull=True + ).first() user_does_not_exists = user is None if user_does_not_exists: - invite = UserInvite.objects.filter(status='WAITING_LIST', email=github_user['email']).first() + invite = UserInvite.objects.filter(status="WAITING_LIST", email=github_user["email"]).first() if user_does_not_exists and invite: - if url is None or url == '': + if url is None or url == "": url = get_app_url() return render_message( - request, f'You are still number {invite.id} on the waiting list, we will email you once you are ' + request, + f"You are still number {invite.id} on the waiting list, we will email you once you are " f'given access <a href="{url}">Back to 4Geeks.com</a>', - academy=invite.academy) + academy=invite.academy, + ) if user_does_not_exists: academy = None if invite: academy = invite.academy - return render_message(request, - 'We could not find in our records the email associated to this github account, ' - 'perhaps you want to signup to the platform first? <a href="' + url + - '">Back to 4Geeks.com</a>', - academy=academy) + return render_message( + request, + "We could not find in our records the email associated to this github account, " + 'perhaps you want to signup to the platform first? 
<a href="' + url + '">Back to 4Geeks.com</a>', + academy=academy, + ) - github_credentials = CredentialsGithub.objects.filter(github_id=github_user['id']).first() + github_credentials = CredentialsGithub.objects.filter(github_id=github_user["id"]).first() # update latest credentials if the user.id doesn't match if github_credentials and github_credentials.user.id != user.id: @@ -1092,17 +1134,17 @@ def save_github_token(request): # create a new credentials if it doesn't exists if github_credentials is None: - github_credentials = CredentialsGithub(github_id=github_user['id'], user=user) + github_credentials = CredentialsGithub(github_id=github_user["id"], user=user) github_credentials.token = github_token - github_credentials.username = github_user['login'] - github_credentials.email = github_user['email'].lower() - github_credentials.avatar_url = github_user['avatar_url'] - github_credentials.name = github_user['name'] - github_credentials.blog = github_user['blog'] - github_credentials.bio = github_user['bio'] - github_credentials.company = github_user['company'] - github_credentials.twitter_username = github_user['twitter_username'] + github_credentials.username = github_user["login"] + github_credentials.email = github_user["email"].lower() + github_credentials.avatar_url = github_user["avatar_url"] + github_credentials.name = github_user["name"] + github_credentials.blog = github_user["blog"] + github_credentials.bio = github_user["bio"] + github_credentials.company = github_user["company"] + github_credentials.twitter_username = github_user["twitter_username"] github_credentials.save() # IMPORTANT! The GithubAcademyUser.username is used for billing purposes on the provisioning activity, we have @@ -1111,92 +1153,111 @@ def save_github_token(request): profile = Profile.objects.filter(user=user).first() if profile is None: - profile = Profile(user=user, - avatar_url=github_user['avatar_url'], - blog=github_user['blog'], - bio=github_user['bio'], - twitter_username=github_user['twitter_username']) + profile = Profile( + user=user, + avatar_url=github_user["avatar_url"], + blog=github_user["blog"], + bio=github_user["bio"], + twitter_username=github_user["twitter_username"], + ) profile.save() if not profile.avatar_url: - profile.avatar_url = github_user['avatar_url'] + profile.avatar_url = github_user["avatar_url"] profile.save() - student_role = Role.objects.get(slug='student') - cus = CohortUser.objects.filter(user=user, role='STUDENT') + student_role = Role.objects.get(slug="student") + cus = CohortUser.objects.filter(user=user, role="STUDENT") for cu in cus: profile_academy = ProfileAcademy.objects.filter(user=cu.user, academy=cu.cohort.academy).first() if profile_academy is None: - profile_academy = ProfileAcademy(user=cu.user, - academy=cu.cohort.academy, - role=student_role, - email=cu.user.email, - first_name=cu.user.first_name, - last_name=cu.user.last_name, - status='ACTIVE') + profile_academy = ProfileAcademy( + user=cu.user, + academy=cu.cohort.academy, + role=student_role, + email=cu.user.email, + first_name=cu.user.first_name, + last_name=cu.user.last_name, + status="ACTIVE", + ) profile_academy.save() if not token: - token, created = Token.get_or_create(user=user, token_type='login') + token, created = Token.get_or_create(user=user, token_type="login") - #register user in rigobot - rigobot_payload = {'organization': '4geeks', 'user_token': token.key} - headers = {'Content-Type': 'application/json'} - rigobot_resp = 
requests.post('https://rigobot.herokuapp.com/v1/auth/invite', - timeout=2, - headers=headers, - json=rigobot_payload) + # register user in rigobot + rigobot_payload = {"organization": "4geeks", "user_token": token.key} + headers = {"Content-Type": "application/json"} + rigobot_resp = requests.post( + "https://rigobot.herokuapp.com/v1/auth/invite", timeout=2, headers=headers, json=rigobot_payload + ) if rigobot_resp.status_code == 200: - logger.debug('User registered on rigobot') + logger.debug("User registered on rigobot") else: - logger.error('Failed user registration on rigobot') + logger.error("Failed user registration on rigobot") - return HttpResponseRedirect(redirect_to=url + '?token=' + token.key) + return HttpResponseRedirect(redirect_to=url + "?token=" + token.key) else: - raise APIException('Error from github') + raise APIException("Error from github") # Create your views here. -@api_view(['GET']) +@api_view(["GET"]) @permission_classes([AllowAny]) def get_slack_token(request): """Generate stack redirect url for authorize.""" - url = request.query_params.get('url', None) + url = request.query_params.get("url", None) if url is None: - raise ValidationError('No callback URL specified') + raise ValidationError("No callback URL specified") # the url may or may not be encoded try: - url = base64.b64decode(url.encode('utf-8')).decode('utf-8') + url = base64.b64decode(url.encode("utf-8")).decode("utf-8") except Exception: pass - user_id = request.query_params.get('user', None) + user_id = request.query_params.get("user", None) if user_id is None: - raise ValidationError('No user specified on the URL') + raise ValidationError("No user specified on the URL") - academy = request.query_params.get('a', None) + academy = request.query_params.get("a", None) if academy is None: - raise ValidationError('No academy specified on the URL') + raise ValidationError("No academy specified on the URL") # Missing scopes!! admin.invites:write, identify - scopes = ('app_mentions:read', 'channels:history', 'channels:join', 'channels:read', 'chat:write', - 'chat:write.customize', 'commands', 'files:read', 'files:write', 'groups:history', 'groups:read', - 'groups:write', 'incoming-webhook', 'team:read', 'users:read', 'users:read.email', 'users.profile:read', - 'users:read') + scopes = ( + "app_mentions:read", + "channels:history", + "channels:join", + "channels:read", + "chat:write", + "chat:write.customize", + "commands", + "files:read", + "files:write", + "groups:history", + "groups:read", + "groups:write", + "incoming-webhook", + "team:read", + "users:read", + "users:read.email", + "users.profile:read", + "users:read", + ) - query_string = f'a={academy}&url={url}&user={user_id}'.encode('utf-8') - payload = str(base64.urlsafe_b64encode(query_string), 'utf-8') + query_string = f"a={academy}&url={url}&user={user_id}".encode("utf-8") + payload = str(base64.urlsafe_b64encode(query_string), "utf-8") params = { - 'client_id': os.getenv('SLACK_CLIENT_ID', ''), - 'redirect_uri': os.getenv('SLACK_REDIRECT_URL', '') + '?payload=' + payload, - 'scope': ','.join(scopes) + "client_id": os.getenv("SLACK_CLIENT_ID", ""), + "redirect_uri": os.getenv("SLACK_REDIRECT_URL", "") + "?payload=" + payload, + "scope": ",".join(scopes), } - redirect = 'https://slack.com/oauth/v2/authorize?' + redirect = "https://slack.com/oauth/v2/authorize?" 
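After the credentials and profile bookkeeping, the hunk above registers the user on Rigobot and only logs the outcome. A small stand-alone version of that call (same URL, payload and timeout as the diff; the boolean return is ours):

```python
import logging

import requests

logger = logging.getLogger(__name__)


def register_on_rigobot(user_token: str, organization: str = "4geeks") -> bool:
    """Invite the freshly logged-in user into Rigobot, logging success or failure."""
    resp = requests.post(
        "https://rigobot.herokuapp.com/v1/auth/invite",
        json={"organization": organization, "user_token": user_token},
        headers={"Content-Type": "application/json"},
        timeout=2,
    )
    if resp.status_code == 200:
        logger.debug("User registered on rigobot")
        return True
    logger.error("Failed user registration on rigobot")
    return False
```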
for key in params: - redirect += f'{key}={params[key]}&' + redirect += f"{key}={params[key]}&" if settings.DEBUG: return HttpResponse(f"Redirect to: <a href='{redirect}'>{redirect}</a>") @@ -1207,140 +1268,141 @@ def get_slack_token(request): # Create your views here. -@api_view(['GET']) +@api_view(["GET"]) @permission_classes([AllowAny]) def save_slack_token(request): """Get Slack token and redirect to authorization route.""" - logger.debug('Slack callback just landed') + logger.debug("Slack callback just landed") - error = request.query_params.get('error', False) - error_description = request.query_params.get('error_description', '') + error = request.query_params.get("error", False) + error_description = request.query_params.get("error_description", "") if error: - raise APIException('Slack: ' + error_description) + raise APIException("Slack: " + error_description) - original_payload = request.query_params.get('payload', None) - payload = request.query_params.get('payload', None) + original_payload = request.query_params.get("payload", None) + payload = request.query_params.get("payload", None) if payload is None: - raise ValidationError('No payload specified') + raise ValidationError("No payload specified") else: try: - payload = base64.b64decode(payload).decode('utf-8') + payload = base64.b64decode(payload).decode("utf-8") payload = parse_qs(payload) except Exception: - raise ValidationError('Cannot decode payload in base64') + raise ValidationError("Cannot decode payload in base64") - if 'url' not in payload: + if "url" not in payload: logger.exception(payload) - raise ValidationError('No url specified from the slack payload') + raise ValidationError("No url specified from the slack payload") - if 'user' not in payload: + if "user" not in payload: logger.exception(payload) - raise ValidationError('No user id specified from the slack payload') + raise ValidationError("No user id specified from the slack payload") - if 'a' not in payload: + if "a" not in payload: logger.exception(payload) - raise ValidationError('No academy id specified from the slack payload') + raise ValidationError("No academy id specified from the slack payload") try: - academy = Academy.objects.get(id=payload['a'][0]) + academy = Academy.objects.get(id=payload["a"][0]) except Exception as e: - raise ValidationError('Not exist academy with that id') from e + raise ValidationError("Not exist academy with that id") from e user = None try: - user = User.objects.get(id=payload['user'][0]) + user = User.objects.get(id=payload["user"][0]) except Exception as e: - raise ValidationError('Not exist user with that id') from e + raise ValidationError("Not exist user with that id") from e - code = request.query_params.get('code', None) + code = request.query_params.get("code", None) if code is None: - raise ValidationError('No slack code specified') + raise ValidationError("No slack code specified") params = { - 'client_id': os.getenv('SLACK_CLIENT_ID', ''), - 'client_secret': os.getenv('SLACK_SECRET', ''), - 'redirect_uri': os.getenv('SLACK_REDIRECT_URL', '') + '?payload=' + original_payload, - 'code': code, + "client_id": os.getenv("SLACK_CLIENT_ID", ""), + "client_secret": os.getenv("SLACK_SECRET", ""), + "redirect_uri": os.getenv("SLACK_REDIRECT_URL", "") + "?payload=" + original_payload, + "code": code, } - resp = requests.post('https://slack.com/api/oauth.v2.access', data=params, timeout=2) + resp = requests.post("https://slack.com/api/oauth.v2.access", data=params, timeout=2) if resp.status_code == 200: - 
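`get_slack_token` above packs the academy, callback URL and user id into a url-safe base64 `payload` that rides along the OAuth redirect. A sketch of that packing, with a trimmed scope list and `urlencode` in place of the view's manual `key=value&` concatenation:

```python
import base64
import os
from urllib.parse import urlencode

SLACK_SCOPES = ("channels:read", "chat:write", "users:read")  # trimmed for the example


def slack_authorize_url(academy_id: int, callback_url: str, user_id: int) -> str:
    """Pack the context into a url-safe base64 payload and build the authorize URL."""
    query_string = f"a={academy_id}&url={callback_url}&user={user_id}".encode("utf-8")
    payload = base64.urlsafe_b64encode(query_string).decode("utf-8")

    params = {
        "client_id": os.getenv("SLACK_CLIENT_ID", ""),
        "redirect_uri": os.getenv("SLACK_REDIRECT_URL", "") + "?payload=" + payload,
        "scope": ",".join(SLACK_SCOPES),
    }
    return "https://slack.com/oauth/v2/authorize?" + urlencode(params)


print(slack_authorize_url(1, "https://4geeks.com", 42))
```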
logger.debug('Slack responded with 200') + logger.debug("Slack responded with 200") slack_data = resp.json() - if 'access_token' not in slack_data: - raise APIException('Slack error status: ' + slack_data['error']) + if "access_token" not in slack_data: + raise APIException("Slack error status: " + slack_data["error"]) slack_data = resp.json() logger.debug(slack_data) # delete all previous credentials for the same team and cohort - CredentialsSlack.objects.filter(app_id=slack_data['app_id'], team_id=slack_data['team']['id'], - user__id=user.id).delete() + CredentialsSlack.objects.filter( + app_id=slack_data["app_id"], team_id=slack_data["team"]["id"], user__id=user.id + ).delete() credentials = CredentialsSlack( user=user, - app_id=slack_data['app_id'], - bot_user_id=slack_data['bot_user_id'], - token=slack_data['access_token'], - team_id=slack_data['team']['id'], - team_name=slack_data['team']['name'], - authed_user=slack_data['authed_user']['id'], + app_id=slack_data["app_id"], + bot_user_id=slack_data["bot_user_id"], + token=slack_data["access_token"], + team_id=slack_data["team"]["id"], + team_name=slack_data["team"]["name"], + authed_user=slack_data["authed_user"]["id"], ) credentials.save() - team = SlackTeam.objects.filter(academy__id=academy.id, slack_id=slack_data['team']['id']).first() + team = SlackTeam.objects.filter(academy__id=academy.id, slack_id=slack_data["team"]["id"]).first() if team is None: - team = SlackTeam(slack_id=slack_data['team']['id'], owner=user, academy=academy) + team = SlackTeam(slack_id=slack_data["team"]["id"], owner=user, academy=academy) - team.name = slack_data['team']['name'] + team.name = slack_data["team"]["name"] team.save() - return HttpResponseRedirect(redirect_to=payload['url'][0]) + return HttpResponseRedirect(redirect_to=payload["url"][0]) # Create your views here. -@api_view(['GET']) +@api_view(["GET"]) @permission_classes([AllowAny]) def get_facebook_token(request): """Generate stack redirect url for authorize.""" - url = request.query_params.get('url', None) + url = request.query_params.get("url", None) if url is None: - raise ValidationError('No callback URL specified') + raise ValidationError("No callback URL specified") # the url may or may not be encoded try: - url = base64.b64decode(url.encode('utf-8')).decode('utf-8') + url = base64.b64decode(url.encode("utf-8")).decode("utf-8") except Exception: pass - user_id = request.query_params.get('user', None) + user_id = request.query_params.get("user", None) if user_id is None: - raise ValidationError('No user specified on the URL') + raise ValidationError("No user specified on the URL") - academy = request.query_params.get('a', None) + academy = request.query_params.get("a", None) if academy is None: - raise ValidationError('No academy specified on the URL') + raise ValidationError("No academy specified on the URL") # Missing scopes!! 
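`save_slack_token` above reverses that packing: base64-decode the payload, run it through `parse_qs`, and fail loudly when `url`, `user` or `a` are missing. A hedged sketch of the decode step (the flattened return dict is our own convenience; the view keeps the `parse_qs` lists):

```python
import base64
from urllib.parse import parse_qs


def decode_state_payload(payload: str) -> dict:
    """Decode the base64 querystring carried through the OAuth redirect and validate it."""
    try:
        decoded = base64.b64decode(payload).decode("utf-8")
        data = parse_qs(decoded)
    except Exception as exc:
        raise ValueError("Cannot decode payload in base64") from exc

    for key in ("url", "user", "a"):
        if key not in data:
            raise ValueError(f"No {key} specified from the slack payload")

    # parse_qs returns lists; the view always reads element [0].
    return {"url": data["url"][0], "user_id": data["user"][0], "academy_id": data["a"][0]}


packed = base64.b64encode(b"a=1&url=https://4geeks.com&user=42").decode("utf-8")
print(decode_state_payload(packed))
```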
admin.invites:write, identify scopes = ( - 'email', - 'ads_read', - 'business_management', - 'leads_retrieval', - 'pages_manage_metadata', - 'pages_read_engagement', + "email", + "ads_read", + "business_management", + "leads_retrieval", + "pages_manage_metadata", + "pages_read_engagement", ) - query_string = f'a={academy}&url={url}&user={user_id}'.encode('utf-8') - payload = str(base64.urlsafe_b64encode(query_string), 'utf-8') + query_string = f"a={academy}&url={url}&user={user_id}".encode("utf-8") + payload = str(base64.urlsafe_b64encode(query_string), "utf-8") params = { - 'client_id': os.getenv('FACEBOOK_CLIENT_ID', ''), - 'redirect_uri': os.getenv('FACEBOOK_REDIRECT_URL', ''), - 'scope': ','.join(scopes), - 'state': payload + "client_id": os.getenv("FACEBOOK_CLIENT_ID", ""), + "redirect_uri": os.getenv("FACEBOOK_REDIRECT_URL", ""), + "scope": ",".join(scopes), + "state": payload, } - redirect = 'https://www.facebook.com/v8.0/dialog/oauth?' + redirect = "https://www.facebook.com/v8.0/dialog/oauth?" for key in params: - redirect += f'{key}={params[key]}&' + redirect += f"{key}={params[key]}&" if settings.DEBUG: return HttpResponse(f"Redirect to: <a href='{redirect}'>{redirect}</a>") @@ -1349,189 +1411,190 @@ def get_facebook_token(request): # Create your views here. -@api_view(['GET']) +@api_view(["GET"]) @permission_classes([AllowAny]) def save_facebook_token(request): """Save facebook token.""" - logger.debug('Facebook callback just landed') - error = request.query_params.get('error_code', False) - error_description = request.query_params.get('error_message', '') + logger.debug("Facebook callback just landed") + error = request.query_params.get("error_code", False) + error_description = request.query_params.get("error_message", "") if error: - raise APIException('Facebook: ' + error_description) + raise APIException("Facebook: " + error_description) - payload = request.query_params.get('state', None) + payload = request.query_params.get("state", None) if payload is None: - raise ValidationError('No payload specified') + raise ValidationError("No payload specified") else: try: - payload = base64.b64decode(payload).decode('utf-8') + payload = base64.b64decode(payload).decode("utf-8") payload = parse_qs(payload) except Exception: - raise ValidationError('Cannot decode payload in base64') + raise ValidationError("Cannot decode payload in base64") - if 'url' not in payload: + if "url" not in payload: logger.exception(payload) - raise ValidationError('No url specified from the slack payload') + raise ValidationError("No url specified from the slack payload") - if 'user' not in payload: + if "user" not in payload: logger.exception(payload) - raise ValidationError('No user id specified from the slack payload') + raise ValidationError("No user id specified from the slack payload") - if 'a' not in payload: + if "a" not in payload: logger.exception(payload) - raise ValidationError('No academy id specified from the slack payload') + raise ValidationError("No academy id specified from the slack payload") try: - academy = Academy.objects.get(id=payload['a'][0]) + academy = Academy.objects.get(id=payload["a"][0]) except Exception as e: - raise ValidationError('Not exist academy with that id') from e + raise ValidationError("Not exist academy with that id") from e try: - user = User.objects.get(id=payload['user'][0]) + user = User.objects.get(id=payload["user"][0]) except Exception as e: - raise ValidationError('Not exist user with that id') from e + raise ValidationError("Not exist user with that 
id") from e # token = request.query_params.get('token', None) # if token == None: # raise ValidationError("No facebook token specified") - code = request.query_params.get('code', None) + code = request.query_params.get("code", None) if code is None: - raise ValidationError('No slack code specified') + raise ValidationError("No slack code specified") params = { - 'client_id': os.getenv('FACEBOOK_CLIENT_ID', ''), - 'client_secret': os.getenv('FACEBOOK_SECRET', ''), - 'redirect_uri': os.getenv('FACEBOOK_REDIRECT_URL', ''), - 'code': code, + "client_id": os.getenv("FACEBOOK_CLIENT_ID", ""), + "client_secret": os.getenv("FACEBOOK_SECRET", ""), + "redirect_uri": os.getenv("FACEBOOK_REDIRECT_URL", ""), + "code": code, } - resp = requests.post('https://graph.facebook.com/v8.0/oauth/access_token', data=params, timeout=2) + resp = requests.post("https://graph.facebook.com/v8.0/oauth/access_token", data=params, timeout=2) if resp.status_code == 200: - logger.debug('Facebook responded with 200') + logger.debug("Facebook responded with 200") facebook_data = resp.json() - if 'access_token' not in facebook_data: - logger.debug('Facebook response body') + if "access_token" not in facebook_data: + logger.debug("Facebook response body") logger.debug(facebook_data) - raise APIException('Facebook error status: ' + facebook_data['error_message']) + raise APIException("Facebook error status: " + facebook_data["error_message"]) # delete all previous credentials for the same team CredentialsFacebook.objects.filter(user_id=user.id).delete() utc_now = timezone.now() - expires_at = utc_now + \ - timezone.timedelta(milliseconds=facebook_data['expires_in']) + expires_at = utc_now + timezone.timedelta(milliseconds=facebook_data["expires_in"]) credentials = CredentialsFacebook( user=user, academy=academy, expires_at=expires_at, - token=facebook_data['access_token'], + token=facebook_data["access_token"], ) credentials.save() params = { - 'access_token': facebook_data['access_token'], - 'fields': 'id,email', + "access_token": facebook_data["access_token"], + "fields": "id,email", } - resp = requests.post('https://graph.facebook.com/me', data=params, timeout=2) + resp = requests.post("https://graph.facebook.com/me", data=params, timeout=2) if resp.status_code == 200: - logger.debug('Facebook responded with 200') + logger.debug("Facebook responded with 200") facebook_data = resp.json() - if 'email' in facebook_data: - credentials.email = facebook_data['email'] - if 'id' in facebook_data: - credentials.facebook_id = facebook_data['id'] + if "email" in facebook_data: + credentials.email = facebook_data["email"] + if "id" in facebook_data: + credentials.facebook_id = facebook_data["id"] credentials.save() - return HttpResponseRedirect(redirect_to=payload['url'][0]) + return HttpResponseRedirect(redirect_to=payload["url"][0]) def change_password(request, token): - if request.method == 'POST': + if request.method == "POST": form = PasswordChangeCustomForm(request.user, request.POST) if form.is_valid(): user = form.save() update_session_auth_hash(request, user) # Important! 
- messages.success(request, 'Your password was successfully updated!') - return redirect('change_password') + messages.success(request, "Your password was successfully updated!") + return redirect("change_password") else: - messages.error(request, 'Please correct the error below.') + messages.error(request, "Please correct the error below.") else: form = PasswordChangeCustomForm(request.user) - return render(request, 'form.html', {'form': form}) + return render(request, "form.html", {"form": form}) class TokenTemporalView(APIView): - @capable_of('generate_temporal_token') + @capable_of("generate_temporal_token") def post(self, request, profile_academy_id=None, academy_id=None): profile_academy = ProfileAcademy.objects.filter(id=profile_academy_id).first() if profile_academy is None: - raise ValidationException('Member not found', code=404, slug='member-not-found') + raise ValidationException("Member not found", code=404, slug="member-not-found") - token, created = Token.get_or_create(user=profile_academy.user, token_type='temporal') + token, created = Token.get_or_create(user=profile_academy.user, token_type="temporal") serializer = TokenSmallSerializer(token) return Response(serializer.data) def sync_gitpod_users_view(request): - if request.method == 'POST': + if request.method == "POST": _dict = request.POST.copy() form = SyncGithubUsersForm(_dict) - if 'html' not in _dict or _dict['html'] == '': - messages.error(request, 'HTML string is required') - return render(request, 'form.html', {'form': form}) + if "html" not in _dict or _dict["html"] == "": + messages.error(request, "HTML string is required") + return render(request, "form.html", {"form": form}) try: - all_usernames = update_gitpod_users(_dict['html']) + all_usernames = update_gitpod_users(_dict["html"]) return render( - request, 'message.html', { - 'MESSAGE': - f'{len(all_usernames["active"])} active and {len(all_usernames["inactive"])} inactive users found' - }) + request, + "message.html", + { + "MESSAGE": f'{len(all_usernames["active"])} active and {len(all_usernames["inactive"])} inactive users found' + }, + ) except Exception as e: return render_message(request, str(e)) else: form = SyncGithubUsersForm() - return render(request, 'form.html', {'form': form}) + return render(request, "form.html", {"form": form}) def reset_password_view(request): - if request.method == 'POST': + if request.method == "POST": _dict = request.POST.copy() form = PickPasswordForm(_dict) - if 'email' not in _dict or _dict['email'] == '': - messages.error(request, 'Email is required') - return render(request, 'form.html', {'form': form}) + if "email" not in _dict or _dict["email"] == "": + messages.error(request, "Email is required") + return render(request, "form.html", {"form": form}) - users = User.objects.filter(email__iexact=_dict['email']) - if (users.count() > 0): + users = User.objects.filter(email__iexact=_dict["email"]) + if users.count() > 0: reset_password(users) else: - logger.debug('No users with ' + _dict['email'] + ' email to reset password') + logger.debug("No users with " + _dict["email"] + " email to reset password") - if 'callback' in _dict and _dict['callback'] != '': - return HttpResponseRedirect(redirect_to=_dict['callback'] + '?msg=Check your email for a password reset!') + if "callback" in _dict and _dict["callback"] != "": + return HttpResponseRedirect(redirect_to=_dict["callback"] + "?msg=Check your email for a password reset!") else: - return render(request, 'message.html', {'MESSAGE': 'Check your email for a password 
reset!'}) + return render(request, "message.html", {"MESSAGE": "Check your email for a password reset!"}) else: _dict = request.GET.copy() - _dict['callback'] = request.GET.get('callback', '') + _dict["callback"] = request.GET.get("callback", "") form = ResetPasswordForm(_dict) - return render(request, 'form.html', {'form': form}) + return render(request, "form.html", {"form": form}) def pick_password(request, token): _dict = request.POST.copy() - _dict['token'] = token - _dict['callback'] = request.GET.get('callback', '') + _dict["token"] = token + _dict["callback"] = request.GET.get("callback", "") token_instance = Token.get_valid(token) invite = None @@ -1545,63 +1608,65 @@ def pick_password(request, token): invite = UserInvite.objects.filter(token=token).first() # just can process if this user not have a password yet - user = User.objects.filter(email=invite.email, password='').first() if invite else None + user = User.objects.filter(email=invite.email, password="").first() if invite else None if not user: academy = None if invite: academy = invite.academy - return render_message(request, 'The link has expired.', academy=academy) + return render_message(request, "The link has expired.", academy=academy) form = PickPasswordForm(_dict) - if request.method == 'POST': - password1 = request.POST.get('password1', None) - password2 = request.POST.get('password2', None) + if request.method == "POST": + password1 = request.POST.get("password1", None) + password2 = request.POST.get("password2", None) if password1 != password2: obj = {} if invite and invite.academy: - obj['COMPANY_INFO_EMAIL'] = invite.academy.feedback_email - obj['COMPANY_LEGAL_NAME'] = invite.academy.legal_name or invite.academy.name - obj['COMPANY_LOGO'] = invite.academy.logo_url - obj['COMPANY_NAME'] = invite.academy.name + obj["COMPANY_INFO_EMAIL"] = invite.academy.feedback_email + obj["COMPANY_LEGAL_NAME"] = invite.academy.legal_name or invite.academy.name + obj["COMPANY_LOGO"] = invite.academy.logo_url + obj["COMPANY_NAME"] = invite.academy.name - if 'heading' not in obj: - obj['heading'] = invite.academy.name + if "heading" not in obj: + obj["heading"] = invite.academy.name - messages.error(request, 'Passwords don\'t match') - return render(request, 'form.html', {'form': form, **obj}) + messages.error(request, "Passwords don't match") + return render(request, "form.html", {"form": form, **obj}) if not password1: obj = {} if invite and invite.academy: - obj['COMPANY_INFO_EMAIL'] = invite.academy.feedback_email - obj['COMPANY_LEGAL_NAME'] = invite.academy.legal_name or invite.academy.name - obj['COMPANY_LOGO'] = invite.academy.logo_url - obj['COMPANY_NAME'] = invite.academy.name + obj["COMPANY_INFO_EMAIL"] = invite.academy.feedback_email + obj["COMPANY_LEGAL_NAME"] = invite.academy.legal_name or invite.academy.name + obj["COMPANY_LOGO"] = invite.academy.logo_url + obj["COMPANY_NAME"] = invite.academy.name - if 'heading' not in obj: - obj['heading'] = invite.academy.name + if "heading" not in obj: + obj["heading"] = invite.academy.name messages.error(request, "Password can't be empty") - return render(request, 'form.html', {'form': form, **obj}) - - if (len(password1) < 8 or not re.findall(PATTERNS['CONTAINS_LOWERCASE'], password1) - or not re.findall(PATTERNS['CONTAINS_UPPERCASE'], password1) - or not re.findall(PATTERNS['CONTAINS_SYMBOLS'], password1)): + return render(request, "form.html", {"form": form, **obj}) + + if ( + len(password1) < 8 + or not re.findall(PATTERNS["CONTAINS_LOWERCASE"], password1) + or not 
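`reset_password_view` above answers with the same "check your email" message whether or not the address exists, only logging the miss, which avoids leaking which emails are registered. A tiny illustration of that pattern (the `known_emails` iterable stands in for the `User` queryset):

```python
import logging
from typing import Iterable

logger = logging.getLogger(__name__)


def request_password_reset(email: str, known_emails: Iterable[str]) -> str:
    """Always answer with the same message so the form does not leak which emails exist."""
    if any(e.lower() == email.lower() for e in known_emails):
        # In the view this is where reset_password(users) sends the email.
        logger.debug("Sending password reset to %s", email)
    else:
        logger.debug("No users with %s email to reset password", email)
    return "Check your email for a password reset!"


print(request_password_reset("ada@example.com", ["ada@example.com"]))
```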
re.findall(PATTERNS["CONTAINS_UPPERCASE"], password1) + or not re.findall(PATTERNS["CONTAINS_SYMBOLS"], password1) + ): obj = {} if invite and invite.academy: - obj['COMPANY_INFO_EMAIL'] = invite.academy.feedback_email - obj['COMPANY_LEGAL_NAME'] = invite.academy.legal_name or invite.academy.name - obj['COMPANY_LOGO'] = invite.academy.logo_url - obj['COMPANY_NAME'] = invite.academy.name + obj["COMPANY_INFO_EMAIL"] = invite.academy.feedback_email + obj["COMPANY_LEGAL_NAME"] = invite.academy.legal_name or invite.academy.name + obj["COMPANY_LOGO"] = invite.academy.logo_url + obj["COMPANY_NAME"] = invite.academy.name - if 'heading' not in obj: - obj['heading'] = invite.academy.name + if "heading" not in obj: + obj["heading"] = invite.academy.name - messages.error(request, 'Password must contain 8 characters with lowercase, uppercase and ' - 'symbols') - return render(request, 'form.html', {'form': form, **obj}) + messages.error(request, "Password must contain 8 characters with lowercase, uppercase and " "symbols") + return render(request, "form.html", {"form": form, **obj}) else: user.set_password(password1) @@ -1613,153 +1678,169 @@ def pick_password(request, token): UserInvite.objects.filter(email=user.email, is_email_validated=False).update(is_email_validated=True) - callback = request.POST.get('callback', None) - if callback is not None and callback != '': - return HttpResponseRedirect(redirect_to=request.POST.get('callback')) + callback = request.POST.get("callback", None) + if callback is not None and callback != "": + return HttpResponseRedirect(redirect_to=request.POST.get("callback")) else: obj = {} if invite and invite.academy: - obj['COMPANY_INFO_EMAIL'] = invite.academy.feedback_email - obj['COMPANY_LEGAL_NAME'] = invite.academy.legal_name or invite.academy.name - obj['COMPANY_LOGO'] = invite.academy.logo_url - obj['COMPANY_NAME'] = invite.academy.name + obj["COMPANY_INFO_EMAIL"] = invite.academy.feedback_email + obj["COMPANY_LEGAL_NAME"] = invite.academy.legal_name or invite.academy.name + obj["COMPANY_LOGO"] = invite.academy.logo_url + obj["COMPANY_NAME"] = invite.academy.name - if 'heading' not in obj: - obj['heading'] = invite.academy.name + if "heading" not in obj: + obj["heading"] = invite.academy.name return render( - request, 'message.html', { - 'MESSAGE': 'You password has been successfully set.', - 'BUTTON': 'Continue to sign in', - 'BUTTON_TARGET': '_self', - 'LINK': os.getenv('APP_URL', 'https://4geeks.com') + '/login', - **obj - }) + request, + "message.html", + { + "MESSAGE": "You password has been successfully set.", + "BUTTON": "Continue to sign in", + "BUTTON_TARGET": "_self", + "LINK": os.getenv("APP_URL", "https://4geeks.com") + "/login", + **obj, + }, + ) - return render(request, 'form.html', {'form': form}) + return render(request, "form.html", {"form": form}) class PasswordResetView(APIView): - @capable_of('send_reset_password') + @capable_of("send_reset_password") def post(self, request, profileacademy_id=None, academy_id=None): profile_academy = ProfileAcademy.objects.filter(id=profileacademy_id).first() if profile_academy is None: - raise ValidationException('Member not found', 400) + raise ValidationException("Member not found", 400) if reset_password([profile_academy.user], academy=profile_academy.academy): - token = Token.objects.filter(user=profile_academy.user, token_type='temporal').first() + token = Token.objects.filter(user=profile_academy.user, token_type="temporal").first() serializer = TokenSmallSerializer(token) return Response(serializer.data) 
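`pick_password` above accepts a password only when it is at least 8 characters long and matches the `CONTAINS_LOWERCASE`, `CONTAINS_UPPERCASE` and `CONTAINS_SYMBOLS` patterns. A runnable approximation (the regexes below are assumed equivalents; the real `PATTERNS` dict lives elsewhere in the project):

```python
import re

# Assumed equivalents of the PATTERNS used by the view.
PATTERNS = {
    "CONTAINS_LOWERCASE": r"[a-z]",
    "CONTAINS_UPPERCASE": r"[A-Z]",
    "CONTAINS_SYMBOLS": r"[^a-zA-Z0-9]",
}


def is_strong_password(password: str) -> bool:
    """Apply the same checks as pick_password: length plus three character classes."""
    return (
        len(password) >= 8
        and bool(re.findall(PATTERNS["CONTAINS_LOWERCASE"], password))
        and bool(re.findall(PATTERNS["CONTAINS_UPPERCASE"], password))
        and bool(re.findall(PATTERNS["CONTAINS_SYMBOLS"], password))
    )


print(is_strong_password("abc"))           # False
print(is_strong_password("Sup3r-Secret"))  # True
```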
else: - raise ValidationException('Reset password token could not be sent') + raise ValidationException("Reset password token could not be sent") class ProfileInviteMeView(APIView): def get(self, request): invites = UserInvite.objects.filter(email=request.user.email) - profile_academies = ProfileAcademy.objects.filter(user=request.user, status='INVITED') - mentor_profiles = MentorProfile.objects.filter(user=request.user, status='INVITED') - - return Response({ - 'invites': UserInviteSerializer(invites, many=True).data, - 'profile_academies': GetProfileAcademySerializer(profile_academies, many=True).data, - 'mentor_profiles': GETMentorSmallSerializer(mentor_profiles, many=True).data, - }) + profile_academies = ProfileAcademy.objects.filter(user=request.user, status="INVITED") + mentor_profiles = MentorProfile.objects.filter(user=request.user, status="INVITED") + + return Response( + { + "invites": UserInviteSerializer(invites, many=True).data, + "profile_academies": GetProfileAcademySerializer(profile_academies, many=True).data, + "mentor_profiles": GETMentorSmallSerializer(mentor_profiles, many=True).data, + } + ) @private_view() def render_user_invite(request, token): - accepting = request.GET.get('accepting', '') - rejecting = request.GET.get('rejecting', '') - if accepting.strip() != '': + accepting = request.GET.get("accepting", "") + rejecting = request.GET.get("rejecting", "") + if accepting.strip() != "": accept_invite(accepting, token.user) - if rejecting.strip() != '': - UserInvite.objects.filter(id__in=rejecting.split(','), email=token.user.email, - status='PENDING').update(status='REJECTED') + if rejecting.strip() != "": + UserInvite.objects.filter(id__in=rejecting.split(","), email=token.user.email, status="PENDING").update( + status="REJECTED" + ) - pending_invites = UserInvite.objects.filter(email=token.user.email, status='PENDING') + pending_invites = UserInvite.objects.filter(email=token.user.email, status="PENDING") if pending_invites.count() == 0: - return render_message(request, - 'You don\'t have any more pending invites', - btn_label='Continue to 4Geeks', - btn_url=get_app_url()) + return render_message( + request, "You don't have any more pending invites", btn_label="Continue to 4Geeks", btn_url=get_app_url() + ) - querystr = urllib.parse.urlencode({'callback': get_app_url(), 'token': token.key}) - url = os.getenv('API_URL') + '/v1/auth/member/invite?' + querystr + querystr = urllib.parse.urlencode({"callback": get_app_url(), "token": token.key}) + url = os.getenv("API_URL") + "/v1/auth/member/invite?" 
+ querystr return render( - request, 'user_invite.html', { - 'subject': 'Invitation to study at 4Geeks.com', - 'invites': UserInviteSmallSerializer(pending_invites, many=True).data, - 'LINK': url, - 'user': UserTinySerializer(token.user, many=False).data - }) + request, + "user_invite.html", + { + "subject": "Invitation to study at 4Geeks.com", + "invites": UserInviteSmallSerializer(pending_invites, many=True).data, + "LINK": url, + "user": UserTinySerializer(token.user, many=False).data, + }, + ) -@api_view(['GET', 'POST']) +@api_view(["GET", "POST"]) @permission_classes([AllowAny]) def render_invite(request, token, member_id=None): _dict = request.POST.copy() - _dict['token'] = token - _dict['callback'] = request.GET.get('callback', '') + _dict["token"] = token + _dict["callback"] = request.GET.get("callback", "") lang = get_user_language(request) invite = UserInvite.objects.filter(token=token).first() - if invite is None or invite.status != 'PENDING': - callback_msg = '' - if _dict['callback'] != '': - callback_msg = ". You can try and login at <a href='" + _dict['callback'] + "'>" + _dict['callback'] + '</a>' + if invite is None or invite.status != "PENDING": + callback_msg = "" + if _dict["callback"] != "": + callback_msg = ( + ". You can try and login at <a href='" + _dict["callback"] + "'>" + _dict["callback"] + "</a>" + ) academy = None if invite and invite.academy: academy = invite.academy - return render_message(request, - 'Invitation not found or it was already accepted' + callback_msg, - academy=academy) + return render_message( + request, "Invitation not found or it was already accepted" + callback_msg, academy=academy + ) - if request.method == 'GET' and request.META.get('CONTENT_TYPE') == 'application/json': + if request.method == "GET" and request.META.get("CONTENT_TYPE") == "application/json": serializer = UserInviteSerializer(invite, many=False) return Response(serializer.data) - if request.method == 'GET': + if request.method == "GET": if invite and User.objects.filter(email=invite.email).exists(): - redirect = os.getenv('API_URL') + '/v1/auth/member/invite' + redirect = os.getenv("API_URL") + "/v1/auth/member/invite" return HttpResponseRedirect(redirect_to=redirect) - form = InviteForm({ - 'callback': [''], - **_dict, - 'first_name': invite.first_name, - 'last_name': invite.last_name, - 'phone': invite.phone, - }) + form = InviteForm( + { + "callback": [""], + **_dict, + "first_name": invite.first_name, + "last_name": invite.last_name, + "phone": invite.phone, + } + ) obj = {} if invite and invite.academy: - obj['COMPANY_INFO_EMAIL'] = invite.academy.feedback_email - obj['COMPANY_LEGAL_NAME'] = invite.academy.legal_name or invite.academy.name - obj['COMPANY_LOGO'] = invite.academy.logo_url - obj['COMPANY_NAME'] = invite.academy.name - - if 'heading' not in obj: - obj['heading'] = invite.academy.name - - return render(request, 'form_invite.html', { - 'form': form, - **obj, - }) + obj["COMPANY_INFO_EMAIL"] = invite.academy.feedback_email + obj["COMPANY_LEGAL_NAME"] = invite.academy.legal_name or invite.academy.name + obj["COMPANY_LOGO"] = invite.academy.logo_url + obj["COMPANY_NAME"] = invite.academy.name + + if "heading" not in obj: + obj["heading"] = invite.academy.name + + return render( + request, + "form_invite.html", + { + "form": form, + **obj, + }, + ) - if request.method == 'POST' and request.META.get('CONTENT_TYPE') == 'application/json': + if request.method == "POST" and request.META.get("CONTENT_TYPE") == "application/json": _dict = request.data - 
_dict['token'] = token - _dict['callback'] = request.GET.get('callback', '') + _dict["token"] = token + _dict["callback"] = request.GET.get("callback", "") try: invite = accept_invite_action(_dict, token, lang) @@ -1769,7 +1850,7 @@ def render_invite(request, token, member_id=None): serializer = UserInviteShortSerializer(invite, many=False) return Response(serializer.data) - if request.method == 'POST': + if request.method == "POST": try: accept_invite_action(_dict, token, lang) except Exception as e: @@ -1778,23 +1859,27 @@ def render_invite(request, token, member_id=None): obj = {} if invite and invite.academy: - obj['COMPANY_INFO_EMAIL'] = invite.academy.feedback_email - obj['COMPANY_LEGAL_NAME'] = invite.academy.legal_name or invite.academy.name - obj['COMPANY_LOGO'] = invite.academy.logo_url - obj['COMPANY_NAME'] = invite.academy.name + obj["COMPANY_INFO_EMAIL"] = invite.academy.feedback_email + obj["COMPANY_LEGAL_NAME"] = invite.academy.legal_name or invite.academy.name + obj["COMPANY_LOGO"] = invite.academy.logo_url + obj["COMPANY_NAME"] = invite.academy.name - if 'heading' not in obj: - obj['heading'] = invite.academy.name + if "heading" not in obj: + obj["heading"] = invite.academy.name - return render(request, 'form_invite.html', { - 'form': form, - **obj, - }) + return render( + request, + "form_invite.html", + { + "form": form, + **obj, + }, + ) - callback = request.POST.get('callback', None) + callback = request.POST.get("callback", None) if callback: uri = callback[0] if isinstance(callback, list) else callback - if len(uri) > 0 and uri[0] == '[': + if len(uri) > 0 and uri[0] == "[": uri = uri[2:-2] if settings.DEBUG: return HttpResponse(f"Redirect to: <a href='{uri}'>{uri}</a>") @@ -1803,46 +1888,53 @@ def render_invite(request, token, member_id=None): else: obj = {} if invite and invite.academy: - obj['COMPANY_INFO_EMAIL'] = invite.academy.feedback_email - obj['COMPANY_LEGAL_NAME'] = invite.academy.legal_name or invite.academy.name - obj['COMPANY_LOGO'] = invite.academy.logo_url - obj['COMPANY_NAME'] = invite.academy.name + obj["COMPANY_INFO_EMAIL"] = invite.academy.feedback_email + obj["COMPANY_LEGAL_NAME"] = invite.academy.legal_name or invite.academy.name + obj["COMPANY_LOGO"] = invite.academy.logo_url + obj["COMPANY_NAME"] = invite.academy.name - if 'heading' not in obj: - obj['heading'] = invite.academy.name + if "heading" not in obj: + obj["heading"] = invite.academy.name - return render(request, 'message.html', { - 'MESSAGE': 'Welcome to 4Geeks, you can go ahead and log in', - **obj, - }) + return render( + request, + "message.html", + { + "MESSAGE": "Welcome to 4Geeks, you can go ahead and log in", + **obj, + }, + ) @private_view() def render_academy_invite(request, token): - accepting = request.GET.get('accepting', '') - rejecting = request.GET.get('rejecting', '') - if accepting.strip() != '': - ProfileAcademy.objects.filter(id__in=accepting.split(','), user__id=token.user.id, - status='INVITED').update(status='ACTIVE') - if rejecting.strip() != '': - ProfileAcademy.objects.filter(id__in=rejecting.split(','), user__id=token.user.id).delete() - - pending_invites = ProfileAcademy.objects.filter(user__id=token.user.id, status='INVITED') + accepting = request.GET.get("accepting", "") + rejecting = request.GET.get("rejecting", "") + if accepting.strip() != "": + ProfileAcademy.objects.filter(id__in=accepting.split(","), user__id=token.user.id, status="INVITED").update( + status="ACTIVE" + ) + if rejecting.strip() != "": + 
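The POST branch of `render_invite` above tolerates a `callback` value that arrives either as a list or as a stringified list such as `"['https://...']"`. A small helper showing the same normalization (name and empty-string default are ours):

```python
from typing import List, Optional, Union


def normalize_callback(callback: Union[str, List[str], None]) -> str:
    """Undo the two ways the callback can arrive wrapped: as a list, or as "['...']" text."""
    if not callback:
        return ""
    uri = callback[0] if isinstance(callback, list) else callback
    # Some clients post the value as a stringified list, e.g. "['https://4geeks.com']".
    if len(uri) > 0 and uri[0] == "[":
        uri = uri[2:-2]
    return uri


print(normalize_callback(["https://4geeks.com/login"]))
print(normalize_callback("['https://4geeks.com/login']"))
```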
ProfileAcademy.objects.filter(id__in=rejecting.split(","), user__id=token.user.id).delete() + + pending_invites = ProfileAcademy.objects.filter(user__id=token.user.id, status="INVITED") if pending_invites.count() == 0: - return render_message(request, - 'You don\'t have any more pending invites', - btn_label='Continue to 4Geeks', - btn_url=get_app_url()) + return render_message( + request, "You don't have any more pending invites", btn_label="Continue to 4Geeks", btn_url=get_app_url() + ) - querystr = urllib.parse.urlencode({'callback': get_app_url(), 'token': token.key}) - url = os.getenv('API_URL') + '/v1/auth/academy/html/invite?' + querystr + querystr = urllib.parse.urlencode({"callback": get_app_url(), "token": token.key}) + url = os.getenv("API_URL") + "/v1/auth/academy/html/invite?" + querystr return render( - request, 'academy_invite.html', { - 'subject': 'Invitation to study at 4Geeks.com', - 'invites': ProfileAcademySmallSerializer(pending_invites, many=True).data, - 'LINK': url, - 'user': UserTinySerializer(token.user, many=False).data - }) + request, + "academy_invite.html", + { + "subject": "Invitation to study at 4Geeks.com", + "invites": ProfileAcademySmallSerializer(pending_invites, many=True).data, + "LINK": url, + "user": UserTinySerializer(token.user, many=False).data, + }, + ) def login_html_view(request): @@ -1850,31 +1942,31 @@ def login_html_view(request): _dict = request.GET.copy() form = LoginForm(_dict) - if request.method == 'POST': + if request.method == "POST": try: - url = request.POST.get('url', None) - if url is None or url == '': - raise Exception('Invalid redirect url, you must specify a url to redirect to') + url = request.POST.get("url", None) + if url is None or url == "": + raise Exception("Invalid redirect url, you must specify a url to redirect to") # the url may or may not be encoded try: - url = base64.b64decode(url.encode('utf-8')).decode('utf-8') + url = base64.b64decode(url.encode("utf-8")).decode("utf-8") except Exception: pass - email = request.POST.get('email', None) - password = request.POST.get('password', None) + email = request.POST.get("email", None) + password = request.POST.get("password", None) user = None if email and password: user = User.objects.filter(Q(email=email.lower()) | Q(username=email)).first() if not user: - msg = 'Unable to log in with provided credentials.' + msg = "Unable to log in with provided credentials." raise Exception(msg) if user.check_password(password) != True: - msg = 'Unable to log in with provided credentials.' + msg = "Unable to log in with provided credentials." raise Exception(msg) # The authenticate call simply returns None for is_active=False # users. (Assuming the default ModelBackend authentication @@ -1883,60 +1975,65 @@ def login_html_view(request): msg = 'Must include "username" and "password".' 
raise Exception(msg, code=403) - if user and not UserInvite.objects.filter(email__iexact=email, status='ACCEPTED', - is_email_validated=True).exists(): - raise Exception('You need to validate your email first') + if ( + user + and not UserInvite.objects.filter( + email__iexact=email, status="ACCEPTED", is_email_validated=True + ).exists() + ): + raise Exception("You need to validate your email first") - token, _ = Token.get_or_create(user=user, token_type='login') + token, _ = Token.get_or_create(user=user, token_type="login") - request.session['token'] = token.key + request.session["token"] = token.key return HttpResponseRedirect( - set_query_parameter(set_query_parameter(url, 'attempt', '1'), 'token', str(token))) + set_query_parameter(set_query_parameter(url, "attempt", "1"), "token", str(token)) + ) except Exception as e: - messages.error(request, e.message if hasattr(e, 'message') else e) - return render(request, 'login.html', {'form': form}) + messages.error(request, e.message if hasattr(e, "message") else e) + return render(request, "login.html", {"form": form}) else: - url = request.GET.get('url', None) - if url is None or url == '': + url = request.GET.get("url", None) + if url is None or url == "": messages.error(request, "You must specify a 'url' (querystring) to redirect to after successful login") - return render(request, 'login.html', {'form': form, 'redirect_url': request.GET.get('url', None)}) + return render(request, "login.html", {"form": form, "redirect_url": request.GET.get("url", None)}) -@api_view(['GET']) +@api_view(["GET"]) @permission_classes([AllowAny]) def get_google_token(request, token=None): if token == None: - raise ValidationException('No session token has been specified', slug='no-session-token') + raise ValidationException("No session token has been specified", slug="no-session-token") - url = request.query_params.get('url', None) + url = request.query_params.get("url", None) if url == None: - raise ValidationException('No callback URL specified', slug='no-callback-url') + raise ValidationException("No callback URL specified", slug="no-callback-url") try: - url = base64.b64decode(url.encode('utf-8')).decode('utf-8') + url = base64.b64decode(url.encode("utf-8")).decode("utf-8") except Exception: pass token = Token.get_valid(token) # IMPORTANT!! 
you can only connect to google with temporal short lasting tokens - if token is None or token.token_type != 'temporal': - raise ValidationException('Invalid or inactive token', code=403, slug='invalid-token') + if token is None or token.token_type != "temporal": + raise ValidationException("Invalid or inactive token", code=403, slug="invalid-token") params = { - 'response_type': 'code', - 'client_id': os.getenv('GOOGLE_CLIENT_ID', ''), - 'redirect_uri': os.getenv('GOOGLE_REDIRECT_URL', ''), - 'access_type': 'offline', #we need offline access to receive refresh token and avoid total expiration - 'scope': 'https://www.googleapis.com/auth/calendar.events', - 'state': f'token={token.key}&url={url}' + "response_type": "code", + "client_id": os.getenv("GOOGLE_CLIENT_ID", ""), + "redirect_uri": os.getenv("GOOGLE_REDIRECT_URL", ""), + "access_type": "offline", # we need offline access to receive refresh token and avoid total expiration + "scope": "https://www.googleapis.com/auth/calendar.events", + "state": f"token={token.key}&url={url}", } - logger.debug('Redirecting to google') + logger.debug("Redirecting to google") logger.debug(params) - redirect = f'https://accounts.google.com/o/oauth2/v2/auth?{urlencode(params)}' + redirect = f"https://accounts.google.com/o/oauth2/v2/auth?{urlencode(params)}" if settings.DEBUG: return HttpResponse(f"Redirect to: <a href='{redirect}'>{redirect}</a>") @@ -1947,120 +2044,123 @@ def get_google_token(request, token=None): # Create your views here. -@api_view(['GET']) +@api_view(["GET"]) @permission_classes([AllowAny]) def save_google_token(request): - logger.debug('Google callback just landed') + logger.debug("Google callback just landed") logger.debug(request.query_params) - error = request.query_params.get('error', False) - error_description = request.query_params.get('error_description', '') + error = request.query_params.get("error", False) + error_description = request.query_params.get("error_description", "") if error: - raise APIException('Google OAuth: ' + error_description) + raise APIException("Google OAuth: " + error_description) - state = parse_qs(request.query_params.get('state', None)) + state = parse_qs(request.query_params.get("state", None)) - if state['url'] == None: - raise ValidationException('No callback URL specified', slug='no-callback-url') - if state['token'] == None: - raise ValidationException('No user token specified', slug='no-user-token') + if state["url"] == None: + raise ValidationException("No callback URL specified", slug="no-callback-url") + if state["token"] == None: + raise ValidationException("No user token specified", slug="no-user-token") - code = request.query_params.get('code', None) + code = request.query_params.get("code", None) if code == None: - raise ValidationException('No google code specified', slug='no-code') + raise ValidationException("No google code specified", slug="no-code") payload = { - 'client_id': os.getenv('GOOGLE_CLIENT_ID', ''), - 'client_secret': os.getenv('GOOGLE_SECRET', ''), - 'redirect_uri': os.getenv('GOOGLE_REDIRECT_URL', ''), - 'grant_type': 'authorization_code', - 'code': code, + "client_id": os.getenv("GOOGLE_CLIENT_ID", ""), + "client_secret": os.getenv("GOOGLE_SECRET", ""), + "redirect_uri": os.getenv("GOOGLE_REDIRECT_URL", ""), + "grant_type": "authorization_code", + "code": code, } - headers = {'Accept': 'application/json'} - resp = requests.post('https://oauth2.googleapis.com/token', data=payload, headers=headers, timeout=2) + headers = {"Accept": "application/json"} + resp = 
requests.post("https://oauth2.googleapis.com/token", data=payload, headers=headers, timeout=2) if resp.status_code == 200: - logger.debug('Google responded with 200') + logger.debug("Google responded with 200") body = resp.json() - if 'access_token' not in body: - raise APIException(body['error_description']) + if "access_token" not in body: + raise APIException(body["error_description"]) logger.debug(body) - token = Token.get_valid(state['token'][0]) + token = Token.get_valid(state["token"][0]) if not token: logger.debug(f'Token {state["token"][0]} not found or is expired') - raise ValidationException('Token was not found or is expired, please use a different token', - code=404, - slug='token-not-found') + raise ValidationException( + "Token was not found or is expired, please use a different token", code=404, slug="token-not-found" + ) user = token.user - refresh = '' - if 'refresh_token' in body: - refresh = body['refresh_token'] + refresh = "" + if "refresh_token" in body: + refresh = body["refresh_token"] CredentialsGoogle.objects.filter(user__id=user.id).delete() google_credentials = CredentialsGoogle( user=user, - token=body['access_token'], + token=body["access_token"], refresh_token=refresh, - expires_at=timezone.now() + timedelta(seconds=body['expires_in']), + expires_at=timezone.now() + timedelta(seconds=body["expires_in"]), ) google_credentials.save() - return HttpResponseRedirect(redirect_to=state['url'][0] + '?token=' + token.key) + return HttpResponseRedirect(redirect_to=state["url"][0] + "?token=" + token.key) else: logger.error(resp.json()) - raise APIException('Error from google credentials') + raise APIException("Error from google credentials") class GithubUserView(APIView, GenerateLookupsMixin): extensions = APIViewExtensions(paginate=True) - @capable_of('get_github_user') + @capable_of("get_github_user") def get(self, request, academy_id, githubuser_id=None): handler = self.extensions(request) if githubuser_id is not None: item = GithubAcademyUser.objects.filter(id=githubuser_id, academy_id=academy_id).first() if item is None: - raise ValidationException('Github User not found for this academy', - code=404, - slug='githubuser-not-found') + raise ValidationException( + "Github User not found for this academy", code=404, slug="githubuser-not-found" + ) serializer = GithubUserSerializer(item, many=False) return Response(serializer.data) items = GithubAcademyUser.objects.filter(Q(academy__id=academy_id) | Q(academy__isnull=True)) - like = request.GET.get('like', None) + like = request.GET.get("like", None) if like is not None: items = items.filter( - Q(username__icontains=like) | Q(user__email__icontains=like) - | Q(user__first_name__icontains=like) | Q(user__last_name__icontains=like)) + Q(username__icontains=like) + | Q(user__email__icontains=like) + | Q(user__first_name__icontains=like) + | Q(user__last_name__icontains=like) + ) - items = items.order_by(request.GET.get('sort', '-created_at')) + items = items.order_by(request.GET.get("sort", "-created_at")) items = handler.queryset(items) serializer = GithubUserSerializer(items, many=True) return handler.response(serializer.data) - @capable_of('update_github_user') + @capable_of("update_github_user") def put(self, request, academy_id, githubuser_id=None): - lookups = self.generate_lookups(request, many_fields=['id']) + lookups = self.generate_lookups(request, many_fields=["id"]) if githubuser_id is not None: - lookups = {'id': githubuser_id} + lookups = {"id": githubuser_id} if lookups is None or len(lookups.keys()) == 
0: - raise ValidationException('No github users lookups to find', code=404, slug='no-lookup') + raise ValidationException("No github users lookups to find", code=404, slug="no-lookup") items = GithubAcademyUser.objects.filter(**lookups, academy_id=academy_id) if items.count() == 0: - raise ValidationException('Github User not found for this academy', code=404, slug='githubuser-not-found') + raise ValidationException("Github User not found for this academy", code=404, slug="githubuser-not-found") valid = [] for gu in items: @@ -2080,27 +2180,34 @@ def put(self, request, academy_id, githubuser_id=None): else: return Response(GithubUserSerializer(data_list[0], many=False).data, status=status.HTTP_200_OK) - @capable_of('update_github_user') + @capable_of("update_github_user") def post(self, request, academy_id): body = request.data - if 'cohort' not in body: - raise ValidationException(translation( - en='You must specify the cohort the user belongs to, and it must be active on it', - es='Debes especificar la cohort y el usuario debe estar activo en ella'), - slug='user-not-found') - if 'user' not in body: - raise ValidationException(translation(en='You must specify the user', es='Debes especificar el usuario'), - slug='user-not-found') - - cu = CohortUser.objects.filter(user=body['user'], cohort=body['cohort'], educational_status='ACTIVE').first() + if "cohort" not in body: + raise ValidationException( + translation( + en="You must specify the cohort the user belongs to, and it must be active on it", + es="Debes especificar la cohort y el usuario debe estar activo en ella", + ), + slug="user-not-found", + ) + if "user" not in body: + raise ValidationException( + translation(en="You must specify the user", es="Debes especificar el usuario"), slug="user-not-found" + ) + + cu = CohortUser.objects.filter(user=body["user"], cohort=body["cohort"], educational_status="ACTIVE").first() if cu is None: - raise ValidationException(translation( - en='You must specify the cohort the user belongs to, and it must be active on it', - es='Debes especificar la cohort y el usuario debe estar activo en ella'), - slug='cohort-not-found') + raise ValidationException( + translation( + en="You must specify the cohort the user belongs to, and it must be active on it", + es="Debes especificar la cohort y el usuario debe estar activo en ella", + ), + slug="cohort-not-found", + ) - context = {'academy_id': academy_id, 'request': request} + context = {"academy_id": academy_id, "request": request} serializer = POSTGithubUserSerializer(data=request.data, context=context) if serializer.is_valid(): serializer.save() @@ -2112,21 +2219,27 @@ def post(self, request, academy_id): class AcademyGithubSyncView(APIView, GenerateLookupsMixin): extensions = APIViewExtensions(paginate=True) - @capable_of('sync_organization_users') + @capable_of("sync_organization_users") def put(self, request, academy_id): settings = AcademyAuthSettings.objects.filter(academy__id=academy_id).first() if settings is None: - raise ValidationException(translation( - en='Github Settings not found for this academy', - es='No se ha encontrado una configuracion para esta academy en Github'), - slug='settings-not-found') + raise ValidationException( + translation( + en="Github Settings not found for this academy", + es="No se ha encontrado una configuracion para esta academy en Github", + ), + slug="settings-not-found", + ) if not settings.github_is_sync: - raise ValidationException(translation( - en='Github sync is turned off in the academy settings', - es='La 
sincronización con github esta desactivada para esta academia'), - slug='github-sync-off') + raise ValidationException( + translation( + en="Github sync is turned off in the academy settings", + es="La sincronización con github esta desactivada para esta academia", + ), + slug="github-sync-off", + ) try: result = sync_organization_members(academy_id) @@ -2139,25 +2252,28 @@ def put(self, request, academy_id): class AcademyAuthSettingsView(APIView, GenerateLookupsMixin): - @capable_of('get_academy_auth_settings') + @capable_of("get_academy_auth_settings") def get(self, request, academy_id): lang = get_user_language(request) settings = AcademyAuthSettings.objects.filter(academy_id=academy_id).first() if settings is None: raise ValidationException( - translation(lang, - en='Academy has not github authentication settings', - es='La academia no tiene configurada la integracion con github', - slug='no-github-auth-settings')) + translation( + lang, + en="Academy has not github authentication settings", + es="La academia no tiene configurada la integracion con github", + slug="no-github-auth-settings", + ) + ) serializer = AuthSettingsBigSerializer(settings, many=False) return Response(serializer.data) - @capable_of('crud_academy_auth_settings') + @capable_of("crud_academy_auth_settings") def put(self, request, academy_id): settings = AcademyAuthSettings.objects.filter(academy_id=academy_id).first() - context = {'academy_id': academy_id, 'request': request} + context = {"academy_id": academy_id, "request": request} if settings is None: serializer = AcademyAuthSettingsSerializer(data=request.data, context=context) else: @@ -2172,43 +2288,46 @@ def put(self, request, academy_id): class GitpodUserView(APIView, GenerateLookupsMixin): extensions = APIViewExtensions(paginate=True) - @capable_of('get_gitpod_user') + @capable_of("get_gitpod_user") def get(self, request, academy_id, gitpoduser_id=None): handler = self.extensions(request) if gitpoduser_id is not None: item = GitpodUser.objects.filter(id=gitpoduser_id, academy_id=academy_id).first() if item is None: - raise ValidationException('Gitpod User not found for this academy', - code=404, - slug='gitpoduser-not-found') + raise ValidationException( + "Gitpod User not found for this academy", code=404, slug="gitpoduser-not-found" + ) serializer = GetGitpodUserSerializer(item, many=False) return Response(serializer.data) items = GitpodUser.objects.filter(Q(academy__id=academy_id) | Q(academy__isnull=True)) - like = request.GET.get('like', None) + like = request.GET.get("like", None) if like is not None: items = items.filter( - Q(github_username__icontains=like) | Q(user__email__icontains=like) - | Q(user__first_name__icontains=like) | Q(user__last_name__icontains=like)) + Q(github_username__icontains=like) + | Q(user__email__icontains=like) + | Q(user__first_name__icontains=like) + | Q(user__last_name__icontains=like) + ) - items = items.order_by(request.GET.get('sort', 'expires_at')) + items = items.order_by(request.GET.get("sort", "expires_at")) items = handler.queryset(items) serializer = GitpodUserSmallSerializer(items, many=True) return handler.response(serializer.data) - @capable_of('update_gitpod_user') + @capable_of("update_gitpod_user") def put(self, request, academy_id, gitpoduser_id): item = GitpodUser.objects.filter(id=gitpoduser_id, academy_id=academy_id).first() if item is None: - raise ValidationException('Gitpod User not found for this academy', code=404, slug='gitpoduser-not-found') + raise ValidationException("Gitpod User not found for 
this academy", code=404, slug="gitpoduser-not-found") - if request.data is None or ('expires_at' in request.data and request.data['expires_at'] is None): + if request.data is None or ("expires_at" in request.data and request.data["expires_at"] is None): item.expires_at = None item.save() item = set_gitpod_user_expiration(item.id) @@ -2224,22 +2343,22 @@ def put(self, request, academy_id, gitpoduser_id): class ProfileView(APIView, GenerateLookupsMixin): - @capable_of('crud_event') + @capable_of("crud_event") def get(self, request, academy_id=None, user_id=None): item = Profile.objects.filter(user__id=user_id).first() if not item: - raise ValidationException('Profile not found', code=404, slug='profile-not-found') + raise ValidationException("Profile not found", code=404, slug="profile-not-found") serializer = GetProfileSerializer(item, many=False) return Response(serializer.data, status=status.HTTP_200_OK) - @capable_of('crud_event') + @capable_of("crud_event") def put(self, request, academy_id=None, user_id=None): item = Profile.objects.filter(user__id=user_id).first() if not item: - raise ValidationException('Profile not found', code=404, slug='profile-not-found') + raise ValidationException("Profile not found", code=404, slug="profile-not-found") serializer = ProfileSerializer(item, data=request.data) if serializer.is_valid(): @@ -2250,25 +2369,25 @@ def put(self, request, academy_id=None, user_id=None): class ProfileMeView(APIView, GenerateLookupsMixin): - @has_permission('get_my_profile') + @has_permission("get_my_profile") def get(self, request): item = Profile.objects.filter(user=request.user).first() if not item: - raise ValidationException('Profile not found', code=404, slug='profile-not-found') + raise ValidationException("Profile not found", code=404, slug="profile-not-found") serializer = GetProfileSerializer(item, many=False) return Response(serializer.data, status=status.HTTP_200_OK) - @has_permission('create_my_profile') + @has_permission("create_my_profile") def post(self, request): if Profile.objects.filter(user__id=request.user.id).exists(): - raise ValidationException('Profile already exists', code=400, slug='profile-already-exist') + raise ValidationException("Profile already exists", code=400, slug="profile-already-exist") data = {} for key in request.data: data[key] = request.data[key] - data['user'] = request.user.id + data["user"] = request.user.id serializer = ProfileSerializer(data=data) if serializer.is_valid(): @@ -2278,17 +2397,17 @@ def post(self, request): return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - @has_permission('update_my_profile') + @has_permission("update_my_profile") def put(self, request): item = Profile.objects.filter(user__id=request.user.id).first() if not item: - raise ValidationException('Profile not found', code=404, slug='profile-not-found') + raise ValidationException("Profile not found", code=404, slug="profile-not-found") data = {} for key in request.data: data[key] = request.data[key] - data['user'] = request.user.id + data["user"] = request.user.id serializer = ProfileSerializer(item, data=data) if serializer.is_valid(): @@ -2304,9 +2423,10 @@ class ProfileMePictureView(APIView): put: Upload a file to Google Cloud. 
""" + parser_classes = [MultiPartParser, FileUploadParser] - @has_permission('update_my_profile') + @has_permission("update_my_profile") def put(self, request): from ..services.google_cloud import Storage @@ -2317,23 +2437,24 @@ def put(self, request): profile = Profile(user=request.user) profile.save() - files = request.data.getlist('file') - file = request.data.get('file') + files = request.data.getlist("file") + file = request.data.get("file") if not file: - raise ValidationException('Missing file in request', slug='missing-file') + raise ValidationException("Missing file in request", slug="missing-file") if not len(files): - raise ValidationException('empty files in request') + raise ValidationException("empty files in request") if len(files) > 1: - raise ValidationException('Just can upload one file at a time') + raise ValidationException("Just can upload one file at a time") # files validation below if file.content_type not in PROFILE_MIME_ALLOWED: raise ValidationException( f'You can upload only files on the following formats: {",".join(PROFILE_MIME_ALLOWED)}', - slug='bad-file-format') + slug="bad-file-format", + ) file_bytes = file.read() hash = hashlib.sha256(file_bytes).hexdigest() @@ -2341,64 +2462,73 @@ def put(self, request): try: storage = Storage() cloud_file = storage.file(get_profile_bucket(), hash) - cloud_file_thumbnail = storage.file(get_profile_bucket(), f'{hash}-100x100') + cloud_file_thumbnail = storage.file(get_profile_bucket(), f"{hash}-100x100") if thumb_exists := cloud_file_thumbnail.exists(): cloud_file_thumbnail_url = cloud_file_thumbnail.url() except CircuitBreakerError: - raise ValidationException(translation( - lang, - en='The circuit breaker is open due to an error, please try again later', - es='El circuit breaker está abierto debido a un error, por favor intente más tarde', - slug='circuit-breaker-open'), - slug='circuit-breaker-open', - data={'service': 'Google Cloud Storage'}, - silent=True, - code=503) + raise ValidationException( + translation( + lang, + en="The circuit breaker is open due to an error, please try again later", + es="El circuit breaker está abierto debido a un error, por favor intente más tarde", + slug="circuit-breaker-open", + ), + slug="circuit-breaker-open", + data={"service": "Google Cloud Storage"}, + silent=True, + code=503, + ) if not thumb_exists: cloud_file.upload(file, content_type=file.content_type) func = FunctionV2(get_shape_of_image_url()) - res = func.call({'filename': hash, 'bucket': get_profile_bucket()}, timeout=28) + res = func.call({"filename": hash, "bucket": get_profile_bucket()}, timeout=28) json = res.json() - if json['shape'] != 'Square': + if json["shape"] != "Square": cloud_file.delete() - raise ValidationException('just can upload square images', slug='not-square-image') + raise ValidationException("just can upload square images", slug="not-square-image") - func = FunctionV1(region='us-central1', project_id=get_google_project_id(), name='resize-image') + func = FunctionV1(region="us-central1", project_id=get_google_project_id(), name="resize-image") - res = func.call({ - 'width': 100, - 'filename': hash, - 'bucket': get_profile_bucket(), - }, timeout=28) + res = func.call( + { + "width": 100, + "filename": hash, + "bucket": get_profile_bucket(), + }, + timeout=28, + ) try: - cloud_file_thumbnail = storage.file(get_profile_bucket(), f'{hash}-100x100') + cloud_file_thumbnail = storage.file(get_profile_bucket(), f"{hash}-100x100") cloud_file_thumbnail_url = cloud_file_thumbnail.url() cloud_file.delete() 
except CircuitBreakerError: - raise ValidationException(translation( - lang, - en='The circuit breaker is open due to an error, please try again later', - es='El circuit breaker está abierto debido a un error, por favor intente más tarde', - slug='circuit-breaker-open'), - slug='circuit-breaker-open', - data={'service': 'Google Cloud Storage'}, - silent=True, - code=503) - - previous_avatar_url = profile.avatar_url or '' + raise ValidationException( + translation( + lang, + en="The circuit breaker is open due to an error, please try again later", + es="El circuit breaker está abierto debido a un error, por favor intente más tarde", + slug="circuit-breaker-open", + ), + slug="circuit-breaker-open", + data={"service": "Google Cloud Storage"}, + silent=True, + code=503, + ) + + previous_avatar_url = profile.avatar_url or "" profile.avatar_url = cloud_file_thumbnail_url profile.save() if previous_avatar_url != profile.avatar_url: - result = re.search(r'/(.{64})-100x100$', previous_avatar_url) + result = re.search(r"/(.{64})-100x100$", previous_avatar_url) if result: previous_hash = result[1] @@ -2406,19 +2536,22 @@ def put(self, request): # remove the file when the last user remove their copy of the same image if not Profile.objects.filter(avatar_url__contains=previous_hash).exists(): try: - cloud_file = storage.file(get_profile_bucket(), f'{hash}-100x100') + cloud_file = storage.file(get_profile_bucket(), f"{hash}-100x100") cloud_file.delete() except CircuitBreakerError: - raise ValidationException(translation( - lang, - en='The circuit breaker is open due to an error, please try again later', - es='El circuit breaker está abierto debido a un error, por favor intente más tarde', - slug='circuit-breaker-open'), - slug='circuit-breaker-open', - data={'service': 'Google Cloud Storage'}, - silent=True, - code=503) + raise ValidationException( + translation( + lang, + en="The circuit breaker is open due to an error, please try again later", + es="El circuit breaker está abierto debido a un error, por favor intente más tarde", + slug="circuit-breaker-open", + ), + slug="circuit-breaker-open", + data={"service": "Google Cloud Storage"}, + silent=True, + code=503, + ) serializer = GetProfileSerializer(profile, many=False) return Response(serializer.data, status=status.HTTP_200_OK) @@ -2429,9 +2562,9 @@ class GithubMeView(APIView): def delete(self, request): instance = CredentialsGithub.objects.filter(user=request.user).first() if not instance: - raise ValidationException('This user not have Github account associated with with account', - code=404, - slug='not-found') + raise ValidationException( + "This user not have Github account associated with with account", code=404, slug="not-found" + ) instance.delete() @@ -2443,45 +2576,51 @@ class AppUserView(APIView): permission_classes = [AllowAny] extensions = APIViewExtensions(paginate=True) - @scope(['read:user']) + @scope(["read:user"]) def get(self, request: LinkedHttpRequest, app: LinkedApp, token: LinkedToken, user_id=None): handler = self.extensions(request) lang = get_user_language(request) extra = {} if app.require_an_agreement: - extra['appuseragreement__app__id'] = app.id + extra["appuseragreement__app__id"] = app.id if token.sub: user = request.get_user() - extra['id'] = user.id + extra["id"] = user.id if user_id: - if 'id' in extra and extra['id'] != user_id: - raise ValidationException(translation(lang, - en='This user does not have access to this resource', - es='Este usuario no tiene acceso a este recurso'), - code=403, - 
slug='user-with-no-access', - silent=True) - - if 'id' not in extra: - extra['id'] = user_id + if "id" in extra and extra["id"] != user_id: + raise ValidationException( + translation( + lang, + en="This user does not have access to this resource", + es="Este usuario no tiene acceso a este recurso", + ), + code=403, + slug="user-with-no-access", + silent=True, + ) + + if "id" not in extra: + extra["id"] = user_id user = User.objects.filter(**extra).first() if not user: - raise ValidationException(translation(lang, en='User not found', es='Usuario no encontrado'), - code=404, - slug='user-not-found', - silent=True) + raise ValidationException( + translation(lang, en="User not found", es="Usuario no encontrado"), + code=404, + slug="user-not-found", + silent=True, + ) serializer = AppUserSerializer(user, many=False) return Response(serializer.data) - if not token.sub and (id := request.GET.get('id')): - extra['id'] = id + if not token.sub and (id := request.GET.get("id")): + extra["id"] = id - for key in ['email', 'username']: + for key in ["email", "username"]: if key in request.GET: extra[key] = request.GET.get(key) @@ -2528,47 +2667,51 @@ async def post(self, request, app_slug: str): # app = await aget_app(app_slug) async with Service(app_slug, user.id, proxy=True) as s: if s.app.require_an_agreement: - raise ValidationException(translation(lang, - en='Can\'t sync with an external app', - es='No se puede sincronizar con una aplicación externa', - slug='external-app'), - slug='external-app', - silent=True) + raise ValidationException( + translation( + lang, + en="Can't sync with an external app", + es="No se puede sincronizar con una aplicación externa", + slug="external-app", + ), + slug="external-app", + silent=True, + ) data = { - 'username': user.username, - 'first_name': user.first_name, - 'last_name': user.last_name, - 'email': user.email, - 'profile': None, - 'credentialsgithub': None, + "username": user.username, + "first_name": user.first_name, + "last_name": user.last_name, + "email": user.email, + "profile": None, + "credentialsgithub": None, } if profile := await self.aget_profile(): - data['profile'] = { - 'avatar_url': profile.avatar_url, - 'bio': profile.bio, - 'phone': profile.phone, - 'show_tutorial': profile.show_tutorial, - 'twitter_username': profile.twitter_username, - 'github_username': profile.github_username, - 'portfolio_url': profile.portfolio_url, - 'linkedin_url': profile.linkedin_url, - 'blog': profile.blog, + data["profile"] = { + "avatar_url": profile.avatar_url, + "bio": profile.bio, + "phone": profile.phone, + "show_tutorial": profile.show_tutorial, + "twitter_username": profile.twitter_username, + "github_username": profile.github_username, + "portfolio_url": profile.portfolio_url, + "linkedin_url": profile.linkedin_url, + "blog": profile.blog, } if github_credentials := await self.aget_github_credentials(): - data['credentialsgithub'] = { - 'github_id': github_credentials.github_id, - 'token': github_credentials.token, - 'email': github_credentials.email, - 'avatar_url': github_credentials.avatar_url, - 'name': github_credentials.name, - 'username': github_credentials.username, - 'blog': github_credentials.blog, - 'bio': github_credentials.bio, - 'company': github_credentials.company, - 'twitter_username': github_credentials.twitter_username, + data["credentialsgithub"] = { + "github_id": github_credentials.github_id, + "token": github_credentials.token, + "email": github_credentials.email, + "avatar_url": github_credentials.avatar_url, + "name": 
github_credentials.name, + "username": github_credentials.username, + "blog": github_credentials.blog, + "bio": github_credentials.bio, + "company": github_credentials.company, + "twitter_username": github_credentials.twitter_username, } - return await s.post('/v1/auth/app/user', data) + return await s.post("/v1/auth/app/user", data) diff --git a/breathecode/career/actions.py b/breathecode/career/actions.py index 1a715d7c1..8bc30c235 100644 --- a/breathecode/career/actions.py +++ b/breathecode/career/actions.py @@ -14,8 +14,8 @@ def run_spider(spider): if spider is None: - logger.error('First you must specify a spider (run_spider)') - raise ValidationException('First you must specify a spider', slug='missing-spider') + logger.error("First you must specify a spider (run_spider)") + raise ValidationException("First you must specify a spider", slug="missing-spider") platform = spider.zyte_project.platform.name class_scrapper = scraper_factory(platform) @@ -28,93 +28,102 @@ def run_spider(spider): position_alias.save() data = { - 'project': spider.zyte_project.zyte_api_deploy, - 'spider': spider.zyte_project.platform.name, - 'job': spider.job_search, + "project": spider.zyte_project.zyte_api_deploy, + "spider": spider.zyte_project.platform.name, + "job": spider.job_search, } - data['loc'] = spider.loc_search - response = requests.post('https://app.scrapinghub.com/api/run.json', - data=data, - auth=(spider.zyte_project.zyte_api_key, ''), - timeout=2) + data["loc"] = spider.loc_search + response = requests.post( + "https://app.scrapinghub.com/api/run.json", data=data, auth=(spider.zyte_project.zyte_api_key, ""), timeout=2 + ) result = response.json() - if result['status'] == 'error': - spider.spider_last_run_status = 'ERROR' - spider.spider_last_run_desc = f"The spider career ended error. ({result['message']} at " + str( - timezone.now()) + ')' + if result["status"] == "error": + spider.spider_last_run_status = "ERROR" + spider.spider_last_run_desc = ( + f"The spider career ended error. ({result['message']} at " + str(timezone.now()) + ")" + ) spider.save() logger.error(f'The spider ended error. 
Type error {result["message"]}') else: - spider.spider_last_run_status = 'SYNCHED' - spider.spider_last_run_desc = f'The execution of the spider was successful to {spider.name} at ' + str( - timezone.now()) + spider.spider_last_run_status = "SYNCHED" + spider.spider_last_run_desc = f"The execution of the spider was successful to {spider.name} at " + str( + timezone.now() + ) spider.save() - return (response.status_code == 200 and 'status' in result and result['status'] == 'ok', result) + return (response.status_code == 200 and "status" in result and result["status"] == "ok", result) def fetch_to_api(spider): if spider is None: - logger.debug('First you must specify a spider (fetch_to_api)') - raise ValidationException('First you must specify a spider', slug='without-spider') + logger.debug("First you must specify a spider (fetch_to_api)") + raise ValidationException("First you must specify a spider", slug="without-spider") params = ( - ('project', spider.zyte_project.zyte_api_deploy), - ('spider', spider.zyte_project.platform.name), - ('state', 'finished'), + ("project", spider.zyte_project.zyte_api_deploy), + ("spider", spider.zyte_project.platform.name), + ("state", "finished"), ) - res = requests.get('https://app.scrapinghub.com/api/jobs/list.json', - params=params, - auth=(spider.zyte_project.zyte_api_key, ''), - timeout=2).json() + res = requests.get( + "https://app.scrapinghub.com/api/jobs/list.json", + params=params, + auth=(spider.zyte_project.zyte_api_key, ""), + timeout=2, + ).json() return res def get_scraped_data_of_platform(spider, api_fetch): if spider is None: - logger.error('First you must specify a spider (get_scraped_data_of_platform)') - raise ValidationException('First you must specify a spider', slug='without-spider') + logger.error("First you must specify a spider (get_scraped_data_of_platform)") + raise ValidationException("First you must specify a spider", slug="without-spider") if api_fetch is None: - logger.error('I did not receive results from the API (get_scraped_data_of_platform)') - raise ValidationException('Is did not receive results from the API', slug='no-return-json-data') + logger.error("I did not receive results from the API (get_scraped_data_of_platform)") + raise ValidationException("Is did not receive results from the API", slug="no-return-json-data") platform = spider.zyte_project.platform.name class_scrapper = scraper_factory(platform) data_project = [] - for res_api_jobs in api_fetch['jobs']: - deploy, num_spider, num_job = class_scrapper.get_job_id_from_string(res_api_jobs['id']) + for res_api_jobs in api_fetch["jobs"]: + deploy, num_spider, num_job = class_scrapper.get_job_id_from_string(res_api_jobs["id"]) if num_spider == spider.zyte_spider_number and num_job >= spider.zyte_job_number: response = requests.get( f'https://storage.scrapinghub.com/items/{res_api_jobs["id"]}?apikey={spider.zyte_project.zyte_api_key}&format=json', - timeout=2) + timeout=2, + ) if response.status_code != 200: - spider.sync_status = 'ERROR' - spider.sync_desc = f'There was a {response.status_code} error fetching spider {spider.zyte_spider_number} job {num_spider} (get_scraped_data_of_platform)' + str( - timezone.now()) + spider.sync_status = "ERROR" + spider.sync_desc = ( + f"There was a {response.status_code} error fetching spider {spider.zyte_spider_number} job {num_spider} (get_scraped_data_of_platform)" + + str(timezone.now()) + ) spider.save() logger.error( - f'There was a {response.status_code} error fetching spider {spider.zyte_spider_number} job {num_spider} 
(get_scraped_data_of_platform)' + f"There was a {response.status_code} error fetching spider {spider.zyte_spider_number} job {num_spider} (get_scraped_data_of_platform)" ) raise ValidationException( - f'There was a {response.status_code} error fetching spider {spider.zyte_spider_number} job {num_spider}', - slug='bad-response-fetch') + f"There was a {response.status_code} error fetching spider {spider.zyte_spider_number} job {num_spider}", + slug="bad-response-fetch", + ) new_jobs = save_data(spider, response.json()) - data_project.append({ - 'status': 'ok', - 'platform_name': spider.zyte_project.platform.name, - 'num_spider': int(num_spider), - 'num_job': int(num_job), - 'jobs_saved': new_jobs - }) + data_project.append( + { + "status": "ok", + "platform_name": spider.zyte_project.platform.name, + "num_spider": int(num_spider), + "num_job": int(num_job), + "jobs_saved": new_jobs, + } + ) return data_project @@ -125,35 +134,35 @@ def save_data(spider, jobs): new_jobs = 0 for j in jobs: - locations, remote = class_scrapper.get_location_from_string(j['Location']) + locations, remote = class_scrapper.get_location_from_string(j["Location"]) location_pk = class_scrapper.get_pk_location(locations) - employer = class_scrapper.get_employer_from_string(j['Company_name']) + employer = class_scrapper.get_employer_from_string(j["Company_name"]) if employer is None: - employer = Employer(name=j['Company_name'], location=location_pk) + employer = Employer(name=j["Company_name"], location=location_pk) employer.save() - position = class_scrapper.get_position_from_string(j['Searched_job']) + position = class_scrapper.get_position_from_string(j["Searched_job"]) if position is None: - position = Position(name=j['Searched_job']) + position = Position(name=j["Searched_job"]) position.save() - position_alias = PositionAlias(name=j['Searched_job'], position=position) + position_alias = PositionAlias(name=j["Searched_job"], position=position) position_alias.save() - (min_salary, max_salary, salary_str) = class_scrapper.get_salary_from_string(j['Salary']) + (min_salary, max_salary, salary_str) = class_scrapper.get_salary_from_string(j["Salary"]) - save_tags = class_scrapper.get_tag_from_string(j['Tags']) + save_tags = class_scrapper.get_tag_from_string(j["Tags"]) - validate = class_scrapper.job_exist(j['Job_title'], j['Company_name']) + validate = class_scrapper.job_exist(j["Job_title"], j["Company_name"]) if validate is False: job = Job( - title=j['Job_title'], + title=j["Job_title"], spider=spider, - published_date_raw=j['Post_date'], - apply_url=j['Apply_to'], + published_date_raw=j["Post_date"], + apply_url=j["Apply_to"], salary=salary_str, - job_description=j['Job_description'], + job_description=j["Job_description"], min_salary=min_salary, max_salary=max_salary, remote=remote, @@ -177,8 +186,8 @@ def save_data(spider, jobs): def fetch_sync_all_data(spider): if spider is None: - logger.debug('First you must specify a spider (fetch_sync_all_data)') - raise ValidationException('First you must specify a spider', slug='without-spider') + logger.debug("First you must specify a spider (fetch_sync_all_data)") + raise ValidationException("First you must specify a spider", slug="without-spider") res = fetch_to_api(spider) data_jobs = get_scraped_data_of_platform(spider, res) @@ -191,9 +200,11 @@ def fetch_sync_all_data(spider): job_saved, job_number = jobs_info_saved spider.zyte_job_number = job_number spider.zyte_last_fetch_date = timezone.now() - spider.sync_status = 'SYNCHED' - spider.sync_desc = f"The spider's 
career ended successfully. Added {job_saved} new jobs to {spider.name} at " + str( - timezone.now()) + spider.sync_status = "SYNCHED" + spider.sync_desc = ( + f"The spider's career ended successfully. Added {job_saved} new jobs to {spider.name} at " + + str(timezone.now()) + ) spider.save() ZyteProject.objects.filter(id=spider.zyte_project.id).update(zyte_api_last_job_number=job_number) @@ -204,8 +215,8 @@ def fetch_sync_all_data(spider): def get_was_published_date_from_string(job): if job is None: - logger.error('First you must specify a job (get_was_published_date_from_string)') - raise ValidationException('First you must specify a job', slug='data-job-none') + logger.error("First you must specify a job (get_was_published_date_from_string)") + raise ValidationException("First you must specify a job", slug="data-job-none") platform = job.spider.zyte_project.platform.name class_scrapper = scraper_factory(platform) diff --git a/breathecode/career/admin.py b/breathecode/career/admin.py index 74a83fba4..495f45c68 100644 --- a/breathecode/career/admin.py +++ b/breathecode/career/admin.py @@ -1,6 +1,17 @@ import logging from django.contrib import admin -from .models import Platform, ZyteProject, Spider, Job, Employer, Position, PositionAlias, CareerTag, Location, LocationAlias +from .models import ( + Platform, + ZyteProject, + Spider, + Job, + Employer, + Position, + PositionAlias, + CareerTag, + Location, + LocationAlias, +) logger = logging.getLogger(__name__) @@ -8,56 +19,67 @@ # Register your models here. @admin.register(Platform) class PlatformAdmin(admin.ModelAdmin): - list_display = ('name', 'created_at') + list_display = ("name", "created_at") -@admin.display(description='Fetch sync all data.') +@admin.display(description="Fetch sync all data.") def fetch_sync_all_data_admin(modeladmin, request, queryset): from django.contrib import messages from .actions import fetch_sync_all_data + spiders = queryset.all() try: for s in spiders: fetch_sync_all_data(s) - messages.success(request, f'{s.sync_desc}') + messages.success(request, f"{s.sync_desc}") except Exception as e: - logger.error(f'There was an error retriving the spider {str(e)}') - messages.error(request, f'There was an error retriving the spider {str(e)}') + logger.error(f"There was an error retriving the spider {str(e)}") + messages.error(request, f"There was an error retriving the spider {str(e)}") -@admin.display(description='Run spider.') +@admin.display(description="Run spider.") def run_spider_admin(modeladmin, request, queryset): from django.contrib import messages from .actions import run_spider + spiders = queryset.all() try: for s in spiders: run_spider(s) - messages.success(request, f'The execution of the spider {s} was successful') + messages.success(request, f"The execution of the spider {s} was successful") except Exception as e: - message = f'There was an error retriving the spider {str(e)}' + message = f"There was an error retriving the spider {str(e)}" logger.error(message) messages.error(request, message) -@admin.display(description='Get was publiched date.') +@admin.display(description="Get was publiched date.") def get_was_published_date_from_string_admin(modeladmin, request, queryset): from django.contrib import messages from .actions import get_was_published_date_from_string + jobs = queryset.all() try: for job in jobs: get_was_published_date_from_string(job) - messages.success(request, 'The publication date was successfully parsed') + messages.success(request, "The publication date was successfully parsed") 
except Exception as e: - logger.error(f'There was an error retriving the jobs {str(e)}') - messages.error(request, f'There was an error retriving the jobs {str(e)}') + logger.error(f"There was an error retriving the jobs {str(e)}") + messages.error(request, f"There was an error retriving the jobs {str(e)}") @admin.register(Spider) class SpiderAdmin(admin.ModelAdmin): - list_display = ('name', 'job_search', 'position', 'spider_last_run_status', 'spider_last_run_desc', 'sync_status', - 'sync_desc', 'zyte_last_fetch_date') + list_display = ( + "name", + "job_search", + "position", + "spider_last_run_status", + "spider_last_run_desc", + "sync_status", + "sync_desc", + "zyte_last_fetch_date", + ) actions = ( fetch_sync_all_data_admin, run_spider_admin, @@ -66,41 +88,52 @@ class SpiderAdmin(admin.ModelAdmin): @admin.register(ZyteProject) class ZyteProjectAdmin(admin.ModelAdmin): - list_display = ('platform', 'zyte_api_key', 'zyte_api_deploy', 'created_at') + list_display = ("platform", "zyte_api_key", "zyte_api_deploy", "created_at") @admin.register(Job) class JobAdmin(admin.ModelAdmin): - list_display = ('title', 'spider', 'published_date_raw', 'salary', 'status', 'employer', 'position', 'apply_url', - 'currency', 'remote', 'created_at') - actions = (get_was_published_date_from_string_admin, ) + list_display = ( + "title", + "spider", + "published_date_raw", + "salary", + "status", + "employer", + "position", + "apply_url", + "currency", + "remote", + "created_at", + ) + actions = (get_was_published_date_from_string_admin,) @admin.register(Employer) class EmployerAdmin(admin.ModelAdmin): - list_display = ('name', 'location', 'created_at') + list_display = ("name", "location", "created_at") @admin.register(Position) class PositionAdmin(admin.ModelAdmin): - list_display = ('name', 'created_at') + list_display = ("name", "created_at") @admin.register(PositionAlias) class PositionAliasAdmin(admin.ModelAdmin): - list_display = ('name', 'position', 'created_at') + list_display = ("name", "position", "created_at") @admin.register(CareerTag) class TagAdmin(admin.ModelAdmin): - list_display = ('slug', 'created_at') + list_display = ("slug", "created_at") @admin.register(Location) class LocationAdmin(admin.ModelAdmin): - list_display = ('name', 'created_at') + list_display = ("name", "created_at") @admin.register(LocationAlias) class LocationAliasAdmin(admin.ModelAdmin): - list_display = ('name', 'location', 'created_at') + list_display = ("name", "location", "created_at") diff --git a/breathecode/career/apps.py b/breathecode/career/apps.py index f8350424c..e1ccf3cf4 100644 --- a/breathecode/career/apps.py +++ b/breathecode/career/apps.py @@ -2,5 +2,5 @@ class CareerConfig(AppConfig): - default_auto_field = 'django.db.models.BigAutoField' - name = 'breathecode.career' + default_auto_field = "django.db.models.BigAutoField" + name = "breathecode.career" diff --git a/breathecode/career/management/commands/curl_all_spider.py b/breathecode/career/management/commands/curl_all_spider.py index 38b8fd312..043c8fb7b 100644 --- a/breathecode/career/management/commands/curl_all_spider.py +++ b/breathecode/career/management/commands/curl_all_spider.py @@ -5,23 +5,32 @@ class Command(BaseCommand): - help = 'Synchronize from run_spider. Do not receive arguments.' + help = "Synchronize from run_spider. Do not receive arguments." 
def handle(self, *args, **options): now = timezone.now() spiders = Spider.objects.all() count = 0 for spi in spiders: - if spi.zyte_project.zyte_api_deploy is None or spi.zyte_project.zyte_api_deploy == '' or spi.zyte_spider_number is None or spi.zyte_spider_number == '' or spi.zyte_job_number is None or spi.zyte_job_number == '' or spi.zyte_project.zyte_api_key is None or spi.zyte_project.zyte_api_key == '': - spi.sync_status = 'ERROR' + if ( + spi.zyte_project.zyte_api_deploy is None + or spi.zyte_project.zyte_api_deploy == "" + or spi.zyte_spider_number is None + or spi.zyte_spider_number == "" + or spi.zyte_job_number is None + or spi.zyte_job_number == "" + or spi.zyte_project.zyte_api_key is None + or spi.zyte_project.zyte_api_key == "" + ): + spi.sync_status = "ERROR" spi.sync_desc = "Missing the spider's args (Invalid args)" spi.save() - self.stdout.write(self.style.ERROR(f'Spider {str(spi)} is missing async_fetch_sync_all_data key or ID')) + self.stdout.write(self.style.ERROR(f"Spider {str(spi)} is missing async_fetch_sync_all_data key or ID")) else: - spi.sync_status = 'PENDING' - spi.sync_desc = 'Running run_spider command at ' + str(now) + spi.sync_status = "PENDING" + spi.sync_desc = "Running run_spider command at " + str(now) spi.save() - async_fetch_sync_all_data.delay({'spi_id': spi.id}) + async_fetch_sync_all_data.delay({"spi_id": spi.id}) count = count + 1 - self.stdout.write(self.style.SUCCESS(f'Enqueued {count} of {len(spiders)} for async fetch all spiders')) + self.stdout.write(self.style.SUCCESS(f"Enqueued {count} of {len(spiders)} for async fetch all spiders")) diff --git a/breathecode/career/management/commands/run_spiders.py b/breathecode/career/management/commands/run_spiders.py index 261006cb0..f65ae43e5 100644 --- a/breathecode/career/management/commands/run_spiders.py +++ b/breathecode/career/management/commands/run_spiders.py @@ -5,23 +5,30 @@ class Command(BaseCommand): - help = 'Synchronize from run_spider. Do not receive arguments.' + help = "Synchronize from run_spider. Do not receive arguments." 
def handle(self, *args, **options): now = timezone.now() spiders = Spider.objects.all() count = 0 for spi in spiders: - if spi.job is None or spi.job == '' or spi.zyte_project is None or spi.zyte_project == '' or spi.zyte_spider_number is None or spi.zyte_spider_number == '': - spi.sync_status = 'ERROR' - spi.sync_desc = 'Missing run_spider key or id' + if ( + spi.job is None + or spi.job == "" + or spi.zyte_project is None + or spi.zyte_project == "" + or spi.zyte_spider_number is None + or spi.zyte_spider_number == "" + ): + spi.sync_status = "ERROR" + spi.sync_desc = "Missing run_spider key or id" spi.save() - self.stdout.write(self.style.ERROR(f'Spider {str(spi)} is missing run_spider key or ID')) + self.stdout.write(self.style.ERROR(f"Spider {str(spi)} is missing run_spider key or ID")) else: - spi.sync_status = 'PENDING' - spi.sync_desc = 'Running run_spider command at ' + str(now) + spi.sync_status = "PENDING" + spi.sync_desc = "Running run_spider command at " + str(now) spi.save() - async_run_spider.delay({'spi_id': spi.id}) + async_run_spider.delay({"spi_id": spi.id}) count = count + 1 - self.stdout.write(self.style.SUCCESS(f'Enqueued {count} of {len(spiders)} for sync spider')) + self.stdout.write(self.style.SUCCESS(f"Enqueued {count} of {len(spiders)} for sync spider")) diff --git a/breathecode/career/migrations/0001_initial.py b/breathecode/career/migrations/0001_initial.py index 11abc7c66..4918f25ee 100644 --- a/breathecode/career/migrations/0001_initial.py +++ b/breathecode/career/migrations/0001_initial.py @@ -11,12 +11,12 @@ class Migration(migrations.Migration): operations = [ migrations.CreateModel( - name='Platform', + name="Platform", fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('name', models.CharField(max_length=150)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), + ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("name", models.CharField(max_length=150)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), ], ), ] diff --git a/breathecode/career/migrations/0002_auto_20220404_1939.py b/breathecode/career/migrations/0002_auto_20220404_1939.py index b4e64f80a..7c091f91e 100644 --- a/breathecode/career/migrations/0002_auto_20220404_1939.py +++ b/breathecode/career/migrations/0002_auto_20220404_1939.py @@ -7,172 +7,219 @@ class Migration(migrations.Migration): dependencies = [ - ('career', '0001_initial'), + ("career", "0001_initial"), ] operations = [ migrations.CreateModel( - name='CareerTag', + name="CareerTag", fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('slug', models.SlugField(blank=True, max_length=200, null=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), + ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("slug", models.SlugField(blank=True, max_length=200, null=True)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), ], ), migrations.CreateModel( - name='Employer', + name="Employer", fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('name', models.CharField(max_length=100)), - 
('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), + ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("name", models.CharField(max_length=100)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), ], ), migrations.CreateModel( - name='Location', + name="Location", fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('name', models.CharField(max_length=100, unique=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), + ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("name", models.CharField(max_length=100, unique=True)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), ], ), migrations.CreateModel( - name='Position', + name="Position", fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('name', models.CharField(max_length=100, unique=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), + ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("name", models.CharField(max_length=100, unique=True)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), ], ), migrations.CreateModel( - name='ZyteProject', + name="ZyteProject", fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('zyte_api_key', models.CharField(max_length=150)), - ('zyte_api_deploy', models.CharField(max_length=50)), - ('zyte_api_spider_number', - models.IntegerField( - help_text='This number is the one that corresponds when the ZYTE spider was created.')), - ('zyte_api_last_job_number', - models.IntegerField(blank=True, - default=0, - help_text='(Optional field) Start at 0 but increase with each search.', - null=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('platform', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='career.platform')), + ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("zyte_api_key", models.CharField(max_length=150)), + ("zyte_api_deploy", models.CharField(max_length=50)), + ( + "zyte_api_spider_number", + models.IntegerField( + help_text="This number is the one that corresponds when the ZYTE spider was created." 
+ ), + ), + ( + "zyte_api_last_job_number", + models.IntegerField( + blank=True, + default=0, + help_text="(Optional field) Start at 0 but increase with each search.", + null=True, + ), + ), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("platform", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="career.platform")), ], ), migrations.CreateModel( - name='Spider', + name="Spider", fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('name', models.CharField(max_length=150)), - ('job_search', models.CharField(max_length=150)), - ('loc_search', - models.CharField(blank=True, - help_text='This field may be optional on some platforms.', - max_length=150, - null=True)), - ('zyte_spider_number', - models.IntegerField(blank=True, default=0, help_text='This number must be copy from ZYTE', null=True)), - ('zyte_job_number', - models.IntegerField(blank=True, - default=0, - help_text='Start at 0 but increase on each fetch', - null=True)), - ('zyte_fetch_count', - models.IntegerField(default=0, help_text='The number of spider job excecutions to fetch')), - ('zyte_last_fetch_date', models.DateTimeField(blank=True, null=True)), - ('spider_last_run_status', - models.CharField(choices=[('SYNCHED', 'Synched'), ('PENDING', 'Pending'), ('WARNING', 'Warning'), - ('ERROR', 'Error')], - default='PENDING', - max_length=15)), - ('spider_last_run_desc', models.CharField(blank=True, max_length=200, null=True)), - ('sync_status', - models.CharField(choices=[('SYNCHED', 'Synched'), ('PENDING', 'Pending'), ('WARNING', 'Warning'), - ('ERROR', 'Error')], - default='PENDING', - max_length=15)), - ('sync_desc', models.CharField(blank=True, max_length=200, null=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('position', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='career.position')), - ('zyte_project', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, - to='career.zyteproject')), + ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("name", models.CharField(max_length=150)), + ("job_search", models.CharField(max_length=150)), + ( + "loc_search", + models.CharField( + blank=True, help_text="This field may be optional on some platforms.", max_length=150, null=True + ), + ), + ( + "zyte_spider_number", + models.IntegerField( + blank=True, default=0, help_text="This number must be copy from ZYTE", null=True + ), + ), + ( + "zyte_job_number", + models.IntegerField( + blank=True, default=0, help_text="Start at 0 but increase on each fetch", null=True + ), + ), + ( + "zyte_fetch_count", + models.IntegerField(default=0, help_text="The number of spider job excecutions to fetch"), + ), + ("zyte_last_fetch_date", models.DateTimeField(blank=True, null=True)), + ( + "spider_last_run_status", + models.CharField( + choices=[ + ("SYNCHED", "Synched"), + ("PENDING", "Pending"), + ("WARNING", "Warning"), + ("ERROR", "Error"), + ], + default="PENDING", + max_length=15, + ), + ), + ("spider_last_run_desc", models.CharField(blank=True, max_length=200, null=True)), + ( + "sync_status", + models.CharField( + choices=[ + ("SYNCHED", "Synched"), + ("PENDING", "Pending"), + ("WARNING", "Warning"), + ("ERROR", "Error"), + ], + default="PENDING", + max_length=15, + ), + ), + ("sync_desc", models.CharField(blank=True, max_length=200, 
null=True)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("position", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="career.position")), + ( + "zyte_project", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="career.zyteproject"), + ), ], ), migrations.CreateModel( - name='PositionAlias', + name="PositionAlias", fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('name', models.CharField(max_length=100)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('position', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='career.position')), + ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("name", models.CharField(max_length=100)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("position", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="career.position")), ], ), migrations.CreateModel( - name='LocationAlias', + name="LocationAlias", fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('name', models.CharField(max_length=100)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('location', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='career.location')), + ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("name", models.CharField(max_length=100)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("location", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="career.location")), ], ), migrations.CreateModel( - name='Job', + name="Job", fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('title', models.CharField(max_length=150)), - ('published_date_raw', models.CharField(max_length=50)), - ('published_date_processed', models.DateTimeField(blank=True, default=None, null=True)), - ('status', - models.CharField(choices=[('OPENED', 'Opened'), ('FILLED', 'Filled')], default='OPENED', - max_length=15)), - ('apply_url', models.URLField(max_length=500)), - ('currency', - models.CharField(blank=True, - choices=[('USD', 'USD'), ('CRC', 'CRC'), ('CLP', 'CLP'), ('EUR', 'EUR'), - ('UYU', 'UYU')], - default='USD', - max_length=3)), - ('min_salary', models.FloatField(blank=True, null=True)), - ('max_salary', models.FloatField(blank=True, null=True)), - ('salary', models.CharField(blank=True, max_length=253, null=True)), - ('job_description', models.TextField(blank=True, null=True)), - ('job_type', - models.CharField(choices=[('Full-time', 'Full-time'), ('Internship', 'Internship'), - ('Part-time', 'Part-time'), ('Temporary', 'Temporary'), - ('Contract', 'Contract')], - default='Full-time', - max_length=15)), - ('remote', models.BooleanField(default=False, verbose_name='Remote')), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('career_tags', models.ManyToManyField(blank=True, null=True, to='career.CareerTag')), - ('employer', - models.ForeignKey(blank=True, - null=True, - 
on_delete=django.db.models.deletion.CASCADE, - to='career.employer')), - ('locations', models.ManyToManyField(blank=True, null=True, to='career.Location')), - ('position', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='career.position')), - ('spider', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='career.spider')), + ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("title", models.CharField(max_length=150)), + ("published_date_raw", models.CharField(max_length=50)), + ("published_date_processed", models.DateTimeField(blank=True, default=None, null=True)), + ( + "status", + models.CharField( + choices=[("OPENED", "Opened"), ("FILLED", "Filled")], default="OPENED", max_length=15 + ), + ), + ("apply_url", models.URLField(max_length=500)), + ( + "currency", + models.CharField( + blank=True, + choices=[("USD", "USD"), ("CRC", "CRC"), ("CLP", "CLP"), ("EUR", "EUR"), ("UYU", "UYU")], + default="USD", + max_length=3, + ), + ), + ("min_salary", models.FloatField(blank=True, null=True)), + ("max_salary", models.FloatField(blank=True, null=True)), + ("salary", models.CharField(blank=True, max_length=253, null=True)), + ("job_description", models.TextField(blank=True, null=True)), + ( + "job_type", + models.CharField( + choices=[ + ("Full-time", "Full-time"), + ("Internship", "Internship"), + ("Part-time", "Part-time"), + ("Temporary", "Temporary"), + ("Contract", "Contract"), + ], + default="Full-time", + max_length=15, + ), + ), + ("remote", models.BooleanField(default=False, verbose_name="Remote")), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("career_tags", models.ManyToManyField(blank=True, null=True, to="career.CareerTag")), + ( + "employer", + models.ForeignKey( + blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to="career.employer" + ), + ), + ("locations", models.ManyToManyField(blank=True, null=True, to="career.Location")), + ("position", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="career.position")), + ("spider", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="career.spider")), ], ), migrations.AddField( - model_name='employer', - name='location', - field=models.ForeignKey(blank=True, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='career.location'), + model_name="employer", + name="location", + field=models.ForeignKey( + blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to="career.location" + ), ), ] diff --git a/breathecode/career/migrations/0003_auto_20220629_2026.py b/breathecode/career/migrations/0003_auto_20220629_2026.py index 3cf09082c..5d9885b1f 100644 --- a/breathecode/career/migrations/0003_auto_20220629_2026.py +++ b/breathecode/career/migrations/0003_auto_20220629_2026.py @@ -6,18 +6,18 @@ class Migration(migrations.Migration): dependencies = [ - ('career', '0002_auto_20220404_1939'), + ("career", "0002_auto_20220404_1939"), ] operations = [ migrations.AlterField( - model_name='job', - name='career_tags', - field=models.ManyToManyField(blank=True, to='career.CareerTag'), + model_name="job", + name="career_tags", + field=models.ManyToManyField(blank=True, to="career.CareerTag"), ), migrations.AlterField( - model_name='job', - name='locations', - field=models.ManyToManyField(blank=True, to='career.Location'), + model_name="job", + name="locations", + field=models.ManyToManyField(blank=True, to="career.Location"), ), ] 
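Note on the reformatted sync flow at the top of this diff: the management command's handle() marks each eligible Spider as PENDING and enqueues the Celery task async_run_spider with the spider id. The sketch below mirrors that same flow from a Django shell; it is an illustration only, assuming the breathecode.career app is installed and at least one Spider row already has its Zyte identifiers set (the management command's own name is not shown in this hunk).

# Sketch only: exercises the same PENDING -> async_run_spider flow as handle() above.
from django.utils import timezone

from breathecode.career.models import Spider
from breathecode.career.tasks import async_run_spider

now = timezone.now()
spider = Spider.objects.first()
if spider is not None and spider.zyte_spider_number:  # mirrors part of the guard in handle()
    spider.sync_status = "PENDING"
    spider.sync_desc = "Running run_spider command at " + str(now)
    spider.save()
    # Celery task defined in breathecode/career/tasks.py; it re-fetches the spider by id.
    async_run_spider.delay({"spi_id": spider.id})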
diff --git a/breathecode/career/models.py b/breathecode/career/models.py index f0d47be4e..dd9da6bbd 100644 --- a/breathecode/career/models.py +++ b/breathecode/career/models.py @@ -8,7 +8,7 @@ class Platform(models.Model): updated_at = models.DateTimeField(auto_now=True, editable=False) def __str__(self): - return f'{self.name} ({self.id})' + return f"{self.name} ({self.id})" class Position(models.Model): @@ -17,34 +17,36 @@ class Position(models.Model): updated_at = models.DateTimeField(auto_now=True, editable=False) def __str__(self): - return f'{self.name} ({self.id})' + return f"{self.name} ({self.id})" class ZyteProject(models.Model): zyte_api_key = models.CharField(max_length=150) zyte_api_deploy = models.CharField(max_length=50) zyte_api_spider_number = models.IntegerField( - null=False, blank=False, help_text='This number is the one that corresponds when the ZYTE spider was created.') + null=False, blank=False, help_text="This number is the one that corresponds when the ZYTE spider was created." + ) zyte_api_last_job_number = models.IntegerField( - default=0, null=True, blank=True, help_text='(Optional field) Start at 0 but increase with each search.') + default=0, null=True, blank=True, help_text="(Optional field) Start at 0 but increase with each search." + ) platform = models.ForeignKey(Platform, on_delete=models.CASCADE, null=False, blank=False) created_at = models.DateTimeField(auto_now_add=True, editable=False) updated_at = models.DateTimeField(auto_now=True, editable=False) def __str__(self): - return f'{self.platform} {self.zyte_api_key} {self.zyte_api_deploy} ({self.id})' + return f"{self.platform} {self.zyte_api_key} {self.zyte_api_deploy} ({self.id})" -SYNCHED = 'SYNCHED' -PENDING = 'PENDING' -WARNING = 'WARNING' -ERROR = 'ERROR' +SYNCHED = "SYNCHED" +PENDING = "PENDING" +WARNING = "WARNING" +ERROR = "ERROR" SPIDER_STATUS = ( - (SYNCHED, 'Synched'), - (PENDING, 'Pending'), - (WARNING, 'Warning'), - (ERROR, 'Error'), + (SYNCHED, "Synched"), + (PENDING, "Pending"), + (WARNING, "Warning"), + (ERROR, "Error"), ) @@ -55,20 +57,17 @@ class Spider(models.Model): name = models.CharField(max_length=150) position = models.ForeignKey(Position, on_delete=models.CASCADE, null=False, blank=False) job_search = models.CharField(max_length=150) - loc_search = models.CharField(max_length=150, - null=True, - blank=True, - help_text='This field may be optional on some platforms.') + loc_search = models.CharField( + max_length=150, null=True, blank=True, help_text="This field may be optional on some platforms." 
+ ) zyte_project = models.ForeignKey(ZyteProject, on_delete=models.CASCADE, null=False, blank=False) - zyte_spider_number = models.IntegerField(default=0, - null=True, - blank=True, - help_text='This number must be copy from ZYTE') - zyte_job_number = models.IntegerField(default=0, - null=True, - blank=True, - help_text='Start at 0 but increase on each fetch') - zyte_fetch_count = models.IntegerField(default=0, help_text='The number of spider job excecutions to fetch') + zyte_spider_number = models.IntegerField( + default=0, null=True, blank=True, help_text="This number must be copy from ZYTE" + ) + zyte_job_number = models.IntegerField( + default=0, null=True, blank=True, help_text="Start at 0 but increase on each fetch" + ) + zyte_fetch_count = models.IntegerField(default=0, help_text="The number of spider job excecutions to fetch") zyte_last_fetch_date = models.DateTimeField(null=True, blank=True) spider_last_run_status = models.CharField(max_length=15, choices=SPIDER_STATUS, default=PENDING) spider_last_run_desc = models.CharField(max_length=200, null=True, blank=True) @@ -83,7 +82,7 @@ def clean(self): self.zyte_job_number = self.zyte_project.zyte_api_last_job_number def __str__(self): - return f'{self.name} ({self.id})' + return f"{self.name} ({self.id})" class PositionAlias(models.Model): @@ -93,7 +92,7 @@ class PositionAlias(models.Model): updated_at = models.DateTimeField(auto_now=True, editable=False) def __str__(self): - return f'{self.name} ({self.id})' + return f"{self.name} ({self.id})" class CareerTag(models.Model): @@ -102,7 +101,7 @@ class CareerTag(models.Model): updated_at = models.DateTimeField(auto_now=True, editable=False) def __str__(self): - return f'{self.slug} ({self.id})' + return f"{self.slug} ({self.id})" class Location(models.Model): @@ -111,7 +110,7 @@ class Location(models.Model): updated_at = models.DateTimeField(auto_now=True, editable=False) def __str__(self): - return f'{self.name} ({self.id})' + return f"{self.name} ({self.id})" class LocationAlias(models.Model): @@ -121,7 +120,7 @@ class LocationAlias(models.Model): updated_at = models.DateTimeField(auto_now=True, editable=False) def __str__(self): - return f'{self.name} ({self.id})' + return f"{self.name} ({self.id})" class Employer(models.Model): @@ -131,40 +130,40 @@ class Employer(models.Model): updated_at = models.DateTimeField(auto_now=True, editable=False) def __str__(self): - return f'{self.name} {self.name} ({self.id})' + return f"{self.name} {self.name} ({self.id})" -OPENED = 'OPENED' -FILLED = 'FILLED' +OPENED = "OPENED" +FILLED = "FILLED" JOB_STATUS = ( - (OPENED, 'Opened'), - (FILLED, 'Filled'), + (OPENED, "Opened"), + (FILLED, "Filled"), ) -FULLTIME = 'Full-time' -INTERNSHIP = 'Internship' -PARTTIME = 'Part-time' -TEMPORARY = 'Temporary' -CONTRACT = 'Contract' +FULLTIME = "Full-time" +INTERNSHIP = "Internship" +PARTTIME = "Part-time" +TEMPORARY = "Temporary" +CONTRACT = "Contract" JOB_TYPE = ( - (FULLTIME, 'Full-time'), - (INTERNSHIP, 'Internship'), - (PARTTIME, 'Part-time'), - (TEMPORARY, 'Temporary'), - (CONTRACT, 'Contract'), + (FULLTIME, "Full-time"), + (INTERNSHIP, "Internship"), + (PARTTIME, "Part-time"), + (TEMPORARY, "Temporary"), + (CONTRACT, "Contract"), ) -USD = 'USD' # United States dollar -CRC = 'CRC' # Costa Rican colón -CLP = 'CLP' # Chilean peso -EUR = 'EUR' # Euro -UYU = 'UYU' # Uruguayan peso +USD = "USD" # United States dollar +CRC = "CRC" # Costa Rican colón +CLP = "CLP" # Chilean peso +EUR = "EUR" # Euro +UYU = "UYU" # Uruguayan peso CURRENCIES = ( - (USD, 'USD'), - 
(CRC, 'CRC'), - (CLP, 'CLP'), - (EUR, 'EUR'), - (UYU, 'UYU'), + (USD, "USD"), + (CRC, "CRC"), + (CLP, "CLP"), + (EUR, "EUR"), + (UYU, "UYU"), ) @@ -181,7 +180,7 @@ class Job(models.Model): salary = models.CharField(max_length=253, null=True, blank=True) job_description = models.TextField(null=True, blank=True) job_type = models.CharField(max_length=15, choices=JOB_TYPE, default=FULLTIME) - remote = models.BooleanField(default=False, verbose_name='Remote') + remote = models.BooleanField(default=False, verbose_name="Remote") employer = models.ForeignKey(Employer, on_delete=models.CASCADE, null=True, blank=True) position = models.ForeignKey(Position, on_delete=models.CASCADE, null=False, blank=False) career_tags = models.ManyToManyField(CareerTag, blank=True) @@ -190,4 +189,4 @@ class Job(models.Model): updated_at = models.DateTimeField(auto_now=True, editable=False) def __str__(self): - return f'{self.title} ({self.id})' + return f"{self.title} ({self.id})" diff --git a/breathecode/career/services/__init__.py b/breathecode/career/services/__init__.py index a862f8b14..6c60b9904 100644 --- a/breathecode/career/services/__init__.py +++ b/breathecode/career/services/__init__.py @@ -6,8 +6,10 @@ def scraper_factory(service: str): import importlib + try: - return getattr(importlib.import_module('breathecode.career.services.' + service.lower()), - service.capitalize() + 'Scraper') + return getattr( + importlib.import_module("breathecode.career.services." + service.lower()), service.capitalize() + "Scraper" + ) except Exception as e: - logger.error(f'There was an error import the library - {str(e)}') + logger.error(f"There was an error import the library - {str(e)}") diff --git a/breathecode/career/services/base_scraper.py b/breathecode/career/services/base_scraper.py index 3d3e48d1b..40ed15504 100644 --- a/breathecode/career/services/base_scraper.py +++ b/breathecode/career/services/base_scraper.py @@ -46,7 +46,7 @@ def get_employer_from_string(cls, keyword: str): @classmethod def save_tag(cls, keyword: str): - tag_slug = keyword.replace(' ', '-').replace('.', '-').lower() + tag_slug = keyword.replace(" ", "-").replace(".", "-").lower() tag = CareerTag.objects.filter(slug__iexact=tag_slug).first() if tag is None: @@ -86,7 +86,7 @@ def get_salary_format_from_string(cls, salary: str): @classmethod def get_job_id_from_string(cls, string: str): if string: - regex = r'^(\d{1,9})\/(\d{1,3})\/(\d{1,3})$' + regex = r"^(\d{1,9})\/(\d{1,3})\/(\d{1,3})$" result = re.findall(regex, string).pop() deploy, num_spider, num_job = result return (int(deploy), int(num_spider), int(num_job)) @@ -95,12 +95,12 @@ def get_job_id_from_string(cls, string: str): def get_info_amount_jobs_saved(cls, data: list): items = 0 - num_job = data[0]['num_job'] + num_job = data[0]["num_job"] if isinstance(data, list): for dat in data: - if num_job < dat['num_job']: - num_job = dat['num_job'] + if num_job < dat["num_job"]: + num_job = dat["num_job"] - items += dat['jobs_saved'] + items += dat["jobs_saved"] return (items, num_job) diff --git a/breathecode/career/services/getonboard.py b/breathecode/career/services/getonboard.py index 32193ef4e..4e84e5749 100644 --- a/breathecode/career/services/getonboard.py +++ b/breathecode/career/services/getonboard.py @@ -8,7 +8,7 @@ class GetonboardScraper(BaseScraper): @classmethod def get_location_from_string(cls, text: str): if text is None: - text = 'Remote' + text = "Remote" for regex in _cases_location: findings = re.findall(regex, text) @@ -16,9 +16,9 @@ def get_location_from_string(cls, 
text: str): locations = _cases_location[regex](findings, text) remote = False - if 'Remote' in locations: + if "Remote" in locations: remote = True - locations.remove('Remote') + locations.remove("Remote") if isinstance(locations, list): locations = [cls.save_location(x) for x in locations] @@ -29,21 +29,21 @@ def get_location_from_string(cls, text: str): def get_salary_from_string(cls, salary): min_salary = 0 max_salary = 0 - salary_str = 'Not supplied' + salary_str = "Not supplied" - if salary is not None and salary != 'Not supplied' and salary != 'Remote': + if salary is not None and salary != "Not supplied" and salary != "Remote": salary = cls.get_salary_format_from_string(salary) if salary: min_salary = float(salary[0]) * 12 max_salary = float(salary[1]) * 12 - salary_str = f'${min_salary} - ${max_salary} a year.' + salary_str = f"${min_salary} - ${max_salary} a year." return (min_salary, max_salary, salary_str) @classmethod def get_tag_from_string(cls, tags: list): if not tags: - tags = ['web-developer'] + tags = ["web-developer"] if isinstance(tags, list): tags = [cls.save_tag(x) for x in tags] diff --git a/breathecode/career/services/indeed.py b/breathecode/career/services/indeed.py index d44578956..fe6fb1f40 100644 --- a/breathecode/career/services/indeed.py +++ b/breathecode/career/services/indeed.py @@ -8,7 +8,7 @@ class IndeedScraper(BaseScraper): @classmethod def get_location_from_string(cls, text: str): if text is None: - text = 'Remote' + text = "Remote" for regex in _cases_location: findings = re.findall(regex, text) @@ -16,9 +16,9 @@ def get_location_from_string(cls, text: str): locations = _cases_location[regex](findings, text) remote = False - if 'Remote' in locations: + if "Remote" in locations: remote = True - locations.remove('Remote') + locations.remove("Remote") if isinstance(locations, list): locations = [cls.save_location(x) for x in locations] @@ -29,21 +29,21 @@ def get_location_from_string(cls, text: str): def get_salary_from_string(cls, salary): min_salary = 0 max_salary = 0 - salary_str = 'Not supplied' + salary_str = "Not supplied" - if salary is not None and salary != 'Not supplied': + if salary is not None and salary != "Not supplied": salary = cls.get_salary_format_from_string(salary) if salary: min_salary = float(salary[0]) max_salary = float(salary[1]) - salary_str = f'${min_salary} - ${max_salary} a year.' + salary_str = f"${min_salary} - ${max_salary} a year." 
return (min_salary, max_salary, salary_str) @classmethod def get_tag_from_string(cls, tags: list): if not tags: - tags = ['web-developer'] + tags = ["web-developer"] if isinstance(tags, list): tags = [cls.save_tag(x) for x in tags] diff --git a/breathecode/career/services/regex.py b/breathecode/career/services/regex.py index 50018333c..2874876ef 100644 --- a/breathecode/career/services/regex.py +++ b/breathecode/career/services/regex.py @@ -13,31 +13,33 @@ def today(): def change_format_to_date(findings, string_date): - dtz = datetime.strptime(string_date, '%B %d, %Y') + dtz = datetime.strptime(string_date, "%B %d, %Y") return timezone.make_aware(dtz) def location_format(findings, string_loc): job_id_fecth = list(findings.pop()) - v = ''.join(job_id_fecth[1]) - result = v.split(',') - location = [job_id_fecth[0].replace('.', '').strip().capitalize()] + v = "".join(job_id_fecth[1]) + result = v.split(",") + location = [job_id_fecth[0].replace(".", "").strip().capitalize()] loc = location for tag in result: loc.append( - tag.replace(' o ', ',').replace(';', - ',').replace('-', - '\', \'').replace('\'', - '').replace('temporarily remote', - 'Remote').strip()) + tag.replace(" o ", ",") + .replace(";", ",") + .replace("-", "', '") + .replace("'", "") + .replace("temporarily remote", "Remote") + .strip() + ) return loc def get_remote_from_strin(findings, string_loc): - if string_loc == '.' or string_loc == ')' or string_loc == '(' or string_loc == '' or string_loc == None: - string_loc = 'Remote' + if string_loc == "." or string_loc == ")" or string_loc == "(" or string_loc == "" or string_loc == None: + string_loc = "Remote" return [string_loc.strip()] @@ -47,7 +49,7 @@ def salary(findings, string_salary): val = [] for sal in salary: - val += [sal.replace('$', '').replace('K', '').replace(',', '').strip()] + val += [sal.replace("$", "").replace("K", "").replace(",", "").strip()] return val @@ -57,7 +59,7 @@ def salary_month(findings, string_salary): val = [] for sal in salary: - val += [sal.replace('$', '').replace('K', '').strip()] + val += [sal.replace("$", "").replace("K", "").strip()] return val @@ -67,29 +69,29 @@ def salary_month_only_one(findings, string_salary): val = [] for sal in salary: - val += [sal.replace('$', '').replace('K', '').replace(',', '').strip()] + val += [sal.replace("$", "").replace("K", "").replace(",", "").strip()] - val += '0' + val += "0" return val _cases_date = { - r'^(?:Active\s)?(\d{1,2})\+? days? ago': days_ago_to_date, - r'(.*\s?\d{1,2}\+?,? \d{1,4})': change_format_to_date, - r'^today': lambda *args, **kwargs: today(), - r'^Today': lambda *args, **kwargs: today(), - r'^Just posted': lambda *args, **kwargs: today(), - r'^just posted': lambda *args, **kwargs: today(), + r"^(?:Active\s)?(\d{1,2})\+? days? ago": days_ago_to_date, + r"(.*\s?\d{1,2}\+?,? \d{1,4})": change_format_to_date, + r"^today": lambda *args, **kwargs: today(), + r"^Today": lambda *args, **kwargs: today(), + r"^Just posted": lambda *args, **kwargs: today(), + r"^just posted": lambda *args, **kwargs: today(), } _cases_location = { - r'(.*\s)?\((.*)\)': location_format, - r'^\s?(.*)': get_remote_from_strin, + r"(.*\s)?\((.*)\)": location_format, + r"^\s?(.*)": get_remote_from_strin, } _cases_salary = { - r'^(.*)\s?-\s(.*)\+? a? year': salary, - r'^(.*)\s?to\s(.*)\+? per? year': salary, - r'^(.*)\s?-\s(.*)\+? USD/month': salary_month, - r'^(.*)\s?\+? USD/month': salary_month_only_one, + r"^(.*)\s?-\s(.*)\+? a? year": salary, + r"^(.*)\s?to\s(.*)\+? per? year": salary, + r"^(.*)\s?-\s(.*)\+? 
USD/month": salary_month, + r"^(.*)\s?\+? USD/month": salary_month_only_one, } diff --git a/breathecode/career/tasks.py b/breathecode/career/tasks.py index 26b1fcf2c..2a20c6ea2 100644 --- a/breathecode/career/tasks.py +++ b/breathecode/career/tasks.py @@ -14,15 +14,15 @@ def async_run_spider(self, args): from .actions import run_spider - logger.error('Starting async_run_spider') + logger.error("Starting async_run_spider") now = timezone.now() - spider = Spider.objects.filter(id=args['spi_id']).first() + spider = Spider.objects.filter(id=args["spi_id"]).first() result = run_spider(spider) if result: - logger.error(f'Starting async_run_spider in spider name {spider.name}') - spider.spider_last_run_status = 'SYNCHED' - spider.spider_last_run_desc = 'The run of the spider ended successfully command at ' + str(now) + logger.error(f"Starting async_run_spider in spider name {spider.name}") + spider.spider_last_run_status = "SYNCHED" + spider.spider_last_run_desc = "The run of the spider ended successfully command at " + str(now) spider.save() @@ -30,14 +30,14 @@ def async_run_spider(self, args): def async_fetch_sync_all_data(self, args): from .actions import fetch_sync_all_data - logger.error('Starting async_fetch_sync_all_data') + logger.error("Starting async_fetch_sync_all_data") now = timezone.now() - spider = Spider.objects.filter(id=args['spi_id']).first() + spider = Spider.objects.filter(id=args["spi_id"]).first() result = fetch_sync_all_data(spider) if result: - message = f'Starting async_fetch_sync_all_data in spider name {spider.name}' + message = f"Starting async_fetch_sync_all_data in spider name {spider.name}" logger.error(message) - spider.sync_status = 'SYNCHED' + spider.sync_status = "SYNCHED" spider.sync_desc = message + str(now) spider.save() diff --git a/breathecode/career/tests/actions/tests_fetch_sync_all_data.py b/breathecode/career/tests/actions/tests_fetch_sync_all_data.py index 009ff076c..c11a55fd3 100644 --- a/breathecode/career/tests/actions/tests_fetch_sync_all_data.py +++ b/breathecode/career/tests/actions/tests_fetch_sync_all_data.py @@ -8,167 +8,164 @@ ) DATA = { - 'status': - 'ok', - 'count': - 3, - 'total': - 3, - 'jobs': [{ - 'priority': 2, - 'tags': [], - 'version': '2f9f2a5-master', - 'state': 'finished', - 'spider_type': 'manual', - 'spider': 'indeed', - 'spider_args': { - 'job': 'front end', - 'loc': 'remote' + "status": "ok", + "count": 3, + "total": 3, + "jobs": [ + { + "priority": 2, + "tags": [], + "version": "2f9f2a5-master", + "state": "finished", + "spider_type": "manual", + "spider": "indeed", + "spider_args": {"job": "front end", "loc": "remote"}, + "close_reason": "finished", + "elapsed": 609370879, + "logs": 74, + "id": "223344/2/72", + "started_time": "2022-01-02T22:56:02", + "updated_time": "2022-01-02T23:53:52", + "items_scraped": 227, + "errors_count": 0, + "responses_received": 555, }, - 'close_reason': 'finished', - 'elapsed': 609370879, - 'logs': 74, - 'id': '223344/2/72', - 'started_time': '2022-01-02T22:56:02', - 'updated_time': '2022-01-02T23:53:52', - 'items_scraped': 227, - 'errors_count': 0, - 'responses_received': 555 - }, { - 'priority': 2, - 'tags': [], - 'version': '2f9f2a5-master', - 'state': 'finished', - 'spider_type': 'manual', - 'spider': 'indeed', - 'spider_args': { - 'job': 'front end', - 'loc': 'remote' + { + "priority": 2, + "tags": [], + "version": "2f9f2a5-master", + "state": "finished", + "spider_type": "manual", + "spider": "indeed", + "spider_args": {"job": "front end", "loc": "remote"}, + "close_reason": "finished", + 
"elapsed": 609370879, + "logs": 75, + "id": "223344/2/75", + "started_time": "2022-01-02T22:56:02", + "updated_time": "2022-01-02T23:53:52", + "items_scraped": 227, + "errors_count": 0, + "responses_received": 555, }, - 'close_reason': 'finished', - 'elapsed': 609370879, - 'logs': 75, - 'id': '223344/2/75', - 'started_time': '2022-01-02T22:56:02', - 'updated_time': '2022-01-02T23:53:52', - 'items_scraped': 227, - 'errors_count': 0, - 'responses_received': 555 - }] + ], } DATA1 = { - 'status': - 'ok', - 'count': - 1, - 'total': - 1, - 'jobs': [{ - 'priority': 2, - 'tags': [], - 'version': '2f9f2a5-master', - 'state': 'finished', - 'spider_type': 'manual', - 'spider': 'indeed', - 'spider_args': { - 'job': 'front end', - 'loc': 'remote' - }, - 'close_reason': 'finished', - 'elapsed': 609370879, - 'logs': 74, - 'id': '223344/2/72', - 'started_time': '2022-01-02T22:56:02', - 'updated_time': '2022-01-02T23:53:52', - 'items_scraped': 227, - 'errors_count': 0, - 'responses_received': 555 - }] + "status": "ok", + "count": 1, + "total": 1, + "jobs": [ + { + "priority": 2, + "tags": [], + "version": "2f9f2a5-master", + "state": "finished", + "spider_type": "manual", + "spider": "indeed", + "spider_args": {"job": "front end", "loc": "remote"}, + "close_reason": "finished", + "elapsed": 609370879, + "logs": 74, + "id": "223344/2/72", + "started_time": "2022-01-02T22:56:02", + "updated_time": "2022-01-02T23:53:52", + "items_scraped": 227, + "errors_count": 0, + "responses_received": 555, + } + ], } -JOBS = [{ - 'Searched_job': 'ruby', - 'Job_title': '.Net Core Developer', - 'Location': 'New Orleans, LA', - 'Company_name': 'Revelry Labs', - 'Post_date': '8 days ago', - 'Extract_date': '2022-02-17', - 'Job_description': 'Net Core Developer who has experience with .net Core, C#, and SQL Server Database experience.', - 'Salary': '', - 'Tags': [], - 'Apply_to': - 'https://www.indeed.com/company/Revelry/jobs/Net-Core-Developer-a8e4e600cb716fb7?fccid=89b6cc7775dbcb2b&vjs=3', - '_type': 'dict' -}, { - 'Searched_job': 'ruby', - 'Job_title': 'Junior DevOps Engineer', - 'Location': 'Remote', - 'Company_name': 'Clear Labs', - 'Post_date': '2 days ago', - 'Extract_date': '2022-02-17', - 'Job_description': 'We are looking for a qualified engineer for a full time Junior DevOps Role.', - 'Salary': '', - 'Tags': [], - 'Apply_to': - 'https://www.indeed.com/company/Clear-Labs/jobs/Junior-Devop-Engineer-71a0689ea2bd8cb1?fccid=250710b384a27cb1&vjs=3', - '_type': 'dict' -}] - -JOBS2 = [{ - 'Searched_job': 'ruby', - 'Job_title': '.Net Core Developer', - 'Location': 'New Orleans, LA', - 'Company_name': 'Revelry Labs', - 'Post_date': '8 days ago', - 'Extract_date': '2022-02-17', - 'Job_description': 'Net Core Developer who has experience with .net Core, C#, and SQL Server Database experience.', - 'Salary': '', - 'Tags': [], - 'Apply_to': - 'https://www.indeed.com/company/Revelry/jobs/Net-Core-Developer-a8e4e600cb716fb7?fccid=89b6cc7775dbcb2b&vjs=3', - '_type': 'dict' -}, { - 'Searched_job': 'ruby', - 'Job_title': 'Junior DevOps Engineer', - 'Location': 'Remote', - 'Company_name': 'Clear Labs', - 'Post_date': '2 days ago', - 'Extract_date': '2022-02-17', - 'Job_description': 'We are looking for a qualified engineer for a full time Junior DevOps Role.', - 'Salary': '', - 'Tags': [], - 'Apply_to': - 'https://www.indeed.com/company/Clear-Labs/jobs/Junior-Devop-Engineer-71a0689ea2bd8cb1?fccid=250710b384a27cb1&vjs=3', - '_type': 'dict' -}] - -spider = {'name': 'indeed', 'zyte_spider_number': 2, 'zyte_job_number': 0} -zyte_project = 
{'zyte_api_key': 1234567, 'zyte_api_deploy': 223344} -platform = {'name': 'indeed'} +JOBS = [ + { + "Searched_job": "ruby", + "Job_title": ".Net Core Developer", + "Location": "New Orleans, LA", + "Company_name": "Revelry Labs", + "Post_date": "8 days ago", + "Extract_date": "2022-02-17", + "Job_description": "Net Core Developer who has experience with .net Core, C#, and SQL Server Database experience.", + "Salary": "", + "Tags": [], + "Apply_to": "https://www.indeed.com/company/Revelry/jobs/Net-Core-Developer-a8e4e600cb716fb7?fccid=89b6cc7775dbcb2b&vjs=3", + "_type": "dict", + }, + { + "Searched_job": "ruby", + "Job_title": "Junior DevOps Engineer", + "Location": "Remote", + "Company_name": "Clear Labs", + "Post_date": "2 days ago", + "Extract_date": "2022-02-17", + "Job_description": "We are looking for a qualified engineer for a full time Junior DevOps Role.", + "Salary": "", + "Tags": [], + "Apply_to": "https://www.indeed.com/company/Clear-Labs/jobs/Junior-Devop-Engineer-71a0689ea2bd8cb1?fccid=250710b384a27cb1&vjs=3", + "_type": "dict", + }, +] + +JOBS2 = [ + { + "Searched_job": "ruby", + "Job_title": ".Net Core Developer", + "Location": "New Orleans, LA", + "Company_name": "Revelry Labs", + "Post_date": "8 days ago", + "Extract_date": "2022-02-17", + "Job_description": "Net Core Developer who has experience with .net Core, C#, and SQL Server Database experience.", + "Salary": "", + "Tags": [], + "Apply_to": "https://www.indeed.com/company/Revelry/jobs/Net-Core-Developer-a8e4e600cb716fb7?fccid=89b6cc7775dbcb2b&vjs=3", + "_type": "dict", + }, + { + "Searched_job": "ruby", + "Job_title": "Junior DevOps Engineer", + "Location": "Remote", + "Company_name": "Clear Labs", + "Post_date": "2 days ago", + "Extract_date": "2022-02-17", + "Job_description": "We are looking for a qualified engineer for a full time Junior DevOps Role.", + "Salary": "", + "Tags": [], + "Apply_to": "https://www.indeed.com/company/Clear-Labs/jobs/Junior-Devop-Engineer-71a0689ea2bd8cb1?fccid=250710b384a27cb1&vjs=3", + "_type": "dict", + }, +] + +spider = {"name": "indeed", "zyte_spider_number": 2, "zyte_job_number": 0} +zyte_project = {"zyte_api_key": 1234567, "zyte_api_deploy": 223344} +platform = {"name": "indeed"} class ActionTestFetchSyncAllDataAdminTestCase(CareerTestCase): - @patch('logging.Logger.debug', MagicMock()) + @patch("logging.Logger.debug", MagicMock()) def test_fetch_funtion___with_zero_spider(self): from logging import Logger + try: fetch_sync_all_data(None) assert False except Exception as e: - self.assertEqual(Logger.debug.call_args_list, - [call('First you must specify a spider (fetch_sync_all_data)')]) - self.assertEqual(str(e), 'without-spider') + self.assertEqual( + Logger.debug.call_args_list, [call("First you must specify a spider (fetch_sync_all_data)")] + ) + self.assertEqual(str(e), "without-spider") @patch( - REQUESTS_PATH['get'], - apply_requests_get_mock([ - (200, 'https://app.scrapinghub.com/api/jobs/list.json', DATA), - (200, 'https://storage.scrapinghub.com/items/223344/2/72?apikey=1234567&format=json', JOBS), - (200, 'https://storage.scrapinghub.com/items/223344/2/75?apikey=1234567&format=json', JOBS2) - ])) + REQUESTS_PATH["get"], + apply_requests_get_mock( + [ + (200, "https://app.scrapinghub.com/api/jobs/list.json", DATA), + (200, "https://storage.scrapinghub.com/items/223344/2/72?apikey=1234567&format=json", JOBS), + (200, "https://storage.scrapinghub.com/items/223344/2/75?apikey=1234567&format=json", JOBS2), + ] + ), + ) def 
test_fetch_funtion__with_one_spider_two_requests(self): import requests @@ -177,20 +174,29 @@ def test_fetch_funtion__with_one_spider_two_requests(self): result = fetch_sync_all_data(model.spider) self.assertEqual(result, DATA) - self.assertEqual(requests.get.call_args_list, [ - call('https://app.scrapinghub.com/api/jobs/list.json', - params=(('project', '223344'), ('spider', 'indeed'), ('state', 'finished')), - auth=('1234567', ''), - timeout=2), - call('https://storage.scrapinghub.com/items/223344/2/72?apikey=1234567&format=json', timeout=2), - call('https://storage.scrapinghub.com/items/223344/2/75?apikey=1234567&format=json', timeout=2) - ]) + self.assertEqual( + requests.get.call_args_list, + [ + call( + "https://app.scrapinghub.com/api/jobs/list.json", + params=(("project", "223344"), ("spider", "indeed"), ("state", "finished")), + auth=("1234567", ""), + timeout=2, + ), + call("https://storage.scrapinghub.com/items/223344/2/72?apikey=1234567&format=json", timeout=2), + call("https://storage.scrapinghub.com/items/223344/2/75?apikey=1234567&format=json", timeout=2), + ], + ) @patch( - REQUESTS_PATH['get'], - apply_requests_get_mock([(200, 'https://app.scrapinghub.com/api/jobs/list.json', DATA1), - (200, 'https://storage.scrapinghub.com/items/223344/2/72?apikey=1234567&format=json', - JOBS)])) + REQUESTS_PATH["get"], + apply_requests_get_mock( + [ + (200, "https://app.scrapinghub.com/api/jobs/list.json", DATA1), + (200, "https://storage.scrapinghub.com/items/223344/2/72?apikey=1234567&format=json", JOBS), + ] + ), + ) def test_verify_fetch_funtions_was_called(self): import requests @@ -200,10 +206,15 @@ def test_verify_fetch_funtions_was_called(self): requests.get.assert_called() self.assertEqual(result, DATA1) - self.assertEqual(requests.get.call_args_list, [ - call('https://app.scrapinghub.com/api/jobs/list.json', - params=(('project', '223344'), ('spider', 'indeed'), ('state', 'finished')), - auth=('1234567', ''), - timeout=2), - call('https://storage.scrapinghub.com/items/223344/2/72?apikey=1234567&format=json', timeout=2), - ]) + self.assertEqual( + requests.get.call_args_list, + [ + call( + "https://app.scrapinghub.com/api/jobs/list.json", + params=(("project", "223344"), ("spider", "indeed"), ("state", "finished")), + auth=("1234567", ""), + timeout=2, + ), + call("https://storage.scrapinghub.com/items/223344/2/72?apikey=1234567&format=json", timeout=2), + ], + ) diff --git a/breathecode/career/tests/actions/tests_fetch_to_api.py b/breathecode/career/tests/actions/tests_fetch_to_api.py index 880bf1cd5..4440fae5b 100644 --- a/breathecode/career/tests/actions/tests_fetch_to_api.py +++ b/breathecode/career/tests/actions/tests_fetch_to_api.py @@ -8,91 +8,85 @@ ) DATA = { - 'status': - 'ok', - 'count': - 3, - 'total': - 3, - 'jobs': [{ - 'priority': 2, - 'tags': [], - 'version': '2f9f2a5-master', - 'state': 'finished', - 'spider_type': 'manual', - 'spider': 'indeed', - 'spider_args': { - 'job': 'front end', - 'loc': 'remote' + "status": "ok", + "count": 3, + "total": 3, + "jobs": [ + { + "priority": 2, + "tags": [], + "version": "2f9f2a5-master", + "state": "finished", + "spider_type": "manual", + "spider": "indeed", + "spider_args": {"job": "front end", "loc": "remote"}, + "close_reason": "finished", + "elapsed": 609370879, + "logs": 74, + "id": "223344/2/72", + "started_time": "2022-01-02T22:56:02", + "updated_time": "2022-01-02T23:53:52", + "items_scraped": 227, + "errors_count": 0, + "responses_received": 555, }, - 'close_reason': 'finished', - 'elapsed': 609370879, - 'logs': 74, 
- 'id': '223344/2/72', - 'started_time': '2022-01-02T22:56:02', - 'updated_time': '2022-01-02T23:53:52', - 'items_scraped': 227, - 'errors_count': 0, - 'responses_received': 555 - }, { - 'priority': 2, - 'tags': [], - 'version': '2f9f2a5-master', - 'state': 'finished', - 'spider_type': 'manual', - 'spider': 'indeed', - 'spider_args': { - 'job': 'go', - 'loc': 'remote' + { + "priority": 2, + "tags": [], + "version": "2f9f2a5-master", + "state": "finished", + "spider_type": "manual", + "spider": "indeed", + "spider_args": {"job": "go", "loc": "remote"}, + "close_reason": "finished", + "elapsed": 646146617, + "logs": 18, + "id": "223344/2/71", + "started_time": "2022-01-02T13:40:20", + "updated_time": "2022-01-02T13:40:57", + "items_scraped": 0, + "errors_count": 0, + "responses_received": 2, }, - 'close_reason': 'finished', - 'elapsed': 646146617, - 'logs': 18, - 'id': '223344/2/71', - 'started_time': '2022-01-02T13:40:20', - 'updated_time': '2022-01-02T13:40:57', - 'items_scraped': 0, - 'errors_count': 0, - 'responses_received': 2 - }, { - 'priority': 2, - 'tags': [], - 'version': '2f9f2a5-master', - 'state': 'finished', - 'spider_type': 'manual', - 'spider': 'indeed', - 'spider_args': { - 'job': 'web developer', - 'loc': 'remote' + { + "priority": 2, + "tags": [], + "version": "2f9f2a5-master", + "state": "finished", + "spider_type": "manual", + "spider": "indeed", + "spider_args": {"job": "web developer", "loc": "remote"}, + "close_reason": "finished", + "elapsed": 647281256, + "logs": 25, + "id": "223344/2/70", + "started_time": "2022-01-02T13:15:17", + "updated_time": "2022-01-02T13:22:03", + "items_scraped": 0, + "errors_count": 2, + "responses_received": 0, }, - 'close_reason': 'finished', - 'elapsed': 647281256, - 'logs': 25, - 'id': '223344/2/70', - 'started_time': '2022-01-02T13:15:17', - 'updated_time': '2022-01-02T13:22:03', - 'items_scraped': 0, - 'errors_count': 2, - 'responses_received': 0 - }] + ], } class ActionTestfetchToApiTestCase(CareerTestCase): - @patch('logging.Logger.debug', MagicMock()) + @patch("logging.Logger.debug", MagicMock()) def test_fetch_to_api__without_spider(self): from logging import Logger + try: fetch_to_api(None) assert False except Exception as e: - self.assertEqual(str(e), ('without-spider')) - self.assertEqual(Logger.debug.call_args_list, [call('First you must specify a spider (fetch_to_api)')]) + self.assertEqual(str(e), ("without-spider")) + self.assertEqual(Logger.debug.call_args_list, [call("First you must specify a spider (fetch_to_api)")]) - @patch(REQUESTS_PATH['get'], - apply_requests_get_mock([(200, 'https://app.scrapinghub.com/api/jobs/list.json', DATA)])) + @patch( + REQUESTS_PATH["get"], apply_requests_get_mock([(200, "https://app.scrapinghub.com/api/jobs/list.json", DATA)]) + ) def test_status_ok_fetch_to_api__whith_data(self): import requests @@ -100,82 +94,81 @@ def test_status_ok_fetch_to_api__whith_data(self): result = fetch_to_api(model.spider) self.assertEqual( - result, { - 'status': - 'ok', - 'count': - 3, - 'total': - 3, - 'jobs': [{ - 'priority': 2, - 'tags': [], - 'version': '2f9f2a5-master', - 'state': 'finished', - 'spider_type': 'manual', - 'spider': 'indeed', - 'spider_args': { - 'job': 'front end', - 'loc': 'remote' + result, + { + "status": "ok", + "count": 3, + "total": 3, + "jobs": [ + { + "priority": 2, + "tags": [], + "version": "2f9f2a5-master", + "state": "finished", + "spider_type": "manual", + "spider": "indeed", + "spider_args": {"job": "front end", "loc": "remote"}, + "close_reason": "finished", + "elapsed": 
609370879, + "logs": 74, + "id": "223344/2/72", + "started_time": "2022-01-02T22:56:02", + "updated_time": "2022-01-02T23:53:52", + "items_scraped": 227, + "errors_count": 0, + "responses_received": 555, }, - 'close_reason': 'finished', - 'elapsed': 609370879, - 'logs': 74, - 'id': '223344/2/72', - 'started_time': '2022-01-02T22:56:02', - 'updated_time': '2022-01-02T23:53:52', - 'items_scraped': 227, - 'errors_count': 0, - 'responses_received': 555 - }, { - 'priority': 2, - 'tags': [], - 'version': '2f9f2a5-master', - 'state': 'finished', - 'spider_type': 'manual', - 'spider': 'indeed', - 'spider_args': { - 'job': 'go', - 'loc': 'remote' + { + "priority": 2, + "tags": [], + "version": "2f9f2a5-master", + "state": "finished", + "spider_type": "manual", + "spider": "indeed", + "spider_args": {"job": "go", "loc": "remote"}, + "close_reason": "finished", + "elapsed": 646146617, + "logs": 18, + "id": "223344/2/71", + "started_time": "2022-01-02T13:40:20", + "updated_time": "2022-01-02T13:40:57", + "items_scraped": 0, + "errors_count": 0, + "responses_received": 2, }, - 'close_reason': 'finished', - 'elapsed': 646146617, - 'logs': 18, - 'id': '223344/2/71', - 'started_time': '2022-01-02T13:40:20', - 'updated_time': '2022-01-02T13:40:57', - 'items_scraped': 0, - 'errors_count': 0, - 'responses_received': 2 - }, { - 'priority': 2, - 'tags': [], - 'version': '2f9f2a5-master', - 'state': 'finished', - 'spider_type': 'manual', - 'spider': 'indeed', - 'spider_args': { - 'job': 'web developer', - 'loc': 'remote' + { + "priority": 2, + "tags": [], + "version": "2f9f2a5-master", + "state": "finished", + "spider_type": "manual", + "spider": "indeed", + "spider_args": {"job": "web developer", "loc": "remote"}, + "close_reason": "finished", + "elapsed": 647281256, + "logs": 25, + "id": "223344/2/70", + "started_time": "2022-01-02T13:15:17", + "updated_time": "2022-01-02T13:22:03", + "items_scraped": 0, + "errors_count": 2, + "responses_received": 0, }, - 'close_reason': 'finished', - 'elapsed': 647281256, - 'logs': 25, - 'id': '223344/2/70', - 'started_time': '2022-01-02T13:15:17', - 'updated_time': '2022-01-02T13:22:03', - 'items_scraped': 0, - 'errors_count': 2, - 'responses_received': 0 - }] - }) - self.assertEqual(requests.get.call_args_list, [ - call('https://app.scrapinghub.com/api/jobs/list.json', - params=( - ('project', model.zyte_project.zyte_api_deploy), - ('spider', model.zyte_project.platform.name), - ('state', 'finished'), - ), - auth=(model.zyte_project.zyte_api_key, ''), - timeout=2) - ]) + ], + }, + ) + self.assertEqual( + requests.get.call_args_list, + [ + call( + "https://app.scrapinghub.com/api/jobs/list.json", + params=( + ("project", model.zyte_project.zyte_api_deploy), + ("spider", model.zyte_project.platform.name), + ("state", "finished"), + ), + auth=(model.zyte_project.zyte_api_key, ""), + timeout=2, + ) + ], + ) diff --git a/breathecode/career/tests/actions/tests_get_job_id_from_string.py b/breathecode/career/tests/actions/tests_get_job_id_from_string.py index fc37bc714..8f10bf745 100644 --- a/breathecode/career/tests/actions/tests_get_job_id_from_string.py +++ b/breathecode/career/tests/actions/tests_get_job_id_from_string.py @@ -6,9 +6,9 @@ class ActionGetDateFromStringTestCase(CareerTestCase): def test_get_job_id_from_string_is_empty(self): - result = BaseScraper.get_job_id_from_string('') + result = BaseScraper.get_job_id_from_string("") self.assertEqual(result, None) def test_get_job_id_from_string_with_id(self): - result = BaseScraper.get_job_id_from_string('223344/6/25') + 
result = BaseScraper.get_job_id_from_string("223344/6/25") self.assertEqual(result, (223344, 6, 25)) diff --git a/breathecode/career/tests/actions/tests_get_salary_from_string.py b/breathecode/career/tests/actions/tests_get_salary_from_string.py index 6d2bb8d2b..01c125b60 100644 --- a/breathecode/career/tests/actions/tests_get_salary_from_string.py +++ b/breathecode/career/tests/actions/tests_get_salary_from_string.py @@ -11,55 +11,55 @@ class ActionGetSalaryFromStringTestCase(CareerTestCase): def test_get_salary_from_string__with_salary_month(self): - platform = 'getonboard' - salary = '$2700 - 3700 USD/month' + platform = "getonboard" + salary = "$2700 - 3700 USD/month" result = GetonboardScraper.get_salary_from_string(salary) (min_salary, max_salary, salary_str) = result self.assertEqual(min_salary, 32400) self.assertEqual(max_salary, 44400) - self.assertEqual(salary_str, '$32400.0 - $44400.0 a year.') + self.assertEqual(salary_str, "$32400.0 - $44400.0 a year.") def test_get_salary_from_string__with_salary_is_null(self): - platform = 'getonboard' + platform = "getonboard" salary = None result = GetonboardScraper.get_salary_from_string(salary) (min_salary, max_salary, salary_str) = result self.assertEqual(min_salary, 0) self.assertEqual(max_salary, 0) - self.assertEqual(salary_str, 'Not supplied') + self.assertEqual(salary_str, "Not supplied") def test_get_salary_from_string__with_salary_and_platform_is_other(self): - platform = 'indeed' - salary = '$32400.0 - $44400.0 a year.' + platform = "indeed" + salary = "$32400.0 - $44400.0 a year." result = IndeedScraper.get_salary_from_string(salary) (min_salary, max_salary, salary_str) = result self.assertEqual(min_salary, 32400) self.assertEqual(max_salary, 44400) - self.assertEqual(salary_str, '$32400.0 - $44400.0 a year.') + self.assertEqual(salary_str, "$32400.0 - $44400.0 a year.") def test_get_salary_from_string_is_null_and_platform_is_other(self): - platform = 'indeed' + platform = "indeed" salary = None result = IndeedScraper.get_salary_from_string(salary) (min_salary, max_salary, salary_str) = result self.assertEqual(min_salary, 0) self.assertEqual(max_salary, 0) - self.assertEqual(salary_str, 'Not supplied') + self.assertEqual(salary_str, "Not supplied") def test_get_salary_from_string__with_only_salary(self): - platform = 'getonboard' - salary = '$2700 USD/month' + platform = "getonboard" + salary = "$2700 USD/month" result = GetonboardScraper.get_salary_from_string(salary) (min_salary, max_salary, salary_str) = result self.assertEqual(min_salary, 32400) self.assertEqual(max_salary, 0.0) - self.assertEqual(salary_str, '$32400.0 - $0.0 a year.') + self.assertEqual(salary_str, "$32400.0 - $0.0 a year.") def test_get_salary_from_string__with_salary_bad_format(self): - platform = 'getonboard' - salary = '$2700 - K3700 USD/month' + platform = "getonboard" + salary = "$2700 - K3700 USD/month" result = GetonboardScraper.get_salary_from_string(salary) (min_salary, max_salary, salary_str) = result self.assertEqual(min_salary, 32400) self.assertEqual(max_salary, 44400) - self.assertEqual(salary_str, '$32400.0 - $44400.0 a year.') + self.assertEqual(salary_str, "$32400.0 - $44400.0 a year.") diff --git a/breathecode/career/tests/actions/tests_get_scraped_data_of_platform.py b/breathecode/career/tests/actions/tests_get_scraped_data_of_platform.py index 43d5ffe95..07dabe01b 100644 --- a/breathecode/career/tests/actions/tests_get_scraped_data_of_platform.py +++ b/breathecode/career/tests/actions/tests_get_scraped_data_of_platform.py @@ -9,231 
+9,249 @@ ) DATA = { - 'status': - 'ok', - 'count': - 3, - 'total': - 3, - 'jobs': [{ - 'priority': 2, - 'tags': [], - 'version': '2f9f2a5-master', - 'state': 'finished', - 'spider_type': 'manual', - 'spider': 'indeed', - 'spider_args': { - 'job': 'front end', - 'loc': 'remote' + "status": "ok", + "count": 3, + "total": 3, + "jobs": [ + { + "priority": 2, + "tags": [], + "version": "2f9f2a5-master", + "state": "finished", + "spider_type": "manual", + "spider": "indeed", + "spider_args": {"job": "front end", "loc": "remote"}, + "close_reason": "finished", + "elapsed": 609370879, + "logs": 74, + "id": "223344/2/72", + "started_time": "2022-01-02T22:56:02", + "updated_time": "2022-01-02T23:53:52", + "items_scraped": 227, + "errors_count": 0, + "responses_received": 555, }, - 'close_reason': 'finished', - 'elapsed': 609370879, - 'logs': 74, - 'id': '223344/2/72', - 'started_time': '2022-01-02T22:56:02', - 'updated_time': '2022-01-02T23:53:52', - 'items_scraped': 227, - 'errors_count': 0, - 'responses_received': 555 - }, { - 'priority': 2, - 'tags': [], - 'version': '2f9f2a5-master', - 'state': 'finished', - 'spider_type': 'manual', - 'spider': 'getonboard', - 'spider_args': { - 'job': 'go', - 'loc': 'remote' + { + "priority": 2, + "tags": [], + "version": "2f9f2a5-master", + "state": "finished", + "spider_type": "manual", + "spider": "getonboard", + "spider_args": {"job": "go", "loc": "remote"}, + "close_reason": "finished", + "elapsed": 646146617, + "logs": 18, + "id": "223344/3/35", + "started_time": "2022-01-02T13:40:20", + "updated_time": "2022-01-02T13:40:57", + "items_scraped": 6, + "errors_count": 0, + "responses_received": 2, }, - 'close_reason': 'finished', - 'elapsed': 646146617, - 'logs': 18, - 'id': '223344/3/35', - 'started_time': '2022-01-02T13:40:20', - 'updated_time': '2022-01-02T13:40:57', - 'items_scraped': 6, - 'errors_count': 0, - 'responses_received': 2 - }, { - 'priority': 2, - 'tags': [], - 'version': '2f9f2a5-master', - 'state': 'finished', - 'spider_type': 'manual', - 'spider': 'getonboard', - 'spider_args': { - 'job': 'web developer', - 'loc': 'remote' + { + "priority": 2, + "tags": [], + "version": "2f9f2a5-master", + "state": "finished", + "spider_type": "manual", + "spider": "getonboard", + "spider_args": {"job": "web developer", "loc": "remote"}, + "close_reason": "finished", + "elapsed": 647281256, + "logs": 25, + "id": "223344/3/34", + "started_time": "2022-01-02T13:15:17", + "updated_time": "2022-01-02T13:22:03", + "items_scraped": 3, + "errors_count": 2, + "responses_received": 0, }, - 'close_reason': 'finished', - 'elapsed': 647281256, - 'logs': 25, - 'id': '223344/3/34', - 'started_time': '2022-01-02T13:15:17', - 'updated_time': '2022-01-02T13:22:03', - 'items_scraped': 3, - 'errors_count': 2, - 'responses_received': 0 - }] + ], } -JOBS = [{ - 'Searched_job': 'junior web developer', - 'Job_title': 'Desarrollador Full-Stack', - 'Location': 'Santiago (temporarily remote)', - 'Company_name': 'Centry', - 'Post_date': 'January 19, 2022', - 'Extract_date': '2022-01-30', - 'Job_description': '', - 'Salary': '$1800 - 2100 USD/month', - 'Tags': ['api', 'back-end', 'full-stack', 'git', 'java', 'mvc', 'python', 'ruby'], - 'Apply_to': 'https://www.getonbrd.com/jobs/programming/desarrollador-full-stack-developer-centry-santiago', - '_type': 'dict' -}, { - 'Searched_job': 'junior web developer', - 'Job_title': 'Desarrollador Full-Stack Python/React', - 'Location': 'Remote', - 'Company_name': 'Alluxi', - 'Post_date': 'January 14, 2022', - 'Extract_date': '2022-01-30', - 
'Job_description': - 'Al menos 1 año de experiencia trabajando con Python y Django Al menos 1 año de experiencia trabajando con React.js Experiencia desarrollando APIs REST Ingles Conversacional Buscamos un desarrollador responsable, autodidacta, proactivo, eficiente y organizado.', - 'Salary': '$1800 - 2000 USD/month', - 'Tags': ['api', 'back-end', 'django', 'english', 'front-end', 'full-stack', 'javascript', 'python', 'react'], - 'Apply_to': 'https://www.getonbrd.com/jobs/programming/desarrollodor-fullstack-python-react-alluxi-remote', - '_type': 'dict' -}, { - 'Searched_job': - 'junior web developer', - 'Job_title': - 'Full-Stack Developer', - 'Location': - 'Santiago', - 'Company_name': - 'AAXIS Commerce', - 'Post_date': - 'January 17, 2022', - 'Extract_date': - '2022-01-30', - 'Job_description': - 'Four-year degree in any computer science-related field or equivalent experience. At least 3-year solid front-end developer as well as back-end full stack developer. Relevant experience working with PHP/Symfony (if it is in Magento or Oro Commerce, even better). Familiar with responsive/adaptive design and mobile development best practices. Web and mobile development, familiar with front+back end developing and data interaction. Experience with Express, Redis. and Node.js, mainframe (React, Angular, Knockout) preferred for React.', - 'Salary': - 'Not supplied', - 'Tags': [ - 'angularjs', 'back-end', 'express', 'front-end', 'full-stack', 'javascript', 'magento', 'mobile development', - 'node.js', 'php', 'react', 'redis', 'responsive', 'symfony', 'ui design' - ], - 'Apply_to': - 'https://www.getonbrd.com/jobs/programming/full-stack-developer-aaxis-commerce-santiago-3c8e', - '_type': - 'dict' -}, { - 'Searched_job': 'junior web developer', - 'Job_title': 'Pentester Cybersecurity', - 'Location': 'Remote (Chile)', - 'Company_name': 'Rule 1 Ventures', - 'Post_date': 'November 05, 2021', - 'Extract_date': '2022-01-30', - 'Job_description': 'Vuln exploitation Security reports', - 'Salary': 'Not supplied', - 'Tags': ['back-end', 'cybersecurity', 'english', 'pentesting', 'python'], - 'Apply_to': 'https://www.getonbrd.com/jobs/cybersecurity/security-engineer-rule-1-ventures-remote', - '_type': 'dict' -}, { - 'Searched_job': 'junior web developer', - 'Job_title': 'Pentester Cybersecurity', - 'Location': 'Remote (Chile, Venezuela)', - 'Company_name': 'Rule 1 Ventures', - 'Post_date': 'November 05, 2021', - 'Extract_date': '2022-01-30', - 'Job_description': 'Vuln exploitation Security reports', - 'Salary': 'Not supplied', - 'Tags': ['back-end', 'cybersecurity', 'english', 'pentesting', 'python'], - 'Apply_to': 'https://www.getonbrd.com/jobs/cybersecurity/security-engineer-rule-1-ventures-remote', - '_type': 'dict' -}, { - 'Searched_job': 'junior web developer', - 'Job_title': 'Front-end Developer', - 'Location': 'Lima', - 'Company_name': 'ID Business Intelligence', - 'Post_date': 'January 24, 2022', - 'Extract_date': '2022-01-30', - 'Job_description': - 'Manejo de Git Flow. 
(~°-°)~ Dominar a profundidad CSS y JS (mínimo 1 año) Experiencia con React Experiencia consumiendo Web Service (Rest) Preocuparse por entregar productos de calidad.', - 'Salary': 'Not supplied', - 'Tags': ['api', 'css', 'front-end', 'git', 'javascript', 'react'], - 'Apply_to': 'https://www.getonbrd.com/jobs/programming/fronted-developer-id-business-intelligence-remote', - '_type': 'dict' -}, { - 'Searched_job': - 'junior web developer', - 'Job_title': - 'Junior Web Developer', - 'Location': - None, - 'Company_name': - 'Reign', - 'Post_date': - 'January 29, 2022', - 'Extract_date': - '2022-01-30', - 'Job_description': - '', - 'Salary': - 'Not supplied', - 'Tags': [ - 'angularjs', 'api', 'back-end', 'ci/cd', 'css', 'docker', 'front-end', 'html5', 'javascript', 'json', 'mongodb', - 'node.js', 'nosql', 'postgresql', 'react', 'responsive', 'ui design', 'virtualization' - ], - 'Apply_to': - 'https://www.getonbrd.com/jobs/programming/junior-web-developer-reign-remote', - '_type': - 'dict' -}] +JOBS = [ + { + "Searched_job": "junior web developer", + "Job_title": "Desarrollador Full-Stack", + "Location": "Santiago (temporarily remote)", + "Company_name": "Centry", + "Post_date": "January 19, 2022", + "Extract_date": "2022-01-30", + "Job_description": "", + "Salary": "$1800 - 2100 USD/month", + "Tags": ["api", "back-end", "full-stack", "git", "java", "mvc", "python", "ruby"], + "Apply_to": "https://www.getonbrd.com/jobs/programming/desarrollador-full-stack-developer-centry-santiago", + "_type": "dict", + }, + { + "Searched_job": "junior web developer", + "Job_title": "Desarrollador Full-Stack Python/React", + "Location": "Remote", + "Company_name": "Alluxi", + "Post_date": "January 14, 2022", + "Extract_date": "2022-01-30", + "Job_description": "Al menos 1 año de experiencia trabajando con Python y Django Al menos 1 año de experiencia trabajando con React.js Experiencia desarrollando APIs REST Ingles Conversacional Buscamos un desarrollador responsable, autodidacta, proactivo, eficiente y organizado.", + "Salary": "$1800 - 2000 USD/month", + "Tags": ["api", "back-end", "django", "english", "front-end", "full-stack", "javascript", "python", "react"], + "Apply_to": "https://www.getonbrd.com/jobs/programming/desarrollodor-fullstack-python-react-alluxi-remote", + "_type": "dict", + }, + { + "Searched_job": "junior web developer", + "Job_title": "Full-Stack Developer", + "Location": "Santiago", + "Company_name": "AAXIS Commerce", + "Post_date": "January 17, 2022", + "Extract_date": "2022-01-30", + "Job_description": "Four-year degree in any computer science-related field or equivalent experience. At least 3-year solid front-end developer as well as back-end full stack developer. Relevant experience working with PHP/Symfony (if it is in Magento or Oro Commerce, even better). Familiar with responsive/adaptive design and mobile development best practices. Web and mobile development, familiar with front+back end developing and data interaction. Experience with Express, Redis. 
and Node.js, mainframe (React, Angular, Knockout) preferred for React.", + "Salary": "Not supplied", + "Tags": [ + "angularjs", + "back-end", + "express", + "front-end", + "full-stack", + "javascript", + "magento", + "mobile development", + "node.js", + "php", + "react", + "redis", + "responsive", + "symfony", + "ui design", + ], + "Apply_to": "https://www.getonbrd.com/jobs/programming/full-stack-developer-aaxis-commerce-santiago-3c8e", + "_type": "dict", + }, + { + "Searched_job": "junior web developer", + "Job_title": "Pentester Cybersecurity", + "Location": "Remote (Chile)", + "Company_name": "Rule 1 Ventures", + "Post_date": "November 05, 2021", + "Extract_date": "2022-01-30", + "Job_description": "Vuln exploitation Security reports", + "Salary": "Not supplied", + "Tags": ["back-end", "cybersecurity", "english", "pentesting", "python"], + "Apply_to": "https://www.getonbrd.com/jobs/cybersecurity/security-engineer-rule-1-ventures-remote", + "_type": "dict", + }, + { + "Searched_job": "junior web developer", + "Job_title": "Pentester Cybersecurity", + "Location": "Remote (Chile, Venezuela)", + "Company_name": "Rule 1 Ventures", + "Post_date": "November 05, 2021", + "Extract_date": "2022-01-30", + "Job_description": "Vuln exploitation Security reports", + "Salary": "Not supplied", + "Tags": ["back-end", "cybersecurity", "english", "pentesting", "python"], + "Apply_to": "https://www.getonbrd.com/jobs/cybersecurity/security-engineer-rule-1-ventures-remote", + "_type": "dict", + }, + { + "Searched_job": "junior web developer", + "Job_title": "Front-end Developer", + "Location": "Lima", + "Company_name": "ID Business Intelligence", + "Post_date": "January 24, 2022", + "Extract_date": "2022-01-30", + "Job_description": "Manejo de Git Flow. (~°-°)~ Dominar a profundidad CSS y JS (mínimo 1 año) Experiencia con React Experiencia consumiendo Web Service (Rest) Preocuparse por entregar productos de calidad.", + "Salary": "Not supplied", + "Tags": ["api", "css", "front-end", "git", "javascript", "react"], + "Apply_to": "https://www.getonbrd.com/jobs/programming/fronted-developer-id-business-intelligence-remote", + "_type": "dict", + }, + { + "Searched_job": "junior web developer", + "Job_title": "Junior Web Developer", + "Location": None, + "Company_name": "Reign", + "Post_date": "January 29, 2022", + "Extract_date": "2022-01-30", + "Job_description": "", + "Salary": "Not supplied", + "Tags": [ + "angularjs", + "api", + "back-end", + "ci/cd", + "css", + "docker", + "front-end", + "html5", + "javascript", + "json", + "mongodb", + "node.js", + "nosql", + "postgresql", + "react", + "responsive", + "ui design", + "virtualization", + ], + "Apply_to": "https://www.getonbrd.com/jobs/programming/junior-web-developer-reign-remote", + "_type": "dict", + }, +] -spider = {'name': 'getonboard', 'zyte_spider_number': 3, 'zyte_job_number': 0} -zyte_project = {'zyte_api_key': 1234567, 'zyte_api_deploy': 223344} -platform = {'name': 'getonboard'} +spider = {"name": "getonboard", "zyte_spider_number": 3, "zyte_job_number": 0} +zyte_project = {"zyte_api_key": 1234567, "zyte_api_deploy": 223344} +platform = {"name": "getonboard"} class ActionGetScrapedDataOfPlatformTestCase(CareerTestCase): - @patch(DJANGO_CONTRIB_PATH['messages'], apply_django_contrib_messages_mock()) - @patch('django.contrib.messages.add_message', MagicMock()) - @patch('logging.Logger.error', MagicMock()) + @patch(DJANGO_CONTRIB_PATH["messages"], apply_django_contrib_messages_mock()) + @patch("django.contrib.messages.add_message", MagicMock()) + 
@patch("logging.Logger.error", MagicMock()) def test_get_scraped_data_of_platform__without_spider(self): from breathecode.career.actions import get_scraped_data_of_platform from logging import Logger + try: get_scraped_data_of_platform(None, DATA) except Exception as e: - self.assertEqual(str(e), ('without-spider')) - self.assertEqual(Logger.error.call_args_list, [ - call('First you must specify a spider (get_scraped_data_of_platform)'), - ]) + self.assertEqual(str(e), ("without-spider")) + self.assertEqual( + Logger.error.call_args_list, + [ + call("First you must specify a spider (get_scraped_data_of_platform)"), + ], + ) - @patch(DJANGO_CONTRIB_PATH['messages'], apply_django_contrib_messages_mock()) - @patch('django.contrib.messages.add_message', MagicMock()) - @patch('logging.Logger.error', MagicMock()) + @patch(DJANGO_CONTRIB_PATH["messages"], apply_django_contrib_messages_mock()) + @patch("django.contrib.messages.add_message", MagicMock()) + @patch("logging.Logger.error", MagicMock()) def test_get_scraped_data_of_platform__without_data(self): from breathecode.career.actions import get_scraped_data_of_platform from logging import Logger + model = self.bc.database.create(spider=1) try: get_scraped_data_of_platform(model.spider, None) except Exception as e: - self.assertEqual(str(e), ('no-return-json-data')) - self.assertEqual(Logger.error.call_args_list, [ - call('I did not receive results from the API (get_scraped_data_of_platform)'), - ]) + self.assertEqual(str(e), ("no-return-json-data")) + self.assertEqual( + Logger.error.call_args_list, + [ + call("I did not receive results from the API (get_scraped_data_of_platform)"), + ], + ) @patch( - REQUESTS_PATH['get'], - apply_requests_get_mock([ - (200, 'https://storage.scrapinghub.com/items/223344/3/35?apikey=1234567&format=json', JOBS), - (200, 'https://storage.scrapinghub.com/items/223344/3/34?apikey=1234567&format=json', JOBS) - ])) + REQUESTS_PATH["get"], + apply_requests_get_mock( + [ + (200, "https://storage.scrapinghub.com/items/223344/3/35?apikey=1234567&format=json", JOBS), + (200, "https://storage.scrapinghub.com/items/223344/3/34?apikey=1234567&format=json", JOBS), + ] + ), + ) def test_fetch_data__with_two_num_jobs(self): import requests @@ -241,47 +259,55 @@ def test_fetch_data__with_two_num_jobs(self): result = get_scraped_data_of_platform(model.spider, DATA) - self.assertEqual(result, [{ - 'status': 'ok', - 'platform_name': model.platform.name, - 'num_spider': 3, - 'num_job': 35, - 'jobs_saved': 6 - }, { - 'status': 'ok', - 'platform_name': model.platform.name, - 'num_spider': 3, - 'num_job': 34, - 'jobs_saved': 0 - }]) - self.assertEqual(requests.get.call_args_list, [ - call('https://storage.scrapinghub.com/items/223344/3/35?apikey=1234567&format=json', timeout=2), - call('https://storage.scrapinghub.com/items/223344/3/34?apikey=1234567&format=json', timeout=2) - ]) + self.assertEqual( + result, + [ + {"status": "ok", "platform_name": model.platform.name, "num_spider": 3, "num_job": 35, "jobs_saved": 6}, + {"status": "ok", "platform_name": model.platform.name, "num_spider": 3, "num_job": 34, "jobs_saved": 0}, + ], + ) + self.assertEqual( + requests.get.call_args_list, + [ + call("https://storage.scrapinghub.com/items/223344/3/35?apikey=1234567&format=json", timeout=2), + call("https://storage.scrapinghub.com/items/223344/3/34?apikey=1234567&format=json", timeout=2), + ], + ) @patch( - REQUESTS_PATH['get'], - apply_requests_get_mock([(400, 'https://storage.scrapinghub.com/items/223344/3/35?apikey=1234567&format=json', - [{ 
- 'status_code': 400, - 'data': [] - }])])) - @patch(DJANGO_CONTRIB_PATH['messages'], apply_django_contrib_messages_mock()) - @patch('django.contrib.messages.add_message', MagicMock()) - @patch('logging.Logger.error', MagicMock()) + REQUESTS_PATH["get"], + apply_requests_get_mock( + [ + ( + 400, + "https://storage.scrapinghub.com/items/223344/3/35?apikey=1234567&format=json", + [{"status_code": 400, "data": []}], + ) + ] + ), + ) + @patch(DJANGO_CONTRIB_PATH["messages"], apply_django_contrib_messages_mock()) + @patch("django.contrib.messages.add_message", MagicMock()) + @patch("logging.Logger.error", MagicMock()) def test_fetch_data__with_bad_request(self): import requests from logging import Logger + model = self.bc.database.create(spider=spider, zyte_project=zyte_project, platform=platform) try: result = get_scraped_data_of_platform(model.spider, DATA) - self.assertEqual(result, [{'status_code': 400, 'data': []}]) - self.assertEqual(requests.get.call_args_list, - [call('https://storage.scrapinghub.com/items/223344/3/35?apikey=1234567&format=json')]) + self.assertEqual(result, [{"status_code": 400, "data": []}]) + self.assertEqual( + requests.get.call_args_list, + [call("https://storage.scrapinghub.com/items/223344/3/35?apikey=1234567&format=json")], + ) except Exception as e: - self.assertEqual(str(e), ('bad-response-fetch')) - self.assertEqual(Logger.error.call_args_list, [ - call('There was a 400 error fetching spider 3 job 3 (get_scraped_data_of_platform)'), - ]) + self.assertEqual(str(e), ("bad-response-fetch")) + self.assertEqual( + Logger.error.call_args_list, + [ + call("There was a 400 error fetching spider 3 job 3 (get_scraped_data_of_platform)"), + ], + ) diff --git a/breathecode/career/tests/actions/tests_parse_date.py b/breathecode/career/tests/actions/tests_parse_date.py index 49c0a0895..2c290ea2b 100644 --- a/breathecode/career/tests/actions/tests_parse_date.py +++ b/breathecode/career/tests/actions/tests_parse_date.py @@ -9,74 +9,78 @@ apply_requests_post_mock, ) -spider = {'name': 'indeed', 'zyte_spider_number': 2, 'zyte_job_number': 0} +spider = {"name": "indeed", "zyte_spider_number": 2, "zyte_job_number": 0} zyte_project = { - 'zyte_api_key': 1234567, - 'zyte_api_deploy': 11223344, - 'zyte_api_spider_number': 2, - 'zyte_api_last_job_number': 0 + "zyte_api_key": 1234567, + "zyte_api_deploy": 11223344, + "zyte_api_spider_number": 2, + "zyte_api_last_job_number": 0, } -platform = {'name': 'indeed'} +platform = {"name": "indeed"} class ActionRunSpiderTestCase(CareerTestCase): - @patch(DJANGO_CONTRIB_PATH['messages'], apply_django_contrib_messages_mock()) - @patch('django.contrib.messages.add_message', MagicMock()) - @patch('logging.Logger.error', MagicMock()) + @patch(DJANGO_CONTRIB_PATH["messages"], apply_django_contrib_messages_mock()) + @patch("django.contrib.messages.add_message", MagicMock()) + @patch("logging.Logger.error", MagicMock()) def test_get_was_published_date_from_string__without_job(self): from logging import Logger + try: get_was_published_date_from_string(None) assert False except Exception as e: - self.assertEqual(str(e), ('data-job-none')) - self.assertEqual(Logger.error.call_args_list, [ - call('First you must specify a job (get_was_published_date_from_string)'), - ]) + self.assertEqual(str(e), ("data-job-none")) + self.assertEqual( + Logger.error.call_args_list, + [ + call("First you must specify a job (get_was_published_date_from_string)"), + ], + ) def test_get_was_published_date_from_string__whith_x_days_ago(self): - job = {'published_date_raw': 
'30+ days ago'} + job = {"published_date_raw": "30+ days ago"} model = self.bc.database.create(platform=platform, zyte_project=zyte_project, spider=spider, job=job) result = get_was_published_date_from_string(model.job) result = result.published_date_processed - result = f'{result.year}-{result.month}-{result.day}' + result = f"{result.year}-{result.month}-{result.day}" expected = timezone.now() - timedelta(days=30) - expected = f'{expected.year}-{expected.month}-{expected.day}' + expected = f"{expected.year}-{expected.month}-{expected.day}" self.assertEqual(result, expected) def test_get_was_published_date_from_string__whith_active_x_days_ago(self): - job = {'published_date_raw': 'Active 6 days ago'} + job = {"published_date_raw": "Active 6 days ago"} model = self.bc.database.create(spider=spider, zyte_project=zyte_project, platform=platform, job=job) result = get_was_published_date_from_string(model.job) result = result.published_date_processed - result = f'{result.year}-{result.month}-{result.day}' + result = f"{result.year}-{result.month}-{result.day}" expected = timezone.now() - timedelta(days=6) - expected = f'{expected.year}-{expected.month}-{expected.day}' + expected = f"{expected.year}-{expected.month}-{expected.day}" self.assertEqual(result, expected) def test_get_was_published_date_from_string__whith_month_day_year(self): - job = {'published_date_raw': 'July 17, 1977'} + job = {"published_date_raw": "July 17, 1977"} model = self.bc.database.create(spider=spider, zyte_project=zyte_project, platform=platform, job=job) result = get_was_published_date_from_string(model.job) result = result.published_date_processed - result = f'{result.year}-{result.month}-{result.day}' + result = f"{result.year}-{result.month}-{result.day}" - self.assertEqual(result, '1977-7-17') + self.assertEqual(result, "1977-7-17") def test_get_was_published_date_from_string__whith_today(self): - job = {'published_date_raw': 'today'} + job = {"published_date_raw": "today"} model = self.bc.database.create(spider=spider, zyte_project=zyte_project, platform=platform, job=job) result = get_was_published_date_from_string(model.job) result = result.published_date_processed - result = f'{result.year}-{result.month}-{result.day}' + result = f"{result.year}-{result.month}-{result.day}" expected = timezone.now() - expected = f'{expected.year}-{expected.month}-{expected.day}' + expected = f"{expected.year}-{expected.month}-{expected.day}" self.assertEqual(result, expected) diff --git a/breathecode/career/tests/actions/tests_run_spider.py b/breathecode/career/tests/actions/tests_run_spider.py index e657af77e..cb855bb11 100644 --- a/breathecode/career/tests/actions/tests_run_spider.py +++ b/breathecode/career/tests/actions/tests_run_spider.py @@ -6,18 +6,18 @@ from ..mixins import CareerTestCase RESULT = { - 'spider': ['Invalid pk "indeed5" - object does not exist.'], - 'status': 'error', - 'message': 'spider: Invalid pk "indeed5" - object does not exist.' 
+ "spider": ['Invalid pk "indeed5" - object does not exist.'], + "status": "error", + "message": 'spider: Invalid pk "indeed5" - object does not exist.', } -spider = {'name': 'getonboard', 'zyte_spider_number': 3, 'zyte_job_number': 0} -zyte_project = {'zyte_api_key': 1234567, 'zyte_api_deploy': 223344} -platform = {'name': 'getonboard'} +spider = {"name": "getonboard", "zyte_spider_number": 3, "zyte_job_number": 0} +zyte_project = {"zyte_api_key": 1234567, "zyte_api_deploy": 223344} +platform = {"name": "getonboard"} -spider1 = {'name': 'indeed', 'zyte_spider_number': 2, 'zyte_job_number': 0} -zyte_project1 = {'zyte_api_key': 1234567, 'zyte_api_deploy': 223344} -platform1 = {'name': 'indeed'} +spider1 = {"name": "indeed", "zyte_spider_number": 2, "zyte_job_number": 0} +zyte_project1 = {"zyte_api_key": 1234567, "zyte_api_deploy": 223344} +platform1 = {"name": "indeed"} class ActionRunSpiderTestCase(CareerTestCase): @@ -27,10 +27,10 @@ def test_run_spider__without_spider(self): run_spider(None) assert False except Exception as e: - self.assertEqual(str(e), 'missing-spider') + self.assertEqual(str(e), "missing-spider") - @patch(REQUESTS_PATH['post'], apply_requests_post_mock([(400, 'https://app.scrapinghub.com/api/run.json', RESULT)])) - @patch('logging.Logger.error', MagicMock()) + @patch(REQUESTS_PATH["post"], apply_requests_post_mock([(400, "https://app.scrapinghub.com/api/run.json", RESULT)])) + @patch("logging.Logger.error", MagicMock()) def test_run_spider__with_status_code_error(self): from logging import Logger @@ -41,61 +41,79 @@ def test_run_spider__with_status_code_error(self): model = self.bc.database.create(spider=spider, zyte_project=zyte_project, platform=platform) try: result = run_spider(model.spider) - self.assertEqual(result, (False, { - 'spider': ['Invalid pk "indeed5" - object does not exist.'], - 'status': 'error', - 'message': 'spider: Invalid pk "indeed5" - object does not exist.' - })) - self.assertEqual(requests.post.call_args_list, [ - call('https://app.scrapinghub.com/api/run.json', - data={ - 'project': model.zyte_project.zyte_api_deploy, - 'spider': model.zyte_project.platform.name, - 'job': model.spider.job_search, - 'loc': model.spider.loc_search - }, - auth=(model.zyte_project.zyte_api_key, ''), - timeout=2) - ]) - except Exception as e: - self.assertEqual(str(e), ('bad-request')) - self.assertEqual(Logger.error.call_args_list, [ - call( - 'The spider ended error. Type error [\'Invalid pk "indeed5" - object does not exist.\'] to getonboard' + self.assertEqual( + result, + ( + False, + { + "spider": ['Invalid pk "indeed5" - object does not exist.'], + "status": "error", + "message": 'spider: Invalid pk "indeed5" - object does not exist.', + }, ), - call('Status 400 - bad-request') - ]) - - @patch(REQUESTS_PATH['post'], - apply_requests_post_mock([(200, 'https://app.scrapinghub.com/api/run.json', { - 'status': 'ok', - 'data': [] - })])) + ) + self.assertEqual( + requests.post.call_args_list, + [ + call( + "https://app.scrapinghub.com/api/run.json", + data={ + "project": model.zyte_project.zyte_api_deploy, + "spider": model.zyte_project.platform.name, + "job": model.spider.job_search, + "loc": model.spider.loc_search, + }, + auth=(model.zyte_project.zyte_api_key, ""), + timeout=2, + ) + ], + ) + except Exception as e: + self.assertEqual(str(e), ("bad-request")) + self.assertEqual( + Logger.error.call_args_list, + [ + call( + "The spider ended error. 
Type error ['Invalid pk \"indeed5\" - object does not exist.'] to getonboard" + ), + call("Status 400 - bad-request"), + ], + ) + + @patch( + REQUESTS_PATH["post"], + apply_requests_post_mock([(200, "https://app.scrapinghub.com/api/run.json", {"status": "ok", "data": []})]), + ) def test_run_spider__with_one_spider(self): import requests from breathecode.career.actions import run_spider + model = self.bc.database.create(spider=spider, zyte_project=zyte_project, platform=platform) result = run_spider(model.spider) - self.assertEqual(result, (True, {'status': 'ok', 'data': []})) - self.assertEqual(requests.post.call_args_list, [ - call('https://app.scrapinghub.com/api/run.json', - data={ - 'project': model.zyte_project.zyte_api_deploy, - 'spider': model.zyte_project.platform.name, - 'job': model.spider.job_search, - 'loc': model.spider.loc_search - }, - auth=(model.zyte_project.zyte_api_key, ''), - timeout=2) - ]) - - @patch(REQUESTS_PATH['post'], - apply_requests_post_mock([(200, 'https://app.scrapinghub.com/api/run.json', { - 'status': 'ok', - 'data': [] - })])) + self.assertEqual(result, (True, {"status": "ok", "data": []})) + self.assertEqual( + requests.post.call_args_list, + [ + call( + "https://app.scrapinghub.com/api/run.json", + data={ + "project": model.zyte_project.zyte_api_deploy, + "spider": model.zyte_project.platform.name, + "job": model.spider.job_search, + "loc": model.spider.loc_search, + }, + auth=(model.zyte_project.zyte_api_key, ""), + timeout=2, + ) + ], + ) + + @patch( + REQUESTS_PATH["post"], + apply_requests_post_mock([(200, "https://app.scrapinghub.com/api/run.json", {"status": "ok", "data": []})]), + ) def test_run_spider__with_two_spiders(self): import requests @@ -107,26 +125,33 @@ def test_run_spider__with_two_spiders(self): result_1 = run_spider(model_1.spider) result_2 = run_spider(model_2.spider) - self.assertEqual(result_1, (True, {'status': 'ok', 'data': []})) - self.assertEqual(result_2, (True, {'status': 'ok', 'data': []})) - - self.assertEqual(requests.post.call_args_list, [ - call('https://app.scrapinghub.com/api/run.json', - data={ - 'project': model_1.zyte_project.zyte_api_deploy, - 'spider': model_1.zyte_project.platform.name, - 'job': model_1.spider.job_search, - 'loc': model_1.spider.loc_search - }, - auth=(model_1.zyte_project.zyte_api_key, ''), - timeout=2), - call('https://app.scrapinghub.com/api/run.json', - data={ - 'project': model_2.zyte_project.zyte_api_deploy, - 'spider': model_2.zyte_project.platform.name, - 'job': model_2.spider.job_search, - 'loc': model_2.spider.loc_search - }, - auth=(model_2.zyte_project.zyte_api_key, ''), - timeout=2) - ]) + self.assertEqual(result_1, (True, {"status": "ok", "data": []})) + self.assertEqual(result_2, (True, {"status": "ok", "data": []})) + + self.assertEqual( + requests.post.call_args_list, + [ + call( + "https://app.scrapinghub.com/api/run.json", + data={ + "project": model_1.zyte_project.zyte_api_deploy, + "spider": model_1.zyte_project.platform.name, + "job": model_1.spider.job_search, + "loc": model_1.spider.loc_search, + }, + auth=(model_1.zyte_project.zyte_api_key, ""), + timeout=2, + ), + call( + "https://app.scrapinghub.com/api/run.json", + data={ + "project": model_2.zyte_project.zyte_api_deploy, + "spider": model_2.zyte_project.platform.name, + "job": model_2.spider.job_search, + "loc": model_2.spider.loc_search, + }, + auth=(model_2.zyte_project.zyte_api_key, ""), + timeout=2, + ), + ], + ) diff --git a/breathecode/career/tests/actions/tests_save_data.py 
b/breathecode/career/tests/actions/tests_save_data.py index e460654c1..28b6cd5e0 100644 --- a/breathecode/career/tests/actions/tests_save_data.py +++ b/breathecode/career/tests/actions/tests_save_data.py @@ -6,828 +6,909 @@ apply_requests_post_mock, ) -JOBS = [{ - 'Searched_job': 'junior web developer', - 'Job_title': 'Desarrollador Full-Stack', - 'Location': 'Santiago (temporarily remote)', - 'Company_name': 'Centry', - 'Post_date': 'January 19, 2022', - 'Extract_date': '2022-01-30', - 'Job_description': 'Vuln exploitation Security reports', - 'Salary': '$1800 - 2100 USD/month', - 'Tags': ['api', 'back-end', 'full-stack', 'git', 'java', 'mvc', 'python', 'ruby'], - 'Apply_to': 'https://www.getonbrd.com/jobs/programming/desarrollador-full-stack-developer-centry-santiago', - '_type': 'dict' -}, { - 'Searched_job': 'junior web developer', - 'Job_title': 'Desarrollador Full-Stack Python/React', - 'Location': 'Remote', - 'Company_name': 'Alluxi', - 'Post_date': 'January 14, 2022', - 'Extract_date': '2022-01-30', - 'Job_description': 'Vuln exploitation Security reports', - 'Salary': '$1800 - 2000 USD/month', - 'Tags': [], - 'Apply_to': 'https://www.getonbrd.com/jobs/programming/desarrollodor-fullstack-python-react-alluxi-remote', - '_type': 'dict' -}, { - 'Searched_job': - 'junior web developer', - 'Job_title': - 'Full-Stack Developer', - 'Location': - 'Santiago', - 'Company_name': - 'AAXIS Commerce', - 'Post_date': - 'January 17, 2022', - 'Extract_date': - '2022-01-30', - 'Job_description': - 'Vuln exploitation Security reports', - 'Salary': - 'Not supplied', - 'Tags': [ - 'angularjs', 'back-end', 'express', 'front-end', 'full-stack', 'javascript', 'magento', 'mobile development', - 'node.js', 'php', 'react', 'redis', 'responsive', 'symfony', 'ui design' - ], - 'Apply_to': - 'https://www.getonbrd.com/jobs/programming/full-stack-developer-aaxis-commerce-santiago-3c8e', - '_type': - 'dict' -}, { - 'Searched_job': 'junior web developer', - 'Job_title': 'Pentester Cybersecurity', - 'Location': 'Remote (Chile)', - 'Company_name': 'Rule 1 Ventures', - 'Post_date': 'November 05, 2021', - 'Extract_date': '2022-01-30', - 'Job_description': 'Vuln exploitation Security reports', - 'Salary': 'Not supplied', - 'Tags': ['back-end', 'cybersecurity', 'english', 'pentesting', 'python'], - 'Apply_to': 'https://www.getonbrd.com/jobs/cybersecurity/security-engineer-rule-1-ventures-remote', - '_type': 'dict' -}, { - 'Searched_job': 'junior web developer', - 'Job_title': 'Pentester Cybersecurity', - 'Location': 'Remote (Chile, Venezuela)', - 'Company_name': 'Rule 1 Ventures', - 'Post_date': 'November 05, 2021', - 'Extract_date': '2022-01-30', - 'Job_description': 'Vuln exploitation Security reports', - 'Salary': '$1800 - 2000 a year', - 'Tags': ['back-end', 'cybersecurity', 'english', 'pentesting', 'python'], - 'Apply_to': 'https://www.getonbrd.com/jobs/cybersecurity/security-engineer-rule-1-ventures-remote', - '_type': 'dict' -}, { - 'Searched_job': 'junior web developer', - 'Job_title': 'Front-end Developer', - 'Location': '.', - 'Company_name': 'ID Business Intelligence', - 'Post_date': 'January 24, 2022', - 'Extract_date': '2022-01-30', - 'Job_description': 'Vuln exploitation Security reports', - 'Salary': '$1800 - k2000 per year', - 'Tags': ['api', 'css', 'front-end', 'git', 'javascript', 'react'], - 'Apply_to': 'https://www.getonbrd.com/jobs/programming/fronted-developer-id-business-intelligence-remote', - '_type': 'dict' -}, { - 'Searched_job': - 'junior web developer', - 'Job_title': - 'Junior Web 
Developer', - 'Location': - None, - 'Company_name': - 'Reign', - 'Post_date': - 'January 29, 2022', - 'Extract_date': - '2022-01-30', - 'Job_description': - 'Vuln exploitation Security reports', - 'Salary': - '18000 USD/month', - 'Tags': [ - 'angularjs', 'api', 'back-end', 'ci/cd', 'css', 'docker', 'front-end', 'html5', 'javascript', 'json', 'mongodb', - 'node.js', 'nosql', 'postgresql', 'react', 'responsive', 'ui design', 'virtualization' - ], - 'Apply_to': - 'https://www.getonbrd.com/jobs/programming/junior-web-developer-reign-remote', - '_type': - 'dict' -}] - -JOBS1 = [{ - 'Searched_job': 'junior web developer', - 'Job_title': 'Pentester Cybersecurity', - 'Location': 'Remote (Chile, Venezuela)', - 'Company_name': 'Repite Employer', - 'Post_date': 'November 05, 2021', - 'Extract_date': '2022-01-30', - 'Job_description': 'Vuln exploitation Security reports', - 'Salary': 'Not supplied', - 'Tags': ['back-end', 'cybersecurity', 'english', 'pentesting', 'python'], - 'Apply_to': 'https://www.url.com/1', - '_type': 'dict' -}, { - 'Searched_job': 'junior web developer', - 'Job_title': 'Pentester Cybersecurity', - 'Location': 'Remote (Chile)', - 'Company_name': 'Repite Employer', - 'Post_date': 'November 05, 2021', - 'Extract_date': '2022-01-30', - 'Job_description': 'Vuln exploitation Security reports', - 'Salary': 'Not supplied', - 'Tags': ['back-end', 'cybersecurity', 'english', 'pentesting', 'python'], - 'Apply_to': 'https://www.url.com/2', - '_type': 'dict' -}, { - 'Searched_job': 'junior web developer', - 'Job_title': 'Pentester Cybersecurity', - 'Location': 'Remote (Peru, Colombia)', - 'Company_name': 'Other Employer', - 'Post_date': 'November 05, 2021', - 'Extract_date': '2022-01-30', - 'Job_description': 'Vuln exploitation Security reports', - 'Salary': 'Not supplied', - 'Tags': ['back-end', 'cybersecurity', 'english', 'pentesting', 'python'], - 'Apply_to': 'https://www.url.com/3', - '_type': 'dict' -}] - -JOBS2 = [{ - 'Searched_job': 'junior web developer', - 'Job_title': 'Pentester Cybersecurity', - 'Location': 'Remote (Chile, Venezuela)', - 'Company_name': 'Repite Employer', - 'Post_date': 'November 05, 2021', - 'Extract_date': '2022-01-30', - 'Job_description': 'Vuln exploitation Security reports', - 'Salary': 'Not supplied', - 'Tags': ['back-end', 'cybersecurity', 'english', 'pentesting', 'python'], - 'Apply_to': 'https://www.url.com/1', - '_type': 'dict' -}, { - 'Searched_job': 'junior web developer', - 'Job_title': 'Pentester Cybersecurity', - 'Location': 'Remote (Chile)', - 'Company_name': 'Repite Employer', - 'Post_date': 'November 05, 2021', - 'Extract_date': '2022-01-30', - 'Job_description': 'Vuln exploitation Security reports', - 'Salary': 'Not supplied', - 'Tags': ['back-end', 'cybersecurity', 'english', 'pentesting', 'python'], - 'Apply_to': 'https://www.url.com/2', - '_type': 'dict' -}] - -JOBS3 = [{ - 'Searched_job': 'junior web developer', - 'Job_title': 'Pentester Cybersecurity', - 'Location': None, - 'Company_name': 'Employer', - 'Post_date': 'November 05, 2021', - 'Extract_date': '2022-01-30', - 'Job_description': 'Vuln exploitation Security reports', - 'Salary': '$1800 - 2000 USD/month', - 'Tags': ['back-end', 'cybersecurity', 'english', 'pentesting', 'python'], - 'Apply_to': 'https://www.url.com/1', - '_type': 'dict' -}, { - 'Searched_job': 'junior web developer', - 'Job_title': 'Pentester Cybersecurity', - 'Location': 'Chile', - 'Company_name': 'Employer', - 'Post_date': 'today', - 'Extract_date': '2022-01-30', - 'Job_description': 'Vuln exploitation Security 
reports', - 'Salary': '$1800 - 2000 USD/month', - 'Tags': ['back-end'], - 'Apply_to': 'https://www.url.com/2', - '_type': 'dict' -}] - -JOBS4 = [{ - 'Searched_job': 'junior web developer', - 'Job_title': 'Pentester Cybersecurity', - 'Location': 'Remote (Chile, Venezuela)', - 'Company_name': 'Repite Employer', - 'Post_date': 'today', - 'Extract_date': '2022-01-30', - 'Job_description': 'Vuln exploitation Security reports', - 'Salary': 'Not supplied', - 'Tags': ['back-end', 'cybersecurity', 'english', 'pentesting', 'python'], - 'Apply_to': 'https://www.url.com/1', - '_type': 'dict' -}, { - 'Searched_job': 'junior web developer', - 'Job_title': 'Python', - 'Location': 'Chile', - 'Company_name': 'Employer 2', - 'Post_date': 'November 05, 2021', - 'Extract_date': '2022-01-30', - 'Job_description': 'Vuln exploitation Security reports', - 'Salary': 'Not supplied', - 'Tags': ['back-end', 'cybersecurity', 'english', 'pentesting', 'python'], - 'Apply_to': 'https://www.url.com/2', - '_type': 'dict' -}, { - 'Searched_job': 'junior web developer', - 'Job_title': 'Pentester Cybersecurity', - 'Location': 'Venezuela', - 'Company_name': 'Repite Employer', - 'Post_date': 'November 05, 2021', - 'Extract_date': '2022-01-30', - 'Job_description': 'Vuln exploitation Security reports', - 'Salary': 'Not supplied', - 'Tags': ['back-end', 'cybersecurity', 'english', 'pentesting', 'python'], - 'Apply_to': 'https://www.url.com/3', - '_type': 'dict' -}] - -JOBS5 = [{ - 'Searched_job': 'junior web developer', - 'Job_title': 'Pentester Cybersecurity', - 'Location': 'Remote (Chile, Venezuela)', - 'Company_name': 'Employer', - 'Post_date': 'today', - 'Extract_date': '2022-01-30', - 'Job_description': 'Vuln exploitation Security reports', - 'currency': 'USD', - 'Salary': 'Not supplied', - 'Tags': ['back-end', 'cybersecurity', 'english', 'pentesting', 'python'], - 'Apply_to': 'https://www.url.com/1', - '_type': 'dict' -}, { - 'Searched_job': 'junior web developer', - 'Job_title': 'Pentester Cybersecurity', - 'Location': 'Chile', - 'Company_name': 'Other Employer', - 'Post_date': 'November 05, 2021', - 'Extract_date': '2022-01-30', - 'Job_description': 'Vuln exploitation Security reports', - 'currency': 'USD', - 'Salary': 'Not supplied', - 'Tags': ['back-end', 'cybersecurity', 'english', 'pentesting', 'python'], - 'Apply_to': 'https://www.url.com/2', - '_type': 'dict' -}] - -JOBS6 = [{ - 'Searched_job': 'junior web developer', - 'Job_title': 'Pentester Cybersecurity', - 'Location': 'Remote (Chile, Venezuela)', - 'Company_name': 'Employer', - 'Post_date': 'today', - 'Extract_date': '2022-01-30', - 'Job_description': 'Vuln exploitation Security reports', - 'currency': 'USD', - 'Salary': 'Not supplied', - 'Tags': ['back-end', 'cybersecurity', 'english', 'pentesting', 'python'], - 'Apply_to': 'https://www.url.com/1', - '_type': 'dict' -}, { - 'Searched_job': 'junior web developer', - 'Job_title': 'Python', - 'Location': 'Chile', - 'Company_name': 'Employer', - 'Post_date': 'November 05, 2021', - 'Extract_date': '2022-01-30', - 'Job_description': 'Vuln exploitation Security reports', - 'currency': 'USD', - 'Salary': 'Not supplied', - 'Tags': ['back-end', 'cybersecurity', 'english', 'pentesting', 'python'], - 'Apply_to': 'https://www.url.com/2', - '_type': 'dict' -}] - -JOBS7 = [{ - 'Searched_job': 'junior web developer', - 'Job_title': 'Pentester Cybersecurity', - 'Location': 'Remote (Chile)', - 'Company_name': 'Employer', - 'Post_date': 'November 05, 2021', - 'Extract_date': '2022-01-30', - 'Job_description': 'Vuln exploitation 
Security reports', - 'Salary': 'Not supplied', - 'Tags': ['back-end', 'cybersecurity', 'english', 'pentesting', 'python'], - 'Apply_to': 'https://www.url.com/1', - '_type': 'dict' -}, { - 'Searched_job': 'junior web developer', - 'Job_title': 'Pentester Cybersecurity', - 'Location': 'Chile', - 'Company_name': 'Employer 1', - 'Post_date': 'today', - 'Extract_date': '2022-01-30', - 'Job_description': 'Vuln exploitation Security reports', - 'Salary': 'Not supplied', - 'Tags': ['back-end', 'cybersecurity', 'english', 'pentesting', 'python'], - 'Apply_to': 'https://www.url.com/2', - '_type': 'dict' -}] - -JOBS8 = [{ - 'Searched_job': 'junior web developer', - 'Job_title': 'Pentester Cybersecurity', - 'Location': 'Remote (Chile, Venezuela)', - 'Company_name': 'Employer 2', - 'Post_date': 'today', - 'Extract_date': '2022-01-30', - 'Job_description': 'Vuln exploitation Security reports', - 'Salary': 'Not supplied', - 'Tags': ['back-end', 'cybersecurity', 'english', 'pentesting', 'python'], - 'Apply_to': 'https://www.url.com/1', - '_type': 'dict' -}, { - 'Searched_job': 'junior web developer', - 'Job_title': 'Pentester Cybersecurity', - 'Location': 'Chile', - 'Company_name': 'Repite Employer', - 'Post_date': 'November 05, 2021', - 'Extract_date': '2022-01-30', - 'Job_description': 'Vuln exploitation Security reports', - 'Salary': 'Not supplied', - 'Tags': ['back-end', 'cybersecurity', 'english', 'pentesting', 'python'], - 'Apply_to': 'https://www.url.com/2', - '_type': 'dict' -}, { - 'Searched_job': 'junior web developer', - 'Job_title': 'Pentester Cybersecurity', - 'Location': 'Venezuela', - 'Company_name': 'Other Employer', - 'Post_date': 'November 05, 2021', - 'Extract_date': '2022-01-30', - 'Job_description': 'Vuln exploitation Security reports', - 'Salary': 'Not supplied', - 'Tags': ['back-end', 'cybersecurity', 'english', 'pentesting', 'python'], - 'Apply_to': 'https://www.url.com/3', - '_type': 'dict' -}] - -JOBS9 = [{ - 'Searched_job': 'junior web developer', - 'Job_title': 'Pentester Cybersecurity', - 'Location': None, - 'Company_name': 'Employer', - 'Post_date': 'November 05, 2021', - 'Extract_date': '2022-01-30', - 'Job_description': 'Vuln exploitation Security reports', - 'Salary': 'Not supplied', - 'Tags': ['back-end', 'cybersecurity', 'english', 'pentesting', 'python'], - 'Apply_to': 'https://www.url.com/1', - '_type': 'dict' -}] - -JOBS10 = [{ - 'Searched_job': 'junior web developer', - 'Job_title': 'Pentester Cybersecurity', - 'Location': None, - 'Company_name': 'Employer', - 'Post_date': 'November 05, 2021', - 'Extract_date': '2022-01-30', - 'Job_description': 'Vuln exploitation Security reports', - 'Salary': 'Not supplied', - 'Tags': ['back-end', 'cybersecurity', 'english', 'pentesting', 'python'], - 'Apply_to': 'https://www.url.com/1', - '_type': 'dict' -}, { - 'Searched_job': 'junior web developer', - 'Job_title': 'Pentester Cybersecurity', - 'Location': 'Santiago', - 'Company_name': 'Employer 2', - 'Post_date': 'November 05, 2021', - 'Extract_date': '2022-01-30', - 'Job_description': 'Vuln exploitation Security reports', - 'Salary': 'Not supplied', - 'Tags': ['back-end', 'cybersecurity', 'english', 'pentesting', 'python'], - 'Apply_to': 'https://www.url.com/2', - '_type': 'dict' -}] +JOBS = [ + { + "Searched_job": "junior web developer", + "Job_title": "Desarrollador Full-Stack", + "Location": "Santiago (temporarily remote)", + "Company_name": "Centry", + "Post_date": "January 19, 2022", + "Extract_date": "2022-01-30", + "Job_description": "Vuln exploitation Security reports", 
+ "Salary": "$1800 - 2100 USD/month", + "Tags": ["api", "back-end", "full-stack", "git", "java", "mvc", "python", "ruby"], + "Apply_to": "https://www.getonbrd.com/jobs/programming/desarrollador-full-stack-developer-centry-santiago", + "_type": "dict", + }, + { + "Searched_job": "junior web developer", + "Job_title": "Desarrollador Full-Stack Python/React", + "Location": "Remote", + "Company_name": "Alluxi", + "Post_date": "January 14, 2022", + "Extract_date": "2022-01-30", + "Job_description": "Vuln exploitation Security reports", + "Salary": "$1800 - 2000 USD/month", + "Tags": [], + "Apply_to": "https://www.getonbrd.com/jobs/programming/desarrollodor-fullstack-python-react-alluxi-remote", + "_type": "dict", + }, + { + "Searched_job": "junior web developer", + "Job_title": "Full-Stack Developer", + "Location": "Santiago", + "Company_name": "AAXIS Commerce", + "Post_date": "January 17, 2022", + "Extract_date": "2022-01-30", + "Job_description": "Vuln exploitation Security reports", + "Salary": "Not supplied", + "Tags": [ + "angularjs", + "back-end", + "express", + "front-end", + "full-stack", + "javascript", + "magento", + "mobile development", + "node.js", + "php", + "react", + "redis", + "responsive", + "symfony", + "ui design", + ], + "Apply_to": "https://www.getonbrd.com/jobs/programming/full-stack-developer-aaxis-commerce-santiago-3c8e", + "_type": "dict", + }, + { + "Searched_job": "junior web developer", + "Job_title": "Pentester Cybersecurity", + "Location": "Remote (Chile)", + "Company_name": "Rule 1 Ventures", + "Post_date": "November 05, 2021", + "Extract_date": "2022-01-30", + "Job_description": "Vuln exploitation Security reports", + "Salary": "Not supplied", + "Tags": ["back-end", "cybersecurity", "english", "pentesting", "python"], + "Apply_to": "https://www.getonbrd.com/jobs/cybersecurity/security-engineer-rule-1-ventures-remote", + "_type": "dict", + }, + { + "Searched_job": "junior web developer", + "Job_title": "Pentester Cybersecurity", + "Location": "Remote (Chile, Venezuela)", + "Company_name": "Rule 1 Ventures", + "Post_date": "November 05, 2021", + "Extract_date": "2022-01-30", + "Job_description": "Vuln exploitation Security reports", + "Salary": "$1800 - 2000 a year", + "Tags": ["back-end", "cybersecurity", "english", "pentesting", "python"], + "Apply_to": "https://www.getonbrd.com/jobs/cybersecurity/security-engineer-rule-1-ventures-remote", + "_type": "dict", + }, + { + "Searched_job": "junior web developer", + "Job_title": "Front-end Developer", + "Location": ".", + "Company_name": "ID Business Intelligence", + "Post_date": "January 24, 2022", + "Extract_date": "2022-01-30", + "Job_description": "Vuln exploitation Security reports", + "Salary": "$1800 - k2000 per year", + "Tags": ["api", "css", "front-end", "git", "javascript", "react"], + "Apply_to": "https://www.getonbrd.com/jobs/programming/fronted-developer-id-business-intelligence-remote", + "_type": "dict", + }, + { + "Searched_job": "junior web developer", + "Job_title": "Junior Web Developer", + "Location": None, + "Company_name": "Reign", + "Post_date": "January 29, 2022", + "Extract_date": "2022-01-30", + "Job_description": "Vuln exploitation Security reports", + "Salary": "18000 USD/month", + "Tags": [ + "angularjs", + "api", + "back-end", + "ci/cd", + "css", + "docker", + "front-end", + "html5", + "javascript", + "json", + "mongodb", + "node.js", + "nosql", + "postgresql", + "react", + "responsive", + "ui design", + "virtualization", + ], + "Apply_to": 
"https://www.getonbrd.com/jobs/programming/junior-web-developer-reign-remote", + "_type": "dict", + }, +] + +JOBS1 = [ + { + "Searched_job": "junior web developer", + "Job_title": "Pentester Cybersecurity", + "Location": "Remote (Chile, Venezuela)", + "Company_name": "Repite Employer", + "Post_date": "November 05, 2021", + "Extract_date": "2022-01-30", + "Job_description": "Vuln exploitation Security reports", + "Salary": "Not supplied", + "Tags": ["back-end", "cybersecurity", "english", "pentesting", "python"], + "Apply_to": "https://www.url.com/1", + "_type": "dict", + }, + { + "Searched_job": "junior web developer", + "Job_title": "Pentester Cybersecurity", + "Location": "Remote (Chile)", + "Company_name": "Repite Employer", + "Post_date": "November 05, 2021", + "Extract_date": "2022-01-30", + "Job_description": "Vuln exploitation Security reports", + "Salary": "Not supplied", + "Tags": ["back-end", "cybersecurity", "english", "pentesting", "python"], + "Apply_to": "https://www.url.com/2", + "_type": "dict", + }, + { + "Searched_job": "junior web developer", + "Job_title": "Pentester Cybersecurity", + "Location": "Remote (Peru, Colombia)", + "Company_name": "Other Employer", + "Post_date": "November 05, 2021", + "Extract_date": "2022-01-30", + "Job_description": "Vuln exploitation Security reports", + "Salary": "Not supplied", + "Tags": ["back-end", "cybersecurity", "english", "pentesting", "python"], + "Apply_to": "https://www.url.com/3", + "_type": "dict", + }, +] + +JOBS2 = [ + { + "Searched_job": "junior web developer", + "Job_title": "Pentester Cybersecurity", + "Location": "Remote (Chile, Venezuela)", + "Company_name": "Repite Employer", + "Post_date": "November 05, 2021", + "Extract_date": "2022-01-30", + "Job_description": "Vuln exploitation Security reports", + "Salary": "Not supplied", + "Tags": ["back-end", "cybersecurity", "english", "pentesting", "python"], + "Apply_to": "https://www.url.com/1", + "_type": "dict", + }, + { + "Searched_job": "junior web developer", + "Job_title": "Pentester Cybersecurity", + "Location": "Remote (Chile)", + "Company_name": "Repite Employer", + "Post_date": "November 05, 2021", + "Extract_date": "2022-01-30", + "Job_description": "Vuln exploitation Security reports", + "Salary": "Not supplied", + "Tags": ["back-end", "cybersecurity", "english", "pentesting", "python"], + "Apply_to": "https://www.url.com/2", + "_type": "dict", + }, +] + +JOBS3 = [ + { + "Searched_job": "junior web developer", + "Job_title": "Pentester Cybersecurity", + "Location": None, + "Company_name": "Employer", + "Post_date": "November 05, 2021", + "Extract_date": "2022-01-30", + "Job_description": "Vuln exploitation Security reports", + "Salary": "$1800 - 2000 USD/month", + "Tags": ["back-end", "cybersecurity", "english", "pentesting", "python"], + "Apply_to": "https://www.url.com/1", + "_type": "dict", + }, + { + "Searched_job": "junior web developer", + "Job_title": "Pentester Cybersecurity", + "Location": "Chile", + "Company_name": "Employer", + "Post_date": "today", + "Extract_date": "2022-01-30", + "Job_description": "Vuln exploitation Security reports", + "Salary": "$1800 - 2000 USD/month", + "Tags": ["back-end"], + "Apply_to": "https://www.url.com/2", + "_type": "dict", + }, +] + +JOBS4 = [ + { + "Searched_job": "junior web developer", + "Job_title": "Pentester Cybersecurity", + "Location": "Remote (Chile, Venezuela)", + "Company_name": "Repite Employer", + "Post_date": "today", + "Extract_date": "2022-01-30", + "Job_description": "Vuln exploitation Security 
reports", + "Salary": "Not supplied", + "Tags": ["back-end", "cybersecurity", "english", "pentesting", "python"], + "Apply_to": "https://www.url.com/1", + "_type": "dict", + }, + { + "Searched_job": "junior web developer", + "Job_title": "Python", + "Location": "Chile", + "Company_name": "Employer 2", + "Post_date": "November 05, 2021", + "Extract_date": "2022-01-30", + "Job_description": "Vuln exploitation Security reports", + "Salary": "Not supplied", + "Tags": ["back-end", "cybersecurity", "english", "pentesting", "python"], + "Apply_to": "https://www.url.com/2", + "_type": "dict", + }, + { + "Searched_job": "junior web developer", + "Job_title": "Pentester Cybersecurity", + "Location": "Venezuela", + "Company_name": "Repite Employer", + "Post_date": "November 05, 2021", + "Extract_date": "2022-01-30", + "Job_description": "Vuln exploitation Security reports", + "Salary": "Not supplied", + "Tags": ["back-end", "cybersecurity", "english", "pentesting", "python"], + "Apply_to": "https://www.url.com/3", + "_type": "dict", + }, +] + +JOBS5 = [ + { + "Searched_job": "junior web developer", + "Job_title": "Pentester Cybersecurity", + "Location": "Remote (Chile, Venezuela)", + "Company_name": "Employer", + "Post_date": "today", + "Extract_date": "2022-01-30", + "Job_description": "Vuln exploitation Security reports", + "currency": "USD", + "Salary": "Not supplied", + "Tags": ["back-end", "cybersecurity", "english", "pentesting", "python"], + "Apply_to": "https://www.url.com/1", + "_type": "dict", + }, + { + "Searched_job": "junior web developer", + "Job_title": "Pentester Cybersecurity", + "Location": "Chile", + "Company_name": "Other Employer", + "Post_date": "November 05, 2021", + "Extract_date": "2022-01-30", + "Job_description": "Vuln exploitation Security reports", + "currency": "USD", + "Salary": "Not supplied", + "Tags": ["back-end", "cybersecurity", "english", "pentesting", "python"], + "Apply_to": "https://www.url.com/2", + "_type": "dict", + }, +] + +JOBS6 = [ + { + "Searched_job": "junior web developer", + "Job_title": "Pentester Cybersecurity", + "Location": "Remote (Chile, Venezuela)", + "Company_name": "Employer", + "Post_date": "today", + "Extract_date": "2022-01-30", + "Job_description": "Vuln exploitation Security reports", + "currency": "USD", + "Salary": "Not supplied", + "Tags": ["back-end", "cybersecurity", "english", "pentesting", "python"], + "Apply_to": "https://www.url.com/1", + "_type": "dict", + }, + { + "Searched_job": "junior web developer", + "Job_title": "Python", + "Location": "Chile", + "Company_name": "Employer", + "Post_date": "November 05, 2021", + "Extract_date": "2022-01-30", + "Job_description": "Vuln exploitation Security reports", + "currency": "USD", + "Salary": "Not supplied", + "Tags": ["back-end", "cybersecurity", "english", "pentesting", "python"], + "Apply_to": "https://www.url.com/2", + "_type": "dict", + }, +] + +JOBS7 = [ + { + "Searched_job": "junior web developer", + "Job_title": "Pentester Cybersecurity", + "Location": "Remote (Chile)", + "Company_name": "Employer", + "Post_date": "November 05, 2021", + "Extract_date": "2022-01-30", + "Job_description": "Vuln exploitation Security reports", + "Salary": "Not supplied", + "Tags": ["back-end", "cybersecurity", "english", "pentesting", "python"], + "Apply_to": "https://www.url.com/1", + "_type": "dict", + }, + { + "Searched_job": "junior web developer", + "Job_title": "Pentester Cybersecurity", + "Location": "Chile", + "Company_name": "Employer 1", + "Post_date": "today", + "Extract_date": 
"2022-01-30", + "Job_description": "Vuln exploitation Security reports", + "Salary": "Not supplied", + "Tags": ["back-end", "cybersecurity", "english", "pentesting", "python"], + "Apply_to": "https://www.url.com/2", + "_type": "dict", + }, +] + +JOBS8 = [ + { + "Searched_job": "junior web developer", + "Job_title": "Pentester Cybersecurity", + "Location": "Remote (Chile, Venezuela)", + "Company_name": "Employer 2", + "Post_date": "today", + "Extract_date": "2022-01-30", + "Job_description": "Vuln exploitation Security reports", + "Salary": "Not supplied", + "Tags": ["back-end", "cybersecurity", "english", "pentesting", "python"], + "Apply_to": "https://www.url.com/1", + "_type": "dict", + }, + { + "Searched_job": "junior web developer", + "Job_title": "Pentester Cybersecurity", + "Location": "Chile", + "Company_name": "Repite Employer", + "Post_date": "November 05, 2021", + "Extract_date": "2022-01-30", + "Job_description": "Vuln exploitation Security reports", + "Salary": "Not supplied", + "Tags": ["back-end", "cybersecurity", "english", "pentesting", "python"], + "Apply_to": "https://www.url.com/2", + "_type": "dict", + }, + { + "Searched_job": "junior web developer", + "Job_title": "Pentester Cybersecurity", + "Location": "Venezuela", + "Company_name": "Other Employer", + "Post_date": "November 05, 2021", + "Extract_date": "2022-01-30", + "Job_description": "Vuln exploitation Security reports", + "Salary": "Not supplied", + "Tags": ["back-end", "cybersecurity", "english", "pentesting", "python"], + "Apply_to": "https://www.url.com/3", + "_type": "dict", + }, +] + +JOBS9 = [ + { + "Searched_job": "junior web developer", + "Job_title": "Pentester Cybersecurity", + "Location": None, + "Company_name": "Employer", + "Post_date": "November 05, 2021", + "Extract_date": "2022-01-30", + "Job_description": "Vuln exploitation Security reports", + "Salary": "Not supplied", + "Tags": ["back-end", "cybersecurity", "english", "pentesting", "python"], + "Apply_to": "https://www.url.com/1", + "_type": "dict", + } +] + +JOBS10 = [ + { + "Searched_job": "junior web developer", + "Job_title": "Pentester Cybersecurity", + "Location": None, + "Company_name": "Employer", + "Post_date": "November 05, 2021", + "Extract_date": "2022-01-30", + "Job_description": "Vuln exploitation Security reports", + "Salary": "Not supplied", + "Tags": ["back-end", "cybersecurity", "english", "pentesting", "python"], + "Apply_to": "https://www.url.com/1", + "_type": "dict", + }, + { + "Searched_job": "junior web developer", + "Job_title": "Pentester Cybersecurity", + "Location": "Santiago", + "Company_name": "Employer 2", + "Post_date": "November 05, 2021", + "Extract_date": "2022-01-30", + "Job_description": "Vuln exploitation Security reports", + "Salary": "Not supplied", + "Tags": ["back-end", "cybersecurity", "english", "pentesting", "python"], + "Apply_to": "https://www.url.com/2", + "_type": "dict", + }, +] class ActionSaveDataTestCase(CareerTestCase): def test_save_data__with_spider(self): - spider = {'name': 'getonboard', 'zyte_spider_number': 3, 'zyte_job_number': 0} - zyte_project = {'zyte_api_key': 1234567, 'zyte_api_deploy': 11223344} - platform = {'name': 'getonboard'} + spider = {"name": "getonboard", "zyte_spider_number": 3, "zyte_job_number": 0} + zyte_project = {"zyte_api_key": 1234567, "zyte_api_deploy": 11223344} + platform = {"name": "getonboard"} model = self.bc.database.create(spider=spider, zyte_project=zyte_project, platform=platform) result = save_data(model.spider, JOBS) - 
self.assertEqual(self.bc.database.list_of('career.Job'), [{ - 'id': 1, - 'title': 'Desarrollador Full-Stack', - 'spider_id': 1, - 'published_date_raw': 'January 19, 2022', - 'published_date_processed': None, - 'status': 'OPENED', - 'apply_url': 'https://www.getonbrd.com/jobs/programming/desarrollador-full-stack-developer-centry-santiago', - 'currency': 'USD', - 'min_salary': 21600.0, - 'max_salary': 25200.0, - 'salary': '$21600.0 - $25200.0 a year.', - 'job_description': 'Vuln exploitation Security reports', - 'job_type': 'Full-time', - 'remote': True, - 'employer_id': 1, - 'position_id': 2 - }, { - 'id': 2, - 'title': 'Desarrollador Full-Stack Python/React', - 'spider_id': 1, - 'published_date_raw': 'January 14, 2022', - 'published_date_processed': None, - 'status': 'OPENED', - 'apply_url': 'https://www.getonbrd.com/jobs/programming/desarrollodor-fullstack-python-react-alluxi-remote', - 'currency': 'USD', - 'min_salary': 21600.0, - 'max_salary': 24000.0, - 'salary': '$21600.0 - $24000.0 a year.', - 'job_description': 'Vuln exploitation Security reports', - 'job_type': 'Full-time', - 'remote': True, - 'employer_id': 2, - 'position_id': 2 - }, { - 'id': 3, - 'title': 'Full-Stack Developer', - 'spider_id': 1, - 'published_date_raw': 'January 17, 2022', - 'published_date_processed': None, - 'status': 'OPENED', - 'apply_url': 'https://www.getonbrd.com/jobs/programming/full-stack-developer-aaxis-commerce-santiago-3c8e', - 'currency': 'USD', - 'min_salary': 0.0, - 'max_salary': 0.0, - 'salary': 'Not supplied', - 'job_description': 'Vuln exploitation Security reports', - 'job_type': 'Full-time', - 'remote': False, - 'employer_id': 3, - 'position_id': 2 - }, { - 'id': 4, - 'title': 'Pentester Cybersecurity', - 'spider_id': 1, - 'published_date_raw': 'November 05, 2021', - 'published_date_processed': None, - 'status': 'OPENED', - 'apply_url': 'https://www.getonbrd.com/jobs/cybersecurity/security-engineer-rule-1-ventures-remote', - 'currency': 'USD', - 'min_salary': 0.0, - 'max_salary': 0.0, - 'salary': 'Not supplied', - 'job_description': 'Vuln exploitation Security reports', - 'job_type': 'Full-time', - 'remote': True, - 'employer_id': 4, - 'position_id': 2 - }, { - 'id': 5, - 'title': 'Front-end Developer', - 'spider_id': 1, - 'published_date_raw': 'January 24, 2022', - 'published_date_processed': None, - 'status': 'OPENED', - 'apply_url': 'https://www.getonbrd.com/jobs/programming/fronted-developer-id-business-intelligence-remote', - 'currency': 'USD', - 'min_salary': 0.0, - 'max_salary': 0.0, - 'salary': 'Not supplied', - 'job_description': 'Vuln exploitation Security reports', - 'job_type': 'Full-time', - 'remote': True, - 'employer_id': 5, - 'position_id': 2 - }, { - 'id': 6, - 'title': 'Junior Web Developer', - 'spider_id': 1, - 'published_date_raw': 'January 29, 2022', - 'published_date_processed': None, - 'status': 'OPENED', - 'apply_url': 'https://www.getonbrd.com/jobs/programming/junior-web-developer-reign-remote', - 'currency': 'USD', - 'min_salary': 216000.0, - 'max_salary': 0.0, - 'salary': '$216000.0 - $0.0 a year.', - 'job_description': 'Vuln exploitation Security reports', - 'job_type': 'Full-time', - 'remote': True, - 'employer_id': 6, - 'position_id': 2 - }]) + self.assertEqual( + self.bc.database.list_of("career.Job"), + [ + { + "id": 1, + "title": "Desarrollador Full-Stack", + "spider_id": 1, + "published_date_raw": "January 19, 2022", + "published_date_processed": None, + "status": "OPENED", + "apply_url": 
"https://www.getonbrd.com/jobs/programming/desarrollador-full-stack-developer-centry-santiago", + "currency": "USD", + "min_salary": 21600.0, + "max_salary": 25200.0, + "salary": "$21600.0 - $25200.0 a year.", + "job_description": "Vuln exploitation Security reports", + "job_type": "Full-time", + "remote": True, + "employer_id": 1, + "position_id": 2, + }, + { + "id": 2, + "title": "Desarrollador Full-Stack Python/React", + "spider_id": 1, + "published_date_raw": "January 14, 2022", + "published_date_processed": None, + "status": "OPENED", + "apply_url": "https://www.getonbrd.com/jobs/programming/desarrollodor-fullstack-python-react-alluxi-remote", + "currency": "USD", + "min_salary": 21600.0, + "max_salary": 24000.0, + "salary": "$21600.0 - $24000.0 a year.", + "job_description": "Vuln exploitation Security reports", + "job_type": "Full-time", + "remote": True, + "employer_id": 2, + "position_id": 2, + }, + { + "id": 3, + "title": "Full-Stack Developer", + "spider_id": 1, + "published_date_raw": "January 17, 2022", + "published_date_processed": None, + "status": "OPENED", + "apply_url": "https://www.getonbrd.com/jobs/programming/full-stack-developer-aaxis-commerce-santiago-3c8e", + "currency": "USD", + "min_salary": 0.0, + "max_salary": 0.0, + "salary": "Not supplied", + "job_description": "Vuln exploitation Security reports", + "job_type": "Full-time", + "remote": False, + "employer_id": 3, + "position_id": 2, + }, + { + "id": 4, + "title": "Pentester Cybersecurity", + "spider_id": 1, + "published_date_raw": "November 05, 2021", + "published_date_processed": None, + "status": "OPENED", + "apply_url": "https://www.getonbrd.com/jobs/cybersecurity/security-engineer-rule-1-ventures-remote", + "currency": "USD", + "min_salary": 0.0, + "max_salary": 0.0, + "salary": "Not supplied", + "job_description": "Vuln exploitation Security reports", + "job_type": "Full-time", + "remote": True, + "employer_id": 4, + "position_id": 2, + }, + { + "id": 5, + "title": "Front-end Developer", + "spider_id": 1, + "published_date_raw": "January 24, 2022", + "published_date_processed": None, + "status": "OPENED", + "apply_url": "https://www.getonbrd.com/jobs/programming/fronted-developer-id-business-intelligence-remote", + "currency": "USD", + "min_salary": 0.0, + "max_salary": 0.0, + "salary": "Not supplied", + "job_description": "Vuln exploitation Security reports", + "job_type": "Full-time", + "remote": True, + "employer_id": 5, + "position_id": 2, + }, + { + "id": 6, + "title": "Junior Web Developer", + "spider_id": 1, + "published_date_raw": "January 29, 2022", + "published_date_processed": None, + "status": "OPENED", + "apply_url": "https://www.getonbrd.com/jobs/programming/junior-web-developer-reign-remote", + "currency": "USD", + "min_salary": 216000.0, + "max_salary": 0.0, + "salary": "$216000.0 - $0.0 a year.", + "job_description": "Vuln exploitation Security reports", + "job_type": "Full-time", + "remote": True, + "employer_id": 6, + "position_id": 2, + }, + ], + ) self.assertEqual(result, 6) def test_give_two_employer_repited(self): - spider = {'name': 'getonboard', 'zyte_spider_number': 3, 'zyte_job_number': 0} - zyte_project = {'zyte_api_key': 1234567, 'zyte_api_deploy': 11223344} - platform = {'name': 'getonboard'} + spider = {"name": "getonboard", "zyte_spider_number": 3, "zyte_job_number": 0} + zyte_project = {"zyte_api_key": 1234567, "zyte_api_deploy": 11223344} + platform = {"name": "getonboard"} model = self.bc.database.create(spider=spider, zyte_project=zyte_project, platform=platform) result 
= save_data(model.spider, JOBS2) - employer = self.bc.database.list_of('career.Employer') + employer = self.bc.database.list_of("career.Employer") - self.assertEqual(employer, [{'id': 1, 'name': 'Repite Employer', 'location_id': 1}]) + self.assertEqual(employer, [{"id": 1, "name": "Repite Employer", "location_id": 1}]) self.assertEqual(len(employer), 1) def test_give_two_employer_repited_and_one_diferent(self): - spider = {'name': 'getonboard', 'zyte_spider_number': 3, 'zyte_job_number': 0} - zyte_project = {'zyte_api_key': 1234567, 'zyte_api_deploy': 11223344} - platform = {'name': 'getonboard'} + spider = {"name": "getonboard", "zyte_spider_number": 3, "zyte_job_number": 0} + zyte_project = {"zyte_api_key": 1234567, "zyte_api_deploy": 11223344} + platform = {"name": "getonboard"} model = self.bc.database.create(spider=spider, zyte_project=zyte_project, platform=platform) result = save_data(model.spider, JOBS1) - employer = self.bc.database.list_of('career.Employer') - self.assertEqual(employer, [{ - 'id': 1, - 'name': 'Repite Employer', - 'location_id': 1 - }, { - 'id': 2, - 'name': 'Other Employer', - 'location_id': 3 - }]) + employer = self.bc.database.list_of("career.Employer") + self.assertEqual( + employer, + [ + {"id": 1, "name": "Repite Employer", "location_id": 1}, + {"id": 2, "name": "Other Employer", "location_id": 3}, + ], + ) self.assertEqual(len(employer), 2) def test_give_two_jobs_repited_save_one(self): - spider = {'name': 'indeed', 'zyte_spider_number': 2, 'zyte_job_number': 0} - zyte_project = {'zyte_api_key': 1234567, 'zyte_api_deploy': 11223344} - platform = {'name': 'indeed'} + spider = {"name": "indeed", "zyte_spider_number": 2, "zyte_job_number": 0} + zyte_project = {"zyte_api_key": 1234567, "zyte_api_deploy": 11223344} + platform = {"name": "indeed"} model = self.bc.database.create(spider=spider, zyte_project=zyte_project, platform=platform) result = save_data(model.spider, JOBS3) - job = self.bc.database.list_of('career.Job') - - self.assertEqual(job, [{ - 'id': 1, - 'title': 'Pentester Cybersecurity', - 'published_date_raw': 'November 05, 2021', - 'published_date_processed': None, - 'status': 'OPENED', - 'apply_url': 'https://www.url.com/1', - 'currency': 'USD', - 'min_salary': 1800.0, - 'max_salary': 2000.0, - 'salary': '$1800.0 - $2000.0 a year.', - 'job_description': 'Vuln exploitation Security reports', - 'spider_id': 1, - 'job_type': 'Full-time', - 'remote': True, - 'employer_id': 1, - 'position_id': 2 - }]) + job = self.bc.database.list_of("career.Job") + + self.assertEqual( + job, + [ + { + "id": 1, + "title": "Pentester Cybersecurity", + "published_date_raw": "November 05, 2021", + "published_date_processed": None, + "status": "OPENED", + "apply_url": "https://www.url.com/1", + "currency": "USD", + "min_salary": 1800.0, + "max_salary": 2000.0, + "salary": "$1800.0 - $2000.0 a year.", + "job_description": "Vuln exploitation Security reports", + "spider_id": 1, + "job_type": "Full-time", + "remote": True, + "employer_id": 1, + "position_id": 2, + } + ], + ) self.assertEqual(len(job), 1) def test_give_two_jobs_repited_and_one_diferen(self): - spider = {'name': 'indeed', 'zyte_spider_number': 2, 'zyte_job_number': 0} - zyte_project = {'zyte_api_key': 1234567, 'zyte_api_deploy': 11223344} - platform = {'name': 'indeed'} + spider = {"name": "indeed", "zyte_spider_number": 2, "zyte_job_number": 0} + zyte_project = {"zyte_api_key": 1234567, "zyte_api_deploy": 11223344} + platform = {"name": "indeed"} model = self.bc.database.create(spider=spider, 
zyte_project=zyte_project, platform=platform) result = save_data(model.spider, JOBS4) - job = self.bc.database.list_of('career.Job') - - self.assertEqual(job, [{ - 'id': 1, - 'title': 'Pentester Cybersecurity', - 'published_date_raw': 'today', - 'published_date_processed': None, - 'status': 'OPENED', - 'apply_url': 'https://www.url.com/1', - 'currency': 'USD', - 'min_salary': 0.0, - 'max_salary': 0.0, - 'salary': 'Not supplied', - 'job_description': 'Vuln exploitation Security reports', - 'spider_id': 1, - 'job_type': 'Full-time', - 'remote': True, - 'employer_id': 1, - 'position_id': 2 - }, { - 'id': 2, - 'title': 'Python', - 'published_date_raw': 'November 05, 2021', - 'published_date_processed': None, - 'status': 'OPENED', - 'apply_url': 'https://www.url.com/2', - 'currency': 'USD', - 'min_salary': 0.0, - 'max_salary': 0.0, - 'salary': 'Not supplied', - 'job_description': 'Vuln exploitation Security reports', - 'spider_id': 1, - 'job_type': 'Full-time', - 'remote': False, - 'employer_id': 2, - 'position_id': 2 - }]) + job = self.bc.database.list_of("career.Job") + + self.assertEqual( + job, + [ + { + "id": 1, + "title": "Pentester Cybersecurity", + "published_date_raw": "today", + "published_date_processed": None, + "status": "OPENED", + "apply_url": "https://www.url.com/1", + "currency": "USD", + "min_salary": 0.0, + "max_salary": 0.0, + "salary": "Not supplied", + "job_description": "Vuln exploitation Security reports", + "spider_id": 1, + "job_type": "Full-time", + "remote": True, + "employer_id": 1, + "position_id": 2, + }, + { + "id": 2, + "title": "Python", + "published_date_raw": "November 05, 2021", + "published_date_processed": None, + "status": "OPENED", + "apply_url": "https://www.url.com/2", + "currency": "USD", + "min_salary": 0.0, + "max_salary": 0.0, + "salary": "Not supplied", + "job_description": "Vuln exploitation Security reports", + "spider_id": 1, + "job_type": "Full-time", + "remote": False, + "employer_id": 2, + "position_id": 2, + }, + ], + ) self.assertEqual(len(job), 2) def test_save_jobs_with_same_title_and_diferent_employer(self): - spider = {'name': 'indeed', 'zyte_spider_number': 2, 'zyte_job_number': 0} - zyte_project = {'zyte_api_key': 1234567, 'zyte_api_deploy': 11223344} - platform = {'name': 'indeed'} + spider = {"name": "indeed", "zyte_spider_number": 2, "zyte_job_number": 0} + zyte_project = {"zyte_api_key": 1234567, "zyte_api_deploy": 11223344} + platform = {"name": "indeed"} model = self.bc.database.create(spider=spider, zyte_project=zyte_project, platform=platform) result = save_data(model.spider, JOBS5) - job = self.bc.database.list_of('career.Job') - - self.assertEqual(job, [{ - 'id': 1, - 'title': 'Pentester Cybersecurity', - 'published_date_raw': 'today', - 'published_date_processed': None, - 'status': 'OPENED', - 'apply_url': 'https://www.url.com/1', - 'currency': 'USD', - 'min_salary': 0.0, - 'max_salary': 0.0, - 'salary': 'Not supplied', - 'job_description': 'Vuln exploitation Security reports', - 'spider_id': 1, - 'job_type': 'Full-time', - 'remote': True, - 'employer_id': 1, - 'position_id': 2 - }, { - 'id': 2, - 'title': 'Pentester Cybersecurity', - 'published_date_raw': 'November 05, 2021', - 'published_date_processed': None, - 'status': 'OPENED', - 'apply_url': 'https://www.url.com/2', - 'currency': 'USD', - 'min_salary': 0.0, - 'max_salary': 0.0, - 'salary': 'Not supplied', - 'job_description': 'Vuln exploitation Security reports', - 'spider_id': 1, - 'job_type': 'Full-time', - 'remote': False, - 'employer_id': 2, - 'position_id': 2 - 
}]) + job = self.bc.database.list_of("career.Job") + + self.assertEqual( + job, + [ + { + "id": 1, + "title": "Pentester Cybersecurity", + "published_date_raw": "today", + "published_date_processed": None, + "status": "OPENED", + "apply_url": "https://www.url.com/1", + "currency": "USD", + "min_salary": 0.0, + "max_salary": 0.0, + "salary": "Not supplied", + "job_description": "Vuln exploitation Security reports", + "spider_id": 1, + "job_type": "Full-time", + "remote": True, + "employer_id": 1, + "position_id": 2, + }, + { + "id": 2, + "title": "Pentester Cybersecurity", + "published_date_raw": "November 05, 2021", + "published_date_processed": None, + "status": "OPENED", + "apply_url": "https://www.url.com/2", + "currency": "USD", + "min_salary": 0.0, + "max_salary": 0.0, + "salary": "Not supplied", + "job_description": "Vuln exploitation Security reports", + "spider_id": 1, + "job_type": "Full-time", + "remote": False, + "employer_id": 2, + "position_id": 2, + }, + ], + ) self.assertEqual(len(job), 2) def test_save_jobs_with_same_employer_and_diferent_title(self): - spider = {'name': 'indeed', 'zyte_spider_number': 2, 'zyte_job_number': 0} - zyte_project = {'zyte_api_key': 1234567, 'zyte_api_deploy': 11223344} - platform = {'name': 'indeed'} + spider = {"name": "indeed", "zyte_spider_number": 2, "zyte_job_number": 0} + zyte_project = {"zyte_api_key": 1234567, "zyte_api_deploy": 11223344} + platform = {"name": "indeed"} model = self.bc.database.create(spider=spider, zyte_project=zyte_project, platform=platform) result = save_data(model.spider, JOBS6) - job = self.bc.database.list_of('career.Job') - - self.assertEqual(job, [{ - 'id': 1, - 'title': 'Pentester Cybersecurity', - 'published_date_raw': 'today', - 'published_date_processed': None, - 'status': 'OPENED', - 'apply_url': 'https://www.url.com/1', - 'currency': 'USD', - 'min_salary': 0.0, - 'max_salary': 0.0, - 'salary': 'Not supplied', - 'job_description': 'Vuln exploitation Security reports', - 'spider_id': 1, - 'job_type': 'Full-time', - 'remote': True, - 'employer_id': 1, - 'position_id': 2 - }, { - 'id': 2, - 'title': 'Python', - 'published_date_raw': 'November 05, 2021', - 'published_date_processed': None, - 'status': 'OPENED', - 'apply_url': 'https://www.url.com/2', - 'currency': 'USD', - 'min_salary': 0.0, - 'max_salary': 0.0, - 'salary': 'Not supplied', - 'job_description': 'Vuln exploitation Security reports', - 'spider_id': 1, - 'job_type': 'Full-time', - 'remote': False, - 'employer_id': 1, - 'position_id': 2 - }]) + job = self.bc.database.list_of("career.Job") + + self.assertEqual( + job, + [ + { + "id": 1, + "title": "Pentester Cybersecurity", + "published_date_raw": "today", + "published_date_processed": None, + "status": "OPENED", + "apply_url": "https://www.url.com/1", + "currency": "USD", + "min_salary": 0.0, + "max_salary": 0.0, + "salary": "Not supplied", + "job_description": "Vuln exploitation Security reports", + "spider_id": 1, + "job_type": "Full-time", + "remote": True, + "employer_id": 1, + "position_id": 2, + }, + { + "id": 2, + "title": "Python", + "published_date_raw": "November 05, 2021", + "published_date_processed": None, + "status": "OPENED", + "apply_url": "https://www.url.com/2", + "currency": "USD", + "min_salary": 0.0, + "max_salary": 0.0, + "salary": "Not supplied", + "job_description": "Vuln exploitation Security reports", + "spider_id": 1, + "job_type": "Full-time", + "remote": False, + "employer_id": 1, + "position_id": 2, + }, + ], + ) self.assertEqual(len(job), 2) def 
test_give_two_location_alias_repited(self): - spider = {'name': 'getonboard', 'zyte_spider_number': 3, 'zyte_job_number': 0} - zyte_project = {'zyte_api_key': 1234567, 'zyte_api_deploy': 11223344} - platform = {'name': 'getonboard'} + spider = {"name": "getonboard", "zyte_spider_number": 3, "zyte_job_number": 0} + zyte_project = {"zyte_api_key": 1234567, "zyte_api_deploy": 11223344} + platform = {"name": "getonboard"} model = self.bc.database.create(spider=spider, zyte_project=zyte_project, platform=platform) result = save_data(model.spider, JOBS7) - location = self.bc.database.list_of('career.Location') - location_alias = self.bc.database.list_of('career.LocationAlias') + location = self.bc.database.list_of("career.Location") + location_alias = self.bc.database.list_of("career.LocationAlias") - self.assertEqual(location, [{'id': 1, 'name': 'Chile'}]) + self.assertEqual(location, [{"id": 1, "name": "Chile"}]) self.assertEqual(len(location), 1) - self.assertEqual(location_alias, [{'id': 1, 'name': 'Chile', 'location_id': 1}]) + self.assertEqual(location_alias, [{"id": 1, "name": "Chile", "location_id": 1}]) self.assertEqual(len(location_alias), 1) def test_give_two_location_alias_repited_and_one_diferent(self): - spider = {'name': 'getonboard', 'zyte_spider_number': 3, 'zyte_job_number': 0} - zyte_project = {'zyte_api_key': 1234567, 'zyte_api_deploy': 11223344} - platform = {'name': 'getonboard'} + spider = {"name": "getonboard", "zyte_spider_number": 3, "zyte_job_number": 0} + zyte_project = {"zyte_api_key": 1234567, "zyte_api_deploy": 11223344} + platform = {"name": "getonboard"} model = self.bc.database.create(spider=spider, zyte_project=zyte_project, platform=platform) result = save_data(model.spider, JOBS8) - location = self.bc.database.list_of('career.Location') - location_alias = self.bc.database.list_of('career.LocationAlias') + location = self.bc.database.list_of("career.Location") + location_alias = self.bc.database.list_of("career.LocationAlias") - self.assertEqual(location, [{'id': 1, 'name': 'Chile'}, {'id': 2, 'name': 'Venezuela'}]) + self.assertEqual(location, [{"id": 1, "name": "Chile"}, {"id": 2, "name": "Venezuela"}]) self.assertEqual(len(location), 2) - self.assertEqual(location_alias, [{ - 'id': 1, - 'name': 'Chile', - 'location_id': 1 - }, { - 'id': 2, - 'name': 'Venezuela', - 'location_id': 2 - }]) + self.assertEqual( + location_alias, + [{"id": 1, "name": "Chile", "location_id": 1}, {"id": 2, "name": "Venezuela", "location_id": 2}], + ) self.assertEqual(len(location_alias), 2) def test_save_one_job_without_location_and_return_remote_true(self): - spider = {'name': 'getonboard', 'zyte_spider_number': 3, 'zyte_job_number': 0} - zyte_project = {'zyte_api_key': 1234567, 'zyte_api_deploy': 11223344} - platform = {'name': 'getonboard'} + spider = {"name": "getonboard", "zyte_spider_number": 3, "zyte_job_number": 0} + zyte_project = {"zyte_api_key": 1234567, "zyte_api_deploy": 11223344} + platform = {"name": "getonboard"} model = self.bc.database.create(spider=spider, zyte_project=zyte_project, platform=platform) result = save_data(model.spider, JOBS9) - location = self.bc.database.list_of('career.Location') - job = self.bc.database.list_of('career.Job') + location = self.bc.database.list_of("career.Location") + job = self.bc.database.list_of("career.Job") self.assertEqual(location, []) - self.assertEqual(job.pop()['remote'], True) + self.assertEqual(job.pop()["remote"], True) def test_save_two_job_with_location_and_without_location(self): - spider = {'name': 'getonboard', 
'zyte_spider_number': 3, 'zyte_job_number': 0} - zyte_project = {'zyte_api_key': 1234567, 'zyte_api_deploy': 11223344} - platform = {'name': 'getonboard'} + spider = {"name": "getonboard", "zyte_spider_number": 3, "zyte_job_number": 0} + zyte_project = {"zyte_api_key": 1234567, "zyte_api_deploy": 11223344} + platform = {"name": "getonboard"} model = self.bc.database.create(spider=spider, zyte_project=zyte_project, platform=platform) result = save_data(model.spider, JOBS10) - location = self.bc.database.list_of('career.Location') - job = self.bc.database.list_of('career.Job') + location = self.bc.database.list_of("career.Location") + job = self.bc.database.list_of("career.Job") - self.assertEqual(location, [{'id': 1, 'name': 'Santiago'}]) + self.assertEqual(location, [{"id": 1, "name": "Santiago"}]) self.assertEqual(len(location), 1) - self.assertEqual(job, [{ - 'id': 1, - 'title': 'Pentester Cybersecurity', - 'published_date_raw': 'November 05, 2021', - 'published_date_processed': None, - 'status': 'OPENED', - 'apply_url': 'https://www.url.com/1', - 'currency': 'USD', - 'min_salary': 0.0, - 'max_salary': 0.0, - 'salary': 'Not supplied', - 'job_description': 'Vuln exploitation Security reports', - 'spider_id': 1, - 'job_type': 'Full-time', - 'remote': True, - 'employer_id': 1, - 'position_id': 2 - }, { - 'id': 2, - 'title': 'Pentester Cybersecurity', - 'published_date_raw': 'November 05, 2021', - 'published_date_processed': None, - 'status': 'OPENED', - 'apply_url': 'https://www.url.com/2', - 'currency': 'USD', - 'min_salary': 0.0, - 'max_salary': 0.0, - 'salary': 'Not supplied', - 'job_description': 'Vuln exploitation Security reports', - 'spider_id': 1, - 'job_type': 'Full-time', - 'remote': False, - 'employer_id': 2, - 'position_id': 2 - }]) + self.assertEqual( + job, + [ + { + "id": 1, + "title": "Pentester Cybersecurity", + "published_date_raw": "November 05, 2021", + "published_date_processed": None, + "status": "OPENED", + "apply_url": "https://www.url.com/1", + "currency": "USD", + "min_salary": 0.0, + "max_salary": 0.0, + "salary": "Not supplied", + "job_description": "Vuln exploitation Security reports", + "spider_id": 1, + "job_type": "Full-time", + "remote": True, + "employer_id": 1, + "position_id": 2, + }, + { + "id": 2, + "title": "Pentester Cybersecurity", + "published_date_raw": "November 05, 2021", + "published_date_processed": None, + "status": "OPENED", + "apply_url": "https://www.url.com/2", + "currency": "USD", + "min_salary": 0.0, + "max_salary": 0.0, + "salary": "Not supplied", + "job_description": "Vuln exploitation Security reports", + "spider_id": 1, + "job_type": "Full-time", + "remote": False, + "employer_id": 2, + "position_id": 2, + }, + ], + ) self.assertEqual(len(job), 2) diff --git a/breathecode/career/tests/admin/tests_fetch_sync_all_data_admin.py b/breathecode/career/tests/admin/tests_fetch_sync_all_data_admin.py index 507aeae8e..e58f06031 100644 --- a/breathecode/career/tests/admin/tests_fetch_sync_all_data_admin.py +++ b/breathecode/career/tests/admin/tests_fetch_sync_all_data_admin.py @@ -1,5 +1,9 @@ from unittest.mock import patch, MagicMock, call -from breathecode.tests.mocks.django_contrib import DJANGO_CONTRIB_PATH, DJANGO_CONTRIB_INSTANCES, apply_django_contrib_messages_mock +from breathecode.tests.mocks.django_contrib import ( + DJANGO_CONTRIB_PATH, + DJANGO_CONTRIB_INSTANCES, + apply_django_contrib_messages_mock, +) from breathecode.career.models import Spider from breathecode.career.admin import fetch_sync_all_data_admin from ..mixins import 
CareerTestCase @@ -11,138 +15,132 @@ ) DATA = { - 'status': - 'ok', - 'count': - 3, - 'total': - 3, - 'jobs': [{ - 'priority': 2, - 'tags': [], - 'version': '2f9f2a5-master', - 'state': 'finished', - 'spider_type': 'manual', - 'spider': 'indeed', - 'spider_args': { - 'job': 'front end', - 'loc': 'remote' + "status": "ok", + "count": 3, + "total": 3, + "jobs": [ + { + "priority": 2, + "tags": [], + "version": "2f9f2a5-master", + "state": "finished", + "spider_type": "manual", + "spider": "indeed", + "spider_args": {"job": "front end", "loc": "remote"}, + "close_reason": "finished", + "elapsed": 609370879, + "logs": 74, + "id": "223344/2/72", + "started_time": "2022-01-02T22:56:02", + "updated_time": "2022-01-02T23:53:52", + "items_scraped": 227, + "errors_count": 0, + "responses_received": 555, }, - 'close_reason': 'finished', - 'elapsed': 609370879, - 'logs': 74, - 'id': '223344/2/72', - 'started_time': '2022-01-02T22:56:02', - 'updated_time': '2022-01-02T23:53:52', - 'items_scraped': 227, - 'errors_count': 0, - 'responses_received': 555 - }, { - 'priority': 2, - 'tags': [], - 'version': '2f9f2a5-master', - 'state': 'finished', - 'spider_type': 'manual', - 'spider': 'indeed', - 'spider_args': { - 'job': 'go', - 'loc': 'remote' + { + "priority": 2, + "tags": [], + "version": "2f9f2a5-master", + "state": "finished", + "spider_type": "manual", + "spider": "indeed", + "spider_args": {"job": "go", "loc": "remote"}, + "close_reason": "finished", + "elapsed": 646146617, + "logs": 18, + "id": "223344/2/71", + "started_time": "2022-01-02T13:40:20", + "updated_time": "2022-01-02T13:40:57", + "items_scraped": 0, + "errors_count": 0, + "responses_received": 2, }, - 'close_reason': 'finished', - 'elapsed': 646146617, - 'logs': 18, - 'id': '223344/2/71', - 'started_time': '2022-01-02T13:40:20', - 'updated_time': '2022-01-02T13:40:57', - 'items_scraped': 0, - 'errors_count': 0, - 'responses_received': 2 - }, { - 'priority': 2, - 'tags': [], - 'version': '2f9f2a5-master', - 'state': 'finished', - 'spider_type': 'manual', - 'spider': 'indeed', - 'spider_args': { - 'job': 'web developer', - 'loc': 'remote' + { + "priority": 2, + "tags": [], + "version": "2f9f2a5-master", + "state": "finished", + "spider_type": "manual", + "spider": "indeed", + "spider_args": {"job": "web developer", "loc": "remote"}, + "close_reason": "finished", + "elapsed": 647281256, + "logs": 25, + "id": "223344/2/70", + "started_time": "2022-01-02T13:15:17", + "updated_time": "2022-01-02T13:22:03", + "items_scraped": 0, + "errors_count": 2, + "responses_received": 0, }, - 'close_reason': 'finished', - 'elapsed': 647281256, - 'logs': 25, - 'id': '223344/2/70', - 'started_time': '2022-01-02T13:15:17', - 'updated_time': '2022-01-02T13:22:03', - 'items_scraped': 0, - 'errors_count': 2, - 'responses_received': 0 - }] + ], } -JOBS = [{ - 'Searched_job': 'ruby', - 'Job_title': '.Net Core Developer', - 'Location': 'New Orleans, LA', - 'Company_name': 'Revelry Labs', - 'Post_date': '8 days ago', - 'Extract_date': '2022-02-17', - 'Job_description': 'Net Core Developer who has experience with .net Core, C#, and SQL Server Database experience.', - 'Salary': '', - 'Tags': [], - 'Apply_to': - 'https://www.indeed.com/company/Revelry/jobs/Net-Core-Developer-a8e4e600cb716fb7?fccid=89b6cc7775dbcb2b&vjs=3', - '_type': 'dict' -}, { - 'Searched_job': 'ruby', - 'Job_title': 'Junior DevOps Engineer', - 'Location': 'Remote', - 'Company_name': 'Clear Labs', - 'Post_date': '2 days ago', - 'Extract_date': '2022-02-17', - 'Job_description': 'We are looking for a 
qualified engineer for a full time Junior DevOps Role.', - 'Salary': '', - 'Tags': [], - 'Apply_to': - 'https://www.indeed.com/company/Clear-Labs/jobs/Junior-Devop-Engineer-71a0689ea2bd8cb1?fccid=250710b384a27cb1&vjs=3', - '_type': 'dict' -}] - -JOBS2 = [{ - 'Searched_job': 'ruby', - 'Job_title': '.Net Core Developer', - 'Location': 'New Orleans, LA', - 'Company_name': 'Revelry Labs', - 'Post_date': '8 days ago', - 'Extract_date': '2022-02-17', - 'Job_description': 'Net Core Developer who has experience with .net Core, C#, and SQL Server Database experience.', - 'Salary': '', - 'Tags': [], - 'Apply_to': - 'https://www.indeed.com/company/Revelry/jobs/Net-Core-Developer-a8e4e600cb716fb7?fccid=89b6cc7775dbcb2b&vjs=3', - '_type': 'dict' -}, { - 'Searched_job': 'ruby', - 'Job_title': 'Junior DevOps Engineer', - 'Location': 'Remote', - 'Company_name': 'Clear Labs', - 'Post_date': '2 days ago', - 'Extract_date': '2022-02-17', - 'Job_description': 'We are looking for a qualified engineer for a full time Junior DevOps Role.', - 'Salary': '', - 'Tags': [], - 'Apply_to': - 'https://www.indeed.com/company/Clear-Labs/jobs/Junior-Devop-Engineer-71a0689ea2bd8cb1?fccid=250710b384a27cb1&vjs=3', - '_type': 'dict' -}] +JOBS = [ + { + "Searched_job": "ruby", + "Job_title": ".Net Core Developer", + "Location": "New Orleans, LA", + "Company_name": "Revelry Labs", + "Post_date": "8 days ago", + "Extract_date": "2022-02-17", + "Job_description": "Net Core Developer who has experience with .net Core, C#, and SQL Server Database experience.", + "Salary": "", + "Tags": [], + "Apply_to": "https://www.indeed.com/company/Revelry/jobs/Net-Core-Developer-a8e4e600cb716fb7?fccid=89b6cc7775dbcb2b&vjs=3", + "_type": "dict", + }, + { + "Searched_job": "ruby", + "Job_title": "Junior DevOps Engineer", + "Location": "Remote", + "Company_name": "Clear Labs", + "Post_date": "2 days ago", + "Extract_date": "2022-02-17", + "Job_description": "We are looking for a qualified engineer for a full time Junior DevOps Role.", + "Salary": "", + "Tags": [], + "Apply_to": "https://www.indeed.com/company/Clear-Labs/jobs/Junior-Devop-Engineer-71a0689ea2bd8cb1?fccid=250710b384a27cb1&vjs=3", + "_type": "dict", + }, +] + +JOBS2 = [ + { + "Searched_job": "ruby", + "Job_title": ".Net Core Developer", + "Location": "New Orleans, LA", + "Company_name": "Revelry Labs", + "Post_date": "8 days ago", + "Extract_date": "2022-02-17", + "Job_description": "Net Core Developer who has experience with .net Core, C#, and SQL Server Database experience.", + "Salary": "", + "Tags": [], + "Apply_to": "https://www.indeed.com/company/Revelry/jobs/Net-Core-Developer-a8e4e600cb716fb7?fccid=89b6cc7775dbcb2b&vjs=3", + "_type": "dict", + }, + { + "Searched_job": "ruby", + "Job_title": "Junior DevOps Engineer", + "Location": "Remote", + "Company_name": "Clear Labs", + "Post_date": "2 days ago", + "Extract_date": "2022-02-17", + "Job_description": "We are looking for a qualified engineer for a full time Junior DevOps Role.", + "Salary": "", + "Tags": [], + "Apply_to": "https://www.indeed.com/company/Clear-Labs/jobs/Junior-Devop-Engineer-71a0689ea2bd8cb1?fccid=250710b384a27cb1&vjs=3", + "_type": "dict", + }, +] class RunSpiderAdminTestSuite(CareerTestCase): - @patch(DJANGO_CONTRIB_PATH['messages'], apply_django_contrib_messages_mock()) - @patch('django.contrib.messages.add_message', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.career.actions.fetch_sync_all_data', MagicMock(side_effect=Exception('They killed kenny'))) + 
@patch(DJANGO_CONTRIB_PATH["messages"], apply_django_contrib_messages_mock()) + @patch("django.contrib.messages.add_message", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.career.actions.fetch_sync_all_data", MagicMock(side_effect=Exception("They killed kenny"))) def test_fetch_sync_all_data_admin__with_zero_spider(self): from breathecode.career.actions import fetch_sync_all_data from logging import Logger @@ -153,12 +151,13 @@ def test_fetch_sync_all_data_admin__with_zero_spider(self): fetch_sync_all_data_admin(None, request, queryset) - self.assertEqual(Logger.error.call_args_list, - [call('There was an error retriving the spider They killed kenny')]) + self.assertEqual( + Logger.error.call_args_list, [call("There was an error retriving the spider They killed kenny")] + ) self.assertEqual(fetch_sync_all_data.call_args_list, [call(model.spider)]) - @patch(DJANGO_CONTRIB_PATH['messages'], apply_django_contrib_messages_mock()) - @patch('breathecode.career.actions.fetch_sync_all_data', MagicMock()) + @patch(DJANGO_CONTRIB_PATH["messages"], apply_django_contrib_messages_mock()) + @patch("breathecode.career.actions.fetch_sync_all_data", MagicMock()) def test_fetch_sync_all_data_admin__with_one_spider(self): from breathecode.career.actions import fetch_sync_all_data from django.contrib import messages @@ -172,36 +171,32 @@ def test_fetch_sync_all_data_admin__with_one_spider(self): self.assertEqual(fetch_sync_all_data.call_args_list, [call(model.spider)]) - @patch(DJANGO_CONTRIB_PATH['messages'], apply_django_contrib_messages_mock()) - @patch('django.contrib.messages.add_message', MagicMock()) + @patch(DJANGO_CONTRIB_PATH["messages"], apply_django_contrib_messages_mock()) + @patch("django.contrib.messages.add_message", MagicMock()) @patch( - REQUESTS_PATH['get'], - apply_requests_get_mock([ - (200, 'https://app.scrapinghub.com/api/jobs/list.json', DATA), - (200, 'https://storage.scrapinghub.com/items/223344/2/72?apikey=1234567&format=json', JOBS), - (200, 'https://storage.scrapinghub.com/items/223344/2/75?apikey=1234567&format=json', JOBS2) - ])) - @patch('breathecode.career.actions.fetch_sync_all_data', MagicMock()) + REQUESTS_PATH["get"], + apply_requests_get_mock( + [ + (200, "https://app.scrapinghub.com/api/jobs/list.json", DATA), + (200, "https://storage.scrapinghub.com/items/223344/2/72?apikey=1234567&format=json", JOBS), + (200, "https://storage.scrapinghub.com/items/223344/2/75?apikey=1234567&format=json", JOBS2), + ] + ), + ) + @patch("breathecode.career.actions.fetch_sync_all_data", MagicMock()) def test_fetch_sync_all_data_admin__with_two_spiders(self): from breathecode.career.actions import fetch_sync_all_data from django.contrib import messages - SPIDER = [{ - 'name': 'indeed', - 'zyte_spider_number': 2, - 'zyte_job_number': 0 - }, { - 'name': 'getonboard', - 'zyte_spider_number': 3, - 'zyte_job_number': 0 - }] - ZYTE_PROJECT = [{ - 'zyte_api_key': 1234567, - 'zyte_api_deploy': 223344 - }, { - 'zyte_api_key': 1234567, - 'zyte_api_deploy': 223344 - }] - PLATFORM = [{'name': 'indeed'}, {'name': 'getonboard'}] + + SPIDER = [ + {"name": "indeed", "zyte_spider_number": 2, "zyte_job_number": 0}, + {"name": "getonboard", "zyte_spider_number": 3, "zyte_job_number": 0}, + ] + ZYTE_PROJECT = [ + {"zyte_api_key": 1234567, "zyte_api_deploy": 223344}, + {"zyte_api_key": 1234567, "zyte_api_deploy": 223344}, + ] + PLATFORM = [{"name": "indeed"}, {"name": "getonboard"}] request = HttpRequest() diff --git a/breathecode/career/tests/admin/tests_parse_date_admin.py 
b/breathecode/career/tests/admin/tests_parse_date_admin.py index ed52c32d2..1ae644e41 100644 --- a/breathecode/career/tests/admin/tests_parse_date_admin.py +++ b/breathecode/career/tests/admin/tests_parse_date_admin.py @@ -8,24 +8,27 @@ class ParseDateAdminTestSuite(CareerTestCase): - @patch(DJANGO_CONTRIB_PATH['messages'], apply_django_contrib_messages_mock()) - @patch('django.contrib.messages.add_message', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.career.actions.get_was_published_date_from_string', - MagicMock(side_effect=Exception('They killed kenny'))) + @patch(DJANGO_CONTRIB_PATH["messages"], apply_django_contrib_messages_mock()) + @patch("django.contrib.messages.add_message", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch( + "breathecode.career.actions.get_was_published_date_from_string", + MagicMock(side_effect=Exception("They killed kenny")), + ) def test_get_was_published_date_from_string_admin__with_zero_job(self): from breathecode.career.actions import get_was_published_date_from_string from logging import Logger + model = self.bc.database.create(job=1) request = HttpRequest() queryset = Job.objects.all() get_was_published_date_from_string_admin(None, request, queryset) - self.assertEqual(Logger.error.call_args_list, [call('There was an error retriving the jobs They killed kenny')]) + self.assertEqual(Logger.error.call_args_list, [call("There was an error retriving the jobs They killed kenny")]) self.assertEqual(get_was_published_date_from_string.call_args_list, [call(model.job)]) - @patch(DJANGO_CONTRIB_PATH['messages'], apply_django_contrib_messages_mock()) - @patch('breathecode.career.actions.get_was_published_date_from_string', MagicMock()) + @patch(DJANGO_CONTRIB_PATH["messages"], apply_django_contrib_messages_mock()) + @patch("breathecode.career.actions.get_was_published_date_from_string", MagicMock()) def test_get_was_published_date_from_string_admin__with_one_job(self): from breathecode.career.actions import get_was_published_date_from_string from django.contrib import messages @@ -38,8 +41,8 @@ def test_get_was_published_date_from_string_admin__with_one_job(self): get_was_published_date_from_string_admin(None, request, queryset) self.assertEqual(get_was_published_date_from_string.call_args_list, [call(model.job)]) - @patch(DJANGO_CONTRIB_PATH['messages'], apply_django_contrib_messages_mock()) - @patch('breathecode.career.actions.get_was_published_date_from_string', MagicMock()) + @patch(DJANGO_CONTRIB_PATH["messages"], apply_django_contrib_messages_mock()) + @patch("breathecode.career.actions.get_was_published_date_from_string", MagicMock()) def test_get_was_published_date_from_string_admin__with_two_jobs(self): from breathecode.career.actions import get_was_published_date_from_string from django.contrib import messages diff --git a/breathecode/career/tests/admin/tests_run_spider_admin.py b/breathecode/career/tests/admin/tests_run_spider_admin.py index c28ea3dd0..ffbae7336 100644 --- a/breathecode/career/tests/admin/tests_run_spider_admin.py +++ b/breathecode/career/tests/admin/tests_run_spider_admin.py @@ -8,10 +8,10 @@ class RunSpiderAdminTestSuite(CareerTestCase): - @patch(DJANGO_CONTRIB_PATH['messages'], apply_django_contrib_messages_mock()) - @patch('django.contrib.messages.add_message', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.career.actions.run_spider', MagicMock(side_effect=Exception('They killed kenny'))) + @patch(DJANGO_CONTRIB_PATH["messages"], 
apply_django_contrib_messages_mock()) + @patch("django.contrib.messages.add_message", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.career.actions.run_spider", MagicMock(side_effect=Exception("They killed kenny"))) def test_run_spider_admin__with_zero_spider_logger_error(self): from breathecode.career.actions import run_spider from logging import Logger @@ -21,28 +21,31 @@ def test_run_spider_admin__with_zero_spider_logger_error(self): queryset = Spider.objects.all() run_spider_admin(None, request, queryset) - self.assertEqual(Logger.error.call_args_list, - [call('There was an error retriving the spider They killed kenny')]) - - @patch(DJANGO_CONTRIB_PATH['messages'], apply_django_contrib_messages_mock()) - @patch('django.contrib.messages.add_message', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.career.actions.run_spider', MagicMock(side_effect=Exception('They killed kenny'))) + self.assertEqual( + Logger.error.call_args_list, [call("There was an error retriving the spider They killed kenny")] + ) + + @patch(DJANGO_CONTRIB_PATH["messages"], apply_django_contrib_messages_mock()) + @patch("django.contrib.messages.add_message", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.career.actions.run_spider", MagicMock(side_effect=Exception("They killed kenny"))) def test_run_spider_admin__with_zero_spider(self): from breathecode.career.actions import run_spider from logging import Logger + model = self.bc.database.create(spider=1) request = HttpRequest() queryset = Spider.objects.all() run_spider_admin(None, request, queryset) - self.assertEqual(Logger.error.call_args_list, - [call('There was an error retriving the spider They killed kenny')]) + self.assertEqual( + Logger.error.call_args_list, [call("There was an error retriving the spider They killed kenny")] + ) self.assertEqual(run_spider.call_args_list, [call(model.spider)]) - @patch(DJANGO_CONTRIB_PATH['messages'], apply_django_contrib_messages_mock()) - @patch('breathecode.career.actions.run_spider', MagicMock()) + @patch(DJANGO_CONTRIB_PATH["messages"], apply_django_contrib_messages_mock()) + @patch("breathecode.career.actions.run_spider", MagicMock()) def test_run_spider_admin__with_one_spider(self): from breathecode.career.actions import run_spider from django.contrib import messages @@ -56,8 +59,8 @@ def test_run_spider_admin__with_one_spider(self): self.assertEqual(run_spider.call_args_list, [call(model.spider)]) - @patch(DJANGO_CONTRIB_PATH['messages'], apply_django_contrib_messages_mock()) - @patch('breathecode.career.actions.run_spider', MagicMock()) + @patch(DJANGO_CONTRIB_PATH["messages"], apply_django_contrib_messages_mock()) + @patch("breathecode.career.actions.run_spider", MagicMock()) def test_run_spider_admin__with_two_spiders(self): from breathecode.career.actions import run_spider from django.contrib import messages diff --git a/breathecode/career/tests/mixins/__init__.py b/breathecode/career/tests/mixins/__init__.py index fdc0afa2d..32745820b 100644 --- a/breathecode/career/tests/mixins/__init__.py +++ b/breathecode/career/tests/mixins/__init__.py @@ -1,4 +1,5 @@ """ Mixins """ + from .career_test_case import CareerTestCase # noqa: F401 diff --git a/breathecode/career/tests/mixins/career_test_case.py b/breathecode/career/tests/mixins/career_test_case.py index 5627a5353..e51622404 100644 --- a/breathecode/career/tests/mixins/career_test_case.py +++ b/breathecode/career/tests/mixins/career_test_case.py @@ -1,11 +1,18 @@ 
from rest_framework.test import APITestCase from datetime import datetime, timedelta, date -from breathecode.tests.mixins import (GenerateModelsMixin, CacheMixin, GenerateQueriesMixin, HeadersMixin, - DatetimeMixin, BreathecodeMixin) +from breathecode.tests.mixins import ( + GenerateModelsMixin, + CacheMixin, + GenerateQueriesMixin, + HeadersMixin, + DatetimeMixin, + BreathecodeMixin, +) -class CareerTestCase(APITestCase, GenerateModelsMixin, CacheMixin, GenerateQueriesMixin, HeadersMixin, DatetimeMixin, - BreathecodeMixin): +class CareerTestCase( + APITestCase, GenerateModelsMixin, CacheMixin, GenerateQueriesMixin, HeadersMixin, DatetimeMixin, BreathecodeMixin +): """CertificateTestCase with auth methods""" def setUp(self): diff --git a/breathecode/career/tests/services/tests_get_class_from_scraper_factory.py b/breathecode/career/tests/services/tests_get_class_from_scraper_factory.py index e68bf64df..8989c8fc1 100644 --- a/breathecode/career/tests/services/tests_get_class_from_scraper_factory.py +++ b/breathecode/career/tests/services/tests_get_class_from_scraper_factory.py @@ -6,26 +6,26 @@ class ServicesGetClassScraperFactoryTestCase(CareerTestCase): - @patch(DJANGO_CONTRIB_PATH['messages'], apply_django_contrib_messages_mock()) - @patch('django.contrib.messages.add_message', MagicMock()) - @patch('logging.Logger.error', MagicMock()) + @patch(DJANGO_CONTRIB_PATH["messages"], apply_django_contrib_messages_mock()) + @patch("django.contrib.messages.add_message", MagicMock()) + @patch("logging.Logger.error", MagicMock()) def test_return_false(self): from logging import Logger - scraper_factory('motor') + scraper_factory("motor") self.assertEqual( Logger.error.call_args_list, - [call('There was an error import the library - No ' - "module named 'breathecode.career.services.motor'")]) + [call("There was an error import the library - No " "module named 'breathecode.career.services.motor'")], + ) def test_get_class_correctly(self): - SPIDER = {'name': 'getonboard', 'zyte_spider_number': 1, 'zyte_job_number': 0} - ZYTE_PROJECT = {'zyte_api_key': 1234567, 'zyte_api_deploy': 223344} - PLATFORM = {'name': 'getonboard'} + SPIDER = {"name": "getonboard", "zyte_spider_number": 1, "zyte_job_number": 0} + ZYTE_PROJECT = {"zyte_api_key": 1234567, "zyte_api_deploy": 223344} + PLATFORM = {"name": "getonboard"} model = self.bc.database.create(spider=SPIDER, zyte_project=ZYTE_PROJECT, platform=PLATFORM) - result = scraper_factory('getonboard') - self.assertEqual(result.__module__, 'breathecode.career.services.getonboard') - self.assertEqual(result.__qualname__, 'GetonboardScraper') + result = scraper_factory("getonboard") + self.assertEqual(result.__module__, "breathecode.career.services.getonboard") + self.assertEqual(result.__qualname__, "GetonboardScraper") diff --git a/breathecode/career/tests/tasks/tests_async_fetch_sync_all_data.py b/breathecode/career/tests/tasks/tests_async_fetch_sync_all_data.py index a671c88a6..159ae9612 100644 --- a/breathecode/career/tests/tasks/tests_async_fetch_sync_all_data.py +++ b/breathecode/career/tests/tasks/tests_async_fetch_sync_all_data.py @@ -9,25 +9,28 @@ from ...tasks import async_fetch_sync_all_data -spider = {'name': 'getonboard', 'zyte_spider_number': 4, 'zyte_job_number': 0} -zyte_project = {'zyte_api_key': 1234567, 'zyte_api_deploy': 223344} -platform = {'name': 'getonboard'} +spider = {"name": "getonboard", "zyte_spider_number": 4, "zyte_job_number": 0} +zyte_project = {"zyte_api_key": 1234567, "zyte_api_deploy": 223344} +platform = {"name": "getonboard"} 
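
The task tests below call `.delay()` and immediately assert on the patched action; that only works because the project's Celery app turns on `task_always_eager` when `ENV` is "test" (see the `breathecode/celery.py` hunk later in this diff), which runs the task body synchronously in-process. The following is a minimal, self-contained sketch of that pattern, not part of this patch; `example_app`, `example_action`, and `example_task` are hypothetical names used only for illustration.

from unittest.mock import MagicMock, call, patch

from celery import Celery

example_app = Celery("example")
# Mirror the test settings: execute tasks eagerly, no broker or worker needed.
example_app.conf.update(task_always_eager=True)


def example_action(spi_id: int) -> int:
    # Stand-in for an action such as breathecode.career.actions.fetch_sync_all_data.
    return spi_id


@example_app.task
def example_task(args: dict):
    # The task delegates to the module-level action, looked up at call time.
    return example_action(args["spi_id"])


def test_pattern():
    # Patch the action on this module; because the task resolves the global
    # name when it runs, the mock is what actually gets called.
    with patch(f"{__name__}.example_action", MagicMock()) as mocked:
        example_task.delay({"spi_id": 1})  # eager mode: runs in the caller's process
        assert mocked.call_args_list == [call(1)]
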
class AsyncFetchSyncAllDataTaskTestCase(CareerTestCase): - @patch('breathecode.career.actions.fetch_sync_all_data', MagicMock()) - @patch('logging.Logger.debug', MagicMock()) - @patch('logging.Logger.error', MagicMock()) + @patch("breathecode.career.actions.fetch_sync_all_data", MagicMock()) + @patch("logging.Logger.debug", MagicMock()) + @patch("logging.Logger.error", MagicMock()) def test_async_async_fetch_sync_all_data__with_spider(self): from breathecode.career.actions import fetch_sync_all_data from logging import Logger model = self.bc.database.create(spider=spider, zyte_project=zyte_project, platform=platform) - async_fetch_sync_all_data.delay({'spi_id': model['spider'].id}) + async_fetch_sync_all_data.delay({"spi_id": model["spider"].id}) self.assertEqual(fetch_sync_all_data.call_args_list, [call(model.spider)]) - self.assertEqual(Logger.error.call_args_list, [ - call('Starting async_fetch_sync_all_data'), - call('Starting async_fetch_sync_all_data in spider name getonboard') - ]) + self.assertEqual( + Logger.error.call_args_list, + [ + call("Starting async_fetch_sync_all_data"), + call("Starting async_fetch_sync_all_data in spider name getonboard"), + ], + ) diff --git a/breathecode/career/tests/tasks/tests_async_run_spider.py b/breathecode/career/tests/tasks/tests_async_run_spider.py index 5d5423986..d523bf5d1 100644 --- a/breathecode/career/tests/tasks/tests_async_run_spider.py +++ b/breathecode/career/tests/tasks/tests_async_run_spider.py @@ -9,52 +9,55 @@ from ...tasks import async_run_spider -JOBS = [{ - 'Searched_job': 'junior web developer', - 'Job_title': 'Pentester Cybersecurity', - 'Location': 'Remote (Chile, Venezuela)', - 'Company_name': 'Repite Employer', - 'Post_date': 'November 05, 2021', - 'Extract_date': '2022-01-30', - 'Job_description': 'Vuln exploitation Security reports', - 'Salary': 'Not supplied', - 'Tags': ['back-end', 'cybersecurity', 'english', 'pentesting', 'python'], - 'Apply_to': 'https://www.url.com/1', - '_type': 'dict' -}, { - 'Searched_job': 'junior web developer', - 'Job_title': 'Pentester Cybersecurity', - 'Location': 'Remote (Chile)', - 'Company_name': 'Repite Employer', - 'Post_date': 'November 05, 2021', - 'Extract_date': '2022-01-30', - 'Job_description': 'Vuln exploitation Security reports', - 'Salary': 'Not supplied', - 'Tags': ['back-end', 'cybersecurity', 'english', 'pentesting', 'python'], - 'Apply_to': 'https://www.url.com/2', - '_type': 'dict' -}] - -spider = {'name': 'getonboard', 'zyte_spider_number': 3, 'zyte_job_number': 0} -zyte_project = {'zyte_api_key': 1234567, 'zyte_api_deploy': 223344} -platform = {'name': 'getonboard'} +JOBS = [ + { + "Searched_job": "junior web developer", + "Job_title": "Pentester Cybersecurity", + "Location": "Remote (Chile, Venezuela)", + "Company_name": "Repite Employer", + "Post_date": "November 05, 2021", + "Extract_date": "2022-01-30", + "Job_description": "Vuln exploitation Security reports", + "Salary": "Not supplied", + "Tags": ["back-end", "cybersecurity", "english", "pentesting", "python"], + "Apply_to": "https://www.url.com/1", + "_type": "dict", + }, + { + "Searched_job": "junior web developer", + "Job_title": "Pentester Cybersecurity", + "Location": "Remote (Chile)", + "Company_name": "Repite Employer", + "Post_date": "November 05, 2021", + "Extract_date": "2022-01-30", + "Job_description": "Vuln exploitation Security reports", + "Salary": "Not supplied", + "Tags": ["back-end", "cybersecurity", "english", "pentesting", "python"], + "Apply_to": "https://www.url.com/2", + "_type": "dict", + }, 
+] + +spider = {"name": "getonboard", "zyte_spider_number": 3, "zyte_job_number": 0} +zyte_project = {"zyte_api_key": 1234567, "zyte_api_deploy": 223344} +platform = {"name": "getonboard"} class RunSpiderTaskTestCase(CareerTestCase): - @patch('breathecode.career.actions.run_spider', MagicMock()) - @patch('logging.Logger.debug', MagicMock()) - @patch('logging.Logger.error', MagicMock()) + @patch("breathecode.career.actions.run_spider", MagicMock()) + @patch("logging.Logger.debug", MagicMock()) + @patch("logging.Logger.error", MagicMock()) def test_async_run_spider__without_tasks(self): from breathecode.career.actions import run_spider from logging import Logger model = self.bc.database.create(spider=spider, zyte_project=zyte_project, platform=platform) - async_run_spider.delay({'spi_id': model['spider'].id}) + async_run_spider.delay({"spi_id": model["spider"].id}) self.assertEqual(run_spider.call_args_list, [call(model.spider)]) self.assertEqual( Logger.error.call_args_list, - [call('Starting async_run_spider'), - call('Starting async_run_spider in spider name getonboard')]) + [call("Starting async_run_spider"), call("Starting async_run_spider in spider name getonboard")], + ) diff --git a/breathecode/celery.py b/breathecode/celery.py index 10ea53820..7abeaf9e0 100644 --- a/breathecode/celery.py +++ b/breathecode/celery.py @@ -16,34 +16,36 @@ from breathecode.setup import get_redis_config # set the default Django settings module for the 'celery' program. -os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'breathecode.settings') +os.environ.setdefault("DJANGO_SETTINGS_MODULE", "breathecode.settings") # django.setup() settings, kwargs, REDIS_URL = get_redis_config() -app = Celery('celery_breathecode', **kwargs) -if os.getenv('ENV') == 'test': +app = Celery("celery_breathecode", **kwargs) +if os.getenv("ENV") == "test": app.conf.update(task_always_eager=True) # Using a string here means the worker doesn't have to serialize # the configuration object to child processes. # - namespace='CELERY' means all celery-related configuration keys # should have a `CELERY_` prefix. -app.config_from_object('django.conf:settings') -app.conf.update(broker_url=REDIS_URL, - result_backend=REDIS_URL, - namespace='CELERY', - result_expires=10, - worker_max_memory_per_child=int(os.getenv('CELERY_MAX_MEMORY_PER_WORKER', '470000')), - worker_max_tasks_per_child=int(os.getenv('CELERY_MAX_TASKS_PER_WORKER', '1000'))) +app.config_from_object("django.conf:settings") +app.conf.update( + broker_url=REDIS_URL, + result_backend=REDIS_URL, + namespace="CELERY", + result_expires=10, + worker_max_memory_per_child=int(os.getenv("CELERY_MAX_MEMORY_PER_WORKER", "470000")), + worker_max_tasks_per_child=int(os.getenv("CELERY_MAX_TASKS_PER_WORKER", "1000")), +) # Load task modules from all registered Django app configs. 
app.autodiscover_tasks() app.conf.broker_transport_options = { - 'priority_steps': list(range(11)), - 'sep': ':', - 'queue_order_strategy': 'priority', + "priority_steps": list(range(11)), + "sep": ":", + "queue_order_strategy": "priority", } @@ -57,18 +59,19 @@ class Worker(TypedDict): def get_workers_amount(): - dynos = int(os.getenv('CELERY_DYNOS') or 1) - workers = int(os.getenv('CELERY_MAX_WORKERS') or 1) + dynos = int(os.getenv("CELERY_DYNOS") or 1) + workers = int(os.getenv("CELERY_MAX_WORKERS") or 1) return dynos * workers -if os.getenv('ENV') != 'test' and (WORKER := os.getenv('CELERY_POOL', 'prefork')) not in ['prefork']: +if os.getenv("ENV") != "test" and (WORKER := os.getenv("CELERY_POOL", "prefork")) not in ["prefork"]: raise ValueError(f'CELERY_POOL must be "prefork" but got {WORKER} that is not supported yet.') def get_worker_id(): - if WORKER == 'gevent': + if WORKER == "gevent": from gevent import getcurrent + return id(getcurrent()) return os.getpid() @@ -79,7 +82,7 @@ def worker_process_init_handler(**kwargs): from django_redis import get_redis_connection from redis.exceptions import LockError - is_django_redis = hasattr(cache, 'delete_pattern') + is_django_redis = hasattr(cache, "delete_pattern") if is_django_redis: from redis.lock import Lock @@ -97,12 +100,12 @@ def __exit__(self, *args, **kwargs): pass worker_id = get_worker_id() - print(f'Worker process initialized with id: {worker_id}') + print(f"Worker process initialized with id: {worker_id}") client = None if is_django_redis: - client = get_redis_connection('default') + client = get_redis_connection("default") workers = get_workers_amount() delta = timedelta(minutes=2) @@ -110,8 +113,8 @@ def __exit__(self, *args, **kwargs): # save the workers pid in cache for know its worker number while True: try: - with Lock(client, 'lock:workers', timeout=3, blocking_timeout=3): - data: Workers = cache.get('workers') + with Lock(client, "lock:workers", timeout=3, blocking_timeout=3): + data: Workers = cache.get("workers") available: DataMap = {} if data is None: data = {} @@ -121,9 +124,9 @@ def __exit__(self, *args, **kwargs): data[i] = [] if len(data[i]) >= 2: - data[i].sort(key=lambda x: x['created_at'].replace(tzinfo=UTC)) + data[i].sort(key=lambda x: x["created_at"].replace(tzinfo=UTC)) - if datetime.now(UTC) - data[i][-1]['created_at'].replace(tzinfo=UTC) < delta: + if datetime.now(UTC) - data[i][-1]["created_at"].replace(tzinfo=UTC) < delta: available[i] = False data[i] = data[i][-2:] else: @@ -131,7 +134,7 @@ def __exit__(self, *args, **kwargs): data[i] = data[i][-1:] elif len(data[i]) == 1: - if datetime.now(UTC) - data[i][0]['created_at'].replace(tzinfo=UTC) < delta: + if datetime.now(UTC) - data[i][0]["created_at"].replace(tzinfo=UTC) < delta: available[i] = False else: available[i] = True @@ -142,7 +145,7 @@ def __exit__(self, *args, **kwargs): found = False for i in range(workers): if available[i]: - data[i].append({'pid': worker_id, 'created_at': datetime.now(UTC)}) + data[i].append({"pid": worker_id, "created_at": datetime.now(UTC)}) found = True break @@ -153,9 +156,9 @@ def __exit__(self, *args, **kwargs): if len(data[i]) < len(pointer): pointer = data[i] - pointer.append({'pid': worker_id, 'created_at': datetime.now(UTC)}) + pointer.append({"pid": worker_id, "created_at": datetime.now(UTC)}) - cache.set('workers', data, timeout=None) + cache.set("workers", data, timeout=None) break except LockError: diff --git a/breathecode/certificate/actions.py b/breathecode/certificate/actions.py index 
fa1005301..2850869ee 100644 --- a/breathecode/certificate/actions.py +++ b/breathecode/certificate/actions.py @@ -1,6 +1,7 @@ """ Certificate actions """ + import hashlib import json import logging @@ -20,21 +21,21 @@ from .models import ERROR, PERSISTED, LayoutDesign, Specialty, UserSpecialty logger = logging.getLogger(__name__) -ENVIRONMENT = os.getenv('ENV', None) -BUCKET_NAME = 'certificates-breathecode' +ENVIRONMENT = os.getenv("ENV", None) +BUCKET_NAME = "certificates-breathecode" strings = { - 'es': { - 'Main Instructor': 'Instructor Principal', + "es": { + "Main Instructor": "Instructor Principal", + }, + "en": { + "Main Instructor": "Main Instructor", }, - 'en': { - 'Main Instructor': 'Main Instructor', - } } def certificate_set_default_issued_at(): - query = UserSpecialty.objects.filter(status='PERSISTED', issued_at__isnull=True) + query = UserSpecialty.objects.filter(status="PERSISTED", issued_at__isnull=True) for item in query: if item.cohort: @@ -47,19 +48,19 @@ def certificate_set_default_issued_at(): def syllabus_weeks_to_days(json): days = [] - weeks = json.pop('weeks', []) + weeks = json.pop("weeks", []) for week in weeks: - days += week['days'] + days += week["days"] - if 'days' not in json: - json['days'] = days + if "days" not in json: + json["days"] = days return json -def get_assets_from_syllabus(syllabus_version: SyllabusVersion | int, - task_types: Optional[list[str]] = None, - only_mandatory=False): +def get_assets_from_syllabus( + syllabus_version: SyllabusVersion | int, task_types: Optional[list[str]] = None, only_mandatory=False +): if not isinstance(syllabus_version, SyllabusVersion): syllabus = SyllabusVersion.objects.filter(id=syllabus_version).first() @@ -67,10 +68,10 @@ def get_assets_from_syllabus(syllabus_version: SyllabusVersion | int, syllabus = syllabus_version key_map = { - 'QUIZ': 'quizzes', - 'LESSON': 'lessons', - 'EXERCISE': 'replits', - 'PROJECT': 'assignments', + "QUIZ": "quizzes", + "LESSON": "lessons", + "EXERCISE": "replits", + "PROJECT": "assignments", } if task_types is None: @@ -83,7 +84,7 @@ def get_assets_from_syllabus(syllabus_version: SyllabusVersion | int, syllabus.json = syllabus_weeks_to_days(syllabus.json) - for day in syllabus.json['days']: + for day in syllabus.json["days"]: for atype in key_map: if key_map[atype] not in day: continue @@ -92,36 +93,40 @@ def get_assets_from_syllabus(syllabus_version: SyllabusVersion | int, continue for asset in day[key_map[atype]]: - if (only_mandatory and asset.get('mandatory', True) == True) or only_mandatory is False: - findings.append(asset['slug']) + if (only_mandatory and asset.get("mandatory", True) == True) or only_mandatory is False: + findings.append(asset["slug"]) return findings -def how_many_pending_tasks(syllabus_version: SyllabusVersion | int, user: User | int, task_types: list[str], - only_mandatory: bool) -> int: +def how_many_pending_tasks( + syllabus_version: SyllabusVersion | int, user: User | int, task_types: list[str], only_mandatory: bool +) -> int: extra = {} if (n_task_types := len(task_types)) == 1: - extra['task_type'] = task_types[0] + extra["task_type"] = task_types[0] elif n_task_types > 1: - extra['task_type__in'] = task_types + extra["task_type__in"] = task_types if not isinstance(user, User): - extra['user__id'] = user.id + extra["user__id"] = user.id else: - extra['user'] = user + extra["user"] = user slugs = get_assets_from_syllabus(syllabus_version, task_types=task_types, only_mandatory=only_mandatory) - how_many_approved_tasks = 
Task.objects.filter(associated_slug__in=slugs, - revision_status__in=['APPROVED', 'IGNORED'], - **extra).count() + how_many_approved_tasks = Task.objects.filter( + associated_slug__in=slugs, revision_status__in=["APPROVED", "IGNORED"], **extra + ).count() - how_many_pending_tasks = Task.objects.filter(associated_slug__in=slugs, - **extra).exclude(revision_status__in=['APPROVED', 'IGNORED']).count() + how_many_pending_tasks = ( + Task.objects.filter(associated_slug__in=slugs, **extra) + .exclude(revision_status__in=["APPROVED", "IGNORED"]) + .count() + ) how_many_tasks = how_many_approved_tasks + how_many_pending_tasks if (how_many_slugs := len(slugs)) != how_many_tasks: @@ -131,42 +136,44 @@ def how_many_pending_tasks(syllabus_version: SyllabusVersion | int, user: User | def generate_certificate(user, cohort=None, layout=None): - query = {'user__id': user.id} + query = {"user__id": user.id} if cohort: - query['cohort__id'] = cohort.id + query["cohort__id"] = cohort.id - cohort_user = CohortUser.objects.filter(**query).exclude(cohort__stage='DELETED').first() + cohort_user = CohortUser.objects.filter(**query).exclude(cohort__stage="DELETED").first() if not cohort_user: - raise ValidationException("Impossible to obtain the student cohort, maybe it's none assigned", - slug='missing-cohort-user') + raise ValidationException( + "Impossible to obtain the student cohort, maybe it's none assigned", slug="missing-cohort-user" + ) if not cohort: cohort = cohort_user.cohort if cohort.syllabus_version is None: raise ValidationException( - f'The cohort has no syllabus assigned, please set a syllabus for cohort: {cohort.name}', - slug='missing-syllabus-version') + f"The cohort has no syllabus assigned, please set a syllabus for cohort: {cohort.name}", + slug="missing-syllabus-version", + ) specialty = Specialty.objects.filter(syllabus__id=cohort.syllabus_version.syllabus_id).first() if not specialty: - raise ValidationException('Specialty has no Syllabus assigned', slug='missing-specialty') + raise ValidationException("Specialty has no Syllabus assigned", slug="missing-specialty") uspe = UserSpecialty.objects.filter(user=user, cohort=cohort).first() - if (uspe is not None and uspe.status == 'PERSISTED' and uspe.preview_url): - raise ValidationException('This user already has a certificate created', slug='already-exists') + if uspe is not None and uspe.status == "PERSISTED" and uspe.preview_url: + raise ValidationException("This user already has a certificate created", slug="already-exists") if uspe is None: utc_now = timezone.now() uspe = UserSpecialty( user=user, cohort=cohort, - token=hashlib.sha1((str(user.id) + str(utc_now)).encode('UTF-8')).hexdigest(), + token=hashlib.sha1((str(user.id) + str(utc_now)).encode("UTF-8")).hexdigest(), specialty=specialty, - signed_by_role=strings[cohort.language.lower()]['Main Instructor'], + signed_by_role=strings[cohort.language.lower()]["Main Instructor"], ) if specialty.expiration_day_delta is not None: uspe.expires_at = utc_now + timezone.timedelta(days=specialty.expiration_day_delta) @@ -177,59 +184,62 @@ def generate_certificate(user, cohort=None, layout=None): layout = LayoutDesign.objects.filter(is_default=True, academy=cohort.academy).first() if layout is None: - layout = LayoutDesign.objects.filter(slug='default').first() + layout = LayoutDesign.objects.filter(slug="default").first() if layout is None: - raise ValidationException('No layout was specified and there is no default layout for this academy', - slug='no-default-layout') + raise 
ValidationException( + "No layout was specified and there is no default layout for this academy", slug="no-default-layout" + ) uspe.layout = layout # validate for teacher - main_teacher = CohortUser.objects.filter(cohort__id=cohort.id, role='TEACHER').first() + main_teacher = CohortUser.objects.filter(cohort__id=cohort.id, role="TEACHER").first() if main_teacher is None or main_teacher.user is None: - raise ValidationException('This cohort does not have a main teacher, please assign it first', - slug='without-main-teacher') + raise ValidationException( + "This cohort does not have a main teacher, please assign it first", slug="without-main-teacher" + ) main_teacher = main_teacher.user - uspe.signed_by = main_teacher.first_name + ' ' + main_teacher.last_name + uspe.signed_by = main_teacher.first_name + " " + main_teacher.last_name try: uspe.academy = cohort.academy - pending_tasks = how_many_pending_tasks(cohort.syllabus_version, - user, - task_types=['PROJECT'], - only_mandatory=True) + pending_tasks = how_many_pending_tasks( + cohort.syllabus_version, user, task_types=["PROJECT"], only_mandatory=True + ) if pending_tasks and pending_tasks > 0: - raise ValidationException(f'The student has {pending_tasks} pending tasks', - slug=f'with-pending-tasks-{pending_tasks}') + raise ValidationException( + f"The student has {pending_tasks} pending tasks", slug=f"with-pending-tasks-{pending_tasks}" + ) if not (cohort_user.finantial_status == FULLY_PAID or cohort_user.finantial_status == UP_TO_DATE): - message = 'The student must have finantial status FULLY_PAID or UP_TO_DATE' - raise ValidationException(message, slug='bad-finantial-status') + message = "The student must have finantial status FULLY_PAID or UP_TO_DATE" + raise ValidationException(message, slug="bad-finantial-status") - if cohort_user.educational_status != 'GRADUATED': - raise ValidationException('The student must have educational ' - 'status GRADUATED', - slug='bad-educational-status') + if cohort_user.educational_status != "GRADUATED": + raise ValidationException( + "The student must have educational " "status GRADUATED", slug="bad-educational-status" + ) if not cohort.never_ends and cohort.current_day != cohort.syllabus_version.syllabus.duration_in_days: raise ValidationException( - 'Cohort current day should be ' - f'{cohort.syllabus_version.syllabus.duration_in_days}', - slug='cohort-not-finished') + "Cohort current day should be " f"{cohort.syllabus_version.syllabus.duration_in_days}", + slug="cohort-not-finished", + ) - if not cohort.never_ends and cohort.stage != 'ENDED': + if not cohort.never_ends and cohort.stage != "ENDED": raise ValidationException( "The student cohort stage has to be 'ENDED' before you can issue any certificates", - slug='cohort-without-status-ended') + slug="cohort-without-status-ended", + ) if not uspe.issued_at: uspe.issued_at = timezone.now() uspe.status = PERSISTED - uspe.status_text = 'Certificate successfully queued for PDF generation' + uspe.status_text = "Certificate successfully queued for PDF generation" uspe.save() except ValidationException as e: @@ -245,25 +255,27 @@ def certificate_screenshot(certificate_id: int): certificate = UserSpecialty.objects.get(id=certificate_id) if not certificate.preview_url: - file_name = f'{certificate.token}' + file_name = f"{certificate.token}" storage = Storage() file = storage.file(BUCKET_NAME, file_name) # if the file does not exist if file.blob is None: - query_string = urlencode({ - 'key': os.environ.get('SCREENSHOT_MACHINE_KEY'), - 'url': 
f'https://certificate.4geeks.com/preview/{certificate.token}', - 'device': 'desktop', - 'cacheLimit': '0', - 'dimension': '1024x707', - }) - r = requests.get(f'https://api.screenshotmachine.com?{query_string}', stream=True) + query_string = urlencode( + { + "key": os.environ.get("SCREENSHOT_MACHINE_KEY"), + "url": f"https://certificate.4geeks.com/preview/{certificate.token}", + "device": "desktop", + "cacheLimit": "0", + "dimension": "1024x707", + } + ) + r = requests.get(f"https://api.screenshotmachine.com?{query_string}", stream=True) if r.status_code == 200: file.upload(r.content, public=True) else: - print('Invalid reponse code: ', r.status_code) + print("Invalid reponse code: ", r.status_code) # after created, lets save the URL if file.blob is not None: @@ -281,7 +293,7 @@ def remove_certificate_screenshot(certificate_id): file = storage.file(BUCKET_NAME, file_name) file.delete() - certificate.preview_url = '' + certificate.preview_url = "" certificate.save() return True diff --git a/breathecode/certificate/admin.py b/breathecode/certificate/admin.py index d9b515bfe..d5c566af5 100644 --- a/breathecode/certificate/admin.py +++ b/breathecode/certificate/admin.py @@ -13,71 +13,79 @@ @admin.register(Badge) class BadgeAdmin(admin.ModelAdmin): - list_display = ('slug', 'name') + list_display = ("slug", "name") @admin.register(Specialty) class SpecialtyAdmin(admin.ModelAdmin): - list_display = ('slug', 'name') + list_display = ("slug", "name") @admin.register(LayoutDesign) class LayoutDesignAdmin(admin.ModelAdmin): - list_display = ('slug', 'name', 'academy', 'template', 'default') - list_filter = ['academy__slug'] + list_display = ("slug", "name", "academy", "template", "default") + list_filter = ["academy__slug"] def template(self, obj): return format_html( - f"<a rel='noopener noreferrer' target='_blank' href='{obj.background_url}'>view template</a>") + f"<a rel='noopener noreferrer' target='_blank' href='{obj.background_url}'>view template</a>" + ) def default(self, obj): if obj.is_default: - return '✅ default' + return "✅ default" else: - return 'not default' + return "not default" -@admin.display(description='🔄 RETAKE Screenshot') +@admin.display(description="🔄 RETAKE Screenshot") def screenshot(modeladmin, request, queryset): from django.contrib import messages - certificate_ids = queryset.values_list('id', flat=True) + certificate_ids = queryset.values_list("id", flat=True) for cert_id in certificate_ids: reset_screenshot.delay(cert_id) - messages.success(request, message='Screenshots scheduled correctly') + messages.success(request, message="Screenshots scheduled correctly") -@admin.display(description='⛔️ DELETE Screenshot') +@admin.display(description="⛔️ DELETE Screenshot") def delete_screenshot(modeladmin, request, queryset): from django.contrib import messages - certificate_ids = queryset.values_list('id', flat=True) + certificate_ids = queryset.values_list("id", flat=True) for cert_id in certificate_ids: remove_screenshot.delay(cert_id) - messages.success(request, message='Screenshots scheduled for deletion') + messages.success(request, message="Screenshots scheduled for deletion") -@admin.display(description='⬇️ Export Selected') +@admin.display(description="⬇️ Export Selected") def export_user_specialty_csv(self, request, queryset): - response = HttpResponse(content_type='text/csv') - response['Content-Disposition'] = 'attachment; filename=certificates.csv' + response = HttpResponse(content_type="text/csv") + response["Content-Disposition"] = "attachment; 
filename=certificates.csv" writer = csv.writer(response) - writer.writerow(['First Name', 'Last Name', 'Specialty', 'Academy', 'Cohort', 'Certificate', 'PDF']) + writer.writerow(["First Name", "Last Name", "Specialty", "Academy", "Cohort", "Certificate", "PDF"]) for obj in queryset: - writer.writerow([ - obj.user.first_name, obj.user.last_name, obj.specialty.name, obj.academy.name, obj.cohort.name, - f'https://certificate.4geeks.com/{obj.token}', f'https://certificate.4geeks.com/pdf/{obj.token}' - ]) + writer.writerow( + [ + obj.user.first_name, + obj.user.last_name, + obj.specialty.name, + obj.academy.name, + obj.cohort.name, + f"https://certificate.4geeks.com/{obj.token}", + f"https://certificate.4geeks.com/pdf/{obj.token}", + ] + ) return response @admin.register(UserSpecialty) class UserSpecialtyAdmin(admin.ModelAdmin): - search_fields = ['user__email', 'user__first_name', 'user__last_name', 'cohort__name', 'cohort__slug'] - list_display = ('user', 'specialty', 'expires_at', 'academy', 'cohort', 'pdf', 'preview') - list_filter = ['specialty', 'academy__slug', 'cohort__slug'] - raw_id_fields = ['user'] + search_fields = ["user__email", "user__first_name", "user__last_name", "cohort__name", "cohort__slug"] + list_display = ("user", "specialty", "expires_at", "academy", "cohort", "pdf", "preview") + list_filter = ["specialty", "academy__slug", "cohort__slug"] + raw_id_fields = ["user"] actions = [screenshot, delete_screenshot, export_user_specialty_csv] def pdf(self, obj): @@ -86,51 +94,51 @@ def pdf(self, obj): ) def preview(self, obj): - if obj.preview_url is None or obj.preview_url == '': - return format_html('No available') + if obj.preview_url is None or obj.preview_url == "": + return format_html("No available") return format_html("<a rel='noopener noreferrer' target='_blank' href='{url}'>preview</a>", url=obj.preview_url) def get_readonly_fields(self, request, obj=None): - return ['token', 'expires_at'] + return ["token", "expires_at"] -@admin.display(description='🎖 Generate Student Certificate') +@admin.display(description="🎖 Generate Student Certificate") def user_bulk_certificate(modeladmin, request, queryset): from django.contrib import messages users = queryset.all() try: for u in users: - logger.debug(f'Generating certificate for user {u.id}') + logger.debug(f"Generating certificate for user {u.id}") generate_certificate(u) - messages.success(request, message='Certificates generated sucessfully') + messages.success(request, message="Certificates generated sucessfully") except Exception as e: - logger.exception('Problem generating certificates') + logger.exception("Problem generating certificates") messages.error(request, message=str(e)) @admin.register(UserProxy) class UserAdmin(UserAdmin): - list_display = ('username', 'email', 'first_name', 'last_name') + list_display = ("username", "email", "first_name", "last_name") actions = [user_bulk_certificate] -@admin.display(description='🥇 Generate Cohort Certificates') +@admin.display(description="🥇 Generate Cohort Certificates") def cohort_bulk_certificate(modeladmin, request, queryset): from django.contrib import messages - cohort_ids = queryset.values_list('id', flat=True) + cohort_ids = queryset.values_list("id", flat=True) for _id in cohort_ids: - logger.debug(f'Scheduling certificate generation for cohort {_id}') + logger.debug(f"Scheduling certificate generation for cohort {_id}") generate_cohort_certificates.delay(_id) - messages.success(request, message='Scheduled certificate generation') + messages.success(request, 
message="Scheduled certificate generation") @admin.register(CohortProxy) class CohortAdmin(AdmissionsCohortAdmin): - list_display = ('id', 'slug', 'stage', 'name', 'kickoff_date', 'syllabus_version', 'schedule') + list_display = ("id", "slug", "stage", "name", "kickoff_date", "syllabus_version", "schedule") actions = [cohort_bulk_certificate] diff --git a/breathecode/certificate/apps.py b/breathecode/certificate/apps.py index 327f308e2..7bade1627 100644 --- a/breathecode/certificate/apps.py +++ b/breathecode/certificate/apps.py @@ -2,7 +2,7 @@ class CertificateConfig(AppConfig): - name = 'breathecode.certificate' + name = "breathecode.certificate" def ready(self): from . import receivers # noqa diff --git a/breathecode/certificate/management/commands/set_default_issued_at.py b/breathecode/certificate/management/commands/set_default_issued_at.py index ee21860b3..40acd5595 100644 --- a/breathecode/certificate/management/commands/set_default_issued_at.py +++ b/breathecode/certificate/management/commands/set_default_issued_at.py @@ -3,7 +3,7 @@ class Command(BaseCommand): - help = 'sets default issued_at for new certificates' + help = "sets default issued_at for new certificates" def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) diff --git a/breathecode/certificate/migrations/0001_initial.py b/breathecode/certificate/migrations/0001_initial.py index e6cdb6d65..9b4e3142d 100644 --- a/breathecode/certificate/migrations/0001_initial.py +++ b/breathecode/certificate/migrations/0001_initial.py @@ -10,48 +10,50 @@ class Migration(migrations.Migration): initial = True dependencies = [ - ('admissions', '0008_auto_20200708_0049'), + ("admissions", "0008_auto_20200708_0049"), migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( - name='Specialty', + name="Specialty", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('slug', models.SlugField(max_length=150, unique=True)), - ('name', models.CharField(max_length=150)), - ('logo_url', models.CharField(blank=True, default=None, max_length=250, null=True)), - ('duration_in_hours', models.IntegerField(blank=True, default=None, null=True)), - ('expiration_day_delta', models.IntegerField(blank=True, default=None, null=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("slug", models.SlugField(max_length=150, unique=True)), + ("name", models.CharField(max_length=150)), + ("logo_url", models.CharField(blank=True, default=None, max_length=250, null=True)), + ("duration_in_hours", models.IntegerField(blank=True, default=None, null=True)), + ("expiration_day_delta", models.IntegerField(blank=True, default=None, null=True)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), ], ), migrations.CreateModel( - name='UserSpecialty', + name="UserSpecialty", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('token', models.CharField(db_index=True, max_length=40, unique=True)), - ('expires_at', models.DateTimeField(blank=True, default=None, null=True)), - ('academy', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='admissions.academy')), - ('specialty', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, - 
to='certificate.specialty')), - ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("token", models.CharField(db_index=True, max_length=40, unique=True)), + ("expires_at", models.DateTimeField(blank=True, default=None, null=True)), + ("academy", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="admissions.academy")), + ( + "specialty", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="certificate.specialty"), + ), + ("user", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ], ), migrations.CreateModel( - name='Badge', + name="Badge", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('slug', models.SlugField(max_length=150, unique=True)), - ('name', models.CharField(max_length=150)), - ('logo_url', models.CharField(blank=True, default=None, max_length=250, null=True)), - ('duration_in_hours', models.IntegerField()), - ('expiration_day_delta', models.IntegerField(blank=True, default=None, null=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('specialties', models.ManyToManyField(to='certificate.Specialty')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("slug", models.SlugField(max_length=150, unique=True)), + ("name", models.CharField(max_length=150)), + ("logo_url", models.CharField(blank=True, default=None, max_length=250, null=True)), + ("duration_in_hours", models.IntegerField()), + ("expiration_day_delta", models.IntegerField(blank=True, default=None, null=True)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("specialties", models.ManyToManyField(to="certificate.Specialty")), ], ), ] diff --git a/breathecode/certificate/migrations/0001_squashed_0004_userspecialty_cohort.py b/breathecode/certificate/migrations/0001_squashed_0004_userspecialty_cohort.py index aec3e548a..769f2f800 100644 --- a/breathecode/certificate/migrations/0001_squashed_0004_userspecialty_cohort.py +++ b/breathecode/certificate/migrations/0001_squashed_0004_userspecialty_cohort.py @@ -8,62 +8,69 @@ class Migration(migrations.Migration): - replaces = [('certificate', '0001_initial'), ('certificate', '0002_auto_20200907_2333'), - ('certificate', '0003_userspecialty_signed_by'), ('certificate', '0004_userspecialty_cohort')] + replaces = [ + ("certificate", "0001_initial"), + ("certificate", "0002_auto_20200907_2333"), + ("certificate", "0003_userspecialty_signed_by"), + ("certificate", "0004_userspecialty_cohort"), + ] initial = True dependencies = [ - ('admissions', '0008_auto_20200708_0049'), + ("admissions", "0008_auto_20200708_0049"), migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( - name='Specialty', + name="Specialty", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('slug', models.SlugField(max_length=150, unique=True)), - ('name', models.CharField(max_length=150)), - ('logo_url', models.CharField(blank=True, default=None, max_length=250, null=True)), - ('duration_in_hours', models.IntegerField(blank=True, default=None, null=True)), - ('expiration_day_delta', models.IntegerField(blank=True, default=None, 
null=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("slug", models.SlugField(max_length=150, unique=True)), + ("name", models.CharField(max_length=150)), + ("logo_url", models.CharField(blank=True, default=None, max_length=250, null=True)), + ("duration_in_hours", models.IntegerField(blank=True, default=None, null=True)), + ("expiration_day_delta", models.IntegerField(blank=True, default=None, null=True)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), ], ), migrations.CreateModel( - name='Badge', + name="Badge", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('slug', models.SlugField(max_length=150, unique=True)), - ('name', models.CharField(max_length=150)), - ('logo_url', models.CharField(blank=True, default=None, max_length=250, null=True)), - ('duration_in_hours', models.IntegerField()), - ('expiration_day_delta', models.IntegerField(blank=True, default=None, null=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('specialties', models.ManyToManyField(to='certificate.Specialty')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("slug", models.SlugField(max_length=150, unique=True)), + ("name", models.CharField(max_length=150)), + ("logo_url", models.CharField(blank=True, default=None, max_length=250, null=True)), + ("duration_in_hours", models.IntegerField()), + ("expiration_day_delta", models.IntegerField(blank=True, default=None, null=True)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("specialties", models.ManyToManyField(to="certificate.Specialty")), ], ), migrations.CreateModel( - name='UserSpecialty', + name="UserSpecialty", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('token', models.CharField(db_index=True, max_length=40, unique=True)), - ('expires_at', models.DateTimeField(blank=True, default=None, null=True)), - ('academy', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='admissions.academy')), - ('specialty', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, - to='certificate.specialty')), - ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), - ('created_at', models.DateTimeField(auto_now_add=True, default=django.utils.timezone.now)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('signed_by', models.CharField(default='Bob Dylan', max_length=100)), - ('cohort', - models.ForeignKey(blank=True, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='admissions.cohort')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("token", models.CharField(db_index=True, max_length=40, unique=True)), + ("expires_at", models.DateTimeField(blank=True, default=None, null=True)), + ("academy", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="admissions.academy")), + ( + "specialty", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="certificate.specialty"), + ), + ("user", 
models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), + ("created_at", models.DateTimeField(auto_now_add=True, default=django.utils.timezone.now)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("signed_by", models.CharField(default="Bob Dylan", max_length=100)), + ( + "cohort", + models.ForeignKey( + blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to="admissions.cohort" + ), + ), ], ), ] diff --git a/breathecode/certificate/migrations/0002_auto_20200907_2333.py b/breathecode/certificate/migrations/0002_auto_20200907_2333.py index 22c5cf475..118c9753e 100644 --- a/breathecode/certificate/migrations/0002_auto_20200907_2333.py +++ b/breathecode/certificate/migrations/0002_auto_20200907_2333.py @@ -7,19 +7,19 @@ class Migration(migrations.Migration): dependencies = [ - ('certificate', '0001_initial'), + ("certificate", "0001_initial"), ] operations = [ migrations.AddField( - model_name='userspecialty', - name='created_at', + model_name="userspecialty", + name="created_at", field=models.DateTimeField(auto_now_add=True, default=django.utils.timezone.now), preserve_default=False, ), migrations.AddField( - model_name='userspecialty', - name='updated_at', + model_name="userspecialty", + name="updated_at", field=models.DateTimeField(auto_now=True), ), ] diff --git a/breathecode/certificate/migrations/0002_auto_20200908_0126.py b/breathecode/certificate/migrations/0002_auto_20200908_0126.py index 98814cc6b..a7506e676 100644 --- a/breathecode/certificate/migrations/0002_auto_20200908_0126.py +++ b/breathecode/certificate/migrations/0002_auto_20200908_0126.py @@ -6,23 +6,23 @@ class Migration(migrations.Migration): dependencies = [ - ('certificate', '0001_squashed_0004_userspecialty_cohort'), + ("certificate", "0001_squashed_0004_userspecialty_cohort"), ] operations = [ migrations.AddField( - model_name='userspecialty', - name='signed_by_role', - field=models.CharField(default='Director', max_length=100), + model_name="userspecialty", + name="signed_by_role", + field=models.CharField(default="Director", max_length=100), ), migrations.AlterField( - model_name='userspecialty', - name='created_at', + model_name="userspecialty", + name="created_at", field=models.DateTimeField(auto_now_add=True), ), migrations.AlterField( - model_name='userspecialty', - name='signed_by', + model_name="userspecialty", + name="signed_by", field=models.CharField(max_length=100), ), ] diff --git a/breathecode/certificate/migrations/0003_userspecialty_preview_url.py b/breathecode/certificate/migrations/0003_userspecialty_preview_url.py index bb339eb92..6fe7ce21b 100644 --- a/breathecode/certificate/migrations/0003_userspecialty_preview_url.py +++ b/breathecode/certificate/migrations/0003_userspecialty_preview_url.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('certificate', '0002_auto_20200908_0126'), + ("certificate", "0002_auto_20200908_0126"), ] operations = [ migrations.AddField( - model_name='userspecialty', - name='preview_url', + model_name="userspecialty", + name="preview_url", field=models.CharField(blank=True, default=None, max_length=250, null=True), ), ] diff --git a/breathecode/certificate/migrations/0003_userspecialty_signed_by.py b/breathecode/certificate/migrations/0003_userspecialty_signed_by.py index 881998ab3..bf3912127 100644 --- a/breathecode/certificate/migrations/0003_userspecialty_signed_by.py +++ b/breathecode/certificate/migrations/0003_userspecialty_signed_by.py @@ -6,14 +6,14 @@ class 
Migration(migrations.Migration): dependencies = [ - ('certificate', '0002_auto_20200907_2333'), + ("certificate", "0002_auto_20200907_2333"), ] operations = [ migrations.AddField( - model_name='userspecialty', - name='signed_by', - field=models.CharField(default='Bob Dylan', max_length=100), + model_name="userspecialty", + name="signed_by", + field=models.CharField(default="Bob Dylan", max_length=100), preserve_default=False, ), ] diff --git a/breathecode/certificate/migrations/0004_auto_20200929_1812.py b/breathecode/certificate/migrations/0004_auto_20200929_1812.py index 66efbd8bd..cd67b2c66 100644 --- a/breathecode/certificate/migrations/0004_auto_20200929_1812.py +++ b/breathecode/certificate/migrations/0004_auto_20200929_1812.py @@ -8,43 +8,42 @@ class Migration(migrations.Migration): dependencies = [ - ('auth', '0012_alter_user_first_name_max_length'), - ('certificate', '0003_userspecialty_preview_url'), + ("auth", "0012_alter_user_first_name_max_length"), + ("certificate", "0003_userspecialty_preview_url"), ] operations = [ migrations.CreateModel( - name='LayoutDesign', + name="LayoutDesign", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('slug', models.SlugField(max_length=150, unique=True)), - ('name', models.CharField(max_length=40)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("slug", models.SlugField(max_length=150, unique=True)), + ("name", models.CharField(max_length=40)), ], ), migrations.CreateModel( - name='UserCertificate', + name="UserCertificate", fields=[], options={ - 'proxy': True, - 'indexes': [], - 'constraints': [], + "proxy": True, + "indexes": [], + "constraints": [], }, - bases=('auth.user', ), + bases=("auth.user",), managers=[ - ('objects', django.contrib.auth.models.UserManager()), + ("objects", django.contrib.auth.models.UserManager()), ], ), migrations.AddField( - model_name='specialty', - name='description', + model_name="specialty", + name="description", field=models.CharField(blank=True, default=None, max_length=255, null=True), ), migrations.AddField( - model_name='userspecialty', - name='layout', - field=models.ForeignKey(default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='certificate.layoutdesign'), + model_name="userspecialty", + name="layout", + field=models.ForeignKey( + default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to="certificate.layoutdesign" + ), ), ] diff --git a/breathecode/certificate/migrations/0004_userspecialty_cohort.py b/breathecode/certificate/migrations/0004_userspecialty_cohort.py index 514496c7d..bb57d7cfb 100644 --- a/breathecode/certificate/migrations/0004_userspecialty_cohort.py +++ b/breathecode/certificate/migrations/0004_userspecialty_cohort.py @@ -7,17 +7,16 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0008_auto_20200708_0049'), - ('certificate', '0003_userspecialty_signed_by'), + ("admissions", "0008_auto_20200708_0049"), + ("certificate", "0003_userspecialty_signed_by"), ] operations = [ migrations.AddField( - model_name='userspecialty', - name='cohort', - field=models.ForeignKey(blank=True, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='admissions.cohort'), + model_name="userspecialty", + name="cohort", + field=models.ForeignKey( + blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to="admissions.cohort" + ), ), ] diff --git 
a/breathecode/certificate/migrations/0005_auto_20200929_1916.py b/breathecode/certificate/migrations/0005_auto_20200929_1916.py index 2eee27edb..52cbe044b 100644 --- a/breathecode/certificate/migrations/0005_auto_20200929_1916.py +++ b/breathecode/certificate/migrations/0005_auto_20200929_1916.py @@ -7,28 +7,30 @@ class Migration(migrations.Migration): dependencies = [ - ('auth', '0012_alter_user_first_name_max_length'), - ('certificate', '0004_auto_20200929_1812'), + ("auth", "0012_alter_user_first_name_max_length"), + ("certificate", "0004_auto_20200929_1812"), ] operations = [ - migrations.DeleteModel(name='UserCertificate', ), + migrations.DeleteModel( + name="UserCertificate", + ), migrations.CreateModel( - name='UserProxy', + name="UserProxy", fields=[], options={ - 'proxy': True, - 'indexes': [], - 'constraints': [], + "proxy": True, + "indexes": [], + "constraints": [], }, - bases=('auth.user', ), + bases=("auth.user",), managers=[ - ('objects', django.contrib.auth.models.UserManager()), + ("objects", django.contrib.auth.models.UserManager()), ], ), migrations.AlterField( - model_name='specialty', - name='description', + model_name="specialty", + name="description", field=models.TextField(blank=True, default=None, max_length=500, null=True), ), ] diff --git a/breathecode/certificate/migrations/0006_auto_20201005_2253.py b/breathecode/certificate/migrations/0006_auto_20201005_2253.py index 5237d828f..9a23df6d0 100644 --- a/breathecode/certificate/migrations/0006_auto_20201005_2253.py +++ b/breathecode/certificate/migrations/0006_auto_20201005_2253.py @@ -7,17 +7,19 @@ class Migration(migrations.Migration): dependencies = [ - ('certificate', '0005_auto_20200929_1916'), + ("certificate", "0005_auto_20200929_1916"), ] operations = [ migrations.AlterField( - model_name='userspecialty', - name='layout', - field=models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='certificate.layoutdesign'), + model_name="userspecialty", + name="layout", + field=models.ForeignKey( + blank=True, + default=None, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="certificate.layoutdesign", + ), ), ] diff --git a/breathecode/certificate/migrations/0007_auto_20201029_1106.py b/breathecode/certificate/migrations/0007_auto_20201029_1106.py index 900d143b8..f76e66f4e 100644 --- a/breathecode/certificate/migrations/0007_auto_20201029_1106.py +++ b/breathecode/certificate/migrations/0007_auto_20201029_1106.py @@ -7,29 +7,31 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0011_auto_20201006_0058'), - ('certificate', '0006_auto_20201005_2253'), + ("admissions", "0011_auto_20201006_0058"), + ("certificate", "0006_auto_20201005_2253"), ] operations = [ migrations.CreateModel( - name='CohortProxy', + name="CohortProxy", fields=[], options={ - 'proxy': True, - 'indexes': [], - 'constraints': [], + "proxy": True, + "indexes": [], + "constraints": [], }, - bases=('admissions.cohort', ), + bases=("admissions.cohort",), ), migrations.AddField( - model_name='specialty', - name='certificate', - field=models.OneToOneField(blank=True, - default=None, - help_text='This specialty represents only one certificate', - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='admissions.certificate'), + model_name="specialty", + name="certificate", + field=models.OneToOneField( + blank=True, + default=None, + help_text="This specialty represents only one certificate", + null=True, + on_delete=django.db.models.deletion.CASCADE, 
+ to="admissions.certificate", + ), ), ] diff --git a/breathecode/certificate/migrations/0008_userspecialty_status_text.py b/breathecode/certificate/migrations/0008_userspecialty_status_text.py index 2181b3066..e1ed38c4c 100644 --- a/breathecode/certificate/migrations/0008_userspecialty_status_text.py +++ b/breathecode/certificate/migrations/0008_userspecialty_status_text.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('certificate', '0007_auto_20201029_1106'), + ("certificate", "0007_auto_20201029_1106"), ] operations = [ migrations.AddField( - model_name='userspecialty', - name='status_text', + model_name="userspecialty", + name="status_text", field=models.CharField(blank=True, default=None, max_length=255, null=True), ), ] diff --git a/breathecode/certificate/migrations/0009_userspecialty_status.py b/breathecode/certificate/migrations/0009_userspecialty_status.py index be81d9623..f35040d17 100644 --- a/breathecode/certificate/migrations/0009_userspecialty_status.py +++ b/breathecode/certificate/migrations/0009_userspecialty_status.py @@ -6,15 +6,17 @@ class Migration(migrations.Migration): dependencies = [ - ('certificate', '0008_userspecialty_status_text'), + ("certificate", "0008_userspecialty_status_text"), ] operations = [ migrations.AddField( - model_name='userspecialty', - name='status', - field=models.CharField(choices=[('PENDING', 'Pending'), ('PERSISTED', 'Persisted'), ('ERROR', 'Error')], - default='PENDING', - max_length=15), + model_name="userspecialty", + name="status", + field=models.CharField( + choices=[("PENDING", "Pending"), ("PERSISTED", "Persisted"), ("ERROR", "Error")], + default="PENDING", + max_length=15, + ), ), ] diff --git a/breathecode/certificate/migrations/0010_auto_20210701_1613.py b/breathecode/certificate/migrations/0010_auto_20210701_1613.py index 25b5aac07..a78739481 100644 --- a/breathecode/certificate/migrations/0010_auto_20210701_1613.py +++ b/breathecode/certificate/migrations/0010_auto_20210701_1613.py @@ -7,23 +7,24 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0019_certificatetimeslot_cohorttimeslot'), - ('certificate', '0009_userspecialty_status'), + ("admissions", "0019_certificatetimeslot_cohorttimeslot"), + ("certificate", "0009_userspecialty_status"), ] operations = [ migrations.AddField( - model_name='layoutdesign', - name='academy', - field=models.ForeignKey(default=4, on_delete=django.db.models.deletion.CASCADE, to='admissions.academy'), + model_name="layoutdesign", + name="academy", + field=models.ForeignKey(default=4, on_delete=django.db.models.deletion.CASCADE, to="admissions.academy"), preserve_default=False, ), migrations.AddField( - model_name='layoutdesign', - name='background_url', + model_name="layoutdesign", + name="background_url", field=models.CharField( - default='https://storage.cloud.google.com/certificates-breathecode/_template-default-4geeks.png', - max_length=250), + default="https://storage.cloud.google.com/certificates-breathecode/_template-default-4geeks.png", + max_length=250, + ), preserve_default=False, ), ] diff --git a/breathecode/certificate/migrations/0011_auto_20210701_1941.py b/breathecode/certificate/migrations/0011_auto_20210701_1941.py index 9852ebe3e..4c511bf42 100644 --- a/breathecode/certificate/migrations/0011_auto_20210701_1941.py +++ b/breathecode/certificate/migrations/0011_auto_20210701_1941.py @@ -6,25 +6,26 @@ class Migration(migrations.Migration): dependencies = [ - ('certificate', '0010_auto_20210701_1613'), + ("certificate", 
"0010_auto_20210701_1613"), ] operations = [ migrations.AddField( - model_name='layoutdesign', - name='css_content', + model_name="layoutdesign", + name="css_content", field=models.TextField(blank=True, default=None, null=True), ), migrations.AddField( - model_name='layoutdesign', - name='html_content', + model_name="layoutdesign", + name="html_content", field=models.TextField(blank=True, default=None, null=True), ), migrations.AddField( - model_name='layoutdesign', - name='is_default', + model_name="layoutdesign", + name="is_default", field=models.BooleanField( default=False, - help_text='Will be used as default for all future certificates. Only one default layout per academy.'), + help_text="Will be used as default for all future certificates. Only one default layout per academy.", + ), ), ] diff --git a/breathecode/certificate/migrations/0012_auto_20210727_1106.py b/breathecode/certificate/migrations/0012_auto_20210727_1106.py index ebd25a38f..6cee80dd5 100644 --- a/breathecode/certificate/migrations/0012_auto_20210727_1106.py +++ b/breathecode/certificate/migrations/0012_auto_20210727_1106.py @@ -7,23 +7,25 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0020_auto_20210727_1106'), - ('certificate', '0011_auto_20210701_1941'), + ("admissions", "0020_auto_20210727_1106"), + ("certificate", "0011_auto_20210701_1941"), ] operations = [ migrations.RemoveField( - model_name='specialty', - name='certificate', + model_name="specialty", + name="certificate", ), migrations.AddField( - model_name='specialty', - name='syllabus', - field=models.OneToOneField(blank=True, - default=None, - help_text='This specialty represents only one certificate', - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='admissions.syllabus'), + model_name="specialty", + name="syllabus", + field=models.OneToOneField( + blank=True, + default=None, + help_text="This specialty represents only one certificate", + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="admissions.syllabus", + ), ), ] diff --git a/breathecode/certificate/migrations/0012_auto_20210805_0329.py b/breathecode/certificate/migrations/0012_auto_20210805_0329.py index 0e7bbe7fc..31fc4f124 100644 --- a/breathecode/certificate/migrations/0012_auto_20210805_0329.py +++ b/breathecode/certificate/migrations/0012_auto_20210805_0329.py @@ -6,18 +6,18 @@ class Migration(migrations.Migration): dependencies = [ - ('certificate', '0011_auto_20210701_1941'), + ("certificate", "0011_auto_20210701_1941"), ] operations = [ migrations.AddField( - model_name='layoutdesign', - name='preview_url', + model_name="layoutdesign", + name="preview_url", field=models.CharField(default=None, max_length=250, null=True), ), migrations.AddField( - model_name='userspecialty', - name='issued_at', + model_name="userspecialty", + name="issued_at", field=models.DateTimeField(blank=True, default=None, null=True), ), ] diff --git a/breathecode/certificate/migrations/0013_remove_userspecialty_issued_at.py b/breathecode/certificate/migrations/0013_remove_userspecialty_issued_at.py index 6c614ce23..53aae96b3 100644 --- a/breathecode/certificate/migrations/0013_remove_userspecialty_issued_at.py +++ b/breathecode/certificate/migrations/0013_remove_userspecialty_issued_at.py @@ -6,12 +6,12 @@ class Migration(migrations.Migration): dependencies = [ - ('certificate', '0012_auto_20210805_0329'), + ("certificate", "0012_auto_20210805_0329"), ] operations = [ migrations.RemoveField( - model_name='userspecialty', - name='issued_at', + 
model_name="userspecialty", + name="issued_at", ), ] diff --git a/breathecode/certificate/migrations/0014_merge_20210810_0418.py b/breathecode/certificate/migrations/0014_merge_20210810_0418.py index 74fa124f8..202c32ecf 100644 --- a/breathecode/certificate/migrations/0014_merge_20210810_0418.py +++ b/breathecode/certificate/migrations/0014_merge_20210810_0418.py @@ -6,8 +6,8 @@ class Migration(migrations.Migration): dependencies = [ - ('certificate', '0012_auto_20210727_1106'), - ('certificate', '0013_remove_userspecialty_issued_at'), + ("certificate", "0012_auto_20210727_1106"), + ("certificate", "0013_remove_userspecialty_issued_at"), ] operations = [] diff --git a/breathecode/certificate/migrations/0015_userspecialty_issued_at.py b/breathecode/certificate/migrations/0015_userspecialty_issued_at.py index 460d2344e..269898d4a 100644 --- a/breathecode/certificate/migrations/0015_userspecialty_issued_at.py +++ b/breathecode/certificate/migrations/0015_userspecialty_issued_at.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('certificate', '0014_merge_20210810_0418'), + ("certificate", "0014_merge_20210810_0418"), ] operations = [ migrations.AddField( - model_name='userspecialty', - name='issued_at', + model_name="userspecialty", + name="issued_at", field=models.DateTimeField(blank=True, default=None, null=True), ), ] diff --git a/breathecode/certificate/migrations/0016_userspecialty_update_hash.py b/breathecode/certificate/migrations/0016_userspecialty_update_hash.py index f64a2be46..84d2350d9 100644 --- a/breathecode/certificate/migrations/0016_userspecialty_update_hash.py +++ b/breathecode/certificate/migrations/0016_userspecialty_update_hash.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('certificate', '0015_userspecialty_issued_at'), + ("certificate", "0015_userspecialty_issued_at"), ] operations = [ migrations.AddField( - model_name='userspecialty', - name='update_hash', + model_name="userspecialty", + name="update_hash", field=models.CharField(blank=True, max_length=40, null=True), ), ] diff --git a/breathecode/certificate/migrations/0017_layoutdesign_foot_note.py b/breathecode/certificate/migrations/0017_layoutdesign_foot_note.py index d39726bff..ec7d6f99f 100644 --- a/breathecode/certificate/migrations/0017_layoutdesign_foot_note.py +++ b/breathecode/certificate/migrations/0017_layoutdesign_foot_note.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('certificate', '0016_userspecialty_update_hash'), + ("certificate", "0016_userspecialty_update_hash"), ] operations = [ migrations.AddField( - model_name='layoutdesign', - name='foot_note', + model_name="layoutdesign", + name="foot_note", field=models.CharField(default=None, max_length=250, null=True), ), ] diff --git a/breathecode/certificate/models.py b/breathecode/certificate/models.py index 7f0688b35..028dc76fe 100644 --- a/breathecode/certificate/models.py +++ b/breathecode/certificate/models.py @@ -8,7 +8,7 @@ import breathecode.certificate.signals as signals from breathecode.admissions.models import Academy, Cohort, Syllabus -__all__ = ['UserProxy', 'Specialty', 'Badge', 'LayoutDesign', 'UserSpecialty'] +__all__ = ["UserProxy", "Specialty", "Badge", "LayoutDesign", "UserSpecialty"] class UserProxy(User): @@ -33,12 +33,14 @@ class Specialty(models.Model): # how long it takes to expire, leave null for unlimited expiration_day_delta = models.IntegerField(blank=True, null=True, default=None) - syllabus = models.OneToOneField(Syllabus, - 
on_delete=models.CASCADE, - help_text='This specialty represents only one certificate', - blank=True, - null=True, - default=None) + syllabus = models.OneToOneField( + Syllabus, + on_delete=models.CASCADE, + help_text="This specialty represents only one certificate", + blank=True, + null=True, + default=None, + ) created_at = models.DateTimeField(auto_now_add=True, editable=False) updated_at = models.DateTimeField(auto_now=True, editable=False) @@ -71,7 +73,8 @@ class LayoutDesign(models.Model): name = models.CharField(max_length=40) is_default = models.BooleanField( default=False, - help_text='Will be used as default for all future certificates. Only one default layout per academy.') + help_text="Will be used as default for all future certificates. Only one default layout per academy.", + ) html_content = models.TextField(null=True, default=None, blank=True) css_content = models.TextField(null=True, default=None, blank=True) @@ -85,13 +88,13 @@ def __str__(self): return self.name -PENDING = 'PENDING' -PERSISTED = 'PERSISTED' -ERROR = 'ERROR' +PENDING = "PENDING" +PERSISTED = "PERSISTED" +ERROR = "ERROR" USER_SPECIALTY_STATUS = ( - (PENDING, 'Pending'), - (PERSISTED, 'Persisted'), - (ERROR, 'Error'), + (PENDING, "Pending"), + (PERSISTED, "Persisted"), + (ERROR, "Error"), ) @@ -108,7 +111,7 @@ class UserSpecialty(models.Model): layout = models.ForeignKey(LayoutDesign, on_delete=models.CASCADE, blank=True, null=True, default=None) cohort = models.ForeignKey(Cohort, on_delete=models.CASCADE, blank=True, null=True) signed_by = models.CharField(max_length=100) - signed_by_role = models.CharField(max_length=100, default='Director') + signed_by_role = models.CharField(max_length=100, default="Director") issued_at = models.DateTimeField(default=None, blank=True, null=True) update_hash = models.CharField(max_length=40, blank=True, null=True) @@ -119,30 +122,31 @@ class UserSpecialty(models.Model): def generate_update_hash(self): kwargs = { - 'signed_by': self.signed_by, - 'signed_by_role': self.signed_by_role, - 'status': self.status, - 'layout': self.layout, - 'expires_at': self.expires_at, - 'issued_at': self.issued_at, + "signed_by": self.signed_by, + "signed_by_role": self.signed_by_role, + "status": self.status, + "layout": self.layout, + "expires_at": self.expires_at, + "issued_at": self.issued_at, } - important_fields = ['signed_by', 'signed_by_role', 'status', 'layout', 'expires_at', 'issued_at'] - important_values = '-'.join( - [str(kwargs.get(field) if field in kwargs else None) for field in sorted(important_fields)]) + important_fields = ["signed_by", "signed_by_role", "status", "layout", "expires_at", "issued_at"] + important_values = "-".join( + [str(kwargs.get(field) if field in kwargs else None) for field in sorted(important_fields)] + ) - return hashlib.sha1(important_values.encode('UTF-8')).hexdigest() + return hashlib.sha1(important_values.encode("UTF-8")).hexdigest() def clean(self): if self.status == ERROR: return if self.cohort is not None and self.cohort.academy.id != self.academy.id: - raise ValidationError('Cohort academy does not match the specified academy for this certificate') + raise ValidationError("Cohort academy does not match the specified academy for this certificate") utc_now = timezone.now() - if self.token is None or self.token == '': - self.token = hashlib.sha1((str(self.user.id) + str(utc_now)).encode('UTF-8')).hexdigest() + if self.token is None or self.token == "": + self.token = hashlib.sha1((str(self.user.id) + str(utc_now)).encode("UTF-8")).hexdigest() # 
set expiration if self.specialty.expiration_day_delta is not None: diff --git a/breathecode/certificate/receivers.py b/breathecode/certificate/receivers.py index eb3ae128b..c40cfe85d 100644 --- a/breathecode/certificate/receivers.py +++ b/breathecode/certificate/receivers.py @@ -14,14 +14,14 @@ @receiver(user_specialty_saved, sender=UserSpecialty) def post_save_user_specialty(sender, instance: UserSpecialty, **kwargs): - if instance._hash_was_updated and instance.status == 'PERSISTED' and instance.preview_url: + if instance._hash_was_updated and instance.status == "PERSISTED" and instance.preview_url: tasks.reset_screenshot.delay(instance.id) - elif instance._hash_was_updated and instance.status == 'PERSISTED' and not instance.preview_url: + elif instance._hash_was_updated and instance.status == "PERSISTED" and not instance.preview_url: tasks.take_screenshot.delay(instance.id) @receiver(student_edu_status_updated, sender=CohortUser) def generate_certificate(sender, instance: CohortUser, **kwargs): - if instance.cohort.available_as_saas and instance.educational_status == 'GRADUATED': + if instance.cohort.available_as_saas and instance.educational_status == "GRADUATED": tasks.async_generate_certificate.delay(instance.cohort.id, instance.user.id) diff --git a/breathecode/certificate/serializers.py b/breathecode/certificate/serializers.py index 9ad3e8b8c..09373a60c 100644 --- a/breathecode/certificate/serializers.py +++ b/breathecode/certificate/serializers.py @@ -6,12 +6,14 @@ class ProfileSmallSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. avatar_url = serpy.Field() class UserSmallSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. id = serpy.Field() first_name = serpy.Field() @@ -21,6 +23,7 @@ class UserSmallSerializer(serpy.Serializer): class AcademyTinySerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. id = serpy.Field() slug = serpy.Field() @@ -29,6 +32,7 @@ class AcademyTinySerializer(serpy.Serializer): class AcademySmallSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. id = serpy.Field() slug = serpy.Field() @@ -39,6 +43,7 @@ class AcademySmallSerializer(serpy.Serializer): class TinyLayoutDesignSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. slug = serpy.Field() name = serpy.Field() @@ -48,6 +53,7 @@ class TinyLayoutDesignSerializer(serpy.Serializer): class LayoutDesignSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. slug = serpy.Field() name = serpy.Field() @@ -58,6 +64,7 @@ class LayoutDesignSerializer(serpy.Serializer): class SyllabusVersionSmallSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. version = serpy.Field() slug = serpy.MethodField() @@ -88,6 +95,7 @@ def get_week_hours(self, obj): class CohortSmallSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. 
id = serpy.Field() slug = serpy.Field() @@ -98,6 +106,7 @@ class CohortSmallSerializer(serpy.Serializer): class CohortMidSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. id = serpy.Field() slug = serpy.Field() @@ -110,6 +119,7 @@ class CohortMidSerializer(serpy.Serializer): class SpecialtySerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. id = serpy.Field() slug = serpy.Field() @@ -123,6 +133,7 @@ class SpecialtySerializer(serpy.Serializer): class BadgeSmallSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. id = serpy.Field() name = serpy.Field() @@ -130,6 +141,7 @@ class BadgeSmallSerializer(serpy.Serializer): class BadgeSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. id = serpy.Field() slug = serpy.Field() @@ -139,6 +151,7 @@ class BadgeSerializer(serpy.Serializer): class UserSpecialtySerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. id = serpy.Field() signed_by = serpy.Field() diff --git a/breathecode/certificate/signals.py b/breathecode/certificate/signals.py index 225d2e510..d11bbd07c 100644 --- a/breathecode/certificate/signals.py +++ b/breathecode/certificate/signals.py @@ -2,6 +2,6 @@ from task_manager.django.dispatch import Emisor -emisor = Emisor('breathecode.certificate') +emisor = Emisor("breathecode.certificate") -user_specialty_saved = emisor.signal('user_specialty_saved') +user_specialty_saved = emisor.signal("user_specialty_saved") diff --git a/breathecode/certificate/tasks.py b/breathecode/certificate/tasks.py index 2c1743f5a..3ddf4e8c3 100644 --- a/breathecode/certificate/tasks.py +++ b/breathecode/certificate/tasks.py @@ -12,7 +12,7 @@ @task(bind=True, priority=TaskPriority.CERTIFICATE.value) def take_screenshot(self, certificate_id, **_): - logger.debug('Starting take_screenshot') + logger.debug("Starting take_screenshot") # unittest.mock.patch is poor applying mocks from .actions import certificate_screenshot @@ -23,20 +23,20 @@ def take_screenshot(self, certificate_id, **_): def remove_screenshot(self, certificate_id, **_): from .actions import remove_certificate_screenshot - logger.info('Starting remove_screenshot') + logger.info("Starting remove_screenshot") try: res = remove_certificate_screenshot(certificate_id) except UserSpecialty.DoesNotExist: - raise RetryTask(f'UserSpecialty {certificate_id} does not exist') + raise RetryTask(f"UserSpecialty {certificate_id} does not exist") if res is False: - raise AbortTask('UserSpecialty does not have any screenshot, it is skipped') + raise AbortTask("UserSpecialty does not have any screenshot, it is skipped") @task(bind=True, priority=TaskPriority.CERTIFICATE.value) def reset_screenshot(self, certificate_id, **_): - logger.debug('Starting reset_screenshot') + logger.debug("Starting reset_screenshot") # unittest.mock.patch is poor applying mocks from .actions import certificate_screenshot, remove_certificate_screenshot @@ -47,36 +47,38 @@ def reset_screenshot(self, certificate_id, **_): @task(bind=True, priority=TaskPriority.CERTIFICATE.value) def generate_cohort_certificates(self, cohort_id, **_): - logger.debug('Starting generate_cohort_certificates') + logger.debug("Starting 
generate_cohort_certificates") from .actions import generate_certificate - cohort_users = CohortUser.objects.filter(cohort__id=cohort_id, role='STUDENT') + cohort_users = CohortUser.objects.filter(cohort__id=cohort_id, role="STUDENT") - logger.debug(f'Generating certificate for {str(cohort_users.count())} students that GRADUATED') + logger.debug(f"Generating certificate for {str(cohort_users.count())} students that GRADUATED") for cu in cohort_users: try: generate_certificate(cu.user, cu.cohort) except Exception: - logger.exception(f'Error generating certificate for {str(cu.user.id)} cohort {str(cu.cohort.id)}') + logger.exception(f"Error generating certificate for {str(cu.user.id)} cohort {str(cu.cohort.id)}") @task(bind=True, priority=TaskPriority.CERTIFICATE.value) def async_generate_certificate(self, cohort_id, user_id, layout=None, **_): - logger.info('Starting generate_cohort_certificates', slug='starting-generating-certificate') + logger.info("Starting generate_cohort_certificates", slug="starting-generating-certificate") from .actions import generate_certificate - cohort_user = CohortUser.objects.filter(cohort__id=cohort_id, user__id=user_id, role='STUDENT').first() + cohort_user = CohortUser.objects.filter(cohort__id=cohort_id, user__id=user_id, role="STUDENT").first() if not cohort_user: - logger.error(f'Cant generate certificate with {user_id}', slug='cohort-user-not-found') + logger.error(f"Cant generate certificate with {user_id}", slug="cohort-user-not-found") return - logger.info(f'Generating gertificate for {str(cohort_user.user)} student that GRADUATED', - slug='generating-certificate') + logger.info( + f"Generating gertificate for {str(cohort_user.user)} student that GRADUATED", slug="generating-certificate" + ) try: generate_certificate(cohort_user.user, cohort_user.cohort, layout) except Exception: logger.exception( - f'Error generating certificate for {str(cohort_user.user.id)}, cohort {str(cohort_user.cohort.id)}', - slug='error-generating-certificate') + f"Error generating certificate for {str(cohort_user.user.id)}, cohort {str(cohort_user.cohort.id)}", + slug="error-generating-certificate", + ) diff --git a/breathecode/certificate/tests/actions/tests_certificate_screenshot.py b/breathecode/certificate/tests/actions/tests_certificate_screenshot.py index d1cccef3b..271064045 100644 --- a/breathecode/certificate/tests/actions/tests_certificate_screenshot.py +++ b/breathecode/certificate/tests/actions/tests_certificate_screenshot.py @@ -1,6 +1,7 @@ """ Tasks tests """ + import os from unittest.mock import MagicMock, PropertyMock, call, patch from urllib.parse import urlencode @@ -15,47 +16,58 @@ from ...models import UserSpecialty from ..mixins import CertificateTestCase -token = '12345a67890b12345c67890d' -query_string = urlencode({ - 'key': os.environ.get('SCREENSHOT_MACHINE_KEY'), - 'url': f'https://certificate.4geeks.com/preview/{token}', - 'device': 'desktop', - 'cacheLimit': '0', - 'dimension': '1024x707', -}) +token = "12345a67890b12345c67890d" +query_string = urlencode( + { + "key": os.environ.get("SCREENSHOT_MACHINE_KEY"), + "url": f"https://certificate.4geeks.com/preview/{token}", + "device": "desktop", + "cacheLimit": "0", + "dimension": "1024x707", + } +) class ActionCertificateScreenshotTestCase(CertificateTestCase): """Tests action certificate_screenshot""" + """ 🔽🔽🔽 Zero UserSpecialty """ - @patch('requests.get', - apply_requests_get_mock([ - (200, f'https://api.screenshotmachine.com?{query_string}', 'mailgun response'), - ])) - 
@patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) - @patch('requests.get', apply_requests_get_mock([(200, f'https://api.screenshotmachine.com?{query_string}')])) - @patch.multiple('breathecode.services.google_cloud.Storage', - __init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) - @patch.multiple('breathecode.services.google_cloud.File', - __init__=MagicMock(return_value=None), - bucket=PropertyMock(), - file_name=PropertyMock(), - blob=PropertyMock(side_effect=[None, 1]), - upload=MagicMock(), - url=MagicMock(return_value='https://xyz/hardcoded_url'), - create=True) + @patch( + "requests.get", + apply_requests_get_mock( + [ + (200, f"https://api.screenshotmachine.com?{query_string}", "mailgun response"), + ] + ), + ) + @patch("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) + @patch("requests.get", apply_requests_get_mock([(200, f"https://api.screenshotmachine.com?{query_string}")])) + @patch.multiple( + "breathecode.services.google_cloud.Storage", + __init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, + ) + @patch.multiple( + "breathecode.services.google_cloud.File", + __init__=MagicMock(return_value=None), + bucket=PropertyMock(), + file_name=PropertyMock(), + blob=PropertyMock(side_effect=[None, 1]), + upload=MagicMock(), + url=MagicMock(return_value="https://xyz/hardcoded_url"), + create=True, + ) def test_certificate_screenshot__with_invalid_id(self): """certificate_screenshot don't call open in development environment""" - with self.assertRaisesMessage(UserSpecialty.DoesNotExist, 'UserSpecialty matching query does not exist.'): + with self.assertRaisesMessage(UserSpecialty.DoesNotExist, "UserSpecialty matching query does not exist."): certificate_screenshot(1) - self.assertEqual(self.bc.database.list_of('certificate.UserSpecialty'), []) + self.assertEqual(self.bc.database.list_of("certificate.UserSpecialty"), []) self.assertEqual(requests.get.call_args_list, []) self.assertEqual(signals.user_specialty_saved.send_robust.call_args_list, []) @@ -66,43 +78,56 @@ def test_certificate_screenshot__with_invalid_id(self): 🔽🔽🔽 Invalid preview_url, equal to '' """ - @patch('requests.get', - apply_requests_get_mock([ - (200, f'https://api.screenshotmachine.com?{query_string}', 'mailgun response'), - ])) - @patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) - @patch('requests.get', apply_requests_get_mock([(200, f'https://api.screenshotmachine.com?{query_string}')])) - @patch.multiple('breathecode.services.google_cloud.Storage', - __init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) - @patch.multiple('breathecode.services.google_cloud.File', - __init__=MagicMock(return_value=None), - bucket=PropertyMock(), - file_name=PropertyMock(), - blob=PropertyMock(side_effect=[None, 1]), - upload=MagicMock(), - url=MagicMock(return_value='https://xyz/hardcoded_url'), - create=True) + @patch( + "requests.get", + apply_requests_get_mock( + [ + (200, f"https://api.screenshotmachine.com?{query_string}", "mailgun response"), + ] + ), + ) + @patch("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) + @patch("requests.get", apply_requests_get_mock([(200, f"https://api.screenshotmachine.com?{query_string}")])) + @patch.multiple( + "breathecode.services.google_cloud.Storage", + __init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, + ) + @patch.multiple( + 
"breathecode.services.google_cloud.File", + __init__=MagicMock(return_value=None), + bucket=PropertyMock(), + file_name=PropertyMock(), + blob=PropertyMock(side_effect=[None, 1]), + upload=MagicMock(), + url=MagicMock(return_value="https://xyz/hardcoded_url"), + create=True, + ) def test_certificate_screenshot__with_invalid_preview_url__equal_to_empty_string(self): """certificate_screenshot don't call open in development environment""" - user_specialty = {'preview_url': '', 'token': token} + user_specialty = {"preview_url": "", "token": token} model = self.bc.database.create(user_specialty=user_specialty) certificate_screenshot(1) - self.assertEqual(self.bc.database.list_of('certificate.UserSpecialty'), [ - { - **self.remove_is_clean_for_one_item(self.bc.format.to_dict(model.user_specialty)), - 'preview_url': - 'https://xyz/hardcoded_url', - }, - ]) - - self.assertEqual(requests.get.call_args_list, [ - call(f'https://api.screenshotmachine.com?{query_string}', stream=True), - ]) + self.assertEqual( + self.bc.database.list_of("certificate.UserSpecialty"), + [ + { + **self.remove_is_clean_for_one_item(self.bc.format.to_dict(model.user_specialty)), + "preview_url": "https://xyz/hardcoded_url", + }, + ], + ) + + self.assertEqual( + requests.get.call_args_list, + [ + call(f"https://api.screenshotmachine.com?{query_string}", stream=True), + ], + ) self.assertEqual( signals.user_specialty_saved.send_robust.call_args_list, [ @@ -110,51 +135,65 @@ def test_certificate_screenshot__with_invalid_preview_url__equal_to_empty_string call(instance=model.user_specialty, sender=model.user_specialty.__class__), # Save call(instance=model.user_specialty, sender=model.user_specialty.__class__), - ]) + ], + ) - self.assertEqual(File.upload.call_args_list, [call(b'mailgun response', public=True)]) + self.assertEqual(File.upload.call_args_list, [call(b"mailgun response", public=True)]) self.assertEqual(File.url.call_args_list, [call()]) """ 🔽🔽🔽 Invalid preview_url, equal to None """ - @patch('requests.get', - apply_requests_get_mock([ - (200, f'https://api.screenshotmachine.com?{query_string}', 'mailgun response'), - ])) - @patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) - @patch.multiple('breathecode.services.google_cloud.Storage', - __init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) - @patch.multiple('breathecode.services.google_cloud.File', - __init__=MagicMock(return_value=None), - bucket=PropertyMock(), - file_name=PropertyMock(), - blob=PropertyMock(side_effect=[None, 1]), - upload=MagicMock(), - url=MagicMock(return_value='https://xyz/hardcoded_url'), - create=True) + @patch( + "requests.get", + apply_requests_get_mock( + [ + (200, f"https://api.screenshotmachine.com?{query_string}", "mailgun response"), + ] + ), + ) + @patch("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) + @patch.multiple( + "breathecode.services.google_cloud.Storage", + __init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, + ) + @patch.multiple( + "breathecode.services.google_cloud.File", + __init__=MagicMock(return_value=None), + bucket=PropertyMock(), + file_name=PropertyMock(), + blob=PropertyMock(side_effect=[None, 1]), + upload=MagicMock(), + url=MagicMock(return_value="https://xyz/hardcoded_url"), + create=True, + ) def test_certificate_screenshot__with_invalid_preview_url__equal_to_none(self): """certificate_screenshot don't call open in development environment""" - user_specialty = {'preview_url': None, 
'token': token} + user_specialty = {"preview_url": None, "token": token} model = self.bc.database.create(user_specialty=user_specialty) certificate_screenshot(1) - self.assertEqual(self.bc.database.list_of('certificate.UserSpecialty'), [ - { - **self.remove_is_clean_for_one_item(self.bc.format.to_dict(model.user_specialty)), - 'preview_url': - 'https://xyz/hardcoded_url', - }, - ]) - - self.assertEqual(requests.get.call_args_list, [ - call(f'https://api.screenshotmachine.com?{query_string}', stream=True), - ]) + self.assertEqual( + self.bc.database.list_of("certificate.UserSpecialty"), + [ + { + **self.remove_is_clean_for_one_item(self.bc.format.to_dict(model.user_specialty)), + "preview_url": "https://xyz/hardcoded_url", + }, + ], + ) + + self.assertEqual( + requests.get.call_args_list, + [ + call(f"https://api.screenshotmachine.com?{query_string}", stream=True), + ], + ) self.assertEqual( signals.user_specialty_saved.send_robust.call_args_list, [ @@ -162,47 +201,58 @@ def test_certificate_screenshot__with_invalid_preview_url__equal_to_none(self): call(instance=model.user_specialty, sender=model.user_specialty.__class__), # Save call(instance=model.user_specialty, sender=model.user_specialty.__class__), - ]) + ], + ) - self.assertEqual(File.upload.call_args_list, [call(b'mailgun response', public=True)]) + self.assertEqual(File.upload.call_args_list, [call(b"mailgun response", public=True)]) self.assertEqual(File.url.call_args_list, [call()]) """ 🔽🔽🔽 Invalid preview_url, the object exists in gcloud """ - @patch('requests.get', - apply_requests_get_mock([ - (200, f'https://api.screenshotmachine.com?{query_string}', 'mailgun response'), - ])) - @patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) - @patch.multiple('breathecode.services.google_cloud.Storage', - __init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) - @patch.multiple('breathecode.services.google_cloud.File', - __init__=MagicMock(return_value=None), - bucket=PropertyMock(), - file_name=PropertyMock(), - blob=PropertyMock(return_value=1), - upload=MagicMock(), - url=MagicMock(return_value='https://xyz/hardcoded_url'), - create=True) + @patch( + "requests.get", + apply_requests_get_mock( + [ + (200, f"https://api.screenshotmachine.com?{query_string}", "mailgun response"), + ] + ), + ) + @patch("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) + @patch.multiple( + "breathecode.services.google_cloud.Storage", + __init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, + ) + @patch.multiple( + "breathecode.services.google_cloud.File", + __init__=MagicMock(return_value=None), + bucket=PropertyMock(), + file_name=PropertyMock(), + blob=PropertyMock(return_value=1), + upload=MagicMock(), + url=MagicMock(return_value="https://xyz/hardcoded_url"), + create=True, + ) def test_certificate_screenshot__with_invalid_preview_url__the_objects_exists_in_gcloud(self): """certificate_screenshot don't call open in development environment""" - user_specialty = {'preview_url': None, 'token': token} + user_specialty = {"preview_url": None, "token": token} model = self.bc.database.create(user_specialty=user_specialty) certificate_screenshot(1) - self.assertEqual(self.bc.database.list_of('certificate.UserSpecialty'), [ - { - **self.remove_is_clean_for_one_item(self.bc.format.to_dict(model.user_specialty)), - 'preview_url': - 'https://xyz/hardcoded_url', - }, - ]) + self.assertEqual( + self.bc.database.list_of("certificate.UserSpecialty"), + [ 
+ { + **self.remove_is_clean_for_one_item(self.bc.format.to_dict(model.user_specialty)), + "preview_url": "https://xyz/hardcoded_url", + }, + ], + ) self.assertEqual(requests.get.call_args_list, []) self.assertEqual( @@ -212,7 +262,8 @@ def test_certificate_screenshot__with_invalid_preview_url__the_objects_exists_in call(instance=model.user_specialty, sender=model.user_specialty.__class__), # Save call(instance=model.user_specialty, sender=model.user_specialty.__class__), - ]) + ], + ) self.assertEqual(File.upload.call_args_list, []) self.assertEqual(File.url.call_args_list, [call()]) @@ -221,43 +272,56 @@ def test_certificate_screenshot__with_invalid_preview_url__the_objects_exists_in 🔽🔽🔽 Correct preview_url """ - @patch('requests.get', - apply_requests_get_mock([ - (200, f'https://api.screenshotmachine.com?{query_string}', 'mailgun response'), - ])) - @patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) - @patch.multiple('breathecode.services.google_cloud.Storage', - __init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) - @patch.multiple('breathecode.services.google_cloud.File', - __init__=MagicMock(return_value=None), - bucket=PropertyMock(), - file_name=PropertyMock(), - blob=PropertyMock(return_value=1), - upload=MagicMock(), - url=MagicMock(return_value='https://xyz/hardcoded_url'), - create=True) + @patch( + "requests.get", + apply_requests_get_mock( + [ + (200, f"https://api.screenshotmachine.com?{query_string}", "mailgun response"), + ] + ), + ) + @patch("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) + @patch.multiple( + "breathecode.services.google_cloud.Storage", + __init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, + ) + @patch.multiple( + "breathecode.services.google_cloud.File", + __init__=MagicMock(return_value=None), + bucket=PropertyMock(), + file_name=PropertyMock(), + blob=PropertyMock(return_value=1), + upload=MagicMock(), + url=MagicMock(return_value="https://xyz/hardcoded_url"), + create=True, + ) def test_certificate_screenshot__with_correct_preview_url(self): """certificate_screenshot don't call open in development environment""" - user_specialty = {'preview_url': 'https://xyz/hardcoded_url', 'token': token} + user_specialty = {"preview_url": "https://xyz/hardcoded_url", "token": token} model = self.bc.database.create(user_specialty=user_specialty) certificate_screenshot(1) - self.assertEqual(self.bc.database.list_of('certificate.UserSpecialty'), [ - { - **self.remove_is_clean_for_one_item(self.bc.format.to_dict(model.user_specialty)), - 'preview_url': - 'https://xyz/hardcoded_url', - }, - ]) + self.assertEqual( + self.bc.database.list_of("certificate.UserSpecialty"), + [ + { + **self.remove_is_clean_for_one_item(self.bc.format.to_dict(model.user_specialty)), + "preview_url": "https://xyz/hardcoded_url", + }, + ], + ) self.assertEqual(requests.get.call_args_list, []) - self.assertEqual(signals.user_specialty_saved.send_robust.call_args_list, [ - call(instance=model.user_specialty, sender=model.user_specialty.__class__), - ]) + self.assertEqual( + signals.user_specialty_saved.send_robust.call_args_list, + [ + call(instance=model.user_specialty, sender=model.user_specialty.__class__), + ], + ) self.assertEqual(Storage.__init__.call_args_list, []) self.assertEqual(File.__init__.call_args_list, []) diff --git a/breathecode/certificate/tests/actions/tests_certificate_set_default_issued_at.py 
b/breathecode/certificate/tests/actions/tests_certificate_set_default_issued_at.py index ae461b048..b40e39047 100644 --- a/breathecode/certificate/tests/actions/tests_certificate_set_default_issued_at.py +++ b/breathecode/certificate/tests/actions/tests_certificate_set_default_issued_at.py @@ -1,6 +1,7 @@ """ Tasks tests """ + from unittest.mock import MagicMock, call, patch from django.utils import timezone @@ -14,276 +15,329 @@ class ActionCertificateSetDefaultIssuedAtTestCase(CertificateTestCase): - @patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) + @patch("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) def test_issued_at_null_status_error(self): # the issues_at should remain None because the certificate generation gave an error. - model = self.generate_models(user_specialty=True, - cohort=False, - user_specialty_kwargs={ - 'status': 'ERROR', - 'issued_at': None - }) - query = UserSpecialty.objects.filter(status='PERSISTED', issued_at__isnull=True) + model = self.generate_models( + user_specialty=True, cohort=False, user_specialty_kwargs={"status": "ERROR", "issued_at": None} + ) + query = UserSpecialty.objects.filter(status="PERSISTED", issued_at__isnull=True) result = certificate_set_default_issued_at() self.assertEqual(list(result), list(query)) self.assertEqual( self.all_user_specialty_dict(), - self.remove_is_clean([{ - **self.model_to_dict(model, 'user_specialty'), - 'status': 'ERROR', - 'issued_at': None, - }])) + self.remove_is_clean( + [ + { + **self.model_to_dict(model, "user_specialty"), + "status": "ERROR", + "issued_at": None, + } + ] + ), + ) - self.assertEqual(signals.user_specialty_saved.send_robust.call_args_list, [ - call(instance=model.user_specialty, sender=model.user_specialty.__class__), - ]) + self.assertEqual( + signals.user_specialty_saved.send_robust.call_args_list, + [ + call(instance=model.user_specialty, sender=model.user_specialty.__class__), + ], + ) - @patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) + @patch("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) def test_issued_at_set_status_error(self): # the issues_at should remain the same and not be modified because the certificate gave an error. 
now = timezone.now() - model = self.generate_models(user_specialty=True, - cohort=False, - user_specialty_kwargs={ - 'status': 'ERROR', - 'issued_at': now - }) + model = self.generate_models( + user_specialty=True, cohort=False, user_specialty_kwargs={"status": "ERROR", "issued_at": now} + ) - query = UserSpecialty.objects.filter(status='PERSISTED', issued_at__isnull=True) + query = UserSpecialty.objects.filter(status="PERSISTED", issued_at__isnull=True) result = certificate_set_default_issued_at() self.assertEqual(list(result), list(query)) self.assertEqual( self.all_user_specialty_dict(), - self.remove_is_clean([{ - **self.model_to_dict(model, 'user_specialty'), - 'status': 'ERROR', - 'issued_at': now, - }])) + self.remove_is_clean( + [ + { + **self.model_to_dict(model, "user_specialty"), + "status": "ERROR", + "issued_at": now, + } + ] + ), + ) - self.assertEqual(signals.user_specialty_saved.send_robust.call_args_list, [ - call(instance=model.user_specialty, sender=model.user_specialty.__class__), - ]) + self.assertEqual( + signals.user_specialty_saved.send_robust.call_args_list, + [ + call(instance=model.user_specialty, sender=model.user_specialty.__class__), + ], + ) - @patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) + @patch("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) def test_issued_at_null_status_persisted_one_item(self): # The issued_at should remain None because the user_specialty does not have cohort specified, # and it is impossible to determine cohort ending_at - model = self.generate_models(user_specialty=True, - cohort=False, - user_specialty_kwargs={ - 'status': 'PERSISTED', - 'issued_at': None - }) + model = self.generate_models( + user_specialty=True, cohort=False, user_specialty_kwargs={"status": "PERSISTED", "issued_at": None} + ) - query = UserSpecialty.objects.filter(status='PERSISTED', issued_at__isnull=True) + query = UserSpecialty.objects.filter(status="PERSISTED", issued_at__isnull=True) result = certificate_set_default_issued_at() self.assertEqual(list(result), list(query)) self.assertEqual( self.all_user_specialty_dict(), - self.remove_is_clean([{ - **self.model_to_dict(model, 'user_specialty'), - 'status': 'PERSISTED', - 'issued_at': None, - }])) + self.remove_is_clean( + [ + { + **self.model_to_dict(model, "user_specialty"), + "status": "PERSISTED", + "issued_at": None, + } + ] + ), + ) - self.assertEqual(signals.user_specialty_saved.send_robust.call_args_list, [ - call(instance=model.user_specialty, sender=model.user_specialty.__class__), - ]) + self.assertEqual( + signals.user_specialty_saved.send_robust.call_args_list, + [ + call(instance=model.user_specialty, sender=model.user_specialty.__class__), + ], + ) - @patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) + @patch("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) def test_issued_at_null_status_persisted_two_items(self): # both certificates should have issued_at None because both cohorts are null - model1 = self.generate_models(user_specialty=True, - cohort=False, - user_specialty_kwargs={ - 'status': 'PERSISTED', - 'issued_at': None, - 'token': '123abcd' - }) - model2 = self.generate_models(user_specialty=True, - cohort=False, - user_specialty_kwargs={ - 'status': 'PERSISTED', - 'issued_at': None, - 'token': '567pqrst' - }) - - query = UserSpecialty.objects.filter(status='PERSISTED', issued_at__isnull=True) + model1 = self.generate_models( + 
user_specialty=True, + cohort=False, + user_specialty_kwargs={"status": "PERSISTED", "issued_at": None, "token": "123abcd"}, + ) + model2 = self.generate_models( + user_specialty=True, + cohort=False, + user_specialty_kwargs={"status": "PERSISTED", "issued_at": None, "token": "567pqrst"}, + ) + + query = UserSpecialty.objects.filter(status="PERSISTED", issued_at__isnull=True) result = certificate_set_default_issued_at() self.assertEqual(list(result), list(query)) self.assertEqual( self.all_user_specialty_dict(), - self.remove_is_clean([{ - **self.model_to_dict(model1, 'user_specialty'), - 'status': 'PERSISTED', - 'issued_at': None, - }, { - **self.model_to_dict(model2, 'user_specialty'), - 'status': 'PERSISTED', - 'issued_at': None, - }])) - - self.assertEqual(signals.user_specialty_saved.send_robust.call_args_list, [ - call(instance=model1.user_specialty, sender=model1.user_specialty.__class__), - call(instance=model2.user_specialty, sender=model2.user_specialty.__class__), - ]) - - @patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) + self.remove_is_clean( + [ + { + **self.model_to_dict(model1, "user_specialty"), + "status": "PERSISTED", + "issued_at": None, + }, + { + **self.model_to_dict(model2, "user_specialty"), + "status": "PERSISTED", + "issued_at": None, + }, + ] + ), + ) + + self.assertEqual( + signals.user_specialty_saved.send_robust.call_args_list, + [ + call(instance=model1.user_specialty, sender=model1.user_specialty.__class__), + call(instance=model2.user_specialty, sender=model2.user_specialty.__class__), + ], + ) + + @patch("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) def test_issued_at_null_status_persisted_one_item_with_cohort(self): - model = self.generate_models(user_specialty=True, - cohort=True, - user_specialty_kwargs={ - 'status': 'PERSISTED', - 'issued_at': None - }) + model = self.generate_models( + user_specialty=True, cohort=True, user_specialty_kwargs={"status": "PERSISTED", "issued_at": None} + ) - query = UserSpecialty.objects.filter(status='PERSISTED', issued_at__isnull=True) + query = UserSpecialty.objects.filter(status="PERSISTED", issued_at__isnull=True) result = certificate_set_default_issued_at() self.assertEqual(list(result), list(query)) self.assertEqual( self.all_user_specialty_dict(), - self.remove_is_clean([{ - **self.model_to_dict(model, 'user_specialty'), - 'status': 'PERSISTED', - 'issued_at': model.cohort.ending_date, - }])) + self.remove_is_clean( + [ + { + **self.model_to_dict(model, "user_specialty"), + "status": "PERSISTED", + "issued_at": model.cohort.ending_date, + } + ] + ), + ) - self.assertEqual(signals.user_specialty_saved.send_robust.call_args_list, [ - call(instance=model.user_specialty, sender=model.user_specialty.__class__), - ]) + self.assertEqual( + signals.user_specialty_saved.send_robust.call_args_list, + [ + call(instance=model.user_specialty, sender=model.user_specialty.__class__), + ], + ) - @patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) + @patch("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) def test_issued_at_null_status_persisted_two_items_with_cohort(self): - model1 = self.generate_models(user_specialty=True, - cohort=True, - user_specialty_kwargs={ - 'status': 'PERSISTED', - 'issued_at': None, - 'token': '123abcd' - }) - model2 = self.generate_models(user_specialty=True, - cohort=True, - user_specialty_kwargs={ - 'status': 'PERSISTED', - 'issued_at': None, - 'token': 
'567pqrst' - }) - query = UserSpecialty.objects.filter(status='PERSISTED', issued_at__isnull=True) + model1 = self.generate_models( + user_specialty=True, + cohort=True, + user_specialty_kwargs={"status": "PERSISTED", "issued_at": None, "token": "123abcd"}, + ) + model2 = self.generate_models( + user_specialty=True, + cohort=True, + user_specialty_kwargs={"status": "PERSISTED", "issued_at": None, "token": "567pqrst"}, + ) + query = UserSpecialty.objects.filter(status="PERSISTED", issued_at__isnull=True) result = certificate_set_default_issued_at() self.assertEqual(list(result), list(query)) self.assertEqual( self.all_user_specialty_dict(), - self.remove_is_clean([{ - **self.model_to_dict(model1, 'user_specialty'), - 'status': 'PERSISTED', - 'issued_at': model1.cohort.ending_date, - }, { - **self.model_to_dict(model2, 'user_specialty'), - 'status': 'PERSISTED', - 'issued_at': model2.cohort.ending_date, - }])) - - self.assertEqual(signals.user_specialty_saved.send_robust.call_args_list, [ - call(instance=model1.user_specialty, sender=model1.user_specialty.__class__), - call(instance=model2.user_specialty, sender=model2.user_specialty.__class__), - ]) - - @patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) + self.remove_is_clean( + [ + { + **self.model_to_dict(model1, "user_specialty"), + "status": "PERSISTED", + "issued_at": model1.cohort.ending_date, + }, + { + **self.model_to_dict(model2, "user_specialty"), + "status": "PERSISTED", + "issued_at": model2.cohort.ending_date, + }, + ] + ), + ) + + self.assertEqual( + signals.user_specialty_saved.send_robust.call_args_list, + [ + call(instance=model1.user_specialty, sender=model1.user_specialty.__class__), + call(instance=model2.user_specialty, sender=model2.user_specialty.__class__), + ], + ) + + @patch("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) def test_issued_at_set_status_persisted(self): # issuet_at should remain the same because there was already a value so the default should no be applied. 
now = timezone.now() - model = self.generate_models(user_specialty=True, - user_specialty_kwargs={ - 'status': 'PERSISTED', - 'issued_at': now - }) + model = self.generate_models( + user_specialty=True, user_specialty_kwargs={"status": "PERSISTED", "issued_at": now} + ) - query = UserSpecialty.objects.filter(status='PERSISTED', issued_at__isnull=True) + query = UserSpecialty.objects.filter(status="PERSISTED", issued_at__isnull=True) result = certificate_set_default_issued_at() self.assertEqual(list(result), list(query)) self.assertEqual( self.all_user_specialty_dict(), - self.remove_is_clean([{ - **self.model_to_dict(model, 'user_specialty'), - 'status': 'PERSISTED', - 'issued_at': now, - }])) - - self.assertEqual(str(signals.user_specialty_saved.send_robust.call_args_list), - str([ - call(instance=model.user_specialty, sender=model.user_specialty.__class__), - ])) - - @patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) + self.remove_is_clean( + [ + { + **self.model_to_dict(model, "user_specialty"), + "status": "PERSISTED", + "issued_at": now, + } + ] + ), + ) + + self.assertEqual( + str(signals.user_specialty_saved.send_robust.call_args_list), + str( + [ + call(instance=model.user_specialty, sender=model.user_specialty.__class__), + ] + ), + ) + + @patch("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) def test_issued_at_set_status_pending(self): # issuet_at should remain the same because there was already a value so the default should no be applied. now = timezone.now() - model = self.generate_models(user_specialty=True, user_specialty_kwargs={'status': 'PENDING', 'issued_at': now}) + model = self.generate_models(user_specialty=True, user_specialty_kwargs={"status": "PENDING", "issued_at": now}) - query = UserSpecialty.objects.filter(status='PERSISTED', issued_at__isnull=True) + query = UserSpecialty.objects.filter(status="PERSISTED", issued_at__isnull=True) result = certificate_set_default_issued_at() self.assertEqual(list(result), list(query)) self.assertEqual( self.all_user_specialty_dict(), - self.remove_is_clean([{ - **self.model_to_dict(model, 'user_specialty'), - 'status': 'PENDING', - 'issued_at': now, - }])) - - self.assertEqual(str(signals.user_specialty_saved.send_robust.call_args_list), - str([ - call(instance=model.user_specialty, sender=model.user_specialty.__class__), - ])) - - @patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) + self.remove_is_clean( + [ + { + **self.model_to_dict(model, "user_specialty"), + "status": "PENDING", + "issued_at": now, + } + ] + ), + ) + + self.assertEqual( + str(signals.user_specialty_saved.send_robust.call_args_list), + str( + [ + call(instance=model.user_specialty, sender=model.user_specialty.__class__), + ] + ), + ) + + @patch("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) def test_issued_at_null_status_pending(self): # issuet_at should remain the same because status=Pending - model = self.generate_models(user_specialty=True, - user_specialty_kwargs={ - 'status': 'PENDING', - 'issued_at': None - }) + model = self.generate_models( + user_specialty=True, user_specialty_kwargs={"status": "PENDING", "issued_at": None} + ) - query = UserSpecialty.objects.filter(status='PERSISTED', issued_at__isnull=True) + query = UserSpecialty.objects.filter(status="PERSISTED", issued_at__isnull=True) result = certificate_set_default_issued_at() self.assertEqual(list(result), list(query)) self.assertEqual( 
self.all_user_specialty_dict(), - self.remove_is_clean([{ - **self.model_to_dict(model, 'user_specialty'), - 'status': 'PENDING', - 'issued_at': None, - }])) - - self.assertEqual(str(signals.user_specialty_saved.send_robust.call_args_list), - str([ - call(instance=model.user_specialty, sender=model.user_specialty.__class__), - ])) + self.remove_is_clean( + [ + { + **self.model_to_dict(model, "user_specialty"), + "status": "PENDING", + "issued_at": None, + } + ] + ), + ) + + self.assertEqual( + str(signals.user_specialty_saved.send_robust.call_args_list), + str( + [ + call(instance=model.user_specialty, sender=model.user_specialty.__class__), + ] + ), + ) diff --git a/breathecode/certificate/tests/actions/tests_generate_certificate.py b/breathecode/certificate/tests/actions/tests_generate_certificate.py index a447ca434..d63096e0c 100644 --- a/breathecode/certificate/tests/actions/tests_generate_certificate.py +++ b/breathecode/certificate/tests/actions/tests_generate_certificate.py @@ -1,6 +1,7 @@ """ Tasks tests """ + import hashlib from unittest.mock import MagicMock, call, patch @@ -28,7 +29,7 @@ def clear_preview_url(self, dicts: list[dict]): Clear preview url to evit one diff when run test in all tests and just certificate tests """ - return [{**item, 'preview_url': None} for item in dicts] + return [{**item, "preview_url": None} for item in dicts] def clear_keys(self, dicts, keys): _d = {} @@ -38,46 +39,47 @@ def clear_keys(self, dicts, keys): return [{**item, **_d} for item in dicts] def remove_is_clean_for_one_item(self, item): - if 'is_cleaned' in item: - del item['is_cleaned'] + if "is_cleaned" in item: + del item["is_cleaned"] return item def generate_update_hash(self, instance): kwargs = { - 'signed_by': instance.signed_by, - 'signed_by_role': instance.signed_by_role, - 'status': instance.status, - 'layout': instance.layout, - 'expires_at': instance.expires_at, - 'issued_at': instance.issued_at, + "signed_by": instance.signed_by, + "signed_by_role": instance.signed_by_role, + "status": instance.status, + "layout": instance.layout, + "expires_at": instance.expires_at, + "issued_at": instance.issued_at, } - important_fields = ['signed_by', 'signed_by_role', 'status', 'layout', 'expires_at', 'issued_at'] + important_fields = ["signed_by", "signed_by_role", "status", "layout", "expires_at", "issued_at"] - important_values = '-'.join( - [str(kwargs.get(field) if field in kwargs else None) for field in sorted(important_fields)]) + important_values = "-".join( + [str(kwargs.get(field) if field in kwargs else None) for field in sorted(important_fields)] + ) - return hashlib.sha1(important_values.encode('UTF-8')).hexdigest() + return hashlib.sha1(important_values.encode("UTF-8")).hexdigest() """ 🔽🔽🔽 With User and without Cohort """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + 
@patch("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_generate_certificate__with_user_without_cohort(self): model = self.generate_models(user=True) try: - generate_certificate(model['user']) + generate_certificate(model["user"]) assert False except Exception as e: - self.assertEqual(str(e), 'missing-cohort-user') + self.assertEqual(str(e), "missing-cohort-user") - self.assertEqual(self.bc.database.list_of('certificate.UserSpecialty'), []) + self.assertEqual(self.bc.database.list_of("certificate.UserSpecialty"), []) self.assertEqual(signals.user_specialty_saved.send_robust.call_args_list, []) @@ -85,21 +87,21 @@ def test_generate_certificate__with_user_without_cohort(self): 🔽🔽🔽 without CohortUser """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_generate_certificate__without_cohort_user(self): model = self.generate_models(user=True, cohort=True) try: - generate_certificate(model['user'], model['cohort']) + generate_certificate(model["user"], model["cohort"]) assert False except Exception as e: - self.assertEqual(str(e), 'missing-cohort-user') + self.assertEqual(str(e), "missing-cohort-user") - self.assertEqual(self.bc.database.list_of('certificate.UserSpecialty'), []) + self.assertEqual(self.bc.database.list_of("certificate.UserSpecialty"), []) self.assertEqual(signals.user_specialty_saved.send_robust.call_args_list, []) @@ -107,130 +109,136 @@ def test_generate_certificate__without_cohort_user(self): 🔽🔽🔽 Cohort not ended """ - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], 
apply_google_cloud_blob_mock()) + @patch("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_generate_certificate__cohort_not_ended(self): cohort_user_kwargs = { - 'finantial_status': 'FULLY_PAID', - 'educational_status': 'GRADUATED', + "finantial_status": "FULLY_PAID", + "educational_status": "GRADUATED", } cohort_kwargs = { - 'current_day': 43877965, + "current_day": 43877965, } syllabus_kwargs = { - 'duration_in_days': 43877965, + "duration_in_days": 43877965, } - model = self.generate_models(user=True, - cohort=True, - cohort_user=True, - syllabus=True, - syllabus_version=True, - specialty=True, - syllabus_schedule=True, - layout_design=True, - cohort_user_kwargs=cohort_user_kwargs, - cohort_kwargs=cohort_kwargs, - syllabus_kwargs=syllabus_kwargs) + model = self.generate_models( + user=True, + cohort=True, + cohort_user=True, + syllabus=True, + syllabus_version=True, + specialty=True, + syllabus_schedule=True, + layout_design=True, + cohort_user_kwargs=cohort_user_kwargs, + cohort_kwargs=cohort_kwargs, + syllabus_kwargs=syllabus_kwargs, + ) base = model.copy() - del base['user'] - del base['cohort_user'] + del base["user"] + del base["cohort_user"] - cohort_user_kwargs = {'role': 'TEACHER'} - teacher_model = self.generate_models(user=True, - cohort_user=True, - cohort_user_kwargs=cohort_user_kwargs, - models=base) + cohort_user_kwargs = {"role": "TEACHER"} + teacher_model = self.generate_models( + user=True, cohort_user=True, cohort_user_kwargs=cohort_user_kwargs, models=base + ) - result = self.remove_dinamics_fields(generate_certificate(model['user'], model['cohort']).__dict__) + result = self.remove_dinamics_fields(generate_certificate(model["user"], model["cohort"]).__dict__) - self.assertToken(result['token']) - result['token'] = None + self.assertToken(result["token"]) + result["token"] = None - translation = strings[model['cohort'].language] - user_specialty = self.bc.database.get('certificate.UserSpecialty', 1, dict=False) + translation = strings[model["cohort"].language] + user_specialty = self.bc.database.get("certificate.UserSpecialty", 1, dict=False) expected = { - 'academy_id': 1, - 'cohort_id': 1, - 'expires_at': None, - 'id': 1, - 'layout_id': 1, - 'preview_url': None, - 'signed_by': (teacher_model['user'].first_name + ' ' + teacher_model['user'].last_name), - 'signed_by_role': translation['Main Instructor'], - 'specialty_id': 1, - 'issued_at': None, - 'status': 'ERROR', - 'token': None, - 'status_text': 'cohort-without-status-ended', - 'user_id': 1, - 'update_hash': self.generate_update_hash(user_specialty), + "academy_id": 1, + "cohort_id": 1, + "expires_at": None, + "id": 1, + "layout_id": 1, + "preview_url": None, + "signed_by": (teacher_model["user"].first_name + " " + teacher_model["user"].last_name), + "signed_by_role": translation["Main Instructor"], + "specialty_id": 1, + "issued_at": None, + "status": "ERROR", + "token": None, + "status_text": "cohort-without-status-ended", + "user_id": 1, + "update_hash": self.generate_update_hash(user_specialty), } self.assertEqual(result, expected) self.assertEqual( - self.clear_keys(self.bc.database.list_of('certificate.UserSpecialty'), ['preview_url', 'token']), - [expected]) + self.clear_keys(self.bc.database.list_of("certificate.UserSpecialty"), ["preview_url", "token"]), [expected] + 
) - user_specialty = self.bc.database.get('certificate.UserSpecialty', 1, dict=False) - self.assertEqual(signals.user_specialty_saved.send_robust.call_args_list, [ - call(instance=user_specialty, sender=user_specialty.__class__), - ]) + user_specialty = self.bc.database.get("certificate.UserSpecialty", 1, dict=False) + self.assertEqual( + signals.user_specialty_saved.send_robust.call_args_list, + [ + call(instance=user_specialty, sender=user_specialty.__class__), + ], + ) """ 🔽🔽🔽 without SyllabusVersion """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_generate_certificate__without_syllabus_version(self): - cohort_kwargs = {'stage': 'ENDED'} + cohort_kwargs = {"stage": "ENDED"} model = self.generate_models(user=True, cohort=True, cohort_user=True, cohort_kwargs=cohort_kwargs) try: - generate_certificate(model['user'], model['cohort']) + generate_certificate(model["user"], model["cohort"]) assert False except Exception as e: - self.assertEqual(str(e), 'missing-syllabus-version') + self.assertEqual(str(e), "missing-syllabus-version") - self.assertEqual(self.bc.database.list_of('certificate.UserSpecialty'), []) + self.assertEqual(self.bc.database.list_of("certificate.UserSpecialty"), []) - user_specialty = self.bc.database.get('certificate.UserSpecialty', 1, dict=False) + user_specialty = self.bc.database.get("certificate.UserSpecialty", 1, dict=False) self.assertEqual(signals.user_specialty_saved.send_robust.call_args_list, []) """ 🔽🔽🔽 without Specialty """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_generate_certificate__without_specialty(self): - cohort_kwargs = {'stage': 'ENDED'} - 
model = self.generate_models(user=True, - cohort=True, - cohort_user=True, - syllabus_schedule=True, - syllabus_version=True, - cohort_kwargs=cohort_kwargs) + cohort_kwargs = {"stage": "ENDED"} + model = self.generate_models( + user=True, + cohort=True, + cohort_user=True, + syllabus_schedule=True, + syllabus_version=True, + cohort_kwargs=cohort_kwargs, + ) try: - generate_certificate(model['user'], model['cohort']) + generate_certificate(model["user"], model["cohort"]) assert False except Exception as e: - self.assertEqual(str(e), 'missing-specialty') + self.assertEqual(str(e), "missing-specialty") - self.assertEqual(self.bc.database.list_of('certificate.UserSpecialty'), []) + self.assertEqual(self.bc.database.list_of("certificate.UserSpecialty"), []) self.assertEqual(signals.user_specialty_saved.send_robust.call_args_list, []) @@ -238,27 +246,29 @@ def test_generate_certificate__without_specialty(self): 🔽🔽🔽 without Syllabus """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_generate_certificate__without_syllabus(self): - cohort_kwargs = {'stage': 'ENDED'} - model = self.generate_models(user=True, - cohort=True, - cohort_user=True, - syllabus_version=True, - syllabus_schedule=True, - cohort_kwargs=cohort_kwargs) + cohort_kwargs = {"stage": "ENDED"} + model = self.generate_models( + user=True, + cohort=True, + cohort_user=True, + syllabus_version=True, + syllabus_schedule=True, + cohort_kwargs=cohort_kwargs, + ) try: - generate_certificate(model['user'], model['cohort']) + generate_certificate(model["user"], model["cohort"]) assert False except Exception as e: - self.assertEqual(str(e), 'missing-specialty') + self.assertEqual(str(e), "missing-specialty") - self.assertEqual(self.bc.database.list_of('certificate.UserSpecialty'), []) + self.assertEqual(self.bc.database.list_of("certificate.UserSpecialty"), []) self.assertEqual(signals.user_specialty_saved.send_robust.call_args_list, []) @@ -266,29 +276,31 @@ def test_generate_certificate__without_syllabus(self): 🔽🔽🔽 without default Layout """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], 
apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_generate_certificate__without_specialty_layout(self): - cohort_kwargs = {'stage': 'ENDED'} - model = self.generate_models(user=True, - cohort=True, - cohort_user=True, - syllabus_version=True, - syllabus=True, - syllabus_schedule=True, - specialty=True, - cohort_kwargs=cohort_kwargs) + cohort_kwargs = {"stage": "ENDED"} + model = self.generate_models( + user=True, + cohort=True, + cohort_user=True, + syllabus_version=True, + syllabus=True, + syllabus_schedule=True, + specialty=True, + cohort_kwargs=cohort_kwargs, + ) try: - generate_certificate(model['user'], model['cohort']) + generate_certificate(model["user"], model["cohort"]) assert False except Exception as e: - self.assertEqual(str(e), 'no-default-layout') + self.assertEqual(str(e), "no-default-layout") - self.assertEqual(self.bc.database.list_of('certificate.UserSpecialty'), []) + self.assertEqual(self.bc.database.list_of("certificate.UserSpecialty"), []) self.assertEqual(signals.user_specialty_saved.send_robust.call_args_list, []) @@ -296,30 +308,32 @@ def test_generate_certificate__without_specialty_layout(self): 🔽🔽🔽 without main teacher """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_generate_certificate__without_teacher(self): - cohort_kwargs = {'stage': 'ENDED'} - model = self.generate_models(user=True, - cohort=True, - cohort_user=True, - syllabus_version=True, - syllabus=True, - syllabus_schedule=True, - specialty=True, - layout_design=True, - cohort_kwargs=cohort_kwargs) + cohort_kwargs = {"stage": "ENDED"} + model = self.generate_models( + user=True, + cohort=True, + cohort_user=True, + syllabus_version=True, + syllabus=True, + syllabus_schedule=True, + specialty=True, + layout_design=True, + cohort_kwargs=cohort_kwargs, + ) try: - generate_certificate(model['user'], model['cohort']) + generate_certificate(model["user"], model["cohort"]) assert False except Exception as e: - self.assertEqual(str(e), 'without-main-teacher') + self.assertEqual(str(e), "without-main-teacher") - self.assertEqual(self.bc.database.list_of('certificate.UserSpecialty'), []) + 
self.assertEqual(self.bc.database.list_of("certificate.UserSpecialty"), []) self.assertEqual(signals.user_specialty_saved.send_robust.call_args_list, []) @@ -327,82 +341,86 @@ def test_generate_certificate__without_teacher(self): 🔽🔽🔽 Bad financial status """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_generate_certificate_with_bad_student_financial_status(self): - cohort_kwargs = {'stage': 'ENDED'} - model = self.generate_models(user=True, - cohort=True, - cohort_user=True, - syllabus_version=True, - syllabus=True, - syllabus_schedule=True, - specialty=True, - layout_design=True, - cohort_kwargs=cohort_kwargs) + cohort_kwargs = {"stage": "ENDED"} + model = self.generate_models( + user=True, + cohort=True, + cohort_user=True, + syllabus_version=True, + syllabus=True, + syllabus_schedule=True, + specialty=True, + layout_design=True, + cohort_kwargs=cohort_kwargs, + ) base = model.copy() - del base['user'] - del base['cohort_user'] + del base["user"] + del base["cohort_user"] - cohort_user_kwargs = {'role': 'TEACHER'} - teacher_model = self.generate_models(user=True, - cohort_user=True, - cohort_user_kwargs=cohort_user_kwargs, - models=base) - result = self.remove_dinamics_fields(generate_certificate(model['user'], model['cohort']).__dict__) + cohort_user_kwargs = {"role": "TEACHER"} + teacher_model = self.generate_models( + user=True, cohort_user=True, cohort_user_kwargs=cohort_user_kwargs, models=base + ) + result = self.remove_dinamics_fields(generate_certificate(model["user"], model["cohort"]).__dict__) - self.assertToken(result['token']) - result['token'] = None + self.assertToken(result["token"]) + result["token"] = None - translation = strings[model['cohort'].language] - user_specialty = self.bc.database.get('certificate.UserSpecialty', 1, dict=False) + translation = strings[model["cohort"].language] + user_specialty = self.bc.database.get("certificate.UserSpecialty", 1, dict=False) expected = { - 'academy_id': 1, - 'cohort_id': 1, - 'expires_at': None, - 'id': 1, - 'layout_id': 1, - 'preview_url': None, - 'signed_by': (teacher_model['user'].first_name + ' ' + teacher_model['user'].last_name), - 'signed_by_role': translation['Main Instructor'], - 'specialty_id': 1, - 'issued_at': None, - 'status': 'ERROR', - 'token': None, - 'status_text': 'bad-finantial-status', - 'user_id': 1, - 'update_hash': self.generate_update_hash(user_specialty), + "academy_id": 1, + "cohort_id": 1, + "expires_at": None, + "id": 1, + "layout_id": 1, + "preview_url": None, + "signed_by": (teacher_model["user"].first_name + " " + 
teacher_model["user"].last_name), + "signed_by_role": translation["Main Instructor"], + "specialty_id": 1, + "issued_at": None, + "status": "ERROR", + "token": None, + "status_text": "bad-finantial-status", + "user_id": 1, + "update_hash": self.generate_update_hash(user_specialty), } self.assertEqual(result, expected) self.assertEqual( - self.clear_keys(self.bc.database.list_of('certificate.UserSpecialty'), ['preview_url', 'token']), - [expected]) + self.clear_keys(self.bc.database.list_of("certificate.UserSpecialty"), ["preview_url", "token"]), [expected] + ) - user_specialty = self.bc.database.get('certificate.UserSpecialty', 1, dict=False) - self.assertEqual(signals.user_specialty_saved.send_robust.call_args_list, [ - call(instance=user_specialty, sender=user_specialty.__class__), - ]) + user_specialty = self.bc.database.get("certificate.UserSpecialty", 1, dict=False) + self.assertEqual( + signals.user_specialty_saved.send_robust.call_args_list, + [ + call(instance=user_specialty, sender=user_specialty.__class__), + ], + ) """ 🔽🔽🔽 Student with pending tasks """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_generate_certificate__with_student_that_didnt_finish_tasks(self): - cohort_kwargs = {'stage': 'ENDED'} - task_kwargs = {'task_type': 'PROJECT', 'revision_status': 'PENDING'} - cohort_user_kwargs = {'finantial_status': 'UP_TO_DATE'} + cohort_kwargs = {"stage": "ENDED"} + task_kwargs = {"task_type": "PROJECT", "revision_status": "PENDING"} + cohort_user_kwargs = {"finantial_status": "UP_TO_DATE"} model = self.generate_models( user=2, cohort=True, @@ -410,15 +428,8 @@ def test_generate_certificate__with_student_that_didnt_finish_tasks(self): cohort_kwargs=cohort_kwargs, cohort_user_kwargs=cohort_user_kwargs, syllabus_version={ - 'id': 1, - 'json': { - 'days': [{ - 'assignments': [{ - 'slug': 'testing-slug', - 'mandatory': True - }] - }] - } + "id": 1, + "json": {"days": [{"assignments": [{"slug": "testing-slug", "mandatory": True}]}]}, }, syllabus=True, syllabus_schedule=True, @@ -426,897 +437,932 @@ def test_generate_certificate__with_student_that_didnt_finish_tasks(self): layout_design=True, ) - task_model = self.generate_models(task=[{ - 'user': model['user'][0], - 'associated_slug': 'testing-slug' - }, { - 'user': model['user'][1], - 'associated_slug': 'testing-slug' - }], - task_kwargs=task_kwargs, - models=model) + task_model = self.generate_models( + task=[ + {"user": model["user"][0], "associated_slug": "testing-slug"}, + {"user": model["user"][1], "associated_slug": "testing-slug"}, + ], + task_kwargs=task_kwargs, + 
models=model, + ) base = task_model.copy() - del base['user'] - del base['cohort_user'] + del base["user"] + del base["cohort_user"] - cohort_user_kwargs = {'role': 'TEACHER'} - teacher_model = self.generate_models(user=True, - cohort_user=True, - cohort_user_kwargs=cohort_user_kwargs, - models=base) - result = self.remove_dinamics_fields(generate_certificate(model['user'][0], model['cohort']).__dict__) + cohort_user_kwargs = {"role": "TEACHER"} + teacher_model = self.generate_models( + user=True, cohort_user=True, cohort_user_kwargs=cohort_user_kwargs, models=base + ) + result = self.remove_dinamics_fields(generate_certificate(model["user"][0], model["cohort"]).__dict__) - self.assertToken(result['token']) - result['token'] = None + self.assertToken(result["token"]) + result["token"] = None - user_specialty = self.bc.database.get('certificate.UserSpecialty', 1, dict=False) + user_specialty = self.bc.database.get("certificate.UserSpecialty", 1, dict=False) expected = { - 'academy_id': 1, - 'cohort_id': 1, - 'expires_at': None, - 'id': 1, - 'layout_id': 1, - 'preview_url': None, - 'signed_by': teacher_model['user'].first_name + ' ' + teacher_model['user'].last_name, - 'signed_by_role': strings[model['cohort'].language]['Main Instructor'], - 'specialty_id': 1, - 'issued_at': None, - 'status': 'ERROR', - 'token': None, - 'status_text': 'with-pending-tasks-1', - 'user_id': 1, - 'update_hash': self.generate_update_hash(user_specialty), + "academy_id": 1, + "cohort_id": 1, + "expires_at": None, + "id": 1, + "layout_id": 1, + "preview_url": None, + "signed_by": teacher_model["user"].first_name + " " + teacher_model["user"].last_name, + "signed_by_role": strings[model["cohort"].language]["Main Instructor"], + "specialty_id": 1, + "issued_at": None, + "status": "ERROR", + "token": None, + "status_text": "with-pending-tasks-1", + "user_id": 1, + "update_hash": self.generate_update_hash(user_specialty), } self.assertEqual(result, expected) self.assertEqual( - self.clear_keys(self.bc.database.list_of('certificate.UserSpecialty'), ['preview_url', 'token']), - [expected]) + self.clear_keys(self.bc.database.list_of("certificate.UserSpecialty"), ["preview_url", "token"]), [expected] + ) - user_specialty = self.bc.database.get('certificate.UserSpecialty', 1, dict=False) - self.assertEqual(signals.user_specialty_saved.send_robust.call_args_list, [ - call(instance=user_specialty, sender=user_specialty.__class__), - ]) + user_specialty = self.bc.database.get("certificate.UserSpecialty", 1, dict=False) + self.assertEqual( + signals.user_specialty_saved.send_robust.call_args_list, + [ + call(instance=user_specialty, sender=user_specialty.__class__), + ], + ) """ 🔽🔽🔽 Student with pending tasks without mandatory property """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("breathecode.certificate.signals.user_specialty_saved.send_robust", 
MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_generate_certificate__with_student_that_didnt_finish_tasks_without_mandatory(self): - cohort_kwargs = {'stage': 'ENDED'} - task_kwargs = {'task_type': 'PROJECT', 'revision_status': 'PENDING'} - cohort_user_kwargs = {'finantial_status': 'UP_TO_DATE'} - model = self.generate_models(user=True, - cohort=True, - cohort_user=True, - syllabus_version={ - 'id': 1, - 'json': { - 'days': [{ - 'assignments': [{ - 'slug': 'testing-slug' - }] - }] - } - }, - syllabus=True, - syllabus_schedule=True, - specialty=True, - layout_design=True, - task={'associated_slug': 'testing-slug'}, - task_kwargs=task_kwargs, - cohort_kwargs=cohort_kwargs, - cohort_user_kwargs=cohort_user_kwargs) + cohort_kwargs = {"stage": "ENDED"} + task_kwargs = {"task_type": "PROJECT", "revision_status": "PENDING"} + cohort_user_kwargs = {"finantial_status": "UP_TO_DATE"} + model = self.generate_models( + user=True, + cohort=True, + cohort_user=True, + syllabus_version={"id": 1, "json": {"days": [{"assignments": [{"slug": "testing-slug"}]}]}}, + syllabus=True, + syllabus_schedule=True, + specialty=True, + layout_design=True, + task={"associated_slug": "testing-slug"}, + task_kwargs=task_kwargs, + cohort_kwargs=cohort_kwargs, + cohort_user_kwargs=cohort_user_kwargs, + ) base = model.copy() - del base['user'] - del base['cohort_user'] + del base["user"] + del base["cohort_user"] - cohort_user_kwargs = {'role': 'TEACHER'} - teacher_model = self.generate_models(user=True, - cohort_user=True, - cohort_user_kwargs=cohort_user_kwargs, - models=base) - result = self.remove_dinamics_fields(generate_certificate(model['user'], model['cohort']).__dict__) + cohort_user_kwargs = {"role": "TEACHER"} + teacher_model = self.generate_models( + user=True, cohort_user=True, cohort_user_kwargs=cohort_user_kwargs, models=base + ) + result = self.remove_dinamics_fields(generate_certificate(model["user"], model["cohort"]).__dict__) - self.assertToken(result['token']) - result['token'] = None + self.assertToken(result["token"]) + result["token"] = None - user_specialty = self.bc.database.get('certificate.UserSpecialty', 1, dict=False) + user_specialty = self.bc.database.get("certificate.UserSpecialty", 1, dict=False) expected = { - 'academy_id': 1, - 'cohort_id': 1, - 'expires_at': None, - 'id': 1, - 'layout_id': 1, - 'preview_url': None, - 'signed_by': teacher_model['user'].first_name + ' ' + teacher_model['user'].last_name, - 'signed_by_role': strings[model['cohort'].language]['Main Instructor'], - 'specialty_id': 1, - 'issued_at': None, - 'status': 'ERROR', - 'token': None, - 'status_text': 'with-pending-tasks-1', - 'user_id': 1, - 'update_hash': self.generate_update_hash(user_specialty), + "academy_id": 1, + "cohort_id": 1, + "expires_at": None, + "id": 1, + "layout_id": 1, + "preview_url": None, + "signed_by": teacher_model["user"].first_name + " " + teacher_model["user"].last_name, + "signed_by_role": strings[model["cohort"].language]["Main Instructor"], + "specialty_id": 1, + "issued_at": None, + "status": "ERROR", + "token": None, + "status_text": "with-pending-tasks-1", + "user_id": 1, + "update_hash": self.generate_update_hash(user_specialty), } self.assertEqual(result, expected) self.assertEqual( - self.clear_keys(self.bc.database.list_of('certificate.UserSpecialty'), ['preview_url', 'token']), - [expected]) + 
self.clear_keys(self.bc.database.list_of("certificate.UserSpecialty"), ["preview_url", "token"]), [expected] + ) - user_specialty = self.bc.database.get('certificate.UserSpecialty', 1, dict=False) - self.assertEqual(signals.user_specialty_saved.send_robust.call_args_list, [ - call(instance=user_specialty, sender=user_specialty.__class__), - ]) + user_specialty = self.bc.database.get("certificate.UserSpecialty", 1, dict=False) + self.assertEqual( + signals.user_specialty_saved.send_robust.call_args_list, + [ + call(instance=user_specialty, sender=user_specialty.__class__), + ], + ) """ 🔽🔽🔽 Student with non mandatory pending tasks without """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_generate_certificate__with_student_that_didnt_finish_mandatory_tasks(self): - cohort_kwargs = {'stage': 'ENDED'} - task_kwargs = {'task_type': 'PROJECT', 'revision_status': 'PENDING'} - cohort_user_kwargs = {'finantial_status': 'UP_TO_DATE'} - model = self.generate_models(user=True, - cohort=True, - cohort_user=True, - syllabus_version={ - 'id': 1, - 'json': { - 'days': [{ - 'assignments': [{ - 'slug': 'testing-slug', - 'mandatory': False - }] - }] - } - }, - syllabus=True, - syllabus_schedule=True, - specialty=True, - layout_design=True, - task={'associated_slug': 'testing-slug'}, - task_kwargs=task_kwargs, - cohort_kwargs=cohort_kwargs, - cohort_user_kwargs=cohort_user_kwargs) + cohort_kwargs = {"stage": "ENDED"} + task_kwargs = {"task_type": "PROJECT", "revision_status": "PENDING"} + cohort_user_kwargs = {"finantial_status": "UP_TO_DATE"} + model = self.generate_models( + user=True, + cohort=True, + cohort_user=True, + syllabus_version={ + "id": 1, + "json": {"days": [{"assignments": [{"slug": "testing-slug", "mandatory": False}]}]}, + }, + syllabus=True, + syllabus_schedule=True, + specialty=True, + layout_design=True, + task={"associated_slug": "testing-slug"}, + task_kwargs=task_kwargs, + cohort_kwargs=cohort_kwargs, + cohort_user_kwargs=cohort_user_kwargs, + ) base = model.copy() - del base['user'] - del base['cohort_user'] + del base["user"] + del base["cohort_user"] - cohort_user_kwargs = {'role': 'TEACHER'} - teacher_model = self.generate_models(user=True, - cohort_user=True, - cohort_user_kwargs=cohort_user_kwargs, - models=base) - result = self.remove_dinamics_fields(generate_certificate(model['user'], model['cohort']).__dict__) + cohort_user_kwargs = {"role": "TEACHER"} + teacher_model = self.generate_models( + user=True, cohort_user=True, cohort_user_kwargs=cohort_user_kwargs, models=base + ) + result = 
self.remove_dinamics_fields(generate_certificate(model["user"], model["cohort"]).__dict__) - self.assertToken(result['token']) - result['token'] = None + self.assertToken(result["token"]) + result["token"] = None - user_specialty = self.bc.database.get('certificate.UserSpecialty', 1, dict=False) + user_specialty = self.bc.database.get("certificate.UserSpecialty", 1, dict=False) expected = { - 'academy_id': 1, - 'cohort_id': 1, - 'expires_at': None, - 'id': 1, - 'layout_id': 1, - 'preview_url': None, - 'signed_by': teacher_model['user'].first_name + ' ' + teacher_model['user'].last_name, - 'signed_by_role': strings[model['cohort'].language]['Main Instructor'], - 'specialty_id': 1, - 'issued_at': None, - 'status': 'ERROR', - 'token': None, - 'status_text': 'bad-educational-status', - 'user_id': 1, - 'update_hash': self.generate_update_hash(user_specialty), + "academy_id": 1, + "cohort_id": 1, + "expires_at": None, + "id": 1, + "layout_id": 1, + "preview_url": None, + "signed_by": teacher_model["user"].first_name + " " + teacher_model["user"].last_name, + "signed_by_role": strings[model["cohort"].language]["Main Instructor"], + "specialty_id": 1, + "issued_at": None, + "status": "ERROR", + "token": None, + "status_text": "bad-educational-status", + "user_id": 1, + "update_hash": self.generate_update_hash(user_specialty), } self.assertEqual(result, expected) self.assertEqual( - self.clear_keys(self.bc.database.list_of('certificate.UserSpecialty'), ['preview_url', 'token']), - [expected]) + self.clear_keys(self.bc.database.list_of("certificate.UserSpecialty"), ["preview_url", "token"]), [expected] + ) - user_specialty = self.bc.database.get('certificate.UserSpecialty', 1, dict=False) - self.assertEqual(signals.user_specialty_saved.send_robust.call_args_list, [ - call(instance=user_specialty, sender=user_specialty.__class__), - ]) + user_specialty = self.bc.database.get("certificate.UserSpecialty", 1, dict=False) + self.assertEqual( + signals.user_specialty_saved.send_robust.call_args_list, + [ + call(instance=user_specialty, sender=user_specialty.__class__), + ], + ) """ 🔽🔽🔽 Student not graduated """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_generate_certificate__without_proper_educational_status(self): - cohort_kwargs = {'stage': 'ENDED'} - cohort_user_kwargs = {'finantial_status': 'FULLY_PAID'} - model = self.generate_models(user=True, - cohort=True, - cohort_user=True, - syllabus_version=True, - syllabus=True, - syllabus_schedule=True, - specialty=True, - layout_design=True, - cohort_kwargs=cohort_kwargs, - 
cohort_user_kwargs=cohort_user_kwargs) + cohort_kwargs = {"stage": "ENDED"} + cohort_user_kwargs = {"finantial_status": "FULLY_PAID"} + model = self.generate_models( + user=True, + cohort=True, + cohort_user=True, + syllabus_version=True, + syllabus=True, + syllabus_schedule=True, + specialty=True, + layout_design=True, + cohort_kwargs=cohort_kwargs, + cohort_user_kwargs=cohort_user_kwargs, + ) base = model.copy() - del base['user'] - del base['cohort_user'] + del base["user"] + del base["cohort_user"] - cohort_user_kwargs = {'role': 'TEACHER'} - teacher_model = self.generate_models(user=True, - cohort_user=True, - cohort_user_kwargs=cohort_user_kwargs, - models=base) - result = self.remove_dinamics_fields(generate_certificate(model['user'], model['cohort']).__dict__) + cohort_user_kwargs = {"role": "TEACHER"} + teacher_model = self.generate_models( + user=True, cohort_user=True, cohort_user_kwargs=cohort_user_kwargs, models=base + ) + result = self.remove_dinamics_fields(generate_certificate(model["user"], model["cohort"]).__dict__) - self.assertToken(result['token']) - result['token'] = None + self.assertToken(result["token"]) + result["token"] = None - user_specialty = self.bc.database.get('certificate.UserSpecialty', 1, dict=False) + user_specialty = self.bc.database.get("certificate.UserSpecialty", 1, dict=False) expected = { - 'academy_id': 1, - 'cohort_id': 1, - 'expires_at': None, - 'id': 1, - 'layout_id': 1, - 'preview_url': None, - 'signed_by': teacher_model['user'].first_name + ' ' + teacher_model['user'].last_name, - 'signed_by_role': strings[model['cohort'].language]['Main Instructor'], - 'specialty_id': 1, - 'issued_at': None, - 'status': 'ERROR', - 'status_text': 'bad-educational-status', - 'token': None, - 'user_id': 1, - 'update_hash': self.generate_update_hash(user_specialty), + "academy_id": 1, + "cohort_id": 1, + "expires_at": None, + "id": 1, + "layout_id": 1, + "preview_url": None, + "signed_by": teacher_model["user"].first_name + " " + teacher_model["user"].last_name, + "signed_by_role": strings[model["cohort"].language]["Main Instructor"], + "specialty_id": 1, + "issued_at": None, + "status": "ERROR", + "status_text": "bad-educational-status", + "token": None, + "user_id": 1, + "update_hash": self.generate_update_hash(user_specialty), } self.assertEqual(result, expected) self.assertEqual( - self.clear_keys(self.bc.database.list_of('certificate.UserSpecialty'), ['preview_url', 'token']), - [expected]) + self.clear_keys(self.bc.database.list_of("certificate.UserSpecialty"), ["preview_url", "token"]), [expected] + ) - user_specialty = self.bc.database.get('certificate.UserSpecialty', 1, dict=False) - self.assertEqual(signals.user_specialty_saved.send_robust.call_args_list, [ - call(instance=user_specialty, sender=user_specialty.__class__), - ]) + user_specialty = self.bc.database.get("certificate.UserSpecialty", 1, dict=False) + self.assertEqual( + signals.user_specialty_saved.send_robust.call_args_list, + [ + call(instance=user_specialty, sender=user_specialty.__class__), + ], + ) """ 🔽🔽🔽 Student with bad finantial_status """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - 
@patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_generate_certificate__with_cohort_user__with_finantial_status_eq_up_to_date(self): - cohort_kwargs = {'stage': 'ENDED'} - cohort_user_kwargs = {'finantial_status': 'UP_TO_DATE'} - model = self.generate_models(user=True, - cohort=True, - cohort_user=True, - syllabus_version=True, - syllabus=True, - syllabus_schedule=True, - specialty=True, - layout_design=True, - cohort_kwargs=cohort_kwargs, - cohort_user_kwargs=cohort_user_kwargs) + cohort_kwargs = {"stage": "ENDED"} + cohort_user_kwargs = {"finantial_status": "UP_TO_DATE"} + model = self.generate_models( + user=True, + cohort=True, + cohort_user=True, + syllabus_version=True, + syllabus=True, + syllabus_schedule=True, + specialty=True, + layout_design=True, + cohort_kwargs=cohort_kwargs, + cohort_user_kwargs=cohort_user_kwargs, + ) base = model.copy() - del base['user'] - del base['cohort_user'] - - cohort_user_kwargs = {'role': 'TEACHER'} - teacher_model = self.generate_models(user=True, - cohort_user=True, - cohort_user_kwargs=cohort_user_kwargs, - models=base) - result = self.remove_dinamics_fields(generate_certificate(model['user'], model['cohort']).__dict__) - self.assertToken(result['token']) - result['token'] = None - - user_specialty = self.bc.database.get('certificate.UserSpecialty', 1, dict=False) + del base["user"] + del base["cohort_user"] + + cohort_user_kwargs = {"role": "TEACHER"} + teacher_model = self.generate_models( + user=True, cohort_user=True, cohort_user_kwargs=cohort_user_kwargs, models=base + ) + result = self.remove_dinamics_fields(generate_certificate(model["user"], model["cohort"]).__dict__) + self.assertToken(result["token"]) + result["token"] = None + + user_specialty = self.bc.database.get("certificate.UserSpecialty", 1, dict=False) expected = { - 'academy_id': 1, - 'cohort_id': 1, - 'expires_at': None, - 'id': 1, - 'layout_id': 1, - 'preview_url': None, - 'signed_by': teacher_model['user'].first_name + ' ' + teacher_model['user'].last_name, - 'signed_by_role': strings[model['cohort'].language]['Main Instructor'], - 'specialty_id': 1, - 'issued_at': None, - 'status': 'ERROR', - 'status_text': 'bad-educational-status', - 'token': None, - 'user_id': 1, - 'update_hash': self.generate_update_hash(user_specialty), + "academy_id": 1, + "cohort_id": 1, + "expires_at": None, + "id": 1, + "layout_id": 1, + "preview_url": None, + "signed_by": teacher_model["user"].first_name + " " + teacher_model["user"].last_name, + "signed_by_role": strings[model["cohort"].language]["Main Instructor"], + "specialty_id": 1, + "issued_at": None, + "status": "ERROR", + "status_text": "bad-educational-status", + "token": None, + "user_id": 1, + "update_hash": self.generate_update_hash(user_specialty), } self.assertEqual(result, expected) self.assertEqual( - self.clear_keys(self.bc.database.list_of('certificate.UserSpecialty'), ['preview_url', 'token']), - [expected]) + 
self.clear_keys(self.bc.database.list_of("certificate.UserSpecialty"), ["preview_url", "token"]), [expected] + ) - user_specialty = self.bc.database.get('certificate.UserSpecialty', 1, dict=False) - self.assertEqual(signals.user_specialty_saved.send_robust.call_args_list, [ - call(instance=user_specialty, sender=user_specialty.__class__), - ]) + user_specialty = self.bc.database.get("certificate.UserSpecialty", 1, dict=False) + self.assertEqual( + signals.user_specialty_saved.send_robust.call_args_list, + [ + call(instance=user_specialty, sender=user_specialty.__class__), + ], + ) """ 🔽🔽🔽 Student dropped """ - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_generate_certificate__with_cohort_user__with_educational_status_eq_dropped(self): - cohort_kwargs = {'stage': 'ENDED'} - cohort_user_kwargs = {'finantial_status': 'UP_TO_DATE', 'educational_status': 'DROPPED'} - model = self.generate_models(user=True, - cohort=True, - cohort_user=True, - syllabus_version=True, - syllabus=True, - syllabus_schedule=True, - specialty=True, - layout_design=True, - cohort_kwargs=cohort_kwargs, - cohort_user_kwargs=cohort_user_kwargs) + cohort_kwargs = {"stage": "ENDED"} + cohort_user_kwargs = {"finantial_status": "UP_TO_DATE", "educational_status": "DROPPED"} + model = self.generate_models( + user=True, + cohort=True, + cohort_user=True, + syllabus_version=True, + syllabus=True, + syllabus_schedule=True, + specialty=True, + layout_design=True, + cohort_kwargs=cohort_kwargs, + cohort_user_kwargs=cohort_user_kwargs, + ) base = model.copy() - del base['user'] - del base['cohort_user'] + del base["user"] + del base["cohort_user"] - cohort_user_kwargs = {'role': 'TEACHER'} - teacher_model = self.generate_models(user=True, - cohort_user=True, - cohort_user_kwargs=cohort_user_kwargs, - models=base) - result = self.remove_dinamics_fields(generate_certificate(model['user'], model['cohort']).__dict__) + cohort_user_kwargs = {"role": "TEACHER"} + teacher_model = self.generate_models( + user=True, cohort_user=True, cohort_user_kwargs=cohort_user_kwargs, models=base + ) + result = self.remove_dinamics_fields(generate_certificate(model["user"], model["cohort"]).__dict__) - self.assertToken(result['token']) - result['token'] = None + self.assertToken(result["token"]) + result["token"] = None - user_specialty = self.bc.database.get('certificate.UserSpecialty', 1, dict=False) + 
user_specialty = self.bc.database.get("certificate.UserSpecialty", 1, dict=False) expected = { - 'academy_id': 1, - 'cohort_id': 1, - 'expires_at': None, - 'id': 1, - 'layout_id': 1, - 'preview_url': None, - 'signed_by': teacher_model['user'].first_name + ' ' + teacher_model['user'].last_name, - 'signed_by_role': strings[model['cohort'].language]['Main Instructor'], - 'specialty_id': 1, - 'issued_at': None, - 'status': 'ERROR', - 'status_text': 'bad-educational-status', - 'token': None, - 'user_id': 1, - 'update_hash': self.generate_update_hash(user_specialty), + "academy_id": 1, + "cohort_id": 1, + "expires_at": None, + "id": 1, + "layout_id": 1, + "preview_url": None, + "signed_by": teacher_model["user"].first_name + " " + teacher_model["user"].last_name, + "signed_by_role": strings[model["cohort"].language]["Main Instructor"], + "specialty_id": 1, + "issued_at": None, + "status": "ERROR", + "status_text": "bad-educational-status", + "token": None, + "user_id": 1, + "update_hash": self.generate_update_hash(user_specialty), } self.assertEqual(result, expected) self.assertEqual( - self.clear_keys(self.bc.database.list_of('certificate.UserSpecialty'), ['preview_url', 'token']), - [expected]) + self.clear_keys(self.bc.database.list_of("certificate.UserSpecialty"), ["preview_url", "token"]), [expected] + ) - user_specialty = self.bc.database.get('certificate.UserSpecialty', 1, dict=False) - self.assertEqual(signals.user_specialty_saved.send_robust.call_args_list, [ - call(instance=user_specialty, sender=user_specialty.__class__), - ]) + user_specialty = self.bc.database.get("certificate.UserSpecialty", 1, dict=False) + self.assertEqual( + signals.user_specialty_saved.send_robust.call_args_list, + [ + call(instance=user_specialty, sender=user_specialty.__class__), + ], + ) """ 🔽🔽🔽 Cohort not finished """ - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_generate_certificate__with_cohort_not_finished(self): - cohort_kwargs = {'stage': 'ENDED'} - cohort_user_kwargs = {'finantial_status': 'UP_TO_DATE', 'educational_status': 'GRADUATED'} - model = self.generate_models(user=True, - cohort=True, - cohort_user=True, - syllabus_version=True, - syllabus=True, - syllabus_schedule=True, - specialty=True, - layout_design=True, - cohort_kwargs=cohort_kwargs, - cohort_user_kwargs=cohort_user_kwargs) + cohort_kwargs = {"stage": "ENDED"} + cohort_user_kwargs = 
{"finantial_status": "UP_TO_DATE", "educational_status": "GRADUATED"} + model = self.generate_models( + user=True, + cohort=True, + cohort_user=True, + syllabus_version=True, + syllabus=True, + syllabus_schedule=True, + specialty=True, + layout_design=True, + cohort_kwargs=cohort_kwargs, + cohort_user_kwargs=cohort_user_kwargs, + ) base = model.copy() - del base['user'] - del base['cohort_user'] - - cohort_user_kwargs = {'role': 'TEACHER'} - teacher_model = self.generate_models(user=True, - cohort_user=True, - cohort_user_kwargs=cohort_user_kwargs, - models=base) - result = self.remove_dinamics_fields(generate_certificate(model['user'], model['cohort']).__dict__) - user_specialty = self.bc.database.get('certificate.UserSpecialty', 1, dict=False) + del base["user"] + del base["cohort_user"] + + cohort_user_kwargs = {"role": "TEACHER"} + teacher_model = self.generate_models( + user=True, cohort_user=True, cohort_user_kwargs=cohort_user_kwargs, models=base + ) + result = self.remove_dinamics_fields(generate_certificate(model["user"], model["cohort"]).__dict__) + user_specialty = self.bc.database.get("certificate.UserSpecialty", 1, dict=False) expected = { - 'academy_id': 1, - 'cohort_id': 1, - 'expires_at': None, - 'id': 1, - 'layout_id': 1, - 'preview_url': None, - 'signed_by': teacher_model['user'].first_name + ' ' + teacher_model['user'].last_name, - 'signed_by_role': strings[model['cohort'].language]['Main Instructor'], - 'specialty_id': 1, - 'issued_at': None, - 'status': 'ERROR', - 'status_text': 'cohort-not-finished', - 'user_id': 1, - 'update_hash': self.generate_update_hash(user_specialty), + "academy_id": 1, + "cohort_id": 1, + "expires_at": None, + "id": 1, + "layout_id": 1, + "preview_url": None, + "signed_by": teacher_model["user"].first_name + " " + teacher_model["user"].last_name, + "signed_by_role": strings[model["cohort"].language]["Main Instructor"], + "specialty_id": 1, + "issued_at": None, + "status": "ERROR", + "status_text": "cohort-not-finished", + "user_id": 1, + "update_hash": self.generate_update_hash(user_specialty), } - self.assertToken(result['token']) - token = result['token'] - del result['token'] + self.assertToken(result["token"]) + token = result["token"] + del result["token"] self.assertEqual(result, expected) - self.assertEqual(self.clear_preview_url(self.bc.database.list_of('certificate.UserSpecialty')), - [{ - **expected, - 'token': token, - }]) + self.assertEqual( + self.clear_preview_url(self.bc.database.list_of("certificate.UserSpecialty")), + [ + { + **expected, + "token": token, + } + ], + ) - user_specialty = self.bc.database.get('certificate.UserSpecialty', 1, dict=False) - self.assertEqual(signals.user_specialty_saved.send_robust.call_args_list, [ - call(instance=user_specialty, sender=user_specialty.__class__), - ]) + user_specialty = self.bc.database.get("certificate.UserSpecialty", 1, dict=False) + self.assertEqual( + signals.user_specialty_saved.send_robust.call_args_list, + [ + call(instance=user_specialty, sender=user_specialty.__class__), + ], + ) """ 🔽🔽🔽 Generate certificate """ - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', 
MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_generate_certificate(self): - cohort_kwargs = {'stage': 'ENDED', 'current_day': 9545799} - cohort_user_kwargs = {'finantial_status': 'UP_TO_DATE', 'educational_status': 'GRADUATED'} - syllabus_kwargs = {'duration_in_days': 9545799} - model = self.generate_models(user=True, - cohort=True, - cohort_user=True, - syllabus_version=True, - syllabus=True, - syllabus_schedule=True, - specialty=True, - layout_design=True, - cohort_kwargs=cohort_kwargs, - cohort_user_kwargs=cohort_user_kwargs, - syllabus_kwargs=syllabus_kwargs) + cohort_kwargs = {"stage": "ENDED", "current_day": 9545799} + cohort_user_kwargs = {"finantial_status": "UP_TO_DATE", "educational_status": "GRADUATED"} + syllabus_kwargs = {"duration_in_days": 9545799} + model = self.generate_models( + user=True, + cohort=True, + cohort_user=True, + syllabus_version=True, + syllabus=True, + syllabus_schedule=True, + specialty=True, + layout_design=True, + cohort_kwargs=cohort_kwargs, + cohort_user_kwargs=cohort_user_kwargs, + syllabus_kwargs=syllabus_kwargs, + ) base = model.copy() - del base['user'] - del base['cohort_user'] + del base["user"] + del base["cohort_user"] - cohort_user_kwargs = {'role': 'TEACHER'} - teacher_model = self.generate_models(user=True, - cohort_user=True, - cohort_user_kwargs=cohort_user_kwargs, - models=base) + cohort_user_kwargs = {"role": "TEACHER"} + teacher_model = self.generate_models( + user=True, cohort_user=True, cohort_user_kwargs=cohort_user_kwargs, models=base + ) start = timezone.now() - result = self.remove_dinamics_fields(generate_certificate(model['user'], model['cohort']).__dict__) + result = self.remove_dinamics_fields(generate_certificate(model["user"], model["cohort"]).__dict__) end = timezone.now() - issued_at = result['issued_at'] + issued_at = result["issued_at"] self.assertGreater(issued_at, start) self.assertLess(issued_at, end) - del result['issued_at'] - user_specialty = self.bc.database.get('certificate.UserSpecialty', 1, dict=False) + del result["issued_at"] + user_specialty = self.bc.database.get("certificate.UserSpecialty", 1, dict=False) expected = { - 'academy_id': 1, - 'cohort_id': 1, - 'expires_at': None, - 'id': 1, - 'layout_id': 1, - 'preview_url': None, - 'signed_by': teacher_model['user'].first_name + ' ' + teacher_model['user'].last_name, - 'signed_by_role': strings[model['cohort'].language]['Main Instructor'], - 'specialty_id': 1, - 'status': 'PERSISTED', - 'status_text': 'Certificate successfully queued for PDF generation', - 'user_id': 1, - 'is_cleaned': True, - 'update_hash': self.generate_update_hash(user_specialty), + "academy_id": 1, + "cohort_id": 1, + "expires_at": None, + "id": 1, + "layout_id": 1, + "preview_url": None, + "signed_by": teacher_model["user"].first_name + " " + teacher_model["user"].last_name, + "signed_by_role": 
strings[model["cohort"].language]["Main Instructor"], + "specialty_id": 1, + "status": "PERSISTED", + "status_text": "Certificate successfully queued for PDF generation", + "user_id": 1, + "is_cleaned": True, + "update_hash": self.generate_update_hash(user_specialty), } - self.assertToken(result['token']) - token = result['token'] - del result['token'] + self.assertToken(result["token"]) + token = result["token"] + del result["token"] self.assertEqual(result, expected) - del expected['is_cleaned'] + del expected["is_cleaned"] - self.assertEqual(self.clear_preview_url(self.bc.database.list_of('certificate.UserSpecialty')), - [{ - **expected, 'token': token, - 'issued_at': issued_at - }]) + self.assertEqual( + self.clear_preview_url(self.bc.database.list_of("certificate.UserSpecialty")), + [{**expected, "token": token, "issued_at": issued_at}], + ) - user_specialty = self.bc.database.get('certificate.UserSpecialty', 1, dict=False) - self.assertEqual(signals.user_specialty_saved.send_robust.call_args_list, [ - call(instance=user_specialty, sender=user_specialty.__class__), - ]) + user_specialty = self.bc.database.get("certificate.UserSpecialty", 1, dict=False) + self.assertEqual( + signals.user_specialty_saved.send_robust.call_args_list, + [ + call(instance=user_specialty, sender=user_specialty.__class__), + ], + ) """ 🔽🔽🔽 Translations """ - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_generate_certificate__lang_en(self): - cohort_kwargs = {'stage': 'ENDED', 'current_day': 9545799, 'language': 'en'} - cohort_user_kwargs = {'finantial_status': 'UP_TO_DATE', 'educational_status': 'GRADUATED'} - syllabus_kwargs = {'duration_in_days': 9545799} - model = self.generate_models(user=True, - cohort=True, - cohort_user=True, - syllabus_version=True, - syllabus=True, - syllabus_schedule=True, - specialty=True, - layout_design=True, - cohort_kwargs=cohort_kwargs, - cohort_user_kwargs=cohort_user_kwargs, - syllabus_kwargs=syllabus_kwargs) + cohort_kwargs = {"stage": "ENDED", "current_day": 9545799, "language": "en"} + cohort_user_kwargs = {"finantial_status": "UP_TO_DATE", "educational_status": "GRADUATED"} + syllabus_kwargs = {"duration_in_days": 9545799} + model = self.generate_models( + user=True, + cohort=True, + cohort_user=True, + syllabus_version=True, + syllabus=True, + syllabus_schedule=True, + specialty=True, + layout_design=True, + 
cohort_kwargs=cohort_kwargs, + cohort_user_kwargs=cohort_user_kwargs, + syllabus_kwargs=syllabus_kwargs, + ) base = model.copy() - del base['user'] - del base['cohort_user'] + del base["user"] + del base["cohort_user"] - cohort_user_kwargs = {'role': 'TEACHER'} - teacher_model = self.generate_models(user=True, - cohort_user=True, - cohort_user_kwargs=cohort_user_kwargs, - models=base) + cohort_user_kwargs = {"role": "TEACHER"} + teacher_model = self.generate_models( + user=True, cohort_user=True, cohort_user_kwargs=cohort_user_kwargs, models=base + ) start = timezone.now() - result = self.remove_dinamics_fields(generate_certificate(model['user'], model['cohort']).__dict__) + result = self.remove_dinamics_fields(generate_certificate(model["user"], model["cohort"]).__dict__) end = timezone.now() - issued_at = result['issued_at'] + issued_at = result["issued_at"] self.assertGreater(issued_at, start) self.assertLess(issued_at, end) - del result['issued_at'] + del result["issued_at"] - user_specialty = self.bc.database.get('certificate.UserSpecialty', 1, dict=False) + user_specialty = self.bc.database.get("certificate.UserSpecialty", 1, dict=False) expected = { - 'academy_id': 1, - 'cohort_id': 1, - 'expires_at': None, - 'id': 1, - 'layout_id': 1, - 'preview_url': None, - 'signed_by': teacher_model['user'].first_name + ' ' + teacher_model['user'].last_name, - 'signed_by_role': strings[model['cohort'].language]['Main Instructor'], - 'specialty_id': 1, - 'status': 'PERSISTED', - 'status_text': 'Certificate successfully queued for PDF generation', - 'user_id': 1, - 'is_cleaned': True, - 'update_hash': self.generate_update_hash(user_specialty), + "academy_id": 1, + "cohort_id": 1, + "expires_at": None, + "id": 1, + "layout_id": 1, + "preview_url": None, + "signed_by": teacher_model["user"].first_name + " " + teacher_model["user"].last_name, + "signed_by_role": strings[model["cohort"].language]["Main Instructor"], + "specialty_id": 1, + "status": "PERSISTED", + "status_text": "Certificate successfully queued for PDF generation", + "user_id": 1, + "is_cleaned": True, + "update_hash": self.generate_update_hash(user_specialty), } - self.assertToken(result['token']) - token = result['token'] - del result['token'] + self.assertToken(result["token"]) + token = result["token"] + del result["token"] self.assertEqual(result, expected) - del expected['is_cleaned'] - - self.assertEqual(self.clear_preview_url(self.bc.database.list_of('certificate.UserSpecialty')), - [{ - **expected, 'token': token, - 'issued_at': issued_at - }]) - - user_specialty = self.bc.database.get('certificate.UserSpecialty', 1, dict=False) - self.assertEqual(signals.user_specialty_saved.send_robust.call_args_list, [ - call(instance=user_specialty, sender=user_specialty.__class__), - ]) - - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) - @pytest.mark.parametrize('current_day,duration_in_days,never_ends', [ - (9545799, 9545799, False), - (1, 9545799, True), - ]) + del expected["is_cleaned"] + + 
self.assertEqual( + self.clear_preview_url(self.bc.database.list_of("certificate.UserSpecialty")), + [{**expected, "token": token, "issued_at": issued_at}], + ) + + user_specialty = self.bc.database.get("certificate.UserSpecialty", 1, dict=False) + self.assertEqual( + signals.user_specialty_saved.send_robust.call_args_list, + [ + call(instance=user_specialty, sender=user_specialty.__class__), + ], + ) + + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) + @pytest.mark.parametrize( + "current_day,duration_in_days,never_ends", + [ + (9545799, 9545799, False), + (1, 9545799, True), + ], + ) def test_generate_certificate__lang_es(self, current_day, duration_in_days, never_ends): - cohort_kwargs = {'stage': 'ENDED', 'current_day': current_day, 'language': 'es', 'never_ends': never_ends} - cohort_user_kwargs = {'finantial_status': 'UP_TO_DATE', 'educational_status': 'GRADUATED'} - syllabus_kwargs = {'duration_in_days': duration_in_days} - model = self.generate_models(user=True, - cohort=True, - cohort_user=True, - syllabus_version=True, - syllabus=True, - syllabus_schedule=True, - specialty=True, - layout_design=True, - cohort_kwargs=cohort_kwargs, - cohort_user_kwargs=cohort_user_kwargs, - syllabus_kwargs=syllabus_kwargs) + cohort_kwargs = {"stage": "ENDED", "current_day": current_day, "language": "es", "never_ends": never_ends} + cohort_user_kwargs = {"finantial_status": "UP_TO_DATE", "educational_status": "GRADUATED"} + syllabus_kwargs = {"duration_in_days": duration_in_days} + model = self.generate_models( + user=True, + cohort=True, + cohort_user=True, + syllabus_version=True, + syllabus=True, + syllabus_schedule=True, + specialty=True, + layout_design=True, + cohort_kwargs=cohort_kwargs, + cohort_user_kwargs=cohort_user_kwargs, + syllabus_kwargs=syllabus_kwargs, + ) signals.user_specialty_saved.send_robust.call_args_list = [] base = model.copy() - del base['user'] - del base['cohort_user'] - - cohort_user_kwargs = {'role': 'TEACHER'} - teacher_model = self.generate_models(user=True, - cohort_user=True, - cohort_user_kwargs=cohort_user_kwargs, - models=base) + del base["user"] + del base["cohort_user"] + + cohort_user_kwargs = {"role": "TEACHER"} + teacher_model = self.generate_models( + user=True, cohort_user=True, cohort_user_kwargs=cohort_user_kwargs, models=base + ) start = timezone.now() - result = self.remove_dinamics_fields(generate_certificate(model['user'], model['cohort']).__dict__) + result = self.remove_dinamics_fields(generate_certificate(model["user"], model["cohort"]).__dict__) end = timezone.now() - issued_at = result['issued_at'] + issued_at = result["issued_at"] self.assertGreater(issued_at, start) self.assertLess(issued_at, end) - del result['issued_at'] + del result["issued_at"] - user_specialty = self.bc.database.get('certificate.UserSpecialty', 1, dict=False) + user_specialty = self.bc.database.get("certificate.UserSpecialty", 1, dict=False) expected = { - 'academy_id': 1, - 'cohort_id': 1, - 'expires_at': None, - 'id': 1, - 
'layout_id': 1, - 'preview_url': None, - 'signed_by': teacher_model['user'].first_name + ' ' + teacher_model['user'].last_name, - 'signed_by_role': strings[model['cohort'].language]['Main Instructor'], - 'specialty_id': 1, - 'status': 'PERSISTED', - 'status_text': 'Certificate successfully queued for PDF generation', - 'user_id': 1, - 'is_cleaned': True, - 'update_hash': self.generate_update_hash(user_specialty), + "academy_id": 1, + "cohort_id": 1, + "expires_at": None, + "id": 1, + "layout_id": 1, + "preview_url": None, + "signed_by": teacher_model["user"].first_name + " " + teacher_model["user"].last_name, + "signed_by_role": strings[model["cohort"].language]["Main Instructor"], + "specialty_id": 1, + "status": "PERSISTED", + "status_text": "Certificate successfully queued for PDF generation", + "user_id": 1, + "is_cleaned": True, + "update_hash": self.generate_update_hash(user_specialty), } - self.assertToken(result['token']) - token = result['token'] - del result['token'] + self.assertToken(result["token"]) + token = result["token"] + del result["token"] self.assertEqual(result, expected) - del expected['is_cleaned'] - - self.assertEqual(self.clear_preview_url(self.bc.database.list_of('certificate.UserSpecialty')), - [{ - **expected, 'token': token, - 'issued_at': issued_at - }]) - - user_specialty = self.bc.database.get('certificate.UserSpecialty', 1, dict=False) - self.assertEqual(signals.user_specialty_saved.send_robust.call_args_list, [ - call(instance=user_specialty, sender=user_specialty.__class__), - ]) - - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) - @pytest.mark.parametrize('stage', ['INACTIVE', 'PREWORK', 'STARTED', 'FINAL_PROJECT', 'ENDED']) + del expected["is_cleaned"] + + self.assertEqual( + self.clear_preview_url(self.bc.database.list_of("certificate.UserSpecialty")), + [{**expected, "token": token, "issued_at": issued_at}], + ) + + user_specialty = self.bc.database.get("certificate.UserSpecialty", 1, dict=False) + self.assertEqual( + signals.user_specialty_saved.send_robust.call_args_list, + [ + call(instance=user_specialty, sender=user_specialty.__class__), + ], + ) + + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) + @pytest.mark.parametrize("stage", ["INACTIVE", "PREWORK", "STARTED", "FINAL_PROJECT", "ENDED"]) def test_generate_certificate__lang_es__never_ends_true(self, stage): - cohort_kwargs = {'stage': stage, 'current_day': 1, 'language': 'es', 'never_ends': True} - 
cohort_user_kwargs = {'finantial_status': 'UP_TO_DATE', 'educational_status': 'GRADUATED'} - syllabus_kwargs = {'duration_in_days': 9545799} - model = self.generate_models(user=True, - cohort=True, - cohort_user=True, - syllabus_version=True, - syllabus=True, - syllabus_schedule=True, - specialty=True, - layout_design=True, - cohort_kwargs=cohort_kwargs, - cohort_user_kwargs=cohort_user_kwargs, - syllabus_kwargs=syllabus_kwargs) + cohort_kwargs = {"stage": stage, "current_day": 1, "language": "es", "never_ends": True} + cohort_user_kwargs = {"finantial_status": "UP_TO_DATE", "educational_status": "GRADUATED"} + syllabus_kwargs = {"duration_in_days": 9545799} + model = self.generate_models( + user=True, + cohort=True, + cohort_user=True, + syllabus_version=True, + syllabus=True, + syllabus_schedule=True, + specialty=True, + layout_design=True, + cohort_kwargs=cohort_kwargs, + cohort_user_kwargs=cohort_user_kwargs, + syllabus_kwargs=syllabus_kwargs, + ) base = model.copy() - del base['user'] - del base['cohort_user'] + del base["user"] + del base["cohort_user"] - cohort_user_kwargs = {'role': 'TEACHER'} - teacher_model = self.generate_models(user=True, - cohort_user=True, - cohort_user_kwargs=cohort_user_kwargs, - models=base) + cohort_user_kwargs = {"role": "TEACHER"} + teacher_model = self.generate_models( + user=True, cohort_user=True, cohort_user_kwargs=cohort_user_kwargs, models=base + ) signals.user_specialty_saved.send_robust.call_args_list = [] start = timezone.now() - result = self.remove_dinamics_fields(generate_certificate(model['user'], model['cohort']).__dict__) + result = self.remove_dinamics_fields(generate_certificate(model["user"], model["cohort"]).__dict__) end = timezone.now() - issued_at = result['issued_at'] + issued_at = result["issued_at"] self.assertGreater(issued_at, start) self.assertLess(issued_at, end) - del result['issued_at'] + del result["issued_at"] - user_specialty = self.bc.database.get('certificate.UserSpecialty', 1, dict=False) + user_specialty = self.bc.database.get("certificate.UserSpecialty", 1, dict=False) expected = { - 'academy_id': 1, - 'cohort_id': 1, - 'expires_at': None, - 'id': 1, - 'layout_id': 1, - 'preview_url': None, - 'signed_by': teacher_model['user'].first_name + ' ' + teacher_model['user'].last_name, - 'signed_by_role': strings[model['cohort'].language]['Main Instructor'], - 'specialty_id': 1, - 'status': 'PERSISTED', - 'status_text': 'Certificate successfully queued for PDF generation', - 'user_id': 1, - 'is_cleaned': True, - 'update_hash': self.generate_update_hash(user_specialty), + "academy_id": 1, + "cohort_id": 1, + "expires_at": None, + "id": 1, + "layout_id": 1, + "preview_url": None, + "signed_by": teacher_model["user"].first_name + " " + teacher_model["user"].last_name, + "signed_by_role": strings[model["cohort"].language]["Main Instructor"], + "specialty_id": 1, + "status": "PERSISTED", + "status_text": "Certificate successfully queued for PDF generation", + "user_id": 1, + "is_cleaned": True, + "update_hash": self.generate_update_hash(user_specialty), } - self.assertToken(result['token']) - token = result['token'] - del result['token'] + self.assertToken(result["token"]) + token = result["token"] + del result["token"] self.assertEqual(result, expected) - del expected['is_cleaned'] - - self.assertEqual(self.clear_preview_url(self.bc.database.list_of('certificate.UserSpecialty')), - [{ - **expected, 'token': token, - 'issued_at': issued_at - }]) - - user_specialty = self.bc.database.get('certificate.UserSpecialty', 1, 
dict=False) - self.assertEqual(signals.user_specialty_saved.send_robust.call_args_list, [ - call(instance=user_specialty, sender=user_specialty.__class__), - ]) - - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + del expected["is_cleaned"] + + self.assertEqual( + self.clear_preview_url(self.bc.database.list_of("certificate.UserSpecialty")), + [{**expected, "token": token, "issued_at": issued_at}], + ) + + user_specialty = self.bc.database.get("certificate.UserSpecialty", 1, dict=False) + self.assertEqual( + signals.user_specialty_saved.send_robust.call_args_list, + [ + call(instance=user_specialty, sender=user_specialty.__class__), + ], + ) + + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_generate_certificate__lang_es__never_ends_true__stage_deleted(self): - stage = 'DELETED' - cohort_kwargs = {'stage': stage, 'current_day': 1, 'language': 'es', 'never_ends': True} - cohort_user_kwargs = {'finantial_status': 'UP_TO_DATE', 'educational_status': 'GRADUATED'} - syllabus_kwargs = {'duration_in_days': 9545799} - model = self.generate_models(user=True, - cohort=True, - cohort_user=True, - syllabus_version=True, - syllabus=True, - syllabus_schedule=True, - specialty=True, - layout_design=True, - cohort_kwargs=cohort_kwargs, - cohort_user_kwargs=cohort_user_kwargs, - syllabus_kwargs=syllabus_kwargs) + stage = "DELETED" + cohort_kwargs = {"stage": stage, "current_day": 1, "language": "es", "never_ends": True} + cohort_user_kwargs = {"finantial_status": "UP_TO_DATE", "educational_status": "GRADUATED"} + syllabus_kwargs = {"duration_in_days": 9545799} + model = self.generate_models( + user=True, + cohort=True, + cohort_user=True, + syllabus_version=True, + syllabus=True, + syllabus_schedule=True, + specialty=True, + layout_design=True, + cohort_kwargs=cohort_kwargs, + cohort_user_kwargs=cohort_user_kwargs, + syllabus_kwargs=syllabus_kwargs, + ) base = model.copy() - del base['user'] - del base['cohort_user'] + del base["user"] + del base["cohort_user"] - cohort_user_kwargs = {'role': 'TEACHER'} + cohort_user_kwargs = {"role": "TEACHER"} self.generate_models(user=True, cohort_user=True, cohort_user_kwargs=cohort_user_kwargs, models=base) signals.user_specialty_saved.send_robust.call_args_list = [] - with pytest.raises(ValidationException, match='missing-cohort-user'): - self.remove_dinamics_fields(generate_certificate(model['user'], model['cohort']).__dict__) + with pytest.raises(ValidationException, 
match="missing-cohort-user"): + self.remove_dinamics_fields(generate_certificate(model["user"], model["cohort"]).__dict__) - self.assertEqual(self.clear_preview_url(self.bc.database.list_of('certificate.UserSpecialty')), []) + self.assertEqual(self.clear_preview_url(self.bc.database.list_of("certificate.UserSpecialty")), []) self.assertEqual(signals.user_specialty_saved.send_robust.call_args_list, []) """ 🔽🔽🔽 Retry generate certificate """ - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_generate_certificate__retry_generate_certificate(self): - cohort_kwargs = {'stage': 'ENDED', 'current_day': 9545799} - cohort_user_kwargs = {'finantial_status': 'UP_TO_DATE', 'educational_status': 'GRADUATED'} - syllabus_kwargs = {'duration_in_days': 9545799} - user_specialty_kwargs = {'status': 'PERSISTED'} - model = self.generate_models(user=True, - cohort=True, - cohort_user=True, - syllabus_version=True, - syllabus=True, - syllabus_schedule=True, - specialty=True, - layout_design=True, - user_specialty=True, - cohort_kwargs=cohort_kwargs, - cohort_user_kwargs=cohort_user_kwargs, - syllabus_kwargs=syllabus_kwargs, - user_specialty_kwargs=user_specialty_kwargs) + cohort_kwargs = {"stage": "ENDED", "current_day": 9545799} + cohort_user_kwargs = {"finantial_status": "UP_TO_DATE", "educational_status": "GRADUATED"} + syllabus_kwargs = {"duration_in_days": 9545799} + user_specialty_kwargs = {"status": "PERSISTED"} + model = self.generate_models( + user=True, + cohort=True, + cohort_user=True, + syllabus_version=True, + syllabus=True, + syllabus_schedule=True, + specialty=True, + layout_design=True, + user_specialty=True, + cohort_kwargs=cohort_kwargs, + cohort_user_kwargs=cohort_user_kwargs, + syllabus_kwargs=syllabus_kwargs, + user_specialty_kwargs=user_specialty_kwargs, + ) base = model.copy() - del base['user'] - del base['cohort_user'] + del base["user"] + del base["cohort_user"] - cohort_user_kwargs = {'role': 'TEACHER'} + cohort_user_kwargs = {"role": "TEACHER"} self.generate_models(user=True, cohort_user=True, cohort_user_kwargs=cohort_user_kwargs, models=base) try: - generate_certificate(model['user'], model['cohort']) + generate_certificate(model["user"], model["cohort"]) assert False except Exception as e: - self.assertEqual(str(e), 'already-exists') + self.assertEqual(str(e), "already-exists") - user_specialty = self.model_to_dict(model, 'user_specialty') - del 
user_specialty['is_cleaned'] + user_specialty = self.model_to_dict(model, "user_specialty") + del user_specialty["is_cleaned"] - self.assertEqual(self.bc.database.list_of('certificate.UserSpecialty'), [user_specialty]) + self.assertEqual(self.bc.database.list_of("certificate.UserSpecialty"), [user_specialty]) - user_specialty = self.bc.database.get('certificate.UserSpecialty', 1, dict=False) - self.assertEqual(signals.user_specialty_saved.send_robust.call_args_list, [ - call(instance=user_specialty, sender=user_specialty.__class__), - ]) + user_specialty = self.bc.database.get("certificate.UserSpecialty", 1, dict=False) + self.assertEqual( + signals.user_specialty_saved.send_robust.call_args_list, + [ + call(instance=user_specialty, sender=user_specialty.__class__), + ], + ) diff --git a/breathecode/certificate/tests/actions/tests_remove_certificate_screenshot.py b/breathecode/certificate/tests/actions/tests_remove_certificate_screenshot.py index b2615b679..c532979c0 100644 --- a/breathecode/certificate/tests/actions/tests_remove_certificate_screenshot.py +++ b/breathecode/certificate/tests/actions/tests_remove_certificate_screenshot.py @@ -1,6 +1,7 @@ """ Tasks tests """ + from unittest.mock import MagicMock, PropertyMock, call, patch import pytest @@ -15,21 +16,23 @@ @pytest.fixture(autouse=True) def setup(db, monkeypatch: pytest.MonkeyPatch) -> None: - monkeypatch.setattr('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) - monkeypatch.setattr('breathecode.services.google_cloud.Storage.__init__', MagicMock(return_value=None)) - monkeypatch.setattr('breathecode.services.google_cloud.Storage.client', PropertyMock(), raising=False) - monkeypatch.setattr('breathecode.services.google_cloud.File.__init__', MagicMock(return_value=None)) - monkeypatch.setattr('breathecode.services.google_cloud.File.bucket', PropertyMock(), raising=False) - monkeypatch.setattr('breathecode.services.google_cloud.File.file_name', PropertyMock(), raising=False) - monkeypatch.setattr('breathecode.services.google_cloud.File.blob', PropertyMock(return_value=1), raising=False) - monkeypatch.setattr('breathecode.services.google_cloud.File.upload', MagicMock()) - monkeypatch.setattr('breathecode.services.google_cloud.File.delete', MagicMock()) - monkeypatch.setattr('breathecode.services.google_cloud.File.url', - MagicMock(return_value='https://xyz/hardcoded_url')) + monkeypatch.setattr("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) + monkeypatch.setattr("breathecode.services.google_cloud.Storage.__init__", MagicMock(return_value=None)) + monkeypatch.setattr("breathecode.services.google_cloud.Storage.client", PropertyMock(), raising=False) + monkeypatch.setattr("breathecode.services.google_cloud.File.__init__", MagicMock(return_value=None)) + monkeypatch.setattr("breathecode.services.google_cloud.File.bucket", PropertyMock(), raising=False) + monkeypatch.setattr("breathecode.services.google_cloud.File.file_name", PropertyMock(), raising=False) + monkeypatch.setattr("breathecode.services.google_cloud.File.blob", PropertyMock(return_value=1), raising=False) + monkeypatch.setattr("breathecode.services.google_cloud.File.upload", MagicMock()) + monkeypatch.setattr("breathecode.services.google_cloud.File.delete", MagicMock()) + monkeypatch.setattr( + "breathecode.services.google_cloud.File.url", MagicMock(return_value="https://xyz/hardcoded_url") + ) class ActionCertificateScreenshotTestCase(CertificateTestCase): """Tests action remove_certificate_screenshot""" + 
""" 🔽🔽🔽 UserSpecialty not exists """ @@ -37,10 +40,10 @@ class ActionCertificateScreenshotTestCase(CertificateTestCase): def test_remove_certificate_screenshot_with_invalid_id(self): """remove_certificate_screenshot don't call open in development environment""" - with self.assertRaisesMessage(UserSpecialty.DoesNotExist, 'UserSpecialty matching query does not exist.'): + with self.assertRaisesMessage(UserSpecialty.DoesNotExist, "UserSpecialty matching query does not exist."): remove_certificate_screenshot(1) - self.assertEqual(self.bc.database.list_of('certificate.UserSpecialty'), []) + self.assertEqual(self.bc.database.list_of("certificate.UserSpecialty"), []) self.assertEqual(signals.user_specialty_saved.send_robust.call_args_list, []) self.assertEqual(File.delete.call_args_list, []) @@ -52,21 +55,27 @@ def test_remove_certificate_screenshot_with_invalid_id(self): def test_remove_certificate_screenshot__with_preview_url_as_empty_string(self): """remove_certificate_screenshot don't call open in development environment""" - user_specialty = {'preview_url': ''} + user_specialty = {"preview_url": ""} model = self.generate_models(user_specialty=user_specialty) result = remove_certificate_screenshot(1) self.assertFalse(result) - self.assertEqual(self.bc.database.list_of('certificate.UserSpecialty'), [ - { - **self.remove_is_clean_for_one_item(self.bc.format.to_dict(model.user_specialty)), - }, - ]) - - self.assertEqual(signals.user_specialty_saved.send_robust.call_args_list, [ - call(instance=model.user_specialty, sender=model.user_specialty.__class__), - ]) + self.assertEqual( + self.bc.database.list_of("certificate.UserSpecialty"), + [ + { + **self.remove_is_clean_for_one_item(self.bc.format.to_dict(model.user_specialty)), + }, + ], + ) + + self.assertEqual( + signals.user_specialty_saved.send_robust.call_args_list, + [ + call(instance=model.user_specialty, sender=model.user_specialty.__class__), + ], + ) self.assertEqual(File.delete.call_args_list, []) """ @@ -76,21 +85,27 @@ def test_remove_certificate_screenshot__with_preview_url_as_empty_string(self): def test_remove_certificate_screenshot__with_preview_url_as_none(self): """remove_certificate_screenshot don't call open in development environment""" - user_specialty = {'preview_url': None} + user_specialty = {"preview_url": None} model = self.generate_models(user_specialty=user_specialty) result = remove_certificate_screenshot(1) self.assertFalse(result) - self.assertEqual(self.bc.database.list_of('certificate.UserSpecialty'), [ - { - **self.remove_is_clean_for_one_item(self.bc.format.to_dict(model.user_specialty)), - }, - ]) - - self.assertEqual(signals.user_specialty_saved.send_robust.call_args_list, [ - call(instance=model.user_specialty, sender=model.user_specialty.__class__), - ]) + self.assertEqual( + self.bc.database.list_of("certificate.UserSpecialty"), + [ + { + **self.remove_is_clean_for_one_item(self.bc.format.to_dict(model.user_specialty)), + }, + ], + ) + + self.assertEqual( + signals.user_specialty_saved.send_robust.call_args_list, + [ + call(instance=model.user_specialty, sender=model.user_specialty.__class__), + ], + ) self.assertEqual(File.delete.call_args_list, []) """ @@ -100,18 +115,21 @@ def test_remove_certificate_screenshot__with_preview_url_as_none(self): def test_remove_certificate_screenshot__with_a_properly_preview_url(self): """remove_certificate_screenshot don't call open in development environment""" - user_specialty = {'preview_url': 'https://xyz/hardcoded_url'} + user_specialty = {"preview_url": 
"https://xyz/hardcoded_url"} model = self.generate_models(user_specialty=user_specialty) result = remove_certificate_screenshot(1) self.assertTrue(result) - self.assertEqual(self.bc.database.list_of('certificate.UserSpecialty'), [ - { - **self.remove_is_clean_for_one_item(self.bc.format.to_dict(model.user_specialty)), - 'preview_url': '', - }, - ]) + self.assertEqual( + self.bc.database.list_of("certificate.UserSpecialty"), + [ + { + **self.remove_is_clean_for_one_item(self.bc.format.to_dict(model.user_specialty)), + "preview_url": "", + }, + ], + ) self.assertEqual( signals.user_specialty_saved.send_robust.call_args_list, @@ -120,5 +138,6 @@ def test_remove_certificate_screenshot__with_a_properly_preview_url(self): call(instance=model.user_specialty, sender=model.user_specialty.__class__), # Save call(instance=model.user_specialty, sender=model.user_specialty.__class__), - ]) + ], + ) self.assertEqual(File.delete.call_args_list, [call()]) diff --git a/breathecode/certificate/tests/management/commands/tests_set_default_issued_at.py b/breathecode/certificate/tests/management/commands/tests_set_default_issued_at.py index db83feb22..7a3422543 100644 --- a/breathecode/certificate/tests/management/commands/tests_set_default_issued_at.py +++ b/breathecode/certificate/tests/management/commands/tests_set_default_issued_at.py @@ -1,6 +1,7 @@ """ Tasks tests """ + from unittest.mock import patch, call, MagicMock from ...mixins import CertificateTestCase @@ -9,10 +10,11 @@ class SetDefaultIssuedAtTestCase(CertificateTestCase): - @patch('breathecode.certificate.actions.certificate_set_default_issued_at', MagicMock()) + @patch("breathecode.certificate.actions.certificate_set_default_issued_at", MagicMock()) def test_default_issued_at__checking_function_is_being_called(self): """certificate_screenshot don't call open in development environment""" from breathecode.certificate.actions import certificate_set_default_issued_at + instance = Command() result = instance.handle() diff --git a/breathecode/certificate/tests/mixins/__init__.py b/breathecode/certificate/tests/mixins/__init__.py index df1aaea3e..95c49e68a 100644 --- a/breathecode/certificate/tests/mixins/__init__.py +++ b/breathecode/certificate/tests/mixins/__init__.py @@ -1,5 +1,6 @@ """ Mixins """ + from .development_environment import DevelopmentEnvironment # noqa: F401 from .certificate_test_case import CertificateTestCase # noqa: F401 diff --git a/breathecode/certificate/tests/mixins/certificate_test_case.py b/breathecode/certificate/tests/mixins/certificate_test_case.py index b84ee7e3c..92f5abac1 100644 --- a/breathecode/certificate/tests/mixins/certificate_test_case.py +++ b/breathecode/certificate/tests/mixins/certificate_test_case.py @@ -1,14 +1,22 @@ """ Collections of mixins used to login in authorize microservice """ + import hashlib from rest_framework.test import APITestCase -from breathecode.tests.mixins import (GenerateModelsMixin, CacheMixin, TokenMixin, GenerateQueriesMixin, DatetimeMixin, - BreathecodeMixin) +from breathecode.tests.mixins import ( + GenerateModelsMixin, + CacheMixin, + TokenMixin, + GenerateQueriesMixin, + DatetimeMixin, + BreathecodeMixin, +) -class CertificateTestCase(APITestCase, GenerateModelsMixin, CacheMixin, TokenMixin, GenerateQueriesMixin, DatetimeMixin, - BreathecodeMixin): +class CertificateTestCase( + APITestCase, GenerateModelsMixin, CacheMixin, TokenMixin, GenerateQueriesMixin, DatetimeMixin, BreathecodeMixin +): """CertificateTestCase with auth methods""" def setUp(self): @@ -25,7 +33,7 @@ def 
clear_preview_url(self, dicts: list[dict]): Clear preview url to evit one diff when run test in all tests and just certificate tests """ - return [{**item, 'preview_url': None} for item in dicts] + return [{**item, "preview_url": None} for item in dicts] def clear_keys(self, dicts, keys): _d = {} @@ -36,28 +44,29 @@ def clear_keys(self, dicts, keys): def remove_is_clean(self, items): for item in items: - if 'is_cleaned' in item: - del item['is_cleaned'] + if "is_cleaned" in item: + del item["is_cleaned"] return items def remove_is_clean_for_one_item(self, item): - if 'is_cleaned' in item: - del item['is_cleaned'] + if "is_cleaned" in item: + del item["is_cleaned"] return item def generate_update_hash(self, instance): kwargs = { - 'signed_by': instance.signed_by, - 'signed_by_role': instance.signed_by_role, - 'status': instance.status, - 'layout': instance.layout, - 'expires_at': instance.expires_at, - 'issued_at': instance.issued_at, + "signed_by": instance.signed_by, + "signed_by_role": instance.signed_by_role, + "status": instance.status, + "layout": instance.layout, + "expires_at": instance.expires_at, + "issued_at": instance.issued_at, } - important_fields = ['signed_by', 'signed_by_role', 'status', 'layout', 'expires_at', 'issued_at'] + important_fields = ["signed_by", "signed_by_role", "status", "layout", "expires_at", "issued_at"] - important_values = '-'.join( - [str(kwargs.get(field) if field in kwargs else None) for field in sorted(important_fields)]) + important_values = "-".join( + [str(kwargs.get(field) if field in kwargs else None) for field in sorted(important_fields)] + ) - return hashlib.sha1(important_values.encode('UTF-8')).hexdigest() + return hashlib.sha1(important_values.encode("UTF-8")).hexdigest() diff --git a/breathecode/certificate/tests/mixins/development_environment.py b/breathecode/certificate/tests/mixins/development_environment.py index e24704161..32f0c5ccf 100644 --- a/breathecode/certificate/tests/mixins/development_environment.py +++ b/breathecode/certificate/tests/mixins/development_environment.py @@ -1,11 +1,12 @@ """ Apply ENV=development """ + import os -class DevelopmentEnvironment(): +class DevelopmentEnvironment: """Apply env""" def __init__(self): - os.environ['ENV'] = 'development' + os.environ["ENV"] = "development" diff --git a/breathecode/certificate/tests/mocks/actions/__init__.py b/breathecode/certificate/tests/mocks/actions/__init__.py index 4b603bd32..846dbdb1e 100644 --- a/breathecode/certificate/tests/mocks/actions/__init__.py +++ b/breathecode/certificate/tests/mocks/actions/__init__.py @@ -1,34 +1,35 @@ """ Google Cloud Storage Mocks """ + from unittest.mock import Mock from .certificate_screenshot_mock import certificate_screenshot_mock from .generate_certificate_mock import generate_certificate_mock from .remove_certificate_screenshot_mock import remove_certificate_screenshot_mock ACTIONS_PATH = { - 'certificate_screenshot': 'breathecode.certificate.actions.certificate_screenshot', - 'generate_certificate': 'breathecode.certificate.actions.generate_certificate', - 'remove_certificate_screenshot': 'breathecode.certificate.actions.remove_certificate_screenshot' + "certificate_screenshot": "breathecode.certificate.actions.certificate_screenshot", + "generate_certificate": "breathecode.certificate.actions.generate_certificate", + "remove_certificate_screenshot": "breathecode.certificate.actions.remove_certificate_screenshot", } ACTIONS_INSTANCES = { - 'certificate_screenshot': Mock(side_effect=certificate_screenshot_mock), - 
'generate_certificate': Mock(side_effect=generate_certificate_mock), - 'remove_certificate_screenshot': Mock(side_effect=remove_certificate_screenshot_mock) + "certificate_screenshot": Mock(side_effect=certificate_screenshot_mock), + "generate_certificate": Mock(side_effect=generate_certificate_mock), + "remove_certificate_screenshot": Mock(side_effect=remove_certificate_screenshot_mock), } def apply_certificate_screenshot_mock(): """Apply certificate_screenshot Mock""" - return ACTIONS_INSTANCES['certificate_screenshot'] + return ACTIONS_INSTANCES["certificate_screenshot"] def apply_generate_certificate_mock(): """Apply generate_certificate Mock""" - return ACTIONS_INSTANCES['generate_certificate'] + return ACTIONS_INSTANCES["generate_certificate"] def apply_remove_certificate_screenshot_mock(): """Apply remove_certificate_screenshot Mock""" - return ACTIONS_INSTANCES['remove_certificate_screenshot'] + return ACTIONS_INSTANCES["remove_certificate_screenshot"] diff --git a/breathecode/certificate/tests/mocks/credentials/__init__.py b/breathecode/certificate/tests/mocks/credentials/__init__.py index 80ea4ffe6..88809dcaf 100644 --- a/breathecode/certificate/tests/mocks/credentials/__init__.py +++ b/breathecode/certificate/tests/mocks/credentials/__init__.py @@ -1,16 +1,17 @@ """ Google Cloud Storage Mocks """ + from unittest.mock import Mock from .resolve_credentials_mock import resolve_credentials_mock CREDENTIALS_PATH = { - 'resolve_credentials': 'breathecode.services.google_cloud.credentials.resolve_credentials', + "resolve_credentials": "breathecode.services.google_cloud.credentials.resolve_credentials", } -CREDENTIALS_INSTANCES = {'resolve_credentials': Mock(side_effect=resolve_credentials_mock)} +CREDENTIALS_INSTANCES = {"resolve_credentials": Mock(side_effect=resolve_credentials_mock)} def apply_resolve_credentials_mock(): """Apply Resolve Credentials Mock""" - return CREDENTIALS_INSTANCES['resolve_credentials'] + return CREDENTIALS_INSTANCES["resolve_credentials"] diff --git a/breathecode/certificate/tests/mocks/google_cloud_storage/__init__.py b/breathecode/certificate/tests/mocks/google_cloud_storage/__init__.py index e5a11493f..ea771cc60 100644 --- a/breathecode/certificate/tests/mocks/google_cloud_storage/__init__.py +++ b/breathecode/certificate/tests/mocks/google_cloud_storage/__init__.py @@ -1,34 +1,35 @@ """ Google Cloud Storage Mocks """ + from unittest.mock import Mock from .blob_mock import BlobMock from .bucket_mock import BucketMock from .client_mock import ClientMock GOOGLE_CLOUD_PATH = { - 'client': 'google.cloud.storage.Client', - 'bucket': 'google.cloud.storage.Bucket', - 'blob': 'google.cloud.storage.Blob' + "client": "google.cloud.storage.Client", + "bucket": "google.cloud.storage.Bucket", + "blob": "google.cloud.storage.Blob", } GOOGLE_CLOUD_INSTANCES = { - 'client': Mock(side_effect=ClientMock), - 'bucket': Mock(side_effect=BucketMock), - 'blob': Mock(side_effect=BlobMock), + "client": Mock(side_effect=ClientMock), + "bucket": Mock(side_effect=BucketMock), + "blob": Mock(side_effect=BlobMock), } def apply_google_cloud_blob_mock(): """Apply Storage Blob Mock""" - return GOOGLE_CLOUD_INSTANCES['blob'] + return GOOGLE_CLOUD_INSTANCES["blob"] def apply_google_cloud_bucket_mock(): """Apply Storage Bucket Mock""" - return GOOGLE_CLOUD_INSTANCES['bucket'] + return GOOGLE_CLOUD_INSTANCES["bucket"] def apply_google_cloud_client_mock(): """Apply Storage Client Mock""" - return GOOGLE_CLOUD_INSTANCES['client'] + return GOOGLE_CLOUD_INSTANCES["client"] diff --git 
a/breathecode/certificate/tests/mocks/google_cloud_storage/blob_mock.py b/breathecode/certificate/tests/mocks/google_cloud_storage/blob_mock.py index a1bbcdb30..e9664ef96 100644 --- a/breathecode/certificate/tests/mocks/google_cloud_storage/blob_mock.py +++ b/breathecode/certificate/tests/mocks/google_cloud_storage/blob_mock.py @@ -1,4 +1,4 @@ -class BlobMock(): +class BlobMock: public_url = None name = None content = None @@ -13,7 +13,7 @@ def upload_from_string(self, data): return None def make_public(self): - self.public_url = f'https://storage.cloud.google.com/{self.bucket.name}/{self.name}' + self.public_url = f"https://storage.cloud.google.com/{self.bucket.name}/{self.name}" def delete(self): return None diff --git a/breathecode/certificate/tests/mocks/google_cloud_storage/bucket_mock.py b/breathecode/certificate/tests/mocks/google_cloud_storage/bucket_mock.py index 2b8659cc7..72ca71b90 100644 --- a/breathecode/certificate/tests/mocks/google_cloud_storage/bucket_mock.py +++ b/breathecode/certificate/tests/mocks/google_cloud_storage/bucket_mock.py @@ -1,7 +1,7 @@ from .blob_mock import BlobMock -class BucketMock(): +class BucketMock: name = None bucket = None files = {} diff --git a/breathecode/certificate/tests/mocks/google_cloud_storage/client_mock.py b/breathecode/certificate/tests/mocks/google_cloud_storage/client_mock.py index 900342635..5ac5cfc4e 100644 --- a/breathecode/certificate/tests/mocks/google_cloud_storage/client_mock.py +++ b/breathecode/certificate/tests/mocks/google_cloud_storage/client_mock.py @@ -1,7 +1,7 @@ from .bucket_mock import BucketMock -class ClientMock(): +class ClientMock: def bucket(self, bucket_name): return BucketMock(bucket_name) diff --git a/breathecode/certificate/tests/mocks/screenshotmachine/__init__.py b/breathecode/certificate/tests/mocks/screenshotmachine/__init__.py index c191dc6e7..f7a4f8064 100644 --- a/breathecode/certificate/tests/mocks/screenshotmachine/__init__.py +++ b/breathecode/certificate/tests/mocks/screenshotmachine/__init__.py @@ -1,16 +1,17 @@ """ Google Cloud Storage Mocks """ + from unittest.mock import Mock from .requests_mock import get_mock SCREENSHOTMACHINE_PATH = { - 'get': 'requests.get', + "get": "requests.get", } -SCREENSHOTMACHINE_INSTANCES = {'get': Mock(side_effect=get_mock)} +SCREENSHOTMACHINE_INSTANCES = {"get": Mock(side_effect=get_mock)} def apply_screenshotmachine_requests_get_mock(): """Apply Storage Blob Mock""" - return SCREENSHOTMACHINE_INSTANCES['get'] + return SCREENSHOTMACHINE_INSTANCES["get"] diff --git a/breathecode/certificate/tests/mocks/screenshotmachine/requests_mock.py b/breathecode/certificate/tests/mocks/screenshotmachine/requests_mock.py index aa5e623a9..886ba0494 100644 --- a/breathecode/certificate/tests/mocks/screenshotmachine/requests_mock.py +++ b/breathecode/certificate/tests/mocks/screenshotmachine/requests_mock.py @@ -3,13 +3,14 @@ """ -class ResponseMock(): +class ResponseMock: """Simutate Response to be used by mocks""" + status_code = None data = None content = None - def __init__(self, status_code=200, data=''): + def __init__(self, status_code=200, data=""): self.status_code = status_code if isinstance(data, str): @@ -24,4 +25,4 @@ def json(self) -> dict: def get_mock(url: str, stream=False): """Requests get mock""" - return ResponseMock(data='ok', status_code=200) + return ResponseMock(data="ok", status_code=200) diff --git a/breathecode/certificate/tests/signals/tests_user_specialty_saved.py b/breathecode/certificate/tests/signals/tests_user_specialty_saved.py index 
752c5b87c..ad5dd59d1 100644 --- a/breathecode/certificate/tests/signals/tests_user_specialty_saved.py +++ b/breathecode/certificate/tests/signals/tests_user_specialty_saved.py @@ -8,27 +8,28 @@ def remove_is_clean_for_one_item(item): - if 'is_cleaned' in item: - del item['is_cleaned'] + if "is_cleaned" in item: + del item["is_cleaned"] return item def generate_update_hash(instance): kwargs = { - 'signed_by': instance.signed_by, - 'signed_by_role': instance.signed_by_role, - 'status': instance.status, - 'layout': instance.layout, - 'expires_at': instance.expires_at, - 'issued_at': instance.issued_at, + "signed_by": instance.signed_by, + "signed_by_role": instance.signed_by_role, + "status": instance.status, + "layout": instance.layout, + "expires_at": instance.expires_at, + "issued_at": instance.issued_at, } - important_fields = ['signed_by', 'signed_by_role', 'status', 'layout', 'expires_at', 'issued_at'] + important_fields = ["signed_by", "signed_by_role", "status", "layout", "expires_at", "issued_at"] - important_values = '-'.join( - [str(kwargs.get(field) if field in kwargs else None) for field in sorted(important_fields)]) + important_values = "-".join( + [str(kwargs.get(field) if field in kwargs else None) for field in sorted(important_fields)] + ) - return hashlib.sha1(important_values.encode('UTF-8')).hexdigest() + return hashlib.sha1(important_values.encode("UTF-8")).hexdigest() class TestAcademyEvent(LegacyAPITestCase): @@ -36,12 +37,12 @@ class TestAcademyEvent(LegacyAPITestCase): 🔽🔽🔽 Status ERROR """ - @patch('breathecode.certificate.tasks.reset_screenshot.delay', MagicMock()) - @patch('breathecode.certificate.tasks.take_screenshot.delay', MagicMock()) + @patch("breathecode.certificate.tasks.reset_screenshot.delay", MagicMock()) + @patch("breathecode.certificate.tasks.take_screenshot.delay", MagicMock()) def test_user_specialty_saved__status_error(self, enable_signals): - enable_signals('breathecode.certificate.signals.user_specialty_saved') + enable_signals("breathecode.certificate.signals.user_specialty_saved") - user_specialty = {'status': 'ERROR', 'update_hash': '⬛🌷'} + user_specialty = {"status": "ERROR", "update_hash": "⬛🌷"} model = self.bc.database.create(user_specialty=user_specialty) user_specialty_db = remove_is_clean_for_one_item(self.bc.format.to_dict(model.user_specialty)) @@ -50,18 +51,18 @@ def test_user_specialty_saved__status_error(self, enable_signals): assert tasks.reset_screenshot.delay.call_args_list == [] assert tasks.take_screenshot.delay.call_args_list == [] - assert self.bc.database.list_of('certificate.UserSpecialty') == [user_specialty_db] + assert self.bc.database.list_of("certificate.UserSpecialty") == [user_specialty_db] """ 🔽🔽🔽 Status PENDING """ - @patch('breathecode.certificate.tasks.reset_screenshot.delay', MagicMock()) - @patch('breathecode.certificate.tasks.take_screenshot.delay', MagicMock()) + @patch("breathecode.certificate.tasks.reset_screenshot.delay", MagicMock()) + @patch("breathecode.certificate.tasks.take_screenshot.delay", MagicMock()) def test_user_specialty_saved__status_pending(self, enable_signals): - enable_signals('breathecode.certificate.signals.user_specialty_saved') + enable_signals("breathecode.certificate.signals.user_specialty_saved") - user_specialty = {'status': 'PENDING', 'update_hash': '⬛🌷'} + user_specialty = {"status": "PENDING", "update_hash": "⬛🌷"} model = self.bc.database.create(user_specialty=user_specialty) user_specialty_db = remove_is_clean_for_one_item(self.bc.format.to_dict(model.user_specialty)) @@ -70,18 
+71,18 @@ def test_user_specialty_saved__status_pending(self, enable_signals): assert tasks.reset_screenshot.delay.call_args_list == [] assert tasks.take_screenshot.delay.call_args_list == [] - assert self.bc.database.list_of('certificate.UserSpecialty') == [user_specialty_db] + assert self.bc.database.list_of("certificate.UserSpecialty") == [user_specialty_db] """ 🔽🔽🔽 Status PERSISTED and preview_url is empty """ - @patch('breathecode.certificate.tasks.reset_screenshot.delay', MagicMock()) - @patch('breathecode.certificate.tasks.take_screenshot.delay', MagicMock()) + @patch("breathecode.certificate.tasks.reset_screenshot.delay", MagicMock()) + @patch("breathecode.certificate.tasks.take_screenshot.delay", MagicMock()) def test_user_specialty_saved__status_persisted__preview_url_is_empty(self, enable_signals): - enable_signals('breathecode.certificate.signals.user_specialty_saved') + enable_signals("breathecode.certificate.signals.user_specialty_saved") - user_specialty = {'status': 'PERSISTED', 'preview_url': '', 'update_hash': '⬛🌷'} + user_specialty = {"status": "PERSISTED", "preview_url": "", "update_hash": "⬛🌷"} model = self.bc.database.create(user_specialty=user_specialty) user_specialty_db = remove_is_clean_for_one_item(self.bc.format.to_dict(model.user_specialty)) @@ -90,73 +91,74 @@ def test_user_specialty_saved__status_persisted__preview_url_is_empty(self, enab assert tasks.reset_screenshot.delay.call_args_list == [] assert tasks.take_screenshot.delay.call_args_list == [call(1)] - assert self.bc.database.list_of('certificate.UserSpecialty') == [user_specialty_db] + assert self.bc.database.list_of("certificate.UserSpecialty") == [user_specialty_db] """ 🔽🔽🔽 Status PERSISTED and preview_url is empty, changing signed_by """ - @patch('breathecode.certificate.tasks.reset_screenshot.delay', MagicMock()) - @patch('breathecode.certificate.tasks.take_screenshot.delay', MagicMock()) + @patch("breathecode.certificate.tasks.reset_screenshot.delay", MagicMock()) + @patch("breathecode.certificate.tasks.take_screenshot.delay", MagicMock()) def test_user_specialty_saved__status_persisted__preview_url_is_empty__changing_signed_by(self, enable_signals): - enable_signals('breathecode.certificate.signals.user_specialty_saved') + enable_signals("breathecode.certificate.signals.user_specialty_saved") - user_specialty = {'status': 'PERSISTED', 'preview_url': '', 'update_hash': '⬛🌷'} + user_specialty = {"status": "PERSISTED", "preview_url": "", "update_hash": "⬛🌷"} model = self.bc.database.create(user_specialty=user_specialty) user_specialty_db = remove_is_clean_for_one_item(self.bc.format.to_dict(model.user_specialty)) - model.user_specialty.signed_by = 'GOD 🤷‍♂️' + model.user_specialty.signed_by = "GOD 🤷‍♂️" model.user_specialty.save() assert tasks.reset_screenshot.delay.call_args_list == [] assert tasks.take_screenshot.delay.call_args_list == [call(1), call(1)] - assert self.bc.database.list_of('certificate.UserSpecialty') == [{ - **user_specialty_db, - 'signed_by': - 'GOD 🤷‍♂️', - 'update_hash': - generate_update_hash(model.user_specialty), - }] + assert self.bc.database.list_of("certificate.UserSpecialty") == [ + { + **user_specialty_db, + "signed_by": "GOD 🤷‍♂️", + "update_hash": generate_update_hash(model.user_specialty), + } + ] """ 🔽🔽🔽 Status PERSISTED and preview_url is empty, changing signed_by_role """ - @patch('breathecode.certificate.tasks.reset_screenshot.delay', MagicMock()) - @patch('breathecode.certificate.tasks.take_screenshot.delay', MagicMock()) + 
@patch("breathecode.certificate.tasks.reset_screenshot.delay", MagicMock()) + @patch("breathecode.certificate.tasks.take_screenshot.delay", MagicMock()) def test_user_specialty_saved__status_persisted__preview_url_is_empty__changing_signed_by_role( - self, enable_signals): - enable_signals('breathecode.certificate.signals.user_specialty_saved') + self, enable_signals + ): + enable_signals("breathecode.certificate.signals.user_specialty_saved") - user_specialty = {'status': 'PERSISTED', 'preview_url': '', 'update_hash': '⬛🌷'} + user_specialty = {"status": "PERSISTED", "preview_url": "", "update_hash": "⬛🌷"} model = self.bc.database.create(user_specialty=user_specialty) user_specialty_db = remove_is_clean_for_one_item(self.bc.format.to_dict(model.user_specialty)) - model.user_specialty.signed_by_role = 'GOD 🤷‍♂️' + model.user_specialty.signed_by_role = "GOD 🤷‍♂️" model.user_specialty.save() assert tasks.reset_screenshot.delay.call_args_list == [] assert tasks.take_screenshot.delay.call_args_list == [call(1), call(1)] - assert self.bc.database.list_of('certificate.UserSpecialty') == [{ - **user_specialty_db, - 'signed_by_role': - 'GOD 🤷‍♂️', - 'update_hash': - generate_update_hash(model.user_specialty), - }] + assert self.bc.database.list_of("certificate.UserSpecialty") == [ + { + **user_specialty_db, + "signed_by_role": "GOD 🤷‍♂️", + "update_hash": generate_update_hash(model.user_specialty), + } + ] """ 🔽🔽🔽 Status PERSISTED and preview_url is empty, changing layout """ - @patch('breathecode.certificate.tasks.reset_screenshot.delay', MagicMock()) - @patch('breathecode.certificate.tasks.take_screenshot.delay', MagicMock()) + @patch("breathecode.certificate.tasks.reset_screenshot.delay", MagicMock()) + @patch("breathecode.certificate.tasks.take_screenshot.delay", MagicMock()) def test_user_specialty_saved__status_persisted__preview_url_is_empty__changing_layout(self, enable_signals): - enable_signals('breathecode.certificate.signals.user_specialty_saved') + enable_signals("breathecode.certificate.signals.user_specialty_saved") - user_specialty = {'status': 'PERSISTED', 'preview_url': '', 'update_hash': '⬛🌷'} + user_specialty = {"status": "PERSISTED", "preview_url": "", "update_hash": "⬛🌷"} model1 = self.bc.database.create(user_specialty=user_specialty) model2 = self.bc.database.create(layout_design=1) user_specialty_db = remove_is_clean_for_one_item(self.bc.format.to_dict(model1.user_specialty)) @@ -167,24 +169,24 @@ def test_user_specialty_saved__status_persisted__preview_url_is_empty__changing_ assert tasks.reset_screenshot.delay.call_args_list == [] assert tasks.take_screenshot.delay.call_args_list == [call(1), call(1)] - assert self.bc.database.list_of('certificate.UserSpecialty') == [{ - **user_specialty_db, - 'layout_id': - 1, - 'update_hash': - generate_update_hash(model1.user_specialty), - }] + assert self.bc.database.list_of("certificate.UserSpecialty") == [ + { + **user_specialty_db, + "layout_id": 1, + "update_hash": generate_update_hash(model1.user_specialty), + } + ] """ 🔽🔽🔽 Status PERSISTED and preview_url is empty, changing expires_at """ - @patch('breathecode.certificate.tasks.reset_screenshot.delay', MagicMock()) - @patch('breathecode.certificate.tasks.take_screenshot.delay', MagicMock()) + @patch("breathecode.certificate.tasks.reset_screenshot.delay", MagicMock()) + @patch("breathecode.certificate.tasks.take_screenshot.delay", MagicMock()) def test_user_specialty_saved__status_persisted__preview_url_is_empty__changing_expires_at(self, enable_signals): - 
enable_signals('breathecode.certificate.signals.user_specialty_saved') + enable_signals("breathecode.certificate.signals.user_specialty_saved") - user_specialty = {'status': 'PERSISTED', 'preview_url': '', 'update_hash': '⬛🌷'} + user_specialty = {"status": "PERSISTED", "preview_url": "", "update_hash": "⬛🌷"} model = self.bc.database.create(user_specialty=user_specialty) user_specialty_db = remove_is_clean_for_one_item(self.bc.format.to_dict(model.user_specialty)) @@ -195,24 +197,24 @@ def test_user_specialty_saved__status_persisted__preview_url_is_empty__changing_ assert tasks.reset_screenshot.delay.call_args_list == [] assert tasks.take_screenshot.delay.call_args_list == [call(1), call(1)] - assert self.bc.database.list_of('certificate.UserSpecialty') == [{ - **user_specialty_db, - 'expires_at': - utc_now, - 'update_hash': - generate_update_hash(model.user_specialty), - }] + assert self.bc.database.list_of("certificate.UserSpecialty") == [ + { + **user_specialty_db, + "expires_at": utc_now, + "update_hash": generate_update_hash(model.user_specialty), + } + ] """ 🔽🔽🔽 Status PERSISTED and preview_url is empty, changing issued_at """ - @patch('breathecode.certificate.tasks.reset_screenshot.delay', MagicMock()) - @patch('breathecode.certificate.tasks.take_screenshot.delay', MagicMock()) + @patch("breathecode.certificate.tasks.reset_screenshot.delay", MagicMock()) + @patch("breathecode.certificate.tasks.take_screenshot.delay", MagicMock()) def test_user_specialty_saved__status_persisted__preview_url_is_empty__changing_issued_at(self, enable_signals): - enable_signals('breathecode.certificate.signals.user_specialty_saved') + enable_signals("breathecode.certificate.signals.user_specialty_saved") - user_specialty = {'status': 'PERSISTED', 'preview_url': '', 'update_hash': '⬛🌷'} + user_specialty = {"status": "PERSISTED", "preview_url": "", "update_hash": "⬛🌷"} model = self.bc.database.create(user_specialty=user_specialty) user_specialty_db = remove_is_clean_for_one_item(self.bc.format.to_dict(model.user_specialty)) @@ -223,24 +225,24 @@ def test_user_specialty_saved__status_persisted__preview_url_is_empty__changing_ assert tasks.reset_screenshot.delay.call_args_list == [] assert tasks.take_screenshot.delay.call_args_list == [call(1), call(1)] - assert self.bc.database.list_of('certificate.UserSpecialty') == [{ - **user_specialty_db, - 'issued_at': - utc_now, - 'update_hash': - generate_update_hash(model.user_specialty), - }] + assert self.bc.database.list_of("certificate.UserSpecialty") == [ + { + **user_specialty_db, + "issued_at": utc_now, + "update_hash": generate_update_hash(model.user_specialty), + } + ] """ 🔽🔽🔽 Status PERSISTED and preview_url set """ - @patch('breathecode.certificate.tasks.reset_screenshot.delay', MagicMock()) - @patch('breathecode.certificate.tasks.take_screenshot.delay', MagicMock()) + @patch("breathecode.certificate.tasks.reset_screenshot.delay", MagicMock()) + @patch("breathecode.certificate.tasks.take_screenshot.delay", MagicMock()) def test_user_specialty_saved__status_persisted__preview_url_set(self, enable_signals): - enable_signals('breathecode.certificate.signals.user_specialty_saved') + enable_signals("breathecode.certificate.signals.user_specialty_saved") - user_specialty = {'status': 'PERSISTED', 'preview_url': 'GOD 🤷‍♂️', 'update_hash': '⬛🌷'} + user_specialty = {"status": "PERSISTED", "preview_url": "GOD 🤷‍♂️", "update_hash": "⬛🌷"} model = self.bc.database.create(user_specialty=user_specialty) user_specialty_db = 
remove_is_clean_for_one_item(self.bc.format.to_dict(model.user_specialty)) @@ -249,72 +251,72 @@ def test_user_specialty_saved__status_persisted__preview_url_set(self, enable_si assert tasks.reset_screenshot.delay.call_args_list == [call(1)] assert tasks.take_screenshot.delay.call_args_list == [] - assert self.bc.database.list_of('certificate.UserSpecialty') == [user_specialty_db] + assert self.bc.database.list_of("certificate.UserSpecialty") == [user_specialty_db] """ 🔽🔽🔽 Status PERSISTED and preview_url set, changing signed_by """ - @patch('breathecode.certificate.tasks.reset_screenshot.delay', MagicMock()) - @patch('breathecode.certificate.tasks.take_screenshot.delay', MagicMock()) + @patch("breathecode.certificate.tasks.reset_screenshot.delay", MagicMock()) + @patch("breathecode.certificate.tasks.take_screenshot.delay", MagicMock()) def test_user_specialty_saved__status_persisted__preview_url_set__changing_signed_by(self, enable_signals): - enable_signals('breathecode.certificate.signals.user_specialty_saved') + enable_signals("breathecode.certificate.signals.user_specialty_saved") - user_specialty = {'status': 'PERSISTED', 'preview_url': 'GOD 🤷‍♂️', 'update_hash': '⬛🌷'} + user_specialty = {"status": "PERSISTED", "preview_url": "GOD 🤷‍♂️", "update_hash": "⬛🌷"} model = self.bc.database.create(user_specialty=user_specialty) user_specialty_db = remove_is_clean_for_one_item(self.bc.format.to_dict(model.user_specialty)) - model.user_specialty.signed_by = 'GOD 🤷‍♂️' + model.user_specialty.signed_by = "GOD 🤷‍♂️" model.user_specialty.save() assert tasks.reset_screenshot.delay.call_args_list == [call(1), call(1)] assert tasks.take_screenshot.delay.call_args_list == [] - assert self.bc.database.list_of('certificate.UserSpecialty') == [{ - **user_specialty_db, - 'signed_by': - 'GOD 🤷‍♂️', - 'update_hash': - generate_update_hash(model.user_specialty), - }] + assert self.bc.database.list_of("certificate.UserSpecialty") == [ + { + **user_specialty_db, + "signed_by": "GOD 🤷‍♂️", + "update_hash": generate_update_hash(model.user_specialty), + } + ] """ 🔽🔽🔽 Status PERSISTED and preview_url set, changing signed_by_role """ - @patch('breathecode.certificate.tasks.reset_screenshot.delay', MagicMock()) - @patch('breathecode.certificate.tasks.take_screenshot.delay', MagicMock()) + @patch("breathecode.certificate.tasks.reset_screenshot.delay", MagicMock()) + @patch("breathecode.certificate.tasks.take_screenshot.delay", MagicMock()) def test_user_specialty_saved__status_persisted__preview_url_set__changing_signed_by_role(self, enable_signals): - enable_signals('breathecode.certificate.signals.user_specialty_saved') + enable_signals("breathecode.certificate.signals.user_specialty_saved") - user_specialty = {'status': 'PERSISTED', 'preview_url': 'GOD 🤷‍♂️', 'update_hash': '⬛🌷'} + user_specialty = {"status": "PERSISTED", "preview_url": "GOD 🤷‍♂️", "update_hash": "⬛🌷"} model = self.bc.database.create(user_specialty=user_specialty) user_specialty_db = remove_is_clean_for_one_item(self.bc.format.to_dict(model.user_specialty)) - model.user_specialty.signed_by_role = 'GOD 🤷‍♂️' + model.user_specialty.signed_by_role = "GOD 🤷‍♂️" model.user_specialty.save() assert tasks.reset_screenshot.delay.call_args_list == [call(1), call(1)] assert tasks.take_screenshot.delay.call_args_list == [] - assert self.bc.database.list_of('certificate.UserSpecialty') == [{ - **user_specialty_db, - 'signed_by_role': - 'GOD 🤷‍♂️', - 'update_hash': - generate_update_hash(model.user_specialty), - }] + assert 
self.bc.database.list_of("certificate.UserSpecialty") == [ + { + **user_specialty_db, + "signed_by_role": "GOD 🤷‍♂️", + "update_hash": generate_update_hash(model.user_specialty), + } + ] """ 🔽🔽🔽 Status PERSISTED and preview_url set, changing layout """ - @patch('breathecode.certificate.tasks.reset_screenshot.delay', MagicMock()) - @patch('breathecode.certificate.tasks.take_screenshot.delay', MagicMock()) + @patch("breathecode.certificate.tasks.reset_screenshot.delay", MagicMock()) + @patch("breathecode.certificate.tasks.take_screenshot.delay", MagicMock()) def test_user_specialty_saved__status_persisted__preview_url_set__changing_layout(self, enable_signals): - enable_signals('breathecode.certificate.signals.user_specialty_saved') + enable_signals("breathecode.certificate.signals.user_specialty_saved") - user_specialty = {'status': 'PERSISTED', 'preview_url': 'GOD 🤷‍♂️', 'update_hash': '⬛🌷'} + user_specialty = {"status": "PERSISTED", "preview_url": "GOD 🤷‍♂️", "update_hash": "⬛🌷"} model1 = self.bc.database.create(user_specialty=user_specialty) model2 = self.bc.database.create(layout_design=1) user_specialty_db = remove_is_clean_for_one_item(self.bc.format.to_dict(model1.user_specialty)) @@ -325,24 +327,24 @@ def test_user_specialty_saved__status_persisted__preview_url_set__changing_layou assert tasks.reset_screenshot.delay.call_args_list == [call(1), call(1)] assert tasks.take_screenshot.delay.call_args_list == [] - assert self.bc.database.list_of('certificate.UserSpecialty') == [{ - **user_specialty_db, - 'layout_id': - 1, - 'update_hash': - generate_update_hash(model1.user_specialty), - }] + assert self.bc.database.list_of("certificate.UserSpecialty") == [ + { + **user_specialty_db, + "layout_id": 1, + "update_hash": generate_update_hash(model1.user_specialty), + } + ] """ 🔽🔽🔽 Status PERSISTED and preview_url set, changing expires_at """ - @patch('breathecode.certificate.tasks.reset_screenshot.delay', MagicMock()) - @patch('breathecode.certificate.tasks.take_screenshot.delay', MagicMock()) + @patch("breathecode.certificate.tasks.reset_screenshot.delay", MagicMock()) + @patch("breathecode.certificate.tasks.take_screenshot.delay", MagicMock()) def test_user_specialty_saved__status_persisted__preview_url_set__changing_expires_at(self, enable_signals): - enable_signals('breathecode.certificate.signals.user_specialty_saved') + enable_signals("breathecode.certificate.signals.user_specialty_saved") - user_specialty = {'status': 'PERSISTED', 'preview_url': 'GOD 🤷‍♂️', 'update_hash': '⬛🌷'} + user_specialty = {"status": "PERSISTED", "preview_url": "GOD 🤷‍♂️", "update_hash": "⬛🌷"} model = self.bc.database.create(user_specialty=user_specialty) user_specialty_db = remove_is_clean_for_one_item(self.bc.format.to_dict(model.user_specialty)) @@ -353,24 +355,24 @@ def test_user_specialty_saved__status_persisted__preview_url_set__changing_expir assert tasks.reset_screenshot.delay.call_args_list == [call(1), call(1)] assert tasks.take_screenshot.delay.call_args_list == [] - assert self.bc.database.list_of('certificate.UserSpecialty') == [{ - **user_specialty_db, - 'expires_at': - utc_now, - 'update_hash': - generate_update_hash(model.user_specialty), - }] + assert self.bc.database.list_of("certificate.UserSpecialty") == [ + { + **user_specialty_db, + "expires_at": utc_now, + "update_hash": generate_update_hash(model.user_specialty), + } + ] """ 🔽🔽🔽 Status PERSISTED and preview_url set, changing issued_at """ - @patch('breathecode.certificate.tasks.reset_screenshot.delay', MagicMock()) - 
@patch('breathecode.certificate.tasks.take_screenshot.delay', MagicMock()) + @patch("breathecode.certificate.tasks.reset_screenshot.delay", MagicMock()) + @patch("breathecode.certificate.tasks.take_screenshot.delay", MagicMock()) def test_user_specialty_saved__status_persisted__preview_url_set__changing_issued_at(self, enable_signals): - enable_signals('breathecode.certificate.signals.user_specialty_saved') + enable_signals("breathecode.certificate.signals.user_specialty_saved") - user_specialty = {'status': 'PERSISTED', 'preview_url': 'GOD 🤷‍♂️', 'update_hash': '⬛🌷'} + user_specialty = {"status": "PERSISTED", "preview_url": "GOD 🤷‍♂️", "update_hash": "⬛🌷"} model = self.bc.database.create(user_specialty=user_specialty) user_specialty_db = remove_is_clean_for_one_item(self.bc.format.to_dict(model.user_specialty)) @@ -381,10 +383,10 @@ def test_user_specialty_saved__status_persisted__preview_url_set__changing_issue assert tasks.reset_screenshot.delay.call_args_list == [call(1), call(1)] assert tasks.take_screenshot.delay.call_args_list == [] - assert self.bc.database.list_of('certificate.UserSpecialty') == [{ - **user_specialty_db, - 'issued_at': - utc_now, - 'update_hash': - generate_update_hash(model.user_specialty), - }] + assert self.bc.database.list_of("certificate.UserSpecialty") == [ + { + **user_specialty_db, + "issued_at": utc_now, + "update_hash": generate_update_hash(model.user_specialty), + } + ] diff --git a/breathecode/certificate/tests/tasks/tests_async_generate_certificate.py b/breathecode/certificate/tests/tasks/tests_async_generate_certificate.py index ba68e276e..ca03d59ad 100644 --- a/breathecode/certificate/tests/tasks/tests_async_generate_certificate.py +++ b/breathecode/certificate/tests/tasks/tests_async_generate_certificate.py @@ -1,6 +1,7 @@ """ Tasks Tests """ + import logging from unittest.mock import MagicMock, call, patch @@ -12,49 +13,56 @@ class ActionCertificateGenerateOneCertificateTestCase(CertificateTestCase): """Tests action async_generate_certificate""" + """ 🔽🔽🔽 CohortUser not found """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.certificate.actions.generate_certificate', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.certificate.actions.generate_certificate", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_async_generate_certificate__cohort_user_not_found(self): - layout = 'vanilla' + layout = "vanilla" async_generate_certificate(1, 1, layout) self.assertEqual(actions.generate_certificate.call_args_list, []) - self.assertEqual(logging.Logger.info.call_args_list, [call('starting-generating-certificate')]) - self.assertEqual(logging.Logger.error.call_args_list, [call('cohort-user-not-found')]) + self.assertEqual(logging.Logger.info.call_args_list, [call("starting-generating-certificate")]) + self.assertEqual(logging.Logger.error.call_args_list, [call("cohort-user-not-found")]) """ 🔽🔽🔽 Call generate_certificate successful """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - 
@patch('breathecode.certificate.actions.generate_certificate', MagicMock()) - @patch('breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.certificate.actions.generate_certificate", MagicMock()) + @patch("breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_async_generate_certificate_with_user_role_student(self): - cohort_user = {'role': 'STUDENT'} - with patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()): + cohort_user = {"role": "STUDENT"} + with patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()): model = self.generate_models(cohort_user=cohort_user) logging.Logger.info.call_args_list = [] - layout = 'vanilla' + layout = "vanilla" async_generate_certificate(1, 1, layout) - self.assertEqual(actions.generate_certificate.call_args_list, [ - call(model.user, model.cohort, 'vanilla'), - ]) - - self.assertEqual(logging.Logger.info.call_args_list, [ - call('starting-generating-certificate'), - call('generating-certificate'), - ]) + self.assertEqual( + actions.generate_certificate.call_args_list, + [ + call(model.user, model.cohort, "vanilla"), + ], + ) + + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("starting-generating-certificate"), + call("generating-certificate"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, []) @@ -62,29 +70,38 @@ def test_async_generate_certificate_with_user_role_student(self): 🔽🔽🔽 Call generate_certificate raise a exception """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.certificate.actions.generate_certificate', MagicMock(side_effect=Exception())) - @patch('breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.certificate.actions.generate_certificate", MagicMock(side_effect=Exception())) + @patch("breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_async_generate_certificate_with_user_role_teacher(self): - cohort_user = {'role': 'STUDENT'} - with patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()): + cohort_user = {"role": "STUDENT"} + with patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()): model = self.generate_models(cohort_user=cohort_user) logging.Logger.info.call_args_list = [] - layout = 'vanilla' + layout = "vanilla" async_generate_certificate(1, 1, layout) - 
self.assertEqual(actions.generate_certificate.call_args_list, [ - call(model.user, model.cohort, 'vanilla'), - ]) - - self.assertEqual(logging.Logger.info.call_args_list, [ - call('starting-generating-certificate'), - call('generating-certificate'), - ]) - - self.assertEqual(logging.Logger.error.call_args_list, [ - call('error-generating-certificate', exc_info=True), - ]) + self.assertEqual( + actions.generate_certificate.call_args_list, + [ + call(model.user, model.cohort, "vanilla"), + ], + ) + + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("starting-generating-certificate"), + call("generating-certificate"), + ], + ) + + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call("error-generating-certificate", exc_info=True), + ], + ) diff --git a/breathecode/certificate/tests/tasks/tests_remove_screenshot.py b/breathecode/certificate/tests/tasks/tests_remove_screenshot.py index 8ac87761c..d4efb2d6c 100644 --- a/breathecode/certificate/tests/tasks/tests_remove_screenshot.py +++ b/breathecode/certificate/tests/tasks/tests_remove_screenshot.py @@ -16,16 +16,16 @@ def wrapper(key, value): else: m4 = MagicMock(side_effect=lambda x: value if key == x else 1000) - monkeypatch.setattr('breathecode.certificate.actions.remove_certificate_screenshot', m4) + monkeypatch.setattr("breathecode.certificate.actions.remove_certificate_screenshot", m4) return m1, m2, m3, m4 m1 = MagicMock() m2 = MagicMock() m3 = MagicMock() - monkeypatch.setattr('logging.Logger.info', m1) - monkeypatch.setattr('logging.Logger.warning', m2) - monkeypatch.setattr('logging.Logger.error', m3) + monkeypatch.setattr("logging.Logger.info", m1) + monkeypatch.setattr("logging.Logger.warning", m2) + monkeypatch.setattr("logging.Logger.error", m3) yield wrapper @@ -38,7 +38,7 @@ def test_returns_true(bc: Breathecode, get_patch, get_int): remove_screenshot(key) - assert info_mock.call_args_list == [call('Starting remove_screenshot')] + assert info_mock.call_args_list == [call("Starting remove_screenshot")] assert warn_mock.call_args_list == [] assert error_mock.call_args_list == [] assert action_mock.call_args_list == [call(key)] @@ -52,10 +52,10 @@ def test_returns_false(bc: Breathecode, get_patch, get_int): remove_screenshot(key) - assert info_mock.call_args_list == [call('Starting remove_screenshot')] + assert info_mock.call_args_list == [call("Starting remove_screenshot")] assert warn_mock.call_args_list == [] assert error_mock.call_args_list == [ - call('UserSpecialty does not have any screenshot, it is skipped', exc_info=True) + call("UserSpecialty does not have any screenshot, it is skipped", exc_info=True) ] assert action_mock.call_args_list == [call(key)] @@ -69,7 +69,7 @@ def test_returns_an_exception(bc: Breathecode, get_patch, get_int, fake): remove_screenshot(key) - assert info_mock.call_args_list == [call('Starting remove_screenshot')] + assert info_mock.call_args_list == [call("Starting remove_screenshot")] assert warn_mock.call_args_list == [] assert error_mock.call_args_list == [call(exc, exc_info=True)] assert action_mock.call_args_list == [call(key)] diff --git a/breathecode/certificate/tests/tasks/tests_reset_screenshot.py b/breathecode/certificate/tests/tasks/tests_reset_screenshot.py index d350a7e49..5e5beb9b5 100644 --- a/breathecode/certificate/tests/tasks/tests_reset_screenshot.py +++ b/breathecode/certificate/tests/tasks/tests_reset_screenshot.py @@ -1,6 +1,7 @@ """ Tasks tests """ + from unittest.mock import MagicMock, patch, call from ...tasks import reset_screenshot from 
..mixins import CertificateTestCase @@ -10,8 +11,8 @@ class ActionCertificateScreenshotTestCase(CertificateTestCase): """Tests action reset_screenshot""" - @patch('breathecode.certificate.actions.certificate_screenshot', MagicMock()) - @patch('breathecode.certificate.actions.remove_certificate_screenshot', MagicMock()) + @patch("breathecode.certificate.actions.certificate_screenshot", MagicMock()) + @patch("breathecode.certificate.actions.remove_certificate_screenshot", MagicMock()) def test_reset_screenshot__call_all_properly(self): """reset_screenshot don't call open in development environment""" @@ -20,8 +21,8 @@ def test_reset_screenshot__call_all_properly(self): self.assertEqual(actions.certificate_screenshot.call_args_list, [call(1)]) self.assertEqual(actions.remove_certificate_screenshot.call_args_list, [call(1)]) - @patch('breathecode.certificate.actions.certificate_screenshot', MagicMock(side_effect=Exception())) - @patch('breathecode.certificate.actions.remove_certificate_screenshot', MagicMock()) + @patch("breathecode.certificate.actions.certificate_screenshot", MagicMock(side_effect=Exception())) + @patch("breathecode.certificate.actions.remove_certificate_screenshot", MagicMock()) def test_reset_screenshot__certificate_screenshot_raise_a_exception(self): """reset_screenshot don't call open in development environment""" @@ -30,8 +31,8 @@ def test_reset_screenshot__certificate_screenshot_raise_a_exception(self): self.assertEqual(actions.certificate_screenshot.call_args_list, [call(1)]) self.assertEqual(actions.remove_certificate_screenshot.call_args_list, [call(1)]) - @patch('breathecode.certificate.actions.certificate_screenshot', MagicMock()) - @patch('breathecode.certificate.actions.remove_certificate_screenshot', MagicMock(side_effect=Exception())) + @patch("breathecode.certificate.actions.certificate_screenshot", MagicMock()) + @patch("breathecode.certificate.actions.remove_certificate_screenshot", MagicMock(side_effect=Exception())) def test_reset_screenshot__remove_certificate_screenshot_raise_a_exception(self): """reset_screenshot don't call open in development environment""" diff --git a/breathecode/certificate/tests/tasks/tests_take_screenshot.py b/breathecode/certificate/tests/tasks/tests_take_screenshot.py index da6c1dbd4..070be184d 100644 --- a/breathecode/certificate/tests/tasks/tests_take_screenshot.py +++ b/breathecode/certificate/tests/tasks/tests_take_screenshot.py @@ -1,6 +1,7 @@ """ Tasks tests """ + from unittest.mock import MagicMock, patch, call from ...tasks import take_screenshot from ..mixins import CertificateTestCase @@ -10,12 +11,12 @@ class ActionCertificateScreenshotTestCase(CertificateTestCase): """Tests action take_screenshot""" - @patch('breathecode.certificate.actions.certificate_screenshot', MagicMock()) + @patch("breathecode.certificate.actions.certificate_screenshot", MagicMock()) def test_take_screenshot__call_take_screenshot_properly(self): take_screenshot(1) self.assertEqual(actions.certificate_screenshot.call_args_list, [call(1)]) - @patch('breathecode.certificate.actions.certificate_screenshot', MagicMock(side_effect=Exception())) + @patch("breathecode.certificate.actions.certificate_screenshot", MagicMock(side_effect=Exception())) def test_take_screenshot__take_screenshot_raise_a_exception(self): take_screenshot(1) self.assertEqual(actions.certificate_screenshot.call_args_list, [call(1)]) diff --git a/breathecode/certificate/tests/urls/tests_cohort_id.py b/breathecode/certificate/tests/urls/tests_cohort_id.py index 278ad08fe..433a6bff4 
100644 --- a/breathecode/certificate/tests/urls/tests_cohort_id.py +++ b/breathecode/certificate/tests/urls/tests_cohort_id.py @@ -1,6 +1,7 @@ """ Test /certificate """ + from unittest.mock import MagicMock, call, patch from django.urls.base import reverse_lazy @@ -20,46 +21,47 @@ class CertificateTestSuite(CertificateTestCase): """Test /certificate/cohort/id""" + """ 🔽🔽🔽 Auth """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_certificate_cohort_user__without_auth(self): self.headers(academy=1) - url = reverse_lazy('certificate:cohort_id', kwargs={'cohort_id': 1}) + url = reverse_lazy("certificate:cohort_id", kwargs={"cohort_id": 1}) response = self.client.post(url, {}) json = response.json() - self.assertEqual(json, { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED - }) + self.assertEqual( + json, + {"detail": "Authentication credentials were not provided.", "status_code": status.HTTP_401_UNAUTHORIZED}, + ) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) self.assertEqual(signals.user_specialty_saved.send_robust.call_args_list, []) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_certificate_cohort_user__with_auth_without_permissions(self): self.headers(academy=1) self.generate_models(authenticate=True) - url = reverse_lazy('certificate:cohort_id', kwargs={'cohort_id': 1}) + url = reverse_lazy("certificate:cohort_id", kwargs={"cohort_id": 1}) response = self.client.post(url, {}) json = response.json() expected = { - 'detail': "You (user: 1) don't have this capability: crud_certificate for academy 1", - 
'status_code': 403 + "detail": "You (user: 1) don't have this capability: crud_certificate for academy 1", + "status_code": 403, } self.assertEqual(json, expected) @@ -71,33 +73,35 @@ def test_certificate_cohort_user__with_auth_without_permissions(self): 🔽🔽🔽 Post method """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_generate_certificate_with_role_student_without_syllabus_version(self): - """ No main teacher in cohort """ + """No main teacher in cohort""" self.headers(academy=1) - cohort_kwargs = {'stage': 'ENDED'} - self.generate_models(authenticate=True, - cohort=True, - user=True, - profile_academy=True, - capability='crud_certificate', - role='STUDENT', - cohort_user=True, - syllabus=True, - syllabus_schedule=True, - specialty=True, - layout_design=True, - cohort_kwargs=cohort_kwargs) - - url = reverse_lazy('certificate:cohort_id', kwargs={'cohort_id': 1}) - response = self.client.post(url, format='json') + cohort_kwargs = {"stage": "ENDED"} + self.generate_models( + authenticate=True, + cohort=True, + user=True, + profile_academy=True, + capability="crud_certificate", + role="STUDENT", + cohort_user=True, + syllabus=True, + syllabus_schedule=True, + specialty=True, + layout_design=True, + cohort_kwargs=cohort_kwargs, + ) + + url = reverse_lazy("certificate:cohort_id", kwargs={"cohort_id": 1}) + response = self.client.post(url, format="json") json = response.json() - expected = {'detail': 'cohort-has-no-syllabus-version-assigned', 'status_code': 400} + expected = {"detail": "cohort-has-no-syllabus-version-assigned", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -105,34 +109,36 @@ def test_generate_certificate_with_role_student_without_syllabus_version(self): self.assertEqual(signals.user_specialty_saved.send_robust.call_args_list, []) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + 
@patch("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_generate_certificate_with_role_student_without_main_teacher(self): - """ No main teacher in cohort """ + """No main teacher in cohort""" self.headers(academy=1) - cohort_kwargs = {'stage': 'ENDED'} - self.generate_models(authenticate=True, - cohort=True, - user=True, - profile_academy=True, - capability='crud_certificate', - role='STUDENT', - cohort_user=True, - syllabus=True, - syllabus_version=True, - syllabus_schedule=True, - specialty=True, - layout_design=True, - cohort_kwargs=cohort_kwargs) - - url = reverse_lazy('certificate:cohort_id', kwargs={'cohort_id': 1}) - response = self.client.post(url, format='json') + cohort_kwargs = {"stage": "ENDED"} + self.generate_models( + authenticate=True, + cohort=True, + user=True, + profile_academy=True, + capability="crud_certificate", + role="STUDENT", + cohort_user=True, + syllabus=True, + syllabus_version=True, + syllabus_schedule=True, + specialty=True, + layout_design=True, + cohort_kwargs=cohort_kwargs, + ) + + url = reverse_lazy("certificate:cohort_id", kwargs={"cohort_id": 1}) + response = self.client.post(url, format="json") json = response.json() - expected = {'detail': 'without-main-teacher', 'status_code': 400} + expected = {"detail": "without-main-teacher", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -140,27 +146,29 @@ def test_generate_certificate_with_role_student_without_main_teacher(self): self.assertEqual(signals.user_specialty_saved.send_robust.call_args_list, []) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_generate_certificate_without_cohort_user(self): - """ No cohort user""" + """No cohort user""" self.headers(academy=1) - self.generate_models(authenticate=True, - cohort=True, - user=True, - profile_academy=True, - capability='crud_certificate', - role='STUDENT', - cohort_stage='ENDED') - - url = reverse_lazy('certificate:cohort_id', kwargs={'cohort_id': 1}) - response = self.client.post(url, format='json') + self.generate_models( + authenticate=True, + cohort=True, + user=True, + profile_academy=True, + capability="crud_certificate", + role="STUDENT", + cohort_stage="ENDED", + ) + + url = reverse_lazy("certificate:cohort_id", kwargs={"cohort_id": 1}) + response = self.client.post(url, 
format="json") json = response.json() - expected = {'detail': 'no-user-with-student-role', 'status_code': 400} + expected = {"detail": "no-user-with-student-role", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -168,23 +176,23 @@ def test_generate_certificate_without_cohort_user(self): self.assertEqual(signals.user_specialty_saved.send_robust.call_args_list, []) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_generate_certificate_with_everything_but_schedule(self): - """ Should be ok because cohorts dont need specialy mode to generate certificates """ + """Should be ok because cohorts dont need specialy mode to generate certificates""" self.headers(academy=1) - cohort_kwargs = {'stage': 'ENDED'} + cohort_kwargs = {"stage": "ENDED"} model = self.generate_models( authenticate=True, cohort=True, user=True, profile_academy=True, - capability='crud_certificate', - role='STUDENT', + capability="crud_certificate", + role="STUDENT", syllabus_version=True, specialty=True, cohort_user=True, @@ -193,51 +201,53 @@ def test_generate_certificate_with_everything_but_schedule(self): ) base = model.copy() - del base['user'] - del base['cohort_user'] - cohort_user_kwargs = {'role': 'TEACHER'} - teacher_model = self.generate_models(user=True, - cohort_user=True, - cohort_user_kwargs=cohort_user_kwargs, - models=base) - - url = reverse_lazy('certificate:cohort_id', kwargs={'cohort_id': 1}) - response = self.client.post(url, format='json') + del base["user"] + del base["cohort_user"] + cohort_user_kwargs = {"role": "TEACHER"} + teacher_model = self.generate_models( + user=True, cohort_user=True, cohort_user_kwargs=cohort_user_kwargs, models=base + ) + + url = reverse_lazy("certificate:cohort_id", kwargs={"cohort_id": 1}) + response = self.client.post(url, format="json") self.assertEqual(response.status_code, status.HTTP_201_CREATED) - user_specialty = self.bc.database.get('certificate.UserSpecialty', 1, dict=False) + user_specialty = self.bc.database.get("certificate.UserSpecialty", 1, dict=False) self.assertEqual( signals.user_specialty_saved.send_robust.call_args_list, [ # Action call(instance=user_specialty, sender=user_specialty.__class__), - ]) - - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) - 
@patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + ], + ) + + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_generate_certificate_with_role_student_with_syllabus_without_specialty(self): - """ No specialty """ + """No specialty""" self.headers(academy=1) - cohort_kwargs = {'stage': 'ENDED'} - self.generate_models(authenticate=True, - cohort=True, - user=True, - profile_academy=True, - capability='crud_certificate', - role='STUDENT', - cohort_user=True, - syllabus=True, - syllabus_version=True, - cohort_kwargs=cohort_kwargs) - - url = reverse_lazy('certificate:cohort_id', kwargs={'cohort_id': 1}) - response = self.client.post(url, format='json') + cohort_kwargs = {"stage": "ENDED"} + self.generate_models( + authenticate=True, + cohort=True, + user=True, + profile_academy=True, + capability="crud_certificate", + role="STUDENT", + cohort_user=True, + syllabus=True, + syllabus_version=True, + cohort_kwargs=cohort_kwargs, + ) + + url = reverse_lazy("certificate:cohort_id", kwargs={"cohort_id": 1}) + response = self.client.post(url, format="json") json = response.json() - expected = {'detail': 'missing-specialty', 'status_code': 400} + expected = {"detail": "missing-specialty", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -245,33 +255,35 @@ def test_generate_certificate_with_role_student_with_syllabus_without_specialty( self.assertEqual(signals.user_specialty_saved.send_robust.call_args_list, []) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_generate_certificate_test_without_layout(self): - """ No specialty """ + """No specialty""" self.headers(academy=1) - cohort_kwargs = {'stage': 'ENDED'} - self.generate_models(authenticate=True, - cohort=True, - user=True, - profile_academy=True, - capability='crud_certificate', - role='STUDENT', - cohort_user=True, - syllabus=True, - syllabus_version=True, - specialty=True, - syllabus_schedule=True, - 
cohort_kwargs=cohort_kwargs) - - url = reverse_lazy('certificate:cohort_id', kwargs={'cohort_id': 1}) - response = self.client.post(url, format='json') + cohort_kwargs = {"stage": "ENDED"} + self.generate_models( + authenticate=True, + cohort=True, + user=True, + profile_academy=True, + capability="crud_certificate", + role="STUDENT", + cohort_user=True, + syllabus=True, + syllabus_version=True, + specialty=True, + syllabus_schedule=True, + cohort_kwargs=cohort_kwargs, + ) + + url = reverse_lazy("certificate:cohort_id", kwargs={"cohort_id": 1}) + response = self.client.post(url, format="json") json = response.json() - expected = {'detail': 'no-default-layout', 'status_code': 400} + expected = {"detail": "no-default-layout", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -279,30 +291,32 @@ def test_generate_certificate_test_without_layout(self): self.assertEqual(signals.user_specialty_saved.send_robust.call_args_list, []) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_generate_certificate_test_with_cohort_stage_no_ended(self): - """ No specialty """ + """No specialty""" self.headers(academy=1) - self.generate_models(authenticate=True, - cohort=True, - user=True, - profile_academy=True, - capability='crud_certificate', - role='STUDENT', - cohort_user=True, - syllabus=True, - specialty=True, - layout_design=True) - - url = reverse_lazy('certificate:cohort_id', kwargs={'cohort_id': 1}) - response = self.client.post(url, format='json') + self.generate_models( + authenticate=True, + cohort=True, + user=True, + profile_academy=True, + capability="crud_certificate", + role="STUDENT", + cohort_user=True, + syllabus=True, + specialty=True, + layout_design=True, + ) + + url = reverse_lazy("certificate:cohort_id", kwargs={"cohort_id": 1}) + response = self.client.post(url, format="json") json = response.json() - expected = {'detail': 'cohort-stage-must-be-ended', 'status_code': 400} + expected = {"detail": "cohort-stage-must-be-ended", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -310,143 +324,146 @@ def test_generate_certificate_test_with_cohort_stage_no_ended(self): self.assertEqual(signals.user_specialty_saved.send_robust.call_args_list, []) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - 
@patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_generate_certificate_test_without_cohort_user_finantial_status(self): self.headers(academy=1) - cohort_kwargs = {'stage': 'ENDED'} - model = self.generate_models(authenticate=True, - cohort=True, - user=True, - profile_academy=True, - cohort_user=True, - capability='crud_certificate', - role='STUDENT', - syllabus=True, - syllabus_version=True, - specialty=True, - syllabus_schedule=True, - user_specialty=True, - layout_design=True, - cohort_kwargs=cohort_kwargs) + cohort_kwargs = {"stage": "ENDED"} + model = self.generate_models( + authenticate=True, + cohort=True, + user=True, + profile_academy=True, + cohort_user=True, + capability="crud_certificate", + role="STUDENT", + syllabus=True, + syllabus_version=True, + specialty=True, + syllabus_schedule=True, + user_specialty=True, + layout_design=True, + cohort_kwargs=cohort_kwargs, + ) base = model.copy() - del base['user'] - del base['cohort_user'] + del base["user"] + del base["cohort_user"] - cohort_user_kwargs = {'role': 'TEACHER'} - teacher_model = self.generate_models(user=True, - cohort_user=True, - cohort_user_kwargs=cohort_user_kwargs, - models=base) + cohort_user_kwargs = {"role": "TEACHER"} + teacher_model = self.generate_models( + user=True, cohort_user=True, cohort_user_kwargs=cohort_user_kwargs, models=base + ) - url = reverse_lazy('certificate:cohort_id', kwargs={'cohort_id': 1}) - response = self.client.post(url, format='json') + url = reverse_lazy("certificate:cohort_id", kwargs={"cohort_id": 1}) + response = self.client.post(url, format="json") json = response.json() - self.assertDatetime(json[0]['updated_at']) - del json[0]['updated_at'] - - expected = [{ - 'academy': { - 'id': 1, - 'logo_url': model['academy'].logo_url, - 'name': model['academy'].name, - 'slug': model['academy'].slug, - 'website_url': None - }, - 'cohort': { - 'id': 1, - 'kickoff_date': self.datetime_to_iso(model.cohort.kickoff_date), - 'ending_date': None, - 'name': model['cohort'].name, - 'slug': model['cohort'].slug, - 'schedule': { - 'id': model['syllabus_schedule'].id, - 'name': model['syllabus_schedule'].name, - 'syllabus': model['syllabus_schedule'].syllabus.id, + self.assertDatetime(json[0]["updated_at"]) + del json[0]["updated_at"] + + expected = [ + { + "academy": { + "id": 1, + "logo_url": model["academy"].logo_url, + "name": model["academy"].name, + "slug": model["academy"].slug, + "website_url": None, + }, + "cohort": { + "id": 1, + "kickoff_date": self.datetime_to_iso(model.cohort.kickoff_date), + "ending_date": None, + "name": model["cohort"].name, + "slug": model["cohort"].slug, + "schedule": { + "id": model["syllabus_schedule"].id, + "name": model["syllabus_schedule"].name, + "syllabus": model["syllabus_schedule"].syllabus.id, + }, + 
"syllabus_version": { + "version": model["syllabus_version"].version, + "name": model["syllabus_version"].syllabus.name, + "slug": model["syllabus_version"].syllabus.slug, + "syllabus": model["syllabus_version"].syllabus.id, + "duration_in_days": model["syllabus_version"].syllabus.duration_in_days, + "duration_in_hours": model["syllabus_version"].syllabus.duration_in_hours, + "week_hours": model["syllabus_version"].syllabus.week_hours, + }, }, - 'syllabus_version': { - 'version': model['syllabus_version'].version, - 'name': model['syllabus_version'].syllabus.name, - 'slug': model['syllabus_version'].syllabus.slug, - 'syllabus': model['syllabus_version'].syllabus.id, - 'duration_in_days': model['syllabus_version'].syllabus.duration_in_days, - 'duration_in_hours': model['syllabus_version'].syllabus.duration_in_hours, - 'week_hours': model['syllabus_version'].syllabus.week_hours, + "created_at": self.datetime_to_iso(model["user_specialty"].created_at), + "expires_at": model["user_specialty"].expires_at, + "id": 1, + "layout": { + "name": model["layout_design"].name, + "background_url": model["layout_design"].background_url, + "slug": model["layout_design"].slug, + "foot_note": model["layout_design"].foot_note, }, - }, - 'created_at': self.datetime_to_iso(model['user_specialty'].created_at), - 'expires_at': model['user_specialty'].expires_at, - 'id': 1, - 'layout': { - 'name': model['layout_design'].name, - 'background_url': model['layout_design'].background_url, - 'slug': model['layout_design'].slug, - 'foot_note': model['layout_design'].foot_note - }, - 'preview_url': model['user_specialty'].preview_url, - 'signed_by': teacher_model['user'].first_name + ' ' + teacher_model['user'].last_name, - 'signed_by_role': 'Director', - 'specialty': { - 'created_at': self.datetime_to_iso(model['specialty'].created_at), - 'description': model['specialty'].description, - 'id': 1, - 'logo_url': None, - 'name': model['specialty'].name, - 'slug': model['specialty'].slug, - 'updated_at': self.datetime_to_iso(model['specialty'].updated_at), - }, - 'status': 'ERROR', - 'issued_at': None, - 'status_text': 'bad-finantial-status', - 'user': { - 'first_name': model['user'].first_name, - 'id': 1, - 'last_name': model['user'].last_name - }, - 'profile_academy': { - 'first_name': model['profile_academy'].first_name, - 'id': model['profile_academy'].id, - 'last_name': model['profile_academy'].last_name, - 'status': model['profile_academy'].status, - 'phone': model['profile_academy'].phone, - 'created_at': self.datetime_to_iso(model['profile_academy'].created_at), - 'email': model['profile_academy'].email, - 'academy': { - 'id': 1, - 'name': model['academy'].name, - 'slug': model['academy'].slug, + "preview_url": model["user_specialty"].preview_url, + "signed_by": teacher_model["user"].first_name + " " + teacher_model["user"].last_name, + "signed_by_role": "Director", + "specialty": { + "created_at": self.datetime_to_iso(model["specialty"].created_at), + "description": model["specialty"].description, + "id": 1, + "logo_url": None, + "name": model["specialty"].name, + "slug": model["specialty"].slug, + "updated_at": self.datetime_to_iso(model["specialty"].updated_at), + }, + "status": "ERROR", + "issued_at": None, + "status_text": "bad-finantial-status", + "user": {"first_name": model["user"].first_name, "id": 1, "last_name": model["user"].last_name}, + "profile_academy": { + "first_name": model["profile_academy"].first_name, + "id": model["profile_academy"].id, + "last_name": model["profile_academy"].last_name, + 
"status": model["profile_academy"].status, + "phone": model["profile_academy"].phone, + "created_at": self.datetime_to_iso(model["profile_academy"].created_at), + "email": model["profile_academy"].email, + "academy": { + "id": 1, + "name": model["academy"].name, + "slug": model["academy"].slug, + }, }, } - }] + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - user_specialty = self.bc.database.get('certificate.UserSpecialty', 1, dict=False) - self.assertEqual(self.all_user_specialty_dict(), - [{ - 'academy_id': 1, - 'cohort_id': 1, - 'expires_at': None, - 'id': 1, - 'layout_id': 1, - 'preview_url': model['user_specialty'].preview_url, - 'signed_by': teacher_model['user'].first_name + ' ' + teacher_model['user'].last_name, - 'signed_by_role': 'Director', - 'specialty_id': 1, - 'status': 'ERROR', - 'issued_at': None, - 'status_text': 'bad-finantial-status', - 'user_id': 1, - 'update_hash': self.generate_update_hash(user_specialty), - 'token': '9e76a2ab3bd55454c384e0a5cdb5298d17285949', - }]) + user_specialty = self.bc.database.get("certificate.UserSpecialty", 1, dict=False) + self.assertEqual( + self.all_user_specialty_dict(), + [ + { + "academy_id": 1, + "cohort_id": 1, + "expires_at": None, + "id": 1, + "layout_id": 1, + "preview_url": model["user_specialty"].preview_url, + "signed_by": teacher_model["user"].first_name + " " + teacher_model["user"].last_name, + "signed_by_role": "Director", + "specialty_id": 1, + "status": "ERROR", + "issued_at": None, + "status_text": "bad-finantial-status", + "user_id": 1, + "update_hash": self.generate_update_hash(user_specialty), + "token": "9e76a2ab3bd55454c384e0a5cdb5298d17285949", + } + ], + ) self.assertEqual( signals.user_specialty_saved.send_robust.call_args_list, @@ -455,286 +472,293 @@ def test_generate_certificate_test_without_cohort_user_finantial_status(self): call(instance=model.user_specialty, sender=model.user_specialty.__class__), # Action call(instance=model.user_specialty, sender=model.user_specialty.__class__), - ]) - - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + ], + ) + + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_generate_certificate_test_without_cohort_user_educational_status(self): self.headers(academy=1) - cohort_kwargs = {'stage': 'ENDED'} - cohort_user_kwargs = {'finantial_status': 'UP_TO_DATE'} - model = self.generate_models(authenticate=True, - cohort=True, - user=True, - profile_academy=True, - cohort_user=True, - capability='crud_certificate', - role='STUDENT', - syllabus=True, - syllabus_version=True, - specialty=True, - syllabus_schedule=True, - 
user_specialty=True, - layout_design=True, - cohort_user_kwargs=cohort_user_kwargs, - cohort_kwargs=cohort_kwargs) + cohort_kwargs = {"stage": "ENDED"} + cohort_user_kwargs = {"finantial_status": "UP_TO_DATE"} + model = self.generate_models( + authenticate=True, + cohort=True, + user=True, + profile_academy=True, + cohort_user=True, + capability="crud_certificate", + role="STUDENT", + syllabus=True, + syllabus_version=True, + specialty=True, + syllabus_schedule=True, + user_specialty=True, + layout_design=True, + cohort_user_kwargs=cohort_user_kwargs, + cohort_kwargs=cohort_kwargs, + ) base = model.copy() - del base['user'] - del base['cohort_user'] + del base["user"] + del base["cohort_user"] - cohort_user_kwargs = {'role': 'TEACHER'} - teacher_model = self.generate_models(user=True, - cohort_user=True, - cohort_user_kwargs=cohort_user_kwargs, - models=base) + cohort_user_kwargs = {"role": "TEACHER"} + teacher_model = self.generate_models( + user=True, cohort_user=True, cohort_user_kwargs=cohort_user_kwargs, models=base + ) - url = reverse_lazy('certificate:cohort_id', kwargs={'cohort_id': 1}) - response = self.client.post(url, format='json') + url = reverse_lazy("certificate:cohort_id", kwargs={"cohort_id": 1}) + response = self.client.post(url, format="json") json = response.json() - self.assertDatetime(json[0]['updated_at']) - del json[0]['updated_at'] - - expected = [{ - 'academy': { - 'id': 1, - 'logo_url': model['academy'].logo_url, - 'name': model['academy'].name, - 'slug': model['academy'].slug, - 'website_url': None - }, - 'cohort': { - 'id': 1, - 'kickoff_date': self.datetime_to_iso(model.cohort.kickoff_date), - 'ending_date': None, - 'name': model['cohort'].name, - 'slug': model['cohort'].slug, - 'schedule': { - 'id': model['syllabus_schedule'].id, - 'name': model['syllabus_schedule'].name, - 'syllabus': model['syllabus_schedule'].syllabus.id, + self.assertDatetime(json[0]["updated_at"]) + del json[0]["updated_at"] + + expected = [ + { + "academy": { + "id": 1, + "logo_url": model["academy"].logo_url, + "name": model["academy"].name, + "slug": model["academy"].slug, + "website_url": None, + }, + "cohort": { + "id": 1, + "kickoff_date": self.datetime_to_iso(model.cohort.kickoff_date), + "ending_date": None, + "name": model["cohort"].name, + "slug": model["cohort"].slug, + "schedule": { + "id": model["syllabus_schedule"].id, + "name": model["syllabus_schedule"].name, + "syllabus": model["syllabus_schedule"].syllabus.id, + }, + "syllabus_version": { + "version": model["syllabus_version"].version, + "name": model["syllabus_version"].syllabus.name, + "slug": model["syllabus_version"].syllabus.slug, + "syllabus": model["syllabus_version"].syllabus.id, + "duration_in_days": model["syllabus_version"].syllabus.duration_in_days, + "duration_in_hours": model["syllabus_version"].syllabus.duration_in_hours, + "week_hours": model["syllabus_version"].syllabus.week_hours, + }, + }, + "created_at": self.datetime_to_iso(model["user_specialty"].created_at), + "expires_at": model["user_specialty"].expires_at, + "id": 1, + "layout": { + "name": model["layout_design"].name, + "background_url": model["layout_design"].background_url, + "slug": model["layout_design"].slug, + "foot_note": model["layout_design"].foot_note, }, - 'syllabus_version': { - 'version': model['syllabus_version'].version, - 'name': model['syllabus_version'].syllabus.name, - 'slug': model['syllabus_version'].syllabus.slug, - 'syllabus': model['syllabus_version'].syllabus.id, - 'duration_in_days': 
model['syllabus_version'].syllabus.duration_in_days, - 'duration_in_hours': model['syllabus_version'].syllabus.duration_in_hours, - 'week_hours': model['syllabus_version'].syllabus.week_hours, + "preview_url": model["user_specialty"].preview_url, + "signed_by": teacher_model["user"].first_name + " " + teacher_model["user"].last_name, + "signed_by_role": "Director", + "specialty": { + "created_at": self.datetime_to_iso(model["specialty"].created_at), + "description": model["specialty"].description, + "id": 1, + "logo_url": None, + "name": model["specialty"].name, + "slug": model["specialty"].slug, + "updated_at": self.datetime_to_iso(model["specialty"].updated_at), }, - }, - 'created_at': self.datetime_to_iso(model['user_specialty'].created_at), - 'expires_at': model['user_specialty'].expires_at, - 'id': 1, - 'layout': { - 'name': model['layout_design'].name, - 'background_url': model['layout_design'].background_url, - 'slug': model['layout_design'].slug, - 'foot_note': model['layout_design'].foot_note - }, - 'preview_url': model['user_specialty'].preview_url, - 'signed_by': teacher_model['user'].first_name + ' ' + teacher_model['user'].last_name, - 'signed_by_role': 'Director', - 'specialty': { - 'created_at': self.datetime_to_iso(model['specialty'].created_at), - 'description': model['specialty'].description, - 'id': 1, - 'logo_url': None, - 'name': model['specialty'].name, - 'slug': model['specialty'].slug, - 'updated_at': self.datetime_to_iso(model['specialty'].updated_at), - }, - 'status': 'ERROR', - 'issued_at': None, - 'status_text': 'bad-educational-status', - 'user': { - 'first_name': model['user'].first_name, - 'id': 1, - 'last_name': model['user'].last_name - }, - 'profile_academy': { - 'first_name': model['profile_academy'].first_name, - 'id': model['profile_academy'].id, - 'last_name': model['profile_academy'].last_name, - 'status': model['profile_academy'].status, - 'phone': model['profile_academy'].phone, - 'created_at': self.datetime_to_iso(model['profile_academy'].created_at), - 'email': model['profile_academy'].email, - 'academy': { - 'id': 1, - 'name': model['academy'].name, - 'slug': model['academy'].slug, + "status": "ERROR", + "issued_at": None, + "status_text": "bad-educational-status", + "user": {"first_name": model["user"].first_name, "id": 1, "last_name": model["user"].last_name}, + "profile_academy": { + "first_name": model["profile_academy"].first_name, + "id": model["profile_academy"].id, + "last_name": model["profile_academy"].last_name, + "status": model["profile_academy"].status, + "phone": model["profile_academy"].phone, + "created_at": self.datetime_to_iso(model["profile_academy"].created_at), + "email": model["profile_academy"].email, + "academy": { + "id": 1, + "name": model["academy"].name, + "slug": model["academy"].slug, + }, }, - }, - }] + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - user_specialty = self.bc.database.get('certificate.UserSpecialty', 1, dict=False) - self.assertEqual(self.all_user_specialty_dict(), - [{ - 'academy_id': 1, - 'cohort_id': 1, - 'expires_at': None, - 'id': 1, - 'layout_id': 1, - 'preview_url': model['user_specialty'].preview_url, - 'signed_by': teacher_model['user'].first_name + ' ' + teacher_model['user'].last_name, - 'signed_by_role': 'Director', - 'specialty_id': 1, - 'status': 'ERROR', - 'issued_at': None, - 'status_text': 'bad-educational-status', - 'user_id': 1, - 'token': '9e76a2ab3bd55454c384e0a5cdb5298d17285949', - 'update_hash': 
user_specialty.update_hash, - }]) - - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + user_specialty = self.bc.database.get("certificate.UserSpecialty", 1, dict=False) + self.assertEqual( + self.all_user_specialty_dict(), + [ + { + "academy_id": 1, + "cohort_id": 1, + "expires_at": None, + "id": 1, + "layout_id": 1, + "preview_url": model["user_specialty"].preview_url, + "signed_by": teacher_model["user"].first_name + " " + teacher_model["user"].last_name, + "signed_by_role": "Director", + "specialty_id": 1, + "status": "ERROR", + "issued_at": None, + "status_text": "bad-educational-status", + "user_id": 1, + "token": "9e76a2ab3bd55454c384e0a5cdb5298d17285949", + "update_hash": user_specialty.update_hash, + } + ], + ) + + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_generate_certificate_test_with_final_cohort(self): self.headers(academy=1) - cohort_kwargs = {'stage': 'ENDED'} - cohort_user_kwargs = {'finantial_status': 'UP_TO_DATE', 'educational_status': 'GRADUATED'} - model = self.generate_models(authenticate=True, - cohort=True, - user=True, - profile_academy=True, - cohort_user=True, - syllabus=True, - syllabus_version=True, - capability='crud_certificate', - role='STUDENT', - specialty=True, - syllabus_schedule=True, - user_specialty=True, - layout_design=True, - cohort_kwargs=cohort_kwargs, - cohort_user_kwargs=cohort_user_kwargs) + cohort_kwargs = {"stage": "ENDED"} + cohort_user_kwargs = {"finantial_status": "UP_TO_DATE", "educational_status": "GRADUATED"} + model = self.generate_models( + authenticate=True, + cohort=True, + user=True, + profile_academy=True, + cohort_user=True, + syllabus=True, + syllabus_version=True, + capability="crud_certificate", + role="STUDENT", + specialty=True, + syllabus_schedule=True, + user_specialty=True, + layout_design=True, + cohort_kwargs=cohort_kwargs, + cohort_user_kwargs=cohort_user_kwargs, + ) base = model.copy() - del base['user'] - del base['cohort_user'] + del base["user"] + del base["cohort_user"] - cohort_user_kwargs = {'role': 'TEACHER'} - teacher_model = self.generate_models(user=True, - cohort_user=True, - cohort_user_kwargs=cohort_user_kwargs, - models=base) + cohort_user_kwargs = {"role": "TEACHER"} + teacher_model = self.generate_models( + user=True, cohort_user=True, cohort_user_kwargs=cohort_user_kwargs, models=base + ) - url = reverse_lazy('certificate:cohort_id', kwargs={'cohort_id': 1}) - response = self.client.post(url, format='json') + 
url = reverse_lazy("certificate:cohort_id", kwargs={"cohort_id": 1}) + response = self.client.post(url, format="json") json = response.json() - self.assertDatetime(json[0]['updated_at']) - del json[0]['updated_at'] - - expected = [{ - 'academy': { - 'id': 1, - 'logo_url': model['academy'].logo_url, - 'name': model['academy'].name, - 'slug': model['academy'].slug, - 'website_url': None - }, - 'cohort': { - 'id': 1, - 'kickoff_date': self.datetime_to_iso(model.cohort.kickoff_date), - 'ending_date': None, - 'name': model['cohort'].name, - 'slug': model['cohort'].slug, - 'schedule': { - 'id': model['syllabus_schedule'].id, - 'name': model['syllabus_schedule'].name, - 'syllabus': model['syllabus_schedule'].syllabus.id, + self.assertDatetime(json[0]["updated_at"]) + del json[0]["updated_at"] + + expected = [ + { + "academy": { + "id": 1, + "logo_url": model["academy"].logo_url, + "name": model["academy"].name, + "slug": model["academy"].slug, + "website_url": None, }, - 'syllabus_version': { - 'version': model['syllabus_version'].version, - 'name': model['syllabus_version'].syllabus.name, - 'slug': model['syllabus_version'].syllabus.slug, - 'syllabus': model['syllabus_version'].syllabus.id, - 'duration_in_days': model['syllabus_version'].syllabus.duration_in_days, - 'duration_in_hours': model['syllabus_version'].syllabus.duration_in_hours, - 'week_hours': model['syllabus_version'].syllabus.week_hours, + "cohort": { + "id": 1, + "kickoff_date": self.datetime_to_iso(model.cohort.kickoff_date), + "ending_date": None, + "name": model["cohort"].name, + "slug": model["cohort"].slug, + "schedule": { + "id": model["syllabus_schedule"].id, + "name": model["syllabus_schedule"].name, + "syllabus": model["syllabus_schedule"].syllabus.id, + }, + "syllabus_version": { + "version": model["syllabus_version"].version, + "name": model["syllabus_version"].syllabus.name, + "slug": model["syllabus_version"].syllabus.slug, + "syllabus": model["syllabus_version"].syllabus.id, + "duration_in_days": model["syllabus_version"].syllabus.duration_in_days, + "duration_in_hours": model["syllabus_version"].syllabus.duration_in_hours, + "week_hours": model["syllabus_version"].syllabus.week_hours, + }, }, - }, - 'created_at': self.datetime_to_iso(model['user_specialty'].created_at), - 'expires_at': model['user_specialty'].expires_at, - 'id': 1, - 'layout': { - 'name': model['layout_design'].name, - 'background_url': model['layout_design'].background_url, - 'slug': model['layout_design'].slug, - 'foot_note': model['layout_design'].foot_note - }, - 'preview_url': model['user_specialty'].preview_url, - 'signed_by': teacher_model['user'].first_name + ' ' + teacher_model['user'].last_name, - 'signed_by_role': 'Director', - 'specialty': { - 'created_at': self.datetime_to_iso(model['specialty'].created_at), - 'description': model['specialty'].description, - 'id': 1, - 'logo_url': None, - 'name': model['specialty'].name, - 'slug': model['specialty'].slug, - 'updated_at': self.datetime_to_iso(model['specialty'].updated_at), - }, - 'status': 'ERROR', - 'issued_at': None, - 'status_text': 'cohort-not-finished', - 'user': { - 'first_name': model['user'].first_name, - 'id': 1, - 'last_name': model['user'].last_name - }, - 'profile_academy': { - 'first_name': model['profile_academy'].first_name, - 'id': model['profile_academy'].id, - 'last_name': model['profile_academy'].last_name, - 'status': model['profile_academy'].status, - 'phone': model['profile_academy'].phone, - 'created_at': self.datetime_to_iso(model['profile_academy'].created_at), 
- 'email': model['profile_academy'].email, - 'academy': { - 'id': 1, - 'name': model['academy'].name, - 'slug': model['academy'].slug, + "created_at": self.datetime_to_iso(model["user_specialty"].created_at), + "expires_at": model["user_specialty"].expires_at, + "id": 1, + "layout": { + "name": model["layout_design"].name, + "background_url": model["layout_design"].background_url, + "slug": model["layout_design"].slug, + "foot_note": model["layout_design"].foot_note, + }, + "preview_url": model["user_specialty"].preview_url, + "signed_by": teacher_model["user"].first_name + " " + teacher_model["user"].last_name, + "signed_by_role": "Director", + "specialty": { + "created_at": self.datetime_to_iso(model["specialty"].created_at), + "description": model["specialty"].description, + "id": 1, + "logo_url": None, + "name": model["specialty"].name, + "slug": model["specialty"].slug, + "updated_at": self.datetime_to_iso(model["specialty"].updated_at), + }, + "status": "ERROR", + "issued_at": None, + "status_text": "cohort-not-finished", + "user": {"first_name": model["user"].first_name, "id": 1, "last_name": model["user"].last_name}, + "profile_academy": { + "first_name": model["profile_academy"].first_name, + "id": model["profile_academy"].id, + "last_name": model["profile_academy"].last_name, + "status": model["profile_academy"].status, + "phone": model["profile_academy"].phone, + "created_at": self.datetime_to_iso(model["profile_academy"].created_at), + "email": model["profile_academy"].email, + "academy": { + "id": 1, + "name": model["academy"].name, + "slug": model["academy"].slug, + }, }, } - }] + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - user_specialty = self.bc.database.get('certificate.UserSpecialty', 1, dict=False) - self.assertEqual(self.all_user_specialty_dict(), - [{ - 'academy_id': 1, - 'cohort_id': 1, - 'expires_at': None, - 'id': 1, - 'layout_id': 1, - 'preview_url': model['user_specialty'].preview_url, - 'signed_by': teacher_model['user'].first_name + ' ' + teacher_model['user'].last_name, - 'signed_by_role': 'Director', - 'specialty_id': 1, - 'status': 'ERROR', - 'issued_at': None, - 'status_text': 'cohort-not-finished', - 'user_id': 1, - 'token': '9e76a2ab3bd55454c384e0a5cdb5298d17285949', - 'update_hash': user_specialty.update_hash, - }]) + user_specialty = self.bc.database.get("certificate.UserSpecialty", 1, dict=False) + self.assertEqual( + self.all_user_specialty_dict(), + [ + { + "academy_id": 1, + "cohort_id": 1, + "expires_at": None, + "id": 1, + "layout_id": 1, + "preview_url": model["user_specialty"].preview_url, + "signed_by": teacher_model["user"].first_name + " " + teacher_model["user"].last_name, + "signed_by_role": "Director", + "specialty_id": 1, + "status": "ERROR", + "issued_at": None, + "status_text": "cohort-not-finished", + "user_id": 1, + "token": "9e76a2ab3bd55454c384e0a5cdb5298d17285949", + "update_hash": user_specialty.update_hash, + } + ], + ) self.assertEqual( signals.user_specialty_saved.send_robust.call_args_list, @@ -743,159 +767,163 @@ def test_generate_certificate_test_with_final_cohort(self): call(instance=model.user_specialty, sender=model.user_specialty.__class__), # Action call(instance=model.user_specialty, sender=model.user_specialty.__class__), - ]) - - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], 
apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + ], + ) + + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_generate_certificate_good_request(self): """Test /certificate/cohort/id status: 201""" self.headers(academy=1) - cohort_kwargs = {'stage': 'ENDED', 'current_day': 9545799} - cohort_user_kwargs = {'finantial_status': 'UP_TO_DATE', 'educational_status': 'GRADUATED'} - syllabus_kwargs = {'duration_in_days': 9545799} - model = self.generate_models(authenticate=True, - cohort=True, - user=True, - profile_academy=True, - syllabus=True, - syllabus_version=True, - capability='crud_certificate', - role='STUDENT', - cohort_user=True, - specialty=True, - syllabus_schedule=True, - user_specialty=True, - layout_design=True, - cohort_kwargs=cohort_kwargs, - cohort_user_kwargs=cohort_user_kwargs, - syllabus_kwargs=syllabus_kwargs) + cohort_kwargs = {"stage": "ENDED", "current_day": 9545799} + cohort_user_kwargs = {"finantial_status": "UP_TO_DATE", "educational_status": "GRADUATED"} + syllabus_kwargs = {"duration_in_days": 9545799} + model = self.generate_models( + authenticate=True, + cohort=True, + user=True, + profile_academy=True, + syllabus=True, + syllabus_version=True, + capability="crud_certificate", + role="STUDENT", + cohort_user=True, + specialty=True, + syllabus_schedule=True, + user_specialty=True, + layout_design=True, + cohort_kwargs=cohort_kwargs, + cohort_user_kwargs=cohort_user_kwargs, + syllabus_kwargs=syllabus_kwargs, + ) base = model.copy() - del base['user'] - del base['cohort_user'] + del base["user"] + del base["cohort_user"] - cohort_user_kwargs = {'role': 'TEACHER'} - teacher_model = self.generate_models(user=True, - cohort_user=True, - cohort_user_kwargs=cohort_user_kwargs, - models=base) + cohort_user_kwargs = {"role": "TEACHER"} + teacher_model = self.generate_models( + user=True, cohort_user=True, cohort_user_kwargs=cohort_user_kwargs, models=base + ) - url = reverse_lazy('certificate:cohort_id', kwargs={'cohort_id': 1}) - data = {'layout_slug': 'vanilla'} + url = reverse_lazy("certificate:cohort_id", kwargs={"cohort_id": 1}) + data = {"layout_slug": "vanilla"} start = timezone.now() - response = self.client.post(url, data, format='json') + response = self.client.post(url, data, format="json") end = timezone.now() json = response.json() - self.assertDatetime(json[0]['updated_at']) - del json[0]['updated_at'] + self.assertDatetime(json[0]["updated_at"]) + del json[0]["updated_at"] - issued_at = self.iso_to_datetime(json[0]['issued_at']) + issued_at = self.iso_to_datetime(json[0]["issued_at"]) self.assertGreater(issued_at, start) self.assertLess(issued_at, end) - del 
json[0]['issued_at'] - - expected = [{ - 'academy': { - 'id': 1, - 'logo_url': model['academy'].logo_url, - 'name': model['academy'].name, - 'slug': model['academy'].slug, - 'website_url': None - }, - 'cohort': { - 'id': 1, - 'kickoff_date': self.datetime_to_iso(model.cohort.kickoff_date), - 'ending_date': None, - 'name': model['cohort'].name, - 'slug': model['cohort'].slug, - 'schedule': { - 'id': model['syllabus_schedule'].id, - 'name': model['syllabus_schedule'].name, - 'syllabus': model['syllabus_schedule'].syllabus.id, + del json[0]["issued_at"] + + expected = [ + { + "academy": { + "id": 1, + "logo_url": model["academy"].logo_url, + "name": model["academy"].name, + "slug": model["academy"].slug, + "website_url": None, + }, + "cohort": { + "id": 1, + "kickoff_date": self.datetime_to_iso(model.cohort.kickoff_date), + "ending_date": None, + "name": model["cohort"].name, + "slug": model["cohort"].slug, + "schedule": { + "id": model["syllabus_schedule"].id, + "name": model["syllabus_schedule"].name, + "syllabus": model["syllabus_schedule"].syllabus.id, + }, + "syllabus_version": { + "version": model["syllabus_version"].version, + "name": model["syllabus_version"].syllabus.name, + "slug": model["syllabus_version"].syllabus.slug, + "syllabus": model["syllabus_version"].syllabus.id, + "duration_in_days": model["syllabus_version"].syllabus.duration_in_days, + "duration_in_hours": model["syllabus_version"].syllabus.duration_in_hours, + "week_hours": model["syllabus_version"].syllabus.week_hours, + }, + }, + "created_at": self.datetime_to_iso(model["user_specialty"].created_at), + "expires_at": model["user_specialty"].expires_at, + "id": 1, + "layout": { + "name": model["layout_design"].name, + "background_url": model["layout_design"].background_url, + "slug": model["layout_design"].slug, + "foot_note": model["layout_design"].foot_note, }, - 'syllabus_version': { - 'version': model['syllabus_version'].version, - 'name': model['syllabus_version'].syllabus.name, - 'slug': model['syllabus_version'].syllabus.slug, - 'syllabus': model['syllabus_version'].syllabus.id, - 'duration_in_days': model['syllabus_version'].syllabus.duration_in_days, - 'duration_in_hours': model['syllabus_version'].syllabus.duration_in_hours, - 'week_hours': model['syllabus_version'].syllabus.week_hours, + "preview_url": model["user_specialty"].preview_url, + "signed_by": teacher_model["user"].first_name + " " + teacher_model["user"].last_name, + "signed_by_role": "Director", + "specialty": { + "created_at": self.datetime_to_iso(model["specialty"].created_at), + "description": model["specialty"].description, + "id": 1, + "logo_url": None, + "name": model["specialty"].name, + "slug": model["specialty"].slug, + "updated_at": self.datetime_to_iso(model["specialty"].updated_at), }, - }, - 'created_at': self.datetime_to_iso(model['user_specialty'].created_at), - 'expires_at': model['user_specialty'].expires_at, - 'id': 1, - 'layout': { - 'name': model['layout_design'].name, - 'background_url': model['layout_design'].background_url, - 'slug': model['layout_design'].slug, - 'foot_note': model['layout_design'].foot_note - }, - 'preview_url': model['user_specialty'].preview_url, - 'signed_by': teacher_model['user'].first_name + ' ' + teacher_model['user'].last_name, - 'signed_by_role': 'Director', - 'specialty': { - 'created_at': self.datetime_to_iso(model['specialty'].created_at), - 'description': model['specialty'].description, - 'id': 1, - 'logo_url': None, - 'name': model['specialty'].name, - 'slug': model['specialty'].slug, - 
'updated_at': self.datetime_to_iso(model['specialty'].updated_at), - }, - 'status': 'PERSISTED', - 'status_text': 'Certificate successfully queued for PDF generation', - 'user': { - 'first_name': model['user'].first_name, - 'id': 1, - 'last_name': model['user'].last_name - }, - 'profile_academy': { - 'first_name': model['profile_academy'].first_name, - 'id': model['profile_academy'].id, - 'last_name': model['profile_academy'].last_name, - 'status': model['profile_academy'].status, - 'phone': model['profile_academy'].phone, - 'created_at': self.datetime_to_iso(model['profile_academy'].created_at), - 'email': model['profile_academy'].email, - 'academy': { - 'id': 1, - 'name': model['academy'].name, - 'slug': model['academy'].slug, + "status": "PERSISTED", + "status_text": "Certificate successfully queued for PDF generation", + "user": {"first_name": model["user"].first_name, "id": 1, "last_name": model["user"].last_name}, + "profile_academy": { + "first_name": model["profile_academy"].first_name, + "id": model["profile_academy"].id, + "last_name": model["profile_academy"].last_name, + "status": model["profile_academy"].status, + "phone": model["profile_academy"].phone, + "created_at": self.datetime_to_iso(model["profile_academy"].created_at), + "email": model["profile_academy"].email, + "academy": { + "id": 1, + "name": model["academy"].name, + "slug": model["academy"].slug, + }, }, } - }] + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - user_specialty = self.bc.database.get('certificate.UserSpecialty', 1, dict=False) - self.assertEqual(self.all_user_specialty_dict(), - [{ - 'academy_id': 1, - 'cohort_id': 1, - 'expires_at': None, - 'id': 1, - 'layout_id': 1, - 'preview_url': model['user_specialty'].preview_url, - 'signed_by': teacher_model['user'].first_name + ' ' + teacher_model['user'].last_name, - 'signed_by_role': 'Director', - 'specialty_id': 1, - 'status': 'PERSISTED', - 'issued_at': issued_at, - 'status_text': 'Certificate successfully queued for PDF generation', - 'user_id': 1, - 'token': '9e76a2ab3bd55454c384e0a5cdb5298d17285949', - 'update_hash': user_specialty.update_hash, - }]) + user_specialty = self.bc.database.get("certificate.UserSpecialty", 1, dict=False) + self.assertEqual( + self.all_user_specialty_dict(), + [ + { + "academy_id": 1, + "cohort_id": 1, + "expires_at": None, + "id": 1, + "layout_id": 1, + "preview_url": model["user_specialty"].preview_url, + "signed_by": teacher_model["user"].first_name + " " + teacher_model["user"].last_name, + "signed_by_role": "Director", + "specialty_id": 1, + "status": "PERSISTED", + "issued_at": issued_at, + "status_text": "Certificate successfully queued for PDF generation", + "user_id": 1, + "token": "9e76a2ab3bd55454c384e0a5cdb5298d17285949", + "update_hash": user_specialty.update_hash, + } + ], + ) self.assertEqual( signals.user_specialty_saved.send_robust.call_args_list, @@ -904,4 +932,5 @@ def test_generate_certificate_good_request(self): call(instance=model.user_specialty, sender=model.user_specialty.__class__), # Action call(instance=model.user_specialty, sender=model.user_specialty.__class__), - ]) + ], + ) diff --git a/breathecode/certificate/tests/urls/tests_cohort_id_student_id.py b/breathecode/certificate/tests/urls/tests_cohort_id_student_id.py index 097de1b7a..fe1fb45b3 100644 --- a/breathecode/certificate/tests/urls/tests_cohort_id_student_id.py +++ b/breathecode/certificate/tests/urls/tests_cohort_id_student_id.py @@ -1,6 +1,7 @@ """ Test /certificate """ + from 
unittest.mock import MagicMock, call, patch from django.urls.base import reverse_lazy @@ -20,25 +21,26 @@ class CertificateTestSuite(CertificateTestCase): """Test /certificate/cohort/id/student/id""" + """ 🔽🔽🔽 Post Method """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_generate_certificate_no_default_layout(self): - """ No main teacher in cohort """ + """No main teacher in cohort""" self.headers(academy=1) - cohort_user_kwargs = {'role': 'STUDENT'} + cohort_user_kwargs = {"role": "STUDENT"} base = self.generate_models( authenticate=True, - capability='crud_certificate', + capability="crud_certificate", profile_academy=True, - role='potato', + role="potato", cohort=True, user=True, cohort_user=True, @@ -49,16 +51,15 @@ def test_generate_certificate_no_default_layout(self): cohort_user_kwargs=cohort_user_kwargs, ) - cohort_user_kwargs = {'role': 'TEACHER'} - teacher_model = self.generate_models(user=True, - cohort_user=True, - cohort_user_kwargs=cohort_user_kwargs, - models=base) + cohort_user_kwargs = {"role": "TEACHER"} + teacher_model = self.generate_models( + user=True, cohort_user=True, cohort_user_kwargs=cohort_user_kwargs, models=base + ) - url = reverse_lazy('certificate:cohort_id_student_id', kwargs={'cohort_id': 1, 'student_id': 1}) - response = self.client.post(url, format='json') + url = reverse_lazy("certificate:cohort_id_student_id", kwargs={"cohort_id": 1, "student_id": 1}) + response = self.client.post(url, format="json") json = response.json() - expected = {'detail': 'no-default-layout', 'status_code': 400} + expected = {"detail": "no-default-layout", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -66,36 +67,35 @@ def test_generate_certificate_no_default_layout(self): self.assertEqual(signals.user_specialty_saved.send_robust.call_args_list, []) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], 
apply_google_cloud_blob_mock()) + @patch("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_generate_certificate_no_cohort_user(self): - """ No main teacher in cohort """ + """No main teacher in cohort""" self.headers(academy=1) base = self.generate_models( authenticate=True, cohort=True, user=True, profile_academy=True, - capability='crud_certificate', - role='POTATO', + capability="crud_certificate", + role="POTATO", syllabus=True, specialty=True, ) - cohort_user_kwargs = {'role': 'TEACHER'} - teacher_model = self.generate_models(user=True, - cohort_user=True, - cohort_user_kwargs=cohort_user_kwargs, - models=base) + cohort_user_kwargs = {"role": "TEACHER"} + teacher_model = self.generate_models( + user=True, cohort_user=True, cohort_user_kwargs=cohort_user_kwargs, models=base + ) - url = reverse_lazy('certificate:cohort_id_student_id', kwargs={'cohort_id': 1, 'student_id': 1}) - response = self.client.post(url, format='json') + url = reverse_lazy("certificate:cohort_id_student_id", kwargs={"cohort_id": 1, "student_id": 1}) + response = self.client.post(url, format="json") json = response.json() - expected = {'detail': 'student-not-found', 'status_code': 404} + expected = {"detail": "student-not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) @@ -103,156 +103,157 @@ def test_generate_certificate_no_cohort_user(self): self.assertEqual(signals.user_specialty_saved.send_robust.call_args_list, []) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_generate_certificate(self): - """ No main teacher in cohort """ + """No main teacher in cohort""" self.headers(academy=1) - cohort_kwargs = {'stage': 'ENDED', 'current_day': 112113114115} - syllabus_kwargs = {'duration_in_days': 112113114115} - user_specialty_kwargs = {'status': 'ERROR'} - cohort_user_kwargs = {'educational_status': 'GRADUATED', 'finantial_status': 'UP_TO_DATE'} - model = self.generate_models(authenticate=True, - cohort=True, - user=True, - profile_academy=True, - capability='crud_certificate', - role='STUDENT', - cohort_user=True, - syllabus=True, - syllabus_version=True, - 
syllabus_schedule=True, - specialty=True, - user_specialty=True, - layout_design=True, - cohort_kwargs=cohort_kwargs, - user_specialty_kwargs=user_specialty_kwargs, - cohort_user_kwargs=cohort_user_kwargs, - syllabus_kwargs=syllabus_kwargs) + cohort_kwargs = {"stage": "ENDED", "current_day": 112113114115} + syllabus_kwargs = {"duration_in_days": 112113114115} + user_specialty_kwargs = {"status": "ERROR"} + cohort_user_kwargs = {"educational_status": "GRADUATED", "finantial_status": "UP_TO_DATE"} + model = self.generate_models( + authenticate=True, + cohort=True, + user=True, + profile_academy=True, + capability="crud_certificate", + role="STUDENT", + cohort_user=True, + syllabus=True, + syllabus_version=True, + syllabus_schedule=True, + specialty=True, + user_specialty=True, + layout_design=True, + cohort_kwargs=cohort_kwargs, + user_specialty_kwargs=user_specialty_kwargs, + cohort_user_kwargs=cohort_user_kwargs, + syllabus_kwargs=syllabus_kwargs, + ) base = model.copy() - del base['user'] - del base['cohort_user'] + del base["user"] + del base["cohort_user"] - cohort_user_kwargs = {'role': 'TEACHER'} - teacher_model = self.generate_models(user=True, - cohort_user=True, - cohort_user_kwargs=cohort_user_kwargs, - models=base) + cohort_user_kwargs = {"role": "TEACHER"} + teacher_model = self.generate_models( + user=True, cohort_user=True, cohort_user_kwargs=cohort_user_kwargs, models=base + ) - url = reverse_lazy('certificate:cohort_id_student_id', kwargs={'cohort_id': 1, 'student_id': 1}) - data = {'layout_slug': 'vanilla'} + url = reverse_lazy("certificate:cohort_id_student_id", kwargs={"cohort_id": 1, "student_id": 1}) + data = {"layout_slug": "vanilla"} start = timezone.now() - response = self.client.post(url, data, format='json') + response = self.client.post(url, data, format="json") end = timezone.now() json = response.json() - self.assertDatetime(json['updated_at']) - del json['updated_at'] + self.assertDatetime(json["updated_at"]) + del json["updated_at"] - issued_at = self.iso_to_datetime(json['issued_at']) + issued_at = self.iso_to_datetime(json["issued_at"]) self.assertGreater(issued_at, start) self.assertLess(issued_at, end) - del json['issued_at'] + del json["issued_at"] expected = { - 'academy': { - 'id': 1, - 'logo_url': model['academy'].logo_url, - 'name': model['academy'].name, - 'slug': model['academy'].slug, - 'website_url': None + "academy": { + "id": 1, + "logo_url": model["academy"].logo_url, + "name": model["academy"].name, + "slug": model["academy"].slug, + "website_url": None, }, - 'cohort': { - 'id': 1, - 'kickoff_date': self.datetime_to_iso(model.cohort.kickoff_date), - 'ending_date': None, - 'name': model['cohort'].name, - 'slug': model['cohort'].slug, - 'schedule': { - 'id': 1, - 'name': model['syllabus_schedule'].name, - 'syllabus': model['syllabus_schedule'].syllabus.id, + "cohort": { + "id": 1, + "kickoff_date": self.datetime_to_iso(model.cohort.kickoff_date), + "ending_date": None, + "name": model["cohort"].name, + "slug": model["cohort"].slug, + "schedule": { + "id": 1, + "name": model["syllabus_schedule"].name, + "syllabus": model["syllabus_schedule"].syllabus.id, }, - 'syllabus_version': { - 'version': model['syllabus_version'].version, - 'name': model['syllabus_version'].syllabus.name, - 'slug': model['syllabus_version'].syllabus.slug, - 'syllabus': model['syllabus_version'].syllabus.id, - 'duration_in_days': model['syllabus_version'].syllabus.duration_in_days, - 'duration_in_hours': model['syllabus_version'].syllabus.duration_in_hours, - 'week_hours': 
model['syllabus_version'].syllabus.week_hours, + "syllabus_version": { + "version": model["syllabus_version"].version, + "name": model["syllabus_version"].syllabus.name, + "slug": model["syllabus_version"].syllabus.slug, + "syllabus": model["syllabus_version"].syllabus.id, + "duration_in_days": model["syllabus_version"].syllabus.duration_in_days, + "duration_in_hours": model["syllabus_version"].syllabus.duration_in_hours, + "week_hours": model["syllabus_version"].syllabus.week_hours, }, }, - 'created_at': self.datetime_to_iso(model['user_specialty'].created_at), - 'expires_at': model['user_specialty'].expires_at, - 'id': 1, - 'layout': { - 'name': model['layout_design'].name, - 'background_url': model['layout_design'].background_url, - 'slug': model['layout_design'].slug, - 'foot_note': model['layout_design'].foot_note - }, - 'preview_url': model['user_specialty'].preview_url, - 'signed_by': teacher_model['user'].first_name + ' ' + teacher_model['user'].last_name, - 'signed_by_role': 'Director', - 'specialty': { - 'description': None, - 'created_at': self.datetime_to_iso(model['specialty'].created_at), - 'id': 1, - 'logo_url': None, - 'name': model['specialty'].name, - 'slug': model['specialty'].slug, - 'updated_at': self.datetime_to_iso(model['specialty'].updated_at), + "created_at": self.datetime_to_iso(model["user_specialty"].created_at), + "expires_at": model["user_specialty"].expires_at, + "id": 1, + "layout": { + "name": model["layout_design"].name, + "background_url": model["layout_design"].background_url, + "slug": model["layout_design"].slug, + "foot_note": model["layout_design"].foot_note, }, - 'status': 'PERSISTED', - 'status_text': 'Certificate successfully queued for PDF generation', - 'user': { - 'first_name': model['user'].first_name, - 'id': 1, - 'last_name': model['user'].last_name + "preview_url": model["user_specialty"].preview_url, + "signed_by": teacher_model["user"].first_name + " " + teacher_model["user"].last_name, + "signed_by_role": "Director", + "specialty": { + "description": None, + "created_at": self.datetime_to_iso(model["specialty"].created_at), + "id": 1, + "logo_url": None, + "name": model["specialty"].name, + "slug": model["specialty"].slug, + "updated_at": self.datetime_to_iso(model["specialty"].updated_at), }, - 'profile_academy': { - 'first_name': model['profile_academy'].first_name, - 'id': model['profile_academy'].id, - 'last_name': model['profile_academy'].last_name, - 'status': model['profile_academy'].status, - 'phone': model['profile_academy'].phone, - 'created_at': self.datetime_to_iso(model['profile_academy'].created_at), - 'email': model['profile_academy'].email, - 'academy': { - 'id': 1, - 'name': model['academy'].name, - 'slug': model['academy'].slug, + "status": "PERSISTED", + "status_text": "Certificate successfully queued for PDF generation", + "user": {"first_name": model["user"].first_name, "id": 1, "last_name": model["user"].last_name}, + "profile_academy": { + "first_name": model["profile_academy"].first_name, + "id": model["profile_academy"].id, + "last_name": model["profile_academy"].last_name, + "status": model["profile_academy"].status, + "phone": model["profile_academy"].phone, + "created_at": self.datetime_to_iso(model["profile_academy"].created_at), + "email": model["profile_academy"].email, + "academy": { + "id": 1, + "name": model["academy"].name, + "slug": model["academy"].slug, }, - } + }, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - user_specialty = 
self.bc.database.get('certificate.UserSpecialty', 1, dict=False) - self.assertEqual(self.all_user_specialty_dict(), - [{ - 'academy_id': 1, - 'cohort_id': 1, - 'expires_at': None, - 'id': 1, - 'layout_id': 1, - 'preview_url': model['user_specialty'].preview_url, - 'signed_by': teacher_model['user'].first_name + ' ' + teacher_model['user'].last_name, - 'signed_by_role': 'Director', - 'specialty_id': 1, - 'status': 'PERSISTED', - 'issued_at': issued_at, - 'status_text': 'Certificate successfully queued for PDF generation', - 'token': '9e76a2ab3bd55454c384e0a5cdb5298d17285949', - 'user_id': 1, - 'update_hash': user_specialty.update_hash, - }]) + user_specialty = self.bc.database.get("certificate.UserSpecialty", 1, dict=False) + self.assertEqual( + self.all_user_specialty_dict(), + [ + { + "academy_id": 1, + "cohort_id": 1, + "expires_at": None, + "id": 1, + "layout_id": 1, + "preview_url": model["user_specialty"].preview_url, + "signed_by": teacher_model["user"].first_name + " " + teacher_model["user"].last_name, + "signed_by_role": "Director", + "specialty_id": 1, + "status": "PERSISTED", + "issued_at": issued_at, + "status_text": "Certificate successfully queued for PDF generation", + "token": "9e76a2ab3bd55454c384e0a5cdb5298d17285949", + "user_id": 1, + "update_hash": user_specialty.update_hash, + } + ], + ) self.assertEqual( signals.user_specialty_saved.send_robust.call_args_list, @@ -261,4 +262,5 @@ def test_generate_certificate(self): call(instance=model.user_specialty, sender=model.user_specialty.__class__), # Action call(instance=model.user_specialty, sender=model.user_specialty.__class__), - ]) + ], + ) diff --git a/breathecode/certificate/tests/urls/tests_me.py b/breathecode/certificate/tests/urls/tests_me.py index 3a4e9a53b..1885c3877 100644 --- a/breathecode/certificate/tests/urls/tests_me.py +++ b/breathecode/certificate/tests/urls/tests_me.py @@ -1,6 +1,7 @@ """ Test /certificate """ + from unittest.mock import MagicMock, call, patch from django.urls.base import reverse_lazy @@ -14,58 +15,59 @@ def get_serializer(self, user_specialty, academy, specialty, user): return { - 'academy': { - 'id': academy.id, - 'logo_url': academy.logo_url, - 'name': academy.name, - 'slug': academy.slug, - 'website_url': academy.website_url, + "academy": { + "id": academy.id, + "logo_url": academy.logo_url, + "name": academy.name, + "slug": academy.slug, + "website_url": academy.website_url, }, - 'cohort': user_specialty.cohort, - 'created_at': self.bc.datetime.to_iso_string(user_specialty.created_at), - 'expires_at': user_specialty.expires_at, - 'id': user_specialty.id, - 'issued_at': user_specialty.issued_at, - 'layout': user_specialty.layout, - 'preview_url': user_specialty.preview_url, - 'signed_by': user_specialty.signed_by, - 'signed_by_role': user_specialty.signed_by_role, - 'specialty': { - 'created_at': self.bc.datetime.to_iso_string(specialty.created_at), - 'description': specialty.description, - 'id': specialty.id, - 'logo_url': specialty.logo_url, - 'name': specialty.name, - 'slug': specialty.slug, - 'updated_at': self.bc.datetime.to_iso_string(specialty.updated_at), + "cohort": user_specialty.cohort, + "created_at": self.bc.datetime.to_iso_string(user_specialty.created_at), + "expires_at": user_specialty.expires_at, + "id": user_specialty.id, + "issued_at": user_specialty.issued_at, + "layout": user_specialty.layout, + "preview_url": user_specialty.preview_url, + "signed_by": user_specialty.signed_by, + "signed_by_role": user_specialty.signed_by_role, + "specialty": { + "created_at": 
self.bc.datetime.to_iso_string(specialty.created_at), + "description": specialty.description, + "id": specialty.id, + "logo_url": specialty.logo_url, + "name": specialty.name, + "slug": specialty.slug, + "updated_at": self.bc.datetime.to_iso_string(specialty.updated_at), }, - 'status': user_specialty.status, - 'status_text': user_specialty.status_text, - 'updated_at': self.bc.datetime.to_iso_string(user_specialty.updated_at), - 'user': { - 'first_name': user.first_name, - 'id': user.id, - 'last_name': user.last_name, + "status": user_specialty.status, + "status_text": user_specialty.status_text, + "updated_at": self.bc.datetime.to_iso_string(user_specialty.updated_at), + "user": { + "first_name": user.first_name, + "id": user.id, + "last_name": user.last_name, }, - 'profile_academy': None + "profile_academy": None, } class CertificateTestSuite(CertificateTestCase): """Test /me""" + """ 🔽🔽🔽 Auth """ - @patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) + @patch("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) def test_without_auth(self): - url = reverse_lazy('certificate:me') + url = reverse_lazy("certificate:me") response = self.client.get(url) json = response.json() expected = { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED + "detail": "Authentication credentials were not provided.", + "status_code": status.HTTP_401_UNAUTHORIZED, } self.assertEqual(json, expected) @@ -75,16 +77,16 @@ def test_without_auth(self): 🔽🔽🔽 GET without permission """ - @patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) + @patch("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) def test__get__without_permission(self): model = self.bc.database.create(user=1) self.client.force_authenticate(model.user) - url = reverse_lazy('certificate:me') + url = reverse_lazy("certificate:me") response = self.client.get(url) json = response.json() - expected = {'detail': 'without-permission', 'status_code': 403} + expected = {"detail": "without-permission", "status_code": 403} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) @@ -93,13 +95,13 @@ def test__get__without_permission(self): 🔽🔽🔽 GET with zero UserSpecialty """ - @patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) + @patch("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) def test__get__with_zero_user_specialties(self): - permission = {'codename': 'get_my_certificate'} + permission = {"codename": "get_my_certificate"} model = self.bc.database.create(user=1, permission=permission) self.client.force_authenticate(model.user) - url = reverse_lazy('certificate:me') + url = reverse_lazy("certificate:me") response = self.client.get(url) json = response.json() @@ -112,18 +114,21 @@ def test__get__with_zero_user_specialties(self): 🔽🔽🔽 GET with one UserSpecialty and status 'PENDING' """ - @patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) + @patch("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) def test__get__with_one_user_specialty_status_pending(self): - permission = {'codename': 'get_my_certificate'} + permission = {"codename": "get_my_certificate"} model = self.bc.database.create(user=1, permission=permission, user_specialty=1) self.client.force_authenticate(model.user) - url = 
reverse_lazy('certificate:me') + url = reverse_lazy("certificate:me") response = self.client.get(url) json = response.json() - expected = [get_serializer(self, model.user_specialty, model.academy, model.specialty, model.user) - ] if model.user_specialty.status == 'PERSISTED' else [] + expected = ( + [get_serializer(self, model.user_specialty, model.academy, model.specialty, model.user)] + if model.user_specialty.status == "PERSISTED" + else [] + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -132,23 +137,23 @@ def test__get__with_one_user_specialty_status_pending(self): 🔽🔽🔽 GET with one UserSpecialty and status 'PERSISTED' """ - @patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) + @patch("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) def test__get__with_one_user_specialty_status_persisted(self): - permission = {'codename': 'get_my_certificate'} - model = self.bc.database.create(user=1, - permission=permission, - user_specialty={ - 'token': 'xyz1', - 'status': 'PERSISTED' - }) + permission = {"codename": "get_my_certificate"} + model = self.bc.database.create( + user=1, permission=permission, user_specialty={"token": "xyz1", "status": "PERSISTED"} + ) self.client.force_authenticate(model.user) - url = reverse_lazy('certificate:me') + url = reverse_lazy("certificate:me") response = self.client.get(url) json = response.json() - expected = [get_serializer(self, model.user_specialty, model.academy, model.specialty, model.user) - ] if model.user_specialty.status == 'PERSISTED' else [] + expected = ( + [get_serializer(self, model.user_specialty, model.academy, model.specialty, model.user)] + if model.user_specialty.status == "PERSISTED" + else [] + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -157,21 +162,22 @@ def test__get__with_one_user_specialty_status_persisted(self): 🔽🔽🔽 GET with two UserSpecialty """ - @patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) + @patch("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) def test__get__with_two_user_specialty(self): - permission = {'codename': 'get_my_certificate'} - user_specialties = [{'token': 'xyz1', 'status': 'PERSISTED'}, {'token': 'xyz2'}] + permission = {"codename": "get_my_certificate"} + user_specialties = [{"token": "xyz1", "status": "PERSISTED"}, {"token": "xyz2"}] model = self.bc.database.create(user=1, permission=permission, user_specialty=user_specialties) self.client.force_authenticate(model.user) - url = reverse_lazy('certificate:me') + url = reverse_lazy("certificate:me") response = self.client.get(url) json = response.json() user_specialties = sorted(model.user_specialty, key=lambda x: x.created_at, reverse=True) expected = [ get_serializer(self, user_specialty, model.academy, model.specialty, model.user) - for user_specialty in user_specialties if user_specialty.status == 'PERSISTED' + for user_specialty in user_specialties + if user_specialty.status == "PERSISTED" ] self.assertEqual(json, expected) @@ -181,14 +187,14 @@ def test__get__with_two_user_specialty(self): 🔽🔽🔽 GET with two UserSpecialty from another user """ - @patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) + @patch("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) def test__get__with_two_user_specialty__from_another_user(self): - permission = {'codename': 
'get_my_certificate'} - user_specialties = [{'token': 'xyz1', 'user_id': 2}, {'token': 'xyz2', 'user_id': 2}] + permission = {"codename": "get_my_certificate"} + user_specialties = [{"token": "xyz1", "user_id": 2}, {"token": "xyz2", "user_id": 2}] model = self.bc.database.create(user=2, permission=permission, user_specialty=user_specialties) self.bc.request.authenticate(model.user[0]) - url = reverse_lazy('certificate:me') + url = reverse_lazy("certificate:me") response = self.client.get(url) json = response.json() @@ -201,21 +207,27 @@ def test__get__with_two_user_specialty__from_another_user(self): 🔽🔽🔽 Spy the extensions """ - @patch.object(APIViewExtensionHandlers, '_spy_extensions', MagicMock()) - @patch.object(APIViewExtensionHandlers, '_spy_extension_arguments', MagicMock()) - @patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) + @patch.object(APIViewExtensionHandlers, "_spy_extensions", MagicMock()) + @patch.object(APIViewExtensionHandlers, "_spy_extension_arguments", MagicMock()) + @patch("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) def test__get__spy_the_extensions(self): - permission = {'codename': 'get_my_certificate'} + permission = {"codename": "get_my_certificate"} model = self.bc.database.create(user=1, permission=permission, user_specialty=1) self.client.force_authenticate(model.user) - url = reverse_lazy('certificate:me') + url = reverse_lazy("certificate:me") self.client.get(url) - self.assertEqual(APIViewExtensionHandlers._spy_extensions.call_args_list, [ - call(['LanguageExtension', 'LookupExtension', 'PaginationExtension', 'SortExtension']), - ]) - - self.assertEqual(APIViewExtensionHandlers._spy_extension_arguments.call_args_list, [ - call(sort='-created_at', paginate=True), - ]) + self.assertEqual( + APIViewExtensionHandlers._spy_extensions.call_args_list, + [ + call(["LanguageExtension", "LookupExtension", "PaginationExtension", "SortExtension"]), + ], + ) + + self.assertEqual( + APIViewExtensionHandlers._spy_extension_arguments.call_args_list, + [ + call(sort="-created_at", paginate=True), + ], + ) diff --git a/breathecode/certificate/tests/urls/tests_root.py b/breathecode/certificate/tests/urls/tests_root.py index fde9d35e9..7e78f9207 100644 --- a/breathecode/certificate/tests/urls/tests_root.py +++ b/breathecode/certificate/tests/urls/tests_root.py @@ -1,6 +1,7 @@ """ Test /certificate """ + from unittest.mock import MagicMock, call, patch from django.urls.base import reverse_lazy @@ -19,27 +20,28 @@ class CertificateTestSuite(CertificateTestCase): """Test /certificate""" + """ 🔽🔽🔽 Auth """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + 
@patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_certificate_cohort_user__without_auth(self): """Test /root without auth""" self.headers(academy=1) - url = reverse_lazy('certificate:root') + url = reverse_lazy("certificate:root") response = self.client.post(url, {}) json = response.json() - self.assertEqual(json, { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED - }) + self.assertEqual( + json, + {"detail": "Authentication credentials were not provided.", "status_code": status.HTTP_401_UNAUTHORIZED}, + ) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) self.assertEqual(signals.user_specialty_saved.send_robust.call_args_list, []) @@ -48,28 +50,30 @@ def test_certificate_cohort_user__without_auth(self): 🔽🔽🔽 Post method """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_certificate_re_attempts_without_capability(self): """Test /root with auth""" """ No capability for the request""" self.headers(academy=1) model = self.generate_models(authenticate=True, cohort=True, user=True, profile_academy=True) - url = reverse_lazy('certificate:root') - data = [{ - 'cohort_slug': model['cohort'].slug, - 'user_id': model['user'].id, - }] - response = self.client.post(url, data, format='json') + url = reverse_lazy("certificate:root") + data = [ + { + "cohort_slug": model["cohort"].slug, + "user_id": model["user"].id, + } + ] + response = self.client.post(url, data, format="json") json = response.json() expected = { - 'detail': "You (user: 1) don't have this capability: crud_certificate for academy 1", - 'status_code': 403 + "detail": "You (user: 1) don't have this capability: crud_certificate for academy 1", + "status_code": 403, } self.assertEqual(json, expected) self.assertEqual(response.status_code, 403) @@ -77,225 +81,239 @@ def test_certificate_re_attempts_without_capability(self): self.assertEqual(signals.user_specialty_saved.send_robust.call_args_list, []) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], 
apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_certificate_re_attempts_without_cohort_user(self): """Test /root with auth""" """ No cohort_user for the request""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - cohort=True, - user=True, - profile_academy=True, - role='STUDENT', - capability='crud_certificate') - - url = reverse_lazy('certificate:root') - data = [{ - 'cohort_slug': model['cohort'].slug, - 'user_id': model['user'].id, - }] - response = self.client.post(url, data, format='json') + model = self.generate_models( + authenticate=True, + cohort=True, + user=True, + profile_academy=True, + role="STUDENT", + capability="crud_certificate", + ) + + url = reverse_lazy("certificate:root") + data = [ + { + "cohort_slug": model["cohort"].slug, + "user_id": model["user"].id, + } + ] + response = self.client.post(url, data, format="json") json = response.json() - expected = {'detail': 'student-not-found-in-cohort', 'status_code': 404} + expected = {"detail": "student-not-found-in-cohort", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, 404) self.assertEqual(self.all_user_specialty_dict(), []) self.assertEqual(signals.user_specialty_saved.send_robust.call_args_list, []) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_certificate_re_attempts_without_user_specialty(self): """Test /root with auth""" """ No user_specialty for the request""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - cohort=True, - user=True, - profile_academy=True, - role='STUDENT', - capability='crud_certificate', - cohort_user=True) - - url = reverse_lazy('certificate:root') - data = [{ - 'cohort_slug': model['cohort'].slug, - 'user_id': model['user'].id, - }] - response = self.client.post(url, data, format='json') + model = self.generate_models( + authenticate=True, + cohort=True, + user=True, + profile_academy=True, + role="STUDENT", + capability="crud_certificate", + cohort_user=True, + ) + + url = reverse_lazy("certificate:root") + data = [ + { + "cohort_slug": model["cohort"].slug, + "user_id": model["user"].id, + } + ] + response = 
self.client.post(url, data, format="json") json = response.json() - expected = {'detail': 'no-user-specialty', 'status_code': 404} + expected = {"detail": "no-user-specialty", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, 404) self.assertEqual(self.all_user_specialty_dict(), []) self.assertEqual(signals.user_specialty_saved.send_robust.call_args_list, []) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_certificate_re_attempts(self): """Test /root with auth""" """ Good Request """ self.headers(academy=1) - syllabus_kwargs = {'duration_in_days': 543665478761} + syllabus_kwargs = {"duration_in_days": 543665478761} cohort_kwargs = { - 'current_day': 543665478761, - 'stage': 'ENDED', + "current_day": 543665478761, + "stage": "ENDED", } cohort_user_kwargs = { - 'finantial_status': 'UP_TO_DATE', - 'educational_status': 'GRADUATED', + "finantial_status": "UP_TO_DATE", + "educational_status": "GRADUATED", } - model = self.generate_models(authenticate=True, - cohort=True, - user=True, - profile_academy=True, - capability='crud_certificate', - role='STUDENT', - cohort_user=True, - syllabus=True, - syllabus_version=True, - specialty=True, - layout_design=True, - user_specialty=True, - syllabus_schedule=True, - cohort_kwargs=cohort_kwargs, - cohort_user_kwargs=cohort_user_kwargs, - syllabus_kwargs=syllabus_kwargs) + model = self.generate_models( + authenticate=True, + cohort=True, + user=True, + profile_academy=True, + capability="crud_certificate", + role="STUDENT", + cohort_user=True, + syllabus=True, + syllabus_version=True, + specialty=True, + layout_design=True, + user_specialty=True, + syllabus_schedule=True, + cohort_kwargs=cohort_kwargs, + cohort_user_kwargs=cohort_user_kwargs, + syllabus_kwargs=syllabus_kwargs, + ) base = model.copy() - del base['user'] - del base['cohort_user'] - - cohort_user_kwargs = {'role': 'TEACHER'} - teacher_model = self.generate_models(user=True, - cohort_user=True, - cohort_user_kwargs=cohort_user_kwargs, - models=base) - - url = reverse_lazy('certificate:root') - data = [{ - 'cohort_slug': model['cohort'].slug, - 'user_id': model['user'].id, - }] - response = self.client.post(url, data, format='json') + del base["user"] + del base["cohort_user"] + + cohort_user_kwargs = {"role": "TEACHER"} + teacher_model = self.generate_models( + user=True, cohort_user=True, 
cohort_user_kwargs=cohort_user_kwargs, models=base + ) + + url = reverse_lazy("certificate:root") + data = [ + { + "cohort_slug": model["cohort"].slug, + "user_id": model["user"].id, + } + ] + response = self.client.post(url, data, format="json") json = response.json() - self.assertDatetime(json[0]['updated_at']) - del json[0]['updated_at'] - del json[0]['signed_by'] - - expected = [{ - 'academy': { - 'id': 1, - 'logo_url': model['academy'].logo_url, - 'name': model['academy'].name, - 'slug': model['academy'].slug, - 'website_url': None - }, - 'cohort': { - 'id': 1, - 'kickoff_date': self.datetime_to_iso(model.cohort.kickoff_date), - 'ending_date': None, - 'name': model['cohort'].name, - 'slug': model['cohort'].slug, - 'schedule': { - 'id': model['syllabus_schedule'].id, - 'name': model['syllabus_schedule'].name, - 'syllabus': model['syllabus_schedule'].syllabus.id, + self.assertDatetime(json[0]["updated_at"]) + del json[0]["updated_at"] + del json[0]["signed_by"] + + expected = [ + { + "academy": { + "id": 1, + "logo_url": model["academy"].logo_url, + "name": model["academy"].name, + "slug": model["academy"].slug, + "website_url": None, }, - 'syllabus_version': { - 'version': model['syllabus_version'].version, - 'name': model['syllabus_version'].syllabus.name, - 'slug': model['syllabus_version'].syllabus.slug, - 'syllabus': model['syllabus_version'].syllabus.id, - 'duration_in_days': model['syllabus_version'].syllabus.duration_in_days, - 'duration_in_hours': model['syllabus_version'].syllabus.duration_in_hours, - 'week_hours': model['syllabus_version'].syllabus.week_hours, + "cohort": { + "id": 1, + "kickoff_date": self.datetime_to_iso(model.cohort.kickoff_date), + "ending_date": None, + "name": model["cohort"].name, + "slug": model["cohort"].slug, + "schedule": { + "id": model["syllabus_schedule"].id, + "name": model["syllabus_schedule"].name, + "syllabus": model["syllabus_schedule"].syllabus.id, + }, + "syllabus_version": { + "version": model["syllabus_version"].version, + "name": model["syllabus_version"].syllabus.name, + "slug": model["syllabus_version"].syllabus.slug, + "syllabus": model["syllabus_version"].syllabus.id, + "duration_in_days": model["syllabus_version"].syllabus.duration_in_days, + "duration_in_hours": model["syllabus_version"].syllabus.duration_in_hours, + "week_hours": model["syllabus_version"].syllabus.week_hours, + }, }, - }, - 'created_at': self.datetime_to_iso(model['user_specialty'].created_at), - 'expires_at': model['user_specialty'].expires_at, - 'issued_at': model.user_specialty.issued_at, - 'id': 1, - 'layout': { - 'name': model['layout_design'].name, - 'slug': model['layout_design'].slug, - 'background_url': model['layout_design'].background_url, - 'foot_note': model['layout_design'].foot_note - }, - 'preview_url': model['user_specialty'].preview_url, - 'signed_by_role': 'Director', - 'specialty': { - 'created_at': self.datetime_to_iso(model['specialty'].created_at), - 'description': model.specialty.description, - 'id': 1, - 'logo_url': None, - 'name': model['specialty'].name, - 'slug': model['specialty'].slug, - 'updated_at': self.datetime_to_iso(model['specialty'].updated_at), - }, - 'status': 'PENDING', - 'status_text': None, - 'user': { - 'first_name': model['user'].first_name, - 'id': 1, - 'last_name': model['user'].last_name - }, - 'profile_academy': { - 'first_name': model['profile_academy'].first_name, - 'id': model['profile_academy'].id, - 'last_name': model['profile_academy'].last_name, - 'status': model['profile_academy'].status, - 'phone': 
model['profile_academy'].phone, - 'created_at': self.datetime_to_iso(model['profile_academy'].created_at), - 'email': model['profile_academy'].email, - 'academy': { - 'id': 1, - 'name': model['academy'].name, - 'slug': model['academy'].slug, + "created_at": self.datetime_to_iso(model["user_specialty"].created_at), + "expires_at": model["user_specialty"].expires_at, + "issued_at": model.user_specialty.issued_at, + "id": 1, + "layout": { + "name": model["layout_design"].name, + "slug": model["layout_design"].slug, + "background_url": model["layout_design"].background_url, + "foot_note": model["layout_design"].foot_note, + }, + "preview_url": model["user_specialty"].preview_url, + "signed_by_role": "Director", + "specialty": { + "created_at": self.datetime_to_iso(model["specialty"].created_at), + "description": model.specialty.description, + "id": 1, + "logo_url": None, + "name": model["specialty"].name, + "slug": model["specialty"].slug, + "updated_at": self.datetime_to_iso(model["specialty"].updated_at), + }, + "status": "PENDING", + "status_text": None, + "user": {"first_name": model["user"].first_name, "id": 1, "last_name": model["user"].last_name}, + "profile_academy": { + "first_name": model["profile_academy"].first_name, + "id": model["profile_academy"].id, + "last_name": model["profile_academy"].last_name, + "status": model["profile_academy"].status, + "phone": model["profile_academy"].phone, + "created_at": self.datetime_to_iso(model["profile_academy"].created_at), + "email": model["profile_academy"].email, + "academy": { + "id": 1, + "name": model["academy"].name, + "slug": model["academy"].slug, + }, }, } - }] + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) certificates = self.all_user_specialty_dict() - self.assertDatetime(certificates[0]['issued_at']) - - del certificates[0]['issued_at'] - - user_specialty = self.bc.database.get('certificate.UserSpecialty', 1, dict=False) - self.assertEqual(certificates, [{ - 'academy_id': 1, - 'cohort_id': 1, - 'expires_at': None, - 'id': 1, - 'layout_id': 1, - 'preview_url': model['user_specialty'].preview_url, - 'signed_by': teacher_model['user'].first_name + ' ' + teacher_model['user'].last_name, - 'signed_by_role': 'Director', - 'specialty_id': 1, - 'status': 'PERSISTED', - 'status_text': 'Certificate successfully queued for PDF generation', - 'user_id': 1, - 'token': '9e76a2ab3bd55454c384e0a5cdb5298d17285949', - 'update_hash': user_specialty.update_hash, - }]) + self.assertDatetime(certificates[0]["issued_at"]) + + del certificates[0]["issued_at"] + + user_specialty = self.bc.database.get("certificate.UserSpecialty", 1, dict=False) + self.assertEqual( + certificates, + [ + { + "academy_id": 1, + "cohort_id": 1, + "expires_at": None, + "id": 1, + "layout_id": 1, + "preview_url": model["user_specialty"].preview_url, + "signed_by": teacher_model["user"].first_name + " " + teacher_model["user"].last_name, + "signed_by_role": "Director", + "specialty_id": 1, + "status": "PERSISTED", + "status_text": "Certificate successfully queued for PDF generation", + "user_id": 1, + "token": "9e76a2ab3bd55454c384e0a5cdb5298d17285949", + "update_hash": user_specialty.update_hash, + } + ], + ) self.assertEqual( signals.user_specialty_saved.send_robust.call_args_list, @@ -306,254 +324,258 @@ def test_certificate_re_attempts(self): call(instance=model.user_specialty, sender=model.user_specialty.__class__), # Action call(instance=model.user_specialty, sender=model.user_specialty.__class__), - ]) - - 
@patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('breathecode.certificate.signals.user_specialty_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + ], + ) + + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("breathecode.certificate.signals.user_specialty_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_certificate_re_attempts_two_certificates(self): """Test /root with auth""" """ Good Request """ self.headers(academy=1) - syllabus_kwargs = {'duration_in_days': 543665478761} + syllabus_kwargs = {"duration_in_days": 543665478761} cohort_kwargs = { - 'current_day': 543665478761, - 'stage': 'ENDED', + "current_day": 543665478761, + "stage": "ENDED", } cohort_user_kwargs = { - 'finantial_status': 'UP_TO_DATE', - 'educational_status': 'GRADUATED', + "finantial_status": "UP_TO_DATE", + "educational_status": "GRADUATED", } - base = self.generate_models(authenticate=True, - cohort=True, - capability='crud_certificate', - role='STUDENT', - profile_academy=True, - syllabus=True, - syllabus_version=True, - specialty=True, - syllabus_schedule=True, - layout_design=True, - syllabus_kwargs=syllabus_kwargs, - cohort_kwargs=cohort_kwargs) - - del base['user'] - - user_specialty_1_kwargs = {'token': 'qwerrty'} - user_specialty_2_kwargs = {'token': 'huhuhuhuhu'} + base = self.generate_models( + authenticate=True, + cohort=True, + capability="crud_certificate", + role="STUDENT", + profile_academy=True, + syllabus=True, + syllabus_version=True, + specialty=True, + syllabus_schedule=True, + layout_design=True, + syllabus_kwargs=syllabus_kwargs, + cohort_kwargs=cohort_kwargs, + ) + + del base["user"] + + user_specialty_1_kwargs = {"token": "qwerrty"} + user_specialty_2_kwargs = {"token": "huhuhuhuhu"} models = [ - self.generate_models(user=True, - cohort_user=True, - profile_academy=True, - user_specialty=True, - user_specialty_kwargs=user_specialty_2_kwargs, - cohort_user_kwargs=cohort_user_kwargs, - models=base), - self.generate_models(user=True, - cohort_user=True, - profile_academy=True, - user_specialty=True, - user_specialty_kwargs=user_specialty_1_kwargs, - cohort_user_kwargs=cohort_user_kwargs, - models=base), + self.generate_models( + user=True, + cohort_user=True, + profile_academy=True, + user_specialty=True, + user_specialty_kwargs=user_specialty_2_kwargs, + cohort_user_kwargs=cohort_user_kwargs, + models=base, + ), + self.generate_models( + user=True, + cohort_user=True, + profile_academy=True, + user_specialty=True, + user_specialty_kwargs=user_specialty_1_kwargs, + cohort_user_kwargs=cohort_user_kwargs, + models=base, + ), ] - cohort_user_kwargs = {'role': 'TEACHER'} - teacher_model = 
self.generate_models(user=True, - cohort_user=True, - cohort_user_kwargs=cohort_user_kwargs, - models=base) + cohort_user_kwargs = {"role": "TEACHER"} + teacher_model = self.generate_models( + user=True, cohort_user=True, cohort_user_kwargs=cohort_user_kwargs, models=base + ) - url = reverse_lazy('certificate:root') + url = reverse_lazy("certificate:root") data = [ { - 'cohort_slug': models[0].cohort.slug, - 'user_id': models[0].user.id, + "cohort_slug": models[0].cohort.slug, + "user_id": models[0].user.id, }, { - 'cohort_slug': models[1].cohort.slug, - 'user_id': models[1].user.id, + "cohort_slug": models[1].cohort.slug, + "user_id": models[1].user.id, }, ] - response = self.client.post(url, data, format='json') + response = self.client.post(url, data, format="json") json = response.json() - self.assertDatetime(json[0]['updated_at']) - del json[0]['updated_at'] - del json[0]['signed_by'] - - self.assertDatetime(json[1]['updated_at']) - del json[1]['updated_at'] - del json[1]['signed_by'] - - expected = [{ - 'academy': { - 'id': 1, - 'logo_url': models[0].academy.logo_url, - 'name': models[0].academy.name, - 'slug': models[0].academy.slug, - 'website_url': None - }, - 'cohort': { - 'id': 1, - 'kickoff_date': self.datetime_to_iso(models[1].cohort.kickoff_date), - 'ending_date': None, - 'name': models[0].cohort.name, - 'slug': models[0].cohort.slug, - 'schedule': { - 'id': models[0]['syllabus_schedule'].id, - 'name': models[0]['syllabus_schedule'].name, - 'syllabus': models[0]['syllabus_schedule'].syllabus.id, + self.assertDatetime(json[0]["updated_at"]) + del json[0]["updated_at"] + del json[0]["signed_by"] + + self.assertDatetime(json[1]["updated_at"]) + del json[1]["updated_at"] + del json[1]["signed_by"] + + expected = [ + { + "academy": { + "id": 1, + "logo_url": models[0].academy.logo_url, + "name": models[0].academy.name, + "slug": models[0].academy.slug, + "website_url": None, }, - 'syllabus_version': { - 'version': models[0]['syllabus_version'].version, - 'name': models[0]['syllabus_version'].syllabus.name, - 'slug': models[0]['syllabus_version'].syllabus.slug, - 'syllabus': models[0]['syllabus_version'].syllabus.id, - 'duration_in_days': models[0]['syllabus_version'].syllabus.duration_in_days, - 'duration_in_hours': models[0]['syllabus_version'].syllabus.duration_in_hours, - 'week_hours': models[0]['syllabus_version'].syllabus.week_hours, + "cohort": { + "id": 1, + "kickoff_date": self.datetime_to_iso(models[1].cohort.kickoff_date), + "ending_date": None, + "name": models[0].cohort.name, + "slug": models[0].cohort.slug, + "schedule": { + "id": models[0]["syllabus_schedule"].id, + "name": models[0]["syllabus_schedule"].name, + "syllabus": models[0]["syllabus_schedule"].syllabus.id, + }, + "syllabus_version": { + "version": models[0]["syllabus_version"].version, + "name": models[0]["syllabus_version"].syllabus.name, + "slug": models[0]["syllabus_version"].syllabus.slug, + "syllabus": models[0]["syllabus_version"].syllabus.id, + "duration_in_days": models[0]["syllabus_version"].syllabus.duration_in_days, + "duration_in_hours": models[0]["syllabus_version"].syllabus.duration_in_hours, + "week_hours": models[0]["syllabus_version"].syllabus.week_hours, + }, }, - }, - 'created_at': self.datetime_to_iso(models[0].user_specialty.created_at), - 'expires_at': models[0].user_specialty.expires_at, - 'issued_at': models[0].user_specialty.issued_at, - 'id': 1, - 'layout': { - 'name': models[0].layout_design.name, - 'background_url': models[0].layout_design.background_url, - 'slug': 
models[0].layout_design.slug, - 'foot_note': models[0].layout_design.foot_note - }, - 'preview_url': models[0].user_specialty.preview_url, - 'signed_by_role': 'Director', - 'specialty': { - 'created_at': self.datetime_to_iso(models[0].specialty.created_at), - 'description': models[0].specialty.description, - 'id': 1, - 'logo_url': None, - 'name': models[0].specialty.name, - 'slug': models[0].specialty.slug, - 'updated_at': self.datetime_to_iso(models[0].specialty.updated_at), - }, - 'status': 'PENDING', - 'status_text': None, - 'user': { - 'first_name': models[0].user.first_name, - 'id': 2, - 'last_name': models[0].user.last_name - }, - 'profile_academy': None - }, { - 'academy': { - 'id': 1, - 'logo_url': models[1].academy.logo_url, - 'name': models[1].academy.name, - 'slug': models[1].academy.slug, - 'website_url': None - }, - 'cohort': { - 'id': 1, - 'kickoff_date': self.datetime_to_iso(models[1].cohort.kickoff_date), - 'ending_date': None, - 'name': models[1].cohort.name, - 'slug': models[1].cohort.slug, - 'schedule': { - 'id': models[0]['syllabus_schedule'].id, - 'name': models[0]['syllabus_schedule'].name, - 'syllabus': models[0]['syllabus_schedule'].syllabus.id, + "created_at": self.datetime_to_iso(models[0].user_specialty.created_at), + "expires_at": models[0].user_specialty.expires_at, + "issued_at": models[0].user_specialty.issued_at, + "id": 1, + "layout": { + "name": models[0].layout_design.name, + "background_url": models[0].layout_design.background_url, + "slug": models[0].layout_design.slug, + "foot_note": models[0].layout_design.foot_note, }, - 'syllabus_version': { - 'version': models[0]['syllabus_version'].version, - 'name': models[0]['syllabus_version'].syllabus.name, - 'slug': models[0]['syllabus_version'].syllabus.slug, - 'syllabus': models[0]['syllabus_version'].syllabus.id, - 'duration_in_days': models[0]['syllabus_version'].syllabus.duration_in_days, - 'duration_in_hours': models[0]['syllabus_version'].syllabus.duration_in_hours, - 'week_hours': models[0]['syllabus_version'].syllabus.week_hours, + "preview_url": models[0].user_specialty.preview_url, + "signed_by_role": "Director", + "specialty": { + "created_at": self.datetime_to_iso(models[0].specialty.created_at), + "description": models[0].specialty.description, + "id": 1, + "logo_url": None, + "name": models[0].specialty.name, + "slug": models[0].specialty.slug, + "updated_at": self.datetime_to_iso(models[0].specialty.updated_at), }, + "status": "PENDING", + "status_text": None, + "user": {"first_name": models[0].user.first_name, "id": 2, "last_name": models[0].user.last_name}, + "profile_academy": None, }, - 'created_at': self.datetime_to_iso(models[1].user_specialty.created_at), - 'expires_at': models[1].user_specialty.expires_at, - 'issued_at': models[1].user_specialty.issued_at, - 'id': 2, - 'layout': { - 'name': models[1].layout_design.name, - 'slug': models[1].layout_design.slug, - 'background_url': models[1].layout_design.background_url, - 'foot_note': models[1].layout_design.foot_note - }, - 'preview_url': models[1].user_specialty.preview_url, - 'signed_by_role': 'Director', - 'specialty': { - 'created_at': self.datetime_to_iso(models[1].specialty.created_at), - 'description': models[1].specialty.description, - 'id': 1, - 'logo_url': None, - 'name': models[1].specialty.name, - 'slug': models[1].specialty.slug, - 'updated_at': self.datetime_to_iso(models[1].specialty.updated_at), - }, - 'status': 'PENDING', - 'status_text': None, - 'user': { - 'first_name': models[1].user.first_name, - 'id': 3, - 
'last_name': models[1].user.last_name + { + "academy": { + "id": 1, + "logo_url": models[1].academy.logo_url, + "name": models[1].academy.name, + "slug": models[1].academy.slug, + "website_url": None, + }, + "cohort": { + "id": 1, + "kickoff_date": self.datetime_to_iso(models[1].cohort.kickoff_date), + "ending_date": None, + "name": models[1].cohort.name, + "slug": models[1].cohort.slug, + "schedule": { + "id": models[0]["syllabus_schedule"].id, + "name": models[0]["syllabus_schedule"].name, + "syllabus": models[0]["syllabus_schedule"].syllabus.id, + }, + "syllabus_version": { + "version": models[0]["syllabus_version"].version, + "name": models[0]["syllabus_version"].syllabus.name, + "slug": models[0]["syllabus_version"].syllabus.slug, + "syllabus": models[0]["syllabus_version"].syllabus.id, + "duration_in_days": models[0]["syllabus_version"].syllabus.duration_in_days, + "duration_in_hours": models[0]["syllabus_version"].syllabus.duration_in_hours, + "week_hours": models[0]["syllabus_version"].syllabus.week_hours, + }, + }, + "created_at": self.datetime_to_iso(models[1].user_specialty.created_at), + "expires_at": models[1].user_specialty.expires_at, + "issued_at": models[1].user_specialty.issued_at, + "id": 2, + "layout": { + "name": models[1].layout_design.name, + "slug": models[1].layout_design.slug, + "background_url": models[1].layout_design.background_url, + "foot_note": models[1].layout_design.foot_note, + }, + "preview_url": models[1].user_specialty.preview_url, + "signed_by_role": "Director", + "specialty": { + "created_at": self.datetime_to_iso(models[1].specialty.created_at), + "description": models[1].specialty.description, + "id": 1, + "logo_url": None, + "name": models[1].specialty.name, + "slug": models[1].specialty.slug, + "updated_at": self.datetime_to_iso(models[1].specialty.updated_at), + }, + "status": "PENDING", + "status_text": None, + "user": {"first_name": models[1].user.first_name, "id": 3, "last_name": models[1].user.last_name}, + "profile_academy": None, }, - 'profile_academy': None - }] + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) certificates = self.all_user_specialty_dict() - self.assertDatetime(certificates[0]['issued_at']) - self.assertDatetime(certificates[1]['issued_at']) - del certificates[0]['issued_at'] - del certificates[1]['issued_at'] - - user_specialty1 = self.bc.database.get('certificate.UserSpecialty', 1, dict=False) - user_specialty2 = self.bc.database.get('certificate.UserSpecialty', 2, dict=False) - self.assertEqual(certificates, [ - { - 'academy_id': 1, - 'cohort_id': 1, - 'expires_at': None, - 'id': 1, - 'layout_id': 1, - 'preview_url': models[0].user_specialty.preview_url, - 'signed_by': teacher_model['user'].first_name + ' ' + teacher_model['user'].last_name, - 'signed_by_role': 'Director', - 'specialty_id': 1, - 'status': 'PERSISTED', - 'status_text': 'Certificate successfully queued for PDF generation', - 'user_id': 2, - 'token': 'huhuhuhuhu', - 'update_hash': user_specialty1.update_hash, - }, - { - 'academy_id': 1, - 'cohort_id': 1, - 'expires_at': None, - 'id': 2, - 'layout_id': 1, - 'preview_url': models[1].user_specialty.preview_url, - 'signed_by': teacher_model['user'].first_name + ' ' + teacher_model['user'].last_name, - 'signed_by_role': 'Director', - 'specialty_id': 1, - 'status': 'PERSISTED', - 'status_text': 'Certificate successfully queued for PDF generation', - 'user_id': 3, - 'token': 'qwerrty', - 'update_hash': user_specialty2.update_hash, - }, - ]) + 
self.assertDatetime(certificates[0]["issued_at"]) + self.assertDatetime(certificates[1]["issued_at"]) + del certificates[0]["issued_at"] + del certificates[1]["issued_at"] + + user_specialty1 = self.bc.database.get("certificate.UserSpecialty", 1, dict=False) + user_specialty2 = self.bc.database.get("certificate.UserSpecialty", 2, dict=False) + self.assertEqual( + certificates, + [ + { + "academy_id": 1, + "cohort_id": 1, + "expires_at": None, + "id": 1, + "layout_id": 1, + "preview_url": models[0].user_specialty.preview_url, + "signed_by": teacher_model["user"].first_name + " " + teacher_model["user"].last_name, + "signed_by_role": "Director", + "specialty_id": 1, + "status": "PERSISTED", + "status_text": "Certificate successfully queued for PDF generation", + "user_id": 2, + "token": "huhuhuhuhu", + "update_hash": user_specialty1.update_hash, + }, + { + "academy_id": 1, + "cohort_id": 1, + "expires_at": None, + "id": 2, + "layout_id": 1, + "preview_url": models[1].user_specialty.preview_url, + "signed_by": teacher_model["user"].first_name + " " + teacher_model["user"].last_name, + "signed_by_role": "Director", + "specialty_id": 1, + "status": "PERSISTED", + "status_text": "Certificate successfully queued for PDF generation", + "user_id": 3, + "token": "qwerrty", + "update_hash": user_specialty2.update_hash, + }, + ], + ) self.assertEqual( signals.user_specialty_saved.send_robust.call_args_list, @@ -569,117 +591,122 @@ def test_certificate_re_attempts_two_certificates(self): call(instance=models[1].user_specialty, sender=models[1].user_specialty.__class__), # Action call(instance=models[1].user_specialty, sender=models[1].user_specialty.__class__), - ]) - - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + ], + ) + + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_certificate__with_full_name_in_querystring(self): - """Test /root """ + """Test /root""" self.headers(academy=1) - schedule_kwargs = {'duration_in_days': 543665478761} + schedule_kwargs = {"duration_in_days": 543665478761} cohort_kwargs = { - 'current_day': 543665478761, - 'stage': 'ENDED', + "current_day": 543665478761, + "stage": "ENDED", } - base = self.generate_models(authenticate=True, - cohort=True, - capability='read_certificate', - role='potato', - academy=True, - profile_academy=True, - specialty=True, - syllabus_schedule=True, - syllabus_schedule_kwargs=schedule_kwargs, - syllabus=True, - cohort_kwargs=cohort_kwargs) - - del base['user'] + base = self.generate_models( + authenticate=True, + cohort=True, + capability="read_certificate", + role="potato", + academy=True, + profile_academy=True, + specialty=True, + syllabus_schedule=True, + syllabus_schedule_kwargs=schedule_kwargs, + syllabus=True, + cohort_kwargs=cohort_kwargs, + ) + + del base["user"] user_kwargs = { - 'email': 'b@b.com', - 
'first_name': 'Rene', - 'last_name': 'Descartes', + "email": "b@b.com", + "first_name": "Rene", + "last_name": "Descartes", } user_kwargs_2 = { - 'email': 'a@a.com', - 'first_name': 'Michael', - 'last_name': 'Jordan', + "email": "a@a.com", + "first_name": "Michael", + "last_name": "Jordan", } - user_specialty_kwargs_1 = {'token': '123dfefef1123rerf346g'} - user_specialty_kwargs_2 = {'token': 'jojfsdknjbs1123rerf346g'} + user_specialty_kwargs_1 = {"token": "123dfefef1123rerf346g"} + user_specialty_kwargs_2 = {"token": "jojfsdknjbs1123rerf346g"} models = [ - self.generate_models(user=True, - user_specialty=True, - cohort_user=True, - user_kwargs=user_kwargs, - user_specialty_kwargs=user_specialty_kwargs_1, - models=base), - self.generate_models(user=True, - user_specialty=True, - cohort_user=True, - user_kwargs=user_kwargs_2, - user_specialty_kwargs=user_specialty_kwargs_2, - models=base) + self.generate_models( + user=True, + user_specialty=True, + cohort_user=True, + user_kwargs=user_kwargs, + user_specialty_kwargs=user_specialty_kwargs_1, + models=base, + ), + self.generate_models( + user=True, + user_specialty=True, + cohort_user=True, + user_kwargs=user_kwargs_2, + user_specialty_kwargs=user_specialty_kwargs_2, + models=base, + ), ] - base_url = reverse_lazy('certificate:root') - url = f'{base_url}?like=Rene Descartes' + base_url = reverse_lazy("certificate:root") + url = f"{base_url}?like=Rene Descartes" response = self.client.get(url) json = response.json() - expected = [{ - 'academy': { - 'id': 1, - 'logo_url': models[0].academy.logo_url, - 'name': models[0].academy.name, - 'slug': models[0].academy.slug, - 'website_url': None - }, - 'cohort': { - 'id': 1, - 'kickoff_date': self.datetime_to_iso(models[0].cohort.kickoff_date), - 'ending_date': None, - 'name': models[0].cohort.name, - 'slug': models[0].cohort.slug, - 'schedule': { - 'id': models[0]['syllabus_schedule'].id, - 'name': models[0]['syllabus_schedule'].name, - 'syllabus': models[0]['syllabus_schedule'].syllabus.id, + expected = [ + { + "academy": { + "id": 1, + "logo_url": models[0].academy.logo_url, + "name": models[0].academy.name, + "slug": models[0].academy.slug, + "website_url": None, }, - 'syllabus_version': None, - }, - 'created_at': self.datetime_to_iso(models[0].user_specialty.created_at), - 'expires_at': models[0].user_specialty.expires_at, - 'issued_at': models[0].user_specialty.issued_at, - 'id': 1, - 'layout': None, - 'preview_url': models[0].user_specialty.preview_url, - 'signed_by': models[0].user_specialty.signed_by, - 'signed_by_role': 'Director', - 'specialty': { - 'created_at': self.datetime_to_iso(models[0].specialty.created_at), - 'description': models[0].specialty.description, - 'id': 1, - 'logo_url': None, - 'name': models[0].specialty.name, - 'slug': models[0].specialty.slug, - 'updated_at': self.datetime_to_iso(models[0].specialty.updated_at), - }, - 'status': 'PENDING', - 'status_text': None, - 'updated_at': self.datetime_to_iso(models[0].user_specialty.updated_at), - 'user': { - 'first_name': models[0].user.first_name, - 'id': 2, - 'last_name': models[0].user.last_name - }, - 'profile_academy': None - }] + "cohort": { + "id": 1, + "kickoff_date": self.datetime_to_iso(models[0].cohort.kickoff_date), + "ending_date": None, + "name": models[0].cohort.name, + "slug": models[0].cohort.slug, + "schedule": { + "id": models[0]["syllabus_schedule"].id, + "name": models[0]["syllabus_schedule"].name, + "syllabus": models[0]["syllabus_schedule"].syllabus.id, + }, + "syllabus_version": None, + }, + 
"created_at": self.datetime_to_iso(models[0].user_specialty.created_at), + "expires_at": models[0].user_specialty.expires_at, + "issued_at": models[0].user_specialty.issued_at, + "id": 1, + "layout": None, + "preview_url": models[0].user_specialty.preview_url, + "signed_by": models[0].user_specialty.signed_by, + "signed_by_role": "Director", + "specialty": { + "created_at": self.datetime_to_iso(models[0].specialty.created_at), + "description": models[0].specialty.description, + "id": 1, + "logo_url": None, + "name": models[0].specialty.name, + "slug": models[0].specialty.slug, + "updated_at": self.datetime_to_iso(models[0].specialty.updated_at), + }, + "status": "PENDING", + "status_text": None, + "updated_at": self.datetime_to_iso(models[0].user_specialty.updated_at), + "user": {"first_name": models[0].user.first_name, "id": 2, "last_name": models[0].user.last_name}, + "profile_academy": None, + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -687,337 +714,353 @@ def test_certificate__with_full_name_in_querystring(self): 🔽🔽🔽 With full like querystring """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_certificate__with_first_name_in_querystring(self): - """Test /root """ + """Test /root""" self.headers(academy=1) - schedule_kwargs = {'duration_in_days': 543665478761} + schedule_kwargs = {"duration_in_days": 543665478761} cohort_kwargs = { - 'current_day': 543665478761, - 'stage': 'ENDED', + "current_day": 543665478761, + "stage": "ENDED", } - base = self.generate_models(authenticate=True, - cohort=True, - capability='read_certificate', - role='potato', - academy=True, - profile_academy=True, - specialty=True, - syllabus_schedule=True, - syllabus_schedule_kwargs=schedule_kwargs, - syllabus=True, - cohort_kwargs=cohort_kwargs) - - del base['user'] + base = self.generate_models( + authenticate=True, + cohort=True, + capability="read_certificate", + role="potato", + academy=True, + profile_academy=True, + specialty=True, + syllabus_schedule=True, + syllabus_schedule_kwargs=schedule_kwargs, + syllabus=True, + cohort_kwargs=cohort_kwargs, + ) + + del base["user"] user_kwargs = { - 'email': 'b@b.com', - 'first_name': 'Rene', - 'last_name': 'Descartes', + "email": "b@b.com", + "first_name": "Rene", + "last_name": "Descartes", } user_kwargs_2 = { - 'email': 'a@a.com', - 'first_name': 'Michael', - 'last_name': 'Jordan', + "email": "a@a.com", + "first_name": "Michael", + "last_name": "Jordan", } - user_specialty_kwargs_1 = {'token': '123dfefef1123rerf346g'} - user_specialty_kwargs_2 = {'token': 'jojfsdknjbs1123rerf346g'} + user_specialty_kwargs_1 = {"token": "123dfefef1123rerf346g"} + user_specialty_kwargs_2 = {"token": "jojfsdknjbs1123rerf346g"} models = [ - 
self.generate_models(user=True, - user_specialty=True, - cohort_user=True, - user_kwargs=user_kwargs, - user_specialty_kwargs=user_specialty_kwargs_1, - models=base), - self.generate_models(user=True, - user_specialty=True, - cohort_user=True, - user_kwargs=user_kwargs_2, - user_specialty_kwargs=user_specialty_kwargs_2, - models=base) + self.generate_models( + user=True, + user_specialty=True, + cohort_user=True, + user_kwargs=user_kwargs, + user_specialty_kwargs=user_specialty_kwargs_1, + models=base, + ), + self.generate_models( + user=True, + user_specialty=True, + cohort_user=True, + user_kwargs=user_kwargs_2, + user_specialty_kwargs=user_specialty_kwargs_2, + models=base, + ), ] - base_url = reverse_lazy('certificate:root') - url = f'{base_url}?like=Rene' + base_url = reverse_lazy("certificate:root") + url = f"{base_url}?like=Rene" response = self.client.get(url) json = response.json() - expected = [{ - 'academy': { - 'id': 1, - 'logo_url': models[0].academy.logo_url, - 'name': models[0].academy.name, - 'slug': models[0].academy.slug, - 'website_url': None - }, - 'cohort': { - 'id': 1, - 'kickoff_date': self.datetime_to_iso(models[0].cohort.kickoff_date), - 'ending_date': None, - 'name': models[0].cohort.name, - 'slug': models[0].cohort.slug, - 'schedule': { - 'id': models[0]['syllabus_schedule'].id, - 'name': models[0]['syllabus_schedule'].name, - 'syllabus': models[0]['syllabus_schedule'].syllabus.id, + expected = [ + { + "academy": { + "id": 1, + "logo_url": models[0].academy.logo_url, + "name": models[0].academy.name, + "slug": models[0].academy.slug, + "website_url": None, }, - 'syllabus_version': None, - }, - 'created_at': self.datetime_to_iso(models[0].user_specialty.created_at), - 'expires_at': models[0].user_specialty.expires_at, - 'issued_at': models[0].user_specialty.issued_at, - 'id': 1, - 'layout': None, - 'preview_url': models[0].user_specialty.preview_url, - 'signed_by': models[0].user_specialty.signed_by, - 'signed_by_role': 'Director', - 'specialty': { - 'created_at': self.datetime_to_iso(models[0].specialty.created_at), - 'description': models[0].specialty.description, - 'id': 1, - 'logo_url': None, - 'name': models[0].specialty.name, - 'slug': models[0].specialty.slug, - 'updated_at': self.datetime_to_iso(models[0].specialty.updated_at), - }, - 'status': 'PENDING', - 'status_text': None, - 'updated_at': self.datetime_to_iso(models[0].user_specialty.updated_at), - 'user': { - 'first_name': models[0].user.first_name, - 'id': 2, - 'last_name': models[0].user.last_name - }, - 'profile_academy': None - }] + "cohort": { + "id": 1, + "kickoff_date": self.datetime_to_iso(models[0].cohort.kickoff_date), + "ending_date": None, + "name": models[0].cohort.name, + "slug": models[0].cohort.slug, + "schedule": { + "id": models[0]["syllabus_schedule"].id, + "name": models[0]["syllabus_schedule"].name, + "syllabus": models[0]["syllabus_schedule"].syllabus.id, + }, + "syllabus_version": None, + }, + "created_at": self.datetime_to_iso(models[0].user_specialty.created_at), + "expires_at": models[0].user_specialty.expires_at, + "issued_at": models[0].user_specialty.issued_at, + "id": 1, + "layout": None, + "preview_url": models[0].user_specialty.preview_url, + "signed_by": models[0].user_specialty.signed_by, + "signed_by_role": "Director", + "specialty": { + "created_at": self.datetime_to_iso(models[0].specialty.created_at), + "description": models[0].specialty.description, + "id": 1, + "logo_url": None, + "name": models[0].specialty.name, + "slug": models[0].specialty.slug, + 
"updated_at": self.datetime_to_iso(models[0].specialty.updated_at), + }, + "status": "PENDING", + "status_text": None, + "updated_at": self.datetime_to_iso(models[0].user_specialty.updated_at), + "user": {"first_name": models[0].user.first_name, "id": 2, "last_name": models[0].user.last_name}, + "profile_academy": None, + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_certificate__with_last_name_in_querystring(self): - """Test /root """ + """Test /root""" self.headers(academy=1) - schedule_kwargs = {'duration_in_days': 543665478761} + schedule_kwargs = {"duration_in_days": 543665478761} cohort_kwargs = { - 'current_day': 543665478761, - 'stage': 'ENDED', + "current_day": 543665478761, + "stage": "ENDED", } - base = self.generate_models(authenticate=True, - cohort=True, - capability='read_certificate', - role='potato', - academy=True, - profile_academy=True, - specialty=True, - syllabus_schedule=True, - syllabus_schedule_kwargs=schedule_kwargs, - syllabus=True, - cohort_kwargs=cohort_kwargs) - - del base['user'] + base = self.generate_models( + authenticate=True, + cohort=True, + capability="read_certificate", + role="potato", + academy=True, + profile_academy=True, + specialty=True, + syllabus_schedule=True, + syllabus_schedule_kwargs=schedule_kwargs, + syllabus=True, + cohort_kwargs=cohort_kwargs, + ) + + del base["user"] user_kwargs = { - 'email': 'b@b.com', - 'first_name': 'Rene', - 'last_name': 'Descartes', + "email": "b@b.com", + "first_name": "Rene", + "last_name": "Descartes", } user_kwargs_2 = { - 'email': 'a@a.com', - 'first_name': 'Michael', - 'last_name': 'Jordan', + "email": "a@a.com", + "first_name": "Michael", + "last_name": "Jordan", } - user_specialty_kwargs_1 = {'token': '123dfefef1123rerf346g'} - user_specialty_kwargs_2 = {'token': 'jojfsdknjbs1123rerf346g'} + user_specialty_kwargs_1 = {"token": "123dfefef1123rerf346g"} + user_specialty_kwargs_2 = {"token": "jojfsdknjbs1123rerf346g"} models = [ - self.generate_models(user=True, - user_specialty=True, - cohort_user=True, - user_kwargs=user_kwargs, - user_specialty_kwargs=user_specialty_kwargs_1, - models=base), - self.generate_models(user=True, - user_specialty=True, - cohort_user=True, - user_kwargs=user_kwargs_2, - user_specialty_kwargs=user_specialty_kwargs_2, - models=base) + self.generate_models( + user=True, + user_specialty=True, + cohort_user=True, + user_kwargs=user_kwargs, + user_specialty_kwargs=user_specialty_kwargs_1, + models=base, + ), + self.generate_models( + user=True, + user_specialty=True, + cohort_user=True, + user_kwargs=user_kwargs_2, + user_specialty_kwargs=user_specialty_kwargs_2, + models=base, + ), ] - base_url = 
reverse_lazy('certificate:root') - url = f'{base_url}?like=Descartes' + base_url = reverse_lazy("certificate:root") + url = f"{base_url}?like=Descartes" response = self.client.get(url) json = response.json() - expected = [{ - 'academy': { - 'id': 1, - 'logo_url': models[0].academy.logo_url, - 'name': models[0].academy.name, - 'slug': models[0].academy.slug, - 'website_url': None - }, - 'cohort': { - 'id': 1, - 'kickoff_date': self.datetime_to_iso(models[0].cohort.kickoff_date), - 'ending_date': None, - 'name': models[0].cohort.name, - 'slug': models[0].cohort.slug, - 'schedule': { - 'id': models[0]['syllabus_schedule'].id, - 'name': models[0]['syllabus_schedule'].name, - 'syllabus': models[0]['syllabus_schedule'].syllabus.id, + expected = [ + { + "academy": { + "id": 1, + "logo_url": models[0].academy.logo_url, + "name": models[0].academy.name, + "slug": models[0].academy.slug, + "website_url": None, }, - 'syllabus_version': None, - }, - 'created_at': self.datetime_to_iso(models[0].user_specialty.created_at), - 'expires_at': models[0].user_specialty.expires_at, - 'id': 1, - 'layout': None, - 'preview_url': models[0].user_specialty.preview_url, - 'signed_by': models[0].user_specialty.signed_by, - 'issued_at': models[0].user_specialty.issued_at, - 'signed_by_role': 'Director', - 'specialty': { - 'created_at': self.datetime_to_iso(models[0].specialty.created_at), - 'description': models[0].specialty.description, - 'id': 1, - 'logo_url': None, - 'name': models[0].specialty.name, - 'slug': models[0].specialty.slug, - 'updated_at': self.datetime_to_iso(models[0].specialty.updated_at), - }, - 'status': 'PENDING', - 'status_text': None, - 'updated_at': self.datetime_to_iso(models[0].user_specialty.updated_at), - 'user': { - 'first_name': models[0].user.first_name, - 'id': 2, - 'last_name': models[0].user.last_name - }, - 'profile_academy': None - }] + "cohort": { + "id": 1, + "kickoff_date": self.datetime_to_iso(models[0].cohort.kickoff_date), + "ending_date": None, + "name": models[0].cohort.name, + "slug": models[0].cohort.slug, + "schedule": { + "id": models[0]["syllabus_schedule"].id, + "name": models[0]["syllabus_schedule"].name, + "syllabus": models[0]["syllabus_schedule"].syllabus.id, + }, + "syllabus_version": None, + }, + "created_at": self.datetime_to_iso(models[0].user_specialty.created_at), + "expires_at": models[0].user_specialty.expires_at, + "id": 1, + "layout": None, + "preview_url": models[0].user_specialty.preview_url, + "signed_by": models[0].user_specialty.signed_by, + "issued_at": models[0].user_specialty.issued_at, + "signed_by_role": "Director", + "specialty": { + "created_at": self.datetime_to_iso(models[0].specialty.created_at), + "description": models[0].specialty.description, + "id": 1, + "logo_url": None, + "name": models[0].specialty.name, + "slug": models[0].specialty.slug, + "updated_at": self.datetime_to_iso(models[0].specialty.updated_at), + }, + "status": "PENDING", + "status_text": None, + "updated_at": self.datetime_to_iso(models[0].user_specialty.updated_at), + "user": {"first_name": models[0].user.first_name, "id": 2, "last_name": models[0].user.last_name}, + "profile_academy": None, + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', 
MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_certificate__with_email_in_querystring(self): - """Test /root """ + """Test /root""" self.headers(academy=1) - schedule_kwargs = {'duration_in_days': 543665478761} + schedule_kwargs = {"duration_in_days": 543665478761} cohort_kwargs = { - 'current_day': 543665478761, - 'stage': 'ENDED', + "current_day": 543665478761, + "stage": "ENDED", } - base = self.generate_models(authenticate=True, - cohort=True, - cohort_finished=True, - capability='read_certificate', - role='potato', - academy=True, - profile_academy=True, - specialty=True, - syllabus_schedule=True, - syllabus_schedule_kwargs=schedule_kwargs, - syllabus=True, - cohort_kwargs=cohort_kwargs) - - del base['user'] + base = self.generate_models( + authenticate=True, + cohort=True, + cohort_finished=True, + capability="read_certificate", + role="potato", + academy=True, + profile_academy=True, + specialty=True, + syllabus_schedule=True, + syllabus_schedule_kwargs=schedule_kwargs, + syllabus=True, + cohort_kwargs=cohort_kwargs, + ) + + del base["user"] user_kwargs = { - 'email': 'b@b.com', - 'first_name': 'Rene', - 'last_name': 'Descartes', + "email": "b@b.com", + "first_name": "Rene", + "last_name": "Descartes", } user_kwargs_2 = { - 'email': 'a@a.com', - 'first_name': 'Michael', - 'last_name': 'Jordan', + "email": "a@a.com", + "first_name": "Michael", + "last_name": "Jordan", } - user_specialty_kwargs_1 = {'token': '123dfefef1123rerf346g'} - user_specialty_kwargs_2 = {'token': 'jojfsdknjbs1123rerf346g'} + user_specialty_kwargs_1 = {"token": "123dfefef1123rerf346g"} + user_specialty_kwargs_2 = {"token": "jojfsdknjbs1123rerf346g"} models = [ - self.generate_models(user=True, - user_specialty=True, - cohort_user=True, - user_kwargs=user_kwargs, - user_specialty_kwargs=user_specialty_kwargs_1, - models=base), - self.generate_models(user=True, - user_specialty=True, - cohort_user=True, - user_kwargs=user_kwargs_2, - user_specialty_kwargs=user_specialty_kwargs_2, - models=base) + self.generate_models( + user=True, + user_specialty=True, + cohort_user=True, + user_kwargs=user_kwargs, + user_specialty_kwargs=user_specialty_kwargs_1, + models=base, + ), + self.generate_models( + user=True, + user_specialty=True, + cohort_user=True, + user_kwargs=user_kwargs_2, + user_specialty_kwargs=user_specialty_kwargs_2, + models=base, + ), ] - base_url = reverse_lazy('certificate:root') - url = f'{base_url}?like=b@b.com' + base_url = reverse_lazy("certificate:root") + url = f"{base_url}?like=b@b.com" response = self.client.get(url) json = response.json() - expected = [{ - 'academy': { - 'id': 1, - 'logo_url': models[0].academy.logo_url, - 'name': models[0].academy.name, - 'slug': models[0].academy.slug, - 'website_url': None - }, - 'cohort': { - 'id': 1, - 'kickoff_date': self.datetime_to_iso(models[0].cohort.kickoff_date), - 'ending_date': None, - 'name': models[0].cohort.name, - 'slug': models[0].cohort.slug, - 'schedule': { - 'id': models[0]['syllabus_schedule'].id, - 'name': 
models[0]['syllabus_schedule'].name, - 'syllabus': models[0]['syllabus_schedule'].syllabus.id, + expected = [ + { + "academy": { + "id": 1, + "logo_url": models[0].academy.logo_url, + "name": models[0].academy.name, + "slug": models[0].academy.slug, + "website_url": None, }, - 'syllabus_version': None, - }, - 'created_at': self.datetime_to_iso(models[0].user_specialty.created_at), - 'expires_at': models[0].user_specialty.expires_at, - 'issued_at': models[0].user_specialty.issued_at, - 'id': 1, - 'layout': None, - 'preview_url': models[0].user_specialty.preview_url, - 'signed_by': models[0].user_specialty.signed_by, - 'signed_by_role': 'Director', - 'specialty': { - 'created_at': self.datetime_to_iso(models[0].specialty.created_at), - 'description': models[0].specialty.description, - 'id': 1, - 'logo_url': None, - 'name': models[0].specialty.name, - 'slug': models[0].specialty.slug, - 'updated_at': self.datetime_to_iso(models[0].specialty.updated_at), - }, - 'status': 'PENDING', - 'status_text': None, - 'updated_at': self.datetime_to_iso(models[0].user_specialty.updated_at), - 'user': { - 'first_name': models[0].user.first_name, - 'id': 2, - 'last_name': models[0].user.last_name, - }, - 'profile_academy': None - }] + "cohort": { + "id": 1, + "kickoff_date": self.datetime_to_iso(models[0].cohort.kickoff_date), + "ending_date": None, + "name": models[0].cohort.name, + "slug": models[0].cohort.slug, + "schedule": { + "id": models[0]["syllabus_schedule"].id, + "name": models[0]["syllabus_schedule"].name, + "syllabus": models[0]["syllabus_schedule"].syllabus.id, + }, + "syllabus_version": None, + }, + "created_at": self.datetime_to_iso(models[0].user_specialty.created_at), + "expires_at": models[0].user_specialty.expires_at, + "issued_at": models[0].user_specialty.issued_at, + "id": 1, + "layout": None, + "preview_url": models[0].user_specialty.preview_url, + "signed_by": models[0].user_specialty.signed_by, + "signed_by_role": "Director", + "specialty": { + "created_at": self.datetime_to_iso(models[0].specialty.created_at), + "description": models[0].specialty.description, + "id": 1, + "logo_url": None, + "name": models[0].specialty.name, + "slug": models[0].specialty.slug, + "updated_at": self.datetime_to_iso(models[0].specialty.updated_at), + }, + "status": "PENDING", + "status_text": None, + "updated_at": self.datetime_to_iso(models[0].user_specialty.updated_at), + "user": { + "first_name": models[0].user.first_name, + "id": 2, + "last_name": models[0].user.last_name, + }, + "profile_academy": None, + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -1026,114 +1069,120 @@ def test_certificate__with_email_in_querystring(self): 🔽🔽🔽 Delete """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", 
MagicMock(return_value=None)) def test_delete_certificate_in_bulk_with_two_ids(self): """Test / with two certificates""" self.headers(academy=1) - base = self.generate_models(authenticate=True, - cohort=True, - user=True, - profile_academy=True, - syllabus=True, - capability='crud_certificate', - cohort_user=True, - specialty=True, - role='potato') - del base['user'] - - model1 = self.generate_models(user=True, - profile_academy=True, - user_specialty=True, - user_specialty_kwargs={'token': 'hitman3000'}, - models=base) - - model2 = self.generate_models(user=True, - profile_academy=True, - user_specialty=True, - user_specialty_kwargs={'token': 'batman2000'}, - models=base) - - url = reverse_lazy('certificate:root') + '?id=1,2' + base = self.generate_models( + authenticate=True, + cohort=True, + user=True, + profile_academy=True, + syllabus=True, + capability="crud_certificate", + cohort_user=True, + specialty=True, + role="potato", + ) + del base["user"] + + model1 = self.generate_models( + user=True, + profile_academy=True, + user_specialty=True, + user_specialty_kwargs={"token": "hitman3000"}, + models=base, + ) + + model2 = self.generate_models( + user=True, + profile_academy=True, + user_specialty=True, + user_specialty_kwargs={"token": "batman2000"}, + models=base, + ) + + url = reverse_lazy("certificate:root") + "?id=1,2" response = self.client.delete(url) self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) self.assertEqual(self.all_user_invite_dict(), []) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_delete_certificate_in_bulk_not_found(self): """Test / with two certificates""" self.headers(academy=1) - base = self.generate_models(authenticate=True, - cohort=True, - user=True, - profile_academy=True, - syllabus=True, - capability='crud_certificate', - cohort_user=True, - specialty=True, - role='potato') - del base['user'] - - model1 = self.generate_models(user=True, - user_specialty=True, - user_specialty_kwargs={'token': 'hitman3000'}, - models=base) - - model2 = self.generate_models(user=True, - user_specialty=True, - user_specialty_kwargs={'token': 'batman2000'}, - models=base) - - url = reverse_lazy('certificate:root') + '?id=3,4' + base = self.generate_models( + authenticate=True, + cohort=True, + user=True, + profile_academy=True, + syllabus=True, + capability="crud_certificate", + cohort_user=True, + specialty=True, + role="potato", + ) + del base["user"] + + model1 = self.generate_models( + user=True, user_specialty=True, user_specialty_kwargs={"token": "hitman3000"}, models=base + ) + + model2 = self.generate_models( + user=True, user_specialty=True, user_specialty_kwargs={"token": "batman2000"}, models=base + ) + + url = reverse_lazy("certificate:root") + "?id=3,4" response = 
self.client.delete(url) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - self.assertEqual(response.json(), {'detail': 'specialties_not_found', 'status_code': 404}) + self.assertEqual(response.json(), {"detail": "specialties_not_found", "status_code": 404}) self.assertEqual(self.all_user_invite_dict(), []) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_delete_certificate_in_bulk_without_passing_ids(self): """Test / with two certificates""" self.headers(academy=1) - base = self.generate_models(authenticate=True, - cohort=True, - user=True, - profile_academy=True, - syllabus=True, - capability='crud_certificate', - cohort_user=True, - specialty=True, - role='potato') - del base['user'] - - model1 = self.generate_models(user=True, - user_specialty=True, - user_specialty_kwargs={'token': 'hitman3000'}, - models=base) - - model2 = self.generate_models(user=True, - user_specialty=True, - user_specialty_kwargs={'token': 'batman2000'}, - models=base) - - url = reverse_lazy('certificate:root') + base = self.generate_models( + authenticate=True, + cohort=True, + user=True, + profile_academy=True, + syllabus=True, + capability="crud_certificate", + cohort_user=True, + specialty=True, + role="potato", + ) + del base["user"] + + model1 = self.generate_models( + user=True, user_specialty=True, user_specialty_kwargs={"token": "hitman3000"}, models=base + ) + + model2 = self.generate_models( + user=True, user_specialty=True, user_specialty_kwargs={"token": "batman2000"}, models=base + ) + + url = reverse_lazy("certificate:root") response = self.client.delete(url) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - self.assertEqual(response.json(), {'detail': 'missing_ids', 'status_code': 404}) + self.assertEqual(response.json(), {"detail": "missing_ids", "status_code": 404}) self.assertEqual(self.all_user_invite_dict(), []) diff --git a/breathecode/certificate/urls.py b/breathecode/certificate/urls.py index 043bd7ac8..d63892cae 100644 --- a/breathecode/certificate/urls.py +++ b/breathecode/certificate/urls.py @@ -1,15 +1,23 @@ from django.urls import path -from .views import (get_specialties, get_badges, get_certificate, CertificateView, CertificateCohortView, - CertificateAcademyView, LayoutView, CertificateMeView) +from .views import ( + get_specialties, + get_badges, + get_certificate, + CertificateView, + CertificateCohortView, + CertificateAcademyView, + LayoutView, + CertificateMeView, +) -app_name = 'certificate' +app_name = "certificate" urlpatterns = [ - path('specialty', get_specialties), - path('badge', get_badges), - path('academy/layout', LayoutView.as_view()), - path('token/<str:token>/', get_certificate), - path('cohort/<int:cohort_id>/student/<int:student_id>', 
CertificateView.as_view(), name='cohort_id_student_id'), - path('cohort/<int:cohort_id>', CertificateCohortView.as_view(), name='cohort_id'), - path('', CertificateAcademyView.as_view(), name='root'), - path('me', CertificateMeView.as_view(), name='me'), + path("specialty", get_specialties), + path("badge", get_badges), + path("academy/layout", LayoutView.as_view()), + path("token/<str:token>/", get_certificate), + path("cohort/<int:cohort_id>/student/<int:student_id>", CertificateView.as_view(), name="cohort_id_student_id"), + path("cohort/<int:cohort_id>", CertificateCohortView.as_view(), name="cohort_id"), + path("", CertificateAcademyView.as_view(), name="root"), + path("me", CertificateMeView.as_view(), name="me"), ] diff --git a/breathecode/certificate/views.py b/breathecode/certificate/views.py index 8c5de99a3..090b2ceb2 100644 --- a/breathecode/certificate/views.py +++ b/breathecode/certificate/views.py @@ -23,7 +23,7 @@ logger = logging.getLogger(__name__) -@api_view(['GET']) +@api_view(["GET"]) @permission_classes([AllowAny]) def get_specialties(request): items = Specialty.objects.all() @@ -31,7 +31,7 @@ def get_specialties(request): return Response(serializer.data, status=status.HTTP_200_OK) -@api_view(['GET']) +@api_view(["GET"]) @permission_classes([AllowAny]) def get_badges(request): items = Badge.objects.all() @@ -39,12 +39,12 @@ def get_badges(request): return Response(serializer.data, status=status.HTTP_200_OK) -@api_view(['GET']) +@api_view(["GET"]) @permission_classes([AllowAny]) def get_certificate(request, token): item = UserSpecialty.objects.filter(token=token).first() if item is None: - raise NotFound('Certificate not found') + raise NotFound("Certificate not found") serializer = UserSpecialtySerializer(item) return Response(serializer.data, status=status.HTTP_200_OK) @@ -55,7 +55,7 @@ class LayoutView(APIView): List all snippets, or create a new snippet. """ - @capable_of('read_layout') + @capable_of("read_layout") def get(self, request, academy_id=None): layouts = LayoutDesign.objects.filter(academy__id=academy_id) @@ -69,32 +69,32 @@ class CertificateView(APIView): List all snippets, or create a new snippet. 
""" - @capable_of('read_certificate') + @capable_of("read_certificate") def get(self, request, cohort_id, student_id, academy_id=None): - cert = UserSpecialty.objects.filter(cohort__id=cohort_id, user__id=student_id, - cohort__academy__id=academy_id).first() + cert = UserSpecialty.objects.filter( + cohort__id=cohort_id, user__id=student_id, cohort__academy__id=academy_id + ).first() if cert is None: - raise serializers.ValidationError('Certificate not found', code=404) + raise serializers.ValidationError("Certificate not found", code=404) serializer = UserSpecialtySerializer(cert, many=False) return Response(serializer.data, status=status.HTTP_200_OK) - @capable_of('crud_certificate') + @capable_of("crud_certificate") def post(self, request, cohort_id, student_id, academy_id=None): layout_slug = None - if 'layout_slug' in request.data: - layout_slug = request.data['layout_slug'] + if "layout_slug" in request.data: + layout_slug = request.data["layout_slug"] - cu = CohortUser.objects.filter(cohort__id=cohort_id, - user__id=student_id, - role='STUDENT', - cohort__academy__id=academy_id).first() + cu = CohortUser.objects.filter( + cohort__id=cohort_id, user__id=student_id, role="STUDENT", cohort__academy__id=academy_id + ).first() if cu is None: - raise ValidationException('Student not found for this cohort', code=404, slug='student-not-found') + raise ValidationException("Student not found for this cohort", code=404, slug="student-not-found") cert = generate_certificate(cu.user, cu.cohort, layout_slug) serializer = UserSpecialtySerializer(cert, many=False) @@ -106,41 +106,43 @@ class CertificateCohortView(APIView): List all snippets, or create a new snippet. """ - @capable_of('read_certificate') + @capable_of("read_certificate") def get(self, request, cohort_id, academy_id=None): - cert = UserSpecialty.objects.filter(cohort__id=cohort_id, - cohort__academy__id=academy_id).order_by('-created_at') + cert = UserSpecialty.objects.filter(cohort__id=cohort_id, cohort__academy__id=academy_id).order_by( + "-created_at" + ) serializer = UserSpecialtySerializer(cert, many=True) return Response(serializer.data, status=status.HTTP_200_OK) - @capable_of('crud_certificate') + @capable_of("crud_certificate") def post(self, request, cohort_id, academy_id=None): layout_slug = None - if 'layout_slug' in request.data: - layout_slug = request.data['layout_slug'] + if "layout_slug" in request.data: + layout_slug = request.data["layout_slug"] - cohort_users = CohortUser.objects.filter(cohort__id=cohort_id, role='STUDENT', cohort__academy__id=academy_id) + cohort_users = CohortUser.objects.filter(cohort__id=cohort_id, role="STUDENT", cohort__academy__id=academy_id) all_certs = [] cohort__users = [] if cohort_users.count() == 0: - raise ValidationException('There are no users with STUDENT role in this cohort', - code=400, - slug='no-user-with-student-role') + raise ValidationException( + "There are no users with STUDENT role in this cohort", code=400, slug="no-user-with-student-role" + ) for cohort_user in cohort_users: cohort = cohort_user.cohort - if cohort.stage != 'ENDED' or cohort.never_ends != False: - raise ValidationException('Cohort stage must be ENDED or never ends', - code=400, - slug='cohort-stage-must-be-ended') + if cohort.stage != "ENDED" or cohort.never_ends != False: + raise ValidationException( + "Cohort stage must be ENDED or never ends", code=400, slug="cohort-stage-must-be-ended" + ) if not cohort.syllabus_version: raise ValidationException( - f'The cohort has no syllabus assigned, please set a 
syllabus for cohort: {cohort.name}', - slug='cohort-has-no-syllabus-version-assigned') + f"The cohort has no syllabus assigned, please set a syllabus for cohort: {cohort.name}", + slug="cohort-has-no-syllabus-version-assigned", + ) else: cohort__users.append(cohort_user) @@ -158,20 +160,20 @@ class CertificateAcademyView(APIView, HeaderLimitOffsetPagination, GenerateLooku List all snippets, or create a new snippet. """ - @capable_of('read_certificate') + @capable_of("read_certificate") def get(self, request, academy_id=None): items = UserSpecialty.objects.filter(cohort__academy__id=academy_id) - like = request.GET.get('like', None) - if like is not None and like != '': - items = query_like_by_full_name(like=like, items=items, prefix='user__') + like = request.GET.get("like", None) + if like is not None and like != "": + items = query_like_by_full_name(like=like, items=items, prefix="user__") if items.count() == 0: items = UserSpecialty.objects.filter(cohort__academy__id=academy_id) - items = query_like_by_full_name(like=like, items=items, prefix='user__profileacademy__') + items = query_like_by_full_name(like=like, items=items, prefix="user__profileacademy__") - sort = request.GET.get('sort', None) - if sort is None or sort == '': - sort = '-created_at' + sort = request.GET.get("sort", None) + if sort is None or sort == "": + sort = "-created_at" items = items.order_by(sort) page = self.paginate_queryset(items, request) @@ -181,20 +183,20 @@ def get(self, request, academy_id=None): else: return Response(serializer.data, status=status.HTTP_200_OK) - @capable_of('crud_certificate') + @capable_of("crud_certificate") def delete(self, request, cohort_id=None, user_id=None, academy_id=None): - lookups = self.generate_lookups(request, many_fields=['id']) + lookups = self.generate_lookups(request, many_fields=["id"]) try: - ids = lookups['id__in'] + ids = lookups["id__in"] except Exception: - raise ValidationException('User specialties ids were not provided', 404, slug='missing_ids') + raise ValidationException("User specialties ids were not provided", 404, slug="missing_ids") if lookups and (user_id or cohort_id): raise ValidationException( - 'user_id or cohort_id was provided in url ' - 'in bulk mode request, use querystring style instead', - code=400) + "user_id or cohort_id was provided in url " "in bulk mode request, use querystring style instead", + code=400, + ) elif lookups: items = UserSpecialty.objects.filter(**lookups, academy__id=academy_id) @@ -203,55 +205,56 @@ def delete(self, request, cohort_id=None, user_id=None, academy_id=None): raise ValidationException( f"No user specialties for deletion were found with following id: {','.join(ids)}", code=404, - slug='specialties_not_found') + slug="specialties_not_found", + ) for item in items: item.delete() return Response(None, status=status.HTTP_204_NO_CONTENT) - @capable_of('crud_certificate') + @capable_of("crud_certificate") def post(self, request, academy_id=None): data = request.data if isinstance(request.data, list) else [request.data] cohort_users = [] if len(data) > 0: for items in data: - cohort__slug = items.get('cohort_slug') - user__id = items.get('user_id') - cohort_user = CohortUser.objects.filter(cohort__slug=cohort__slug, - user_id=user__id, - role='STUDENT', - cohort__academy__id=academy_id).first() + cohort__slug = items.get("cohort_slug") + user__id = items.get("user_id") + cohort_user = CohortUser.objects.filter( + cohort__slug=cohort__slug, user_id=user__id, role="STUDENT", cohort__academy__id=academy_id + ).first() 
if cohort_user is not None: cohort_users.append(cohort_user) else: student = ProfileAcademy.objects.filter(user_id=user__id).first() if student is None: - raise ValidationException(f'User with id {str(user__id)} not found', 404) + raise ValidationException(f"User with id {str(user__id)} not found", 404) raise ValidationException( - f'No student with id {str(student.first_name)} {str(student.last_name)} was found for cohort {str(cohort__slug)}', + f"No student with id {str(student.first_name)} {str(student.last_name)} was found for cohort {str(cohort__slug)}", code=404, - slug='student-not-found-in-cohort') + slug="student-not-found-in-cohort", + ) else: - raise ValidationException('You did not send anything to reattempts') + raise ValidationException("You did not send anything to reattempts") certs = [] for cu in cohort_users: - cert = UserSpecialty.objects.filter(cohort__id=cu.cohort_id, - user__id=cu.user_id, - cohort__academy__id=academy_id).first() + cert = UserSpecialty.objects.filter( + cohort__id=cu.cohort_id, user__id=cu.user_id, cohort__academy__id=academy_id + ).first() if cert is not None: - cert.status = 'PENDING' + cert.status = "PENDING" cert.save() certs.append(cert) else: - raise ValidationException('There is no certificate for this student and cohort', - code=404, - slug='no-user-specialty') + raise ValidationException( + "There is no certificate for this student and cohort", code=404, slug="no-user-specialty" + ) - layout = cert.layout.slug if cert.layout is not None else 'default' + layout = cert.layout.slug if cert.layout is not None else "default" async_generate_certificate.delay(cu.cohort_id, cu.user_id, layout=layout) serializer = UserSpecialtySerializer(certs, many=True) @@ -260,13 +263,13 @@ def post(self, request, academy_id=None): class CertificateMeView(APIView, HeaderLimitOffsetPagination, GenerateLookupsMixin): - extensions = APIViewExtensions(sort='-created_at', paginate=True) + extensions = APIViewExtensions(sort="-created_at", paginate=True) - @has_permission('get_my_certificate') + @has_permission("get_my_certificate") def get(self, request): handler = self.extensions(request) - items = UserSpecialty.objects.filter(user=request.user, status='PERSISTED') + items = UserSpecialty.objects.filter(user=request.user, status="PERSISTED") items = handler.queryset(items) serializer = UserSpecialtySerializer(items, many=True) diff --git a/breathecode/commons/actions.py b/breathecode/commons/actions.py index cd165a021..038f7957c 100644 --- a/breathecode/commons/actions.py +++ b/breathecode/commons/actions.py @@ -10,22 +10,22 @@ logger = logging.getLogger(__name__) -__all__ = ['clean_cache'] +__all__ = ["clean_cache"] def is_test(): """Get the environment. It fix a error caused by pytest or python.""" - env = os.getenv('ENV') - if env is None and 'ENV' in os.environ: - env = os.environ['ENV'] + env = os.getenv("ENV") + if env is None and "ENV" in os.environ: + env = os.environ["ENV"] - return env == 'test' + return env == "test" @functools.lru_cache(maxsize=1) def is_output_enable(): # Set to True to enable output within the cache and it's used for testing purposes. 
- return os.getenv('HIDE_CACHE_LOG', '0') in ['0', 'false', 'False', 'f'] + return os.getenv("HIDE_CACHE_LOG", "0") in ["0", "false", "False", "f"] def clean_cache(model_cls): @@ -36,16 +36,16 @@ def clean_cache(model_cls): if not have_descriptor and not is_a_dependency: if is_output_enable(): - logger.warning(f'Cache not implemented for {model_cls.__name__}, skipping') + logger.warning(f"Cache not implemented for {model_cls.__name__}, skipping") return - key = model_cls.__module__ + '.' + model_cls.__name__ + key = model_cls.__module__ + "." + model_cls.__name__ # build a descriptor if not have_descriptor and is_a_dependency: if is_test() is False: - conn = get_redis_connection('default') - my_lock = Lock(conn, f'cache:descriptor:{key}', timeout=30, blocking_timeout=30) + conn = get_redis_connection("default") + my_lock = Lock(conn, f"cache:descriptor:{key}", timeout=30, blocking_timeout=30) if my_lock.acquire(blocking=True): @@ -60,7 +60,7 @@ class DepCache(Cache): else: if is_output_enable(): - logger.error(f'Could not acquire lock for {key} on get_or_create, operation timed out.') + logger.error(f"Could not acquire lock for {key} on get_or_create, operation timed out.") return else: @@ -69,5 +69,5 @@ class DepCache(Cache): model = model_cls is_dependency = True - key = model_cls.__module__ + '.' + model_cls.__name__ + key = model_cls.__module__ + "." + model_cls.__name__ clean_task.apply_async(args=[key], countdown=0) diff --git a/breathecode/commons/apps.py b/breathecode/commons/apps.py index 5b35ad013..e03ddeea7 100644 --- a/breathecode/commons/apps.py +++ b/breathecode/commons/apps.py @@ -2,8 +2,8 @@ class CommonsConfig(AppConfig): - default_auto_field = 'django.db.models.BigAutoField' - name = 'breathecode.commons' + default_auto_field = "django.db.models.BigAutoField" + name = "breathecode.commons" def ready(self): from . 
import receivers # noqa: F401 diff --git a/breathecode/commons/management/commands/clean_cache.py b/breathecode/commons/management/commands/clean_cache.py index fa87fa306..097feec18 100644 --- a/breathecode/commons/management/commands/clean_cache.py +++ b/breathecode/commons/management/commands/clean_cache.py @@ -6,14 +6,14 @@ class Command(BaseCommand): - help = 'Clean the cache' + help = "Clean the cache" def handle(self, *args, **options): - warnings.warn('Execute this command can degrade the performance of the application', stacklevel=3) + warnings.warn("Execute this command can degrade the performance of the application", stacklevel=3) - self.stdout.write(self.style.WARNING('The cache will be cleaned in 3 seconds, press Ctrl+C to cancel')) + self.stdout.write(self.style.WARNING("The cache will be cleaned in 3 seconds, press Ctrl+C to cancel")) time.sleep(3) - cache.delete_pattern('*') - self.stdout.write(self.style.SUCCESS('Cache cleaned')) + cache.delete_pattern("*") + self.stdout.write(self.style.SUCCESS("Cache cleaned")) diff --git a/breathecode/commons/migrations/0001_initial.py b/breathecode/commons/migrations/0001_initial.py index 23e2b8b30..3b0d64bf8 100644 --- a/breathecode/commons/migrations/0001_initial.py +++ b/breathecode/commons/migrations/0001_initial.py @@ -11,25 +11,34 @@ class Migration(migrations.Migration): operations = [ migrations.CreateModel( - name='TaskManager', + name="TaskManager", fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('current_page', models.IntegerField(blank=True, default=0, null=True)), - ('total_pages', models.IntegerField(blank=True, default=0, null=True)), - ('task_module', models.CharField(max_length=200)), - ('task_name', models.CharField(max_length=200)), - ('reverse_module', models.CharField(blank=True, max_length=200, null=True)), - ('reverse_name', models.CharField(blank=True, max_length=200, null=True)), - ('arguments', models.JSONField(blank=True, default=dict, null=True)), - ('status', - models.CharField(choices=[('PENDING', 'Pending'), ('DONE', 'Done'), ('CANCELLED', 'Cancelled'), - ('REVERSED', 'Reversed'), ('PAUSED', 'Paused')], - default='PENDING', - max_length=20)), - ('killed', models.BooleanField(default=False)), - ('last_run', models.DateTimeField()), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), + ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("current_page", models.IntegerField(blank=True, default=0, null=True)), + ("total_pages", models.IntegerField(blank=True, default=0, null=True)), + ("task_module", models.CharField(max_length=200)), + ("task_name", models.CharField(max_length=200)), + ("reverse_module", models.CharField(blank=True, max_length=200, null=True)), + ("reverse_name", models.CharField(blank=True, max_length=200, null=True)), + ("arguments", models.JSONField(blank=True, default=dict, null=True)), + ( + "status", + models.CharField( + choices=[ + ("PENDING", "Pending"), + ("DONE", "Done"), + ("CANCELLED", "Cancelled"), + ("REVERSED", "Reversed"), + ("PAUSED", "Paused"), + ], + default="PENDING", + max_length=20, + ), + ), + ("killed", models.BooleanField(default=False)), + ("last_run", models.DateTimeField()), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), ], ), ] diff --git a/breathecode/commons/migrations/0002_auto_20230811_0645.py 
b/breathecode/commons/migrations/0002_auto_20230811_0645.py index ee22f233b..ec69679a8 100644 --- a/breathecode/commons/migrations/0002_auto_20230811_0645.py +++ b/breathecode/commons/migrations/0002_auto_20230811_0645.py @@ -6,22 +6,30 @@ class Migration(migrations.Migration): dependencies = [ - ('commons', '0001_initial'), + ("commons", "0001_initial"), ] operations = [ migrations.AddField( - model_name='taskmanager', - name='status_message', + model_name="taskmanager", + name="status_message", field=models.TextField(blank=True, max_length=255, null=True), ), migrations.AlterField( - model_name='taskmanager', - name='status', - field=models.CharField(choices=[('PENDING', 'Pending'), ('DONE', 'Done'), ('CANCELLED', 'Cancelled'), - ('REVERSED', 'Reversed'), ('PAUSED', 'Paused'), ('ABORTED', 'Aborted'), - ('ERROR', 'Error')], - default='PENDING', - max_length=20), + model_name="taskmanager", + name="status", + field=models.CharField( + choices=[ + ("PENDING", "Pending"), + ("DONE", "Done"), + ("CANCELLED", "Cancelled"), + ("REVERSED", "Reversed"), + ("PAUSED", "Paused"), + ("ABORTED", "Aborted"), + ("ERROR", "Error"), + ], + default="PENDING", + max_length=20, + ), ), ] diff --git a/breathecode/commons/migrations/0003_taskmanager_attemps.py b/breathecode/commons/migrations/0003_taskmanager_attemps.py index a5df535de..2033e8a21 100644 --- a/breathecode/commons/migrations/0003_taskmanager_attemps.py +++ b/breathecode/commons/migrations/0003_taskmanager_attemps.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('commons', '0002_auto_20230811_0645'), + ("commons", "0002_auto_20230811_0645"), ] operations = [ migrations.AddField( - model_name='taskmanager', - name='attemps', + model_name="taskmanager", + name="attemps", field=models.IntegerField(default=1), ), ] diff --git a/breathecode/commons/migrations/0004_taskwatcher.py b/breathecode/commons/migrations/0004_taskwatcher.py index 66479e2c2..0037cf920 100644 --- a/breathecode/commons/migrations/0004_taskwatcher.py +++ b/breathecode/commons/migrations/0004_taskwatcher.py @@ -9,25 +9,31 @@ class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ('commons', '0003_taskmanager_attemps'), + ("commons", "0003_taskmanager_attemps"), ] operations = [ migrations.CreateModel( - name='TaskWatcher', + name="TaskWatcher", fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('email', models.EmailField(blank=True, max_length=254, null=True)), - ('on_error', models.BooleanField(default=True)), - ('on_success', models.BooleanField(default=True)), - ('watch_progress', models.BooleanField(default=False)), - ('tasks', - models.ManyToManyField(blank=True, - help_text='Notify for the progress of these tasks', - related_name='watchers', - to='commons.TaskManager')), - ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, - to=settings.AUTH_USER_MODEL)), + ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("email", models.EmailField(blank=True, max_length=254, null=True)), + ("on_error", models.BooleanField(default=True)), + ("on_success", models.BooleanField(default=True)), + ("watch_progress", models.BooleanField(default=False)), + ( + "tasks", + models.ManyToManyField( + blank=True, + help_text="Notify for the progress of these tasks", + related_name="watchers", + to="commons.TaskManager", + ), + ), + ( + "user", + 
models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL), + ), ], ), ] diff --git a/breathecode/commons/migrations/0005_rename_attemps_taskmanager_attempts.py b/breathecode/commons/migrations/0005_rename_attemps_taskmanager_attempts.py index b3cfb870b..ba5f7e651 100644 --- a/breathecode/commons/migrations/0005_rename_attemps_taskmanager_attempts.py +++ b/breathecode/commons/migrations/0005_rename_attemps_taskmanager_attempts.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('commons', '0004_taskwatcher'), + ("commons", "0004_taskwatcher"), ] operations = [ migrations.RenameField( - model_name='taskmanager', - old_name='attemps', - new_name='attempts', + model_name="taskmanager", + old_name="attemps", + new_name="attempts", ), ] diff --git a/breathecode/commons/migrations/0006_auto_20231209_0707.py b/breathecode/commons/migrations/0006_auto_20231209_0707.py index 53171bce1..5737758e4 100644 --- a/breathecode/commons/migrations/0006_auto_20231209_0707.py +++ b/breathecode/commons/migrations/0006_auto_20231209_0707.py @@ -6,27 +6,36 @@ class Migration(migrations.Migration): dependencies = [ - ('commons', '0005_rename_attemps_taskmanager_attempts'), + ("commons", "0005_rename_attemps_taskmanager_attempts"), ] operations = [ migrations.AddField( - model_name='taskmanager', - name='task_id', - field=models.CharField(default='', max_length=36), + model_name="taskmanager", + name="task_id", + field=models.CharField(default="", max_length=36), ), migrations.AlterField( - model_name='taskmanager', - name='status', - field=models.CharField(choices=[('PENDING', 'Pending'), ('DONE', 'Done'), ('CANCELLED', 'Cancelled'), - ('REVERSED', 'Reversed'), ('PAUSED', 'Paused'), ('ABORTED', 'Aborted'), - ('ERROR', 'Error'), ('SCHEDULED', 'Scheduled')], - default='PENDING', - max_length=20), + model_name="taskmanager", + name="status", + field=models.CharField( + choices=[ + ("PENDING", "Pending"), + ("DONE", "Done"), + ("CANCELLED", "Cancelled"), + ("REVERSED", "Reversed"), + ("PAUSED", "Paused"), + ("ABORTED", "Aborted"), + ("ERROR", "Error"), + ("SCHEDULED", "Scheduled"), + ], + default="PENDING", + max_length=20, + ), ), migrations.AlterField( - model_name='taskmanager', - name='total_pages', + model_name="taskmanager", + name="total_pages", field=models.IntegerField(blank=True, default=1, null=True), ), ] diff --git a/breathecode/commons/migrations/0007_taskmanager_exception_module_and_more.py b/breathecode/commons/migrations/0007_taskmanager_exception_module_and_more.py index 234edb92e..9dfd1863e 100644 --- a/breathecode/commons/migrations/0007_taskmanager_exception_module_and_more.py +++ b/breathecode/commons/migrations/0007_taskmanager_exception_module_and_more.py @@ -6,18 +6,18 @@ class Migration(migrations.Migration): dependencies = [ - ('commons', '0006_auto_20231209_0707'), + ("commons", "0006_auto_20231209_0707"), ] operations = [ migrations.AddField( - model_name='taskmanager', - name='exception_module', + model_name="taskmanager", + name="exception_module", field=models.CharField(blank=True, max_length=200, null=True), ), migrations.AddField( - model_name='taskmanager', - name='exception_name', + model_name="taskmanager", + name="exception_name", field=models.CharField(blank=True, max_length=200, null=True), ), ] diff --git a/breathecode/commons/migrations/0007_taskmanager_started_at_alter_taskmanager_task_id.py b/breathecode/commons/migrations/0007_taskmanager_started_at_alter_taskmanager_task_id.py index cc7838b7e..95c8d8411 
100644 --- a/breathecode/commons/migrations/0007_taskmanager_started_at_alter_taskmanager_task_id.py +++ b/breathecode/commons/migrations/0007_taskmanager_started_at_alter_taskmanager_task_id.py @@ -6,18 +6,18 @@ class Migration(migrations.Migration): dependencies = [ - ('commons', '0006_auto_20231209_0707'), + ("commons", "0006_auto_20231209_0707"), ] operations = [ migrations.AddField( - model_name='taskmanager', - name='started_at', + model_name="taskmanager", + name="started_at", field=models.DateTimeField(blank=True, default=None, null=True), ), migrations.AlterField( - model_name='taskmanager', - name='task_id', - field=models.CharField(blank=True, default='', max_length=36), + model_name="taskmanager", + name="task_id", + field=models.CharField(blank=True, default="", max_length=36), ), ] diff --git a/breathecode/commons/migrations/0008_merge_20240208_1959.py b/breathecode/commons/migrations/0008_merge_20240208_1959.py index 9e4159d30..f161ca3a9 100644 --- a/breathecode/commons/migrations/0008_merge_20240208_1959.py +++ b/breathecode/commons/migrations/0008_merge_20240208_1959.py @@ -6,8 +6,8 @@ class Migration(migrations.Migration): dependencies = [ - ('commons', '0007_taskmanager_exception_module_and_more'), - ('commons', '0007_taskmanager_started_at_alter_taskmanager_task_id'), + ("commons", "0007_taskmanager_exception_module_and_more"), + ("commons", "0007_taskmanager_started_at_alter_taskmanager_task_id"), ] operations = [] diff --git a/breathecode/commons/migrations/0009_remove_taskwatcher_tasks_remove_taskwatcher_user_and_more.py b/breathecode/commons/migrations/0009_remove_taskwatcher_tasks_remove_taskwatcher_user_and_more.py index 59e3e74b6..811b606f4 100644 --- a/breathecode/commons/migrations/0009_remove_taskwatcher_tasks_remove_taskwatcher_user_and_more.py +++ b/breathecode/commons/migrations/0009_remove_taskwatcher_tasks_remove_taskwatcher_user_and_more.py @@ -6,18 +6,22 @@ class Migration(migrations.Migration): dependencies = [ - ('commons', '0008_merge_20240208_1959'), + ("commons", "0008_merge_20240208_1959"), ] operations = [ migrations.RemoveField( - model_name='taskwatcher', - name='tasks', + model_name="taskwatcher", + name="tasks", ), migrations.RemoveField( - model_name='taskwatcher', - name='user', + model_name="taskwatcher", + name="user", + ), + migrations.DeleteModel( + name="TaskManager", + ), + migrations.DeleteModel( + name="TaskWatcher", ), - migrations.DeleteModel(name='TaskManager', ), - migrations.DeleteModel(name='TaskWatcher', ), ] diff --git a/breathecode/commons/receivers.py b/breathecode/commons/receivers.py index 042b8d7c8..6bb3c5213 100644 --- a/breathecode/commons/receivers.py +++ b/breathecode/commons/receivers.py @@ -13,30 +13,30 @@ logger = logging.getLogger(__name__) -ENABLE_LIST_OPTIONS = ['true', '1', 'yes', 'y'] +ENABLE_LIST_OPTIONS = ["true", "1", "yes", "y"] @functools.lru_cache(maxsize=1) def is_cache_enabled(): - return os.getenv('CACHE', '1').lower() in ENABLE_LIST_OPTIONS + return os.getenv("CACHE", "1").lower() in ENABLE_LIST_OPTIONS @receiver(post_save) def on_save(*args: Any, **kwargs: Any): - del kwargs['signal'] + del kwargs["signal"] update_cache.send_robust(*args, **kwargs) @receiver(post_delete) def on_delete(*args: Any, **kwargs: Any): - del kwargs['signal'] + del kwargs["signal"] update_cache.send_robust(*args, **kwargs) @receiver(update_cache) def clean_cache(sender: Type[models.Model], **_: Any): if not is_cache_enabled(): - logger.debug('Cache has been disabled') + logger.debug("Cache has been disabled") return 
actions.clean_cache(sender) diff --git a/breathecode/commons/signals.py b/breathecode/commons/signals.py index ed184bdf6..f15735e86 100644 --- a/breathecode/commons/signals.py +++ b/breathecode/commons/signals.py @@ -2,6 +2,6 @@ from task_manager.django.dispatch import Emisor -emisor = Emisor('breathecode.commons') +emisor = Emisor("breathecode.commons") -update_cache = emisor.signal('update_cache') +update_cache = emisor.signal("update_cache") diff --git a/breathecode/commons/tasks.py b/breathecode/commons/tasks.py index 98547cb77..dbc90acc7 100644 --- a/breathecode/commons/tasks.py +++ b/breathecode/commons/tasks.py @@ -26,15 +26,17 @@ def clean_task(self, key: str, task_manager_id: int): from breathecode.registry import caches as _ # noqa: F811, F401 task_cls = self.task_manager.__class__ - task_cls.objects.filter(status='SCHEDULED', - task_module=self.task_manager.task_module, - task_name=self.task_manager.task_name, - arguments__args__exact=[key], - arguments__args__len=1).exclude(id=task_manager_id).delete() - - unpack = key.split('.') + task_cls.objects.filter( + status="SCHEDULED", + task_module=self.task_manager.task_module, + task_name=self.task_manager.task_name, + arguments__args__exact=[key], + arguments__args__len=1, + ).exclude(id=task_manager_id).delete() + + unpack = key.split(".") model = unpack[-1] - module = '.'.join(unpack[:-1]) + module = ".".join(unpack[:-1]) if module not in MODULES: MODULES[module] = importlib.import_module(module) @@ -43,14 +45,14 @@ def clean_task(self, key: str, task_manager_id: int): model_cls = getattr(module, model) if model_cls not in CACHE_DESCRIPTORS: - raise AbortTask(f'Cache not implemented for {model_cls.__name__}, skipping', log=actions.is_output_enable()) + raise AbortTask(f"Cache not implemented for {model_cls.__name__}, skipping", log=actions.is_output_enable()) cache = CACHE_DESCRIPTORS[model_cls] try: cache.clear() if actions.is_output_enable(): - logger.debug(f'Cache cleaned for {key}') + logger.debug(f"Cache cleaned for {key}") except Exception: - raise RetryTask(f'Could not clean the cache {key}', log=actions.is_output_enable()) + raise RetryTask(f"Could not clean the cache {key}", log=actions.is_output_enable()) diff --git a/breathecode/commons/templatetags/math.py b/breathecode/commons/templatetags/math.py index 41dba110d..fb3fe4a09 100644 --- a/breathecode/commons/templatetags/math.py +++ b/breathecode/commons/templatetags/math.py @@ -18,10 +18,10 @@ def valid_numeric(arg): def handle_float_decimal_combinations(value, arg, operation): if isinstance(value, float) and isinstance(arg, Decimal): - logger.warning('Unsafe operation: {0!r} {1} {2!r}.'.format(value, operation, arg)) + logger.warning("Unsafe operation: {0!r} {1} {2!r}.".format(value, operation, arg)) value = Decimal(str(value)) if isinstance(value, Decimal) and isinstance(arg, float): - logger.warning('Unsafe operation: {0!r} {1} {2!r}.'.format(value, operation, arg)) + logger.warning("Unsafe operation: {0!r} {1} {2!r}.".format(value, operation, arg)) arg = Decimal(str(arg)) return value, arg @@ -30,55 +30,55 @@ def handle_float_decimal_combinations(value, arg, operation): def sub(value, arg): """Subtract the arg from the value.""" try: - nvalue, narg = handle_float_decimal_combinations(valid_numeric(value), valid_numeric(arg), '-') + nvalue, narg = handle_float_decimal_combinations(valid_numeric(value), valid_numeric(arg), "-") return nvalue - narg except (ValueError, TypeError): try: return value - arg except Exception: - return '' + return "" @register.filter def 
mul(value, arg): """Multiply the arg with the value.""" try: - nvalue, narg = handle_float_decimal_combinations(valid_numeric(value), valid_numeric(arg), '*') + nvalue, narg = handle_float_decimal_combinations(valid_numeric(value), valid_numeric(arg), "*") return nvalue * narg except (ValueError, TypeError): try: return value * arg except Exception: - return '' + return "" @register.filter def div(value, arg): """Divide the arg by the value.""" try: - nvalue, narg = handle_float_decimal_combinations(valid_numeric(value), valid_numeric(arg), '/') + nvalue, narg = handle_float_decimal_combinations(valid_numeric(value), valid_numeric(arg), "/") return nvalue / narg except (ValueError, TypeError): try: return value / arg except Exception: - return '' + return "" @register.filter def intdiv(value, arg): """Divide the arg by the value. Use integer (floor) division.""" try: - nvalue, narg = handle_float_decimal_combinations(valid_numeric(value), valid_numeric(arg), '//') + nvalue, narg = handle_float_decimal_combinations(valid_numeric(value), valid_numeric(arg), "//") return nvalue // narg except (ValueError, TypeError): try: return value // arg except Exception: - return '' + return "" -@register.filter(name='abs') +@register.filter(name="abs") def absolute(value): """Return the absolute value.""" try: @@ -87,30 +87,30 @@ def absolute(value): try: return abs(value) except Exception: - return '' + return "" @register.filter def mod(value, arg): """Return the modulo value.""" try: - nvalue, narg = handle_float_decimal_combinations(valid_numeric(value), valid_numeric(arg), '%') + nvalue, narg = handle_float_decimal_combinations(valid_numeric(value), valid_numeric(arg), "%") return nvalue % narg except (ValueError, TypeError): try: return value % arg except Exception: - return '' + return "" -@register.filter(name='addition') +@register.filter(name="addition") def addition(value, arg): """Float-friendly replacement for Django's built-in `add` filter.""" try: - nvalue, narg = handle_float_decimal_combinations(valid_numeric(value), valid_numeric(arg), '+') + nvalue, narg = handle_float_decimal_combinations(valid_numeric(value), valid_numeric(arg), "+") return nvalue + narg except (ValueError, TypeError): try: return value + arg except Exception: - return '' + return "" diff --git a/breathecode/commons/tests/mixins/__init__.py b/breathecode/commons/tests/mixins/__init__.py index 79b0eeb41..2fba03e4c 100644 --- a/breathecode/commons/tests/mixins/__init__.py +++ b/breathecode/commons/tests/mixins/__init__.py @@ -1,4 +1,5 @@ """ commons mixins """ + from .commons_test_case import CommonsTestCase # noqa: F401 diff --git a/breathecode/commons/tests/mixins/commons_test_case.py b/breathecode/commons/tests/mixins/commons_test_case.py index f67000281..701482dd4 100644 --- a/breathecode/commons/tests/mixins/commons_test_case.py +++ b/breathecode/commons/tests/mixins/commons_test_case.py @@ -1,13 +1,21 @@ """ Collections of mixins used to login in authorize microservice """ + from rest_framework.test import APITestCase -from breathecode.tests.mixins import (GenerateModelsMixin, CacheMixin, TokenMixin, GenerateQueriesMixin, DatetimeMixin, - BreathecodeMixin) +from breathecode.tests.mixins import ( + GenerateModelsMixin, + CacheMixin, + TokenMixin, + GenerateQueriesMixin, + DatetimeMixin, + BreathecodeMixin, +) -class CommonsTestCase(APITestCase, GenerateModelsMixin, CacheMixin, TokenMixin, GenerateQueriesMixin, DatetimeMixin, - BreathecodeMixin): +class CommonsTestCase( + APITestCase, GenerateModelsMixin, 
CacheMixin, TokenMixin, GenerateQueriesMixin, DatetimeMixin, BreathecodeMixin +): """MarketingTestCase with auth methods""" def tearDown(self): diff --git a/breathecode/commons/tests/signals/tests_update_cache.py b/breathecode/commons/tests/signals/tests_update_cache.py index 44530059d..cbb88be07 100644 --- a/breathecode/commons/tests/signals/tests_update_cache.py +++ b/breathecode/commons/tests/signals/tests_update_cache.py @@ -27,16 +27,16 @@ from breathecode.utils.cache import CACHE_DESCRIPTORS, Cache # this fix a problem caused by the geniuses at pytest-xdist -random.seed(os.getenv('RANDOM_SEED')) +random.seed(os.getenv("RANDOM_SEED")) cohort_cache = CohortCache() event_cache = EventCache() -CACHE = {'Cohort': CohortCache, 'Event': EventCache} +CACHE = {"Cohort": CohortCache, "Event": EventCache} def to_snake_case(name): - return re.sub(r'(?<!^)(?=[A-Z])', '_', name).lower() + return re.sub(r"(?<!^)(?=[A-Z])", "_", name).lower() @pytest.fixture(autouse=True) @@ -45,37 +45,39 @@ def setup(db): def set_cache(model_name, value): - json_data = json.dumps(value).encode('utf-8') + json_data = json.dumps(value).encode("utf-8") - cache.set(f'{model_name}__', json_data) - cache.set(f'{model_name}__sort=slug&slug=100%2C101%2C110%2C111', json_data) - cache.set(f'{model_name}__id=1', json_data) - cache.set(f'{model_name}__id=2', json_data) + cache.set(f"{model_name}__", json_data) + cache.set(f"{model_name}__sort=slug&slug=100%2C101%2C110%2C111", json_data) + cache.set(f"{model_name}__id=1", json_data) + cache.set(f"{model_name}__id=2", json_data) cache.set( - f'{model_name}__keys', { - f'{model_name}__', - f'{model_name}__id=1', - f'{model_name}__id=2', - f'{model_name}__sort=slug&slug=100%2C101%2C110%2C111', - }) + f"{model_name}__keys", + { + f"{model_name}__", + f"{model_name}__id=1", + f"{model_name}__id=2", + f"{model_name}__sort=slug&slug=100%2C101%2C110%2C111", + }, + ) def assert_cache_is_empty(model_name): assert CACHE[model_name].keys() == set() - assert cache.get(f'{model_name}__') == None - assert cache.get(f'{model_name}__sort=slug&slug=100%2C101%2C110%2C111') == None - assert cache.get(f'{model_name}__id=1') == None - assert cache.get(f'{model_name}__id=2') == None + assert cache.get(f"{model_name}__") == None + assert cache.get(f"{model_name}__sort=slug&slug=100%2C101%2C110%2C111") == None + assert cache.get(f"{model_name}__id=1") == None + assert cache.get(f"{model_name}__id=2") == None -@pytest.mark.parametrize('model_name,key,value', [('Cohort', 'name', 'x'), ('Event', 'title', 'y')]) -@pytest.mark.parametrize('expected', [[], [{'x': 1}], [{'x': 1}, {'x': 2}]]) +@pytest.mark.parametrize("model_name,key,value", [("Cohort", "name", "x"), ("Event", "title", "y")]) +@pytest.mark.parametrize("expected", [[], [{"x": 1}], [{"x": 1}, {"x": 2}]]) def test_create_update_and_delete(bc: Breathecode, enable_signals, model_name, key, value, expected): enable_signals( - 'django.db.models.signals.post_save', - 'django.db.models.signals.post_delete', - 'breathecode.commons.signals.update_cache', + "django.db.models.signals.post_save", + "django.db.models.signals.post_delete", + "breathecode.commons.signals.update_cache", ) attr = to_snake_case(model_name) @@ -106,490 +108,474 @@ def test_cache_defaults(): assert Cache.max_deep == 2 -@pytest.mark.parametrize('cache_cls', [CohortCache, EventCache]) +@pytest.mark.parametrize("cache_cls", [CohortCache, EventCache]) def test_model_cache(cache_cls): assert cache_cls.max_deep == 2 -@pytest.mark.parametrize('cache_cls', [CohortCache, EventCache]) 
+@pytest.mark.parametrize("cache_cls", [CohortCache, EventCache]) def test_cache_as_dependency_is_false(cache_cls): assert cache_cls.is_dependency == False -@pytest.mark.parametrize('cache_cls,value', [ - (CohortCache, set([ - notify_models.SlackChannel, - ])), - (EventCache, set([])), -]) +@pytest.mark.parametrize( + "cache_cls,value", + [ + ( + CohortCache, + set( + [ + notify_models.SlackChannel, + ] + ), + ), + (EventCache, set([])), + ], +) def test_model_cache__one_to_one(cache_cls, value): assert cache_cls.one_to_one == value -@pytest.mark.parametrize('cache_cls,value', [ - (CohortCache, - set([ - admissions_models.Academy, - admissions_models.SyllabusVersion, - admissions_models.SyllabusSchedule, - admissions_models.CohortUser, - admissions_models.CohortTimeSlot, - authenticate_models.UserInvite, - authenticate_models.GitpodUser, - events_models.EventTypeVisibilitySetting, - feedback_models.Review, - feedback_models.Survey, - feedback_models.Answer, - assignment_models.Task, - assignment_models.FinalProject, - marketing_models.Course, - certificate_models.UserSpecialty, - payments_models.CohortSet, - payments_models.CohortSetCohort, - payments_models.PlanFinancing, - payments_models.Subscription, - payments_models.SubscriptionServiceItem, - provisioning_models.ProvisioningProfile, - ])), - (EventCache, - set([ - auth_models.User, - events_models.EventbriteWebhook, - events_models.Organization, - events_models.EventType, - events_models.EventCheckin, - events_models.Venue, - admissions_models.Academy, - feedback_models.Answer, - ])), -]) +@pytest.mark.parametrize( + "cache_cls,value", + [ + ( + CohortCache, + set( + [ + admissions_models.Academy, + admissions_models.SyllabusVersion, + admissions_models.SyllabusSchedule, + admissions_models.CohortUser, + admissions_models.CohortTimeSlot, + authenticate_models.UserInvite, + authenticate_models.GitpodUser, + events_models.EventTypeVisibilitySetting, + feedback_models.Review, + feedback_models.Survey, + feedback_models.Answer, + assignment_models.Task, + assignment_models.FinalProject, + marketing_models.Course, + certificate_models.UserSpecialty, + payments_models.CohortSet, + payments_models.CohortSetCohort, + payments_models.PlanFinancing, + payments_models.Subscription, + payments_models.SubscriptionServiceItem, + provisioning_models.ProvisioningProfile, + ] + ), + ), + ( + EventCache, + set( + [ + auth_models.User, + events_models.EventbriteWebhook, + events_models.Organization, + events_models.EventType, + events_models.EventCheckin, + events_models.Venue, + admissions_models.Academy, + feedback_models.Answer, + ] + ), + ), + ], +) def test_model_cache__many_to_one(cache_cls, value): assert cache_cls.many_to_one == value -@pytest.mark.parametrize('cache_cls,value', [ - (CohortCache, - set([ - payments_models.CohortSet, - payments_models.PlanFinancing, - payments_models.Subscription, - payments_models.SubscriptionServiceItem, - provisioning_models.ProvisioningProfile, - ])), - (EventCache, set()), -]) +@pytest.mark.parametrize( + "cache_cls,value", + [ + ( + CohortCache, + set( + [ + payments_models.CohortSet, + payments_models.PlanFinancing, + payments_models.Subscription, + payments_models.SubscriptionServiceItem, + provisioning_models.ProvisioningProfile, + ] + ), + ), + (EventCache, set()), + ], +) def test_model_cache__many_to_many(cache_cls, value): assert cache_cls.many_to_many == value -@pytest.mark.parametrize('cache_cls', [CohortCache, EventCache]) -@pytest.mark.parametrize('value,params,key', [ - ( - [], - {}, - '', - ), 
- ( - [{ - 'x': 1 - }], - { - 'x': 1 - }, - 'x=1', - ), - ( - [{ - 'x': 1 - }, { - 'y': 2 - }], - { - 'x': 1, - 'y': 2 - }, - 'x=1&y=2', - ), -]) +@pytest.mark.parametrize("cache_cls", [CohortCache, EventCache]) +@pytest.mark.parametrize( + "value,params,key", + [ + ( + [], + {}, + "", + ), + ( + [{"x": 1}], + {"x": 1}, + "x=1", + ), + ( + [{"x": 1}, {"y": 2}], + {"x": 1, "y": 2}, + "x=1&y=2", + ), + ], +) def test_set_cache(cache_cls: Cache, value, params, key): res = cache_cls.set(value, params=params) - serialized = json.dumps(value).encode('utf-8') + serialized = json.dumps(value).encode("utf-8") assert res == { - 'content': serialized, - 'headers': { - 'Content-Type': 'application/json', + "content": serialized, + "headers": { + "Content-Type": "application/json", }, } - keys = f'{cache_cls.model.__name__}__keys' - k = f'{cache_cls.model.__name__}__{key}' + keys = f"{cache_cls.model.__name__}__keys" + k = f"{cache_cls.model.__name__}__{key}" assert sorted(cache.keys()) == sorted([keys, k]) assert cache_cls.keys() == {k} assert cache.get(k) == res -@pytest.mark.parametrize('cache_cls', [CohortCache, EventCache]) -@pytest.mark.parametrize('value,params,key', [ - ( - [], - {}, - '', - ), - ( - [ - { - 'x': 1 - }, - ], - { - 'x': 1 - }, - 'x=1', - ), - ( - [ - { - 'x': 1 - }, - { - 'y': 2 - }, - ], - { - 'x': 1, - 'y': 2 - }, - 'x=1&y=2', - ), -]) +@pytest.mark.parametrize("cache_cls", [CohortCache, EventCache]) +@pytest.mark.parametrize( + "value,params,key", + [ + ( + [], + {}, + "", + ), + ( + [ + {"x": 1}, + ], + {"x": 1}, + "x=1", + ), + ( + [ + {"x": 1}, + {"y": 2}, + ], + {"x": 1, "y": 2}, + "x=1&y=2", + ), + ], +) def test_set_cache_compressed(monkeypatch, cache_cls: Cache, value, params, key): - monkeypatch.setattr('sys.getsizeof', lambda _: (random.randint(10, 1000) * 1024) + 1) + monkeypatch.setattr("sys.getsizeof", lambda _: (random.randint(10, 1000) * 1024) + 1) - res = cache_cls.set(value, params=params, encoding='br') + res = cache_cls.set(value, params=params, encoding="br") - serialized = brotli.compress(json.dumps(value).encode('utf-8')) + serialized = brotli.compress(json.dumps(value).encode("utf-8")) assert res == { - 'content': serialized, - 'headers': { - 'Content-Encoding': 'br', - 'Content-Type': 'application/json', + "content": serialized, + "headers": { + "Content-Encoding": "br", + "Content-Type": "application/json", }, } - keys = f'{cache_cls.model.__name__}__keys' - k = f'{cache_cls.model.__name__}__{key}' + keys = f"{cache_cls.model.__name__}__keys" + k = f"{cache_cls.model.__name__}__{key}" assert sorted(cache.keys()) == sorted([k, keys]) assert cache_cls.keys() == {k} assert cache.get(k) == res -@pytest.mark.parametrize('cache_cls', [CohortCache, EventCache]) -@pytest.mark.parametrize('value,params,key', [ - ( - [], - {}, - '', - ), - ( - [ - { - 'x': 1 - }, - ], - { - 'x': 1 - }, - 'x=1', - ), - ( - [ - { - 'x': 1 - }, - { - 'y': 2 - }, - ], - { - 'x': 1, - 'y': 2 - }, - 'x=1&y=2', - ), -]) -@pytest.mark.parametrize('use_gzip,encoding', [(True, 'br'), (False, 'gzip'), (True, 'gzip')]) +@pytest.mark.parametrize("cache_cls", [CohortCache, EventCache]) +@pytest.mark.parametrize( + "value,params,key", + [ + ( + [], + {}, + "", + ), + ( + [ + {"x": 1}, + ], + {"x": 1}, + "x=1", + ), + ( + [ + {"x": 1}, + {"y": 2}, + ], + {"x": 1, "y": 2}, + "x=1&y=2", + ), + ], +) +@pytest.mark.parametrize("use_gzip,encoding", [(True, "br"), (False, "gzip"), (True, "gzip")]) def test_set_cache_compressed__gzip(monkeypatch, cache_cls: Cache, value, params, key, use_gzip, 
encoding): - monkeypatch.setattr('sys.getsizeof', lambda _: (random.randint(10, 1000) * 1024) + 1) - monkeypatch.setattr('breathecode.utils.cache.use_gzip', lambda: use_gzip) + monkeypatch.setattr("sys.getsizeof", lambda _: (random.randint(10, 1000) * 1024) + 1) + monkeypatch.setattr("breathecode.utils.cache.use_gzip", lambda: use_gzip) res = cache_cls.set(value, params=params, encoding=encoding) - serialized = gzip.compress(json.dumps(value).encode('utf-8')) + serialized = gzip.compress(json.dumps(value).encode("utf-8")) assert res == { - 'content': serialized, - 'headers': { - 'Content-Encoding': 'gzip', - 'Content-Type': 'application/json', + "content": serialized, + "headers": { + "Content-Encoding": "gzip", + "Content-Type": "application/json", }, } - keys = f'{cache_cls.model.__name__}__keys' - k = f'{cache_cls.model.__name__}__{key}' + keys = f"{cache_cls.model.__name__}__keys" + k = f"{cache_cls.model.__name__}__{key}" assert sorted(cache.keys()) == sorted([k, keys]) assert cache_cls.keys() == {k} assert cache.get(k) == res -@pytest.mark.parametrize('cache_cls', [CohortCache, EventCache]) -@pytest.mark.parametrize('value,params,key,headers', [ - ( - [], - {}, - '', - { - 'Content-Type': 'application/json', - }, - ), - ( - [{ - 'x': 1 - }], - { - 'x': 1 - }, - 'x=1', - { - 'Content-Type': 'application/json', - }, - ), - ( - [{ - 'x': 1 - }, { - 'y': 2 - }], - { - 'x': 1, - 'y': 2 - }, - 'x=1&y=2', - { - 'Content-Type': 'application/json', - }, - ), -]) +@pytest.mark.parametrize("cache_cls", [CohortCache, EventCache]) +@pytest.mark.parametrize( + "value,params,key,headers", + [ + ( + [], + {}, + "", + { + "Content-Type": "application/json", + }, + ), + ( + [{"x": 1}], + {"x": 1}, + "x=1", + { + "Content-Type": "application/json", + }, + ), + ( + [{"x": 1}, {"y": 2}], + {"x": 1, "y": 2}, + "x=1&y=2", + { + "Content-Type": "application/json", + }, + ), + ], +) def test_get_cache__with_meta(cache_cls: Cache, value, params, key, headers): - k = f'{cache_cls.model.__name__}__{key}' - serialized = json.dumps(value).encode('utf-8') + k = f"{cache_cls.model.__name__}__{key}" + serialized = json.dumps(value).encode("utf-8") res = { - 'headers': headers, - 'content': serialized, + "headers": headers, + "content": serialized, } cache.set(k, res) assert cache_cls.get(params) == (serialized, headers) -@pytest.mark.parametrize('cache_cls', [CohortCache, EventCache]) -@pytest.mark.parametrize('value,params,key,headers', [ - ( - [], - {}, - '', - { - 'Content-Encoding': 'br', - 'Content-Type': 'application/json', - }, - ), - ( - [ +@pytest.mark.parametrize("cache_cls", [CohortCache, EventCache]) +@pytest.mark.parametrize( + "value,params,key,headers", + [ + ( + [], + {}, + "", { - 'x': 1 + "Content-Encoding": "br", + "Content-Type": "application/json", }, - ], - { - 'x': 1 - }, - 'x=1', - { - 'Content-Encoding': 'br', - 'Content-Type': 'application/json', - }, - ), - ( - [ + ), + ( + [ + {"x": 1}, + ], + {"x": 1}, + "x=1", { - 'x': 1 + "Content-Encoding": "br", + "Content-Type": "application/json", }, + ), + ( + [ + {"x": 1}, + {"y": 2}, + ], + {"x": 1, "y": 2}, + "x=1&y=2", { - 'y': 2 + "Content-Encoding": "br", + "Content-Type": "application/json", }, - ], - { - 'x': 1, - 'y': 2 - }, - 'x=1&y=2', - { - 'Content-Encoding': 'br', - 'Content-Type': 'application/json', - }, - ), -]) + ), + ], +) def test_get_cache_compressed__with_meta(cache_cls: Cache, value, params, key, headers): - k = f'{cache_cls.model.__name__}__{key}' - v = json.dumps(value).encode('utf-8') + k = 
f"{cache_cls.model.__name__}__{key}" + v = json.dumps(value).encode("utf-8") serialized = brotli.compress(v) res = { - 'headers': headers, - 'content': serialized, + "headers": headers, + "content": serialized, } cache.set(k, res) assert cache_cls.get(params) == (serialized, headers) -@pytest.mark.parametrize('cache_cls', [CohortCache, EventCache]) -@pytest.mark.parametrize('value,params,key,headers', [ - ( - [], - {}, - '', - { - 'Content-Type': 'application/json', - 'Content-Encoding': 'gzip', - }, - ), - ( - [ +@pytest.mark.parametrize("cache_cls", [CohortCache, EventCache]) +@pytest.mark.parametrize( + "value,params,key,headers", + [ + ( + [], + {}, + "", { - 'x': 1 + "Content-Type": "application/json", + "Content-Encoding": "gzip", }, - ], - { - 'x': 1 - }, - 'x=1', - { - 'Content-Type': 'application/json', - 'Content-Encoding': 'gzip', - }, - ), - ( - [ + ), + ( + [ + {"x": 1}, + ], + {"x": 1}, + "x=1", { - 'x': 1 + "Content-Type": "application/json", + "Content-Encoding": "gzip", }, + ), + ( + [ + {"x": 1}, + {"y": 2}, + ], + {"x": 1, "y": 2}, + "x=1&y=2", { - 'y': 2 + "Content-Type": "application/json", + "Content-Encoding": "gzip", }, - ], - { - 'x': 1, - 'y': 2 - }, - 'x=1&y=2', - { - 'Content-Type': 'application/json', - 'Content-Encoding': 'gzip', - }, - ), -]) + ), + ], +) def test_get_cache_compressed__with_meta__gzip(monkeypatch, cache_cls: Cache, value, params, key, headers): - monkeypatch.setattr('breathecode.utils.cache.use_gzip', lambda: True) + monkeypatch.setattr("breathecode.utils.cache.use_gzip", lambda: True) - k = f'{cache_cls.model.__name__}__{key}' - v = json.dumps(value).encode('utf-8') + k = f"{cache_cls.model.__name__}__{key}" + v = json.dumps(value).encode("utf-8") serialized = gzip.compress(v) res = { - 'headers': headers, - 'content': serialized, + "headers": headers, + "content": serialized, } cache.set(k, res) assert cache_cls.get(params) == (serialized, headers) -@pytest.mark.parametrize('cache_cls,calls', [ - (CohortCache, [ - 'Cohort__', - 'Answer__', - 'CohortSetCohort__', - 'CohortSet__', - 'CohortTimeSlot__', - 'CohortUser__', - 'Course__', - 'EventTypeVisibilitySetting__', - 'FinalProject__', - 'GitpodUser__', - 'PlanFinancing__', - 'ProvisioningProfile__', - 'SyllabusVersion__', - 'Academy__', - 'Review__', - 'SlackChannel__', - 'SubscriptionServiceItem__', - 'Subscription__', - 'Survey__', - 'SyllabusSchedule__', - 'Task__', - 'UserInvite__', - 'UserSpecialty__', - ]), - (EventCache, [ - 'Event__', - 'Answer__', - 'EventCheckin__', - 'EventType__', - 'User__', - 'EventbriteWebhook__', - 'Organization__', - 'Academy__', - 'Venue__', - ]), -]) -@pytest.mark.parametrize('value', [ - [], +@pytest.mark.parametrize( + "cache_cls,calls", [ - { - 'x': 1 - }, + ( + CohortCache, + [ + "Cohort__", + "Answer__", + "CohortSetCohort__", + "CohortSet__", + "CohortTimeSlot__", + "CohortUser__", + "Course__", + "EventTypeVisibilitySetting__", + "FinalProject__", + "GitpodUser__", + "PlanFinancing__", + "ProvisioningProfile__", + "SyllabusVersion__", + "Academy__", + "Review__", + "SlackChannel__", + "SubscriptionServiceItem__", + "Subscription__", + "Survey__", + "SyllabusSchedule__", + "Task__", + "UserInvite__", + "UserSpecialty__", + ], + ), + ( + EventCache, + [ + "Event__", + "Answer__", + "EventCheckin__", + "EventType__", + "User__", + "EventbriteWebhook__", + "Organization__", + "Academy__", + "Venue__", + ], + ), ], +) +@pytest.mark.parametrize( + "value", [ - { - 'x': 1 - }, - { - 'y': 2 - }, + [], + [ + {"x": 1}, + ], + [ + {"x": 1}, + {"y": 2}, + ], 
], -]) +) def test_delete_calls(faker, monkeypatch, cache_cls: Cache, calls, value): mock = MagicMock() - monkeypatch.setattr('breathecode.settings.CustomMemCache.delete_many', mock) + monkeypatch.setattr("breathecode.settings.CustomMemCache.delete_many", mock) keys = set() for c in calls: - index = f'{c}keys' - keys.add(f'{c}keys') + index = f"{c}keys" + keys.add(f"{c}keys") inner = set() for _ in range(0, 3): - k = f'{c}{faker.slug()}' + k = f"{c}{faker.slug()}" keys.add(k) inner.add(k) cache.set(index, inner) - k = f'{cache_cls.model.__name__}__' - serialized = json.dumps(value).encode('utf-8') + k = f"{cache_cls.model.__name__}__" + serialized = json.dumps(value).encode("utf-8") cache.set(k, serialized) cache_cls.clear() diff --git a/breathecode/cypress/actions.py b/breathecode/cypress/actions.py index bff41d132..932921260 100644 --- a/breathecode/cypress/actions.py +++ b/breathecode/cypress/actions.py @@ -10,30 +10,30 @@ logger = logging.getLogger(__name__) -PROJECT = 'breathecode' +PROJECT = "breathecode" MODULES = [ - 'admissions', + "admissions", # 'assessment', - 'assignments', - 'authenticate', - 'certificate', - 'events', - 'feedback', - 'freelance', - 'marketing', - 'media', - 'monitoring', - 'notify', + "assignments", + "authenticate", + "certificate", + "events", + "feedback", + "freelance", + "marketing", + "media", + "monitoring", + "notify", ] def clean(): - cleaned = ['User'] + cleaned = ["User"] User.objects.all().delete() for forder in MODULES: - path = f'{PROJECT}.{forder}.models' + path = f"{PROJECT}.{forder}.models" module = importlib.import_module(path) models = [] @@ -45,7 +45,7 @@ def clean(): if not issubclass(model_cls, Model): continue - if (hasattr(model_cls, 'Meta') and hasattr(model_cls.Meta, 'abstract')): + if hasattr(model_cls, "Meta") and hasattr(model_cls.Meta, "abstract"): continue models.append(model_cls) @@ -56,7 +56,7 @@ def clean(): if model_name in cleaned: continue - logger.info(f'{model_name} was cleaned') + logger.info(f"{model_name} was cleaned") model_cls.objects.all().delete() cleaned.append(model_name) @@ -65,26 +65,26 @@ def load_roles(): command = Command() command.handle() - logger.info('Roles loaded') + logger.info("Roles loaded") def get_model(model_name): modules = MODULES found = [] - if '.' in model_name: - parts = model_name.split('.') + if "." 
in model_name: + parts = model_name.split(".") if len(parts) != 2: - raise Exception('Bad model name format') + raise Exception("Bad model name format") modules = [parts[0]] model_name = parts[1] - if model_name == 'User': + if model_name == "User": found.append(User) for forder in modules: - path = f'{PROJECT}.{forder}.models' + path = f"{PROJECT}.{forder}.models" module = importlib.import_module(path) if not hasattr(module, model_name): @@ -98,16 +98,16 @@ def get_model(model_name): if not issubclass(model, Model): continue - if (hasattr(model, 'Meta') and hasattr(model.Meta, 'abstract')): + if hasattr(model, "Meta") and hasattr(model.Meta, "abstract"): continue found.append(model) if not found: - raise Exception('Model not found') + raise Exception("Model not found") if len(found) > 1: - raise Exception('Exist many app with the same model name, use `app.model` syntax') + raise Exception("Exist many app with the same model name, use `app.model` syntax") return found[0] @@ -118,11 +118,11 @@ def clean_model(model_name): def generate_model(data): - status = 'done' + status = "done" pk = 0 try: - model_name = data.pop('$model') + model_name = data.pop("$model") model_cls = get_model(model_name) element = mixer.blend(model_cls, **data) pk = element.pk @@ -131,12 +131,12 @@ def generate_model(data): status = str(e) result = { - 'model': model_name, - 'status_text': status, + "model": model_name, + "status_text": status, } if pk: - result['pk'] = pk + result["pk"] = pk return result diff --git a/breathecode/cypress/apps.py b/breathecode/cypress/apps.py index 6607e2874..c3d11da18 100644 --- a/breathecode/cypress/apps.py +++ b/breathecode/cypress/apps.py @@ -2,5 +2,5 @@ class CypressConfig(AppConfig): - default_auto_field = 'django.db.models.BigAutoField' - name = 'breathecode.cypress' + default_auto_field = "django.db.models.BigAutoField" + name = "breathecode.cypress" diff --git a/breathecode/cypress/tests/mixins/__init__.py b/breathecode/cypress/tests/mixins/__init__.py index 3f16d5a41..d647fde55 100644 --- a/breathecode/cypress/tests/mixins/__init__.py +++ b/breathecode/cypress/tests/mixins/__init__.py @@ -1,4 +1,5 @@ """ Mixins """ + from .cypress_test_case import CypressTestCase # noqa: F401 diff --git a/breathecode/cypress/tests/mixins/cypress_test_case.py b/breathecode/cypress/tests/mixins/cypress_test_case.py index 90c30114e..a45085f21 100644 --- a/breathecode/cypress/tests/mixins/cypress_test_case.py +++ b/breathecode/cypress/tests/mixins/cypress_test_case.py @@ -1,20 +1,36 @@ """ Collections of mixins used to login in authorize microservice """ + import os from rest_framework.test import APITestCase -from breathecode.tests.mixins import (CacheMixin, DatetimeMixin, GenerateModelsMixin, GenerateQueriesMixin, - HeadersMixin, ICallMixin, BreathecodeMixin) +from breathecode.tests.mixins import ( + CacheMixin, + DatetimeMixin, + GenerateModelsMixin, + GenerateQueriesMixin, + HeadersMixin, + ICallMixin, + BreathecodeMixin, +) -class CypressTestCase(APITestCase, GenerateModelsMixin, CacheMixin, GenerateQueriesMixin, HeadersMixin, DatetimeMixin, - ICallMixin, BreathecodeMixin): +class CypressTestCase( + APITestCase, + GenerateModelsMixin, + CacheMixin, + GenerateQueriesMixin, + HeadersMixin, + DatetimeMixin, + ICallMixin, + BreathecodeMixin, +): """AdmissionsTestCase with auth methods""" def setUp(self): - os.environ['API_URL'] = 'http://localhost:8000' + os.environ["API_URL"] = "http://localhost:8000" self.generate_queries() self.set_test_instance(self) diff --git 
a/breathecode/cypress/tests/urls/tests_clean.py b/breathecode/cypress/tests/urls/tests_clean.py index 47c127d39..dca7b9fb0 100644 --- a/breathecode/cypress/tests/urls/tests_clean.py +++ b/breathecode/cypress/tests/urls/tests_clean.py @@ -9,13 +9,13 @@ class AcademyEventTestSuite(CypressTestCase): def test_clean__bad_environment__not_exits(self): - if 'ALLOW_UNSAFE_CYPRESS_APP' in os.environ: - del os.environ['ALLOW_UNSAFE_CYPRESS_APP'] + if "ALLOW_UNSAFE_CYPRESS_APP" in os.environ: + del os.environ["ALLOW_UNSAFE_CYPRESS_APP"] - url = reverse_lazy('cypress:clean') + url = reverse_lazy("cypress:clean") response = self.client.delete(url) json = response.json() - expected = {'detail': 'is-not-allowed', 'status_code': 400} + expected = {"detail": "is-not-allowed", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -23,12 +23,12 @@ def test_clean__bad_environment__not_exits(self): self.assertEqual(self.all_form_entry_dict(), []) def test_clean__bad_environment__empty_string(self): - os.environ['ALLOW_UNSAFE_CYPRESS_APP'] = '' + os.environ["ALLOW_UNSAFE_CYPRESS_APP"] = "" - url = reverse_lazy('cypress:clean') + url = reverse_lazy("cypress:clean") response = self.client.delete(url) json = response.json() - expected = {'detail': 'is-not-allowed', 'status_code': 400} + expected = {"detail": "is-not-allowed", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -36,8 +36,8 @@ def test_clean__bad_environment__empty_string(self): self.assertEqual(self.all_form_entry_dict(), []) def test_clean(self): - os.environ['ALLOW_UNSAFE_CYPRESS_APP'] = 'True' - url = reverse_lazy('cypress:clean') + os.environ["ALLOW_UNSAFE_CYPRESS_APP"] = "True" + url = reverse_lazy("cypress:clean") self.generate_models(academy=True, form_entry=True) response = self.client.delete(url) diff --git a/breathecode/cypress/tests/urls/tests_clean_model.py b/breathecode/cypress/tests/urls/tests_clean_model.py index aa9edf286..8d4f64965 100644 --- a/breathecode/cypress/tests/urls/tests_clean_model.py +++ b/breathecode/cypress/tests/urls/tests_clean_model.py @@ -9,13 +9,13 @@ class AcademyEventTestSuite(CypressTestCase): def test_clean_model__bad_environment__not_exits(self): - if 'ALLOW_UNSAFE_CYPRESS_APP' in os.environ: - del os.environ['ALLOW_UNSAFE_CYPRESS_APP'] + if "ALLOW_UNSAFE_CYPRESS_APP" in os.environ: + del os.environ["ALLOW_UNSAFE_CYPRESS_APP"] - url = reverse_lazy('cypress:clean_model', kwargs={'model_name': 'TheyKilledKenny'}) + url = reverse_lazy("cypress:clean_model", kwargs={"model_name": "TheyKilledKenny"}) response = self.client.delete(url) json = response.json() - expected = {'detail': 'is-not-allowed', 'status_code': 400} + expected = {"detail": "is-not-allowed", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -23,12 +23,12 @@ def test_clean_model__bad_environment__not_exits(self): self.assertEqual(self.all_form_entry_dict(), []) def test_clean_model__bad_environment__empty_string(self): - os.environ['ALLOW_UNSAFE_CYPRESS_APP'] = '' + os.environ["ALLOW_UNSAFE_CYPRESS_APP"] = "" - url = reverse_lazy('cypress:clean_model', kwargs={'model_name': 'TheyKilledKenny'}) + url = reverse_lazy("cypress:clean_model", kwargs={"model_name": "TheyKilledKenny"}) response = self.client.delete(url) json = response.json() - expected = {'detail': 'is-not-allowed', 'status_code': 400} + expected = {"detail": 
"is-not-allowed", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -36,46 +36,46 @@ def test_clean_model__bad_environment__empty_string(self): self.assertEqual(self.all_form_entry_dict(), []) def test_clean_model__invalid_model(self): - os.environ['ALLOW_UNSAFE_CYPRESS_APP'] = 'True' - url = reverse_lazy('cypress:clean_model', kwargs={'model_name': 'TheyKilledKenny'}) + os.environ["ALLOW_UNSAFE_CYPRESS_APP"] = "True" + url = reverse_lazy("cypress:clean_model", kwargs={"model_name": "TheyKilledKenny"}) self.generate_models(academy=True, form_entry=True) response = self.client.delete(url) json = response.json() - expected = {'detail': 'model-not-exits', 'status_code': 404} + expected = {"detail": "model-not-exits", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) def test_clean_model__delete_academy__model_name_in_many_apps(self): - os.environ['ALLOW_UNSAFE_CYPRESS_APP'] = 'True' - url = reverse_lazy('cypress:clean_model', kwargs={'model_name': 'Academy'}) + os.environ["ALLOW_UNSAFE_CYPRESS_APP"] = "True" + url = reverse_lazy("cypress:clean_model", kwargs={"model_name": "Academy"}) model = self.generate_models(academy=True) response = self.client.delete(url) json = response.json() - expected = {'detail': 'many-models-with-the-same-name', 'status_code': 404} + expected = {"detail": "many-models-with-the-same-name", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - self.assertEqual(self.all_academy_dict(), [{**self.model_to_dict(model, 'academy')}]) + self.assertEqual(self.all_academy_dict(), [{**self.model_to_dict(model, "academy")}]) def test_clean_model__delete_academy__bad_format(self): - os.environ['ALLOW_UNSAFE_CYPRESS_APP'] = 'True' - url = reverse_lazy('cypress:clean_model', kwargs={'model_name': 'breathecode.admissions.Academy'}) + os.environ["ALLOW_UNSAFE_CYPRESS_APP"] = "True" + url = reverse_lazy("cypress:clean_model", kwargs={"model_name": "breathecode.admissions.Academy"}) model = self.generate_models(academy=True) response = self.client.delete(url) json = response.json() - expected = {'detail': 'bad-model-name-format', 'status_code': 404} + expected = {"detail": "bad-model-name-format", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - self.assertEqual(self.all_academy_dict(), [{**self.model_to_dict(model, 'academy')}]) + self.assertEqual(self.all_academy_dict(), [{**self.model_to_dict(model, "academy")}]) def test_clean_model__delete_academy(self): - os.environ['ALLOW_UNSAFE_CYPRESS_APP'] = 'True' - url = reverse_lazy('cypress:clean_model', kwargs={'model_name': 'admissions.Academy'}) + os.environ["ALLOW_UNSAFE_CYPRESS_APP"] = "True" + url = reverse_lazy("cypress:clean_model", kwargs={"model_name": "admissions.Academy"}) self.generate_models(academy=True) response = self.client.delete(url) @@ -83,8 +83,8 @@ def test_clean_model__delete_academy(self): self.assertEqual(self.all_academy_dict(), []) def test_clean_model__delete_user(self): - os.environ['ALLOW_UNSAFE_CYPRESS_APP'] = 'True' - url = reverse_lazy('cypress:clean_model', kwargs={'model_name': 'User'}) + os.environ["ALLOW_UNSAFE_CYPRESS_APP"] = "True" + url = reverse_lazy("cypress:clean_model", kwargs={"model_name": "User"}) self.generate_models(user=True) response = self.client.delete(url) diff --git 
a/breathecode/cypress/tests/urls/tests_load_roles.py b/breathecode/cypress/tests/urls/tests_load_roles.py index 712b35cac..fc0b11920 100644 --- a/breathecode/cypress/tests/urls/tests_load_roles.py +++ b/breathecode/cypress/tests/urls/tests_load_roles.py @@ -8,410 +8,299 @@ from ..mixins import CypressTestCase CAPABILITIES = [ - { - 'slug': 'read_my_academy', - 'description': 'Read your academy information' - }, - { - 'slug': 'crud_my_academy', - 'description': 'Read, or update your academy information (very high level, almost the academy admin)' - }, - { - 'slug': 'crud_member', - 'description': 'Create, update or delete academy members (very high level, almost the academy admin)' - }, - { - 'slug': 'read_member', - 'description': 'Read academy staff member information' - }, - { - 'slug': 'crud_student', - 'description': 'Create, update or delete students' - }, - { - 'slug': 'read_student', - 'description': 'Read student information' - }, - { - 'slug': 'read_invite', - 'description': 'Read invites from users' - }, - { - 'slug': 'read_assignment', - 'description': 'Read assigment information' - }, - { - 'slug': 'read_assignment_sensitive_details', - 'description': - 'The mentor in residence is allowed to see aditional info about the task, like the "delivery url"' - }, - { - 'slug': 'read_shortlink', - 'description': 'Access the list of marketing shortlinks' - }, - { - 'slug': 'crud_shortlink', - 'description': 'Create, update and delete marketing short links' - }, - { - 'slug': 'crud_assignment', - 'description': 'Create, update or delete a assigment' - }, - { - 'slug': 'task_delivery_details', - 'description': 'Get delivery URL for a task, that url can be sent to students for delivery' - }, - { - 'slug': 'read_certificate', - 'description': 'List and read all academy certificates' - }, - { - 'slug': 'crud_certificate', - 'description': 'Create, update or delete student certificates' - }, - { - 'slug': 'read_layout', - 'description': 'Read layouts to generate new certificates' - }, - { - 'slug': 'read_syllabus', - 'description': 'List and read syllabus information' - }, - { - 'slug': 'crud_syllabus', - 'description': 'Create, update or delete syllabus versions' - }, - { - 'slug': 'read_event', - 'description': 'List and retrieve event information' - }, - { - 'slug': 'crud_event', - 'description': 'Create, update or delete event information' - }, - { - 'slug': 'read_all_cohort', - 'description': 'List all the cohorts or a single cohort information' - }, - { - 'slug': 'crud_cohort', - 'description': 'Create, update or delete cohort info' - }, - { - 'slug': 'read_eventcheckin', - 'description': 'List and read all the event_checkins' - }, - { - 'slug': 'read_survey', - 'description': 'List all the nps answers' - }, - { - 'slug': 'crud_survey', - 'description': 'Create, update or delete surveys' - }, - { - 'slug': 'read_nps_answers', - 'description': 'List all the nps answers' - }, - { - 'slug': 'read_lead', - 'description': 'List all the leads' - }, - { - 'slug': 'read_won_lead', - 'description': 'List all the won leads' - }, - { - 'slug': 'crud_lead', - 'description': 'Create, update or delete academy leads' - }, - { - 'slug': 'read_review', - 'description': 'Read review for a particular academy' - }, - { - 'slug': 'crud_review', - 'description': 'Create, update or delete academy reviews' - }, - { - 'slug': 'read_media', - 'description': 'List all the medias' - }, - { - 'slug': 'crud_media', - 'description': 'Create, update or delete academy medias' - }, - { - 'slug': 'read_media_resolution', 
- 'description': 'List all the medias resolutions' - }, - { - 'slug': 'crud_media_resolution', - 'description': 'Create, update or delete academy media resolutions' - }, - { - 'slug': 'read_cohort_activity', - 'description': 'Read low level activity in a cohort (attendancy, etc.)' - }, - { - 'slug': 'generate_academy_token', - 'description': 'Create a new token only to be used by the academy' - }, - { - 'slug': 'get_academy_token', - 'description': 'Read the academy token' - }, - { - 'slug': 'send_reset_password', - 'description': 'Generate a temporal token and resend forgot password link' - }, - { - 'slug': 'read_activity', - 'description': 'List all the user activities' - }, - { - 'slug': 'crud_activity', - 'description': 'Create, update or delete a user activities' - }, - { - 'slug': 'read_assigment', - 'description': 'List all the assigments' - }, - { - 'slug': 'classroom_activity', - 'description': 'To report student activities during the classroom or cohorts (Specially meant for teachers)' - }, - { - 'slug': 'academy_reporting', - 'description': 'Get detailed reports about the academy activity' - }, - { - 'slug': 'generate_temporal_token', - 'description': 'Generate a temporal token to reset github credential or forgot password' - }, - { - 'slug': 'read_mentorship_service', - 'description': 'Get all mentorship services from one academy' - }, - { - 'slug': 'read_mentorship_mentor', - 'description': 'Get all mentorship mentors from one academy' - }, - { - 'slug': 'read_mentorship_session', - 'description': 'Get all session from one academy' - }, - { - 'slug': 'crud_mentorship_session', - 'description': 'Get all session from one academy' - }, - { - 'slug': 'read_mentor', - 'description': 'Get update academy mentors' - }, - { - 'slug': 'crud_mentor', - 'description': 'Update, create and delete academy mentors' - }, - { - 'slug': 'crud_asset', - 'description': 'Update, create and delete registry assets' - }, - { - 'slug': 'read_tag', - 'description': 'Read marketing tags and their details' - }, - { - 'slug': 'crud_tag', - 'description': 'Update, create and delete a marketing tag and its details' - }, + {"slug": "read_my_academy", "description": "Read your academy information"}, + { + "slug": "crud_my_academy", + "description": "Read, or update your academy information (very high level, almost the academy admin)", + }, + { + "slug": "crud_member", + "description": "Create, update or delete academy members (very high level, almost the academy admin)", + }, + {"slug": "read_member", "description": "Read academy staff member information"}, + {"slug": "crud_student", "description": "Create, update or delete students"}, + {"slug": "read_student", "description": "Read student information"}, + {"slug": "read_invite", "description": "Read invites from users"}, + {"slug": "read_assignment", "description": "Read assigment information"}, + { + "slug": "read_assignment_sensitive_details", + "description": 'The mentor in residence is allowed to see aditional info about the task, like the "delivery url"', + }, + {"slug": "read_shortlink", "description": "Access the list of marketing shortlinks"}, + {"slug": "crud_shortlink", "description": "Create, update and delete marketing short links"}, + {"slug": "crud_assignment", "description": "Create, update or delete a assigment"}, + { + "slug": "task_delivery_details", + "description": "Get delivery URL for a task, that url can be sent to students for delivery", + }, + {"slug": "read_certificate", "description": "List and read all academy certificates"}, + 
{"slug": "crud_certificate", "description": "Create, update or delete student certificates"}, + {"slug": "read_layout", "description": "Read layouts to generate new certificates"}, + {"slug": "read_syllabus", "description": "List and read syllabus information"}, + {"slug": "crud_syllabus", "description": "Create, update or delete syllabus versions"}, + {"slug": "read_event", "description": "List and retrieve event information"}, + {"slug": "crud_event", "description": "Create, update or delete event information"}, + {"slug": "read_all_cohort", "description": "List all the cohorts or a single cohort information"}, + {"slug": "crud_cohort", "description": "Create, update or delete cohort info"}, + {"slug": "read_eventcheckin", "description": "List and read all the event_checkins"}, + {"slug": "read_survey", "description": "List all the nps answers"}, + {"slug": "crud_survey", "description": "Create, update or delete surveys"}, + {"slug": "read_nps_answers", "description": "List all the nps answers"}, + {"slug": "read_lead", "description": "List all the leads"}, + {"slug": "read_won_lead", "description": "List all the won leads"}, + {"slug": "crud_lead", "description": "Create, update or delete academy leads"}, + {"slug": "read_review", "description": "Read review for a particular academy"}, + {"slug": "crud_review", "description": "Create, update or delete academy reviews"}, + {"slug": "read_media", "description": "List all the medias"}, + {"slug": "crud_media", "description": "Create, update or delete academy medias"}, + {"slug": "read_media_resolution", "description": "List all the medias resolutions"}, + {"slug": "crud_media_resolution", "description": "Create, update or delete academy media resolutions"}, + {"slug": "read_cohort_activity", "description": "Read low level activity in a cohort (attendancy, etc.)"}, + {"slug": "generate_academy_token", "description": "Create a new token only to be used by the academy"}, + {"slug": "get_academy_token", "description": "Read the academy token"}, + {"slug": "send_reset_password", "description": "Generate a temporal token and resend forgot password link"}, + {"slug": "read_activity", "description": "List all the user activities"}, + {"slug": "crud_activity", "description": "Create, update or delete a user activities"}, + {"slug": "read_assigment", "description": "List all the assigments"}, + { + "slug": "classroom_activity", + "description": "To report student activities during the classroom or cohorts (Specially meant for teachers)", + }, + {"slug": "academy_reporting", "description": "Get detailed reports about the academy activity"}, + { + "slug": "generate_temporal_token", + "description": "Generate a temporal token to reset github credential or forgot password", + }, + {"slug": "read_mentorship_service", "description": "Get all mentorship services from one academy"}, + {"slug": "read_mentorship_mentor", "description": "Get all mentorship mentors from one academy"}, + {"slug": "read_mentorship_session", "description": "Get all session from one academy"}, + {"slug": "crud_mentorship_session", "description": "Get all session from one academy"}, + {"slug": "read_mentor", "description": "Get update academy mentors"}, + {"slug": "crud_mentor", "description": "Update, create and delete academy mentors"}, + {"slug": "crud_asset", "description": "Update, create and delete registry assets"}, + {"slug": "read_tag", "description": "Read marketing tags and their details"}, + {"slug": "crud_tag", "description": "Update, create and delete a marketing tag 
and its details"}, ] ROLES = [ { - 'slug': 'admin', - 'name': 'Admin', - 'caps': [c['slug'] for c in CAPABILITIES], - }, - { - 'slug': - 'academy_token', - 'name': - 'Academy Token', - 'caps': [ - 'read_member', - 'read_syllabus', - 'read_student', - 'read_all_cohort', - 'read_media', - 'read_my_academy', - 'read_invite', - 'read_lead', - 'crud_lead', - 'read_review', - 'read_shortlink', - 'read_mentorship_service', - 'read_mentorship_mentor', + "slug": "admin", + "name": "Admin", + "caps": [c["slug"] for c in CAPABILITIES], + }, + { + "slug": "academy_token", + "name": "Academy Token", + "caps": [ + "read_member", + "read_syllabus", + "read_student", + "read_all_cohort", + "read_media", + "read_my_academy", + "read_invite", + "read_lead", + "crud_lead", + "read_review", + "read_shortlink", + "read_mentorship_service", + "read_mentorship_mentor", ], }, { - 'slug': - 'staff', - 'name': - 'Staff (Base)', - 'caps': [ - 'read_member', - 'read_syllabus', - 'read_student', - 'read_all_cohort', - 'read_media', - 'read_my_academy', - 'read_invite', - 'get_academy_token', - 'crud_activity', - 'read_survey', - 'read_layout', - 'read_event', - 'read_certificate', - 'academy_reporting', - 'read_won_lead', - 'read_eventcheckin', - 'read_review', - 'read_activity', - 'read_shortlink', - 'read_mentorship_service', - 'read_mentorship_mentor', + "slug": "staff", + "name": "Staff (Base)", + "caps": [ + "read_member", + "read_syllabus", + "read_student", + "read_all_cohort", + "read_media", + "read_my_academy", + "read_invite", + "get_academy_token", + "crud_activity", + "read_survey", + "read_layout", + "read_event", + "read_certificate", + "academy_reporting", + "read_won_lead", + "read_eventcheckin", + "read_review", + "read_activity", + "read_shortlink", + "read_mentorship_service", + "read_mentorship_mentor", ], }, { - 'slug': - 'student', - 'name': - 'Student', - 'caps': [ - 'crud_assignment', - 'read_syllabus', - 'read_assignment', - 'read_all_cohort', - 'read_my_academy', - 'crud_activity', - 'read_mentorship_service', - 'read_mentorship_mentor', + "slug": "student", + "name": "Student", + "caps": [ + "crud_assignment", + "read_syllabus", + "read_assignment", + "read_all_cohort", + "read_my_academy", + "crud_activity", + "read_mentorship_service", + "read_mentorship_mentor", ], }, ] def extend_roles(roles: list): - roles.append({ - 'slug': - 'assistant', - 'name': - 'Teacher Assistant', - 'caps': - create_academy_roles.extend(roles, ['staff']) + [ - 'read_assigment', - 'crud_assignment', - 'read_cohort_activity', - 'read_nps_answers', - 'classroom_activity', - 'read_event', - 'task_delivery_details', - 'crud_cohort', - ] - }) - roles.append({ - 'slug': - 'career_support', - 'name': - 'Career Support Specialist', - 'caps': - create_academy_roles.extend(roles, ['staff']) + ['read_certificate', 'crud_certificate', 'crud_shortlink'] - }) - roles.append({ - 'slug': - 'admissions_developer', - 'name': - 'Admissions Developer', - 'caps': - create_academy_roles.extend(roles, ['staff']) + - ['crud_lead', 'crud_student', 'crud_cohort', 'read_all_cohort', 'read_lead', 'read_activity'] - }) - roles.append({ - 'slug': 'syllabus_coordinator', - 'name': 'Syllabus Coordinator', - 'caps': create_academy_roles.extend(roles, ['staff']) + ['crud_syllabus', 'crud_media', 'crud_asset'] - }) - roles.append({ - 'slug': 'culture_and_recruitment', - 'name': 'Culture and Recruitment', - 'caps': create_academy_roles.extend(roles, ['staff']) + ['crud_member'] - }) - roles.append({ - 'slug': - 'community_manager', - 'name': - 
'Manage Syllabus, Exercises and all academy content', - 'caps': - create_academy_roles.extend(roles, ['staff']) + [ - 'crud_lead', 'read_event', 'crud_event', 'read_eventcheckin', 'read_nps_answers', 'read_lead', - 'read_all_cohort', 'crud_media' - ] - }) - roles.append({ - 'slug': - 'growth_manager', - 'name': - 'Growth Manager', - 'caps': - create_academy_roles.extend(roles, ['staff', 'community_manager']) + - ['crud_media', 'read_activity', 'read_lead', 'read_won_lead', 'crud_review', 'crud_shortlink', 'crud_tag'] - }) - roles.append({ - 'slug': 'homework_reviewer', - 'name': 'Homework Reviewer', - 'caps': create_academy_roles.extend(roles, ['assistant']) - }) - roles.append({ - 'slug': 'teacher', - 'name': 'Teacher', - 'caps': create_academy_roles.extend(roles, ['assistant']) + ['crud_cohort'] - }) - roles.append({ - 'slug': - 'academy_coordinator', - 'name': - 'Mentor in residence', - 'caps': - create_academy_roles.extend(roles, ['teacher']) + [ - 'crud_syllabus', 'crud_cohort', 'crud_student', 'crud_survey', 'read_won_lead', 'crud_member', - 'send_reset_password', 'generate_temporal_token', 'crud_certificate', 'crud_review', 'crud_mentor', - 'read_mentor', 'read_assignment_sensitive_details', 'crud_shortlink' - ] - }) - roles.append({ - 'slug': - 'country_manager', - 'name': - 'Country Manager', - 'caps': - create_academy_roles.extend(roles, [ - 'academy_coordinator', 'student', 'career_support', 'growth_manager', 'admissions_developer', - 'syllabus_coordinator', 'read_organization', 'crud_organization' - ]) + ['crud_my_academy', 'generate_academy_token', 'send_reset_password', 'generate_temporal_token'] - }) + roles.append( + { + "slug": "assistant", + "name": "Teacher Assistant", + "caps": create_academy_roles.extend(roles, ["staff"]) + + [ + "read_assigment", + "crud_assignment", + "read_cohort_activity", + "read_nps_answers", + "classroom_activity", + "read_event", + "task_delivery_details", + "crud_cohort", + ], + } + ) + roles.append( + { + "slug": "career_support", + "name": "Career Support Specialist", + "caps": create_academy_roles.extend(roles, ["staff"]) + + ["read_certificate", "crud_certificate", "crud_shortlink"], + } + ) + roles.append( + { + "slug": "admissions_developer", + "name": "Admissions Developer", + "caps": create_academy_roles.extend(roles, ["staff"]) + + ["crud_lead", "crud_student", "crud_cohort", "read_all_cohort", "read_lead", "read_activity"], + } + ) + roles.append( + { + "slug": "syllabus_coordinator", + "name": "Syllabus Coordinator", + "caps": create_academy_roles.extend(roles, ["staff"]) + ["crud_syllabus", "crud_media", "crud_asset"], + } + ) + roles.append( + { + "slug": "culture_and_recruitment", + "name": "Culture and Recruitment", + "caps": create_academy_roles.extend(roles, ["staff"]) + ["crud_member"], + } + ) + roles.append( + { + "slug": "community_manager", + "name": "Manage Syllabus, Exercises and all academy content", + "caps": create_academy_roles.extend(roles, ["staff"]) + + [ + "crud_lead", + "read_event", + "crud_event", + "read_eventcheckin", + "read_nps_answers", + "read_lead", + "read_all_cohort", + "crud_media", + ], + } + ) + roles.append( + { + "slug": "growth_manager", + "name": "Growth Manager", + "caps": create_academy_roles.extend(roles, ["staff", "community_manager"]) + + [ + "crud_media", + "read_activity", + "read_lead", + "read_won_lead", + "crud_review", + "crud_shortlink", + "crud_tag", + ], + } + ) + roles.append( + { + "slug": "homework_reviewer", + "name": "Homework Reviewer", + "caps": 
create_academy_roles.extend(roles, ["assistant"]), + } + ) + roles.append( + { + "slug": "teacher", + "name": "Teacher", + "caps": create_academy_roles.extend(roles, ["assistant"]) + ["crud_cohort"], + } + ) + roles.append( + { + "slug": "academy_coordinator", + "name": "Mentor in residence", + "caps": create_academy_roles.extend(roles, ["teacher"]) + + [ + "crud_syllabus", + "crud_cohort", + "crud_student", + "crud_survey", + "read_won_lead", + "crud_member", + "send_reset_password", + "generate_temporal_token", + "crud_certificate", + "crud_review", + "crud_mentor", + "read_mentor", + "read_assignment_sensitive_details", + "crud_shortlink", + ], + } + ) + roles.append( + { + "slug": "country_manager", + "name": "Country Manager", + "caps": create_academy_roles.extend( + roles, + [ + "academy_coordinator", + "student", + "career_support", + "growth_manager", + "admissions_developer", + "syllabus_coordinator", + "read_organization", + "crud_organization", + ], + ) + + ["crud_my_academy", "generate_academy_token", "send_reset_password", "generate_temporal_token"], + } + ) class AcademyEventTestSuite(CypressTestCase): def test_load_roles__bad_environment__not_exits(self): - if 'ALLOW_UNSAFE_CYPRESS_APP' in os.environ: - del os.environ['ALLOW_UNSAFE_CYPRESS_APP'] + if "ALLOW_UNSAFE_CYPRESS_APP" in os.environ: + del os.environ["ALLOW_UNSAFE_CYPRESS_APP"] - url = reverse_lazy('cypress:load_roles') + url = reverse_lazy("cypress:load_roles") response = self.client.get(url) json = response.json() - expected = {'detail': 'is-not-allowed', 'status_code': 400} + expected = {"detail": "is-not-allowed", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -419,331 +308,204 @@ def test_load_roles__bad_environment__not_exits(self): self.assertEqual(self.all_capability_dict(), []) def test_load_roles__bad_environment__empty_string(self): - os.environ['ALLOW_UNSAFE_CYPRESS_APP'] = '' + os.environ["ALLOW_UNSAFE_CYPRESS_APP"] = "" - url = reverse_lazy('cypress:load_roles') + url = reverse_lazy("cypress:load_roles") response = self.client.get(url) json = response.json() - expected = {'detail': 'is-not-allowed', 'status_code': 400} + expected = {"detail": "is-not-allowed", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual(self.all_role_dict(), []) self.assertEqual(self.all_capability_dict(), []) - @patch('breathecode.authenticate.management.commands.create_academy_roles.get_capabilities', - MagicMock(return_value=CAPABILITIES)) - @patch('breathecode.authenticate.management.commands.create_academy_roles.get_roles', MagicMock(return_value=ROLES)) - @patch('breathecode.authenticate.management.commands.create_academy_roles.extend_roles', - MagicMock(side_effect=extend_roles)) + @patch( + "breathecode.authenticate.management.commands.create_academy_roles.get_capabilities", + MagicMock(return_value=CAPABILITIES), + ) + @patch("breathecode.authenticate.management.commands.create_academy_roles.get_roles", MagicMock(return_value=ROLES)) + @patch( + "breathecode.authenticate.management.commands.create_academy_roles.extend_roles", + MagicMock(side_effect=extend_roles), + ) def test_load_roles(self): self.maxDiff = None - os.environ['ALLOW_UNSAFE_CYPRESS_APP'] = 'True' - url = reverse_lazy('cypress:load_roles') + os.environ["ALLOW_UNSAFE_CYPRESS_APP"] = "True" + url = reverse_lazy("cypress:load_roles") response = self.client.get(url) 
self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) - self.assertEqual(self.all_role_dict(), [ - { - 'slug': 'admin', - 'name': 'Admin', - }, - { - 'slug': 'academy_token', - 'name': 'Academy Token', - }, - { - 'slug': 'staff', - 'name': 'Staff (Base)', - }, - { - 'slug': 'student', - 'name': 'Student', - }, - { - 'slug': 'assistant', - 'name': 'Teacher Assistant', - }, - { - 'slug': 'career_support', - 'name': 'Career Support Specialist', - }, - { - 'slug': 'admissions_developer', - 'name': 'Admissions Developer', - }, - { - 'slug': 'syllabus_coordinator', - 'name': 'Syllabus Coordinator', - }, - { - 'slug': 'culture_and_recruitment', - 'name': 'Culture and Recruitment', - }, - { - 'slug': 'community_manager', - 'name': 'Manage Syllabus, Exercises and all academy content', - }, - { - 'slug': 'growth_manager', - 'name': 'Growth Manager', - }, - { - 'slug': 'homework_reviewer', - 'name': 'Homework Reviewer', - }, - { - 'slug': 'teacher', - 'name': 'Teacher', - }, - { - 'slug': 'academy_coordinator', - 'name': 'Mentor in residence', - }, - { - 'slug': 'country_manager', - 'name': 'Country Manager', - }, - ]) + self.assertEqual( + self.all_role_dict(), + [ + { + "slug": "admin", + "name": "Admin", + }, + { + "slug": "academy_token", + "name": "Academy Token", + }, + { + "slug": "staff", + "name": "Staff (Base)", + }, + { + "slug": "student", + "name": "Student", + }, + { + "slug": "assistant", + "name": "Teacher Assistant", + }, + { + "slug": "career_support", + "name": "Career Support Specialist", + }, + { + "slug": "admissions_developer", + "name": "Admissions Developer", + }, + { + "slug": "syllabus_coordinator", + "name": "Syllabus Coordinator", + }, + { + "slug": "culture_and_recruitment", + "name": "Culture and Recruitment", + }, + { + "slug": "community_manager", + "name": "Manage Syllabus, Exercises and all academy content", + }, + { + "slug": "growth_manager", + "name": "Growth Manager", + }, + { + "slug": "homework_reviewer", + "name": "Homework Reviewer", + }, + { + "slug": "teacher", + "name": "Teacher", + }, + { + "slug": "academy_coordinator", + "name": "Mentor in residence", + }, + { + "slug": "country_manager", + "name": "Country Manager", + }, + ], + ) - self.assertEqual(self.all_capability_dict(), [ - { - 'slug': 'read_my_academy', - 'description': 'Read your academy information' - }, - { - 'slug': 'crud_my_academy', - 'description': 'Read, or update your academy information (very high level, almost the academy admin)' - }, - { - 'slug': 'crud_member', - 'description': 'Create, update or delete academy members (very high level, almost the academy admin)' - }, - { - 'slug': 'read_member', - 'description': 'Read academy staff member information' - }, - { - 'slug': 'crud_student', - 'description': 'Create, update or delete students' - }, - { - 'slug': 'read_student', - 'description': 'Read student information' - }, - { - 'slug': 'read_invite', - 'description': 'Read invites from users' - }, - { - 'slug': 'read_assignment', - 'description': 'Read assigment information' - }, - { - 'description': 'The mentor in residence is allowed to see aditional info ' - 'about the task, like the "delivery url"', - 'slug': 'read_assignment_sensitive_details' - }, - { - 'description': 'Access the list of marketing shortlinks', - 'slug': 'read_shortlink' - }, - { - 'description': 'Create, update and delete marketing short links', - 'slug': 'crud_shortlink' - }, - { - 'slug': 'crud_assignment', - 'description': 'Create, update or delete a assigment' - }, - { - 'description': ('Get 
delivery URL for a task, that url can be sent to ' - 'students for delivery'), - 'slug': 'task_delivery_details' - }, - { - 'slug': 'read_certificate', - 'description': 'List and read all academy certificates' - }, - { - 'slug': 'crud_certificate', - 'description': 'Create, update or delete student certificates' - }, - { - 'slug': 'read_layout', - 'description': 'Read layouts to generate new certificates' - }, - { - 'slug': 'read_syllabus', - 'description': 'List and read syllabus information' - }, - { - 'slug': 'crud_syllabus', - 'description': 'Create, update or delete syllabus versions' - }, - { - 'slug': 'read_event', - 'description': 'List and retrieve event information' - }, - { - 'slug': 'crud_event', - 'description': 'Create, update or delete event information' - }, - { - 'slug': 'read_all_cohort', - 'description': 'List all the cohorts or a single cohort information' - }, - { - 'slug': 'crud_cohort', - 'description': 'Create, update or delete cohort info' - }, - { - 'slug': 'read_eventcheckin', - 'description': 'List and read all the event_checkins' - }, - { - 'slug': 'read_survey', - 'description': 'List all the nps answers' - }, - { - 'slug': 'crud_survey', - 'description': 'Create, update or delete surveys' - }, - { - 'slug': 'read_nps_answers', - 'description': 'List all the nps answers' - }, - { - 'slug': 'read_lead', - 'description': 'List all the leads' - }, - { - 'slug': 'read_won_lead', - 'description': 'List all the won leads' - }, - { - 'slug': 'crud_lead', - 'description': 'Create, update or delete academy leads' - }, - { - 'slug': 'read_review', - 'description': 'Read review for a particular academy' - }, - { - 'slug': 'crud_review', - 'description': 'Create, update or delete academy reviews' - }, - { - 'slug': 'read_media', - 'description': 'List all the medias' - }, - { - 'slug': 'crud_media', - 'description': 'Create, update or delete academy medias' - }, - { - 'slug': 'read_media_resolution', - 'description': 'List all the medias resolutions' - }, - { - 'slug': 'crud_media_resolution', - 'description': 'Create, update or delete academy media resolutions' - }, - { - 'slug': 'read_cohort_activity', - 'description': 'Read low level activity in a cohort (attendancy, etc.)' - }, - { - 'slug': 'generate_academy_token', - 'description': 'Create a new token only to be used by the academy' - }, - { - 'slug': 'get_academy_token', - 'description': 'Read the academy token' - }, - { - 'slug': 'send_reset_password', - 'description': 'Generate a temporal token and resend forgot password link' - }, - { - 'slug': 'read_activity', - 'description': 'List all the user activities' - }, - { - 'slug': 'crud_activity', - 'description': 'Create, update or delete a user activities' - }, - { - 'slug': 'read_assigment', - 'description': 'List all the assigments' - }, - { - 'slug': 'classroom_activity', - 'description': - 'To report student activities during the classroom or cohorts (Specially meant for teachers)' - }, - { - 'slug': 'academy_reporting', - 'description': 'Get detailed reports about the academy activity' - }, - { - 'slug': 'generate_temporal_token', - 'description': 'Generate a temporal token to reset github credential or forgot password' - }, - { - 'slug': 'read_mentorship_service', - 'description': 'Get all mentorship services from one academy' - }, - { - 'slug': 'read_mentorship_mentor', - 'description': 'Get all mentorship mentors from one academy' - }, - { - 'slug': 'read_mentorship_session', - 'description': 'Get all session from one academy' - }, - { - 'slug': 
'crud_mentorship_session', - 'description': 'Get all session from one academy', - }, - { - 'slug': 'read_mentor', - 'description': 'Get update academy mentors' - }, - { - 'slug': 'crud_mentor', - 'description': 'Update, create and delete academy mentors' - }, - { - 'description': 'Update, create and delete registry assets', - 'slug': 'crud_asset' - }, - { - 'slug': 'read_tag', - 'description': 'Read marketing tags and their details' - }, - { - 'slug': 'crud_tag', - 'description': 'Update, create and delete a marketing tag and its details' - }, - ]) + self.assertEqual( + self.all_capability_dict(), + [ + {"slug": "read_my_academy", "description": "Read your academy information"}, + { + "slug": "crud_my_academy", + "description": "Read, or update your academy information (very high level, almost the academy admin)", + }, + { + "slug": "crud_member", + "description": "Create, update or delete academy members (very high level, almost the academy admin)", + }, + {"slug": "read_member", "description": "Read academy staff member information"}, + {"slug": "crud_student", "description": "Create, update or delete students"}, + {"slug": "read_student", "description": "Read student information"}, + {"slug": "read_invite", "description": "Read invites from users"}, + {"slug": "read_assignment", "description": "Read assigment information"}, + { + "description": "The mentor in residence is allowed to see aditional info " + 'about the task, like the "delivery url"', + "slug": "read_assignment_sensitive_details", + }, + {"description": "Access the list of marketing shortlinks", "slug": "read_shortlink"}, + {"description": "Create, update and delete marketing short links", "slug": "crud_shortlink"}, + {"slug": "crud_assignment", "description": "Create, update or delete a assigment"}, + { + "description": ("Get delivery URL for a task, that url can be sent to " "students for delivery"), + "slug": "task_delivery_details", + }, + {"slug": "read_certificate", "description": "List and read all academy certificates"}, + {"slug": "crud_certificate", "description": "Create, update or delete student certificates"}, + {"slug": "read_layout", "description": "Read layouts to generate new certificates"}, + {"slug": "read_syllabus", "description": "List and read syllabus information"}, + {"slug": "crud_syllabus", "description": "Create, update or delete syllabus versions"}, + {"slug": "read_event", "description": "List and retrieve event information"}, + {"slug": "crud_event", "description": "Create, update or delete event information"}, + {"slug": "read_all_cohort", "description": "List all the cohorts or a single cohort information"}, + {"slug": "crud_cohort", "description": "Create, update or delete cohort info"}, + {"slug": "read_eventcheckin", "description": "List and read all the event_checkins"}, + {"slug": "read_survey", "description": "List all the nps answers"}, + {"slug": "crud_survey", "description": "Create, update or delete surveys"}, + {"slug": "read_nps_answers", "description": "List all the nps answers"}, + {"slug": "read_lead", "description": "List all the leads"}, + {"slug": "read_won_lead", "description": "List all the won leads"}, + {"slug": "crud_lead", "description": "Create, update or delete academy leads"}, + {"slug": "read_review", "description": "Read review for a particular academy"}, + {"slug": "crud_review", "description": "Create, update or delete academy reviews"}, + {"slug": "read_media", "description": "List all the medias"}, + {"slug": "crud_media", "description": "Create, update or 
delete academy medias"}, + {"slug": "read_media_resolution", "description": "List all the medias resolutions"}, + {"slug": "crud_media_resolution", "description": "Create, update or delete academy media resolutions"}, + { + "slug": "read_cohort_activity", + "description": "Read low level activity in a cohort (attendancy, etc.)", + }, + {"slug": "generate_academy_token", "description": "Create a new token only to be used by the academy"}, + {"slug": "get_academy_token", "description": "Read the academy token"}, + { + "slug": "send_reset_password", + "description": "Generate a temporal token and resend forgot password link", + }, + {"slug": "read_activity", "description": "List all the user activities"}, + {"slug": "crud_activity", "description": "Create, update or delete a user activities"}, + {"slug": "read_assigment", "description": "List all the assigments"}, + { + "slug": "classroom_activity", + "description": "To report student activities during the classroom or cohorts (Specially meant for teachers)", + }, + {"slug": "academy_reporting", "description": "Get detailed reports about the academy activity"}, + { + "slug": "generate_temporal_token", + "description": "Generate a temporal token to reset github credential or forgot password", + }, + {"slug": "read_mentorship_service", "description": "Get all mentorship services from one academy"}, + {"slug": "read_mentorship_mentor", "description": "Get all mentorship mentors from one academy"}, + {"slug": "read_mentorship_session", "description": "Get all session from one academy"}, + { + "slug": "crud_mentorship_session", + "description": "Get all session from one academy", + }, + {"slug": "read_mentor", "description": "Get update academy mentors"}, + {"slug": "crud_mentor", "description": "Update, create and delete academy mentors"}, + {"description": "Update, create and delete registry assets", "slug": "crud_asset"}, + {"slug": "read_tag", "description": "Read marketing tags and their details"}, + {"slug": "crud_tag", "description": "Update, create and delete a marketing tag and its details"}, + ], + ) def test_load_roles__check_the_capabilities(self): from ....authenticate.management.commands.create_academy_roles import CAPABILITIES for capability in CAPABILITIES: - self.assertRegex(capability['slug'], r'^[a-z_]+$') - self.assertRegex(capability['description'], r'^[a-zA-Z,. _()"]+$') + self.assertRegex(capability["slug"], r"^[a-z_]+$") + self.assertRegex(capability["description"], r'^[a-zA-Z,. _()"]+$') self.assertEqual(len(capability), 2) def test_load_roles__check_the_roles(self): from ....authenticate.management.commands.create_academy_roles import ROLES for role in ROLES: - self.assertRegex(role['slug'], r'^[a-z_]+$') - self.assertRegex(role['name'], r'^[a-zA-Z. ()"]+$') + self.assertRegex(role["slug"], r"^[a-z_]+$") + self.assertRegex(role["name"], r'^[a-zA-Z. 
()"]+$') - for capability in role['caps']: - self.assertRegex(capability, r'^[a-z_]+$') + for capability in role["caps"]: + self.assertRegex(capability, r"^[a-z_]+$") self.assertEqual(len(role), 3) diff --git a/breathecode/cypress/tests/urls/tests_mixer.py b/breathecode/cypress/tests/urls/tests_mixer.py index 49ed44f63..c8ad75c49 100644 --- a/breathecode/cypress/tests/urls/tests_mixer.py +++ b/breathecode/cypress/tests/urls/tests_mixer.py @@ -10,118 +10,116 @@ class AcademyEventTestSuite(CypressTestCase): def test_mixer_model__bad_environment__not_exits(self): - if 'ALLOW_UNSAFE_CYPRESS_APP' in os.environ: - del os.environ['ALLOW_UNSAFE_CYPRESS_APP'] + if "ALLOW_UNSAFE_CYPRESS_APP" in os.environ: + del os.environ["ALLOW_UNSAFE_CYPRESS_APP"] - url = reverse_lazy('cypress:mixer') + url = reverse_lazy("cypress:mixer") data = {} - response = self.client.post(url, data, format='json') + response = self.client.post(url, data, format="json") json = response.json() - expected = {'detail': 'is-not-allowed', 'status_code': 400} + expected = {"detail": "is-not-allowed", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) def test_mixer_model__bad_environment__empty_string(self): - os.environ['ALLOW_UNSAFE_CYPRESS_APP'] = '' + os.environ["ALLOW_UNSAFE_CYPRESS_APP"] = "" - url = reverse_lazy('cypress:mixer') + url = reverse_lazy("cypress:mixer") data = {} - response = self.client.post(url, data, format='json') + response = self.client.post(url, data, format="json") json = response.json() - expected = {'detail': 'is-not-allowed', 'status_code': 400} + expected = {"detail": "is-not-allowed", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) def test_mixer_model__without_data(self): - os.environ['ALLOW_UNSAFE_CYPRESS_APP'] = 'True' - url = reverse_lazy('cypress:mixer') + os.environ["ALLOW_UNSAFE_CYPRESS_APP"] = "True" + url = reverse_lazy("cypress:mixer") data = {} - response = self.client.post(url, data, format='json') + response = self.client.post(url, data, format="json") json = response.json() - expected = {'detail': 'is-empty', 'status_code': 400} + expected = {"detail": "is-empty", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) def test_mixer_model__bad_model_name(self): - os.environ['ALLOW_UNSAFE_CYPRESS_APP'] = 'True' - url = reverse_lazy('cypress:mixer') + os.environ["ALLOW_UNSAFE_CYPRESS_APP"] = "True" + url = reverse_lazy("cypress:mixer") - data = {'$model': 'TheyKilledKenny', 'first_name': 'konan'} - response = self.client.post(url, data, format='json') + data = {"$model": "TheyKilledKenny", "first_name": "konan"} + response = self.client.post(url, data, format="json") json = response.json() - expected = [{'model': 'TheyKilledKenny', 'status_text': 'Model not found'}] + expected = [{"model": "TheyKilledKenny", "status_text": "Model not found"}] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_mixer_model__generate_one_profile_academy(self): - os.environ['ALLOW_UNSAFE_CYPRESS_APP'] = 'True' - url = reverse_lazy('cypress:mixer') + os.environ["ALLOW_UNSAFE_CYPRESS_APP"] = "True" + url = reverse_lazy("cypress:mixer") - data = {'$model': 'ProfileAcademy', 'first_name': 'konan'} - response = self.client.post(url, data, format='json') + data = {"$model": "ProfileAcademy", "first_name": "konan"} + response = self.client.post(url, data, 
format="json") json = response.json() - expected = [{'model': 'ProfileAcademy', 'pk': 1, 'status_text': 'done'}] + expected = [{"model": "ProfileAcademy", "pk": 1, "status_text": "done"}] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) all_profile_academy = self.all_profile_academy_dict() self.assertEqual(len(all_profile_academy), 1) - self.assertEqual(all_profile_academy[0]['first_name'], 'konan') + self.assertEqual(all_profile_academy[0]["first_name"], "konan") def test_mixer_model__generate_one_user(self): - os.environ['ALLOW_UNSAFE_CYPRESS_APP'] = 'True' - url = reverse_lazy('cypress:mixer') + os.environ["ALLOW_UNSAFE_CYPRESS_APP"] = "True" + url = reverse_lazy("cypress:mixer") - data = {'$model': 'User', 'first_name': 'konan'} - response = self.client.post(url, data, format='json') + data = {"$model": "User", "first_name": "konan"} + response = self.client.post(url, data, format="json") json = response.json() - expected = [{'model': 'User', 'pk': 1, 'status_text': 'done'}] + expected = [{"model": "User", "pk": 1, "status_text": "done"}] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) all_user = self.all_user_dict() self.assertEqual(len(all_user), 1) - self.assertEqual(all_user[0]['first_name'], 'konan') + self.assertEqual(all_user[0]["first_name"], "konan") def test_mixer_model__generate_one_user_and_profile_academy(self): - os.environ['ALLOW_UNSAFE_CYPRESS_APP'] = 'True' - url = reverse_lazy('cypress:mixer') - - data = [{ - '$model': 'User', - 'first_name': 'konan', - }, { - '$model': 'ProfileAcademy', - 'first_name': 'konan', - }] - response = self.client.post(url, data, format='json') + os.environ["ALLOW_UNSAFE_CYPRESS_APP"] = "True" + url = reverse_lazy("cypress:mixer") + + data = [ + { + "$model": "User", + "first_name": "konan", + }, + { + "$model": "ProfileAcademy", + "first_name": "konan", + }, + ] + response = self.client.post(url, data, format="json") json = response.json() - expected = [{ - 'model': 'User', - 'pk': 1, - 'status_text': 'done' - }, { - 'model': 'ProfileAcademy', - 'pk': 1, - 'status_text': 'done' - }] + expected = [ + {"model": "User", "pk": 1, "status_text": "done"}, + {"model": "ProfileAcademy", "pk": 1, "status_text": "done"}, + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) all_user = self.all_user_dict() self.assertEqual(len(all_user), 1) - self.assertEqual(all_user[0]['first_name'], 'konan') + self.assertEqual(all_user[0]["first_name"], "konan") all_profile_academy = self.all_profile_academy_dict() self.assertEqual(len(all_profile_academy), 1) - self.assertEqual(all_profile_academy[0]['first_name'], 'konan') + self.assertEqual(all_profile_academy[0]["first_name"], "konan") diff --git a/breathecode/cypress/urls.py b/breathecode/cypress/urls.py index 70eb6ace8..863ab5bac 100644 --- a/breathecode/cypress/urls.py +++ b/breathecode/cypress/urls.py @@ -2,10 +2,10 @@ from .views import CleanView, LoadRolesView, MixerView -app_name = 'events' +app_name = "events" urlpatterns = [ - path('load/roles', LoadRolesView.as_view(), name='load_roles'), - path('mixer', MixerView.as_view(), name='mixer'), - path('clean', CleanView.as_view(), name='clean'), - path('clean/<str:model_name>', CleanView.as_view(), name='clean_model'), + path("load/roles", LoadRolesView.as_view(), name="load_roles"), + path("mixer", MixerView.as_view(), name="mixer"), + path("clean", CleanView.as_view(), name="clean"), + path("clean/<str:model_name>", 
CleanView.as_view(), name="clean_model"), ] diff --git a/breathecode/cypress/views.py b/breathecode/cypress/views.py index c89c84369..7170f796a 100644 --- a/breathecode/cypress/views.py +++ b/breathecode/cypress/views.py @@ -13,7 +13,7 @@ def get_cypress_env(): - return os.getenv('ALLOW_UNSAFE_CYPRESS_APP') + return os.getenv("ALLOW_UNSAFE_CYPRESS_APP") class LoadRolesView(APIView): @@ -21,7 +21,7 @@ class LoadRolesView(APIView): def get(self, request): if not get_cypress_env(): - raise ValidationException('Nothing to load', slug='is-not-allowed') + raise ValidationException("Nothing to load", slug="is-not-allowed") load_roles() return Response(status=status.HTTP_204_NO_CONTENT) @@ -32,22 +32,22 @@ class CleanView(APIView): def delete(self, request, model_name=None): if not get_cypress_env(): - raise ValidationException('Nothing to clean', slug='is-not-allowed') + raise ValidationException("Nothing to clean", slug="is-not-allowed") if model_name: try: clean_model(model_name) except Exception as e: error = str(e) - slug = 'model-not-exits' + slug = "model-not-exits" logger.error(error) - if error.startswith('Exist many app with the same model name'): - slug = 'many-models-with-the-same-name' + if error.startswith("Exist many app with the same model name"): + slug = "many-models-with-the-same-name" - elif error == 'Bad model name format': - slug = 'bad-model-name-format' + elif error == "Bad model name format": + slug = "bad-model-name-format" raise ValidationException(error, code=404, slug=slug) @@ -62,10 +62,10 @@ class MixerView(APIView): def post(self, request): if not get_cypress_env(): - raise ValidationException('Nothing to load', slug='is-not-allowed') + raise ValidationException("Nothing to load", slug="is-not-allowed") if not request.data: - raise ValidationException('Empty request', slug='is-empty') + raise ValidationException("Empty request", slug="is-empty") data = request.data diff --git a/breathecode/events/actions.py b/breathecode/events/actions.py index 600117e53..7ab67fefb 100644 --- a/breathecode/events/actions.py +++ b/breathecode/events/actions.py @@ -19,12 +19,12 @@ logger = logging.getLogger(__name__) status_map = { - 'draft': 'DRAFT', - 'live': 'ACTIVE', - 'completed': 'COMPLETED', - 'started': 'ACTIVE', - 'ended': 'ACTIVE', - 'canceled': 'DELETED', + "draft": "DRAFT", + "live": "ACTIVE", + "completed": "COMPLETED", + "started": "ACTIVE", + "ended": "ACTIVE", + "canceled": "DELETED", } @@ -36,9 +36,9 @@ def build_query_params(cohort=None, syllabus=None, academy=None): """ return { - 'visibility_settings__cohort': cohort, - 'visibility_settings__syllabus': syllabus, - 'visibility_settings__academy': academy, + "visibility_settings__cohort": cohort, + "visibility_settings__syllabus": syllabus, + "visibility_settings__academy": academy, } def get_related_resources(): @@ -48,25 +48,35 @@ def get_related_resources(): def process_i_owe_you(i_owe_them: QuerySet[AbstractIOweYou]): for i_owe_you in i_owe_them: - if (i_owe_you.selected_cohort_set and i_owe_you.selected_cohort_set.cohorts.first().academy - and i_owe_you.selected_cohort_set.cohorts.first().academy not in academies): + if ( + i_owe_you.selected_cohort_set + and i_owe_you.selected_cohort_set.cohorts.first().academy + and i_owe_you.selected_cohort_set.cohorts.first().academy not in academies + ): academies.append(i_owe_you.selected_cohort_set.cohorts.first().academy) if i_owe_you.selected_cohort_set and i_owe_you.selected_cohort_set.cohorts.first() not in cohorts: 
cohorts.append(i_owe_you.selected_cohort_set.cohorts.first()) - if (i_owe_you.selected_cohort_set and i_owe_you.selected_cohort_set.cohorts.first().syllabus_version - and i_owe_you.selected_cohort_set.cohorts.first().syllabus_version.syllabus not in syllabus): - syllabus.append({ - 'syllabus': i_owe_you.selected_cohort_set.cohorts.first().syllabus_version.syllabus, - 'academy': i_owe_you.selected_cohort_set.cohorts.first().academy, - }) - - if (i_owe_you.selected_event_type_set and i_owe_you.selected_event_type_set.academy not in academies): + if ( + i_owe_you.selected_cohort_set + and i_owe_you.selected_cohort_set.cohorts.first().syllabus_version + and i_owe_you.selected_cohort_set.cohorts.first().syllabus_version.syllabus not in syllabus + ): + syllabus.append( + { + "syllabus": i_owe_you.selected_cohort_set.cohorts.first().syllabus_version.syllabus, + "academy": i_owe_you.selected_cohort_set.cohorts.first().academy, + } + ) + + if i_owe_you.selected_event_type_set and i_owe_you.selected_event_type_set.academy not in academies: academies.append(i_owe_you.selected_event_type_set.academy) - if (i_owe_you.selected_mentorship_service_set - and i_owe_you.selected_mentorship_service_set.academy not in academies): + if ( + i_owe_you.selected_mentorship_service_set + and i_owe_you.selected_mentorship_service_set.academy not in academies + ): academies.append(i_owe_you.selected_mentorship_service_set.academy) if i_owe_you.selected_event_type_set: @@ -80,29 +90,32 @@ def process_i_owe_you(i_owe_them: QuerySet[AbstractIOweYou]): ids = [] utc_now = timezone.now() - statuses = ['CANCELLED', 'DEPRECATED'] - at_least_one_resource_linked = (Q(selected_cohort_set__isnull=False) - | Q(selected_mentorship_service_set__isnull=False) - | Q(selected_event_type_set__isnull=False)) + statuses = ["CANCELLED", "DEPRECATED"] + at_least_one_resource_linked = ( + Q(selected_cohort_set__isnull=False) + | Q(selected_mentorship_service_set__isnull=False) + | Q(selected_event_type_set__isnull=False) + ) cohort_users = CohortUser.objects.filter(user=_user) cohort_users_with_syllabus = cohort_users.filter(cohort__syllabus_version__isnull=False) - subscriptions = Subscription.objects.filter(at_least_one_resource_linked, - Q(valid_until=None) - | Q(valid_until__gte=utc_now), - user=_user).exclude(status__in=statuses) + subscriptions = Subscription.objects.filter( + at_least_one_resource_linked, Q(valid_until=None) | Q(valid_until__gte=utc_now), user=_user + ).exclude(status__in=statuses) - plan_financings = PlanFinancing.objects.filter(at_least_one_resource_linked, - valid_until__gte=utc_now, - user=_user).exclude(status__in=statuses) + plan_financings = PlanFinancing.objects.filter( + at_least_one_resource_linked, valid_until__gte=utc_now, user=_user + ).exclude(status__in=statuses) for cohort_user in cohort_users_with_syllabus: if cohort_user.cohort.syllabus_version.syllabus not in cohorts: - syllabus.append({ - 'syllabus': cohort_user.cohort.syllabus_version.syllabus, - 'academy': cohort_user.cohort.academy, - }) + syllabus.append( + { + "syllabus": cohort_user.cohort.syllabus_version.syllabus, + "academy": cohort_user.cohort.academy, + } + ) for cohort_user in cohort_users: if cohort_user.cohort.academy not in cohorts: @@ -141,11 +154,11 @@ def my_events(): # shared with a specific syllabus for s in syllabus: - kwargs = build_query_params(academy=s['academy'], syllabus=s['syllabus']) + kwargs = build_query_params(academy=s["academy"], syllabus=s["syllabus"]) if query: - query |= Q(**kwargs, academy=s['academy']) | 
Q(**kwargs, allow_shared_creation=True) + query |= Q(**kwargs, academy=s["academy"]) | Q(**kwargs, allow_shared_creation=True) else: - query = Q(**kwargs, academy=s['academy']) | Q(**kwargs, allow_shared_creation=True) + query = Q(**kwargs, academy=s["academy"]) | Q(**kwargs, allow_shared_creation=True) if ids: if query: @@ -163,12 +176,12 @@ def my_events(): def sync_org_venues(org): if org.academy is None: - raise Exception('First you must specify to which academy this organization belongs') + raise Exception("First you must specify to which academy this organization belongs") client = Eventbrite(org.eventbrite_key) result = client.get_organization_venues(org.eventbrite_id) - for data in result['venues']: + for data in result["venues"]: create_or_update_venue(data, org, force_update=True) return True @@ -176,51 +189,50 @@ def sync_org_venues(org): def create_or_update_organizer(data, org, force_update=False): if org.academy is None: - raise Exception('First you must specify to which academy this organization belongs') + raise Exception("First you must specify to which academy this organization belongs") - organizer = Organizer.objects.filter(eventbrite_id=data['id']).first() + organizer = Organizer.objects.filter(eventbrite_id=data["id"]).first() try: if organizer is None: - organizer = Organizer(name=data['name'], - description=data['description']['text'], - eventbrite_id=data['id'], - organization=org) + organizer = Organizer( + name=data["name"], description=data["description"]["text"], eventbrite_id=data["id"], organization=org + ) organizer.save() elif force_update == True: - organizer.name = data['name'] - organizer.description = data['description']['text'] + organizer.name = data["name"] + organizer.description = data["description"]["text"] organizer.save() except Exception as e: - print('Error saving organizer eventbrite_id: ' + str(data['id']) + ' skipping to the next', e) + print("Error saving organizer eventbrite_id: " + str(data["id"]) + " skipping to the next", e) return organizer def create_or_update_venue(data, org, force_update=False): if not org.academy: - logger.error(f'The organization {org} not have a academy assigned') + logger.error(f"The organization {org} not have a academy assigned") return - venue = Venue.objects.filter(eventbrite_id=data['id'], academy__id=org.academy.id).first() + venue = Venue.objects.filter(eventbrite_id=data["id"], academy__id=org.academy.id).first() if venue and not force_update: return kwargs = { - 'title': data['name'], - 'street_address': data['address']['address_1'], - 'country': data['address']['country'], - 'city': data['address']['city'], - 'state': data['address']['region'], - 'zip_code': data['address']['postal_code'], - 'latitude': data['latitude'], - 'longitude': data['longitude'], - 'eventbrite_id': data['id'], - 'eventbrite_url': data['resource_uri'], - 'academy': org.academy, + "title": data["name"], + "street_address": data["address"]["address_1"], + "country": data["address"]["country"], + "city": data["address"]["city"], + "state": data["address"]["region"], + "zip_code": data["address"]["postal_code"], + "latitude": data["latitude"], + "longitude": data["longitude"], + "eventbrite_id": data["id"], + "eventbrite_url": data["resource_uri"], + "academy": org.academy, # 'organization': org, } @@ -242,57 +254,58 @@ def create_or_update_venue(data, org, force_update=False): def export_event_description_to_eventbrite(event: Event) -> None: if not event: - logger.error('Event is not being provided') + logger.error("Event is not 
being provided") return if not event.eventbrite_id: - logger.error(f'Event {event.id} not have the integration with eventbrite') + logger.error(f"Event {event.id} not have the integration with eventbrite") return if not event.organization: - logger.error(f'Event {event.id} not have a organization assigned') + logger.error(f"Event {event.id} not have a organization assigned") return if not event.description: - logger.warning(f'The event {event.id} not have description yet') + logger.warning(f"The event {event.id} not have description yet") return eventbrite_id = event.eventbrite_id client = Eventbrite(event.organization.eventbrite_key) payload = { - 'modules': [{ - 'type': 'text', - 'data': { - 'body': { - 'type': 'text', - 'text': event.description, - 'alignment': 'left', + "modules": [ + { + "type": "text", + "data": { + "body": { + "type": "text", + "text": event.description, + "alignment": "left", + }, }, - }, - }], - 'publish': - True, - 'purpose': - 'listing', + } + ], + "publish": True, + "purpose": "listing", } try: structured_content = client.get_event_description(eventbrite_id) - result = client.create_or_update_event_description(eventbrite_id, structured_content['page_version_number'], - payload) + result = client.create_or_update_event_description( + eventbrite_id, structured_content["page_version_number"], payload + ) - if not result['modules']: - error = 'Could not create event description in eventbrite' + if not result["modules"]: + error = "Could not create event description in eventbrite" logger.error(error) event.eventbrite_sync_description = error - event.eventbrite_sync_status = 'ERROR' + event.eventbrite_sync_status = "ERROR" event.save() else: event.eventbrite_sync_description = timezone.now() - event.eventbrite_sync_status = 'SYNCHED' + event.eventbrite_sync_status = "SYNCHED" event.save() except Exception as e: @@ -300,13 +313,13 @@ def export_event_description_to_eventbrite(event: Event) -> None: logger.error(error) event.eventbrite_sync_description = error - event.eventbrite_sync_status = 'ERROR' + event.eventbrite_sync_status = "ERROR" event.save() def export_event_to_eventbrite(event: Event, org: Organization): if not org.academy: - logger.error(f'The organization {org} not have a academy assigned') + logger.error(f"The organization {org} not have a academy assigned") return timezone = org.academy.timezone @@ -314,23 +327,23 @@ def export_event_to_eventbrite(event: Event, org: Organization): now = get_current_iso_string() data = { - 'event.name.html': event.title, - 'event.description.html': event.description, - 'event.start.utc': re.sub(r'\+00:00$', 'Z', event.starting_at.isoformat()), - 'event.end.utc': re.sub(r'\+00:00$', 'Z', event.ending_at.isoformat()), + "event.name.html": event.title, + "event.description.html": event.description, + "event.start.utc": re.sub(r"\+00:00$", "Z", event.starting_at.isoformat()), + "event.end.utc": re.sub(r"\+00:00$", "Z", event.ending_at.isoformat()), # 'event.summary': event.excerpt, - 'event.capacity': event.capacity, - 'event.online_event': event.online_event, - 'event.url': event.eventbrite_url, - 'event.currency': event.currency, + "event.capacity": event.capacity, + "event.online_event": event.online_event, + "event.url": event.eventbrite_url, + "event.currency": event.currency, } if event.eventbrite_organizer_id: - data['event.organizer_id'] = event.eventbrite_organizer_id + data["event.organizer_id"] = event.eventbrite_organizer_id if timezone: - data['event.start.timezone'] = timezone - data['event.end.timezone'] = 
timezone + data["event.start.timezone"] = timezone + data["event.end.timezone"] = timezone try: if event.eventbrite_id: @@ -338,16 +351,16 @@ def export_event_to_eventbrite(event: Event, org: Organization): else: result = client.create_organization_event(org.eventbrite_id, data) - event.eventbrite_id = str(result['id']) + event.eventbrite_id = str(result["id"]) event.eventbrite_sync_description = now - event.eventbrite_sync_status = 'SYNCHED' + event.eventbrite_sync_status = "SYNCHED" export_event_description_to_eventbrite(event) except Exception as e: - event.eventbrite_sync_description = f'{now} => {e}' - event.eventbrite_sync_status = 'ERROR' + event.eventbrite_sync_description = f"{now} => {e}" + event.eventbrite_sync_status = "ERROR" event.save() return event @@ -355,7 +368,7 @@ def export_event_to_eventbrite(event: Event, org: Organization): def sync_org_events(org): if not org.academy: - logger.error(f'The organization {org} not have a academy assigned') + logger.error(f"The organization {org} not have a academy assigned") return client = Eventbrite(org.eventbrite_key) @@ -363,21 +376,21 @@ def sync_org_events(org): try: - for data in result['events']: + for data in result["events"]: update_or_create_event(data, org) - org.sync_status = 'PERSISTED' + org.sync_status = "PERSISTED" org.sync_desc = f"Success with {len(result['events'])} events..." org.save() except Exception as e: if org: - org.sync_status = 'ERROR' - org.sync_desc = 'Error: ' + str(e) + org.sync_status = "ERROR" + org.sync_desc = "Error: " + str(e) org.save() raise e - events = Event.objects.filter(sync_with_eventbrite=True, eventbrite_sync_status='PENDING') + events = Event.objects.filter(sync_with_eventbrite=True, eventbrite_sync_status="PENDING") for event in events: export_event_to_eventbrite(event, org) @@ -393,15 +406,15 @@ def get_current_iso_string(): def update_event_description_from_eventbrite(event: Event) -> None: if not event: - logger.error('Event is not being provided') + logger.error("Event is not being provided") return if not event.eventbrite_id: - logger.error(f'Event {event.id} not have the integration with eventbrite') + logger.error(f"Event {event.id} not have the integration with eventbrite") return if not event.organization: - logger.error(f'Event {event.id} not have a organization assigned') + logger.error(f"Event {event.id} not have a organization assigned") return eventbrite_id = event.eventbrite_id @@ -409,58 +422,58 @@ def update_event_description_from_eventbrite(event: Event) -> None: try: data = client.get_event_description(eventbrite_id) - event.description = data['modules'][0]['data']['body']['text'] + event.description = data["modules"][0]["data"]["body"]["text"] event.eventbrite_sync_description = timezone.now() - event.eventbrite_sync_status = 'PERSISTED' + event.eventbrite_sync_status = "PERSISTED" event.save() except Exception: - error = f'The event {eventbrite_id} is coming from eventbrite not have a description' + error = f"The event {eventbrite_id} is coming from eventbrite not have a description" logger.warning(error) event.eventbrite_sync_description = error - event.eventbrite_sync_status = 'ERROR' + event.eventbrite_sync_status = "ERROR" def update_or_create_event(data, org): - if data is None: #skip if no data - logger.warning('Ignored event') + if data is None: # skip if no data + logger.warning("Ignored event") return False if not org.academy: - logger.error(f'The organization {org} not have a academy assigned') + logger.error(f"The organization {org} not have a academy 
assigned") return now = get_current_iso_string() - if data['status'] not in status_map: - raise Exception('Uknown eventbrite status ' + data['status']) + if data["status"] not in status_map: + raise Exception("Uknown eventbrite status " + data["status"]) - event = Event.objects.filter(eventbrite_id=data['id'], organization__id=org.id).first() + event = Event.objects.filter(eventbrite_id=data["id"], organization__id=org.id).first() try: venue = None - if 'venue' in data and data['venue'] is not None: - venue = create_or_update_venue(data['venue'], org) + if "venue" in data and data["venue"] is not None: + venue = create_or_update_venue(data["venue"], org) organizer = None - if 'organizer' in data and data['organizer'] is not None: - organizer = create_or_update_organizer(data['organizer'], org, force_update=True) + if "organizer" in data and data["organizer"] is not None: + organizer = create_or_update_organizer(data["organizer"], org, force_update=True) else: - print('Event without organizer', data) + print("Event without organizer", data) kwargs = { - 'title': data['name']['text'], - 'excerpt': data['description']['text'], - 'starting_at': data['start']['utc'], - 'ending_at': data['end']['utc'], - 'capacity': data['capacity'], - 'online_event': data['online_event'], - 'eventbrite_id': data['id'], - 'eventbrite_url': data['url'], - 'status': status_map[data['status']], - 'eventbrite_status': data['status'], - 'currency': data['currency'], - 'organization': org, + "title": data["name"]["text"], + "excerpt": data["description"]["text"], + "starting_at": data["start"]["utc"], + "ending_at": data["end"]["utc"], + "capacity": data["capacity"], + "online_event": data["online_event"], + "eventbrite_id": data["id"], + "eventbrite_url": data["url"], + "status": status_map[data["status"]], + "eventbrite_status": data["status"], + "currency": data["currency"], + "organization": org, # organizer: organizer, - 'venue': venue, + "venue": venue, } if event is None: @@ -471,11 +484,11 @@ def update_or_create_event(data, org): for attr in kwargs: setattr(event, attr, kwargs[attr]) - if 'published' in data: - event.published_at = data['published'] + if "published" in data: + event.published_at = data["published"] - if 'logo' in data and data['logo'] is not None: - event.banner = data['logo']['url'] + if "logo" in data and data["logo"] is not None: + event.banner = data["logo"]["url"] if not event.url: event.url = event.eventbrite_url @@ -488,15 +501,15 @@ def update_or_create_event(data, org): event.academy = org.academy event.eventbrite_sync_description = now - event.eventbrite_sync_status = 'PERSISTED' + event.eventbrite_sync_status = "PERSISTED" event.save() update_event_description_from_eventbrite(event) except Exception as e: if event is not None: - event.eventbrite_sync_description = f'{now} => {e}' - event.eventbrite_sync_status = 'ERROR' + event.eventbrite_sync_description = f"{now} => {e}" + event.eventbrite_sync_status = "ERROR" event.save() raise e @@ -504,32 +517,32 @@ def update_or_create_event(data, org): def publish_event_from_eventbrite(data, org: Organization) -> None: - if not data: #skip if no data - logger.info('Ignored event') - raise ValueError('data is empty') + if not data: # skip if no data + logger.info("Ignored event") + raise ValueError("data is empty") now = get_current_iso_string() try: - events = Event.objects.filter(eventbrite_id=data['id'], organization__id=org.id) + events = Event.objects.filter(eventbrite_id=data["id"], organization__id=org.id) if events.count() == 0: 
raise Warning(f'The event with the eventbrite id `{data["id"]}` doesn\'t exist') for event in events: - event.status = 'ACTIVE' - event.eventbrite_status = data['status'] + event.status = "ACTIVE" + event.eventbrite_status = data["status"] event.eventbrite_sync_description = now - event.eventbrite_sync_status = 'PERSISTED' + event.eventbrite_sync_status = "PERSISTED" event.save() logger.info(f'The events with the eventbrite id `{data["id"]}` were saved') return events.first() except Warning as e: - logger.error(f'{now} => {e}') + logger.error(f"{now} => {e}") raise e except Exception as e: - logger.exception(f'{now} => the body is coming from eventbrite has change') + logger.exception(f"{now} => the body is coming from eventbrite has change") raise e @@ -608,28 +621,32 @@ def update_timeslots_out_of_range(start: datetime, end: datetime, timeslots: Que ending_at = fix_datetime_weekday(end, ending_at, prev=True) starting_at = ending_at - delta - lists.append({ - **vars(timeslot), - 'starting_at': starting_at, - 'ending_at': ending_at, - }) + lists.append( + { + **vars(timeslot), + "starting_at": starting_at, + "ending_at": ending_at, + } + ) - return sorted(lists, key=lambda x: (x['starting_at'], x['ending_at'])) + return sorted(lists, key=lambda x: (x["starting_at"], x["ending_at"])) def fix_datetime_weekday(current: datetime, timeslot: datetime, prev: bool = False, next: bool = False) -> datetime: if not prev and not next: - raise Exception('You should provide a prev or next argument') + raise Exception("You should provide a prev or next argument") days = 0 weekday = timeslot.weekday() - postulate = datetime(year=current.year, - month=current.month, - day=current.day, - hour=timeslot.hour, - minute=timeslot.minute, - second=timeslot.second, - tzinfo=timeslot.tzinfo) + postulate = datetime( + year=current.year, + month=current.month, + day=current.day, + hour=timeslot.hour, + minute=timeslot.minute, + second=timeslot.second, + tzinfo=timeslot.tzinfo, + ) while True: if prev: @@ -646,51 +663,51 @@ def fix_datetime_weekday(current: datetime, timeslot: datetime, prev: bool = Fal RECURRENCY_TYPE = { - 'DAILY': 'day', - 'WEEKLY': 'week', - 'MONTHLY': 'month', + "DAILY": "day", + "WEEKLY": "week", + "MONTHLY": "month", } def get_cohort_description(timeslot: CohortTimeSlot) -> str: - description = '' + description = "" if timeslot.recurrent: - description += f'every {RECURRENCY_TYPE[timeslot.recurrency_type]}, ' + description += f"every {RECURRENCY_TYPE[timeslot.recurrency_type]}, " localtime = pytz.timezone(timeslot.cohort.academy.timezone) starting_at = localtime.localize(timeslot.starting_at) ending_at = localtime.localize(timeslot.ending_at) - starting_weekday = starting_at.strftime('%A').upper() - ending_weekday = ending_at.strftime('%A').upper() + starting_weekday = starting_at.strftime("%A").upper() + ending_weekday = ending_at.strftime("%A").upper() if starting_weekday == ending_weekday: - description += f'{starting_weekday}' + description += f"{starting_weekday}" else: - description += f'{starting_weekday} and {ending_weekday} ' + description += f"{starting_weekday} and {ending_weekday} " - starting_hour = starting_at.strftime('%I:%M %p') - ending_hour = ending_at.strftime('%I:%M %p') - description += f'from {starting_hour} to {ending_hour}' + starting_hour = starting_at.strftime("%I:%M %p") + ending_hour = ending_at.strftime("%I:%M %p") + description += f"from {starting_hour} to {ending_hour}" return description.capitalize() def get_ical_cohort_description(item: Cohort): - description = 
'' + description = "" # description = f'{description}Url: {item.url}\n' if item.name: - description = f'{description}Name: {item.name}\n' + description = f"{description}Name: {item.name}\n" if item.academy: - description = f'{description}Academy: {item.academy.name}\n' + description = f"{description}Academy: {item.academy.name}\n" if item.language: - description = f'{description}Language: {item.language.upper()}\n' + description = f"{description}Language: {item.language.upper()}\n" if item.private: description = f'{description}Private: {"Yes" if item.private else "No"}\n' @@ -705,7 +722,7 @@ def get_ical_cohort_description(item: Cohort): @functools.lru_cache(maxsize=1) def is_eventbrite_enabled(): - if 'ENV' in os.environ and os.environ['ENV'] == 'test': + if "ENV" in os.environ and os.environ["ENV"] == "test": return True - return os.getenv('EVENTBRITE', '0') == '1' + return os.getenv("EVENTBRITE", "0") == "1" diff --git a/breathecode/events/admin.py b/breathecode/events/admin.py index cb2a12607..700c496b2 100644 --- a/breathecode/events/admin.py +++ b/breathecode/events/admin.py @@ -4,8 +4,17 @@ from django.contrib import messages from django.utils.html import format_html import breathecode.events.tasks as tasks -from .models import (Event, EventTypeVisibilitySetting, LiveClass, Venue, EventType, EventCheckin, Organization, - Organizer, EventbriteWebhook) +from .models import ( + Event, + EventTypeVisibilitySetting, + LiveClass, + Venue, + EventType, + EventCheckin, + Organization, + Organizer, + EventbriteWebhook, +) from .actions import sync_org_venues, sync_org_events from breathecode.utils import AdminExportCsvMixin import breathecode.marketing.tasks as marketing_tasks @@ -18,8 +27,8 @@ def pull_eventbrite_venues(modeladmin, request, queryset): for entry in entries: sync_org_venues(entry) except Exception as e: - print('error', str(e)) - messages.error(request, f'There was an error retriving the venues {str(e)}') + print("error", str(e)) + messages.error(request, f"There was an error retriving the venues {str(e)}") def pull_eventbrite_events(modeladmin, request, queryset): @@ -31,21 +40,21 @@ def pull_eventbrite_events(modeladmin, request, queryset): @admin.register(Organization) class OrganizationAdmin(admin.ModelAdmin): - list_display = ('name', 'eventbrite_id', 'sync_status', 'sync_desc', 'academy') - list_filter = ['sync_status', 'academy'] - search_fields = ['name', 'eventbrite_id'] + list_display = ("name", "eventbrite_id", "sync_status", "sync_desc", "academy") + list_filter = ["sync_status", "academy"] + search_fields = ["name", "eventbrite_id"] actions = [pull_eventbrite_venues, pull_eventbrite_events] @admin.register(Organizer) class OrganizerAdmin(admin.ModelAdmin): - list_display = ('name', 'eventbrite_id', 'academy', 'organization') - list_filter = ['academy', 'organization'] - search_fields = ['name', 'eventbrite_id'] + list_display = ("name", "eventbrite_id", "academy", "organization") + list_filter = ["academy", "organization"] + search_fields = ["name", "eventbrite_id"] actions = [] -@admin.display(description='Reattempt add event slug to Active Campaign') +@admin.display(description="Reattempt add event slug to Active Campaign") def reattempt_add_event_slug_as_acp_tag(modeladmin, request, queryset): for instance in queryset: if instance.academy: @@ -55,27 +64,36 @@ def reattempt_add_event_slug_as_acp_tag(modeladmin, request, queryset): # Register your models here. 
@admin.register(Event) class EventAdmin(admin.ModelAdmin, AdminExportCsvMixin): - list_display = ('slug', 'eventbrite_sync_status', 'title', 'status', 'eventbrite_status', 'starting_at', - 'ending_at', 'eventbrite_sync_description', 'sync_with_eventbrite') + list_display = ( + "slug", + "eventbrite_sync_status", + "title", + "status", + "eventbrite_status", + "starting_at", + "ending_at", + "eventbrite_sync_description", + "sync_with_eventbrite", + ) list_filter = [ - 'status', - 'eventbrite_status', - 'eventbrite_sync_status', - 'sync_with_eventbrite', - 'currency', - 'lang', - 'academy', - 'organization', - 'online_event', - 'event_type', + "status", + "eventbrite_status", + "eventbrite_sync_status", + "sync_with_eventbrite", + "currency", + "lang", + "academy", + "organization", + "online_event", + "event_type", ] - search_fields = ['slug', 'title', 'eventbrite_id', 'eventbrite_organizer_id'] - raw_id_fields = ['host_user'] - actions = ['export_as_csv', reattempt_add_event_slug_as_acp_tag] + search_fields = ["slug", "title", "eventbrite_id", "eventbrite_organizer_id"] + raw_id_fields = ["host_user"] + actions = ["export_as_csv", reattempt_add_event_slug_as_acp_tag] def formfield_for_foreignkey(self, db_field, request, **kwargs): - if db_field.name == 'author': - kwargs['queryset'] = User.objects.filter(is_staff=True) + if db_field.name == "author": + kwargs["queryset"] = User.objects.filter(is_staff=True) return super().formfield_for_foreignkey(db_field, request, **kwargs) def organizer(self, obj): @@ -85,27 +103,27 @@ def organizer(self, obj): # Register your models here. @admin.register(Venue) class VenueAdmin(admin.ModelAdmin): - list_display = ('title', 'country', 'state', 'city', 'street_address', 'academy', 'organization') - list_filter = ['academy', 'organization'] - search_fields = ['title', 'eventbrite_id', 'country', 'state', 'city', 'street_address'] + list_display = ("title", "country", "state", "city", "street_address", "academy", "organization") + list_filter = ["academy", "organization"] + search_fields = ["title", "eventbrite_id", "country", "state", "city", "street_address"] # Register your models here. @admin.register(EventType) class EventTypeAdmin(admin.ModelAdmin): - list_display = ('slug', 'name', 'academy') - list_filter = ['academy'] - search_fields = ['slug', 'name'] - raw_id_fields = ['academy'] + list_display = ("slug", "name", "academy") + list_filter = ["academy"] + search_fields = ["slug", "name"] + raw_id_fields = ["academy"] # Register your models here. 
@admin.register(EventCheckin) class EventCheckinAdmin(admin.ModelAdmin): - list_display = ('id', 'email', 'attendee', 'event', 'status', 'created_at', 'attended_at', 'utm_source') - list_filter = ['status', 'utm_source', 'utm_medium'] - search_fields = ['email', 'event__title', 'event__slug'] - raw_id_fields = ['event', 'attendee'] + list_display = ("id", "email", "attendee", "event", "status", "created_at", "attended_at", "utm_source") + list_filter = ["status", "utm_source", "utm_medium"] + search_fields = ["email", "event__title", "event__slug"] + raw_id_fields = ["event", "attendee"] def reattempt_eventbrite_webhook(modeladmin, request, queryset): @@ -117,13 +135,19 @@ def reattempt_eventbrite_webhook(modeladmin, request, queryset): @admin.register(EventbriteWebhook) class EventbriteWebhookAdmin(admin.ModelAdmin): - list_display = ('id', 'current_status', 'action', 'organization', 'user_attendee', 'event', 'created_at') - list_filter = ['organization_id', 'status', 'action'] + list_display = ("id", "current_status", "action", "organization", "user_attendee", "event", "created_at") + list_filter = ["organization_id", "status", "action"] search_fields = [ - 'organization_id', 'status', 'event__title', 'event__slug', 'attendee__email', 'attendee__first_name', - 'attendee__last_name', 'event__uuid' + "organization_id", + "status", + "event__title", + "event__slug", + "attendee__email", + "attendee__first_name", + "attendee__last_name", + "event__uuid", ] - raw_id_fields = ['event', 'attendee'] + raw_id_fields = ["event", "attendee"] actions = [reattempt_eventbrite_webhook] def organization(self, obj): @@ -131,38 +155,51 @@ def organization(self, obj): def current_status(self, obj): colors = { - 'DONE': 'bg-success', - 'ERROR': 'bg-error', - 'PENDING': 'bg-warning', + "DONE": "bg-success", + "ERROR": "bg-error", + "PENDING": "bg-warning", } - if obj.status == 'DONE': + if obj.status == "DONE": return format_html(f"<span class='badge {colors[obj.status]}'>{obj.status}</span>") return format_html( - f"<div><span class='badge {colors[obj.status]}'>{obj.status}</span></div><small>{obj.status_text}</small>") + f"<div><span class='badge {colors[obj.status]}'>{obj.status}</span></div><small>{obj.status_text}</small>" + ) def user_attendee(self, obj): if obj.attendee is None: - return '-' + return "-" return format_html(f"<a href='/admin/auth/user/{obj.attendee.id}/change/'>{str(obj.attendee)}</a>") @admin.register(EventTypeVisibilitySetting) class EventTypeVisibilitySettingAdmin(admin.ModelAdmin): - list_display = ('academy', 'cohort', 'syllabus') - list_filter = ['academy'] + list_display = ("academy", "cohort", "syllabus") + list_filter = ["academy"] search_fields = [ - 'academy__slug', 'academy__name', 'syllabus__slug', 'syllabus__name', 'cohort__slug', 'cohort__name' + "academy__slug", + "academy__name", + "syllabus__slug", + "syllabus__name", + "cohort__slug", + "cohort__name", ] actions = [reattempt_eventbrite_webhook] - raw_id_fields = ['syllabus', 'cohort', 'academy'] + raw_id_fields = ["syllabus", "cohort", "academy"] @admin.register(LiveClass) class LiveClassAdmin(admin.ModelAdmin): - list_display = ('cohort_time_slot', 'remote_meeting_url', 'starting_at', 'ending_at', 'started_at', 'ended_at', - 'did_it_close_automatically') - list_filter = ['cohort_time_slot__recurrent', 'cohort_time_slot__recurrency_type'] - search_fields = ['id', 'remote_meeting_url'] + list_display = ( + "cohort_time_slot", + "remote_meeting_url", + "starting_at", + "ending_at", + "started_at", + "ended_at", + 
"did_it_close_automatically", + ) + list_filter = ["cohort_time_slot__recurrent", "cohort_time_slot__recurrency_type"] + search_fields = ["id", "remote_meeting_url"] def did_it_close_automatically(self, obj: LiveClass): if not obj.ended_at: diff --git a/breathecode/events/apps.py b/breathecode/events/apps.py index e9767466a..92afd2193 100644 --- a/breathecode/events/apps.py +++ b/breathecode/events/apps.py @@ -2,7 +2,7 @@ class EventsConfig(AppConfig): - name = 'breathecode.events' + name = "breathecode.events" def ready(self): from . import receivers # noqa diff --git a/breathecode/events/management/commands/build_live_classes.py b/breathecode/events/management/commands/build_live_classes.py index d09ac9c0e..80f5dc08d 100644 --- a/breathecode/events/management/commands/build_live_classes.py +++ b/breathecode/events/management/commands/build_live_classes.py @@ -7,17 +7,20 @@ class Command(BaseCommand): - help = 'Build live classes' + help = "Build live classes" def handle(self, *args, **options): utc_now = timezone.now() - cohorts = Cohort.objects.filter(ending_date__gte=utc_now, - never_ends=False).exclude(stage__in=['DELETED', 'PREWORK']) + cohorts = Cohort.objects.filter(ending_date__gte=utc_now, never_ends=False).exclude( + stage__in=["DELETED", "PREWORK"] + ) self.stdout.write( self.style.SUCCESS( - f'Found {str(cohorts.count())} cohorts that have not finished and should have live classes')) + f"Found {str(cohorts.count())} cohorts that have not finished and should have live classes" + ) + ) for cohort in cohorts: timeslots = CohortTimeSlot.objects.filter(cohort=cohort) @@ -25,10 +28,14 @@ def handle(self, *args, **options): if total_cohort_timeslots == 0: self.stderr.write( self.style.ERROR( - f'Cohort {cohort.slug} live classes will not be generated because it does not have timeslots')) + f"Cohort {cohort.slug} live classes will not be generated because it does not have timeslots" + ) + ) else: self.stdout.write( self.style.SUCCESS( - f'Adding cohort {cohort.slug} to the generation queue, it ends on {str(cohort.ending_date)}')) + f"Adding cohort {cohort.slug} to the generation queue, it ends on {str(cohort.ending_date)}" + ) + ) for timeslot in timeslots: tasks.build_live_classes_from_timeslot.delay(timeslot.id) diff --git a/breathecode/events/management/commands/close_live_classes.py b/breathecode/events/management/commands/close_live_classes.py index e0c12e943..7b368cfea 100644 --- a/breathecode/events/management/commands/close_live_classes.py +++ b/breathecode/events/management/commands/close_live_classes.py @@ -7,7 +7,7 @@ class Command(BaseCommand): - help = 'Close live classes' + help = "Close live classes" def handle(self, *args: Any, **options: Any): live_classes = LiveClass.objects.filter(started_at__isnull=False, ended_at=None) diff --git a/breathecode/events/management/commands/fix_live_class_dates.py b/breathecode/events/management/commands/fix_live_class_dates.py index 782ee97f5..645d2ea84 100644 --- a/breathecode/events/management/commands/fix_live_class_dates.py +++ b/breathecode/events/management/commands/fix_live_class_dates.py @@ -7,17 +7,20 @@ class Command(BaseCommand): - help = 'Fix live classes' + help = "Fix live classes" def handle(self, *args, **options): utc_now = timezone.now() - cohorts = Cohort.objects.filter(ending_date__gte=utc_now, - never_ends=False).exclude(stage__in=['DELETED', 'PREWORK']) + cohorts = Cohort.objects.filter(ending_date__gte=utc_now, never_ends=False).exclude( + stage__in=["DELETED", "PREWORK"] + ) self.stdout.write( 
self.style.SUCCESS( - f'Found {str(cohorts.count())} cohorts that have not finished and should have live classes')) + f"Found {str(cohorts.count())} cohorts that have not finished and should have live classes" + ) + ) for cohort in cohorts: timeslots = CohortTimeSlot.objects.filter(cohort=cohort) @@ -29,6 +32,8 @@ def handle(self, *args, **options): else: self.stdout.write( self.style.SUCCESS( - f'Adding cohort {cohort.slug} to the fixing queue, it ends on {cohort.ending_date}')) + f"Adding cohort {cohort.slug} to the fixing queue, it ends on {cohort.ending_date}" + ) + ) for timeslot in timeslots: tasks.fix_live_class_dates.delay(timeslot.id) diff --git a/breathecode/events/management/commands/garbage_collect_events.py b/breathecode/events/management/commands/garbage_collect_events.py index 9c99e7d24..08877a3d3 100644 --- a/breathecode/events/management/commands/garbage_collect_events.py +++ b/breathecode/events/management/commands/garbage_collect_events.py @@ -5,21 +5,24 @@ class Command(BaseCommand): - help = 'Delete logs and other garbage' + help = "Delete logs and other garbage" def handle(self, *args, **options): how_many_days_with_error = 60 how_many_days_with_done = 30 - webhooks = EventbriteWebhook.objects.filter(created_at__lte=timezone.now() - - timedelta(days=how_many_days_with_done), - status='DONE') + webhooks = EventbriteWebhook.objects.filter( + created_at__lte=timezone.now() - timedelta(days=how_many_days_with_done), status="DONE" + ) count_done = webhooks.count() webhooks.delete() - webhooks = EventbriteWebhook.objects.filter(created_at__lte=timezone.now() - - timedelta(days=how_many_days_with_error)).exclude(status='DONE') + webhooks = EventbriteWebhook.objects.filter( + created_at__lte=timezone.now() - timedelta(days=how_many_days_with_error) + ).exclude(status="DONE") count_error = webhooks.count() webhooks.delete() self.stdout.write( self.style.SUCCESS( - f"Successfully deleted {str(count_done)} done, and {str(count_error)} errored EventbriteWebhook's")) + f"Successfully deleted {str(count_done)} done, and {str(count_error)} errored EventbriteWebhook's" + ) + ) diff --git a/breathecode/events/management/commands/rerun_eventbrinte_order_placed.py b/breathecode/events/management/commands/rerun_eventbrinte_order_placed.py index a56e24169..cf6d39107 100644 --- a/breathecode/events/management/commands/rerun_eventbrinte_order_placed.py +++ b/breathecode/events/management/commands/rerun_eventbrinte_order_placed.py @@ -4,8 +4,8 @@ class Command(BaseCommand): - help = 'Sync all the EventbriteWebhook of type order.placed' + help = "Sync all the EventbriteWebhook of type order.placed" def handle(self, *args, **options): - for element in EventbriteWebhook.objects.filter(action='order.placed'): + for element in EventbriteWebhook.objects.filter(action="order.placed"): tasks.async_eventbrite_webhook.delay(element.id) diff --git a/breathecode/events/management/commands/sync_eventbrite.py b/breathecode/events/management/commands/sync_eventbrite.py index abac09d69..a1ab8609c 100644 --- a/breathecode/events/management/commands/sync_eventbrite.py +++ b/breathecode/events/management/commands/sync_eventbrite.py @@ -5,23 +5,23 @@ class Command(BaseCommand): - help = 'Sync from eventbrite, please make sure to add the argument, Eg: sync_eventbrite events' + help = "Sync from eventbrite, please make sure to add the argument, Eg: sync_eventbrite events" def add_arguments(self, parser): - parser.add_argument('entity', type=str) + parser.add_argument("entity", type=str) parser.add_argument( - 
'--override', - action='store_true', - help='Delete and add again', + "--override", + action="store_true", + help="Delete and add again", ) - parser.add_argument('--limit', action='store', dest='limit', type=int, default=0, help='How many to import') + parser.add_argument("--limit", action="store", dest="limit", type=int, default=0, help="How many to import") def handle(self, *args, **options): - if 'entity' not in options: - return self.stderr.write(self.style.ERROR('Entity argument not provided')) + if "entity" not in options: + return self.stderr.write(self.style.ERROR("Entity argument not provided")) try: - func = getattr(self, options['entity']) + func = getattr(self, options["entity"]) func(options) except Exception: return self.stderr.write(self.style.ERROR(f'Sync method for `{options["entity"]}` no Found!')) @@ -32,15 +32,15 @@ def events(self, options): count = 0 for org in orgs: if not org.eventbrite_key or not org.eventbrite_id: - org.sync_status = 'ERROR' - org.sync_desc = 'Missing eventbrite key or id' + org.sync_status = "ERROR" + org.sync_desc = "Missing eventbrite key or id" org.save() - self.stderr.write(self.style.ERROR(f'Organization {str(org)} is missing evenbrite key or ID')) + self.stderr.write(self.style.ERROR(f"Organization {str(org)} is missing evenbrite key or ID")) else: - org.sync_status = 'PENDING' - org.sync_desc = 'Running sync_eventbrite command at ' + str(now) + org.sync_status = "PENDING" + org.sync_desc = "Running sync_eventbrite command at " + str(now) org.save() - persist_organization_events.delay({'org_id': org.id}) + persist_organization_events.delay({"org_id": org.id}) count = count + 1 - self.stdout.write(self.style.SUCCESS(f'Enqueued {count} of {len(orgs)} for sync events')) + self.stdout.write(self.style.SUCCESS(f"Enqueued {count} of {len(orgs)} for sync events")) diff --git a/breathecode/events/migrations/0001_initial.py b/breathecode/events/migrations/0001_initial.py index f3c97d605..82a1226fa 100644 --- a/breathecode/events/migrations/0001_initial.py +++ b/breathecode/events/migrations/0001_initial.py @@ -15,97 +15,109 @@ class Migration(migrations.Migration): operations = [ migrations.CreateModel( - name='Event', + name="Event", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('slug', models.SlugField(max_length=150, unique=True)), - ('description', models.TextField(max_length=2000)), - ('title', models.CharField(max_length=255)), - ('lang', models.CharField(max_length=2)), - ('url', models.URLField(max_length=255)), - ('banner', models.URLField(max_length=255)), - ('capacity', models.IntegerField()), - ('starting_at', models.DateTimeField()), - ('ending_at', models.DateTimeField()), - ('status', - models.CharField(blank=True, - choices=[('ACTIVE', 'Active'), ('DELETED', 'Deleted')], - default='DRAFT', - max_length=9)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('author', - models.ForeignKey(blank=True, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to=settings.AUTH_USER_MODEL)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("slug", models.SlugField(max_length=150, unique=True)), + ("description", models.TextField(max_length=2000)), + ("title", models.CharField(max_length=255)), + ("lang", models.CharField(max_length=2)), + ("url", models.URLField(max_length=255)), + ("banner", models.URLField(max_length=255)), + ("capacity", 
models.IntegerField()), + ("starting_at", models.DateTimeField()), + ("ending_at", models.DateTimeField()), + ( + "status", + models.CharField( + blank=True, + choices=[("ACTIVE", "Active"), ("DELETED", "Deleted")], + default="DRAFT", + max_length=9, + ), + ), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ( + "author", + models.ForeignKey( + blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL + ), + ), ], ), migrations.CreateModel( - name='EventType', + name="EventType", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('slug', models.SlugField(max_length=150, unique=True)), - ('name', models.CharField(max_length=150)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("slug", models.SlugField(max_length=150, unique=True)), + ("name", models.CharField(max_length=150)), ], ), migrations.CreateModel( - name='Venue', + name="Venue", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('title', models.TextField(blank=True, max_length=100)), - ('street_address', models.CharField(blank=True, max_length=250)), - ('country', models.CharField(blank=True, max_length=30)), - ('city', models.CharField(blank=True, max_length=30)), - ('latitude', models.DecimalField(decimal_places=6, default=0, max_digits=9)), - ('longitude', models.DecimalField(decimal_places=6, default=0, max_digits=9)), - ('state', models.CharField(blank=True, max_length=30)), - ('zip_code', models.IntegerField(blank=True, null=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('status', - models.CharField(blank=True, - choices=[('ACTIVE', 'Active'), ('DRAFT', 'Draft'), ('DELETED', 'Deleted')], - default='DRAFT', - max_length=9)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("title", models.TextField(blank=True, max_length=100)), + ("street_address", models.CharField(blank=True, max_length=250)), + ("country", models.CharField(blank=True, max_length=30)), + ("city", models.CharField(blank=True, max_length=30)), + ("latitude", models.DecimalField(decimal_places=6, default=0, max_digits=9)), + ("longitude", models.DecimalField(decimal_places=6, default=0, max_digits=9)), + ("state", models.CharField(blank=True, max_length=30)), + ("zip_code", models.IntegerField(blank=True, null=True)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ( + "status", + models.CharField( + blank=True, + choices=[("ACTIVE", "Active"), ("DRAFT", "Draft"), ("DELETED", "Deleted")], + default="DRAFT", + max_length=9, + ), + ), ], ), migrations.CreateModel( - name='EventCheckin', + name="EventCheckin", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('email', models.EmailField(max_length=150)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('attendee', - models.ForeignKey(blank=True, on_delete=django.db.models.deletion.CASCADE, - to=settings.AUTH_USER_MODEL)), - ('event', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='events.Event')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, 
verbose_name="ID")), + ("email", models.EmailField(max_length=150)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ( + "attendee", + models.ForeignKey( + blank=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL + ), + ), + ("event", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="events.Event")), ], ), migrations.AddField( - model_name='event', - name='event_type', - field=models.ForeignKey(default=None, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to='events.EventType'), + model_name="event", + name="event_type", + field=models.ForeignKey( + default=None, null=True, on_delete=django.db.models.deletion.SET_NULL, to="events.EventType" + ), ), migrations.AddField( - model_name='event', - name='host', - field=models.ForeignKey(blank=True, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - related_name='host', - to=settings.AUTH_USER_MODEL), + model_name="event", + name="host", + field=models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + related_name="host", + to=settings.AUTH_USER_MODEL, + ), ), migrations.AddField( - model_name='event', - name='venue', - field=models.ForeignKey(default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='events.Venue'), + model_name="event", + name="venue", + field=models.ForeignKey( + default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to="events.Venue" + ), ), ] diff --git a/breathecode/events/migrations/0002_auto_20200806_0005.py b/breathecode/events/migrations/0002_auto_20200806_0005.py index d26b11696..e9c39322f 100644 --- a/breathecode/events/migrations/0002_auto_20200806_0005.py +++ b/breathecode/events/migrations/0002_auto_20200806_0005.py @@ -6,27 +6,29 @@ class Migration(migrations.Migration): dependencies = [ - ('events', '0001_initial'), + ("events", "0001_initial"), ] operations = [ migrations.AddField( - model_name='event', - name='exerpt', - field=models.TextField(default='something', max_length=500), + model_name="event", + name="exerpt", + field=models.TextField(default="something", max_length=500), preserve_default=False, ), migrations.AlterField( - model_name='event', - name='status', - field=models.CharField(blank=True, - choices=[('ACTIVE', 'Active'), ('DRAFT', 'Draft'), ('DELETED', 'Deleted')], - default='DRAFT', - max_length=9), + model_name="event", + name="status", + field=models.CharField( + blank=True, + choices=[("ACTIVE", "Active"), ("DRAFT", "Draft"), ("DELETED", "Deleted")], + default="DRAFT", + max_length=9, + ), ), migrations.AlterField( - model_name='venue', - name='title', + model_name="venue", + name="title", field=models.CharField(blank=True, max_length=200), ), ] diff --git a/breathecode/events/migrations/0003_auto_20200806_0035.py b/breathecode/events/migrations/0003_auto_20200806_0035.py index 76048cc17..c61fb8f7e 100644 --- a/breathecode/events/migrations/0003_auto_20200806_0035.py +++ b/breathecode/events/migrations/0003_auto_20200806_0035.py @@ -7,33 +7,30 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0008_auto_20200708_0049'), - ('events', '0002_auto_20200806_0005'), + ("admissions", "0008_auto_20200708_0049"), + ("events", "0002_auto_20200806_0005"), ] operations = [ migrations.AddField( - model_name='event', - name='academy', - field=models.ForeignKey(blank=True, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='admissions.Academy'), + 
model_name="event", + name="academy", + field=models.ForeignKey( + blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to="admissions.Academy" + ), ), migrations.AddField( - model_name='eventtype', - name='academy', - field=models.ForeignKey(blank=True, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='admissions.Academy'), + model_name="eventtype", + name="academy", + field=models.ForeignKey( + blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to="admissions.Academy" + ), ), migrations.AddField( - model_name='venue', - name='academy', - field=models.ForeignKey(blank=True, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='admissions.Academy'), + model_name="venue", + name="academy", + field=models.ForeignKey( + blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to="admissions.Academy" + ), ), ] diff --git a/breathecode/events/migrations/0004_auto_20200806_0042.py b/breathecode/events/migrations/0004_auto_20200806_0042.py index 8d693421c..ba29c4c6a 100644 --- a/breathecode/events/migrations/0004_auto_20200806_0042.py +++ b/breathecode/events/migrations/0004_auto_20200806_0042.py @@ -7,19 +7,19 @@ class Migration(migrations.Migration): dependencies = [ - ('events', '0003_auto_20200806_0035'), + ("events", "0003_auto_20200806_0035"), ] operations = [ migrations.AddField( - model_name='eventtype', - name='created_at', + model_name="eventtype", + name="created_at", field=models.DateTimeField(auto_now_add=True, default=django.utils.timezone.now), preserve_default=False, ), migrations.AddField( - model_name='eventtype', - name='updated_at', + model_name="eventtype", + name="updated_at", field=models.DateTimeField(auto_now=True), ), ] diff --git a/breathecode/events/migrations/0005_auto_20201010_0257.py b/breathecode/events/migrations/0005_auto_20201010_0257.py index 022d32e3a..acc5b8591 100644 --- a/breathecode/events/migrations/0005_auto_20201010_0257.py +++ b/breathecode/events/migrations/0005_auto_20201010_0257.py @@ -7,33 +7,33 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0011_auto_20201006_0058'), - ('events', '0004_auto_20200806_0042'), + ("admissions", "0011_auto_20201006_0058"), + ("events", "0004_auto_20200806_0042"), ] operations = [ migrations.CreateModel( - name='Organizacion', + name="Organizacion", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('eventbrite_id', models.CharField(blank=True, max_length=30)), - ('eventbrite_key', models.CharField(blank=True, default=None, max_length=255, null=True)), - ('name', models.CharField(blank=True, default=None, max_length=100, null=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('academy', - models.ForeignKey(blank=True, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='admissions.academy')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("eventbrite_id", models.CharField(blank=True, max_length=30)), + ("eventbrite_key", models.CharField(blank=True, default=None, max_length=255, null=True)), + ("name", models.CharField(blank=True, default=None, max_length=100, null=True)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ( + "academy", + models.ForeignKey( + blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to="admissions.academy" + ), 
+ ), ], ), migrations.AddField( - model_name='venue', - name='organization', - field=models.ForeignKey(blank=True, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='events.organizacion'), + model_name="venue", + name="organization", + field=models.ForeignKey( + blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to="events.organizacion" + ), ), ] diff --git a/breathecode/events/migrations/0006_auto_20201010_0315.py b/breathecode/events/migrations/0006_auto_20201010_0315.py index 03c1ef95c..eb8dcae58 100644 --- a/breathecode/events/migrations/0006_auto_20201010_0315.py +++ b/breathecode/events/migrations/0006_auto_20201010_0315.py @@ -6,23 +6,23 @@ class Migration(migrations.Migration): dependencies = [ - ('events', '0005_auto_20201010_0257'), + ("events", "0005_auto_20201010_0257"), ] operations = [ migrations.AddField( - model_name='venue', - name='eventbrite_id', + model_name="venue", + name="eventbrite_id", field=models.CharField(blank=True, default=None, max_length=80, null=True, unique=True), ), migrations.AddField( - model_name='venue', - name='eventbrite_url', + model_name="venue", + name="eventbrite_url", field=models.CharField(blank=True, default=None, max_length=255, null=True), ), migrations.AlterField( - model_name='organizacion', - name='eventbrite_id', + model_name="organizacion", + name="eventbrite_id", field=models.CharField(blank=True, max_length=30, unique=True), ), ] diff --git a/breathecode/events/migrations/0007_auto_20201011_0014.py b/breathecode/events/migrations/0007_auto_20201011_0014.py index c48f2bf00..11253b259 100644 --- a/breathecode/events/migrations/0007_auto_20201011_0014.py +++ b/breathecode/events/migrations/0007_auto_20201011_0014.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('events', '0006_auto_20201010_0315'), + ("events", "0006_auto_20201010_0315"), ] operations = [ migrations.AlterField( - model_name='venue', - name='title', + model_name="venue", + name="title", field=models.CharField(blank=True, default=None, max_length=200, null=True), ), ] diff --git a/breathecode/events/migrations/0008_auto_20201011_0016.py b/breathecode/events/migrations/0008_auto_20201011_0016.py index fe41a63ea..d8b53d700 100644 --- a/breathecode/events/migrations/0008_auto_20201011_0016.py +++ b/breathecode/events/migrations/0008_auto_20201011_0016.py @@ -6,38 +6,38 @@ class Migration(migrations.Migration): dependencies = [ - ('events', '0007_auto_20201011_0014'), + ("events", "0007_auto_20201011_0014"), ] operations = [ migrations.AlterField( - model_name='venue', - name='city', + model_name="venue", + name="city", field=models.CharField(blank=True, default=None, max_length=30, null=True), ), migrations.AlterField( - model_name='venue', - name='country', + model_name="venue", + name="country", field=models.CharField(blank=True, default=None, max_length=30, null=True), ), migrations.AlterField( - model_name='venue', - name='latitude', + model_name="venue", + name="latitude", field=models.DecimalField(decimal_places=15, default=0, max_digits=20), ), migrations.AlterField( - model_name='venue', - name='longitude', + model_name="venue", + name="longitude", field=models.DecimalField(decimal_places=15, default=0, max_digits=20), ), migrations.AlterField( - model_name='venue', - name='state', + model_name="venue", + name="state", field=models.CharField(blank=True, default=None, max_length=30, null=True), ), migrations.AlterField( - model_name='venue', - name='zip_code', + model_name="venue", + name="zip_code", 
field=models.IntegerField(blank=True, default=None, null=True), ), ] diff --git a/breathecode/events/migrations/0009_auto_20201011_0019.py b/breathecode/events/migrations/0009_auto_20201011_0019.py index b87ed540d..7d8b7db29 100644 --- a/breathecode/events/migrations/0009_auto_20201011_0019.py +++ b/breathecode/events/migrations/0009_auto_20201011_0019.py @@ -6,20 +6,22 @@ class Migration(migrations.Migration): dependencies = [ - ('events', '0008_auto_20201011_0016'), + ("events", "0008_auto_20201011_0016"), ] operations = [ migrations.AlterField( - model_name='venue', - name='status', - field=models.CharField(choices=[('ACTIVE', 'Active'), ('DRAFT', 'Draft'), ('DELETED', 'Deleted')], - default='DRAFT', - max_length=9), + model_name="venue", + name="status", + field=models.CharField( + choices=[("ACTIVE", "Active"), ("DRAFT", "Draft"), ("DELETED", "Deleted")], + default="DRAFT", + max_length=9, + ), ), migrations.AlterField( - model_name='venue', - name='street_address', + model_name="venue", + name="street_address", field=models.CharField(blank=True, default=None, max_length=250, null=True), ), ] diff --git a/breathecode/events/migrations/0010_auto_20201012_1548.py b/breathecode/events/migrations/0010_auto_20201012_1548.py index e11a66c4e..61ec7815e 100644 --- a/breathecode/events/migrations/0010_auto_20201012_1548.py +++ b/breathecode/events/migrations/0010_auto_20201012_1548.py @@ -7,58 +7,58 @@ class Migration(migrations.Migration): dependencies = [ - ('events', '0009_auto_20201011_0019'), + ("events", "0009_auto_20201011_0019"), ] operations = [ migrations.AddField( - model_name='event', - name='eventbrite_id', + model_name="event", + name="eventbrite_id", field=models.CharField(blank=True, default=None, max_length=80, null=True, unique=True), ), migrations.AddField( - model_name='event', - name='eventbrite_organizer_id', + model_name="event", + name="eventbrite_organizer_id", field=models.CharField(blank=True, default=None, max_length=80, null=True), ), migrations.AddField( - model_name='event', - name='eventbrite_status', - field=models.CharField(default='draft', - help_text='One of: draft, live, started, ended, completed and canceled', - max_length=9), + model_name="event", + name="eventbrite_status", + field=models.CharField( + default="draft", help_text="One of: draft, live, started, ended, completed and canceled", max_length=9 + ), preserve_default=False, ), migrations.AddField( - model_name='event', - name='eventbrite_url', + model_name="event", + name="eventbrite_url", field=models.CharField(blank=True, default=None, max_length=255, null=True), ), migrations.AddField( - model_name='event', - name='organizacion', - field=models.ForeignKey(blank=True, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='events.organizacion'), + model_name="event", + name="organizacion", + field=models.ForeignKey( + blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to="events.organizacion" + ), ), migrations.AddField( - model_name='event', - name='published_at', + model_name="event", + name="published_at", field=models.DateTimeField(blank=True, default=None, null=True), ), migrations.AddField( - model_name='event', - name='sync_desc', + model_name="event", + name="sync_desc", field=models.TextField(blank=True, default=None, max_length=255, null=True), ), migrations.AddField( - model_name='event', - name='sync_status', + model_name="event", + name="sync_status", field=models.CharField( - choices=[('PENDING', 'Pending'), ('PERSISTED', 'Persisted'), ('ERROR', 
'Error')], - default='PENDING', - help_text='One of: PENDING, PERSISTED or ERROR depending on how the eventbrite sync status', - max_length=9), + choices=[("PENDING", "Pending"), ("PERSISTED", "Persisted"), ("ERROR", "Error")], + default="PENDING", + help_text="One of: PENDING, PERSISTED or ERROR depending on how the eventbrite sync status", + max_length=9, + ), ), ] diff --git a/breathecode/events/migrations/0011_remove_event_slug.py b/breathecode/events/migrations/0011_remove_event_slug.py index a74589221..6976ba63d 100644 --- a/breathecode/events/migrations/0011_remove_event_slug.py +++ b/breathecode/events/migrations/0011_remove_event_slug.py @@ -6,12 +6,12 @@ class Migration(migrations.Migration): dependencies = [ - ('events', '0010_auto_20201012_1548'), + ("events", "0010_auto_20201012_1548"), ] operations = [ migrations.RemoveField( - model_name='event', - name='slug', + model_name="event", + name="slug", ), ] diff --git a/breathecode/events/migrations/0012_auto_20201012_1650.py b/breathecode/events/migrations/0012_auto_20201012_1650.py index e0d02a7dc..4f5027072 100644 --- a/breathecode/events/migrations/0012_auto_20201012_1650.py +++ b/breathecode/events/migrations/0012_auto_20201012_1650.py @@ -6,28 +6,28 @@ class Migration(migrations.Migration): dependencies = [ - ('events', '0011_remove_event_slug'), + ("events", "0011_remove_event_slug"), ] operations = [ migrations.AlterField( - model_name='event', - name='description', + model_name="event", + name="description", field=models.TextField(blank=True, default=None, max_length=2000, null=True), ), migrations.AlterField( - model_name='event', - name='exerpt', + model_name="event", + name="exerpt", field=models.TextField(blank=True, default=None, max_length=500, null=True), ), migrations.AlterField( - model_name='event', - name='lang', + model_name="event", + name="lang", field=models.CharField(blank=True, default=None, max_length=2, null=True), ), migrations.AlterField( - model_name='event', - name='title', + model_name="event", + name="title", field=models.CharField(blank=True, default=None, max_length=255, null=True), ), ] diff --git a/breathecode/events/migrations/0013_auto_20201012_1805.py b/breathecode/events/migrations/0013_auto_20201012_1805.py index a3fa0b018..c541f6ff6 100644 --- a/breathecode/events/migrations/0013_auto_20201012_1805.py +++ b/breathecode/events/migrations/0013_auto_20201012_1805.py @@ -7,54 +7,58 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0011_auto_20201006_0058'), - ('events', '0012_auto_20201012_1650'), + ("admissions", "0011_auto_20201006_0058"), + ("events", "0012_auto_20201012_1650"), ] operations = [ migrations.AddField( - model_name='event', - name='online_event', + model_name="event", + name="online_event", field=models.BooleanField(default=False), ), migrations.CreateModel( - name='Organization', + name="Organization", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('eventbrite_id', models.CharField(blank=True, max_length=30, unique=True)), - ('eventbrite_key', models.CharField(blank=True, default=None, max_length=255, null=True)), - ('name', models.CharField(blank=True, default=None, max_length=100, null=True)), - ('sync_status', - models.CharField( - choices=[('PENDING', 'Pending'), ('PERSISTED', 'Persisted'), ('ERROR', 'Error')], - default='PENDING', - help_text='One of: PENDING, PERSISTED or ERROR depending on how the eventbrite sync status', - max_length=9)), - ('sync_desc', 
models.TextField(blank=True, default=None, max_length=255, null=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('academy', - models.ForeignKey(blank=True, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='admissions.academy')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("eventbrite_id", models.CharField(blank=True, max_length=30, unique=True)), + ("eventbrite_key", models.CharField(blank=True, default=None, max_length=255, null=True)), + ("name", models.CharField(blank=True, default=None, max_length=100, null=True)), + ( + "sync_status", + models.CharField( + choices=[("PENDING", "Pending"), ("PERSISTED", "Persisted"), ("ERROR", "Error")], + default="PENDING", + help_text="One of: PENDING, PERSISTED or ERROR depending on how the eventbrite sync status", + max_length=9, + ), + ), + ("sync_desc", models.TextField(blank=True, default=None, max_length=255, null=True)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ( + "academy", + models.ForeignKey( + blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to="admissions.academy" + ), + ), ], ), migrations.AlterField( - model_name='event', - name='organizacion', - field=models.ForeignKey(blank=True, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='events.organization'), + model_name="event", + name="organizacion", + field=models.ForeignKey( + blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to="events.organization" + ), ), migrations.AlterField( - model_name='venue', - name='organization', - field=models.ForeignKey(blank=True, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='events.organization'), + model_name="venue", + name="organization", + field=models.ForeignKey( + blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to="events.organization" + ), + ), + migrations.DeleteModel( + name="Organizacion", ), - migrations.DeleteModel(name='Organizacion', ), ] diff --git a/breathecode/events/migrations/0014_organizer.py b/breathecode/events/migrations/0014_organizer.py index 8fafefa4f..4222b86c0 100644 --- a/breathecode/events/migrations/0014_organizer.py +++ b/breathecode/events/migrations/0014_organizer.py @@ -7,26 +7,29 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0011_auto_20201006_0058'), - ('events', '0013_auto_20201012_1805'), + ("admissions", "0011_auto_20201006_0058"), + ("events", "0013_auto_20201012_1805"), ] operations = [ migrations.CreateModel( - name='Organizer', + name="Organizer", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('eventbrite_id', models.CharField(blank=True, max_length=30, unique=True)), - ('name', models.CharField(blank=True, default=None, max_length=100, null=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('academy', - models.ForeignKey(blank=True, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='admissions.academy')), - ('organization', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, - to='events.organization')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("eventbrite_id", models.CharField(blank=True, max_length=30, unique=True)), + ("name", 
models.CharField(blank=True, default=None, max_length=100, null=True)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ( + "academy", + models.ForeignKey( + blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to="admissions.academy" + ), + ), + ( + "organization", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="events.organization"), + ), ], ), ] diff --git a/breathecode/events/migrations/0015_organizer_description.py b/breathecode/events/migrations/0015_organizer_description.py index d0acbaddc..602419948 100644 --- a/breathecode/events/migrations/0015_organizer_description.py +++ b/breathecode/events/migrations/0015_organizer_description.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('events', '0014_organizer'), + ("events", "0014_organizer"), ] operations = [ migrations.AddField( - model_name='organizer', - name='description', + model_name="organizer", + name="description", field=models.CharField(blank=True, default=None, max_length=250, null=True), ), ] diff --git a/breathecode/events/migrations/0016_auto_20201012_2115.py b/breathecode/events/migrations/0016_auto_20201012_2115.py index 6896dc9b9..8ec9441f1 100644 --- a/breathecode/events/migrations/0016_auto_20201012_2115.py +++ b/breathecode/events/migrations/0016_auto_20201012_2115.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('events', '0015_organizer_description'), + ("events", "0015_organizer_description"), ] operations = [ migrations.AlterField( - model_name='organizer', - name='description', + model_name="organizer", + name="description", field=models.TextField(blank=True, default=None, max_length=500, null=True), ), ] diff --git a/breathecode/events/migrations/0017_auto_20201012_2305.py b/breathecode/events/migrations/0017_auto_20201012_2305.py index 7722101d7..fc6cf2904 100644 --- a/breathecode/events/migrations/0017_auto_20201012_2305.py +++ b/breathecode/events/migrations/0017_auto_20201012_2305.py @@ -6,18 +6,18 @@ class Migration(migrations.Migration): dependencies = [ - ('events', '0016_auto_20201012_2115'), + ("events", "0016_auto_20201012_2115"), ] operations = [ migrations.RenameField( - model_name='event', - old_name='organizacion', - new_name='organization', + model_name="event", + old_name="organizacion", + new_name="organization", ), migrations.AlterField( - model_name='organization', - name='name', - field=models.CharField(blank=True, default='', max_length=100, null=True), + model_name="organization", + name="name", + field=models.CharField(blank=True, default="", max_length=100, null=True), ), ] diff --git a/breathecode/events/migrations/0018_eventticket.py b/breathecode/events/migrations/0018_eventticket.py index 003cce918..531e1b47a 100644 --- a/breathecode/events/migrations/0018_eventticket.py +++ b/breathecode/events/migrations/0018_eventticket.py @@ -9,25 +9,32 @@ class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ('events', '0017_auto_20201012_2305'), + ("events", "0017_auto_20201012_2305"), ] operations = [ migrations.CreateModel( - name='EventTicket', + name="EventTicket", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('email', models.EmailField(max_length=150)), - ('status', - models.CharField(choices=[('PURCHASED', 'Purchased'), ('ATTENDED', 'Attended')], - default='PURCHASED', - max_length=9)), - 
('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('attendee', - models.ForeignKey(blank=True, on_delete=django.db.models.deletion.CASCADE, - to=settings.AUTH_USER_MODEL)), - ('event', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='events.event')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("email", models.EmailField(max_length=150)), + ( + "status", + models.CharField( + choices=[("PURCHASED", "Purchased"), ("ATTENDED", "Attended")], + default="PURCHASED", + max_length=9, + ), + ), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ( + "attendee", + models.ForeignKey( + blank=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL + ), + ), + ("event", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="events.event")), ], ), ] diff --git a/breathecode/events/migrations/0019_auto_20201230_2201.py b/breathecode/events/migrations/0019_auto_20201230_2201.py index be761d285..9b525b66b 100644 --- a/breathecode/events/migrations/0019_auto_20201230_2201.py +++ b/breathecode/events/migrations/0019_auto_20201230_2201.py @@ -6,14 +6,16 @@ class Migration(migrations.Migration): dependencies = [ - ('events', '0018_eventticket'), + ("events", "0018_eventticket"), ] operations = [ migrations.AddField( - model_name='eventcheckin', - name='status', - field=models.CharField(choices=[('PENDING', 'Pending'), ('DONE', 'Done')], default='PENDING', max_length=9), + model_name="eventcheckin", + name="status", + field=models.CharField(choices=[("PENDING", "Pending"), ("DONE", "Done")], default="PENDING", max_length=9), + ), + migrations.DeleteModel( + name="EventTicket", ), - migrations.DeleteModel(name='EventTicket', ), ] diff --git a/breathecode/events/migrations/0020_eventbritewebhook.py b/breathecode/events/migrations/0020_eventbritewebhook.py index a8c173503..6f9bf9a3c 100644 --- a/breathecode/events/migrations/0020_eventbritewebhook.py +++ b/breathecode/events/migrations/0020_eventbritewebhook.py @@ -6,23 +6,27 @@ class Migration(migrations.Migration): dependencies = [ - ('events', '0019_auto_20201230_2201'), + ("events", "0019_auto_20201230_2201"), ] operations = [ migrations.CreateModel( - name='EventbriteWebhook', + name="EventbriteWebhook", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('api_url', models.CharField(blank=True, default=None, max_length=255, null=True)), - ('user_id', models.CharField(blank=True, default=None, max_length=20, null=True)), - ('action', models.CharField(blank=True, default=None, max_length=15, null=True)), - ('webhook_id', models.CharField(blank=True, default=None, max_length=20, null=True)), - ('endpoint_url', models.CharField(blank=True, default=None, max_length=255, null=True)), - ('status', - models.CharField(choices=[('PENDING', 'Pending'), ('DONE', 'Done')], default='PENDING', max_length=9)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("api_url", models.CharField(blank=True, default=None, max_length=255, null=True)), + ("user_id", models.CharField(blank=True, default=None, max_length=20, null=True)), + ("action", models.CharField(blank=True, default=None, max_length=15, null=True)), + 
("webhook_id", models.CharField(blank=True, default=None, max_length=20, null=True)), + ("endpoint_url", models.CharField(blank=True, default=None, max_length=255, null=True)), + ( + "status", + models.CharField( + choices=[("PENDING", "Pending"), ("DONE", "Done")], default="PENDING", max_length=9 + ), + ), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), ], ), ] diff --git a/breathecode/events/migrations/0021_auto_20210111_2030.py b/breathecode/events/migrations/0021_auto_20210111_2030.py index b46dbd6be..f5ee9fd89 100644 --- a/breathecode/events/migrations/0021_auto_20210111_2030.py +++ b/breathecode/events/migrations/0021_auto_20210111_2030.py @@ -6,20 +6,20 @@ class Migration(migrations.Migration): dependencies = [ - ('events', '0020_eventbritewebhook'), + ("events", "0020_eventbritewebhook"), ] operations = [ migrations.AddField( - model_name='eventbritewebhook', - name='status_text', + model_name="eventbritewebhook", + name="status_text", field=models.CharField(blank=True, default=None, max_length=255, null=True), ), migrations.AlterField( - model_name='eventbritewebhook', - name='status', - field=models.CharField(choices=[('PENDING', 'Pending'), ('DONE', 'Done'), ('ERROR', 'Error')], - default='PENDING', - max_length=9), + model_name="eventbritewebhook", + name="status", + field=models.CharField( + choices=[("PENDING", "Pending"), ("DONE", "Done"), ("ERROR", "Error")], default="PENDING", max_length=9 + ), ), ] diff --git a/breathecode/events/migrations/0022_auto_20210113_1940.py b/breathecode/events/migrations/0022_auto_20210113_1940.py index 274e6a0c2..89ab9a1e4 100644 --- a/breathecode/events/migrations/0022_auto_20210113_1940.py +++ b/breathecode/events/migrations/0022_auto_20210113_1940.py @@ -9,17 +9,19 @@ class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ('events', '0021_auto_20210111_2030'), + ("events", "0021_auto_20210111_2030"), ] operations = [ migrations.AlterField( - model_name='eventcheckin', - name='attendee', - field=models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to=settings.AUTH_USER_MODEL), + model_name="eventcheckin", + name="attendee", + field=models.ForeignKey( + blank=True, + default=None, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to=settings.AUTH_USER_MODEL, + ), ), ] diff --git a/breathecode/events/migrations/0023_eventbritewebhook_organization_id.py b/breathecode/events/migrations/0023_eventbritewebhook_organization_id.py index badaf0a87..f448a7954 100644 --- a/breathecode/events/migrations/0023_eventbritewebhook_organization_id.py +++ b/breathecode/events/migrations/0023_eventbritewebhook_organization_id.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('events', '0022_auto_20210113_1940'), + ("events", "0022_auto_20210113_1940"), ] operations = [ migrations.AddField( - model_name='eventbritewebhook', - name='organization_id', + model_name="eventbritewebhook", + name="organization_id", field=models.CharField(blank=True, default=None, max_length=20, null=True), ), ] diff --git a/breathecode/events/migrations/0024_auto_20210123_0324.py b/breathecode/events/migrations/0024_auto_20210123_0324.py index e395136e5..2f96faadb 100644 --- a/breathecode/events/migrations/0024_auto_20210123_0324.py +++ b/breathecode/events/migrations/0024_auto_20210123_0324.py @@ -6,22 +6,24 @@ class Migration(migrations.Migration): dependencies = 
[ - ('events', '0023_eventbritewebhook_organization_id'), + ("events", "0023_eventbritewebhook_organization_id"), ] operations = [ migrations.RenameField( - model_name='event', - old_name='exerpt', - new_name='excerpt', + model_name="event", + old_name="exerpt", + new_name="excerpt", ), migrations.AlterField( - model_name='event', - name='eventbrite_status', - field=models.CharField(blank=True, - default=None, - help_text='One of: draft, live, started, ended, completed and canceled', - max_length=9, - null=True), + model_name="event", + name="eventbrite_status", + field=models.CharField( + blank=True, + default=None, + help_text="One of: draft, live, started, ended, completed and canceled", + max_length=9, + null=True, + ), ), ] diff --git a/breathecode/events/migrations/0025_eventcheckin_attended_at.py b/breathecode/events/migrations/0025_eventcheckin_attended_at.py index f4450a3f6..eaf2da5cc 100644 --- a/breathecode/events/migrations/0025_eventcheckin_attended_at.py +++ b/breathecode/events/migrations/0025_eventcheckin_attended_at.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('events', '0024_auto_20210123_0324'), + ("events", "0024_auto_20210123_0324"), ] operations = [ migrations.AddField( - model_name='eventcheckin', - name='attended_at', + model_name="eventcheckin", + name="attended_at", field=models.DateTimeField(blank=True, default=None, null=True), ), ] diff --git a/breathecode/events/migrations/0026_auto_20211129_1758.py b/breathecode/events/migrations/0026_auto_20211129_1758.py index a1407f8b8..d0d9e943a 100644 --- a/breathecode/events/migrations/0026_auto_20211129_1758.py +++ b/breathecode/events/migrations/0026_auto_20211129_1758.py @@ -6,33 +6,45 @@ class Migration(migrations.Migration): dependencies = [ - ('events', '0025_eventcheckin_attended_at'), + ("events", "0025_eventcheckin_attended_at"), ] operations = [ migrations.AddField( - model_name='event', - name='sync_with_eventbrite', + model_name="event", + name="sync_with_eventbrite", field=models.BooleanField(default=False), ), migrations.AlterField( - model_name='event', - name='sync_status', + model_name="event", + name="sync_status", field=models.CharField( - choices=[('PENDING', 'Pending'), ('PERSISTED', 'Persisted'), ('ERROR', 'Error'), ('WARNING', 'Warning'), - ('SYNCHED', 'Synched')], - default='PENDING', - help_text='One of: PENDING, PERSISTED or ERROR depending on how the eventbrite sync status', - max_length=9), + choices=[ + ("PENDING", "Pending"), + ("PERSISTED", "Persisted"), + ("ERROR", "Error"), + ("WARNING", "Warning"), + ("SYNCHED", "Synched"), + ], + default="PENDING", + help_text="One of: PENDING, PERSISTED or ERROR depending on how the eventbrite sync status", + max_length=9, + ), ), migrations.AlterField( - model_name='organization', - name='sync_status', + model_name="organization", + name="sync_status", field=models.CharField( - choices=[('PENDING', 'Pending'), ('PERSISTED', 'Persisted'), ('ERROR', 'Error'), ('WARNING', 'Warning'), - ('SYNCHED', 'Synched')], - default='PENDING', - help_text='One of: PENDING, PERSISTED or ERROR depending on how the eventbrite sync status', - max_length=9), + choices=[ + ("PENDING", "Pending"), + ("PERSISTED", "Persisted"), + ("ERROR", "Error"), + ("WARNING", "Warning"), + ("SYNCHED", "Synched"), + ], + default="PENDING", + help_text="One of: PENDING, PERSISTED or ERROR depending on how the eventbrite sync status", + max_length=9, + ), ), ] diff --git 
a/breathecode/events/migrations/0027_rename_sync_status_event_eventbrite_sync_status.py b/breathecode/events/migrations/0027_rename_sync_status_event_eventbrite_sync_status.py index e1955f642..bcce94188 100644 --- a/breathecode/events/migrations/0027_rename_sync_status_event_eventbrite_sync_status.py +++ b/breathecode/events/migrations/0027_rename_sync_status_event_eventbrite_sync_status.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('events', '0026_auto_20211129_1758'), + ("events", "0026_auto_20211129_1758"), ] operations = [ migrations.RenameField( - model_name='event', - old_name='sync_status', - new_name='eventbrite_sync_status', + model_name="event", + old_name="sync_status", + new_name="eventbrite_sync_status", ), ] diff --git a/breathecode/events/migrations/0028_rename_sync_desc_event_eventbrite_sync_description.py b/breathecode/events/migrations/0028_rename_sync_desc_event_eventbrite_sync_description.py index 351281b2c..c48ee4757 100644 --- a/breathecode/events/migrations/0028_rename_sync_desc_event_eventbrite_sync_description.py +++ b/breathecode/events/migrations/0028_rename_sync_desc_event_eventbrite_sync_description.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('events', '0027_rename_sync_status_event_eventbrite_sync_status'), + ("events", "0027_rename_sync_status_event_eventbrite_sync_status"), ] operations = [ migrations.RenameField( - model_name='event', - old_name='sync_desc', - new_name='eventbrite_sync_description', + model_name="event", + old_name="sync_desc", + new_name="eventbrite_sync_description", ), ] diff --git a/breathecode/events/migrations/0029_event_currency.py b/breathecode/events/migrations/0029_event_currency.py index 114766640..146326eb6 100644 --- a/breathecode/events/migrations/0029_event_currency.py +++ b/breathecode/events/migrations/0029_event_currency.py @@ -6,17 +6,18 @@ class Migration(migrations.Migration): dependencies = [ - ('events', '0028_rename_sync_desc_event_eventbrite_sync_description'), + ("events", "0028_rename_sync_desc_event_eventbrite_sync_description"), ] operations = [ migrations.AddField( - model_name='event', - name='currency', - field=models.CharField(blank=True, - choices=[('USD', 'USD'), ('CRC', 'CRC'), ('CLP', 'CLP'), ('EUR', 'EUR'), - ('UYU', 'UYU')], - default='USD', - max_length=3), + model_name="event", + name="currency", + field=models.CharField( + blank=True, + choices=[("USD", "USD"), ("CRC", "CRC"), ("CLP", "CLP"), ("EUR", "EUR"), ("UYU", "UYU")], + default="USD", + max_length=3, + ), ), ] diff --git a/breathecode/events/migrations/0030_alter_event_host.py b/breathecode/events/migrations/0030_alter_event_host.py index 71c3b108c..2a1fa8789 100644 --- a/breathecode/events/migrations/0030_alter_event_host.py +++ b/breathecode/events/migrations/0030_alter_event_host.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('events', '0029_event_currency'), + ("events", "0029_event_currency"), ] operations = [ migrations.AlterField( - model_name='event', - name='host', + model_name="event", + name="host", field=models.CharField(blank=True, default=None, max_length=100, null=True), ), ] diff --git a/breathecode/events/migrations/0030_auto_20220111_0518.py b/breathecode/events/migrations/0030_auto_20220111_0518.py index 190707a9a..0b981af77 100644 --- a/breathecode/events/migrations/0030_auto_20220111_0518.py +++ b/breathecode/events/migrations/0030_auto_20220111_0518.py @@ -6,18 +6,18 @@ class Migration(migrations.Migration): 
dependencies = [ - ('events', '0029_event_currency'), + ("events", "0029_event_currency"), ] operations = [ migrations.AddField( - model_name='event', - name='slug', + model_name="event", + name="slug", field=models.SlugField(blank=True, default=None, max_length=150, null=True), ), migrations.AddField( - model_name='event', - name='tags', - field=models.CharField(blank=True, default='', max_length=100), + model_name="event", + name="tags", + field=models.CharField(blank=True, default="", max_length=100), ), ] diff --git a/breathecode/events/migrations/0031_merge_0030_alter_event_host_0030_auto_20220111_0518.py b/breathecode/events/migrations/0031_merge_0030_alter_event_host_0030_auto_20220111_0518.py index 6eedec808..cade2fbfb 100644 --- a/breathecode/events/migrations/0031_merge_0030_alter_event_host_0030_auto_20220111_0518.py +++ b/breathecode/events/migrations/0031_merge_0030_alter_event_host_0030_auto_20220111_0518.py @@ -6,8 +6,8 @@ class Migration(migrations.Migration): dependencies = [ - ('events', '0030_alter_event_host'), - ('events', '0030_auto_20220111_0518'), + ("events", "0030_alter_event_host"), + ("events", "0030_auto_20220111_0518"), ] operations = [] diff --git a/breathecode/events/migrations/0032_alter_event_url.py b/breathecode/events/migrations/0032_alter_event_url.py index 7596077f6..61fb1ceda 100644 --- a/breathecode/events/migrations/0032_alter_event_url.py +++ b/breathecode/events/migrations/0032_alter_event_url.py @@ -6,19 +6,19 @@ class Migration(migrations.Migration): dependencies = [ - ('events', '0031_merge_0030_alter_event_host_0030_auto_20220111_0518'), + ("events", "0031_merge_0030_alter_event_host_0030_auto_20220111_0518"), ] operations = [ migrations.AlterField( - model_name='event', - name='url', + model_name="event", + name="url", field=models.URLField( blank=True, default=None, - help_text= - 'URL can be blank if the event will be synched with EventBrite, it will be filled automatically by the API.', + help_text="URL can be blank if the event will be synched with EventBrite, it will be filled automatically by the API.", max_length=255, - null=True), + null=True, + ), ), ] diff --git a/breathecode/events/migrations/0033_auto_20221208_1246.py b/breathecode/events/migrations/0033_auto_20221208_1246.py index e861c11c7..a21546fe5 100644 --- a/breathecode/events/migrations/0033_auto_20221208_1246.py +++ b/breathecode/events/migrations/0033_auto_20221208_1246.py @@ -7,61 +7,63 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0048_academy_main_currency'), - ('events', '0032_alter_event_url'), + ("admissions", "0048_academy_main_currency"), + ("events", "0032_alter_event_url"), ] operations = [ migrations.AddField( - model_name='eventtype', - name='allow_shared_creation', + model_name="eventtype", + name="allow_shared_creation", field=models.BooleanField(default=True), ), migrations.AddField( - model_name='eventtype', - name='description', - field=models.CharField(default='', max_length=255), + model_name="eventtype", + name="description", + field=models.CharField(default="", max_length=255), ), migrations.CreateModel( - name='EventTypeSyllabus', + name="EventTypeSyllabus", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('event_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='events.eventtype')), - ('syllabus', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='admissions.syllabus')), + ("id", models.AutoField(auto_created=True, 
primary_key=True, serialize=False, verbose_name="ID")), + ("event_type", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="events.eventtype")), + ("syllabus", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="admissions.syllabus")), ], ), migrations.CreateModel( - name='EventTypeCohort', + name="EventTypeCohort", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('cohort', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='admissions.cohort')), - ('event_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='events.eventtype')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("cohort", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="admissions.cohort")), + ("event_type", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="events.eventtype")), ], ), migrations.CreateModel( - name='EventTypeAcademy', + name="EventTypeAcademy", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('academy', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='admissions.academy')), - ('event_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='events.eventtype')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("academy", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="admissions.academy")), + ("event_type", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="events.eventtype")), ], ), migrations.AddField( - model_name='eventtype', - name='shared_with_academies', - field=models.ManyToManyField(blank=True, - related_name='shared_event_types', - through='events.EventTypeAcademy', - to='admissions.Academy'), + model_name="eventtype", + name="shared_with_academies", + field=models.ManyToManyField( + blank=True, + related_name="shared_event_types", + through="events.EventTypeAcademy", + to="admissions.Academy", + ), ), migrations.AddField( - model_name='eventtype', - name='shared_with_cohorts', - field=models.ManyToManyField(blank=True, through='events.EventTypeCohort', to='admissions.Cohort'), + model_name="eventtype", + name="shared_with_cohorts", + field=models.ManyToManyField(blank=True, through="events.EventTypeCohort", to="admissions.Cohort"), ), migrations.AddField( - model_name='eventtype', - name='shared_with_syllabus', - field=models.ManyToManyField(blank=True, through='events.EventTypeSyllabus', to='admissions.Syllabus'), + model_name="eventtype", + name="shared_with_syllabus", + field=models.ManyToManyField(blank=True, through="events.EventTypeSyllabus", to="admissions.Syllabus"), ), ] diff --git a/breathecode/events/migrations/0034_auto_20221216_2003.py b/breathecode/events/migrations/0034_auto_20221216_2003.py index 13e99cd36..107c81bec 100644 --- a/breathecode/events/migrations/0034_auto_20221216_2003.py +++ b/breathecode/events/migrations/0034_auto_20221216_2003.py @@ -7,62 +7,70 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0048_academy_main_currency'), - ('events', '0033_auto_20221208_1246'), + ("admissions", "0048_academy_main_currency"), + ("events", "0033_auto_20221208_1246"), ] operations = [ migrations.CreateModel( - name='EventTypeVisibilitySetting', + name="EventTypeVisibilitySetting", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, 
serialize=False, verbose_name='ID')), - ('academy', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='admissions.academy')), - ('cohort', - models.ForeignKey(blank=True, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='admissions.cohort')), - ('syllabus', - models.ForeignKey(blank=True, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='admissions.syllabus')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("academy", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="admissions.academy")), + ( + "cohort", + models.ForeignKey( + blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to="admissions.cohort" + ), + ), + ( + "syllabus", + models.ForeignKey( + blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to="admissions.syllabus" + ), + ), ], ), migrations.RemoveField( - model_name='eventtypecohort', - name='cohort', + model_name="eventtypecohort", + name="cohort", ), migrations.RemoveField( - model_name='eventtypecohort', - name='event_type', + model_name="eventtypecohort", + name="event_type", ), migrations.RemoveField( - model_name='eventtypesyllabus', - name='event_type', + model_name="eventtypesyllabus", + name="event_type", ), migrations.RemoveField( - model_name='eventtypesyllabus', - name='syllabus', + model_name="eventtypesyllabus", + name="syllabus", ), migrations.RemoveField( - model_name='eventtype', - name='shared_with_academies', + model_name="eventtype", + name="shared_with_academies", ), migrations.RemoveField( - model_name='eventtype', - name='shared_with_cohorts', + model_name="eventtype", + name="shared_with_cohorts", ), migrations.RemoveField( - model_name='eventtype', - name='shared_with_syllabus', + model_name="eventtype", + name="shared_with_syllabus", + ), + migrations.DeleteModel( + name="EventTypeAcademy", + ), + migrations.DeleteModel( + name="EventTypeCohort", + ), + migrations.DeleteModel( + name="EventTypeSyllabus", ), - migrations.DeleteModel(name='EventTypeAcademy', ), - migrations.DeleteModel(name='EventTypeCohort', ), - migrations.DeleteModel(name='EventTypeSyllabus', ), migrations.AddField( - model_name='eventtype', - name='visibility_settings', - field=models.ManyToManyField(blank=True, to='events.EventTypeVisibilitySetting'), + model_name="eventtype", + name="visibility_settings", + field=models.ManyToManyField(blank=True, to="events.EventTypeVisibilitySetting"), ), ] diff --git a/breathecode/events/migrations/0035_alter_eventtype_academy.py b/breathecode/events/migrations/0035_alter_eventtype_academy.py index 19608e097..1fcb10e6e 100644 --- a/breathecode/events/migrations/0035_alter_eventtype_academy.py +++ b/breathecode/events/migrations/0035_alter_eventtype_academy.py @@ -7,14 +7,14 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0048_academy_main_currency'), - ('events', '0034_auto_20221216_2003'), + ("admissions", "0048_academy_main_currency"), + ("events", "0034_auto_20221216_2003"), ] operations = [ migrations.AlterField( - model_name='eventtype', - name='academy', - field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='admissions.academy'), + model_name="eventtype", + name="academy", + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to="admissions.academy"), ), ] diff --git a/breathecode/events/migrations/0036_auto_20221227_2039.py b/breathecode/events/migrations/0036_auto_20221227_2039.py index 
da90f6be2..fe1981592 100644 --- a/breathecode/events/migrations/0036_auto_20221227_2039.py +++ b/breathecode/events/migrations/0036_auto_20221227_2039.py @@ -7,24 +7,26 @@ class Migration(migrations.Migration): dependencies = [ - ('events', '0035_alter_eventtype_academy'), + ("events", "0035_alter_eventtype_academy"), ] operations = [ migrations.AddField( - model_name='eventtype', - name='lang', - field=models.CharField(default='en', - max_length=5, - validators=[breathecode.utils.validators.language.validate_language_code]), + model_name="eventtype", + name="lang", + field=models.CharField( + default="en", max_length=5, validators=[breathecode.utils.validators.language.validate_language_code] + ), ), migrations.AlterField( - model_name='event', - name='lang', - field=models.CharField(blank=True, - default=None, - max_length=5, - null=True, - validators=[breathecode.utils.validators.language.validate_language_code]), + model_name="event", + name="lang", + field=models.CharField( + blank=True, + default=None, + max_length=5, + null=True, + validators=[breathecode.utils.validators.language.validate_language_code], + ), ), ] diff --git a/breathecode/events/migrations/0037_alter_eventtype_visibility_settings.py b/breathecode/events/migrations/0037_alter_eventtype_visibility_settings.py index 62b3eacea..deab674f9 100644 --- a/breathecode/events/migrations/0037_alter_eventtype_visibility_settings.py +++ b/breathecode/events/migrations/0037_alter_eventtype_visibility_settings.py @@ -6,15 +6,17 @@ class Migration(migrations.Migration): dependencies = [ - ('events', '0036_auto_20221227_2039'), + ("events", "0036_auto_20221227_2039"), ] operations = [ migrations.AlterField( - model_name='eventtype', - name='visibility_settings', - field=models.ManyToManyField(blank=True, - help_text='Visibility has to be configured every academy separately', - to='events.EventTypeVisibilitySetting'), + model_name="eventtype", + name="visibility_settings", + field=models.ManyToManyField( + blank=True, + help_text="Visibility has to be configured every academy separately", + to="events.EventTypeVisibilitySetting", + ), ), ] diff --git a/breathecode/events/migrations/0038_event_live_stream_url.py b/breathecode/events/migrations/0038_event_live_stream_url.py index 66f56916d..dcce7f544 100644 --- a/breathecode/events/migrations/0038_event_live_stream_url.py +++ b/breathecode/events/migrations/0038_event_live_stream_url.py @@ -6,19 +6,19 @@ class Migration(migrations.Migration): dependencies = [ - ('events', '0037_alter_eventtype_visibility_settings'), + ("events", "0037_alter_eventtype_visibility_settings"), ] operations = [ migrations.AddField( - model_name='event', - name='live_stream_url', + model_name="event", + name="live_stream_url", field=models.URLField( blank=True, default=None, - help_text= - "This URL should have the URL of the meeting if it is an online event, if it's not online it should be empty.", + help_text="This URL should have the URL of the meeting if it is an online event, if it's not online it should be empty.", max_length=255, - null=True), + null=True, + ), ), ] diff --git a/breathecode/events/migrations/0039_liveclass.py b/breathecode/events/migrations/0039_liveclass.py index 46ece812c..774748b53 100644 --- a/breathecode/events/migrations/0039_liveclass.py +++ b/breathecode/events/migrations/0039_liveclass.py @@ -7,26 +7,28 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0051_auto_20230117_2219'), - ('events', '0038_event_live_stream_url'), + ("admissions", 
"0051_auto_20230117_2219"), + ("events", "0038_event_live_stream_url"), ] operations = [ migrations.CreateModel( - name='LiveClass', + name="LiveClass", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('log', models.JSONField(default=dict)), - ('remote_meeting_url', models.URLField()), - ('hash', models.CharField(max_length=40, unique=True)), - ('started_at', models.DateTimeField(blank=True, default=None, null=True)), - ('ended_at', models.DateTimeField(blank=True, default=None, null=True)), - ('starting_at', models.DateTimeField()), - ('ending_at', models.DateTimeField()), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('cohort_time_slot', - models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='admissions.cohorttimeslot')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("log", models.JSONField(default=dict)), + ("remote_meeting_url", models.URLField()), + ("hash", models.CharField(max_length=40, unique=True)), + ("started_at", models.DateTimeField(blank=True, default=None, null=True)), + ("ended_at", models.DateTimeField(blank=True, default=None, null=True)), + ("starting_at", models.DateTimeField()), + ("ending_at", models.DateTimeField()), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ( + "cohort_time_slot", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="admissions.cohorttimeslot"), + ), ], ), ] diff --git a/breathecode/events/migrations/0040_alter_venue_zip_code.py b/breathecode/events/migrations/0040_alter_venue_zip_code.py index d2d97eff1..fc154d97d 100644 --- a/breathecode/events/migrations/0040_alter_venue_zip_code.py +++ b/breathecode/events/migrations/0040_alter_venue_zip_code.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('events', '0039_liveclass'), + ("events", "0039_liveclass"), ] operations = [ migrations.AlterField( - model_name='venue', - name='zip_code', + model_name="venue", + name="zip_code", field=models.CharField(blank=True, default=None, max_length=15, null=True), ), ] diff --git a/breathecode/events/migrations/0041_eventtype_icon_url.py b/breathecode/events/migrations/0041_eventtype_icon_url.py index eb3af2db3..8fb55f118 100644 --- a/breathecode/events/migrations/0041_eventtype_icon_url.py +++ b/breathecode/events/migrations/0041_eventtype_icon_url.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('events', '0040_alter_venue_zip_code'), + ("events", "0040_alter_venue_zip_code"), ] operations = [ migrations.AddField( - model_name='eventtype', - name='icon_url', + model_name="eventtype", + name="icon_url", field=models.URLField(blank=True, default=None, null=True), ), ] diff --git a/breathecode/events/migrations/0042_alter_eventtype_icon_url.py b/breathecode/events/migrations/0042_alter_eventtype_icon_url.py index 562fdb5fa..ababee9a7 100644 --- a/breathecode/events/migrations/0042_alter_eventtype_icon_url.py +++ b/breathecode/events/migrations/0042_alter_eventtype_icon_url.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('events', '0041_eventtype_icon_url'), + ("events", "0041_eventtype_icon_url"), ] operations = [ migrations.AlterField( - model_name='eventtype', - name='icon_url', + model_name="eventtype", + name="icon_url", field=models.URLField(default=None, null=True), ), ] diff 
--git a/breathecode/events/migrations/0043_auto_20230509_0801.py b/breathecode/events/migrations/0043_auto_20230509_0801.py index ac67ff237..6bb0e53b7 100644 --- a/breathecode/events/migrations/0043_auto_20230509_0801.py +++ b/breathecode/events/migrations/0043_auto_20230509_0801.py @@ -9,27 +9,27 @@ class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ('events', '0042_alter_eventtype_icon_url'), + ("events", "0042_alter_eventtype_icon_url"), ] operations = [ migrations.AddField( - model_name='event', - name='host_user', - field=models.ForeignKey(blank=True, - help_text='4geeks user that is the host of the event', - null=True, - on_delete=django.db.models.deletion.SET_NULL, - related_name='event_host', - to=settings.AUTH_USER_MODEL), + model_name="event", + name="host_user", + field=models.ForeignKey( + blank=True, + help_text="4geeks user that is the host of the event", + null=True, + on_delete=django.db.models.deletion.SET_NULL, + related_name="event_host", + to=settings.AUTH_USER_MODEL, + ), ), migrations.AlterField( - model_name='event', - name='host', - field=models.CharField(blank=True, - default=None, - help_text='Host name that appear in Eventbrite', - max_length=100, - null=True), + model_name="event", + name="host", + field=models.CharField( + blank=True, default=None, help_text="Host name that appear in Eventbrite", max_length=100, null=True + ), ), ] diff --git a/breathecode/events/migrations/0044_auto_20230513_0007.py b/breathecode/events/migrations/0044_auto_20230513_0007.py index d42c11d4a..de70a4ecc 100644 --- a/breathecode/events/migrations/0044_auto_20230513_0007.py +++ b/breathecode/events/migrations/0044_auto_20230513_0007.py @@ -6,18 +6,18 @@ class Migration(migrations.Migration): dependencies = [ - ('events', '0043_auto_20230509_0801'), + ("events", "0043_auto_20230509_0801"), ] operations = [ migrations.AddField( - model_name='event', - name='free_for_bootcamps', + model_name="event", + name="free_for_bootcamps", field=models.BooleanField(blank=True, default=None, null=True), ), migrations.AddField( - model_name='eventtype', - name='free_for_bootcamps', + model_name="eventtype", + name="free_for_bootcamps", field=models.BooleanField(default=True), ), ] diff --git a/breathecode/events/migrations/0045_auto_20230518_2218.py b/breathecode/events/migrations/0045_auto_20230518_2218.py index c67d855b3..73c3d156e 100644 --- a/breathecode/events/migrations/0045_auto_20230518_2218.py +++ b/breathecode/events/migrations/0045_auto_20230518_2218.py @@ -7,22 +7,20 @@ class Migration(migrations.Migration): dependencies = [ - ('events', '0044_auto_20230513_0007'), + ("events", "0044_auto_20230513_0007"), ] operations = [ migrations.AddField( - model_name='eventbritewebhook', - name='event', - field=models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to='events.event'), + model_name="eventbritewebhook", + name="event", + field=models.ForeignKey( + blank=True, default=None, null=True, on_delete=django.db.models.deletion.SET_NULL, to="events.event" + ), ), migrations.AddField( - model_name='eventbritewebhook', - name='payload', - field=models.JSONField(blank=True, default=None, help_text='Will be set by async task', null=True), + model_name="eventbritewebhook", + name="payload", + field=models.JSONField(blank=True, default=None, help_text="Will be set by async task", null=True), ), ] diff --git a/breathecode/events/migrations/0046_eventbritewebhook_attendee.py 
b/breathecode/events/migrations/0046_eventbritewebhook_attendee.py index c8dab1171..216093431 100644 --- a/breathecode/events/migrations/0046_eventbritewebhook_attendee.py +++ b/breathecode/events/migrations/0046_eventbritewebhook_attendee.py @@ -9,17 +9,19 @@ class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ('events', '0045_auto_20230518_2218'), + ("events", "0045_auto_20230518_2218"), ] operations = [ migrations.AddField( - model_name='eventbritewebhook', - name='attendee', - field=models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to=settings.AUTH_USER_MODEL), + model_name="eventbritewebhook", + name="attendee", + field=models.ForeignKey( + blank=True, + default=None, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to=settings.AUTH_USER_MODEL, + ), ), ] diff --git a/breathecode/events/migrations/0047_alter_event_status.py b/breathecode/events/migrations/0047_alter_event_status.py index 06ba184fb..6b45a043a 100644 --- a/breathecode/events/migrations/0047_alter_event_status.py +++ b/breathecode/events/migrations/0047_alter_event_status.py @@ -6,17 +6,18 @@ class Migration(migrations.Migration): dependencies = [ - ('events', '0046_eventbritewebhook_attendee'), + ("events", "0046_eventbritewebhook_attendee"), ] operations = [ migrations.AlterField( - model_name='event', - name='status', - field=models.CharField(blank=True, - choices=[('ACTIVE', 'Active'), ('DRAFT', 'Draft'), ('DELETED', 'Deleted'), - ('FINISHED', 'Finished')], - default='DRAFT', - max_length=9), + model_name="event", + name="status", + field=models.CharField( + blank=True, + choices=[("ACTIVE", "Active"), ("DRAFT", "Draft"), ("DELETED", "Deleted"), ("FINISHED", "Finished")], + default="DRAFT", + max_length=9, + ), ), ] diff --git a/breathecode/events/migrations/0047_auto_20230606_2328.py b/breathecode/events/migrations/0047_auto_20230606_2328.py index b8c9c7229..be30d6dcf 100644 --- a/breathecode/events/migrations/0047_auto_20230606_2328.py +++ b/breathecode/events/migrations/0047_auto_20230606_2328.py @@ -6,30 +6,33 @@ class Migration(migrations.Migration): dependencies = [ - ('events', '0046_eventbritewebhook_attendee'), + ("events", "0046_eventbritewebhook_attendee"), ] operations = [ migrations.AlterField( - model_name='event', - name='status', - field=models.CharField(blank=True, - choices=[('ACTIVE', 'Active'), ('DRAFT', 'Draft'), ('DELETED', 'Deleted'), - ('FINISHED', 'Finished')], - default='DRAFT', - max_length=9), + model_name="event", + name="status", + field=models.CharField( + blank=True, + choices=[("ACTIVE", "Active"), ("DRAFT", "Draft"), ("DELETED", "Deleted"), ("FINISHED", "Finished")], + default="DRAFT", + max_length=9, + ), ), migrations.AlterField( - model_name='eventtype', - name='allow_shared_creation', - field=models.BooleanField(default=True, - help_text='Other academies are allowed to create events of this type'), + model_name="eventtype", + name="allow_shared_creation", + field=models.BooleanField( + default=True, help_text="Other academies are allowed to create events of this type" + ), ), migrations.AlterField( - model_name='eventtype', - name='free_for_bootcamps', + model_name="eventtype", + name="free_for_bootcamps", field=models.BooleanField( default=True, - help_text='Users that belong to other no-saas academies will be able to join without consuming'), + help_text="Users that belong to other no-saas academies will be able to join without consuming", + ), 
), ] diff --git a/breathecode/events/migrations/0048_merge_0047_alter_event_status_0047_auto_20230606_2328.py b/breathecode/events/migrations/0048_merge_0047_alter_event_status_0047_auto_20230606_2328.py index 635278f82..de113b1c3 100644 --- a/breathecode/events/migrations/0048_merge_0047_alter_event_status_0047_auto_20230606_2328.py +++ b/breathecode/events/migrations/0048_merge_0047_alter_event_status_0047_auto_20230606_2328.py @@ -6,8 +6,8 @@ class Migration(migrations.Migration): dependencies = [ - ('events', '0047_alter_event_status'), - ('events', '0047_auto_20230606_2328'), + ("events", "0047_alter_event_status"), + ("events", "0047_auto_20230606_2328"), ] operations = [] diff --git a/breathecode/events/migrations/0049_alter_event_free_for_bootcamps.py b/breathecode/events/migrations/0049_alter_event_free_for_bootcamps.py index 336f8db6d..ad58b47e3 100644 --- a/breathecode/events/migrations/0049_alter_event_free_for_bootcamps.py +++ b/breathecode/events/migrations/0049_alter_event_free_for_bootcamps.py @@ -6,18 +6,18 @@ class Migration(migrations.Migration): dependencies = [ - ('events', '0048_merge_0047_alter_event_status_0047_auto_20230606_2328'), + ("events", "0048_merge_0047_alter_event_status_0047_auto_20230606_2328"), ] operations = [ migrations.AlterField( - model_name='event', - name='free_for_bootcamps', + model_name="event", + name="free_for_bootcamps", field=models.BooleanField( blank=True, default=None, - help_text= - 'Determines if users that belong to an academy not available as saas can join the event for free.', - null=True), + help_text="Determines if users that belong to an academy not available as saas can join the event for free.", + null=True, + ), ), ] diff --git a/breathecode/events/migrations/0050_auto_20230721_0158.py b/breathecode/events/migrations/0050_auto_20230721_0158.py index 5dd9e43b9..6a7c70fd9 100644 --- a/breathecode/events/migrations/0050_auto_20230721_0158.py +++ b/breathecode/events/migrations/0050_auto_20230721_0158.py @@ -6,20 +6,20 @@ class Migration(migrations.Migration): dependencies = [ - ('events', '0049_alter_event_free_for_bootcamps'), + ("events", "0049_alter_event_free_for_bootcamps"), ] operations = [ migrations.AlterField( - model_name='eventtype', - name='description', - field=models.CharField(default='', - help_text='This will be publicly shown to 4geeks.com users', - max_length=255), + model_name="eventtype", + name="description", + field=models.CharField( + default="", help_text="This will be publicly shown to 4geeks.com users", max_length=255 + ), ), migrations.AlterField( - model_name='eventtype', - name='icon_url', + model_name="eventtype", + name="icon_url", field=models.URLField(blank=True, default=None, null=True), ), ] diff --git a/breathecode/events/migrations/0051_alter_event_free_for_bootcamps.py b/breathecode/events/migrations/0051_alter_event_free_for_bootcamps.py index fbe7ced91..38154cc6b 100644 --- a/breathecode/events/migrations/0051_alter_event_free_for_bootcamps.py +++ b/breathecode/events/migrations/0051_alter_event_free_for_bootcamps.py @@ -6,18 +6,18 @@ class Migration(migrations.Migration): dependencies = [ - ('events', '0050_auto_20230721_0158'), + ("events", "0050_auto_20230721_0158"), ] operations = [ migrations.AlterField( - model_name='event', - name='free_for_bootcamps', + model_name="event", + name="free_for_bootcamps", field=models.BooleanField( blank=True, default=True, - help_text= - 'Determines if users that belong to an academy not available as saas can join the event for free.', - null=True), + 
help_text="Determines if users that belong to an academy not available as saas can join the event for free.", + null=True, + ), ), ] diff --git a/breathecode/events/migrations/0052_event_uuid.py b/breathecode/events/migrations/0052_event_uuid.py index 5d500fd78..06cfb0e0d 100644 --- a/breathecode/events/migrations/0052_event_uuid.py +++ b/breathecode/events/migrations/0052_event_uuid.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('events', '0051_alter_event_free_for_bootcamps'), + ("events", "0051_alter_event_free_for_bootcamps"), ] operations = [ migrations.AddField( - model_name='event', - name='uuid', + model_name="event", + name="uuid", field=models.UUIDField(blank=True, default=None, editable=False, null=True), ), ] diff --git a/breathecode/events/migrations/0053_alter_event_uuid.py b/breathecode/events/migrations/0053_alter_event_uuid.py index 8c75367bb..e57c11437 100644 --- a/breathecode/events/migrations/0053_alter_event_uuid.py +++ b/breathecode/events/migrations/0053_alter_event_uuid.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('events', '0052_event_uuid'), + ("events", "0052_event_uuid"), ] operations = [ migrations.AlterField( - model_name='event', - name='uuid', + model_name="event", + name="uuid", field=models.UUIDField(blank=True, null=True, unique=True), ), ] diff --git a/breathecode/events/migrations/0054_alter_event_uuid.py b/breathecode/events/migrations/0054_alter_event_uuid.py index b152af1e4..5de2d6295 100644 --- a/breathecode/events/migrations/0054_alter_event_uuid.py +++ b/breathecode/events/migrations/0054_alter_event_uuid.py @@ -7,13 +7,13 @@ class Migration(migrations.Migration): dependencies = [ - ('events', '0053_alter_event_uuid'), + ("events", "0053_alter_event_uuid"), ] operations = [ migrations.AlterField( - model_name='event', - name='uuid', + model_name="event", + name="uuid", field=models.UUIDField(default=uuid.uuid4, editable=False, unique=True), ), ] diff --git a/breathecode/events/migrations/0055_auto_20231009_2240.py b/breathecode/events/migrations/0055_auto_20231009_2240.py index 1b50ae008..9d090ac2f 100644 --- a/breathecode/events/migrations/0055_auto_20231009_2240.py +++ b/breathecode/events/migrations/0055_auto_20231009_2240.py @@ -6,28 +6,28 @@ class Migration(migrations.Migration): dependencies = [ - ('events', '0054_alter_event_uuid'), + ("events", "0054_alter_event_uuid"), ] operations = [ migrations.AddField( - model_name='eventcheckin', - name='utm_campaign', + model_name="eventcheckin", + name="utm_campaign", field=models.CharField(blank=True, default=None, max_length=70, null=True), ), migrations.AddField( - model_name='eventcheckin', - name='utm_medium', + model_name="eventcheckin", + name="utm_medium", field=models.CharField(blank=True, default=None, max_length=70, null=True), ), migrations.AddField( - model_name='eventcheckin', - name='utm_source', + model_name="eventcheckin", + name="utm_source", field=models.CharField(blank=True, default=None, max_length=70, null=True), ), migrations.AddField( - model_name='eventcheckin', - name='utm_url', + model_name="eventcheckin", + name="utm_url", field=models.CharField(blank=True, default=None, max_length=2000, null=True), ), ] diff --git a/breathecode/events/migrations/0056_auto_20231017_0605.py b/breathecode/events/migrations/0056_auto_20231017_0605.py index 189dedf74..434f03238 100644 --- a/breathecode/events/migrations/0056_auto_20231017_0605.py +++ b/breathecode/events/migrations/0056_auto_20231017_0605.py @@ -6,18 +6,18 
@@ class Migration(migrations.Migration): dependencies = [ - ('events', '0055_auto_20231009_2240'), + ("events", "0055_auto_20231009_2240"), ] operations = [ migrations.AlterField( - model_name='liveclass', - name='ending_at', + model_name="liveclass", + name="ending_at", field=models.DateTimeField(db_index=True), ), migrations.AlterField( - model_name='liveclass', - name='starting_at', + model_name="liveclass", + name="starting_at", field=models.DateTimeField(db_index=True), ), ] diff --git a/breathecode/events/migrations/0057_event_free_for_all.py b/breathecode/events/migrations/0057_event_free_for_all.py index ad74ebe06..3b1709850 100644 --- a/breathecode/events/migrations/0057_event_free_for_all.py +++ b/breathecode/events/migrations/0057_event_free_for_all.py @@ -6,14 +6,15 @@ class Migration(migrations.Migration): dependencies = [ - ('events', '0056_auto_20231017_0605'), + ("events", "0056_auto_20231017_0605"), ] operations = [ migrations.AddField( - model_name='event', - name='free_for_all', + model_name="event", + name="free_for_all", field=models.BooleanField( - default=False, help_text='Determines if any user (From bootcamp or not) can join the event for free.'), + default=False, help_text="Determines if any user (From bootcamp or not) can join the event for free." + ), ), ] diff --git a/breathecode/events/migrations/0058_auto_20231130_2015.py b/breathecode/events/migrations/0058_auto_20231130_2015.py index b863d28bf..d921ea631 100644 --- a/breathecode/events/migrations/0058_auto_20231130_2015.py +++ b/breathecode/events/migrations/0058_auto_20231130_2015.py @@ -6,22 +6,25 @@ class Migration(migrations.Migration): dependencies = [ - ('events', '0057_event_free_for_all'), + ("events", "0057_event_free_for_all"), ] operations = [ migrations.AddField( - model_name='event', - name='ended_at', - field=models.DateTimeField(blank=True, - default=None, - help_text='This field contains the value of when the event actually finished.', - null=True), + model_name="event", + name="ended_at", + field=models.DateTimeField( + blank=True, + default=None, + help_text="This field contains the value of when the event actually finished.", + null=True, + ), ), migrations.AlterField( - model_name='event', - name='ending_at', + model_name="event", + name="ending_at", field=models.DateTimeField( - help_text='This field contains the value of when the event is supposed to be finished.'), + help_text="This field contains the value of when the event is supposed to be finished." 
+ ), ), ] diff --git a/breathecode/events/migrations/0059_event_asset_slug.py b/breathecode/events/migrations/0059_event_asset_slug.py index a057705f6..7c7cb33b4 100644 --- a/breathecode/events/migrations/0059_event_asset_slug.py +++ b/breathecode/events/migrations/0059_event_asset_slug.py @@ -6,18 +6,19 @@ class Migration(migrations.Migration): dependencies = [ - ('events', '0058_auto_20231130_2015'), + ("events", "0058_auto_20231130_2015"), ] operations = [ migrations.AddField( - model_name='event', - name='asset_slug', + model_name="event", + name="asset_slug", field=models.SlugField( blank=True, default=None, - help_text='Is the event is about a project, this field will have the asset slug of that project.', + help_text="If the event is about a project, this field will have the asset slug of that project.", max_length=200, - null=True), + null=True, + ), ), ] diff --git a/breathecode/events/models.py b/breathecode/events/models.py index a90c12901..466dbe5f5 100644 --- a/breathecode/events/models.py +++ b/breathecode/events/models.py @@ -11,40 +11,41 @@ from .signals import event_status_updated, new_event_attendee, new_event_order -PENDING = 'PENDING' -PERSISTED = 'PERSISTED' -ERROR = 'ERROR' -WARNING = 'WARNING' -SYNCHED = 'SYNCHED' +PENDING = "PENDING" +PERSISTED = "PERSISTED" +ERROR = "ERROR" +WARNING = "WARNING" +SYNCHED = "SYNCHED" SYNC_STATUS = ( - (PENDING, 'Pending'), - (PERSISTED, 'Persisted'), - (ERROR, 'Error'), - (WARNING, 'Warning'), - (SYNCHED, 'Synched'), + (PENDING, "Pending"), + (PERSISTED, "Persisted"), + (ERROR, "Error"), + (WARNING, "Warning"), + (SYNCHED, "Synched"), ) -__all__ = ['Organization', 'Organizer', 'Venue', 'EventType', 'Event', 'EventCheckin', 'EventbriteWebhook'] +__all__ = ["Organization", "Organizer", "Venue", "EventType", "Event", "EventCheckin", "EventbriteWebhook"] class Organization(models.Model): eventbrite_id = models.CharField(unique=True, max_length=30, blank=True) academy = models.ForeignKey(Academy, on_delete=models.CASCADE, blank=True, null=True) eventbrite_key = models.CharField(max_length=255, blank=True, null=True, default=None) - name = models.CharField(max_length=100, blank=True, null=True, default='') + name = models.CharField(max_length=100, blank=True, null=True, default="") sync_status = models.CharField( max_length=9, choices=SYNC_STATUS, default=PENDING, - help_text='One of: PENDING, PERSISTED or ERROR depending on how the eventbrite sync status') + help_text="One of: PENDING, PERSISTED or ERROR depending on how the eventbrite sync status", + ) sync_desc = models.TextField(max_length=255, null=True, default=None, blank=True) created_at = models.DateTimeField(auto_now_add=True, editable=False) updated_at = models.DateTimeField(auto_now=True, editable=False) def __str__(self): - return self.name or 'Nameless' + return self.name or "Nameless" class Organizer(models.Model): @@ -60,18 +61,18 @@ class Organizer(models.Model): def __str__(self): if self.name is not None: - return self.name + '(' + str(self.id) + ')' + return self.name + "(" + str(self.id) + ")" else: - return 'Organizer ' + str(self.id) + return "Organizer " + str(self.id) -ACTIVE = 'ACTIVE' -DRAFT = 'DRAFT' -DELETED = 'DELETED' +ACTIVE = "ACTIVE" +DRAFT = "DRAFT" +DELETED = "DELETED" VENUE_STATUS = ( - (ACTIVE, 'Active'), - (DRAFT, 'Draft'), - (DELETED, 'Deleted'), + (ACTIVE, "Active"), + (DRAFT, "Draft"), + (DELETED, "Deleted"), ) @@ -95,7 +96,7 @@ class Venue(models.Model): updated_at = models.DateTimeField(auto_now=True, editable=False) def __str__(self): - return 
self.title or 'No title' + return self.title or "No title" class EventTypeVisibilitySetting(models.Model): @@ -110,27 +111,28 @@ class EventTypeVisibilitySetting(models.Model): academy = models.ForeignKey(Academy, on_delete=models.CASCADE) def __str__(self): - return f'{str(self.academy)}, {str(self.syllabus)}, {str(self.cohort)}' + return f"{str(self.academy)}, {str(self.syllabus)}, {str(self.cohort)}" class EventType(models.Model): slug = models.SlugField(max_length=150, unique=True) name = models.CharField(max_length=150) - description = models.CharField(max_length=255, - default='', - null=False, - help_text='This will be publicly shown to 4geeks.com users') + description = models.CharField( + max_length=255, default="", null=False, help_text="This will be publicly shown to 4geeks.com users" + ) icon_url = models.URLField(blank=True, null=True, default=None) academy = models.ForeignKey(Academy, on_delete=models.CASCADE, blank=False, null=True) - lang = models.CharField(max_length=5, default='en', validators=[validate_language_code]) + lang = models.CharField(max_length=5, default="en", validators=[validate_language_code]) free_for_bootcamps = models.BooleanField( - default=True, help_text='Users that belong to other no-saas academies will be able to join without consuming') + default=True, help_text="Users that belong to other no-saas academies will be able to join without consuming" + ) - visibility_settings = models.ManyToManyField(EventTypeVisibilitySetting, - blank=True, - help_text='Visibility has to be configured every academy separately') - allow_shared_creation = models.BooleanField(default=True, - help_text='Other academies are allowed to create events of this type') + visibility_settings = models.ManyToManyField( + EventTypeVisibilitySetting, blank=True, help_text="Visibility has to be configured every academy separately" + ) + allow_shared_creation = models.BooleanField( + default=True, help_text="Other academies are allowed to create events of this type" + ) created_at = models.DateTimeField(auto_now_add=True, editable=False) updated_at = models.DateTimeField(auto_now=True, editable=False) @@ -143,25 +145,25 @@ def save(self, *args, **kwargs): super().save(*args, **kwargs) -FINISHED = 'FINISHED' +FINISHED = "FINISHED" EVENT_STATUS = ( - (ACTIVE, 'Active'), - (DRAFT, 'Draft'), - (DELETED, 'Deleted'), - (FINISHED, 'Finished'), + (ACTIVE, "Active"), + (DRAFT, "Draft"), + (DELETED, "Deleted"), + (FINISHED, "Finished"), ) -USD = 'USD' # United States dollar -CRC = 'CRC' # Costa Rican colón -CLP = 'CLP' # Chilean peso -EUR = 'EUR' # Euro -UYU = 'UYU' # Uruguayan peso +USD = "USD" # United States dollar +CRC = "CRC" # Costa Rican colón +CLP = "CLP" # Chilean peso +EUR = "EUR" # Euro +UYU = "UYU" # Uruguayan peso CURRENCIES = ( - (USD, 'USD'), - (CRC, 'CRC'), - (CLP, 'CLP'), - (EUR, 'EUR'), - (UYU, 'UYU'), + (USD, "USD"), + (CRC, "CRC"), + (CLP, "CLP"), + (EUR, "EUR"), + (UYU, "UYU"), ) @@ -180,26 +182,29 @@ def __init__(self, *args, **kwargs): blank=True, default=None, null=True, - help_text='Is the event is about a project, this field will have the asset slug of that project.') + help_text="If the event is about a project, this field will have the asset slug of that project.", + ) title = models.CharField(max_length=255, blank=True, default=None, null=True) lang = models.CharField(max_length=5, blank=True, default=None, null=True, validators=[validate_language_code]) currency = models.CharField(max_length=3, choices=CURRENCIES, default=USD, blank=True) - tags = 
models.CharField(max_length=100, default='', blank=True) + tags = models.CharField(max_length=100, default="", blank=True) free_for_all = models.BooleanField( - default=False, help_text='Determines if any user (From bootcamp or not) can join the event for free.') + default=False, help_text="Determines if any user (From bootcamp or not) can join the event for free." + ) free_for_bootcamps = models.BooleanField( default=True, blank=True, null=True, - help_text='Determines if users that belong to an academy not available as saas can join the event for free.') + help_text="Determines if users that belong to an academy not available as saas can join the event for free.", + ) url = models.URLField( max_length=255, null=True, blank=True, default=None, - help_text= - 'URL can be blank if the event will be synched with EventBrite, it will be filled automatically by the API.') + help_text="URL can be blank if the event will be synched with EventBrite, it will be filled automatically by the API.", + ) banner = models.URLField(max_length=255) capacity = models.IntegerField() live_stream_url = models.URLField( @@ -207,28 +212,31 @@ def __init__(self, *args, **kwargs): null=True, blank=True, default=None, - help_text= - 'This URL should have the URL of the meeting if it is an online event, if it\'s not online it should be empty.') + help_text="This URL should have the URL of the meeting if it is an online event, if it's not online it should be empty.", + ) starting_at = models.DateTimeField(blank=False) ending_at = models.DateTimeField( - blank=False, help_text='This field contains the value of when the event is supposed to be finished.') - ended_at = models.DateTimeField(blank=True, - null=True, - default=None, - help_text='This field contains the value of when the event actually finished.') - - host = models.CharField(max_length=100, - blank=True, - default=None, - null=True, - help_text='Host name that appear in Eventbrite') - host_user = models.ForeignKey(User, - on_delete=models.SET_NULL, - blank=True, - null=True, - related_name='event_host', - help_text='4geeks user that is the host of the event') + blank=False, help_text="This field contains the value of when the event is supposed to be finished." 
+ ) + ended_at = models.DateTimeField( + blank=True, + null=True, + default=None, + help_text="This field contains the value of when the event actually finished.", + ) + + host = models.CharField( + max_length=100, blank=True, default=None, null=True, help_text="Host name that appear in Eventbrite" + ) + host_user = models.ForeignKey( + User, + on_delete=models.SET_NULL, + blank=True, + null=True, + related_name="event_host", + help_text="4geeks user that is the host of the event", + ) academy = models.ForeignKey(Academy, on_delete=models.CASCADE, blank=True, null=True) organization = models.ForeignKey(Organization, on_delete=models.CASCADE, blank=True, null=True) @@ -243,18 +251,21 @@ def __init__(self, *args, **kwargs): eventbrite_organizer_id = models.CharField(max_length=80, blank=True, default=None, null=True) status = models.CharField(max_length=9, choices=EVENT_STATUS, default=DRAFT, blank=True) - eventbrite_status = models.CharField(max_length=9, - help_text='One of: draft, live, started, ended, completed and canceled', - blank=True, - default=None, - null=True) + eventbrite_status = models.CharField( + max_length=9, + help_text="One of: draft, live, started, ended, completed and canceled", + blank=True, + default=None, + null=True, + ) sync_with_eventbrite = models.BooleanField(default=False) eventbrite_sync_status = models.CharField( max_length=9, choices=SYNC_STATUS, default=PENDING, - help_text='One of: PENDING, PERSISTED or ERROR depending on how the eventbrite sync status') + help_text="One of: PENDING, PERSISTED or ERROR depending on how the eventbrite sync status", + ) eventbrite_sync_description = models.TextField(max_length=255, null=True, default=None, blank=True) published_at = models.DateTimeField(null=True, default=None, blank=True) @@ -262,7 +273,7 @@ def __init__(self, *args, **kwargs): updated_at = models.DateTimeField(auto_now=True, editable=False) def __str__(self): - return self.title or 'No title' + return self.title or "No title" def clean(self, *args, **kwargs): if self.free_for_all == True: @@ -278,20 +289,21 @@ def save(self, *args, **kwargs): created = not self.id if self.title and not self.slug: - self.slug = f'{slugify(self.title).lower()}-{self.uuid}' + self.slug = f"{slugify(self.title).lower()}-{self.uuid}" super().save(*args, **kwargs) event_saved.send_robust(instance=self, sender=self.__class__, created=created) - if status_updated: event_status_updated.send_robust(instance=self, sender=Event) + if status_updated: + event_status_updated.send_robust(instance=self, sender=Event) -PENDING = 'PENDING' -DONE = 'DONE' +PENDING = "PENDING" +DONE = "DONE" CHECKIN_STATUS = ( - (PENDING, 'Pending'), - (DONE, 'Done'), + (PENDING, "Pending"), + (DONE, "Done"), ) @@ -331,8 +343,9 @@ def save(self, *args, **kwargs): super().save(*args, **kwargs) - if creating: new_event_order.send_robust(instance=self, sender=EventCheckin) - elif status_updated and self.status == 'DONE': + if creating: + new_event_order.send_robust(instance=self, sender=EventCheckin) + elif status_updated and self.status == "DONE": new_event_attendee.send_robust(instance=self, sender=EventCheckin) @@ -340,9 +353,9 @@ def save(self, *args, **kwargs): # DONE = 'DONE' # ERROR='ERROR' EVENTBRITE_WEBHOOK_STATUS = ( - (PENDING, 'Pending'), - (DONE, 'Done'), - (ERROR, 'Error'), + (PENDING, "Pending"), + (DONE, "Done"), + (ERROR, "Error"), ) @@ -351,7 +364,7 @@ class EventbriteWebhook(models.Model): user_id = models.CharField(max_length=20, blank=True, null=True, default=None) action = 
models.CharField(max_length=15, blank=True, null=True, default=None) webhook_id = models.CharField(max_length=20, blank=True, null=True, default=None) - payload = models.JSONField(blank=True, null=True, default=None, help_text='Will be set by async task') + payload = models.JSONField(blank=True, null=True, default=None, help_text="Will be set by async task") event = models.ForeignKey(Event, on_delete=models.SET_NULL, blank=True, null=True, default=None) attendee = models.ForeignKey(User, on_delete=models.CASCADE, blank=True, null=True, default=None) @@ -365,13 +378,14 @@ class EventbriteWebhook(models.Model): updated_at = models.DateTimeField(auto_now=True, editable=False) def __str__(self): - return f'Action {self.action} {self.status} => {self.api_url}' + return f"Action {self.action} {self.status} => {self.api_url}" class LiveClass(models.Model): """ It represents a live class that will be built from a CohortTimeSlot """ + cohort_time_slot = models.ForeignKey(CohortTimeSlot, on_delete=models.CASCADE) log = models.JSONField(default=dict) remote_meeting_url = models.URLField() diff --git a/breathecode/events/permissions/consumers.py b/breathecode/events/permissions/consumers.py index 25d119915..00eca1d45 100644 --- a/breathecode/events/permissions/consumers.py +++ b/breathecode/events/permissions/consumers.py @@ -16,145 +16,173 @@ def event_by_url_param(context: ServiceContext, args: tuple, kwargs: dict) -> tuple[dict, tuple, dict]: - context['price'] = 0 + context["price"] = 0 - request = context['request'] + request = context["request"] lang = get_user_language(request) items = get_my_event_types(request.user) - pk = Q(id=kwargs.get('event_id')) | Q(slug=kwargs.get('event_slug'), slug__isnull=False) + pk = Q(id=kwargs.get("event_id")) | Q(slug=kwargs.get("event_slug"), slug__isnull=False) belongs_to_this_event = Q(event_type__in=items) | Q(host_user=request.user) event = Event.objects.filter(pk, belongs_to_this_event).first() if not event: - raise ValidationException(translation(lang, - en='Event not found or you dont have access', - es='Evento no encontrado o no tienes acceso', - slug='not-found'), - code=404) + raise ValidationException( + translation( + lang, + en="Event not found or you don't have access", + es="Evento no encontrado o no tienes acceso", + slug="not-found", + ), + code=404, + ) if not event.live_stream_url: raise ValidationException( - translation(lang, - en='Event live stream URL was not found', - es='No se encontró la URL de transmisión en vivo del evento', - slug='event-online-meeting-url-not-found')) + translation( + lang, + en="Event live stream URL was not found", + es="No se encontró la URL de transmisión en vivo del evento", + slug="event-online-meeting-url-not-found", + ) + ) - kwargs['event'] = event + kwargs["event"] = event - if 'event_id' in kwargs: - del kwargs['event_id'] + if "event_id" in kwargs: - del kwargs["event_id"] + del kwargs["event_id"] - if 'event_slug' in kwargs: - del kwargs['event_slug'] + if "event_slug" in kwargs: + del kwargs["event_slug"] - if context['is_consumption_session']: + if context["is_consumption_session"]: return (context, args, kwargs) event_type = event.event_type is_host = event.host_user == request.user is_free_for_all = event.free_for_all - is_free_for_bootcamps = is_free_for_all or ((event.free_for_bootcamps) or - (event.free_for_bootcamps is None and event_type.free_for_bootcamps)) + is_free_for_bootcamps = is_free_for_all or ( + (event.free_for_bootcamps) or (event.free_for_bootcamps is None and event_type.free_for_bootcamps) + ) 
user_with_available_as_saas_false = CohortUser.objects.filter( Q(cohort__available_as_saas=False) | Q(cohort__available_as_saas=None, cohort__academy__available_as_saas=False), - user=request.user).exists() + user=request.user, + ).exists() if not is_host and not is_free_for_all and (not is_free_for_bootcamps or not user_with_available_as_saas_false): - context['price'] = 1 + context["price"] = 1 - if context['price'] == 0 and is_no_saas_student_up_to_date_in_any_cohort(context['request'].user, - academy=event.academy) is False: + if ( + context["price"] == 0 + and is_no_saas_student_up_to_date_in_any_cohort(context["request"].user, academy=event.academy) is False + ): raise PaymentException( - translation(lang, - en='You can\'t access this asset because your finantial status is not up to date', - es='No puedes acceder a este recurso porque tu estado financiero no está al dia', - slug='cohort-user-status-later')) + translation( + lang, + en="You can't access this asset because your financial status is not up to date", + es="No puedes acceder a este recurso porque tu estado financiero no está al dia", + slug="cohort-user-status-later", + ) + ) - context['consumables'] = context['consumables'].filter(event_type_set__event_types=event_type) + context["consumables"] = context["consumables"].filter(event_type_set__event_types=event_type) utc_now = timezone.now() if event.ending_at < utc_now: - raise ValidationException(translation(lang, - en='This event has already finished', - es='Este evento ya ha terminado', - slug='event-has-ended'), - code=400) + raise ValidationException( + translation( + lang, en="This event has already finished", es="Este evento ya ha terminado", slug="event-has-ended" + ), + code=400, + ) - if context['price']: + if context["price"]: delta = event.ending_at - utc_now - context['lifetime'] = delta + context["lifetime"] = delta return (context, args, kwargs) def live_class_by_url_param(context: ServiceContext, args: tuple, kwargs: dict) -> tuple[dict, tuple, dict]: - context['price'] = 0 + context["price"] = 0 - request = context['request'] + request = context["request"] lang = get_user_language(request) - live_class = LiveClass.objects.filter(cohort_time_slot__cohort__cohortuser__user=request.user, - hash=kwargs.get('hash')).first() + live_class = LiveClass.objects.filter( + cohort_time_slot__cohort__cohortuser__user=request.user, hash=kwargs.get("hash") + ).first() if not live_class: - raise ValidationException(translation(lang, - en='Live class not found', - es='Clase en vivo no encontrada', - slug='not-found'), - code=404) + raise ValidationException( + translation(lang, en="Live class not found", es="Clase en vivo no encontrada", slug="not-found"), code=404 + ) if not live_class.cohort_time_slot.cohort.online_meeting_url: raise ValidationException( - translation(lang, - en='Cohort online meeting URL was not found', - es='No se encontró la URL de la reunión en línea del cohorte', - slug='cohort-online-meeting-url-not-found')) - - kwargs['live_class'] = live_class - kwargs['lang'] = lang - del kwargs['hash'] - - if context['is_consumption_session']: + translation( + lang, + en="Cohort online meeting URL was not found", + es="No se encontró la URL de la reunión en línea del cohorte", + slug="cohort-online-meeting-url-not-found", + ) + ) + + kwargs["live_class"] = live_class + kwargs["lang"] = lang + del kwargs["hash"] + + if context["is_consumption_session"]: return (context, args, kwargs) # avoid to be taken if the cohort is available as saas is not set - 
cohort_available_as_saas = (live_class.cohort_time_slot.cohort.available_as_saas is not None - and live_class.cohort_time_slot.cohort.available_as_saas) + cohort_available_as_saas = ( + live_class.cohort_time_slot.cohort.available_as_saas is not None + and live_class.cohort_time_slot.cohort.available_as_saas + ) # avoid to be taken if the cohort is available as saas is set - academy_available_as_saas = (live_class.cohort_time_slot.cohort.available_as_saas is None - and live_class.cohort_time_slot.cohort.academy - and live_class.cohort_time_slot.cohort.academy.available_as_saas) + academy_available_as_saas = ( + live_class.cohort_time_slot.cohort.available_as_saas is None + and live_class.cohort_time_slot.cohort.academy + and live_class.cohort_time_slot.cohort.academy.available_as_saas + ) if cohort_available_as_saas or academy_available_as_saas: - context['price'] = 1 + context["price"] = 1 # CohortSet requires that Academy be available as saas, this line should be uncovered - if context['price'] == 0 and is_no_saas_student_up_to_date_in_any_cohort( - context['request'].user, cohort=live_class.cohort_time_slot.cohort) is False: + if ( + context["price"] == 0 + and is_no_saas_student_up_to_date_in_any_cohort( + context["request"].user, cohort=live_class.cohort_time_slot.cohort + ) + is False + ): raise PaymentException( - translation(lang, - en='You can\'t access this asset because your finantial status is not up to date', - es='No puedes acceder a este recurso porque tu estado financiero no está al dia', - slug='cohort-user-status-later')) - - context['consumables'] = context['consumables'].filter( - cohort_set__cohortsetcohort__cohort=live_class.cohort_time_slot.cohort) + translation( + lang, + en="You can't access this asset because your financial status is not up to date", + es="No puedes acceder a este recurso porque tu estado financiero no está al dia", + slug="cohort-user-status-later", + ) + ) + + context["consumables"] = context["consumables"].filter( + cohort_set__cohortsetcohort__cohort=live_class.cohort_time_slot.cohort + ) utc_now = timezone.now() if live_class.ending_at < utc_now: - raise ValidationException(translation(lang, - en='Class has ended', - es='La clase ha terminado', - slug='class-has-ended'), - code=400) + raise ValidationException( + translation(lang, en="Class has ended", es="La clase ha terminado", slug="class-has-ended"), code=400 + ) - if context['price']: + if context["price"]: delta = live_class.ending_at - utc_now - context['lifetime'] = delta + context["lifetime"] = delta return (context, args, kwargs) diff --git a/breathecode/events/permissions/contexts.py b/breathecode/events/permissions/contexts.py index 8cb107382..07c5666b2 100644 --- a/breathecode/events/permissions/contexts.py +++ b/breathecode/events/permissions/contexts.py @@ -3,31 +3,31 @@ def event(client: LaunchDarkly, event: Event): - key = f'{event.id}' + key = f"{event.id}" name = event.title - kind = 'event' + kind = "event" context = { - 'id': event.id, - 'slug': event.slug, - 'lang': event.lang, - 'academy': event.academy.slug if event.academy else 'unknown', - 'organization': event.organization.name if event.organization else 'unknown', - 'published_at': event.published_at, - 'event_type': event.event_type.slug if event.event_type else 'unknown', + "id": event.id, + "slug": event.slug, + "lang": event.lang, + "academy": event.academy.slug if event.academy else "unknown", + "organization": event.organization.name if event.organization else "unknown", + "published_at": event.published_at, 
+ "event_type": event.event_type.slug if event.event_type else "unknown", } return client.context(key, name, kind, context) def event_type(client: LaunchDarkly, event_type: EventType): - key = f'{event_type.id}' + key = f"{event_type.id}" name = event_type.name - kind = 'event-type' + kind = "event-type" context = { - 'id': event_type.id, - 'slug': event_type.slug, - 'academy': event_type.academy.slug, - 'lang': event_type.lang, + "id": event_type.id, + "slug": event_type.slug, + "academy": event_type.academy.slug, + "lang": event_type.lang, } return client.context(key, name, kind, context) diff --git a/breathecode/events/permissions/flags.py b/breathecode/events/permissions/flags.py index 1f2b2905b..412a8ccfc 100644 --- a/breathecode/events/permissions/flags.py +++ b/breathecode/events/permissions/flags.py @@ -7,7 +7,7 @@ from breathecode.services import LaunchDarkly -__all__ = ['api'] +__all__ = ["api"] logger = logging.getLogger(__name__) @@ -18,7 +18,7 @@ class Release: def enable_consume_live_classes(user: User) -> bool: ld = LaunchDarkly() user_context = authenticate_contexts.user(ld, user) - return ld.get('api.release.enable_consume_live_classes', user_context, False) + return ld.get("api.release.enable_consume_live_classes", user_context, False) @staticmethod def enable_consume_live_events(user: User, event: Event) -> bool: @@ -37,7 +37,7 @@ def enable_consume_live_events(user: User, event: Event) -> bool: context = ld.join_contexts(*collected_contexts) - return ld.get('api.release.enable_consume_live_events', context, False) + return ld.get("api.release.enable_consume_live_events", context, False) class API: diff --git a/breathecode/events/receivers.py b/breathecode/events/receivers.py index 397e89140..49ba1439d 100644 --- a/breathecode/events/receivers.py +++ b/breathecode/events/receivers.py @@ -13,8 +13,11 @@ @receiver(timeslot_saved, sender=CohortTimeSlot) def post_save_cohort_time_slot(sender: Type[CohortTimeSlot], instance: CohortTimeSlot, **kwargs: Any): - logger.info('Procesing CohortTimeSlot save') + logger.info("Procesing CohortTimeSlot save") - if instance.cohort.ending_date and instance.cohort.ending_date > timezone.now( - ) and instance.cohort.never_ends == False: + if ( + instance.cohort.ending_date + and instance.cohort.ending_date > timezone.now() + and instance.cohort.never_ends == False + ): tasks.build_live_classes_from_timeslot.delay(instance.id) diff --git a/breathecode/events/serializers.py b/breathecode/events/serializers.py index 21e083cfc..6395651f0 100644 --- a/breathecode/events/serializers.py +++ b/breathecode/events/serializers.py @@ -355,78 +355,90 @@ class Meta: exclude = () def validate(self, data: dict[str, Any]): - lang = data.get('lang', 'en') + lang = data.get("lang", "en") - academy = self.context.get('academy_id') + academy = self.context.get("academy_id") - if ('tags' not in data and self.instance.tags == '') or ('tags' in data and data['tags'] == ''): + if ("tags" not in data and self.instance.tags == "") or ("tags" in data and data["tags"] == ""): raise ValidationException( - translation(lang, - en='Event must have at least one tag', - es='El evento debe tener al menos un tag', - slug='empty-tags')) + translation( + lang, + en="Event must have at least one tag", + es="El evento debe tener al menos un tag", + slug="empty-tags", + ) + ) - validate_marketing_tags(data['tags'], academy, types=['DISCOVERY'], lang=lang) + validate_marketing_tags(data["tags"], academy, types=["DISCOVERY"], lang=lang) - title = data.get('title') - slug = 
data.get('slug') + title = data.get("title") + slug = data.get("slug") if slug and self.instance: raise ValidationException( - translation(lang, - en='The slug field is readonly', - es='El campo slug es de solo lectura', - slug='try-update-slug')) + translation( + lang, en="The slug field is readonly", es="El campo slug es de solo lectura", slug="try-update-slug" + ) + ) if title and not slug: - slug = slugify(data['title']).lower() + slug = slugify(data["title"]).lower() elif slug: slug = f'{data["slug"].lower()}' - online_event = data.get('online_event') - live_stream_url = data.get('live_stream_url') - if online_event == True and (live_stream_url is None or live_stream_url == ''): + online_event = data.get("online_event") + live_stream_url = data.get("live_stream_url") + if online_event == True and (live_stream_url is None or live_stream_url == ""): raise ValidationException( - translation(lang, - en='live_stream_url cannot be empty if the event is online.', - es='Si el evento es online, entonces live_stream_url no puede estar vacío.', - slug='live-stream-url-empty')) + translation( + lang, + en="live_stream_url cannot be empty if the event is online.", + es="Si el evento es online, entonces live_stream_url no puede estar vacío.", + slug="live-stream-url-empty", + ) + ) existing_events = Event.objects.filter(slug=slug) if slug and not self.instance and existing_events.exists(): raise ValidationException( - translation(lang, - en=f'Event slug {slug} already taken, try a different slug', - es=f'El slug {slug} ya está en uso, prueba con otro slug', - slug='slug-taken')) - - if 'event_type' not in data or data['event_type'] is None: + translation( + lang, + en=f"Event slug {slug} already taken, try a different slug", + es=f"El slug {slug} ya está en uso, prueba con otro slug", + slug="slug-taken", + ) + ) + + if "event_type" not in data or data["event_type"] is None: raise ValidationException( - translation(lang, - en='Missing event type', - es='Debes especificar un tipo de evento', - slug='no-event-type')) + translation( + lang, en="Missing event type", es="Debes especificar un tipo de evento", slug="no-event-type" + ) + ) - if 'lang' in data and data['event_type'].lang != data.get('lang', 'en'): + if "lang" in data and data["event_type"].lang != data.get("lang", "en"): raise ValidationException( - translation(lang, - en='Event type and event language must match', - es='El tipo de evento y el idioma del evento deben coincidir', - slug='event-type-lang-mismatch')) + translation( + lang, + en="Event type and event language must match", + es="El tipo de evento y el idioma del evento deben coincidir", + slug="event-type-lang-mismatch", + ) + ) - if 'event_type' in data: - data['lang'] = data['event_type'].lang + if "event_type" in data: + data["lang"] = data["event_type"].lang if not self.instance: - data['slug'] = slug + data["slug"] = slug return data def create(self, validated_data): # hard-code the organizer to the academy organizer try: - validated_data['organizer'] = validated_data['academy'].organizer + validated_data["organizer"] = validated_data["academy"].organizer except Exception: pass @@ -447,74 +459,88 @@ class Meta: exclude = () def validate(self, data: dict[str, Any]): - lang = data.get('lang', 'en') + lang = data.get("lang", "en") - academy = self.context.get('academy_id') + academy = self.context.get("academy_id") - if 'tags' in data: - if data['tags'] == '': + if "tags" in data: + if data["tags"] == "": raise ValidationException( - translation(lang, - en='Event must have at 
least one tag', - es='El evento debe tener al menos un tag', - slug='empty-tags')) + translation( + lang, + en="Event must have at least one tag", + es="El evento debe tener al menos un tag", + slug="empty-tags", + ) + ) - validate_marketing_tags(data['tags'], academy, types=['DISCOVERY'], lang=lang) + validate_marketing_tags(data["tags"], academy, types=["DISCOVERY"], lang=lang) - title = data.get('title') - slug = data.get('slug') + title = data.get("title") + slug = data.get("slug") if slug and self.instance: raise ValidationException( - translation(lang, - en='The slug field is readonly', - es='El campo slug es de solo lectura', - slug='try-update-slug')) + translation( + lang, en="The slug field is readonly", es="El campo slug es de solo lectura", slug="try-update-slug" + ) + ) if title and not slug: - slug = slugify(data['title']).lower() + slug = slugify(data["title"]).lower() elif slug: slug = f'{data["slug"].lower()}' - online_event = data.get('online_event') - live_stream_url = data.get('live_stream_url') - if online_event == True and (live_stream_url is None - or live_stream_url == '') and (self.instance.live_stream_url is None - or self.instance.live_stream_url == ''): + online_event = data.get("online_event") + live_stream_url = data.get("live_stream_url") + if ( + online_event == True + and (live_stream_url is None or live_stream_url == "") + and (self.instance.live_stream_url is None or self.instance.live_stream_url == "") + ): raise ValidationException( - translation(lang, - en='live_stream_url cannot be empty if the event is online.', - es='Si el evento es online, entonces live_stream_url no puede estar vacío.', - slug='live-stream-url-empty')) + translation( + lang, + en="live_stream_url cannot be empty if the event is online.", + es="Si el evento es online, entonces live_stream_url no puede estar vacío.", + slug="live-stream-url-empty", + ) + ) existing_events = Event.objects.filter(slug=slug) if slug and not self.instance and existing_events.exists(): raise ValidationException( - translation(lang, - en=f'Event slug {slug} already taken, try a different slug', - es=f'El slug {slug} ya está en uso, prueba con otro slug', - slug='slug-taken')) - - event_type = data['event_type'] if 'event_type' in data else self.instance.event_type + translation( + lang, + en=f"Event slug {slug} already taken, try a different slug", + es=f"El slug {slug} ya está en uso, prueba con otro slug", + slug="slug-taken", + ) + ) + + event_type = data["event_type"] if "event_type" in data else self.instance.event_type if not event_type: raise ValidationException( - translation(lang, - en='Missing event type', - es='Debes especificar un tipo de evento', - slug='no-event-type')) + translation( + lang, en="Missing event type", es="Debes especificar un tipo de evento", slug="no-event-type" + ) + ) - if 'lang' in data and event_type.lang != data['lang']: + if "lang" in data and event_type.lang != data["lang"]: raise ValidationException( - translation(lang, - en='Event type and event language must match', - es='El tipo de evento y el idioma del evento deben coincidir', - slug='event-type-lang-mismatch')) + translation( + lang, + en="Event type and event language must match", + es="El tipo de evento y el idioma del evento deben coincidir", + slug="event-type-lang-mismatch", + ) + ) - data['lang'] = event_type.lang + data["lang"] = event_type.lang if not self.instance: - data['slug'] = slug + data["slug"] = slug return data @@ -522,7 +548,7 @@ def update(self, instance, validated_data): # hard-code the 
organizer to the academy organizer try: - validated_data['organizer'] = validated_data['academy'].organizer + validated_data["organizer"] = validated_data["academy"].organizer except Exception: pass @@ -547,14 +573,14 @@ class EventTypeSerializerMixin(serializers.ModelSerializer): class Meta: model = EventType - exclude = ('visibility_settings', ) + exclude = ("visibility_settings",) def validate(self, data: dict[str, Any]): - academy_id = self.context.get('academy_id') - data['academy'] = Academy.objects.filter(id=academy_id).get() + academy_id = self.context.get("academy_id") + data["academy"] = Academy.objects.filter(id=academy_id).get() - if ('visibility_settings' in data): - del data['visibility_settings'] + if "visibility_settings" in data: + del data["visibility_settings"] return data @@ -575,7 +601,7 @@ class PUTEventCheckinSerializer(serializers.ModelSerializer): class Meta: model = EventCheckin - exclude = ('event', 'created_at', 'updated_at') + exclude = ("event", "created_at", "updated_at") def validate(self, data: dict[str, Any]): return data @@ -586,11 +612,13 @@ def update(self, instance, validated_data): # if "attended_at" not in data and self.instance.attended_at is None: # new_data['attended_at'] = timezone.now() - if 'attended_at' in validated_data and self.instance.attended_at is None: - tasks_activity.add_activity.delay(self.instance.attendee, - 'event_checkin_assisted', - related_type='events.EventCheckin', - related_id=self.instance.id) + if "attended_at" in validated_data and self.instance.attended_at is None: + tasks_activity.add_activity.delay( + self.instance.attendee, + "event_checkin_assisted", + related_type="events.EventCheckin", + related_id=self.instance.id, + ) event_checkin = super().update(instance, {**validated_data, **new_data}) return event_checkin @@ -600,33 +628,39 @@ class POSTEventCheckinSerializer(serializers.ModelSerializer): class Meta: model = EventCheckin - exclude = ('created_at', 'updated_at', 'attended_at', 'status') + exclude = ("created_at", "updated_at", "attended_at", "status") def validate(self, data): - event_checkin = EventCheckin.objects.filter(Q(attendee=data['attendee']) - | Q(email=data['email']), - event=data['event']).first() + event_checkin = EventCheckin.objects.filter( + Q(attendee=data["attendee"]) | Q(email=data["email"]), event=data["event"] + ).first() if event_checkin is not None: if event_checkin.attendee is None: - event_checkin.attendee = self.context['user'] + event_checkin.attendee = self.context["user"] event_checkin.save() - raise ValidationException(translation(self.context['lang'], - en='This user already has an event checkin associated to this event', - es='Este usuario ya esta registrado en este evento', - slug='user-registered-in-event'), - code=400) + raise ValidationException( + translation( + self.context["lang"], + en="This user already has an event checkin associated to this event", + es="Este usuario ya esta registrado en este evento", + slug="user-registered-in-event", + ), + code=400, + ) return data def create(self, validated_data): event_checkin = super().create(validated_data) - tasks_activity.add_activity.delay(event_checkin.attendee.id, - 'event_checkin_created', - related_type='events.EventCheckin', - related_id=event_checkin.id) + tasks_activity.add_activity.delay( + event_checkin.attendee.id, + "event_checkin_created", + related_type="events.EventCheckin", + related_id=event_checkin.id, + ) return event_checkin @@ -667,88 +701,130 @@ class Meta: def _validate_started_at(self, data: dict[str, 
Any]): utc_now = timezone.now() - if not self.instance and 'started_at' in data: + if not self.instance and "started_at" in data: raise ValidationException( - translation(self.context['lang'], - en='You cannot start a live class before it has been created.', - es='No puedes iniciar una clase en vivo antes de que se haya creado.', - slug='started-at-on-creation')) - - if self.instance and 'started_at' in data and len(data) > 1: + translation( + self.context["lang"], + en="You cannot start a live class before it has been created.", + es="No puedes iniciar una clase en vivo antes de que se haya creado.", + slug="started-at-on-creation", + ) + ) + + if self.instance and "started_at" in data and len(data) > 1: raise ValidationException( - translation(self.context['lang'], - en='Start the class before you can update any other of its attributes.', - es='Inicia la clase antes de poder actualizar cualquiera de sus atributos.', - slug='only-started-at')) - - if self.instance and 'started_at' in data and self.instance.started_at: + translation( + self.context["lang"], + en="Start the class before you can update any other of its attributes.", + es="Inicia la clase antes de poder actualizar cualquiera de sus atributos.", + slug="only-started-at", + ) + ) + + if self.instance and "started_at" in data and self.instance.started_at: raise ValidationException( - translation(self.context['lang'], - en='This class has already been started.', - es='Esta clase ya ha sido iniciada.', - slug='started-at-already-set')) - - if self.instance and 'started_at' in data and (data['started_at'] < utc_now - timedelta(minutes=2) - or data['started_at'] > utc_now + timedelta(minutes=2)): + translation( + self.context["lang"], + en="This class has already been started.", + es="Esta clase ya ha sido iniciada.", + slug="started-at-already-set", + ) + ) + + if ( + self.instance + and "started_at" in data + and ( + data["started_at"] < utc_now - timedelta(minutes=2) + or data["started_at"] > utc_now + timedelta(minutes=2) + ) + ): raise ValidationException( - translation(self.context['lang'], - en='Started at cannot be so different from the current time.', - es='La fecha de inicio no puede ser tan diferente de la hora actual.', - slug='started-at-too-different')) + translation( + self.context["lang"], + en="Started at cannot be so different from the current time.", + es="La fecha de inicio no puede ser tan diferente de la hora actual.", + slug="started-at-too-different", + ) + ) def _validate_ended_at(self, data: dict[str, Any]): utc_now = timezone.now() - if not self.instance and 'ended_at' in data: + if not self.instance and "ended_at" in data: raise ValidationException( - translation(self.context['lang'], - en='Ended at cannot be set on creation', - es='La fecha de finalización no se puede establecer en la creación', - slug='ended-at-on-creation')) - - if self.instance and 'ended_at' in data and len(data) > 1: + translation( + self.context["lang"], + en="Ended at cannot be set on creation", + es="La fecha de finalización no se puede establecer en la creación", + slug="ended-at-on-creation", + ) + ) + + if self.instance and "ended_at" in data and len(data) > 1: raise ValidationException( - translation(self.context['lang'], - en='Only ended at can be updated', - es='Solo se puede actualizar la fecha de finalización', - slug='only-ended-at')) - - if self.instance and 'ended_at' in data and self.instance.ended_at: + translation( + self.context["lang"], + en="Only ended at can be updated", + es="Solo se puede actualizar la fecha 
de finalización", + slug="only-ended-at", + ) + ) + + if self.instance and "ended_at" in data and self.instance.ended_at: raise ValidationException( - translation(self.context['lang'], - en='Ended at already set', - es='La fecha de finalización ya está establecida', - slug='ended-at-already-set')) - - if self.instance and 'ended_at' in data and not self.instance.started_at: + translation( + self.context["lang"], + en="Ended at already set", + es="La fecha de finalización ya está establecida", + slug="ended-at-already-set", + ) + ) + + if self.instance and "ended_at" in data and not self.instance.started_at: raise ValidationException( - translation(self.context['lang'], - en='You cannot end a live class if it has not yet been started.', - es='No puede finalizar una clase en vivo si aún no se ha iniciado.', - slug='schedule-must-have-started-at-before-ended-at')) - - if self.instance and 'ended_at' in data and self.instance.started_at >= data['ended_at']: + translation( + self.context["lang"], + en="You cannot end a live class if it has not yet been started.", + es="No puede finalizar una clase en vivo si aún no se ha iniciado.", + slug="schedule-must-have-started-at-before-ended-at", + ) + ) + + if self.instance and "ended_at" in data and self.instance.started_at >= data["ended_at"]: raise ValidationException( - translation(self.context['lang'], - en='The live class cannot have ended before starting.', - es='La clase en vivo no puede haber finalizado antes de comenzar.', - slug='ended-at-cannot-be-less-than-started-at')) - - if self.instance and 'ended_at' in data and (data['ended_at'] < utc_now - timedelta(minutes=2) - or data['ended_at'] > utc_now + timedelta(minutes=2)): + translation( + self.context["lang"], + en="The live class cannot have ended before starting.", + es="La clase en vivo no puede haber finalizado antes de comenzar.", + slug="ended-at-cannot-be-less-than-started-at", + ) + ) + + if ( + self.instance + and "ended_at" in data + and (data["ended_at"] < utc_now - timedelta(minutes=2) or data["ended_at"] > utc_now + timedelta(minutes=2)) + ): raise ValidationException( - translation(self.context['lang'], - en='Ended at at cannot be so different from the current time.', - es='La fecha de finalización no puede ser tan diferente de la hora actual.', - slug='ended-at-too-different')) + translation( + self.context["lang"], + en="Ended at at cannot be so different from the current time.", + es="La fecha de finalización no puede ser tan diferente de la hora actual.", + slug="ended-at-too-different", + ) + ) def _validate_cohort(self, data: dict[str, Any]): - if 'cohort' in data and data['cohort'].academy.id != int(self.context['academy_id']): + if "cohort" in data and data["cohort"].academy.id != int(self.context["academy_id"]): raise ValidationException( - translation(self.context['lang'], - en='This cohort does not belong to any of your academies.', - es='Este cohort no pertenece a ninguna de tus academias.', - slug='cohort-not-belong-to-academy')) + translation( + self.context["lang"], + en="This cohort does not belong to any of your academies.", + es="Este cohort no pertenece a ninguna de tus academias.", + slug="cohort-not-belong-to-academy", + ) + ) def validate(self, data: dict[str, Any]): self._validate_started_at(data) diff --git a/breathecode/events/signals.py b/breathecode/events/signals.py index c54c326c3..a5b138dba 100644 --- a/breathecode/events/signals.py +++ b/breathecode/events/signals.py @@ -2,9 +2,9 @@ from task_manager.django.dispatch import Emisor -emisor = 
Emisor('breathecode.events') +emisor = Emisor("breathecode.events") -event_saved = emisor.signal('event_saved') -event_status_updated = emisor.signal('event_status_updated') -new_event_attendee = emisor.signal('new_event_attendee') -new_event_order = emisor.signal('new_event_order') +event_saved = emisor.signal("event_saved") +event_status_updated = emisor.signal("event_status_updated") +new_event_attendee = emisor.signal("new_event_attendee") +new_event_order = emisor.signal("new_event_order") diff --git a/breathecode/events/syndication.py b/breathecode/events/syndication.py index cf3317553..c6132acdc 100644 --- a/breathecode/events/syndication.py +++ b/breathecode/events/syndication.py @@ -5,9 +5,9 @@ class LatestEventsFeed(Feed): - title = 'Latest Events Feed' - link = '/feeds/latest-events/' # Update to your desired link - description = 'Feed of the latest events based on provided filters.' + title = "Latest Events Feed" + link = "/feeds/latest-events/" # Update to your desired link + description = "Feed of the latest events based on provided filters." def get_object(self, request, *args, **kwargs): lookup = {} @@ -15,54 +15,54 @@ def get_object(self, request, *args, **kwargs): # All the query filtering you provided goes here... # Note: I'm directly using the code you provided to build the lookup dictionary. - if 'city' in request.GET: - city = request.GET.get('city') - lookup['venue__city__iexact'] = city + if "city" in request.GET: + city = request.GET.get("city") + lookup["venue__city__iexact"] = city - if 'country' in request.GET: - value = request.GET.get('country') - lookup['venue__country__iexact'] = value + if "country" in request.GET: + value = request.GET.get("country") + lookup["venue__country__iexact"] = value - if 'type' in request.GET: - value = request.GET.get('type') - lookup['event_type__slug'] = value + if "type" in request.GET: + value = request.GET.get("type") + lookup["event_type__slug"] = value - if 'zip_code' in request.GET: - value = request.GET.get('zip_code') - lookup['venue__zip_code'] = value + if "zip_code" in request.GET: + value = request.GET.get("zip_code") + lookup["venue__zip_code"] = value - if 'academy' in request.GET: - value = request.GET.get('academy') - lookup['academy__slug__in'] = value.split(',') + if "academy" in request.GET: + value = request.GET.get("academy") + lookup["academy__slug__in"] = value.split(",") - if 'academy_id' in request.GET: - value = request.GET.get('academy_id') - lookup['academy__id__in'] = value.split(',') + if "academy_id" in request.GET: + value = request.GET.get("academy_id") + lookup["academy__id__in"] = value.split(",") - if 'lang' in request.GET: - value = request.GET.get('lang') - lookup['lang'] = value.split(',') + if "lang" in request.GET: + value = request.GET.get("lang") + lookup["lang"] = value.split(",") - if 'status' in request.GET: - value = request.GET.get('status') - lookup['status__in'] = value.split(',') + if "status" in request.GET: + value = request.GET.get("status") + lookup["status__in"] = value.split(",") else: - lookup['status'] = 'ACTIVE' + lookup["status"] = "ACTIVE" - online_event = request.GET.get('online_event', None) - if online_event == 'true': - lookup['online_event'] = True - elif online_event == 'false': - lookup['online_event'] = False + online_event = request.GET.get("online_event", None) + if online_event == "true": + lookup["online_event"] = True + elif online_event == "false": + lookup["online_event"] = False # upcoming by default - lookup['ending_at__gte'] = timezone.now() - if 
'past' in request.GET: - if request.GET.get('past') == 'true': - lookup.pop('ending_at__gte') - lookup['starting_at__lte'] = timezone.now() + lookup["ending_at__gte"] = timezone.now() + if "past" in request.GET: + if request.GET.get("past") == "true": + lookup.pop("ending_at__gte") + lookup["starting_at__lte"] = timezone.now() - items = Event.objects.filter(**lookup).order_by('starting_at') + items = Event.objects.filter(**lookup).order_by("starting_at") return items def items(self, obj): @@ -78,8 +78,8 @@ def item_description(self, item): def item_link(self, item): basename = item.academy.white_label_url - if basename is None or basename == '': - basename = os.getenv('APP_URL', '') + if basename is None or basename == "": + basename = os.getenv("APP_URL", "") - lang = '' if item.lang in ['us', 'en'] else f'{item.lang}/' - return f'{basename}/{lang}workshops/{item.slug}' + lang = "" if item.lang in ["us", "en"] else f"{item.lang}/" + return f"{basename}/{lang}workshops/{item.slug}" diff --git a/breathecode/events/tasks.py b/breathecode/events/tasks.py index bbdf2e773..a7c5d5e9c 100644 --- a/breathecode/events/tasks.py +++ b/breathecode/events/tasks.py @@ -16,13 +16,13 @@ @shared_task(bind=True, priority=TaskPriority.STUDENT.value) def mark_live_class_as_started(self, live_class_id: int): - logger.info(f'Starting mark live class {live_class_id} as started') + logger.info(f"Starting mark live class {live_class_id} as started") now = timezone.now() live_class = LiveClass.objects.filter(id=live_class_id).first() if not live_class: - logger.error(f'Live Class {live_class_id} not fount') + logger.error(f"Live Class {live_class_id} not fount") return live_class.started_at = now @@ -34,16 +34,16 @@ def mark_live_class_as_started(self, live_class_id: int): def persist_organization_events(self, args): from .actions import sync_org_events - logger.debug('Starting persist_organization_events') - org = Organization.objects.get(id=args['org_id']) + logger.debug("Starting persist_organization_events") + org = Organization.objects.get(id=args["org_id"]) sync_org_events(org) return True @shared_task(bind=True, priority=TaskPriority.ACADEMY.value) def async_eventbrite_webhook(self, eventbrite_webhook_id): - logger.debug('Starting async_eventbrite_webhook') - status = 'ok' + logger.debug("Starting async_eventbrite_webhook") + status = "ok" webhook = EventbriteWebhook.objects.filter(id=eventbrite_webhook_id).first() organization_id = webhook.organization_id @@ -54,30 +54,30 @@ def async_eventbrite_webhook(self, eventbrite_webhook_id): client = Eventbrite(organization.eventbrite_key) client.execute_action(eventbrite_webhook_id) except Exception as e: - logger.debug('Eventbrite exception') + logger.debug("Eventbrite exception") logger.debug(str(e)) - status = 'error' + status = "error" else: - message = f"Organization {organization_id} doesn\'t exist" + message = f"Organization {organization_id} doesn't exist" - webhook.status = 'ERROR' + webhook.status = "ERROR" webhook.status_text = message webhook.save() logger.debug(message) - status = 'error' + status = "error" - logger.debug(f'Eventbrite status: {status}') + logger.debug(f"Eventbrite status: {status}") @shared_task(bind=True, priority=TaskPriority.ACADEMY.value) def build_live_classes_from_timeslot(self, timeslot_id: int): - logger.info(f'Starting build_live_classes_from_timeslot with id {timeslot_id}') + logger.info(f"Starting build_live_classes_from_timeslot with id {timeslot_id}") timeslot = CohortTimeSlot.objects.filter(id=timeslot_id).first() if not 
timeslot: - logger.error(f'Timeslot {timeslot_id} not fount') + logger.error(f"Timeslot {timeslot_id} not fount") return utc_now = timezone.now() @@ -95,24 +95,24 @@ def build_live_classes_from_timeslot(self, timeslot_id: int): ending_at += relativedelta(days=1) if not until_date: - logger.error(f'Timeslot {timeslot_id} not have a ending date') + logger.error(f"Timeslot {timeslot_id} not have a ending date") live_classes.delete() return delta = relativedelta(0) - if timeslot.recurrency_type == 'DAILY': + if timeslot.recurrency_type == "DAILY": delta += relativedelta(days=1) - if timeslot.recurrency_type == 'WEEKLY': + if timeslot.recurrency_type == "WEEKLY": delta += relativedelta(weeks=1) - if timeslot.recurrency_type == 'MONTHLY': + if timeslot.recurrency_type == "MONTHLY": delta += relativedelta(months=1) if not delta: - logger.error(f'{timeslot.recurrency_type} is not a valid or not implemented recurrency_type') + logger.error(f"{timeslot.recurrency_type} is not a valid or not implemented recurrency_type") return while True: @@ -125,7 +125,8 @@ def build_live_classes_from_timeslot(self, timeslot_id: int): starting_at=starting_at, ending_at=ending_at, cohort_time_slot=timeslot, - defaults={'remote_meeting_url': cohort.online_meeting_url or ''}) + defaults={"remote_meeting_url": cohort.online_meeting_url or ""}, + ) live_classes = live_classes.exclude(id=schedule.id) @@ -140,17 +141,17 @@ def build_live_classes_from_timeslot(self, timeslot_id: int): @shared_task(bind=False, priority=TaskPriority.FIXER.value) def fix_live_class_dates(timeslot_id: int): - logger.info(f'Starting fix_live_class_dates with id {timeslot_id}') + logger.info(f"Starting fix_live_class_dates with id {timeslot_id}") timeslot = CohortTimeSlot.objects.filter(id=timeslot_id).first() if not timeslot: - logger.error(f'Timeslot {timeslot_id} not fount') + logger.error(f"Timeslot {timeslot_id} not fount") return utc_now = timezone.now() if timeslot.cohort.ending_date and timeslot.cohort.ending_date < utc_now: - logger.info(f'Cohort {timeslot.cohort.id} is finished') + logger.info(f"Cohort {timeslot.cohort.id} is finished") return cohort = timeslot.cohort @@ -163,20 +164,20 @@ def fix_live_class_dates(timeslot_id: int): delta = relativedelta(0) - if timeslot.recurrency_type == 'DAILY': + if timeslot.recurrency_type == "DAILY": delta += relativedelta(days=1) - if timeslot.recurrency_type == 'WEEKLY': + if timeslot.recurrency_type == "WEEKLY": delta += relativedelta(weeks=1) - if timeslot.recurrency_type == 'MONTHLY': + if timeslot.recurrency_type == "MONTHLY": delta += relativedelta(months=1) if not delta: - logger.error(f'{timeslot.recurrency_type} is not a valid or not implemented recurrency_type') + logger.error(f"{timeslot.recurrency_type} is not a valid or not implemented recurrency_type") return - for live_class in LiveClass.objects.filter(cohort_time_slot=timeslot).order_by('starting_at'): + for live_class in LiveClass.objects.filter(cohort_time_slot=timeslot).order_by("starting_at"): if live_class.starting_at < utc_now or starting_at < cohort.kickoff_date: starting_at += delta diff --git a/breathecode/events/tests/actions/tests_create_or_update_organizer.py b/breathecode/events/tests/actions/tests_create_or_update_organizer.py index 6d09632f1..0eeb3d0ee 100644 --- a/breathecode/events/tests/actions/tests_create_or_update_organizer.py +++ b/breathecode/events/tests/actions/tests_create_or_update_organizer.py @@ -9,15 +9,15 @@ class SyncOrgVenuesTestSuite(EventTestCase): """ def 
test_create_or_update_organizer__without_academy(self): - organization_kwargs = {'eventbrite_id': '1'} + organization_kwargs = {"eventbrite_id": "1"} model = self.generate_models(organization=True, organization_kwargs=organization_kwargs) with self.assertRaises(Exception) as cm: - create_or_update_organizer(EVENTBRITE_EVENTS['events'][0], model['organization'], force_update=False) + create_or_update_organizer(EVENTBRITE_EVENTS["events"][0], model["organization"], force_update=False) - self.assertEqual(str(cm.exception), 'First you must specify to which academy this organization belongs') + self.assertEqual(str(cm.exception), "First you must specify to which academy this organization belongs") - self.assertEqual(self.all_organization_dict(), [self.model_to_dict(model, 'organization')]) + self.assertEqual(self.all_organization_dict(), [self.model_to_dict(model, "organization")]) self.assertEqual(self.all_organizer_dict(), []) """ @@ -25,23 +25,23 @@ def test_create_or_update_organizer__without_academy(self): """ def test_create_or_update_organizer__with_academy(self): - organization_kwargs = {'eventbrite_id': '1'} + organization_kwargs = {"eventbrite_id": "1"} model = self.generate_models(academy=True, organization=True, organization_kwargs=organization_kwargs) - create_or_update_organizer(EVENTBRITE_EVENTS['events'][0]['organizer'], - model['organization'], - force_update=False) + create_or_update_organizer( + EVENTBRITE_EVENTS["events"][0]["organizer"], model["organization"], force_update=False + ) - self.assertEqual(self.all_organization_dict(), [self.model_to_dict(model, 'organization')]) + self.assertEqual(self.all_organization_dict(), [self.model_to_dict(model, "organization")]) - organizer = EVENTBRITE_EVENTS['events'][0]['organizer'] + organizer = EVENTBRITE_EVENTS["events"][0]["organizer"] kwargs = { - 'id': 1, - 'name': organizer['name'], - 'description': organizer['description']['text'], - 'eventbrite_id': organizer['id'], - 'organization_id': 1, - 'academy_id': None, + "id": 1, + "name": organizer["name"], + "description": organizer["description"]["text"], + "eventbrite_id": organizer["id"], + "organization_id": 1, + "academy_id": None, } self.assertEqual(self.all_organizer_dict(), [kwargs]) @@ -50,43 +50,52 @@ def test_create_or_update_organizer__with_academy(self): """ def test_create_or_update_organizer__with_organizer(self): - organization_kwargs = {'eventbrite_id': '1'} - organizer_kwargs = {'eventbrite_id': '1'} - model = self.generate_models(academy=True, - organizer=True, - organization=True, - organizer_kwargs=organizer_kwargs, - organization_kwargs=organization_kwargs) - - create_or_update_organizer(EVENTBRITE_EVENTS['events'][0]['organizer'], - model['organization'], - force_update=False) - - self.assertEqual(self.all_organization_dict(), [self.model_to_dict(model, 'organization')]) - self.assertEqual(self.all_organizer_dict(), [self.model_to_dict(model, 'organizer')]) + organization_kwargs = {"eventbrite_id": "1"} + organizer_kwargs = {"eventbrite_id": "1"} + model = self.generate_models( + academy=True, + organizer=True, + organization=True, + organizer_kwargs=organizer_kwargs, + organization_kwargs=organization_kwargs, + ) + + create_or_update_organizer( + EVENTBRITE_EVENTS["events"][0]["organizer"], model["organization"], force_update=False + ) + + self.assertEqual(self.all_organization_dict(), [self.model_to_dict(model, "organization")]) + self.assertEqual(self.all_organizer_dict(), [self.model_to_dict(model, "organizer")]) """ 🔽🔽🔽 With academy and organizer with 
force update """ def test_create_or_update_organizer__with_organizer__with_force_update(self): - organization_kwargs = {'eventbrite_id': '1'} - organizer_kwargs = {'eventbrite_id': '1'} - model = self.generate_models(academy=True, - organizer=True, - organization=True, - organizer_kwargs=organizer_kwargs, - organization_kwargs=organization_kwargs) - - create_or_update_organizer(EVENTBRITE_EVENTS['events'][0]['organizer'], - model['organization'], - force_update=True) - - self.assertEqual(self.all_organization_dict(), [self.model_to_dict(model, 'organization')]) - - organizer = EVENTBRITE_EVENTS['events'][0]['organizer'] - self.assertEqual(self.all_organizer_dict(), [{ - **self.model_to_dict(model, 'organizer'), - 'name': organizer['name'], - 'description': organizer['description']['text'], - }]) + organization_kwargs = {"eventbrite_id": "1"} + organizer_kwargs = {"eventbrite_id": "1"} + model = self.generate_models( + academy=True, + organizer=True, + organization=True, + organizer_kwargs=organizer_kwargs, + organization_kwargs=organization_kwargs, + ) + + create_or_update_organizer( + EVENTBRITE_EVENTS["events"][0]["organizer"], model["organization"], force_update=True + ) + + self.assertEqual(self.all_organization_dict(), [self.model_to_dict(model, "organization")]) + + organizer = EVENTBRITE_EVENTS["events"][0]["organizer"] + self.assertEqual( + self.all_organizer_dict(), + [ + { + **self.model_to_dict(model, "organizer"), + "name": organizer["name"], + "description": organizer["description"]["text"], + } + ], + ) diff --git a/breathecode/events/tests/actions/tests_create_or_update_venue.py b/breathecode/events/tests/actions/tests_create_or_update_venue.py index 7b767345b..47ca728f7 100644 --- a/breathecode/events/tests/actions/tests_create_or_update_venue.py +++ b/breathecode/events/tests/actions/tests_create_or_update_venue.py @@ -19,119 +19,130 @@ class SyncOrgVenuesTestSuite(EventTestCase): 🔽🔽🔽 Without academy """ - @patch.object(logging.Logger, 'error', log_mock()) + @patch.object(logging.Logger, "error", log_mock()) def test_create_or_update_venue__without_academy(self): import logging - organization_kwargs = {'eventbrite_id': '1'} + organization_kwargs = {"eventbrite_id": "1"} model = self.generate_models(organization=True, organization_kwargs=organization_kwargs) - create_or_update_venue(EVENTBRITE_VENUES['venues'][0], model.organization, force_update=False) + create_or_update_venue(EVENTBRITE_VENUES["venues"][0], model.organization, force_update=False) - self.assertEqual(logging.Logger.error.call_args_list, - [call('The organization Nameless not have a academy assigned')]) + self.assertEqual( + logging.Logger.error.call_args_list, [call("The organization Nameless not have a academy assigned")] + ) - self.assertEqual(self.all_organization_dict(), [self.model_to_dict(model, 'organization')]) + self.assertEqual(self.all_organization_dict(), [self.model_to_dict(model, "organization")]) self.assertEqual(self.all_venue_dict(), []) """ 🔽🔽🔽 With academy """ - @patch.object(logging.Logger, 'error', log_mock()) + @patch.object(logging.Logger, "error", log_mock()) def test_create_or_update_venue__with_academy(self): import logging - organization_kwargs = {'eventbrite_id': '1'} + organization_kwargs = {"eventbrite_id": "1"} model = self.generate_models(academy=True, organization=True, organization_kwargs=organization_kwargs) - create_or_update_venue(EVENTBRITE_VENUES['venues'][0], model.organization, force_update=False) + create_or_update_venue(EVENTBRITE_VENUES["venues"][0], 
model.organization, force_update=False) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(self.all_organization_dict(), [self.model_to_dict(model, 'organization')]) + self.assertEqual(self.all_organization_dict(), [self.model_to_dict(model, "organization")]) - event = EVENTBRITE_VENUES['venues'][0] + event = EVENTBRITE_VENUES["venues"][0] self.assertEqual( self.all_venue_dict(), - [{ - 'id': 1, - 'title': event['name'], - 'street_address': event['address']['address_1'], - 'country': event['address']['country'], - 'city': event['address']['city'], - 'state': event['address']['region'], - 'zip_code': str(event['address']['postal_code']), - 'latitude': Decimal('25.758059600000000'), - 'longitude': Decimal('-80.377022000000000'), - 'eventbrite_id': event['id'], - 'eventbrite_url': event['resource_uri'], - 'academy_id': 1, - 'organization_id': None, - 'status': 'DRAFT', # TODO: we want every new venue are saved like 'DRAFT'? - }]) + [ + { + "id": 1, + "title": event["name"], + "street_address": event["address"]["address_1"], + "country": event["address"]["country"], + "city": event["address"]["city"], + "state": event["address"]["region"], + "zip_code": str(event["address"]["postal_code"]), + "latitude": Decimal("25.758059600000000"), + "longitude": Decimal("-80.377022000000000"), + "eventbrite_id": event["id"], + "eventbrite_url": event["resource_uri"], + "academy_id": 1, + "organization_id": None, + "status": "DRAFT", # TODO: we want every new venue are saved like 'DRAFT'? + } + ], + ) """ 🔽🔽🔽 With academy, event exists and is not updated """ - @patch.object(logging.Logger, 'error', log_mock()) + @patch.object(logging.Logger, "error", log_mock()) def test_create_or_update_venue__with_event__is_not_updated(self): import logging - organization_kwargs = {'eventbrite_id': '1'} - venue_kwargs = {'eventbrite_id': '1'} - model = self.generate_models(academy=True, - venue=True, - organization=True, - venue_kwargs=venue_kwargs, - organization_kwargs=organization_kwargs) + organization_kwargs = {"eventbrite_id": "1"} + venue_kwargs = {"eventbrite_id": "1"} + model = self.generate_models( + academy=True, + venue=True, + organization=True, + venue_kwargs=venue_kwargs, + organization_kwargs=organization_kwargs, + ) - create_or_update_venue(EVENTBRITE_VENUES['venues'][0], model.organization, force_update=False) + create_or_update_venue(EVENTBRITE_VENUES["venues"][0], model.organization, force_update=False) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(self.all_organization_dict(), [self.model_to_dict(model, 'organization')]) - self.assertEqual(self.all_venue_dict(), [self.model_to_dict(model, 'venue')]) + self.assertEqual(self.all_organization_dict(), [self.model_to_dict(model, "organization")]) + self.assertEqual(self.all_venue_dict(), [self.model_to_dict(model, "venue")]) """ 🔽🔽🔽 With academy, event exists and it force updated """ - @patch.object(logging.Logger, 'error', log_mock()) + @patch.object(logging.Logger, "error", log_mock()) def test_create_or_update_venue__with_event__with_force_update(self): import logging - organization_kwargs = {'eventbrite_id': '1'} - venue_kwargs = {'eventbrite_id': '1'} - model = self.generate_models(academy=True, - venue=True, - organization=True, - venue_kwargs=venue_kwargs, - organization_kwargs=organization_kwargs) + organization_kwargs = {"eventbrite_id": "1"} + venue_kwargs = {"eventbrite_id": "1"} + model = self.generate_models( + academy=True, + venue=True, + organization=True, + venue_kwargs=venue_kwargs, 
+ organization_kwargs=organization_kwargs, + ) - create_or_update_venue(EVENTBRITE_VENUES['venues'][0], model.organization, force_update=True) + create_or_update_venue(EVENTBRITE_VENUES["venues"][0], model.organization, force_update=True) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(self.all_organization_dict(), [self.model_to_dict(model, 'organization')]) + self.assertEqual(self.all_organization_dict(), [self.model_to_dict(model, "organization")]) - event = EVENTBRITE_VENUES['venues'][0] + event = EVENTBRITE_VENUES["venues"][0] self.assertEqual( self.all_venue_dict(), - [{ - 'id': 1, - 'title': event['name'], - 'street_address': event['address']['address_1'], - 'country': event['address']['country'], - 'city': event['address']['city'], - 'state': event['address']['region'], - 'zip_code': str(event['address']['postal_code']), - 'latitude': Decimal('25.758059600000000'), - 'longitude': Decimal('-80.377022000000000'), - 'eventbrite_id': event['id'], - 'eventbrite_url': event['resource_uri'], - 'academy_id': 1, - 'organization_id': 1, # this relation is generated by generate_models - 'status': 'DRAFT', # TODO: we want every new venue are saved like 'DRAFT'? - }]) + [ + { + "id": 1, + "title": event["name"], + "street_address": event["address"]["address_1"], + "country": event["address"]["country"], + "city": event["address"]["city"], + "state": event["address"]["region"], + "zip_code": str(event["address"]["postal_code"]), + "latitude": Decimal("25.758059600000000"), + "longitude": Decimal("-80.377022000000000"), + "eventbrite_id": event["id"], + "eventbrite_url": event["resource_uri"], + "academy_id": 1, + "organization_id": 1, # this relation is generated by generate_models + "status": "DRAFT", # TODO: we want every new venue are saved like 'DRAFT'? 
+ } + ], + ) diff --git a/breathecode/events/tests/actions/tests_export_event_description_to_eventbrite.py b/breathecode/events/tests/actions/tests_export_event_description_to_eventbrite.py index 756f6318b..f0d4a69e6 100644 --- a/breathecode/events/tests/actions/tests_export_event_description_to_eventbrite.py +++ b/breathecode/events/tests/actions/tests_export_event_description_to_eventbrite.py @@ -7,19 +7,19 @@ from ..mixins import EventTestCase export_event_description_to_eventbrite = actions.export_event_description_to_eventbrite -sync_desc = '2021-11-23 09:10:58.295264+00:00' -eventbrite_get_url = 'https://www.eventbriteapi.com/v3/events/1/structured_content/' -eventbrite_post_url = 'https://www.eventbriteapi.com/v3/events/1/structured_content/1/' -eventbrite_get_event = {'page_version_number': '1'} -eventbrite_good_post_event = {'modules': [{'id': '1'}]} -eventbrite_bad_post_event = {'modules': []} +sync_desc = "2021-11-23 09:10:58.295264+00:00" +eventbrite_get_url = "https://www.eventbriteapi.com/v3/events/1/structured_content/" +eventbrite_post_url = "https://www.eventbriteapi.com/v3/events/1/structured_content/1/" +eventbrite_get_event = {"page_version_number": "1"} +eventbrite_good_post_event = {"modules": [{"id": "1"}]} +eventbrite_bad_post_event = {"modules": []} status_map = { - 'draft': 'DRAFT', - 'live': 'ACTIVE', - 'completed': 'COMPLETED', - 'started': 'ACTIVE', - 'ended': 'ACTIVE', - 'canceled': 'DELETED', + "draft": "DRAFT", + "live": "ACTIVE", + "completed": "COMPLETED", + "started": "ACTIVE", + "ended": "ACTIVE", + "canceled": "DELETED", } UTC_NOW = timezone.now() @@ -30,15 +30,18 @@ class SyncOrgVenuesTestSuite(EventTestCase): 🔽🔽🔽 Without Event """ - @patch.object(logging.Logger, 'warning', MagicMock()) - @patch.object(logging.Logger, 'error', MagicMock()) - @patch.object(timezone, 'now', MagicMock(return_value=UTC_NOW)) + @patch.object(logging.Logger, "warning", MagicMock()) + @patch.object(logging.Logger, "error", MagicMock()) + @patch.object(timezone, "now", MagicMock(return_value=UTC_NOW)) @patch( - REQUESTS_PATH['request'], - apply_requests_request_mock([ - (400, eventbrite_post_url, eventbrite_bad_post_event), - (200, eventbrite_get_url, eventbrite_get_event), - ])) + REQUESTS_PATH["request"], + apply_requests_request_mock( + [ + (400, eventbrite_post_url, eventbrite_bad_post_event), + (200, eventbrite_get_url, eventbrite_get_event), + ] + ), + ) def test_export_event_description_to_eventbrite__without_event(self): import logging import requests @@ -46,24 +49,27 @@ def test_export_event_description_to_eventbrite__without_event(self): export_event_description_to_eventbrite(None) self.assertEqual(logging.Logger.warning.call_args_list, []) - self.assertEqual(logging.Logger.error.call_args_list, [call('Event is not being provided')]) + self.assertEqual(logging.Logger.error.call_args_list, [call("Event is not being provided")]) - self.assertEqual(self.bc.database.list_of('events.Event'), []) + self.assertEqual(self.bc.database.list_of("events.Event"), []) self.assertEqual(requests.request.call_args_list, []) """ 🔽🔽🔽 Without eventbrite id """ - @patch.object(logging.Logger, 'warning', MagicMock()) - @patch.object(logging.Logger, 'error', MagicMock()) - @patch.object(timezone, 'now', MagicMock(return_value=UTC_NOW)) + @patch.object(logging.Logger, "warning", MagicMock()) + @patch.object(logging.Logger, "error", MagicMock()) + @patch.object(timezone, "now", MagicMock(return_value=UTC_NOW)) @patch( - REQUESTS_PATH['request'], - apply_requests_request_mock([ - (400, 
eventbrite_post_url, eventbrite_bad_post_event), - (200, eventbrite_get_url, eventbrite_get_event), - ])) + REQUESTS_PATH["request"], + apply_requests_request_mock( + [ + (400, eventbrite_post_url, eventbrite_bad_post_event), + (200, eventbrite_get_url, eventbrite_get_event), + ] + ), + ) def test_export_event_description_to_eventbrite__without_eventbrite_id(self): import logging import requests @@ -74,201 +80,246 @@ def test_export_event_description_to_eventbrite__without_eventbrite_id(self): export_event_description_to_eventbrite(model.event) self.assertEqual(logging.Logger.warning.call_args_list, []) - self.assertEqual(logging.Logger.error.call_args_list, [ - call('Event 1 not have the integration with eventbrite'), - ]) - - self.assertEqual(self.bc.database.list_of('events.Event'), [db]) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call("Event 1 not have the integration with eventbrite"), + ], + ) + + self.assertEqual(self.bc.database.list_of("events.Event"), [db]) self.assertEqual(requests.request.call_args_list, []) """ 🔽🔽🔽 With Event """ - @patch.object(logging.Logger, 'warning', MagicMock()) - @patch.object(logging.Logger, 'error', MagicMock()) - @patch.object(timezone, 'now', MagicMock(return_value=UTC_NOW)) + @patch.object(logging.Logger, "warning", MagicMock()) + @patch.object(logging.Logger, "error", MagicMock()) + @patch.object(timezone, "now", MagicMock(return_value=UTC_NOW)) @patch( - REQUESTS_PATH['request'], - apply_requests_request_mock([ - (400, eventbrite_post_url, eventbrite_bad_post_event), - (200, eventbrite_get_url, eventbrite_get_event), - ])) + REQUESTS_PATH["request"], + apply_requests_request_mock( + [ + (400, eventbrite_post_url, eventbrite_bad_post_event), + (200, eventbrite_get_url, eventbrite_get_event), + ] + ), + ) def test_export_event_description_to_eventbrite__with_event(self): import logging import requests - event = {'eventbrite_id': '1'} + event = {"eventbrite_id": "1"} model = self.generate_models(event=event) db = self.bc.format.to_dict(model.event) export_event_description_to_eventbrite(model.event) self.assertEqual(logging.Logger.warning.call_args_list, []) - self.assertEqual(logging.Logger.error.call_args_list, [ - call('Event 1 not have a organization assigned'), - ]) - - self.assertEqual(self.bc.database.list_of('events.Event'), [db]) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call("Event 1 not have a organization assigned"), + ], + ) + + self.assertEqual(self.bc.database.list_of("events.Event"), [db]) self.assertEqual(requests.request.call_args_list, []) """ 🔽🔽🔽 Empty description """ - @patch.object(logging.Logger, 'warning', MagicMock()) - @patch.object(logging.Logger, 'error', MagicMock()) - @patch.object(timezone, 'now', MagicMock(return_value=UTC_NOW)) + @patch.object(logging.Logger, "warning", MagicMock()) + @patch.object(logging.Logger, "error", MagicMock()) + @patch.object(timezone, "now", MagicMock(return_value=UTC_NOW)) @patch( - REQUESTS_PATH['request'], - apply_requests_request_mock([ - (400, eventbrite_post_url, eventbrite_bad_post_event), - (200, eventbrite_get_url, eventbrite_get_event), - ])) + REQUESTS_PATH["request"], + apply_requests_request_mock( + [ + (400, eventbrite_post_url, eventbrite_bad_post_event), + (200, eventbrite_get_url, eventbrite_get_event), + ] + ), + ) def test_export_event_description_to_eventbrite__empty_description(self): import logging import requests - organization = {'eventbrite_id': '1', 'eventbrite_key': 'x'} - event = {'eventbrite_id': '1', 'description': 
''} + organization = {"eventbrite_id": "1", "eventbrite_key": "x"} + event = {"eventbrite_id": "1", "description": ""} model = self.generate_models(event=event, organization=organization) db = self.bc.format.to_dict(model.event) export_event_description_to_eventbrite(model.event) - self.assertEqual(logging.Logger.warning.call_args_list, [ - call('The event 1 not have description yet'), - ]) + self.assertEqual( + logging.Logger.warning.call_args_list, + [ + call("The event 1 not have description yet"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(self.bc.database.list_of('events.Event'), [db]) + self.assertEqual(self.bc.database.list_of("events.Event"), [db]) self.assertEqual(requests.request.call_args_list, []) """ 🔽🔽🔽 The eventbrite response is changed and now emit a exception """ - @patch.object(logging.Logger, 'warning', MagicMock()) - @patch.object(logging.Logger, 'error', MagicMock()) - @patch.object(timezone, 'now', MagicMock(return_value=UTC_NOW)) + @patch.object(logging.Logger, "warning", MagicMock()) + @patch.object(logging.Logger, "error", MagicMock()) + @patch.object(timezone, "now", MagicMock(return_value=UTC_NOW)) @patch( - REQUESTS_PATH['request'], - apply_requests_request_mock([ - (400, eventbrite_post_url, eventbrite_bad_post_event), - (200, eventbrite_get_url, {}), - ])) + REQUESTS_PATH["request"], + apply_requests_request_mock( + [ + (400, eventbrite_post_url, eventbrite_bad_post_event), + (200, eventbrite_get_url, {}), + ] + ), + ) def test_export_event_description_to_eventbrite__the_get_not_return_page_version_number(self): import logging import requests - organization = {'eventbrite_id': '1', 'eventbrite_key': 'x'} - event = {'eventbrite_id': '1', 'description': 'The killed kenny'} + organization = {"eventbrite_id": "1", "eventbrite_key": "x"} + event = {"eventbrite_id": "1", "description": "The killed kenny"} model = self.generate_models(event=event, organization=organization) db = self.bc.format.to_dict(model.event) export_event_description_to_eventbrite(model.event) self.assertEqual(logging.Logger.warning.call_args_list, []) - self.assertEqual(logging.Logger.error.call_args_list, [ - call('\'page_version_number\''), - ]) - - self.assertEqual(self.bc.database.list_of('events.Event'), [{ - **db, - 'eventbrite_sync_description': "'page_version_number'", - 'eventbrite_sync_status': 'ERROR', - }]) - - self.assertEqual(requests.request.call_args_list, [ - call('GET', - 'https://www.eventbriteapi.com/v3/events/1/structured_content/', - headers={'Authorization': f'Bearer {model.organization.eventbrite_key}'}, - data=None, - timeout=2) - ]) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call("'page_version_number'"), + ], + ) + + self.assertEqual( + self.bc.database.list_of("events.Event"), + [ + { + **db, + "eventbrite_sync_description": "'page_version_number'", + "eventbrite_sync_status": "ERROR", + } + ], + ) + + self.assertEqual( + requests.request.call_args_list, + [ + call( + "GET", + "https://www.eventbriteapi.com/v3/events/1/structured_content/", + headers={"Authorization": f"Bearer {model.organization.eventbrite_key}"}, + data=None, + timeout=2, + ) + ], + ) """ 🔽🔽🔽 The description could not be saved """ - @patch.object(logging.Logger, 'warning', MagicMock()) - @patch.object(logging.Logger, 'error', MagicMock()) - @patch.object(timezone, 'now', MagicMock(return_value=UTC_NOW)) + @patch.object(logging.Logger, "warning", MagicMock()) + @patch.object(logging.Logger, "error", MagicMock()) + 
@patch.object(timezone, "now", MagicMock(return_value=UTC_NOW)) @patch( - REQUESTS_PATH['request'], - apply_requests_request_mock([ - (400, eventbrite_post_url, eventbrite_bad_post_event), - (200, eventbrite_get_url, eventbrite_get_event), - ])) + REQUESTS_PATH["request"], + apply_requests_request_mock( + [ + (400, eventbrite_post_url, eventbrite_bad_post_event), + (200, eventbrite_get_url, eventbrite_get_event), + ] + ), + ) def test_export_event_description_to_eventbrite__post_with_400(self): import logging import requests - organization = {'eventbrite_id': '1', 'eventbrite_key': 'x'} - event = {'eventbrite_id': '1', 'description': 'The killed kenny'} + organization = {"eventbrite_id": "1", "eventbrite_key": "x"} + event = {"eventbrite_id": "1", "description": "The killed kenny"} model = self.generate_models(event=event, organization=organization) db = self.bc.format.to_dict(model.event) export_event_description_to_eventbrite(model.event) self.assertEqual(logging.Logger.warning.call_args_list, []) - self.assertEqual(logging.Logger.error.call_args_list, [ - call('Could not create event description in eventbrite'), - ]) - - self.assertEqual(self.bc.database.list_of('events.Event'), - [{ - **db, - 'eventbrite_sync_status': 'ERROR', - 'eventbrite_sync_description': 'Could not create event description in eventbrite', - }]) - self.assertEqual(requests.request.call_args_list, [ - call('GET', - 'https://www.eventbriteapi.com/v3/events/1/structured_content/', - headers={'Authorization': f'Bearer {model.organization.eventbrite_key}'}, - data=None, - timeout=2), - call('POST', - 'https://www.eventbriteapi.com/v3/events/1/structured_content/1/', - headers={'Authorization': f'Bearer {model.organization.eventbrite_key}'}, - data={ - 'modules': [{ - 'type': 'text', - 'data': { - 'body': { - 'type': 'text', - 'text': 'The killed kenny', - 'alignment': 'left' - } - } - }], - 'publish': - True, - 'purpose': - 'listing' - }, - timeout=2) - ]) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call("Could not create event description in eventbrite"), + ], + ) + + self.assertEqual( + self.bc.database.list_of("events.Event"), + [ + { + **db, + "eventbrite_sync_status": "ERROR", + "eventbrite_sync_description": "Could not create event description in eventbrite", + } + ], + ) + self.assertEqual( + requests.request.call_args_list, + [ + call( + "GET", + "https://www.eventbriteapi.com/v3/events/1/structured_content/", + headers={"Authorization": f"Bearer {model.organization.eventbrite_key}"}, + data=None, + timeout=2, + ), + call( + "POST", + "https://www.eventbriteapi.com/v3/events/1/structured_content/1/", + headers={"Authorization": f"Bearer {model.organization.eventbrite_key}"}, + data={ + "modules": [ + { + "type": "text", + "data": {"body": {"type": "text", "text": "The killed kenny", "alignment": "left"}}, + } + ], + "publish": True, + "purpose": "listing", + }, + timeout=2, + ), + ], + ) """ 🔽🔽🔽 The description was saved """ - @patch.object(logging.Logger, 'warning', MagicMock()) - @patch.object(logging.Logger, 'error', MagicMock()) - @patch.object(timezone, 'now', MagicMock(return_value=UTC_NOW)) + @patch.object(logging.Logger, "warning", MagicMock()) + @patch.object(logging.Logger, "error", MagicMock()) + @patch.object(timezone, "now", MagicMock(return_value=UTC_NOW)) @patch( - REQUESTS_PATH['request'], - apply_requests_request_mock([ - (200, eventbrite_post_url, eventbrite_good_post_event), - (200, eventbrite_get_url, eventbrite_get_event), - ])) + REQUESTS_PATH["request"], + 
apply_requests_request_mock( + [ + (200, eventbrite_post_url, eventbrite_good_post_event), + (200, eventbrite_get_url, eventbrite_get_event), + ] + ), + ) def test_export_event_description_to_eventbrite(self): import logging import requests - organization = {'eventbrite_id': '1', 'eventbrite_key': 'x'} - event = {'eventbrite_id': '1', 'description': 'The killed kenny'} + organization = {"eventbrite_id": "1", "eventbrite_key": "x"} + event = {"eventbrite_id": "1", "description": "The killed kenny"} model = self.generate_models(event=event, organization=organization) db = self.bc.format.to_dict(model.event) @@ -277,36 +328,42 @@ def test_export_event_description_to_eventbrite(self): self.assertEqual(logging.Logger.warning.call_args_list, []) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(self.bc.database.list_of('events.Event'), [{ - **db, - 'eventbrite_sync_status': 'SYNCHED', - 'eventbrite_sync_description': str(UTC_NOW), - }]) - - self.assertEqual(requests.request.call_args_list, [ - call('GET', - 'https://www.eventbriteapi.com/v3/events/1/structured_content/', - headers={'Authorization': f'Bearer {model.organization.eventbrite_key}'}, - data=None, - timeout=2), - call('POST', - 'https://www.eventbriteapi.com/v3/events/1/structured_content/1/', - headers={'Authorization': f'Bearer {model.organization.eventbrite_key}'}, - data={ - 'modules': [{ - 'type': 'text', - 'data': { - 'body': { - 'type': 'text', - 'text': 'The killed kenny', - 'alignment': 'left' - } - } - }], - 'publish': - True, - 'purpose': - 'listing' - }, - timeout=2) - ]) + self.assertEqual( + self.bc.database.list_of("events.Event"), + [ + { + **db, + "eventbrite_sync_status": "SYNCHED", + "eventbrite_sync_description": str(UTC_NOW), + } + ], + ) + + self.assertEqual( + requests.request.call_args_list, + [ + call( + "GET", + "https://www.eventbriteapi.com/v3/events/1/structured_content/", + headers={"Authorization": f"Bearer {model.organization.eventbrite_key}"}, + data=None, + timeout=2, + ), + call( + "POST", + "https://www.eventbriteapi.com/v3/events/1/structured_content/1/", + headers={"Authorization": f"Bearer {model.organization.eventbrite_key}"}, + data={ + "modules": [ + { + "type": "text", + "data": {"body": {"type": "text", "text": "The killed kenny", "alignment": "left"}}, + } + ], + "publish": True, + "purpose": "listing", + }, + timeout=2, + ), + ], + ) diff --git a/breathecode/events/tests/actions/tests_export_event_to_eventbrite.py b/breathecode/events/tests/actions/tests_export_event_to_eventbrite.py index 95e3b46c8..885740dce 100644 --- a/breathecode/events/tests/actions/tests_export_event_to_eventbrite.py +++ b/breathecode/events/tests/actions/tests_export_event_to_eventbrite.py @@ -8,17 +8,17 @@ from ..mixins import EventTestCase export_event_to_eventbrite = actions.export_event_to_eventbrite -sync_desc = '2021-11-23 09:10:58.295264+00:00' -eventbrite_post_url = 'https://www.eventbriteapi.com/v3/organizations/1/events/' -eventbrite_put_url = 'https://www.eventbriteapi.com/v3/events/1/' -eventbrite_event = {'id': 1} +sync_desc = "2021-11-23 09:10:58.295264+00:00" +eventbrite_post_url = "https://www.eventbriteapi.com/v3/organizations/1/events/" +eventbrite_put_url = "https://www.eventbriteapi.com/v3/events/1/" +eventbrite_event = {"id": 1} status_map = { - 'draft': 'DRAFT', - 'live': 'ACTIVE', - 'completed': 'COMPLETED', - 'started': 'ACTIVE', - 'ended': 'ACTIVE', - 'canceled': 'DELETED', + "draft": "DRAFT", + "live": "ACTIVE", + "completed": "COMPLETED", + "started": "ACTIVE", 
+ "ended": "ACTIVE", + "canceled": "DELETED", } @@ -43,57 +43,66 @@ class SyncOrgVenuesTestSuite(EventTestCase): 🔽🔽🔽 Without academy """ - @patch.object(logging.Logger, 'warning', log_mock()) - @patch.object(logging.Logger, 'error', log_mock()) - @patch.object(actions, 'get_current_iso_string', get_current_iso_string_mock()) - @patch.object(actions, 'export_event_description_to_eventbrite', MagicMock()) + @patch.object(logging.Logger, "warning", log_mock()) + @patch.object(logging.Logger, "error", log_mock()) + @patch.object(actions, "get_current_iso_string", get_current_iso_string_mock()) + @patch.object(actions, "export_event_description_to_eventbrite", MagicMock()) @patch( - REQUESTS_PATH['request'], - apply_requests_request_mock([ - (201, eventbrite_post_url, eventbrite_event), - (200, eventbrite_put_url, eventbrite_event), - ])) + REQUESTS_PATH["request"], + apply_requests_request_mock( + [ + (201, eventbrite_post_url, eventbrite_event), + (200, eventbrite_put_url, eventbrite_event), + ] + ), + ) def test_export_event_to_eventbrite__without_academy(self): import logging - organization_kwargs = {'eventbrite_id': '1'} + organization_kwargs = {"eventbrite_id": "1"} model = self.generate_models(organization=True, organization_kwargs=organization_kwargs) export_event_to_eventbrite(None, model.organization) self.assertEqual(logging.Logger.warning.call_args_list, []) - self.assertEqual(logging.Logger.error.call_args_list, - [call('The organization Nameless not have a academy assigned')]) + self.assertEqual( + logging.Logger.error.call_args_list, [call("The organization Nameless not have a academy assigned")] + ) self.assertEqual(actions.export_event_description_to_eventbrite.call_args_list, []) - self.assertEqual(self.all_organization_dict(), [self.model_to_dict(model, 'organization')]) - self.assertEqual(self.bc.database.list_of('events.Event'), []) + self.assertEqual(self.all_organization_dict(), [self.model_to_dict(model, "organization")]) + self.assertEqual(self.bc.database.list_of("events.Event"), []) """ 🔽🔽🔽 With academy and event with title """ - @patch.object(logging.Logger, 'warning', log_mock()) - @patch.object(logging.Logger, 'error', log_mock()) - @patch.object(actions, 'get_current_iso_string', get_current_iso_string_mock()) - @patch.object(actions, 'export_event_description_to_eventbrite', MagicMock()) - @patch('breathecode.events.signals.event_saved.send_robust', MagicMock()) + @patch.object(logging.Logger, "warning", log_mock()) + @patch.object(logging.Logger, "error", log_mock()) + @patch.object(actions, "get_current_iso_string", get_current_iso_string_mock()) + @patch.object(actions, "export_event_description_to_eventbrite", MagicMock()) + @patch("breathecode.events.signals.event_saved.send_robust", MagicMock()) @patch( - REQUESTS_PATH['request'], - apply_requests_request_mock([ - (201, eventbrite_post_url, eventbrite_event), - (200, eventbrite_put_url, eventbrite_event), - ])) + REQUESTS_PATH["request"], + apply_requests_request_mock( + [ + (201, eventbrite_post_url, eventbrite_event), + (200, eventbrite_put_url, eventbrite_event), + ] + ), + ) def test_export_event_to_eventbrite__with_event(self): import logging - organization_kwargs = {'eventbrite_id': '1'} - event_kwargs = {'title': 'They killed kenny'} - model = self.generate_models(academy=True, - event=True, - organization=True, - event_kwargs=event_kwargs, - organization_kwargs=organization_kwargs) + organization_kwargs = {"eventbrite_id": "1"} + event_kwargs = {"title": "They killed kenny"} + model = 
self.generate_models( + academy=True, + event=True, + organization=True, + event_kwargs=event_kwargs, + organization_kwargs=organization_kwargs, + ) export_event_to_eventbrite(model.event, model.organization) @@ -101,42 +110,51 @@ def test_export_event_to_eventbrite__with_event(self): self.assertEqual(logging.Logger.error.call_args_list, []) self.assertEqual(actions.export_event_description_to_eventbrite.call_args_list, [call(model.event)]) - self.assertEqual(self.all_organization_dict(), [self.model_to_dict(model, 'organization')]) - self.assertEqual(self.bc.database.list_of('events.Event'), - [{ - **self.model_to_dict(model, 'event'), - 'eventbrite_sync_status': 'SYNCHED', - 'eventbrite_sync_description': '2021-11-23 09:10:58.295264+00:00', - }]) + self.assertEqual(self.all_organization_dict(), [self.model_to_dict(model, "organization")]) + self.assertEqual( + self.bc.database.list_of("events.Event"), + [ + { + **self.model_to_dict(model, "event"), + "eventbrite_sync_status": "SYNCHED", + "eventbrite_sync_description": "2021-11-23 09:10:58.295264+00:00", + } + ], + ) """ 🔽🔽🔽 Check the payload without eventbrite_id """ - @patch.object(logging.Logger, 'warning', log_mock()) - @patch.object(logging.Logger, 'error', log_mock()) - @patch.object(actions, 'get_current_iso_string', get_current_iso_string_mock()) - @patch.object(actions, 'export_event_description_to_eventbrite', MagicMock()) - @patch.object(Eventbrite, 'request', MagicMock()) - @patch('breathecode.events.signals.event_saved.send_robust', MagicMock()) + @patch.object(logging.Logger, "warning", log_mock()) + @patch.object(logging.Logger, "error", log_mock()) + @patch.object(actions, "get_current_iso_string", get_current_iso_string_mock()) + @patch.object(actions, "export_event_description_to_eventbrite", MagicMock()) + @patch.object(Eventbrite, "request", MagicMock()) + @patch("breathecode.events.signals.event_saved.send_robust", MagicMock()) @patch( - REQUESTS_PATH['request'], - apply_requests_request_mock([ - (201, eventbrite_post_url, eventbrite_event), - (200, eventbrite_put_url, eventbrite_event), - ])) + REQUESTS_PATH["request"], + apply_requests_request_mock( + [ + (201, eventbrite_post_url, eventbrite_event), + (200, eventbrite_put_url, eventbrite_event), + ] + ), + ) def test_export_event_to_eventbrite__check_the_payload__without_eventbrite_id(self): import logging from breathecode.events.utils import Eventbrite - organization_kwargs = {'eventbrite_id': '1'} - event_kwargs = {'title': 'They killed kenny'} - model = self.generate_models(academy=True, - event=True, - organization=True, - event_kwargs=event_kwargs, - organization_kwargs=organization_kwargs) + organization_kwargs = {"eventbrite_id": "1"} + event_kwargs = {"title": "They killed kenny"} + model = self.generate_models( + academy=True, + event=True, + organization=True, + event_kwargs=event_kwargs, + organization_kwargs=organization_kwargs, + ) export_event_to_eventbrite(model.event, model.organization) @@ -147,58 +165,68 @@ def test_export_event_to_eventbrite__check_the_payload__without_eventbrite_id(se Eventbrite.request.call_args_list, [ call( - 'POST', - '/organizations/1/events/', + "POST", + "/organizations/1/events/", data={ - 'event.name.html': 'They killed kenny', - 'event.description.html': model.event.description, - 'event.start.utc': self.datetime_to_iso(model.event.starting_at), - 'event.end.utc': self.datetime_to_iso(model.event.ending_at), + "event.name.html": "They killed kenny", + "event.description.html": model.event.description, + 
"event.start.utc": self.datetime_to_iso(model.event.starting_at), + "event.end.utc": self.datetime_to_iso(model.event.ending_at), # 'event.summary': model.event.excerpt, - 'event.capacity': model.event.capacity, - 'event.online_event': model.event.online_event, - 'event.url': model.event.eventbrite_url, - 'event.currency': model.event.currency, + "event.capacity": model.event.capacity, + "event.online_event": model.event.online_event, + "event.url": model.event.eventbrite_url, + "event.currency": model.event.currency, }, ), - ]) + ], + ) - self.assertEqual(self.all_organization_dict(), [self.model_to_dict(model, 'organization')]) - self.assertEqual(self.bc.database.list_of('events.Event'), - [{ - **self.model_to_dict(model, 'event'), - 'eventbrite_sync_status': 'SYNCHED', - 'eventbrite_sync_description': '2021-11-23 09:10:58.295264+00:00', - }]) + self.assertEqual(self.all_organization_dict(), [self.model_to_dict(model, "organization")]) + self.assertEqual( + self.bc.database.list_of("events.Event"), + [ + { + **self.model_to_dict(model, "event"), + "eventbrite_sync_status": "SYNCHED", + "eventbrite_sync_description": "2021-11-23 09:10:58.295264+00:00", + } + ], + ) """ 🔽🔽🔽 Check the payload with eventbrite_id """ - @patch.object(logging.Logger, 'warning', log_mock()) - @patch.object(logging.Logger, 'error', log_mock()) - @patch.object(actions, 'get_current_iso_string', get_current_iso_string_mock()) - @patch.object(actions, 'export_event_description_to_eventbrite', MagicMock()) - @patch.object(Eventbrite, 'request', MagicMock()) - @patch('breathecode.events.signals.event_saved.send_robust', MagicMock()) + @patch.object(logging.Logger, "warning", log_mock()) + @patch.object(logging.Logger, "error", log_mock()) + @patch.object(actions, "get_current_iso_string", get_current_iso_string_mock()) + @patch.object(actions, "export_event_description_to_eventbrite", MagicMock()) + @patch.object(Eventbrite, "request", MagicMock()) + @patch("breathecode.events.signals.event_saved.send_robust", MagicMock()) @patch( - REQUESTS_PATH['request'], - apply_requests_request_mock([ - (201, eventbrite_post_url, eventbrite_event), - (200, eventbrite_put_url, eventbrite_event), - ])) + REQUESTS_PATH["request"], + apply_requests_request_mock( + [ + (201, eventbrite_post_url, eventbrite_event), + (200, eventbrite_put_url, eventbrite_event), + ] + ), + ) def test_export_event_to_eventbrite__check_the_payload__with_eventbrite_id(self): import logging from breathecode.events.utils import Eventbrite - organization_kwargs = {'eventbrite_id': '1'} - event_kwargs = {'title': 'They killed kenny', 'eventbrite_id': '1'} - model = self.generate_models(academy=True, - event=True, - organization=True, - event_kwargs=event_kwargs, - organization_kwargs=organization_kwargs) + organization_kwargs = {"eventbrite_id": "1"} + event_kwargs = {"title": "They killed kenny", "eventbrite_id": "1"} + model = self.generate_models( + academy=True, + event=True, + organization=True, + event_kwargs=event_kwargs, + organization_kwargs=organization_kwargs, + ) export_event_to_eventbrite(model.event, model.organization) @@ -209,59 +237,69 @@ def test_export_event_to_eventbrite__check_the_payload__with_eventbrite_id(self) Eventbrite.request.call_args_list, [ call( - 'PUT', - '/events/1/', + "PUT", + "/events/1/", data={ - 'event.name.html': 'They killed kenny', - 'event.description.html': model.event.description, - 'event.start.utc': self.datetime_to_iso(model.event.starting_at), - 'event.end.utc': self.datetime_to_iso(model.event.ending_at), + 
"event.name.html": "They killed kenny", + "event.description.html": model.event.description, + "event.start.utc": self.datetime_to_iso(model.event.starting_at), + "event.end.utc": self.datetime_to_iso(model.event.ending_at), # 'event.summary': model.event.excerpt, - 'event.capacity': model.event.capacity, - 'event.online_event': model.event.online_event, - 'event.url': model.event.eventbrite_url, - 'event.currency': model.event.currency, + "event.capacity": model.event.capacity, + "event.online_event": model.event.online_event, + "event.url": model.event.eventbrite_url, + "event.currency": model.event.currency, }, ), - ]) + ], + ) - self.assertEqual(self.all_organization_dict(), [self.model_to_dict(model, 'organization')]) - self.assertEqual(self.bc.database.list_of('events.Event'), - [{ - **self.model_to_dict(model, 'event'), - 'eventbrite_sync_status': 'SYNCHED', - 'eventbrite_sync_description': '2021-11-23 09:10:58.295264+00:00', - }]) + self.assertEqual(self.all_organization_dict(), [self.model_to_dict(model, "organization")]) + self.assertEqual( + self.bc.database.list_of("events.Event"), + [ + { + **self.model_to_dict(model, "event"), + "eventbrite_sync_status": "SYNCHED", + "eventbrite_sync_description": "2021-11-23 09:10:58.295264+00:00", + } + ], + ) """ 🔽🔽🔽 Check the payload with organizer_id """ - @patch.object(logging.Logger, 'warning', log_mock()) - @patch.object(logging.Logger, 'error', log_mock()) - @patch.object(actions, 'get_current_iso_string', get_current_iso_string_mock()) - @patch.object(actions, 'export_event_description_to_eventbrite', MagicMock()) - @patch.object(Eventbrite, 'request', MagicMock()) - @patch('breathecode.events.signals.event_saved.send_robust', MagicMock()) + @patch.object(logging.Logger, "warning", log_mock()) + @patch.object(logging.Logger, "error", log_mock()) + @patch.object(actions, "get_current_iso_string", get_current_iso_string_mock()) + @patch.object(actions, "export_event_description_to_eventbrite", MagicMock()) + @patch.object(Eventbrite, "request", MagicMock()) + @patch("breathecode.events.signals.event_saved.send_robust", MagicMock()) @patch( - REQUESTS_PATH['request'], - apply_requests_request_mock([ - (201, eventbrite_post_url, eventbrite_event), - (200, eventbrite_put_url, eventbrite_event), - ])) + REQUESTS_PATH["request"], + apply_requests_request_mock( + [ + (201, eventbrite_post_url, eventbrite_event), + (200, eventbrite_put_url, eventbrite_event), + ] + ), + ) def test_export_event_to_eventbrite__check_the_payload__with_organizer_id(self): import logging from breathecode.events.utils import Eventbrite - organization_kwargs = {'eventbrite_id': '1'} - event_kwargs = {'title': 'They killed kenny', 'eventbrite_id': '1', 'eventbrite_organizer_id': '1'} - model = self.generate_models(academy=True, - event=True, - organizer=True, - organization=True, - event_kwargs=event_kwargs, - organization_kwargs=organization_kwargs) + organization_kwargs = {"eventbrite_id": "1"} + event_kwargs = {"title": "They killed kenny", "eventbrite_id": "1", "eventbrite_organizer_id": "1"} + model = self.generate_models( + academy=True, + event=True, + organizer=True, + organization=True, + event_kwargs=event_kwargs, + organization_kwargs=organization_kwargs, + ) export_event_to_eventbrite(model.event, model.organization) @@ -272,27 +310,32 @@ def test_export_event_to_eventbrite__check_the_payload__with_organizer_id(self): Eventbrite.request.call_args_list, [ call( - 'PUT', - '/events/1/', + "PUT", + "/events/1/", data={ - 'event.name.html': 'They killed 
kenny', - 'event.description.html': model.event.description, - 'event.start.utc': self.datetime_to_iso(model.event.starting_at), - 'event.end.utc': self.datetime_to_iso(model.event.ending_at), + "event.name.html": "They killed kenny", + "event.description.html": model.event.description, + "event.start.utc": self.datetime_to_iso(model.event.starting_at), + "event.end.utc": self.datetime_to_iso(model.event.ending_at), # 'event.summary': model.event.excerpt, - 'event.capacity': model.event.capacity, - 'event.online_event': model.event.online_event, - 'event.url': model.event.eventbrite_url, - 'event.currency': model.event.currency, - 'event.organizer_id': '1', + "event.capacity": model.event.capacity, + "event.online_event": model.event.online_event, + "event.url": model.event.eventbrite_url, + "event.currency": model.event.currency, + "event.organizer_id": "1", }, ), - ]) - - self.assertEqual(self.all_organization_dict(), [self.model_to_dict(model, 'organization')]) - self.assertEqual(self.bc.database.list_of('events.Event'), - [{ - **self.model_to_dict(model, 'event'), - 'eventbrite_sync_status': 'SYNCHED', - 'eventbrite_sync_description': '2021-11-23 09:10:58.295264+00:00', - }]) + ], + ) + + self.assertEqual(self.all_organization_dict(), [self.model_to_dict(model, "organization")]) + self.assertEqual( + self.bc.database.list_of("events.Event"), + [ + { + **self.model_to_dict(model, "event"), + "eventbrite_sync_status": "SYNCHED", + "eventbrite_sync_description": "2021-11-23 09:10:58.295264+00:00", + } + ], + ) diff --git a/breathecode/events/tests/actions/tests_publish_event_from_eventbrite.py b/breathecode/events/tests/actions/tests_publish_event_from_eventbrite.py index b3e0e52bf..f3a4d36e8 100644 --- a/breathecode/events/tests/actions/tests_publish_event_from_eventbrite.py +++ b/breathecode/events/tests/actions/tests_publish_event_from_eventbrite.py @@ -14,11 +14,11 @@ class SyncOrgVenuesTestSuite(EventTestCase): 🔽🔽🔽 Empty data """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=now)) - @patch('breathecode.events.signals.event_saved.send_robust', MagicMock()) - @patch('breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event', MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=now)) + @patch("breathecode.events.signals.event_saved.send_robust", MagicMock()) + @patch("breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event", MagicMock()) def test_publish_event_from_eventbrite__empty_data(self): """ Descriptions of models are being generated: @@ -27,30 +27,33 @@ def test_publish_event_from_eventbrite__empty_data(self): """ from logging import Logger - organization = {'eventbrite_id': '1'} + organization = {"eventbrite_id": "1"} model = self.bc.database.create(organization=organization) Logger.info.call_args_list = [] - with self.assertRaisesMessage(ValueError, 'data is empty'): + with self.assertRaisesMessage(ValueError, "data is empty"): publish_event_from_eventbrite({}, model.organization) - self.assertEqual(Logger.info.call_args_list, [call('Ignored event')]) + self.assertEqual(Logger.info.call_args_list, [call("Ignored event")]) self.assertEqual(Logger.error.call_args_list, []) - self.assertEqual(self.bc.database.list_of('events.Organization'), [ - self.bc.format.to_dict(model.organization), - ]) - 
self.assertEqual(self.bc.database.list_of('events.Event'), []) + self.assertEqual( + self.bc.database.list_of("events.Organization"), + [ + self.bc.format.to_dict(model.organization), + ], + ) + self.assertEqual(self.bc.database.list_of("events.Event"), []) """ 🔽🔽🔽 Bad data """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=now)) - @patch('breathecode.events.signals.event_saved.send_robust', MagicMock()) - @patch('breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event', MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=now)) + @patch("breathecode.events.signals.event_saved.send_robust", MagicMock()) + @patch("breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event", MagicMock()) def test_publish_event_from_eventbrite__bad_data(self): """ Descriptions of models are being generated: @@ -59,32 +62,38 @@ def test_publish_event_from_eventbrite__bad_data(self): """ from logging import Logger - organization = {'eventbrite_id': '1'} + organization = {"eventbrite_id": "1"} model = self.bc.database.create(organization=organization) Logger.info.call_args_list = [] - with self.assertRaisesMessage(KeyError, 'id'): - publish_event_from_eventbrite({'irrelevant': 'value'}, model.organization) + with self.assertRaisesMessage(KeyError, "id"): + publish_event_from_eventbrite({"irrelevant": "value"}, model.organization) self.assertEqual(Logger.info.call_args_list, []) - self.assertEqual(Logger.error.call_args_list, [ - call(f'{now} => the body is coming from eventbrite has change', exc_info=True), - ]) - - self.assertEqual(self.bc.database.list_of('events.Organization'), [ - self.bc.format.to_dict(model.organization), - ]) - self.assertEqual(self.bc.database.list_of('events.Event'), []) + self.assertEqual( + Logger.error.call_args_list, + [ + call(f"{now} => the body is coming from eventbrite has change", exc_info=True), + ], + ) + + self.assertEqual( + self.bc.database.list_of("events.Organization"), + [ + self.bc.format.to_dict(model.organization), + ], + ) + self.assertEqual(self.bc.database.list_of("events.Event"), []) """ 🔽🔽🔽 Event not found """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=now)) - @patch('breathecode.events.signals.event_saved.send_robust', MagicMock()) - @patch('breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event', MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=now)) + @patch("breathecode.events.signals.event_saved.send_robust", MagicMock()) + @patch("breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event", MagicMock()) def test_publish_event_from_eventbrite__event_not_found(self): """ Descriptions of models are being generated: @@ -93,27 +102,30 @@ def test_publish_event_from_eventbrite__event_not_found(self): """ from logging import Logger - organization = {'eventbrite_id': 1} + organization = {"eventbrite_id": 1} model = self.bc.database.create(organization=organization) Logger.info.call_args_list = [] - exception_message = 'The event with the eventbrite id `1` doesn\'t exist' + exception_message = "The event with the eventbrite id `1` doesn't exist" with 
self.assertRaisesMessage(Warning, exception_message): - publish_event_from_eventbrite({'id': '1'}, model.organization) + publish_event_from_eventbrite({"id": "1"}, model.organization) self.assertEqual(Logger.info.call_args_list, []) - self.assertEqual(Logger.error.call_args_list, [call(f'{now} => {exception_message}')]) - - self.assertEqual(self.bc.database.list_of('events.Organization'), [ - self.bc.format.to_dict(model.organization), - ]) - self.assertEqual(self.bc.database.list_of('events.Event'), []) - - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=now)) - @patch('breathecode.events.signals.event_saved.send_robust', MagicMock()) - @patch('breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event', MagicMock()) + self.assertEqual(Logger.error.call_args_list, [call(f"{now} => {exception_message}")]) + + self.assertEqual( + self.bc.database.list_of("events.Organization"), + [ + self.bc.format.to_dict(model.organization), + ], + ) + self.assertEqual(self.bc.database.list_of("events.Event"), []) + + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=now)) + @patch("breathecode.events.signals.event_saved.send_robust", MagicMock()) + @patch("breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event", MagicMock()) def test_publish_event_from_eventbrite__event_not_found__with_one_event(self): """ Descriptions of models are being generated: @@ -125,33 +137,39 @@ def test_publish_event_from_eventbrite__event_not_found__with_one_event(self): """ from logging import Logger - organization = {'eventbrite_id': 1} + organization = {"eventbrite_id": 1} model = self.bc.database.create(organization=organization, event=1) Logger.info.call_args_list = [] - exception_message = 'The event with the eventbrite id `1` doesn\'t exist' + exception_message = "The event with the eventbrite id `1` doesn't exist" with self.assertRaisesMessage(Warning, exception_message): - publish_event_from_eventbrite({'id': '1'}, model.organization) + publish_event_from_eventbrite({"id": "1"}, model.organization) self.assertEqual(Logger.info.call_args_list, []) - self.assertEqual(Logger.error.call_args_list, [call(f'{now} => {exception_message}')]) - - self.assertEqual(self.bc.database.list_of('events.Organization'), [ - self.bc.format.to_dict(model.organization), - ]) - self.assertEqual(self.bc.database.list_of('events.Event'), [ - self.bc.format.to_dict(model.event), - ]) + self.assertEqual(Logger.error.call_args_list, [call(f"{now} => {exception_message}")]) + + self.assertEqual( + self.bc.database.list_of("events.Organization"), + [ + self.bc.format.to_dict(model.organization), + ], + ) + self.assertEqual( + self.bc.database.list_of("events.Event"), + [ + self.bc.format.to_dict(model.event), + ], + ) """ 🔽🔽🔽 With a correct Event """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=now)) - @patch('breathecode.events.signals.event_saved.send_robust', MagicMock()) - @patch('breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event', MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=now)) + @patch("breathecode.events.signals.event_saved.send_robust", MagicMock()) + 
@patch("breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event", MagicMock()) def test_publish_event_from_eventbrite__event_not_found__with_one_event(self): """ Descriptions of models are being generated: @@ -164,24 +182,30 @@ def test_publish_event_from_eventbrite__event_not_found__with_one_event(self): from logging import Logger - organization = {'eventbrite_id': 1} - event = {'eventbrite_id': 1} + organization = {"eventbrite_id": 1} + event = {"eventbrite_id": 1} model = self.bc.database.create(organization=organization, event=event) Logger.info.call_args_list = [] - publish_event_from_eventbrite({'id': '1', 'status': 'they-killed-kenny'}, model.organization) - self.bc.check.calls(Logger.info.call_args_list, [call('The events with the eventbrite id `1` were saved')]) + publish_event_from_eventbrite({"id": "1", "status": "they-killed-kenny"}, model.organization) + self.bc.check.calls(Logger.info.call_args_list, [call("The events with the eventbrite id `1` were saved")]) self.bc.check.calls(Logger.error.call_args_list, []) - self.assertEqual(self.bc.database.list_of('events.Organization'), [ - self.bc.format.to_dict(model.organization), - ]) - self.assertEqual(self.bc.database.list_of('events.Event'), [ - { - **self.bc.format.to_dict(model.event), - 'eventbrite_status': 'they-killed-kenny', - 'eventbrite_sync_description': str(now), - 'eventbrite_sync_status': 'PERSISTED', - 'status': 'ACTIVE', - }, - ]) + self.assertEqual( + self.bc.database.list_of("events.Organization"), + [ + self.bc.format.to_dict(model.organization), + ], + ) + self.assertEqual( + self.bc.database.list_of("events.Event"), + [ + { + **self.bc.format.to_dict(model.event), + "eventbrite_status": "they-killed-kenny", + "eventbrite_sync_description": str(now), + "eventbrite_sync_status": "PERSISTED", + "status": "ACTIVE", + }, + ], + ) diff --git a/breathecode/events/tests/actions/tests_sync_org_events.py b/breathecode/events/tests/actions/tests_sync_org_events.py index 82f41bd88..b529eac82 100644 --- a/breathecode/events/tests/actions/tests_sync_org_events.py +++ b/breathecode/events/tests/actions/tests_sync_org_events.py @@ -10,7 +10,7 @@ sync_org_events = actions.sync_org_events -eventbrite_events_endpoint = get_eventbrite_events_url('1') +eventbrite_events_endpoint = get_eventbrite_events_url("1") def log_mock(): @@ -25,7 +25,7 @@ def update_or_create_event_mock(raise_error=False): def update_or_create_event(self, *args, **kwargs): if raise_error: - raise Exception('Random error in creating') + raise Exception("Random error in creating") return MagicMock(side_effect=update_or_create_event) @@ -34,7 +34,7 @@ def export_event_to_eventbrite_mock(raise_error=False): def export_event_to_eventbrite(self, *args, **kwargs): if raise_error: - raise Exception('Random error getting') + raise Exception("Random error getting") return MagicMock(side_effect=export_event_to_eventbrite) @@ -44,165 +44,182 @@ class SyncOrgVenuesTestSuite(EventTestCase): 🔽🔽🔽 Without academy """ - @patch.object(logging.Logger, 'info', log_mock()) - @patch.object(logging.Logger, 'error', log_mock()) - @patch.object(actions, 'update_or_create_event', update_or_create_event_mock()) - @patch.object(actions, 'export_event_to_eventbrite', export_event_to_eventbrite_mock()) - @patch(REQUESTS_PATH['request'], - apply_requests_request_mock([(200, eventbrite_events_endpoint, EVENTBRITE_EVENTS)])) + @patch.object(logging.Logger, "info", log_mock()) + @patch.object(logging.Logger, "error", log_mock()) + @patch.object(actions, 
"update_or_create_event", update_or_create_event_mock()) + @patch.object(actions, "export_event_to_eventbrite", export_event_to_eventbrite_mock()) + @patch( + REQUESTS_PATH["request"], apply_requests_request_mock([(200, eventbrite_events_endpoint, EVENTBRITE_EVENTS)]) + ) def test_sync_org_events__without_academy(self): """Test /answer without auth""" import logging import breathecode.events.actions as actions - organization_kwargs = {'eventbrite_id': '1'} + organization_kwargs = {"eventbrite_id": "1"} model = self.generate_models(organization=True, organization_kwargs=organization_kwargs) logging.Logger.info.call_args_list = [] - sync_org_events(model['organization']) + sync_org_events(model["organization"]) self.assertEqual(actions.export_event_to_eventbrite.call_args_list, []) self.assertEqual(actions.update_or_create_event.call_args_list, []) self.assertEqual(logging.Logger.info.call_args_list, []) - self.assertEqual(logging.Logger.error.call_args_list, - [call('The organization Nameless not have a academy assigned')]) + self.assertEqual( + logging.Logger.error.call_args_list, [call("The organization Nameless not have a academy assigned")] + ) - self.assertEqual(self.all_organization_dict(), [self.model_to_dict(model, 'organization')]) - self.assertEqual(self.bc.database.list_of('events.Event'), []) + self.assertEqual(self.all_organization_dict(), [self.model_to_dict(model, "organization")]) + self.assertEqual(self.bc.database.list_of("events.Event"), []) """ 🔽🔽🔽 With academy, call update_or_create_event """ - @patch.object(logging.Logger, 'info', log_mock()) - @patch.object(logging.Logger, 'error', log_mock()) - @patch.object(actions, 'update_or_create_event', update_or_create_event_mock()) - @patch.object(actions, 'export_event_to_eventbrite', export_event_to_eventbrite_mock()) - @patch(REQUESTS_PATH['request'], - apply_requests_request_mock([(200, eventbrite_events_endpoint, EVENTBRITE_EVENTS)])) + @patch.object(logging.Logger, "info", log_mock()) + @patch.object(logging.Logger, "error", log_mock()) + @patch.object(actions, "update_or_create_event", update_or_create_event_mock()) + @patch.object(actions, "export_event_to_eventbrite", export_event_to_eventbrite_mock()) + @patch( + REQUESTS_PATH["request"], apply_requests_request_mock([(200, eventbrite_events_endpoint, EVENTBRITE_EVENTS)]) + ) def test_sync_org_events(self): """Test /answer without auth""" import logging import breathecode.events.actions as actions - organization_kwargs = {'eventbrite_id': '1'} + organization_kwargs = {"eventbrite_id": "1"} model = self.generate_models(academy=True, organization=True, organization_kwargs=organization_kwargs) logging.Logger.info.call_args_list = [] - sync_org_events(model['organization']) + sync_org_events(model["organization"]) self.assertEqual(actions.export_event_to_eventbrite.call_args_list, []) - self.assertEqual(actions.update_or_create_event.call_args_list, - [call(EVENTBRITE_EVENTS['events'][0], model.organization)]) + self.assertEqual( + actions.update_or_create_event.call_args_list, [call(EVENTBRITE_EVENTS["events"][0], model.organization)] + ) self.assertEqual(logging.Logger.info.call_args_list, []) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(self.all_organization_dict(), [self.model_to_dict(model, 'organization')]) - self.assertEqual(self.bc.database.list_of('events.Event'), []) + self.assertEqual(self.all_organization_dict(), [self.model_to_dict(model, "organization")]) + self.assertEqual(self.bc.database.list_of("events.Event"), []) """ 
🔽🔽🔽 With academy, raise error """ - @patch.object(logging.Logger, 'info', log_mock()) - @patch.object(logging.Logger, 'error', log_mock()) - @patch.object(actions, 'update_or_create_event', update_or_create_event_mock(raise_error=True)) - @patch.object(actions, 'export_event_to_eventbrite', export_event_to_eventbrite_mock()) - @patch(REQUESTS_PATH['request'], - apply_requests_request_mock([(200, eventbrite_events_endpoint, EVENTBRITE_EVENTS)])) + @patch.object(logging.Logger, "info", log_mock()) + @patch.object(logging.Logger, "error", log_mock()) + @patch.object(actions, "update_or_create_event", update_or_create_event_mock(raise_error=True)) + @patch.object(actions, "export_event_to_eventbrite", export_event_to_eventbrite_mock()) + @patch( + REQUESTS_PATH["request"], apply_requests_request_mock([(200, eventbrite_events_endpoint, EVENTBRITE_EVENTS)]) + ) def test_sync_org_events__raise_error(self): """Test /answer without auth""" import logging import breathecode.events.actions as actions - organization_kwargs = {'eventbrite_id': '1'} + organization_kwargs = {"eventbrite_id": "1"} model = self.generate_models(academy=True, organization=True, organization_kwargs=organization_kwargs) logging.Logger.info.call_args_list = [] with self.assertRaises(Exception) as cm: - sync_org_events(model['organization']) + sync_org_events(model["organization"]) - self.assertEqual(str(cm.exception), 'Random error in creating') + self.assertEqual(str(cm.exception), "Random error in creating") self.assertEqual(actions.export_event_to_eventbrite.call_args_list, []) - self.assertEqual(actions.update_or_create_event.call_args_list, - [call(EVENTBRITE_EVENTS['events'][0], model.organization)]) + self.assertEqual( + actions.update_or_create_event.call_args_list, [call(EVENTBRITE_EVENTS["events"][0], model.organization)] + ) self.assertEqual(logging.Logger.info.call_args_list, []) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(self.all_organization_dict(), [{ - **self.model_to_dict(model, 'organization'), - 'sync_status': 'ERROR', - 'sync_desc': 'Error: Random error in creating', - }]) + self.assertEqual( + self.all_organization_dict(), + [ + { + **self.model_to_dict(model, "organization"), + "sync_status": "ERROR", + "sync_desc": "Error: Random error in creating", + } + ], + ) - self.assertEqual(self.bc.database.list_of('events.Event'), []) + self.assertEqual(self.bc.database.list_of("events.Event"), []) """ 🔽🔽🔽 With academy, call export_event_to_eventbrite, without events """ - @patch.object(logging.Logger, 'info', log_mock()) - @patch.object(logging.Logger, 'error', log_mock()) - @patch.object(actions, 'update_or_create_event', update_or_create_event_mock()) - @patch.object(actions, 'export_event_to_eventbrite', export_event_to_eventbrite_mock()) - @patch(REQUESTS_PATH['request'], - apply_requests_request_mock([(200, eventbrite_events_endpoint, EVENTBRITE_EVENTS)])) + @patch.object(logging.Logger, "info", log_mock()) + @patch.object(logging.Logger, "error", log_mock()) + @patch.object(actions, "update_or_create_event", update_or_create_event_mock()) + @patch.object(actions, "export_event_to_eventbrite", export_event_to_eventbrite_mock()) + @patch( + REQUESTS_PATH["request"], apply_requests_request_mock([(200, eventbrite_events_endpoint, EVENTBRITE_EVENTS)]) + ) def test_sync_org_events__call_export_event_to_eventbrite__without_events(self): """Test /answer without auth""" import logging import breathecode.events.actions as actions - organization_kwargs = {'eventbrite_id': '1'} + 
organization_kwargs = {"eventbrite_id": "1"} model = self.generate_models(academy=True, organization=True, organization_kwargs=organization_kwargs) logging.Logger.info.call_args_list = [] - sync_org_events(model['organization']) + sync_org_events(model["organization"]) self.assertEqual(actions.export_event_to_eventbrite.call_args_list, []) - self.assertEqual(actions.update_or_create_event.call_args_list, - [call(EVENTBRITE_EVENTS['events'][0], model.organization)]) + self.assertEqual( + actions.update_or_create_event.call_args_list, [call(EVENTBRITE_EVENTS["events"][0], model.organization)] + ) self.assertEqual(logging.Logger.info.call_args_list, []) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(self.all_organization_dict(), [self.model_to_dict(model, 'organization')]) - self.assertEqual(self.bc.database.list_of('events.Event'), []) + self.assertEqual(self.all_organization_dict(), [self.model_to_dict(model, "organization")]) + self.assertEqual(self.bc.database.list_of("events.Event"), []) """ 🔽🔽🔽 With academy, call export_event_to_eventbrite, with event """ - @patch.object(logging.Logger, 'info', log_mock()) - @patch.object(logging.Logger, 'error', log_mock()) - @patch.object(actions, 'update_or_create_event', update_or_create_event_mock()) - @patch.object(actions, 'export_event_to_eventbrite', export_event_to_eventbrite_mock()) - @patch(REQUESTS_PATH['request'], - apply_requests_request_mock([(200, eventbrite_events_endpoint, EVENTBRITE_EVENTS)])) + @patch.object(logging.Logger, "info", log_mock()) + @patch.object(logging.Logger, "error", log_mock()) + @patch.object(actions, "update_or_create_event", update_or_create_event_mock()) + @patch.object(actions, "export_event_to_eventbrite", export_event_to_eventbrite_mock()) + @patch( + REQUESTS_PATH["request"], apply_requests_request_mock([(200, eventbrite_events_endpoint, EVENTBRITE_EVENTS)]) + ) def test_sync_org_events__call_export_event_to_eventbrite__with_event(self): """Test /answer without auth""" import logging import breathecode.events.actions as actions - organization_kwargs = {'eventbrite_id': '1'} - event_kwargs = {'sync_with_eventbrite': True} - model = self.generate_models(academy=True, - event=True, - organization=True, - event_kwargs=event_kwargs, - organization_kwargs=organization_kwargs) + organization_kwargs = {"eventbrite_id": "1"} + event_kwargs = {"sync_with_eventbrite": True} + model = self.generate_models( + academy=True, + event=True, + organization=True, + event_kwargs=event_kwargs, + organization_kwargs=organization_kwargs, + ) logging.Logger.info.call_args_list = [] actions.export_event_to_eventbrite.call_args_list = [] - sync_org_events(model['organization']) + sync_org_events(model["organization"]) self.assertEqual(actions.export_event_to_eventbrite.call_args_list, [call(model.event, model.organization)]) - self.assertEqual(actions.update_or_create_event.call_args_list, - [call(EVENTBRITE_EVENTS['events'][0], model.organization)]) + self.assertEqual( + actions.update_or_create_event.call_args_list, [call(EVENTBRITE_EVENTS["events"][0], model.organization)] + ) self.assertEqual(logging.Logger.info.call_args_list, []) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(self.all_organization_dict(), [self.model_to_dict(model, 'organization')]) - self.assertEqual(self.bc.database.list_of('events.Event'), [self.model_to_dict(model, 'event')]) + self.assertEqual(self.all_organization_dict(), [self.model_to_dict(model, "organization")]) + 
self.assertEqual(self.bc.database.list_of("events.Event"), [self.model_to_dict(model, "event")]) diff --git a/breathecode/events/tests/actions/tests_sync_org_venues.py b/breathecode/events/tests/actions/tests_sync_org_venues.py index ca33a39f5..7534531ac 100644 --- a/breathecode/events/tests/actions/tests_sync_org_venues.py +++ b/breathecode/events/tests/actions/tests_sync_org_venues.py @@ -15,7 +15,7 @@ def create_or_update_venue_mock(raise_error=False): def create_or_update_venue(self, *args, **kwargs): if raise_error: - raise Exception('Random error getting') + raise Exception("Random error getting") return MagicMock(side_effect=create_or_update_venue) @@ -25,46 +25,53 @@ class SyncOrgVenuesTestSuite(EventTestCase): 🔽🔽🔽 Without academy """ - @patch.object(actions, 'create_or_update_venue', create_or_update_venue_mock()) - @patch(REQUESTS_PATH['request'], - apply_requests_request_mock([(200, get_eventbrite_venues_url('1'), EVENTBRITE_VENUES)])) + @patch.object(actions, "create_or_update_venue", create_or_update_venue_mock()) + @patch( + REQUESTS_PATH["request"], + apply_requests_request_mock([(200, get_eventbrite_venues_url("1"), EVENTBRITE_VENUES)]), + ) def test_sync_org_venues__without_academy(self): import logging import breathecode.events.actions as actions - organization_kwargs = {'eventbrite_id': '1'} + organization_kwargs = {"eventbrite_id": "1"} model = self.generate_models(organization=True, organization_kwargs=organization_kwargs) logging.Logger.info.call_args_list = [] with self.assertRaises(Exception) as cm: - sync_org_venues(model['organization']) + sync_org_venues(model["organization"]) - self.assertEqual(str(cm.exception), 'First you must specify to which academy this organization belongs') + self.assertEqual(str(cm.exception), "First you must specify to which academy this organization belongs") self.assertEqual(actions.create_or_update_venue.call_args_list, []) - self.assertEqual(self.all_organization_dict(), [self.model_to_dict(model, 'organization')]) + self.assertEqual(self.all_organization_dict(), [self.model_to_dict(model, "organization")]) self.assertEqual(self.all_venue_dict(), []) """ 🔽🔽🔽 With academy """ - @patch.object(actions, 'create_or_update_venue', create_or_update_venue_mock()) - @patch(REQUESTS_PATH['request'], - apply_requests_request_mock([(200, get_eventbrite_venues_url('1'), EVENTBRITE_VENUES)])) + @patch.object(actions, "create_or_update_venue", create_or_update_venue_mock()) + @patch( + REQUESTS_PATH["request"], + apply_requests_request_mock([(200, get_eventbrite_venues_url("1"), EVENTBRITE_VENUES)]), + ) def test_sync_org_venues__with_academy(self): import logging import breathecode.events.actions as actions - organization_kwargs = {'eventbrite_id': '1'} + organization_kwargs = {"eventbrite_id": "1"} model = self.generate_models(academy=True, organization=True, organization_kwargs=organization_kwargs) logging.Logger.info.call_args_list = [] - sync_org_venues(model['organization']) + sync_org_venues(model["organization"]) - self.assertEqual(actions.create_or_update_venue.call_args_list, [ - call(EVENTBRITE_VENUES['venues'][0], model.organization, force_update=True), - ]) + self.assertEqual( + actions.create_or_update_venue.call_args_list, + [ + call(EVENTBRITE_VENUES["venues"][0], model.organization, force_update=True), + ], + ) - self.assertEqual(self.all_organization_dict(), [self.model_to_dict(model, 'organization')]) + self.assertEqual(self.all_organization_dict(), [self.model_to_dict(model, "organization")]) 
self.assertEqual(self.all_venue_dict(), []) diff --git a/breathecode/events/tests/actions/tests_update_event_description_from_eventbrite.py b/breathecode/events/tests/actions/tests_update_event_description_from_eventbrite.py index e91144077..4050f892e 100644 --- a/breathecode/events/tests/actions/tests_update_event_description_from_eventbrite.py +++ b/breathecode/events/tests/actions/tests_update_event_description_from_eventbrite.py @@ -6,18 +6,18 @@ from ..mixins import EventTestCase update_event_description_from_eventbrite = actions.update_event_description_from_eventbrite -sync_desc = '2021-11-23 09:10:58.295264+00:00' -eventbrite_get_url = 'https://www.eventbriteapi.com/v3/events/1/structured_content/' -eventbrite_post_url = 'https://www.eventbriteapi.com/v3/events/1/structured_content/1/' +sync_desc = "2021-11-23 09:10:58.295264+00:00" +eventbrite_get_url = "https://www.eventbriteapi.com/v3/events/1/structured_content/" +eventbrite_post_url = "https://www.eventbriteapi.com/v3/events/1/structured_content/1/" eventbrite_bad_get_event = {} -eventbrite_good_get_event = {'modules': [{'data': {'body': {'text': 'They Killed Kenny'}}}]} +eventbrite_good_get_event = {"modules": [{"data": {"body": {"text": "They Killed Kenny"}}}]} status_map = { - 'draft': 'DRAFT', - 'live': 'ACTIVE', - 'completed': 'COMPLETED', - 'started': 'ACTIVE', - 'ended': 'ACTIVE', - 'canceled': 'DELETED', + "draft": "DRAFT", + "live": "ACTIVE", + "completed": "COMPLETED", + "started": "ACTIVE", + "ended": "ACTIVE", + "canceled": "DELETED", } UTC_NOW = timezone.now() @@ -28,10 +28,10 @@ class SyncOrgVenuesTestSuite(EventTestCase): 🔽🔽🔽 Without Event """ - @patch.object(logging.Logger, 'warning', MagicMock()) - @patch.object(logging.Logger, 'error', MagicMock()) - @patch.object(timezone, 'now', MagicMock(return_value=UTC_NOW)) - @patch(REQUESTS_PATH['request'], apply_requests_request_mock([(200, eventbrite_get_url, eventbrite_bad_get_event)])) + @patch.object(logging.Logger, "warning", MagicMock()) + @patch.object(logging.Logger, "error", MagicMock()) + @patch.object(timezone, "now", MagicMock(return_value=UTC_NOW)) + @patch(REQUESTS_PATH["request"], apply_requests_request_mock([(200, eventbrite_get_url, eventbrite_bad_get_event)])) def test_update_event_description_from_eventbrite__without_event(self): import logging import requests @@ -39,19 +39,19 @@ def test_update_event_description_from_eventbrite__without_event(self): update_event_description_from_eventbrite(None) self.assertEqual(logging.Logger.warning.call_args_list, []) - self.assertEqual(logging.Logger.error.call_args_list, [call('Event is not being provided')]) + self.assertEqual(logging.Logger.error.call_args_list, [call("Event is not being provided")]) - self.assertEqual(self.bc.database.list_of('events.Event'), []) + self.assertEqual(self.bc.database.list_of("events.Event"), []) self.assertEqual(requests.request.call_args_list, []) """ 🔽🔽🔽 Without eventbrite id """ - @patch.object(logging.Logger, 'warning', MagicMock()) - @patch.object(logging.Logger, 'error', MagicMock()) - @patch.object(timezone, 'now', MagicMock(return_value=UTC_NOW)) - @patch(REQUESTS_PATH['request'], apply_requests_request_mock([(200, eventbrite_get_url, eventbrite_bad_get_event)])) + @patch.object(logging.Logger, "warning", MagicMock()) + @patch.object(logging.Logger, "error", MagicMock()) + @patch.object(timezone, "now", MagicMock(return_value=UTC_NOW)) + @patch(REQUESTS_PATH["request"], apply_requests_request_mock([(200, eventbrite_get_url, eventbrite_bad_get_event)])) def 
test_update_event_description_from_eventbrite__without_eventbrite_id(self): import logging import requests @@ -62,87 +62,102 @@ def test_update_event_description_from_eventbrite__without_eventbrite_id(self): update_event_description_from_eventbrite(model.event) self.assertEqual(logging.Logger.warning.call_args_list, []) - self.assertEqual(logging.Logger.error.call_args_list, [ - call('Event 1 not have the integration with eventbrite'), - ]) - - self.assertEqual(self.bc.database.list_of('events.Event'), [db]) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call("Event 1 not have the integration with eventbrite"), + ], + ) + + self.assertEqual(self.bc.database.list_of("events.Event"), [db]) self.assertEqual(requests.request.call_args_list, []) """ 🔽🔽🔽 With Event """ - @patch.object(logging.Logger, 'warning', MagicMock()) - @patch.object(logging.Logger, 'error', MagicMock()) - @patch.object(timezone, 'now', MagicMock(return_value=UTC_NOW)) - @patch(REQUESTS_PATH['request'], apply_requests_request_mock([(200, eventbrite_get_url, eventbrite_bad_get_event)])) + @patch.object(logging.Logger, "warning", MagicMock()) + @patch.object(logging.Logger, "error", MagicMock()) + @patch.object(timezone, "now", MagicMock(return_value=UTC_NOW)) + @patch(REQUESTS_PATH["request"], apply_requests_request_mock([(200, eventbrite_get_url, eventbrite_bad_get_event)])) def test_update_event_description_from_eventbrite__with_event(self): import logging import requests - event = {'eventbrite_id': '1'} + event = {"eventbrite_id": "1"} model = self.generate_models(event=event) db = self.bc.format.to_dict(model.event) update_event_description_from_eventbrite(model.event) self.assertEqual(logging.Logger.warning.call_args_list, []) - self.assertEqual(logging.Logger.error.call_args_list, [ - call('Event 1 not have a organization assigned'), - ]) - - self.assertEqual(self.bc.database.list_of('events.Event'), [db]) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call("Event 1 not have a organization assigned"), + ], + ) + + self.assertEqual(self.bc.database.list_of("events.Event"), [db]) self.assertEqual(requests.request.call_args_list, []) """ 🔽🔽🔽 Without description in eventbrite """ - @patch.object(logging.Logger, 'warning', MagicMock()) - @patch.object(logging.Logger, 'error', MagicMock()) - @patch.object(timezone, 'now', MagicMock(return_value=UTC_NOW)) - @patch(REQUESTS_PATH['request'], apply_requests_request_mock([(200, eventbrite_get_url, eventbrite_bad_get_event)])) + @patch.object(logging.Logger, "warning", MagicMock()) + @patch.object(logging.Logger, "error", MagicMock()) + @patch.object(timezone, "now", MagicMock(return_value=UTC_NOW)) + @patch(REQUESTS_PATH["request"], apply_requests_request_mock([(200, eventbrite_get_url, eventbrite_bad_get_event)])) def test_update_event_description_from_eventbrite__without_event_in_eventbrite(self): import logging import requests - organization = {'eventbrite_id': '1', 'eventbrite_key': 'x'} - event = {'eventbrite_id': '1'} + organization = {"eventbrite_id": "1", "eventbrite_key": "x"} + event = {"eventbrite_id": "1"} model = self.generate_models(event=event, organization=organization) db = self.bc.format.to_dict(model.event) update_event_description_from_eventbrite(model.event) - self.assertEqual(logging.Logger.warning.call_args_list, [ - call('The event 1 is coming from eventbrite not have a description'), - ]) + self.assertEqual( + logging.Logger.warning.call_args_list, + [ + call("The event 1 is coming from eventbrite not have a 
description"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(self.bc.database.list_of('events.Event'), [db]) - self.assertEqual(requests.request.call_args_list, [ - call('GET', - 'https://www.eventbriteapi.com/v3/events/1/structured_content/', - headers={'Authorization': f'Bearer {model.organization.eventbrite_key}'}, - data=None, - timeout=2) - ]) + self.assertEqual(self.bc.database.list_of("events.Event"), [db]) + self.assertEqual( + requests.request.call_args_list, + [ + call( + "GET", + "https://www.eventbriteapi.com/v3/events/1/structured_content/", + headers={"Authorization": f"Bearer {model.organization.eventbrite_key}"}, + data=None, + timeout=2, + ) + ], + ) """ 🔽🔽🔽 With description in eventbrite """ - @patch.object(logging.Logger, 'warning', MagicMock()) - @patch.object(logging.Logger, 'error', MagicMock()) - @patch.object(timezone, 'now', MagicMock(return_value=UTC_NOW)) - @patch(REQUESTS_PATH['request'], - apply_requests_request_mock([(200, eventbrite_get_url, eventbrite_good_get_event)])) + @patch.object(logging.Logger, "warning", MagicMock()) + @patch.object(logging.Logger, "error", MagicMock()) + @patch.object(timezone, "now", MagicMock(return_value=UTC_NOW)) + @patch( + REQUESTS_PATH["request"], apply_requests_request_mock([(200, eventbrite_get_url, eventbrite_good_get_event)]) + ) def test_update_event_description_from_eventbrite__with_event_in_eventbrite(self): import logging import requests - organization = {'eventbrite_id': '1', 'eventbrite_key': 'x'} - event = {'eventbrite_id': '1'} + organization = {"eventbrite_id": "1", "eventbrite_key": "x"} + event = {"eventbrite_id": "1"} model = self.generate_models(event=event, organization=organization) db = self.bc.format.to_dict(model.event) @@ -151,17 +166,27 @@ def test_update_event_description_from_eventbrite__with_event_in_eventbrite(self self.assertEqual(logging.Logger.warning.call_args_list, []) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(self.bc.database.list_of('events.Event'), [{ - **db, - 'description': 'They Killed Kenny', - 'eventbrite_sync_status': 'PERSISTED', - 'eventbrite_sync_description': str(UTC_NOW), - }]) - - self.assertEqual(requests.request.call_args_list, [ - call('GET', - 'https://www.eventbriteapi.com/v3/events/1/structured_content/', - headers={'Authorization': f'Bearer {model.organization.eventbrite_key}'}, - data=None, - timeout=2) - ]) + self.assertEqual( + self.bc.database.list_of("events.Event"), + [ + { + **db, + "description": "They Killed Kenny", + "eventbrite_sync_status": "PERSISTED", + "eventbrite_sync_description": str(UTC_NOW), + } + ], + ) + + self.assertEqual( + requests.request.call_args_list, + [ + call( + "GET", + "https://www.eventbriteapi.com/v3/events/1/structured_content/", + headers={"Authorization": f"Bearer {model.organization.eventbrite_key}"}, + data=None, + timeout=2, + ) + ], + ) diff --git a/breathecode/events/tests/actions/tests_update_or_create_event.py b/breathecode/events/tests/actions/tests_update_or_create_event.py index a8e7b5eb6..a95294a27 100644 --- a/breathecode/events/tests/actions/tests_update_or_create_event.py +++ b/breathecode/events/tests/actions/tests_update_or_create_event.py @@ -12,14 +12,14 @@ from ..mixins import EventTestCase update_or_create_event = actions.update_or_create_event -sync_desc = '2021-11-23 09:10:58.295264+00:00' +sync_desc = "2021-11-23 09:10:58.295264+00:00" status_map = { - 'draft': 'DRAFT', - 'live': 'ACTIVE', - 'completed': 'COMPLETED', - 'started': 
'ACTIVE', - 'ended': 'ACTIVE', - 'canceled': 'DELETED', + "draft": "DRAFT", + "live": "ACTIVE", + "completed": "COMPLETED", + "started": "ACTIVE", + "ended": "ACTIVE", + "canceled": "DELETED", } seed = os.urandom(16) @@ -63,222 +63,235 @@ class SyncOrgVenuesTestSuite(EventTestCase): 🔽🔽🔽 Data is None """ - @patch.object(logging.Logger, 'warning', log_mock()) - @patch.object(logging.Logger, 'error', log_mock()) - @patch.object(actions, 'create_or_update_venue', create_or_update_venue_mock()) - @patch.object(actions, 'create_or_update_organizer', create_or_update_organizer_mock()) - @patch.object(actions, 'update_event_description_from_eventbrite', MagicMock()) + @patch.object(logging.Logger, "warning", log_mock()) + @patch.object(logging.Logger, "error", log_mock()) + @patch.object(actions, "create_or_update_venue", create_or_update_venue_mock()) + @patch.object(actions, "create_or_update_organizer", create_or_update_organizer_mock()) + @patch.object(actions, "update_event_description_from_eventbrite", MagicMock()) def test_update_or_create_event__data_is_none(self): import logging import breathecode.events.actions as actions - organization_kwargs = {'eventbrite_id': '1'} + organization_kwargs = {"eventbrite_id": "1"} model = self.generate_models(organization=True, organization_kwargs=organization_kwargs) update_or_create_event(None, model.organization) - self.assertEqual(logging.Logger.warning.call_args_list, [call('Ignored event')]) + self.assertEqual(logging.Logger.warning.call_args_list, [call("Ignored event")]) self.assertEqual(logging.Logger.error.call_args_list, []) self.assertEqual(actions.create_or_update_venue.call_args_list, []) self.assertEqual(actions.create_or_update_organizer.call_args_list, []) self.assertEqual(actions.update_event_description_from_eventbrite.call_args_list, []) - self.assertEqual(self.all_organization_dict(), [self.model_to_dict(model, 'organization')]) - self.assertEqual(self.bc.database.list_of('events.Event'), []) + self.assertEqual(self.all_organization_dict(), [self.model_to_dict(model, "organization")]) + self.assertEqual(self.bc.database.list_of("events.Event"), []) """ 🔽🔽🔽 Without academy """ - @patch.object(logging.Logger, 'warning', log_mock()) - @patch.object(logging.Logger, 'error', log_mock()) - @patch.object(actions, 'create_or_update_venue', create_or_update_venue_mock()) - @patch.object(actions, 'create_or_update_organizer', create_or_update_organizer_mock()) - @patch.object(actions, 'update_event_description_from_eventbrite', MagicMock()) + @patch.object(logging.Logger, "warning", log_mock()) + @patch.object(logging.Logger, "error", log_mock()) + @patch.object(actions, "create_or_update_venue", create_or_update_venue_mock()) + @patch.object(actions, "create_or_update_organizer", create_or_update_organizer_mock()) + @patch.object(actions, "update_event_description_from_eventbrite", MagicMock()) def test_update_or_create_event__without_academy(self): import logging import breathecode.events.actions as actions - organization_kwargs = {'eventbrite_id': '1'} + organization_kwargs = {"eventbrite_id": "1"} model = self.generate_models(organization=True, organization_kwargs=organization_kwargs) - update_or_create_event(EVENTBRITE_EVENTS['events'][0], model.organization) + update_or_create_event(EVENTBRITE_EVENTS["events"][0], model.organization) self.assertEqual(logging.Logger.warning.call_args_list, []) - self.assertEqual(logging.Logger.error.call_args_list, - [call('The organization Nameless not have a academy assigned')]) + self.assertEqual( + 
logging.Logger.error.call_args_list, [call("The organization Nameless not have a academy assigned")] + ) self.assertEqual(actions.create_or_update_venue.call_args_list, []) self.assertEqual(actions.create_or_update_organizer.call_args_list, []) self.assertEqual(actions.update_event_description_from_eventbrite.call_args_list, []) - self.assertEqual(self.all_organization_dict(), [self.model_to_dict(model, 'organization')]) - self.assertEqual(self.bc.database.list_of('events.Event'), []) + self.assertEqual(self.all_organization_dict(), [self.model_to_dict(model, "organization")]) + self.assertEqual(self.bc.database.list_of("events.Event"), []) """ 🔽🔽🔽 With academy """ - @patch.object(logging.Logger, 'warning', log_mock()) - @patch.object(logging.Logger, 'error', log_mock()) - @patch.object(actions, 'get_current_iso_string', get_current_iso_string_mock()) - @patch.object(actions, 'create_or_update_venue', create_or_update_venue_mock()) - @patch.object(actions, 'create_or_update_organizer', create_or_update_organizer_mock()) - @patch.object(actions, 'update_event_description_from_eventbrite', MagicMock()) - @patch('breathecode.events.signals.event_saved.send_robust', MagicMock()) - @patch('uuid.uuid4', PropertyMock(MagicMock=uuid)) - @patch('os.urandom', MagicMock(return_value=seed)) + @patch.object(logging.Logger, "warning", log_mock()) + @patch.object(logging.Logger, "error", log_mock()) + @patch.object(actions, "get_current_iso_string", get_current_iso_string_mock()) + @patch.object(actions, "create_or_update_venue", create_or_update_venue_mock()) + @patch.object(actions, "create_or_update_organizer", create_or_update_organizer_mock()) + @patch.object(actions, "update_event_description_from_eventbrite", MagicMock()) + @patch("breathecode.events.signals.event_saved.send_robust", MagicMock()) + @patch("uuid.uuid4", PropertyMock(MagicMock=uuid)) + @patch("os.urandom", MagicMock(return_value=seed)) def test_update_or_create_event__with_academy(self): import logging import breathecode.events.actions as actions - organization_kwargs = {'eventbrite_id': '1'} + organization_kwargs = {"eventbrite_id": "1"} model = self.generate_models(academy=True, organization=True, organization_kwargs=organization_kwargs) - update_or_create_event(EVENTBRITE_EVENTS['events'][0], model.organization) - event = EVENTBRITE_EVENTS['events'][0] + update_or_create_event(EVENTBRITE_EVENTS["events"][0], model.organization) + event = EVENTBRITE_EVENTS["events"][0] self.assertEqual(logging.Logger.warning.call_args_list, []) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(actions.create_or_update_venue.call_args_list, [call(event['venue'], model.organization)]) - self.assertEqual(actions.create_or_update_organizer.call_args_list, - [call(event['organizer'], model.organization, force_update=True)]) - - self.assertEqual(actions.update_event_description_from_eventbrite.call_args_list, [ - call(self.bc.database.get('events.Event', 1, dict=False)), - ]) - self.assertEqual(self.all_organization_dict(), [self.model_to_dict(model, 'organization')]) - - event = EVENTBRITE_EVENTS['events'][0] + self.assertEqual(actions.create_or_update_venue.call_args_list, [call(event["venue"], model.organization)]) + self.assertEqual( + actions.create_or_update_organizer.call_args_list, + [call(event["organizer"], model.organization, force_update=True)], + ) + + self.assertEqual( + actions.update_event_description_from_eventbrite.call_args_list, + [ + call(self.bc.database.get("events.Event", 1, dict=False)), + ], + ) + 
self.assertEqual(self.all_organization_dict(), [self.model_to_dict(model, "organization")]) + + event = EVENTBRITE_EVENTS["events"][0] kwargs = { - 'id': 1, - 'description': None, - 'excerpt': event['description']['text'], - 'title': event['name']['text'], - 'lang': None, - 'url': event['url'], - 'banner': event['logo']['url'], - 'tags': '', - 'slug': f'geektalks-presentacion-de-proyectos-finales-{uuid}', - 'capacity': event['capacity'], - 'currency': event['currency'], - 'starting_at': self.iso_to_datetime(event['start']['utc']), - 'ending_at': self.iso_to_datetime(event['end']['utc']), - 'ended_at': None, - 'host': None, - 'academy_id': 1, - 'organization_id': model.organization.id, - 'author_id': None, - 'online_event': event['online_event'], - 'venue_id': None, - 'event_type_id': None, - 'eventbrite_id': event['id'], - 'eventbrite_url': event['url'], - 'status': status_map[event['status']], - 'eventbrite_status': event['status'], + "id": 1, + "description": None, + "excerpt": event["description"]["text"], + "title": event["name"]["text"], + "lang": None, + "url": event["url"], + "banner": event["logo"]["url"], + "tags": "", + "slug": f"geektalks-presentacion-de-proyectos-finales-{uuid}", + "capacity": event["capacity"], + "currency": event["currency"], + "starting_at": self.iso_to_datetime(event["start"]["utc"]), + "ending_at": self.iso_to_datetime(event["end"]["utc"]), + "ended_at": None, + "host": None, + "academy_id": 1, + "organization_id": model.organization.id, + "author_id": None, + "online_event": event["online_event"], + "venue_id": None, + "event_type_id": None, + "eventbrite_id": event["id"], + "eventbrite_url": event["url"], + "status": status_map[event["status"]], + "eventbrite_status": event["status"], # organizer: organizer, - 'published_at': self.iso_to_datetime(event['published']), - 'sync_with_eventbrite': True, - 'eventbrite_sync_status': 'PERSISTED', - 'eventbrite_organizer_id': None, - 'live_stream_url': None, - 'eventbrite_sync_description': '2021-11-23 09:10:58.295264+00:00', - 'host_user_id': None, - 'free_for_bootcamps': True, - 'live_stream_url': None, - 'asset_slug': None, - 'free_for_all': False, - 'uuid': uuid, + "published_at": self.iso_to_datetime(event["published"]), + "sync_with_eventbrite": True, + "eventbrite_sync_status": "PERSISTED", + "eventbrite_organizer_id": None, + "live_stream_url": None, + "eventbrite_sync_description": "2021-11-23 09:10:58.295264+00:00", + "host_user_id": None, + "free_for_bootcamps": True, + "live_stream_url": None, + "asset_slug": None, + "free_for_all": False, + "uuid": uuid, } - self.assertEqual(self.bc.database.list_of('events.Event'), [kwargs]) + self.assertEqual(self.bc.database.list_of("events.Event"), [kwargs]) """ 🔽🔽🔽 With academy and event """ - @patch.object(logging.Logger, 'warning', log_mock()) - @patch.object(logging.Logger, 'error', log_mock()) - @patch.object(actions, 'get_current_iso_string', get_current_iso_string_mock()) - @patch.object(actions, 'create_or_update_venue', create_or_update_venue_mock()) - @patch.object(actions, 'create_or_update_organizer', create_or_update_organizer_mock()) - @patch.object(actions, 'update_event_description_from_eventbrite', MagicMock()) - @patch('uuid.uuid4', PropertyMock(MagicMock=uuid)) - @patch('os.urandom', MagicMock(return_value=seed)) + @patch.object(logging.Logger, "warning", log_mock()) + @patch.object(logging.Logger, "error", log_mock()) + @patch.object(actions, "get_current_iso_string", get_current_iso_string_mock()) + @patch.object(actions, 
"create_or_update_venue", create_or_update_venue_mock()) + @patch.object(actions, "create_or_update_organizer", create_or_update_organizer_mock()) + @patch.object(actions, "update_event_description_from_eventbrite", MagicMock()) + @patch("uuid.uuid4", PropertyMock(MagicMock=uuid)) + @patch("os.urandom", MagicMock(return_value=seed)) def test_update_or_create_event__with_event(self): import logging import breathecode.events.actions as actions - organization_kwargs = {'eventbrite_id': '1'} - event_kwargs = {'eventbrite_id': '1'} - model = self.generate_models(event=True, - academy=True, - organization=True, - event_kwargs=event_kwargs, - organization_kwargs=organization_kwargs) + organization_kwargs = {"eventbrite_id": "1"} + event_kwargs = {"eventbrite_id": "1"} + model = self.generate_models( + event=True, + academy=True, + organization=True, + event_kwargs=event_kwargs, + organization_kwargs=organization_kwargs, + ) - update_or_create_event(EVENTBRITE_EVENTS['events'][0], model.organization) - event = EVENTBRITE_EVENTS['events'][0] + update_or_create_event(EVENTBRITE_EVENTS["events"][0], model.organization) + event = EVENTBRITE_EVENTS["events"][0] self.assertEqual(logging.Logger.warning.call_args_list, []) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(actions.create_or_update_venue.call_args_list, [call(event['venue'], model.organization)]) - self.assertEqual(actions.create_or_update_organizer.call_args_list, - [call(event['organizer'], model.organization, force_update=True)]) + self.assertEqual(actions.create_or_update_venue.call_args_list, [call(event["venue"], model.organization)]) + self.assertEqual( + actions.create_or_update_organizer.call_args_list, + [call(event["organizer"], model.organization, force_update=True)], + ) - self.assertEqual(actions.update_event_description_from_eventbrite.call_args_list, [ - call(model.event), - ]) + self.assertEqual( + actions.update_event_description_from_eventbrite.call_args_list, + [ + call(model.event), + ], + ) - self.assertEqual(self.all_organization_dict(), [self.model_to_dict(model, 'organization')]) + self.assertEqual(self.all_organization_dict(), [self.model_to_dict(model, "organization")]) - event = EVENTBRITE_EVENTS['events'][0] + event = EVENTBRITE_EVENTS["events"][0] kwargs = { - 'id': 1, - 'description': None, - 'excerpt': event['description']['text'], - 'title': event['name']['text'], - 'currency': event['currency'], - 'lang': None, - 'url': EVENTBRITE_EVENTS['events'][0]['url'], - 'banner': event['logo']['url'], - 'capacity': event['capacity'], - 'tags': '', - 'slug': f'geektalks-presentacion-de-proyectos-finales-{uuid}', - 'starting_at': self.iso_to_datetime(event['start']['utc']), - 'ending_at': self.iso_to_datetime(event['end']['utc']), - 'ended_at': None, - 'host': None, - 'academy_id': 1, - 'organization_id': model.organization.id, - 'author_id': None, - 'online_event': event['online_event'], - 'venue_id': None, - 'event_type_id': None, - 'eventbrite_id': event['id'], - 'eventbrite_url': event['url'], - 'status': status_map[event['status']], - 'eventbrite_status': event['status'], + "id": 1, + "description": None, + "excerpt": event["description"]["text"], + "title": event["name"]["text"], + "currency": event["currency"], + "lang": None, + "url": EVENTBRITE_EVENTS["events"][0]["url"], + "banner": event["logo"]["url"], + "capacity": event["capacity"], + "tags": "", + "slug": f"geektalks-presentacion-de-proyectos-finales-{uuid}", + "starting_at": self.iso_to_datetime(event["start"]["utc"]), + 
"ending_at": self.iso_to_datetime(event["end"]["utc"]), + "ended_at": None, + "host": None, + "academy_id": 1, + "organization_id": model.organization.id, + "author_id": None, + "online_event": event["online_event"], + "venue_id": None, + "event_type_id": None, + "eventbrite_id": event["id"], + "eventbrite_url": event["url"], + "status": status_map[event["status"]], + "eventbrite_status": event["status"], # organizer: organizer, - 'published_at': self.iso_to_datetime(event['published']), - 'sync_with_eventbrite': False, - 'eventbrite_sync_status': 'PERSISTED', - 'eventbrite_organizer_id': None, - 'live_stream_url': None, - 'eventbrite_sync_description': '2021-11-23 09:10:58.295264+00:00', - 'host_user_id': None, - 'free_for_bootcamps': True, - 'live_stream_url': None, - 'asset_slug': None, - 'free_for_all': False, - 'uuid': uuid, + "published_at": self.iso_to_datetime(event["published"]), + "sync_with_eventbrite": False, + "eventbrite_sync_status": "PERSISTED", + "eventbrite_organizer_id": None, + "live_stream_url": None, + "eventbrite_sync_description": "2021-11-23 09:10:58.295264+00:00", + "host_user_id": None, + "free_for_bootcamps": True, + "live_stream_url": None, + "asset_slug": None, + "free_for_all": False, + "uuid": uuid, } - self.assertEqual(self.bc.database.list_of('events.Event'), [kwargs]) + self.assertEqual(self.bc.database.list_of("events.Event"), [kwargs]) diff --git a/breathecode/events/tests/admin/tests_reattempt_add_event_slug_as_acp_tag.py b/breathecode/events/tests/admin/tests_reattempt_add_event_slug_as_acp_tag.py index 941d492a0..3c613ddeb 100644 --- a/breathecode/events/tests/admin/tests_reattempt_add_event_slug_as_acp_tag.py +++ b/breathecode/events/tests/admin/tests_reattempt_add_event_slug_as_acp_tag.py @@ -13,10 +13,10 @@ class ParseDateAdminTestSuite(EventTestCase): 🔽🔽🔽 With zero Event """ - @patch('breathecode.marketing.tasks.add_event_slug_as_acp_tag.delay', MagicMock()) + @patch("breathecode.marketing.tasks.add_event_slug_as_acp_tag.delay", MagicMock()) def test_with_zero_events(self): - Event = self.bc.database.get_model('events.Event') + Event = self.bc.database.get_model("events.Event") queryset = Event.objects.filter() reattempt_add_event_slug_as_acp_tag(None, None, queryset) @@ -27,12 +27,12 @@ def test_with_zero_events(self): 🔽🔽🔽 With two Event """ - @patch('breathecode.marketing.tasks.add_event_slug_as_acp_tag.delay', MagicMock()) + @patch("breathecode.marketing.tasks.add_event_slug_as_acp_tag.delay", MagicMock()) def test_with_two_event(self): self.bc.database.create(event=2) - Event = self.bc.database.get_model('events.Event') + Event = self.bc.database.get_model("events.Event") queryset = Event.objects.filter() reattempt_add_event_slug_as_acp_tag(None, None, queryset) @@ -43,12 +43,12 @@ def test_with_two_event(self): 🔽🔽🔽 With zero Event with Academy """ - @patch('breathecode.marketing.tasks.add_event_slug_as_acp_tag.delay', MagicMock()) + @patch("breathecode.marketing.tasks.add_event_slug_as_acp_tag.delay", MagicMock()) def test_with_zero_events__with_academy(self): self.bc.database.create(academy=1) - Event = self.bc.database.get_model('events.Event') + Event = self.bc.database.get_model("events.Event") queryset = Event.objects.filter() reattempt_add_event_slug_as_acp_tag(None, None, queryset) @@ -59,17 +59,20 @@ def test_with_zero_events__with_academy(self): 🔽🔽🔽 With two Event with Academy """ - @patch('breathecode.marketing.tasks.add_event_slug_as_acp_tag.delay', MagicMock()) + 
@patch("breathecode.marketing.tasks.add_event_slug_as_acp_tag.delay", MagicMock()) def test_with_two_events__with_academy(self): self.bc.database.create(event=2, academy=1) - Event = self.bc.database.get_model('events.Event') + Event = self.bc.database.get_model("events.Event") queryset = Event.objects.filter() reattempt_add_event_slug_as_acp_tag(None, None, queryset) - self.assertEqual(tasks.add_event_slug_as_acp_tag.delay.call_args_list, [ - call(1, 1, force=True), - call(2, 1, force=True), - ]) + self.assertEqual( + tasks.add_event_slug_as_acp_tag.delay.call_args_list, + [ + call(1, 1, force=True), + call(2, 1, force=True), + ], + ) diff --git a/breathecode/events/tests/admin/tests_reattempt_eventbrite_webhook.py b/breathecode/events/tests/admin/tests_reattempt_eventbrite_webhook.py index 1c64284ba..1922403f8 100644 --- a/breathecode/events/tests/admin/tests_reattempt_eventbrite_webhook.py +++ b/breathecode/events/tests/admin/tests_reattempt_eventbrite_webhook.py @@ -11,31 +11,31 @@ class SyncOrgVenuesTestSuite(EventTestCase): 🔽🔽🔽 With zero EventbriteWebhook """ - @patch('breathecode.events.tasks.async_eventbrite_webhook.delay', MagicMock()) + @patch("breathecode.events.tasks.async_eventbrite_webhook.delay", MagicMock()) def test__with_zero_eventbrite_webwooks(self): - EventbriteWebhook = self.bc.database.get_model('events.EventbriteWebhook') + EventbriteWebhook = self.bc.database.get_model("events.EventbriteWebhook") queryset = EventbriteWebhook.objects.filter() reattempt_eventbrite_webhook(None, None, queryset) - self.assertEqual(self.bc.database.list_of('events.EventbriteWebhook'), []) + self.assertEqual(self.bc.database.list_of("events.EventbriteWebhook"), []) self.assertEqual(tasks.async_eventbrite_webhook.delay.call_args_list, []) """ 🔽🔽🔽 With two EventbriteWebhook """ - @patch('breathecode.events.tasks.async_eventbrite_webhook.delay', MagicMock()) + @patch("breathecode.events.tasks.async_eventbrite_webhook.delay", MagicMock()) def test__with_two_eventbrite_webwooks(self): self.bc.database.create(eventbrite_webhook=2) - EventbriteWebhook = self.bc.database.get_model('events.EventbriteWebhook') + EventbriteWebhook = self.bc.database.get_model("events.EventbriteWebhook") queryset = EventbriteWebhook.objects.filter() reattempt_eventbrite_webhook(None, None, queryset) self.assertEqual( - self.bc.database.list_of('events.EventbriteWebhook'), + self.bc.database.list_of("events.EventbriteWebhook"), self.bc.format.to_dict(queryset), ) self.assertEqual(tasks.async_eventbrite_webhook.delay.call_args_list, [call(1), call(2)]) diff --git a/breathecode/events/tests/management/commands/tests_build_live_classes.py b/breathecode/events/tests/management/commands/tests_build_live_classes.py index 077c393eb..8deb38cd8 100644 --- a/breathecode/events/tests/management/commands/tests_build_live_classes.py +++ b/breathecode/events/tests/management/commands/tests_build_live_classes.py @@ -14,21 +14,21 @@ class SyncOrgVenuesTestSuite(EventTestCase): 🔽🔽🔽 With zero CohortTimeSlot """ - @patch('breathecode.events.tasks.build_live_classes_from_timeslot.delay', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("breathecode.events.tasks.build_live_classes_from_timeslot.delay", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_without_timeslots(self): command = Command() command.handle() - self.assertEqual(self.bc.database.list_of('events.LiveClass'), []) + self.assertEqual(self.bc.database.list_of("events.LiveClass"), []) 
self.assertEqual(tasks.build_live_classes_from_timeslot.delay.call_args_list, []) """ 🔽🔽🔽 With invalid Cohort """ - @patch('breathecode.events.tasks.build_live_classes_from_timeslot.delay', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("breathecode.events.tasks.build_live_classes_from_timeslot.delay", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_with_invalid_cohort(self): cases = [ (UTC_NOW - timedelta(seconds=random.randint(1, 1000)), False), @@ -36,13 +36,13 @@ def test_with_invalid_cohort(self): ] index = 0 for ending_date, never_ends in cases: - cohort = {'ending_date': ending_date, 'never_ends': never_ends} - cohort_time_slots = [{'cohort_id': n + index * 2} for n in range(1, 3)] + cohort = {"ending_date": ending_date, "never_ends": never_ends} + cohort_time_slots = [{"cohort_id": n + index * 2} for n in range(1, 3)] self.bc.database.create(cohort_time_slot=cohort_time_slots, cohort=(2, cohort)) command = Command() command.handle() - self.assertEqual(self.bc.database.list_of('events.LiveClass'), []) + self.assertEqual(self.bc.database.list_of("events.LiveClass"), []) self.assertEqual(tasks.build_live_classes_from_timeslot.delay.call_args_list, []) index += 1 @@ -50,14 +50,14 @@ def test_with_invalid_cohort(self): 🔽🔽🔽 With invalid Cohort """ - @patch('breathecode.events.tasks.build_live_classes_from_timeslot.delay', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("breathecode.events.tasks.build_live_classes_from_timeslot.delay", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_with_right_cohort(self): ending_date = UTC_NOW + timedelta(seconds=random.randint(1, 1000)) never_ends = False - cohort = {'ending_date': ending_date, 'never_ends': never_ends} - cohort_time_slots = [{'cohort_id': n} for n in range(1, 3)] + cohort = {"ending_date": ending_date, "never_ends": never_ends} + cohort_time_slots = [{"cohort_id": n} for n in range(1, 3)] self.bc.database.create(cohort_time_slot=cohort_time_slots, cohort=(2, cohort)) tasks.build_live_classes_from_timeslot.delay.call_args_list = [] @@ -65,5 +65,5 @@ def test_with_right_cohort(self): command = Command() command.handle() - self.assertEqual(self.bc.database.list_of('events.LiveClass'), []) + self.assertEqual(self.bc.database.list_of("events.LiveClass"), []) self.assertEqual(tasks.build_live_classes_from_timeslot.delay.call_args_list, [call(1), call(2)]) diff --git a/breathecode/events/tests/management/commands/tests_close_live_classes.py b/breathecode/events/tests/management/commands/tests_close_live_classes.py index ee4ae7780..7de89ec11 100644 --- a/breathecode/events/tests/management/commands/tests_close_live_classes.py +++ b/breathecode/events/tests/management/commands/tests_close_live_classes.py @@ -15,72 +15,87 @@ class SyncOrgVenuesTestSuite(EventTestCase): 🔽🔽🔽 With zero LiveClass """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_without_live_classes(self): command = Command() command.handle() - self.assertEqual(self.bc.database.list_of('events.LiveClass'), []) + self.assertEqual(self.bc.database.list_of("events.LiveClass"), []) """ 🔽🔽🔽 With two LiveClass before ending_at + 30 minutes """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW - DELTA)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW - DELTA)) def 
test_with_two_live_classes_before_ending_at_more_30_minutes__started_at_null(self): - live_classes = [{ - 'ending_at': UTC_NOW, - } for n in range(1, 3)] + live_classes = [ + { + "ending_at": UTC_NOW, + } + for n in range(1, 3) + ] model = self.bc.database.create(live_class=live_classes) command = Command() command.handle() - self.assertEqual(self.bc.database.list_of('events.LiveClass'), self.bc.format.to_dict(model.live_class)) + self.assertEqual(self.bc.database.list_of("events.LiveClass"), self.bc.format.to_dict(model.live_class)) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW - DELTA)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW - DELTA)) def test_with_two_live_classes_before_ending_at_more_30_minutes__started_at_set(self): - live_classes = [{ - 'started_at': UTC_NOW, - 'ending_at': UTC_NOW, - } for n in range(1, 3)] + live_classes = [ + { + "started_at": UTC_NOW, + "ending_at": UTC_NOW, + } + for n in range(1, 3) + ] model = self.bc.database.create(live_class=live_classes) command = Command() command.handle() - self.assertEqual(self.bc.database.list_of('events.LiveClass'), self.bc.format.to_dict(model.live_class)) + self.assertEqual(self.bc.database.list_of("events.LiveClass"), self.bc.format.to_dict(model.live_class)) """ 🔽🔽🔽 With two LiveClass after ending_at + 30 minutes """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW + DELTA)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW + DELTA)) def test_with_two_live_classes_after_ending_at_more_30_minutes__started_at_null(self): - live_classes = [{ - 'ending_at': UTC_NOW, - } for n in range(1, 3)] + live_classes = [ + { + "ending_at": UTC_NOW, + } + for n in range(1, 3) + ] model = self.bc.database.create(live_class=live_classes) command = Command() command.handle() - self.assertEqual(self.bc.database.list_of('events.LiveClass'), self.bc.format.to_dict(model.live_class)) + self.assertEqual(self.bc.database.list_of("events.LiveClass"), self.bc.format.to_dict(model.live_class)) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW + DELTA)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW + DELTA)) def test_with_two_live_classes_after_ending_at_more_30_minutes__started_at_set(self): - live_classes = [{ - 'started_at': UTC_NOW, - 'ending_at': UTC_NOW, - } for n in range(1, 3)] + live_classes = [ + { + "started_at": UTC_NOW, + "ending_at": UTC_NOW, + } + for n in range(1, 3) + ] model = self.bc.database.create(live_class=live_classes) command = Command() command.handle() self.assertEqual( - self.bc.database.list_of('events.LiveClass'), - [{ - **self.bc.format.to_dict(model.live_class[0]), - 'ended_at': UTC_NOW + timedelta(minutes=30), - }, { - **self.bc.format.to_dict(model.live_class[1]), - 'ended_at': UTC_NOW + timedelta(minutes=30), - }], + self.bc.database.list_of("events.LiveClass"), + [ + { + **self.bc.format.to_dict(model.live_class[0]), + "ended_at": UTC_NOW + timedelta(minutes=30), + }, + { + **self.bc.format.to_dict(model.live_class[1]), + "ended_at": UTC_NOW + timedelta(minutes=30), + }, + ], ) diff --git a/breathecode/events/tests/management/commands/tests_fix_live_class_dates.py b/breathecode/events/tests/management/commands/tests_fix_live_class_dates.py index 41deff65b..9d48a76e0 100644 --- a/breathecode/events/tests/management/commands/tests_fix_live_class_dates.py +++ b/breathecode/events/tests/management/commands/tests_fix_live_class_dates.py @@ -17,110 +17,132 @@ class 
TestSyncOrgVenues(LegacyAPITestCase): # When: no LiveClass and no Cohort exists # Then: nothing should happen - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.events.tasks.fix_live_class_dates.delay', MagicMock()) - @patch('breathecode.admissions.signals.timeslot_saved.send_robust', MagicMock()) - @patch.object(sys.stdout, 'write', MagicMock()) - @patch.object(sys.stderr, 'write', MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.events.tasks.fix_live_class_dates.delay", MagicMock()) + @patch("breathecode.admissions.signals.timeslot_saved.send_robust", MagicMock()) + @patch.object(sys.stdout, "write", MagicMock()) + @patch.object(sys.stderr, "write", MagicMock()) def test_0_live_classes(self): command = Command() command.handle() - self.assertEqual(self.bc.database.list_of('events.LiveClass'), []) + self.assertEqual(self.bc.database.list_of("events.LiveClass"), []) self.bc.check.calls(tasks.fix_live_class_dates.delay.call_args_list, []) - self.bc.check.calls(sys.stdout.write.call_args_list, [ - call('Found 0 cohorts that have not finished and should have live classes\n'), - ]) + self.bc.check.calls( + sys.stdout.write.call_args_list, + [ + call("Found 0 cohorts that have not finished and should have live classes\n"), + ], + ) self.bc.check.calls(sys.stderr.write.call_args_list, []) # When: a Cohort exists and it's starting_at is less than now # Then: nothing should happen - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.events.tasks.fix_live_class_dates.delay', MagicMock()) - @patch('breathecode.admissions.signals.timeslot_saved.send_robust', MagicMock()) - @patch.object(sys.stdout, 'write', MagicMock()) - @patch.object(sys.stderr, 'write', MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.events.tasks.fix_live_class_dates.delay", MagicMock()) + @patch("breathecode.admissions.signals.timeslot_saved.send_robust", MagicMock()) + @patch.object(sys.stdout, "write", MagicMock()) + @patch.object(sys.stderr, "write", MagicMock()) def test_2_cohorts__in_the_past(self): - cohorts = [{'never_ends': False, 'ending_date': UTC_NOW - DELTA} for _ in range(2)] + cohorts = [{"never_ends": False, "ending_date": UTC_NOW - DELTA} for _ in range(2)] model = self.bc.database.create(cohort=cohorts) command = Command() command.handle() - self.assertEqual(self.bc.database.list_of('events.LiveClass'), []) + self.assertEqual(self.bc.database.list_of("events.LiveClass"), []) self.assertEqual( - self.bc.database.list_of('admissions.Cohort'), + self.bc.database.list_of("admissions.Cohort"), self.bc.format.to_dict(model.cohort), ) self.bc.check.calls(tasks.fix_live_class_dates.delay.call_args_list, []) - self.bc.check.calls(sys.stdout.write.call_args_list, [ - call('Found 0 cohorts that have not finished and should have live classes\n'), - ]) + self.bc.check.calls( + sys.stdout.write.call_args_list, + [ + call("Found 0 cohorts that have not finished and should have live classes\n"), + ], + ) self.bc.check.calls(sys.stderr.write.call_args_list, []) # When: a Cohort exists and it's starting_at is less than now # Then: found 2 cohorts without timeslots - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.events.tasks.fix_live_class_dates.delay', MagicMock()) - @patch('breathecode.admissions.signals.timeslot_saved.send_robust', MagicMock()) - @patch.object(sys.stdout, 'write', 
MagicMock()) - @patch.object(sys.stderr, 'write', MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.events.tasks.fix_live_class_dates.delay", MagicMock()) + @patch("breathecode.admissions.signals.timeslot_saved.send_robust", MagicMock()) + @patch.object(sys.stdout, "write", MagicMock()) + @patch.object(sys.stderr, "write", MagicMock()) def test_2_cohorts__in_the_future(self): - cohorts = [{'never_ends': False, 'ending_date': UTC_NOW + DELTA} for _ in range(2)] + cohorts = [{"never_ends": False, "ending_date": UTC_NOW + DELTA} for _ in range(2)] model = self.bc.database.create(cohort=cohorts) command = Command() command.handle() - self.assertEqual(self.bc.database.list_of('events.LiveClass'), []) + self.assertEqual(self.bc.database.list_of("events.LiveClass"), []) self.assertEqual( - self.bc.database.list_of('admissions.Cohort'), + self.bc.database.list_of("admissions.Cohort"), self.bc.format.to_dict(model.cohort), ) self.bc.check.calls(tasks.fix_live_class_dates.delay.call_args_list, []) - self.bc.check.calls(sys.stdout.write.call_args_list, [ - call('Found 2 cohorts that have not finished and should have live classes\n'), - ]) + self.bc.check.calls( + sys.stdout.write.call_args_list, + [ + call("Found 2 cohorts that have not finished and should have live classes\n"), + ], + ) self.bc.check.calls(sys.stderr.write.call_args_list, []) # When: 2 LiveClass and 2 Cohort exists and it's starting_at is greater than now # Then: nothing should happen - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.events.tasks.fix_live_class_dates.delay', MagicMock()) - @patch('breathecode.admissions.signals.timeslot_saved.send_robust', MagicMock()) - @patch.object(sys.stdout, 'write', MagicMock()) - @patch.object(sys.stderr, 'write', MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.events.tasks.fix_live_class_dates.delay", MagicMock()) + @patch("breathecode.admissions.signals.timeslot_saved.send_robust", MagicMock()) + @patch.object(sys.stdout, "write", MagicMock()) + @patch.object(sys.stderr, "write", MagicMock()) def test_2_live_classes(self): - live_classes = [{ - 'cohort_time_slot_id': n, - 'starting_at': UTC_NOW + DELTA, - } for n in range(1, 5)] - cohorts = [{'never_ends': False, 'ending_date': UTC_NOW + DELTA} for _ in range(2)] - cohort_time_slots = [{'cohort_id': 1} for n in range(2)] - cohort_time_slots += [{'cohort_id': 2} for n in range(2)] + live_classes = [ + { + "cohort_time_slot_id": n, + "starting_at": UTC_NOW + DELTA, + } + for n in range(1, 5) + ] + cohorts = [{"never_ends": False, "ending_date": UTC_NOW + DELTA} for _ in range(2)] + cohort_time_slots = [{"cohort_id": 1} for n in range(2)] + cohort_time_slots += [{"cohort_id": 2} for n in range(2)] model = self.bc.database.create(live_class=live_classes, cohort=cohorts, cohort_time_slot=cohort_time_slots) command = Command() command.handle() - self.assertEqual(self.bc.database.list_of('events.LiveClass'), self.bc.format.to_dict(model.live_class)) - - self.bc.check.calls(tasks.fix_live_class_dates.delay.call_args_list, [ - call(1), - call(2), - call(3), - call(4), - ]) - self.bc.check.calls(sys.stdout.write.call_args_list, [ - call('Found 2 cohorts that have not finished and should have live classes\n'), - call(f'Adding cohort {model.cohort[0].slug} to the fixing queue, it ends on ' - f'{model.cohort[0].ending_date}\n'), - call(f'Adding cohort {model.cohort[1].slug} to the fixing queue, it 
ends on ' - f'{model.cohort[1].ending_date}\n'), - ]) + self.assertEqual(self.bc.database.list_of("events.LiveClass"), self.bc.format.to_dict(model.live_class)) + + self.bc.check.calls( + tasks.fix_live_class_dates.delay.call_args_list, + [ + call(1), + call(2), + call(3), + call(4), + ], + ) + self.bc.check.calls( + sys.stdout.write.call_args_list, + [ + call("Found 2 cohorts that have not finished and should have live classes\n"), + call( + f"Adding cohort {model.cohort[0].slug} to the fixing queue, it ends on " + f"{model.cohort[0].ending_date}\n" + ), + call( + f"Adding cohort {model.cohort[1].slug} to the fixing queue, it ends on " + f"{model.cohort[1].ending_date}\n" + ), + ], + ) self.bc.check.calls(sys.stderr.write.call_args_list, []) diff --git a/breathecode/events/tests/management/commands/tests_rerun_eventbrinte_order_placed.py b/breathecode/events/tests/management/commands/tests_rerun_eventbrinte_order_placed.py index 0661a5f9e..3292196e6 100644 --- a/breathecode/events/tests/management/commands/tests_rerun_eventbrinte_order_placed.py +++ b/breathecode/events/tests/management/commands/tests_rerun_eventbrinte_order_placed.py @@ -9,19 +9,19 @@ class SyncOrgVenuesTestSuite(EventTestCase): 🔽🔽🔽 With zero EventbriteWebhook """ - @patch('breathecode.events.tasks.async_eventbrite_webhook.delay', MagicMock()) + @patch("breathecode.events.tasks.async_eventbrite_webhook.delay", MagicMock()) def test__with_zero_eventbrite_webwooks(self): command = Command() command.handle() - self.assertEqual(self.bc.database.list_of('events.EventbriteWebhook'), []) + self.assertEqual(self.bc.database.list_of("events.EventbriteWebhook"), []) self.assertEqual(tasks.async_eventbrite_webhook.delay.call_args_list, []) """ 🔽🔽🔽 With two EventbriteWebhook, action does not match """ - @patch('breathecode.events.tasks.async_eventbrite_webhook.delay', MagicMock()) + @patch("breathecode.events.tasks.async_eventbrite_webhook.delay", MagicMock()) def test__with_two_eventbrite_webwooks__action_does_not_match(self): model = self.bc.database.create(eventbrite_webhook=2) @@ -29,7 +29,7 @@ def test__with_two_eventbrite_webwooks__action_does_not_match(self): command.handle() self.assertEqual( - self.bc.database.list_of('events.EventbriteWebhook'), + self.bc.database.list_of("events.EventbriteWebhook"), self.bc.format.to_dict(model.eventbrite_webhook), ) self.assertEqual(tasks.async_eventbrite_webhook.delay.call_args_list, []) @@ -38,16 +38,16 @@ def test__with_two_eventbrite_webwooks__action_does_not_match(self): 🔽🔽🔽 With two EventbriteWebhook, action match """ - @patch('breathecode.events.tasks.async_eventbrite_webhook.delay', MagicMock()) + @patch("breathecode.events.tasks.async_eventbrite_webhook.delay", MagicMock()) def test__with_two_eventbrite_webwooks__action_match(self): - eventbrite_webhook = {'action': 'order.placed'} + eventbrite_webhook = {"action": "order.placed"} model = self.bc.database.create(eventbrite_webhook=(2, eventbrite_webhook)) command = Command() command.handle() self.assertEqual( - self.bc.database.list_of('events.EventbriteWebhook'), + self.bc.database.list_of("events.EventbriteWebhook"), self.bc.format.to_dict(model.eventbrite_webhook), ) self.assertEqual(tasks.async_eventbrite_webhook.delay.call_args_list, [call(1), call(2)]) diff --git a/breathecode/events/tests/management/commands/tests_sync_eventbrite.py b/breathecode/events/tests/management/commands/tests_sync_eventbrite.py index b226cc3ec..da1902fe4 100644 --- a/breathecode/events/tests/management/commands/tests_sync_eventbrite.py +++ 
b/breathecode/events/tests/management/commands/tests_sync_eventbrite.py @@ -24,13 +24,14 @@ def sync_org_events(org): class SyncEventbriteTestSuite(EventTestCase): """Test /answer""" + """ 🔽🔽🔽 Without pass entity argument """ - @patch.object(sys.stdout, 'write', write_mock()) - @patch.object(sys.stderr, 'write', write_mock()) - @patch.object(actions, 'sync_org_events', sync_org_events_mock()) + @patch.object(sys.stdout, "write", write_mock()) + @patch.object(sys.stderr, "write", write_mock()) + @patch.object(actions, "sync_org_events", sync_org_events_mock()) def test_sync_eventbrite__without_entity(self): """Test /answer without auth""" import breathecode.events.actions as actions @@ -40,16 +41,16 @@ def test_sync_eventbrite__without_entity(self): command.handle() self.assertEqual(sys.stdout.write.call_args_list, []) - self.assertEqual(sys.stderr.write.call_args_list, [call('Entity argument not provided\n')]) + self.assertEqual(sys.stderr.write.call_args_list, [call("Entity argument not provided\n")]) self.assertEqual(actions.sync_org_events.call_args_list, []) """ 🔽🔽🔽 Passing a bad entity """ - @patch.object(sys.stdout, 'write', write_mock()) - @patch.object(sys.stderr, 'write', write_mock()) - @patch.object(actions, 'sync_org_events', sync_org_events_mock()) + @patch.object(sys.stdout, "write", write_mock()) + @patch.object(sys.stderr, "write", write_mock()) + @patch.object(actions, "sync_org_events", sync_org_events_mock()) def test_sync_eventbrite__bad_entity(self): """Test /answer without auth""" import breathecode.events.actions as actions @@ -57,20 +58,20 @@ def test_sync_eventbrite__bad_entity(self): # model = self.generate_models(organization=True) command = Command() - entity = 'they_killed_kenny' + entity = "they_killed_kenny" command.handle(entity=entity) self.assertEqual(sys.stdout.write.call_args_list, []) - self.assertEqual(sys.stderr.write.call_args_list, [call(f'Sync method for `{entity}` no Found!\n')]) + self.assertEqual(sys.stderr.write.call_args_list, [call(f"Sync method for `{entity}` no Found!\n")]) self.assertEqual(actions.sync_org_events.call_args_list, []) """ 🔽🔽🔽 With zero organizations """ - @patch.object(sys.stdout, 'write', write_mock()) - @patch.object(sys.stderr, 'write', write_mock()) - @patch.object(actions, 'sync_org_events', sync_org_events_mock()) + @patch.object(sys.stdout, "write", write_mock()) + @patch.object(sys.stderr, "write", write_mock()) + @patch.object(actions, "sync_org_events", sync_org_events_mock()) def test_sync_eventbrite__without_organization(self): """Test /answer without auth""" import breathecode.events.actions as actions @@ -78,11 +79,11 @@ def test_sync_eventbrite__without_organization(self): # model = self.generate_models(organization=True) command = Command() - entity = 'events' + entity = "events" command.handle(entity=entity) - self.assertEqual(sys.stdout.write.call_args_list, [call('Enqueued 0 of 0 for sync events\n')]) + self.assertEqual(sys.stdout.write.call_args_list, [call("Enqueued 0 of 0 for sync events\n")]) self.assertEqual(sys.stderr.write.call_args_list, []) self.assertEqual(actions.sync_org_events.call_args_list, []) @@ -90,10 +91,10 @@ def test_sync_eventbrite__without_organization(self): 🔽🔽🔽 With one organization without eventbrite credentials without name """ - @patch.object(sys.stdout, 'write', write_mock()) - @patch.object(sys.stderr, 'write', write_mock()) - @patch.object(actions, 'sync_org_events', sync_org_events_mock()) - @patch('builtins.print', MagicMock()) + @patch.object(sys.stdout, "write", 
write_mock()) + @patch.object(sys.stderr, "write", write_mock()) + @patch.object(actions, "sync_org_events", sync_org_events_mock()) + @patch("builtins.print", MagicMock()) def test_sync_eventbrite__with_organization__without_name(self): """Test /answer without auth""" import breathecode.events.actions as actions @@ -101,64 +102,66 @@ def test_sync_eventbrite__with_organization__without_name(self): model = self.generate_models(organization=True) command = Command() - entity = 'events' + entity = "events" command.handle(entity=entity) - self.assertEqual(sys.stdout.write.call_args_list, [call('Enqueued 0 of 1 for sync events\n')]) - self.assertEqual(sys.stderr.write.call_args_list, - [call(f'Organization Nameless is missing evenbrite key or ID\n')]) + self.assertEqual(sys.stdout.write.call_args_list, [call("Enqueued 0 of 1 for sync events\n")]) + self.assertEqual( + sys.stderr.write.call_args_list, [call(f"Organization Nameless is missing evenbrite key or ID\n")] + ) self.assertEqual(actions.sync_org_events.call_args_list, []) """ 🔽🔽🔽 With one organization without eventbrite credentials with name """ - @patch.object(sys.stdout, 'write', write_mock()) - @patch.object(sys.stderr, 'write', write_mock()) - @patch.object(actions, 'sync_org_events', sync_org_events_mock()) - @patch('builtins.print', MagicMock()) + @patch.object(sys.stdout, "write", write_mock()) + @patch.object(sys.stderr, "write", write_mock()) + @patch.object(actions, "sync_org_events", sync_org_events_mock()) + @patch("builtins.print", MagicMock()) def test_sync_eventbrite__with_organization__with_name(self): """Test /answer without auth""" import breathecode.events.actions as actions import sys - organization_kwargs = {'name': 'They killed kenny'} + organization_kwargs = {"name": "They killed kenny"} model = self.generate_models(organization=True, organization_kwargs=organization_kwargs) command = Command() - entity = 'events' + entity = "events" command.handle(entity=entity) - self.assertEqual(sys.stdout.write.call_args_list, [call('Enqueued 0 of 1 for sync events\n')]) - self.assertEqual(sys.stderr.write.call_args_list, - [call(f'Organization They killed kenny is missing evenbrite key or ID\n')]) + self.assertEqual(sys.stdout.write.call_args_list, [call("Enqueued 0 of 1 for sync events\n")]) + self.assertEqual( + sys.stderr.write.call_args_list, [call(f"Organization They killed kenny is missing evenbrite key or ID\n")] + ) self.assertEqual(actions.sync_org_events.call_args_list, []) """ 🔽🔽🔽 With one organization """ - @patch.object(sys.stdout, 'write', write_mock()) - @patch.object(sys.stderr, 'write', write_mock()) - @patch.object(actions, 'sync_org_events', sync_org_events_mock()) - @patch('builtins.print', MagicMock()) + @patch.object(sys.stdout, "write", write_mock()) + @patch.object(sys.stderr, "write", write_mock()) + @patch.object(actions, "sync_org_events", sync_org_events_mock()) + @patch("builtins.print", MagicMock()) def test_sync_eventbrite__with_organization(self): """Test /answer without auth""" import breathecode.events.actions as actions import sys organization_kwargs = { - 'name': 'They killed kenny', - 'eventbrite_key': 'they-killed-kenny', - 'eventbrite_id': 10131911, # don't forget 🦾 + "name": "They killed kenny", + "eventbrite_key": "they-killed-kenny", + "eventbrite_id": 10131911, # don't forget 🦾 } model = self.generate_models(organization=True, organization_kwargs=organization_kwargs) command = Command() - entity = 'events' + entity = "events" command.handle(entity=entity) - 
self.assertEqual(sys.stdout.write.call_args_list, [call('Enqueued 1 of 1 for sync events\n')]) + self.assertEqual(sys.stdout.write.call_args_list, [call("Enqueued 1 of 1 for sync events\n")]) # self.assertEqual(len(sys.stderr.write.call_args_list), 1) # the test environment is not consistent self.assertEqual(actions.sync_org_events.call_args_list, [call(model.organization)]) diff --git a/breathecode/events/tests/mixins/__init__.py b/breathecode/events/tests/mixins/__init__.py index 917a0252a..8f94d7ca5 100644 --- a/breathecode/events/tests/mixins/__init__.py +++ b/breathecode/events/tests/mixins/__init__.py @@ -1,4 +1,5 @@ """ Mixins """ + from .event_test_case import EventTestCase # noqa: F401 diff --git a/breathecode/events/tests/mixins/event_test_case.py b/breathecode/events/tests/mixins/event_test_case.py index 7cbeac9c9..d51725e86 100644 --- a/breathecode/events/tests/mixins/event_test_case.py +++ b/breathecode/events/tests/mixins/event_test_case.py @@ -1,13 +1,27 @@ """ Collections of mixins used to login in authorize microservice """ + from rest_framework.test import APITestCase -from breathecode.tests.mixins import (GenerateModelsMixin, CacheMixin, GenerateQueriesMixin, OldBreathecodeMixin, - DatetimeMixin, BreathecodeMixin) +from breathecode.tests.mixins import ( + GenerateModelsMixin, + CacheMixin, + GenerateQueriesMixin, + OldBreathecodeMixin, + DatetimeMixin, + BreathecodeMixin, +) -class EventTestCase(APITestCase, GenerateModelsMixin, CacheMixin, GenerateQueriesMixin, OldBreathecodeMixin, - DatetimeMixin, BreathecodeMixin): +class EventTestCase( + APITestCase, + GenerateModelsMixin, + CacheMixin, + GenerateQueriesMixin, + OldBreathecodeMixin, + DatetimeMixin, + BreathecodeMixin, +): """AdmissionsTestCase with auth methods""" def setUp(self): @@ -18,23 +32,23 @@ def setUp(self): def tearDown(self): self.clear_cache() - def data(self, action='test', url='https://www.eventbriteapi.com/v3/test'): + def data(self, action="test", url="https://www.eventbriteapi.com/v3/test"): return { - 'api_url': url, - 'config': { - 'user_id': '123456789012', - 'action': action, - 'webhook_id': '1234567', - 'endpoint_url': 'https://something.io/eventbrite/webhook' - } + "api_url": url, + "config": { + "user_id": "123456789012", + "action": action, + "webhook_id": "1234567", + "endpoint_url": "https://something.io/eventbrite/webhook", + }, } - def headers(self, event='test'): + def headers(self, event="test"): return { - 'X-Eventbrite-Event': event, - 'Accept': 'text/plain', - 'User-Agent': 'Eventbrite Hookshot 12345c6', - 'X-Eventbrite-Delivery': '1234567', - 'Content-type': 'application/json', - 'User-ID-Sender': '123456789012', + "X-Eventbrite-Event": event, + "Accept": "text/plain", + "User-Agent": "Eventbrite Hookshot 12345c6", + "X-Eventbrite-Delivery": "1234567", + "Content-type": "application/json", + "User-ID-Sender": "123456789012", } diff --git a/breathecode/events/tests/mixins/new_events_tests_case.py b/breathecode/events/tests/mixins/new_events_tests_case.py index 3aac0594b..cb4055556 100644 --- a/breathecode/events/tests/mixins/new_events_tests_case.py +++ b/breathecode/events/tests/mixins/new_events_tests_case.py @@ -1,19 +1,35 @@ """ Collections of mixins used to login in authorize microservice """ + import os from django.urls import reverse_lazy from rest_framework.test import APITestCase -from breathecode.tests.mixins import (GenerateModelsMixin, CacheMixin, GenerateQueriesMixin, HeadersMixin, - DatetimeMixin, ICallMixin, BreathecodeMixin) +from breathecode.tests.mixins import ( + 
GenerateModelsMixin, + CacheMixin, + GenerateQueriesMixin, + HeadersMixin, + DatetimeMixin, + ICallMixin, + BreathecodeMixin, +) -class EventTestCase(APITestCase, GenerateModelsMixin, CacheMixin, GenerateQueriesMixin, HeadersMixin, DatetimeMixin, - ICallMixin, BreathecodeMixin): +class EventTestCase( + APITestCase, + GenerateModelsMixin, + CacheMixin, + GenerateQueriesMixin, + HeadersMixin, + DatetimeMixin, + ICallMixin, + BreathecodeMixin, +): """AdmissionsTestCase with auth methods""" def setUp(self): - os.environ['API_URL'] = 'http://localhost:8000' + os.environ["API_URL"] = "http://localhost:8000" self.generate_queries() self.set_test_instance(self) @@ -22,51 +38,62 @@ def tearDown(self): def check_all_academy_events(self, models=None): self.headers(academy=1) - url = reverse_lazy('events:academy_event') + url = reverse_lazy("events:academy_event") if models is None: models = [ - self.generate_models(authenticate=True, - organization=True, - profile_academy=True, - capability='read_event', - role='potato', - syllabus=True, - event=True) + self.generate_models( + authenticate=True, + organization=True, + profile_academy=True, + capability="read_event", + role="potato", + syllabus=True, + event=True, + ) ] response = self.client.get(url) json = response.json() - expected = [{ - 'id': model['event'].id, - 'banner': model['event'].banner, - 'ending_at': self.bc.datetime.to_iso_string(model['event'].ending_at), - 'event_type': model['event'].event_type, - 'excerpt': model['event'].excerpt, - 'lang': model['event'].lang, - 'online_event': model['event'].online_event, - 'tags': model['event'].tags, - 'slug': model['event'].slug, - 'starting_at': self.bc.datetime.to_iso_string(model['event'].starting_at), - 'ended_at': model['event'].ended_at, - 'status': model['event'].status, - 'title': model['event'].title, - 'url': model['event'].url, - 'venue': model['event'].venue, - 'host': model['event'].host, - 'asset_slug': model['event'].asset_slug, - 'capacity': model['event'].capacity, - 'sync_with_eventbrite': model['event'].sync_with_eventbrite, - 'eventbrite_sync_description': model['event'].eventbrite_sync_description, - 'eventbrite_sync_status': model['event'].eventbrite_sync_status, - } for model in models] + expected = [ + { + "id": model["event"].id, + "banner": model["event"].banner, + "ending_at": self.bc.datetime.to_iso_string(model["event"].ending_at), + "event_type": model["event"].event_type, + "excerpt": model["event"].excerpt, + "lang": model["event"].lang, + "online_event": model["event"].online_event, + "tags": model["event"].tags, + "slug": model["event"].slug, + "starting_at": self.bc.datetime.to_iso_string(model["event"].starting_at), + "ended_at": model["event"].ended_at, + "status": model["event"].status, + "title": model["event"].title, + "url": model["event"].url, + "venue": model["event"].venue, + "host": model["event"].host, + "asset_slug": model["event"].asset_slug, + "capacity": model["event"].capacity, + "sync_with_eventbrite": model["event"].sync_with_eventbrite, + "eventbrite_sync_description": model["event"].eventbrite_sync_description, + "eventbrite_sync_status": model["event"].eventbrite_sync_status, + } + for model in models + ] expected.reverse() self.assertEqual(json, expected) self.assertEqual(response.status_code, 200) - self.assertEqual(self.bc.database.list_of('events.Event'), [{ - **self.model_to_dict(model, 'event'), - } for model in models]) + self.assertEqual( + self.bc.database.list_of("events.Event"), + [ + { + **self.model_to_dict(model, "event"), + 
} + for model in models + ], + ) return models diff --git a/breathecode/events/tests/permissions/contexts/tests_event.py b/breathecode/events/tests/permissions/contexts/tests_event.py index a31d6d9ca..77fd9fb3f 100644 --- a/breathecode/events/tests/permissions/contexts/tests_event.py +++ b/breathecode/events/tests/permissions/contexts/tests_event.py @@ -7,16 +7,15 @@ def serializer(event): - author = (f'{event.author.first_name} {event.author.last_name} ({event.author.email})' - if event.author else 'unknown') + author = f"{event.author.first_name} {event.author.last_name} ({event.author.email})" if event.author else "unknown" return { - 'id': event.id, - 'slug': event.slug, - 'lang': event.lang, - 'academy': event.academy.slug if event.academy else 'unknown', - 'organization': event.organization.name if event.organization else 'unknown', - 'published_at': event.published_at, - 'event_type': event.event_type.slug if event.event_type else 'unknown', + "id": event.id, + "slug": event.slug, + "lang": event.lang, + "academy": event.academy.slug if event.academy else "unknown", + "organization": event.organization.name if event.organization else "unknown", + "published_at": event.published_at, + "event_type": event.event_type.slug if event.event_type else "unknown", } @@ -25,22 +24,28 @@ def serializer(event): class AcademyEventTestSuite(EventTestCase): - @patch('ldclient.get', MagicMock()) - @patch('breathecode.services.launch_darkly.client.LaunchDarkly.context', MagicMock(return_value=value)) + @patch("ldclient.get", MagicMock()) + @patch("breathecode.services.launch_darkly.client.LaunchDarkly.context", MagicMock(return_value=value)) def test_make_right_calls(self): model = self.bc.database.create(event=1) ld = LaunchDarkly() result = event(ld, model.event) - self.assertEqual(self.bc.database.list_of('events.Event'), [ - self.bc.format.to_dict(model.event), - ]) + self.assertEqual( + self.bc.database.list_of("events.Event"), + [ + self.bc.format.to_dict(model.event), + ], + ) contexts = serializer(model.event) - self.assertEqual(LaunchDarkly.context.call_args_list, [ - call('1', model.event.title, 'event', contexts), - ]) + self.assertEqual( + LaunchDarkly.context.call_args_list, + [ + call("1", model.event.title, "event", contexts), + ], + ) self.assertEqual(result, value) diff --git a/breathecode/events/tests/permissions/contexts/tests_event_type.py b/breathecode/events/tests/permissions/contexts/tests_event_type.py index 331f24e0b..10ac6c4d4 100644 --- a/breathecode/events/tests/permissions/contexts/tests_event_type.py +++ b/breathecode/events/tests/permissions/contexts/tests_event_type.py @@ -8,10 +8,10 @@ def serializer(event_type): return { - 'id': event_type.id, - 'slug': event_type.slug, - 'academy': event_type.academy.slug, - 'lang': event_type.lang, + "id": event_type.id, + "slug": event_type.slug, + "academy": event_type.academy.slug, + "lang": event_type.lang, } @@ -20,23 +20,29 @@ def serializer(event_type): class AcademyEventTestSuite(EventTestCase): - @patch('ldclient.get', MagicMock()) - @patch('breathecode.services.launch_darkly.client.LaunchDarkly.context', MagicMock(return_value=value)) + @patch("ldclient.get", MagicMock()) + @patch("breathecode.services.launch_darkly.client.LaunchDarkly.context", MagicMock(return_value=value)) def test_make_right_calls(self): - kwargs = {'icon_url': self.bc.fake.url()} + kwargs = {"icon_url": self.bc.fake.url()} model = self.bc.database.create(event_type=kwargs) ld = LaunchDarkly() result = event_type(ld, model.event_type) - 
self.assertEqual(self.bc.database.list_of('events.EventType'), [ - self.bc.format.to_dict(model.event_type), - ]) + self.assertEqual( + self.bc.database.list_of("events.EventType"), + [ + self.bc.format.to_dict(model.event_type), + ], + ) contexts = serializer(model.event_type) - self.assertEqual(LaunchDarkly.context.call_args_list, [ - call('1', model.event_type.name, 'event-type', contexts), - ]) + self.assertEqual( + LaunchDarkly.context.call_args_list, + [ + call("1", model.event_type.name, "event-type", contexts), + ], + ) self.assertEqual(result, value) diff --git a/breathecode/events/tests/permissions/flags/release/tests_enable_consume_live_classes.py b/breathecode/events/tests/permissions/flags/release/tests_enable_consume_live_classes.py index 7a2eb1dfd..41c85e5ae 100644 --- a/breathecode/events/tests/permissions/flags/release/tests_enable_consume_live_classes.py +++ b/breathecode/events/tests/permissions/flags/release/tests_enable_consume_live_classes.py @@ -12,17 +12,20 @@ class AcademyEventTestSuite(EventTestCase): - @patch('ldclient.get', MagicMock()) - @patch('breathecode.services.launch_darkly.client.LaunchDarkly.get', MagicMock(return_value=value)) - @patch('breathecode.authenticate.permissions.contexts.user', MagicMock(return_value=context_value)) + @patch("ldclient.get", MagicMock()) + @patch("breathecode.services.launch_darkly.client.LaunchDarkly.get", MagicMock(return_value=value)) + @patch("breathecode.authenticate.permissions.contexts.user", MagicMock(return_value=context_value)) def test_make_right_calls(self): model = self.bc.database.create(user=1) result = api.release.enable_consume_live_classes(model.user) - self.assertEqual(self.bc.database.list_of('auth.User'), [ - self.bc.format.to_dict(model.user), - ]) + self.assertEqual( + self.bc.database.list_of("auth.User"), + [ + self.bc.format.to_dict(model.user), + ], + ) self.assertEqual(len(authenticate_contexts.user.call_args_list), 1) args, kwargs = authenticate_contexts.user.call_args_list[0] @@ -35,6 +38,9 @@ def test_make_right_calls(self): self.assertEqual(result, value) - self.assertEqual(LaunchDarkly.get.call_args_list, [ - call('api.release.enable_consume_live_classes', context_value, False), - ]) + self.assertEqual( + LaunchDarkly.get.call_args_list, + [ + call("api.release.enable_consume_live_classes", context_value, False), + ], + ) diff --git a/breathecode/events/tests/permissions/flags/release/tests_enable_consume_live_events.py b/breathecode/events/tests/permissions/flags/release/tests_enable_consume_live_events.py index 497c37f70..0e792ce2b 100644 --- a/breathecode/events/tests/permissions/flags/release/tests_enable_consume_live_events.py +++ b/breathecode/events/tests/permissions/flags/release/tests_enable_consume_live_events.py @@ -29,22 +29,27 @@ def assert_context_was_call(self, fn, model): class AcademyEventTestSuite(EventTestCase): - @patch('ldclient.get', MagicMock()) - @patch('breathecode.services.launch_darkly.client.LaunchDarkly.get', MagicMock(return_value=value)) - @patch('breathecode.services.launch_darkly.client.LaunchDarkly.join_contexts', - MagicMock(return_value=join_contexts_value)) - @patch('breathecode.authenticate.permissions.contexts.user', MagicMock(return_value=context1)) - @patch('breathecode.events.permissions.contexts.event', MagicMock(return_value=context2)) - @patch('breathecode.events.permissions.contexts.event_type', MagicMock(return_value=context3)) - @patch('breathecode.admissions.permissions.contexts.academy', MagicMock(return_value=context4)) + 
@patch("ldclient.get", MagicMock()) + @patch("breathecode.services.launch_darkly.client.LaunchDarkly.get", MagicMock(return_value=value)) + @patch( + "breathecode.services.launch_darkly.client.LaunchDarkly.join_contexts", + MagicMock(return_value=join_contexts_value), + ) + @patch("breathecode.authenticate.permissions.contexts.user", MagicMock(return_value=context1)) + @patch("breathecode.events.permissions.contexts.event", MagicMock(return_value=context2)) + @patch("breathecode.events.permissions.contexts.event_type", MagicMock(return_value=context3)) + @patch("breathecode.admissions.permissions.contexts.academy", MagicMock(return_value=context4)) def test_make_right_calls__without_all_contexts(self): model = self.bc.database.create(user=1, event=1) result = api.release.enable_consume_live_events(model.user, model.event) - self.assertEqual(self.bc.database.list_of('auth.User'), [ - self.bc.format.to_dict(model.user), - ]) + self.assertEqual( + self.bc.database.list_of("auth.User"), + [ + self.bc.format.to_dict(model.user), + ], + ) assert_context_was_call(self, authenticate_contexts.user, model.user) # assert_context_call(self, admissions_contexts.academy, model.academy) @@ -56,31 +61,42 @@ def test_make_right_calls__without_all_contexts(self): self.assertEqual(result, value) - self.assertEqual(LaunchDarkly.join_contexts.call_args_list, [ - call(context1, context2), - ]) - - self.assertEqual(LaunchDarkly.get.call_args_list, [ - call('api.release.enable_consume_live_events', join_contexts_value, False), - ]) - - @patch('ldclient.get', MagicMock()) - @patch('breathecode.services.launch_darkly.client.LaunchDarkly.get', MagicMock(return_value=value)) - @patch('breathecode.services.launch_darkly.client.LaunchDarkly.join_contexts', - MagicMock(return_value=join_contexts_value)) - @patch('breathecode.authenticate.permissions.contexts.user', MagicMock(return_value=context1)) - @patch('breathecode.events.permissions.contexts.event', MagicMock(return_value=context2)) - @patch('breathecode.events.permissions.contexts.event_type', MagicMock(return_value=context3)) - @patch('breathecode.admissions.permissions.contexts.academy', MagicMock(return_value=context4)) + self.assertEqual( + LaunchDarkly.join_contexts.call_args_list, + [ + call(context1, context2), + ], + ) + + self.assertEqual( + LaunchDarkly.get.call_args_list, + [ + call("api.release.enable_consume_live_events", join_contexts_value, False), + ], + ) + + @patch("ldclient.get", MagicMock()) + @patch("breathecode.services.launch_darkly.client.LaunchDarkly.get", MagicMock(return_value=value)) + @patch( + "breathecode.services.launch_darkly.client.LaunchDarkly.join_contexts", + MagicMock(return_value=join_contexts_value), + ) + @patch("breathecode.authenticate.permissions.contexts.user", MagicMock(return_value=context1)) + @patch("breathecode.events.permissions.contexts.event", MagicMock(return_value=context2)) + @patch("breathecode.events.permissions.contexts.event_type", MagicMock(return_value=context3)) + @patch("breathecode.admissions.permissions.contexts.academy", MagicMock(return_value=context4)) def test_make_right_calls__with_all_contexts(self): - event_type = {'icon_url': self.bc.fake.url()} + event_type = {"icon_url": self.bc.fake.url()} model = self.bc.database.create(user=1, event=1, academy=1, event_type=event_type) result = api.release.enable_consume_live_events(model.user, model.event) - self.assertEqual(self.bc.database.list_of('auth.User'), [ - self.bc.format.to_dict(model.user), - ]) + self.assertEqual( + 
self.bc.database.list_of("auth.User"), + [ + self.bc.format.to_dict(model.user), + ], + ) assert_context_was_call(self, authenticate_contexts.user, model.user) assert_context_was_call(self, admissions_contexts.academy, model.academy) @@ -89,10 +105,16 @@ def test_make_right_calls__with_all_contexts(self): self.assertEqual(result, value) - self.assertEqual(LaunchDarkly.join_contexts.call_args_list, [ - call(context1, context2, context3, context4), - ]) - - self.assertEqual(LaunchDarkly.get.call_args_list, [ - call('api.release.enable_consume_live_events', join_contexts_value, False), - ]) + self.assertEqual( + LaunchDarkly.join_contexts.call_args_list, + [ + call(context1, context2, context3, context4), + ], + ) + + self.assertEqual( + LaunchDarkly.get.call_args_list, + [ + call("api.release.enable_consume_live_events", join_contexts_value, False), + ], + ) diff --git a/breathecode/events/tests/receivers/tests_post_save_cohort_time_slot.py b/breathecode/events/tests/receivers/tests_post_save_cohort_time_slot.py index b48ae95a0..8f4f4685e 100644 --- a/breathecode/events/tests/receivers/tests_post_save_cohort_time_slot.py +++ b/breathecode/events/tests/receivers/tests_post_save_cohort_time_slot.py @@ -14,20 +14,20 @@ class TestSyncOrgVenues(LegacyAPITestCase): 🔽🔽🔽 With zero CohortTimeSlot """ - @patch('breathecode.events.tasks.build_live_classes_from_timeslot.delay', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("breathecode.events.tasks.build_live_classes_from_timeslot.delay", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_without_timeslots(self, enable_signals): enable_signals() - self.assertEqual(self.bc.database.list_of('events.LiveClass'), []) + self.assertEqual(self.bc.database.list_of("events.LiveClass"), []) self.assertEqual(tasks.build_live_classes_from_timeslot.delay.call_args_list, []) """ 🔽🔽🔽 With invalid Cohort """ - @patch('breathecode.events.tasks.build_live_classes_from_timeslot.delay', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("breathecode.events.tasks.build_live_classes_from_timeslot.delay", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_with_invalid_cohort(self, enable_signals): enable_signals() @@ -37,10 +37,10 @@ def test_with_invalid_cohort(self, enable_signals): ] index = 0 for ending_date, never_ends in cases: - cohort = {'ending_date': ending_date, 'never_ends': never_ends} + cohort = {"ending_date": ending_date, "never_ends": never_ends} self.bc.database.create(cohort_time_slot=1, cohort=cohort) - self.assertEqual(self.bc.database.list_of('events.LiveClass'), []) + self.assertEqual(self.bc.database.list_of("events.LiveClass"), []) self.assertEqual(tasks.build_live_classes_from_timeslot.delay.call_args_list, []) index += 1 @@ -48,17 +48,17 @@ def test_with_invalid_cohort(self, enable_signals): 🔽🔽🔽 With invalid Cohort """ - @patch('breathecode.events.tasks.build_live_classes_from_timeslot.delay', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("breathecode.events.tasks.build_live_classes_from_timeslot.delay", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_with_right_cohort(self, enable_signals): enable_signals() ending_date = UTC_NOW + timedelta(seconds=random.randint(1, 1000)) never_ends = False - cohort = {'ending_date': ending_date, 'never_ends': never_ends} - cohort_time_slots = 
[{'cohort_id': n} for n in range(1, 3)] + cohort = {"ending_date": ending_date, "never_ends": never_ends} + cohort_time_slots = [{"cohort_id": n} for n in range(1, 3)] self.bc.database.create(cohort_time_slot=cohort_time_slots, cohort=(2, cohort)) - self.assertEqual(self.bc.database.list_of('events.LiveClass'), []) + self.assertEqual(self.bc.database.list_of("events.LiveClass"), []) self.assertEqual(tasks.build_live_classes_from_timeslot.delay.call_args_list, [call(1), call(2)]) diff --git a/breathecode/events/tests/tasks/tests_build_live_classes_from_timeslot.py b/breathecode/events/tests/tasks/tests_build_live_classes_from_timeslot.py index e6abc6e06..a956a97c1 100644 --- a/breathecode/events/tests/tasks/tests_build_live_classes_from_timeslot.py +++ b/breathecode/events/tests/tasks/tests_build_live_classes_from_timeslot.py @@ -17,15 +17,15 @@ def live_class_item(data={}): return { - 'id': 0, - 'cohort_time_slot_id': 0, - 'log': {}, - 'remote_meeting_url': '', - 'hash': '', - 'started_at': None, - 'ended_at': None, - 'starting_at': UTC_NOW, - 'ending_at': UTC_NOW, + "id": 0, + "cohort_time_slot_id": 0, + "log": {}, + "remote_meeting_url": "", + "hash": "", + "started_at": None, + "ended_at": None, + "starting_at": UTC_NOW, + "ending_at": UTC_NOW, **data, } @@ -35,328 +35,397 @@ class AcademyEventTestSuite(EventTestCase): 🔽🔽🔽 with 0 CohortTimeSlot """ - @patch.object(actions, 'export_event_to_eventbrite', MagicMock()) - @patch.object(logging.Logger, 'error', MagicMock()) - @patch.object(logging.Logger, 'debug', MagicMock()) + @patch.object(actions, "export_event_to_eventbrite", MagicMock()) + @patch.object(logging.Logger, "error", MagicMock()) + @patch.object(logging.Logger, "debug", MagicMock()) def test_zero_cohort_time_slots(self): build_live_classes_from_timeslot(1) - self.assertEqual(self.bc.database.list_of('admissions.CohortTimeSlot'), []) - self.assertEqual(self.bc.database.list_of('events.LiveClass'), []) + self.assertEqual(self.bc.database.list_of("admissions.CohortTimeSlot"), []) + self.assertEqual(self.bc.database.list_of("events.LiveClass"), []) """ 🔽🔽🔽 with 1 CohortTimeSlot, Cohort never ends """ - @patch.object(actions, 'export_event_to_eventbrite', MagicMock()) - @patch.object(logging.Logger, 'error', MagicMock()) - @patch.object(logging.Logger, 'debug', MagicMock()) + @patch.object(actions, "export_event_to_eventbrite", MagicMock()) + @patch.object(logging.Logger, "error", MagicMock()) + @patch.object(logging.Logger, "debug", MagicMock()) def test_one_cohort_time_slot_with_cohort_never_ends(self): - cohort = {'never_ends': True, 'ending_date': None} - with patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()): + cohort = {"never_ends": True, "ending_date": None} + with patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()): model = self.bc.database.create(cohort_time_slot=1, cohort=cohort) build_live_classes_from_timeslot(1) - self.assertEqual(self.bc.database.list_of('admissions.CohortTimeSlot'), [ - self.bc.format.to_dict(model.cohort_time_slot), - ]) - self.assertEqual(self.bc.database.list_of('events.LiveClass'), []) + self.assertEqual( + self.bc.database.list_of("admissions.CohortTimeSlot"), + [ + self.bc.format.to_dict(model.cohort_time_slot), + ], + ) + self.assertEqual(self.bc.database.list_of("events.LiveClass"), []) """ 🔽🔽🔽 with 1 CohortTimeSlot, Cohort with ending_date in the past """ - @patch.object(actions, 'export_event_to_eventbrite', MagicMock()) - @patch.object(logging.Logger, 'error', MagicMock()) - 
@patch.object(logging.Logger, 'debug', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=DATE)) + @patch.object(actions, "export_event_to_eventbrite", MagicMock()) + @patch.object(logging.Logger, "error", MagicMock()) + @patch.object(logging.Logger, "debug", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=DATE)) def test_one_cohort_time_slot_with_ending_date_in_the_past(self): base_date = DATE cohort = { - 'never_ends': False, - 'ending_date': base_date - timedelta(weeks=3 * 5), - 'online_meeting_url': self.bc.fake.url(), - 'kickoff_date': base_date - timedelta(weeks=3 * 10), + "never_ends": False, + "ending_date": base_date - timedelta(weeks=3 * 5), + "online_meeting_url": self.bc.fake.url(), + "kickoff_date": base_date - timedelta(weeks=3 * 10), } - starting_at = self.bc.datetime.to_datetime_integer('America/New_York', base_date - timedelta(weeks=3 * 4)) + starting_at = self.bc.datetime.to_datetime_integer("America/New_York", base_date - timedelta(weeks=3 * 4)) - ending_at = self.bc.datetime.to_datetime_integer('America/New_York', - base_date - timedelta(weeks=3 * 4) + timedelta(hours=3)) + ending_at = self.bc.datetime.to_datetime_integer( + "America/New_York", base_date - timedelta(weeks=3 * 4) + timedelta(hours=3) + ) cohort_time_slot = { - 'starting_at': starting_at, - 'ending_at': ending_at, - 'timezone': 'America/New_York', - 'recurrent': True, - 'recurrency_type': 'WEEKLY', - 'removed_at': None, + "starting_at": starting_at, + "ending_at": ending_at, + "timezone": "America/New_York", + "recurrent": True, + "recurrency_type": "WEEKLY", + "removed_at": None, } - with patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()): + with patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()): model = self.bc.database.create(cohort_time_slot=cohort_time_slot, cohort=cohort) build_live_classes_from_timeslot(1) - self.assertEqual(self.bc.database.list_of('admissions.CohortTimeSlot'), [ - self.bc.format.to_dict(model.cohort_time_slot), - ]) - self.assertEqual(self.bc.database.list_of('events.LiveClass'), []) + self.assertEqual( + self.bc.database.list_of("admissions.CohortTimeSlot"), + [ + self.bc.format.to_dict(model.cohort_time_slot), + ], + ) + self.assertEqual(self.bc.database.list_of("events.LiveClass"), []) """ 🔽🔽🔽 with 1 CohortTimeSlot, Cohort with ending_date in the future, it's weekly """ - @patch.object(actions, 'export_event_to_eventbrite', MagicMock()) - @patch.object(logging.Logger, 'error', MagicMock()) - @patch.object(logging.Logger, 'debug', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=DATE)) - @patch('breathecode.events.models.LiveClass._get_hash', MagicMock(side_effect=[ - 'r1', - 'r2', - 'r3', - 'r4', - 'r5', - 'r6', - ])) + @patch.object(actions, "export_event_to_eventbrite", MagicMock()) + @patch.object(logging.Logger, "error", MagicMock()) + @patch.object(logging.Logger, "debug", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=DATE)) + @patch( + "breathecode.events.models.LiveClass._get_hash", + MagicMock( + side_effect=[ + "r1", + "r2", + "r3", + "r4", + "r5", + "r6", + ] + ), + ) def test_one_cohort_time_slot_with_ending_date_in_the_future__weekly(self): base_date = DATE cohort = { - 'never_ends': False, - 'ending_date': base_date, - 'online_meeting_url': self.bc.fake.url(), - 'kickoff_date': base_date - timedelta(weeks=3 * 2), + "never_ends": False, + "ending_date": base_date, + "online_meeting_url": self.bc.fake.url(), + 
"kickoff_date": base_date - timedelta(weeks=3 * 2), } - starting_at = self.bc.datetime.to_datetime_integer('America/New_York', base_date - timedelta(weeks=3 * 4)) + starting_at = self.bc.datetime.to_datetime_integer("America/New_York", base_date - timedelta(weeks=3 * 4)) - ending_at = self.bc.datetime.to_datetime_integer('America/New_York', - base_date - timedelta(weeks=3 * 4) + timedelta(hours=3)) + ending_at = self.bc.datetime.to_datetime_integer( + "America/New_York", base_date - timedelta(weeks=3 * 4) + timedelta(hours=3) + ) cohort_time_slot = { - 'starting_at': starting_at, - 'ending_at': ending_at, - 'timezone': 'America/New_York', - 'recurrent': True, - 'recurrency_type': 'WEEKLY', - 'removed_at': None, + "starting_at": starting_at, + "ending_at": ending_at, + "timezone": "America/New_York", + "recurrent": True, + "recurrency_type": "WEEKLY", + "removed_at": None, } - with patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()): + with patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()): model = self.bc.database.create(cohort_time_slot=cohort_time_slot, cohort=cohort) build_live_classes_from_timeslot(1) - self.assertEqual(self.bc.database.list_of('admissions.CohortTimeSlot'), [ - self.bc.format.to_dict(model.cohort_time_slot), - ]) - self.assertEqual(self.bc.database.list_of('events.LiveClass'), [ - live_class_item({ - 'id': 1, - 'cohort_time_slot_id': 1, - 'hash': 'r1', - 'starting_at': datetime(2022, 11, 18, 10, 20, tzinfo=pytz.UTC), - 'ending_at': datetime(2022, 11, 18, 13, 20, tzinfo=pytz.UTC), - 'remote_meeting_url': model.cohort.online_meeting_url, - }), - live_class_item({ - 'id': 2, - 'cohort_time_slot_id': 1, - 'hash': 'r2', - 'starting_at': datetime(2022, 11, 25, 10, 20, tzinfo=pytz.UTC), - 'ending_at': datetime(2022, 11, 25, 13, 20, tzinfo=pytz.UTC), - 'remote_meeting_url': model.cohort.online_meeting_url, - }), - live_class_item({ - 'id': 3, - 'cohort_time_slot_id': 1, - 'hash': 'r3', - 'starting_at': datetime(2022, 12, 2, 10, 20, tzinfo=pytz.UTC), - 'ending_at': datetime(2022, 12, 2, 13, 20, tzinfo=pytz.UTC), - 'remote_meeting_url': model.cohort.online_meeting_url, - }), - live_class_item({ - 'id': 4, - 'cohort_time_slot_id': 1, - 'hash': 'r4', - 'starting_at': datetime(2022, 12, 9, 10, 20, tzinfo=pytz.UTC), - 'ending_at': datetime(2022, 12, 9, 13, 20, tzinfo=pytz.UTC), - 'remote_meeting_url': model.cohort.online_meeting_url, - }), - live_class_item({ - 'id': 5, - 'cohort_time_slot_id': 1, - 'hash': 'r5', - 'starting_at': datetime(2022, 12, 16, 10, 20, tzinfo=pytz.UTC), - 'ending_at': datetime(2022, 12, 16, 13, 20, tzinfo=pytz.UTC), - 'remote_meeting_url': model.cohort.online_meeting_url, - }), - live_class_item({ - 'id': 6, - 'cohort_time_slot_id': 1, - 'hash': 'r6', - 'starting_at': datetime(2022, 12, 23, 10, 20, tzinfo=pytz.UTC), - 'ending_at': datetime(2022, 12, 23, 13, 20, tzinfo=pytz.UTC), - 'remote_meeting_url': model.cohort.online_meeting_url, - }), - ]) + self.assertEqual( + self.bc.database.list_of("admissions.CohortTimeSlot"), + [ + self.bc.format.to_dict(model.cohort_time_slot), + ], + ) + self.assertEqual( + self.bc.database.list_of("events.LiveClass"), + [ + live_class_item( + { + "id": 1, + "cohort_time_slot_id": 1, + "hash": "r1", + "starting_at": datetime(2022, 11, 18, 10, 20, tzinfo=pytz.UTC), + "ending_at": datetime(2022, 11, 18, 13, 20, tzinfo=pytz.UTC), + "remote_meeting_url": model.cohort.online_meeting_url, + } + ), + live_class_item( + { + "id": 2, + "cohort_time_slot_id": 1, + "hash": "r2", + 
"starting_at": datetime(2022, 11, 25, 10, 20, tzinfo=pytz.UTC), + "ending_at": datetime(2022, 11, 25, 13, 20, tzinfo=pytz.UTC), + "remote_meeting_url": model.cohort.online_meeting_url, + } + ), + live_class_item( + { + "id": 3, + "cohort_time_slot_id": 1, + "hash": "r3", + "starting_at": datetime(2022, 12, 2, 10, 20, tzinfo=pytz.UTC), + "ending_at": datetime(2022, 12, 2, 13, 20, tzinfo=pytz.UTC), + "remote_meeting_url": model.cohort.online_meeting_url, + } + ), + live_class_item( + { + "id": 4, + "cohort_time_slot_id": 1, + "hash": "r4", + "starting_at": datetime(2022, 12, 9, 10, 20, tzinfo=pytz.UTC), + "ending_at": datetime(2022, 12, 9, 13, 20, tzinfo=pytz.UTC), + "remote_meeting_url": model.cohort.online_meeting_url, + } + ), + live_class_item( + { + "id": 5, + "cohort_time_slot_id": 1, + "hash": "r5", + "starting_at": datetime(2022, 12, 16, 10, 20, tzinfo=pytz.UTC), + "ending_at": datetime(2022, 12, 16, 13, 20, tzinfo=pytz.UTC), + "remote_meeting_url": model.cohort.online_meeting_url, + } + ), + live_class_item( + { + "id": 6, + "cohort_time_slot_id": 1, + "hash": "r6", + "starting_at": datetime(2022, 12, 23, 10, 20, tzinfo=pytz.UTC), + "ending_at": datetime(2022, 12, 23, 13, 20, tzinfo=pytz.UTC), + "remote_meeting_url": model.cohort.online_meeting_url, + } + ), + ], + ) assert LiveClass._get_hash.call_args_list == [call() for _ in range(6)] """ 🔽🔽🔽 with 1 CohortTimeSlot, Cohort with ending_date in the future, it's weekly """ - @patch.object(actions, 'export_event_to_eventbrite', MagicMock()) - @patch.object(logging.Logger, 'error', MagicMock()) - @patch.object(logging.Logger, 'debug', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=DATE)) - @patch('breathecode.events.models.LiveClass._get_hash', MagicMock(side_effect=[ - 'r1', - 'r2', - 'r3', - 'r4', - 'r5', - 'r6', - ])) + @patch.object(actions, "export_event_to_eventbrite", MagicMock()) + @patch.object(logging.Logger, "error", MagicMock()) + @patch.object(logging.Logger, "debug", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=DATE)) + @patch( + "breathecode.events.models.LiveClass._get_hash", + MagicMock( + side_effect=[ + "r1", + "r2", + "r3", + "r4", + "r5", + "r6", + ] + ), + ) def test_one_cohort_time_slot_with_ending_date_in_the_future__monthly(self): base_date = DATE cohort = { - 'never_ends': False, - 'ending_date': base_date, - 'online_meeting_url': self.bc.fake.url(), - 'kickoff_date': base_date - timedelta(weeks=3 * 2), + "never_ends": False, + "ending_date": base_date, + "online_meeting_url": self.bc.fake.url(), + "kickoff_date": base_date - timedelta(weeks=3 * 2), } - starting_at = self.bc.datetime.to_datetime_integer('America/New_York', base_date - timedelta(weeks=3 * 4)) + starting_at = self.bc.datetime.to_datetime_integer("America/New_York", base_date - timedelta(weeks=3 * 4)) - ending_at = self.bc.datetime.to_datetime_integer('America/New_York', - base_date - timedelta(weeks=3 * 4) + timedelta(hours=3)) + ending_at = self.bc.datetime.to_datetime_integer( + "America/New_York", base_date - timedelta(weeks=3 * 4) + timedelta(hours=3) + ) cohort_time_slot = { - 'starting_at': starting_at, - 'ending_at': ending_at, - 'timezone': 'America/New_York', - 'recurrent': True, - 'recurrency_type': 'MONTHLY', - 'removed_at': None, + "starting_at": starting_at, + "ending_at": ending_at, + "timezone": "America/New_York", + "recurrent": True, + "recurrency_type": "MONTHLY", + "removed_at": None, } - with patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()): + 
with patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()): model = self.bc.database.create(cohort_time_slot=cohort_time_slot, cohort=cohort) build_live_classes_from_timeslot(1) - self.assertEqual(self.bc.database.list_of('admissions.CohortTimeSlot'), [ - self.bc.format.to_dict(model.cohort_time_slot), - ]) - self.assertEqual(self.bc.database.list_of('events.LiveClass'), [ - live_class_item({ - 'id': 1, - 'cohort_time_slot_id': 1, - 'hash': 'r1', - 'starting_at': datetime(2022, 12, 7, 10, 20, tzinfo=pytz.UTC), - 'ending_at': datetime(2022, 12, 7, 13, 20, tzinfo=pytz.UTC), - 'remote_meeting_url': model.cohort.online_meeting_url, - }), - ]) + self.assertEqual( + self.bc.database.list_of("admissions.CohortTimeSlot"), + [ + self.bc.format.to_dict(model.cohort_time_slot), + ], + ) + self.assertEqual( + self.bc.database.list_of("events.LiveClass"), + [ + live_class_item( + { + "id": 1, + "cohort_time_slot_id": 1, + "hash": "r1", + "starting_at": datetime(2022, 12, 7, 10, 20, tzinfo=pytz.UTC), + "ending_at": datetime(2022, 12, 7, 13, 20, tzinfo=pytz.UTC), + "remote_meeting_url": model.cohort.online_meeting_url, + } + ), + ], + ) assert LiveClass._get_hash.call_args_list == [call()] """ 🔽🔽🔽 with 1 CohortTimeSlot, Cohort with ending_date in the future, it's daily """ - @patch.object(actions, 'export_event_to_eventbrite', MagicMock()) - @patch.object(logging.Logger, 'error', MagicMock()) - @patch.object(logging.Logger, 'debug', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=DATE)) - @patch('breathecode.events.models.LiveClass._get_hash', MagicMock(side_effect=[ - 'r1', - 'r2', - 'r3', - 'r4', - 'r5', - 'r6', - ])) + @patch.object(actions, "export_event_to_eventbrite", MagicMock()) + @patch.object(logging.Logger, "error", MagicMock()) + @patch.object(logging.Logger, "debug", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=DATE)) + @patch( + "breathecode.events.models.LiveClass._get_hash", + MagicMock( + side_effect=[ + "r1", + "r2", + "r3", + "r4", + "r5", + "r6", + ] + ), + ) def test_one_cohort_time_slot_with_ending_date_in_the_future__daily(self): base_date = DATE cohort = { - 'never_ends': False, - 'ending_date': base_date, - 'online_meeting_url': self.bc.fake.url(), - 'kickoff_date': base_date - timedelta(weeks=1), + "never_ends": False, + "ending_date": base_date, + "online_meeting_url": self.bc.fake.url(), + "kickoff_date": base_date - timedelta(weeks=1), } - starting_at = self.bc.datetime.to_datetime_integer('America/New_York', base_date - timedelta(weeks=2)) + starting_at = self.bc.datetime.to_datetime_integer("America/New_York", base_date - timedelta(weeks=2)) - ending_at = self.bc.datetime.to_datetime_integer('America/New_York', - base_date - timedelta(weeks=2) + timedelta(hours=3)) + ending_at = self.bc.datetime.to_datetime_integer( + "America/New_York", base_date - timedelta(weeks=2) + timedelta(hours=3) + ) cohort_time_slot = { - 'starting_at': starting_at, - 'ending_at': ending_at, - 'timezone': 'America/New_York', - 'recurrent': True, - 'recurrency_type': 'DAILY', - 'removed_at': None, + "starting_at": starting_at, + "ending_at": ending_at, + "timezone": "America/New_York", + "recurrent": True, + "recurrency_type": "DAILY", + "removed_at": None, } - with patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()): + with patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()): model = self.bc.database.create(cohort_time_slot=cohort_time_slot, cohort=cohort) 
build_live_classes_from_timeslot(1) - self.assertEqual(self.bc.database.list_of('admissions.CohortTimeSlot'), [ - self.bc.format.to_dict(model.cohort_time_slot), - ]) - self.assertEqual(self.bc.database.list_of('events.LiveClass'), [ - live_class_item({ - 'id': 1, - 'cohort_time_slot_id': 1, - 'hash': 'r1', - 'starting_at': datetime(2022, 12, 24, 9, 20, tzinfo=pytz.UTC), - 'ending_at': datetime(2022, 12, 24, 12, 20, tzinfo=pytz.UTC), - 'remote_meeting_url': model.cohort.online_meeting_url, - }), - live_class_item({ - 'id': 2, - 'cohort_time_slot_id': 1, - 'hash': 'r2', - 'starting_at': datetime(2022, 12, 25, 9, 20, tzinfo=pytz.UTC), - 'ending_at': datetime(2022, 12, 25, 12, 20, tzinfo=pytz.UTC), - 'remote_meeting_url': model.cohort.online_meeting_url, - }), - live_class_item({ - 'id': 3, - 'cohort_time_slot_id': 1, - 'hash': 'r3', - 'starting_at': datetime(2022, 12, 26, 9, 20, tzinfo=pytz.UTC), - 'ending_at': datetime(2022, 12, 26, 12, 20, tzinfo=pytz.UTC), - 'remote_meeting_url': model.cohort.online_meeting_url, - }), - live_class_item({ - 'id': 4, - 'cohort_time_slot_id': 1, - 'hash': 'r4', - 'starting_at': datetime(2022, 12, 27, 9, 20, tzinfo=pytz.UTC), - 'ending_at': datetime(2022, 12, 27, 12, 20, tzinfo=pytz.UTC), - 'remote_meeting_url': model.cohort.online_meeting_url, - }), - live_class_item({ - 'id': 5, - 'cohort_time_slot_id': 1, - 'hash': 'r5', - 'starting_at': datetime(2022, 12, 28, 9, 20, tzinfo=pytz.UTC), - 'ending_at': datetime(2022, 12, 28, 12, 20, tzinfo=pytz.UTC), - 'remote_meeting_url': model.cohort.online_meeting_url, - }), - live_class_item({ - 'id': 6, - 'cohort_time_slot_id': 1, - 'hash': 'r6', - 'starting_at': datetime(2022, 12, 29, 9, 20, tzinfo=pytz.UTC), - 'ending_at': datetime(2022, 12, 29, 12, 20, tzinfo=pytz.UTC), - 'remote_meeting_url': model.cohort.online_meeting_url, - }), - ]) + self.assertEqual( + self.bc.database.list_of("admissions.CohortTimeSlot"), + [ + self.bc.format.to_dict(model.cohort_time_slot), + ], + ) + self.assertEqual( + self.bc.database.list_of("events.LiveClass"), + [ + live_class_item( + { + "id": 1, + "cohort_time_slot_id": 1, + "hash": "r1", + "starting_at": datetime(2022, 12, 24, 9, 20, tzinfo=pytz.UTC), + "ending_at": datetime(2022, 12, 24, 12, 20, tzinfo=pytz.UTC), + "remote_meeting_url": model.cohort.online_meeting_url, + } + ), + live_class_item( + { + "id": 2, + "cohort_time_slot_id": 1, + "hash": "r2", + "starting_at": datetime(2022, 12, 25, 9, 20, tzinfo=pytz.UTC), + "ending_at": datetime(2022, 12, 25, 12, 20, tzinfo=pytz.UTC), + "remote_meeting_url": model.cohort.online_meeting_url, + } + ), + live_class_item( + { + "id": 3, + "cohort_time_slot_id": 1, + "hash": "r3", + "starting_at": datetime(2022, 12, 26, 9, 20, tzinfo=pytz.UTC), + "ending_at": datetime(2022, 12, 26, 12, 20, tzinfo=pytz.UTC), + "remote_meeting_url": model.cohort.online_meeting_url, + } + ), + live_class_item( + { + "id": 4, + "cohort_time_slot_id": 1, + "hash": "r4", + "starting_at": datetime(2022, 12, 27, 9, 20, tzinfo=pytz.UTC), + "ending_at": datetime(2022, 12, 27, 12, 20, tzinfo=pytz.UTC), + "remote_meeting_url": model.cohort.online_meeting_url, + } + ), + live_class_item( + { + "id": 5, + "cohort_time_slot_id": 1, + "hash": "r5", + "starting_at": datetime(2022, 12, 28, 9, 20, tzinfo=pytz.UTC), + "ending_at": datetime(2022, 12, 28, 12, 20, tzinfo=pytz.UTC), + "remote_meeting_url": model.cohort.online_meeting_url, + } + ), + live_class_item( + { + "id": 6, + "cohort_time_slot_id": 1, + "hash": "r6", + "starting_at": datetime(2022, 12, 29, 9, 20, 
tzinfo=pytz.UTC), + "ending_at": datetime(2022, 12, 29, 12, 20, tzinfo=pytz.UTC), + "remote_meeting_url": model.cohort.online_meeting_url, + } + ), + ], + ) assert LiveClass._get_hash.call_args_list == [call() for _ in range(6)] diff --git a/breathecode/events/tests/tasks/tests_fix_live_class_dates.py b/breathecode/events/tests/tasks/tests_fix_live_class_dates.py index 42e0254b3..271a4d9d2 100644 --- a/breathecode/events/tests/tasks/tests_fix_live_class_dates.py +++ b/breathecode/events/tests/tasks/tests_fix_live_class_dates.py @@ -20,15 +20,15 @@ def live_class_item(data={}): return { - 'id': 0, - 'cohort_time_slot_id': 0, - 'log': {}, - 'remote_meeting_url': '', - 'hash': '', - 'started_at': None, - 'ended_at': None, - 'starting_at': UTC_NOW, - 'ending_at': UTC_NOW, + "id": 0, + "cohort_time_slot_id": 0, + "log": {}, + "remote_meeting_url": "", + "hash": "", + "started_at": None, + "ended_at": None, + "starting_at": UTC_NOW, + "ending_at": UTC_NOW, **data, } @@ -36,56 +36,60 @@ def live_class_item(data={}): class AcademyEventTestSuite(EventTestCase): # When: I call the task with 0 CohortTimeSlot # Then: I expect to receive an empty list of LiveClass - @patch.object(actions, 'export_event_to_eventbrite', MagicMock()) - @patch.object(logging.Logger, 'error', MagicMock()) - @patch.object(logging.Logger, 'debug', MagicMock()) - @patch('breathecode.admissions.signals.timeslot_saved.send_robust', MagicMock()) + @patch.object(actions, "export_event_to_eventbrite", MagicMock()) + @patch.object(logging.Logger, "error", MagicMock()) + @patch.object(logging.Logger, "debug", MagicMock()) + @patch("breathecode.admissions.signals.timeslot_saved.send_robust", MagicMock()) def test_0_items(self): fix_live_class_dates.delay(1) - self.assertEqual(self.bc.database.list_of('admissions.CohortTimeSlot'), []) - self.assertEqual(self.bc.database.list_of('events.LiveClass'), []) + self.assertEqual(self.bc.database.list_of("admissions.CohortTimeSlot"), []) + self.assertEqual(self.bc.database.list_of("events.LiveClass"), []) # When: I call the task with 1 CohortTimeSlot and Cohort ends in the past # Then: I expect to receive an empty list of LiveClass - @patch.object(actions, 'export_event_to_eventbrite', MagicMock()) - @patch.object(logging.Logger, 'error', MagicMock()) - @patch.object(logging.Logger, 'debug', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.admissions.signals.timeslot_saved.send_robust', MagicMock()) + @patch.object(actions, "export_event_to_eventbrite", MagicMock()) + @patch.object(logging.Logger, "error", MagicMock()) + @patch.object(logging.Logger, "debug", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.admissions.signals.timeslot_saved.send_robust", MagicMock()) def test_cohort_in_the_past(self): cohort = { - 'never_ends': False, - 'online_meeting_url': self.bc.fake.url(), - 'kickoff_date': UTC_NOW - timedelta(seconds=random.randint(1000, 1000000)), - 'ending_date': UTC_NOW - timedelta(seconds=random.randint(1, 1000)), + "never_ends": False, + "online_meeting_url": self.bc.fake.url(), + "kickoff_date": UTC_NOW - timedelta(seconds=random.randint(1000, 1000000)), + "ending_date": UTC_NOW - timedelta(seconds=random.randint(1, 1000)), } - starting_at = self.bc.datetime.to_datetime_integer('America/New_York', UTC_NOW - timedelta(weeks=3 * 4)) + starting_at = self.bc.datetime.to_datetime_integer("America/New_York", UTC_NOW - timedelta(weeks=3 * 4)) - ending_at = 
self.bc.datetime.to_datetime_integer('America/New_York', - UTC_NOW - timedelta(weeks=3 * 4) + timedelta(hours=3)) + ending_at = self.bc.datetime.to_datetime_integer( + "America/New_York", UTC_NOW - timedelta(weeks=3 * 4) + timedelta(hours=3) + ) cohort_time_slot = { - 'starting_at': starting_at, - 'ending_at': ending_at, - 'timezone': 'America/New_York', - 'recurrent': True, - 'recurrency_type': 'WEEKLY', - 'removed_at': None, + "starting_at": starting_at, + "ending_at": ending_at, + "timezone": "America/New_York", + "recurrent": True, + "recurrency_type": "WEEKLY", + "removed_at": None, } - with patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()): + with patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()): model = self.bc.database.create(cohort_time_slot=cohort_time_slot, cohort=cohort, live_class=2) fix_live_class_dates.delay(1) - self.assertEqual(self.bc.database.list_of('admissions.CohortTimeSlot'), [ - self.bc.format.to_dict(model.cohort_time_slot), - ]) + self.assertEqual( + self.bc.database.list_of("admissions.CohortTimeSlot"), + [ + self.bc.format.to_dict(model.cohort_time_slot), + ], + ) self.assertEqual( - self.bc.database.list_of('events.LiveClass'), + self.bc.database.list_of("events.LiveClass"), self.bc.format.to_dict(model.live_class), ) @@ -93,70 +97,77 @@ def test_cohort_in_the_past(self): 🔽🔽🔽 with 1 CohortTimeSlot, Cohort with ending_date in the future, it's weekly """ - @patch.object(actions, 'export_event_to_eventbrite', MagicMock()) - @patch.object(logging.Logger, 'error', MagicMock()) - @patch.object(logging.Logger, 'debug', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.admissions.signals.timeslot_saved.send_robust', MagicMock()) + @patch.object(actions, "export_event_to_eventbrite", MagicMock()) + @patch.object(logging.Logger, "error", MagicMock()) + @patch.object(logging.Logger, "debug", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.admissions.signals.timeslot_saved.send_robust", MagicMock()) def test_upcoming_cohort(self): cohort = { - 'never_ends': False, - 'online_meeting_url': self.bc.fake.url(), - 'kickoff_date': UTC_NOW - timedelta(weeks=2), - 'ending_date': UTC_NOW + timedelta(weeks=3) + timedelta(hours=3), + "never_ends": False, + "online_meeting_url": self.bc.fake.url(), + "kickoff_date": UTC_NOW - timedelta(weeks=2), + "ending_date": UTC_NOW + timedelta(weeks=3) + timedelta(hours=3), } - starting_at = self.bc.datetime.to_datetime_integer('America/New_York', UTC_NOW + timedelta(weeks=1)) + starting_at = self.bc.datetime.to_datetime_integer("America/New_York", UTC_NOW + timedelta(weeks=1)) - ending_at = self.bc.datetime.to_datetime_integer('America/New_York', - UTC_NOW + timedelta(weeks=1) + timedelta(hours=3)) + ending_at = self.bc.datetime.to_datetime_integer( + "America/New_York", UTC_NOW + timedelta(weeks=1) + timedelta(hours=3) + ) cohort_time_slot = { - 'starting_at': starting_at, - 'ending_at': ending_at, - 'timezone': 'America/New_York', - 'recurrent': True, - 'recurrency_type': 'WEEKLY', - 'removed_at': None, + "starting_at": starting_at, + "ending_at": ending_at, + "timezone": "America/New_York", + "recurrent": True, + "recurrency_type": "WEEKLY", + "removed_at": None, } live_classes = [ { - 'starting_at': UTC_NOW + timedelta(weeks=1), - 'ending_at': UTC_NOW + timedelta(weeks=1) + timedelta(hours=3), + "starting_at": UTC_NOW + timedelta(weeks=1), + "ending_at": UTC_NOW + 
timedelta(weeks=1) + timedelta(hours=3), }, { - 'starting_at': UTC_NOW + timedelta(weeks=2), - 'ending_at': UTC_NOW + timedelta(weeks=2) + timedelta(hours=3), + "starting_at": UTC_NOW + timedelta(weeks=2), + "ending_at": UTC_NOW + timedelta(weeks=2) + timedelta(hours=3), }, ] for key in range(2): if bool(random.randbytes(1)): - live_classes[key]['starting_at'] += timedelta(hours=1) - live_classes[key]['ending_at'] += timedelta(hours=1) + live_classes[key]["starting_at"] += timedelta(hours=1) + live_classes[key]["ending_at"] += timedelta(hours=1) else: - live_classes[key]['starting_at'] -= timedelta(hours=1) - live_classes[key]['ending_at'] -= timedelta(hours=1) + live_classes[key]["starting_at"] -= timedelta(hours=1) + live_classes[key]["ending_at"] -= timedelta(hours=1) - with patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()): + with patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()): model = self.bc.database.create(cohort_time_slot=cohort_time_slot, cohort=cohort, live_class=live_classes) fix_live_class_dates.delay(1) - self.assertEqual(self.bc.database.list_of('admissions.CohortTimeSlot'), [ - self.bc.format.to_dict(model.cohort_time_slot), - ]) + self.assertEqual( + self.bc.database.list_of("admissions.CohortTimeSlot"), + [ + self.bc.format.to_dict(model.cohort_time_slot), + ], + ) - self.assertEqual(self.bc.database.list_of('events.LiveClass'), [ - { - **self.bc.format.to_dict(model.live_class[0]), - 'ending_at': datetime(2023, 1, 6, 12, 20, tzinfo=pytz.UTC), - 'starting_at': datetime(2023, 1, 6, 9, 20, tzinfo=pytz.UTC), - }, - { - **self.bc.format.to_dict(model.live_class[1]), - 'ending_at': datetime(2023, 1, 13, 12, 20, tzinfo=pytz.UTC), - 'starting_at': datetime(2023, 1, 13, 9, 20, tzinfo=pytz.UTC), - }, - ]) + self.assertEqual( + self.bc.database.list_of("events.LiveClass"), + [ + { + **self.bc.format.to_dict(model.live_class[0]), + "ending_at": datetime(2023, 1, 6, 12, 20, tzinfo=pytz.UTC), + "starting_at": datetime(2023, 1, 6, 9, 20, tzinfo=pytz.UTC), + }, + { + **self.bc.format.to_dict(model.live_class[1]), + "ending_at": datetime(2023, 1, 13, 12, 20, tzinfo=pytz.UTC), + "starting_at": datetime(2023, 1, 13, 9, 20, tzinfo=pytz.UTC), + }, + ], + ) diff --git a/breathecode/events/tests/tasks/tests_mark_live_class_as_started.py b/breathecode/events/tests/tasks/tests_mark_live_class_as_started.py index 397e29fcc..8b8e9c14e 100644 --- a/breathecode/events/tests/tasks/tests_mark_live_class_as_started.py +++ b/breathecode/events/tests/tasks/tests_mark_live_class_as_started.py @@ -18,24 +18,24 @@ def setup(db): def live_class_item(data={}): return { - 'id': 0, - 'cohort_time_slot_id': 0, - 'log': {}, - 'remote_meeting_url': '', - 'hash': '', - 'started_at': None, - 'ended_at': None, - 'starting_at': UTC_NOW, - 'ending_at': UTC_NOW, + "id": 0, + "cohort_time_slot_id": 0, + "log": {}, + "remote_meeting_url": "", + "hash": "", + "started_at": None, + "ended_at": None, + "starting_at": UTC_NOW, + "ending_at": UTC_NOW, **data, } class AcademyEventTestSuite(EventTestCase): - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) def test_mark_live_class_as_started(self): model = self.bc.database.create(live_class=1) @@ -44,19 +44,25 @@ def test_mark_live_class_as_started(self): 
logging.Logger.info.call_args_list = [] tasks.mark_live_class_as_started(1) - self.assertEqual(logging.Logger.info.call_args_list, - [call(f'Starting mark live class {model.live_class.id} as started')]) + self.assertEqual( + logging.Logger.info.call_args_list, [call(f"Starting mark live class {model.live_class.id} as started")] + ) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(self.bc.database.list_of('events.LiveClass'), [ - live_class_item({ - **base_model, - 'started_at': UTC_NOW, - }), - ]) - - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) + self.assertEqual( + self.bc.database.list_of("events.LiveClass"), + [ + live_class_item( + { + **base_model, + "started_at": UTC_NOW, + } + ), + ], + ) + + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) def test_mark_live_class_as_started_with_wrong_live_class(self): model = self.bc.database.create(live_class=1) @@ -65,10 +71,15 @@ def test_mark_live_class_as_started_with_wrong_live_class(self): logging.Logger.info.call_args_list = [] tasks.mark_live_class_as_started(2) - self.assertEqual(logging.Logger.info.call_args_list, [call('Starting mark live class 2 as started')]) - self.assertEqual(logging.Logger.error.call_args_list, [call('Live Class 2 not fount')]) - self.assertEqual(self.bc.database.list_of('events.LiveClass'), [ - live_class_item({ - **base_model, - }), - ]) + self.assertEqual(logging.Logger.info.call_args_list, [call("Starting mark live class 2 as started")]) + self.assertEqual(logging.Logger.error.call_args_list, [call("Live Class 2 not fount")]) + self.assertEqual( + self.bc.database.list_of("events.LiveClass"), + [ + live_class_item( + { + **base_model, + } + ), + ], + ) diff --git a/breathecode/events/tests/urls/tests_academy_checkin.py b/breathecode/events/tests/urls/tests_academy_checkin.py index f1c0888cb..bbe0ab188 100644 --- a/breathecode/events/tests/urls/tests_academy_checkin.py +++ b/breathecode/events/tests/urls/tests_academy_checkin.py @@ -13,25 +13,25 @@ class AcademyEventTestSuite(EventTestCase): def test_academy_checkin_no_auth(self): self.headers(academy=1) - url = reverse_lazy('events:academy_checkin') + url = reverse_lazy("events:academy_checkin") response = self.client.get(url) json = response.json() - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} self.assertEqual(json, expected) self.assertEqual(response.status_code, 401) def test_academy_checkin_without_capability(self): self.headers(academy=1) - url = reverse_lazy('events:academy_checkin') + url = reverse_lazy("events:academy_checkin") self.generate_models(authenticate=True) response = self.client.get(url) json = response.json() expected = { - 'detail': "You (user: 1) don't have this capability: read_eventcheckin for academy 1", - 'status_code': 403 + "detail": "You (user: 1) don't have this capability: read_eventcheckin for academy 1", + "status_code": 403, } self.assertEqual(json, expected) @@ -39,11 +39,10 @@ def test_academy_checkin_without_capability(self): def test_academy_checkin_without_data(self): self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_eventcheckin', - role='potato') - url = 
reverse_lazy('events:academy_checkin') + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_eventcheckin", role="potato" + ) + url = reverse_lazy("events:academy_checkin") response = self.client.get(url) json = response.json() @@ -54,331 +53,345 @@ def test_academy_checkin_without_data(self): def test_academy_checkin_with_bad_academy(self): self.headers(academy=1) - base = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_eventcheckin', - role='potato') + base = self.generate_models( + authenticate=True, profile_academy=True, capability="read_eventcheckin", role="potato" + ) - event_kwargs = {'academy': base['academy']} + event_kwargs = {"academy": base["academy"]} model = self.generate_models(event_checkin=True, event_kwargs=event_kwargs, models=base) - url = reverse_lazy('events:academy_checkin') + url = reverse_lazy("events:academy_checkin") response = self.client.get(url) json = response.json() expected = [] self.assertEqual(json, expected) self.assertEqual(response.status_code, 200) - self.assertEqual(self.all_event_checkin_dict(), [{**self.model_to_dict(model, 'event_checkin')}]) + self.assertEqual(self.all_event_checkin_dict(), [{**self.model_to_dict(model, "event_checkin")}]) def test_academy_checkin__(self): self.headers(academy=1) - base = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_eventcheckin', - role='potato') - - event_kwargs = {'academy': base['academy']} - event_checkin_kwargs = {'attended_at': self.datetime_now()} - model = self.generate_models(event=True, - event_checkin=True, - event_kwargs=event_kwargs, - models=base, - event_checkin_kwargs=event_checkin_kwargs) - url = reverse_lazy('events:academy_checkin') + base = self.generate_models( + authenticate=True, profile_academy=True, capability="read_eventcheckin", role="potato" + ) + + event_kwargs = {"academy": base["academy"]} + event_checkin_kwargs = {"attended_at": self.datetime_now()} + model = self.generate_models( + event=True, + event_checkin=True, + event_kwargs=event_kwargs, + models=base, + event_checkin_kwargs=event_checkin_kwargs, + ) + url = reverse_lazy("events:academy_checkin") response = self.client.get(url) json = response.json() - expected = [{ - 'attendee': { - 'first_name': model['event_checkin'].attendee.first_name, - 'id': model['event_checkin'].attendee.id, - 'last_name': model['event_checkin'].attendee.last_name - }, - 'email': model['event_checkin'].email, - 'event': { - 'ending_at': self.datetime_to_iso(model['event_checkin'].event.ending_at), - 'event_type': model['event_checkin'].event.event_type, - 'id': model['event_checkin'].event.id, - 'starting_at': self.datetime_to_iso(model['event_checkin'].event.starting_at), - 'title': model['event_checkin'].event.title - }, - 'id': model['event_checkin'].id, - 'status': model['event_checkin'].status, - 'created_at': self.datetime_to_iso(model['event_checkin'].created_at), - 'attended_at': self.datetime_to_iso(model['event_checkin'].attended_at) - }] + expected = [ + { + "attendee": { + "first_name": model["event_checkin"].attendee.first_name, + "id": model["event_checkin"].attendee.id, + "last_name": model["event_checkin"].attendee.last_name, + }, + "email": model["event_checkin"].email, + "event": { + "ending_at": self.datetime_to_iso(model["event_checkin"].event.ending_at), + "event_type": model["event_checkin"].event.event_type, + "id": model["event_checkin"].event.id, + "starting_at": 
self.datetime_to_iso(model["event_checkin"].event.starting_at), + "title": model["event_checkin"].event.title, + }, + "id": model["event_checkin"].id, + "status": model["event_checkin"].status, + "created_at": self.datetime_to_iso(model["event_checkin"].created_at), + "attended_at": self.datetime_to_iso(model["event_checkin"].attended_at), + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, 200) - self.assertEqual(self.all_event_checkin_dict(), [{**self.model_to_dict(model, 'event_checkin')}]) + self.assertEqual(self.all_event_checkin_dict(), [{**self.model_to_dict(model, "event_checkin")}]) def test_academy_checkin_with_bad_status(self): self.headers(academy=1) - base = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_eventcheckin', - role='potato') + base = self.generate_models( + authenticate=True, profile_academy=True, capability="read_eventcheckin", role="potato" + ) - event_kwargs = {'academy': base['academy']} + event_kwargs = {"academy": base["academy"]} model = self.generate_models(event=True, event_checkin=True, event_kwargs=event_kwargs, models=base) - url = reverse_lazy('events:academy_checkin') + '?status=DONE' + url = reverse_lazy("events:academy_checkin") + "?status=DONE" response = self.client.get(url) json = response.json() expected = [] self.assertEqual(json, expected) self.assertEqual(response.status_code, 200) - self.assertEqual(self.all_event_checkin_dict(), [{**self.model_to_dict(model, 'event_checkin')}]) + self.assertEqual(self.all_event_checkin_dict(), [{**self.model_to_dict(model, "event_checkin")}]) def test_academy_checkin_with_status(self): self.headers(academy=1) - base = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_eventcheckin', - role='potato') - - event_kwargs = {'academy': base['academy']} - event_checkin_kwargs = {'attended_at': self.datetime_now()} - model = self.generate_models(event=True, - event_checkin=True, - event_kwargs=event_kwargs, - models=base, - event_checkin_kwargs=event_checkin_kwargs) - url = reverse_lazy('events:academy_checkin') + '?status=PENDING' + base = self.generate_models( + authenticate=True, profile_academy=True, capability="read_eventcheckin", role="potato" + ) + + event_kwargs = {"academy": base["academy"]} + event_checkin_kwargs = {"attended_at": self.datetime_now()} + model = self.generate_models( + event=True, + event_checkin=True, + event_kwargs=event_kwargs, + models=base, + event_checkin_kwargs=event_checkin_kwargs, + ) + url = reverse_lazy("events:academy_checkin") + "?status=PENDING" response = self.client.get(url) json = response.json() - expected = [{ - 'attendee': { - 'first_name': model['event_checkin'].attendee.first_name, - 'id': model['event_checkin'].attendee.id, - 'last_name': model['event_checkin'].attendee.last_name - }, - 'email': model['event_checkin'].email, - 'event': { - 'ending_at': self.datetime_to_iso(model['event_checkin'].event.ending_at), - 'event_type': model['event_checkin'].event.event_type, - 'id': model['event_checkin'].event.id, - 'starting_at': self.datetime_to_iso(model['event_checkin'].event.starting_at), - 'title': model['event_checkin'].event.title - }, - 'id': model['event_checkin'].id, - 'status': model['event_checkin'].status, - 'created_at': self.datetime_to_iso(model['event_checkin'].created_at), - 'attended_at': self.datetime_to_iso(model['event_checkin'].attended_at) - }] + expected = [ + { + "attendee": { + "first_name": model["event_checkin"].attendee.first_name, + "id": 
model["event_checkin"].attendee.id, + "last_name": model["event_checkin"].attendee.last_name, + }, + "email": model["event_checkin"].email, + "event": { + "ending_at": self.datetime_to_iso(model["event_checkin"].event.ending_at), + "event_type": model["event_checkin"].event.event_type, + "id": model["event_checkin"].event.id, + "starting_at": self.datetime_to_iso(model["event_checkin"].event.starting_at), + "title": model["event_checkin"].event.title, + }, + "id": model["event_checkin"].id, + "status": model["event_checkin"].status, + "created_at": self.datetime_to_iso(model["event_checkin"].created_at), + "attended_at": self.datetime_to_iso(model["event_checkin"].attended_at), + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, 200) - self.assertEqual(self.all_event_checkin_dict(), [{**self.model_to_dict(model, 'event_checkin')}]) + self.assertEqual(self.all_event_checkin_dict(), [{**self.model_to_dict(model, "event_checkin")}]) def test_academy_checkin_with_bad_event(self): self.headers(academy=1) - base = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_eventcheckin', - role='potato') + base = self.generate_models( + authenticate=True, profile_academy=True, capability="read_eventcheckin", role="potato" + ) - event_kwargs = {'academy': base['academy']} + event_kwargs = {"academy": base["academy"]} model = self.generate_models(event=True, event_checkin=True, event_kwargs=event_kwargs, models=base) - url = reverse_lazy('events:academy_checkin') + '?event=2' + url = reverse_lazy("events:academy_checkin") + "?event=2" response = self.client.get(url) json = response.json() expected = [] self.assertEqual(json, expected) self.assertEqual(response.status_code, 200) - self.assertEqual(self.all_event_checkin_dict(), [{**self.model_to_dict(model, 'event_checkin')}]) + self.assertEqual(self.all_event_checkin_dict(), [{**self.model_to_dict(model, "event_checkin")}]) def test_academy_checkin_with_event(self): self.headers(academy=1) - base = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_eventcheckin', - role='potato') - - event_kwargs = {'academy': base['academy']} - event_checkin_kwargs = {'attended_at': self.datetime_now()} - model = self.generate_models(event=True, - event_checkin=True, - event_kwargs=event_kwargs, - models=base, - event_checkin_kwargs=event_checkin_kwargs) - url = reverse_lazy('events:academy_checkin') + '?event=1' + base = self.generate_models( + authenticate=True, profile_academy=True, capability="read_eventcheckin", role="potato" + ) + + event_kwargs = {"academy": base["academy"]} + event_checkin_kwargs = {"attended_at": self.datetime_now()} + model = self.generate_models( + event=True, + event_checkin=True, + event_kwargs=event_kwargs, + models=base, + event_checkin_kwargs=event_checkin_kwargs, + ) + url = reverse_lazy("events:academy_checkin") + "?event=1" response = self.client.get(url) json = response.json() - expected = [{ - 'attendee': { - 'first_name': model['event_checkin'].attendee.first_name, - 'id': model['event_checkin'].attendee.id, - 'last_name': model['event_checkin'].attendee.last_name - }, - 'email': model['event_checkin'].email, - 'event': { - 'ending_at': self.datetime_to_iso(model['event_checkin'].event.ending_at), - 'event_type': model['event_checkin'].event.event_type, - 'id': model['event_checkin'].event.id, - 'starting_at': self.datetime_to_iso(model['event_checkin'].event.starting_at), - 'title': model['event_checkin'].event.title - }, - 'id': 
model['event_checkin'].id, - 'status': model['event_checkin'].status, - 'created_at': self.datetime_to_iso(model['event_checkin'].created_at), - 'attended_at': self.datetime_to_iso(model['event_checkin'].attended_at) - }] + expected = [ + { + "attendee": { + "first_name": model["event_checkin"].attendee.first_name, + "id": model["event_checkin"].attendee.id, + "last_name": model["event_checkin"].attendee.last_name, + }, + "email": model["event_checkin"].email, + "event": { + "ending_at": self.datetime_to_iso(model["event_checkin"].event.ending_at), + "event_type": model["event_checkin"].event.event_type, + "id": model["event_checkin"].event.id, + "starting_at": self.datetime_to_iso(model["event_checkin"].event.starting_at), + "title": model["event_checkin"].event.title, + }, + "id": model["event_checkin"].id, + "status": model["event_checkin"].status, + "created_at": self.datetime_to_iso(model["event_checkin"].created_at), + "attended_at": self.datetime_to_iso(model["event_checkin"].attended_at), + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, 200) - self.assertEqual(self.all_event_checkin_dict(), [{**self.model_to_dict(model, 'event_checkin')}]) + self.assertEqual(self.all_event_checkin_dict(), [{**self.model_to_dict(model, "event_checkin")}]) def test_academy_checkin_with_bad_start(self): self.headers(academy=1) - base = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_eventcheckin', - role='potato') + base = self.generate_models( + authenticate=True, profile_academy=True, capability="read_eventcheckin", role="potato" + ) - event_kwargs = {'academy': base['academy']} + event_kwargs = {"academy": base["academy"]} model = self.generate_models(event=True, event_checkin=True, event_kwargs=event_kwargs, models=base) - url = reverse_lazy('events:academy_checkin') + '?start=3000-01-01' + url = reverse_lazy("events:academy_checkin") + "?start=3000-01-01" response = self.client.get(url) json = response.json() expected = [] self.assertEqual(json, expected) self.assertEqual(response.status_code, 200) - self.assertEqual(self.all_event_checkin_dict(), [{**self.model_to_dict(model, 'event_checkin')}]) + self.assertEqual(self.all_event_checkin_dict(), [{**self.model_to_dict(model, "event_checkin")}]) def test_academy_checkin_with_start(self): self.headers(academy=1) - base = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_eventcheckin', - role='potato') - - event_kwargs = {'academy': base['academy']} - event_checkin_kwargs = {'attended_at': self.datetime_now()} - model = self.generate_models(event=True, - event_checkin=True, - event_kwargs=event_kwargs, - models=base, - event_checkin_kwargs=event_checkin_kwargs) - date = model['event_checkin'].created_at - url = (reverse_lazy('events:academy_checkin') + f'?start={date.year}-{date.month}-{date.day}') + base = self.generate_models( + authenticate=True, profile_academy=True, capability="read_eventcheckin", role="potato" + ) + + event_kwargs = {"academy": base["academy"]} + event_checkin_kwargs = {"attended_at": self.datetime_now()} + model = self.generate_models( + event=True, + event_checkin=True, + event_kwargs=event_kwargs, + models=base, + event_checkin_kwargs=event_checkin_kwargs, + ) + date = model["event_checkin"].created_at + url = reverse_lazy("events:academy_checkin") + f"?start={date.year}-{date.month}-{date.day}" response = self.client.get(url) json = response.json() - expected = [{ - 'attendee': { - 'first_name': 
model['event_checkin'].attendee.first_name, - 'id': model['event_checkin'].attendee.id, - 'last_name': model['event_checkin'].attendee.last_name - }, - 'email': model['event_checkin'].email, - 'event': { - 'ending_at': self.datetime_to_iso(model['event_checkin'].event.ending_at), - 'event_type': model['event_checkin'].event.event_type, - 'id': model['event_checkin'].event.id, - 'starting_at': self.datetime_to_iso(model['event_checkin'].event.starting_at), - 'title': model['event_checkin'].event.title - }, - 'id': model['event_checkin'].id, - 'status': model['event_checkin'].status, - 'created_at': self.datetime_to_iso(model['event_checkin'].created_at), - 'attended_at': self.datetime_to_iso(model['event_checkin'].attended_at) - }] + expected = [ + { + "attendee": { + "first_name": model["event_checkin"].attendee.first_name, + "id": model["event_checkin"].attendee.id, + "last_name": model["event_checkin"].attendee.last_name, + }, + "email": model["event_checkin"].email, + "event": { + "ending_at": self.datetime_to_iso(model["event_checkin"].event.ending_at), + "event_type": model["event_checkin"].event.event_type, + "id": model["event_checkin"].event.id, + "starting_at": self.datetime_to_iso(model["event_checkin"].event.starting_at), + "title": model["event_checkin"].event.title, + }, + "id": model["event_checkin"].id, + "status": model["event_checkin"].status, + "created_at": self.datetime_to_iso(model["event_checkin"].created_at), + "attended_at": self.datetime_to_iso(model["event_checkin"].attended_at), + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, 200) - self.assertEqual(self.all_event_checkin_dict(), [{**self.model_to_dict(model, 'event_checkin')}]) + self.assertEqual(self.all_event_checkin_dict(), [{**self.model_to_dict(model, "event_checkin")}]) def test_academy_checkin_with_bad_end(self): self.headers(academy=1) - base = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_eventcheckin', - role='potato') + base = self.generate_models( + authenticate=True, profile_academy=True, capability="read_eventcheckin", role="potato" + ) - event_kwargs = {'academy': base['academy']} + event_kwargs = {"academy": base["academy"]} model = self.generate_models(event=True, event_checkin=True, event_kwargs=event_kwargs, models=base) - url = reverse_lazy('events:academy_checkin') + '?end=1000-01-01' + url = reverse_lazy("events:academy_checkin") + "?end=1000-01-01" response = self.client.get(url) json = response.json() expected = [] self.assertEqual(json, expected) self.assertEqual(response.status_code, 200) - self.assertEqual(self.all_event_checkin_dict(), [{**self.model_to_dict(model, 'event_checkin')}]) + self.assertEqual(self.all_event_checkin_dict(), [{**self.model_to_dict(model, "event_checkin")}]) def test_academy_checkin_with_end(self): self.headers(academy=1) - base = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_eventcheckin', - role='potato') - - event_kwargs = {'academy': base['academy']} - event_checkin_kwargs = {'attended_at': self.datetime_now()} - model = self.generate_models(event=True, - event_checkin=True, - event_kwargs=event_kwargs, - models=base, - event_checkin_kwargs=event_checkin_kwargs) - date = model['event_checkin'].updated_at - url = (reverse_lazy('events:academy_checkin') + f'?end={date.year + 1}-{date.month}-{date.day}') + base = self.generate_models( + authenticate=True, profile_academy=True, capability="read_eventcheckin", role="potato" + ) + + event_kwargs = 
{"academy": base["academy"]} + event_checkin_kwargs = {"attended_at": self.datetime_now()} + model = self.generate_models( + event=True, + event_checkin=True, + event_kwargs=event_kwargs, + models=base, + event_checkin_kwargs=event_checkin_kwargs, + ) + date = model["event_checkin"].updated_at + url = reverse_lazy("events:academy_checkin") + f"?end={date.year + 1}-{date.month}-{date.day}" response = self.client.get(url) json = response.json() - expected = [{ - 'attendee': { - 'first_name': model['event_checkin'].attendee.first_name, - 'id': model['event_checkin'].attendee.id, - 'last_name': model['event_checkin'].attendee.last_name - }, - 'email': model['event_checkin'].email, - 'event': { - 'ending_at': self.datetime_to_iso(model['event_checkin'].event.ending_at), - 'event_type': model['event_checkin'].event.event_type, - 'id': model['event_checkin'].event.id, - 'starting_at': self.datetime_to_iso(model['event_checkin'].event.starting_at), - 'title': model['event_checkin'].event.title - }, - 'id': model['event_checkin'].id, - 'status': model['event_checkin'].status, - 'created_at': self.datetime_to_iso(model['event_checkin'].created_at), - 'attended_at': self.datetime_to_iso(model['event_checkin'].attended_at) - }] + expected = [ + { + "attendee": { + "first_name": model["event_checkin"].attendee.first_name, + "id": model["event_checkin"].attendee.id, + "last_name": model["event_checkin"].attendee.last_name, + }, + "email": model["event_checkin"].email, + "event": { + "ending_at": self.datetime_to_iso(model["event_checkin"].event.ending_at), + "event_type": model["event_checkin"].event.event_type, + "id": model["event_checkin"].event.id, + "starting_at": self.datetime_to_iso(model["event_checkin"].event.starting_at), + "title": model["event_checkin"].event.title, + }, + "id": model["event_checkin"].id, + "status": model["event_checkin"].status, + "created_at": self.datetime_to_iso(model["event_checkin"].created_at), + "attended_at": self.datetime_to_iso(model["event_checkin"].attended_at), + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, 200) - self.assertEqual(self.all_event_checkin_dict(), [{**self.model_to_dict(model, 'event_checkin')}]) + self.assertEqual(self.all_event_checkin_dict(), [{**self.model_to_dict(model, "event_checkin")}]) """ 🔽🔽🔽 Spy the extensions """ - @patch.object(APIViewExtensionHandlers, '_spy_extensions', MagicMock()) + @patch.object(APIViewExtensionHandlers, "_spy_extensions", MagicMock()) def test_academy_checkin__spy_extensions(self): self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_eventcheckin', - role='potato') - url = reverse_lazy('events:academy_checkin') + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_eventcheckin", role="potato" + ) + url = reverse_lazy("events:academy_checkin") self.client.get(url) - self.assertEqual(APIViewExtensionHandlers._spy_extensions.call_args_list, [ - call(['LanguageExtension', 'LookupExtension', 'PaginationExtension', 'SortExtension']), - ]) + self.assertEqual( + APIViewExtensionHandlers._spy_extensions.call_args_list, + [ + call(["LanguageExtension", "LookupExtension", "PaginationExtension", "SortExtension"]), + ], + ) - @patch.object(APIViewExtensionHandlers, '_spy_extension_arguments', MagicMock()) + @patch.object(APIViewExtensionHandlers, "_spy_extension_arguments", MagicMock()) def test_academy_checkin__spy_extension_arguments(self): self.headers(academy=1) - model = 
self.generate_models(authenticate=True, - profile_academy=True, - capability='read_eventcheckin', - role='potato') - url = reverse_lazy('events:academy_checkin') + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_eventcheckin", role="potato" + ) + url = reverse_lazy("events:academy_checkin") self.client.get(url) - self.assertEqual(APIViewExtensionHandlers._spy_extension_arguments.call_args_list, [ - call(sort='-created_at', paginate=True), - ]) + self.assertEqual( + APIViewExtensionHandlers._spy_extension_arguments.call_args_list, + [ + call(sort="-created_at", paginate=True), + ], + ) diff --git a/breathecode/events/tests/urls/tests_academy_event.py b/breathecode/events/tests/urls/tests_academy_event.py index 8da3e4f8c..943f0a1dc 100644 --- a/breathecode/events/tests/urls/tests_academy_event.py +++ b/breathecode/events/tests/urls/tests_academy_event.py @@ -17,84 +17,84 @@ def post_serializer(data={}): return { - 'tags': '', - 'url': '', - 'banner': '', - 'capacity': 0, - 'starting_at': ..., - 'ending_at': ..., - 'academy': 0, - 'author': None, - 'description': None, - 'free_for_bootcamps': None, - 'event_type': None, - 'eventbrite_id': None, - 'eventbrite_organizer_id': None, - 'eventbrite_status': None, - 'eventbrite_url': None, - 'slug': None, - 'excerpt': None, - 'host': None, - 'id': 0, - 'lang': None, - 'online_event': False, - 'live_stream_url': None, - 'free_for_bootcamps': True, - 'asset_slug': None, - 'ended_at': None, - 'organization': 0, - 'published_at': None, - 'status': 'DRAFT', - 'eventbrite_sync_description': None, - 'eventbrite_sync_status': 'PENDING', - 'title': None, - 'venue': None, - 'sync_with_eventbrite': False, - 'currency': 'USD', - 'live_stream_url': None, - 'host_user': None, + "tags": "", + "url": "", + "banner": "", + "capacity": 0, + "starting_at": ..., + "ending_at": ..., + "academy": 0, + "author": None, + "description": None, + "free_for_bootcamps": None, + "event_type": None, + "eventbrite_id": None, + "eventbrite_organizer_id": None, + "eventbrite_status": None, + "eventbrite_url": None, + "slug": None, + "excerpt": None, + "host": None, + "id": 0, + "lang": None, + "online_event": False, + "live_stream_url": None, + "free_for_bootcamps": True, + "asset_slug": None, + "ended_at": None, + "organization": 0, + "published_at": None, + "status": "DRAFT", + "eventbrite_sync_description": None, + "eventbrite_sync_status": "PENDING", + "title": None, + "venue": None, + "sync_with_eventbrite": False, + "currency": "USD", + "live_stream_url": None, + "host_user": None, **data, } def event_table(data={}): return { - 'academy_id': 0, - 'author_id': None, - 'banner': '', - 'capacity': 0, - 'description': None, - 'ending_at': ..., - 'event_type_id': None, - 'eventbrite_id': None, - 'eventbrite_organizer_id': None, - 'eventbrite_status': None, - 'eventbrite_url': None, - 'free_for_bootcamps': None, - 'excerpt': None, - 'tags': '', - 'slug': None, - 'host': None, - 'id': 0, - 'lang': None, - 'online_event': False, - 'live_stream_url': None, - 'free_for_bootcamps': True, - 'asset_slug': None, - 'organization_id': 0, - 'host_user_id': None, - 'published_at': None, - 'starting_at': ..., - 'status': 'DRAFT', - 'eventbrite_sync_description': None, - 'eventbrite_sync_status': '', - 'title': None, - 'ended_at': None, - 'url': '', - 'venue_id': None, - 'live_stream_url': None, - 'sync_with_eventbrite': False, - 'currency': '', + "academy_id": 0, + "author_id": None, + "banner": "", + "capacity": 0, + "description": None, + "ending_at": 
..., + "event_type_id": None, + "eventbrite_id": None, + "eventbrite_organizer_id": None, + "eventbrite_status": None, + "eventbrite_url": None, + "free_for_bootcamps": None, + "excerpt": None, + "tags": "", + "slug": None, + "host": None, + "id": 0, + "lang": None, + "online_event": False, + "live_stream_url": None, + "free_for_bootcamps": True, + "asset_slug": None, + "organization_id": 0, + "host_user_id": None, + "published_at": None, + "starting_at": ..., + "status": "DRAFT", + "eventbrite_sync_description": None, + "eventbrite_sync_status": "", + "title": None, + "ended_at": None, + "url": "", + "venue_id": None, + "live_stream_url": None, + "sync_with_eventbrite": False, + "currency": "", **data, } @@ -104,37 +104,39 @@ class AcademyEventTestSuite(EventTestCase): def test_all_academy_events_no_auth(self): self.headers(academy=1) - url = reverse_lazy('events:academy_event') + url = reverse_lazy("events:academy_event") response = self.client.get(url) json = response.json() - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} self.assertEqual(json, expected) self.assertEqual(response.status_code, 401) def test_all_academy_events_without_capability(self): self.headers(academy=1) - url = reverse_lazy('events:academy_event') + url = reverse_lazy("events:academy_event") self.generate_models(authenticate=True) response = self.client.get(url) json = response.json() - expected = {'detail': "You (user: 1) don't have this capability: read_event for academy 1", 'status_code': 403} + expected = {"detail": "You (user: 1) don't have this capability: read_event for academy 1", "status_code": 403} self.assertEqual(json, expected) self.assertEqual(response.status_code, 403) def test_all_academy_events_wrong_city(self): self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_event', - role='potato', - syllabus=True, - venue=True, - event=True) - url = reverse_lazy('events:academy_event') + '?city=patata' + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="read_event", + role="potato", + syllabus=True, + venue=True, + event=True, + ) + url = reverse_lazy("events:academy_event") + "?city=patata" response = self.client.get(url) json = response.json() @@ -145,63 +147,69 @@ def test_all_academy_events_wrong_city(self): def test_all_academy_events_correct_city(self): self.headers(academy=1) - venue_kwargs = {'city': 'santiago'} - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_event', - role='potato', - syllabus=True, - venue_kwargs=venue_kwargs, - venue=True, - event=True) - url = reverse_lazy('events:academy_event') + '?city=santiago' + venue_kwargs = {"city": "santiago"} + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="read_event", + role="potato", + syllabus=True, + venue_kwargs=venue_kwargs, + venue=True, + event=True, + ) + url = reverse_lazy("events:academy_event") + "?city=santiago" response = self.client.get(url) json = response.json() - expected = [{ - 'banner': model['event'].banner, - 'ending_at': datetime_to_iso_format(model['event'].ending_at), - 'event_type': model['event'].event_type, - 'excerpt': model['event'].excerpt, - 'tags': model['event'].tags, - 'slug': model['event'].slug, - 'id': model['event'].id, - 'lang': model['event'].lang, - 'online_event': 
model['event'].online_event, - 'starting_at': datetime_to_iso_format(model['event'].starting_at), - 'ended_at': model['event'].ended_at, - 'status': model['event'].status, - 'title': model['event'].title, - 'url': model['event'].url, - 'host': model['event'].host, - 'asset_slug': model['event'].asset_slug, - 'capacity': model['event'].capacity, - 'venue': { - 'city': model['event'].venue.city, - 'id': model['event'].id, - 'state': model['event'].venue.state, - 'street_address': model['event'].venue.street_address, - 'title': model['event'].venue.title, - 'zip_code': model['event'].venue.zip_code, - 'updated_at': self.bc.datetime.to_iso_string(model.venue.updated_at), - }, - 'sync_with_eventbrite': model['event'].sync_with_eventbrite, - 'eventbrite_sync_description': model['event'].eventbrite_sync_description, - 'eventbrite_sync_status': model['event'].eventbrite_sync_status, - }] + expected = [ + { + "banner": model["event"].banner, + "ending_at": datetime_to_iso_format(model["event"].ending_at), + "event_type": model["event"].event_type, + "excerpt": model["event"].excerpt, + "tags": model["event"].tags, + "slug": model["event"].slug, + "id": model["event"].id, + "lang": model["event"].lang, + "online_event": model["event"].online_event, + "starting_at": datetime_to_iso_format(model["event"].starting_at), + "ended_at": model["event"].ended_at, + "status": model["event"].status, + "title": model["event"].title, + "url": model["event"].url, + "host": model["event"].host, + "asset_slug": model["event"].asset_slug, + "capacity": model["event"].capacity, + "venue": { + "city": model["event"].venue.city, + "id": model["event"].id, + "state": model["event"].venue.state, + "street_address": model["event"].venue.street_address, + "title": model["event"].venue.title, + "zip_code": model["event"].venue.zip_code, + "updated_at": self.bc.datetime.to_iso_string(model.venue.updated_at), + }, + "sync_with_eventbrite": model["event"].sync_with_eventbrite, + "eventbrite_sync_description": model["event"].eventbrite_sync_description, + "eventbrite_sync_status": model["event"].eventbrite_sync_status, + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, 200) def test_all_academy_events_wrong_country(self): self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_event', - role='potato', - syllabus=True, - venue=True, - event=True) - url = reverse_lazy('events:academy_event') + '?country=patata' + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="read_event", + role="potato", + syllabus=True, + venue=True, + event=True, + ) + url = reverse_lazy("events:academy_event") + "?country=patata" response = self.client.get(url) json = response.json() @@ -212,63 +220,69 @@ def test_all_academy_events_wrong_country(self): def test_all_academy_events_correct_country(self): self.headers(academy=1) - venue_kwargs = {'country': 'chile'} - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_event', - role='potato', - syllabus=True, - venue_kwargs=venue_kwargs, - venue=True, - event=True) - url = reverse_lazy('events:academy_event') + '?country=chile' + venue_kwargs = {"country": "chile"} + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="read_event", + role="potato", + syllabus=True, + venue_kwargs=venue_kwargs, + venue=True, + event=True, + ) + url = reverse_lazy("events:academy_event") + "?country=chile" response = 
self.client.get(url) json = response.json() - expected = [{ - 'banner': model['event'].banner, - 'ending_at': datetime_to_iso_format(model['event'].ending_at), - 'event_type': model['event'].event_type, - 'excerpt': model['event'].excerpt, - 'tags': model['event'].tags, - 'slug': model['event'].slug, - 'id': model['event'].id, - 'lang': model['event'].lang, - 'online_event': model['event'].online_event, - 'starting_at': datetime_to_iso_format(model['event'].starting_at), - 'ended_at': model['event'].ended_at, - 'status': model['event'].status, - 'title': model['event'].title, - 'url': model['event'].url, - 'host': model['event'].host, - 'asset_slug': model['event'].asset_slug, - 'capacity': model['event'].capacity, - 'venue': { - 'city': model['event'].venue.city, - 'id': model['event'].id, - 'state': model['event'].venue.state, - 'street_address': model['event'].venue.street_address, - 'title': model['event'].venue.title, - 'zip_code': model['event'].venue.zip_code, - 'updated_at': self.bc.datetime.to_iso_string(model.venue.updated_at), - }, - 'sync_with_eventbrite': model['event'].sync_with_eventbrite, - 'eventbrite_sync_description': model['event'].eventbrite_sync_description, - 'eventbrite_sync_status': model['event'].eventbrite_sync_status, - }] + expected = [ + { + "banner": model["event"].banner, + "ending_at": datetime_to_iso_format(model["event"].ending_at), + "event_type": model["event"].event_type, + "excerpt": model["event"].excerpt, + "tags": model["event"].tags, + "slug": model["event"].slug, + "id": model["event"].id, + "lang": model["event"].lang, + "online_event": model["event"].online_event, + "starting_at": datetime_to_iso_format(model["event"].starting_at), + "ended_at": model["event"].ended_at, + "status": model["event"].status, + "title": model["event"].title, + "url": model["event"].url, + "host": model["event"].host, + "asset_slug": model["event"].asset_slug, + "capacity": model["event"].capacity, + "venue": { + "city": model["event"].venue.city, + "id": model["event"].id, + "state": model["event"].venue.state, + "street_address": model["event"].venue.street_address, + "title": model["event"].venue.title, + "zip_code": model["event"].venue.zip_code, + "updated_at": self.bc.datetime.to_iso_string(model.venue.updated_at), + }, + "sync_with_eventbrite": model["event"].sync_with_eventbrite, + "eventbrite_sync_description": model["event"].eventbrite_sync_description, + "eventbrite_sync_status": model["event"].eventbrite_sync_status, + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, 200) def test_all_academy_events_wrong_zip_code(self): self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_event', - role='potato', - syllabus=True, - venue=True, - event=True) - url = reverse_lazy('events:academy_event') + '?zip_code=12345678965412' + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="read_event", + role="potato", + syllabus=True, + venue=True, + event=True, + ) + url = reverse_lazy("events:academy_event") + "?zip_code=12345678965412" response = self.client.get(url) json = response.json() @@ -279,111 +293,117 @@ def test_all_academy_events_wrong_zip_code(self): def test_all_academy_events_correct_zip_code(self): self.headers(academy=1) - venue_kwargs = {'zip_code': '33178'} - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_event', - role='potato', - syllabus=True, - venue_kwargs=venue_kwargs, - 
venue=True, - event=True) - url = reverse_lazy('events:academy_event') + '?zip_code=33178' + venue_kwargs = {"zip_code": "33178"} + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="read_event", + role="potato", + syllabus=True, + venue_kwargs=venue_kwargs, + venue=True, + event=True, + ) + url = reverse_lazy("events:academy_event") + "?zip_code=33178" response = self.client.get(url) json = response.json() - expected = [{ - 'banner': model['event'].banner, - 'ending_at': datetime_to_iso_format(model['event'].ending_at), - 'event_type': model['event'].event_type, - 'excerpt': model['event'].excerpt, - 'tags': model['event'].tags, - 'slug': model['event'].slug, - 'id': model['event'].id, - 'lang': model['event'].lang, - 'online_event': model['event'].online_event, - 'starting_at': datetime_to_iso_format(model['event'].starting_at), - 'ended_at': model['event'].ended_at, - 'status': model['event'].status, - 'title': model['event'].title, - 'url': model['event'].url, - 'host': model['event'].host, - 'asset_slug': model['event'].asset_slug, - 'capacity': model['event'].capacity, - 'venue': { - 'city': model['event'].venue.city, - 'id': model['event'].id, - 'state': model['event'].venue.state, - 'street_address': model['event'].venue.street_address, - 'title': model['event'].venue.title, - 'zip_code': model['event'].venue.zip_code, - 'updated_at': self.bc.datetime.to_iso_string(model.venue.updated_at), - }, - 'sync_with_eventbrite': model['event'].sync_with_eventbrite, - 'eventbrite_sync_description': model['event'].eventbrite_sync_description, - 'eventbrite_sync_status': model['event'].eventbrite_sync_status, - }] + expected = [ + { + "banner": model["event"].banner, + "ending_at": datetime_to_iso_format(model["event"].ending_at), + "event_type": model["event"].event_type, + "excerpt": model["event"].excerpt, + "tags": model["event"].tags, + "slug": model["event"].slug, + "id": model["event"].id, + "lang": model["event"].lang, + "online_event": model["event"].online_event, + "starting_at": datetime_to_iso_format(model["event"].starting_at), + "ended_at": model["event"].ended_at, + "status": model["event"].status, + "title": model["event"].title, + "url": model["event"].url, + "host": model["event"].host, + "asset_slug": model["event"].asset_slug, + "capacity": model["event"].capacity, + "venue": { + "city": model["event"].venue.city, + "id": model["event"].id, + "state": model["event"].venue.state, + "street_address": model["event"].venue.street_address, + "title": model["event"].venue.title, + "zip_code": model["event"].venue.zip_code, + "updated_at": self.bc.datetime.to_iso_string(model.venue.updated_at), + }, + "sync_with_eventbrite": model["event"].sync_with_eventbrite, + "eventbrite_sync_description": model["event"].eventbrite_sync_description, + "eventbrite_sync_status": model["event"].eventbrite_sync_status, + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, 200) def test_all_academy_events_upcoming(self): self.headers(academy=1) - event_kwargs = {'starting_at': timezone.now()} - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_event', - role='potato', - syllabus=True, - venue=True, - event=True, - event_kwargs=event_kwargs) - url = reverse_lazy('events:academy_event') + '?past=true' + event_kwargs = {"starting_at": timezone.now()} + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="read_event", + role="potato", + syllabus=True, + 
venue=True, + event=True, + event_kwargs=event_kwargs, + ) + url = reverse_lazy("events:academy_event") + "?past=true" response = self.client.get(url) json = response.json() - expected = [{ - 'banner': model['event'].banner, - 'ending_at': datetime_to_iso_format(model['event'].ending_at), - 'event_type': model['event'].event_type, - 'excerpt': model['event'].excerpt, - 'tags': model['event'].tags, - 'slug': model['event'].slug, - 'id': model['event'].id, - 'lang': model['event'].lang, - 'online_event': model['event'].online_event, - 'starting_at': datetime_to_iso_format(model['event'].starting_at), - 'ended_at': model['event'].ended_at, - 'status': model['event'].status, - 'title': model['event'].title, - 'url': model['event'].url, - 'host': model['event'].host, - 'asset_slug': model['event'].asset_slug, - 'capacity': model['event'].capacity, - 'venue': { - 'city': model['event'].venue.city, - 'id': model['event'].id, - 'state': model['event'].venue.state, - 'street_address': model['event'].venue.street_address, - 'title': model['event'].venue.title, - 'zip_code': model['event'].venue.zip_code, - 'updated_at': self.bc.datetime.to_iso_string(model.venue.updated_at), - }, - 'sync_with_eventbrite': model['event'].sync_with_eventbrite, - 'eventbrite_sync_description': model['event'].eventbrite_sync_description, - 'eventbrite_sync_status': model['event'].eventbrite_sync_status, - }] + expected = [ + { + "banner": model["event"].banner, + "ending_at": datetime_to_iso_format(model["event"].ending_at), + "event_type": model["event"].event_type, + "excerpt": model["event"].excerpt, + "tags": model["event"].tags, + "slug": model["event"].slug, + "id": model["event"].id, + "lang": model["event"].lang, + "online_event": model["event"].online_event, + "starting_at": datetime_to_iso_format(model["event"].starting_at), + "ended_at": model["event"].ended_at, + "status": model["event"].status, + "title": model["event"].title, + "url": model["event"].url, + "host": model["event"].host, + "asset_slug": model["event"].asset_slug, + "capacity": model["event"].capacity, + "venue": { + "city": model["event"].venue.city, + "id": model["event"].id, + "state": model["event"].venue.state, + "street_address": model["event"].venue.street_address, + "title": model["event"].venue.title, + "zip_code": model["event"].venue.zip_code, + "updated_at": self.bc.datetime.to_iso_string(model.venue.updated_at), + }, + "sync_with_eventbrite": model["event"].sync_with_eventbrite, + "eventbrite_sync_description": model["event"].eventbrite_sync_description, + "eventbrite_sync_status": model["event"].eventbrite_sync_status, + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, 200) def test_all_academy_events_not_found(self): self.headers(academy=1) - url = reverse_lazy('events:academy_event') - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_event', - role='potato', - syllabus=True) + url = reverse_lazy("events:academy_event") + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_event", role="potato", syllabus=True + ) response = self.client.get(url) json = response.json() @@ -398,22 +418,22 @@ def test_all_academy_events(self): def test_all_academy_events__post__no_required_fields(self): self.headers(academy=1) - model = self.generate_models(authenticate=True, profile_academy=True, capability='crud_event', role='potato') + model = self.generate_models(authenticate=True, profile_academy=True, capability="crud_event", 
role="potato") - url = reverse_lazy('events:academy_event') + url = reverse_lazy("events:academy_event") data = {} response = self.client.post(url, data) json = response.json() expected = { - 'banner': ['This field is required.'], - 'capacity': ['This field is required.'], - 'ending_at': ['This field is required.'], - 'starting_at': ['This field is required.'], + "banner": ["This field is required."], + "capacity": ["This field is required."], + "ending_at": ["This field is required."], + "starting_at": ["This field is required."], } self.assertEqual(json, expected) - self.assertEqual(self.bc.database.list_of('events.Event'), []) + self.assertEqual(self.bc.database.list_of("events.Event"), []) """ 🔽🔽🔽 Post - bad tags @@ -422,292 +442,283 @@ def test_all_academy_events__post__no_required_fields(self): def test_all_academy_events__post__bad_tags__two_commas(self): self.headers(academy=1) - model = self.generate_models(authenticate=True, - organization=True, - profile_academy=True, - capability='crud_event', - role='potato') + model = self.generate_models( + authenticate=True, organization=True, profile_academy=True, capability="crud_event", role="potato" + ) - url = reverse_lazy('events:academy_event') + url = reverse_lazy("events:academy_event") current_date = self.datetime_now() data = { - 'tags': ',,', - 'url': 'https://www.google.com/', - 'banner': 'https://www.google.com/banner', - 'capacity': 11, - 'starting_at': self.datetime_to_iso(current_date), - 'ending_at': self.datetime_to_iso(current_date), + "tags": ",,", + "url": "https://www.google.com/", + "banner": "https://www.google.com/banner", + "capacity": 11, + "starting_at": self.datetime_to_iso(current_date), + "ending_at": self.datetime_to_iso(current_date), } response = self.client.post(url, data) json = response.json() - expected = {'detail': 'two-commas-together', 'status_code': 400} + expected = {"detail": "two-commas-together", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, 400) - self.assertEqual(self.bc.database.list_of('events.Event'), []) + self.assertEqual(self.bc.database.list_of("events.Event"), []) def test_all_academy_events__post__bad_tags__with_spaces(self): self.headers(academy=1) - model = self.generate_models(authenticate=True, - organization=True, - profile_academy=True, - capability='crud_event', - role='potato') + model = self.generate_models( + authenticate=True, organization=True, profile_academy=True, capability="crud_event", role="potato" + ) - url = reverse_lazy('events:academy_event') + url = reverse_lazy("events:academy_event") current_date = self.datetime_now() data = { - 'tags': ' expecto-patronum sirius-black ', - 'url': 'https://www.google.com/', - 'banner': 'https://www.google.com/banner', - 'capacity': 11, - 'starting_at': self.datetime_to_iso(current_date), - 'ending_at': self.datetime_to_iso(current_date), + "tags": " expecto-patronum sirius-black ", + "url": "https://www.google.com/", + "banner": "https://www.google.com/banner", + "capacity": 11, + "starting_at": self.datetime_to_iso(current_date), + "ending_at": self.datetime_to_iso(current_date), } response = self.client.post(url, data) json = response.json() - expected = {'detail': 'spaces-are-not-allowed', 'status_code': 400} + expected = {"detail": "spaces-are-not-allowed", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, 400) - self.assertEqual(self.bc.database.list_of('events.Event'), []) + self.assertEqual(self.bc.database.list_of("events.Event"), 
[]) def test_all_academy_events__post__bad_tags__starts_with_comma(self): self.headers(academy=1) - model = self.generate_models(authenticate=True, - organization=True, - profile_academy=True, - capability='crud_event', - role='potato') + model = self.generate_models( + authenticate=True, organization=True, profile_academy=True, capability="crud_event", role="potato" + ) - url = reverse_lazy('events:academy_event') + url = reverse_lazy("events:academy_event") current_date = self.datetime_now() data = { - 'tags': ',expecto-patronum', - 'url': 'https://www.google.com/', - 'banner': 'https://www.google.com/banner', - 'capacity': 11, - 'starting_at': self.datetime_to_iso(current_date), - 'ending_at': self.datetime_to_iso(current_date), + "tags": ",expecto-patronum", + "url": "https://www.google.com/", + "banner": "https://www.google.com/banner", + "capacity": 11, + "starting_at": self.datetime_to_iso(current_date), + "ending_at": self.datetime_to_iso(current_date), } response = self.client.post(url, data) json = response.json() - expected = {'detail': 'starts-with-comma', 'status_code': 400} + expected = {"detail": "starts-with-comma", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, 400) - self.assertEqual(self.bc.database.list_of('events.Event'), []) + self.assertEqual(self.bc.database.list_of("events.Event"), []) def test_all_academy_events__post__bad_tags__ends_with_comma(self): self.headers(academy=1) - model = self.generate_models(authenticate=True, - organization=True, - profile_academy=True, - capability='crud_event', - role='potato') + model = self.generate_models( + authenticate=True, organization=True, profile_academy=True, capability="crud_event", role="potato" + ) - url = reverse_lazy('events:academy_event') + url = reverse_lazy("events:academy_event") current_date = self.datetime_now() data = { - 'tags': 'expecto-patronum,', - 'url': 'https://www.google.com/', - 'banner': 'https://www.google.com/banner', - 'capacity': 11, - 'starting_at': self.datetime_to_iso(current_date), - 'ending_at': self.datetime_to_iso(current_date), + "tags": "expecto-patronum,", + "url": "https://www.google.com/", + "banner": "https://www.google.com/banner", + "capacity": 11, + "starting_at": self.datetime_to_iso(current_date), + "ending_at": self.datetime_to_iso(current_date), } response = self.client.post(url, data) json = response.json() - expected = {'detail': 'ends-with-comma', 'status_code': 400} + expected = {"detail": "ends-with-comma", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, 400) - self.assertEqual(self.bc.database.list_of('events.Event'), []) + self.assertEqual(self.bc.database.list_of("events.Event"), []) def test_all_academy_events__post__bad_tags__one_tag_not_exists(self): self.headers(academy=1) - model = self.generate_models(authenticate=True, - organization=True, - profile_academy=True, - capability='crud_event', - role='potato') + model = self.generate_models( + authenticate=True, organization=True, profile_academy=True, capability="crud_event", role="potato" + ) - url = reverse_lazy('events:academy_event') + url = reverse_lazy("events:academy_event") current_date = self.datetime_now() data = { - 'tags': 'expecto-patronum', - 'url': 'https://www.google.com/', - 'banner': 'https://www.google.com/banner', - 'capacity': 11, - 'starting_at': self.datetime_to_iso(current_date), - 'ending_at': self.datetime_to_iso(current_date), + "tags": "expecto-patronum", + "url": "https://www.google.com/", + 
"banner": "https://www.google.com/banner", + "capacity": 11, + "starting_at": self.datetime_to_iso(current_date), + "ending_at": self.datetime_to_iso(current_date), } response = self.client.post(url, data) json = response.json() - expected = {'detail': 'have-less-two-tags', 'status_code': 400} + expected = {"detail": "have-less-two-tags", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, 400) - self.assertEqual(self.bc.database.list_of('events.Event'), []) + self.assertEqual(self.bc.database.list_of("events.Event"), []) def test_all_academy_events__post__bad_tags__two_tags_not_exists(self): self.headers(academy=1) - model = self.generate_models(authenticate=True, - organization=True, - profile_academy=True, - capability='crud_event', - role='potato') + model = self.generate_models( + authenticate=True, organization=True, profile_academy=True, capability="crud_event", role="potato" + ) - url = reverse_lazy('events:academy_event') + url = reverse_lazy("events:academy_event") current_date = self.datetime_now() data = { - 'tags': 'expecto-patronum,wingardium-leviosa', - 'url': 'https://www.google.com/', - 'banner': 'https://www.google.com/banner', - 'capacity': 11, - 'starting_at': self.datetime_to_iso(current_date), - 'ending_at': self.datetime_to_iso(current_date), + "tags": "expecto-patronum,wingardium-leviosa", + "url": "https://www.google.com/", + "banner": "https://www.google.com/banner", + "capacity": 11, + "starting_at": self.datetime_to_iso(current_date), + "ending_at": self.datetime_to_iso(current_date), } response = self.client.post(url, data) json = response.json() - expected = {'detail': 'tag-not-exist', 'status_code': 400} + expected = {"detail": "tag-not-exist", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, 400) - self.assertEqual(self.bc.database.list_of('events.Event'), []) + self.assertEqual(self.bc.database.list_of("events.Event"), []) def test_all_academy_events__post__bad_tags__one_of_two_tags_not_exists(self): self.headers(academy=1) - model = self.generate_models(authenticate=True, - organization=True, - profile_academy=True, - tag=True, - capability='crud_event', - role='potato') + model = self.generate_models( + authenticate=True, organization=True, profile_academy=True, tag=True, capability="crud_event", role="potato" + ) - url = reverse_lazy('events:academy_event') + url = reverse_lazy("events:academy_event") current_date = self.datetime_now() data = { - 'tags': f'expecto-patronum,{model.tag.slug}', - 'url': 'https://www.google.com/', - 'banner': 'https://www.google.com/banner', - 'capacity': 11, - 'starting_at': self.datetime_to_iso(current_date), - 'ending_at': self.datetime_to_iso(current_date), + "tags": f"expecto-patronum,{model.tag.slug}", + "url": "https://www.google.com/", + "banner": "https://www.google.com/banner", + "capacity": 11, + "starting_at": self.datetime_to_iso(current_date), + "ending_at": self.datetime_to_iso(current_date), } response = self.client.post(url, data) json = response.json() - expected = {'detail': 'tag-not-exist', 'status_code': 400} + expected = {"detail": "tag-not-exist", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, 400) - self.assertEqual(self.bc.database.list_of('events.Event'), []) + self.assertEqual(self.bc.database.list_of("events.Event"), []) """ 🔽🔽🔽 Post bad slug """ - @patch('breathecode.events.signals.event_saved.send_robust', MagicMock()) - @patch('uuid.uuid4', 
PropertyMock(MagicMock=uuid)) - @patch('os.urandom', MagicMock(return_value=seed)) + @patch("breathecode.events.signals.event_saved.send_robust", MagicMock()) + @patch("uuid.uuid4", PropertyMock(MagicMock=uuid)) + @patch("os.urandom", MagicMock(return_value=seed)) def test_all_academy_events__post__bad_slug(self): self.headers(academy=1) lang = self.bc.fake.slug()[:2] - model = self.generate_models(authenticate=True, - organization=True, - profile_academy=True, - capability='crud_event', - tag=(2, { - 'tag_type': 'DISCOVERY' - }), - active_campaign_academy=True, - role='potato', - event_type={'lang': lang}) - - url = reverse_lazy('events:academy_event') + model = self.generate_models( + authenticate=True, + organization=True, + profile_academy=True, + capability="crud_event", + tag=(2, {"tag_type": "DISCOVERY"}), + active_campaign_academy=True, + role="potato", + event_type={"lang": lang}, + ) + + url = reverse_lazy("events:academy_event") current_date = self.datetime_now() data = { - 'tags': ','.join([x.slug for x in model.tag]), - 'slug': 'they-killed-kenny', - 'url': 'https://www.google.com/', - 'banner': 'https://www.google.com/banner', - 'capacity': 11, - 'starting_at': self.datetime_to_iso(current_date), - 'ending_at': self.datetime_to_iso(current_date), - 'lang': lang, - 'event_type': 1, + "tags": ",".join([x.slug for x in model.tag]), + "slug": "they-killed-kenny", + "url": "https://www.google.com/", + "banner": "https://www.google.com/banner", + "capacity": 11, + "starting_at": self.datetime_to_iso(current_date), + "ending_at": self.datetime_to_iso(current_date), + "lang": lang, + "event_type": 1, } response = self.client.post(url, data) json = response.json() - self.assertDatetime(json['created_at']) - self.assertDatetime(json['updated_at']) - - del json['created_at'] - del json['updated_at'] - - expected = post_serializer({ - **data, - 'id': 1, - 'slug': 'they-killed-kenny', - 'academy': 1, - 'organization': None, - 'eventbrite_sync_status': 'PENDING', - 'tags': ','.join([x.slug for x in model.tag]), - 'currency': 'USD', - 'free_for_bootcamps': True, - 'free_for_all': False, - 'uuid': str(uuid), - }) + self.assertDatetime(json["created_at"]) + self.assertDatetime(json["updated_at"]) + + del json["created_at"] + del json["updated_at"] + + expected = post_serializer( + { + **data, + "id": 1, + "slug": "they-killed-kenny", + "academy": 1, + "organization": None, + "eventbrite_sync_status": "PENDING", + "tags": ",".join([x.slug for x in model.tag]), + "currency": "USD", + "free_for_bootcamps": True, + "free_for_all": False, + "uuid": str(uuid), + } + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, 201) - self.assertEqual(self.bc.database.list_of('events.Event'), [ - event_table( - data={ - 'academy_id': 1, - 'banner': 'https://www.google.com/banner', - 'capacity': 11, - 'slug': 'they-killed-kenny', - 'ending_at': current_date, - 'tags': ','.join([x.slug for x in model.tag]), - 'id': 1, - 'organization_id': None, - 'lang': lang, - 'event_type_id': 1, - 'starting_at': current_date, - 'status': 'DRAFT', - 'eventbrite_sync_status': 'PENDING', - 'url': 'https://www.google.com/', - 'sync_with_eventbrite': False, - 'currency': 'USD', - 'free_for_bootcamps': True, - 'free_for_all': False, - 'uuid': uuid, - }), - ]) + self.assertEqual( + self.bc.database.list_of("events.Event"), + [ + event_table( + data={ + "academy_id": 1, + "banner": "https://www.google.com/banner", + "capacity": 11, + "slug": "they-killed-kenny", + "ending_at": current_date, + "tags": 
",".join([x.slug for x in model.tag]), + "id": 1, + "organization_id": None, + "lang": lang, + "event_type_id": 1, + "starting_at": current_date, + "status": "DRAFT", + "eventbrite_sync_status": "PENDING", + "url": "https://www.google.com/", + "sync_with_eventbrite": False, + "currency": "USD", + "free_for_bootcamps": True, + "free_for_all": False, + "uuid": uuid, + } + ), + ], + ) """ 🔽🔽🔽 Post @@ -716,711 +727,760 @@ def test_all_academy_events__post__bad_slug(self): def test_all_academy_events__post__tags_is_blank(self): self.headers(academy=1) - model = self.generate_models(authenticate=True, - organization=True, - profile_academy=True, - capability='crud_event', - role='potato') + model = self.generate_models( + authenticate=True, organization=True, profile_academy=True, capability="crud_event", role="potato" + ) - url = reverse_lazy('events:academy_event') + url = reverse_lazy("events:academy_event") current_date = self.datetime_now() data = { - 'tags': '', - 'slug': 'event-they-killed-kenny', - 'url': 'https://www.google.com/', - 'banner': 'https://www.google.com/banner', - 'capacity': 11, - 'starting_at': self.datetime_to_iso(current_date), - 'ending_at': self.datetime_to_iso(current_date), + "tags": "", + "slug": "event-they-killed-kenny", + "url": "https://www.google.com/", + "banner": "https://www.google.com/banner", + "capacity": 11, + "starting_at": self.datetime_to_iso(current_date), + "ending_at": self.datetime_to_iso(current_date), } response = self.client.post(url, data) json = response.json() - expected = {'detail': 'empty-tags', 'status_code': 400} + expected = {"detail": "empty-tags", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, 400) - self.assertEqual(self.bc.database.list_of('events.Event'), []) + self.assertEqual(self.bc.database.list_of("events.Event"), []) - @patch('breathecode.events.signals.event_saved.send_robust', MagicMock()) + @patch("breathecode.events.signals.event_saved.send_robust", MagicMock()) # @patch('uuid.uuid4', PropertyMock(MagicMock=uuid)) # @patch('os.urandom', MagicMock(return_value=seed)) def test_all_academy_events__post__event_exist_with_the_same_eventbrite_id_as_null(self): self.headers(academy=1) - event = {'eventbrite_id': None} + event = {"eventbrite_id": None} seed = os.urandom(16) seed2 = os.urandom(16) uuid = UUID(bytes=seed2, version=4) lang = self.bc.fake.slug()[:2] - with patch('os.urandom', MagicMock(return_value=seed)): - model = self.generate_models(authenticate=True, - organization=True, - profile_academy=True, - event=event, - capability='crud_event', - tag=(2, { - 'tag_type': 'DISCOVERY' - }), - active_campaign_academy=True, - role='potato', - event_type={'lang': lang}) - - url = reverse_lazy('events:academy_event') + with patch("os.urandom", MagicMock(return_value=seed)): + model = self.generate_models( + authenticate=True, + organization=True, + profile_academy=True, + event=event, + capability="crud_event", + tag=(2, {"tag_type": "DISCOVERY"}), + active_campaign_academy=True, + role="potato", + event_type={"lang": lang}, + ) + + url = reverse_lazy("events:academy_event") current_date = self.datetime_now() data = { - 'tags': ','.join([x.slug for x in model.tag]), - 'slug': 'EVENT-THEY-KILLED-KENNY', - 'url': 'https://www.google.com/', - 'eventbrite_id': None, - 'banner': 'https://www.google.com/banner', - 'capacity': 11, - 'starting_at': self.datetime_to_iso(current_date), - 'ending_at': self.datetime_to_iso(current_date), - 'lang': lang, - 'event_type': 1, + "tags": 
",".join([x.slug for x in model.tag]), + "slug": "EVENT-THEY-KILLED-KENNY", + "url": "https://www.google.com/", + "eventbrite_id": None, + "banner": "https://www.google.com/banner", + "capacity": 11, + "starting_at": self.datetime_to_iso(current_date), + "ending_at": self.datetime_to_iso(current_date), + "lang": lang, + "event_type": 1, } - with patch('os.urandom', MagicMock(return_value=seed2)): - response = self.client.post(url, data, format='json') + with patch("os.urandom", MagicMock(return_value=seed2)): + response = self.client.post(url, data, format="json") json = response.json() - self.assertDatetime(json['created_at']) - self.assertDatetime(json['updated_at']) - - del json['created_at'] - del json['updated_at'] - - expected = post_serializer({ - **data, - 'id': 2, - 'slug': 'event-they-killed-kenny', - 'academy': 1, - 'organization': None, - 'tags': ','.join([x.slug for x in model.tag]), - 'eventbrite_sync_status': 'PENDING', - 'currency': 'USD', - 'free_for_bootcamps': True, - 'free_for_all': False, - 'sync_with_eventbrite': False, - 'uuid': str(uuid), - }) + self.assertDatetime(json["created_at"]) + self.assertDatetime(json["updated_at"]) + + del json["created_at"] + del json["updated_at"] + + expected = post_serializer( + { + **data, + "id": 2, + "slug": "event-they-killed-kenny", + "academy": 1, + "organization": None, + "tags": ",".join([x.slug for x in model.tag]), + "eventbrite_sync_status": "PENDING", + "currency": "USD", + "free_for_bootcamps": True, + "free_for_all": False, + "sync_with_eventbrite": False, + "uuid": str(uuid), + } + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, 201) - self.assertEqual(self.bc.database.list_of('events.Event'), [ - { - **self.bc.format.to_dict(model.event), - 'eventbrite_id': None, - }, - event_table( - data={ - 'academy_id': 1, - 'banner': 'https://www.google.com/banner', - 'capacity': 11, - 'slug': 'event-they-killed-kenny', - 'ending_at': current_date, - 'tags': data['tags'], - 'id': 2, - 'organization_id': None, - 'lang': lang, - 'event_type_id': 1, - 'starting_at': current_date, - 'status': 'DRAFT', - 'eventbrite_sync_status': 'PENDING', - 'url': 'https://www.google.com/', - 'sync_with_eventbrite': False, - 'currency': 'USD', - 'tags': ','.join([x.slug for x in model.tag]), - 'free_for_bootcamps': True, - 'free_for_all': False, - 'uuid': uuid, - }), - ]) - - @patch('uuid.uuid4', PropertyMock(MagicMock=uuid)) - @patch('os.urandom', MagicMock(return_value=seed)) + self.assertEqual( + self.bc.database.list_of("events.Event"), + [ + { + **self.bc.format.to_dict(model.event), + "eventbrite_id": None, + }, + event_table( + data={ + "academy_id": 1, + "banner": "https://www.google.com/banner", + "capacity": 11, + "slug": "event-they-killed-kenny", + "ending_at": current_date, + "tags": data["tags"], + "id": 2, + "organization_id": None, + "lang": lang, + "event_type_id": 1, + "starting_at": current_date, + "status": "DRAFT", + "eventbrite_sync_status": "PENDING", + "url": "https://www.google.com/", + "sync_with_eventbrite": False, + "currency": "USD", + "tags": ",".join([x.slug for x in model.tag]), + "free_for_bootcamps": True, + "free_for_all": False, + "uuid": uuid, + } + ), + ], + ) + + @patch("uuid.uuid4", PropertyMock(MagicMock=uuid)) + @patch("os.urandom", MagicMock(return_value=seed)) def test_all_academy_events__post__tags_is_blank__slug_in_uppercase(self): self.headers(academy=1) lang = self.bc.fake.slug()[:2] - model = self.generate_models(authenticate=True, - organization=True, - profile_academy=True, 
- capability='crud_event', - tag=(2, { - 'tag_type': 'DISCOVERY' - }), - active_campaign_academy=True, - role='potato', - event_type={'lang': lang}) - - url = reverse_lazy('events:academy_event') + model = self.generate_models( + authenticate=True, + organization=True, + profile_academy=True, + capability="crud_event", + tag=(2, {"tag_type": "DISCOVERY"}), + active_campaign_academy=True, + role="potato", + event_type={"lang": lang}, + ) + + url = reverse_lazy("events:academy_event") current_date = self.datetime_now() data = { - 'tags': ','.join([x.slug for x in model.tag]), - 'slug': 'EVENT-THEY-KILLED-KENNY', - 'url': 'https://www.google.com/', - 'banner': 'https://www.google.com/banner', - 'capacity': 11, - 'starting_at': self.datetime_to_iso(current_date), - 'ending_at': self.datetime_to_iso(current_date), - 'lang': lang, - 'event_type': 1, + "tags": ",".join([x.slug for x in model.tag]), + "slug": "EVENT-THEY-KILLED-KENNY", + "url": "https://www.google.com/", + "banner": "https://www.google.com/banner", + "capacity": 11, + "starting_at": self.datetime_to_iso(current_date), + "ending_at": self.datetime_to_iso(current_date), + "lang": lang, + "event_type": 1, } response = self.client.post(url, data) json = response.json() - self.assertDatetime(json['created_at']) - self.assertDatetime(json['updated_at']) - - del json['created_at'] - del json['updated_at'] - - expected = post_serializer({ - **data, - 'id': 1, - 'slug': 'event-they-killed-kenny', - 'academy': 1, - 'organization': None, - 'lang': lang, - 'eventbrite_sync_status': 'PENDING', - 'currency': 'USD', - 'tags': ','.join([x.slug for x in model.tag]), - 'free_for_bootcamps': True, - 'free_for_all': False, - 'uuid': str(uuid), - }) + self.assertDatetime(json["created_at"]) + self.assertDatetime(json["updated_at"]) + + del json["created_at"] + del json["updated_at"] + + expected = post_serializer( + { + **data, + "id": 1, + "slug": "event-they-killed-kenny", + "academy": 1, + "organization": None, + "lang": lang, + "eventbrite_sync_status": "PENDING", + "currency": "USD", + "tags": ",".join([x.slug for x in model.tag]), + "free_for_bootcamps": True, + "free_for_all": False, + "uuid": str(uuid), + } + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, 201) - self.assertEqual(self.bc.database.list_of('events.Event'), [ - event_table( - data={ - 'academy_id': 1, - 'banner': 'https://www.google.com/banner', - 'capacity': 11, - 'slug': 'event-they-killed-kenny', - 'ending_at': current_date, - 'tags': data['tags'], - 'id': 1, - 'organization_id': None, - 'lang': lang, - 'event_type_id': 1, - 'starting_at': current_date, - 'status': 'DRAFT', - 'eventbrite_sync_status': 'PENDING', - 'url': 'https://www.google.com/', - 'sync_with_eventbrite': False, - 'currency': 'USD', - 'tags': ','.join([x.slug for x in model.tag]), - 'free_for_bootcamps': True, - 'free_for_all': False, - 'uuid': uuid, - }), - ]) + self.assertEqual( + self.bc.database.list_of("events.Event"), + [ + event_table( + data={ + "academy_id": 1, + "banner": "https://www.google.com/banner", + "capacity": 11, + "slug": "event-they-killed-kenny", + "ending_at": current_date, + "tags": data["tags"], + "id": 1, + "organization_id": None, + "lang": lang, + "event_type_id": 1, + "starting_at": current_date, + "status": "DRAFT", + "eventbrite_sync_status": "PENDING", + "url": "https://www.google.com/", + "sync_with_eventbrite": False, + "currency": "USD", + "tags": ",".join([x.slug for x in model.tag]), + "free_for_bootcamps": True, + "free_for_all": False, + "uuid": 
uuid, + } + ), + ], + ) def test_all_academy_events__post__with_tags__without_acp(self): self.headers(academy=1) - model = self.generate_models(authenticate=True, - organization=True, - profile_academy=True, - academy=True, - tag=(2, { - 'tag_type': 'DISCOVERY' - }), - capability='crud_event', - role='potato') - - url = reverse_lazy('events:academy_event') + model = self.generate_models( + authenticate=True, + organization=True, + profile_academy=True, + academy=True, + tag=(2, {"tag_type": "DISCOVERY"}), + capability="crud_event", + role="potato", + ) + + url = reverse_lazy("events:academy_event") current_date = self.datetime_now() data = { - 'tags': ','.join([x.slug for x in model.tag]), - 'url': 'https://www.google.com/', - 'banner': 'https://www.google.com/banner', - 'capacity': 11, - 'starting_at': self.datetime_to_iso(current_date), - 'ending_at': self.datetime_to_iso(current_date), + "tags": ",".join([x.slug for x in model.tag]), + "url": "https://www.google.com/", + "banner": "https://www.google.com/banner", + "capacity": 11, + "starting_at": self.datetime_to_iso(current_date), + "ending_at": self.datetime_to_iso(current_date), } response = self.client.post(url, data) json = response.json() - expected = {'detail': 'tag-not-exist', 'status_code': 400} + expected = {"detail": "tag-not-exist", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, 400) - self.assertEqual(self.bc.database.list_of('events.Event'), []) + self.assertEqual(self.bc.database.list_of("events.Event"), []) - @patch('uuid.uuid4', PropertyMock(MagicMock=uuid)) - @patch('os.urandom', MagicMock(return_value=seed)) + @patch("uuid.uuid4", PropertyMock(MagicMock=uuid)) + @patch("os.urandom", MagicMock(return_value=seed)) def test_all_academy_events__post__with_tags(self): self.headers(academy=1) lang = self.bc.fake.slug()[:2] - model = self.generate_models(authenticate=True, - organization=True, - profile_academy=True, - academy=True, - active_campaign_academy=True, - tag=(2, { - 'tag_type': 'DISCOVERY' - }), - capability='crud_event', - role='potato', - event_type={'lang': lang}) - - url = reverse_lazy('events:academy_event') + model = self.generate_models( + authenticate=True, + organization=True, + profile_academy=True, + academy=True, + active_campaign_academy=True, + tag=(2, {"tag_type": "DISCOVERY"}), + capability="crud_event", + role="potato", + event_type={"lang": lang}, + ) + + url = reverse_lazy("events:academy_event") current_date = self.datetime_now() data = { - 'tags': ','.join([x.slug for x in model.tag]), - 'url': 'https://www.google.com/', - 'banner': 'https://www.google.com/banner', - 'capacity': 11, - 'starting_at': self.datetime_to_iso(current_date), - 'ending_at': self.datetime_to_iso(current_date), - 'lang': lang, - 'event_type': 1, + "tags": ",".join([x.slug for x in model.tag]), + "url": "https://www.google.com/", + "banner": "https://www.google.com/banner", + "capacity": 11, + "starting_at": self.datetime_to_iso(current_date), + "ending_at": self.datetime_to_iso(current_date), + "lang": lang, + "event_type": 1, } response = self.client.post(url, data) json = response.json() - del json['updated_at'] - del json['created_at'] - - expected = post_serializer({ - **data, - 'id': 1, - 'academy': 1, - 'organization': None, - 'eventbrite_sync_status': 'PENDING', - 'currency': 'USD', - 'tags': ','.join([x.slug for x in model.tag]), - 'free_for_bootcamps': True, - 'free_for_all': False, - 'uuid': str(uuid), - }) + del json["updated_at"] + del json["created_at"] + + 
expected = post_serializer( + { + **data, + "id": 1, + "academy": 1, + "organization": None, + "eventbrite_sync_status": "PENDING", + "currency": "USD", + "tags": ",".join([x.slug for x in model.tag]), + "free_for_bootcamps": True, + "free_for_all": False, + "uuid": str(uuid), + } + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, 201) - self.assertEqual(self.bc.database.list_of('events.Event'), [ - event_table( - data={ - 'academy_id': 1, - 'banner': 'https://www.google.com/banner', - 'capacity': 11, - 'ending_at': current_date, - 'tags': data['tags'], - 'id': 1, - 'organization_id': None, - 'starting_at': current_date, - 'status': 'DRAFT', - 'eventbrite_sync_status': 'PENDING', - 'url': 'https://www.google.com/', - 'sync_with_eventbrite': False, - 'currency': 'USD', - 'tags': ','.join([x.slug for x in model.tag]), - 'free_for_bootcamps': True, - 'free_for_all': False, - 'uuid': uuid, - 'lang': lang, - 'event_type_id': 1, - }), - ]) + self.assertEqual( + self.bc.database.list_of("events.Event"), + [ + event_table( + data={ + "academy_id": 1, + "banner": "https://www.google.com/banner", + "capacity": 11, + "ending_at": current_date, + "tags": data["tags"], + "id": 1, + "organization_id": None, + "starting_at": current_date, + "status": "DRAFT", + "eventbrite_sync_status": "PENDING", + "url": "https://www.google.com/", + "sync_with_eventbrite": False, + "currency": "USD", + "tags": ",".join([x.slug for x in model.tag]), + "free_for_bootcamps": True, + "free_for_all": False, + "uuid": uuid, + "lang": lang, + "event_type_id": 1, + } + ), + ], + ) """ 🔽🔽🔽 Put with duplicate tags """ - @patch('uuid.uuid4', PropertyMock(MagicMock=uuid)) - @patch('os.urandom', MagicMock(return_value=seed)) + @patch("uuid.uuid4", PropertyMock(MagicMock=uuid)) + @patch("os.urandom", MagicMock(return_value=seed)) def test_all_academy_events__post__with_duplicate_tags(self): self.headers(academy=1) tags = [ - { - 'slug': 'they-killed-kenny', - 'tag_type': 'DISCOVERY' - }, - { - 'slug': 'they-killed-kenny', - 'tag_type': 'DISCOVERY' - }, - { - 'slug': 'kenny-has-born-again', - 'tag_type': 'DISCOVERY' - }, + {"slug": "they-killed-kenny", "tag_type": "DISCOVERY"}, + {"slug": "they-killed-kenny", "tag_type": "DISCOVERY"}, + {"slug": "kenny-has-born-again", "tag_type": "DISCOVERY"}, ] lang = self.bc.fake.slug()[:2] - model = self.generate_models(authenticate=True, - organization=True, - profile_academy=True, - academy=True, - active_campaign_academy=True, - tag=tags, - capability='crud_event', - role='potato', - event_type={'lang': lang}) - - url = reverse_lazy('events:academy_event') + model = self.generate_models( + authenticate=True, + organization=True, + profile_academy=True, + academy=True, + active_campaign_academy=True, + tag=tags, + capability="crud_event", + role="potato", + event_type={"lang": lang}, + ) + + url = reverse_lazy("events:academy_event") current_date = self.datetime_now() data = { - 'tags': 'they-killed-kenny,kenny-has-born-again', - 'url': 'https://www.google.com/', - 'banner': 'https://www.google.com/banner', - 'capacity': 11, - 'starting_at': self.datetime_to_iso(current_date), - 'ending_at': self.datetime_to_iso(current_date), - 'lang': lang, - 'event_type': 1, + "tags": "they-killed-kenny,kenny-has-born-again", + "url": "https://www.google.com/", + "banner": "https://www.google.com/banner", + "capacity": 11, + "starting_at": self.datetime_to_iso(current_date), + "ending_at": self.datetime_to_iso(current_date), + "lang": lang, + "event_type": 1, } response = 
self.client.post(url, data) json = response.json() - del json['updated_at'] - del json['created_at'] - - expected = post_serializer({ - **data, - 'id': 1, - 'academy': 1, - 'organization': None, - 'eventbrite_sync_status': 'PENDING', - 'currency': 'USD', - 'free_for_bootcamps': True, - 'free_for_all': False, - 'uuid': str(uuid), - }) + del json["updated_at"] + del json["created_at"] + + expected = post_serializer( + { + **data, + "id": 1, + "academy": 1, + "organization": None, + "eventbrite_sync_status": "PENDING", + "currency": "USD", + "free_for_bootcamps": True, + "free_for_all": False, + "uuid": str(uuid), + } + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, 201) - self.assertEqual(self.bc.database.list_of('events.Event'), [ - event_table( - data={ - 'academy_id': 1, - 'banner': 'https://www.google.com/banner', - 'capacity': 11, - 'ending_at': current_date, - 'tags': data['tags'], - 'id': 1, - 'organization_id': None, - 'event_type_id': 1, - 'lang': lang, - 'starting_at': current_date, - 'status': 'DRAFT', - 'eventbrite_sync_status': 'PENDING', - 'url': 'https://www.google.com/', - 'sync_with_eventbrite': False, - 'currency': 'USD', - 'tags': data['tags'], - 'free_for_bootcamps': True, - 'free_for_all': False, - 'uuid': uuid, - }), - ]) + self.assertEqual( + self.bc.database.list_of("events.Event"), + [ + event_table( + data={ + "academy_id": 1, + "banner": "https://www.google.com/banner", + "capacity": 11, + "ending_at": current_date, + "tags": data["tags"], + "id": 1, + "organization_id": None, + "event_type_id": 1, + "lang": lang, + "starting_at": current_date, + "status": "DRAFT", + "eventbrite_sync_status": "PENDING", + "url": "https://www.google.com/", + "sync_with_eventbrite": False, + "currency": "USD", + "tags": data["tags"], + "free_for_bootcamps": True, + "free_for_all": False, + "uuid": uuid, + } + ), + ], + ) """ 🔽🔽🔽 Post bad slug """ - @patch('breathecode.events.signals.event_saved.send_robust', MagicMock()) - @patch('uuid.uuid4', PropertyMock(MagicMock=uuid)) - @patch('os.urandom', MagicMock(return_value=seed)) + @patch("breathecode.events.signals.event_saved.send_robust", MagicMock()) + @patch("uuid.uuid4", PropertyMock(MagicMock=uuid)) + @patch("os.urandom", MagicMock(return_value=seed)) def test_all_academy_events__post__bad_slug____(self): self.headers(academy=1) - tags = [{'slug': self.bc.random.string(lower=True, size=10), 'tag_type': 'DISCOVERY'} for _ in range(2)] + tags = [{"slug": self.bc.random.string(lower=True, size=10), "tag_type": "DISCOVERY"} for _ in range(2)] event_type = { - 'lang': self.bc.random.string(lower=True, size=2), - 'icon_url': 'https://www.google.com', + "lang": self.bc.random.string(lower=True, size=2), + "icon_url": "https://www.google.com", } - model = self.generate_models(authenticate=True, - organization=True, - profile_academy=True, - tag=tags, - event_type=event_type, - capability='crud_event', - active_campaign_academy=True, - role='potato') - - url = reverse_lazy('events:academy_event') + model = self.generate_models( + authenticate=True, + organization=True, + profile_academy=True, + tag=tags, + event_type=event_type, + capability="crud_event", + active_campaign_academy=True, + role="potato", + ) + + url = reverse_lazy("events:academy_event") current_date = self.datetime_now() data = { # 'slug': 'they-killed-kenny', - 'tags': f'{tags[0]["slug"]},{tags[1]["slug"]}', - 'url': 'https://www.google.com/', - 'banner': 'https://www.google.com/banner', - 'capacity': 11, - 'starting_at': 
self.datetime_to_iso(current_date), - 'ending_at': self.datetime_to_iso(current_date), - 'event_type': 1, + "tags": f'{tags[0]["slug"]},{tags[1]["slug"]}', + "url": "https://www.google.com/", + "banner": "https://www.google.com/banner", + "capacity": 11, + "starting_at": self.datetime_to_iso(current_date), + "ending_at": self.datetime_to_iso(current_date), + "event_type": 1, } response = self.client.post(url, data) json = response.json() - self.assertDatetime(json['created_at']) - self.assertDatetime(json['updated_at']) - - del json['created_at'] - del json['updated_at'] - - expected = post_serializer({ - **data, - 'id': 1, - 'slug': None, - 'academy': 1, - 'organization': None, - 'event_type': 1, - 'eventbrite_sync_status': 'PENDING', - 'lang': model.event_type.lang, - 'tags': ','.join([x.slug for x in model.tag]), - 'currency': 'USD', - 'free_for_bootcamps': True, - 'free_for_all': False, - 'uuid': str(uuid), - }) + self.assertDatetime(json["created_at"]) + self.assertDatetime(json["updated_at"]) + + del json["created_at"] + del json["updated_at"] + + expected = post_serializer( + { + **data, + "id": 1, + "slug": None, + "academy": 1, + "organization": None, + "event_type": 1, + "eventbrite_sync_status": "PENDING", + "lang": model.event_type.lang, + "tags": ",".join([x.slug for x in model.tag]), + "currency": "USD", + "free_for_bootcamps": True, + "free_for_all": False, + "uuid": str(uuid), + } + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, 201) - self.assertEqual(self.bc.database.list_of('events.Event'), [ - event_table( - data={ - 'academy_id': 1, - 'banner': 'https://www.google.com/banner', - 'capacity': 11, - 'slug': None, - 'ending_at': current_date, - 'tags': ','.join([x.slug for x in model.tag]), - 'id': 1, - 'organization_id': None, - 'event_type_id': 1, - 'starting_at': current_date, - 'status': 'DRAFT', - 'eventbrite_sync_status': 'PENDING', - 'url': 'https://www.google.com/', - 'sync_with_eventbrite': False, - 'lang': model.event_type.lang, - 'currency': 'USD', - 'free_for_bootcamps': True, - 'free_for_all': False, - 'uuid': uuid, - }), - ]) + self.assertEqual( + self.bc.database.list_of("events.Event"), + [ + event_table( + data={ + "academy_id": 1, + "banner": "https://www.google.com/banner", + "capacity": 11, + "slug": None, + "ending_at": current_date, + "tags": ",".join([x.slug for x in model.tag]), + "id": 1, + "organization_id": None, + "event_type_id": 1, + "starting_at": current_date, + "status": "DRAFT", + "eventbrite_sync_status": "PENDING", + "url": "https://www.google.com/", + "sync_with_eventbrite": False, + "lang": model.event_type.lang, + "currency": "USD", + "free_for_bootcamps": True, + "free_for_all": False, + "uuid": uuid, + } + ), + ], + ) """ 🔽🔽🔽 Spy the extensions """ - @patch.object(APIViewExtensionHandlers, '_spy_extensions', MagicMock()) + @patch.object(APIViewExtensionHandlers, "_spy_extensions", MagicMock()) def test_all_academy_events__spy_extensions(self): self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_event', - role='potato', - syllabus=True, - venue=True, - event=True) - - url = reverse_lazy('events:academy_event') + '?city=patata' + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="read_event", + role="potato", + syllabus=True, + venue=True, + event=True, + ) + + url = reverse_lazy("events:academy_event") + "?city=patata" self.client.get(url) - 
self.assertEqual(APIViewExtensionHandlers._spy_extensions.call_args_list, [ - call(['CacheExtension', 'LanguageExtension', 'LookupExtension', 'PaginationExtension', 'SortExtension']), - ]) + self.assertEqual( + APIViewExtensionHandlers._spy_extensions.call_args_list, + [ + call( + ["CacheExtension", "LanguageExtension", "LookupExtension", "PaginationExtension", "SortExtension"] + ), + ], + ) - @patch.object(APIViewExtensionHandlers, '_spy_extension_arguments', MagicMock()) + @patch.object(APIViewExtensionHandlers, "_spy_extension_arguments", MagicMock()) def test_all_academy_events__spy_extension_arguments(self): self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_event', - role='potato', - syllabus=True, - venue=True, - event=True) - - url = reverse_lazy('events:academy_event') + '?city=patata' + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="read_event", + role="potato", + syllabus=True, + venue=True, + event=True, + ) + + url = reverse_lazy("events:academy_event") + "?city=patata" self.client.get(url) - self.assertEqual(APIViewExtensionHandlers._spy_extension_arguments.call_args_list, [ - call(cache=EventCache, sort='-starting_at', paginate=True), - ]) + self.assertEqual( + APIViewExtensionHandlers._spy_extension_arguments.call_args_list, + [ + call(cache=EventCache, sort="-starting_at", paginate=True), + ], + ) """ 🔽🔽🔽 DELETE """ def test_academy_event__delete__without_lookups(self): - status = 'DRAFT' + status = "DRAFT" self.headers(academy=1) - event = {'status': status} - model = self.generate_models(authenticate=True, - role=1, - capability='crud_event', - profile_academy=1, - event=(2, event)) + event = {"status": status} + model = self.generate_models( + authenticate=True, role=1, capability="crud_event", profile_academy=1, event=(2, event) + ) - url = reverse_lazy('events:academy_event') + url = reverse_lazy("events:academy_event") response = self.client.delete(url) json = response.json() - expected = {'detail': 'without-lookups-and-event-id', 'status_code': 400} + expected = {"detail": "without-lookups-and-event-id", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, 400) - self.assertEqual(self.bc.database.list_of('events.Event'), self.bc.format.to_dict(model.event)) + self.assertEqual(self.bc.database.list_of("events.Event"), self.bc.format.to_dict(model.event)) def test_academy_event__delete__can_delete(self): - status = 'DRAFT' + status = "DRAFT" self.headers(academy=1) - event = {'status': status} - model = self.generate_models(authenticate=True, - role=1, - capability='crud_event', - profile_academy=1, - event=(2, event)) + event = {"status": status} + model = self.generate_models( + authenticate=True, role=1, capability="crud_event", profile_academy=1, event=(2, event) + ) - url = reverse_lazy('events:academy_event') + f'?id={",".join([str(x.id) for x in model.event])}' + url = reverse_lazy("events:academy_event") + f'?id={",".join([str(x.id) for x in model.event])}' response = self.client.delete(url) self.assertEqual(response.status_code, 204) - self.assertEqual(self.bc.database.list_of('events.Event'), []) + self.assertEqual(self.bc.database.list_of("events.Event"), []) def test_academy_event__delete__bad_status(self): - statuses = ['ACTIVE', 'DELETED'] + statuses = ["ACTIVE", "DELETED"] for status in statuses: - event = {'status': status} - model = self.generate_models(user=1, role=1, capability='crud_event', 
profile_academy=1, event=(2, event)) + event = {"status": status} + model = self.generate_models(user=1, role=1, capability="crud_event", profile_academy=1, event=(2, event)) self.bc.request.set_headers(academy=model.academy.id) self.client.force_authenticate(model.user) - url = reverse_lazy('events:academy_event') + f'?id={",".join([str(x.id) for x in model.event])}' + url = reverse_lazy("events:academy_event") + f'?id={",".join([str(x.id) for x in model.event])}' response = self.client.delete(url) json = response.json() expected = { - 'failure': [{ - 'detail': - 'non-draft-event', - 'resources': [{ - 'display_field': 'slug', - 'display_value': model.event[0].slug, - 'pk': model.event[0].id, - }, { - 'display_field': 'slug', - 'display_value': model.event[1].slug, - 'pk': model.event[1].id, - }], - 'status_code': - 400, - }], - 'success': [] + "failure": [ + { + "detail": "non-draft-event", + "resources": [ + { + "display_field": "slug", + "display_value": model.event[0].slug, + "pk": model.event[0].id, + }, + { + "display_field": "slug", + "display_value": model.event[1].slug, + "pk": model.event[1].id, + }, + ], + "status_code": 400, + } + ], + "success": [], } self.assertEqual(json, expected) self.assertEqual(response.status_code, 207) - self.assertEqual(self.bc.database.list_of('events.Event'), self.bc.format.to_dict(model.event)) + self.assertEqual(self.bc.database.list_of("events.Event"), self.bc.format.to_dict(model.event)) - self.bc.database.delete('events.Event') + self.bc.database.delete("events.Event") def test_academy_event__delete__all_errors_and_success_cases(self): - bad_statuses = ['ACTIVE', 'DELETED'] - - events_with_bad_statuses = [{ - 'status': status, - 'slug': self.bc.fake.slug(), - } for status in bad_statuses] - events_from_other_academy = [{ - 'status': 'DRAFT', - 'academy_id': 2, - 'slug': None, - }, { - 'status': 'DRAFT', - 'academy_id': 2, - 'slug': None, - }] - right_events = [{ - 'status': 'DRAFT', - 'academy_id': 1, - 'slug': self.bc.fake.slug(), - }, { - 'status': 'DRAFT', - 'academy_id': 1, - 'slug': self.bc.fake.slug(), - }] + bad_statuses = ["ACTIVE", "DELETED"] + + events_with_bad_statuses = [ + { + "status": status, + "slug": self.bc.fake.slug(), + } + for status in bad_statuses + ] + events_from_other_academy = [ + { + "status": "DRAFT", + "academy_id": 2, + "slug": None, + }, + { + "status": "DRAFT", + "academy_id": 2, + "slug": None, + }, + ] + right_events = [ + { + "status": "DRAFT", + "academy_id": 1, + "slug": self.bc.fake.slug(), + }, + { + "status": "DRAFT", + "academy_id": 1, + "slug": self.bc.fake.slug(), + }, + ] events = events_with_bad_statuses + events_from_other_academy + right_events - model = self.generate_models(user=1, - role=1, - academy=2, - capability='crud_event', - profile_academy=1, - event=events) + model = self.generate_models( + user=1, role=1, academy=2, capability="crud_event", profile_academy=1, event=events + ) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('events:academy_event') + f'?id={",".join([str(x.id) for x in model.event])}' + url = reverse_lazy("events:academy_event") + f'?id={",".join([str(x.id) for x in model.event])}' response = self.client.delete(url) json = response.json() expected = { - 'success': [{ - 'status_code': - 204, - 'resources': [{ - 'pk': model.event[4].id, - 'display_field': 'slug', - 'display_value': model.event[4].slug, - }, { - 'pk': model.event[5].id, - 'display_field': 'slug', - 'display_value': model.event[5].slug, - }], - }], - 
'failure': [{ - 'detail': - 'not-found', - 'status_code': - 400, - 'resources': [{ - 'pk': model.event[2].id, - 'display_field': 'slug', - 'display_value': model.event[2].slug, - }, { - 'pk': model.event[3].id, - 'display_field': 'slug', - 'display_value': model.event[3].slug, - }], - }, { - 'detail': - 'non-draft-event', - 'status_code': - 400, - 'resources': [{ - 'pk': model.event[0].id, - 'display_field': 'slug', - 'display_value': model.event[0].slug, - }, { - 'pk': model.event[1].id, - 'display_field': 'slug', - 'display_value': model.event[1].slug, - }], - }] + "success": [ + { + "status_code": 204, + "resources": [ + { + "pk": model.event[4].id, + "display_field": "slug", + "display_value": model.event[4].slug, + }, + { + "pk": model.event[5].id, + "display_field": "slug", + "display_value": model.event[5].slug, + }, + ], + } + ], + "failure": [ + { + "detail": "not-found", + "status_code": 400, + "resources": [ + { + "pk": model.event[2].id, + "display_field": "slug", + "display_value": model.event[2].slug, + }, + { + "pk": model.event[3].id, + "display_field": "slug", + "display_value": model.event[3].slug, + }, + ], + }, + { + "detail": "non-draft-event", + "status_code": 400, + "resources": [ + { + "pk": model.event[0].id, + "display_field": "slug", + "display_value": model.event[0].slug, + }, + { + "pk": model.event[1].id, + "display_field": "slug", + "display_value": model.event[1].slug, + }, + ], + }, + ], } self.assertEqual(json, expected) self.assertEqual(response.status_code, 207) - self.assertEqual(self.bc.database.list_of('events.Event'), self.bc.format.to_dict(model.event[:4])) + self.assertEqual(self.bc.database.list_of("events.Event"), self.bc.format.to_dict(model.event[:4])) diff --git a/breathecode/events/tests/urls/tests_academy_event_id.py b/breathecode/events/tests/urls/tests_academy_event_id.py index ca6144f12..2186d0d6e 100644 --- a/breathecode/events/tests/urls/tests_academy_event_id.py +++ b/breathecode/events/tests/urls/tests_academy_event_id.py @@ -13,26 +13,26 @@ def user_serializer(user): return { - 'first_name': user.first_name, - 'id': user.id, - 'last_name': user.last_name, + "first_name": user.first_name, + "id": user.id, + "last_name": user.last_name, } def asset_serializer(asset): return { - 'id': asset.id, - 'slug': asset.slug, - 'title': asset.title, - 'lang': asset.title, - 'asset_type': asset.asset_type, - 'status': asset.status, - 'published_at': datetime_to_iso_format(asset.published_at), - 'category': { - 'id': asset.category.id, - 'slug': asset.category.slug, - 'title': asset.category.title, - } + "id": asset.id, + "slug": asset.slug, + "title": asset.title, + "lang": asset.title, + "asset_type": asset.asset_type, + "status": asset.status, + "published_at": datetime_to_iso_format(asset.published_at), + "category": { + "id": asset.category.id, + "slug": asset.category.slug, + "title": asset.category.title, + }, } @@ -42,41 +42,34 @@ def asset_serializer(asset): def get_serializer(event, academy, asset=None, data={}): return { - 'id': event.id, - 'author': user_serializer(event.author), - 'host_user': user_serializer(event.host_user), - 'capacity': event.capacity, - 'description': event.description, - 'excerpt': event.excerpt, - 'free_for_all': event.free_for_all, - 'title': event.title, - 'lang': event.lang, - 'url': event.url, - 'banner': event.banner, - 'tags': event.tags, - 'slug': event.slug, - 'host': event.host, - 'starting_at': datetime_to_iso_format(event.starting_at), - 'ending_at': datetime_to_iso_format(event.ending_at), - 
'ended_at': event.ended_at, - 'status': event.status, - 'event_type': event.event_type, - 'online_event': event.online_event, - 'live_stream_url': event.live_stream_url, - 'venue': event.venue, - 'academy': { - 'id': 1, - 'slug': academy.slug, - 'name': academy.name, - 'city': { - 'name': event.academy.city.name - } - }, - 'sync_with_eventbrite': event.sync_with_eventbrite, - 'live_stream_url': event.live_stream_url, - 'eventbrite_sync_status': event.eventbrite_sync_status, - 'eventbrite_sync_description': event.eventbrite_sync_description, - 'asset': asset_serializer(asset) if asset else None, + "id": event.id, + "author": user_serializer(event.author), + "host_user": user_serializer(event.host_user), + "capacity": event.capacity, + "description": event.description, + "excerpt": event.excerpt, + "free_for_all": event.free_for_all, + "title": event.title, + "lang": event.lang, + "url": event.url, + "banner": event.banner, + "tags": event.tags, + "slug": event.slug, + "host": event.host, + "starting_at": datetime_to_iso_format(event.starting_at), + "ending_at": datetime_to_iso_format(event.ending_at), + "ended_at": event.ended_at, + "status": event.status, + "event_type": event.event_type, + "online_event": event.online_event, + "live_stream_url": event.live_stream_url, + "venue": event.venue, + "academy": {"id": 1, "slug": academy.slug, "name": academy.name, "city": {"name": event.academy.city.name}}, + "sync_with_eventbrite": event.sync_with_eventbrite, + "live_stream_url": event.live_stream_url, + "eventbrite_sync_status": event.eventbrite_sync_status, + "eventbrite_sync_description": event.eventbrite_sync_description, + "asset": asset_serializer(asset) if asset else None, **data, } @@ -84,68 +77,65 @@ def get_serializer(event, academy, asset=None, data={}): class AcademyEventIdTestSuite(EventTestCase): cache = EventCache() - @patch('breathecode.marketing.signals.downloadable_saved.send_robust', MagicMock()) + @patch("breathecode.marketing.signals.downloadable_saved.send_robust", MagicMock()) def test_academy_event_id_no_auth(self): self.headers(academy=1) - url = reverse_lazy('events:academy_event_id', kwargs={'event_id': 1}) + url = reverse_lazy("events:academy_event_id", kwargs={"event_id": 1}) response = self.client.get(url) json = response.json() - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} self.assertEqual(json, expected) self.assertEqual(response.status_code, 401) - @patch('breathecode.marketing.signals.downloadable_saved.send_robust', MagicMock()) + @patch("breathecode.marketing.signals.downloadable_saved.send_robust", MagicMock()) def test_all_academy_events_without_capability(self): self.headers(academy=1) - url = reverse_lazy('events:academy_event_id', kwargs={'event_id': 1}) + url = reverse_lazy("events:academy_event_id", kwargs={"event_id": 1}) self.generate_models(authenticate=True) response = self.client.get(url) json = response.json() - expected = {'detail': "You (user: 1) don't have this capability: read_event for academy 1", 'status_code': 403} + expected = {"detail": "You (user: 1) don't have this capability: read_event for academy 1", "status_code": 403} self.assertEqual(json, expected) self.assertEqual(response.status_code, 403) - @patch('breathecode.marketing.signals.downloadable_saved.send_robust', MagicMock()) + @patch("breathecode.marketing.signals.downloadable_saved.send_robust", MagicMock()) def 
test_academy_event_id_invalid_id(self): self.headers(academy=1) - url = reverse_lazy('events:academy_event_id', kwargs={'event_id': 1}) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_event', - role='potato', - syllabus=True) + url = reverse_lazy("events:academy_event_id", kwargs={"event_id": 1}) + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_event", role="potato", syllabus=True + ) response = self.client.get(url) json = response.json() - expected = {'detail': 'Event not found', 'status_code': 404} + expected = {"detail": "Event not found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, 404) - @patch('breathecode.marketing.signals.downloadable_saved.send_robust', MagicMock()) + @patch("breathecode.marketing.signals.downloadable_saved.send_robust", MagicMock()) def test_academy_event_id_valid_id(self): self.headers(academy=1) - url = reverse_lazy('events:academy_event_id', kwargs={'event_id': 1}) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_event', - role='potato', - syllabus=True, - event=True) + url = reverse_lazy("events:academy_event_id", kwargs={"event_id": 1}) + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_event", role="potato", syllabus=True, event=True + ) response = self.client.get(url) json = response.json() - expected = get_serializer(model.event, - model.academy, - data={ - 'sync_with_eventbrite': False, - 'eventbrite_sync_status': 'PENDING', - 'eventbrite_sync_description': None, - }) + expected = get_serializer( + model.event, + model.academy, + data={ + "sync_with_eventbrite": False, + "eventbrite_sync_status": "PENDING", + "eventbrite_sync_description": None, + }, + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, 200) @@ -154,759 +144,793 @@ def test_academy_event_id_valid_id(self): 🔽🔽🔽 Put - bad tags """ - @patch('breathecode.marketing.signals.downloadable_saved.send_robust', MagicMock()) + @patch("breathecode.marketing.signals.downloadable_saved.send_robust", MagicMock()) def test_academy_cohort_id__put__two_commas(self): """Test /cohort without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - organization=True, - profile_academy=True, - capability='crud_event', - role='potato2', - event=True) + model = self.generate_models( + authenticate=True, + organization=True, + profile_academy=True, + capability="crud_event", + role="potato2", + event=True, + ) - url = reverse_lazy('events:academy_event_id', kwargs={'event_id': 1}) + url = reverse_lazy("events:academy_event_id", kwargs={"event_id": 1}) current_date = self.datetime_now() data = { - 'id': 1, - 'url': 'https://www.google.com/', - 'banner': 'https://www.google.com/banner', - 'tags': ',,', - 'capacity': 11, - 'starting_at': self.datetime_to_iso(current_date), - 'ending_at': self.datetime_to_iso(current_date), + "id": 1, + "url": "https://www.google.com/", + "banner": "https://www.google.com/banner", + "tags": ",,", + "capacity": 11, + "starting_at": self.datetime_to_iso(current_date), + "ending_at": self.datetime_to_iso(current_date), } - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() - expected = {'detail': 'two-commas-together', 'status_code': 400} + expected = {"detail": "two-commas-together", "status_code": 400} self.assertEqual(json, expected) 
self.assertEqual(response.status_code, 400) - self.assertEqual(self.bc.database.list_of('events.Event'), [self.model_to_dict(model, 'event')]) + self.assertEqual(self.bc.database.list_of("events.Event"), [self.model_to_dict(model, "event")]) - @patch('breathecode.marketing.signals.downloadable_saved.send_robust', MagicMock()) + @patch("breathecode.marketing.signals.downloadable_saved.send_robust", MagicMock()) def test_academy_cohort_id__put__with_spaces(self): """Test /cohort without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - organization=True, - profile_academy=True, - capability='crud_event', - role='potato2', - event=True) + model = self.generate_models( + authenticate=True, + organization=True, + profile_academy=True, + capability="crud_event", + role="potato2", + event=True, + ) - url = reverse_lazy('events:academy_event_id', kwargs={'event_id': 1}) + url = reverse_lazy("events:academy_event_id", kwargs={"event_id": 1}) current_date = self.datetime_now() data = { - 'id': 1, - 'url': 'https://www.google.com/', - 'banner': 'https://www.google.com/banner', - 'tags': ' expecto-patronum sirius-black ', - 'capacity': 11, - 'starting_at': self.datetime_to_iso(current_date), - 'ending_at': self.datetime_to_iso(current_date), + "id": 1, + "url": "https://www.google.com/", + "banner": "https://www.google.com/banner", + "tags": " expecto-patronum sirius-black ", + "capacity": 11, + "starting_at": self.datetime_to_iso(current_date), + "ending_at": self.datetime_to_iso(current_date), } - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() - expected = {'detail': 'spaces-are-not-allowed', 'status_code': 400} + expected = {"detail": "spaces-are-not-allowed", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, 400) - self.assertEqual(self.bc.database.list_of('events.Event'), [self.model_to_dict(model, 'event')]) + self.assertEqual(self.bc.database.list_of("events.Event"), [self.model_to_dict(model, "event")]) - @patch('breathecode.marketing.signals.downloadable_saved.send_robust', MagicMock()) + @patch("breathecode.marketing.signals.downloadable_saved.send_robust", MagicMock()) def test_academy_cohort_id__put__starts_with_comma(self): """Test /cohort without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - organization=True, - profile_academy=True, - capability='crud_event', - role='potato2', - event=True) + model = self.generate_models( + authenticate=True, + organization=True, + profile_academy=True, + capability="crud_event", + role="potato2", + event=True, + ) - url = reverse_lazy('events:academy_event_id', kwargs={'event_id': 1}) + url = reverse_lazy("events:academy_event_id", kwargs={"event_id": 1}) current_date = self.datetime_now() data = { - 'id': 1, - 'url': 'https://www.google.com/', - 'banner': 'https://www.google.com/banner', - 'tags': ',expecto-patronum', - 'capacity': 11, - 'starting_at': self.datetime_to_iso(current_date), - 'ending_at': self.datetime_to_iso(current_date), + "id": 1, + "url": "https://www.google.com/", + "banner": "https://www.google.com/banner", + "tags": ",expecto-patronum", + "capacity": 11, + "starting_at": self.datetime_to_iso(current_date), + "ending_at": self.datetime_to_iso(current_date), } - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() - expected = {'detail': 'starts-with-comma', 
'status_code': 400} + expected = {"detail": "starts-with-comma", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, 400) - self.assertEqual(self.bc.database.list_of('events.Event'), [self.model_to_dict(model, 'event')]) + self.assertEqual(self.bc.database.list_of("events.Event"), [self.model_to_dict(model, "event")]) - @patch('breathecode.marketing.signals.downloadable_saved.send_robust', MagicMock()) + @patch("breathecode.marketing.signals.downloadable_saved.send_robust", MagicMock()) def test_academy_cohort_id__put__ends_with_comma(self): """Test /cohort without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - organization=True, - profile_academy=True, - capability='crud_event', - role='potato2', - event=True) + model = self.generate_models( + authenticate=True, + organization=True, + profile_academy=True, + capability="crud_event", + role="potato2", + event=True, + ) - url = reverse_lazy('events:academy_event_id', kwargs={'event_id': 1}) + url = reverse_lazy("events:academy_event_id", kwargs={"event_id": 1}) current_date = self.datetime_now() data = { - 'id': 1, - 'url': 'https://www.google.com/', - 'banner': 'https://www.google.com/banner', - 'tags': 'expecto-patronum,', - 'capacity': 11, - 'starting_at': self.datetime_to_iso(current_date), - 'ending_at': self.datetime_to_iso(current_date), + "id": 1, + "url": "https://www.google.com/", + "banner": "https://www.google.com/banner", + "tags": "expecto-patronum,", + "capacity": 11, + "starting_at": self.datetime_to_iso(current_date), + "ending_at": self.datetime_to_iso(current_date), } - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() - expected = {'detail': 'ends-with-comma', 'status_code': 400} + expected = {"detail": "ends-with-comma", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, 400) - self.assertEqual(self.bc.database.list_of('events.Event'), [self.model_to_dict(model, 'event')]) + self.assertEqual(self.bc.database.list_of("events.Event"), [self.model_to_dict(model, "event")]) - @patch('breathecode.marketing.signals.downloadable_saved.send_robust', MagicMock()) + @patch("breathecode.marketing.signals.downloadable_saved.send_robust", MagicMock()) def test_academy_cohort_id__put__one_tag_not_exists(self): """Test /cohort without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - organization=True, - profile_academy=True, - capability='crud_event', - role='potato2', - event=True) + model = self.generate_models( + authenticate=True, + organization=True, + profile_academy=True, + capability="crud_event", + role="potato2", + event=True, + ) - url = reverse_lazy('events:academy_event_id', kwargs={'event_id': 1}) + url = reverse_lazy("events:academy_event_id", kwargs={"event_id": 1}) current_date = self.datetime_now() data = { - 'id': 1, - 'url': 'https://www.google.com/', - 'banner': 'https://www.google.com/banner', - 'tags': 'expecto-patronum', - 'capacity': 11, - 'starting_at': self.datetime_to_iso(current_date), - 'ending_at': self.datetime_to_iso(current_date), + "id": 1, + "url": "https://www.google.com/", + "banner": "https://www.google.com/banner", + "tags": "expecto-patronum", + "capacity": 11, + "starting_at": self.datetime_to_iso(current_date), + "ending_at": self.datetime_to_iso(current_date), } - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, 
format="json") json = response.json() - expected = {'detail': 'have-less-two-tags', 'status_code': 400} + expected = {"detail": "have-less-two-tags", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, 400) - self.assertEqual(self.bc.database.list_of('events.Event'), [self.model_to_dict(model, 'event')]) + self.assertEqual(self.bc.database.list_of("events.Event"), [self.model_to_dict(model, "event")]) - @patch('breathecode.marketing.signals.downloadable_saved.send_robust', MagicMock()) + @patch("breathecode.marketing.signals.downloadable_saved.send_robust", MagicMock()) def test_academy_cohort_id__put__two_tags_not_exists(self): """Test /cohort without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - organization=True, - profile_academy=True, - capability='crud_event', - role='potato2', - event=True) + model = self.generate_models( + authenticate=True, + organization=True, + profile_academy=True, + capability="crud_event", + role="potato2", + event=True, + ) - url = reverse_lazy('events:academy_event_id', kwargs={'event_id': 1}) + url = reverse_lazy("events:academy_event_id", kwargs={"event_id": 1}) current_date = self.datetime_now() data = { - 'id': 1, - 'url': 'https://www.google.com/', - 'banner': 'https://www.google.com/banner', - 'tags': 'expecto-patronum,wingardium-leviosa', - 'capacity': 11, - 'starting_at': self.datetime_to_iso(current_date), - 'ending_at': self.datetime_to_iso(current_date), + "id": 1, + "url": "https://www.google.com/", + "banner": "https://www.google.com/banner", + "tags": "expecto-patronum,wingardium-leviosa", + "capacity": 11, + "starting_at": self.datetime_to_iso(current_date), + "ending_at": self.datetime_to_iso(current_date), } - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() - expected = {'detail': 'tag-not-exist', 'status_code': 400} + expected = {"detail": "tag-not-exist", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, 400) - self.assertEqual(self.bc.database.list_of('events.Event'), [self.model_to_dict(model, 'event')]) + self.assertEqual(self.bc.database.list_of("events.Event"), [self.model_to_dict(model, "event")]) - @patch('breathecode.marketing.signals.downloadable_saved.send_robust', MagicMock()) + @patch("breathecode.marketing.signals.downloadable_saved.send_robust", MagicMock()) def test_academy_cohort_id__put__one_of_two_tags_not_exists(self): """Test /cohort without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - organization=True, - profile_academy=True, - tag=True, - capability='crud_event', - role='potato2', - event=True) - - url = reverse_lazy('events:academy_event_id', kwargs={'event_id': 1}) + model = self.generate_models( + authenticate=True, + organization=True, + profile_academy=True, + tag=True, + capability="crud_event", + role="potato2", + event=True, + ) + + url = reverse_lazy("events:academy_event_id", kwargs={"event_id": 1}) current_date = self.datetime_now() data = { - 'id': 1, - 'url': 'https://www.google.com/', - 'banner': 'https://www.google.com/banner', - 'tags': f'expecto-patronum,{model.tag.slug}', - 'capacity': 11, - 'starting_at': self.datetime_to_iso(current_date), - 'ending_at': self.datetime_to_iso(current_date), + "id": 1, + "url": "https://www.google.com/", + "banner": "https://www.google.com/banner", + "tags": f"expecto-patronum,{model.tag.slug}", + "capacity": 11, + 
"starting_at": self.datetime_to_iso(current_date), + "ending_at": self.datetime_to_iso(current_date), } - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() - expected = {'detail': 'tag-not-exist', 'status_code': 400} + expected = {"detail": "tag-not-exist", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, 400) - self.assertEqual(self.bc.database.list_of('events.Event'), [self.model_to_dict(model, 'event')]) + self.assertEqual(self.bc.database.list_of("events.Event"), [self.model_to_dict(model, "event")]) """ 🔽🔽🔽 Put """ - @patch('uuid.uuid4', PropertyMock(MagicMock=uuid)) - @patch('os.urandom', MagicMock(return_value=seed)) + @patch("uuid.uuid4", PropertyMock(MagicMock=uuid)) + @patch("os.urandom", MagicMock(return_value=seed)) def test_academy_cohort_id__put(self): """Test /cohort without auth""" self.headers(academy=1) lang = self.bc.fake.slug()[:2] - model = self.generate_models(authenticate=True, - organization=True, - profile_academy=True, - capability='crud_event', - role='potato2', - tag=(2, { - 'tag_type': 'DISCOVERY' - }), - active_campaign_academy=True, - event={'lang': lang}, - event_type={'lang': lang}) - - url = reverse_lazy('events:academy_event_id', kwargs={'event_id': 1}) + model = self.generate_models( + authenticate=True, + organization=True, + profile_academy=True, + capability="crud_event", + role="potato2", + tag=(2, {"tag_type": "DISCOVERY"}), + active_campaign_academy=True, + event={"lang": lang}, + event_type={"lang": lang}, + ) + + url = reverse_lazy("events:academy_event_id", kwargs={"event_id": 1}) current_date = self.datetime_now() data = { - 'id': 1, - 'url': 'https://www.google.com/', - 'banner': 'https://www.google.com/banner', - 'tags': ','.join([x.slug for x in model.tag]), - 'capacity': 11, - 'starting_at': self.datetime_to_iso(current_date), - 'ending_at': self.datetime_to_iso(current_date), + "id": 1, + "url": "https://www.google.com/", + "banner": "https://www.google.com/banner", + "tags": ",".join([x.slug for x in model.tag]), + "capacity": 11, + "starting_at": self.datetime_to_iso(current_date), + "ending_at": self.datetime_to_iso(current_date), } - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() - self.assertDatetime(json['created_at']) - self.assertDatetime(json['updated_at']) + self.assertDatetime(json["created_at"]) + self.assertDatetime(json["updated_at"]) - del json['created_at'] - del json['updated_at'] + del json["created_at"] + del json["updated_at"] expected = { - 'academy': 1, - 'author': 1, - 'description': None, - 'event_type': 1, - 'eventbrite_id': None, - 'eventbrite_organizer_id': None, - 'eventbrite_status': None, - 'eventbrite_url': None, - 'excerpt': None, - 'host': model['event'].host, - 'id': 2, - 'lang': lang, - 'online_event': False, - 'organization': 1, - 'published_at': None, - 'status': 'DRAFT', - 'eventbrite_sync_description': None, - 'eventbrite_sync_status': 'PENDING', - 'title': None, - 'venue': None, - 'sync_with_eventbrite': False, - 'eventbrite_sync_status': 'PENDING', - 'currency': 'USD', - 'tags': '', - 'slug': None, - 'live_stream_url': None, - 'asset_slug': None, - 'ended_at': None, - 'host_user': 1, - 'free_for_bootcamps': True, - 'free_for_all': False, - 'uuid': str(uuid), + "academy": 1, + "author": 1, + "description": None, + "event_type": 1, + "eventbrite_id": None, + "eventbrite_organizer_id": None, + 
"eventbrite_status": None, + "eventbrite_url": None, + "excerpt": None, + "host": model["event"].host, + "id": 2, + "lang": lang, + "online_event": False, + "organization": 1, + "published_at": None, + "status": "DRAFT", + "eventbrite_sync_description": None, + "eventbrite_sync_status": "PENDING", + "title": None, + "venue": None, + "sync_with_eventbrite": False, + "eventbrite_sync_status": "PENDING", + "currency": "USD", + "tags": "", + "slug": None, + "live_stream_url": None, + "asset_slug": None, + "ended_at": None, + "host_user": 1, + "free_for_bootcamps": True, + "free_for_all": False, + "uuid": str(uuid), **data, } self.assertEqual(json, expected) self.assertEqual(response.status_code, 200) - self.assertEqual(self.bc.database.list_of('events.Event'), [{ - **self.model_to_dict(model, 'event'), - **data, - 'organization_id': 1, - 'event_type_id': 1, - 'starting_at': current_date, - 'ending_at': current_date, - 'slug': None, - 'free_for_bootcamps': True, - 'lang': lang, - }]) + self.assertEqual( + self.bc.database.list_of("events.Event"), + [ + { + **self.model_to_dict(model, "event"), + **data, + "organization_id": 1, + "event_type_id": 1, + "starting_at": current_date, + "ending_at": current_date, + "slug": None, + "free_for_bootcamps": True, + "lang": lang, + } + ], + ) """ 🔽🔽🔽 Put, tags empty """ - @patch('breathecode.marketing.signals.downloadable_saved.send_robust', MagicMock()) + @patch("breathecode.marketing.signals.downloadable_saved.send_robust", MagicMock()) def test_academy_cohort_id__put__tags_is_blank(self): """Test /cohort without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - organization=True, - profile_academy=True, - capability='crud_event', - role='potato2', - event=True) + model = self.generate_models( + authenticate=True, + organization=True, + profile_academy=True, + capability="crud_event", + role="potato2", + event=True, + ) - url = reverse_lazy('events:academy_event_id', kwargs={'event_id': 1}) + url = reverse_lazy("events:academy_event_id", kwargs={"event_id": 1}) current_date = self.datetime_now() data = { - 'id': 1, - 'url': 'https://www.google.com/', - 'banner': 'https://www.google.com/banner', - 'tags': '', - 'slug': 'event-they-killed-kenny', - 'capacity': 11, - 'starting_at': self.datetime_to_iso(current_date), - 'ending_at': self.datetime_to_iso(current_date), + "id": 1, + "url": "https://www.google.com/", + "banner": "https://www.google.com/banner", + "tags": "", + "slug": "event-they-killed-kenny", + "capacity": 11, + "starting_at": self.datetime_to_iso(current_date), + "ending_at": self.datetime_to_iso(current_date), } - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() - expected = {'tags': ['This field may not be blank.']} + expected = {"tags": ["This field may not be blank."]} self.assertEqual(json, expected) self.assertEqual(response.status_code, 400) - self.assertEqual(self.bc.database.list_of('events.Event'), [{ - **self.model_to_dict(model, 'event'), - }]) + self.assertEqual( + self.bc.database.list_of("events.Event"), + [ + { + **self.model_to_dict(model, "event"), + } + ], + ) """ 🔽🔽🔽 Try to update the slug """ - @patch('breathecode.marketing.signals.downloadable_saved.send_robust', MagicMock()) + @patch("breathecode.marketing.signals.downloadable_saved.send_robust", MagicMock()) def test_academy_cohort_id__put__tags_is_blank__try_to_update_the_slug(self): """Test /cohort without auth""" self.headers(academy=1) - model = 
self.generate_models(authenticate=True, - organization=True, - profile_academy=True, - capability='crud_event', - role='potato2', - tag=(2, { - 'tag_type': 'DISCOVERY' - }), - active_campaign_academy=True, - event=True) - - url = reverse_lazy('events:academy_event_id', kwargs={'event_id': 1}) + model = self.generate_models( + authenticate=True, + organization=True, + profile_academy=True, + capability="crud_event", + role="potato2", + tag=(2, {"tag_type": "DISCOVERY"}), + active_campaign_academy=True, + event=True, + ) + + url = reverse_lazy("events:academy_event_id", kwargs={"event_id": 1}) current_date = self.datetime_now() data = { - 'id': 1, - 'url': 'https://www.google.com/', - 'banner': 'https://www.google.com/banner', - 'tags': ','.join([x.slug for x in model.tag]), - 'slug': 'EVENT-THEY-KILLED-KENNY', - 'capacity': 11, - 'starting_at': self.datetime_to_iso(current_date), - 'ending_at': self.datetime_to_iso(current_date), + "id": 1, + "url": "https://www.google.com/", + "banner": "https://www.google.com/banner", + "tags": ",".join([x.slug for x in model.tag]), + "slug": "EVENT-THEY-KILLED-KENNY", + "capacity": 11, + "starting_at": self.datetime_to_iso(current_date), + "ending_at": self.datetime_to_iso(current_date), } - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() - expected = {'detail': 'try-update-slug', 'status_code': 400} + expected = {"detail": "try-update-slug", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, 400) - self.assertEqual(self.bc.database.list_of('events.Event'), [{ - **self.model_to_dict(model, 'event'), - }]) - - @patch('breathecode.marketing.signals.downloadable_saved.send_robust', MagicMock()) + self.assertEqual( + self.bc.database.list_of("events.Event"), + [ + { + **self.model_to_dict(model, "event"), + } + ], + ) + + @patch("breathecode.marketing.signals.downloadable_saved.send_robust", MagicMock()) def test_academy_cohort_id__put__with_tags__without_acp(self): """Test /cohort without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - organization=True, - profile_academy=True, - academy=True, - tag=(2, { - 'tag_type': 'DISCOVERY' - }), - capability='crud_event', - role='potato2', - event=True) - - url = reverse_lazy('events:academy_event_id', kwargs={'event_id': 1}) + model = self.generate_models( + authenticate=True, + organization=True, + profile_academy=True, + academy=True, + tag=(2, {"tag_type": "DISCOVERY"}), + capability="crud_event", + role="potato2", + event=True, + ) + + url = reverse_lazy("events:academy_event_id", kwargs={"event_id": 1}) current_date = self.datetime_now() data = { - 'id': 1, - 'url': 'https://www.google.com/', - 'banner': 'https://www.google.com/banner', - 'tags': ','.join([x.slug for x in model.tag]), - 'capacity': 11, - 'starting_at': self.datetime_to_iso(current_date), - 'ending_at': self.datetime_to_iso(current_date), + "id": 1, + "url": "https://www.google.com/", + "banner": "https://www.google.com/banner", + "tags": ",".join([x.slug for x in model.tag]), + "capacity": 11, + "starting_at": self.datetime_to_iso(current_date), + "ending_at": self.datetime_to_iso(current_date), } - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() - expected = {'detail': 'tag-not-exist', 'status_code': 400} + expected = {"detail": "tag-not-exist", "status_code": 400} self.assertEqual(json, expected) 
self.assertEqual(response.status_code, 400) - self.assertEqual(self.bc.database.list_of('events.Event'), [{ - **self.model_to_dict(model, 'event'), - }]) - - @patch('breathecode.marketing.signals.downloadable_saved.send_robust', MagicMock()) - @patch('uuid.uuid4', PropertyMock(MagicMock=uuid)) - @patch('os.urandom', MagicMock(return_value=seed)) + self.assertEqual( + self.bc.database.list_of("events.Event"), + [ + { + **self.model_to_dict(model, "event"), + } + ], + ) + + @patch("breathecode.marketing.signals.downloadable_saved.send_robust", MagicMock()) + @patch("uuid.uuid4", PropertyMock(MagicMock=uuid)) + @patch("os.urandom", MagicMock(return_value=seed)) def test_academy_cohort_id__put__with_tags(self): """Test /cohort without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - organization=True, - profile_academy=True, - academy=True, - active_campaign_academy=True, - tag=(2, { - 'tag_type': 'DISCOVERY' - }), - capability='crud_event', - role='potato2', - event_type=1, - event=True) - - url = reverse_lazy('events:academy_event_id', kwargs={'event_id': 1}) + model = self.generate_models( + authenticate=True, + organization=True, + profile_academy=True, + academy=True, + active_campaign_academy=True, + tag=(2, {"tag_type": "DISCOVERY"}), + capability="crud_event", + role="potato2", + event_type=1, + event=True, + ) + + url = reverse_lazy("events:academy_event_id", kwargs={"event_id": 1}) current_date = self.datetime_now() data = { - 'id': 1, - 'url': 'https://www.google.com/', - 'banner': 'https://www.google.com/banner', - 'tags': ','.join([x.slug for x in model.tag]), - 'capacity': 11, - 'starting_at': self.datetime_to_iso(current_date), - 'ending_at': self.datetime_to_iso(current_date), + "id": 1, + "url": "https://www.google.com/", + "banner": "https://www.google.com/banner", + "tags": ",".join([x.slug for x in model.tag]), + "capacity": 11, + "starting_at": self.datetime_to_iso(current_date), + "ending_at": self.datetime_to_iso(current_date), } - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() - del json['updated_at'] - del json['created_at'] + del json["updated_at"] + del json["created_at"] expected = { - 'academy': 1, - 'author': 1, - 'description': None, - 'event_type': 1, - 'eventbrite_id': None, - 'eventbrite_organizer_id': None, - 'eventbrite_status': None, - 'eventbrite_url': None, - 'excerpt': None, - 'host': model['event'].host, - 'id': 2, - 'lang': 'en', - 'slug': None, - 'online_event': False, - 'free_for_all': False, - 'organization': 1, - 'published_at': None, - 'asset_slug': None, - 'status': 'DRAFT', - 'eventbrite_sync_description': None, - 'eventbrite_sync_status': 'PENDING', - 'title': None, - 'venue': None, - 'sync_with_eventbrite': False, - 'ended_at': None, - 'eventbrite_sync_status': 'PENDING', - 'currency': 'USD', - 'live_stream_url': None, - 'host_user': 1, - 'free_for_bootcamps': True, - 'free_for_all': False, - 'uuid': str(uuid), + "academy": 1, + "author": 1, + "description": None, + "event_type": 1, + "eventbrite_id": None, + "eventbrite_organizer_id": None, + "eventbrite_status": None, + "eventbrite_url": None, + "excerpt": None, + "host": model["event"].host, + "id": 2, + "lang": "en", + "slug": None, + "online_event": False, + "free_for_all": False, + "organization": 1, + "published_at": None, + "asset_slug": None, + "status": "DRAFT", + "eventbrite_sync_description": None, + "eventbrite_sync_status": "PENDING", + "title": None, + "venue": 
None, + "sync_with_eventbrite": False, + "ended_at": None, + "eventbrite_sync_status": "PENDING", + "currency": "USD", + "live_stream_url": None, + "host_user": 1, + "free_for_bootcamps": True, + "free_for_all": False, + "uuid": str(uuid), **data, } self.assertEqual(json, expected) self.assertEqual(response.status_code, 200) - self.assertEqual(self.bc.database.list_of('events.Event'), [{ - **self.model_to_dict(model, 'event'), - **data, - 'organization_id': 1, - 'starting_at': current_date, - 'ending_at': current_date, - 'free_for_bootcamps': True, - 'event_type_id': 1, - 'lang': 'en', - }]) + self.assertEqual( + self.bc.database.list_of("events.Event"), + [ + { + **self.model_to_dict(model, "event"), + **data, + "organization_id": 1, + "starting_at": current_date, + "ending_at": current_date, + "free_for_bootcamps": True, + "event_type_id": 1, + "lang": "en", + } + ], + ) """ 🔽🔽🔽 Put with duplicate tags """ - @patch('breathecode.marketing.signals.downloadable_saved.send_robust', MagicMock()) - @patch('uuid.uuid4', PropertyMock(MagicMock=uuid)) - @patch('os.urandom', MagicMock(return_value=seed)) + @patch("breathecode.marketing.signals.downloadable_saved.send_robust", MagicMock()) + @patch("uuid.uuid4", PropertyMock(MagicMock=uuid)) + @patch("os.urandom", MagicMock(return_value=seed)) def test_academy_cohort_id__put__with_duplicate_tags(self): self.headers(academy=1) tags = [ - { - 'slug': 'they-killed-kenny', - 'tag_type': 'DISCOVERY' - }, - { - 'slug': 'they-killed-kenny', - 'tag_type': 'DISCOVERY' - }, - { - 'slug': 'kenny-has-born-again', - 'tag_type': 'DISCOVERY' - }, + {"slug": "they-killed-kenny", "tag_type": "DISCOVERY"}, + {"slug": "they-killed-kenny", "tag_type": "DISCOVERY"}, + {"slug": "kenny-has-born-again", "tag_type": "DISCOVERY"}, ] lang = self.bc.fake.slug()[:2] - model = self.generate_models(authenticate=True, - organization=True, - profile_academy=True, - academy=True, - active_campaign_academy=True, - tag=tags, - capability='crud_event', - role='potato2', - event={'lang': lang}, - event_type={'lang': lang}) - - url = reverse_lazy('events:academy_event_id', kwargs={'event_id': 1}) + model = self.generate_models( + authenticate=True, + organization=True, + profile_academy=True, + academy=True, + active_campaign_academy=True, + tag=tags, + capability="crud_event", + role="potato2", + event={"lang": lang}, + event_type={"lang": lang}, + ) + + url = reverse_lazy("events:academy_event_id", kwargs={"event_id": 1}) current_date = self.datetime_now() data = { - 'id': 1, - 'url': 'https://www.google.com/', - 'banner': 'https://www.google.com/banner', - 'tags': 'they-killed-kenny,kenny-has-born-again', - 'capacity': 11, - 'starting_at': self.datetime_to_iso(current_date), - 'ending_at': self.datetime_to_iso(current_date), + "id": 1, + "url": "https://www.google.com/", + "banner": "https://www.google.com/banner", + "tags": "they-killed-kenny,kenny-has-born-again", + "capacity": 11, + "starting_at": self.datetime_to_iso(current_date), + "ending_at": self.datetime_to_iso(current_date), } - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() - del json['updated_at'] - del json['created_at'] + del json["updated_at"] + del json["created_at"] expected = { - 'academy': 1, - 'author': 1, - 'description': None, - 'event_type': 1, - 'eventbrite_id': None, - 'eventbrite_organizer_id': None, - 'eventbrite_status': None, - 'eventbrite_url': None, - 'excerpt': None, - 'host': model['event'].host, - 'id': 2, - 'lang': 
lang, - 'slug': None, - 'online_event': False, - 'organization': 1, - 'published_at': None, - 'status': 'DRAFT', - 'eventbrite_sync_description': None, - 'eventbrite_sync_status': 'PENDING', - 'title': None, - 'ended_at': None, - 'venue': None, - 'sync_with_eventbrite': False, - 'eventbrite_sync_status': 'PENDING', - 'currency': 'USD', - 'live_stream_url': None, - 'host_user': 1, - 'asset_slug': None, - 'free_for_bootcamps': True, - 'free_for_all': False, - 'uuid': str(uuid), + "academy": 1, + "author": 1, + "description": None, + "event_type": 1, + "eventbrite_id": None, + "eventbrite_organizer_id": None, + "eventbrite_status": None, + "eventbrite_url": None, + "excerpt": None, + "host": model["event"].host, + "id": 2, + "lang": lang, + "slug": None, + "online_event": False, + "organization": 1, + "published_at": None, + "status": "DRAFT", + "eventbrite_sync_description": None, + "eventbrite_sync_status": "PENDING", + "title": None, + "ended_at": None, + "venue": None, + "sync_with_eventbrite": False, + "eventbrite_sync_status": "PENDING", + "currency": "USD", + "live_stream_url": None, + "host_user": 1, + "asset_slug": None, + "free_for_bootcamps": True, + "free_for_all": False, + "uuid": str(uuid), **data, } self.assertEqual(json, expected) self.assertEqual(response.status_code, 200) - self.assertEqual(self.bc.database.list_of('events.Event'), [{ - **self.model_to_dict(model, 'event'), - **data, - 'organization_id': 1, - 'starting_at': current_date, - 'ending_at': current_date, - 'free_for_bootcamps': True, - }]) - - @patch('breathecode.marketing.signals.downloadable_saved.send_robust', MagicMock()) - @patch.object(APIViewExtensionHandlers, '_spy_extensions', MagicMock()) + self.assertEqual( + self.bc.database.list_of("events.Event"), + [ + { + **self.model_to_dict(model, "event"), + **data, + "organization_id": 1, + "starting_at": current_date, + "ending_at": current_date, + "free_for_bootcamps": True, + } + ], + ) + + @patch("breathecode.marketing.signals.downloadable_saved.send_robust", MagicMock()) + @patch.object(APIViewExtensionHandlers, "_spy_extensions", MagicMock()) def test_academy_event_id__spy_extensions(self): self.headers(academy=1) - url = reverse_lazy('events:academy_event_id', kwargs={'event_id': 1}) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_event', - role='potato', - syllabus=True, - event=True) + url = reverse_lazy("events:academy_event_id", kwargs={"event_id": 1}) + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_event", role="potato", syllabus=True, event=True + ) self.client.get(url) - self.assertEqual(APIViewExtensionHandlers._spy_extensions.call_args_list, [ - call(['CacheExtension', 'LanguageExtension', 'LookupExtension', 'PaginationExtension', 'SortExtension']), - ]) - - @patch('breathecode.marketing.signals.downloadable_saved.send_robust', MagicMock()) - @patch.object(APIViewExtensionHandlers, '_spy_extension_arguments', MagicMock()) + self.assertEqual( + APIViewExtensionHandlers._spy_extensions.call_args_list, + [ + call( + ["CacheExtension", "LanguageExtension", "LookupExtension", "PaginationExtension", "SortExtension"] + ), + ], + ) + + @patch("breathecode.marketing.signals.downloadable_saved.send_robust", MagicMock()) + @patch.object(APIViewExtensionHandlers, "_spy_extension_arguments", MagicMock()) def test_academy_event_id__spy_extension_arguments(self): self.headers(academy=1) - url = reverse_lazy('events:academy_event_id', kwargs={'event_id': 1}) - model = 
self.generate_models(authenticate=True, - profile_academy=True, - capability='read_event', - role='potato', - syllabus=True, - event=True) + url = reverse_lazy("events:academy_event_id", kwargs={"event_id": 1}) + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_event", role="potato", syllabus=True, event=True + ) self.client.get(url) - self.assertEqual(APIViewExtensionHandlers._spy_extension_arguments.call_args_list, [ - call(cache=EventCache, sort='-starting_at', paginate=True), - ]) + self.assertEqual( + APIViewExtensionHandlers._spy_extension_arguments.call_args_list, + [ + call(cache=EventCache, sort="-starting_at", paginate=True), + ], + ) """ 🔽🔽🔽 DELETE """ def test_academy_event__delete__with_lookups(self): - status = 'DRAFT' + status = "DRAFT" self.headers(academy=1) - event = {'status': status} - model = self.generate_models(authenticate=True, - role=1, - capability='crud_event', - profile_academy=1, - event=(2, event)) + event = {"status": status} + model = self.generate_models( + authenticate=True, role=1, capability="crud_event", profile_academy=1, event=(2, event) + ) - url = reverse_lazy('events:academy_event_id', kwargs={'event_id': 1}) + '?id=1,2' + url = reverse_lazy("events:academy_event_id", kwargs={"event_id": 1}) + "?id=1,2" response = self.client.delete(url) json = response.json() - expected = {'detail': 'lookups-and-event-id-together', 'status_code': 400} + expected = {"detail": "lookups-and-event-id-together", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, 400) - self.assertEqual(self.bc.database.list_of('events.Event'), self.bc.format.to_dict(model.event)) + self.assertEqual(self.bc.database.list_of("events.Event"), self.bc.format.to_dict(model.event)) def test_academy_event__delete__deleting(self): - status = 'DRAFT' + status = "DRAFT" self.headers(academy=1) - event = {'status': status} - model = self.generate_models(authenticate=True, role=1, capability='crud_event', profile_academy=1, event=event) + event = {"status": status} + model = self.generate_models(authenticate=True, role=1, capability="crud_event", profile_academy=1, event=event) - url = reverse_lazy('events:academy_event_id', kwargs={'event_id': 1}) + url = reverse_lazy("events:academy_event_id", kwargs={"event_id": 1}) response = self.client.delete(url) self.assertEqual(response.status_code, 204) - self.assertEqual(self.bc.database.list_of('events.Event'), []) + self.assertEqual(self.bc.database.list_of("events.Event"), []) def test_academy_event__delete__non_draft_event(self): - statuses = ['ACTIVE', 'DELETED'] + statuses = ["ACTIVE", "DELETED"] for status in statuses: - event = {'status': status} - model = self.generate_models(authenticate=True, - role=1, - capability='crud_event', - profile_academy=1, - event=event) + event = {"status": status} + model = self.generate_models( + authenticate=True, role=1, capability="crud_event", profile_academy=1, event=event + ) - url = reverse_lazy('events:academy_event_id', kwargs={'event_id': model.event.id}) + url = reverse_lazy("events:academy_event_id", kwargs={"event_id": model.event.id}) self.headers(academy=model.academy.id) response = self.client.delete(url) json = response.json() - expected = {'detail': 'non-draft-event', 'status_code': 400} + expected = {"detail": "non-draft-event", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, 400) - self.assertEqual(self.bc.database.list_of('events.Event'), 
[self.bc.format.to_dict(model.event)]) - self.bc.database.delete('events.Event') + self.assertEqual(self.bc.database.list_of("events.Event"), [self.bc.format.to_dict(model.event)]) + self.bc.database.delete("events.Event") def test_academy_event__delete__deleting_from_other_academy(self): - status = 'DRAFT' + status = "DRAFT" self.headers(academy=1) - event = {'status': status, 'academy_id': 2} - model = self.generate_models(authenticate=True, - role=1, - academy=2, - capability='crud_event', - profile_academy=1, - event=event) + event = {"status": status, "academy_id": 2} + model = self.generate_models( + authenticate=True, role=1, academy=2, capability="crud_event", profile_academy=1, event=event + ) - url = reverse_lazy('events:academy_event_id', kwargs={'event_id': 1}) + url = reverse_lazy("events:academy_event_id", kwargs={"event_id": 1}) response = self.client.delete(url) json = response.json() - expected = {'detail': 'not-found', 'status_code': 400} + expected = {"detail": "not-found", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, 400) - self.assertEqual(self.bc.database.list_of('events.Event'), [self.bc.format.to_dict(model.event)]) + self.assertEqual(self.bc.database.list_of("events.Event"), [self.bc.format.to_dict(model.event)]) diff --git a/breathecode/events/tests/urls/tests_academy_event_id_join.py b/breathecode/events/tests/urls/tests_academy_event_id_join.py index 518bf16a3..98979f230 100644 --- a/breathecode/events/tests/urls/tests_academy_event_id_join.py +++ b/breathecode/events/tests/urls/tests_academy_event_id_join.py @@ -9,45 +9,45 @@ # IMPORTANT: the loader.render_to_string in a function is inside of function render def render_message(message, academy=None): request = None - context = {'MESSAGE': message, 'BUTTON': None, 'BUTTON_TARGET': '_blank', 'LINK': None} + context = {"MESSAGE": message, "BUTTON": None, "BUTTON_TARGET": "_blank", "LINK": None} if academy: - context['COMPANY_INFO_EMAIL'] = academy.feedback_email - context['COMPANY_LEGAL_NAME'] = academy.legal_name or academy.name - context['COMPANY_LOGO'] = academy.logo_url - context['COMPANY_NAME'] = academy.name + context["COMPANY_INFO_EMAIL"] = academy.feedback_email + context["COMPANY_LEGAL_NAME"] = academy.legal_name or academy.name + context["COMPANY_LOGO"] = academy.logo_url + context["COMPANY_NAME"] = academy.name - if 'heading' not in context: - context['heading'] = academy.name + if "heading" not in context: + context["heading"] = academy.name - return loader.render_to_string('message.html', context, request) + return loader.render_to_string("message.html", context, request) class AcademyVenueTestSuite(EventTestCase): # When: no auth # Then: return 401 - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_no_auth(self): self.headers(academy=1) - url = reverse_lazy('events:academy_event_id_join', kwargs={'event_id': 1}) + url = reverse_lazy("events:academy_event_id_join", kwargs={"event_id": 1}) response = self.client.get(url) json = response.json() - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not 
provided.", "status_code": 401} self.assertEqual(json, expected) self.assertEqual(response.status_code, 401) - self.assertEqual(self.bc.database.list_of('events.event'), []) + self.assertEqual(self.bc.database.list_of("events.event"), []) # When: no capability # Then: return 403 - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_with_no_capability(self): self.headers(academy=1) - url = reverse_lazy('events:academy_event_id_join', kwargs={'event_id': 1}) + url = reverse_lazy("events:academy_event_id_join", kwargs={"event_id": 1}) model = self.bc.database.create(user=1) self.client.force_authenticate(model.user) @@ -55,99 +55,101 @@ def test_with_no_capability(self): response = self.client.get(url) json = response.json() expected = { - 'detail': "You (user: 1) don't have this capability: start_or_end_event for academy 1", - 'status_code': 403 + "detail": "You (user: 1) don't have this capability: start_or_end_event for academy 1", + "status_code": 403, } self.assertEqual(json, expected) self.assertEqual(response.status_code, 403) - self.assertEqual(self.bc.database.list_of('events.event'), []) + self.assertEqual(self.bc.database.list_of("events.event"), []) # When: no Event # Then: return 404 - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_no_live_classes(self): self.headers(academy=1) - url = reverse_lazy('events:academy_event_id_join', kwargs={'event_id': 1}) - model = self.bc.database.create(user=1, profile_academy=1, capability='start_or_end_event', role='potato') + url = reverse_lazy("events:academy_event_id_join", kwargs={"event_id": 1}) + model = self.bc.database.create(user=1, profile_academy=1, capability="start_or_end_event", role="potato") self.client.force_authenticate(model.user) response = self.client.get(url) json = response.json() - expected = {'detail': 'not-found', 'status_code': 400} + expected = {"detail": "not-found", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, 400) - self.assertEqual(self.bc.database.list_of('events.event'), []) + self.assertEqual(self.bc.database.list_of("events.event"), []) # When: have a Event with no url # Then: return 400 - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_no_url(self): self.headers(academy=1) - model = self.bc.database.create(user=1, - profile_academy=1, - capability='start_or_end_event', - role='potato', - event=1) + model = self.bc.database.create( + user=1, profile_academy=1, capability="start_or_end_event", 
role="potato", event=1 + ) self.client.force_authenticate(model.user) - url = reverse_lazy('events:academy_event_id_join', kwargs={'event_id': 1}) + url = reverse_lazy("events:academy_event_id_join", kwargs={"event_id": 1}) response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - expected = render_message('no-live-stream-url', academy=model.academy) + expected = render_message("no-live-stream-url", academy=model.academy) # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, 400) - self.assertEqual(self.bc.database.list_of('events.event'), [ - self.bc.format.to_dict(model.event), - ]) + self.assertEqual( + self.bc.database.list_of("events.event"), + [ + self.bc.format.to_dict(model.event), + ], + ) # When: have a Event with url # Then: redirect to the liveclass - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_a_live_class(self): self.headers(academy=1) - event = {'live_stream_url': self.bc.fake.url()} - model = self.bc.database.create(user=1, - profile_academy=1, - capability='start_or_end_event', - role='potato', - event=event) + event = {"live_stream_url": self.bc.fake.url()} + model = self.bc.database.create( + user=1, profile_academy=1, capability="start_or_end_event", role="potato", event=event + ) self.client.force_authenticate(model.user) - url = reverse_lazy('events:academy_event_id_join', kwargs={'event_id': 1}) + url = reverse_lazy("events:academy_event_id_join", kwargs={"event_id": 1}) response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - expected = '' + expected = "" # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, model.event.live_stream_url) - self.assertEqual(self.bc.database.list_of('events.event'), [ - self.bc.format.to_dict(model.event), - ]) + self.assertEqual( + self.bc.database.list_of("events.event"), + [ + self.bc.format.to_dict(model.event), + ], + ) diff --git a/breathecode/events/tests/urls/tests_academy_event_liveclass.py b/breathecode/events/tests/urls/tests_academy_event_liveclass.py index 61ed32420..8663fdf36 100644 --- a/breathecode/events/tests/urls/tests_academy_event_liveclass.py +++ b/breathecode/events/tests/urls/tests_academy_event_liveclass.py @@ -9,9 +9,9 @@ def cohort_serializer(cohort): return { - 'id': cohort.id, - 'name': cohort.name, - 'slug': cohort.slug, + "id": cohort.id, + "name": cohort.name, + "slug": cohort.slug, } @@ -25,13 +25,13 @@ def get_serializer(self, event_type, cohort, data={}): started_at = self.bc.datetime.to_iso_string(event_type.started_at) return { - 'id': event_type.id, - 'started_at': started_at, - 'ended_at': ended_at, - 'cohort': 
cohort_serializer(cohort), - 'starting_at': self.bc.datetime.to_iso_string(event_type.starting_at), - 'ending_at': self.bc.datetime.to_iso_string(event_type.ending_at), - 'hash': event_type.hash, + "id": event_type.id, + "started_at": started_at, + "ended_at": ended_at, + "cohort": cohort_serializer(cohort), + "starting_at": self.bc.datetime.to_iso_string(event_type.starting_at), + "ending_at": self.bc.datetime.to_iso_string(event_type.ending_at), + "hash": event_type.hash, **data, } @@ -40,16 +40,16 @@ class AcademyEventTestSuite(EventTestCase): # When: I call the API without authentication # Then: I should get a 401 error - @patch('breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event', MagicMock()) - @patch('breathecode.admissions.signals.timeslot_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event", MagicMock()) + @patch("breathecode.admissions.signals.timeslot_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_no_auth(self): - url = reverse_lazy('events:academy_event_liveclass') + url = reverse_lazy("events:academy_event_liveclass") response = self.client.get(url) json = response.json() - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} self.assertEqual(json, expected) self.assertEqual(response.status_code, 401) @@ -57,16 +57,16 @@ def test_no_auth(self): # Given: User # When: User is authenticated and has no LiveClass # Then: I should get a 200 status code with no data - @patch('breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event', MagicMock()) - @patch('breathecode.admissions.signals.timeslot_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event", MagicMock()) + @patch("breathecode.admissions.signals.timeslot_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_zero_live_classes(self): - model = self.bc.database.create(user=1, profile_academy=1, role=1, capability='start_or_end_class') + model = self.bc.database.create(user=1, profile_academy=1, role=1, capability="start_or_end_class") self.client.force_authenticate(model.user) self.bc.request.set_headers(academy=1) - url = reverse_lazy('events:academy_event_liveclass') + url = reverse_lazy("events:academy_event_liveclass") response = self.client.get(url) json = response.json() @@ -75,28 +75,30 @@ def test_zero_live_classes(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, 200) - self.assertEqual(self.bc.database.list_of('events.LiveClass'), []) + self.assertEqual(self.bc.database.list_of("events.LiveClass"), []) # Given: a 
User, LiveClass, Cohort and CohortTimeSlot # When: User is authenticated, has LiveClass and CohortUser belongs to this LiveClass # Then: I should get a 200 status code with the LiveClass data - @patch('breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event', MagicMock()) - @patch('breathecode.admissions.signals.timeslot_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event", MagicMock()) + @patch("breathecode.admissions.signals.timeslot_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_one_live_class(self): - model = self.bc.database.create(user=1, - live_class=1, - cohort=1, - cohort_time_slot=1, - cohort_user=1, - profile_academy=1, - role=1, - capability='start_or_end_class') + model = self.bc.database.create( + user=1, + live_class=1, + cohort=1, + cohort_time_slot=1, + cohort_user=1, + profile_academy=1, + role=1, + capability="start_or_end_class", + ) self.client.force_authenticate(model.user) self.bc.request.set_headers(academy=1) - url = reverse_lazy('events:academy_event_liveclass') + url = reverse_lazy("events:academy_event_liveclass") response = self.client.get(url) json = response.json() @@ -104,80 +106,90 @@ def test_one_live_class(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, 200) - self.assertEqual(self.bc.database.list_of('events.LiveClass'), [ - self.bc.format.to_dict(model.live_class), - ]) + self.assertEqual( + self.bc.database.list_of("events.LiveClass"), + [ + self.bc.format.to_dict(model.live_class), + ], + ) # Given: LiveClass.objects.filter is mocked # When: the mock is called # Then: the mock should be called with the correct arguments and does not raise an exception - @patch('breathecode.utils.api_view_extensions.extensions.lookup_extension.compile_lookup', - MagicMock(wraps=lookup_extension.compile_lookup)) - @patch('breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event', MagicMock()) - @patch('breathecode.admissions.signals.timeslot_saved.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch( + "breathecode.utils.api_view_extensions.extensions.lookup_extension.compile_lookup", + MagicMock(wraps=lookup_extension.compile_lookup), + ) + @patch("breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event", MagicMock()) + @patch("breathecode.admissions.signals.timeslot_saved.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_lookup_extension(self): - model = self.bc.database.create(user=1, - live_class=1, - cohort=1, - cohort_time_slot=1, - cohort_user=1, - profile_academy=1, - role=1, - capability='start_or_end_class') + model = self.bc.database.create( + user=1, + live_class=1, + cohort=1, + cohort_time_slot=1, + cohort_user=1, + profile_academy=1, + role=1, + 
capability="start_or_end_class", + ) self.client.force_authenticate(model.user) self.bc.request.set_headers(academy=1) args, kwargs = self.bc.format.call( - 'en', + "en", strings={ - 'exact': [ - 'remote_meeting_url', - 'cohort_time_slot__cohort__cohortuser__user__email', + "exact": [ + "remote_meeting_url", + "cohort_time_slot__cohort__cohortuser__user__email", ], }, bools={ - 'is_null': ['ended_at'], + "is_null": ["ended_at"], }, datetimes={ - 'gte': ['starting_at'], - 'lte': ['ending_at'], + "gte": ["starting_at"], + "lte": ["ending_at"], }, slugs=[ - 'cohort_time_slot__cohort__cohortuser__user', - 'cohort_time_slot__cohort', - 'cohort_time_slot__cohort__academy', - 'cohort_time_slot__cohort__syllabus_version__syllabus', + "cohort_time_slot__cohort__cohortuser__user", + "cohort_time_slot__cohort", + "cohort_time_slot__cohort__academy", + "cohort_time_slot__cohort__syllabus_version__syllabus", ], overwrite={ - 'cohort': 'cohort_time_slot__cohort', - 'academy': 'cohort_time_slot__cohort__academy', - 'syllabus': 'cohort_time_slot__cohort__syllabus_version__syllabus', - 'start': 'starting_at', - 'end': 'ending_at', - 'upcoming': 'ended_at', - 'user': 'cohort_time_slot__cohort__cohortuser__user', - 'user_email': 'cohort_time_slot__cohort__cohortuser__user__email', + "cohort": "cohort_time_slot__cohort", + "academy": "cohort_time_slot__cohort__academy", + "syllabus": "cohort_time_slot__cohort__syllabus_version__syllabus", + "start": "starting_at", + "end": "ending_at", + "upcoming": "ended_at", + "user": "cohort_time_slot__cohort__cohortuser__user", + "user_email": "cohort_time_slot__cohort__cohortuser__user__email", }, ) query = self.bc.format.lookup(*args, **kwargs) - url = reverse_lazy('events:academy_event_liveclass') + '?' + self.bc.format.querystring(query) - - self.assertEqual([x for x in query], [ - 'user', - 'cohort', - 'academy', - 'syllabus', - 'remote_meeting_url', - 'user_email', - 'start', - 'end', - 'upcoming', - ]) + url = reverse_lazy("events:academy_event_liveclass") + "?" 
+ self.bc.format.querystring(query) + + self.assertEqual( + [x for x in query], + [ + "user", + "cohort", + "academy", + "syllabus", + "remote_meeting_url", + "user_email", + "start", + "end", + "upcoming", + ], + ) response = self.client.get(url) @@ -187,15 +199,15 @@ def test_lookup_extension(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, 200) - for x in ['overwrite', 'custom_fields']: + for x in ["overwrite", "custom_fields"]: if x in kwargs: del kwargs[x] - for field in ['ids', 'slugs']: + for field in ["ids", "slugs"]: values = kwargs.get(field, tuple()) kwargs[field] = tuple(values) - for field in ['ints', 'strings', 'bools', 'datetimes']: + for field in ["ints", "strings", "bools", "datetimes"]: modes = kwargs.get(field, {}) for mode in modes: if not isinstance(kwargs[field][mode], tuple): @@ -203,10 +215,16 @@ def test_lookup_extension(self): kwargs[field] = frozenset(modes.items()) - self.bc.check.calls(lookup_extension.compile_lookup.call_args_list, [ - call(**kwargs), - ]) + self.bc.check.calls( + lookup_extension.compile_lookup.call_args_list, + [ + call(**kwargs), + ], + ) - self.assertEqual(self.bc.database.list_of('events.LiveClass'), [ - self.bc.format.to_dict(model.live_class), - ]) + self.assertEqual( + self.bc.database.list_of("events.LiveClass"), + [ + self.bc.format.to_dict(model.live_class), + ], + ) diff --git a/breathecode/events/tests/urls/tests_academy_event_liveclass_join_hash.py b/breathecode/events/tests/urls/tests_academy_event_liveclass_join_hash.py index 15e11996d..0aa006e54 100644 --- a/breathecode/events/tests/urls/tests_academy_event_liveclass_join_hash.py +++ b/breathecode/events/tests/urls/tests_academy_event_liveclass_join_hash.py @@ -9,45 +9,45 @@ # IMPORTANT: the loader.render_to_string in a function is inside of function render def render_message(message, academy=None): request = None - context = {'MESSAGE': message, 'BUTTON': None, 'BUTTON_TARGET': '_blank', 'LINK': None} + context = {"MESSAGE": message, "BUTTON": None, "BUTTON_TARGET": "_blank", "LINK": None} if academy: - context['COMPANY_INFO_EMAIL'] = academy.feedback_email - context['COMPANY_LEGAL_NAME'] = academy.legal_name or academy.name - context['COMPANY_LOGO'] = academy.logo_url - context['COMPANY_NAME'] = academy.name + context["COMPANY_INFO_EMAIL"] = academy.feedback_email + context["COMPANY_LEGAL_NAME"] = academy.legal_name or academy.name + context["COMPANY_LOGO"] = academy.logo_url + context["COMPANY_NAME"] = academy.name - if 'heading' not in context: - context['heading'] = academy.name + if "heading" not in context: + context["heading"] = academy.name - return loader.render_to_string('message.html', context, request) + return loader.render_to_string("message.html", context, request) class AcademyVenueTestSuite(EventTestCase): # When: no auth # Then: return 401 - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_no_auth(self): self.headers(academy=1) - url = reverse_lazy('events:academy_event_liveclass_join_hash', kwargs={'hash': '1234'}) + url = reverse_lazy("events:academy_event_liveclass_join_hash", kwargs={"hash": "1234"}) response = self.client.get(url) json = response.json() - expected = 
{'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} self.assertEqual(json, expected) self.assertEqual(response.status_code, 401) - self.assertEqual(self.bc.database.list_of('events.LiveClass'), []) + self.assertEqual(self.bc.database.list_of("events.LiveClass"), []) # When: no capability # Then: return 403 - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_with_no_capability(self): self.headers(academy=1) - url = reverse_lazy('events:academy_event_liveclass_join_hash', kwargs={'hash': '1234'}) + url = reverse_lazy("events:academy_event_liveclass_join_hash", kwargs={"hash": "1234"}) model = self.bc.database.create(user=1) self.client.force_authenticate(model.user) @@ -55,103 +55,113 @@ def test_with_no_capability(self): response = self.client.get(url) json = response.json() expected = { - 'detail': "You (user: 1) don't have this capability: start_or_end_class for academy 1", - 'status_code': 403 + "detail": "You (user: 1) don't have this capability: start_or_end_class for academy 1", + "status_code": 403, } self.assertEqual(json, expected) self.assertEqual(response.status_code, 403) - self.assertEqual(self.bc.database.list_of('events.LiveClass'), []) + self.assertEqual(self.bc.database.list_of("events.LiveClass"), []) # When: no LiveClass # Then: return 404 - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_no_live_classes(self): self.headers(academy=1) - url = reverse_lazy('events:academy_event_liveclass_join_hash', kwargs={'hash': '1234'}) - model = self.bc.database.create(user=1, profile_academy=1, capability='start_or_end_class', role='potato') + url = reverse_lazy("events:academy_event_liveclass_join_hash", kwargs={"hash": "1234"}) + model = self.bc.database.create(user=1, profile_academy=1, capability="start_or_end_class", role="potato") self.client.force_authenticate(model.user) response = self.client.get(url) json = response.json() - expected = {'detail': 'not-found', 'status_code': 400} + expected = {"detail": "not-found", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, 400) - self.assertEqual(self.bc.database.list_of('events.LiveClass'), []) + self.assertEqual(self.bc.database.list_of("events.LiveClass"), []) # When: have a LiveClass with no url # Then: return 400 - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_no_url(self): self.headers(academy=1) - 
model = self.bc.database.create(user=1, - profile_academy=1, - capability='start_or_end_class', - role='potato', - live_class=1, - cohort=1, - cohort_user=1) + model = self.bc.database.create( + user=1, + profile_academy=1, + capability="start_or_end_class", + role="potato", + live_class=1, + cohort=1, + cohort_user=1, + ) self.client.force_authenticate(model.user) - url = reverse_lazy('events:academy_event_liveclass_join_hash', kwargs={'hash': model.live_class.hash}) + url = reverse_lazy("events:academy_event_liveclass_join_hash", kwargs={"hash": model.live_class.hash}) response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - expected = render_message('no-meeting-url', academy=model.academy) + expected = render_message("no-meeting-url", academy=model.academy) # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, 400) - self.assertEqual(self.bc.database.list_of('events.LiveClass'), [ - self.bc.format.to_dict(model.live_class), - ]) + self.assertEqual( + self.bc.database.list_of("events.LiveClass"), + [ + self.bc.format.to_dict(model.live_class), + ], + ) # When: have a LiveClass # Then: redirect to the liveclass - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_a_live_class(self): self.headers(academy=1) - cohort = {'online_meeting_url': self.bc.fake.url()} - model = self.bc.database.create(user=1, - profile_academy=1, - capability='start_or_end_class', - role='potato', - live_class=1, - cohort=cohort, - cohort_user=1) + cohort = {"online_meeting_url": self.bc.fake.url()} + model = self.bc.database.create( + user=1, + profile_academy=1, + capability="start_or_end_class", + role="potato", + live_class=1, + cohort=cohort, + cohort_user=1, + ) self.client.force_authenticate(model.user) - url = reverse_lazy('events:academy_event_liveclass_join_hash', kwargs={'hash': model.live_class.hash}) + url = reverse_lazy("events:academy_event_liveclass_join_hash", kwargs={"hash": model.live_class.hash}) response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - expected = '' + expected = "" # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, model.cohort.online_meeting_url) - self.assertEqual(self.bc.database.list_of('events.LiveClass'), [ - self.bc.format.to_dict(model.live_class), - ]) + self.assertEqual( + self.bc.database.list_of("events.LiveClass"), + [ + self.bc.format.to_dict(model.live_class), + ], + ) diff --git a/breathecode/events/tests/urls/tests_academy_eventype.py b/breathecode/events/tests/urls/tests_academy_eventype.py index 4f83ab1df..eb9936c3f 100644 --- 
a/breathecode/events/tests/urls/tests_academy_eventype.py +++ b/breathecode/events/tests/urls/tests_academy_eventype.py @@ -16,24 +16,24 @@ def get_serializer(event_type, academy=None, city=None, data={}): if city: city_serialized = { - 'name': city.name, + "name": city.name, } if academy: academy_serialized = { - 'city': city_serialized, - 'id': academy.id, - 'name': academy.name, - 'slug': academy.slug, + "city": city_serialized, + "id": academy.id, + "name": academy.name, + "slug": academy.slug, } return { - 'academy': academy_serialized, - 'id': event_type.id, - 'name': event_type.name, - 'slug': event_type.slug, - 'lang': event_type.lang, - 'description': event_type.description, + "academy": academy_serialized, + "id": event_type.id, + "name": event_type.name, + "slug": event_type.slug, + "lang": event_type.lang, + "description": event_type.description, **data, } @@ -43,11 +43,11 @@ class AcademyEventTestSuite(EventTestCase): def test_all_academy_events_no_auth(self): - url = reverse_lazy('events:academy_eventype') + url = reverse_lazy("events:academy_eventype") response = self.client.get(url) json = response.json() - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} self.assertEqual(json, expected) self.assertEqual(response.status_code, 401) @@ -56,8 +56,8 @@ def test_academy_event_type_no_results(self): self.bc.request.set_headers(academy=1) # TODO: this is bad placed - url = reverse_lazy('events:academy_eventype') - self.generate_models(authenticate=True, profile_academy=1, role=1, capability='read_event_type') + url = reverse_lazy("events:academy_eventype") + self.generate_models(authenticate=True, profile_academy=1, role=1, capability="read_event_type") response = self.client.get(url) json = response.json() @@ -70,21 +70,23 @@ def test_academy_event_type_with_results(self): self.bc.request.set_headers(academy=1) # TODO: this is bad placed - url = reverse_lazy('events:academy_eventype') + url = reverse_lazy("events:academy_eventype") event_type_kwargs = { - 'slug': 'potato', - 'name': 'Potato', - 'created_at': timezone.now(), - 'updated_at': timezone.now(), - 'icon_url': 'https://www.google.com', + "slug": "potato", + "name": "Potato", + "created_at": timezone.now(), + "updated_at": timezone.now(), + "icon_url": "https://www.google.com", } - model = self.generate_models(authenticate=True, - event=True, - event_type=True, - event_type_kwargs=event_type_kwargs, - profile_academy=1, - role=1, - capability='read_event_type') + model = self.generate_models( + authenticate=True, + event=True, + event_type=True, + event_type_kwargs=event_type_kwargs, + profile_academy=1, + role=1, + capability="read_event_type", + ) response = self.client.get(url) json = response.json() @@ -93,28 +95,35 @@ def test_academy_event_type_with_results(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, 200) - self.assertEqual(self.all_event_type_dict(), [{ - **self.model_to_dict(model, 'event_type'), - }]) + self.assertEqual( + self.all_event_type_dict(), + [ + { + **self.model_to_dict(model, "event_type"), + } + ], + ) def test_bad_academy_slug(self): self.bc.request.set_headers(academy=1) - url = reverse_lazy('events:academy_eventype') + '?academy=banana' + url = reverse_lazy("events:academy_eventype") + "?academy=banana" event_type_kwargs = { - 'slug': 'potato', - 'name': 'Potato', - 'created_at': timezone.now(), - 'updated_at': timezone.now(), - 
'icon_url': 'https://www.google.com', + "slug": "potato", + "name": "Potato", + "created_at": timezone.now(), + "updated_at": timezone.now(), + "icon_url": "https://www.google.com", } - model = self.generate_models(authenticate=True, - event=True, - event_type=True, - event_type_kwargs=event_type_kwargs, - profile_academy=1, - role=1, - capability='read_event_type') + model = self.generate_models( + authenticate=True, + event=True, + event_type=True, + event_type_kwargs=event_type_kwargs, + profile_academy=1, + role=1, + capability="read_event_type", + ) response = self.client.get(url) json = response.json() @@ -123,29 +132,36 @@ def test_bad_academy_slug(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, 200) - self.assertEqual(self.all_event_type_dict(), [{ - **self.model_to_dict(model, 'event_type'), - }]) + self.assertEqual( + self.all_event_type_dict(), + [ + { + **self.model_to_dict(model, "event_type"), + } + ], + ) def test_properly_academy_slug(self): self.bc.request.set_headers(academy=1) event_type_kwargs = { - 'slug': 'potato', - 'name': 'Potato', - 'created_at': timezone.now(), - 'updated_at': timezone.now(), - 'icon_url': 'https://www.google.com', + "slug": "potato", + "name": "Potato", + "created_at": timezone.now(), + "updated_at": timezone.now(), + "icon_url": "https://www.google.com", } - model = self.generate_models(authenticate=True, - academy=1, - event=True, - event_type=True, - event_type_kwargs=event_type_kwargs, - profile_academy=1, - role=1, - capability='read_event_type') - url = reverse_lazy('events:academy_eventype') + f'?academy={model.academy.slug}' + model = self.generate_models( + authenticate=True, + academy=1, + event=True, + event_type=True, + event_type_kwargs=event_type_kwargs, + profile_academy=1, + role=1, + capability="read_event_type", + ) + url = reverse_lazy("events:academy_eventype") + f"?academy={model.academy.slug}" response = self.client.get(url) json = response.json() @@ -154,28 +170,35 @@ def test_properly_academy_slug(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, 200) - self.assertEqual(self.all_event_type_dict(), [{ - **self.model_to_dict(model, 'event_type'), - }]) + self.assertEqual( + self.all_event_type_dict(), + [ + { + **self.model_to_dict(model, "event_type"), + } + ], + ) def test_bad_allow_shared_creation_slug(self): self.bc.request.set_headers(academy=1) - url = reverse_lazy('events:academy_eventype') + '?allow_shared_creation=false' + url = reverse_lazy("events:academy_eventype") + "?allow_shared_creation=false" event_type_kwargs = { - 'slug': 'potato', - 'name': 'Potato', - 'created_at': timezone.now(), - 'updated_at': timezone.now(), - 'icon_url': 'https://www.google.com', + "slug": "potato", + "name": "Potato", + "created_at": timezone.now(), + "updated_at": timezone.now(), + "icon_url": "https://www.google.com", } - model = self.generate_models(authenticate=True, - event=True, - event_type=True, - event_type_kwargs=event_type_kwargs, - profile_academy=1, - role=1, - capability='read_event_type') + model = self.generate_models( + authenticate=True, + event=True, + event_type=True, + event_type_kwargs=event_type_kwargs, + profile_academy=1, + role=1, + capability="read_event_type", + ) response = self.client.get(url) json = response.json() @@ -184,29 +207,36 @@ def test_bad_allow_shared_creation_slug(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, 200) - self.assertEqual(self.all_event_type_dict(), [{ - **self.model_to_dict(model, 
'event_type'), - }]) + self.assertEqual( + self.all_event_type_dict(), + [ + { + **self.model_to_dict(model, "event_type"), + } + ], + ) def test_properly_allow_shared_creation_slug(self): self.bc.request.set_headers(academy=1) event_type_kwargs = { - 'slug': 'potato', - 'name': 'Potato', - 'created_at': timezone.now(), - 'updated_at': timezone.now(), - 'icon_url': 'https://www.google.com', + "slug": "potato", + "name": "Potato", + "created_at": timezone.now(), + "updated_at": timezone.now(), + "icon_url": "https://www.google.com", } - model = self.generate_models(authenticate=True, - academy=1, - event=True, - event_type=True, - event_type_kwargs=event_type_kwargs, - profile_academy=1, - role=1, - capability='read_event_type') - url = reverse_lazy('events:academy_eventype') + f'?allow_shared_creation=true' + model = self.generate_models( + authenticate=True, + academy=1, + event=True, + event_type=True, + event_type_kwargs=event_type_kwargs, + profile_academy=1, + role=1, + capability="read_event_type", + ) + url = reverse_lazy("events:academy_eventype") + f"?allow_shared_creation=true" response = self.client.get(url) json = response.json() @@ -216,27 +246,30 @@ def test_properly_allow_shared_creation_slug(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, 200) - self.assertEqual(self.all_event_type_dict(), [{ - **self.model_to_dict(model, 'event_type'), - }]) + self.assertEqual( + self.all_event_type_dict(), + [ + { + **self.model_to_dict(model, "event_type"), + } + ], + ) def test_post_event_type_without_slug(self): self.bc.request.set_headers(academy=1) - model = self.generate_models(authenticate=True, - academy=1, - profile_academy=1, - role='potato', - capability='crud_event_type') - data = {'name': 'Potato', 'description': 'Potato', 'icon_url': 'https://www.google.com', 'lang': 'en'} + model = self.generate_models( + authenticate=True, academy=1, profile_academy=1, role="potato", capability="crud_event_type" + ) + data = {"name": "Potato", "description": "Potato", "icon_url": "https://www.google.com", "lang": "en"} - url = reverse_lazy('events:academy_eventype') + url = reverse_lazy("events:academy_eventype") response = self.client.post(url, data) json = response.json() - expected = {'slug': ['This field is required.']} + expected = {"slug": ["This field is required."]} self.assertEqual(json, expected) self.assertEqual(response.status_code, 400) @@ -246,46 +279,41 @@ def test_post_event_type_without_slug(self): def test_post_event_type(self): self.bc.request.set_headers(academy=1) - model = self.generate_models(authenticate=True, - academy=1, - profile_academy=1, - role='potato', - capability='crud_event_type') + model = self.generate_models( + authenticate=True, academy=1, profile_academy=1, role="potato", capability="crud_event_type" + ) data = { - 'slug': 'potato', - 'name': 'Potato', - 'description': 'Potato', - 'icon_url': 'https://www.google.com', - 'lang': 'en' + "slug": "potato", + "name": "Potato", + "description": "Potato", + "icon_url": "https://www.google.com", + "lang": "en", } - url = reverse_lazy('events:academy_eventype') + url = reverse_lazy("events:academy_eventype") response = self.client.post(url, data) json = response.json() - self.assertDatetime(json['created_at']) - self.assertDatetime(json['updated_at']) + self.assertDatetime(json["created_at"]) + self.assertDatetime(json["updated_at"]) - del json['created_at'] - del json['updated_at'] + del json["created_at"] + del json["updated_at"] expected = { - 'id': 1, - 'academy': 1, - 
'free_for_bootcamps': False, - 'allow_shared_creation': False, - 'visibility_settings': [], + "id": 1, + "academy": 1, + "free_for_bootcamps": False, + "allow_shared_creation": False, + "visibility_settings": [], **data, } self.assertEqual(json, expected) self.assertEqual(response.status_code, 201) - self.assertEqual(self.all_event_type_dict(), [{ - 'id': 1, - 'academy_id': 1, - 'allow_shared_creation': False, - 'free_for_bootcamps': False, - **data - }]) + self.assertEqual( + self.all_event_type_dict(), + [{"id": 1, "academy_id": 1, "allow_shared_creation": False, "free_for_bootcamps": False, **data}], + ) diff --git a/breathecode/events/tests/urls/tests_academy_eventype_slug.py b/breathecode/events/tests/urls/tests_academy_eventype_slug.py index 17543b146..916d6fd73 100644 --- a/breathecode/events/tests/urls/tests_academy_eventype_slug.py +++ b/breathecode/events/tests/urls/tests_academy_eventype_slug.py @@ -14,27 +14,27 @@ def get_serializer(event_type, academy=None, city=None, data={}): if city: city_serialized = { - 'name': city.name, + "name": city.name, } if academy: academy_serialized = { - 'city': city_serialized, - 'id': academy.id, - 'name': academy.name, - 'slug': academy.slug, + "city": city_serialized, + "id": academy.id, + "name": academy.name, + "slug": academy.slug, } return { - 'academy': academy_serialized, - 'id': event_type.id, - 'name': event_type.name, - 'slug': event_type.slug, - 'lang': event_type.lang, - 'icon_url': event_type.icon_url, - 'allow_shared_creation': event_type.allow_shared_creation, - 'description': event_type.description, - 'visibility_settings': event_type.visibility_settings, + "academy": academy_serialized, + "id": event_type.id, + "name": event_type.name, + "slug": event_type.slug, + "lang": event_type.lang, + "icon_url": event_type.icon_url, + "allow_shared_creation": event_type.allow_shared_creation, + "description": event_type.description, + "visibility_settings": event_type.visibility_settings, **data, } @@ -42,15 +42,15 @@ def get_serializer(event_type, academy=None, city=None, data={}): def put_serializer(event_type, data={}): return { - 'academy': event_type.academy, - 'id': event_type.id, - 'name': event_type.name, - 'slug': event_type.slug, - 'lang': event_type.lang, - 'icon_url': event_type.icon_url, - 'allow_shared_creation': event_type.allow_shared_creation, - 'free_for_bootcamps': event_type.free_for_bootcamps, - 'description': event_type.description, + "academy": event_type.academy, + "id": event_type.id, + "name": event_type.name, + "slug": event_type.slug, + "lang": event_type.lang, + "icon_url": event_type.icon_url, + "allow_shared_creation": event_type.allow_shared_creation, + "free_for_bootcamps": event_type.free_for_bootcamps, + "description": event_type.description, **data, } @@ -60,11 +60,11 @@ class AcademyEventTestSuite(EventTestCase): def test_academy_event_type_slug_no_auth(self): - url = reverse_lazy('events:academy_eventype_slug', kwargs={'event_type_slug': 'funny_event'}) + url = reverse_lazy("events:academy_eventype_slug", kwargs={"event_type_slug": "funny_event"}) response = self.client.get(url) json = response.json() - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} self.assertEqual(json, expected) self.assertEqual(response.status_code, 401) @@ -72,17 +72,17 @@ def test_academy_event_type_slug_no_auth(self): def test_academy_event_type_with_bad_slug(self): 
self.bc.request.set_headers(academy=1) - url = reverse_lazy('events:academy_eventype_slug', kwargs={'event_type_slug': 'funny_event'}) + url = reverse_lazy("events:academy_eventype_slug", kwargs={"event_type_slug": "funny_event"}) self.generate_models( authenticate=True, profile_academy=1, role=1, - capability='read_event_type', + capability="read_event_type", ) response = self.client.get(url) json = response.json() - expected = {'detail': 'event-type-not-found', 'status_code': 400} + expected = {"detail": "event-type-not-found", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, 400) @@ -90,114 +90,125 @@ def test_academy_event_type_with_bad_slug(self): def test_academy_event_type_with_results(self): self.bc.request.set_headers(academy=1) - event_type_slug = 'potato' + event_type_slug = "potato" event_type_kwargs = { - 'slug': event_type_slug, - 'name': 'Potato', - 'created_at': timezone.now(), - 'updated_at': timezone.now(), - 'icon_url': 'https://www.google.com', + "slug": event_type_slug, + "name": "Potato", + "created_at": timezone.now(), + "updated_at": timezone.now(), + "icon_url": "https://www.google.com", } - url = reverse_lazy('events:academy_eventype_slug', kwargs={'event_type_slug': event_type_slug}) - model = self.generate_models(authenticate=True, - event=True, - event_type=True, - event_type_kwargs=event_type_kwargs, - profile_academy=1, - role=1, - capability='read_event_type') + url = reverse_lazy("events:academy_eventype_slug", kwargs={"event_type_slug": event_type_slug}) + model = self.generate_models( + authenticate=True, + event=True, + event_type=True, + event_type_kwargs=event_type_kwargs, + profile_academy=1, + role=1, + capability="read_event_type", + ) response = self.client.get(url) json = response.json() - expected = get_serializer(model.event_type, model.academy, model.city, {'visibility_settings': []}) + expected = get_serializer(model.event_type, model.academy, model.city, {"visibility_settings": []}) self.assertEqual(json, expected) self.assertEqual(response.status_code, 200) - self.assertEqual(self.all_event_type_dict(), [{ - **self.model_to_dict(model, 'event_type'), - }]) + self.assertEqual( + self.all_event_type_dict(), + [ + { + **self.model_to_dict(model, "event_type"), + } + ], + ) def test_academy_event_type_slug__put(self): """Test /cohort without auth""" self.headers(academy=1) - event_type_slug = 'potato' + event_type_slug = "potato" event_type_kwargs = { - 'slug': event_type_slug, - 'name': 'Potato', - 'created_at': timezone.now(), - 'updated_at': timezone.now(), - 'icon_url': 'https://www.google.com', + "slug": event_type_slug, + "name": "Potato", + "created_at": timezone.now(), + "updated_at": timezone.now(), + "icon_url": "https://www.google.com", } - model = self.generate_models(authenticate=True, - event=True, - event_type=True, - event_type_kwargs=event_type_kwargs, - profile_academy=1, - role=1, - capability='crud_event_type') + model = self.generate_models( + authenticate=True, + event=True, + event_type=True, + event_type_kwargs=event_type_kwargs, + profile_academy=1, + role=1, + capability="crud_event_type", + ) - url = reverse_lazy('events:academy_eventype_slug', kwargs={'event_type_slug': 'potato'}) + url = reverse_lazy("events:academy_eventype_slug", kwargs={"event_type_slug": "potato"}) current_date = self.datetime_now() data = { - 'id': 1, - 'slug': 'potato', - 'name': 'SUPER NEW event type changed', - 'description': 'funtastic event type' + "id": 1, + "slug": "potato", + "name": "SUPER NEW 
event type changed", + "description": "funtastic event type", } - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() - self.assertDatetime(json['created_at']) - self.assertDatetime(json['updated_at']) + self.assertDatetime(json["created_at"]) + self.assertDatetime(json["updated_at"]) - del json['created_at'] - del json['updated_at'] + del json["created_at"] + del json["updated_at"] - expected = put_serializer(model.event_type, {**data, 'visibility_settings': [], 'academy': 1}) + expected = put_serializer(model.event_type, {**data, "visibility_settings": [], "academy": 1}) self.assertEqual(json, expected) self.assertEqual(response.status_code, 200) - self.assertEqual(self.bc.database.list_of('events.EventType'), self.bc.format.to_dict(model.event_type)) + self.assertEqual(self.bc.database.list_of("events.EventType"), self.bc.format.to_dict(model.event_type)) def test_academy_event_type_slug__put_with_bad_slug(self): """Test /cohort without auth""" self.headers(academy=1) - event_type_slug = 'potato' + event_type_slug = "potato" event_type_kwargs = { - 'slug': event_type_slug, - 'name': 'Potato', - 'created_at': timezone.now(), - 'updated_at': timezone.now(), - 'icon_url': 'https://www.google.com', + "slug": event_type_slug, + "name": "Potato", + "created_at": timezone.now(), + "updated_at": timezone.now(), + "icon_url": "https://www.google.com", } - model = self.generate_models(authenticate=True, - event=True, - event_type=True, - event_type_kwargs=event_type_kwargs, - profile_academy=1, - role=1, - capability='crud_event_type') + model = self.generate_models( + authenticate=True, + event=True, + event_type=True, + event_type_kwargs=event_type_kwargs, + profile_academy=1, + role=1, + capability="crud_event_type", + ) - url = reverse_lazy('events:academy_eventype_slug', kwargs={'event_type_slug': 'potattto'}) + url = reverse_lazy("events:academy_eventype_slug", kwargs={"event_type_slug": "potattto"}) current_date = self.datetime_now() data = { - 'id': 1, - 'slug': 'potato', - 'name': 'SUPER NEW event type changed', - 'description': 'funtastic event type' + "id": 1, + "slug": "potato", + "name": "SUPER NEW event type changed", + "description": "funtastic event type", } - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() - expected = {'detail': 'event-type-not-found', 'status_code': 400} + expected = {"detail": "event-type-not-found", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, 400) @@ -206,86 +217,89 @@ def test_academy_event_type_slug_put_without_icon_url(self): """Test /cohort without auth""" self.headers(academy=1) - event_type_slug = 'potato' + event_type_slug = "potato" event_type_kwargs = { - 'slug': event_type_slug, - 'name': 'Potato', - 'created_at': timezone.now(), - 'updated_at': timezone.now(), - 'icon_url': 'https://www.google.com', + "slug": event_type_slug, + "name": "Potato", + "created_at": timezone.now(), + "updated_at": timezone.now(), + "icon_url": "https://www.google.com", } - model = self.generate_models(authenticate=True, - event=True, - event_type=True, - event_type_kwargs=event_type_kwargs, - profile_academy=1, - role=1, - capability='crud_event_type') + model = self.generate_models( + authenticate=True, + event=True, + event_type=True, + event_type_kwargs=event_type_kwargs, + profile_academy=1, + role=1, + capability="crud_event_type", + ) - url = 
reverse_lazy('events:academy_eventype_slug', kwargs={'event_type_slug': 'potato'}) + url = reverse_lazy("events:academy_eventype_slug", kwargs={"event_type_slug": "potato"}) current_date = self.datetime_now() data = { - 'id': 1, - 'slug': 'potato', - 'name': 'SUPER NEW event type changed', - 'description': 'funtastic event type' + "id": 1, + "slug": "potato", + "name": "SUPER NEW event type changed", + "description": "funtastic event type", } - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() - expected = {'icon_url': ['This field is required.']} + expected = {"icon_url": ["This field is required."]} self.assertEqual(json, expected) self.assertEqual(response.status_code, 400) - self.assertEqual(self.bc.database.list_of('events.EventType'), [{**self.bc.format.to_dict(model.event_type)}]) + self.assertEqual(self.bc.database.list_of("events.EventType"), [{**self.bc.format.to_dict(model.event_type)}]) def test_academy_event_type_slug__put(self): """Test /cohort without auth""" self.headers(academy=1) - event_type_slug = 'potato' + event_type_slug = "potato" event_type_kwargs = { - 'slug': event_type_slug, - 'name': 'Potato', - 'created_at': timezone.now(), - 'updated_at': timezone.now(), - 'icon_url': 'https://www.google.com', + "slug": event_type_slug, + "name": "Potato", + "created_at": timezone.now(), + "updated_at": timezone.now(), + "icon_url": "https://www.google.com", } - model = self.generate_models(authenticate=True, - event=True, - event_type=True, - event_type_kwargs=event_type_kwargs, - profile_academy=1, - role=1, - capability='crud_event_type') + model = self.generate_models( + authenticate=True, + event=True, + event_type=True, + event_type_kwargs=event_type_kwargs, + profile_academy=1, + role=1, + capability="crud_event_type", + ) - url = reverse_lazy('events:academy_eventype_slug', kwargs={'event_type_slug': 'potato'}) + url = reverse_lazy("events:academy_eventype_slug", kwargs={"event_type_slug": "potato"}) current_date = self.datetime_now() data = { - 'id': 1, - 'slug': 'potato', - 'name': 'SUPER NEW event type changed', - 'icon_url': 'https://www.google.com', - 'description': 'funtastic event type' + "id": 1, + "slug": "potato", + "name": "SUPER NEW event type changed", + "icon_url": "https://www.google.com", + "description": "funtastic event type", } - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() - self.assertDatetime(json['created_at']) - self.assertDatetime(json['updated_at']) + self.assertDatetime(json["created_at"]) + self.assertDatetime(json["updated_at"]) - del json['created_at'] - del json['updated_at'] + del json["created_at"] + del json["updated_at"] - expected = put_serializer(model.event_type, {**data, 'academy': 1}) + expected = put_serializer(model.event_type, {**data, "academy": 1}) self.assertEqual(json, expected) self.assertEqual(response.status_code, 200) - self.assertEqual(self.bc.database.list_of('events.EventType'), [{ - **self.bc.format.to_dict(model.event_type), - **data - }]) + self.assertEqual( + self.bc.database.list_of("events.EventType"), [{**self.bc.format.to_dict(model.event_type), **data}] + ) diff --git a/breathecode/events/tests/urls/tests_academy_eventype_slug_visibilitysetting.py b/breathecode/events/tests/urls/tests_academy_eventype_slug_visibilitysetting.py index b2edd24e4..d6e8e57be 100644 --- 
a/breathecode/events/tests/urls/tests_academy_eventype_slug_visibilitysetting.py +++ b/breathecode/events/tests/urls/tests_academy_eventype_slug_visibilitysetting.py @@ -11,22 +11,30 @@ def get_serializer(visibility_setting, data={}): return { - 'id': visibility_setting['id'], - 'academy': { - 'id': visibility_setting['academy']['id'], - 'name': visibility_setting['academy']['name'], - 'slug': visibility_setting['academy']['slug'], + "id": visibility_setting["id"], + "academy": { + "id": visibility_setting["academy"]["id"], + "name": visibility_setting["academy"]["name"], + "slug": visibility_setting["academy"]["slug"], }, - 'cohort': { - 'id': visibility_setting['cohort']['id'], - 'name': visibility_setting['cohort']['name'], - 'slug': visibility_setting['cohort']['slug'], - } if visibility_setting['cohort'] else None, - 'syllabus': { - 'id': visibility_setting['syllabus']['id'], - 'name': visibility_setting['syllabus']['name'], - 'slug': visibility_setting['syllabus']['slug'], - } if visibility_setting['syllabus'] else None, + "cohort": ( + { + "id": visibility_setting["cohort"]["id"], + "name": visibility_setting["cohort"]["name"], + "slug": visibility_setting["cohort"]["slug"], + } + if visibility_setting["cohort"] + else None + ), + "syllabus": ( + { + "id": visibility_setting["syllabus"]["id"], + "name": visibility_setting["syllabus"]["name"], + "slug": visibility_setting["syllabus"]["slug"], + } + if visibility_setting["syllabus"] + else None + ), **data, } @@ -36,11 +44,11 @@ class AcademyEventTypeVisibilitySettingsTestSuite(EventTestCase): def test_post_event_type_with_no_auth(self): - url = reverse_lazy('events:academy_eventype_slug_visibilitysetting', kwargs={'event_type_slug': 'funny_event'}) + url = reverse_lazy("events:academy_eventype_slug_visibilitysetting", kwargs={"event_type_slug": "funny_event"}) response = self.client.get(url) json = response.json() - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} self.assertEqual(json, expected) self.assertEqual(response.status_code, 401) @@ -48,17 +56,17 @@ def test_post_event_type_with_no_auth(self): def test_get_visibilitysetting_with_bad_slug(self): self.bc.request.set_headers(academy=1) - url = reverse_lazy('events:academy_eventype_slug_visibilitysetting', kwargs={'event_type_slug': 'funny_event'}) + url = reverse_lazy("events:academy_eventype_slug_visibilitysetting", kwargs={"event_type_slug": "funny_event"}) self.generate_models( authenticate=True, profile_academy=1, role=1, - capability='read_event_type', + capability="read_event_type", ) response = self.client.get(url) json = response.json() - expected = {'detail': 'not-found', 'status_code': 400} + expected = {"detail": "not-found", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, 400) @@ -66,18 +74,14 @@ def test_get_visibilitysetting_with_bad_slug(self): def test_get_visibilitysetting(self): self.bc.request.set_headers(academy=1) - url = reverse_lazy('events:academy_eventype_slug_visibilitysetting', kwargs={'event_type_slug': 'funny_event'}) + url = reverse_lazy("events:academy_eventype_slug_visibilitysetting", kwargs={"event_type_slug": "funny_event"}) model = self.generate_models( authenticate=True, profile_academy=1, role=1, event_type_visibility_setting=True, - event_type={ - 'slug': 'funny_event', - 'icon_url': 'https://www.google.com', - 'visibility_settings': 1 - }, - 
capability='read_event_type', + event_type={"slug": "funny_event", "icon_url": "https://www.google.com", "visibility_settings": 1}, + capability="read_event_type", ) response = self.client.get(url) @@ -91,17 +95,17 @@ def test_get_visibilitysetting(self): def test_post_visibilitysetting_with_bad_slug(self): self.bc.request.set_headers(academy=1) - url = reverse_lazy('events:academy_eventype_slug_visibilitysetting', kwargs={'event_type_slug': 'funny_event'}) + url = reverse_lazy("events:academy_eventype_slug_visibilitysetting", kwargs={"event_type_slug": "funny_event"}) self.generate_models( authenticate=True, profile_academy=1, role=1, - capability='crud_event_type', + capability="crud_event_type", ) response = self.client.post(url) json = response.json() - expected = {'detail': 'event-type-not-found', 'status_code': 400} + expected = {"detail": "event-type-not-found", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, 400) @@ -109,22 +113,19 @@ def test_post_visibilitysetting_with_bad_slug(self): def test_post_visibilitysetting_with_bad_syllabus(self): self.bc.request.set_headers(academy=1) - url = reverse_lazy('events:academy_eventype_slug_visibilitysetting', kwargs={'event_type_slug': 'funny_event'}) + url = reverse_lazy("events:academy_eventype_slug_visibilitysetting", kwargs={"event_type_slug": "funny_event"}) model = self.generate_models( authenticate=True, profile_academy=1, role=1, - capability='crud_event_type', - event_type={ - 'slug': 'funny_event', - 'icon_url': 'https://www.google.com' - }, + capability="crud_event_type", + event_type={"slug": "funny_event", "icon_url": "https://www.google.com"}, ) - data = {'syllabus': 1} + data = {"syllabus": 1} response = self.client.post(url, data) json = response.json() - expected = {'detail': 'syllabus-not-found', 'status_code': 400} + expected = {"detail": "syllabus-not-found", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, 400) @@ -133,22 +134,19 @@ def test_post_visibilitysetting_with_bad_syllabus(self): def test_post_visibilitysetting_with_bad_cohort(self): self.bc.request.set_headers(academy=1) - url = reverse_lazy('events:academy_eventype_slug_visibilitysetting', kwargs={'event_type_slug': 'funny_event'}) + url = reverse_lazy("events:academy_eventype_slug_visibilitysetting", kwargs={"event_type_slug": "funny_event"}) model = self.generate_models( authenticate=True, profile_academy=1, role=1, - capability='crud_event_type', - event_type={ - 'slug': 'funny_event', - 'icon_url': 'https://www.google.com' - }, + capability="crud_event_type", + event_type={"slug": "funny_event", "icon_url": "https://www.google.com"}, ) - data = {'cohort': 2} + data = {"cohort": 2} response = self.client.post(url, data) json = response.json() - expected = {'detail': 'cohort-not-found', 'status_code': 400} + expected = {"detail": "cohort-not-found", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, 400) @@ -157,48 +155,43 @@ def test_post_visibilitysetting_with_bad_cohort(self): def test_post_visibilitysetting(self): self.bc.request.set_headers(academy=1) - url = reverse_lazy('events:academy_eventype_slug_visibilitysetting', kwargs={'event_type_slug': 'funny_event'}) + url = reverse_lazy("events:academy_eventype_slug_visibilitysetting", kwargs={"event_type_slug": "funny_event"}) model = self.generate_models( authenticate=True, profile_academy=1, role=1, - capability='crud_event_type', - event_type={ - 'slug': 'funny_event', - 
'icon_url': 'https://www.google.com' - }, + capability="crud_event_type", + event_type={"slug": "funny_event", "icon_url": "https://www.google.com"}, cohort=True, syllabus=True, ) - data = {'academy': 1, 'syllabus': 1, 'cohort': 1} + data = {"academy": 1, "syllabus": 1, "cohort": 1} response = self.client.post(url, data) json = response.json() expected = { - 'id': 1, - 'academy': { - 'id': model.academy.id, - 'name': model.academy.name, - 'slug': model.academy.slug, + "id": 1, + "academy": { + "id": model.academy.id, + "name": model.academy.name, + "slug": model.academy.slug, }, - 'cohort': { - 'id': model.cohort.id, - 'name': model.cohort.name, - 'slug': model.cohort.slug, + "cohort": { + "id": model.cohort.id, + "name": model.cohort.name, + "slug": model.cohort.slug, + }, + "syllabus": { + "id": model.syllabus.id, + "name": model.syllabus.name, + "slug": model.syllabus.slug, }, - 'syllabus': { - 'id': model.syllabus.id, - 'name': model.syllabus.name, - 'slug': model.syllabus.slug, - } } self.assertEqual(json, expected) self.assertEqual(response.status_code, 201) self.bc.check.queryset_with_pks(model.event_type.visibility_settings.all(), [1]) - self.assertEqual(self.bc.database.list_of('events.EventTypeVisibilitySetting'), [{ - 'id': 1, - 'academy_id': 1, - 'syllabus_id': 1, - 'cohort_id': 1 - }]) + self.assertEqual( + self.bc.database.list_of("events.EventTypeVisibilitySetting"), + [{"id": 1, "academy_id": 1, "syllabus_id": 1, "cohort_id": 1}], + ) diff --git a/breathecode/events/tests/urls/tests_academy_eventype_slug_visibilitysetting_id.py b/breathecode/events/tests/urls/tests_academy_eventype_slug_visibilitysetting_id.py index fff99c65e..af1e308eb 100644 --- a/breathecode/events/tests/urls/tests_academy_eventype_slug_visibilitysetting_id.py +++ b/breathecode/events/tests/urls/tests_academy_eventype_slug_visibilitysetting_id.py @@ -11,22 +11,30 @@ def get_serializer(visibility_setting, academy=None, city=None, data={}): return { - 'id': visibility_setting.id, - 'academy': { - 'id': visibility_setting.academy.id, - 'name': visibility_setting.academy.name, - 'slug': visibility_setting.academy.slug, + "id": visibility_setting.id, + "academy": { + "id": visibility_setting.academy.id, + "name": visibility_setting.academy.name, + "slug": visibility_setting.academy.slug, }, - 'cohort': { - 'id': visibility_setting.cohort.id, - 'name': visibility_setting.cohort.name, - 'slug': visibility_setting.cohort.slug, - } if visibility_setting.cohort else None, - 'syllabus': { - 'id': visibility_setting.syllabus.id, - 'name': visibility_setting.syllabus.name, - 'slug': visibility_setting.syllabus.slug, - } if visibility_setting.syllabus else None, + "cohort": ( + { + "id": visibility_setting.cohort.id, + "name": visibility_setting.cohort.name, + "slug": visibility_setting.cohort.slug, + } + if visibility_setting.cohort + else None + ), + "syllabus": ( + { + "id": visibility_setting.syllabus.id, + "name": visibility_setting.syllabus.name, + "slug": visibility_setting.syllabus.slug, + } + if visibility_setting.syllabus + else None + ), **data, } @@ -36,15 +44,14 @@ class AcademyEventTypeVisibilitySettingsTestSuite(EventTestCase): def test_delete_event_type_vs_no_auth(self): - url = reverse_lazy('events:academy_eventype_slug_visibilitysetting_id', - kwargs={ - 'event_type_slug': 'funny_event', - 'visibility_setting_id': 1 - }) + url = reverse_lazy( + "events:academy_eventype_slug_visibilitysetting_id", + kwargs={"event_type_slug": "funny_event", "visibility_setting_id": 1}, + ) response = 
self.client.get(url) json = response.json() - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} self.assertEqual(json, expected) self.assertEqual(response.status_code, 401) @@ -52,124 +59,97 @@ def test_delete_event_type_vs_no_auth(self): def test_delete_visibilitysetting_with_bad_id(self): self.bc.request.set_headers(academy=1) - url = reverse_lazy('events:academy_eventype_slug_visibilitysetting_id', - kwargs={ - 'event_type_slug': 'funny_event', - 'visibility_setting_id': 2 - }) + url = reverse_lazy( + "events:academy_eventype_slug_visibilitysetting_id", + kwargs={"event_type_slug": "funny_event", "visibility_setting_id": 2}, + ) model = self.generate_models( authenticate=True, profile_academy=1, role=1, - capability='crud_event_type', - event_type={ - 'slug': 'funny_event', - 'icon_url': 'https://www.google.com', - 'visibility_settings': 1 - }, + capability="crud_event_type", + event_type={"slug": "funny_event", "icon_url": "https://www.google.com", "visibility_settings": 1}, event_type_visibility_setting=True, ) response = self.client.delete(url) json = response.json() - expected = {'detail': 'event-type-visibility-setting-not-found', 'status_code': 404} + expected = {"detail": "event-type-visibility-setting-not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, 404) self.bc.check.queryset_with_pks(model.event_type.visibility_settings.all(), [1]) - self.assertEqual(self.bc.database.list_of('events.EventTypeVisibilitySetting'), [{ - 'id': 1, - 'academy_id': 1, - 'syllabus_id': None, - 'cohort_id': 1 - }]) + self.assertEqual( + self.bc.database.list_of("events.EventTypeVisibilitySetting"), + [{"id": 1, "academy_id": 1, "syllabus_id": None, "cohort_id": 1}], + ) def test_delete_visibilitysetting_with_bad_slug(self): self.bc.request.set_headers(academy=1) - url = reverse_lazy('events:academy_eventype_slug_visibilitysetting_id', - kwargs={ - 'event_type_slug': 'funny_event', - 'visibility_setting_id': 1 - }) + url = reverse_lazy( + "events:academy_eventype_slug_visibilitysetting_id", + kwargs={"event_type_slug": "funny_event", "visibility_setting_id": 1}, + ) model = self.generate_models( authenticate=True, profile_academy=1, role=1, - capability='crud_event_type', + capability="crud_event_type", event_type_visibility_setting=True, - event_type={ - 'slug': 'kenny', - 'icon_url': 'https://www.google.com', - 'visibility_settings': 1 - }, + event_type={"slug": "kenny", "icon_url": "https://www.google.com", "visibility_settings": 1}, ) response = self.client.delete(url) json = response.json() - expected = {'detail': 'event-type-not-found', 'status_code': 400} + expected = {"detail": "event-type-not-found", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, 400) self.bc.check.queryset_with_pks(model.event_type.visibility_settings.all(), [1]) - self.assertEqual(self.bc.database.list_of('events.EventTypeVisibilitySetting'), [{ - 'id': 1, - 'academy_id': 1, - 'syllabus_id': None, - 'cohort_id': 1 - }]) + self.assertEqual( + self.bc.database.list_of("events.EventTypeVisibilitySetting"), + [{"id": 1, "academy_id": 1, "syllabus_id": None, "cohort_id": 1}], + ) def test_delete_visibilitysetting_with_no_other_event_type(self): self.bc.request.set_headers(academy=1) - url = reverse_lazy('events:academy_eventype_slug_visibilitysetting_id', - kwargs={ - 'event_type_slug': 'funny_event', 
- 'visibility_setting_id': 1 - }) + url = reverse_lazy( + "events:academy_eventype_slug_visibilitysetting_id", + kwargs={"event_type_slug": "funny_event", "visibility_setting_id": 1}, + ) model = self.generate_models( authenticate=True, profile_academy=1, role=1, - capability='crud_event_type', + capability="crud_event_type", event_type_visibility_setting=True, - event_type={ - 'slug': 'funny_event', - 'icon_url': 'https://www.google.com' - }, + event_type={"slug": "funny_event", "icon_url": "https://www.google.com"}, ) response = self.client.delete(url) self.assertEqual(response.status_code, 204) self.bc.check.queryset_with_pks(model.event_type.visibility_settings.all(), []) - self.assertEqual(self.bc.database.list_of('events.EventTypeVisibilitySetting'), []) + self.assertEqual(self.bc.database.list_of("events.EventTypeVisibilitySetting"), []) def test_delete_visibilitysetting_with_other_event_type(self): self.bc.request.set_headers(academy=1) - url = reverse_lazy('events:academy_eventype_slug_visibilitysetting_id', - kwargs={ - 'event_type_slug': 'funny_event', - 'visibility_setting_id': 1 - }) + url = reverse_lazy( + "events:academy_eventype_slug_visibilitysetting_id", + kwargs={"event_type_slug": "funny_event", "visibility_setting_id": 1}, + ) model = self.generate_models( authenticate=True, profile_academy=1, role=1, - capability='crud_event_type', + capability="crud_event_type", event_type_visibility_setting=True, event_type=[ - { - 'slug': 'funny_event', - 'icon_url': 'https://www.google.com', - 'visibility_settings': 1 - }, - { - 'slug': 'great_event', - 'icon_url': 'https://www.google.com', - 'visibility_settings': 1 - }, + {"slug": "funny_event", "icon_url": "https://www.google.com", "visibility_settings": 1}, + {"slug": "great_event", "icon_url": "https://www.google.com", "visibility_settings": 1}, ], ) @@ -177,9 +157,7 @@ def test_delete_visibilitysetting_with_other_event_type(self): self.assertEqual(response.status_code, 204) self.bc.check.queryset_with_pks(model.event_type[1].visibility_settings.all(), [1]) - self.assertEqual(self.bc.database.list_of('events.EventTypeVisibilitySetting'), [{ - 'id': 1, - 'academy_id': 1, - 'syllabus_id': None, - 'cohort_id': 1 - }]) + self.assertEqual( + self.bc.database.list_of("events.EventTypeVisibilitySetting"), + [{"id": 1, "academy_id": 1, "syllabus_id": None, "cohort_id": 1}], + ) diff --git a/breathecode/events/tests/urls/tests_academy_organization_eventbrite_webhook.py b/breathecode/events/tests/urls/tests_academy_organization_eventbrite_webhook.py index 8610315d8..fcf1483f8 100644 --- a/breathecode/events/tests/urls/tests_academy_organization_eventbrite_webhook.py +++ b/breathecode/events/tests/urls/tests_academy_organization_eventbrite_webhook.py @@ -9,18 +9,20 @@ class AcademyEventbriteWebhookTestSuite(EventTestCase): def test_all_eventbrite_webhooks_no_auth(self): self.headers(academy=1) - url = reverse_lazy('events:academy_organizarion_eventbrite_webhook') - eventbrite_webhook = {'organization_id': 1} - model = self.bc.database.create(eventbrite_webhook=eventbrite_webhook, - profile_academy=1, - organization=1, - capability='read_organization', - role='potato', - cohort=1) + url = reverse_lazy("events:academy_organizarion_eventbrite_webhook") + eventbrite_webhook = {"organization_id": 1} + model = self.bc.database.create( + eventbrite_webhook=eventbrite_webhook, + profile_academy=1, + organization=1, + capability="read_organization", + role="potato", + cohort=1, + ) response = self.client.get(url) json = response.json() - 
expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} self.assertEqual(json, expected) self.assertEqual(response.status_code, 401) @@ -28,36 +30,40 @@ def test_all_eventbrite_webhooks_no_auth(self): def test_all_eventbrite_webhooks_no_organization(self): self.headers(academy=1) - url = reverse_lazy('events:academy_organizarion_eventbrite_webhook') - eventbrite_webhook = {'organization_id': 1} - model = self.bc.database.create(eventbrite_webhook=eventbrite_webhook, - profile_academy=1, - capability='read_organization', - role='potato', - cohort=1) + url = reverse_lazy("events:academy_organizarion_eventbrite_webhook") + eventbrite_webhook = {"organization_id": 1} + model = self.bc.database.create( + eventbrite_webhook=eventbrite_webhook, + profile_academy=1, + capability="read_organization", + role="potato", + cohort=1, + ) self.client.force_authenticate(model.user) response = self.client.get(url) json = response.json() - expected = {'detail': 'organization-no-found', 'status_code': 400} + expected = {"detail": "organization-no-found", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, 400) def test_all_eventbrite_webhooks_no_academy(self): - url = reverse_lazy('events:academy_organizarion_eventbrite_webhook') - eventbrite_webhook = {'organization_id': 1} - model = self.bc.database.create(eventbrite_webhook=eventbrite_webhook, - profile_academy=1, - capability='read_organization', - role='potato', - cohort=1) + url = reverse_lazy("events:academy_organizarion_eventbrite_webhook") + eventbrite_webhook = {"organization_id": 1} + model = self.bc.database.create( + eventbrite_webhook=eventbrite_webhook, + profile_academy=1, + capability="read_organization", + role="potato", + cohort=1, + ) self.client.force_authenticate(model.user) response = self.client.get(url) json = response.json() expected = { - 'detail': "Missing academy_id parameter expected for the endpoint url or 'Academy' header", - 'status_code': 403 + "detail": "Missing academy_id parameter expected for the endpoint url or 'Academy' header", + "status_code": 403, } self.assertEqual(json, expected) @@ -66,41 +72,45 @@ def test_all_eventbrite_webhooks_no_academy(self): def test_all_eventbrite_webhooks(self): self.headers(academy=1) - url = reverse_lazy('events:academy_organizarion_eventbrite_webhook') - eventbrite_webhook = {'organization_id': 1} + url = reverse_lazy("events:academy_organizarion_eventbrite_webhook") + eventbrite_webhook = {"organization_id": 1} start = timezone.now() - model = self.bc.database.create(eventbrite_webhook=eventbrite_webhook, - profile_academy=1, - organization=1, - capability='read_organization', - role='potato', - cohort=1) + model = self.bc.database.create( + eventbrite_webhook=eventbrite_webhook, + profile_academy=1, + organization=1, + capability="read_organization", + role="potato", + cohort=1, + ) end = timezone.now() self.client.force_authenticate(model.user) response = self.client.get(url) json = response.json() - created_at = self.bc.datetime.from_iso_string(json[0]['created_at']) - updated_at = self.bc.datetime.from_iso_string(json[0]['updated_at']) + created_at = self.bc.datetime.from_iso_string(json[0]["created_at"]) + updated_at = self.bc.datetime.from_iso_string(json[0]["updated_at"]) self.bc.check.datetime_in_range(start, end, created_at) self.bc.check.datetime_in_range(start, end, updated_at) - del json[0]['created_at'] - 
del json[0]['updated_at'] - - expected = [{ - 'id': model['eventbrite_webhook'].id, - 'status': model['eventbrite_webhook'].status, - 'status_text': model['eventbrite_webhook'].status_text, - 'api_url': model['eventbrite_webhook'].api_url, - 'user_id': model['eventbrite_webhook'].user_id, - 'action': model['eventbrite_webhook'].action, - 'attendee': model['eventbrite_webhook'].attendee, - 'event': model['eventbrite_webhook'].event, - 'payload': model['eventbrite_webhook'].payload, - 'webhook_id': model['eventbrite_webhook'].webhook_id, - 'organization_id': model['eventbrite_webhook'].organization_id, - 'endpoint_url': model['eventbrite_webhook'].endpoint_url, - }] + del json[0]["created_at"] + del json[0]["updated_at"] + + expected = [ + { + "id": model["eventbrite_webhook"].id, + "status": model["eventbrite_webhook"].status, + "status_text": model["eventbrite_webhook"].status_text, + "api_url": model["eventbrite_webhook"].api_url, + "user_id": model["eventbrite_webhook"].user_id, + "action": model["eventbrite_webhook"].action, + "attendee": model["eventbrite_webhook"].attendee, + "event": model["eventbrite_webhook"].event, + "payload": model["eventbrite_webhook"].payload, + "webhook_id": model["eventbrite_webhook"].webhook_id, + "organization_id": model["eventbrite_webhook"].organization_id, + "endpoint_url": model["eventbrite_webhook"].endpoint_url, + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, 200) diff --git a/breathecode/events/tests/urls/tests_academy_venues.py b/breathecode/events/tests/urls/tests_academy_venues.py index 70d80ae7a..37d8445a7 100644 --- a/breathecode/events/tests/urls/tests_academy_venues.py +++ b/breathecode/events/tests/urls/tests_academy_venues.py @@ -7,31 +7,31 @@ class AcademyVenueTestSuite(EventTestCase): def test_academy_venues_no_auth(self): self.headers(academy=1) - url = reverse_lazy('events:academy_venues') + url = reverse_lazy("events:academy_venues") response = self.client.get(url) json = response.json() - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} self.assertEqual(json, expected) self.assertEqual(response.status_code, 401) def test_academy_venues_with_auth(self): self.headers(academy=1) - url = reverse_lazy('events:academy_venues') + url = reverse_lazy("events:academy_venues") self.generate_models(authenticate=True) response = self.client.get(url) json = response.json() - expected = {'detail': "You (user: 1) don't have this capability: read_event for academy 1", 'status_code': 403} + expected = {"detail": "You (user: 1) don't have this capability: read_event for academy 1", "status_code": 403} self.assertEqual(json, expected) self.assertEqual(response.status_code, 403) def test_academy_venues_with_capability(self): self.headers(academy=1) - url = reverse_lazy('events:academy_venues') - self.generate_models(authenticate=True, profile_academy=True, capability='read_event', role='potato') + url = reverse_lazy("events:academy_venues") + self.generate_models(authenticate=True, profile_academy=True, capability="read_event", role="potato") response = self.client.get(url) json = response.json() @@ -42,28 +42,33 @@ def test_academy_venues_with_capability(self): def test_academy_venues_with_results(self): self.headers(academy=1) - url = reverse_lazy('events:academy_venues') - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_event', - role='potato', - 
venue=True) + url = reverse_lazy("events:academy_venues") + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_event", role="potato", venue=True + ) response = self.client.get(url) json = response.json() - expected = [{ - 'city': model['venue'].city, - 'id': model['venue'].id, - 'state': model['venue'].state, - 'street_address': model['venue'].street_address, - 'title': model['venue'].title, - 'zip_code': model['venue'].zip_code, - 'updated_at': self.bc.datetime.to_iso_string(model['venue'].updated_at), - }] + expected = [ + { + "city": model["venue"].city, + "id": model["venue"].id, + "state": model["venue"].state, + "street_address": model["venue"].street_address, + "title": model["venue"].title, + "zip_code": model["venue"].zip_code, + "updated_at": self.bc.datetime.to_iso_string(model["venue"].updated_at), + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, 200) - self.assertEqual(self.all_venue_dict(), [{ - **self.model_to_dict(model, 'venue'), - }]) + self.assertEqual( + self.all_venue_dict(), + [ + { + **self.model_to_dict(model, "venue"), + } + ], + ) diff --git a/breathecode/events/tests/urls/tests_eventbrite_webhook_id.py b/breathecode/events/tests/urls/tests_eventbrite_webhook_id.py index 9b17d58b1..5daaff055 100644 --- a/breathecode/events/tests/urls/tests_eventbrite_webhook_id.py +++ b/breathecode/events/tests/urls/tests_eventbrite_webhook_id.py @@ -1,6 +1,7 @@ """ Test /eventbrite/webhook """ + from unittest.mock import MagicMock, call, patch import requests @@ -18,32 +19,32 @@ from ..mixins import EventTestCase -eventbrite_url = 'https://www.eventbriteapi.com/v3/events/1/' -eventbrite_url_with_query = eventbrite_url + '?expand=organizer,venue' +eventbrite_url = "https://www.eventbriteapi.com/v3/events/1/" +eventbrite_url_with_query = eventbrite_url + "?expand=organizer,venue" def update_or_create_event_mock(raise_error=False): def update_or_create_event(self, *args, **kwargs): if raise_error: - raise Exception('Random error in creating') + raise Exception("Random error in creating") return MagicMock(side_effect=update_or_create_event) -#FIXME: this file have performance issues often +# FIXME: this file have performance issues often class EventbriteWebhookTestSuite(EventTestCase): """Test /eventbrite/webhook""" - @patch('requests.get', apply_eventbrite_requests_post_mock()) - @patch('time.sleep', MagicMock()) + @patch("requests.get", apply_eventbrite_requests_post_mock()) + @patch("time.sleep", MagicMock()) def test_eventbrite_webhook_without_data(self): """Test /eventbrite/webhook without auth""" - url = reverse_lazy('events:eventbrite_webhook_id', kwargs={'organization_id': 1}) - response = self.client.post(url, {}, headers=self.headers(), format='json') + url = reverse_lazy("events:eventbrite_webhook_id", kwargs={"organization_id": 1}) + response = self.client.post(url, {}, headers=self.headers(), format="json") content = response.content - self.assertEqual(content, b'ok') + self.assertEqual(content, b"ok") self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(self.all_event_checkin_dict(), []) self.assertEqual(self.all_eventbrite_webhook_dict(), []) @@ -53,748 +54,877 @@ def test_eventbrite_webhook_without_data(self): 🔽🔽🔽 order.placed """ - @patch('requests.get', apply_eventbrite_requests_post_mock()) - @patch('breathecode.marketing.tasks.add_event_tags_to_student', MagicMock()) - @patch('time.sleep', MagicMock()) + @patch("requests.get", apply_eventbrite_requests_post_mock()) + 
@patch("breathecode.marketing.tasks.add_event_tags_to_student", MagicMock()) + @patch("time.sleep", MagicMock()) def test_eventbrite_webhook_without_organization(self): from breathecode.marketing.tasks import add_event_tags_to_student - url = reverse_lazy('events:eventbrite_webhook_id', kwargs={'organization_id': 1}) - response = self.client.post(url, - self.data('order.placed', EVENTBRITE_ORDER_URL), - headers=self.headers('order.placed'), - format='json') + url = reverse_lazy("events:eventbrite_webhook_id", kwargs={"organization_id": 1}) + response = self.client.post( + url, self.data("order.placed", EVENTBRITE_ORDER_URL), headers=self.headers("order.placed"), format="json" + ) content = response.content - self.assertEqual(content, b'ok') + self.assertEqual(content, b"ok") self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(self.all_event_checkin_dict(), []) - self.assertEqual(self.all_eventbrite_webhook_dict(), [{ - 'action': 'order.placed', - 'api_url': 'https://www.eventbriteapi.com/v3/events/1/orders/1/', - 'endpoint_url': 'https://something.io/eventbrite/webhook', - 'id': 1, - 'payload': None, - 'event_id': None, - 'attendee_id': None, - 'organization_id': '1', - 'status': 'ERROR', - 'status_text': 'Organization 1 doesn\'t exist', - 'user_id': '123456789012', - 'webhook_id': '1234567' - }]) + self.assertEqual( + self.all_eventbrite_webhook_dict(), + [ + { + "action": "order.placed", + "api_url": "https://www.eventbriteapi.com/v3/events/1/orders/1/", + "endpoint_url": "https://something.io/eventbrite/webhook", + "id": 1, + "payload": None, + "event_id": None, + "attendee_id": None, + "organization_id": "1", + "status": "ERROR", + "status_text": "Organization 1 doesn't exist", + "user_id": "123456789012", + "webhook_id": "1234567", + } + ], + ) self.check_old_breathecode_calls({}, []) self.assertEqual(add_event_tags_to_student.delay.call_args_list, []) - @patch('requests.get', apply_eventbrite_requests_post_mock()) - @patch('breathecode.marketing.tasks.add_event_tags_to_student', MagicMock()) - @patch('time.sleep', MagicMock()) + @patch("requests.get", apply_eventbrite_requests_post_mock()) + @patch("breathecode.marketing.tasks.add_event_tags_to_student", MagicMock()) + @patch("time.sleep", MagicMock()) def test_eventbrite_webhook_without_academy(self): from breathecode.marketing.tasks import add_event_tags_to_student model = self.generate_models(organization=True) - url = reverse_lazy('events:eventbrite_webhook_id', kwargs={'organization_id': 1}) - response = self.client.post(url, - self.data('order.placed', EVENTBRITE_ORDER_URL), - headers=self.headers('order.placed'), - format='json') + url = reverse_lazy("events:eventbrite_webhook_id", kwargs={"organization_id": 1}) + response = self.client.post( + url, self.data("order.placed", EVENTBRITE_ORDER_URL), headers=self.headers("order.placed"), format="json" + ) content = response.content - self.assertEqual(content, b'ok') + self.assertEqual(content, b"ok") self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(self.all_event_checkin_dict(), []) - db = self.bc.database.list_of('events.EventbriteWebhook') - self.assertRegex(db[0]['status_text'], r'Exception: Organization not have one Academy\n') - self.bc.check.partial_equality(db, [{ - 'action': 'order.placed', - 'api_url': 'https://www.eventbriteapi.com/v3/events/1/orders/1/', - 'endpoint_url': 'https://something.io/eventbrite/webhook', - 'id': 1, - 'organization_id': '1', - 'status': 'ERROR', - 'user_id': '123456789012', - 'webhook_id': 
'1234567' - }]) + db = self.bc.database.list_of("events.EventbriteWebhook") + self.assertRegex(db[0]["status_text"], r"Exception: Organization not have one Academy\n") + self.bc.check.partial_equality( + db, + [ + { + "action": "order.placed", + "api_url": "https://www.eventbriteapi.com/v3/events/1/orders/1/", + "endpoint_url": "https://something.io/eventbrite/webhook", + "id": 1, + "organization_id": "1", + "status": "ERROR", + "user_id": "123456789012", + "webhook_id": "1234567", + } + ], + ) self.check_old_breathecode_calls(model, []) self.assertEqual(add_event_tags_to_student.delay.call_args_list, []) - @patch('requests.get', apply_eventbrite_requests_post_mock()) - @patch('breathecode.marketing.tasks.add_event_tags_to_student', MagicMock()) - @patch('time.sleep', MagicMock()) + @patch("requests.get", apply_eventbrite_requests_post_mock()) + @patch("breathecode.marketing.tasks.add_event_tags_to_student", MagicMock()) + @patch("time.sleep", MagicMock()) def test_eventbrite_webhook_without_event(self): from breathecode.marketing.tasks import add_event_tags_to_student model = self.generate_models(organization=True, academy=True) - url = reverse_lazy('events:eventbrite_webhook_id', kwargs={'organization_id': 1}) - response = self.client.post(url, - self.data('order.placed', EVENTBRITE_ORDER_URL), - headers=self.headers('order.placed'), - format='json') + url = reverse_lazy("events:eventbrite_webhook_id", kwargs={"organization_id": 1}) + response = self.client.post( + url, self.data("order.placed", EVENTBRITE_ORDER_URL), headers=self.headers("order.placed"), format="json" + ) content = response.content - self.assertEqual(content, b'ok') + self.assertEqual(content, b"ok") self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(self.all_event_checkin_dict(), []) - db = self.bc.database.list_of('events.EventbriteWebhook') - self.assertRegex(db[0]['status_text'], r'Exception: event doesn\'t exist\n') - self.bc.check.partial_equality(db, [{ - 'action': 'order.placed', - 'api_url': 'https://www.eventbriteapi.com/v3/events/1/orders/1/', - 'endpoint_url': 'https://something.io/eventbrite/webhook', - 'id': 1, - 'organization_id': '1', - 'status': 'ERROR', - 'user_id': '123456789012', - 'webhook_id': '1234567', - }]) + db = self.bc.database.list_of("events.EventbriteWebhook") + self.assertRegex(db[0]["status_text"], r"Exception: event doesn\'t exist\n") + self.bc.check.partial_equality( + db, + [ + { + "action": "order.placed", + "api_url": "https://www.eventbriteapi.com/v3/events/1/orders/1/", + "endpoint_url": "https://something.io/eventbrite/webhook", + "id": 1, + "organization_id": "1", + "status": "ERROR", + "user_id": "123456789012", + "webhook_id": "1234567", + } + ], + ) self.check_old_breathecode_calls(model, []) self.assertEqual(add_event_tags_to_student.delay.call_args_list, []) - @patch('requests.get', apply_eventbrite_requests_post_mock()) - @patch('breathecode.marketing.tasks.add_event_tags_to_student', MagicMock()) - @patch('time.sleep', MagicMock()) + @patch("requests.get", apply_eventbrite_requests_post_mock()) + @patch("breathecode.marketing.tasks.add_event_tags_to_student", MagicMock()) + @patch("time.sleep", MagicMock()) def test_eventbrite_webhook_with_event_without_eventbrite_id(self): from breathecode.marketing.tasks import add_event_tags_to_student model = self.generate_models(organization=True, academy=True, event=True) - url = reverse_lazy('events:eventbrite_webhook_id', kwargs={'organization_id': 1}) - response = self.client.post(url, - 
self.data('order.placed', EVENTBRITE_ORDER_URL), - headers=self.headers('order.placed'), - format='json') + url = reverse_lazy("events:eventbrite_webhook_id", kwargs={"organization_id": 1}) + response = self.client.post( + url, self.data("order.placed", EVENTBRITE_ORDER_URL), headers=self.headers("order.placed"), format="json" + ) content = response.content - self.assertEqual(content, b'ok') + self.assertEqual(content, b"ok") self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(self.all_event_checkin_dict(), []) - db = self.bc.database.list_of('events.EventbriteWebhook') - self.assertRegex(db[0]['status_text'], r'Exception: event doesn\'t exist\n') - self.bc.check.partial_equality(db, [{ - 'action': 'order.placed', - 'api_url': 'https://www.eventbriteapi.com/v3/events/1/orders/1/', - 'endpoint_url': 'https://something.io/eventbrite/webhook', - 'id': 1, - 'organization_id': '1', - 'status': 'ERROR', - 'user_id': '123456789012', - 'webhook_id': '1234567' - }]) + db = self.bc.database.list_of("events.EventbriteWebhook") + self.assertRegex(db[0]["status_text"], r"Exception: event doesn\'t exist\n") + self.bc.check.partial_equality( + db, + [ + { + "action": "order.placed", + "api_url": "https://www.eventbriteapi.com/v3/events/1/orders/1/", + "endpoint_url": "https://something.io/eventbrite/webhook", + "id": 1, + "organization_id": "1", + "status": "ERROR", + "user_id": "123456789012", + "webhook_id": "1234567", + } + ], + ) self.check_old_breathecode_calls(model, []) self.assertEqual(add_event_tags_to_student.delay.call_args_list, []) - @patch('requests.get', apply_eventbrite_requests_post_mock()) - @patch('breathecode.marketing.tasks.add_event_tags_to_student', MagicMock()) - @patch('time.sleep', MagicMock()) + @patch("requests.get", apply_eventbrite_requests_post_mock()) + @patch("breathecode.marketing.tasks.add_event_tags_to_student", MagicMock()) + @patch("time.sleep", MagicMock()) def test_eventbrite_webhook_without_active_campaign_academy(self): from breathecode.marketing.tasks import add_event_tags_to_student - model = self.generate_models(organization=True, - academy=True, - event=True, - event_kwargs={'eventbrite_id': 1}, - attendee=True) - url = reverse_lazy('events:eventbrite_webhook_id', kwargs={'organization_id': 1}) - response = self.client.post(url, - self.data('order.placed', EVENTBRITE_ORDER_URL), - headers=self.headers('order.placed'), - format='json') + model = self.generate_models( + organization=True, academy=True, event=True, event_kwargs={"eventbrite_id": 1}, attendee=True + ) + url = reverse_lazy("events:eventbrite_webhook_id", kwargs={"organization_id": 1}) + response = self.client.post( + url, self.data("order.placed", EVENTBRITE_ORDER_URL), headers=self.headers("order.placed"), format="json" + ) content = response.content - self.assertEqual(content, b'ok') + self.assertEqual(content, b"ok") self.assertEqual(response.status_code, status.HTTP_200_OK) - db = self.bc.database.list_of('events.EventbriteWebhook') - self.assertRegex(db[0]['status_text'], r'Exception: ActiveCampaignAcademy doesn\'t exist\n') - - self.bc.check.partial_equality(db, [{ - 'action': 'order.placed', - 'api_url': 'https://www.eventbriteapi.com/v3/events/1/orders/1/', - 'endpoint_url': 'https://something.io/eventbrite/webhook', - 'id': 1, - 'organization_id': '1', - 'status': 'ERROR', - 'user_id': '123456789012', - 'webhook_id': '1234567' - }]) - - self.assertEqual(self.all_event_checkin_dict(), [{ - 'attendee_id': None, - 'email': 'john.smith@example.com', - 'event_id': 1, - 
'id': 1, - 'utm_campaign': None, - 'utm_medium': None, - 'utm_source': 'eventbrite', - 'utm_url': None, - 'status': 'PENDING', - 'attended_at': None - }]) + db = self.bc.database.list_of("events.EventbriteWebhook") + self.assertRegex(db[0]["status_text"], r"Exception: ActiveCampaignAcademy doesn\'t exist\n") + + self.bc.check.partial_equality( + db, + [ + { + "action": "order.placed", + "api_url": "https://www.eventbriteapi.com/v3/events/1/orders/1/", + "endpoint_url": "https://something.io/eventbrite/webhook", + "id": 1, + "organization_id": "1", + "status": "ERROR", + "user_id": "123456789012", + "webhook_id": "1234567", + } + ], + ) + + self.assertEqual( + self.all_event_checkin_dict(), + [ + { + "attendee_id": None, + "email": "john.smith@example.com", + "event_id": 1, + "id": 1, + "utm_campaign": None, + "utm_medium": None, + "utm_source": "eventbrite", + "utm_url": None, + "status": "PENDING", + "attended_at": None, + } + ], + ) self.check_old_breathecode_calls(model, []) self.assertEqual(add_event_tags_to_student.delay.call_args_list, []) - @patch('requests.get', apply_eventbrite_requests_post_mock()) - @patch('requests.request', apply_old_breathecode_requests_request_mock()) - @patch('breathecode.marketing.tasks.add_event_tags_to_student', MagicMock()) - @patch('time.sleep', MagicMock()) + @patch("requests.get", apply_eventbrite_requests_post_mock()) + @patch("requests.request", apply_old_breathecode_requests_request_mock()) + @patch("breathecode.marketing.tasks.add_event_tags_to_student", MagicMock()) + @patch("time.sleep", MagicMock()) def test_eventbrite_webhook_without_automation(self): from breathecode.marketing.tasks import add_event_tags_to_student - model = self.generate_models(organization=True, - event=True, - event_kwargs={'eventbrite_id': 1}, - active_campaign_academy=True, - academy=True) - url = reverse_lazy('events:eventbrite_webhook_id', kwargs={'organization_id': 1}) - response = self.client.post(url, - self.data('order.placed', EVENTBRITE_ORDER_URL), - headers=self.headers('order.placed'), - format='json') + model = self.generate_models( + organization=True, event=True, event_kwargs={"eventbrite_id": 1}, active_campaign_academy=True, academy=True + ) + url = reverse_lazy("events:eventbrite_webhook_id", kwargs={"organization_id": 1}) + response = self.client.post( + url, self.data("order.placed", EVENTBRITE_ORDER_URL), headers=self.headers("order.placed"), format="json" + ) content = response.content - self.assertEqual(content, b'ok') + self.assertEqual(content, b"ok") self.assertEqual(response.status_code, status.HTTP_200_OK) - db = self.bc.database.list_of('events.EventbriteWebhook') - self.assertRegex(db[0]['status_text'], r'Exception: Automation for order_placed doesn\'t exist\n') - self.bc.check.partial_equality(db, [{ - 'action': 'order.placed', - 'api_url': 'https://www.eventbriteapi.com/v3/events/1/orders/1/', - 'endpoint_url': 'https://something.io/eventbrite/webhook', - 'id': 1, - 'organization_id': '1', - 'status': 'ERROR', - 'user_id': '123456789012', - 'webhook_id': '1234567' - }]) - - self.assertEqual(self.all_event_checkin_dict(), [{ - 'attendee_id': None, - 'email': 'john.smith@example.com', - 'utm_campaign': None, - 'utm_medium': None, - 'utm_source': 'eventbrite', - 'utm_url': None, - 'event_id': 1, - 'id': 1, - 'status': 'PENDING', - 'attended_at': None - }]) + db = self.bc.database.list_of("events.EventbriteWebhook") + self.assertRegex(db[0]["status_text"], r"Exception: Automation for order_placed doesn\'t exist\n") + 
self.bc.check.partial_equality( + db, + [ + { + "action": "order.placed", + "api_url": "https://www.eventbriteapi.com/v3/events/1/orders/1/", + "endpoint_url": "https://something.io/eventbrite/webhook", + "id": 1, + "organization_id": "1", + "status": "ERROR", + "user_id": "123456789012", + "webhook_id": "1234567", + } + ], + ) + + self.assertEqual( + self.all_event_checkin_dict(), + [ + { + "attendee_id": None, + "email": "john.smith@example.com", + "utm_campaign": None, + "utm_medium": None, + "utm_source": "eventbrite", + "utm_url": None, + "event_id": 1, + "id": 1, + "status": "PENDING", + "attended_at": None, + } + ], + ) self.check_old_breathecode_calls(model, []) self.assertEqual(add_event_tags_to_student.delay.call_args_list, []) - @patch('requests.get', apply_eventbrite_requests_post_mock()) - @patch('requests.request', apply_old_breathecode_requests_request_mock()) - @patch('breathecode.marketing.tasks.add_event_tags_to_student', MagicMock()) - @patch('time.sleep', MagicMock()) + @patch("requests.get", apply_eventbrite_requests_post_mock()) + @patch("requests.request", apply_old_breathecode_requests_request_mock()) + @patch("breathecode.marketing.tasks.add_event_tags_to_student", MagicMock()) + @patch("time.sleep", MagicMock()) def test_eventbrite_webhook_without_lang(self): from breathecode.marketing.tasks import add_event_tags_to_student - model = self.generate_models(organization=True, - event=True, - event_kwargs={'eventbrite_id': 1}, - active_campaign_academy=True, - automation=True, - user=True, - academy=True, - active_campaign_academy_kwargs={'ac_url': 'https://old.hardcoded.breathecode.url'}, - user_kwargs={ - 'email': 'john.smith@example.com', - 'first_name': 'John', - 'last_name': 'Smith' - }) - url = reverse_lazy('events:eventbrite_webhook_id', kwargs={'organization_id': 1}) - response = self.client.post(url, - self.data('order.placed', EVENTBRITE_ORDER_URL), - headers=self.headers('order.placed'), - format='json') + model = self.generate_models( + organization=True, + event=True, + event_kwargs={"eventbrite_id": 1}, + active_campaign_academy=True, + automation=True, + user=True, + academy=True, + active_campaign_academy_kwargs={"ac_url": "https://old.hardcoded.breathecode.url"}, + user_kwargs={"email": "john.smith@example.com", "first_name": "John", "last_name": "Smith"}, + ) + url = reverse_lazy("events:eventbrite_webhook_id", kwargs={"organization_id": 1}) + response = self.client.post( + url, self.data("order.placed", EVENTBRITE_ORDER_URL), headers=self.headers("order.placed"), format="json" + ) content = response.content - self.assertEqual(content, b'ok') + self.assertEqual(content, b"ok") self.assertEqual(response.status_code, status.HTTP_200_OK) webhook_dicts = self.all_eventbrite_webhook_dict() for wd in webhook_dicts: - del wd['payload'] - self.assertEqual(webhook_dicts, [{ - 'action': 'order.placed', - 'api_url': 'https://www.eventbriteapi.com/v3/events/1/orders/1/', - 'endpoint_url': 'https://something.io/eventbrite/webhook', - 'id': 1, - 'attendee_id': 1, - 'event_id': 1, - 'organization_id': '1', - 'status': 'DONE', - 'status_text': 'OK', - 'user_id': '123456789012', - 'webhook_id': '1234567' - }]) - - self.assertEqual(self.all_event_checkin_dict(), [{ - 'attendee_id': 1, - 'utm_campaign': None, - 'utm_medium': None, - 'utm_source': 'eventbrite', - 'utm_url': None, - 'email': 'john.smith@example.com', - 'event_id': 1, - 'id': 1, - 'status': 'PENDING', - 'attended_at': None - }]) - - self.assertEqual(requests.get.call_args_list, [ - 
call('https://www.eventbriteapi.com/v3/events/1/orders/1/', headers={'Authorization': 'Bearer '}, - timeout=5), - ]) + del wd["payload"] + self.assertEqual( + webhook_dicts, + [ + { + "action": "order.placed", + "api_url": "https://www.eventbriteapi.com/v3/events/1/orders/1/", + "endpoint_url": "https://something.io/eventbrite/webhook", + "id": 1, + "attendee_id": 1, + "event_id": 1, + "organization_id": "1", + "status": "DONE", + "status_text": "OK", + "user_id": "123456789012", + "webhook_id": "1234567", + } + ], + ) + + self.assertEqual( + self.all_event_checkin_dict(), + [ + { + "attendee_id": 1, + "utm_campaign": None, + "utm_medium": None, + "utm_source": "eventbrite", + "utm_url": None, + "email": "john.smith@example.com", + "event_id": 1, + "id": 1, + "status": "PENDING", + "attended_at": None, + } + ], + ) + + self.assertEqual( + requests.get.call_args_list, + [ + call( + "https://www.eventbriteapi.com/v3/events/1/orders/1/", + headers={"Authorization": "Bearer "}, + timeout=5, + ), + ], + ) assert requests.request.call_args_list == [ - call('POST', - 'https://old.hardcoded.breathecode.url/admin/api.php', - params=[('api_action', 'contact_sync'), ('api_key', model.active_campaign_academy.ac_key), - ('api_output', 'json')], - data={ - 'email': model.user.email, - 'first_name': model.user.first_name, - 'last_name': model.user.last_name, - 'field[18,0]': model.academy.slug, - 'field[59,0]': 'eventbrite', - 'field[33,0]': 'eventbrite order placed' - }, - timeout=3), - call('POST', - 'https://old.hardcoded.breathecode.url/api/3/contactAutomations', - headers={ - 'Accept': 'application/json', - 'Content-Type': 'application/json', - 'Api-Token': model.active_campaign_academy.ac_key, - }, - json={'contactAutomation': { - 'contact': 1, - 'automation': model.automation.acp_id, - }}, - timeout=2), + call( + "POST", + "https://old.hardcoded.breathecode.url/admin/api.php", + params=[ + ("api_action", "contact_sync"), + ("api_key", model.active_campaign_academy.ac_key), + ("api_output", "json"), + ], + data={ + "email": model.user.email, + "first_name": model.user.first_name, + "last_name": model.user.last_name, + "field[18,0]": model.academy.slug, + "field[59,0]": "eventbrite", + "field[33,0]": "eventbrite order placed", + }, + timeout=3, + ), + call( + "POST", + "https://old.hardcoded.breathecode.url/api/3/contactAutomations", + headers={ + "Accept": "application/json", + "Content-Type": "application/json", + "Api-Token": model.active_campaign_academy.ac_key, + }, + json={ + "contactAutomation": { + "contact": 1, + "automation": model.automation.acp_id, + } + }, + timeout=2, + ), ] - self.assertEqual(add_event_tags_to_student.delay.call_args_list, [ - call(model.event.id, email=model.user.email), - ]) - - @patch('requests.get', apply_eventbrite_requests_post_mock()) - @patch('requests.request', apply_old_breathecode_requests_request_mock()) - @patch('breathecode.marketing.tasks.add_event_tags_to_student', MagicMock()) - @patch('time.sleep', MagicMock()) + self.assertEqual( + add_event_tags_to_student.delay.call_args_list, + [ + call(model.event.id, email=model.user.email), + ], + ) + + @patch("requests.get", apply_eventbrite_requests_post_mock()) + @patch("requests.request", apply_old_breathecode_requests_request_mock()) + @patch("breathecode.marketing.tasks.add_event_tags_to_student", MagicMock()) + @patch("time.sleep", MagicMock()) def test_eventbrite_webhook_without_lang__active_campaign_belong_to_other_academy_than_event(self): from breathecode.marketing.tasks import 
add_event_tags_to_student - model = self.generate_models(organization=True, - event={ - 'eventbrite_id': 1, - 'academy_id': 2, - }, - academy=2, - active_campaign_academy={'ac_url': 'https://old.hardcoded.breathecode.url'}, - automation=True, - user={ - 'email': 'john.smith@example.com', - 'first_name': 'John', - 'last_name': 'Smith' - }) - url = reverse_lazy('events:eventbrite_webhook_id', kwargs={'organization_id': 1}) - response = self.client.post(url, - self.data('order.placed', EVENTBRITE_ORDER_URL), - headers=self.headers('order.placed'), - format='json') + model = self.generate_models( + organization=True, + event={ + "eventbrite_id": 1, + "academy_id": 2, + }, + academy=2, + active_campaign_academy={"ac_url": "https://old.hardcoded.breathecode.url"}, + automation=True, + user={"email": "john.smith@example.com", "first_name": "John", "last_name": "Smith"}, + ) + url = reverse_lazy("events:eventbrite_webhook_id", kwargs={"organization_id": 1}) + response = self.client.post( + url, self.data("order.placed", EVENTBRITE_ORDER_URL), headers=self.headers("order.placed"), format="json" + ) content = response.content - self.assertEqual(content, b'ok') + self.assertEqual(content, b"ok") self.assertEqual(response.status_code, status.HTTP_200_OK) webhook_dict = self.all_eventbrite_webhook_dict() for wd in webhook_dict: - del wd['payload'] - - self.assertEqual(webhook_dict, [{ - 'action': 'order.placed', - 'api_url': 'https://www.eventbriteapi.com/v3/events/1/orders/1/', - 'endpoint_url': 'https://something.io/eventbrite/webhook', - 'id': 1, - 'attendee_id': 1, - 'event_id': 1, - 'organization_id': '1', - 'status': 'DONE', - 'status_text': 'OK', - 'user_id': '123456789012', - 'webhook_id': '1234567' - }]) - - self.assertEqual(self.all_event_checkin_dict(), [{ - 'attendee_id': 1, - 'utm_campaign': None, - 'utm_medium': None, - 'utm_source': 'eventbrite', - 'utm_url': None, - 'email': 'john.smith@example.com', - 'event_id': 1, - 'id': 1, - 'status': 'PENDING', - 'attended_at': None - }]) - - self.assertEqual(requests.get.call_args_list, [ - call('https://www.eventbriteapi.com/v3/events/1/orders/1/', headers={'Authorization': 'Bearer '}, - timeout=5), - ]) - - self.assertEqual(requests.request.call_args_list, [ - call('POST', - 'https://old.hardcoded.breathecode.url/admin/api.php', - params=[('api_action', 'contact_sync'), ('api_key', model.active_campaign_academy.ac_key), - ('api_output', 'json')], - data={ - 'email': model.user.email, - 'first_name': model.user.first_name, - 'last_name': model.user.last_name, - 'field[18,0]': model.academy[1].slug, - 'field[59,0]': 'eventbrite', - 'field[33,0]': 'eventbrite order placed' - }, - timeout=3), - call('POST', - 'https://old.hardcoded.breathecode.url/api/3/contactAutomations', - headers={ - 'Accept': 'application/json', - 'Content-Type': 'application/json', - 'Api-Token': model.active_campaign_academy.ac_key, - }, - json={'contactAutomation': { - 'contact': 1, - 'automation': model.automation.acp_id, - }}, - timeout=2), - ]) - self.assertEqual(add_event_tags_to_student.delay.call_args_list, [ - call(model.event.id, email=model.user.email), - ]) - - @patch('requests.get', apply_eventbrite_requests_post_mock()) - @patch('requests.request', apply_old_breathecode_requests_request_mock()) - @patch('breathecode.marketing.tasks.add_event_tags_to_student', MagicMock()) - @patch('time.sleep', MagicMock()) + del wd["payload"] + + self.assertEqual( + webhook_dict, + [ + { + "action": "order.placed", + "api_url": 
"https://www.eventbriteapi.com/v3/events/1/orders/1/", + "endpoint_url": "https://something.io/eventbrite/webhook", + "id": 1, + "attendee_id": 1, + "event_id": 1, + "organization_id": "1", + "status": "DONE", + "status_text": "OK", + "user_id": "123456789012", + "webhook_id": "1234567", + } + ], + ) + + self.assertEqual( + self.all_event_checkin_dict(), + [ + { + "attendee_id": 1, + "utm_campaign": None, + "utm_medium": None, + "utm_source": "eventbrite", + "utm_url": None, + "email": "john.smith@example.com", + "event_id": 1, + "id": 1, + "status": "PENDING", + "attended_at": None, + } + ], + ) + + self.assertEqual( + requests.get.call_args_list, + [ + call( + "https://www.eventbriteapi.com/v3/events/1/orders/1/", + headers={"Authorization": "Bearer "}, + timeout=5, + ), + ], + ) + + self.assertEqual( + requests.request.call_args_list, + [ + call( + "POST", + "https://old.hardcoded.breathecode.url/admin/api.php", + params=[ + ("api_action", "contact_sync"), + ("api_key", model.active_campaign_academy.ac_key), + ("api_output", "json"), + ], + data={ + "email": model.user.email, + "first_name": model.user.first_name, + "last_name": model.user.last_name, + "field[18,0]": model.academy[1].slug, + "field[59,0]": "eventbrite", + "field[33,0]": "eventbrite order placed", + }, + timeout=3, + ), + call( + "POST", + "https://old.hardcoded.breathecode.url/api/3/contactAutomations", + headers={ + "Accept": "application/json", + "Content-Type": "application/json", + "Api-Token": model.active_campaign_academy.ac_key, + }, + json={ + "contactAutomation": { + "contact": 1, + "automation": model.automation.acp_id, + } + }, + timeout=2, + ), + ], + ) + self.assertEqual( + add_event_tags_to_student.delay.call_args_list, + [ + call(model.event.id, email=model.user.email), + ], + ) + + @patch("requests.get", apply_eventbrite_requests_post_mock()) + @patch("requests.request", apply_old_breathecode_requests_request_mock()) + @patch("breathecode.marketing.tasks.add_event_tags_to_student", MagicMock()) + @patch("time.sleep", MagicMock()) def test_eventbrite_webhook(self): from breathecode.marketing.tasks import add_event_tags_to_student - model = self.generate_models(organization=True, - event=True, - event_kwargs={ - 'eventbrite_id': 1, - 'lang': 'en' - }, - active_campaign_academy=True, - automation=True, - user=True, - academy=True, - active_campaign_academy_kwargs={'ac_url': 'https://old.hardcoded.breathecode.url'}, - user_kwargs={ - 'email': 'john.smith@example.com', - 'first_name': 'John', - 'last_name': 'Smith' - }) - url = reverse_lazy('events:eventbrite_webhook_id', kwargs={'organization_id': 1}) - response = self.client.post(url, - self.data('order.placed', EVENTBRITE_ORDER_URL), - headers=self.headers('order.placed'), - format='json') + model = self.generate_models( + organization=True, + event=True, + event_kwargs={"eventbrite_id": 1, "lang": "en"}, + active_campaign_academy=True, + automation=True, + user=True, + academy=True, + active_campaign_academy_kwargs={"ac_url": "https://old.hardcoded.breathecode.url"}, + user_kwargs={"email": "john.smith@example.com", "first_name": "John", "last_name": "Smith"}, + ) + url = reverse_lazy("events:eventbrite_webhook_id", kwargs={"organization_id": 1}) + response = self.client.post( + url, self.data("order.placed", EVENTBRITE_ORDER_URL), headers=self.headers("order.placed"), format="json" + ) content = response.content - self.assertEqual(content, b'ok') + self.assertEqual(content, b"ok") self.assertEqual(response.status_code, status.HTTP_200_OK) webhook_dicts = 
self.all_eventbrite_webhook_dict() for wd in webhook_dicts: - del wd['payload'] - self.assertEqual(webhook_dicts, [{ - 'action': 'order.placed', - 'api_url': 'https://www.eventbriteapi.com/v3/events/1/orders/1/', - 'endpoint_url': 'https://something.io/eventbrite/webhook', - 'id': 1, - 'organization_id': '1', - 'status': 'DONE', - 'event_id': 1, - 'attendee_id': 1, - 'status_text': 'OK', - 'user_id': '123456789012', - 'webhook_id': '1234567' - }]) - - self.assertEqual(self.all_event_checkin_dict(), [{ - 'attendee_id': 1, - 'utm_campaign': None, - 'utm_medium': None, - 'utm_source': 'eventbrite', - 'utm_url': None, - 'email': 'john.smith@example.com', - 'event_id': 1, - 'id': 1, - 'status': 'PENDING', - 'attended_at': None - }]) - - self.assertEqual(requests.get.call_args_list, [ - call('https://www.eventbriteapi.com/v3/events/1/orders/1/', headers={'Authorization': 'Bearer '}, - timeout=5), - ]) + del wd["payload"] + self.assertEqual( + webhook_dicts, + [ + { + "action": "order.placed", + "api_url": "https://www.eventbriteapi.com/v3/events/1/orders/1/", + "endpoint_url": "https://something.io/eventbrite/webhook", + "id": 1, + "organization_id": "1", + "status": "DONE", + "event_id": 1, + "attendee_id": 1, + "status_text": "OK", + "user_id": "123456789012", + "webhook_id": "1234567", + } + ], + ) + + self.assertEqual( + self.all_event_checkin_dict(), + [ + { + "attendee_id": 1, + "utm_campaign": None, + "utm_medium": None, + "utm_source": "eventbrite", + "utm_url": None, + "email": "john.smith@example.com", + "event_id": 1, + "id": 1, + "status": "PENDING", + "attended_at": None, + } + ], + ) + + self.assertEqual( + requests.get.call_args_list, + [ + call( + "https://www.eventbriteapi.com/v3/events/1/orders/1/", + headers={"Authorization": "Bearer "}, + timeout=5, + ), + ], + ) assert requests.request.call_args_list == [ - call('POST', - 'https://old.hardcoded.breathecode.url/admin/api.php', - params=[('api_action', 'contact_sync'), ('api_key', model.active_campaign_academy.ac_key), - ('api_output', 'json')], - data={ - 'email': model.user.email, - 'first_name': model.user.first_name, - 'last_name': model.user.last_name, - 'field[18,0]': model.academy.slug, - 'field[59,0]': 'eventbrite', - 'field[33,0]': 'eventbrite order placed', - 'field[16,0]': 'en', - }, - timeout=3), - call('POST', - 'https://old.hardcoded.breathecode.url/api/3/contactAutomations', - headers={ - 'Accept': 'application/json', - 'Content-Type': 'application/json', - 'Api-Token': model.active_campaign_academy.ac_key, - }, - json={'contactAutomation': { - 'contact': 1, - 'automation': model.automation.acp_id, - }}, - timeout=2), + call( + "POST", + "https://old.hardcoded.breathecode.url/admin/api.php", + params=[ + ("api_action", "contact_sync"), + ("api_key", model.active_campaign_academy.ac_key), + ("api_output", "json"), + ], + data={ + "email": model.user.email, + "first_name": model.user.first_name, + "last_name": model.user.last_name, + "field[18,0]": model.academy.slug, + "field[59,0]": "eventbrite", + "field[33,0]": "eventbrite order placed", + "field[16,0]": "en", + }, + timeout=3, + ), + call( + "POST", + "https://old.hardcoded.breathecode.url/api/3/contactAutomations", + headers={ + "Accept": "application/json", + "Content-Type": "application/json", + "Api-Token": model.active_campaign_academy.ac_key, + }, + json={ + "contactAutomation": { + "contact": 1, + "automation": model.automation.acp_id, + } + }, + timeout=2, + ), ] - self.assertEqual(add_event_tags_to_student.delay.call_args_list, [ - 
call(model.event.id, email=model.user.email), - ]) + self.assertEqual( + add_event_tags_to_student.delay.call_args_list, + [ + call(model.event.id, email=model.user.email), + ], + ) """ 🔽🔽🔽 event.created """ - @patch(REQUESTS_PATH['get'], apply_requests_get_mock([(200, eventbrite_url_with_query, EVENTBRITE_EVENT)])) - @patch.object(actions, 'update_or_create_event', MagicMock(side_effect=Exception('Random error in creating'))) - @patch('time.sleep', MagicMock()) + @patch(REQUESTS_PATH["get"], apply_requests_get_mock([(200, eventbrite_url_with_query, EVENTBRITE_EVENT)])) + @patch.object(actions, "update_or_create_event", MagicMock(side_effect=Exception("Random error in creating"))) + @patch("time.sleep", MagicMock()) def test_eventbrite_webhook__event_created__raise_error(self): """Test /eventbrite/webhook without auth""" model = self.generate_models(organization=True) - url = reverse_lazy('events:eventbrite_webhook_id', kwargs={'organization_id': 1}) - response = self.client.post(url, - self.data('event.created', eventbrite_url), - headers=self.headers('event.created'), - format='json') + url = reverse_lazy("events:eventbrite_webhook_id", kwargs={"organization_id": 1}) + response = self.client.post( + url, self.data("event.created", eventbrite_url), headers=self.headers("event.created"), format="json" + ) content = response.content - self.assertEqual(content, b'ok') + self.assertEqual(content, b"ok") self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(actions.update_or_create_event.call_args_list, [call(EVENTBRITE_EVENT, model.organization)]) - db = self.bc.database.list_of('events.EventbriteWebhook') - self.assertRegex(db[0]['status_text'], r'Exception: Random error in creating\n') - - self.bc.check.partial_equality(db, [{ - 'action': 'event.created', - 'api_url': 'https://www.eventbriteapi.com/v3/events/1/', - 'endpoint_url': 'https://something.io/eventbrite/webhook', - 'id': 1, - 'organization_id': '1', - 'status': 'ERROR', - 'user_id': '123456789012', - 'webhook_id': '1234567' - }]) - - @patch(REQUESTS_PATH['get'], apply_requests_get_mock([(200, eventbrite_url_with_query, EVENTBRITE_EVENT)])) - @patch.object(actions, 'update_or_create_event', update_or_create_event_mock()) - @patch('time.sleep', MagicMock()) + db = self.bc.database.list_of("events.EventbriteWebhook") + self.assertRegex(db[0]["status_text"], r"Exception: Random error in creating\n") + + self.bc.check.partial_equality( + db, + [ + { + "action": "event.created", + "api_url": "https://www.eventbriteapi.com/v3/events/1/", + "endpoint_url": "https://something.io/eventbrite/webhook", + "id": 1, + "organization_id": "1", + "status": "ERROR", + "user_id": "123456789012", + "webhook_id": "1234567", + } + ], + ) + + @patch(REQUESTS_PATH["get"], apply_requests_get_mock([(200, eventbrite_url_with_query, EVENTBRITE_EVENT)])) + @patch.object(actions, "update_or_create_event", update_or_create_event_mock()) + @patch("time.sleep", MagicMock()) def test_eventbrite_webhook__event_created(self): """Test /eventbrite/webhook without auth""" model = self.generate_models(organization=True) - url = reverse_lazy('events:eventbrite_webhook_id', kwargs={'organization_id': 1}) - response = self.client.post(url, - self.data('event.created', eventbrite_url), - headers=self.headers('event.created'), - format='json') + url = reverse_lazy("events:eventbrite_webhook_id", kwargs={"organization_id": 1}) + response = self.client.post( + url, self.data("event.created", eventbrite_url), headers=self.headers("event.created"), 
format="json" + ) content = response.content - self.assertEqual(content, b'ok') + self.assertEqual(content, b"ok") self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(actions.update_or_create_event.call_args_list, [call(EVENTBRITE_EVENT, model.organization)]) webhook_dicts = self.all_eventbrite_webhook_dict() for wd in webhook_dicts: - del wd['payload'] - self.assertEqual(webhook_dicts, [{ - 'action': 'event.created', - 'api_url': 'https://www.eventbriteapi.com/v3/events/1/', - 'endpoint_url': 'https://something.io/eventbrite/webhook', - 'id': 1, - 'organization_id': '1', - 'attendee_id': None, - 'event_id': None, - 'status': 'DONE', - 'status_text': 'OK', - 'user_id': '123456789012', - 'webhook_id': '1234567' - }]) + del wd["payload"] + self.assertEqual( + webhook_dicts, + [ + { + "action": "event.created", + "api_url": "https://www.eventbriteapi.com/v3/events/1/", + "endpoint_url": "https://something.io/eventbrite/webhook", + "id": 1, + "organization_id": "1", + "attendee_id": None, + "event_id": None, + "status": "DONE", + "status_text": "OK", + "user_id": "123456789012", + "webhook_id": "1234567", + } + ], + ) """ 🔽🔽🔽 event.updated """ - @patch(REQUESTS_PATH['get'], apply_requests_get_mock([(200, eventbrite_url_with_query, EVENTBRITE_EVENT)])) - @patch.object(actions, 'update_or_create_event', MagicMock(side_effect=Exception('Random error in creating'))) - @patch('time.sleep', MagicMock()) + @patch(REQUESTS_PATH["get"], apply_requests_get_mock([(200, eventbrite_url_with_query, EVENTBRITE_EVENT)])) + @patch.object(actions, "update_or_create_event", MagicMock(side_effect=Exception("Random error in creating"))) + @patch("time.sleep", MagicMock()) def test_eventbrite_webhook__event_updated__raise_error(self): """Test /eventbrite/webhook without auth""" model = self.generate_models(organization=True) - url = reverse_lazy('events:eventbrite_webhook_id', kwargs={'organization_id': 1}) - response = self.client.post(url, - self.data('event.updated', eventbrite_url), - headers=self.headers('event.updated'), - format='json') + url = reverse_lazy("events:eventbrite_webhook_id", kwargs={"organization_id": 1}) + response = self.client.post( + url, self.data("event.updated", eventbrite_url), headers=self.headers("event.updated"), format="json" + ) content = response.content - self.assertEqual(content, b'ok') + self.assertEqual(content, b"ok") self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(actions.update_or_create_event.call_args_list, [call(EVENTBRITE_EVENT, model.organization)]) - db = self.bc.database.list_of('events.EventbriteWebhook') - self.assertRegex(db[0]['status_text'], r'Exception: Random error in creating\n') - - self.bc.check.partial_equality(db, [{ - 'action': 'event.updated', - 'api_url': 'https://www.eventbriteapi.com/v3/events/1/', - 'endpoint_url': 'https://something.io/eventbrite/webhook', - 'id': 1, - 'organization_id': '1', - 'status': 'ERROR', - 'user_id': '123456789012', - 'webhook_id': '1234567' - }]) - - @patch(REQUESTS_PATH['get'], apply_requests_get_mock([(200, eventbrite_url_with_query, EVENTBRITE_EVENT)])) - @patch.object(actions, 'update_or_create_event', update_or_create_event_mock()) - @patch('time.sleep', MagicMock()) + db = self.bc.database.list_of("events.EventbriteWebhook") + self.assertRegex(db[0]["status_text"], r"Exception: Random error in creating\n") + + self.bc.check.partial_equality( + db, + [ + { + "action": "event.updated", + "api_url": "https://www.eventbriteapi.com/v3/events/1/", + "endpoint_url": 
"https://something.io/eventbrite/webhook", + "id": 1, + "organization_id": "1", + "status": "ERROR", + "user_id": "123456789012", + "webhook_id": "1234567", + } + ], + ) + + @patch(REQUESTS_PATH["get"], apply_requests_get_mock([(200, eventbrite_url_with_query, EVENTBRITE_EVENT)])) + @patch.object(actions, "update_or_create_event", update_or_create_event_mock()) + @patch("time.sleep", MagicMock()) def test_eventbrite_webhook__event_updated(self): """Test /eventbrite/webhook without auth""" model = self.generate_models(organization=True) - url = reverse_lazy('events:eventbrite_webhook_id', kwargs={'organization_id': 1}) - response = self.client.post(url, - self.data('event.updated', eventbrite_url), - headers=self.headers('event.updated'), - format='json') + url = reverse_lazy("events:eventbrite_webhook_id", kwargs={"organization_id": 1}) + response = self.client.post( + url, self.data("event.updated", eventbrite_url), headers=self.headers("event.updated"), format="json" + ) content = response.content - self.assertEqual(content, b'ok') + self.assertEqual(content, b"ok") self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(actions.update_or_create_event.call_args_list, [call(EVENTBRITE_EVENT, model.organization)]) webhook_dicts = self.all_eventbrite_webhook_dict() for wd in webhook_dicts: - del wd['payload'] - self.assertEqual(webhook_dicts, [{ - 'action': 'event.updated', - 'api_url': 'https://www.eventbriteapi.com/v3/events/1/', - 'endpoint_url': 'https://something.io/eventbrite/webhook', - 'id': 1, - 'attendee_id': None, - 'event_id': None, - 'organization_id': '1', - 'status': 'DONE', - 'status_text': 'OK', - 'user_id': '123456789012', - 'webhook_id': '1234567' - }]) + del wd["payload"] + self.assertEqual( + webhook_dicts, + [ + { + "action": "event.updated", + "api_url": "https://www.eventbriteapi.com/v3/events/1/", + "endpoint_url": "https://something.io/eventbrite/webhook", + "id": 1, + "attendee_id": None, + "event_id": None, + "organization_id": "1", + "status": "DONE", + "status_text": "OK", + "user_id": "123456789012", + "webhook_id": "1234567", + } + ], + ) """ 🔽🔽🔽 event.published """ - @patch(REQUESTS_PATH['get'], apply_requests_get_mock([(200, eventbrite_url_with_query, EVENTBRITE_EVENT)])) - @patch('breathecode.events.actions.publish_event_from_eventbrite', MagicMock(side_effect=Exception('Random error'))) - @patch('time.sleep', MagicMock()) + @patch(REQUESTS_PATH["get"], apply_requests_get_mock([(200, eventbrite_url_with_query, EVENTBRITE_EVENT)])) + @patch("breathecode.events.actions.publish_event_from_eventbrite", MagicMock(side_effect=Exception("Random error"))) + @patch("time.sleep", MagicMock()) def test_eventbrite_webhook__event_published__raise_error(self): from breathecode.events.actions import publish_event_from_eventbrite model = self.generate_models(organization=True) - url = reverse_lazy('events:eventbrite_webhook_id', kwargs={'organization_id': 1}) - response = self.client.post(url, - self.data('event.published', eventbrite_url), - headers=self.headers('event.published'), - format='json') + url = reverse_lazy("events:eventbrite_webhook_id", kwargs={"organization_id": 1}) + response = self.client.post( + url, self.data("event.published", eventbrite_url), headers=self.headers("event.published"), format="json" + ) content = response.content - self.assertEqual(content, b'ok') + self.assertEqual(content, b"ok") self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(publish_event_from_eventbrite.call_args_list, 
[call(EVENTBRITE_EVENT, model.organization)]) - db = self.bc.database.list_of('events.EventbriteWebhook') - self.assertRegex(db[0]['status_text'], r'Exception: Random error\n') - - self.bc.check.partial_equality(db, [{ - 'action': 'event.published', - 'api_url': 'https://www.eventbriteapi.com/v3/events/1/', - 'endpoint_url': 'https://something.io/eventbrite/webhook', - 'id': 1, - 'organization_id': '1', - 'status': 'ERROR', - 'user_id': '123456789012', - 'webhook_id': '1234567' - }]) - - @patch(REQUESTS_PATH['get'], apply_requests_get_mock([(200, eventbrite_url_with_query, EVENTBRITE_EVENT)])) - @patch.object(actions, 'publish_event_from_eventbrite', MagicMock(return_value=None)) - @patch('time.sleep', MagicMock()) + db = self.bc.database.list_of("events.EventbriteWebhook") + self.assertRegex(db[0]["status_text"], r"Exception: Random error\n") + + self.bc.check.partial_equality( + db, + [ + { + "action": "event.published", + "api_url": "https://www.eventbriteapi.com/v3/events/1/", + "endpoint_url": "https://something.io/eventbrite/webhook", + "id": 1, + "organization_id": "1", + "status": "ERROR", + "user_id": "123456789012", + "webhook_id": "1234567", + } + ], + ) + + @patch(REQUESTS_PATH["get"], apply_requests_get_mock([(200, eventbrite_url_with_query, EVENTBRITE_EVENT)])) + @patch.object(actions, "publish_event_from_eventbrite", MagicMock(return_value=None)) + @patch("time.sleep", MagicMock()) def test_eventbrite_webhook__event_published(self): from breathecode.events.actions import publish_event_from_eventbrite model = self.generate_models(organization=True) - url = reverse_lazy('events:eventbrite_webhook_id', kwargs={'organization_id': 1}) - response = self.client.post(url, - self.data('event.published', eventbrite_url), - headers=self.headers('event.published'), - format='json') + url = reverse_lazy("events:eventbrite_webhook_id", kwargs={"organization_id": 1}) + response = self.client.post( + url, self.data("event.published", eventbrite_url), headers=self.headers("event.published"), format="json" + ) content = response.content - self.assertEqual(content, b'ok') + self.assertEqual(content, b"ok") self.assertEqual(response.status_code, status.HTTP_200_OK) webhook_dicts = self.all_eventbrite_webhook_dict() for wd in webhook_dicts: - del wd['payload'] - self.assertEqual(webhook_dicts, [{ - 'action': 'event.published', - 'api_url': 'https://www.eventbriteapi.com/v3/events/1/', - 'endpoint_url': 'https://something.io/eventbrite/webhook', - 'id': 1, - 'attendee_id': None, - 'event_id': None, - 'organization_id': '1', - 'status': 'DONE', - 'status_text': 'OK', - 'user_id': '123456789012', - 'webhook_id': '1234567' - }]) + del wd["payload"] + self.assertEqual( + webhook_dicts, + [ + { + "action": "event.published", + "api_url": "https://www.eventbriteapi.com/v3/events/1/", + "endpoint_url": "https://something.io/eventbrite/webhook", + "id": 1, + "attendee_id": None, + "event_id": None, + "organization_id": "1", + "status": "DONE", + "status_text": "OK", + "user_id": "123456789012", + "webhook_id": "1234567", + } + ], + ) diff --git a/breathecode/events/tests/urls/tests_eventype.py b/breathecode/events/tests/urls/tests_eventype.py index 57be267e7..9288d87f3 100644 --- a/breathecode/events/tests/urls/tests_eventype.py +++ b/breathecode/events/tests/urls/tests_eventype.py @@ -14,24 +14,24 @@ def get_serializer(event_type, academy=None, city=None, data={}): if city: city_serialized = { - 'name': city.name, + "name": city.name, } if academy: academy_serialized = { - 'city': city_serialized, - 
'id': academy.id, - 'name': academy.name, - 'slug': academy.slug, + "city": city_serialized, + "id": academy.id, + "name": academy.name, + "slug": academy.slug, } return { - 'academy': academy_serialized, - 'id': event_type.id, - 'name': event_type.name, - 'slug': event_type.slug, - 'lang': event_type.lang, - 'description': event_type.description, + "academy": academy_serialized, + "id": event_type.id, + "name": event_type.name, + "slug": event_type.slug, + "lang": event_type.lang, + "description": event_type.description, **data, } @@ -41,11 +41,11 @@ class AcademyEventTestSuite(EventTestCase): def test_all_academy_events_no_auth(self): - url = reverse_lazy('events:eventype') + url = reverse_lazy("events:eventype") response = self.client.get(url) json = response.json() - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} self.assertEqual(json, expected) self.assertEqual(response.status_code, 401) @@ -53,7 +53,7 @@ def test_all_academy_events_no_auth(self): def test_academy_event_type_no_results(self): # TODO: this is bad placed - url = reverse_lazy('events:eventype') + url = reverse_lazy("events:eventype") self.generate_models(authenticate=True) response = self.client.get(url) @@ -66,18 +66,17 @@ def test_academy_event_type_no_results(self): def test_academy_event_type_with_results(self): # TODO: this is bad placed - url = reverse_lazy('events:eventype') + url = reverse_lazy("events:eventype") event_type_kwargs = { - 'slug': 'potato', - 'name': 'Potato', - 'created_at': timezone.now(), - 'updated_at': timezone.now(), - 'icon_url': 'https://www.google.com', + "slug": "potato", + "name": "Potato", + "created_at": timezone.now(), + "updated_at": timezone.now(), + "icon_url": "https://www.google.com", } - model = self.generate_models(authenticate=True, - event=True, - event_type=True, - event_type_kwargs=event_type_kwargs) + model = self.generate_models( + authenticate=True, event=True, event_type=True, event_type_kwargs=event_type_kwargs + ) response = self.client.get(url) json = response.json() @@ -86,24 +85,28 @@ def test_academy_event_type_with_results(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, 200) - self.assertEqual(self.all_event_type_dict(), [{ - **self.model_to_dict(model, 'event_type'), - }]) + self.assertEqual( + self.all_event_type_dict(), + [ + { + **self.model_to_dict(model, "event_type"), + } + ], + ) def test_bad_academy_slug(self): - url = reverse_lazy('events:eventype') + '?academy=banana' + url = reverse_lazy("events:eventype") + "?academy=banana" event_type_kwargs = { - 'slug': 'potato', - 'name': 'Potato', - 'created_at': timezone.now(), - 'updated_at': timezone.now(), - 'icon_url': 'https://www.google.com', + "slug": "potato", + "name": "Potato", + "created_at": timezone.now(), + "updated_at": timezone.now(), + "icon_url": "https://www.google.com", } - model = self.generate_models(authenticate=True, - event=True, - event_type=True, - event_type_kwargs=event_type_kwargs) + model = self.generate_models( + authenticate=True, event=True, event_type=True, event_type_kwargs=event_type_kwargs + ) response = self.client.get(url) json = response.json() @@ -112,25 +115,28 @@ def test_bad_academy_slug(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, 200) - self.assertEqual(self.all_event_type_dict(), [{ - **self.model_to_dict(model, 'event_type'), - }]) + self.assertEqual( + 
self.all_event_type_dict(), + [ + { + **self.model_to_dict(model, "event_type"), + } + ], + ) def test_properly_academy_slug(self): event_type_kwargs = { - 'slug': 'potato', - 'name': 'Potato', - 'created_at': timezone.now(), - 'updated_at': timezone.now(), - 'icon_url': 'https://www.google.com', + "slug": "potato", + "name": "Potato", + "created_at": timezone.now(), + "updated_at": timezone.now(), + "icon_url": "https://www.google.com", } - model = self.generate_models(authenticate=True, - academy=1, - event=True, - event_type=True, - event_type_kwargs=event_type_kwargs) - url = reverse_lazy('events:eventype') + f'?academy={model.academy.slug}' + model = self.generate_models( + authenticate=True, academy=1, event=True, event_type=True, event_type_kwargs=event_type_kwargs + ) + url = reverse_lazy("events:eventype") + f"?academy={model.academy.slug}" response = self.client.get(url) json = response.json() @@ -139,24 +145,28 @@ def test_properly_academy_slug(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, 200) - self.assertEqual(self.all_event_type_dict(), [{ - **self.model_to_dict(model, 'event_type'), - }]) + self.assertEqual( + self.all_event_type_dict(), + [ + { + **self.model_to_dict(model, "event_type"), + } + ], + ) def test_bad_allow_shared_creation_slug(self): - url = reverse_lazy('events:eventype') + '?allow_shared_creation=false' + url = reverse_lazy("events:eventype") + "?allow_shared_creation=false" event_type_kwargs = { - 'slug': 'potato', - 'name': 'Potato', - 'created_at': timezone.now(), - 'updated_at': timezone.now(), - 'icon_url': 'https://www.google.com', + "slug": "potato", + "name": "Potato", + "created_at": timezone.now(), + "updated_at": timezone.now(), + "icon_url": "https://www.google.com", } - model = self.generate_models(authenticate=True, - event=True, - event_type=True, - event_type_kwargs=event_type_kwargs) + model = self.generate_models( + authenticate=True, event=True, event_type=True, event_type_kwargs=event_type_kwargs + ) response = self.client.get(url) json = response.json() @@ -165,25 +175,28 @@ def test_bad_allow_shared_creation_slug(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, 200) - self.assertEqual(self.all_event_type_dict(), [{ - **self.model_to_dict(model, 'event_type'), - }]) + self.assertEqual( + self.all_event_type_dict(), + [ + { + **self.model_to_dict(model, "event_type"), + } + ], + ) def test_properly_allow_shared_creation_slug(self): event_type_kwargs = { - 'slug': 'potato', - 'name': 'Potato', - 'created_at': timezone.now(), - 'updated_at': timezone.now(), - 'icon_url': 'https://www.google.com', + "slug": "potato", + "name": "Potato", + "created_at": timezone.now(), + "updated_at": timezone.now(), + "icon_url": "https://www.google.com", } - model = self.generate_models(authenticate=True, - academy=1, - event=True, - event_type=True, - event_type_kwargs=event_type_kwargs) - url = reverse_lazy('events:eventype') + f'?allow_shared_creation=true' + model = self.generate_models( + authenticate=True, academy=1, event=True, event_type=True, event_type_kwargs=event_type_kwargs + ) + url = reverse_lazy("events:eventype") + f"?allow_shared_creation=true" response = self.client.get(url) json = response.json() @@ -192,6 +205,11 @@ def test_properly_allow_shared_creation_slug(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, 200) - self.assertEqual(self.all_event_type_dict(), [{ - **self.model_to_dict(model, 'event_type'), - }]) + self.assertEqual( + 
self.all_event_type_dict(), + [ + { + **self.model_to_dict(model, "event_type"), + } + ], + ) diff --git a/breathecode/events/tests/urls/tests_ical_cohorts.py b/breathecode/events/tests/urls/tests_ical_cohorts.py index 533bb53de..23e39e395 100644 --- a/breathecode/events/tests/urls/tests_ical_cohorts.py +++ b/breathecode/events/tests/urls/tests_ical_cohorts.py @@ -1,6 +1,7 @@ """ Test /academy/cohort """ + import urllib from datetime import datetime, timedelta from unittest.mock import MagicMock, patch @@ -20,865 +21,894 @@ class AcademyCohortTestSuite(EventTestCase): """Test /academy/cohort""" - @patch('breathecode.events.tasks.build_live_classes_from_timeslot.delay', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.events.tasks.build_live_classes_from_timeslot.delay", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_ical_cohorts__without_academy(self): """Test /academy/cohort without auth""" - url = reverse_lazy('events:ical_cohorts') - args = {'academy': '1'} - response = self.client.get(url + '?' + urllib.parse.urlencode(args)) + url = reverse_lazy("events:ical_cohorts") + args = {"academy": "1"} + response = self.client.get(url + "?" + urllib.parse.urlencode(args)) json = response.json() - expected = {'detail': 'Some academy not exist', 'status_code': 400} + expected = {"detail": "Some academy not exist", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - @patch('breathecode.events.tasks.build_live_classes_from_timeslot.delay', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.events.tasks.build_live_classes_from_timeslot.delay", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_ical_cohorts__without_events(self): """Test /academy/cohort without auth""" - device_id_kwargs = {'name': 'server'} + device_id_kwargs = {"name": "server"} model = self.generate_models(academy=True, skip_cohort=True, device_id=True, device_id_kwargs=device_id_kwargs) - url = reverse_lazy('events:ical_cohorts') - args = {'academy': '1'} - response = self.client.get(url + '?' + urllib.parse.urlencode(args)) + url = reverse_lazy("events:ical_cohorts") + args = {"academy": "1"} + response = self.client.get(url + "?" 
+ urllib.parse.urlencode(args)) key = model.device_id.key - expected = '\r\n'.join([ - 'BEGIN:VCALENDAR', - 'VERSION:2.0', - f'PRODID:-//4Geeks//Academy Cohorts (1) {key}//EN', - 'METHOD:PUBLISH', - 'REFRESH-INTERVAL;VALUE=DURATION:PT15M', - 'URL:http://localhost:8000/v1/events/ical/cohorts?academy=1', - 'X-WR-CALDESC:', - 'X-WR-CALNAME:Academy - Cohorts', - 'END:VCALENDAR', - '', - ]) - - self.assertEqual(response.content.decode('utf-8'), expected) + expected = "\r\n".join( + [ + "BEGIN:VCALENDAR", + "VERSION:2.0", + f"PRODID:-//4Geeks//Academy Cohorts (1) {key}//EN", + "METHOD:PUBLISH", + "REFRESH-INTERVAL;VALUE=DURATION:PT15M", + "URL:http://localhost:8000/v1/events/ical/cohorts?academy=1", + "X-WR-CALDESC:", + "X-WR-CALNAME:Academy - Cohorts", + "END:VCALENDAR", + "", + ] + ) + + self.assertEqual(response.content.decode("utf-8"), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - @patch('breathecode.events.tasks.build_live_classes_from_timeslot.delay', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.events.tasks.build_live_classes_from_timeslot.delay", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_ical_cohorts__dont_get_status_deleted(self): """Test /academy/cohort without auth""" - cohort_kwargs = {'stage': 'DELETED'} - device_id_kwargs = {'name': 'server'} - model = self.generate_models(academy=True, - event=True, - cohort=True, - device_id=True, - cohort_kwargs=cohort_kwargs, - device_id_kwargs=device_id_kwargs) - - url = reverse_lazy('events:ical_cohorts') - args = {'academy': '1'} - response = self.client.get(url + '?' + urllib.parse.urlencode(args)) + cohort_kwargs = {"stage": "DELETED"} + device_id_kwargs = {"name": "server"} + model = self.generate_models( + academy=True, + event=True, + cohort=True, + device_id=True, + cohort_kwargs=cohort_kwargs, + device_id_kwargs=device_id_kwargs, + ) + + url = reverse_lazy("events:ical_cohorts") + args = {"academy": "1"} + response = self.client.get(url + "?" 
+ urllib.parse.urlencode(args)) key = model.device_id.key - expected = '\r\n'.join([ - 'BEGIN:VCALENDAR', - 'VERSION:2.0', - f'PRODID:-//4Geeks//Academy Cohorts (1) {key}//EN', - 'METHOD:PUBLISH', - 'REFRESH-INTERVAL;VALUE=DURATION:PT15M', - 'URL:http://localhost:8000/v1/events/ical/cohorts?academy=1', - 'X-WR-CALDESC:', - f'X-WR-CALNAME:Academy - Cohorts', - 'END:VCALENDAR', - '', - ]) - - self.assertEqual(response.content.decode('utf-8'), expected) + expected = "\r\n".join( + [ + "BEGIN:VCALENDAR", + "VERSION:2.0", + f"PRODID:-//4Geeks//Academy Cohorts (1) {key}//EN", + "METHOD:PUBLISH", + "REFRESH-INTERVAL;VALUE=DURATION:PT15M", + "URL:http://localhost:8000/v1/events/ical/cohorts?academy=1", + "X-WR-CALDESC:", + f"X-WR-CALNAME:Academy - Cohorts", + "END:VCALENDAR", + "", + ] + ) + + self.assertEqual(response.content.decode("utf-8"), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - @patch('breathecode.events.tasks.build_live_classes_from_timeslot.delay', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.events.tasks.build_live_classes_from_timeslot.delay", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_ical_cohorts__with_one(self): """Test /academy/cohort without auth""" - device_id_kwargs = {'name': 'server'} + device_id_kwargs = {"name": "server"} cohort = { - 'ending_date': timezone.now() + timedelta(weeks=10 * 52), - 'kickoff_date': datetime.today().isoformat() + "ending_date": timezone.now() + timedelta(weeks=10 * 52), + "kickoff_date": datetime.today().isoformat(), } model = self.generate_models(academy=True, cohort=cohort, device_id=True, device_id_kwargs=device_id_kwargs) - url = reverse_lazy('events:ical_cohorts') - args = {'academy': '1'} - response = self.client.get(url + '?' + urllib.parse.urlencode(args)) + url = reverse_lazy("events:ical_cohorts") + args = {"academy": "1"} + response = self.client.get(url + "?" 
+ urllib.parse.urlencode(args)) - cohort = model['cohort'] - academy = model['academy'] + cohort = model["cohort"] + academy = model["academy"] key = model.device_id.key - expected = '\r\n'.join([ - 'BEGIN:VCALENDAR', - 'VERSION:2.0', - f'PRODID:-//4Geeks//Academy Cohorts (1) {key}//EN', - 'METHOD:PUBLISH', - 'REFRESH-INTERVAL;VALUE=DURATION:PT15M', - 'URL:http://localhost:8000/v1/events/ical/cohorts?academy=1', - 'X-WR-CALDESC:', - f'X-WR-CALNAME:Academy - Cohorts', - - # ================================================================= - # Event - 'BEGIN:VEVENT', - f'SUMMARY:{cohort.name}', - f'DTSTART:{self.datetime_to_ical(cohort.kickoff_date)}', - f'DTEND:{self.datetime_to_ical(cohort.ending_date)}', - f'DTSTAMP:{self.datetime_to_ical(cohort.created_at)}', - f'UID:breathecode_cohort_{cohort.id}_{key}', - f'LOCATION:{academy.name}', - 'END:VEVENT', - 'END:VCALENDAR', - '', - ]) - - self.assertEqual(response.content.decode('utf-8'), expected) + expected = "\r\n".join( + [ + "BEGIN:VCALENDAR", + "VERSION:2.0", + f"PRODID:-//4Geeks//Academy Cohorts (1) {key}//EN", + "METHOD:PUBLISH", + "REFRESH-INTERVAL;VALUE=DURATION:PT15M", + "URL:http://localhost:8000/v1/events/ical/cohorts?academy=1", + "X-WR-CALDESC:", + f"X-WR-CALNAME:Academy - Cohorts", + # ================================================================= + # Event + "BEGIN:VEVENT", + f"SUMMARY:{cohort.name}", + f"DTSTART:{self.datetime_to_ical(cohort.kickoff_date)}", + f"DTEND:{self.datetime_to_ical(cohort.ending_date)}", + f"DTSTAMP:{self.datetime_to_ical(cohort.created_at)}", + f"UID:breathecode_cohort_{cohort.id}_{key}", + f"LOCATION:{academy.name}", + "END:VEVENT", + "END:VCALENDAR", + "", + ] + ) + + self.assertEqual(response.content.decode("utf-8"), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - @patch('breathecode.events.tasks.build_live_classes_from_timeslot.delay', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.events.tasks.build_live_classes_from_timeslot.delay", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_ical_cohorts__with_one__ending_date_is_none(self): """Test /academy/cohort without auth""" - device_id_kwargs = {'name': 'server'} + device_id_kwargs = {"name": "server"} model = self.generate_models(academy=1, cohort=1, device_id=1, device_id_kwargs=device_id_kwargs) - url = reverse_lazy('events:ical_cohorts') - args = {'academy': '1'} - response = self.client.get(url + '?' + urllib.parse.urlencode(args)) + url = reverse_lazy("events:ical_cohorts") + args = {"academy": "1"} + response = self.client.get(url + "?" 
+ urllib.parse.urlencode(args)) key = model.device_id.key - expected = '\r\n'.join([ - 'BEGIN:VCALENDAR', - 'VERSION:2.0', - f'PRODID:-//4Geeks//Academy Cohorts (1) {key}//EN', - 'METHOD:PUBLISH', - 'REFRESH-INTERVAL;VALUE=DURATION:PT15M', - 'URL:http://localhost:8000/v1/events/ical/cohorts?academy=1', - 'X-WR-CALDESC:', - f'X-WR-CALNAME:Academy - Cohorts', - 'END:VCALENDAR', - '', - ]) - - self.assertEqual(response.content.decode('utf-8'), expected) + expected = "\r\n".join( + [ + "BEGIN:VCALENDAR", + "VERSION:2.0", + f"PRODID:-//4Geeks//Academy Cohorts (1) {key}//EN", + "METHOD:PUBLISH", + "REFRESH-INTERVAL;VALUE=DURATION:PT15M", + "URL:http://localhost:8000/v1/events/ical/cohorts?academy=1", + "X-WR-CALDESC:", + f"X-WR-CALNAME:Academy - Cohorts", + "END:VCALENDAR", + "", + ] + ) + + self.assertEqual(response.content.decode("utf-8"), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - @patch('breathecode.events.tasks.build_live_classes_from_timeslot.delay', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.events.tasks.build_live_classes_from_timeslot.delay", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_ical_cohorts__with_one__never_ends_true(self): """Test /academy/cohort without auth""" - device_id_kwargs = {'name': 'server'} - cohort = {'never_ends': True} + device_id_kwargs = {"name": "server"} + cohort = {"never_ends": True} model = self.generate_models(academy=1, cohort=cohort, device_id=1, device_id_kwargs=device_id_kwargs) - url = reverse_lazy('events:ical_cohorts') - args = {'academy': '1'} - response = self.client.get(url + '?' + urllib.parse.urlencode(args)) + url = reverse_lazy("events:ical_cohorts") + args = {"academy": "1"} + response = self.client.get(url + "?" 
+ urllib.parse.urlencode(args)) key = model.device_id.key - expected = '\r\n'.join([ - 'BEGIN:VCALENDAR', - 'VERSION:2.0', - f'PRODID:-//4Geeks//Academy Cohorts (1) {key}//EN', - 'METHOD:PUBLISH', - 'REFRESH-INTERVAL;VALUE=DURATION:PT15M', - 'URL:http://localhost:8000/v1/events/ical/cohorts?academy=1', - 'X-WR-CALDESC:', - f'X-WR-CALNAME:Academy - Cohorts', - 'END:VCALENDAR', - '', - ]) - - self.assertEqual(response.content.decode('utf-8'), expected) + expected = "\r\n".join( + [ + "BEGIN:VCALENDAR", + "VERSION:2.0", + f"PRODID:-//4Geeks//Academy Cohorts (1) {key}//EN", + "METHOD:PUBLISH", + "REFRESH-INTERVAL;VALUE=DURATION:PT15M", + "URL:http://localhost:8000/v1/events/ical/cohorts?academy=1", + "X-WR-CALDESC:", + f"X-WR-CALNAME:Academy - Cohorts", + "END:VCALENDAR", + "", + ] + ) + + self.assertEqual(response.content.decode("utf-8"), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - @patch('breathecode.events.tasks.build_live_classes_from_timeslot.delay', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.events.tasks.build_live_classes_from_timeslot.delay", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_ical_cohorts__with_one__with_incoming_true__return_zero_cohorts(self): """Test /academy/cohort without auth""" - cohort_kwargs = {'kickoff_date': timezone.now() - timedelta(days=1)} - device_id_kwargs = {'name': 'server'} - model = self.generate_models(academy=True, - cohort=True, - device_id=True, - cohort_kwargs=cohort_kwargs, - device_id_kwargs=device_id_kwargs) - - url = reverse_lazy('events:ical_cohorts') - args = {'academy': '1', 'upcoming': 'true'} - response = self.client.get(url + '?' + urllib.parse.urlencode(args)) + cohort_kwargs = {"kickoff_date": timezone.now() - timedelta(days=1)} + device_id_kwargs = {"name": "server"} + model = self.generate_models( + academy=True, cohort=True, device_id=True, cohort_kwargs=cohort_kwargs, device_id_kwargs=device_id_kwargs + ) + + url = reverse_lazy("events:ical_cohorts") + args = {"academy": "1", "upcoming": "true"} + response = self.client.get(url + "?" 
+ urllib.parse.urlencode(args)) key = model.device_id.key - expected = '\r\n'.join([ - 'BEGIN:VCALENDAR', - 'VERSION:2.0', - f'PRODID:-//4Geeks//Academy Cohorts (1) {key}//EN', - 'METHOD:PUBLISH', - 'REFRESH-INTERVAL;VALUE=DURATION:PT15M', - 'URL:http://localhost:8000/v1/events/ical/cohorts?academy=1', - 'X-WR-CALDESC:', - f'X-WR-CALNAME:Academy - Cohorts', - 'END:VCALENDAR', - '', - ]) - - self.assertEqual(response.content.decode('utf-8'), expected) + expected = "\r\n".join( + [ + "BEGIN:VCALENDAR", + "VERSION:2.0", + f"PRODID:-//4Geeks//Academy Cohorts (1) {key}//EN", + "METHOD:PUBLISH", + "REFRESH-INTERVAL;VALUE=DURATION:PT15M", + "URL:http://localhost:8000/v1/events/ical/cohorts?academy=1", + "X-WR-CALDESC:", + f"X-WR-CALNAME:Academy - Cohorts", + "END:VCALENDAR", + "", + ] + ) + + self.assertEqual(response.content.decode("utf-8"), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - @patch('breathecode.events.tasks.build_live_classes_from_timeslot.delay', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.events.tasks.build_live_classes_from_timeslot.delay", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_ical_cohorts__with_one__with_incoming_true(self): """Test /academy/cohort without auth""" cohort_kwargs = { - 'kickoff_date': timezone.now() + timedelta(days=1), - 'ending_date': timezone.now() + timedelta(weeks=10 * 52), + "kickoff_date": timezone.now() + timedelta(days=1), + "ending_date": timezone.now() + timedelta(weeks=10 * 52), } - device_id_kwargs = {'name': 'server'} - model = self.generate_models(academy=True, - cohort=True, - device_id=True, - cohort_kwargs=cohort_kwargs, - device_id_kwargs=device_id_kwargs) - - url = reverse_lazy('events:ical_cohorts') - args = {'academy': '1', 'upcoming': 'true'} - response = self.client.get(url + '?' + urllib.parse.urlencode(args)) - - cohort = model['cohort'] - academy = model['academy'] + device_id_kwargs = {"name": "server"} + model = self.generate_models( + academy=True, cohort=True, device_id=True, cohort_kwargs=cohort_kwargs, device_id_kwargs=device_id_kwargs + ) + + url = reverse_lazy("events:ical_cohorts") + args = {"academy": "1", "upcoming": "true"} + response = self.client.get(url + "?" 
+ urllib.parse.urlencode(args)) + + cohort = model["cohort"] + academy = model["academy"] key = model.device_id.key - expected = '\r\n'.join([ - 'BEGIN:VCALENDAR', - 'VERSION:2.0', - f'PRODID:-//4Geeks//Academy Cohorts (1) {key}//EN', - 'METHOD:PUBLISH', - 'REFRESH-INTERVAL;VALUE=DURATION:PT15M', - 'URL:http://localhost:8000/v1/events/ical/cohorts?academy=1', - 'X-WR-CALDESC:', - f'X-WR-CALNAME:Academy - Cohorts', - - # ================================================================= - # Event - 'BEGIN:VEVENT', - f'SUMMARY:{cohort.name}', - f'DTSTART:{self.datetime_to_ical(cohort.kickoff_date)}', - f'DTEND:{self.datetime_to_ical(cohort.ending_date)}', - f'DTSTAMP:{self.datetime_to_ical(cohort.created_at)}', - f'UID:breathecode_cohort_{cohort.id}_{key}', - f'LOCATION:{academy.name}', - 'END:VEVENT', - 'END:VCALENDAR', - '', - ]) - - self.assertEqual(response.content.decode('utf-8'), expected) + expected = "\r\n".join( + [ + "BEGIN:VCALENDAR", + "VERSION:2.0", + f"PRODID:-//4Geeks//Academy Cohorts (1) {key}//EN", + "METHOD:PUBLISH", + "REFRESH-INTERVAL;VALUE=DURATION:PT15M", + "URL:http://localhost:8000/v1/events/ical/cohorts?academy=1", + "X-WR-CALDESC:", + f"X-WR-CALNAME:Academy - Cohorts", + # ================================================================= + # Event + "BEGIN:VEVENT", + f"SUMMARY:{cohort.name}", + f"DTSTART:{self.datetime_to_ical(cohort.kickoff_date)}", + f"DTEND:{self.datetime_to_ical(cohort.ending_date)}", + f"DTSTAMP:{self.datetime_to_ical(cohort.created_at)}", + f"UID:breathecode_cohort_{cohort.id}_{key}", + f"LOCATION:{academy.name}", + "END:VEVENT", + "END:VCALENDAR", + "", + ] + ) + + self.assertEqual(response.content.decode("utf-8"), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - @patch('breathecode.events.tasks.build_live_classes_from_timeslot.delay', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.events.tasks.build_live_classes_from_timeslot.delay", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_ical_cohorts__with_one__with_teacher__with_ending_date(self): """Test /academy/cohort without auth""" - cohort_user_kwargs = {'role': 'TEACHER'} - cohort_kwargs = {'ending_date': timezone.now()} - device_id_kwargs = {'name': 'server'} - model = self.generate_models(academy=True, - cohort={'kickoff_date': datetime.today().isoformat()}, - cohort_user=True, - device_id=True, - cohort_kwargs=cohort_kwargs, - cohort_user_kwargs=cohort_user_kwargs, - device_id_kwargs=device_id_kwargs) - - url = reverse_lazy('events:ical_cohorts') - args = {'academy': '1'} - response = self.client.get(url + '?' 
+ urllib.parse.urlencode(args)) - - cohort = model['cohort'] - academy = model['academy'] - user = model['user'] + cohort_user_kwargs = {"role": "TEACHER"} + cohort_kwargs = {"ending_date": timezone.now()} + device_id_kwargs = {"name": "server"} + model = self.generate_models( + academy=True, + cohort={"kickoff_date": datetime.today().isoformat()}, + cohort_user=True, + device_id=True, + cohort_kwargs=cohort_kwargs, + cohort_user_kwargs=cohort_user_kwargs, + device_id_kwargs=device_id_kwargs, + ) + + url = reverse_lazy("events:ical_cohorts") + args = {"academy": "1"} + response = self.client.get(url + "?" + urllib.parse.urlencode(args)) + + cohort = model["cohort"] + academy = model["academy"] + user = model["user"] key = model.device_id.key - expected = '\r\n'.join([ - 'BEGIN:VCALENDAR', - 'VERSION:2.0', - f'PRODID:-//4Geeks//Academy Cohorts (1) {key}//EN', - 'METHOD:PUBLISH', - 'REFRESH-INTERVAL;VALUE=DURATION:PT15M', - 'URL:http://localhost:8000/v1/events/ical/cohorts?academy=1', - 'X-WR-CALDESC:', - f'X-WR-CALNAME:Academy - Cohorts', - - # ================================================================= - # Event - 'BEGIN:VEVENT', - f'SUMMARY:{cohort.name}', - f'DTSTART:{self.datetime_to_ical(cohort.kickoff_date)}', - f'DTEND:{self.datetime_to_ical(cohort.ending_date)}', - f'DTSTAMP:{self.datetime_to_ical(cohort.created_at)}', - f'UID:breathecode_cohort_{cohort.id}_{key}', - f'LOCATION:{academy.name}', - self.line_limit(f'ORGANIZER;CN="{user.first_name} {user.last_name}";ROLE=OWNER:MAILTO:{user.email}'), - 'END:VEVENT', - 'END:VCALENDAR', - '', - ]) - - self.assertEqual(response.content.decode('utf-8'), expected) + expected = "\r\n".join( + [ + "BEGIN:VCALENDAR", + "VERSION:2.0", + f"PRODID:-//4Geeks//Academy Cohorts (1) {key}//EN", + "METHOD:PUBLISH", + "REFRESH-INTERVAL;VALUE=DURATION:PT15M", + "URL:http://localhost:8000/v1/events/ical/cohorts?academy=1", + "X-WR-CALDESC:", + f"X-WR-CALNAME:Academy - Cohorts", + # ================================================================= + # Event + "BEGIN:VEVENT", + f"SUMMARY:{cohort.name}", + f"DTSTART:{self.datetime_to_ical(cohort.kickoff_date)}", + f"DTEND:{self.datetime_to_ical(cohort.ending_date)}", + f"DTSTAMP:{self.datetime_to_ical(cohort.created_at)}", + f"UID:breathecode_cohort_{cohort.id}_{key}", + f"LOCATION:{academy.name}", + self.line_limit(f'ORGANIZER;CN="{user.first_name} {user.last_name}";ROLE=OWNER:MAILTO:{user.email}'), + "END:VEVENT", + "END:VCALENDAR", + "", + ] + ) + + self.assertEqual(response.content.decode("utf-8"), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - @patch('breathecode.events.tasks.build_live_classes_from_timeslot.delay', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.events.tasks.build_live_classes_from_timeslot.delay", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_ical_cohorts__with_two(self): """Test /academy/cohort without auth""" - device_id_kwargs = {'name': 'server'} + device_id_kwargs = {"name": "server"} base = self.generate_models(academy=True, device_id=True, skip_cohort=True, device_id_kwargs=device_id_kwargs) cohort = { - 'kickoff_date': datetime.today().isoformat(), - 'ending_date': timezone.now() + 
timedelta(weeks=10 * 52) + "kickoff_date": datetime.today().isoformat(), + "ending_date": timezone.now() + timedelta(weeks=10 * 52), } models = [ self.generate_models(user=True, cohort=cohort, models=base), self.generate_models(user=True, cohort=cohort, models=base), ] - url = reverse_lazy('events:ical_cohorts') - args = {'academy': '1'} - response = self.client.get(url + '?' + urllib.parse.urlencode(args)) + url = reverse_lazy("events:ical_cohorts") + args = {"academy": "1"} + response = self.client.get(url + "?" + urllib.parse.urlencode(args)) - cohort1 = models[0]['cohort'] - cohort2 = models[1]['cohort'] - academy1 = models[0]['academy'] - academy2 = models[1]['academy'] + cohort1 = models[0]["cohort"] + cohort2 = models[1]["cohort"] + academy1 = models[0]["academy"] + academy2 = models[1]["academy"] key = base.device_id.key - expected = '\r\n'.join([ - 'BEGIN:VCALENDAR', - 'VERSION:2.0', - f'PRODID:-//4Geeks//Academy Cohorts (1) {key}//EN', - 'METHOD:PUBLISH', - 'REFRESH-INTERVAL;VALUE=DURATION:PT15M', - 'URL:http://localhost:8000/v1/events/ical/cohorts?academy=1', - 'X-WR-CALDESC:', - f'X-WR-CALNAME:Academy - Cohorts', - - # ================================================================= - # Event - 'BEGIN:VEVENT', - f'SUMMARY:{cohort1.name}', - f'DTSTART:{self.datetime_to_ical(cohort1.kickoff_date)}', - f'DTEND:{self.datetime_to_ical(cohort1.ending_date)}', - f'DTSTAMP:{self.datetime_to_ical(cohort1.created_at)}', - f'UID:breathecode_cohort_{cohort1.id}_{key}', - f'LOCATION:{academy1.name}', - 'END:VEVENT', - - # ================================================================= - # Event - 'BEGIN:VEVENT', - f'SUMMARY:{cohort2.name}', - f'DTSTART:{self.datetime_to_ical(cohort2.kickoff_date)}', - f'DTEND:{self.datetime_to_ical(cohort1.ending_date)}', - f'DTSTAMP:{self.datetime_to_ical(cohort2.created_at)}', - f'UID:breathecode_cohort_{cohort2.id}_{key}', - f'LOCATION:{academy2.name}', - 'END:VEVENT', - 'END:VCALENDAR', - '', - ]) - - self.assertEqual(response.content.decode('utf-8'), expected) + expected = "\r\n".join( + [ + "BEGIN:VCALENDAR", + "VERSION:2.0", + f"PRODID:-//4Geeks//Academy Cohorts (1) {key}//EN", + "METHOD:PUBLISH", + "REFRESH-INTERVAL;VALUE=DURATION:PT15M", + "URL:http://localhost:8000/v1/events/ical/cohorts?academy=1", + "X-WR-CALDESC:", + f"X-WR-CALNAME:Academy - Cohorts", + # ================================================================= + # Event + "BEGIN:VEVENT", + f"SUMMARY:{cohort1.name}", + f"DTSTART:{self.datetime_to_ical(cohort1.kickoff_date)}", + f"DTEND:{self.datetime_to_ical(cohort1.ending_date)}", + f"DTSTAMP:{self.datetime_to_ical(cohort1.created_at)}", + f"UID:breathecode_cohort_{cohort1.id}_{key}", + f"LOCATION:{academy1.name}", + "END:VEVENT", + # ================================================================= + # Event + "BEGIN:VEVENT", + f"SUMMARY:{cohort2.name}", + f"DTSTART:{self.datetime_to_ical(cohort2.kickoff_date)}", + f"DTEND:{self.datetime_to_ical(cohort1.ending_date)}", + f"DTSTAMP:{self.datetime_to_ical(cohort2.created_at)}", + f"UID:breathecode_cohort_{cohort2.id}_{key}", + f"LOCATION:{academy2.name}", + "END:VEVENT", + "END:VCALENDAR", + "", + ] + ) + + self.assertEqual(response.content.decode("utf-8"), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - @patch('breathecode.events.tasks.build_live_classes_from_timeslot.delay', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', 
MagicMock(return_value=None)) + @patch("breathecode.events.tasks.build_live_classes_from_timeslot.delay", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_ical_cohorts__with_two__with_teacher__with_ending_date(self): """Test /academy/cohort without auth""" - cohort_user_kwargs = {'role': 'TEACHER'} - cohort_kwargs = {'ending_date': timezone.now()} - device_id_kwargs = {'name': 'server'} + cohort_user_kwargs = {"role": "TEACHER"} + cohort_kwargs = {"ending_date": timezone.now()} + device_id_kwargs = {"name": "server"} base = self.generate_models(academy=True, device_id=True, skip_cohort=True, device_id_kwargs=device_id_kwargs) models = [ - self.generate_models(user=True, - cohort={'kickoff_date': datetime.today().isoformat()}, - cohort_user=True, - models=base, - cohort_kwargs=cohort_kwargs, - cohort_user_kwargs=cohort_user_kwargs), - self.generate_models(user=True, - cohort={'kickoff_date': datetime.today().isoformat()}, - cohort_user=True, - models=base, - cohort_kwargs=cohort_kwargs, - cohort_user_kwargs=cohort_user_kwargs), + self.generate_models( + user=True, + cohort={"kickoff_date": datetime.today().isoformat()}, + cohort_user=True, + models=base, + cohort_kwargs=cohort_kwargs, + cohort_user_kwargs=cohort_user_kwargs, + ), + self.generate_models( + user=True, + cohort={"kickoff_date": datetime.today().isoformat()}, + cohort_user=True, + models=base, + cohort_kwargs=cohort_kwargs, + cohort_user_kwargs=cohort_user_kwargs, + ), ] - url = reverse_lazy('events:ical_cohorts') - args = {'academy': '1'} - response = self.client.get(url + '?' + urllib.parse.urlencode(args)) + url = reverse_lazy("events:ical_cohorts") + args = {"academy": "1"} + response = self.client.get(url + "?" 
+ urllib.parse.urlencode(args)) - cohort1 = models[0]['cohort'] - cohort2 = models[1]['cohort'] - academy1 = models[0]['academy'] - academy2 = models[1]['academy'] - user1 = models[0]['user'] - user2 = models[1]['user'] + cohort1 = models[0]["cohort"] + cohort2 = models[1]["cohort"] + academy1 = models[0]["academy"] + academy2 = models[1]["academy"] + user1 = models[0]["user"] + user2 = models[1]["user"] key = base.device_id.key - expected = '\r\n'.join([ - 'BEGIN:VCALENDAR', - 'VERSION:2.0', - f'PRODID:-//4Geeks//Academy Cohorts (1) {key}//EN', - 'METHOD:PUBLISH', - 'REFRESH-INTERVAL;VALUE=DURATION:PT15M', - 'URL:http://localhost:8000/v1/events/ical/cohorts?academy=1', - 'X-WR-CALDESC:', - f'X-WR-CALNAME:Academy - Cohorts', - - # ================================================================= - # Event - 'BEGIN:VEVENT', - f'SUMMARY:{cohort1.name}', - f'DTSTART:{self.datetime_to_ical(cohort1.kickoff_date)}', - f'DTEND:{self.datetime_to_ical(cohort1.ending_date)}', - f'DTSTAMP:{self.datetime_to_ical(cohort1.created_at)}', - f'UID:breathecode_cohort_{cohort1.id}_{key}', - f'LOCATION:{academy1.name}', - self.line_limit(f'ORGANIZER;CN="{user1.first_name} {user1.last_name}";ROLE=OWNER:MAILTO:{user1.email}'), - 'END:VEVENT', - - # ================================================================= - # Event - 'BEGIN:VEVENT', - f'SUMMARY:{cohort2.name}', - f'DTSTART:{self.datetime_to_ical(cohort2.kickoff_date)}', - f'DTEND:{self.datetime_to_ical(cohort2.ending_date)}', - f'DTSTAMP:{self.datetime_to_ical(cohort2.created_at)}', - f'UID:breathecode_cohort_{cohort2.id}_{key}', - f'LOCATION:{academy2.name}', - self.line_limit(f'ORGANIZER;CN="{user2.first_name} {user2.last_name}";ROLE=OWNER:MAILTO:{user2.email}'), - 'END:VEVENT', - 'END:VCALENDAR', - '', - ]) - - self.assertEqual(response.content.decode('utf-8'), expected) + expected = "\r\n".join( + [ + "BEGIN:VCALENDAR", + "VERSION:2.0", + f"PRODID:-//4Geeks//Academy Cohorts (1) {key}//EN", + "METHOD:PUBLISH", + "REFRESH-INTERVAL;VALUE=DURATION:PT15M", + "URL:http://localhost:8000/v1/events/ical/cohorts?academy=1", + "X-WR-CALDESC:", + f"X-WR-CALNAME:Academy - Cohorts", + # ================================================================= + # Event + "BEGIN:VEVENT", + f"SUMMARY:{cohort1.name}", + f"DTSTART:{self.datetime_to_ical(cohort1.kickoff_date)}", + f"DTEND:{self.datetime_to_ical(cohort1.ending_date)}", + f"DTSTAMP:{self.datetime_to_ical(cohort1.created_at)}", + f"UID:breathecode_cohort_{cohort1.id}_{key}", + f"LOCATION:{academy1.name}", + self.line_limit(f'ORGANIZER;CN="{user1.first_name} {user1.last_name}";ROLE=OWNER:MAILTO:{user1.email}'), + "END:VEVENT", + # ================================================================= + # Event + "BEGIN:VEVENT", + f"SUMMARY:{cohort2.name}", + f"DTSTART:{self.datetime_to_ical(cohort2.kickoff_date)}", + f"DTEND:{self.datetime_to_ical(cohort2.ending_date)}", + f"DTSTAMP:{self.datetime_to_ical(cohort2.created_at)}", + f"UID:breathecode_cohort_{cohort2.id}_{key}", + f"LOCATION:{academy2.name}", + self.line_limit(f'ORGANIZER;CN="{user2.first_name} {user2.last_name}";ROLE=OWNER:MAILTO:{user2.email}'), + "END:VEVENT", + "END:VCALENDAR", + "", + ] + ) + + self.assertEqual(response.content.decode("utf-8"), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - @patch('breathecode.events.tasks.build_live_classes_from_timeslot.delay', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - 
@patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.events.tasks.build_live_classes_from_timeslot.delay", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_ical_cohorts__with_two__with_teacher__with_ending_date__with_two_academies_id(self): """Test /academy/cohort without auth""" - cohort_user_kwargs = {'role': 'TEACHER'} - cohort_kwargs = {'ending_date': timezone.now()} - device_id_kwargs = {'name': 'server'} + cohort_user_kwargs = {"role": "TEACHER"} + cohort_kwargs = {"ending_date": timezone.now()} + device_id_kwargs = {"name": "server"} base = self.generate_models(device_id=True, device_id_kwargs=device_id_kwargs) base1 = self.generate_models(academy=True, skip_cohort=True, models=base) models = [ - self.generate_models(user=True, - cohort={'kickoff_date': datetime.today().isoformat()}, - cohort_user=True, - models=base1, - cohort_kwargs=cohort_kwargs, - cohort_user_kwargs=cohort_user_kwargs), - self.generate_models(user=True, - cohort={'kickoff_date': datetime.today().isoformat()}, - cohort_user=True, - models=base1, - cohort_kwargs=cohort_kwargs, - cohort_user_kwargs=cohort_user_kwargs), + self.generate_models( + user=True, + cohort={"kickoff_date": datetime.today().isoformat()}, + cohort_user=True, + models=base1, + cohort_kwargs=cohort_kwargs, + cohort_user_kwargs=cohort_user_kwargs, + ), + self.generate_models( + user=True, + cohort={"kickoff_date": datetime.today().isoformat()}, + cohort_user=True, + models=base1, + cohort_kwargs=cohort_kwargs, + cohort_user_kwargs=cohort_user_kwargs, + ), ] base2 = self.generate_models(academy=True, skip_cohort=True, models=base) models = models + [ - self.generate_models(user=True, - cohort={'kickoff_date': datetime.today().isoformat()}, - cohort_user=True, - models=base2, - cohort_kwargs=cohort_kwargs, - cohort_user_kwargs=cohort_user_kwargs), - self.generate_models(user=True, - cohort={'kickoff_date': datetime.today().isoformat()}, - cohort_user=True, - models=base2, - cohort_kwargs=cohort_kwargs, - cohort_user_kwargs=cohort_user_kwargs), + self.generate_models( + user=True, + cohort={"kickoff_date": datetime.today().isoformat()}, + cohort_user=True, + models=base2, + cohort_kwargs=cohort_kwargs, + cohort_user_kwargs=cohort_user_kwargs, + ), + self.generate_models( + user=True, + cohort={"kickoff_date": datetime.today().isoformat()}, + cohort_user=True, + models=base2, + cohort_kwargs=cohort_kwargs, + cohort_user_kwargs=cohort_user_kwargs, + ), ] - url = reverse_lazy('events:ical_cohorts') - args = {'academy': '1,2'} - url = url + '?' + urllib.parse.urlencode(args) + url = reverse_lazy("events:ical_cohorts") + args = {"academy": "1,2"} + url = url + "?" 
+ urllib.parse.urlencode(args) response = self.client.get(url) - cohort1 = models[0]['cohort'] - cohort2 = models[1]['cohort'] - cohort3 = models[2]['cohort'] - cohort4 = models[3]['cohort'] - academy1 = models[0]['academy'] - academy2 = models[1]['academy'] - academy3 = models[2]['academy'] - academy4 = models[3]['academy'] - user1 = models[0]['user'] - user2 = models[1]['user'] - user3 = models[2]['user'] - user4 = models[3]['user'] + cohort1 = models[0]["cohort"] + cohort2 = models[1]["cohort"] + cohort3 = models[2]["cohort"] + cohort4 = models[3]["cohort"] + academy1 = models[0]["academy"] + academy2 = models[1]["academy"] + academy3 = models[2]["academy"] + academy4 = models[3]["academy"] + user1 = models[0]["user"] + user2 = models[1]["user"] + user3 = models[2]["user"] + user4 = models[3]["user"] key = base.device_id.key - url = url.replace('%2C', ',') - - expected = '\r\n'.join([ - 'BEGIN:VCALENDAR', - 'VERSION:2.0', - f'PRODID:-//4Geeks//Academy Cohorts (1\\,2) {key}//EN', - 'METHOD:PUBLISH', - 'REFRESH-INTERVAL;VALUE=DURATION:PT15M', - self.line_limit(f'URL:http://localhost:8000{url}'), - 'X-WR-CALDESC:', - f'X-WR-CALNAME:Academy - Cohorts', - - # ================================================================= - # Event - 'BEGIN:VEVENT', - f'SUMMARY:{cohort1.name}', - f'DTSTART:{self.datetime_to_ical(cohort1.kickoff_date)}', - f'DTEND:{self.datetime_to_ical(cohort1.ending_date)}', - f'DTSTAMP:{self.datetime_to_ical(cohort1.created_at)}', - f'UID:breathecode_cohort_{cohort1.id}_{key}', - f'LOCATION:{academy1.name}', - self.line_limit(f'ORGANIZER;CN="{user1.first_name} {user1.last_name}";ROLE=OWNER:MAILTO:{user1.email}'), - 'END:VEVENT', - - # ================================================================= - # Event - 'BEGIN:VEVENT', - f'SUMMARY:{cohort2.name}', - f'DTSTART:{self.datetime_to_ical(cohort2.kickoff_date)}', - f'DTEND:{self.datetime_to_ical(cohort2.ending_date)}', - f'DTSTAMP:{self.datetime_to_ical(cohort2.created_at)}', - f'UID:breathecode_cohort_{cohort2.id}_{key}', - f'LOCATION:{academy2.name}', - self.line_limit(f'ORGANIZER;CN="{user2.first_name} {user2.last_name}";ROLE=OWNER:MAILTO:{user2.email}'), - 'END:VEVENT', - - # ================================================================= - # Event - 'BEGIN:VEVENT', - f'SUMMARY:{cohort3.name}', - f'DTSTART:{self.datetime_to_ical(cohort3.kickoff_date)}', - f'DTEND:{self.datetime_to_ical(cohort3.ending_date)}', - f'DTSTAMP:{self.datetime_to_ical(cohort3.created_at)}', - f'UID:breathecode_cohort_{cohort3.id}_{key}', - f'LOCATION:{academy3.name}', - self.line_limit(f'ORGANIZER;CN="{user3.first_name} {user3.last_name}";ROLE=OWNER:MAILTO:{user3.email}'), - 'END:VEVENT', - - # ================================================================= - # Event - 'BEGIN:VEVENT', - f'SUMMARY:{cohort4.name}', - f'DTSTART:{self.datetime_to_ical(cohort4.kickoff_date)}', - f'DTEND:{self.datetime_to_ical(cohort4.ending_date)}', - f'DTSTAMP:{self.datetime_to_ical(cohort4.created_at)}', - f'UID:breathecode_cohort_{cohort4.id}_{key}', - f'LOCATION:{academy4.name}', - self.line_limit(f'ORGANIZER;CN="{user4.first_name} {user4.last_name}";ROLE=OWNER:MAILTO:{user4.email}'), - 'END:VEVENT', - 'END:VCALENDAR', - '', - ]) - - self.assertEqual(response.content.decode('utf-8'), expected) + url = url.replace("%2C", ",") + + expected = "\r\n".join( + [ + "BEGIN:VCALENDAR", + "VERSION:2.0", + f"PRODID:-//4Geeks//Academy Cohorts (1\\,2) {key}//EN", + "METHOD:PUBLISH", + "REFRESH-INTERVAL;VALUE=DURATION:PT15M", + 
self.line_limit(f"URL:http://localhost:8000{url}"), + "X-WR-CALDESC:", + f"X-WR-CALNAME:Academy - Cohorts", + # ================================================================= + # Event + "BEGIN:VEVENT", + f"SUMMARY:{cohort1.name}", + f"DTSTART:{self.datetime_to_ical(cohort1.kickoff_date)}", + f"DTEND:{self.datetime_to_ical(cohort1.ending_date)}", + f"DTSTAMP:{self.datetime_to_ical(cohort1.created_at)}", + f"UID:breathecode_cohort_{cohort1.id}_{key}", + f"LOCATION:{academy1.name}", + self.line_limit(f'ORGANIZER;CN="{user1.first_name} {user1.last_name}";ROLE=OWNER:MAILTO:{user1.email}'), + "END:VEVENT", + # ================================================================= + # Event + "BEGIN:VEVENT", + f"SUMMARY:{cohort2.name}", + f"DTSTART:{self.datetime_to_ical(cohort2.kickoff_date)}", + f"DTEND:{self.datetime_to_ical(cohort2.ending_date)}", + f"DTSTAMP:{self.datetime_to_ical(cohort2.created_at)}", + f"UID:breathecode_cohort_{cohort2.id}_{key}", + f"LOCATION:{academy2.name}", + self.line_limit(f'ORGANIZER;CN="{user2.first_name} {user2.last_name}";ROLE=OWNER:MAILTO:{user2.email}'), + "END:VEVENT", + # ================================================================= + # Event + "BEGIN:VEVENT", + f"SUMMARY:{cohort3.name}", + f"DTSTART:{self.datetime_to_ical(cohort3.kickoff_date)}", + f"DTEND:{self.datetime_to_ical(cohort3.ending_date)}", + f"DTSTAMP:{self.datetime_to_ical(cohort3.created_at)}", + f"UID:breathecode_cohort_{cohort3.id}_{key}", + f"LOCATION:{academy3.name}", + self.line_limit(f'ORGANIZER;CN="{user3.first_name} {user3.last_name}";ROLE=OWNER:MAILTO:{user3.email}'), + "END:VEVENT", + # ================================================================= + # Event + "BEGIN:VEVENT", + f"SUMMARY:{cohort4.name}", + f"DTSTART:{self.datetime_to_ical(cohort4.kickoff_date)}", + f"DTEND:{self.datetime_to_ical(cohort4.ending_date)}", + f"DTSTAMP:{self.datetime_to_ical(cohort4.created_at)}", + f"UID:breathecode_cohort_{cohort4.id}_{key}", + f"LOCATION:{academy4.name}", + self.line_limit(f'ORGANIZER;CN="{user4.first_name} {user4.last_name}";ROLE=OWNER:MAILTO:{user4.email}'), + "END:VEVENT", + "END:VCALENDAR", + "", + ] + ) + + self.assertEqual(response.content.decode("utf-8"), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - @patch('breathecode.events.tasks.build_live_classes_from_timeslot.delay', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.events.tasks.build_live_classes_from_timeslot.delay", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_ical_cohorts__with_two__with_teacher__with_ending_date__with_two_academies_slug(self): """Test /academy/cohort without auth""" - cohort_user_kwargs = {'role': 'TEACHER'} - cohort_kwargs = {'ending_date': timezone.now()} + cohort_user_kwargs = {"role": "TEACHER"} + cohort_kwargs = {"ending_date": timezone.now()} - device_id_kwargs = {'name': 'server'} + device_id_kwargs = {"name": "server"} base = self.generate_models(device_id=True, device_id_kwargs=device_id_kwargs) base1 = self.generate_models(academy=True, skip_cohort=True, models=base) models = [ - self.generate_models(user=True, - cohort={'kickoff_date': datetime.today().isoformat()}, - cohort_user=True, - 
models=base1, - cohort_kwargs=cohort_kwargs, - cohort_user_kwargs=cohort_user_kwargs), - self.generate_models(user=True, - cohort={'kickoff_date': datetime.today().isoformat()}, - cohort_user=True, - models=base1, - cohort_kwargs=cohort_kwargs, - cohort_user_kwargs=cohort_user_kwargs), + self.generate_models( + user=True, + cohort={"kickoff_date": datetime.today().isoformat()}, + cohort_user=True, + models=base1, + cohort_kwargs=cohort_kwargs, + cohort_user_kwargs=cohort_user_kwargs, + ), + self.generate_models( + user=True, + cohort={"kickoff_date": datetime.today().isoformat()}, + cohort_user=True, + models=base1, + cohort_kwargs=cohort_kwargs, + cohort_user_kwargs=cohort_user_kwargs, + ), ] base2 = self.generate_models(academy=True, skip_cohort=True, models=base) models = models + [ - self.generate_models(user=True, - cohort={'kickoff_date': datetime.today().isoformat()}, - cohort_user=True, - models=base2, - cohort_kwargs=cohort_kwargs, - cohort_user_kwargs=cohort_user_kwargs), - self.generate_models(user=True, - cohort={'kickoff_date': datetime.today().isoformat()}, - cohort_user=True, - models=base2, - cohort_kwargs=cohort_kwargs, - cohort_user_kwargs=cohort_user_kwargs), + self.generate_models( + user=True, + cohort={"kickoff_date": datetime.today().isoformat()}, + cohort_user=True, + models=base2, + cohort_kwargs=cohort_kwargs, + cohort_user_kwargs=cohort_user_kwargs, + ), + self.generate_models( + user=True, + cohort={"kickoff_date": datetime.today().isoformat()}, + cohort_user=True, + models=base2, + cohort_kwargs=cohort_kwargs, + cohort_user_kwargs=cohort_user_kwargs, + ), ] models = sorted(models, key=lambda x: x.cohort.id) - url = reverse_lazy('events:ical_cohorts') - args = {'academy_slug': ','.join(list(dict.fromkeys([x.academy.slug for x in models])))} - url = url + '?' + urllib.parse.urlencode(args) + url = reverse_lazy("events:ical_cohorts") + args = {"academy_slug": ",".join(list(dict.fromkeys([x.academy.slug for x in models])))} + url = url + "?" 
+ urllib.parse.urlencode(args) response = self.client.get(url) - cohort1 = models[0]['cohort'] - cohort2 = models[1]['cohort'] - cohort3 = models[2]['cohort'] - cohort4 = models[3]['cohort'] - academy1 = models[0]['academy'] - academy2 = models[1]['academy'] - academy3 = models[2]['academy'] - academy4 = models[3]['academy'] - user1 = models[0]['user'] - user2 = models[1]['user'] - user3 = models[2]['user'] - user4 = models[3]['user'] + cohort1 = models[0]["cohort"] + cohort2 = models[1]["cohort"] + cohort3 = models[2]["cohort"] + cohort4 = models[3]["cohort"] + academy1 = models[0]["academy"] + academy2 = models[1]["academy"] + academy3 = models[2]["academy"] + academy4 = models[3]["academy"] + user1 = models[0]["user"] + user2 = models[1]["user"] + user3 = models[2]["user"] + user4 = models[3]["user"] key = base.device_id.key - url = url.replace('%2C', ',') - - expected = '\r\n'.join([ - 'BEGIN:VCALENDAR', - 'VERSION:2.0', - f'PRODID:-//4Geeks//Academy Cohorts (1\\,2) {key}//EN', - 'METHOD:PUBLISH', - 'REFRESH-INTERVAL;VALUE=DURATION:PT15M', - self.line_limit(f'URL:http://localhost:8000{url}'), - 'X-WR-CALDESC:', - f'X-WR-CALNAME:Academy - Cohorts', - - # ================================================================= - # Event - 'BEGIN:VEVENT', - f'SUMMARY:{cohort1.name}', - f'DTSTART:{self.datetime_to_ical(cohort1.kickoff_date)}', - f'DTEND:{self.datetime_to_ical(cohort1.ending_date)}', - f'DTSTAMP:{self.datetime_to_ical(cohort1.created_at)}', - f'UID:breathecode_cohort_{cohort1.id}_{key}', - f'LOCATION:{academy1.name}', - self.line_limit(f'ORGANIZER;CN="{user1.first_name} {user1.last_name}";ROLE=OWNER:MAILTO:{user1.email}'), - 'END:VEVENT', - - # ================================================================= - # Event - 'BEGIN:VEVENT', - f'SUMMARY:{cohort2.name}', - f'DTSTART:{self.datetime_to_ical(cohort2.kickoff_date)}', - f'DTEND:{self.datetime_to_ical(cohort2.ending_date)}', - f'DTSTAMP:{self.datetime_to_ical(cohort2.created_at)}', - f'UID:breathecode_cohort_{cohort2.id}_{key}', - f'LOCATION:{academy2.name}', - self.line_limit(f'ORGANIZER;CN="{user2.first_name} {user2.last_name}";ROLE=OWNER:MAILTO:{user2.email}'), - 'END:VEVENT', - - # ================================================================= - # Event - 'BEGIN:VEVENT', - f'SUMMARY:{cohort3.name}', - f'DTSTART:{self.datetime_to_ical(cohort3.kickoff_date)}', - f'DTEND:{self.datetime_to_ical(cohort3.ending_date)}', - f'DTSTAMP:{self.datetime_to_ical(cohort3.created_at)}', - f'UID:breathecode_cohort_{cohort3.id}_{key}', - f'LOCATION:{academy3.name}', - self.line_limit(f'ORGANIZER;CN="{user3.first_name} {user3.last_name}";ROLE=OWNER:MAILTO:{user3.email}'), - 'END:VEVENT', - - # ================================================================= - # Event - 'BEGIN:VEVENT', - f'SUMMARY:{cohort4.name}', - f'DTSTART:{self.datetime_to_ical(cohort4.kickoff_date)}', - f'DTEND:{self.datetime_to_ical(cohort4.ending_date)}', - f'DTSTAMP:{self.datetime_to_ical(cohort4.created_at)}', - f'UID:breathecode_cohort_{cohort4.id}_{key}', - f'LOCATION:{academy4.name}', - self.line_limit(f'ORGANIZER;CN="{user4.first_name} {user4.last_name}";ROLE=OWNER:MAILTO:{user4.email}'), - 'END:VEVENT', - 'END:VCALENDAR', - '', - ]) - - self.assertEqual(response.content.decode('utf-8'), expected) + url = url.replace("%2C", ",") + + expected = "\r\n".join( + [ + "BEGIN:VCALENDAR", + "VERSION:2.0", + f"PRODID:-//4Geeks//Academy Cohorts (1\\,2) {key}//EN", + "METHOD:PUBLISH", + "REFRESH-INTERVAL;VALUE=DURATION:PT15M", + 
self.line_limit(f"URL:http://localhost:8000{url}"), + "X-WR-CALDESC:", + f"X-WR-CALNAME:Academy - Cohorts", + # ================================================================= + # Event + "BEGIN:VEVENT", + f"SUMMARY:{cohort1.name}", + f"DTSTART:{self.datetime_to_ical(cohort1.kickoff_date)}", + f"DTEND:{self.datetime_to_ical(cohort1.ending_date)}", + f"DTSTAMP:{self.datetime_to_ical(cohort1.created_at)}", + f"UID:breathecode_cohort_{cohort1.id}_{key}", + f"LOCATION:{academy1.name}", + self.line_limit(f'ORGANIZER;CN="{user1.first_name} {user1.last_name}";ROLE=OWNER:MAILTO:{user1.email}'), + "END:VEVENT", + # ================================================================= + # Event + "BEGIN:VEVENT", + f"SUMMARY:{cohort2.name}", + f"DTSTART:{self.datetime_to_ical(cohort2.kickoff_date)}", + f"DTEND:{self.datetime_to_ical(cohort2.ending_date)}", + f"DTSTAMP:{self.datetime_to_ical(cohort2.created_at)}", + f"UID:breathecode_cohort_{cohort2.id}_{key}", + f"LOCATION:{academy2.name}", + self.line_limit(f'ORGANIZER;CN="{user2.first_name} {user2.last_name}";ROLE=OWNER:MAILTO:{user2.email}'), + "END:VEVENT", + # ================================================================= + # Event + "BEGIN:VEVENT", + f"SUMMARY:{cohort3.name}", + f"DTSTART:{self.datetime_to_ical(cohort3.kickoff_date)}", + f"DTEND:{self.datetime_to_ical(cohort3.ending_date)}", + f"DTSTAMP:{self.datetime_to_ical(cohort3.created_at)}", + f"UID:breathecode_cohort_{cohort3.id}_{key}", + f"LOCATION:{academy3.name}", + self.line_limit(f'ORGANIZER;CN="{user3.first_name} {user3.last_name}";ROLE=OWNER:MAILTO:{user3.email}'), + "END:VEVENT", + # ================================================================= + # Event + "BEGIN:VEVENT", + f"SUMMARY:{cohort4.name}", + f"DTSTART:{self.datetime_to_ical(cohort4.kickoff_date)}", + f"DTEND:{self.datetime_to_ical(cohort4.ending_date)}", + f"DTSTAMP:{self.datetime_to_ical(cohort4.created_at)}", + f"UID:breathecode_cohort_{cohort4.id}_{key}", + f"LOCATION:{academy4.name}", + self.line_limit(f'ORGANIZER;CN="{user4.first_name} {user4.last_name}";ROLE=OWNER:MAILTO:{user4.email}'), + "END:VEVENT", + "END:VCALENDAR", + "", + ] + ) + + self.assertEqual(response.content.decode("utf-8"), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) """ 🔽🔽🔽 With first cohort day and last cohort day """ - @patch('breathecode.events.tasks.build_live_classes_from_timeslot.delay', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.events.tasks.build_live_classes_from_timeslot.delay", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_ical_cohorts__with_one__first_day__last_day(self): """Test /academy/cohort without auth""" - device_id_kwargs = {'name': 'server'} + device_id_kwargs = {"name": "server"} cohort_kwargs = { - 'kickoff_date': datetime(year=2029, month=1, day=10, tzinfo=pytz.timezone('UTC')), - 'ending_date': datetime(year=2030, month=10, day=10, tzinfo=pytz.timezone('UTC')), + "kickoff_date": datetime(year=2029, month=1, day=10, tzinfo=pytz.timezone("UTC")), + "ending_date": datetime(year=2030, month=10, day=10, tzinfo=pytz.timezone("UTC")), } starting_datetime_integer = 202810080030 ending_datetime_integer = 202810080630 
cohort_time_slot_kwargs = { - 'timezone': 'Europe/Madrid', - 'starting_at': starting_datetime_integer, - 'ending_at': ending_datetime_integer, + "timezone": "Europe/Madrid", + "starting_at": starting_datetime_integer, + "ending_at": ending_datetime_integer, } - model = self.generate_models(academy=True, - cohort=True, - device_id=True, - cohort_time_slot=True, - device_id_kwargs=device_id_kwargs, - cohort_kwargs=cohort_kwargs, - cohort_time_slot_kwargs=cohort_time_slot_kwargs) - - url = reverse_lazy('events:ical_cohorts') - args = {'academy': '1'} - response = self.client.get(url + '?' + urllib.parse.urlencode(args)) - - cohort = model['cohort'] - timeslot = model['cohort_time_slot'] - academy = model['academy'] - last_timeslot_starting_at = datetime(year=2030, - month=10, - day=6, - hour=0, - minute=30, - tzinfo=gettz('Europe/Madrid')) - - last_timeslot_ending_at = datetime(year=2030, month=10, day=6, hour=6, minute=30, tzinfo=gettz('Europe/Madrid')) + model = self.generate_models( + academy=True, + cohort=True, + device_id=True, + cohort_time_slot=True, + device_id_kwargs=device_id_kwargs, + cohort_kwargs=cohort_kwargs, + cohort_time_slot_kwargs=cohort_time_slot_kwargs, + ) + + url = reverse_lazy("events:ical_cohorts") + args = {"academy": "1"} + response = self.client.get(url + "?" + urllib.parse.urlencode(args)) + + cohort = model["cohort"] + timeslot = model["cohort_time_slot"] + academy = model["academy"] + last_timeslot_starting_at = datetime( + year=2030, month=10, day=6, hour=0, minute=30, tzinfo=gettz("Europe/Madrid") + ) + + last_timeslot_ending_at = datetime(year=2030, month=10, day=6, hour=6, minute=30, tzinfo=gettz("Europe/Madrid")) key = model.device_id.key starting_at = DatetimeInteger.to_datetime(model.cohort_time_slot.timezone, model.cohort_time_slot.starting_at) - starting_at_fixed = self.datetime_to_ical(fix_datetime_weekday(model.cohort.kickoff_date, - starting_at, - next=True), - utc=False) + starting_at_fixed = self.datetime_to_ical( + fix_datetime_weekday(model.cohort.kickoff_date, starting_at, next=True), utc=False + ) ending_at = DatetimeInteger.to_datetime(timeslot.timezone, timeslot.ending_at) - ending_at_fixed = self.datetime_to_ical(fix_datetime_weekday(model.cohort.kickoff_date, ending_at, next=True), - utc=False) - - expected = '\r\n'.join([ - 'BEGIN:VCALENDAR', - 'VERSION:2.0', - f'PRODID:-//4Geeks//Academy Cohorts (1) {key}//EN', - 'METHOD:PUBLISH', - 'REFRESH-INTERVAL;VALUE=DURATION:PT15M', - 'URL:http://localhost:8000/v1/events/ical/cohorts?academy=1', - 'X-WR-CALDESC:', - f'X-WR-CALNAME:Academy - Cohorts', - - # ================================================================= - # First event - 'BEGIN:VEVENT', - f'SUMMARY:{cohort.name} - First day', - f'DTSTART;TZID=Europe/Madrid:{starting_at_fixed}', - f'DTEND;TZID=Europe/Madrid:{ending_at_fixed}', - f'DTSTAMP:{self.datetime_to_ical(timeslot.created_at)}', - f'UID:breathecode_cohort_{cohort.id}_first_{key}', - f'LOCATION:{academy.name}', - 'END:VEVENT', - - # Event - 'BEGIN:VEVENT', - f'SUMMARY:{cohort.name}', - f'DTSTART:{self.datetime_to_ical(cohort.kickoff_date)}', - f'DTEND:{self.datetime_to_ical(cohort.ending_date)}', - f'DTSTAMP:{self.datetime_to_ical(cohort.created_at)}', - f'UID:breathecode_cohort_{cohort.id}_{key}', - f'LOCATION:{academy.name}', - 'END:VEVENT', - - # Last event - 'BEGIN:VEVENT', - f'SUMMARY:{cohort.name} - Last day', - f'DTSTART;TZID=Europe/Madrid:{self.datetime_to_ical(last_timeslot_starting_at, utc=False)}', - 
f'DTEND;TZID=Europe/Madrid:{self.datetime_to_ical(last_timeslot_ending_at, utc=False)}', - f'DTSTAMP:{self.datetime_to_ical(timeslot.created_at)}', - f'UID:breathecode_cohort_{cohort.id}_last_{key}', - f'LOCATION:{academy.name}', - 'END:VEVENT', - 'END:VCALENDAR', - '', - ]) - - self.assertEqual(response.content.decode('utf-8'), expected) + ending_at_fixed = self.datetime_to_ical( + fix_datetime_weekday(model.cohort.kickoff_date, ending_at, next=True), utc=False + ) + + expected = "\r\n".join( + [ + "BEGIN:VCALENDAR", + "VERSION:2.0", + f"PRODID:-//4Geeks//Academy Cohorts (1) {key}//EN", + "METHOD:PUBLISH", + "REFRESH-INTERVAL;VALUE=DURATION:PT15M", + "URL:http://localhost:8000/v1/events/ical/cohorts?academy=1", + "X-WR-CALDESC:", + f"X-WR-CALNAME:Academy - Cohorts", + # ================================================================= + # First event + "BEGIN:VEVENT", + f"SUMMARY:{cohort.name} - First day", + f"DTSTART;TZID=Europe/Madrid:{starting_at_fixed}", + f"DTEND;TZID=Europe/Madrid:{ending_at_fixed}", + f"DTSTAMP:{self.datetime_to_ical(timeslot.created_at)}", + f"UID:breathecode_cohort_{cohort.id}_first_{key}", + f"LOCATION:{academy.name}", + "END:VEVENT", + # Event + "BEGIN:VEVENT", + f"SUMMARY:{cohort.name}", + f"DTSTART:{self.datetime_to_ical(cohort.kickoff_date)}", + f"DTEND:{self.datetime_to_ical(cohort.ending_date)}", + f"DTSTAMP:{self.datetime_to_ical(cohort.created_at)}", + f"UID:breathecode_cohort_{cohort.id}_{key}", + f"LOCATION:{academy.name}", + "END:VEVENT", + # Last event + "BEGIN:VEVENT", + f"SUMMARY:{cohort.name} - Last day", + f"DTSTART;TZID=Europe/Madrid:{self.datetime_to_ical(last_timeslot_starting_at, utc=False)}", + f"DTEND;TZID=Europe/Madrid:{self.datetime_to_ical(last_timeslot_ending_at, utc=False)}", + f"DTSTAMP:{self.datetime_to_ical(timeslot.created_at)}", + f"UID:breathecode_cohort_{cohort.id}_last_{key}", + f"LOCATION:{academy.name}", + "END:VEVENT", + "END:VCALENDAR", + "", + ] + ) + + self.assertEqual(response.content.decode("utf-8"), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - @patch('breathecode.events.tasks.build_live_classes_from_timeslot.delay', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.events.tasks.build_live_classes_from_timeslot.delay", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_ical_cohorts__with_one__first_day__last_day__timeslot_not_recurrent(self): """Test /academy/cohort without auth""" - device_id_kwargs = {'name': 'server'} + device_id_kwargs = {"name": "server"} cohort_kwargs = { - 'kickoff_date': datetime(year=2020, month=10, day=10, tzinfo=pytz.timezone('UTC')), - 'ending_date': datetime(year=2030, month=10, day=10, tzinfo=pytz.timezone('UTC')), + "kickoff_date": datetime(year=2020, month=10, day=10, tzinfo=pytz.timezone("UTC")), + "ending_date": datetime(year=2030, month=10, day=10, tzinfo=pytz.timezone("UTC")), } starting_datetime_integer = 202510080030 ending_datetime_integer = 202510080630 cohort_time_slot_kwargs = { - 'starting_at': starting_datetime_integer, - 'ending_at': ending_datetime_integer, - 'timezone': 'Europe/Madrid', - 'recurrent': False, + "starting_at": starting_datetime_integer, + "ending_at": ending_datetime_integer, + 
"timezone": "Europe/Madrid", + "recurrent": False, } - model = self.generate_models(academy=True, - cohort=True, - device_id=True, - cohort_time_slot=True, - device_id_kwargs=device_id_kwargs, - cohort_kwargs=cohort_kwargs, - cohort_time_slot_kwargs=cohort_time_slot_kwargs) - - url = reverse_lazy('events:ical_cohorts') - args = {'academy': '1'} - response = self.client.get(url + '?' + urllib.parse.urlencode(args)) - - cohort = model['cohort'] - timeslot = model['cohort_time_slot'] - academy = model['academy'] + model = self.generate_models( + academy=True, + cohort=True, + device_id=True, + cohort_time_slot=True, + device_id_kwargs=device_id_kwargs, + cohort_kwargs=cohort_kwargs, + cohort_time_slot_kwargs=cohort_time_slot_kwargs, + ) + + url = reverse_lazy("events:ical_cohorts") + args = {"academy": "1"} + response = self.client.get(url + "?" + urllib.parse.urlencode(args)) + + cohort = model["cohort"] + timeslot = model["cohort_time_slot"] + academy = model["academy"] key = model.device_id.key starting_at = DatetimeInteger.to_datetime(timeslot.timezone, timeslot.starting_at) @@ -891,102 +921,100 @@ def test_ical_cohorts__with_one__first_day__last_day__timeslot_not_recurrent(sel last_timeslot_ending_at = self.datetime_to_ical(ending_at, utc=False) - expected = '\r\n'.join([ - 'BEGIN:VCALENDAR', - 'VERSION:2.0', - f'PRODID:-//4Geeks//Academy Cohorts (1) {key}//EN', - 'METHOD:PUBLISH', - 'REFRESH-INTERVAL;VALUE=DURATION:PT15M', - 'URL:http://localhost:8000/v1/events/ical/cohorts?academy=1', - 'X-WR-CALDESC:', - f'X-WR-CALNAME:Academy - Cohorts', - - # ================================================================= - # First event - 'BEGIN:VEVENT', - f'SUMMARY:{cohort.name} - First day', - f'DTSTART;TZID=Europe/Madrid:{first_timeslot_starting_at}', - f'DTEND;TZID=Europe/Madrid:{first_timeslot_ending_at}', - f'DTSTAMP:{self.datetime_to_ical(timeslot.created_at)}', - f'UID:breathecode_cohort_{cohort.id}_first_{key}', - f'LOCATION:{academy.name}', - 'END:VEVENT', - - # Event - 'BEGIN:VEVENT', - f'SUMMARY:{cohort.name}', - f'DTSTART:{self.datetime_to_ical(cohort.kickoff_date)}', - f'DTEND:{self.datetime_to_ical(cohort.ending_date)}', - f'DTSTAMP:{self.datetime_to_ical(cohort.created_at)}', - f'UID:breathecode_cohort_{cohort.id}_{key}', - f'LOCATION:{academy.name}', - 'END:VEVENT', - - # Last event - 'BEGIN:VEVENT', - f'SUMMARY:{cohort.name} - Last day', - f'DTSTART;TZID=Europe/Madrid:{last_timeslot_starting_at}', - f'DTEND;TZID=Europe/Madrid:{last_timeslot_ending_at}', - f'DTSTAMP:{self.datetime_to_ical(timeslot.created_at)}', - f'UID:breathecode_cohort_{cohort.id}_last_{key}', - f'LOCATION:{academy.name}', - 'END:VEVENT', - 'END:VCALENDAR', - '', - ]) - - self.assertEqual(response.content.decode('utf-8'), expected) + expected = "\r\n".join( + [ + "BEGIN:VCALENDAR", + "VERSION:2.0", + f"PRODID:-//4Geeks//Academy Cohorts (1) {key}//EN", + "METHOD:PUBLISH", + "REFRESH-INTERVAL;VALUE=DURATION:PT15M", + "URL:http://localhost:8000/v1/events/ical/cohorts?academy=1", + "X-WR-CALDESC:", + f"X-WR-CALNAME:Academy - Cohorts", + # ================================================================= + # First event + "BEGIN:VEVENT", + f"SUMMARY:{cohort.name} - First day", + f"DTSTART;TZID=Europe/Madrid:{first_timeslot_starting_at}", + f"DTEND;TZID=Europe/Madrid:{first_timeslot_ending_at}", + f"DTSTAMP:{self.datetime_to_ical(timeslot.created_at)}", + f"UID:breathecode_cohort_{cohort.id}_first_{key}", + f"LOCATION:{academy.name}", + "END:VEVENT", + # Event + "BEGIN:VEVENT", + f"SUMMARY:{cohort.name}", + 
f"DTSTART:{self.datetime_to_ical(cohort.kickoff_date)}", + f"DTEND:{self.datetime_to_ical(cohort.ending_date)}", + f"DTSTAMP:{self.datetime_to_ical(cohort.created_at)}", + f"UID:breathecode_cohort_{cohort.id}_{key}", + f"LOCATION:{academy.name}", + "END:VEVENT", + # Last event + "BEGIN:VEVENT", + f"SUMMARY:{cohort.name} - Last day", + f"DTSTART;TZID=Europe/Madrid:{last_timeslot_starting_at}", + f"DTEND;TZID=Europe/Madrid:{last_timeslot_ending_at}", + f"DTSTAMP:{self.datetime_to_ical(timeslot.created_at)}", + f"UID:breathecode_cohort_{cohort.id}_last_{key}", + f"LOCATION:{academy.name}", + "END:VEVENT", + "END:VCALENDAR", + "", + ] + ) + + self.assertEqual(response.content.decode("utf-8"), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - @patch('breathecode.events.tasks.build_live_classes_from_timeslot.delay', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.events.tasks.build_live_classes_from_timeslot.delay", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_ical_cohorts__with_two__first_day__last_day__two_timeslots(self): """Test /academy/cohort without auth""" - device_id_kwargs = {'name': 'server'} + device_id_kwargs = {"name": "server"} cohort_kwargs = { - 'kickoff_date': datetime(year=2020, month=10, day=10, tzinfo=pytz.timezone('UTC')), - 'ending_date': datetime(year=2030, month=10, day=10, tzinfo=pytz.timezone('UTC')), + "kickoff_date": datetime(year=2020, month=10, day=10, tzinfo=pytz.timezone("UTC")), + "ending_date": datetime(year=2030, month=10, day=10, tzinfo=pytz.timezone("UTC")), } first_cohort_time_slot_kwargs = { - 'timezone': 'Europe/Madrid', - 'starting_at': 202110080030, - 'ending_at': 202110080630, + "timezone": "Europe/Madrid", + "starting_at": 202110080030, + "ending_at": 202110080630, } last_cohort_time_slot_kwargs = { - 'timezone': 'Europe/Madrid', - 'starting_at': 202810080030, - 'ending_at': 202810080630, + "timezone": "Europe/Madrid", + "starting_at": 202810080030, + "ending_at": 202810080630, } cohort_time_slots = [ { **first_cohort_time_slot_kwargs, - 'cohort_id': 1, + "cohort_id": 1, }, { **last_cohort_time_slot_kwargs, - 'cohort_id': 1, + "cohort_id": 1, }, { **first_cohort_time_slot_kwargs, - 'cohort_id': 2, + "cohort_id": 2, }, { **last_cohort_time_slot_kwargs, - 'cohort_id': 2, + "cohort_id": 2, }, ] - model = self.generate_models(academy=True, - cohort=(2, cohort_kwargs), - device_id=device_id_kwargs, - cohort_time_slot=cohort_time_slots) + model = self.generate_models( + academy=True, cohort=(2, cohort_kwargs), device_id=device_id_kwargs, cohort_time_slot=cohort_time_slots + ) - url = reverse_lazy('events:ical_cohorts') - args = {'academy': '1'} - response = self.client.get(url + '?' + urllib.parse.urlencode(args)) + url = reverse_lazy("events:ical_cohorts") + args = {"academy": "1"} + response = self.client.get(url + "?" 
+ urllib.parse.urlencode(args)) cohort1 = model.cohort[0] cohort2 = model.cohort[1] @@ -1007,163 +1035,173 @@ def test_ical_cohorts__with_two__first_day__last_day__two_timeslots(self): ending_at1 = self.datetime_to_ical(model.cohort[0].ending_date) ending_at2 = self.datetime_to_ical(model.cohort[1].ending_date) - first_timeslot_starting_at = self.datetime_to_ical(fix_datetime_weekday( - model.cohort[0].kickoff_date, - DatetimeInteger.to_datetime(timeslot1.timezone, first_cohort_time_slot_kwargs['starting_at']), - next=True), - utc=False) + first_timeslot_starting_at = self.datetime_to_ical( + fix_datetime_weekday( + model.cohort[0].kickoff_date, + DatetimeInteger.to_datetime(timeslot1.timezone, first_cohort_time_slot_kwargs["starting_at"]), + next=True, + ), + utc=False, + ) first_timeslot_starting_at_utc1 = self.datetime_to_ical(timeslot1.created_at, utc=True) first_timeslot_starting_at_utc2 = self.datetime_to_ical(timeslot3.created_at, utc=True) - first_timeslot_ending_at = self.datetime_to_ical(fix_datetime_weekday( - model.cohort[0].kickoff_date, - DatetimeInteger.to_datetime(timeslot1.timezone, first_cohort_time_slot_kwargs['ending_at']), - next=True), - utc=False) - - last_timeslot_starting_at = self.datetime_to_ical(fix_datetime_weekday( - model.cohort[0].ending_date, - DatetimeInteger.to_datetime(timeslot2.timezone, last_cohort_time_slot_kwargs['starting_at']), - prev=True), - utc=False) + first_timeslot_ending_at = self.datetime_to_ical( + fix_datetime_weekday( + model.cohort[0].kickoff_date, + DatetimeInteger.to_datetime(timeslot1.timezone, first_cohort_time_slot_kwargs["ending_at"]), + next=True, + ), + utc=False, + ) + + last_timeslot_starting_at = self.datetime_to_ical( + fix_datetime_weekday( + model.cohort[0].ending_date, + DatetimeInteger.to_datetime(timeslot2.timezone, last_cohort_time_slot_kwargs["starting_at"]), + prev=True, + ), + utc=False, + ) last_timeslot_starting_at_utc = self.datetime_to_ical(timeslot2.cohort.created_at, utc=True) - last_timeslot_ending_at = self.datetime_to_ical(fix_datetime_weekday( - model.cohort[1].ending_date, - DatetimeInteger.to_datetime(timeslot2.timezone, last_cohort_time_slot_kwargs['ending_at']), - prev=True), - utc=False) - - expected = '\r\n'.join([ - 'BEGIN:VCALENDAR', - 'VERSION:2.0', - f'PRODID:-//4Geeks//Academy Cohorts (1) {key}//EN', - 'METHOD:PUBLISH', - 'REFRESH-INTERVAL;VALUE=DURATION:PT15M', - 'URL:http://localhost:8000/v1/events/ical/cohorts?academy=1', - 'X-WR-CALDESC:', - f'X-WR-CALNAME:Academy - Cohorts', - - # ================================================================= - # First event - 'BEGIN:VEVENT', - f'SUMMARY:{cohort1.name} - First day', - f'DTSTART;TZID=Europe/Madrid:{first_timeslot_starting_at}', - f'DTEND;TZID=Europe/Madrid:{first_timeslot_ending_at}', - f'DTSTAMP:{first_timeslot_starting_at_utc1}', - f'UID:breathecode_cohort_{cohort1.id}_first_{key}', - f'LOCATION:{academy.name}', - 'END:VEVENT', - - # Event - 'BEGIN:VEVENT', - f'SUMMARY:{cohort1.name}', - f'DTSTART:{starting_at1}', - f'DTEND:{ending_at1}', - f'DTSTAMP:{starting_at_utc1}', - f'UID:breathecode_cohort_{cohort1.id}_{key}', - f'LOCATION:{academy.name}', - 'END:VEVENT', - - # Last event - 'BEGIN:VEVENT', - f'SUMMARY:{cohort1.name} - Last day', - f'DTSTART;TZID=Europe/Madrid:{last_timeslot_starting_at}', - f'DTEND;TZID=Europe/Madrid:{last_timeslot_ending_at}', - f'DTSTAMP:{last_timeslot_starting_at_utc}', - f'UID:breathecode_cohort_{cohort1.id}_last_{key}', - f'LOCATION:{academy.name}', - 'END:VEVENT', - - # 
================================================================= - # First event - 'BEGIN:VEVENT', - f'SUMMARY:{cohort2.name} - First day', - f'DTSTART;TZID=Europe/Madrid:{first_timeslot_starting_at}', - f'DTEND;TZID=Europe/Madrid:{first_timeslot_ending_at}', - f'DTSTAMP:{first_timeslot_starting_at_utc2}', - f'UID:breathecode_cohort_{cohort2.id}_first_{key}', - f'LOCATION:{academy.name}', - 'END:VEVENT', - - # Event - 'BEGIN:VEVENT', - f'SUMMARY:{cohort2.name}', - f'DTSTART:{starting_at2}', - f'DTEND:{ending_at2}', - f'DTSTAMP:{starting_at_utc2}', - f'UID:breathecode_cohort_{cohort2.id}_{key}', - f'LOCATION:{academy.name}', - 'END:VEVENT', - - # Last event - 'BEGIN:VEVENT', - f'SUMMARY:{cohort2.name} - Last day', - f'DTSTART;TZID=Europe/Madrid:{last_timeslot_starting_at}', - f'DTEND;TZID=Europe/Madrid:{last_timeslot_ending_at}', - f'DTSTAMP:{last_timeslot_starting_at_utc}', - f'UID:breathecode_cohort_{cohort2.id}_last_{key}', - f'LOCATION:{academy.name}', - 'END:VEVENT', - 'END:VCALENDAR', - '', - ]) - - self.assertEqual(response.content.decode('utf-8'), expected) + last_timeslot_ending_at = self.datetime_to_ical( + fix_datetime_weekday( + model.cohort[1].ending_date, + DatetimeInteger.to_datetime(timeslot2.timezone, last_cohort_time_slot_kwargs["ending_at"]), + prev=True, + ), + utc=False, + ) + + expected = "\r\n".join( + [ + "BEGIN:VCALENDAR", + "VERSION:2.0", + f"PRODID:-//4Geeks//Academy Cohorts (1) {key}//EN", + "METHOD:PUBLISH", + "REFRESH-INTERVAL;VALUE=DURATION:PT15M", + "URL:http://localhost:8000/v1/events/ical/cohorts?academy=1", + "X-WR-CALDESC:", + f"X-WR-CALNAME:Academy - Cohorts", + # ================================================================= + # First event + "BEGIN:VEVENT", + f"SUMMARY:{cohort1.name} - First day", + f"DTSTART;TZID=Europe/Madrid:{first_timeslot_starting_at}", + f"DTEND;TZID=Europe/Madrid:{first_timeslot_ending_at}", + f"DTSTAMP:{first_timeslot_starting_at_utc1}", + f"UID:breathecode_cohort_{cohort1.id}_first_{key}", + f"LOCATION:{academy.name}", + "END:VEVENT", + # Event + "BEGIN:VEVENT", + f"SUMMARY:{cohort1.name}", + f"DTSTART:{starting_at1}", + f"DTEND:{ending_at1}", + f"DTSTAMP:{starting_at_utc1}", + f"UID:breathecode_cohort_{cohort1.id}_{key}", + f"LOCATION:{academy.name}", + "END:VEVENT", + # Last event + "BEGIN:VEVENT", + f"SUMMARY:{cohort1.name} - Last day", + f"DTSTART;TZID=Europe/Madrid:{last_timeslot_starting_at}", + f"DTEND;TZID=Europe/Madrid:{last_timeslot_ending_at}", + f"DTSTAMP:{last_timeslot_starting_at_utc}", + f"UID:breathecode_cohort_{cohort1.id}_last_{key}", + f"LOCATION:{academy.name}", + "END:VEVENT", + # ================================================================= + # First event + "BEGIN:VEVENT", + f"SUMMARY:{cohort2.name} - First day", + f"DTSTART;TZID=Europe/Madrid:{first_timeslot_starting_at}", + f"DTEND;TZID=Europe/Madrid:{first_timeslot_ending_at}", + f"DTSTAMP:{first_timeslot_starting_at_utc2}", + f"UID:breathecode_cohort_{cohort2.id}_first_{key}", + f"LOCATION:{academy.name}", + "END:VEVENT", + # Event + "BEGIN:VEVENT", + f"SUMMARY:{cohort2.name}", + f"DTSTART:{starting_at2}", + f"DTEND:{ending_at2}", + f"DTSTAMP:{starting_at_utc2}", + f"UID:breathecode_cohort_{cohort2.id}_{key}", + f"LOCATION:{academy.name}", + "END:VEVENT", + # Last event + "BEGIN:VEVENT", + f"SUMMARY:{cohort2.name} - Last day", + f"DTSTART;TZID=Europe/Madrid:{last_timeslot_starting_at}", + f"DTEND;TZID=Europe/Madrid:{last_timeslot_ending_at}", + f"DTSTAMP:{last_timeslot_starting_at_utc}", + f"UID:breathecode_cohort_{cohort2.id}_last_{key}", + 
f"LOCATION:{academy.name}", + "END:VEVENT", + "END:VCALENDAR", + "", + ] + ) + + self.assertEqual(response.content.decode("utf-8"), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - @patch('breathecode.events.tasks.build_live_classes_from_timeslot.delay', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.events.tasks.build_live_classes_from_timeslot.delay", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_ical_cohorts__with_two__first_day__last_day__two_timeslots__cohort_with_meeting_url(self): """Test /academy/cohort without auth""" - device_id_kwargs = {'name': 'server'} - cohort_kwargs = [{ - 'online_meeting_url': self.bc.fake.url(), - 'kickoff_date': datetime(year=2020, month=10, day=10, tzinfo=pytz.timezone('UTC')), - 'ending_date': datetime(year=2030, month=10, day=10, tzinfo=pytz.timezone('UTC')), - } for _ in range(0, 2)] + device_id_kwargs = {"name": "server"} + cohort_kwargs = [ + { + "online_meeting_url": self.bc.fake.url(), + "kickoff_date": datetime(year=2020, month=10, day=10, tzinfo=pytz.timezone("UTC")), + "ending_date": datetime(year=2030, month=10, day=10, tzinfo=pytz.timezone("UTC")), + } + for _ in range(0, 2) + ] first_cohort_time_slot_kwargs = { - 'timezone': 'Europe/Madrid', - 'starting_at': 202110080030, - 'ending_at': 202110080630, + "timezone": "Europe/Madrid", + "starting_at": 202110080030, + "ending_at": 202110080630, } last_cohort_time_slot_kwargs = { - 'timezone': 'Europe/Madrid', - 'starting_at': 202810080030, - 'ending_at': 202810080630, + "timezone": "Europe/Madrid", + "starting_at": 202810080030, + "ending_at": 202810080630, } cohort_time_slots = [ { **first_cohort_time_slot_kwargs, - 'cohort_id': 1, + "cohort_id": 1, }, { **last_cohort_time_slot_kwargs, - 'cohort_id': 1, + "cohort_id": 1, }, { **first_cohort_time_slot_kwargs, - 'cohort_id': 2, + "cohort_id": 2, }, { **last_cohort_time_slot_kwargs, - 'cohort_id': 2, + "cohort_id": 2, }, ] - model = self.generate_models(academy=True, - cohort=cohort_kwargs, - device_id=device_id_kwargs, - cohort_time_slot=cohort_time_slots) + model = self.generate_models( + academy=True, cohort=cohort_kwargs, device_id=device_id_kwargs, cohort_time_slot=cohort_time_slots + ) - url = reverse_lazy('events:ical_cohorts') - args = {'academy': '1'} - response = self.client.get(url + '?' + urllib.parse.urlencode(args)) + url = reverse_lazy("events:ical_cohorts") + args = {"academy": "1"} + response = self.client.get(url + "?" 
+ urllib.parse.urlencode(args)) cohort1 = model.cohort[0] cohort2 = model.cohort[1] @@ -1183,109 +1221,117 @@ def test_ical_cohorts__with_two__first_day__last_day__two_timeslots__cohort_with ending_at1 = self.datetime_to_ical(model.cohort[0].ending_date) ending_at2 = self.datetime_to_ical(model.cohort[1].ending_date) - first_timeslot_starting_at = self.datetime_to_ical(fix_datetime_weekday( - model.cohort[0].kickoff_date, - DatetimeInteger.to_datetime(timeslot1.timezone, first_cohort_time_slot_kwargs['starting_at']), - next=True), - utc=False) + first_timeslot_starting_at = self.datetime_to_ical( + fix_datetime_weekday( + model.cohort[0].kickoff_date, + DatetimeInteger.to_datetime(timeslot1.timezone, first_cohort_time_slot_kwargs["starting_at"]), + next=True, + ), + utc=False, + ) first_timeslot_starting_at_utc1 = self.datetime_to_ical(timeslot1.created_at, utc=True) first_timeslot_starting_at_utc2 = self.datetime_to_ical(timeslot3.created_at, utc=True) - first_timeslot_ending_at = self.datetime_to_ical(fix_datetime_weekday( - model.cohort[0].kickoff_date, - DatetimeInteger.to_datetime(timeslot1.timezone, first_cohort_time_slot_kwargs['ending_at']), - next=True), - utc=False) - - last_timeslot_starting_at = self.datetime_to_ical(fix_datetime_weekday( - model.cohort[0].ending_date, - DatetimeInteger.to_datetime(timeslot2.timezone, last_cohort_time_slot_kwargs['starting_at']), - prev=True), - utc=False) + first_timeslot_ending_at = self.datetime_to_ical( + fix_datetime_weekday( + model.cohort[0].kickoff_date, + DatetimeInteger.to_datetime(timeslot1.timezone, first_cohort_time_slot_kwargs["ending_at"]), + next=True, + ), + utc=False, + ) + + last_timeslot_starting_at = self.datetime_to_ical( + fix_datetime_weekday( + model.cohort[0].ending_date, + DatetimeInteger.to_datetime(timeslot2.timezone, last_cohort_time_slot_kwargs["starting_at"]), + prev=True, + ), + utc=False, + ) last_timeslot_starting_at_utc = self.datetime_to_ical(timeslot2.cohort.created_at, utc=True) - last_timeslot_ending_at = self.datetime_to_ical(fix_datetime_weekday( - model.cohort[1].ending_date, - DatetimeInteger.to_datetime(timeslot2.timezone, last_cohort_time_slot_kwargs['ending_at']), - prev=True), - utc=False) - - expected = '\r\n'.join([ - 'BEGIN:VCALENDAR', - 'VERSION:2.0', - f'PRODID:-//4Geeks//Academy Cohorts (1) {key}//EN', - 'METHOD:PUBLISH', - 'REFRESH-INTERVAL;VALUE=DURATION:PT15M', - 'URL:http://localhost:8000/v1/events/ical/cohorts?academy=1', - 'X-WR-CALDESC:', - f'X-WR-CALNAME:Academy - Cohorts', - - # ================================================================= - # First event - 'BEGIN:VEVENT', - f'SUMMARY:{cohort1.name} - First day', - f'DTSTART;TZID=Europe/Madrid:{first_timeslot_starting_at}', - f'DTEND;TZID=Europe/Madrid:{first_timeslot_ending_at}', - f'DTSTAMP:{first_timeslot_starting_at_utc1}', - f'UID:breathecode_cohort_{cohort1.id}_first_{key}', - f'LOCATION:{cohort_kwargs[0]["online_meeting_url"]}', - 'END:VEVENT', - - # Event - 'BEGIN:VEVENT', - f'SUMMARY:{cohort1.name}', - f'DTSTART:{starting_at1}', - f'DTEND:{ending_at1}', - f'DTSTAMP:{starting_at_utc1}', - f'UID:breathecode_cohort_{cohort1.id}_{key}', - f'LOCATION:{cohort_kwargs[0]["online_meeting_url"]}', - 'END:VEVENT', - - # Last event - 'BEGIN:VEVENT', - f'SUMMARY:{cohort1.name} - Last day', - f'DTSTART;TZID=Europe/Madrid:{last_timeslot_starting_at}', - f'DTEND;TZID=Europe/Madrid:{last_timeslot_ending_at}', - f'DTSTAMP:{last_timeslot_starting_at_utc}', - f'UID:breathecode_cohort_{cohort1.id}_last_{key}', - 
f'LOCATION:{cohort_kwargs[0]["online_meeting_url"]}', - 'END:VEVENT', - - # ================================================================= - # First event - 'BEGIN:VEVENT', - f'SUMMARY:{cohort2.name} - First day', - f'DTSTART;TZID=Europe/Madrid:{first_timeslot_starting_at}', - f'DTEND;TZID=Europe/Madrid:{first_timeslot_ending_at}', - f'DTSTAMP:{first_timeslot_starting_at_utc2}', - f'UID:breathecode_cohort_{cohort2.id}_first_{key}', - f'LOCATION:{cohort_kwargs[1]["online_meeting_url"]}', - 'END:VEVENT', - - # Event - 'BEGIN:VEVENT', - f'SUMMARY:{cohort2.name}', - f'DTSTART:{starting_at2}', - f'DTEND:{ending_at2}', - f'DTSTAMP:{starting_at_utc2}', - f'UID:breathecode_cohort_{cohort2.id}_{key}', - f'LOCATION:{cohort_kwargs[1]["online_meeting_url"]}', - 'END:VEVENT', - - # Last event - 'BEGIN:VEVENT', - f'SUMMARY:{cohort2.name} - Last day', - f'DTSTART;TZID=Europe/Madrid:{last_timeslot_starting_at}', - f'DTEND;TZID=Europe/Madrid:{last_timeslot_ending_at}', - f'DTSTAMP:{last_timeslot_starting_at_utc}', - f'UID:breathecode_cohort_{cohort2.id}_last_{key}', - f'LOCATION:{cohort_kwargs[1]["online_meeting_url"]}', - 'END:VEVENT', - 'END:VCALENDAR', - '', - ]) - - self.assertEqual(response.content.decode('utf-8'), expected) + last_timeslot_ending_at = self.datetime_to_ical( + fix_datetime_weekday( + model.cohort[1].ending_date, + DatetimeInteger.to_datetime(timeslot2.timezone, last_cohort_time_slot_kwargs["ending_at"]), + prev=True, + ), + utc=False, + ) + + expected = "\r\n".join( + [ + "BEGIN:VCALENDAR", + "VERSION:2.0", + f"PRODID:-//4Geeks//Academy Cohorts (1) {key}//EN", + "METHOD:PUBLISH", + "REFRESH-INTERVAL;VALUE=DURATION:PT15M", + "URL:http://localhost:8000/v1/events/ical/cohorts?academy=1", + "X-WR-CALDESC:", + f"X-WR-CALNAME:Academy - Cohorts", + # ================================================================= + # First event + "BEGIN:VEVENT", + f"SUMMARY:{cohort1.name} - First day", + f"DTSTART;TZID=Europe/Madrid:{first_timeslot_starting_at}", + f"DTEND;TZID=Europe/Madrid:{first_timeslot_ending_at}", + f"DTSTAMP:{first_timeslot_starting_at_utc1}", + f"UID:breathecode_cohort_{cohort1.id}_first_{key}", + f'LOCATION:{cohort_kwargs[0]["online_meeting_url"]}', + "END:VEVENT", + # Event + "BEGIN:VEVENT", + f"SUMMARY:{cohort1.name}", + f"DTSTART:{starting_at1}", + f"DTEND:{ending_at1}", + f"DTSTAMP:{starting_at_utc1}", + f"UID:breathecode_cohort_{cohort1.id}_{key}", + f'LOCATION:{cohort_kwargs[0]["online_meeting_url"]}', + "END:VEVENT", + # Last event + "BEGIN:VEVENT", + f"SUMMARY:{cohort1.name} - Last day", + f"DTSTART;TZID=Europe/Madrid:{last_timeslot_starting_at}", + f"DTEND;TZID=Europe/Madrid:{last_timeslot_ending_at}", + f"DTSTAMP:{last_timeslot_starting_at_utc}", + f"UID:breathecode_cohort_{cohort1.id}_last_{key}", + f'LOCATION:{cohort_kwargs[0]["online_meeting_url"]}', + "END:VEVENT", + # ================================================================= + # First event + "BEGIN:VEVENT", + f"SUMMARY:{cohort2.name} - First day", + f"DTSTART;TZID=Europe/Madrid:{first_timeslot_starting_at}", + f"DTEND;TZID=Europe/Madrid:{first_timeslot_ending_at}", + f"DTSTAMP:{first_timeslot_starting_at_utc2}", + f"UID:breathecode_cohort_{cohort2.id}_first_{key}", + f'LOCATION:{cohort_kwargs[1]["online_meeting_url"]}', + "END:VEVENT", + # Event + "BEGIN:VEVENT", + f"SUMMARY:{cohort2.name}", + f"DTSTART:{starting_at2}", + f"DTEND:{ending_at2}", + f"DTSTAMP:{starting_at_utc2}", + f"UID:breathecode_cohort_{cohort2.id}_{key}", + f'LOCATION:{cohort_kwargs[1]["online_meeting_url"]}', + "END:VEVENT", + # Last 
event + "BEGIN:VEVENT", + f"SUMMARY:{cohort2.name} - Last day", + f"DTSTART;TZID=Europe/Madrid:{last_timeslot_starting_at}", + f"DTEND;TZID=Europe/Madrid:{last_timeslot_ending_at}", + f"DTSTAMP:{last_timeslot_starting_at_utc}", + f"UID:breathecode_cohort_{cohort2.id}_last_{key}", + f'LOCATION:{cohort_kwargs[1]["online_meeting_url"]}', + "END:VEVENT", + "END:VCALENDAR", + "", + ] + ) + + self.assertEqual(response.content.decode("utf-8"), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) diff --git a/breathecode/events/tests/urls/tests_ical_events.py b/breathecode/events/tests/urls/tests_ical_events.py index fffb1fd70..45ded7464 100644 --- a/breathecode/events/tests/urls/tests_ical_events.py +++ b/breathecode/events/tests/urls/tests_ical_events.py @@ -1,6 +1,7 @@ """ Test /academy/cohort """ + import urllib from datetime import timedelta from unittest.mock import MagicMock, patch @@ -17,353 +18,381 @@ class AcademyCohortTestSuite(EventTestCase): def test_ical_events__without_academy(self): """Test /academy/cohort without auth""" - url = reverse_lazy('events:ical_events') - args = {'academy': '1'} - response = self.client.get(url + '?' + urllib.parse.urlencode(args)) + url = reverse_lazy("events:ical_events") + args = {"academy": "1"} + response = self.client.get(url + "?" + urllib.parse.urlencode(args)) json = response.json() - expected = {'detail': 'Some academy not exist', 'status_code': 400} + expected = {"detail": "Some academy not exist", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) def test_ical_events__without_events(self): """Test /academy/cohort without auth""" - device_id_kwargs = {'name': 'server'} + device_id_kwargs = {"name": "server"} model = self.generate_models(academy=True, device_id=True, device_id_kwargs=device_id_kwargs) - url = reverse_lazy('events:ical_events') - args = {'academy': '1'} - response = self.client.get(url + '?' + urllib.parse.urlencode(args)) + url = reverse_lazy("events:ical_events") + args = {"academy": "1"} + response = self.client.get(url + "?" + urllib.parse.urlencode(args)) key = model.device_id.key - expected = '\r\n'.join([ - 'BEGIN:VCALENDAR', - 'VERSION:2.0', - f'PRODID:-//4Geeks//Academy Events (1) {key}//EN', - 'METHOD:PUBLISH', - 'REFRESH-INTERVAL;VALUE=DURATION:PT15M', - 'URL:http://localhost:8000/v1/events/ical/events?academy=1', - 'X-WR-CALDESC:', - f'X-WR-CALNAME:Academy - Events', - 'END:VCALENDAR', - '', - ]) - - self.assertEqual(response.content.decode('utf-8'), expected) + expected = "\r\n".join( + [ + "BEGIN:VCALENDAR", + "VERSION:2.0", + f"PRODID:-//4Geeks//Academy Events (1) {key}//EN", + "METHOD:PUBLISH", + "REFRESH-INTERVAL;VALUE=DURATION:PT15M", + "URL:http://localhost:8000/v1/events/ical/events?academy=1", + "X-WR-CALDESC:", + f"X-WR-CALNAME:Academy - Events", + "END:VCALENDAR", + "", + ] + ) + + self.assertEqual(response.content.decode("utf-8"), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_ical_events__dont_get_status_draft(self): """Test /academy/cohort without auth""" - device_id_kwargs = {'name': 'server'} + device_id_kwargs = {"name": "server"} model = self.generate_models(academy=True, event=True, device_id=True, device_id_kwargs=device_id_kwargs) - url = reverse_lazy('events:ical_events') - args = {'academy': '1'} - response = self.client.get(url + '?' + urllib.parse.urlencode(args)) + url = reverse_lazy("events:ical_events") + args = {"academy": "1"} + response = self.client.get(url + "?" 
+ urllib.parse.urlencode(args)) - academy = model['academy'] + academy = model["academy"] key = model.device_id.key - expected = '\r\n'.join([ - 'BEGIN:VCALENDAR', - 'VERSION:2.0', - f'PRODID:-//4Geeks//Academy Events (1) {key}//EN', - 'METHOD:PUBLISH', - 'REFRESH-INTERVAL;VALUE=DURATION:PT15M', - 'URL:http://localhost:8000/v1/events/ical/events?academy=1', - 'X-WR-CALDESC:', - f'X-WR-CALNAME:Academy - Events', - 'END:VCALENDAR', - '', - ]) - - self.assertEqual(response.content.decode('utf-8'), expected) + expected = "\r\n".join( + [ + "BEGIN:VCALENDAR", + "VERSION:2.0", + f"PRODID:-//4Geeks//Academy Events (1) {key}//EN", + "METHOD:PUBLISH", + "REFRESH-INTERVAL;VALUE=DURATION:PT15M", + "URL:http://localhost:8000/v1/events/ical/events?academy=1", + "X-WR-CALDESC:", + f"X-WR-CALNAME:Academy - Events", + "END:VCALENDAR", + "", + ] + ) + + self.assertEqual(response.content.decode("utf-8"), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_ical_events__dont_get_status_deleted(self): """Test /academy/cohort without auth""" - event_kwargs = {'status': 'DELETED'} - device_id_kwargs = {'name': 'server'} - model = self.generate_models(academy=True, - event=True, - device_id=True, - event_kwargs=event_kwargs, - device_id_kwargs=device_id_kwargs) - - url = reverse_lazy('events:ical_events') - args = {'academy': '1'} - response = self.client.get(url + '?' + urllib.parse.urlencode(args)) - - academy = model['academy'] + event_kwargs = {"status": "DELETED"} + device_id_kwargs = {"name": "server"} + model = self.generate_models( + academy=True, event=True, device_id=True, event_kwargs=event_kwargs, device_id_kwargs=device_id_kwargs + ) + + url = reverse_lazy("events:ical_events") + args = {"academy": "1"} + response = self.client.get(url + "?" + urllib.parse.urlencode(args)) + + academy = model["academy"] key = model.device_id.key - expected = '\r\n'.join([ - 'BEGIN:VCALENDAR', - 'VERSION:2.0', - f'PRODID:-//4Geeks//Academy Events (1) {key}//EN', - 'METHOD:PUBLISH', - 'REFRESH-INTERVAL;VALUE=DURATION:PT15M', - 'URL:http://localhost:8000/v1/events/ical/events?academy=1', - 'X-WR-CALDESC:', - f'X-WR-CALNAME:Academy - Events', - 'END:VCALENDAR', - '', - ]) - - self.assertEqual(response.content.decode('utf-8'), expected) + expected = "\r\n".join( + [ + "BEGIN:VCALENDAR", + "VERSION:2.0", + f"PRODID:-//4Geeks//Academy Events (1) {key}//EN", + "METHOD:PUBLISH", + "REFRESH-INTERVAL;VALUE=DURATION:PT15M", + "URL:http://localhost:8000/v1/events/ical/events?academy=1", + "X-WR-CALDESC:", + f"X-WR-CALNAME:Academy - Events", + "END:VCALENDAR", + "", + ] + ) + + self.assertEqual(response.content.decode("utf-8"), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_ical_events__with_one(self): """Test /academy/cohort without auth""" - event_kwargs = {'status': 'ACTIVE'} - device_id_kwargs = {'name': 'server'} - model = self.generate_models(academy=True, - user=True, - event=True, - device_id=True, - event_kwargs=event_kwargs, - device_id_kwargs=device_id_kwargs) - - url = reverse_lazy('events:ical_events') - args = {'academy': '1'} - response = self.client.get(url + '?' 
+ urllib.parse.urlencode(args)) - - event = model['event'] - user = model['user'] - academy = model['academy'] + event_kwargs = {"status": "ACTIVE"} + device_id_kwargs = {"name": "server"} + model = self.generate_models( + academy=True, + user=True, + event=True, + device_id=True, + event_kwargs=event_kwargs, + device_id_kwargs=device_id_kwargs, + ) + + url = reverse_lazy("events:ical_events") + args = {"academy": "1"} + response = self.client.get(url + "?" + urllib.parse.urlencode(args)) + + event = model["event"] + user = model["user"] + academy = model["academy"] key = model.device_id.key - expected = '\r\n'.join([ - 'BEGIN:VCALENDAR', - 'VERSION:2.0', - f'PRODID:-//4Geeks//Academy Events (1) {key}//EN', - 'METHOD:PUBLISH', - 'REFRESH-INTERVAL;VALUE=DURATION:PT15M', - 'URL:http://localhost:8000/v1/events/ical/events?academy=1', - 'X-WR-CALDESC:', - f'X-WR-CALNAME:Academy - Events', - # event - 'BEGIN:VEVENT', - f'DTSTART:{self.datetime_to_ical(event.starting_at)}', - f'DTEND:{self.datetime_to_ical(event.ending_at)}', - f'DTSTAMP:{self.datetime_to_ical(event.created_at)}', - f'UID:breathecode_event_{event.id}_{key}', - self.line_limit(f'DESCRIPTION:Url: {event.url}\\nAcademy: ' - f'{event.academy.name}\\n'), - self.line_limit(f'ORGANIZER;CN="{user.first_name} {user.last_name}";ROLE=OWNER:MAILTO:{user.email}'), - 'END:VEVENT', - 'END:VCALENDAR', - '', - ]) - - self.assertEqual(response.content.decode('utf-8'), expected) + expected = "\r\n".join( + [ + "BEGIN:VCALENDAR", + "VERSION:2.0", + f"PRODID:-//4Geeks//Academy Events (1) {key}//EN", + "METHOD:PUBLISH", + "REFRESH-INTERVAL;VALUE=DURATION:PT15M", + "URL:http://localhost:8000/v1/events/ical/events?academy=1", + "X-WR-CALDESC:", + f"X-WR-CALNAME:Academy - Events", + # event + "BEGIN:VEVENT", + f"DTSTART:{self.datetime_to_ical(event.starting_at)}", + f"DTEND:{self.datetime_to_ical(event.ending_at)}", + f"DTSTAMP:{self.datetime_to_ical(event.created_at)}", + f"UID:breathecode_event_{event.id}_{key}", + self.line_limit(f"DESCRIPTION:Url: {event.url}\\nAcademy: " f"{event.academy.name}\\n"), + self.line_limit(f'ORGANIZER;CN="{user.first_name} {user.last_name}";ROLE=OWNER:MAILTO:{user.email}'), + "END:VEVENT", + "END:VCALENDAR", + "", + ] + ) + + self.assertEqual(response.content.decode("utf-8"), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_ical_events__with_one_and_online_event(self): """Test /academy/cohort without auth""" - event_kwargs = {'status': 'ACTIVE', 'online_event': True} - device_id_kwargs = {'name': 'server'} - model = self.generate_models(academy=True, - user=True, - event=True, - device_id=True, - event_kwargs=event_kwargs, - device_id_kwargs=device_id_kwargs) - - url = reverse_lazy('events:ical_events') - args = {'academy': '1'} - response = self.client.get(url + '?' + urllib.parse.urlencode(args)) - - event = model['event'] - user = model['user'] - academy = model['academy'] + event_kwargs = {"status": "ACTIVE", "online_event": True} + device_id_kwargs = {"name": "server"} + model = self.generate_models( + academy=True, + user=True, + event=True, + device_id=True, + event_kwargs=event_kwargs, + device_id_kwargs=device_id_kwargs, + ) + + url = reverse_lazy("events:ical_events") + args = {"academy": "1"} + response = self.client.get(url + "?" 
+ urllib.parse.urlencode(args)) + + event = model["event"] + user = model["user"] + academy = model["academy"] key = model.device_id.key - expected = '\r\n'.join([ - 'BEGIN:VCALENDAR', - 'VERSION:2.0', - f'PRODID:-//4Geeks//Academy Events (1) {key}//EN', - 'METHOD:PUBLISH', - 'REFRESH-INTERVAL;VALUE=DURATION:PT15M', - 'URL:http://localhost:8000/v1/events/ical/events?academy=1', - 'X-WR-CALDESC:', - f'X-WR-CALNAME:Academy - Events', - # event - 'BEGIN:VEVENT', - f'DTSTART:{self.datetime_to_ical(event.starting_at)}', - f'DTEND:{self.datetime_to_ical(event.ending_at)}', - f'DTSTAMP:{self.datetime_to_ical(event.created_at)}', - f'UID:breathecode_event_{event.id}_{key}', - self.line_limit(f'DESCRIPTION:Url: {event.url}\\nAcademy: ' - f'{event.academy.name}\\nLocation: online\\n'), - self.line_limit(f'ORGANIZER;CN="{user.first_name} {user.last_name}";ROLE=OWNER:MAILTO:{user.email}'), - 'END:VEVENT', - 'END:VCALENDAR', - '', - ]) - - self.assertEqual(response.content.decode('utf-8'), expected) + expected = "\r\n".join( + [ + "BEGIN:VCALENDAR", + "VERSION:2.0", + f"PRODID:-//4Geeks//Academy Events (1) {key}//EN", + "METHOD:PUBLISH", + "REFRESH-INTERVAL;VALUE=DURATION:PT15M", + "URL:http://localhost:8000/v1/events/ical/events?academy=1", + "X-WR-CALDESC:", + f"X-WR-CALNAME:Academy - Events", + # event + "BEGIN:VEVENT", + f"DTSTART:{self.datetime_to_ical(event.starting_at)}", + f"DTEND:{self.datetime_to_ical(event.ending_at)}", + f"DTSTAMP:{self.datetime_to_ical(event.created_at)}", + f"UID:breathecode_event_{event.id}_{key}", + self.line_limit( + f"DESCRIPTION:Url: {event.url}\\nAcademy: " f"{event.academy.name}\\nLocation: online\\n" + ), + self.line_limit(f'ORGANIZER;CN="{user.first_name} {user.last_name}";ROLE=OWNER:MAILTO:{user.email}'), + "END:VEVENT", + "END:VCALENDAR", + "", + ] + ) + + self.assertEqual(response.content.decode("utf-8"), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_ical_events__with_one_and_venue(self): """Test /academy/cohort without auth""" - event_kwargs = {'status': 'ACTIVE'} - device_id_kwargs = {'name': 'server'} + event_kwargs = {"status": "ACTIVE"} + device_id_kwargs = {"name": "server"} venue_kwargs = { - 'title': 'Title', - 'street_address': 'Street 2 #10-51', - 'city': 'Gaira', - 'state': 'Magdalena', - 'country': 'Colombia', + "title": "Title", + "street_address": "Street 2 #10-51", + "city": "Gaira", + "state": "Magdalena", + "country": "Colombia", } - model = self.generate_models(academy=True, - user=True, - event=True, - device_id=True, - venue=True, - event_kwargs=event_kwargs, - venue_kwargs=venue_kwargs, - device_id_kwargs=device_id_kwargs) - url = reverse_lazy('events:ical_events') - args = {'academy': '1'} - response = self.client.get(url + '?' + urllib.parse.urlencode(args)) - - event = model['event'] - user = model['user'] - academy = model['academy'] + model = self.generate_models( + academy=True, + user=True, + event=True, + device_id=True, + venue=True, + event_kwargs=event_kwargs, + venue_kwargs=venue_kwargs, + device_id_kwargs=device_id_kwargs, + ) + url = reverse_lazy("events:ical_events") + args = {"academy": "1"} + response = self.client.get(url + "?" 
+ urllib.parse.urlencode(args)) + + event = model["event"] + user = model["user"] + academy = model["academy"] key = model.device_id.key - expected = '\r\n'.join([ - 'BEGIN:VCALENDAR', - 'VERSION:2.0', - f'PRODID:-//4Geeks//Academy Events (1) {key}//EN', - 'METHOD:PUBLISH', - 'REFRESH-INTERVAL;VALUE=DURATION:PT15M', - 'URL:http://localhost:8000/v1/events/ical/events?academy=1', - 'X-WR-CALDESC:', - f'X-WR-CALNAME:Academy - Events', - # event - 'BEGIN:VEVENT', - f'DTSTART:{self.datetime_to_ical(event.starting_at)}', - f'DTEND:{self.datetime_to_ical(event.ending_at)}', - f'DTSTAMP:{self.datetime_to_ical(event.created_at)}', - f'UID:breathecode_event_{event.id}_{key}', - self.line_limit(f'DESCRIPTION:Url: {event.url}\\nAcademy: ' - f'{event.academy.name}\\nVenue: {event.venue.title}\\n' - ''), - 'LOCATION:Street 2 #10-51\\, Gaira\\, Magdalena\\, Colombia', - self.line_limit(f'ORGANIZER;CN="{user.first_name} {user.last_name}";ROLE=OWNER:MAILTO:{user.email}'), - 'END:VEVENT', - 'END:VCALENDAR', - '', - ]) - - self.assertEqual(response.content.decode('utf-8'), expected) + expected = "\r\n".join( + [ + "BEGIN:VCALENDAR", + "VERSION:2.0", + f"PRODID:-//4Geeks//Academy Events (1) {key}//EN", + "METHOD:PUBLISH", + "REFRESH-INTERVAL;VALUE=DURATION:PT15M", + "URL:http://localhost:8000/v1/events/ical/events?academy=1", + "X-WR-CALDESC:", + f"X-WR-CALNAME:Academy - Events", + # event + "BEGIN:VEVENT", + f"DTSTART:{self.datetime_to_ical(event.starting_at)}", + f"DTEND:{self.datetime_to_ical(event.ending_at)}", + f"DTSTAMP:{self.datetime_to_ical(event.created_at)}", + f"UID:breathecode_event_{event.id}_{key}", + self.line_limit( + f"DESCRIPTION:Url: {event.url}\\nAcademy: " + f"{event.academy.name}\\nVenue: {event.venue.title}\\n" + "" + ), + "LOCATION:Street 2 #10-51\\, Gaira\\, Magdalena\\, Colombia", + self.line_limit(f'ORGANIZER;CN="{user.first_name} {user.last_name}";ROLE=OWNER:MAILTO:{user.email}'), + "END:VEVENT", + "END:VCALENDAR", + "", + ] + ) + + self.assertEqual(response.content.decode("utf-8"), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_ical_events__with_one_and_venue__upcoming_true__return_zero_events(self): """Test /academy/cohort without auth""" - device_id_kwargs = {'name': 'server'} + device_id_kwargs = {"name": "server"} event_kwargs = { - 'status': 'ACTIVE', - 'starting_at': timezone.now() - timedelta(days=1), + "status": "ACTIVE", + "starting_at": timezone.now() - timedelta(days=1), } venue_kwargs = { - 'street_address': 'Street 2 #10-51', - 'city': 'Gaira', - 'state': 'Magdalena', - 'country': 'Colombia', + "street_address": "Street 2 #10-51", + "city": "Gaira", + "state": "Magdalena", + "country": "Colombia", } - model = self.generate_models(academy=True, - user=True, - event=True, - device_id=True, - venue=True, - event_kwargs=event_kwargs, - venue_kwargs=venue_kwargs, - device_id_kwargs=device_id_kwargs) - url = reverse_lazy('events:ical_events') - args = {'academy': '1', 'upcoming': 'true'} - response = self.client.get(url + '?' + urllib.parse.urlencode(args)) + model = self.generate_models( + academy=True, + user=True, + event=True, + device_id=True, + venue=True, + event_kwargs=event_kwargs, + venue_kwargs=venue_kwargs, + device_id_kwargs=device_id_kwargs, + ) + url = reverse_lazy("events:ical_events") + args = {"academy": "1", "upcoming": "true"} + response = self.client.get(url + "?" 
+ urllib.parse.urlencode(args)) key = model.device_id.key - expected = '\r\n'.join([ - 'BEGIN:VCALENDAR', - 'VERSION:2.0', - f'PRODID:-//4Geeks//Academy Events (1) {key}//EN', - 'METHOD:PUBLISH', - 'REFRESH-INTERVAL;VALUE=DURATION:PT15M', - 'URL:http://localhost:8000/v1/events/ical/events?academy=1', - 'X-WR-CALDESC:', - f'X-WR-CALNAME:Academy - Events', - 'END:VCALENDAR', - '', - ]) - - self.assertEqual(response.content.decode('utf-8'), expected) + expected = "\r\n".join( + [ + "BEGIN:VCALENDAR", + "VERSION:2.0", + f"PRODID:-//4Geeks//Academy Events (1) {key}//EN", + "METHOD:PUBLISH", + "REFRESH-INTERVAL;VALUE=DURATION:PT15M", + "URL:http://localhost:8000/v1/events/ical/events?academy=1", + "X-WR-CALDESC:", + f"X-WR-CALNAME:Academy - Events", + "END:VCALENDAR", + "", + ] + ) + + self.assertEqual(response.content.decode("utf-8"), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_ical_events__with_one_and_venue__upcoming_true(self): """Test /academy/cohort without auth""" - device_id_kwargs = {'name': 'server'} + device_id_kwargs = {"name": "server"} event_kwargs = { - 'status': 'ACTIVE', - 'starting_at': timezone.now() + timedelta(days=1), + "status": "ACTIVE", + "starting_at": timezone.now() + timedelta(days=1), } venue_kwargs = { - 'title': 'Title', - 'street_address': 'Street 2 #10-51', - 'city': 'Gaira', - 'state': 'Magdalena', - 'country': 'Colombia', + "title": "Title", + "street_address": "Street 2 #10-51", + "city": "Gaira", + "state": "Magdalena", + "country": "Colombia", } - model = self.generate_models(academy=True, - user=True, - event=True, - device_id=True, - venue=True, - event_kwargs=event_kwargs, - venue_kwargs=venue_kwargs, - device_id_kwargs=device_id_kwargs) - - url = reverse_lazy('events:ical_events') - args = {'academy': '1', 'upcoming': 'true'} - response = self.client.get(url + '?' + urllib.parse.urlencode(args)) - - event = model['event'] - user = model['user'] + model = self.generate_models( + academy=True, + user=True, + event=True, + device_id=True, + venue=True, + event_kwargs=event_kwargs, + venue_kwargs=venue_kwargs, + device_id_kwargs=device_id_kwargs, + ) + + url = reverse_lazy("events:ical_events") + args = {"academy": "1", "upcoming": "true"} + response = self.client.get(url + "?" 
+ urllib.parse.urlencode(args)) + + event = model["event"] + user = model["user"] key = model.device_id.key - expected = '\r\n'.join([ - 'BEGIN:VCALENDAR', - 'VERSION:2.0', - f'PRODID:-//4Geeks//Academy Events (1) {key}//EN', - 'METHOD:PUBLISH', - 'REFRESH-INTERVAL;VALUE=DURATION:PT15M', - 'URL:http://localhost:8000/v1/events/ical/events?academy=1', - 'X-WR-CALDESC:', - f'X-WR-CALNAME:Academy - Events', - # event - 'BEGIN:VEVENT', - f'DTSTART:{self.datetime_to_ical(event.starting_at)}', - f'DTEND:{self.datetime_to_ical(event.ending_at)}', - f'DTSTAMP:{self.datetime_to_ical(event.created_at)}', - f'UID:breathecode_event_{event.id}_{key}', - self.line_limit(f'DESCRIPTION:Url: {event.url}\\nAcademy: ' - f'{event.academy.name}\\nVenue: {event.venue.title}\\n' - ''), - 'LOCATION:Street 2 #10-51\\, Gaira\\, Magdalena\\, Colombia', - self.line_limit(f'ORGANIZER;CN="{user.first_name} {user.last_name}";ROLE=OWNER:MAILTO:{user.email}'), - 'END:VEVENT', - 'END:VCALENDAR', - '', - ]) - - self.assertEqual(response.content.decode('utf-8'), expected) + expected = "\r\n".join( + [ + "BEGIN:VCALENDAR", + "VERSION:2.0", + f"PRODID:-//4Geeks//Academy Events (1) {key}//EN", + "METHOD:PUBLISH", + "REFRESH-INTERVAL;VALUE=DURATION:PT15M", + "URL:http://localhost:8000/v1/events/ical/events?academy=1", + "X-WR-CALDESC:", + f"X-WR-CALNAME:Academy - Events", + # event + "BEGIN:VEVENT", + f"DTSTART:{self.datetime_to_ical(event.starting_at)}", + f"DTEND:{self.datetime_to_ical(event.ending_at)}", + f"DTSTAMP:{self.datetime_to_ical(event.created_at)}", + f"UID:breathecode_event_{event.id}_{key}", + self.line_limit( + f"DESCRIPTION:Url: {event.url}\\nAcademy: " + f"{event.academy.name}\\nVenue: {event.venue.title}\\n" + "" + ), + "LOCATION:Street 2 #10-51\\, Gaira\\, Magdalena\\, Colombia", + self.line_limit(f'ORGANIZER;CN="{user.first_name} {user.last_name}";ROLE=OWNER:MAILTO:{user.email}'), + "END:VEVENT", + "END:VCALENDAR", + "", + ] + ) + + self.assertEqual(response.content.decode("utf-8"), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_ical_events__with_two(self): """Test /academy/cohort without auth""" - event_kwargs = {'status': 'ACTIVE'} - device_id_kwargs = {'name': 'server'} + event_kwargs = {"status": "ACTIVE"} + device_id_kwargs = {"name": "server"} base = self.generate_models(device_id=True, academy=True, device_id_kwargs=device_id_kwargs) models = [ @@ -371,362 +400,374 @@ def test_ical_events__with_two(self): self.generate_models(user=True, event=True, event_kwargs=event_kwargs, models=base), ] - url = reverse_lazy('events:ical_events') - args = {'academy': '1'} - response = self.client.get(url + '?' + urllib.parse.urlencode(args)) + url = reverse_lazy("events:ical_events") + args = {"academy": "1"} + response = self.client.get(url + "?" 
+ urllib.parse.urlencode(args)) - event1 = models[0]['event'] - event2 = models[1]['event'] - user1 = models[0]['user'] - user2 = models[1]['user'] - academy = models[0]['academy'] + event1 = models[0]["event"] + event2 = models[1]["event"] + user1 = models[0]["user"] + user2 = models[1]["user"] + academy = models[0]["academy"] key = base.device_id.key - expected = '\r\n'.join([ - 'BEGIN:VCALENDAR', - 'VERSION:2.0', - f'PRODID:-//4Geeks//Academy Events (1) {key}//EN', - 'METHOD:PUBLISH', - 'REFRESH-INTERVAL;VALUE=DURATION:PT15M', - 'URL:http://localhost:8000/v1/events/ical/events?academy=1', - 'X-WR-CALDESC:', - f'X-WR-CALNAME:Academy - Events', - # event - 'BEGIN:VEVENT', - f'DTSTART:{self.datetime_to_ical(event1.starting_at)}', - f'DTEND:{self.datetime_to_ical(event1.ending_at)}', - f'DTSTAMP:{self.datetime_to_ical(event1.created_at)}', - f'UID:breathecode_event_{event1.id}_{key}', - self.line_limit(f'DESCRIPTION:Url: {event1.url}\\nAcademy: ' - f'{event1.academy.name}\\n'), - self.line_limit(f'ORGANIZER;CN="{user1.first_name} {user1.last_name}";ROLE=OWNER:MAILTO:{user1.email}'), - 'END:VEVENT', - # event - 'BEGIN:VEVENT', - f'DTSTART:{self.datetime_to_ical(event2.starting_at)}', - f'DTEND:{self.datetime_to_ical(event2.ending_at)}', - f'DTSTAMP:{self.datetime_to_ical(event2.created_at)}', - f'UID:breathecode_event_{event2.id}_{key}', - self.line_limit(f'DESCRIPTION:Url: {event2.url}\\nAcademy: ' - f'{event2.academy.name}\\n'), - self.line_limit(f'ORGANIZER;CN="{user2.first_name} {user2.last_name}";ROLE=OWNER:MAILTO:{user2.email}'), - 'END:VEVENT', - 'END:VCALENDAR', - '', - ]) - - self.assertEqual(response.content.decode('utf-8'), expected) + expected = "\r\n".join( + [ + "BEGIN:VCALENDAR", + "VERSION:2.0", + f"PRODID:-//4Geeks//Academy Events (1) {key}//EN", + "METHOD:PUBLISH", + "REFRESH-INTERVAL;VALUE=DURATION:PT15M", + "URL:http://localhost:8000/v1/events/ical/events?academy=1", + "X-WR-CALDESC:", + f"X-WR-CALNAME:Academy - Events", + # event + "BEGIN:VEVENT", + f"DTSTART:{self.datetime_to_ical(event1.starting_at)}", + f"DTEND:{self.datetime_to_ical(event1.ending_at)}", + f"DTSTAMP:{self.datetime_to_ical(event1.created_at)}", + f"UID:breathecode_event_{event1.id}_{key}", + self.line_limit(f"DESCRIPTION:Url: {event1.url}\\nAcademy: " f"{event1.academy.name}\\n"), + self.line_limit(f'ORGANIZER;CN="{user1.first_name} {user1.last_name}";ROLE=OWNER:MAILTO:{user1.email}'), + "END:VEVENT", + # event + "BEGIN:VEVENT", + f"DTSTART:{self.datetime_to_ical(event2.starting_at)}", + f"DTEND:{self.datetime_to_ical(event2.ending_at)}", + f"DTSTAMP:{self.datetime_to_ical(event2.created_at)}", + f"UID:breathecode_event_{event2.id}_{key}", + self.line_limit(f"DESCRIPTION:Url: {event2.url}\\nAcademy: " f"{event2.academy.name}\\n"), + self.line_limit(f'ORGANIZER;CN="{user2.first_name} {user2.last_name}";ROLE=OWNER:MAILTO:{user2.email}'), + "END:VEVENT", + "END:VCALENDAR", + "", + ] + ) + + self.assertEqual(response.content.decode("utf-8"), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_ical_events__with_two_and_venue(self): """Test /academy/cohort without auth""" - event_kwargs = {'status': 'ACTIVE'} + event_kwargs = {"status": "ACTIVE"} venue_kwargs = { - 'title': 'Title', - 'street_address': 'Street 2 #10-51', - 'city': 'Gaira', - 'state': 'Magdalena', - 'country': 'Colombia', + "title": "Title", + "street_address": "Street 2 #10-51", + "city": "Gaira", + "state": "Magdalena", + "country": "Colombia", } - device_id_kwargs = {'name': 'server'} + device_id_kwargs = {"name": 
"server"} base = self.generate_models(device_id=True, academy=True, device_id_kwargs=device_id_kwargs) models = [ - self.generate_models(user=True, - event=True, - venue=True, - event_kwargs=event_kwargs, - venue_kwargs=venue_kwargs, - models=base), - self.generate_models(user=True, - event=True, - venue=True, - event_kwargs=event_kwargs, - venue_kwargs=venue_kwargs, - models=base), + self.generate_models( + user=True, event=True, venue=True, event_kwargs=event_kwargs, venue_kwargs=venue_kwargs, models=base + ), + self.generate_models( + user=True, event=True, venue=True, event_kwargs=event_kwargs, venue_kwargs=venue_kwargs, models=base + ), ] - url = reverse_lazy('events:ical_events') - args = {'academy': '1'} - response = self.client.get(url + '?' + urllib.parse.urlencode(args)) + url = reverse_lazy("events:ical_events") + args = {"academy": "1"} + response = self.client.get(url + "?" + urllib.parse.urlencode(args)) - event1 = models[0]['event'] - event2 = models[1]['event'] - user1 = models[0]['user'] - user2 = models[1]['user'] - academy = models[0]['academy'] + event1 = models[0]["event"] + event2 = models[1]["event"] + user1 = models[0]["user"] + user2 = models[1]["user"] + academy = models[0]["academy"] key = base.device_id.key - expected = '\r\n'.join([ - 'BEGIN:VCALENDAR', - 'VERSION:2.0', - f'PRODID:-//4Geeks//Academy Events (1) {key}//EN', - 'METHOD:PUBLISH', - 'REFRESH-INTERVAL;VALUE=DURATION:PT15M', - 'URL:http://localhost:8000/v1/events/ical/events?academy=1', - 'X-WR-CALDESC:', - f'X-WR-CALNAME:Academy - Events', - # event - 'BEGIN:VEVENT', - f'DTSTART:{self.datetime_to_ical(event1.starting_at)}', - f'DTEND:{self.datetime_to_ical(event1.ending_at)}', - f'DTSTAMP:{self.datetime_to_ical(event1.created_at)}', - f'UID:breathecode_event_{event1.id}_{key}', - self.line_limit(f'DESCRIPTION:Url: {event1.url}\\nAcademy: ' - f'{event1.academy.name}\\nVenue: {event1.venue.title}\\n' - ''), - 'LOCATION:Street 2 #10-51\\, Gaira\\, Magdalena\\, Colombia', - self.line_limit(f'ORGANIZER;CN="{user1.first_name} {user1.last_name}";ROLE=OWNER:MAILTO:{user1.email}'), - 'END:VEVENT', - # event - 'BEGIN:VEVENT', - f'DTSTART:{self.datetime_to_ical(event2.starting_at)}', - f'DTEND:{self.datetime_to_ical(event2.ending_at)}', - f'DTSTAMP:{self.datetime_to_ical(event2.created_at)}', - f'UID:breathecode_event_{event2.id}_{key}', - self.line_limit(f'DESCRIPTION:Url: {event2.url}\\nAcademy: ' - f'{event2.academy.name}\\nVenue: {event2.venue.title}\\n' - ''), - 'LOCATION:Street 2 #10-51\\, Gaira\\, Magdalena\\, Colombia', - self.line_limit(f'ORGANIZER;CN="{user2.first_name} {user2.last_name}";ROLE=OWNER:MAILTO:{user2.email}'), - 'END:VEVENT', - 'END:VCALENDAR', - '', - ]) - - self.assertEqual(response.content.decode('utf-8'), expected) + expected = "\r\n".join( + [ + "BEGIN:VCALENDAR", + "VERSION:2.0", + f"PRODID:-//4Geeks//Academy Events (1) {key}//EN", + "METHOD:PUBLISH", + "REFRESH-INTERVAL;VALUE=DURATION:PT15M", + "URL:http://localhost:8000/v1/events/ical/events?academy=1", + "X-WR-CALDESC:", + f"X-WR-CALNAME:Academy - Events", + # event + "BEGIN:VEVENT", + f"DTSTART:{self.datetime_to_ical(event1.starting_at)}", + f"DTEND:{self.datetime_to_ical(event1.ending_at)}", + f"DTSTAMP:{self.datetime_to_ical(event1.created_at)}", + f"UID:breathecode_event_{event1.id}_{key}", + self.line_limit( + f"DESCRIPTION:Url: {event1.url}\\nAcademy: " + f"{event1.academy.name}\\nVenue: {event1.venue.title}\\n" + "" + ), + "LOCATION:Street 2 #10-51\\, Gaira\\, Magdalena\\, Colombia", + 
self.line_limit(f'ORGANIZER;CN="{user1.first_name} {user1.last_name}";ROLE=OWNER:MAILTO:{user1.email}'), + "END:VEVENT", + # event + "BEGIN:VEVENT", + f"DTSTART:{self.datetime_to_ical(event2.starting_at)}", + f"DTEND:{self.datetime_to_ical(event2.ending_at)}", + f"DTSTAMP:{self.datetime_to_ical(event2.created_at)}", + f"UID:breathecode_event_{event2.id}_{key}", + self.line_limit( + f"DESCRIPTION:Url: {event2.url}\\nAcademy: " + f"{event2.academy.name}\\nVenue: {event2.venue.title}\\n" + "" + ), + "LOCATION:Street 2 #10-51\\, Gaira\\, Magdalena\\, Colombia", + self.line_limit(f'ORGANIZER;CN="{user2.first_name} {user2.last_name}";ROLE=OWNER:MAILTO:{user2.email}'), + "END:VEVENT", + "END:VCALENDAR", + "", + ] + ) + + self.assertEqual(response.content.decode("utf-8"), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_ical_events__with_two_and_venue__with_two_academies_id(self): """Test /academy/cohort without auth""" - event_kwargs = {'status': 'ACTIVE'} + event_kwargs = {"status": "ACTIVE"} venue_kwargs = { - 'title': 'Title', - 'street_address': 'Street 2 #10-51', - 'city': 'Gaira', - 'state': 'Magdalena', - 'country': 'Colombia', + "title": "Title", + "street_address": "Street 2 #10-51", + "city": "Gaira", + "state": "Magdalena", + "country": "Colombia", } - device_id_kwargs = {'name': 'server'} + device_id_kwargs = {"name": "server"} base = self.generate_models(device_id=True, device_id_kwargs=device_id_kwargs) base1 = self.generate_models(academy=True, models=base) models = [ - self.generate_models(user=True, - event=True, - venue=True, - event_kwargs=event_kwargs, - venue_kwargs=venue_kwargs, - models=base1), - self.generate_models(user=True, - event=True, - venue=True, - event_kwargs=event_kwargs, - venue_kwargs=venue_kwargs, - models=base1), + self.generate_models( + user=True, event=True, venue=True, event_kwargs=event_kwargs, venue_kwargs=venue_kwargs, models=base1 + ), + self.generate_models( + user=True, event=True, venue=True, event_kwargs=event_kwargs, venue_kwargs=venue_kwargs, models=base1 + ), ] base2 = self.generate_models(academy=True, models=base) models = models + [ self.generate_models( - user=True, event=True, venue=True, event_kwargs=event_kwargs, venue_kwargs=venue_kwargs, models=base2), + user=True, event=True, venue=True, event_kwargs=event_kwargs, venue_kwargs=venue_kwargs, models=base2 + ), self.generate_models( - user=True, event=True, venue=True, event_kwargs=event_kwargs, venue_kwargs=venue_kwargs, models=base2), + user=True, event=True, venue=True, event_kwargs=event_kwargs, venue_kwargs=venue_kwargs, models=base2 + ), ] - url = reverse_lazy('events:ical_events') - args = {'academy': '1,2'} - url = url + '?' + urllib.parse.urlencode(args) + url = reverse_lazy("events:ical_events") + args = {"academy": "1,2"} + url = url + "?" 
+ urllib.parse.urlencode(args) response = self.client.get(url) - event1 = models[0]['event'] - event2 = models[1]['event'] - event3 = models[2]['event'] - event4 = models[3]['event'] - user1 = models[0]['user'] - user2 = models[1]['user'] - user3 = models[2]['user'] - user4 = models[3]['user'] + event1 = models[0]["event"] + event2 = models[1]["event"] + event3 = models[2]["event"] + event4 = models[3]["event"] + user1 = models[0]["user"] + user2 = models[1]["user"] + user3 = models[2]["user"] + user4 = models[3]["user"] key = base.device_id.key - url = url.replace('%2C', ',') - expected = '\r\n'.join([ - 'BEGIN:VCALENDAR', - 'VERSION:2.0', - f'PRODID:-//4Geeks//Academy Events (1\\,2) {key}//EN', - 'METHOD:PUBLISH', - 'REFRESH-INTERVAL;VALUE=DURATION:PT15M', - self.line_limit(f'URL:http://localhost:8000{url}'), - 'X-WR-CALDESC:', - f'X-WR-CALNAME:Academy - Events', - # event - 'BEGIN:VEVENT', - f'DTSTART:{self.datetime_to_ical(event1.starting_at)}', - f'DTEND:{self.datetime_to_ical(event1.ending_at)}', - f'DTSTAMP:{self.datetime_to_ical(event1.created_at)}', - f'UID:breathecode_event_{event1.id}_{key}', - self.line_limit(f'DESCRIPTION:Url: {event1.url}\\nAcademy: ' - f'{event1.academy.name}\\nVenue: {event1.venue.title}\\n' - ''), - 'LOCATION:Street 2 #10-51\\, Gaira\\, Magdalena\\, Colombia', - self.line_limit(f'ORGANIZER;CN="{user1.first_name} {user1.last_name}";ROLE=OWNER:MAILTO:{user1.email}'), - 'END:VEVENT', - # event - 'BEGIN:VEVENT', - f'DTSTART:{self.datetime_to_ical(event2.starting_at)}', - f'DTEND:{self.datetime_to_ical(event2.ending_at)}', - f'DTSTAMP:{self.datetime_to_ical(event2.created_at)}', - f'UID:breathecode_event_{event2.id}_{key}', - self.line_limit(f'DESCRIPTION:Url: {event2.url}\\nAcademy: ' - f'{event2.academy.name}\\nVenue: {event2.venue.title}\\n' - ''), - 'LOCATION:Street 2 #10-51\\, Gaira\\, Magdalena\\, Colombia', - self.line_limit(f'ORGANIZER;CN="{user2.first_name} {user2.last_name}";ROLE=OWNER:MAILTO:{user2.email}'), - 'END:VEVENT', - # event - 'BEGIN:VEVENT', - f'DTSTART:{self.datetime_to_ical(event3.starting_at)}', - f'DTEND:{self.datetime_to_ical(event3.ending_at)}', - f'DTSTAMP:{self.datetime_to_ical(event3.created_at)}', - f'UID:breathecode_event_{event3.id}_{key}', - self.line_limit(f'DESCRIPTION:Url: {event3.url}\\nAcademy: ' - f'{event3.academy.name}\\nVenue: {event3.venue.title}\\n' - ''), - 'LOCATION:Street 2 #10-51\\, Gaira\\, Magdalena\\, Colombia', - self.line_limit(f'ORGANIZER;CN="{user3.first_name} {user3.last_name}";ROLE=OWNER:MAILTO:{user3.email}'), - 'END:VEVENT', - # event - 'BEGIN:VEVENT', - f'DTSTART:{self.datetime_to_ical(event4.starting_at)}', - f'DTEND:{self.datetime_to_ical(event4.ending_at)}', - f'DTSTAMP:{self.datetime_to_ical(event4.created_at)}', - f'UID:breathecode_event_{event4.id}_{key}', - self.line_limit(f'DESCRIPTION:Url: {event4.url}\\nAcademy: ' - f'{event4.academy.name}\\nVenue: {event4.venue.title}\\n' - ''), - 'LOCATION:Street 2 #10-51\\, Gaira\\, Magdalena\\, Colombia', - self.line_limit(f'ORGANIZER;CN="{user4.first_name} {user4.last_name}";ROLE=OWNER:MAILTO:{user4.email}'), - 'END:VEVENT', - 'END:VCALENDAR', - '', - ]) - - self.assertEqual(response.content.decode('utf-8'), expected) + url = url.replace("%2C", ",") + expected = "\r\n".join( + [ + "BEGIN:VCALENDAR", + "VERSION:2.0", + f"PRODID:-//4Geeks//Academy Events (1\\,2) {key}//EN", + "METHOD:PUBLISH", + "REFRESH-INTERVAL;VALUE=DURATION:PT15M", + self.line_limit(f"URL:http://localhost:8000{url}"), + "X-WR-CALDESC:", + f"X-WR-CALNAME:Academy - Events", + # event + 
"BEGIN:VEVENT", + f"DTSTART:{self.datetime_to_ical(event1.starting_at)}", + f"DTEND:{self.datetime_to_ical(event1.ending_at)}", + f"DTSTAMP:{self.datetime_to_ical(event1.created_at)}", + f"UID:breathecode_event_{event1.id}_{key}", + self.line_limit( + f"DESCRIPTION:Url: {event1.url}\\nAcademy: " + f"{event1.academy.name}\\nVenue: {event1.venue.title}\\n" + "" + ), + "LOCATION:Street 2 #10-51\\, Gaira\\, Magdalena\\, Colombia", + self.line_limit(f'ORGANIZER;CN="{user1.first_name} {user1.last_name}";ROLE=OWNER:MAILTO:{user1.email}'), + "END:VEVENT", + # event + "BEGIN:VEVENT", + f"DTSTART:{self.datetime_to_ical(event2.starting_at)}", + f"DTEND:{self.datetime_to_ical(event2.ending_at)}", + f"DTSTAMP:{self.datetime_to_ical(event2.created_at)}", + f"UID:breathecode_event_{event2.id}_{key}", + self.line_limit( + f"DESCRIPTION:Url: {event2.url}\\nAcademy: " + f"{event2.academy.name}\\nVenue: {event2.venue.title}\\n" + "" + ), + "LOCATION:Street 2 #10-51\\, Gaira\\, Magdalena\\, Colombia", + self.line_limit(f'ORGANIZER;CN="{user2.first_name} {user2.last_name}";ROLE=OWNER:MAILTO:{user2.email}'), + "END:VEVENT", + # event + "BEGIN:VEVENT", + f"DTSTART:{self.datetime_to_ical(event3.starting_at)}", + f"DTEND:{self.datetime_to_ical(event3.ending_at)}", + f"DTSTAMP:{self.datetime_to_ical(event3.created_at)}", + f"UID:breathecode_event_{event3.id}_{key}", + self.line_limit( + f"DESCRIPTION:Url: {event3.url}\\nAcademy: " + f"{event3.academy.name}\\nVenue: {event3.venue.title}\\n" + "" + ), + "LOCATION:Street 2 #10-51\\, Gaira\\, Magdalena\\, Colombia", + self.line_limit(f'ORGANIZER;CN="{user3.first_name} {user3.last_name}";ROLE=OWNER:MAILTO:{user3.email}'), + "END:VEVENT", + # event + "BEGIN:VEVENT", + f"DTSTART:{self.datetime_to_ical(event4.starting_at)}", + f"DTEND:{self.datetime_to_ical(event4.ending_at)}", + f"DTSTAMP:{self.datetime_to_ical(event4.created_at)}", + f"UID:breathecode_event_{event4.id}_{key}", + self.line_limit( + f"DESCRIPTION:Url: {event4.url}\\nAcademy: " + f"{event4.academy.name}\\nVenue: {event4.venue.title}\\n" + "" + ), + "LOCATION:Street 2 #10-51\\, Gaira\\, Magdalena\\, Colombia", + self.line_limit(f'ORGANIZER;CN="{user4.first_name} {user4.last_name}";ROLE=OWNER:MAILTO:{user4.email}'), + "END:VEVENT", + "END:VCALENDAR", + "", + ] + ) + + self.assertEqual(response.content.decode("utf-8"), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_ical_events__with_two_and_venue__with_two_academies_slug(self): """Test /academy/cohort without auth""" - event_kwargs = {'status': 'ACTIVE'} + event_kwargs = {"status": "ACTIVE"} venue_kwargs = { - 'title': 'Title', - 'street_address': 'Street 2 #10-51', - 'city': 'Gaira', - 'state': 'Magdalena', - 'country': 'Colombia', + "title": "Title", + "street_address": "Street 2 #10-51", + "city": "Gaira", + "state": "Magdalena", + "country": "Colombia", } - device_id_kwargs = {'name': 'server'} + device_id_kwargs = {"name": "server"} base = self.generate_models(device_id=True, device_id_kwargs=device_id_kwargs) base1 = self.generate_models(academy=True, models=base) models = [ - self.generate_models(user=True, - event=True, - venue=True, - event_kwargs=event_kwargs, - venue_kwargs=venue_kwargs, - models=base1), - self.generate_models(user=True, - event=True, - venue=True, - event_kwargs=event_kwargs, - venue_kwargs=venue_kwargs, - models=base1), + self.generate_models( + user=True, event=True, venue=True, event_kwargs=event_kwargs, venue_kwargs=venue_kwargs, models=base1 + ), + self.generate_models( + user=True, event=True, 
venue=True, event_kwargs=event_kwargs, venue_kwargs=venue_kwargs, models=base1 + ), ] base2 = self.generate_models(academy=True, models=base) models = models + [ self.generate_models( - user=True, event=True, venue=True, event_kwargs=event_kwargs, venue_kwargs=venue_kwargs, models=base2), + user=True, event=True, venue=True, event_kwargs=event_kwargs, venue_kwargs=venue_kwargs, models=base2 + ), self.generate_models( - user=True, event=True, venue=True, event_kwargs=event_kwargs, venue_kwargs=venue_kwargs, models=base2), + user=True, event=True, venue=True, event_kwargs=event_kwargs, venue_kwargs=venue_kwargs, models=base2 + ), ] - url = reverse_lazy('events:ical_events') - args = {'academy_slug': ','.join(list(dict.fromkeys([x.academy.slug for x in models])))} - url = url + '?' + urllib.parse.urlencode(args) - response = self.client.get(url + '?' + urllib.parse.urlencode(args)) - - event1 = models[0]['event'] - event2 = models[1]['event'] - event3 = models[2]['event'] - event4 = models[3]['event'] - user1 = models[0]['user'] - user2 = models[1]['user'] - user3 = models[2]['user'] - user4 = models[3]['user'] + url = reverse_lazy("events:ical_events") + args = {"academy_slug": ",".join(list(dict.fromkeys([x.academy.slug for x in models])))} + url = url + "?" + urllib.parse.urlencode(args) + response = self.client.get(url + "?" + urllib.parse.urlencode(args)) + + event1 = models[0]["event"] + event2 = models[1]["event"] + event3 = models[2]["event"] + event4 = models[3]["event"] + user1 = models[0]["user"] + user2 = models[1]["user"] + user3 = models[2]["user"] + user4 = models[3]["user"] key = base.device_id.key - url = url.replace('%2C', ',') - expected = '\r\n'.join([ - 'BEGIN:VCALENDAR', - 'VERSION:2.0', - f'PRODID:-//4Geeks//Academy Events (1\\,2) {key}//EN', - 'METHOD:PUBLISH', - 'REFRESH-INTERVAL;VALUE=DURATION:PT15M', - self.line_limit(f'URL:http://localhost:8000{url}'), - 'X-WR-CALDESC:', - f'X-WR-CALNAME:Academy - Events', - # event - 'BEGIN:VEVENT', - f'DTSTART:{self.datetime_to_ical(event1.starting_at)}', - f'DTEND:{self.datetime_to_ical(event1.ending_at)}', - f'DTSTAMP:{self.datetime_to_ical(event1.created_at)}', - f'UID:breathecode_event_{event1.id}_{key}', - self.line_limit(f'DESCRIPTION:Url: {event1.url}\\nAcademy: ' - f'{event1.academy.name}\\nVenue: {event1.venue.title}\\n' - ''), - 'LOCATION:Street 2 #10-51\\, Gaira\\, Magdalena\\, Colombia', - self.line_limit(f'ORGANIZER;CN="{user1.first_name} {user1.last_name}";ROLE=OWNER:MAILTO:{user1.email}'), - 'END:VEVENT', - # event - 'BEGIN:VEVENT', - f'DTSTART:{self.datetime_to_ical(event2.starting_at)}', - f'DTEND:{self.datetime_to_ical(event2.ending_at)}', - f'DTSTAMP:{self.datetime_to_ical(event2.created_at)}', - f'UID:breathecode_event_{event2.id}_{key}', - self.line_limit(f'DESCRIPTION:Url: {event2.url}\\nAcademy: ' - f'{event2.academy.name}\\nVenue: {event2.venue.title}\\n' - ''), - 'LOCATION:Street 2 #10-51\\, Gaira\\, Magdalena\\, Colombia', - self.line_limit(f'ORGANIZER;CN="{user2.first_name} {user2.last_name}";ROLE=OWNER:MAILTO:{user2.email}'), - 'END:VEVENT', - # event - 'BEGIN:VEVENT', - f'DTSTART:{self.datetime_to_ical(event3.starting_at)}', - f'DTEND:{self.datetime_to_ical(event3.ending_at)}', - f'DTSTAMP:{self.datetime_to_ical(event3.created_at)}', - f'UID:breathecode_event_{event3.id}_{key}', - self.line_limit(f'DESCRIPTION:Url: {event3.url}\\nAcademy: ' - f'{event3.academy.name}\\nVenue: {event3.venue.title}\\n' - ''), - 'LOCATION:Street 2 #10-51\\, Gaira\\, Magdalena\\, Colombia', - 
self.line_limit(f'ORGANIZER;CN="{user3.first_name} {user3.last_name}";ROLE=OWNER:MAILTO:{user3.email}'), - 'END:VEVENT', - # event - 'BEGIN:VEVENT', - f'DTSTART:{self.datetime_to_ical(event4.starting_at)}', - f'DTEND:{self.datetime_to_ical(event4.ending_at)}', - f'DTSTAMP:{self.datetime_to_ical(event4.created_at)}', - f'UID:breathecode_event_{event4.id}_{key}', - self.line_limit(f'DESCRIPTION:Url: {event4.url}\\nAcademy: ' - f'{event4.academy.name}\\nVenue: {event4.venue.title}\\n' - ''), - 'LOCATION:Street 2 #10-51\\, Gaira\\, Magdalena\\, Colombia', - self.line_limit(f'ORGANIZER;CN="{user4.first_name} {user4.last_name}";ROLE=OWNER:MAILTO:{user4.email}'), - 'END:VEVENT', - 'END:VCALENDAR', - '', - ]) - - self.assertEqual(response.content.decode('utf-8'), expected) + url = url.replace("%2C", ",") + expected = "\r\n".join( + [ + "BEGIN:VCALENDAR", + "VERSION:2.0", + f"PRODID:-//4Geeks//Academy Events (1\\,2) {key}//EN", + "METHOD:PUBLISH", + "REFRESH-INTERVAL;VALUE=DURATION:PT15M", + self.line_limit(f"URL:http://localhost:8000{url}"), + "X-WR-CALDESC:", + f"X-WR-CALNAME:Academy - Events", + # event + "BEGIN:VEVENT", + f"DTSTART:{self.datetime_to_ical(event1.starting_at)}", + f"DTEND:{self.datetime_to_ical(event1.ending_at)}", + f"DTSTAMP:{self.datetime_to_ical(event1.created_at)}", + f"UID:breathecode_event_{event1.id}_{key}", + self.line_limit( + f"DESCRIPTION:Url: {event1.url}\\nAcademy: " + f"{event1.academy.name}\\nVenue: {event1.venue.title}\\n" + "" + ), + "LOCATION:Street 2 #10-51\\, Gaira\\, Magdalena\\, Colombia", + self.line_limit(f'ORGANIZER;CN="{user1.first_name} {user1.last_name}";ROLE=OWNER:MAILTO:{user1.email}'), + "END:VEVENT", + # event + "BEGIN:VEVENT", + f"DTSTART:{self.datetime_to_ical(event2.starting_at)}", + f"DTEND:{self.datetime_to_ical(event2.ending_at)}", + f"DTSTAMP:{self.datetime_to_ical(event2.created_at)}", + f"UID:breathecode_event_{event2.id}_{key}", + self.line_limit( + f"DESCRIPTION:Url: {event2.url}\\nAcademy: " + f"{event2.academy.name}\\nVenue: {event2.venue.title}\\n" + "" + ), + "LOCATION:Street 2 #10-51\\, Gaira\\, Magdalena\\, Colombia", + self.line_limit(f'ORGANIZER;CN="{user2.first_name} {user2.last_name}";ROLE=OWNER:MAILTO:{user2.email}'), + "END:VEVENT", + # event + "BEGIN:VEVENT", + f"DTSTART:{self.datetime_to_ical(event3.starting_at)}", + f"DTEND:{self.datetime_to_ical(event3.ending_at)}", + f"DTSTAMP:{self.datetime_to_ical(event3.created_at)}", + f"UID:breathecode_event_{event3.id}_{key}", + self.line_limit( + f"DESCRIPTION:Url: {event3.url}\\nAcademy: " + f"{event3.academy.name}\\nVenue: {event3.venue.title}\\n" + "" + ), + "LOCATION:Street 2 #10-51\\, Gaira\\, Magdalena\\, Colombia", + self.line_limit(f'ORGANIZER;CN="{user3.first_name} {user3.last_name}";ROLE=OWNER:MAILTO:{user3.email}'), + "END:VEVENT", + # event + "BEGIN:VEVENT", + f"DTSTART:{self.datetime_to_ical(event4.starting_at)}", + f"DTEND:{self.datetime_to_ical(event4.ending_at)}", + f"DTSTAMP:{self.datetime_to_ical(event4.created_at)}", + f"UID:breathecode_event_{event4.id}_{key}", + self.line_limit( + f"DESCRIPTION:Url: {event4.url}\\nAcademy: " + f"{event4.academy.name}\\nVenue: {event4.venue.title}\\n" + "" + ), + "LOCATION:Street 2 #10-51\\, Gaira\\, Magdalena\\, Colombia", + self.line_limit(f'ORGANIZER;CN="{user4.first_name} {user4.last_name}";ROLE=OWNER:MAILTO:{user4.email}'), + "END:VEVENT", + "END:VCALENDAR", + "", + ] + ) + + self.assertEqual(response.content.decode("utf-8"), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) # # this test is comment 
because is util to check and generate one example diff --git a/breathecode/events/tests/urls/tests_ical_student_id.py b/breathecode/events/tests/urls/tests_ical_student_id.py index 35fc3f94f..75f6e5661 100644 --- a/breathecode/events/tests/urls/tests_ical_student_id.py +++ b/breathecode/events/tests/urls/tests_ical_student_id.py @@ -1,6 +1,7 @@ """ Test /academy/cohort """ + import urllib from datetime import datetime, timedelta from unittest.mock import MagicMock, patch @@ -18,22 +19,23 @@ class AcademyCohortTestSuite(EventTestCase): """Test /academy/cohort""" + """ 🔽🔽🔽 Without student """ - @patch('breathecode.events.tasks.build_live_classes_from_timeslot.delay', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.events.tasks.build_live_classes_from_timeslot.delay", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_ical_cohorts__without_student(self): - url = reverse_lazy('events:ical_student_id', kwargs={'user_id': 1}) - args = {'academy': '1'} - response = self.client.get(url + '?' + urllib.parse.urlencode(args)) + url = reverse_lazy("events:ical_student_id", kwargs={"user_id": 1}) + args = {"academy": "1"} + response = self.client.get(url + "?" + urllib.parse.urlencode(args)) json = response.json() expected = { - 'detail': 'student-not-exist', - 'status_code': 404, + "detail": "student-not-exist", + "status_code": 404, } self.assertEqual(json, expected) @@ -43,655 +45,719 @@ def test_ical_cohorts__without_student(self): 🔽🔽🔽 Without time slot """ - @patch('breathecode.events.tasks.build_live_classes_from_timeslot.delay', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.events.tasks.build_live_classes_from_timeslot.delay", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_ical_cohorts__without_cohort_time_slot(self): - device_id_kwargs = {'name': 'server'} + device_id_kwargs = {"name": "server"} model = self.generate_models(academy=True, device_id=True, device_id_kwargs=device_id_kwargs, cohort_user=True) - url = reverse_lazy('events:ical_student_id', kwargs={'user_id': 1}) + url = reverse_lazy("events:ical_student_id", kwargs={"user_id": 1}) response = self.client.get(url) key = model.device_id.key - expected = '\r\n'.join([ - 'BEGIN:VCALENDAR', - 'VERSION:2.0', - f'PRODID:-//4Geeks//Student Schedule (1) {key}//EN', - 'METHOD:PUBLISH', - 'REFRESH-INTERVAL;VALUE=DURATION:PT15M', - 'URL:http://localhost:8000/v1/events/ical/student/1', - 'X-WR-CALDESC:', - 'X-WR-CALNAME:Academy - Schedule', - 'END:VCALENDAR', - '', - ]) - - self.assertEqual(response.content.decode('utf-8'), expected) + expected = "\r\n".join( + [ + "BEGIN:VCALENDAR", + "VERSION:2.0", + f"PRODID:-//4Geeks//Student Schedule (1) {key}//EN", + "METHOD:PUBLISH", + "REFRESH-INTERVAL;VALUE=DURATION:PT15M", + "URL:http://localhost:8000/v1/events/ical/student/1", + "X-WR-CALDESC:", + "X-WR-CALNAME:Academy - Schedule", + 
"END:VCALENDAR", + "", + ] + ) + + self.assertEqual(response.content.decode("utf-8"), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) """ 🔽🔽🔽 One time slot and the Cohort never ends """ - @patch('breathecode.events.tasks.build_live_classes_from_timeslot.delay', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.events.tasks.build_live_classes_from_timeslot.delay", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_ical_cohorts__with_one__cohort_never_ends(self): - device_id_kwargs = {'name': 'server'} + device_id_kwargs = {"name": "server"} cohort_kwargs = { # 'ending_date': datetime(year=2060, day=31, month=12, hour=12, minute=0, second=0, # tzinfo=pytz.UTC), - 'never_ends': True, + "never_ends": True, } # don't forget 🦾 2021 - 1010 datetime_integer = 202109111330 cohort_time_slot_kwargs = { - 'timezone': 'America/Bogota', - 'starting_at': datetime_integer, - 'ending_at': datetime_integer, + "timezone": "America/Bogota", + "starting_at": datetime_integer, + "ending_at": datetime_integer, } - model = self.generate_models(academy=True, - device_id=device_id_kwargs, - cohort_user=1, - cohort_time_slot=cohort_time_slot_kwargs, - cohort=cohort_kwargs) + model = self.generate_models( + academy=True, + device_id=device_id_kwargs, + cohort_user=1, + cohort_time_slot=cohort_time_slot_kwargs, + cohort=cohort_kwargs, + ) - url = reverse_lazy('events:ical_student_id', kwargs={'user_id': 1}) + url = reverse_lazy("events:ical_student_id", kwargs={"user_id": 1}) response = self.client.get(url) key = model.device_id.key - expected = '\r\n'.join([ - 'BEGIN:VCALENDAR', - 'VERSION:2.0', - f'PRODID:-//4Geeks//Student Schedule (1) {key}//EN', - 'METHOD:PUBLISH', - 'REFRESH-INTERVAL;VALUE=DURATION:PT15M', - 'URL:http://localhost:8000/v1/events/ical/student/1', - 'X-WR-CALDESC:', - 'X-WR-CALNAME:Academy - Schedule', - 'END:VCALENDAR', - '', - ]) - - self.assertEqual(response.content.decode('utf-8'), expected) + expected = "\r\n".join( + [ + "BEGIN:VCALENDAR", + "VERSION:2.0", + f"PRODID:-//4Geeks//Student Schedule (1) {key}//EN", + "METHOD:PUBLISH", + "REFRESH-INTERVAL;VALUE=DURATION:PT15M", + "URL:http://localhost:8000/v1/events/ical/student/1", + "X-WR-CALDESC:", + "X-WR-CALNAME:Academy - Schedule", + "END:VCALENDAR", + "", + ] + ) + + self.assertEqual(response.content.decode("utf-8"), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) """ 🔽🔽🔽 One time slot and the Cohort with ending_date as None """ - @patch('breathecode.events.tasks.build_live_classes_from_timeslot.delay', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.events.tasks.build_live_classes_from_timeslot.delay", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_ical_cohorts__with_one__cohort_without_ending_date(self): - device_id_kwargs = {'name': 'server'} + device_id_kwargs = {"name": "server"} # 
don't forget 🦾 2021 - 1010 datetime_integer = 202109111330 cohort_time_slot_kwargs = { - 'timezone': 'America/Bogota', - 'starting_at': datetime_integer, - 'ending_at': datetime_integer, + "timezone": "America/Bogota", + "starting_at": datetime_integer, + "ending_at": datetime_integer, } - model = self.generate_models(academy=True, - device_id=device_id_kwargs, - cohort_user=1, - cohort_time_slot=cohort_time_slot_kwargs, - cohort=1) + model = self.generate_models( + academy=True, device_id=device_id_kwargs, cohort_user=1, cohort_time_slot=cohort_time_slot_kwargs, cohort=1 + ) - url = reverse_lazy('events:ical_student_id', kwargs={'user_id': 1}) + url = reverse_lazy("events:ical_student_id", kwargs={"user_id": 1}) response = self.client.get(url) key = model.device_id.key - expected = '\r\n'.join([ - 'BEGIN:VCALENDAR', - 'VERSION:2.0', - f'PRODID:-//4Geeks//Student Schedule (1) {key}//EN', - 'METHOD:PUBLISH', - 'REFRESH-INTERVAL;VALUE=DURATION:PT15M', - 'URL:http://localhost:8000/v1/events/ical/student/1', - 'X-WR-CALDESC:', - 'X-WR-CALNAME:Academy - Schedule', - 'END:VCALENDAR', - '', - ]) - - self.assertEqual(response.content.decode('utf-8'), expected) + expected = "\r\n".join( + [ + "BEGIN:VCALENDAR", + "VERSION:2.0", + f"PRODID:-//4Geeks//Student Schedule (1) {key}//EN", + "METHOD:PUBLISH", + "REFRESH-INTERVAL;VALUE=DURATION:PT15M", + "URL:http://localhost:8000/v1/events/ical/student/1", + "X-WR-CALDESC:", + "X-WR-CALNAME:Academy - Schedule", + "END:VCALENDAR", + "", + ] + ) + + self.assertEqual(response.content.decode("utf-8"), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) """ 🔽🔽🔽 One time slot with ending_date in Cohort """ - @patch('breathecode.events.tasks.build_live_classes_from_timeslot.delay', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.events.tasks.build_live_classes_from_timeslot.delay", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_ical_cohorts__with_one__with_ending_date(self): - device_id_kwargs = {'name': 'server'} + device_id_kwargs = {"name": "server"} cohort_kwargs = { - 'ending_date': datetime(year=2060, day=31, month=12, hour=12, minute=0, second=0, tzinfo=pytz.UTC) + "ending_date": datetime(year=2060, day=31, month=12, hour=12, minute=0, second=0, tzinfo=pytz.UTC) } # don't forget 🦾 2021 - 1010 datetime_integer = 202109111330 cohort_time_slot_kwargs = { - 'timezone': 'America/Bogota', - 'starting_at': datetime_integer, - 'ending_at': datetime_integer, + "timezone": "America/Bogota", + "starting_at": datetime_integer, + "ending_at": datetime_integer, } - model = self.generate_models(academy=True, - device_id=True, - device_id_kwargs=device_id_kwargs, - cohort={'kickoff_date': datetime.today().isoformat()}, - cohort_user=True, - cohort_time_slot=True, - cohort_kwargs=cohort_kwargs, - cohort_time_slot_kwargs=cohort_time_slot_kwargs) - - url = reverse_lazy('events:ical_student_id', kwargs={'user_id': 1}) + model = self.generate_models( + academy=True, + device_id=True, + device_id_kwargs=device_id_kwargs, + cohort={"kickoff_date": datetime.today().isoformat()}, + cohort_user=True, + cohort_time_slot=True, + cohort_kwargs=cohort_kwargs, + cohort_time_slot_kwargs=cohort_time_slot_kwargs, 
+ ) + + url = reverse_lazy("events:ical_student_id", kwargs={"user_id": 1}) response = self.client.get(url) key = model.device_id.key - starting_at = self.datetime_to_ical(fix_datetime_weekday(model.cohort.kickoff_date, - DatetimeInteger.to_datetime( - model.cohort_time_slot.timezone, - model.cohort_time_slot.starting_at), - next=True), - utc=False) - - starting_at_utc = self.datetime_to_ical(DatetimeInteger.to_utc_datetime(model.cohort_time_slot.timezone, - model.cohort_time_slot.starting_at), - utc=True) - - ending_at = self.datetime_to_ical(fix_datetime_weekday(model.cohort.kickoff_date, - DatetimeInteger.to_datetime( - model.cohort_time_slot.timezone, - model.cohort_time_slot.ending_at), - next=True), - utc=False) - - expected = '\r\n'.join([ - 'BEGIN:VCALENDAR', - 'VERSION:2.0', - f'PRODID:-//4Geeks//Student Schedule (1) {key}//EN', - 'METHOD:PUBLISH', - 'REFRESH-INTERVAL;VALUE=DURATION:PT15M', - 'URL:http://localhost:8000/v1/events/ical/student/1', - 'X-WR-CALDESC:', - 'X-WR-CALNAME:Academy - Schedule', - # event - 'BEGIN:VEVENT', - f'SUMMARY:{model.cohort.name}', - f'DTSTART;TZID=America/Bogota:{starting_at}', - f'DTEND;TZID=America/Bogota:{ending_at}', - f'DTSTAMP:{starting_at_utc}', - f'UID:breathecode_cohort_time_slot_{model.cohort_time_slot.id}_{key}', - f'RRULE:FREQ=WEEKLY;UNTIL=20601231T212600Z', - f'LOCATION:{model.academy.name}', - 'END:VEVENT', - 'END:VCALENDAR', - '', - ]) - - self.assertEqual(response.content.decode('utf-8'), expected) + starting_at = self.datetime_to_ical( + fix_datetime_weekday( + model.cohort.kickoff_date, + DatetimeInteger.to_datetime(model.cohort_time_slot.timezone, model.cohort_time_slot.starting_at), + next=True, + ), + utc=False, + ) + + starting_at_utc = self.datetime_to_ical( + DatetimeInteger.to_utc_datetime(model.cohort_time_slot.timezone, model.cohort_time_slot.starting_at), + utc=True, + ) + + ending_at = self.datetime_to_ical( + fix_datetime_weekday( + model.cohort.kickoff_date, + DatetimeInteger.to_datetime(model.cohort_time_slot.timezone, model.cohort_time_slot.ending_at), + next=True, + ), + utc=False, + ) + + expected = "\r\n".join( + [ + "BEGIN:VCALENDAR", + "VERSION:2.0", + f"PRODID:-//4Geeks//Student Schedule (1) {key}//EN", + "METHOD:PUBLISH", + "REFRESH-INTERVAL;VALUE=DURATION:PT15M", + "URL:http://localhost:8000/v1/events/ical/student/1", + "X-WR-CALDESC:", + "X-WR-CALNAME:Academy - Schedule", + # event + "BEGIN:VEVENT", + f"SUMMARY:{model.cohort.name}", + f"DTSTART;TZID=America/Bogota:{starting_at}", + f"DTEND;TZID=America/Bogota:{ending_at}", + f"DTSTAMP:{starting_at_utc}", + f"UID:breathecode_cohort_time_slot_{model.cohort_time_slot.id}_{key}", + f"RRULE:FREQ=WEEKLY;UNTIL=20601231T212600Z", + f"LOCATION:{model.academy.name}", + "END:VEVENT", + "END:VCALENDAR", + "", + ] + ) + + self.assertEqual(response.content.decode("utf-8"), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) """ 🔽🔽🔽 One time slot it's not recurrent """ - @patch('breathecode.events.tasks.build_live_classes_from_timeslot.delay', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.events.tasks.build_live_classes_from_timeslot.delay", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def 
test_ical_cohorts__with_one__not_recurrent(self): - device_id_kwargs = {'name': 'server'} + device_id_kwargs = {"name": "server"} cohort_kwargs = { - 'ending_date': datetime(year=2060, day=31, month=12, hour=12, minute=0, second=0, tzinfo=pytz.UTC) + "ending_date": datetime(year=2060, day=31, month=12, hour=12, minute=0, second=0, tzinfo=pytz.UTC) } # don't forget 🦾 2021 - 1010 datetime_integer = 202109111330 cohort_time_slot_kwargs = { - 'timezone': 'America/Bogota', - 'starting_at': datetime_integer, - 'ending_at': datetime_integer, - 'recurrent': False, + "timezone": "America/Bogota", + "starting_at": datetime_integer, + "ending_at": datetime_integer, + "recurrent": False, } - model = self.generate_models(academy=True, - device_id=True, - device_id_kwargs=device_id_kwargs, - cohort={'kickoff_date': datetime.today().isoformat()}, - cohort_user=True, - cohort_time_slot=True, - cohort_kwargs=cohort_kwargs, - cohort_time_slot_kwargs=cohort_time_slot_kwargs) - - url = reverse_lazy('events:ical_student_id', kwargs={'user_id': 1}) + model = self.generate_models( + academy=True, + device_id=True, + device_id_kwargs=device_id_kwargs, + cohort={"kickoff_date": datetime.today().isoformat()}, + cohort_user=True, + cohort_time_slot=True, + cohort_kwargs=cohort_kwargs, + cohort_time_slot_kwargs=cohort_time_slot_kwargs, + ) + + url = reverse_lazy("events:ical_student_id", kwargs={"user_id": 1}) response = self.client.get(url) key = model.device_id.key - starting_at = self.datetime_to_ical(fix_datetime_weekday(model.cohort.kickoff_date, - DatetimeInteger.to_datetime( - model.cohort_time_slot.timezone, - model.cohort_time_slot.starting_at), - next=True), - utc=False) - - starting_at_utc = self.datetime_to_ical(DatetimeInteger.to_utc_datetime(model.cohort_time_slot.timezone, - model.cohort_time_slot.starting_at), - utc=True) - - ending_at = self.datetime_to_ical(fix_datetime_weekday(model.cohort.kickoff_date, - DatetimeInteger.to_datetime( - model.cohort_time_slot.timezone, - model.cohort_time_slot.ending_at), - next=True), - utc=False) - - expected = '\r\n'.join([ - 'BEGIN:VCALENDAR', - 'VERSION:2.0', - f'PRODID:-//4Geeks//Student Schedule (1) {key}//EN', - 'METHOD:PUBLISH', - 'REFRESH-INTERVAL;VALUE=DURATION:PT15M', - 'URL:http://localhost:8000/v1/events/ical/student/1', - 'X-WR-CALDESC:', - 'X-WR-CALNAME:Academy - Schedule', - # event - 'BEGIN:VEVENT', - f'SUMMARY:{model.cohort.name}', - f'DTSTART;TZID=America/Bogota:{starting_at}', - f'DTEND;TZID=America/Bogota:{ending_at}', - f'DTSTAMP:{starting_at_utc}', - f'UID:breathecode_cohort_time_slot_{model.cohort_time_slot.id}_{key}', - f'LOCATION:{model.academy.name}', - 'END:VEVENT', - 'END:VCALENDAR', - '', - ]) - - self.assertEqual(response.content.decode('utf-8'), expected) + starting_at = self.datetime_to_ical( + fix_datetime_weekday( + model.cohort.kickoff_date, + DatetimeInteger.to_datetime(model.cohort_time_slot.timezone, model.cohort_time_slot.starting_at), + next=True, + ), + utc=False, + ) + + starting_at_utc = self.datetime_to_ical( + DatetimeInteger.to_utc_datetime(model.cohort_time_slot.timezone, model.cohort_time_slot.starting_at), + utc=True, + ) + + ending_at = self.datetime_to_ical( + fix_datetime_weekday( + model.cohort.kickoff_date, + DatetimeInteger.to_datetime(model.cohort_time_slot.timezone, model.cohort_time_slot.ending_at), + next=True, + ), + utc=False, + ) + + expected = "\r\n".join( + [ + "BEGIN:VCALENDAR", + "VERSION:2.0", + f"PRODID:-//4Geeks//Student Schedule (1) {key}//EN", + "METHOD:PUBLISH", + 
"REFRESH-INTERVAL;VALUE=DURATION:PT15M", + "URL:http://localhost:8000/v1/events/ical/student/1", + "X-WR-CALDESC:", + "X-WR-CALNAME:Academy - Schedule", + # event + "BEGIN:VEVENT", + f"SUMMARY:{model.cohort.name}", + f"DTSTART;TZID=America/Bogota:{starting_at}", + f"DTEND;TZID=America/Bogota:{ending_at}", + f"DTSTAMP:{starting_at_utc}", + f"UID:breathecode_cohort_time_slot_{model.cohort_time_slot.id}_{key}", + f"LOCATION:{model.academy.name}", + "END:VEVENT", + "END:VCALENDAR", + "", + ] + ) + + self.assertEqual(response.content.decode("utf-8"), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) """ 🔽🔽🔽 One time slot without cohort ending date """ - @patch('breathecode.events.tasks.build_live_classes_from_timeslot.delay', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.events.tasks.build_live_classes_from_timeslot.delay", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_ical_cohorts__with_one__without_ending_date(self): - device_id_kwargs = {'name': 'server'} + device_id_kwargs = {"name": "server"} # don't forget 🦾 2021 - 1010 datetime_integer = 202109111330 cohort_time_slot_kwargs = { - 'timezone': 'America/Bogota', - 'starting_at': datetime_integer, - 'ending_at': datetime_integer, + "timezone": "America/Bogota", + "starting_at": datetime_integer, + "ending_at": datetime_integer, } - model = self.generate_models(academy=True, - device_id=True, - device_id_kwargs=device_id_kwargs, - cohort={'kickoff_date': datetime.today().isoformat()}, - cohort_user=True, - cohort_time_slot=True, - cohort_time_slot_kwargs=cohort_time_slot_kwargs) - - url = reverse_lazy('events:ical_student_id', kwargs={'user_id': 1}) + model = self.generate_models( + academy=True, + device_id=True, + device_id_kwargs=device_id_kwargs, + cohort={"kickoff_date": datetime.today().isoformat()}, + cohort_user=True, + cohort_time_slot=True, + cohort_time_slot_kwargs=cohort_time_slot_kwargs, + ) + + url = reverse_lazy("events:ical_student_id", kwargs={"user_id": 1}) response = self.client.get(url) key = model.device_id.key - starting_at = self.datetime_to_ical(fix_datetime_weekday(model.cohort.kickoff_date, - DatetimeInteger.to_datetime( - model.cohort_time_slot.timezone, - model.cohort_time_slot.starting_at), - next=True), - utc=False) - - created_at = self.datetime_to_ical(DatetimeInteger.to_utc_datetime(model.cohort_time_slot.timezone, - model.cohort_time_slot.starting_at), - utc=True) - - ending_at = self.datetime_to_ical(fix_datetime_weekday(model.cohort.kickoff_date, - DatetimeInteger.to_datetime( - model.cohort_time_slot.timezone, - model.cohort_time_slot.ending_at), - next=True), - utc=False) - - expected = '\r\n'.join([ - 'BEGIN:VCALENDAR', - 'VERSION:2.0', - f'PRODID:-//4Geeks//Student Schedule (1) {key}//EN', - 'METHOD:PUBLISH', - 'REFRESH-INTERVAL;VALUE=DURATION:PT15M', - 'URL:http://localhost:8000/v1/events/ical/student/1', - 'X-WR-CALDESC:', - 'X-WR-CALNAME:Academy - Schedule', - 'END:VCALENDAR', - '', - ]) - - self.assertEqual(response.content.decode('utf-8'), expected) + starting_at = self.datetime_to_ical( + fix_datetime_weekday( + model.cohort.kickoff_date, + DatetimeInteger.to_datetime(model.cohort_time_slot.timezone, 
model.cohort_time_slot.starting_at), + next=True, + ), + utc=False, + ) + + created_at = self.datetime_to_ical( + DatetimeInteger.to_utc_datetime(model.cohort_time_slot.timezone, model.cohort_time_slot.starting_at), + utc=True, + ) + + ending_at = self.datetime_to_ical( + fix_datetime_weekday( + model.cohort.kickoff_date, + DatetimeInteger.to_datetime(model.cohort_time_slot.timezone, model.cohort_time_slot.ending_at), + next=True, + ), + utc=False, + ) + + expected = "\r\n".join( + [ + "BEGIN:VCALENDAR", + "VERSION:2.0", + f"PRODID:-//4Geeks//Student Schedule (1) {key}//EN", + "METHOD:PUBLISH", + "REFRESH-INTERVAL;VALUE=DURATION:PT15M", + "URL:http://localhost:8000/v1/events/ical/student/1", + "X-WR-CALDESC:", + "X-WR-CALNAME:Academy - Schedule", + "END:VCALENDAR", + "", + ] + ) + + self.assertEqual(response.content.decode("utf-8"), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) """ 🔽🔽🔽 One time slot with cohort stage deleted """ - @patch('breathecode.events.tasks.build_live_classes_from_timeslot.delay', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.events.tasks.build_live_classes_from_timeslot.delay", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_ical_cohorts__with_one__stage_deleted(self): - device_id_kwargs = {'name': 'server'} - cohort_kwargs = {'stage': 'DELETED'} + device_id_kwargs = {"name": "server"} + cohort_kwargs = {"stage": "DELETED"} # don't forget 🦾 2021 - 1010 datetime_integer = 202109111330 cohort_time_slot_kwargs = { - 'timezone': 'America/Bogota', - 'starting_at': datetime_integer, - 'ending_at': datetime_integer, + "timezone": "America/Bogota", + "starting_at": datetime_integer, + "ending_at": datetime_integer, } - model = self.generate_models(academy=True, - device_id=True, - device_id_kwargs=device_id_kwargs, - cohort_user=True, - cohort_time_slot=True, - cohort_kwargs=cohort_kwargs, - cohort_time_slot_kwargs=cohort_time_slot_kwargs) - - url = reverse_lazy('events:ical_student_id', kwargs={'user_id': 1}) + model = self.generate_models( + academy=True, + device_id=True, + device_id_kwargs=device_id_kwargs, + cohort_user=True, + cohort_time_slot=True, + cohort_kwargs=cohort_kwargs, + cohort_time_slot_kwargs=cohort_time_slot_kwargs, + ) + + url = reverse_lazy("events:ical_student_id", kwargs={"user_id": 1}) response = self.client.get(url) key = model.device_id.key - expected = '\r\n'.join([ - 'BEGIN:VCALENDAR', - 'VERSION:2.0', - f'PRODID:-//4Geeks//Student Schedule (1) {key}//EN', - 'METHOD:PUBLISH', - 'REFRESH-INTERVAL;VALUE=DURATION:PT15M', - 'URL:http://localhost:8000/v1/events/ical/student/1', - 'X-WR-CALDESC:', - 'X-WR-CALNAME:Academy - Schedule', - 'END:VCALENDAR', - '', - ]) - - self.assertEqual(response.content.decode('utf-8'), expected) + expected = "\r\n".join( + [ + "BEGIN:VCALENDAR", + "VERSION:2.0", + f"PRODID:-//4Geeks//Student Schedule (1) {key}//EN", + "METHOD:PUBLISH", + "REFRESH-INTERVAL;VALUE=DURATION:PT15M", + "URL:http://localhost:8000/v1/events/ical/student/1", + "X-WR-CALDESC:", + "X-WR-CALNAME:Academy - Schedule", + "END:VCALENDAR", + "", + ] + ) + + self.assertEqual(response.content.decode("utf-8"), expected) self.assertEqual(response.status_code, 
status.HTTP_200_OK) """ 🔽🔽🔽 One time slot with incoming true in querystring """ - @patch('breathecode.events.tasks.build_live_classes_from_timeslot.delay', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.events.tasks.build_live_classes_from_timeslot.delay", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_ical_cohorts__with_one__with_incoming_true__return_zero_time_slots(self): - device_id_kwargs = {'name': 'server'} + device_id_kwargs = {"name": "server"} # don't forget 🦾 2021 - 1010 datetime_integer = 202109111330 cohort_time_slot_kwargs = { - 'timezone': 'America/Bogota', - 'starting_at': datetime_integer, - 'ending_at': datetime_integer, + "timezone": "America/Bogota", + "starting_at": datetime_integer, + "ending_at": datetime_integer, } - model = self.generate_models(academy=True, - device_id=True, - device_id_kwargs=device_id_kwargs, - cohort_user=True, - cohort_time_slot=True, - cohort_time_slot_kwargs=cohort_time_slot_kwargs) + model = self.generate_models( + academy=True, + device_id=True, + device_id_kwargs=device_id_kwargs, + cohort_user=True, + cohort_time_slot=True, + cohort_time_slot_kwargs=cohort_time_slot_kwargs, + ) - url = reverse_lazy('events:ical_student_id', kwargs={'user_id': 1}) - args = {'upcoming': 'true'} - response = self.client.get(url + '?' + urllib.parse.urlencode(args)) + url = reverse_lazy("events:ical_student_id", kwargs={"user_id": 1}) + args = {"upcoming": "true"} + response = self.client.get(url + "?" 
+ urllib.parse.urlencode(args)) key = model.device_id.key - expected = '\r\n'.join([ - 'BEGIN:VCALENDAR', - 'VERSION:2.0', - f'PRODID:-//4Geeks//Student Schedule (1) {key}//EN', - 'METHOD:PUBLISH', - 'REFRESH-INTERVAL;VALUE=DURATION:PT15M', - 'URL:http://localhost:8000/v1/events/ical/student/1', - 'X-WR-CALDESC:', - 'X-WR-CALNAME:Academy - Schedule', - 'END:VCALENDAR', - '', - ]) - - self.assertEqual(response.content.decode('utf-8'), expected) + expected = "\r\n".join( + [ + "BEGIN:VCALENDAR", + "VERSION:2.0", + f"PRODID:-//4Geeks//Student Schedule (1) {key}//EN", + "METHOD:PUBLISH", + "REFRESH-INTERVAL;VALUE=DURATION:PT15M", + "URL:http://localhost:8000/v1/events/ical/student/1", + "X-WR-CALDESC:", + "X-WR-CALNAME:Academy - Schedule", + "END:VCALENDAR", + "", + ] + ) + + self.assertEqual(response.content.decode("utf-8"), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - @patch('breathecode.events.tasks.build_live_classes_from_timeslot.delay', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.events.tasks.build_live_classes_from_timeslot.delay", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_ical_cohorts__with_one__with_incoming_true(self): - device_id_kwargs = {'name': 'server'} + device_id_kwargs = {"name": "server"} cohort_kwargs = { - 'kickoff_date': timezone.now() + timedelta(days=2), - 'ending_date': datetime(year=2060, - day=31, - month=12, - hour=12, - minute=0, - second=0, - tzinfo=pytz.timezone('UTC')), + "kickoff_date": timezone.now() + timedelta(days=2), + "ending_date": datetime( + year=2060, day=31, month=12, hour=12, minute=0, second=0, tzinfo=pytz.timezone("UTC") + ), } # don't forget 🦾 2021 - 1010 datetime_integer = 202109111330 cohort_time_slot_kwargs = { - 'timezone': 'America/Bogota', - 'starting_at': datetime_integer, - 'ending_at': datetime_integer, + "timezone": "America/Bogota", + "starting_at": datetime_integer, + "ending_at": datetime_integer, } - model = self.generate_models(academy=True, - device_id=True, - device_id_kwargs=device_id_kwargs, - cohort_user=True, - cohort_time_slot=True, - cohort_kwargs=cohort_kwargs, - cohort_time_slot_kwargs=cohort_time_slot_kwargs) + model = self.generate_models( + academy=True, + device_id=True, + device_id_kwargs=device_id_kwargs, + cohort_user=True, + cohort_time_slot=True, + cohort_kwargs=cohort_kwargs, + cohort_time_slot_kwargs=cohort_time_slot_kwargs, + ) - url = reverse_lazy('events:ical_student_id', kwargs={'user_id': 1}) - args = {'upcoming': 'true'} - response = self.client.get(url + '?' + urllib.parse.urlencode(args)) + url = reverse_lazy("events:ical_student_id", kwargs={"user_id": 1}) + args = {"upcoming": "true"} + response = self.client.get(url + "?" 
+ urllib.parse.urlencode(args)) key = model.device_id.key - starting_at = self.datetime_to_ical(fix_datetime_weekday(model.cohort.kickoff_date, - DatetimeInteger.to_datetime( - model.cohort_time_slot.timezone, - model.cohort_time_slot.starting_at), - next=True), - utc=False) - - starting_at_utc = self.datetime_to_ical(DatetimeInteger.to_utc_datetime(model.cohort_time_slot.timezone, - model.cohort_time_slot.starting_at), - utc=True) - - ending_at = fix_datetime_weekday(model.cohort.kickoff_date, - DatetimeInteger.to_datetime(model.cohort_time_slot.timezone, - model.cohort_time_slot.ending_at), - next=True) + starting_at = self.datetime_to_ical( + fix_datetime_weekday( + model.cohort.kickoff_date, + DatetimeInteger.to_datetime(model.cohort_time_slot.timezone, model.cohort_time_slot.starting_at), + next=True, + ), + utc=False, + ) + + starting_at_utc = self.datetime_to_ical( + DatetimeInteger.to_utc_datetime(model.cohort_time_slot.timezone, model.cohort_time_slot.starting_at), + utc=True, + ) + + ending_at = fix_datetime_weekday( + model.cohort.kickoff_date, + DatetimeInteger.to_datetime(model.cohort_time_slot.timezone, model.cohort_time_slot.ending_at), + next=True, + ) ending_at = self.datetime_to_ical(ending_at, utc=False) - expected = '\r\n'.join([ - 'BEGIN:VCALENDAR', - 'VERSION:2.0', - f'PRODID:-//4Geeks//Student Schedule (1) {key}//EN', - 'METHOD:PUBLISH', - 'REFRESH-INTERVAL;VALUE=DURATION:PT15M', - 'URL:http://localhost:8000/v1/events/ical/student/1', - 'X-WR-CALDESC:', - 'X-WR-CALNAME:Academy - Schedule', - # event - 'BEGIN:VEVENT', - f'SUMMARY:{model.cohort.name}', - f'DTSTART;TZID=America/Bogota:{starting_at}', - f'DTEND;TZID=America/Bogota:{ending_at}', - f'DTSTAMP:{starting_at_utc}', - f'UID:breathecode_cohort_time_slot_{model.cohort_time_slot.id}_{key}', - f'RRULE:FREQ=WEEKLY;UNTIL=20601231T212600Z', - f'LOCATION:{model.academy.name}', - 'END:VEVENT', - 'END:VCALENDAR', - '', - ]) - - self.assertEqual(response.content.decode('utf-8'), expected) + expected = "\r\n".join( + [ + "BEGIN:VCALENDAR", + "VERSION:2.0", + f"PRODID:-//4Geeks//Student Schedule (1) {key}//EN", + "METHOD:PUBLISH", + "REFRESH-INTERVAL;VALUE=DURATION:PT15M", + "URL:http://localhost:8000/v1/events/ical/student/1", + "X-WR-CALDESC:", + "X-WR-CALNAME:Academy - Schedule", + # event + "BEGIN:VEVENT", + f"SUMMARY:{model.cohort.name}", + f"DTSTART;TZID=America/Bogota:{starting_at}", + f"DTEND;TZID=America/Bogota:{ending_at}", + f"DTSTAMP:{starting_at_utc}", + f"UID:breathecode_cohort_time_slot_{model.cohort_time_slot.id}_{key}", + f"RRULE:FREQ=WEEKLY;UNTIL=20601231T212600Z", + f"LOCATION:{model.academy.name}", + "END:VEVENT", + "END:VCALENDAR", + "", + ] + ) + + self.assertEqual(response.content.decode("utf-8"), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) """ 🔽🔽🔽 One time slot with teacher """ - @patch('breathecode.events.tasks.build_live_classes_from_timeslot.delay', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.events.tasks.build_live_classes_from_timeslot.delay", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_ical_cohorts__with_one__with_teacher(self): - device_id_kwargs = {'name': 'server'} + device_id_kwargs = {"name": 
"server"} cohort_kwargs = { - 'ending_date': datetime(year=2060, day=31, month=12, hour=12, minute=0, second=0, tzinfo=pytz.UTC) + "ending_date": datetime(year=2060, day=31, month=12, hour=12, minute=0, second=0, tzinfo=pytz.UTC) } - teacher_kwargs = {'role': 'TEACHER'} + teacher_kwargs = {"role": "TEACHER"} # don't forget 🦾 2021 - 1010 datetime_integer = 202109111330 cohort_time_slot_kwargs = { - 'timezone': 'America/Bogota', - 'starting_at': datetime_integer, - 'ending_at': datetime_integer, + "timezone": "America/Bogota", + "starting_at": datetime_integer, + "ending_at": datetime_integer, } - base = self.generate_models(academy=True, - device_id=True, - cohort={'kickoff_date': datetime.today().isoformat()}, - device_id_kwargs=device_id_kwargs, - cohort_kwargs=cohort_kwargs) + base = self.generate_models( + academy=True, + device_id=True, + cohort={"kickoff_date": datetime.today().isoformat()}, + device_id_kwargs=device_id_kwargs, + cohort_kwargs=cohort_kwargs, + ) models = [ - self.generate_models(cohort={'kickoff_date': datetime.today().isoformat()}, - cohort_user=True, - cohort_time_slot=True, - cohort_time_slot_kwargs=cohort_time_slot_kwargs, - models=base), - self.generate_models(cohort_user=True, - cohort={'kickoff_date': datetime.today().isoformat()}, - models=base, - cohort_user_kwargs=teacher_kwargs), + self.generate_models( + cohort={"kickoff_date": datetime.today().isoformat()}, + cohort_user=True, + cohort_time_slot=True, + cohort_time_slot_kwargs=cohort_time_slot_kwargs, + models=base, + ), + self.generate_models( + cohort_user=True, + cohort={"kickoff_date": datetime.today().isoformat()}, + models=base, + cohort_user_kwargs=teacher_kwargs, + ), ] - url = reverse_lazy('events:ical_student_id', kwargs={'user_id': 1}) + url = reverse_lazy("events:ical_student_id", kwargs={"user_id": 1}) response = self.client.get(url) model1 = models[0] model2 = models[1] key = model1.device_id.key - starting_at = self.datetime_to_ical(fix_datetime_weekday(model1.cohort.kickoff_date, - DatetimeInteger.to_datetime( - model1.cohort_time_slot.timezone, - model1.cohort_time_slot.starting_at), - next=True), - utc=False) - - starting_at_utc = self.datetime_to_ical(DatetimeInteger.to_utc_datetime(model1.cohort_time_slot.timezone, - model1.cohort_time_slot.starting_at), - utc=True) - - ending_at = self.datetime_to_ical(fix_datetime_weekday(model1.cohort.kickoff_date, - DatetimeInteger.to_datetime( - model1.cohort_time_slot.timezone, - model1.cohort_time_slot.ending_at), - next=True), - utc=False) - - expected = '\r\n'.join([ - 'BEGIN:VCALENDAR', - 'VERSION:2.0', - f'PRODID:-//4Geeks//Student Schedule (1) {key}//EN', - 'METHOD:PUBLISH', - 'REFRESH-INTERVAL;VALUE=DURATION:PT15M', - 'URL:http://localhost:8000/v1/events/ical/student/1', - 'X-WR-CALDESC:', - 'X-WR-CALNAME:Academy - Schedule', - # event - 'BEGIN:VEVENT', - f'SUMMARY:{model1.cohort.name}', - f'DTSTART;TZID=America/Bogota:{starting_at}', - f'DTEND;TZID=America/Bogota:{ending_at}', - f'DTSTAMP:{starting_at_utc}', - f'UID:breathecode_cohort_time_slot_{model1.cohort_time_slot.id}_{key}', - f'RRULE:FREQ=WEEKLY;UNTIL=20601231T212600Z', - f'LOCATION:{model1.academy.name}', - self.line_limit(f'ORGANIZER;CN="{model2.user.first_name} ' - f'{model2.user.last_name}";ROLE=OWNER:MAILTO:{model2.user.email}'), - 'END:VEVENT', - 'END:VCALENDAR', - '', - ]) - - self.assertEqual(response.content.decode('utf-8'), expected) + starting_at = self.datetime_to_ical( + fix_datetime_weekday( + model1.cohort.kickoff_date, + 
DatetimeInteger.to_datetime(model1.cohort_time_slot.timezone, model1.cohort_time_slot.starting_at), + next=True, + ), + utc=False, + ) + + starting_at_utc = self.datetime_to_ical( + DatetimeInteger.to_utc_datetime(model1.cohort_time_slot.timezone, model1.cohort_time_slot.starting_at), + utc=True, + ) + + ending_at = self.datetime_to_ical( + fix_datetime_weekday( + model1.cohort.kickoff_date, + DatetimeInteger.to_datetime(model1.cohort_time_slot.timezone, model1.cohort_time_slot.ending_at), + next=True, + ), + utc=False, + ) + + expected = "\r\n".join( + [ + "BEGIN:VCALENDAR", + "VERSION:2.0", + f"PRODID:-//4Geeks//Student Schedule (1) {key}//EN", + "METHOD:PUBLISH", + "REFRESH-INTERVAL;VALUE=DURATION:PT15M", + "URL:http://localhost:8000/v1/events/ical/student/1", + "X-WR-CALDESC:", + "X-WR-CALNAME:Academy - Schedule", + # event + "BEGIN:VEVENT", + f"SUMMARY:{model1.cohort.name}", + f"DTSTART;TZID=America/Bogota:{starting_at}", + f"DTEND;TZID=America/Bogota:{ending_at}", + f"DTSTAMP:{starting_at_utc}", + f"UID:breathecode_cohort_time_slot_{model1.cohort_time_slot.id}_{key}", + f"RRULE:FREQ=WEEKLY;UNTIL=20601231T212600Z", + f"LOCATION:{model1.academy.name}", + self.line_limit( + f'ORGANIZER;CN="{model2.user.first_name} ' + f'{model2.user.last_name}";ROLE=OWNER:MAILTO:{model2.user.email}' + ), + "END:VEVENT", + "END:VCALENDAR", + "", + ] + ) + + self.assertEqual(response.content.decode("utf-8"), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) """ 🔽🔽🔽 Two time slot with teacher """ - @patch('breathecode.events.tasks.build_live_classes_from_timeslot.delay', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.events.tasks.build_live_classes_from_timeslot.delay", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_ical_cohort__with_two__with_teacher(self): - device_id_kwargs = {'name': 'server'} + device_id_kwargs = {"name": "server"} cohort_kwargs = { - 'ending_date': datetime(year=2060, day=31, month=12, hour=12, minute=0, second=0, tzinfo=pytz.UTC) + "ending_date": datetime(year=2060, day=31, month=12, hour=12, minute=0, second=0, tzinfo=pytz.UTC) } - teacher_kwargs = {'role': 'TEACHER'} + teacher_kwargs = {"role": "TEACHER"} # don't forget 🦾 2021 - 1010 datetime_integer = 202109111330 cohort_time_slot_kwargs = { - 'timezone': 'America/Bogota', - 'starting_at': datetime_integer, - 'ending_at': datetime_integer, + "timezone": "America/Bogota", + "starting_at": datetime_integer, + "ending_at": datetime_integer, } - base = self.generate_models(academy=True, - device_id=True, - cohort={'kickoff_date': datetime.today().isoformat()}, - device_id_kwargs=device_id_kwargs, - cohort_kwargs=cohort_kwargs) + base = self.generate_models( + academy=True, + device_id=True, + cohort={"kickoff_date": datetime.today().isoformat()}, + device_id_kwargs=device_id_kwargs, + cohort_kwargs=cohort_kwargs, + ) models = [ - self.generate_models(cohort_user=True, - cohort_time_slot=True, - cohort_time_slot_kwargs=cohort_time_slot_kwargs, - models=base), + self.generate_models( + cohort_user=True, cohort_time_slot=True, cohort_time_slot_kwargs=cohort_time_slot_kwargs, models=base + ), self.generate_models(cohort_user=True, models=base, 
cohort_user_kwargs=teacher_kwargs), ] models.append( - self.generate_models(user=models[0].user, - cohort_user=models[0].cohort_user, - cohort_time_slot=True, - cohort_time_slot_kwargs=cohort_time_slot_kwargs, - models=base)) - - url = reverse_lazy('events:ical_student_id', kwargs={'user_id': 1}) + self.generate_models( + user=models[0].user, + cohort_user=models[0].cohort_user, + cohort_time_slot=True, + cohort_time_slot_kwargs=cohort_time_slot_kwargs, + models=base, + ) + ) + + url = reverse_lazy("events:ical_student_id", kwargs={"user_id": 1}) response = self.client.get(url) model1 = models[0] # student @@ -699,129 +765,152 @@ def test_ical_cohort__with_two__with_teacher(self): model3 = models[2] # student key = model1.device_id.key - starting_at1 = self.datetime_to_ical(fix_datetime_weekday(model1.cohort.kickoff_date, - DatetimeInteger.to_datetime( - model1.cohort_time_slot.timezone, - model1.cohort_time_slot.starting_at), - next=True), - utc=False) - - starting_at3 = self.datetime_to_ical(fix_datetime_weekday(model3.cohort.kickoff_date, - DatetimeInteger.to_datetime( - model3.cohort_time_slot.timezone, - model3.cohort_time_slot.starting_at), - next=True), - utc=False) - - starting_at_utc1 = self.datetime_to_ical(DatetimeInteger.to_utc_datetime(model1.cohort_time_slot.timezone, - model1.cohort_time_slot.starting_at), - utc=True) - - starting_at_utc3 = self.datetime_to_ical(DatetimeInteger.to_utc_datetime(model3.cohort_time_slot.timezone, - model3.cohort_time_slot.starting_at), - utc=True) - - ending_at1 = self.datetime_to_ical(fix_datetime_weekday(model1.cohort.kickoff_date, - DatetimeInteger.to_datetime( - model1.cohort_time_slot.timezone, - model1.cohort_time_slot.ending_at), - next=True), - utc=False) - - ending_at3 = self.datetime_to_ical(fix_datetime_weekday(model3.cohort.kickoff_date, - DatetimeInteger.to_datetime( - model3.cohort_time_slot.timezone, - model3.cohort_time_slot.ending_at), - next=True), - utc=False) - - expected = '\r\n'.join([ - 'BEGIN:VCALENDAR', - 'VERSION:2.0', - f'PRODID:-//4Geeks//Student Schedule (1) {key}//EN', - 'METHOD:PUBLISH', - 'REFRESH-INTERVAL;VALUE=DURATION:PT15M', - 'URL:http://localhost:8000/v1/events/ical/student/1', - 'X-WR-CALDESC:', - 'X-WR-CALNAME:Academy - Schedule', - # event - 'BEGIN:VEVENT', - f'SUMMARY:{model1.cohort.name}', - f'DTSTART;TZID=America/Bogota:{starting_at1}', - f'DTEND;TZID=America/Bogota:{ending_at1}', - f'DTSTAMP:{starting_at_utc1}', - f'UID:breathecode_cohort_time_slot_{model1.cohort_time_slot.id}_{key}', - f'RRULE:FREQ=WEEKLY;UNTIL=20601231T212600Z', - f'LOCATION:{model1.academy.name}', - self.line_limit(f'ORGANIZER;CN="{model2.user.first_name} ' - f'{model2.user.last_name}";ROLE=OWNER:MAILTO:{model2.user.email}'), - 'END:VEVENT', - # event - 'BEGIN:VEVENT', - f'SUMMARY:{model3.cohort.name}', - f'DTSTART;TZID=America/Bogota:{starting_at3}', - f'DTEND;TZID=America/Bogota:{ending_at3}', - f'DTSTAMP:{starting_at_utc3}', - f'UID:breathecode_cohort_time_slot_{model3.cohort_time_slot.id}_{key}', - f'RRULE:FREQ=WEEKLY;UNTIL=20601231T212600Z', - f'LOCATION:{model3.academy.name}', - self.line_limit(f'ORGANIZER;CN="{model2.user.first_name} ' - f'{model2.user.last_name}";ROLE=OWNER:MAILTO:{model2.user.email}'), - 'END:VEVENT', - 'END:VCALENDAR', - '', - ]) - - self.assertEqual(response.content.decode('utf-8'), expected) + starting_at1 = self.datetime_to_ical( + fix_datetime_weekday( + model1.cohort.kickoff_date, + DatetimeInteger.to_datetime(model1.cohort_time_slot.timezone, model1.cohort_time_slot.starting_at), + next=True, + ), 
+ utc=False, + ) + + starting_at3 = self.datetime_to_ical( + fix_datetime_weekday( + model3.cohort.kickoff_date, + DatetimeInteger.to_datetime(model3.cohort_time_slot.timezone, model3.cohort_time_slot.starting_at), + next=True, + ), + utc=False, + ) + + starting_at_utc1 = self.datetime_to_ical( + DatetimeInteger.to_utc_datetime(model1.cohort_time_slot.timezone, model1.cohort_time_slot.starting_at), + utc=True, + ) + + starting_at_utc3 = self.datetime_to_ical( + DatetimeInteger.to_utc_datetime(model3.cohort_time_slot.timezone, model3.cohort_time_slot.starting_at), + utc=True, + ) + + ending_at1 = self.datetime_to_ical( + fix_datetime_weekday( + model1.cohort.kickoff_date, + DatetimeInteger.to_datetime(model1.cohort_time_slot.timezone, model1.cohort_time_slot.ending_at), + next=True, + ), + utc=False, + ) + + ending_at3 = self.datetime_to_ical( + fix_datetime_weekday( + model3.cohort.kickoff_date, + DatetimeInteger.to_datetime(model3.cohort_time_slot.timezone, model3.cohort_time_slot.ending_at), + next=True, + ), + utc=False, + ) + + expected = "\r\n".join( + [ + "BEGIN:VCALENDAR", + "VERSION:2.0", + f"PRODID:-//4Geeks//Student Schedule (1) {key}//EN", + "METHOD:PUBLISH", + "REFRESH-INTERVAL;VALUE=DURATION:PT15M", + "URL:http://localhost:8000/v1/events/ical/student/1", + "X-WR-CALDESC:", + "X-WR-CALNAME:Academy - Schedule", + # event + "BEGIN:VEVENT", + f"SUMMARY:{model1.cohort.name}", + f"DTSTART;TZID=America/Bogota:{starting_at1}", + f"DTEND;TZID=America/Bogota:{ending_at1}", + f"DTSTAMP:{starting_at_utc1}", + f"UID:breathecode_cohort_time_slot_{model1.cohort_time_slot.id}_{key}", + f"RRULE:FREQ=WEEKLY;UNTIL=20601231T212600Z", + f"LOCATION:{model1.academy.name}", + self.line_limit( + f'ORGANIZER;CN="{model2.user.first_name} ' + f'{model2.user.last_name}";ROLE=OWNER:MAILTO:{model2.user.email}' + ), + "END:VEVENT", + # event + "BEGIN:VEVENT", + f"SUMMARY:{model3.cohort.name}", + f"DTSTART;TZID=America/Bogota:{starting_at3}", + f"DTEND;TZID=America/Bogota:{ending_at3}", + f"DTSTAMP:{starting_at_utc3}", + f"UID:breathecode_cohort_time_slot_{model3.cohort_time_slot.id}_{key}", + f"RRULE:FREQ=WEEKLY;UNTIL=20601231T212600Z", + f"LOCATION:{model3.academy.name}", + self.line_limit( + f'ORGANIZER;CN="{model2.user.first_name} ' + f'{model2.user.last_name}";ROLE=OWNER:MAILTO:{model2.user.email}' + ), + "END:VEVENT", + "END:VCALENDAR", + "", + ] + ) + + self.assertEqual(response.content.decode("utf-8"), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) """ 🔽🔽🔽 Two time slot with teacher """ - @patch('breathecode.events.tasks.build_live_classes_from_timeslot.delay', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.events.tasks.build_live_classes_from_timeslot.delay", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_ical_cohort__with_two__with_teacher__cohort_with_meeting_url(self): - device_id_kwargs = {'name': 'server'} + device_id_kwargs = {"name": "server"} cohort_kwargs = { - 'ending_date': datetime(year=2060, day=31, month=12, hour=12, minute=0, second=0, tzinfo=pytz.UTC), - 'online_meeting_url': self.bc.fake.url(), + "ending_date": datetime(year=2060, day=31, month=12, hour=12, minute=0, second=0, tzinfo=pytz.UTC), + 
"online_meeting_url": self.bc.fake.url(), } - teacher_kwargs = {'role': 'TEACHER'} + teacher_kwargs = {"role": "TEACHER"} # don't forget 🦾 2021 - 1010 datetime_integer = 202109111330 cohort_time_slot_kwargs = { - 'timezone': 'America/Bogota', - 'starting_at': datetime_integer, - 'ending_at': datetime_integer, + "timezone": "America/Bogota", + "starting_at": datetime_integer, + "ending_at": datetime_integer, } - base = self.generate_models(academy=True, - device_id=True, - cohort={'kickoff_date': datetime.today().isoformat()}, - device_id_kwargs=device_id_kwargs, - cohort_kwargs=cohort_kwargs) + base = self.generate_models( + academy=True, + device_id=True, + cohort={"kickoff_date": datetime.today().isoformat()}, + device_id_kwargs=device_id_kwargs, + cohort_kwargs=cohort_kwargs, + ) models = [ - self.generate_models(cohort={'kickoff_date': datetime.today().isoformat()}, - cohort_user=True, - cohort_time_slot=True, - cohort_time_slot_kwargs=cohort_time_slot_kwargs, - models=base), + self.generate_models( + cohort={"kickoff_date": datetime.today().isoformat()}, + cohort_user=True, + cohort_time_slot=True, + cohort_time_slot_kwargs=cohort_time_slot_kwargs, + models=base, + ), self.generate_models(cohort_user=True, models=base, cohort_user_kwargs=teacher_kwargs), ] models.append( - self.generate_models(user=models[0].user, - cohort={'kickoff_date': datetime.today().isoformat()}, - cohort_user=models[0].cohort_user, - cohort_time_slot=True, - cohort_time_slot_kwargs=cohort_time_slot_kwargs, - models=base)) - - url = reverse_lazy('events:ical_student_id', kwargs={'user_id': 1}) + self.generate_models( + user=models[0].user, + cohort={"kickoff_date": datetime.today().isoformat()}, + cohort_user=models[0].cohort_user, + cohort_time_slot=True, + cohort_time_slot_kwargs=cohort_time_slot_kwargs, + models=base, + ) + ) + + url = reverse_lazy("events:ical_student_id", kwargs={"user_id": 1}) response = self.client.get(url) model1 = models[0] # student @@ -829,78 +918,94 @@ def test_ical_cohort__with_two__with_teacher__cohort_with_meeting_url(self): model3 = models[2] # student key = model1.device_id.key - starting_at1 = self.datetime_to_ical(fix_datetime_weekday(model1.cohort.kickoff_date, - DatetimeInteger.to_datetime( - model1.cohort_time_slot.timezone, - model1.cohort_time_slot.starting_at), - next=True), - utc=False) - - starting_at3 = self.datetime_to_ical(fix_datetime_weekday(model3.cohort.kickoff_date, - DatetimeInteger.to_datetime( - model3.cohort_time_slot.timezone, - model3.cohort_time_slot.starting_at), - next=True), - utc=False) - - starting_at_utc1 = self.datetime_to_ical(DatetimeInteger.to_utc_datetime(model1.cohort_time_slot.timezone, - model1.cohort_time_slot.starting_at), - utc=True) - - starting_at_utc3 = self.datetime_to_ical(DatetimeInteger.to_utc_datetime(model3.cohort_time_slot.timezone, - model3.cohort_time_slot.starting_at), - utc=True) - - ending_at1 = self.datetime_to_ical(fix_datetime_weekday(model1.cohort.kickoff_date, - DatetimeInteger.to_datetime( - model1.cohort_time_slot.timezone, - model1.cohort_time_slot.ending_at), - next=True), - utc=False) - - ending_at3 = self.datetime_to_ical(fix_datetime_weekday(model3.cohort.kickoff_date, - DatetimeInteger.to_datetime( - model3.cohort_time_slot.timezone, - model3.cohort_time_slot.ending_at), - next=True), - utc=False) - - expected = '\r\n'.join([ - 'BEGIN:VCALENDAR', - 'VERSION:2.0', - f'PRODID:-//4Geeks//Student Schedule (1) {key}//EN', - 'METHOD:PUBLISH', - 'REFRESH-INTERVAL;VALUE=DURATION:PT15M', - 
'URL:http://localhost:8000/v1/events/ical/student/1', - 'X-WR-CALDESC:', - 'X-WR-CALNAME:Academy - Schedule', - # event - 'BEGIN:VEVENT', - f'SUMMARY:{model1.cohort.name}', - f'DTSTART;TZID=America/Bogota:{starting_at1}', - f'DTEND;TZID=America/Bogota:{ending_at1}', - f'DTSTAMP:{starting_at_utc1}', - f'UID:breathecode_cohort_time_slot_{model1.cohort_time_slot.id}_{key}', - f'RRULE:FREQ=WEEKLY;UNTIL=20601231T212600Z', - f'LOCATION:{model1.cohort.online_meeting_url}', - self.line_limit(f'ORGANIZER;CN="{model2.user.first_name} ' - f'{model2.user.last_name}";ROLE=OWNER:MAILTO:{model2.user.email}'), - 'END:VEVENT', - # event - 'BEGIN:VEVENT', - f'SUMMARY:{model3.cohort.name}', - f'DTSTART;TZID=America/Bogota:{starting_at3}', - f'DTEND;TZID=America/Bogota:{ending_at3}', - f'DTSTAMP:{starting_at_utc3}', - f'UID:breathecode_cohort_time_slot_{model3.cohort_time_slot.id}_{key}', - f'RRULE:FREQ=WEEKLY;UNTIL=20601231T212600Z', - f'LOCATION:{model3.cohort.online_meeting_url}', - self.line_limit(f'ORGANIZER;CN="{model2.user.first_name} ' - f'{model2.user.last_name}";ROLE=OWNER:MAILTO:{model2.user.email}'), - 'END:VEVENT', - 'END:VCALENDAR', - '', - ]) - - self.assertEqual(response.content.decode('utf-8'), expected) + starting_at1 = self.datetime_to_ical( + fix_datetime_weekday( + model1.cohort.kickoff_date, + DatetimeInteger.to_datetime(model1.cohort_time_slot.timezone, model1.cohort_time_slot.starting_at), + next=True, + ), + utc=False, + ) + + starting_at3 = self.datetime_to_ical( + fix_datetime_weekday( + model3.cohort.kickoff_date, + DatetimeInteger.to_datetime(model3.cohort_time_slot.timezone, model3.cohort_time_slot.starting_at), + next=True, + ), + utc=False, + ) + + starting_at_utc1 = self.datetime_to_ical( + DatetimeInteger.to_utc_datetime(model1.cohort_time_slot.timezone, model1.cohort_time_slot.starting_at), + utc=True, + ) + + starting_at_utc3 = self.datetime_to_ical( + DatetimeInteger.to_utc_datetime(model3.cohort_time_slot.timezone, model3.cohort_time_slot.starting_at), + utc=True, + ) + + ending_at1 = self.datetime_to_ical( + fix_datetime_weekday( + model1.cohort.kickoff_date, + DatetimeInteger.to_datetime(model1.cohort_time_slot.timezone, model1.cohort_time_slot.ending_at), + next=True, + ), + utc=False, + ) + + ending_at3 = self.datetime_to_ical( + fix_datetime_weekday( + model3.cohort.kickoff_date, + DatetimeInteger.to_datetime(model3.cohort_time_slot.timezone, model3.cohort_time_slot.ending_at), + next=True, + ), + utc=False, + ) + + expected = "\r\n".join( + [ + "BEGIN:VCALENDAR", + "VERSION:2.0", + f"PRODID:-//4Geeks//Student Schedule (1) {key}//EN", + "METHOD:PUBLISH", + "REFRESH-INTERVAL;VALUE=DURATION:PT15M", + "URL:http://localhost:8000/v1/events/ical/student/1", + "X-WR-CALDESC:", + "X-WR-CALNAME:Academy - Schedule", + # event + "BEGIN:VEVENT", + f"SUMMARY:{model1.cohort.name}", + f"DTSTART;TZID=America/Bogota:{starting_at1}", + f"DTEND;TZID=America/Bogota:{ending_at1}", + f"DTSTAMP:{starting_at_utc1}", + f"UID:breathecode_cohort_time_slot_{model1.cohort_time_slot.id}_{key}", + f"RRULE:FREQ=WEEKLY;UNTIL=20601231T212600Z", + f"LOCATION:{model1.cohort.online_meeting_url}", + self.line_limit( + f'ORGANIZER;CN="{model2.user.first_name} ' + f'{model2.user.last_name}";ROLE=OWNER:MAILTO:{model2.user.email}' + ), + "END:VEVENT", + # event + "BEGIN:VEVENT", + f"SUMMARY:{model3.cohort.name}", + f"DTSTART;TZID=America/Bogota:{starting_at3}", + f"DTEND;TZID=America/Bogota:{ending_at3}", + f"DTSTAMP:{starting_at_utc3}", + 
f"UID:breathecode_cohort_time_slot_{model3.cohort_time_slot.id}_{key}", + f"RRULE:FREQ=WEEKLY;UNTIL=20601231T212600Z", + f"LOCATION:{model3.cohort.online_meeting_url}", + self.line_limit( + f'ORGANIZER;CN="{model2.user.first_name} ' + f'{model2.user.last_name}";ROLE=OWNER:MAILTO:{model2.user.email}' + ), + "END:VEVENT", + "END:VCALENDAR", + "", + ] + ) + + self.assertEqual(response.content.decode("utf-8"), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) diff --git a/breathecode/events/tests/urls/tests_me.py b/breathecode/events/tests/urls/tests_me.py index 3eb4bc804..25edf0a0e 100644 --- a/breathecode/events/tests/urls/tests_me.py +++ b/breathecode/events/tests/urls/tests_me.py @@ -13,160 +13,165 @@ def visibility_settings_serializer(visibility_settings): all_vs = visibility_settings.all() - serialized_vs = [{ - 'id': item.id, - 'cohort': { - 'id': item.cohort.id, - 'name': item.cohort.name, - 'slug': item.cohort.slug, - } if item.cohort else None, - 'academy': { - 'id': item.academy.id, - 'name': item.academy.name, - 'slug': item.academy.slug, - }, - 'syllabus': { - 'id': item.syllabus.id, - 'name': item.syllabus.name, - 'slug': item.syllabus.slug, - } if item.syllabus else None, - } for item in all_vs] + serialized_vs = [ + { + "id": item.id, + "cohort": ( + { + "id": item.cohort.id, + "name": item.cohort.name, + "slug": item.cohort.slug, + } + if item.cohort + else None + ), + "academy": { + "id": item.academy.id, + "name": item.academy.name, + "slug": item.academy.slug, + }, + "syllabus": ( + { + "id": item.syllabus.id, + "name": item.syllabus.name, + "slug": item.syllabus.slug, + } + if item.syllabus + else None + ), + } + for item in all_vs + ] return serialized_vs def profile_translation_serializer(profile_translation): return { - 'bio': profile_translation.bio, - 'lang': profile_translation.lang, + "bio": profile_translation.bio, + "lang": profile_translation.lang, } def profile_serializer(profile, profile_translations=[]): return { - 'avatar_url': profile.avatar_url, - 'bio': profile.bio, - 'blog': profile.blog, - 'github_username': profile.github_username, - 'linkedin_url': profile.linkedin_url, - 'phone': profile.phone, - 'portfolio_url': profile.portfolio_url, - 'translations': [profile_translation_serializer(item) for item in profile_translations], - 'twitter_username': profile.twitter_username, + "avatar_url": profile.avatar_url, + "bio": profile.bio, + "blog": profile.blog, + "github_username": profile.github_username, + "linkedin_url": profile.linkedin_url, + "phone": profile.phone, + "portfolio_url": profile.portfolio_url, + "translations": [profile_translation_serializer(item) for item in profile_translations], + "twitter_username": profile.twitter_username, } -def get_serializer(self, - event, - event_type, - user, - academy=None, - city=None, - profile=None, - profile_translations=[], - data={}): +def get_serializer( + self, event, event_type, user, academy=None, city=None, profile=None, profile_translations=[], data={} +): academy_serialized = None city_serialized = None if city: city_serialized = { - 'name': city.name, + "name": city.name, } if academy: academy_serialized = { - 'city': city_serialized, - 'id': academy.id, - 'name': academy.name, - 'slug': academy.slug, + "city": city_serialized, + "id": academy.id, + "name": academy.name, + "slug": academy.slug, } return { - 'academy': academy_serialized, - 'asset': None, - 'author': { - 'id': user.id, - 'first_name': user.first_name, - 'last_name': user.last_name, + "academy": academy_serialized, 
+ "asset": None, + "author": { + "id": user.id, + "first_name": user.first_name, + "last_name": user.last_name, }, - 'host_user': { - 'id': user.id, - 'first_name': user.first_name, - 'last_name': user.last_name, - 'profile': profile_serializer(profile, profile_translations) if profile else None, + "host_user": { + "id": user.id, + "first_name": user.first_name, + "last_name": user.last_name, + "profile": profile_serializer(profile, profile_translations) if profile else None, }, - 'banner': event.banner, - 'capacity': event.capacity, - 'created_at': self.bc.datetime.to_iso_string(event.created_at), - 'currency': event.currency, - 'description': event.description, - 'ending_at': self.bc.datetime.to_iso_string(event.ending_at), - 'event_type': { - 'academy': academy_serialized, - 'id': event_type.id, - 'name': event_type.name, - 'slug': event_type.slug, - 'lang': event_type.lang, - 'icon_url': event_type.icon_url, - 'allow_shared_creation': event_type.allow_shared_creation, - 'description': event_type.description, - 'visibility_settings': visibility_settings_serializer(event_type.visibility_settings), + "banner": event.banner, + "capacity": event.capacity, + "created_at": self.bc.datetime.to_iso_string(event.created_at), + "currency": event.currency, + "description": event.description, + "ending_at": self.bc.datetime.to_iso_string(event.ending_at), + "event_type": { + "academy": academy_serialized, + "id": event_type.id, + "name": event_type.name, + "slug": event_type.slug, + "lang": event_type.lang, + "icon_url": event_type.icon_url, + "allow_shared_creation": event_type.allow_shared_creation, + "description": event_type.description, + "visibility_settings": visibility_settings_serializer(event_type.visibility_settings), }, - 'eventbrite_id': event.eventbrite_id, - 'eventbrite_organizer_id': event.eventbrite_organizer_id, - 'eventbrite_status': event.eventbrite_status, - 'eventbrite_sync_description': event.eventbrite_sync_description, - 'eventbrite_sync_status': event.eventbrite_sync_status, - 'eventbrite_url': event.eventbrite_url, - 'excerpt': event.excerpt, - 'host': event.host, - 'id': event.id, - 'lang': event.lang, - 'online_event': event.online_event, - 'free_for_bootcamps': event.free_for_bootcamps, - 'free_for_all': event.free_for_all, - 'live_stream_url': event.live_stream_url, - 'asset_slug': event.asset_slug, - 'organization': event.organization, - 'published_at': event.published_at, - 'slug': event.slug, - 'ended_at': event.ended_at, - 'starting_at': self.bc.datetime.to_iso_string(event.starting_at), - 'status': event.status, - 'sync_with_eventbrite': event.sync_with_eventbrite, - 'tags': event.tags, - 'title': event.title, - 'updated_at': self.bc.datetime.to_iso_string(event.updated_at), - 'url': event.url, - 'venue': event.venue, + "eventbrite_id": event.eventbrite_id, + "eventbrite_organizer_id": event.eventbrite_organizer_id, + "eventbrite_status": event.eventbrite_status, + "eventbrite_sync_description": event.eventbrite_sync_description, + "eventbrite_sync_status": event.eventbrite_sync_status, + "eventbrite_url": event.eventbrite_url, + "excerpt": event.excerpt, + "host": event.host, + "id": event.id, + "lang": event.lang, + "online_event": event.online_event, + "free_for_bootcamps": event.free_for_bootcamps, + "free_for_all": event.free_for_all, + "live_stream_url": event.live_stream_url, + "asset_slug": event.asset_slug, + "organization": event.organization, + "published_at": event.published_at, + "slug": event.slug, + "ended_at": event.ended_at, + "starting_at": 
self.bc.datetime.to_iso_string(event.starting_at), + "status": event.status, + "sync_with_eventbrite": event.sync_with_eventbrite, + "tags": event.tags, + "title": event.title, + "updated_at": self.bc.datetime.to_iso_string(event.updated_at), + "url": event.url, + "venue": event.venue, **data, } def extract_starting_at(d): - return datetime.strptime(str(d.starting_at), '%Y-%m-%d %H:%M:%S%z') + return datetime.strptime(str(d.starting_at), "%Y-%m-%d %H:%M:%S%z") class AcademyEventTestSuite(EventTestCase): cache = EventCache() - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_no_auth(self): self.headers(academy=1) - url = reverse_lazy('events:me') + url = reverse_lazy("events:me") response = self.client.get(url) json = response.json() - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} self.assertEqual(json, expected) self.assertEqual(response.status_code, 401) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_zero_items(self): self.headers(academy=1) - url = reverse_lazy('events:me') + url = reverse_lazy("events:me") model = self.bc.database.create(user=1) self.client.force_authenticate(model.user) @@ -178,13 +183,13 @@ def test_zero_items(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, 200) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_one_item__non_visible(self): self.headers(academy=1) - url = reverse_lazy('events:me') + url = reverse_lazy("events:me") - model = self.bc.database.create(user=1, event=1, event_type={'icon_url': 'https://www.google.com'}) + model = self.bc.database.create(user=1, event=1, event_type={"icon_url": "https://www.google.com"}) self.client.force_authenticate(model.user) response = self.client.get(url) @@ -198,31 +203,33 @@ def test_one_item__non_visible(self): 🥆🥆🥆 Academy hunter """ - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_one_item__academy_non_visible__because_owner_dont_allow_share_the_event_type(self): event_type_visibility_setting = { - 'academy_id': 2, - 
'cohort_id': None, - 'syllabus_id': None, + "academy_id": 2, + "cohort_id": None, + "syllabus_id": None, } event_type = { - 'academy_id': 1, - 'allow_shared_creation': False, - 'icon_url': 'https://www.google.com', + "academy_id": 1, + "allow_shared_creation": False, + "icon_url": "https://www.google.com", } cohort = { - 'academy_id': 2, + "academy_id": 2, } self.headers(academy=1) - url = reverse_lazy('events:me') - model = self.bc.database.create(user=1, - event=2, - event_type=event_type, - academy=2, - cohort=cohort, - cohort_user=1, - event_type_visibility_setting=event_type_visibility_setting) + url = reverse_lazy("events:me") + model = self.bc.database.create( + user=1, + event=2, + event_type=event_type, + academy=2, + cohort=cohort, + cohort_user=1, + event_type_visibility_setting=event_type_visibility_setting, + ) self.client.force_authenticate(model.user) response = self.client.get(url) @@ -232,52 +239,54 @@ def test_one_item__academy_non_visible__because_owner_dont_allow_share_the_event self.assertEqual(json, expected) self.assertEqual(response.status_code, 200) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_one_item__academy_visible(self): cases = [ ( { - 'academy_id': 1, - 'cohort_id': None, - 'syllabus_id': None, + "academy_id": 1, + "cohort_id": None, + "syllabus_id": None, }, { - 'academy_id': 1, - 'allow_shared_creation': False, - 'icon_url': 'https://www.google.com', + "academy_id": 1, + "allow_shared_creation": False, + "icon_url": "https://www.google.com", }, { - 'academy_id': 1, + "academy_id": 1, }, ), ( { - 'academy_id': 4, - 'cohort_id': None, - 'syllabus_id': None, + "academy_id": 4, + "cohort_id": None, + "syllabus_id": None, }, { - 'academy_id': 3, - 'allow_shared_creation': True, - 'icon_url': 'https://www.google.com', + "academy_id": 3, + "allow_shared_creation": True, + "icon_url": "https://www.google.com", }, { - 'academy_id': 4, + "academy_id": 4, }, ), ] self.headers(academy=1) - url = reverse_lazy('events:me') + url = reverse_lazy("events:me") for event_type_visibility_setting, event_type, cohort in cases: - model = self.bc.database.create(user=1, - event=2, - event_kwargs={'status': 'ACTIVE'}, - event_type=event_type, - academy=2, - cohort=cohort, - cohort_user=1, - event_type_visibility_setting=event_type_visibility_setting) + model = self.bc.database.create( + user=1, + event=2, + event_kwargs={"status": "ACTIVE"}, + event_type=event_type, + academy=2, + cohort=cohort, + cohort_user=1, + event_type_visibility_setting=event_type_visibility_setting, + ) self.client.force_authenticate(model.user) response = self.client.get(url) @@ -287,7 +296,7 @@ def test_one_item__academy_visible(self): get_serializer(self, event, model.event_type, model.user, model.academy[0], model.city) for event in reversed(model.event) ] - expected = sorted(expected, key=lambda d: d['starting_at']) + expected = sorted(expected, key=lambda d: d["starting_at"]) self.assertEqual(json, expected) self.assertEqual(response.status_code, 200) @@ -296,31 +305,33 @@ def test_one_item__academy_visible(self): 🥆🥆🥆 Cohort hunter """ - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - 
@patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_one_item__cohort_non_visible__because_owner_dont_allow_share_the_event_type(self): event_type_visibility_setting = { - 'academy_id': 2, - 'cohort_id': 2, - 'syllabus_id': None, + "academy_id": 2, + "cohort_id": 2, + "syllabus_id": None, } event_type = { - 'academy_id': 1, - 'allow_shared_creation': False, - 'icon_url': 'https://www.google.com', + "academy_id": 1, + "allow_shared_creation": False, + "icon_url": "https://www.google.com", } cohort = { - 'academy_id': 2, + "academy_id": 2, } self.headers(academy=1) - url = reverse_lazy('events:me') - model = self.bc.database.create(user=1, - event=2, - event_type=event_type, - academy=2, - cohort=(2, cohort), - cohort_user=1, - event_type_visibility_setting=event_type_visibility_setting) + url = reverse_lazy("events:me") + model = self.bc.database.create( + user=1, + event=2, + event_type=event_type, + academy=2, + cohort=(2, cohort), + cohort_user=1, + event_type_visibility_setting=event_type_visibility_setting, + ) self.client.force_authenticate(model.user) response = self.client.get(url) @@ -330,52 +341,46 @@ def test_one_item__cohort_non_visible__because_owner_dont_allow_share_the_event_ self.assertEqual(json, expected) self.assertEqual(response.status_code, 200) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_one_item__cohort_visible(self): cases = [ ( { - 'academy_id': 1, - 'cohort_id': 1, - 'syllabus_id': None, - }, - { - 'academy_id': 1, - 'allow_shared_creation': False, - 'icon_url': 'https://www.google.com' + "academy_id": 1, + "cohort_id": 1, + "syllabus_id": None, }, + {"academy_id": 1, "allow_shared_creation": False, "icon_url": "https://www.google.com"}, { - 'academy_id': 1, + "academy_id": 1, }, ), ( { - 'academy_id': 4, - 'cohort_id': 2, - 'syllabus_id': None, - }, - { - 'academy_id': 3, - 'allow_shared_creation': True, - 'icon_url': 'https://www.google.com' + "academy_id": 4, + "cohort_id": 2, + "syllabus_id": None, }, + {"academy_id": 3, "allow_shared_creation": True, "icon_url": "https://www.google.com"}, { - 'academy_id': 4, + "academy_id": 4, }, ), ] self.headers(academy=1) - url = reverse_lazy('events:me') + url = reverse_lazy("events:me") for event_type_visibility_setting, event_type, cohort in cases: - model = self.bc.database.create(user=1, - event=2, - event_kwargs={'status': 'ACTIVE'}, - event_type=event_type, - academy=2, - cohort=cohort, - cohort_user=1, - event_type_visibility_setting=event_type_visibility_setting) + model = self.bc.database.create( + user=1, + event=2, + event_kwargs={"status": "ACTIVE"}, + event_type=event_type, + academy=2, + cohort=cohort, + cohort_user=1, + event_type_visibility_setting=event_type_visibility_setting, + ) self.client.force_authenticate(model.user) response = self.client.get(url) @@ -386,7 +391,7 @@ def test_one_item__cohort_visible(self): for event in reversed(model.event) ] - expected = sorted(expected, 
key=lambda d: d['starting_at']) + expected = sorted(expected, key=lambda d: d["starting_at"]) self.assertEqual(json, expected) self.assertEqual(response.status_code, 200) @@ -395,29 +400,31 @@ def test_one_item__cohort_visible(self): 🥆🥆🥆 Syllabus hunter """ - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_one_item__syllabus_non_visible__because_owner_dont_allow_share_the_event_type(self): event_type_visibility_setting = { - 'academy_id': 2, - 'cohort_id': None, - 'syllabus_id': 2, + "academy_id": 2, + "cohort_id": None, + "syllabus_id": 2, } - event_type = {'academy_id': 1, 'allow_shared_creation': False, 'icon_url': 'https://www.google.com'} + event_type = {"academy_id": 1, "allow_shared_creation": False, "icon_url": "https://www.google.com"} cohort = { - 'academy_id': 2, + "academy_id": 2, } self.headers(academy=1) - url = reverse_lazy('events:me') - model = self.bc.database.create(user=1, - event=2, - event_type=event_type, - academy=2, - cohort=(2, cohort), - cohort_user=1, - syllabus=2, - syllabus_version=1, - event_type_visibility_setting=event_type_visibility_setting) + url = reverse_lazy("events:me") + model = self.bc.database.create( + user=1, + event=2, + event_type=event_type, + academy=2, + cohort=(2, cohort), + cohort_user=1, + syllabus=2, + syllabus_version=1, + event_type_visibility_setting=event_type_visibility_setting, + ) self.client.force_authenticate(model.user) response = self.client.get(url) @@ -427,56 +434,50 @@ def test_one_item__syllabus_non_visible__because_owner_dont_allow_share_the_even self.assertEqual(json, expected) self.assertEqual(response.status_code, 200) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_one_item__syllabus_visible(self): cases = [ ( { - 'academy_id': 1, - 'cohort_id': None, - 'syllabus_id': 1, + "academy_id": 1, + "cohort_id": None, + "syllabus_id": 1, }, + {"academy_id": 1, "allow_shared_creation": False, "icon_url": "https://www.google.com"}, { - 'academy_id': 1, - 'allow_shared_creation': False, - 'icon_url': 'https://www.google.com' - }, - { - 'academy_id': 1, + "academy_id": 1, }, ), ( { - 'academy_id': 4, - 'cohort_id': None, - 'syllabus_id': 2, + "academy_id": 4, + "cohort_id": None, + "syllabus_id": 2, }, + {"academy_id": 3, "allow_shared_creation": True, "icon_url": "https://www.google.com"}, { - 'academy_id': 3, - 'allow_shared_creation': True, - 'icon_url': 'https://www.google.com' - }, - { - 'academy_id': 4, + "academy_id": 4, }, ), ] self.headers(academy=1) - url = reverse_lazy('events:me') + url = reverse_lazy("events:me") for event_type_visibility_setting, event_type, cohort in cases: - model = self.bc.database.create(user=1, - event=2, - event_kwargs={'status': 'ACTIVE'}, - event_type=event_type, - academy=2, - cohort=cohort, - cohort_user=1, - syllabus=1, - syllabus_version=1, - profile=1, - 
profile_translation=2, - event_type_visibility_setting=event_type_visibility_setting) + model = self.bc.database.create( + user=1, + event=2, + event_kwargs={"status": "ACTIVE"}, + event_type=event_type, + academy=2, + cohort=cohort, + cohort_user=1, + syllabus=1, + syllabus_version=1, + profile=1, + profile_translation=2, + event_type_visibility_setting=event_type_visibility_setting, + ) self.client.force_authenticate(model.user) @@ -493,60 +494,55 @@ def test_one_item__syllabus_visible(self): model.city, profile=model.profile, profile_translations=model.profile_translation, - ) for event in reversed(model.event) + ) + for event in reversed(model.event) ] - expected = sorted(expected, key=lambda d: d['starting_at']) + expected = sorted(expected, key=lambda d: d["starting_at"]) self.assertEqual(json, expected) self.assertEqual(response.status_code, 200) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_one_item__status_not_active(self): cases = [ ( { - 'academy_id': 1, - 'cohort_id': None, - 'syllabus_id': 1, + "academy_id": 1, + "cohort_id": None, + "syllabus_id": 1, }, + {"academy_id": 1, "allow_shared_creation": False, "icon_url": "https://www.google.com"}, { - 'academy_id': 1, - 'allow_shared_creation': False, - 'icon_url': 'https://www.google.com' - }, - { - 'academy_id': 1, + "academy_id": 1, }, ), ( { - 'academy_id': 4, - 'cohort_id': None, - 'syllabus_id': 2, - }, - { - 'academy_id': 3, - 'allow_shared_creation': True, - 'icon_url': 'https://www.google.com' + "academy_id": 4, + "cohort_id": None, + "syllabus_id": 2, }, + {"academy_id": 3, "allow_shared_creation": True, "icon_url": "https://www.google.com"}, { - 'academy_id': 4, + "academy_id": 4, }, ), ] self.headers(academy=1) - url = reverse_lazy('events:me') + url = reverse_lazy("events:me") for event_type_visibility_setting, event_type, cohort in cases: - model = self.bc.database.create(user=1, - event=2, - event_type=event_type, - academy=2, - cohort=cohort, - cohort_user=1, - syllabus=1, - syllabus_version=1, - event_type_visibility_setting=event_type_visibility_setting) + model = self.bc.database.create( + user=1, + event=2, + event_type=event_type, + academy=2, + cohort=cohort, + cohort_user=1, + syllabus=1, + syllabus_version=1, + event_type_visibility_setting=event_type_visibility_setting, + ) self.client.force_authenticate(model.user) diff --git a/breathecode/events/tests/urls/tests_me_event_id.py b/breathecode/events/tests/urls/tests_me_event_id.py index c8af61f9d..d1ed7837a 100644 --- a/breathecode/events/tests/urls/tests_me_event_id.py +++ b/breathecode/events/tests/urls/tests_me_event_id.py @@ -14,137 +14,142 @@ def visibility_settings_serializer(visibility_settings): all_vs = visibility_settings.all() - serialized_vs = [{ - 'id': item.id, - 'cohort': { - 'id': item.cohort.id, - 'name': item.cohort.name, - 'slug': item.cohort.slug, - } if item.cohort else None, - 'academy': { - 'id': item.academy.id, - 'name': item.academy.name, - 'slug': item.academy.slug, - }, - 'syllabus': { - 'id': item.syllabus.id, - 'name': item.syllabus.name, - 'slug': item.syllabus.slug, - } if item.syllabus else None, - } for item in all_vs] + serialized_vs = [ + { + "id": item.id, + 
"cohort": ( + { + "id": item.cohort.id, + "name": item.cohort.name, + "slug": item.cohort.slug, + } + if item.cohort + else None + ), + "academy": { + "id": item.academy.id, + "name": item.academy.name, + "slug": item.academy.slug, + }, + "syllabus": ( + { + "id": item.syllabus.id, + "name": item.syllabus.name, + "slug": item.syllabus.slug, + } + if item.syllabus + else None + ), + } + for item in all_vs + ] return serialized_vs def profile_translation_serializer(profile_translation): return { - 'bio': profile_translation.bio, - 'lang': profile_translation.lang, + "bio": profile_translation.bio, + "lang": profile_translation.lang, } def profile_serializer(profile, profile_translations=[]): return { - 'avatar_url': profile.avatar_url, - 'bio': profile.bio, - 'blog': profile.blog, - 'github_username': profile.github_username, - 'linkedin_url': profile.linkedin_url, - 'phone': profile.phone, - 'portfolio_url': profile.portfolio_url, - 'translations': [profile_translation_serializer(item) for item in profile_translations], - 'twitter_username': profile.twitter_username, + "avatar_url": profile.avatar_url, + "bio": profile.bio, + "blog": profile.blog, + "github_username": profile.github_username, + "linkedin_url": profile.linkedin_url, + "phone": profile.phone, + "portfolio_url": profile.portfolio_url, + "translations": [profile_translation_serializer(item) for item in profile_translations], + "twitter_username": profile.twitter_username, } -def get_serializer(self, - event, - event_type, - user, - academy=None, - city=None, - profile=None, - profile_translations=[], - data={}): +def get_serializer( + self, event, event_type, user, academy=None, city=None, profile=None, profile_translations=[], data={} +): academy_serialized = None city_serialized = None if city: city_serialized = { - 'name': city.name, + "name": city.name, } if academy: academy_serialized = { - 'city': city_serialized, - 'id': academy.id, - 'name': academy.name, - 'slug': academy.slug, + "city": city_serialized, + "id": academy.id, + "name": academy.name, + "slug": academy.slug, } return { - 'academy': academy_serialized, - 'author': { - 'id': user.id, - 'first_name': user.first_name, - 'last_name': user.last_name, + "academy": academy_serialized, + "author": { + "id": user.id, + "first_name": user.first_name, + "last_name": user.last_name, }, - 'host_user': { - 'id': user.id, - 'first_name': user.first_name, - 'last_name': user.last_name, - 'profile': profile_serializer(profile, profile_translations) if profile else None, + "host_user": { + "id": user.id, + "first_name": user.first_name, + "last_name": user.last_name, + "profile": profile_serializer(profile, profile_translations) if profile else None, }, - 'banner': event.banner, - 'capacity': event.capacity, - 'free_for_bootcamps': event.free_for_bootcamps, - 'free_for_all': event.free_for_all, - 'live_stream_url': event.live_stream_url, - 'asset_slug': event.asset_slug, - 'asset': None, - 'created_at': self.bc.datetime.to_iso_string(event.created_at), - 'currency': event.currency, - 'description': event.description, - 'ending_at': self.bc.datetime.to_iso_string(event.ending_at), - 'ended_at': event.ended_at, - 'event_type': { - 'academy': academy_serialized, - 'id': event_type.id, - 'name': event_type.name, - 'slug': event_type.slug, - 'lang': event_type.lang, - 'icon_url': event_type.icon_url, - 'allow_shared_creation': event_type.allow_shared_creation, - 'description': event_type.description, - 'visibility_settings': 
visibility_settings_serializer(event_type.visibility_settings), + "banner": event.banner, + "capacity": event.capacity, + "free_for_bootcamps": event.free_for_bootcamps, + "free_for_all": event.free_for_all, + "live_stream_url": event.live_stream_url, + "asset_slug": event.asset_slug, + "asset": None, + "created_at": self.bc.datetime.to_iso_string(event.created_at), + "currency": event.currency, + "description": event.description, + "ending_at": self.bc.datetime.to_iso_string(event.ending_at), + "ended_at": event.ended_at, + "event_type": { + "academy": academy_serialized, + "id": event_type.id, + "name": event_type.name, + "slug": event_type.slug, + "lang": event_type.lang, + "icon_url": event_type.icon_url, + "allow_shared_creation": event_type.allow_shared_creation, + "description": event_type.description, + "visibility_settings": visibility_settings_serializer(event_type.visibility_settings), }, - 'eventbrite_id': event.eventbrite_id, - 'eventbrite_organizer_id': event.eventbrite_organizer_id, - 'eventbrite_status': event.eventbrite_status, - 'eventbrite_sync_description': event.eventbrite_sync_description, - 'eventbrite_sync_status': event.eventbrite_sync_status, - 'eventbrite_url': event.eventbrite_url, - 'excerpt': event.excerpt, - 'host': event.host, - 'id': event.id, - 'lang': event.lang, - 'online_event': event.online_event, - 'organization': event.organization, - 'published_at': event.published_at, - 'slug': event.slug, - 'starting_at': self.bc.datetime.to_iso_string(event.starting_at), - 'status': event.status, - 'sync_with_eventbrite': event.sync_with_eventbrite, - 'tags': event.tags, - 'title': event.title, - 'updated_at': self.bc.datetime.to_iso_string(event.updated_at), - 'url': event.url, - 'venue': event.venue, + "eventbrite_id": event.eventbrite_id, + "eventbrite_organizer_id": event.eventbrite_organizer_id, + "eventbrite_status": event.eventbrite_status, + "eventbrite_sync_description": event.eventbrite_sync_description, + "eventbrite_sync_status": event.eventbrite_sync_status, + "eventbrite_url": event.eventbrite_url, + "excerpt": event.excerpt, + "host": event.host, + "id": event.id, + "lang": event.lang, + "online_event": event.online_event, + "organization": event.organization, + "published_at": event.published_at, + "slug": event.slug, + "starting_at": self.bc.datetime.to_iso_string(event.starting_at), + "status": event.status, + "sync_with_eventbrite": event.sync_with_eventbrite, + "tags": event.tags, + "title": event.title, + "updated_at": self.bc.datetime.to_iso_string(event.updated_at), + "url": event.url, + "venue": event.venue, **data, } def extract_starting_at(d): - return datetime.strptime(str(d.starting_at), '%Y-%m-%d %H:%M:%S%z') + return datetime.strptime(str(d.starting_at), "%Y-%m-%d %H:%M:%S%z") class AcademyEventTestSuite(EventTestCase): @@ -152,240 +157,254 @@ class AcademyEventTestSuite(EventTestCase): # When: no auth # Then: return 401 - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_no_auth(self): self.headers(academy=1) - url = reverse_lazy('events:me_event_id', kwargs={'event_id': 1}) + url = reverse_lazy("events:me_event_id", kwargs={"event_id": 1}) response = self.client.get(url) json = 
response.json() - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} self.assertEqual(json, expected) self.assertEqual(response.status_code, 401) - self.assertEqual(self.bc.database.list_of('events.Event'), []) + self.assertEqual(self.bc.database.list_of("events.Event"), []) # When: zero Event # Then: return 404 - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_zero_items(self): self.headers(academy=1) - url = reverse_lazy('events:me_event_id', kwargs={'event_id': 1}) + url = reverse_lazy("events:me_event_id", kwargs={"event_id": 1}) model = self.bc.database.create(user=1) self.client.force_authenticate(model.user) response = self.client.get(url) json = response.json() - expected = {'detail': 'not-found', 'status_code': 404} + expected = {"detail": "not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, 404) - self.assertEqual(self.bc.database.list_of('events.Event'), []) + self.assertEqual(self.bc.database.list_of("events.Event"), []) # Given: 1 Event, 1 EventType and 1 User # When: No EventTypeVisibilitySetting # Then: return 404 - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_no_visible(self): self.headers(academy=1) - url = reverse_lazy('events:me_event_id', kwargs={'event_id': 1}) + url = reverse_lazy("events:me_event_id", kwargs={"event_id": 1}) - model = self.bc.database.create(user=1, event=1, event_type={'icon_url': 'https://www.google.com'}) + model = self.bc.database.create(user=1, event=1, event_type={"icon_url": "https://www.google.com"}) self.client.force_authenticate(model.user) response = self.client.get(url) json = response.json() - expected = {'detail': 'not-found', 'status_code': 404} + expected = {"detail": "not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, 404) - self.assertEqual(self.bc.database.list_of('events.Event'), [ - self.bc.format.to_dict(model.event), - ]) + self.assertEqual( + self.bc.database.list_of("events.Event"), + [ + self.bc.format.to_dict(model.event), + ], + ) # Given: 1 Event, 1 EventType, 1 User, 1 Academy and 1 CohortUser # When: visible in this cohort # Then: return 200 - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_visible_in_this_cohort(self): self.headers(academy=1) - url = reverse_lazy('events:me_event_id', kwargs={'event_id': 1}) - - 
event_type_visibility_setting = {'cohort_id': 1, 'syllabus_id': None, 'academy_id': 1} - - model = self.bc.database.create(user=1, - event=1, - event_type={'icon_url': 'https://www.google.com'}, - event_type_visibility_setting=event_type_visibility_setting, - cohort_user=1, - academy=1) + url = reverse_lazy("events:me_event_id", kwargs={"event_id": 1}) + + event_type_visibility_setting = {"cohort_id": 1, "syllabus_id": None, "academy_id": 1} + + model = self.bc.database.create( + user=1, + event=1, + event_type={"icon_url": "https://www.google.com"}, + event_type_visibility_setting=event_type_visibility_setting, + cohort_user=1, + academy=1, + ) self.client.force_authenticate(model.user) response = self.client.get(url) json = response.json() - expected = get_serializer(self, - model.event, - model.event_type, - model.user, - academy=model.academy, - city=model.city, - data={}) + expected = get_serializer( + self, model.event, model.event_type, model.user, academy=model.academy, city=model.city, data={} + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, 200) - self.assertEqual(self.bc.database.list_of('events.Event'), [ - self.bc.format.to_dict(model.event), - ]) + self.assertEqual( + self.bc.database.list_of("events.Event"), + [ + self.bc.format.to_dict(model.event), + ], + ) # Given: 1 Event, 1 EventType, 1 User, 1 Academy and 1 CohortUser # When: visible in this academy # Then: return 200 - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_visible_in_this_academy(self): self.headers(academy=1) - url = reverse_lazy('events:me_event_id', kwargs={'event_id': 1}) + url = reverse_lazy("events:me_event_id", kwargs={"event_id": 1}) - event_type_visibility_setting = {'cohort_id': None, 'syllabus_id': None, 'academy_id': 1} + event_type_visibility_setting = {"cohort_id": None, "syllabus_id": None, "academy_id": 1} - model = self.bc.database.create(user=1, - event=1, - event_type={'icon_url': 'https://www.google.com'}, - event_type_visibility_setting=event_type_visibility_setting, - cohort_user=1) + model = self.bc.database.create( + user=1, + event=1, + event_type={"icon_url": "https://www.google.com"}, + event_type_visibility_setting=event_type_visibility_setting, + cohort_user=1, + ) self.client.force_authenticate(model.user) response = self.client.get(url) json = response.json() - expected = get_serializer(self, - model.event, - model.event_type, - model.user, - academy=model.academy, - city=model.city, - data={}) + expected = get_serializer( + self, model.event, model.event_type, model.user, academy=model.academy, city=model.city, data={} + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, 200) - self.assertEqual(self.bc.database.list_of('events.Event'), [ - self.bc.format.to_dict(model.event), - ]) + self.assertEqual( + self.bc.database.list_of("events.Event"), + [ + self.bc.format.to_dict(model.event), + ], + ) # Given: 1 Event, 1 EventType, 1 User, 1 Academy and 1 CohortUser # When: visible in this academy # Then: return 200 - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - 
@patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_visible_in_this_syllabus(self): self.headers(academy=1) - url = reverse_lazy('events:me_event_id', kwargs={'event_id': 1}) - - event_type_visibility_setting = {'cohort_id': None, 'syllabus_id': 1, 'academy_id': 1} - - model = self.bc.database.create(user=1, - event=1, - academy=1, - event_type={'icon_url': 'https://www.google.com'}, - event_type_visibility_setting=event_type_visibility_setting, - cohort_user=1, - syllabus=1, - syllabus_version=1) + url = reverse_lazy("events:me_event_id", kwargs={"event_id": 1}) + + event_type_visibility_setting = {"cohort_id": None, "syllabus_id": 1, "academy_id": 1} + + model = self.bc.database.create( + user=1, + event=1, + academy=1, + event_type={"icon_url": "https://www.google.com"}, + event_type_visibility_setting=event_type_visibility_setting, + cohort_user=1, + syllabus=1, + syllabus_version=1, + ) self.client.force_authenticate(model.user) response = self.client.get(url) json = response.json() - expected = get_serializer(self, - model.event, - model.event_type, - model.user, - academy=model.academy, - city=model.city, - data={}) + expected = get_serializer( + self, model.event, model.event_type, model.user, academy=model.academy, city=model.city, data={} + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, 200) - self.assertEqual(self.bc.database.list_of('events.Event'), [ - self.bc.format.to_dict(model.event), - ]) + self.assertEqual( + self.bc.database.list_of("events.Event"), + [ + self.bc.format.to_dict(model.event), + ], + ) # Given: 1 Event, 1 EventType, 1 EventTypeSet, 1 User, 1 Academy and 1 Subscription # When: visible in this subscription # Then: return 200 - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_visible_in_this_subscription(self): self.headers(academy=1) - url = reverse_lazy('events:me_event_id', kwargs={'event_id': 1}) - - model = self.bc.database.create(user=1, - event=1, - academy=1, - event_type={'icon_url': 'https://www.google.com'}, - event_type_set=1, - subscription=1) + url = reverse_lazy("events:me_event_id", kwargs={"event_id": 1}) + + model = self.bc.database.create( + user=1, + event=1, + academy=1, + event_type={"icon_url": "https://www.google.com"}, + event_type_set=1, + subscription=1, + ) self.client.force_authenticate(model.user) response = self.client.get(url) json = response.json() - expected = get_serializer(self, - model.event, - model.event_type, - model.user, - academy=model.academy, - city=model.city, - data={}) + expected = get_serializer( + self, model.event, model.event_type, model.user, academy=model.academy, city=model.city, data={} + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, 200) - self.assertEqual(self.bc.database.list_of('events.Event'), [ - self.bc.format.to_dict(model.event), - ]) + self.assertEqual( + self.bc.database.list_of("events.Event"), + [ + 
self.bc.format.to_dict(model.event), + ], + ) # Given: 1 Event, 1 EventType, 1 EventTypeSet, 1 User, 1 Academy, 1 PlanFinancing, # -> 1 Profile and 2 ProfileTranslation # When: visible in this plan financing # Then: return 200 - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_visible_in_this_plan_financing(self): self.headers(academy=1) - url = reverse_lazy('events:me_event_id', kwargs={'event_id': 1}) + url = reverse_lazy("events:me_event_id", kwargs={"event_id": 1}) plan_financing = { - 'plan_expires_at': datetime.now() + timedelta(days=1), - 'monthly_price': random.random() * 100, - 'valid_until': datetime.now() + timedelta(days=1), + "plan_expires_at": datetime.now() + timedelta(days=1), + "monthly_price": random.random() * 100, + "valid_until": datetime.now() + timedelta(days=1), } - model = self.bc.database.create(user=1, - event=1, - academy=1, - profile=1, - profile_translation=2, - event_type={'icon_url': 'https://www.google.com'}, - event_type_set=1, - plan_financing=plan_financing) + model = self.bc.database.create( + user=1, + event=1, + academy=1, + profile=1, + profile_translation=2, + event_type={"icon_url": "https://www.google.com"}, + event_type_set=1, + plan_financing=plan_financing, + ) self.client.force_authenticate(model.user) response = self.client.get(url) json = response.json() - expected = get_serializer(self, - model.event, - model.event_type, - model.user, - academy=model.academy, - city=model.city, - profile=model.profile, - profile_translations=model.profile_translation, - data={}) + expected = get_serializer( + self, + model.event, + model.event_type, + model.user, + academy=model.academy, + city=model.city, + profile=model.profile, + profile_translations=model.profile_translation, + data={}, + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, 200) - self.assertEqual(self.bc.database.list_of('events.Event'), [ - self.bc.format.to_dict(model.event), - ]) + self.assertEqual( + self.bc.database.list_of("events.Event"), + [ + self.bc.format.to_dict(model.event), + ], + ) diff --git a/breathecode/events/tests/urls/tests_me_event_id_join.py b/breathecode/events/tests/urls/tests_me_event_id_join.py index 5952d2112..6f5afe515 100644 --- a/breathecode/events/tests/urls/tests_me_event_id_join.py +++ b/breathecode/events/tests/urls/tests_me_event_id_join.py @@ -25,62 +25,60 @@ def setup(db): def consumption_session(event, event_type_set, user, consumable, data={}): return { - 'consumable_id': consumable.id, - 'duration': timedelta(), - 'operation_code': 'default', - 'eta': ..., - 'how_many': 1.0, - 'id': 0, - 'path': 'payments.EventTypeSet', - 'related_id': event_type_set.id, - 'related_slug': event_type_set.slug, - 'request': { - 'args': [], - 'headers': { - 'academy': None + "consumable_id": consumable.id, + "duration": timedelta(), + "operation_code": "default", + "eta": ..., + "how_many": 1.0, + "id": 0, + "path": "payments.EventTypeSet", + "related_id": event_type_set.id, + "related_slug": event_type_set.slug, + "request": { + "args": [], + "headers": {"academy": None}, + "kwargs": { + "event_id": event.id, }, - 'kwargs': { - 'event_id': event.id, - }, - 'user': user.id + "user": 
user.id, }, - 'status': 'PENDING', - 'user_id': user.id, - 'was_discounted': False, + "status": "PENDING", + "user_id": user.id, + "was_discounted": False, **data, } def event_checkin_serializer(id, event, user): return { - 'attended_at': UTC_NOW, - 'attendee_id': user.id, - 'utm_campaign': None, - 'utm_medium': None, - 'utm_source': None, - 'utm_url': None, - 'email': user.email, - 'event_id': event.id, - 'id': id, - 'status': 'DONE', + "attended_at": UTC_NOW, + "attendee_id": user.id, + "utm_campaign": None, + "utm_medium": None, + "utm_source": None, + "utm_url": None, + "email": user.email, + "event_id": event.id, + "id": id, + "status": "DONE", } # IMPORTANT: the loader.render_to_string in a function is inside of function render def render_message(message, data={}): request = None - context = {'MESSAGE': message, 'BUTTON': None, 'BUTTON_TARGET': '_blank', 'LINK': None, **data} + context = {"MESSAGE": message, "BUTTON": None, "BUTTON_TARGET": "_blank", "LINK": None, **data} - return loader.render_to_string('message.html', context, request) + return loader.render_to_string("message.html", context, request) def serializer(event): return { - 'id': event.id, - 'starting_at': event.starting_at, - 'ending_at': event.ending_at, - 'live_stream_url': event.live_stream_url, - 'title': event.title, + "id": event.id, + "starting_at": event.starting_at, + "ending_at": event.ending_at, + "live_stream_url": event.live_stream_url, + "title": event.title, } @@ -88,17 +86,17 @@ def serializer(event): def render_countdown(event, token, academy=None): request = None context = { - 'event': serializer(event), - 'token': token.key, + "event": serializer(event), + "token": token.key, } if academy: - context['COMPANY_INFO_EMAIL'] = academy.feedback_email - context['COMPANY_LEGAL_NAME'] = academy.legal_name or academy.name - context['COMPANY_LOGO'] = academy.logo_url - context['COMPANY_NAME'] = academy.name + context["COMPANY_INFO_EMAIL"] = academy.feedback_email + context["COMPANY_LEGAL_NAME"] = academy.legal_name or academy.name + context["COMPANY_LOGO"] = academy.logo_url + context["COMPANY_NAME"] = academy.name - return loader.render_to_string('countdown.html', context, request) + return loader.render_to_string("countdown.html", context, request) class AcademyEventTestSuite(EventTestCase): @@ -106,758 +104,827 @@ class AcademyEventTestSuite(EventTestCase): # When: no auth # Then: return 401 - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) - @patch('breathecode.payments.tasks.end_the_consumption_session.apply_async', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) + @patch("breathecode.payments.tasks.end_the_consumption_session.apply_async", MagicMock(return_value=None)) def test_no_auth(self): - url = reverse_lazy('events:me_event_id_join', kwargs={'event_id': 1}) + url = reverse_lazy("events:me_event_id_join", kwargs={"event_id": 1}) response = self.client.get(url) - url_hash = self.bc.format.to_base64('/v1/events/me/event/1/join') + url_hash = self.bc.format.to_base64("/v1/events/me/event/1/join") content = self.bc.format.from_bytes(response.content) - expected = '' + expected = "" # dump error in external files if content != expected: - with open('content.html', 'w') as f: 
+ with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, 302) - self.assertEqual(response.url, f'/v1/auth/view/login?attempt=1&url={url_hash}') + self.assertEqual(response.url, f"/v1/auth/view/login?attempt=1&url={url_hash}") self.bc.check.calls(tasks.end_the_consumption_session.apply_async.call_args_list, []) # When: no consumables # Then: return 402 - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) - @patch('breathecode.payments.tasks.end_the_consumption_session.apply_async', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) + @patch("breathecode.payments.tasks.end_the_consumption_session.apply_async", MagicMock(return_value=None)) def test_no_consumables(self): event_type_model = model = self.bc.database.create(event_type_set=1) - model = self.bc.database.create(user=1, - token=1, - plan={ - 'is_renewable': False, - 'event_type_set': event_type_model.event_type_set - }, - service=1, - subscription={'selected_event_type_set': event_type_model.event_type_set}) - querystring = self.bc.format.to_querystring({'token': model.token.key}) - - url = reverse_lazy('events:me_event_id_join', kwargs={'event_id': 1}) + f'?{querystring}' + model = self.bc.database.create( + user=1, + token=1, + plan={"is_renewable": False, "event_type_set": event_type_model.event_type_set}, + service=1, + subscription={"selected_event_type_set": event_type_model.event_type_set}, + ) + querystring = self.bc.format.to_querystring({"token": model.token.key}) + + url = reverse_lazy("events:me_event_id_join", kwargs={"event_id": 1}) + f"?{querystring}" response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - expected = render_message('not-found') + expected = render_message("not-found") # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, 404) - self.assertEqual(self.bc.database.list_of('events.Event'), []) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.assertEqual(self.bc.database.list_of('payments.ConsumptionSession'), []) - self.assertEqual(self.bc.database.list_of('events.EventCheckin'), []) + self.assertEqual(self.bc.database.list_of("events.Event"), []) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.assertEqual(self.bc.database.list_of("payments.ConsumptionSession"), []) + self.assertEqual(self.bc.database.list_of("events.EventCheckin"), []) self.bc.check.calls(tasks.end_the_consumption_session.apply_async.call_args_list, []) # Given: no Consumable, Event, EventTypeSet, and IOweYou, User have Group and Permission # When: Feature flag set to False # Then: return 404 - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.events.permissions.flags.Release.enable_consume_live_events', MagicMock(return_value=False)) - 
@patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) - @patch('breathecode.payments.tasks.end_the_consumption_session.apply_async', MagicMock(return_value=None)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.events.permissions.flags.Release.enable_consume_live_events", MagicMock(return_value=False)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) + @patch("breathecode.payments.tasks.end_the_consumption_session.apply_async", MagicMock(return_value=None)) def test_no_consumables__bypass_with_feature_flag__live_event_not_found(self): - service = {'slug': 'event_join'} + service = {"slug": "event_join"} model = self.bc.database.create(user=1, service=service, token=1) - querystring = self.bc.format.to_querystring({'token': model.token.key}) + querystring = self.bc.format.to_querystring({"token": model.token.key}) - url = reverse_lazy('events:me_event_id_join', kwargs={'event_id': 1}) + f'?{querystring}' + url = reverse_lazy("events:me_event_id_join", kwargs={"event_id": 1}) + f"?{querystring}" response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - expected = render_message('not-found') + expected = render_message("not-found") # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, 404) - self.assertEqual(self.bc.database.list_of('events.Event'), []) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.assertEqual(self.bc.database.list_of('payments.ConsumptionSession'), []) - self.assertEqual(self.bc.database.list_of('events.EventCheckin'), []) + self.assertEqual(self.bc.database.list_of("events.Event"), []) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.assertEqual(self.bc.database.list_of("payments.ConsumptionSession"), []) + self.assertEqual(self.bc.database.list_of("events.EventCheckin"), []) self.bc.check.calls(tasks.end_the_consumption_session.apply_async.call_args_list, []) # Given: no Consumable, with Event, EventTypeSet, IOweYou, CohortUser, User have Group and Permission # When: Feature flag set to False, right hash and event.live_stream_url not set # Then: return 400 - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.events.permissions.flags.Release.enable_consume_live_events', MagicMock(return_value=False)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) - @patch('breathecode.payments.tasks.end_the_consumption_session.apply_async', MagicMock(return_value=None)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.events.permissions.flags.Release.enable_consume_live_events", MagicMock(return_value=False)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + 
@patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) + @patch("breathecode.payments.tasks.end_the_consumption_session.apply_async", MagicMock(return_value=None)) def test_no_consumables__bypass_with_feature_flag__with_live_event__cohort_without_url(self): - service = {'slug': 'event_join'} + service = {"slug": "event_join"} delta = timedelta(seconds=random.randint(1, 1000)) - event = {'starting_at': UTC_NOW - delta, 'ending_at': UTC_NOW + delta} - event_type = {'icon_url': self.bc.fake.url()} + event = {"starting_at": UTC_NOW - delta, "ending_at": UTC_NOW + delta} + event_type = {"icon_url": self.bc.fake.url()} is_subscription = bool(random.randbytes(1)) i_owe_you = { - 'next_payment_at': UTC_NOW + timedelta(weeks=4), - 'valid_until': UTC_NOW + timedelta(weeks=4), + "next_payment_at": UTC_NOW + timedelta(weeks=4), + "valid_until": UTC_NOW + timedelta(weeks=4), } if is_subscription and bool(random.randbytes(1)): - i_owe_you['valid_until'] = None + i_owe_you["valid_until"] = None - extra = {'subscription' if is_subscription else 'plan_financing': i_owe_you} - model = self.bc.database.create(user=1, - service=service, - event=event, - event_type=event_type, - event_type_set=1, - token=1, - **extra) - querystring = self.bc.format.to_querystring({'token': model.token.key}) + extra = {"subscription" if is_subscription else "plan_financing": i_owe_you} + model = self.bc.database.create( + user=1, service=service, event=event, event_type=event_type, event_type_set=1, token=1, **extra + ) + querystring = self.bc.format.to_querystring({"token": model.token.key}) - url = reverse_lazy('events:me_event_id_join', kwargs={'event_id': model.event.id}) + f'?{querystring}' + url = reverse_lazy("events:me_event_id_join", kwargs={"event_id": model.event.id}) + f"?{querystring}" response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - expected = render_message('event-online-meeting-url-not-found') + expected = render_message("event-online-meeting-url-not-found") # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, 400) - self.assertEqual(self.bc.database.list_of('events.Event'), [ - self.bc.format.to_dict(model.event), - ]) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.assertEqual(self.bc.database.list_of('payments.ConsumptionSession'), []) - self.assertEqual(self.bc.database.list_of('events.EventCheckin'), []) + self.assertEqual( + self.bc.database.list_of("events.Event"), + [ + self.bc.format.to_dict(model.event), + ], + ) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.assertEqual(self.bc.database.list_of("payments.ConsumptionSession"), []) + self.assertEqual(self.bc.database.list_of("events.EventCheckin"), []) self.bc.check.calls(tasks.end_the_consumption_session.apply_async.call_args_list, []) # Given: no Consumable, with Event, EventTypeSet, IOweYou, CohortUser, User have Group and Permission # When: Feature flag set to False, right hash and event.live_stream_url set # Then: return 302 to cohort.online_meeting_url and create a EventCheckin with status DONE - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - 
@patch('breathecode.events.permissions.flags.Release.enable_consume_live_events', MagicMock(return_value=False)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) - @patch('breathecode.payments.tasks.end_the_consumption_session.apply_async', MagicMock(return_value=None)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.events.permissions.flags.Release.enable_consume_live_events", MagicMock(return_value=False)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) + @patch("breathecode.payments.tasks.end_the_consumption_session.apply_async", MagicMock(return_value=None)) def test_no_consumables__bypass_with_feature_flag__with_live_event__cohort_with_url(self): - service = {'slug': 'event_join'} + service = {"slug": "event_join"} online_meeting_url = self.bc.fake.url() delta = timedelta(seconds=random.randint(1, 1000)) event = { - 'starting_at': UTC_NOW - delta, - 'ending_at': UTC_NOW + delta, - 'live_stream_url': online_meeting_url, + "starting_at": UTC_NOW - delta, + "ending_at": UTC_NOW + delta, + "live_stream_url": online_meeting_url, } - event_type = {'icon_url': self.bc.fake.url()} + event_type = {"icon_url": self.bc.fake.url()} is_subscription = bool(random.randbytes(1)) i_owe_you = { - 'next_payment_at': UTC_NOW + timedelta(weeks=4), - 'valid_until': UTC_NOW + timedelta(weeks=4), + "next_payment_at": UTC_NOW + timedelta(weeks=4), + "valid_until": UTC_NOW + timedelta(weeks=4), } if is_subscription and bool(random.randbytes(1)): - i_owe_you['valid_until'] = None + i_owe_you["valid_until"] = None - extra = {'subscription' if is_subscription else 'plan_financing': i_owe_you} - model = self.bc.database.create(user=1, - service=service, - event=event, - event_type=event_type, - event_type_set=1, - token=1, - **extra) - querystring = self.bc.format.to_querystring({'token': model.token.key}) + extra = {"subscription" if is_subscription else "plan_financing": i_owe_you} + model = self.bc.database.create( + user=1, service=service, event=event, event_type=event_type, event_type_set=1, token=1, **extra + ) + querystring = self.bc.format.to_querystring({"token": model.token.key}) - url = reverse_lazy('events:me_event_id_join', kwargs={'event_id': model.event.id}) + f'?{querystring}' + url = reverse_lazy("events:me_event_id_join", kwargs={"event_id": model.event.id}) + f"?{querystring}" response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - expected = '' + expected = "" # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, online_meeting_url) - self.assertEqual(self.bc.database.list_of('events.Event'), [ - self.bc.format.to_dict(model.event), - ]) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.assertEqual(self.bc.database.list_of('payments.ConsumptionSession'), []) - self.assertEqual(self.bc.database.list_of('events.EventCheckin'), [ - event_checkin_serializer(1, model.event, model.user), - ]) + 
self.assertEqual( + self.bc.database.list_of("events.Event"), + [ + self.bc.format.to_dict(model.event), + ], + ) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.assertEqual(self.bc.database.list_of("payments.ConsumptionSession"), []) + self.assertEqual( + self.bc.database.list_of("events.EventCheckin"), + [ + event_checkin_serializer(1, model.event, model.user), + ], + ) self.bc.check.calls(tasks.end_the_consumption_session.apply_async.call_args_list, []) # Given: no Consumable and Event, EventTypeSet, IOweYou, User have Group and Permission # When: Feature flag set to True # Then: return 404 - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.events.permissions.flags.Release.enable_consume_live_events', MagicMock(return_value=True)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) - @patch('breathecode.payments.tasks.end_the_consumption_session.apply_async', MagicMock(return_value=None)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.events.permissions.flags.Release.enable_consume_live_events", MagicMock(return_value=True)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) + @patch("breathecode.payments.tasks.end_the_consumption_session.apply_async", MagicMock(return_value=None)) def test_no_consumables__it_try_to_consume__live_event_not_found(self): - service = {'slug': 'event_join'} + service = {"slug": "event_join"} model = self.bc.database.create(user=1, service=service, token=1) - querystring = self.bc.format.to_querystring({'token': model.token.key}) + querystring = self.bc.format.to_querystring({"token": model.token.key}) - url = reverse_lazy('events:me_event_id_join', kwargs={'event_id': 1}) + f'?{querystring}' + url = reverse_lazy("events:me_event_id_join", kwargs={"event_id": 1}) + f"?{querystring}" response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - expected = render_message('not-found') + expected = render_message("not-found") # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, 404) - self.assertEqual(self.bc.database.list_of('events.Event'), []) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.assertEqual(self.bc.database.list_of('payments.ConsumptionSession'), []) - self.assertEqual(self.bc.database.list_of('events.EventCheckin'), []) + self.assertEqual(self.bc.database.list_of("events.Event"), []) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.assertEqual(self.bc.database.list_of("payments.ConsumptionSession"), []) + self.assertEqual(self.bc.database.list_of("events.EventCheckin"), []) self.bc.check.calls(tasks.end_the_consumption_session.apply_async.call_args_list, []) # Given: no Consumable with Event, EventTypeSet, IOweYou, User have Group and Permission # When: Feature flag set to True and event.live_stream_url not set # Then: return 400 - @patch('django.utils.timezone.now', 
MagicMock(return_value=UTC_NOW)) - @patch('breathecode.events.permissions.flags.Release.enable_consume_live_events', MagicMock(return_value=True)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) - @patch('breathecode.payments.tasks.end_the_consumption_session.apply_async', MagicMock(return_value=None)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.events.permissions.flags.Release.enable_consume_live_events", MagicMock(return_value=True)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) + @patch("breathecode.payments.tasks.end_the_consumption_session.apply_async", MagicMock(return_value=None)) def test_no_consumables__it_try_to_consume__with_live_event__cohort_without_url(self): - service = {'slug': 'event_join'} + service = {"slug": "event_join"} delta = timedelta(seconds=random.randint(1, 1000)) - event = {'starting_at': UTC_NOW - delta, 'ending_at': UTC_NOW + delta} - event_type = {'icon_url': self.bc.fake.url()} + event = {"starting_at": UTC_NOW - delta, "ending_at": UTC_NOW + delta} + event_type = {"icon_url": self.bc.fake.url()} is_subscription = bool(random.randbytes(1)) i_owe_you = { - 'next_payment_at': UTC_NOW + timedelta(weeks=4), - 'valid_until': UTC_NOW + timedelta(weeks=4), + "next_payment_at": UTC_NOW + timedelta(weeks=4), + "valid_until": UTC_NOW + timedelta(weeks=4), } if is_subscription and bool(random.randbytes(1)): - i_owe_you['valid_until'] = None + i_owe_you["valid_until"] = None - extra = {'subscription' if is_subscription else 'plan_financing': i_owe_you} - model = self.bc.database.create(user=1, - service=service, - event=event, - event_type=event_type, - event_type_set=1, - token=1, - **extra) - querystring = self.bc.format.to_querystring({'token': model.token.key}) + extra = {"subscription" if is_subscription else "plan_financing": i_owe_you} + model = self.bc.database.create( + user=1, service=service, event=event, event_type=event_type, event_type_set=1, token=1, **extra + ) + querystring = self.bc.format.to_querystring({"token": model.token.key}) - url = reverse_lazy('events:me_event_id_join', kwargs={'event_id': model.event.id}) + f'?{querystring}' + url = reverse_lazy("events:me_event_id_join", kwargs={"event_id": model.event.id}) + f"?{querystring}" response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - expected = render_message('event-online-meeting-url-not-found') + expected = render_message("event-online-meeting-url-not-found") # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, 400) - self.assertEqual(self.bc.database.list_of('events.Event'), [ - self.bc.format.to_dict(model.event), - ]) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.assertEqual(self.bc.database.list_of('payments.ConsumptionSession'), []) - self.assertEqual(self.bc.database.list_of('events.EventCheckin'), []) + self.assertEqual( + self.bc.database.list_of("events.Event"), + [ + 
self.bc.format.to_dict(model.event), + ], + ) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.assertEqual(self.bc.database.list_of("payments.ConsumptionSession"), []) + self.assertEqual(self.bc.database.list_of("events.EventCheckin"), []) self.bc.check.calls(tasks.end_the_consumption_session.apply_async.call_args_list, []) # Given: no Consumable with Event, EventTypeSet, IOweYou, User have Group and Permission # When: Feature flag set to True and event.live_stream_url set # Then: return 402 - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.events.permissions.flags.Release.enable_consume_live_events', MagicMock(return_value=True)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) - @patch('breathecode.payments.tasks.end_the_consumption_session.apply_async', MagicMock(return_value=None)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.events.permissions.flags.Release.enable_consume_live_events", MagicMock(return_value=True)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) + @patch("breathecode.payments.tasks.end_the_consumption_session.apply_async", MagicMock(return_value=None)) def test_no_consumables__it_try_to_consume__with_live_event__cohort_with_url(self): - service = {'slug': 'event_join'} + service = {"slug": "event_join"} online_meeting_url = self.bc.fake.url() delta = timedelta(seconds=random.randint(1, 1000)) event = { - 'host_user_id': None, - 'starting_at': UTC_NOW - delta, - 'ending_at': UTC_NOW + delta, - 'live_stream_url': online_meeting_url, + "host_user_id": None, + "starting_at": UTC_NOW - delta, + "ending_at": UTC_NOW + delta, + "live_stream_url": online_meeting_url, } - event_type = {'icon_url': self.bc.fake.url()} + event_type = {"icon_url": self.bc.fake.url()} is_subscription = bool(random.randbytes(1)) i_owe_you = { - 'next_payment_at': UTC_NOW + timedelta(weeks=4), - 'valid_until': UTC_NOW + timedelta(weeks=4), + "next_payment_at": UTC_NOW + timedelta(weeks=4), + "valid_until": UTC_NOW + timedelta(weeks=4), } if is_subscription and bool(random.randbytes(1)): - i_owe_you['valid_until'] = None - - academy = {'available_as_saas': True} - extra = {'subscription' if is_subscription else 'plan_financing': i_owe_you} - model = self.bc.database.create(user=1, - academy=academy, - service=service, - event=event, - event_type=event_type, - event_type_set=1, - plan={ - 'is_renewable': False, - 'time_of_life': 1, - 'time_of_life_unit': 'MONTH', - }, - plan_offer={ - 'original_plan_id': 1, - 'suggested_plan_id': 1, - 'show_modal': bool(random.getrandbits(1)), - 'expires_at': None, - }, - token=1, - **extra) - querystring = self.bc.format.to_querystring({'token': model.token.key}) - - url = reverse_lazy('events:me_event_id_join', kwargs={'event_id': model.event.id}) + f'?{querystring}' + i_owe_you["valid_until"] = None + + academy = {"available_as_saas": True} + extra = {"subscription" if is_subscription else "plan_financing": i_owe_you} + model = self.bc.database.create( + user=1, + academy=academy, + service=service, + event=event, + event_type=event_type, + event_type_set=1, + plan={ + "is_renewable": False, + "time_of_life": 1, + "time_of_life_unit": "MONTH", + 
}, + plan_offer={ + "original_plan_id": 1, + "suggested_plan_id": 1, + "show_modal": bool(random.getrandbits(1)), + "expires_at": None, + }, + token=1, + **extra, + ) + querystring = self.bc.format.to_querystring({"token": model.token.key}) + + url = reverse_lazy("events:me_event_id_join", kwargs={"event_id": model.event.id}) + f"?{querystring}" response = self.client.get(url) template_data = {} sugested = model.plan_offer.suggested_plan.slug - template_data['BUTTON'] = 'Get more consumables' - template_data['LINK'] = f'https://4geeks.com/checkout?plan={sugested}&token={model.token.key}' - template_data['GO_BACK'] = 'Go back to Dashboard' - template_data['URL_BACK'] = 'https://4geeks.com/choose-program' + template_data["BUTTON"] = "Get more consumables" + template_data["LINK"] = f"https://4geeks.com/checkout?plan={sugested}&token={model.token.key}" + template_data["GO_BACK"] = "Go back to Dashboard" + template_data["URL_BACK"] = "https://4geeks.com/choose-program" content = self.bc.format.from_bytes(response.content) - expected = render_message('with-consumer-not-enough-consumables', data=template_data) + expected = render_message("with-consumer-not-enough-consumables", data=template_data) # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, 402) - self.assertEqual(self.bc.database.list_of('events.Event'), [ - self.bc.format.to_dict(model.event), - ]) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.assertEqual(self.bc.database.list_of('payments.ConsumptionSession'), []) - self.assertEqual(self.bc.database.list_of('events.EventCheckin'), []) + self.assertEqual( + self.bc.database.list_of("events.Event"), + [ + self.bc.format.to_dict(model.event), + ], + ) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.assertEqual(self.bc.database.list_of("payments.ConsumptionSession"), []) + self.assertEqual(self.bc.database.list_of("events.EventCheckin"), []) self.bc.check.calls(tasks.end_the_consumption_session.apply_async.call_args_list, []) # Given: with Consumable, Event, EventTypeSet, IOweYou, User have Group and Permission # When: Feature flag set to True, event end in the past and event.live_stream_url set # Then: return 200 and create a ConsumptionSession - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.events.permissions.flags.Release.enable_consume_live_events', MagicMock(return_value=True)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) - @patch('breathecode.payments.tasks.end_the_consumption_session.apply_async', MagicMock(return_value=None)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.events.permissions.flags.Release.enable_consume_live_events", MagicMock(return_value=True)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) + @patch("breathecode.payments.tasks.end_the_consumption_session.apply_async", MagicMock(return_value=None)) def 
test_with_consumable__it_try_to_consume__with_live_event__in_the_past(self): - service = {'slug': 'event_join'} + service = {"slug": "event_join"} online_meeting_url = self.bc.fake.url() delta = timedelta(seconds=random.randint(1, 1000)) event = { - 'starting_at': UTC_NOW - delta, - 'ending_at': UTC_NOW - delta, - 'live_stream_url': online_meeting_url, + "starting_at": UTC_NOW - delta, + "ending_at": UTC_NOW - delta, + "live_stream_url": online_meeting_url, } - event_type = {'icon_url': self.bc.fake.url()} + event_type = {"icon_url": self.bc.fake.url()} is_subscription = bool(random.randbytes(1)) i_owe_you = { - 'next_payment_at': UTC_NOW + timedelta(weeks=4), - 'valid_until': UTC_NOW + timedelta(weeks=4), + "next_payment_at": UTC_NOW + timedelta(weeks=4), + "valid_until": UTC_NOW + timedelta(weeks=4), } if is_subscription and bool(random.randbytes(1)): - i_owe_you['valid_until'] = None + i_owe_you["valid_until"] = None - extra = {'subscription' if is_subscription else 'plan_financing': i_owe_you} - model = self.bc.database.create(user=1, - service=service, - event=event, - event_type=event_type, - event_type_set=1, - consumable=1, - token=1, - **extra) - querystring = self.bc.format.to_querystring({'token': model.token.key}) + extra = {"subscription" if is_subscription else "plan_financing": i_owe_you} + model = self.bc.database.create( + user=1, + service=service, + event=event, + event_type=event_type, + event_type_set=1, + consumable=1, + token=1, + **extra, + ) + querystring = self.bc.format.to_querystring({"token": model.token.key}) - url = reverse_lazy('events:me_event_id_join', kwargs={'event_id': model.event.id}) + f'?{querystring}' + url = reverse_lazy("events:me_event_id_join", kwargs={"event_id": model.event.id}) + f"?{querystring}" response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - expected = render_message('event-has-ended') + expected = render_message("event-has-ended") # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, 400) - self.assertEqual(self.bc.database.list_of('events.Event'), [ - self.bc.format.to_dict(model.event), - ]) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), [ - self.bc.format.to_dict(model.consumable), - ]) - - self.assertEqual(self.bc.database.list_of('payments.ConsumptionSession'), []) - self.assertEqual(self.bc.database.list_of('events.EventCheckin'), []) + self.assertEqual( + self.bc.database.list_of("events.Event"), + [ + self.bc.format.to_dict(model.event), + ], + ) + self.assertEqual( + self.bc.database.list_of("payments.Consumable"), + [ + self.bc.format.to_dict(model.consumable), + ], + ) + + self.assertEqual(self.bc.database.list_of("payments.ConsumptionSession"), []) + self.assertEqual(self.bc.database.list_of("events.EventCheckin"), []) self.bc.check.calls(tasks.end_the_consumption_session.apply_async.call_args_list, []) # Given: with Consumable, Event, EventTypeSet, IOweYou, User have Group and Permission # When: Feature flag set to True and event end in the future # Then: return 200 and create a ConsumptionSession and create a EventCheckin with status DONE - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.events.permissions.flags.Release.enable_consume_live_events', 
MagicMock(return_value=True)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) - @patch('breathecode.payments.tasks.end_the_consumption_session.apply_async', MagicMock(return_value=None)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.events.permissions.flags.Release.enable_consume_live_events", MagicMock(return_value=True)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) + @patch("breathecode.payments.tasks.end_the_consumption_session.apply_async", MagicMock(return_value=None)) def test_with_consumable__it_try_to_consume__with_live_event__in_the_future(self): - service = {'slug': 'event_join'} + service = {"slug": "event_join"} online_meeting_url = self.bc.fake.url() delta = timedelta(seconds=random.randint(1, 1000)) event = { - 'host_user_id': None, - 'starting_at': UTC_NOW - delta, - 'ending_at': UTC_NOW + delta, - 'live_stream_url': online_meeting_url, + "host_user_id": None, + "starting_at": UTC_NOW - delta, + "ending_at": UTC_NOW + delta, + "live_stream_url": online_meeting_url, } - event_type = {'icon_url': self.bc.fake.url()} + event_type = {"icon_url": self.bc.fake.url()} is_subscription = bool(random.randbytes(1)) i_owe_you = { - 'next_payment_at': UTC_NOW + timedelta(weeks=4), - 'valid_until': UTC_NOW + timedelta(weeks=4), + "next_payment_at": UTC_NOW + timedelta(weeks=4), + "valid_until": UTC_NOW + timedelta(weeks=4), } if is_subscription and bool(random.randbytes(1)): - i_owe_you['valid_until'] = None - - academy = {'available_as_saas': True} - extra = {'subscription' if is_subscription else 'plan_financing': i_owe_you} - model = self.bc.database.create(user=1, - academy=academy, - service=service, - event=event, - event_type=event_type, - event_type_set=1, - consumable=1, - token=1, - **extra) - querystring = self.bc.format.to_querystring({'token': model.token.key}) - - url = reverse_lazy('events:me_event_id_join', kwargs={'event_id': model.event.id}) + f'?{querystring}' + i_owe_you["valid_until"] = None + + academy = {"available_as_saas": True} + extra = {"subscription" if is_subscription else "plan_financing": i_owe_you} + model = self.bc.database.create( + user=1, + academy=academy, + service=service, + event=event, + event_type=event_type, + event_type_set=1, + consumable=1, + token=1, + **extra, + ) + querystring = self.bc.format.to_querystring({"token": model.token.key}) + + url = reverse_lazy("events:me_event_id_join", kwargs={"event_id": model.event.id}) + f"?{querystring}" response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - expected = '' + expected = "" # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, online_meeting_url) - self.assertEqual(self.bc.database.list_of('events.Event'), [ - self.bc.format.to_dict(model.event), - ]) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), [ - self.bc.format.to_dict(model.consumable), - ]) - - 
self.assertEqual(self.bc.database.list_of('payments.ConsumptionSession'), [ - consumption_session(model.event, - model.event_type_set, - model.user, - model.consumable, - data={ - 'id': 1, - 'duration': delta, - 'eta': UTC_NOW + delta, - }), - ]) - self.assertEqual(self.bc.database.list_of('events.EventCheckin'), [ - event_checkin_serializer(1, model.event, model.user), - ]) - - self.bc.check.calls(tasks.end_the_consumption_session.apply_async.call_args_list, [ - call(args=(1, 1), eta=UTC_NOW + delta), - ]) + self.assertEqual( + self.bc.database.list_of("events.Event"), + [ + self.bc.format.to_dict(model.event), + ], + ) + self.assertEqual( + self.bc.database.list_of("payments.Consumable"), + [ + self.bc.format.to_dict(model.consumable), + ], + ) + + self.assertEqual( + self.bc.database.list_of("payments.ConsumptionSession"), + [ + consumption_session( + model.event, + model.event_type_set, + model.user, + model.consumable, + data={ + "id": 1, + "duration": delta, + "eta": UTC_NOW + delta, + }, + ), + ], + ) + self.assertEqual( + self.bc.database.list_of("events.EventCheckin"), + [ + event_checkin_serializer(1, model.event, model.user), + ], + ) + + self.bc.check.calls( + tasks.end_the_consumption_session.apply_async.call_args_list, + [ + call(args=(1, 1), eta=UTC_NOW + delta), + ], + ) # Given: with Consumable, Event, EventTypeSet, IOweYou, User have Group and Permission # When: Feature flag set to True and event end in the future, # -> event.free_for_bootcamps = None and event_type.free_for_bootcamps = False # -> academy.available_as_saas = True # Then: return 200 and create a ConsumptionSession and create a EventCheckin with status DONE - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.events.permissions.flags.Release.enable_consume_live_events', MagicMock(return_value=True)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) - @patch('breathecode.payments.tasks.end_the_consumption_session.apply_async', MagicMock(return_value=None)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.events.permissions.flags.Release.enable_consume_live_events", MagicMock(return_value=True)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) + @patch("breathecode.payments.tasks.end_the_consumption_session.apply_async", MagicMock(return_value=None)) def test_with_consumable__it_try_to_consume__with_live_event__in_the_future__academy_no_saas__non_free1(self): - service = {'slug': 'event_join'} + service = {"slug": "event_join"} online_meeting_url = self.bc.fake.url() delta = timedelta(seconds=random.randint(1, 1000)) event = { - 'host_user_id': None, - 'starting_at': UTC_NOW - delta, - 'ending_at': UTC_NOW + delta, - 'live_stream_url': online_meeting_url, - 'free_for_bootcamps': None, + "host_user_id": None, + "starting_at": UTC_NOW - delta, + "ending_at": UTC_NOW + delta, + "live_stream_url": online_meeting_url, + "free_for_bootcamps": None, } event_type = { - 'icon_url': self.bc.fake.url(), - 'free_for_bootcamps': False, + "icon_url": self.bc.fake.url(), + "free_for_bootcamps": False, } is_subscription = bool(random.randbytes(1)) i_owe_you = { - 'next_payment_at': UTC_NOW + timedelta(weeks=4), - 'valid_until': UTC_NOW + 
timedelta(weeks=4), + "next_payment_at": UTC_NOW + timedelta(weeks=4), + "valid_until": UTC_NOW + timedelta(weeks=4), } if is_subscription and bool(random.randbytes(1)): - i_owe_you['valid_until'] = None - - academy = {'available_as_saas': False} - extra = {'subscription' if is_subscription else 'plan_financing': i_owe_you} - model = self.bc.database.create(user=1, - academy=academy, - service=service, - event=event, - event_type=event_type, - event_type_set=1, - consumable=1, - token=1, - **extra) - querystring = self.bc.format.to_querystring({'token': model.token.key}) - - url = reverse_lazy('events:me_event_id_join', kwargs={'event_id': model.event.id}) + f'?{querystring}' + i_owe_you["valid_until"] = None + + academy = {"available_as_saas": False} + extra = {"subscription" if is_subscription else "plan_financing": i_owe_you} + model = self.bc.database.create( + user=1, + academy=academy, + service=service, + event=event, + event_type=event_type, + event_type_set=1, + consumable=1, + token=1, + **extra, + ) + querystring = self.bc.format.to_querystring({"token": model.token.key}) + + url = reverse_lazy("events:me_event_id_join", kwargs={"event_id": model.event.id}) + f"?{querystring}" response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - expected = '' + expected = "" # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, online_meeting_url) - self.assertEqual(self.bc.database.list_of('events.Event'), [ - self.bc.format.to_dict(model.event), - ]) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), [ - self.bc.format.to_dict(model.consumable), - ]) - - self.assertEqual(self.bc.database.list_of('payments.ConsumptionSession'), [ - consumption_session(model.event, - model.event_type_set, - model.user, - model.consumable, - data={ - 'id': 1, - 'duration': delta, - 'eta': UTC_NOW + delta, - }), - ]) - self.assertEqual(self.bc.database.list_of('events.EventCheckin'), [ - event_checkin_serializer(1, model.event, model.user), - ]) - - self.bc.check.calls(tasks.end_the_consumption_session.apply_async.call_args_list, [ - call(args=(1, 1), eta=UTC_NOW + delta), - ]) + self.assertEqual( + self.bc.database.list_of("events.Event"), + [ + self.bc.format.to_dict(model.event), + ], + ) + self.assertEqual( + self.bc.database.list_of("payments.Consumable"), + [ + self.bc.format.to_dict(model.consumable), + ], + ) + + self.assertEqual( + self.bc.database.list_of("payments.ConsumptionSession"), + [ + consumption_session( + model.event, + model.event_type_set, + model.user, + model.consumable, + data={ + "id": 1, + "duration": delta, + "eta": UTC_NOW + delta, + }, + ), + ], + ) + self.assertEqual( + self.bc.database.list_of("events.EventCheckin"), + [ + event_checkin_serializer(1, model.event, model.user), + ], + ) + + self.bc.check.calls( + tasks.end_the_consumption_session.apply_async.call_args_list, + [ + call(args=(1, 1), eta=UTC_NOW + delta), + ], + ) # Given: with Consumable, Event, EventTypeSet, IOweYou, User have Group and Permission # When: Feature flag set to True and event end in the future, # -> event.free_for_bootcamps = False and event_type.free_for_bootcamps is random # -> academy.available_as_saas = True # Then: return 200 and create a 
ConsumptionSession and create a EventCheckin with status DONE - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.events.permissions.flags.Release.enable_consume_live_events', MagicMock(return_value=True)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) - @patch('breathecode.payments.tasks.end_the_consumption_session.apply_async', MagicMock(return_value=None)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.events.permissions.flags.Release.enable_consume_live_events", MagicMock(return_value=True)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) + @patch("breathecode.payments.tasks.end_the_consumption_session.apply_async", MagicMock(return_value=None)) def test_with_consumable__it_try_to_consume__with_live_event__in_the_future__academy_no_saas__non_free2(self): - service = {'slug': 'event_join'} + service = {"slug": "event_join"} online_meeting_url = self.bc.fake.url() delta = timedelta(seconds=random.randint(1, 1000)) event = { - 'host_user_id': None, - 'starting_at': UTC_NOW - delta, - 'ending_at': UTC_NOW + delta, - 'live_stream_url': online_meeting_url, - 'free_for_bootcamps': False, + "host_user_id": None, + "starting_at": UTC_NOW - delta, + "ending_at": UTC_NOW + delta, + "live_stream_url": online_meeting_url, + "free_for_bootcamps": False, } event_type = { - 'icon_url': self.bc.fake.url(), - 'free_for_bootcamps': bool(random.randbytes(1)), + "icon_url": self.bc.fake.url(), + "free_for_bootcamps": bool(random.randbytes(1)), } is_subscription = bool(random.randbytes(1)) i_owe_you = { - 'next_payment_at': UTC_NOW + timedelta(weeks=4), - 'valid_until': UTC_NOW + timedelta(weeks=4), + "next_payment_at": UTC_NOW + timedelta(weeks=4), + "valid_until": UTC_NOW + timedelta(weeks=4), } if is_subscription and bool(random.randbytes(1)): - i_owe_you['valid_until'] = None - - academy = {'available_as_saas': False} - extra = {'subscription' if is_subscription else 'plan_financing': i_owe_you} - model = self.bc.database.create(user=1, - academy=academy, - service=service, - event=event, - event_type=event_type, - event_type_set=1, - consumable=1, - token=1, - **extra) - querystring = self.bc.format.to_querystring({'token': model.token.key}) - - url = reverse_lazy('events:me_event_id_join', kwargs={'event_id': model.event.id}) + f'?{querystring}' + i_owe_you["valid_until"] = None + + academy = {"available_as_saas": False} + extra = {"subscription" if is_subscription else "plan_financing": i_owe_you} + model = self.bc.database.create( + user=1, + academy=academy, + service=service, + event=event, + event_type=event_type, + event_type_set=1, + consumable=1, + token=1, + **extra, + ) + querystring = self.bc.format.to_querystring({"token": model.token.key}) + + url = reverse_lazy("events:me_event_id_join", kwargs={"event_id": model.event.id}) + f"?{querystring}" response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - expected = '' + expected = "" # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: 
f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, online_meeting_url) - self.assertEqual(self.bc.database.list_of('events.Event'), [ - self.bc.format.to_dict(model.event), - ]) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), [ - self.bc.format.to_dict(model.consumable), - ]) - - self.assertEqual(self.bc.database.list_of('payments.ConsumptionSession'), [ - consumption_session(model.event, - model.event_type_set, - model.user, - model.consumable, - data={ - 'id': 1, - 'duration': delta, - 'eta': UTC_NOW + delta, - }), - ]) - self.assertEqual(self.bc.database.list_of('events.EventCheckin'), [ - event_checkin_serializer(1, model.event, model.user), - ]) - - self.bc.check.calls(tasks.end_the_consumption_session.apply_async.call_args_list, [ - call(args=(1, 1), eta=UTC_NOW + delta), - ]) + self.assertEqual( + self.bc.database.list_of("events.Event"), + [ + self.bc.format.to_dict(model.event), + ], + ) + self.assertEqual( + self.bc.database.list_of("payments.Consumable"), + [ + self.bc.format.to_dict(model.consumable), + ], + ) + + self.assertEqual( + self.bc.database.list_of("payments.ConsumptionSession"), + [ + consumption_session( + model.event, + model.event_type_set, + model.user, + model.consumable, + data={ + "id": 1, + "duration": delta, + "eta": UTC_NOW + delta, + }, + ), + ], + ) + self.assertEqual( + self.bc.database.list_of("events.EventCheckin"), + [ + event_checkin_serializer(1, model.event, model.user), + ], + ) + + self.bc.check.calls( + tasks.end_the_consumption_session.apply_async.call_args_list, + [ + call(args=(1, 1), eta=UTC_NOW + delta), + ], + ) # Given: with Consumable, Event, EventTypeSet, IOweYou, User have Group, Permission, # -> Cohort and CohortUser @@ -866,93 +933,109 @@ def test_with_consumable__it_try_to_consume__with_live_event__in_the_future__aca # -> academy.available_as_saas is random, # -> cohort.available_as_saas = True # Then: return 200 and create a ConsumptionSession and create a EventCheckin with status DONE - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.events.permissions.flags.Release.enable_consume_live_events', MagicMock(return_value=True)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) - @patch('breathecode.payments.tasks.end_the_consumption_session.apply_async', MagicMock(return_value=None)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.events.permissions.flags.Release.enable_consume_live_events", MagicMock(return_value=True)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) + @patch("breathecode.payments.tasks.end_the_consumption_session.apply_async", MagicMock(return_value=None)) def test_is_free_with_cohort_users_saas__cohort(self): - service = {'slug': 'event_join'} + service = {"slug": "event_join"} online_meeting_url = self.bc.fake.url() delta = timedelta(seconds=random.randint(1, 1000)) event = { - 'host_user_id': None, - 'starting_at': UTC_NOW - delta, - 'ending_at': UTC_NOW + delta, - 'live_stream_url': online_meeting_url, - 'free_for_bootcamps': True, + "host_user_id": None, + "starting_at": UTC_NOW - delta, + "ending_at": UTC_NOW + delta, + 
"live_stream_url": online_meeting_url, + "free_for_bootcamps": True, } event_type = { - 'icon_url': self.bc.fake.url(), - 'free_for_bootcamps': True, + "icon_url": self.bc.fake.url(), + "free_for_bootcamps": True, } is_subscription = bool(random.randbytes(1)) i_owe_you = { - 'next_payment_at': UTC_NOW + timedelta(weeks=4), - 'valid_until': UTC_NOW + timedelta(weeks=4), + "next_payment_at": UTC_NOW + timedelta(weeks=4), + "valid_until": UTC_NOW + timedelta(weeks=4), } if is_subscription and bool(random.randbytes(1)): - i_owe_you['valid_until'] = None - - academy = {'available_as_saas': bool(random.randbytes(1))} - cohort = {'available_as_saas': True} - extra = {'subscription' if is_subscription else 'plan_financing': i_owe_you} - model = self.bc.database.create(user=1, - academy=academy, - service=service, - event=event, - event_type=event_type, - event_type_set=1, - cohort=cohort, - cohort_user=1, - consumable=1, - token=1, - **extra) - querystring = self.bc.format.to_querystring({'token': model.token.key}) - - url = reverse_lazy('events:me_event_id_join', kwargs={'event_id': model.event.id}) + f'?{querystring}' + i_owe_you["valid_until"] = None + + academy = {"available_as_saas": bool(random.randbytes(1))} + cohort = {"available_as_saas": True} + extra = {"subscription" if is_subscription else "plan_financing": i_owe_you} + model = self.bc.database.create( + user=1, + academy=academy, + service=service, + event=event, + event_type=event_type, + event_type_set=1, + cohort=cohort, + cohort_user=1, + consumable=1, + token=1, + **extra, + ) + querystring = self.bc.format.to_querystring({"token": model.token.key}) + + url = reverse_lazy("events:me_event_id_join", kwargs={"event_id": model.event.id}) + f"?{querystring}" response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - expected = '' + expected = "" # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, online_meeting_url) - self.assertEqual(self.bc.database.list_of('events.Event'), [ - self.bc.format.to_dict(model.event), - ]) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), [ - self.bc.format.to_dict(model.consumable), - ]) - - self.assertEqual(self.bc.database.list_of('payments.ConsumptionSession'), [ - consumption_session(model.event, - model.event_type_set, - model.user, - model.consumable, - data={ - 'id': 1, - 'duration': delta, - 'eta': UTC_NOW + delta, - }), - ]) - self.assertEqual(self.bc.database.list_of('events.EventCheckin'), [ - event_checkin_serializer(1, model.event, model.user), - ]) + self.assertEqual( + self.bc.database.list_of("events.Event"), + [ + self.bc.format.to_dict(model.event), + ], + ) + self.assertEqual( + self.bc.database.list_of("payments.Consumable"), + [ + self.bc.format.to_dict(model.consumable), + ], + ) + + self.assertEqual( + self.bc.database.list_of("payments.ConsumptionSession"), + [ + consumption_session( + model.event, + model.event_type_set, + model.user, + model.consumable, + data={ + "id": 1, + "duration": delta, + "eta": UTC_NOW + delta, + }, + ), + ], + ) + self.assertEqual( + self.bc.database.list_of("events.EventCheckin"), + [ + event_checkin_serializer(1, model.event, model.user), + ], + ) # Given: with Consumable, Event, EventTypeSet, 
IOweYou, User have Group, Permission, # -> Cohort and CohortUser @@ -961,92 +1044,108 @@ def test_is_free_with_cohort_users_saas__cohort(self): # -> academy.available_as_saas = True, # -> cohort.available_as_saas = None # Then: return 200 and create a ConsumptionSession and create a EventCheckin with status DONE - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.events.permissions.flags.Release.enable_consume_live_events', MagicMock(return_value=True)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) - @patch('breathecode.payments.tasks.end_the_consumption_session.apply_async', MagicMock(return_value=None)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.events.permissions.flags.Release.enable_consume_live_events", MagicMock(return_value=True)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) + @patch("breathecode.payments.tasks.end_the_consumption_session.apply_async", MagicMock(return_value=None)) def test_is_free_with_cohort_users_saas__academy(self): - service = {'slug': 'event_join'} + service = {"slug": "event_join"} online_meeting_url = self.bc.fake.url() delta = timedelta(seconds=random.randint(1, 1000)) event = { - 'host_user_id': None, - 'starting_at': UTC_NOW - delta, - 'ending_at': UTC_NOW + delta, - 'live_stream_url': online_meeting_url, - 'free_for_bootcamps': True, + "host_user_id": None, + "starting_at": UTC_NOW - delta, + "ending_at": UTC_NOW + delta, + "live_stream_url": online_meeting_url, + "free_for_bootcamps": True, } event_type = { - 'icon_url': self.bc.fake.url(), - 'free_for_bootcamps': True, + "icon_url": self.bc.fake.url(), + "free_for_bootcamps": True, } is_subscription = bool(random.randbytes(1)) i_owe_you = { - 'next_payment_at': UTC_NOW + timedelta(weeks=4), - 'valid_until': UTC_NOW + timedelta(weeks=4), + "next_payment_at": UTC_NOW + timedelta(weeks=4), + "valid_until": UTC_NOW + timedelta(weeks=4), } if is_subscription and bool(random.randbytes(1)): - i_owe_you['valid_until'] = None - - academy = {'available_as_saas': True} - cohort = {'available_as_saas': None} - extra = {'subscription' if is_subscription else 'plan_financing': i_owe_you} - model = self.bc.database.create(user=1, - academy=academy, - service=service, - event=event, - event_type=event_type, - event_type_set=1, - cohort=cohort, - cohort_user=1, - consumable=1, - token=1, - **extra) - querystring = self.bc.format.to_querystring({'token': model.token.key}) - - url = reverse_lazy('events:me_event_id_join', kwargs={'event_id': model.event.id}) + f'?{querystring}' + i_owe_you["valid_until"] = None + + academy = {"available_as_saas": True} + cohort = {"available_as_saas": None} + extra = {"subscription" if is_subscription else "plan_financing": i_owe_you} + model = self.bc.database.create( + user=1, + academy=academy, + service=service, + event=event, + event_type=event_type, + event_type_set=1, + cohort=cohort, + cohort_user=1, + consumable=1, + token=1, + **extra, + ) + querystring = self.bc.format.to_querystring({"token": model.token.key}) + + url = reverse_lazy("events:me_event_id_join", kwargs={"event_id": model.event.id}) + f"?{querystring}" response = self.client.get(url) content = 
self.bc.format.from_bytes(response.content) - expected = '' + expected = "" # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, online_meeting_url) - self.assertEqual(self.bc.database.list_of('events.Event'), [ - self.bc.format.to_dict(model.event), - ]) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), [ - self.bc.format.to_dict(model.consumable), - ]) - - self.assertEqual(self.bc.database.list_of('payments.ConsumptionSession'), [ - consumption_session(model.event, - model.event_type_set, - model.user, - model.consumable, - data={ - 'id': 1, - 'duration': delta, - 'eta': UTC_NOW + delta, - }), - ]) - self.assertEqual(self.bc.database.list_of('events.EventCheckin'), [ - event_checkin_serializer(1, model.event, model.user), - ]) + self.assertEqual( + self.bc.database.list_of("events.Event"), + [ + self.bc.format.to_dict(model.event), + ], + ) + self.assertEqual( + self.bc.database.list_of("payments.Consumable"), + [ + self.bc.format.to_dict(model.consumable), + ], + ) + + self.assertEqual( + self.bc.database.list_of("payments.ConsumptionSession"), + [ + consumption_session( + model.event, + model.event_type_set, + model.user, + model.consumable, + data={ + "id": 1, + "duration": delta, + "eta": UTC_NOW + delta, + }, + ), + ], + ) + self.assertEqual( + self.bc.database.list_of("events.EventCheckin"), + [ + event_checkin_serializer(1, model.event, model.user), + ], + ) # Given: with Consumable, Event, EventTypeSet, IOweYou, User have Group, Permission, # -> Cohort and CohortUser @@ -1055,82 +1154,93 @@ def test_is_free_with_cohort_users_saas__academy(self): # -> academy.available_as_saas is random, # -> cohort.available_as_saas = False # Then: return 200 and create a ConsumptionSession and create a EventCheckin with status DONE - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.events.permissions.flags.Release.enable_consume_live_events', MagicMock(return_value=True)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) - @patch('breathecode.payments.tasks.end_the_consumption_session.apply_async', MagicMock(return_value=None)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.events.permissions.flags.Release.enable_consume_live_events", MagicMock(return_value=True)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) + @patch("breathecode.payments.tasks.end_the_consumption_session.apply_async", MagicMock(return_value=None)) def test_is_free_with_cohort_users_no_saas__cohort(self): - service = {'slug': 'event_join'} + service = {"slug": "event_join"} online_meeting_url = self.bc.fake.url() delta = timedelta(seconds=random.randint(1, 1000)) event = { - 'host_user_id': None, - 'starting_at': UTC_NOW - delta, - 'ending_at': UTC_NOW + delta, - 'live_stream_url': online_meeting_url, - 'free_for_bootcamps': True, + "host_user_id": None, + "starting_at": UTC_NOW - delta, + "ending_at": UTC_NOW + delta, + 
"live_stream_url": online_meeting_url, + "free_for_bootcamps": True, } event_type = { - 'icon_url': self.bc.fake.url(), - 'free_for_bootcamps': True, + "icon_url": self.bc.fake.url(), + "free_for_bootcamps": True, } is_subscription = bool(random.randbytes(1)) i_owe_you = { - 'next_payment_at': UTC_NOW + timedelta(weeks=4), - 'valid_until': UTC_NOW + timedelta(weeks=4), + "next_payment_at": UTC_NOW + timedelta(weeks=4), + "valid_until": UTC_NOW + timedelta(weeks=4), } if is_subscription and bool(random.randbytes(1)): - i_owe_you['valid_until'] = None - - academy = {'available_as_saas': bool(random.randbytes(1))} - cohort = {'available_as_saas': False} - extra = {'subscription' if is_subscription else 'plan_financing': i_owe_you} - model = self.bc.database.create(user=1, - academy=academy, - service=service, - event=event, - event_type=event_type, - event_type_set=1, - cohort=cohort, - cohort_user=1, - consumable=1, - token=1, - **extra) - querystring = self.bc.format.to_querystring({'token': model.token.key}) - - url = reverse_lazy('events:me_event_id_join', kwargs={'event_id': model.event.id}) + f'?{querystring}' + i_owe_you["valid_until"] = None + + academy = {"available_as_saas": bool(random.randbytes(1))} + cohort = {"available_as_saas": False} + extra = {"subscription" if is_subscription else "plan_financing": i_owe_you} + model = self.bc.database.create( + user=1, + academy=academy, + service=service, + event=event, + event_type=event_type, + event_type_set=1, + cohort=cohort, + cohort_user=1, + consumable=1, + token=1, + **extra, + ) + querystring = self.bc.format.to_querystring({"token": model.token.key}) + + url = reverse_lazy("events:me_event_id_join", kwargs={"event_id": model.event.id}) + f"?{querystring}" response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - expected = '' + expected = "" # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, online_meeting_url) - self.assertEqual(self.bc.database.list_of('events.Event'), [ - self.bc.format.to_dict(model.event), - ]) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), [ - self.bc.format.to_dict(model.consumable), - ]) - - self.assertEqual(self.bc.database.list_of('payments.ConsumptionSession'), []) - self.assertEqual(self.bc.database.list_of('events.EventCheckin'), [ - event_checkin_serializer(1, model.event, model.user), - ]) + self.assertEqual( + self.bc.database.list_of("events.Event"), + [ + self.bc.format.to_dict(model.event), + ], + ) + self.assertEqual( + self.bc.database.list_of("payments.Consumable"), + [ + self.bc.format.to_dict(model.consumable), + ], + ) + + self.assertEqual(self.bc.database.list_of("payments.ConsumptionSession"), []) + self.assertEqual( + self.bc.database.list_of("events.EventCheckin"), + [ + event_checkin_serializer(1, model.event, model.user), + ], + ) # Given: with Consumable, Event, EventTypeSet, IOweYou, User have Group, Permission, # -> Cohort and CohortUser @@ -1139,126 +1249,139 @@ def test_is_free_with_cohort_users_no_saas__cohort(self): # -> academy.available_as_saas = False, # -> cohort.available_as_saas = None # Then: return 200 and create a ConsumptionSession and create a EventCheckin with status DONE - @patch('django.utils.timezone.now', 
MagicMock(return_value=UTC_NOW)) - @patch('breathecode.events.permissions.flags.Release.enable_consume_live_events', MagicMock(return_value=True)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) - @patch('breathecode.payments.tasks.end_the_consumption_session.apply_async', MagicMock(return_value=None)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.events.permissions.flags.Release.enable_consume_live_events", MagicMock(return_value=True)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) + @patch("breathecode.payments.tasks.end_the_consumption_session.apply_async", MagicMock(return_value=None)) def test_is_free_with_cohort_users_no_saas__academy(self): - service = {'slug': 'event_join'} + service = {"slug": "event_join"} online_meeting_url = self.bc.fake.url() delta = timedelta(seconds=random.randint(1, 1000)) event = { - 'host_user_id': None, - 'starting_at': UTC_NOW - delta, - 'ending_at': UTC_NOW + delta, - 'live_stream_url': online_meeting_url, - 'free_for_bootcamps': True, + "host_user_id": None, + "starting_at": UTC_NOW - delta, + "ending_at": UTC_NOW + delta, + "live_stream_url": online_meeting_url, + "free_for_bootcamps": True, } event_type = { - 'icon_url': self.bc.fake.url(), - 'free_for_bootcamps': True, + "icon_url": self.bc.fake.url(), + "free_for_bootcamps": True, } is_subscription = bool(random.randbytes(1)) i_owe_you = { - 'next_payment_at': UTC_NOW + timedelta(weeks=4), - 'valid_until': UTC_NOW + timedelta(weeks=4), + "next_payment_at": UTC_NOW + timedelta(weeks=4), + "valid_until": UTC_NOW + timedelta(weeks=4), } if is_subscription and bool(random.randbytes(1)): - i_owe_you['valid_until'] = None - - academy = {'available_as_saas': False} - cohort = {'available_as_saas': None} - extra = {'subscription' if is_subscription else 'plan_financing': i_owe_you} - model = self.bc.database.create(user=1, - academy=academy, - service=service, - event=event, - event_type=event_type, - event_type_set=1, - cohort=cohort, - cohort_user=1, - consumable=1, - token=1, - **extra) - querystring = self.bc.format.to_querystring({'token': model.token.key}) - - url = reverse_lazy('events:me_event_id_join', kwargs={'event_id': model.event.id}) + f'?{querystring}' + i_owe_you["valid_until"] = None + + academy = {"available_as_saas": False} + cohort = {"available_as_saas": None} + extra = {"subscription" if is_subscription else "plan_financing": i_owe_you} + model = self.bc.database.create( + user=1, + academy=academy, + service=service, + event=event, + event_type=event_type, + event_type_set=1, + cohort=cohort, + cohort_user=1, + consumable=1, + token=1, + **extra, + ) + querystring = self.bc.format.to_querystring({"token": model.token.key}) + + url = reverse_lazy("events:me_event_id_join", kwargs={"event_id": model.event.id}) + f"?{querystring}" response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - expected = '' + expected = "" # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) 
self.assertEqual(response.status_code, 302) self.assertEqual(response.url, online_meeting_url) - self.assertEqual(self.bc.database.list_of('events.Event'), [ - self.bc.format.to_dict(model.event), - ]) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), [ - self.bc.format.to_dict(model.consumable), - ]) - - self.assertEqual(self.bc.database.list_of('payments.ConsumptionSession'), []) - self.assertEqual(self.bc.database.list_of('events.EventCheckin'), [ - event_checkin_serializer(1, model.event, model.user), - ]) + self.assertEqual( + self.bc.database.list_of("events.Event"), + [ + self.bc.format.to_dict(model.event), + ], + ) + self.assertEqual( + self.bc.database.list_of("payments.Consumable"), + [ + self.bc.format.to_dict(model.consumable), + ], + ) + + self.assertEqual(self.bc.database.list_of("payments.ConsumptionSession"), []) + self.assertEqual( + self.bc.database.list_of("events.EventCheckin"), + [ + event_checkin_serializer(1, model.event, model.user), + ], + ) # Given: with Consumable, Event, EventTypeSet, IOweYou, User have Group and Permission # When: Feature flag set to True and event start and end in the future # Then: return 200 and create a ConsumptionSession - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.events.permissions.flags.Release.enable_consume_live_events', MagicMock(return_value=True)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) - @patch('breathecode.payments.tasks.end_the_consumption_session.apply_async', MagicMock(return_value=None)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.events.permissions.flags.Release.enable_consume_live_events", MagicMock(return_value=True)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) + @patch("breathecode.payments.tasks.end_the_consumption_session.apply_async", MagicMock(return_value=None)) def test_with_consumable__it_try_to_consume__with_live_event__in_the_future__show_countdown(self): - service = {'slug': 'event_join'} + service = {"slug": "event_join"} online_meeting_url = self.bc.fake.url() delta = timedelta(seconds=random.randint(1, 1000)) event = { - 'host_user_id': None, - 'starting_at': UTC_NOW + delta, - 'ending_at': UTC_NOW + delta, - 'live_stream_url': online_meeting_url, + "host_user_id": None, + "starting_at": UTC_NOW + delta, + "ending_at": UTC_NOW + delta, + "live_stream_url": online_meeting_url, } - event_type = {'icon_url': self.bc.fake.url()} + event_type = {"icon_url": self.bc.fake.url()} is_subscription = bool(random.randbytes(1)) i_owe_you = { - 'next_payment_at': UTC_NOW + timedelta(weeks=4), - 'valid_until': UTC_NOW + timedelta(weeks=4), + "next_payment_at": UTC_NOW + timedelta(weeks=4), + "valid_until": UTC_NOW + timedelta(weeks=4), } if is_subscription and bool(random.randbytes(1)): - i_owe_you['valid_until'] = None - - academy = {'available_as_saas': True} - extra = {'subscription' if is_subscription else 'plan_financing': i_owe_you} - model = self.bc.database.create(user=1, - academy=academy, - service=service, - event=event, - event_type=event_type, - event_type_set=1, - consumable=1, - token=1, - **extra) - querystring = self.bc.format.to_querystring({'token': model.token.key}) - - url = 
reverse_lazy('events:me_event_id_join', kwargs={'event_id': model.event.id}) + f'?{querystring}' + i_owe_you["valid_until"] = None + + academy = {"available_as_saas": True} + extra = {"subscription" if is_subscription else "plan_financing": i_owe_you} + model = self.bc.database.create( + user=1, + academy=academy, + service=service, + event=event, + event_type=event_type, + event_type_set=1, + consumable=1, + token=1, + **extra, + ) + querystring = self.bc.format.to_querystring({"token": model.token.key}) + + url = reverse_lazy("events:me_event_id_join", kwargs={"event_id": model.event.id}) + f"?{querystring}" response = self.client.get(url) @@ -1267,71 +1390,85 @@ def test_with_consumable__it_try_to_consume__with_live_event__in_the_future__sho # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, 200) - self.assertEqual(self.bc.database.list_of('events.Event'), [ - self.bc.format.to_dict(model.event), - ]) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), [ - self.bc.format.to_dict(model.consumable), - ]) - - self.assertEqual(self.bc.database.list_of('payments.ConsumptionSession'), [ - consumption_session(model.event, - model.event_type_set, - model.user, - model.consumable, - data={ - 'id': 1, - 'duration': delta, - 'eta': UTC_NOW + delta, - }), - ]) - self.assertEqual(self.bc.database.list_of('events.EventCheckin'), []) - - self.bc.check.calls(tasks.end_the_consumption_session.apply_async.call_args_list, [ - call(args=(1, 1), eta=UTC_NOW + delta), - ]) + self.assertEqual( + self.bc.database.list_of("events.Event"), + [ + self.bc.format.to_dict(model.event), + ], + ) + self.assertEqual( + self.bc.database.list_of("payments.Consumable"), + [ + self.bc.format.to_dict(model.consumable), + ], + ) + + self.assertEqual( + self.bc.database.list_of("payments.ConsumptionSession"), + [ + consumption_session( + model.event, + model.event_type_set, + model.user, + model.consumable, + data={ + "id": 1, + "duration": delta, + "eta": UTC_NOW + delta, + }, + ), + ], + ) + self.assertEqual(self.bc.database.list_of("events.EventCheckin"), []) + + self.bc.check.calls( + tasks.end_the_consumption_session.apply_async.call_args_list, + [ + call(args=(1, 1), eta=UTC_NOW + delta), + ], + ) # Given: with Consumable, Event, EventTypeSet, IOweYou, User have Group and Permission # When: Feature flag set to True and event start and end in the future, # -> authenticate user is event host # Then: return 200 and avoid to create a ConsumptionSession - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.events.permissions.flags.Release.enable_consume_live_events', MagicMock(return_value=True)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) - @patch('breathecode.payments.tasks.end_the_consumption_session.apply_async', MagicMock(return_value=None)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.events.permissions.flags.Release.enable_consume_live_events", MagicMock(return_value=True)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + 
@patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) + @patch("breathecode.payments.tasks.end_the_consumption_session.apply_async", MagicMock(return_value=None)) def test_with_consumable__it_try_to_consume__with_live_event__in_the_future__show_countdown(self): - service = {'slug': 'event_join'} + service = {"slug": "event_join"} online_meeting_url = self.bc.fake.url() delta = timedelta(seconds=random.randint(1, 1000)) event = { - 'host_user_id': 1, - 'starting_at': UTC_NOW + delta, - 'ending_at': UTC_NOW + delta, - 'live_stream_url': online_meeting_url, + "host_user_id": 1, + "starting_at": UTC_NOW + delta, + "ending_at": UTC_NOW + delta, + "live_stream_url": online_meeting_url, } - event_type = {'icon_url': self.bc.fake.url()} + event_type = {"icon_url": self.bc.fake.url()} is_subscription = bool(random.randbytes(1)) i_owe_you = { - 'next_payment_at': UTC_NOW + timedelta(weeks=4), - 'valid_until': UTC_NOW + timedelta(weeks=4), + "next_payment_at": UTC_NOW + timedelta(weeks=4), + "valid_until": UTC_NOW + timedelta(weeks=4), } if is_subscription and bool(random.randbytes(1)): - i_owe_you['valid_until'] = None + i_owe_you["valid_until"] = None - academy = {'available_as_saas': True} - extra = {'subscription' if is_subscription else 'plan_financing': i_owe_you} + academy = {"available_as_saas": True} + extra = {"subscription" if is_subscription else "plan_financing": i_owe_you} model = self.bc.database.create( user=1, academy=academy, @@ -1341,10 +1478,11 @@ def test_with_consumable__it_try_to_consume__with_live_event__in_the_future__sho # event_type_set=1, # consumable=1, token=1, - **extra) - querystring = self.bc.format.to_querystring({'token': model.token.key}) + **extra, + ) + querystring = self.bc.format.to_querystring({"token": model.token.key}) - url = reverse_lazy('events:me_event_id_join', kwargs={'event_id': model.event.id}) + f'?{querystring}' + url = reverse_lazy("events:me_event_id_join", kwargs={"event_id": model.event.id}) + f"?{querystring}" response = self.client.get(url) @@ -1353,22 +1491,25 @@ def test_with_consumable__it_try_to_consume__with_live_event__in_the_future__sho # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, 200) - self.assertEqual(self.bc.database.list_of('events.Event'), [ - self.bc.format.to_dict(model.event), - ]) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) + self.assertEqual( + self.bc.database.list_of("events.Event"), + [ + self.bc.format.to_dict(model.event), + ], + ) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) - self.assertEqual(self.bc.database.list_of('payments.ConsumptionSession'), []) - self.assertEqual(self.bc.database.list_of('events.EventCheckin'), []) + self.assertEqual(self.bc.database.list_of("payments.ConsumptionSession"), []) + self.assertEqual(self.bc.database.list_of("events.EventCheckin"), []) self.bc.check.calls(tasks.end_the_consumption_session.apply_async.call_args_list, []) @@ -1376,82 +1517,83 @@ def test_with_consumable__it_try_to_consume__with_live_event__in_the_future__sho # Given: A no SAAS student who has paid # When: auth # Then: response 200 -@pytest.mark.parametrize('cohort_user', [ - { - 'finantial_status': 'FULLY_PAID', - 'educational_status': 
'ACTIVE', - }, - { - 'finantial_status': 'UP_TO_DATE', - 'educational_status': 'ACTIVE', - }, - { - 'finantial_status': 'FULLY_PAID', - 'educational_status': 'GRADUATED', - }, - { - 'finantial_status': 'UP_TO_DATE', - 'educational_status': 'GRADUATED', - }, -]) -@pytest.mark.parametrize('academy, cohort', [ - ( +@pytest.mark.parametrize( + "cohort_user", + [ { - 'available_as_saas': True + "finantial_status": "FULLY_PAID", + "educational_status": "ACTIVE", }, { - 'available_as_saas': False + "finantial_status": "UP_TO_DATE", + "educational_status": "ACTIVE", }, - ), - ( { - 'available_as_saas': False + "finantial_status": "FULLY_PAID", + "educational_status": "GRADUATED", }, { - 'available_as_saas': None + "finantial_status": "UP_TO_DATE", + "educational_status": "GRADUATED", }, - ), -]) -@patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) -@patch('breathecode.events.permissions.flags.Release.enable_consume_live_events', MagicMock(return_value=True)) -@patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) -@patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) -@patch('breathecode.payments.tasks.end_the_consumption_session.apply_async', MagicMock(return_value=None)) -def test__post__auth__no_saas__finantial_status_no_late(bc: Breathecode, client: fx.Client, academy, cohort, - cohort_user): - service = {'slug': 'event_join'} + ], +) +@pytest.mark.parametrize( + "academy, cohort", + [ + ( + {"available_as_saas": True}, + {"available_as_saas": False}, + ), + ( + {"available_as_saas": False}, + {"available_as_saas": None}, + ), + ], +) +@patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) +@patch("breathecode.events.permissions.flags.Release.enable_consume_live_events", MagicMock(return_value=True)) +@patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) +@patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) +@patch("breathecode.payments.tasks.end_the_consumption_session.apply_async", MagicMock(return_value=None)) +def test__post__auth__no_saas__finantial_status_no_late( + bc: Breathecode, client: fx.Client, academy, cohort, cohort_user +): + service = {"slug": "event_join"} online_meeting_url = bc.fake.url() delta = timedelta(seconds=random.randint(1, 1000)) event = { - 'host_user_id': 1, - 'starting_at': UTC_NOW + delta, - 'ending_at': UTC_NOW + delta, - 'live_stream_url': online_meeting_url, + "host_user_id": 1, + "starting_at": UTC_NOW + delta, + "ending_at": UTC_NOW + delta, + "live_stream_url": online_meeting_url, } - event_type = {'icon_url': bc.fake.url()} + event_type = {"icon_url": bc.fake.url()} is_subscription = bool(random.randbytes(1)) i_owe_you = { - 'next_payment_at': UTC_NOW + timedelta(weeks=4), - 'valid_until': UTC_NOW + timedelta(weeks=4), + "next_payment_at": UTC_NOW + timedelta(weeks=4), + "valid_until": UTC_NOW + timedelta(weeks=4), } if is_subscription and bool(random.randbytes(1)): - i_owe_you['valid_until'] = None - - extra = {'subscription' if is_subscription else 'plan_financing': i_owe_you} - model = bc.database.create(user=1, - academy=academy, - cohort=cohort, - cohort_user=cohort_user, - service=service, - event=event, - event_type=event_type, - token=1, - **extra) - querystring = bc.format.to_querystring({'token': model.token.key}) - - url = reverse_lazy('events:me_event_id_join', kwargs={'event_id': model.event.id}) + f'?{querystring}' + 
i_owe_you["valid_until"] = None + + extra = {"subscription" if is_subscription else "plan_financing": i_owe_you} + model = bc.database.create( + user=1, + academy=academy, + cohort=cohort, + cohort_user=cohort_user, + service=service, + event=event, + event_type=event_type, + token=1, + **extra, + ) + querystring = bc.format.to_querystring({"token": model.token.key}) + + url = reverse_lazy("events:me_event_id_join", kwargs={"event_id": model.event.id}) + f"?{querystring}" response = client.get(url) @@ -1460,22 +1602,22 @@ def test__post__auth__no_saas__finantial_status_no_late(bc: Breathecode, client: # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) assert content, expected assert response.status_code == 200 - assert bc.database.list_of('events.Event') == [ + assert bc.database.list_of("events.Event") == [ bc.format.to_dict(model.event), ] - assert bc.database.list_of('payments.Consumable') == [] + assert bc.database.list_of("payments.Consumable") == [] - assert bc.database.list_of('payments.ConsumptionSession') == [] - assert bc.database.list_of('events.EventCheckin') == [] + assert bc.database.list_of("payments.ConsumptionSession") == [] + assert bc.database.list_of("events.EventCheckin") == [] bc.check.calls(tasks.end_the_consumption_session.apply_async.call_args_list, []) @@ -1483,93 +1625,92 @@ def test__post__auth__no_saas__finantial_status_no_late(bc: Breathecode, client: # Given: A no SAAS student who hasn't paid # When: auth # Then: response 402 -@pytest.mark.parametrize('academy, cohort', [ - ( - { - 'available_as_saas': True - }, - { - 'available_as_saas': False - }, - ), - ( - { - 'available_as_saas': False - }, - { - 'available_as_saas': None - }, - ), -]) -@patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) -@patch('breathecode.events.permissions.flags.Release.enable_consume_live_events', MagicMock(return_value=True)) -@patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) -@patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) -@patch('breathecode.payments.tasks.end_the_consumption_session.apply_async', MagicMock(return_value=None)) +@pytest.mark.parametrize( + "academy, cohort", + [ + ( + {"available_as_saas": True}, + {"available_as_saas": False}, + ), + ( + {"available_as_saas": False}, + {"available_as_saas": None}, + ), + ], +) +@patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) +@patch("breathecode.events.permissions.flags.Release.enable_consume_live_events", MagicMock(return_value=True)) +@patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) +@patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) +@patch("breathecode.payments.tasks.end_the_consumption_session.apply_async", MagicMock(return_value=None)) def test__post__auth__no_saas__finantial_status_late(bc: Breathecode, client: fx.Client, academy, cohort): - service = {'slug': 'event_join'} + service = {"slug": "event_join"} online_meeting_url = bc.fake.url() delta = timedelta(seconds=random.randint(1, 1000)) event = { - 'host_user_id': 1, - 'starting_at': UTC_NOW + delta, - 'ending_at': UTC_NOW + delta, - 'live_stream_url': online_meeting_url, + "host_user_id": 1, + "starting_at": UTC_NOW + delta, + "ending_at": 
UTC_NOW + delta, + "live_stream_url": online_meeting_url, } - event_type = {'icon_url': bc.fake.url()} + event_type = {"icon_url": bc.fake.url()} is_subscription = bool(random.randbytes(1)) i_owe_you = { - 'next_payment_at': UTC_NOW + timedelta(weeks=4), - 'valid_until': UTC_NOW + timedelta(weeks=4), + "next_payment_at": UTC_NOW + timedelta(weeks=4), + "valid_until": UTC_NOW + timedelta(weeks=4), } if is_subscription and bool(random.randbytes(1)): - i_owe_you['valid_until'] = None - - extra = {'subscription' if is_subscription else 'plan_financing': i_owe_you} - cohort_user = {'finantial_status': 'LATE', 'educational_status': 'ACTIVE'} - model = bc.database.create(user=1, - academy=academy, - cohort=cohort, - cohort_user=cohort_user, - service=service, - event=event, - event_type=event_type, - token=1, - **extra) - querystring = bc.format.to_querystring({'token': model.token.key}) - - url = reverse_lazy('events:me_event_id_join', kwargs={'event_id': model.event.id}) + f'?{querystring}' + i_owe_you["valid_until"] = None + + extra = {"subscription" if is_subscription else "plan_financing": i_owe_you} + cohort_user = {"finantial_status": "LATE", "educational_status": "ACTIVE"} + model = bc.database.create( + user=1, + academy=academy, + cohort=cohort, + cohort_user=cohort_user, + service=service, + event=event, + event_type=event_type, + token=1, + **extra, + ) + querystring = bc.format.to_querystring({"token": model.token.key}) + + url = reverse_lazy("events:me_event_id_join", kwargs={"event_id": model.event.id}) + f"?{querystring}" response = client.get(url) content = bc.format.from_bytes(response.content) - expected = render_message('You must get a plan in order to access this service', - data={ - 'GO_BACK': 'Go back to Dashboard', - 'URL_BACK': 'https://4geeks.com/choose-program', - 'BUTTON': 'Get a plan', - 'LINK': f'https://4geeks.com/checkout?plan=basic&token={model.token.key}', - }) + expected = render_message( + "You must get a plan in order to access this service", + data={ + "GO_BACK": "Go back to Dashboard", + "URL_BACK": "https://4geeks.com/choose-program", + "BUTTON": "Get a plan", + "LINK": f"https://4geeks.com/checkout?plan=basic&token={model.token.key}", + }, + ) # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) assert content == expected assert response.status_code == 402 - assert bc.database.list_of('events.Event') == [ + assert bc.database.list_of("events.Event") == [ bc.format.to_dict(model.event), ] - assert bc.database.list_of('payments.Consumable') == [] + assert bc.database.list_of("payments.Consumable") == [] - assert bc.database.list_of('payments.ConsumptionSession') == [] - assert bc.database.list_of('events.EventCheckin') == [] + assert bc.database.list_of("payments.ConsumptionSession") == [] + assert bc.database.list_of("events.EventCheckin") == [] bc.check.calls(tasks.end_the_consumption_session.apply_async.call_args_list, []) diff --git a/breathecode/events/tests/urls/tests_me_event_liveclass.py b/breathecode/events/tests/urls/tests_me_event_liveclass.py index 5d94c8a37..05aa9462a 100644 --- a/breathecode/events/tests/urls/tests_me_event_liveclass.py +++ b/breathecode/events/tests/urls/tests_me_event_liveclass.py @@ -9,9 +9,9 @@ def cohort_serializer(cohort): return { - 'id': cohort.id, - 'name': cohort.name, - 'slug': cohort.slug, + "id": cohort.id, + "name": 
cohort.name, + "slug": cohort.slug, } @@ -25,13 +25,13 @@ def get_serializer(self, event_type, cohort, data={}): started_at = self.bc.datetime.to_iso_string(event_type.started_at) return { - 'id': event_type.id, - 'started_at': started_at, - 'ended_at': ended_at, - 'cohort': cohort_serializer(cohort), - 'starting_at': self.bc.datetime.to_iso_string(event_type.starting_at), - 'ending_at': self.bc.datetime.to_iso_string(event_type.ending_at), - 'hash': event_type.hash, + "id": event_type.id, + "started_at": started_at, + "ended_at": ended_at, + "cohort": cohort_serializer(cohort), + "starting_at": self.bc.datetime.to_iso_string(event_type.starting_at), + "ending_at": self.bc.datetime.to_iso_string(event_type.ending_at), + "hash": event_type.hash, **data, } @@ -40,14 +40,14 @@ class AcademyEventTestSuite(EventTestCase): # When: I call the API without authentication # Then: I should get a 401 error - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_no_auth(self): - url = reverse_lazy('events:me_event_liveclass') + url = reverse_lazy("events:me_event_liveclass") response = self.client.get(url) json = response.json() - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} self.assertEqual(json, expected) self.assertEqual(response.status_code, 401) @@ -55,14 +55,14 @@ def test_no_auth(self): # Given: User # When: User is authenticated and has no LiveClass # Then: I should get a 200 status code with no data - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_zero_live_classes(self): self.bc.request.set_headers(academy=1) model = self.bc.database.create(user=1) self.client.force_authenticate(model.user) - url = reverse_lazy('events:me_event_liveclass') + url = reverse_lazy("events:me_event_liveclass") response = self.client.get(url) json = response.json() @@ -71,20 +71,20 @@ def test_zero_live_classes(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, 200) - self.assertEqual(self.bc.database.list_of('events.LiveClass'), []) + self.assertEqual(self.bc.database.list_of("events.LiveClass"), []) # Given: a User, LiveClass, Cohort and CohortTimeSlot # When: User is authenticated, has LiveClass and CohortUser belongs to this LiveClass # Then: I should get a 200 status code with the LiveClass data - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_one_live_class(self): 
self.bc.request.set_headers(academy=1) model = self.bc.database.create(user=1, live_class=1, cohort=1, cohort_time_slot=1, cohort_user=1) self.client.force_authenticate(model.user) - url = reverse_lazy('events:me_event_liveclass') + url = reverse_lazy("events:me_event_liveclass") response = self.client.get(url) json = response.json() @@ -92,17 +92,22 @@ def test_one_live_class(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, 200) - self.assertEqual(self.bc.database.list_of('events.LiveClass'), [ - self.bc.format.to_dict(model.live_class), - ]) + self.assertEqual( + self.bc.database.list_of("events.LiveClass"), + [ + self.bc.format.to_dict(model.live_class), + ], + ) # Given: LiveClass.objects.filter is mocked # When: the mock is called # Then: the mock should be called with the correct arguments and does not raise an exception - @patch('breathecode.utils.api_view_extensions.extensions.lookup_extension.compile_lookup', - MagicMock(wraps=lookup_extension.compile_lookup)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch( + "breathecode.utils.api_view_extensions.extensions.lookup_extension.compile_lookup", + MagicMock(wraps=lookup_extension.compile_lookup), + ) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_lookup_extension(self): self.bc.request.set_headers(academy=1) @@ -111,61 +116,64 @@ def test_lookup_extension(self): self.client.force_authenticate(model.user) args, kwargs = self.bc.format.call( - 'en', + "en", strings={ - 'exact': [ - 'remote_meeting_url', + "exact": [ + "remote_meeting_url", ], }, bools={ - 'is_null': ['ended_at'], + "is_null": ["ended_at"], }, datetimes={ - 'gte': ['starting_at'], - 'lte': ['ending_at'], + "gte": ["starting_at"], + "lte": ["ending_at"], }, slugs=[ - 'cohort_time_slot__cohort', - 'cohort_time_slot__cohort__academy', - 'cohort_time_slot__cohort__syllabus_version__syllabus', + "cohort_time_slot__cohort", + "cohort_time_slot__cohort__academy", + "cohort_time_slot__cohort__syllabus_version__syllabus", ], overwrite={ - 'cohort': 'cohort_time_slot__cohort', - 'academy': 'cohort_time_slot__cohort__academy', - 'syllabus': 'cohort_time_slot__cohort__syllabus_version__syllabus', - 'start': 'starting_at', - 'end': 'ending_at', - 'upcoming': 'ended_at', + "cohort": "cohort_time_slot__cohort", + "academy": "cohort_time_slot__cohort__academy", + "syllabus": "cohort_time_slot__cohort__syllabus_version__syllabus", + "start": "starting_at", + "end": "ending_at", + "upcoming": "ended_at", }, ) query = self.bc.format.lookup(*args, **kwargs) - url = reverse_lazy('events:me_event_liveclass') + '?' + self.bc.format.querystring(query) - - self.assertEqual([x for x in query], [ - 'cohort', - 'academy', - 'syllabus', - 'remote_meeting_url', - 'start', - 'end', - 'upcoming', - ]) + url = reverse_lazy("events:me_event_liveclass") + "?" 
+ self.bc.format.querystring(query) + + self.assertEqual( + [x for x in query], + [ + "cohort", + "academy", + "syllabus", + "remote_meeting_url", + "start", + "end", + "upcoming", + ], + ) response = self.client.get(url) json = response.json() expected = [] - for x in ['overwrite', 'custom_fields']: + for x in ["overwrite", "custom_fields"]: if x in kwargs: del kwargs[x] - for field in ['ids', 'slugs']: + for field in ["ids", "slugs"]: values = kwargs.get(field, tuple()) kwargs[field] = tuple(values) - for field in ['ints', 'strings', 'bools', 'datetimes']: + for field in ["ints", "strings", "bools", "datetimes"]: modes = kwargs.get(field, {}) for mode in modes: if not isinstance(kwargs[field][mode], tuple): @@ -173,12 +181,18 @@ def test_lookup_extension(self): kwargs[field] = frozenset(modes.items()) - self.bc.check.calls(lookup_extension.compile_lookup.call_args_list, [ - call(**kwargs), - ]) + self.bc.check.calls( + lookup_extension.compile_lookup.call_args_list, + [ + call(**kwargs), + ], + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, 200) - self.assertEqual(self.bc.database.list_of('events.LiveClass'), [ - self.bc.format.to_dict(model.live_class), - ]) + self.assertEqual( + self.bc.database.list_of("events.LiveClass"), + [ + self.bc.format.to_dict(model.live_class), + ], + ) diff --git a/breathecode/events/tests/urls/tests_me_event_liveclass_join_hash.py b/breathecode/events/tests/urls/tests_me_event_liveclass_join_hash.py index 01c3a8fe5..7f764bde1 100644 --- a/breathecode/events/tests/urls/tests_me_event_liveclass_join_hash.py +++ b/breathecode/events/tests/urls/tests_me_event_liveclass_join_hash.py @@ -23,28 +23,26 @@ def setup(db): def consumption_session(live_class, cohort_set, user, consumable, data={}): return { - 'consumable_id': consumable.id, - 'duration': timedelta(), - 'operation_code': 'default', - 'eta': ..., - 'how_many': 1.0, - 'id': 0, - 'path': 'payments.CohortSet', - 'related_id': cohort_set.id, - 'related_slug': cohort_set.slug, - 'request': { - 'args': [], - 'headers': { - 'academy': None + "consumable_id": consumable.id, + "duration": timedelta(), + "operation_code": "default", + "eta": ..., + "how_many": 1.0, + "id": 0, + "path": "payments.CohortSet", + "related_id": cohort_set.id, + "related_slug": cohort_set.slug, + "request": { + "args": [], + "headers": {"academy": None}, + "kwargs": { + "hash": live_class.hash, }, - 'kwargs': { - 'hash': live_class.hash, - }, - 'user': user.id + "user": user.id, }, - 'status': 'PENDING', - 'user_id': user.id, - 'was_discounted': False, + "status": "PENDING", + "user_id": user.id, + "was_discounted": False, **data, } @@ -52,18 +50,18 @@ def consumption_session(live_class, cohort_set, user, consumable, data={}): # IMPORTANT: the loader.render_to_string in a function is inside of function render def render_message(message, data={}): request = None - context = {'MESSAGE': message, 'BUTTON': None, 'BUTTON_TARGET': '_blank', 'LINK': None, **data} + context = {"MESSAGE": message, "BUTTON": None, "BUTTON_TARGET": "_blank", "LINK": None, **data} - return loader.render_to_string('message.html', context, request) + return loader.render_to_string("message.html", context, request) def serializer(live_class): return { - 'id': live_class.id, - 'starting_at': live_class.starting_at, - 'ending_at': live_class.ending_at, - 'live_stream_url': live_class.cohort_time_slot.cohort.online_meeting_url, - 'title': live_class.cohort_time_slot.cohort.name, + "id": live_class.id, + "starting_at": live_class.starting_at, + 
"ending_at": live_class.ending_at, + "live_stream_url": live_class.cohort_time_slot.cohort.online_meeting_url, + "title": live_class.cohort_time_slot.cohort.name, } @@ -71,17 +69,17 @@ def serializer(live_class): def render_countdown(live_class, token, academy=None): request = None context = { - 'event': serializer(live_class), - 'token': token.key, + "event": serializer(live_class), + "token": token.key, } if academy: - context['COMPANY_INFO_EMAIL'] = academy.feedback_email - context['COMPANY_LEGAL_NAME'] = academy.legal_name or academy.name - context['COMPANY_LOGO'] = academy.logo_url - context['COMPANY_NAME'] = academy.name + context["COMPANY_INFO_EMAIL"] = academy.feedback_email + context["COMPANY_LEGAL_NAME"] = academy.legal_name or academy.name + context["COMPANY_LOGO"] = academy.logo_url + context["COMPANY_NAME"] = academy.name - return loader.render_to_string('countdown.html', context, request) + return loader.render_to_string("countdown.html", context, request) class AcademyEventTestSuite(EventTestCase): @@ -89,482 +87,515 @@ class AcademyEventTestSuite(EventTestCase): # When: no auth # Then: return 401 - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) - @patch('breathecode.payments.tasks.end_the_consumption_session.apply_async', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) + @patch("breathecode.payments.tasks.end_the_consumption_session.apply_async", MagicMock(return_value=None)) def test_no_auth(self): - url = reverse_lazy('events:me_event_liveclass_join_hash', kwargs={'hash': 'potato'}) + url = reverse_lazy("events:me_event_liveclass_join_hash", kwargs={"hash": "potato"}) response = self.client.get(url) - url_hash = self.bc.format.to_base64('/v1/events/me/event/liveclass/join/potato') + url_hash = self.bc.format.to_base64("/v1/events/me/event/liveclass/join/potato") content = self.bc.format.from_bytes(response.content) - expected = '' + expected = "" # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, 302) - self.assertEqual(response.url, f'/v1/auth/view/login?attempt=1&url={url_hash}') + self.assertEqual(response.url, f"/v1/auth/view/login?attempt=1&url={url_hash}") self.bc.check.calls(tasks.end_the_consumption_session.apply_async.call_args_list, []) # When: no consumables # Then: return 402 - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) - @patch('breathecode.payments.tasks.end_the_consumption_session.apply_async', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) + @patch("breathecode.payments.tasks.end_the_consumption_session.apply_async", MagicMock(return_value=None)) def test_no_consumables(self): model = self.bc.database.create(user=1, 
token=1) - querystring = self.bc.format.to_querystring({'token': model.token.key}) + querystring = self.bc.format.to_querystring({"token": model.token.key}) - url = reverse_lazy('events:me_event_liveclass_join_hash', kwargs={'hash': 'potato'}) + f'?{querystring}' + url = reverse_lazy("events:me_event_liveclass_join_hash", kwargs={"hash": "potato"}) + f"?{querystring}" response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - expected = render_message('not-found') + expected = render_message("not-found") # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, 404) - self.assertEqual(self.bc.database.list_of('events.LiveClass'), []) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.assertEqual(self.bc.database.list_of('payments.ConsumptionSession'), []) + self.assertEqual(self.bc.database.list_of("events.LiveClass"), []) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.assertEqual(self.bc.database.list_of("payments.ConsumptionSession"), []) self.bc.check.calls(tasks.end_the_consumption_session.apply_async.call_args_list, []) # Given: no Consumable and LiveClass, User have Group and Permission # When: Feature flag set to False # Then: return 404 - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.events.permissions.flags.Release.enable_consume_live_classes', MagicMock(return_value=False)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) - @patch('breathecode.payments.tasks.end_the_consumption_session.apply_async', MagicMock(return_value=None)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.events.permissions.flags.Release.enable_consume_live_classes", MagicMock(return_value=False)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) + @patch("breathecode.payments.tasks.end_the_consumption_session.apply_async", MagicMock(return_value=None)) def test_no_consumables__bypass_with_feature_flag__live_class_not_found(self): - service = {'slug': 'live_class_join'} + service = {"slug": "live_class_join"} model = self.bc.database.create(user=1, group=1, service=service, token=1) - querystring = self.bc.format.to_querystring({'token': model.token.key}) + querystring = self.bc.format.to_querystring({"token": model.token.key}) - url = reverse_lazy('events:me_event_liveclass_join_hash', kwargs={'hash': 'potato'}) + f'?{querystring}' + url = reverse_lazy("events:me_event_liveclass_join_hash", kwargs={"hash": "potato"}) + f"?{querystring}" response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - expected = render_message('not-found') + expected = render_message("not-found") # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, 
expected) self.assertEqual(response.status_code, 404) - self.assertEqual(self.bc.database.list_of('events.LiveClass'), []) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.assertEqual(self.bc.database.list_of('payments.ConsumptionSession'), []) + self.assertEqual(self.bc.database.list_of("events.LiveClass"), []) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.assertEqual(self.bc.database.list_of("payments.ConsumptionSession"), []) self.bc.check.calls(tasks.end_the_consumption_session.apply_async.call_args_list, []) # Given: no Consumable, with LiveClass, CohortUser, User have Group and Permission # When: Feature flag set to False, right hash and cohort.live_class_join not set # Then: return 400 - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.events.permissions.flags.Release.enable_consume_live_classes', MagicMock(return_value=False)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) - @patch('breathecode.payments.tasks.end_the_consumption_session.apply_async', MagicMock(return_value=None)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.events.permissions.flags.Release.enable_consume_live_classes", MagicMock(return_value=False)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) + @patch("breathecode.payments.tasks.end_the_consumption_session.apply_async", MagicMock(return_value=None)) def test_no_consumables__bypass_with_feature_flag__with_live_class__cohort_without_url(self): - service = {'slug': 'live_class_join'} + service = {"slug": "live_class_join"} delta = timedelta(seconds=random.randint(1, 1000)) - live_class = {'starting_at': UTC_NOW - delta, 'ending_at': UTC_NOW + delta} + live_class = {"starting_at": UTC_NOW - delta, "ending_at": UTC_NOW + delta} model = self.bc.database.create(user=1, group=1, service=service, live_class=live_class, cohort_user=1, token=1) - querystring = self.bc.format.to_querystring({'token': model.token.key}) + querystring = self.bc.format.to_querystring({"token": model.token.key}) - url = reverse_lazy('events:me_event_liveclass_join_hash', kwargs={'hash': model.live_class.hash - }) + f'?{querystring}' + url = ( + reverse_lazy("events:me_event_liveclass_join_hash", kwargs={"hash": model.live_class.hash}) + + f"?{querystring}" + ) response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - expected = render_message('cohort-online-meeting-url-not-found') + expected = render_message("cohort-online-meeting-url-not-found") # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, 400) - self.assertEqual(self.bc.database.list_of('events.LiveClass'), [ - self.bc.format.to_dict(model.live_class), - ]) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.assertEqual(self.bc.database.list_of('payments.ConsumptionSession'), []) + self.assertEqual( + self.bc.database.list_of("events.LiveClass"), + [ + 
self.bc.format.to_dict(model.live_class), + ], + ) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.assertEqual(self.bc.database.list_of("payments.ConsumptionSession"), []) self.bc.check.calls(tasks.end_the_consumption_session.apply_async.call_args_list, []) # Given: no Consumable, with LiveClass, CohortUser, User have Group and Permission # When: Feature flag set to False, right hash and cohort.live_class_join set # Then: return 302 to cohort.online_meeting_url - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.events.permissions.flags.Release.enable_consume_live_classes', MagicMock(return_value=False)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) - @patch('breathecode.payments.tasks.end_the_consumption_session.apply_async', MagicMock(return_value=None)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.events.permissions.flags.Release.enable_consume_live_classes", MagicMock(return_value=False)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) + @patch("breathecode.payments.tasks.end_the_consumption_session.apply_async", MagicMock(return_value=None)) def test_no_consumables__bypass_with_feature_flag__with_live_class__cohort_with_url(self): - service = {'slug': 'live_class_join'} + service = {"slug": "live_class_join"} online_meeting_url = self.bc.fake.url() - cohort = {'online_meeting_url': online_meeting_url} + cohort = {"online_meeting_url": online_meeting_url} delta = timedelta(seconds=random.randint(1, 1000)) - live_class = {'starting_at': UTC_NOW - delta, 'ending_at': UTC_NOW + delta} - model = self.bc.database.create(user=1, - group=1, - service=service, - live_class=live_class, - cohort_user=1, - cohort=cohort, - token=1) - querystring = self.bc.format.to_querystring({'token': model.token.key}) - - url = reverse_lazy('events:me_event_liveclass_join_hash', kwargs={'hash': model.live_class.hash - }) + f'?{querystring}' + live_class = {"starting_at": UTC_NOW - delta, "ending_at": UTC_NOW + delta} + model = self.bc.database.create( + user=1, group=1, service=service, live_class=live_class, cohort_user=1, cohort=cohort, token=1 + ) + querystring = self.bc.format.to_querystring({"token": model.token.key}) + + url = ( + reverse_lazy("events:me_event_liveclass_join_hash", kwargs={"hash": model.live_class.hash}) + + f"?{querystring}" + ) response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - expected = '' + expected = "" # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, online_meeting_url) - self.assertEqual(self.bc.database.list_of('events.LiveClass'), [ - self.bc.format.to_dict(model.live_class), - ]) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.assertEqual(self.bc.database.list_of('payments.ConsumptionSession'), []) + self.assertEqual( + self.bc.database.list_of("events.LiveClass"), + [ + 
self.bc.format.to_dict(model.live_class), + ], + ) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.assertEqual(self.bc.database.list_of("payments.ConsumptionSession"), []) self.bc.check.calls(tasks.end_the_consumption_session.apply_async.call_args_list, []) # Given: no Consumable and LiveClass, User have Group and Permission # When: Feature flag set to True # Then: return 404 - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.events.permissions.flags.Release.enable_consume_live_classes', MagicMock(return_value=True)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) - @patch('breathecode.payments.tasks.end_the_consumption_session.apply_async', MagicMock(return_value=None)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.events.permissions.flags.Release.enable_consume_live_classes", MagicMock(return_value=True)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) + @patch("breathecode.payments.tasks.end_the_consumption_session.apply_async", MagicMock(return_value=None)) def test_no_consumables__it_try_to_consume__live_class_not_found(self): - service = {'slug': 'live_class_join'} + service = {"slug": "live_class_join"} model = self.bc.database.create(user=1, group=1, service=service, token=1) - querystring = self.bc.format.to_querystring({'token': model.token.key}) + querystring = self.bc.format.to_querystring({"token": model.token.key}) - url = reverse_lazy('events:me_event_liveclass_join_hash', kwargs={'hash': 'potato'}) + f'?{querystring}' + url = reverse_lazy("events:me_event_liveclass_join_hash", kwargs={"hash": "potato"}) + f"?{querystring}" response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - expected = render_message('not-found') + expected = render_message("not-found") # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, 404) - self.assertEqual(self.bc.database.list_of('events.LiveClass'), []) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.assertEqual(self.bc.database.list_of('payments.ConsumptionSession'), []) + self.assertEqual(self.bc.database.list_of("events.LiveClass"), []) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.assertEqual(self.bc.database.list_of("payments.ConsumptionSession"), []) self.bc.check.calls(tasks.end_the_consumption_session.apply_async.call_args_list, []) # Given: no Consumable with LiveClass, User have Group and Permission # When: Feature flag set to True and cohort.live_class_join not set # Then: return 400 - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.events.permissions.flags.Release.enable_consume_live_classes', MagicMock(return_value=True)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', 
MagicMock(return_value=None)) - @patch('breathecode.payments.tasks.end_the_consumption_session.apply_async', MagicMock(return_value=None)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.events.permissions.flags.Release.enable_consume_live_classes", MagicMock(return_value=True)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) + @patch("breathecode.payments.tasks.end_the_consumption_session.apply_async", MagicMock(return_value=None)) def test_no_consumables__it_try_to_consume__with_live_class__cohort_without_url(self): - service = {'slug': 'live_class_join'} + service = {"slug": "live_class_join"} delta = timedelta(seconds=random.randint(1, 1000)) - live_class = {'starting_at': UTC_NOW - delta, 'ending_at': UTC_NOW + delta} - model = self.bc.database.create(user=1, - group=1, - service=service, - live_class=live_class, - cohort_user=1, - cohort=1, - token=1) - querystring = self.bc.format.to_querystring({'token': model.token.key}) - - url = reverse_lazy('events:me_event_liveclass_join_hash', kwargs={'hash': model.live_class.hash - }) + f'?{querystring}' + live_class = {"starting_at": UTC_NOW - delta, "ending_at": UTC_NOW + delta} + model = self.bc.database.create( + user=1, group=1, service=service, live_class=live_class, cohort_user=1, cohort=1, token=1 + ) + querystring = self.bc.format.to_querystring({"token": model.token.key}) + + url = ( + reverse_lazy("events:me_event_liveclass_join_hash", kwargs={"hash": model.live_class.hash}) + + f"?{querystring}" + ) response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - expected = render_message('cohort-online-meeting-url-not-found') + expected = render_message("cohort-online-meeting-url-not-found") # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, 400) - self.assertEqual(self.bc.database.list_of('events.LiveClass'), [ - self.bc.format.to_dict(model.live_class), - ]) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.assertEqual(self.bc.database.list_of('payments.ConsumptionSession'), []) + self.assertEqual( + self.bc.database.list_of("events.LiveClass"), + [ + self.bc.format.to_dict(model.live_class), + ], + ) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.assertEqual(self.bc.database.list_of("payments.ConsumptionSession"), []) self.bc.check.calls(tasks.end_the_consumption_session.apply_async.call_args_list, []) # Given: no Consumable with LiveClass, User have Group and Permission # When: Feature flag set to True and cohort.live_class_join set # Then: return 402 - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.events.permissions.flags.Release.enable_consume_live_classes', MagicMock(return_value=True)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) - @patch('breathecode.payments.tasks.end_the_consumption_session.apply_async', MagicMock(return_value=None)) + @patch("django.utils.timezone.now", 
MagicMock(return_value=UTC_NOW)) + @patch("breathecode.events.permissions.flags.Release.enable_consume_live_classes", MagicMock(return_value=True)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) + @patch("breathecode.payments.tasks.end_the_consumption_session.apply_async", MagicMock(return_value=None)) def test_no_consumables__it_try_to_consume__with_live_class__cohort_with_url(self): - service = {'slug': 'live_class_join'} + service = {"slug": "live_class_join"} online_meeting_url = self.bc.fake.url() - cohort = {'online_meeting_url': online_meeting_url, 'available_as_saas': True} + cohort = {"online_meeting_url": online_meeting_url, "available_as_saas": True} delta = timedelta(seconds=random.randint(1, 1000)) - live_class = {'starting_at': UTC_NOW - delta, 'ending_at': UTC_NOW + delta} - model = self.bc.database.create(user=1, - group=1, - service=service, - live_class=live_class, - cohort_user=1, - cohort=cohort, - token=1) - querystring = self.bc.format.to_querystring({'token': model.token.key}) - - url = reverse_lazy('events:me_event_liveclass_join_hash', kwargs={'hash': model.live_class.hash - }) + f'?{querystring}' + live_class = {"starting_at": UTC_NOW - delta, "ending_at": UTC_NOW + delta} + model = self.bc.database.create( + user=1, group=1, service=service, live_class=live_class, cohort_user=1, cohort=cohort, token=1 + ) + querystring = self.bc.format.to_querystring({"token": model.token.key}) + + url = ( + reverse_lazy("events:me_event_liveclass_join_hash", kwargs={"hash": model.live_class.hash}) + + f"?{querystring}" + ) response = self.client.get(url) content = self.bc.format.from_bytes(response.content) template_data = {} - template_data['GO_BACK'] = 'Go back to Dashboard' - template_data['URL_BACK'] = 'https://4geeks.com/choose-program' - expected = render_message('with-consumer-not-enough-consumables', data=template_data) + template_data["GO_BACK"] = "Go back to Dashboard" + template_data["URL_BACK"] = "https://4geeks.com/choose-program" + expected = render_message("with-consumer-not-enough-consumables", data=template_data) # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, 402) - self.assertEqual(self.bc.database.list_of('events.LiveClass'), [ - self.bc.format.to_dict(model.live_class), - ]) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.assertEqual(self.bc.database.list_of('payments.ConsumptionSession'), []) + self.assertEqual( + self.bc.database.list_of("events.LiveClass"), + [ + self.bc.format.to_dict(model.live_class), + ], + ) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.assertEqual(self.bc.database.list_of("payments.ConsumptionSession"), []) self.bc.check.calls(tasks.end_the_consumption_session.apply_async.call_args_list, []) # Given: with Consumable, LiveClass, User have Group and Permission # When: Feature flag set to True, class end in the past and cohort.live_class_join set # Then: return 200 and create a ConsumptionSession - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.events.permissions.flags.Release.enable_consume_live_classes', 
MagicMock(return_value=True)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) - @patch('breathecode.payments.tasks.end_the_consumption_session.apply_async', MagicMock(return_value=None)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.events.permissions.flags.Release.enable_consume_live_classes", MagicMock(return_value=True)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) + @patch("breathecode.payments.tasks.end_the_consumption_session.apply_async", MagicMock(return_value=None)) def test_with_consumable__it_try_to_consume__with_live_class__in_the_past(self): - service = {'slug': 'live_class_join'} + service = {"slug": "live_class_join"} online_meeting_url = self.bc.fake.url() - cohort = {'online_meeting_url': online_meeting_url} + cohort = {"online_meeting_url": online_meeting_url} delta = timedelta(seconds=random.randint(1, 1000)) - live_class = {'starting_at': UTC_NOW - delta, 'ending_at': UTC_NOW - delta} - model = self.bc.database.create(user=1, - group=1, - service=service, - live_class=live_class, - cohort_user=1, - consumable=1, - cohort=cohort, - token=1) - querystring = self.bc.format.to_querystring({'token': model.token.key}) - - url = reverse_lazy('events:me_event_liveclass_join_hash', kwargs={'hash': model.live_class.hash - }) + f'?{querystring}' + live_class = {"starting_at": UTC_NOW - delta, "ending_at": UTC_NOW - delta} + model = self.bc.database.create( + user=1, group=1, service=service, live_class=live_class, cohort_user=1, consumable=1, cohort=cohort, token=1 + ) + querystring = self.bc.format.to_querystring({"token": model.token.key}) + + url = ( + reverse_lazy("events:me_event_liveclass_join_hash", kwargs={"hash": model.live_class.hash}) + + f"?{querystring}" + ) response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - expected = render_message('class-has-ended') + expected = render_message("class-has-ended") # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, 400) - self.assertEqual(self.bc.database.list_of('events.LiveClass'), [ - self.bc.format.to_dict(model.live_class), - ]) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), [ - self.bc.format.to_dict(model.consumable), - ]) - - self.assertEqual(self.bc.database.list_of('payments.ConsumptionSession'), []) + self.assertEqual( + self.bc.database.list_of("events.LiveClass"), + [ + self.bc.format.to_dict(model.live_class), + ], + ) + self.assertEqual( + self.bc.database.list_of("payments.Consumable"), + [ + self.bc.format.to_dict(model.consumable), + ], + ) + + self.assertEqual(self.bc.database.list_of("payments.ConsumptionSession"), []) self.bc.check.calls(tasks.end_the_consumption_session.apply_async.call_args_list, []) # Given: with Consumable, LiveClass, User have Group and Permission # When: Feature flag set to True and class end in the future # Then: return 200 and create a ConsumptionSession - @patch('django.utils.timezone.now', 
MagicMock(return_value=UTC_NOW)) - @patch('breathecode.events.permissions.flags.Release.enable_consume_live_classes', MagicMock(return_value=True)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) - @patch('breathecode.payments.tasks.end_the_consumption_session.apply_async', MagicMock(return_value=None)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.events.permissions.flags.Release.enable_consume_live_classes", MagicMock(return_value=True)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) + @patch("breathecode.payments.tasks.end_the_consumption_session.apply_async", MagicMock(return_value=None)) def test_with_consumable__it_try_to_consume__with_live_class__in_the_future(self): - service = {'slug': 'live_class_join'} + service = {"slug": "live_class_join"} online_meeting_url = self.bc.fake.url() - cohort = {'online_meeting_url': online_meeting_url, 'available_as_saas': True} + cohort = {"online_meeting_url": online_meeting_url, "available_as_saas": True} delta = timedelta(seconds=random.randint(1, 1000)) - live_class = {'starting_at': UTC_NOW - delta, 'ending_at': UTC_NOW + delta} - academy = {'available_as_saas': True} - model = self.bc.database.create(user=1, - group=1, - service=service, - live_class=live_class, - cohort_user=1, - cohort=cohort, - cohort_set=1, - cohort_set_cohort=1, - consumable=1, - token=1, - academy=academy) - querystring = self.bc.format.to_querystring({'token': model.token.key}) - - url = reverse_lazy('events:me_event_liveclass_join_hash', kwargs={'hash': model.live_class.hash - }) + f'?{querystring}' + live_class = {"starting_at": UTC_NOW - delta, "ending_at": UTC_NOW + delta} + academy = {"available_as_saas": True} + model = self.bc.database.create( + user=1, + group=1, + service=service, + live_class=live_class, + cohort_user=1, + cohort=cohort, + cohort_set=1, + cohort_set_cohort=1, + consumable=1, + token=1, + academy=academy, + ) + querystring = self.bc.format.to_querystring({"token": model.token.key}) + + url = ( + reverse_lazy("events:me_event_liveclass_join_hash", kwargs={"hash": model.live_class.hash}) + + f"?{querystring}" + ) response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - expected = '' + expected = "" # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, online_meeting_url) - self.assertEqual(self.bc.database.list_of('events.LiveClass'), [ - self.bc.format.to_dict(model.live_class), - ]) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), [ - self.bc.format.to_dict(model.consumable), - ]) - - self.assertEqual(self.bc.database.list_of('payments.ConsumptionSession'), [ - consumption_session(model.live_class, - model.cohort_set, - model.user, - model.consumable, - data={ - 'id': 1, - 'duration': delta, - 'eta': UTC_NOW + delta, - }), - ]) - - self.bc.check.calls(tasks.end_the_consumption_session.apply_async.call_args_list, [ - call(args=(1, 1), 
eta=UTC_NOW + delta), - ]) + self.assertEqual( + self.bc.database.list_of("events.LiveClass"), + [ + self.bc.format.to_dict(model.live_class), + ], + ) + self.assertEqual( + self.bc.database.list_of("payments.Consumable"), + [ + self.bc.format.to_dict(model.consumable), + ], + ) + + self.assertEqual( + self.bc.database.list_of("payments.ConsumptionSession"), + [ + consumption_session( + model.live_class, + model.cohort_set, + model.user, + model.consumable, + data={ + "id": 1, + "duration": delta, + "eta": UTC_NOW + delta, + }, + ), + ], + ) + + self.bc.check.calls( + tasks.end_the_consumption_session.apply_async.call_args_list, + [ + call(args=(1, 1), eta=UTC_NOW + delta), + ], + ) # Given: with Consumable, LiveClass, User have Group and Permission # When: Feature flag set to True and class start and end in the future # Then: return 200 and create a ConsumptionSession - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.events.permissions.flags.Release.enable_consume_live_classes', MagicMock(return_value=True)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) - @patch('breathecode.payments.tasks.end_the_consumption_session.apply_async', MagicMock(return_value=None)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.events.permissions.flags.Release.enable_consume_live_classes", MagicMock(return_value=True)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) + @patch("breathecode.payments.tasks.end_the_consumption_session.apply_async", MagicMock(return_value=None)) def test_with_consumable__it_try_to_consume__with_live_class__in_the_future__show_countdown(self): - service = {'slug': 'live_class_join'} + service = {"slug": "live_class_join"} online_meeting_url = self.bc.fake.url() - cohort = {'online_meeting_url': online_meeting_url, 'available_as_saas': True} + cohort = {"online_meeting_url": online_meeting_url, "available_as_saas": True} delta = timedelta(seconds=random.randint(1, 1000)) - live_class = {'starting_at': UTC_NOW + delta, 'ending_at': UTC_NOW + delta} - academy = {'available_as_saas': True} - model = self.bc.database.create(user=1, - group=1, - service=service, - live_class=live_class, - cohort_user=1, - cohort=cohort, - cohort_set=1, - cohort_set_cohort=1, - consumable=1, - token=1, - academy=academy) - querystring = self.bc.format.to_querystring({'token': model.token.key}) - - url = reverse_lazy('events:me_event_liveclass_join_hash', kwargs={'hash': model.live_class.hash - }) + f'?{querystring}' + live_class = {"starting_at": UTC_NOW + delta, "ending_at": UTC_NOW + delta} + academy = {"available_as_saas": True} + model = self.bc.database.create( + user=1, + group=1, + service=service, + live_class=live_class, + cohort_user=1, + cohort=cohort, + cohort_set=1, + cohort_set_cohort=1, + consumable=1, + token=1, + academy=academy, + ) + querystring = self.bc.format.to_querystring({"token": model.token.key}) + + url = ( + reverse_lazy("events:me_event_liveclass_join_hash", kwargs={"hash": model.live_class.hash}) + + f"?{querystring}" + ) response = self.client.get(url) @@ -573,160 +604,213 @@ def test_with_consumable__it_try_to_consume__with_live_class__in_the_future__sho # dump error in external 
files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, 200) - self.assertEqual(self.bc.database.list_of('events.LiveClass'), [ - self.bc.format.to_dict(model.live_class), - ]) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), [ - self.bc.format.to_dict(model.consumable), - ]) - - self.assertEqual(self.bc.database.list_of('payments.ConsumptionSession'), [ - consumption_session(model.live_class, - model.cohort_set, - model.user, - model.consumable, - data={ - 'id': 1, - 'duration': delta, - 'eta': UTC_NOW + delta, - }), - ]) - - self.bc.check.calls(tasks.end_the_consumption_session.apply_async.call_args_list, [ - call(args=(1, 1), eta=UTC_NOW + delta), - ]) + self.assertEqual( + self.bc.database.list_of("events.LiveClass"), + [ + self.bc.format.to_dict(model.live_class), + ], + ) + self.assertEqual( + self.bc.database.list_of("payments.Consumable"), + [ + self.bc.format.to_dict(model.consumable), + ], + ) + + self.assertEqual( + self.bc.database.list_of("payments.ConsumptionSession"), + [ + consumption_session( + model.live_class, + model.cohort_set, + model.user, + model.consumable, + data={ + "id": 1, + "duration": delta, + "eta": UTC_NOW + delta, + }, + ), + ], + ) + + self.bc.check.calls( + tasks.end_the_consumption_session.apply_async.call_args_list, + [ + call(args=(1, 1), eta=UTC_NOW + delta), + ], + ) # Given: with Consumable, LiveClass, User have Group and Permission # When: Feature flag set to True and class start and end in the future # Then: return 200 and create a ConsumptionSession - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.events.permissions.flags.Release.enable_consume_live_classes', MagicMock(return_value=True)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) - @patch('breathecode.payments.tasks.end_the_consumption_session.apply_async', MagicMock(return_value=None)) - @patch('breathecode.events.tasks.mark_live_class_as_started.delay', MagicMock(return_value=None)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.events.permissions.flags.Release.enable_consume_live_classes", MagicMock(return_value=True)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) + @patch("breathecode.payments.tasks.end_the_consumption_session.apply_async", MagicMock(return_value=None)) + @patch("breathecode.events.tasks.mark_live_class_as_started.delay", MagicMock(return_value=None)) def test_with_consumable__it_try_to_consume__with_live_class__on_starting_time(self): - service = {'slug': 'live_class_join'} + service = {"slug": "live_class_join"} online_meeting_url = self.bc.fake.url() - cohort = {'online_meeting_url': online_meeting_url, 'available_as_saas': True} + cohort = {"online_meeting_url": online_meeting_url, "available_as_saas": True} delta = timedelta(seconds=random.randint(1, 1000)) delta = timedelta(minutes=30) - live_class = {'starting_at': UTC_NOW - delta, 'ending_at': UTC_NOW + delta} - academy = {'available_as_saas': True} - model = 
self.bc.database.create(user=1, - group=1, - service=service, - live_class=live_class, - cohort_user=1, - cohort=cohort, - cohort_set=1, - cohort_set_cohort=1, - consumable=1, - token=1, - academy=academy) - querystring = self.bc.format.to_querystring({'token': model.token.key}) - - url = reverse_lazy('events:me_event_liveclass_join_hash', kwargs={'hash': model.live_class.hash - }) + f'?{querystring}' + live_class = {"starting_at": UTC_NOW - delta, "ending_at": UTC_NOW + delta} + academy = {"available_as_saas": True} + model = self.bc.database.create( + user=1, + group=1, + service=service, + live_class=live_class, + cohort_user=1, + cohort=cohort, + cohort_set=1, + cohort_set_cohort=1, + consumable=1, + token=1, + academy=academy, + ) + querystring = self.bc.format.to_querystring({"token": model.token.key}) + + url = ( + reverse_lazy("events:me_event_liveclass_join_hash", kwargs={"hash": model.live_class.hash}) + + f"?{querystring}" + ) response = self.client.get(url) self.assertEqual(response.status_code, 302) - self.assertEqual(self.bc.database.list_of('events.LiveClass'), [ - self.bc.format.to_dict(model.live_class), - ]) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), [ - self.bc.format.to_dict(model.consumable), - ]) - - self.assertEqual(self.bc.database.list_of('payments.ConsumptionSession'), [ - consumption_session(model.live_class, - model.cohort_set, - model.user, - model.consumable, - data={ - 'id': 1, - 'duration': delta, - 'eta': UTC_NOW + delta, - }), - ]) - - self.bc.check.calls(tasks.end_the_consumption_session.apply_async.call_args_list, [ - call(args=(1, 1), eta=UTC_NOW + delta), - ]) + self.assertEqual( + self.bc.database.list_of("events.LiveClass"), + [ + self.bc.format.to_dict(model.live_class), + ], + ) + self.assertEqual( + self.bc.database.list_of("payments.Consumable"), + [ + self.bc.format.to_dict(model.consumable), + ], + ) + + self.assertEqual( + self.bc.database.list_of("payments.ConsumptionSession"), + [ + consumption_session( + model.live_class, + model.cohort_set, + model.user, + model.consumable, + data={ + "id": 1, + "duration": delta, + "eta": UTC_NOW + delta, + }, + ), + ], + ) + + self.bc.check.calls( + tasks.end_the_consumption_session.apply_async.call_args_list, + [ + call(args=(1, 1), eta=UTC_NOW + delta), + ], + ) self.bc.check.calls(tasks_events.mark_live_class_as_started.delay.call_args_list, []) # Given: with Consumable, LiveClass, User have Group and Permission # When: Feature flag set to True and class start and end in the future # Then: return a redirection status 302 - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.events.permissions.flags.Release.enable_consume_live_classes', MagicMock(return_value=True)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) - @patch('breathecode.payments.tasks.end_the_consumption_session.apply_async', MagicMock(return_value=None)) - @patch('breathecode.events.tasks.mark_live_class_as_started.delay', MagicMock(return_value=None)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.events.permissions.flags.Release.enable_consume_live_classes", MagicMock(return_value=True)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", 
MagicMock(return_value=None)) + @patch("breathecode.payments.tasks.end_the_consumption_session.apply_async", MagicMock(return_value=None)) + @patch("breathecode.events.tasks.mark_live_class_as_started.delay", MagicMock(return_value=None)) def test_with_consumable__it_try_to_consume__with_live_class__on_starting_time_is_teacher(self): - service = {'slug': 'live_class_join'} + service = {"slug": "live_class_join"} online_meeting_url = self.bc.fake.url() - cohort = {'online_meeting_url': online_meeting_url, 'available_as_saas': True} + cohort = {"online_meeting_url": online_meeting_url, "available_as_saas": True} delta = timedelta(seconds=random.randint(1, 1000)) delta = timedelta(minutes=30) - live_class = {'starting_at': UTC_NOW - delta, 'ending_at': UTC_NOW + delta} - academy = {'available_as_saas': True} - model = self.bc.database.create(user=1, - group=1, - service=service, - live_class=live_class, - cohort_user={'role': 'TEACHER'}, - cohort=cohort, - cohort_set=1, - cohort_set_cohort=1, - consumable=1, - token=1, - academy=academy) - querystring = self.bc.format.to_querystring({'token': model.token.key}) - - url = reverse_lazy('events:me_event_liveclass_join_hash', kwargs={'hash': model.live_class.hash - }) + f'?{querystring}' + live_class = {"starting_at": UTC_NOW - delta, "ending_at": UTC_NOW + delta} + academy = {"available_as_saas": True} + model = self.bc.database.create( + user=1, + group=1, + service=service, + live_class=live_class, + cohort_user={"role": "TEACHER"}, + cohort=cohort, + cohort_set=1, + cohort_set_cohort=1, + consumable=1, + token=1, + academy=academy, + ) + querystring = self.bc.format.to_querystring({"token": model.token.key}) + + url = ( + reverse_lazy("events:me_event_liveclass_join_hash", kwargs={"hash": model.live_class.hash}) + + f"?{querystring}" + ) response = self.client.get(url) self.assertEqual(response.status_code, 302) - self.assertEqual(self.bc.database.list_of('events.LiveClass'), [ - self.bc.format.to_dict(model.live_class), - ]) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), [ - self.bc.format.to_dict(model.consumable), - ]) - - self.assertEqual(self.bc.database.list_of('payments.ConsumptionSession'), [ - consumption_session(model.live_class, - model.cohort_set, - model.user, - model.consumable, - data={ - 'id': 1, - 'duration': delta, - 'eta': UTC_NOW + delta, - }), - ]) - - self.bc.check.calls(tasks.end_the_consumption_session.apply_async.call_args_list, [ - call(args=(1, 1), eta=UTC_NOW + delta), - ]) - self.bc.check.calls(tasks_events.mark_live_class_as_started.delay.call_args_list, [ - call(model.live_class.id), - ]) + self.assertEqual( + self.bc.database.list_of("events.LiveClass"), + [ + self.bc.format.to_dict(model.live_class), + ], + ) + self.assertEqual( + self.bc.database.list_of("payments.Consumable"), + [ + self.bc.format.to_dict(model.consumable), + ], + ) + + self.assertEqual( + self.bc.database.list_of("payments.ConsumptionSession"), + [ + consumption_session( + model.live_class, + model.cohort_set, + model.user, + model.consumable, + data={ + "id": 1, + "duration": delta, + "eta": UTC_NOW + delta, + }, + ), + ], + ) + + self.bc.check.calls( + tasks.end_the_consumption_session.apply_async.call_args_list, + [ + call(args=(1, 1), eta=UTC_NOW + delta), + ], + ) + self.bc.check.calls( + tasks_events.mark_live_class_as_started.delay.call_args_list, + [ + call(model.live_class.id), + ], + ) diff --git a/breathecode/events/urls.py b/breathecode/events/urls.py index 0033bc30e..f9378d3ca 100644 --- 
a/breathecode/events/urls.py +++ b/breathecode/events/urls.py @@ -1,57 +1,91 @@ from django.urls import path -from .views import (AcademyEventJoinView, AcademyEventTypeView, AcademyLiveClassView, EventMeView, - EventTypeVisibilitySettingView, EventView, EventTypeView, AcademyEventCheckinView, MeLiveClassView, - get_events, eventbrite_webhook, AcademyEventView, AcademyVenueView, ICalCohortsView, ICalEventView, - ICalStudentView, AcademyOrganizationView, OrganizationWebhookView, AcademyOrganizerView, - AcademyOrganizationOrganizerView, EventCheckinView, AcademyLiveClassJoinView, join_event, - EventMeCheckinView, join_live_class, EventPublicView) +from .views import ( + AcademyEventJoinView, + AcademyEventTypeView, + AcademyLiveClassView, + EventMeView, + EventTypeVisibilitySettingView, + EventView, + EventTypeView, + AcademyEventCheckinView, + MeLiveClassView, + get_events, + eventbrite_webhook, + AcademyEventView, + AcademyVenueView, + ICalCohortsView, + ICalEventView, + ICalStudentView, + AcademyOrganizationView, + OrganizationWebhookView, + AcademyOrganizerView, + AcademyOrganizationOrganizerView, + EventCheckinView, + AcademyLiveClassJoinView, + join_event, + EventMeCheckinView, + join_live_class, + EventPublicView, +) from .syndication import LatestEventsFeed -app_name = 'events' +app_name = "events" urlpatterns = [ - path('', EventView.as_view(), name='root'), - path('me', EventMeView.as_view(), name='me'), - path('me/event/<int:event_id>/checkin', EventMeCheckinView.as_view(), name='me_event_id_checkin'), - path('me/event/<int:event_id>/join', join_event, name='me_event_id_join'), - path('me/event/<int:event_id>', EventMeView.as_view(), name='me_event_id'), + path("", EventView.as_view(), name="root"), + path("me", EventMeView.as_view(), name="me"), + path("me/event/<int:event_id>/checkin", EventMeCheckinView.as_view(), name="me_event_id_checkin"), + path("me/event/<int:event_id>/join", join_event, name="me_event_id_join"), + path("me/event/<int:event_id>", EventMeView.as_view(), name="me_event_id"), # move this - path('me/event/liveclass', MeLiveClassView.as_view(), name='me_event_liveclass'), - path('me/event/liveclass/join/<str:hash>', join_live_class, name='me_event_liveclass_join_hash'), - path('academy/event/liveclass', AcademyLiveClassView.as_view(), name='academy_event_liveclass'), - path('academy/event/liveclass/join/<str:hash>', - AcademyLiveClassJoinView.as_view(), - name='academy_event_liveclass_join_hash'), - path('all', get_events, name='all'), - path('feed/all', LatestEventsFeed()), - path('event/<slug:event_slug>', EventPublicView.as_view(), name='event'), - path('eventype', EventTypeView.as_view(), name='eventype'), - path('event/<int:event_id>/checkin', EventCheckinView.as_view(), name='event_checkin'), - path('academy/event', AcademyEventView.as_view(), name='academy_event'), - path('academy/event/<int:event_id>', AcademyEventView.as_view(), name='academy_event_id'), - path('academy/event/<int:event_id>/join', AcademyEventJoinView.as_view(), name='academy_event_id_join'), - path('academy/organization', AcademyOrganizationView.as_view(), name='academy_organization'), - path('academy/organization/organizer', - AcademyOrganizationOrganizerView.as_view(), - name='academy_organization_organizer'), - path('academy/organization/organizer/<int:organizer_id>', - AcademyOrganizationOrganizerView.as_view(), - name='academy_organization_organizer_id'), - path('academy/organizer', AcademyOrganizerView.as_view(), name='academy_organizer'), - 
path('academy/organization/eventbrite/webhook', - OrganizationWebhookView.as_view(), - name='academy_organizarion_eventbrite_webhook'), - path('ical/cohorts', ICalCohortsView.as_view(), name='ical_cohorts'), - path('ical/events', ICalEventView.as_view(), name='ical_events'), - path('ical/student/<int:user_id>', ICalStudentView.as_view(), name='ical_student_id'), - path('academy/venues', AcademyVenueView.as_view(), name='academy_venues'), - path('academy/eventype', AcademyEventTypeView.as_view(), name='academy_eventype'), - path('academy/eventype/<slug:event_type_slug>', AcademyEventTypeView.as_view(), name='academy_eventype_slug'), - path('academy/eventype/<slug:event_type_slug>/visibilitysetting', - EventTypeVisibilitySettingView.as_view(), - name='academy_eventype_slug_visibilitysetting'), - path('academy/eventype/<slug:event_type_slug>/visibilitysetting/<int:visibility_setting_id>', - EventTypeVisibilitySettingView.as_view(), - name='academy_eventype_slug_visibilitysetting_id'), - path('academy/checkin', AcademyEventCheckinView.as_view(), name='academy_checkin'), - path('eventbrite/webhook/<int:organization_id>', eventbrite_webhook, name='eventbrite_webhook_id'), + path("me/event/liveclass", MeLiveClassView.as_view(), name="me_event_liveclass"), + path("me/event/liveclass/join/<str:hash>", join_live_class, name="me_event_liveclass_join_hash"), + path("academy/event/liveclass", AcademyLiveClassView.as_view(), name="academy_event_liveclass"), + path( + "academy/event/liveclass/join/<str:hash>", + AcademyLiveClassJoinView.as_view(), + name="academy_event_liveclass_join_hash", + ), + path("all", get_events, name="all"), + path("feed/all", LatestEventsFeed()), + path("event/<slug:event_slug>", EventPublicView.as_view(), name="event"), + path("eventype", EventTypeView.as_view(), name="eventype"), + path("event/<int:event_id>/checkin", EventCheckinView.as_view(), name="event_checkin"), + path("academy/event", AcademyEventView.as_view(), name="academy_event"), + path("academy/event/<int:event_id>", AcademyEventView.as_view(), name="academy_event_id"), + path("academy/event/<int:event_id>/join", AcademyEventJoinView.as_view(), name="academy_event_id_join"), + path("academy/organization", AcademyOrganizationView.as_view(), name="academy_organization"), + path( + "academy/organization/organizer", + AcademyOrganizationOrganizerView.as_view(), + name="academy_organization_organizer", + ), + path( + "academy/organization/organizer/<int:organizer_id>", + AcademyOrganizationOrganizerView.as_view(), + name="academy_organization_organizer_id", + ), + path("academy/organizer", AcademyOrganizerView.as_view(), name="academy_organizer"), + path( + "academy/organization/eventbrite/webhook", + OrganizationWebhookView.as_view(), + name="academy_organizarion_eventbrite_webhook", + ), + path("ical/cohorts", ICalCohortsView.as_view(), name="ical_cohorts"), + path("ical/events", ICalEventView.as_view(), name="ical_events"), + path("ical/student/<int:user_id>", ICalStudentView.as_view(), name="ical_student_id"), + path("academy/venues", AcademyVenueView.as_view(), name="academy_venues"), + path("academy/eventype", AcademyEventTypeView.as_view(), name="academy_eventype"), + path("academy/eventype/<slug:event_type_slug>", AcademyEventTypeView.as_view(), name="academy_eventype_slug"), + path( + "academy/eventype/<slug:event_type_slug>/visibilitysetting", + EventTypeVisibilitySettingView.as_view(), + name="academy_eventype_slug_visibilitysetting", + ), + path( + 
"academy/eventype/<slug:event_type_slug>/visibilitysetting/<int:visibility_setting_id>", + EventTypeVisibilitySettingView.as_view(), + name="academy_eventype_slug_visibilitysetting_id", + ), + path("academy/checkin", AcademyEventCheckinView.as_view(), name="academy_checkin"), + path("eventbrite/webhook/<int:organization_id>", eventbrite_webhook, name="eventbrite_webhook_id"), ] diff --git a/breathecode/events/utils.py b/breathecode/events/utils.py index b00a66aa1..8e7ca6cd8 100644 --- a/breathecode/events/utils.py +++ b/breathecode/events/utils.py @@ -6,11 +6,11 @@ class Eventbrite(object): def __init__(self, token=None): if token is None: - token = os.getenv('EVENTBRITE_KEY', '') + token = os.getenv("EVENTBRITE_KEY", "") - self.host = 'https://www.eventbriteapi.com/v3' + self.host = "https://www.eventbriteapi.com/v3" self.token = token - self.headers = {'Authorization': f'Bearer {token}'} + self.headers = {"Authorization": f"Bearer {token}"} def has_error(self): # { @@ -27,60 +27,58 @@ def request(self, _type, url, headers=None, query_string=None, data=None): headers = {} _headers = {**self.headers, **headers} - _query_string = '?' + urllib.parse.urlencode(query_string) if query_string else '' + _query_string = "?" + urllib.parse.urlencode(query_string) if query_string else "" response = requests.request(_type, self.host + url + _query_string, headers=_headers, data=data, timeout=2) result = response.json() - if 'status_code' in result and result['status_code'] >= 400: - raise Exception(result['error_description']) - - if 'pagination' in result: - print('has more items?', result['pagination']['has_more_items']) - if result['pagination']['has_more_items']: - print('Continuation: ', result['pagination']['continuation']) - new_result = self.request(_type, - url, - query_string={ - **query_string, 'continuation': result['pagination']['continuation'] - }) + if "status_code" in result and result["status_code"] >= 400: + raise Exception(result["error_description"]) + + if "pagination" in result: + print("has more items?", result["pagination"]["has_more_items"]) + if result["pagination"]["has_more_items"]: + print("Continuation: ", result["pagination"]["continuation"]) + new_result = self.request( + _type, url, query_string={**query_string, "continuation": result["pagination"]["continuation"]} + ) for key in new_result: - print(key, type(new_result[key]) == 'list') - if type(new_result[key]) == 'list': + print(key, type(new_result[key]) == "list") + if type(new_result[key]) == "list": new_result[key] = result[key] + new_result[key] result.update(new_result) return result def get_my_organizations(self): - data = self.request('GET', '/users/me/organizations/') + data = self.request("GET", "/users/me/organizations/") return data def get_organization_events(self, organization_id): - query_string = {'expand': 'organizer,venue', 'status': 'live'} - data = self.request('GET', f'/organizations/{str(organization_id)}/events/', query_string=query_string) + query_string = {"expand": "organizer,venue", "status": "live"} + data = self.request("GET", f"/organizations/{str(organization_id)}/events/", query_string=query_string) return data def get_organization_venues(self, organization_id): - data = self.request('GET', f'/organizations/{str(organization_id)}/venues/') + data = self.request("GET", f"/organizations/{str(organization_id)}/venues/") return data # https://www.eventbrite.com/platform/api#/reference/event/create/create-an-event def create_organization_event(self, organization_id, data): - data = 
self.request('POST', f'/organizations/{str(organization_id)}/events/', data=data) + data = self.request("POST", f"/organizations/{str(organization_id)}/events/", data=data) return data # https://www.eventbrite.com/platform/api#/reference/event/update/update-an-event def update_organization_event(self, event_id, data): - data = self.request('PUT', f'/events/{event_id}/', data=data) + data = self.request("PUT", f"/events/{event_id}/", data=data) return data # https://www.eventbrite.com/platform/api#/reference/event-description/retrieve/retrieve-full-html-description def get_event_description(self, event_id): - data = self.request('GET', f'/events/{event_id}/structured_content/') + data = self.request("GET", f"/events/{event_id}/structured_content/") return data # https://www.eventbrite.com/platform/api#/reference/event-description/retrieve/retrieve-full-html-description def create_or_update_event_description(self, event_id, version, data): - data = self.request('POST', f'/events/{event_id}/structured_content/{version}/', data=data) + data = self.request("POST", f"/events/{event_id}/structured_content/{version}/", data=data) return data diff --git a/breathecode/events/views.py b/breathecode/events/views.py index 4924fc433..2a9b55351 100644 --- a/breathecode/events/views.py +++ b/breathecode/events/views.py @@ -93,51 +93,51 @@ SUNDAY = 6 -@api_view(['GET']) +@api_view(["GET"]) @permission_classes([AllowAny]) def get_events(request): items = Event.objects.all() lookup = {} - if 'city' in request.GET: - city = request.GET.get('city') - lookup['venue__city__iexact'] = city + if "city" in request.GET: + city = request.GET.get("city") + lookup["venue__city__iexact"] = city - if 'country' in request.GET: - value = request.GET.get('country') - lookup['venue__country__iexact'] = value + if "country" in request.GET: + value = request.GET.get("country") + lookup["venue__country__iexact"] = value - if 'type' in request.GET: - value = request.GET.get('type') - lookup['event_type__slug'] = value + if "type" in request.GET: + value = request.GET.get("type") + lookup["event_type__slug"] = value - if 'zip_code' in request.GET: - value = request.GET.get('zip_code') - lookup['venue__zip_code'] = value + if "zip_code" in request.GET: + value = request.GET.get("zip_code") + lookup["venue__zip_code"] = value - if 'academy' in request.GET: - value = request.GET.get('academy') - lookup['academy__slug__in'] = value.split(',') + if "academy" in request.GET: + value = request.GET.get("academy") + lookup["academy__slug__in"] = value.split(",") - if 'status' in request.GET: - value = request.GET.get('status') - lookup['status__in'] = value.split(',') + if "status" in request.GET: + value = request.GET.get("status") + lookup["status__in"] = value.split(",") else: - lookup['status'] = 'ACTIVE' + lookup["status"] = "ACTIVE" - online_event = request.GET.get('online_event', None) - if online_event == 'true': - lookup['online_event'] = True - elif online_event == 'false': - lookup['online_event'] = False + online_event = request.GET.get("online_event", None) + if online_event == "true": + lookup["online_event"] = True + elif online_event == "false": + lookup["online_event"] = False - lookup['ending_at__gte'] = timezone.now() - if 'past' in request.GET: - if request.GET.get('past') == 'true': - lookup.pop('ending_at__gte') - lookup['starting_at__lte'] = timezone.now() + lookup["ending_at__gte"] = timezone.now() + if "past" in request.GET: + if request.GET.get("past") == "true": + lookup.pop("ending_at__gte") + 
lookup["starting_at__lte"] = timezone.now() - items = items.filter(**lookup).order_by('starting_at') + items = items.filter(**lookup).order_by("starting_at") serializer = EventSmallSerializer(items, many=True) return Response(serializer.data) @@ -154,11 +154,15 @@ def get(self, request, event_slug=None, format=None): event = Event.objects.filter(slug=event_slug).first() if not event: - raise ValidationException(translation(lang, - en='Event not found or you dont have access', - es='Evento no encontrado o no tienes acceso', - slug='not-found'), - code=404) + raise ValidationException( + translation( + lang, + en="Event not found or you dont have access", + es="Evento no encontrado o no tienes acceso", + slug="not-found", + ), + code=404, + ) serializer = EventPublicBigSerializer(event, many=False) return Response(serializer.data) @@ -171,31 +175,31 @@ def get(self, request, format=None): items = Event.objects.all() lookup = {} - if 'city' in self.request.GET: - city = self.request.GET.get('city') - lookup['venue__city__iexact'] = city + if "city" in self.request.GET: + city = self.request.GET.get("city") + lookup["venue__city__iexact"] = city - if 'country' in self.request.GET: - value = self.request.GET.get('city') - lookup['venue__country__iexact'] = value + if "country" in self.request.GET: + value = self.request.GET.get("city") + lookup["venue__country__iexact"] = value - if 'zip_code' in self.request.GET: - value = self.request.GET.get('city') - lookup['venue__zip_code'] = value + if "zip_code" in self.request.GET: + value = self.request.GET.get("city") + lookup["venue__zip_code"] = value - lookup['starting_at__gte'] = timezone.now() - if 'past' in self.request.GET: - if self.request.GET.get('past') == 'true': - lookup.pop('starting_at__gte') - lookup['starting_at__lte'] = timezone.now() + lookup["starting_at__gte"] = timezone.now() + if "past" in self.request.GET: + if self.request.GET.get("past") == "true": + lookup.pop("starting_at__gte") + lookup["starting_at__lte"] = timezone.now() - items = items.filter(**lookup).order_by('-created_at') + items = items.filter(**lookup).order_by("-created_at") serializer = EventSmallSerializer(items, many=True) return Response(serializer.data) def post(self, request, format=None): - serializer = EventSerializer(data=request.data, context={'academy_id': None}) + serializer = EventSerializer(data=request.data, context={"academy_id": None}) if serializer.is_valid(): serializer.save() return Response(serializer.data, status=status.HTTP_201_CREATED) @@ -220,35 +224,39 @@ def get(self, request, event_id=None): single_event = Event.objects.filter(id=event_id, event_type__in=items).first() if not single_event: - raise ValidationException(translation(lang, - en='Event not found or you dont have access', - es='Evento no encontrado o no tienes acceso', - slug='not-found'), - code=404) - - _r = self.request.GET.get('redirect', 'false') - - #DEPRECATED: due we have a new endpoint that manages the EventTypeSet consumables - if _r == 'true': + raise ValidationException( + translation( + lang, + en="Event not found or you dont have access", + es="Evento no encontrado o no tienes acceso", + slug="not-found", + ), + code=404, + ) + + _r = self.request.GET.get("redirect", "false") + + # DEPRECATED: due we have a new endpoint that manages the EventTypeSet consumables + if _r == "true": if single_event is None: - return render_message(request, - 'Event not found or you dont have access', - academy=single_event.academy) - if single_event.live_stream_url is None or 
single_event.live_stream_url == '': - return render_message(request, 'Event live stream URL is not found', academy=single_event.academy) + return render_message( + request, "Event not found or you dont have access", academy=single_event.academy + ) + if single_event.live_stream_url is None or single_event.live_stream_url == "": + return render_message(request, "Event live stream URL is not found", academy=single_event.academy) return redirect(single_event.live_stream_url) serializer = EventBigSerializer(single_event, many=False) return Response(serializer.data) - items = Event.objects.filter(event_type__in=items, status='ACTIVE').order_by('starting_at') + items = Event.objects.filter(event_type__in=items, status="ACTIVE").order_by("starting_at") lookup = {} - online_event = self.request.GET.get('online_event', '') - if online_event == 'true': - lookup['online_event'] = True - elif online_event == 'false': - lookup['online_event'] = False + online_event = self.request.GET.get("online_event", "") + if online_event == "true": + lookup["online_event"] = True + elif online_event == "false": + lookup["online_event"] = False items = items.filter(**lookup) items = handler.queryset(items) @@ -258,7 +266,7 @@ def get(self, request, event_id=None): class MeLiveClassView(APIView): - extensions = APIViewExtensions(cache=LiveClassCache, sort='-starting_at', paginate=True) + extensions = APIViewExtensions(cache=LiveClassCache, sort="-starting_at", paginate=True) def get(self, request): handler = self.extensions(request) @@ -272,29 +280,29 @@ def get(self, request): query = handler.lookup.build( lang, strings={ - 'exact': [ - 'remote_meeting_url', + "exact": [ + "remote_meeting_url", ], }, bools={ - 'is_null': ['ended_at'], + "is_null": ["ended_at"], }, datetimes={ - 'gte': ['starting_at'], - 'lte': ['ending_at'], + "gte": ["starting_at"], + "lte": ["ending_at"], }, slugs=[ - 'cohort_time_slot__cohort', - 'cohort_time_slot__cohort__academy', - 'cohort_time_slot__cohort__syllabus_version__syllabus', + "cohort_time_slot__cohort", + "cohort_time_slot__cohort__academy", + "cohort_time_slot__cohort__syllabus_version__syllabus", ], overwrite={ - 'cohort': 'cohort_time_slot__cohort', - 'academy': 'cohort_time_slot__cohort__academy', - 'syllabus': 'cohort_time_slot__cohort__syllabus_version__syllabus', - 'start': 'starting_at', - 'end': 'ending_at', - 'upcoming': 'ended_at', + "cohort": "cohort_time_slot__cohort", + "academy": "cohort_time_slot__cohort__academy", + "syllabus": "cohort_time_slot__cohort__syllabus_version__syllabus", + "start": "starting_at", + "end": "ending_at", + "upcoming": "ended_at", }, ) @@ -307,31 +315,36 @@ def get(self, request): @private_view() -@consume('live_class_join', consumer=live_class_by_url_param, format='html') +@consume("live_class_join", consumer=live_class_by_url_param, format="html") def join_live_class(request, token, live_class, lang): now = timezone.now() if live_class.starting_at > now: obj = {} if live_class.cohort_time_slot.cohort.academy: - obj['COMPANY_INFO_EMAIL'] = live_class.cohort_time_slot.cohort.academy.feedback_email - obj['COMPANY_LEGAL_NAME'] = (live_class.cohort_time_slot.cohort.academy.legal_name - or live_class.cohort_time_slot.cohort.academy.name) - obj['COMPANY_LOGO'] = live_class.cohort_time_slot.cohort.academy.logo_url - obj['COMPANY_NAME'] = live_class.cohort_time_slot.cohort.academy.name - - if 'heading' not in obj: - obj['heading'] = live_class.cohort_time_slot.cohort.academy.name - - return render(request, 'countdown.html', { - 'token': 
token.key, - 'event': LiveClassJoinSerializer(live_class).data, - **obj, - }) + obj["COMPANY_INFO_EMAIL"] = live_class.cohort_time_slot.cohort.academy.feedback_email + obj["COMPANY_LEGAL_NAME"] = ( + live_class.cohort_time_slot.cohort.academy.legal_name or live_class.cohort_time_slot.cohort.academy.name + ) + obj["COMPANY_LOGO"] = live_class.cohort_time_slot.cohort.academy.logo_url + obj["COMPANY_NAME"] = live_class.cohort_time_slot.cohort.academy.name + + if "heading" not in obj: + obj["heading"] = live_class.cohort_time_slot.cohort.academy.name + + return render( + request, + "countdown.html", + { + "token": token.key, + "event": LiveClassJoinSerializer(live_class).data, + **obj, + }, + ) - is_teacher = CohortUser.objects.filter(cohort=live_class.cohort_time_slot.cohort, - role__in=['TEACHER', 'ASSISTANT'], - user=request.user).exists() + is_teacher = CohortUser.objects.filter( + cohort=live_class.cohort_time_slot.cohort, role__in=["TEACHER", "ASSISTANT"], user=request.user + ).exists() if is_teacher and live_class.started_at is None: mark_live_class_as_started.delay(live_class.id) @@ -340,9 +353,9 @@ def join_live_class(request, token, live_class, lang): class AcademyLiveClassView(APIView): - extensions = APIViewExtensions(sort='-starting_at', paginate=True) + extensions = APIViewExtensions(sort="-starting_at", paginate=True) - @capable_of('start_or_end_class') + @capable_of("start_or_end_class") def get(self, request, academy_id=None): from .models import LiveClass @@ -353,33 +366,33 @@ def get(self, request, academy_id=None): query = handler.lookup.build( lang, strings={ - 'exact': [ - 'remote_meeting_url', - 'cohort_time_slot__cohort__cohortuser__user__email', + "exact": [ + "remote_meeting_url", + "cohort_time_slot__cohort__cohortuser__user__email", ], }, bools={ - 'is_null': ['ended_at'], + "is_null": ["ended_at"], }, datetimes={ - 'gte': ['starting_at'], - 'lte': ['ending_at'], + "gte": ["starting_at"], + "lte": ["ending_at"], }, slugs=[ - 'cohort_time_slot__cohort__cohortuser__user', - 'cohort_time_slot__cohort', - 'cohort_time_slot__cohort__academy', - 'cohort_time_slot__cohort__syllabus_version__syllabus', + "cohort_time_slot__cohort__cohortuser__user", + "cohort_time_slot__cohort", + "cohort_time_slot__cohort__academy", + "cohort_time_slot__cohort__syllabus_version__syllabus", ], overwrite={ - 'cohort': 'cohort_time_slot__cohort', - 'academy': 'cohort_time_slot__cohort__academy', - 'syllabus': 'cohort_time_slot__cohort__syllabus_version__syllabus', - 'start': 'starting_at', - 'end': 'ending_at', - 'upcoming': 'ended_at', - 'user': 'cohort_time_slot__cohort__cohortuser__user', - 'user_email': 'cohort_time_slot__cohort__cohortuser__user__email', + "cohort": "cohort_time_slot__cohort", + "academy": "cohort_time_slot__cohort__academy", + "syllabus": "cohort_time_slot__cohort__syllabus_version__syllabus", + "start": "starting_at", + "end": "ending_at", + "upcoming": "ended_at", + "user": "cohort_time_slot__cohort__cohortuser__user", + "user_email": "cohort_time_slot__cohort__cohortuser__user__email", }, ) @@ -390,36 +403,47 @@ def get(self, request, academy_id=None): return handler.response(serializer.data) - @capable_of('start_or_end_class') + @capable_of("start_or_end_class") def post(self, request, academy_id=None): lang = get_user_language(request) - serializer = LiveClassSerializer(data=request.data, context={ - 'lang': lang, - 'academy_id': academy_id, - }) + serializer = LiveClassSerializer( + data=request.data, + context={ + "lang": lang, + "academy_id": academy_id, + }, + 
) if serializer.is_valid(): serializer.save() return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - @capable_of('start_or_end_class') + @capable_of("start_or_end_class") def put(self, request, cohort_schedule_id, academy_id=None): lang = get_user_language(request) - already = LiveClass.objects.filter(id=cohort_schedule_id, - cohort_time_slot__cohort__academy__id=academy_id).first() + already = LiveClass.objects.filter( + id=cohort_schedule_id, cohort_time_slot__cohort__academy__id=academy_id + ).first() if already is None: raise ValidationException( - translation(lang, - en=f'Live class not found for this academy {academy_id}', - es=f'Clase en vivo no encontrada para esta academia {academy_id}', - slug='not-found')) - - serializer = LiveClassSerializer(already, data=request.data, context={ - 'lang': lang, - 'academy_id': academy_id, - }) + translation( + lang, + en=f"Live class not found for this academy {academy_id}", + es=f"Clase en vivo no encontrada para esta academia {academy_id}", + slug="not-found", + ) + ) + + serializer = LiveClassSerializer( + already, + data=request.data, + context={ + "lang": lang, + "academy_id": academy_id, + }, + ) if serializer.is_valid(): serializer.save() return Response(serializer.data, status=status.HTTP_200_OK) @@ -428,23 +452,28 @@ def put(self, request, cohort_schedule_id, academy_id=None): class AcademyLiveClassJoinView(APIView): - @capable_of('start_or_end_class') + @capable_of("start_or_end_class") def get(self, request, hash, academy_id=None): lang = get_user_language(request) - live_class = LiveClass.objects.filter(cohort_time_slot__cohort__cohortuser__user=request.user, - cohort_time_slot__cohort__academy__id=int(academy_id), - hash=hash).first() + live_class = LiveClass.objects.filter( + cohort_time_slot__cohort__cohortuser__user=request.user, + cohort_time_slot__cohort__academy__id=int(academy_id), + hash=hash, + ).first() if not live_class: raise ValidationException( - translation(lang, en='Live class not found', es='Clase en vivo no encontrada', slug='not-found')) + translation(lang, en="Live class not found", es="Clase en vivo no encontrada", slug="not-found") + ) if not live_class.cohort_time_slot.cohort.online_meeting_url: - message = translation(lang, - en='Live class has no online meeting url', - es='La clase en vivo no tiene una URL de reunión en línea', - slug='no-meeting-url') + message = translation( + lang, + en="Live class has no online meeting url", + es="La clase en vivo no tiene una URL de reunión en línea", + slug="no-meeting-url", + ) return render_message(request, message, status=400, academy=live_class.cohort_time_slot.cohort.academy) return redirect(live_class.cohort_time_slot.cohort.online_meeting_url) @@ -452,9 +481,9 @@ def get(self, request, hash, academy_id=None): class AcademyEventView(APIView, GenerateLookupsMixin): - extensions = APIViewExtensions(cache=EventCache, sort='-starting_at', paginate=True) + extensions = APIViewExtensions(cache=EventCache, sort="-starting_at", paginate=True) - @capable_of('read_event') + @capable_of("read_event") def get(self, request, academy_id=None, event_id=None): handler = self.extensions(request) @@ -465,7 +494,7 @@ def get(self, request, academy_id=None, event_id=None): if event_id is not None: single_event = Event.objects.filter(id=event_id, academy__id=academy_id).first() if single_event is None: - raise ValidationException('Event not found', 404) + raise ValidationException("Event not found", 404) 
serializer = AcademyEventSmallSerializer(single_event, many=False) return handler.response(serializer.data) @@ -473,28 +502,28 @@ def get(self, request, academy_id=None, event_id=None): items = Event.objects.filter(academy__id=academy_id) lookup = {} - city = self.request.GET.get('city') + city = self.request.GET.get("city") if city: - lookup['venue__city__iexact'] = city + lookup["venue__city__iexact"] = city - country = self.request.GET.get('country') + country = self.request.GET.get("country") if country: - lookup['venue__country__iexact'] = country + lookup["venue__country__iexact"] = country - zip_code = self.request.GET.get('zip_code') + zip_code = self.request.GET.get("zip_code") if zip_code: - lookup['venue__zip_code'] = zip_code + lookup["venue__zip_code"] = zip_code - upcoming = self.request.GET.get('upcoming') - past = self.request.GET.get('past') + upcoming = self.request.GET.get("upcoming") + past = self.request.GET.get("past") if upcoming: - lookup['starting_at__gte'] = timezone.now() + lookup["starting_at__gte"] = timezone.now() elif past: - if 'starting_at__gte' in lookup: - lookup.pop('starting_at__gte') - if past == 'true': - lookup['starting_at__lte'] = timezone.now() + if "starting_at__gte" in lookup: + lookup.pop("starting_at__gte") + if past == "true": + lookup["starting_at__lte"] = timezone.now() items = items.filter(**lookup) items = handler.queryset(items) @@ -502,37 +531,36 @@ def get(self, request, academy_id=None, event_id=None): return handler.response(serializer.data) - @capable_of('crud_event') + @capable_of("crud_event") def post(self, request, format=None, academy_id=None): lang = get_user_language(request) academy = Academy.objects.filter(id=academy_id).first() if academy is None: raise ValidationException( - translation(lang, - en=f'Academy {academy_id} not found', - es=f'Academia {academy_id} no encontrada', - slug='academy-not-found')) + translation( + lang, + en=f"Academy {academy_id} not found", + es=f"Academia {academy_id} no encontrada", + slug="academy-not-found", + ) + ) data = {} for key in request.data.keys(): data[key] = request.data.get(key) - data['sync_status'] = 'PENDING' + data["sync_status"] = "PENDING" - serializer = EventSerializer(data={ - **data, 'academy': academy.id - }, - context={ - 'lang': lang, - 'academy_id': academy_id - }) + serializer = EventSerializer( + data={**data, "academy": academy.id}, context={"lang": lang, "academy_id": academy_id} + ) if serializer.is_valid(): serializer.save() return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - @capable_of('crud_event') + @capable_of("crud_event") def put(self, request, academy_id=None, event_id=None): lang = get_user_language(request) @@ -543,48 +571,50 @@ def put(self, request, academy_id=None, event_id=None): data_list = [dict(request.data)] if event_id is None: - raise ValidationException('Missing event_id') + raise ValidationException("Missing event_id") event = Event.objects.filter(id=event_id, academy__id=academy_id).first() if event is None: raise ValidationException( - translation(lang, - en=f'Event not found for this academy {academy_id}', - es=f'Evento no encontrado para esta academia {academy_id}', - slug='event-not-found')) + translation( + lang, + en=f"Event not found for this academy {academy_id}", + es=f"Evento no encontrado para esta academia {academy_id}", + slug="event-not-found", + ) + ) - data_list[0]['id'] = event.id + data_list[0]["id"] = event.id all_events = [] for data 
in data_list:
-            if 'id' not in data:
+            if "id" not in data:
                 raise ValidationException(
-                    translation(lang,
-                                en='Event id not found',
-                                es='No encontró el id del evento',
-                                slug='event-id-not-found'))
+                    translation(
+                        lang, en="Event id not found", es="No encontró el id del evento", slug="event-id-not-found"
+                    )
+                )
 
-            instance = Event.objects.filter(id=data['id'], academy__id=academy_id).first()
+            instance = Event.objects.filter(id=data["id"], academy__id=academy_id).first()
             if not instance:
                 raise ValidationException(
-                    translation(lang,
-                                en=f'Event not found for this academy {academy_id}',
-                                es=f'Evento no encontrado para esta academia {academy_id}',
-                                slug='event-not-found'))
+                    translation(
+                        lang,
+                        en=f"Event not found for this academy {academy_id}",
+                        es=f"Evento no encontrado para esta academia {academy_id}",
+                        slug="event-not-found",
+                    )
+                )
             all_events.append(instance)
 
         all_serializers = []
         index = -1
         for data in data_list:
             index += 1
-            serializer = EventPUTSerializer(all_events[index],
-                                            data=data,
-                                            context={
-                                                'lang': lang,
-                                                'request': request,
-                                                'academy_id': academy_id
-                                            })
+            serializer = EventPUTSerializer(
+                all_events[index], data=data, context={"lang": lang, "request": request, "academy_id": academy_id}
+            )
             all_serializers.append(serializer)
             if not serializer.is_valid():
                 return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@@ -600,24 +630,25 @@
 
         return Response(serializer.data, status=status.HTTP_200_OK)
 
-    @capable_of('crud_event')
+    @capable_of("crud_event")
     def delete(self, request, academy_id=None, event_id=None):
-        lookups = self.generate_lookups(request, many_fields=['id'])
+        lookups = self.generate_lookups(request, many_fields=["id"])
 
         if not lookups and not event_id:
-            raise ValidationException('provide arguments in the url', code=400, slug='without-lookups-and-event-id')
+            raise ValidationException("provide arguments in the url", code=400, slug="without-lookups-and-event-id")
 
         if lookups and event_id:
-            raise ValidationException('event_id in url '
-                                      'in bulk mode request, use querystring style instead',
-                                      code=400,
-                                      slug='lookups-and-event-id-together')
+            raise ValidationException(
+                "event_id in url " "in bulk mode request, use querystring style instead",
+                code=400,
+                slug="lookups-and-event-id-together",
+            )
 
         if lookups:
             alls = Event.objects.filter(**lookups)
-            valids = alls.filter(academy__id=academy_id, status='DRAFT')
+            valids = alls.filter(academy__id=academy_id, status="DRAFT")
             from_other_academy = alls.exclude(academy__id=academy_id)
-            not_draft = alls.exclude(status='DRAFT')
+            not_draft = alls.exclude(status="DRAFT")
 
             responses = []
             if valids:
@@ -625,20 +656,23 @@
 
             if from_other_academy:
                 responses.append(
-                    MultiStatusResponse('Event doest not exist or does not belong to this academy',
-                                        code=400,
-                                        slug='not-found',
-                                        queryset=from_other_academy))
+                    MultiStatusResponse(
+                        "Event does not exist or does not belong to this academy",
+                        code=400,
+                        slug="not-found",
+                        queryset=from_other_academy,
+                    )
+                )
 
             if not_draft:
                 responses.append(
-                    MultiStatusResponse('Only draft events can be deleted',
-                                        code=400,
-                                        slug='non-draft-event',
-                                        queryset=not_draft))
+                    MultiStatusResponse(
+                        "Only draft events can be deleted", code=400, slug="non-draft-event", queryset=not_draft
+                    )
+                )
 
             if from_other_academy or not_draft:
-                response = response_207(responses, 'slug')
+                response = response_207(responses, "slug")
                 valids.delete()
                 return response
 
@@ -647,10 +681,10 @@ def delete(self, request, academy_id=None, event_id=None):
 
         event = Event.objects.filter(academy__id=academy_id, id=event_id).first()
         if event is None:
-            raise ValidationException('Event doest not exist or does not belong to this academy', slug='not-found')
+            raise ValidationException("Event does not exist or does not belong to this academy", slug="not-found")
 
-        if event.status != 'DRAFT':
-            raise ValidationException('Only draft events can be deleted', slug='non-draft-event')
+        if event.status != "DRAFT":
+            raise ValidationException("Only draft events can be deleted", slug="non-draft-event")
 
         event.delete()
         return Response(None, status=status.HTTP_204_NO_CONTENT)
 
@@ -658,7 +692,7 @@ def delete(self, request, academy_id=None, event_id=None):
 
 class AcademyEventJoinView(APIView):
 
-    @capable_of('start_or_end_event')
+    @capable_of("start_or_end_event")
     def get(self, request, event_id, academy_id=None):
         lang = get_user_language(request)
 
@@ -666,13 +700,16 @@
 
         if not event:
             raise ValidationException(
-                translation(lang, en='Event not found', es='Evento no encontrado', slug='not-found'))
+                translation(lang, en="Event not found", es="Evento no encontrado", slug="not-found")
+            )
 
         if not event.live_stream_url:
-            message = translation(lang,
-                                  en='Event has no live stream url',
-                                  es='Evento no tiene url de live stream',
-                                  slug='no-live-stream-url')
+            message = translation(
+                lang,
+                en="Event has no live stream url",
+                es="Evento no tiene url de live stream",
+                slug="no-live-stream-url",
+            )
             return render_message(request, message, status=400, academy=event.academy)
 
         return redirect(event.live_stream_url)
 
@@ -685,15 +722,15 @@ def get(self, request, format=None):
         items = EventType.objects.all()
         lookup = {}
 
-        if 'academy' in self.request.GET:
-            value = self.request.GET.get('academy')
-            lookup['academy__slug'] = value
+        if "academy" in self.request.GET:
+            value = self.request.GET.get("academy")
+            lookup["academy__slug"] = value
 
-        if 'allow_shared_creation' in self.request.GET:
-            value = self.request.GET.get('allow_shared_creation', '').lower()
-            lookup['allow_shared_creation'] = value == 'true'
+        if "allow_shared_creation" in self.request.GET:
+            value = self.request.GET.get("allow_shared_creation", "").lower()
+            lookup["allow_shared_creation"] = value == "true"
 
-        items = items.filter(**lookup).order_by('-created_at')
+        items = items.filter(**lookup).order_by("-created_at")
 
         serializer = EventTypeSerializer(items, many=True)
         return Response(serializer.data)
 
@@ -701,13 +738,13 @@ class AcademyEventTypeView(APIView):
 
-    @capable_of('read_event_type')
+    @capable_of("read_event_type")
     def get(self, request, academy_id=None, event_type_slug=None):
 
         if event_type_slug is not None:
             event_type = EventType.objects.filter(academy__id=academy_id, slug=event_type_slug).first()
             if not event_type:
-                raise ValidationException('Event Type not found for this academy', slug='event-type-not-found')
+                raise ValidationException("Event Type not found for this academy", slug="event-type-not-found")
 
             serializer = EventTypeBigSerializer(event_type, many=False)
             return Response(serializer.data)
 
@@ -715,33 +752,33 @@ def get(self, request, academy_id=None, event_type_slug=None):
         items = EventType.objects.filter(Q(academy__id=academy_id) | Q(allow_shared_creation=True))
         lookup = {}
 
-        if 'academy' in self.request.GET:
-            value = self.request.GET.get('academy')
-            lookup['academy__slug'] = value
+        if "academy" in self.request.GET:
+            value = 
self.request.GET.get("academy") + lookup["academy__slug"] = value - if 'allow_shared_creation' in self.request.GET: - value = self.request.GET.get('allow_shared_creation', '').lower() - lookup['allow_shared_creation'] = value == 'true' + if "allow_shared_creation" in self.request.GET: + value = self.request.GET.get("allow_shared_creation", "").lower() + lookup["allow_shared_creation"] = value == "true" - items = items.filter(**lookup).order_by('-created_at') + items = items.filter(**lookup).order_by("-created_at") serializer = EventTypeSerializer(items, many=True) return Response(serializer.data) - @capable_of('crud_event_type') + @capable_of("crud_event_type") def post(self, request, academy_id): - serializer = PostEventTypeSerializer(data=request.data, context={'academy_id': academy_id}) + serializer = PostEventTypeSerializer(data=request.data, context={"academy_id": academy_id}) if serializer.is_valid(): serializer.save() return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - @capable_of('crud_event_type') + @capable_of("crud_event_type") def put(self, request, academy_id, event_type_slug=None): event_type = EventType.objects.filter(academy__id=academy_id, slug=event_type_slug).first() if not event_type: - raise ValidationException('Event Type not found for this academy', slug='event-type-not-found') - serializer = EventTypePutSerializer(event_type, data=request.data, context={'academy_id': academy_id}) + raise ValidationException("Event Type not found for this academy", slug="event-type-not-found") + serializer = EventTypePutSerializer(event_type, data=request.data, context={"academy_id": academy_id}) if serializer.is_valid(): serializer.save() return Response(serializer.data, status=status.HTTP_200_OK) @@ -751,9 +788,9 @@ def put(self, request, academy_id, event_type_slug=None): class EventTypeVisibilitySettingView(APIView): """Show the visibility settings of a EventType.""" - extensions = APIViewExtensions(sort='-id') + extensions = APIViewExtensions(sort="-id") - @capable_of('read_event_type') + @capable_of("read_event_type") def get(self, request, event_type_slug, academy_id=None): handler = self.extensions(request) lang = get_user_language(request) @@ -761,7 +798,8 @@ def get(self, request, event_type_slug, academy_id=None): event_type = EventType.objects.filter(slug=event_type_slug).first() if not event_type: raise ValidationException( - translation(lang, en='Event type not found', es='Tipo de evento no encontrado', slug='not-found'), ) + translation(lang, en="Event type not found", es="Tipo de evento no encontrado", slug="not-found"), + ) if event_type.allow_shared_creation or event_type.academy.id == academy_id: items = event_type.visibility_settings.filter(academy__id=academy_id) @@ -774,7 +812,7 @@ def get(self, request, event_type_slug, academy_id=None): serializer = EventTypeVisibilitySettingSerializer(items, many=True) return handler.response(serializer.data) - @capable_of('crud_event_type') + @capable_of("crud_event_type") def post(self, request, event_type_slug, academy_id=None): lang = get_user_language(request) @@ -783,30 +821,32 @@ def post(self, request, event_type_slug, academy_id=None): event_type = EventType.objects.filter(slug=event_type_slug, academy=academy_id).first() if not event_type: raise ValidationException( - translation(lang, - en='Event type not found', - es='Tipo de evento no encontrado', - slug='event-type-not-found'), ) + translation( + lang, en="Event type not 
found", es="Tipo de evento no encontrado", slug="event-type-not-found" + ), + ) syllabus = None - if 'syllabus' in request.data: - syllabus = Syllabus.objects.filter(Q(academy_owner__id=academy_id) | Q(private=False), - id=request.data['syllabus']).first() + if "syllabus" in request.data: + syllabus = Syllabus.objects.filter( + Q(academy_owner__id=academy_id) | Q(private=False), id=request.data["syllabus"] + ).first() if syllabus is None: raise ValidationException( - translation(lang, en='Syllabus not found', es='Syllabus no encontrado', - slug='syllabus-not-found'), ) + translation(lang, en="Syllabus not found", es="Syllabus no encontrado", slug="syllabus-not-found"), + ) cohort = None - if 'cohort' in request.data: - cohort = Cohort.objects.filter(id=request.data['cohort'], academy=academy_id).first() + if "cohort" in request.data: + cohort = Cohort.objects.filter(id=request.data["cohort"], academy=academy_id).first() if cohort is None: raise ValidationException( - translation(lang, en='Cohort not found', es='Cohorte no encontrada', slug='cohort-not-found'), ) + translation(lang, en="Cohort not found", es="Cohorte no encontrada", slug="cohort-not-found"), + ) - visibility_setting, created = EventTypeVisibilitySetting.objects.get_or_create(syllabus=syllabus, - academy=academy, - cohort=cohort) + visibility_setting, created = EventTypeVisibilitySetting.objects.get_or_create( + syllabus=syllabus, academy=academy, cohort=cohort + ) if not event_type.visibility_settings.filter(id=visibility_setting.id).exists(): event_type.visibility_settings.add(visibility_setting) @@ -814,29 +854,36 @@ def post(self, request, event_type_slug, academy_id=None): serializer = EventTypeVisibilitySettingSerializer(visibility_setting, many=False) return Response(serializer.data, status=status.HTTP_201_CREATED if created else status.HTTP_200_OK) - @capable_of('crud_event_type') + @capable_of("crud_event_type") def delete(self, request, event_type_slug, visibility_setting_id=None, academy_id=None): lang = get_user_language(request) event_type = EventType.objects.filter(slug=event_type_slug, academy=academy_id).first() if not event_type: raise ValidationException( - translation(lang, - en='Event type not found', - es='Tipo de evento no encontrado', - slug='event-type-not-found'), ) + translation( + lang, en="Event type not found", es="Tipo de evento no encontrado", slug="event-type-not-found" + ), + ) item = EventTypeVisibilitySetting.objects.filter(id=visibility_setting_id, academy=academy_id).first() if not item: - raise ValidationException(translation(lang, - en='Event type visibility setting not found', - es='Configuración de visibilidad no encontrada', - slug='event-type-visibility-setting-not-found'), - code=404) - - other_event_type = EventType.objects.filter(visibility_settings__id=visibility_setting_id, - academy=academy_id).exclude(slug=event_type_slug).exists() + raise ValidationException( + translation( + lang, + en="Event type visibility setting not found", + es="Configuración de visibilidad no encontrada", + slug="event-type-visibility-setting-not-found", + ), + code=404, + ) + + other_event_type = ( + EventType.objects.filter(visibility_settings__id=visibility_setting_id, academy=academy_id) + .exclude(slug=event_type_slug) + .exists() + ) if other_event_type: event_type.visibility_settings.remove(item) @@ -848,26 +895,30 @@ def delete(self, request, event_type_slug, visibility_setting_id=None, academy_i @private_view() -@consume('event_join', consumer=event_by_url_param, format='html') 
+@consume("event_join", consumer=event_by_url_param, format="html") def join_event(request, token, event): now = timezone.now() if event.starting_at > now: obj = {} if event.academy: - obj['COMPANY_INFO_EMAIL'] = event.academy.feedback_email - obj['COMPANY_LEGAL_NAME'] = event.academy.legal_name or event.academy.name - obj['COMPANY_LOGO'] = event.academy.logo_url - obj['COMPANY_NAME'] = event.academy.name - - if 'heading' not in obj: - obj['heading'] = event.academy.name - - return render(request, 'countdown.html', { - 'token': token.key, - 'event': EventJoinSmallSerializer(event).data, - **obj, - }) + obj["COMPANY_INFO_EMAIL"] = event.academy.feedback_email + obj["COMPANY_LEGAL_NAME"] = event.academy.legal_name or event.academy.name + obj["COMPANY_LOGO"] = event.academy.logo_url + obj["COMPANY_NAME"] = event.academy.name + + if "heading" not in obj: + obj["heading"] = event.academy.name + + return render( + request, + "countdown.html", + { + "token": token.key, + "event": EventJoinSmallSerializer(event).data, + **obj, + }, + ) # if the event is happening right now and I have not joined yet checkin = EventCheckin.objects.filter(Q(email=token.user.email) | Q(attendee=token.user), event=event).first() @@ -876,14 +927,13 @@ def join_event(request, token, event): if checkin.attended_at is None: - checkin.status = 'DONE' + checkin.status = "DONE" checkin.attended_at = now checkin.save() - tasks_activity.add_activity.delay(checkin.attendee.id, - 'event_checkin_assisted', - related_type='events.EventCheckin', - related_id=checkin.id) + tasks_activity.add_activity.delay( + checkin.attendee.id, "event_checkin_assisted", related_type="events.EventCheckin", related_id=checkin.id + ) return redirect(event.live_stream_url) @@ -893,12 +943,12 @@ class EventCheckinView(APIView): def get(self, request, event_id): if event_id is None: - raise ValidationException('event_id must not be null', status.HTTP_404_NOT_FOUND) + raise ValidationException("event_id must not be null", status.HTTP_404_NOT_FOUND) try: event_id = int(event_id) except Exception: - raise ValidationException(f'{event_id} must be am integer', slug='Event must be an integer') + raise ValidationException(f"{event_id} must be am integer", slug="Event must be an integer") event_checkins = EventCheckin.objects.filter(event=event_id) @@ -917,19 +967,25 @@ def put(self, request, event_id): if event is None: event = Event.objects.filter(id=event_id).first() if event is None or event.event_type is None: - raise ValidationException(translation( - lang, - en='This event was not found, or your current plan does not include access to it.', - es='El evento no se ha encontrado o tu plan no te permite asistir a este evento', - slug='event-not-found'), - code=404) + raise ValidationException( + translation( + lang, + en="This event was not found, or your current plan does not include access to it.", + es="El evento no se ha encontrado o tu plan no te permite asistir a este evento", + slug="event-not-found", + ), + code=404, + ) else: - raise ValidationException(translation( - lang, - en='Tu plan no te permite tener acceso a eventos de este tipo: ' + event.event_type.name, - es='Your current plan does not include access to this type of events: ' + event.event_type.name, - slug='event-not-found'), - code=404) + raise ValidationException( + translation( + lang, + en="Tu plan no te permite tener acceso a eventos de este tipo: " + event.event_type.name, + es="Your current plan does not include access to this type of events: " + event.event_type.name, + 
slug="event-not-found", + ), + code=404, + ) serializer = PUTEventCheckinSerializer(event, request.data) if serializer.is_valid(): @@ -945,29 +1001,30 @@ def post(self, request, event_id): if event is None: event = Event.objects.filter(id=event_id).first() if event is None or event.event_type is None: - raise ValidationException(translation( - lang, - en='This event was not found, or your current plan does not include access to it.', - es='El evento no se ha encontrado o tu plan no te permite asistir a este evento', - slug='event-not-found'), - code=404) + raise ValidationException( + translation( + lang, + en="This event was not found, or your current plan does not include access to it.", + es="El evento no se ha encontrado o tu plan no te permite asistir a este evento", + slug="event-not-found", + ), + code=404, + ) else: - raise ValidationException(translation( - lang, - en='Tu plan no te permite tener acceso a eventos de este tipo: ' + event.event_type.name, - es='Your current plan does not include access to this type of events: ' + event.event_type.name, - slug='event-not-found'), - code=404) - - serializer = POSTEventCheckinSerializer(data={ - **request.data, 'email': request.user.email, - 'attendee': request.user.id, - 'event': event.id - }, - context={ - 'lang': lang, - 'user': request.user - }) + raise ValidationException( + translation( + lang, + en="Tu plan no te permite tener acceso a eventos de este tipo: " + event.event_type.name, + es="Your current plan does not include access to this type of events: " + event.event_type.name, + slug="event-not-found", + ), + code=404, + ) + + serializer = POSTEventCheckinSerializer( + data={**request.data, "email": request.user.email, "attendee": request.user.id, "event": event.id}, + context={"lang": lang, "user": request.user}, + ) if serializer.is_valid(): serializer.save() return Response(serializer.data, status=status.HTTP_201_CREATED) @@ -976,38 +1033,40 @@ def post(self, request, event_id): class AcademyEventCheckinView(APIView): - extensions = APIViewExtensions(sort='-created_at', paginate=True) + extensions = APIViewExtensions(sort="-created_at", paginate=True) - @capable_of('read_eventcheckin') + @capable_of("read_eventcheckin") def get(self, request, format=None, academy_id=None): handler = self.extensions(request) items = EventCheckin.objects.filter(event__academy__id=academy_id) lookup = {} - if 'status' in self.request.GET: - value = self.request.GET.get('status') - lookup['status'] = value + if "status" in self.request.GET: + value = self.request.GET.get("status") + lookup["status"] = value - if 'event' in self.request.GET: - value = self.request.GET.get('event') - lookup['event__id'] = value + if "event" in self.request.GET: + value = self.request.GET.get("event") + lookup["event__id"] = value - like = self.request.GET.get('like') - if 'like' in self.request.GET: + like = self.request.GET.get("like") + if "like" in self.request.GET: items = items.filter( Q(attendee__first_name__icontains=like) | Q(attendee__last_name_icontains=like) - | Q(attendee__email_icontains=like) | Q(email_icontains=like)) + | Q(attendee__email_icontains=like) + | Q(email_icontains=like) + ) - start = request.GET.get('start', None) + start = request.GET.get("start", None) if start is not None: - start_date = datetime.strptime(start, '%Y-%m-%d').date() + start_date = datetime.strptime(start, "%Y-%m-%d").date() items = items.filter(created_at__gte=start_date) - end = request.GET.get('end', None) + end = request.GET.get("end", None) if end is not None: - 
end_date = datetime.strptime(end, '%Y-%m-%d').date() + end_date = datetime.strptime(end, "%Y-%m-%d").date() items = items.filter(created_at__lte=end_date) items = items.filter(**lookup) @@ -1017,35 +1076,36 @@ def get(self, request, format=None, academy_id=None): return handler.response(serializer.data) -@api_view(['POST']) +@api_view(["POST"]) @permission_classes([AllowAny]) @renderer_classes([PlainTextRenderer]) def eventbrite_webhook(request, organization_id): if actions.is_eventbrite_enabled() is False: - return Response('Eventbrite integration is disabled, to activate add env variable EVENTBRITE=TRUE', - content_type='text/plain') + return Response( + "Eventbrite integration is disabled, to activate add env variable EVENTBRITE=TRUE", + content_type="text/plain", + ) webhook = Eventbrite.add_webhook_to_log(request.data, organization_id) if webhook: async_eventbrite_webhook.delay(webhook.id) else: - logger.debug('One request cannot be parsed, maybe you should update `Eventbrite' - '.add_webhook_to_log`') + logger.debug("One request cannot be parsed, maybe you should update `Eventbrite" ".add_webhook_to_log`") logger.debug(request.data) # async_eventbrite_webhook(request.data) - return Response('ok', content_type='text/plain') + return Response("ok", content_type="text/plain") class AcademyOrganizerView(APIView): - @capable_of('read_organization') + @capable_of("read_organization") def get(self, request, academy_id=None): orgs = Organizer.objects.filter(academy__id=academy_id) if orgs is None: - raise ValidationException('Organizers not found for this academy', 404) + raise ValidationException("Organizers not found for this academy", 404) serializer = OrganizerSmallSerializer(orgs, many=True) return Response(serializer.data) @@ -1054,27 +1114,27 @@ def get(self, request, academy_id=None): # list venues class AcademyOrganizationOrganizerView(APIView): - @capable_of('read_organization') + @capable_of("read_organization") def get(self, request, academy_id=None): org = Organization.objects.filter(academy__id=academy_id).first() if org is None: - raise ValidationException('Organization not found for this academy', 404) + raise ValidationException("Organization not found for this academy", 404) organizers = Organizer.objects.filter(organization_id=org.id) serializer = OrganizerSmallSerializer(organizers, many=True) return Response(serializer.data) - @capable_of('crud_organization') + @capable_of("crud_organization") def delete(self, request, academy_id=None, organizer_id=None): org = Organization.objects.filter(academy__id=academy_id).first() if org is None: - raise ValidationException('Organization not found for this academy', 404) + raise ValidationException("Organization not found for this academy", 404) organizer = Organizer.objects.filter(organization_id=org.id, id=organizer_id).first() if organizer is None: - raise ValidationException('Organizers not found for this academy organization', 404) + raise ValidationException("Organizers not found for this academy organization", 404) organizer.academy = None organizer.save() @@ -1086,35 +1146,35 @@ def delete(self, request, academy_id=None, organizer_id=None): # list venues class AcademyOrganizationView(APIView): - @capable_of('read_organization') + @capable_of("read_organization") def get(self, request, academy_id=None): org = Organization.objects.filter(academy__id=academy_id).first() if org is None: - raise ValidationException('Organization not found for this academy', 404) + raise ValidationException("Organization not found for this 
academy", 404) serializer = OrganizationBigSerializer(org, many=False) return Response(serializer.data) - @capable_of('crud_organization') + @capable_of("crud_organization") def post(self, request, format=None, academy_id=None): organization = Organization.objects.filter(academy__id=academy_id).first() if organization: - raise ValidationException('Academy already has an organization asociated', slug='already-created') + raise ValidationException("Academy already has an organization asociated", slug="already-created") - serializer = OrganizationSerializer(data={**request.data, 'academy': academy_id}) + serializer = OrganizationSerializer(data={**request.data, "academy": academy_id}) if serializer.is_valid(): serializer.save() return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - @capable_of('crud_organization') + @capable_of("crud_organization") def put(self, request, format=None, academy_id=None): organization = Organization.objects.filter(academy__id=academy_id).first() if not organization: - raise ValidationException('Organization not found for this academy', slug='org-not-found') + raise ValidationException("Organization not found for this academy", slug="org-not-found") serializer = OrganizationSerializer(organization, data=request.data) if serializer.is_valid(): @@ -1122,12 +1182,12 @@ def put(self, request, format=None, academy_id=None): return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - @capable_of('crud_organization') + @capable_of("crud_organization") def delete(self, request, format=None, academy_id=None): organization = Organization.objects.filter(academy__id=academy_id).first() if not organization: - raise ValidationException('Organization not found for this academy', slug='org-not-found') + raise ValidationException("Organization not found for this academy", slug="org-not-found") organization.delete() @@ -1137,14 +1197,14 @@ def delete(self, request, format=None, academy_id=None): # list eventbride webhook class OrganizationWebhookView(APIView, HeaderLimitOffsetPagination): - @capable_of('read_organization') + @capable_of("read_organization") def get(self, request, academy_id=None): org = Organization.objects.filter(academy__id=academy_id).first() if not org: - raise ValidationException('Academy has no organization', code=400, slug='organization-no-found') + raise ValidationException("Academy has no organization", code=400, slug="organization-no-found") - webhooks = EventbriteWebhook.objects.filter(organization_id=org.id).order_by('-updated_at') + webhooks = EventbriteWebhook.objects.filter(organization_id=org.id).order_by("-updated_at") page = self.paginate_queryset(webhooks, request) serializer = EventbriteWebhookSerializer(page, many=True) if self.is_paginate(request): @@ -1156,10 +1216,10 @@ def get(self, request, academy_id=None): # list venues class AcademyVenueView(APIView): - @capable_of('read_event') + @capable_of("read_event") def get(self, request, format=None, academy_id=None, user_id=None): - venues = Venue.objects.filter(academy__id=academy_id).order_by('-created_at') + venues = Venue.objects.filter(academy__id=academy_id).order_by("-created_at") serializer = VenueSerializer(venues, many=True) return Response(serializer.data) @@ -1175,19 +1235,16 @@ def ical_academies_repr(slugs=None, ids=None): slugs = [] if ids: - ret = ret + \ - list(Academy.objects.filter(id__in=ids).values_list('id', 
flat=True)) + ret = ret + list(Academy.objects.filter(id__in=ids).values_list("id", flat=True)) if slugs: - ret = ret + \ - list(Academy.objects.filter( - slug__in=slugs).values_list('id', flat=True)) + ret = ret + list(Academy.objects.filter(slug__in=slugs).values_list("id", flat=True)) ret = sorted(list(dict.fromkeys(ret))) - ret = ','.join([str(id) for id in ret]) + ret = ",".join([str(id) for id in ret]) if ret: - ret = f' ({ret})' + ret = f" ({ret})" return ret @@ -1197,46 +1254,47 @@ class ICalStudentView(APIView): def get(self, request, user_id): if not User.objects.filter(id=user_id).count(): - raise ValidationException('Student not exist', 404, slug='student-not-exist') + raise ValidationException("Student not exist", 404, slug="student-not-exist") - cohort_ids = (CohortUser.objects.filter(user__id=user_id, - cohort__ending_date__isnull=False, - cohort__never_ends=False).values_list( - 'cohort_id', flat=True).exclude(cohort__stage='DELETED')) + cohort_ids = ( + CohortUser.objects.filter(user__id=user_id, cohort__ending_date__isnull=False, cohort__never_ends=False) + .values_list("cohort_id", flat=True) + .exclude(cohort__stage="DELETED") + ) - items = CohortTimeSlot.objects.filter(cohort__id__in=cohort_ids).order_by('id') + items = CohortTimeSlot.objects.filter(cohort__id__in=cohort_ids).order_by("id") - upcoming = request.GET.get('upcoming') - if upcoming == 'true': + upcoming = request.GET.get("upcoming") + if upcoming == "true": now = timezone.now() items = items.filter(cohort__kickoff_date__gte=now) key = server_id() calendar = iCalendar() - calendar.add('prodid', f'-//4Geeks//Student Schedule ({user_id}) {key}//EN') - calendar.add('METHOD', 'PUBLISH') - calendar.add('X-WR-CALNAME', 'Academy - Schedule') - calendar.add('X-WR-CALDESC', '') - calendar.add('REFRESH-INTERVAL;VALUE=DURATION', 'PT15M') + calendar.add("prodid", f"-//4Geeks//Student Schedule ({user_id}) {key}//EN") + calendar.add("METHOD", "PUBLISH") + calendar.add("X-WR-CALNAME", "Academy - Schedule") + calendar.add("X-WR-CALDESC", "") + calendar.add("REFRESH-INTERVAL;VALUE=DURATION", "PT15M") - url = os.getenv('API_URL') + url = os.getenv("API_URL") if url: - url = re.sub(r'/$', '', url) + '/v1/events/ical/student/' + str(user_id) - calendar.add('url', url) + url = re.sub(r"/$", "", url) + "/v1/events/ical/student/" + str(user_id) + calendar.add("url", url) - calendar.add('version', '2.0') + calendar.add("version", "2.0") for item in items: event = iEvent() - event.add('summary', item.cohort.name) - event.add('uid', f'breathecode_cohort_time_slot_{item.id}_{key}') + event.add("summary", item.cohort.name) + event.add("uid", f"breathecode_cohort_time_slot_{item.id}_{key}") stamp = DatetimeInteger.to_datetime(item.timezone, item.starting_at) starting_at = fix_datetime_weekday(item.cohort.kickoff_date, stamp, next=True) - event.add('dtstart', starting_at) - event.add('dtstamp', stamp) + event.add("dtstart", starting_at) + event.add("dtstamp", stamp) until_date = item.removed_at or item.cohort.ending_date @@ -1245,47 +1303,48 @@ def get(self, request, user_id): ending_at = DatetimeInteger.to_datetime(item.timezone, item.ending_at) ending_at = fix_datetime_weekday(item.cohort.kickoff_date, ending_at, next=True) - event.add('dtend', ending_at) + event.add("dtend", ending_at) if item.recurrent: utc_ending_at = ending_at.astimezone(pytz.UTC) # is possible hour of cohort.ending_date are wrong filled, I's assumes the max diff between # summer/winter timezone should have two hours - delta = timedelta(hours=utc_ending_at.hour - 
until_date.hour + 3, - minutes=utc_ending_at.minute - until_date.minute, - seconds=utc_ending_at.second - until_date.second) + delta = timedelta( + hours=utc_ending_at.hour - until_date.hour + 3, + minutes=utc_ending_at.minute - until_date.minute, + seconds=utc_ending_at.second - until_date.second, + ) - event.add('rrule', {'freq': item.recurrency_type, 'until': until_date + delta}) + event.add("rrule", {"freq": item.recurrency_type, "until": until_date + delta}) - teacher = CohortUser.objects.filter(role='TEACHER', cohort__id=item.cohort.id).first() + teacher = CohortUser.objects.filter(role="TEACHER", cohort__id=item.cohort.id).first() if teacher: - organizer = vCalAddress(f'MAILTO:{teacher.user.email}') + organizer = vCalAddress(f"MAILTO:{teacher.user.email}") if teacher.user.first_name and teacher.user.last_name: - organizer.params['cn'] = vText(f'{teacher.user.first_name} ' - f'{teacher.user.last_name}') + organizer.params["cn"] = vText(f"{teacher.user.first_name} " f"{teacher.user.last_name}") elif teacher.user.first_name: - organizer.params['cn'] = vText(teacher.user.first_name) + organizer.params["cn"] = vText(teacher.user.first_name) elif teacher.user.last_name: - organizer.params['cn'] = vText(teacher.user.last_name) + organizer.params["cn"] = vText(teacher.user.last_name) - organizer.params['role'] = vText('OWNER') - event['organizer'] = organizer + organizer.params["role"] = vText("OWNER") + event["organizer"] = organizer location = item.cohort.academy.name if item.cohort.academy.website_url: - location = f'{location} ({item.cohort.academy.website_url})' - event['location'] = vText(item.cohort.online_meeting_url or item.cohort.academy.name) + location = f"{location} ({item.cohort.academy.website_url})" + event["location"] = vText(item.cohort.online_meeting_url or item.cohort.academy.name) calendar.add_component(event) calendar_text = calendar.to_ical() - response = HttpResponse(calendar_text, content_type='text/calendar') - response['Content-Disposition'] = 'attachment; filename="calendar.ics"' + response = HttpResponse(calendar_text, content_type="text/calendar") + response["Content-Disposition"] = 'attachment; filename="calendar.ics"' return response @@ -1293,35 +1352,39 @@ class ICalCohortsView(APIView): permission_classes = [AllowAny] def get(self, request): - ids = request.GET.get('academy', '') - slugs = request.GET.get('academy_slug', '') + ids = request.GET.get("academy", "") + slugs = request.GET.get("academy_slug", "") - ids = ids.split(',') if ids else [] - slugs = slugs.split(',') if slugs else [] + ids = ids.split(",") if ids else [] + slugs = slugs.split(",") if slugs else [] if ids: - items = Cohort.objects.filter(ending_date__isnull=False, never_ends=False, - academy__id__in=ids).order_by('id') + items = Cohort.objects.filter(ending_date__isnull=False, never_ends=False, academy__id__in=ids).order_by( + "id" + ) elif slugs: - items = Cohort.objects.filter(ending_date__isnull=False, never_ends=False, - academy__slug__in=slugs).order_by('id') + items = Cohort.objects.filter( + ending_date__isnull=False, never_ends=False, academy__slug__in=slugs + ).order_by("id") else: items = [] if not ids and not slugs: raise ValidationException( - 'You need to specify at least one academy or academy_slug (comma separated) in the querystring') + "You need to specify at least one academy or academy_slug (comma separated) in the querystring" + ) - if (Academy.objects.filter(id__in=ids).count() != len(ids) - or Academy.objects.filter(slug__in=slugs).count() != len(slugs)): - raise 
ValidationException('Some academy not exist') + if Academy.objects.filter(id__in=ids).count() != len(ids) or Academy.objects.filter( + slug__in=slugs + ).count() != len(slugs): + raise ValidationException("Some academy not exist") - items = items.exclude(stage='DELETED') + items = items.exclude(stage="DELETED") - upcoming = request.GET.get('upcoming') - if upcoming == 'true': + upcoming = request.GET.get("upcoming") + if upcoming == "true": now = timezone.now() items = items.filter(kickoff_date__gte=now) @@ -1329,30 +1392,30 @@ def get(self, request): key = server_id() calendar = iCalendar() - calendar.add('prodid', f'-//4Geeks//Academy Cohorts{academies_repr} {key}//EN') - calendar.add('METHOD', 'PUBLISH') - calendar.add('X-WR-CALNAME', 'Academy - Cohorts') - calendar.add('X-WR-CALDESC', '') - calendar.add('REFRESH-INTERVAL;VALUE=DURATION', 'PT15M') + calendar.add("prodid", f"-//4Geeks//Academy Cohorts{academies_repr} {key}//EN") + calendar.add("METHOD", "PUBLISH") + calendar.add("X-WR-CALNAME", "Academy - Cohorts") + calendar.add("X-WR-CALDESC", "") + calendar.add("REFRESH-INTERVAL;VALUE=DURATION", "PT15M") - url = os.getenv('API_URL') + url = os.getenv("API_URL") if url: - url = re.sub(r'/$', '', url) + '/v1/events/ical/cohorts' + url = re.sub(r"/$", "", url) + "/v1/events/ical/cohorts" if ids or slugs: - url = url + '?' + url = url + "?" if ids: - url = url + 'academy=' + ','.join(ids) + url = url + "academy=" + ",".join(ids) if ids and slugs: - url = url + '&' + url = url + "&" if slugs: - url = url + 'academy_slug=' + ','.join(slugs) + url = url + "academy_slug=" + ",".join(slugs) - calendar.add('url', url) + calendar.add("url", url) - calendar.add('version', '2.0') + calendar.add("version", "2.0") for item in items: event = iEvent() @@ -1360,38 +1423,45 @@ def get(self, request): event_last_day = iEvent() has_last_day = False - event.add('summary', item.name) - event.add('uid', f'breathecode_cohort_{item.id}_{key}') - event.add('dtstart', item.kickoff_date) + event.add("summary", item.name) + event.add("uid", f"breathecode_cohort_{item.id}_{key}") + event.add("dtstart", item.kickoff_date) - timeslots = update_timeslots_out_of_range(item.kickoff_date, item.ending_date, - CohortTimeSlot.objects.filter(cohort__id=item.id)) + timeslots = update_timeslots_out_of_range( + item.kickoff_date, item.ending_date, CohortTimeSlot.objects.filter(cohort__id=item.id) + ) first_timeslot = timeslots[0] if timeslots else None if first_timeslot: - recurrent = first_timeslot['recurrent'] - starting_at = first_timeslot['starting_at'] if not recurrent else fix_datetime_weekday( - item.kickoff_date, first_timeslot['starting_at'], next=True) - ending_at = first_timeslot['ending_at'] if not recurrent else fix_datetime_weekday( - item.kickoff_date, first_timeslot['ending_at'], next=True) - - event_first_day.add('summary', f'{item.name} - First day') - event_first_day.add('uid', f'breathecode_cohort_{item.id}_first_{key}') - event_first_day.add('dtstart', starting_at) - event_first_day.add('dtend', ending_at) - event_first_day.add('dtstamp', first_timeslot['created_at']) + recurrent = first_timeslot["recurrent"] + starting_at = ( + first_timeslot["starting_at"] + if not recurrent + else fix_datetime_weekday(item.kickoff_date, first_timeslot["starting_at"], next=True) + ) + ending_at = ( + first_timeslot["ending_at"] + if not recurrent + else fix_datetime_weekday(item.kickoff_date, first_timeslot["ending_at"], next=True) + ) + + event_first_day.add("summary", f"{item.name} - First day") + 
event_first_day.add("uid", f"breathecode_cohort_{item.id}_first_{key}") + event_first_day.add("dtstart", starting_at) + event_first_day.add("dtend", ending_at) + event_first_day.add("dtstamp", first_timeslot["created_at"]) if item.ending_date: - event.add('dtend', item.ending_date) + event.add("dtend", item.ending_date) timeslots_datetime = [] # fix the datetime to be use for get the last day for timeslot in timeslots: - starting_at = timeslot['starting_at'] - ending_at = timeslot['ending_at'] + starting_at = timeslot["starting_at"] + ending_at = timeslot["ending_at"] diff = ending_at - starting_at - if timeslot['recurrent']: + if timeslot["recurrent"]: ending_at = fix_datetime_weekday(item.ending_date, ending_at, prev=True) starting_at = ending_at - diff @@ -1404,49 +1474,48 @@ def get(self, request): last_timeslot = timeslots_datetime[0] has_last_day = True - event_last_day.add('summary', f'{item.name} - Last day') + event_last_day.add("summary", f"{item.name} - Last day") - event_last_day.add('uid', f'breathecode_cohort_{item.id}_last_{key}') - event_last_day.add('dtstart', last_timeslot[0]) - event_last_day.add('dtend', last_timeslot[1]) - event_last_day.add('dtstamp', item.created_at) + event_last_day.add("uid", f"breathecode_cohort_{item.id}_last_{key}") + event_last_day.add("dtstart", last_timeslot[0]) + event_last_day.add("dtend", last_timeslot[1]) + event_last_day.add("dtstamp", item.created_at) - event.add('dtstamp', item.created_at) + event.add("dtstamp", item.created_at) - teacher = CohortUser.objects.filter(role='TEACHER', cohort__id=item.id).first() + teacher = CohortUser.objects.filter(role="TEACHER", cohort__id=item.id).first() if teacher: - organizer = vCalAddress(f'MAILTO:{teacher.user.email}') + organizer = vCalAddress(f"MAILTO:{teacher.user.email}") if teacher.user.first_name and teacher.user.last_name: - organizer.params['cn'] = vText(f'{teacher.user.first_name} ' - f'{teacher.user.last_name}') + organizer.params["cn"] = vText(f"{teacher.user.first_name} " f"{teacher.user.last_name}") elif teacher.user.first_name: - organizer.params['cn'] = vText(teacher.user.first_name) + organizer.params["cn"] = vText(teacher.user.first_name) elif teacher.user.last_name: - organizer.params['cn'] = vText(teacher.user.last_name) + organizer.params["cn"] = vText(teacher.user.last_name) - organizer.params['role'] = vText('OWNER') - event['organizer'] = organizer + organizer.params["role"] = vText("OWNER") + event["organizer"] = organizer if first_timeslot: - event_first_day['organizer'] = organizer + event_first_day["organizer"] = organizer if has_last_day: - event_last_day['organizer'] = organizer + event_last_day["organizer"] = organizer location = item.academy.name if item.academy.website_url: - location = f'{location} ({item.academy.website_url})' + location = f"{location} ({item.academy.website_url})" - event['location'] = vText(item.online_meeting_url or item.academy.name) + event["location"] = vText(item.online_meeting_url or item.academy.name) if first_timeslot: - event_first_day['location'] = vText(item.online_meeting_url or item.academy.name) + event_first_day["location"] = vText(item.online_meeting_url or item.academy.name) if has_last_day: - event_last_day['location'] = vText(item.online_meeting_url or item.academy.name) + event_last_day["location"] = vText(item.online_meeting_url or item.academy.name) if first_timeslot: calendar.add_component(event_first_day) @@ -1457,8 +1526,8 @@ def get(self, request): calendar_text = calendar.to_ical() - response = 
HttpResponse(calendar_text, content_type='text/calendar') - response['Content-Disposition'] = 'attachment; filename="calendar.ics"' + response = HttpResponse(calendar_text, content_type="text/calendar") + response["Content-Disposition"] = 'attachment; filename="calendar.ics"' return response @@ -1466,33 +1535,35 @@ class ICalEventView(APIView): permission_classes = [AllowAny] def get(self, request): - items = Event.objects.filter(status='ACTIVE') + items = Event.objects.filter(status="ACTIVE") - ids = request.GET.get('academy', '') - slugs = request.GET.get('academy_slug', '') + ids = request.GET.get("academy", "") + slugs = request.GET.get("academy_slug", "") - ids = ids.split(',') if ids else [] - slugs = slugs.split(',') if slugs else [] + ids = ids.split(",") if ids else [] + slugs = slugs.split(",") if slugs else [] if ids: - items = Event.objects.filter(academy__id__in=ids, status='ACTIVE').order_by('id') + items = Event.objects.filter(academy__id__in=ids, status="ACTIVE").order_by("id") elif slugs: - items = Event.objects.filter(academy__slug__in=slugs, status='ACTIVE').order_by('id') + items = Event.objects.filter(academy__slug__in=slugs, status="ACTIVE").order_by("id") else: items = [] if not ids and not slugs: raise ValidationException( - 'You need to specify at least one academy or academy_slug (comma separated) in the querystring') + "You need to specify at least one academy or academy_slug (comma separated) in the querystring" + ) - if (Academy.objects.filter(id__in=ids).count() != len(ids) - or Academy.objects.filter(slug__in=slugs).count() != len(slugs)): - raise ValidationException('Some academy not exist') + if Academy.objects.filter(id__in=ids).count() != len(ids) or Academy.objects.filter( + slug__in=slugs + ).count() != len(slugs): + raise ValidationException("Some academy not exist") - upcoming = request.GET.get('upcoming') - if items and upcoming == 'true': + upcoming = request.GET.get("upcoming") + if items and upcoming == "true": now = timezone.now() items = items.filter(starting_at__gte=now) @@ -1500,94 +1571,93 @@ def get(self, request): key = server_id() calendar = iCalendar() - calendar.add('prodid', f'-//4Geeks//Academy Events{academies_repr} {key}//EN') - calendar.add('METHOD', 'PUBLISH') - calendar.add('X-WR-CALNAME', 'Academy - Events') - calendar.add('X-WR-CALDESC', '') - calendar.add('REFRESH-INTERVAL;VALUE=DURATION', 'PT15M') + calendar.add("prodid", f"-//4Geeks//Academy Events{academies_repr} {key}//EN") + calendar.add("METHOD", "PUBLISH") + calendar.add("X-WR-CALNAME", "Academy - Events") + calendar.add("X-WR-CALDESC", "") + calendar.add("REFRESH-INTERVAL;VALUE=DURATION", "PT15M") - url = os.getenv('API_URL') + url = os.getenv("API_URL") if url: - url = re.sub(r'/$', '', url) + '/v1/events/ical/events' + url = re.sub(r"/$", "", url) + "/v1/events/ical/events" if ids or slugs: - url = url + '?' + url = url + "?" 
if ids: - url = url + 'academy=' + ','.join(ids) + url = url + "academy=" + ",".join(ids) if ids and slugs: - url = url + '&' + url = url + "&" if slugs: - url = url + 'academy_slug=' + ','.join(slugs) + url = url + "academy_slug=" + ",".join(slugs) - calendar.add('url', url) + calendar.add("url", url) - calendar.add('version', '2.0') + calendar.add("version", "2.0") for item in items: event = iEvent() if item.title: - event.add('summary', item.title) + event.add("summary", item.title) - description = '' - description = f'{description}Url: {item.url}\n' + description = "" + description = f"{description}Url: {item.url}\n" if item.academy: - description = f'{description}Academy: {item.academy.name}\n' + description = f"{description}Academy: {item.academy.name}\n" if item.venue and item.venue.title: - description = f'{description}Venue: {item.venue.title}\n' + description = f"{description}Venue: {item.venue.title}\n" if item.event_type: - description = f'{description}Event type: {item.event_type.name}\n' + description = f"{description}Event type: {item.event_type.name}\n" if item.online_event: - description = f'{description}Location: online\n' + description = f"{description}Location: online\n" - event.add('description', description) - event.add('uid', f'breathecode_event_{item.id}_{key}') - event.add('dtstart', item.starting_at) - event.add('dtend', item.ending_at) - event.add('dtstamp', item.created_at) + event.add("description", description) + event.add("uid", f"breathecode_event_{item.id}_{key}") + event.add("dtstart", item.starting_at) + event.add("dtend", item.ending_at) + event.add("dtstamp", item.created_at) if item.author and item.author.email: - organizer = vCalAddress(f'MAILTO:{item.author.email}') + organizer = vCalAddress(f"MAILTO:{item.author.email}") if item.author.first_name and item.author.last_name: - organizer.params['cn'] = vText(f'{item.author.first_name} ' - f'{item.author.last_name}') + organizer.params["cn"] = vText(f"{item.author.first_name} " f"{item.author.last_name}") elif item.author.first_name: - organizer.params['cn'] = vText(item.author.first_name) + organizer.params["cn"] = vText(item.author.first_name) elif item.author.last_name: - organizer.params['cn'] = vText(item.author.last_name) + organizer.params["cn"] = vText(item.author.last_name) - organizer.params['role'] = vText('OWNER') - event['organizer'] = organizer + organizer.params["role"] = vText("OWNER") + event["organizer"] = organizer if item.venue and (item.venue.country or item.venue.state or item.venue.city or item.venue.street_address): - value = '' + value = "" if item.venue.street_address: - value = f'{value}{item.venue.street_address}, ' + value = f"{value}{item.venue.street_address}, " if item.venue.city: - value = f'{value}{item.venue.city}, ' + value = f"{value}{item.venue.city}, " if item.venue.state: - value = f'{value}{item.venue.state}, ' + value = f"{value}{item.venue.state}, " if item.venue.country: - value = f'{value}{item.venue.country}' + value = f"{value}{item.venue.country}" - value = re.sub(', $', '', value) - event['location'] = vText(value) + value = re.sub(", $", "", value) + event["location"] = vText(value) calendar.add_component(event) calendar_text = calendar.to_ical() - response = HttpResponse(calendar_text, content_type='text/calendar') - response['Content-Disposition'] = 'attachment; filename="calendar.ics"' + response = HttpResponse(calendar_text, content_type="text/calendar") + response["Content-Disposition"] = 'attachment; filename="calendar.ics"' return response diff 
--git a/breathecode/feedback/actions.py b/breathecode/feedback/actions.py index e6cce1973..baf67e70d 100644 --- a/breathecode/feedback/actions.py +++ b/breathecode/feedback/actions.py @@ -20,55 +20,59 @@ def send_survey_group(survey=None, cohort=None): if survey is None and cohort is None: - raise ValidationException('Missing survey or cohort', slug='missing-survey-or-cohort') + raise ValidationException("Missing survey or cohort", slug="missing-survey-or-cohort") if survey is None: survey = Survey(cohort=cohort, lang=cohort.language.lower()) - result = {'success': [], 'error': []} + result = {"success": [], "error": []} try: if cohort is not None: if survey.cohort.id != cohort.id: - raise ValidationException('The survey does not match the cohort id', - slug='survey-does-not-match-cohort') + raise ValidationException( + "The survey does not match the cohort id", slug="survey-does-not-match-cohort" + ) if cohort is None: cohort = survey.cohort - cohort_teacher = CohortUser.objects.filter(cohort=survey.cohort, role='TEACHER') + cohort_teacher = CohortUser.objects.filter(cohort=survey.cohort, role="TEACHER") if cohort_teacher.count() == 0: - raise ValidationException('This cohort must have a teacher assigned to be able to survey it', - 400, - slug='cohort-must-have-teacher-assigned-to-survey') + raise ValidationException( + "This cohort must have a teacher assigned to be able to survey it", + 400, + slug="cohort-must-have-teacher-assigned-to-survey", + ) - ucs = CohortUser.objects.filter(cohort=cohort, role='STUDENT').filter() + ucs = CohortUser.objects.filter(cohort=cohort, role="STUDENT").filter() for uc in ucs: - if uc.educational_status in ['ACTIVE', 'GRADUATED']: + if uc.educational_status in ["ACTIVE", "GRADUATED"]: tasks.send_cohort_survey.delay(uc.user.id, survey.id) - logger.debug(f'Survey scheduled to send for {uc.user.email}') - result['success'].append(f'Survey scheduled to send for {uc.user.email}') + logger.debug(f"Survey scheduled to send for {uc.user.email}") + result["success"].append(f"Survey scheduled to send for {uc.user.email}") else: logger.debug(f"Survey NOT sent to {uc.user.email} because it's not an active or graduated student") - result['error'].append( - f"Survey NOT sent to {uc.user.email} because it's not an active or graduated student") + result["error"].append( + f"Survey NOT sent to {uc.user.email} because it's not an active or graduated student" + ) survey.sent_at = timezone.now() - if len(result['error']) == 0: - survey.status = 'SENT' - elif len(result['success']) > 0 and len(result['error']) > 0: - survey.status = 'PARTIAL' + if len(result["error"]) == 0: + survey.status = "SENT" + elif len(result["success"]) > 0 and len(result["error"]) > 0: + survey.status = "PARTIAL" else: - survey.status = 'FATAL' + survey.status = "FATAL" survey.status_json = json.dumps(result) survey.save() except Exception as e: - survey.status = 'FATAL' - result['error'].append('Error sending survey to group: ' + str(e)) + survey.status = "FATAL" + result["error"].append("Error sending survey to group: " + str(e)) survey.status_json = json.dumps(result) survey.save() raise e @@ -80,89 +84,93 @@ def send_question(user, cohort=None): answer = Answer(user=user) # just can send the question if the user is active in the cohort - cu_kwargs = {'user': user, 'educational_status__in': ['ACTIVE', 'GRADUATED']} + cu_kwargs = {"user": user, "educational_status__in": ["ACTIVE", "GRADUATED"]} if cohort: - cu_kwargs['cohort'] = cohort + cu_kwargs["cohort"] = cohort ###1 - cu = 
CohortUser.objects.filter(**cu_kwargs).order_by('-cohort__kickoff_date').first() + cu = CohortUser.objects.filter(**cu_kwargs).order_by("-cohort__kickoff_date").first() if not cu: - raise ValidationException('Impossible to determine the student cohort, maybe it has more than one, or cero.', - slug='without-cohort-or-cannot-determine-cohort') + raise ValidationException( + "Impossible to determine the student cohort, maybe it has more than one, or cero.", + slug="without-cohort-or-cannot-determine-cohort", + ) answer.cohort = cu.cohort answer.lang = answer.cohort.language.lower() answer.save() - has_slackuser = hasattr(user, 'slackuser') + has_slackuser = hasattr(user, "slackuser") if not user.email and not has_slackuser: - raise ValidationException(f'User not have email and slack, this survey cannot be send: {str(user.id)}', - slug='without-email-or-slack-user') + raise ValidationException( + f"User not have email and slack, this survey cannot be send: {str(user.id)}", + slug="without-email-or-slack-user", + ) ###2 if not answer.cohort.syllabus_version: - raise ValidationException('Cohort not have one SyllabusVersion', slug='cohort-without-syllabus-version') + raise ValidationException("Cohort not have one SyllabusVersion", slug="cohort-without-syllabus-version") if not answer.cohort.schedule: - raise ValidationException('Cohort not have one SyllabusSchedule', slug='cohort-without-specialty-mode') + raise ValidationException("Cohort not have one SyllabusSchedule", slug="cohort-without-specialty-mode") - question_was_sent_previously = Answer.objects.filter(cohort=answer.cohort, user=user, status='SENT').count() + question_was_sent_previously = Answer.objects.filter(cohort=answer.cohort, user=user, status="SENT").count() question = tasks.build_question(answer) if question_was_sent_previously: - answer = Answer.objects.filter(cohort=answer.cohort, user=user, status='SENT').first() + answer = Answer.objects.filter(cohort=answer.cohort, user=user, status="SENT").first() Token.objects.filter(id=answer.token_id).delete() else: - answer.title = question['title'] - answer.lowest = question['lowest'] - answer.highest = question['highest'] + answer.title = question["title"] + answer.lowest = question["lowest"] + answer.highest = question["highest"] answer.lang = answer.cohort.language.lower() answer.save() - token, created = Token.get_or_create(user, token_type='temporal', hours_length=72) + token, created = Token.get_or_create(user, token_type="temporal", hours_length=72) - token_id = Token.objects.filter(key=token).values_list('id', flat=True).first() + token_id = Token.objects.filter(key=token).values_list("id", flat=True).first() answer.token_id = token_id answer.save() data = { - 'QUESTION': question['title'], - 'HIGHEST': answer.highest, - 'LOWEST': answer.lowest, - 'SUBJECT': question['title'], - 'ANSWER_ID': answer.id, - 'BUTTON': strings[answer.cohort.language.lower()]['button_label'], - 'LINK': f'https://nps.4geeks.com/{answer.id}?token={token.key}', + "QUESTION": question["title"], + "HIGHEST": answer.highest, + "LOWEST": answer.lowest, + "SUBJECT": question["title"], + "ANSWER_ID": answer.id, + "BUTTON": strings[answer.cohort.language.lower()]["button_label"], + "LINK": f"https://nps.4geeks.com/{answer.id}?token={token.key}", } if user.email: - send_email_message('nps', user.email, data, academy=answer.cohort.academy) + send_email_message("nps", user.email, data, academy=answer.cohort.academy) - if hasattr(user, 'slackuser') and hasattr(answer.cohort.academy, 'slackteam'): - 
send_slack('nps', user.slackuser, answer.cohort.academy.slackteam, data=data, academy=answer.cohort.academy) + if hasattr(user, "slackuser") and hasattr(answer.cohort.academy, "slackteam"): + send_slack("nps", user.slackuser, answer.cohort.academy.slackteam, data=data, academy=answer.cohort.academy) # keep track of sent survays until they get answered if not question_was_sent_previously: - logger.info(f'Survey was sent for user: {str(user.id)}') - answer.status = 'SENT' + logger.info(f"Survey was sent for user: {str(user.id)}") + answer.status = "SENT" answer.save() return True else: - logger.info(f'Survey was resent for user: {str(user.id)}') + logger.info(f"Survey was resent for user: {str(user.id)}") return True def answer_survey(user, data): - Answer.objects.create(**{**data, 'user': user}) + Answer.objects.create(**{**data, "user": user}) def get_student_answer_avg(user_id, cohort_id=None, academy_id=None): - answers = Answer.objects.filter(user__id=user_id, status='ANSWERED', score__isnull=False) + answers = Answer.objects.filter(user__id=user_id, status="ANSWERED", score__isnull=False) # optionally filter by cohort if cohort_id is not None: @@ -172,12 +180,12 @@ def get_student_answer_avg(user_id, cohort_id=None, academy_id=None): if academy_id is not None: answers = answers.filter(academy__id=academy_id) - query = answers.aggregate(average=Avg('score')) + query = answers.aggregate(average=Avg("score")) - if query['average'] is not None: - return round(query['average'], 2) + if query["average"] is not None: + return round(query["average"], 2) - return query['average'] + return query["average"] def create_user_graduation_reviews(user, cohort) -> bool: @@ -190,24 +198,24 @@ def create_user_graduation_reviews(user, cohort) -> bool: author=user, ).count() if total_reviews > 0: - logger.info('No new reviews will be requested, student already has pending requests for this cohort') + logger.info("No new reviews will be requested, student already has pending requests for this cohort") return False platforms = ReviewPlatform.objects.all() - logger.info(f'{platforms.count()} will be requested for student {user.id}, avg NPS score of {average}') + logger.info(f"{platforms.count()} will be requested for student {user.id}, avg NPS score of {average}") for plat in platforms: review = Review(cohort=cohort, author=user, platform=plat, nps_previous_rating=average) review.save() return True - logger.info(f'No reviews requested for student {user.id} because average NPS score is {average}') + logger.info(f"No reviews requested for student {user.id} because average NPS score is {average}") return False def calculate_survey_response_rate(survey_id: int) -> float: total_responses = Answer.objects.filter(survey__id=survey_id).count() - answered_responses = Answer.objects.filter(survey__id=survey_id, status='ANSWERED').count() + answered_responses = Answer.objects.filter(survey__id=survey_id, status="ANSWERED").count() response_rate = (answered_responses / total_responses) * 100 return response_rate @@ -216,41 +224,40 @@ def calculate_survey_response_rate(survey_id: int) -> float: def calculate_survey_scores(survey_id: int) -> dict: def get_average(answers: QuerySet[Answer]) -> float: - result = answers.aggregate(Avg('score')) - return result['score__avg'] + result = answers.aggregate(Avg("score")) + return result["score__avg"] survey = Survey.objects.filter(id=survey_id).first() if not survey: - raise ValidationException('Survey not found', code=404, slug='not-found') + raise ValidationException("Survey not 
found", code=404, slug="not-found") - answers = Answer.objects.filter(survey=survey, status='ANSWERED') + answers = Answer.objects.filter(survey=survey, status="ANSWERED") total = get_average(answers) - academy_pattern = strings[survey.lang]['academy']['title'].split('{}') - cohort_pattern = strings[survey.lang]['cohort']['title'].split('{}') - mentor_pattern = strings[survey.lang]['mentor']['title'].split('{}') + academy_pattern = strings[survey.lang]["academy"]["title"].split("{}") + cohort_pattern = strings[survey.lang]["cohort"]["title"].split("{}") + mentor_pattern = strings[survey.lang]["mentor"]["title"].split("{}") academy = get_average(answers.filter(title__startswith=academy_pattern[0], title__endswith=academy_pattern[1])) cohort = get_average(answers.filter(title__startswith=cohort_pattern[0], title__endswith=cohort_pattern[1])) all_mentors = { - x.title - for x in answers.filter(title__startswith=mentor_pattern[0], title__endswith=mentor_pattern[1]) + x.title for x in answers.filter(title__startswith=mentor_pattern[0], title__endswith=mentor_pattern[1]) } - full_mentor_pattern = (mentor_pattern[0].replace('?', '\\?') + r'([\w ]+)' + mentor_pattern[1].replace('?', '\\?')) + full_mentor_pattern = mentor_pattern[0].replace("?", "\\?") + r"([\w ]+)" + mentor_pattern[1].replace("?", "\\?") mentors = [] for mentor in all_mentors: name = re.findall(full_mentor_pattern, mentor)[0] score = get_average(answers.filter(title=mentor)) - mentors.append({'name': name, 'score': score}) + mentors.append({"name": name, "score": score}) return { - 'total': total, - 'academy': academy, - 'cohort': cohort, - 'mentors': sorted(mentors, key=lambda x: x['name']), + "total": total, + "academy": academy, + "cohort": cohort, + "mentors": sorted(mentors, key=lambda x: x["name"]), } diff --git a/breathecode/feedback/admin.py b/breathecode/feedback/admin.py index 59b936659..f8fafbdf3 100644 --- a/breathecode/feedback/admin.py +++ b/breathecode/feedback/admin.py @@ -2,7 +2,10 @@ import json from django.contrib import admin, messages from django.contrib.auth.admin import UserAdmin -from breathecode.admissions.admin import CohortAdmin as AdmissionsCohortAdmin, CohortUserAdmin as AdmissionsCohortUserAdmin +from breathecode.admissions.admin import ( + CohortAdmin as AdmissionsCohortAdmin, + CohortUserAdmin as AdmissionsCohortUserAdmin, +) from breathecode.feedback.tasks import recalculate_survey_scores from .models import Answer, UserProxy, CohortProxy, CohortUserProxy, Survey, Review, ReviewPlatform from .actions import send_survey_group, create_user_graduation_reviews @@ -14,7 +17,7 @@ logger = logging.getLogger(__name__) -@admin.display(description='Send General NPS Survey') +@admin.display(description="Send General NPS Survey") def send_bulk_survey(modeladmin, request, queryset): # mocking tools are poor to apply it from django.contrib import messages @@ -36,19 +39,19 @@ def send_bulk_survey(modeladmin, request, queryset): logger.fatal(error) if errors: - message = ' - '.join([f'{error} ({errors[error]})' for error in errors.keys()]) + message = " - ".join([f"{error} ({errors[error]})" for error in errors.keys()]) messages.error(request, message=message) else: - messages.success(request, message='Survey was successfully sent') + messages.success(request, message="Survey was successfully sent") @admin.register(UserProxy) class UserAdmin(UserAdmin): - list_display = ('username', 'email', 'first_name', 'last_name') + list_display = ("username", "email", "first_name", "last_name") actions = [send_bulk_survey] 
-@admin.display(description='Send General NPS Survey') +@admin.display(description="Send General NPS Survey") def send_bulk_cohort_user_survey(modeladmin, request, queryset): from django.contrib import messages @@ -69,24 +72,24 @@ def send_bulk_cohort_user_survey(modeladmin, request, queryset): logger.fatal(error) if errors: - message = ' - '.join([f'{error} ({errors[error]})' for error in errors.keys()]) + message = " - ".join([f"{error} ({errors[error]})" for error in errors.keys()]) messages.error(request, message=message) else: - messages.success(request, message='Survey was successfully sent') + messages.success(request, message="Survey was successfully sent") -@admin.display(description='Generate review requests') +@admin.display(description="Generate review requests") def generate_review_requests(modeladmin, request, queryset): cus = queryset.all() for cu in cus: - if cu.educational_status != 'GRADUATED': - messages.success(request, message='All selected students must have graduated') + if cu.educational_status != "GRADUATED": + messages.success(request, message="All selected students must have graduated") return False try: for cu in cus: create_user_graduation_reviews(cu.user, cu.cohort) - messages.success(request, message='Review request were successfully generated') + messages.success(request, message="Review request were successfully generated") except Exception as e: messages.error(request, message=str(e)) @@ -101,10 +104,10 @@ class CohortUserAdmin(AdmissionsCohortUserAdmin): @admin.register(CohortProxy) class CohortAdmin(AdmissionsCohortAdmin): - list_display = ('id', 'slug', 'stage', 'name', 'kickoff_date', 'syllabus_version', 'schedule') + list_display = ("id", "slug", "stage", "name", "kickoff_date", "syllabus_version", "schedule") -@admin.display(description='Add academy to answer') +@admin.display(description="Add academy to answer") def add_academy_to_answer(modeladmin, request, queryset): for answer in queryset: @@ -119,110 +122,116 @@ def add_academy_to_answer(modeladmin, request, queryset): class AnswerTypeFilter(admin.SimpleListFilter): - title = 'Answer Type' + title = "Answer Type" - parameter_name = 'answer_type' + parameter_name = "answer_type" def lookups(self, request, model_admin): return ( - ('academy', 'Academy'), - ('cohort', 'Cohort'), - ('mentor', 'Mentor'), - ('session', 'Session'), - ('event', 'Event'), + ("academy", "Academy"), + ("cohort", "Cohort"), + ("mentor", "Mentor"), + ("session", "Session"), + ("event", "Event"), ) def queryset(self, request, queryset): - if self.value() == 'mentor': + if self.value() == "mentor": return queryset.filter(event__isnull=True, mentorship_session__isnull=True, mentor__isnull=False) - if self.value() == 'session': + if self.value() == "session": return queryset.filter(mentorship_session__isnull=False) - if self.value() == 'event': + if self.value() == "event": return queryset.filter(event__isnull=False, mentorship_session__isnull=True) - if self.value() == 'event': - return queryset.filter(academy__isnull=True, - cohort__isnull=True, - event__isnull=True, - mentorship_session__isnull=True, - mentor__isnull=True) - - if self.value() == 'cohort': - return queryset.filter(academy__isnull=False, - cohort__isnull=False, - event__isnull=True, - mentorship_session__isnull=True, - mentor__isnull=True) - - if self.value() == 'academy': - return queryset.filter(academy__isnull=False, - cohort__isnull=True, - event__isnull=True, - mentorship_session__isnull=True, - mentor__isnull=True) + if self.value() == "event": + return 
queryset.filter( + academy__isnull=True, + cohort__isnull=True, + event__isnull=True, + mentorship_session__isnull=True, + mentor__isnull=True, + ) + + if self.value() == "cohort": + return queryset.filter( + academy__isnull=False, + cohort__isnull=False, + event__isnull=True, + mentorship_session__isnull=True, + mentor__isnull=True, + ) + + if self.value() == "academy": + return queryset.filter( + academy__isnull=False, + cohort__isnull=True, + event__isnull=True, + mentorship_session__isnull=True, + mentor__isnull=True, + ) @admin.register(Answer) class AnswerAdmin(admin.ModelAdmin, AdminExportCsvMixin): - list_display = ('status', 'user', 'academy', 'cohort', 'mentor', 'score', 'opened_at', 'created_at', 'answer_url') - search_fields = ['user__first_name', 'user__last_name', 'user__email', 'cohort__slug'] - list_filter = [AnswerTypeFilter, 'status', 'score', 'academy__slug', 'cohort__slug'] - actions = ['export_as_csv', add_academy_to_answer] - raw_id_fields = ['user', 'cohort', 'mentor'] + list_display = ("status", "user", "academy", "cohort", "mentor", "score", "opened_at", "created_at", "answer_url") + search_fields = ["user__first_name", "user__last_name", "user__email", "cohort__slug"] + list_filter = [AnswerTypeFilter, "status", "score", "academy__slug", "cohort__slug"] + actions = ["export_as_csv", add_academy_to_answer] + raw_id_fields = ["user", "cohort", "mentor"] def answer_url(self, obj): - url = 'https://nps.4geeks.com/' + str(obj.id) + url = "https://nps.4geeks.com/" + str(obj.id) return format_html(f"<a rel='noopener noreferrer' target='_blank' href='{url}'>open answer</a>") # def entity(self, object): # return f"{object.entity_slug} (id:{str(object.entity_id)})" -@admin.display(description='Send survey to all cohort students') +@admin.display(description="Send survey to all cohort students") def send_big_cohort_bulk_survey(modeladmin, request, queryset): - logger.debug('send_big_cohort_bulk_survey called') + logger.debug("send_big_cohort_bulk_survey called") # cohort_ids = queryset.values_list('id', flat=True) surveys = queryset.all() for s in surveys: - logger.debug(f'Sending survey {s.id}') + logger.debug(f"Sending survey {s.id}") try: send_survey_group(survey=s) except Exception as e: - s.status = 'FATAL' - s.status_json = json.dumps({'errors': [str(e)]}) + s.status = "FATAL" + s.status_json = json.dumps({"errors": [str(e)]}) logger.fatal(str(e)) - if s.status != 'SENT': - messages.error(request, message='Some surveys have not been sent') + if s.status != "SENT": + messages.error(request, message="Some surveys have not been sent") s.save() - logger.info('All surveys scheduled to send for cohorts') + logger.info("All surveys scheduled to send for cohorts") class SentFilter(admin.SimpleListFilter): - title = 'Sent tag' + title = "Sent tag" - parameter_name = 'is_sent' + parameter_name = "is_sent" def lookups(self, request, model_admin): return ( - ('yes', 'Sent'), - ('no', 'Not yet sent'), + ("yes", "Sent"), + ("no", "Not yet sent"), ) def queryset(self, request, queryset): - if self.value() == 'yes': + if self.value() == "yes": return queryset.filter(sent_at__isnull=False) - if self.value() == 'no': + if self.value() == "no": return queryset.filter(sent_at__isnull=True) @@ -233,44 +242,45 @@ def fill_sent_at_with_created_at(modeladmin, request, queryset): s.save() -@admin.display(description='Recalculate all Survey scores and response rate') +@admin.display(description="Recalculate all Survey scores and response rate") def calculate_survey_scores(modeladmin, request, 
queryset): - for id in Survey.objects.all().values_list('id', flat=True): + for id in Survey.objects.all().values_list("id", flat=True): recalculate_survey_scores.delay(id) @admin.register(Survey) class SurveyAdmin(admin.ModelAdmin): - list_display = ('id', 'cohort', 'status', 'duration', 'created_at', 'sent_at', 'survey_url') - search_fields = ['cohort__slug', 'cohort__academy__slug', 'cohort__name', 'cohort__academy__name'] - list_filter = [SentFilter, 'status', 'cohort__academy__slug'] - raw_id_fields = ['cohort'] + list_display = ("id", "cohort", "status", "duration", "created_at", "sent_at", "survey_url") + search_fields = ["cohort__slug", "cohort__academy__slug", "cohort__name", "cohort__academy__name"] + list_filter = [SentFilter, "status", "cohort__academy__slug"] + raw_id_fields = ["cohort"] actions = [send_big_cohort_bulk_survey, fill_sent_at_with_created_at, calculate_survey_scores] + change_field( - ['PENDING', 'SENT', 'PARTIAL', 'FATAL'], name='status') + ["PENDING", "SENT", "PARTIAL", "FATAL"], name="status" + ) def survey_url(self, obj): - url = 'https://nps.4geeks.com/survey/' + str(obj.id) + url = "https://nps.4geeks.com/survey/" + str(obj.id) return format_html(f"<a rel='noopener noreferrer' target='_blank' href='{url}'>open survey</a>") @admin.register(Review) class ReviewAdmin(admin.ModelAdmin): - search_fields = ['author__first_name', 'author__last_name', 'author__email', 'cohort__slug'] - list_display = ('id', 'current_status', 'author', 'cohort', 'nps_previous_rating', 'total_rating', 'platform') - readonly_fields = ['nps_previous_rating'] - list_filter = ['status', 'cohort__academy__slug', 'platform'] - raw_id_fields = ['author', 'cohort'] + search_fields = ["author__first_name", "author__last_name", "author__email", "cohort__slug"] + list_display = ("id", "current_status", "author", "cohort", "nps_previous_rating", "total_rating", "platform") + readonly_fields = ["nps_previous_rating"] + list_filter = ["status", "cohort__academy__slug", "platform"] + raw_id_fields = ["author", "cohort"] def current_status(self, obj): colors = { - 'DONE': 'bg-success', - 'IGNORE': '', - 'PENDING': 'bg-warning', + "DONE": "bg-success", + "IGNORE": "", + "PENDING": "bg-warning", } return format_html(f"<span class='badge {colors[obj.status]}'>{obj.status}</span>") @admin.register(ReviewPlatform) class ReviewPlatformAdmin(admin.ModelAdmin): - list_display = ('slug', 'name') + list_display = ("slug", "name") diff --git a/breathecode/feedback/apps.py b/breathecode/feedback/apps.py index 9f406226f..d11a79741 100644 --- a/breathecode/feedback/apps.py +++ b/breathecode/feedback/apps.py @@ -5,8 +5,8 @@ class FeedbackConfig(AppConfig): - name = 'breathecode.feedback' + name = "breathecode.feedback" def ready(self): - logger.debug('Loading feedback.receivers') + logger.debug("Loading feedback.receivers") from . 
import receivers # noqa: F401 diff --git a/breathecode/feedback/management/commands/remove_invalid_answers.py b/breathecode/feedback/management/commands/remove_invalid_answers.py index 96c69a862..32e429838 100644 --- a/breathecode/feedback/management/commands/remove_invalid_answers.py +++ b/breathecode/feedback/management/commands/remove_invalid_answers.py @@ -4,21 +4,24 @@ class Command(BaseCommand): - help = 'Remove invalid answers keeping the survey answered' + help = "Remove invalid answers keeping the survey answered" def handle(self, *args, **options): for survey in Survey.objects.filter(): # prevent remove answers was answered or opened - pending_answers = Answer.objects.filter(survey=survey, - cohort=survey.cohort).exclude(status__in=['ANSWERED', 'OPENED']) + pending_answers = Answer.objects.filter(survey=survey, cohort=survey.cohort).exclude( + status__in=["ANSWERED", "OPENED"] + ) user_ids = {x.user.id for x in pending_answers} for user_id in user_ids: # if the student is not active or graduate, remove all the answers related to this cohort - if CohortUser.objects.filter( - user__id=user_id, - cohort=survey.cohort).exclude(educational_status__in=['ACTIVE', 'GRADUATED']).exists(): + if ( + CohortUser.objects.filter(user__id=user_id, cohort=survey.cohort) + .exclude(educational_status__in=["ACTIVE", "GRADUATED"]) + .exists() + ): pending_answers.filter(user__id=user_id).delete() - self.stdout.write(self.style.SUCCESS('Successfully deleted invalid answers')) + self.stdout.write(self.style.SUCCESS("Successfully deleted invalid answers")) diff --git a/breathecode/feedback/migrations/0001_initial.py b/breathecode/feedback/migrations/0001_initial.py index 8409d8f7c..78f486883 100644 --- a/breathecode/feedback/migrations/0001_initial.py +++ b/breathecode/feedback/migrations/0001_initial.py @@ -15,24 +15,38 @@ class Migration(migrations.Migration): operations = [ migrations.CreateModel( - name='Answer', + name="Answer", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('title', models.CharField(blank=True, max_length=200)), - ('score', models.CharField(blank=True, max_length=250)), - ('comment', models.CharField(blank=True, max_length=255)), - ('enity_type', - models.CharField(choices=[('EVENT', 'Event'), ('CERTIFICATE', 'Certificate'), ('WORKSHOP', 'Workshop'), - ('MENTOR', 'Mentor'), ('ACADEMY', 'Academy'), ('COHORT', 'Cohort')], - max_length=12)), - ('entity_id', models.PositiveIntegerField()), - ('entity_slug', models.SlugField(blank=True, max_length=255)), - ('user', - models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to=settings.AUTH_USER_MODEL)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("title", models.CharField(blank=True, max_length=200)), + ("score", models.CharField(blank=True, max_length=250)), + ("comment", models.CharField(blank=True, max_length=255)), + ( + "enity_type", + models.CharField( + choices=[ + ("EVENT", "Event"), + ("CERTIFICATE", "Certificate"), + ("WORKSHOP", "Workshop"), + ("MENTOR", "Mentor"), + ("ACADEMY", "Academy"), + ("COHORT", "Cohort"), + ], + max_length=12, + ), + ), + ("entity_id", models.PositiveIntegerField()), + ("entity_slug", models.SlugField(blank=True, max_length=255)), + ( + "user", + models.ForeignKey( + blank=True, + default=None, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to=settings.AUTH_USER_MODEL, + ), + ), ], ), ] diff --git 
a/breathecode/feedback/migrations/0002_auto_20200806_0415.py b/breathecode/feedback/migrations/0002_auto_20200806_0415.py index 35f2434d6..e50902756 100644 --- a/breathecode/feedback/migrations/0002_auto_20200806_0415.py +++ b/breathecode/feedback/migrations/0002_auto_20200806_0415.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('feedback', '0001_initial'), + ("feedback", "0001_initial"), ] operations = [ migrations.RenameField( - model_name='answer', - old_name='enity_type', - new_name='entity_type', + model_name="answer", + old_name="enity_type", + new_name="entity_type", ), ] diff --git a/breathecode/feedback/migrations/0003_auto_20200806_0417.py b/breathecode/feedback/migrations/0003_auto_20200806_0417.py index 452732f03..6ac0838e1 100644 --- a/breathecode/feedback/migrations/0003_auto_20200806_0417.py +++ b/breathecode/feedback/migrations/0003_auto_20200806_0417.py @@ -7,19 +7,19 @@ class Migration(migrations.Migration): dependencies = [ - ('feedback', '0002_auto_20200806_0415'), + ("feedback", "0002_auto_20200806_0415"), ] operations = [ migrations.AddField( - model_name='answer', - name='created_at', + model_name="answer", + name="created_at", field=models.DateTimeField(auto_now_add=True, default=django.utils.timezone.now), preserve_default=False, ), migrations.AddField( - model_name='answer', - name='updated_at', + model_name="answer", + name="updated_at", field=models.DateTimeField(auto_now=True), ), ] diff --git a/breathecode/feedback/migrations/0004_auto_20201006_0058.py b/breathecode/feedback/migrations/0004_auto_20201006_0058.py index dd1102f2b..e02732e30 100644 --- a/breathecode/feedback/migrations/0004_auto_20201006_0058.py +++ b/breathecode/feedback/migrations/0004_auto_20201006_0058.py @@ -9,83 +9,89 @@ class Migration(migrations.Migration): dependencies = [ - ('auth', '0012_alter_user_first_name_max_length'), + ("auth", "0012_alter_user_first_name_max_length"), migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ('events', '0004_auto_20200806_0042'), - ('admissions', '0011_auto_20201006_0058'), - ('feedback', '0003_auto_20200806_0417'), + ("events", "0004_auto_20200806_0042"), + ("admissions", "0011_auto_20201006_0058"), + ("feedback", "0003_auto_20200806_0417"), ] operations = [ migrations.CreateModel( - name='UserProxy', + name="UserProxy", fields=[], options={ - 'proxy': True, - 'indexes': [], - 'constraints': [], + "proxy": True, + "indexes": [], + "constraints": [], }, - bases=('auth.user', ), + bases=("auth.user",), managers=[ - ('objects', django.contrib.auth.models.UserManager()), + ("objects", django.contrib.auth.models.UserManager()), ], ), migrations.RemoveField( - model_name='answer', - name='entity_id', + model_name="answer", + name="entity_id", ), migrations.RemoveField( - model_name='answer', - name='entity_slug', + model_name="answer", + name="entity_slug", ), migrations.RemoveField( - model_name='answer', - name='entity_type', + model_name="answer", + name="entity_type", ), migrations.AddField( - model_name='answer', - name='academy', - field=models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to='admissions.academy'), + model_name="answer", + name="academy", + field=models.ForeignKey( + blank=True, + default=None, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to="admissions.academy", + ), ), migrations.AddField( - model_name='answer', - name='cohort', - field=models.ForeignKey(blank=True, - default=None, - null=True, - 
on_delete=django.db.models.deletion.SET_NULL, - to='admissions.cohort'), + model_name="answer", + name="cohort", + field=models.ForeignKey( + blank=True, + default=None, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to="admissions.cohort", + ), ), migrations.AddField( - model_name='answer', - name='event', - field=models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to='events.event'), + model_name="answer", + name="event", + field=models.ForeignKey( + blank=True, default=None, null=True, on_delete=django.db.models.deletion.SET_NULL, to="events.event" + ), ), migrations.AddField( - model_name='answer', - name='mentor', - field=models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - related_name='mentor_set', - to=settings.AUTH_USER_MODEL), + model_name="answer", + name="mentor", + field=models.ForeignKey( + blank=True, + default=None, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + related_name="mentor_set", + to=settings.AUTH_USER_MODEL, + ), ), migrations.AlterField( - model_name='answer', - name='user', - field=models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to=settings.AUTH_USER_MODEL), + model_name="answer", + name="user", + field=models.ForeignKey( + blank=True, + default=None, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to=settings.AUTH_USER_MODEL, + ), ), ] diff --git a/breathecode/feedback/migrations/0005_surveylog.py b/breathecode/feedback/migrations/0005_surveylog.py index 00f7f8ae4..31c1b5a43 100644 --- a/breathecode/feedback/migrations/0005_surveylog.py +++ b/breathecode/feedback/migrations/0005_surveylog.py @@ -9,24 +9,28 @@ class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ('feedback', '0004_auto_20201006_0058'), + ("feedback", "0004_auto_20201006_0058"), ] operations = [ migrations.CreateModel( - name='SurveyLog', + name="SurveyLog", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('answered_at', models.DateTimeField(blank=True, default=None, null=True)), - ('token', models.CharField(max_length=255)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('user', - models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to=settings.AUTH_USER_MODEL)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("answered_at", models.DateTimeField(blank=True, default=None, null=True)), + ("token", models.CharField(max_length=255)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ( + "user", + models.ForeignKey( + blank=True, + default=None, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to=settings.AUTH_USER_MODEL, + ), + ), ], ), ] diff --git a/breathecode/feedback/migrations/0006_auto_20201009_2018.py b/breathecode/feedback/migrations/0006_auto_20201009_2018.py index 8ae50ef66..677ae8c84 100644 --- a/breathecode/feedback/migrations/0006_auto_20201009_2018.py +++ b/breathecode/feedback/migrations/0006_auto_20201009_2018.py @@ -6,27 +6,30 @@ class Migration(migrations.Migration): dependencies = [ - ('feedback', '0005_surveylog'), + ("feedback", "0005_surveylog"), ] operations = [ 
migrations.AddField( - model_name='answer', - name='status', - field=models.CharField(choices=[('PENDING', 'Pending'), ('SENT', 'Send'), ('ANSWERED', 'Answered'), - ('EXPIRED', 'Expired')], - default='PENDING', - max_length=15), + model_name="answer", + name="status", + field=models.CharField( + choices=[("PENDING", "Pending"), ("SENT", "Send"), ("ANSWERED", "Answered"), ("EXPIRED", "Expired")], + default="PENDING", + max_length=15, + ), ), migrations.AlterField( - model_name='answer', - name='comment', + model_name="answer", + name="comment", field=models.CharField(blank=True, default=None, max_length=255, null=True), ), migrations.AlterField( - model_name='answer', - name='score', + model_name="answer", + name="score", field=models.CharField(blank=True, default=None, max_length=250, null=True), ), - migrations.DeleteModel(name='SurveyLog', ), + migrations.DeleteModel( + name="SurveyLog", + ), ] diff --git a/breathecode/feedback/migrations/0007_auto_20201010_0257.py b/breathecode/feedback/migrations/0007_auto_20201010_0257.py index a0fb0361b..a283a4dbb 100644 --- a/breathecode/feedback/migrations/0007_auto_20201010_0257.py +++ b/breathecode/feedback/migrations/0007_auto_20201010_0257.py @@ -6,16 +6,17 @@ class Migration(migrations.Migration): dependencies = [ - ('feedback', '0006_auto_20201009_2018'), + ("feedback", "0006_auto_20201009_2018"), ] operations = [ migrations.AlterField( - model_name='answer', - name='status', - field=models.CharField(choices=[('PENDING', 'Pending'), ('SENT', 'Sent'), ('ANSWERED', 'Answered'), - ('EXPIRED', 'Expired')], - default='PENDING', - max_length=15), + model_name="answer", + name="status", + field=models.CharField( + choices=[("PENDING", "Pending"), ("SENT", "Sent"), ("ANSWERED", "Answered"), ("EXPIRED", "Expired")], + default="PENDING", + max_length=15, + ), ), ] diff --git a/breathecode/feedback/migrations/0008_cohortproxy.py b/breathecode/feedback/migrations/0008_cohortproxy.py index fe4bec564..dd03df7c0 100644 --- a/breathecode/feedback/migrations/0008_cohortproxy.py +++ b/breathecode/feedback/migrations/0008_cohortproxy.py @@ -6,19 +6,19 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0011_auto_20201006_0058'), - ('feedback', '0007_auto_20201010_0257'), + ("admissions", "0011_auto_20201006_0058"), + ("feedback", "0007_auto_20201010_0257"), ] operations = [ migrations.CreateModel( - name='CohortProxy', + name="CohortProxy", fields=[], options={ - 'proxy': True, - 'indexes': [], - 'constraints': [], + "proxy": True, + "indexes": [], + "constraints": [], }, - bases=('admissions.cohort', ), + bases=("admissions.cohort",), ), ] diff --git a/breathecode/feedback/migrations/0009_auto_20201027_0234.py b/breathecode/feedback/migrations/0009_auto_20201027_0234.py index e2141c1f4..c8f00c10e 100644 --- a/breathecode/feedback/migrations/0009_auto_20201027_0234.py +++ b/breathecode/feedback/migrations/0009_auto_20201027_0234.py @@ -6,23 +6,23 @@ class Migration(migrations.Migration): dependencies = [ - ('feedback', '0008_cohortproxy'), + ("feedback", "0008_cohortproxy"), ] operations = [ migrations.AddField( - model_name='answer', - name='highest', - field=models.CharField(default='very likely', max_length=50), + model_name="answer", + name="highest", + field=models.CharField(default="very likely", max_length=50), ), migrations.AddField( - model_name='answer', - name='lang', - field=models.CharField(blank=True, default='en', max_length=3), + model_name="answer", + name="lang", + field=models.CharField(blank=True, default="en", 
max_length=3), ), migrations.AddField( - model_name='answer', - name='lowest', - field=models.CharField(default='not likely', max_length=50), + model_name="answer", + name="lowest", + field=models.CharField(default="not likely", max_length=50), ), ] diff --git a/breathecode/feedback/migrations/0010_auto_20201029_0857.py b/breathecode/feedback/migrations/0010_auto_20201029_0857.py index 232ce38a3..106388bbe 100644 --- a/breathecode/feedback/migrations/0010_auto_20201029_0857.py +++ b/breathecode/feedback/migrations/0010_auto_20201029_0857.py @@ -6,21 +6,22 @@ class Migration(migrations.Migration): dependencies = [ - ('feedback', '0009_auto_20201027_0234'), + ("feedback", "0009_auto_20201027_0234"), ] operations = [ migrations.AddField( - model_name='answer', - name='opened_at', + model_name="answer", + name="opened_at", field=models.DateTimeField(blank=True, default=None, null=True), ), migrations.AlterField( - model_name='answer', - name='status', - field=models.CharField(choices=[('PENDING', 'Pending'), ('SENT', 'Sent'), ('OPENED', 'Opened'), - ('EXPIRED', 'Expired')], - default='PENDING', - max_length=15), + model_name="answer", + name="status", + field=models.CharField( + choices=[("PENDING", "Pending"), ("SENT", "Sent"), ("OPENED", "Opened"), ("EXPIRED", "Expired")], + default="PENDING", + max_length=15, + ), ), ] diff --git a/breathecode/feedback/migrations/0011_cohortuserproxy.py b/breathecode/feedback/migrations/0011_cohortuserproxy.py index f9c65d368..5f781abae 100644 --- a/breathecode/feedback/migrations/0011_cohortuserproxy.py +++ b/breathecode/feedback/migrations/0011_cohortuserproxy.py @@ -6,19 +6,19 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0011_auto_20201006_0058'), - ('feedback', '0010_auto_20201029_0857'), + ("admissions", "0011_auto_20201006_0058"), + ("feedback", "0010_auto_20201029_0857"), ] operations = [ migrations.CreateModel( - name='CohortUserProxy', + name="CohortUserProxy", fields=[], options={ - 'proxy': True, - 'indexes': [], - 'constraints': [], + "proxy": True, + "indexes": [], + "constraints": [], }, - bases=('admissions.cohortuser', ), + bases=("admissions.cohortuser",), ), ] diff --git a/breathecode/feedback/migrations/0012_answer_token.py b/breathecode/feedback/migrations/0012_answer_token.py index bb48f9dc2..04f47aa56 100644 --- a/breathecode/feedback/migrations/0012_answer_token.py +++ b/breathecode/feedback/migrations/0012_answer_token.py @@ -7,18 +7,20 @@ class Migration(migrations.Migration): dependencies = [ - ('authenticate', '0017_auto_20210113_0644'), - ('feedback', '0011_cohortuserproxy'), + ("authenticate", "0017_auto_20210113_0644"), + ("feedback", "0011_cohortuserproxy"), ] operations = [ migrations.AddField( - model_name='answer', - name='token', - field=models.OneToOneField(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to='authenticate.token'), + model_name="answer", + name="token", + field=models.OneToOneField( + blank=True, + default=None, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to="authenticate.token", + ), ), ] diff --git a/breathecode/feedback/migrations/0013_auto_20210127_2336.py b/breathecode/feedback/migrations/0013_auto_20210127_2336.py index 16392ad27..76f1536b4 100644 --- a/breathecode/feedback/migrations/0013_auto_20210127_2336.py +++ b/breathecode/feedback/migrations/0013_auto_20210127_2336.py @@ -8,45 +8,61 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0014_auto_20201218_0534'), - ('feedback', 
'0012_answer_token'), + ("admissions", "0014_auto_20201218_0534"), + ("feedback", "0012_answer_token"), ] operations = [ migrations.CreateModel( - name='Survey', + name="Survey", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('lang', models.CharField(blank=True, default='en', max_length=3)), - ('avg_score', - models.CharField(blank=True, - default=None, - help_text='The avg from all the answers taken under this survey', - max_length=250, - null=True)), - ('status', - models.CharField(choices=[('PENDING', 'Pending'), ('SENT', 'Sent'), ('OPENED', 'Opened'), - ('EXPIRED', 'Expired')], - default='PENDING', - max_length=15)), - ('duration', - models.DurationField(default=datetime.timedelta(days=1), - help_text='No one will be able to answer after this period of time')), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('cohort', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='admissions.cohort')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("lang", models.CharField(blank=True, default="en", max_length=3)), + ( + "avg_score", + models.CharField( + blank=True, + default=None, + help_text="The avg from all the answers taken under this survey", + max_length=250, + null=True, + ), + ), + ( + "status", + models.CharField( + choices=[ + ("PENDING", "Pending"), + ("SENT", "Sent"), + ("OPENED", "Opened"), + ("EXPIRED", "Expired"), + ], + default="PENDING", + max_length=15, + ), + ), + ( + "duration", + models.DurationField( + default=datetime.timedelta(days=1), + help_text="No one will be able to answer after this period of time", + ), + ), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("cohort", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="admissions.cohort")), ], ), migrations.AddField( - model_name='answer', - name='survey', + model_name="answer", + name="survey", field=models.ForeignKey( blank=True, default=None, - help_text= - 'You can group one or more answers in one survey, the survey does not belong to any student in particular but answers belong to the student that answered', + help_text="You can group one or more answers in one survey, the survey does not belong to any student in particular but answers belong to the student that answered", null=True, on_delete=django.db.models.deletion.SET_NULL, - to='feedback.survey'), + to="feedback.survey", + ), ), ] diff --git a/breathecode/feedback/migrations/0014_auto_20210128_1745.py b/breathecode/feedback/migrations/0014_auto_20210128_1745.py index 471e9bd42..d0bb7854a 100644 --- a/breathecode/feedback/migrations/0014_auto_20210128_1745.py +++ b/breathecode/feedback/migrations/0014_auto_20210128_1745.py @@ -6,36 +6,45 @@ class Migration(migrations.Migration): dependencies = [ - ('feedback', '0013_auto_20210127_2336'), + ("feedback", "0013_auto_20210127_2336"), ] operations = [ migrations.AddField( - model_name='survey', - name='max_assistants_to_ask', + model_name="survey", + name="max_assistants_to_ask", field=models.IntegerField(default=2), ), migrations.AddField( - model_name='survey', - name='max_teachers_to_ask', + model_name="survey", + name="max_teachers_to_ask", field=models.IntegerField(default=1), ), migrations.AlterField( - model_name='answer', - name='status', - field=models.CharField(choices=[('PENDING', 'Pending'), ('SENT', 'Sent'), 
('ANSWERED', 'Answered'), - ('OPENED', 'Opened'), ('EXPIRED', 'Expired')], - default='PENDING', - max_length=15), + model_name="answer", + name="status", + field=models.CharField( + choices=[ + ("PENDING", "Pending"), + ("SENT", "Sent"), + ("ANSWERED", "Answered"), + ("OPENED", "Opened"), + ("EXPIRED", "Expired"), + ], + default="PENDING", + max_length=15, + ), ), migrations.AlterField( - model_name='survey', - name='avg_score', - field=models.CharField(blank=True, - default=None, - editable=False, - help_text='The avg from all the answers taken under this survey', - max_length=250, - null=True), + model_name="survey", + name="avg_score", + field=models.CharField( + blank=True, + default=None, + editable=False, + help_text="The avg from all the answers taken under this survey", + max_length=250, + null=True, + ), ), ] diff --git a/breathecode/feedback/migrations/0015_auto_20210129_2143.py b/breathecode/feedback/migrations/0015_auto_20210129_2143.py index 4caa0aeb5..742ffcfcd 100644 --- a/breathecode/feedback/migrations/0015_auto_20210129_2143.py +++ b/breathecode/feedback/migrations/0015_auto_20210129_2143.py @@ -6,21 +6,22 @@ class Migration(migrations.Migration): dependencies = [ - ('feedback', '0014_auto_20210128_1745'), + ("feedback", "0014_auto_20210128_1745"), ] operations = [ migrations.AddField( - model_name='survey', - name='status_json', + model_name="survey", + name="status_json", field=models.JSONField(blank=True, default=None, null=True), ), migrations.AlterField( - model_name='survey', - name='status', - field=models.CharField(choices=[('SENT', 'Sent'), ('PENDING', 'Pending'), ('PARTIAL', 'Partial'), - ('FATAL', 'FATAL')], - default='PENDING', - max_length=15), + model_name="survey", + name="status", + field=models.CharField( + choices=[("SENT", "Sent"), ("PENDING", "Pending"), ("PARTIAL", "Partial"), ("FATAL", "FATAL")], + default="PENDING", + max_length=15, + ), ), ] diff --git a/breathecode/feedback/migrations/0016_auto_20210201_1700.py b/breathecode/feedback/migrations/0016_auto_20210201_1700.py index cd8235f87..69033f094 100644 --- a/breathecode/feedback/migrations/0016_auto_20210201_1700.py +++ b/breathecode/feedback/migrations/0016_auto_20210201_1700.py @@ -6,21 +6,22 @@ class Migration(migrations.Migration): dependencies = [ - ('feedback', '0015_auto_20210129_2143'), + ("feedback", "0015_auto_20210129_2143"), ] operations = [ migrations.AlterField( - model_name='answer', - name='comment', + model_name="answer", + name="comment", field=models.TextField(blank=True, default=None, max_length=1000, null=True), ), migrations.AlterField( - model_name='survey', - name='status', - field=models.CharField(choices=[('SENT', 'Sent'), ('PENDING', 'Pending'), ('PARTIAL', 'Partial'), - ('FATAL', 'Fatal')], - default='PENDING', - max_length=15), + model_name="survey", + name="status", + field=models.CharField( + choices=[("SENT", "Sent"), ("PENDING", "Pending"), ("PARTIAL", "Partial"), ("FATAL", "Fatal")], + default="PENDING", + max_length=15, + ), ), ] diff --git a/breathecode/feedback/migrations/0017_survey_sent_at.py b/breathecode/feedback/migrations/0017_survey_sent_at.py index f82b0b085..bfac6dcd2 100644 --- a/breathecode/feedback/migrations/0017_survey_sent_at.py +++ b/breathecode/feedback/migrations/0017_survey_sent_at.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('feedback', '0016_auto_20210201_1700'), + ("feedback", "0016_auto_20210201_1700"), ] operations = [ migrations.AddField( - model_name='survey', - name='sent_at', + 
model_name="survey", + name="sent_at", field=models.DateTimeField(blank=True, default=None, null=True), ), ] diff --git a/breathecode/feedback/migrations/0018_alter_answer_score.py b/breathecode/feedback/migrations/0018_alter_answer_score.py index a41dd286c..0afbafc9b 100644 --- a/breathecode/feedback/migrations/0018_alter_answer_score.py +++ b/breathecode/feedback/migrations/0018_alter_answer_score.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('feedback', '0017_survey_sent_at'), + ("feedback", "0017_survey_sent_at"), ] operations = [ migrations.AlterField( - model_name='answer', - name='score', + model_name="answer", + name="score", field=models.IntegerField(blank=True, default=None, null=True), ), ] diff --git a/breathecode/feedback/migrations/0019_review_reviewplatform.py b/breathecode/feedback/migrations/0019_review_reviewplatform.py index 476401ac2..bedb9b533 100644 --- a/breathecode/feedback/migrations/0019_review_reviewplatform.py +++ b/breathecode/feedback/migrations/0019_review_reviewplatform.py @@ -8,52 +8,62 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0023_auto_20210812_2153'), + ("admissions", "0023_auto_20210812_2153"), migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ('feedback', '0018_alter_answer_score'), + ("feedback", "0018_alter_answer_score"), ] operations = [ migrations.CreateModel( - name='ReviewPlatform', + name="ReviewPlatform", fields=[ - ('slug', models.SlugField(primary_key=True, serialize=False)), - ('name', models.CharField(max_length=100)), - ('website', models.URLField()), - ('review_signup', - models.URLField(blank=True, default=None, help_text='Give URL to create a new review', null=True)), - ('contact_email', models.EmailField(max_length=254)), - ('contact_name', models.EmailField(blank=True, default=None, max_length=254, null=True)), - ('contact_phone', models.CharField(blank=True, default=None, max_length=17, null=True)), + ("slug", models.SlugField(primary_key=True, serialize=False)), + ("name", models.CharField(max_length=100)), + ("website", models.URLField()), + ( + "review_signup", + models.URLField(blank=True, default=None, help_text="Give URL to create a new review", null=True), + ), + ("contact_email", models.EmailField(max_length=254)), + ("contact_name", models.EmailField(blank=True, default=None, max_length=254, null=True)), + ("contact_phone", models.CharField(blank=True, default=None, max_length=17, null=True)), ], ), migrations.CreateModel( - name='Review', + name="Review", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('total_rating', models.FloatField(blank=True, default=None, null=True)), - ('public_url', models.URLField(blank=True, default=None, null=True)), - ('status', - models.CharField(choices=[('PENDING', 'Pending'), ('DONE', 'Done'), ('IGNORE', 'Ignore')], - default='PENDING', - help_text='Deleted reviews hav status=Ignore', - max_length=9)), - ('status_text', models.CharField(blank=True, default=None, max_length=255, null=True)), - ('comments', - models.TextField(blank=True, - default=None, - help_text='Student comments when leaving the review', - null=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), - ('cohort', - models.ForeignKey(blank=True, - null=True, - on_delete=django.db.models.deletion.CASCADE, - 
to='admissions.cohort')), - ('platform', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, - to='feedback.reviewplatform')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("total_rating", models.FloatField(blank=True, default=None, null=True)), + ("public_url", models.URLField(blank=True, default=None, null=True)), + ( + "status", + models.CharField( + choices=[("PENDING", "Pending"), ("DONE", "Done"), ("IGNORE", "Ignore")], + default="PENDING", + help_text="Deleted reviews hav status=Ignore", + max_length=9, + ), + ), + ("status_text", models.CharField(blank=True, default=None, max_length=255, null=True)), + ( + "comments", + models.TextField( + blank=True, default=None, help_text="Student comments when leaving the review", null=True + ), + ), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("author", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), + ( + "cohort", + models.ForeignKey( + blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to="admissions.cohort" + ), + ), + ( + "platform", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="feedback.reviewplatform"), + ), ], ), ] diff --git a/breathecode/feedback/migrations/0020_alter_review_status.py b/breathecode/feedback/migrations/0020_alter_review_status.py index 868ebbcb5..487d3ffcd 100644 --- a/breathecode/feedback/migrations/0020_alter_review_status.py +++ b/breathecode/feedback/migrations/0020_alter_review_status.py @@ -6,17 +6,18 @@ class Migration(migrations.Migration): dependencies = [ - ('feedback', '0019_review_reviewplatform'), + ("feedback", "0019_review_reviewplatform"), ] operations = [ migrations.AlterField( - model_name='review', - name='status', - field=models.CharField(choices=[('PENDING', 'Pending'), ('REQUESTED', 'Requested'), ('DONE', 'Done'), - ('IGNORE', 'Ignore')], - default='PENDING', - help_text='Deleted reviews hav status=Ignore', - max_length=9), + model_name="review", + name="status", + field=models.CharField( + choices=[("PENDING", "Pending"), ("REQUESTED", "Requested"), ("DONE", "Done"), ("IGNORE", "Ignore")], + default="PENDING", + help_text="Deleted reviews hav status=Ignore", + max_length=9, + ), ), ] diff --git a/breathecode/feedback/migrations/0021_review_nps_previous_rating.py b/breathecode/feedback/migrations/0021_review_nps_previous_rating.py index d9531beaf..4b68c624a 100644 --- a/breathecode/feedback/migrations/0021_review_nps_previous_rating.py +++ b/breathecode/feedback/migrations/0021_review_nps_previous_rating.py @@ -6,16 +6,15 @@ class Migration(migrations.Migration): dependencies = [ - ('feedback', '0020_alter_review_status'), + ("feedback", "0020_alter_review_status"), ] operations = [ migrations.AddField( - model_name='review', - name='nps_previous_rating', - field=models.FloatField(blank=True, - default=None, - help_text='Automatically calculated based on NPS survay responses', - null=True), + model_name="review", + name="nps_previous_rating", + field=models.FloatField( + blank=True, default=None, help_text="Automatically calculated based on NPS survay responses", null=True + ), ), ] diff --git a/breathecode/feedback/migrations/0022_answer_mentorship_session.py b/breathecode/feedback/migrations/0022_answer_mentorship_session.py index ddf6502d2..185473c79 100644 --- a/breathecode/feedback/migrations/0022_answer_mentorship_session.py +++ 
b/breathecode/feedback/migrations/0022_answer_mentorship_session.py @@ -7,18 +7,20 @@ class Migration(migrations.Migration): dependencies = [ - ('mentorship', '__first__'), - ('feedback', '0021_review_nps_previous_rating'), + ("mentorship", "__first__"), + ("feedback", "0021_review_nps_previous_rating"), ] operations = [ migrations.AddField( - model_name='answer', - name='mentorship_session', - field=models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to='mentorship.mentorshipsession'), + model_name="answer", + name="mentorship_session", + field=models.ForeignKey( + blank=True, + default=None, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to="mentorship.mentorshipsession", + ), ), ] diff --git a/breathecode/feedback/migrations/0023_answer_sent_at.py b/breathecode/feedback/migrations/0023_answer_sent_at.py index 721b74dcd..e7f24a60e 100644 --- a/breathecode/feedback/migrations/0023_answer_sent_at.py +++ b/breathecode/feedback/migrations/0023_answer_sent_at.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('feedback', '0022_answer_mentorship_session'), + ("feedback", "0022_answer_mentorship_session"), ] operations = [ migrations.AddField( - model_name='answer', - name='sent_at', + model_name="answer", + name="sent_at", field=models.DateTimeField(blank=True, default=None, null=True), ), ] diff --git a/breathecode/feedback/migrations/0024_survey_response_rate.py b/breathecode/feedback/migrations/0024_survey_response_rate.py index 0d5c38cd9..25dc99733 100644 --- a/breathecode/feedback/migrations/0024_survey_response_rate.py +++ b/breathecode/feedback/migrations/0024_survey_response_rate.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('feedback', '0023_answer_sent_at'), + ("feedback", "0023_answer_sent_at"), ] operations = [ migrations.AddField( - model_name='survey', - name='response_rate', + model_name="survey", + name="response_rate", field=models.FloatField(blank=True, default=None, null=True), ), ] diff --git a/breathecode/feedback/migrations/0025_alter_review_nps_previous_rating.py b/breathecode/feedback/migrations/0025_alter_review_nps_previous_rating.py index 598ba8125..3ef348fb7 100644 --- a/breathecode/feedback/migrations/0025_alter_review_nps_previous_rating.py +++ b/breathecode/feedback/migrations/0025_alter_review_nps_previous_rating.py @@ -6,16 +6,15 @@ class Migration(migrations.Migration): dependencies = [ - ('feedback', '0024_survey_response_rate'), + ("feedback", "0024_survey_response_rate"), ] operations = [ migrations.AlterField( - model_name='review', - name='nps_previous_rating', - field=models.FloatField(blank=True, - default=None, - help_text='Automatically calculated based on NPS survey responses', - null=True), + model_name="review", + name="nps_previous_rating", + field=models.FloatField( + blank=True, default=None, help_text="Automatically calculated based on NPS survey responses", null=True + ), ), ] diff --git a/breathecode/feedback/migrations/0026_auto_20220830_0808.py b/breathecode/feedback/migrations/0026_auto_20220830_0808.py index 02394c07d..9957bd3b7 100644 --- a/breathecode/feedback/migrations/0026_auto_20220830_0808.py +++ b/breathecode/feedback/migrations/0026_auto_20220830_0808.py @@ -6,17 +6,17 @@ class Migration(migrations.Migration): dependencies = [ - ('feedback', '0025_alter_review_nps_previous_rating'), + ("feedback", "0025_alter_review_nps_previous_rating"), ] operations = [ migrations.RemoveField( - 
model_name='survey', - name='avg_score', + model_name="survey", + name="avg_score", ), migrations.AddField( - model_name='survey', - name='scores', + model_name="survey", + name="scores", field=models.JSONField(blank=True, default=None, null=True), ), ] diff --git a/breathecode/feedback/migrations/0027_review_is_public.py b/breathecode/feedback/migrations/0027_review_is_public.py index 7e98d7408..1b828d57d 100644 --- a/breathecode/feedback/migrations/0027_review_is_public.py +++ b/breathecode/feedback/migrations/0027_review_is_public.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('feedback', '0026_auto_20220830_0808'), + ("feedback", "0026_auto_20220830_0808"), ] operations = [ migrations.AddField( - model_name='review', - name='is_public', + model_name="review", + name="is_public", field=models.BooleanField(default=False), ), ] diff --git a/breathecode/feedback/migrations/0028_review_lang.py b/breathecode/feedback/migrations/0028_review_lang.py index 1c9d4239a..514d0eff2 100644 --- a/breathecode/feedback/migrations/0028_review_lang.py +++ b/breathecode/feedback/migrations/0028_review_lang.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('feedback', '0027_review_is_public'), + ("feedback", "0027_review_is_public"), ] operations = [ migrations.AddField( - model_name='review', - name='lang', + model_name="review", + name="lang", field=models.CharField(blank=True, max_length=3, null=True), ), ] diff --git a/breathecode/feedback/models.py b/breathecode/feedback/models.py index dab548d5d..0cdd086a5 100644 --- a/breathecode/feedback/models.py +++ b/breathecode/feedback/models.py @@ -9,7 +9,7 @@ from breathecode.events.models import Event from breathecode.mentorship.models import MentorshipSession -__all__ = ['UserProxy', 'CohortUserProxy', 'CohortProxy', 'Survey', 'Answer'] +__all__ = ["UserProxy", "CohortUserProxy", "CohortProxy", "Survey", "Answer"] class UserProxy(User): @@ -30,15 +30,15 @@ class Meta: proxy = True -PENDING = 'PENDING' -SENT = 'SENT' -PARTIAL = 'PARTIAL' -FATAL = 'FATAL' +PENDING = "PENDING" +SENT = "SENT" +PARTIAL = "PARTIAL" +FATAL = "FATAL" SURVEY_STATUS = ( - (SENT, 'Sent'), - (PENDING, 'Pending'), - (PARTIAL, 'Partial'), - (FATAL, 'Fatal'), + (SENT, "Sent"), + (PENDING, "Pending"), + (PARTIAL, "Partial"), + (FATAL, "Fatal"), ) @@ -50,7 +50,7 @@ class Survey(models.Model): 3. How is the blabla.. 
""" - lang = models.CharField(max_length=3, blank=True, default='en') + lang = models.CharField(max_length=3, blank=True, default="en") cohort = models.ForeignKey(Cohort, on_delete=models.CASCADE) @@ -63,27 +63,28 @@ class Survey(models.Model): status = models.CharField(max_length=15, choices=SURVEY_STATUS, default=PENDING) status_json = models.JSONField(default=None, null=True, blank=True) - duration = models.DurationField(default=datetime.timedelta(hours=24), - help_text='No one will be able to answer after this period of time') + duration = models.DurationField( + default=datetime.timedelta(hours=24), help_text="No one will be able to answer after this period of time" + ) created_at = models.DateTimeField(auto_now_add=True, editable=True) updated_at = models.DateTimeField(auto_now=True, editable=False) sent_at = models.DateTimeField(default=None, null=True, blank=True) def __str__(self): - return 'Survey for ' + self.cohort.name + return "Survey for " + self.cohort.name -PENDING = 'PENDING' -SENT = 'SENT' -ANSWERED = 'ANSWERED' -OPENED = 'OPENED' -EXPIRED = 'EXPIRED' +PENDING = "PENDING" +SENT = "SENT" +ANSWERED = "ANSWERED" +OPENED = "OPENED" +EXPIRED = "EXPIRED" SURVEY_STATUS = ( - (PENDING, 'Pending'), - (SENT, 'Sent'), - (ANSWERED, 'Answered'), - (OPENED, 'Opened'), - (EXPIRED, 'Expired'), + (PENDING, "Pending"), + (SENT, "Sent"), + (ANSWERED, "Answered"), + (OPENED, "Opened"), + (EXPIRED, "Expired"), ) @@ -94,22 +95,17 @@ def __init__(self, *args, **kwargs): self.__old_status = self.status title = models.CharField(max_length=200, blank=True) - lowest = models.CharField(max_length=50, default='not likely') - highest = models.CharField(max_length=50, default='very likely') - lang = models.CharField(max_length=3, blank=True, default='en') + lowest = models.CharField(max_length=50, default="not likely") + highest = models.CharField(max_length=50, default="very likely") + lang = models.CharField(max_length=3, blank=True, default="en") event = models.ForeignKey(Event, on_delete=models.SET_NULL, default=None, blank=True, null=True) - mentorship_session = models.ForeignKey(MentorshipSession, - on_delete=models.SET_NULL, - default=None, - blank=True, - null=True) - mentor = models.ForeignKey(User, - related_name='mentor_set', - on_delete=models.SET_NULL, - default=None, - blank=True, - null=True) + mentorship_session = models.ForeignKey( + MentorshipSession, on_delete=models.SET_NULL, default=None, blank=True, null=True + ) + mentor = models.ForeignKey( + User, related_name="mentor_set", on_delete=models.SET_NULL, default=None, blank=True, null=True + ) cohort = models.ForeignKey(Cohort, on_delete=models.SET_NULL, default=None, blank=True, null=True) academy = models.ForeignKey(Academy, on_delete=models.SET_NULL, default=None, blank=True, null=True) token = models.OneToOneField(Token, on_delete=models.SET_NULL, default=None, blank=True, null=True) @@ -123,8 +119,7 @@ def __init__(self, *args, **kwargs): default=None, blank=True, null=True, - help_text= - 'You can group one or more answers in one survey, the survey does not belong to any student in particular but answers belong to the student that answered' + help_text="You can group one or more answers in one survey, the survey does not belong to any student in particular but answers belong to the student that answered", ) status = models.CharField(max_length=15, choices=SURVEY_STATUS, default=PENDING) @@ -140,7 +135,7 @@ def save(self, *args, **kwargs): super().save(*args, **kwargs) # Call the "real" save() method. 
- if self.__old_status != self.status and self.status == 'ANSWERED': + if self.__old_status != self.status and self.status == "ANSWERED": # signal the updated answer signals.survey_answered.send_robust(instance=self, sender=Answer) @@ -152,48 +147,46 @@ class ReviewPlatform(models.Model): """ Websites like KareerKarma, Switchup, Coursereport, etc. """ + slug = models.SlugField(primary_key=True) name = models.CharField(max_length=100) website = models.URLField() - review_signup = models.URLField(blank=True, null=True, default=None, help_text='Give URL to create a new review') + review_signup = models.URLField(blank=True, null=True, default=None, help_text="Give URL to create a new review") contact_email = models.EmailField() contact_name = models.EmailField(blank=True, null=True, default=None) contact_phone = models.CharField(max_length=17, blank=True, null=True, default=None) def __str__(self): - return f'{self.slug}' + return f"{self.slug}" -PENDING = 'PENDING' -REQUESTED = 'REQUESTED' -DONE = 'DONE' -IGNORE = 'IGNORE' +PENDING = "PENDING" +REQUESTED = "REQUESTED" +DONE = "DONE" +IGNORE = "IGNORE" REVIEW_STATUS = ( - (PENDING, 'Pending'), - (REQUESTED, 'Requested'), - (DONE, 'Done'), - (IGNORE, 'Ignore'), + (PENDING, "Pending"), + (REQUESTED, "Requested"), + (DONE, "Done"), + (IGNORE, "Ignore"), ) class Review(models.Model): - nps_previous_rating = models.FloatField(blank=True, - null=True, - default=None, - help_text='Automatically calculated based on NPS survey responses') + nps_previous_rating = models.FloatField( + blank=True, null=True, default=None, help_text="Automatically calculated based on NPS survey responses" + ) total_rating = models.FloatField(blank=True, null=True, default=None) public_url = models.URLField(blank=True, null=True, default=None) - status = models.CharField(max_length=9, - choices=REVIEW_STATUS, - default=PENDING, - help_text='Deleted reviews hav status=Ignore') + status = models.CharField( + max_length=9, choices=REVIEW_STATUS, default=PENDING, help_text="Deleted reviews hav status=Ignore" + ) status_text = models.CharField(max_length=255, default=None, null=True, blank=True) - comments = models.TextField(default=None, - null=True, - blank=True, - help_text='Student comments when leaving the review') + comments = models.TextField( + default=None, null=True, blank=True, help_text="Student comments when leaving the review" + ) cohort = models.ForeignKey(Cohort, on_delete=models.CASCADE, null=True, blank=True) author = models.ForeignKey(User, on_delete=models.CASCADE) @@ -205,7 +198,7 @@ class Review(models.Model): updated_at = models.DateTimeField(auto_now=True, editable=False) def __str__(self): - cohort = 'no specific cohort' + cohort = "no specific cohort" if self.cohort is not None: cohort = self.cohort.slug - return f'{self.author.first_name} {self.author.last_name} for {cohort}' + return f"{self.author.first_name} {self.author.last_name} for {cohort}" diff --git a/breathecode/feedback/receivers.py b/breathecode/feedback/receivers.py index 5668d40b0..d0195f0b4 100644 --- a/breathecode/feedback/receivers.py +++ b/breathecode/feedback/receivers.py @@ -22,24 +22,24 @@ def answer_received(sender, instance, **kwargs): Update survey avg score when new answers are received also notify bad nps score. 
""" - logger.debug('Answer received, calling task process_answer_received') + logger.debug("Answer received, calling task process_answer_received") process_answer_received.delay(instance.id) @receiver(student_edu_status_updated, sender=CohortUser) def post_save_cohort_user(sender, instance, **kwargs): - if instance.educational_status == 'GRADUATED': - logger.debug('Procesing student graduation') + if instance.educational_status == "GRADUATED": + logger.debug("Procesing student graduation") process_student_graduation.delay(instance.cohort.id, instance.user.id) @receiver(mentorship_session_status, sender=MentorshipSession) def post_mentorin_session_ended(sender: Type[MentorshipSession], instance: MentorshipSession, **kwargs): - if instance.status == 'COMPLETED': + if instance.status == "COMPLETED": duration = timedelta(seconds=0) if instance.started_at is not None and instance.ended_at is not None: duration = instance.ended_at - instance.started_at if duration > timedelta(minutes=5) and instance.mentor and instance.mentee: - logger.debug(f'Session lasted for {str(duration.seconds/60)} minutes, sending survey') + logger.debug(f"Session lasted for {str(duration.seconds/60)} minutes, sending survey") send_mentorship_session_survey.delay(instance.id) diff --git a/breathecode/feedback/serializers.py b/breathecode/feedback/serializers.py index 443f0bab0..812f89a53 100644 --- a/breathecode/feedback/serializers.py +++ b/breathecode/feedback/serializers.py @@ -25,6 +25,7 @@ class GetCohortSerializer(serpy.Serializer): class GetProfileSmallSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. avatar_url = serpy.Field() @@ -36,7 +37,7 @@ class UserSerializer(serpy.Serializer): profile = serpy.MethodField() def get_profile(self, obj): - if not hasattr(obj, 'profile'): + if not hasattr(obj, "profile"): return None return GetProfileSmallSerializer(obj.profile).data @@ -44,6 +45,7 @@ def get_profile(self, obj): class GithubSmallSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. avatar_url = serpy.Field() name = serpy.Field() @@ -52,6 +54,7 @@ class GithubSmallSerializer(serpy.Serializer): class UserSmallSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. 
id = serpy.Field() email = serpy.Field() @@ -60,7 +63,7 @@ class UserSmallSerializer(serpy.Serializer): github = serpy.MethodField() def get_github(self, obj): - if not hasattr(obj, 'credentialsgithub'): + if not hasattr(obj, "credentialsgithub"): return None return GithubSmallSerializer(obj.credentialsgithub).data @@ -106,7 +109,7 @@ class SurveySmallSerializer(serpy.Serializer): public_url = serpy.MethodField() def get_public_url(self, obj): - return 'https://nps.4geeks.com/survey/' + str(obj.id) + return "https://nps.4geeks.com/survey/" + str(obj.id) class BigAnswerSerializer(serpy.Serializer): @@ -155,30 +158,30 @@ class AnswerPUTSerializer(serializers.ModelSerializer): class Meta: model = Answer - exclude = ('token', ) + exclude = ("token",) def validate(self, data): # the user cannot vote to the same entity within 5 minutes - answer = Answer.objects.filter(user=self.context['request'].user, id=self.context['answer']).first() + answer = Answer.objects.filter(user=self.context["request"].user, id=self.context["answer"]).first() if answer is None: - raise ValidationError('This survey does not exist for this user') + raise ValidationError("This survey does not exist for this user") - if not 'score' in data or int(data['score']) > 10 or int(data['score']) < 1: - raise ValidationError('Score must be between 1 and 10') + if not "score" in data or int(data["score"]) > 10 or int(data["score"]) < 1: + raise ValidationError("Score must be between 1 and 10") - if answer.status == 'ANSWERED' and data['score'] != answer.score: - raise ValidationError(f'You have already answered {answer.score}, you must keep the same score') + if answer.status == "ANSWERED" and data["score"] != answer.score: + raise ValidationError(f"You have already answered {answer.score}, you must keep the same score") return data def update(self, instance, validated_data): - instance.score = validated_data['score'] - instance.status = 'ANSWERED' + instance.score = validated_data["score"] + instance.status = "ANSWERED" # instance.token = None - if 'comment' in validated_data: - instance.comment = validated_data['comment'] + if "comment" in validated_data: + instance.comment = validated_data["comment"] instance.save() @@ -191,44 +194,48 @@ class SurveySerializer(serializers.ModelSerializer): public_url = serializers.SerializerMethodField() def get_public_url(self, obj): - return 'https://nps.4geeks.com/survey/' + str(obj.id) + return "https://nps.4geeks.com/survey/" + str(obj.id) class Meta: model = Survey - exclude = ('scores', 'status_json', 'response_rate') + exclude = ("scores", "status_json", "response_rate") def validate(self, data): - if data['cohort'].academy.id != int(self.context['academy_id']): - raise ValidationException(f'You don\'t have rights for this cohort academy {self.context["academy_id"]}.', - code=400, - slug='cohort-academy-needs-rights') + if data["cohort"].academy.id != int(self.context["academy_id"]): + raise ValidationException( + f'You don\'t have rights for this cohort academy {self.context["academy_id"]}.', + code=400, + slug="cohort-academy-needs-rights", + ) - if 'duration' in data and data['duration'] < timezone.timedelta(hours=1): - raise ValidationException('Minimum duration for surveys is one hour.', - code=400, - slug='minimum-survey-duration-1h') + if "duration" in data and data["duration"] < timezone.timedelta(hours=1): + raise ValidationException( + "Minimum duration for surveys is one hour.", code=400, slug="minimum-survey-duration-1h" + ) - cohort_teacher = 
CohortUser.objects.filter(cohort=data['cohort'], role='TEACHER') + cohort_teacher = CohortUser.objects.filter(cohort=data["cohort"], role="TEACHER") if cohort_teacher.count() == 0: - raise ValidationException('This cohort must have a teacher assigned to be able to survey it', - code=400, - slug='cohort-needs-teacher-assigned') + raise ValidationException( + "This cohort must have a teacher assigned to be able to survey it", + code=400, + slug="cohort-needs-teacher-assigned", + ) return data def create(self, validated_data): send_now = False - if 'send_now' in validated_data: - if validated_data['send_now']: + if "send_now" in validated_data: + if validated_data["send_now"]: send_now = True - del validated_data['send_now'] + del validated_data["send_now"] - cohort = validated_data['cohort'] + cohort = validated_data["cohort"] - if 'lang' not in validated_data: - validated_data['lang'] = cohort.language.lower() + if "lang" not in validated_data: + validated_data["lang"] = cohort.language.lower() result = super().create(validated_data) @@ -244,28 +251,28 @@ class SurveyPUTSerializer(serializers.ModelSerializer): class Meta: model = Survey - exclude = ('scores', 'status_json', 'status', 'response_rate') + exclude = ("scores", "status_json", "status", "response_rate") def validate(self, data): - if self.instance.status != 'PENDING': - raise ValidationException('This survey was already send, therefore it cannot be updated') + if self.instance.status != "PENDING": + raise ValidationException("This survey was already send, therefore it cannot be updated") - if 'cohort' in data: - raise ValidationException('The cohort cannot be updated in a survey, please create a new survey instead.') + if "cohort" in data: + raise ValidationException("The cohort cannot be updated in a survey, please create a new survey instead.") - if self.instance.cohort.academy.id != int(self.context['academy_id']): - raise ValidationException('You don\'t have rights for this cohort academy') + if self.instance.cohort.academy.id != int(self.context["academy_id"]): + raise ValidationException("You don't have rights for this cohort academy") return data def update(self, instance, validated_data): send_now = False - if 'send_now' in validated_data: - if validated_data['send_now']: + if "send_now" in validated_data: + if validated_data["send_now"]: send_now = True - del validated_data['send_now'] + del validated_data["send_now"] result = super().update(instance, validated_data) @@ -279,21 +286,21 @@ class ReviewPUTSerializer(serializers.ModelSerializer): class Meta: model = Review - exclude = ('created_at', 'updated_at', 'author', 'platform', 'nps_previous_rating') + exclude = ("created_at", "updated_at", "author", "platform", "nps_previous_rating") def validate(self, data): - if 'cohort' in data: - raise ValidationException('The cohort cannot be updated in a review, please create a new review instead.') + if "cohort" in data: + raise ValidationException("The cohort cannot be updated in a review, please create a new review instead.") - if 'author' in data: - raise ValidationException('The author cannot be updated in a review, please create a new review instead.') + if "author" in data: + raise ValidationException("The author cannot be updated in a review, please create a new review instead.") - if 'platform' in data: - raise ValidationException('The platform cannot be updated in a review, please create a new review instead.') + if "platform" in data: + raise ValidationException("The platform cannot be updated in a review, please 
create a new review instead.") - if self.instance.cohort.academy.id != int(self.context['academy_id']): - raise ValidationException('You don\'t have rights for this cohort academy') + if self.instance.cohort.academy.id != int(self.context["academy_id"]): + raise ValidationException("You don't have rights for this cohort academy") return data diff --git a/breathecode/feedback/signals.py b/breathecode/feedback/signals.py index d03e629c9..98272cfca 100644 --- a/breathecode/feedback/signals.py +++ b/breathecode/feedback/signals.py @@ -2,6 +2,7 @@ For each signal you want other apps to be able to receive, you have to declare a new variable here like this: """ + from django import dispatch # when a student answers one particular questions of a survey diff --git a/breathecode/feedback/supervisors.py b/breathecode/feedback/supervisors.py index e6c2120a2..725b5204c 100644 --- a/breathecode/feedback/supervisors.py +++ b/breathecode/feedback/supervisors.py @@ -14,20 +14,24 @@ @supervisor(delta=timedelta(days=1)) def supervise_mentorship_survey(): utc_now = timezone.now() - sessions = MentorshipSession.objects.filter(status='COMPLETED', - started_at__isnull=False, - ended_at__isnull=False, - mentor__isnull=False, - mentee__isnull=False, - created_at__lte=utc_now, - created_at__gte=utc_now - timedelta(days=5)) + sessions = MentorshipSession.objects.filter( + status="COMPLETED", + started_at__isnull=False, + ended_at__isnull=False, + mentor__isnull=False, + mentee__isnull=False, + created_at__lte=utc_now, + created_at__gte=utc_now - timedelta(days=5), + ) for session in sessions: duration = session.ended_at - session.started_at - if duration > timedelta(minutes=5) and Answer.objects.filter( - mentorship_session__id=session.id).exists() is False: - yield f'Session {session.id} hasn\'t a survey', 'no-survey-for-session', {'session_id': session.id} + if ( + duration > timedelta(minutes=5) + and Answer.objects.filter(mentorship_session__id=session.id).exists() is False + ): + yield f"Session {session.id} hasn't a survey", "no-survey-for-session", {"session_id": session.id} @issue(supervise_mentorship_survey, delta=timedelta(minutes=10)) diff --git a/breathecode/feedback/tasks.py b/breathecode/feedback/tasks.py index 46ce9174e..ac8711efb 100644 --- a/breathecode/feedback/tasks.py +++ b/breathecode/feedback/tasks.py @@ -20,70 +20,76 @@ # Get an instance of a logger logger = getLogger(__name__) -ADMIN_URL = os.getenv('ADMIN_URL', '') -API_URL = os.getenv('API_URL', '') -ENV = os.getenv('ENV', '') +ADMIN_URL = os.getenv("ADMIN_URL", "") +API_URL = os.getenv("API_URL", "") +ENV = os.getenv("ENV", "") def build_question(answer): lang = answer.lang.lower() - question = {'title': '', 'lowest': '', 'highest': ''} + question = {"title": "", "lowest": "", "highest": ""} if answer.mentorship_session is not None: - question['title'] = strings[lang]['session']['title'].format( - f'{answer.mentorship_session.mentor.user.first_name} {answer.mentorship_session.mentor.user.last_name}') - question['lowest'] = strings[lang]['session']['lowest'] - question['highest'] = strings[lang]['session']['highest'] + question["title"] = strings[lang]["session"]["title"].format( + f"{answer.mentorship_session.mentor.user.first_name} {answer.mentorship_session.mentor.user.last_name}" + ) + question["lowest"] = strings[lang]["session"]["lowest"] + question["highest"] = strings[lang]["session"]["highest"] elif answer.event is not None: - question['title'] = strings[lang]['event']['title'] - question['lowest'] = 
strings[lang]['event']['lowest'] - question['highest'] = strings[lang]['event']['highest'] + question["title"] = strings[lang]["event"]["title"] + question["lowest"] = strings[lang]["event"]["lowest"] + question["highest"] = strings[lang]["event"]["highest"] elif answer.mentor is not None: - question['title'] = strings[lang]['mentor']['title'].format(answer.mentor.first_name + ' ' + - answer.mentor.last_name) - question['lowest'] = strings[lang]['mentor']['lowest'] - question['highest'] = strings[lang]['mentor']['highest'] + question["title"] = strings[lang]["mentor"]["title"].format( + answer.mentor.first_name + " " + answer.mentor.last_name + ) + question["lowest"] = strings[lang]["mentor"]["lowest"] + question["highest"] = strings[lang]["mentor"]["highest"] elif answer.cohort is not None: - title = answer.cohort.syllabus_version.syllabus.name if answer.cohort.syllabus_version \ - and answer.cohort.syllabus_version.syllabus.name else answer.cohort.name - - question['title'] = strings[lang]['cohort']['title'].format(title) - question['lowest'] = strings[lang]['cohort']['lowest'] - question['highest'] = strings[lang]['cohort']['highest'] + title = ( + answer.cohort.syllabus_version.syllabus.name + if answer.cohort.syllabus_version and answer.cohort.syllabus_version.syllabus.name + else answer.cohort.name + ) + + question["title"] = strings[lang]["cohort"]["title"].format(title) + question["lowest"] = strings[lang]["cohort"]["lowest"] + question["highest"] = strings[lang]["cohort"]["highest"] elif answer.academy is not None: - question['title'] = strings[lang]['academy']['title'].format(answer.academy.name) - question['lowest'] = strings[lang]['academy']['lowest'] - question['highest'] = strings[lang]['academy']['highest'] + question["title"] = strings[lang]["academy"]["title"].format(answer.academy.name) + question["lowest"] = strings[lang]["academy"]["lowest"] + question["highest"] = strings[lang]["academy"]["highest"] return question def get_system_email(): - system_email = os.getenv('SYSTEM_EMAIL') + system_email = os.getenv("SYSTEM_EMAIL") return system_email def get_admin_url(): - admin_url = os.getenv('ADMIN_URL') + admin_url = os.getenv("ADMIN_URL") return admin_url -def generate_user_cohort_survey_answers(user, survey, status='OPENED'): +def generate_user_cohort_survey_answers(user, survey, status="OPENED"): if not CohortUser.objects.filter( - cohort=survey.cohort, role='STUDENT', user=user, educational_status__in=['ACTIVE', 'GRADUATED']).exists(): - raise ValidationException('This student does not belong to this cohort', 400) + cohort=survey.cohort, role="STUDENT", user=user, educational_status__in=["ACTIVE", "GRADUATED"] + ).exists(): + raise ValidationException("This student does not belong to this cohort", 400) - cohort_teacher = CohortUser.objects.filter(cohort=survey.cohort, - role='TEACHER', - educational_status__in=['ACTIVE', 'GRADUATED']) + cohort_teacher = CohortUser.objects.filter( + cohort=survey.cohort, role="TEACHER", educational_status__in=["ACTIVE", "GRADUATED"] + ) if cohort_teacher.count() == 0: - raise ValidationException('This cohort must have a teacher assigned to be able to survey it', 400) + raise ValidationException("This cohort must have a teacher assigned to be able to survey it", 400) def new_answer(answer: Answer): question = build_question(answer) - answer.title = question['title'] - answer.lowest = question['lowest'] - answer.highest = question['highest'] + answer.title = question["title"] + answer.lowest = question["lowest"] + answer.highest = 
question["highest"] answer.user = user answer.status = status answer.survey = survey @@ -109,9 +115,9 @@ def new_answer(answer: Answer): count = count + 1 # ask for the first TA - cohort_assistant = CohortUser.objects.filter(cohort=survey.cohort, - role='ASSISTANT', - educational_status__in=['ACTIVE', 'GRADUATED']) + cohort_assistant = CohortUser.objects.filter( + cohort=survey.cohort, role="ASSISTANT", educational_status__in=["ACTIVE", "GRADUATED"] + ) count = 0 for ca in cohort_assistant: if count >= survey.max_assistants_to_ask: @@ -128,87 +134,84 @@ def new_answer(answer: Answer): def api_url(): - return os.getenv('API_URL', '') + return os.getenv("API_URL", "") @task(bind=False, priority=TaskPriority.NOTIFICATION.value) def send_cohort_survey(user_id, survey_id, **_): - logger.info('Starting send_cohort_survey') + logger.info("Starting send_cohort_survey") survey = Survey.objects.filter(id=survey_id).first() if survey is None: - raise RetryTask('Survey not found') + raise RetryTask("Survey not found") user = User.objects.filter(id=user_id).first() if user is None: - raise AbortTask('User not found') + raise AbortTask("User not found") utc_now = timezone.now() if utc_now > survey.created_at + survey.duration: - raise AbortTask('This survey has already expired') + raise AbortTask("This survey has already expired") - cu = CohortUser.objects.filter(cohort=survey.cohort, - role='STUDENT', - user=user, - educational_status__in=['ACTIVE', 'GRADUATED']).first() + cu = CohortUser.objects.filter( + cohort=survey.cohort, role="STUDENT", user=user, educational_status__in=["ACTIVE", "GRADUATED"] + ).first() if cu is None: - raise AbortTask('This student does not belong to this cohort') + raise AbortTask("This student does not belong to this cohort") try: - generate_user_cohort_survey_answers(user, survey, status='SENT') + generate_user_cohort_survey_answers(user, survey, status="SENT") except Exception as e: raise AbortTask(str(e)) - has_slackuser = hasattr(user, 'slackuser') + has_slackuser = hasattr(user, "slackuser") if not user.email and not has_slackuser: - message = f'Author not have email and slack, this survey cannot be send by {str(user.id)}' + message = f"Author not have email and slack, this survey cannot be send by {str(user.id)}" raise AbortTask(message) - token, created = Token.get_or_create(user, token_type='temporal', hours_length=48) + token, created = Token.get_or_create(user, token_type="temporal", hours_length=48) data = { - 'SUBJECT': strings[survey.lang]['survey_subject'], - 'MESSAGE': strings[survey.lang]['survey_message'], - 'TRACKER_URL': f'{api_url()}/v1/feedback/survey/{survey_id}/tracker.png', - 'BUTTON': strings[survey.lang]['button_label'], - 'LINK': f'https://nps.4geeks.com/survey/{survey_id}?token={token.key}', + "SUBJECT": strings[survey.lang]["survey_subject"], + "MESSAGE": strings[survey.lang]["survey_message"], + "TRACKER_URL": f"{api_url()}/v1/feedback/survey/{survey_id}/tracker.png", + "BUTTON": strings[survey.lang]["button_label"], + "LINK": f"https://nps.4geeks.com/survey/{survey_id}?token={token.key}", } if user.email: - notify_actions.send_email_message('nps_survey', user.email, data, academy=survey.cohort.academy) + notify_actions.send_email_message("nps_survey", user.email, data, academy=survey.cohort.academy) - if hasattr(user, 'slackuser') and hasattr(survey.cohort.academy, 'slackteam'): - notify_actions.send_slack('nps_survey', - user.slackuser, - survey.cohort.academy.slackteam, - data=data, - academy=survey.cohort.academy) + if hasattr(user, 
"slackuser") and hasattr(survey.cohort.academy, "slackteam"): + notify_actions.send_slack( + "nps_survey", user.slackuser, survey.cohort.academy.slackteam, data=data, academy=survey.cohort.academy + ) @task(bind=False, priority=TaskPriority.ACADEMY.value) def process_student_graduation(cohort_id, user_id, **_): from .actions import create_user_graduation_reviews - logger.debug('Starting process_student_graduation') + logger.debug("Starting process_student_graduation") cohort = Cohort.objects.filter(id=cohort_id).first() if cohort is None: - raise AbortTask(f'Invalid cohort id: {cohort_id}') + raise AbortTask(f"Invalid cohort id: {cohort_id}") user = User.objects.filter(id=user_id).first() if user is None: - raise AbortTask(f'Invalid user id: {user_id}') + raise AbortTask(f"Invalid user id: {user_id}") create_user_graduation_reviews(user, cohort) @task(bind=False, priority=TaskPriority.ACADEMY.value) def recalculate_survey_scores(survey_id, **_): - logger.info('Starting recalculate_survey_score') + logger.info("Starting recalculate_survey_score") survey = Survey.objects.filter(id=survey_id).first() if survey is None: - raise RetryTask('Survey not found') + raise RetryTask("Survey not found") survey.response_rate = actions.calculate_survey_response_rate(survey.id) survey.scores = actions.calculate_survey_scores(survey.id) @@ -223,13 +226,13 @@ def process_answer_received(answer_id, **_): the school. """ - logger.debug('Starting notify_bad_nps_score') + logger.debug("Starting notify_bad_nps_score") answer = Answer.objects.filter(id=answer_id).first() if answer is None: - raise RetryTask('Answer not found') + raise RetryTask("Answer not found") if answer.survey is None: - raise AbortTask('No survey connected to answer.') + raise AbortTask("No survey connected to answer.") answer.survey.response_rate = actions.calculate_survey_response_rate(answer.survey.id) answer.survey.scores = actions.calculate_survey_scores(answer.survey.id) @@ -247,77 +250,78 @@ def process_answer_received(answer_id, **_): list_of_emails.append(answer.academy.feedback_email) if len(list_of_emails) == 0: - raise AbortTask('No email found.') + raise AbortTask("No email found.") # TODO: instead of sending, use notifications system to be built on the breathecode.admin app. 
if list_of_emails: notify_actions.send_email_message( - 'negative_answer', + "negative_answer", list_of_emails, data={ - 'SUBJECT': f'A student answered with a bad NPS score at {answer.academy.name}', - 'FULL_NAME': answer.user.first_name + ' ' + answer.user.last_name, - 'QUESTION': answer.title, - 'SCORE': answer.score, - 'COMMENTS': answer.comment, - 'ACADEMY': answer.academy.name, - 'LINK': f'{admin_url}/feedback/surveys/{answer.academy.slug}/{answer.survey.id}', + "SUBJECT": f"A student answered with a bad NPS score at {answer.academy.name}", + "FULL_NAME": answer.user.first_name + " " + answer.user.last_name, + "QUESTION": answer.title, + "SCORE": answer.score, + "COMMENTS": answer.comment, + "ACADEMY": answer.academy.name, + "LINK": f"{admin_url}/feedback/surveys/{answer.academy.slug}/{answer.survey.id}", }, - academy=answer.academy) + academy=answer.academy, + ) return True @task(bind=False, priority=TaskPriority.NOTIFICATION.value) def send_mentorship_session_survey(session_id, **_): - logger.info('Starting send_mentorship_session_survey') + logger.info("Starting send_mentorship_session_survey") session = MentorshipSession.objects.filter(id=session_id).first() if session is None: - raise RetryTask('Mentoring session doesn\'t found') + raise RetryTask("Mentoring session doesn't found") if session.mentee is None: - raise AbortTask('This session doesn\'t have a mentee') + raise AbortTask("This session doesn't have a mentee") if not session.started_at or not session.ended_at: - raise AbortTask('This session hasn\'t finished') + raise AbortTask("This session hasn't finished") if session.ended_at - session.started_at <= timedelta(minutes=5): - raise AbortTask('Mentorship session duration is less or equal than five minutes') + raise AbortTask("Mentorship session duration is less or equal than five minutes") if not session.service: - raise AbortTask('Mentorship session doesn\'t have a service associated with it') + raise AbortTask("Mentorship session doesn't have a service associated with it") answer = Answer.objects.filter(mentorship_session__id=session.id).first() if answer is None: answer = Answer(mentorship_session=session, academy=session.mentor.academy, lang=session.service.language) question = build_question(answer) - answer.title = question['title'] - answer.lowest = question['lowest'] - answer.highest = question['highest'] + answer.title = question["title"] + answer.lowest = question["lowest"] + answer.highest = question["highest"] answer.user = session.mentee - answer.status = 'SENT' + answer.status = "SENT" answer.save() - elif answer.status == 'ANSWERED': - raise AbortTask(f'This survey about MentorshipSession {session.id} was answered') + elif answer.status == "ANSWERED": + raise AbortTask(f"This survey about MentorshipSession {session.id} was answered") if not session.mentee.email: - message = f'Author not have email, this survey cannot be send by {session.mentee.id}' + message = f"Author not have email, this survey cannot be send by {session.mentee.id}" raise AbortTask(message) - token, _ = Token.get_or_create(session.mentee, token_type='temporal', hours_length=48) + token, _ = Token.get_or_create(session.mentee, token_type="temporal", hours_length=48) # lazyload api url in test environment - api_url = API_URL if ENV != 'test' else os.getenv('API_URL', '') + api_url = API_URL if ENV != "test" else os.getenv("API_URL", "") data = { - 'SUBJECT': strings[answer.lang.lower()]['survey_subject'], - 'MESSAGE': answer.title, - 'TRACKER_URL': 
f'{api_url}/v1/feedback/answer/{answer.id}/tracker.png', - 'BUTTON': strings[answer.lang.lower()]['button_label'], - 'LINK': f'https://nps.4geeks.com/{answer.id}?token={token.key}', + "SUBJECT": strings[answer.lang.lower()]["survey_subject"], + "MESSAGE": answer.title, + "TRACKER_URL": f"{api_url}/v1/feedback/answer/{answer.id}/tracker.png", + "BUTTON": strings[answer.lang.lower()]["button_label"], + "LINK": f"https://nps.4geeks.com/{answer.id}?token={token.key}", } if session.mentee.email: - if notify_actions.send_email_message('nps_survey', session.mentee.email, data, academy=session.mentor.academy): + if notify_actions.send_email_message("nps_survey", session.mentee.email, data, academy=session.mentor.academy): answer.sent_at = timezone.now() answer.save() diff --git a/breathecode/feedback/tests/actions/tests_calculate_survey_scores.py b/breathecode/feedback/tests/actions/tests_calculate_survey_scores.py index 51ff6ea26..79de886d6 100644 --- a/breathecode/feedback/tests/actions/tests_calculate_survey_scores.py +++ b/breathecode/feedback/tests/actions/tests_calculate_survey_scores.py @@ -1,6 +1,7 @@ """ Test /academy/survey """ + import random from unittest.mock import MagicMock, patch @@ -15,69 +16,71 @@ class SurveyTestSuite(FeedbackTestCase): """Test /academy/survey""" + """ 🔽🔽🔽 GET without Survey """ - @patch('breathecode.feedback.signals.survey_answered.send_robust', MagicMock()) + @patch("breathecode.feedback.signals.survey_answered.send_robust", MagicMock()) def test__without_survey(self): self.headers(academy=1) - model = self.generate_models(authenticate=True, profile_academy=True, capability='read_survey', role=1) + model = self.generate_models(authenticate=True, profile_academy=True, capability="read_survey", role=1) - with self.assertRaisesMessage(ValidationException, 'not-found'): + with self.assertRaisesMessage(ValidationException, "not-found"): calculate_survey_scores(1) - self.assertEqual(self.bc.database.list_of('feedback.Survey'), []) - self.assertEqual(self.bc.database.list_of('feedback.Answer'), []) + self.assertEqual(self.bc.database.list_of("feedback.Survey"), []) + self.assertEqual(self.bc.database.list_of("feedback.Answer"), []) """ 🔽🔽🔽 GET with one Survey """ - @patch('breathecode.feedback.signals.survey_answered.send_robust', MagicMock()) + @patch("breathecode.feedback.signals.survey_answered.send_robust", MagicMock()) def test__with_survey(self): self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_survey', - role=1, - survey=1) + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_survey", role=1, survey=1 + ) json = calculate_survey_scores(1) - expected = {'academy': None, 'cohort': None, 'mentors': [], 'total': None} + expected = {"academy": None, "cohort": None, "mentors": [], "total": None} self.assertEqual(json, expected) - self.assertEqual(self.bc.database.list_of('feedback.Survey'), [ - self.bc.format.to_dict(model.survey), - ]) - self.assertEqual(self.bc.database.list_of('feedback.Answer'), []) + self.assertEqual( + self.bc.database.list_of("feedback.Survey"), + [ + self.bc.format.to_dict(model.survey), + ], + ) + self.assertEqual(self.bc.database.list_of("feedback.Answer"), []) """ 🔽🔽🔽 GET with one Survey and many Answer with bad statuses """ - @patch('breathecode.feedback.signals.survey_answered.send_robust', MagicMock()) + @patch("breathecode.feedback.signals.survey_answered.send_robust", MagicMock()) def 
test__with_survey__answers_with_bad_statuses(self): self.headers(academy=1) - statuses = ['PENDING', 'SENT', 'OPENED', 'EXPIRED'] - answers = [{'status': s} for s in statuses] - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_survey', - role=1, - survey=1, - answer=answers) + statuses = ["PENDING", "SENT", "OPENED", "EXPIRED"] + answers = [{"status": s} for s in statuses] + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_survey", role=1, survey=1, answer=answers + ) json = calculate_survey_scores(1) - expected = {'academy': None, 'cohort': None, 'mentors': [], 'total': None} + expected = {"academy": None, "cohort": None, "mentors": [], "total": None} self.assertEqual(json, expected) - self.assertEqual(self.bc.database.list_of('feedback.Survey'), [ - self.bc.format.to_dict(model.survey), - ]) self.assertEqual( - self.bc.database.list_of('feedback.Answer'), + self.bc.database.list_of("feedback.Survey"), + [ + self.bc.format.to_dict(model.survey), + ], + ) + self.assertEqual( + self.bc.database.list_of("feedback.Answer"), self.bc.format.to_dict(model.answer), ) @@ -85,27 +88,27 @@ def test__with_survey__answers_with_bad_statuses(self): 🔽🔽🔽 GET with one Survey and many Answer with right status, score not set """ - @patch('breathecode.feedback.signals.survey_answered.send_robust', MagicMock()) + @patch("breathecode.feedback.signals.survey_answered.send_robust", MagicMock()) def test__with_survey__answers_with_right_status__score_not_set(self): self.headers(academy=1) - answers = [{'status': 'ANSWERED'} for _ in range(0, 2)] - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_survey', - role=1, - survey=1, - answer=answers) + answers = [{"status": "ANSWERED"} for _ in range(0, 2)] + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_survey", role=1, survey=1, answer=answers + ) json = calculate_survey_scores(1) - expected = {'academy': None, 'cohort': None, 'mentors': [], 'total': None} + expected = {"academy": None, "cohort": None, "mentors": [], "total": None} self.assertEqual(json, expected) - self.assertEqual(self.bc.database.list_of('feedback.Survey'), [ - self.bc.format.to_dict(model.survey), - ]) self.assertEqual( - self.bc.database.list_of('feedback.Answer'), + self.bc.database.list_of("feedback.Survey"), + [ + self.bc.format.to_dict(model.survey), + ], + ) + self.assertEqual( + self.bc.database.list_of("feedback.Answer"), self.bc.format.to_dict(model.answer), ) @@ -113,7 +116,7 @@ def test__with_survey__answers_with_right_status__score_not_set(self): 🔽🔽🔽 GET with one Survey and many Answer with right status, score set """ - @patch('breathecode.feedback.signals.survey_answered.send_robust', MagicMock()) + @patch("breathecode.feedback.signals.survey_answered.send_robust", MagicMock()) def test__with_survey__answers_with_right_status__score_set(self): self.headers(academy=1) @@ -121,71 +124,81 @@ def test__with_survey__answers_with_right_status__score_set(self): size_of_cohort_answers = random.randint(2, 5) size_of_mentor1_answers = random.randint(2, 5) size_of_mentor2_answers = random.randint(2, 5) - size_of_answers = (size_of_academy_answers + size_of_cohort_answers + size_of_mentor1_answers + - size_of_mentor2_answers) - - academy_answers = [{ - 'status': 'ANSWERED', - 'score': random.randint(1, 11), - 'title': strings['en']['academy']['title'].format('asd'), - } for _ in range(0, size_of_academy_answers)] - - cohort_answers 
= [{ - 'status': 'ANSWERED', - 'score': random.randint(1, 11), - 'title': strings['en']['cohort']['title'].format('asd'), - } for _ in range(0, size_of_cohort_answers)] - - mentor1_answers = [{ - 'status': 'ANSWERED', - 'score': random.randint(1, 11), - 'title': strings['en']['mentor']['title'].format('asd1'), - } for _ in range(0, size_of_mentor1_answers)] - - mentor2_answers = [{ - 'status': 'ANSWERED', - 'score': random.randint(1, 11), - 'title': strings['en']['mentor']['title'].format('asd2'), - } for _ in range(0, size_of_mentor2_answers)] + size_of_answers = ( + size_of_academy_answers + size_of_cohort_answers + size_of_mentor1_answers + size_of_mentor2_answers + ) + + academy_answers = [ + { + "status": "ANSWERED", + "score": random.randint(1, 11), + "title": strings["en"]["academy"]["title"].format("asd"), + } + for _ in range(0, size_of_academy_answers) + ] + + cohort_answers = [ + { + "status": "ANSWERED", + "score": random.randint(1, 11), + "title": strings["en"]["cohort"]["title"].format("asd"), + } + for _ in range(0, size_of_cohort_answers) + ] + + mentor1_answers = [ + { + "status": "ANSWERED", + "score": random.randint(1, 11), + "title": strings["en"]["mentor"]["title"].format("asd1"), + } + for _ in range(0, size_of_mentor1_answers) + ] + + mentor2_answers = [ + { + "status": "ANSWERED", + "score": random.randint(1, 11), + "title": strings["en"]["mentor"]["title"].format("asd2"), + } + for _ in range(0, size_of_mentor2_answers) + ] answers = academy_answers + cohort_answers + mentor1_answers + mentor2_answers - survey = {'response_rate': random.randint(1, 101)} - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_survey', - role=1, - survey=survey, - answer=answers) + survey = {"response_rate": random.randint(1, 101)} + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_survey", role=1, survey=survey, answer=answers + ) json = calculate_survey_scores(1) expected = { - 'academy': - sum([x['score'] for x in academy_answers]) / size_of_academy_answers, - 'cohort': - sum([x['score'] for x in cohort_answers]) / size_of_cohort_answers, - 'mentors': [ + "academy": sum([x["score"] for x in academy_answers]) / size_of_academy_answers, + "cohort": sum([x["score"] for x in cohort_answers]) / size_of_cohort_answers, + "mentors": [ { - 'name': 'asd1', - 'score': sum([x['score'] for x in mentor1_answers]) / size_of_mentor1_answers, + "name": "asd1", + "score": sum([x["score"] for x in mentor1_answers]) / size_of_mentor1_answers, }, { - 'name': 'asd2', - 'score': sum([x['score'] for x in mentor2_answers]) / size_of_mentor2_answers, + "name": "asd2", + "score": sum([x["score"] for x in mentor2_answers]) / size_of_mentor2_answers, }, ], - 'total': - sum([x.score for x in model.answer]) / size_of_answers, + "total": sum([x.score for x in model.answer]) / size_of_answers, } self.assertEqual(json, expected) - self.assertEqual(self.bc.database.list_of('feedback.Survey'), [ - { - **self.bc.format.to_dict(model.survey), - 'response_rate': model.survey.response_rate, - }, - ]) self.assertEqual( - self.bc.database.list_of('feedback.Answer'), + self.bc.database.list_of("feedback.Survey"), + [ + { + **self.bc.format.to_dict(model.survey), + "response_rate": model.survey.response_rate, + }, + ], + ) + self.assertEqual( + self.bc.database.list_of("feedback.Answer"), self.bc.format.to_dict(model.answer), ) diff --git a/breathecode/feedback/tests/actions/tests_get_answer_avg.py 
b/breathecode/feedback/tests/actions/tests_get_answer_avg.py index 02bb317e7..697aee7dc 100644 --- a/breathecode/feedback/tests/actions/tests_get_answer_avg.py +++ b/breathecode/feedback/tests/actions/tests_get_answer_avg.py @@ -1,6 +1,7 @@ """ Test /answer """ + from ..mixins import FeedbackTestCase from ...actions import get_student_answer_avg @@ -12,12 +13,10 @@ class AnswerTestSuite(FeedbackTestCase): def test_get_answer_avg(self): - model = self.generate_models(authenticate=True, - answer=True, - profile_academy=True, - answer_status='ANSWERED', - answer_score=8) + model = self.generate_models( + authenticate=True, answer=True, profile_academy=True, answer_status="ANSWERED", answer_score=8 + ) - average = get_student_answer_avg(model['user'].id, model['answer'].cohort.id) + average = get_student_answer_avg(model["user"].id, model["answer"].cohort.id) - self.assertEqual(average, model['answer'].score) + self.assertEqual(average, model["answer"].score) diff --git a/breathecode/feedback/tests/actions/tests_send_question.py b/breathecode/feedback/tests/actions/tests_send_question.py index ec4ba3740..d27141639 100644 --- a/breathecode/feedback/tests/actions/tests_send_question.py +++ b/breathecode/feedback/tests/actions/tests_send_question.py @@ -1,6 +1,7 @@ """ Test /answer """ + import random from unittest.mock import MagicMock, patch @@ -22,25 +23,25 @@ class SendSurveyTestSuite(FeedbackTestCase): 🔽🔽🔽 Without Cohort """ - @patch(MAILGUN_PATH['post'], apply_mailgun_requests_post_mock()) - @patch(SLACK_PATH['request'], apply_slack_requests_request_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(MAILGUN_PATH["post"], apply_mailgun_requests_post_mock()) + @patch(SLACK_PATH["request"], apply_slack_requests_request_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_send_question__without_cohort(self): - mock_mailgun = MAILGUN_INSTANCES['post'] + mock_mailgun = MAILGUN_INSTANCES["post"] mock_mailgun.call_args_list = [] - mock_slack = SLACK_INSTANCES['request'] + mock_slack = SLACK_INSTANCES["request"] mock_slack.call_args_list = [] model = self.bc.database.create(user=True) try: - send_question(model['user']) + send_question(model["user"]) except Exception as e: - self.assertEqual(str(e), 'without-cohort-or-cannot-determine-cohort') + self.assertEqual(str(e), "without-cohort-or-cannot-determine-cohort") - self.assertEqual(self.bc.database.list_of('feedback.Answer'), []) + self.assertEqual(self.bc.database.list_of("feedback.Answer"), []) self.assertEqual(mock_mailgun.call_args_list, []) self.assertEqual(mock_slack.call_args_list, []) @@ -51,32 +52,32 @@ def test_send_question__without_cohort(self): 🔽🔽🔽 Can't determine the Cohort """ - @patch(MAILGUN_PATH['post'], apply_mailgun_requests_post_mock()) - @patch(SLACK_PATH['request'], apply_slack_requests_request_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch(MAILGUN_PATH["post"], apply_mailgun_requests_post_mock()) + @patch(SLACK_PATH["request"], apply_slack_requests_request_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", 
MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_send_question__with_same_user_in_two_cohort(self): - mock_mailgun = MAILGUN_INSTANCES['post'] + mock_mailgun = MAILGUN_INSTANCES["post"] mock_mailgun.call_args_list = [] - mock_slack = SLACK_INSTANCES['request'] + mock_slack = SLACK_INSTANCES["request"] mock_slack.call_args_list = [] - cohort_user = {'educational_status': random.choice(['POSTPONED', 'SUSPENDED', 'DROPPED'])} + cohort_user = {"educational_status": random.choice(["POSTPONED", "SUSPENDED", "DROPPED"])} model1 = self.bc.database.create(cohort_user=cohort_user) base = model1.copy() - del base['cohort_user'] + del base["cohort_user"] self.bc.database.create(cohort_user=cohort_user, models=base) try: - send_question(model1['user']) + send_question(model1["user"]) except Exception as e: - self.assertEqual(str(e), 'without-cohort-or-cannot-determine-cohort') + self.assertEqual(str(e), "without-cohort-or-cannot-determine-cohort") - self.assertEqual(self.bc.database.list_of('feedback.Answer'), []) + self.assertEqual(self.bc.database.list_of("feedback.Answer"), []) self.assertEqual(mock_mailgun.call_args_list, []) self.assertEqual(mock_slack.call_args_list, []) @@ -87,454 +88,478 @@ def test_send_question__with_same_user_in_two_cohort(self): 🔽🔽🔽 Cohort without SyllabusVersion """ - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch(MAILGUN_PATH['post'], apply_mailgun_requests_post_mock()) - @patch(SLACK_PATH['request'], apply_slack_requests_request_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch(MAILGUN_PATH["post"], apply_mailgun_requests_post_mock()) + @patch(SLACK_PATH["request"], apply_slack_requests_request_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_send_question__cohort_without_syllabus_version(self): - statuses = ['ACTIVE', 'GRADUATED'] + statuses = ["ACTIVE", "GRADUATED"] for n in range(0, 2): c = statuses[n] - cohort_user = {'educational_status': c} + cohort_user = {"educational_status": c} - mock_mailgun = MAILGUN_INSTANCES['post'] + mock_mailgun = MAILGUN_INSTANCES["post"] mock_mailgun.call_args_list = [] - mock_slack = SLACK_INSTANCES['request'] + mock_slack = SLACK_INSTANCES["request"] mock_slack.call_args_list = [] model = self.bc.database.create(user=True, cohort_user=cohort_user) try: - send_question(model['user']) + send_question(model["user"]) except Exception as e: message = str(e) - self.assertEqual(message, 'cohort-without-syllabus-version') - - translations = strings[model['cohort'].language] - expected = [{ - 'id': n + 1, - 'title': '', - 'lowest': translations['event']['lowest'], - 'highest': translations['event']['highest'], - 'lang': 'en', - 'event_id': None, - 'mentor_id': None, - 'cohort_id': n + 1, - 'academy_id': None, - 'token_id': None, - 'score': None, - 'comment': None, - 'mentorship_session_id': None, - 'sent_at': None, - 'survey_id': None, - 'status': 'PENDING', - 'user_id': n + 1, - 'opened_at': None, - }] - - 
self.assertEqual(self.bc.database.list_of('feedback.Answer'), expected) + self.assertEqual(message, "cohort-without-syllabus-version") + + translations = strings[model["cohort"].language] + expected = [ + { + "id": n + 1, + "title": "", + "lowest": translations["event"]["lowest"], + "highest": translations["event"]["highest"], + "lang": "en", + "event_id": None, + "mentor_id": None, + "cohort_id": n + 1, + "academy_id": None, + "token_id": None, + "score": None, + "comment": None, + "mentorship_session_id": None, + "sent_at": None, + "survey_id": None, + "status": "PENDING", + "user_id": n + 1, + "opened_at": None, + } + ] + + self.assertEqual(self.bc.database.list_of("feedback.Answer"), expected) self.assertEqual(mock_mailgun.call_args_list, []) self.assertEqual(mock_slack.call_args_list, []) mock_mailgun.call_args_list = [] mock_slack.call_args_list = [] - self.bc.database.delete('feedback.Answer') + self.bc.database.delete("feedback.Answer") """ 🔽🔽🔽 Cohort without SyllabusSchedule """ - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch(MAILGUN_PATH['post'], apply_mailgun_requests_post_mock()) - @patch(SLACK_PATH['request'], apply_slack_requests_request_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch(MAILGUN_PATH["post"], apply_mailgun_requests_post_mock()) + @patch(SLACK_PATH["request"], apply_slack_requests_request_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_send_question__cohort_without_syllabus_schedule(self): - statuses = ['ACTIVE', 'GRADUATED'] + statuses = ["ACTIVE", "GRADUATED"] for n in range(0, 2): c = statuses[n] - cohort_user = {'educational_status': c} + cohort_user = {"educational_status": c} - mock_mailgun = MAILGUN_INSTANCES['post'] + mock_mailgun = MAILGUN_INSTANCES["post"] mock_mailgun.call_args_list = [] - mock_slack = SLACK_INSTANCES['request'] + mock_slack = SLACK_INSTANCES["request"] mock_slack.call_args_list = [] model = self.bc.database.create(user=True, cohort_user=cohort_user, syllabus_version=True) try: - send_question(model['user']) + send_question(model["user"]) except Exception as e: message = str(e) - self.assertEqual(message, 'cohort-without-specialty-mode') - - translations = strings[model['cohort'].language] - expected = [{ - 'id': n + 1, - 'title': '', - 'lowest': translations['event']['lowest'], - 'highest': translations['event']['highest'], - 'lang': 'en', - 'event_id': None, - 'mentor_id': None, - 'cohort_id': n + 1, - 'academy_id': None, - 'token_id': None, - 'score': None, - 'comment': None, - 'mentorship_session_id': None, - 'sent_at': None, - 'survey_id': None, - 'status': 'PENDING', - 'user_id': n + 1, - 'opened_at': None, - }] - - self.assertEqual(self.bc.database.list_of('feedback.Answer'), expected) + self.assertEqual(message, "cohort-without-specialty-mode") + + translations = strings[model["cohort"].language] + expected = [ + { + "id": n + 1, + "title": "", + "lowest": translations["event"]["lowest"], + "highest": translations["event"]["highest"], + "lang": "en", + "event_id": None, + "mentor_id": None, + "cohort_id": n + 1, + "academy_id": None, + 
"token_id": None, + "score": None, + "comment": None, + "mentorship_session_id": None, + "sent_at": None, + "survey_id": None, + "status": "PENDING", + "user_id": n + 1, + "opened_at": None, + } + ] + + self.assertEqual(self.bc.database.list_of("feedback.Answer"), expected) self.assertEqual(mock_mailgun.call_args_list, []) self.assertEqual(mock_slack.call_args_list, []) mock_mailgun.call_args_list = [] mock_slack.call_args_list = [] - self.bc.database.delete('feedback.Answer') + self.bc.database.delete("feedback.Answer") """ 🔽🔽🔽 Answer are generate and send in a email """ - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch(MAILGUN_PATH['post'], apply_mailgun_requests_post_mock()) - @patch(SLACK_PATH['request'], apply_slack_requests_request_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch(MAILGUN_PATH["post"], apply_mailgun_requests_post_mock()) + @patch(SLACK_PATH["request"], apply_slack_requests_request_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_send_question__just_send_by_email(self): - statuses = ['ACTIVE', 'GRADUATED'] + statuses = ["ACTIVE", "GRADUATED"] for n in range(0, 2): c = statuses[n] - cohort_user = {'educational_status': c} + cohort_user = {"educational_status": c} - mock_mailgun = MAILGUN_INSTANCES['post'] + mock_mailgun = MAILGUN_INSTANCES["post"] mock_mailgun.call_args_list = [] - mock_slack = SLACK_INSTANCES['request'] + mock_slack = SLACK_INSTANCES["request"] mock_slack.call_args_list = [] - model = self.bc.database.create(user=True, - cohort_user=cohort_user, - syllabus_version=True, - syllabus_schedule=True, - syllabus={'name': self.bc.fake.name()}) + model = self.bc.database.create( + user=True, + cohort_user=cohort_user, + syllabus_version=True, + syllabus_schedule=True, + syllabus={"name": self.bc.fake.name()}, + ) certificate = model.syllabus.name - send_question(model['user']) - - expected = [{ - 'academy_id': None, - 'cohort_id': n + 1, - 'comment': None, - 'event_id': None, - 'highest': 'very good', - 'id': n + 1, - 'lang': 'en', - 'lowest': 'not good', - 'mentor_id': None, - 'mentorship_session_id': None, - 'opened_at': None, - 'sent_at': None, - 'score': None, - 'status': 'SENT', - 'survey_id': None, - 'title': f'How has been your experience studying {certificate} so far?', - 'token_id': n + 1, - 'user_id': n + 1, - }] - - dicts = self.bc.database.list_of('feedback.Answer') + send_question(model["user"]) + + expected = [ + { + "academy_id": None, + "cohort_id": n + 1, + "comment": None, + "event_id": None, + "highest": "very good", + "id": n + 1, + "lang": "en", + "lowest": "not good", + "mentor_id": None, + "mentorship_session_id": None, + "opened_at": None, + "sent_at": None, + "score": None, + "status": "SENT", + "survey_id": None, + "title": f"How has been your experience studying {certificate} so far?", + "token_id": n + 1, + "user_id": n + 1, + } + ] + + dicts = self.bc.database.list_of("feedback.Answer") self.assertEqual(dicts, expected) self.assertEqual(self.count_token(), 1) - self.check_email_contain_a_correct_token('en', dicts, mock_mailgun, model) + 
self.check_email_contain_a_correct_token("en", dicts, mock_mailgun, model) self.assertEqual(mock_slack.call_args_list, []) mock_mailgun.call_args_list = [] mock_slack.call_args_list = [] - self.bc.database.delete('feedback.Answer') - self.bc.database.delete('authenticate.Token') + self.bc.database.delete("feedback.Answer") + self.bc.database.delete("authenticate.Token") """ 🔽🔽🔽 Answer are generate and send in a email, passing cohort """ - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch(MAILGUN_PATH['post'], apply_mailgun_requests_post_mock()) - @patch(SLACK_PATH['request'], apply_slack_requests_request_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch(MAILGUN_PATH["post"], apply_mailgun_requests_post_mock()) + @patch(SLACK_PATH["request"], apply_slack_requests_request_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_send_question__just_send_by_email__passing_cohort(self): - statuses = ['ACTIVE', 'GRADUATED'] + statuses = ["ACTIVE", "GRADUATED"] for n in range(0, 2): c = statuses[n] - cohort_user = {'educational_status': c} + cohort_user = {"educational_status": c} - mock_mailgun = MAILGUN_INSTANCES['post'] + mock_mailgun = MAILGUN_INSTANCES["post"] mock_mailgun.call_args_list = [] - mock_slack = SLACK_INSTANCES['request'] + mock_slack = SLACK_INSTANCES["request"] mock_slack.call_args_list = [] - model = self.bc.database.create(user=True, - cohort_user=cohort_user, - syllabus_version=True, - syllabus_schedule=True, - syllabus={'name': self.bc.fake.name()}) + model = self.bc.database.create( + user=True, + cohort_user=cohort_user, + syllabus_version=True, + syllabus_schedule=True, + syllabus={"name": self.bc.fake.name()}, + ) certificate = model.syllabus.name send_question(model.user, model.cohort) - expected = [{ - 'academy_id': None, - 'cohort_id': n + 1, - 'comment': None, - 'event_id': None, - 'highest': 'very good', - 'id': n + 1, - 'lang': 'en', - 'lowest': 'not good', - 'mentor_id': None, - 'mentorship_session_id': None, - 'opened_at': None, - 'sent_at': None, - 'score': None, - 'status': 'SENT', - 'survey_id': None, - 'title': f'How has been your experience studying {certificate} so far?', - 'token_id': n + 1, - 'user_id': n + 1, - }] - - dicts = self.bc.database.list_of('feedback.Answer') + expected = [ + { + "academy_id": None, + "cohort_id": n + 1, + "comment": None, + "event_id": None, + "highest": "very good", + "id": n + 1, + "lang": "en", + "lowest": "not good", + "mentor_id": None, + "mentorship_session_id": None, + "opened_at": None, + "sent_at": None, + "score": None, + "status": "SENT", + "survey_id": None, + "title": f"How has been your experience studying {certificate} so far?", + "token_id": n + 1, + "user_id": n + 1, + } + ] + + dicts = self.bc.database.list_of("feedback.Answer") self.assertEqual(dicts, expected) self.assertEqual(self.count_token(), 1) - self.check_email_contain_a_correct_token('en', dicts, mock_mailgun, model) + self.check_email_contain_a_correct_token("en", dicts, mock_mailgun, model) self.assertEqual(mock_slack.call_args_list, []) mock_mailgun.call_args_list = [] 
mock_slack.call_args_list = [] - self.bc.database.delete('feedback.Answer') - self.bc.database.delete('authenticate.Token') + self.bc.database.delete("feedback.Answer") + self.bc.database.delete("authenticate.Token") """ 🔽🔽🔽 Answer are generate and send in a email and slack """ - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch(MAILGUN_PATH['post'], apply_mailgun_requests_post_mock()) - @patch(SLACK_PATH['request'], apply_slack_requests_request_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch(MAILGUN_PATH["post"], apply_mailgun_requests_post_mock()) + @patch(SLACK_PATH["request"], apply_slack_requests_request_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_send_question__send_by_email_and_slack(self): - statuses = ['ACTIVE', 'GRADUATED'] + statuses = ["ACTIVE", "GRADUATED"] for n in range(0, 2): c = statuses[n] - cohort_user = {'educational_status': c} + cohort_user = {"educational_status": c} - mock_mailgun = MAILGUN_INSTANCES['post'] + mock_mailgun = MAILGUN_INSTANCES["post"] mock_mailgun.call_args_list = [] - mock_slack = SLACK_INSTANCES['request'] + mock_slack = SLACK_INSTANCES["request"] mock_slack.call_args_list = [] - cohort_kwargs = {'language': 'en'} - model = self.bc.database.create(user=True, - cohort_user=cohort_user, - slack_user=True, - slack_team=True, - credentials_slack=True, - academy=True, - syllabus_version=True, - syllabus_schedule=True, - cohort_kwargs=cohort_kwargs, - syllabus={'name': self.bc.fake.name()}) + cohort_kwargs = {"language": "en"} + model = self.bc.database.create( + user=True, + cohort_user=cohort_user, + slack_user=True, + slack_team=True, + credentials_slack=True, + academy=True, + syllabus_version=True, + syllabus_schedule=True, + cohort_kwargs=cohort_kwargs, + syllabus={"name": self.bc.fake.name()}, + ) certificate = model.syllabus.name - send_question(model['user']) - - expected = [{ - 'id': n + 1, - 'title': f'How has been your experience studying {certificate} so far?', - 'lowest': 'not good', - 'highest': 'very good', - 'lang': 'en', - 'cohort_id': n + 1, - 'academy_id': None, - 'mentor_id': None, - 'event_id': None, - 'token_id': n + 1, - 'mentorship_session_id': None, - 'sent_at': None, - 'score': None, - 'comment': None, - 'survey_id': None, - 'status': 'SENT', - 'user_id': n + 1, - 'opened_at': None, - }] - - dicts = [answer for answer in self.bc.database.list_of('feedback.Answer')] + send_question(model["user"]) + + expected = [ + { + "id": n + 1, + "title": f"How has been your experience studying {certificate} so far?", + "lowest": "not good", + "highest": "very good", + "lang": "en", + "cohort_id": n + 1, + "academy_id": None, + "mentor_id": None, + "event_id": None, + "token_id": n + 1, + "mentorship_session_id": None, + "sent_at": None, + "score": None, + "comment": None, + "survey_id": None, + "status": "SENT", + "user_id": n + 1, + "opened_at": None, + } + ] + + dicts = [answer for answer in self.bc.database.list_of("feedback.Answer")] self.assertEqual(dicts, expected) - self.check_email_contain_a_correct_token('en', dicts, mock_mailgun, model) - 
self.check_slack_contain_a_correct_token('en', dicts, mock_slack, model, answer_id=model.user.id) + self.check_email_contain_a_correct_token("en", dicts, mock_mailgun, model) + self.check_slack_contain_a_correct_token("en", dicts, mock_slack, model, answer_id=model.user.id) mock_mailgun.call_args_list = [] mock_slack.call_args_list = [] - self.bc.database.delete('feedback.Answer') - self.bc.database.delete('authenticate.Token') + self.bc.database.delete("feedback.Answer") + self.bc.database.delete("authenticate.Token") """ 🔽🔽🔽 Send question in english """ - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch(MAILGUN_PATH['post'], apply_mailgun_requests_post_mock()) - @patch(SLACK_PATH['request'], apply_slack_requests_request_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch(MAILGUN_PATH["post"], apply_mailgun_requests_post_mock()) + @patch(SLACK_PATH["request"], apply_slack_requests_request_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_send_question__with_cohort_lang_en(self): - statuses = ['ACTIVE', 'GRADUATED'] + statuses = ["ACTIVE", "GRADUATED"] for n in range(0, 2): c = statuses[n] - cohort_user = {'educational_status': c} + cohort_user = {"educational_status": c} - mock_mailgun = MAILGUN_INSTANCES['post'] + mock_mailgun = MAILGUN_INSTANCES["post"] mock_mailgun.call_args_list = [] - mock_slack = SLACK_INSTANCES['request'] + mock_slack = SLACK_INSTANCES["request"] mock_slack.call_args_list = [] - cohort_kwargs = {'language': 'en'} - model = self.bc.database.create(user=True, - cohort_user=cohort_user, - slack_user=True, - slack_team=True, - credentials_slack=True, - academy=True, - slack_team_owner=True, - syllabus_version=True, - syllabus_schedule=True, - cohort_kwargs=cohort_kwargs, - syllabus={'name': self.bc.fake.name()}) + cohort_kwargs = {"language": "en"} + model = self.bc.database.create( + user=True, + cohort_user=cohort_user, + slack_user=True, + slack_team=True, + credentials_slack=True, + academy=True, + slack_team_owner=True, + syllabus_version=True, + syllabus_schedule=True, + cohort_kwargs=cohort_kwargs, + syllabus={"name": self.bc.fake.name()}, + ) certificate = model.syllabus.name - send_question(model['user']) - - expected = [{ - 'id': n + 1, - 'title': f'How has been your experience studying {certificate} so far?', - 'lowest': 'not good', - 'highest': 'very good', - 'lang': 'en', - 'cohort_id': n + 1, - 'academy_id': None, - 'mentor_id': None, - 'event_id': None, - 'mentorship_session_id': None, - 'sent_at': None, - 'token_id': n + 1, - 'score': None, - 'comment': None, - 'status': 'SENT', - 'user_id': n + 1, - 'survey_id': None, - 'opened_at': None, - }] - - dicts = self.bc.database.list_of('feedback.Answer') + send_question(model["user"]) + + expected = [ + { + "id": n + 1, + "title": f"How has been your experience studying {certificate} so far?", + "lowest": "not good", + "highest": "very good", + "lang": "en", + "cohort_id": n + 1, + "academy_id": None, + "mentor_id": None, + "event_id": None, + "mentorship_session_id": None, + "sent_at": None, + "token_id": n + 1, + "score": None, + 
"comment": None, + "status": "SENT", + "user_id": n + 1, + "survey_id": None, + "opened_at": None, + } + ] + + dicts = self.bc.database.list_of("feedback.Answer") self.assertEqual(dicts, expected) - self.check_email_contain_a_correct_token('en', dicts, mock_mailgun, model) - self.check_slack_contain_a_correct_token('en', dicts, mock_slack, model, answer_id=model.user.id) + self.check_email_contain_a_correct_token("en", dicts, mock_mailgun, model) + self.check_slack_contain_a_correct_token("en", dicts, mock_slack, model, answer_id=model.user.id) mock_mailgun.call_args_list = [] mock_slack.call_args_list = [] - self.bc.database.delete('feedback.Answer') - self.bc.database.delete('authenticate.Token') + self.bc.database.delete("feedback.Answer") + self.bc.database.delete("authenticate.Token") """ 🔽🔽🔽 Send question in spanish """ - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch(MAILGUN_PATH['post'], apply_mailgun_requests_post_mock()) - @patch(SLACK_PATH['request'], apply_slack_requests_request_mock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch(MAILGUN_PATH["post"], apply_mailgun_requests_post_mock()) + @patch(SLACK_PATH["request"], apply_slack_requests_request_mock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_send_question__with_cohort_lang_es(self): - statuses = ['ACTIVE', 'GRADUATED'] + statuses = ["ACTIVE", "GRADUATED"] for n in range(0, 2): c = statuses[n] - cohort_user = {'educational_status': c} + cohort_user = {"educational_status": c} - mock_mailgun = MAILGUN_INSTANCES['post'] + mock_mailgun = MAILGUN_INSTANCES["post"] mock_mailgun.call_args_list = [] - mock_slack = SLACK_INSTANCES['request'] + mock_slack = SLACK_INSTANCES["request"] mock_slack.call_args_list = [] - cohort_kwargs = {'language': 'es'} - model = self.bc.database.create(user=True, - cohort_user=cohort_user, - slack_user=True, - slack_team=True, - credentials_slack=True, - academy=True, - slack_team_owner=True, - syllabus_version=True, - syllabus_schedule=True, - cohort_kwargs=cohort_kwargs, - syllabus={'name': self.bc.fake.name()}) + cohort_kwargs = {"language": "es"} + model = self.bc.database.create( + user=True, + cohort_user=cohort_user, + slack_user=True, + slack_team=True, + credentials_slack=True, + academy=True, + slack_team_owner=True, + syllabus_version=True, + syllabus_schedule=True, + cohort_kwargs=cohort_kwargs, + syllabus={"name": self.bc.fake.name()}, + ) certificate = model.syllabus.name - send_question(model['user']) - - expected = [{ - 'academy_id': None, - 'cohort_id': n + 1, - 'comment': None, - 'event_id': None, - 'highest': 'muy buena', - 'id': n + 1, - 'lang': 'es', - 'lowest': 'mala', - 'mentor_id': None, - 'mentorship_session_id': None, - 'sent_at': None, - 'opened_at': None, - 'score': None, - 'status': 'SENT', - 'survey_id': None, - 'title': f'¿Cómo ha sido tu experiencia estudiando {certificate}?', - 'token_id': n + 1, - 'user_id': n + 1, - }] - - dicts = self.bc.database.list_of('feedback.Answer') + send_question(model["user"]) + + expected = [ + { + "academy_id": None, + "cohort_id": n + 1, + "comment": None, + 
"event_id": None, + "highest": "muy buena", + "id": n + 1, + "lang": "es", + "lowest": "mala", + "mentor_id": None, + "mentorship_session_id": None, + "sent_at": None, + "opened_at": None, + "score": None, + "status": "SENT", + "survey_id": None, + "title": f"¿Cómo ha sido tu experiencia estudiando {certificate}?", + "token_id": n + 1, + "user_id": n + 1, + } + ] + + dicts = self.bc.database.list_of("feedback.Answer") self.assertEqual(dicts, expected) self.assertEqual(self.count_token(), 1) - self.check_email_contain_a_correct_token('es', dicts, mock_mailgun, model) - self.check_slack_contain_a_correct_token('es', dicts, mock_slack, model, answer_id=model.user.id) + self.check_email_contain_a_correct_token("es", dicts, mock_mailgun, model) + self.check_slack_contain_a_correct_token("es", dicts, mock_slack, model, answer_id=model.user.id) mock_mailgun.call_args_list = [] mock_slack.call_args_list = [] - self.bc.database.delete('feedback.Answer') - self.bc.database.delete('authenticate.Token') + self.bc.database.delete("feedback.Answer") + self.bc.database.delete("authenticate.Token") diff --git a/breathecode/feedback/tests/actions/tests_send_survey_group.py b/breathecode/feedback/tests/actions/tests_send_survey_group.py index 670d9900f..5630f03c6 100644 --- a/breathecode/feedback/tests/actions/tests_send_survey_group.py +++ b/breathecode/feedback/tests/actions/tests_send_survey_group.py @@ -1,6 +1,7 @@ """ Test /answer """ + from datetime import timedelta from unittest.mock import MagicMock, call, patch @@ -17,54 +18,54 @@ class AnswerTestSuite(FeedbackTestCase): - @patch('breathecode.feedback.tasks.send_cohort_survey.delay', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.feedback.tasks.send_cohort_survey.delay", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_send_survey_group(self): - with self.assertRaisesMessage(ValidationException, 'missing-survey-or-cohort'): + with self.assertRaisesMessage(ValidationException, "missing-survey-or-cohort"): send_survey_group() self.assertEqual(tasks.send_cohort_survey.delay.call_args_list, []) - @patch('breathecode.feedback.tasks.send_cohort_survey.delay', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.feedback.tasks.send_cohort_survey.delay", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_when_survey_and_cohort_do_not_match(self): model = self.generate_models(cohort=2, survey=1) - with self.assertRaisesMessage(ValidationException, 'survey-does-not-match-cohort'): + with self.assertRaisesMessage(ValidationException, "survey-does-not-match-cohort"): send_survey_group(model.survey, model.cohort[1]) self.assertEqual(tasks.send_cohort_survey.delay.call_args_list, []) - @patch('breathecode.feedback.tasks.send_cohort_survey.delay', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - 
@patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.feedback.tasks.send_cohort_survey.delay", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_when_cohort_does_not_have_teacher_assigned_to_survey(self): - wrong_roles = ['ASSISTANT', 'REVIEWER', 'STUDENT'] + wrong_roles = ["ASSISTANT", "REVIEWER", "STUDENT"] for role in wrong_roles: - model = self.generate_models(cohort=1, survey=1, cohort_user={'role': role}) + model = self.generate_models(cohort=1, survey=1, cohort_user={"role": role}) - with self.assertRaisesMessage(ValidationException, 'cohort-must-have-teacher-assigned-to-survey'): + with self.assertRaisesMessage(ValidationException, "cohort-must-have-teacher-assigned-to-survey"): send_survey_group(model.survey, model.cohort) self.assertEqual(tasks.send_cohort_survey.delay.call_args_list, []) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('breathecode.feedback.tasks.send_cohort_survey.delay', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("breathecode.feedback.tasks.send_cohort_survey.delay", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_when_educational_status_is_active_or_graduated(self): - statuses = ['ACTIVE', 'GRADUATED'] + statuses = ["ACTIVE", "GRADUATED"] for status in statuses: - cohort_user = [{'role': 'TEACHER'}, {'role': 'STUDENT', 'educational_status': status}] + cohort_user = [{"role": "TEACHER"}, {"role": "STUDENT", "educational_status": status}] model = self.generate_models(cohort=1, survey=1, cohort_user=cohort_user) @@ -72,37 +73,41 @@ def test_when_educational_status_is_active_or_graduated(self): result = send_survey_group(model.survey, model.cohort) - expected = {'success': [f'Survey scheduled to send for {model.user.email}'], 'error': []} + expected = {"success": [f"Survey scheduled to send for {model.user.email}"], "error": []} self.assertEqual(result, expected) self.assertEqual( - self.bc.database.list_of('feedback.Survey'), - [{ - **survey, 'sent_at': UTC_NOW, - 'status': 'SENT', - 'status_json': '{' - f'"success": ["Survey scheduled to send for {model.user.email}"], "error": []' - '}' - }]) - - self.bc.database.delete('feedback.Survey') + self.bc.database.list_of("feedback.Survey"), + [ + { + **survey, + "sent_at": UTC_NOW, + "status": "SENT", + "status_json": "{" + f'"success": ["Survey scheduled to send for {model.user.email}"], "error": []' + "}", + } + ], + ) + + self.bc.database.delete("feedback.Survey") self.assertEqual(tasks.send_cohort_survey.delay.call_args_list, [call(model.user.id, model.survey.id)]) tasks.send_cohort_survey.delay.call_args_list = [] - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - 
@patch('breathecode.feedback.tasks.send_cohort_survey.delay', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("breathecode.feedback.tasks.send_cohort_survey.delay", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_when_educational_status_is_all_of_the_others_error(self): - statuses = ['POSTPONED', 'SUSPENDED', 'DROPPED'] + statuses = ["POSTPONED", "SUSPENDED", "DROPPED"] for status in statuses: - self.bc.database.delete('feedback.Survey') - cohort_user = [{'role': 'TEACHER'}, {'role': 'STUDENT', 'educational_status': status}] + self.bc.database.delete("feedback.Survey") + cohort_user = [{"role": "TEACHER"}, {"role": "STUDENT", "educational_status": status}] model = self.generate_models(cohort=1, survey=1, cohort_user=cohort_user) @@ -111,39 +116,38 @@ def test_when_educational_status_is_all_of_the_others_error(self): result = send_survey_group(model.survey, model.cohort) expected = { - 'success': [], - 'error': [f"Survey NOT sent to {model.user.email} because it's not an active or graduated student"] + "success": [], + "error": [f"Survey NOT sent to {model.user.email} because it's not an active or graduated student"], } self.assertEqual(result, expected) - self.assertEqual(self.bc.database.list_of('feedback.Survey'), [{ - **survey, 'sent_at': - UTC_NOW, - 'status': - 'FATAL', - 'status_json': - '{"success": [], "error": ["Survey NOT sent to ' - f"{model.user.email} because it's not an active or graduated student\"]" - '}' - }]) + self.assertEqual( + self.bc.database.list_of("feedback.Survey"), + [ + { + **survey, + "sent_at": UTC_NOW, + "status": "FATAL", + "status_json": '{"success": [], "error": ["Survey NOT sent to ' + f"{model.user.email} because it's not an active or graduated student\"]" + "}", + } + ], + ) self.assertEqual(tasks.send_cohort_survey.delay.call_args_list, []) - @patch('breathecode.feedback.tasks.send_cohort_survey.delay', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.feedback.tasks.send_cohort_survey.delay", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_when_some_cases_are_successful_and_some_are_error(self): - cohort_users = [{ - 'role': 'TEACHER' - }, { - 'role': 'STUDENT', - 'educational_status': 'ACTIVE' - }, { - 'role': 'STUDENT', - 'educational_status': 'SUSPENDED' - }] + cohort_users = [ + {"role": "TEACHER"}, + {"role": "STUDENT", "educational_status": "ACTIVE"}, + {"role": "STUDENT", "educational_status": "SUSPENDED"}, + ] model = self.generate_models(cohort=1, survey=1, 
cohort_user=cohort_users) @@ -152,81 +156,79 @@ def test_when_some_cases_are_successful_and_some_are_error(self): result = send_survey_group(model.survey, model.cohort) expected = { - 'success': [f'Survey scheduled to send for {model.user.email}'], - 'error': [f"Survey NOT sent to {model.user.email} because it's not an active or graduated student"] + "success": [f"Survey scheduled to send for {model.user.email}"], + "error": [f"Survey NOT sent to {model.user.email} because it's not an active or graduated student"], } self.assertEqual(result, expected) - self.assertEqual(self.bc.database.list_of('feedback.Survey'), [{ - **survey, 'sent_at': - UTC_NOW, - 'status': - 'PARTIAL', - 'status_json': - '{' - f'"success": ["Survey scheduled to send for {model.user.email}"], "error": ["Survey NOT sent to ' - f'{model.user.email} because it\'s not an active or graduated student"]' - '}' - }]) - - self.bc.database.delete('feedback.Survey') + self.assertEqual( + self.bc.database.list_of("feedback.Survey"), + [ + { + **survey, + "sent_at": UTC_NOW, + "status": "PARTIAL", + "status_json": "{" + f'"success": ["Survey scheduled to send for {model.user.email}"], "error": ["Survey NOT sent to ' + f"{model.user.email} because it's not an active or graduated student\"]" + "}", + } + ], + ) + + self.bc.database.delete("feedback.Survey") self.assertEqual(tasks.send_cohort_survey.delay.call_args_list, [call(model.user.id, model.survey.id)]) tasks.send_cohort_survey.delay.call_args_list = [] - @patch('breathecode.feedback.tasks.send_cohort_survey.delay', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.feedback.tasks.send_cohort_survey.delay", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_when_survey_is_none(self): - model = self.generate_models(cohort=1, - cohort_user=[ - { - 'role': 'TEACHER', - 'educational_status': 'SUSPENDED', - }, - { - 'role': 'STUDENT', - 'educational_status': 'SUSPENDED', - }, - ]) + model = self.generate_models( + cohort=1, + cohort_user=[ + { + "role": "TEACHER", + "educational_status": "SUSPENDED", + }, + { + "role": "STUDENT", + "educational_status": "SUSPENDED", + }, + ], + ) result = send_survey_group(cohort=model.cohort) expected = { - 'success': [], - 'error': [f"Survey NOT sent to {model.user.email} because it's not an active or graduated student"] + "success": [], + "error": [f"Survey NOT sent to {model.user.email} because it's not an active or graduated student"], } self.assertEqual(result, expected) - self.assertEqual(self.bc.database.list_of('feedback.Survey'), [{ - 'sent_at': - UTC_NOW, - 'status': - 'FATAL', - 'status_json': - '{"success": [], "error": ["Survey NOT sent to ' - f"{model.user.email} because it's not an active or graduated student\"]" - '}', - 'scores': - None, - 'cohort_id': - 1, - 'duration': - timedelta(days=1), - 'id': - 1, - 'lang': - 'en', - 'max_assistants_to_ask': - 2, - 'max_teachers_to_ask': - 1, - 'response_rate': - None, - 'scores': - None, - }]) + self.assertEqual( + self.bc.database.list_of("feedback.Survey"), + [ + { + "sent_at": UTC_NOW, + 
"status": "FATAL", + "status_json": '{"success": [], "error": ["Survey NOT sent to ' + f"{model.user.email} because it's not an active or graduated student\"]" + "}", + "scores": None, + "cohort_id": 1, + "duration": timedelta(days=1), + "id": 1, + "lang": "en", + "max_assistants_to_ask": 2, + "max_teachers_to_ask": 1, + "response_rate": None, + "scores": None, + } + ], + ) self.assertEqual(tasks.send_cohort_survey.delay.call_args_list, []) diff --git a/breathecode/feedback/tests/admin/deleted_tests_send_cohort_bulk_survey.py b/breathecode/feedback/tests/admin/deleted_tests_send_cohort_bulk_survey.py index 412499b6d..1de26498e 100644 --- a/breathecode/feedback/tests/admin/deleted_tests_send_cohort_bulk_survey.py +++ b/breathecode/feedback/tests/admin/deleted_tests_send_cohort_bulk_survey.py @@ -1,6 +1,7 @@ """ Test /answer """ + from datetime import datetime from unittest.mock import patch from django.http.request import HttpRequest @@ -19,173 +20,196 @@ class SendSurveyTestSuite(FeedbackTestCase): """Test /answer""" - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_send_cohort_bulk_survey_without_cohort(self): """Test /answer without auth""" request = HttpRequest() self.assertEqual(send_cohort_bulk_survey(None, request, Cohort.objects.all()), None) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_send_cohort_bulk_survey_with_educational_status_active(self): """Test /answer without auth""" request = HttpRequest() - cohort_kwargs = {'language': 'en'} - cohort_user_kwargs = {'role': 'STUDENT', 'educational_status': 'ACTIVE'} + cohort_kwargs = {"language": "en"} + cohort_user_kwargs = {"role": "STUDENT", "educational_status": "ACTIVE"} models = [ - self.generate_models(user=True, - cohort_user=True, - profile_academy=True, - cohort_user_kwargs=cohort_user_kwargs, - cohort_kwargs=cohort_kwargs) for _ in range(0, 3) + self.generate_models( + user=True, + cohort_user=True, + profile_academy=True, + cohort_user_kwargs=cohort_user_kwargs, + cohort_kwargs=cohort_kwargs, + ) + for _ in range(0, 3) ] - _cohorts = [(models[key]['cohort'].certificate.name, key + 1) for key in range(0, 3)] + _cohorts = [(models[key]["cohort"].certificate.name, key + 1) for key in range(0, 3)] self.assertEqual(send_cohort_bulk_survey(None, request, Cohort.objects.all()), None) - expected = [{ - 'academy_id': None, - 'cohort_id': key, - 'comment': None, - 'event_id': None, - 'highest': 'very good', - 'id': key, - 'lang': 'en', - 'lowest': 'not good', - 'mentor_id': None, - 'opened_at': None, - 'score': None, - 'status': 'SENT', - 'survey_id': None, - 'title': f'How has been your experience studying {c} so far?', - 'token_id': key, - 'user_id': key, - } for c, key in _cohorts] + expected = [ + { + "academy_id": None, + "cohort_id": key, + "comment": None, + "event_id": 
None, + "highest": "very good", + "id": key, + "lang": "en", + "lowest": "not good", + "mentor_id": None, + "opened_at": None, + "score": None, + "status": "SENT", + "survey_id": None, + "title": f"How has been your experience studying {c} so far?", + "token_id": key, + "user_id": key, + } + for c, key in _cohorts + ] dicts = [ - answer for answer in self.bc.database.list_of('feedback.Answer') - if isinstance(answer['created_at'], datetime) and answer.pop('created_at') + answer + for answer in self.bc.database.list_of("feedback.Answer") + if isinstance(answer["created_at"], datetime) and answer.pop("created_at") ] self.assertEqual(dicts, expected) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_send_cohort_bulk_survey_with_educational_status_graduated(self): """Test /answer without auth""" request = HttpRequest() - cohort_kwargs = {'language': 'en'} - cohort_user_kwargs = {'role': 'STUDENT', 'educational_status': 'GRADUATED'} + cohort_kwargs = {"language": "en"} + cohort_user_kwargs = {"role": "STUDENT", "educational_status": "GRADUATED"} models = [ - self.generate_models(user=True, - cohort_user=True, - profile_academy=True, - cohort_kwargs=cohort_kwargs, - cohort_user_kwargs=cohort_user_kwargs) for _ in range(0, 3) + self.generate_models( + user=True, + cohort_user=True, + profile_academy=True, + cohort_kwargs=cohort_kwargs, + cohort_user_kwargs=cohort_user_kwargs, + ) + for _ in range(0, 3) ] cohorts = Cohort.objects.all() self.assertEqual(send_cohort_bulk_survey(None, request, cohorts), None) - _cohorts = [(models[key]['cohort'].certificate.name, key + 1) for key in range(0, 3)] - expected = [{ - 'academy_id': None, - 'cohort_id': key, - 'comment': None, - 'event_id': None, - 'highest': 'very good', - 'id': key, - 'lang': 'en', - 'lowest': 'not good', - 'mentor_id': None, - 'opened_at': None, - 'score': None, - 'survey_id': None, - 'status': 'SENT', - 'title': f'How has been your experience studying {cohort} so far?', - 'token_id': key, - 'user_id': key, - } for cohort, key in _cohorts] + _cohorts = [(models[key]["cohort"].certificate.name, key + 1) for key in range(0, 3)] + expected = [ + { + "academy_id": None, + "cohort_id": key, + "comment": None, + "event_id": None, + "highest": "very good", + "id": key, + "lang": "en", + "lowest": "not good", + "mentor_id": None, + "opened_at": None, + "score": None, + "survey_id": None, + "status": "SENT", + "title": f"How has been your experience studying {cohort} so far?", + "token_id": key, + "user_id": key, + } + for cohort, key in _cohorts + ] dicts = [ - answer for answer in self.bc.database.list_of('feedback.Answer') - if isinstance(answer['created_at'], datetime) and answer.pop('created_at') + answer + for answer in self.bc.database.list_of("feedback.Answer") + if isinstance(answer["created_at"], datetime) and answer.pop("created_at") ] self.assertEqual(dicts, expected) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + 
@patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_send_cohort_bulk_survey_with_educational_status_postponed(self): """Test /answer without auth""" request = HttpRequest() - cohort_kwargs = {'language': 'en'} - cohort_user_kwargs = {'role': 'STUDENT', 'educational_status': 'POSTPONED'} + cohort_kwargs = {"language": "en"} + cohort_user_kwargs = {"role": "STUDENT", "educational_status": "POSTPONED"} for _ in range(0, 3): - self.generate_models(user=True, - cohort_user=True, - profile_academy=True, - cohort_kwargs=cohort_kwargs, - cohort_user_kwargs=cohort_user_kwargs) + self.generate_models( + user=True, + cohort_user=True, + profile_academy=True, + cohort_kwargs=cohort_kwargs, + cohort_user_kwargs=cohort_user_kwargs, + ) self.assertEqual(send_cohort_bulk_survey(None, request, Cohort.objects.all()), None) expected = [] dicts = [ - answer for answer in self.bc.database.list_of('feedback.Answer') - if isinstance(answer['created_at'], datetime) and answer.pop('created_at') + answer + for answer in self.bc.database.list_of("feedback.Answer") + if isinstance(answer["created_at"], datetime) and answer.pop("created_at") ] self.assertEqual(dicts, expected) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_send_cohort_bulk_survey_with_educational_status_suspended(self): """Test /answer without auth""" request = HttpRequest() - cohort_kwargs = {'language': 'en'} - cohort_user_kwargs = {'role': 'STUDENT', 'educational_status': 'SUSPENDED'} + cohort_kwargs = {"language": "en"} + cohort_user_kwargs = {"role": "STUDENT", "educational_status": "SUSPENDED"} for _ in range(0, 3): - self.generate_models(user=True, - cohort_user=True, - profile_academy=True, - cohort_kwargs=cohort_kwargs, - cohort_user_kwargs=cohort_user_kwargs) + self.generate_models( + user=True, + cohort_user=True, + profile_academy=True, + cohort_kwargs=cohort_kwargs, + cohort_user_kwargs=cohort_user_kwargs, + ) self.assertEqual(send_cohort_bulk_survey(None, request, Cohort.objects.all()), None) expected = [] dicts = [ - answer for answer in self.bc.database.list_of('feedback.Answer') - if isinstance(answer['created_at'], datetime) and answer.pop('created_at') + answer + for answer in self.bc.database.list_of("feedback.Answer") + if isinstance(answer["created_at"], datetime) and answer.pop("created_at") ] self.assertEqual(dicts, expected) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_send_cohort_bulk_survey_with_educational_status_dropped(self): """Test /answer without auth""" request = HttpRequest() - cohort_kwargs = {'language': 'en'} - cohort_user_kwargs = {'role': 'STUDENT', 'educational_status': 'DROPPED'} + cohort_kwargs = {"language": "en"} + cohort_user_kwargs = {"role": "STUDENT", 
"educational_status": "DROPPED"} for _ in range(0, 3): - self.generate_models(user=True, - cohort_user=True, - profile_academy=True, - cohort_kwargs=cohort_kwargs, - cohort_user_kwargs=cohort_user_kwargs) + self.generate_models( + user=True, + cohort_user=True, + profile_academy=True, + cohort_kwargs=cohort_kwargs, + cohort_user_kwargs=cohort_user_kwargs, + ) self.assertEqual(send_cohort_bulk_survey(None, request, Cohort.objects.all()), None) expected = [] dicts = [ - answer for answer in self.bc.database.list_of('feedback.Answer') - if isinstance(answer['created_at'], datetime) and answer.pop('created_at') + answer + for answer in self.bc.database.list_of("feedback.Answer") + if isinstance(answer["created_at"], datetime) and answer.pop("created_at") ] self.assertEqual(dicts, expected) diff --git a/breathecode/feedback/tests/admin/tests_calculate_survey_scores.py b/breathecode/feedback/tests/admin/tests_calculate_survey_scores.py index 5e171e956..65500dc39 100644 --- a/breathecode/feedback/tests/admin/tests_calculate_survey_scores.py +++ b/breathecode/feedback/tests/admin/tests_calculate_survey_scores.py @@ -1,6 +1,7 @@ """ Test /answer """ + import logging import random from unittest.mock import MagicMock, call, patch @@ -20,25 +21,25 @@ class SendSurveyTestSuite(FeedbackTestCase): 🔽🔽🔽 With zero Surveys """ - @patch('breathecode.feedback.tasks.recalculate_survey_scores.delay', MagicMock()) + @patch("breathecode.feedback.tasks.recalculate_survey_scores.delay", MagicMock()) def test_with_zero_surveys(self): result = calculate_survey_scores(None, None, None) self.assertEqual(result, None) - self.assertEqual(self.bc.database.list_of('feedback.Survey'), []) + self.assertEqual(self.bc.database.list_of("feedback.Survey"), []) self.assertEqual(tasks.recalculate_survey_scores.delay.call_args_list, []) """ 🔽🔽🔽 With random number of Surveys """ - @patch('breathecode.feedback.tasks.recalculate_survey_scores.delay', MagicMock()) + @patch("breathecode.feedback.tasks.recalculate_survey_scores.delay", MagicMock()) def test_with_random_number_of_surveys(self): model = self.bc.database.create(survey=random.randint(2, 10)) result = calculate_survey_scores(None, None, None) self.assertEqual(result, None) - self.assertEqual(self.bc.database.list_of('feedback.Survey'), self.bc.format.to_dict(model.survey)) + self.assertEqual(self.bc.database.list_of("feedback.Survey"), self.bc.format.to_dict(model.survey)) self.assertEqual( tasks.recalculate_survey_scores.delay.call_args_list, [call(x.id) for x in model.survey], diff --git a/breathecode/feedback/tests/admin/tests_send_bulk_cohort_user_survey.py b/breathecode/feedback/tests/admin/tests_send_bulk_cohort_user_survey.py index 7454d16e6..b5f5165b5 100644 --- a/breathecode/feedback/tests/admin/tests_send_bulk_cohort_user_survey.py +++ b/breathecode/feedback/tests/admin/tests_send_bulk_cohort_user_survey.py @@ -1,6 +1,7 @@ """ Test /answer """ + import logging from unittest.mock import MagicMock, call, patch @@ -18,10 +19,10 @@ class SendSurveyTestSuite(FeedbackTestCase): 🔽🔽🔽 With zero CohortUser """ - @patch('django.contrib.messages.api.add_message', MagicMock()) - @patch('breathecode.feedback.actions.send_question', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.contrib.messages.api.add_message", MagicMock()) + @patch("breathecode.feedback.actions.send_question", MagicMock()) + 
@patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_with_zero_cohort_users(self): request = HttpRequest() @@ -29,24 +30,27 @@ def test_with_zero_cohort_users(self): result = send_bulk_cohort_user_survey(None, request, queryset) self.assertEqual(result, None) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), []) - - self.assertEqual(api.add_message.call_args_list, [ - call(request, 25, 'Survey was successfully sent', extra_tags='', fail_silently=False), - ]) + self.assertEqual(self.bc.database.list_of("admissions.CohortUser"), []) + + self.assertEqual( + api.add_message.call_args_list, + [ + call(request, 25, "Survey was successfully sent", extra_tags="", fail_silently=False), + ], + ) self.assertEqual(actions.send_question.call_args_list, []) """ 🔽🔽🔽 With two CohortUser """ - @patch('django.contrib.messages.api.add_message', MagicMock()) - @patch('breathecode.feedback.actions.send_question', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.contrib.messages.api.add_message", MagicMock()) + @patch("breathecode.feedback.actions.send_question", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_with_two_cohort_users(self): request = HttpRequest() - CohortUser = self.bc.database.get_model('admissions.CohortUser') + CohortUser = self.bc.database.get_model("admissions.CohortUser") model = self.bc.database.create(cohort_user=2) db = self.bc.format.to_dict(model.cohort_user) @@ -55,29 +59,36 @@ def test_with_two_cohort_users(self): result = send_bulk_cohort_user_survey(None, request, queryset) self.assertEqual(result, None) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), db) - - self.assertEqual(str(api.add_message.call_args_list), - str([ - call(request, 25, 'Survey was successfully sent', extra_tags='', fail_silently=False), - ])) - self.assertEqual(actions.send_question.call_args_list, [ - call(model.user, model.cohort), - call(model.user, model.cohort), - ]) + self.assertEqual(self.bc.database.list_of("admissions.CohortUser"), db) + + self.assertEqual( + str(api.add_message.call_args_list), + str( + [ + call(request, 25, "Survey was successfully sent", extra_tags="", fail_silently=False), + ] + ), + ) + self.assertEqual( + actions.send_question.call_args_list, + [ + call(model.user, model.cohort), + call(model.user, model.cohort), + ], + ) """ 🔽🔽🔽 With two CohortUser raise exceptions """ - @patch('django.contrib.messages.api.add_message', MagicMock()) - @patch('breathecode.feedback.actions.send_question', MagicMock(side_effect=Exception('qwerty'))) - @patch('logging.Logger.fatal', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.contrib.messages.api.add_message", MagicMock()) + @patch("breathecode.feedback.actions.send_question", MagicMock(side_effect=Exception("qwerty"))) + @patch("logging.Logger.fatal", MagicMock()) + 
@patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_with_two_cohort_users__raises_exceptions__same_exception(self): request = HttpRequest() - CohortUser = self.bc.database.get_model('admissions.CohortUser') + CohortUser = self.bc.database.get_model("admissions.CohortUser") model = self.bc.database.create(cohort_user=2) db = self.bc.format.to_dict(model.cohort_user) @@ -86,26 +97,34 @@ def test_with_two_cohort_users__raises_exceptions__same_exception(self): result = send_bulk_cohort_user_survey(None, request, queryset) self.assertEqual(result, None) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), db) - - self.assertEqual(actions.send_question.call_args_list, [ - call(model.user, model.cohort), - call(model.user, model.cohort), - ]) - self.assertEqual(api.add_message.call_args_list, [ - call(request, 40, 'qwerty (2)', extra_tags='', fail_silently=False), - ]) - self.assertEqual(logging.Logger.fatal.call_args_list, [call('qwerty'), call('qwerty')]) - - @patch('django.contrib.messages.api.add_message', MagicMock()) - @patch('breathecode.feedback.actions.send_question', - MagicMock(side_effect=[Exception('qwerty1'), Exception('qwerty2')])) - @patch('logging.Logger.fatal', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + self.assertEqual(self.bc.database.list_of("admissions.CohortUser"), db) + + self.assertEqual( + actions.send_question.call_args_list, + [ + call(model.user, model.cohort), + call(model.user, model.cohort), + ], + ) + self.assertEqual( + api.add_message.call_args_list, + [ + call(request, 40, "qwerty (2)", extra_tags="", fail_silently=False), + ], + ) + self.assertEqual(logging.Logger.fatal.call_args_list, [call("qwerty"), call("qwerty")]) + + @patch("django.contrib.messages.api.add_message", MagicMock()) + @patch( + "breathecode.feedback.actions.send_question", + MagicMock(side_effect=[Exception("qwerty1"), Exception("qwerty2")]), + ) + @patch("logging.Logger.fatal", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_with_two_cohort_users__raises_exceptions__different_exceptions(self): request = HttpRequest() - CohortUser = self.bc.database.get_model('admissions.CohortUser') + CohortUser = self.bc.database.get_model("admissions.CohortUser") model = self.bc.database.create(cohort_user=2) db = self.bc.format.to_dict(model.cohort_user) @@ -114,13 +133,19 @@ def test_with_two_cohort_users__raises_exceptions__different_exceptions(self): result = send_bulk_cohort_user_survey(None, request, queryset) self.assertEqual(result, None) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), db) - - self.assertEqual(actions.send_question.call_args_list, [ - call(model.user, model.cohort), - call(model.user, model.cohort), - ]) - self.assertEqual(api.add_message.call_args_list, [ - call(request, 40, 'qwerty1 (1) - qwerty2 (1)', extra_tags='', fail_silently=False), - ]) - self.assertEqual(logging.Logger.fatal.call_args_list, [call('qwerty1'), call('qwerty2')]) + self.assertEqual(self.bc.database.list_of("admissions.CohortUser"), db) + + self.assertEqual( + 
actions.send_question.call_args_list, + [ + call(model.user, model.cohort), + call(model.user, model.cohort), + ], + ) + self.assertEqual( + api.add_message.call_args_list, + [ + call(request, 40, "qwerty1 (1) - qwerty2 (1)", extra_tags="", fail_silently=False), + ], + ) + self.assertEqual(logging.Logger.fatal.call_args_list, [call("qwerty1"), call("qwerty2")]) diff --git a/breathecode/feedback/tests/admin/tests_send_bulk_survey.py b/breathecode/feedback/tests/admin/tests_send_bulk_survey.py index 0c8d5a866..255df85bb 100644 --- a/breathecode/feedback/tests/admin/tests_send_bulk_survey.py +++ b/breathecode/feedback/tests/admin/tests_send_bulk_survey.py @@ -1,6 +1,7 @@ """ Test /answer """ + import logging from datetime import datetime from unittest.mock import MagicMock, call, patch @@ -17,32 +18,35 @@ class SendSurveyTestSuite(FeedbackTestCase): 🔽🔽🔽 With zero User """ - @patch('django.contrib.messages.api.add_message', MagicMock()) - @patch('breathecode.feedback.actions.send_question', MagicMock()) + @patch("django.contrib.messages.api.add_message", MagicMock()) + @patch("breathecode.feedback.actions.send_question", MagicMock()) def test_with_zero_users(self): request = HttpRequest() - User = self.bc.database.get_model('auth.User') + User = self.bc.database.get_model("auth.User") queryset = User.objects.all() result = send_bulk_survey(None, request, queryset) self.assertEqual(result, None) - self.assertEqual(self.bc.database.list_of('auth.User'), []) - - self.assertEqual(api.add_message.call_args_list, [ - call(request, 25, 'Survey was successfully sent', extra_tags='', fail_silently=False), - ]) + self.assertEqual(self.bc.database.list_of("auth.User"), []) + + self.assertEqual( + api.add_message.call_args_list, + [ + call(request, 25, "Survey was successfully sent", extra_tags="", fail_silently=False), + ], + ) self.assertEqual(actions.send_question.call_args_list, []) """ 🔽🔽🔽 With two User """ - @patch('django.contrib.messages.api.add_message', MagicMock()) - @patch('breathecode.feedback.actions.send_question', MagicMock()) + @patch("django.contrib.messages.api.add_message", MagicMock()) + @patch("breathecode.feedback.actions.send_question", MagicMock()) def test_with_two_users(self): request = HttpRequest() - User = self.bc.database.get_model('auth.User') + User = self.bc.database.get_model("auth.User") model = self.bc.database.create(user=2) db = self.bc.format.to_dict(model.user) @@ -51,23 +55,26 @@ def test_with_two_users(self): result = send_bulk_survey(None, request, queryset) self.assertEqual(result, None) - self.assertEqual(self.bc.database.list_of('auth.User'), db) - - self.assertEqual(api.add_message.call_args_list, [ - call(request, 25, 'Survey was successfully sent', extra_tags='', fail_silently=False), - ]) + self.assertEqual(self.bc.database.list_of("auth.User"), db) + + self.assertEqual( + api.add_message.call_args_list, + [ + call(request, 25, "Survey was successfully sent", extra_tags="", fail_silently=False), + ], + ) self.assertEqual(actions.send_question.call_args_list, [call(model.user[0]), call(model.user[1])]) """ 🔽🔽🔽 With two User raise exceptions """ - @patch('django.contrib.messages.api.add_message', MagicMock()) - @patch('breathecode.feedback.actions.send_question', MagicMock(side_effect=Exception('qwerty'))) - @patch('logging.Logger.fatal', MagicMock()) + @patch("django.contrib.messages.api.add_message", MagicMock()) + @patch("breathecode.feedback.actions.send_question", MagicMock(side_effect=Exception("qwerty"))) + @patch("logging.Logger.fatal", MagicMock()) 
def test_with_two_users__raises_exceptions__same_exception(self): request = HttpRequest() - User = self.bc.database.get_model('auth.User') + User = self.bc.database.get_model("auth.User") model = self.bc.database.create(user=2) db = self.bc.format.to_dict(model.user) @@ -76,21 +83,26 @@ def test_with_two_users__raises_exceptions__same_exception(self): result = send_bulk_survey(None, request, queryset) self.assertEqual(result, None) - self.assertEqual(self.bc.database.list_of('auth.User'), db) + self.assertEqual(self.bc.database.list_of("auth.User"), db) self.assertEqual(actions.send_question.call_args_list, [call(model.user[0]), call(model.user[1])]) - self.assertEqual(api.add_message.call_args_list, [ - call(request, 40, 'qwerty (2)', extra_tags='', fail_silently=False), - ]) - self.assertEqual(logging.Logger.fatal.call_args_list, [call('qwerty'), call('qwerty')]) - - @patch('django.contrib.messages.api.add_message', MagicMock()) - @patch('breathecode.feedback.actions.send_question', - MagicMock(side_effect=[Exception('qwerty1'), Exception('qwerty2')])) - @patch('logging.Logger.fatal', MagicMock()) + self.assertEqual( + api.add_message.call_args_list, + [ + call(request, 40, "qwerty (2)", extra_tags="", fail_silently=False), + ], + ) + self.assertEqual(logging.Logger.fatal.call_args_list, [call("qwerty"), call("qwerty")]) + + @patch("django.contrib.messages.api.add_message", MagicMock()) + @patch( + "breathecode.feedback.actions.send_question", + MagicMock(side_effect=[Exception("qwerty1"), Exception("qwerty2")]), + ) + @patch("logging.Logger.fatal", MagicMock()) def test_with_two_users__raises_exceptions__different_exceptions(self): request = HttpRequest() - User = self.bc.database.get_model('auth.User') + User = self.bc.database.get_model("auth.User") model = self.bc.database.create(user=2) db = self.bc.format.to_dict(model.user) @@ -99,10 +111,13 @@ def test_with_two_users__raises_exceptions__different_exceptions(self): result = send_bulk_survey(None, request, queryset) self.assertEqual(result, None) - self.assertEqual(self.bc.database.list_of('auth.User'), db) + self.assertEqual(self.bc.database.list_of("auth.User"), db) self.assertEqual(actions.send_question.call_args_list, [call(model.user[0]), call(model.user[1])]) - self.assertEqual(api.add_message.call_args_list, [ - call(request, 40, 'qwerty1 (1) - qwerty2 (1)', extra_tags='', fail_silently=False), - ]) - self.assertEqual(logging.Logger.fatal.call_args_list, [call('qwerty1'), call('qwerty2')]) + self.assertEqual( + api.add_message.call_args_list, + [ + call(request, 40, "qwerty1 (1) - qwerty2 (1)", extra_tags="", fail_silently=False), + ], + ) + self.assertEqual(logging.Logger.fatal.call_args_list, [call("qwerty1"), call("qwerty2")]) diff --git a/breathecode/feedback/tests/management/commands/tests_remove_invalid_answers.py b/breathecode/feedback/tests/management/commands/tests_remove_invalid_answers.py index 04202b67a..ea925dacf 100644 --- a/breathecode/feedback/tests/management/commands/tests_remove_invalid_answers.py +++ b/breathecode/feedback/tests/management/commands/tests_remove_invalid_answers.py @@ -13,56 +13,80 @@ @pytest.fixture(autouse=True) def setup(db, monkeypatch: pytest.MonkeyPatch): - monkeypatch.setattr('breathecode.feedback.signals.survey_answered.send_robust', MagicMock()) + monkeypatch.setattr("breathecode.feedback.signals.survey_answered.send_robust", MagicMock()) -@patch('sys.stdout.write', MagicMock()) +@patch("sys.stdout.write", MagicMock()) def test_run_handler(db, bc: Breathecode): - surveys = 
[{'cohort_id': n} for n in range(1, 4)] - cohort_users = [{ - 'cohort_id': n, - 'user_id': n, - 'educational_status': random.choice(['POSTPONED', 'SUSPENDED', 'DROPPED']) - } for n in range(1, 4)] - answers = [{ - 'survey_id': n, - 'cohort_id': n, - 'user_id': n, - 'status': random.choice(['ANSWERED', 'OPENED']), - 'score': random.randint(1, 10), - } for n in range(1, 4)] + [{ - 'survey_id': n, - 'cohort_id': n, - 'user_id': n, - 'status': random.choice(['PENDING', 'SENT', 'EXPIRED']), - 'score': None, - } for n in range(1, 4)] + [{ - 'survey_id': n, - 'cohort_id': n, - 'user_id': n, - 'status': random.choice(['ANSWERED', 'OPENED']), - 'score': None, - } for n in range(1, 4)] + [{ - 'survey_id': n, - 'cohort_id': n, - 'user_id': n, - 'status': random.choice(['ANSWERED', 'OPENED']), - 'score': random.randint(1, 10), - } for n in range(1, 4)] + [{ - 'survey_id': n, - 'cohort_id': n, - 'user_id': n, - 'status': random.choice(['PENDING', 'SENT', 'EXPIRED']), - 'score': None, - } for n in range(1, 4)] + [{ - 'survey_id': n, - 'cohort_id': n, - 'user_id': n, - 'status': random.choice(['ANSWERED', 'OPENED']), - 'score': None, - } for n in range(1, 4)] - - with patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()): + surveys = [{"cohort_id": n} for n in range(1, 4)] + cohort_users = [ + {"cohort_id": n, "user_id": n, "educational_status": random.choice(["POSTPONED", "SUSPENDED", "DROPPED"])} + for n in range(1, 4) + ] + answers = ( + [ + { + "survey_id": n, + "cohort_id": n, + "user_id": n, + "status": random.choice(["ANSWERED", "OPENED"]), + "score": random.randint(1, 10), + } + for n in range(1, 4) + ] + + [ + { + "survey_id": n, + "cohort_id": n, + "user_id": n, + "status": random.choice(["PENDING", "SENT", "EXPIRED"]), + "score": None, + } + for n in range(1, 4) + ] + + [ + { + "survey_id": n, + "cohort_id": n, + "user_id": n, + "status": random.choice(["ANSWERED", "OPENED"]), + "score": None, + } + for n in range(1, 4) + ] + + [ + { + "survey_id": n, + "cohort_id": n, + "user_id": n, + "status": random.choice(["ANSWERED", "OPENED"]), + "score": random.randint(1, 10), + } + for n in range(1, 4) + ] + + [ + { + "survey_id": n, + "cohort_id": n, + "user_id": n, + "status": random.choice(["PENDING", "SENT", "EXPIRED"]), + "score": None, + } + for n in range(1, 4) + ] + + [ + { + "survey_id": n, + "cohort_id": n, + "user_id": n, + "status": random.choice(["ANSWERED", "OPENED"]), + "score": None, + } + for n in range(1, 4) + ] + ) + + with patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()): model = bc.database.create(user=3, survey=surveys, answer=answers, cohort=3, cohort_user=cohort_users) @@ -71,16 +95,16 @@ def test_run_handler(db, bc: Breathecode): # reset in this line because some people left print in some places sys.stdout.write.call_args_list = [] - with patch('sys.stderr.write', MagicMock()): + with patch("sys.stderr.write", MagicMock()): command = Command() command.handle() assert sys.stderr.write.call_args_list == [] - assert bc.database.list_of('feedback.Survey') == bc.format.to_dict(model.survey) + assert bc.database.list_of("feedback.Survey") == bc.format.to_dict(model.survey) # this ignore the answers is not answered or opened - assert bc.database.list_of('feedback.Answer') == [ + assert bc.database.list_of("feedback.Answer") == [ bc.format.to_dict(answer_db[0]), bc.format.to_dict(answer_db[1]), bc.format.to_dict(answer_db[2]), @@ -95,4 +119,4 @@ def test_run_handler(db, bc: Breathecode): bc.format.to_dict(answer_db[17]), ] - assert 
sys.stdout.write.call_args_list == [call('Successfully deleted invalid answers\n')] + assert sys.stdout.write.call_args_list == [call("Successfully deleted invalid answers\n")] diff --git a/breathecode/feedback/tests/mixins/__init__.py b/breathecode/feedback/tests/mixins/__init__.py index db4a36940..ba0d9f3de 100644 --- a/breathecode/feedback/tests/mixins/__init__.py +++ b/breathecode/feedback/tests/mixins/__init__.py @@ -1,4 +1,5 @@ """ Admissions mixins """ + from .feedback_test_case import FeedbackTestCase # noqa: F401 diff --git a/breathecode/feedback/tests/mixins/feedback_test_case.py b/breathecode/feedback/tests/mixins/feedback_test_case.py index a73e24d37..80c139915 100644 --- a/breathecode/feedback/tests/mixins/feedback_test_case.py +++ b/breathecode/feedback/tests/mixins/feedback_test_case.py @@ -1,6 +1,7 @@ """ Collections of mixins used to login in authorize microservice """ + import os from unittest.mock import call @@ -22,8 +23,16 @@ from ...models import Answer -class FeedbackTestCase(APITestCase, GenerateModelsMixin, CacheMixin, TokenMixin, GenerateQueriesMixin, HeadersMixin, - DatetimeMixin, BreathecodeMixin): +class FeedbackTestCase( + APITestCase, + GenerateModelsMixin, + CacheMixin, + TokenMixin, + GenerateQueriesMixin, + HeadersMixin, + DatetimeMixin, + BreathecodeMixin, +): """FeedbackTestCase with auth methods""" def tearDown(self): @@ -36,116 +45,113 @@ def setUp(self): def get_token_key(self, id=None): kwargs = {} if id: - kwargs['id'] = id - return Token.objects.filter(**kwargs).values_list('key', flat=True).first() + kwargs["id"] = id + return Token.objects.filter(**kwargs).values_list("key", flat=True).first() def remove_all_answer(self): Answer.objects.all().delete() def check_email_contain_a_correct_token(self, lang, dicts, mock, model): token = self.get_token_key() - question = dicts[0]['title'] + question = dicts[0]["title"] link = f"https://nps.4geeks.com/{dicts[0]['id']}?token={token}" args_list = mock.call_args_list - academy = model.get('academy', None) - - template = get_template_content('nps', { - 'QUESTION': question, - 'HIGHEST': dicts[0]['highest'], - 'LOWEST': dicts[0]['lowest'], - 'SUBJECT': question, - 'ANSWER_ID': dicts[0]['id'], - 'BUTTON': strings[lang]['button_label'], - 'LINK': link, - }, ['email'], - academy=academy) - - self.assertEqual(args_list, [ - call(f'https://api.mailgun.net/v3/{os.environ.get("MAILGUN_DOMAIN")}/messages', - auth=('api', os.environ.get('MAILGUN_API_KEY', '')), - data={ - 'from': f"4Geeks <mailgun@{os.environ.get('MAILGUN_DOMAIN')}>", - 'to': [model['user'].email], - 'subject': template['subject'], - 'text': template['text'], - 'html': template['html'] - }, - timeout=2) - ]) - - html = template['html'] - del template['html'] + academy = model.get("academy", None) + + template = get_template_content( + "nps", + { + "QUESTION": question, + "HIGHEST": dicts[0]["highest"], + "LOWEST": dicts[0]["lowest"], + "SUBJECT": question, + "ANSWER_ID": dicts[0]["id"], + "BUTTON": strings[lang]["button_label"], + "LINK": link, + }, + ["email"], + academy=academy, + ) + + self.assertEqual( + args_list, + [ + call( + f'https://api.mailgun.net/v3/{os.environ.get("MAILGUN_DOMAIN")}/messages', + auth=("api", os.environ.get("MAILGUN_API_KEY", "")), + data={ + "from": f"4Geeks <mailgun@{os.environ.get('MAILGUN_DOMAIN')}>", + "to": [model["user"].email], + "subject": template["subject"], + "text": template["text"], + "html": template["html"], + }, + timeout=2, + ) + ], + ) + + html = template["html"] + del template["html"] self.assertEqual( 
- template, { - 'SUBJECT': - question, - 'subject': - question, - 'text': - '\n' - '\n' - 'Please take 2 min to answer the following question:\n' - '\n' - f'{question}\n' - '\n' - 'Click here to vote: ' - f'{link}' - '\n' - '\n' - '\n' - '\n' - 'The 4Geeks Team' - '\n' - }) + template, + { + "SUBJECT": question, + "subject": question, + "text": "\n" + "\n" + "Please take 2 min to answer the following question:\n" + "\n" + f"{question}\n" + "\n" + "Click here to vote: " + f"{link}" + "\n" + "\n" + "\n" + "\n" + "The 4Geeks Team" + "\n", + }, + ) self.assertToken(token) self.assertTrue(link in html) def check_slack_contain_a_correct_token(self, lang, dicts, mock, model, answer_id=1): token = self.get_token_key() - slack_token = model['slack_team'].owner.credentialsslack.token - slack_id = model['slack_user'].slack_id + slack_token = model["slack_team"].owner.credentialsslack.token + slack_id = model["slack_user"].slack_id args_list = mock.call_args_list - question = dicts[0]['title'] - answer = strings[lang]['button_label'] + question = dicts[0]["title"] + answer = strings[lang]["button_label"] expected = [ - call(method='POST', - url='https://slack.com/api/chat.postMessage', - headers={ - 'Authorization': f'Bearer {slack_token}', - 'Content-type': 'application/json' - }, - params=None, - json={ - 'channel': - slack_id, - 'private_metadata': - '', - 'blocks': [{ - 'type': 'header', - 'text': { - 'type': 'plain_text', - 'text': question, - 'emoji': True - } - }, { - 'type': - 'actions', - 'elements': [{ - 'type': 'button', - 'text': { - 'type': 'plain_text', - 'text': answer, - 'emoji': True - }, - 'url': f'https://nps.4geeks.com/{answer_id}?token={token}' - }] - }], - 'parse': - 'full' - }, - timeout=10) + call( + method="POST", + url="https://slack.com/api/chat.postMessage", + headers={"Authorization": f"Bearer {slack_token}", "Content-type": "application/json"}, + params=None, + json={ + "channel": slack_id, + "private_metadata": "", + "blocks": [ + {"type": "header", "text": {"type": "plain_text", "text": question, "emoji": True}}, + { + "type": "actions", + "elements": [ + { + "type": "button", + "text": {"type": "plain_text", "text": answer, "emoji": True}, + "url": f"https://nps.4geeks.com/{answer_id}?token={token}", + } + ], + }, + ], + "parse": "full", + }, + timeout=10, + ) ] self.assertEqual(args_list, expected) diff --git a/breathecode/feedback/tests/signals/tests_survey_answered.py b/breathecode/feedback/tests/signals/tests_survey_answered.py index 1e36dd884..61ac667c3 100644 --- a/breathecode/feedback/tests/signals/tests_survey_answered.py +++ b/breathecode/feedback/tests/signals/tests_survey_answered.py @@ -1,6 +1,7 @@ """ Test /academy/survey """ + from unittest.mock import patch, MagicMock, call from breathecode.tests.mixins.legacy import LegacyAPITestCase @@ -8,27 +9,27 @@ class TestSurveyAnswered(LegacyAPITestCase): """Test /academy/survey""" - @patch('breathecode.feedback.tasks.process_answer_received.delay', MagicMock()) + @patch("breathecode.feedback.tasks.process_answer_received.delay", MagicMock()) def test_survey_answered_signal_pending(self, enable_signals): enable_signals() from breathecode.feedback.tasks import process_answer_received model = self.generate_models(answer=True) - answer_db = self.model_to_dict(model, 'answer') + answer_db = self.model_to_dict(model, "answer") self.assertEqual(process_answer_received.delay.call_args_list, []) - self.assertEqual(self.bc.database.list_of('feedback.Answer'), [answer_db]) + 
self.assertEqual(self.bc.database.list_of("feedback.Answer"), [answer_db]) - @patch('breathecode.feedback.tasks.process_answer_received.delay', MagicMock()) + @patch("breathecode.feedback.tasks.process_answer_received.delay", MagicMock()) def test_survey_answered_signal_answered(self, enable_signals): enable_signals() from breathecode.feedback.tasks import process_answer_received - answer = {'status': 'ANSWERED'} + answer = {"status": "ANSWERED"} model = self.generate_models(answer=answer) - answer_db = self.model_to_dict(model, 'answer') + answer_db = self.model_to_dict(model, "answer") self.assertEqual(process_answer_received.delay.call_args_list, [call(1)]) - self.assertEqual(self.bc.database.list_of('feedback.Answer'), [answer_db]) + self.assertEqual(self.bc.database.list_of("feedback.Answer"), [answer_db]) diff --git a/breathecode/feedback/tests/supervisors/tests_no_survey_for_session.py b/breathecode/feedback/tests/supervisors/tests_no_survey_for_session.py index b150f2dd2..bdeef8957 100644 --- a/breathecode/feedback/tests/supervisors/tests_no_survey_for_session.py +++ b/breathecode/feedback/tests/supervisors/tests_no_survey_for_session.py @@ -21,10 +21,13 @@ def __init__(self, bc: Breathecode): def list(self): supervisors = SupervisorModel.objects.all() - return [{ - 'task_module': supervisor.task_module, - 'task_name': supervisor.task_name, - } for supervisor in supervisors] + return [ + { + "task_module": supervisor.task_module, + "task_name": supervisor.task_name, + } + for supervisor in supervisors + ] @sync_to_async def alist(self): @@ -60,17 +63,17 @@ def supervisor(db, bc: Breathecode): @pytest.fixture(autouse=True) def setup(db, monkeypatch: pytest.MonkeyPatch): - monkeypatch.setattr('breathecode.feedback.tasks.send_mentorship_session_survey.delay', MagicMock()) + monkeypatch.setattr("breathecode.feedback.tasks.send_mentorship_session_survey.delay", MagicMock()) yield def db(data={}): return { - 'delta': timedelta(seconds=3600), - 'id': 0, - 'ran_at': None, - 'task_module': '', - 'task_name': '', + "delta": timedelta(seconds=3600), + "id": 0, + "ran_at": None, + "task_module": "", + "task_name": "", **data, } @@ -79,52 +82,56 @@ def tests_issue_not_found(database: capy.Database): res = no_survey_for_session(1) assert res is None - assert database.list_of('mentorship.MentorshipSession') == [] + assert database.list_of("mentorship.MentorshipSession") == [] assert send_mentorship_session_survey.delay.call_args_list == [] def tests_mentorship_session_not_found(database: capy.Database, format: capy.Format): - fn_module = 'breathecode.feedback.supervisors' - fn_name = 'supervise_mentorship_survey' - database.create(supervisor={ - 'task_module': fn_module, - 'task_name': fn_name, - }, - supervisor_issue={ - 'code': 'no-survey-for-session', - 'fixed': None, - 'params': { - 'session_id': 1, - }, - }) + fn_module = "breathecode.feedback.supervisors" + fn_name = "supervise_mentorship_survey" + database.create( + supervisor={ + "task_module": fn_module, + "task_name": fn_name, + }, + supervisor_issue={ + "code": "no-survey-for-session", + "fixed": None, + "params": { + "session_id": 1, + }, + }, + ) res = no_survey_for_session(1) assert res is None - assert database.list_of('mentorship.MentorshipSession') == [] + assert database.list_of("mentorship.MentorshipSession") == [] assert send_mentorship_session_survey.delay.call_args_list == [] def tests_no_answer(database: capy.Database, format: capy.Format): - fn_module = 'breathecode.feedback.supervisors' - fn_name = 
'supervise_mentorship_survey' - model = database.create(mentorship_session=1, - supervisor={ - 'task_module': fn_module, - 'task_name': fn_name, - }, - supervisor_issue={ - 'code': 'no-survey-for-session', - 'fixed': None, - 'params': { - 'session_id': 1, - }, - }) + fn_module = "breathecode.feedback.supervisors" + fn_name = "supervise_mentorship_survey" + model = database.create( + mentorship_session=1, + supervisor={ + "task_module": fn_module, + "task_name": fn_name, + }, + supervisor_issue={ + "code": "no-survey-for-session", + "fixed": None, + "params": { + "session_id": 1, + }, + }, + ) res = no_survey_for_session(1) assert res is None - assert database.list_of('mentorship.MentorshipSession') == [ + assert database.list_of("mentorship.MentorshipSession") == [ format.to_obj_repr(model.mentorship_session), ] @@ -132,28 +139,30 @@ def tests_no_answer(database: capy.Database, format: capy.Format): def tests_answer(database: capy.Database, format: capy.Format): - fn_module = 'breathecode.feedback.supervisors' - fn_name = 'supervise_mentorship_survey' - model = database.create(mentorship_session=1, - supervisor={ - 'task_module': fn_module, - 'task_name': fn_name, - }, - supervisor_issue={ - 'code': 'no-survey-for-session', - 'fixed': None, - 'params': { - 'session_id': 1, - }, - }, - feedback__answer=1, - city=1, - country=1) + fn_module = "breathecode.feedback.supervisors" + fn_name = "supervise_mentorship_survey" + model = database.create( + mentorship_session=1, + supervisor={ + "task_module": fn_module, + "task_name": fn_name, + }, + supervisor_issue={ + "code": "no-survey-for-session", + "fixed": None, + "params": { + "session_id": 1, + }, + }, + feedback__answer=1, + city=1, + country=1, + ) res = no_survey_for_session(1) assert res is True - assert database.list_of('mentorship.MentorshipSession') == [ + assert database.list_of("mentorship.MentorshipSession") == [ format.to_obj_repr(model.mentorship_session), ] diff --git a/breathecode/feedback/tests/supervisors/tests_supervise_mentorship_survey.py b/breathecode/feedback/tests/supervisors/tests_supervise_mentorship_survey.py index 3411847af..986f40f22 100644 --- a/breathecode/feedback/tests/supervisors/tests_supervise_mentorship_survey.py +++ b/breathecode/feedback/tests/supervisors/tests_supervise_mentorship_survey.py @@ -20,10 +20,13 @@ def __init__(self, bc: Breathecode): def list(self): supervisors = SupervisorModel.objects.all() - return [{ - 'task_module': supervisor.task_module, - 'task_name': supervisor.task_name, - } for supervisor in supervisors] + return [ + { + "task_module": supervisor.task_module, + "task_name": supervisor.task_name, + } + for supervisor in supervisors + ] @sync_to_async def alist(self): @@ -61,29 +64,31 @@ def supervisor(db, bc: Breathecode): def setup(db, monkeypatch: pytest.MonkeyPatch): from breathecode.monitoring.models import Supervisor - monkeypatch.setattr('logging.Logger.error', MagicMock()) - monkeypatch.setattr('breathecode.payments.supervisors.MIN_PENDING_SESSIONS', 2) - monkeypatch.setattr('breathecode.payments.supervisors.MIN_CANCELLED_SESSIONS', 2) - - fn_module = 'breathecode.feedback.supervisors' - fn_name = 'supervise_mentorship_survey' - Supervisor.objects.get_or_create(task_module=fn_module, - task_name=fn_name, - defaults={ - 'delta': timedelta(seconds=3600), - 'ran_at': None, - }) + monkeypatch.setattr("logging.Logger.error", MagicMock()) + monkeypatch.setattr("breathecode.payments.supervisors.MIN_PENDING_SESSIONS", 2) + 
monkeypatch.setattr("breathecode.payments.supervisors.MIN_CANCELLED_SESSIONS", 2) + + fn_module = "breathecode.feedback.supervisors" + fn_name = "supervise_mentorship_survey" + Supervisor.objects.get_or_create( + task_module=fn_module, + task_name=fn_name, + defaults={ + "delta": timedelta(seconds=3600), + "ran_at": None, + }, + ) yield def db(data={}): return { - 'delta': timedelta(seconds=3600), - 'id': 0, - 'ran_at': None, - 'task_module': '', - 'task_name': '', + "delta": timedelta(seconds=3600), + "id": 0, + "ran_at": None, + "task_module": "", + "task_name": "", **data, } @@ -98,7 +103,7 @@ def tests_no_sessions(supervisor: Supervisor): # 'task_name': 'supervise_mentorship_survey', # }, # ] - assert supervisor.log('breathecode.feedback.supervisors', 'supervise_mentorship_survey') == [] + assert supervisor.log("breathecode.feedback.supervisors", "supervise_mentorship_survey") == [] # sessions = MentorshipSession.objects.filter(status='COMPLETED', # started_at__isnull=False, @@ -109,76 +114,87 @@ def tests_no_sessions(supervisor: Supervisor): # created_at__gte=utc_now - timedelta(days=5)) -@pytest.mark.parametrize('status', [ - 'PENDING', - 'STARTED', - 'FAILED', - 'IGNORED', - 'CANCELED', -]) -@pytest.mark.parametrize('started_at, ended_at, mentor, mentee', [ - (None, None, 0, 0), - (timedelta(0), None, 0, 0), - (None, timedelta(0), 0, 0), - (None, None, 1, 0), - (None, None, 0, 1), -]) -def tests_invalid_sessions(database: capy.Database, supervisor: Supervisor, utc_now: datetime, status, started_at, - ended_at, mentor, mentee): - mentorship_session = {'status': status, 'mentor': mentor, 'mentee': mentee} +@pytest.mark.parametrize( + "status", + [ + "PENDING", + "STARTED", + "FAILED", + "IGNORED", + "CANCELED", + ], +) +@pytest.mark.parametrize( + "started_at, ended_at, mentor, mentee", + [ + (None, None, 0, 0), + (timedelta(0), None, 0, 0), + (None, timedelta(0), 0, 0), + (None, None, 1, 0), + (None, None, 0, 1), + ], +) +def tests_invalid_sessions( + database: capy.Database, supervisor: Supervisor, utc_now: datetime, status, started_at, ended_at, mentor, mentee +): + mentorship_session = {"status": status, "mentor": mentor, "mentee": mentee} if started_at is not None: - mentorship_session['started_at'] = utc_now + started_at + mentorship_session["started_at"] = utc_now + started_at if ended_at is not None: - mentorship_session['ended_at'] = utc_now + ended_at + mentorship_session["ended_at"] = utc_now + ended_at database.create(user=1, mentorship_session=(2, mentorship_session)) supervise_mentorship_survey() - assert supervisor.log('breathecode.feedback.supervisors', 'supervise-mentorshipsurvey') == [] + assert supervisor.log("breathecode.feedback.supervisors", "supervise-mentorshipsurvey") == [] -@pytest.mark.parametrize('delta, answer', [ - (timedelta(0), False), - (timedelta(0), True), - (timedelta(minutes=5, seconds=1), True), -]) -def tests_found_sessions(database: capy.Database, supervisor: Supervisor, utc_now: datetime, delta: timedelta, - answer: bool): +@pytest.mark.parametrize( + "delta, answer", + [ + (timedelta(0), False), + (timedelta(0), True), + (timedelta(minutes=5, seconds=1), True), + ], +) +def tests_found_sessions( + database: capy.Database, supervisor: Supervisor, utc_now: datetime, delta: timedelta, answer: bool +): mentorship_session = { - 'status': 'COMPLETED', - 'mentor': 1, - 'mentee': 1, - 'started_at': utc_now, - 'ended_at': utc_now + delta, + "status": "COMPLETED", + "mentor": 1, + "mentee": 1, + "started_at": utc_now, + "ended_at": utc_now + delta, } if 
answer: - answer = [{'mentorship_session_id': n + 1, 'token_id': n + 1} for n in range(2)] + answer = [{"mentorship_session_id": n + 1, "token_id": n + 1} for n in range(2)] database.create(user=1, mentorship_session=(2, mentorship_session), feedback__answer=answer, token=2) supervise_mentorship_survey() - assert supervisor.log('breathecode.feedback.supervisors', 'supervise_mentorship_survey') == [] + assert supervisor.log("breathecode.feedback.supervisors", "supervise_mentorship_survey") == [] def tests_sessions_with_no_surveys(database: capy.Database, supervisor: Supervisor, utc_now: datetime): delta = timedelta(minutes=5, seconds=1) mentorship_session = { - 'status': 'COMPLETED', - 'mentor': 1, - 'mentee': 1, - 'started_at': utc_now, - 'ended_at': utc_now + delta, + "status": "COMPLETED", + "mentor": 1, + "mentee": 1, + "started_at": utc_now, + "ended_at": utc_now + delta, } database.create(user=1, mentorship_session=(2, mentorship_session)) supervise_mentorship_survey() - assert supervisor.log('breathecode.feedback.supervisors', 'supervise_mentorship_survey') == [ + assert supervisor.log("breathecode.feedback.supervisors", "supervise_mentorship_survey") == [ "Session 1 hasn't a survey", "Session 2 hasn't a survey", ] diff --git a/breathecode/feedback/tests/tasks/tests_generate_user_cohort_survey_answers.py b/breathecode/feedback/tests/tasks/tests_generate_user_cohort_survey_answers.py index 53903a67c..45a6030e7 100644 --- a/breathecode/feedback/tests/tasks/tests_generate_user_cohort_survey_answers.py +++ b/breathecode/feedback/tests/tasks/tests_generate_user_cohort_survey_answers.py @@ -16,25 +16,25 @@ def answer(data={}): return { - 'academy_id': 0, - 'cohort_id': 0, - 'comment': None, - 'event_id': None, - 'highest': 'very good', - 'id': 0, - 'lang': 'en', - 'lowest': 'not good', - 'mentor_id': None, - 'mentorship_session_id': None, - 'opened_at': UTC_NOW, - 'score': None, - 'sent_at': None, - 'status': 'OPENED', - 'survey_id': 0, - 'title': '', - 'token_id': None, - 'user_id': 0, - **data + "academy_id": 0, + "cohort_id": 0, + "comment": None, + "event_id": None, + "highest": "very good", + "id": 0, + "lang": "en", + "lowest": "not good", + "mentor_id": None, + "mentorship_session_id": None, + "opened_at": UTC_NOW, + "score": None, + "sent_at": None, + "status": "OPENED", + "survey_id": 0, + "title": "", + "token_id": None, + "user_id": 0, + **data, } @@ -44,219 +44,237 @@ def test_when_student_is_not_assigned(self): model = self.generate_models(cohort=1, user=1, survey=1) - with self.assertRaisesMessage(ValidationException, 'This student does not belong to this cohort'): - generate_user_cohort_survey_answers(model.user, model.survey, status='OPENED') + with self.assertRaisesMessage(ValidationException, "This student does not belong to this cohort"): + generate_user_cohort_survey_answers(model.user, model.survey, status="OPENED") - self.assertEqual(self.bc.database.list_of('feedback.Answer'), []) + self.assertEqual(self.bc.database.list_of("feedback.Answer"), []) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) def test_when_teacher_is_not_assigned(self): - statuses = ['ACTIVE', 'GRADUATED'] + statuses = ["ACTIVE", "GRADUATED"] for c in statuses: - cohort_user = {'educational_status': c} + cohort_user = {"educational_status": c} model = self.generate_models(cohort=1, user=1, survey=1, cohort_user=cohort_user) - with 
self.assertRaisesMessage(ValidationException, - 'This cohort must have a teacher assigned to be able to survey it'): - generate_user_cohort_survey_answers(model.user, model.survey, status='OPENED') + with self.assertRaisesMessage( + ValidationException, "This cohort must have a teacher assigned to be able to survey it" + ): + generate_user_cohort_survey_answers(model.user, model.survey, status="OPENED") - self.assertEqual(self.bc.database.list_of('feedback.Answer'), []) + self.assertEqual(self.bc.database.list_of("feedback.Answer"), []) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_when_teacher_is_assigned(self): - statuses = ['ACTIVE', 'GRADUATED'] + statuses = ["ACTIVE", "GRADUATED"] for n in range(0, 2): c = statuses[n] - cohort_users = [{'educational_status': c}, {'role': 'TEACHER', 'educational_status': c}] + cohort_users = [{"educational_status": c}, {"role": "TEACHER", "educational_status": c}] model = self.bc.database.create(cohort=1, user=1, survey=1, cohort_user=cohort_users) - generate_user_cohort_survey_answers(model.user, model.survey, status='OPENED') - - self.assertEqual(self.bc.database.list_of('feedback.Answer'), [ - answer({ - 'title': f'How has been your experience studying {model.cohort.name} so far?', - 'user_id': n + 1, - 'survey_id': n + 1, - 'lowest': 'not good', - 'id': (n * 3) + 1, - 'highest': 'very good', - 'cohort_id': n + 1, - 'academy_id': n + 1, - 'token_id': None - }), - answer({ - 'title': - f'How has been your experience with your mentor {model.user.first_name} {model.user.last_name} so far?', - 'lang': 'en', - 'user_id': n + 1, - 'survey_id': n + 1, - 'mentor_id': n + 1, - 'lowest': 'not good', - 'mentorship_session_id': None, - 'score': None, - 'sent_at': None, - 'status': 'OPENED', - 'id': (n * 3) + 2, - 'highest': 'very good', - 'cohort_id': n + 1, - 'academy_id': n + 1 - }), - answer({ - 'title': f'How likely are you to recommend {model.academy.name} to your friends ' - 'and family?', - 'user_id': n + 1, - 'survey_id': n + 1, - 'lowest': 'not likely', - 'id': (n * 3) + 3, - 'highest': 'very likely', - 'cohort_id': None, - 'academy_id': n + 1 - }) - ]) + generate_user_cohort_survey_answers(model.user, model.survey, status="OPENED") + + self.assertEqual( + self.bc.database.list_of("feedback.Answer"), + [ + answer( + { + "title": f"How has been your experience studying {model.cohort.name} so far?", + "user_id": n + 1, + "survey_id": n + 1, + "lowest": "not good", + "id": (n * 3) + 1, + "highest": "very good", + "cohort_id": n + 1, + "academy_id": n + 1, + "token_id": None, + } + ), + answer( + { + "title": f"How has been your experience with your mentor {model.user.first_name} {model.user.last_name} so far?", + "lang": "en", + "user_id": n + 1, + "survey_id": n + 1, + "mentor_id": n + 1, + "lowest": "not good", + "mentorship_session_id": None, + "score": None, + "sent_at": None, + "status": "OPENED", + "id": (n * 3) + 2, + "highest": "very good", + "cohort_id": n + 1, + "academy_id": n + 1, + } + ), + answer( + { + "title": f"How likely are you to recommend {model.academy.name} to your friends " + "and family?", + "user_id": n + 1, + "survey_id": n + 1, + "lowest": "not likely", + "id": (n * 3) + 3, + "highest": "very likely", + "cohort_id": None, + "academy_id": 
n + 1, + } + ), + ], + ) # teardown - self.bc.database.delete('feedback.Answer') + self.bc.database.delete("feedback.Answer") - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_when_cohort_has_syllabus(self): - statuses = ['ACTIVE', 'GRADUATED'] + statuses = ["ACTIVE", "GRADUATED"] for n in range(0, 2): c = statuses[n] - cohort_users = [{'educational_status': c}, {'role': 'TEACHER', 'educational_status': c}] + cohort_users = [{"educational_status": c}, {"role": "TEACHER", "educational_status": c}] model = self.bc.database.create(cohort=1, user=1, survey=1, cohort_user=cohort_users, syllabus_version=1) - generate_user_cohort_survey_answers(model.user, model.survey, status='OPENED') - - self.assertEqual(self.bc.database.list_of('feedback.Answer'), [ - answer({ - 'title': f'How has been your experience studying {model.cohort.name} so far?', - 'user_id': n + 1, - 'survey_id': n + 1, - 'lowest': 'not good', - 'id': (n * 3) + 1, - 'highest': 'very good', - 'cohort_id': n + 1, - 'academy_id': n + 1, - 'token_id': None - }), - answer({ - 'title': - f'How has been your experience with your mentor {model.user.first_name} {model.user.last_name} so far?', - 'lang': 'en', - 'user_id': n + 1, - 'survey_id': n + 1, - 'mentor_id': n + 1, - 'lowest': 'not good', - 'mentorship_session_id': None, - 'score': None, - 'sent_at': None, - 'status': 'OPENED', - 'id': (n * 3) + 2, - 'highest': 'very good', - 'cohort_id': n + 1, - 'academy_id': n + 1 - }), - answer({ - 'title': f'How likely are you to recommend {model.academy.name} to your friends ' - 'and family?', - 'user_id': n + 1, - 'survey_id': n + 1, - 'lowest': 'not likely', - 'id': (n * 3) + 3, - 'highest': 'very likely', - 'cohort_id': None, - 'academy_id': n + 1 - }) - ]) + generate_user_cohort_survey_answers(model.user, model.survey, status="OPENED") + + self.assertEqual( + self.bc.database.list_of("feedback.Answer"), + [ + answer( + { + "title": f"How has been your experience studying {model.cohort.name} so far?", + "user_id": n + 1, + "survey_id": n + 1, + "lowest": "not good", + "id": (n * 3) + 1, + "highest": "very good", + "cohort_id": n + 1, + "academy_id": n + 1, + "token_id": None, + } + ), + answer( + { + "title": f"How has been your experience with your mentor {model.user.first_name} {model.user.last_name} so far?", + "lang": "en", + "user_id": n + 1, + "survey_id": n + 1, + "mentor_id": n + 1, + "lowest": "not good", + "mentorship_session_id": None, + "score": None, + "sent_at": None, + "status": "OPENED", + "id": (n * 3) + 2, + "highest": "very good", + "cohort_id": n + 1, + "academy_id": n + 1, + } + ), + answer( + { + "title": f"How likely are you to recommend {model.academy.name} to your friends " + "and family?", + "user_id": n + 1, + "survey_id": n + 1, + "lowest": "not likely", + "id": (n * 3) + 3, + "highest": "very likely", + "cohort_id": None, + "academy_id": n + 1, + } + ), + ], + ) # teardown - self.bc.database.delete('feedback.Answer') + self.bc.database.delete("feedback.Answer") - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + 
@patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_role_assistant(self): - statuses = ['ACTIVE', 'GRADUATED'] + statuses = ["ACTIVE", "GRADUATED"] for n in range(0, 2): c = statuses[n] cohort_users = [ - { - 'role': 'TEACHER', - 'educational_status': c - }, - { - 'role': 'ASSISTANT', - 'educational_status': c - }, - { - 'educational_status': c - }, + {"role": "TEACHER", "educational_status": c}, + {"role": "ASSISTANT", "educational_status": c}, + {"educational_status": c}, ] model = self.bc.database.create(cohort=1, user=1, survey=1, cohort_user=cohort_users) - generate_user_cohort_survey_answers(model.user, model.survey, status='OPENED') - - self.assertEqual(self.bc.database.list_of('feedback.Answer'), [ - answer({ - 'title': f'How has been your experience studying {model.cohort.name} so far?', - 'user_id': n + 1, - 'survey_id': n + 1, - 'lowest': 'not good', - 'id': (n * 4) + 1, - 'highest': 'very good', - 'cohort_id': n + 1, - 'academy_id': n + 1, - 'token_id': None - }), - answer({ - 'title': - f'How has been your experience with your mentor {model.user.first_name} {model.user.last_name} so far?', - 'lang': 'en', - 'user_id': n + 1, - 'survey_id': n + 1, - 'mentor_id': n + 1, - 'lowest': 'not good', - 'mentorship_session_id': None, - 'score': None, - 'sent_at': None, - 'status': 'OPENED', - 'id': (n * 4) + 2, - 'highest': 'very good', - 'cohort_id': n + 1, - 'academy_id': n + 1 - }), - answer({ - 'title': - f'How has been your experience with your mentor {model.user.first_name} {model.user.last_name} so far?', - 'user_id': n + 1, - 'survey_id': n + 1, - 'lowest': 'not good', - 'id': (n * 4) + 3, - 'highest': 'very good', - 'cohort_id': n + 1, - 'academy_id': n + 1, - 'mentor_id': n + 1 - }), - answer({ - 'title': f'How likely are you to recommend {model.academy.name} to your friends ' - 'and family?', - 'user_id': n + 1, - 'survey_id': n + 1, - 'lowest': 'not likely', - 'id': (n * 4) + 4, - 'highest': 'very likely', - 'cohort_id': None, - 'academy_id': n + 1 - }) - ]) + generate_user_cohort_survey_answers(model.user, model.survey, status="OPENED") + + self.assertEqual( + self.bc.database.list_of("feedback.Answer"), + [ + answer( + { + "title": f"How has been your experience studying {model.cohort.name} so far?", + "user_id": n + 1, + "survey_id": n + 1, + "lowest": "not good", + "id": (n * 4) + 1, + "highest": "very good", + "cohort_id": n + 1, + "academy_id": n + 1, + "token_id": None, + } + ), + answer( + { + "title": f"How has been your experience with your mentor {model.user.first_name} {model.user.last_name} so far?", + "lang": "en", + "user_id": n + 1, + "survey_id": n + 1, + "mentor_id": n + 1, + "lowest": "not good", + "mentorship_session_id": None, + "score": None, + "sent_at": None, + "status": "OPENED", + "id": (n * 4) + 2, + "highest": "very good", + "cohort_id": n + 1, + "academy_id": n + 1, + } + ), + answer( + { + "title": f"How has been your experience with your mentor {model.user.first_name} {model.user.last_name} so far?", + "user_id": n + 1, + "survey_id": n + 1, + "lowest": "not good", + "id": (n * 4) + 3, + "highest": "very good", + "cohort_id": n + 1, + "academy_id": n + 1, + "mentor_id": n + 1, + } + ), + answer( + { + "title": f"How likely are you to recommend {model.academy.name} to your friends " + "and family?", + "user_id": n + 1, + "survey_id": n + 1, + "lowest": "not likely", + "id": (n * 4) + 4, + "highest": "very likely", + "cohort_id": None, + "academy_id": n + 1, + } + ), + ], + ) # teardown - 
self.bc.database.delete('feedback.Answer') + self.bc.database.delete("feedback.Answer") diff --git a/breathecode/feedback/tests/tasks/tests_process_answer_received.py b/breathecode/feedback/tests/tasks/tests_process_answer_received.py index 48bef983c..bd19990e4 100644 --- a/breathecode/feedback/tests/tasks/tests_process_answer_received.py +++ b/breathecode/feedback/tests/tasks/tests_process_answer_received.py @@ -1,6 +1,7 @@ """ Test /academy/survey """ + from unittest.mock import MagicMock, call, patch from breathecode.feedback.tasks import process_answer_received @@ -20,39 +21,48 @@ def get_env(key, value=None): class SurveyAnsweredTestSuite(FeedbackTestCase): """Test /academy/survey""" - @patch('logging.Logger.warning', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.feedback.signals.survey_answered.send_robust', MagicMock()) - @patch('breathecode.feedback.actions.calculate_survey_scores', MagicMock(wraps=actions.calculate_survey_scores)) - @patch('breathecode.feedback.actions.calculate_survey_response_rate', - MagicMock(wraps=actions.calculate_survey_response_rate)) + @patch("logging.Logger.warning", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.feedback.signals.survey_answered.send_robust", MagicMock()) + @patch("breathecode.feedback.actions.calculate_survey_scores", MagicMock(wraps=actions.calculate_survey_scores)) + @patch( + "breathecode.feedback.actions.calculate_survey_response_rate", + MagicMock(wraps=actions.calculate_survey_response_rate), + ) def test_survey_answered_task_without_answer(self): import logging process_answer_received.delay(1) - self.assertEqual(logging.Logger.warning.call_args_list, [ - call('Answer not found'), - ]) - self.assertEqual(logging.Logger.error.call_args_list, [ - call('Answer not found', exc_info=True), - ]) + self.assertEqual( + logging.Logger.warning.call_args_list, + [ + call("Answer not found"), + ], + ) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call("Answer not found", exc_info=True), + ], + ) self.assertEqual(actions.calculate_survey_scores.call_args_list, []) self.assertEqual(actions.calculate_survey_response_rate.call_args_list, []) - self.assertEqual(self.bc.database.list_of('feedback.Survey'), []) - - @patch('logging.Logger.warning', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.feedback.signals.survey_answered.send_robust', MagicMock()) - @patch('breathecode.feedback.actions.calculate_survey_scores', MagicMock(wraps=actions.calculate_survey_scores)) - @patch('breathecode.feedback.actions.calculate_survey_response_rate', - MagicMock(wraps=actions.calculate_survey_response_rate)) - @patch('os.getenv', - MagicMock(side_effect=apply_get_env({ - 'SYSTEM_EMAIL': 'a@a.a', - 'ADMIN_URL': 'https://www.whatever.com' - }))) + self.assertEqual(self.bc.database.list_of("feedback.Survey"), []) + + @patch("logging.Logger.warning", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.feedback.signals.survey_answered.send_robust", MagicMock()) + @patch("breathecode.feedback.actions.calculate_survey_scores", MagicMock(wraps=actions.calculate_survey_scores)) + @patch( + "breathecode.feedback.actions.calculate_survey_response_rate", + MagicMock(wraps=actions.calculate_survey_response_rate), + ) + @patch( + "os.getenv", + MagicMock(side_effect=apply_get_env({"SYSTEM_EMAIL": "a@a.a", "ADMIN_URL": "https://www.whatever.com"})), + ) def test_survey_answered_task_without_survey(self): import logging @@ -62,26 
+72,31 @@ def test_survey_answered_task_without_survey(self): process_answer_received.delay(1) self.assertEqual(logging.Logger.warning.call_args_list, []) - self.assertEqual(logging.Logger.error.call_args_list, [ - call('No survey connected to answer.', exc_info=True), - ]) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call("No survey connected to answer.", exc_info=True), + ], + ) self.assertEqual(actions.calculate_survey_scores.call_args_list, []) self.assertEqual(actions.calculate_survey_response_rate.call_args_list, []) - self.assertEqual(self.bc.database.list_of('feedback.Survey'), []) - - @patch('logging.Logger.warning', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.feedback.signals.survey_answered.send_robust', MagicMock()) - @patch('breathecode.feedback.actions.calculate_survey_scores', MagicMock(wraps=actions.calculate_survey_scores)) - @patch('breathecode.feedback.actions.calculate_survey_response_rate', - MagicMock(wraps=actions.calculate_survey_response_rate)) + self.assertEqual(self.bc.database.list_of("feedback.Survey"), []) + + @patch("logging.Logger.warning", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.feedback.signals.survey_answered.send_robust", MagicMock()) + @patch("breathecode.feedback.actions.calculate_survey_scores", MagicMock(wraps=actions.calculate_survey_scores)) + @patch( + "breathecode.feedback.actions.calculate_survey_response_rate", + MagicMock(wraps=actions.calculate_survey_response_rate), + ) def test_survey_answered_task_with_survey(self): import logging - with patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()): + with patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()): model = self.generate_models(answer=1, survey=1) - survey_db = self.model_to_dict(model, 'survey') + survey_db = self.model_to_dict(model, "survey") process_answer_received.delay(1) @@ -89,34 +104,36 @@ def test_survey_answered_task_with_survey(self): self.assertEqual(logging.Logger.error.call_args_list, []) self.assertEqual(actions.calculate_survey_scores.call_args_list, [call(1)]) self.assertEqual(actions.calculate_survey_response_rate.call_args_list, [call(1)]) - self.assertEqual(self.bc.database.list_of('feedback.Survey'), [{ - **survey_db, - 'response_rate': 0.0, - 'scores': { - 'academy': None, - 'cohort': None, - 'mentors': [], - 'total': None - }, - }]) - - @patch('logging.Logger.warning', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.feedback.signals.survey_answered.send_robust', MagicMock()) - @patch('breathecode.notify.actions.send_email_message', MagicMock()) - @patch('breathecode.feedback.actions.calculate_survey_scores', MagicMock(wraps=actions.calculate_survey_scores)) - @patch('breathecode.feedback.actions.calculate_survey_response_rate', - MagicMock(wraps=actions.calculate_survey_response_rate)) + self.assertEqual( + self.bc.database.list_of("feedback.Survey"), + [ + { + **survey_db, + "response_rate": 0.0, + "scores": {"academy": None, "cohort": None, "mentors": [], "total": None}, + } + ], + ) + + @patch("logging.Logger.warning", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.feedback.signals.survey_answered.send_robust", MagicMock()) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) + @patch("breathecode.feedback.actions.calculate_survey_scores", MagicMock(wraps=actions.calculate_survey_scores)) + @patch( + 
"breathecode.feedback.actions.calculate_survey_response_rate", + MagicMock(wraps=actions.calculate_survey_response_rate), + ) def test_survey_answered_task_with_survey_score_seven(self): import logging from breathecode.notify.actions import send_email_message - answer = {'score': 7, 'status': 'ANSWERED'} - with patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()): + answer = {"score": 7, "status": "ANSWERED"} + with patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()): model = self.generate_models(answer=answer, survey=1) - survey_db = self.model_to_dict(model, 'survey') + survey_db = self.model_to_dict(model, "survey") process_answer_received.delay(1) @@ -125,34 +142,36 @@ def test_survey_answered_task_with_survey_score_seven(self): self.assertEqual(send_email_message.call_args_list, []) self.assertEqual(actions.calculate_survey_scores.call_args_list, [call(1)]) self.assertEqual(actions.calculate_survey_response_rate.call_args_list, [call(1)]) - self.assertEqual(self.bc.database.list_of('feedback.Survey'), [{ - **survey_db, - 'response_rate': 100.0, - 'scores': { - 'academy': None, - 'cohort': None, - 'mentors': [], - 'total': 7.0 - }, - }]) - - @patch('logging.Logger.warning', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.feedback.signals.survey_answered.send_robust', MagicMock()) - @patch('breathecode.notify.actions.send_email_message', MagicMock()) - @patch('breathecode.feedback.actions.calculate_survey_scores', MagicMock(wraps=actions.calculate_survey_scores)) - @patch('breathecode.feedback.actions.calculate_survey_response_rate', - MagicMock(wraps=actions.calculate_survey_response_rate)) + self.assertEqual( + self.bc.database.list_of("feedback.Survey"), + [ + { + **survey_db, + "response_rate": 100.0, + "scores": {"academy": None, "cohort": None, "mentors": [], "total": 7.0}, + } + ], + ) + + @patch("logging.Logger.warning", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.feedback.signals.survey_answered.send_robust", MagicMock()) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) + @patch("breathecode.feedback.actions.calculate_survey_scores", MagicMock(wraps=actions.calculate_survey_scores)) + @patch( + "breathecode.feedback.actions.calculate_survey_response_rate", + MagicMock(wraps=actions.calculate_survey_response_rate), + ) def test_survey_answered_task_with_survey_score_seven__with_academy(self): import logging from breathecode.notify.actions import send_email_message - answer = {'score': 7, 'status': 'ANSWERED'} - with patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()): + answer = {"score": 7, "status": "ANSWERED"} + with patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()): model = self.generate_models(answer=answer, survey=1, academy=1) - survey_db = self.model_to_dict(model, 'survey') + survey_db = self.model_to_dict(model, "survey") process_answer_received.delay(1) @@ -161,24 +180,26 @@ def test_survey_answered_task_with_survey_score_seven__with_academy(self): self.assertEqual(send_email_message.call_args_list, []) self.assertEqual(actions.calculate_survey_scores.call_args_list, [call(1)]) self.assertEqual(actions.calculate_survey_response_rate.call_args_list, [call(1)]) - self.assertEqual(self.bc.database.list_of('feedback.Survey'), [{ - **survey_db, - 'response_rate': 100.0, - 'scores': { - 'academy': None, - 'cohort': None, - 'mentors': [], - 'total': 7.0 - }, - }]) - - 
@patch('logging.Logger.error', MagicMock()) - @patch('os.getenv', MagicMock(return_value=None)) - @patch('breathecode.feedback.signals.survey_answered.send_robust', MagicMock()) - @patch('breathecode.notify.actions.send_email_message', MagicMock()) - @patch('breathecode.feedback.actions.calculate_survey_scores', MagicMock(wraps=actions.calculate_survey_scores)) - @patch('breathecode.feedback.actions.calculate_survey_response_rate', - MagicMock(wraps=actions.calculate_survey_response_rate)) + self.assertEqual( + self.bc.database.list_of("feedback.Survey"), + [ + { + **survey_db, + "response_rate": 100.0, + "scores": {"academy": None, "cohort": None, "mentors": [], "total": 7.0}, + } + ], + ) + + @patch("logging.Logger.error", MagicMock()) + @patch("os.getenv", MagicMock(return_value=None)) + @patch("breathecode.feedback.signals.survey_answered.send_robust", MagicMock()) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) + @patch("breathecode.feedback.actions.calculate_survey_scores", MagicMock(wraps=actions.calculate_survey_scores)) + @patch( + "breathecode.feedback.actions.calculate_survey_response_rate", + MagicMock(wraps=actions.calculate_survey_response_rate), + ) def test_survey_answered_task_with_survey_score_seven__with_academy__with_user__no_emails(self): import logging @@ -186,43 +207,47 @@ def test_survey_answered_task_with_survey_score_seven__with_academy__with_user__ from breathecode.notify.actions import send_email_message - answer_kwargs = {'score': 7} - academy = {'feedback_email': None} - with patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()): + answer_kwargs = {"score": 7} + academy = {"feedback_email": None} + with patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()): model = self.generate_models(answer=answer_kwargs, survey=1, academy=academy, user=1, cohort=1) - survey_db = self.model_to_dict(model, 'survey') + survey_db = self.model_to_dict(model, "survey") process_answer_received.delay(1) - self.assertEqual(logging.Logger.error.call_args_list, [ - call('No email found.', exc_info=True), - ]) - self.assertEqual(os.getenv.call_args_list, [call('ENV', ''), call('SYSTEM_EMAIL'), call('ADMIN_URL')]) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call("No email found.", exc_info=True), + ], + ) + self.assertEqual(os.getenv.call_args_list, [call("ENV", ""), call("SYSTEM_EMAIL"), call("ADMIN_URL")]) self.assertEqual(send_email_message.call_args_list, []) self.assertEqual(actions.calculate_survey_scores.call_args_list, [call(1)]) self.assertEqual(actions.calculate_survey_response_rate.call_args_list, [call(1)]) - self.assertEqual(self.bc.database.list_of('feedback.Survey'), [{ - **survey_db, - 'response_rate': 0.0, - 'scores': { - 'academy': None, - 'cohort': None, - 'mentors': [], - 'total': None - }, - }]) - - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.feedback.signals.survey_answered.send_robust', MagicMock()) - @patch('breathecode.notify.actions.send_email_message', MagicMock()) - @patch('breathecode.feedback.actions.calculate_survey_scores', MagicMock(wraps=actions.calculate_survey_scores)) - @patch('breathecode.feedback.actions.calculate_survey_response_rate', - MagicMock(wraps=actions.calculate_survey_response_rate)) - @patch('os.getenv', - MagicMock(side_effect=apply_get_env({ - 'SYSTEM_EMAIL': 'a@a.a', - 'ADMIN_URL': 'https://www.whatever.com' - }))) + self.assertEqual( + self.bc.database.list_of("feedback.Survey"), + [ + { + **survey_db, + "response_rate": 0.0, + 
"scores": {"academy": None, "cohort": None, "mentors": [], "total": None}, + } + ], + ) + + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.feedback.signals.survey_answered.send_robust", MagicMock()) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) + @patch("breathecode.feedback.actions.calculate_survey_scores", MagicMock(wraps=actions.calculate_survey_scores)) + @patch( + "breathecode.feedback.actions.calculate_survey_response_rate", + MagicMock(wraps=actions.calculate_survey_response_rate), + ) + @patch( + "os.getenv", + MagicMock(side_effect=apply_get_env({"SYSTEM_EMAIL": "a@a.a", "ADMIN_URL": "https://www.whatever.com"})), + ) def test_survey_answered_task_with_survey_score_seven__with_academy__with_user__without_feedback_email(self): import logging @@ -230,219 +255,250 @@ def test_survey_answered_task_with_survey_score_seven__with_academy__with_user__ from breathecode.notify.actions import send_email_message - answer_kwargs = {'score': 7} - with patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()): + answer_kwargs = {"score": 7} + with patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()): model = self.generate_models(answer=answer_kwargs, survey=1, academy=1, user=1, cohort=1) - survey_db = self.model_to_dict(model, 'survey') + survey_db = self.model_to_dict(model, "survey") process_answer_received.delay(1) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(os.getenv.call_args_list, [call('ENV', ''), call('SYSTEM_EMAIL'), call('ADMIN_URL')]) + self.assertEqual(os.getenv.call_args_list, [call("ENV", ""), call("SYSTEM_EMAIL"), call("ADMIN_URL")]) assert send_email_message.call_args_list == [ - call('negative_answer', ['a@a.a'], - data={ - 'SUBJECT': f'A student answered with a bad NPS score at {model.answer.academy.name}', - 'FULL_NAME': f'{model.answer.user.first_name} {model.answer.user.last_name}', - 'QUESTION': model.answer.title, - 'SCORE': model.answer.score, - 'COMMENTS': model.answer.comment, - 'ACADEMY': model.answer.academy.name, - 'LINK': f'https://www.whatever.com/feedback/surveys/{model.answer.academy.slug}/1' - }, - academy=model.academy) + call( + "negative_answer", + ["a@a.a"], + data={ + "SUBJECT": f"A student answered with a bad NPS score at {model.answer.academy.name}", + "FULL_NAME": f"{model.answer.user.first_name} {model.answer.user.last_name}", + "QUESTION": model.answer.title, + "SCORE": model.answer.score, + "COMMENTS": model.answer.comment, + "ACADEMY": model.answer.academy.name, + "LINK": f"https://www.whatever.com/feedback/surveys/{model.answer.academy.slug}/1", + }, + academy=model.academy, + ) ] self.assertEqual(actions.calculate_survey_scores.call_args_list, [call(1)]) self.assertEqual(actions.calculate_survey_response_rate.call_args_list, [call(1)]) - self.assertEqual(self.bc.database.list_of('feedback.Survey'), [{ - **survey_db, - 'response_rate': 0.0, - 'scores': { - 'academy': None, - 'cohort': None, - 'mentors': [], - 'total': None - }, - }]) - - @patch('logging.Logger.error', MagicMock()) - @patch('os.getenv', - MagicMock(side_effect=apply_get_env({ - 'SYSTEM_EMAIL': None, - 'ADMIN_URL': 'https://www.whatever.com' - }))) - @patch('breathecode.feedback.signals.survey_answered.send_robust', MagicMock()) - @patch('breathecode.notify.actions.send_email_message', MagicMock()) - @patch('breathecode.feedback.actions.calculate_survey_scores', MagicMock(wraps=actions.calculate_survey_scores)) - 
@patch('breathecode.feedback.actions.calculate_survey_response_rate', - MagicMock(wraps=actions.calculate_survey_response_rate)) + self.assertEqual( + self.bc.database.list_of("feedback.Survey"), + [ + { + **survey_db, + "response_rate": 0.0, + "scores": {"academy": None, "cohort": None, "mentors": [], "total": None}, + } + ], + ) + + @patch("logging.Logger.error", MagicMock()) + @patch( + "os.getenv", + MagicMock(side_effect=apply_get_env({"SYSTEM_EMAIL": None, "ADMIN_URL": "https://www.whatever.com"})), + ) + @patch("breathecode.feedback.signals.survey_answered.send_robust", MagicMock()) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) + @patch("breathecode.feedback.actions.calculate_survey_scores", MagicMock(wraps=actions.calculate_survey_scores)) + @patch( + "breathecode.feedback.actions.calculate_survey_response_rate", + MagicMock(wraps=actions.calculate_survey_response_rate), + ) def test_survey_answered_task_with_survey_score_seven__with_academy__with_user__without_system_email__with_feedback_email( - self): + self, + ): import logging from breathecode.notify.actions import send_email_message - answer_kwargs = {'score': 7} - academy_kwargs = {'feedback_email': 'someone@email.com'} - with patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()): + answer_kwargs = {"score": 7} + academy_kwargs = {"feedback_email": "someone@email.com"} + with patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()): model = self.generate_models(answer=answer_kwargs, survey=1, academy=academy_kwargs, user=1) - survey_db = self.model_to_dict(model, 'survey') + survey_db = self.model_to_dict(model, "survey") process_answer_received.delay(1) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(send_email_message.call_args_list, [ - call('negative_answer', [f'{model.academy.feedback_email}'], - data={ - 'SUBJECT': f'A student answered with a bad NPS score at {model.answer.academy.name}', - 'FULL_NAME': f'{model.answer.user.first_name} {model.answer.user.last_name}', - 'QUESTION': model.answer.title, - 'SCORE': model.answer.score, - 'COMMENTS': model.answer.comment, - 'ACADEMY': model.answer.academy.name, - 'LINK': f'https://www.whatever.com/feedback/surveys/{model.answer.academy.slug}/1' - }, - academy=model.academy) - ]) + self.assertEqual( + send_email_message.call_args_list, + [ + call( + "negative_answer", + [f"{model.academy.feedback_email}"], + data={ + "SUBJECT": f"A student answered with a bad NPS score at {model.answer.academy.name}", + "FULL_NAME": f"{model.answer.user.first_name} {model.answer.user.last_name}", + "QUESTION": model.answer.title, + "SCORE": model.answer.score, + "COMMENTS": model.answer.comment, + "ACADEMY": model.answer.academy.name, + "LINK": f"https://www.whatever.com/feedback/surveys/{model.answer.academy.slug}/1", + }, + academy=model.academy, + ) + ], + ) self.assertEqual(actions.calculate_survey_scores.call_args_list, [call(1)]) self.assertEqual(actions.calculate_survey_response_rate.call_args_list, [call(1)]) - self.assertEqual(self.bc.database.list_of('feedback.Survey'), [{ - **survey_db, - 'response_rate': 0.0, - 'scores': { - 'academy': None, - 'cohort': None, - 'mentors': [], - 'total': None - }, - }]) - - @patch('logging.Logger.error', MagicMock()) + self.assertEqual( + self.bc.database.list_of("feedback.Survey"), + [ + { + **survey_db, + "response_rate": 0.0, + "scores": {"academy": None, "cohort": None, "mentors": [], "total": None}, + } + ], + ) + + @patch("logging.Logger.error", 
MagicMock()) + @patch( + "os.getenv", + MagicMock( + side_effect=apply_get_env({"SYSTEM_EMAIL": "test@email.com", "ADMIN_URL": "https://www.whatever.com"}) + ), + ) + @patch("breathecode.feedback.signals.survey_answered.send_robust", MagicMock()) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) + @patch("breathecode.feedback.actions.calculate_survey_scores", MagicMock(wraps=actions.calculate_survey_scores)) @patch( - 'os.getenv', - MagicMock(side_effect=apply_get_env({ - 'SYSTEM_EMAIL': 'test@email.com', - 'ADMIN_URL': 'https://www.whatever.com' - }))) - @patch('breathecode.feedback.signals.survey_answered.send_robust', MagicMock()) - @patch('breathecode.notify.actions.send_email_message', MagicMock()) - @patch('breathecode.feedback.actions.calculate_survey_scores', MagicMock(wraps=actions.calculate_survey_scores)) - @patch('breathecode.feedback.actions.calculate_survey_response_rate', - MagicMock(wraps=actions.calculate_survey_response_rate)) + "breathecode.feedback.actions.calculate_survey_response_rate", + MagicMock(wraps=actions.calculate_survey_response_rate), + ) def test_survey_answered_task_with_survey_score_seven__with_academy__with_user__with_system_email__without_feedback_email( - self): + self, + ): import logging from breathecode.notify.actions import send_email_message - answer_kwargs = {'score': 7} - with patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()): + answer_kwargs = {"score": 7} + with patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()): model = self.generate_models(answer=answer_kwargs, survey=1, academy=1, user=1) - survey_db = self.model_to_dict(model, 'survey') + survey_db = self.model_to_dict(model, "survey") process_answer_received.delay(1) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(send_email_message.call_args_list, [ - call('negative_answer', ['test@email.com'], - data={ - 'SUBJECT': f'A student answered with a bad NPS score at {model.answer.academy.name}', - 'FULL_NAME': f'{model.answer.user.first_name} {model.answer.user.last_name}', - 'QUESTION': model.answer.title, - 'SCORE': model.answer.score, - 'COMMENTS': model.answer.comment, - 'ACADEMY': model.answer.academy.name, - 'LINK': f'https://www.whatever.com/feedback/surveys/{model.answer.academy.slug}/1' - }, - academy=model.academy) - ]) + self.assertEqual( + send_email_message.call_args_list, + [ + call( + "negative_answer", + ["test@email.com"], + data={ + "SUBJECT": f"A student answered with a bad NPS score at {model.answer.academy.name}", + "FULL_NAME": f"{model.answer.user.first_name} {model.answer.user.last_name}", + "QUESTION": model.answer.title, + "SCORE": model.answer.score, + "COMMENTS": model.answer.comment, + "ACADEMY": model.answer.academy.name, + "LINK": f"https://www.whatever.com/feedback/surveys/{model.answer.academy.slug}/1", + }, + academy=model.academy, + ) + ], + ) self.assertEqual(actions.calculate_survey_scores.call_args_list, [call(1)]) self.assertEqual(actions.calculate_survey_response_rate.call_args_list, [call(1)]) - self.assertEqual(self.bc.database.list_of('feedback.Survey'), [{ - **survey_db, - 'response_rate': 0.0, - 'scores': { - 'academy': None, - 'cohort': None, - 'mentors': [], - 'total': None - }, - }]) - - @patch('logging.Logger.error', MagicMock()) + self.assertEqual( + self.bc.database.list_of("feedback.Survey"), + [ + { + **survey_db, + "response_rate": 0.0, + "scores": {"academy": None, "cohort": None, "mentors": [], "total": None}, + } + ], + ) + + 
@patch("logging.Logger.error", MagicMock()) @patch( - 'os.getenv', - MagicMock(side_effect=apply_get_env({ - 'SYSTEM_EMAIL': 'test@email.com', - 'ADMIN_URL': 'https://www.whatever.com' - }))) - @patch('breathecode.feedback.signals.survey_answered.send_robust', MagicMock()) - @patch('breathecode.notify.actions.send_email_message', MagicMock()) - @patch('breathecode.feedback.actions.calculate_survey_scores', MagicMock(wraps=actions.calculate_survey_scores)) - @patch('breathecode.feedback.actions.calculate_survey_response_rate', - MagicMock(wraps=actions.calculate_survey_response_rate)) + "os.getenv", + MagicMock( + side_effect=apply_get_env({"SYSTEM_EMAIL": "test@email.com", "ADMIN_URL": "https://www.whatever.com"}) + ), + ) + @patch("breathecode.feedback.signals.survey_answered.send_robust", MagicMock()) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) + @patch("breathecode.feedback.actions.calculate_survey_scores", MagicMock(wraps=actions.calculate_survey_scores)) + @patch( + "breathecode.feedback.actions.calculate_survey_response_rate", + MagicMock(wraps=actions.calculate_survey_response_rate), + ) def test_survey_answered_task_with_survey_score_seven__with_academy__with_user__with_system_email__with_feedback_email( - self): + self, + ): import logging import os from breathecode.notify.actions import send_email_message - answer_kwargs = {'score': 7} - academy_kwargs = {'feedback_email': 'someone@email.com'} - with patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()): + answer_kwargs = {"score": 7} + academy_kwargs = {"feedback_email": "someone@email.com"} + with patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()): model = self.generate_models(answer=answer_kwargs, survey=1, academy=academy_kwargs, user=1) - survey_db = self.model_to_dict(model, 'survey') + survey_db = self.model_to_dict(model, "survey") process_answer_received.delay(1) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(send_email_message.call_args_list, [ - call('negative_answer', ['test@email.com', model.academy.feedback_email], - data={ - 'SUBJECT': f'A student answered with a bad NPS score at {model.answer.academy.name}', - 'FULL_NAME': f'{model.answer.user.first_name} {model.answer.user.last_name}', - 'QUESTION': model.answer.title, - 'SCORE': model.answer.score, - 'COMMENTS': model.answer.comment, - 'ACADEMY': model.answer.academy.name, - 'LINK': f'https://www.whatever.com/feedback/surveys/{model.answer.academy.slug}/1' - }, - academy=model.academy) - ]) + self.assertEqual( + send_email_message.call_args_list, + [ + call( + "negative_answer", + ["test@email.com", model.academy.feedback_email], + data={ + "SUBJECT": f"A student answered with a bad NPS score at {model.answer.academy.name}", + "FULL_NAME": f"{model.answer.user.first_name} {model.answer.user.last_name}", + "QUESTION": model.answer.title, + "SCORE": model.answer.score, + "COMMENTS": model.answer.comment, + "ACADEMY": model.answer.academy.name, + "LINK": f"https://www.whatever.com/feedback/surveys/{model.answer.academy.slug}/1", + }, + academy=model.academy, + ) + ], + ) self.assertEqual(actions.calculate_survey_scores.call_args_list, [call(1)]) self.assertEqual(actions.calculate_survey_response_rate.call_args_list, [call(1)]) - self.assertEqual(self.bc.database.list_of('feedback.Survey'), [{ - **survey_db, - 'response_rate': 0.0, - 'scores': { - 'academy': None, - 'cohort': None, - 'mentors': [], - 'total': None - }, - }]) - - @patch('logging.Logger.warning', 
MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.feedback.signals.survey_answered.send_robust', MagicMock()) - @patch('breathecode.notify.actions.send_email_message', MagicMock()) - @patch('breathecode.feedback.actions.calculate_survey_scores', MagicMock(wraps=actions.calculate_survey_scores)) - @patch('breathecode.feedback.actions.calculate_survey_response_rate', - MagicMock(wraps=actions.calculate_survey_response_rate)) + self.assertEqual( + self.bc.database.list_of("feedback.Survey"), + [ + { + **survey_db, + "response_rate": 0.0, + "scores": {"academy": None, "cohort": None, "mentors": [], "total": None}, + } + ], + ) + + @patch("logging.Logger.warning", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.feedback.signals.survey_answered.send_robust", MagicMock()) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) + @patch("breathecode.feedback.actions.calculate_survey_scores", MagicMock(wraps=actions.calculate_survey_scores)) + @patch( + "breathecode.feedback.actions.calculate_survey_response_rate", + MagicMock(wraps=actions.calculate_survey_response_rate), + ) def test_survey_answered_task_with_survey_score_ten__with_academy__with_user(self): import logging from breathecode.notify.actions import send_email_message - answer = {'score': 10, 'status': 'ANSWERED'} - with patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()): + answer = {"score": 10, "status": "ANSWERED"} + with patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()): model = self.generate_models(answer=answer, survey=1, academy=1, user=1) - survey_db = self.model_to_dict(model, 'survey') + survey_db = self.model_to_dict(model, "survey") process_answer_received.delay(1) @@ -451,13 +507,13 @@ def test_survey_answered_task_with_survey_score_ten__with_academy__with_user(sel self.assertEqual(send_email_message.call_args_list, []) self.assertEqual(actions.calculate_survey_scores.call_args_list, [call(1)]) self.assertEqual(actions.calculate_survey_response_rate.call_args_list, [call(1)]) - self.assertEqual(self.bc.database.list_of('feedback.Survey'), [{ - **survey_db, - 'response_rate': 100.0, - 'scores': { - 'academy': None, - 'cohort': None, - 'mentors': [], - 'total': 10.0 - }, - }]) + self.assertEqual( + self.bc.database.list_of("feedback.Survey"), + [ + { + **survey_db, + "response_rate": 100.0, + "scores": {"academy": None, "cohort": None, "mentors": [], "total": 10.0}, + } + ], + ) diff --git a/breathecode/feedback/tests/tasks/tests_process_student_graduation.py b/breathecode/feedback/tests/tasks/tests_process_student_graduation.py index b69bf44e9..50254f8af 100644 --- a/breathecode/feedback/tests/tasks/tests_process_student_graduation.py +++ b/breathecode/feedback/tests/tasks/tests_process_student_graduation.py @@ -16,25 +16,25 @@ @pytest.fixture(autouse=True) def setup(db, monkeypatch: pytest.MonkeyPatch): - monkeypatch.setattr('logging.Logger.info', MagicMock()) - monkeypatch.setattr('logging.Logger.error', MagicMock()) + monkeypatch.setattr("logging.Logger.info", MagicMock()) + monkeypatch.setattr("logging.Logger.error", MagicMock()) yield def db_item(data={}): return { - 'author_id': 0, - 'cohort_id': 0, - 'comments': None, - 'id': 0, - 'is_public': False, - 'lang': None, - 'nps_previous_rating': None, - 'platform_id': '', - 'public_url': None, - 'status': 'PENDING', - 'status_text': None, - 'total_rating': None, + "author_id": 0, + "cohort_id": 0, + "comments": None, + "id": 0, + "is_public": 
False, + "lang": None, + "nps_previous_rating": None, + "platform_id": "", + "public_url": None, + "status": "PENDING", + "status_text": None, + "total_rating": None, **data, } @@ -44,9 +44,9 @@ def test_cohort_not_found(database: capy.Database): assert logging.Logger.info.call_args_list == [] assert logging.Logger.error.call_args_list == [ - call('Invalid cohort id: 1', exc_info=True), + call("Invalid cohort id: 1", exc_info=True), ] - assert database.list_of('feedback.Review') == [] + assert database.list_of("feedback.Review") == [] def test_user_not_found(database: capy.Database): @@ -56,9 +56,9 @@ def test_user_not_found(database: capy.Database): assert logging.Logger.info.call_args_list == [] assert logging.Logger.error.call_args_list == [ - call('Invalid user id: 1', exc_info=True), + call("Invalid user id: 1", exc_info=True), ] - assert database.list_of('feedback.Review') == [] + assert database.list_of("feedback.Review") == [] def test_no_answers(database: capy.Database): @@ -66,85 +66,100 @@ def test_no_answers(database: capy.Database): process_student_graduation.delay(1, 1) - assert logging.Logger.info.call_args_list == [call('0 will be requested for student 1, avg NPS score of None')] + assert logging.Logger.info.call_args_list == [call("0 will be requested for student 1, avg NPS score of None")] assert logging.Logger.error.call_args_list == [] - assert database.list_of('feedback.Review') == [] + assert database.list_of("feedback.Review") == [] def test_answers_not_answered(database: capy.Database): - model = database.create(cohort=1, - city=1, - country=1, - user=1, - feedback__answer=[{ - 'score': 4, - 'token_id': n + 1 - } for n in range(2)], - token=2, - review_platform=2) + model = database.create( + cohort=1, + city=1, + country=1, + user=1, + feedback__answer=[{"score": 4, "token_id": n + 1} for n in range(2)], + token=2, + review_platform=2, + ) process_student_graduation.delay(1, 1) - assert logging.Logger.info.call_args_list == [call('2 will be requested for student 1, avg NPS score of None')] + assert logging.Logger.info.call_args_list == [call("2 will be requested for student 1, avg NPS score of None")] assert logging.Logger.error.call_args_list == [] - assert database.list_of('feedback.Review') == [ - db_item({ - 'id': n + 1, - 'cohort_id': 1, - 'author_id': 1, - 'platform_id': model.review_platform[n].slug, - 'nps_previous_rating': None, - }) for n in range(2) + assert database.list_of("feedback.Review") == [ + db_item( + { + "id": n + 1, + "cohort_id": 1, + "author_id": 1, + "platform_id": model.review_platform[n].slug, + "nps_previous_rating": None, + } + ) + for n in range(2) ] def test_answers_answered__low_avg(database: capy.Database): - model = database.create(cohort=1, - city=1, - country=1, - user=1, - feedback__answer=[{ - 'score': 4, - 'token_id': n + 1, - 'status': 'ANSWERED', - } for n in range(2)], - token=2, - review_platform=2) + model = database.create( + cohort=1, + city=1, + country=1, + user=1, + feedback__answer=[ + { + "score": 4, + "token_id": n + 1, + "status": "ANSWERED", + } + for n in range(2) + ], + token=2, + review_platform=2, + ) process_student_graduation.delay(1, 1) assert logging.Logger.info.call_args_list == [ - call('No reviews requested for student 1 because average NPS score is 4.0'), + call("No reviews requested for student 1 because average NPS score is 4.0"), ] assert logging.Logger.error.call_args_list == [] - assert database.list_of('feedback.Review') == [] + assert database.list_of("feedback.Review") == [] def 
test_answers_answered__good_avg(database: capy.Database): - model = database.create(cohort=1, - city=1, - country=1, - user=1, - feedback__answer=[{ - 'score': 8, - 'token_id': n + 1, - 'status': 'ANSWERED', - } for n in range(2)], - token=2, - review_platform=2) + model = database.create( + cohort=1, + city=1, + country=1, + user=1, + feedback__answer=[ + { + "score": 8, + "token_id": n + 1, + "status": "ANSWERED", + } + for n in range(2) + ], + token=2, + review_platform=2, + ) process_student_graduation.delay(1, 1) assert logging.Logger.info.call_args_list == [ - call('2 will be requested for student 1, avg NPS score of 8.0'), + call("2 will be requested for student 1, avg NPS score of 8.0"), ] assert logging.Logger.error.call_args_list == [] - assert database.list_of('feedback.Review') == [ - db_item({ - 'id': n + 1, - 'cohort_id': 1, - 'author_id': 1, - 'platform_id': model.review_platform[n].slug, - 'nps_previous_rating': 8.0, - }) for n in range(2) + assert database.list_of("feedback.Review") == [ + db_item( + { + "id": n + 1, + "cohort_id": 1, + "author_id": 1, + "platform_id": model.review_platform[n].slug, + "nps_previous_rating": 8.0, + } + ) + for n in range(2) ] diff --git a/breathecode/feedback/tests/tasks/tests_recalculate_survey_scores.py b/breathecode/feedback/tests/tasks/tests_recalculate_survey_scores.py index 155581dff..0f261cad5 100644 --- a/breathecode/feedback/tests/tasks/tests_recalculate_survey_scores.py +++ b/breathecode/feedback/tests/tasks/tests_recalculate_survey_scores.py @@ -21,17 +21,17 @@ def get_scores(): return { - 'total': TOTAL, - 'academy': ACADEMY, - 'cohort': COHORT, - 'mentors': [ + "total": TOTAL, + "academy": ACADEMY, + "cohort": COHORT, + "mentors": [ { - 'name': MENTOR1_NAME, - 'score': MENTOR1, + "name": MENTOR1_NAME, + "score": MENTOR1, }, { - 'name': MENTOR2_NAME, - 'score': MENTOR2, + "name": MENTOR2_NAME, + "score": MENTOR2, }, ], } @@ -42,50 +42,59 @@ class SurveyAnsweredTestSuite(FeedbackTestCase): 🔽🔽🔽 With 0 Survey """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.feedback.signals.survey_answered.send_robust', MagicMock()) - @patch('breathecode.feedback.actions.calculate_survey_scores', MagicMock(return_value=get_scores())) - @patch('breathecode.feedback.actions.calculate_survey_response_rate', MagicMock(return_value=RESPONSE_RATE)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.feedback.signals.survey_answered.send_robust", MagicMock()) + @patch("breathecode.feedback.actions.calculate_survey_scores", MagicMock(return_value=get_scores())) + @patch("breathecode.feedback.actions.calculate_survey_response_rate", MagicMock(return_value=RESPONSE_RATE)) def test_with_zero_surveys(self): recalculate_survey_scores.delay(1) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting recalculate_survey_score'), - call('Starting recalculate_survey_score'), - ]) - self.assertEqual(logging.Logger.error.call_args_list, [ - call('Survey not found', exc_info=True), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting recalculate_survey_score"), + call("Starting recalculate_survey_score"), + ], + ) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call("Survey not found", exc_info=True), + ], + ) self.assertEqual(actions.calculate_survey_scores.call_args_list, []) self.assertEqual(actions.calculate_survey_response_rate.call_args_list, []) - 
self.assertEqual(self.bc.database.list_of('feedback.Survey'), [])
+        self.assertEqual(self.bc.database.list_of("feedback.Survey"), [])
 
     """
    🔽🔽🔽 With 1 Survey
    """
 
-    @patch('logging.Logger.info', MagicMock())
-    @patch('logging.Logger.error', MagicMock())
-    @patch('breathecode.feedback.signals.survey_answered.send_robust', MagicMock())
-    @patch('breathecode.feedback.actions.calculate_survey_scores', MagicMock(return_value=get_scores()))
-    @patch('breathecode.feedback.actions.calculate_survey_response_rate', MagicMock(return_value=RESPONSE_RATE))
+    @patch("logging.Logger.info", MagicMock())
+    @patch("logging.Logger.error", MagicMock())
+    @patch("breathecode.feedback.signals.survey_answered.send_robust", MagicMock())
+    @patch("breathecode.feedback.actions.calculate_survey_scores", MagicMock(return_value=get_scores()))
+    @patch("breathecode.feedback.actions.calculate_survey_response_rate", MagicMock(return_value=RESPONSE_RATE))
     def test_with_one_surveys(self):
-        with patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()):
+        with patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()):
             model = self.bc.database.create(survey=1)
 
         logging.Logger.info.call_args_list = []
 
         recalculate_survey_scores.delay(1)
 
-        self.assertEqual(logging.Logger.info.call_args_list, [call('Starting recalculate_survey_score')])
+        self.assertEqual(logging.Logger.info.call_args_list, [call("Starting recalculate_survey_score")])
         self.assertEqual(logging.Logger.error.call_args_list, [])
         self.assertEqual(actions.calculate_survey_scores.call_args_list, [call(1)])
         self.assertEqual(actions.calculate_survey_response_rate.call_args_list, [call(1)])
-        self.assertEqual(self.bc.database.list_of('feedback.Survey'), [
-            {
-                **self.bc.format.to_dict(model.survey),
-                'response_rate': RESPONSE_RATE,
-                'scores': get_scores(),
-            },
-        ])
+        self.assertEqual(
+            self.bc.database.list_of("feedback.Survey"),
+            [
+                {
+                    **self.bc.format.to_dict(model.survey),
+                    "response_rate": RESPONSE_RATE,
+                    "scores": get_scores(),
+                },
+            ],
+        )
diff --git a/breathecode/feedback/tests/tasks/tests_send_cohort_survey.py b/breathecode/feedback/tests/tasks/tests_send_cohort_survey.py
index c46898a59..80132da0c 100644
--- a/breathecode/feedback/tests/tasks/tests_send_cohort_survey.py
+++ b/breathecode/feedback/tests/tasks/tests_send_cohort_survey.py
@@ -28,61 +28,70 @@ def get_env(key, value=None):
 class SendCohortSurvey(FeedbackTestCase):
     """Test /academy/survey"""
 
-    @patch('breathecode.feedback.tasks.generate_user_cohort_survey_answers', MagicMock())
-    @patch('logging.Logger.error', MagicMock())
-    @patch('logging.Logger.info', MagicMock())
-    @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None))
-    @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None))
+    @patch("breathecode.feedback.tasks.generate_user_cohort_survey_answers", MagicMock())
+    @patch("logging.Logger.error", MagicMock())
+    @patch("logging.Logger.info", MagicMock())
+    @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None))
+    @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None))
     def test_when_survey_is_none(self):
-        with patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()):
+        with patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()):
             model = self.generate_models(cohort=1)
 
         logging.Logger.info.call_args_list = []
 
         send_cohort_survey.delay(user_id=None, survey_id=None)
 
-        self.assertEqual(logging.Logger.info.call_args_list, [
-            call('Starting send_cohort_survey'),
-            call('Starting send_cohort_survey'),
-        ])
-        self.assertEqual(logging.Logger.error.call_args_list, [
-            call('Survey not found', exc_info=True),
-        ])
-        self.assertEqual(self.bc.database.list_of('feedback.Survey'), [])
+        self.assertEqual(
+            logging.Logger.info.call_args_list,
+            [
+                call("Starting send_cohort_survey"),
+                call("Starting send_cohort_survey"),
+            ],
+        )
+        self.assertEqual(
+            logging.Logger.error.call_args_list,
+            [
+                call("Survey not found", exc_info=True),
+            ],
+        )
+        self.assertEqual(self.bc.database.list_of("feedback.Survey"), [])
         self.assertEqual(tasks.generate_user_cohort_survey_answers.call_args_list, [])
 
-    @patch('breathecode.feedback.tasks.generate_user_cohort_survey_answers', MagicMock())
-    @patch('logging.Logger.error', MagicMock())
-    @patch('logging.Logger.info', MagicMock())
-    @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None))
-    @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None))
+    @patch("breathecode.feedback.tasks.generate_user_cohort_survey_answers", MagicMock())
+    @patch("logging.Logger.error", MagicMock())
+    @patch("logging.Logger.info", MagicMock())
+    @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None))
+    @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None))
     def test_when_user_is_none(self):
-        with patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()):
+        with patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()):
             model = self.generate_models(cohort=1, survey=1)
 
         logging.Logger.info.call_args_list = []
 
         send_cohort_survey.delay(survey_id=1, user_id=None)
 
-        self.assertEqual(logging.Logger.info.call_args_list, [call('Starting send_cohort_survey')])
-        self.assertEqual(logging.Logger.error.call_args_list, [
-            call('User not found', exc_info=True),
-        ])
-        self.assertEqual(self.bc.database.list_of('feedback.Survey'), [self.bc.format.to_dict(model.survey)])
+        self.assertEqual(logging.Logger.info.call_args_list, [call("Starting send_cohort_survey")])
+        self.assertEqual(
+            logging.Logger.error.call_args_list,
+            [
+                call("User not found", exc_info=True),
+            ],
+        )
+        self.assertEqual(self.bc.database.list_of("feedback.Survey"), [self.bc.format.to_dict(model.survey)])
         self.assertEqual(tasks.generate_user_cohort_survey_answers.call_args_list, [])
 
-    @patch('breathecode.feedback.tasks.generate_user_cohort_survey_answers', MagicMock())
-    @patch('logging.Logger.error', MagicMock())
-    @patch('logging.Logger.info', MagicMock())
-    @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None))
-    @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None))
+    @patch("breathecode.feedback.tasks.generate_user_cohort_survey_answers", MagicMock())
+    @patch("logging.Logger.error", MagicMock())
+    @patch("logging.Logger.info", MagicMock())
+    @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None))
+    @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None))
     def test_when_survey_has_expired(self):
         created = timezone.now() - timedelta(hours=48, minutes=1)
         duration = timedelta(hours=48)
 
-        with patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()):
-            model = self.generate_models(cohort=1, survey={'duration': duration}, user=1)
+        with patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()):
+            model = self.generate_models(cohort=1, survey={"duration": duration}, user=1)
 
         logging.Logger.info.call_args_list = []
 
         model.survey.created_at = created
@@ -91,187 +100,217 @@ def test_when_survey_has_expired(self):
         send_cohort_survey.delay(survey_id=1, user_id=1)
 
-        self.assertEqual(logging.Logger.info.call_args_list, [
-            call('Starting send_cohort_survey'),
-        ])
-        self.assertEqual(logging.Logger.error.call_args_list, [
-            call('This survey has already expired', exc_info=True),
-        ])
-        self.assertEqual(self.bc.database.list_of('feedback.Survey'), [self.bc.format.to_dict(model.survey)])
+        self.assertEqual(
+            logging.Logger.info.call_args_list,
+            [
+                call("Starting send_cohort_survey"),
+            ],
+        )
+        self.assertEqual(
+            logging.Logger.error.call_args_list,
+            [
+                call("This survey has already expired", exc_info=True),
+            ],
+        )
+        self.assertEqual(self.bc.database.list_of("feedback.Survey"), [self.bc.format.to_dict(model.survey)])
         self.assertEqual(tasks.generate_user_cohort_survey_answers.call_args_list, [])
 
-    @patch('breathecode.feedback.tasks.generate_user_cohort_survey_answers', MagicMock())
-    @patch('logging.Logger.error', MagicMock())
-    @patch('logging.Logger.info', MagicMock())
-    @patch('breathecode.notify.actions.send_email_message', MagicMock())
-    @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None))
-    @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None))
+    @patch("breathecode.feedback.tasks.generate_user_cohort_survey_answers", MagicMock())
+    @patch("logging.Logger.error", MagicMock())
+    @patch("logging.Logger.info", MagicMock())
+    @patch("breathecode.notify.actions.send_email_message", MagicMock())
+    @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None))
+    @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None))
     def test_send_cohort_when_student_does_not_belong_to_cohort(self):
-        with patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()):
+        with patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()):
             model = self.generate_models(cohort=1, user=1, survey=1)
 
         logging.Logger.info.call_args_list = []
 
         send_cohort_survey.delay(survey_id=1, user_id=1)
 
-        self.assertEqual(logging.Logger.info.call_args_list, [call('Starting send_cohort_survey')])
-        self.assertEqual(logging.Logger.error.call_args_list, [
-            call('This student does not belong to this cohort', exc_info=True),
-        ])
-        self.assertEqual(self.bc.database.list_of('feedback.Survey'), [self.bc.format.to_dict(model.survey)])
+        self.assertEqual(logging.Logger.info.call_args_list, [call("Starting send_cohort_survey")])
+        self.assertEqual(
+            logging.Logger.error.call_args_list,
+            [
+                call("This student does not belong to this cohort", exc_info=True),
+            ],
+        )
+        self.assertEqual(self.bc.database.list_of("feedback.Survey"), [self.bc.format.to_dict(model.survey)])
         self.assertEqual(tasks.generate_user_cohort_survey_answers.call_args_list, [])
         self.assertEqual(actions.send_email_message.call_args_list, [])
 
-    @patch('os.getenv', MagicMock(side_effect=apply_get_env({'API_URL': 'https://hello.com'})))
-    @patch('breathecode.feedback.tasks.generate_user_cohort_survey_answers', MagicMock())
-    @patch('logging.Logger.error', MagicMock())
-    @patch('logging.Logger.info', MagicMock())
-
@patch('breathecode.notify.actions.send_email_message', MagicMock()) - @patch('breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("os.getenv", MagicMock(side_effect=apply_get_env({"API_URL": "https://hello.com"}))) + @patch("breathecode.feedback.tasks.generate_user_cohort_survey_answers", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) + @patch("breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_when_student_not_found(self): - with patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()): - model = self.generate_models(cohort=1, user=1, survey=1, cohort_user={'role': 'STUDENT'}) + with patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()): + model = self.generate_models(cohort=1, user=1, survey=1, cohort_user={"role": "STUDENT"}) logging.Logger.info.call_args_list = [] send_cohort_survey.delay(survey_id=1, user_id=1) - self.assertEqual(logging.Logger.info.call_args_list, [call('Starting send_cohort_survey')]) + self.assertEqual(logging.Logger.info.call_args_list, [call("Starting send_cohort_survey")]) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(self.bc.database.list_of('feedback.Survey'), [self.bc.format.to_dict(model.survey)]) - self.assertEqual(tasks.generate_user_cohort_survey_answers.call_args_list, - [call(model.user, model.survey, status='SENT')]) - token = self.bc.database.get('authenticate.Token', 1, dict=False) - self.assertEqual(actions.send_email_message.call_args_list, [ - call( - 'nps_survey', - model.user.email, { - 'SUBJECT': 'We need your feedback', - 'MESSAGE': 'Please take 5 minutes to give us feedback about your experience at the academy so far.', - 'TRACKER_URL': f'https://hello.com/v1/feedback/survey/{model.survey.id}/tracker.png', - 'BUTTON': 'Answer the question', - 'LINK': f'https://nps.4geeks.com/survey/{model.survey.id}?token={token.key}', - }, - academy=model.academy) - ]) - - @patch('os.getenv', MagicMock(side_effect=apply_get_env({'API_URL': 'https://hello.com'}))) - @patch('breathecode.feedback.tasks.generate_user_cohort_survey_answers', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('logging.Logger.info', MagicMock()) - @patch('breathecode.notify.actions.send_email_message', MagicMock()) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + self.assertEqual(self.bc.database.list_of("feedback.Survey"), [self.bc.format.to_dict(model.survey)]) + self.assertEqual( + tasks.generate_user_cohort_survey_answers.call_args_list, [call(model.user, model.survey, status="SENT")] + ) + token = 
self.bc.database.get("authenticate.Token", 1, dict=False) + self.assertEqual( + actions.send_email_message.call_args_list, + [ + call( + "nps_survey", + model.user.email, + { + "SUBJECT": "We need your feedback", + "MESSAGE": "Please take 5 minutes to give us feedback about your experience at the academy so far.", + "TRACKER_URL": f"https://hello.com/v1/feedback/survey/{model.survey.id}/tracker.png", + "BUTTON": "Answer the question", + "LINK": f"https://nps.4geeks.com/survey/{model.survey.id}?token={token.key}", + }, + academy=model.academy, + ) + ], + ) + + @patch("os.getenv", MagicMock(side_effect=apply_get_env({"API_URL": "https://hello.com"}))) + @patch("breathecode.feedback.tasks.generate_user_cohort_survey_answers", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_when_an_email_is_sent(self): - statuses = ['ACTIVE', 'GRADUATED'] + statuses = ["ACTIVE", "GRADUATED"] for n in range(0, 2): c = statuses[n] - cohort_users = [{'educational_status': c}, {'role': 'STUDENT', 'educational_status': c}] + cohort_users = [{"educational_status": c}, {"role": "STUDENT", "educational_status": c}] - with patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()): + with patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()): model = self.generate_models(cohort=1, user=1, survey=1, cohort_user=cohort_users) logging.Logger.info.call_args_list = [] send_cohort_survey.delay(survey_id=model.survey.id, user_id=model.user.id) - self.assertEqual(logging.Logger.info.call_args_list, [call('Starting send_cohort_survey')]) + self.assertEqual(logging.Logger.info.call_args_list, [call("Starting send_cohort_survey")]) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(self.bc.database.list_of('feedback.Survey'), [self.bc.format.to_dict(model.survey)]) - self.assertEqual(tasks.generate_user_cohort_survey_answers.call_args_list, - [call(model.user, model.survey, status='SENT')]) - token = self.bc.database.get('authenticate.Token', model.survey.id, dict=False) - self.assertEqual(actions.send_email_message.call_args_list, [ - call('nps_survey', - model.user.email, { - 'SUBJECT': 'We need your feedback', - 'MESSAGE': - 'Please take 5 minutes to give us feedback about your experience at the academy so far.', - 'TRACKER_URL': f'https://hello.com/v1/feedback/survey/{model.survey.id}/tracker.png', - 'BUTTON': 'Answer the question', - 'LINK': f'https://nps.4geeks.com/survey/{model.survey.id}?token={token.key}', - }, - academy=model.academy) - ]) + self.assertEqual(self.bc.database.list_of("feedback.Survey"), [self.bc.format.to_dict(model.survey)]) + self.assertEqual( + tasks.generate_user_cohort_survey_answers.call_args_list, + [call(model.user, model.survey, status="SENT")], + ) + token = self.bc.database.get("authenticate.Token", model.survey.id, dict=False) + self.assertEqual( + actions.send_email_message.call_args_list, + [ + call( + "nps_survey", + model.user.email, + { + "SUBJECT": "We need your feedback", + "MESSAGE": "Please take 5 
minutes to give us feedback about your experience at the academy so far.", + "TRACKER_URL": f"https://hello.com/v1/feedback/survey/{model.survey.id}/tracker.png", + "BUTTON": "Answer the question", + "LINK": f"https://nps.4geeks.com/survey/{model.survey.id}?token={token.key}", + }, + academy=model.academy, + ) + ], + ) logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] tasks.generate_user_cohort_survey_answers.call_args_list = [] actions.send_email_message.call_args_list = [] - self.bc.database.delete('feedback.Survey') - - @patch('os.getenv', MagicMock(side_effect=apply_get_env({'API_URL': 'https://hello.com'}))) - @patch('breathecode.feedback.tasks.generate_user_cohort_survey_answers', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('logging.Logger.info', MagicMock()) - @patch('breathecode.notify.actions.send_email_message', MagicMock()) - @patch('breathecode.notify.actions.send_slack', MagicMock()) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + self.bc.database.delete("feedback.Survey") + + @patch("os.getenv", MagicMock(side_effect=apply_get_env({"API_URL": "https://hello.com"}))) + @patch("breathecode.feedback.tasks.generate_user_cohort_survey_answers", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) + @patch("breathecode.notify.actions.send_slack", MagicMock()) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_when_an_email_is_sent_with_slack_team_and_user(self): - statuses = ['ACTIVE', 'GRADUATED'] + statuses = ["ACTIVE", "GRADUATED"] for n in range(0, 2): c = statuses[n] - cohort_user = {'role': 'STUDENT', 'educational_status': c} - - with patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()): - model = self.generate_models(cohort=1, - slack_user=1, - slack_team=1, - user=1, - survey=1, - cohort_user=cohort_user) + cohort_user = {"role": "STUDENT", "educational_status": c} + + with patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()): + model = self.generate_models( + cohort=1, slack_user=1, slack_team=1, user=1, survey=1, cohort_user=cohort_user + ) logging.Logger.info.call_args_list = [] send_cohort_survey.delay(survey_id=model.survey.id, user_id=model.user.id) - self.assertEqual(logging.Logger.info.call_args_list, [call('Starting send_cohort_survey')]) + self.assertEqual(logging.Logger.info.call_args_list, [call("Starting send_cohort_survey")]) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(self.bc.database.list_of('feedback.Survey'), [self.bc.format.to_dict(model.survey)]) - self.assertEqual(tasks.generate_user_cohort_survey_answers.call_args_list, - [call(model.user, model.survey, status='SENT')]) + 
self.assertEqual(self.bc.database.list_of("feedback.Survey"), [self.bc.format.to_dict(model.survey)]) + self.assertEqual( + tasks.generate_user_cohort_survey_answers.call_args_list, + [call(model.user, model.survey, status="SENT")], + ) - token = self.bc.database.get('authenticate.Token', model.survey.id, dict=False) + token = self.bc.database.get("authenticate.Token", model.survey.id, dict=False) self.assertEqual( str(actions.send_slack.call_args_list), - str([ - call('nps_survey', - model.slack_user, - model.slack_team, - data={ - 'SUBJECT': 'We need your feedback', - 'MESSAGE': - 'Please take 5 minutes to give us feedback about your experience at the academy so far.', - 'TRACKER_URL': f'https://hello.com/v1/feedback/survey/{model.survey.id}/tracker.png', - 'BUTTON': 'Answer the question', - 'LINK': f'https://nps.4geeks.com/survey/{model.survey.id}?token={token.key}', - }, - academy=model.academy) - ])) - self.assertEqual(actions.send_email_message.call_args_list, [ - call('nps_survey', - model.user.email, { - 'SUBJECT': 'We need your feedback', - 'MESSAGE': - 'Please take 5 minutes to give us feedback about your experience at the academy so far.', - 'TRACKER_URL': f'https://hello.com/v1/feedback/survey/{model.survey.id}/tracker.png', - 'BUTTON': 'Answer the question', - 'LINK': f'https://nps.4geeks.com/survey/{model.survey.id}?token={token.key}', - }, - academy=model.academy) - ]) + str( + [ + call( + "nps_survey", + model.slack_user, + model.slack_team, + data={ + "SUBJECT": "We need your feedback", + "MESSAGE": "Please take 5 minutes to give us feedback about your experience at the academy so far.", + "TRACKER_URL": f"https://hello.com/v1/feedback/survey/{model.survey.id}/tracker.png", + "BUTTON": "Answer the question", + "LINK": f"https://nps.4geeks.com/survey/{model.survey.id}?token={token.key}", + }, + academy=model.academy, + ) + ] + ), + ) + self.assertEqual( + actions.send_email_message.call_args_list, + [ + call( + "nps_survey", + model.user.email, + { + "SUBJECT": "We need your feedback", + "MESSAGE": "Please take 5 minutes to give us feedback about your experience at the academy so far.", + "TRACKER_URL": f"https://hello.com/v1/feedback/survey/{model.survey.id}/tracker.png", + "BUTTON": "Answer the question", + "LINK": f"https://nps.4geeks.com/survey/{model.survey.id}?token={token.key}", + }, + academy=model.academy, + ) + ], + ) logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] tasks.generate_user_cohort_survey_answers.call_args_list = [] actions.send_email_message.call_args_list = [] actions.send_slack.call_args_list = [] - self.bc.database.delete('feedback.Survey') + self.bc.database.delete("feedback.Survey") diff --git a/breathecode/feedback/tests/tasks/tests_send_mentorship_session_survey.py b/breathecode/feedback/tests/tasks/tests_send_mentorship_session_survey.py index 2a4e995f5..8cc17ecc4 100644 --- a/breathecode/feedback/tests/tasks/tests_send_mentorship_session_survey.py +++ b/breathecode/feedback/tests/tasks/tests_send_mentorship_session_survey.py @@ -1,6 +1,7 @@ """ Tasks tests """ + from datetime import timedelta from unittest.mock import MagicMock, call, patch @@ -12,30 +13,30 @@ from ...utils import strings from ..mixins import FeedbackTestCase -API_URL = 'http://kenny-was.reborn' +API_URL = "http://kenny-was.reborn" UTC_NOW = timezone.now() def build_answer_dict(attrs={}): return { - 'academy_id': None, - 'cohort_id': None, - 'comment': None, - 'event_id': None, - 'highest': 'very likely', - 'id': 1, - 'lang': 'en', - 'lowest': 
'not likely', - 'mentor_id': None, - 'mentorship_session_id': None, - 'opened_at': None, - 'score': None, - 'sent_at': None, - 'status': 'PENDING', - 'survey_id': None, - 'title': None, - 'token_id': None, - 'user_id': None, + "academy_id": None, + "cohort_id": None, + "comment": None, + "event_id": None, + "highest": "very likely", + "id": 1, + "lang": "en", + "lowest": "not likely", + "mentor_id": None, + "mentorship_session_id": None, + "opened_at": None, + "score": None, + "sent_at": None, + "status": "PENDING", + "survey_id": None, + "title": None, + "token_id": None, + "user_id": None, **attrs, } @@ -50,25 +51,26 @@ def get_env(key, default=None): class ActionCertificateScreenshotTestCase(FeedbackTestCase): """Test task send_mentorship_session_survey""" + """ 🔽🔽🔽 Without MentorshipSession """ - @patch('breathecode.notify.actions.send_email_message', MagicMock()) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('os.getenv', MagicMock(side_effect=apply_get_env({'ENV': 'test', 'API_URL': API_URL}))) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.feedback.signals.survey_answered.send_robust', MagicMock()) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("os.getenv", MagicMock(side_effect=apply_get_env({"ENV": "test", "API_URL": API_URL}))) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.feedback.signals.survey_answered.send_robust", MagicMock()) def test_send_mentorship_session_survey__without_mentorship_session(self): from logging import Logger send_mentorship_session_survey.delay(1) - self.assertEqual(self.bc.database.list_of('feedback.Answer'), []) + self.assertEqual(self.bc.database.list_of("feedback.Answer"), []) assert Logger.info.call_args_list == [ - call('Starting send_mentorship_session_survey'), - call('Starting send_mentorship_session_survey'), + call("Starting send_mentorship_session_survey"), + call("Starting send_mentorship_session_survey"), ] assert Logger.error.call_args_list == [ call("Mentoring session doesn't found", exc_info=True), @@ -79,12 +81,12 @@ def test_send_mentorship_session_survey__without_mentorship_session(self): 🔽🔽🔽 With MentorshipSession and without User (mentee) """ - @patch('breathecode.notify.actions.send_email_message', MagicMock()) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('os.getenv', MagicMock(side_effect=apply_get_env({'ENV': 'test', 'API_URL': API_URL}))) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.feedback.signals.survey_answered.send_robust', MagicMock()) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("os.getenv", MagicMock(side_effect=apply_get_env({"ENV": "test", "API_URL": API_URL}))) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.feedback.signals.survey_answered.send_robust", MagicMock()) def test_send_mentorship_session_survey__with_mentorship_session(self): from logging import Logger @@ -93,280 +95,326 @@ def test_send_mentorship_session_survey__with_mentorship_session(self): send_mentorship_session_survey.delay(1) - self.assertEqual(self.bc.database.list_of('feedback.Answer'), []) - 
self.assertEqual(Logger.info.call_args_list, [call('Starting send_mentorship_session_survey')]) - self.assertEqual(Logger.error.call_args_list, [ - call("This session doesn't have a mentee", exc_info=True), - ]) + self.assertEqual(self.bc.database.list_of("feedback.Answer"), []) + self.assertEqual(Logger.info.call_args_list, [call("Starting send_mentorship_session_survey")]) + self.assertEqual( + Logger.error.call_args_list, + [ + call("This session doesn't have a mentee", exc_info=True), + ], + ) self.assertEqual(actions.send_email_message.call_args_list, []) """ 🔽🔽🔽 With MentorshipSession started not finished and with User (mentee) """ - @patch('breathecode.notify.actions.send_email_message', MagicMock()) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('os.getenv', MagicMock(side_effect=apply_get_env({'ENV': 'test', 'API_URL': API_URL}))) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.feedback.signals.survey_answered.send_robust', MagicMock()) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("os.getenv", MagicMock(side_effect=apply_get_env({"ENV": "test", "API_URL": API_URL}))) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.feedback.signals.survey_answered.send_robust", MagicMock()) def test_send_mentorship_session_survey__with_mentorship_session__with_mentee__session_started_not_finished(self): from logging import Logger mentorship_session = { - 'started_at': UTC_NOW, + "started_at": UTC_NOW, } self.bc.database.create(mentorship_session=mentorship_session, user=1) Logger.info.call_args_list = [] send_mentorship_session_survey.delay(1) - self.assertEqual(Logger.info.call_args_list, [call('Starting send_mentorship_session_survey')]) - self.assertEqual(Logger.error.call_args_list, [ - call("This session hasn't finished", exc_info=True), - ]) + self.assertEqual(Logger.info.call_args_list, [call("Starting send_mentorship_session_survey")]) + self.assertEqual( + Logger.error.call_args_list, + [ + call("This session hasn't finished", exc_info=True), + ], + ) - self.assertEqual(self.bc.database.list_of('feedback.Answer'), []) + self.assertEqual(self.bc.database.list_of("feedback.Answer"), []) self.assertEqual(actions.send_email_message.call_args_list, []) - self.assertEqual(self.bc.database.list_of('authenticate.Token'), []) + self.assertEqual(self.bc.database.list_of("authenticate.Token"), []) """ 🔽🔽🔽 With MentorshipSession not started but finished and with User (mentee) """ - @patch('breathecode.notify.actions.send_email_message', MagicMock()) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('os.getenv', MagicMock(side_effect=apply_get_env({'ENV': 'test', 'API_URL': API_URL}))) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.feedback.signals.survey_answered.send_robust', MagicMock()) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("os.getenv", MagicMock(side_effect=apply_get_env({"ENV": "test", "API_URL": API_URL}))) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.feedback.signals.survey_answered.send_robust", MagicMock()) def 
test_send_mentorship_session_survey__with_mentorship_session__with_mentee__session_not_started_but_finished( - self): + self, + ): from logging import Logger mentorship_session = { - 'ended_at': UTC_NOW, + "ended_at": UTC_NOW, } self.bc.database.create(mentorship_session=mentorship_session, user=1) Logger.info.call_args_list = [] send_mentorship_session_survey.delay(1) - self.assertEqual(Logger.info.call_args_list, [call('Starting send_mentorship_session_survey')]) - self.assertEqual(Logger.error.call_args_list, [ - call("This session hasn't finished", exc_info=True), - ]) + self.assertEqual(Logger.info.call_args_list, [call("Starting send_mentorship_session_survey")]) + self.assertEqual( + Logger.error.call_args_list, + [ + call("This session hasn't finished", exc_info=True), + ], + ) - self.assertEqual(self.bc.database.list_of('feedback.Answer'), []) + self.assertEqual(self.bc.database.list_of("feedback.Answer"), []) self.assertEqual(actions.send_email_message.call_args_list, []) - self.assertEqual(self.bc.database.list_of('authenticate.Token'), []) + self.assertEqual(self.bc.database.list_of("authenticate.Token"), []) """ 🔽🔽🔽 With MentorshipSession started and finished and with User (mentee) """ - @patch('breathecode.notify.actions.send_email_message', MagicMock()) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('os.getenv', MagicMock(side_effect=apply_get_env({'ENV': 'test', 'API_URL': API_URL}))) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.feedback.signals.survey_answered.send_robust', MagicMock()) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("os.getenv", MagicMock(side_effect=apply_get_env({"ENV": "test", "API_URL": API_URL}))) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.feedback.signals.survey_answered.send_robust", MagicMock()) def test_send_mentorship_session_survey__with_mentorship_session__with_mentee__session_started_and_finished(self): from logging import Logger mentorship_session = { - 'started_at': UTC_NOW, - 'ended_at': UTC_NOW + timedelta(minutes=5), + "started_at": UTC_NOW, + "ended_at": UTC_NOW + timedelta(minutes=5), } self.bc.database.create(mentorship_session=mentorship_session, user=1) Logger.info.call_args_list = [] send_mentorship_session_survey.delay(1) - self.assertEqual(Logger.info.call_args_list, [call('Starting send_mentorship_session_survey')]) + self.assertEqual(Logger.info.call_args_list, [call("Starting send_mentorship_session_survey")]) assert Logger.error.call_args_list == [ - call('Mentorship session duration is less or equal than five minutes', exc_info=True), + call("Mentorship session duration is less or equal than five minutes", exc_info=True), ] - self.assertEqual(self.bc.database.list_of('feedback.Answer'), []) + self.assertEqual(self.bc.database.list_of("feedback.Answer"), []) self.assertEqual(actions.send_email_message.call_args_list, []) - self.assertEqual(self.bc.database.list_of('authenticate.Token'), []) + self.assertEqual(self.bc.database.list_of("authenticate.Token"), []) """ 🔽🔽🔽 With MentorshipSession and with User (mentee) """ - @patch('breathecode.notify.actions.send_email_message', MagicMock()) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('os.getenv', MagicMock(side_effect=apply_get_env({'ENV': 'test', 
'API_URL': API_URL}))) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.feedback.signals.survey_answered.send_robust', MagicMock()) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("os.getenv", MagicMock(side_effect=apply_get_env({"ENV": "test", "API_URL": API_URL}))) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.feedback.signals.survey_answered.send_robust", MagicMock()) def test_send_mentorship_session_survey__with_mentorship_session__with_mentee(self): from logging import Logger mentorship_session = { - 'started_at': UTC_NOW, - 'ended_at': UTC_NOW + timedelta(minutes=5, seconds=1), + "started_at": UTC_NOW, + "ended_at": UTC_NOW + timedelta(minutes=5, seconds=1), } model = self.bc.database.create(mentorship_session=mentorship_session, user=1) Logger.info.call_args_list = [] send_mentorship_session_survey.delay(1) - self.assertEqual(Logger.info.call_args_list, [call('Starting send_mentorship_session_survey')]) + self.assertEqual(Logger.info.call_args_list, [call("Starting send_mentorship_session_survey")]) assert Logger.error.call_args_list == [ call("Mentorship session doesn't have a service associated with it", exc_info=True), ] - self.assertEqual(self.bc.database.list_of('feedback.Answer'), []) + self.assertEqual(self.bc.database.list_of("feedback.Answer"), []) self.assertEqual(actions.send_email_message.call_args_list, []) - self.assertEqual(self.bc.database.list_of('authenticate.Token'), []) + self.assertEqual(self.bc.database.list_of("authenticate.Token"), []) """ 🔽🔽🔽 With MentorshipSession, User (mentee) and MentorshipService """ - @patch('breathecode.notify.actions.send_email_message', MagicMock()) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('os.getenv', MagicMock(side_effect=apply_get_env({'ENV': 'test', 'API_URL': API_URL}))) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.feedback.signals.survey_answered.send_robust', MagicMock()) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("os.getenv", MagicMock(side_effect=apply_get_env({"ENV": "test", "API_URL": API_URL}))) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.feedback.signals.survey_answered.send_robust", MagicMock()) def test_send_mentorship_session_survey__with_mentorship_session__with_mentee__with_mentorship_service(self): from logging import Logger mentorship_session = { - 'started_at': UTC_NOW, - 'ended_at': UTC_NOW + timedelta(minutes=5, seconds=1), + "started_at": UTC_NOW, + "ended_at": UTC_NOW + timedelta(minutes=5, seconds=1), } model = self.bc.database.create(mentorship_session=mentorship_session, user=1, mentorship_service=1) Logger.info.call_args_list = [] send_mentorship_session_survey.delay(1) - self.assertEqual(Logger.info.call_args_list, [call('Starting send_mentorship_session_survey')]) + self.assertEqual(Logger.info.call_args_list, [call("Starting send_mentorship_session_survey")]) self.assertEqual(Logger.error.call_args_list, []) - fullname_of_mentor = (model.mentorship_session.mentor.user.first_name + ' ' + - model.mentorship_session.mentor.user.last_name) - - token = self.bc.database.get('authenticate.Token', 1, dict=False) - - 
self.assertEqual(self.bc.database.list_of('feedback.Answer'), [ - build_answer_dict({ - 'academy_id': 1, - 'title': strings['en']['session']['title'].format(fullname_of_mentor), - 'lowest': strings['en']['session']['lowest'], - 'highest': strings['en']['session']['highest'], - 'mentorship_session_id': 1, - 'sent_at': UTC_NOW, - 'status': 'SENT', - 'user_id': 1, - }), - ]) - - self.assertEqual(actions.send_email_message.call_args_list, [ - call('nps_survey', - model.user.email, { - 'SUBJECT': strings['en']['survey_subject'], - 'MESSAGE': strings['en']['session']['title'].format(fullname_of_mentor), - 'TRACKER_URL': f'{API_URL}/v1/feedback/answer/1/tracker.png', - 'BUTTON': strings['en']['button_label'], - 'LINK': f'https://nps.4geeks.com/1?token={token.key}', - }, - academy=model.academy) - ]) - - self.bc.check.partial_equality(self.bc.database.list_of('authenticate.Token'), [{ - 'key': token.key, - 'token_type': 'temporal', - }]) + fullname_of_mentor = ( + model.mentorship_session.mentor.user.first_name + " " + model.mentorship_session.mentor.user.last_name + ) + + token = self.bc.database.get("authenticate.Token", 1, dict=False) + + self.assertEqual( + self.bc.database.list_of("feedback.Answer"), + [ + build_answer_dict( + { + "academy_id": 1, + "title": strings["en"]["session"]["title"].format(fullname_of_mentor), + "lowest": strings["en"]["session"]["lowest"], + "highest": strings["en"]["session"]["highest"], + "mentorship_session_id": 1, + "sent_at": UTC_NOW, + "status": "SENT", + "user_id": 1, + } + ), + ], + ) + + self.assertEqual( + actions.send_email_message.call_args_list, + [ + call( + "nps_survey", + model.user.email, + { + "SUBJECT": strings["en"]["survey_subject"], + "MESSAGE": strings["en"]["session"]["title"].format(fullname_of_mentor), + "TRACKER_URL": f"{API_URL}/v1/feedback/answer/1/tracker.png", + "BUTTON": strings["en"]["button_label"], + "LINK": f"https://nps.4geeks.com/1?token={token.key}", + }, + academy=model.academy, + ) + ], + ) + + self.bc.check.partial_equality( + self.bc.database.list_of("authenticate.Token"), + [ + { + "key": token.key, + "token_type": "temporal", + } + ], + ) """ 🔽🔽🔽 With MentorshipSession, with User (mentee) and Answer with status PENDING """ - @patch('breathecode.notify.actions.send_email_message', MagicMock()) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('os.getenv', MagicMock(side_effect=apply_get_env({'ENV': 'test', 'API_URL': API_URL}))) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.feedback.signals.survey_answered.send_robust', MagicMock()) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("os.getenv", MagicMock(side_effect=apply_get_env({"ENV": "test", "API_URL": API_URL}))) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.feedback.signals.survey_answered.send_robust", MagicMock()) def test_send_mentorship_session_survey__with_mentorship_session__with_mentee__with_answer(self): from logging import Logger mentorship_session = { - 'started_at': UTC_NOW, - 'ended_at': UTC_NOW + timedelta(minutes=5, seconds=1), + "started_at": UTC_NOW, + "ended_at": UTC_NOW + timedelta(minutes=5, seconds=1), } model = self.bc.database.create(mentorship_session=mentorship_session, user=1, answer=1, mentorship_service=1) Logger.info.call_args_list = [] send_mentorship_session_survey.delay(1) - 
self.assertEqual(Logger.info.call_args_list, [call('Starting send_mentorship_session_survey')]) + self.assertEqual(Logger.info.call_args_list, [call("Starting send_mentorship_session_survey")]) self.assertEqual(Logger.error.call_args_list, []) - token = self.bc.database.get('authenticate.Token', 1, dict=False) - - self.assertEqual(self.bc.database.list_of('feedback.Answer'), [ - { - **self.bc.format.to_dict(model.answer), - 'sent_at': UTC_NOW, - }, - ]) - - self.assertEqual(actions.send_email_message.call_args_list, [ - call('nps_survey', - model.user.email, { - 'SUBJECT': strings['en']['survey_subject'], - 'MESSAGE': model.answer.title, - 'TRACKER_URL': f'{API_URL}/v1/feedback/answer/1/tracker.png', - 'BUTTON': strings['en']['button_label'], - 'LINK': f'https://nps.4geeks.com/1?token={token.key}', - }, - academy=model.academy) - ]) - - self.bc.check.partial_equality(self.bc.database.list_of('authenticate.Token'), [{ - 'key': token.key, - 'token_type': 'temporal', - }]) + token = self.bc.database.get("authenticate.Token", 1, dict=False) + + self.assertEqual( + self.bc.database.list_of("feedback.Answer"), + [ + { + **self.bc.format.to_dict(model.answer), + "sent_at": UTC_NOW, + }, + ], + ) + + self.assertEqual( + actions.send_email_message.call_args_list, + [ + call( + "nps_survey", + model.user.email, + { + "SUBJECT": strings["en"]["survey_subject"], + "MESSAGE": model.answer.title, + "TRACKER_URL": f"{API_URL}/v1/feedback/answer/1/tracker.png", + "BUTTON": strings["en"]["button_label"], + "LINK": f"https://nps.4geeks.com/1?token={token.key}", + }, + academy=model.academy, + ) + ], + ) + + self.bc.check.partial_equality( + self.bc.database.list_of("authenticate.Token"), + [ + { + "key": token.key, + "token_type": "temporal", + } + ], + ) """ 🔽🔽🔽 With MentorshipSession, with User (mentee) and Answer with status ANSWERED """ - @patch('breathecode.notify.actions.send_email_message', MagicMock()) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('os.getenv', MagicMock(side_effect=apply_get_env({'ENV': 'test', 'API_URL': API_URL}))) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.feedback.signals.survey_answered.send_robust', MagicMock()) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("os.getenv", MagicMock(side_effect=apply_get_env({"ENV": "test", "API_URL": API_URL}))) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.feedback.signals.survey_answered.send_robust", MagicMock()) def test_send_mentorship_session_survey__with_mentorship_session__with_mentee__with_answer_answered(self): from logging import Logger - answer = {'status': 'ANSWERED'} + answer = {"status": "ANSWERED"} mentorship_session = { - 'started_at': UTC_NOW, - 'ended_at': UTC_NOW + timedelta(minutes=5, seconds=1), + "started_at": UTC_NOW, + "ended_at": UTC_NOW + timedelta(minutes=5, seconds=1), } - model = self.bc.database.create(mentorship_session=mentorship_session, - user=1, - answer=answer, - mentorship_service=1) + model = self.bc.database.create( + mentorship_session=mentorship_session, user=1, answer=answer, mentorship_service=1 + ) Logger.info.call_args_list = [] send_mentorship_session_survey.delay(1) - self.assertEqual(Logger.info.call_args_list, [ - call('Starting send_mentorship_session_survey'), - ]) - - self.assertEqual(Logger.error.call_args_list, [ - 
call('This survey about MentorshipSession 1 was answered', exc_info=True), - ]) - self.assertEqual(self.bc.database.list_of('feedback.Answer'), [self.bc.format.to_dict(model.answer)]) + self.assertEqual( + Logger.info.call_args_list, + [ + call("Starting send_mentorship_session_survey"), + ], + ) + + self.assertEqual( + Logger.error.call_args_list, + [ + call("This survey about MentorshipSession 1 was answered", exc_info=True), + ], + ) + self.assertEqual(self.bc.database.list_of("feedback.Answer"), [self.bc.format.to_dict(model.answer)]) self.assertEqual(actions.send_email_message.call_args_list, []) - self.assertEqual(self.bc.database.list_of('authenticate.Token'), []) + self.assertEqual(self.bc.database.list_of("authenticate.Token"), []) diff --git a/breathecode/feedback/tests/urls/tests_academy_answer.py b/breathecode/feedback/tests/urls/tests_academy_answer.py index fbb90c0d9..5d10d73ad 100644 --- a/breathecode/feedback/tests/urls/tests_academy_answer.py +++ b/breathecode/feedback/tests/urls/tests_academy_answer.py @@ -1,6 +1,7 @@ """ Test /answer """ + import re, urllib from unittest.mock import MagicMock, call, patch from django.urls.base import reverse_lazy @@ -19,56 +20,55 @@ class AnswerTestSuite(FeedbackTestCase): """Test /answer""" - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_answer_without_auth(self): """Test /answer without auth""" - url = reverse_lazy('feedback:answer') + url = reverse_lazy("feedback:answer") response = self.client.get(url) json = response.json() self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_answer_wrong_academy(self): """Test /answer without auth""" - url = reverse_lazy('feedback:answer') - response = self.client.get(url, **{'HTTP_Academy': 1}) + url = reverse_lazy("feedback:answer") + response = self.client.get(url, **{"HTTP_Academy": 1}) json = response.json() self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_answer_without_capability(self): """Test /cohort/:id without auth""" self.headers(academy=1) - url = reverse_lazy('feedback:answer') + url = reverse_lazy("feedback:answer") self.generate_models(authenticate=True) response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': "You (user: 1) don't have this capability: read_nps_answers for academy 1", 
- 'status_code': 403 - }) + self.assertEqual( + json, + {"detail": "You (user: 1) don't have this capability: read_nps_answers for academy 1", "status_code": 403}, + ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_answer_without_data(self): """Test /answer without auth""" self.headers(academy=1) - models = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_nps_answers', - role='potato') - url = reverse_lazy('feedback:answer') + models = self.generate_models( + authenticate=True, profile_academy=True, capability="read_nps_answers", role="potato" + ) + url = reverse_lazy("feedback:answer") response = self.client.get(url) json = response.json() @@ -76,935 +76,1001 @@ def test_answer_without_data(self): self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(self.count_answer(), 0) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_answer_with_data(self): """Test /answer without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - answer=True, - profile_academy=True, - capability='read_nps_answers', - role='potato') - db = self.model_to_dict(model, 'answer') - url = reverse_lazy('feedback:answer') + model = self.generate_models( + authenticate=True, answer=True, profile_academy=True, capability="read_nps_answers", role="potato" + ) + db = self.model_to_dict(model, "answer") + url = reverse_lazy("feedback:answer") response = self.client.get(url) json = response.json() - json = [{**x, 'created_at': None} for x in json if self.assertDatetime(x['created_at'])] - - self.assertEqual(json, [{ - 'created_at': None, - 'academy': { - 'id': model['answer'].academy.id, - 'name': model['answer'].academy.name, - 'slug': model['answer'].academy.slug, - }, - 'cohort': { - 'id': model['answer'].cohort.id, - 'name': model['answer'].cohort.name, - 'slug': model['answer'].cohort.slug, - }, - 'comment': model['answer'].comment, - 'event': model['answer'].event, - 'highest': model['answer'].highest, - 'id': model['answer'].id, - 'lang': model['answer'].lang, - 'lowest': model['answer'].lowest, - 'mentor': { - 'first_name': model['answer'].mentor.first_name, - 'id': model['answer'].mentor.id, - 'last_name': model['answer'].mentor.last_name, - 'profile': None, - }, - 'score': model['answer'].score, - 'status': model['answer'].status, - 'title': model['answer'].title, - 'user': { - 'first_name': model['user'].first_name, - 'id': model['user'].id, - 'last_name': model['user'].last_name, - 'profile': None, - }, - }]) + json = [{**x, "created_at": None} for x in json if self.assertDatetime(x["created_at"])] + + self.assertEqual( + json, + [ + { + "created_at": None, + "academy": { + "id": model["answer"].academy.id, + 
"name": model["answer"].academy.name, + "slug": model["answer"].academy.slug, + }, + "cohort": { + "id": model["answer"].cohort.id, + "name": model["answer"].cohort.name, + "slug": model["answer"].cohort.slug, + }, + "comment": model["answer"].comment, + "event": model["answer"].event, + "highest": model["answer"].highest, + "id": model["answer"].id, + "lang": model["answer"].lang, + "lowest": model["answer"].lowest, + "mentor": { + "first_name": model["answer"].mentor.first_name, + "id": model["answer"].mentor.id, + "last_name": model["answer"].mentor.last_name, + "profile": None, + }, + "score": model["answer"].score, + "status": model["answer"].status, + "title": model["answer"].title, + "user": { + "first_name": model["user"].first_name, + "id": model["user"].id, + "last_name": model["user"].last_name, + "profile": None, + }, + } + ], + ) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('feedback.Answer'), [db]) + self.assertEqual(self.bc.database.list_of("feedback.Answer"), [db]) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_answer__with_data__with_profile(self): """Test /answer without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - answer=True, - profile_academy=True, - profile=True, - capability='read_nps_answers', - role='potato') - db = self.model_to_dict(model, 'answer') - url = reverse_lazy('feedback:answer') + model = self.generate_models( + authenticate=True, + answer=True, + profile_academy=True, + profile=True, + capability="read_nps_answers", + role="potato", + ) + db = self.model_to_dict(model, "answer") + url = reverse_lazy("feedback:answer") response = self.client.get(url) json = response.json() - json = [{**x, 'created_at': None} for x in json if self.assertDatetime(x['created_at'])] - - self.assertEqual(json, [{ - 'created_at': None, - 'academy': { - 'id': model['answer'].academy.id, - 'name': model['answer'].academy.name, - 'slug': model['answer'].academy.slug, - }, - 'cohort': { - 'id': model['answer'].cohort.id, - 'name': model['answer'].cohort.name, - 'slug': model['answer'].cohort.slug, - }, - 'comment': model['answer'].comment, - 'event': model['answer'].event, - 'highest': model['answer'].highest, - 'id': model['answer'].id, - 'lang': model['answer'].lang, - 'lowest': model['answer'].lowest, - 'mentor': { - 'first_name': model['answer'].mentor.first_name, - 'id': model['answer'].mentor.id, - 'last_name': model['answer'].mentor.last_name, - 'profile': { - 'avatar_url': None, - }, - }, - 'score': model['answer'].score, - 'status': model['answer'].status, - 'title': model['answer'].title, - 'user': { - 'first_name': model['user'].first_name, - 'id': model['user'].id, - 'last_name': model['user'].last_name, - 'profile': { - 'avatar_url': None, - }, - }, - }]) + json = [{**x, "created_at": None} for x in json if self.assertDatetime(x["created_at"])] + + self.assertEqual( + json, + [ + { + "created_at": None, + "academy": { + "id": model["answer"].academy.id, + "name": model["answer"].academy.name, + "slug": model["answer"].academy.slug, + }, + "cohort": { + "id": model["answer"].cohort.id, + 
"name": model["answer"].cohort.name, + "slug": model["answer"].cohort.slug, + }, + "comment": model["answer"].comment, + "event": model["answer"].event, + "highest": model["answer"].highest, + "id": model["answer"].id, + "lang": model["answer"].lang, + "lowest": model["answer"].lowest, + "mentor": { + "first_name": model["answer"].mentor.first_name, + "id": model["answer"].mentor.id, + "last_name": model["answer"].mentor.last_name, + "profile": { + "avatar_url": None, + }, + }, + "score": model["answer"].score, + "status": model["answer"].status, + "title": model["answer"].title, + "user": { + "first_name": model["user"].first_name, + "id": model["user"].id, + "last_name": model["user"].last_name, + "profile": { + "avatar_url": None, + }, + }, + } + ], + ) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('feedback.Answer'), [db]) + self.assertEqual(self.bc.database.list_of("feedback.Answer"), [db]) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_answer_with_bad_param_user_with_data(self): """Test /answer without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - user=True, - answer=True, - profile_academy=True, - capability='read_nps_answers', - role='potato') - db = self.model_to_dict(model, 'answer') - params = {'user': 9999} - base_url = reverse_lazy('feedback:answer') - url = f'{base_url}?{urllib.parse.urlencode(params)}' + model = self.generate_models( + authenticate=True, + user=True, + answer=True, + profile_academy=True, + capability="read_nps_answers", + role="potato", + ) + db = self.model_to_dict(model, "answer") + params = {"user": 9999} + base_url = reverse_lazy("feedback:answer") + url = f"{base_url}?{urllib.parse.urlencode(params)}" response = self.client.get(url) json = response.json() self.assertEqual(json, []) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('feedback.Answer'), [db]) + self.assertEqual(self.bc.database.list_of("feedback.Answer"), [db]) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_answer_with_param_user_with_data(self): """Test /answer without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - user=True, - answer=True, - profile_academy=True, - capability='read_nps_answers', - role='potato') - db = self.model_to_dict(model, 'answer') + model = self.generate_models( + authenticate=True, + user=True, + answer=True, + profile_academy=True, + capability="read_nps_answers", + role="potato", + ) + db = self.model_to_dict(model, "answer") params = { - 'user': model['user'].id, + "user": model["user"].id, } - base_url = reverse_lazy('feedback:answer') - url = f'{base_url}?{urllib.parse.urlencode(params)}' + base_url = 
reverse_lazy("feedback:answer") + url = f"{base_url}?{urllib.parse.urlencode(params)}" response = self.client.get(url) json = response.json() - json = [{**x, 'created_at': None} for x in json if self.assertDatetime(x['created_at'])] - - self.assertEqual(json, [{ - 'created_at': None, - 'academy': { - 'id': model['answer'].academy.id, - 'name': model['answer'].academy.name, - 'slug': model['answer'].academy.slug, - }, - 'cohort': { - 'id': model['answer'].cohort.id, - 'name': model['answer'].cohort.name, - 'slug': model['answer'].cohort.slug, - }, - 'comment': model['answer'].comment, - 'event': model['answer'].event, - 'highest': model['answer'].highest, - 'id': model['answer'].id, - 'lang': model['answer'].lang, - 'lowest': model['answer'].lowest, - 'mentor': { - 'first_name': model['answer'].mentor.first_name, - 'id': model['answer'].mentor.id, - 'last_name': model['answer'].mentor.last_name, - 'profile': None, - }, - 'score': model['answer'].score, - 'status': model['answer'].status, - 'title': model['answer'].title, - 'user': { - 'first_name': model['user'].first_name, - 'id': model['user'].id, - 'last_name': model['user'].last_name, - 'profile': None, - }, - }]) + json = [{**x, "created_at": None} for x in json if self.assertDatetime(x["created_at"])] + + self.assertEqual( + json, + [ + { + "created_at": None, + "academy": { + "id": model["answer"].academy.id, + "name": model["answer"].academy.name, + "slug": model["answer"].academy.slug, + }, + "cohort": { + "id": model["answer"].cohort.id, + "name": model["answer"].cohort.name, + "slug": model["answer"].cohort.slug, + }, + "comment": model["answer"].comment, + "event": model["answer"].event, + "highest": model["answer"].highest, + "id": model["answer"].id, + "lang": model["answer"].lang, + "lowest": model["answer"].lowest, + "mentor": { + "first_name": model["answer"].mentor.first_name, + "id": model["answer"].mentor.id, + "last_name": model["answer"].mentor.last_name, + "profile": None, + }, + "score": model["answer"].score, + "status": model["answer"].status, + "title": model["answer"].title, + "user": { + "first_name": model["user"].first_name, + "id": model["user"].id, + "last_name": model["user"].last_name, + "profile": None, + }, + } + ], + ) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('feedback.Answer'), [db]) + self.assertEqual(self.bc.database.list_of("feedback.Answer"), [db]) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_answer_with_bad_param_cohort_with_data(self): """Test /answer without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - user=True, - cohort=True, - answer=True, - profile_academy=True, - capability='read_nps_answers', - role='potato') - db = self.model_to_dict(model, 'answer') + model = self.generate_models( + authenticate=True, + user=True, + cohort=True, + answer=True, + profile_academy=True, + capability="read_nps_answers", + role="potato", + ) + db = self.model_to_dict(model, "answer") params = { - 'cohort': 'they-killed-kenny', + "cohort": "they-killed-kenny", } - base_url = reverse_lazy('feedback:answer') - url = 
f'{base_url}?{urllib.parse.urlencode(params)}' + base_url = reverse_lazy("feedback:answer") + url = f"{base_url}?{urllib.parse.urlencode(params)}" response = self.client.get(url) json = response.json() self.assertEqual(json, []) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('feedback.Answer'), [db]) + self.assertEqual(self.bc.database.list_of("feedback.Answer"), [db]) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_answer_with_param_cohort_with_data(self): """Test /answer without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - user=True, - cohort=True, - answer=True, - profile_academy=True, - capability='read_nps_answers', - role='potato') - db = self.model_to_dict(model, 'answer') + model = self.generate_models( + authenticate=True, + user=True, + cohort=True, + answer=True, + profile_academy=True, + capability="read_nps_answers", + role="potato", + ) + db = self.model_to_dict(model, "answer") params = { - 'cohort': model['cohort'].slug, + "cohort": model["cohort"].slug, } - base_url = reverse_lazy('feedback:answer') - url = f'{base_url}?{urllib.parse.urlencode(params)}' + base_url = reverse_lazy("feedback:answer") + url = f"{base_url}?{urllib.parse.urlencode(params)}" response = self.client.get(url) json = response.json() - json = [{**x, 'created_at': None} for x in json if self.assertDatetime(x['created_at'])] - - self.assertEqual(json, [{ - 'created_at': None, - 'academy': { - 'id': model['answer'].academy.id, - 'name': model['answer'].academy.name, - 'slug': model['answer'].academy.slug, - }, - 'cohort': { - 'id': model['cohort'].id, - 'name': model['cohort'].name, - 'slug': model['cohort'].slug, - }, - 'comment': model['answer'].comment, - 'event': model['answer'].event, - 'highest': model['answer'].highest, - 'id': model['answer'].id, - 'lang': model['answer'].lang, - 'lowest': model['answer'].lowest, - 'mentor': { - 'first_name': model['answer'].mentor.first_name, - 'id': model['answer'].mentor.id, - 'last_name': model['answer'].mentor.last_name, - 'profile': None, - }, - 'score': model['answer'].score, - 'status': model['answer'].status, - 'title': model['answer'].title, - 'user': { - 'first_name': model['user'].first_name, - 'id': model['user'].id, - 'last_name': model['user'].last_name, - 'profile': None, - }, - }]) + json = [{**x, "created_at": None} for x in json if self.assertDatetime(x["created_at"])] + + self.assertEqual( + json, + [ + { + "created_at": None, + "academy": { + "id": model["answer"].academy.id, + "name": model["answer"].academy.name, + "slug": model["answer"].academy.slug, + }, + "cohort": { + "id": model["cohort"].id, + "name": model["cohort"].name, + "slug": model["cohort"].slug, + }, + "comment": model["answer"].comment, + "event": model["answer"].event, + "highest": model["answer"].highest, + "id": model["answer"].id, + "lang": model["answer"].lang, + "lowest": model["answer"].lowest, + "mentor": { + "first_name": model["answer"].mentor.first_name, + "id": model["answer"].mentor.id, + "last_name": model["answer"].mentor.last_name, + "profile": None, + }, + "score": 
model["answer"].score, + "status": model["answer"].status, + "title": model["answer"].title, + "user": { + "first_name": model["user"].first_name, + "id": model["user"].id, + "last_name": model["user"].last_name, + "profile": None, + }, + } + ], + ) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('feedback.Answer'), [db]) + self.assertEqual(self.bc.database.list_of("feedback.Answer"), [db]) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_answer_with_param_academy_with_data(self): """Test /answer without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - user=True, - cohort=True, - academy=True, - answer=True, - profile_academy=True, - capability='read_nps_answers', - role='potato') - db = self.model_to_dict(model, 'answer') + model = self.generate_models( + authenticate=True, + user=True, + cohort=True, + academy=True, + answer=True, + profile_academy=True, + capability="read_nps_answers", + role="potato", + ) + db = self.model_to_dict(model, "answer") params = { - 'academy': model['academy'].id, + "academy": model["academy"].id, } - base_url = reverse_lazy('feedback:answer') - url = f'{base_url}?{urllib.parse.urlencode(params)}' + base_url = reverse_lazy("feedback:answer") + url = f"{base_url}?{urllib.parse.urlencode(params)}" response = self.client.get(url) json = response.json() - json = [{**x, 'created_at': None} for x in json if self.assertDatetime(x['created_at'])] - - self.assertEqual(json, [{ - 'created_at': None, - 'academy': { - 'id': model['answer'].academy.id, - 'name': model['answer'].academy.name, - 'slug': model['answer'].academy.slug, - }, - 'cohort': { - 'id': model['cohort'].id, - 'name': model['cohort'].name, - 'slug': model['cohort'].slug, - }, - 'comment': model['answer'].comment, - 'event': model['answer'].event, - 'highest': model['answer'].highest, - 'id': model['answer'].id, - 'lang': model['answer'].lang, - 'lowest': model['answer'].lowest, - 'mentor': { - 'first_name': model['answer'].mentor.first_name, - 'id': model['answer'].mentor.id, - 'last_name': model['answer'].mentor.last_name, - 'profile': None, - }, - 'score': model['answer'].score, - 'status': model['answer'].status, - 'title': model['answer'].title, - 'user': { - 'first_name': model['user'].first_name, - 'id': model['user'].id, - 'last_name': model['user'].last_name, - 'profile': None, - }, - }]) + json = [{**x, "created_at": None} for x in json if self.assertDatetime(x["created_at"])] + + self.assertEqual( + json, + [ + { + "created_at": None, + "academy": { + "id": model["answer"].academy.id, + "name": model["answer"].academy.name, + "slug": model["answer"].academy.slug, + }, + "cohort": { + "id": model["cohort"].id, + "name": model["cohort"].name, + "slug": model["cohort"].slug, + }, + "comment": model["answer"].comment, + "event": model["answer"].event, + "highest": model["answer"].highest, + "id": model["answer"].id, + "lang": model["answer"].lang, + "lowest": model["answer"].lowest, + "mentor": { + "first_name": model["answer"].mentor.first_name, + "id": model["answer"].mentor.id, + "last_name": model["answer"].mentor.last_name, + 
"profile": None, + }, + "score": model["answer"].score, + "status": model["answer"].status, + "title": model["answer"].title, + "user": { + "first_name": model["user"].first_name, + "id": model["user"].id, + "last_name": model["user"].last_name, + "profile": None, + }, + } + ], + ) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('feedback.Answer'), [db]) + self.assertEqual(self.bc.database.list_of("feedback.Answer"), [db]) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_answer_with_bad_param_mentor_with_data(self): """Test /answer without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - user=True, - cohort=True, - academy=True, - answer=True, - profile_academy=True, - capability='read_nps_answers', - role='potato') - db = self.model_to_dict(model, 'answer') + model = self.generate_models( + authenticate=True, + user=True, + cohort=True, + academy=True, + answer=True, + profile_academy=True, + capability="read_nps_answers", + role="potato", + ) + db = self.model_to_dict(model, "answer") params = { - 'mentor': 9999, + "mentor": 9999, } - base_url = reverse_lazy('feedback:answer') - url = f'{base_url}?{urllib.parse.urlencode(params)}' + base_url = reverse_lazy("feedback:answer") + url = f"{base_url}?{urllib.parse.urlencode(params)}" response = self.client.get(url) json = response.json() self.assertEqual(json, []) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('feedback.Answer'), [db]) + self.assertEqual(self.bc.database.list_of("feedback.Answer"), [db]) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_answer_with_param_mentor_with_data(self): """Test /answer without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - user=True, - cohort=True, - academy=True, - answer=True, - profile_academy=True, - capability='read_nps_answers', - role='potato') - db = self.model_to_dict(model, 'answer') + model = self.generate_models( + authenticate=True, + user=True, + cohort=True, + academy=True, + answer=True, + profile_academy=True, + capability="read_nps_answers", + role="potato", + ) + db = self.model_to_dict(model, "answer") params = { - 'mentor': model['user'].id, + "mentor": model["user"].id, } - base_url = reverse_lazy('feedback:answer') - url = f'{base_url}?{urllib.parse.urlencode(params)}' - response = self.client.get(url, headers={'Academy': model['academy'].id}) + base_url = reverse_lazy("feedback:answer") + url = f"{base_url}?{urllib.parse.urlencode(params)}" + response = self.client.get(url, headers={"Academy": model["academy"].id}) json = response.json() - json = [{**x, 'created_at': None} for x in json if self.assertDatetime(x['created_at'])] - - 
self.assertEqual(json, [{ - 'created_at': None, - 'academy': { - 'id': model['answer'].academy.id, - 'name': model['answer'].academy.name, - 'slug': model['answer'].academy.slug, - }, - 'cohort': { - 'id': model['cohort'].id, - 'name': model['cohort'].name, - 'slug': model['cohort'].slug, - }, - 'comment': model['answer'].comment, - 'event': model['answer'].event, - 'highest': model['answer'].highest, - 'id': model['answer'].id, - 'lang': model['answer'].lang, - 'lowest': model['answer'].lowest, - 'mentor': { - 'first_name': model['user'].first_name, - 'id': model['user'].id, - 'last_name': model['user'].last_name, - 'profile': None, - }, - 'score': model['answer'].score, - 'status': model['answer'].status, - 'title': model['answer'].title, - 'user': { - 'first_name': model['user'].first_name, - 'id': model['user'].id, - 'last_name': model['user'].last_name, - 'profile': None, - }, - }]) + json = [{**x, "created_at": None} for x in json if self.assertDatetime(x["created_at"])] + + self.assertEqual( + json, + [ + { + "created_at": None, + "academy": { + "id": model["answer"].academy.id, + "name": model["answer"].academy.name, + "slug": model["answer"].academy.slug, + }, + "cohort": { + "id": model["cohort"].id, + "name": model["cohort"].name, + "slug": model["cohort"].slug, + }, + "comment": model["answer"].comment, + "event": model["answer"].event, + "highest": model["answer"].highest, + "id": model["answer"].id, + "lang": model["answer"].lang, + "lowest": model["answer"].lowest, + "mentor": { + "first_name": model["user"].first_name, + "id": model["user"].id, + "last_name": model["user"].last_name, + "profile": None, + }, + "score": model["answer"].score, + "status": model["answer"].status, + "title": model["answer"].title, + "user": { + "first_name": model["user"].first_name, + "id": model["user"].id, + "last_name": model["user"].last_name, + "profile": None, + }, + } + ], + ) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('feedback.Answer'), [db]) + self.assertEqual(self.bc.database.list_of("feedback.Answer"), [db]) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_answer_with_bad_param_event_with_data(self): """Test /answer without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - user=True, - cohort=True, - academy=True, - answer=True, - event=True, - profile_academy=True, - capability='read_nps_answers', - role='potato') - db = self.model_to_dict(model, 'answer') + model = self.generate_models( + authenticate=True, + user=True, + cohort=True, + academy=True, + answer=True, + event=True, + profile_academy=True, + capability="read_nps_answers", + role="potato", + ) + db = self.model_to_dict(model, "answer") params = { - 'event': 9999, + "event": 9999, } - base_url = reverse_lazy('feedback:answer') - url = f'{base_url}?{urllib.parse.urlencode(params)}' + base_url = reverse_lazy("feedback:answer") + url = f"{base_url}?{urllib.parse.urlencode(params)}" response = self.client.get(url) json = response.json() self.assertEqual(json, []) self.assertEqual(response.status_code, status.HTTP_200_OK) - 
self.assertEqual(self.bc.database.list_of('feedback.Answer'), [db]) + self.assertEqual(self.bc.database.list_of("feedback.Answer"), [db]) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_answer_with_param_event_with_data(self): """Test /answer without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - user=True, - cohort=True, - academy=True, - answer=True, - event=True, - profile_academy=True, - capability='read_nps_answers', - role='potato') - db = self.model_to_dict(model, 'answer') + model = self.generate_models( + authenticate=True, + user=True, + cohort=True, + academy=True, + answer=True, + event=True, + profile_academy=True, + capability="read_nps_answers", + role="potato", + ) + db = self.model_to_dict(model, "answer") params = { - 'event': model['event'].id, + "event": model["event"].id, } - base_url = reverse_lazy('feedback:answer') - url = f'{base_url}?{urllib.parse.urlencode(params)}' + base_url = reverse_lazy("feedback:answer") + url = f"{base_url}?{urllib.parse.urlencode(params)}" response = self.client.get(url) json = response.json() - json = [{**x, 'created_at': None} for x in json if self.assertDatetime(x['created_at'])] - - self.assertEqual(json, [{ - 'created_at': None, - 'academy': { - 'id': model['answer'].academy.id, - 'name': model['answer'].academy.name, - 'slug': model['answer'].academy.slug, - }, - 'cohort': { - 'id': model['cohort'].id, - 'name': model['cohort'].name, - 'slug': model['cohort'].slug, - }, - 'comment': model['answer'].comment, - 'event': { - 'id': model['event'].id, - 'description': model['event'].description, - 'excerpt': model['event'].excerpt, - 'lang': model['event'].lang, - 'title': model['event'].title, - }, - 'highest': model['answer'].highest, - 'id': model['answer'].id, - 'lang': model['answer'].lang, - 'lowest': model['answer'].lowest, - 'mentor': { - 'first_name': model['user'].first_name, - 'id': model['user'].id, - 'last_name': model['user'].last_name, - 'profile': None, - }, - 'score': model['answer'].score, - 'status': model['answer'].status, - 'title': model['answer'].title, - 'user': { - 'first_name': model['user'].first_name, - 'id': model['user'].id, - 'last_name': model['user'].last_name, - 'profile': None, - }, - }]) + json = [{**x, "created_at": None} for x in json if self.assertDatetime(x["created_at"])] + + self.assertEqual( + json, + [ + { + "created_at": None, + "academy": { + "id": model["answer"].academy.id, + "name": model["answer"].academy.name, + "slug": model["answer"].academy.slug, + }, + "cohort": { + "id": model["cohort"].id, + "name": model["cohort"].name, + "slug": model["cohort"].slug, + }, + "comment": model["answer"].comment, + "event": { + "id": model["event"].id, + "description": model["event"].description, + "excerpt": model["event"].excerpt, + "lang": model["event"].lang, + "title": model["event"].title, + }, + "highest": model["answer"].highest, + "id": model["answer"].id, + "lang": model["answer"].lang, + "lowest": model["answer"].lowest, + "mentor": { + "first_name": model["user"].first_name, + "id": model["user"].id, + "last_name": model["user"].last_name, + "profile": None, + }, + "score": 
model["answer"].score, + "status": model["answer"].status, + "title": model["answer"].title, + "user": { + "first_name": model["user"].first_name, + "id": model["user"].id, + "last_name": model["user"].last_name, + "profile": None, + }, + } + ], + ) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('feedback.Answer'), [db]) + self.assertEqual(self.bc.database.list_of("feedback.Answer"), [db]) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_answer_with_bad_param_score_with_data(self): """Test /answer without auth""" self.headers(academy=1) - base = self.generate_models(authenticate=True, - user=True, - cohort=True, - academy=True, - event=True, - profile_academy=True, - capability='read_nps_answers', - role='potato') + base = self.generate_models( + authenticate=True, + user=True, + cohort=True, + academy=True, + event=True, + profile_academy=True, + capability="read_nps_answers", + role="potato", + ) for score in range(1, 10): self.remove_all_answer() - answer_kwargs = {'score': score} + answer_kwargs = {"score": score} model = self.generate_models(answer=True, answer_kwargs=answer_kwargs, models=base) - db = self.model_to_dict(model, 'answer') + db = self.model_to_dict(model, "answer") params = { - 'score': 1 if score == 10 else score + 1, + "score": 1 if score == 10 else score + 1, } - base_url = reverse_lazy('feedback:answer') - url = f'{base_url}?{urllib.parse.urlencode(params)}' + base_url = reverse_lazy("feedback:answer") + url = f"{base_url}?{urllib.parse.urlencode(params)}" response = self.client.get(url) json = response.json() self.assertEqual(json, []) - db['score'] = score + db["score"] = score self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('feedback.Answer'), [db]) + self.assertEqual(self.bc.database.list_of("feedback.Answer"), [db]) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_answer_with_param_score_with_data(self): """Test /answer without auth""" self.headers(academy=1) - base = self.generate_models(authenticate=True, - user=True, - cohort=True, - academy=True, - event=True, - profile_academy=True, - capability='read_nps_answers', - role='potato') + base = self.generate_models( + authenticate=True, + user=True, + cohort=True, + academy=True, + event=True, + profile_academy=True, + capability="read_nps_answers", + role="potato", + ) for score in range(1, 10): self.remove_all_answer() - answer_kwargs = {'score': score} + answer_kwargs = {"score": score} model = self.generate_models(answer=True, answer_kwargs=answer_kwargs, models=base) - db = self.model_to_dict(model, 'answer') + db = self.model_to_dict(model, "answer") params = { - 'score': score, + "score": score, } - base_url = reverse_lazy('feedback:answer') 
- url = f'{base_url}?{urllib.parse.urlencode(params)}' - response = self.client.get(url, headers={'Academy': model['academy'].id}) + base_url = reverse_lazy("feedback:answer") + url = f"{base_url}?{urllib.parse.urlencode(params)}" + response = self.client.get(url, headers={"Academy": model["academy"].id}) json = response.json() - json = [{**x, 'created_at': None} for x in json if self.assertDatetime(x['created_at'])] - - self.assertEqual(json, [{ - 'created_at': None, - 'academy': { - 'id': model['answer'].academy.id, - 'name': model['answer'].academy.name, - 'slug': model['answer'].academy.slug, - }, - 'cohort': { - 'id': model['cohort'].id, - 'name': model['cohort'].name, - 'slug': model['cohort'].slug, - }, - 'comment': model['answer'].comment, - 'event': { - 'id': model['event'].id, - 'description': model['event'].description, - 'excerpt': model['event'].excerpt, - 'lang': model['event'].lang, - 'title': model['event'].title, - }, - 'highest': model['answer'].highest, - 'id': model['answer'].id, - 'lang': model['answer'].lang, - 'lowest': model['answer'].lowest, - 'mentor': { - 'first_name': model['user'].first_name, - 'id': model['user'].id, - 'last_name': model['user'].last_name, - 'profile': None, - }, - 'score': score, - 'status': model['answer'].status, - 'title': model['answer'].title, - 'user': { - 'first_name': model['user'].first_name, - 'id': model['user'].id, - 'last_name': model['user'].last_name, - 'profile': None, - }, - }]) - - db['score'] = score + json = [{**x, "created_at": None} for x in json if self.assertDatetime(x["created_at"])] + + self.assertEqual( + json, + [ + { + "created_at": None, + "academy": { + "id": model["answer"].academy.id, + "name": model["answer"].academy.name, + "slug": model["answer"].academy.slug, + }, + "cohort": { + "id": model["cohort"].id, + "name": model["cohort"].name, + "slug": model["cohort"].slug, + }, + "comment": model["answer"].comment, + "event": { + "id": model["event"].id, + "description": model["event"].description, + "excerpt": model["event"].excerpt, + "lang": model["event"].lang, + "title": model["event"].title, + }, + "highest": model["answer"].highest, + "id": model["answer"].id, + "lang": model["answer"].lang, + "lowest": model["answer"].lowest, + "mentor": { + "first_name": model["user"].first_name, + "id": model["user"].id, + "last_name": model["user"].last_name, + "profile": None, + }, + "score": score, + "status": model["answer"].status, + "title": model["answer"].title, + "user": { + "first_name": model["user"].first_name, + "id": model["user"].id, + "last_name": model["user"].last_name, + "profile": None, + }, + } + ], + ) + + db["score"] = score self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('feedback.Answer'), [db]) + self.assertEqual(self.bc.database.list_of("feedback.Answer"), [db]) """ 🔽🔽🔽 With full like querystring """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_answer_with_query_like_full_name(self): """Test /answer with like full name""" self.headers(academy=1) - base = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_nps_answers', - 
role='potato') - del base['user'] + base = self.generate_models( + authenticate=True, profile_academy=True, capability="read_nps_answers", role="potato" + ) + del base["user"] user_kwargs = { - 'email': 'b@b.com', - 'first_name': 'Rene', - 'last_name': 'Descartes', + "email": "b@b.com", + "first_name": "Rene", + "last_name": "Descartes", } user_kwargs_2 = { - 'email': 'a@a.com', - 'first_name': 'Reinaldo', - 'last_name': 'Descarado', + "email": "a@a.com", + "first_name": "Reinaldo", + "last_name": "Descarado", } models = [ self.generate_models(user=True, answer=True, user_kwargs=user_kwargs, models=base), self.generate_models(user=True, answer=True, user_kwargs=user_kwargs_2, models=base), ] - base_url = reverse_lazy('feedback:answer') - url = f'{base_url}?like=Rene Descartes' + base_url = reverse_lazy("feedback:answer") + url = f"{base_url}?like=Rene Descartes" response = self.client.get(url) json = response.json() - expected = [{ - 'created_at': self.datetime_to_iso(models[0].answer.created_at), - 'academy': { - 'id': models[0].answer.academy.id, - 'name': models[0].answer.academy.name, - 'slug': models[0].answer.academy.slug, - }, - 'cohort': { - 'id': models[0].cohort.id, - 'name': models[0].cohort.name, - 'slug': models[0].cohort.slug, - }, - 'comment': models[0].answer.comment, - 'event': models[0].answer.event, - 'highest': models[0].answer.highest, - 'id': models[0].answer.id, - 'lang': models[0].answer.lang, - 'lowest': models[0].answer.lowest, - 'mentor': { - 'first_name': models[0].answer.mentor.first_name, - 'id': models[0].answer.mentor.id, - 'last_name': models[0].answer.mentor.last_name, - 'profile': None, - }, - 'score': models[0].answer.score, - 'status': models[0].answer.status, - 'title': models[0].answer.title, - 'user': { - 'first_name': 'Rene', - 'id': 2, - 'last_name': 'Descartes', - 'profile': None, - }, - }] + expected = [ + { + "created_at": self.datetime_to_iso(models[0].answer.created_at), + "academy": { + "id": models[0].answer.academy.id, + "name": models[0].answer.academy.name, + "slug": models[0].answer.academy.slug, + }, + "cohort": { + "id": models[0].cohort.id, + "name": models[0].cohort.name, + "slug": models[0].cohort.slug, + }, + "comment": models[0].answer.comment, + "event": models[0].answer.event, + "highest": models[0].answer.highest, + "id": models[0].answer.id, + "lang": models[0].answer.lang, + "lowest": models[0].answer.lowest, + "mentor": { + "first_name": models[0].answer.mentor.first_name, + "id": models[0].answer.mentor.id, + "last_name": models[0].answer.mentor.last_name, + "profile": None, + }, + "score": models[0].answer.score, + "status": models[0].answer.status, + "title": models[0].answer.title, + "user": { + "first_name": "Rene", + "id": 2, + "last_name": "Descartes", + "profile": None, + }, + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_answer_with_query_like_first_name(self): """Test /answer with like first name""" self.headers(academy=1) - base = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_nps_answers', - 
role='potato') - del base['user'] + base = self.generate_models( + authenticate=True, profile_academy=True, capability="read_nps_answers", role="potato" + ) + del base["user"] user_kwargs = { - 'email': 'b@b.com', - 'first_name': 'Rene', - 'last_name': 'Descartes', + "email": "b@b.com", + "first_name": "Rene", + "last_name": "Descartes", } user_kwargs_2 = { - 'email': 'a@a.com', - 'first_name': 'Reinaldo', - 'last_name': 'Descarado', + "email": "a@a.com", + "first_name": "Reinaldo", + "last_name": "Descarado", } models = [ self.generate_models(user=True, answer=True, user_kwargs=user_kwargs, models=base), self.generate_models(user=True, answer=True, user_kwargs=user_kwargs_2, models=base), ] - base_url = reverse_lazy('feedback:answer') - url = f'{base_url}?like=Rene' + base_url = reverse_lazy("feedback:answer") + url = f"{base_url}?like=Rene" response = self.client.get(url) json = response.json() - expected = [{ - 'created_at': self.datetime_to_iso(models[0].answer.created_at), - 'academy': { - 'id': models[0].answer.academy.id, - 'name': models[0].answer.academy.name, - 'slug': models[0].answer.academy.slug, - }, - 'cohort': { - 'id': models[0].cohort.id, - 'name': models[0].cohort.name, - 'slug': models[0].cohort.slug, - }, - 'comment': models[0].answer.comment, - 'event': models[0].answer.event, - 'highest': models[0].answer.highest, - 'id': models[0].answer.id, - 'lang': models[0].answer.lang, - 'lowest': models[0].answer.lowest, - 'mentor': { - 'first_name': models[0].answer.mentor.first_name, - 'id': models[0].answer.mentor.id, - 'last_name': models[0].answer.mentor.last_name, - 'profile': None, - }, - 'score': models[0].answer.score, - 'status': models[0].answer.status, - 'title': models[0].answer.title, - 'user': { - 'first_name': 'Rene', - 'id': 2, - 'last_name': 'Descartes', - 'profile': None, - }, - }] + expected = [ + { + "created_at": self.datetime_to_iso(models[0].answer.created_at), + "academy": { + "id": models[0].answer.academy.id, + "name": models[0].answer.academy.name, + "slug": models[0].answer.academy.slug, + }, + "cohort": { + "id": models[0].cohort.id, + "name": models[0].cohort.name, + "slug": models[0].cohort.slug, + }, + "comment": models[0].answer.comment, + "event": models[0].answer.event, + "highest": models[0].answer.highest, + "id": models[0].answer.id, + "lang": models[0].answer.lang, + "lowest": models[0].answer.lowest, + "mentor": { + "first_name": models[0].answer.mentor.first_name, + "id": models[0].answer.mentor.id, + "last_name": models[0].answer.mentor.last_name, + "profile": None, + }, + "score": models[0].answer.score, + "status": models[0].answer.status, + "title": models[0].answer.title, + "user": { + "first_name": "Rene", + "id": 2, + "last_name": "Descartes", + "profile": None, + }, + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_answer_with_query_like_last_name(self): """Test /answer with like last name""" self.headers(academy=1) - base = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_nps_answers', - role='potato') - del 
base['user'] + base = self.generate_models( + authenticate=True, profile_academy=True, capability="read_nps_answers", role="potato" + ) + del base["user"] user_kwargs = { - 'email': 'b@b.com', - 'first_name': 'Rene', - 'last_name': 'Descartes', + "email": "b@b.com", + "first_name": "Rene", + "last_name": "Descartes", } user_kwargs_2 = { - 'email': 'a@a.com', - 'first_name': 'Reinaldo', - 'last_name': 'Descarado', + "email": "a@a.com", + "first_name": "Reinaldo", + "last_name": "Descarado", } models = [ self.generate_models(user=True, answer=True, user_kwargs=user_kwargs, models=base), self.generate_models(user=True, answer=True, user_kwargs=user_kwargs_2, models=base), ] - base_url = reverse_lazy('feedback:answer') - url = f'{base_url}?like=Descartes' + base_url = reverse_lazy("feedback:answer") + url = f"{base_url}?like=Descartes" response = self.client.get(url) json = response.json() - expected = [{ - 'created_at': self.datetime_to_iso(models[0].answer.created_at), - 'academy': { - 'id': models[0].answer.academy.id, - 'name': models[0].answer.academy.name, - 'slug': models[0].answer.academy.slug, - }, - 'cohort': { - 'id': models[0].cohort.id, - 'name': models[0].cohort.name, - 'slug': models[0].cohort.slug, - }, - 'comment': models[0].answer.comment, - 'event': models[0].answer.event, - 'highest': models[0].answer.highest, - 'id': models[0].answer.id, - 'lang': models[0].answer.lang, - 'lowest': models[0].answer.lowest, - 'mentor': { - 'first_name': models[0].answer.mentor.first_name, - 'id': models[0].answer.mentor.id, - 'last_name': models[0].answer.mentor.last_name, - 'profile': None, - }, - 'score': models[0].answer.score, - 'status': models[0].answer.status, - 'title': models[0].answer.title, - 'user': { - 'first_name': 'Rene', - 'id': 2, - 'last_name': 'Descartes', - 'profile': None, - }, - }] + expected = [ + { + "created_at": self.datetime_to_iso(models[0].answer.created_at), + "academy": { + "id": models[0].answer.academy.id, + "name": models[0].answer.academy.name, + "slug": models[0].answer.academy.slug, + }, + "cohort": { + "id": models[0].cohort.id, + "name": models[0].cohort.name, + "slug": models[0].cohort.slug, + }, + "comment": models[0].answer.comment, + "event": models[0].answer.event, + "highest": models[0].answer.highest, + "id": models[0].answer.id, + "lang": models[0].answer.lang, + "lowest": models[0].answer.lowest, + "mentor": { + "first_name": models[0].answer.mentor.first_name, + "id": models[0].answer.mentor.id, + "last_name": models[0].answer.mentor.last_name, + "profile": None, + }, + "score": models[0].answer.score, + "status": models[0].answer.status, + "title": models[0].answer.title, + "user": { + "first_name": "Rene", + "id": 2, + "last_name": "Descartes", + "profile": None, + }, + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_answer_with_query_like_email(self): """Test /answer with like email""" self.headers(academy=1) - base = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_nps_answers', - role='potato') - del base['user'] + base = 
self.generate_models( + authenticate=True, profile_academy=True, capability="read_nps_answers", role="potato" + ) + del base["user"] user_kwargs = { - 'email': 'b@b.com', - 'first_name': 'Rene', - 'last_name': 'Descartes', + "email": "b@b.com", + "first_name": "Rene", + "last_name": "Descartes", } user_kwargs_2 = { - 'email': 'a@a.com', - 'first_name': 'Reinaldo', - 'last_name': 'Descarado', + "email": "a@a.com", + "first_name": "Reinaldo", + "last_name": "Descarado", } models = [ self.generate_models(user=True, answer=True, user_kwargs=user_kwargs, models=base), self.generate_models(user=True, answer=True, user_kwargs=user_kwargs_2, models=base), ] - base_url = reverse_lazy('feedback:answer') - url = f'{base_url}?like=b@b.com' + base_url = reverse_lazy("feedback:answer") + url = f"{base_url}?like=b@b.com" response = self.client.get(url) json = response.json() - expected = [{ - 'created_at': self.datetime_to_iso(models[0].answer.created_at), - 'academy': { - 'id': models[0].answer.academy.id, - 'name': models[0].answer.academy.name, - 'slug': models[0].answer.academy.slug, - }, - 'cohort': { - 'id': models[0].cohort.id, - 'name': models[0].cohort.name, - 'slug': models[0].cohort.slug, - }, - 'comment': models[0].answer.comment, - 'event': models[0].answer.event, - 'highest': models[0].answer.highest, - 'id': models[0].answer.id, - 'lang': models[0].answer.lang, - 'lowest': models[0].answer.lowest, - 'mentor': { - 'first_name': models[0].answer.mentor.first_name, - 'id': models[0].answer.mentor.id, - 'last_name': models[0].answer.mentor.last_name, - 'profile': None, - }, - 'score': models[0].answer.score, - 'status': models[0].answer.status, - 'title': models[0].answer.title, - 'user': { - 'first_name': 'Rene', - 'id': 2, - 'last_name': 'Descartes', - 'profile': None, - }, - }] + expected = [ + { + "created_at": self.datetime_to_iso(models[0].answer.created_at), + "academy": { + "id": models[0].answer.academy.id, + "name": models[0].answer.academy.name, + "slug": models[0].answer.academy.slug, + }, + "cohort": { + "id": models[0].cohort.id, + "name": models[0].cohort.name, + "slug": models[0].cohort.slug, + }, + "comment": models[0].answer.comment, + "event": models[0].answer.event, + "highest": models[0].answer.highest, + "id": models[0].answer.id, + "lang": models[0].answer.lang, + "lowest": models[0].answer.lowest, + "mentor": { + "first_name": models[0].answer.mentor.first_name, + "id": models[0].answer.mentor.id, + "last_name": models[0].answer.mentor.last_name, + "profile": None, + }, + "score": models[0].answer.score, + "status": models[0].answer.status, + "title": models[0].answer.title, + "user": { + "first_name": "Rene", + "id": 2, + "last_name": "Descartes", + "profile": None, + }, + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -1013,40 +1079,46 @@ def test_answer_with_query_like_email(self): 🔽🔽🔽 Spy extensions """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch.object(APIViewExtensionHandlers, '_spy_extensions', MagicMock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch.object(APIViewExtensionHandlers, "_spy_extensions", MagicMock()) def test_answer__spy_extensions(self): """Test 
/answer without auth""" self.headers(academy=1) - models = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_nps_answers', - role='potato') + models = self.generate_models( + authenticate=True, profile_academy=True, capability="read_nps_answers", role="potato" + ) - url = reverse_lazy('feedback:answer') + url = reverse_lazy("feedback:answer") self.client.get(url) - self.bc.check.calls(APIViewExtensionHandlers._spy_extensions.call_args_list, [ - call(['CacheExtension', 'LanguageExtension', 'LookupExtension', 'PaginationExtension', 'SortExtension']), - ]) - - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch.object(APIViewExtensionHandlers, '_spy_extension_arguments', MagicMock()) + self.bc.check.calls( + APIViewExtensionHandlers._spy_extensions.call_args_list, + [ + call( + ["CacheExtension", "LanguageExtension", "LookupExtension", "PaginationExtension", "SortExtension"] + ), + ], + ) + + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch.object(APIViewExtensionHandlers, "_spy_extension_arguments", MagicMock()) def test_answer__spy_extension_arguments(self): """Test /answer without auth""" self.headers(academy=1) - models = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_nps_answers', - role='potato') + models = self.generate_models( + authenticate=True, profile_academy=True, capability="read_nps_answers", role="potato" + ) - url = reverse_lazy('feedback:answer') + url = reverse_lazy("feedback:answer") self.client.get(url) - self.assertEqual(APIViewExtensionHandlers._spy_extension_arguments.call_args_list, [ - call(cache=AnswerCache, sort='-created_at', paginate=True), - ]) + self.assertEqual( + APIViewExtensionHandlers._spy_extension_arguments.call_args_list, + [ + call(cache=AnswerCache, sort="-created_at", paginate=True), + ], + ) diff --git a/breathecode/feedback/tests/urls/tests_academy_survey.py b/breathecode/feedback/tests/urls/tests_academy_survey.py index 38c6998b4..003517d2a 100644 --- a/breathecode/feedback/tests/urls/tests_academy_survey.py +++ b/breathecode/feedback/tests/urls/tests_academy_survey.py @@ -1,6 +1,7 @@ """ Test /academy/survey """ + from unittest.mock import MagicMock, call, patch from django.urls.base import reverse_lazy @@ -17,49 +18,49 @@ class SurveyTestSuite(FeedbackTestCase): """Test /academy/survey""" - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_survey__get__without_auth(self): """Test /academy/survey without authorization""" - url = reverse_lazy('feedback:academy_survey') + url = reverse_lazy("feedback:academy_survey") response = self.client.get(url) json = response.json() - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} self.assertEqual(json, expected) 
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_survey__get__without_academy(self): """Test /academy/survey without academy""" self.bc.database.create(authenticate=True) - url = reverse_lazy('feedback:academy_survey') + url = reverse_lazy("feedback:academy_survey") response = self.client.get(url) json = response.json() expected = { - 'detail': "Missing academy_id parameter expected for the endpoint url or 'Academy' header", - 'status_code': 403 + "detail": "Missing academy_id parameter expected for the endpoint url or 'Academy' header", + "status_code": 403, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_survey__get__without_role(self): """Test /academy/survey without role""" self.headers(academy=1) - url = reverse_lazy('feedback:academy_survey') + url = reverse_lazy("feedback:academy_survey") self.bc.database.create(authenticate=True) response = self.client.get(url) json = response.json() - expected = {'detail': "You (user: 1) don't have this capability: read_survey for academy 1", 'status_code': 403} + expected = {"detail": "You (user: 1) don't have this capability: read_survey for academy 1", "status_code": 403} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_survey__get__without_data(self): """Test /academy/survey without data""" @@ -68,19 +69,19 @@ def test_academy_survey__get__without_data(self): authenticate=True, academy=True, profile_academy=True, - role='read_survey', - capability='read_survey', + role="read_survey", + capability="read_survey", ) - url = reverse_lazy('feedback:academy_survey') + url = reverse_lazy("feedback:academy_survey") response = self.client.get(url) json = response.json() self.assertEqual(json, []) self.assertEqual(response.status_code, status.HTTP_200_OK) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def 
test_academy_survey__get__with_data(self): """Test /academy/survey with data""" @@ -89,76 +90,72 @@ def test_academy_survey__get__with_data(self): authenticate=True, academy=True, profile_academy=True, - role='read_survey', + role="read_survey", survey=True, - capability='read_survey', + capability="read_survey", ) - url = reverse_lazy('feedback:academy_survey') + url = reverse_lazy("feedback:academy_survey") response = self.client.get(url) json = response.json() - expected = [{ - 'id': model['survey'].id, - 'lang': model['survey'].lang, - 'cohort': { - 'id': model['cohort'].id, - 'slug': model['cohort'].slug, - 'name': model['cohort'].name - }, - 'scores': model['survey'].scores, - 'response_rate': model['survey'].response_rate, - 'status': model['survey'].status, - 'status_json': model['survey'].status_json, - 'duration': '86400.0', - 'created_at': self.bc.datetime.to_iso_string(model['survey'].created_at), - 'sent_at': None, - 'public_url': 'https://nps.4geeks.com/survey/1' - }] + expected = [ + { + "id": model["survey"].id, + "lang": model["survey"].lang, + "cohort": {"id": model["cohort"].id, "slug": model["cohort"].slug, "name": model["cohort"].name}, + "scores": model["survey"].scores, + "response_rate": model["survey"].response_rate, + "status": model["survey"].status, + "status_json": model["survey"].status_json, + "duration": "86400.0", + "created_at": self.bc.datetime.to_iso_string(model["survey"].created_at), + "sent_at": None, + "public_url": "https://nps.4geeks.com/survey/1", + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_survey__get__with_response_rate(self): """Test /academy/survey wiith response rate""" self.headers(academy=1) - survey_kwargs = {'response_rate': 7.5} + survey_kwargs = {"response_rate": 7.5} model = self.bc.database.create( authenticate=True, academy=True, profile_academy=True, - role='read_survey', + role="read_survey", survey=survey_kwargs, - capability='read_survey', + capability="read_survey", ) - url = reverse_lazy('feedback:academy_survey') + url = reverse_lazy("feedback:academy_survey") response = self.client.get(url) json = response.json() - expected = [{ - 'id': model['survey'].id, - 'lang': model['survey'].lang, - 'cohort': { - 'id': model['cohort'].id, - 'slug': model['cohort'].slug, - 'name': model['cohort'].name - }, - 'scores': model['survey'].scores, - 'response_rate': model['survey'].response_rate, - 'status': model['survey'].status, - 'status_json': model['survey'].status_json, - 'duration': '86400.0', - 'created_at': self.bc.datetime.to_iso_string(model['survey'].created_at), - 'sent_at': None, - 'public_url': 'https://nps.4geeks.com/survey/1' - }] + expected = [ + { + "id": model["survey"].id, + "lang": model["survey"].lang, + "cohort": {"id": model["cohort"].id, "slug": model["cohort"].slug, "name": model["cohort"].name}, + "scores": model["survey"].scores, + "response_rate": model["survey"].response_rate, + "status": model["survey"].status, + "status_json": model["survey"].status_json, + "duration": "86400.0", + "created_at": 
self.bc.datetime.to_iso_string(model["survey"].created_at), + "sent_at": None, + "public_url": "https://nps.4geeks.com/survey/1", + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_survey__get__with_sent_status_query(self): """Test /academy/survey with sent status query""" @@ -167,12 +164,12 @@ def test_academy_survey__get__with_sent_status_query(self): authenticate=True, academy=True, profile_academy=True, - role='read_survey', + role="read_survey", survey=True, - capability='read_survey', + capability="read_survey", ) - url = reverse_lazy('feedback:academy_survey') + '?status=SENT' + url = reverse_lazy("feedback:academy_survey") + "?status=SENT" response = self.client.get(url) json = response.json() expected = [] @@ -180,8 +177,8 @@ def test_academy_survey__get__with_sent_status_query(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_survey__get__with_pending_status_query(self): """Test /academy/survey with pending status query""" @@ -190,37 +187,35 @@ def test_academy_survey__get__with_pending_status_query(self): authenticate=True, academy=True, profile_academy=True, - role='read_survey', + role="read_survey", survey=True, - capability='read_survey', + capability="read_survey", ) - url = reverse_lazy('feedback:academy_survey') + '?status=PENDING' + url = reverse_lazy("feedback:academy_survey") + "?status=PENDING" response = self.client.get(url) json = response.json() - expected = [{ - 'id': model['survey'].id, - 'lang': model['survey'].lang, - 'cohort': { - 'id': model['cohort'].id, - 'slug': model['cohort'].slug, - 'name': model['cohort'].name - }, - 'scores': model['survey'].scores, - 'response_rate': model['survey'].response_rate, - 'status': model['survey'].status, - 'status_json': model['survey'].status_json, - 'duration': '86400.0', - 'created_at': self.bc.datetime.to_iso_string(model['survey'].created_at), - 'sent_at': None, - 'public_url': 'https://nps.4geeks.com/survey/1' - }] + expected = [ + { + "id": model["survey"].id, + "lang": model["survey"].lang, + "cohort": {"id": model["cohort"].id, "slug": model["cohort"].slug, "name": model["cohort"].name}, + "scores": model["survey"].scores, + "response_rate": model["survey"].response_rate, + "status": model["survey"].status, + "status_json": model["survey"].status_json, + "duration": "86400.0", + "created_at": self.bc.datetime.to_iso_string(model["survey"].created_at), + "sent_at": None, + "public_url": "https://nps.4geeks.com/survey/1", + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - @patch('django.db.models.signals.pre_delete.send_robust', 
MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_survey__get__with_different_lang_query(self): """Test /academy/survey with different lang status query""" @@ -229,12 +224,12 @@ def test_academy_survey__get__with_different_lang_query(self): authenticate=True, academy=True, profile_academy=True, - role='read_survey', + role="read_survey", survey=True, - capability='read_survey', + capability="read_survey", ) - url = reverse_lazy('feedback:academy_survey') + '?lang=esp' + url = reverse_lazy("feedback:academy_survey") + "?lang=esp" response = self.client.get(url) json = response.json() expected = [] @@ -242,8 +237,8 @@ def test_academy_survey__get__with_different_lang_query(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_survey__get__with_same_lang_query(self): """Test /academy/survey with same lang status query""" @@ -252,37 +247,35 @@ def test_academy_survey__get__with_same_lang_query(self): authenticate=True, academy=True, profile_academy=True, - role='read_survey', + role="read_survey", survey=True, - capability='read_survey', + capability="read_survey", ) - url = reverse_lazy('feedback:academy_survey') + '?lang=en' + url = reverse_lazy("feedback:academy_survey") + "?lang=en" response = self.client.get(url) json = response.json() - expected = [{ - 'id': model['survey'].id, - 'lang': model['survey'].lang, - 'cohort': { - 'id': model['cohort'].id, - 'slug': model['cohort'].slug, - 'name': model['cohort'].name - }, - 'scores': model['survey'].scores, - 'response_rate': model['survey'].response_rate, - 'status': model['survey'].status, - 'status_json': model['survey'].status_json, - 'duration': '86400.0', - 'created_at': self.bc.datetime.to_iso_string(model['survey'].created_at), - 'sent_at': None, - 'public_url': 'https://nps.4geeks.com/survey/1' - }] + expected = [ + { + "id": model["survey"].id, + "lang": model["survey"].lang, + "cohort": {"id": model["cohort"].id, "slug": model["cohort"].slug, "name": model["cohort"].name}, + "scores": model["survey"].scores, + "response_rate": model["survey"].response_rate, + "status": model["survey"].status, + "status_json": model["survey"].status_json, + "duration": "86400.0", + "created_at": self.bc.datetime.to_iso_string(model["survey"].created_at), + "sent_at": None, + "public_url": "https://nps.4geeks.com/survey/1", + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", 
MagicMock(return_value=None)) def test_academy_survey__get__with_different_cohort_slug_cohort_query(self): """Test /academy/survey with different cohort slug than what is in the cohort query""" @@ -291,13 +284,13 @@ def test_academy_survey__get__with_different_cohort_slug_cohort_query(self): authenticate=True, academy=True, profile_academy=True, - role='read_survey', + role="read_survey", survey=True, - capability='read_survey', + capability="read_survey", cohort=True, ) - url = reverse_lazy('feedback:academy_survey') + f'?cohort=testing-cohort' + url = reverse_lazy("feedback:academy_survey") + f"?cohort=testing-cohort" response = self.client.get(url) json = response.json() expected = [] @@ -305,90 +298,88 @@ def test_academy_survey__get__with_different_cohort_slug_cohort_query(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_survey__get__with_same_cohort_slug_cohort_query(self): """Test /academy/survey with same cohort slug as in the model""" self.headers(academy=1) - cohort_kwargs = {'slug': 'testing-cohort'} + cohort_kwargs = {"slug": "testing-cohort"} model = self.bc.database.create( authenticate=True, academy=True, profile_academy=True, - role='read_survey', + role="read_survey", survey=True, - capability='read_survey', + capability="read_survey", cohort=cohort_kwargs, ) - url = reverse_lazy('feedback:academy_survey') + f'?cohort=testing-cohort' + url = reverse_lazy("feedback:academy_survey") + f"?cohort=testing-cohort" response = self.client.get(url) json = response.json() - expected = [{ - 'id': model['survey'].id, - 'lang': model['survey'].lang, - 'cohort': { - 'id': model['cohort'].id, - 'slug': model['cohort'].slug, - 'name': model['cohort'].name - }, - 'scores': model['survey'].scores, - 'response_rate': model['survey'].response_rate, - 'status': model['survey'].status, - 'status_json': model['survey'].status_json, - 'duration': '86400.0', - 'created_at': self.bc.datetime.to_iso_string(model['survey'].created_at), - 'sent_at': None, - 'public_url': 'https://nps.4geeks.com/survey/1' - }] + expected = [ + { + "id": model["survey"].id, + "lang": model["survey"].lang, + "cohort": {"id": model["cohort"].id, "slug": model["cohort"].slug, "name": model["cohort"].name}, + "scores": model["survey"].scores, + "response_rate": model["survey"].response_rate, + "status": model["survey"].status, + "status_json": model["survey"].status_json, + "duration": "86400.0", + "created_at": self.bc.datetime.to_iso_string(model["survey"].created_at), + "sent_at": None, + "public_url": "https://nps.4geeks.com/survey/1", + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def 
test_academy_survey__post__without_auth(self): """Test /academy/survey without authorization""" - url = reverse_lazy('feedback:academy_survey') + url = reverse_lazy("feedback:academy_survey") response = self.client.post(url, {}) json = response.json() - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_survey__post__without_academy(self): """Test /academy/survey without authorization""" self.bc.database.create(authenticate=True) - url = reverse_lazy('feedback:academy_survey') + url = reverse_lazy("feedback:academy_survey") response = self.client.post(url, {}) json = response.json() expected = { - 'detail': "Missing academy_id parameter expected for the endpoint url or 'Academy' header", - 'status_code': 403 + "detail": "Missing academy_id parameter expected for the endpoint url or 'Academy' header", + "status_code": 403, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_survey__post__without_role(self): """Test /academy/survey without role""" self.headers(academy=1) - url = reverse_lazy('feedback:academy_survey') + url = reverse_lazy("feedback:academy_survey") self.bc.database.create(authenticate=True) response = self.client.post(url, {}) json = response.json() - expected = {'detail': "You (user: 1) don't have this capability: crud_survey for academy 1", 'status_code': 403} + expected = {"detail": "You (user: 1) don't have this capability: crud_survey for academy 1", "status_code": 403} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_survey__post__without_cohort(self): """Test /academy/survey post without cohort""" @@ -397,42 +388,42 @@ def test_academy_survey__post__without_cohort(self): authenticate=True, academy=True, profile_academy=True, - role='crud_survey', - capability='crud_survey', + role="crud_survey", + capability="crud_survey", ) - url = reverse_lazy('feedback:academy_survey') + url = reverse_lazy("feedback:academy_survey") response = self.client.post(url) json = response.json() - expected = {'cohort': 
['This field is required.']} + expected = {"cohort": ["This field is required."]} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_survey__post__with_cohort_needs_rights(self): """Test /academy/survey post with cohort needs rights""" self.headers(academy=2) - profile_academy_kwargs = {'academy_id': 2} + profile_academy_kwargs = {"academy_id": 2} model = self.bc.database.create( authenticate=True, academy=2, profile_academy=profile_academy_kwargs, - role='crud_survey', - capability='crud_survey', + role="crud_survey", + capability="crud_survey", cohort=True, ) - url = reverse_lazy('feedback:academy_survey') - response = self.client.post(url, {'cohort': 1}) + url = reverse_lazy("feedback:academy_survey") + response = self.client.post(url, {"cohort": 1}) json = response.json() - expected = {'detail': 'cohort-academy-needs-rights', 'status_code': 400} + expected = {"detail": "cohort-academy-needs-rights", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_survey__post__with_cohort_shorter_than_hour(self): """Test /academy/survey post with cohort shorter than hour""" @@ -441,20 +432,20 @@ def test_academy_survey__post__with_cohort_shorter_than_hour(self): authenticate=True, academy=True, profile_academy=True, - role='crud_survey', - capability='crud_survey', + role="crud_survey", + capability="crud_survey", cohort=True, ) - url = reverse_lazy('feedback:academy_survey') - response = self.client.post(url, {'cohort': 1, 'duration': '3599'}) + url = reverse_lazy("feedback:academy_survey") + response = self.client.post(url, {"cohort": 1, "duration": "3599"}) json = response.json() - expected = {'detail': 'minimum-survey-duration-1h', 'status_code': 400} + expected = {"detail": "minimum-survey-duration-1h", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_survey__post__without_cohort_teacher_assigned(self): """Test /academy/survey post without cohort teacher assigned""" @@ -463,231 +454,228 @@ def test_academy_survey__post__without_cohort_teacher_assigned(self): authenticate=True, academy=True, profile_academy=True, - role='crud_survey', - capability='crud_survey', + 
role="crud_survey", + capability="crud_survey", cohort=True, cohort_user=True, ) - url = reverse_lazy('feedback:academy_survey') - response = self.client.post(url, {'cohort': 1}) + url = reverse_lazy("feedback:academy_survey") + response = self.client.post(url, {"cohort": 1}) json = response.json() - expected = {'detail': 'cohort-needs-teacher-assigned', 'status_code': 400} + expected = {"detail": "cohort-needs-teacher-assigned", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_survey__post__with_cohort_teacher_assigned(self): """Test /academy/survey post with cohort teacher assigned""" self.headers(academy=1) - cohort_user_kwargs = [{'role': 'STUDENT'}, {'role': 'TEACHER'}] + cohort_user_kwargs = [{"role": "STUDENT"}, {"role": "TEACHER"}] model = self.bc.database.create( authenticate=True, academy=True, profile_academy=True, - role='STUDENT', - capability='crud_survey', + role="STUDENT", + capability="crud_survey", cohort=True, cohort_user=cohort_user_kwargs, ) - url = reverse_lazy('feedback:academy_survey') - response = self.client.post(url, {'cohort': 1}) + url = reverse_lazy("feedback:academy_survey") + response = self.client.post(url, {"cohort": 1}) json = response.json() - del json['created_at'] - del json['updated_at'] + del json["created_at"] + del json["updated_at"] expected = { - 'id': model['cohort'].id, - 'status': True, - 'public_url': 'https://nps.4geeks.com/survey/1', - 'lang': 'en', - 'max_assistants_to_ask': 2, - 'max_teachers_to_ask': 1, - 'duration': '1 00:00:00', - 'sent_at': None, - 'cohort': model['cohort_user'][0].cohort.id, + "id": model["cohort"].id, + "status": True, + "public_url": "https://nps.4geeks.com/survey/1", + "lang": "en", + "max_assistants_to_ask": 2, + "max_teachers_to_ask": 1, + "duration": "1 00:00:00", + "sent_at": None, + "cohort": model["cohort_user"][0].cohort.id, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_survey__post__with_cohort_teacher_assigned_with_longer_than_hour(self): """Test /academy/survey post with cohort teacher assigned with longer than hour.""" self.headers(academy=1) - cohort_user_kwargs = [{'role': 'STUDENT'}, {'role': 'TEACHER'}] + cohort_user_kwargs = [{"role": "STUDENT"}, {"role": "TEACHER"}] model = self.bc.database.create( authenticate=True, academy=True, profile_academy=True, - role='STUDENT', - capability='crud_survey', + role="STUDENT", + capability="crud_survey", cohort=True, cohort_user=cohort_user_kwargs, ) - url = reverse_lazy('feedback:academy_survey') - response = self.client.post(url, {'cohort': 1, 'duration': '3601'}) + url = 
reverse_lazy("feedback:academy_survey") + response = self.client.post(url, {"cohort": 1, "duration": "3601"}) json = response.json() - del json['created_at'] - del json['updated_at'] + del json["created_at"] + del json["updated_at"] expected = { - 'id': model['cohort'].id, - 'status': True, - 'public_url': 'https://nps.4geeks.com/survey/1', - 'lang': 'en', - 'max_assistants_to_ask': 2, - 'max_teachers_to_ask': 1, - 'duration': '01:00:01', - 'sent_at': None, - 'cohort': model['cohort_user'][0].cohort.id, + "id": model["cohort"].id, + "status": True, + "public_url": "https://nps.4geeks.com/survey/1", + "lang": "en", + "max_assistants_to_ask": 2, + "max_teachers_to_ask": 1, + "duration": "01:00:01", + "sent_at": None, + "cohort": model["cohort_user"][0].cohort.id, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - @patch('breathecode.feedback.actions.send_survey_group', MagicMock(return_value={'success': [], 'error': []})) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.feedback.actions.send_survey_group", MagicMock(return_value={"success": [], "error": []})) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_survey__post__when_send_survey_group_is_called(self): """Test /academy/sur.""" self.headers(academy=1) - cohort_user_kwargs = [{'role': 'STUDENT'}, {'role': 'TEACHER'}] + cohort_user_kwargs = [{"role": "STUDENT"}, {"role": "TEACHER"}] model = self.bc.database.create( authenticate=True, academy=True, profile_academy=True, - role='STUDENT', - capability='crud_survey', + role="STUDENT", + capability="crud_survey", cohort=True, cohort_user=cohort_user_kwargs, ) - url = reverse_lazy('feedback:academy_survey') - response = self.client.post(url, {'cohort': 1, 'duration': '3601', 'send_now': True}) + url = reverse_lazy("feedback:academy_survey") + response = self.client.post(url, {"cohort": 1, "duration": "3601", "send_now": True}) json = response.json() - del json['created_at'] - del json['updated_at'] + del json["created_at"] + del json["updated_at"] expected = { - 'id': model['cohort'].id, - 'status': True, - 'public_url': 'https://nps.4geeks.com/survey/1', - 'lang': 'en', - 'max_assistants_to_ask': 2, - 'max_teachers_to_ask': 1, - 'duration': '01:00:01', - 'sent_at': None, - 'cohort': model['cohort_user'][0].cohort.id, + "id": model["cohort"].id, + "status": True, + "public_url": "https://nps.4geeks.com/survey/1", + "lang": "en", + "max_assistants_to_ask": 2, + "max_teachers_to_ask": 1, + "duration": "01:00:01", + "sent_at": None, + "cohort": model["cohort_user"][0].cohort.id, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - survey = self.bc.database.get('feedback.Survey', 1, dict=False) + survey = self.bc.database.get("feedback.Survey", 1, dict=False) self.assertEqual(actions.send_survey_group.call_args_list, [call(survey=survey)]) """DELETE Auth""" - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + 
@patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_survey__delete__in_bulk_without_capability(self): """Test /academy/survey delete in bulk without capability.""" self.headers(academy=1) - base = self.generate_models(authenticate=True, ) - url = reverse_lazy('feedback:academy_survey') + base = self.generate_models( + authenticate=True, + ) + url = reverse_lazy("feedback:academy_survey") response = self.client.delete(url) json = response.json() - self.assertEqual(json, { - 'detail': "You (user: 1) don't have this capability: crud_survey for academy 1", - 'status_code': 403, - }) + self.assertEqual( + json, + { + "detail": "You (user: 1) don't have this capability: crud_survey for academy 1", + "status_code": 403, + }, + ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_survey_delete_in_bulk_with_two_surveys(self): """Test /academy/survey/ delete in bulk with two surveys.""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - survey=2, - capability='crud_survey', - role=1) + model = self.generate_models( + authenticate=True, profile_academy=True, survey=2, capability="crud_survey", role=1 + ) - url = reverse_lazy('feedback:academy_survey') + '?id=1,2' + url = reverse_lazy("feedback:academy_survey") + "?id=1,2" response = self.client.delete(url) self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) - self.assertEqual(self.bc.database.list_of('feedback.Survey'), []) + self.assertEqual(self.bc.database.list_of("feedback.Survey"), []) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_survey__delete__without_passing_ids(self): """Test /academy/survey/ delete without passing ids.""" self.headers(academy=1) - slug = 'without-survey-id-and-lookups' + slug = "without-survey-id-and-lookups" - model = self.generate_models(user=1, profile_academy=True, survey=2, capability='crud_survey', role=1) + model = self.generate_models(user=1, profile_academy=True, survey=2, capability="crud_survey", role=1) self.client.force_authenticate(model.user) - url = reverse_lazy('feedback:academy_survey') + url = reverse_lazy("feedback:academy_survey") response = self.client.delete(url) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(response.json()['detail'], slug) - self.assertEqual(self.bc.database.list_of('feedback.Survey'), self.bc.format.to_dict(model.survey)) + self.assertEqual(response.json()["detail"], slug) + self.assertEqual(self.bc.database.list_of("feedback.Survey"), self.bc.format.to_dict(model.survey)) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - 
@patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_survey_id__delete__not_answered(self): """Test /academy/survey/ id delete not answered.""" - SURVEY_STATUS = ['PENDING', 'SENT', 'OPENED', 'EXPIRED'] + SURVEY_STATUS = ["PENDING", "SENT", "OPENED", "EXPIRED"] for x in SURVEY_STATUS: - answer = {'status': x} - model = self.generate_models(user=1, - profile_academy=True, - survey=1, - capability='crud_survey', - role=1, - answer=answer) + answer = {"status": x} + model = self.generate_models( + user=1, profile_academy=True, survey=1, capability="crud_survey", role=1, answer=answer + ) self.headers(academy=model.academy.id) self.client.force_authenticate(model.user) - url = reverse_lazy('feedback:academy_survey') + f'?id={model.survey.id}' + url = reverse_lazy("feedback:academy_survey") + f"?id={model.survey.id}" response = self.client.delete(url) self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) - self.assertEqual(self.bc.database.list_of('feedback.Survey'), []) + self.assertEqual(self.bc.database.list_of("feedback.Survey"), []) - @patch('breathecode.feedback.signals.survey_answered.send_robust', MagicMock()) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("breathecode.feedback.signals.survey_answered.send_robust", MagicMock()) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_academy_survey_id__delete__answered(self): """Test /academy/survey/ id delete answered.""" self.headers(academy=1) - answer = {'status': 'ANSWERED'} - model = self.generate_models(user=1, - profile_academy=True, - survey=1, - capability='crud_survey', - role=1, - answer=answer) + answer = {"status": "ANSWERED"} + model = self.generate_models( + user=1, profile_academy=True, survey=1, capability="crud_survey", role=1, answer=answer + ) self.client.force_authenticate(model.user) - url = reverse_lazy('feedback:academy_survey') + '?id=1' + url = reverse_lazy("feedback:academy_survey") + "?id=1" response = self.client.delete(url) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('feedback.Survey'), [self.bc.format.to_dict(model.survey)]) + self.assertEqual(self.bc.database.list_of("feedback.Survey"), [self.bc.format.to_dict(model.survey)]) diff --git a/breathecode/feedback/tests/urls/tests_academy_survey_id.py b/breathecode/feedback/tests/urls/tests_academy_survey_id.py index 65ababcf7..b10b20211 100644 --- a/breathecode/feedback/tests/urls/tests_academy_survey_id.py +++ b/breathecode/feedback/tests/urls/tests_academy_survey_id.py @@ -1,6 +1,7 @@ """ Test /academy/survey """ + import re import urllib from unittest.mock import MagicMock, call, patch @@ -21,30 +22,30 @@ class SurveyTestSuite(FeedbackTestCase): """Test /academy/survey""" - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + 
@patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_academy_survey__get__without_auth(self): """Test /academy/survey get without authorization""" - url = reverse_lazy('feedback:academy_survey_id', kwargs={'survey_id': 1}) + url = reverse_lazy("feedback:academy_survey_id", kwargs={"survey_id": 1}) response = self.client.get(url) json = response.json() - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_academy_survey__get__without_academy(self): """Test /academy/survey get without academy""" self.bc.database.create(authenticate=True) - url = reverse_lazy('feedback:academy_survey_id', kwargs={'survey_id': 1}) + url = reverse_lazy("feedback:academy_survey_id", kwargs={"survey_id": 1}) response = self.client.get(url) json = response.json() expected = { - 'detail': "Missing academy_id parameter expected for the endpoint url or 'Academy' header", - 'status_code': 403 + "detail": "Missing academy_id parameter expected for the endpoint url or 'Academy' header", + "status_code": 403, } self.assertEqual(json, expected) @@ -55,104 +56,103 @@ def test_academy_survey__get__without_academy(self): def test_academy_survey__delete__in_bulk_without_capability(self): """Test /academy/survey/ delete in bulk without capability.""" self.headers(academy=1) - base = self.generate_models(authenticate=True, ) - url = reverse_lazy('feedback:academy_survey_id', kwargs={'survey_id': 1}) + base = self.generate_models( + authenticate=True, + ) + url = reverse_lazy("feedback:academy_survey_id", kwargs={"survey_id": 1}) response = self.client.delete(url) json = response.json() - self.assertEqual(json, { - 'detail': "You (user: 1) don't have this capability: crud_survey for academy 1", - 'status_code': 403, - }) + self.assertEqual( + json, + { + "detail": "You (user: 1) don't have this capability: crud_survey for academy 1", + "status_code": 403, + }, + ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) def test_academy_survey_delete_in_bulk_with_two_surveys(self): """Test /academy/survey/ delete in bulk with two surveys.""" self.headers(academy=1) - model = self.generate_models(user=1, profile_academy=True, survey=2, capability='crud_survey', role=1) + model = self.generate_models(user=1, profile_academy=True, survey=2, capability="crud_survey", role=1) self.client.force_authenticate(model.user) - url = reverse_lazy('feedback:academy_survey_id', kwargs={'survey_id': 1}) + '?id=1,2' + url = reverse_lazy("feedback:academy_survey_id", kwargs={"survey_id": 1}) + "?id=1,2" response = self.client.delete(url) json = response.json() - expected = {'detail': 'survey-id-and-lookups-together', 'status_code': 400} + expected = {"detail": "survey-id-and-lookups-together", "status_code": 400} 
self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('feedback.Survey'), self.bc.format.to_dict(model.survey)) + self.assertEqual(self.bc.database.list_of("feedback.Survey"), self.bc.format.to_dict(model.survey)) def test_academy_survey_id__delete(self): """Test /academy/survey_id delete.""" self.headers(academy=1) - model = self.generate_models(user=1, profile_academy=True, survey=1, capability='crud_survey', role=1) + model = self.generate_models(user=1, profile_academy=True, survey=1, capability="crud_survey", role=1) self.client.force_authenticate(model.user) - url = reverse_lazy('feedback:academy_survey_id', kwargs={'survey_id': 1}) + url = reverse_lazy("feedback:academy_survey_id", kwargs={"survey_id": 1}) response = self.client.delete(url) self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) - self.assertEqual(self.bc.database.list_of('feedback.Survey'), []) + self.assertEqual(self.bc.database.list_of("feedback.Survey"), []) def test_academy_survey_id__delete__not_found(self): """Test /academy/survey_id/ delete not found.""" self.headers(academy=1) - model = self.generate_models(user=1, profile_academy=True, capability='crud_survey', role=1) + model = self.generate_models(user=1, profile_academy=True, capability="crud_survey", role=1) self.client.force_authenticate(model.user) - url = reverse_lazy('feedback:academy_survey_id', kwargs={'survey_id': 1}) + url = reverse_lazy("feedback:academy_survey_id", kwargs={"survey_id": 1}) response = self.client.delete(url) json = response.json() - expected = {'detail': 'survey-not-found', 'status_code': 404} + expected = {"detail": "survey-not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - self.assertEqual(self.bc.database.list_of('feedback.Survey'), []) + self.assertEqual(self.bc.database.list_of("feedback.Survey"), []) def test_academy_survey_id__delete__not_answered(self): """Test /academy/survey_id/ delete not answered.""" - SURVEY_STATUS = ['PENDING', 'SENT', 'OPENED', 'EXPIRED'] + SURVEY_STATUS = ["PENDING", "SENT", "OPENED", "EXPIRED"] for x in SURVEY_STATUS: - answer = {'status': x} - model = self.generate_models(user=1, - profile_academy=True, - survey=1, - capability='crud_survey', - role=1, - answer=answer) + answer = {"status": x} + model = self.generate_models( + user=1, profile_academy=True, survey=1, capability="crud_survey", role=1, answer=answer + ) self.headers(academy=model.academy.id) self.client.force_authenticate(model.user) - url = reverse_lazy('feedback:academy_survey_id', kwargs={'survey_id': model.survey.id}) + url = reverse_lazy("feedback:academy_survey_id", kwargs={"survey_id": model.survey.id}) response = self.client.delete(url) self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) - self.assertEqual(self.bc.database.list_of('feedback.Survey'), []) + self.assertEqual(self.bc.database.list_of("feedback.Survey"), []) - @patch('breathecode.feedback.signals.survey_answered.send_robust', MagicMock()) + @patch("breathecode.feedback.signals.survey_answered.send_robust", MagicMock()) def test_academy_survey_id__delete__answered(self): """Test /academy/survey_id delete answered(self).""" self.headers(academy=1) - answer = {'status': 'ANSWERED'} - model = self.generate_models(user=1, - profile_academy=True, - survey=1, - capability='crud_survey', - role=1, - answer=answer) + answer = {"status": "ANSWERED"} + model = 
self.generate_models( + user=1, profile_academy=True, survey=1, capability="crud_survey", role=1, answer=answer + ) self.client.force_authenticate(model.user) - url = reverse_lazy('feedback:academy_survey_id', kwargs={'survey_id': 1}) + url = reverse_lazy("feedback:academy_survey_id", kwargs={"survey_id": 1}) response = self.client.delete(url) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('feedback.Survey'), [self.bc.format.to_dict(model.survey)]) + self.assertEqual(self.bc.database.list_of("feedback.Survey"), [self.bc.format.to_dict(model.survey)]) diff --git a/breathecode/feedback/tests/urls/tests_answer_id_tracker.py b/breathecode/feedback/tests/urls/tests_answer_id_tracker.py index 67cae5ab3..a53318d13 100644 --- a/breathecode/feedback/tests/urls/tests_answer_id_tracker.py +++ b/breathecode/feedback/tests/urls/tests_answer_id_tracker.py @@ -1,6 +1,7 @@ """ Test /answer/:id/tracker.png """ + import re from unittest.mock import patch from django.urls.base import reverse_lazy @@ -17,62 +18,67 @@ class AnswerIdTrackerTestSuite(FeedbackTestCase): """Test /answer/:id/tracker.png""" - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_answer_id_tracker_without_auth(self): """Test /answer/:id/tracker.png without auth""" - url = reverse_lazy('feedback:answer_id_tracker', kwargs={'answer_id': 9999}) + url = reverse_lazy("feedback:answer_id_tracker", kwargs={"answer_id": 9999}) response = self.client.get(url) - self.assertEqual(response['content-type'], 'image/png') + self.assertEqual(response["content-type"], "image/png") self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(self.count_answer(), 0) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_answer_id_tracker_without_data(self): """Test /answer/:id/tracker.png without auth""" self.generate_models(authenticate=True) - url = reverse_lazy('feedback:answer_id_tracker', kwargs={'answer_id': 9999}) + url = reverse_lazy("feedback:answer_id_tracker", kwargs={"answer_id": 9999}) response = self.client.get(url) - self.assertEqual(response['content-type'], 'image/png') + self.assertEqual(response["content-type"], "image/png") self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(self.count_answer(), 0) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_answer_id_tracker_with_data_without_status(self): """Test 
/answer/:id/tracker.png without auth""" model = self.generate_models(authenticate=True, answer=True) - url = reverse_lazy('feedback:answer_id_tracker', kwargs={'answer_id': model['answer'].id}) + url = reverse_lazy("feedback:answer_id_tracker", kwargs={"answer_id": model["answer"].id}) response = self.client.get(url) - self.assertEqual(response['content-type'], 'image/png') + self.assertEqual(response["content-type"], "image/png") self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('feedback.Answer'), [self.model_to_dict(model, 'answer')]) + self.assertEqual(self.bc.database.list_of("feedback.Answer"), [self.model_to_dict(model, "answer")]) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_answer_id_tracker_with_data(self): """Test /answer/:id/tracker.png without auth""" - answer_kwargs = {'status': 'SENT'} + answer_kwargs = {"status": "SENT"} model = self.generate_models(authenticate=True, answer=True, answer_kwargs=answer_kwargs) - url = reverse_lazy('feedback:answer_id_tracker', kwargs={'answer_id': model['answer'].id}) + url = reverse_lazy("feedback:answer_id_tracker", kwargs={"answer_id": model["answer"].id}) response = self.client.get(url) - self.assertEqual(response['content-type'], 'image/png') + self.assertEqual(response["content-type"], "image/png") self.assertEqual(response.status_code, status.HTTP_200_OK) - answers = self.bc.database.list_of('feedback.Answer') - self.assertDatetime(answers[0]['opened_at']) - answers[0]['opened_at'] = None + answers = self.bc.database.list_of("feedback.Answer") + self.assertDatetime(answers[0]["opened_at"]) + answers[0]["opened_at"] = None - self.assertEqual(answers, [{ - **self.model_to_dict(model, 'answer'), - 'status': 'OPENED', - }]) + self.assertEqual( + answers, + [ + { + **self.model_to_dict(model, "answer"), + "status": "OPENED", + } + ], + ) diff --git a/breathecode/feedback/tests/urls/tests_user_me_answer_answer_id.py b/breathecode/feedback/tests/urls/tests_user_me_answer_answer_id.py index a91ce90f0..d3ec4f8dc 100644 --- a/breathecode/feedback/tests/urls/tests_user_me_answer_answer_id.py +++ b/breathecode/feedback/tests/urls/tests_user_me_answer_answer_id.py @@ -1,6 +1,7 @@ """ Test /answer/:id """ + import re from datetime import datetime from unittest.mock import MagicMock, call, patch @@ -19,38 +20,38 @@ @pytest.fixture(autouse=True) def setup(db, monkeypatch): - monkeypatch.setattr('breathecode.feedback.signals.survey_answered.send_robust', MagicMock()) - monkeypatch.setattr(activity_tasks.add_activity, 'delay', MagicMock()) + monkeypatch.setattr("breathecode.feedback.signals.survey_answered.send_robust", MagicMock()) + monkeypatch.setattr(activity_tasks.add_activity, "delay", MagicMock()) yield def test_answer_id_without_auth(bc: Breathecode, client: APIClient): """Test /answer/:id without auth""" - url = reverse_lazy('feedback:user_me_answer_id', kwargs={'answer_id': 9999}) + url = reverse_lazy("feedback:user_me_answer_id", kwargs={"answer_id": 9999}) response = client.get(url) json = response.json() - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected 
= {"detail": "Authentication credentials were not provided.", "status_code": 401} assert json == expected assert response.status_code == status.HTTP_401_UNAUTHORIZED - assert bc.database.list_of('feedback.Answer') == [] + assert bc.database.list_of("feedback.Answer") == [] def test_answer_id_without_data(bc: Breathecode, client: APIClient): """Test /answer/:id without auth""" model = bc.database.create(user=1) client.force_authenticate(model.user) - url = reverse_lazy('feedback:user_me_answer_id', kwargs={'answer_id': 9999}) + url = reverse_lazy("feedback:user_me_answer_id", kwargs={"answer_id": 9999}) response = client.get(url) json = response.json() expected = { - 'detail': 'answer-of-other-user-or-not-exists', - 'status_code': 404, + "detail": "answer-of-other-user-or-not-exists", + "status_code": 404, } assert json == expected assert response.status_code == status.HTTP_404_NOT_FOUND - assert bc.database.list_of('feedback.Answer') == [] + assert bc.database.list_of("feedback.Answer") == [] def test_answer_id__answer_of_other_user(bc: Breathecode, client: APIClient): @@ -59,70 +60,70 @@ def test_answer_id__answer_of_other_user(bc: Breathecode, client: APIClient): client.force_authenticate(model.user) model = bc.database.create(answer=True) db = bc.format.to_dict(model.answer) - url = reverse_lazy('feedback:user_me_answer_id', kwargs={'answer_id': model['answer'].id}) + url = reverse_lazy("feedback:user_me_answer_id", kwargs={"answer_id": model["answer"].id}) response = client.get(url) json = response.json() - expected = {'detail': 'answer-of-other-user-or-not-exists', 'status_code': 404} + expected = {"detail": "answer-of-other-user-or-not-exists", "status_code": 404} assert json == expected assert response.status_code == status.HTTP_404_NOT_FOUND - assert bc.database.list_of('feedback.Answer') == [db] + assert bc.database.list_of("feedback.Answer") == [db] def test_answer_id_with_data(bc: Breathecode, client: APIClient): """Test /answer/:id without auth""" - answer_kwargs = {'status': 'SENT'} + answer_kwargs = {"status": "SENT"} model = bc.database.create(user=1, answer=True, answer_kwargs=answer_kwargs) client.force_authenticate(model.user) db = bc.format.to_dict(model.answer) - url = reverse_lazy('feedback:user_me_answer_id', kwargs={'answer_id': model['answer'].id}) + url = reverse_lazy("feedback:user_me_answer_id", kwargs={"answer_id": model["answer"].id}) response = client.get(url) json = response.json() expected = { - 'id': model['answer'].id, - 'title': model['answer'].title, - 'lowest': model['answer'].lowest, - 'highest': model['answer'].highest, - 'lang': model['answer'].lang, - 'score': model['answer'].score, - 'comment': model['answer'].comment, - 'status': model['answer'].status, - 'opened_at': model['answer'].opened_at, - 'created_at': datetime_to_iso_format(model['answer'].created_at), - 'updated_at': datetime_to_iso_format(model['answer'].updated_at), - 'cohort': model['answer'].cohort, - 'academy': model['answer'].academy, - 'mentor': { - 'first_name': model['answer'].mentor.first_name, - 'id': model['answer'].mentor.id, - 'last_name': model['answer'].mentor.last_name, - 'profile': None, + "id": model["answer"].id, + "title": model["answer"].title, + "lowest": model["answer"].lowest, + "highest": model["answer"].highest, + "lang": model["answer"].lang, + "score": model["answer"].score, + "comment": model["answer"].comment, + "status": model["answer"].status, + "opened_at": model["answer"].opened_at, + "created_at": datetime_to_iso_format(model["answer"].created_at), + 
"updated_at": datetime_to_iso_format(model["answer"].updated_at), + "cohort": model["answer"].cohort, + "academy": model["answer"].academy, + "mentor": { + "first_name": model["answer"].mentor.first_name, + "id": model["answer"].mentor.id, + "last_name": model["answer"].mentor.last_name, + "profile": None, }, - 'user': { - 'first_name': model['answer'].user.first_name, - 'id': model['answer'].user.id, - 'last_name': model['answer'].user.last_name, - 'profile': None, + "user": { + "first_name": model["answer"].user.first_name, + "id": model["answer"].user.id, + "last_name": model["answer"].user.last_name, + "profile": None, }, - 'event': model['answer'].event, + "event": model["answer"].event, } assert json == expected assert response.status_code == status.HTTP_200_OK - assert bc.database.list_of('feedback.Answer') == [db] + assert bc.database.list_of("feedback.Answer") == [db] def test_answer_id_put_with_bad_id(bc: Breathecode, client: APIClient): """Test /answer/:id without auth""" model = bc.database.create(user=1) client.force_authenticate(model.user) - url = reverse_lazy('feedback:user_me_answer_id', kwargs={'answer_id': 9999}) + url = reverse_lazy("feedback:user_me_answer_id", kwargs={"answer_id": 9999}) response = client.put(url, {}) json = response.json() expected = { - 'detail': 'answer-of-other-user-or-not-exists', - 'status_code': 404, + "detail": "answer-of-other-user-or-not-exists", + "status_code": 404, } assert json == expected @@ -133,70 +134,70 @@ def test_answer_id_put_with_bad_id(bc: Breathecode, client: APIClient): def test_answer_id_put_without_score(bc: Breathecode, client: APIClient): """Test /answer/:id without auth""" - answer_kwargs = {'status': 'SENT'} + answer_kwargs = {"status": "SENT"} model = bc.database.create(user=1, answer=True, answer_kwargs=answer_kwargs) client.force_authenticate(model.user) db = bc.format.to_dict(model.answer) data = { - 'comment': 'They killed kenny', + "comment": "They killed kenny", } - url = reverse_lazy('feedback:user_me_answer_id', kwargs={'answer_id': model['answer'].id}) + url = reverse_lazy("feedback:user_me_answer_id", kwargs={"answer_id": model["answer"].id}) response = client.put(url, data) json = response.json() - assert json == {'non_field_errors': ['Score must be between 1 and 10']} + assert json == {"non_field_errors": ["Score must be between 1 and 10"]} assert response.status_code == status.HTTP_400_BAD_REQUEST - assert bc.database.list_of('feedback.Answer') == [db] + assert bc.database.list_of("feedback.Answer") == [db] assert survey_answered.send_robust.call_args_list == [] assert activity_tasks.add_activity.delay.call_args_list == [] def test_answer_id_put_with_score_less_of_1(bc: Breathecode, client: APIClient): """Test /answer/:id without auth""" - answer_kwargs = {'status': 'SENT'} + answer_kwargs = {"status": "SENT"} model = bc.database.create(user=1, answer=True, answer_kwargs=answer_kwargs) client.force_authenticate(model.user) db = bc.format.to_dict(model.answer) - url = reverse_lazy('feedback:user_me_answer_id', kwargs={'answer_id': model['answer'].id}) + url = reverse_lazy("feedback:user_me_answer_id", kwargs={"answer_id": model["answer"].id}) data = { - 'comment': 'They killed kenny', - 'score': 0, + "comment": "They killed kenny", + "score": 0, } response = client.put(url, data) json = response.json() - assert json == {'non_field_errors': ['Score must be between 1 and 10']} + assert json == {"non_field_errors": ["Score must be between 1 and 10"]} assert response.status_code == status.HTTP_400_BAD_REQUEST - 
assert bc.database.list_of('feedback.Answer') == [db] + assert bc.database.list_of("feedback.Answer") == [db] assert survey_answered.send_robust.call_args_list == [] assert activity_tasks.add_activity.delay.call_args_list == [] def test_answer_id_put_with_score_more_of_10(bc: Breathecode, client: APIClient): """Test /answer/:id without auth""" - answer_kwargs = {'status': 'SENT'} + answer_kwargs = {"status": "SENT"} model = bc.database.create(user=1, answer=True, answer_kwargs=answer_kwargs) client.force_authenticate(model.user) db = bc.format.to_dict(model.answer) - url = reverse_lazy('feedback:user_me_answer_id', kwargs={'answer_id': model['answer'].id}) + url = reverse_lazy("feedback:user_me_answer_id", kwargs={"answer_id": model["answer"].id}) data = { - 'comment': 'They killed kenny', - 'score': 11, + "comment": "They killed kenny", + "score": 11, } response = client.put(url, data) json = response.json() - assert json == {'non_field_errors': ['Score must be between 1 and 10']} + assert json == {"non_field_errors": ["Score must be between 1 and 10"]} assert response.status_code == status.HTTP_400_BAD_REQUEST - assert bc.database.list_of('feedback.Answer') == [db] + assert bc.database.list_of("feedback.Answer") == [db] assert survey_answered.send_robust.call_args_list == [] assert activity_tasks.add_activity.delay.call_args_list == [] -@pytest.mark.parametrize('score', [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]) +@pytest.mark.parametrize("score", [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]) def test_answer_id_put_with_all_valid_scores(bc: Breathecode, client: APIClient, score): """Test /answer/:id without auth""" - answer_kwargs = {'status': 'SENT'} + answer_kwargs = {"status": "SENT"} answers = [] model = bc.database.create( @@ -207,50 +208,51 @@ def test_answer_id_put_with_all_valid_scores(bc: Breathecode, client: APIClient, client.force_authenticate(model.user) answers.append(model.answer) db = bc.format.to_dict(model.answer) - url = reverse_lazy('feedback:user_me_answer_id', kwargs={'answer_id': model['answer'].id}) + url = reverse_lazy("feedback:user_me_answer_id", kwargs={"answer_id": model["answer"].id}) data = { - 'comment': 'They killed kenny', - 'score': score, + "comment": "They killed kenny", + "score": score, } response = client.put(url, data) json = response.json() expected = { - 'id': model['answer'].id, - 'title': model['answer'].title, - 'lowest': model['answer'].lowest, - 'highest': model['answer'].highest, - 'lang': model['answer'].lang, - 'score': score, - 'comment': data['comment'], - 'status': 'ANSWERED', - 'opened_at': model['answer'].opened_at, - 'created_at': datetime_to_iso_format(model['answer'].created_at), - 'cohort': model['answer'].cohort, - 'academy': model['answer'].academy, - 'survey': None, - 'mentorship_session': None, - 'sent_at': None, - 'mentor': model['answer'].mentor.id, - 'event': model['answer'].event, - 'user': model['answer'].user.id, + "id": model["answer"].id, + "title": model["answer"].title, + "lowest": model["answer"].lowest, + "highest": model["answer"].highest, + "lang": model["answer"].lang, + "score": score, + "comment": data["comment"], + "status": "ANSWERED", + "opened_at": model["answer"].opened_at, + "created_at": datetime_to_iso_format(model["answer"].created_at), + "cohort": model["answer"].cohort, + "academy": model["answer"].academy, + "survey": None, + "mentorship_session": None, + "sent_at": None, + "mentor": model["answer"].mentor.id, + "event": model["answer"].event, + "user": model["answer"].user.id, } - del json['updated_at'] + del 
json["updated_at"] assert json == expected dicts = [ - answer for answer in bc.database.list_of('feedback.Answer') - if not 'updated_at' in answer or isinstance(answer['updated_at'], datetime) and answer.pop('updated_at') + answer + for answer in bc.database.list_of("feedback.Answer") + if not "updated_at" in answer or isinstance(answer["updated_at"], datetime) and answer.pop("updated_at") ] assert response.status_code == status.HTTP_200_OK - db['score'] = score - db['status'] = 'ANSWERED' - db['comment'] = data['comment'] + db["score"] = score + db["status"] = "ANSWERED" + db["comment"] = data["comment"] assert dicts == [db] @@ -258,7 +260,7 @@ def test_answer_id_put_with_all_valid_scores(bc: Breathecode, client: APIClient, call(instance=model.answer, sender=model.answer.__class__), ] assert activity_tasks.add_activity.delay.call_args_list == [ - call(model.user.id, 'nps_answered', related_type='feedback.Answer', related_id=model.answer.id), + call(model.user.id, "nps_answered", related_type="feedback.Answer", related_id=model.answer.id), ] @@ -288,15 +290,15 @@ def test_answer_id_put_with_all_valid_scores(bc: Breathecode, client: APIClient, def test_answer_id_put_twice_same_score(bc: Breathecode, client: APIClient): """Test /answer/:id without auth""" - answer_kwargs = {'status': 'SENT', 'score': 3} + answer_kwargs = {"status": "SENT", "score": 3} model = bc.database.create(user=1, answer=True, answer_kwargs=answer_kwargs) client.force_authenticate(model.user) db = bc.format.to_dict(model.answer) - url = reverse_lazy('feedback:user_me_answer_id', kwargs={'answer_id': model['answer'].id}) + url = reverse_lazy("feedback:user_me_answer_id", kwargs={"answer_id": model["answer"].id}) data = { - 'comment': 'They killed kenny', - 'score': 3, + "comment": "They killed kenny", + "score": 3, } client.put(url, data) response = client.put(url, data) @@ -304,13 +306,13 @@ def test_answer_id_put_twice_same_score(bc: Breathecode, client: APIClient): assert response.status_code == status.HTTP_200_OK - db['score'] = data['score'] - db['status'] = 'ANSWERED' - db['comment'] = data['comment'] + db["score"] = data["score"] + db["status"] = "ANSWERED" + db["comment"] = data["comment"] - assert bc.database.list_of('feedback.Answer') == [db] + assert bc.database.list_of("feedback.Answer") == [db] assert survey_answered.send_robust.call_args_list == [call(instance=model.answer, sender=model.answer.__class__)] assert activity_tasks.add_activity.delay.call_args_list == [ - call(1, 'nps_answered', related_type='feedback.Answer', related_id=1), - call(1, 'nps_answered', related_type='feedback.Answer', related_id=1), + call(1, "nps_answered", related_type="feedback.Answer", related_id=1), + call(1, "nps_answered", related_type="feedback.Answer", related_id=1), ] diff --git a/breathecode/feedback/urls.py b/breathecode/feedback/urls.py index 7dcee3c06..336f32470 100644 --- a/breathecode/feedback/urls.py +++ b/breathecode/feedback/urls.py @@ -1,21 +1,29 @@ from django.urls import path -from .views import (AnswerMeView, GetAnswerView, track_survey_open, get_survey_questions, SurveyView, AcademyAnswerView, - get_reviews, ReviewView, get_review_platform) +from .views import ( + AnswerMeView, + GetAnswerView, + track_survey_open, + get_survey_questions, + SurveyView, + AcademyAnswerView, + get_reviews, + ReviewView, + get_review_platform, +) -app_name = 'feedback' +app_name = "feedback" urlpatterns = [ - path('academy/answer', GetAnswerView.as_view(), name='answer'), - path('answer/<int:answer_id>/tracker.png', 
track_survey_open, name='answer_id_tracker'), - path('user/me/answer/<int:answer_id>', AnswerMeView.as_view(), name='user_me_answer_id'), - path('academy/survey', SurveyView.as_view(), name='academy_survey'), - path('academy/survey/<int:survey_id>', SurveyView.as_view(), name='academy_survey_id'), - path('user/me/survey/<int:survey_id>/questions', get_survey_questions), - path('review', get_reviews, name='review'), - path('academy/review', ReviewView.as_view(), name='review'), - path('academy/review/<int:review_id>', ReviewView.as_view(), name='review_id'), - path('review_platform', get_review_platform, name='review_platform'), - path('review_platform/<str:platform_slug>', get_review_platform, name='review_platform'), - + path("academy/answer", GetAnswerView.as_view(), name="answer"), + path("answer/<int:answer_id>/tracker.png", track_survey_open, name="answer_id_tracker"), + path("user/me/answer/<int:answer_id>", AnswerMeView.as_view(), name="user_me_answer_id"), + path("academy/survey", SurveyView.as_view(), name="academy_survey"), + path("academy/survey/<int:survey_id>", SurveyView.as_view(), name="academy_survey_id"), + path("user/me/survey/<int:survey_id>/questions", get_survey_questions), + path("review", get_reviews, name="review"), + path("academy/review", ReviewView.as_view(), name="review"), + path("academy/review/<int:review_id>", ReviewView.as_view(), name="review_id"), + path("review_platform", get_review_platform, name="review_platform"), + path("review_platform/<str:platform_slug>", get_review_platform, name="review_platform"), # TODO: missing tests - path('academy/answer/<int:answer_id>', AcademyAnswerView.as_view(), name='academy_answer_id'), + path("academy/answer/<int:answer_id>", AcademyAnswerView.as_view(), name="academy_answer_id"), ] diff --git a/breathecode/feedback/utils.py b/breathecode/feedback/utils.py index 03c10f78b..0ceabd0f9 100644 --- a/breathecode/feedback/utils.py +++ b/breathecode/feedback/utils.py @@ -1,65 +1,62 @@ strings = { - 'es': { - 'event': { - 'title': '¿Que tan probable es que recomiendes eventos como estos a tus familiares y amigos?', - 'highest': 'muy probable', - 'lowest': 'poco probable', - }, - 'mentor': { - 'title': '¿Como ha sido tu experiencia con tu mentor {}?', - 'highest': 'muy buena', - 'lowest': 'mala', - }, - 'cohort': { - 'title': '¿Cómo ha sido tu experiencia estudiando {}?', - 'highest': 'muy buena', - 'lowest': 'mala', - }, - 'academy': { - 'title': '¿Qué tan probable es que recomiendes {} a tus amigos y familiares?', - 'highest': 'muy probable', - 'lowest': 'poco probable', - }, - 'session': { - 'title': '¿Cómo fue tu experiencia en tu mentoría con {}?', - 'highest': 'muy útil', - 'lowest': 'poco útil', - }, - 'button_label': - 'Responder', - 'survey_subject': - 'Necesitamos tu feedback', - 'survey_message': - 'Por favor toma 5 minutos para enviarnos un feedback sobre tu experiencia en la academia hasta ahora', + "es": { + "event": { + "title": "¿Que tan probable es que recomiendes eventos como estos a tus familiares y amigos?", + "highest": "muy probable", + "lowest": "poco probable", + }, + "mentor": { + "title": "¿Como ha sido tu experiencia con tu mentor {}?", + "highest": "muy buena", + "lowest": "mala", + }, + "cohort": { + "title": "¿Cómo ha sido tu experiencia estudiando {}?", + "highest": "muy buena", + "lowest": "mala", + }, + "academy": { + "title": "¿Qué tan probable es que recomiendes {} a tus amigos y familiares?", + "highest": "muy probable", + "lowest": "poco probable", + }, + "session": { + "title": "¿Cómo 
fue tu experiencia en tu mentoría con {}?", + "highest": "muy útil", + "lowest": "poco útil", + }, + "button_label": "Responder", + "survey_subject": "Necesitamos tu feedback", + "survey_message": "Por favor toma 5 minutos para enviarnos un feedback sobre tu experiencia en la academia hasta ahora", + }, + "en": { + "event": { + "title": "How likely are you to recommend upcomint events to your friends and family?", + "highest": "very likely", + "lowest": "not likely", + }, + "mentor": { + "title": "How has been your experience with your mentor {} so far?", + "highest": "very good", + "lowest": "not good", + }, + "cohort": { + "title": "How has been your experience studying {} so far?", + "highest": "very good", + "lowest": "not good", + }, + "academy": { + "title": "How likely are you to recommend {} to your friends and family?", + "highest": "very likely", + "lowest": "not likely", + }, + "session": { + "title": "How was your experience in your mentoring with {}?", + "highest": "muy útil", + "lowest": "poco útil", + }, + "button_label": "Answer the question", + "survey_subject": "We need your feedback", + "survey_message": "Please take 5 minutes to give us feedback about your experience at the academy so far.", }, - 'en': { - 'event': { - 'title': 'How likely are you to recommend upcomint events to your friends and family?', - 'highest': 'very likely', - 'lowest': 'not likely', - }, - 'mentor': { - 'title': 'How has been your experience with your mentor {} so far?', - 'highest': 'very good', - 'lowest': 'not good', - }, - 'cohort': { - 'title': 'How has been your experience studying {} so far?', - 'highest': 'very good', - 'lowest': 'not good', - }, - 'academy': { - 'title': 'How likely are you to recommend {} to your friends and family?', - 'highest': 'very likely', - 'lowest': 'not likely', - }, - 'session': { - 'title': 'How was your experience in your mentoring with {}?', - 'highest': 'muy útil', - 'lowest': 'poco útil', - }, - 'button_label': 'Answer the question', - 'survey_subject': 'We need your feedback', - 'survey_message': 'Please take 5 minutes to give us feedback about your experience at the academy so far.', - } } diff --git a/breathecode/feedback/views.py b/breathecode/feedback/views.py index 843c0d3e0..f045ea989 100644 --- a/breathecode/feedback/views.py +++ b/breathecode/feedback/views.py @@ -33,45 +33,45 @@ from .tasks import generate_user_cohort_survey_answers -@api_view(['GET']) +@api_view(["GET"]) @permission_classes([AllowAny]) def track_survey_open(request, answer_id=None): item = None if answer_id is not None: - item = Answer.objects.filter(id=answer_id, status='SENT').first() + item = Answer.objects.filter(id=answer_id, status="SENT").first() if item is not None: - item.status = 'OPENED' + item.status = "OPENED" item.opened_at = timezone.now() item.save() - image = Image.new('RGB', (1, 1)) - response = HttpResponse(content_type='image/png') - image.save(response, 'PNG') + image = Image.new("RGB", (1, 1)) + response = HttpResponse(content_type="image/png") + image.save(response, "PNG") return response -@api_view(['GET']) +@api_view(["GET"]) def get_survey_questions(request, survey_id=None): survey = Survey.objects.filter(id=survey_id).first() if survey is None: - raise ValidationException('Survey not found', 404) + raise ValidationException("Survey not found", 404) utc_now = timezone.now() if utc_now > survey.sent_at + survey.duration: - raise ValidationException('This survey has already expired', 400) + raise ValidationException("This survey has already expired", 
400) - cu = CohortUser.objects.filter(cohort=survey.cohort, role='STUDENT', user=request.user).first() + cu = CohortUser.objects.filter(cohort=survey.cohort, role="STUDENT", user=request.user).first() if cu is None: - raise ValidationException('This student does not belong to this cohort', 400) + raise ValidationException("This student does not belong to this cohort", 400) - cohort_teacher = CohortUser.objects.filter(cohort=survey.cohort, role='TEACHER') + cohort_teacher = CohortUser.objects.filter(cohort=survey.cohort, role="TEACHER") if cohort_teacher.count() == 0: - raise ValidationException('This cohort must have a teacher assigned to be able to survey it', 400) + raise ValidationException("This cohort must have a teacher assigned to be able to survey it", 400) - answers = generate_user_cohort_survey_answers(request.user, survey, status='OPENED') + answers = generate_user_cohort_survey_answers(request.user, survey, status="OPENED") serializer = AnswerSerializer(answers, many=True) return Response(serializer.data, status=status.HTTP_200_OK) @@ -82,9 +82,9 @@ class GetAnswerView(APIView): List all snippets, or create a new snippet. """ - extensions = APIViewExtensions(cache=AnswerCache, sort='-created_at', paginate=True) + extensions = APIViewExtensions(cache=AnswerCache, sort="-created_at", paginate=True) - @capable_of('read_nps_answers') + @capable_of("read_nps_answers") def get(self, request, format=None, academy_id=None): handler = self.extensions(request) @@ -95,39 +95,39 @@ def get(self, request, format=None, academy_id=None): items = Answer.objects.filter(academy__id=academy_id) lookup = {} - users = request.GET.get('user', None) - if users is not None and users != '': - items = items.filter(user__id__in=users.split(',')) + users = request.GET.get("user", None) + if users is not None and users != "": + items = items.filter(user__id__in=users.split(",")) - cohorts = request.GET.get('cohort', None) - if cohorts is not None and cohorts != '': - items = items.filter(cohort__slug__in=cohorts.split(',')) + cohorts = request.GET.get("cohort", None) + if cohorts is not None and cohorts != "": + items = items.filter(cohort__slug__in=cohorts.split(",")) - mentors = request.GET.get('mentor', None) - if mentors is not None and mentors != '': - items = items.filter(mentor__id__in=mentors.split(',')) + mentors = request.GET.get("mentor", None) + if mentors is not None and mentors != "": + items = items.filter(mentor__id__in=mentors.split(",")) - events = request.GET.get('event', None) - if events is not None and events != '': - items = items.filter(event__id__in=events.split(',')) + events = request.GET.get("event", None) + if events is not None and events != "": + items = items.filter(event__id__in=events.split(",")) - score = request.GET.get('score', None) - if score is not None and score != '': - lookup['score'] = score + score = request.GET.get("score", None) + if score is not None and score != "": + lookup["score"] = score - _status = request.GET.get('status', None) - if _status is not None and _status != '': - items = items.filter(status__in=_status.split(',')) + _status = request.GET.get("status", None) + if _status is not None and _status != "": + items = items.filter(status__in=_status.split(",")) - surveys = request.GET.get('survey', None) - if surveys is not None and surveys != '': - items = items.filter(survey__id__in=surveys.split(',')) + surveys = request.GET.get("survey", None) + if surveys is not None and surveys != "": + items = items.filter(survey__id__in=surveys.split(",")) 
items = items.filter(**lookup) - like = request.GET.get('like', None) + like = request.GET.get("like", None) if like is not None: - items = query_like_by_full_name(like=like, items=items, prefix='user__') + items = query_like_by_full_name(like=like, items=items, prefix="user__") items = handler.queryset(items) serializer = AnswerSerializer(items, many=True) @@ -142,33 +142,32 @@ class AnswerMeView(APIView): def put(self, request, answer_id=None): if answer_id is None: - raise ValidationException('Missing answer_id', slug='missing-answer-id') + raise ValidationException("Missing answer_id", slug="missing-answer-id") answer = Answer.objects.filter(user=request.user, id=answer_id).first() if answer is None: - raise ValidationException('This survey does not exist for this user', - code=404, - slug='answer-of-other-user-or-not-exists') + raise ValidationException( + "This survey does not exist for this user", code=404, slug="answer-of-other-user-or-not-exists" + ) - serializer = AnswerPUTSerializer(answer, data=request.data, context={'request': request, 'answer': answer_id}) + serializer = AnswerPUTSerializer(answer, data=request.data, context={"request": request, "answer": answer_id}) if serializer.is_valid(): - tasks_activity.add_activity.delay(request.user.id, - 'nps_answered', - related_type='feedback.Answer', - related_id=answer_id) + tasks_activity.add_activity.delay( + request.user.id, "nps_answered", related_type="feedback.Answer", related_id=answer_id + ) serializer.save() return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) def get(self, request, answer_id=None): if answer_id is None: - raise ValidationException('Missing answer_id', slug='missing-answer-id') + raise ValidationException("Missing answer_id", slug="missing-answer-id") answer = Answer.objects.filter(user=request.user, id=answer_id).first() if answer is None: - raise ValidationException('This survey does not exist for this user', - code=404, - slug='answer-of-other-user-or-not-exists') + raise ValidationException( + "This survey does not exist for this user", code=404, slug="answer-of-other-user-or-not-exists" + ) serializer = BigAnswerSerializer(answer) return Response(serializer.data, status=status.HTTP_200_OK) @@ -176,14 +175,14 @@ def get(self, request, answer_id=None): class AcademyAnswerView(APIView): - @capable_of('read_nps_answers') + @capable_of("read_nps_answers") def get(self, request, academy_id=None, answer_id=None): if answer_id is None: - raise ValidationException('Missing answer_id', code=404) + raise ValidationException("Missing answer_id", code=404) answer = Answer.objects.filter(academy__id=academy_id, id=answer_id).first() if answer is None: - raise ValidationException('This survey does not exist for this academy') + raise ValidationException("This survey does not exist for this academy") serializer = BigAnswerSerializer(answer) return Response(serializer.data, status=status.HTTP_200_OK) @@ -194,10 +193,10 @@ class SurveyView(APIView, HeaderLimitOffsetPagination, GenerateLookupsMixin): List all snippets, or create a new snippet. 
""" - @capable_of('crud_survey') + @capable_of("crud_survey") def post(self, request, academy_id=None): - serializer = SurveySerializer(data=request.data, context={'request': request, 'academy_id': academy_id}) + serializer = SurveySerializer(data=request.data, context={"request": request, "academy_id": academy_id}) if serializer.is_valid(): serializer.save() return Response(serializer.data, status=status.HTTP_201_CREATED) @@ -207,34 +206,30 @@ def post(self, request, academy_id=None): List all snippets, or create a new snippet. """ - @capable_of('crud_survey') + @capable_of("crud_survey") def put(self, request, survey_id=None, academy_id=None): if survey_id is None: - raise ValidationException('Missing survey_id') + raise ValidationException("Missing survey_id") survey = Survey.objects.filter(id=survey_id).first() if survey is None: - raise NotFound('This survey does not exist') - - serializer = SurveyPUTSerializer(survey, - data=request.data, - context={ - 'request': request, - 'survey': survey_id, - 'academy_id': academy_id - }) + raise NotFound("This survey does not exist") + + serializer = SurveyPUTSerializer( + survey, data=request.data, context={"request": request, "survey": survey_id, "academy_id": academy_id} + ) if serializer.is_valid(): serializer.save() return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - @capable_of('read_survey') + @capable_of("read_survey") def get(self, request, survey_id=None, academy_id=None): if survey_id is not None: survey = Survey.objects.filter(id=survey_id).first() if survey is None: - raise NotFound('This survey does not exist') + raise NotFound("This survey does not exist") serializer = SurveySerializer(survey) return Response(serializer.data, status=status.HTTP_200_OK) @@ -242,21 +237,21 @@ def get(self, request, survey_id=None, academy_id=None): items = Survey.objects.filter(cohort__academy__id=academy_id) lookup = {} - if 'status' in self.request.GET: - param = self.request.GET.get('status') - lookup['status'] = param + if "status" in self.request.GET: + param = self.request.GET.get("status") + lookup["status"] = param - if 'cohort' in self.request.GET: - param = self.request.GET.get('cohort') - lookup['cohort__slug'] = param + if "cohort" in self.request.GET: + param = self.request.GET.get("cohort") + lookup["cohort__slug"] = param - if 'lang' in self.request.GET: - param = self.request.GET.get('lang') - lookup['lang'] = param + if "lang" in self.request.GET: + param = self.request.GET.get("lang") + lookup["lang"] = param - sort = self.request.GET.get('sort') + sort = self.request.GET.get("sort") if sort is None: - sort = '-created_at' + sort = "-created_at" items = items.filter(**lookup).order_by(sort) page = self.paginate_queryset(items, request) @@ -267,54 +262,55 @@ def get(self, request, survey_id=None, academy_id=None): else: return Response(serializer.data, status=status.HTTP_200_OK) - @capable_of('crud_survey') + @capable_of("crud_survey") def delete(self, request, academy_id=None, survey_id=None): - lookups = self.generate_lookups(request, many_fields=['id']) + lookups = self.generate_lookups(request, many_fields=["id"]) if lookups and survey_id: raise ValidationException( - 'survey_id was provided in url ' - 'in bulk mode request, use querystring style instead', + "survey_id was provided in url " "in bulk mode request, use querystring style instead", code=400, - slug='survey-id-and-lookups-together') + slug="survey-id-and-lookups-together", + ) if not 
lookups and not survey_id: - raise ValidationException('survey_id was not provided in url', - code=400, - slug='without-survey-id-and-lookups') + raise ValidationException( + "survey_id was not provided in url", code=400, slug="without-survey-id-and-lookups" + ) if lookups: - items = Survey.objects.filter(**lookups, cohort__academy__id=academy_id).exclude(status='SENT') + items = Survey.objects.filter(**lookups, cohort__academy__id=academy_id).exclude(status="SENT") ids = [item.id for item in items] - if answers := Answer.objects.filter(survey__id__in=ids, status='ANSWERED'): + if answers := Answer.objects.filter(survey__id__in=ids, status="ANSWERED"): slugs = set([answer.survey.cohort.slug for answer in answers]) raise ValidationException( f'Survey cannot be deleted because it has been answered for cohorts {", ".join(slugs)}', code=400, - slug='survey-cannot-be-deleted') + slug="survey-cannot-be-deleted", + ) for item in items: item.delete() return Response(None, status=status.HTTP_204_NO_CONTENT) - sur = Survey.objects.filter(id=survey_id, cohort__academy__id=academy_id).exclude(status='SENT').first() + sur = Survey.objects.filter(id=survey_id, cohort__academy__id=academy_id).exclude(status="SENT").first() if sur is None: - raise ValidationException('Survey not found', 404, slug='survey-not-found') + raise ValidationException("Survey not found", 404, slug="survey-not-found") - if Answer.objects.filter(survey__id=survey_id, status='ANSWERED'): - raise ValidationException('Survey cannot be deleted', code=400, slug='survey-cannot-be-deleted') + if Answer.objects.filter(survey__id=survey_id, status="ANSWERED"): + raise ValidationException("Survey cannot be deleted", code=400, slug="survey-cannot-be-deleted") sur.delete() return Response(None, status=status.HTTP_204_NO_CONTENT) -@api_view(['GET']) +@api_view(["GET"]) @permission_classes([AllowAny]) def get_review_platform(request, platform_slug=None): @@ -325,36 +321,38 @@ def get_review_platform(request, platform_slug=None): serializer = ReviewPlatformSerializer(items, many=False) return Response(serializer.data) else: - raise ValidationException('Review platform not found', slug='reivew_platform_not_found', code=404) + raise ValidationException("Review platform not found", slug="reivew_platform_not_found", code=404) else: serializer = ReviewPlatformSerializer(items, many=True) return Response(serializer.data) -@api_view(['GET']) +@api_view(["GET"]) @permission_classes([AllowAny]) def get_reviews(request): """ List all snippets, or create a new snippet. 
""" - items = Review.objects.filter(is_public=True, - status='DONE', - comments__isnull=False, - total_rating__isnull=False, - total_rating__gt=0, - total_rating__lte=10).exclude(comments__exact='') + items = Review.objects.filter( + is_public=True, + status="DONE", + comments__isnull=False, + total_rating__isnull=False, + total_rating__gt=0, + total_rating__lte=10, + ).exclude(comments__exact="") lookup = {} - if 'academy' in request.GET: - param = request.GET.get('academy') - lookup['cohort__academy__id'] = param + if "academy" in request.GET: + param = request.GET.get("academy") + lookup["cohort__academy__id"] = param - if 'lang' in request.GET: - param = request.GET.get('lang') - lookup['lang'] = param + if "lang" in request.GET: + param = request.GET.get("lang") + lookup["lang"] = param - items = items.filter(**lookup).order_by('-created_at') + items = items.filter(**lookup).order_by("-created_at") serializer = ReviewSmallSerializer(items, many=True) return Response(serializer.data) @@ -365,48 +363,48 @@ class ReviewView(APIView, HeaderLimitOffsetPagination, GenerateLookupsMixin): List all snippets, or create a new snippet. """ - @capable_of('read_review') + @capable_of("read_review") def get(self, request, format=None, academy_id=None): academy = Academy.objects.get(id=academy_id) items = Review.objects.filter(cohort__academy__id=academy.id) lookup = {} - start = request.GET.get('start', None) + start = request.GET.get("start", None) if start is not None: - start_date = datetime.strptime(start, '%Y-%m-%d').date() - lookup['created_at__gte'] = start_date + start_date = datetime.strptime(start, "%Y-%m-%d").date() + lookup["created_at__gte"] = start_date - end = request.GET.get('end', None) + end = request.GET.get("end", None) if end is not None: - end_date = datetime.strptime(end, '%Y-%m-%d').date() - lookup['created_at__lte'] = end_date + end_date = datetime.strptime(end, "%Y-%m-%d").date() + lookup["created_at__lte"] = end_date - if 'status' in self.request.GET: - param = self.request.GET.get('status') - lookup['status'] = param + if "status" in self.request.GET: + param = self.request.GET.get("status") + lookup["status"] = param - if 'platform' in self.request.GET: - param = self.request.GET.get('platform') + if "platform" in self.request.GET: + param = self.request.GET.get("platform") items = items.filter(platform__name__icontains=param) - if 'cohort' in self.request.GET: - param = self.request.GET.get('cohort') - lookup['cohort__id'] = param + if "cohort" in self.request.GET: + param = self.request.GET.get("cohort") + lookup["cohort__id"] = param - if 'author' in self.request.GET: - param = self.request.GET.get('author') - lookup['author__id'] = param + if "author" in self.request.GET: + param = self.request.GET.get("author") + lookup["author__id"] = param - sort_by = '-created_at' - if 'sort' in self.request.GET and self.request.GET['sort'] != '': - sort_by = self.request.GET.get('sort') + sort_by = "-created_at" + if "sort" in self.request.GET and self.request.GET["sort"] != "": + sort_by = self.request.GET.get("sort") items = items.filter(**lookup).order_by(sort_by) - like = request.GET.get('like', None) + like = request.GET.get("like", None) if like is not None: - items = query_like_by_full_name(like=like, items=items, prefix='author__') + items = query_like_by_full_name(like=like, items=items, prefix="author__") page = self.paginate_queryset(items, request) serializer = ReviewSmallSerializer(page, many=True) @@ -416,38 +414,34 @@ def get(self, request, format=None, 
academy_id=None): else: return Response(serializer.data, status=200) - @capable_of('crud_review') + @capable_of("crud_review") def put(self, request, review_id, academy_id=None): review = Review.objects.filter(id=review_id, cohort__academy__id=academy_id).first() if review is None: - raise NotFound('This review does not exist on this academy') - - serializer = ReviewPUTSerializer(review, - data=request.data, - context={ - 'request': request, - 'review': review_id, - 'academy_id': academy_id - }) + raise NotFound("This review does not exist on this academy") + + serializer = ReviewPUTSerializer( + review, data=request.data, context={"request": request, "review": review_id, "academy_id": academy_id} + ) if serializer.is_valid(): serializer.save() return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - @capable_of('crud_review') + @capable_of("crud_review") def delete(self, request, academy_id=None): # TODO: here i don't add one single delete, because i don't know if it is required - lookups = self.generate_lookups(request, many_fields=['id']) + lookups = self.generate_lookups(request, many_fields=["id"]) # automation_objects if not lookups: - raise ValidationException('Missing parameters in the querystring', code=400) + raise ValidationException("Missing parameters in the querystring", code=400) items = Review.objects.filter(**lookups, academy__id=academy_id) for item in items: - item.status = 'IGNORE' + item.status = "IGNORE" item.save() return Response(None, status=status.HTTP_204_NO_CONTENT) diff --git a/breathecode/freelance/actions.py b/breathecode/freelance/actions.py index ce461149a..1540dab0a 100644 --- a/breathecode/freelance/actions.py +++ b/breathecode/freelance/actions.py @@ -15,7 +15,7 @@ def get_hours(content): - p = re.compile(r'<hrs>(\d+\.?\d*)</hrs>') + p = re.compile(r"<hrs>(\d+\.?\d*)</hrs>") result = p.search(content) hours = None if result is not None: @@ -24,7 +24,7 @@ def get_hours(content): def get_status(content): - p = re.compile('<status>(\\w+)</status>') + p = re.compile("<status>(\\w+)</status>") result = p.search(content) status = None if result is not None: @@ -41,19 +41,19 @@ def update_status_based_on_github_action(github_action, issue): # Possible github action: # opened, edited, deleted, pinned, unpinned, closed, reopened, assigned, unassigned, labeled, unlabeled, locked, unlocked, transferred, milestoned, or demilestoned. 
if issue is None: - return 'DRAFT' + return "DRAFT" - if issue.status == 'IGNORED': - return 'IGNORED' + if issue.status == "IGNORED": + return "IGNORED" - if github_action == 'reopened': - return 'TODO' + if github_action == "reopened": + return "TODO" - if github_action == 'deleted': - return 'IGNORED' + if github_action == "deleted": + return "IGNORED" - if github_action == 'closed': - return 'DONE' + if github_action == "closed": + return "DONE" return issue.status @@ -62,37 +62,36 @@ def sync_single_issue(issue, comment=None, freelancer=None, incoming_github_acti if isinstance(issue, dict) == False: issue = { - 'id': issue.number, - 'title': issue.title, - 'url': issue.html_url, - 'body': issue.body, - 'html_url': issue.html_url, - 'assignees': [({ - 'id': a.id - }) for a in issue.assignees], + "id": issue.number, + "title": issue.title, + "url": issue.html_url, + "body": issue.body, + "html_url": issue.html_url, + "assignees": [({"id": a.id}) for a in issue.assignees], } - if 'issue' in issue: - issue = issue['issue'] + if "issue" in issue: + issue = issue["issue"] issue_number = None - if 'number' in issue: - issue_number = issue['number'] + if "number" in issue: + issue_number = issue["number"] node_id = None - if 'node_id' in issue: - node_id = issue['node_id'] + if "node_id" in issue: + node_id = issue["node_id"] else: logger.info( - f'Impossible to identify issue because it does not have a node_id (number:{issue_number}), ignoring synch: ' - + str(issue)) + f"Impossible to identify issue because it does not have a node_id (number:{issue_number}), ignoring synch: " + + str(issue) + ) return None _issue = Issue.objects.filter(node_id=node_id).first() if _issue is None: _issue = Issue( - title='Untitled', + title="Untitled", node_id=node_id, ) @@ -101,55 +100,56 @@ def sync_single_issue(issue, comment=None, freelancer=None, incoming_github_acti if issue_number is not None: _issue.github_number = issue_number - if issue['title'] is not None: - _issue.title = issue['title'][:255] + if issue["title"] is not None: + _issue.title = issue["title"][:255] - if issue['body'] is not None: - _issue.body = issue['body'][:500] + if issue["body"] is not None: + _issue.body = issue["body"][:500] - _issue.url = issue['html_url'] + _issue.url = issue["html_url"] - result = re.search(r'github\.com\/([\w\-_]+)\/([\w\-_]+)\/.+', _issue.url) + result = re.search(r"github\.com\/([\w\-_]+)\/([\w\-_]+)\/.+", _issue.url) if result is not None: - _issue.repository_url = f'https://github.com/{result.group(1)}/{result.group(2)}' + _issue.repository_url = f"https://github.com/{result.group(1)}/{result.group(2)}" # To include it on the next invoice - _issue.invoice = ProjectInvoice.get_or_create(_issue.repository_url, academy_slug, status='DUE') + _issue.invoice = ProjectInvoice.get_or_create(_issue.repository_url, academy_slug, status="DUE") if freelancer is None: - if 'assignees' in issue and len(issue['assignees']) > 0: - assigne = issue['assignees'][0] - freelancer = Freelancer.objects.filter(github_user__github_id=assigne['id']).first() + if "assignees" in issue and len(issue["assignees"]) > 0: + assigne = issue["assignees"][0] + freelancer = Freelancer.objects.filter(github_user__github_id=assigne["id"]).first() if freelancer is None: raise Exception( f'Assigned github user: {assigne["id"]} is not a freelancer but is the main user associated to this issue' ) else: - raise Exception('There was no freelancer associated with this issue') + raise Exception("There was no freelancer associated with this issue") 
_issue.freelancer = freelancer hours = get_hours(_issue.body) if hours is not None and _issue.duration_in_hours != hours: - logger.info(f'Updating issue {node_id} ({issue_number}) hrs with {hours}, found <hrs> tag on updated body') + logger.info(f"Updating issue {node_id} ({issue_number}) hrs with {hours}, found <hrs> tag on updated body") _issue.duration_in_minutes = hours * 60 _issue.duration_in_hours = hours # update based on the comment (if available) if comment is not None: - hours = get_hours(comment['body']) + hours = get_hours(comment["body"]) if hours is not None and _issue.duration_in_hours != hours: - logger.info(f'Updating issue {node_id} ({issue_number}) hrs with {hours}, found <hrs> tag on new comment') + logger.info(f"Updating issue {node_id} ({issue_number}) hrs with {hours}, found <hrs> tag on new comment") _issue.duration_in_minutes = hours * 60 _issue.duration_in_hours = hours - status = get_status(comment['body']) + status = get_status(comment["body"]) if status is not None and status_is_valid(status): logger.info( - f'Updating issue {node_id} ({issue_number}) status to {status} found <status> tag on new comment') + f"Updating issue {node_id} ({issue_number}) status to {status} found <status> tag on new comment" + ) _issue.status = status elif status is not None: - error = f'The status {status} is not valid' + error = f"The status {status} is not valid" logger.info(error) _issue.status_message = error _issue.save() @@ -160,25 +160,25 @@ def sync_single_issue(issue, comment=None, freelancer=None, incoming_github_acti def sync_user_issues(freelancer, academy_slug=None): if freelancer.github_user is None: - raise ValueError('Freelancer has not github user') + raise ValueError("Freelancer has not github user") github_id = freelancer.github_user.github_id credentials = CredentialsGithub.objects.filter(github_id=github_id).first() if credentials is None: - raise ValueError(f'Credentials for this user {github_id} not found') + raise ValueError(f"Credentials for this user {github_id} not found") g = Github(credentials.token) user = g.get_user() - open_issues = user.get_user_issues(state='open') + open_issues = user.get_user_issues(state="open") count = 0 for issue in open_issues: count += 1 _i = sync_single_issue(issue, freelancer=freelancer, academy_slug=academy_slug) if _i is not None: - logger.debug(f'{_i.node_id} synched') - logger.debug(f'{str(count)} issues found for this Github user credentials {str(credentials)}') + logger.debug(f"{_i.node_id} synched") + logger.debug(f"{str(count)} issues found for this Github user credentials {str(credentials)}") return count @@ -190,104 +190,105 @@ def change_status(issue, status): def generate_project_invoice(project): - logger.debug('Generate invoice for project %s', project.title) + logger.debug("Generate invoice for project %s", project.title) # reset all pending issues invoices, we'll start again - Issue.objects.filter(invoice__project__id=project.id).exclude(status='DONE').update(invoice=None) + Issue.objects.filter(invoice__project__id=project.id).exclude(status="DONE").update(invoice=None) # get next pending invoice - invoice = ProjectInvoice.get_or_create(project.repository, project.academy.slug, status='DUE') + invoice = ProjectInvoice.get_or_create(project.repository, project.academy.slug, status="DUE") # fetch for issues to be invoiced - done_issues = Issue.objects.filter(academy__slug=project.academy.slug, - url__icontains=project.repository, - status='DONE').filter(Q(invoice__isnull=True) - | Q(invoice__status='DUE')) + 
done_issues = Issue.objects.filter( + academy__slug=project.academy.slug, url__icontains=project.repository, status="DONE" + ).filter(Q(invoice__isnull=True) | Q(invoice__status="DUE")) invoices = {} for issue in done_issues: issue.invoice = invoice - issue.status_message = '' + issue.status_message = "" if str(issue.invoice.id) not in invoices: - invoices[str(issue.invoice.id)] = {'minutes': 0, 'hours': 0, 'price': 0, 'instance': issue.invoice} - - if issue.status != 'DONE': - issue.status_message += 'Issue is still ' + issue.status - if issue.node_id is None or issue.node_id == '': - issue.status_message += 'Github node id not found' - - if issue.status_message == '': - _hours = invoices[str(issue.invoice.id)]['hours'] + issue.duration_in_hours - invoices[str(issue.invoice.id)]['hours'] = _hours - invoices[str( - issue.invoice.id)]['minutes'] = invoices[str(issue.invoice.id)]['minutes'] + issue.duration_in_minutes - invoices[str(issue.invoice.id)]['price'] = _hours * issue.freelancer.get_client_hourly_rate(project) + invoices[str(issue.invoice.id)] = {"minutes": 0, "hours": 0, "price": 0, "instance": issue.invoice} + + if issue.status != "DONE": + issue.status_message += "Issue is still " + issue.status + if issue.node_id is None or issue.node_id == "": + issue.status_message += "Github node id not found" + + if issue.status_message == "": + _hours = invoices[str(issue.invoice.id)]["hours"] + issue.duration_in_hours + invoices[str(issue.invoice.id)]["hours"] = _hours + invoices[str(issue.invoice.id)]["minutes"] = ( + invoices[str(issue.invoice.id)]["minutes"] + issue.duration_in_minutes + ) + invoices[str(issue.invoice.id)]["price"] = _hours * issue.freelancer.get_client_hourly_rate(project) issue.save() for inv_id in invoices: - invoices[inv_id]['instance'].total_duration_in_hours = invoices[inv_id]['hours'] - invoices[inv_id]['instance'].total_duration_in_minutes = invoices[inv_id]['minutes'] - invoices[inv_id]['instance'].total_price = invoices[inv_id]['price'] + invoices[inv_id]["instance"].total_duration_in_hours = invoices[inv_id]["hours"] + invoices[inv_id]["instance"].total_duration_in_minutes = invoices[inv_id]["minutes"] + invoices[inv_id]["instance"].total_price = invoices[inv_id]["price"] - invoices[inv_id]['instance'].save() + invoices[inv_id]["instance"].save() - return [invoices[inv_id]['instance'] for inv_id in invoices] + return [invoices[inv_id]["instance"] for inv_id in invoices] def generate_freelancer_bill(freelancer): - Issue.objects.filter(bill__isnull=False, freelancer__id=freelancer.id).exclude(status='DONE').update(bill=None) + Issue.objects.filter(bill__isnull=False, freelancer__id=freelancer.id).exclude(status="DONE").update(bill=None) def get_bill(academy): - open_bill = Bill.objects.filter(freelancer__id=freelancer.id, - status='DUE', - academy__slug=academy.slug, - academy__isnull=False).first() + open_bill = Bill.objects.filter( + freelancer__id=freelancer.id, status="DUE", academy__slug=academy.slug, academy__isnull=False + ).first() if open_bill is None: open_bill = Bill(freelancer=freelancer, academy=academy) open_bill.save() return open_bill - done_issues = Issue.objects.filter(freelancer__id=freelancer.id, - status='DONE').filter(Q(bill__isnull=True) - | Q(bill__status='DUE')).exclude(academy__isnull=True) + done_issues = ( + Issue.objects.filter(freelancer__id=freelancer.id, status="DONE") + .filter(Q(bill__isnull=True) | Q(bill__status="DUE")) + .exclude(academy__isnull=True) + ) bills = {} for issue in done_issues: issue.bill = 
get_bill(issue.academy) - issue.status_message = '' + issue.status_message = "" if str(issue.bill.id) not in bills: - bills[str(issue.bill.id)] = {'minutes': 0, 'hours': 0, 'price': 0, 'instance': issue.bill} + bills[str(issue.bill.id)] = {"minutes": 0, "hours": 0, "price": 0, "instance": issue.bill} - if issue.status != 'DONE': - issue.status_message += 'Issue is still ' + issue.status - if issue.node_id is None or issue.node_id == '': - issue.status_message += 'Github node id not found' + if issue.status != "DONE": + issue.status_message += "Issue is still " + issue.status + if issue.node_id is None or issue.node_id == "": + issue.status_message += "Github node id not found" - if issue.status_message == '': - _hours = bills[str(issue.bill.id)]['hours'] + issue.duration_in_hours - bills[str(issue.bill.id)]['hours'] = _hours - bills[str(issue.bill.id)]['minutes'] = bills[str(issue.bill.id)]['minutes'] + issue.duration_in_minutes + if issue.status_message == "": + _hours = bills[str(issue.bill.id)]["hours"] + issue.duration_in_hours + bills[str(issue.bill.id)]["hours"] = _hours + bills[str(issue.bill.id)]["minutes"] = bills[str(issue.bill.id)]["minutes"] + issue.duration_in_minutes project = issue.invoice.project if issue.invoice is not None else None - bills[str(issue.bill.id)]['price'] = _hours * freelancer.get_hourly_rate(project) + bills[str(issue.bill.id)]["price"] = _hours * freelancer.get_hourly_rate(project) issue.save() for bill_id in bills: - bills[bill_id]['instance'].total_duration_in_hours = bills[bill_id]['hours'] - bills[bill_id]['instance'].total_duration_in_minutes = bills[bill_id]['minutes'] - bills[bill_id]['instance'].total_price = bills[bill_id]['price'] - bills[bill_id]['instance'].save() + bills[bill_id]["instance"].total_duration_in_hours = bills[bill_id]["hours"] + bills[bill_id]["instance"].total_duration_in_minutes = bills[bill_id]["minutes"] + bills[bill_id]["instance"].total_price = bills[bill_id]["price"] + bills[bill_id]["instance"].save() - return [bills[bill_id]['instance'] for bill_id in bills] + return [bills[bill_id]["instance"] for bill_id in bills] -@admin.display(description='Process Hook') +@admin.display(description="Process Hook") def run_hook(modeladmin, request, queryset): for hook in queryset.all(): ac_academy = hook.ac_academy diff --git a/breathecode/freelance/admin.py b/breathecode/freelance/admin.py index 5762a9c31..22d6877a2 100644 --- a/breathecode/freelance/admin.py +++ b/breathecode/freelance/admin.py @@ -1,18 +1,19 @@ from django.contrib import admin, messages -from .models import (Freelancer, Issue, Bill, AcademyFreelanceProject, FreelanceProjectMember, ProjectInvoice) +from .models import Freelancer, Issue, Bill, AcademyFreelanceProject, FreelanceProjectMember, ProjectInvoice from django.utils.html import format_html from . import actions from breathecode.utils.admin import change_field + # Register your models here. 
-@admin.display(description='Sync open issues') +@admin.display(description="Sync open issues") def sync_issues(modeladmin, request, queryset): freelancers = queryset.all() for freelancer in freelancers: try: count = actions.sync_user_issues(freelancer) - messages.success(message=f'{count} issues successfully synched!', request=request) + messages.success(message=f"{count} issues successfully synched!", request=request) except ValueError as err: messages.error(request, err) @@ -22,7 +23,7 @@ def generate_freelancer_bill(modeladmin, request, queryset): for freelancer in freelancers: try: actions.generate_freelancer_bill(freelancer) - messages.success(message='Success!', request=request) + messages.success(message="Success!", request=request) except ValueError as err: messages.error(request, err) @@ -33,9 +34,10 @@ def mark_as(queryset, status, request): try: for i in issues: - if i.bill is not None and i.bill.status != 'DUE': + if i.bill is not None and i.bill.status != "DUE": raise Exception( - f'Github {i.github_number} cannot be updated because it was already approved for payment') + f"Github {i.github_number} cannot be updated because it was already approved for payment" + ) freelancers[i.freelancer.id] = i.freelancer i.status = status i.save() @@ -48,13 +50,13 @@ def mark_as(queryset, status, request): @admin.register(Freelancer) class FreelancerAdmin(admin.ModelAdmin): - list_display = ['user_id', 'full_name', 'email', 'github', 'price_per_hour'] - search_fields = ['user__email', 'user__first_name', 'user__last_name'] - raw_id_fields = ['user', 'github_user'] + list_display = ["user_id", "full_name", "email", "github", "price_per_hour"] + search_fields = ["user__email", "user__first_name", "user__last_name"] + raw_id_fields = ["user", "github_user"] actions = [sync_issues, generate_freelancer_bill] def full_name(self, obj): - return obj.user.first_name + ' ' + obj.user.last_name + return obj.user.first_name + " " + obj.user.last_name def email(self, obj): return obj.user.email @@ -74,7 +76,7 @@ def resync_single_issue(modeladmin, request, queryset): for i in issues: try: actions.sync_single_issue(i) - messages.success(message='Success!', request=request) + messages.success(message="Success!", request=request) except ValueError as err: messages.error(request, err) @@ -82,13 +84,24 @@ def resync_single_issue(modeladmin, request, queryset): @admin.register(Issue) class IssueAdmin(admin.ModelAdmin): search_fields = [ - 'title', 'freelancer__user__email', 'freelancer__user__first_name', 'freelancer__user__last_name', - 'github_number' + "title", + "freelancer__user__email", + "freelancer__user__first_name", + "freelancer__user__last_name", + "github_number", ] - list_display = ('id', 'github_number', 'freelancer', 'title', 'status', 'duration_in_hours', 'bill_id', - 'github_url') - list_filter = ['status', 'bill__status'] - actions = [resync_single_issue] + change_field(['TODO', 'DONE', 'IGNORED', 'DRAFT', 'DOING'], name='status') + list_display = ( + "id", + "github_number", + "freelancer", + "title", + "status", + "duration_in_hours", + "bill_id", + "github_url", + ) + list_filter = ["status", "bill__status"] + actions = [resync_single_issue] + change_field(["TODO", "DONE", "IGNORED", "DRAFT", "DOING"], name="status") def github_url(self, obj): return format_html("<a rel='noopener noreferrer' target='_blank' href='{url}'>open in github</a>", url=obj.url) @@ -96,14 +109,15 @@ def github_url(self, obj): @admin.register(Bill) class BillAdmin(admin.ModelAdmin): - list_display = ('id', 
'freelancer', 'status', 'total_duration_in_hours', 'total_price', 'paid_at', 'invoice_url') - list_filter = ['status'] - actions = change_field(['PAID', 'APPROVED', 'IGNORED', 'DUE'], name='status') + list_display = ("id", "freelancer", "status", "total_duration_in_hours", "total_price", "paid_at", "invoice_url") + list_filter = ["status"] + actions = change_field(["PAID", "APPROVED", "IGNORED", "DUE"], name="status") def invoice_url(self, obj): return format_html( "<a rel='noopener noreferrer' target='_blank' href='/v1/freelance/bills/{id}/html'>open invoice</a>", - id=obj.id) + id=obj.id, + ) def generate_project_invoice(modeladmin, request, queryset): @@ -118,22 +132,25 @@ def generate_project_invoice(modeladmin, request, queryset): @admin.register(AcademyFreelanceProject) class AcademyFreelanceProjectAdmin(admin.ModelAdmin): - list_display = ('id', 'title', 'academy', 'total_client_hourly_price') - list_filter = ['academy'] + list_display = ("id", "title", "academy", "total_client_hourly_price") + list_filter = ["academy"] actions = [generate_project_invoice] @admin.register(FreelanceProjectMember) class FreelanceProjectMemberAdmin(admin.ModelAdmin): - list_display = ('freelancer', 'project', 'total_cost_hourly_price', 'total_client_hourly_price') - list_filter = ['project'] + list_display = ("freelancer", "project", "total_cost_hourly_price", "total_client_hourly_price") + list_filter = ["project"] search_fields = [ - 'project__title', 'freelancer__user__email', 'freelancer__user__first_name', 'freelancer__user__last_name' + "project__title", + "freelancer__user__email", + "freelancer__user__first_name", + "freelancer__user__last_name", ] @admin.register(ProjectInvoice) class ProjectInvoiceAdmin(admin.ModelAdmin): - list_display = ('id', 'project', 'status', 'total_duration_in_hours', 'total_price', 'paid_at') - list_filter = ['status'] - actions = change_field(['PAID', 'APPROVED', 'IGNORED', 'DUE'], name='status') + list_display = ("id", "project", "status", "total_duration_in_hours", "total_price", "paid_at") + list_filter = ["status"] + actions = change_field(["PAID", "APPROVED", "IGNORED", "DUE"], name="status") diff --git a/breathecode/freelance/apps.py b/breathecode/freelance/apps.py index 29ec56655..e6de033ce 100644 --- a/breathecode/freelance/apps.py +++ b/breathecode/freelance/apps.py @@ -2,7 +2,7 @@ class FreelanceConfig(AppConfig): - name = 'breathecode.freelance' + name = "breathecode.freelance" def ready(self): from . 
import receivers # noqa: F401 diff --git a/breathecode/freelance/management/commands/default_issues_to_miami.py b/breathecode/freelance/management/commands/default_issues_to_miami.py index 0cb28bdaf..7bdd5f59d 100644 --- a/breathecode/freelance/management/commands/default_issues_to_miami.py +++ b/breathecode/freelance/management/commands/default_issues_to_miami.py @@ -4,11 +4,11 @@ class Command(BaseCommand): - help = 'Make all issues be from miami' + help = "Make all issues be from miami" def handle(self, *args, **options): - academy = Academy.objects.filter(slug='downtown-miami').first() + academy = Academy.objects.filter(slug="downtown-miami").first() Issue.objects.filter(academy__isnull=True).update(academy=academy) - self.stdout.write(self.style.SUCCESS('Successfully sync issues')) + self.stdout.write(self.style.SUCCESS("Successfully sync issues")) diff --git a/breathecode/freelance/management/commands/delete_unfinished_issues.py b/breathecode/freelance/management/commands/delete_unfinished_issues.py index 5464d1672..0313f1fa1 100644 --- a/breathecode/freelance/management/commands/delete_unfinished_issues.py +++ b/breathecode/freelance/management/commands/delete_unfinished_issues.py @@ -3,10 +3,10 @@ class Command(BaseCommand): - help = 'Sync breathecode with active campaign' + help = "Sync breathecode with active campaign" def handle(self, *args, **options): - Issue.objects.filter(status__in=['DOING', 'IGNORED', 'DRAFT', 'TODO', 'DOING']).delete() + Issue.objects.filter(status__in=["DOING", "IGNORED", "DRAFT", "TODO", "DOING"]).delete() - self.stdout.write(self.style.SUCCESS('Successfully sync tags')) + self.stdout.write(self.style.SUCCESS("Successfully sync tags")) diff --git a/breathecode/freelance/migrations/0001_initial.py b/breathecode/freelance/migrations/0001_initial.py index 0123919ce..ec482c8f6 100644 --- a/breathecode/freelance/migrations/0001_initial.py +++ b/breathecode/freelance/migrations/0001_initial.py @@ -11,70 +11,81 @@ class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ('authenticate', '0002_credentialsquickbooks'), + ("authenticate", "0002_credentialsquickbooks"), ] operations = [ migrations.CreateModel( - name='Bill', + name="Bill", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('status', models.CharField(choices=[('DUE', 'Due'), ('PAID', 'Paid')], default='DUE', max_length=20)), - ('total_duration_in_minutes', models.FloatField()), - ('total_price', models.FloatField()), - ('paid_at', models.DateTimeField()), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("status", models.CharField(choices=[("DUE", "Due"), ("PAID", "Paid")], default="DUE", max_length=20)), + ("total_duration_in_minutes", models.FloatField()), + ("total_price", models.FloatField()), + ("paid_at", models.DateTimeField()), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), ], ), migrations.CreateModel( - name='Freelancer', + name="Freelancer", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('price_per_hour', models.FloatField()), - ('github_user', - models.ForeignKey(default=None, - null=True, - on_delete=django.db.models.deletion.SET_DEFAULT, - 
to='authenticate.CredentialsGithub')), - ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("price_per_hour", models.FloatField()), + ( + "github_user", + models.ForeignKey( + default=None, + null=True, + on_delete=django.db.models.deletion.SET_DEFAULT, + to="authenticate.CredentialsGithub", + ), + ), + ("user", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ], ), migrations.CreateModel( - name='Issue', + name="Issue", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('title', models.CharField(max_length=100)), - ('status', - models.CharField(choices=[('DRAFT', 'Draft'), ('TODO', 'Todo'), ('DOING', 'Doing'), ('DONE', 'Done')], - default='DRAFT', - max_length=20)), - ('github_state', models.CharField(max_length=30)), - ('github_number', models.PositiveIntegerField()), - ('body', models.TextField()), - ('duration_in_minutes', models.FloatField()), - ('url', models.URLField(max_length=255)), - ('repository_url', models.URLField(max_length=255)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), - ('bill', - models.ForeignKey(default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='freelance.Bill')), - ('freelancer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, - to='freelance.Freelancer')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("title", models.CharField(max_length=100)), + ( + "status", + models.CharField( + choices=[("DRAFT", "Draft"), ("TODO", "Todo"), ("DOING", "Doing"), ("DONE", "Done")], + default="DRAFT", + max_length=20, + ), + ), + ("github_state", models.CharField(max_length=30)), + ("github_number", models.PositiveIntegerField()), + ("body", models.TextField()), + ("duration_in_minutes", models.FloatField()), + ("url", models.URLField(max_length=255)), + ("repository_url", models.URLField(max_length=255)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("author", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), + ( + "bill", + models.ForeignKey( + default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to="freelance.Bill" + ), + ), + ( + "freelancer", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="freelance.Freelancer"), + ), ], ), migrations.AddField( - model_name='bill', - name='freelancer', - field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='freelance.Freelancer'), + model_name="bill", + name="freelancer", + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="freelance.Freelancer"), ), migrations.AddField( - model_name='bill', - name='reviewer', + model_name="bill", + name="reviewer", field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL), ), ] diff --git a/breathecode/freelance/migrations/0002_auto_20200717_2003.py b/breathecode/freelance/migrations/0002_auto_20200717_2003.py index a5b0cc78e..75222304c 100644 --- a/breathecode/freelance/migrations/0002_auto_20200717_2003.py +++ 
b/breathecode/freelance/migrations/0002_auto_20200717_2003.py @@ -9,21 +9,20 @@ class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ('freelance', '0001_initial'), + ("freelance", "0001_initial"), ] operations = [ migrations.AlterField( - model_name='issue', - name='author', - field=models.ForeignKey(default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to=settings.AUTH_USER_MODEL), + model_name="issue", + name="author", + field=models.ForeignKey( + default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL + ), ), migrations.AlterField( - model_name='issue', - name='duration_in_minutes', + model_name="issue", + name="duration_in_minutes", field=models.FloatField(default=0), ), ] diff --git a/breathecode/freelance/migrations/0003_auto_20200717_2009.py b/breathecode/freelance/migrations/0003_auto_20200717_2009.py index 3d681a3ef..e6eefedf3 100644 --- a/breathecode/freelance/migrations/0003_auto_20200717_2009.py +++ b/breathecode/freelance/migrations/0003_auto_20200717_2009.py @@ -6,18 +6,18 @@ class Migration(migrations.Migration): dependencies = [ - ('freelance', '0002_auto_20200717_2003'), + ("freelance", "0002_auto_20200717_2003"), ] operations = [ migrations.AlterField( - model_name='issue', - name='body', + model_name="issue", + name="body", field=models.TextField(max_length=500), ), migrations.AlterField( - model_name='issue', - name='title', + model_name="issue", + name="title", field=models.CharField(max_length=255), ), ] diff --git a/breathecode/freelance/migrations/0004_issue_duration_in_hours.py b/breathecode/freelance/migrations/0004_issue_duration_in_hours.py index a0175451f..29977665d 100644 --- a/breathecode/freelance/migrations/0004_issue_duration_in_hours.py +++ b/breathecode/freelance/migrations/0004_issue_duration_in_hours.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('freelance', '0003_auto_20200717_2009'), + ("freelance", "0003_auto_20200717_2009"), ] operations = [ migrations.AddField( - model_name='issue', - name='duration_in_hours', + model_name="issue", + name="duration_in_hours", field=models.FloatField(default=0), ), ] diff --git a/breathecode/freelance/migrations/0005_auto_20200717_2047.py b/breathecode/freelance/migrations/0005_auto_20200717_2047.py index a365e3b45..8d0c3de63 100644 --- a/breathecode/freelance/migrations/0005_auto_20200717_2047.py +++ b/breathecode/freelance/migrations/0005_auto_20200717_2047.py @@ -6,36 +6,43 @@ class Migration(migrations.Migration): dependencies = [ - ('freelance', '0004_issue_duration_in_hours'), + ("freelance", "0004_issue_duration_in_hours"), ] operations = [ migrations.AddField( - model_name='bill', - name='total_duration_in_hours', + model_name="bill", + name="total_duration_in_hours", field=models.FloatField(default=0), ), migrations.AlterField( - model_name='bill', - name='paid_at', + model_name="bill", + name="paid_at", field=models.DateTimeField(default=None, null=True), ), migrations.AlterField( - model_name='bill', - name='total_duration_in_minutes', + model_name="bill", + name="total_duration_in_minutes", field=models.FloatField(default=0), ), migrations.AlterField( - model_name='bill', - name='total_price', + model_name="bill", + name="total_price", field=models.FloatField(default=0), ), migrations.AlterField( - model_name='issue', - name='status', - field=models.CharField(choices=[('IGNORED', 'Ignored'), ('DRAFT', 'Draft'), ('TODO', 'Todo'), - ('DOING', 
'Doing'), ('DONE', 'Done')], - default='DRAFT', - max_length=20), + model_name="issue", + name="status", + field=models.CharField( + choices=[ + ("IGNORED", "Ignored"), + ("DRAFT", "Draft"), + ("TODO", "Todo"), + ("DOING", "Doing"), + ("DONE", "Done"), + ], + default="DRAFT", + max_length=20, + ), ), ] diff --git a/breathecode/freelance/migrations/0006_auto_20200728_2225.py b/breathecode/freelance/migrations/0006_auto_20200728_2225.py index bab7028ab..b3f9f8976 100644 --- a/breathecode/freelance/migrations/0006_auto_20200728_2225.py +++ b/breathecode/freelance/migrations/0006_auto_20200728_2225.py @@ -9,16 +9,15 @@ class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ('freelance', '0005_auto_20200717_2047'), + ("freelance", "0005_auto_20200717_2047"), ] operations = [ migrations.AlterField( - model_name='bill', - name='reviewer', - field=models.ForeignKey(default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to=settings.AUTH_USER_MODEL), + model_name="bill", + name="reviewer", + field=models.ForeignKey( + default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL + ), ), ] diff --git a/breathecode/freelance/migrations/0007_bill_academy.py b/breathecode/freelance/migrations/0007_bill_academy.py index 2e76ba9eb..4f66cd585 100644 --- a/breathecode/freelance/migrations/0007_bill_academy.py +++ b/breathecode/freelance/migrations/0007_bill_academy.py @@ -7,17 +7,16 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0014_auto_20201218_0534'), - ('freelance', '0006_auto_20200728_2225'), + ("admissions", "0014_auto_20201218_0534"), + ("freelance", "0006_auto_20200728_2225"), ] operations = [ migrations.AddField( - model_name='bill', - name='academy', - field=models.ForeignKey(default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='admissions.academy'), + model_name="bill", + name="academy", + field=models.ForeignKey( + default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to="admissions.academy" + ), ), ] diff --git a/breathecode/freelance/migrations/0008_auto_20210302_0254.py b/breathecode/freelance/migrations/0008_auto_20210302_0254.py index f0792cbc8..4715ed371 100644 --- a/breathecode/freelance/migrations/0008_auto_20210302_0254.py +++ b/breathecode/freelance/migrations/0008_auto_20210302_0254.py @@ -7,22 +7,20 @@ class Migration(migrations.Migration): dependencies = [ - ('freelance', '0007_bill_academy'), + ("freelance", "0007_bill_academy"), ] operations = [ migrations.AlterField( - model_name='bill', - name='paid_at', + model_name="bill", + name="paid_at", field=models.DateTimeField(blank=True, default=None, null=True), ), migrations.AlterField( - model_name='issue', - name='bill', - field=models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='freelance.bill'), + model_name="issue", + name="bill", + field=models.ForeignKey( + blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to="freelance.bill" + ), ), ] diff --git a/breathecode/freelance/migrations/0009_auto_20210326_0041.py b/breathecode/freelance/migrations/0009_auto_20210326_0041.py index 026ded373..9834c9c89 100644 --- a/breathecode/freelance/migrations/0009_auto_20210326_0041.py +++ b/breathecode/freelance/migrations/0009_auto_20210326_0041.py @@ -9,34 +9,36 @@ class Migration(migrations.Migration): dependencies = [ 
migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ('freelance', '0008_auto_20210302_0254'), + ("freelance", "0008_auto_20210302_0254"), ] operations = [ migrations.AlterField( - model_name='bill', - name='status', - field=models.CharField(choices=[('DUE', 'Due'), ('APPROVED', 'Approved'), ('PAID', 'Paid')], - default='DUE', - max_length=20), + model_name="bill", + name="status", + field=models.CharField( + choices=[("DUE", "Due"), ("APPROVED", "Approved"), ("PAID", "Paid")], default="DUE", max_length=20 + ), ), migrations.AlterField( - model_name='issue', - name='author', - field=models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to=settings.AUTH_USER_MODEL), + model_name="issue", + name="author", + field=models.ForeignKey( + blank=True, + default=None, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to=settings.AUTH_USER_MODEL, + ), ), migrations.AlterField( - model_name='issue', - name='github_state', + model_name="issue", + name="github_state", field=models.CharField(blank=True, default=None, max_length=30, null=True), ), migrations.AlterField( - model_name='issue', - name='repository_url', + model_name="issue", + name="repository_url", field=models.URLField(blank=True, default=None, max_length=255, null=True), ), ] diff --git a/breathecode/freelance/migrations/0010_repositoryissuewebhook.py b/breathecode/freelance/migrations/0010_repositoryissuewebhook.py index a33123a99..b74053caf 100644 --- a/breathecode/freelance/migrations/0010_repositoryissuewebhook.py +++ b/breathecode/freelance/migrations/0010_repositoryissuewebhook.py @@ -6,33 +6,49 @@ class Migration(migrations.Migration): dependencies = [ - ('freelance', '0009_auto_20210326_0041'), + ("freelance", "0009_auto_20210326_0041"), ] operations = [ migrations.CreateModel( - name='RepositoryIssueWebhook', + name="RepositoryIssueWebhook", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('webhook_action', - models.CharField(blank=True, - default=None, - help_text='The specific action that was triggered on github for this webhook', - max_length=100, - null=True)), - ('run_at', - models.DateTimeField(blank=True, default=None, help_text='Date/time that the webhook ran', null=True)), - ('repository', models.URLField(help_text='Github repo where the event occured', max_length=255)), - ('payload', - models.JSONField(help_text='Info that came on the request, it varies depending on the webhook type')), - ('academy_slug', models.SlugField()), - ('status', - models.CharField(choices=[('PENDING', 'Pending'), ('DONE', 'Done'), ('ERROR', 'Error')], - default='PENDING', - max_length=9)), - ('status_text', models.CharField(blank=True, default=None, max_length=255, null=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "webhook_action", + models.CharField( + blank=True, + default=None, + help_text="The specific action that was triggered on github for this webhook", + max_length=100, + null=True, + ), + ), + ( + "run_at", + models.DateTimeField( + blank=True, default=None, help_text="Date/time that the webhook ran", null=True + ), + ), + ("repository", models.URLField(help_text="Github repo where the event occured", max_length=255)), + ( + "payload", + models.JSONField( + help_text="Info that came on the request, it varies depending 
on the webhook type" + ), + ), + ("academy_slug", models.SlugField()), + ( + "status", + models.CharField( + choices=[("PENDING", "Pending"), ("DONE", "Done"), ("ERROR", "Error")], + default="PENDING", + max_length=9, + ), + ), + ("status_text", models.CharField(blank=True, default=None, max_length=255, null=True)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), ], ), ] diff --git a/breathecode/freelance/migrations/0011_issue_node_id.py b/breathecode/freelance/migrations/0011_issue_node_id.py index 5f644adf9..8e7c40a98 100644 --- a/breathecode/freelance/migrations/0011_issue_node_id.py +++ b/breathecode/freelance/migrations/0011_issue_node_id.py @@ -6,19 +6,19 @@ class Migration(migrations.Migration): dependencies = [ - ('freelance', '0010_repositoryissuewebhook'), + ("freelance", "0010_repositoryissuewebhook"), ] operations = [ migrations.AddField( - model_name='issue', - name='node_id', + model_name="issue", + name="node_id", field=models.CharField( blank=True, default=None, - help_text= - 'This is the only unique identifier we get from github, the issue number its not unique among repos', + help_text="This is the only unique identifier we get from github, the issue number its not unique among repos", max_length=50, - null=True), + null=True, + ), ), ] diff --git a/breathecode/freelance/migrations/0012_auto_20220222_0036.py b/breathecode/freelance/migrations/0012_auto_20220222_0036.py index fa05764d6..354ad116c 100644 --- a/breathecode/freelance/migrations/0012_auto_20220222_0036.py +++ b/breathecode/freelance/migrations/0012_auto_20220222_0036.py @@ -6,18 +6,18 @@ class Migration(migrations.Migration): dependencies = [ - ('freelance', '0011_issue_node_id'), + ("freelance", "0011_issue_node_id"), ] operations = [ migrations.AlterField( - model_name='issue', - name='repository_url', + model_name="issue", + name="repository_url", field=models.URLField(blank=True, default=None, null=True), ), migrations.AlterField( - model_name='issue', - name='url', + model_name="issue", + name="url", field=models.URLField(), ), ] diff --git a/breathecode/freelance/migrations/0013_issue_status_message.py b/breathecode/freelance/migrations/0013_issue_status_message.py index 946665d22..40ff4b14a 100644 --- a/breathecode/freelance/migrations/0013_issue_status_message.py +++ b/breathecode/freelance/migrations/0013_issue_status_message.py @@ -6,17 +6,19 @@ class Migration(migrations.Migration): dependencies = [ - ('freelance', '0012_auto_20220222_0036'), + ("freelance", "0012_auto_20220222_0036"), ] operations = [ migrations.AddField( - model_name='issue', - name='status_message', - field=models.CharField(blank=True, - default=None, - help_text='Important message like reasong why not included on bill, etc.', - max_length=255, - null=True), + model_name="issue", + name="status_message", + field=models.CharField( + blank=True, + default=None, + help_text="Important message like reasong why not included on bill, etc.", + max_length=255, + null=True, + ), ), ] diff --git a/breathecode/freelance/migrations/0014_alter_bill_status.py b/breathecode/freelance/migrations/0014_alter_bill_status.py index 4c3b48aed..a9f1fabdc 100644 --- a/breathecode/freelance/migrations/0014_alter_bill_status.py +++ b/breathecode/freelance/migrations/0014_alter_bill_status.py @@ -6,16 +6,17 @@ class Migration(migrations.Migration): dependencies = [ - ('freelance', '0013_issue_status_message'), + ("freelance", "0013_issue_status_message"), ] operations = [ 
migrations.AlterField( - model_name='bill', - name='status', - field=models.CharField(choices=[('DUE', 'Due'), ('APPROVED', 'Approved'), ('IGNORED', 'Ignored'), - ('PAID', 'Paid')], - default='DUE', - max_length=20), + model_name="bill", + name="status", + field=models.CharField( + choices=[("DUE", "Due"), ("APPROVED", "Approved"), ("IGNORED", "Ignored"), ("PAID", "Paid")], + default="DUE", + max_length=20, + ), ), ] diff --git a/breathecode/freelance/migrations/0015_auto_20220608_0129.py b/breathecode/freelance/migrations/0015_auto_20220608_0129.py index b1d219ff7..9bd8988ed 100644 --- a/breathecode/freelance/migrations/0015_auto_20220608_0129.py +++ b/breathecode/freelance/migrations/0015_auto_20220608_0129.py @@ -7,29 +7,33 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0041_cohortuser_watching'), - ('freelance', '0014_alter_bill_status'), + ("admissions", "0041_cohortuser_watching"), + ("freelance", "0014_alter_bill_status"), ] operations = [ migrations.AddField( - model_name='issue', - name='academy', - field=models.ForeignKey(blank=True, - default=None, - help_text='Will help catalog billing grouped by academy', - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='admissions.academy'), + model_name="issue", + name="academy", + field=models.ForeignKey( + blank=True, + default=None, + help_text="Will help catalog billing grouped by academy", + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="admissions.academy", + ), ), migrations.AlterField( - model_name='bill', - name='academy', - field=models.ForeignKey(blank=True, - default=None, - help_text='Will help catalog billing grouped by academy', - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='admissions.academy'), + model_name="bill", + name="academy", + field=models.ForeignKey( + blank=True, + default=None, + help_text="Will help catalog billing grouped by academy", + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="admissions.academy", + ), ), ] diff --git a/breathecode/freelance/migrations/0016_academyfreelanceproject_freelanceprojectmember_projectinvoice.py b/breathecode/freelance/migrations/0016_academyfreelanceproject_freelanceprojectmember_projectinvoice.py index 3e703e030..d723efc77 100644 --- a/breathecode/freelance/migrations/0016_academyfreelanceproject_freelanceprojectmember_projectinvoice.py +++ b/breathecode/freelance/migrations/0016_academyfreelanceproject_freelanceprojectmember_projectinvoice.py @@ -8,72 +8,98 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0045_alter_syllabusversion_integrity_status'), + ("admissions", "0045_alter_syllabusversion_integrity_status"), migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ('freelance', '0015_auto_20220608_0129'), + ("freelance", "0015_auto_20220608_0129"), ] operations = [ migrations.CreateModel( - name='AcademyFreelanceProject', + name="AcademyFreelanceProject", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('title', models.CharField(max_length=255)), - ('repository', models.URLField(help_text='Github repo where the event occured', max_length=255)), - ('total_client_price', - models.FloatField(help_text='How much will the client be billed for each our on this project')), - ('academy', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='admissions.academy')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("title", 
models.CharField(max_length=255)), + ("repository", models.URLField(help_text="Github repo where the event occured", max_length=255)), + ( + "total_client_price", + models.FloatField(help_text="How much will the client be billed for each our on this project"), + ), + ("academy", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="admissions.academy")), ], ), migrations.CreateModel( - name='FreelanceProjectMember', + name="FreelanceProjectMember", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('title', models.CharField(max_length=255)), - ('repository', models.URLField(help_text='Github repo url', max_length=255)), - ('total_cost_price', - models.FloatField(blank=True, - default=None, - help_text='Paid to the freelancer, leave blank to use the default freelancer price', - null=True)), - ('total_client_price', - models.FloatField( - blank=True, - default=None, - help_text= - 'Billed to the client on this project/freelancer, leave blank to use default from the project', - null=True)), - ('freelancer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, - to='freelance.freelancer')), - ('project', - models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, - to='freelance.academyfreelanceproject')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("title", models.CharField(max_length=255)), + ("repository", models.URLField(help_text="Github repo url", max_length=255)), + ( + "total_cost_price", + models.FloatField( + blank=True, + default=None, + help_text="Paid to the freelancer, leave blank to use the default freelancer price", + null=True, + ), + ), + ( + "total_client_price", + models.FloatField( + blank=True, + default=None, + help_text="Billed to the client on this project/freelancer, leave blank to use default from the project", + null=True, + ), + ), + ( + "freelancer", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="freelance.freelancer"), + ), + ( + "project", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, to="freelance.academyfreelanceproject" + ), + ), ], ), migrations.CreateModel( - name='ProjectInvoice', + name="ProjectInvoice", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('status', - models.CharField(choices=[('DUE', 'Due'), ('APPROVED', 'Approved'), ('IGNORED', 'Ignored'), - ('PAID', 'Paid')], - default='DUE', - max_length=20)), - ('total_duration_in_minutes', models.FloatField(default=0)), - ('total_duration_in_hours', models.FloatField(default=0)), - ('total_price', models.FloatField(default=0)), - ('paid_at', models.DateTimeField(blank=True, default=None, null=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('freelancer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, - to='freelance.freelancer')), - ('project', - models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='freelance.freelanceprojectmember')), - ('reviewer', - models.ForeignKey(default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to=settings.AUTH_USER_MODEL)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "status", + models.CharField( + choices=[("DUE", "Due"), ("APPROVED", "Approved"), ("IGNORED", "Ignored"), ("PAID", "Paid")], + default="DUE", + max_length=20, + ), + ), 
+ ("total_duration_in_minutes", models.FloatField(default=0)), + ("total_duration_in_hours", models.FloatField(default=0)), + ("total_price", models.FloatField(default=0)), + ("paid_at", models.DateTimeField(blank=True, default=None, null=True)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ( + "freelancer", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="freelance.freelancer"), + ), + ( + "project", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, to="freelance.freelanceprojectmember" + ), + ), + ( + "reviewer", + models.ForeignKey( + default=None, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to=settings.AUTH_USER_MODEL, + ), + ), ], ), ] diff --git a/breathecode/freelance/migrations/0017_auto_20220919_2336.py b/breathecode/freelance/migrations/0017_auto_20220919_2336.py index 8078c40ed..23f58fc6c 100644 --- a/breathecode/freelance/migrations/0017_auto_20220919_2336.py +++ b/breathecode/freelance/migrations/0017_auto_20220919_2336.py @@ -6,21 +6,21 @@ class Migration(migrations.Migration): dependencies = [ - ('freelance', '0016_academyfreelanceproject_freelanceprojectmember_projectinvoice'), + ("freelance", "0016_academyfreelanceproject_freelanceprojectmember_projectinvoice"), ] operations = [ migrations.RenameField( - model_name='academyfreelanceproject', - old_name='total_client_price', - new_name='total_client_hourly_price', + model_name="academyfreelanceproject", + old_name="total_client_price", + new_name="total_client_hourly_price", ), migrations.RemoveField( - model_name='freelanceprojectmember', - name='repository', + model_name="freelanceprojectmember", + name="repository", ), migrations.RemoveField( - model_name='freelanceprojectmember', - name='title', + model_name="freelanceprojectmember", + name="title", ), ] diff --git a/breathecode/freelance/migrations/0018_auto_20220919_2338.py b/breathecode/freelance/migrations/0018_auto_20220919_2338.py index 6938af335..3ba65bfc3 100644 --- a/breathecode/freelance/migrations/0018_auto_20220919_2338.py +++ b/breathecode/freelance/migrations/0018_auto_20220919_2338.py @@ -6,18 +6,18 @@ class Migration(migrations.Migration): dependencies = [ - ('freelance', '0017_auto_20220919_2336'), + ("freelance", "0017_auto_20220919_2336"), ] operations = [ migrations.RenameField( - model_name='freelanceprojectmember', - old_name='total_client_price', - new_name='total_client_hourly_price', + model_name="freelanceprojectmember", + old_name="total_client_price", + new_name="total_client_hourly_price", ), migrations.RenameField( - model_name='freelanceprojectmember', - old_name='total_cost_price', - new_name='total_cost_hourly_price', + model_name="freelanceprojectmember", + old_name="total_cost_price", + new_name="total_cost_hourly_price", ), ] diff --git a/breathecode/freelance/migrations/0019_bill_invoice.py b/breathecode/freelance/migrations/0019_bill_invoice.py index a00d0eeed..c5fda0e74 100644 --- a/breathecode/freelance/migrations/0019_bill_invoice.py +++ b/breathecode/freelance/migrations/0019_bill_invoice.py @@ -7,18 +7,20 @@ class Migration(migrations.Migration): dependencies = [ - ('freelance', '0018_auto_20220919_2338'), + ("freelance", "0018_auto_20220919_2338"), ] operations = [ migrations.AddField( - model_name='bill', - name='invoice', - field=models.ForeignKey(blank=True, - default=None, - help_text='Attach this freelance bill to a project invoice', - null=True, - on_delete=django.db.models.deletion.SET_DEFAULT, 
- to='freelance.projectinvoice'), + model_name="bill", + name="invoice", + field=models.ForeignKey( + blank=True, + default=None, + help_text="Attach this freelance bill to a project invoice", + null=True, + on_delete=django.db.models.deletion.SET_DEFAULT, + to="freelance.projectinvoice", + ), ), ] diff --git a/breathecode/freelance/migrations/0020_auto_20220920_0038.py b/breathecode/freelance/migrations/0020_auto_20220920_0038.py index 96c8328b7..4e2122d7f 100644 --- a/breathecode/freelance/migrations/0020_auto_20220920_0038.py +++ b/breathecode/freelance/migrations/0020_auto_20220920_0038.py @@ -7,22 +7,24 @@ class Migration(migrations.Migration): dependencies = [ - ('freelance', '0019_bill_invoice'), + ("freelance", "0019_bill_invoice"), ] operations = [ migrations.RemoveField( - model_name='bill', - name='invoice', + model_name="bill", + name="invoice", ), migrations.AddField( - model_name='issue', - name='invoice', - field=models.ForeignKey(blank=True, - default=None, - help_text='Attach this issue to a project invoice', - null=True, - on_delete=django.db.models.deletion.SET_DEFAULT, - to='freelance.projectinvoice'), + model_name="issue", + name="invoice", + field=models.ForeignKey( + blank=True, + default=None, + help_text="Attach this issue to a project invoice", + null=True, + on_delete=django.db.models.deletion.SET_DEFAULT, + to="freelance.projectinvoice", + ), ), ] diff --git a/breathecode/freelance/migrations/0021_auto_20220920_0155.py b/breathecode/freelance/migrations/0021_auto_20220920_0155.py index f195855f6..4b8445da2 100644 --- a/breathecode/freelance/migrations/0021_auto_20220920_0155.py +++ b/breathecode/freelance/migrations/0021_auto_20220920_0155.py @@ -7,18 +7,19 @@ class Migration(migrations.Migration): dependencies = [ - ('freelance', '0020_auto_20220920_0038'), + ("freelance", "0020_auto_20220920_0038"), ] operations = [ migrations.RemoveField( - model_name='projectinvoice', - name='freelancer', + model_name="projectinvoice", + name="freelancer", ), migrations.AlterField( - model_name='projectinvoice', - name='project', - field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, - to='freelance.academyfreelanceproject'), + model_name="projectinvoice", + name="project", + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, to="freelance.academyfreelanceproject" + ), ), ] diff --git a/breathecode/freelance/migrations/0022_alter_issue_github_number.py b/breathecode/freelance/migrations/0022_alter_issue_github_number.py index 209cb9b88..0ce369877 100644 --- a/breathecode/freelance/migrations/0022_alter_issue_github_number.py +++ b/breathecode/freelance/migrations/0022_alter_issue_github_number.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('freelance', '0021_auto_20220920_0155'), + ("freelance", "0021_auto_20220920_0155"), ] operations = [ migrations.AlterField( - model_name='issue', - name='github_number', + model_name="issue", + name="github_number", field=models.PositiveIntegerField(blank=True, default=None, null=True), ), ] diff --git a/breathecode/freelance/migrations/0023_auto_20221221_1133.py b/breathecode/freelance/migrations/0023_auto_20221221_1133.py index 225237b74..28a66af08 100644 --- a/breathecode/freelance/migrations/0023_auto_20221221_1133.py +++ b/breathecode/freelance/migrations/0023_auto_20221221_1133.py @@ -6,28 +6,30 @@ class Migration(migrations.Migration): dependencies = [ - ('freelance', '0022_alter_issue_github_number'), + ("freelance", "0022_alter_issue_github_number"), ] 
operations = [ migrations.AlterField( - model_name='issue', - name='node_id', + model_name="issue", + name="node_id", field=models.CharField( blank=True, default=None, - help_text= - 'This is the only unique identifier we get from github, the issue number is not unique among repos', + help_text="This is the only unique identifier we get from github, the issue number is not unique among repos", max_length=50, - null=True), + null=True, + ), ), migrations.AlterField( - model_name='issue', - name='status_message', - field=models.CharField(blank=True, - default=None, - help_text='Important message like reason why not included on bill, etc.', - max_length=255, - null=True), + model_name="issue", + name="status_message", + field=models.CharField( + blank=True, + default=None, + help_text="Important message like reason why not included on bill, etc.", + max_length=255, + null=True, + ), ), ] diff --git a/breathecode/freelance/migrations/0024_delete_repositoryissuewebhook.py b/breathecode/freelance/migrations/0024_delete_repositoryissuewebhook.py index a7717bc1e..3229cd6b0 100644 --- a/breathecode/freelance/migrations/0024_delete_repositoryissuewebhook.py +++ b/breathecode/freelance/migrations/0024_delete_repositoryissuewebhook.py @@ -6,9 +6,11 @@ class Migration(migrations.Migration): dependencies = [ - ('freelance', '0023_auto_20221221_1133'), + ("freelance", "0023_auto_20221221_1133"), ] operations = [ - migrations.DeleteModel(name='RepositoryIssueWebhook', ), + migrations.DeleteModel( + name="RepositoryIssueWebhook", + ), ] diff --git a/breathecode/freelance/models.py b/breathecode/freelance/models.py index 182ed2ef2..9ecde6ee1 100644 --- a/breathecode/freelance/models.py +++ b/breathecode/freelance/models.py @@ -3,7 +3,7 @@ from breathecode.authenticate.models import CredentialsGithub from breathecode.admissions.models import Academy -__all__ = ['Freelancer', 'Bill', 'Issue'] +__all__ = ["Freelancer", "Bill", "Issue"] class Freelancer(models.Model): @@ -41,10 +41,11 @@ def get_client_hourly_rate(self, project): class AcademyFreelanceProject(models.Model): title = models.CharField(max_length=255) - repository = models.URLField(max_length=255, help_text='Github repo where the event occured') + repository = models.URLField(max_length=255, help_text="Github repo where the event occured") academy = models.ForeignKey(Academy, on_delete=models.CASCADE) total_client_hourly_price = models.FloatField( - help_text='How much will the client be billed for each our on this project') + help_text="How much will the client be billed for each our on this project" + ) def __str__(self): return self.title @@ -57,23 +58,25 @@ class FreelanceProjectMember(models.Model): null=True, blank=True, default=None, - help_text='Paid to the freelancer, leave blank to use the default freelancer price') + help_text="Paid to the freelancer, leave blank to use the default freelancer price", + ) total_client_hourly_price = models.FloatField( null=True, blank=True, default=None, - help_text='Billed to the client on this project/freelancer, leave blank to use default from the project') + help_text="Billed to the client on this project/freelancer, leave blank to use default from the project", + ) -DUE = 'DUE' -APPROVED = 'APPROVED' -PAID = 'PAID' -IGNORED = 'IGNORED' +DUE = "DUE" +APPROVED = "APPROVED" +PAID = "PAID" +IGNORED = "IGNORED" BILL_STATUS = ( - (DUE, 'Due'), - (APPROVED, 'Approved'), - (IGNORED, 'Ignored'), - (PAID, 'Paid'), + (DUE, "Due"), + (APPROVED, "Approved"), + (IGNORED, "Ignored"), + (PAID, "Paid"), ) @@ -95,12 
+98,13 @@ class ProjectInvoice(models.Model): @staticmethod def get_or_create(repository, academy_slug, status): - invoice = ProjectInvoice.objects.filter(status=status, - project__repository__iexact=repository, - project__academy__slug=academy_slug).first() + invoice = ProjectInvoice.objects.filter( + status=status, project__repository__iexact=repository, project__academy__slug=academy_slug + ).first() if invoice is None: - project = AcademyFreelanceProject.objects.filter(repository__iexact=repository, - academy__slug=academy_slug).first() + project = AcademyFreelanceProject.objects.filter( + repository__iexact=repository, academy__slug=academy_slug + ).first() if project is None: return None @@ -117,12 +121,14 @@ class Bill(models.Model): total_duration_in_hours = models.FloatField(default=0) total_price = models.FloatField(default=0) - academy = models.ForeignKey(Academy, - on_delete=models.CASCADE, - null=True, - default=None, - blank=True, - help_text='Will help catalog billing grouped by academy') + academy = models.ForeignKey( + Academy, + on_delete=models.CASCADE, + null=True, + default=None, + blank=True, + help_text="Will help catalog billing grouped by academy", + ) reviewer = models.ForeignKey(User, on_delete=models.CASCADE, null=True, default=None) freelancer = models.ForeignKey(Freelancer, on_delete=models.CASCADE) @@ -132,17 +138,17 @@ class Bill(models.Model): updated_at = models.DateTimeField(auto_now=True, editable=False) -IGNORED = 'IGNORED' -DRAFT = 'DRAFT' -TODO = 'TODO' -DOING = 'DOING' -DONE = 'DONE' +IGNORED = "IGNORED" +DRAFT = "DRAFT" +TODO = "TODO" +DOING = "DOING" +DONE = "DONE" ISSUE_STATUS = ( - (IGNORED, 'Ignored'), - (DRAFT, 'Draft'), - (TODO, 'Todo'), - (DOING, 'Doing'), - (DONE, 'Done'), + (IGNORED, "Ignored"), + (DRAFT, "Draft"), + (TODO, "Todo"), + (DOING, "Doing"), + (DONE, "Done"), ) @@ -154,13 +160,16 @@ class Issue(models.Model): default=None, null=True, blank=True, - help_text='This is the only unique identifier we get from github, the issue number is not unique among repos') + help_text="This is the only unique identifier we get from github, the issue number is not unique among repos", + ) status = models.CharField(max_length=20, choices=ISSUE_STATUS, default=DRAFT) - status_message = models.CharField(max_length=255, - blank=True, - null=True, - default=None, - help_text='Important message like reason why not included on bill, etc.') + status_message = models.CharField( + max_length=255, + blank=True, + null=True, + default=None, + help_text="Important message like reason why not included on bill, etc.", + ) github_state = models.CharField(max_length=30, blank=True, null=True, default=None) github_number = models.PositiveIntegerField(blank=True, null=True, default=None) @@ -175,19 +184,23 @@ class Issue(models.Model): author = models.ForeignKey(User, on_delete=models.CASCADE, null=True, default=None, blank=True) freelancer = models.ForeignKey(Freelancer, on_delete=models.CASCADE) - academy = models.ForeignKey(Academy, - on_delete=models.CASCADE, - null=True, - default=None, - blank=True, - help_text='Will help catalog billing grouped by academy') + academy = models.ForeignKey( + Academy, + on_delete=models.CASCADE, + null=True, + default=None, + blank=True, + help_text="Will help catalog billing grouped by academy", + ) bill = models.ForeignKey(Bill, on_delete=models.CASCADE, null=True, default=None, blank=True) - invoice = models.ForeignKey(ProjectInvoice, - null=True, - default=None, - blank=True, - on_delete=models.SET_DEFAULT, - 
help_text='Attach this issue to a project invoice') + invoice = models.ForeignKey( + ProjectInvoice, + null=True, + default=None, + blank=True, + on_delete=models.SET_DEFAULT, + help_text="Attach this issue to a project invoice", + ) created_at = models.DateTimeField(auto_now_add=True, editable=False) updated_at = models.DateTimeField(auto_now=True, editable=False) diff --git a/breathecode/freelance/receivers.py b/breathecode/freelance/receivers.py index 347f8e346..beec44017 100644 --- a/breathecode/freelance/receivers.py +++ b/breathecode/freelance/receivers.py @@ -9,6 +9,6 @@ @receiver(github_webhook, sender=RepositoryWebhook) def post_webhook_received(sender, instance, **kwargs): - if instance.scope in ['issues', 'issue_comment']: - logger.debug('Received github webhook signal for issues') + if instance.scope in ["issues", "issue_comment"]: + logger.debug("Received github webhook signal for issues") async_repository_issue_github.delay(instance.id) diff --git a/breathecode/freelance/serializers.py b/breathecode/freelance/serializers.py index 276917c8c..8a3aff574 100644 --- a/breathecode/freelance/serializers.py +++ b/breathecode/freelance/serializers.py @@ -5,6 +5,7 @@ class AcademySerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. id = serpy.Field() name = serpy.Field() @@ -15,12 +16,14 @@ class AcademySerializer(serpy.Serializer): class PublicProfileSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. avatar_url = serpy.Field() class UserSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. id = serpy.Field() first_name = serpy.Field() @@ -31,6 +34,7 @@ class UserSerializer(serpy.Serializer): class SmallProjectSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. id = serpy.Field() title = serpy.Field() @@ -61,6 +65,7 @@ class SmallFreelancerMemberSerializer(serpy.Serializer): class SmallIssueSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. id = serpy.Field() title = serpy.Field() @@ -76,12 +81,14 @@ class SmallIssueSerializer(serpy.Serializer): included_in_bill = serpy.MethodField() def get_included_in_bill(self, obj): - return (obj.status_message is None or obj.status_message == '') and (obj.node_id is not None - and obj.node_id != '') + return (obj.status_message is None or obj.status_message == "") and ( + obj.node_id is not None and obj.node_id != "" + ) class BigProjectSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. id = serpy.Field() title = serpy.Field() @@ -94,6 +101,7 @@ def get_members(self, obj): class BigInvoiceSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. 
id = serpy.Field() status = serpy.Field() @@ -108,12 +116,13 @@ class BigInvoiceSerializer(serpy.Serializer): issues = serpy.MethodField() def get_issues(self, obj): - _issues = obj.issue_set.order_by('created_at').all() + _issues = obj.issue_set.order_by("created_at").all() return SmallIssueSerializer(_issues, many=True).data class BigBillSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. id = serpy.Field() status = serpy.Field() @@ -130,12 +139,13 @@ class BigBillSerializer(serpy.Serializer): issues = serpy.MethodField() def get_issues(self, obj): - _issues = obj.issue_set.order_by('created_at').all() + _issues = obj.issue_set.order_by("created_at").all() return SmallIssueSerializer(_issues, many=True).data class SmallBillSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. id = serpy.Field() status = serpy.Field() @@ -150,4 +160,4 @@ class BillSerializer(serializers.ModelSerializer): class Meta: model = Bill - exclude = ('freelancer', ) + exclude = ("freelancer",) diff --git a/breathecode/freelance/tasks.py b/breathecode/freelance/tasks.py index ad9b4452c..0fa254fb4 100644 --- a/breathecode/freelance/tasks.py +++ b/breathecode/freelance/tasks.py @@ -13,18 +13,18 @@ @shared_task(bind=True, base=WebhookTask, priority=TaskPriority.BILL.value) def async_repository_issue_github(self, webhook): - logger.debug('async_repository_issue_github') + logger.debug("async_repository_issue_github") payload = webhook.get_payload() comment = None - if 'comment' in payload: - comment = payload['comment'] + if "comment" in payload: + comment = payload["comment"] - issue = sync_single_issue(issue=payload['issue'], comment=comment, academy_slug=webhook.academy_slug) + issue = sync_single_issue(issue=payload["issue"], comment=comment, academy_slug=webhook.academy_slug) issue.status = update_status_based_on_github_action(webhook.webhook_action, issue=issue) issue.save() - if webhook.webhook_action in ['closed', 'reopened']: + if webhook.webhook_action in ["closed", "reopened"]: generate_freelancer_bill(issue.freelancer) return webhook diff --git a/breathecode/freelance/tests/actions/tests_sync_single_issue.py b/breathecode/freelance/tests/actions/tests_sync_single_issue.py index 8383f0086..6271ba501 100644 --- a/breathecode/freelance/tests/actions/tests_sync_single_issue.py +++ b/breathecode/freelance/tests/actions/tests_sync_single_issue.py @@ -1,6 +1,7 @@ """ Test Sync Single Issue """ + import json from ..mixins import FreelanceTestCase from ...actions import sync_single_issue @@ -11,56 +12,59 @@ def issue_item(data={}): return { - 'academy_id': None, - 'author_id': None, - 'bill_id': None, - 'body': 'team-learn-plan', - 'duration_in_hours': 0.0, - 'duration_in_minutes': 0.0, - 'freelancer_id': 1, - 'github_number': None, - 'github_state': None, - 'id': 1, - 'invoice_id': None, - 'node_id': '1', - 'repository_url': None, - 'status': 'DRAFT', - 'status_message': None, - 'title': 'dinner-surface-need', - 'url': 'http://miller.com/', - **data + "academy_id": None, + "author_id": None, + "bill_id": None, + "body": "team-learn-plan", + "duration_in_hours": 0.0, + "duration_in_minutes": 0.0, + "freelancer_id": 1, + "github_number": None, + "github_state": None, + "id": 1, + "invoice_id": None, + "node_id": "1", + "repository_url": None, + "status": "DRAFT", + "status_message": None, + "title": "dinner-surface-need", + "url": 
"http://miller.com/", + **data, } class GetOrCreateSessionTestSuite(FreelanceTestCase): - @patch('logging.Logger.info', MagicMock()) + @patch("logging.Logger.info", MagicMock()) def test_IssueWithNoId(self): result = sync_single_issue({}) self.assertEqual(result, None) - self.assertEqual(self.bc.database.list_of('freelance.Issue'), []) + self.assertEqual(self.bc.database.list_of("freelance.Issue"), []) self.assertEqual( Logger.info.call_args_list, - [call('Impossible to identify issue because it does not have a node_id (number:None), ignoring synch: {}')]) + [call("Impossible to identify issue because it does not have a node_id (number:None), ignoring synch: {}")], + ) - @patch('logging.Logger.info', MagicMock()) + @patch("logging.Logger.info", MagicMock()) def test_IssueWithFakeSlug(self): - with self.assertRaisesMessage(Exception, 'There was no freelancer associated with this issue'): + with self.assertRaisesMessage(Exception, "There was no freelancer associated with this issue"): - result = sync_single_issue({ - 'node_id': 1, - 'title': self.bc.fake.slug(), - 'body': self.bc.fake.slug(), - 'html_url': self.bc.fake.url() - }) + result = sync_single_issue( + { + "node_id": 1, + "title": self.bc.fake.slug(), + "body": self.bc.fake.slug(), + "html_url": self.bc.fake.url(), + } + ) - self.assertEqual(self.bc.database.list_of('freelance.Issue'), []) + self.assertEqual(self.bc.database.list_of("freelance.Issue"), []) self.assertEqual(Logger.info.call_args_list, []) - @patch('logging.Logger.info', MagicMock()) + @patch("logging.Logger.info", MagicMock()) def test_IssueWith_freelancer(self): models1 = self.bc.database.create(freelancer=1) @@ -70,26 +74,20 @@ def test_IssueWith_freelancer(self): body = self.bc.fake.slug() url = self.bc.fake.url() - result = sync_single_issue({ - 'node_id': 1, - 'title': title, - 'body': body, - 'html_url': url - }, - freelancer=models1.freelancer) - - self.assertEqual(self.bc.database.list_of('freelance.Issue'), [ - issue_item({ - 'node_id': str(1), - 'title': title, - 'body': body, - 'url': url - }), - ]) + result = sync_single_issue( + {"node_id": 1, "title": title, "body": body, "html_url": url}, freelancer=models1.freelancer + ) + + self.assertEqual( + self.bc.database.list_of("freelance.Issue"), + [ + issue_item({"node_id": str(1), "title": title, "body": body, "url": url}), + ], + ) self.assertEqual(Logger.info.call_args_list, []) - @patch('logging.Logger.info', MagicMock()) + @patch("logging.Logger.info", MagicMock()) def test_IssueWith_number(self): models1 = self.bc.database.create(freelancer=1) @@ -100,28 +98,21 @@ def test_IssueWith_number(self): url = self.bc.fake.url() number = random.randint(1, 10) - result = sync_single_issue({ - 'node_id': 1, - 'title': title, - 'body': body, - 'html_url': url, - 'number': number - }, - freelancer=models1.freelancer) - - self.assertEqual(self.bc.database.list_of('freelance.Issue'), [ - issue_item({ - 'node_id': str(1), - 'title': title, - 'body': body, - 'url': url, - 'github_number': number - }), - ]) + result = sync_single_issue( + {"node_id": 1, "title": title, "body": body, "html_url": url, "number": number}, + freelancer=models1.freelancer, + ) + + self.assertEqual( + self.bc.database.list_of("freelance.Issue"), + [ + issue_item({"node_id": str(1), "title": title, "body": body, "url": url, "github_number": number}), + ], + ) self.assertEqual(Logger.info.call_args_list, []) - @patch('logging.Logger.info', MagicMock()) + @patch("logging.Logger.info", MagicMock()) def test_resultSearch_isNotNone(self): models1 = 
self.bc.database.create(freelancer=1) @@ -131,31 +122,37 @@ def test_resultSearch_isNotNone(self): body = self.bc.fake.slug() result = self.bc.fake.url() url = self.bc.fake.url() - repository_url = 'https://github.com/etolopez/apiv2/asdasd' + repository_url = "https://github.com/etolopez/apiv2/asdasd" res = sync_single_issue( { - 'node_id': 1, - 'title': title, - 'body': body, - 'result': result, - 'html_url': repository_url, + "node_id": 1, + "title": title, + "body": body, + "result": result, + "html_url": repository_url, }, - freelancer=models1.freelancer) - - self.assertEqual(self.bc.database.list_of('freelance.Issue'), [ - issue_item({ - 'node_id': str(1), - 'title': title, - 'body': body, - 'url': repository_url, - 'repository_url': repository_url[:-7], - }), - ]) + freelancer=models1.freelancer, + ) + + self.assertEqual( + self.bc.database.list_of("freelance.Issue"), + [ + issue_item( + { + "node_id": str(1), + "title": title, + "body": body, + "url": repository_url, + "repository_url": repository_url[:-7], + } + ), + ], + ) self.assertEqual(Logger.info.call_args_list, []) - @patch('logging.Logger.info', MagicMock()) + @patch("logging.Logger.info", MagicMock()) def testing_hours(self): models1 = self.bc.database.create(freelancer=1) @@ -164,32 +161,41 @@ def testing_hours(self): title = self.bc.fake.slug() hours = random.random() * 50 minutes = hours * 60 - body = f'<hrs>{hours}</hrs>' + body = f"<hrs>{hours}</hrs>" url = self.bc.fake.url() - result = sync_single_issue({ - 'node_id': 1, - 'title': title, - 'body': body, - 'html_url': url, - }, - freelancer=models1.freelancer) - - self.assertEqual(self.bc.database.list_of('freelance.Issue'), [ - issue_item({ - 'node_id': str(1), - 'title': title, - 'body': body, - 'url': url, - 'duration_in_hours': hours, - 'duration_in_minutes': minutes - }), - ]) - - self.assertEqual(Logger.info.call_args_list, - [call(f'Updating issue 1 (None) hrs with {hours}, found <hrs> tag on updated body')]) - - @patch('logging.Logger.info', MagicMock()) + result = sync_single_issue( + { + "node_id": 1, + "title": title, + "body": body, + "html_url": url, + }, + freelancer=models1.freelancer, + ) + + self.assertEqual( + self.bc.database.list_of("freelance.Issue"), + [ + issue_item( + { + "node_id": str(1), + "title": title, + "body": body, + "url": url, + "duration_in_hours": hours, + "duration_in_minutes": minutes, + } + ), + ], + ) + + self.assertEqual( + Logger.info.call_args_list, + [call(f"Updating issue 1 (None) hrs with {hours}, found <hrs> tag on updated body")], + ) + + @patch("logging.Logger.info", MagicMock()) def testing_different_hours(self): models1 = self.bc.database.create(freelancer=1) @@ -199,160 +205,187 @@ def testing_different_hours(self): hours = random.random() * 50 another = random.random() * 50 minutes = hours * 60 - issue_body = f'<hrs>{another}</hrs>' - comment_body = f'<hrs>{hours}</hrs> <status>comment</status>' + issue_body = f"<hrs>{another}</hrs>" + comment_body = f"<hrs>{hours}</hrs> <status>comment</status>" url = self.bc.fake.url() - result = sync_single_issue({ - 'node_id': 1, - 'title': title, - 'body': issue_body, - 'html_url': url, - }, - freelancer=models1.freelancer, - comment={'body': comment_body}) - - self.assertEqual(self.bc.database.list_of('freelance.Issue'), [ - issue_item({ - 'node_id': str(1), - 'title': title, - 'body': issue_body, - 'url': url, - 'duration_in_hours': hours, - 'duration_in_minutes': minutes, - 'status_message': 'The status COMMENT is not valid', - }), - ]) - - 
self.assertEqual(Logger.info.call_args_list, [ - call(f'Updating issue 1 (None) hrs with {another}, found <hrs> tag on updated body'), - call(f'Updating issue 1 (None) hrs with {hours}, found <hrs> tag on new comment'), - call('The status COMMENT is not valid') - ]) - - @patch('logging.Logger.info', MagicMock()) + result = sync_single_issue( + { + "node_id": 1, + "title": title, + "body": issue_body, + "html_url": url, + }, + freelancer=models1.freelancer, + comment={"body": comment_body}, + ) + + self.assertEqual( + self.bc.database.list_of("freelance.Issue"), + [ + issue_item( + { + "node_id": str(1), + "title": title, + "body": issue_body, + "url": url, + "duration_in_hours": hours, + "duration_in_minutes": minutes, + "status_message": "The status COMMENT is not valid", + } + ), + ], + ) + + self.assertEqual( + Logger.info.call_args_list, + [ + call(f"Updating issue 1 (None) hrs with {another}, found <hrs> tag on updated body"), + call(f"Updating issue 1 (None) hrs with {hours}, found <hrs> tag on new comment"), + call("The status COMMENT is not valid"), + ], + ) + + @patch("logging.Logger.info", MagicMock()) def testing_correct_status_with_hours(self): models1 = self.bc.database.create(freelancer=1) Logger.info.call_args_list = [] - status = random.choice(['IGNORED', 'DRAFT', 'TODO', 'DOING', 'DONE']) + status = random.choice(["IGNORED", "DRAFT", "TODO", "DOING", "DONE"]) title = self.bc.fake.slug() hours = random.random() * 50 another = random.random() * 50 minutes = hours * 60 - issue_body = f'<hrs>{another}</hrs>' - comment_body = f'<hrs>{hours}</hrs> <status>{status}</status>' + issue_body = f"<hrs>{another}</hrs>" + comment_body = f"<hrs>{hours}</hrs> <status>{status}</status>" url = self.bc.fake.url() - result = sync_single_issue({ - 'node_id': 1, - 'title': title, - 'body': issue_body, - 'html_url': url, - }, - freelancer=models1.freelancer, - comment={'body': comment_body}) - - self.assertEqual(self.bc.database.list_of('freelance.Issue'), [ - issue_item({ - 'node_id': str(1), - 'title': title, - 'body': issue_body, - 'url': url, - 'duration_in_hours': hours, - 'duration_in_minutes': minutes, - 'status': status, - }), - ]) - - self.assertEqual(Logger.info.call_args_list, [ - call(f'Updating issue 1 (None) hrs with {another}, found <hrs> tag on updated body'), - call(f'Updating issue 1 (None) hrs with {hours}, found <hrs> tag on new comment'), - call(f'Updating issue 1 (None) status to {status} found <status> tag on new comment') - ]) - - @patch('logging.Logger.info', MagicMock()) + result = sync_single_issue( + { + "node_id": 1, + "title": title, + "body": issue_body, + "html_url": url, + }, + freelancer=models1.freelancer, + comment={"body": comment_body}, + ) + + self.assertEqual( + self.bc.database.list_of("freelance.Issue"), + [ + issue_item( + { + "node_id": str(1), + "title": title, + "body": issue_body, + "url": url, + "duration_in_hours": hours, + "duration_in_minutes": minutes, + "status": status, + } + ), + ], + ) + + self.assertEqual( + Logger.info.call_args_list, + [ + call(f"Updating issue 1 (None) hrs with {another}, found <hrs> tag on updated body"), + call(f"Updating issue 1 (None) hrs with {hours}, found <hrs> tag on new comment"), + call(f"Updating issue 1 (None) status to {status} found <status> tag on new comment"), + ], + ) + + @patch("logging.Logger.info", MagicMock()) def testing_Assignee_FreelancerIsNone(self): - with self.assertRaisesMessage(Exception, 'There was no freelancer associated with this issue'): + with self.assertRaisesMessage(Exception, 
"There was no freelancer associated with this issue"): models1 = self.bc.database.create(freelancer=None) Logger.info.call_args_list = [] - status = random.choice(['IGNORED', 'DRAFT', 'TODO', 'DOING', 'DONE']) + status = random.choice(["IGNORED", "DRAFT", "TODO", "DOING", "DONE"]) title = self.bc.fake.slug() hours = random.random() * 50 another = random.random() * 50 minutes = hours * 60 assignees = random.random() * 50 - issue_body = f'<hrs>{another}</hrs>' - comment_body = f'<hrs>{hours}</hrs> <status>{status}</status>' + issue_body = f"<hrs>{another}</hrs>" + comment_body = f"<hrs>{hours}</hrs> <status>{status}</status>" url = self.bc.fake.url() - result = sync_single_issue({ - 'node_id': 1, - 'title': title, - 'body': issue_body, - 'html_url': url, - }, - freelancer=None) - - issue_item({ - 'node_id': str(1), - 'title': title, - 'body': issue_body, - 'url': url, - 'assignees': assignees, - 'duration_in_hours': hours, - 'duration_in_minutes': minutes, - 'status': status, - }) + result = sync_single_issue( + { + "node_id": 1, + "title": title, + "body": issue_body, + "html_url": url, + }, + freelancer=None, + ) + + issue_item( + { + "node_id": str(1), + "title": title, + "body": issue_body, + "url": url, + "assignees": assignees, + "duration_in_hours": hours, + "duration_in_minutes": minutes, + "status": status, + } + ) self.assertEqual(Logger.info.call_args_list, []) - @patch('logging.Logger.info', MagicMock()) + @patch("logging.Logger.info", MagicMock()) def testing_AssigneeID_FreelancerIsNone(self): models1 = self.bc.database.create(freelancer=None) Logger.info.call_args_list = [] assignment_id = 3 - status = random.choice(['IGNORED', 'DRAFT', 'TODO', 'DOING', 'DONE']) + status = random.choice(["IGNORED", "DRAFT", "TODO", "DOING", "DONE"]) title = self.bc.fake.slug() hours = random.random() * 50 another = random.random() * 50 minutes = hours * 60 freelancer: None - assignees = [{'id': assignment_id}] + assignees = [{"id": assignment_id}] assigne = assignees[0] - issue_body = f'<hrs>{another}</hrs>' - comment_body = f'<hrs>{hours}</hrs> <status>{status}</status>' + issue_body = f"<hrs>{another}</hrs>" + comment_body = f"<hrs>{hours}</hrs> <status>{status}</status>" url = self.bc.fake.url() with self.assertRaisesMessage( - Exception, - f'Assigned github user: {assigne["id"]} is not a freelancer but is the main user associated to this issue' + Exception, + f'Assigned github user: {assigne["id"]} is not a freelancer but is the main user associated to this issue', ): result = sync_single_issue( { - 'node_id': 1, - 'title': title, - 'body': issue_body, - 'html_url': url, - 'assignees': assignees, + "node_id": 1, + "title": title, + "body": issue_body, + "html_url": url, + "assignees": assignees, }, - freelancer=None) - - issue_item({ - 'node_id': str(1), - 'title': title, - 'body': issue_body, - 'url': url, - 'assignees': assignees, - 'freelancer': freelancer, - 'duration_in_hours': hours, - 'duration_in_minutes': minutes, - 'status': status, - }) + freelancer=None, + ) + + issue_item( + { + "node_id": str(1), + "title": title, + "body": issue_body, + "url": url, + "assignees": assignees, + "freelancer": freelancer, + "duration_in_hours": hours, + "duration_in_minutes": minutes, + "status": status, + } + ) self.assertEqual(Logger.info.call_args_list, []) diff --git a/breathecode/freelance/tests/mixins/__init__.py b/breathecode/freelance/tests/mixins/__init__.py index 4d05c8e02..852deee4e 100644 --- a/breathecode/freelance/tests/mixins/__init__.py +++ 
b/breathecode/freelance/tests/mixins/__init__.py @@ -1,4 +1,5 @@ """ Admissions mixins """ + from .freelance_test_case import FreelanceTestCase # noqa: F401 diff --git a/breathecode/freelance/tests/mixins/freelance_test_case.py b/breathecode/freelance/tests/mixins/freelance_test_case.py index 50cf2bfa1..e4a54830e 100644 --- a/breathecode/freelance/tests/mixins/freelance_test_case.py +++ b/breathecode/freelance/tests/mixins/freelance_test_case.py @@ -1,17 +1,25 @@ """ Collections of mixins used to login in authorize microservice """ + import re from unittest.mock import MagicMock, patch from django.urls.base import reverse_lazy from rest_framework.test import APITestCase -from breathecode.tests.mixins import (GenerateModelsMixin, CacheMixin, GenerateQueriesMixin, DatetimeMixin, ICallMixin, - BreathecodeMixin) +from breathecode.tests.mixins import ( + GenerateModelsMixin, + CacheMixin, + GenerateQueriesMixin, + DatetimeMixin, + ICallMixin, + BreathecodeMixin, +) from rest_framework import status -class FreelanceTestCase(APITestCase, GenerateModelsMixin, CacheMixin, GenerateQueriesMixin, DatetimeMixin, ICallMixin, - BreathecodeMixin): +class FreelanceTestCase( + APITestCase, GenerateModelsMixin, CacheMixin, GenerateQueriesMixin, DatetimeMixin, ICallMixin, BreathecodeMixin +): """AdmissionsTestCase with auth methods""" def setUp(self): diff --git a/breathecode/freelance/urls.py b/breathecode/freelance/urls.py index d2c0d855c..1264ab685 100644 --- a/breathecode/freelance/urls.py +++ b/breathecode/freelance/urls.py @@ -1,26 +1,38 @@ from django.urls import path -from .views import (AcademyBillView, AcademyInvoiceMemberView, AcademyProjectInvoiceView, AcademyProjectMemberView, - AcademyProjectView, BillView, SingleBillView, SingleInvoiceView, get_issues, get_latest_bill, - render_html_all_bills, render_html_bill, sync_user_issues) +from .views import ( + AcademyBillView, + AcademyInvoiceMemberView, + AcademyProjectInvoiceView, + AcademyProjectMemberView, + AcademyProjectView, + BillView, + SingleBillView, + SingleInvoiceView, + get_issues, + get_latest_bill, + render_html_all_bills, + render_html_bill, + sync_user_issues, +) -app_name = 'freelance' +app_name = "freelance" urlpatterns = [ - path('bills', BillView.as_view()), - path('bills/html', render_html_all_bills), - path('bills/<int:id>/html', render_html_bill), - path('bills/<int:id>', SingleBillView.as_view()), - path('invoice/<int:id>', SingleInvoiceView.as_view()), - path('issues', get_issues), - path('sync/user', sync_user_issues), - path('sync/user/<int:user_id>/bill', get_latest_bill), - path('academy/bill', AcademyBillView.as_view()), - path('academy/bill/<int:bill_id>', AcademyBillView.as_view()), - path('academy/project', AcademyProjectView.as_view()), - path('academy/project/<int:project_id>', AcademyProjectView.as_view()), - path('academy/project/member', AcademyProjectMemberView.as_view()), - path('academy/project/invoice', AcademyProjectInvoiceView.as_view()), - path('academy/project/<int:project_id>/invoice', AcademyProjectInvoiceView.as_view()), - path('academy/project/invoice/<int:invoice_id>', AcademyProjectInvoiceView.as_view()), - path('academy/project/invoice/<int:invoice_id>/member', AcademyInvoiceMemberView.as_view()), + path("bills", BillView.as_view()), + path("bills/html", render_html_all_bills), + path("bills/<int:id>/html", render_html_bill), + path("bills/<int:id>", SingleBillView.as_view()), + path("invoice/<int:id>", SingleInvoiceView.as_view()), + path("issues", get_issues), + path("sync/user", 
sync_user_issues), + path("sync/user/<int:user_id>/bill", get_latest_bill), + path("academy/bill", AcademyBillView.as_view()), + path("academy/bill/<int:bill_id>", AcademyBillView.as_view()), + path("academy/project", AcademyProjectView.as_view()), + path("academy/project/<int:project_id>", AcademyProjectView.as_view()), + path("academy/project/member", AcademyProjectMemberView.as_view()), + path("academy/project/invoice", AcademyProjectInvoiceView.as_view()), + path("academy/project/<int:project_id>/invoice", AcademyProjectInvoiceView.as_view()), + path("academy/project/invoice/<int:invoice_id>", AcademyProjectInvoiceView.as_view()), + path("academy/project/invoice/<int:invoice_id>/member", AcademyInvoiceMemberView.as_view()), ] diff --git a/breathecode/freelance/views.py b/breathecode/freelance/views.py index ba2dd4441..5f6126f96 100644 --- a/breathecode/freelance/views.py +++ b/breathecode/freelance/views.py @@ -40,66 +40,64 @@ def render_html_all_bills(request, token): def map_status(_status): status_maper = { - 'DUE': 'under review', - 'APPROVED': 'ready to pay', - 'PAID': 'already paid', - 'IGNORED': 'ignored', + "DUE": "under review", + "APPROVED": "ready to pay", + "PAID": "already paid", + "IGNORED": "ignored", } - if _status not in status_maper: return _status + if _status not in status_maper: + return _status return status_maper[_status] lookup = {} - status = 'APPROVED' - if 'status' in request.GET: - status = request.GET.get('status') - lookup['status'] = status.upper() + status = "APPROVED" + if "status" in request.GET: + status = request.GET.get("status") + lookup["status"] = status.upper() - if 'academy' in request.GET: - lookup['academy__id__in'] = request.GET.get('academy').split(',') + if "academy" in request.GET: + lookup["academy__id__in"] = request.GET.get("academy").split(",") items = Bill.objects.filter(**lookup).exclude(academy__isnull=True) serializer = BigBillSerializer(items, many=True) total_price = 0 for bill in serializer.data: - total_price += bill['total_price'] + total_price += bill["total_price"] data = { - 'status': status, - 'token': token.key, - 'title': f'Payments {map_status(status)}', - 'possible_status': [(key, map_status(key)) for key, label in BILL_STATUS], - 'bills': serializer.data, - 'total_price': total_price + "status": status, + "token": token.key, + "title": f"Payments {map_status(status)}", + "possible_status": [(key, map_status(key)) for key, label in BILL_STATUS], + "bills": serializer.data, + "total_price": total_price, } - template = get_template_content('bills', data) - return HttpResponse(template['html']) + template = get_template_content("bills", data) + return HttpResponse(template["html"]) -@api_view(['GET']) +@api_view(["GET"]) @permission_classes([AllowAny]) def render_html_bill(request, id=None): item = Bill.objects.filter(id=id).first() if item is None: - template = get_template_content('message', {'message': 'Bill not found'}) - return HttpResponse(template['html']) + template = get_template_content("message", {"message": "Bill not found"}) + return HttpResponse(template["html"]) else: serializer = BigBillSerializer(item, many=False) - status_map = {'DUE': 'UNDER_REVIEW', 'APPROVED': 'READY_TO_PAY', 'PAID': 'ALREADY PAID'} + status_map = {"DUE": "UNDER_REVIEW", "APPROVED": "READY_TO_PAY", "PAID": "ALREADY PAID"} data = { **serializer.data, - 'issues': - SmallIssueSerializer(item.issue_set.all(), many=True).data, - 'status': - status_map[serializer.data['status']], - 'title': - f'Freelancer { 
serializer.data["freelancer"]["user"]["first_name"] } ' + "issues": SmallIssueSerializer(item.issue_set.all(), many=True).data, + "status": status_map[serializer.data["status"]], + "title": f'Freelancer { serializer.data["freelancer"]["user"]["first_name"] } ' f'{ serializer.data["freelancer"]["user"]["last_name"] } - Invoice { item.id }', } - template = get_template_content('invoice', data, academy=item.academy) - return HttpResponse(template['html']) + template = get_template_content("invoice", data, academy=item.academy) + return HttpResponse(template["html"]) # Create your views here. @@ -113,19 +111,19 @@ def get(self, request, format=None): items = Bill.objects.all() lookup = {} - if 'freelancer' in self.request.GET: - user_id = self.request.GET.get('freelancer') - lookup['freelancer__id'] = user_id + if "freelancer" in self.request.GET: + user_id = self.request.GET.get("freelancer") + lookup["freelancer__id"] = user_id - if 'user' in self.request.GET: - user_id = self.request.GET.get('user') - lookup['freelancer__user__id'] = user_id + if "user" in self.request.GET: + user_id = self.request.GET.get("user") + lookup["freelancer__user__id"] = user_id - if 'status' in self.request.GET: - status = self.request.GET.get('status') - lookup['status'] = status + if "status" in self.request.GET: + status = self.request.GET.get("status") + lookup["status"] = status - items = items.filter(**lookup).order_by('-created_at') + items = items.filter(**lookup).order_by("-created_at") serializer = BillSerializer(items, many=True) return Response(serializer.data) @@ -143,9 +141,9 @@ class AcademyBillView(APIView): List all snippets, or create a new snippet. """ - extensions = APIViewExtensions(sort='-created_at', paginate=True) + extensions = APIViewExtensions(sort="-created_at", paginate=True) - @capable_of('read_freelancer_bill') + @capable_of("read_freelancer_bill") def get(self, request, academy_id, bill_id=None): def get_freelancer_by_name_or_email(query_name, query): @@ -153,13 +151,14 @@ def get_freelancer_by_name_or_email(query_name, query): query = query.filter( Q(freelancer__user__first_name__icontains=term) | Q(freelancer__user__last_name__icontains=term) - | Q(freelancer__user__email__icontains=term)) + | Q(freelancer__user__email__icontains=term) + ) return query if bill_id is not None: item = Bill.objects.filter(id=id).first() if item is None: - raise serializers.ValidationError('Bill not found', code=404) + raise serializers.ValidationError("Bill not found", code=404) else: serializer = BillSerializer(item, many=False) return Response(serializer.data) @@ -168,53 +167,53 @@ def get_freelancer_by_name_or_email(query_name, query): items = Bill.objects.filter(academy__id=academy_id) lookup = {} - like = request.GET.get('like', None) + like = request.GET.get("like", None) if like is not None: items = get_freelancer_by_name_or_email(like, items) - freelancer = self.request.GET.get('freelancer', None) + freelancer = self.request.GET.get("freelancer", None) if freelancer is not None: - lookup['freelancer__id'] = freelancer.id + lookup["freelancer__id"] = freelancer.id - status = self.request.GET.get('status', '') - if status != '': - lookup['status__in'] = status.split(',') + status = self.request.GET.get("status", "") + if status != "": + lookup["status__in"] = status.split(",") - user_id = self.request.GET.get('user', None) + user_id = self.request.GET.get("user", None) if user_id is not None: - lookup['freelancer__user__id'] = user_id + lookup["freelancer__user__id"] = user_id - reviewer = 
self.request.GET.get('reviewer', None) + reviewer = self.request.GET.get("reviewer", None) if reviewer is not None: - lookup['reviewer__id'] = reviewer.id + lookup["reviewer__id"] = reviewer.id - sort = self.request.GET.get('sort', '-created_at') + sort = self.request.GET.get("sort", "-created_at") items = items.filter(**lookup).order_by(sort) items = handler.queryset(items) serializer = SmallBillSerializer(items, many=True) return handler.response(serializer.data) - @capable_of('crud_freelancer_bill') + @capable_of("crud_freelancer_bill") def put(self, request, bill_id=None, academy_id=None): # Bulk Action to Modify Status if bill_id is None: - bill_status = request.data['status'] - bills = request.data['bills'] + bill_status = request.data["status"] + bills = request.data["bills"] - if bill_status is None or bill_status == '': - raise ValidationException('Status not found in the body of the request', code=404) + if bill_status is None or bill_status == "": + raise ValidationException("Status not found in the body of the request", code=404) - if bill_status not in ['DUE', 'APPROVED', 'PAID', 'IGNORED']: - raise ValidationException(f'Status provided ({bill_status}) is not a valid status', code=404) + if bill_status not in ["DUE", "APPROVED", "PAID", "IGNORED"]: + raise ValidationException(f"Status provided ({bill_status}) is not a valid status", code=404) if bills is None or len(bills) == 0: - raise ValidationException('Bills not found in the body of the request', code=404) + raise ValidationException("Bills not found in the body of the request", code=404) for bill in bills: item = Bill.objects.filter(id=bill, academy__id=academy_id).first() if item is None: - raise ValidationException('Bill not found for this academy', code=404) + raise ValidationException("Bill not found for this academy", code=404) item.status = bill_status item.save() bill = item @@ -223,7 +222,7 @@ def put(self, request, bill_id=None, academy_id=None): item = Bill.objects.filter(id=bill_id, academy__id=academy_id).first() if item is None: - raise ValidationException('Bill not found for this academy', code=404) + raise ValidationException("Bill not found for this academy", code=404) serializer = BillSerializer(item, data=request.data, many=False) if serializer.is_valid(): @@ -237,28 +236,28 @@ class AcademyProjectView(APIView): List all snippets, or create a new snippet. 
""" - @capable_of('read_freelance_projects') + @capable_of("read_freelance_projects") def get(self, request, academy_id, project_id=None): if project_id is not None: item = AcademyFreelanceProject.objects.filter(id=project_id, academy__id=academy_id).first() if item is None: - raise ValidationException('Project not found on this academy', 404) + raise ValidationException("Project not found on this academy", 404) serializer = BigProjectSerializer(item, many=False) return Response(serializer.data) items = AcademyFreelanceProject.objects.filter(academy__id=academy_id) lookup = {} - if 'like' in self.request.GET: - like = self.request.GET.get('like') - lookup['title__icontains'] = like + if "like" in self.request.GET: + like = self.request.GET.get("like") + lookup["title__icontains"] = like - if 'repository' in self.request.GET: - repository = self.request.GET.get('repository') - lookup['repository__icontains'] = repository + if "repository" in self.request.GET: + repository = self.request.GET.get("repository") + lookup["repository__icontains"] = repository - items = items.filter(**lookup).order_by('-title') + items = items.filter(**lookup).order_by("-title") serializer = BigProjectSerializer(items, many=True) return Response(serializer.data) @@ -269,7 +268,7 @@ class AcademyProjectMemberView(APIView): List all snippets, or create a new snippet. """ - @capable_of('read_freelance_projects') + @capable_of("read_freelance_projects") def get(self, request, academy_id): items = FreelanceProjectMember.objects.filter(project__academy__id=academy_id) @@ -280,21 +279,21 @@ def find_user_by_name(query_name, qs): qs = qs.filter(Q(first_name__icontains=term) | Q(last_name__icontains=term)) return qs - if 'like' in self.request.GET: - like = self.request.GET.get('like') - if '@' in like: + if "like" in self.request.GET: + like = self.request.GET.get("like") + if "@" in like: items = items.filter(Q(freelancer__user__email__icontains=like)) else: for term in like.split(): items = items.filter( - Q(freelancer__user__first_name__icontains=term) - | Q(freelancer__user__last_name__icontains=term)) + Q(freelancer__user__first_name__icontains=term) | Q(freelancer__user__last_name__icontains=term) + ) - if 'project' in self.request.GET: - project = self.request.GET.get('project') - lookup['project__id'] = project + if "project" in self.request.GET: + project = self.request.GET.get("project") + lookup["project__id"] = project - items = items.filter(**lookup).order_by('-freelancer__user__first_name') + items = items.filter(**lookup).order_by("-freelancer__user__first_name") serializer = SmallFreelancerMemberSerializer(items, many=True) return Response(serializer.data) @@ -304,13 +303,14 @@ class AcademyInvoiceMemberView(APIView): """ List all snippets, or create a new snippet. 
""" + permission_classes = [AllowAny] def get(self, request, invoice_id): invoice = ProjectInvoice.objects.filter(id=invoice_id).first() if invoice is None: - raise ValidationException(f'No invoice with id {invoice_id}', slug='invoice-not-found') + raise ValidationException(f"No invoice with id {invoice_id}", slug="invoice-not-found") items = FreelanceProjectMember.objects.filter(project__id=invoice.project.id) lookup = {} @@ -320,21 +320,21 @@ def find_user_by_name(query_name, qs): qs = qs.filter(Q(first_name__icontains=term) | Q(last_name__icontains=term)) return qs - if 'like' in self.request.GET: - like = self.request.GET.get('like') - if '@' in like: + if "like" in self.request.GET: + like = self.request.GET.get("like") + if "@" in like: items = items.filter(Q(freelancer__user__email__icontains=like)) else: for term in like.split(): items = items.filter( - Q(freelancer__user__first_name__icontains=term) - | Q(freelancer__user__last_name__icontains=term)) + Q(freelancer__user__first_name__icontains=term) | Q(freelancer__user__last_name__icontains=term) + ) - if 'project' in self.request.GET: - project = self.request.GET.get('project') - lookup['project__id'] = project + if "project" in self.request.GET: + project = self.request.GET.get("project") + lookup["project__id"] = project - items = items.filter(**lookup).order_by('-freelancer__user__first_name') + items = items.filter(**lookup).order_by("-freelancer__user__first_name") serializer = SmallFreelancerMemberSerializer(items, many=True) return Response(serializer.data) @@ -345,54 +345,54 @@ class AcademyProjectInvoiceView(APIView): List all snippets, or create a new snippet. """ - @capable_of('read_project_invoice') + @capable_of("read_project_invoice") def get(self, request, invoice_id=None, academy_id=None, project_id=None): if invoice_id is not None: item = ProjectInvoice.objects.filter(project__academy__id=academy_id, id=invoice_id).first() if item is None: - raise ValidationException('Project Invoice user not found', 404) + raise ValidationException("Project Invoice user not found", 404) serializer = BigInvoiceSerializer(item, many=False) return Response(serializer.data) items = ProjectInvoice.objects.filter(project__academy__id=academy_id) lookup = {} - if 'like' in self.request.GET: - like = self.request.GET.get('like') - lookup['project__title__icontains'] = like + if "like" in self.request.GET: + like = self.request.GET.get("like") + lookup["project__title__icontains"] = like - project = self.request.GET.get('project', project_id) + project = self.request.GET.get("project", project_id) if project: - lookup['project__id'] = project + lookup["project__id"] = project - if 'status' in self.request.GET: - status = self.request.GET.get('status') - lookup['status__iexact'] = status + if "status" in self.request.GET: + status = self.request.GET.get("status") + lookup["status__iexact"] = status - if 'after' in self.request.GET: - after = self.request.GET.get('after') - after = datetime.strptime(after, '%Y-%m-%d').date() + if "after" in self.request.GET: + after = self.request.GET.get("after") + after = datetime.strptime(after, "%Y-%m-%d").date() items = items.filter(created_at__gte=after) - if 'before' in self.request.GET: - before = self.request.GET.get('before') - before = datetime.strptime(before, '%Y-%m-%d').date() + if "before" in self.request.GET: + before = self.request.GET.get("before") + before = datetime.strptime(before, "%Y-%m-%d").date() items = items.filter(created_at__lte=before) - items = 
items.filter(**lookup).order_by('-created_at') + items = items.filter(**lookup).order_by("-created_at") serializer = BillSerializer(items, many=True) return Response(serializer.data) - @capable_of('crud_project_invoice') + @capable_of("crud_project_invoice") def post(self, request, academy_id=None, project_id=None): if project_id is None: - raise ValidationException('Missing project ID on the URL', code=404, slug='argument-not-provided') + raise ValidationException("Missing project ID on the URL", code=404, slug="argument-not-provided") project = AcademyFreelanceProject.objects.filter(id=project_id, academy__id=academy_id).first() if project is None: - raise ValidationException('This project does not exist for this academy', code=404, slug='not-found') + raise ValidationException("This project does not exist for this academy", code=404, slug="not-found") invoices = generate_project_invoice(project) serializer = BigInvoiceSerializer(invoices, many=True) @@ -403,12 +403,13 @@ class SingleInvoiceView(APIView): """ List all snippets, or create a new snippet. """ + permission_classes = [AllowAny] def get(self, request, id): item = ProjectInvoice.objects.filter(id=id).first() if item is None: - raise ValidationException('Invoice not found', slug='invoice-not-found', code=404) + raise ValidationException("Invoice not found", slug="invoice-not-found", code=404) else: serializer = BigInvoiceSerializer(item, many=False) return Response(serializer.data) @@ -422,16 +423,16 @@ class SingleBillView(APIView): def get(self, request, id): item = Bill.objects.filter(id=id).first() if item is None: - raise serializers.ValidationError('Bill not found', code=404) + raise serializers.ValidationError("Bill not found", code=404) else: serializer = BigBillSerializer(item, many=False) return Response(serializer.data) - @capable_of('crud_freelancer_bill') + @capable_of("crud_freelancer_bill") def put(self, request, id=None, academy_id=None): item = Bill.objects.filter(id=id, academy__id=academy_id).first() if item is None: - raise serializers.ValidationError('Bill not found for this academy', code=404) + raise serializers.ValidationError("Bill not found for this academy", code=404) serializer = BillSerializer(item, data=request.data, many=False) if serializer.is_valid(): @@ -441,7 +442,7 @@ def put(self, request, id=None, academy_id=None): # Create your views here. -@api_view(['GET']) +@api_view(["GET"]) def sync_user_issues(request): from .actions import sync_user_issues @@ -450,32 +451,32 @@ def sync_user_issues(request): # Create your views here. -@api_view(['GET']) +@api_view(["GET"]) def get_issues(request): issues = Issue.objects.all() lookup = {} - if 'freelancer' in request.GET: - user_id = request.GET.get('freelancer') - lookup['freelancer__id'] = user_id + if "freelancer" in request.GET: + user_id = request.GET.get("freelancer") + lookup["freelancer__id"] = user_id - if 'bill' in request.GET: - _id = request.GET.get('bill') - lookup['bill__id'] = _id + if "bill" in request.GET: + _id = request.GET.get("bill") + lookup["bill__id"] = _id - if 'status' in request.GET: - _status = request.GET.get('status') - lookup['status'] = _status.upper() + if "status" in request.GET: + _status = request.GET.get("status") + lookup["status"] = _status.upper() - issues = issues.filter(**lookup).order_by('-created_at') + issues = issues.filter(**lookup).order_by("-created_at") serializer = SmallIssueSerializer(issues, many=True) return Response(serializer.data, status=status.HTTP_200_OK) # Create your views here. 
-@api_view(['GET']) +@api_view(["GET"]) def get_latest_bill(request, user_id=None): freelancer = Freelancer.objects.filter(user__id=user_id).first() reviewer = None @@ -484,7 +485,7 @@ def get_latest_bill(request, user_id=None): reviewer = Bill.objects.filter(reviewer__id=user_id).first() if freelancer is None or reviewer is None: - raise serializers.ValidationError('Freelancer or reviewer not found', code=404) + raise serializers.ValidationError("Freelancer or reviewer not found", code=404) open_bills = generate_freelancer_bill(freelancer or reviewer.freelancer) return Response(open_bills, status=status.HTTP_200_OK) diff --git a/breathecode/marketing/actions.py b/breathecode/marketing/actions.py index 1a2af483a..553ed16bf 100644 --- a/breathecode/marketing/actions.py +++ b/breathecode/marketing/actions.py @@ -22,24 +22,30 @@ logger = getLogger(__name__) -GOOGLE_CLOUD_KEY = os.getenv('GOOGLE_CLOUD_KEY') -MAIL_ABSTRACT_KEY = os.getenv('MAIL_ABSTRACT_KEY') +GOOGLE_CLOUD_KEY = os.getenv("GOOGLE_CLOUD_KEY") +MAIL_ABSTRACT_KEY = os.getenv("MAIL_ABSTRACT_KEY") def get_save_leads(): - return os.getenv('SAVE_LEADS') + return os.getenv("SAVE_LEADS") def bind_formentry_with_webhook(webhook): payload = json.loads(webhook.payload) - entry = FormEntry.objects.filter(ac_deal_id=payload['deal[id]']).order_by('-created_at').first() - if entry is None and 'deal[contactid]' in payload: - entry = FormEntry.objects.filter(ac_contact_id=payload['deal[contactid]'], - ac_deal_id__isnull=True).order_by('-created_at').first() - if entry is None and 'deal[contact_email]' in payload: - entry = FormEntry.objects.filter(email=payload['deal[contact_email]'], - ac_deal_id__isnull=True).order_by('-created_at').first() + entry = FormEntry.objects.filter(ac_deal_id=payload["deal[id]"]).order_by("-created_at").first() + if entry is None and "deal[contactid]" in payload: + entry = ( + FormEntry.objects.filter(ac_contact_id=payload["deal[contactid]"], ac_deal_id__isnull=True) + .order_by("-created_at") + .first() + ) + if entry is None and "deal[contact_email]" in payload: + entry = ( + FormEntry.objects.filter(email=payload["deal[contact_email]"], ac_deal_id__isnull=True) + .order_by("-created_at") + .first() + ) if entry is None: return False @@ -76,63 +82,73 @@ def validate_email(email, lang): } """ - resp = requests.get(f'https://emailvalidation.abstractapi.com/v1/?api_key={MAIL_ABSTRACT_KEY}&email={email}', - timeout=10) + resp = requests.get( + f"https://emailvalidation.abstractapi.com/v1/?api_key={MAIL_ABSTRACT_KEY}&email={email}", timeout=10 + ) data = resp.json() - if 'error' in data: - if 'message' in data['error']: - raise Exception(data['error']['message']) + if "error" in data: + if "message" in data["error"]: + raise Exception(data["error"]["message"]) raise ValidationException( - translation(lang, - en='Error while validating email address', - es='Se ha producido un error validando tu dirección de correo electrónico', - slug='email-validation-error')) + translation( + lang, + en="Error while validating email address", + es="Se ha producido un error validando tu dirección de correo electrónico", + slug="email-validation-error", + ) + ) - if 'is_disposable_email' in data and data['is_disposable_email']['value'] == True: + if "is_disposable_email" in data and data["is_disposable_email"]["value"] == True: raise ValidationException( translation( lang, - en='It seems you are using a disposable email service. 
Please provide a different email address', - es= - 'Parece que estás utilizando un proveedor de correos electronicos temporales. Por favor cambia tu dirección de correo electrónico.', - slug='disposable-email')) + en="It seems you are using a disposable email service. Please provide a different email address", + es="Parece que estás utilizando un proveedor de correos electronicos temporales. Por favor cambia tu dirección de correo electrónico.", + slug="disposable-email", + ) + ) - if (('is_mx_found' in data and data['is_mx_found']['value'] == False) - or ('is_smtp_valid' in data and data['is_smtp_valid']['value'] == False)): + if ("is_mx_found" in data and data["is_mx_found"]["value"] == False) or ( + "is_smtp_valid" in data and data["is_smtp_valid"]["value"] == False + ): raise ValidationException( translation( lang, - en='The email you have provided seems invalid, please provide a different email address.', - es='El correo electrónico que haz especificado parece inválido, por favor corrige tu correo electronico', - slug='invalid-email')) + en="The email you have provided seems invalid, please provide a different email address.", + es="El correo electrónico que haz especificado parece inválido, por favor corrige tu correo electronico", + slug="invalid-email", + ) + ) - if 'quality_score' in data and float(data['quality_score']) <= 0.60: - raise ValidationException(translation( - lang, - en='The email address seems to have poor quality. Are you able to provide a different email address?', - es= - 'El correo electrónico que haz especificado parece de mala calidad. ¿Podrías especificarnos otra dirección?', - slug='poor-quality-email'), - data=data) + if "quality_score" in data and float(data["quality_score"]) <= 0.60: + raise ValidationException( + translation( + lang, + en="The email address seems to have poor quality. Are you able to provide a different email address?", + es="El correo electrónico que haz especificado parece de mala calidad. 
¿Podrías especificarnos otra dirección?", + slug="poor-quality-email", + ), + data=data, + ) - email_quality = float(data['quality_score']) - data['email_quality'] = email_quality - split_email = email.split('@') + email_quality = float(data["quality_score"]) + data["email_quality"] = email_quality + split_email = email.split("@") email_status = { - 'email': email, - 'user': split_email[0], - 'domain': split_email[1], - 'format_valid': data['is_valid_format']['value'], - 'mx_found': data['is_mx_found']['value'], - 'smtp_check': data['is_smtp_valid']['value'], - 'catch_all': data['is_catchall_email']['value'], - 'role': data['is_role_email']['value'], - 'disposable': data['is_disposable_email']['value'], - 'free': data['is_free_email']['value'], - 'score': email_quality + "email": email, + "user": split_email[0], + "domain": split_email[1], + "format_valid": data["is_valid_format"]["value"], + "mx_found": data["is_mx_found"]["value"], + "smtp_check": data["is_smtp_valid"]["value"], + "catch_all": data["is_catchall_email"]["value"], + "role": data["is_role_email"]["value"], + "disposable": data["is_disposable_email"]["value"], + "free": data["is_free_email"]["value"], + "score": email_quality, } return email_status @@ -143,27 +159,27 @@ def set_optional(contact, key, data, custom_key=None): custom_key = key if custom_key in data: - contact['field[' + acp_ids[key] + ',0]'] = data[custom_key] + contact["field[" + acp_ids[key] + ",0]"] = data[custom_key] return contact def get_lead_tags(ac_academy, form_entry): - if 'tags' not in form_entry or form_entry['tags'] == '': - raise Exception('You need to specify tags for this entry') + if "tags" not in form_entry or form_entry["tags"] == "": + raise Exception("You need to specify tags for this entry") else: - _tags = [t.strip() for t in form_entry['tags'].split(',')] - if len(_tags) == 0 or _tags[0] == '': - raise Exception('The contact tags are empty', 400) + _tags = [t.strip() for t in form_entry["tags"].split(",")] + if len(_tags) == 0 or _tags[0] == "": + raise Exception("The contact tags are empty", 400) - strong_tags = Tag.objects.filter(slug__in=_tags, tag_type='STRONG', ac_academy=ac_academy) - soft_tags = Tag.objects.filter(slug__in=_tags, tag_type='SOFT', ac_academy=ac_academy) - dicovery_tags = Tag.objects.filter(slug__in=_tags, tag_type='DISCOVERY', ac_academy=ac_academy) - other_tags = Tag.objects.filter(slug__in=_tags, tag_type='OTHER', ac_academy=ac_academy) + strong_tags = Tag.objects.filter(slug__in=_tags, tag_type="STRONG", ac_academy=ac_academy) + soft_tags = Tag.objects.filter(slug__in=_tags, tag_type="SOFT", ac_academy=ac_academy) + dicovery_tags = Tag.objects.filter(slug__in=_tags, tag_type="DISCOVERY", ac_academy=ac_academy) + other_tags = Tag.objects.filter(slug__in=_tags, tag_type="OTHER", ac_academy=ac_academy) tags = list(chain(strong_tags, soft_tags, dicovery_tags, other_tags)) if len(tags) != len(_tags): - message = 'Some tag applied to the contact not found or have tag_type different than [STRONG, SOFT, DISCOVER, OTHER]: ' + message = "Some tag applied to the contact not found or have tag_type different than [STRONG, SOFT, DISCOVER, OTHER]: " message += f'Check for the follow tags: {",".join(_tags)}' raise Exception(message) @@ -172,80 +188,84 @@ def get_lead_tags(ac_academy, form_entry): def get_lead_automations(ac_academy, form_entry): _automations = [] - if 'automations' not in form_entry or form_entry['automations'] == '': + if "automations" not in form_entry or form_entry["automations"] == "": return [] else: - 
_automations = form_entry['automations'].split(',') + _automations = form_entry["automations"].split(",") automations = Automation.objects.filter(slug__in=_automations, ac_academy=ac_academy) count = automations.count() if count == 0: - _name = form_entry['automations'] - raise Exception(f'The specified automation {_name} was not found for this AC Academy') + _name = form_entry["automations"] + raise Exception(f"The specified automation {_name} was not found for this AC Academy") - logger.debug(f'found {str(count)} automations') - return automations.values_list('acp_id', flat=True) + logger.debug(f"found {str(count)} automations") + return automations.values_list("acp_id", flat=True) def add_to_active_campaign(contact, academy_id: int, automation_id: int): if not ActiveCampaignAcademy.objects.filter(academy__id=academy_id).count(): - raise Exception(f'No academy found with id {academy_id}') + raise Exception(f"No academy found with id {academy_id}") - active_campaign_academy_values = ['ac_url', 'ac_key', 'event_attendancy_automation__id'] - ac_url, ac_key, event_attendancy_automation_id = ActiveCampaignAcademy.objects.filter( - academy__id=academy_id).values_list(*active_campaign_academy_values).first() + active_campaign_academy_values = ["ac_url", "ac_key", "event_attendancy_automation__id"] + ac_url, ac_key, event_attendancy_automation_id = ( + ActiveCampaignAcademy.objects.filter(academy__id=academy_id) + .values_list(*active_campaign_academy_values) + .first() + ) - logger.info('ready to send contact with following details') + logger.info("ready to send contact with following details") logger.info(contact) old_client = ACOldClient(ac_url, ac_key) response = old_client.contacts.create_contact(contact) - contact_id = response['subscriber_id'] + contact_id = response["subscriber_id"] - if 'subscriber_id' not in response: - logger.error('error adding contact', response) - raise APIException('Could not save contact in CRM') + if "subscriber_id" not in response: + logger.error("error adding contact", response) + raise APIException("Could not save contact in CRM") client = ActiveCampaignClient(ac_url, ac_key) if event_attendancy_automation_id != automation_id: - message = 'Automation doesn\'t exist for this AC Academy' + message = "Automation doesn't exist for this AC Academy" logger.info(message) raise Exception(message) - acp_id = Automation.objects.filter(id=automation_id).values_list('acp_id', flat=True).first() + acp_id = Automation.objects.filter(id=automation_id).values_list("acp_id", flat=True).first() if not acp_id: - message = 'Automation acp_id doesn\'t exist' + message = "Automation acp_id doesn't exist" logger.info(message) raise Exception(message) data = { - 'contactAutomation': { - 'contact': contact_id, - 'automation': acp_id, + "contactAutomation": { + "contact": contact_id, + "automation": acp_id, } } response = client.contacts.add_a_contact_to_an_automation(data) - if 'contacts' not in response: - logger.error(f'error triggering automation with id {str(acp_id)}', response) - raise APIException('Could not add contact to Automation') + if "contacts" not in response: + logger.error(f"error triggering automation with id {str(acp_id)}", response) + raise APIException("Could not add contact to Automation") - logger.info(f'Triggered automation with id {str(acp_id)}', response) + logger.info(f"Triggered automation with id {str(acp_id)}", response) def register_new_lead(form_entry=None): if form_entry is None: - raise ValidationException('You need to specify the form entry data') + 
raise ValidationException("You need to specify the form entry data") - if 'location' not in form_entry or form_entry['location'] is None: - raise ValidationException('Missing location information') + if "location" not in form_entry or form_entry["location"] is None: + raise ValidationException("Missing location information") ac_academy = None alias = AcademyAlias.objects.filter( - Q(active_campaign_slug=form_entry['location']) | Q(academy__slug=form_entry['location'])).first() + Q(active_campaign_slug=form_entry["location"]) | Q(academy__slug=form_entry["location"]) + ).first() try: if alias is not None: @@ -254,7 +274,7 @@ def register_new_lead(form_entry=None): pass if ac_academy is None: - ac_academy = ActiveCampaignAcademy.objects.filter(academy__slug=form_entry['location']).first() + ac_academy = ActiveCampaignAcademy.objects.filter(academy__slug=form_entry["location"]).first() if ac_academy is None: raise RetryTask(f"No academy found with slug {form_entry['location']}") @@ -262,149 +282,148 @@ def register_new_lead(form_entry=None): automations = get_lead_automations(ac_academy, form_entry) if automations: - logger.info('found automations') + logger.info("found automations") logger.info(list(automations)) else: - logger.info('automations not found') + logger.info("automations not found") tags = get_lead_tags(ac_academy, form_entry) - logger.info('found tags') + logger.info("found tags") logger.info(set(t.slug for t in tags)) if (automations is None or len(automations) == 0) and len(tags) > 0: if tags[0].automation is None: - raise ValidationException('No automation was specified and the the specified tag has no automation either') + raise ValidationException("No automation was specified and the the specified tag has no automation either") automations = [tags[0].automation.acp_id] - if not 'email' in form_entry: - raise ValidationException('The email doesn\'t exist') + if not "email" in form_entry: + raise ValidationException("The email doesn't exist") - if not 'first_name' in form_entry: - raise ValidationException('The first name doesn\'t exist') + if not "first_name" in form_entry: + raise ValidationException("The first name doesn't exist") - if not 'last_name' in form_entry: - raise ValidationException('The last name doesn\'t exist') + if not "last_name" in form_entry: + raise ValidationException("The last name doesn't exist") - if not 'phone' in form_entry: - raise ValidationException('The phone doesn\'t exist') + if not "phone" in form_entry: + raise ValidationException("The phone doesn't exist") - if not 'id' in form_entry: - raise ValidationException('The id doesn\'t exist') + if not "id" in form_entry: + raise ValidationException("The id doesn't exist") - if not 'course' in form_entry: - raise ValidationException('The course doesn\'t exist') + if not "course" in form_entry: + raise ValidationException("The course doesn't exist") # apply default language and make sure english is "en" and not "us" - if 'utm_language' in form_entry and form_entry['utm_language'] == 'us': - form_entry['utm_language'] = 'en' - elif 'language' in form_entry and form_entry['language'] == 'us': - form_entry['language'] = 'en' + if "utm_language" in form_entry and form_entry["utm_language"] == "us": + form_entry["utm_language"] = "en" + elif "language" in form_entry and form_entry["language"] == "us": + form_entry["language"] = "en" contact = { - 'email': form_entry['email'], - 'first_name': form_entry['first_name'], - 'last_name': form_entry['last_name'], - 'phone': form_entry['phone'] + "email": 
form_entry["email"], + "first_name": form_entry["first_name"], + "last_name": form_entry["last_name"], + "phone": form_entry["phone"], } - contact = set_optional(contact, 'utm_url', form_entry) - contact = set_optional(contact, 'utm_location', form_entry, 'location') - contact = set_optional(contact, 'course', form_entry) - contact = set_optional(contact, 'utm_language', form_entry, 'language') - contact = set_optional(contact, 'utm_country', form_entry, 'country') - contact = set_optional(contact, 'utm_campaign', form_entry) - contact = set_optional(contact, 'utm_source', form_entry) - contact = set_optional(contact, 'utm_content', form_entry) - contact = set_optional(contact, 'utm_medium', form_entry) - contact = set_optional(contact, 'utm_plan', form_entry) - contact = set_optional(contact, 'utm_placement', form_entry) - contact = set_optional(contact, 'utm_term', form_entry) - contact = set_optional(contact, 'gender', form_entry, 'sex') - contact = set_optional(contact, 'client_comments', form_entry) - contact = set_optional(contact, 'gclid', form_entry) - contact = set_optional(contact, 'current_download', form_entry) - contact = set_optional(contact, 'referral_key', form_entry) - - entry = FormEntry.objects.filter(id=form_entry['id']).first() + contact = set_optional(contact, "utm_url", form_entry) + contact = set_optional(contact, "utm_location", form_entry, "location") + contact = set_optional(contact, "course", form_entry) + contact = set_optional(contact, "utm_language", form_entry, "language") + contact = set_optional(contact, "utm_country", form_entry, "country") + contact = set_optional(contact, "utm_campaign", form_entry) + contact = set_optional(contact, "utm_source", form_entry) + contact = set_optional(contact, "utm_content", form_entry) + contact = set_optional(contact, "utm_medium", form_entry) + contact = set_optional(contact, "utm_plan", form_entry) + contact = set_optional(contact, "utm_placement", form_entry) + contact = set_optional(contact, "utm_term", form_entry) + contact = set_optional(contact, "gender", form_entry, "sex") + contact = set_optional(contact, "client_comments", form_entry) + contact = set_optional(contact, "gclid", form_entry) + contact = set_optional(contact, "current_download", form_entry) + contact = set_optional(contact, "referral_key", form_entry) + + entry = FormEntry.objects.filter(id=form_entry["id"]).first() if not entry: - raise ValidationException('FormEntry not found (id: ' + str(form_entry['id']) + ')') + raise ValidationException("FormEntry not found (id: " + str(form_entry["id"]) + ")") - if 'contact-us' == tags[0].slug: + if "contact-us" == tags[0].slug: obj = {} if ac_academy.academy: - obj['COMPANY_INFO_EMAIL'] = ac_academy.academy.feedback_email + obj["COMPANY_INFO_EMAIL"] = ac_academy.academy.feedback_email send_email_message( - 'new_contact', + "new_contact", ac_academy.academy.marketing_email, { - 'subject': f"New contact from the website {form_entry['first_name']} {form_entry['last_name']}", - 'full_name': form_entry['first_name'] + ' ' + form_entry['last_name'], - 'client_comments': form_entry['client_comments'], - 'data': { - **form_entry - }, + "subject": f"New contact from the website {form_entry['first_name']} {form_entry['last_name']}", + "full_name": form_entry["first_name"] + " " + form_entry["last_name"], + "client_comments": form_entry["client_comments"], + "data": {**form_entry}, **obj, # "data": { **form_entry, **address }, }, - academy=ac_academy.academy) + academy=ac_academy.academy, + ) is_duplicate = 
entry.is_duplicate(form_entry) # ENV Variable to fake lead storage - if get_save_leads() == 'FALSE': - entry.storage_status_text = 'Saved but not send to AC because SAVE_LEADS is FALSE' - entry.storage_status = 'PERSISTED' if not is_duplicate else 'DUPLICATED' + if get_save_leads() == "FALSE": + entry.storage_status_text = "Saved but not send to AC because SAVE_LEADS is FALSE" + entry.storage_status = "PERSISTED" if not is_duplicate else "DUPLICATED" entry.save() return entry - logger.info('ready to send contact with following details: ' + str(contact)) + logger.info("ready to send contact with following details: " + str(contact)) old_client = ACOldClient(ac_academy.ac_url, ac_academy.ac_key) response = old_client.contacts.create_contact(contact) - contact_id = response['subscriber_id'] + contact_id = response["subscriber_id"] # save contact_id from active campaign entry.ac_contact_id = contact_id entry.save() - if 'subscriber_id' not in response: - logger.error('error adding contact', response) - entry.storage_status = 'ERROR' - entry.storage_status_text = 'Could not save contact in CRM: Subscriber_id not found' + if "subscriber_id" not in response: + logger.error("error adding contact", response) + entry.storage_status = "ERROR" + entry.storage_status_text = "Could not save contact in CRM: Subscriber_id not found" entry.save() if is_duplicate: - entry.storage_status = 'DUPLICATED' + entry.storage_status = "DUPLICATED" entry.save() - logger.info('FormEntry is considered a duplicate, no automations or tags added') + logger.info("FormEntry is considered a duplicate, no automations or tags added") return entry client = ActiveCampaignClient(ac_academy.ac_url, ac_academy.ac_key) if automations and not is_duplicate: for automation_id in automations: - data = {'contactAutomation': {'contact': contact_id, 'automation': automation_id}} + data = {"contactAutomation": {"contact": contact_id, "automation": automation_id}} response = client.contacts.add_a_contact_to_an_automation(data) - if 'contacts' not in response: - logger.error(f'error triggering automation with id {str(automation_id)}', response) - raise APIException('Could not add contact to Automation') - logger.info(f'Triggered automation with id {str(automation_id)} ' + str(response)) + if "contacts" not in response: + logger.error(f"error triggering automation with id {str(automation_id)}", response) + raise APIException("Could not add contact to Automation") + logger.info(f"Triggered automation with id {str(automation_id)} " + str(response)) - logger.info('automations was executed successfully') + logger.info("automations was executed successfully") if tags and not is_duplicate: for t in tags: - data = {'contactTag': {'contact': contact_id, 'tag': t.acp_id}} + data = {"contactTag": {"contact": contact_id, "tag": t.acp_id}} response = client.contacts.add_a_tag_to_contact(data) - logger.info('contact was tagged successfully') + logger.info("contact was tagged successfully") - entry.storage_status = 'PERSISTED' + entry.storage_status = "PERSISTED" entry.save() - form_entry['storage_status'] = 'PERSISTED' + form_entry["storage_status"] = "PERSISTED" return entry @@ -422,7 +441,7 @@ def update_deal_custom_fields(formentry_id: int): deal_id = entry.ac_deal_id if entry.academy is None or entry.academy.activecampaignacademy is None: - raise Exception('Academy not found or not found in active campaign') + raise Exception("Academy not found or not found in active campaign") ac_academy = entry.academy.activecampaignacademy client = 
ActiveCampaign(ac_academy.ac_key, ac_academy.ac_url) @@ -432,36 +451,39 @@ def update_deal_custom_fields(formentry_id: int): _ids_to_update = [x for x in acp_ids.values() if isinstance(x, str)] _updated_fields = [] for _field in _incoming_values: - if _field['field'] not in _ids_to_update: + if _field["field"] not in _ids_to_update: logger.debug(f"Skipping contact custom field {_field['field']}") continue # convert contact custom field id to deal custom field id - _mapped_id = map_ids(_field['field']) + _mapped_id = map_ids(_field["field"]) # only go thru if the deal has the respective match custom field if _mapped_id: - _updated_fields.append({ - 'customFieldId': _mapped_id, - 'fieldValue': _field['value'], - }) + _updated_fields.append( + { + "customFieldId": _mapped_id, + "fieldValue": _field["value"], + } + ) # deal utm_location _form_entry_updated = False - if _mapped_id == '16': + if _mapped_id == "16": _form_entry_updated = True - entry.ac_deal_location = _field['value'] + entry.ac_deal_location = _field["value"] # deal utm_course - elif _mapped_id == '6': + elif _mapped_id == "6": _form_entry_updated = True - entry.ac_deal_course = _field['value'] + entry.ac_deal_course = _field["value"] - if _form_entry_updated: entry.save() + if _form_entry_updated: + entry.save() try: - if client.update_deal(deal_id, {'fields': _updated_fields}): + if client.update_deal(deal_id, {"fields": _updated_fields}): return True except Exception: - logger.exception(f'There was an error updating new deal {deal_id} with its contact custom fields') + logger.exception(f"There was an error updating new deal {deal_id} with its contact custom fields") return False @@ -470,27 +492,27 @@ def sync_tags(ac_academy): client = ActiveCampaignClient(ac_academy.ac_url, ac_academy.ac_key) response = client.tags.list_all_tags(limit=100) - if 'tags' not in response: - logger.error('Invalid tags incoming from AC') + if "tags" not in response: + logger.error("Invalid tags incoming from AC") return False - tags = response['tags'] + tags = response["tags"] count = 0 - while len(response['tags']) == 100: + while len(response["tags"]) == 100: count = count + 100 response = client.tags.list_all_tags(limit=100, offset=count) - tags = tags + response['tags'] + tags = tags + response["tags"] for tag in tags: - t = Tag.objects.filter(slug=tag['tag'], ac_academy=ac_academy).first() + t = Tag.objects.filter(slug=tag["tag"], ac_academy=ac_academy).first() if t is None: t = Tag( - slug=tag['tag'], - acp_id=tag['id'], + slug=tag["tag"], + acp_id=tag["id"], ac_academy=ac_academy, ) - t.subscribers = tag['subscriber_count'] + t.subscribers = tag["subscriber_count"] t.save() return response @@ -501,31 +523,31 @@ def sync_automations(ac_academy): client = ActiveCampaignClient(ac_academy.ac_url, ac_academy.ac_key) response = client.automations.list_all_automations(limit=100) - if 'automations' not in response: - logger.error('Invalid automations incoming from AC') + if "automations" not in response: + logger.error("Invalid automations incoming from AC") return False - automations = response['automations'] + automations = response["automations"] count = 0 - while len(response['automations']) == 100: + while len(response["automations"]) == 100: count = count + 100 response = client.automations.list_all_automations(limit=100, offset=count) - if 'automations' not in response: - logger.error('Invalid automations incoming from AC pagination') + if "automations" not in response: + logger.error("Invalid automations incoming from AC pagination") return 
False - automations = automations + response['automations'] + automations = automations + response["automations"] for auto in automations: - a = Automation.objects.filter(acp_id=auto['id'], ac_academy=ac_academy).first() + a = Automation.objects.filter(acp_id=auto["id"], ac_academy=ac_academy).first() if a is None: a = Automation( - acp_id=auto['id'], + acp_id=auto["id"], ac_academy=ac_academy, ) - a.name = auto['name'] - a.entered = auto['entered'] - a.exited = auto['exited'] - a.status = auto['status'] + a.name = auto["name"] + a.entered = auto["entered"] + a.exited = auto["exited"] + a.status = auto["status"] a.save() return response @@ -533,46 +555,51 @@ def sync_automations(ac_academy): def save_get_geolocal(contact, form_entry=None): - if 'latitude' not in form_entry or 'longitude' not in form_entry: + if "latitude" not in form_entry or "longitude" not in form_entry: form_entry = contact.to_form_data() - if 'latitude' not in form_entry or 'longitude' not in form_entry: + if "latitude" not in form_entry or "longitude" not in form_entry: return False - if form_entry['latitude'] == '' or form_entry['longitude'] == '' or form_entry['latitude'] is None or form_entry[ - 'longitude'] is None: + if ( + form_entry["latitude"] == "" + or form_entry["longitude"] == "" + or form_entry["latitude"] is None + or form_entry["longitude"] is None + ): return False result = {} resp = requests.get( f"https://maps.googleapis.com/maps/api/geocode/json?latlng={form_entry['latitude']},{form_entry['longitude']}&key={GOOGLE_CLOUD_KEY}", - timeout=2) + timeout=2, + ) data = resp.json() - if 'status' in data and data['status'] == 'INVALID_REQUEST': - raise Exception(data['error_message']) + if "status" in data and data["status"] == "INVALID_REQUEST": + raise Exception(data["error_message"]) - if 'results' in data: - for address in data['results']: - for component in address['address_components']: - if 'country' in component['types'] and 'country' not in result: - result['country'] = component['long_name'] - if 'locality' in component['types'] and 'locality' not in result: - result['locality'] = component['long_name'] - if 'route' in component['types'] and 'route' not in result: - result['route'] = component['long_name'] - if 'postal_code' in component['types'] and 'postal_code' not in result: - result['postal_code'] = component['long_name'] + if "results" in data: + for address in data["results"]: + for component in address["address_components"]: + if "country" in component["types"] and "country" not in result: + result["country"] = component["long_name"] + if "locality" in component["types"] and "locality" not in result: + result["locality"] = component["long_name"] + if "route" in component["types"] and "route" not in result: + result["route"] = component["long_name"] + if "postal_code" in component["types"] and "postal_code" not in result: + result["postal_code"] = component["long_name"] - if 'country' in result: - contact.country = result['country'] + if "country" in result: + contact.country = result["country"] - if 'locality' in result: - contact.city = result['locality'] + if "locality" in result: + contact.city = result["locality"] - if 'route' in result: - contact.street_address = result['route'] + if "route" in result: + contact.street_address = result["route"] - if 'postal_code' in result: - contact.zip_code = result['postal_code'] + if "postal_code" in result: + contact.zip_code = result["postal_code"] contact.save() @@ -585,77 +612,92 @@ def get_facebook_lead_info(lead_id, academy_id=None): lead = 
FormEntry.objects.filter(lead_id=lead_id).first() if lead is None: - raise APIException(f'Invalid lead id: {lead_id}') + raise APIException(f"Invalid lead id: {lead_id}") credential = CredentialsFacebook.objects.filter(academy__id=academy_id, expires_at__gte=now).first() if credential is None: - raise APIException('No active facebook credentials to get the leads') + raise APIException("No active facebook credentials to get the leads") - params = {'access_token': credential.token} - resp = requests.get(f'https://graph.facebook.com/v8.0/{lead_id}/', params=params, timeout=2) + params = {"access_token": credential.token} + resp = requests.get(f"https://graph.facebook.com/v8.0/{lead_id}/", params=params, timeout=2) if resp.status_code == 200: - logger.debug('Facebook responded with 200') + logger.debug("Facebook responded with 200") data = resp.json() - if 'field_data' in data: - lead.utm_campaign = data['ad_id'] - lead.utm_medium = data['ad_id'] - lead.utm_source = 'facebook' - for field in data['field_data']: - if field['name'] == 'first_name' or field['name'] == 'full_name': - lead.first_name = field['values'] - elif field['name'] == 'last_name': - lead.last_name = field['values'] - elif field['name'] == 'email': - lead.email = field['values'] - elif field['name'] == 'phone': - lead.phone = field['values'] + if "field_data" in data: + lead.utm_campaign = data["ad_id"] + lead.utm_medium = data["ad_id"] + lead.utm_source = "facebook" + for field in data["field_data"]: + if field["name"] == "first_name" or field["name"] == "full_name": + lead.first_name = field["values"] + elif field["name"] == "last_name": + lead.last_name = field["values"] + elif field["name"] == "email": + lead.email = field["values"] + elif field["name"] == "phone": + lead.phone = field["values"] lead.save() else: - logger.fatal('No information about the lead') + logger.fatal("No information about the lead") else: - logger.fatal('Impossible to connect to facebook API and retrieve lead information') + logger.fatal("Impossible to connect to facebook API and retrieve lead information") -STARTS_WITH_COMMA_PATTERN = re.compile(r'^,') -ENDS_WITH_COMMA_PATTERN = re.compile(r',$') +STARTS_WITH_COMMA_PATTERN = re.compile(r"^,") +ENDS_WITH_COMMA_PATTERN = re.compile(r",$") -def validate_marketing_tags(tags: str, academy_id: int, types: Optional[list] = None, lang: str = 'en') -> None: - if tags.find(',,') != -1: +def validate_marketing_tags(tags: str, academy_id: int, types: Optional[list] = None, lang: str = "en") -> None: + if tags.find(",,") != -1: raise ValidationException( - translation(lang, - en='You can\'t have two commas together on tags', - es='No puedes tener dos comas seguidas en las etiquetas', - slug='two-commas-together')) + translation( + lang, + en="You can't have two commas together on tags", + es="No puedes tener dos comas seguidas en las etiquetas", + slug="two-commas-together", + ) + ) - if tags.find(' ') != -1: + if tags.find(" ") != -1: raise ValidationException( - translation(lang, - en='Spaces are not allowed on tags', - es='No se permiten espacios en los tags', - slug='spaces-are-not-allowed')) + translation( + lang, + en="Spaces are not allowed on tags", + es="No se permiten espacios en los tags", + slug="spaces-are-not-allowed", + ) + ) if STARTS_WITH_COMMA_PATTERN.search(tags): raise ValidationException( - translation(lang, - en='Tags text cannot start with comma', - es='El texto de los tags no puede comenzar con una coma', - slug='starts-with-comma')) + translation( + lang, + en="Tags text cannot start 
with comma", + es="El texto de los tags no puede comenzar con una coma", + slug="starts-with-comma", + ) + ) if ENDS_WITH_COMMA_PATTERN.search(tags): raise ValidationException( - translation(lang, - en='Tags text cannot ends with comma', - es='El texto de los tags no puede terminar con una coma', - slug='ends-with-comma')) + translation( + lang, + en="Tags text cannot ends with comma", + es="El texto de los tags no puede terminar con una coma", + slug="ends-with-comma", + ) + ) - tags = [x for x in tags.split(',') if x] + tags = [x for x in tags.split(",") if x] if len(tags) < 2: raise ValidationException( - translation(lang, - en='Event must have at least two tags', - es='El evento debe tener al menos dos tags', - slug='have-less-two-tags')) + translation( + lang, + en="Event must have at least two tags", + es="El evento debe tener al menos dos tags", + slug="have-less-two-tags", + ) + ) _tags = Tag.objects.filter(slug__in=tags, ac_academy__academy__id=academy_id) if types: @@ -671,21 +713,23 @@ def validate_marketing_tags(tags: str, academy_id: int, types: Optional[list] = not_founds.append(tag) if len(types) == 0: - types = ['ANY'] + types = ["ANY"] raise ValidationException( - translation(lang, - en=f'Following tags not found with types {",".join(types)}: {",".join(not_founds)}', - es='Los siguientes tags no se encontraron con los tipos ' - f'{",".join(types)}: {",".join(not_founds)}', - slug='tag-not-exist')) + translation( + lang, + en=f'Following tags not found with types {",".join(types)}: {",".join(not_founds)}', + es="Los siguientes tags no se encontraron con los tipos " f'{",".join(types)}: {",".join(not_founds)}', + slug="tag-not-exist", + ) + ) def delete_tag(tag, include_other_academies=False): ac_academy = tag.ac_academy if ac_academy is None: - raise ValidationException(f'Invalid ac_academy for this tag {tag.slug}', code=400, slug='invalid-ac_academy') + raise ValidationException(f"Invalid ac_academy for this tag {tag.slug}", code=400, slug="invalid-ac_academy") client = ActiveCampaign(ac_academy.ac_key, ac_academy.ac_url) try: @@ -698,13 +742,13 @@ def delete_tag(tag, include_other_academies=False): return True except Exception: - logger.exception(f'There was an error deleting tag for {tag.slug}') + logger.exception(f"There was an error deleting tag for {tag.slug}") return False def convert_data_frame(item): - if 'Unnamed: 0' in item: - del item['Unnamed: 0'] + if "Unnamed: 0" in item: + del item["Unnamed: 0"] for key in item: if isinstance(item[key], np.integer): item[key] = int(item[key]) diff --git a/breathecode/marketing/admin.py b/breathecode/marketing/admin.py index 90c01c5c5..cef151573 100644 --- a/breathecode/marketing/admin.py +++ b/breathecode/marketing/admin.py @@ -44,42 +44,44 @@ logger = logging.getLogger(__name__) -@admin.display(description='♼ Test connection to Active Campaign') +@admin.display(description="♼ Test connection to Active Campaign") def test_ac(modeladmin, request, queryset): entries = queryset.all() try: for entry in entries: test_ac_connection(entry) - messages.success(request, message='Connection was a success') + messages.success(request, message="Connection was a success") except Exception as e: logger.fatal(str(e)) messages.error(request, message=str(e)) -@admin.display(description='♼ Sync AC Tags') +@admin.display(description="♼ Sync AC Tags") def sync_ac_tags(modeladmin, request, queryset): entries = queryset.all() try: for entry in entries: sync_tags(entry) - messages.success(request, message='Tags imported successfully') + 
messages.success(request, message="Tags imported successfully") except Exception as e: logger.fatal(str(e)) messages.error(request, message=str(e)) -@admin.display(description='♼ Sync AC Automations') +@admin.display(description="♼ Sync AC Automations") def sync_ac_automations(modeladmin, request, queryset): entries = queryset.all() - _result = {'success': [], 'error': []} + _result = {"success": [], "error": []} try: for entry in entries: - if sync_automations(entry): _result['success'].append(entry.academy.name) - else: _result['error'].append(entry.academy.name) + if sync_automations(entry): + _result["success"].append(entry.academy.name) + else: + _result["error"].append(entry.academy.name) - _errors = ', '.join(_result['error']) - _success = ', '.join(_result['success']) - messages.success(request, message=f'Errored in {_errors}. Succeded in: {_success}') + _errors = ", ".join(_result["error"]) + _success = ", ".join(_result["success"]) + messages.success(request, message=f"Errored in {_errors}. Succeded in: {_success}") except Exception as e: logger.fatal(str(e)) messages.error(request, message=str(e)) @@ -89,28 +91,29 @@ class CustomForm(forms.ModelForm): class Meta: model = ActiveCampaignAcademy - fields = '__all__' + fields = "__all__" def __init__(self, *args, **kwargs): super(CustomForm, self).__init__(*args, **kwargs) - self.fields['event_attendancy_automation'].queryset = Automation.objects.filter( - ac_academy=self.instance.id) # or something else + self.fields["event_attendancy_automation"].queryset = Automation.objects.filter( + ac_academy=self.instance.id + ) # or something else @admin.register(ActiveCampaignAcademy) class ACAcademyAdmin(admin.ModelAdmin, AdminExportCsvMixin): form = CustomForm - search_fields = ['academy__name', 'academy__slug'] - list_display = ('id', 'academy', 'ac_url', 'sync_status', 'last_interaction_at', 'sync_message') - list_filter = ['academy__slug', 'sync_status'] + search_fields = ["academy__name", "academy__slug"] + list_display = ("id", "academy", "ac_url", "sync_status", "last_interaction_at", "sync_message") + list_filter = ["academy__slug", "sync_status"] actions = [test_ac, sync_ac_tags, sync_ac_automations] @admin.register(AcademyAlias) class AcademyAliasAdmin(admin.ModelAdmin): - search_fields = ['slug', 'active_campaign_slug', 'academy__slug', 'academy__title'] - list_display = ('slug', 'active_campaign_slug', 'academy') - list_filter = ['academy__slug'] + search_fields = ["slug", "active_campaign_slug", "academy__slug", "academy__title"] + list_display = ("slug", "active_campaign_slug", "academy") + list_filter = ["academy__slug"] def generate_original_alias(modeladmin, request, queryset): @@ -122,54 +125,55 @@ def generate_original_alias(modeladmin, request, queryset): if AcademyAlias.objects.filter(slug=a.slug).first() is None: AcademyAlias.objects.create(slug=a.slug, active_campaign_slug=slug, academy=a) - messages.add_message(request, messages.INFO, f'Alias {a.slug} successfully created') + messages.add_message(request, messages.INFO, f"Alias {a.slug} successfully created") else: - messages.add_message(request, messages.ERROR, f'Alias {a.slug} already exists') + messages.add_message(request, messages.ERROR, f"Alias {a.slug} already exists") @admin.register(AcademyProxy) class AcademyAdmin(admin.ModelAdmin): - list_display = ('slug', 'name') + list_display = ("slug", "name") actions = [generate_original_alias] def send_to_active_campaign(modeladmin, request, queryset): entries = queryset.all() - total = {'error': 0, 'persisted': 0} 
+ total = {"error": 0, "persisted": 0} entry = None try: for entry in entries: entry = register_new_lead(entry.to_form_data()) - if entry.storage_status == 'PERSISTED': - total['persisted'] += 1 + if entry.storage_status == "PERSISTED": + total["persisted"] += 1 else: - total['error'] += 1 + total["error"] += 1 except Exception as e: - total['error'] += 1 - entry.storage_status = 'ERROR' + total["error"] += 1 + entry.storage_status = "ERROR" entry.storage_status_text = str(e) entry.save() messages.add_message( - request, messages.SUCCESS, - f"Persisted leads: {total['persisted']}. Not persisted: {total['error']}. You can check each lead storage_status_text for details." + request, + messages.SUCCESS, + f"Persisted leads: {total['persisted']}. Not persisted: {total['error']}. You can check each lead storage_status_text for details.", ) -@admin.display(description='♺ Download more info from facebook') +@admin.display(description="♺ Download more info from facebook") def fetch_more_facebook_info(modeladmin, request, queryset): entries = queryset.all() for entry in entries: get_facebook_lead_info(entry.id) -@admin.display(description='🌐 Get GEO info') +@admin.display(description="🌐 Get GEO info") def sync_contact_custom_fields_with_deal(modeladmin, request, queryset): entries = queryset.all() for entry in entries: if not entry.ac_contact_id or not entry.ac_deal_id: - messages.error(request, message=f'FormEntry {str(entry.id)} is missing deal_id or contact_id') + messages.error(request, message=f"FormEntry {str(entry.id)} is missing deal_id or contact_id") return None for entry in entries: @@ -182,54 +186,90 @@ def get_geoinfo(modeladmin, request, queryset): for entry in entries: form_enty = { - 'latitude': entry.latitude, - 'longitude': entry.longitude, + "latitude": entry.latitude, + "longitude": entry.longitude, } save_get_geolocal(entry, form_enty) class PPCFilter(SimpleListFilter): - title = 'Source' # or use _('country') for translated title - parameter_name = 'source' + title = "Source" # or use _('country') for translated title + parameter_name = "source" def lookups(self, request, model_admin): - mediums = ['From PPC', 'Course Report'] + mediums = ["From PPC", "Course Report"] return [(m, m) for m in mediums] def queryset(self, request, queryset): - if self.value() == 'From PPC': + if self.value() == "From PPC": return queryset.filter(gclid__isnull=False) - if self.value() == 'Course Report': - return queryset.filter(utm_medium='coursereportschoolpage') + if self.value() == "Course Report": + return queryset.filter(utm_medium="coursereportschoolpage") @admin.register(FormEntry) class FormEntryAdmin(admin.ModelAdmin, AdminExportCsvMixin): - search_fields = ['email', 'first_name', 'last_name', 'phone', 'utm_campaign', 'utm_url', 'attribution_id'] - list_display = ('id', '_attribution_id', '_storage_status', 'created_at', 'first_name', 'last_name', 'email', - 'location', 'course', 'academy', 'country', 'city', 'utm_medium', 'utm_url', 'gclid', 'tags') + search_fields = ["email", "first_name", "last_name", "phone", "utm_campaign", "utm_url", "attribution_id"] + list_display = ( + "id", + "_attribution_id", + "_storage_status", + "created_at", + "first_name", + "last_name", + "email", + "location", + "course", + "academy", + "country", + "city", + "utm_medium", + "utm_url", + "gclid", + "tags", + ) list_filter = [ - 'storage_status', 'location', 'course', 'deal_status', PPCFilter, 'lead_generation_app', 'utm_medium', - 'utm_campaign', 'utm_source' + "storage_status", + "location", + 
"course", + "deal_status", + PPCFilter, + "lead_generation_app", + "utm_medium", + "utm_campaign", + "utm_source", ] - actions = [ - send_to_active_campaign, get_geoinfo, fetch_more_facebook_info, sync_contact_custom_fields_with_deal, - 'async_export_as_csv' - ] + change_field([ - 'bogota-colombia', 'mexicocity-mexico', 'quito-ecuador', 'buenosaires-argentina', 'caracas-venezuela', 'online' - ], - name='location') + change_field(['full-stack', 'datascience-ml', 'cybersecurity'], - name='course') + change_field(['REJECTED', 'DUPLICATED', 'ERROR'], - name='storage_status') + actions = ( + [ + send_to_active_campaign, + get_geoinfo, + fetch_more_facebook_info, + sync_contact_custom_fields_with_deal, + "async_export_as_csv", + ] + + change_field( + [ + "bogota-colombia", + "mexicocity-mexico", + "quito-ecuador", + "buenosaires-argentina", + "caracas-venezuela", + "online", + ], + name="location", + ) + + change_field(["full-stack", "datascience-ml", "cybersecurity"], name="course") + + change_field(["REJECTED", "DUPLICATED", "ERROR"], name="storage_status") + ) def _attribution_id(self, obj): - _html = f'<small>{obj.attribution_id}</small>' + _html = f"<small>{obj.attribution_id}</small>" if obj.won_at is not None: colors = { - 'WON': 'bg-success', - 'LOST': 'bg-error', - None: '', + "WON": "bg-success", + "LOST": "bg-error", + None: "", } _html += f"<p class='{colors[obj.deal_status]}'>WON</p>" @@ -237,24 +277,24 @@ def _attribution_id(self, obj): def _storage_status(self, obj): colors = { - 'PUBLISHED': 'bg-success', - 'OK': 'bg-success', - 'ERROR': 'bg-error', - 'WARNING': 'bg-warning', - 'DUPLICATED': '', - None: 'bg-warning', - 'DRAFT': 'bg-error', - 'PENDING_TRANSLATION': 'bg-error', - 'PENDING': 'bg-warning', - 'WARNING': 'bg-warning', - 'NOT_STARTED': 'bg-error', - 'UNLISTED': 'bg-warning', + "PUBLISHED": "bg-success", + "OK": "bg-success", + "ERROR": "bg-error", + "WARNING": "bg-warning", + "DUPLICATED": "", + None: "bg-warning", + "DRAFT": "bg-error", + "PENDING_TRANSLATION": "bg-error", + "PENDING": "bg-warning", + "WARNING": "bg-warning", + "NOT_STARTED": "bg-error", + "UNLISTED": "bg-warning", } def from_status(s): if s in colors: return colors[s] - return '' + return "" return format_html( f"<p class='{from_status(obj.storage_status)}'>{obj.storage_status}</p><small>{obj.storage_status_text}</small>" @@ -275,11 +315,11 @@ def delete_from_everywhere(modeladmin, request, queryset): slug = t.slug try: if delete_tag(t) == True: - messages.add_message(request, messages.INFO, f'Tag {slug} successully deleted') + messages.add_message(request, messages.INFO, f"Tag {slug} successully deleted") else: - messages.add_message(request, messages.ERROR, f'Error deleding tag {slug}') + messages.add_message(request, messages.ERROR, f"Error deleding tag {slug}") except Exception as e: - messages.add_message(request, messages.ERROR, f'Error deleding tag {slug}: {str(e)}') + messages.add_message(request, messages.ERROR, f"Error deleding tag {slug}: {str(e)}") def upload_to_active_campaign(modeladmin, request, queryset): @@ -289,27 +329,27 @@ def upload_to_active_campaign(modeladmin, request, queryset): try: ac_academy = t.ac_academy if ac_academy is None: - raise ValidationException(f'Invalid ac_academy for this tag {t.slug}', - code=400, - slug='invalid-ac_academy') + raise ValidationException( + f"Invalid ac_academy for this tag {t.slug}", code=400, slug="invalid-ac_academy" + ) client = ActiveCampaign(ac_academy.ac_key, ac_academy.ac_url) data = client.create_tag(t.slug, description=t.description) 
- t.acp_id = data['id'] + t.acp_id = data["id"] t.subscribers = 0 t.save() - messages.add_message(request, messages.INFO, f'Tag {t.slug} successully uploaded') + messages.add_message(request, messages.INFO, f"Tag {t.slug} successully uploaded") except Exception as e: - messages.add_message(request, messages.ERROR, f'Error uploading tag {slug}: {str(e)}') + messages.add_message(request, messages.ERROR, f"Error uploading tag {slug}: {str(e)}") @admin.display(description='Prepend "tech-" on slug') def prepend_tech_on_name(modeladmin, request, queryset): for t in queryset: - if t.slug[:5] == 'tech-': + if t.slug[:5] == "tech-": continue - t.slug = 'tech-' + t.slug + t.slug = "tech-" + t.slug t.save() @@ -317,25 +357,26 @@ class CustomTagModelForm(forms.ModelForm): class Meta: model = Tag - fields = '__all__' + fields = "__all__" def __init__(self, *args, **kwargs): super(CustomTagModelForm, self).__init__(*args, **kwargs) if self.instance.ac_academy is not None: - self.fields['automation'].queryset = Automation.objects.filter( - ac_academy=self.instance.ac_academy.id) # or something else + self.fields["automation"].queryset = Automation.objects.filter( + ac_academy=self.instance.ac_academy.id + ) # or something else class TagTypeFilter(SimpleListFilter): - title = 'tag_type' - parameter_name = 'tag_type' + title = "tag_type" + parameter_name = "tag_type" def lookups(self, request, model_admin): tags = set([c.tag_type for c in Tag.objects.filter(tag_type__isnull=False)]) - return [(c, c) for c in tags] + [('NONE', 'No type')] + return [(c, c) for c in tags] + [("NONE", "No type")] def queryset(self, request, queryset): - if self.value() == 'NONE': + if self.value() == "NONE": return queryset.filter(tag_type__isnull=True) if self.value(): return queryset.filter(tag_type__exact=self.value()) @@ -343,36 +384,40 @@ def queryset(self, request, queryset): class DisputedFilter(admin.SimpleListFilter): - title = 'Disputed tag' + title = "Disputed tag" - parameter_name = 'is_disputed' + parameter_name = "is_disputed" def lookups(self, request, model_admin): return ( - ('yes', 'Yes'), - ('no', 'No'), + ("yes", "Yes"), + ("no", "No"), ) def queryset(self, request, queryset): - if self.value() == 'yes': + if self.value() == "yes": return queryset.filter(disputed_at__isnull=False) - if self.value() == 'no': + if self.value() == "no": return queryset.filter(disputed_at__isnull=True) @admin.register(Tag) class TagAdmin(admin.ModelAdmin, AdminExportCsvMixin): form = CustomTagModelForm - search_fields = ['slug'] - list_display = ('id', 'slug', 'tag_type', 'disputed', 'ac_academy', 'acp_id', 'subscribers') - list_filter = [DisputedFilter, TagTypeFilter, 'ac_academy__academy__slug'] + search_fields = ["slug"] + list_display = ("id", "slug", "tag_type", "disputed", "ac_academy", "acp_id", "subscribers") + list_filter = [DisputedFilter, TagTypeFilter, "ac_academy__academy__slug"] actions = [ - delete_from_everywhere, 'export_as_csv', upload_to_active_campaign, add_dispute, remove_dispute, - prepend_tech_on_name - ] + change_field(['STRONG', 'SOFT', 'DISCOVERY', 'COHORT', 'DOWNLOADABLE', 'EVENT', 'OTHER'], name='tag_type') + delete_from_everywhere, + "export_as_csv", + upload_to_active_campaign, + add_dispute, + remove_dispute, + prepend_tech_on_name, + ] + change_field(["STRONG", "SOFT", "DISCOVERY", "COHORT", "DOWNLOADABLE", "EVENT", "OTHER"], name="tag_type") def disputed(self, obj): if obj.disputed_at is not None: @@ -385,32 +430,34 @@ def disputed(self, obj): @admin.register(Automation) class 
AutomationAdmin(admin.ModelAdmin, AdminExportCsvMixin): - search_fields = ['slug', 'name'] - list_display = ('id', 'acp_id', 'slug', 'name', 'status', 'entered', 'exited') - list_filter = ['status', 'ac_academy__academy__slug'] - actions = ['export_as_csv'] + search_fields = ["slug", "name"] + list_display = ("id", "acp_id", "slug", "name", "status", "entered", "exited") + list_filter = ["status", "ac_academy__academy__slug"] + actions = ["export_as_csv"] @admin.register(ShortLink) class ShortLinkAdmin(admin.ModelAdmin, AdminExportCsvMixin): - search_fields = ['slug', 'destination'] - list_display = ('id', 'slug', 'hits', 'current_status', 'active', 'lastclick_at', 'link') - list_filter = ['destination_status', 'active'] - actions = ['export_as_csv'] + search_fields = ["slug", "destination"] + list_display = ("id", "slug", "hits", "current_status", "active", "lastclick_at", "link") + list_filter = ["destination_status", "active"] + actions = ["export_as_csv"] def current_status(self, obj): colors = { - 'ACTIVE': 'bg-success', - 'ERROR': 'bg-error', - 'NOT_FOUND': 'bg-warning', + "ACTIVE": "bg-success", + "ERROR": "bg-error", + "NOT_FOUND": "bg-warning", } return format_html(f"<span class='badge {colors[obj.destination_status]}'>{obj.destination_status}</span>") def link(self, obj): - return format_html("<a rel='noopener noreferrer' target='_blank' href='{url}'>{short_link}</a>", - url=f'https://s.4geeks.co/s/{obj.slug}', - short_link=f'https://s.4geeks.co/s/{obj.slug}') + return format_html( + "<a rel='noopener noreferrer' target='_blank' href='{url}'>{short_link}</a>", + url=f"https://s.4geeks.co/s/{obj.slug}", + short_link=f"https://s.4geeks.co/s/{obj.slug}", + ) def bind_with_formentry(modeladmin, request, queryset): @@ -421,14 +468,14 @@ def bind_with_formentry(modeladmin, request, queryset): def async_process_hook(modeladmin, request, queryset): # stay this here for use the poor mocking system - for hook in queryset.all().order_by('created_at'): + for hook in queryset.all().order_by("created_at"): async_activecampaign_webhook.delay(hook.id) def process_hook(modeladmin, request, queryset): # stay this here for use the poor mocking system - for hook in queryset.all().order_by('created_at'): - print(f'Procesing hook: {hook.id}') + for hook in queryset.all().order_by("created_at"): + print(f"Procesing hook: {hook.id}") ac_academy = hook.ac_academy client = ActiveCampaign(ac_academy.ac_key, ac_academy.ac_url) client.execute_action(hook.id) @@ -436,47 +483,49 @@ def process_hook(modeladmin, request, queryset): @admin.register(ActiveCampaignWebhook) class ActiveCampaignWebhookAdmin(admin.ModelAdmin): - list_display = ('id', 'webhook_type', 'current_status', 'run_at', 'created_at', 'formentry') - search_fields = ['form_entry__email', 'form_entry__ac_deal_id'] - list_filter = ['status', 'webhook_type', 'form_entry__location'] - raw_id_fields = ['form_entry'] + list_display = ("id", "webhook_type", "current_status", "run_at", "created_at", "formentry") + search_fields = ["form_entry__email", "form_entry__ac_deal_id"] + list_filter = ["status", "webhook_type", "form_entry__location"] + raw_id_fields = ["form_entry"] actions = [process_hook, async_process_hook, bind_with_formentry] def current_status(self, obj): colors = { - 'DONE': 'bg-success', - 'ERROR': 'bg-error', - 'PENDING': 'bg-warning', + "DONE": "bg-success", + "ERROR": "bg-error", + "PENDING": "bg-warning", } - if obj.status == 'DONE': + if obj.status == "DONE": return format_html(f"<span class='badge 
{colors[obj.status]}'>{obj.status}</span>") return format_html( - f"<div><span class='badge {colors[obj.status]}'>{obj.status}</span></div><small>{obj.status_text}</small>") + f"<div><span class='badge {colors[obj.status]}'>{obj.status}</span></div><small>{obj.status_text}</small>" + ) def formentry(self, obj): if obj.form_entry is None: - return '-' + return "-" return format_html( - f"<a href='/admin/marketing/formentry/{obj.form_entry.id}/change/'>{str(obj.form_entry)}</a>") + f"<a href='/admin/marketing/formentry/{obj.form_entry.id}/change/'>{str(obj.form_entry)}</a>" + ) @admin.register(Downloadable) class DownloadableAdmin(admin.ModelAdmin): - list_display = ('slug', 'name', 'academy', 'status', 'open_link') - raw_id_fields = ['author'] + list_display = ("slug", "name", "academy", "status", "open_link") + raw_id_fields = ["author"] def open_link(self, obj): return format_html(f"<a href='{obj.destination_url}' target='parent'>open link</a>") def status(self, obj): colors = { - 'ACTIVE': 'bg-success', - 'NOT_FOUND': 'bg-error', + "ACTIVE": "bg-success", + "NOT_FOUND": "bg-error", } return format_html(f"<span class='badge {colors[obj.destination_status]}'>{obj.destination_status}</span>") -@admin.display(description='Reset app id') +@admin.display(description="Reset app id") def reset_app_id(modeladmin, request, queryset): for app in queryset.all(): app.app_id = secrets.token_urlsafe(16) @@ -487,33 +536,37 @@ class LeadAppCustomForm(forms.ModelForm): class Meta: model = LeadGenerationApp - fields = '__all__' + fields = "__all__" def __init__(self, *args, **kwargs): super(LeadAppCustomForm, self).__init__(*args, **kwargs) try: - if 'default_automations' in self.fields: - self.fields['default_automations'].queryset = Automation.objects.filter( - ac_academy__academy__id=self.instance.academy.id).exclude(slug='') # or something else - self.fields['default_tags'].queryset = Tag.objects.filter( - ac_academy__academy__id=self.instance.academy.id) # or something else + if "default_automations" in self.fields: + self.fields["default_automations"].queryset = Automation.objects.filter( + ac_academy__academy__id=self.instance.academy.id + ).exclude( + slug="" + ) # or something else + self.fields["default_tags"].queryset = Tag.objects.filter( + ac_academy__academy__id=self.instance.academy.id + ) # or something else except Exception: - self.fields['default_automations'].queryset = Automation.objects.none() - self.fields['default_tags'].queryset = Tag.objects.none() + self.fields["default_automations"].queryset = Automation.objects.none() + self.fields["default_tags"].queryset = Tag.objects.none() @admin.register(LeadGenerationApp) class LeadGenerationAppAdmin(admin.ModelAdmin): form = LeadAppCustomForm - list_display = ('slug', 'name', 'academy', 'status', 'last_call_at', 'app_id') - readonly_fields = ('app_id', ) - actions = (reset_app_id, ) + list_display = ("slug", "name", "academy", "status", "last_call_at", "app_id") + readonly_fields = ("app_id",) + actions = (reset_app_id,) def status(self, obj): colors = { - 'OK': 'bg-success', - 'ERROR': 'bg-error', + "OK": "bg-success", + "ERROR": "bg-error", } if obj.last_call_status is None: return format_html("<span class='badge'>Not yet called</span>") @@ -521,13 +574,13 @@ def status(self, obj): def course_module_keys_validation(course_module): - if course_module['name'] is None or course_module['name'] == '': - return 'The module does not have a name.' 
- if course_module['slug'] is None or course_module['slug'] == '': + if course_module["name"] is None or course_module["name"] == "": + return "The module does not have a name." + if course_module["slug"] is None or course_module["slug"] == "": return f'The module {course_module["name"]} does not have a slug.' - if course_module['icon_url'] is None or course_module['icon_url'] == '': + if course_module["icon_url"] is None or course_module["icon_url"] == "": return f'The module {course_module["name"]} does not have an icon_url.' - if course_module['description'] is None or course_module['description'] == '': + if course_module["description"] is None or course_module["description"] == "": return f'The module {course_module["name"]} does not have a description.' @@ -542,19 +595,19 @@ def validate_course_modules(modeladmin, request, queryset): course_modules_list = [] for course_module in course_modules: keys_validation_error = course_module_keys_validation(course_module) - if keys_validation_error is not None and keys_validation_error != '': + if keys_validation_error is not None and keys_validation_error != "": course.status_message = keys_validation_error course.save() return - course_modules_list.append(course_module['slug']) + course_modules_list.append(course_module["slug"]) modules.append(course_modules_list) for module in modules: if modules[0] != module: - course.status_message = 'The course translations have different modules.' + course.status_message = "The course translations have different modules." course.save() return - course.status_message = 'All course translation have the same modules' + course.status_message = "All course translation have the same modules" course.save() except Exception as e: logger.fatal(str(e)) @@ -564,21 +617,21 @@ def validate_course_modules(modeladmin, request, queryset): @admin.register(UTMField) class UTMFieldAdmin(admin.ModelAdmin): - list_display = ('slug', 'name', 'utm_type') - list_filter = ['utm_type', 'academy__slug'] - actions = change_field(['SOURCE', 'MEDIUM', 'CAMPAIGN', 'CONTENT'], name='utm_type') + list_display = ("slug", "name", "utm_type") + list_filter = ["utm_type", "academy__slug"] + actions = change_field(["SOURCE", "MEDIUM", "CAMPAIGN", "CONTENT"], name="utm_type") @admin.register(Course) class CourseAdmin(admin.ModelAdmin): - list_display = ('slug', 'academy', 'status', 'status_message', 'visibility') - list_filter = ['academy__slug', 'status', 'visibility'] - filter_horizontal = ('syllabus', ) - raw_id_fields = ['cohort'] + list_display = ("slug", "academy", "status", "status_message", "visibility") + list_filter = ["academy__slug", "status", "visibility"] + filter_horizontal = ("syllabus",) + raw_id_fields = ["cohort"] actions = [validate_course_modules] @admin.register(CourseTranslation) class CourseTranslationAdmin(admin.ModelAdmin): - list_display = ('course', 'lang', 'title', 'description') - list_filter = ['course__academy__slug', 'course__status', 'course__visibility'] + list_display = ("course", "lang", "title", "description") + list_filter = ["course__academy__slug", "course__status", "course__visibility"] diff --git a/breathecode/marketing/apps.py b/breathecode/marketing/apps.py index 0493b6e7c..940298ddd 100644 --- a/breathecode/marketing/apps.py +++ b/breathecode/marketing/apps.py @@ -2,7 +2,7 @@ class MarketingConfig(AppConfig): - name = 'breathecode.marketing' + name = "breathecode.marketing" def ready(self): from . 
import receivers # noqa: F401 diff --git a/breathecode/marketing/management/commands/bind_hooks_with_formentries.py b/breathecode/marketing/management/commands/bind_hooks_with_formentries.py index d5cf0e10d..de4ef0c5f 100644 --- a/breathecode/marketing/management/commands/bind_hooks_with_formentries.py +++ b/breathecode/marketing/management/commands/bind_hooks_with_formentries.py @@ -4,10 +4,10 @@ class Command(BaseCommand): - help = 'Clean data from marketing module' + help = "Clean data from marketing module" def handle(self, *args, **options): - hooks = ActiveCampaignWebhook.objects.filter(status='ERROR') + hooks = ActiveCampaignWebhook.objects.filter(status="ERROR") for h in hooks: bind_formentry_with_webhook(h) diff --git a/breathecode/marketing/management/commands/marketing_garbage_collect.py b/breathecode/marketing/management/commands/marketing_garbage_collect.py index ec65ddf61..cc56cc295 100644 --- a/breathecode/marketing/management/commands/marketing_garbage_collect.py +++ b/breathecode/marketing/management/commands/marketing_garbage_collect.py @@ -3,7 +3,7 @@ class Command(BaseCommand): - help = 'Clean data from marketing module' + help = "Clean data from marketing module" def handle(self, *args, **options): @@ -11,7 +11,7 @@ def handle(self, *args, **options): def delete_old_webhooks(self): cursor = connection.cursor() - #status = 'ERROR' or status = 'PENDING' AND + # status = 'ERROR' or status = 'PENDING' AND cursor.execute("DELETE FROM marketing_activecampaignwebhook WHERE created_at < NOW() - INTERVAL '30 days'") cursor.execute("DELETE FROM marketing_activecampaignwebhook WHERE status <> 'ERROR' AND status <> 'PENDING'") diff --git a/breathecode/marketing/management/commands/relate_leads_with_academies.py b/breathecode/marketing/management/commands/relate_leads_with_academies.py index e430a7ecd..140b38baa 100644 --- a/breathecode/marketing/management/commands/relate_leads_with_academies.py +++ b/breathecode/marketing/management/commands/relate_leads_with_academies.py @@ -4,23 +4,25 @@ class Command(BaseCommand): - help = 'Sync breathecode with active campaign' + help = "Sync breathecode with active campaign" def handle(self, *args, **options): leads_without_academy = FormEntry.objects.filter(academy__isnull=True, location__isnull=False) - counts = {'attempts': 0, 'assigned': 0} + counts = {"attempts": 0, "assigned": 0} for l in leads_without_academy: - counts['attempts'] += 1 - print('Location: ', l.location) - if l.location != '': + counts["attempts"] += 1 + print("Location: ", l.location) + if l.location != "": academy = Academy.objects.filter(slug=l.location.strip()).first() if academy is not None: l.academy = academy l.location = l.location.strip() l.save() - counts['assigned'] += 1 + counts["assigned"] += 1 self.stdout.write( self.style.SUCCESS( - f'{counts["attempts"]} leads were found without academy and {counts["assigned"]} were fixed')) + f'{counts["attempts"]} leads were found without academy and {counts["assigned"]} were fixed' + ) + ) diff --git a/breathecode/marketing/management/commands/rerun_pending_ac_webhooks.py b/breathecode/marketing/management/commands/rerun_pending_ac_webhooks.py index 17aea8d4b..8d5408d04 100644 --- a/breathecode/marketing/management/commands/rerun_pending_ac_webhooks.py +++ b/breathecode/marketing/management/commands/rerun_pending_ac_webhooks.py @@ -10,13 +10,13 @@ class Command(BaseCommand): - help = 'Clean data from marketing module' + help = "Clean data from marketing module" def handle(self, *args, **options): - hooks = 
ActiveCampaignWebhook.objects.filter(Q(run_at=None) - | Q(run_at__lte=timezone.now() - timedelta(days=3)), - status='PENDING').only('id') + hooks = ActiveCampaignWebhook.objects.filter( + Q(run_at=None) | Q(run_at__lte=timezone.now() - timedelta(days=3)), status="PENDING" + ).only("id") for h in hooks: async_activecampaign_webhook.delay(h.id) diff --git a/breathecode/marketing/management/commands/retry_pending_leads.py b/breathecode/marketing/management/commands/retry_pending_leads.py index 9ac385c41..cea3a2301 100644 --- a/breathecode/marketing/management/commands/retry_pending_leads.py +++ b/breathecode/marketing/management/commands/retry_pending_leads.py @@ -4,10 +4,10 @@ class Command(BaseCommand): - help = 'Retry sending pending leads to active campaign' + help = "Retry sending pending leads to active campaign" def handle(self, *args, **options): - entries = FormEntry.objects.filter(storage_status='PENDING') + entries = FormEntry.objects.filter(storage_status="PENDING") for entry in entries: persist_single_lead.delay(entry.to_form_data()) diff --git a/breathecode/marketing/management/commands/set_ac_academy.py b/breathecode/marketing/management/commands/set_ac_academy.py index b85494933..150d2b069 100644 --- a/breathecode/marketing/management/commands/set_ac_academy.py +++ b/breathecode/marketing/management/commands/set_ac_academy.py @@ -3,14 +3,14 @@ class Command(BaseCommand): - help = 'Sync breathecode with active campaign' + help = "Sync breathecode with active campaign" def handle(self, *args, **options): - academy = ActiveCampaignAcademy.objects.filter(academy__slug='downtown-miami').first() + academy = ActiveCampaignAcademy.objects.filter(academy__slug="downtown-miami").first() if academy is not None: Tag.objects.update(ac_academy=academy) Automation.objects.update(ac_academy=academy) - self.stdout.write(self.style.SUCCESS('Successfully sync tags and academies')) + self.stdout.write(self.style.SUCCESS("Successfully sync tags and academies")) else: - self.stdout.write(self.style.ERROR('AC Academy not found')) + self.stdout.write(self.style.ERROR("AC Academy not found")) diff --git a/breathecode/marketing/management/commands/sync_with_active_campaign.py b/breathecode/marketing/management/commands/sync_with_active_campaign.py index 25fdd640a..1e835c213 100644 --- a/breathecode/marketing/management/commands/sync_with_active_campaign.py +++ b/breathecode/marketing/management/commands/sync_with_active_campaign.py @@ -5,19 +5,19 @@ class Command(BaseCommand): - help = 'Sync breathecode with active campaign' + help = "Sync breathecode with active campaign" def handle(self, *args, **options): academies = ActiveCampaignAcademy.objects.all() for a in academies: - self.stdout.write(self.style.SUCCESS(f'Synching sync tags for {a.academy.name}')) + self.stdout.write(self.style.SUCCESS(f"Synching sync tags for {a.academy.name}")) sync_tags(a) sync_automations(a) - self.stdout.write(self.style.SUCCESS('Synching automations')) + self.stdout.write(self.style.SUCCESS("Synching automations")) # delete webhook history from 30 days ago thirty_days_ago = timezone.now() - timezone.timedelta(days=30) ActiveCampaignWebhook.objects.filter(ac_academy__id=a.id, run_at__lt=thirty_days_ago).delete() - self.stdout.write(self.style.SUCCESS('Cleaning webhook log from more than 30 days ago')) + self.stdout.write(self.style.SUCCESS("Cleaning webhook log from more than 30 days ago")) diff --git a/breathecode/marketing/management/commands/sync_won_leads.py 
b/breathecode/marketing/management/commands/sync_won_leads.py index c829ed941..03f66eac8 100644 --- a/breathecode/marketing/management/commands/sync_won_leads.py +++ b/breathecode/marketing/management/commands/sync_won_leads.py @@ -4,17 +4,17 @@ class Command(BaseCommand): - help = 'Sync breathecode with active campaign' + help = "Sync breathecode with active campaign" def handle(self, *args, **options): - entries = FormEntry.objects.filter(deal_status='WON', user__isnull=True) + entries = FormEntry.objects.filter(deal_status="WON", user__isnull=True) for entry in entries: user = User.objects.filter(email=entry.email).first() if user is not None: entry.user = user entry.save() - self.stdout.write(self.style.SUCCESS(f'Found user for formentry {entry.email}, updating...')) + self.stdout.write(self.style.SUCCESS(f"Found user for formentry {entry.email}, updating...")) - self.stdout.write(self.style.SUCCESS('Finished.')) + self.stdout.write(self.style.SUCCESS("Finished.")) diff --git a/breathecode/marketing/migrations/0001_initial.py b/breathecode/marketing/migrations/0001_initial.py index 67c93e357..85963c899 100644 --- a/breathecode/marketing/migrations/0001_initial.py +++ b/breathecode/marketing/migrations/0001_initial.py @@ -12,28 +12,28 @@ class Migration(migrations.Migration): operations = [ migrations.CreateModel( - name='Contact', + name="Contact", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('first_name', models.CharField(max_length=150)), - ('last_name', models.CharField(max_length=150)), - ('email', models.CharField(max_length=150)), - ('phone', phonenumber_field.modelfields.PhoneNumberField(blank=True, max_length=128, region=None)), - ('course', models.CharField(max_length=2)), - ('client_comments', models.CharField(max_length=2)), - ('language', models.CharField(max_length=2)), - ('utm_url', models.CharField(max_length=2)), - ('utm_medium', models.CharField(max_length=2)), - ('utm_campaign', models.CharField(max_length=2)), - ('street_address', models.CharField(max_length=250)), - ('country', models.CharField(max_length=30)), - ('city', models.CharField(max_length=30)), - ('latitude', models.DecimalField(decimal_places=6, max_digits=9)), - ('longitude', models.DecimalField(decimal_places=6, max_digits=9)), - ('state', models.CharField(max_length=30)), - ('zip_code', models.IntegerField()), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("first_name", models.CharField(max_length=150)), + ("last_name", models.CharField(max_length=150)), + ("email", models.CharField(max_length=150)), + ("phone", phonenumber_field.modelfields.PhoneNumberField(blank=True, max_length=128, region=None)), + ("course", models.CharField(max_length=2)), + ("client_comments", models.CharField(max_length=2)), + ("language", models.CharField(max_length=2)), + ("utm_url", models.CharField(max_length=2)), + ("utm_medium", models.CharField(max_length=2)), + ("utm_campaign", models.CharField(max_length=2)), + ("street_address", models.CharField(max_length=250)), + ("country", models.CharField(max_length=30)), + ("city", models.CharField(max_length=30)), + ("latitude", models.DecimalField(decimal_places=6, max_digits=9)), + ("longitude", models.DecimalField(decimal_places=6, max_digits=9)), + ("state", models.CharField(max_length=30)), + ("zip_code", models.IntegerField()), + 
("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), ], ), ] diff --git a/breathecode/marketing/migrations/0002_auto_20200618_2235.py b/breathecode/marketing/migrations/0002_auto_20200618_2235.py index 4f143f4c8..a8bece506 100644 --- a/breathecode/marketing/migrations/0002_auto_20200618_2235.py +++ b/breathecode/marketing/migrations/0002_auto_20200618_2235.py @@ -7,72 +7,70 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0001_initial'), + ("marketing", "0001_initial"), ] operations = [ migrations.AlterField( - model_name='contact', - name='client_comments', + model_name="contact", + name="client_comments", field=models.CharField(default=None, max_length=250, null=True), ), migrations.AlterField( - model_name='contact', - name='course', + model_name="contact", + name="course", field=models.CharField(default=None, max_length=30, null=True), ), migrations.AlterField( - model_name='contact', - name='last_name', + model_name="contact", + name="last_name", field=models.CharField(default=None, max_length=150, null=True), ), migrations.AlterField( - model_name='contact', - name='latitude', + model_name="contact", + name="latitude", field=models.DecimalField(decimal_places=6, default=None, max_digits=9, null=True), ), migrations.AlterField( - model_name='contact', - name='longitude', + model_name="contact", + name="longitude", field=models.DecimalField(decimal_places=6, default=None, max_digits=9, null=True), ), migrations.AlterField( - model_name='contact', - name='phone', - field=phonenumber_field.modelfields.PhoneNumberField(blank=True, - default=None, - max_length=128, - null=True, - region=None), + model_name="contact", + name="phone", + field=phonenumber_field.modelfields.PhoneNumberField( + blank=True, default=None, max_length=128, null=True, region=None + ), ), migrations.AlterField( - model_name='contact', - name='state', + model_name="contact", + name="state", field=models.CharField(default=None, max_length=30, null=True), ), migrations.AlterField( - model_name='contact', - name='street_address', + model_name="contact", + name="street_address", field=models.CharField(default=None, max_length=250, null=True), ), migrations.AlterField( - model_name='contact', - name='utm_campaign', + model_name="contact", + name="utm_campaign", field=models.CharField(default=None, max_length=2, null=True), ), migrations.AlterField( - model_name='contact', - name='utm_medium', + model_name="contact", + name="utm_medium", field=models.CharField(default=None, max_length=2, null=True), ), migrations.AlterField( - model_name='contact', - name='utm_url', + model_name="contact", + name="utm_url", field=models.CharField(max_length=250), ), migrations.AlterField( - model_name='contact', - name='zip_code', + model_name="contact", + name="zip_code", field=models.IntegerField(default=None, null=True), ), ] diff --git a/breathecode/marketing/migrations/0003_auto_20200618_2253.py b/breathecode/marketing/migrations/0003_auto_20200618_2253.py index 918e88c2d..5c85f3a92 100644 --- a/breathecode/marketing/migrations/0003_auto_20200618_2253.py +++ b/breathecode/marketing/migrations/0003_auto_20200618_2253.py @@ -7,43 +7,47 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0002_auto_20200618_2235'), + ("marketing", "0002_auto_20200618_2235"), ] operations = [ migrations.CreateModel( - name='FormEntry', + name="FormEntry", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, 
verbose_name='ID')), - ('first_name', models.CharField(max_length=150)), - ('last_name', models.CharField(default=None, max_length=150, null=True)), - ('email', models.CharField(max_length=150)), - ('phone', - phonenumber_field.modelfields.PhoneNumberField(blank=True, - default=None, - max_length=128, - null=True, - region=None)), - ('course', models.CharField(default=None, max_length=30, null=True)), - ('client_comments', models.CharField(default=None, max_length=250, null=True)), - ('language', models.CharField(max_length=2)), - ('utm_url', models.CharField(max_length=250)), - ('utm_medium', models.CharField(default=None, max_length=2, null=True)), - ('utm_campaign', models.CharField(default=None, max_length=2, null=True)), - ('street_address', models.CharField(default=None, max_length=250, null=True)), - ('country', models.CharField(max_length=30)), - ('city', models.CharField(max_length=30)), - ('latitude', models.DecimalField(decimal_places=6, default=None, max_digits=9, null=True)), - ('longitude', models.DecimalField(decimal_places=6, default=None, max_digits=9, null=True)), - ('state', models.CharField(default=None, max_length=30, null=True)), - ('zip_code', models.IntegerField(default=None, null=True)), - ('storage_status', - models.CharField(choices=[('PENDING', 'Pending'), ('PERSISTED', 'Persisted')], - default='PENDING', - max_length=15)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("first_name", models.CharField(max_length=150)), + ("last_name", models.CharField(default=None, max_length=150, null=True)), + ("email", models.CharField(max_length=150)), + ( + "phone", + phonenumber_field.modelfields.PhoneNumberField( + blank=True, default=None, max_length=128, null=True, region=None + ), + ), + ("course", models.CharField(default=None, max_length=30, null=True)), + ("client_comments", models.CharField(default=None, max_length=250, null=True)), + ("language", models.CharField(max_length=2)), + ("utm_url", models.CharField(max_length=250)), + ("utm_medium", models.CharField(default=None, max_length=2, null=True)), + ("utm_campaign", models.CharField(default=None, max_length=2, null=True)), + ("street_address", models.CharField(default=None, max_length=250, null=True)), + ("country", models.CharField(max_length=30)), + ("city", models.CharField(max_length=30)), + ("latitude", models.DecimalField(decimal_places=6, default=None, max_digits=9, null=True)), + ("longitude", models.DecimalField(decimal_places=6, default=None, max_digits=9, null=True)), + ("state", models.CharField(default=None, max_length=30, null=True)), + ("zip_code", models.IntegerField(default=None, null=True)), + ( + "storage_status", + models.CharField( + choices=[("PENDING", "Pending"), ("PERSISTED", "Persisted")], default="PENDING", max_length=15 + ), + ), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), ], ), - migrations.DeleteModel(name='Contact', ), + migrations.DeleteModel( + name="Contact", + ), ] diff --git a/breathecode/marketing/migrations/0004_auto_20200618_2308.py b/breathecode/marketing/migrations/0004_auto_20200618_2308.py index 0eaf99bd0..94048db8b 100644 --- a/breathecode/marketing/migrations/0004_auto_20200618_2308.py +++ b/breathecode/marketing/migrations/0004_auto_20200618_2308.py @@ -8,36 +8,35 @@ class Migration(migrations.Migration): dependencies = [ - 
('marketing', '0003_auto_20200618_2253'), + ("marketing", "0003_auto_20200618_2253"), ] operations = [ migrations.CreateModel( - name='Contact', + name="Contact", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('first_name', models.CharField(max_length=150)), - ('last_name', models.CharField(default=None, max_length=150, null=True)), - ('email', models.CharField(max_length=150, unique=True)), - ('phone', - phonenumber_field.modelfields.PhoneNumberField(blank=True, - default=None, - max_length=128, - null=True, - region=None)), - ('language', models.CharField(max_length=2)), - ('country', models.CharField(max_length=30)), - ('city', models.CharField(max_length=30)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("first_name", models.CharField(max_length=150)), + ("last_name", models.CharField(default=None, max_length=150, null=True)), + ("email", models.CharField(max_length=150, unique=True)), + ( + "phone", + phonenumber_field.modelfields.PhoneNumberField( + blank=True, default=None, max_length=128, null=True, region=None + ), + ), + ("language", models.CharField(max_length=2)), + ("country", models.CharField(max_length=30)), + ("city", models.CharField(max_length=30)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), ], ), migrations.AddField( - model_name='formentry', - name='contact', - field=models.ForeignKey(default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='marketing.Contact'), + model_name="formentry", + name="contact", + field=models.ForeignKey( + default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to="marketing.Contact" + ), ), ] diff --git a/breathecode/marketing/migrations/0005_auto_20200619_0108.py b/breathecode/marketing/migrations/0005_auto_20200619_0108.py index 8416540ed..c6adbf87f 100644 --- a/breathecode/marketing/migrations/0005_auto_20200619_0108.py +++ b/breathecode/marketing/migrations/0005_auto_20200619_0108.py @@ -6,39 +6,57 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0004_auto_20200618_2308'), + ("marketing", "0004_auto_20200618_2308"), ] operations = [ migrations.CreateModel( - name='Tag', + name="Tag", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('slug', models.CharField(max_length=150, unique=True)), - ('tag_type', - models.CharField(choices=[('STRONG', 'Strong'), ('SOFT', 'Soft'), ('DISCOVERY', 'Discovery'), - ('OTHER', 'Other')], - default=None, - max_length=15, - null=True)), - ('acp_id', models.IntegerField()), - ('subscribers', models.IntegerField()), - ('priority', - models.CharField(choices=[('STRONG', 'Strong'), ('SOFT', 'Soft'), ('DISCOVERY', 'Discovery'), - ('OTHER', 'Other')], - default=None, - max_length=15, - null=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("slug", models.CharField(max_length=150, unique=True)), + ( + "tag_type", + models.CharField( + choices=[ + ("STRONG", "Strong"), + ("SOFT", "Soft"), + ("DISCOVERY", "Discovery"), + ("OTHER", "Other"), + ], + default=None, + max_length=15, + null=True, + ), + ), + ("acp_id", 
models.IntegerField()), + ("subscribers", models.IntegerField()), + ( + "priority", + models.CharField( + choices=[ + ("STRONG", "Strong"), + ("SOFT", "Soft"), + ("DISCOVERY", "Discovery"), + ("OTHER", "Other"), + ], + default=None, + max_length=15, + null=True, + ), + ), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), ], ), migrations.AddField( - model_name='formentry', - name='lead_type', - field=models.CharField(choices=[('STRONG', 'Strong'), ('SOFT', 'Soft'), ('DISCOVERY', 'Discovery')], - default=None, - max_length=15, - null=True), + model_name="formentry", + name="lead_type", + field=models.CharField( + choices=[("STRONG", "Strong"), ("SOFT", "Soft"), ("DISCOVERY", "Discovery")], + default=None, + max_length=15, + null=True, + ), ), ] diff --git a/breathecode/marketing/migrations/0006_auto_20200619_0133.py b/breathecode/marketing/migrations/0006_auto_20200619_0133.py index 3594e6ee8..c7dc1eaa2 100644 --- a/breathecode/marketing/migrations/0006_auto_20200619_0133.py +++ b/breathecode/marketing/migrations/0006_auto_20200619_0133.py @@ -6,28 +6,28 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0005_auto_20200619_0108'), + ("marketing", "0005_auto_20200619_0108"), ] operations = [ migrations.RemoveField( - model_name='tag', - name='priority', + model_name="tag", + name="priority", ), migrations.AlterField( - model_name='tag', - name='acp_id', - field=models.IntegerField(help_text='The id coming from active campaign'), + model_name="tag", + name="acp_id", + field=models.IntegerField(help_text="The id coming from active campaign"), ), migrations.AlterField( - model_name='tag', - name='tag_type', + model_name="tag", + name="tag_type", field=models.CharField( - choices=[('STRONG', 'Strong'), ('SOFT', 'Soft'), ('DISCOVERY', 'Discovery'), ('OTHER', 'Other')], + choices=[("STRONG", "Strong"), ("SOFT", "Soft"), ("DISCOVERY", "Discovery"), ("OTHER", "Other")], default=None, - help_text= - 'This will be use to determine the type of lead (strong, soft, etc.), if a lead has a tag with type=strong it will be added to the automation for strong leads', + help_text="This will be use to determine the type of lead (strong, soft, etc.), if a lead has a tag with type=strong it will be added to the automation for strong leads", max_length=15, - null=True), + null=True, + ), ), ] diff --git a/breathecode/marketing/migrations/0007_auto_20200619_0151.py b/breathecode/marketing/migrations/0007_auto_20200619_0151.py index e78c858a6..78ceae6cc 100644 --- a/breathecode/marketing/migrations/0007_auto_20200619_0151.py +++ b/breathecode/marketing/migrations/0007_auto_20200619_0151.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0006_auto_20200619_0133'), + ("marketing", "0006_auto_20200619_0133"), ] operations = [ migrations.AlterField( - model_name='formentry', - name='last_name', - field=models.CharField(default='', max_length=150), + model_name="formentry", + name="last_name", + field=models.CharField(default="", max_length=150), ), ] diff --git a/breathecode/marketing/migrations/0008_auto_20200619_0214.py b/breathecode/marketing/migrations/0008_auto_20200619_0214.py index 06bbc00d3..b52ce5595 100644 --- a/breathecode/marketing/migrations/0008_auto_20200619_0214.py +++ b/breathecode/marketing/migrations/0008_auto_20200619_0214.py @@ -6,33 +6,33 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0007_auto_20200619_0151'), + ("marketing", 
"0007_auto_20200619_0151"), ] operations = [ migrations.AddField( - model_name='formentry', - name='location', + model_name="formentry", + name="location", field=models.CharField(default=None, max_length=20, null=True), ), migrations.AddField( - model_name='formentry', - name='referral_key', + model_name="formentry", + name="referral_key", field=models.CharField(default=None, max_length=50, null=True), ), migrations.AlterField( - model_name='formentry', - name='first_name', - field=models.CharField(default='', max_length=150), + model_name="formentry", + name="first_name", + field=models.CharField(default="", max_length=150), ), migrations.AlterField( - model_name='formentry', - name='utm_campaign', + model_name="formentry", + name="utm_campaign", field=models.CharField(default=None, max_length=50, null=True), ), migrations.AlterField( - model_name='formentry', - name='utm_medium', + model_name="formentry", + name="utm_medium", field=models.CharField(default=None, max_length=50, null=True), ), ] diff --git a/breathecode/marketing/migrations/0009_auto_20200619_0234.py b/breathecode/marketing/migrations/0009_auto_20200619_0234.py index ca8862764..2d5b1f587 100644 --- a/breathecode/marketing/migrations/0009_auto_20200619_0234.py +++ b/breathecode/marketing/migrations/0009_auto_20200619_0234.py @@ -6,38 +6,38 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0008_auto_20200619_0214'), + ("marketing", "0008_auto_20200619_0214"), ] operations = [ migrations.AddField( - model_name='formentry', - name='tags', - field=models.CharField(blank=True, default='', max_length=100), + model_name="formentry", + name="tags", + field=models.CharField(blank=True, default="", max_length=100), ), migrations.AlterField( - model_name='formentry', - name='client_comments', + model_name="formentry", + name="client_comments", field=models.CharField(blank=True, default=None, max_length=250, null=True), ), migrations.AlterField( - model_name='formentry', - name='location', + model_name="formentry", + name="location", field=models.CharField(blank=True, default=None, max_length=20, null=True), ), migrations.AlterField( - model_name='formentry', - name='referral_key', + model_name="formentry", + name="referral_key", field=models.CharField(blank=True, default=None, max_length=50, null=True), ), migrations.AlterField( - model_name='formentry', - name='utm_campaign', + model_name="formentry", + name="utm_campaign", field=models.CharField(blank=True, default=None, max_length=50, null=True), ), migrations.AlterField( - model_name='formentry', - name='utm_medium', + model_name="formentry", + name="utm_medium", field=models.CharField(blank=True, default=None, max_length=50, null=True), ), ] diff --git a/breathecode/marketing/migrations/0010_automation.py b/breathecode/marketing/migrations/0010_automation.py index ca021c43e..76b4e136c 100644 --- a/breathecode/marketing/migrations/0010_automation.py +++ b/breathecode/marketing/migrations/0010_automation.py @@ -6,26 +6,30 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0009_auto_20200619_0234'), + ("marketing", "0009_auto_20200619_0234"), ] operations = [ migrations.CreateModel( - name='Automation', + name="Automation", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('slug', - models.CharField(blank=True, - default='', - help_text='unique string id that is used to connect incoming leads to automations', - max_length=150)), - ('name', 
models.CharField(max_length=100)), - ('acp_id', models.PositiveSmallIntegerField(help_text='ID asigned in active campaign')), - ('status', models.PositiveSmallIntegerField(help_text='2 = inactive, 1=active')), - ('entered', models.PositiveSmallIntegerField(help_text='How many contacts have entered')), - ('exited', models.PositiveSmallIntegerField(help_text='How many contacts have exited')), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "slug", + models.CharField( + blank=True, + default="", + help_text="unique string id that is used to connect incoming leads to automations", + max_length=150, + ), + ), + ("name", models.CharField(max_length=100)), + ("acp_id", models.PositiveSmallIntegerField(help_text="ID asigned in active campaign")), + ("status", models.PositiveSmallIntegerField(help_text="2 = inactive, 1=active")), + ("entered", models.PositiveSmallIntegerField(help_text="How many contacts have entered")), + ("exited", models.PositiveSmallIntegerField(help_text="How many contacts have exited")), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), ], ), ] diff --git a/breathecode/marketing/migrations/0011_auto_20200619_1848.py b/breathecode/marketing/migrations/0011_auto_20200619_1848.py index 2eba92f72..8c7ddf396 100644 --- a/breathecode/marketing/migrations/0011_auto_20200619_1848.py +++ b/breathecode/marketing/migrations/0011_auto_20200619_1848.py @@ -6,15 +6,17 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0010_automation'), + ("marketing", "0010_automation"), ] operations = [ migrations.AlterField( - model_name='automation', - name='status', - field=models.PositiveSmallIntegerField(choices=[('2', 'Active'), ('1', 'Innactive'), ('0', 'Uknown')], - default='0', - help_text='2 = inactive, 1=active'), + model_name="automation", + name="status", + field=models.PositiveSmallIntegerField( + choices=[("2", "Active"), ("1", "Innactive"), ("0", "Uknown")], + default="0", + help_text="2 = inactive, 1=active", + ), ), ] diff --git a/breathecode/marketing/migrations/0012_auto_20200619_1851.py b/breathecode/marketing/migrations/0012_auto_20200619_1851.py index c3fe3f008..3b54870f9 100644 --- a/breathecode/marketing/migrations/0012_auto_20200619_1851.py +++ b/breathecode/marketing/migrations/0012_auto_20200619_1851.py @@ -6,16 +6,18 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0011_auto_20200619_1848'), + ("marketing", "0011_auto_20200619_1848"), ] operations = [ migrations.AlterField( - model_name='automation', - name='status', - field=models.CharField(choices=[('2', 'Active'), ('1', 'Innactive'), ('0', 'Uknown')], - default='0', - help_text='2 = inactive, 1=active', - max_length=1), + model_name="automation", + name="status", + field=models.CharField( + choices=[("2", "Active"), ("1", "Innactive"), ("0", "Uknown")], + default="0", + help_text="2 = inactive, 1=active", + max_length=1, + ), ), ] diff --git a/breathecode/marketing/migrations/0013_auto_20200619_1903.py b/breathecode/marketing/migrations/0013_auto_20200619_1903.py index 619c5664e..4adccf40c 100644 --- a/breathecode/marketing/migrations/0013_auto_20200619_1903.py +++ b/breathecode/marketing/migrations/0013_auto_20200619_1903.py @@ -7,25 +7,29 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', 
'0012_auto_20200619_1851'), + ("marketing", "0012_auto_20200619_1851"), ] operations = [ migrations.AddField( - model_name='tag', - name='automation', - field=models.ForeignKey(default=None, - help_text='Leads that contain this tag will be asociated to this automation', - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='marketing.Automation'), + model_name="tag", + name="automation", + field=models.ForeignKey( + default=None, + help_text="Leads that contain this tag will be asociated to this automation", + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="marketing.Automation", + ), ), migrations.AlterField( - model_name='automation', - name='status', - field=models.CharField(choices=[('1', 'Active'), ('2', 'Innactive'), ('0', 'Uknown')], - default='0', - help_text='2 = inactive, 1=active', - max_length=1), + model_name="automation", + name="status", + field=models.CharField( + choices=[("1", "Active"), ("2", "Innactive"), ("0", "Uknown")], + default="0", + help_text="2 = inactive, 1=active", + max_length=1, + ), ), ] diff --git a/breathecode/marketing/migrations/0014_auto_20200619_1920.py b/breathecode/marketing/migrations/0014_auto_20200619_1920.py index 58823fa4a..2337c731c 100644 --- a/breathecode/marketing/migrations/0014_auto_20200619_1920.py +++ b/breathecode/marketing/migrations/0014_auto_20200619_1920.py @@ -6,19 +6,19 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0013_auto_20200619_1903'), + ("marketing", "0013_auto_20200619_1903"), ] operations = [ migrations.AlterField( - model_name='tag', - name='tag_type', + model_name="tag", + name="tag_type", field=models.CharField( - choices=[('STRONG', 'Strong'), ('SOFT', 'Soft'), ('DISCOVERY', 'Discovery'), ('OTHER', 'Other')], + choices=[("STRONG", "Strong"), ("SOFT", "Soft"), ("DISCOVERY", "Discovery"), ("OTHER", "Other")], default=None, - help_text= - "The STRONG tags in a lead will determine to witch automation it does unless there is an 'automation' property on the lead JSON", + help_text="The STRONG tags in a lead will determine to witch automation it does unless there is an 'automation' property on the lead JSON", max_length=15, - null=True), + null=True, + ), ), ] diff --git a/breathecode/marketing/migrations/0015_auto_20200619_1930.py b/breathecode/marketing/migrations/0015_auto_20200619_1930.py index 5429bdbc1..ece87e641 100644 --- a/breathecode/marketing/migrations/0015_auto_20200619_1930.py +++ b/breathecode/marketing/migrations/0015_auto_20200619_1930.py @@ -6,27 +6,27 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0014_auto_20200619_1920'), + ("marketing", "0014_auto_20200619_1920"), ] operations = [ migrations.AddField( - model_name='formentry', - name='automations', - field=models.ManyToManyField(to='marketing.Automation'), + model_name="formentry", + name="automations", + field=models.ManyToManyField(to="marketing.Automation"), ), migrations.AddField( - model_name='formentry', - name='raw_tags', - field=models.CharField(blank=True, default='', max_length=100), + model_name="formentry", + name="raw_tags", + field=models.CharField(blank=True, default="", max_length=100), ), migrations.RemoveField( - model_name='formentry', - name='tags', + model_name="formentry", + name="tags", ), migrations.AddField( - model_name='formentry', - name='tags', - field=models.ManyToManyField(to='marketing.Tag'), + model_name="formentry", + name="tags", + field=models.ManyToManyField(to="marketing.Tag"), ), ] diff --git 
a/breathecode/marketing/migrations/0016_auto_20200619_1941.py b/breathecode/marketing/migrations/0016_auto_20200619_1941.py index 781172d29..c1e7e12ec 100644 --- a/breathecode/marketing/migrations/0016_auto_20200619_1941.py +++ b/breathecode/marketing/migrations/0016_auto_20200619_1941.py @@ -6,18 +6,18 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0015_auto_20200619_1930'), + ("marketing", "0015_auto_20200619_1930"), ] operations = [ migrations.AlterField( - model_name='automation', - name='acp_id', - field=models.PositiveSmallIntegerField(help_text='ID asigned in active campaign', unique=True), + model_name="automation", + name="acp_id", + field=models.PositiveSmallIntegerField(help_text="ID asigned in active campaign", unique=True), ), migrations.AlterField( - model_name='tag', - name='acp_id', - field=models.IntegerField(help_text='The id coming from active campaign', unique=True), + model_name="tag", + name="acp_id", + field=models.IntegerField(help_text="The id coming from active campaign", unique=True), ), ] diff --git a/breathecode/marketing/migrations/0017_auto_20200619_2020.py b/breathecode/marketing/migrations/0017_auto_20200619_2020.py index 4a179aea6..d6c776293 100644 --- a/breathecode/marketing/migrations/0017_auto_20200619_2020.py +++ b/breathecode/marketing/migrations/0017_auto_20200619_2020.py @@ -6,40 +6,40 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0016_auto_20200619_1941'), + ("marketing", "0016_auto_20200619_1941"), ] operations = [ migrations.RemoveField( - model_name='formentry', - name='raw_tags', + model_name="formentry", + name="raw_tags", ), migrations.AddField( - model_name='formentry', - name='automation_objects', - field=models.ManyToManyField(blank=True, to='marketing.Automation'), + model_name="formentry", + name="automation_objects", + field=models.ManyToManyField(blank=True, to="marketing.Automation"), ), migrations.AddField( - model_name='formentry', - name='tag_objects', - field=models.ManyToManyField(blank=True, to='marketing.Tag'), + model_name="formentry", + name="tag_objects", + field=models.ManyToManyField(blank=True, to="marketing.Tag"), ), migrations.RemoveField( - model_name='formentry', - name='automations', + model_name="formentry", + name="automations", ), migrations.AddField( - model_name='formentry', - name='automations', - field=models.CharField(blank=True, default='', max_length=100), + model_name="formentry", + name="automations", + field=models.CharField(blank=True, default="", max_length=100), ), migrations.RemoveField( - model_name='formentry', - name='tags', + model_name="formentry", + name="tags", ), migrations.AddField( - model_name='formentry', - name='tags', - field=models.CharField(blank=True, default='', max_length=100), + model_name="formentry", + name="tags", + field=models.CharField(blank=True, default="", max_length=100), ), ] diff --git a/breathecode/marketing/migrations/0018_auto_20200708_0049.py b/breathecode/marketing/migrations/0018_auto_20200708_0049.py index 6343c4bb8..e57f4bc11 100644 --- a/breathecode/marketing/migrations/0018_auto_20200708_0049.py +++ b/breathecode/marketing/migrations/0018_auto_20200708_0049.py @@ -6,21 +6,23 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0017_auto_20200619_2020'), + ("marketing", "0017_auto_20200619_2020"), ] operations = [ migrations.AlterField( - model_name='automation', - name='slug', - field=models.SlugField(blank=True, - default='', - help_text='unique string id that is used to 
connect incoming leads to automations', - max_length=150), + model_name="automation", + name="slug", + field=models.SlugField( + blank=True, + default="", + help_text="unique string id that is used to connect incoming leads to automations", + max_length=150, + ), ), migrations.AlterField( - model_name='tag', - name='slug', + model_name="tag", + name="slug", field=models.SlugField(max_length=150, unique=True), ), ] diff --git a/breathecode/marketing/migrations/0019_auto_20200921_2008.py b/breathecode/marketing/migrations/0019_auto_20200921_2008.py index d74510374..758283352 100644 --- a/breathecode/marketing/migrations/0019_auto_20200921_2008.py +++ b/breathecode/marketing/migrations/0019_auto_20200921_2008.py @@ -7,13 +7,13 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0018_auto_20200708_0049'), + ("marketing", "0018_auto_20200708_0049"), ] operations = [ migrations.AlterField( - model_name='contact', - name='phone', + model_name="contact", + name="phone", field=models.CharField( blank=True, default=None, @@ -22,12 +22,14 @@ class Migration(migrations.Migration): validators=[ django.core.validators.RegexValidator( message="Phone number must be entered in the format: '+999999999'. Up to 15 digits allowed.", - regex='^\\+?1?\\d{9,15}$') - ]), + regex="^\\+?1?\\d{9,15}$", + ) + ], + ), ), migrations.AlterField( - model_name='formentry', - name='phone', + model_name="formentry", + name="phone", field=models.CharField( blank=True, default=None, @@ -36,7 +38,9 @@ class Migration(migrations.Migration): validators=[ django.core.validators.RegexValidator( message="Phone number must be entered in the format: '+999999999'. Up to 15 digits allowed.", - regex='^\\+?1?\\d{9,15}$') - ]), + regex="^\\+?1?\\d{9,15}$", + ) + ], + ), ), ] diff --git a/breathecode/marketing/migrations/0020_formentry_gclid.py b/breathecode/marketing/migrations/0020_formentry_gclid.py index ba979b1a3..72e14fdae 100644 --- a/breathecode/marketing/migrations/0020_formentry_gclid.py +++ b/breathecode/marketing/migrations/0020_formentry_gclid.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0019_auto_20200921_2008'), + ("marketing", "0019_auto_20200921_2008"), ] operations = [ migrations.AddField( - model_name='formentry', - name='gclid', + model_name="formentry", + name="gclid", field=models.CharField(blank=True, default=None, max_length=255, null=True), ), ] diff --git a/breathecode/marketing/migrations/0021_auto_20201009_0224.py b/breathecode/marketing/migrations/0021_auto_20201009_0224.py index f8eafea5a..d76d3136d 100644 --- a/breathecode/marketing/migrations/0021_auto_20201009_0224.py +++ b/breathecode/marketing/migrations/0021_auto_20201009_0224.py @@ -7,57 +7,55 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0020_formentry_gclid'), + ("marketing", "0020_formentry_gclid"), ] operations = [ migrations.AddField( - model_name='formentry', - name='browser_lang', + model_name="formentry", + name="browser_lang", field=models.CharField(blank=True, default=None, max_length=5, null=True), ), migrations.AlterField( - model_name='formentry', - name='city', + model_name="formentry", + name="city", field=models.CharField(blank=True, default=None, max_length=30, null=True), ), migrations.AlterField( - model_name='formentry', - name='contact', - field=models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='marketing.contact'), + model_name="formentry", + name="contact", + 
field=models.ForeignKey( + blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to="marketing.contact" + ), ), migrations.AlterField( - model_name='formentry', - name='country', + model_name="formentry", + name="country", field=models.CharField(blank=True, default=None, max_length=30, null=True), ), migrations.AlterField( - model_name='formentry', - name='latitude', + model_name="formentry", + name="latitude", field=models.DecimalField(blank=True, decimal_places=6, default=None, max_digits=9, null=True), ), migrations.AlterField( - model_name='formentry', - name='longitude', + model_name="formentry", + name="longitude", field=models.DecimalField(blank=True, decimal_places=6, default=None, max_digits=9, null=True), ), migrations.AlterField( - model_name='formentry', - name='state', + model_name="formentry", + name="state", field=models.CharField(blank=True, default=None, max_length=30, null=True), ), migrations.AlterField( - model_name='formentry', - name='street_address', + model_name="formentry", + name="street_address", field=models.CharField(blank=True, default=None, max_length=250, null=True), ), migrations.AlterField( - model_name='formentry', - name='zip_code', + model_name="formentry", + name="zip_code", field=models.IntegerField(blank=True, default=None, null=True), ), ] diff --git a/breathecode/marketing/migrations/0022_auto_20201009_1817.py b/breathecode/marketing/migrations/0022_auto_20201009_1817.py index 40775a03b..2165b2b10 100644 --- a/breathecode/marketing/migrations/0022_auto_20201009_1817.py +++ b/breathecode/marketing/migrations/0022_auto_20201009_1817.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0021_auto_20201009_0224'), + ("marketing", "0021_auto_20201009_0224"), ] operations = [ migrations.AlterField( - model_name='formentry', - name='browser_lang', + model_name="formentry", + name="browser_lang", field=models.CharField(blank=True, default=None, max_length=10, null=True), ), ] diff --git a/breathecode/marketing/migrations/0023_auto_20201011_0014.py b/breathecode/marketing/migrations/0023_auto_20201011_0014.py index db55e1eda..097db4d85 100644 --- a/breathecode/marketing/migrations/0023_auto_20201011_0014.py +++ b/breathecode/marketing/migrations/0023_auto_20201011_0014.py @@ -6,18 +6,18 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0022_auto_20201009_1817'), + ("marketing", "0022_auto_20201009_1817"), ] operations = [ migrations.AlterField( - model_name='formentry', - name='latitude', + model_name="formentry", + name="latitude", field=models.DecimalField(blank=True, decimal_places=15, default=None, max_digits=30, null=True), ), migrations.AlterField( - model_name='formentry', - name='longitude', + model_name="formentry", + name="longitude", field=models.DecimalField(blank=True, decimal_places=15, default=None, max_digits=30, null=True), ), ] diff --git a/breathecode/marketing/migrations/0024_auto_20201109_2353.py b/breathecode/marketing/migrations/0024_auto_20201109_2353.py index 8333e58f4..45f15701a 100644 --- a/breathecode/marketing/migrations/0024_auto_20201109_2353.py +++ b/breathecode/marketing/migrations/0024_auto_20201109_2353.py @@ -6,48 +6,48 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0023_auto_20201011_0014'), + ("marketing", "0023_auto_20201011_0014"), ] operations = [ migrations.AddField( - model_name='formentry', - name='fb_ad_id', + model_name="formentry", + name="fb_ad_id", field=models.IntegerField(blank=True, 
default=None, null=True), ), migrations.AddField( - model_name='formentry', - name='fb_adgroup_id', + model_name="formentry", + name="fb_adgroup_id", field=models.IntegerField(blank=True, default=None, null=True), ), migrations.AddField( - model_name='formentry', - name='fb_form_id', + model_name="formentry", + name="fb_form_id", field=models.IntegerField(blank=True, default=None, null=True), ), migrations.AddField( - model_name='formentry', - name='fb_leadgen_id', + model_name="formentry", + name="fb_leadgen_id", field=models.IntegerField(blank=True, default=None, null=True), ), migrations.AddField( - model_name='formentry', - name='fb_page_id', + model_name="formentry", + name="fb_page_id", field=models.IntegerField(blank=True, default=None, null=True), ), migrations.AlterField( - model_name='formentry', - name='email', + model_name="formentry", + name="email", field=models.CharField(blank=True, default=None, max_length=150, null=True), ), migrations.AlterField( - model_name='formentry', - name='language', - field=models.CharField(default='en', max_length=2), + model_name="formentry", + name="language", + field=models.CharField(default="en", max_length=2), ), migrations.AlterField( - model_name='formentry', - name='utm_url', + model_name="formentry", + name="utm_url", field=models.CharField(blank=True, default=None, max_length=250, null=True), ), ] diff --git a/breathecode/marketing/migrations/0025_auto_20201110_0001.py b/breathecode/marketing/migrations/0025_auto_20201110_0001.py index f2e2a5a5d..05ca5478c 100644 --- a/breathecode/marketing/migrations/0025_auto_20201110_0001.py +++ b/breathecode/marketing/migrations/0025_auto_20201110_0001.py @@ -6,33 +6,33 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0024_auto_20201109_2353'), + ("marketing", "0024_auto_20201109_2353"), ] operations = [ migrations.AlterField( - model_name='formentry', - name='fb_ad_id', + model_name="formentry", + name="fb_ad_id", field=models.BigIntegerField(blank=True, default=None, null=True), ), migrations.AlterField( - model_name='formentry', - name='fb_adgroup_id', + model_name="formentry", + name="fb_adgroup_id", field=models.BigIntegerField(blank=True, default=None, null=True), ), migrations.AlterField( - model_name='formentry', - name='fb_form_id', + model_name="formentry", + name="fb_form_id", field=models.BigIntegerField(blank=True, default=None, null=True), ), migrations.AlterField( - model_name='formentry', - name='fb_leadgen_id', + model_name="formentry", + name="fb_leadgen_id", field=models.BigIntegerField(blank=True, default=None, null=True), ), migrations.AlterField( - model_name='formentry', - name='fb_page_id', + model_name="formentry", + name="fb_page_id", field=models.BigIntegerField(blank=True, default=None, null=True), ), ] diff --git a/breathecode/marketing/migrations/0026_formentry_utm_source.py b/breathecode/marketing/migrations/0026_formentry_utm_source.py index e5c13bcaf..0b65037b9 100644 --- a/breathecode/marketing/migrations/0026_formentry_utm_source.py +++ b/breathecode/marketing/migrations/0026_formentry_utm_source.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0025_auto_20201110_0001'), + ("marketing", "0025_auto_20201110_0001"), ] operations = [ migrations.AddField( - model_name='formentry', - name='utm_source', + model_name="formentry", + name="utm_source", field=models.CharField(blank=True, default=None, max_length=50, null=True), ), ] diff --git a/breathecode/marketing/migrations/0027_shortlink.py 
b/breathecode/marketing/migrations/0027_shortlink.py index b604b830a..e4991c66a 100644 --- a/breathecode/marketing/migrations/0027_shortlink.py +++ b/breathecode/marketing/migrations/0027_shortlink.py @@ -8,32 +8,34 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0012_auto_20201124_1737'), + ("admissions", "0012_auto_20201124_1737"), migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ('marketing', '0026_formentry_utm_source'), + ("marketing", "0026_formentry_utm_source"), ] operations = [ migrations.CreateModel( - name='ShortLink', + name="ShortLink", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('slug', models.SlugField(max_length=150, unique=True)), - ('destination', models.URLField()), - ('hits', models.IntegerField(default=0)), - ('active', models.BooleanField(default=True)), - ('destination_status', - models.CharField(choices=[('ACTIVE', 'Active'), ('NOT_FOUND', 'Not found')], - default='ACTIVE', - max_length=15)), - ('utm_content', models.CharField(blank=True, default=None, max_length=250, null=True)), - ('utm_medium', models.CharField(blank=True, default=None, max_length=50, null=True)), - ('utm_campaign', models.CharField(blank=True, default=None, max_length=50, null=True)), - ('utm_source', models.CharField(blank=True, default=None, max_length=50, null=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('academy', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='admissions.academy')), - ('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("slug", models.SlugField(max_length=150, unique=True)), + ("destination", models.URLField()), + ("hits", models.IntegerField(default=0)), + ("active", models.BooleanField(default=True)), + ( + "destination_status", + models.CharField( + choices=[("ACTIVE", "Active"), ("NOT_FOUND", "Not found")], default="ACTIVE", max_length=15 + ), + ), + ("utm_content", models.CharField(blank=True, default=None, max_length=250, null=True)), + ("utm_medium", models.CharField(blank=True, default=None, max_length=50, null=True)), + ("utm_campaign", models.CharField(blank=True, default=None, max_length=50, null=True)), + ("utm_source", models.CharField(blank=True, default=None, max_length=50, null=True)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("academy", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="admissions.academy")), + ("author", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ], ), ] diff --git a/breathecode/marketing/migrations/0028_auto_20201218_0534.py b/breathecode/marketing/migrations/0028_auto_20201218_0534.py index f7f943164..2c244621a 100644 --- a/breathecode/marketing/migrations/0028_auto_20201218_0534.py +++ b/breathecode/marketing/migrations/0028_auto_20201218_0534.py @@ -7,70 +7,83 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0014_auto_20201218_0534'), - ('marketing', '0027_shortlink'), + ("admissions", "0014_auto_20201218_0534"), + ("marketing", "0027_shortlink"), ] operations = [ migrations.AddField( - model_name='contact', - name='academy', - field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, 
to='admissions.academy'), + model_name="contact", + name="academy", + field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to="admissions.academy"), preserve_default=False, ), migrations.AddField( - model_name='formentry', - name='academy', - field=models.ForeignKey(default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='admissions.academy'), + model_name="formentry", + name="academy", + field=models.ForeignKey( + default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to="admissions.academy" + ), ), migrations.CreateModel( - name='ActiveCampaignAcademy', + name="ActiveCampaignAcademy", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('ac_key', models.CharField(max_length=150)), - ('ac_url', models.CharField(max_length=150)), - ('sync_status', - models.CharField(choices=[('INCOMPLETED', 'Incompleted'), ('COMPLETED', 'Completed')], - default='INCOMPLETED', - help_text='Automatically set when interacting with the Active Campaign API', - max_length=15)), - ('sync_message', - models.CharField(blank=True, - default=None, - help_text='Contains any success or error messages depending on the status', - max_length=100, - null=True)), - ('last_interaction_at', models.DateTimeField(blank=True, default=None, null=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('academy', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='admissions.academy')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("ac_key", models.CharField(max_length=150)), + ("ac_url", models.CharField(max_length=150)), + ( + "sync_status", + models.CharField( + choices=[("INCOMPLETED", "Incompleted"), ("COMPLETED", "Completed")], + default="INCOMPLETED", + help_text="Automatically set when interacting with the Active Campaign API", + max_length=15, + ), + ), + ( + "sync_message", + models.CharField( + blank=True, + default=None, + help_text="Contains any success or error messages depending on the status", + max_length=100, + null=True, + ), + ), + ("last_interaction_at", models.DateTimeField(blank=True, default=None, null=True)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("academy", models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to="admissions.academy")), ], ), migrations.AddField( - model_name='automation', - name='ac_academy', - field=models.ForeignKey(default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='marketing.activecampaignacademy'), + model_name="automation", + name="ac_academy", + field=models.ForeignKey( + default=None, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="marketing.activecampaignacademy", + ), ), migrations.AddField( - model_name='formentry', - name='ac_academy', - field=models.ForeignKey(default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='marketing.activecampaignacademy'), + model_name="formentry", + name="ac_academy", + field=models.ForeignKey( + default=None, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="marketing.activecampaignacademy", + ), ), migrations.AddField( - model_name='tag', - name='ac_academy', - field=models.ForeignKey(default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='marketing.activecampaignacademy'), + 
model_name="tag", + name="ac_academy", + field=models.ForeignKey( + default=None, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="marketing.activecampaignacademy", + ), ), ] diff --git a/breathecode/marketing/migrations/0029_auto_20201218_0631.py b/breathecode/marketing/migrations/0029_auto_20201218_0631.py index 78eba8214..647d7af2a 100644 --- a/breathecode/marketing/migrations/0029_auto_20201218_0631.py +++ b/breathecode/marketing/migrations/0029_auto_20201218_0631.py @@ -6,23 +6,23 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0028_auto_20201218_0534'), + ("marketing", "0028_auto_20201218_0534"), ] operations = [ migrations.AlterField( - model_name='automation', - name='acp_id', - field=models.PositiveIntegerField(help_text='ID asigned in active campaign'), + model_name="automation", + name="acp_id", + field=models.PositiveIntegerField(help_text="ID asigned in active campaign"), ), migrations.AlterField( - model_name='tag', - name='acp_id', - field=models.IntegerField(help_text='The id coming from active campaign'), + model_name="tag", + name="acp_id", + field=models.IntegerField(help_text="The id coming from active campaign"), ), migrations.AlterField( - model_name='tag', - name='slug', + model_name="tag", + name="slug", field=models.SlugField(max_length=150), ), ] diff --git a/breathecode/marketing/migrations/0030_activecampaignacademy_event_attendancy_automation.py b/breathecode/marketing/migrations/0030_activecampaignacademy_event_attendancy_automation.py index 9c48cb4aa..65c9f7938 100644 --- a/breathecode/marketing/migrations/0030_activecampaignacademy_event_attendancy_automation.py +++ b/breathecode/marketing/migrations/0030_activecampaignacademy_event_attendancy_automation.py @@ -7,16 +7,15 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0029_auto_20201218_0631'), + ("marketing", "0029_auto_20201218_0631"), ] operations = [ migrations.AddField( - model_name='activecampaignacademy', - name='event_attendancy_automation', - field=models.ForeignKey(default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='marketing.automation'), + model_name="activecampaignacademy", + name="event_attendancy_automation", + field=models.ForeignKey( + default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to="marketing.automation" + ), ), ] diff --git a/breathecode/marketing/migrations/0031_auto_20210123_0259.py b/breathecode/marketing/migrations/0031_auto_20210123_0259.py index 574388c89..1ea651358 100644 --- a/breathecode/marketing/migrations/0031_auto_20210123_0259.py +++ b/breathecode/marketing/migrations/0031_auto_20210123_0259.py @@ -7,17 +7,19 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0030_activecampaignacademy_event_attendancy_automation'), + ("marketing", "0030_activecampaignacademy_event_attendancy_automation"), ] operations = [ migrations.AlterField( - model_name='activecampaignacademy', - name='event_attendancy_automation', - field=models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='marketing.automation'), + model_name="activecampaignacademy", + name="event_attendancy_automation", + field=models.ForeignKey( + blank=True, + default=None, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="marketing.automation", + ), ), ] diff --git a/breathecode/marketing/migrations/0032_auto_20210128_1745.py b/breathecode/marketing/migrations/0032_auto_20210128_1745.py index 
e2ffc07d7..366e8541d 100644 --- a/breathecode/marketing/migrations/0032_auto_20210128_1745.py +++ b/breathecode/marketing/migrations/0032_auto_20210128_1745.py @@ -7,22 +7,24 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0031_auto_20210123_0259'), + ("marketing", "0031_auto_20210123_0259"), ] operations = [ migrations.AlterField( - model_name='formentry', - name='ac_academy', - field=models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='marketing.activecampaignacademy'), + model_name="formentry", + name="ac_academy", + field=models.ForeignKey( + blank=True, + default=None, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="marketing.activecampaignacademy", + ), ), migrations.AlterField( - model_name='formentry', - name='last_name', - field=models.CharField(blank=True, default='', max_length=150), + model_name="formentry", + name="last_name", + field=models.CharField(blank=True, default="", max_length=150), ), ] diff --git a/breathecode/marketing/migrations/0033_auto_20210302_0102.py b/breathecode/marketing/migrations/0033_auto_20210302_0102.py index 682975c59..93d5121ea 100644 --- a/breathecode/marketing/migrations/0033_auto_20210302_0102.py +++ b/breathecode/marketing/migrations/0033_auto_20210302_0102.py @@ -6,26 +6,22 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0032_auto_20210128_1745'), + ("marketing", "0032_auto_20210128_1745"), ] operations = [ migrations.AddField( - model_name='formentry', - name='deal_status', - field=models.CharField(blank=True, - choices=[('WON', 'Won'), ('LOST', 'Lost')], - default=None, - max_length=15, - null=True), + model_name="formentry", + name="deal_status", + field=models.CharField( + blank=True, choices=[("WON", "Won"), ("LOST", "Lost")], default=None, max_length=15, null=True + ), ), migrations.AddField( - model_name='formentry', - name='sentiment', - field=models.CharField(blank=True, - choices=[('GOOD', 'Good'), ('BAD', 'Bad')], - default=None, - max_length=15, - null=True), + model_name="formentry", + name="sentiment", + field=models.CharField( + blank=True, choices=[("GOOD", "Good"), ("BAD", "Bad")], default=None, max_length=15, null=True + ), ), ] diff --git a/breathecode/marketing/migrations/0034_auto_20210430_0035.py b/breathecode/marketing/migrations/0034_auto_20210430_0035.py index 993a61303..653a4277c 100644 --- a/breathecode/marketing/migrations/0034_auto_20210430_0035.py +++ b/breathecode/marketing/migrations/0034_auto_20210430_0035.py @@ -7,61 +7,79 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0033_auto_20210302_0102'), + ("marketing", "0033_auto_20210302_0102"), ] operations = [ migrations.AddField( - model_name='formentry', - name='ac_contact_id', - field=models.CharField(blank=True, - default=None, - help_text='Active Campaign Contact ID', - max_length=20, - null=True), + model_name="formentry", + name="ac_contact_id", + field=models.CharField( + blank=True, default=None, help_text="Active Campaign Contact ID", max_length=20, null=True + ), ), migrations.AddField( - model_name='formentry', - name='ac_deal_id', - field=models.CharField(blank=True, - default=None, - help_text='Active Campaign Deal ID', - max_length=20, - null=True), + model_name="formentry", + name="ac_deal_id", + field=models.CharField( + blank=True, default=None, help_text="Active Campaign Deal ID", max_length=20, null=True + ), ), migrations.CreateModel( - name='ActiveCampaignWebhook', + 
name="ActiveCampaignWebhook", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('webhook_type', models.CharField(blank=True, default=None, max_length=100, null=True)), - ('run_at', models.DateTimeField(help_text='Date/time that the webhook ran')), - ('initiated_by', - models.CharField(help_text='Source/section of the software that triggered the webhook to run', - max_length=100)), - ('payload', - models.JSONField( - help_text='Extra info that came on the request, it varies depending on the webhook type')), - ('status', - models.CharField(choices=[('PENDING', 'Pending'), ('DONE', 'Done'), ('ERROR', 'Error')], - default='PENDING', - max_length=9)), - ('status_text', models.CharField(blank=True, default=None, max_length=255, null=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('ac_academy', - models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='marketing.activecampaignacademy')), - ('contact', - models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='marketing.contact')), - ('form_entry', - models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='marketing.formentry')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("webhook_type", models.CharField(blank=True, default=None, max_length=100, null=True)), + ("run_at", models.DateTimeField(help_text="Date/time that the webhook ran")), + ( + "initiated_by", + models.CharField( + help_text="Source/section of the software that triggered the webhook to run", max_length=100 + ), + ), + ( + "payload", + models.JSONField( + help_text="Extra info that came on the request, it varies depending on the webhook type" + ), + ), + ( + "status", + models.CharField( + choices=[("PENDING", "Pending"), ("DONE", "Done"), ("ERROR", "Error")], + default="PENDING", + max_length=9, + ), + ), + ("status_text", models.CharField(blank=True, default=None, max_length=255, null=True)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ( + "ac_academy", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, to="marketing.activecampaignacademy" + ), + ), + ( + "contact", + models.ForeignKey( + blank=True, + default=None, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="marketing.contact", + ), + ), + ( + "form_entry", + models.ForeignKey( + blank=True, + default=None, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="marketing.formentry", + ), + ), ], ), ] diff --git a/breathecode/marketing/migrations/0035_auto_20210607_2111.py b/breathecode/marketing/migrations/0035_auto_20210607_2111.py index 51789f80e..ec20e67ac 100644 --- a/breathecode/marketing/migrations/0035_auto_20210607_2111.py +++ b/breathecode/marketing/migrations/0035_auto_20210607_2111.py @@ -7,21 +7,21 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0018_alter_cohortuser_role'), - ('marketing', '0034_auto_20210430_0035'), + ("admissions", "0018_alter_cohortuser_role"), + ("marketing", "0034_auto_20210430_0035"), ] operations = [ migrations.RemoveField( - model_name='formentry', - name='ac_academy', + model_name="formentry", + name="ac_academy", ), migrations.CreateModel( - name='AcademyAlias', + name="AcademyAlias", fields=[ - ('slug', 
models.SlugField(primary_key=True, serialize=False)), - ('active_campaign_slug', models.SlugField()), - ('academy', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='admissions.academy')), + ("slug", models.SlugField(primary_key=True, serialize=False)), + ("active_campaign_slug", models.SlugField()), + ("academy", models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to="admissions.academy")), ], ), ] diff --git a/breathecode/marketing/migrations/0036_alter_academyalias_academy.py b/breathecode/marketing/migrations/0036_alter_academyalias_academy.py index 07f740346..f913f86be 100644 --- a/breathecode/marketing/migrations/0036_alter_academyalias_academy.py +++ b/breathecode/marketing/migrations/0036_alter_academyalias_academy.py @@ -7,14 +7,14 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0018_alter_cohortuser_role'), - ('marketing', '0035_auto_20210607_2111'), + ("admissions", "0018_alter_cohortuser_role"), + ("marketing", "0035_auto_20210607_2111"), ] operations = [ migrations.AlterField( - model_name='academyalias', - name='academy', - field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='admissions.academy'), + model_name="academyalias", + name="academy", + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="admissions.academy"), ), ] diff --git a/breathecode/marketing/migrations/0037_auto_20210608_0627.py b/breathecode/marketing/migrations/0037_auto_20210608_0627.py index bdb19bd1a..31da3f3f0 100644 --- a/breathecode/marketing/migrations/0037_auto_20210608_0627.py +++ b/breathecode/marketing/migrations/0037_auto_20210608_0627.py @@ -6,38 +6,38 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0036_alter_academyalias_academy'), + ("marketing", "0036_alter_academyalias_academy"), ] operations = [ migrations.AlterField( - model_name='formentry', - name='course', + model_name="formentry", + name="course", field=models.CharField(default=None, max_length=70, null=True), ), migrations.AlterField( - model_name='formentry', - name='location', + model_name="formentry", + name="location", field=models.CharField(blank=True, default=None, max_length=70, null=True), ), migrations.AlterField( - model_name='formentry', - name='referral_key', + model_name="formentry", + name="referral_key", field=models.CharField(blank=True, default=None, max_length=70, null=True), ), migrations.AlterField( - model_name='formentry', - name='utm_campaign', + model_name="formentry", + name="utm_campaign", field=models.CharField(blank=True, default=None, max_length=70, null=True), ), migrations.AlterField( - model_name='formentry', - name='utm_medium', + model_name="formentry", + name="utm_medium", field=models.CharField(blank=True, default=None, max_length=70, null=True), ), migrations.AlterField( - model_name='formentry', - name='utm_source', + model_name="formentry", + name="utm_source", field=models.CharField(blank=True, default=None, max_length=70, null=True), ), ] diff --git a/breathecode/marketing/migrations/0038_auto_20210703_0359.py b/breathecode/marketing/migrations/0038_auto_20210703_0359.py index 08b1a3b15..c1283d5ed 100644 --- a/breathecode/marketing/migrations/0038_auto_20210703_0359.py +++ b/breathecode/marketing/migrations/0038_auto_20210703_0359.py @@ -9,22 +9,24 @@ class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ('marketing', '0037_auto_20210608_0627'), + ("marketing", "0037_auto_20210608_0627"), ] operations 
= [ migrations.AddField( - model_name='formentry', - name='user', - field=models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to=settings.AUTH_USER_MODEL), + model_name="formentry", + name="user", + field=models.ForeignKey( + blank=True, + default=None, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to=settings.AUTH_USER_MODEL, + ), ), migrations.AddField( - model_name='formentry', - name='won_at', + model_name="formentry", + name="won_at", field=models.DateTimeField(blank=True, default=None, null=True), ), ] diff --git a/breathecode/marketing/migrations/0039_downloadable.py b/breathecode/marketing/migrations/0039_downloadable.py index 1ef8bcbbb..2af16199f 100644 --- a/breathecode/marketing/migrations/0039_downloadable.py +++ b/breathecode/marketing/migrations/0039_downloadable.py @@ -8,33 +8,38 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0024_academy_feedback_email'), + ("admissions", "0024_academy_feedback_email"), migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ('marketing', '0038_auto_20210703_0359'), + ("marketing", "0038_auto_20210703_0359"), ] operations = [ migrations.CreateModel( - name='Downloadable', + name="Downloadable", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('slug', models.SlugField(max_length=150, unique=True)), - ('name', models.CharField(max_length=100)), - ('description', models.TextField(max_length=450)), - ('hits', models.IntegerField(default=0)), - ('active', - models.BooleanField(default=True, - help_text='Non-active downloadables will display a message to the user')), - ('preview_url', models.URLField()), - ('destination_url', models.URLField()), - ('destination_status', - models.CharField(choices=[('ACTIVE', 'Active'), ('NOT_FOUND', 'Not found')], - default='ACTIVE', - max_length=15)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('academy', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='admissions.academy')), - ('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("slug", models.SlugField(max_length=150, unique=True)), + ("name", models.CharField(max_length=100)), + ("description", models.TextField(max_length=450)), + ("hits", models.IntegerField(default=0)), + ( + "active", + models.BooleanField( + default=True, help_text="Non-active downloadables will display a message to the user" + ), + ), + ("preview_url", models.URLField()), + ("destination_url", models.URLField()), + ( + "destination_status", + models.CharField( + choices=[("ACTIVE", "Active"), ("NOT_FOUND", "Not found")], default="ACTIVE", max_length=15 + ), + ), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("academy", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="admissions.academy")), + ("author", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ], ), ] diff --git a/breathecode/marketing/migrations/0039_formentry_ac_expected_cohort.py b/breathecode/marketing/migrations/0039_formentry_ac_expected_cohort.py index c82b64c3c..b9c5b3a50 100644 --- a/breathecode/marketing/migrations/0039_formentry_ac_expected_cohort.py +++ 
b/breathecode/marketing/migrations/0039_formentry_ac_expected_cohort.py @@ -6,17 +6,19 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0038_auto_20210703_0359'), + ("marketing", "0038_auto_20210703_0359"), ] operations = [ migrations.AddField( - model_name='formentry', - name='ac_expected_cohort', - field=models.CharField(blank=True, - default=None, - help_text='Which cohort is this student expecting to join', - max_length=100, - null=True), + model_name="formentry", + name="ac_expected_cohort", + field=models.CharField( + blank=True, + default=None, + help_text="Which cohort is this student expecting to join", + max_length=100, + null=True, + ), ), ] diff --git a/breathecode/marketing/migrations/0040_formentry_current_download.py b/breathecode/marketing/migrations/0040_formentry_current_download.py index 205c4d5a7..eaae26d75 100644 --- a/breathecode/marketing/migrations/0040_formentry_current_download.py +++ b/breathecode/marketing/migrations/0040_formentry_current_download.py @@ -6,17 +6,19 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0039_downloadable'), + ("marketing", "0039_downloadable"), ] operations = [ migrations.AddField( - model_name='formentry', - name='current_download', - field=models.CharField(blank=True, - default=None, - help_text='Slug of the breathecode.marketing.downloadable', - max_length=255, - null=True), + model_name="formentry", + name="current_download", + field=models.CharField( + blank=True, + default=None, + help_text="Slug of the breathecode.marketing.downloadable", + max_length=255, + null=True, + ), ), ] diff --git a/breathecode/marketing/migrations/0041_merge_20211018_2259.py b/breathecode/marketing/migrations/0041_merge_20211018_2259.py index 87408097b..9250a5f58 100644 --- a/breathecode/marketing/migrations/0041_merge_20211018_2259.py +++ b/breathecode/marketing/migrations/0041_merge_20211018_2259.py @@ -6,8 +6,8 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0039_formentry_ac_expected_cohort'), - ('marketing', '0040_formentry_current_download'), + ("marketing", "0039_formentry_ac_expected_cohort"), + ("marketing", "0040_formentry_current_download"), ] operations = [] diff --git a/breathecode/marketing/migrations/0042_auto_20211028_2048.py b/breathecode/marketing/migrations/0042_auto_20211028_2048.py index 446ff3929..ed2a46afb 100644 --- a/breathecode/marketing/migrations/0042_auto_20211028_2048.py +++ b/breathecode/marketing/migrations/0042_auto_20211028_2048.py @@ -6,40 +6,43 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0041_merge_20211018_2259'), + ("marketing", "0041_merge_20211018_2259"), ] operations = [ migrations.AddField( - model_name='shortlink', - name='destination_status_text', + model_name="shortlink", + name="destination_status_text", field=models.CharField(blank=True, default=None, max_length=250, null=True), ), migrations.AddField( - model_name='shortlink', - name='lastclick_at', - field=models.DateTimeField(blank=True, - default=None, - help_text='Last time a click was registered for this link', - null=True), + model_name="shortlink", + name="lastclick_at", + field=models.DateTimeField( + blank=True, default=None, help_text="Last time a click was registered for this link", null=True + ), ), migrations.AddField( - model_name='shortlink', - name='private', + model_name="shortlink", + name="private", field=models.BooleanField(default=True), ), migrations.AlterField( - model_name='downloadable', - name='destination_status', - 
field=models.CharField(choices=[('ACTIVE', 'Active'), ('NOT_FOUND', 'Not found'), ('ERROR', 'Error')], - default='ACTIVE', - max_length=15), + model_name="downloadable", + name="destination_status", + field=models.CharField( + choices=[("ACTIVE", "Active"), ("NOT_FOUND", "Not found"), ("ERROR", "Error")], + default="ACTIVE", + max_length=15, + ), ), migrations.AlterField( - model_name='shortlink', - name='destination_status', - field=models.CharField(choices=[('ACTIVE', 'Active'), ('NOT_FOUND', 'Not found'), ('ERROR', 'Error')], - default='ACTIVE', - max_length=15), + model_name="shortlink", + name="destination_status", + field=models.CharField( + choices=[("ACTIVE", "Active"), ("NOT_FOUND", "Not found"), ("ERROR", "Error")], + default="ACTIVE", + max_length=15, + ), ), ] diff --git a/breathecode/marketing/migrations/0043_auto_20211111_0010.py b/breathecode/marketing/migrations/0043_auto_20211111_0010.py index 41a3899bc..0e09535c1 100644 --- a/breathecode/marketing/migrations/0043_auto_20211111_0010.py +++ b/breathecode/marketing/migrations/0043_auto_20211111_0010.py @@ -6,18 +6,18 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0042_auto_20211028_2048'), + ("marketing", "0042_auto_20211028_2048"), ] operations = [ migrations.AlterField( - model_name='activecampaignacademy', - name='ac_url', + model_name="activecampaignacademy", + name="ac_url", field=models.URLField(), ), migrations.AlterField( - model_name='formentry', - name='utm_url', + model_name="formentry", + name="utm_url", field=models.URLField(blank=True, default=None, max_length=2000, null=True), ), ] diff --git a/breathecode/marketing/migrations/0044_leadgenerationapp.py b/breathecode/marketing/migrations/0044_leadgenerationapp.py index 495c5e7b7..cd318644a 100644 --- a/breathecode/marketing/migrations/0044_leadgenerationapp.py +++ b/breathecode/marketing/migrations/0044_leadgenerationapp.py @@ -7,47 +7,57 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0029_auto_20211217_0248'), - ('marketing', '0043_auto_20211111_0010'), + ("admissions", "0029_auto_20211217_0248"), + ("marketing", "0043_auto_20211111_0010"), ] operations = [ migrations.CreateModel( - name='LeadGenerationApp', + name="LeadGenerationApp", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('slug', models.SlugField(max_length=150, unique=True)), - ('name', models.CharField(max_length=100)), - ('description', models.TextField(max_length=450)), - ('app_id', - models.CharField(help_text='Unique token generated only for this app, can be reset to revoke acceess', - max_length=255, - unique=True)), - ('hits', models.IntegerField(default=0)), - ('last_call_log', - models.TextField(help_text='Output that was sent to this app on the last call', max_length=450)), - ('last_call_status', - models.CharField(blank=True, - choices=[('OK', 'Ok'), ('ERROR', 'Error')], - default=None, - max_length=9, - null=True)), - ('last_call_at', - models.DateTimeField(blank=True, - default=None, - help_text='Timestamp from the last time this app called our API', - null=True)), - ('location', models.CharField(blank=True, default=None, max_length=70, null=True)), - ('language', models.CharField(default='en', max_length=2)), - ('utm_url', models.URLField(blank=True, default=None, max_length=2000, null=True)), - ('utm_medium', models.CharField(blank=True, default=None, max_length=70, null=True)), - ('utm_campaign', models.CharField(blank=True, default=None, max_length=70, 
null=True)), - ('utm_source', models.CharField(blank=True, default=None, max_length=70, null=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('academy', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='admissions.academy')), - ('default_automations', models.ManyToManyField(blank=True, to='marketing.Automation')), - ('default_tags', models.ManyToManyField(blank=True, to='marketing.Tag')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("slug", models.SlugField(max_length=150, unique=True)), + ("name", models.CharField(max_length=100)), + ("description", models.TextField(max_length=450)), + ( + "app_id", + models.CharField( + help_text="Unique token generated only for this app, can be reset to revoke acceess", + max_length=255, + unique=True, + ), + ), + ("hits", models.IntegerField(default=0)), + ( + "last_call_log", + models.TextField(help_text="Output that was sent to this app on the last call", max_length=450), + ), + ( + "last_call_status", + models.CharField( + blank=True, choices=[("OK", "Ok"), ("ERROR", "Error")], default=None, max_length=9, null=True + ), + ), + ( + "last_call_at", + models.DateTimeField( + blank=True, + default=None, + help_text="Timestamp from the last time this app called our API", + null=True, + ), + ), + ("location", models.CharField(blank=True, default=None, max_length=70, null=True)), + ("language", models.CharField(default="en", max_length=2)), + ("utm_url", models.URLField(blank=True, default=None, max_length=2000, null=True)), + ("utm_medium", models.CharField(blank=True, default=None, max_length=70, null=True)), + ("utm_campaign", models.CharField(blank=True, default=None, max_length=70, null=True)), + ("utm_source", models.CharField(blank=True, default=None, max_length=70, null=True)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("academy", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="admissions.academy")), + ("default_automations", models.ManyToManyField(blank=True, to="marketing.Automation")), + ("default_tags", models.ManyToManyField(blank=True, to="marketing.Tag")), ], ), ] diff --git a/breathecode/marketing/migrations/0045_auto_20220103_2200.py b/breathecode/marketing/migrations/0045_auto_20220103_2200.py index 65ed24bae..b97c3eef9 100644 --- a/breathecode/marketing/migrations/0045_auto_20220103_2200.py +++ b/breathecode/marketing/migrations/0045_auto_20220103_2200.py @@ -7,36 +7,35 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0044_leadgenerationapp'), + ("marketing", "0044_leadgenerationapp"), ] operations = [ migrations.RemoveField( - model_name='leadgenerationapp', - name='last_call_log', + model_name="leadgenerationapp", + name="last_call_log", ), migrations.AddField( - model_name='formentry', - name='lead_generation_app', + model_name="formentry", + name="lead_generation_app", field=models.ForeignKey( default=None, - help_text='Other apps can send leads to breathecode but they need to be registered here', + help_text="Other apps can send leads to breathecode but they need to be registered here", null=True, on_delete=django.db.models.deletion.CASCADE, - to='marketing.leadgenerationapp'), + to="marketing.leadgenerationapp", + ), ), migrations.AddField( - model_name='leadgenerationapp', - name='last_request_data', - field=models.TextField(blank=True, - default=None, - 
help_text='Incomig payload from the last request', - max_length=450, - null=True), + model_name="leadgenerationapp", + name="last_request_data", + field=models.TextField( + blank=True, default=None, help_text="Incomig payload from the last request", max_length=450, null=True + ), ), migrations.AlterField( - model_name='leadgenerationapp', - name='language', + model_name="leadgenerationapp", + name="language", field=models.CharField(blank=True, default=None, max_length=2, null=True), ), ] diff --git a/breathecode/marketing/migrations/0046_auto_20220113_2140.py b/breathecode/marketing/migrations/0046_auto_20220113_2140.py index 3a5b7d342..4ce1789fb 100644 --- a/breathecode/marketing/migrations/0046_auto_20220113_2140.py +++ b/breathecode/marketing/migrations/0046_auto_20220113_2140.py @@ -6,29 +6,29 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0045_auto_20220103_2200'), + ("marketing", "0045_auto_20220103_2200"), ] operations = [ migrations.AlterField( - model_name='formentry', - name='automations', - field=models.CharField(blank=True, - default='', - help_text='Comma separated list of automations', - max_length=100), + model_name="formentry", + name="automations", + field=models.CharField( + blank=True, default="", help_text="Comma separated list of automations", max_length=100 + ), ), migrations.AlterField( - model_name='formentry', - name='tags', - field=models.CharField(blank=True, default='', help_text='Comma separated list of tags', max_length=100), + model_name="formentry", + name="tags", + field=models.CharField(blank=True, default="", help_text="Comma separated list of tags", max_length=100), ), migrations.AlterField( - model_name='leadgenerationapp', - name='default_automations', + model_name="leadgenerationapp", + name="default_automations", field=models.ManyToManyField( blank=True, - help_text='Automations with are slug will be excluded, make sure to set slug to them', - to='marketing.Automation'), + help_text="Automations with are slug will be excluded, make sure to set slug to them", + to="marketing.Automation", + ), ), ] diff --git a/breathecode/marketing/migrations/0047_alter_leadgenerationapp_utm_url.py b/breathecode/marketing/migrations/0047_alter_leadgenerationapp_utm_url.py index 87e959b4b..d819b8b3a 100644 --- a/breathecode/marketing/migrations/0047_alter_leadgenerationapp_utm_url.py +++ b/breathecode/marketing/migrations/0047_alter_leadgenerationapp_utm_url.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0046_auto_20220113_2140'), + ("marketing", "0046_auto_20220113_2140"), ] operations = [ migrations.AlterField( - model_name='leadgenerationapp', - name='utm_url', + model_name="leadgenerationapp", + name="utm_url", field=models.CharField(blank=True, default=None, max_length=2000, null=True), ), ] diff --git a/breathecode/marketing/migrations/0048_alter_formentry_utm_url.py b/breathecode/marketing/migrations/0048_alter_formentry_utm_url.py index 6ec2faf3c..4323fa238 100644 --- a/breathecode/marketing/migrations/0048_alter_formentry_utm_url.py +++ b/breathecode/marketing/migrations/0048_alter_formentry_utm_url.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0047_alter_leadgenerationapp_utm_url'), + ("marketing", "0047_alter_leadgenerationapp_utm_url"), ] operations = [ migrations.AlterField( - model_name='formentry', - name='utm_url', + model_name="formentry", + name="utm_url", field=models.CharField(blank=True, default=None, max_length=2000, 
null=True), ), ] diff --git a/breathecode/marketing/migrations/0049_auto_20220127_1806.py b/breathecode/marketing/migrations/0049_auto_20220127_1806.py index a8d614956..55c85a7d4 100644 --- a/breathecode/marketing/migrations/0049_auto_20220127_1806.py +++ b/breathecode/marketing/migrations/0049_auto_20220127_1806.py @@ -7,34 +7,36 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0048_alter_formentry_utm_url'), + ("marketing", "0048_alter_formentry_utm_url"), ] operations = [ migrations.AddField( - model_name='formentry', - name='utm_content', + model_name="formentry", + name="utm_content", field=models.CharField(blank=True, default=None, max_length=70, null=True), ), migrations.AlterField( - model_name='tag', - name='automation', - field=models.ForeignKey(blank=True, - default=None, - help_text='Leads that contain this tag will be asociated to this automation', - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='marketing.automation'), + model_name="tag", + name="automation", + field=models.ForeignKey( + blank=True, + default=None, + help_text="Leads that contain this tag will be asociated to this automation", + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="marketing.automation", + ), ), migrations.AlterField( - model_name='tag', - name='tag_type', + model_name="tag", + name="tag_type", field=models.CharField( - choices=[('STRONG', 'Strong'), ('SOFT', 'Soft'), ('DISCOVERY', 'Discovery'), ('OTHER', 'Other')], - default='OTHER', - help_text= - "The STRONG tags in a lead will determine to witch automation it does unless there is an 'automation' property on the lead JSON", + choices=[("STRONG", "Strong"), ("SOFT", "Soft"), ("DISCOVERY", "Discovery"), ("OTHER", "Other")], + default="OTHER", + help_text="The STRONG tags in a lead will determine to witch automation it does unless there is an 'automation' property on the lead JSON", max_length=15, - null=True), + null=True, + ), ), ] diff --git a/breathecode/marketing/migrations/0050_alter_tag_tag_type.py b/breathecode/marketing/migrations/0050_alter_tag_tag_type.py index be19e3ba6..effbbb019 100644 --- a/breathecode/marketing/migrations/0050_alter_tag_tag_type.py +++ b/breathecode/marketing/migrations/0050_alter_tag_tag_type.py @@ -6,20 +6,27 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0049_auto_20220127_1806'), + ("marketing", "0049_auto_20220127_1806"), ] operations = [ migrations.AlterField( - model_name='tag', - name='tag_type', + model_name="tag", + name="tag_type", field=models.CharField( - choices=[('STRONG', 'Strong'), ('SOFT', 'Soft'), ('DISCOVERY', 'Discovery'), ('COHORT', 'Cohort'), - ('DOWNLOADABLE', 'Downloadable'), ('EVENT', 'Event'), ('OTHER', 'Other')], - default='OTHER', - help_text= - "The STRONG tags in a lead will determine to witch automation it does unless there is an 'automation' property on the lead JSON", + choices=[ + ("STRONG", "Strong"), + ("SOFT", "Soft"), + ("DISCOVERY", "Discovery"), + ("COHORT", "Cohort"), + ("DOWNLOADABLE", "Downloadable"), + ("EVENT", "Event"), + ("OTHER", "Other"), + ], + default="OTHER", + help_text="The STRONG tags in a lead will determine to witch automation it does unless there is an 'automation' property on the lead JSON", max_length=15, - null=True), + null=True, + ), ), ] diff --git a/breathecode/marketing/migrations/0051_auto_20220205_1736.py b/breathecode/marketing/migrations/0051_auto_20220205_1736.py index c0143c049..154545501 100644 --- 
a/breathecode/marketing/migrations/0051_auto_20220205_1736.py +++ b/breathecode/marketing/migrations/0051_auto_20220205_1736.py @@ -6,26 +6,25 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0050_alter_tag_tag_type'), + ("marketing", "0050_alter_tag_tag_type"), ] operations = [ migrations.AddField( - model_name='tag', - name='disputed_at', + model_name="tag", + name="disputed_at", field=models.DateTimeField( blank=True, default=None, - help_text= - 'Disputed tags get deleted after 10 days unless its used in 1+ automations or has 1+ subscriber', - null=True), + help_text="Disputed tags get deleted after 10 days unless its used in 1+ automations or has 1+ subscriber", + null=True, + ), ), migrations.AddField( - model_name='tag', - name='disputed_reason', - field=models.TextField(blank=True, - default=None, - help_text='Explain why you think the tag should be deleted', - null=True), + model_name="tag", + name="disputed_reason", + field=models.TextField( + blank=True, default=None, help_text="Explain why you think the tag should be deleted", null=True + ), ), ] diff --git a/breathecode/marketing/migrations/0052_utmfield.py b/breathecode/marketing/migrations/0052_utmfield.py index b3ad14260..8459e4028 100644 --- a/breathecode/marketing/migrations/0052_utmfield.py +++ b/breathecode/marketing/migrations/0052_utmfield.py @@ -8,28 +8,36 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0029_auto_20211217_0248'), + ("admissions", "0029_auto_20211217_0248"), migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ('marketing', '0051_auto_20220205_1736'), + ("marketing", "0051_auto_20220205_1736"), ] operations = [ migrations.CreateModel( - name='UTMField', + name="UTMField", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('slug', models.SlugField(max_length=150, unique=True)), - ('name', models.CharField(max_length=100)), - ('description', models.TextField(max_length=450)), - ('utm_type', - models.CharField(choices=[('CONTENT', 'Source'), ('SOURCE', 'Medium'), ('MEDIUM', 'Content'), - ('CAMPAIGN', 'Campaign')], - default=None, - max_length=15)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('academy', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='admissions.academy')), - ('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("slug", models.SlugField(max_length=150, unique=True)), + ("name", models.CharField(max_length=100)), + ("description", models.TextField(max_length=450)), + ( + "utm_type", + models.CharField( + choices=[ + ("CONTENT", "Source"), + ("SOURCE", "Medium"), + ("MEDIUM", "Content"), + ("CAMPAIGN", "Campaign"), + ], + default=None, + max_length=15, + ), + ), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("academy", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="admissions.academy")), + ("author", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ], ), ] diff --git a/breathecode/marketing/migrations/0053_tag_description.py b/breathecode/marketing/migrations/0053_tag_description.py index f840e0af8..498a18c90 100644 --- a/breathecode/marketing/migrations/0053_tag_description.py +++ 
b/breathecode/marketing/migrations/0053_tag_description.py @@ -6,16 +6,15 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0052_utmfield'), + ("marketing", "0052_utmfield"), ] operations = [ migrations.AddField( - model_name='tag', - name='description', - field=models.TextField(blank=True, - default=None, - help_text='How is this tag being used? Why is it needed?', - null=True), + model_name="tag", + name="description", + field=models.TextField( + blank=True, default=None, help_text="How is this tag being used? Why is it needed?", null=True + ), ), ] diff --git a/breathecode/marketing/migrations/0054_alter_tag_tag_type.py b/breathecode/marketing/migrations/0054_alter_tag_tag_type.py index 17f73cb38..a74dc854e 100644 --- a/breathecode/marketing/migrations/0054_alter_tag_tag_type.py +++ b/breathecode/marketing/migrations/0054_alter_tag_tag_type.py @@ -6,20 +6,27 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0053_tag_description'), + ("marketing", "0053_tag_description"), ] operations = [ migrations.AlterField( - model_name='tag', - name='tag_type', + model_name="tag", + name="tag_type", field=models.CharField( - choices=[('STRONG', 'Strong'), ('SOFT', 'Soft'), ('DISCOVERY', 'Discovery'), ('COHORT', 'Cohort'), - ('DOWNLOADABLE', 'Downloadable'), ('EVENT', 'Event'), ('OTHER', 'Other')], + choices=[ + ("STRONG", "Strong"), + ("SOFT", "Soft"), + ("DISCOVERY", "Discovery"), + ("COHORT", "Cohort"), + ("DOWNLOADABLE", "Downloadable"), + ("EVENT", "Event"), + ("OTHER", "Other"), + ], default=None, - help_text= - "The STRONG tags in a lead will determine to witch automation it does unless there is an 'automation' property on the lead JSON", + help_text="The STRONG tags in a lead will determine to witch automation it does unless there is an 'automation' property on the lead JSON", max_length=15, - null=True), + null=True, + ), ), ] diff --git a/breathecode/marketing/migrations/0055_alter_utmfield_utm_type.py b/breathecode/marketing/migrations/0055_alter_utmfield_utm_type.py index b26f31344..2c28f64ed 100644 --- a/breathecode/marketing/migrations/0055_alter_utmfield_utm_type.py +++ b/breathecode/marketing/migrations/0055_alter_utmfield_utm_type.py @@ -6,16 +6,17 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0054_alter_tag_tag_type'), + ("marketing", "0054_alter_tag_tag_type"), ] operations = [ migrations.AlterField( - model_name='utmfield', - name='utm_type', - field=models.CharField(choices=[('CONTENT', 'Content'), ('SOURCE', 'Source'), ('MEDIUM', 'Medium'), - ('CAMPAIGN', 'Campaign')], - default=None, - max_length=15), + model_name="utmfield", + name="utm_type", + field=models.CharField( + choices=[("CONTENT", "Content"), ("SOURCE", "Source"), ("MEDIUM", "Medium"), ("CAMPAIGN", "Campaign")], + default=None, + max_length=15, + ), ), ] diff --git a/breathecode/marketing/migrations/0056_auto_20220503_1543.py b/breathecode/marketing/migrations/0056_auto_20220503_1543.py index 3704fec35..4d80b932d 100644 --- a/breathecode/marketing/migrations/0056_auto_20220503_1543.py +++ b/breathecode/marketing/migrations/0056_auto_20220503_1543.py @@ -6,18 +6,18 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0055_alter_utmfield_utm_type'), + ("marketing", "0055_alter_utmfield_utm_type"), ] operations = [ migrations.AlterField( - model_name='automation', - name='entered', - field=models.PositiveIntegerField(help_text='How many contacts have entered'), + model_name="automation", + name="entered", + 
field=models.PositiveIntegerField(help_text="How many contacts have entered"), ), migrations.AlterField( - model_name='automation', - name='exited', - field=models.PositiveIntegerField(help_text='How many contacts have exited'), + model_name="automation", + name="exited", + field=models.PositiveIntegerField(help_text="How many contacts have exited"), ), ] diff --git a/breathecode/marketing/migrations/0057_auto_20220919_2057.py b/breathecode/marketing/migrations/0057_auto_20220919_2057.py index b6d18070c..214237f29 100644 --- a/breathecode/marketing/migrations/0057_auto_20220919_2057.py +++ b/breathecode/marketing/migrations/0057_auto_20220919_2057.py @@ -7,32 +7,40 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0056_auto_20220503_1543'), + ("marketing", "0056_auto_20220503_1543"), ] operations = [ migrations.AddField( - model_name='activecampaignacademy', - name='duplicate_leads_delta_avoidance', + model_name="activecampaignacademy", + name="duplicate_leads_delta_avoidance", field=models.DurationField( default=datetime.timedelta(seconds=1800), - help_text='Leads that apply to the same course on this timedelta will not be sent to AC'), + help_text="Leads that apply to the same course on this timedelta will not be sent to AC", + ), ), migrations.AddField( - model_name='formentry', - name='storage_status_text', + model_name="formentry", + name="storage_status_text", field=models.CharField( blank=True, - default='', - help_text='Will show exception message or any other cloud on the error that occurred (if any)', - max_length=250), + default="", + help_text="Will show exception message or any other cloud on the error that occurred (if any)", + max_length=250, + ), ), migrations.AlterField( - model_name='formentry', - name='storage_status', - field=models.CharField(choices=[('PENDING', 'Pending'), ('PERSISTED', 'Persisted'), - ('DUPLICATED', 'Duplicated'), ('ERROR', 'ERROR')], - default='PENDING', - max_length=15), + model_name="formentry", + name="storage_status", + field=models.CharField( + choices=[ + ("PENDING", "Pending"), + ("PERSISTED", "Persisted"), + ("DUPLICATED", "Duplicated"), + ("ERROR", "ERROR"), + ], + default="PENDING", + max_length=15, + ), ), ] diff --git a/breathecode/marketing/migrations/0058_alter_formentry_storage_status.py b/breathecode/marketing/migrations/0058_alter_formentry_storage_status.py index 48584e393..3cb997489 100644 --- a/breathecode/marketing/migrations/0058_alter_formentry_storage_status.py +++ b/breathecode/marketing/migrations/0058_alter_formentry_storage_status.py @@ -6,16 +6,22 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0057_auto_20220919_2057'), + ("marketing", "0057_auto_20220919_2057"), ] operations = [ migrations.AlterField( - model_name='formentry', - name='storage_status', - field=models.CharField(choices=[('PENDING', 'Pending'), ('PERSISTED', 'Persisted'), - ('DUPLICATED', 'Duplicated'), ('ERROR', 'Error')], - default='PENDING', - max_length=15), + model_name="formentry", + name="storage_status", + field=models.CharField( + choices=[ + ("PENDING", "Pending"), + ("PERSISTED", "Persisted"), + ("DUPLICATED", "Duplicated"), + ("ERROR", "Error"), + ], + default="PENDING", + max_length=15, + ), ), ] diff --git a/breathecode/marketing/migrations/0059_auto_20221004_1943.py b/breathecode/marketing/migrations/0059_auto_20221004_1943.py index 37b2fa4cb..ebf347984 100644 --- a/breathecode/marketing/migrations/0059_auto_20221004_1943.py +++ 
b/breathecode/marketing/migrations/0059_auto_20221004_1943.py @@ -6,26 +6,28 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0047_merge_20220924_0611'), - ('marketing', '0058_alter_formentry_storage_status'), + ("admissions", "0047_merge_20220924_0611"), + ("marketing", "0058_alter_formentry_storage_status"), ] operations = [ migrations.CreateModel( - name='AcademyProxy', + name="AcademyProxy", fields=[], options={ - 'proxy': True, - 'indexes': [], - 'constraints': [], + "proxy": True, + "indexes": [], + "constraints": [], }, - bases=('admissions.academy', ), + bases=("admissions.academy",), ), migrations.AlterField( - model_name='leadgenerationapp', - name='app_id', - field=models.CharField(help_text='Unique token generated only for this app, can be reset to revoke access', - max_length=255, - unique=True), + model_name="leadgenerationapp", + name="app_id", + field=models.CharField( + help_text="Unique token generated only for this app, can be reset to revoke access", + max_length=255, + unique=True, + ), ), ] diff --git a/breathecode/marketing/migrations/0060_auto_20221109_2245.py b/breathecode/marketing/migrations/0060_auto_20221109_2245.py index 8dc51566c..030a487f3 100644 --- a/breathecode/marketing/migrations/0060_auto_20221109_2245.py +++ b/breathecode/marketing/migrations/0060_auto_20221109_2245.py @@ -6,116 +6,118 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0059_auto_20221004_1943'), + ("marketing", "0059_auto_20221004_1943"), ] operations = [ migrations.AddField( - model_name='formentry', - name='custom_fields', - field=models.JSONField(blank=True, - default=None, - help_text='Other incoming values in the payload will be saved here as they come', - null=True), + model_name="formentry", + name="custom_fields", + field=models.JSONField( + blank=True, + default=None, + help_text="Other incoming values in the payload will be saved here as they come", + null=True, + ), ), migrations.AddField( - model_name='formentry', - name='sex', - field=models.CharField(blank=True, - default=None, - help_text='M=male,F=female,O=other', - max_length=15, - null=True), + model_name="formentry", + name="sex", + field=models.CharField( + blank=True, default=None, help_text="M=male,F=female,O=other", max_length=15, null=True + ), ), migrations.AddField( - model_name='formentry', - name='utm_placement', - field=models.CharField(blank=True, - default=None, - help_text='User agent or device screen', - max_length=50, - null=True), + model_name="formentry", + name="utm_placement", + field=models.CharField( + blank=True, default=None, help_text="User agent or device screen", max_length=50, null=True + ), ), migrations.AddField( - model_name='formentry', - name='utm_plan', - field=models.CharField(blank=True, - default=None, - help_text='If its applying for a scholarship, upfront, isa, financing, etc.', - max_length=50, - null=True), + model_name="formentry", + name="utm_plan", + field=models.CharField( + blank=True, + default=None, + help_text="If its applying for a scholarship, upfront, isa, financing, etc.", + max_length=50, + null=True, + ), ), migrations.AddField( - model_name='formentry', - name='utm_term', - field=models.CharField(blank=True, default=None, help_text='Keyword used in cpc', max_length=50, null=True), + model_name="formentry", + name="utm_term", + field=models.CharField(blank=True, default=None, help_text="Keyword used in cpc", max_length=50, null=True), ), migrations.AddField( - model_name='leadgenerationapp', - name='utm_plan', 
- field=models.CharField(blank=True, - default=None, - help_text='If its applying for a scholarship, upfront, isa, financing, etc.', - max_length=50, - null=True), + model_name="leadgenerationapp", + name="utm_plan", + field=models.CharField( + blank=True, + default=None, + help_text="If its applying for a scholarship, upfront, isa, financing, etc.", + max_length=50, + null=True, + ), ), migrations.AddField( - model_name='shortlink', - name='utm_placement', - field=models.CharField(blank=True, - default=None, - help_text='User agent or device screen', - max_length=50, - null=True), + model_name="shortlink", + name="utm_placement", + field=models.CharField( + blank=True, default=None, help_text="User agent or device screen", max_length=50, null=True + ), ), migrations.AddField( - model_name='shortlink', - name='utm_plan', - field=models.CharField(blank=True, - default=None, - help_text='If its applying for a scholarship, upfront, isa, financing, etc.', - max_length=50, - null=True), + model_name="shortlink", + name="utm_plan", + field=models.CharField( + blank=True, + default=None, + help_text="If its applying for a scholarship, upfront, isa, financing, etc.", + max_length=50, + null=True, + ), ), migrations.AddField( - model_name='shortlink', - name='utm_term', - field=models.CharField(blank=True, default=None, help_text='Keyword used in cpc', max_length=50, null=True), + model_name="shortlink", + name="utm_term", + field=models.CharField(blank=True, default=None, help_text="Keyword used in cpc", max_length=50, null=True), ), migrations.AlterField( - model_name='shortlink', - name='utm_campaign', - field=models.CharField(blank=True, - default=None, - help_text='Campaign ID when PPC but can be a string in more informal campaigns', - max_length=50, - null=True), + model_name="shortlink", + name="utm_campaign", + field=models.CharField( + blank=True, + default=None, + help_text="Campaign ID when PPC but can be a string in more informal campaigns", + max_length=50, + null=True, + ), ), migrations.AlterField( - model_name='shortlink', - name='utm_content', - field=models.CharField(blank=True, - default=None, - help_text='Can be de ad group id or ad id', - max_length=250, - null=True), + model_name="shortlink", + name="utm_content", + field=models.CharField( + blank=True, default=None, help_text="Can be de ad group id or ad id", max_length=250, null=True + ), ), migrations.AlterField( - model_name='shortlink', - name='utm_medium', - field=models.CharField(blank=True, - default=None, - help_text='social, organic, paid, email, referral, etc.', - max_length=50, - null=True), + model_name="shortlink", + name="utm_medium", + field=models.CharField( + blank=True, + default=None, + help_text="social, organic, paid, email, referral, etc.", + max_length=50, + null=True, + ), ), migrations.AlterField( - model_name='shortlink', - name='utm_source', - field=models.CharField(blank=True, - default=None, - help_text='fb, ig, google, twitter, quora, etc.', - max_length=50, - null=True), + model_name="shortlink", + name="utm_source", + field=models.CharField( + blank=True, default=None, help_text="fb, ig, google, twitter, quora, etc.", max_length=50, null=True + ), ), ] diff --git a/breathecode/marketing/migrations/0061_auto_20230203_2048.py b/breathecode/marketing/migrations/0061_auto_20230203_2048.py index 3c0f57aa8..5282badf1 100644 --- a/breathecode/marketing/migrations/0061_auto_20230203_2048.py +++ b/breathecode/marketing/migrations/0061_auto_20230203_2048.py @@ -6,16 +6,16 @@ class 
Migration(migrations.Migration): dependencies = [ - ('marketing', '0060_auto_20221109_2245'), + ("marketing", "0060_auto_20221109_2245"), ] operations = [ migrations.RemoveField( - model_name='formentry', - name='automation_objects', + model_name="formentry", + name="automation_objects", ), migrations.RemoveField( - model_name='formentry', - name='tag_objects', + model_name="formentry", + name="tag_objects", ), ] diff --git a/breathecode/marketing/migrations/0062_alter_formentry_zip_code.py b/breathecode/marketing/migrations/0062_alter_formentry_zip_code.py index 09854e4b7..2a85440da 100644 --- a/breathecode/marketing/migrations/0062_alter_formentry_zip_code.py +++ b/breathecode/marketing/migrations/0062_alter_formentry_zip_code.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0061_auto_20230203_2048'), + ("marketing", "0061_auto_20230203_2048"), ] operations = [ migrations.AlterField( - model_name='formentry', - name='zip_code', + model_name="formentry", + name="zip_code", field=models.CharField(blank=True, default=None, max_length=15, null=True), ), ] diff --git a/breathecode/marketing/migrations/0063_auto_20230215_2219.py b/breathecode/marketing/migrations/0063_auto_20230215_2219.py index fe155638c..6074adcdc 100644 --- a/breathecode/marketing/migrations/0063_auto_20230215_2219.py +++ b/breathecode/marketing/migrations/0063_auto_20230215_2219.py @@ -6,27 +6,29 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0062_alter_formentry_zip_code'), + ("marketing", "0062_alter_formentry_zip_code"), ] operations = [ migrations.AddField( - model_name='formentry', - name='ac_deal_owner_full_name', + model_name="formentry", + name="ac_deal_owner_full_name", field=models.CharField(blank=True, default=None, max_length=150, null=True), ), migrations.AddField( - model_name='formentry', - name='ac_deal_owner_id', + model_name="formentry", + name="ac_deal_owner_id", field=models.CharField(blank=True, default=None, max_length=15, null=True), ), migrations.AddField( - model_name='formentry', - name='ac_expected_cohort_date', - field=models.CharField(blank=True, - default=None, - help_text='Which date is this student expecting to join', - max_length=100, - null=True), + model_name="formentry", + name="ac_expected_cohort_date", + field=models.CharField( + blank=True, + default=None, + help_text="Which date is this student expecting to join", + max_length=100, + null=True, + ), ), ] diff --git a/breathecode/marketing/migrations/0064_auto_20230217_0523.py b/breathecode/marketing/migrations/0064_auto_20230217_0523.py index a87a3289a..75f1653e1 100644 --- a/breathecode/marketing/migrations/0064_auto_20230217_0523.py +++ b/breathecode/marketing/migrations/0064_auto_20230217_0523.py @@ -6,18 +6,18 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0063_auto_20230215_2219'), + ("marketing", "0063_auto_20230215_2219"), ] operations = [ migrations.AddField( - model_name='formentry', - name='ac_deal_amount', + model_name="formentry", + name="ac_deal_amount", field=models.FloatField(blank=True, default=None, null=True), ), migrations.AddField( - model_name='formentry', - name='ac_deal_currency_code', + model_name="formentry", + name="ac_deal_currency_code", field=models.CharField(blank=True, default=None, max_length=3, null=True), ), ] diff --git a/breathecode/marketing/migrations/0065_course_coursetranslation.py b/breathecode/marketing/migrations/0065_course_coursetranslation.py index 9aa66363e..3ed24c52a 100644 --- 
a/breathecode/marketing/migrations/0065_course_coursetranslation.py +++ b/breathecode/marketing/migrations/0065_course_coursetranslation.py @@ -8,42 +8,53 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0055_cohort_available_as_saas'), - ('marketing', '0064_auto_20230217_0523'), + ("admissions", "0055_cohort_available_as_saas"), + ("marketing", "0064_auto_20230217_0523"), ] operations = [ migrations.CreateModel( - name='Course', + name="Course", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('slug', models.SlugField(max_length=150, unique=True)), - ('status', - models.CharField(choices=[('ACTIVE', 'Active'), ('DELETED', 'Deleted'), ('ARCHIVED', 'Archived')], - default='ACTIVE', - max_length=15)), - ('visibility', - models.CharField(choices=[('PRIVATE', 'Private'), ('UNLISTED', 'Unlisted'), ('PUBLIC', 'Public')], - default='PRIVATE', - max_length=15)), - ('icon_url', models.URLField(help_text='Image icon to show on website')), - ('technologies', models.CharField(max_length=150)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('academy', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='admissions.academy')), - ('syllabus', models.ManyToManyField(blank=True, to='admissions.Syllabus')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("slug", models.SlugField(max_length=150, unique=True)), + ( + "status", + models.CharField( + choices=[("ACTIVE", "Active"), ("DELETED", "Deleted"), ("ARCHIVED", "Archived")], + default="ACTIVE", + max_length=15, + ), + ), + ( + "visibility", + models.CharField( + choices=[("PRIVATE", "Private"), ("UNLISTED", "Unlisted"), ("PUBLIC", "Public")], + default="PRIVATE", + max_length=15, + ), + ), + ("icon_url", models.URLField(help_text="Image icon to show on website")), + ("technologies", models.CharField(max_length=150)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("academy", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="admissions.academy")), + ("syllabus", models.ManyToManyField(blank=True, to="admissions.Syllabus")), ], ), migrations.CreateModel( - name='CourseTranslation', + name="CourseTranslation", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('lang', - models.CharField(max_length=5, - validators=[breathecode.utils.validators.language.validate_language_code])), - ('title', models.CharField(max_length=60)), - ('description', models.CharField(max_length=255)), - ('course', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='marketing.course')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "lang", + models.CharField( + max_length=5, validators=[breathecode.utils.validators.language.validate_language_code] + ), + ), + ("title", models.CharField(max_length=60)), + ("description", models.CharField(max_length=255)), + ("course", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="marketing.course")), ], ), ] diff --git a/breathecode/marketing/migrations/0066_coursetranslation_course_modules.py b/breathecode/marketing/migrations/0066_coursetranslation_course_modules.py index 1d97e1a9a..0b27aa2b6 100644 --- a/breathecode/marketing/migrations/0066_coursetranslation_course_modules.py +++ 
b/breathecode/marketing/migrations/0066_coursetranslation_course_modules.py @@ -6,17 +6,18 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0065_course_coursetranslation'), + ("marketing", "0065_course_coursetranslation"), ] operations = [ migrations.AddField( - model_name='coursetranslation', - name='course_modules', + model_name="coursetranslation", + name="course_modules", field=models.JSONField( blank=True, default=None, - help_text='The course modules should be a list of objects of each of the modules taught', - null=True), + help_text="The course modules should be a list of objects of each of the modules taught", + null=True, + ), ), ] diff --git a/breathecode/marketing/migrations/0067_course_status_message.py b/breathecode/marketing/migrations/0067_course_status_message.py index 2388e78ee..c0936f17f 100644 --- a/breathecode/marketing/migrations/0067_course_status_message.py +++ b/breathecode/marketing/migrations/0067_course_status_message.py @@ -6,17 +6,15 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0066_coursetranslation_course_modules'), + ("marketing", "0066_coursetranslation_course_modules"), ] operations = [ migrations.AddField( - model_name='course', - name='status_message', - field=models.CharField(blank=True, - default=None, - help_text='Error message if status is ERROR', - max_length=250, - null=True), + model_name="course", + name="status_message", + field=models.CharField( + blank=True, default=None, help_text="Error message if status is ERROR", max_length=250, null=True + ), ), ] diff --git a/breathecode/marketing/migrations/0068_auto_20230429_0045.py b/breathecode/marketing/migrations/0068_auto_20230429_0045.py index 8b115eb61..2a47a9a08 100644 --- a/breathecode/marketing/migrations/0068_auto_20230429_0045.py +++ b/breathecode/marketing/migrations/0068_auto_20230429_0045.py @@ -6,26 +6,30 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0067_course_status_message'), + ("marketing", "0067_course_status_message"), ] operations = [ migrations.AddField( - model_name='formentry', - name='ac_deal_course', - field=models.CharField(blank=True, - default=None, - help_text='If != course it means it was updated later on CRM', - max_length=100, - null=True), + model_name="formentry", + name="ac_deal_course", + field=models.CharField( + blank=True, + default=None, + help_text="If != course it means it was updated later on CRM", + max_length=100, + null=True, + ), ), migrations.AddField( - model_name='formentry', - name='ac_deal_location', - field=models.CharField(blank=True, - default=None, - help_text='If != location it means it was updated later on CRM', - max_length=50, - null=True), + model_name="formentry", + name="ac_deal_location", + field=models.CharField( + blank=True, + default=None, + help_text="If != location it means it was updated later on CRM", + max_length=50, + null=True, + ), ), ] diff --git a/breathecode/marketing/migrations/0069_alter_activecampaignwebhook_run_at.py b/breathecode/marketing/migrations/0069_alter_activecampaignwebhook_run_at.py index 942b86086..abf5c0f91 100644 --- a/breathecode/marketing/migrations/0069_alter_activecampaignwebhook_run_at.py +++ b/breathecode/marketing/migrations/0069_alter_activecampaignwebhook_run_at.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0068_auto_20230429_0045'), + ("marketing", "0068_auto_20230429_0045"), ] operations = [ migrations.AlterField( - model_name='activecampaignwebhook', - 
name='run_at', - field=models.DateTimeField(blank=True, default=None, help_text='Date/time that the webhook ran', null=True), + model_name="activecampaignwebhook", + name="run_at", + field=models.DateTimeField(blank=True, default=None, help_text="Date/time that the webhook ran", null=True), ), ] diff --git a/breathecode/marketing/migrations/0070_auto_20230622_0838.py b/breathecode/marketing/migrations/0070_auto_20230622_0838.py index 84019e821..0c29ad22d 100644 --- a/breathecode/marketing/migrations/0070_auto_20230622_0838.py +++ b/breathecode/marketing/migrations/0070_auto_20230622_0838.py @@ -6,22 +6,21 @@ class Migration(migrations.Migration): dependencies = [ - ('authenticate', '0040_userinvite_is_email_validated'), - ('marketing', '0069_alter_activecampaignwebhook_run_at'), + ("authenticate", "0040_userinvite_is_email_validated"), + ("marketing", "0069_alter_activecampaignwebhook_run_at"), ] operations = [ migrations.AddField( - model_name='course', - name='has_waiting_list', - field=models.BooleanField(default=False, help_text='Has waiting list?'), + model_name="course", + name="has_waiting_list", + field=models.BooleanField(default=False, help_text="Has waiting list?"), ), migrations.AddField( - model_name='course', - name='invites', - field=models.ManyToManyField(blank=True, - help_text="Plan's invites", - related_name='courses', - to='authenticate.UserInvite'), + model_name="course", + name="invites", + field=models.ManyToManyField( + blank=True, help_text="Plan's invites", related_name="courses", to="authenticate.UserInvite" + ), ), ] diff --git a/breathecode/marketing/migrations/0070_leadgenerationapp_last_call_log.py b/breathecode/marketing/migrations/0070_leadgenerationapp_last_call_log.py index 1b6137273..613c9efd7 100644 --- a/breathecode/marketing/migrations/0070_leadgenerationapp_last_call_log.py +++ b/breathecode/marketing/migrations/0070_leadgenerationapp_last_call_log.py @@ -6,17 +6,15 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0069_alter_activecampaignwebhook_run_at'), + ("marketing", "0069_alter_activecampaignwebhook_run_at"), ] operations = [ migrations.AddField( - model_name='leadgenerationapp', - name='last_call_log', - field=models.TextField(blank=True, - default=None, - help_text='Incomig payload from the last request', - max_length=450, - null=True), + model_name="leadgenerationapp", + name="last_call_log", + field=models.TextField( + blank=True, default=None, help_text="Incomig payload from the last request", max_length=450, null=True + ), ), ] diff --git a/breathecode/marketing/migrations/0071_merge_20230629_0305.py b/breathecode/marketing/migrations/0071_merge_20230629_0305.py index 50d474b45..32783e20d 100644 --- a/breathecode/marketing/migrations/0071_merge_20230629_0305.py +++ b/breathecode/marketing/migrations/0071_merge_20230629_0305.py @@ -6,8 +6,8 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0070_auto_20230622_0838'), - ('marketing', '0070_leadgenerationapp_last_call_log'), + ("marketing", "0070_auto_20230622_0838"), + ("marketing", "0070_leadgenerationapp_last_call_log"), ] operations = [] diff --git a/breathecode/marketing/migrations/0072_alter_formentry_phone.py b/breathecode/marketing/migrations/0072_alter_formentry_phone.py index a5a4ff035..8f7dff83c 100644 --- a/breathecode/marketing/migrations/0072_alter_formentry_phone.py +++ b/breathecode/marketing/migrations/0072_alter_formentry_phone.py @@ -7,13 +7,13 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', 
'0071_merge_20230629_0305'), + ("marketing", "0071_merge_20230629_0305"), ] operations = [ migrations.AlterField( - model_name='formentry', - name='phone', + model_name="formentry", + name="phone", field=models.CharField( blank=True, default=None, @@ -22,7 +22,9 @@ class Migration(migrations.Migration): validators=[ django.core.validators.RegexValidator( message="Phone number must be entered in the format: '+99999999'. Up to 15 digits allowed.", - regex='^\\+?1?\\d{8,15}$') - ]), + regex="^\\+?1?\\d{8,15}$", + ) + ], + ), ), ] diff --git a/breathecode/marketing/migrations/0073_formentry_attribution_id.py b/breathecode/marketing/migrations/0073_formentry_attribution_id.py index e09eeb961..a93914d6a 100644 --- a/breathecode/marketing/migrations/0073_formentry_attribution_id.py +++ b/breathecode/marketing/migrations/0073_formentry_attribution_id.py @@ -6,18 +6,18 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0072_alter_formentry_phone'), + ("marketing", "0072_alter_formentry_phone"), ] operations = [ migrations.AddField( - model_name='formentry', - name='attribution_id', + model_name="formentry", + name="attribution_id", field=models.BigIntegerField( blank=True, default=None, - help_text= - 'Keep a consistent attribution from al the previous applications from the same email (it will reset to a new one for each WON)', - null=True), + help_text="Keep a consistent attribution from al the previous applications from the same email (it will reset to a new one for each WON)", + null=True, + ), ), ] diff --git a/breathecode/marketing/migrations/0074_alter_formentry_attribution_id.py b/breathecode/marketing/migrations/0074_alter_formentry_attribution_id.py index ffb55ce9a..bd28b86a1 100644 --- a/breathecode/marketing/migrations/0074_alter_formentry_attribution_id.py +++ b/breathecode/marketing/migrations/0074_alter_formentry_attribution_id.py @@ -6,19 +6,19 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0073_formentry_attribution_id'), + ("marketing", "0073_formentry_attribution_id"), ] operations = [ migrations.AlterField( - model_name='formentry', - name='attribution_id', + model_name="formentry", + name="attribution_id", field=models.CharField( blank=True, default=None, - help_text= - 'Keep a consistent attribution from al the previous applications from the same email (it will reset to a new one for each WON)', + help_text="Keep a consistent attribution from al the previous applications from the same email (it will reset to a new one for each WON)", max_length=30, - null=True), + null=True, + ), ), ] diff --git a/breathecode/marketing/migrations/0074_coursetranslation_landing_url.py b/breathecode/marketing/migrations/0074_coursetranslation_landing_url.py index 2ff10cb52..c205ef973 100644 --- a/breathecode/marketing/migrations/0074_coursetranslation_landing_url.py +++ b/breathecode/marketing/migrations/0074_coursetranslation_landing_url.py @@ -6,18 +6,18 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0073_formentry_attribution_id'), + ("marketing", "0073_formentry_attribution_id"), ] operations = [ migrations.AddField( - model_name='coursetranslation', - name='landing_url', + model_name="coursetranslation", + name="landing_url", field=models.URLField( blank=True, default=None, - help_text= - 'Landing URL used on call to actions where the course is shown. A URL is needed per each translation.', - null=True), + help_text="Landing URL used on call to actions where the course is shown. 
A URL is needed per each translation.", + null=True, + ), ), ] diff --git a/breathecode/marketing/migrations/0075_auto_20230831_1958.py b/breathecode/marketing/migrations/0075_auto_20230831_1958.py index 4b28e1bb6..ac142cde2 100644 --- a/breathecode/marketing/migrations/0075_auto_20230831_1958.py +++ b/breathecode/marketing/migrations/0075_auto_20230831_1958.py @@ -7,21 +7,21 @@ def convert_to_str(apps, schema_editor): with connection.cursor() as cursor: cursor.execute( - 'UPDATE marketing_formentry SET attribution_id = CAST(attribution_id AS VARCHAR) WHERE attribution_id IS NOT NULL' + "UPDATE marketing_formentry SET attribution_id = CAST(attribution_id AS VARCHAR) WHERE attribution_id IS NOT NULL" ) def convert_to_int(apps, schema_editor): with connection.cursor() as cursor: cursor.execute( - 'UPDATE marketing_formentry SET attribution_id = CAST(attribution_id AS BIGINT) WHERE attribution_id IS NOT NULL' + "UPDATE marketing_formentry SET attribution_id = CAST(attribution_id AS BIGINT) WHERE attribution_id IS NOT NULL" ) class Migration(migrations.Migration): dependencies = [ - ('marketing', '0074_alter_formentry_attribution_id'), + ("marketing", "0074_alter_formentry_attribution_id"), ] operations = [ diff --git a/breathecode/marketing/migrations/0076_merge_20230831_2059.py b/breathecode/marketing/migrations/0076_merge_20230831_2059.py index 83c64b96c..2a24b2a32 100644 --- a/breathecode/marketing/migrations/0076_merge_20230831_2059.py +++ b/breathecode/marketing/migrations/0076_merge_20230831_2059.py @@ -6,8 +6,8 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0074_coursetranslation_landing_url'), - ('marketing', '0075_auto_20230831_1958'), + ("marketing", "0074_coursetranslation_landing_url"), + ("marketing", "0075_auto_20230831_1958"), ] operations = [] diff --git a/breathecode/marketing/migrations/0077_course_cohort.py b/breathecode/marketing/migrations/0077_course_cohort.py index 19f230bc1..5349df81f 100644 --- a/breathecode/marketing/migrations/0077_course_cohort.py +++ b/breathecode/marketing/migrations/0077_course_cohort.py @@ -7,18 +7,16 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0061_academy_white_label_url'), - ('marketing', '0076_merge_20230831_2059'), + ("admissions", "0061_academy_white_label_url"), + ("marketing", "0076_merge_20230831_2059"), ] operations = [ migrations.AddField( - model_name='course', - name='cohort', - field=models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='admissions.cohort'), + model_name="course", + name="cohort", + field=models.ForeignKey( + blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to="admissions.cohort" + ), ), ] diff --git a/breathecode/marketing/migrations/0078_alter_formentry_storage_status.py b/breathecode/marketing/migrations/0078_alter_formentry_storage_status.py index df0aabce4..7cf2a2c1d 100644 --- a/breathecode/marketing/migrations/0078_alter_formentry_storage_status.py +++ b/breathecode/marketing/migrations/0078_alter_formentry_storage_status.py @@ -6,16 +6,23 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0077_course_cohort'), + ("marketing", "0077_course_cohort"), ] operations = [ migrations.AlterField( - model_name='formentry', - name='storage_status', - field=models.CharField(choices=[('PENDING', 'Pending'), ('PERSISTED', 'Persisted'), - ('REJECTED', 'Rejected'), ('DUPLICATED', 'Duplicated'), ('ERROR', 'Error')], - default='PENDING', - 
max_length=15), + model_name="formentry", + name="storage_status", + field=models.CharField( + choices=[ + ("PENDING", "Pending"), + ("PERSISTED", "Persisted"), + ("REJECTED", "Rejected"), + ("DUPLICATED", "Duplicated"), + ("ERROR", "Error"), + ], + default="PENDING", + max_length=15, + ), ), ] diff --git a/breathecode/marketing/migrations/0078_coursetranslation_short_description.py b/breathecode/marketing/migrations/0078_coursetranslation_short_description.py index 33bf95fe6..275929659 100644 --- a/breathecode/marketing/migrations/0078_coursetranslation_short_description.py +++ b/breathecode/marketing/migrations/0078_coursetranslation_short_description.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0077_course_cohort'), + ("marketing", "0077_course_cohort"), ] operations = [ migrations.AddField( - model_name='coursetranslation', - name='short_description', + model_name="coursetranslation", + name="short_description", field=models.CharField(blank=True, default=None, max_length=120, null=True), ), ] diff --git a/breathecode/marketing/migrations/0079_alter_coursetranslation_description.py b/breathecode/marketing/migrations/0079_alter_coursetranslation_description.py index d237cfc1f..b0b723859 100644 --- a/breathecode/marketing/migrations/0079_alter_coursetranslation_description.py +++ b/breathecode/marketing/migrations/0079_alter_coursetranslation_description.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0078_coursetranslation_short_description'), + ("marketing", "0078_coursetranslation_short_description"), ] operations = [ migrations.AlterField( - model_name='coursetranslation', - name='description', + model_name="coursetranslation", + name="description", field=models.TextField(max_length=400), ), ] diff --git a/breathecode/marketing/migrations/0080_course_plan_slug.py b/breathecode/marketing/migrations/0080_course_plan_slug.py index 2db3300f4..20a8fadcf 100644 --- a/breathecode/marketing/migrations/0080_course_plan_slug.py +++ b/breathecode/marketing/migrations/0080_course_plan_slug.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0079_alter_coursetranslation_description'), + ("marketing", "0079_alter_coursetranslation_description"), ] operations = [ migrations.AddField( - model_name='course', - name='plan_slug', + model_name="course", + name="plan_slug", field=models.SlugField(blank=True, default=None, max_length=150, null=True), ), ] diff --git a/breathecode/marketing/migrations/0081_coursetranslation_video_url.py b/breathecode/marketing/migrations/0081_coursetranslation_video_url.py index 5b742b624..f8186a9c1 100644 --- a/breathecode/marketing/migrations/0081_coursetranslation_video_url.py +++ b/breathecode/marketing/migrations/0081_coursetranslation_video_url.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0080_course_plan_slug'), + ("marketing", "0080_course_plan_slug"), ] operations = [ migrations.AddField( - model_name='coursetranslation', - name='video_url', - field=models.URLField(default=None, help_text='Video that introduces/promotes this course', null=True), + model_name="coursetranslation", + name="video_url", + field=models.URLField(default=None, help_text="Video that introduces/promotes this course", null=True), ), ] diff --git a/breathecode/marketing/migrations/0082_course_color.py b/breathecode/marketing/migrations/0082_course_color.py index f84bac136..dc54bd9f3 100644 --- 
a/breathecode/marketing/migrations/0082_course_color.py +++ b/breathecode/marketing/migrations/0082_course_color.py @@ -6,17 +6,19 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0081_coursetranslation_video_url'), + ("marketing", "0081_coursetranslation_video_url"), ] operations = [ migrations.AddField( - model_name='course', - name='color', - field=models.CharField(blank=True, - default=None, - help_text='Add the color with hexadecimal format, i.e.: #FFFFFF', - max_length=50, - null=True), + model_name="course", + name="color", + field=models.CharField( + blank=True, + default=None, + help_text="Add the color with hexadecimal format, i.e.: #FFFFFF", + max_length=50, + null=True, + ), ), ] diff --git a/breathecode/marketing/migrations/0083_alter_coursetranslation_video_url.py b/breathecode/marketing/migrations/0083_alter_coursetranslation_video_url.py index fe52ba043..a3a8150e2 100644 --- a/breathecode/marketing/migrations/0083_alter_coursetranslation_video_url.py +++ b/breathecode/marketing/migrations/0083_alter_coursetranslation_video_url.py @@ -6,16 +6,15 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0082_course_color'), + ("marketing", "0082_course_color"), ] operations = [ migrations.AlterField( - model_name='coursetranslation', - name='video_url', - field=models.URLField(blank=True, - default=None, - help_text='Video that introduces/promotes this course', - null=True), + model_name="coursetranslation", + name="video_url", + field=models.URLField( + blank=True, default=None, help_text="Video that introduces/promotes this course", null=True + ), ), ] diff --git a/breathecode/marketing/migrations/0084_merge_20240322_1818.py b/breathecode/marketing/migrations/0084_merge_20240322_1818.py index 1d5ab10a6..acfc2d238 100644 --- a/breathecode/marketing/migrations/0084_merge_20240322_1818.py +++ b/breathecode/marketing/migrations/0084_merge_20240322_1818.py @@ -6,8 +6,8 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0078_alter_formentry_storage_status'), - ('marketing', '0083_alter_coursetranslation_video_url'), + ("marketing", "0078_alter_formentry_storage_status"), + ("marketing", "0083_alter_coursetranslation_video_url"), ] operations = [] diff --git a/breathecode/marketing/migrations/0084_merge_20240329_1501.py b/breathecode/marketing/migrations/0084_merge_20240329_1501.py index a85c9e965..97705acf6 100644 --- a/breathecode/marketing/migrations/0084_merge_20240329_1501.py +++ b/breathecode/marketing/migrations/0084_merge_20240329_1501.py @@ -6,8 +6,8 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0078_alter_formentry_storage_status'), - ('marketing', '0083_alter_coursetranslation_video_url'), + ("marketing", "0078_alter_formentry_storage_status"), + ("marketing", "0083_alter_coursetranslation_video_url"), ] operations = [] diff --git a/breathecode/marketing/migrations/0085_merge_20240403_0116.py b/breathecode/marketing/migrations/0085_merge_20240403_0116.py index 438f2b3a3..eb6a6ce53 100644 --- a/breathecode/marketing/migrations/0085_merge_20240403_0116.py +++ b/breathecode/marketing/migrations/0085_merge_20240403_0116.py @@ -6,8 +6,8 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0084_merge_20240322_1818'), - ('marketing', '0084_merge_20240329_1501'), + ("marketing", "0084_merge_20240322_1818"), + ("marketing", "0084_merge_20240329_1501"), ] operations = [] diff --git a/breathecode/marketing/migrations/0086_coursetranslation_landing_variables.py 
b/breathecode/marketing/migrations/0086_coursetranslation_landing_variables.py index 5e5a33933..9ebaa85cc 100644 --- a/breathecode/marketing/migrations/0086_coursetranslation_landing_variables.py +++ b/breathecode/marketing/migrations/0086_coursetranslation_landing_variables.py @@ -6,17 +6,18 @@ class Migration(migrations.Migration): dependencies = [ - ('marketing', '0085_merge_20240403_0116'), + ("marketing", "0085_merge_20240403_0116"), ] operations = [ migrations.AddField( - model_name='coursetranslation', - name='landing_variables', + model_name="coursetranslation", + name="landing_variables", field=models.JSONField( blank=True, default=None, - help_text='Different variables that can be used for marketing purposes in the landing page.', - null=True), + help_text="Different variables that can be used for marketing purposes in the landing page.", + null=True, + ), ), ] diff --git a/breathecode/marketing/models.py b/breathecode/marketing/models.py index a03f7e444..571614323 100644 --- a/breathecode/marketing/models.py +++ b/breathecode/marketing/models.py @@ -14,8 +14,14 @@ from .signals import form_entry_won_or_lost, new_form_entry_deal __all__ = [ - 'ActiveCampaignAcademy', 'AcademyAlias', 'Automation', 'Tag', 'Contact', 'FormEntry', 'ShortLink', - 'ActiveCampaignWebhook' + "ActiveCampaignAcademy", + "AcademyAlias", + "Automation", + "Tag", + "Contact", + "FormEntry", + "ShortLink", + "ActiveCampaignWebhook", ] @@ -25,45 +31,48 @@ class Meta: proxy = True -INCOMPLETED = 'INCOMPLETED' -COMPLETED = 'COMPLETED' +INCOMPLETED = "INCOMPLETED" +COMPLETED = "COMPLETED" SYNC_STATUS = ( - (INCOMPLETED, 'Incompleted'), - (COMPLETED, 'Completed'), + (INCOMPLETED, "Incompleted"), + (COMPLETED, "Completed"), ) class ActiveCampaignAcademy(models.Model): ac_key = models.CharField(max_length=150) ac_url = models.URLField() - event_attendancy_automation = models.ForeignKey('Automation', - on_delete=models.CASCADE, - blank=True, - null=True, - default=None) + event_attendancy_automation = models.ForeignKey( + "Automation", on_delete=models.CASCADE, blank=True, null=True, default=None + ) academy = models.OneToOneField(Academy, on_delete=models.CASCADE) duplicate_leads_delta_avoidance = models.DurationField( default=timedelta(minutes=30), - help_text='Leads that apply to the same course on this timedelta will not be sent to AC') - - sync_status = models.CharField(max_length=15, - choices=SYNC_STATUS, - default=INCOMPLETED, - help_text='Automatically set when interacting with the Active Campaign API') - sync_message = models.CharField(max_length=100, - blank=True, - null=True, - default=None, - help_text='Contains any success or error messages depending on the status') + help_text="Leads that apply to the same course on this timedelta will not be sent to AC", + ) + + sync_status = models.CharField( + max_length=15, + choices=SYNC_STATUS, + default=INCOMPLETED, + help_text="Automatically set when interacting with the Active Campaign API", + ) + sync_message = models.CharField( + max_length=100, + blank=True, + null=True, + default=None, + help_text="Contains any success or error messages depending on the status", + ) last_interaction_at = models.DateTimeField(default=None, blank=True, null=True) created_at = models.DateTimeField(auto_now_add=True, editable=False) updated_at = models.DateTimeField(auto_now=True, editable=False) def __str__(self): - return f'{self.academy.name}' if self.academy else 'Unnamed' + return f"{self.academy.name}" if self.academy else "Unnamed" class AcademyAlias(models.Model): @@ 
-79,29 +88,30 @@
academy = models.ForeignKey(Academy, on_delete=models.CASCADE)
-ACTIVE = '1'
-INNACTIVE = '2'
-UKNOWN = '0'
+ACTIVE = "1"
+INNACTIVE = "2"
+UKNOWN = "0"
AUTOMATION_STATUS = (
- (ACTIVE, 'Active'),
- (INNACTIVE, 'Innactive'),
- (UKNOWN, 'Uknown'),
+ (ACTIVE, "Active"),
+ (INNACTIVE, "Inactive"),
+ (UKNOWN, "Unknown"),
)
class Automation(models.Model):
- slug = models.SlugField(max_length=150,
- blank=True,
- default='',
- help_text='unique string id that is used to connect incoming leads to automations')
+ slug = models.SlugField(
+ max_length=150,
+ blank=True,
+ default="",
+ help_text="unique string id that is used to connect incoming leads to automations",
+ )
name = models.CharField(max_length=100)
- acp_id = models.PositiveIntegerField(help_text='ID asigned in active campaign')
- status = models.CharField(max_length=1,
- choices=AUTOMATION_STATUS,
- default=UKNOWN,
- help_text='2 = inactive, 1=active')
- entered = models.PositiveIntegerField(help_text='How many contacts have entered')
- exited = models.PositiveIntegerField(help_text='How many contacts have exited')
+ acp_id = models.PositiveIntegerField(help_text="ID assigned in active campaign")
+ status = models.CharField(
+ max_length=1, choices=AUTOMATION_STATUS, default=UKNOWN, help_text="2 = inactive, 1=active"
+ )
+ entered = models.PositiveIntegerField(help_text="How many contacts have entered")
+ exited = models.PositiveIntegerField(help_text="How many contacts have exited")
ac_academy = models.ForeignKey(ActiveCampaignAcademy, on_delete=models.CASCADE, null=True, default=None)
@@ -109,25 +119,25 @@ class Automation(models.Model):
updated_at = models.DateTimeField(auto_now=True, editable=False)
def __str__(self):
- tag_imporance = self.slug if self.slug != '' else 'unknown'
- return f'{tag_imporance} -> {self.name}'
+ tag_importance = self.slug if self.slug != "" else "unknown"
+ return f"{tag_importance} -> {self.name}"
-STRONG = 'STRONG'
-SOFT = 'SOFT'
-DISCOVERY = 'DISCOVERY'
-COHORT = 'COHORT'
-DOWNLOADABLE = 'DOWNLOADABLE'
-EVENT = 'EVENT'
-OTHER = 'OTHER'
+STRONG = "STRONG"
+SOFT = "SOFT"
+DISCOVERY = "DISCOVERY"
+COHORT = "COHORT"
+DOWNLOADABLE = "DOWNLOADABLE"
+EVENT = "EVENT"
+OTHER = "OTHER"
TAG_TYPE = (
- (STRONG, 'Strong'),
- (SOFT, 'Soft'),
- (DISCOVERY, 'Discovery'),
- (COHORT, 'Cohort'),
- (DOWNLOADABLE, 'Downloadable'),
- (EVENT, 'Event'),
- (OTHER, 'Other'),
+ (STRONG, "Strong"),
+ (SOFT, "Soft"),
+ (DISCOVERY, "Discovery"),
+ (COHORT, "Cohort"),
+ (DOWNLOADABLE, "Downloadable"),
+ (EVENT, "Event"),
+ (OTHER, "Other"),
)
@@ -138,10 +148,9 @@ class Tag(models.Model):
choices=TAG_TYPE,
null=True,
default=None,
- help_text=
- "The STRONG tags in a lead will determine to witch automation it does unless there is an 'automation' property on the lead JSON"
+ help_text="The STRONG tags in a lead will determine which automation it goes to unless there is an 'automation' property on the lead JSON",
)
- acp_id = models.IntegerField(help_text='The id coming from active campaign')
+ acp_id = models.IntegerField(help_text="The id coming from active campaign")
subscribers = models.IntegerField()
# For better maintance the tags can be disputed for deletion
@@ -149,22 +158,23 @@
blank=True,
null=True,
default=None,
- help_text='Disputed tags get deleted after 10 days unless its used in 1+ automations or has 1+ subscriber')
- disputed_reason = models.TextField(blank=True,
- null=True,
- default=None,
- help_text='Explain why you think the tag should be deleted')
- description = models.TextField(blank=True,
- null=True,
- default=None,
- help_text='How is this tag being used? Why is it needed?')
-
- automation = models.ForeignKey(Automation,
- on_delete=models.CASCADE,
- null=True,
- blank=True,
- default=None,
- help_text='Leads that contain this tag will be asociated to this automation')
+ help_text="Disputed tags get deleted after 10 days unless it's used in 1+ automations or has 1+ subscriber",
+ )
+ disputed_reason = models.TextField(
+ blank=True, null=True, default=None, help_text="Explain why you think the tag should be deleted"
+ )
+ description = models.TextField(
+ blank=True, null=True, default=None, help_text="How is this tag being used? Why is it needed?"
+ )
+
+ automation = models.ForeignKey(
+ Automation,
+ on_delete=models.CASCADE,
+ null=True,
+ blank=True,
+ default=None,
+ help_text="Leads that contain this tag will be associated with this automation",
+ )
ac_academy = models.ForeignKey(ActiveCampaignAcademy, on_delete=models.CASCADE, null=True, default=None)
@@ -172,7 +182,7 @@ class Tag(models.Model):
updated_at = models.DateTimeField(auto_now=True, editable=False)
def __str__(self):
- return f'{self.slug} ({str(self.id)})'
+ return f"{self.slug} ({str(self.id)})"
class Contact(models.Model):
@@ -181,10 +191,12 @@ class Contact(models.Model):
email = models.CharField(max_length=150, unique=True)
phone_regex = RegexValidator(
- regex=r'^\+?1?\d{9,15}$',
- message="Phone number must be entered in the format: '+999999999'. Up to 15 digits allowed.")
- phone = models.CharField(validators=[phone_regex], max_length=17, blank=True, null=True,
- default=None) # validators should be a list
+ regex=r"^\+?1?\d{9,15}$",
+ message="Phone number must be entered in the format: '+999999999'. Up to 15 digits allowed.",
+ )
+ phone = models.CharField(
+ validators=[phone_regex], max_length=17, blank=True, null=True, default=None
+ ) # validators should be a list
language = models.CharField(max_length=2)
country = models.CharField(max_length=30)
@@ -196,14 +208,14 @@ class Contact(models.Model):
updated_at = models.DateTimeField(auto_now=True, editable=False)
def __str__(self):
- return self.first_name + ' ' + (self.last_name or '')
+ return self.first_name + " " + (self.last_name or "")
-OK = 'OK'
-ERROR = 'ERROR'
+OK = "OK"
+ERROR = "ERROR"
LAST_CALL_STATUS = (
- (OK, 'Ok'),
- (ERROR, 'Error'),
+ (OK, "Ok"),
+ (ERROR, "Error"),
)
@@ -211,44 +223,42 @@ class LeadGenerationApp(models.Model):
slug = models.SlugField(max_length=150, unique=True)
name = models.CharField(max_length=100)
description = models.TextField(max_length=450)
- app_id = models.CharField(max_length=255,
- unique=True,
- help_text='Unique token generated only for this app, can be reset to revoke access')
+ app_id = models.CharField(
+ max_length=255, unique=True, help_text="Unique token generated only for this app, can be reset to revoke access"
+ )
hits = models.IntegerField(default=0)
- last_request_data = models.TextField(max_length=450,
- default=None,
- null=True,
- blank=True,
- help_text='Incomig payload from the last request')
- last_call_log = models.TextField(max_length=450,
- default=None,
- null=True,
- blank=True,
- help_text='Incomig payload from the last request')
+ last_request_data = models.TextField(
+ max_length=450, default=None, null=True, blank=True, help_text="Incoming payload from the last request"
+ )
+ last_call_log = models.TextField(
+ max_length=450, default=None, null=True, blank=True, help_text="Incoming payload from the last request"
+ )
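# --- Aside: a minimal, standalone sketch of how the phone patterns used by the Contact
# and FormEntry validators in this models.py behave. Only the two regexes are taken from
# the patch; everything else below (sample numbers, use of the stdlib `re` module instead
# of django.core.validators.RegexValidator) is an illustrative assumption, not repo code.
import re

CONTACT_PHONE = re.compile(r"^\+?1?\d{9,15}$")     # Contact.phone_regex: 9-15 digits
FORM_ENTRY_PHONE = re.compile(r"^\+?1?\d{8,15}$")  # FormEntry.phone_regex: 8-15 digits

for label, pattern in (("Contact", CONTACT_PHONE), ("FormEntry", FORM_ENTRY_PHONE)):
    for raw in ("+13054443322", "3054443322", "305-444-3322", "+1 305 444 3322"):
        # only an optional "+", an optional leading "1" and plain digits pass;
        # dashes and spaces are rejected, matching the "'+99999999'" help message
        ok = bool(pattern.match(raw))
        print(f"{label}: {raw!r} -> {'valid' if ok else 'rejected'}")
# --- end of aside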
last_call_status = models.CharField(max_length=9, choices=LAST_CALL_STATUS, default=None, null=True, blank=True) - last_call_at = models.DateTimeField(default=None, - blank=True, - null=True, - help_text='Timestamp from the last time this app called our API') + last_call_at = models.DateTimeField( + default=None, blank=True, null=True, help_text="Timestamp from the last time this app called our API" + ) # defaults default_tags = models.ManyToManyField(Tag, blank=True) default_automations = models.ManyToManyField( - Automation, blank=True, help_text='Automations with are slug will be excluded, make sure to set slug to them') + Automation, blank=True, help_text="Automations with are slug will be excluded, make sure to set slug to them" + ) location = models.CharField(max_length=70, blank=True, null=True, default=None) language = models.CharField(max_length=2, blank=True, null=True, default=None) utm_url = models.CharField(max_length=2000, null=True, default=None, blank=True) utm_medium = models.CharField(max_length=70, blank=True, null=True, default=None) utm_campaign = models.CharField(max_length=70, blank=True, null=True, default=None) utm_source = models.CharField(max_length=70, blank=True, null=True, default=None) - utm_plan = models.CharField(max_length=50, - blank=True, - null=True, - default=None, - help_text='If its applying for a scholarship, upfront, isa, financing, etc.') + utm_plan = models.CharField( + max_length=50, + blank=True, + null=True, + default=None, + help_text="If its applying for a scholarship, upfront, isa, financing, etc.", + ) # Status academy = models.ForeignKey(Academy, on_delete=models.CASCADE) @@ -257,7 +267,7 @@ class LeadGenerationApp(models.Model): updated_at = models.DateTimeField(auto_now=True, editable=False) def __str__(self): - return f'{self.slug}' + return f"{self.slug}" def save(self, *args, **kwargs): created = not self.id @@ -268,37 +278,37 @@ def save(self, *args, **kwargs): super().save(*args, **kwargs) -PENDING = 'PENDING' -PERSISTED = 'PERSISTED' -DUPLICATED = 'DUPLICATED' -REJECTED = 'REJECTED' -ERROR = 'ERROR' +PENDING = "PENDING" +PERSISTED = "PERSISTED" +DUPLICATED = "DUPLICATED" +REJECTED = "REJECTED" +ERROR = "ERROR" STORAGE_STATUS = ( - (PENDING, 'Pending'), - (PERSISTED, 'Persisted'), - (REJECTED, 'Rejected'), # If rejection rules apply - (DUPLICATED, 'Duplicated'), - (ERROR, 'Error'), + (PENDING, "Pending"), + (PERSISTED, "Persisted"), + (REJECTED, "Rejected"), # If rejection rules apply + (DUPLICATED, "Duplicated"), + (ERROR, "Error"), ) LEAD_TYPE = ( - (STRONG, 'Strong'), - (SOFT, 'Soft'), - (DISCOVERY, 'Discovery'), + (STRONG, "Strong"), + (SOFT, "Soft"), + (DISCOVERY, "Discovery"), ) -WON = 'WON' -LOST = 'LOST' +WON = "WON" +LOST = "LOST" DEAL_STATUS = ( - (WON, 'Won'), - (LOST, 'Lost'), + (WON, "Won"), + (LOST, "Lost"), ) -GOOD = 'GOOD' -BAD = 'BAD' +GOOD = "GOOD" +BAD = "BAD" DEAL_SENTIMENT = ( - (GOOD, 'Good'), - (BAD, 'Bad'), + (GOOD, "Good"), + (BAD, "Bad"), ) @@ -318,70 +328,67 @@ def __init__(self, *args, **kwargs): fb_adgroup_id = models.BigIntegerField(null=True, default=None, blank=True) fb_ad_id = models.BigIntegerField(null=True, default=None, blank=True) - ac_contact_id = models.CharField(max_length=20, - null=True, - default=None, - blank=True, - help_text='Active Campaign Contact ID') + ac_contact_id = models.CharField( + max_length=20, null=True, default=None, blank=True, help_text="Active Campaign Contact ID" + ) - ac_deal_id = models.CharField(max_length=20, - null=True, - default=None, - blank=True, - 
help_text='Active Campaign Deal ID') + ac_deal_id = models.CharField( + max_length=20, null=True, default=None, blank=True, help_text="Active Campaign Deal ID" + ) - first_name = models.CharField(max_length=150, default='') - last_name = models.CharField(max_length=150, default='', blank=True) + first_name = models.CharField(max_length=150, default="") + last_name = models.CharField(max_length=150, default="", blank=True) email = models.CharField(max_length=150, null=True, default=None, blank=True) phone_regex = RegexValidator( - regex=r'^\+?1?\d{8,15}$', - message="Phone number must be entered in the format: '+99999999'. Up to 15 digits allowed.") - phone = models.CharField(validators=[phone_regex], max_length=17, blank=True, null=True, - default=None) # validators should be a list + regex=r"^\+?1?\d{8,15}$", + message="Phone number must be entered in the format: '+99999999'. Up to 15 digits allowed.", + ) + phone = models.CharField( + validators=[phone_regex], max_length=17, blank=True, null=True, default=None + ) # validators should be a list course = models.CharField(max_length=70, null=True, default=None) client_comments = models.CharField(max_length=250, blank=True, null=True, default=None) location = models.CharField(max_length=70, blank=True, null=True, default=None) - language = models.CharField(max_length=2, default='en') + language = models.CharField(max_length=2, default="en") utm_url = models.CharField(max_length=2000, null=True, default=None, blank=True) utm_medium = models.CharField(max_length=70, blank=True, null=True, default=None) utm_content = models.CharField(max_length=70, blank=True, null=True, default=None) utm_campaign = models.CharField(max_length=70, blank=True, null=True, default=None) utm_content = models.CharField(max_length=70, blank=True, null=True, default=None) utm_source = models.CharField(max_length=70, blank=True, null=True, default=None) - utm_term = models.CharField(max_length=50, blank=True, null=True, default=None, help_text='Keyword used in cpc') - utm_placement = models.CharField(max_length=50, - blank=True, - null=True, - default=None, - help_text='User agent or device screen') - utm_plan = models.CharField(max_length=50, - blank=True, - null=True, - default=None, - help_text='If its applying for a scholarship, upfront, isa, financing, etc.') - - custom_fields = models.JSONField(blank=True, - null=True, - default=None, - help_text='Other incoming values in the payload will be saved here as they come') - - current_download = models.CharField(max_length=255, - blank=True, - null=True, - default=None, - help_text='Slug of the breathecode.marketing.downloadable') + utm_term = models.CharField(max_length=50, blank=True, null=True, default=None, help_text="Keyword used in cpc") + utm_placement = models.CharField( + max_length=50, blank=True, null=True, default=None, help_text="User agent or device screen" + ) + utm_plan = models.CharField( + max_length=50, + blank=True, + null=True, + default=None, + help_text="If its applying for a scholarship, upfront, isa, financing, etc.", + ) + + custom_fields = models.JSONField( + blank=True, + null=True, + default=None, + help_text="Other incoming values in the payload will be saved here as they come", + ) + + current_download = models.CharField( + max_length=255, blank=True, null=True, default=None, help_text="Slug of the breathecode.marketing.downloadable" + ) referral_key = models.CharField(max_length=70, blank=True, null=True, default=None) gclid = models.CharField(max_length=255, blank=True, null=True, 
default=None) - tags = models.CharField(max_length=100, blank=True, default='', help_text='Comma separated list of tags') - automations = models.CharField(max_length=100, - blank=True, - default='', - help_text='Comma separated list of automations') + tags = models.CharField(max_length=100, blank=True, default="", help_text="Comma separated list of tags") + automations = models.CharField( + max_length=100, blank=True, default="", help_text="Comma separated list of automations" + ) street_address = models.CharField(max_length=250, null=True, default=None, blank=True) country = models.CharField(max_length=30, null=True, default=None, blank=True) @@ -392,15 +399,16 @@ def __init__(self, *args, **kwargs): zip_code = models.CharField(max_length=15, null=True, default=None, blank=True) browser_lang = models.CharField(max_length=10, null=True, default=None, blank=True) - sex = models.CharField(max_length=15, null=True, default=None, blank=True, help_text='M=male,F=female,O=other') + sex = models.CharField(max_length=15, null=True, default=None, blank=True, help_text="M=male,F=female,O=other") # is it saved into active campaign? storage_status = models.CharField(max_length=15, choices=STORAGE_STATUS, default=PENDING) storage_status_text = models.CharField( - default='', + default="", blank=True, max_length=250, - help_text='Will show exception message or any other cloud on the error that occurred (if any)') + help_text="Will show exception message or any other cloud on the error that occurred (if any)", + ) lead_type = models.CharField(max_length=15, choices=LEAD_TYPE, null=True, default=None) @@ -415,44 +423,41 @@ def __init__(self, *args, **kwargs): on_delete=models.CASCADE, null=True, default=None, - help_text='Other apps can send leads to breathecode but they need to be registered here') + help_text="Other apps can send leads to breathecode but they need to be registered here", + ) # if user is not null, it probably means the lead was won and we invited it to breathecode user = models.ForeignKey(User, on_delete=models.SET_NULL, null=True, default=None, blank=True) - ac_expected_cohort = models.CharField(max_length=100, - null=True, - default=None, - blank=True, - help_text='Which cohort is this student expecting to join') - - ac_expected_cohort_date = models.CharField(max_length=100, - null=True, - default=None, - blank=True, - help_text='Which date is this student expecting to join') - - ac_contact_id = models.CharField(max_length=20, - null=True, - default=None, - blank=True, - help_text='Active Campaign Contact ID') - ac_deal_id = models.CharField(max_length=20, - null=True, - default=None, - blank=True, - help_text='Active Campaign Deal ID') - - ac_deal_location = models.CharField(max_length=50, - default=None, - null=True, - blank=True, - help_text='If != location it means it was updated later on CRM') - ac_deal_course = models.CharField(max_length=100, - default=None, - null=True, - blank=True, - help_text='If != course it means it was updated later on CRM') + ac_expected_cohort = models.CharField( + max_length=100, null=True, default=None, blank=True, help_text="Which cohort is this student expecting to join" + ) + + ac_expected_cohort_date = models.CharField( + max_length=100, null=True, default=None, blank=True, help_text="Which date is this student expecting to join" + ) + + ac_contact_id = models.CharField( + max_length=20, null=True, default=None, blank=True, help_text="Active Campaign Contact ID" + ) + ac_deal_id = models.CharField( + max_length=20, null=True, default=None, 
blank=True, help_text="Active Campaign Deal ID" + ) + + ac_deal_location = models.CharField( + max_length=50, + default=None, + null=True, + blank=True, + help_text="If != location it means it was updated later on CRM", + ) + ac_deal_course = models.CharField( + max_length=100, + default=None, + null=True, + blank=True, + help_text="If != course it means it was updated later on CRM", + ) ac_deal_owner_id = models.CharField(max_length=15, default=None, null=True, blank=True) ac_deal_owner_full_name = models.CharField(max_length=150, default=None, null=True, blank=True) @@ -467,15 +472,14 @@ def __init__(self, *args, **kwargs): max_length=30, default=None, blank=True, - help_text= - 'Keep a consistent attribution from al the previous applications from the same email (it will reset to a new one for each WON)' + help_text="Keep a consistent attribution from al the previous applications from the same email (it will reset to a new one for each WON)", ) created_at = models.DateTimeField(auto_now_add=True, editable=False) updated_at = models.DateTimeField(auto_now=True, editable=False) def __str__(self): - return self.first_name + ' ' + self.last_name + return self.first_name + " " + self.last_name def save(self, *args, **kwargs): @@ -493,8 +497,10 @@ def save(self, *args, **kwargs): super().save(*args, **kwargs) - if deal_status_modified: form_entry_won_or_lost.send_robust(instance=self, sender=FormEntry) - if is_new_deal: new_form_entry_deal.send_robust(instance=self, sender=FormEntry) + if deal_status_modified: + form_entry_won_or_lost.send_robust(instance=self, sender=FormEntry) + if is_new_deal: + new_form_entry_deal.send_robust(instance=self, sender=FormEntry) self.__old_deal_status = self.deal_status self.__old_deal_id = self.ac_deal_id @@ -504,11 +510,17 @@ def is_duplicate(self, incoming_lead): if self.academy is not None and self.academy.activecampaignacademy is not None: duplicate_leads_delta_avoidance = self.academy.activecampaignacademy.duplicate_leads_delta_avoidance - last_one = FormEntry.objects.filter( - email=self.email, - course=incoming_lead['course'], - storage_status='PERSISTED', - created_at__lte=self.created_at).exclude(id=self.id).order_by('-created_at').first() + last_one = ( + FormEntry.objects.filter( + email=self.email, + course=incoming_lead["course"], + storage_status="PERSISTED", + created_at__lte=self.created_at, + ) + .exclude(id=self.id) + .order_by("-created_at") + .first() + ) if last_one is None: return False @@ -525,8 +537,9 @@ def set_attribution_id(self): if self.email is None: return None - previously_not_won = FormEntry.objects.filter(email=self.email, - won_at__isnull=True).order_by('-created_at').first() + previously_not_won = ( + FormEntry.objects.filter(email=self.email, won_at__isnull=True).order_by("-created_at").first() + ) # Generate a 30-character hash def generate_hash(): @@ -541,8 +554,9 @@ def generate_hash(): # has the attribution id already been attributed to a previous won lead? 
# if true, we need a new one to reset the attribution cycle - if FormEntry.objects.filter(email=self.email, attribution_id=self.attribution_id, - won_at__isnull=False).exists(): + if FormEntry.objects.filter( + email=self.email, attribution_id=self.attribution_id, won_at__isnull=False + ).exists(): # if true, reset self.attribution_id = generate_hash() @@ -555,7 +569,7 @@ def calculate_academy(self): if self.academy is not None: return self.academy - elif self.location is not None and self.location != '': + elif self.location is not None and self.location != "": _alias = AcademyAlias.objects.filter(slug=self.location).first() if _alias is not None: return _alias.academy @@ -564,34 +578,34 @@ def calculate_academy(self): def to_form_data(self): _entry = { - 'id': self.id, - 'first_name': self.first_name, - 'last_name': self.last_name, - 'phone': self.phone, - 'email': self.email, - 'location': self.location, - 'referral_key': self.referral_key, - 'course': self.course, - 'tags': self.tags, - 'automations': self.automations, - 'language': self.language, - 'city': self.city, - 'country': self.country, - 'utm_url': self.utm_url, - 'client_comments': self.client_comments, - 'current_download': self.current_download, - 'latitude': self.longitude, - 'longitude': self.latitude, + "id": self.id, + "first_name": self.first_name, + "last_name": self.last_name, + "phone": self.phone, + "email": self.email, + "location": self.location, + "referral_key": self.referral_key, + "course": self.course, + "tags": self.tags, + "automations": self.automations, + "language": self.language, + "city": self.city, + "country": self.country, + "utm_url": self.utm_url, + "client_comments": self.client_comments, + "current_download": self.current_download, + "latitude": self.longitude, + "longitude": self.latitude, } return _entry -_ACTIVE = 'ACTIVE' -NOT_FOUND = 'NOT_FOUND' +_ACTIVE = "ACTIVE" +NOT_FOUND = "NOT_FOUND" DESTINATION_STATUS = ( - (_ACTIVE, 'Active'), - (NOT_FOUND, 'Not found'), - (ERROR, 'Error'), + (_ACTIVE, "Active"), + (NOT_FOUND, "Not found"), + (ERROR, "Error"), ) @@ -605,71 +619,67 @@ class ShortLink(models.Model): destination_status = models.CharField(max_length=15, choices=DESTINATION_STATUS, default=_ACTIVE) destination_status_text = models.CharField(max_length=250, default=None, blank=True, null=True) - utm_content = models.CharField(max_length=250, - null=True, - default=None, - blank=True, - help_text='Can be de ad group id or ad id') - utm_medium = models.CharField(max_length=50, - blank=True, - null=True, - default=None, - help_text='social, organic, paid, email, referral, etc.') - utm_campaign = models.CharField(max_length=50, - blank=True, - null=True, - default=None, - help_text='Campaign ID when PPC but can be a string in more informal campaigns') - utm_source = models.CharField(max_length=50, - blank=True, - null=True, - default=None, - help_text='fb, ig, google, twitter, quora, etc.') - utm_term = models.CharField(max_length=50, blank=True, null=True, default=None, help_text='Keyword used in cpc') - utm_placement = models.CharField(max_length=50, - blank=True, - null=True, - default=None, - help_text='User agent or device screen') - utm_plan = models.CharField(max_length=50, - blank=True, - null=True, - default=None, - help_text='If its applying for a scholarship, upfront, isa, financing, etc.') + utm_content = models.CharField( + max_length=250, null=True, default=None, blank=True, help_text="Can be de ad group id or ad id" + ) + utm_medium = models.CharField( + max_length=50, 
blank=True, null=True, default=None, help_text="social, organic, paid, email, referral, etc." + ) + utm_campaign = models.CharField( + max_length=50, + blank=True, + null=True, + default=None, + help_text="Campaign ID when PPC but can be a string in more informal campaigns", + ) + utm_source = models.CharField( + max_length=50, blank=True, null=True, default=None, help_text="fb, ig, google, twitter, quora, etc." + ) + utm_term = models.CharField(max_length=50, blank=True, null=True, default=None, help_text="Keyword used in cpc") + utm_placement = models.CharField( + max_length=50, blank=True, null=True, default=None, help_text="User agent or device screen" + ) + utm_plan = models.CharField( + max_length=50, + blank=True, + null=True, + default=None, + help_text="If its applying for a scholarship, upfront, isa, financing, etc.", + ) # Status academy = models.ForeignKey(Academy, on_delete=models.CASCADE) author = models.ForeignKey(User, on_delete=models.CASCADE) - lastclick_at = models.DateTimeField(blank=True, - null=True, - default=None, - help_text='Last time a click was registered for this link') + lastclick_at = models.DateTimeField( + blank=True, null=True, default=None, help_text="Last time a click was registered for this link" + ) created_at = models.DateTimeField(auto_now_add=True, editable=False) updated_at = models.DateTimeField(auto_now=True, editable=False) def __str__(self): - return f'{str(self.hits)} {self.slug}' + return f"{str(self.hits)} {self.slug}" -PENDING = 'PENDING' -DONE = 'DONE' +PENDING = "PENDING" +DONE = "DONE" WEBHOOK_STATUS = ( - (PENDING, 'Pending'), - (DONE, 'Done'), - (ERROR, 'Error'), + (PENDING, "Pending"), + (DONE, "Done"), + (ERROR, "Error"), ) class ActiveCampaignWebhook(models.Model): webhook_type = models.CharField(max_length=100, blank=True, null=True, default=None) - run_at = models.DateTimeField(help_text='Date/time that the webhook ran', blank=True, null=True, default=None) - initiated_by = models.CharField(max_length=100, - help_text='Source/section of the software that triggered the webhook to run') + run_at = models.DateTimeField(help_text="Date/time that the webhook ran", blank=True, null=True, default=None) + initiated_by = models.CharField( + max_length=100, help_text="Source/section of the software that triggered the webhook to run" + ) - payload = models.JSONField(help_text='Extra info that came on the request, it varies depending on the webhook type') + payload = models.JSONField(help_text="Extra info that came on the request, it varies depending on the webhook type") ac_academy = models.ForeignKey(ActiveCampaignAcademy, on_delete=models.CASCADE) contact = models.ForeignKey(Contact, on_delete=models.CASCADE, default=None, null=True, blank=True) @@ -682,7 +692,7 @@ class ActiveCampaignWebhook(models.Model): updated_at = models.DateTimeField(auto_now=True, editable=False) def __str__(self): - return f'Webhook {self.webhook_type} {self.status} => {self.status_text}' + return f"Webhook {self.webhook_type} {self.status} => {self.status_text}" class Downloadable(models.Model): @@ -691,7 +701,7 @@ class Downloadable(models.Model): description = models.TextField(max_length=450) hits = models.IntegerField(default=0) - active = models.BooleanField(default=True, help_text='Non-active downloadables will display a message to the user') + active = models.BooleanField(default=True, help_text="Non-active downloadables will display a message to the user") preview_url = models.URLField() destination_url = models.URLField() @@ -705,10 +715,11 @@ class 
Downloadable(models.Model): updated_at = models.DateTimeField(auto_now=True, editable=False) def __str__(self): - return f'{self.slug}' + return f"{self.slug}" def save(self, *args, **kwargs): from .signals import downloadable_saved + created = not self.id if created: @@ -717,15 +728,15 @@ def save(self, *args, **kwargs): downloadable_saved.send_robust(instance=self, sender=self.__class__, created=created) -SOURCE = 'SOURCE' -MEDIUM = 'MEDIUM' -CONTENT = 'CONTENT' -CAMPAIGN = 'CAMPAIGN' +SOURCE = "SOURCE" +MEDIUM = "MEDIUM" +CONTENT = "CONTENT" +CAMPAIGN = "CAMPAIGN" UTM_TYPE = ( - (CONTENT, 'Content'), - (SOURCE, 'Source'), - (MEDIUM, 'Medium'), - (CAMPAIGN, 'Campaign'), + (CONTENT, "Content"), + (SOURCE, "Source"), + (MEDIUM, "Medium"), + (CAMPAIGN, "Campaign"), ) @@ -744,25 +755,25 @@ class UTMField(models.Model): updated_at = models.DateTimeField(auto_now=True, editable=False) def __str__(self): - return f'{self.slug}' + return f"{self.slug}" -ACTIVE = 'ACTIVE' -DELETED = 'DELETED' -ARCHIVED = 'ARCHIVED' +ACTIVE = "ACTIVE" +DELETED = "DELETED" +ARCHIVED = "ARCHIVED" COURSE_STATUS = ( - (ACTIVE, 'Active'), - (DELETED, 'Deleted'), - (ARCHIVED, 'Archived'), + (ACTIVE, "Active"), + (DELETED, "Deleted"), + (ARCHIVED, "Archived"), ) -PRIVATE = 'PRIVATE' -UNLISTED = 'UNLISTED' -PUBLIC = 'PUBLIC' +PRIVATE = "PRIVATE" +UNLISTED = "UNLISTED" +PUBLIC = "PUBLIC" VISIBILITY_STATUS = ( - (PRIVATE, 'Private'), - (UNLISTED, 'Unlisted'), - (PUBLIC, 'Public'), + (PRIVATE, "Private"), + (UNLISTED, "Unlisted"), + (PUBLIC, "Public"), ) @@ -775,41 +786,43 @@ class Course(models.Model): plan_slug = models.SlugField(max_length=150, null=True, blank=True, default=None) status = models.CharField(max_length=15, choices=COURSE_STATUS, default=ACTIVE) - color = models.CharField(max_length=50, - null=True, - blank=True, - default=None, - help_text='Add the color with hexadecimal format, i.e.: #FFFFFF') - status_message = models.CharField(max_length=250, - null=True, - blank=True, - default=None, - help_text='Error message if status is ERROR') + color = models.CharField( + max_length=50, + null=True, + blank=True, + default=None, + help_text="Add the color with hexadecimal format, i.e.: #FFFFFF", + ) + status_message = models.CharField( + max_length=250, null=True, blank=True, default=None, help_text="Error message if status is ERROR" + ) visibility = models.CharField(max_length=15, choices=VISIBILITY_STATUS, default=PRIVATE) - icon_url = models.URLField(help_text='Image icon to show on website') + icon_url = models.URLField(help_text="Image icon to show on website") technologies = models.CharField(max_length=150, blank=False) - has_waiting_list = models.BooleanField(default=False, help_text='Has waiting list?') + has_waiting_list = models.BooleanField(default=False, help_text="Has waiting list?") - invites = models.ManyToManyField(UserInvite, blank=True, help_text='Plan\'s invites', related_name='courses') + invites = models.ManyToManyField(UserInvite, blank=True, help_text="Plan's invites", related_name="courses") created_at = models.DateTimeField(auto_now_add=True, editable=False) updated_at = models.DateTimeField(auto_now=True, editable=False) def __str__(self): - return f'{self.slug}' + return f"{self.slug}" def clean(self) -> None: if self.cohort and self.cohort.never_ends == False: - raise Exception('Cohort must be a never ending cohort') + raise Exception("Cohort must be a never ending cohort") - if self.cohort and (self.cohort.available_as_saas == False or - (self.cohort.available_as_saas == None and 
self.cohort.academy.available_as_saas == False)): + if self.cohort and ( + self.cohort.available_as_saas == False + or (self.cohort.available_as_saas == None and self.cohort.academy.available_as_saas == False) + ): - raise Exception('Cohort must be available as saas') + raise Exception("Cohort must be available as saas") if self.cohort and self.academy != self.cohort.academy: - raise Exception('Cohort must belong to the same academy') + raise Exception("Cohort must belong to the same academy") def save(self, *args, **kwargs): self.full_clean() @@ -822,37 +835,40 @@ class CourseTranslation(models.Model): title = models.CharField(max_length=60) description = models.TextField(max_length=400) short_description = models.CharField(max_length=120, null=True, default=None, blank=True) - video_url = models.URLField(default=None, - null=True, - blank=True, - help_text='Video that introduces/promotes this course') - landing_url = models.URLField(default=None, - null=True, - blank=True, - help_text='Landing URL used on call to actions where the course is shown. ' - 'A URL is needed per each translation.') + video_url = models.URLField( + default=None, null=True, blank=True, help_text="Video that introduces/promotes this course" + ) + landing_url = models.URLField( + default=None, + null=True, + blank=True, + help_text="Landing URL used on call to actions where the course is shown. " + "A URL is needed per each translation.", + ) course_modules = models.JSONField( default=None, blank=True, null=True, - help_text='The course modules should be a list of objects of each of the modules taught') + help_text="The course modules should be a list of objects of each of the modules taught", + ) landing_variables = models.JSONField( default=None, blank=True, null=True, - help_text='Different variables that can be used for marketing purposes in the landing page.') + help_text="Different variables that can be used for marketing purposes in the landing page.", + ) def __str__(self) -> str: - return f'{self.lang}: {self.title}' + return f"{self.lang}: {self.title}" def save(self, *args, **kwargs): course_modules = self.course_modules or [] for course_module in course_modules: - if course_module['name'] is None or course_module['name'] == '': - raise Exception('The module does not have a name.') - if course_module['slug'] is None or course_module['slug'] == '': + if course_module["name"] is None or course_module["name"] == "": + raise Exception("The module does not have a name.") + if course_module["slug"] is None or course_module["slug"] == "": raise Exception(f'The module {course_module["name"]} does not have a slug.') - if course_module['description'] is None or course_module['description'] == '': + if course_module["description"] is None or course_module["description"] == "": raise Exception(f'The module {course_module["name"]} does not have a description.') result = super().save(*args, **kwargs) diff --git a/breathecode/marketing/receivers.py b/breathecode/marketing/receivers.py index b4e8aa7dc..7a64759bd 100644 --- a/breathecode/marketing/receivers.py +++ b/breathecode/marketing/receivers.py @@ -19,7 +19,7 @@ def post_save_profileacademy(sender, instance, **kwargs): # if a new ProfileAcademy is created on the authanticate app # look for the email on the formentry list and bind it - logger.debug('Receiver for academy_invite_accepted triggered, linking the new user to its respective form entries') + logger.debug("Receiver for academy_invite_accepted triggered, linking the new user to its respective form entries") 
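# the query and loop below take every FormEntry submitted with this user's email that has
# no user linked yet, and bind it to the account whose academy invite was just accepted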
entries = FormEntry.objects.filter(email=instance.user.email, user__isnull=True) for entry in entries: entry.user = instance.user @@ -28,8 +28,8 @@ def post_save_profileacademy(sender, instance, **kwargs): @receiver(student_edu_status_updated, sender=CohortUser) def student_edustatus_updated(sender, instance, *args, **kwargs): - if instance.educational_status == 'ACTIVE': - logger.warning(f'Student is now active in cohort `{instance.cohort.slug}`, processing task') + if instance.educational_status == "ACTIVE": + logger.warning(f"Student is now active in cohort `{instance.cohort.slug}`, processing task") tasks.add_cohort_task_to_student.delay(instance.user.id, instance.cohort.id, instance.cohort.academy.id) diff --git a/breathecode/marketing/serializers.py b/breathecode/marketing/serializers.py index 139bae986..cfe8a3fe4 100644 --- a/breathecode/marketing/serializers.py +++ b/breathecode/marketing/serializers.py @@ -226,7 +226,7 @@ def get_cohort(self, obj): return CohortHookSerializer(_cohort).data def get_is_won(self, obj): - return obj.deal_status == 'WON' + return obj.deal_status == "WON" class FormEntrySmallSerializer(serpy.Serializer): @@ -323,7 +323,7 @@ class FormEntryBigSerializer(serpy.Serializer): def get_tag_objects(self, obj): tag_ids = [] if obj.tags is not None: - tag_ids = obj.tags.split(',') + tag_ids = obj.tags.split(",") tags = Tag.objects.filter(slug__in=tag_ids, ac_academy__academy=obj.calculate_academy()) return TagSmallSerializer(tags, many=True).data @@ -331,7 +331,7 @@ def get_tag_objects(self, obj): def get_automation_objects(self, obj): automation_ids = [] if obj.automations is not None: - automation_ids = obj.automations.split(',') + automation_ids = obj.automations.split(",") automations = Automation.objects.filter(slug__in=automation_ids, ac_academy__academy=obj.calculate_academy()) return AutomationSmallSerializer(automations, many=True).data @@ -376,12 +376,12 @@ def get_academy(self, obj): return obj.academy.id def get_syllabus(self, obj): - return [x for x in obj.syllabus.all().values_list('id', flat=True)] + return [x for x in obj.syllabus.all().values_list("id", flat=True)] def get_course_translation(self, obj): query_args = [] - query_kwargs = {'course': obj} - obj.lang = obj.lang or 'en' + query_kwargs = {"course": obj} + obj.lang = obj.lang or "en" query_args.append(Q(lang=obj.lang) | Q(lang=obj.lang[:2]) | Q(lang__startswith=obj.lang[:2])) @@ -413,30 +413,30 @@ class PostFormEntrySerializer(serializers.ModelSerializer): class Meta: model = FormEntry exclude = () - read_only_fields = ['id'] + read_only_fields = ["id"] def create(self, validated_data): academy = None - if 'location' in validated_data: - alias = AcademyAlias.objects.filter(active_campaign_slug=validated_data['location']).first() + if "location" in validated_data: + alias = AcademyAlias.objects.filter(active_campaign_slug=validated_data["location"]).first() if alias is not None: academy = alias.academy else: - academy = Academy.objects.filter(active_campaign_slug=validated_data['location']).first() + academy = Academy.objects.filter(active_campaign_slug=validated_data["location"]).first() # copy the validated data just to do small last minute corrections data = validated_data.copy() # "us" language will become "en" language, its the right lang code - if 'language' in data and data['language'] == 'us': - data['language'] = 'en' + if "language" in data and data["language"] == "us": + data["language"] = "en" - if 'tag_objects' in data and data['tag_objects'] != '': - tag_ids = 
data['tag_objects'].split(',') - data['tags'] = Tag.objects.filter(id__in=tag_ids) + if "tag_objects" in data and data["tag_objects"] != "": + tag_ids = data["tag_objects"].split(",") + data["tags"] = Tag.objects.filter(id__in=tag_ids) - result = super().create({**data, 'academy': academy}) + result = super().create({**data, "academy": academy}) return result @@ -445,51 +445,55 @@ class ShortLinkSerializer(serializers.ModelSerializer): class Meta: model = ShortLink - exclude = ('academy', 'author', 'hits', 'destination_status', 'destination_status_text') + exclude = ("academy", "author", "hits", "destination_status", "destination_status_text") def validate(self, data): - if 'slug' in data and data['slug'] is not None: + if "slug" in data and data["slug"] is not None: - if not re.match(r'^[-\w]+$', data['slug']): + if not re.match(r"^[-\w]+$", data["slug"]): raise ValidationException( f'Invalid link slug {data["slug"]}, should only contain letters, numbers and slash "-"', - slug='invalid-slug-format') + slug="invalid-slug-format", + ) - link = ShortLink.objects.filter(slug=data['slug']).first() + link = ShortLink.objects.filter(slug=data["slug"]).first() if link is not None and (self.instance is None or self.instance.id != link.id): - raise ValidationException(f'Shortlink with slug {data["slug"]} already exists', - slug='shortlink-already-exists') + raise ValidationException( + f'Shortlink with slug {data["slug"]} already exists', slug="shortlink-already-exists" + ) elif self.instance is None: # only if it's being created I will pick a new slug, if not I will allow it to have the original slug latest_url = ShortLink.objects.all().last() if latest_url is None: - data['slug'] = 'L' + to_base(1) + data["slug"] = "L" + to_base(1) else: - data['slug'] = 'L' + to_base(latest_url.id + 1) + data["slug"] = "L" + to_base(latest_url.id + 1) - status = test_link(data['destination']) - if status['status_code'] < 200 or status['status_code'] > 299: + status = test_link(data["destination"]) + if status["status_code"] < 200 or status["status_code"] > 299: raise ValidationException(f'Destination URL is invalid, returning status {status["status_code"]}') - academy = Academy.objects.filter(id=self.context['academy']).first() + academy = Academy.objects.filter(id=self.context["academy"]).first() if academy is None: - raise ValidationException(f'Academy {self.context["academy"]} not found', slug='academy-not-found') + raise ValidationException(f'Academy {self.context["academy"]} not found', slug="academy-not-found") - if self.instance is not None: #creating a new link (instead of updating) + if self.instance is not None: # creating a new link (instead of updating) utc_now = timezone.now() days_ago = self.instance.created_at + timedelta(days=1) - if days_ago < utc_now and (self.instance.destination != data['destination'] - or self.instance.slug != data['slug']): + if days_ago < utc_now and ( + self.instance.destination != data["destination"] or self.instance.slug != data["slug"] + ): raise ValidationException( - 'You cannot update or delete short links that have been created more than 1 day ago, create a new link instead', - slug='update-days-ago') + "You cannot update or delete short links that have been created more than 1 day ago, create a new link instead", + slug="update-days-ago", + ) - return {**data, 'academy': academy} + return {**data, "academy": academy} def create(self, validated_data): - return ShortLink.objects.create(**validated_data, author=self.context.get('request').user) + return 
ShortLink.objects.create(**validated_data, author=self.context.get("request").user) class TagListSerializer(serializers.ListSerializer): @@ -507,7 +511,7 @@ class PUTTagSerializer(serializers.ModelSerializer): class Meta: model = Tag - exclude = ('slug', 'acp_id', 'subscribers', 'ac_academy', 'created_at', 'updated_at') + exclude = ("slug", "acp_id", "subscribers", "ac_academy", "created_at", "updated_at") list_serializer_class = TagListSerializer @@ -516,7 +520,7 @@ class PUTAutomationSerializer(serializers.ModelSerializer): class Meta: model = Automation - exclude = ('acp_id', 'ac_academy', 'created_at', 'updated_at', 'entered', 'exited') + exclude = ("acp_id", "ac_academy", "created_at", "updated_at", "entered", "exited") list_serializer_class = TagListSerializer @@ -524,15 +528,15 @@ class ActiveCampaignAcademySerializer(serializers.ModelSerializer): class Meta: model = ActiveCampaignAcademy - exclude = ('academy', ) + exclude = ("academy",) def validate(self, data): - academy = Academy.objects.filter(id=self.context['academy']).first() + academy = Academy.objects.filter(id=self.context["academy"]).first() if academy is None: - raise ValidationException(f'Academy {self.context["academy"]} not found', slug='academy-not-found') + raise ValidationException(f'Academy {self.context["academy"]} not found', slug="academy-not-found") - return {**data, 'academy': academy} + return {**data, "academy": academy} def create(self, validated_data): return ActiveCampaignAcademy.objects.create(**validated_data) diff --git a/breathecode/marketing/signals.py b/breathecode/marketing/signals.py index d5a1b653f..e075a455e 100644 --- a/breathecode/marketing/signals.py +++ b/breathecode/marketing/signals.py @@ -2,9 +2,9 @@ from task_manager.django.dispatch import Emisor -emisor = Emisor('breathecode.marketing') +emisor = Emisor("breathecode.marketing") -downloadable_saved = emisor.signal('downloadable_saved') +downloadable_saved = emisor.signal("downloadable_saved") -form_entry_won_or_lost = emisor.signal('form_entry_won_or_lost') -new_form_entry_deal = emisor.signal('new_form_entry_deal') +form_entry_won_or_lost = emisor.signal("form_entry_won_or_lost") +new_form_entry_deal = emisor.signal("new_form_entry_deal") diff --git a/breathecode/marketing/tasks.py b/breathecode/marketing/tasks.py index 83e46d48c..924f70c3f 100644 --- a/breathecode/marketing/tasks.py +++ b/breathecode/marketing/tasks.py @@ -34,40 +34,44 @@ from .serializers import PostFormEntrySerializer logger = getLogger(__name__) -is_test_env = os.getenv('ENV') == 'test' +is_test_env = os.getenv("ENV") == "test" @task(priority=TaskPriority.MARKETING.value) def persist_single_lead(form_data, **_: Any): - logger.info('Starting persist_single_lead') + logger.info("Starting persist_single_lead") entry = None try: entry = register_new_lead(form_data) except Timeout as e: - if 'id' in form_data: - entry = FormEntry.objects.filter(id=form_data['id']).first() + if "id" in form_data: + entry = FormEntry.objects.filter(id=form_data["id"]).first() if entry is not None: entry.storage_status_text = str(e) - entry.storage_status = 'PENDING' + entry.storage_status = "PENDING" entry.save() - raise RetryTask(f'Timeout processing lead for form_entry {str(entry.id)}') + raise RetryTask(f"Timeout processing lead for form_entry {str(entry.id)}") except Exception as e: if not form_data: return - if 'id' in form_data: - entry = FormEntry.objects.filter(id=form_data['id']).first() + if "id" in form_data: + entry = FormEntry.objects.filter(id=form_data["id"]).first() if 
entry is not None: entry.storage_status_text = str(e) - entry.storage_status = 'ERROR' + entry.storage_status = "ERROR" entry.save() raise e - if entry is not None and entry != False and not is_test_env and ('city' not in form_data - or form_data['city'] is None): + if ( + entry is not None + and entry != False + and not is_test_env + and ("city" not in form_data or form_data["city"] is None) + ): save_get_geolocal(entry, form_data) return True @@ -75,40 +79,40 @@ def persist_single_lead(form_data, **_: Any): @task(priority=TaskPriority.MARKETING.value) def update_link_viewcount(slug, **_: Any): - logger.info('Starting update_link_viewcount') + logger.info("Starting update_link_viewcount") sl = ShortLink.objects.filter(slug=slug).first() if sl is None: - raise RetryTask(f'ShortLink with slug {slug} not found') + raise RetryTask(f"ShortLink with slug {slug} not found") sl.hits = sl.hits + 1 sl.lastclick_at = timezone.now() sl.save() result = test_link(url=sl.destination) - if result['status_code'] < 200 or result['status_code'] > 299: - sl.destination_status_text = result['status_text'] - sl.destination_status = 'ERROR' + if result["status_code"] < 200 or result["status_code"] > 299: + sl.destination_status_text = result["status_text"] + sl.destination_status = "ERROR" sl.save() - raise Exception(result['status_text']) + raise Exception(result["status_text"]) else: - sl.destination_status = 'ACTIVE' - sl.destination_status_text = result['status_text'] + sl.destination_status = "ACTIVE" + sl.destination_status_text = result["status_text"] sl.save() @task(priority=TaskPriority.MARKETING.value) def async_update_deal_custom_fields(formentry_id: str, **_: Any): - logger.info('Starting to sync deal with contact') + logger.info("Starting to sync deal with contact") update_deal_custom_fields(formentry_id) - logger.debug('async_update_deal_custom_fields: ok') + logger.debug("async_update_deal_custom_fields: ok") @task(priority=TaskPriority.REALTIME.value) def async_activecampaign_webhook(webhook_id, **_: Any): - logger.info('Starting async_activecampaign_webhook') + logger.info("Starting async_activecampaign_webhook") webhook = ActiveCampaignWebhook.objects.filter(id=webhook_id).first() ac_academy = webhook.ac_academy @@ -120,301 +124,303 @@ def async_activecampaign_webhook(webhook_id, **_: Any): client = ActiveCampaign(ac_academy.ac_key, ac_academy.ac_url) client.execute_action(webhook_id) except Exception as e: - logger.debug('ActiveCampaign Webhook Exception') + logger.debug("ActiveCampaign Webhook Exception") raise e else: - message = f"ActiveCampaign Academy Profile {webhook_id} doesn\'t exist" + message = f"ActiveCampaign Academy Profile {webhook_id} doesn't exist" - webhook.status = 'ERROR' + webhook.status = "ERROR" webhook.status_text = message webhook.save() logger.debug(message) raise Exception(message) - logger.debug('ActiveCampaign webook status: ok') + logger.debug("ActiveCampaign webook status: ok") @task(priority=TaskPriority.MARKETING.value) def add_cohort_task_to_student(user_id, cohort_id, academy_id, **_: Any): - logger.info('Task add_cohort_task_to_student started') + logger.info("Task add_cohort_task_to_student started") if not Academy.objects.filter(id=academy_id).exists(): - raise AbortTask(f'Academy {academy_id} not found') + raise AbortTask(f"Academy {academy_id} not found") ac_academy = ActiveCampaignAcademy.objects.filter(academy__id=academy_id).first() if ac_academy is None: - raise AbortTask(f'ActiveCampaign Academy {academy_id} not found') + raise 
AbortTask(f"ActiveCampaign Academy {academy_id} not found") user = User.objects.filter(id=user_id).first() if user is None: - raise AbortTask(f'User {user_id} not found') + raise AbortTask(f"User {user_id} not found") cohort = Cohort.objects.filter(id=cohort_id).first() if cohort is None: - raise AbortTask(f'Cohort {cohort_id} not found') + raise AbortTask(f"Cohort {cohort_id} not found") client = ActiveCampaign(ac_academy.ac_key, ac_academy.ac_url) tag = Tag.objects.filter(slug__iexact=cohort.slug, ac_academy__id=ac_academy.id).first() if tag is None: raise AbortTask( - f'Cohort tag `{cohort.slug}` does not exist in the system, the tag could not be added to the student. ' - 'This tag was supposed to be created by the system when creating a new cohort') + f"Cohort tag `{cohort.slug}` does not exist in the system, the tag could not be added to the student. " + "This tag was supposed to be created by the system when creating a new cohort" + ) contact = client.get_contact_by_email(user.email) logger.info(f'Adding tag {tag.id} to acp contact {contact["id"]}') - client.add_tag_to_contact(contact['id'], tag.acp_id) + client.add_tag_to_contact(contact["id"], tag.acp_id) @task(priority=TaskPriority.MARKETING.value) def add_event_tags_to_student(event_id: int, user_id: Optional[int] = None, email: Optional[str] = None, **_: Any): - logger.info('Task add_event_tags_to_student started') + logger.info("Task add_event_tags_to_student started") if not user_id and not email: - raise AbortTask('Impossible to determine the user email') + raise AbortTask("Impossible to determine the user email") if user_id and email: - raise AbortTask('You can\'t provide the user_id and email together') + raise AbortTask("You can't provide the user_id and email together") if not email: - email = User.objects.filter(id=user_id).values_list('email', flat=True).first() + email = User.objects.filter(id=user_id).values_list("email", flat=True).first() if not email: - raise AbortTask('We can\'t get the user email') + raise AbortTask("We can't get the user email") event = Event.objects.filter(id=event_id).first() if event is None: - raise AbortTask(f'Event {event_id} not found') + raise AbortTask(f"Event {event_id} not found") if not event.academy: - raise AbortTask('Impossible to determine the academy') + raise AbortTask("Impossible to determine the academy") academy = event.academy ac_academy = ActiveCampaignAcademy.objects.filter(academy__id=academy.id).first() if ac_academy is None: - raise AbortTask(f'ActiveCampaign Academy {academy.id} not found') + raise AbortTask(f"ActiveCampaign Academy {academy.id} not found") client = ActiveCampaign(ac_academy.ac_key, ac_academy.ac_url) - tag_slugs = [x for x in event.tags.split(',') if x] # prevent a tag with the slug '' + tag_slugs = [x for x in event.tags.split(",") if x] # prevent a tag with the slug '' if event.slug: - tag_slugs.append(f'event-{event.slug}' if not event.slug.startswith('event-') else event.slug) + tag_slugs.append(f"event-{event.slug}" if not event.slug.startswith("event-") else event.slug) tags = Tag.objects.filter(slug__in=tag_slugs, ac_academy__id=ac_academy.id) if not tags: - raise AbortTask('Tags not found') + raise AbortTask("Tags not found") contact = client.get_contact_by_email(email) for tag in tags: logger.info(f'Adding tag {tag.id} to acp contact {contact["id"]}') - client.add_tag_to_contact(contact['id'], tag.acp_id) + client.add_tag_to_contact(contact["id"], tag.acp_id) @task(priority=TaskPriority.MARKETING.value) def 
add_cohort_slug_as_acp_tag(cohort_id: int, academy_id: int, **_: Any) -> None: - logger.info('Task add_cohort_slug_as_acp_tag started') + logger.info("Task add_cohort_slug_as_acp_tag started") if not Academy.objects.filter(id=academy_id).exists(): - raise AbortTask(f'Academy {academy_id} not found') + raise AbortTask(f"Academy {academy_id} not found") ac_academy = ActiveCampaignAcademy.objects.filter(academy__id=academy_id).first() if ac_academy is None: - raise AbortTask(f'ActiveCampaign Academy {academy_id} not found') + raise AbortTask(f"ActiveCampaign Academy {academy_id} not found") cohort = Cohort.objects.filter(id=cohort_id).first() if cohort is None: - raise AbortTask(f'Cohort {cohort_id} not found') + raise AbortTask(f"Cohort {cohort_id} not found") client = ActiveCampaign(ac_academy.ac_key, ac_academy.ac_url) tag = Tag.objects.filter(slug=cohort.slug, ac_academy__id=ac_academy.id).first() if tag: - raise AbortTask(f'Tag for cohort `{cohort.slug}` already exists') + raise AbortTask(f"Tag for cohort `{cohort.slug}` already exists") - data = client.create_tag(cohort.slug, description=f'Cohort {cohort.slug} at {ac_academy.academy.slug}') + data = client.create_tag(cohort.slug, description=f"Cohort {cohort.slug} at {ac_academy.academy.slug}") - tag = Tag(slug=data['tag'], acp_id=data['id'], tag_type='COHORT', ac_academy=ac_academy, subscribers=0) + tag = Tag(slug=data["tag"], acp_id=data["id"], tag_type="COHORT", ac_academy=ac_academy, subscribers=0) tag.save() @task(priority=TaskPriority.MARKETING.value) def add_event_slug_as_acp_tag(event_id: int, academy_id: int, force=False, **_: Any) -> None: - logger.info('Task add_event_slug_as_acp_tag started') + logger.info("Task add_event_slug_as_acp_tag started") if not Academy.objects.filter(id=academy_id).exists(): - raise AbortTask(f'Academy {academy_id} not found') + raise AbortTask(f"Academy {academy_id} not found") ac_academy = ActiveCampaignAcademy.objects.filter(academy__id=academy_id).first() if ac_academy is None: - raise AbortTask(f'ActiveCampaign Academy {academy_id} not found') + raise AbortTask(f"ActiveCampaign Academy {academy_id} not found") event = Event.objects.filter(id=event_id).first() if event is None: - raise AbortTask(f'Event {event_id} not found') + raise AbortTask(f"Event {event_id} not found") if not event.slug: - raise AbortTask(f'Event {event_id} does not have slug') + raise AbortTask(f"Event {event_id} does not have slug") client = ActiveCampaign(ac_academy.ac_key, ac_academy.ac_url) - if event.slug.startswith('event-'): + if event.slug.startswith("event-"): new_tag_slug = event.slug else: - new_tag_slug = f'event-{event.slug}' + new_tag_slug = f"event-{event.slug}" if (tag := Tag.objects.filter(slug=new_tag_slug, ac_academy__id=ac_academy.id).first()) and not force: - raise AbortTask(f'Tag for event `{event.slug}` already exists') + raise AbortTask(f"Tag for event `{event.slug}` already exists") - data = client.create_tag(new_tag_slug, description=f'Event {event.slug} at {ac_academy.academy.slug}') + data = client.create_tag(new_tag_slug, description=f"Event {event.slug} at {ac_academy.academy.slug}") # retry create the tag in Active Campaign if tag: - tag.slug = data['tag'] - tag.acp_id = data['id'] - tag.tag_type = 'EVENT' + tag.slug = data["tag"] + tag.acp_id = data["id"] + tag.tag_type = "EVENT" tag.ac_academy = ac_academy else: - tag = Tag(slug=data['tag'], acp_id=data['id'], tag_type='EVENT', ac_academy=ac_academy, subscribers=0) + tag = Tag(slug=data["tag"], acp_id=data["id"], tag_type="EVENT", 
ac_academy=ac_academy, subscribers=0) tag.save() @task(priority=TaskPriority.MARKETING.value) def add_downloadable_slug_as_acp_tag(downloadable_id: int, academy_id: int, **_: Any) -> None: - logger.info('Task add_downloadable_slug_as_acp_tag started') + logger.info("Task add_downloadable_slug_as_acp_tag started") if not Academy.objects.filter(id=academy_id).exists(): - raise AbortTask(f'Academy {academy_id} not found') + raise AbortTask(f"Academy {academy_id} not found") ac_academy = ActiveCampaignAcademy.objects.filter(academy__id=academy_id).first() if ac_academy is None: - raise AbortTask(f'ActiveCampaign Academy {academy_id} not found') + raise AbortTask(f"ActiveCampaign Academy {academy_id} not found") downloadable = Downloadable.objects.filter(id=downloadable_id).first() if downloadable is None: - raise AbortTask(f'Downloadable {downloadable_id} not found') + raise AbortTask(f"Downloadable {downloadable_id} not found") client = ActiveCampaign(ac_academy.ac_key, ac_academy.ac_url) - if downloadable.slug.startswith('down-'): + if downloadable.slug.startswith("down-"): new_tag_slug = downloadable.slug else: - new_tag_slug = f'down-{downloadable.slug}' + new_tag_slug = f"down-{downloadable.slug}" tag = Tag.objects.filter(slug=new_tag_slug, ac_academy__id=ac_academy.id).first() if tag: - raise AbortTask(f'Tag for downloadable `{downloadable.slug}` already exists') + raise AbortTask(f"Tag for downloadable `{downloadable.slug}` already exists") try: - data = client.create_tag(new_tag_slug, - description=f'Downloadable {downloadable.slug} at {ac_academy.academy.slug}') + data = client.create_tag( + new_tag_slug, description=f"Downloadable {downloadable.slug} at {ac_academy.academy.slug}" + ) - tag = Tag(slug=data['tag'], acp_id=data['id'], tag_type='DOWNLOADABLE', ac_academy=ac_academy, subscribers=0) + tag = Tag(slug=data["tag"], acp_id=data["id"], tag_type="DOWNLOADABLE", ac_academy=ac_academy, subscribers=0) tag.save() except Exception as e: - logger.error(f'There was an error creating tag for downloadable {downloadable.slug}') + logger.error(f"There was an error creating tag for downloadable {downloadable.slug}") raise e @task(priority=TaskPriority.MARKETING.value) def create_form_entry(csv_upload_id, **item): # remove the task manager parameters - item.pop('pop', None) - item.pop('total_pages', None) - item.pop('attempts', None) - item.pop('task_manager_id', None) + item.pop("pop", None) + item.pop("total_pages", None) + item.pop("attempts", None) + item.pop("task_manager_id", None) - logger.info('Create form entry started') + logger.info("Create form entry started") csv_upload = CSVUpload.objects.filter(id=csv_upload_id).first() if not csv_upload: - raise RetryTask('No CSVUpload found with this id') + raise RetryTask("No CSVUpload found with this id") form_entry = FormEntry() - error_message = '' - - if 'first_name' in item: - form_entry.first_name = item['first_name'] - if 'last_name' in item: - form_entry.last_name = item['last_name'] - if 'email' in item: - form_entry.email = item['email'] - if 'location' in item: - if AcademyAlias.objects.filter(active_campaign_slug=item['location']).exists(): - form_entry.location = item['location'] - elif Academy.objects.filter(active_campaign_slug=item['location']).exists(): - form_entry.location = item['location'] + error_message = "" + + if "first_name" in item: + form_entry.first_name = item["first_name"] + if "last_name" in item: + form_entry.last_name = item["last_name"] + if "email" in item: + form_entry.email = item["email"] + if 
"location" in item: + if AcademyAlias.objects.filter(active_campaign_slug=item["location"]).exists(): + form_entry.location = item["location"] + elif Academy.objects.filter(active_campaign_slug=item["location"]).exists(): + form_entry.location = item["location"] else: message = f'No academy exists with this academy active_campaign_slug: {item["academy"]}' - error_message += f'{message}, ' + error_message += f"{message}, " logger.error(message) - if 'academy' in item: - if alias := AcademyAlias.objects.filter(slug=item['academy']).first(): + if "academy" in item: + if alias := AcademyAlias.objects.filter(slug=item["academy"]).first(): form_entry.academy = alias.academy - elif academy := Academy.objects.filter(slug=item['academy']).first(): + elif academy := Academy.objects.filter(slug=item["academy"]).first(): form_entry.academy = academy else: message = f'No academy exists with this academy slug: {item["academy"]}' - error_message += f'{message}, ' + error_message += f"{message}, " logger.error(message) if not form_entry.first_name: - message = 'No first name in form entry' - error_message += f'{message}, ' + message = "No first name in form entry" + error_message += f"{message}, " logger.error(message) - if form_entry.first_name and not re.findall(r'^[A-Za-zÀ-ÖØ-öø-ÿ ]+$', form_entry.first_name): - message = 'first name has incorrect characters' - error_message += f'{message}, ' + if form_entry.first_name and not re.findall(r"^[A-Za-zÀ-ÖØ-öø-ÿ ]+$", form_entry.first_name): + message = "first name has incorrect characters" + error_message += f"{message}, " logger.error(message) if not form_entry.last_name: - message = 'No last name in form entry' - error_message += f'{message}, ' + message = "No last name in form entry" + error_message += f"{message}, " logger.error(message) - if form_entry.last_name and not re.findall(r'^[A-Za-zÀ-ÖØ-öø-ÿ ]+$', form_entry.last_name): - message = 'last name has incorrect characters' - error_message += f'{message}, ' + if form_entry.last_name and not re.findall(r"^[A-Za-zÀ-ÖØ-öø-ÿ ]+$", form_entry.last_name): + message = "last name has incorrect characters" + error_message += f"{message}, " logger.error(message) if not form_entry.email: - message = 'No email in form entry' - error_message += f'{message}, ' + message = "No email in form entry" + error_message += f"{message}, " logger.error(message) email_pattern = r'(?:[a-z0-9!#$%&\'*+/=?^_`{|}~-]+(?:\.[a-z0-9!#$%&\'*+/=?^_`{|}~-]+)*|"(?:[\x01-\x08\x0b\x0c\x0e-\x1f\x21\x23-\x5b\x5d-\x7f]|\\[\x01-\x09\x0b\x0c\x0e-\x7f])*")@(?:(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])?|\[(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?|[a-z0-9-]*[a-z0-9]:(?:[\x01-\x08\x0b\x0c\x0e-\x1f\x21-\x5a\x53-\x7f]|\\[\x01-\x09\x0b\x0c\x0e-\x7f])+)\])' if form_entry.email and not re.findall(email_pattern, form_entry.email, re.IGNORECASE): - message = 'email has incorrect format' - error_message += f'{message}, ' + message = "email has incorrect format" + error_message += f"{message}, " logger.error(message) if not form_entry.location or not form_entry.academy: - message = 'No location or academy in form entry' - error_message += f'{message}, ' + message = "No location or academy in form entry" + error_message += f"{message}, " logger.error(message) - if error_message.endswith(', '): + if error_message.endswith(", "): error_message = error_message[0:-2] - error_message = f'{error_message}. ' + error_message = f"{error_message}. 
" if error_message: - csv_upload.log = csv_upload.log or '' + csv_upload.log = csv_upload.log or "" csv_upload.log += error_message - logger.error('Missing field in received item') + logger.error("Missing field in received item") logger.error(item) - csv_upload.status = 'ERROR' + csv_upload.status = "ERROR" - elif csv_upload.status != 'ERROR': - csv_upload.status = 'DONE' + elif csv_upload.status != "ERROR": + csv_upload.status = "DONE" csv_upload.id = csv_upload_id @@ -425,7 +431,7 @@ def create_form_entry(csv_upload_id, **item): serializer = PostFormEntrySerializer(form_entry, data={}) if serializer.is_valid(): persist_single_lead.delay(serializer.data) - logger.info('create_form_entry successfully created') + logger.info("create_form_entry successfully created") else: raise Exception(error_message) diff --git a/breathecode/marketing/tests/management/commands/tests_rerun_pending_ac_webhooks.py b/breathecode/marketing/tests/management/commands/tests_rerun_pending_ac_webhooks.py index d569bf0a1..7ae3a3cff 100644 --- a/breathecode/marketing/tests/management/commands/tests_rerun_pending_ac_webhooks.py +++ b/breathecode/marketing/tests/management/commands/tests_rerun_pending_ac_webhooks.py @@ -13,7 +13,7 @@ @pytest.fixture(autouse=True) def setup(db, monkeypatch): - monkeypatch.setattr(tasks.async_activecampaign_webhook, 'delay', MagicMock()) + monkeypatch.setattr(tasks.async_activecampaign_webhook, "delay", MagicMock()) yield @@ -27,29 +27,29 @@ def test_no_webhooks(): def generate_params(): deltas = [timedelta(days=0), timedelta(days=1), timedelta(days=2)] for delta in deltas: - yield 'PENDING', delta + yield "PENDING", delta deltas = deltas + [None, timedelta(days=3, seconds=1), timedelta(days=4), timedelta(days=5)] - for status in ['DONE', 'ERROR']: + for status in ["DONE", "ERROR"]: for delta in deltas: yield status, delta -@pytest.mark.parametrize('status,delta', [*generate_params()]) +@pytest.mark.parametrize("status,delta", [*generate_params()]) def test_with_webhooks_requirements_no_meet(bc: Breathecode, status, delta): active_campaign_webhook = { - 'status': status, - 'payload': {}, - 'run_at': None, + "status": status, + "payload": {}, + "run_at": None, } if delta is not None: - active_campaign_webhook['run_at'] = timezone.now() - delta + active_campaign_webhook["run_at"] = timezone.now() - delta model = bc.database.create(active_campaign_webhook=active_campaign_webhook) command = Command() command.handle() - assert bc.database.list_of('marketing.ActiveCampaignWebhook') == [ + assert bc.database.list_of("marketing.ActiveCampaignWebhook") == [ bc.format.to_dict(model.active_campaign_webhook), ] assert tasks.async_activecampaign_webhook.delay.call_args_list == [] @@ -58,23 +58,23 @@ def test_with_webhooks_requirements_no_meet(bc: Breathecode, status, delta): def generate_params(): deltas = [None, timedelta(days=3, seconds=1), timedelta(days=4), timedelta(days=5)] for delta in deltas: - yield 'PENDING', delta + yield "PENDING", delta -@pytest.mark.parametrize('status,delta', [*generate_params()]) +@pytest.mark.parametrize("status,delta", [*generate_params()]) def test_with_webhooks_requirements_meet(bc: Breathecode, status, delta): active_campaign_webhook = { - 'status': status, - 'payload': {}, - 'run_at': None, + "status": status, + "payload": {}, + "run_at": None, } if delta is not None: - active_campaign_webhook['run_at'] = timezone.now() - delta + active_campaign_webhook["run_at"] = timezone.now() - delta model = bc.database.create(active_campaign_webhook=(3, 
active_campaign_webhook)) command = Command() command.handle() - assert bc.database.list_of('marketing.ActiveCampaignWebhook') == bc.format.to_dict(model.active_campaign_webhook) + assert bc.database.list_of("marketing.ActiveCampaignWebhook") == bc.format.to_dict(model.active_campaign_webhook) assert tasks.async_activecampaign_webhook.delay.call_args_list == [call(n + 1) for n in range(3)] diff --git a/breathecode/marketing/tests/management/commands/tests_retry_pending_leads.py b/breathecode/marketing/tests/management/commands/tests_retry_pending_leads.py index 6359f61a4..e22e82128 100644 --- a/breathecode/marketing/tests/management/commands/tests_retry_pending_leads.py +++ b/breathecode/marketing/tests/management/commands/tests_retry_pending_leads.py @@ -11,24 +11,24 @@ def serialize_form_entry(form_entry): return { - 'id': form_entry['id'], - 'first_name': form_entry['first_name'], - 'last_name': form_entry['last_name'], - 'phone': form_entry['phone'], - 'email': form_entry['email'], - 'location': form_entry['location'], - 'referral_key': form_entry['referral_key'], - 'course': form_entry['course'], - 'tags': form_entry['tags'], - 'automations': form_entry['automations'], - 'language': form_entry['language'], - 'city': form_entry['city'], - 'country': form_entry['country'], - 'utm_url': form_entry['utm_url'], - 'client_comments': form_entry['client_comments'], - 'current_download': form_entry['current_download'], - 'latitude': form_entry['latitude'], - 'longitude': form_entry['longitude'] + "id": form_entry["id"], + "first_name": form_entry["first_name"], + "last_name": form_entry["last_name"], + "phone": form_entry["phone"], + "email": form_entry["email"], + "location": form_entry["location"], + "referral_key": form_entry["referral_key"], + "course": form_entry["course"], + "tags": form_entry["tags"], + "automations": form_entry["automations"], + "language": form_entry["language"], + "city": form_entry["city"], + "country": form_entry["country"], + "utm_url": form_entry["utm_url"], + "client_comments": form_entry["client_comments"], + "current_download": form_entry["current_download"], + "latitude": form_entry["latitude"], + "longitude": form_entry["longitude"], } @@ -37,58 +37,64 @@ class RetryPendingLeadsTestSuite(MarketingTestCase): 🔽🔽🔽 With no form entries """ - @patch('breathecode.marketing.tasks.persist_single_lead.delay', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("breathecode.marketing.tasks.persist_single_lead.delay", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_without_formentries(self): command = Command() command.handle() - self.assertEqual(self.bc.database.list_of('marketing.FormEntry'), []) + self.assertEqual(self.bc.database.list_of("marketing.FormEntry"), []) self.assertEqual(tasks.persist_single_lead.delay.call_args_list, []) """ 🔽🔽🔽 With form entries not pending """ - @patch('breathecode.marketing.tasks.persist_single_lead.delay', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("breathecode.marketing.tasks.persist_single_lead.delay", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_without_pending_formentries(self): - model = self.bc.database.create(form_entry=[{'storage_status': 'PERSISTED'}, {'storage_status': 'PERSISTED'}], ) + model = self.bc.database.create( + form_entry=[{"storage_status": "PERSISTED"}, {"storage_status": "PERSISTED"}], + ) command = Command() command.handle() - 
self.assertEqual(self.bc.database.list_of('marketing.FormEntry'), self.bc.format.to_dict(model.form_entry)) + self.assertEqual(self.bc.database.list_of("marketing.FormEntry"), self.bc.format.to_dict(model.form_entry)) self.assertEqual(tasks.persist_single_lead.delay.call_args_list, []) """ 🔽🔽🔽 With form entries pending """ - @patch('breathecode.marketing.tasks.persist_single_lead.delay', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("breathecode.marketing.tasks.persist_single_lead.delay", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_with_pending_formentries(self): - model = self.bc.database.create(form_entry={'storage_status': 'PENDING'}, ) + model = self.bc.database.create( + form_entry={"storage_status": "PENDING"}, + ) command = Command() result = command.handle() model_dict = self.bc.format.to_dict(model.form_entry) - self.assertEqual(self.bc.database.list_of('marketing.FormEntry'), self.bc.format.to_dict([model.form_entry])) + self.assertEqual(self.bc.database.list_of("marketing.FormEntry"), self.bc.format.to_dict([model.form_entry])) self.assertEqual(tasks.persist_single_lead.delay.call_args_list, [call(serialize_form_entry(model_dict))]) """ 🔽🔽🔽 With two form entries """ - @patch('breathecode.marketing.tasks.persist_single_lead.delay', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("breathecode.marketing.tasks.persist_single_lead.delay", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_with_two_formentries(self): - model = self.bc.database.create(form_entry=[{'storage_status': 'PENDING'}, {'storage_status': 'PERSISTED'}], ) + model = self.bc.database.create( + form_entry=[{"storage_status": "PENDING"}, {"storage_status": "PERSISTED"}], + ) command = Command() result = command.handle() model_dict = self.bc.format.to_dict(model.form_entry) serialize_model = [serialize_form_entry(dict) for dict in model_dict] - self.assertEqual(self.bc.database.list_of('marketing.FormEntry'), self.bc.format.to_dict(model.form_entry)) + self.assertEqual(self.bc.database.list_of("marketing.FormEntry"), self.bc.format.to_dict(model.form_entry)) self.assertEqual(tasks.persist_single_lead.delay.call_args_list, [call(serialize_form_entry(model_dict[0]))]) diff --git a/breathecode/marketing/tests/mixins/__init__.py b/breathecode/marketing/tests/mixins/__init__.py index 05931279d..a2d17c59c 100644 --- a/breathecode/marketing/tests/mixins/__init__.py +++ b/breathecode/marketing/tests/mixins/__init__.py @@ -1,4 +1,5 @@ """ Admissions mixins """ + from .marketing_test_case import MarketingTestCase # noqa: F401 diff --git a/breathecode/marketing/tests/mixins/marketing_test_case.py b/breathecode/marketing/tests/mixins/marketing_test_case.py index 79c3fe794..26c32b771 100644 --- a/breathecode/marketing/tests/mixins/marketing_test_case.py +++ b/breathecode/marketing/tests/mixins/marketing_test_case.py @@ -1,19 +1,27 @@ """ Collections of mixins used to login in authorize microservice """ + import os import re from breathecode.authenticate.models import Token from unittest.mock import call from breathecode.notify.actions import get_template_content from rest_framework.test import APITestCase -from breathecode.tests.mixins import (GenerateModelsMixin, CacheMixin, TokenMixin, GenerateQueriesMixin, DatetimeMixin, - BreathecodeMixin) +from breathecode.tests.mixins import ( + GenerateModelsMixin, + CacheMixin, + TokenMixin, + 
GenerateQueriesMixin, + DatetimeMixin, + BreathecodeMixin, +) from breathecode.feedback.actions import strings -class MarketingTestCase(APITestCase, GenerateModelsMixin, CacheMixin, TokenMixin, GenerateQueriesMixin, DatetimeMixin, - BreathecodeMixin): +class MarketingTestCase( + APITestCase, GenerateModelsMixin, CacheMixin, TokenMixin, GenerateQueriesMixin, DatetimeMixin, BreathecodeMixin +): """MarketingTestCase with auth methods""" def tearDown(self): @@ -28,64 +36,71 @@ def setUp(self): def get_token_key(self, id=None): kwargs = {} if id: - kwargs['id'] = id - return Token.objects.filter(**kwargs).values_list('key', flat=True).first() + kwargs["id"] = id + return Token.objects.filter(**kwargs).values_list("key", flat=True).first() # This function was moved here because i want to use it as one example to # test the email def check_email_contain_a_correct_token(self, lang, dicts, mock, model): token = self.get_token_key() - question = dicts[0]['title'] + question = dicts[0]["title"] link = f"https://nps.4geeks.com/{dicts[0]['id']}?token={token}" args_list = mock.call_args_list template = get_template_content( - 'nps', { - 'QUESTION': question, - 'HIGHEST': dicts[0]['highest'], - 'LOWEST': dicts[0]['lowest'], - 'SUBJECT': question, - 'ANSWER_ID': dicts[0]['id'], - 'BUTTON': strings[lang]['button_label'], - 'LINK': link, - }, ['email']) + "nps", + { + "QUESTION": question, + "HIGHEST": dicts[0]["highest"], + "LOWEST": dicts[0]["lowest"], + "SUBJECT": question, + "ANSWER_ID": dicts[0]["id"], + "BUTTON": strings[lang]["button_label"], + "LINK": link, + }, + ["email"], + ) - self.assertEqual(args_list, [ - call('https://api.mailgun.net/v3/None/messages', - auth=('api', os.environ.get('MAILGUN_API_KEY', '')), - data={ - 'from': f"4Geeks <mailgun@{os.environ.get('MAILGUN_DOMAIN')}>", - 'to': model['user'].email, - 'subject': template['subject'], - 'text': template['text'], - 'html': template['html'] - }) - ]) + self.assertEqual( + args_list, + [ + call( + "https://api.mailgun.net/v3/None/messages", + auth=("api", os.environ.get("MAILGUN_API_KEY", "")), + data={ + "from": f"4Geeks <mailgun@{os.environ.get('MAILGUN_DOMAIN')}>", + "to": model["user"].email, + "subject": template["subject"], + "text": template["text"], + "html": template["html"], + }, + ) + ], + ) - html = template['html'] - del template['html'] + html = template["html"] + del template["html"] self.assertEqual( - template, { - 'SUBJECT': - question, - 'subject': - question, - 'text': - '\n' - '\n' - 'Please take 2 min to answer the following question:\n' - '\n' - f'{question}\n' - '\n' - 'Click here to vote: ' - f'{link}' - '\n' - '\n' - '\n' - '\n' - 'The 4Geeks Team' - }) + template, + { + "SUBJECT": question, + "subject": question, + "text": "\n" + "\n" + "Please take 2 min to answer the following question:\n" + "\n" + f"{question}\n" + "\n" + "Click here to vote: " + f"{link}" + "\n" + "\n" + "\n" + "\n" + "The 4Geeks Team", + }, + ) self.assertToken(token) self.assertTrue(link in html) @@ -93,41 +108,47 @@ def check_old_breathecode_calls(self, mock, model, course=None): extras = {} if course: - extras['field[2,0]'] = 'asdasd' + extras["field[2,0]"] = "asdasd" - self.assertEqual(mock.call_args_list, [ - call('POST', - 'https://old.hardcoded.breathecode.url/admin/api.php', - params=[('api_action', 'contact_sync'), ('api_key', model['active_campaign_academy'].ac_key), - ('api_output', 'json')], - data={ - 'email': 'pokemon@potato.io', - 'first_name': 'Konan', - 'last_name': 'Amegakure', - 'phone': '123123123', - 'field[18,0]': 
model['academy'].slug, - **extras, - }), - call('POST', - 'https://old.hardcoded.breathecode.url/api/3/contactAutomations', - headers={ - 'Accept': 'application/json', - 'Content-Type': 'application/json', - 'Api-Token': model['active_campaign_academy'].ac_key - }, - json={'contactAutomation': { - 'contact': 1, - 'automation': model['automation'].acp_id - }}), - call('POST', - 'https://old.hardcoded.breathecode.url/api/3/contactTags', - headers={ - 'Accept': 'application/json', - 'Content-Type': 'application/json', - 'Api-Token': model['active_campaign_academy'].ac_key - }, - json={'contactTag': { - 'contact': 1, - 'tag': model['tag'].acp_id - }}) - ]) + self.assertEqual( + mock.call_args_list, + [ + call( + "POST", + "https://old.hardcoded.breathecode.url/admin/api.php", + params=[ + ("api_action", "contact_sync"), + ("api_key", model["active_campaign_academy"].ac_key), + ("api_output", "json"), + ], + data={ + "email": "pokemon@potato.io", + "first_name": "Konan", + "last_name": "Amegakure", + "phone": "123123123", + "field[18,0]": model["academy"].slug, + **extras, + }, + ), + call( + "POST", + "https://old.hardcoded.breathecode.url/api/3/contactAutomations", + headers={ + "Accept": "application/json", + "Content-Type": "application/json", + "Api-Token": model["active_campaign_academy"].ac_key, + }, + json={"contactAutomation": {"contact": 1, "automation": model["automation"].acp_id}}, + ), + call( + "POST", + "https://old.hardcoded.breathecode.url/api/3/contactTags", + headers={ + "Accept": "application/json", + "Content-Type": "application/json", + "Api-Token": model["active_campaign_academy"].ac_key, + }, + json={"contactTag": {"contact": 1, "tag": model["tag"].acp_id}}, + ), + ], + ) diff --git a/breathecode/marketing/tests/signals/tests_cohort_saved.py b/breathecode/marketing/tests/signals/tests_cohort_saved.py index 95b1dfb70..edc9a05ed 100644 --- a/breathecode/marketing/tests/signals/tests_cohort_saved.py +++ b/breathecode/marketing/tests/signals/tests_cohort_saved.py @@ -8,7 +8,7 @@ class TestLead(LegacyAPITestCase): 🔽🔽🔽 Create without ActiveCampaignAcademy """ - @patch('breathecode.marketing.tasks.add_cohort_slug_as_acp_tag.delay', MagicMock()) + @patch("breathecode.marketing.tasks.add_cohort_slug_as_acp_tag.delay", MagicMock()) def test_cohort_saved__create__without_active_campaign_academy(self, enable_signals): enable_signals() """Test /cohort/:id/user without auth""" @@ -16,14 +16,14 @@ def test_cohort_saved__create__without_active_campaign_academy(self, enable_sign model = self.generate_models(cohort=True) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), [self.model_to_dict(model, 'cohort')]) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), [self.model_to_dict(model, "cohort")]) self.assertEqual(add_cohort_slug_as_acp_tag.delay.call_args_list, []) """ 🔽🔽🔽 Create with ActiveCampaignAcademy """ - @patch('breathecode.marketing.tasks.add_cohort_slug_as_acp_tag.delay', MagicMock()) + @patch("breathecode.marketing.tasks.add_cohort_slug_as_acp_tag.delay", MagicMock()) def test_cohort_saved__create__with_active_campaign_academy(self, enable_signals): enable_signals() """Test /cohort/:id/user without auth""" @@ -32,14 +32,14 @@ def test_cohort_saved__create__with_active_campaign_academy(self, enable_signals base = self.generate_models(academy=True, active_campaign_academy=True, skip_cohort=True) model = self.generate_models(cohort=True, models=base) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), [self.model_to_dict(model, 
'cohort')]) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), [self.model_to_dict(model, "cohort")]) self.assertEqual(add_cohort_slug_as_acp_tag.delay.call_args_list, [call(1, 1)]) """ 🔽🔽🔽 Update with ActiveCampaignAcademy """ - @patch('breathecode.marketing.tasks.add_cohort_slug_as_acp_tag.delay', MagicMock()) + @patch("breathecode.marketing.tasks.add_cohort_slug_as_acp_tag.delay", MagicMock()) def test_cohort_saved__update__with_active_campaign_academy(self, enable_signals): enable_signals() """Test /cohort/:id/user without auth""" @@ -48,8 +48,8 @@ def test_cohort_saved__update__with_active_campaign_academy(self, enable_signals base = self.generate_models(academy=True, active_campaign_academy=True, skip_cohort=True) model = self.generate_models(cohort=True, models=base) - model.cohort.slug = 'they-killed-kenny' + model.cohort.slug = "they-killed-kenny" model.cohort.save() - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), [self.model_to_dict(model, 'cohort')]) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), [self.model_to_dict(model, "cohort")]) self.assertEqual(add_cohort_slug_as_acp_tag.delay.call_args_list, [call(1, 1)]) diff --git a/breathecode/marketing/tests/signals/tests_downloadable_saved.py b/breathecode/marketing/tests/signals/tests_downloadable_saved.py index 41c4ce2ef..9793bd0cf 100644 --- a/breathecode/marketing/tests/signals/tests_downloadable_saved.py +++ b/breathecode/marketing/tests/signals/tests_downloadable_saved.py @@ -8,7 +8,7 @@ class TestLead(LegacyAPITestCase): 🔽🔽🔽 Create with ActiveCampaignAcademy """ - @patch('breathecode.marketing.tasks.add_downloadable_slug_as_acp_tag.delay', MagicMock()) + @patch("breathecode.marketing.tasks.add_downloadable_slug_as_acp_tag.delay", MagicMock()) def test_downloadable_saved__create__with_active_campaign_academy(self, enable_signals): enable_signals() """Test /downloadable/:id/user without auth""" @@ -17,6 +17,7 @@ def test_downloadable_saved__create__with_active_campaign_academy(self, enable_s base = self.generate_models(academy=True, active_campaign_academy=True, skip_event=True) model = self.generate_models(downloadable=True, models=base) - self.assertEqual(self.bc.database.list_of('marketing.Downloadable'), - [self.model_to_dict(model, 'downloadable')]) + self.assertEqual( + self.bc.database.list_of("marketing.Downloadable"), [self.model_to_dict(model, "downloadable")] + ) self.assertEqual(add_downloadable_slug_as_acp_tag.delay.call_args_list, [call(1, 1)]) diff --git a/breathecode/marketing/tests/signals/tests_event_saved.py b/breathecode/marketing/tests/signals/tests_event_saved.py index 871ab2bdb..abb05e977 100644 --- a/breathecode/marketing/tests/signals/tests_event_saved.py +++ b/breathecode/marketing/tests/signals/tests_event_saved.py @@ -8,7 +8,7 @@ class TestLead(LegacyAPITestCase): 🔽🔽🔽 Create without slug """ - @patch('breathecode.marketing.tasks.add_event_slug_as_acp_tag.delay', MagicMock()) + @patch("breathecode.marketing.tasks.add_event_slug_as_acp_tag.delay", MagicMock()) def test_event_saved__create__without_slug(self, enable_signals): enable_signals() """Test /cohort/:id/user without auth""" @@ -16,30 +16,30 @@ def test_event_saved__create__without_slug(self, enable_signals): model = self.bc.database.create(event=1) - self.assertEqual(self.bc.database.list_of('events.Event'), [self.bc.format.to_dict(model.event)]) + self.assertEqual(self.bc.database.list_of("events.Event"), [self.bc.format.to_dict(model.event)]) 
self.assertEqual(add_event_slug_as_acp_tag.delay.call_args_list, []) """ 🔽🔽🔽 Create with slug, without academy """ - @patch('breathecode.marketing.tasks.add_event_slug_as_acp_tag.delay', MagicMock()) + @patch("breathecode.marketing.tasks.add_event_slug_as_acp_tag.delay", MagicMock()) def test_event_saved__create__with_slug__without_academy(self, enable_signals): enable_signals() """Test /cohort/:id/user without auth""" from breathecode.marketing.tasks import add_event_slug_as_acp_tag - event = {'slug': 'they-killed-kenny'} + event = {"slug": "they-killed-kenny"} model = self.bc.database.create(event=event) - self.assertEqual(self.bc.database.list_of('events.Event'), [self.bc.format.to_dict(model.event)]) + self.assertEqual(self.bc.database.list_of("events.Event"), [self.bc.format.to_dict(model.event)]) self.assertEqual(add_event_slug_as_acp_tag.delay.call_args_list, []) """ 🔽🔽🔽 Create without slug, with academy """ - @patch('breathecode.marketing.tasks.add_event_slug_as_acp_tag.delay', MagicMock()) + @patch("breathecode.marketing.tasks.add_event_slug_as_acp_tag.delay", MagicMock()) def test_event_saved__create__without_slug__with_academy(self, enable_signals): enable_signals() """Test /cohort/:id/user without auth""" @@ -47,21 +47,21 @@ def test_event_saved__create__without_slug__with_academy(self, enable_signals): model = self.bc.database.create(event=1, academy=1) - self.assertEqual(self.bc.database.list_of('events.Event'), [self.bc.format.to_dict(model.event)]) + self.assertEqual(self.bc.database.list_of("events.Event"), [self.bc.format.to_dict(model.event)]) self.assertEqual(add_event_slug_as_acp_tag.delay.call_args_list, []) """ 🔽🔽🔽 Create with slug, with academy """ - @patch('breathecode.marketing.tasks.add_event_slug_as_acp_tag.delay', MagicMock()) + @patch("breathecode.marketing.tasks.add_event_slug_as_acp_tag.delay", MagicMock()) def test_event_saved__create__with_slug__with_academy(self, enable_signals): enable_signals() """Test /cohort/:id/user without auth""" from breathecode.marketing.tasks import add_event_slug_as_acp_tag - event = {'slug': 'they-killed-kenny'} + event = {"slug": "they-killed-kenny"} model = self.bc.database.create(event=event, academy=1) - self.assertEqual(self.bc.database.list_of('events.Event'), [self.bc.format.to_dict(model.event)]) + self.assertEqual(self.bc.database.list_of("events.Event"), [self.bc.format.to_dict(model.event)]) self.assertEqual(add_event_slug_as_acp_tag.delay.call_args_list, [call(1, 1)]) diff --git a/breathecode/marketing/tests/signals/tests_student_edustatus_updated.py b/breathecode/marketing/tests/signals/tests_student_edustatus_updated.py index dad5ace59..14bf0f6be 100644 --- a/breathecode/marketing/tests/signals/tests_student_edustatus_updated.py +++ b/breathecode/marketing/tests/signals/tests_student_edustatus_updated.py @@ -10,35 +10,38 @@ class TestLead(LegacyAPITestCase): 🔽🔽🔽 CohortUser without educational_status ACTIVE """ - @patch('breathecode.marketing.tasks.add_cohort_task_to_student.delay', MagicMock()) - @patch('logging.Logger.warning', MagicMock()) + @patch("breathecode.marketing.tasks.add_cohort_task_to_student.delay", MagicMock()) + @patch("logging.Logger.warning", MagicMock()) def test_cohort_saved__create__without_educational_status_active(self, enable_signals): - enable_signals('breathecode.admissions.signals.student_edu_status_updated') + enable_signals("breathecode.admissions.signals.student_edu_status_updated") import logging from breathecode.marketing.tasks import add_cohort_task_to_student - educational_status 
= random.choice(['POSTPONED', 'SUSPENDED', 'GRADUATED', 'DROPPED']) + educational_status = random.choice(["POSTPONED", "SUSPENDED", "GRADUATED", "DROPPED"]) cohort_user = { - 'educational_status': educational_status, + "educational_status": educational_status, } - with self.assertRaisesMessage(ValidationException, 'user-not-found-in-org'): + with self.assertRaisesMessage(ValidationException, "user-not-found-in-org"): model = self.generate_models(cohort_user=cohort_user) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), [ - { - 'cohort_id': 1, - 'finantial_status': None, - 'history_log': {}, - 'id': 1, - 'role': 'STUDENT', - 'user_id': 1, - 'watching': False, - 'educational_status': educational_status, - }, - ]) + self.assertEqual( + self.bc.database.list_of("admissions.CohortUser"), + [ + { + "cohort_id": 1, + "finantial_status": None, + "history_log": {}, + "id": 1, + "role": "STUDENT", + "user_id": 1, + "watching": False, + "educational_status": educational_status, + }, + ], + ) self.assertEqual(add_cohort_task_to_student.delay.call_args_list, []) self.assertEqual(logging.Logger.warning.call_args_list, []) @@ -46,22 +49,28 @@ def test_cohort_saved__create__without_educational_status_active(self, enable_si 🔽🔽🔽 CohortUser with status ACTIVE """ - @patch('breathecode.marketing.tasks.add_cohort_task_to_student.delay', MagicMock()) - @patch('logging.Logger.warning', MagicMock()) + @patch("breathecode.marketing.tasks.add_cohort_task_to_student.delay", MagicMock()) + @patch("logging.Logger.warning", MagicMock()) def test_cohort_saved__create__with_educational_status_active(self, enable_signals): - enable_signals('breathecode.admissions.signals.student_edu_status_updated') + enable_signals("breathecode.admissions.signals.student_edu_status_updated") import logging from breathecode.marketing.tasks import add_cohort_task_to_student - cohort_user_kwargs = {'educational_status': 'ACTIVE'} + cohort_user_kwargs = {"educational_status": "ACTIVE"} model = self.generate_models(cohort_user=True, cohort_user_kwargs=cohort_user_kwargs) - self.assertEqual(self.bc.database.list_of('admissions.CohortUser'), [self.model_to_dict(model, 'cohort_user')]) - self.assertEqual(add_cohort_task_to_student.delay.call_args_list, [ - call(model.user.id, model.cohort.id, model.cohort.academy.id), - ]) - self.assertEqual(logging.Logger.warning.call_args_list, [ - call(f'Student is now active in cohort `{model.cohort.slug}`, processing task'), - ]) + self.assertEqual(self.bc.database.list_of("admissions.CohortUser"), [self.model_to_dict(model, "cohort_user")]) + self.assertEqual( + add_cohort_task_to_student.delay.call_args_list, + [ + call(model.user.id, model.cohort.id, model.cohort.academy.id), + ], + ) + self.assertEqual( + logging.Logger.warning.call_args_list, + [ + call(f"Student is now active in cohort `{model.cohort.slug}`, processing task"), + ], + ) diff --git a/breathecode/marketing/tests/tasks/tests_add_cohort_slug_as_acp_tag.py b/breathecode/marketing/tests/tasks/tests_add_cohort_slug_as_acp_tag.py index 54f5cca35..ab784d8fe 100644 --- a/breathecode/marketing/tests/tasks/tests_add_cohort_slug_as_acp_tag.py +++ b/breathecode/marketing/tests/tasks/tests_add_cohort_slug_as_acp_tag.py @@ -1,6 +1,7 @@ """ Test /answer/:id """ + import os from unittest.mock import MagicMock, call, patch @@ -9,19 +10,19 @@ from ..mixins import MarketingTestCase -GOOGLE_CLOUD_KEY = os.getenv('GOOGLE_CLOUD_KEY', None) -AC_HOST = 'https://ac.ca' -AC_URL = f'{AC_HOST}/api/3/tags' +GOOGLE_CLOUD_KEY = 
os.getenv("GOOGLE_CLOUD_KEY", None) +AC_HOST = "https://ac.ca" +AC_URL = f"{AC_HOST}/api/3/tags" AC_RESPONSE = { - 'tag': { - 'id': 1, - 'tag': 'they-killed-kenny', + "tag": { + "id": 1, + "tag": "they-killed-kenny", }, } AC_ERROR_RESPONSE = { - 'message': 'they-killed-kenny', + "message": "they-killed-kenny", } -TASK_STARTED_MESSAGE = 'Task add_cohort_slug_as_acp_tag started' +TASK_STARTED_MESSAGE = "Task add_cohort_slug_as_acp_tag started" class AnswerIdTestSuite(MarketingTestCase): @@ -29,202 +30,236 @@ class AnswerIdTestSuite(MarketingTestCase): 🔽🔽🔽 Without Academy """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('requests.post', apply_requests_request_mock([(201, AC_URL, AC_RESPONSE)])) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("requests.post", apply_requests_request_mock([(201, AC_URL, AC_RESPONSE)])) def test_add_cohort_slug_as_acp_tag__without_academy(self): import logging add_cohort_slug_as_acp_tag.delay(1, 1) - self.assertEqual(self.bc.database.list_of('marketing.Tag'), []) + self.assertEqual(self.bc.database.list_of("marketing.Tag"), []) self.assertEqual(logging.Logger.info.call_args_list, [call(TASK_STARTED_MESSAGE)]) - self.assertEqual(logging.Logger.error.call_args_list, [call('Academy 1 not found', exc_info=True)]) + self.assertEqual(logging.Logger.error.call_args_list, [call("Academy 1 not found", exc_info=True)]) """ 🔽🔽🔽 Without ActiveCampaignAcademy """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('requests.post', apply_requests_request_mock([(201, AC_URL, AC_RESPONSE)])) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("requests.post", apply_requests_request_mock([(201, AC_URL, AC_RESPONSE)])) def test_add_cohort_slug_as_acp_tag__without_active_campaign_academy(self): import logging - with patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()): + with patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()): model = self.generate_models(academy=True) logging.Logger.info.call_args_list = [] add_cohort_slug_as_acp_tag.delay(1, 1) - self.assertEqual(self.bc.database.list_of('marketing.Tag'), []) + self.assertEqual(self.bc.database.list_of("marketing.Tag"), []) self.assertEqual(logging.Logger.info.call_args_list, [call(TASK_STARTED_MESSAGE)]) - self.assertEqual(logging.Logger.error.call_args_list, - [call('ActiveCampaign Academy 1 not found', exc_info=True)]) + self.assertEqual( + logging.Logger.error.call_args_list, [call("ActiveCampaign Academy 1 not found", exc_info=True)] + ) """ 🔽🔽🔽 Without Cohort """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('requests.post', apply_requests_request_mock([(201, AC_URL, AC_RESPONSE)])) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("requests.post", apply_requests_request_mock([(201, AC_URL, 
AC_RESPONSE)])) def test_add_cohort_slug_as_acp_tag__without_cohort(self): import logging - active_campaign_academy_kwargs = {'ac_url': AC_HOST} - model = self.generate_models(academy=True, - skip_cohort=True, - active_campaign_academy=True, - active_campaign_academy_kwargs=active_campaign_academy_kwargs) + active_campaign_academy_kwargs = {"ac_url": AC_HOST} + model = self.generate_models( + academy=True, + skip_cohort=True, + active_campaign_academy=True, + active_campaign_academy_kwargs=active_campaign_academy_kwargs, + ) logging.Logger.info.call_args_list = [] add_cohort_slug_as_acp_tag.delay(1, 1) - self.assertEqual(self.bc.database.list_of('marketing.Tag'), []) + self.assertEqual(self.bc.database.list_of("marketing.Tag"), []) self.assertEqual(logging.Logger.info.call_args_list, [call(TASK_STARTED_MESSAGE)]) - self.assertEqual(logging.Logger.error.call_args_list, [call('Cohort 1 not found', exc_info=True)]) + self.assertEqual(logging.Logger.error.call_args_list, [call("Cohort 1 not found", exc_info=True)]) """ 🔽🔽🔽 Create a Tag in active campaign """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('requests.post', apply_requests_request_mock([(201, AC_URL, AC_RESPONSE)])) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("requests.post", apply_requests_request_mock([(201, AC_URL, AC_RESPONSE)])) def test_add_cohort_slug_as_acp_tag(self): import logging - active_campaign_academy_kwargs = {'ac_url': AC_HOST} - with patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()): - model = self.generate_models(academy=True, - cohort=1, - active_campaign_academy=True, - active_campaign_academy_kwargs=active_campaign_academy_kwargs) + active_campaign_academy_kwargs = {"ac_url": AC_HOST} + with patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()): + model = self.generate_models( + academy=True, + cohort=1, + active_campaign_academy=True, + active_campaign_academy_kwargs=active_campaign_academy_kwargs, + ) logging.Logger.info.call_args_list = [] add_cohort_slug_as_acp_tag.delay(1, 1) - self.assertEqual(self.bc.database.list_of('marketing.Tag'), [{ - 'ac_academy_id': 1, - 'acp_id': 1, - 'automation_id': None, - 'id': 1, - 'slug': 'they-killed-kenny', - 'subscribers': 0, - 'tag_type': 'COHORT', - 'disputed_at': None, - 'description': None, - 'disputed_reason': None, - }]) - - self.assertEqual(logging.Logger.info.call_args_list, [ - call(TASK_STARTED_MESSAGE), - call(f'Creating tag `{model.cohort.slug}` on active campaign'), - call('Tag created successfully'), - ]) + self.assertEqual( + self.bc.database.list_of("marketing.Tag"), + [ + { + "ac_academy_id": 1, + "acp_id": 1, + "automation_id": None, + "id": 1, + "slug": "they-killed-kenny", + "subscribers": 0, + "tag_type": "COHORT", + "disputed_at": None, + "description": None, + "disputed_reason": None, + } + ], + ) + + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call(TASK_STARTED_MESSAGE), + call(f"Creating tag `{model.cohort.slug}` on active campaign"), + call("Tag created successfully"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, []) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - 
@patch('requests.post', apply_requests_request_mock([(201, AC_URL, AC_RESPONSE)])) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("requests.post", apply_requests_request_mock([(201, AC_URL, AC_RESPONSE)])) def test_add_cohort_slug_as_acp_tag_type_cohort(self): import logging - active_campaign_academy_kwargs = {'ac_url': AC_HOST} - with patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()): - model = self.generate_models(academy=True, - cohort=1, - active_campaign_academy=True, - active_campaign_academy_kwargs=active_campaign_academy_kwargs) + active_campaign_academy_kwargs = {"ac_url": AC_HOST} + with patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()): + model = self.generate_models( + academy=True, + cohort=1, + active_campaign_academy=True, + active_campaign_academy_kwargs=active_campaign_academy_kwargs, + ) logging.Logger.info.call_args_list = [] add_cohort_slug_as_acp_tag.delay(1, 1) - self.assertEqual(self.bc.database.list_of('marketing.Tag')[0]['tag_type'], 'COHORT') - - self.assertEqual(logging.Logger.info.call_args_list, [ - call(TASK_STARTED_MESSAGE), - call(f'Creating tag `{model.cohort.slug}` on active campaign'), - call('Tag created successfully'), - ]) + self.assertEqual(self.bc.database.list_of("marketing.Tag")[0]["tag_type"], "COHORT") + + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call(TASK_STARTED_MESSAGE), + call(f"Creating tag `{model.cohort.slug}` on active campaign"), + call("Tag created successfully"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, []) """ 🔽🔽🔽 Tag already exists in active campaign """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('requests.post', apply_requests_request_mock([(201, AC_URL, AC_RESPONSE)])) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("requests.post", apply_requests_request_mock([(201, AC_URL, AC_RESPONSE)])) def test_add_cohort_slug_as_acp_tag__tag_exists(self): import logging - active_campaign_academy_kwargs = {'ac_url': AC_HOST} - tag_kwargs = {'slug': 'they-killed-kenny'} - cohort_kwargs = {'slug': 'they-killed-kenny'} + active_campaign_academy_kwargs = {"ac_url": AC_HOST} + tag_kwargs = {"slug": "they-killed-kenny"} + cohort_kwargs = {"slug": "they-killed-kenny"} - with patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()): - model = self.generate_models(tag=True, - academy=True, - active_campaign_academy=True, - active_campaign_academy_kwargs=active_campaign_academy_kwargs, - tag_kwargs=tag_kwargs, - cohort=cohort_kwargs) + with patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()): + model = self.generate_models( + tag=True, + academy=True, + active_campaign_academy=True, + active_campaign_academy_kwargs=active_campaign_academy_kwargs, + tag_kwargs=tag_kwargs, + cohort=cohort_kwargs, + ) logging.Logger.info.call_args_list = [] add_cohort_slug_as_acp_tag.delay(1, 1) - self.assertEqual(self.bc.database.list_of('marketing.Tag'), [self.model_to_dict(model, 'tag')]) - - self.assertEqual(logging.Logger.info.call_args_list, [ - call(TASK_STARTED_MESSAGE), - ]) - self.assertEqual(logging.Logger.error.call_args_list, [ - 
call(f'Tag for cohort `{model.cohort.slug}` already exists', exc_info=True), - ]) + self.assertEqual(self.bc.database.list_of("marketing.Tag"), [self.model_to_dict(model, "tag")]) + + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call(TASK_STARTED_MESSAGE), + ], + ) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call(f"Tag for cohort `{model.cohort.slug}` already exists", exc_info=True), + ], + ) """ 🔽🔽🔽 Active campaign return 404 (check cases status code are not equal to 201) """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.admissions.signals.cohort_saved.send_robust', MagicMock()) - @patch('requests.post', apply_requests_request_mock([(404, AC_URL, AC_ERROR_RESPONSE)])) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.admissions.signals.cohort_saved.send_robust", MagicMock()) + @patch("requests.post", apply_requests_request_mock([(404, AC_URL, AC_ERROR_RESPONSE)])) def test_add_cohort_slug_as_acp_tag__status_404(self): import logging - active_campaign_academy_kwargs = {'ac_url': AC_HOST} - with patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()): - model = self.generate_models(academy=True, - cohort=1, - active_campaign_academy=True, - active_campaign_academy_kwargs=active_campaign_academy_kwargs) + active_campaign_academy_kwargs = {"ac_url": AC_HOST} + with patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()): + model = self.generate_models( + academy=True, + cohort=1, + active_campaign_academy=True, + active_campaign_academy_kwargs=active_campaign_academy_kwargs, + ) logging.Logger.info.call_args_list = [] add_cohort_slug_as_acp_tag.delay(1, 1) - self.assertEqual(self.bc.database.list_of('marketing.Tag'), []) - - self.assertEqual(logging.Logger.info.call_args_list, [ - call(TASK_STARTED_MESSAGE), - call(f'Creating tag `{model.cohort.slug}` on active campaign'), - ]) - self.assertEqual(logging.Logger.error.call_args_list, [ - call(f'Error creating tag `{model.cohort.slug}` with status=404'), - call(AC_ERROR_RESPONSE), - call(f'Error creating tag `{model.cohort.slug}` with status=404', exc_info=True), - ]) + self.assertEqual(self.bc.database.list_of("marketing.Tag"), []) + + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call(TASK_STARTED_MESSAGE), + call(f"Creating tag `{model.cohort.slug}` on active campaign"), + ], + ) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call(f"Error creating tag `{model.cohort.slug}` with status=404"), + call(AC_ERROR_RESPONSE), + call(f"Error creating tag `{model.cohort.slug}` with status=404", exc_info=True), + ], + ) diff --git a/breathecode/marketing/tests/tasks/tests_add_cohort_task_to_student.py b/breathecode/marketing/tests/tasks/tests_add_cohort_task_to_student.py index d4843707b..385d2c902 100644 --- a/breathecode/marketing/tests/tasks/tests_add_cohort_task_to_student.py +++ b/breathecode/marketing/tests/tasks/tests_add_cohort_task_to_student.py @@ -1,6 +1,7 @@ """ Test /answer/:id """ + import os from unittest.mock import MagicMock, call, patch @@ -10,21 +11,21 @@ from ..mixins import MarketingTestCase -GOOGLE_CLOUD_KEY = os.getenv('GOOGLE_CLOUD_KEY', None) -AC_HOST = 'https://ac.ca' -AC_URL = f'{AC_HOST}/api/3/contacts' -AC_POST_URL = f'{AC_HOST}/api/3/contactTags' +GOOGLE_CLOUD_KEY = os.getenv("GOOGLE_CLOUD_KEY", None) +AC_HOST = "https://ac.ca" +AC_URL = f"{AC_HOST}/api/3/contacts" +AC_POST_URL = 
f"{AC_HOST}/api/3/contactTags" AC_RESPONSE = { - 'contacts': [ + "contacts": [ { - 'id': 1, - 'tag': 'they-killed-kenny', + "id": 1, + "tag": "they-killed-kenny", }, ] } -AC_EMPTY_RESPONSE = {'contacts': []} -AC_POST_RESPONSE = {'contactTag': {}} -TASK_STARTED_MESSAGE = 'Task add_cohort_task_to_student started' +AC_EMPTY_RESPONSE = {"contacts": []} +AC_POST_RESPONSE = {"contactTag": {}} +TASK_STARTED_MESSAGE = "Task add_cohort_task_to_student started" class AnswerIdTestSuite(MarketingTestCase): @@ -32,28 +33,28 @@ class AnswerIdTestSuite(MarketingTestCase): 🔽🔽🔽 Without Academy """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('requests.get', apply_requests_request_mock([(200, AC_URL, AC_RESPONSE)])) - @patch('requests.post', apply_requests_post_mock([(201, AC_POST_URL, AC_POST_RESPONSE)])) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("requests.get", apply_requests_request_mock([(200, AC_URL, AC_RESPONSE)])) + @patch("requests.post", apply_requests_post_mock([(201, AC_POST_URL, AC_POST_RESPONSE)])) def test_add_cohort_task_to_student__without_academy(self): import logging add_cohort_task_to_student.delay(1, 1, 1) self.assertEqual(logging.Logger.info.call_args_list, [call(TASK_STARTED_MESSAGE)]) - self.assertEqual(logging.Logger.error.call_args_list, [call('Academy 1 not found', exc_info=True)]) + self.assertEqual(logging.Logger.error.call_args_list, [call("Academy 1 not found", exc_info=True)]) """ 🔽🔽🔽 Without ActiveCampaignAcademy """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('requests.get', apply_requests_request_mock([(200, AC_URL, AC_RESPONSE)])) - @patch('requests.post', apply_requests_post_mock([(201, AC_POST_URL, AC_POST_RESPONSE)])) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("requests.get", apply_requests_request_mock([(200, AC_URL, AC_RESPONSE)])) + @patch("requests.post", apply_requests_post_mock([(201, AC_POST_URL, AC_POST_RESPONSE)])) def test_add_cohort_task_to_student__without_active_campaign_academy(self): import logging @@ -64,237 +65,280 @@ def test_add_cohort_task_to_student__without_active_campaign_academy(self): add_cohort_task_to_student.delay(1, 1, 1) self.assertEqual(logging.Logger.info.call_args_list, [call(TASK_STARTED_MESSAGE)]) - self.assertEqual(logging.Logger.error.call_args_list, - [call('ActiveCampaign Academy 1 not found', exc_info=True)]) + self.assertEqual( + logging.Logger.error.call_args_list, [call("ActiveCampaign Academy 1 not found", exc_info=True)] + ) """ 🔽🔽🔽 Without User """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('requests.get', apply_requests_request_mock([(200, AC_URL, AC_RESPONSE)])) - @patch('requests.post', apply_requests_post_mock([(201, AC_POST_URL, AC_POST_RESPONSE)])) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + 
@patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("requests.get", apply_requests_request_mock([(200, AC_URL, AC_RESPONSE)])) + @patch("requests.post", apply_requests_post_mock([(201, AC_POST_URL, AC_POST_RESPONSE)])) def test_add_cohort_task_to_student__without_user(self): import logging - active_campaign_academy_kwargs = {'ac_url': AC_HOST} - model = self.generate_models(academy=True, - skip_cohort=True, - active_campaign_academy=True, - active_campaign_academy_kwargs=active_campaign_academy_kwargs) + active_campaign_academy_kwargs = {"ac_url": AC_HOST} + model = self.generate_models( + academy=True, + skip_cohort=True, + active_campaign_academy=True, + active_campaign_academy_kwargs=active_campaign_academy_kwargs, + ) logging.Logger.info.call_args_list = [] add_cohort_task_to_student.delay(1, 1, 1) self.assertEqual(logging.Logger.info.call_args_list, [call(TASK_STARTED_MESSAGE)]) - self.assertEqual(logging.Logger.error.call_args_list, [call('User 1 not found', exc_info=True)]) + self.assertEqual(logging.Logger.error.call_args_list, [call("User 1 not found", exc_info=True)]) """ 🔽🔽🔽 Without Cohort """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('requests.get', apply_requests_request_mock([(200, AC_URL, AC_RESPONSE)])) - @patch('requests.post', apply_requests_post_mock([(201, AC_POST_URL, AC_POST_RESPONSE)])) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("requests.get", apply_requests_request_mock([(200, AC_URL, AC_RESPONSE)])) + @patch("requests.post", apply_requests_post_mock([(201, AC_POST_URL, AC_POST_RESPONSE)])) def test_add_cohort_task_to_student__without_cohort(self): import logging - active_campaign_academy_kwargs = {'ac_url': AC_HOST} - model = self.generate_models(academy=True, - skip_cohort=True, - user=True, - active_campaign_academy=True, - active_campaign_academy_kwargs=active_campaign_academy_kwargs) + active_campaign_academy_kwargs = {"ac_url": AC_HOST} + model = self.generate_models( + academy=True, + skip_cohort=True, + user=True, + active_campaign_academy=True, + active_campaign_academy_kwargs=active_campaign_academy_kwargs, + ) logging.Logger.info.call_args_list = [] add_cohort_task_to_student.delay(1, 1, 1) self.assertEqual(logging.Logger.info.call_args_list, [call(TASK_STARTED_MESSAGE)]) - self.assertEqual(logging.Logger.error.call_args_list, [call('Cohort 1 not found', exc_info=True)]) + self.assertEqual(logging.Logger.error.call_args_list, [call("Cohort 1 not found", exc_info=True)]) """ 🔽🔽🔽 Tag not exists """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('requests.get', apply_requests_request_mock([(200, AC_URL, AC_RESPONSE)])) - @patch('requests.post', apply_requests_post_mock([(201, AC_POST_URL, AC_POST_RESPONSE)])) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("requests.get", apply_requests_request_mock([(200, AC_URL, AC_RESPONSE)])) + @patch("requests.post", apply_requests_post_mock([(201, AC_POST_URL, AC_POST_RESPONSE)])) 
def test_add_cohort_task_to_student(self): import logging - active_campaign_academy_kwargs = {'ac_url': AC_HOST} - with patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()): - model = self.generate_models(academy=True, - cohort=1, - user=True, - active_campaign_academy=True, - active_campaign_academy_kwargs=active_campaign_academy_kwargs) + active_campaign_academy_kwargs = {"ac_url": AC_HOST} + with patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()): + model = self.generate_models( + academy=True, + cohort=1, + user=True, + active_campaign_academy=True, + active_campaign_academy_kwargs=active_campaign_academy_kwargs, + ) logging.Logger.info.call_args_list = [] add_cohort_task_to_student.delay(1, 1, 1) - self.assertEqual(logging.Logger.info.call_args_list, [ - call(TASK_STARTED_MESSAGE), - ]) - self.assertEqual(logging.Logger.error.call_args_list, [ - call( - f'Cohort tag `{model.cohort.slug}` does not exist in the system, the tag could not be added to the ' - 'student. This tag was supposed to be created by the system when creating a new cohort', - exc_info=True), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call(TASK_STARTED_MESSAGE), + ], + ) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call( + f"Cohort tag `{model.cohort.slug}` does not exist in the system, the tag could not be added to the " + "student. This tag was supposed to be created by the system when creating a new cohort", + exc_info=True, + ), + ], + ) """ 🔽🔽🔽 Tag already exists in active campaign """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('requests.get', apply_requests_request_mock([(200, AC_URL, AC_RESPONSE)])) - @patch('requests.post', apply_requests_post_mock([(201, AC_POST_URL, AC_POST_RESPONSE)])) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("requests.get", apply_requests_request_mock([(200, AC_URL, AC_RESPONSE)])) + @patch("requests.post", apply_requests_post_mock([(201, AC_POST_URL, AC_POST_RESPONSE)])) def test_add_cohort_task_to_student__tag_exists(self): import logging - active_campaign_academy_kwargs = {'ac_url': AC_HOST} - tag_kwargs = {'slug': 'they-killed-kenny'} - cohort_kwargs = {'slug': 'they-killed-kenny'} - - with patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()): - model = self.generate_models(tag=True, - user=True, - academy=True, - active_campaign_academy=True, - active_campaign_academy_kwargs=active_campaign_academy_kwargs, - tag_kwargs=tag_kwargs, - cohort=cohort_kwargs) + active_campaign_academy_kwargs = {"ac_url": AC_HOST} + tag_kwargs = {"slug": "they-killed-kenny"} + cohort_kwargs = {"slug": "they-killed-kenny"} + + with patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()): + model = self.generate_models( + tag=True, + user=True, + academy=True, + active_campaign_academy=True, + active_campaign_academy_kwargs=active_campaign_academy_kwargs, + tag_kwargs=tag_kwargs, + cohort=cohort_kwargs, + ) logging.Logger.info.call_args_list = [] add_cohort_task_to_student.delay(1, 1, 1) - self.assertEqual(self.bc.database.list_of('marketing.Tag'), [self.model_to_dict(model, 'tag')]) - self.assertEqual(logging.Logger.info.call_args_list, [ - call(TASK_STARTED_MESSAGE), - 
call('Adding tag 1 to acp contact 1'), - ]) + self.assertEqual(self.bc.database.list_of("marketing.Tag"), [self.model_to_dict(model, "tag")]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call(TASK_STARTED_MESSAGE), + call("Adding tag 1 to acp contact 1"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, []) """ 🔽🔽🔽 Tag already exists in active campaign and return status 404 in post method """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('requests.get', apply_requests_request_mock([(200, AC_URL, AC_RESPONSE)])) - @patch('requests.post', apply_requests_post_mock([(404, AC_POST_URL, AC_POST_RESPONSE)])) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("requests.get", apply_requests_request_mock([(200, AC_URL, AC_RESPONSE)])) + @patch("requests.post", apply_requests_post_mock([(404, AC_POST_URL, AC_POST_RESPONSE)])) def test_add_cohort_task_to_student__tag_exists__active_campaign_returns_404(self): import logging - active_campaign_academy_kwargs = {'ac_url': AC_HOST} - tag_kwargs = {'slug': 'they-killed-kenny'} - cohort_kwargs = {'slug': 'they-killed-kenny'} - - with patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()): - model = self.generate_models(tag=True, - user=True, - academy=True, - active_campaign_academy=True, - active_campaign_academy_kwargs=active_campaign_academy_kwargs, - tag_kwargs=tag_kwargs, - cohort=cohort_kwargs) + active_campaign_academy_kwargs = {"ac_url": AC_HOST} + tag_kwargs = {"slug": "they-killed-kenny"} + cohort_kwargs = {"slug": "they-killed-kenny"} + + with patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()): + model = self.generate_models( + tag=True, + user=True, + academy=True, + active_campaign_academy=True, + active_campaign_academy_kwargs=active_campaign_academy_kwargs, + tag_kwargs=tag_kwargs, + cohort=cohort_kwargs, + ) logging.Logger.info.call_args_list = [] add_cohort_task_to_student.delay(1, 1, 1) - self.assertEqual(self.bc.database.list_of('marketing.Tag'), [self.model_to_dict(model, 'tag')]) - self.assertEqual(logging.Logger.info.call_args_list, [ - call(TASK_STARTED_MESSAGE), - call('Adding tag 1 to acp contact 1'), - ]) - self.assertEqual(logging.Logger.error.call_args_list, [ - call(AC_POST_RESPONSE), - call('Failed to add tag to contact 1 with status=404', exc_info=True), - ]) + self.assertEqual(self.bc.database.list_of("marketing.Tag"), [self.model_to_dict(model, "tag")]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call(TASK_STARTED_MESSAGE), + call("Adding tag 1 to acp contact 1"), + ], + ) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call(AC_POST_RESPONSE), + call("Failed to add tag to contact 1 with status=404", exc_info=True), + ], + ) """ 🔽🔽🔽 Tag already exists in active campaign and return status 201 but the api was changed """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('requests.get', apply_requests_request_mock([(200, AC_URL, AC_RESPONSE)])) - @patch('requests.post', apply_requests_post_mock([(201, AC_POST_URL, {})])) + @patch("logging.Logger.info", MagicMock()) + 
@patch("logging.Logger.error", MagicMock()) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("requests.get", apply_requests_request_mock([(200, AC_URL, AC_RESPONSE)])) + @patch("requests.post", apply_requests_post_mock([(201, AC_POST_URL, {})])) def test_add_cohort_task_to_student__tag_exists__the_api_was_changed(self): import logging - active_campaign_academy_kwargs = {'ac_url': AC_HOST} - tag_kwargs = {'slug': 'they-killed-kenny'} - cohort_kwargs = {'slug': 'they-killed-kenny'} - - with patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()): - model = self.generate_models(tag=True, - user=True, - academy=True, - active_campaign_academy=True, - active_campaign_academy_kwargs=active_campaign_academy_kwargs, - tag_kwargs=tag_kwargs, - cohort=cohort_kwargs) + active_campaign_academy_kwargs = {"ac_url": AC_HOST} + tag_kwargs = {"slug": "they-killed-kenny"} + cohort_kwargs = {"slug": "they-killed-kenny"} + + with patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()): + model = self.generate_models( + tag=True, + user=True, + academy=True, + active_campaign_academy=True, + active_campaign_academy_kwargs=active_campaign_academy_kwargs, + tag_kwargs=tag_kwargs, + cohort=cohort_kwargs, + ) logging.Logger.info.call_args_list = [] add_cohort_task_to_student.delay(1, 1, 1) - self.assertEqual(self.bc.database.list_of('marketing.Tag'), [self.model_to_dict(model, 'tag')]) - self.assertEqual(logging.Logger.info.call_args_list, [ - call(TASK_STARTED_MESSAGE), - call('Adding tag 1 to acp contact 1'), - ]) - self.assertEqual(logging.Logger.error.call_args_list, [ - call('Bad response format from ActiveCampaign when adding a new tag to contact', exc_info=True), - ]) + self.assertEqual(self.bc.database.list_of("marketing.Tag"), [self.model_to_dict(model, "tag")]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call(TASK_STARTED_MESSAGE), + call("Adding tag 1 to acp contact 1"), + ], + ) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call("Bad response format from ActiveCampaign when adding a new tag to contact", exc_info=True), + ], + ) """ 🔽🔽🔽 Active campaign return a empty list of contacts """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('requests.get', apply_requests_request_mock([(200, AC_URL, AC_EMPTY_RESPONSE)])) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("requests.get", apply_requests_request_mock([(200, AC_URL, AC_EMPTY_RESPONSE)])) def test_add_cohort_task_to_student__status_404(self): import logging - active_campaign_academy_kwargs = {'ac_url': AC_HOST} - tag_kwargs = {'slug': 'they-killed-kenny'} - cohort_kwargs = {'slug': 'they-killed-kenny'} - with patch('breathecode.activity.tasks.get_attendancy_log.delay', MagicMock()): - model = self.generate_models(academy=True, - tag=True, - user=True, - active_campaign_academy=True, - active_campaign_academy_kwargs=active_campaign_academy_kwargs, - tag_kwargs=tag_kwargs, - cohort=cohort_kwargs) + active_campaign_academy_kwargs = {"ac_url": AC_HOST} + tag_kwargs = {"slug": "they-killed-kenny"} + cohort_kwargs = {"slug": "they-killed-kenny"} + with patch("breathecode.activity.tasks.get_attendancy_log.delay", MagicMock()): + model = 
self.generate_models( + academy=True, + tag=True, + user=True, + active_campaign_academy=True, + active_campaign_academy_kwargs=active_campaign_academy_kwargs, + tag_kwargs=tag_kwargs, + cohort=cohort_kwargs, + ) logging.Logger.info.call_args_list = [] add_cohort_task_to_student.delay(1, 1, 1) - self.assertEqual(logging.Logger.info.call_args_list, [ - call(TASK_STARTED_MESSAGE), - ]) - self.assertEqual(logging.Logger.error.call_args_list, [ - call(f'Problem fetching contact in activecampaign with email {model.user.email}', exc_info=True), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call(TASK_STARTED_MESSAGE), + ], + ) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call(f"Problem fetching contact in activecampaign with email {model.user.email}", exc_info=True), + ], + ) diff --git a/breathecode/marketing/tests/tasks/tests_add_downloadable_tags_as_acp_tag.py b/breathecode/marketing/tests/tasks/tests_add_downloadable_tags_as_acp_tag.py index 3aa03099c..d4cf4c070 100644 --- a/breathecode/marketing/tests/tasks/tests_add_downloadable_tags_as_acp_tag.py +++ b/breathecode/marketing/tests/tasks/tests_add_downloadable_tags_as_acp_tag.py @@ -1,6 +1,7 @@ """ Test /answer/:id """ + import os from unittest.mock import MagicMock, call, patch @@ -9,19 +10,19 @@ from ..mixins import MarketingTestCase -GOOGLE_CLOUD_KEY = os.getenv('GOOGLE_CLOUD_KEY', None) -AC_HOST = 'https://ac.ca' -AC_URL = f'{AC_HOST}/api/3/tags' +GOOGLE_CLOUD_KEY = os.getenv("GOOGLE_CLOUD_KEY", None) +AC_HOST = "https://ac.ca" +AC_URL = f"{AC_HOST}/api/3/tags" AC_RESPONSE = { - 'tag': { - 'id': 1, - 'tag': 'down-they-killed-kenny', + "tag": { + "id": 1, + "tag": "down-they-killed-kenny", }, } AC_ERROR_RESPONSE = { - 'message': 'they-killed-kenny', + "message": "they-killed-kenny", } -TASK_STARTED_MESSAGE = 'Task add_downloadable_slug_as_acp_tag started' +TASK_STARTED_MESSAGE = "Task add_downloadable_slug_as_acp_tag started" class AnswerIdTestSuite(MarketingTestCase): @@ -29,27 +30,27 @@ class AnswerIdTestSuite(MarketingTestCase): 🔽🔽🔽 Without Academy """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.marketing.signals.downloadable_saved.send_robust', MagicMock()) - @patch('requests.post', apply_requests_request_mock([(201, AC_URL, AC_RESPONSE)])) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.marketing.signals.downloadable_saved.send_robust", MagicMock()) + @patch("requests.post", apply_requests_request_mock([(201, AC_URL, AC_RESPONSE)])) def test_add_downloadable_slug_as_acp_tag__without_academy(self): import logging add_downloadable_slug_as_acp_tag.delay(1, 1) - self.assertEqual(self.bc.database.list_of('marketing.Tag'), []) + self.assertEqual(self.bc.database.list_of("marketing.Tag"), []) self.assertEqual(logging.Logger.info.call_args_list, [call(TASK_STARTED_MESSAGE)]) - self.assertEqual(logging.Logger.error.call_args_list, [call('Academy 1 not found', exc_info=True)]) + self.assertEqual(logging.Logger.error.call_args_list, [call("Academy 1 not found", exc_info=True)]) """ 🔽🔽🔽 Without ActiveCampaignAcademy """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.marketing.signals.downloadable_saved.send_robust', MagicMock()) - @patch('requests.post', apply_requests_request_mock([(201, AC_URL, AC_RESPONSE)])) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + 
@patch("breathecode.marketing.signals.downloadable_saved.send_robust", MagicMock()) + @patch("requests.post", apply_requests_request_mock([(201, AC_URL, AC_RESPONSE)])) def test_add_downloadable_slug_as_acp_tag__without_active_campaign_academy(self): import logging @@ -59,113 +60,132 @@ def test_add_downloadable_slug_as_acp_tag__without_active_campaign_academy(self) add_downloadable_slug_as_acp_tag.delay(1, 1) - self.assertEqual(self.bc.database.list_of('marketing.Tag'), []) + self.assertEqual(self.bc.database.list_of("marketing.Tag"), []) self.assertEqual(logging.Logger.info.call_args_list, [call(TASK_STARTED_MESSAGE)]) - self.assertEqual(logging.Logger.error.call_args_list, - [call('ActiveCampaign Academy 1 not found', exc_info=True)]) + self.assertEqual( + logging.Logger.error.call_args_list, [call("ActiveCampaign Academy 1 not found", exc_info=True)] + ) """ 🔽🔽🔽 Without Downloadable """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.marketing.signals.downloadable_saved.send_robust', MagicMock()) - @patch('requests.post', apply_requests_request_mock([(201, AC_URL, AC_RESPONSE)])) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.marketing.signals.downloadable_saved.send_robust", MagicMock()) + @patch("requests.post", apply_requests_request_mock([(201, AC_URL, AC_RESPONSE)])) def test_add_downloadable_slug_as_acp_tag__without_event(self): import logging - active_campaign_academy_kwargs = {'ac_url': AC_HOST} - model = self.generate_models(academy=True, - active_campaign_academy=True, - active_campaign_academy_kwargs=active_campaign_academy_kwargs) + active_campaign_academy_kwargs = {"ac_url": AC_HOST} + model = self.generate_models( + academy=True, active_campaign_academy=True, active_campaign_academy_kwargs=active_campaign_academy_kwargs + ) logging.Logger.info.call_args_list = [] add_downloadable_slug_as_acp_tag.delay(1, 1) - self.assertEqual(self.bc.database.list_of('marketing.Tag'), []) + self.assertEqual(self.bc.database.list_of("marketing.Tag"), []) self.assertEqual(logging.Logger.info.call_args_list, [call(TASK_STARTED_MESSAGE)]) - self.assertEqual(logging.Logger.error.call_args_list, [call('Downloadable 1 not found', exc_info=True)]) + self.assertEqual(logging.Logger.error.call_args_list, [call("Downloadable 1 not found", exc_info=True)]) """ 🔽🔽🔽 Create a Tag in active campaign """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.marketing.signals.downloadable_saved.send_robust', MagicMock()) - @patch('requests.post', apply_requests_request_mock([(201, AC_URL, AC_RESPONSE)])) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.marketing.signals.downloadable_saved.send_robust", MagicMock()) + @patch("requests.post", apply_requests_request_mock([(201, AC_URL, AC_RESPONSE)])) def test_add_downloadable_slug_as_acp_tag(self): import logging - active_campaign_academy_kwargs = {'ac_url': AC_HOST} - model = self.generate_models(academy=True, - downloadable=True, - active_campaign_academy=True, - active_campaign_academy_kwargs=active_campaign_academy_kwargs) + active_campaign_academy_kwargs = {"ac_url": AC_HOST} + model = self.generate_models( + academy=True, + downloadable=True, + active_campaign_academy=True, + active_campaign_academy_kwargs=active_campaign_academy_kwargs, + ) logging.Logger.info.call_args_list = [] 
add_downloadable_slug_as_acp_tag.delay(1, 1) - self.assertEqual(self.bc.database.list_of('marketing.Tag'), [{ - 'ac_academy_id': 1, - 'acp_id': 1, - 'automation_id': None, - 'disputed_at': None, - 'description': None, - 'disputed_reason': None, - 'id': 1, - 'slug': 'down-they-killed-kenny', - 'subscribers': 0, - 'tag_type': 'DOWNLOADABLE', - }]) - - self.assertEqual(logging.Logger.info.call_args_list, [ - call(TASK_STARTED_MESSAGE), - call(f'Creating tag `down-{model.downloadable.slug}` on active campaign'), - call('Tag created successfully'), - ]) + self.assertEqual( + self.bc.database.list_of("marketing.Tag"), + [ + { + "ac_academy_id": 1, + "acp_id": 1, + "automation_id": None, + "disputed_at": None, + "description": None, + "disputed_reason": None, + "id": 1, + "slug": "down-they-killed-kenny", + "subscribers": 0, + "tag_type": "DOWNLOADABLE", + } + ], + ) + + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call(TASK_STARTED_MESSAGE), + call(f"Creating tag `down-{model.downloadable.slug}` on active campaign"), + call("Tag created successfully"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, []) """ 🔽🔽🔽 Tag already exists in active campaign """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.marketing.signals.downloadable_saved.send_robust', MagicMock()) - @patch('requests.post', apply_requests_request_mock([(201, AC_URL, AC_RESPONSE)])) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.marketing.signals.downloadable_saved.send_robust", MagicMock()) + @patch("requests.post", apply_requests_request_mock([(201, AC_URL, AC_RESPONSE)])) def test_add_event_slug_as_acp_tag__tag_exists(self): import logging - active_campaign_academy_kwargs = {'ac_url': AC_HOST} - tag_kwargs = {'slug': 'down-they-killed-kenny', 'tag_type': 'DOWNLOADABLE'} + active_campaign_academy_kwargs = {"ac_url": AC_HOST} + tag_kwargs = {"slug": "down-they-killed-kenny", "tag_type": "DOWNLOADABLE"} downloadable_kwargs = { - 'slug': 'they-killed-kenny', - 'name': 'they-killed-kenny', + "slug": "they-killed-kenny", + "name": "they-killed-kenny", } - model = self.generate_models(tag=True, - academy=True, - active_campaign_academy=True, - active_campaign_academy_kwargs=active_campaign_academy_kwargs, - tag_kwargs=tag_kwargs, - downloadable=downloadable_kwargs) + model = self.generate_models( + tag=True, + academy=True, + active_campaign_academy=True, + active_campaign_academy_kwargs=active_campaign_academy_kwargs, + tag_kwargs=tag_kwargs, + downloadable=downloadable_kwargs, + ) logging.Logger.info.call_args_list = [] add_downloadable_slug_as_acp_tag.delay(1, 1) - self.assertEqual(self.bc.database.list_of('marketing.Tag'), [self.model_to_dict(model, 'tag')]) - - self.assertEqual(logging.Logger.info.call_args_list, [ - call(TASK_STARTED_MESSAGE), - ]) - - self.assertEqual(logging.Logger.error.call_args_list, [ - call(f'Tag for downloadable `{model.downloadable.slug}` already exists', exc_info=True), - ]) + self.assertEqual(self.bc.database.list_of("marketing.Tag"), [self.model_to_dict(model, "tag")]) + + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call(TASK_STARTED_MESSAGE), + ], + ) + + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call(f"Tag for downloadable `{model.downloadable.slug}` already exists", exc_info=True), + ], + ) diff --git a/breathecode/marketing/tests/tasks/tests_add_event_slug_as_acp_tag.py 
b/breathecode/marketing/tests/tasks/tests_add_event_slug_as_acp_tag.py index e461a7428..542e6bcda 100644 --- a/breathecode/marketing/tests/tasks/tests_add_event_slug_as_acp_tag.py +++ b/breathecode/marketing/tests/tasks/tests_add_event_slug_as_acp_tag.py @@ -1,6 +1,7 @@ """ Test /answer/:id """ + import os from unittest.mock import MagicMock, call, patch @@ -9,20 +10,20 @@ from ..mixins import MarketingTestCase -GOOGLE_CLOUD_KEY = os.getenv('GOOGLE_CLOUD_KEY', None) -AC_HOST = 'https://ac.ca' -AC_URL = f'{AC_HOST}/api/3/tags' +GOOGLE_CLOUD_KEY = os.getenv("GOOGLE_CLOUD_KEY", None) +AC_HOST = "https://ac.ca" +AC_URL = f"{AC_HOST}/api/3/tags" AC_RESPONSE = { - 'tag': { - 'id': 1, - 'tag_type': 'EVENT', - 'tag': 'event-they-killed-kenny', + "tag": { + "id": 1, + "tag_type": "EVENT", + "tag": "event-they-killed-kenny", }, } AC_ERROR_RESPONSE = { - 'message': 'they-killed-kenny', + "message": "they-killed-kenny", } -TASK_STARTED_MESSAGE = 'Task add_event_slug_as_acp_tag started' +TASK_STARTED_MESSAGE = "Task add_event_slug_as_acp_tag started" class AnswerIdTestSuite(MarketingTestCase): @@ -30,27 +31,27 @@ class AnswerIdTestSuite(MarketingTestCase): 🔽🔽🔽 Without Academy """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.events.signals.event_saved.send_robust', MagicMock()) - @patch('requests.post', apply_requests_request_mock([(201, AC_URL, AC_RESPONSE)])) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.events.signals.event_saved.send_robust", MagicMock()) + @patch("requests.post", apply_requests_request_mock([(201, AC_URL, AC_RESPONSE)])) def test_add_event_slug_as_acp_tag__without_academy(self): import logging add_event_slug_as_acp_tag.delay(1, 1) - self.assertEqual(self.bc.database.list_of('marketing.Tag'), []) + self.assertEqual(self.bc.database.list_of("marketing.Tag"), []) self.assertEqual(logging.Logger.info.call_args_list, [call(TASK_STARTED_MESSAGE)]) - self.assertEqual(logging.Logger.error.call_args_list, [call('Academy 1 not found', exc_info=True)]) + self.assertEqual(logging.Logger.error.call_args_list, [call("Academy 1 not found", exc_info=True)]) """ 🔽🔽🔽 Without ActiveCampaignAcademy """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.events.signals.event_saved.send_robust', MagicMock()) - @patch('requests.post', apply_requests_request_mock([(201, AC_URL, AC_RESPONSE)])) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.events.signals.event_saved.send_robust", MagicMock()) + @patch("requests.post", apply_requests_request_mock([(201, AC_URL, AC_RESPONSE)])) def test_add_event_slug_as_acp_tag__without_active_campaign_academy(self): import logging @@ -60,174 +61,193 @@ def test_add_event_slug_as_acp_tag__without_active_campaign_academy(self): add_event_slug_as_acp_tag.delay(1, 1) - self.assertEqual(self.bc.database.list_of('marketing.Tag'), []) + self.assertEqual(self.bc.database.list_of("marketing.Tag"), []) self.assertEqual(logging.Logger.info.call_args_list, [call(TASK_STARTED_MESSAGE)]) - self.assertEqual(logging.Logger.error.call_args_list, - [call('ActiveCampaign Academy 1 not found', exc_info=True)]) + self.assertEqual( + logging.Logger.error.call_args_list, [call("ActiveCampaign Academy 1 not found", exc_info=True)] + ) """ 🔽🔽🔽 Without Event """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', 
MagicMock()) - @patch('breathecode.events.signals.event_saved.send_robust', MagicMock()) - @patch('requests.post', apply_requests_request_mock([(201, AC_URL, AC_RESPONSE)])) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.events.signals.event_saved.send_robust", MagicMock()) + @patch("requests.post", apply_requests_request_mock([(201, AC_URL, AC_RESPONSE)])) def test_add_event_slug_as_acp_tag__without_event(self): import logging - active_campaign_academy = {'ac_url': AC_HOST} + active_campaign_academy = {"ac_url": AC_HOST} model = self.bc.database.create(academy=1, active_campaign_academy=active_campaign_academy) logging.Logger.info.call_args_list = [] add_event_slug_as_acp_tag.delay(1, 1) - self.assertEqual(self.bc.database.list_of('marketing.Tag'), []) + self.assertEqual(self.bc.database.list_of("marketing.Tag"), []) self.assertEqual(logging.Logger.info.call_args_list, [call(TASK_STARTED_MESSAGE)]) - self.assertEqual(logging.Logger.error.call_args_list, [call('Event 1 not found', exc_info=True)]) + self.assertEqual(logging.Logger.error.call_args_list, [call("Event 1 not found", exc_info=True)]) """ 🔽🔽🔽 Event without slug """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.events.signals.event_saved.send_robust', MagicMock()) - @patch('requests.post', apply_requests_request_mock([(201, AC_URL, AC_RESPONSE)])) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.events.signals.event_saved.send_robust", MagicMock()) + @patch("requests.post", apply_requests_request_mock([(201, AC_URL, AC_RESPONSE)])) def test_add_event_slug_as_acp_tag__event_without_slug(self): import logging - active_campaign_academy = {'ac_url': AC_HOST} + active_campaign_academy = {"ac_url": AC_HOST} model = self.bc.database.create(academy=1, event=1, active_campaign_academy=active_campaign_academy) logging.Logger.info.call_args_list = [] add_event_slug_as_acp_tag.delay(1, 1) - self.assertEqual(self.bc.database.list_of('marketing.Tag'), []) + self.assertEqual(self.bc.database.list_of("marketing.Tag"), []) - self.assertEqual(logging.Logger.info.call_args_list, [ - call(TASK_STARTED_MESSAGE), - ]) - self.assertEqual(logging.Logger.error.call_args_list, [call('Event 1 does not have slug', exc_info=True)]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call(TASK_STARTED_MESSAGE), + ], + ) + self.assertEqual(logging.Logger.error.call_args_list, [call("Event 1 does not have slug", exc_info=True)]) """ 🔽🔽🔽 Event slug already exists """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.events.signals.event_saved.send_robust', MagicMock()) - @patch('requests.post', apply_requests_request_mock([(201, AC_URL, AC_RESPONSE)])) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.events.signals.event_saved.send_robust", MagicMock()) + @patch("requests.post", apply_requests_request_mock([(201, AC_URL, AC_RESPONSE)])) def test_add_event_slug_as_acp_tag__event_slug_already_exists(self): import logging - active_campaign_academy = {'ac_url': AC_HOST} - event = {'slug': 'event-they-killed-kenny'} - tag = {'slug': 'event-they-killed-kenny', 'tag_type': 'EVENT'} + active_campaign_academy = {"ac_url": AC_HOST} + event = {"slug": "event-they-killed-kenny"} + tag = {"slug": "event-they-killed-kenny", "tag_type": "EVENT"} - 
model = self.bc.database.create(academy=1, - tag=tag, - active_campaign_academy=active_campaign_academy, - event=event) + model = self.bc.database.create( + academy=1, tag=tag, active_campaign_academy=active_campaign_academy, event=event + ) logging.Logger.info.call_args_list = [] add_event_slug_as_acp_tag.delay(1, 1) - self.assertEqual(self.bc.database.list_of('marketing.Tag'), [self.bc.format.to_dict(model.tag)]) - self.assertEqual(logging.Logger.info.call_args_list, [ - call(TASK_STARTED_MESSAGE), - ]) - self.assertEqual(logging.Logger.error.call_args_list, [ - call('Tag for event `event-they-killed-kenny` already exists', exc_info=True), - ]) + self.assertEqual(self.bc.database.list_of("marketing.Tag"), [self.bc.format.to_dict(model.tag)]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call(TASK_STARTED_MESSAGE), + ], + ) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call("Tag for event `event-they-killed-kenny` already exists", exc_info=True), + ], + ) """ 🔽🔽🔽 Event slug already exists, with force false """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.events.signals.event_saved.send_robust', MagicMock()) - @patch('requests.post', apply_requests_request_mock([(201, AC_URL, AC_RESPONSE)])) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.events.signals.event_saved.send_robust", MagicMock()) + @patch("requests.post", apply_requests_request_mock([(201, AC_URL, AC_RESPONSE)])) def test_add_event_slug_as_acp_tag__event_slug_already_exists__with_force_false(self): import logging - active_campaign_academy = {'ac_url': AC_HOST} - event = {'slug': 'event-they-killed-kenny'} - tag = {'slug': 'event-they-killed-kenny', 'tag_type': 'EVENT'} + active_campaign_academy = {"ac_url": AC_HOST} + event = {"slug": "event-they-killed-kenny"} + tag = {"slug": "event-they-killed-kenny", "tag_type": "EVENT"} - model = self.bc.database.create(academy=1, - tag=tag, - active_campaign_academy=active_campaign_academy, - event=event) + model = self.bc.database.create( + academy=1, tag=tag, active_campaign_academy=active_campaign_academy, event=event + ) logging.Logger.info.call_args_list = [] add_event_slug_as_acp_tag.delay(1, 1, force=False) - self.assertEqual(self.bc.database.list_of('marketing.Tag'), [self.bc.format.to_dict(model.tag)]) - self.assertEqual(logging.Logger.info.call_args_list, [ - call(TASK_STARTED_MESSAGE), - ]) - self.assertEqual(logging.Logger.error.call_args_list, [ - call('Tag for event `event-they-killed-kenny` already exists', exc_info=True), - ]) + self.assertEqual(self.bc.database.list_of("marketing.Tag"), [self.bc.format.to_dict(model.tag)]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call(TASK_STARTED_MESSAGE), + ], + ) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call("Tag for event `event-they-killed-kenny` already exists", exc_info=True), + ], + ) """ 🔽🔽🔽 Event slug already exists, with force true """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.events.signals.event_saved.send_robust', MagicMock()) - @patch('requests.post', apply_requests_request_mock([(201, AC_URL, AC_RESPONSE)])) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.events.signals.event_saved.send_robust", MagicMock()) + @patch("requests.post", apply_requests_request_mock([(201, AC_URL, 
AC_RESPONSE)])) def test_add_event_slug_as_acp_tag__event_slug_already_exists__with_force_true(self): import logging - active_campaign_academy = {'ac_url': AC_HOST} - event = {'slug': 'event-they-killed-kenny'} - tag = {'slug': 'event-they-killed-kenny', 'tag_type': 'EVENT'} + active_campaign_academy = {"ac_url": AC_HOST} + event = {"slug": "event-they-killed-kenny"} + tag = {"slug": "event-they-killed-kenny", "tag_type": "EVENT"} - model = self.bc.database.create(academy=1, - tag=tag, - active_campaign_academy=active_campaign_academy, - event=event) + model = self.bc.database.create( + academy=1, tag=tag, active_campaign_academy=active_campaign_academy, event=event + ) logging.Logger.info.call_args_list = [] add_event_slug_as_acp_tag.delay(1, 1, force=True) - self.assertEqual(self.bc.database.list_of('marketing.Tag'), [ - { - **self.bc.format.to_dict(model.tag), - 'acp_id': 1, - }, - ]) - self.assertEqual(logging.Logger.info.call_args_list, [ - call(TASK_STARTED_MESSAGE), - call(f'Creating tag `{model.event.slug}` on active campaign'), - call('Tag created successfully'), - ]) + self.assertEqual( + self.bc.database.list_of("marketing.Tag"), + [ + { + **self.bc.format.to_dict(model.tag), + "acp_id": 1, + }, + ], + ) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call(TASK_STARTED_MESSAGE), + call(f"Creating tag `{model.event.slug}` on active campaign"), + call("Tag created successfully"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, []) """ 🔽🔽🔽 Create tag in Active Campaign """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.events.signals.event_saved.send_robust', MagicMock()) - @patch('requests.post', apply_requests_request_mock([(201, AC_URL, AC_RESPONSE)])) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.events.signals.event_saved.send_robust", MagicMock()) + @patch("requests.post", apply_requests_request_mock([(201, AC_URL, AC_RESPONSE)])) def test_add_event_slug_as_acp_tag(self): import logging - active_campaign_academy = {'ac_url': AC_HOST} - event = {'slug': 'event-they-killed-kenny'} + active_campaign_academy = {"ac_url": AC_HOST} + event = {"slug": "event-they-killed-kenny"} model = self.bc.database.create(academy=1, active_campaign_academy=active_campaign_academy, event=event) @@ -235,53 +255,67 @@ def test_add_event_slug_as_acp_tag(self): add_event_slug_as_acp_tag.delay(1, 1) - self.assertEqual(self.bc.database.list_of('marketing.Tag'), [{ - 'ac_academy_id': 1, - 'acp_id': 1, - 'automation_id': None, - 'disputed_at': None, - 'description': None, - 'disputed_reason': None, - 'id': 1, - 'slug': 'event-they-killed-kenny', - 'subscribers': 0, - 'tag_type': 'EVENT', - }]) - - self.assertEqual(logging.Logger.info.call_args_list, [ - call(TASK_STARTED_MESSAGE), - call(f'Creating tag `{model.event.slug}` on active campaign'), - call('Tag created successfully'), - ]) + self.assertEqual( + self.bc.database.list_of("marketing.Tag"), + [ + { + "ac_academy_id": 1, + "acp_id": 1, + "automation_id": None, + "disputed_at": None, + "description": None, + "disputed_reason": None, + "id": 1, + "slug": "event-they-killed-kenny", + "subscribers": 0, + "tag_type": "EVENT", + } + ], + ) + + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call(TASK_STARTED_MESSAGE), + call(f"Creating tag `{model.event.slug}` on active campaign"), + call("Tag created successfully"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, 
[]) """ 🔽🔽🔽 Active campaign return 404 (check cases status code are not equal to 201) """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.events.signals.event_saved.send_robust', MagicMock()) - @patch('requests.post', apply_requests_request_mock([(404, AC_URL, AC_ERROR_RESPONSE)])) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.events.signals.event_saved.send_robust", MagicMock()) + @patch("requests.post", apply_requests_request_mock([(404, AC_URL, AC_ERROR_RESPONSE)])) def test_add_event_slug_as_acp_tag__status_404(self): import logging - active_campaign_academy = {'ac_url': AC_HOST} - event = {'slug': 'event-they-killed-kenny'} + active_campaign_academy = {"ac_url": AC_HOST} + event = {"slug": "event-they-killed-kenny"} model = self.bc.database.create(academy=1, event=event, active_campaign_academy=active_campaign_academy) logging.Logger.info.call_args_list = [] add_event_slug_as_acp_tag.delay(1, 1) - self.assertEqual(self.bc.database.list_of('marketing.Tag'), []) - - self.assertEqual(logging.Logger.info.call_args_list, [ - call(TASK_STARTED_MESSAGE), - call(f'Creating tag `{model.event.slug}` on active campaign'), - ]) - self.assertEqual(logging.Logger.error.call_args_list, [ - call(f'Error creating tag `{model.event.slug}` with status=404'), - call(AC_ERROR_RESPONSE), - call(f'Error creating tag `{model.event.slug}` with status=404', exc_info=True), - ]) + self.assertEqual(self.bc.database.list_of("marketing.Tag"), []) + + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call(TASK_STARTED_MESSAGE), + call(f"Creating tag `{model.event.slug}` on active campaign"), + ], + ) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call(f"Error creating tag `{model.event.slug}` with status=404"), + call(AC_ERROR_RESPONSE), + call(f"Error creating tag `{model.event.slug}` with status=404", exc_info=True), + ], + ) diff --git a/breathecode/marketing/tests/tasks/tests_add_event_tags_to_student.py b/breathecode/marketing/tests/tasks/tests_add_event_tags_to_student.py index 226bdd304..73861b3f2 100644 --- a/breathecode/marketing/tests/tasks/tests_add_event_tags_to_student.py +++ b/breathecode/marketing/tests/tasks/tests_add_event_tags_to_student.py @@ -1,36 +1,35 @@ """ Test /answer/:id """ + import os from unittest.mock import MagicMock, call, patch from breathecode.marketing.tasks import add_event_tags_to_student from breathecode.tests.mocks.requests import apply_requests_get_mock, apply_requests_post_mock from ..mixins import MarketingTestCase -GOOGLE_CLOUD_KEY = os.getenv('GOOGLE_CLOUD_KEY', None) -AC_HOST = 'https://ac.ca' -AC_URL = f'{AC_HOST}/api/3/contacts' -AC_POST_URL = f'{AC_HOST}/api/3/contactTags' +GOOGLE_CLOUD_KEY = os.getenv("GOOGLE_CLOUD_KEY", None) +AC_HOST = "https://ac.ca" +AC_URL = f"{AC_HOST}/api/3/contacts" +AC_POST_URL = f"{AC_HOST}/api/3/contactTags" AC_RESPONSE = { - 'contacts': [ + "contacts": [ { - 'id': 1, - 'tag': 'they-killed-kenny', + "id": 1, + "tag": "they-killed-kenny", }, ] } -AC_EMPTY_RESPONSE = {'contacts': []} -AC_POST_RESPONSE = {'contactTag': {}} -TASK_STARTED_MESSAGE = 'Task add_event_tags_to_student started' -GET_CONTACT_BY_EMAIL_PATH = ('breathecode.services.activecampaign.client.ActiveCampaign.' 
- 'get_contact_by_email') +AC_EMPTY_RESPONSE = {"contacts": []} +AC_POST_RESPONSE = {"contactTag": {}} +TASK_STARTED_MESSAGE = "Task add_event_tags_to_student started" +GET_CONTACT_BY_EMAIL_PATH = "breathecode.services.activecampaign.client.ActiveCampaign." "get_contact_by_email" -ADD_TAG_TO_CONTACT_PATH = ('breathecode.services.activecampaign.client.ActiveCampaign.' - 'add_tag_to_contact') +ADD_TAG_TO_CONTACT_PATH = "breathecode.services.activecampaign.client.ActiveCampaign." "add_tag_to_contact" -GET_CONTACT_BY_EMAIL_EXCEPTION = 'Random exception in get_contact_by_email' -ADD_TAG_TO_CONTACT_EXCEPTION = 'Random exception in add_tag_to_contact' -NEW_RELIC_LOG = 'New Relic Python Agent (9.1.2)' +GET_CONTACT_BY_EMAIL_EXCEPTION = "Random exception in get_contact_by_email" +ADD_TAG_TO_CONTACT_EXCEPTION = "Random exception in add_tag_to_contact" +NEW_RELIC_LOG = "New Relic Python Agent (9.1.2)" class AnswerIdTestSuite(MarketingTestCase): @@ -38,11 +37,11 @@ class AnswerIdTestSuite(MarketingTestCase): 🔽🔽🔽 Without optional arguments """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('requests.get', apply_requests_get_mock([(200, AC_URL, AC_RESPONSE)])) - @patch('requests.post', apply_requests_post_mock([(201, AC_POST_URL, AC_POST_RESPONSE)])) - @patch('breathecode.events.signals.event_saved', MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("requests.get", apply_requests_get_mock([(200, AC_URL, AC_RESPONSE)])) + @patch("requests.post", apply_requests_post_mock([(201, AC_POST_URL, AC_POST_RESPONSE)])) + @patch("breathecode.events.signals.event_saved", MagicMock()) def test_add_event_tags_to_student__without_optional_arguments(self): import logging import requests @@ -50,9 +49,12 @@ def test_add_event_tags_to_student__without_optional_arguments(self): add_event_tags_to_student.delay(1) self.assertEqual(logging.Logger.info.call_args_list, [call(TASK_STARTED_MESSAGE)]) - self.assertEqual(logging.Logger.error.call_args_list, [ - call('Impossible to determine the user email', exc_info=True), - ]) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call("Impossible to determine the user email", exc_info=True), + ], + ) self.assertEqual(requests.get.call_args_list, []) self.assertEqual(requests.post.call_args_list, []) @@ -61,21 +63,24 @@ def test_add_event_tags_to_student__without_optional_arguments(self): 🔽🔽🔽 Without Academy """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('requests.get', apply_requests_get_mock([(200, AC_URL, AC_RESPONSE)])) - @patch('requests.post', apply_requests_post_mock([(201, AC_POST_URL, AC_POST_RESPONSE)])) - @patch('breathecode.events.signals.event_saved', MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("requests.get", apply_requests_get_mock([(200, AC_URL, AC_RESPONSE)])) + @patch("requests.post", apply_requests_post_mock([(201, AC_POST_URL, AC_POST_RESPONSE)])) + @patch("breathecode.events.signals.event_saved", MagicMock()) def test_add_event_tags_to_student__user_id_and_email(self): import logging import requests - add_event_tags_to_student.delay(1, user_id=1, email='pokemon@potato.io') + add_event_tags_to_student.delay(1, user_id=1, email="pokemon@potato.io") self.assertEqual(logging.Logger.info.call_args_list, [call(TASK_STARTED_MESSAGE)]) - self.assertEqual(logging.Logger.error.call_args_list, [ - call('You can\'t provide 
the user_id and email together', exc_info=True), - ]) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call("You can't provide the user_id and email together", exc_info=True), + ], + ) self.assertEqual(requests.get.call_args_list, []) self.assertEqual(requests.post.call_args_list, []) @@ -84,11 +89,11 @@ def test_add_event_tags_to_student__user_id_and_email(self): 🔽🔽🔽 Without User """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('requests.get', apply_requests_get_mock([(200, AC_URL, AC_RESPONSE)])) - @patch('requests.post', apply_requests_post_mock([(201, AC_POST_URL, AC_POST_RESPONSE)])) - @patch('breathecode.events.signals.event_saved', MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("requests.get", apply_requests_get_mock([(200, AC_URL, AC_RESPONSE)])) + @patch("requests.post", apply_requests_post_mock([(201, AC_POST_URL, AC_POST_RESPONSE)])) + @patch("breathecode.events.signals.event_saved", MagicMock()) def test_add_event_tags_to_student__without_user(self): import logging import requests @@ -96,7 +101,7 @@ def test_add_event_tags_to_student__without_user(self): add_event_tags_to_student.delay(1, user_id=1) self.assertEqual(logging.Logger.info.call_args_list, [call(TASK_STARTED_MESSAGE)]) - self.assertEqual(logging.Logger.error.call_args_list, [call('We can\'t get the user email', exc_info=True)]) + self.assertEqual(logging.Logger.error.call_args_list, [call("We can't get the user email", exc_info=True)]) self.assertEqual(requests.get.call_args_list, []) self.assertEqual(requests.post.call_args_list, []) @@ -104,11 +109,11 @@ def test_add_event_tags_to_student__without_user(self): 🔽🔽🔽 Without Event """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('requests.get', apply_requests_get_mock([(200, AC_URL, AC_RESPONSE)])) - @patch('requests.post', apply_requests_post_mock([(201, AC_POST_URL, AC_POST_RESPONSE)])) - @patch('breathecode.events.signals.event_saved', MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("requests.get", apply_requests_get_mock([(200, AC_URL, AC_RESPONSE)])) + @patch("requests.post", apply_requests_post_mock([(201, AC_POST_URL, AC_POST_RESPONSE)])) + @patch("breathecode.events.signals.event_saved", MagicMock()) def test_add_event_tags_to_student__without_event__with_user(self): import logging import requests @@ -120,24 +125,24 @@ def test_add_event_tags_to_student__without_event__with_user(self): add_event_tags_to_student.delay(1, user_id=1) self.assertEqual(logging.Logger.info.call_args_list, [call(TASK_STARTED_MESSAGE)]) - self.assertEqual(logging.Logger.error.call_args_list, [call('Event 1 not found', exc_info=True)]) + self.assertEqual(logging.Logger.error.call_args_list, [call("Event 1 not found", exc_info=True)]) self.assertEqual(requests.get.call_args_list, []) self.assertEqual(requests.post.call_args_list, []) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('requests.get', apply_requests_get_mock([(200, AC_URL, AC_RESPONSE)])) - @patch('breathecode.events.signals.event_saved', MagicMock()) - @patch('requests.post', apply_requests_post_mock([(201, AC_POST_URL, AC_POST_RESPONSE)])) - @patch('breathecode.events.signals.event_saved', MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("requests.get", 
apply_requests_get_mock([(200, AC_URL, AC_RESPONSE)])) + @patch("breathecode.events.signals.event_saved", MagicMock()) + @patch("requests.post", apply_requests_post_mock([(201, AC_POST_URL, AC_POST_RESPONSE)])) + @patch("breathecode.events.signals.event_saved", MagicMock()) def test_add_event_tags_to_student__without_event__with_email(self): import logging import requests - add_event_tags_to_student.delay(1, email='pokemon@potato.io') + add_event_tags_to_student.delay(1, email="pokemon@potato.io") self.assertEqual(logging.Logger.info.call_args_list, [call(TASK_STARTED_MESSAGE)]) - self.assertEqual(logging.Logger.error.call_args_list, [call('Event 1 not found', exc_info=True)]) + self.assertEqual(logging.Logger.error.call_args_list, [call("Event 1 not found", exc_info=True)]) self.assertEqual(requests.get.call_args_list, []) self.assertEqual(requests.post.call_args_list, []) @@ -145,12 +150,12 @@ def test_add_event_tags_to_student__without_event__with_email(self): 🔽🔽🔽 Without Academy """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('requests.get', apply_requests_get_mock([(200, AC_URL, AC_RESPONSE)])) - @patch('breathecode.events.signals.event_saved', MagicMock()) - @patch('requests.post', apply_requests_post_mock([(201, AC_POST_URL, AC_POST_RESPONSE)])) - @patch('breathecode.events.signals.event_saved', MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("requests.get", apply_requests_get_mock([(200, AC_URL, AC_RESPONSE)])) + @patch("breathecode.events.signals.event_saved", MagicMock()) + @patch("requests.post", apply_requests_post_mock([(201, AC_POST_URL, AC_POST_RESPONSE)])) + @patch("breathecode.events.signals.event_saved", MagicMock()) def test_add_event_tags_to_student__without_academy__with_user(self): import logging import requests @@ -162,18 +167,21 @@ def test_add_event_tags_to_student__without_academy__with_user(self): add_event_tags_to_student.delay(1, user_id=1) self.assertEqual(logging.Logger.info.call_args_list, [call(TASK_STARTED_MESSAGE)]) - self.assertEqual(logging.Logger.error.call_args_list, [ - call('Impossible to determine the academy', exc_info=True), - ]) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call("Impossible to determine the academy", exc_info=True), + ], + ) self.assertEqual(requests.get.call_args_list, []) self.assertEqual(requests.post.call_args_list, []) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('requests.get', apply_requests_get_mock([(200, AC_URL, AC_RESPONSE)])) - @patch('requests.post', apply_requests_post_mock([(201, AC_POST_URL, AC_POST_RESPONSE)])) - @patch('breathecode.events.signals.event_saved', MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("requests.get", apply_requests_get_mock([(200, AC_URL, AC_RESPONSE)])) + @patch("requests.post", apply_requests_post_mock([(201, AC_POST_URL, AC_POST_RESPONSE)])) + @patch("breathecode.events.signals.event_saved", MagicMock()) def test_add_event_tags_to_student__without_academy__with_email(self): import logging import requests @@ -182,12 +190,15 @@ def test_add_event_tags_to_student__without_academy__with_email(self): logging.Logger.info.call_args_list = [] - add_event_tags_to_student.delay(1, email='pokemon@potato.io') + add_event_tags_to_student.delay(1, email="pokemon@potato.io") self.assertEqual(logging.Logger.info.call_args_list, 
[call(TASK_STARTED_MESSAGE)]) - self.assertEqual(logging.Logger.error.call_args_list, [ - call('Impossible to determine the academy', exc_info=True), - ]) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call("Impossible to determine the academy", exc_info=True), + ], + ) self.assertEqual(requests.get.call_args_list, []) self.assertEqual(requests.post.call_args_list, []) @@ -196,11 +207,11 @@ def test_add_event_tags_to_student__without_academy__with_email(self): 🔽🔽🔽 Without ActiveCampaignAcademy """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('requests.get', apply_requests_get_mock([(200, AC_URL, AC_RESPONSE)])) - @patch('requests.post', apply_requests_post_mock([(201, AC_POST_URL, AC_POST_RESPONSE)])) - @patch('breathecode.events.signals.event_saved', MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("requests.get", apply_requests_get_mock([(200, AC_URL, AC_RESPONSE)])) + @patch("requests.post", apply_requests_post_mock([(201, AC_POST_URL, AC_POST_RESPONSE)])) + @patch("breathecode.events.signals.event_saved", MagicMock()) def test_add_event_tags_to_student__without_active_campaign_academy__with_user(self): import logging import requests @@ -212,18 +223,21 @@ def test_add_event_tags_to_student__without_active_campaign_academy__with_user(s add_event_tags_to_student.delay(1, user_id=1) self.assertEqual(logging.Logger.info.call_args_list, [call(TASK_STARTED_MESSAGE)]) - self.assertEqual(logging.Logger.error.call_args_list, [ - call('ActiveCampaign Academy 1 not found', exc_info=True), - ]) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call("ActiveCampaign Academy 1 not found", exc_info=True), + ], + ) self.assertEqual(requests.get.call_args_list, []) self.assertEqual(requests.post.call_args_list, []) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('requests.get', apply_requests_get_mock([(200, AC_URL, AC_RESPONSE)])) - @patch('requests.post', apply_requests_post_mock([(201, AC_POST_URL, AC_POST_RESPONSE)])) - @patch('breathecode.events.signals.event_saved', MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("requests.get", apply_requests_get_mock([(200, AC_URL, AC_RESPONSE)])) + @patch("requests.post", apply_requests_post_mock([(201, AC_POST_URL, AC_POST_RESPONSE)])) + @patch("breathecode.events.signals.event_saved", MagicMock()) def test_add_event_tags_to_student__without_active_campaign_academy__with_email(self): import logging import requests @@ -232,12 +246,15 @@ def test_add_event_tags_to_student__without_active_campaign_academy__with_email( logging.Logger.info.call_args_list = [] - add_event_tags_to_student.delay(1, email='pokemon@potato.io') + add_event_tags_to_student.delay(1, email="pokemon@potato.io") self.assertEqual(logging.Logger.info.call_args_list, [call(TASK_STARTED_MESSAGE)]) - self.assertEqual(logging.Logger.error.call_args_list, [ - call('ActiveCampaign Academy 1 not found', exc_info=True), - ]) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call("ActiveCampaign Academy 1 not found", exc_info=True), + ], + ) self.assertEqual(requests.get.call_args_list, []) self.assertEqual(requests.post.call_args_list, []) @@ -246,11 +263,11 @@ def test_add_event_tags_to_student__without_active_campaign_academy__with_email( 🔽🔽🔽 Without Tag """ - @patch('logging.Logger.info', MagicMock()) - 
@patch('logging.Logger.error', MagicMock()) - @patch('requests.get', apply_requests_get_mock([(200, AC_URL, AC_RESPONSE)])) - @patch('requests.post', apply_requests_post_mock([(201, AC_POST_URL, AC_POST_RESPONSE)])) - @patch('breathecode.events.signals.event_saved', MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("requests.get", apply_requests_get_mock([(200, AC_URL, AC_RESPONSE)])) + @patch("requests.post", apply_requests_post_mock([(201, AC_POST_URL, AC_POST_RESPONSE)])) + @patch("breathecode.events.signals.event_saved", MagicMock()) def test_add_event_tags_to_student__without_tags__with_user(self): import logging import requests @@ -261,22 +278,28 @@ def test_add_event_tags_to_student__without_tags__with_user(self): add_event_tags_to_student.delay(1, user_id=1) - self.assertEqual(logging.Logger.info.call_args_list, [ - call(TASK_STARTED_MESSAGE), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call(TASK_STARTED_MESSAGE), + ], + ) - self.assertEqual(logging.Logger.error.call_args_list, [ - call('Tags not found', exc_info=True), - ]) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call("Tags not found", exc_info=True), + ], + ) self.assertEqual(requests.get.call_args_list, []) self.assertEqual(requests.post.call_args_list, []) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('requests.get', apply_requests_get_mock([(200, AC_URL, AC_RESPONSE)])) - @patch('requests.post', apply_requests_post_mock([(201, AC_POST_URL, AC_POST_RESPONSE)])) - @patch('breathecode.events.signals.event_saved', MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("requests.get", apply_requests_get_mock([(200, AC_URL, AC_RESPONSE)])) + @patch("requests.post", apply_requests_post_mock([(201, AC_POST_URL, AC_POST_RESPONSE)])) + @patch("breathecode.events.signals.event_saved", MagicMock()) def test_add_event_tags_to_student__without_tags__with_email(self): import logging import requests @@ -285,15 +308,21 @@ def test_add_event_tags_to_student__without_tags__with_email(self): logging.Logger.info.call_args_list = [] - add_event_tags_to_student.delay(1, email='pokemon@potato.io') + add_event_tags_to_student.delay(1, email="pokemon@potato.io") - self.assertEqual(logging.Logger.info.call_args_list, [ - call(TASK_STARTED_MESSAGE), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call(TASK_STARTED_MESSAGE), + ], + ) - self.assertEqual(logging.Logger.error.call_args_list, [ - call('Tags not found', exc_info=True), - ]) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call("Tags not found", exc_info=True), + ], + ) self.assertEqual(requests.get.call_args_list, []) self.assertEqual(requests.post.call_args_list, []) @@ -302,27 +331,29 @@ def test_add_event_tags_to_student__without_tags__with_email(self): 🔽🔽🔽 With a exception in ActiveCampaign.get_contact_by_email """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) @patch(GET_CONTACT_BY_EMAIL_PATH, MagicMock(side_effect=Exception(GET_CONTACT_BY_EMAIL_EXCEPTION))) - @patch('requests.get', apply_requests_get_mock([(200, AC_URL, AC_RESPONSE)])) - @patch('requests.post', apply_requests_post_mock([(201, AC_POST_URL, AC_POST_RESPONSE)])) - @patch('breathecode.events.signals.event_saved', MagicMock()) + 
@patch("requests.get", apply_requests_get_mock([(200, AC_URL, AC_RESPONSE)])) + @patch("requests.post", apply_requests_post_mock([(201, AC_POST_URL, AC_POST_RESPONSE)])) + @patch("breathecode.events.signals.event_saved", MagicMock()) def test_add_event_tags_to_student__exception_in_get_contact_by_email__with_user(self): import logging import requests - tag_kwargs = {'slug': 'they-killed-kenny'} - event_kwargs = {'tags': 'they-killed-kenny'} - active_campaign_academy_kwargs = {'ac_url': AC_HOST} - model = self.generate_models(user=True, - event=True, - academy=True, - active_campaign_academy=True, - tag=True, - tag_kwargs=tag_kwargs, - event_kwargs=event_kwargs, - active_campaign_academy_kwargs=active_campaign_academy_kwargs) + tag_kwargs = {"slug": "they-killed-kenny"} + event_kwargs = {"tags": "they-killed-kenny"} + active_campaign_academy_kwargs = {"ac_url": AC_HOST} + model = self.generate_models( + user=True, + event=True, + academy=True, + active_campaign_academy=True, + tag=True, + tag_kwargs=tag_kwargs, + event_kwargs=event_kwargs, + active_campaign_academy_kwargs=active_campaign_academy_kwargs, + ) logging.Logger.info.call_args_list = [] @@ -336,30 +367,32 @@ def test_add_event_tags_to_student__exception_in_get_contact_by_email__with_user self.assertEqual(requests.get.call_args_list, []) self.assertEqual(requests.post.call_args_list, []) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) @patch(GET_CONTACT_BY_EMAIL_PATH, MagicMock(side_effect=Exception(GET_CONTACT_BY_EMAIL_EXCEPTION))) - @patch('requests.get', apply_requests_get_mock([(200, AC_URL, AC_RESPONSE)])) - @patch('requests.post', apply_requests_post_mock([(201, AC_POST_URL, AC_POST_RESPONSE)])) - @patch('breathecode.events.signals.event_saved', MagicMock()) + @patch("requests.get", apply_requests_get_mock([(200, AC_URL, AC_RESPONSE)])) + @patch("requests.post", apply_requests_post_mock([(201, AC_POST_URL, AC_POST_RESPONSE)])) + @patch("breathecode.events.signals.event_saved", MagicMock()) def test_add_event_tags_to_student__exception_in_get_contact_by_email__with_email(self): import logging import requests - tag_kwargs = {'slug': 'they-killed-kenny'} - event_kwargs = {'tags': 'they-killed-kenny'} - active_campaign_academy_kwargs = {'ac_url': AC_HOST} - model = self.generate_models(event=True, - academy=True, - active_campaign_academy=True, - tag=True, - tag_kwargs=tag_kwargs, - event_kwargs=event_kwargs, - active_campaign_academy_kwargs=active_campaign_academy_kwargs) + tag_kwargs = {"slug": "they-killed-kenny"} + event_kwargs = {"tags": "they-killed-kenny"} + active_campaign_academy_kwargs = {"ac_url": AC_HOST} + model = self.generate_models( + event=True, + academy=True, + active_campaign_academy=True, + tag=True, + tag_kwargs=tag_kwargs, + event_kwargs=event_kwargs, + active_campaign_academy_kwargs=active_campaign_academy_kwargs, + ) logging.Logger.info.call_args_list = [] - add_event_tags_to_student.delay(1, email='pokemon@potato.io') + add_event_tags_to_student.delay(1, email="pokemon@potato.io") self.assertEqual(logging.Logger.info.call_args_list, [call(TASK_STARTED_MESSAGE)]) self.assertEqual(logging.Logger.error.call_args_list, [call(GET_CONTACT_BY_EMAIL_EXCEPTION, exc_info=True)]) @@ -371,84 +404,104 @@ def test_add_event_tags_to_student__exception_in_get_contact_by_email__with_emai 🔽🔽🔽 With a exception in ActiveCampaign.add_tag_to_contact """ - @patch('logging.Logger.info', 
MagicMock()) - @patch('logging.Logger.error', MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) @patch(ADD_TAG_TO_CONTACT_PATH, MagicMock(side_effect=Exception(ADD_TAG_TO_CONTACT_EXCEPTION))) - @patch('requests.get', apply_requests_get_mock([(200, AC_URL, AC_RESPONSE)])) - @patch('requests.post', apply_requests_post_mock([(201, AC_POST_URL, AC_POST_RESPONSE)])) - @patch('breathecode.events.signals.event_saved', MagicMock()) + @patch("requests.get", apply_requests_get_mock([(200, AC_URL, AC_RESPONSE)])) + @patch("requests.post", apply_requests_post_mock([(201, AC_POST_URL, AC_POST_RESPONSE)])) + @patch("breathecode.events.signals.event_saved", MagicMock()) def test_add_event_tags_to_student__exception_in_add_tag_to_contact__with_user(self): import logging import requests - tag_kwargs = {'slug': 'they-killed-kenny'} - event_kwargs = {'tags': 'they-killed-kenny'} - active_campaign_academy_kwargs = {'ac_url': AC_HOST} - model = self.generate_models(user=True, - event=True, - academy=True, - active_campaign_academy=True, - tag=True, - tag_kwargs=tag_kwargs, - event_kwargs=event_kwargs, - active_campaign_academy_kwargs=active_campaign_academy_kwargs) + tag_kwargs = {"slug": "they-killed-kenny"} + event_kwargs = {"tags": "they-killed-kenny"} + active_campaign_academy_kwargs = {"ac_url": AC_HOST} + model = self.generate_models( + user=True, + event=True, + academy=True, + active_campaign_academy=True, + tag=True, + tag_kwargs=tag_kwargs, + event_kwargs=event_kwargs, + active_campaign_academy_kwargs=active_campaign_academy_kwargs, + ) logging.Logger.info.call_args_list = [] add_event_tags_to_student.delay(1, user_id=1) - self.assertEqual(logging.Logger.info.call_args_list, [ - call(TASK_STARTED_MESSAGE), - call('Adding tag 1 to acp contact 1'), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call(TASK_STARTED_MESSAGE), + call("Adding tag 1 to acp contact 1"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, [call(ADD_TAG_TO_CONTACT_EXCEPTION, exc_info=True)]) - self.assertEqual(requests.get.call_args_list, [ - call('https://ac.ca/api/3/contacts', - headers={'Api-Token': model.active_campaign_academy.ac_key}, - params={'email': model.user.email}, - timeout=2), - ]) + self.assertEqual( + requests.get.call_args_list, + [ + call( + "https://ac.ca/api/3/contacts", + headers={"Api-Token": model.active_campaign_academy.ac_key}, + params={"email": model.user.email}, + timeout=2, + ), + ], + ) self.assertEqual(requests.post.call_args_list, []) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) @patch(ADD_TAG_TO_CONTACT_PATH, MagicMock(side_effect=Exception(ADD_TAG_TO_CONTACT_EXCEPTION))) - @patch('requests.get', apply_requests_get_mock([(200, AC_URL, AC_RESPONSE)])) - @patch('requests.post', apply_requests_post_mock([(201, AC_POST_URL, AC_POST_RESPONSE)])) - @patch('breathecode.events.signals.event_saved', MagicMock()) + @patch("requests.get", apply_requests_get_mock([(200, AC_URL, AC_RESPONSE)])) + @patch("requests.post", apply_requests_post_mock([(201, AC_POST_URL, AC_POST_RESPONSE)])) + @patch("breathecode.events.signals.event_saved", MagicMock()) def test_add_event_tags_to_student__exception_in_add_tag_to_contact__with_email(self): import logging import requests - tag_kwargs = {'slug': 'they-killed-kenny'} - event_kwargs = {'tags': 'they-killed-kenny'} - active_campaign_academy_kwargs = 
{'ac_url': AC_HOST} - model = self.generate_models(event=True, - academy=True, - active_campaign_academy=True, - tag=True, - tag_kwargs=tag_kwargs, - event_kwargs=event_kwargs, - active_campaign_academy_kwargs=active_campaign_academy_kwargs) + tag_kwargs = {"slug": "they-killed-kenny"} + event_kwargs = {"tags": "they-killed-kenny"} + active_campaign_academy_kwargs = {"ac_url": AC_HOST} + model = self.generate_models( + event=True, + academy=True, + active_campaign_academy=True, + tag=True, + tag_kwargs=tag_kwargs, + event_kwargs=event_kwargs, + active_campaign_academy_kwargs=active_campaign_academy_kwargs, + ) logging.Logger.info.call_args_list = [] - add_event_tags_to_student.delay(1, email='pokemon@potato.io') + add_event_tags_to_student.delay(1, email="pokemon@potato.io") - self.assertEqual(logging.Logger.info.call_args_list, [ - call(TASK_STARTED_MESSAGE), - call('Adding tag 1 to acp contact 1'), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call(TASK_STARTED_MESSAGE), + call("Adding tag 1 to acp contact 1"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, [call(ADD_TAG_TO_CONTACT_EXCEPTION, exc_info=True)]) - self.assertEqual(requests.get.call_args_list, [ - call('https://ac.ca/api/3/contacts', - headers={'Api-Token': model.active_campaign_academy.ac_key}, - params={'email': 'pokemon@potato.io'}, - timeout=2), - ]) + self.assertEqual( + requests.get.call_args_list, + [ + call( + "https://ac.ca/api/3/contacts", + headers={"Api-Token": model.active_campaign_academy.ac_key}, + params={"email": "pokemon@potato.io"}, + timeout=2, + ), + ], + ) self.assertEqual(requests.post.call_args_list, []) @@ -456,560 +509,648 @@ def test_add_event_tags_to_student__exception_in_add_tag_to_contact__with_email( 🔽🔽🔽 With one Tag """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('requests.get', apply_requests_get_mock([(200, AC_URL, AC_RESPONSE)])) - @patch('requests.post', apply_requests_post_mock([(201, AC_POST_URL, AC_POST_RESPONSE)])) - @patch('breathecode.events.signals.event_saved', MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("requests.get", apply_requests_get_mock([(200, AC_URL, AC_RESPONSE)])) + @patch("requests.post", apply_requests_post_mock([(201, AC_POST_URL, AC_POST_RESPONSE)])) + @patch("breathecode.events.signals.event_saved", MagicMock()) def test_add_event_tags_to_student__with_one_tag__with_user(self): import logging import requests - tag_kwargs = {'slug': 'they-killed-kenny'} - event_kwargs = {'tags': 'they-killed-kenny'} - active_campaign_academy_kwargs = {'ac_url': AC_HOST} - model = self.generate_models(user=True, - event=True, - academy=True, - active_campaign_academy=True, - tag=True, - tag_kwargs=tag_kwargs, - event_kwargs=event_kwargs, - active_campaign_academy_kwargs=active_campaign_academy_kwargs) + tag_kwargs = {"slug": "they-killed-kenny"} + event_kwargs = {"tags": "they-killed-kenny"} + active_campaign_academy_kwargs = {"ac_url": AC_HOST} + model = self.generate_models( + user=True, + event=True, + academy=True, + active_campaign_academy=True, + tag=True, + tag_kwargs=tag_kwargs, + event_kwargs=event_kwargs, + active_campaign_academy_kwargs=active_campaign_academy_kwargs, + ) logging.Logger.info.call_args_list = [] add_event_tags_to_student.delay(1, user_id=1) - self.assertEqual(logging.Logger.info.call_args_list, [ - call(TASK_STARTED_MESSAGE), - call('Adding tag 1 to acp contact 1'), - ]) + self.assertEqual( + 
logging.Logger.info.call_args_list, + [ + call(TASK_STARTED_MESSAGE), + call("Adding tag 1 to acp contact 1"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(requests.get.call_args_list, [ - call('https://ac.ca/api/3/contacts', - headers={'Api-Token': model.active_campaign_academy.ac_key}, - params={'email': model.user.email}, - timeout=2), - ]) - - self.assertEqual(requests.post.call_args_list, [ - call('https://ac.ca/api/3/contactTags', - headers={ - 'Api-Token': model.active_campaign_academy.ac_key, - 'Content-Type': 'application/json', - 'Accept': 'application/json', - }, - json={'contactTag': { - 'contact': 1, - 'tag': model.tag.acp_id - }}, - timeout=2), - ]) - - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('requests.get', apply_requests_get_mock([(200, AC_URL, AC_RESPONSE)])) - @patch('requests.post', apply_requests_post_mock([(201, AC_POST_URL, AC_POST_RESPONSE)])) - @patch('breathecode.events.signals.event_saved', MagicMock()) + self.assertEqual( + requests.get.call_args_list, + [ + call( + "https://ac.ca/api/3/contacts", + headers={"Api-Token": model.active_campaign_academy.ac_key}, + params={"email": model.user.email}, + timeout=2, + ), + ], + ) + + self.assertEqual( + requests.post.call_args_list, + [ + call( + "https://ac.ca/api/3/contactTags", + headers={ + "Api-Token": model.active_campaign_academy.ac_key, + "Content-Type": "application/json", + "Accept": "application/json", + }, + json={"contactTag": {"contact": 1, "tag": model.tag.acp_id}}, + timeout=2, + ), + ], + ) + + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("requests.get", apply_requests_get_mock([(200, AC_URL, AC_RESPONSE)])) + @patch("requests.post", apply_requests_post_mock([(201, AC_POST_URL, AC_POST_RESPONSE)])) + @patch("breathecode.events.signals.event_saved", MagicMock()) def test_add_event_tags_to_student__with_one_tag__with_email(self): import logging import requests - tag_kwargs = {'slug': 'they-killed-kenny'} - event_kwargs = {'tags': 'they-killed-kenny'} - active_campaign_academy_kwargs = {'ac_url': AC_HOST} - model = self.generate_models(event=True, - academy=True, - active_campaign_academy=True, - tag=True, - tag_kwargs=tag_kwargs, - event_kwargs=event_kwargs, - active_campaign_academy_kwargs=active_campaign_academy_kwargs) + tag_kwargs = {"slug": "they-killed-kenny"} + event_kwargs = {"tags": "they-killed-kenny"} + active_campaign_academy_kwargs = {"ac_url": AC_HOST} + model = self.generate_models( + event=True, + academy=True, + active_campaign_academy=True, + tag=True, + tag_kwargs=tag_kwargs, + event_kwargs=event_kwargs, + active_campaign_academy_kwargs=active_campaign_academy_kwargs, + ) logging.Logger.info.call_args_list = [] - add_event_tags_to_student.delay(1, email='pokemon@potato.io') + add_event_tags_to_student.delay(1, email="pokemon@potato.io") - self.assertEqual(logging.Logger.info.call_args_list, [ - call(TASK_STARTED_MESSAGE), - call('Adding tag 1 to acp contact 1'), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call(TASK_STARTED_MESSAGE), + call("Adding tag 1 to acp contact 1"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(requests.get.call_args_list, [ - call('https://ac.ca/api/3/contacts', - headers={'Api-Token': model.active_campaign_academy.ac_key}, - params={'email': 'pokemon@potato.io'}, - timeout=2), - ]) - - self.assertEqual(requests.post.call_args_list, [ - 
call('https://ac.ca/api/3/contactTags', - headers={ - 'Api-Token': model.active_campaign_academy.ac_key, - 'Content-Type': 'application/json', - 'Accept': 'application/json', - }, - json={'contactTag': { - 'contact': 1, - 'tag': model.tag.acp_id - }}, - timeout=2), - ]) + self.assertEqual( + requests.get.call_args_list, + [ + call( + "https://ac.ca/api/3/contacts", + headers={"Api-Token": model.active_campaign_academy.ac_key}, + params={"email": "pokemon@potato.io"}, + timeout=2, + ), + ], + ) + + self.assertEqual( + requests.post.call_args_list, + [ + call( + "https://ac.ca/api/3/contactTags", + headers={ + "Api-Token": model.active_campaign_academy.ac_key, + "Content-Type": "application/json", + "Accept": "application/json", + }, + json={"contactTag": {"contact": 1, "tag": model.tag.acp_id}}, + timeout=2, + ), + ], + ) """ 🔽🔽🔽 With two Tags """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('requests.get', apply_requests_get_mock([(200, AC_URL, AC_RESPONSE)])) - @patch('requests.post', apply_requests_post_mock([(201, AC_POST_URL, AC_POST_RESPONSE)])) - @patch('breathecode.events.signals.event_saved', MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("requests.get", apply_requests_get_mock([(200, AC_URL, AC_RESPONSE)])) + @patch("requests.post", apply_requests_post_mock([(201, AC_POST_URL, AC_POST_RESPONSE)])) + @patch("breathecode.events.signals.event_saved", MagicMock()) def test_add_event_tags_to_student__with_two_tags__with_user(self): import logging import requests - event_kwargs = {'tags': 'they-killed-kenny1,they-killed-kenny2'} - active_campaign_academy_kwargs = {'ac_url': AC_HOST} - base = self.generate_models(user=True, - event=True, - academy=True, - active_campaign_academy=True, - event_kwargs=event_kwargs, - active_campaign_academy_kwargs=active_campaign_academy_kwargs) - - tag_kwargs = {'slug': 'they-killed-kenny1'} + event_kwargs = {"tags": "they-killed-kenny1,they-killed-kenny2"} + active_campaign_academy_kwargs = {"ac_url": AC_HOST} + base = self.generate_models( + user=True, + event=True, + academy=True, + active_campaign_academy=True, + event_kwargs=event_kwargs, + active_campaign_academy_kwargs=active_campaign_academy_kwargs, + ) + + tag_kwargs = {"slug": "they-killed-kenny1"} model1 = self.generate_models(tag=True, tag_kwargs=tag_kwargs, models=base) - tag_kwargs = {'slug': 'they-killed-kenny2'} + tag_kwargs = {"slug": "they-killed-kenny2"} model2 = self.generate_models(tag=True, tag_kwargs=tag_kwargs, models=base) logging.Logger.info.call_args_list = [] add_event_tags_to_student.delay(1, user_id=1) - self.assertEqual(logging.Logger.info.call_args_list, [ - call(TASK_STARTED_MESSAGE), - call('Adding tag 1 to acp contact 1'), - call('Adding tag 2 to acp contact 1'), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call(TASK_STARTED_MESSAGE), + call("Adding tag 1 to acp contact 1"), + call("Adding tag 2 to acp contact 1"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(requests.get.call_args_list, [ - call('https://ac.ca/api/3/contacts', - headers={'Api-Token': model1.active_campaign_academy.ac_key}, - params={'email': model1.user.email}, - timeout=2), - ]) - - self.assertEqual(requests.post.call_args_list, [ - call('https://ac.ca/api/3/contactTags', - headers={ - 'Api-Token': model1.active_campaign_academy.ac_key, - 'Content-Type': 'application/json', - 'Accept': 'application/json', - }, - 
json={'contactTag': { - 'contact': 1, - 'tag': model1.tag.acp_id - }}, - timeout=2), - call('https://ac.ca/api/3/contactTags', - headers={ - 'Api-Token': model1.active_campaign_academy.ac_key, - 'Content-Type': 'application/json', - 'Accept': 'application/json', - }, - json={'contactTag': { - 'contact': 1, - 'tag': model2.tag.acp_id - }}, - timeout=2), - ]) - - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('requests.get', apply_requests_get_mock([(200, AC_URL, AC_RESPONSE)])) - @patch('requests.post', apply_requests_post_mock([(201, AC_POST_URL, AC_POST_RESPONSE)])) - @patch('breathecode.events.signals.event_saved', MagicMock()) + self.assertEqual( + requests.get.call_args_list, + [ + call( + "https://ac.ca/api/3/contacts", + headers={"Api-Token": model1.active_campaign_academy.ac_key}, + params={"email": model1.user.email}, + timeout=2, + ), + ], + ) + + self.assertEqual( + requests.post.call_args_list, + [ + call( + "https://ac.ca/api/3/contactTags", + headers={ + "Api-Token": model1.active_campaign_academy.ac_key, + "Content-Type": "application/json", + "Accept": "application/json", + }, + json={"contactTag": {"contact": 1, "tag": model1.tag.acp_id}}, + timeout=2, + ), + call( + "https://ac.ca/api/3/contactTags", + headers={ + "Api-Token": model1.active_campaign_academy.ac_key, + "Content-Type": "application/json", + "Accept": "application/json", + }, + json={"contactTag": {"contact": 1, "tag": model2.tag.acp_id}}, + timeout=2, + ), + ], + ) + + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("requests.get", apply_requests_get_mock([(200, AC_URL, AC_RESPONSE)])) + @patch("requests.post", apply_requests_post_mock([(201, AC_POST_URL, AC_POST_RESPONSE)])) + @patch("breathecode.events.signals.event_saved", MagicMock()) def test_add_event_tags_to_student__with_two_tags__with_email(self): import logging import requests - event_kwargs = {'tags': 'they-killed-kenny1,they-killed-kenny2'} - active_campaign_academy_kwargs = {'ac_url': AC_HOST} - base = self.generate_models(event=True, - academy=True, - active_campaign_academy=True, - event_kwargs=event_kwargs, - active_campaign_academy_kwargs=active_campaign_academy_kwargs) - - tag_kwargs = {'slug': 'they-killed-kenny1'} + event_kwargs = {"tags": "they-killed-kenny1,they-killed-kenny2"} + active_campaign_academy_kwargs = {"ac_url": AC_HOST} + base = self.generate_models( + event=True, + academy=True, + active_campaign_academy=True, + event_kwargs=event_kwargs, + active_campaign_academy_kwargs=active_campaign_academy_kwargs, + ) + + tag_kwargs = {"slug": "they-killed-kenny1"} model1 = self.generate_models(tag=True, tag_kwargs=tag_kwargs, models=base) - tag_kwargs = {'slug': 'they-killed-kenny2'} + tag_kwargs = {"slug": "they-killed-kenny2"} model2 = self.generate_models(tag=True, tag_kwargs=tag_kwargs, models=base) logging.Logger.info.call_args_list = [] - add_event_tags_to_student.delay(1, email='pokemon@potato.io') + add_event_tags_to_student.delay(1, email="pokemon@potato.io") - self.assertEqual(logging.Logger.info.call_args_list, [ - call(TASK_STARTED_MESSAGE), - call('Adding tag 1 to acp contact 1'), - call('Adding tag 2 to acp contact 1'), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call(TASK_STARTED_MESSAGE), + call("Adding tag 1 to acp contact 1"), + call("Adding tag 2 to acp contact 1"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(requests.get.call_args_list, [ - 
call('https://ac.ca/api/3/contacts', - headers={'Api-Token': model1.active_campaign_academy.ac_key}, - params={'email': 'pokemon@potato.io'}, - timeout=2), - ]) - - self.assertEqual(requests.post.call_args_list, [ - call('https://ac.ca/api/3/contactTags', - headers={ - 'Api-Token': model1.active_campaign_academy.ac_key, - 'Content-Type': 'application/json', - 'Accept': 'application/json', - }, - json={'contactTag': { - 'contact': 1, - 'tag': model1.tag.acp_id - }}, - timeout=2), - call('https://ac.ca/api/3/contactTags', - headers={ - 'Api-Token': model1.active_campaign_academy.ac_key, - 'Content-Type': 'application/json', - 'Accept': 'application/json', - }, - json={'contactTag': { - 'contact': 1, - 'tag': model2.tag.acp_id - }}, - timeout=2), - ]) + self.assertEqual( + requests.get.call_args_list, + [ + call( + "https://ac.ca/api/3/contacts", + headers={"Api-Token": model1.active_campaign_academy.ac_key}, + params={"email": "pokemon@potato.io"}, + timeout=2, + ), + ], + ) + + self.assertEqual( + requests.post.call_args_list, + [ + call( + "https://ac.ca/api/3/contactTags", + headers={ + "Api-Token": model1.active_campaign_academy.ac_key, + "Content-Type": "application/json", + "Accept": "application/json", + }, + json={"contactTag": {"contact": 1, "tag": model1.tag.acp_id}}, + timeout=2, + ), + call( + "https://ac.ca/api/3/contactTags", + headers={ + "Api-Token": model1.active_campaign_academy.ac_key, + "Content-Type": "application/json", + "Accept": "application/json", + }, + json={"contactTag": {"contact": 1, "tag": model2.tag.acp_id}}, + timeout=2, + ), + ], + ) """ 🔽🔽🔽 With two Tags, a with event name and the other from the tags attr """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('requests.get', apply_requests_get_mock([(200, AC_URL, AC_RESPONSE)])) - @patch('requests.post', apply_requests_post_mock([(201, AC_POST_URL, AC_POST_RESPONSE)])) - @patch('breathecode.events.signals.event_saved', MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("requests.get", apply_requests_get_mock([(200, AC_URL, AC_RESPONSE)])) + @patch("requests.post", apply_requests_post_mock([(201, AC_POST_URL, AC_POST_RESPONSE)])) + @patch("breathecode.events.signals.event_saved", MagicMock()) def test_add_event_tags_to_student__with_two_tags__event_slug_math_with_tag__with_user(self): import logging import requests - active_campaign_academy_kwargs = {'ac_url': AC_HOST} + active_campaign_academy_kwargs = {"ac_url": AC_HOST} event_kwargs = { - 'slug': 'they-killed-kenny1', - 'tags': 'they-killed-kenny2', + "slug": "they-killed-kenny1", + "tags": "they-killed-kenny2", } - base = self.generate_models(user=True, - event=True, - academy=True, - active_campaign_academy=True, - event_kwargs=event_kwargs, - active_campaign_academy_kwargs=active_campaign_academy_kwargs) + base = self.generate_models( + user=True, + event=True, + academy=True, + active_campaign_academy=True, + event_kwargs=event_kwargs, + active_campaign_academy_kwargs=active_campaign_academy_kwargs, + ) - tag_kwargs = {'slug': 'event-they-killed-kenny1'} + tag_kwargs = {"slug": "event-they-killed-kenny1"} model1 = self.generate_models(tag=True, tag_kwargs=tag_kwargs, models=base) - tag_kwargs = {'slug': 'they-killed-kenny2'} + tag_kwargs = {"slug": "they-killed-kenny2"} model2 = self.generate_models(tag=True, tag_kwargs=tag_kwargs, models=base) logging.Logger.info.call_args_list = [] add_event_tags_to_student.delay(1, user_id=1) - 
self.assertEqual(logging.Logger.info.call_args_list, [ - call(TASK_STARTED_MESSAGE), - call('Adding tag 1 to acp contact 1'), - call('Adding tag 2 to acp contact 1'), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call(TASK_STARTED_MESSAGE), + call("Adding tag 1 to acp contact 1"), + call("Adding tag 2 to acp contact 1"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(requests.get.call_args_list, [ - call('https://ac.ca/api/3/contacts', - headers={'Api-Token': model1.active_campaign_academy.ac_key}, - params={'email': model1.user.email}, - timeout=2), - ]) - - self.assertEqual(requests.post.call_args_list, [ - call('https://ac.ca/api/3/contactTags', - headers={ - 'Api-Token': model1.active_campaign_academy.ac_key, - 'Content-Type': 'application/json', - 'Accept': 'application/json', - }, - json={'contactTag': { - 'contact': 1, - 'tag': model1.tag.acp_id - }}, - timeout=2), - call('https://ac.ca/api/3/contactTags', - headers={ - 'Api-Token': model1.active_campaign_academy.ac_key, - 'Content-Type': 'application/json', - 'Accept': 'application/json', - }, - json={'contactTag': { - 'contact': 1, - 'tag': model2.tag.acp_id - }}, - timeout=2), - ]) - - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('requests.get', apply_requests_get_mock([(200, AC_URL, AC_RESPONSE)])) - @patch('requests.post', apply_requests_post_mock([(201, AC_POST_URL, AC_POST_RESPONSE)])) - @patch('breathecode.events.signals.event_saved', MagicMock()) + self.assertEqual( + requests.get.call_args_list, + [ + call( + "https://ac.ca/api/3/contacts", + headers={"Api-Token": model1.active_campaign_academy.ac_key}, + params={"email": model1.user.email}, + timeout=2, + ), + ], + ) + + self.assertEqual( + requests.post.call_args_list, + [ + call( + "https://ac.ca/api/3/contactTags", + headers={ + "Api-Token": model1.active_campaign_academy.ac_key, + "Content-Type": "application/json", + "Accept": "application/json", + }, + json={"contactTag": {"contact": 1, "tag": model1.tag.acp_id}}, + timeout=2, + ), + call( + "https://ac.ca/api/3/contactTags", + headers={ + "Api-Token": model1.active_campaign_academy.ac_key, + "Content-Type": "application/json", + "Accept": "application/json", + }, + json={"contactTag": {"contact": 1, "tag": model2.tag.acp_id}}, + timeout=2, + ), + ], + ) + + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("requests.get", apply_requests_get_mock([(200, AC_URL, AC_RESPONSE)])) + @patch("requests.post", apply_requests_post_mock([(201, AC_POST_URL, AC_POST_RESPONSE)])) + @patch("breathecode.events.signals.event_saved", MagicMock()) def test_add_event_tags_to_student__with_two_tags__event_slug_math_with_tag__with_email(self): import logging import requests - active_campaign_academy_kwargs = {'ac_url': AC_HOST} + active_campaign_academy_kwargs = {"ac_url": AC_HOST} event_kwargs = { - 'slug': 'they-killed-kenny1', - 'tags': 'they-killed-kenny2', + "slug": "they-killed-kenny1", + "tags": "they-killed-kenny2", } - base = self.generate_models(event=True, - academy=True, - active_campaign_academy=True, - event_kwargs=event_kwargs, - active_campaign_academy_kwargs=active_campaign_academy_kwargs) + base = self.generate_models( + event=True, + academy=True, + active_campaign_academy=True, + event_kwargs=event_kwargs, + active_campaign_academy_kwargs=active_campaign_academy_kwargs, + ) - tag_kwargs = {'slug': 'event-they-killed-kenny1'} + tag_kwargs = {"slug": 
"event-they-killed-kenny1"} model1 = self.generate_models(tag=True, tag_kwargs=tag_kwargs, models=base) - tag_kwargs = {'slug': 'they-killed-kenny2'} + tag_kwargs = {"slug": "they-killed-kenny2"} model2 = self.generate_models(tag=True, tag_kwargs=tag_kwargs, models=base) logging.Logger.info.call_args_list = [] - add_event_tags_to_student.delay(1, email='pokemon@potato.io') + add_event_tags_to_student.delay(1, email="pokemon@potato.io") - self.assertEqual(logging.Logger.info.call_args_list, [ - call(TASK_STARTED_MESSAGE), - call('Adding tag 1 to acp contact 1'), - call('Adding tag 2 to acp contact 1'), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call(TASK_STARTED_MESSAGE), + call("Adding tag 1 to acp contact 1"), + call("Adding tag 2 to acp contact 1"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(requests.get.call_args_list, [ - call('https://ac.ca/api/3/contacts', - headers={'Api-Token': model1.active_campaign_academy.ac_key}, - params={'email': 'pokemon@potato.io'}, - timeout=2), - ]) - - self.assertEqual(requests.post.call_args_list, [ - call('https://ac.ca/api/3/contactTags', - headers={ - 'Api-Token': model1.active_campaign_academy.ac_key, - 'Content-Type': 'application/json', - 'Accept': 'application/json', - }, - json={'contactTag': { - 'contact': 1, - 'tag': model1.tag.acp_id - }}, - timeout=2), - call('https://ac.ca/api/3/contactTags', - headers={ - 'Api-Token': model1.active_campaign_academy.ac_key, - 'Content-Type': 'application/json', - 'Accept': 'application/json', - }, - json={'contactTag': { - 'contact': 1, - 'tag': model2.tag.acp_id - }}, - timeout=2), - ]) + self.assertEqual( + requests.get.call_args_list, + [ + call( + "https://ac.ca/api/3/contacts", + headers={"Api-Token": model1.active_campaign_academy.ac_key}, + params={"email": "pokemon@potato.io"}, + timeout=2, + ), + ], + ) + + self.assertEqual( + requests.post.call_args_list, + [ + call( + "https://ac.ca/api/3/contactTags", + headers={ + "Api-Token": model1.active_campaign_academy.ac_key, + "Content-Type": "application/json", + "Accept": "application/json", + }, + json={"contactTag": {"contact": 1, "tag": model1.tag.acp_id}}, + timeout=2, + ), + call( + "https://ac.ca/api/3/contactTags", + headers={ + "Api-Token": model1.active_campaign_academy.ac_key, + "Content-Type": "application/json", + "Accept": "application/json", + }, + json={"contactTag": {"contact": 1, "tag": model2.tag.acp_id}}, + timeout=2, + ), + ], + ) """ 🔽🔽🔽 With three Tags, a with event name and the other from the tags attr """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('requests.get', apply_requests_get_mock([(200, AC_URL, AC_RESPONSE)])) - @patch('requests.post', apply_requests_post_mock([(201, AC_POST_URL, AC_POST_RESPONSE)])) - @patch('breathecode.events.signals.event_saved', MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("requests.get", apply_requests_get_mock([(200, AC_URL, AC_RESPONSE)])) + @patch("requests.post", apply_requests_post_mock([(201, AC_POST_URL, AC_POST_RESPONSE)])) + @patch("breathecode.events.signals.event_saved", MagicMock()) def test_add_event_tags_to_student__with_three_tags__event_slug_math_with_tag__with_user(self): import logging import requests - active_campaign_academy_kwargs = {'ac_url': AC_HOST} + active_campaign_academy_kwargs = {"ac_url": AC_HOST} event_kwargs = { - 'slug': 'they-killed-kenny1', - 'tags': 
'they-killed-kenny2,they-killed-kenny3', + "slug": "they-killed-kenny1", + "tags": "they-killed-kenny2,they-killed-kenny3", } - base = self.generate_models(user=True, - event=True, - academy=True, - active_campaign_academy=True, - event_kwargs=event_kwargs, - active_campaign_academy_kwargs=active_campaign_academy_kwargs) + base = self.generate_models( + user=True, + event=True, + academy=True, + active_campaign_academy=True, + event_kwargs=event_kwargs, + active_campaign_academy_kwargs=active_campaign_academy_kwargs, + ) - tag_kwargs = {'slug': 'event-they-killed-kenny1'} + tag_kwargs = {"slug": "event-they-killed-kenny1"} model1 = self.generate_models(tag=True, tag_kwargs=tag_kwargs, models=base) - tag_kwargs = {'slug': 'they-killed-kenny2'} + tag_kwargs = {"slug": "they-killed-kenny2"} model2 = self.generate_models(tag=True, tag_kwargs=tag_kwargs, models=base) - tag_kwargs = {'slug': 'they-killed-kenny3'} + tag_kwargs = {"slug": "they-killed-kenny3"} model3 = self.generate_models(tag=True, tag_kwargs=tag_kwargs, models=base) logging.Logger.info.call_args_list = [] add_event_tags_to_student.delay(1, user_id=1) - self.assertEqual(logging.Logger.info.call_args_list, [ - call(TASK_STARTED_MESSAGE), - call('Adding tag 1 to acp contact 1'), - call('Adding tag 2 to acp contact 1'), - call('Adding tag 3 to acp contact 1'), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call(TASK_STARTED_MESSAGE), + call("Adding tag 1 to acp contact 1"), + call("Adding tag 2 to acp contact 1"), + call("Adding tag 3 to acp contact 1"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(requests.get.call_args_list, [ - call('https://ac.ca/api/3/contacts', - headers={'Api-Token': model1.active_campaign_academy.ac_key}, - params={'email': model1.user.email}, - timeout=2), - ]) - - self.assertEqual(requests.post.call_args_list, [ - call('https://ac.ca/api/3/contactTags', - headers={ - 'Api-Token': model1.active_campaign_academy.ac_key, - 'Content-Type': 'application/json', - 'Accept': 'application/json', - }, - json={'contactTag': { - 'contact': 1, - 'tag': model1.tag.acp_id - }}, - timeout=2), - call('https://ac.ca/api/3/contactTags', - headers={ - 'Api-Token': model1.active_campaign_academy.ac_key, - 'Content-Type': 'application/json', - 'Accept': 'application/json', - }, - json={'contactTag': { - 'contact': 1, - 'tag': model2.tag.acp_id - }}, - timeout=2), - call('https://ac.ca/api/3/contactTags', - headers={ - 'Api-Token': model1.active_campaign_academy.ac_key, - 'Content-Type': 'application/json', - 'Accept': 'application/json', - }, - json={'contactTag': { - 'contact': 1, - 'tag': model3.tag.acp_id - }}, - timeout=2), - ]) - - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('requests.get', apply_requests_get_mock([(200, AC_URL, AC_RESPONSE)])) - @patch('requests.post', apply_requests_post_mock([(201, AC_POST_URL, AC_POST_RESPONSE)])) - @patch('breathecode.events.signals.event_saved', MagicMock()) + self.assertEqual( + requests.get.call_args_list, + [ + call( + "https://ac.ca/api/3/contacts", + headers={"Api-Token": model1.active_campaign_academy.ac_key}, + params={"email": model1.user.email}, + timeout=2, + ), + ], + ) + + self.assertEqual( + requests.post.call_args_list, + [ + call( + "https://ac.ca/api/3/contactTags", + headers={ + "Api-Token": model1.active_campaign_academy.ac_key, + "Content-Type": "application/json", + "Accept": "application/json", + }, + json={"contactTag": {"contact": 1, "tag": 
model1.tag.acp_id}}, + timeout=2, + ), + call( + "https://ac.ca/api/3/contactTags", + headers={ + "Api-Token": model1.active_campaign_academy.ac_key, + "Content-Type": "application/json", + "Accept": "application/json", + }, + json={"contactTag": {"contact": 1, "tag": model2.tag.acp_id}}, + timeout=2, + ), + call( + "https://ac.ca/api/3/contactTags", + headers={ + "Api-Token": model1.active_campaign_academy.ac_key, + "Content-Type": "application/json", + "Accept": "application/json", + }, + json={"contactTag": {"contact": 1, "tag": model3.tag.acp_id}}, + timeout=2, + ), + ], + ) + + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("requests.get", apply_requests_get_mock([(200, AC_URL, AC_RESPONSE)])) + @patch("requests.post", apply_requests_post_mock([(201, AC_POST_URL, AC_POST_RESPONSE)])) + @patch("breathecode.events.signals.event_saved", MagicMock()) def test_add_event_tags_to_student__with_three_tags__event_slug_math_with_tag__with_email(self): import logging import requests - active_campaign_academy_kwargs = {'ac_url': AC_HOST} + active_campaign_academy_kwargs = {"ac_url": AC_HOST} event_kwargs = { - 'slug': 'they-killed-kenny1', - 'tags': 'they-killed-kenny2,they-killed-kenny3', + "slug": "they-killed-kenny1", + "tags": "they-killed-kenny2,they-killed-kenny3", } - base = self.generate_models(event=True, - academy=True, - active_campaign_academy=True, - event_kwargs=event_kwargs, - active_campaign_academy_kwargs=active_campaign_academy_kwargs) + base = self.generate_models( + event=True, + academy=True, + active_campaign_academy=True, + event_kwargs=event_kwargs, + active_campaign_academy_kwargs=active_campaign_academy_kwargs, + ) - tag_kwargs = {'slug': 'event-they-killed-kenny1'} + tag_kwargs = {"slug": "event-they-killed-kenny1"} model1 = self.generate_models(tag=True, tag_kwargs=tag_kwargs, models=base) - tag_kwargs = {'slug': 'they-killed-kenny2'} + tag_kwargs = {"slug": "they-killed-kenny2"} model2 = self.generate_models(tag=True, tag_kwargs=tag_kwargs, models=base) - tag_kwargs = {'slug': 'they-killed-kenny3'} + tag_kwargs = {"slug": "they-killed-kenny3"} model3 = self.generate_models(tag=True, tag_kwargs=tag_kwargs, models=base) logging.Logger.info.call_args_list = [] - add_event_tags_to_student.delay(1, email='pokemon@potato.io') + add_event_tags_to_student.delay(1, email="pokemon@potato.io") - self.assertEqual(logging.Logger.info.call_args_list, [ - call(TASK_STARTED_MESSAGE), - call('Adding tag 1 to acp contact 1'), - call('Adding tag 2 to acp contact 1'), - call('Adding tag 3 to acp contact 1'), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call(TASK_STARTED_MESSAGE), + call("Adding tag 1 to acp contact 1"), + call("Adding tag 2 to acp contact 1"), + call("Adding tag 3 to acp contact 1"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(requests.get.call_args_list, [ - call('https://ac.ca/api/3/contacts', - headers={'Api-Token': model1.active_campaign_academy.ac_key}, - params={'email': 'pokemon@potato.io'}, - timeout=2), - ]) - - self.assertEqual(requests.post.call_args_list, [ - call('https://ac.ca/api/3/contactTags', - headers={ - 'Api-Token': model1.active_campaign_academy.ac_key, - 'Content-Type': 'application/json', - 'Accept': 'application/json', - }, - json={'contactTag': { - 'contact': 1, - 'tag': model1.tag.acp_id - }}, - timeout=2), - call('https://ac.ca/api/3/contactTags', - headers={ - 'Api-Token': model1.active_campaign_academy.ac_key, - 
'Content-Type': 'application/json', - 'Accept': 'application/json', - }, - json={'contactTag': { - 'contact': 1, - 'tag': model2.tag.acp_id - }}, - timeout=2), - call('https://ac.ca/api/3/contactTags', - headers={ - 'Api-Token': model1.active_campaign_academy.ac_key, - 'Content-Type': 'application/json', - 'Accept': 'application/json', - }, - json={'contactTag': { - 'contact': 1, - 'tag': model3.tag.acp_id - }}, - timeout=2), - ]) + self.assertEqual( + requests.get.call_args_list, + [ + call( + "https://ac.ca/api/3/contacts", + headers={"Api-Token": model1.active_campaign_academy.ac_key}, + params={"email": "pokemon@potato.io"}, + timeout=2, + ), + ], + ) + + self.assertEqual( + requests.post.call_args_list, + [ + call( + "https://ac.ca/api/3/contactTags", + headers={ + "Api-Token": model1.active_campaign_academy.ac_key, + "Content-Type": "application/json", + "Accept": "application/json", + }, + json={"contactTag": {"contact": 1, "tag": model1.tag.acp_id}}, + timeout=2, + ), + call( + "https://ac.ca/api/3/contactTags", + headers={ + "Api-Token": model1.active_campaign_academy.ac_key, + "Content-Type": "application/json", + "Accept": "application/json", + }, + json={"contactTag": {"contact": 1, "tag": model2.tag.acp_id}}, + timeout=2, + ), + call( + "https://ac.ca/api/3/contactTags", + headers={ + "Api-Token": model1.active_campaign_academy.ac_key, + "Content-Type": "application/json", + "Accept": "application/json", + }, + json={"contactTag": {"contact": 1, "tag": model3.tag.acp_id}}, + timeout=2, + ), + ], + ) diff --git a/breathecode/marketing/tests/tasks/tests_create_form_entry.py b/breathecode/marketing/tests/tasks/tests_create_form_entry.py index 0978af0c9..d41c1fbbc 100644 --- a/breathecode/marketing/tests/tasks/tests_create_form_entry.py +++ b/breathecode/marketing/tests/tasks/tests_create_form_entry.py @@ -1,6 +1,7 @@ """ Test /answer/:id """ + from django.utils import timezone from breathecode.marketing.tasks import create_form_entry from breathecode.marketing import tasks @@ -29,7 +30,7 @@ from ..mixins import MarketingTestCase from faker import Faker -GOOGLE_CLOUD_KEY = os.getenv('GOOGLE_CLOUD_KEY', None) +GOOGLE_CLOUD_KEY = os.getenv("GOOGLE_CLOUD_KEY", None) fake = Faker() fake_url = fake.url() @@ -38,341 +39,391 @@ def random_string(): - return ''.join(choices(string.ascii_letters, k=10)) + return "".join(choices(string.ascii_letters, k=10)) def generate_form_entry_kwargs(): """That random values is too long that i prefer have it in one function""" return { - 'fb_leadgen_id': randint(0, 9999), - 'fb_page_id': randint(0, 9999), - 'fb_form_id': randint(0, 9999), - 'fb_adgroup_id': randint(0, 9999), - 'fb_ad_id': randint(0, 9999), - 'gclid': random_string(), - 'first_name': choice(['Rene', 'Albert', 'Immanuel']), - 'last_name': choice(['Descartes', 'Camus', 'Kant']), - 'email': choice(['a@a.com', 'b@b.com', 'c@c.com']), - 'phone': '123456789', - 'course': random_string(), - 'client_comments': random_string(), - 'location': random_string(), - 'language': 'en', - 'utm_url': random_string(), - 'utm_medium': random_string(), - 'utm_campaign': random_string(), - 'utm_source': random_string(), - 'referral_key': random_string(), - 'gclid': random_string(), - 'tags': random_string(), - 'automations': random_string(), - 'street_address': random_string(), - 'country': random_string(), - 'city': random_string(), - 'latitude': 15, - 'longitude': 15, - 'state': random_string(), - 'zip_code': randint(0, 9999), - 'browser_lang': random_string(), - 'storage_status': choice(['PENDING', 
'PERSISTED']), - 'lead_type': choice(['STRONG', 'SOFT', 'DISCOVERY']), - 'deal_status': choice(['WON', 'LOST']), - 'sentiment': choice(['GOOD', 'BAD']), - 'current_download': fake_url, + "fb_leadgen_id": randint(0, 9999), + "fb_page_id": randint(0, 9999), + "fb_form_id": randint(0, 9999), + "fb_adgroup_id": randint(0, 9999), + "fb_ad_id": randint(0, 9999), + "gclid": random_string(), + "first_name": choice(["Rene", "Albert", "Immanuel"]), + "last_name": choice(["Descartes", "Camus", "Kant"]), + "email": choice(["a@a.com", "b@b.com", "c@c.com"]), + "phone": "123456789", + "course": random_string(), + "client_comments": random_string(), + "location": random_string(), + "language": "en", + "utm_url": random_string(), + "utm_medium": random_string(), + "utm_campaign": random_string(), + "utm_source": random_string(), + "referral_key": random_string(), + "gclid": random_string(), + "tags": random_string(), + "automations": random_string(), + "street_address": random_string(), + "country": random_string(), + "city": random_string(), + "latitude": 15, + "longitude": 15, + "state": random_string(), + "zip_code": randint(0, 9999), + "browser_lang": random_string(), + "storage_status": choice(["PENDING", "PERSISTED"]), + "lead_type": choice(["STRONG", "SOFT", "DISCOVERY"]), + "deal_status": choice(["WON", "LOST"]), + "sentiment": choice(["GOOD", "BAD"]), + "current_download": fake_url, } def form_entry_field(data={}): return { - 'id': 1, - 'fb_leadgen_id': None, - 'fb_page_id': None, - 'fb_form_id': None, - 'fb_adgroup_id': None, - 'fb_ad_id': None, - 'first_name': '', - 'last_name': '', - 'email': None, - 'phone': None, - 'course': None, - 'client_comments': None, - 'current_download': None, - 'location': None, - 'language': 'en', - 'utm_url': None, - 'utm_medium': None, - 'utm_campaign': None, - 'utm_content': None, - 'utm_source': None, - 'referral_key': None, - 'gclid': None, - 'tags': '', - 'automations': '', - 'street_address': None, - 'sex': None, - 'country': None, - 'city': None, - 'custom_fields': None, - 'latitude': None, - 'longitude': None, - 'state': None, - 'zip_code': None, - 'browser_lang': None, - 'storage_status': 'PENDING', - 'storage_status_text': '', - 'lead_type': None, - 'deal_status': None, - 'sentiment': None, - 'ac_contact_id': None, - 'ac_deal_id': None, - 'ac_expected_cohort': None, - 'utm_placement': None, - 'utm_plan': None, - 'utm_term': None, - 'won_at': None, - 'contact_id': None, - 'academy_id': None, - 'user_id': None, - 'lead_generation_app_id': None, - 'ac_deal_course': None, - 'ac_deal_location': None, - 'ac_deal_owner_full_name': None, - 'ac_deal_owner_id': None, - 'ac_expected_cohort_date': None, - 'ac_deal_amount': None, - 'ac_deal_currency_code': None, + "id": 1, + "fb_leadgen_id": None, + "fb_page_id": None, + "fb_form_id": None, + "fb_adgroup_id": None, + "fb_ad_id": None, + "first_name": "", + "last_name": "", + "email": None, + "phone": None, + "course": None, + "client_comments": None, + "current_download": None, + "location": None, + "language": "en", + "utm_url": None, + "utm_medium": None, + "utm_campaign": None, + "utm_content": None, + "utm_source": None, + "referral_key": None, + "gclid": None, + "tags": "", + "automations": "", + "street_address": None, + "sex": None, + "country": None, + "city": None, + "custom_fields": None, + "latitude": None, + "longitude": None, + "state": None, + "zip_code": None, + "browser_lang": None, + "storage_status": "PENDING", + "storage_status_text": "", + "lead_type": None, + "deal_status": None, + "sentiment": 
None, + "ac_contact_id": None, + "ac_deal_id": None, + "ac_expected_cohort": None, + "utm_placement": None, + "utm_plan": None, + "utm_term": None, + "won_at": None, + "contact_id": None, + "academy_id": None, + "user_id": None, + "lead_generation_app_id": None, + "ac_deal_course": None, + "ac_deal_location": None, + "ac_deal_owner_full_name": None, + "ac_deal_owner_id": None, + "ac_expected_cohort_date": None, + "ac_deal_amount": None, + "ac_deal_currency_code": None, **data, } def form_entry_serializer(self, data={}): return { - 'id': 1, - 'fb_leadgen_id': None, - 'fb_page_id': None, - 'fb_form_id': None, - 'fb_adgroup_id': None, - 'fb_ad_id': None, - 'first_name': '', - 'last_name': '', - 'email': None, - 'phone': None, - 'course': None, - 'client_comments': None, - 'custom_fields': None, - 'location': None, - 'language': 'en', - 'utm_url': None, - 'utm_medium': None, - 'utm_campaign': None, - 'utm_content': None, - 'utm_source': None, - 'utm_term': None, - 'utm_placement': None, - 'utm_plan': None, - 'current_download': None, - 'referral_key': None, - 'gclid': None, - 'tags': '', - 'automations': '', - 'street_address': None, - 'sex': None, - 'country': None, - 'city': None, - 'latitude': None, - 'longitude': None, - 'state': None, - 'zip_code': None, - 'browser_lang': None, - 'storage_status': 'PENDING', - 'storage_status_text': '', - 'lead_type': None, - 'deal_status': None, - 'sentiment': None, - 'ac_expected_cohort': None, - 'ac_contact_id': None, - 'ac_deal_id': None, - 'won_at': None, - 'created_at': self.bc.datetime.to_iso_string(UTC_NOW), - 'updated_at': self.bc.datetime.to_iso_string(UTC_NOW), - 'contact': None, - 'academy': None, - 'lead_generation_app': None, - 'user': None, - 'ac_deal_course': None, - 'ac_deal_location': None, - 'ac_deal_owner_full_name': None, - 'ac_deal_owner_id': None, - 'ac_expected_cohort_date': None, - 'ac_deal_amount': None, - 'ac_deal_currency_code': None, - **data + "id": 1, + "fb_leadgen_id": None, + "fb_page_id": None, + "fb_form_id": None, + "fb_adgroup_id": None, + "fb_ad_id": None, + "first_name": "", + "last_name": "", + "email": None, + "phone": None, + "course": None, + "client_comments": None, + "custom_fields": None, + "location": None, + "language": "en", + "utm_url": None, + "utm_medium": None, + "utm_campaign": None, + "utm_content": None, + "utm_source": None, + "utm_term": None, + "utm_placement": None, + "utm_plan": None, + "current_download": None, + "referral_key": None, + "gclid": None, + "tags": "", + "automations": "", + "street_address": None, + "sex": None, + "country": None, + "city": None, + "latitude": None, + "longitude": None, + "state": None, + "zip_code": None, + "browser_lang": None, + "storage_status": "PENDING", + "storage_status_text": "", + "lead_type": None, + "deal_status": None, + "sentiment": None, + "ac_expected_cohort": None, + "ac_contact_id": None, + "ac_deal_id": None, + "won_at": None, + "created_at": self.bc.datetime.to_iso_string(UTC_NOW), + "updated_at": self.bc.datetime.to_iso_string(UTC_NOW), + "contact": None, + "academy": None, + "lead_generation_app": None, + "user": None, + "ac_deal_course": None, + "ac_deal_location": None, + "ac_deal_owner_full_name": None, + "ac_deal_owner_id": None, + "ac_expected_cohort_date": None, + "ac_deal_amount": None, + "ac_deal_currency_code": None, + **data, } class CreateFormEntryTestSuite(MarketingTestCase): - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) + @patch("logging.Logger.info", MagicMock()) + 
@patch("logging.Logger.error", MagicMock()) def test_create_form_entry_with_dict_empty_without_csv_upload_id(self): """Test create_form_entry task without data""" create_form_entry.delay(1, **{}) self.assertEqual(self.count_form_entry(), 0) - self.assertEqual(self.bc.database.list_of('monitoring.CSVUpload'), []) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Create form entry started'), - call('Create form entry started'), - ]) - self.assertEqual(logging.Logger.error.call_args_list, [call('No CSVUpload found with this id', exc_info=True)]) - - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.marketing.tasks.persist_single_lead.delay', MagicMock()) + self.assertEqual(self.bc.database.list_of("monitoring.CSVUpload"), []) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Create form entry started"), + call("Create form entry started"), + ], + ) + self.assertEqual(logging.Logger.error.call_args_list, [call("No CSVUpload found with this id", exc_info=True)]) + + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.marketing.tasks.persist_single_lead.delay", MagicMock()) def test_create_form_entry_with_dict_empty_with_csv_upload_id(self): """Test create_form_entry task without data""" - model = self.bc.database.create(csv_upload={'log': ''}) + model = self.bc.database.create(csv_upload={"log": ""}) logging.Logger.info.call_args_list = [] create_form_entry.delay(1, **{}) self.assertEqual(self.count_form_entry(), 0) - self.assertEqual(self.bc.database.list_of('monitoring.CSVUpload'), - [{ - **self.bc.format.to_dict(model.csv_upload), - 'status': 'ERROR', - 'finished_at': UTC_NOW, - 'log': 'No first name in form entry, No last name in form entry, No email '\ - 'in form entry, No location or academy in form entry. ' - }]) - self.assertEqual(logging.Logger.info.call_args_list, [call('Create form entry started')]) - self.assertEqual(logging.Logger.error.call_args_list, [ - call('No first name in form entry'), - call('No last name in form entry'), - call('No email in form entry'), - call('No location or academy in form entry'), - call('Missing field in received item'), - call({}), - call( - 'No first name in form entry, No last name in form entry, No email in form entry, No location or academy in form entry. ', - exc_info=True), - ]) + self.assertEqual( + self.bc.database.list_of("monitoring.CSVUpload"), + [ + { + **self.bc.format.to_dict(model.csv_upload), + "status": "ERROR", + "finished_at": UTC_NOW, + "log": "No first name in form entry, No last name in form entry, No email " + "in form entry, No location or academy in form entry. ", + } + ], + ) + self.assertEqual(logging.Logger.info.call_args_list, [call("Create form entry started")]) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call("No first name in form entry"), + call("No last name in form entry"), + call("No email in form entry"), + call("No location or academy in form entry"), + call("Missing field in received item"), + call({}), + call( + "No first name in form entry, No last name in form entry, No email in form entry, No location or academy in form entry. 
", + exc_info=True, + ), + ], + ) self.assertEqual(tasks.persist_single_lead.delay.call_args_list, []) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.marketing.tasks.persist_single_lead.delay', MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.marketing.tasks.persist_single_lead.delay", MagicMock()) def test_create_form_entry_with_dict_check_regex(self): """Test create_form_entry task without data""" - cases = [('Brandon' + self.bc.random.string(number=True, size=1), - 'Smith' + self.bc.random.string(number=True, size=1), 'test12.net'), - ('Brandon' + self.bc.random.string(symbol=True, size=1), - 'Smith' + self.bc.random.string(symbol=True, size=1), 'test12@.net'), - ('Brandon' + self.bc.random.string(symbol=True, size=1), - 'Smith' + self.bc.random.string(symbol=True, size=1), 'test12.net@'), - ('Brandon' + self.bc.random.string(symbol=True, size=1), - 'Smith' + self.bc.random.string(symbol=True, size=1), '@test12.net')] - - model = self.bc.database.create(csv_upload={'log': ''}) + cases = [ + ( + "Brandon" + self.bc.random.string(number=True, size=1), + "Smith" + self.bc.random.string(number=True, size=1), + "test12.net", + ), + ( + "Brandon" + self.bc.random.string(symbol=True, size=1), + "Smith" + self.bc.random.string(symbol=True, size=1), + "test12@.net", + ), + ( + "Brandon" + self.bc.random.string(symbol=True, size=1), + "Smith" + self.bc.random.string(symbol=True, size=1), + "test12.net@", + ), + ( + "Brandon" + self.bc.random.string(symbol=True, size=1), + "Smith" + self.bc.random.string(symbol=True, size=1), + "@test12.net", + ), + ] + + model = self.bc.database.create(csv_upload={"log": ""}) for first_name, last_name, email in cases: slug = self.bc.fake.slug() logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] - model.csv_upload.log = '' + model.csv_upload.log = "" model.csv_upload.save() data = { - 'first_name': first_name, - 'last_name': last_name, - 'email': email, - 'location': 'Madrid', - 'academy': slug + "first_name": first_name, + "last_name": last_name, + "email": email, + "location": "Madrid", + "academy": slug, } create_form_entry.delay(1, **data) self.assertEqual(self.count_form_entry(), 0) - self.assertEqual(self.bc.database.list_of('monitoring.CSVUpload'), - [{ - **self.bc.format.to_dict(model.csv_upload), - 'status': 'ERROR', - 'finished_at': UTC_NOW, - 'log': f'No academy exists with this academy active_campaign_slug: {slug}, '\ - f'No academy exists with this academy slug: {slug}, first '\ - 'name has incorrect characters, last name has incorrect characters, '\ - 'email has incorrect format, No location or academy in form entry. 
' - }]) - self.assertEqual(logging.Logger.info.call_args_list, [call('Create form entry started')]) - self.assertEqual(logging.Logger.error.call_args_list, [ - call(f'No academy exists with this academy active_campaign_slug: {data["academy"]}'), - call(f'No academy exists with this academy slug: {data["academy"]}'), - call('first name has incorrect characters'), - call('last name has incorrect characters'), - call('email has incorrect format'), - call('No location or academy in form entry'), - call('Missing field in received item'), - call(data), - call( - f'No academy exists with this academy active_campaign_slug: {slug}, No ' - f'academy exists with this academy slug: {slug}, first name has incorrect characters, ' - 'last name has incorrect characters, email has incorrect format, No location or ' - 'academy in form entry. ', - exc_info=True), - ]) + self.assertEqual( + self.bc.database.list_of("monitoring.CSVUpload"), + [ + { + **self.bc.format.to_dict(model.csv_upload), + "status": "ERROR", + "finished_at": UTC_NOW, + "log": f"No academy exists with this academy active_campaign_slug: {slug}, " + f"No academy exists with this academy slug: {slug}, first " + "name has incorrect characters, last name has incorrect characters, " + "email has incorrect format, No location or academy in form entry. ", + } + ], + ) + self.assertEqual(logging.Logger.info.call_args_list, [call("Create form entry started")]) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call(f'No academy exists with this academy active_campaign_slug: {data["academy"]}'), + call(f'No academy exists with this academy slug: {data["academy"]}'), + call("first name has incorrect characters"), + call("last name has incorrect characters"), + call("email has incorrect format"), + call("No location or academy in form entry"), + call("Missing field in received item"), + call(data), + call( + f"No academy exists with this academy active_campaign_slug: {slug}, No " + f"academy exists with this academy slug: {slug}, first name has incorrect characters, " + "last name has incorrect characters, email has incorrect format, No location or " + "academy in form entry. 
", + exc_info=True, + ), + ], + ) self.assertEqual(tasks.persist_single_lead.delay.call_args_list, []) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.marketing.tasks.persist_single_lead.delay', MagicMock()) - @patch('uuid.UUID.int', PropertyMock(return_value=1000)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.marketing.tasks.persist_single_lead.delay", MagicMock()) + @patch("uuid.UUID.int", PropertyMock(return_value=1000)) def test_create_form_entry_with_dict_with_correct_format(self): """Test create_form_entry task without data""" - model = self.bc.database.create(csv_upload={'log': ''}, academy={'active_campaign_slug': self.bc.fake.slug()}) + model = self.bc.database.create(csv_upload={"log": ""}, academy={"active_campaign_slug": self.bc.fake.slug()}) logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] data = { - 'first_name': 'John', - 'last_name': 'Smith', - 'email': 'test@gmail.com', - 'location': model.academy.active_campaign_slug, - 'academy': model.academy.slug + "first_name": "John", + "last_name": "Smith", + "email": "test@gmail.com", + "location": model.academy.active_campaign_slug, + "academy": model.academy.slug, } create_form_entry.delay(1, **data) - del data['academy'] + del data["academy"] self.assertEqual( - self.bc.database.list_of('marketing.FormEntry'), - [form_entry_field({ - **data, - 'attribution_id': '75b36c508866d18732305da14fe9a0', - 'academy_id': 1, - })]) - self.assertEqual(self.bc.database.list_of('monitoring.CSVUpload'), - [{ - **self.bc.format.to_dict(model.csv_upload), - 'status': 'DONE', - 'finished_at': UTC_NOW, - }]) - self.assertEqual(logging.Logger.info.call_args_list, - [call('Create form entry started'), - call('create_form_entry successfully created')]) + self.bc.database.list_of("marketing.FormEntry"), + [ + form_entry_field( + { + **data, + "attribution_id": "75b36c508866d18732305da14fe9a0", + "academy_id": 1, + } + ) + ], + ) + self.assertEqual( + self.bc.database.list_of("monitoring.CSVUpload"), + [ + { + **self.bc.format.to_dict(model.csv_upload), + "status": "DONE", + "finished_at": UTC_NOW, + } + ], + ) + self.assertEqual( + logging.Logger.info.call_args_list, + [call("Create form entry started"), call("create_form_entry successfully created")], + ) self.assertEqual(logging.Logger.error.call_args_list, []) - self.bc.check.calls(tasks.persist_single_lead.delay.call_args_list, [ - call( - form_entry_serializer(self, { - **data, - 'academy': 1, - 'attribution_id': '75b36c508866d18732305da14fe9a0', - })), - ]) + self.bc.check.calls( + tasks.persist_single_lead.delay.call_args_list, + [ + call( + form_entry_serializer( + self, + { + **data, + "academy": 1, + "attribution_id": "75b36c508866d18732305da14fe9a0", + }, + ) + ), + ], + ) diff --git a/breathecode/marketing/tests/tasks/tests_persist_single_lead.py b/breathecode/marketing/tests/tasks/tests_persist_single_lead.py index 8d9c4ffef..9366edfc7 100644 --- a/breathecode/marketing/tests/tasks/tests_persist_single_lead.py +++ b/breathecode/marketing/tests/tasks/tests_persist_single_lead.py @@ -1,6 +1,7 @@ """ Test /answer/:id """ + import requests from breathecode.marketing.tasks import persist_single_lead import logging @@ -19,49 +20,55 @@ MAILGUN_URL = 
f"https://api.mailgun.net/v3/{os.environ.get('MAILGUN_DOMAIN')}/messages" -GOOGLE_CLOUD_KEY = os.getenv('GOOGLE_CLOUD_KEY', None) -GOOGLE_MAPS_URL = ('https://maps.googleapis.com/maps/api/geocode/json?latlng=15.000000000000000,' - f'15.000000000000000&key={GOOGLE_CLOUD_KEY}') +GOOGLE_CLOUD_KEY = os.getenv("GOOGLE_CLOUD_KEY", None) +GOOGLE_MAPS_URL = ( + "https://maps.googleapis.com/maps/api/geocode/json?latlng=15.000000000000000," + f"15.000000000000000&key={GOOGLE_CLOUD_KEY}" +) GOOGLE_MAPS_INVALID_REQUEST = { - 'status': 'INVALID_REQUEST', + "status": "INVALID_REQUEST", } GOOGLE_MAPS_OK = { - 'status': - 'OK', - 'results': [{ - 'address_components': [{ - 'types': { - 'country': 'US', - }, - 'long_name': 'US', - }, { - 'types': { - 'locality': 'New York', - }, - 'long_name': 'New York', - }, { - 'types': { - 'route': 'Avenue', - }, - 'long_name': 'Avenue', - }, { - 'types': { - 'postal_code': '10028' - }, - 'long_name': '10028', - }] - }] + "status": "OK", + "results": [ + { + "address_components": [ + { + "types": { + "country": "US", + }, + "long_name": "US", + }, + { + "types": { + "locality": "New York", + }, + "long_name": "New York", + }, + { + "types": { + "route": "Avenue", + }, + "long_name": "Avenue", + }, + { + "types": {"postal_code": "10028"}, + "long_name": "10028", + }, + ] + } + ], } def random_string(): - return ''.join(choices(string.ascii_letters, k=10)) + return "".join(choices(string.ascii_letters, k=10)) def fix_db_field(data={}): - del data['ac_academy'] + del data["ac_academy"] return data @@ -72,41 +79,41 @@ def fix_db_field(data={}): def generate_form_entry_kwargs(kwargs={}): """That random values is too long that i prefer have it in one function""" return { - 'fb_leadgen_id': randint(0, 9999), - 'fb_page_id': randint(0, 9999), - 'fb_form_id': randint(0, 9999), - 'fb_adgroup_id': randint(0, 9999), - 'fb_ad_id': randint(0, 9999), - 'gclid': random_string(), - 'first_name': choice(['Rene', 'Albert', 'Immanuel']), - 'last_name': choice(['Descartes', 'Camus', 'Kant']), - 'email': choice(['a@a.com', 'b@b.com', 'c@c.com']), - 'phone': '123456789', - 'course': random_string(), - 'client_comments': random_string(), - 'location': random_string(), - 'language': 'en', - 'utm_url': random_string(), - 'utm_medium': random_string(), - 'utm_campaign': random_string(), - 'utm_source': random_string(), - 'referral_key': random_string(), - 'gclid': random_string(), - 'tags': random_string(), - 'automations': random_string(), - 'street_address': random_string(), - 'country': random_string(), - 'city': random_string(), - 'latitude': 15, - 'longitude': 15, - 'state': random_string(), - 'zip_code': str(randint(0, 9999)), - 'browser_lang': random_string(), - 'storage_status': choice(['PENDING', 'PERSISTED']), - 'lead_type': choice(['STRONG', 'SOFT', 'DISCOVERY']), - 'deal_status': choice(['WON', 'LOST']), - 'sentiment': choice(['GOOD', 'BAD']), - 'current_download': fake_url, + "fb_leadgen_id": randint(0, 9999), + "fb_page_id": randint(0, 9999), + "fb_form_id": randint(0, 9999), + "fb_adgroup_id": randint(0, 9999), + "fb_ad_id": randint(0, 9999), + "gclid": random_string(), + "first_name": choice(["Rene", "Albert", "Immanuel"]), + "last_name": choice(["Descartes", "Camus", "Kant"]), + "email": choice(["a@a.com", "b@b.com", "c@c.com"]), + "phone": "123456789", + "course": random_string(), + "client_comments": random_string(), + "location": random_string(), + "language": "en", + "utm_url": random_string(), + "utm_medium": random_string(), + "utm_campaign": random_string(), + 
"utm_source": random_string(), + "referral_key": random_string(), + "gclid": random_string(), + "tags": random_string(), + "automations": random_string(), + "street_address": random_string(), + "country": random_string(), + "city": random_string(), + "latitude": 15, + "longitude": 15, + "state": random_string(), + "zip_code": str(randint(0, 9999)), + "browser_lang": random_string(), + "storage_status": choice(["PENDING", "PERSISTED"]), + "lead_type": choice(["STRONG", "SOFT", "DISCOVERY"]), + "deal_status": choice(["WON", "LOST"]), + "sentiment": choice(["GOOD", "BAD"]), + "current_download": fake_url, **kwargs, } @@ -116,64 +123,70 @@ class AnswerIdTestSuite(MarketingTestCase): 🔽🔽🔽 Passing None """ - @patch('requests.get', apply_requests_get_mock([(200, GOOGLE_MAPS_URL, GOOGLE_MAPS_OK)])) - @patch('requests.post', apply_requests_post_mock([(201, MAILGUN_URL, 'ok')])) - @patch('requests.request', apply_old_breathecode_requests_request_mock()) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) + @patch("requests.get", apply_requests_get_mock([(200, GOOGLE_MAPS_URL, GOOGLE_MAPS_OK)])) + @patch("requests.post", apply_requests_post_mock([(201, MAILGUN_URL, "ok")])) + @patch("requests.request", apply_old_breathecode_requests_request_mock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) def test_passing_none(self): data = None persist_single_lead.delay(data) self.assertEqual(self.count_form_entry(), 0) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting persist_single_lead'), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting persist_single_lead"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, []) self.assertEqual(requests.get.error.call_args_list, []) self.assertEqual(requests.post.error.call_args_list, []) self.assertEqual(requests.request.error.call_args_list, []) - self.assertEqual(self.bc.database.list_of('marketing.FormEntry'), []) + self.assertEqual(self.bc.database.list_of("marketing.FormEntry"), []) """ 🔽🔽🔽 Passing empty dict """ - @patch('requests.get', apply_requests_get_mock([(200, GOOGLE_MAPS_URL, GOOGLE_MAPS_OK)])) - @patch('requests.post', apply_requests_post_mock([(201, MAILGUN_URL, 'ok')])) - @patch('requests.request', apply_old_breathecode_requests_request_mock()) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) + @patch("requests.get", apply_requests_get_mock([(200, GOOGLE_MAPS_URL, GOOGLE_MAPS_OK)])) + @patch("requests.post", apply_requests_post_mock([(201, MAILGUN_URL, "ok")])) + @patch("requests.request", apply_old_breathecode_requests_request_mock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) def test_empty_dict(self): data = {} persist_single_lead.delay(data) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting persist_single_lead'), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting persist_single_lead"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, []) self.assertEqual(requests.get.error.call_args_list, []) self.assertEqual(requests.post.error.call_args_list, []) self.assertEqual(requests.request.error.call_args_list, []) - self.assertEqual(self.bc.database.list_of('marketing.FormEntry'), []) + self.assertEqual(self.bc.database.list_of("marketing.FormEntry"), []) """ 🔽🔽🔽 Passing dict with bad location """ - @patch('requests.get', 
apply_requests_get_mock([(200, GOOGLE_MAPS_URL, GOOGLE_MAPS_OK)])) - @patch('requests.post', apply_requests_post_mock([(201, MAILGUN_URL, 'ok')])) - @patch('requests.request', apply_old_breathecode_requests_request_mock()) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) + @patch("requests.get", apply_requests_get_mock([(200, GOOGLE_MAPS_URL, GOOGLE_MAPS_OK)])) + @patch("requests.post", apply_requests_post_mock([(201, MAILGUN_URL, "ok")])) + @patch("requests.request", apply_old_breathecode_requests_request_mock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) def test_dict_with_bad_location(self): - data = {'location': 'they-killed-kenny'} + data = {"location": "they-killed-kenny"} persist_single_lead.delay(data) @@ -181,28 +194,30 @@ def test_dict_with_bad_location(self): self.assertEqual( logging.Logger.info.call_args_list, [ - call('Starting persist_single_lead'), + call("Starting persist_single_lead"), # retrying - call('Starting persist_single_lead'), - ]) - self.assertEqual(logging.Logger.error.call_args_list, - [call('No academy found with slug they-killed-kenny', exc_info=True)]) + call("Starting persist_single_lead"), + ], + ) + self.assertEqual( + logging.Logger.error.call_args_list, [call("No academy found with slug they-killed-kenny", exc_info=True)] + ) self.assertEqual(requests.get.error.call_args_list, []) self.assertEqual(requests.post.error.call_args_list, []) self.assertEqual(requests.request.error.call_args_list, []) - self.assertEqual(self.bc.database.list_of('marketing.FormEntry'), []) + self.assertEqual(self.bc.database.list_of("marketing.FormEntry"), []) """ 🔽🔽🔽 Passing dict with Academy.slug as location """ - @patch('requests.get', apply_requests_get_mock([(200, GOOGLE_MAPS_URL, GOOGLE_MAPS_OK)])) - @patch('requests.post', apply_requests_post_mock([(201, MAILGUN_URL, 'ok')])) - @patch('requests.request', apply_old_breathecode_requests_request_mock()) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) + @patch("requests.get", apply_requests_get_mock([(200, GOOGLE_MAPS_URL, GOOGLE_MAPS_OK)])) + @patch("requests.post", apply_requests_post_mock([(201, MAILGUN_URL, "ok")])) + @patch("requests.request", apply_old_breathecode_requests_request_mock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) def test_dict_with_location(self): """Test /answer/:id without auth""" model = self.generate_models(academy=True, active_campaign_academy=True) @@ -210,70 +225,84 @@ def test_dict_with_location(self): logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] - data = {'location': model['academy'].slug} + data = {"location": model["academy"].slug} persist_single_lead.delay(data) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting persist_single_lead'), - call('automations not found'), - ]) - self.assertEqual(logging.Logger.error.call_args_list, [ - call('You need to specify tags for this entry', exc_info=True), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting persist_single_lead"), + call("automations not found"), + ], + ) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call("You need to specify tags for this entry", exc_info=True), + ], + ) self.assertEqual(requests.get.error.call_args_list, []) self.assertEqual(requests.post.error.call_args_list, []) self.assertEqual(requests.request.error.call_args_list, []) - 
self.assertEqual(self.bc.database.list_of('marketing.FormEntry'), []) + self.assertEqual(self.bc.database.list_of("marketing.FormEntry"), []) """ 🔽🔽🔽 Passing dict with AcademyAlias.active_campaign_slug as location """ - @patch('requests.get', apply_requests_get_mock([(200, GOOGLE_MAPS_URL, GOOGLE_MAPS_OK)])) - @patch('requests.post', apply_requests_post_mock([(201, MAILGUN_URL, 'ok')])) - @patch('requests.request', apply_old_breathecode_requests_request_mock()) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) + @patch("requests.get", apply_requests_get_mock([(200, GOOGLE_MAPS_URL, GOOGLE_MAPS_OK)])) + @patch("requests.post", apply_requests_post_mock([(201, MAILGUN_URL, "ok")])) + @patch("requests.request", apply_old_breathecode_requests_request_mock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) def test_with_location_academy_alias(self): """Test /answer/:id without auth""" - model = self.generate_models(academy=True, - active_campaign_academy=True, - academy_alias=True, - academy_alias_kwargs={'active_campaign_slug': 'odin'}) + model = self.generate_models( + academy=True, + active_campaign_academy=True, + academy_alias=True, + academy_alias_kwargs={"active_campaign_slug": "odin"}, + ) logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] - data = {'location': 'odin'} + data = {"location": "odin"} persist_single_lead.delay(data) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting persist_single_lead'), - call('automations not found'), - ]) - self.assertEqual(logging.Logger.error.call_args_list, [ - call('You need to specify tags for this entry', exc_info=True), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting persist_single_lead"), + call("automations not found"), + ], + ) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call("You need to specify tags for this entry", exc_info=True), + ], + ) self.assertEqual(requests.get.error.call_args_list, []) self.assertEqual(requests.post.error.call_args_list, []) self.assertEqual(requests.request.error.call_args_list, []) - self.assertEqual(self.bc.database.list_of('marketing.FormEntry'), []) + self.assertEqual(self.bc.database.list_of("marketing.FormEntry"), []) """ 🔽🔽🔽 Passing dict with AcademyAlias.active_campaign_slug as location and bad tags """ - @patch('requests.get', apply_requests_get_mock([(200, GOOGLE_MAPS_URL, GOOGLE_MAPS_OK)])) - @patch('requests.post', apply_requests_post_mock([(201, MAILGUN_URL, 'ok')])) - @patch('requests.request', apply_old_breathecode_requests_request_mock()) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) + @patch("requests.get", apply_requests_get_mock([(200, GOOGLE_MAPS_URL, GOOGLE_MAPS_OK)])) + @patch("requests.post", apply_requests_post_mock([(201, MAILGUN_URL, "ok")])) + @patch("requests.request", apply_old_breathecode_requests_request_mock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) def test_with_bad_tags(self): """Test /answer/:id without auth""" model = self.generate_models(academy=True, active_campaign_academy=True) @@ -282,65 +311,78 @@ def test_with_bad_tags(self): logging.Logger.error.call_args_list = [] data = { - 'location': model['academy'].slug, - 'tags': 'they-killed-kenny', + "location": model["academy"].slug, + "tags": "they-killed-kenny", } persist_single_lead.delay(data) - 
self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting persist_single_lead'), - call('automations not found'), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting persist_single_lead"), + call("automations not found"), + ], + ) self.assertEqual( str(logging.Logger.error.call_args_list), - str([ - call( - 'Some tag applied to the contact not found or have tag_type different than [STRONG, SOFT, DISCOVER, OTHER]: Check for the follow tags: they-killed-kenny', - exc_info=True), - ])) + str( + [ + call( + "Some tag applied to the contact not found or have tag_type different than [STRONG, SOFT, DISCOVER, OTHER]: Check for the follow tags: they-killed-kenny", + exc_info=True, + ), + ] + ), + ) self.assertEqual(requests.get.error.call_args_list, []) self.assertEqual(requests.post.error.call_args_list, []) self.assertEqual(requests.request.error.call_args_list, []) - self.assertEqual(self.bc.database.list_of('marketing.FormEntry'), []) + self.assertEqual(self.bc.database.list_of("marketing.FormEntry"), []) """ 🔽🔽🔽 Passing dict with AcademyAlias.active_campaign_slug as location and Tag.slug as tags of type STRONG """ - @patch('requests.get', apply_requests_get_mock([(200, GOOGLE_MAPS_URL, GOOGLE_MAPS_OK)])) - @patch('requests.post', apply_requests_post_mock([(201, MAILGUN_URL, 'ok')])) - @patch('requests.request', apply_old_breathecode_requests_request_mock()) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) + @patch("requests.get", apply_requests_get_mock([(200, GOOGLE_MAPS_URL, GOOGLE_MAPS_OK)])) + @patch("requests.post", apply_requests_post_mock([(201, MAILGUN_URL, "ok")])) + @patch("requests.request", apply_old_breathecode_requests_request_mock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) def test_with_tag_type_strong(self): """Test /answer/:id without auth""" - model = self.generate_models(academy=1, active_campaign_academy=1, tag={'tag_type': 'STRONG'}) + model = self.generate_models(academy=1, active_campaign_academy=1, tag={"tag_type": "STRONG"}) logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] data = { - 'location': model['academy'].slug, - 'tags': model['tag'].slug, + "location": model["academy"].slug, + "tags": model["tag"].slug, } persist_single_lead.delay(data) - self.assertEqual(self.bc.database.list_of('marketing.FormEntry'), []) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting persist_single_lead'), - call('automations not found'), - call('found tags'), - call({model.tag.slug}), - ]) - self.assertEqual(logging.Logger.error.call_args_list, [ - call('No automation was specified and the the specified tag has no automation either', exc_info=True), - ]) + self.assertEqual(self.bc.database.list_of("marketing.FormEntry"), []) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting persist_single_lead"), + call("automations not found"), + call("found tags"), + call({model.tag.slug}), + ], + ) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call("No automation was specified and the the specified tag has no automation either", exc_info=True), + ], + ) self.assertEqual(requests.get.error.call_args_list, []) self.assertEqual(requests.post.error.call_args_list, []) @@ -350,32 +392,35 @@ def test_with_tag_type_strong(self): 🔽🔽🔽 With one Automation but not found """ - @patch('requests.get', apply_requests_get_mock([(200, GOOGLE_MAPS_URL, GOOGLE_MAPS_OK)])) - 
@patch('requests.post', apply_requests_post_mock([(201, MAILGUN_URL, 'ok')])) - @patch('requests.request', apply_old_breathecode_requests_request_mock()) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) + @patch("requests.get", apply_requests_get_mock([(200, GOOGLE_MAPS_URL, GOOGLE_MAPS_OK)])) + @patch("requests.post", apply_requests_post_mock([(201, MAILGUN_URL, "ok")])) + @patch("requests.request", apply_old_breathecode_requests_request_mock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) def test_with_tag_type_strong__with_automation(self): """Test /answer/:id without auth""" - model = self.generate_models(academy=1, active_campaign_academy=1, tag={'tag_type': 'STRONG'}, automation=1) + model = self.generate_models(academy=1, active_campaign_academy=1, tag={"tag_type": "STRONG"}, automation=1) logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] data = { - 'location': model['academy'].slug, - 'tags': model['tag'].slug, + "location": model["academy"].slug, + "tags": model["tag"].slug, } persist_single_lead.delay(data) - self.assertEqual(self.bc.database.list_of('marketing.FormEntry'), []) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting persist_single_lead'), - call('automations not found'), - call('found tags'), - call({model.tag.slug}), - ]) + self.assertEqual(self.bc.database.list_of("marketing.FormEntry"), []) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting persist_single_lead"), + call("automations not found"), + call("found tags"), + call({model.tag.slug}), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, [call("The email doesn't exist", exc_info=True)]) self.assertEqual(requests.get.error.call_args_list, []) @@ -386,65 +431,73 @@ def test_with_tag_type_strong__with_automation(self): 🔽🔽🔽 Dict with bad automations and with one Automation but not found """ - @patch('requests.get', apply_requests_get_mock([(200, GOOGLE_MAPS_URL, GOOGLE_MAPS_OK)])) - @patch('requests.post', apply_requests_post_mock([(201, MAILGUN_URL, 'ok')])) - @patch('requests.request', apply_old_breathecode_requests_request_mock()) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) + @patch("requests.get", apply_requests_get_mock([(200, GOOGLE_MAPS_URL, GOOGLE_MAPS_OK)])) + @patch("requests.post", apply_requests_post_mock([(201, MAILGUN_URL, "ok")])) + @patch("requests.request", apply_old_breathecode_requests_request_mock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) def test_with_automations__not_found(self): """Test /answer/:id without auth""" - model = self.generate_models(academy=1, active_campaign_academy=1, tag={'tag_type': 'STRONG'}, automation=1) + model = self.generate_models(academy=1, active_campaign_academy=1, tag={"tag_type": "STRONG"}, automation=1) logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] - data = {'location': model['academy'].slug, 'tags': model['tag'].slug, 'automations': 'they-killed-kenny'} + data = {"location": model["academy"].slug, "tags": model["tag"].slug, "automations": "they-killed-kenny"} persist_single_lead.delay(data) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting persist_single_lead'), - ]) - self.assertEqual(logging.Logger.error.call_args_list, [ - call('The specified automation they-killed-kenny was not found for this AC Academy', exc_info=True), - 
]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting persist_single_lead"), + ], + ) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call("The specified automation they-killed-kenny was not found for this AC Academy", exc_info=True), + ], + ) self.assertEqual(requests.get.error.call_args_list, []) self.assertEqual(requests.post.error.call_args_list, []) self.assertEqual(requests.request.error.call_args_list, []) - self.assertEqual(self.bc.database.list_of('marketing.FormEntry'), []) + self.assertEqual(self.bc.database.list_of("marketing.FormEntry"), []) """ 🔽🔽🔽 Dict with automations, without email and with one Automation found """ - @patch('requests.get', apply_requests_get_mock([(200, GOOGLE_MAPS_URL, GOOGLE_MAPS_OK)])) - @patch('requests.post', apply_requests_post_mock([(201, MAILGUN_URL, 'ok')])) - @patch('requests.request', apply_old_breathecode_requests_request_mock()) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) + @patch("requests.get", apply_requests_get_mock([(200, GOOGLE_MAPS_URL, GOOGLE_MAPS_OK)])) + @patch("requests.post", apply_requests_post_mock([(201, MAILGUN_URL, "ok")])) + @patch("requests.request", apply_old_breathecode_requests_request_mock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) def test_with_automations_slug(self): """Test /answer/:id without auth""" - model = self.generate_models(academy=1, - active_campaign_academy=1, - tag={'tag_type': 'STRONG'}, - automation={'slug': 'they-killed-kenny'}) + model = self.generate_models( + academy=1, active_campaign_academy=1, tag={"tag_type": "STRONG"}, automation={"slug": "they-killed-kenny"} + ) logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] - data = {'location': model['academy'].slug, 'tags': model['tag'].slug, 'automations': model['automation'].slug} + data = {"location": model["academy"].slug, "tags": model["tag"].slug, "automations": model["automation"].slug} persist_single_lead.delay(data) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting persist_single_lead'), - call('found automations'), - call([model.automation.acp_id]), - call('found tags'), - call({model.tag.slug}), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting persist_single_lead"), + call("found automations"), + call([model.automation.acp_id]), + call("found tags"), + call({model.tag.slug}), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, [call("The email doesn't exist", exc_info=True)]) @@ -452,322 +505,374 @@ def test_with_automations_slug(self): self.assertEqual(requests.post.error.call_args_list, []) self.assertEqual(requests.request.error.call_args_list, []) - self.assertEqual(self.bc.database.list_of('marketing.FormEntry'), []) + self.assertEqual(self.bc.database.list_of("marketing.FormEntry"), []) """ 🔽🔽🔽 With email in dict """ - @patch('requests.get', apply_requests_get_mock([(200, GOOGLE_MAPS_URL, GOOGLE_MAPS_OK)])) - @patch('requests.post', apply_requests_post_mock([(201, MAILGUN_URL, 'ok')])) - @patch('requests.request', apply_old_breathecode_requests_request_mock()) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) + @patch("requests.get", apply_requests_get_mock([(200, GOOGLE_MAPS_URL, GOOGLE_MAPS_OK)])) + @patch("requests.post", apply_requests_post_mock([(201, MAILGUN_URL, "ok")])) + @patch("requests.request", apply_old_breathecode_requests_request_mock()) + 
@patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) def test_with_email(self): """Test /answer/:id without auth""" - model = self.generate_models(academy=True, - active_campaign_academy=True, - tag=True, - tag_kwargs={'tag_type': 'STRONG'}, - automation=True, - automation_kwargs={'slug': 'they-killed-kenny'}) + model = self.generate_models( + academy=True, + active_campaign_academy=True, + tag=True, + tag_kwargs={"tag_type": "STRONG"}, + automation=True, + automation_kwargs={"slug": "they-killed-kenny"}, + ) logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] data = { - 'location': model['academy'].slug, - 'tags': model['tag'].slug, - 'automations': model['automation'].slug, - 'email': 'pokemon@potato.io' + "location": model["academy"].slug, + "tags": model["tag"].slug, + "automations": model["automation"].slug, + "email": "pokemon@potato.io", } persist_single_lead.delay(data) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting persist_single_lead'), - call('found automations'), - call([model.automation.acp_id]), - call('found tags'), - call({model.tag.slug}), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting persist_single_lead"), + call("found automations"), + call([model.automation.acp_id]), + call("found tags"), + call({model.tag.slug}), + ], + ) - self.assertEqual(logging.Logger.error.call_args_list, [ - call("The first name doesn't exist", exc_info=True), - ]) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call("The first name doesn't exist", exc_info=True), + ], + ) self.assertEqual(requests.get.error.call_args_list, []) self.assertEqual(requests.post.error.call_args_list, []) self.assertEqual(requests.request.error.call_args_list, []) - self.assertEqual(self.bc.database.list_of('marketing.FormEntry'), []) + self.assertEqual(self.bc.database.list_of("marketing.FormEntry"), []) """ 🔽🔽🔽 With first_name in dict """ - @patch('requests.get', apply_requests_get_mock([(200, GOOGLE_MAPS_URL, GOOGLE_MAPS_OK)])) - @patch('requests.post', apply_requests_post_mock([(201, MAILGUN_URL, 'ok')])) - @patch('requests.request', apply_old_breathecode_requests_request_mock()) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) + @patch("requests.get", apply_requests_get_mock([(200, GOOGLE_MAPS_URL, GOOGLE_MAPS_OK)])) + @patch("requests.post", apply_requests_post_mock([(201, MAILGUN_URL, "ok")])) + @patch("requests.request", apply_old_breathecode_requests_request_mock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) def test_with_first_name(self): """Test /answer/:id without auth""" - model = self.generate_models(academy=True, - active_campaign_academy=True, - tag=True, - tag_kwargs={'tag_type': 'STRONG'}, - automation=True, - automation_kwargs={'slug': 'they-killed-kenny'}) + model = self.generate_models( + academy=True, + active_campaign_academy=True, + tag=True, + tag_kwargs={"tag_type": "STRONG"}, + automation=True, + automation_kwargs={"slug": "they-killed-kenny"}, + ) logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] data = { - 'location': model['academy'].slug, - 'tags': model['tag'].slug, - 'automations': model['automation'].slug, - 'email': 'pokemon@potato.io', - 'first_name': 'Konan' + "location": model["academy"].slug, + "tags": model["tag"].slug, + "automations": model["automation"].slug, + "email": "pokemon@potato.io", + "first_name": 
"Konan", } persist_single_lead.delay(data) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting persist_single_lead'), - call('found automations'), - call([model.automation.acp_id]), - call('found tags'), - call({model.tag.slug}), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting persist_single_lead"), + call("found automations"), + call([model.automation.acp_id]), + call("found tags"), + call({model.tag.slug}), + ], + ) - self.assertEqual(logging.Logger.error.call_args_list, [ - call("The last name doesn't exist", exc_info=True), - ]) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call("The last name doesn't exist", exc_info=True), + ], + ) self.assertEqual(requests.get.error.call_args_list, []) self.assertEqual(requests.post.error.call_args_list, []) self.assertEqual(requests.request.error.call_args_list, []) - self.assertEqual(self.bc.database.list_of('marketing.FormEntry'), []) + self.assertEqual(self.bc.database.list_of("marketing.FormEntry"), []) """ 🔽🔽🔽 With last_name in dict """ - @patch('requests.get', apply_requests_get_mock([(200, GOOGLE_MAPS_URL, GOOGLE_MAPS_OK)])) - @patch('requests.post', apply_requests_post_mock([(201, MAILGUN_URL, 'ok')])) - @patch('requests.request', apply_old_breathecode_requests_request_mock()) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) + @patch("requests.get", apply_requests_get_mock([(200, GOOGLE_MAPS_URL, GOOGLE_MAPS_OK)])) + @patch("requests.post", apply_requests_post_mock([(201, MAILGUN_URL, "ok")])) + @patch("requests.request", apply_old_breathecode_requests_request_mock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) def test_with_last_name(self): """Test /answer/:id without auth""" - model = self.generate_models(academy=True, - active_campaign_academy=True, - tag=True, - tag_kwargs={'tag_type': 'STRONG'}, - automation=True, - automation_kwargs={'slug': 'they-killed-kenny'}) + model = self.generate_models( + academy=True, + active_campaign_academy=True, + tag=True, + tag_kwargs={"tag_type": "STRONG"}, + automation=True, + automation_kwargs={"slug": "they-killed-kenny"}, + ) logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] data = { - 'location': model['academy'].slug, - 'tags': model['tag'].slug, - 'automations': model['automation'].slug, - 'email': 'pokemon@potato.io', - 'first_name': 'Konan', - 'last_name': 'Amegakure', + "location": model["academy"].slug, + "tags": model["tag"].slug, + "automations": model["automation"].slug, + "email": "pokemon@potato.io", + "first_name": "Konan", + "last_name": "Amegakure", } persist_single_lead.delay(data) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting persist_single_lead'), - call('found automations'), - call([model.automation.acp_id]), - call('found tags'), - call({model.tag.slug}), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting persist_single_lead"), + call("found automations"), + call([model.automation.acp_id]), + call("found tags"), + call({model.tag.slug}), + ], + ) - self.assertEqual(logging.Logger.error.call_args_list, [ - call("The phone doesn't exist", exc_info=True), - ]) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call("The phone doesn't exist", exc_info=True), + ], + ) self.assertEqual(requests.get.error.call_args_list, []) self.assertEqual(requests.post.error.call_args_list, []) 
self.assertEqual(requests.request.error.call_args_list, []) - self.assertEqual(self.bc.database.list_of('marketing.FormEntry'), []) + self.assertEqual(self.bc.database.list_of("marketing.FormEntry"), []) """ 🔽🔽🔽 With phone in dict """ - @patch('requests.get', apply_requests_get_mock([(200, GOOGLE_MAPS_URL, GOOGLE_MAPS_OK)])) - @patch('requests.post', apply_requests_post_mock([(201, MAILGUN_URL, 'ok')])) - @patch('requests.request', apply_old_breathecode_requests_request_mock()) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) + @patch("requests.get", apply_requests_get_mock([(200, GOOGLE_MAPS_URL, GOOGLE_MAPS_OK)])) + @patch("requests.post", apply_requests_post_mock([(201, MAILGUN_URL, "ok")])) + @patch("requests.request", apply_old_breathecode_requests_request_mock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) def test_with_phone(self): """Test /answer/:id without auth""" - model = self.generate_models(academy=True, - active_campaign_academy=True, - tag=True, - tag_kwargs={'tag_type': 'STRONG'}, - automation=True, - automation_kwargs={'slug': 'they-killed-kenny'}) + model = self.generate_models( + academy=True, + active_campaign_academy=True, + tag=True, + tag_kwargs={"tag_type": "STRONG"}, + automation=True, + automation_kwargs={"slug": "they-killed-kenny"}, + ) logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] data = { - 'location': model['academy'].slug, - 'tags': model['tag'].slug, - 'automations': model['automation'].slug, - 'email': 'pokemon@potato.io', - 'first_name': 'Konan', - 'last_name': 'Amegakure', - 'phone': '123123123', + "location": model["academy"].slug, + "tags": model["tag"].slug, + "automations": model["automation"].slug, + "email": "pokemon@potato.io", + "first_name": "Konan", + "last_name": "Amegakure", + "phone": "123123123", } persist_single_lead.delay(data) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting persist_single_lead'), - call('found automations'), - call([model.automation.acp_id]), - call('found tags'), - call({model.tag.slug}), - ]) - self.assertEqual(logging.Logger.error.call_args_list, [ - call("The id doesn't exist", exc_info=True), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting persist_single_lead"), + call("found automations"), + call([model.automation.acp_id]), + call("found tags"), + call({model.tag.slug}), + ], + ) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call("The id doesn't exist", exc_info=True), + ], + ) self.assertEqual(requests.get.error.call_args_list, []) self.assertEqual(requests.post.error.call_args_list, []) self.assertEqual(requests.request.error.call_args_list, []) - self.assertEqual(self.bc.database.list_of('marketing.FormEntry'), []) + self.assertEqual(self.bc.database.list_of("marketing.FormEntry"), []) """ 🔽🔽🔽 With id in dict but FormEntry doesn't exist """ - @patch('requests.get', apply_requests_get_mock([(200, GOOGLE_MAPS_URL, GOOGLE_MAPS_OK)])) - @patch('requests.post', apply_requests_post_mock([(201, MAILGUN_URL, 'ok')])) - @patch('requests.request', apply_old_breathecode_requests_request_mock()) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) + @patch("requests.get", apply_requests_get_mock([(200, GOOGLE_MAPS_URL, GOOGLE_MAPS_OK)])) + @patch("requests.post", apply_requests_post_mock([(201, MAILGUN_URL, "ok")])) + @patch("requests.request", 
apply_old_breathecode_requests_request_mock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) def test_with_id(self): """Test /answer/:id without auth""" - model = self.generate_models(academy=True, - active_campaign_academy=True, - tag=True, - tag_kwargs={'tag_type': 'STRONG'}, - automation=True, - automation_kwargs={'slug': 'they-killed-kenny'}) + model = self.generate_models( + academy=True, + active_campaign_academy=True, + tag=True, + tag_kwargs={"tag_type": "STRONG"}, + automation=True, + automation_kwargs={"slug": "they-killed-kenny"}, + ) logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] data = { - 'location': model['academy'].slug, - 'tags': model['tag'].slug, - 'automations': model['automation'].slug, - 'email': 'pokemon@potato.io', - 'first_name': 'Konan', - 'last_name': 'Amegakure', - 'phone': '123123123', - 'id': 123123123, - 'course': 'asdasd', + "location": model["academy"].slug, + "tags": model["tag"].slug, + "automations": model["automation"].slug, + "email": "pokemon@potato.io", + "first_name": "Konan", + "last_name": "Amegakure", + "phone": "123123123", + "id": 123123123, + "course": "asdasd", } persist_single_lead.delay(data) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting persist_single_lead'), - call('found automations'), - call([model.automation.acp_id]), - call('found tags'), - call({model.tag.slug}), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting persist_single_lead"), + call("found automations"), + call([model.automation.acp_id]), + call("found tags"), + call({model.tag.slug}), + ], + ) - self.assertEqual(logging.Logger.error.call_args_list, [ - call('FormEntry not found (id: 123123123)', exc_info=True), - ]) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call("FormEntry not found (id: 123123123)", exc_info=True), + ], + ) self.assertEqual(requests.get.error.call_args_list, []) self.assertEqual(requests.post.error.call_args_list, []) self.assertEqual(requests.request.error.call_args_list, []) - self.assertEqual(self.bc.database.list_of('marketing.FormEntry'), []) + self.assertEqual(self.bc.database.list_of("marketing.FormEntry"), []) """ 🔽🔽🔽 With id and without course in dict, FormEntry exists """ - @patch('requests.get', apply_requests_get_mock([(200, GOOGLE_MAPS_URL, GOOGLE_MAPS_INVALID_REQUEST)])) - @patch('requests.post', apply_requests_post_mock([(201, MAILGUN_URL, 'ok')])) - @patch('requests.request', apply_old_breathecode_requests_request_mock()) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) + @patch("requests.get", apply_requests_get_mock([(200, GOOGLE_MAPS_URL, GOOGLE_MAPS_INVALID_REQUEST)])) + @patch("requests.post", apply_requests_post_mock([(201, MAILGUN_URL, "ok")])) + @patch("requests.request", apply_old_breathecode_requests_request_mock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) def test_with_form_entry_with_data_invalid(self): - mock_old_breathecode = OLD_BREATHECODE_INSTANCES['request'] + mock_old_breathecode = OLD_BREATHECODE_INSTANCES["request"] mock_old_breathecode.call_args_list = [] - model = self.generate_models(academy=True, - active_campaign_academy=True, - tag=True, - tag_kwargs={'tag_type': 'STRONG'}, - automation=True, - automation_kwargs={'slug': 'they-killed-kenny'}, - form_entry=True, - form_entry_kwargs=generate_form_entry_kwargs(), - active_campaign_academy_kwargs={'ac_url': 
'https://old.hardcoded.breathecode.url'}) + model = self.generate_models( + academy=True, + active_campaign_academy=True, + tag=True, + tag_kwargs={"tag_type": "STRONG"}, + automation=True, + automation_kwargs={"slug": "they-killed-kenny"}, + form_entry=True, + form_entry_kwargs=generate_form_entry_kwargs(), + active_campaign_academy_kwargs={"ac_url": "https://old.hardcoded.breathecode.url"}, + ) logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] data = { - 'location': model['academy'].slug, - 'tags': model['tag'].slug, - 'automations': model['automation'].slug, - 'email': 'pokemon@potato.io', - 'first_name': 'Konan', - 'last_name': 'Amegakure', - 'phone': '123123123', - 'id': model['form_entry'].id, + "location": model["academy"].slug, + "tags": model["tag"].slug, + "automations": model["automation"].slug, + "email": "pokemon@potato.io", + "first_name": "Konan", + "last_name": "Amegakure", + "phone": "123123123", + "id": model["form_entry"].id, } persist_single_lead.delay(data) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting persist_single_lead'), - call('found automations'), - call([model.automation.acp_id]), - call('found tags'), - call({model.tag.slug}), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting persist_single_lead"), + call("found automations"), + call([model.automation.acp_id]), + call("found tags"), + call({model.tag.slug}), + ], + ) - self.assertEqual(logging.Logger.error.call_args_list, [ - call("The course doesn't exist", exc_info=True), - ]) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call("The course doesn't exist", exc_info=True), + ], + ) self.assertEqual(requests.get.call_args_list, []) self.assertEqual(requests.post.call_args_list, []) self.assertEqual(requests.request.call_args_list, []) - db = self.bc.format.to_dict(model['form_entry']) - del db['ac_academy'] + db = self.bc.format.to_dict(model["form_entry"]) + del db["ac_academy"] - self.assertEqual(self.bc.database.list_of('marketing.FormEntry'), - [{ - **db, - 'ac_contact_id': None, - 'storage_status': 'ERROR', - 'storage_status_text': "The course doesn't exist", - }]) + self.assertEqual( + self.bc.database.list_of("marketing.FormEntry"), + [ + { + **db, + "ac_contact_id": None, + "storage_status": "ERROR", + "storage_status_text": "The course doesn't exist", + } + ], + ) # """ # 🔽🔽🔽 First successful response, with id in dict, FormEntry found @@ -891,101 +996,127 @@ def test_with_form_entry_with_data_invalid(self): 🔽🔽🔽 First successful response, with id in dict, FormEntry found two times """ - @patch('requests.get', apply_requests_get_mock([(200, GOOGLE_MAPS_URL, GOOGLE_MAPS_OK)])) - @patch('requests.post', apply_requests_post_mock([(201, MAILGUN_URL, 'ok')])) - @patch('requests.request', apply_old_breathecode_requests_request_mock()) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) + @patch("requests.get", apply_requests_get_mock([(200, GOOGLE_MAPS_URL, GOOGLE_MAPS_OK)])) + @patch("requests.post", apply_requests_post_mock([(201, MAILGUN_URL, "ok")])) + @patch("requests.request", apply_old_breathecode_requests_request_mock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) def test_with_form_entry_with_data__two_form_entries_found(self): """Test /answer/:id without auth""" form_entries = [ - generate_form_entry_kwargs({ - 'email': 'pokemon@potato.io', - 'course': 'asdasd', - 'storage_status': 'PERSISTED', - }), - 
generate_form_entry_kwargs({ - 'email': 'pokemon@potato.io', - 'course': 'asdasd', - 'storage_status': 'PERSISTED', - }), + generate_form_entry_kwargs( + { + "email": "pokemon@potato.io", + "course": "asdasd", + "storage_status": "PERSISTED", + } + ), + generate_form_entry_kwargs( + { + "email": "pokemon@potato.io", + "course": "asdasd", + "storage_status": "PERSISTED", + } + ), ] - model = self.generate_models(academy=True, - active_campaign_academy=True, - tag=True, - tag_kwargs={'tag_type': 'STRONG'}, - automation=True, - automation_kwargs={'slug': 'they-killed-kenny'}, - form_entry=form_entries, - active_campaign_academy_kwargs={'ac_url': 'https://old.hardcoded.breathecode.url'}) + model = self.generate_models( + academy=True, + active_campaign_academy=True, + tag=True, + tag_kwargs={"tag_type": "STRONG"}, + automation=True, + automation_kwargs={"slug": "they-killed-kenny"}, + form_entry=form_entries, + active_campaign_academy_kwargs={"ac_url": "https://old.hardcoded.breathecode.url"}, + ) logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] data = { - 'location': model['academy'].slug, - 'tags': model['tag'].slug, - 'automations': model['automation'].slug, - 'email': 'pokemon@potato.io', - 'first_name': 'Konan', - 'last_name': 'Amegakure', - 'phone': '123123123', - 'course': 'asdasd', - 'id': 2, + "location": model["academy"].slug, + "tags": model["tag"].slug, + "automations": model["automation"].slug, + "email": "pokemon@potato.io", + "first_name": "Konan", + "last_name": "Amegakure", + "phone": "123123123", + "course": "asdasd", + "id": 2, } persist_single_lead.delay(data) form = self.get_form_entry(1) - db = self.bc.format.to_dict(model['form_entry'][1]) - del db['ac_academy'] - - self.assertEqual(self.bc.database.list_of('marketing.FormEntry'), [ - fix_db_field(self.bc.format.to_dict(model.form_entry[0])), { - **db, - 'ac_contact_id': '1', - 'ac_expected_cohort': None, - 'latitude': form.latitude, - 'longitude': form.longitude, - 'storage_status': 'DUPLICATED', - 'lead_generation_app_id': None, - 'storage_status_text': '', - } - ]) - - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting persist_single_lead'), - call('found automations'), - call([model.automation.acp_id]), - call('found tags'), - call({model.tag.slug}), - call('ready to send contact with following details: ' + str({ - 'email': 'pokemon@potato.io', - 'first_name': 'Konan', - 'last_name': 'Amegakure', - 'phone': '123123123', - 'field[18,0]': model.academy.slug, - 'field[2,0]': 'asdasd', - })), - call('FormEntry is considered a duplicate, no automations or tags added'), - ]) + db = self.bc.format.to_dict(model["form_entry"][1]) + del db["ac_academy"] + + self.assertEqual( + self.bc.database.list_of("marketing.FormEntry"), + [ + fix_db_field(self.bc.format.to_dict(model.form_entry[0])), + { + **db, + "ac_contact_id": "1", + "ac_expected_cohort": None, + "latitude": form.latitude, + "longitude": form.longitude, + "storage_status": "DUPLICATED", + "lead_generation_app_id": None, + "storage_status_text": "", + }, + ], + ) + + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting persist_single_lead"), + call("found automations"), + call([model.automation.acp_id]), + call("found tags"), + call({model.tag.slug}), + call( + "ready to send contact with following details: " + + str( + { + "email": "pokemon@potato.io", + "first_name": "Konan", + "last_name": "Amegakure", + "phone": "123123123", + "field[18,0]": model.academy.slug, + "field[2,0]": "asdasd", + 
} + ) + ), + call("FormEntry is considered a duplicate, no automations or tags added"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, []) self.assertEqual(requests.get.call_args_list, []) self.assertEqual(requests.post.call_args_list, []) - self.assertEqual(requests.request.call_args_list, [ - call('POST', - 'https://old.hardcoded.breathecode.url/admin/api.php', - params=[('api_action', 'contact_sync'), ('api_key', model['active_campaign_academy'].ac_key), - ('api_output', 'json')], - data={ - 'email': 'pokemon@potato.io', - 'first_name': 'Konan', - 'last_name': 'Amegakure', - 'phone': '123123123', - 'field[18,0]': model['academy'].slug, - 'field[2,0]': 'asdasd', - }, - timeout=3), - ]) + self.assertEqual( + requests.request.call_args_list, + [ + call( + "POST", + "https://old.hardcoded.breathecode.url/admin/api.php", + params=[ + ("api_action", "contact_sync"), + ("api_key", model["active_campaign_academy"].ac_key), + ("api_output", "json"), + ], + data={ + "email": "pokemon@potato.io", + "first_name": "Konan", + "last_name": "Amegakure", + "phone": "123123123", + "field[18,0]": model["academy"].slug, + "field[2,0]": "asdasd", + }, + timeout=3, + ), + ], + ) diff --git a/breathecode/marketing/tests/urls/tests_academy_lead.py b/breathecode/marketing/tests/urls/tests_academy_lead.py index 6c427922d..a84a4fe76 100644 --- a/breathecode/marketing/tests/urls/tests_academy_lead.py +++ b/breathecode/marketing/tests/urls/tests_academy_lead.py @@ -1,6 +1,7 @@ """ Test /academy/lead """ + from unittest.mock import MagicMock, call, patch from django.utils import timezone from datetime import timedelta @@ -14,98 +15,99 @@ def random_string(): - return ''.join(choices(string.ascii_letters, k=10)) + return "".join(choices(string.ascii_letters, k=10)) def generate_form_entry_kwargs(): """That random values is too long that i prefer have it in one function""" return { - 'fb_leadgen_id': randint(0, 9999), - 'fb_page_id': randint(0, 9999), - 'fb_form_id': randint(0, 9999), - 'fb_adgroup_id': randint(0, 9999), - 'fb_ad_id': randint(0, 9999), - 'gclid': random_string(), - 'first_name': choice(['Rene', 'Albert', 'Immanuel']), - 'last_name': choice(['Descartes', 'Camus', 'Kant']), - 'email': choice(['a@a.com', 'b@b.com', 'c@c.com']), - 'phone': choice(['123', '456', '789']), - 'course': random_string(), - 'client_comments': random_string(), - 'location': random_string(), - 'language': random_string(), - 'utm_url': random_string(), - 'utm_medium': random_string(), - 'utm_campaign': random_string(), - 'utm_source': random_string(), - 'utm_placement': random_string(), - 'utm_term': random_string(), - 'utm_plan': random_string(), - 'sex': random_string(), - 'custom_fields': None, - 'referral_key': random_string(), - 'gclid': random_string(), - 'tags': random_string(), - 'automations': random_string(), - 'street_address': random_string(), - 'country': random_string(), - 'city': random_string(), - 'latitude': randint(0, 9999), - 'longitude': randint(0, 9999), - 'state': random_string(), - 'zip_code': str(randint(0, 9999)), - 'browser_lang': random_string(), - 'storage_status': choice(['PENDING', 'PERSISTED']), - 'lead_type': choice(['STRONG', 'SOFT', 'DISCOVERY']), - 'deal_status': choice(['WON', 'LOST']), - 'sentiment': choice(['GOOD', 'BAD']), + "fb_leadgen_id": randint(0, 9999), + "fb_page_id": randint(0, 9999), + "fb_form_id": randint(0, 9999), + "fb_adgroup_id": randint(0, 9999), + "fb_ad_id": randint(0, 9999), + "gclid": random_string(), + "first_name": choice(["Rene", "Albert", "Immanuel"]), + 
"last_name": choice(["Descartes", "Camus", "Kant"]), + "email": choice(["a@a.com", "b@b.com", "c@c.com"]), + "phone": choice(["123", "456", "789"]), + "course": random_string(), + "client_comments": random_string(), + "location": random_string(), + "language": random_string(), + "utm_url": random_string(), + "utm_medium": random_string(), + "utm_campaign": random_string(), + "utm_source": random_string(), + "utm_placement": random_string(), + "utm_term": random_string(), + "utm_plan": random_string(), + "sex": random_string(), + "custom_fields": None, + "referral_key": random_string(), + "gclid": random_string(), + "tags": random_string(), + "automations": random_string(), + "street_address": random_string(), + "country": random_string(), + "city": random_string(), + "latitude": randint(0, 9999), + "longitude": randint(0, 9999), + "state": random_string(), + "zip_code": str(randint(0, 9999)), + "browser_lang": random_string(), + "storage_status": choice(["PENDING", "PERSISTED"]), + "lead_type": choice(["STRONG", "SOFT", "DISCOVERY"]), + "deal_status": choice(["WON", "LOST"]), + "sentiment": choice(["GOOD", "BAD"]), } def get_serializer(self, form_entry): return { - 'country': form_entry.country, - 'course': form_entry.course, - 'email': form_entry.email, - 'first_name': form_entry.first_name, - 'gclid': form_entry.gclid, - 'id': form_entry.id, - 'language': form_entry.language, - 'last_name': form_entry.last_name, - 'lead_type': form_entry.lead_type, - 'location': form_entry.location, - 'storage_status': form_entry.storage_status, - 'tags': form_entry.tags, - 'utm_campaign': form_entry.utm_campaign, - 'utm_medium': form_entry.utm_medium, - 'utm_source': form_entry.utm_source, - 'utm_content': form_entry.utm_content, - 'utm_placement': form_entry.utm_placement, - 'utm_term': form_entry.utm_term, - 'utm_plan': form_entry.utm_plan, - 'sex': form_entry.sex, - 'custom_fields': form_entry.custom_fields, - 'utm_url': form_entry.utm_url, - 'ac_expected_cohort': form_entry.ac_expected_cohort, - 'user': None, - 'phone': form_entry.phone, - 'created_at': self.bc.datetime.to_iso_string(form_entry.created_at), - 'storage_status_text': form_entry.storage_status_text, + "country": form_entry.country, + "course": form_entry.course, + "email": form_entry.email, + "first_name": form_entry.first_name, + "gclid": form_entry.gclid, + "id": form_entry.id, + "language": form_entry.language, + "last_name": form_entry.last_name, + "lead_type": form_entry.lead_type, + "location": form_entry.location, + "storage_status": form_entry.storage_status, + "tags": form_entry.tags, + "utm_campaign": form_entry.utm_campaign, + "utm_medium": form_entry.utm_medium, + "utm_source": form_entry.utm_source, + "utm_content": form_entry.utm_content, + "utm_placement": form_entry.utm_placement, + "utm_term": form_entry.utm_term, + "utm_plan": form_entry.utm_plan, + "sex": form_entry.sex, + "custom_fields": form_entry.custom_fields, + "utm_url": form_entry.utm_url, + "ac_expected_cohort": form_entry.ac_expected_cohort, + "user": None, + "phone": form_entry.phone, + "created_at": self.bc.datetime.to_iso_string(form_entry.created_at), + "storage_status_text": form_entry.storage_status_text, } class CohortUserTestSuite(MarketingTestCase): """Test /academy/lead""" + """ 🔽🔽🔽 Auth """ def test_academy_lead__without_auth(self): """Test /cohort/:id/user without auth""" - url = reverse_lazy('marketing:academy_lead') + url = reverse_lazy("marketing:academy_lead") response = self.client.get(url) json = response.json() - expected = {'detail': 
'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) @@ -113,15 +115,14 @@ def test_academy_lead__without_auth(self): def test_academy_lead__without_academy_header(self): """Test /cohort/:id/user without auth""" - url = reverse_lazy('marketing:academy_lead') - model = self.generate_models(authenticate=True, profile_academy=True, capability='read_lead', role='potato') + url = reverse_lazy("marketing:academy_lead") + model = self.generate_models(authenticate=True, profile_academy=True, capability="read_lead", role="potato") response = self.client.get(url) json = response.json() expected = { - 'detail': 'Missing academy_id parameter expected for the endpoint url or ' - "'Academy' header", - 'status_code': 403 + "detail": "Missing academy_id parameter expected for the endpoint url or " "'Academy' header", + "status_code": 403, } self.assertEqual(json, expected) @@ -135,8 +136,8 @@ def test_academy_lead__without_academy_header(self): def test_academy_lead__without_data(self): """Test /cohort/:id/user without auth""" self.headers(academy=1) - url = reverse_lazy('marketing:academy_lead') - model = self.generate_models(authenticate=True, profile_academy=True, capability='read_lead', role='potato') + url = reverse_lazy("marketing:academy_lead") + model = self.generate_models(authenticate=True, profile_academy=True, capability="read_lead", role="potato") response = self.client.get(url) json = response.json() @@ -153,24 +154,26 @@ def test_academy_lead__without_data(self): def test_academy_lead(self): """Test /cohort/:id/user without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_lead', - role='potato', - form_entry=True, - form_entry_kwargs=generate_form_entry_kwargs()) - - url = reverse_lazy('marketing:academy_lead') + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="read_lead", + role="potato", + form_entry=True, + form_entry_kwargs=generate_form_entry_kwargs(), + ) + + url = reverse_lazy("marketing:academy_lead") response = self.client.get(url) json = response.json() expected = get_serializer(self, model.form_entry) - del expected['custom_fields'] + del expected["custom_fields"] expected = [expected] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_form_entry_dict(), [{**self.model_to_dict(model, 'form_entry')}]) + self.assertEqual(self.all_form_entry_dict(), [{**self.model_to_dict(model, "form_entry")}]) """ 🔽🔽🔽 Storage status in querystring @@ -179,44 +182,48 @@ def test_academy_lead(self): def test_academy_lead__with_bad_storage_status_in_querystring(self): """Test /cohort/:id/user without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_lead', - role='potato', - form_entry=True, - form_entry_kwargs=generate_form_entry_kwargs()) - - url = reverse_lazy('marketing:academy_lead') + '?storage_status=freyja' + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="read_lead", + role="potato", + form_entry=True, + form_entry_kwargs=generate_form_entry_kwargs(), + ) + + url = reverse_lazy("marketing:academy_lead") + "?storage_status=freyja" response = self.client.get(url) json = response.json() 
expected = [] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_form_entry_dict(), [{**self.model_to_dict(model, 'form_entry')}]) + self.assertEqual(self.all_form_entry_dict(), [{**self.model_to_dict(model, "form_entry")}]) def test_academy_lead__with_storage_status_in_querystring(self): """Test /cohort/:id/user without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_lead', - role='potato', - form_entry=True, - form_entry_kwargs=generate_form_entry_kwargs()) - - url = reverse_lazy('marketing:academy_lead') + f'?storage_status={model.form_entry.storage_status}' + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="read_lead", + role="potato", + form_entry=True, + form_entry_kwargs=generate_form_entry_kwargs(), + ) + + url = reverse_lazy("marketing:academy_lead") + f"?storage_status={model.form_entry.storage_status}" response = self.client.get(url) json = response.json() expected = get_serializer(self, model.form_entry) - del expected['custom_fields'] + del expected["custom_fields"] expected = [expected] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_form_entry_dict(), [{**self.model_to_dict(model, 'form_entry')}]) + self.assertEqual(self.all_form_entry_dict(), [{**self.model_to_dict(model, "form_entry")}]) """ 🔽🔽🔽 Course in querystring @@ -225,43 +232,47 @@ def test_academy_lead__with_storage_status_in_querystring(self): def test_academy_lead__with_bad_course_in_querystring(self): """Test /cohort/:id/user without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_lead', - role='potato', - form_entry=True, - form_entry_kwargs=generate_form_entry_kwargs()) - - url = reverse_lazy('marketing:academy_lead') + '?course=freyja' + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="read_lead", + role="potato", + form_entry=True, + form_entry_kwargs=generate_form_entry_kwargs(), + ) + + url = reverse_lazy("marketing:academy_lead") + "?course=freyja" response = self.client.get(url) json = response.json() expected = [] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_form_entry_dict(), [{**self.model_to_dict(model, 'form_entry')}]) + self.assertEqual(self.all_form_entry_dict(), [{**self.model_to_dict(model, "form_entry")}]) def test_academy_lead__with_course_in_querystring(self): """Test /cohort/:id/user without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_lead', - role='potato', - form_entry=True, - form_entry_kwargs=generate_form_entry_kwargs()) - - url = reverse_lazy('marketing:academy_lead') + f'?course={model.form_entry.course}' + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="read_lead", + role="potato", + form_entry=True, + form_entry_kwargs=generate_form_entry_kwargs(), + ) + + url = reverse_lazy("marketing:academy_lead") + f"?course={model.form_entry.course}" response = self.client.get(url) json = response.json() expected = get_serializer(self, model.form_entry) - del expected['custom_fields'] + del expected["custom_fields"] expected = [expected] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - 
self.assertEqual(self.all_form_entry_dict(), [{**self.model_to_dict(model, 'form_entry')}]) + self.assertEqual(self.all_form_entry_dict(), [{**self.model_to_dict(model, "form_entry")}]) """ 🔽🔽🔽 Location in querystring @@ -270,65 +281,71 @@ def test_academy_lead__with_course_in_querystring(self): def test_academy_lead__with_bad_location_in_querystring(self): """Test /cohort/:id/user without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_lead', - role='potato', - form_entry=True, - form_entry_kwargs=generate_form_entry_kwargs()) - - url = reverse_lazy('marketing:academy_lead') + '?location=freyja' + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="read_lead", + role="potato", + form_entry=True, + form_entry_kwargs=generate_form_entry_kwargs(), + ) + + url = reverse_lazy("marketing:academy_lead") + "?location=freyja" response = self.client.get(url) json = response.json() expected = [] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_form_entry_dict(), [{**self.model_to_dict(model, 'form_entry')}]) + self.assertEqual(self.all_form_entry_dict(), [{**self.model_to_dict(model, "form_entry")}]) def test_academy_lead__with_location_in_querystring(self): """Test /cohort/:id/user without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_lead', - role='potato', - form_entry=True, - form_entry_kwargs=generate_form_entry_kwargs()) - - url = reverse_lazy('marketing:academy_lead') + f'?location={model.form_entry.location}' + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="read_lead", + role="potato", + form_entry=True, + form_entry_kwargs=generate_form_entry_kwargs(), + ) + + url = reverse_lazy("marketing:academy_lead") + f"?location={model.form_entry.location}" response = self.client.get(url) json = response.json() expected = get_serializer(self, model.form_entry) - del expected['custom_fields'] + del expected["custom_fields"] expected = [expected] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_form_entry_dict(), [{**self.model_to_dict(model, 'form_entry')}]) + self.assertEqual(self.all_form_entry_dict(), [{**self.model_to_dict(model, "form_entry")}]) def test_academy_lead__with_location_alias_in_querystring(self): """Test /cohort/:id/user without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_lead', - role='potato', - form_entry=True, - form_entry_kwargs=generate_form_entry_kwargs()) - - url = reverse_lazy('marketing:academy_lead') + f'?location_alias={model.form_entry.location}' + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="read_lead", + role="potato", + form_entry=True, + form_entry_kwargs=generate_form_entry_kwargs(), + ) + + url = reverse_lazy("marketing:academy_lead") + f"?location_alias={model.form_entry.location}" response = self.client.get(url) json = response.json() expected = get_serializer(self, model.form_entry) - del expected['custom_fields'] + del expected["custom_fields"] expected = [expected] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_form_entry_dict(), [{**self.model_to_dict(model, 'form_entry')}]) + 
self.assertEqual(self.all_form_entry_dict(), [{**self.model_to_dict(model, "form_entry")}]) """ 🔽🔽🔽 utm_term in querystring @@ -337,43 +354,47 @@ def test_academy_lead__with_location_alias_in_querystring(self): def test_academy_lead__with_bad_utm_term_in_querystring(self): """Test /cohort/:id/user without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_lead', - role='potato', - form_entry=True, - form_entry_kwargs=generate_form_entry_kwargs()) - - url = reverse_lazy('marketing:academy_lead') + '?utm_term=freyja' + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="read_lead", + role="potato", + form_entry=True, + form_entry_kwargs=generate_form_entry_kwargs(), + ) + + url = reverse_lazy("marketing:academy_lead") + "?utm_term=freyja" response = self.client.get(url) json = response.json() expected = [] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_form_entry_dict(), [{**self.model_to_dict(model, 'form_entry')}]) + self.assertEqual(self.all_form_entry_dict(), [{**self.model_to_dict(model, "form_entry")}]) def test_academy_lead__with_utm_term_in_querystring(self): """Test /cohort/:id/user without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_lead', - role='potato', - form_entry=True, - form_entry_kwargs=generate_form_entry_kwargs()) - - url = reverse_lazy('marketing:academy_lead') + f'?utm_term={model.form_entry.utm_term}' + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="read_lead", + role="potato", + form_entry=True, + form_entry_kwargs=generate_form_entry_kwargs(), + ) + + url = reverse_lazy("marketing:academy_lead") + f"?utm_term={model.form_entry.utm_term}" response = self.client.get(url) json = response.json() expected = get_serializer(self, model.form_entry) - del expected['custom_fields'] + del expected["custom_fields"] expected = [expected] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_form_entry_dict(), [{**self.model_to_dict(model, 'form_entry')}]) + self.assertEqual(self.all_form_entry_dict(), [{**self.model_to_dict(model, "form_entry")}]) """ 🔽🔽🔽 utm_source in querystring @@ -382,43 +403,47 @@ def test_academy_lead__with_utm_term_in_querystring(self): def test_academy_lead__with_bad_utm_source_in_querystring(self): """Test /cohort/:id/user without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_lead', - role='potato', - form_entry=True, - form_entry_kwargs=generate_form_entry_kwargs()) - - url = reverse_lazy('marketing:academy_lead') + '?utm_source=freyja' + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="read_lead", + role="potato", + form_entry=True, + form_entry_kwargs=generate_form_entry_kwargs(), + ) + + url = reverse_lazy("marketing:academy_lead") + "?utm_source=freyja" response = self.client.get(url) json = response.json() expected = [] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_form_entry_dict(), [{**self.model_to_dict(model, 'form_entry')}]) + self.assertEqual(self.all_form_entry_dict(), [{**self.model_to_dict(model, "form_entry")}]) def test_academy_lead__with_utm_source_in_querystring(self): """Test 
/cohort/:id/user without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_lead', - role='potato', - form_entry=True, - form_entry_kwargs=generate_form_entry_kwargs()) - - url = reverse_lazy('marketing:academy_lead') + f'?utm_source={model.form_entry.utm_source}' + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="read_lead", + role="potato", + form_entry=True, + form_entry_kwargs=generate_form_entry_kwargs(), + ) + + url = reverse_lazy("marketing:academy_lead") + f"?utm_source={model.form_entry.utm_source}" response = self.client.get(url) json = response.json() expected = get_serializer(self, model.form_entry) - del expected['custom_fields'] + del expected["custom_fields"] expected = [expected] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_form_entry_dict(), [{**self.model_to_dict(model, 'form_entry')}]) + self.assertEqual(self.all_form_entry_dict(), [{**self.model_to_dict(model, "form_entry")}]) """ 🔽🔽🔽 Start in querystring @@ -427,12 +452,10 @@ def test_academy_lead__with_utm_source_in_querystring(self): def test_academy_lead__with_bad_start_in_querystring(self): """Test /cohort/:id/user without auth""" self.headers(academy=1) - url = reverse_lazy('marketing:academy_lead') + '?start=2100-01-01' - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_lead', - role='potato', - form_entry=True) + url = reverse_lazy("marketing:academy_lead") + "?start=2100-01-01" + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_lead", role="potato", form_entry=True + ) response = self.client.get(url) json = response.json() @@ -440,29 +463,27 @@ def test_academy_lead__with_bad_start_in_querystring(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_form_entry_dict(), [{**self.model_to_dict(model, 'form_entry')}]) + self.assertEqual(self.all_form_entry_dict(), [{**self.model_to_dict(model, "form_entry")}]) def test_academy_lead__with_start_in_querystring(self): """Test /cohort/:id/user without auth""" self.headers(academy=1) - query_date = (timezone.now() - timedelta(hours=48)).strftime('%Y-%m-%d') - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_lead', - role='potato', - form_entry=True) - - url = reverse_lazy('marketing:academy_lead') + f'?start={query_date}' + query_date = (timezone.now() - timedelta(hours=48)).strftime("%Y-%m-%d") + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_lead", role="potato", form_entry=True + ) + + url = reverse_lazy("marketing:academy_lead") + f"?start={query_date}" response = self.client.get(url) json = response.json() expected = get_serializer(self, model.form_entry) - del expected['custom_fields'] + del expected["custom_fields"] expected = [expected] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_form_entry_dict(), [{**self.model_to_dict(model, 'form_entry')}]) + self.assertEqual(self.all_form_entry_dict(), [{**self.model_to_dict(model, "form_entry")}]) """ 🔽🔽🔽 End in querystring @@ -471,12 +492,10 @@ def test_academy_lead__with_start_in_querystring(self): def test_academy_lead__with_bad_end_in_querystring(self): """Test /cohort/:id/user without auth""" self.headers(academy=1) - url 
= reverse_lazy('marketing:academy_lead') + '?end=1900-01-01' - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_lead', - role='potato', - form_entry=True) + url = reverse_lazy("marketing:academy_lead") + "?end=1900-01-01" + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_lead", role="potato", form_entry=True + ) response = self.client.get(url) json = response.json() @@ -484,29 +503,27 @@ def test_academy_lead__with_bad_end_in_querystring(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_form_entry_dict(), [{**self.model_to_dict(model, 'form_entry')}]) + self.assertEqual(self.all_form_entry_dict(), [{**self.model_to_dict(model, "form_entry")}]) def test_academy_lead__with_end_in_querystring(self): """Test /cohort/:id/user without auth""" self.headers(academy=1) - query_date = (timezone.now() + timedelta(hours=48)).strftime('%Y-%m-%d') - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_lead', - role='potato', - form_entry=True) - - url = reverse_lazy('marketing:academy_lead') + f'?end={query_date}' + query_date = (timezone.now() + timedelta(hours=48)).strftime("%Y-%m-%d") + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_lead", role="potato", form_entry=True + ) + + url = reverse_lazy("marketing:academy_lead") + f"?end={query_date}" response = self.client.get(url) json = response.json() expected = get_serializer(self, model.form_entry) - del expected['custom_fields'] + del expected["custom_fields"] expected = [expected] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_form_entry_dict(), [{**self.model_to_dict(model, 'form_entry')}]) + self.assertEqual(self.all_form_entry_dict(), [{**self.model_to_dict(model, "form_entry")}]) """ 🔽🔽🔽 Bulk delete @@ -515,24 +532,24 @@ def test_academy_lead__with_end_in_querystring(self): def test_academy_lead__delete__in_bulk_with_one(self): """Test /cohort/:id/user without auth""" self.headers(academy=1) - many_fields = ['id'] + many_fields = ["id"] - base = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_lead', - role='potato', - academy=True, - active_campaign_academy=True) + base = self.generate_models( + authenticate=True, + profile_academy=True, + capability="crud_lead", + role="potato", + academy=True, + active_campaign_academy=True, + ) for field in many_fields: form_entry_kwargs = generate_form_entry_kwargs() - model = self.generate_models(form_entry=True, - contact=True, - automation=True, - form_entry_kwargs=form_entry_kwargs, - models=base) + model = self.generate_models( + form_entry=True, contact=True, automation=True, form_entry_kwargs=form_entry_kwargs, models=base + ) - url = (reverse_lazy('marketing:academy_lead') + f'?{field}=' + str(getattr(model['form_entry'], field))) + url = reverse_lazy("marketing:academy_lead") + f"?{field}=" + str(getattr(model["form_entry"], field)) response = self.client.delete(url) self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) @@ -541,32 +558,35 @@ def test_academy_lead__delete__in_bulk_with_one(self): def test_academy_lead__delete__in_bulk_with_two(self): """Test /cohort/:id/user without auth""" self.headers(academy=1) - many_fields = ['id'] + many_fields = ["id"] - base = self.generate_models(authenticate=True, - profile_academy=True, - 
capability='crud_lead', - role='potato', - academy=True, - active_campaign_academy=True) + base = self.generate_models( + authenticate=True, + profile_academy=True, + capability="crud_lead", + role="potato", + academy=True, + active_campaign_academy=True, + ) for field in many_fields: form_entry_kwargs = generate_form_entry_kwargs() - model1 = self.generate_models(form_entry=True, - contact=True, - automation=True, - form_entry_kwargs=form_entry_kwargs, - models=base) + model1 = self.generate_models( + form_entry=True, contact=True, automation=True, form_entry_kwargs=form_entry_kwargs, models=base + ) form_entry_kwargs = generate_form_entry_kwargs() - model2 = self.generate_models(form_entry=True, - contact=True, - automation=True, - form_entry_kwargs=form_entry_kwargs, - models=base) - - url = (reverse_lazy('marketing:academy_lead') + f'?{field}=' + str(getattr(model1['form_entry'], field)) + - ',' + str(getattr(model2['form_entry'], field))) + model2 = self.generate_models( + form_entry=True, contact=True, automation=True, form_entry_kwargs=form_entry_kwargs, models=base + ) + + url = ( + reverse_lazy("marketing:academy_lead") + + f"?{field}=" + + str(getattr(model1["form_entry"], field)) + + "," + + str(getattr(model2["form_entry"], field)) + ) response = self.client.delete(url) self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) @@ -577,160 +597,158 @@ def test_academy_lead__delete__in_bulk_with_two(self): """ def test_academy_lead__with_full_name_in_querystring(self): - """Test /academy/lead """ + """Test /academy/lead""" self.headers(academy=1) - base = self.generate_models(authenticate=True, - profile_academy=True, - academy=True, - capability='read_lead', - role='potato') + base = self.generate_models( + authenticate=True, profile_academy=True, academy=True, capability="read_lead", role="potato" + ) form_entry_kwargs_1 = generate_form_entry_kwargs() form_entry_kwargs_2 = generate_form_entry_kwargs() - form_entry_kwargs_1['first_name'] = 'Michael' - form_entry_kwargs_1['last_name'] = 'Jordan' + form_entry_kwargs_1["first_name"] = "Michael" + form_entry_kwargs_1["last_name"] = "Jordan" models = [ self.generate_models(form_entry_kwargs=form_entry_kwargs_1, form_entry=True, models=base), - self.generate_models(form_entry_kwargs=form_entry_kwargs_2, form_entry=True, models=base) + self.generate_models(form_entry_kwargs=form_entry_kwargs_2, form_entry=True, models=base), ] - base_url = reverse_lazy('marketing:academy_lead') - url = f'{base_url}?like={models[0].form_entry.first_name} {models[0].form_entry.last_name}' + base_url = reverse_lazy("marketing:academy_lead") + url = f"{base_url}?like={models[0].form_entry.first_name} {models[0].form_entry.last_name}" response = self.client.get(url) json = response.json() expected = get_serializer(self, models[0].form_entry) - del expected['custom_fields'] + del expected["custom_fields"] expected = [expected] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_form_entry_dict(), [{**self.model_to_dict(model, 'form_entry')} for model in models]) + self.assertEqual(self.all_form_entry_dict(), [{**self.model_to_dict(model, "form_entry")} for model in models]) def test_academy_lead__with_first_name_in_querystring(self): - """Test /academy/lead """ + """Test /academy/lead""" self.headers(academy=1) - base = self.generate_models(authenticate=True, - profile_academy=True, - academy=True, - capability='read_lead', - role='potato') + base = self.generate_models( + 
authenticate=True, profile_academy=True, academy=True, capability="read_lead", role="potato" + ) form_entry_kwargs_1 = generate_form_entry_kwargs() form_entry_kwargs_2 = generate_form_entry_kwargs() - form_entry_kwargs_1['first_name'] = 'Michael' - form_entry_kwargs_1['last_name'] = 'Jordan' + form_entry_kwargs_1["first_name"] = "Michael" + form_entry_kwargs_1["last_name"] = "Jordan" models = [ self.generate_models(form_entry_kwargs=form_entry_kwargs_1, form_entry=True, models=base), - self.generate_models(form_entry_kwargs=form_entry_kwargs_2, form_entry=True, models=base) + self.generate_models(form_entry_kwargs=form_entry_kwargs_2, form_entry=True, models=base), ] - base_url = reverse_lazy('marketing:academy_lead') - url = f'{base_url}?like={models[0].form_entry.first_name}' + base_url = reverse_lazy("marketing:academy_lead") + url = f"{base_url}?like={models[0].form_entry.first_name}" response = self.client.get(url) json = response.json() expected = get_serializer(self, models[0].form_entry) - del expected['custom_fields'] + del expected["custom_fields"] expected = [expected] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_form_entry_dict(), [{**self.model_to_dict(model, 'form_entry')} for model in models]) + self.assertEqual(self.all_form_entry_dict(), [{**self.model_to_dict(model, "form_entry")} for model in models]) def test_academy_lead__with_last_name_in_querystring(self): - """Test /academy/lead """ + """Test /academy/lead""" self.headers(academy=1) - base = self.generate_models(authenticate=True, - profile_academy=True, - academy=True, - capability='read_lead', - role='potato') + base = self.generate_models( + authenticate=True, profile_academy=True, academy=True, capability="read_lead", role="potato" + ) form_entry_kwargs_1 = generate_form_entry_kwargs() form_entry_kwargs_2 = generate_form_entry_kwargs() - form_entry_kwargs_1['first_name'] = 'Michael' - form_entry_kwargs_1['last_name'] = 'Jordan' + form_entry_kwargs_1["first_name"] = "Michael" + form_entry_kwargs_1["last_name"] = "Jordan" models = [ self.generate_models(form_entry_kwargs=form_entry_kwargs_1, form_entry=True, models=base), - self.generate_models(form_entry_kwargs=form_entry_kwargs_2, form_entry=True, models=base) + self.generate_models(form_entry_kwargs=form_entry_kwargs_2, form_entry=True, models=base), ] - base_url = reverse_lazy('marketing:academy_lead') - url = f'{base_url}?like={models[0].form_entry.last_name}' + base_url = reverse_lazy("marketing:academy_lead") + url = f"{base_url}?like={models[0].form_entry.last_name}" response = self.client.get(url) json = response.json() expected = get_serializer(self, models[0].form_entry) - del expected['custom_fields'] + del expected["custom_fields"] expected = [expected] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_form_entry_dict(), [{**self.model_to_dict(model, 'form_entry')} for model in models]) + self.assertEqual(self.all_form_entry_dict(), [{**self.model_to_dict(model, "form_entry")} for model in models]) def test_academy_lead__with_email_in_querystring(self): - """Test /academy/lead """ + """Test /academy/lead""" self.headers(academy=1) - base = self.generate_models(authenticate=True, - profile_academy=True, - academy=True, - capability='read_lead', - role='potato') + base = self.generate_models( + authenticate=True, profile_academy=True, academy=True, capability="read_lead", role="potato" + ) form_entry_kwargs_1 = 
generate_form_entry_kwargs() form_entry_kwargs_2 = generate_form_entry_kwargs() - form_entry_kwargs_1['email'] = 'michael@jordan.com' + form_entry_kwargs_1["email"] = "michael@jordan.com" models = [ self.generate_models(form_entry_kwargs=form_entry_kwargs_1, form_entry=True, models=base), - self.generate_models(form_entry_kwargs=form_entry_kwargs_2, form_entry=True, models=base) + self.generate_models(form_entry_kwargs=form_entry_kwargs_2, form_entry=True, models=base), ] - base_url = reverse_lazy('marketing:academy_lead') - url = f'{base_url}?like={models[0].form_entry.email}' + base_url = reverse_lazy("marketing:academy_lead") + url = f"{base_url}?like={models[0].form_entry.email}" response = self.client.get(url) json = response.json() expected = get_serializer(self, models[0].form_entry) - del expected['custom_fields'] + del expected["custom_fields"] expected = [expected] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_form_entry_dict(), [{**self.model_to_dict(model, 'form_entry')} for model in models]) + self.assertEqual(self.all_form_entry_dict(), [{**self.model_to_dict(model, "form_entry")} for model in models]) """ 🔽🔽🔽 Spy extensions """ - @patch.object(APIViewExtensionHandlers, '_spy_extensions', MagicMock()) + @patch.object(APIViewExtensionHandlers, "_spy_extensions", MagicMock()) def test_academy_lead__spy_extensions(self): """Test /cohort/:id/user without auth""" self.headers(academy=1) - url = reverse_lazy('marketing:academy_lead') - model = self.generate_models(authenticate=True, profile_academy=True, capability='read_lead', role='potato') + url = reverse_lazy("marketing:academy_lead") + model = self.generate_models(authenticate=True, profile_academy=True, capability="read_lead", role="potato") self.client.get(url) - self.assertEqual(APIViewExtensionHandlers._spy_extensions.call_args_list, [ - call(['LanguageExtension', 'LookupExtension', 'PaginationExtension', 'SortExtension']), - ]) + self.assertEqual( + APIViewExtensionHandlers._spy_extensions.call_args_list, + [ + call(["LanguageExtension", "LookupExtension", "PaginationExtension", "SortExtension"]), + ], + ) - @patch.object(APIViewExtensionHandlers, '_spy_extension_arguments', MagicMock()) + @patch.object(APIViewExtensionHandlers, "_spy_extension_arguments", MagicMock()) def test_academy_lead__spy_extension_arguments(self): """Test /cohort/:id/user without auth""" self.headers(academy=1) - url = reverse_lazy('marketing:academy_lead') - model = self.generate_models(authenticate=True, profile_academy=True, capability='read_lead', role='potato') + url = reverse_lazy("marketing:academy_lead") + model = self.generate_models(authenticate=True, profile_academy=True, capability="read_lead", role="potato") self.client.get(url) - self.assertEqual(APIViewExtensionHandlers._spy_extension_arguments.call_args_list, [ - call(sort='-created_at', paginate=True), - ]) + self.assertEqual( + APIViewExtensionHandlers._spy_extension_arguments.call_args_list, + [ + call(sort="-created_at", paginate=True), + ], + ) diff --git a/breathecode/marketing/tests/urls/tests_academy_tag_slug.py b/breathecode/marketing/tests/urls/tests_academy_tag_slug.py index 0ac7c4ff9..d4225fd16 100644 --- a/breathecode/marketing/tests/urls/tests_academy_tag_slug.py +++ b/breathecode/marketing/tests/urls/tests_academy_tag_slug.py @@ -10,12 +10,12 @@ def get_serializer(self, tag, data={}): return { - 'id': tag.id, - 'tag_type': tag.tag_type, - 'description': tag.description, - 'automation': 
tag.automation, - 'disputed_reason': tag.disputed_reason, - 'disputed_at': tag.disputed_at, + "id": tag.id, + "tag_type": tag.tag_type, + "description": tag.description, + "automation": tag.automation, + "disputed_reason": tag.disputed_reason, + "disputed_at": tag.disputed_at, **data, } @@ -24,10 +24,10 @@ class TestTagSlugView(MarketingTestCase): def test_tag_slug__without_auth(self): """Test /tag/:slug without auth""" - url = reverse_lazy('marketing:academy_tag_slug', kwargs={'tag_slug': 'slug'}) + url = reverse_lazy("marketing:academy_tag_slug", kwargs={"tag_slug": "slug"}) response = self.client.put(url) json = response.json() - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) @@ -35,15 +35,14 @@ def test_tag_slug__without_auth(self): def test_tag_slug__without_academy_header(self): """Test /tag/:slug without academy header""" - url = reverse_lazy('marketing:academy_tag_slug', kwargs={'tag_slug': 'slug'}) - model = self.generate_models(authenticate=True, profile_academy=True, capability='crud_tag', role='potato') + url = reverse_lazy("marketing:academy_tag_slug", kwargs={"tag_slug": "slug"}) + model = self.generate_models(authenticate=True, profile_academy=True, capability="crud_tag", role="potato") response = self.client.put(url) json = response.json() expected = { - 'detail': 'Missing academy_id parameter expected for the endpoint url or ' - "'Academy' header", - 'status_code': 403 + "detail": "Missing academy_id parameter expected for the endpoint url or " "'Academy' header", + "status_code": 403, } self.assertEqual(json, expected) @@ -53,12 +52,12 @@ def test_tag_slug__without_academy_header(self): def test_tag_slug__without_data(self): """Test /tag/:slug without data""" self.headers(academy=1) - url = reverse_lazy('marketing:academy_tag_slug', kwargs={'tag_slug': 'slug'}) - model = self.generate_models(authenticate=True, profile_academy=True, capability='crud_tag', role='potato') + url = reverse_lazy("marketing:academy_tag_slug", kwargs={"tag_slug": "slug"}) + model = self.generate_models(authenticate=True, profile_academy=True, capability="crud_tag", role="potato") response = self.client.put(url) json = response.json() - expected = {'detail': 'tag-not-found', 'status_code': 400} + expected = {"detail": "tag-not-found", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -69,26 +68,26 @@ def test_tag_slug__without_data(self): """ def test_tag_slug__put(self): - """Test /tag/:slug """ + """Test /tag/:slug""" self.headers(academy=1) model = self.generate_models( authenticate=True, profile_academy=True, - capability='crud_tag', - role='potato', + capability="crud_tag", + role="potato", active_campaign_academy=True, ) - tag_model = self.generate_models(tag={'ac_academy': model.active_campaign_academy, 'slug': 'tag_slug'}) + tag_model = self.generate_models(tag={"ac_academy": model.active_campaign_academy, "slug": "tag_slug"}) - url = reverse_lazy('marketing:academy_tag_slug', kwargs={'tag_slug': 'tag_slug'}) + url = reverse_lazy("marketing:academy_tag_slug", kwargs={"tag_slug": "tag_slug"}) data = { - 'tag_type': 'DISCOVERY', - 'description': 'descriptive', + "tag_type": "DISCOVERY", + "description": "descriptive", } - response = self.client.put(url, data, format='json') + response 
= self.client.put(url, data, format="json") json = response.json() expected = get_serializer(self, tag_model.tag, data=data) @@ -97,86 +96,93 @@ def test_tag_slug__put(self): self.assertEqual(response.status_code, 200) def test_tag_slug__put_many_without_id(self): - """Test /tag/:slug """ + """Test /tag/:slug""" self.headers(academy=1) model = self.generate_models( authenticate=True, profile_academy=True, - capability='crud_tag', - role='potato', + capability="crud_tag", + role="potato", active_campaign_academy=True, ) - tag_model = self.generate_models(tag={'ac_academy': model.active_campaign_academy, 'slug': 'tag_slug'}) + tag_model = self.generate_models(tag={"ac_academy": model.active_campaign_academy, "slug": "tag_slug"}) - url = reverse_lazy('marketing:academy_tag') - data = [{ - 'tag_type': 'DISCOVERY', - 'description': 'descriptive', - }] + url = reverse_lazy("marketing:academy_tag") + data = [ + { + "tag_type": "DISCOVERY", + "description": "descriptive", + } + ] - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() - expected = {'detail': 'without-id', 'status_code': 400} + expected = {"detail": "without-id", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) def test_tag_slug__put_many_with_wrong_id(self): - """Test /tag/:slug """ + """Test /tag/:slug""" self.headers(academy=1) model = self.generate_models( authenticate=True, profile_academy=True, - capability='crud_tag', - role='potato', + capability="crud_tag", + role="potato", active_campaign_academy=True, ) - tag_model = self.generate_models(tag={'ac_academy': model.active_campaign_academy, 'slug': 'tag_slug'}) + tag_model = self.generate_models(tag={"ac_academy": model.active_campaign_academy, "slug": "tag_slug"}) - url = reverse_lazy('marketing:academy_tag') - data = [{ - 'id': 2, - 'tag_type': 'DISCOVERY', - 'description': 'descriptive', - }] + url = reverse_lazy("marketing:academy_tag") + data = [ + { + "id": 2, + "tag_type": "DISCOVERY", + "description": "descriptive", + } + ] - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() - expected = {'detail': 'not-found', 'status_code': 404} + expected = {"detail": "not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) def test_tag_slug__put_many(self): - """Test /tag/:slug """ + """Test /tag/:slug""" self.headers(academy=1) model = self.generate_models( authenticate=True, profile_academy=True, - capability='crud_tag', - role='potato', + capability="crud_tag", + role="potato", active_campaign_academy=True, ) - tag_model = self.generate_models(tag=2, tag_kwargs={'ac_academy': model.active_campaign_academy}) - - url = reverse_lazy('marketing:academy_tag') - data = [{ - 'id': 1, - 'tag_type': 'DISCOVERY', - }, { - 'id': 2, - 'tag_type': 'DISCOVERY', - }] - - response = self.client.put(url, data, format='json') + tag_model = self.generate_models(tag=2, tag_kwargs={"ac_academy": model.active_campaign_academy}) + + url = reverse_lazy("marketing:academy_tag") + data = [ + { + "id": 1, + "tag_type": "DISCOVERY", + }, + { + "id": 2, + "tag_type": "DISCOVERY", + }, + ] + + response = self.client.put(url, data, format="json") json = response.json() expected = [get_serializer(self, tag, data=data[i]) for i, tag in enumerate(tag_model.tag)] diff --git 
a/breathecode/marketing/tests/urls/tests_active_campaign.py b/breathecode/marketing/tests/urls/tests_active_campaign.py index 19b7cb491..b218210da 100644 --- a/breathecode/marketing/tests/urls/tests_active_campaign.py +++ b/breathecode/marketing/tests/urls/tests_active_campaign.py @@ -10,37 +10,38 @@ def get_serializer(active_campaign_academy, academy, event_attendancy_automation=None, data={}): return { - 'id': active_campaign_academy.id, - 'ac_key': active_campaign_academy.ac_key, - 'ac_url': active_campaign_academy.ac_url, - 'duplicate_leads_delta_avoidance': str(active_campaign_academy.duplicate_leads_delta_avoidance.total_seconds()), - 'sync_status': active_campaign_academy.sync_status, - 'sync_message': active_campaign_academy.sync_message, - 'last_interaction_at': active_campaign_academy.last_interaction_at, - 'event_attendancy_automation': event_attendancy_automation, - 'academy': { - 'id': academy.id, - 'slug': academy.slug, - 'name': academy.name, + "id": active_campaign_academy.id, + "ac_key": active_campaign_academy.ac_key, + "ac_url": active_campaign_academy.ac_url, + "duplicate_leads_delta_avoidance": str(active_campaign_academy.duplicate_leads_delta_avoidance.total_seconds()), + "sync_status": active_campaign_academy.sync_status, + "sync_message": active_campaign_academy.sync_message, + "last_interaction_at": active_campaign_academy.last_interaction_at, + "event_attendancy_automation": event_attendancy_automation, + "academy": { + "id": academy.id, + "slug": academy.slug, + "name": academy.name, }, - 'created_at': datetime_to_iso_format(active_campaign_academy.created_at), - 'updated_at': datetime_to_iso_format(active_campaign_academy.updated_at), + "created_at": datetime_to_iso_format(active_campaign_academy.created_at), + "updated_at": datetime_to_iso_format(active_campaign_academy.updated_at), **data, } class ActiveCampaignTestSuite(MarketingTestCase): """Test /activecampaign""" + """ 🔽🔽🔽 without Auth """ def test_active_campaign_without_auth(self): """Test /activecampaign without auth""" - url = reverse_lazy('marketing:activecampaign') + url = reverse_lazy("marketing:activecampaign") response = self.client.get(url) json = response.json() - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) @@ -52,7 +53,7 @@ def test_active_campaign_without_auth(self): def test_without_capability(self): """Test /activecampaign without data""" - url = reverse_lazy('marketing:activecampaign') + url = reverse_lazy("marketing:activecampaign") self.generate_models( authenticate=True, academy=True, @@ -61,7 +62,7 @@ def test_without_capability(self): self.headers(academy=1) response = self.client.get(url) json = response.json() - expected = {'detail': "You (user: 1) don't have this capability: read_lead for academy 1", 'status_code': 403} + expected = {"detail": "You (user: 1) don't have this capability: read_lead for academy 1", "status_code": 403} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) @@ -72,7 +73,7 @@ def test_without_capability(self): def test_without_academy_header(self): """Test /activecampaign without data""" - url = reverse_lazy('marketing:activecampaign') + url = reverse_lazy("marketing:activecampaign") self.generate_models( authenticate=True, academy=True, @@ -81,8 +82,8 @@ def 
test_without_academy_header(self): response = self.client.get(url) json = response.json() expected = { - 'detail': "Missing academy_id parameter expected for the endpoint url or 'Academy' header", - 'status_code': 403 + "detail": "Missing academy_id parameter expected for the endpoint url or 'Academy' header", + "status_code": 403, } self.assertEqual(json, expected) @@ -94,18 +95,18 @@ def test_without_academy_header(self): def test_without_data(self): """Test /activecampaign without data""" - url = reverse_lazy('marketing:activecampaign') + url = reverse_lazy("marketing:activecampaign") self.generate_models( authenticate=True, academy=True, profile_academy=True, - capability='read_lead', - role='potato', + capability="read_lead", + role="potato", ) self.headers(academy=1) response = self.client.get(url) json = response.json() - expected = {'detail': 'Active Campaign Academy not found', 'status_code': 404} + expected = {"detail": "Active Campaign Academy not found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) @@ -117,14 +118,14 @@ def test_without_data(self): def test_active_campaign(self): """Test /activecampaign""" - url = reverse_lazy('marketing:activecampaign') + url = reverse_lazy("marketing:activecampaign") model = self.generate_models( authenticate=True, academy=True, profile_academy=True, active_campaign_academy=True, - capability='read_lead', - role='potato', + capability="read_lead", + role="potato", ) self.headers(academy=1) response = self.client.get(url) @@ -141,21 +142,21 @@ def test_active_campaign(self): def test_post_active_campaign_without_academy(self): """Test /activecampaign""" - url = reverse_lazy('marketing:activecampaign') + url = reverse_lazy("marketing:activecampaign") model = self.generate_models( authenticate=True, academy=True, profile_academy=True, - capability='crud_lead', - role='potato', + capability="crud_lead", + role="potato", ) - data = {'ac_key': '55555555', 'ac_url': 'https://www.potato.com/'} + data = {"ac_key": "55555555", "ac_url": "https://www.potato.com/"} response = self.client.post(url, data) json = response.json() expected = { - 'detail': "Missing academy_id parameter expected for the endpoint url or 'Academy' header", - 'status_code': 403 + "detail": "Missing academy_id parameter expected for the endpoint url or 'Academy' header", + "status_code": 403, } self.assertEqual(json, expected) @@ -167,32 +168,32 @@ def test_post_active_campaign_without_academy(self): def test_post_active_campaign(self): """Test /activecampaign""" - url = reverse_lazy('marketing:activecampaign') + url = reverse_lazy("marketing:activecampaign") model = self.generate_models( authenticate=True, academy=True, profile_academy=True, - capability='crud_lead', - role='potato', + capability="crud_lead", + role="potato", ) self.headers(academy=1) - data = {'ac_key': '55555555', 'ac_url': 'https://www.potato.com/'} + data = {"ac_key": "55555555", "ac_url": "https://www.potato.com/"} response = self.client.post(url, data) json = response.json() - self.assertDatetime(json['created_at']) - self.assertDatetime(json['updated_at']) + self.assertDatetime(json["created_at"]) + self.assertDatetime(json["updated_at"]) - del json['created_at'] - del json['updated_at'] + del json["created_at"] + del json["updated_at"] expected = { - 'id': 1, - 'event_attendancy_automation': None, - 'last_interaction_at': None, - 'sync_message': None, - 'sync_status': 'INCOMPLETED', - 'duplicate_leads_delta_avoidance': '00:30:00', + 
"id": 1, + "event_attendancy_automation": None, + "last_interaction_at": None, + "sync_message": None, + "sync_status": "INCOMPLETED", + "duplicate_leads_delta_avoidance": "00:30:00", **data, } diff --git a/breathecode/marketing/tests/urls/tests_active_campaign_id.py b/breathecode/marketing/tests/urls/tests_active_campaign_id.py index dd04af6ec..19a6605d3 100644 --- a/breathecode/marketing/tests/urls/tests_active_campaign_id.py +++ b/breathecode/marketing/tests/urls/tests_active_campaign_id.py @@ -10,17 +10,18 @@ class ActiveCampaignIdTestSuite(MarketingTestCase): """Test /activecampaign/ac_id""" + """ 🔽🔽🔽 without Auth """ def test_active_campaign_without_auth(self): """Test /activecampaign without auth""" - url = reverse_lazy('marketing:activecampaign_id', kwargs={'ac_id': 1}) - data = {'ac_key': '88888', 'ac_url': 'https://www.tomatoes.com/'} + url = reverse_lazy("marketing:activecampaign_id", kwargs={"ac_id": 1}) + data = {"ac_key": "88888", "ac_url": "https://www.tomatoes.com/"} response = self.client.put(url, data) json = response.json() - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) @@ -31,19 +32,19 @@ def test_active_campaign_without_auth(self): def test_without_capability(self): """Test /activecampaign without data""" - url = reverse_lazy('marketing:activecampaign_id', kwargs={'ac_id': 1}) + url = reverse_lazy("marketing:activecampaign_id", kwargs={"ac_id": 1}) self.generate_models( authenticate=True, academy=True, profile_academy=True, active_campaign_academy=True, - role='potato', + role="potato", ) - data = {'ac_key': '88888', 'ac_url': 'https://www.tomatoes.com/'} + data = {"ac_key": "88888", "ac_url": "https://www.tomatoes.com/"} self.headers(academy=1) response = self.client.put(url, data) json = response.json() - expected = {'detail': "You (user: 1) don't have this capability: crud_lead for academy 1", 'status_code': 403} + expected = {"detail": "You (user: 1) don't have this capability: crud_lead for academy 1", "status_code": 403} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) @@ -54,20 +55,20 @@ def test_without_capability(self): def test_without_data(self): """Test /activecampaign Put With wrong active campaign Id""" - url = reverse_lazy('marketing:activecampaign_id', kwargs={'ac_id': 2}) + url = reverse_lazy("marketing:activecampaign_id", kwargs={"ac_id": 2}) self.generate_models( authenticate=True, academy=True, profile_academy=True, - capability='crud_lead', + capability="crud_lead", active_campaign_academy=True, - role='potato', + role="potato", ) self.headers(academy=1) - data = {'ac_key': '55555555', 'ac_url': 'https://www.potato.com/'} + data = {"ac_key": "55555555", "ac_url": "https://www.potato.com/"} response = self.client.put(url, data) json = response.json() - expected = {'detail': 'active-campaign-not-found', 'status_code': 400} + expected = {"detail": "active-campaign-not-found", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -78,22 +79,22 @@ def test_without_data(self): def test_post_active_campaign_without_academy(self): """Test /activecampaign""" - url = reverse_lazy('marketing:activecampaign_id', kwargs={'ac_id': 1}) + url = reverse_lazy("marketing:activecampaign_id", kwargs={"ac_id": 1}) 
model = self.generate_models( authenticate=True, academy=True, profile_academy=True, active_campaign_academy=True, - capability='crud_lead', - role='potato', + capability="crud_lead", + role="potato", ) - data = {'ac_key': '55555555', 'ac_url': 'https://www.potato.com/'} + data = {"ac_key": "55555555", "ac_url": "https://www.potato.com/"} response = self.client.put(url, data) json = response.json() expected = { - 'detail': "Missing academy_id parameter expected for the endpoint url or 'Academy' header", - 'status_code': 403 + "detail": "Missing academy_id parameter expected for the endpoint url or 'Academy' header", + "status_code": 403, } self.assertEqual(json, expected) @@ -105,35 +106,35 @@ def test_post_active_campaign_without_academy(self): def test_put_active_campaign(self): """Test /activecampaign""" - url = reverse_lazy('marketing:activecampaign_id', kwargs={'ac_id': 1}) - ac_kwargs = {'ac_key': '55555555', 'ac_url': 'https://www.potato.com/'} + url = reverse_lazy("marketing:activecampaign_id", kwargs={"ac_id": 1}) + ac_kwargs = {"ac_key": "55555555", "ac_url": "https://www.potato.com/"} model = self.generate_models( authenticate=True, academy=True, profile_academy=True, active_campaign_academy=True, active_campaign_academy_kwargs=ac_kwargs, - capability='crud_lead', - role='potato', + capability="crud_lead", + role="potato", ) self.headers(academy=1) - data = {'ac_key': '88888', 'ac_url': 'https://www.tomatoes.com/'} + data = {"ac_key": "88888", "ac_url": "https://www.tomatoes.com/"} response = self.client.put(url, data) json = response.json() - self.assertDatetime(json['created_at']) - self.assertDatetime(json['updated_at']) + self.assertDatetime(json["created_at"]) + self.assertDatetime(json["updated_at"]) - del json['created_at'] - del json['updated_at'] + del json["created_at"] + del json["updated_at"] expected = { - 'id': 1, - 'event_attendancy_automation': None, - 'last_interaction_at': None, - 'sync_message': None, - 'sync_status': 'INCOMPLETED', - 'duplicate_leads_delta_avoidance': '00:30:00', + "id": 1, + "event_attendancy_automation": None, + "last_interaction_at": None, + "sync_message": None, + "sync_status": "INCOMPLETED", + "duplicate_leads_delta_avoidance": "00:30:00", **data, } diff --git a/breathecode/marketing/tests/urls/tests_app_slug_lead.py b/breathecode/marketing/tests/urls/tests_app_slug_lead.py index 71b136c1d..961e1106c 100644 --- a/breathecode/marketing/tests/urls/tests_app_slug_lead.py +++ b/breathecode/marketing/tests/urls/tests_app_slug_lead.py @@ -1,6 +1,7 @@ """ Test /academy/app_slug_lead """ + from django.utils import timezone from datetime import timedelta import re, string @@ -23,127 +24,127 @@ def post_serializer(data={}): return { - 'attribution_id': None, - 'ac_contact_id': None, - 'ac_deal_id': None, - 'ac_expected_cohort': None, - 'academy': 1, - 'automations': '', - 'browser_lang': None, - 'city': None, - 'client_comments': None, - 'contact': None, - 'country': None, - 'course': None, - 'current_download': None, - 'deal_status': None, - 'email': None, - 'fb_ad_id': None, - 'fb_adgroup_id': None, - 'fb_form_id': None, - 'fb_leadgen_id': None, - 'fb_page_id': None, - 'first_name': '', - 'gclid': None, - 'id': 1, - 'last_name': '', - 'latitude': None, - 'lead_generation_app': 1, - 'lead_type': None, - 'location': None, - 'longitude': None, - 'phone': None, - 'referral_key': None, - 'sentiment': None, - 'state': None, - 'storage_status': 'PENDING', - 'street_address': None, - 'tags': '', - 'user': None, - 'utm_campaign': None, - 'utm_medium': 
None, - 'utm_source': None, - 'utm_content': None, - 'utm_placement': None, - 'utm_term': None, - 'utm_plan': None, - 'sex': None, - 'custom_fields': None, - 'won_at': None, - 'zip_code': None, - 'utm_url': None, - 'storage_status_text': '', - 'ac_deal_owner_full_name': None, - 'ac_deal_course': None, - 'ac_deal_location': None, - 'ac_deal_owner_id': None, - 'ac_expected_cohort_date': None, - 'ac_deal_amount': None, - 'ac_deal_currency_code': None, + "attribution_id": None, + "ac_contact_id": None, + "ac_deal_id": None, + "ac_expected_cohort": None, + "academy": 1, + "automations": "", + "browser_lang": None, + "city": None, + "client_comments": None, + "contact": None, + "country": None, + "course": None, + "current_download": None, + "deal_status": None, + "email": None, + "fb_ad_id": None, + "fb_adgroup_id": None, + "fb_form_id": None, + "fb_leadgen_id": None, + "fb_page_id": None, + "first_name": "", + "gclid": None, + "id": 1, + "last_name": "", + "latitude": None, + "lead_generation_app": 1, + "lead_type": None, + "location": None, + "longitude": None, + "phone": None, + "referral_key": None, + "sentiment": None, + "state": None, + "storage_status": "PENDING", + "street_address": None, + "tags": "", + "user": None, + "utm_campaign": None, + "utm_medium": None, + "utm_source": None, + "utm_content": None, + "utm_placement": None, + "utm_term": None, + "utm_plan": None, + "sex": None, + "custom_fields": None, + "won_at": None, + "zip_code": None, + "utm_url": None, + "storage_status_text": "", + "ac_deal_owner_full_name": None, + "ac_deal_course": None, + "ac_deal_location": None, + "ac_deal_owner_id": None, + "ac_expected_cohort_date": None, + "ac_deal_amount": None, + "ac_deal_currency_code": None, **data, } def form_entry_field(data={}): return { - 'id': 1, - 'attribution_id': None, - 'ac_contact_id': None, - 'ac_deal_id': None, - 'ac_expected_cohort': None, - 'academy_id': 1, - 'automations': '', - 'browser_lang': None, - 'city': None, - 'client_comments': None, - 'contact_id': None, - 'country': None, - 'course': None, - 'current_download': None, - 'deal_status': None, - 'email': None, - 'fb_ad_id': None, - 'fb_adgroup_id': None, - 'fb_form_id': None, - 'fb_leadgen_id': None, - 'fb_page_id': None, - 'first_name': '', - 'gclid': None, - 'id': 1, - 'last_name': '', - 'latitude': None, - 'lead_generation_app_id': 1, - 'lead_type': None, - 'location': None, - 'longitude': None, - 'phone': None, - 'referral_key': None, - 'sentiment': None, - 'state': None, - 'storage_status': 'PENDING', - 'storage_status_text': '', - 'street_address': None, - 'tags': '', - 'user_id': None, - 'utm_campaign': None, - 'utm_medium': None, - 'utm_content': None, - 'utm_source': None, - 'utm_placement': None, - 'utm_term': None, - 'utm_plan': None, - 'utm_url': None, - 'sex': None, - 'custom_fields': None, - 'won_at': None, - 'zip_code': None, - 'ac_deal_course': None, - 'ac_deal_location': None, - 'ac_deal_owner_full_name': None, - 'ac_deal_owner_id': None, - 'ac_expected_cohort_date': None, - 'ac_deal_amount': None, - 'ac_deal_currency_code': None, + "id": 1, + "attribution_id": None, + "ac_contact_id": None, + "ac_deal_id": None, + "ac_expected_cohort": None, + "academy_id": 1, + "automations": "", + "browser_lang": None, + "city": None, + "client_comments": None, + "contact_id": None, + "country": None, + "course": None, + "current_download": None, + "deal_status": None, + "email": None, + "fb_ad_id": None, + "fb_adgroup_id": None, + "fb_form_id": None, + "fb_leadgen_id": None, + "fb_page_id": None, + 
"first_name": "", + "gclid": None, + "id": 1, + "last_name": "", + "latitude": None, + "lead_generation_app_id": 1, + "lead_type": None, + "location": None, + "longitude": None, + "phone": None, + "referral_key": None, + "sentiment": None, + "state": None, + "storage_status": "PENDING", + "storage_status_text": "", + "street_address": None, + "tags": "", + "user_id": None, + "utm_campaign": None, + "utm_medium": None, + "utm_content": None, + "utm_source": None, + "utm_placement": None, + "utm_term": None, + "utm_plan": None, + "utm_url": None, + "sex": None, + "custom_fields": None, + "won_at": None, + "zip_code": None, + "ac_deal_course": None, + "ac_deal_location": None, + "ac_deal_owner_full_name": None, + "ac_deal_owner_id": None, + "ac_expected_cohort_date": None, + "ac_deal_amount": None, + "ac_deal_currency_code": None, **data, } @@ -164,20 +165,20 @@ class AppSlugLeadTestSuite(MarketingTestCase): 🔽🔽🔽 Post without app slug or app_id """ - @patch('breathecode.marketing.tasks.persist_single_lead', MagicMock()) + @patch("breathecode.marketing.tasks.persist_single_lead", MagicMock()) @patch.multiple( - 'breathecode.services.google_cloud.Recaptcha', + "breathecode.services.google_cloud.Recaptcha", __init__=MagicMock(return_value=None), create_assessment=MagicMock(return_value=FakeRecaptcha()), ) def test_app_slug_lead__post__without_app_slug_or_app_id(self): from breathecode.marketing.tasks import persist_single_lead - url = reverse_lazy('marketing:app_slug_lead', kwargs={'app_slug': 'they-killed-kenny'}) + url = reverse_lazy("marketing:app_slug_lead", kwargs={"app_slug": "they-killed-kenny"}) response = self.client.post(url) json = response.json() - expected = {'detail': 'without-app-slug-or-app-id', 'status_code': 400} + expected = {"detail": "without-app-slug-or-app-id", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -189,26 +190,29 @@ def test_app_slug_lead__post__without_app_slug_or_app_id(self): 🔽🔽🔽 Post without app_id """ - @patch('breathecode.marketing.tasks.persist_single_lead', MagicMock()) + @patch("breathecode.marketing.tasks.persist_single_lead", MagicMock()) @patch.multiple( - 'breathecode.services.google_cloud.Recaptcha', + "breathecode.services.google_cloud.Recaptcha", __init__=MagicMock(return_value=None), create_assessment=MagicMock(return_value=FakeRecaptcha()), ) def test_app_slug_lead__post__without_app_id(self): from breathecode.marketing.tasks import persist_single_lead + model = self.generate_models(lead_generation_app=True) - url = (reverse_lazy('marketing:app_slug_lead', kwargs={'app_slug': 'they-killed-kenny'}) + - f'?app_id={model.lead_generation_app.app_id}') + url = ( + reverse_lazy("marketing:app_slug_lead", kwargs={"app_slug": "they-killed-kenny"}) + + f"?app_id={model.lead_generation_app.app_id}" + ) response = self.client.post(url) json = response.json() - expected = {'detail': 'without-app-id', 'status_code': 401} + expected = {"detail": "without-app-id", "status_code": 401} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) - self.assertEqual(self.all_lead_generation_app_dict(), [self.model_to_dict(model, 'lead_generation_app')]) + self.assertEqual(self.all_lead_generation_app_dict(), [self.model_to_dict(model, "lead_generation_app")]) self.assertEqual(persist_single_lead.delay.call_args_list, []) @@ -216,45 +220,53 @@ def test_app_slug_lead__post__without_app_id(self): 🔽🔽🔽 Post without required fields """ - 
@patch('breathecode.marketing.tasks.persist_single_lead', MagicMock()) + @patch("breathecode.marketing.tasks.persist_single_lead", MagicMock()) @patch.multiple( - 'breathecode.services.google_cloud.Recaptcha', + "breathecode.services.google_cloud.Recaptcha", __init__=MagicMock(return_value=None), create_assessment=MagicMock(return_value=FakeRecaptcha()), ) def test_app_slug_lead__post__without_required_fields(self): from breathecode.marketing.tasks import persist_single_lead + model = self.generate_models(lead_generation_app=True) - url = (reverse_lazy('marketing:app_slug_lead', kwargs={'app_slug': model.lead_generation_app.slug}) + - f'?app_id={model.lead_generation_app.app_id}') + url = ( + reverse_lazy("marketing:app_slug_lead", kwargs={"app_slug": model.lead_generation_app.slug}) + + f"?app_id={model.lead_generation_app.app_id}" + ) start = timezone.now() response = self.client.post(url) end = timezone.now() json = response.json() - expected = {'language': ['This field may not be null.']} + expected = {"language": ["This field may not be null."]} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) db = self.all_lead_generation_app_dict() - last_call_at = db[0]['last_call_at'] + last_call_at = db[0]["last_call_at"] self.assertGreater(end, last_call_at) self.assertGreater(last_call_at, start) - db[0]['last_call_at'] = None + db[0]["last_call_at"] = None - self.assertEqual(db, [{ - **self.model_to_dict(model, 'lead_generation_app'), - 'last_call_log': '{"language": ["This field may not be null."]}', - 'hits': 1, - 'last_call_status': 'ERROR', - 'last_request_data': '{}', - 'last_call_log': '{"language": ["This field may not be null."]}', - }]) + self.assertEqual( + db, + [ + { + **self.model_to_dict(model, "lead_generation_app"), + "last_call_log": '{"language": ["This field may not be null."]}', + "hits": 1, + "last_call_status": "ERROR", + "last_request_data": "{}", + "last_call_log": '{"language": ["This field may not be null."]}', + } + ], + ) self.assertEqual(persist_single_lead.delay.call_args_list, []) @@ -262,9 +274,9 @@ def test_app_slug_lead__post__without_required_fields(self): 🔽🔽🔽 Post data """ - @patch('breathecode.marketing.tasks.persist_single_lead', MagicMock()) + @patch("breathecode.marketing.tasks.persist_single_lead", MagicMock()) @patch.multiple( - 'breathecode.services.google_cloud.Recaptcha', + "breathecode.services.google_cloud.Recaptcha", __init__=MagicMock(return_value=None), create_assessment=MagicMock(return_value=FakeRecaptcha()), ) @@ -273,18 +285,20 @@ def test_app_slug_lead__post(self): model = self.generate_models(lead_generation_app=True) - url = (reverse_lazy('marketing:app_slug_lead', kwargs={'app_slug': model.lead_generation_app.slug}) + - f'?app_id={model.lead_generation_app.app_id}') - data = {'language': 'eo'} + url = ( + reverse_lazy("marketing:app_slug_lead", kwargs={"app_slug": model.lead_generation_app.slug}) + + f"?app_id={model.lead_generation_app.app_id}" + ) + data = {"language": "eo"} start = timezone.now() - response = self.client.post(url, data, format='json') + response = self.client.post(url, data, format="json") end = timezone.now() json = response.json() - created_at_iso_string = json['created_at'] - updated_at_iso_string = json['updated_at'] + created_at_iso_string = json["created_at"] + updated_at_iso_string = json["updated_at"] created_at = self.iso_to_datetime(created_at_iso_string) updated_at = self.iso_to_datetime(updated_at_iso_string) @@ -294,48 +308,57 @@ def 
test_app_slug_lead__post(self): self.assertGreater(end, updated_at) self.assertGreater(updated_at, start) - del json['created_at'] - del json['updated_at'] + del json["created_at"] + del json["updated_at"] - expected = post_serializer({ - **data, - }) + expected = post_serializer( + { + **data, + } + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - form_entry = form_entry_field({ - **data, - }) + form_entry = form_entry_field( + { + **data, + } + ) self.assertEqual(self.all_form_entry_dict(), [form_entry]) db = self.all_lead_generation_app_dict() - last_call_at = db[0]['last_call_at'] + last_call_at = db[0]["last_call_at"] self.assertGreater(end, last_call_at) self.assertGreater(last_call_at, start) - db[0]['last_call_at'] = None - - self.assertEqual(db, [{ - **self.model_to_dict(model, 'lead_generation_app'), - 'hits': 1, - 'last_call_status': 'OK', - 'last_request_data': '{"language": "eo"}', - }]) + db[0]["last_call_at"] = None - form_entry['academy'] = 1 - form_entry['contact'] = None - form_entry['created_at'] = created_at_iso_string - form_entry['updated_at'] = updated_at_iso_string - form_entry['contact'] = None - form_entry['lead_generation_app'] = 1 - form_entry['user'] = None - - del form_entry['academy_id'] - del form_entry['contact_id'] - del form_entry['lead_generation_app_id'] - del form_entry['user_id'] + self.assertEqual( + db, + [ + { + **self.model_to_dict(model, "lead_generation_app"), + "hits": 1, + "last_call_status": "OK", + "last_request_data": '{"language": "eo"}', + } + ], + ) + + form_entry["academy"] = 1 + form_entry["contact"] = None + form_entry["created_at"] = created_at_iso_string + form_entry["updated_at"] = updated_at_iso_string + form_entry["contact"] = None + form_entry["lead_generation_app"] = 1 + form_entry["user"] = None + + del form_entry["academy_id"] + del form_entry["contact_id"] + del form_entry["lead_generation_app_id"] + del form_entry["user_id"] self.assertEqual(persist_single_lead.delay.call_args_list, [call(form_entry)]) @@ -343,9 +366,9 @@ def test_app_slug_lead__post(self): 🔽🔽🔽 Post data with bad utm_url (this resolve a bug) """ - @patch('breathecode.marketing.tasks.persist_single_lead', MagicMock()) + @patch("breathecode.marketing.tasks.persist_single_lead", MagicMock()) @patch.multiple( - 'breathecode.services.google_cloud.Recaptcha', + "breathecode.services.google_cloud.Recaptcha", __init__=MagicMock(return_value=None), create_assessment=MagicMock(return_value=FakeRecaptcha()), ) @@ -354,18 +377,20 @@ def test_app_slug_lead__post__with_utm_url(self): model = self.generate_models(lead_generation_app=True) - url = (reverse_lazy('marketing:app_slug_lead', kwargs={'app_slug': model.lead_generation_app.slug}) + - f'?app_id={model.lead_generation_app.app_id}') - data = {'language': 'eo', 'utm_url': 'https:/bad_url/google.co.ve/'} + url = ( + reverse_lazy("marketing:app_slug_lead", kwargs={"app_slug": model.lead_generation_app.slug}) + + f"?app_id={model.lead_generation_app.app_id}" + ) + data = {"language": "eo", "utm_url": "https:/bad_url/google.co.ve/"} start = timezone.now() - response = self.client.post(url, data, format='json') + response = self.client.post(url, data, format="json") end = timezone.now() json = response.json() - created_at_iso_string = json['created_at'] - updated_at_iso_string = json['updated_at'] + created_at_iso_string = json["created_at"] + updated_at_iso_string = json["updated_at"] created_at = self.iso_to_datetime(created_at_iso_string) updated_at = 
self.iso_to_datetime(updated_at_iso_string) @@ -375,104 +400,111 @@ def test_app_slug_lead__post__with_utm_url(self): self.assertGreater(end, updated_at) self.assertGreater(updated_at, start) - del json['created_at'] - del json['updated_at'] + del json["created_at"] + del json["updated_at"] expected = { - 'attribution_id': None, - 'ac_contact_id': None, - 'ac_deal_id': None, - 'ac_expected_cohort': None, - 'academy': 1, - 'automations': '', - 'browser_lang': None, - 'city': None, - 'client_comments': None, - 'contact': None, - 'country': None, - 'course': None, - 'current_download': None, - 'deal_status': None, - 'email': None, - 'fb_ad_id': None, - 'fb_adgroup_id': None, - 'fb_form_id': None, - 'fb_leadgen_id': None, - 'fb_page_id': None, - 'first_name': '', - 'gclid': None, - 'id': 1, - 'last_name': '', - 'latitude': None, - 'lead_generation_app': 1, - 'lead_type': None, - 'location': None, - 'longitude': None, - 'phone': None, - 'referral_key': None, - 'sentiment': None, - 'state': None, - 'storage_status': 'PENDING', - 'storage_status_text': '', - 'street_address': None, - 'tags': '', - 'user': None, - 'utm_campaign': None, - 'utm_medium': None, - 'utm_content': None, - 'utm_source': None, - 'utm_placement': None, - 'utm_term': None, - 'utm_plan': None, - 'sex': None, - 'custom_fields': None, - 'won_at': None, - 'zip_code': None, - 'ac_deal_course': None, - 'ac_deal_location': None, - 'ac_deal_owner_full_name': None, - 'ac_deal_owner_id': None, - 'ac_expected_cohort_date': None, - 'ac_deal_amount': None, - 'ac_deal_currency_code': None, + "attribution_id": None, + "ac_contact_id": None, + "ac_deal_id": None, + "ac_expected_cohort": None, + "academy": 1, + "automations": "", + "browser_lang": None, + "city": None, + "client_comments": None, + "contact": None, + "country": None, + "course": None, + "current_download": None, + "deal_status": None, + "email": None, + "fb_ad_id": None, + "fb_adgroup_id": None, + "fb_form_id": None, + "fb_leadgen_id": None, + "fb_page_id": None, + "first_name": "", + "gclid": None, + "id": 1, + "last_name": "", + "latitude": None, + "lead_generation_app": 1, + "lead_type": None, + "location": None, + "longitude": None, + "phone": None, + "referral_key": None, + "sentiment": None, + "state": None, + "storage_status": "PENDING", + "storage_status_text": "", + "street_address": None, + "tags": "", + "user": None, + "utm_campaign": None, + "utm_medium": None, + "utm_content": None, + "utm_source": None, + "utm_placement": None, + "utm_term": None, + "utm_plan": None, + "sex": None, + "custom_fields": None, + "won_at": None, + "zip_code": None, + "ac_deal_course": None, + "ac_deal_location": None, + "ac_deal_owner_full_name": None, + "ac_deal_owner_id": None, + "ac_expected_cohort_date": None, + "ac_deal_amount": None, + "ac_deal_currency_code": None, **data, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - form_entry = form_entry_field({ - **data, - }) + form_entry = form_entry_field( + { + **data, + } + ) self.assertEqual(self.all_form_entry_dict(), [form_entry]) db = self.all_lead_generation_app_dict() - last_call_at = db[0]['last_call_at'] + last_call_at = db[0]["last_call_at"] self.assertGreater(end, last_call_at) self.assertGreater(last_call_at, start) - db[0]['last_call_at'] = None - - self.assertEqual(db, [{ - **self.model_to_dict(model, 'lead_generation_app'), - 'hits': 1, - 'last_call_status': 'OK', - 'last_request_data': '{"language": "eo", "utm_url": "https:/bad_url/google.co.ve/"}', - }]) - - 
form_entry['academy'] = 1 - form_entry['contact'] = None - form_entry['created_at'] = created_at_iso_string - form_entry['updated_at'] = updated_at_iso_string - form_entry['contact'] = None - form_entry['lead_generation_app'] = 1 - form_entry['user'] = None + db[0]["last_call_at"] = None - del form_entry['academy_id'] - del form_entry['contact_id'] - del form_entry['lead_generation_app_id'] - del form_entry['user_id'] + self.assertEqual( + db, + [ + { + **self.model_to_dict(model, "lead_generation_app"), + "hits": 1, + "last_call_status": "OK", + "last_request_data": '{"language": "eo", "utm_url": "https:/bad_url/google.co.ve/"}', + } + ], + ) + + form_entry["academy"] = 1 + form_entry["contact"] = None + form_entry["created_at"] = created_at_iso_string + form_entry["updated_at"] = updated_at_iso_string + form_entry["contact"] = None + form_entry["lead_generation_app"] = 1 + form_entry["user"] = None + + del form_entry["academy_id"] + del form_entry["contact_id"] + del form_entry["lead_generation_app_id"] + del form_entry["user_id"] self.assertEqual(persist_single_lead.delay.call_args_list, [call(form_entry)]) @@ -480,9 +512,9 @@ def test_app_slug_lead__post__with_utm_url(self): 🔽🔽🔽 Post data with automations """ - @patch('breathecode.marketing.tasks.persist_single_lead', MagicMock()) + @patch("breathecode.marketing.tasks.persist_single_lead", MagicMock()) @patch.multiple( - 'breathecode.services.google_cloud.Recaptcha', + "breathecode.services.google_cloud.Recaptcha", __init__=MagicMock(return_value=None), create_assessment=MagicMock(return_value=FakeRecaptcha()), ) @@ -491,18 +523,20 @@ def test_app_slug_lead__post__with_automations(self): model = self.generate_models(lead_generation_app=True) - url = (reverse_lazy('marketing:app_slug_lead', kwargs={'app_slug': model.lead_generation_app.slug}) + - f'?app_id={model.lead_generation_app.app_id}') - data = {'language': 'eo', 'automations': 'they-killed-kenny1,they-killed-kenny2'} + url = ( + reverse_lazy("marketing:app_slug_lead", kwargs={"app_slug": model.lead_generation_app.slug}) + + f"?app_id={model.lead_generation_app.app_id}" + ) + data = {"language": "eo", "automations": "they-killed-kenny1,they-killed-kenny2"} start = timezone.now() - response = self.client.post(url, data, format='json') + response = self.client.post(url, data, format="json") end = timezone.now() json = response.json() - created_at_iso_string = json['created_at'] - updated_at_iso_string = json['updated_at'] + created_at_iso_string = json["created_at"] + updated_at_iso_string = json["updated_at"] created_at = self.iso_to_datetime(created_at_iso_string) updated_at = self.iso_to_datetime(updated_at_iso_string) @@ -512,49 +546,57 @@ def test_app_slug_lead__post__with_automations(self): self.assertGreater(end, updated_at) self.assertGreater(updated_at, start) - del json['created_at'] - del json['updated_at'] + del json["created_at"] + del json["updated_at"] - expected = post_serializer({ - **data, - }) + expected = post_serializer( + { + **data, + } + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - form_entry = form_entry_field({ - **data, - }) + form_entry = form_entry_field( + { + **data, + } + ) self.assertEqual(self.all_form_entry_dict(), [form_entry]) db = self.all_lead_generation_app_dict() - last_call_at = db[0]['last_call_at'] + last_call_at = db[0]["last_call_at"] self.assertGreater(end, last_call_at) self.assertGreater(last_call_at, start) - db[0]['last_call_at'] = None + db[0]["last_call_at"] = None 
self.assertEqual( - db, [{ - **self.model_to_dict(model, 'lead_generation_app'), - 'hits': 1, - 'last_call_status': 'OK', - 'last_request_data': '{"language": "eo", "automations": "they-killed-kenny1,they-killed-kenny2"}', - }]) - - form_entry['academy'] = 1 - form_entry['contact'] = None - form_entry['created_at'] = created_at_iso_string - form_entry['updated_at'] = updated_at_iso_string - form_entry['contact'] = None - form_entry['lead_generation_app'] = 1 - form_entry['user'] = None - - del form_entry['academy_id'] - del form_entry['contact_id'] - del form_entry['lead_generation_app_id'] - del form_entry['user_id'] + db, + [ + { + **self.model_to_dict(model, "lead_generation_app"), + "hits": 1, + "last_call_status": "OK", + "last_request_data": '{"language": "eo", "automations": "they-killed-kenny1,they-killed-kenny2"}', + } + ], + ) + + form_entry["academy"] = 1 + form_entry["contact"] = None + form_entry["created_at"] = created_at_iso_string + form_entry["updated_at"] = updated_at_iso_string + form_entry["contact"] = None + form_entry["lead_generation_app"] = 1 + form_entry["user"] = None + + del form_entry["academy_id"] + del form_entry["contact_id"] + del form_entry["lead_generation_app_id"] + del form_entry["user_id"] self.assertEqual(persist_single_lead.delay.call_args_list, [call(form_entry)]) @@ -562,9 +604,9 @@ def test_app_slug_lead__post__with_automations(self): 🔽🔽🔽 Post data with tags """ - @patch('breathecode.marketing.tasks.persist_single_lead', MagicMock()) + @patch("breathecode.marketing.tasks.persist_single_lead", MagicMock()) @patch.multiple( - 'breathecode.services.google_cloud.Recaptcha', + "breathecode.services.google_cloud.Recaptcha", __init__=MagicMock(return_value=None), create_assessment=MagicMock(return_value=FakeRecaptcha()), ) @@ -573,18 +615,20 @@ def test_app_slug_lead__post__with_tags(self): model = self.generate_models(lead_generation_app=True) - url = (reverse_lazy('marketing:app_slug_lead', kwargs={'app_slug': model.lead_generation_app.slug}) + - f'?app_id={model.lead_generation_app.app_id}') - data = {'language': 'eo', 'tags': 'they-killed-kenny1,they-killed-kenny2'} + url = ( + reverse_lazy("marketing:app_slug_lead", kwargs={"app_slug": model.lead_generation_app.slug}) + + f"?app_id={model.lead_generation_app.app_id}" + ) + data = {"language": "eo", "tags": "they-killed-kenny1,they-killed-kenny2"} start = timezone.now() - response = self.client.post(url, data, format='json') + response = self.client.post(url, data, format="json") end = timezone.now() json = response.json() - created_at_iso_string = json['created_at'] - updated_at_iso_string = json['updated_at'] + created_at_iso_string = json["created_at"] + updated_at_iso_string = json["updated_at"] created_at = self.iso_to_datetime(created_at_iso_string) updated_at = self.iso_to_datetime(updated_at_iso_string) @@ -594,105 +638,111 @@ def test_app_slug_lead__post__with_tags(self): self.assertGreater(end, updated_at) self.assertGreater(updated_at, start) - del json['created_at'] - del json['updated_at'] + del json["created_at"] + del json["updated_at"] expected = { - 'attribution_id': None, - 'ac_contact_id': None, - 'ac_deal_id': None, - 'ac_expected_cohort': None, - 'academy': 1, - 'automations': '', - 'browser_lang': None, - 'city': None, - 'client_comments': None, - 'contact': None, - 'country': None, - 'course': None, - 'current_download': None, - 'deal_status': None, - 'email': None, - 'fb_ad_id': None, - 'fb_adgroup_id': None, - 'fb_form_id': None, - 'fb_leadgen_id': None, - 'fb_page_id': None, 
- 'first_name': '', - 'gclid': None, - 'id': 1, - 'last_name': '', - 'latitude': None, - 'lead_generation_app': 1, - 'lead_type': None, - 'location': None, - 'longitude': None, - 'phone': None, - 'referral_key': None, - 'sentiment': None, - 'state': None, - 'storage_status': 'PENDING', - 'storage_status_text': '', - 'street_address': None, - 'tags': '', - 'user': None, - 'utm_campaign': None, - 'utm_medium': None, - 'utm_content': None, - 'utm_source': None, - 'utm_placement': None, - 'utm_term': None, - 'utm_plan': None, - 'sex': None, - 'custom_fields': None, - 'won_at': None, - 'zip_code': None, - 'utm_url': None, - 'ac_deal_course': None, - 'ac_deal_location': None, - 'ac_deal_owner_full_name': None, - 'ac_deal_owner_id': None, - 'ac_expected_cohort_date': None, - 'ac_deal_amount': None, - 'ac_deal_currency_code': None, + "attribution_id": None, + "ac_contact_id": None, + "ac_deal_id": None, + "ac_expected_cohort": None, + "academy": 1, + "automations": "", + "browser_lang": None, + "city": None, + "client_comments": None, + "contact": None, + "country": None, + "course": None, + "current_download": None, + "deal_status": None, + "email": None, + "fb_ad_id": None, + "fb_adgroup_id": None, + "fb_form_id": None, + "fb_leadgen_id": None, + "fb_page_id": None, + "first_name": "", + "gclid": None, + "id": 1, + "last_name": "", + "latitude": None, + "lead_generation_app": 1, + "lead_type": None, + "location": None, + "longitude": None, + "phone": None, + "referral_key": None, + "sentiment": None, + "state": None, + "storage_status": "PENDING", + "storage_status_text": "", + "street_address": None, + "tags": "", + "user": None, + "utm_campaign": None, + "utm_medium": None, + "utm_content": None, + "utm_source": None, + "utm_placement": None, + "utm_term": None, + "utm_plan": None, + "sex": None, + "custom_fields": None, + "won_at": None, + "zip_code": None, + "utm_url": None, + "ac_deal_course": None, + "ac_deal_location": None, + "ac_deal_owner_full_name": None, + "ac_deal_owner_id": None, + "ac_expected_cohort_date": None, + "ac_deal_amount": None, + "ac_deal_currency_code": None, **data, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - form_entry = form_entry_field({ - **data, - }) + form_entry = form_entry_field( + { + **data, + } + ) self.assertEqual(self.all_form_entry_dict(), [form_entry]) db = self.all_lead_generation_app_dict() - last_call_at = db[0]['last_call_at'] + last_call_at = db[0]["last_call_at"] self.assertGreater(end, last_call_at) self.assertGreater(last_call_at, start) - db[0]['last_call_at'] = None - - self.assertEqual(db, - [{ - **self.model_to_dict(model, 'lead_generation_app'), - 'hits': 1, - 'last_call_status': 'OK', - 'last_request_data': '{"language": "eo", "tags": "they-killed-kenny1,they-killed-kenny2"}', - }]) - - form_entry['academy'] = 1 - form_entry['contact'] = None - form_entry['created_at'] = created_at_iso_string - form_entry['updated_at'] = updated_at_iso_string - form_entry['contact'] = None - form_entry['lead_generation_app'] = 1 - form_entry['user'] = None - - del form_entry['academy_id'] - del form_entry['contact_id'] - del form_entry['lead_generation_app_id'] - del form_entry['user_id'] + db[0]["last_call_at"] = None + + self.assertEqual( + db, + [ + { + **self.model_to_dict(model, "lead_generation_app"), + "hits": 1, + "last_call_status": "OK", + "last_request_data": '{"language": "eo", "tags": "they-killed-kenny1,they-killed-kenny2"}', + } + ], + ) + + form_entry["academy"] = 1 + 
form_entry["contact"] = None + form_entry["created_at"] = created_at_iso_string + form_entry["updated_at"] = updated_at_iso_string + form_entry["contact"] = None + form_entry["lead_generation_app"] = 1 + form_entry["user"] = None + + del form_entry["academy_id"] + del form_entry["contact_id"] + del form_entry["lead_generation_app_id"] + del form_entry["user_id"] self.assertEqual(persist_single_lead.delay.call_args_list, [call(form_entry)]) diff --git a/breathecode/marketing/tests/urls/tests_course.py b/breathecode/marketing/tests/urls/tests_course.py index 6af4b47c3..b3690b820 100644 --- a/breathecode/marketing/tests/urls/tests_course.py +++ b/breathecode/marketing/tests/urls/tests_course.py @@ -1,6 +1,7 @@ """ Test /academy/lead """ + import random from random import choice, choices, randint from unittest.mock import MagicMock, patch @@ -16,33 +17,33 @@ def course_translation_serializer(course_translation): return { - 'course_modules': course_translation.course_modules, - 'landing_variables': course_translation.landing_variables, - 'description': course_translation.description, - 'short_description': course_translation.short_description, - 'lang': course_translation.lang, - 'title': course_translation.title, - 'landing_url': course_translation.landing_url, - 'video_url': course_translation.video_url, + "course_modules": course_translation.course_modules, + "landing_variables": course_translation.landing_variables, + "description": course_translation.description, + "short_description": course_translation.short_description, + "lang": course_translation.lang, + "title": course_translation.title, + "landing_url": course_translation.landing_url, + "video_url": course_translation.video_url, } def academy_serializer(academy): return { - 'icon_url': academy.icon_url, - 'id': academy.id, - 'logo_url': academy.logo_url, - 'name': academy.name, - 'slug': academy.slug, + "icon_url": academy.icon_url, + "id": academy.id, + "logo_url": academy.logo_url, + "name": academy.name, + "slug": academy.slug, } def syllabus_serializer(syllabus): return { - 'id': syllabus.id, - 'logo': syllabus.logo, - 'name': syllabus.name, - 'slug': syllabus.slug, + "id": syllabus.id, + "logo": syllabus.logo, + "name": syllabus.name, + "slug": syllabus.slug, } @@ -51,17 +52,17 @@ def get_serializer(course, academy, syllabus=[], course_translation=None, cohort course_translation = course_translation_serializer(course_translation) return { - 'slug': course.slug, - 'icon_url': course.icon_url, - 'color': course.color, - 'status': course.status, - 'visibility': course.visibility, - 'technologies': course.technologies, - 'academy': academy_serializer(academy), - 'cohort': cohort.id if cohort else None, - 'syllabus': [syllabus_serializer(x) for x in syllabus], - 'plan_slug': course.plan_slug, - 'course_translation': course_translation, + "slug": course.slug, + "icon_url": course.icon_url, + "color": course.color, + "status": course.status, + "visibility": course.visibility, + "technologies": course.technologies, + "academy": academy_serializer(academy), + "cohort": cohort.id if cohort else None, + "syllabus": [syllabus_serializer(x) for x in syllabus], + "plan_slug": course.plan_slug, + "course_translation": course_translation, **data, } @@ -71,11 +72,11 @@ class LeadTestSuite(MarketingTestCase): 🔽🔽🔽 Zero Course """ - @patch('breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event', MagicMock()) + @patch("breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event", MagicMock()) def 
test_zero_courses(self): - url = reverse_lazy('marketing:course') + url = reverse_lazy("marketing:course") - response = self.client.get(url, format='json') + response = self.client.get(url, format="json") json = response.json() expected = [] @@ -83,21 +84,21 @@ def test_zero_courses(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('marketing.Course'), []) - self.assertEqual(self.bc.database.list_of('marketing.CourseTranslation'), []) + self.assertEqual(self.bc.database.list_of("marketing.Course"), []) + self.assertEqual(self.bc.database.list_of("marketing.CourseTranslation"), []) """ 🔽🔽🔽 Two Course """ - @patch('breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event', MagicMock()) + @patch("breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event", MagicMock()) def test_two_courses__status_active__visibility_public(self): - courses = [{'status': 'ACTIVE', 'visibility': 'PUBLIC'} for _ in range(2)] + courses = [{"status": "ACTIVE", "visibility": "PUBLIC"} for _ in range(2)] model = self.bc.database.create(course=courses) - url = reverse_lazy('marketing:course') + url = reverse_lazy("marketing:course") - response = self.client.get(url, format='json') + response = self.client.get(url, format="json") json = response.json() expected = [ @@ -108,20 +109,20 @@ def test_two_courses__status_active__visibility_public(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('marketing.Course'), self.bc.format.to_dict(model.course)) - self.assertEqual(self.bc.database.list_of('marketing.CourseTranslation'), []) + self.assertEqual(self.bc.database.list_of("marketing.Course"), self.bc.format.to_dict(model.course)) + self.assertEqual(self.bc.database.list_of("marketing.CourseTranslation"), []) - @patch('breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event', MagicMock()) + @patch("breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event", MagicMock()) def test_two_courses__wrong_status__wrong_visibility(self): - courses = [{ - 'status': random.choice(['ARCHIVED', 'DELETED']), - 'visibility': random.choice(['UNLISTED', 'PRIVATE']) - } for _ in range(2)] + courses = [ + {"status": random.choice(["ARCHIVED", "DELETED"]), "visibility": random.choice(["UNLISTED", "PRIVATE"])} + for _ in range(2) + ] model = self.bc.database.create(course=courses) - url = reverse_lazy('marketing:course') + url = reverse_lazy("marketing:course") - response = self.client.get(url, format='json') + response = self.client.get(url, format="json") json = response.json() expected = [] @@ -129,28 +130,26 @@ def test_two_courses__wrong_status__wrong_visibility(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('marketing.Course'), self.bc.format.to_dict(model.course)) - self.assertEqual(self.bc.database.list_of('marketing.CourseTranslation'), []) + self.assertEqual(self.bc.database.list_of("marketing.Course"), self.bc.format.to_dict(model.course)) + self.assertEqual(self.bc.database.list_of("marketing.CourseTranslation"), []) """ 🔽🔽🔽 Two Course with one CourseTranslation each one """ - @patch('breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event', MagicMock()) + @patch("breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event", MagicMock()) def 
test_two_courses__status_active__visibility_public__with_course_translation(self): - courses = [{'status': 'ACTIVE', 'visibility': 'PUBLIC'} for _ in range(2)] - course_translations = [{ - 'lang': - 'en' + (f'-{random.choice(["US", "UK"])}' if random.choice([True, False]) else ''), - 'course_id': - n - } for n in range(1, 3)] + courses = [{"status": "ACTIVE", "visibility": "PUBLIC"} for _ in range(2)] + course_translations = [ + {"lang": "en" + (f'-{random.choice(["US", "UK"])}' if random.choice([True, False]) else ""), "course_id": n} + for n in range(1, 3) + ] model = self.bc.database.create(course=courses, course_translation=course_translations) - url = reverse_lazy('marketing:course') + url = reverse_lazy("marketing:course") - response = self.client.get(url, format='json') + response = self.client.get(url, format="json") json = response.json() expected = [ @@ -161,6 +160,7 @@ def test_two_courses__status_active__visibility_public__with_course_translation( self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('marketing.Course'), self.bc.format.to_dict(model.course)) - self.assertEqual(self.bc.database.list_of('marketing.CourseTranslation'), - self.bc.format.to_dict(model.course_translation)) + self.assertEqual(self.bc.database.list_of("marketing.Course"), self.bc.format.to_dict(model.course)) + self.assertEqual( + self.bc.database.list_of("marketing.CourseTranslation"), self.bc.format.to_dict(model.course_translation) + ) diff --git a/breathecode/marketing/tests/urls/tests_course_slug.py b/breathecode/marketing/tests/urls/tests_course_slug.py index 9facd3a63..d458422ba 100644 --- a/breathecode/marketing/tests/urls/tests_course_slug.py +++ b/breathecode/marketing/tests/urls/tests_course_slug.py @@ -1,6 +1,7 @@ """ Test /academy/lead """ + import random from random import choice, choices, randint from unittest.mock import MagicMock, patch @@ -16,33 +17,33 @@ def course_translation_serializer(course_translation): return { - 'course_modules': course_translation.course_modules, - 'landing_variables': course_translation.landing_variables, - 'description': course_translation.description, - 'short_description': course_translation.short_description, - 'lang': course_translation.lang, - 'title': course_translation.title, - 'landing_url': course_translation.landing_url, - 'video_url': course_translation.video_url, + "course_modules": course_translation.course_modules, + "landing_variables": course_translation.landing_variables, + "description": course_translation.description, + "short_description": course_translation.short_description, + "lang": course_translation.lang, + "title": course_translation.title, + "landing_url": course_translation.landing_url, + "video_url": course_translation.video_url, } def academy_serializer(academy): return { - 'icon_url': academy.icon_url, - 'id': academy.id, - 'logo_url': academy.logo_url, - 'name': academy.name, - 'slug': academy.slug, + "icon_url": academy.icon_url, + "id": academy.id, + "logo_url": academy.logo_url, + "name": academy.name, + "slug": academy.slug, } def syllabus_serializer(syllabus): return { - 'id': syllabus.id, - 'logo': syllabus.logo, - 'name': syllabus.name, - 'slug': syllabus.slug, + "id": syllabus.id, + "logo": syllabus.logo, + "name": syllabus.name, + "slug": syllabus.slug, } @@ -52,17 +53,17 @@ def get_serializer(course, academy, syllabus=[], course_translation=None, data={ course_translation = course_translation_serializer(course_translation) return { - 
'slug': course.slug, - 'icon_url': course.icon_url, - 'technologies': course.technologies, - 'academy': academy_serializer(academy), - 'syllabus': [syllabus_serializer(x) for x in syllabus], - 'course_translation': course_translation, - 'status': course.status, - 'visibility': course.visibility, - 'cohort': course.cohort, - 'color': course.color, - 'plan_slug': course.plan_slug, + "slug": course.slug, + "icon_url": course.icon_url, + "technologies": course.technologies, + "academy": academy_serializer(academy), + "syllabus": [syllabus_serializer(x) for x in syllabus], + "course_translation": course_translation, + "status": course.status, + "visibility": course.visibility, + "cohort": course.cohort, + "color": course.color, + "plan_slug": course.plan_slug, **data, } @@ -72,33 +73,33 @@ class LeadTestSuite(MarketingTestCase): 🔽🔽🔽 Zero Course """ - @patch('breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event', MagicMock()) + @patch("breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event", MagicMock()) def test_zero_courses(self): - url = reverse_lazy('marketing:course_slug', kwargs={'course_slug': 'gangster'}) + url = reverse_lazy("marketing:course_slug", kwargs={"course_slug": "gangster"}) - response = self.client.get(url, format='json') + response = self.client.get(url, format="json") json = response.json() - expected = {'detail': 'course-not-found', 'status_code': 404} + expected = {"detail": "course-not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - self.assertEqual(self.bc.database.list_of('marketing.Course'), []) - self.assertEqual(self.bc.database.list_of('marketing.CourseTranslation'), []) + self.assertEqual(self.bc.database.list_of("marketing.Course"), []) + self.assertEqual(self.bc.database.list_of("marketing.CourseTranslation"), []) """ 🔽🔽🔽 One Course """ - @patch('breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event', MagicMock()) + @patch("breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event", MagicMock()) def test_one_course__status_active__good_visibility(self): - course = {'status': 'ACTIVE', 'visibility': random.choice(['PUBLIC', 'UNLISTED'])} + course = {"status": "ACTIVE", "visibility": random.choice(["PUBLIC", "UNLISTED"])} model = self.bc.database.create(course=course) - url = reverse_lazy('marketing:course_slug', kwargs={'course_slug': model.course.slug}) + url = reverse_lazy("marketing:course_slug", kwargs={"course_slug": model.course.slug}) - response = self.client.get(url, format='json') + response = self.client.get(url, format="json") json = response.json() expected = get_serializer(model.course, model.academy, [model.syllabus]) @@ -106,46 +107,46 @@ def test_one_course__status_active__good_visibility(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('marketing.Course'), [self.bc.format.to_dict(model.course)]) - self.assertEqual(self.bc.database.list_of('marketing.CourseTranslation'), []) + self.assertEqual(self.bc.database.list_of("marketing.Course"), [self.bc.format.to_dict(model.course)]) + self.assertEqual(self.bc.database.list_of("marketing.CourseTranslation"), []) - @patch('breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event', MagicMock()) + @patch("breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event", MagicMock()) def 
test_one_course__wrong_status__wrong_visibility(self): course = { - 'status': random.choice(['ARCHIVED', 'DELETED']), - 'visibility': 'PRIVATE', + "status": random.choice(["ARCHIVED", "DELETED"]), + "visibility": "PRIVATE", } model = self.bc.database.create(course=course) - url = reverse_lazy('marketing:course_slug', kwargs={'course_slug': model.course.slug}) + url = reverse_lazy("marketing:course_slug", kwargs={"course_slug": model.course.slug}) - response = self.client.get(url, format='json') + response = self.client.get(url, format="json") json = response.json() - expected = {'detail': 'course-not-found', 'status_code': 404} + expected = {"detail": "course-not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - self.assertEqual(self.bc.database.list_of('marketing.Course'), [self.bc.format.to_dict(model.course)]) - self.assertEqual(self.bc.database.list_of('marketing.CourseTranslation'), []) + self.assertEqual(self.bc.database.list_of("marketing.Course"), [self.bc.format.to_dict(model.course)]) + self.assertEqual(self.bc.database.list_of("marketing.CourseTranslation"), []) """ 🔽🔽🔽 One Course with one CourseTranslation in english """ - @patch('breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event', MagicMock()) + @patch("breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event", MagicMock()) def test_one_course__status_active__good_visibility__with_course_translation(self): - course = {'status': 'ACTIVE', 'visibility': random.choice(['PUBLIC', 'UNLISTED'])} - course_translation = {'lang': 'en'} + course = {"status": "ACTIVE", "visibility": random.choice(["PUBLIC", "UNLISTED"])} + course_translation = {"lang": "en"} if random.choice([True, False]): - course_translation['lang'] += f'-{random.choice(["US", "UK"])}' + course_translation["lang"] += f'-{random.choice(["US", "UK"])}' model = self.bc.database.create(course=course, course_translation=course_translation) - url = reverse_lazy('marketing:course_slug', kwargs={'course_slug': model.course.slug}) + url = reverse_lazy("marketing:course_slug", kwargs={"course_slug": model.course.slug}) - response = self.client.get(url, format='json') + response = self.client.get(url, format="json") json = response.json() expected = get_serializer(model.course, model.academy, [model.syllabus], model.course_translation) @@ -153,7 +154,10 @@ def test_one_course__status_active__good_visibility__with_course_translation(sel self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('marketing.Course'), [self.bc.format.to_dict(model.course)]) - self.assertEqual(self.bc.database.list_of('marketing.CourseTranslation'), [ - self.bc.format.to_dict(model.course_translation), - ]) + self.assertEqual(self.bc.database.list_of("marketing.Course"), [self.bc.format.to_dict(model.course)]) + self.assertEqual( + self.bc.database.list_of("marketing.CourseTranslation"), + [ + self.bc.format.to_dict(model.course_translation), + ], + ) diff --git a/breathecode/marketing/tests/urls/tests_downloadable.py b/breathecode/marketing/tests/urls/tests_downloadable.py index 0d3bdd9a3..ca49339c6 100644 --- a/breathecode/marketing/tests/urls/tests_downloadable.py +++ b/breathecode/marketing/tests/urls/tests_downloadable.py @@ -1,6 +1,7 @@ """ Test /downloadable """ + from django.urls.base import reverse_lazy from rest_framework import status from breathecode.marketing.models import Downloadable @@ 
-12,7 +13,7 @@ class DownloadableTestSuite(MarketingTestCase): def test_downloadable_without_model(self): """Test /downloadable to check if it returns an empty list""" - url = reverse_lazy('marketing:downloadable') + url = reverse_lazy("marketing:downloadable") response = self.client.get(url) json = response.json() expected = [] @@ -23,46 +24,51 @@ def test_downloadable_without_model(self): def test_downloadable_with_data(self): """Test /downloadable to check if it returns data after creating model""" - url = reverse_lazy('marketing:downloadable') + url = reverse_lazy("marketing:downloadable") model = self.generate_models(downloadable=True) response = self.client.get(url) json = response.json() - expected = [{ - 'destination_url': model['downloadable'].destination_url, - 'name': model['downloadable'].name, - 'preview_url': model['downloadable'].preview_url, - 'slug': model['downloadable'].slug, - }] + expected = [ + { + "destination_url": model["downloadable"].destination_url, + "name": model["downloadable"].name, + "preview_url": model["downloadable"].preview_url, + "slug": model["downloadable"].slug, + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_downloadable_with_multiple_data(self): """Test /downloadable to check if it returns data after creating model""" - url = reverse_lazy('marketing:downloadable') + url = reverse_lazy("marketing:downloadable") model = self.generate_models(downloadable=2) response = self.client.get(url) json = response.json() - expected = [{ - 'destination_url': model['downloadable'][0].destination_url, - 'name': model['downloadable'][0].name, - 'preview_url': model['downloadable'][0].preview_url, - 'slug': model['downloadable'][0].slug, - }, { - 'destination_url': model['downloadable'][1].destination_url, - 'name': model['downloadable'][1].name, - 'preview_url': model['downloadable'][1].preview_url, - 'slug': model['downloadable'][1].slug, - }] + expected = [ + { + "destination_url": model["downloadable"][0].destination_url, + "name": model["downloadable"][0].name, + "preview_url": model["downloadable"][0].preview_url, + "slug": model["downloadable"][0].slug, + }, + { + "destination_url": model["downloadable"][1].destination_url, + "name": model["downloadable"][1].name, + "preview_url": model["downloadable"][1].preview_url, + "slug": model["downloadable"][1].slug, + }, + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_downloadable_with_incorrect_academy(self): """Test /downloadable to check if it returns data from one downloadable depending on academy""" - url = reverse_lazy('marketing:downloadable') + url = reverse_lazy("marketing:downloadable") model = self.generate_models(downloadable=2) - response = self.client.get(url + f'?academy=test') + response = self.client.get(url + f"?academy=test") json = response.json() expected = [] @@ -71,68 +77,77 @@ def test_downloadable_with_incorrect_academy(self): def test_downloadable_with_one_academy(self): """Test /downloadable to check if it returns data from one downloadable depending on academy""" - url = reverse_lazy('marketing:downloadable') + url = reverse_lazy("marketing:downloadable") model = self.generate_models(downloadable=2) response = self.client.get(url + f'?academy={model["downloadable"][0].academy.slug}') json = response.json() - expected = [{ - 'destination_url': model['downloadable'][0].destination_url, - 'name': model['downloadable'][0].name, - 'preview_url': 
model['downloadable'][0].preview_url, - 'slug': model['downloadable'][0].slug, - }] + expected = [ + { + "destination_url": model["downloadable"][0].destination_url, + "name": model["downloadable"][0].name, + "preview_url": model["downloadable"][0].preview_url, + "slug": model["downloadable"][0].slug, + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_downloadable_with_multiple_academy(self): """Test /downloadable to check if it returns data from one downloadable depending on academy""" - url = reverse_lazy('marketing:downloadable') + url = reverse_lazy("marketing:downloadable") model = self.generate_models(downloadable=2) response = self.client.get( - url + f'?academy={model["downloadable"][0].academy.slug},{model["downloadable"][1].academy.slug}') + url + f'?academy={model["downloadable"][0].academy.slug},{model["downloadable"][1].academy.slug}' + ) json = response.json() - expected = [{ - 'destination_url': model['downloadable'][0].destination_url, - 'name': model['downloadable'][0].name, - 'preview_url': model['downloadable'][0].preview_url, - 'slug': model['downloadable'][0].slug, - }, { - 'destination_url': model['downloadable'][1].destination_url, - 'name': model['downloadable'][1].name, - 'preview_url': model['downloadable'][1].preview_url, - 'slug': model['downloadable'][1].slug, - }] + expected = [ + { + "destination_url": model["downloadable"][0].destination_url, + "name": model["downloadable"][0].name, + "preview_url": model["downloadable"][0].preview_url, + "slug": model["downloadable"][0].slug, + }, + { + "destination_url": model["downloadable"][1].destination_url, + "name": model["downloadable"][1].name, + "preview_url": model["downloadable"][1].preview_url, + "slug": model["downloadable"][1].slug, + }, + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_downloadable_with_active_true(self): """Test /downloadable to check if it returns data if academy is active""" - url = reverse_lazy('marketing:downloadable') + url = reverse_lazy("marketing:downloadable") model = self.generate_models(downloadable=2) - response = self.client.get(url + f'?active=true') + response = self.client.get(url + f"?active=true") json = response.json() - expected = [{ - 'destination_url': model['downloadable'][0].destination_url, - 'name': model['downloadable'][0].name, - 'preview_url': model['downloadable'][0].preview_url, - 'slug': model['downloadable'][0].slug, - }, { - 'destination_url': model['downloadable'][1].destination_url, - 'name': model['downloadable'][1].name, - 'preview_url': model['downloadable'][1].preview_url, - 'slug': model['downloadable'][1].slug, - }] + expected = [ + { + "destination_url": model["downloadable"][0].destination_url, + "name": model["downloadable"][0].name, + "preview_url": model["downloadable"][0].preview_url, + "slug": model["downloadable"][0].slug, + }, + { + "destination_url": model["downloadable"][1].destination_url, + "name": model["downloadable"][1].name, + "preview_url": model["downloadable"][1].preview_url, + "slug": model["downloadable"][1].slug, + }, + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_downloadable_with_active_false(self): """Test /downloadable to check if it returns data if academy is active""" - url = reverse_lazy('marketing:downloadable') + url = reverse_lazy("marketing:downloadable") model = self.generate_models(downloadable=2) - response = self.client.get(url + 
f'?active=false') + response = self.client.get(url + f"?active=false") json = response.json() expected = [] diff --git a/breathecode/marketing/tests/urls/tests_downloadable_slug.py b/breathecode/marketing/tests/urls/tests_downloadable_slug.py index 0a757d788..a98e9bb59 100644 --- a/breathecode/marketing/tests/urls/tests_downloadable_slug.py +++ b/breathecode/marketing/tests/urls/tests_downloadable_slug.py @@ -1,6 +1,7 @@ """ Test /downloadable """ + from django.urls.base import reverse_lazy from rest_framework import status from breathecode.marketing.models import Downloadable @@ -12,10 +13,10 @@ class DownloadableTestSuite(MarketingTestCase): def test_downloadable_slug_without_data(self): """Test /downloadable to check if it returns an empty list""" - url = reverse_lazy('marketing:single_downloadable', kwargs={'slug': 'test'}) + url = reverse_lazy("marketing:single_downloadable", kwargs={"slug": "test"}) response = self.client.get(url) json = response.json() - expected = {'detail': 'not-found', 'status_code': 404} + expected = {"detail": "not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) @@ -24,14 +25,14 @@ def test_downloadable_slug_without_data(self): def test_downloadable_slug_with_data(self): """Test /downloadable to check if it returns an empty list""" model = self.generate_models(downloadable=True) - url = reverse_lazy('marketing:single_downloadable', kwargs={'slug': f'{model["downloadable"].slug}'}) + url = reverse_lazy("marketing:single_downloadable", kwargs={"slug": f'{model["downloadable"].slug}'}) response = self.client.get(url) json = response.json() expected = { - 'slug': f'{model["downloadable"].slug}', - 'name': f'{model["downloadable"].name}', - 'destination_url': f'{model["downloadable"].destination_url}', - 'preview_url': f'{model["downloadable"].preview_url}' + "slug": f'{model["downloadable"].slug}', + "name": f'{model["downloadable"].name}', + "destination_url": f'{model["downloadable"].destination_url}', + "preview_url": f'{model["downloadable"].preview_url}', } self.assertEqual(json, expected) @@ -41,9 +42,9 @@ def test_downloadable_slug_with_data(self): def test_downloadable_slug_with_data_with_redirect(self): """Test /downloadable to check if it returns an empty list""" model = self.generate_models(downloadable=True) - url = reverse_lazy('marketing:single_downloadable', kwargs={'slug': f'{model["downloadable"].slug}'}) - response = self.client.get(url + '?raw=true') - expected = model['downloadable'].destination_url + url = reverse_lazy("marketing:single_downloadable", kwargs={"slug": f'{model["downloadable"].slug}'}) + response = self.client.get(url + "?raw=true") + expected = model["downloadable"].destination_url self.assertEqual(response.url, expected) self.assertEqual(response.status_code, status.HTTP_302_FOUND) diff --git a/breathecode/marketing/tests/urls/tests_googleads_data.py b/breathecode/marketing/tests/urls/tests_googleads_data.py index ecca00749..690f43ddb 100644 --- a/breathecode/marketing/tests/urls/tests_googleads_data.py +++ b/breathecode/marketing/tests/urls/tests_googleads_data.py @@ -1,6 +1,7 @@ """ Test /academy/cohort """ + import urllib, pytz from django.urls.base import reverse_lazy from rest_framework import status @@ -12,268 +13,292 @@ class AcademyCohortTestSuite(MarketingTestCase): def test_googleads_data__without_entries(self): """Test /academy/cohort without auth""" - url = reverse_lazy('marketing:googleads_csv') + url = 
reverse_lazy("marketing:googleads_csv") response = self.client.get(url) - expected = '\r\n'.join([ - 'Parameters:TimeZone=US/Eastern', - 'Google Click ID,Conversion Name,Conversion Time,Conversion Value,Conversion Currency\r\n' - ]) + expected = "\r\n".join( + [ + "Parameters:TimeZone=US/Eastern", + "Google Click ID,Conversion Name,Conversion Time,Conversion Value,Conversion Currency\r\n", + ] + ) - self.assertEqual(response.content.decode('utf-8'), expected) + self.assertEqual(response.content.decode("utf-8"), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_googleads_data__with_entry_bad_gclid(self): """Test /academy/cohort without auth""" - form_entry_kwargs = {'gclid': '532', 'deal_status': 'WON'} + form_entry_kwargs = {"gclid": "532", "deal_status": "WON"} model = self.generate_models(academy=True, form_entry=True, form_entry_kwargs=form_entry_kwargs) - url = reverse_lazy('marketing:googleads_csv') + url = reverse_lazy("marketing:googleads_csv") response = self.client.get(url) - expected = '\r\n'.join([ - 'Parameters:TimeZone=US/Eastern', - 'Google Click ID,Conversion Name,Conversion Time,Conversion Value,Conversion Currency\r\n' - ]) - - self.assertEqual(response.content.decode('utf-8'), expected) + expected = "\r\n".join( + [ + "Parameters:TimeZone=US/Eastern", + "Google Click ID,Conversion Name,Conversion Time,Conversion Value,Conversion Currency\r\n", + ] + ) + + self.assertEqual(response.content.decode("utf-8"), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_googleads_data__with_entry_empty_gclid(self): """Test /academy/cohort without auth""" - form_entry_kwargs = {'deal_status': 'WON'} + form_entry_kwargs = {"deal_status": "WON"} model = self.generate_models(academy=True, form_entry=True, form_entry_kwargs=form_entry_kwargs) - url = reverse_lazy('marketing:googleads_csv') + url = reverse_lazy("marketing:googleads_csv") response = self.client.get(url) - expected = '\r\n'.join([ - 'Parameters:TimeZone=US/Eastern', - 'Google Click ID,Conversion Name,Conversion Time,Conversion Value,Conversion Currency\r\n' - ]) - - self.assertEqual(response.content.decode('utf-8'), expected) + expected = "\r\n".join( + [ + "Parameters:TimeZone=US/Eastern", + "Google Click ID,Conversion Name,Conversion Time,Conversion Value,Conversion Currency\r\n", + ] + ) + + self.assertEqual(response.content.decode("utf-8"), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_googleads_data__with_entry_empty_values(self): """Test /academy/cohort without auth""" model = self.generate_models(academy=True, form_entry=True) - url = reverse_lazy('marketing:googleads_csv') + url = reverse_lazy("marketing:googleads_csv") response = self.client.get(url) - expected = '\r\n'.join([ - 'Parameters:TimeZone=US/Eastern', - 'Google Click ID,Conversion Name,Conversion Time,Conversion Value,Conversion Currency\r\n' - ]) - - self.assertEqual(response.content.decode('utf-8'), expected) + expected = "\r\n".join( + [ + "Parameters:TimeZone=US/Eastern", + "Google Click ID,Conversion Name,Conversion Time,Conversion Value,Conversion Currency\r\n", + ] + ) + + self.assertEqual(response.content.decode("utf-8"), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_googleads_data__with_entry_bad_deal_status(self): """Test /academy/cohort without auth""" - form_entry_kwargs = {'gclid': 'D_BwE', 'deal_status': 'LOST'} + form_entry_kwargs = {"gclid": "D_BwE", "deal_status": "LOST"} model = self.generate_models(academy=True, 
form_entry=True, form_entry_kwargs=form_entry_kwargs) - url = reverse_lazy('marketing:googleads_csv') + url = reverse_lazy("marketing:googleads_csv") response = self.client.get(url) - print(model['form_entry'].gclid) - expected = '\r\n'.join([ - 'Parameters:TimeZone=US/Eastern', - 'Google Click ID,Conversion Name,Conversion Time,Conversion Value,Conversion Currency\r\n' - ]) - - self.assertEqual(response.content.decode('utf-8'), expected) + print(model["form_entry"].gclid) + expected = "\r\n".join( + [ + "Parameters:TimeZone=US/Eastern", + "Google Click ID,Conversion Name,Conversion Time,Conversion Value,Conversion Currency\r\n", + ] + ) + + self.assertEqual(response.content.decode("utf-8"), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_googleads_data__with_entry(self): """Test /academy/cohort without auth""" - form_entry_kwargs = {'gclid': 'D_BwE', 'deal_status': 'WON'} + form_entry_kwargs = {"gclid": "D_BwE", "deal_status": "WON"} model = self.generate_models(academy=True, form_entry=True, form_entry_kwargs=form_entry_kwargs) - url = reverse_lazy('marketing:googleads_csv') + url = reverse_lazy("marketing:googleads_csv") response = self.client.get(url) - gclid = model['form_entry'].gclid - timezone = pytz.timezone('US/Eastern') - convertion_time = model['form_entry'].created_at.astimezone(timezone) - conversion_time = convertion_time.strftime('%Y-%m-%d %H:%M:%S') - - expected = '\r\n'.join([ - 'Parameters:TimeZone=US/Eastern', - 'Google Click ID,Conversion Name,Conversion Time,Conversion Value,Conversion Currency', - f'{gclid},,{conversion_time},,\r\n' - ]) - - self.assertEqual(response.content.decode('utf-8'), expected) + gclid = model["form_entry"].gclid + timezone = pytz.timezone("US/Eastern") + convertion_time = model["form_entry"].created_at.astimezone(timezone) + conversion_time = convertion_time.strftime("%Y-%m-%d %H:%M:%S") + + expected = "\r\n".join( + [ + "Parameters:TimeZone=US/Eastern", + "Google Click ID,Conversion Name,Conversion Time,Conversion Value,Conversion Currency", + f"{gclid},,{conversion_time},,\r\n", + ] + ) + + self.assertEqual(response.content.decode("utf-8"), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_googleads_data__with_entries(self): """Test /academy/cohort without auth""" - form_entry_kwargs = {'gclid': 'D_BwE', 'deal_status': 'WON'} + form_entry_kwargs = {"gclid": "D_BwE", "deal_status": "WON"} model = self.generate_models(form_entry=True, form_entry_kwargs=form_entry_kwargs) - form_entry_kwargs = {'gclid': 'A_BwE', 'deal_status': 'WON'} + form_entry_kwargs = {"gclid": "A_BwE", "deal_status": "WON"} model2 = self.generate_models(form_entry=True, form_entry_kwargs=form_entry_kwargs) - url = reverse_lazy('marketing:googleads_csv') + url = reverse_lazy("marketing:googleads_csv") response = self.client.get(url) - timezone = pytz.timezone('US/Eastern') - convertion_time = model['form_entry'].created_at.astimezone(timezone) - conversion_time = convertion_time.strftime('%Y-%m-%d %H:%M:%S') + timezone = pytz.timezone("US/Eastern") + convertion_time = model["form_entry"].created_at.astimezone(timezone) + conversion_time = convertion_time.strftime("%Y-%m-%d %H:%M:%S") - convertion_time2 = model2['form_entry'].created_at.astimezone(timezone) - conversion_time2 = convertion_time2.strftime('%Y-%m-%d %H:%M:%S') + convertion_time2 = model2["form_entry"].created_at.astimezone(timezone) + conversion_time2 = convertion_time2.strftime("%Y-%m-%d %H:%M:%S") - expected = '\r\n'.join([ - 
'Parameters:TimeZone=US/Eastern', - 'Google Click ID,Conversion Name,Conversion Time,Conversion Value,Conversion Currency', - f"{model['form_entry'].gclid},,{conversion_time},,", - f"{model2['form_entry'].gclid},,{conversion_time2},,\r\n" - ]) + expected = "\r\n".join( + [ + "Parameters:TimeZone=US/Eastern", + "Google Click ID,Conversion Name,Conversion Time,Conversion Value,Conversion Currency", + f"{model['form_entry'].gclid},,{conversion_time},,", + f"{model2['form_entry'].gclid},,{conversion_time2},,\r\n", + ] + ) - self.assertEqual(response.content.decode('utf-8'), expected) + self.assertEqual(response.content.decode("utf-8"), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_googleads_data__with_entries_bad_values(self): """Test /academy/cohort without auth""" - form_entry_kwargs = {'gclid': 'D_BwE', 'deal_status': 'WON'} + form_entry_kwargs = {"gclid": "D_BwE", "deal_status": "WON"} model = self.generate_models(form_entry=True, form_entry_kwargs=form_entry_kwargs) - form_entry_kwargs = {'gclid': '123', 'deal_status': 'LOST'} + form_entry_kwargs = {"gclid": "123", "deal_status": "LOST"} model2 = self.generate_models(form_entry=True, form_entry_kwargs=form_entry_kwargs) - form_entry_kwargs = {'gclid': 'A_BwE', 'deal_status': 'WON'} + form_entry_kwargs = {"gclid": "A_BwE", "deal_status": "WON"} model3 = self.generate_models(form_entry=True, form_entry_kwargs=form_entry_kwargs) - url = reverse_lazy('marketing:googleads_csv') + url = reverse_lazy("marketing:googleads_csv") response = self.client.get(url) - timezone = pytz.timezone('US/Eastern') - convertion_time = model['form_entry'].created_at.astimezone(timezone) - conversion_time = convertion_time.strftime('%Y-%m-%d %H:%M:%S') - - convertion_time2 = model3['form_entry'].created_at.astimezone(timezone) - conversion_time2 = convertion_time2.strftime('%Y-%m-%d %H:%M:%S') - - expected = '\r\n'.join([ - 'Parameters:TimeZone=US/Eastern', - 'Google Click ID,Conversion Name,Conversion Time,Conversion Value,Conversion Currency', - f"{model['form_entry'].gclid},,{conversion_time},,", - f"{model3['form_entry'].gclid},,{conversion_time2},,\r\n" - ]) - - self.assertEqual(response.content.decode('utf-8'), expected) + timezone = pytz.timezone("US/Eastern") + convertion_time = model["form_entry"].created_at.astimezone(timezone) + conversion_time = convertion_time.strftime("%Y-%m-%d %H:%M:%S") + + convertion_time2 = model3["form_entry"].created_at.astimezone(timezone) + conversion_time2 = convertion_time2.strftime("%Y-%m-%d %H:%M:%S") + + expected = "\r\n".join( + [ + "Parameters:TimeZone=US/Eastern", + "Google Click ID,Conversion Name,Conversion Time,Conversion Value,Conversion Currency", + f"{model['form_entry'].gclid},,{conversion_time},,", + f"{model3['form_entry'].gclid},,{conversion_time2},,\r\n", + ] + ) + + self.assertEqual(response.content.decode("utf-8"), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_googleads_data__with_entries_with_academy_slug(self): """Test /academy/cohort without auth""" - form_entry_kwargs = {'gclid': 'D_BwE', 'deal_status': 'WON'} + form_entry_kwargs = {"gclid": "D_BwE", "deal_status": "WON"} model = self.generate_models(form_entry=True, academy=True, form_entry_kwargs=form_entry_kwargs) - form_entry_kwargs = {'gclid': 'A_BwE', 'deal_status': 'WON'} + form_entry_kwargs = {"gclid": "A_BwE", "deal_status": "WON"} model3 = self.generate_models(form_entry=True, form_entry_kwargs=form_entry_kwargs) - url = reverse_lazy('marketing:googleads_csv') - args = 
{'academy_slug': ','.join(list(dict.fromkeys([model.academy.slug])))} - url = url + '?' + urllib.parse.urlencode(args) + url = reverse_lazy("marketing:googleads_csv") + args = {"academy_slug": ",".join(list(dict.fromkeys([model.academy.slug])))} + url = url + "?" + urllib.parse.urlencode(args) response = self.client.get(url) - timezone = pytz.timezone('US/Eastern') - convertion_time = model['form_entry'].created_at.astimezone(timezone) - conversion_time = convertion_time.strftime('%Y-%m-%d %H:%M:%S') + timezone = pytz.timezone("US/Eastern") + convertion_time = model["form_entry"].created_at.astimezone(timezone) + conversion_time = convertion_time.strftime("%Y-%m-%d %H:%M:%S") - expected = '\r\n'.join([ - 'Parameters:TimeZone=US/Eastern', - 'Google Click ID,Conversion Name,Conversion Time,Conversion Value,Conversion Currency', - f"{model['form_entry'].gclid},,{conversion_time},,\r\n", - ]) + expected = "\r\n".join( + [ + "Parameters:TimeZone=US/Eastern", + "Google Click ID,Conversion Name,Conversion Time,Conversion Value,Conversion Currency", + f"{model['form_entry'].gclid},,{conversion_time},,\r\n", + ] + ) - self.assertEqual(response.content.decode('utf-8'), expected) + self.assertEqual(response.content.decode("utf-8"), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_googleads_data__with_entries_with_academy_id(self): """Test /academy/cohort without auth""" - form_entry_kwargs = {'gclid': 'D_BwE', 'deal_status': 'WON'} + form_entry_kwargs = {"gclid": "D_BwE", "deal_status": "WON"} model = self.generate_models(form_entry=True, academy=True, form_entry_kwargs=form_entry_kwargs) - form_entry_kwargs = {'gclid': 'A_BwE', 'deal_status': 'WON'} + form_entry_kwargs = {"gclid": "A_BwE", "deal_status": "WON"} model3 = self.generate_models(form_entry=True, form_entry_kwargs=form_entry_kwargs) - url = reverse_lazy('marketing:googleads_csv') - args = {'academy': '1'} - url = url + '?' + urllib.parse.urlencode(args) + url = reverse_lazy("marketing:googleads_csv") + args = {"academy": "1"} + url = url + "?" 
+ urllib.parse.urlencode(args) response = self.client.get(url) - timezone = pytz.timezone('US/Eastern') - convertion_time = model['form_entry'].created_at.astimezone(timezone) - conversion_time = convertion_time.strftime('%Y-%m-%d %H:%M:%S') + timezone = pytz.timezone("US/Eastern") + convertion_time = model["form_entry"].created_at.astimezone(timezone) + conversion_time = convertion_time.strftime("%Y-%m-%d %H:%M:%S") - expected = '\r\n'.join([ - 'Parameters:TimeZone=US/Eastern', - 'Google Click ID,Conversion Name,Conversion Time,Conversion Value,Conversion Currency', - f"{model['form_entry'].gclid},,{conversion_time},,\r\n", - ]) + expected = "\r\n".join( + [ + "Parameters:TimeZone=US/Eastern", + "Google Click ID,Conversion Name,Conversion Time,Conversion Value,Conversion Currency", + f"{model['form_entry'].gclid},,{conversion_time},,\r\n", + ] + ) - self.assertEqual(response.content.decode('utf-8'), expected) + self.assertEqual(response.content.decode("utf-8"), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_googleads_data__with_entries_with_two_academy_slug(self): """Test /academy/cohort without auth""" - form_entry_kwargs = {'gclid': 'D_BwE', 'deal_status': 'WON'} + form_entry_kwargs = {"gclid": "D_BwE", "deal_status": "WON"} model = self.generate_models(form_entry=True, academy=True, form_entry_kwargs=form_entry_kwargs) - form_entry_kwargs = {'gclid': 'A_BwE', 'deal_status': 'WON'} + form_entry_kwargs = {"gclid": "A_BwE", "deal_status": "WON"} model2 = self.generate_models(form_entry=True, academy=True, form_entry_kwargs=form_entry_kwargs) models = [model, model2] - url = reverse_lazy('marketing:googleads_csv') - args = {'academy_slug': ','.join(list(dict.fromkeys([x.academy.slug for x in models])))} - url = url + '?' + urllib.parse.urlencode(args) + url = reverse_lazy("marketing:googleads_csv") + args = {"academy_slug": ",".join(list(dict.fromkeys([x.academy.slug for x in models])))} + url = url + "?" 
+ urllib.parse.urlencode(args) response = self.client.get(url) - timezone = pytz.timezone('US/Eastern') - convertion_time = model['form_entry'].created_at.astimezone(timezone) - conversion_time = convertion_time.strftime('%Y-%m-%d %H:%M:%S') + timezone = pytz.timezone("US/Eastern") + convertion_time = model["form_entry"].created_at.astimezone(timezone) + conversion_time = convertion_time.strftime("%Y-%m-%d %H:%M:%S") - convertion_time2 = model2['form_entry'].created_at.astimezone(timezone) - conversion_time2 = convertion_time2.strftime('%Y-%m-%d %H:%M:%S') + convertion_time2 = model2["form_entry"].created_at.astimezone(timezone) + conversion_time2 = convertion_time2.strftime("%Y-%m-%d %H:%M:%S") - expected = '\r\n'.join([ - 'Parameters:TimeZone=US/Eastern', - 'Google Click ID,Conversion Name,Conversion Time,Conversion Value,Conversion Currency', - f"{model['form_entry'].gclid},,{conversion_time},,", - f"{model2['form_entry'].gclid},,{conversion_time2},,\r\n" - ]) + expected = "\r\n".join( + [ + "Parameters:TimeZone=US/Eastern", + "Google Click ID,Conversion Name,Conversion Time,Conversion Value,Conversion Currency", + f"{model['form_entry'].gclid},,{conversion_time},,", + f"{model2['form_entry'].gclid},,{conversion_time2},,\r\n", + ] + ) - self.assertEqual(response.content.decode('utf-8'), expected) + self.assertEqual(response.content.decode("utf-8"), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_googleads_data__with_entries_with_two_academy_id(self): """Test /academy/cohort without auth""" - form_entry_kwargs = {'gclid': 'D_BwE', 'deal_status': 'WON'} + form_entry_kwargs = {"gclid": "D_BwE", "deal_status": "WON"} model = self.generate_models(form_entry=True, academy=True, form_entry_kwargs=form_entry_kwargs) - form_entry_kwargs = {'gclid': 'A_BwE', 'deal_status': 'WON'} + form_entry_kwargs = {"gclid": "A_BwE", "deal_status": "WON"} model2 = self.generate_models(form_entry=True, academy=True, form_entry_kwargs=form_entry_kwargs) models = [model, model2] - url = reverse_lazy('marketing:googleads_csv') - args = {'academy': '1,2'} - url = url + '?' + urllib.parse.urlencode(args) + url = reverse_lazy("marketing:googleads_csv") + args = {"academy": "1,2"} + url = url + "?" 
+ urllib.parse.urlencode(args) response = self.client.get(url) - timezone = pytz.timezone('US/Eastern') - convertion_time = model['form_entry'].created_at.astimezone(timezone) - conversion_time = convertion_time.strftime('%Y-%m-%d %H:%M:%S') + timezone = pytz.timezone("US/Eastern") + convertion_time = model["form_entry"].created_at.astimezone(timezone) + conversion_time = convertion_time.strftime("%Y-%m-%d %H:%M:%S") - convertion_time2 = model2['form_entry'].created_at.astimezone(timezone) - conversion_time2 = convertion_time2.strftime('%Y-%m-%d %H:%M:%S') + convertion_time2 = model2["form_entry"].created_at.astimezone(timezone) + conversion_time2 = convertion_time2.strftime("%Y-%m-%d %H:%M:%S") - expected = '\r\n'.join([ - 'Parameters:TimeZone=US/Eastern', - 'Google Click ID,Conversion Name,Conversion Time,Conversion Value,Conversion Currency', - f"{model['form_entry'].gclid},,{conversion_time},,", - f"{model2['form_entry'].gclid},,{conversion_time2},,\r\n" - ]) + expected = "\r\n".join( + [ + "Parameters:TimeZone=US/Eastern", + "Google Click ID,Conversion Name,Conversion Time,Conversion Value,Conversion Currency", + f"{model['form_entry'].gclid},,{conversion_time},,", + f"{model2['form_entry'].gclid},,{conversion_time2},,\r\n", + ] + ) - self.assertEqual(response.content.decode('utf-8'), expected) + self.assertEqual(response.content.decode("utf-8"), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) diff --git a/breathecode/marketing/tests/urls/tests_lead.py b/breathecode/marketing/tests/urls/tests_lead.py index fa2dba031..07d2721cf 100644 --- a/breathecode/marketing/tests/urls/tests_lead.py +++ b/breathecode/marketing/tests/urls/tests_lead.py @@ -1,6 +1,7 @@ """ Test /academy/lead """ + from datetime import datetime from decimal import Decimal import re @@ -19,131 +20,131 @@ def random_string(): - return ''.join(choices(string.ascii_letters, k=10)) + return "".join(choices(string.ascii_letters, k=10)) def post_serializer(data={}): return { - 'id': 1, - 'fb_leadgen_id': None, - 'fb_page_id': None, - 'fb_form_id': None, - 'fb_adgroup_id': None, - 'fb_ad_id': None, - 'first_name': '', - 'last_name': '', - 'email': None, - 'phone': None, - 'course': None, - 'client_comments': None, - 'current_download': None, - 'location': None, - 'language': 'en', - 'utm_url': None, - 'utm_medium': None, - 'utm_campaign': None, - 'utm_content': None, - 'utm_source': None, - 'utm_placement': None, - 'utm_term': None, - 'utm_plan': None, - 'sex': None, - 'custom_fields': None, - 'referral_key': None, - 'gclid': None, - 'tags': '', - 'automations': '', - 'street_address': None, - 'country': None, - 'city': None, - 'latitude': None, - 'longitude': None, - 'state': None, - 'zip_code': None, - 'browser_lang': None, - 'storage_status': 'PENDING', - 'storage_status_text': '', - 'lead_type': None, - 'deal_status': None, - 'sentiment': None, - 'ac_contact_id': None, - 'ac_deal_id': None, - 'ac_expected_cohort': None, - 'ac_deal_owner_id': None, - 'ac_deal_location': None, - 'ac_deal_course': None, - 'ac_deal_owner_full_name': None, - 'ac_expected_cohort_date': None, - 'ac_deal_amount': None, - 'ac_deal_currency_code': None, - 'won_at': None, - 'contact': None, - 'academy': None, - 'user': None, - 'lead_generation_app': None, + "id": 1, + "fb_leadgen_id": None, + "fb_page_id": None, + "fb_form_id": None, + "fb_adgroup_id": None, + "fb_ad_id": None, + "first_name": "", + "last_name": "", + "email": None, + "phone": None, + "course": None, + "client_comments": None, + "current_download": None, + 
"location": None, + "language": "en", + "utm_url": None, + "utm_medium": None, + "utm_campaign": None, + "utm_content": None, + "utm_source": None, + "utm_placement": None, + "utm_term": None, + "utm_plan": None, + "sex": None, + "custom_fields": None, + "referral_key": None, + "gclid": None, + "tags": "", + "automations": "", + "street_address": None, + "country": None, + "city": None, + "latitude": None, + "longitude": None, + "state": None, + "zip_code": None, + "browser_lang": None, + "storage_status": "PENDING", + "storage_status_text": "", + "lead_type": None, + "deal_status": None, + "sentiment": None, + "ac_contact_id": None, + "ac_deal_id": None, + "ac_expected_cohort": None, + "ac_deal_owner_id": None, + "ac_deal_location": None, + "ac_deal_course": None, + "ac_deal_owner_full_name": None, + "ac_expected_cohort_date": None, + "ac_deal_amount": None, + "ac_deal_currency_code": None, + "won_at": None, + "contact": None, + "academy": None, + "user": None, + "lead_generation_app": None, **data, } def form_entry_field(data={}): return { - 'id': 1, - 'fb_leadgen_id': None, - 'fb_page_id': None, - 'fb_form_id': None, - 'fb_adgroup_id': None, - 'fb_ad_id': None, - 'first_name': '', - 'last_name': '', - 'email': None, - 'phone': None, - 'course': None, - 'client_comments': None, - 'current_download': None, - 'location': None, - 'language': 'en', - 'utm_url': None, - 'utm_medium': None, - 'utm_campaign': None, - 'utm_content': None, - 'utm_source': None, - 'utm_placement': None, - 'utm_term': None, - 'utm_plan': None, - 'sex': None, - 'custom_fields': None, - 'referral_key': None, - 'gclid': None, - 'tags': '', - 'automations': '', - 'street_address': None, - 'country': None, - 'city': None, - 'latitude': None, - 'longitude': None, - 'state': None, - 'zip_code': None, - 'browser_lang': None, - 'storage_status': 'PENDING', - 'storage_status_text': '', - 'lead_type': None, - 'deal_status': None, - 'sentiment': None, - 'ac_contact_id': None, - 'ac_deal_id': None, - 'ac_expected_cohort': None, - 'won_at': None, - 'contact_id': None, - 'academy_id': None, - 'user_id': None, - 'lead_generation_app_id': None, - 'ac_deal_course': None, - 'ac_deal_location': None, - 'ac_deal_owner_full_name': None, - 'ac_deal_owner_id': None, - 'ac_expected_cohort_date': None, - 'ac_deal_amount': None, - 'ac_deal_currency_code': None, + "id": 1, + "fb_leadgen_id": None, + "fb_page_id": None, + "fb_form_id": None, + "fb_adgroup_id": None, + "fb_ad_id": None, + "first_name": "", + "last_name": "", + "email": None, + "phone": None, + "course": None, + "client_comments": None, + "current_download": None, + "location": None, + "language": "en", + "utm_url": None, + "utm_medium": None, + "utm_campaign": None, + "utm_content": None, + "utm_source": None, + "utm_placement": None, + "utm_term": None, + "utm_plan": None, + "sex": None, + "custom_fields": None, + "referral_key": None, + "gclid": None, + "tags": "", + "automations": "", + "street_address": None, + "country": None, + "city": None, + "latitude": None, + "longitude": None, + "state": None, + "zip_code": None, + "browser_lang": None, + "storage_status": "PENDING", + "storage_status_text": "", + "lead_type": None, + "deal_status": None, + "sentiment": None, + "ac_contact_id": None, + "ac_deal_id": None, + "ac_expected_cohort": None, + "won_at": None, + "contact_id": None, + "academy_id": None, + "user_id": None, + "lead_generation_app_id": None, + "ac_deal_course": None, + "ac_deal_location": None, + "ac_deal_owner_full_name": None, + "ac_deal_owner_id": None, + 
"ac_expected_cohort_date": None, + "ac_deal_amount": None, + "ac_deal_currency_code": None, **data, } @@ -151,41 +152,41 @@ def form_entry_field(data={}): def generate_form_entry_kwargs(data={}): """That random values is too long that i prefer have it in one function""" return { - 'fb_leadgen_id': randint(0, 9999), - 'fb_page_id': randint(0, 9999), - 'fb_form_id': randint(0, 9999), - 'fb_adgroup_id': randint(0, 9999), - 'fb_ad_id': randint(0, 9999), - 'gclid': random_string(), - 'first_name': choice(['Rene', 'Albert', 'Immanuel']), - 'last_name': choice(['Descartes', 'Camus', 'Kant']), - 'email': choice(['a@a.com', 'b@b.com', 'c@c.com']), - 'phone': choice(['123', '456', '789']), - 'course': random_string(), - 'client_comments': random_string(), - 'location': random_string(), - 'language': random_string(), - 'utm_url': fake.url(), - 'utm_medium': random_string(), - 'utm_campaign': random_string(), - 'utm_source': random_string(), - 'referral_key': random_string(), - 'gclid': random_string(), - 'tags': random_string(), - 'automations': random_string(), - 'street_address': random_string(), - 'country': random_string(), - 'city': random_string(), - 'latitude': randint(0, 9999), - 'longitude': randint(0, 9999), - 'state': random_string(), - 'zip_code': str(randint(0, 9999)), - 'browser_lang': random_string(), - 'storage_status': choice(['PENDING', 'PERSISTED']), - 'lead_type': choice(['STRONG', 'SOFT', 'DISCOVERY']), - 'deal_status': choice(['WON', 'LOST']), - 'sentiment': choice(['GOOD', 'BAD']), - 'current_download': random_string(), + "fb_leadgen_id": randint(0, 9999), + "fb_page_id": randint(0, 9999), + "fb_form_id": randint(0, 9999), + "fb_adgroup_id": randint(0, 9999), + "fb_ad_id": randint(0, 9999), + "gclid": random_string(), + "first_name": choice(["Rene", "Albert", "Immanuel"]), + "last_name": choice(["Descartes", "Camus", "Kant"]), + "email": choice(["a@a.com", "b@b.com", "c@c.com"]), + "phone": choice(["123", "456", "789"]), + "course": random_string(), + "client_comments": random_string(), + "location": random_string(), + "language": random_string(), + "utm_url": fake.url(), + "utm_medium": random_string(), + "utm_campaign": random_string(), + "utm_source": random_string(), + "referral_key": random_string(), + "gclid": random_string(), + "tags": random_string(), + "automations": random_string(), + "street_address": random_string(), + "country": random_string(), + "city": random_string(), + "latitude": randint(0, 9999), + "longitude": randint(0, 9999), + "state": random_string(), + "zip_code": str(randint(0, 9999)), + "browser_lang": random_string(), + "storage_status": choice(["PENDING", "PERSISTED"]), + "lead_type": choice(["STRONG", "SOFT", "DISCOVERY"]), + "deal_status": choice(["WON", "LOST"]), + "sentiment": choice(["GOOD", "BAD"]), + "current_download": random_string(), **data, } @@ -207,7 +208,7 @@ def assertDatetime(date: datetime) -> bool: return True try: - string = re.sub(r'Z$', '', date) + string = re.sub(r"Z$", "", date) datetime.fromisoformat(string) return True except Exception: @@ -216,54 +217,59 @@ def assertDatetime(date: datetime) -> bool: @pytest.fixture(autouse=True) def setup_db(db, monkeypatch): - monkeypatch.setattr('breathecode.services.google_cloud.Recaptcha.__init__', lambda: None) - monkeypatch.setattr('breathecode.services.google_cloud.Recaptcha.create_assessment', - MagicMock(return_value=FakeRecaptcha())) - monkeypatch.setattr('uuid.UUID.int', PropertyMock(return_value=1000)) + 
monkeypatch.setattr("breathecode.services.google_cloud.Recaptcha.__init__", lambda: None) + monkeypatch.setattr( + "breathecode.services.google_cloud.Recaptcha.create_assessment", MagicMock(return_value=FakeRecaptcha()) + ) + monkeypatch.setattr("uuid.UUID.int", PropertyMock(return_value=1000)) yield # When: Passing nothing def test_lead__without_data(bc: Breathecode, client: APIClient): - url = reverse_lazy('marketing:lead') + url = reverse_lazy("marketing:lead") - response = client.post(url, format='json') + response = client.post(url, format="json") json = response.json() - assertDatetime(json['created_at']) - assertDatetime(json['updated_at']) - del json['created_at'] - del json['updated_at'] + assertDatetime(json["created_at"]) + assertDatetime(json["updated_at"]) + del json["created_at"] + del json["updated_at"] - expected = post_serializer(data={ - 'attribution_id': None, - }) + expected = post_serializer( + data={ + "attribution_id": None, + } + ) assert json == expected assert response.status_code == status.HTTP_201_CREATED - assert bc.database.list_of('marketing.FormEntry') == [ - form_entry_field({ - 'id': 1, - 'academy_id': None, - 'storage_status': 'ERROR', - 'storage_status_text': 'Missing location information', - 'attribution_id': None, - }) + assert bc.database.list_of("marketing.FormEntry") == [ + form_entry_field( + { + "id": 1, + "academy_id": None, + "storage_status": "ERROR", + "storage_status_text": "Missing location information", + "attribution_id": None, + } + ) ] # When: Validations of fields def test_lead__with__bad_data(bc: Breathecode, client: APIClient): - url = reverse_lazy('marketing:lead') + url = reverse_lazy("marketing:lead") data = generate_form_entry_kwargs() - response = client.post(url, data, format='json') + response = client.post(url, data, format="json") json = response.json() expected = { - 'phone': ["Phone number must be entered in the format: '+99999999'. Up to 15 digits allowed."], - 'language': ['Ensure this field has no more than 2 characters.'] + "phone": ["Phone number must be entered in the format: '+99999999'. 
Up to 15 digits allowed."], + "language": ["Ensure this field has no more than 2 characters."], } assert json == expected @@ -272,97 +278,107 @@ def test_lead__with__bad_data(bc: Breathecode, client: APIClient): # When: Passing required fields def test_lead__with__data(bc: Breathecode, client: APIClient): - url = reverse_lazy('marketing:lead') + url = reverse_lazy("marketing:lead") - data = generate_form_entry_kwargs({ - 'phone': '123456789', - 'language': 'en', - }) + data = generate_form_entry_kwargs( + { + "phone": "123456789", + "language": "en", + } + ) - response = client.post(url, data, format='json') + response = client.post(url, data, format="json") json = response.json() - assertDatetime(json['created_at']) - assertDatetime(json['updated_at']) - del json['created_at'] - del json['updated_at'] + assertDatetime(json["created_at"]) + assertDatetime(json["updated_at"]) + del json["created_at"] + del json["updated_at"] - expected = post_serializer({ - **data, - 'id': 1, - 'academy': None, - 'latitude': bc.format.to_decimal_string(data['latitude']), - 'longitude': bc.format.to_decimal_string(data['longitude']), - 'attribution_id': '75b36c508866d18732305da14fe9a0', - }) + expected = post_serializer( + { + **data, + "id": 1, + "academy": None, + "latitude": bc.format.to_decimal_string(data["latitude"]), + "longitude": bc.format.to_decimal_string(data["longitude"]), + "attribution_id": "75b36c508866d18732305da14fe9a0", + } + ) assert json == expected assert response.status_code == status.HTTP_201_CREATED - assert bc.database.list_of('marketing.FormEntry') == [ - form_entry_field({ - **data, - 'id': 1, - 'academy_id': None, - 'latitude': Decimal(data['latitude']), - 'longitude': Decimal(data['longitude']), - 'storage_status': 'ERROR', - 'storage_status_text': f"No academy found with slug {data['location']}", - 'attribution_id': '75b36c508866d18732305da14fe9a0', - }) + assert bc.database.list_of("marketing.FormEntry") == [ + form_entry_field( + { + **data, + "id": 1, + "academy_id": None, + "latitude": Decimal(data["latitude"]), + "longitude": Decimal(data["longitude"]), + "storage_status": "ERROR", + "storage_status_text": f"No academy found with slug {data['location']}", + "attribution_id": "75b36c508866d18732305da14fe9a0", + } + ) ] # When: Passing slug of Academy or AcademyAlias -@pytest.mark.parametrize('academy,academy_alias,academy_id', [ - ({ - 'slug': 'midgard' - }, None, None), - ({ - 'slug': 'midgard' - }, 1, None), - (1, { - 'active_campaign_slug': 'midgard' - }, 1), -]) -def test_passing_slug_of_academy_or_academy_alias(bc: Breathecode, client: APIClient, academy, academy_alias, - academy_id): +@pytest.mark.parametrize( + "academy,academy_alias,academy_id", + [ + ({"slug": "midgard"}, None, None), + ({"slug": "midgard"}, 1, None), + (1, {"active_campaign_slug": "midgard"}, 1), + ], +) +def test_passing_slug_of_academy_or_academy_alias( + bc: Breathecode, client: APIClient, academy, academy_alias, academy_id +): model = bc.database.create(academy=academy, academy_alias=academy_alias, active_campaig_academy=1) - url = reverse_lazy('marketing:lead') + url = reverse_lazy("marketing:lead") - data = generate_form_entry_kwargs({ - 'phone': '123456789', - 'language': 'en', - 'location': 'midgard', - }) + data = generate_form_entry_kwargs( + { + "phone": "123456789", + "language": "en", + "location": "midgard", + } + ) - response = client.post(url, data, format='json') + response = client.post(url, data, format="json") json = response.json() - assertDatetime(json['created_at']) - 
assertDatetime(json['updated_at']) - del json['created_at'] - del json['updated_at'] + assertDatetime(json["created_at"]) + assertDatetime(json["updated_at"]) + del json["created_at"] + del json["updated_at"] - expected = post_serializer({ - **data, - 'id': model.academy.id, - 'academy': academy_id, - 'latitude': bc.format.to_decimal_string(data['latitude']), - 'longitude': bc.format.to_decimal_string(data['longitude']), - 'attribution_id': '75b36c508866d18732305da14fe9a0', - }) + expected = post_serializer( + { + **data, + "id": model.academy.id, + "academy": academy_id, + "latitude": bc.format.to_decimal_string(data["latitude"]), + "longitude": bc.format.to_decimal_string(data["longitude"]), + "attribution_id": "75b36c508866d18732305da14fe9a0", + } + ) assert json == expected assert response.status_code == status.HTTP_201_CREATED - assert bc.database.list_of('marketing.FormEntry') == [ - form_entry_field({ - **data, - 'id': model.academy.id, - 'academy_id': academy_id, - 'latitude': Decimal(data['latitude']), - 'longitude': Decimal(data['longitude']), - 'storage_status': 'ERROR', - 'storage_status_text': 'No academy found with slug midgard', - 'attribution_id': '75b36c508866d18732305da14fe9a0', - }) + assert bc.database.list_of("marketing.FormEntry") == [ + form_entry_field( + { + **data, + "id": model.academy.id, + "academy_id": academy_id, + "latitude": Decimal(data["latitude"]), + "longitude": Decimal(data["longitude"]), + "storage_status": "ERROR", + "storage_status_text": "No academy found with slug midgard", + "attribution_id": "75b36c508866d18732305da14fe9a0", + } + ) ] diff --git a/breathecode/marketing/tests/urls/tests_lead_all.py b/breathecode/marketing/tests/urls/tests_lead_all.py index ad5e948b9..b048b9093 100644 --- a/breathecode/marketing/tests/urls/tests_lead_all.py +++ b/breathecode/marketing/tests/urls/tests_lead_all.py @@ -1,6 +1,7 @@ """ Test /academy/lead """ + from django.utils import timezone from datetime import timedelta import re, string @@ -19,76 +20,77 @@ def random_string(): - return ''.join(choices(string.ascii_letters, k=10)) + return "".join(choices(string.ascii_letters, k=10)) def generate_form_entry_kwargs(): """That random values is too long that i prefer have it in one function""" return { - 'fb_leadgen_id': randint(0, 9999), - 'fb_page_id': randint(0, 9999), - 'fb_form_id': randint(0, 9999), - 'fb_adgroup_id': randint(0, 9999), - 'fb_ad_id': randint(0, 9999), - 'gclid': random_string(), - 'first_name': choice(['Rene', 'Albert', 'Immanuel']), - 'last_name': choice(['Descartes', 'Camus', 'Kant']), - 'email': choice(['a@a.com', 'b@b.com', 'c@c.com']), - 'phone': choice(['123', '456', '789']), - 'course': random_string(), - 'client_comments': random_string(), - 'location': random_string(), - 'language': random_string(), - 'utm_url': random_string(), - 'utm_medium': random_string(), - 'utm_campaign': random_string(), - 'utm_source': random_string(), - 'referral_key': random_string(), - 'gclid': random_string(), - 'tags': random_string(), - 'automations': random_string(), - 'street_address': random_string(), - 'country': random_string(), - 'city': random_string(), - 'latitude': randint(0, 9999), - 'longitude': randint(0, 9999), - 'state': random_string(), - 'zip_code': str(randint(0, 9999)), - 'browser_lang': random_string(), - 'storage_status': choice(['PENDING', 'PERSISTED']), - 'lead_type': choice(['STRONG', 'SOFT', 'DISCOVERY']), - 'deal_status': choice(['WON', 'LOST']), - 'sentiment': choice(['GOOD', 'BAD']), + "fb_leadgen_id": randint(0, 9999), + 
"fb_page_id": randint(0, 9999), + "fb_form_id": randint(0, 9999), + "fb_adgroup_id": randint(0, 9999), + "fb_ad_id": randint(0, 9999), + "gclid": random_string(), + "first_name": choice(["Rene", "Albert", "Immanuel"]), + "last_name": choice(["Descartes", "Camus", "Kant"]), + "email": choice(["a@a.com", "b@b.com", "c@c.com"]), + "phone": choice(["123", "456", "789"]), + "course": random_string(), + "client_comments": random_string(), + "location": random_string(), + "language": random_string(), + "utm_url": random_string(), + "utm_medium": random_string(), + "utm_campaign": random_string(), + "utm_source": random_string(), + "referral_key": random_string(), + "gclid": random_string(), + "tags": random_string(), + "automations": random_string(), + "street_address": random_string(), + "country": random_string(), + "city": random_string(), + "latitude": randint(0, 9999), + "longitude": randint(0, 9999), + "state": random_string(), + "zip_code": str(randint(0, 9999)), + "browser_lang": random_string(), + "storage_status": choice(["PENDING", "PERSISTED"]), + "lead_type": choice(["STRONG", "SOFT", "DISCOVERY"]), + "deal_status": choice(["WON", "LOST"]), + "sentiment": choice(["GOOD", "BAD"]), } class CohortUserTestSuite(MarketingTestCase): """Test /academy/lead""" + """ 🔽🔽🔽 Auth """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_lead_all__without_auth(self): """Test /cohort/:id/user without auth""" - url = reverse_lazy('marketing:lead_all') + url = reverse_lazy("marketing:lead_all") response = self.client.get(url) json = response.json() - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) self.assertEqual(self.all_form_entry_dict(), []) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_lead_all__without_profile_acedemy(self): """Test /cohort/:id/user without auth""" self.headers(academy=1) - url = reverse_lazy('marketing:lead_all') + url = reverse_lazy("marketing:lead_all") model = self.generate_models(authenticate=True, form_entry=True) response = self.client.get(url) @@ -98,20 +100,20 @@ def test_lead_all__without_profile_acedemy(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_form_entry_dict(), [{**self.model_to_dict(model, 'form_entry')}]) + self.assertEqual(self.all_form_entry_dict(), [{**self.model_to_dict(model, "form_entry")}]) """ 🔽🔽🔽 Without data """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - 
@patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_lead_all__without_data(self): """Test /cohort/:id/user without auth""" self.headers(academy=1) - url = reverse_lazy('marketing:lead_all') - model = self.generate_models(authenticate=True, profile_academy=True, capability='read_lead', role='potato') + url = reverse_lazy("marketing:lead_all") + model = self.generate_models(authenticate=True, profile_academy=True, capability="read_lead", role="potato") response = self.client.get(url) json = response.json() @@ -125,76 +127,78 @@ def test_lead_all__without_data(self): 🔽🔽🔽 With data """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_lead_all(self): """Test /cohort/:id/user without auth""" self.headers(academy=1) - url = reverse_lazy('marketing:lead_all') - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_lead', - role='potato', - form_entry=True, - form_entry_kwargs=generate_form_entry_kwargs()) + url = reverse_lazy("marketing:lead_all") + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="read_lead", + role="potato", + form_entry=True, + form_entry_kwargs=generate_form_entry_kwargs(), + ) response = self.client.get(url) json = response.json() - self.assertDatetime(json[0]['created_at']) - del json[0]['created_at'] - - expected = [{ - 'academy': { - 'id': model.form_entry.academy.id, - 'name': model.form_entry.academy.name, - 'slug': model.form_entry.academy.slug - }, - 'country': model.form_entry.country, - 'course': model.form_entry.course, - 'client_comments': model.form_entry.client_comments, - 'email': model.form_entry.email, - 'first_name': model.form_entry.first_name, - 'gclid': model.form_entry.gclid, - 'id': model.form_entry.id, - 'language': model.form_entry.language, - 'last_name': model.form_entry.last_name, - 'lead_type': model.form_entry.lead_type, - 'location': model.form_entry.location, - 'storage_status': model.form_entry.storage_status, - 'tags': model.form_entry.tags, - 'utm_campaign': model.form_entry.utm_campaign, - 'utm_medium': model.form_entry.utm_medium, - 'utm_source': model.form_entry.utm_source, - 'utm_url': model.form_entry.utm_url, - 'utm_placement': model.form_entry.utm_placement, - 'utm_term': model.form_entry.utm_term, - 'utm_plan': model.form_entry.utm_plan, - 'sex': model.form_entry.sex, - 'custom_fields': model.form_entry.custom_fields, - }] + self.assertDatetime(json[0]["created_at"]) + del json[0]["created_at"] + + expected = [ + { + "academy": { + "id": model.form_entry.academy.id, + "name": model.form_entry.academy.name, + "slug": model.form_entry.academy.slug, + }, + "country": model.form_entry.country, + "course": model.form_entry.course, + "client_comments": model.form_entry.client_comments, + "email": model.form_entry.email, + "first_name": model.form_entry.first_name, + "gclid": model.form_entry.gclid, + "id": model.form_entry.id, + "language": 
model.form_entry.language, + "last_name": model.form_entry.last_name, + "lead_type": model.form_entry.lead_type, + "location": model.form_entry.location, + "storage_status": model.form_entry.storage_status, + "tags": model.form_entry.tags, + "utm_campaign": model.form_entry.utm_campaign, + "utm_medium": model.form_entry.utm_medium, + "utm_source": model.form_entry.utm_source, + "utm_url": model.form_entry.utm_url, + "utm_placement": model.form_entry.utm_placement, + "utm_term": model.form_entry.utm_term, + "utm_plan": model.form_entry.utm_plan, + "sex": model.form_entry.sex, + "custom_fields": model.form_entry.custom_fields, + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_form_entry_dict(), [{**self.model_to_dict(model, 'form_entry')}]) + self.assertEqual(self.all_form_entry_dict(), [{**self.model_to_dict(model, "form_entry")}]) """ 🔽🔽🔽 Academy in querystring """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_lead_all__with_bad_academy_in_querystring(self): """Test /cohort/:id/user without auth""" self.headers(academy=1) - url = reverse_lazy('marketing:lead_all') + '?academy=freyja' - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_lead', - role='potato', - form_entry=True) + url = reverse_lazy("marketing:lead_all") + "?academy=freyja" + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_lead", role="potato", form_entry=True + ) response = self.client.get(url) json = response.json() @@ -202,142 +206,150 @@ def test_lead_all__with_bad_academy_in_querystring(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_form_entry_dict(), [{**self.model_to_dict(model, 'form_entry')}]) + self.assertEqual(self.all_form_entry_dict(), [{**self.model_to_dict(model, "form_entry")}]) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_lead_all__with_academy_in_querystring(self): """Test /cohort/:id/user without auth""" self.headers(academy=1) - academy_kwargs = {'slug': 'freyja'} - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_lead', - role='potato', - form_entry=True, - academy_kwargs=academy_kwargs) - - url = reverse_lazy('marketing:lead_all') + '?academy=freyja' + academy_kwargs = {"slug": "freyja"} + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="read_lead", + role="potato", + form_entry=True, + academy_kwargs=academy_kwargs, + ) + + url = reverse_lazy("marketing:lead_all") + "?academy=freyja" response = self.client.get(url) json = response.json() - self.assertDatetime(json[0]['created_at']) - del json[0]['created_at'] - - expected = 
[{ - 'academy': { - 'id': model.form_entry.academy.id, - 'name': model.form_entry.academy.name, - 'slug': model.form_entry.academy.slug - }, - 'country': model.form_entry.country, - 'course': model.form_entry.course, - 'email': model.form_entry.email, - 'first_name': model.form_entry.first_name, - 'client_comments': model.form_entry.client_comments, - 'gclid': model.form_entry.gclid, - 'id': model.form_entry.id, - 'language': model.form_entry.language, - 'last_name': model.form_entry.last_name, - 'lead_type': model.form_entry.lead_type, - 'location': model.form_entry.location, - 'storage_status': model.form_entry.storage_status, - 'tags': model.form_entry.tags, - 'utm_campaign': model.form_entry.utm_campaign, - 'utm_medium': model.form_entry.utm_medium, - 'utm_source': model.form_entry.utm_source, - 'utm_url': model.form_entry.utm_url, - 'utm_placement': model.form_entry.utm_placement, - 'utm_term': model.form_entry.utm_term, - 'utm_plan': model.form_entry.utm_plan, - 'sex': model.form_entry.sex, - 'custom_fields': model.form_entry.custom_fields, - }] + self.assertDatetime(json[0]["created_at"]) + del json[0]["created_at"] + + expected = [ + { + "academy": { + "id": model.form_entry.academy.id, + "name": model.form_entry.academy.name, + "slug": model.form_entry.academy.slug, + }, + "country": model.form_entry.country, + "course": model.form_entry.course, + "email": model.form_entry.email, + "first_name": model.form_entry.first_name, + "client_comments": model.form_entry.client_comments, + "gclid": model.form_entry.gclid, + "id": model.form_entry.id, + "language": model.form_entry.language, + "last_name": model.form_entry.last_name, + "lead_type": model.form_entry.lead_type, + "location": model.form_entry.location, + "storage_status": model.form_entry.storage_status, + "tags": model.form_entry.tags, + "utm_campaign": model.form_entry.utm_campaign, + "utm_medium": model.form_entry.utm_medium, + "utm_source": model.form_entry.utm_source, + "utm_url": model.form_entry.utm_url, + "utm_placement": model.form_entry.utm_placement, + "utm_term": model.form_entry.utm_term, + "utm_plan": model.form_entry.utm_plan, + "sex": model.form_entry.sex, + "custom_fields": model.form_entry.custom_fields, + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_form_entry_dict(), [{**self.model_to_dict(model, 'form_entry')}]) + self.assertEqual(self.all_form_entry_dict(), [{**self.model_to_dict(model, "form_entry")}]) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_lead_all__with_two_academy_in_querystring(self): """Test /cohort/:id/user without auth""" self.headers(academy=1) base = self.generate_models(user=True) models = [ - self.generate_models(authenticate=True, - profile_academy=True, - capability='read_lead', - role='potato', - form_entry=True, - models=base, - academy_kwargs={'slug': 'konan' if index == 0 else 'freyja'}) for index in range(0, 2) + self.generate_models( + authenticate=True, + profile_academy=True, + capability="read_lead", + role="potato", + form_entry=True, + models=base, + academy_kwargs={"slug": "konan" if index == 0 else "freyja"}, + ) 
+ for index in range(0, 2) ] models.sort(key=lambda x: x.form_entry.created_at) - url = reverse_lazy('marketing:lead_all') + '?academy=' + ','.join([x.academy.slug for x in models]) + url = reverse_lazy("marketing:lead_all") + "?academy=" + ",".join([x.academy.slug for x in models]) response = self.client.get(url) json = response.json() - self.assertDatetime(json[0]['created_at']) - del json[0]['created_at'] - - self.assertDatetime(json[1]['created_at']) - del json[1]['created_at'] - - expected = [{ - 'academy': { - 'id': model.form_entry.academy.id, - 'name': model.form_entry.academy.name, - 'slug': model.form_entry.academy.slug - }, - 'country': model.form_entry.country, - 'client_comments': model.form_entry.client_comments, - 'course': model.form_entry.course, - 'email': model.form_entry.email, - 'first_name': model.form_entry.first_name, - 'gclid': model.form_entry.gclid, - 'id': model.form_entry.id, - 'language': model.form_entry.language, - 'last_name': model.form_entry.last_name, - 'lead_type': model.form_entry.lead_type, - 'location': model.form_entry.location, - 'storage_status': model.form_entry.storage_status, - 'tags': model.form_entry.tags, - 'utm_campaign': model.form_entry.utm_campaign, - 'utm_medium': model.form_entry.utm_medium, - 'utm_source': model.form_entry.utm_source, - 'utm_url': model.form_entry.utm_url, - 'utm_placement': model.form_entry.utm_placement, - 'utm_term': model.form_entry.utm_term, - 'utm_plan': model.form_entry.utm_plan, - 'sex': model.form_entry.sex, - 'custom_fields': model.form_entry.custom_fields, - } for model in models] + self.assertDatetime(json[0]["created_at"]) + del json[0]["created_at"] + + self.assertDatetime(json[1]["created_at"]) + del json[1]["created_at"] + + expected = [ + { + "academy": { + "id": model.form_entry.academy.id, + "name": model.form_entry.academy.name, + "slug": model.form_entry.academy.slug, + }, + "country": model.form_entry.country, + "client_comments": model.form_entry.client_comments, + "course": model.form_entry.course, + "email": model.form_entry.email, + "first_name": model.form_entry.first_name, + "gclid": model.form_entry.gclid, + "id": model.form_entry.id, + "language": model.form_entry.language, + "last_name": model.form_entry.last_name, + "lead_type": model.form_entry.lead_type, + "location": model.form_entry.location, + "storage_status": model.form_entry.storage_status, + "tags": model.form_entry.tags, + "utm_campaign": model.form_entry.utm_campaign, + "utm_medium": model.form_entry.utm_medium, + "utm_source": model.form_entry.utm_source, + "utm_url": model.form_entry.utm_url, + "utm_placement": model.form_entry.utm_placement, + "utm_term": model.form_entry.utm_term, + "utm_plan": model.form_entry.utm_plan, + "sex": model.form_entry.sex, + "custom_fields": model.form_entry.custom_fields, + } + for model in models + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_form_entry_dict(), [{**self.model_to_dict(model, 'form_entry')} for model in models]) + self.assertEqual(self.all_form_entry_dict(), [{**self.model_to_dict(model, "form_entry")} for model in models]) """ 🔽🔽🔽 Start in querystring """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], 
apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_lead_all__with_bad_start_in_querystring(self): """Test /cohort/:id/user without auth""" self.headers(academy=1) - url = reverse_lazy('marketing:lead_all') + '?start=2100-01-01' - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_lead', - role='potato', - form_entry=True) + url = reverse_lazy("marketing:lead_all") + "?start=2100-01-01" + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_lead", role="potato", form_entry=True + ) response = self.client.get(url) json = response.json() @@ -345,78 +357,76 @@ def test_lead_all__with_bad_start_in_querystring(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_form_entry_dict(), [{**self.model_to_dict(model, 'form_entry')}]) + self.assertEqual(self.all_form_entry_dict(), [{**self.model_to_dict(model, "form_entry")}]) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_lead_all__with_start_in_querystring(self): """Test /cohort/:id/user without auth""" self.headers(academy=1) - query_date = (timezone.now() - timedelta(hours=48)).strftime('%Y-%m-%d') - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_lead', - role='potato', - form_entry=True) - - url = reverse_lazy('marketing:lead_all') + f'?start={query_date}' + query_date = (timezone.now() - timedelta(hours=48)).strftime("%Y-%m-%d") + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_lead", role="potato", form_entry=True + ) + + url = reverse_lazy("marketing:lead_all") + f"?start={query_date}" response = self.client.get(url) json = response.json() - self.assertDatetime(json[0]['created_at']) - del json[0]['created_at'] - - expected = [{ - 'academy': { - 'id': model.form_entry.academy.id, - 'name': model.form_entry.academy.name, - 'slug': model.form_entry.academy.slug - }, - 'country': model.form_entry.country, - 'course': model.form_entry.course, - 'email': model.form_entry.email, - 'first_name': model.form_entry.first_name, - 'client_comments': model.form_entry.client_comments, - 'gclid': model.form_entry.gclid, - 'id': model.form_entry.id, - 'language': model.form_entry.language, - 'last_name': model.form_entry.last_name, - 'lead_type': model.form_entry.lead_type, - 'location': model.form_entry.location, - 'storage_status': model.form_entry.storage_status, - 'tags': model.form_entry.tags, - 'utm_campaign': model.form_entry.utm_campaign, - 'utm_medium': model.form_entry.utm_medium, - 'utm_source': model.form_entry.utm_source, - 'utm_url': model.form_entry.utm_url, - 'utm_placement': model.form_entry.utm_placement, - 'utm_term': model.form_entry.utm_term, - 'utm_plan': model.form_entry.utm_plan, - 'sex': model.form_entry.sex, - 'custom_fields': model.form_entry.custom_fields, - }] + self.assertDatetime(json[0]["created_at"]) + del json[0]["created_at"] + + expected = [ + { + "academy": { + "id": model.form_entry.academy.id, + "name": model.form_entry.academy.name, + "slug": 
model.form_entry.academy.slug, + }, + "country": model.form_entry.country, + "course": model.form_entry.course, + "email": model.form_entry.email, + "first_name": model.form_entry.first_name, + "client_comments": model.form_entry.client_comments, + "gclid": model.form_entry.gclid, + "id": model.form_entry.id, + "language": model.form_entry.language, + "last_name": model.form_entry.last_name, + "lead_type": model.form_entry.lead_type, + "location": model.form_entry.location, + "storage_status": model.form_entry.storage_status, + "tags": model.form_entry.tags, + "utm_campaign": model.form_entry.utm_campaign, + "utm_medium": model.form_entry.utm_medium, + "utm_source": model.form_entry.utm_source, + "utm_url": model.form_entry.utm_url, + "utm_placement": model.form_entry.utm_placement, + "utm_term": model.form_entry.utm_term, + "utm_plan": model.form_entry.utm_plan, + "sex": model.form_entry.sex, + "custom_fields": model.form_entry.custom_fields, + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_form_entry_dict(), [{**self.model_to_dict(model, 'form_entry')}]) + self.assertEqual(self.all_form_entry_dict(), [{**self.model_to_dict(model, "form_entry")}]) """ 🔽🔽🔽 End in querystring """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_lead_all__with_bad_end_in_querystring(self): """Test /cohort/:id/user without auth""" self.headers(academy=1) - url = reverse_lazy('marketing:lead_all') + '?end=1900-01-01' - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_lead', - role='potato', - form_entry=True) + url = reverse_lazy("marketing:lead_all") + "?end=1900-01-01" + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_lead", role="potato", form_entry=True + ) response = self.client.get(url) json = response.json() @@ -424,58 +434,58 @@ def test_lead_all__with_bad_end_in_querystring(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_form_entry_dict(), [{**self.model_to_dict(model, 'form_entry')}]) + self.assertEqual(self.all_form_entry_dict(), [{**self.model_to_dict(model, "form_entry")}]) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_lead_all__with_end_in_querystring(self): """Test /cohort/:id/user without auth""" self.headers(academy=1) - query_date = (timezone.now() + timedelta(hours=48)).strftime('%Y-%m-%d') - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_lead', - role='potato', - form_entry=True) - - url = reverse_lazy('marketing:lead_all') + f'?end={query_date}' + query_date = (timezone.now() + timedelta(hours=48)).strftime("%Y-%m-%d") + model = self.generate_models( + 
authenticate=True, profile_academy=True, capability="read_lead", role="potato", form_entry=True + ) + + url = reverse_lazy("marketing:lead_all") + f"?end={query_date}" response = self.client.get(url) json = response.json() - self.assertDatetime(json[0]['created_at']) - del json[0]['created_at'] - - expected = [{ - 'academy': { - 'id': model.form_entry.academy.id, - 'name': model.form_entry.academy.name, - 'slug': model.form_entry.academy.slug - }, - 'country': model.form_entry.country, - 'course': model.form_entry.course, - 'client_comments': model.form_entry.client_comments, - 'email': model.form_entry.email, - 'first_name': model.form_entry.first_name, - 'gclid': model.form_entry.gclid, - 'id': model.form_entry.id, - 'language': model.form_entry.language, - 'last_name': model.form_entry.last_name, - 'lead_type': model.form_entry.lead_type, - 'location': model.form_entry.location, - 'storage_status': model.form_entry.storage_status, - 'tags': model.form_entry.tags, - 'utm_campaign': model.form_entry.utm_campaign, - 'utm_medium': model.form_entry.utm_medium, - 'utm_source': model.form_entry.utm_source, - 'utm_url': model.form_entry.utm_url, - 'utm_placement': model.form_entry.utm_placement, - 'utm_term': model.form_entry.utm_term, - 'utm_plan': model.form_entry.utm_plan, - 'sex': model.form_entry.sex, - 'custom_fields': model.form_entry.custom_fields, - }] + self.assertDatetime(json[0]["created_at"]) + del json[0]["created_at"] + + expected = [ + { + "academy": { + "id": model.form_entry.academy.id, + "name": model.form_entry.academy.name, + "slug": model.form_entry.academy.slug, + }, + "country": model.form_entry.country, + "course": model.form_entry.course, + "client_comments": model.form_entry.client_comments, + "email": model.form_entry.email, + "first_name": model.form_entry.first_name, + "gclid": model.form_entry.gclid, + "id": model.form_entry.id, + "language": model.form_entry.language, + "last_name": model.form_entry.last_name, + "lead_type": model.form_entry.lead_type, + "location": model.form_entry.location, + "storage_status": model.form_entry.storage_status, + "tags": model.form_entry.tags, + "utm_campaign": model.form_entry.utm_campaign, + "utm_medium": model.form_entry.utm_medium, + "utm_source": model.form_entry.utm_source, + "utm_url": model.form_entry.utm_url, + "utm_placement": model.form_entry.utm_placement, + "utm_term": model.form_entry.utm_term, + "utm_plan": model.form_entry.utm_plan, + "sex": model.form_entry.sex, + "custom_fields": model.form_entry.custom_fields, + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_form_entry_dict(), [{**self.model_to_dict(model, 'form_entry')}]) + self.assertEqual(self.all_form_entry_dict(), [{**self.model_to_dict(model, "form_entry")}]) diff --git a/breathecode/marketing/tests/urls/tests_lead_id.py b/breathecode/marketing/tests/urls/tests_lead_id.py index d3753a1ef..1b6a9b459 100644 --- a/breathecode/marketing/tests/urls/tests_lead_id.py +++ b/breathecode/marketing/tests/urls/tests_lead_id.py @@ -1,6 +1,7 @@ """ Test /academy/lead/id """ + from django.utils import timezone from datetime import timedelta import re, string @@ -19,62 +20,63 @@ def random_string(): - return ''.join(choices(string.ascii_letters, k=10)) + return "".join(choices(string.ascii_letters, k=10)) def generate_form_entry_kwargs(): """That random values is too long that i prefer have it in one function""" return { - 'fb_leadgen_id': randint(0, 9999), - 'fb_page_id': randint(0, 9999), - 
'fb_form_id': randint(0, 9999), - 'fb_adgroup_id': randint(0, 9999), - 'fb_ad_id': randint(0, 9999), - 'gclid': random_string(), - 'first_name': choice(['Rene', 'Albert', 'Immanuel']), - 'last_name': choice(['Descartes', 'Camus', 'Kant']), - 'email': choice(['a@a.com', 'b@b.com', 'c@c.com']), - 'phone': choice(['123', '456', '789']), - 'course': random_string(), - 'client_comments': random_string(), - 'location': random_string(), - 'language': random_string(), - 'utm_url': random_string(), - 'utm_medium': random_string(), - 'utm_campaign': random_string(), - 'utm_source': random_string(), - 'referral_key': random_string(), - 'gclid': random_string(), - 'tags': random_string(), - 'automations': random_string(), - 'street_address': random_string(), - 'country': random_string(), - 'city': random_string(), - 'latitude': randint(0, 9999), - 'longitude': randint(0, 9999), - 'state': random_string(), - 'zip_code': str(randint(0, 9999)), - 'browser_lang': random_string(), - 'storage_status': choice(['PENDING', 'PERSISTED']), - 'lead_type': choice(['STRONG', 'SOFT', 'DISCOVERY']), - 'deal_status': choice(['WON', 'LOST']), - 'sentiment': choice(['GOOD', 'BAD']), + "fb_leadgen_id": randint(0, 9999), + "fb_page_id": randint(0, 9999), + "fb_form_id": randint(0, 9999), + "fb_adgroup_id": randint(0, 9999), + "fb_ad_id": randint(0, 9999), + "gclid": random_string(), + "first_name": choice(["Rene", "Albert", "Immanuel"]), + "last_name": choice(["Descartes", "Camus", "Kant"]), + "email": choice(["a@a.com", "b@b.com", "c@c.com"]), + "phone": choice(["123", "456", "789"]), + "course": random_string(), + "client_comments": random_string(), + "location": random_string(), + "language": random_string(), + "utm_url": random_string(), + "utm_medium": random_string(), + "utm_campaign": random_string(), + "utm_source": random_string(), + "referral_key": random_string(), + "gclid": random_string(), + "tags": random_string(), + "automations": random_string(), + "street_address": random_string(), + "country": random_string(), + "city": random_string(), + "latitude": randint(0, 9999), + "longitude": randint(0, 9999), + "state": random_string(), + "zip_code": str(randint(0, 9999)), + "browser_lang": random_string(), + "storage_status": choice(["PENDING", "PERSISTED"]), + "lead_type": choice(["STRONG", "SOFT", "DISCOVERY"]), + "deal_status": choice(["WON", "LOST"]), + "sentiment": choice(["GOOD", "BAD"]), } class CohortLeadIdSuite(MarketingTestCase): """Test /academy/lead""" + """ 🔽🔽🔽 No auth """ def test_lead_id_no_auth(self): self.headers(academy=1) - url = reverse_lazy('marketing:academy_lead_id', kwargs={'lead_id': 1}) + url = reverse_lazy("marketing:academy_lead_id", kwargs={"lead_id": 1}) response = self.client.get(url) json = response.json() - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} self.assertEqual(json, expected) self.assertEqual(response.status_code, 401) @@ -85,12 +87,12 @@ def test_lead_id_no_auth(self): def test_lead_id_without_capability(self): self.headers(academy=1) - url = reverse_lazy('marketing:academy_lead_id', kwargs={'lead_id': 1}) + url = reverse_lazy("marketing:academy_lead_id", kwargs={"lead_id": 1}) self.generate_models(authenticate=True) response = self.client.get(url) json = response.json() - expected = {'detail': "You (user: 1) don't have this capability: read_lead for academy 1", 'status_code': 403} + expected = {"detail": "You (user: 1) don't have 
this capability: read_lead for academy 1", "status_code": 403} self.assertEqual(json, expected) self.assertEqual(response.status_code, 403) @@ -99,24 +101,24 @@ def test_lead_id_without_capability(self): 🔽🔽🔽 Single lead with data wrong id """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_lead_wrong_id(self): """Test /lead/:id/ with data wrong id""" self.headers(academy=1) - url = reverse_lazy('marketing:academy_lead_id', kwargs={'lead_id': 1}) + url = reverse_lazy("marketing:academy_lead_id", kwargs={"lead_id": 1}) model = self.generate_models( authenticate=True, profile_academy=True, - capability='read_lead', - role='potato', + capability="read_lead", + role="potato", ) response = self.client.get(url) json = response.json() - expected = {'detail': 'lead-not-found', 'status_code': 404} + expected = {"detail": "lead-not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, 404) @@ -125,90 +127,90 @@ def test_lead_wrong_id(self): 🔽🔽🔽 Single lead with data """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_single_lead(self): """Test /lead/:id/ with data""" self.headers(academy=1) - url = reverse_lazy('marketing:academy_lead_id', kwargs={'lead_id': 1}) + url = reverse_lazy("marketing:academy_lead_id", kwargs={"lead_id": 1}) model = self.generate_models( authenticate=True, profile_academy=True, - capability='read_lead', - role='potato', + capability="read_lead", + role="potato", form_entry=2, ) response = self.client.get(url) json = response.json() - self.assertDatetime(json['created_at']) - del json['created_at'] + self.assertDatetime(json["created_at"]) + del json["created_at"] expected = { - 'ac_expected_cohort': model.form_entry[0].ac_expected_cohort, - 'automation_objects': [], - 'tag_objects': [], - 'automations': model.form_entry[0].automations, - 'browser_lang': model.form_entry[0].browser_lang, - 'city': model.form_entry[0].city, - 'country': model.form_entry[0].country, - 'course': model.form_entry[0].course, - 'client_comments': model.form_entry[0].client_comments, - 'email': model.form_entry[0].email, - 'first_name': model.form_entry[0].first_name, - 'gclid': model.form_entry[0].gclid, - 'id': model.form_entry[0].id, - 'language': model.form_entry[0].language, - 'last_name': model.form_entry[0].last_name, - 'lead_type': model.form_entry[0].lead_type, - 'location': model.form_entry[0].location, - 'storage_status': model.form_entry[0].storage_status, - 'tags': model.form_entry[0].tags, - 'utm_campaign': model.form_entry[0].utm_campaign, - 'utm_medium': model.form_entry[0].utm_medium, - 'utm_content': model.form_entry[0].utm_content, - 'utm_source': model.form_entry[0].utm_source, - 'utm_url': model.form_entry[0].utm_url, - 'utm_placement': model.form_entry[0].utm_placement, - 
'utm_term': model.form_entry[0].utm_term, - 'utm_plan': model.form_entry[0].utm_plan, - 'custom_fields': model.form_entry[0].custom_fields, - 'sex': model.form_entry[0].sex, - 'latitude': model.form_entry[0].latitude, - 'longitude': model.form_entry[0].longitude, - 'phone': model.form_entry[0].phone, - 'user': model.form_entry[0].user, - 'referral_key': model.form_entry[0].referral_key, - 'state': model.form_entry[0].state, - 'storage_status_text': model.form_entry[0].storage_status_text, - 'street_address': model.form_entry[0].street_address, - 'won_at': model.form_entry[0].won_at, - 'updated_at': self.bc.datetime.to_iso_string(model.form_entry[0].updated_at), - 'lead_generation_app': model.form_entry[0].lead_generation_app, - 'fb_page_id': model.form_entry[0].fb_page_id, - 'fb_leadgen_id': model.form_entry[0].fb_leadgen_id, - 'fb_form_id': model.form_entry[0].fb_form_id, - 'fb_adgroup_id': model.form_entry[0].fb_adgroup_id, - 'fb_ad_id': model.form_entry[0].fb_ad_id, - 'deal_status': model.form_entry[0].deal_status, - 'current_download': model.form_entry[0].current_download, - 'contact': model.form_entry[0].contact, - 'ac_deal_id': model.form_entry[0].ac_deal_id, - 'ac_contact_id': model.form_entry[0].ac_contact_id, - 'sentiment': model.form_entry[0].sentiment, - 'ac_expected_cohort_date': model.form_entry[0].ac_expected_cohort_date, - 'ac_deal_location': model.form_entry[0].ac_deal_location, - 'ac_deal_course': model.form_entry[0].ac_deal_course, - 'ac_deal_owner_id': model.form_entry[0].ac_deal_owner_id, - 'ac_deal_owner_full_name': model.form_entry[0].ac_deal_owner_full_name, - 'academy': { - 'id': model.form_entry[0].academy.id, - 'name': model.form_entry[0].academy.name, - 'slug': model.form_entry[0].academy.slug, + "ac_expected_cohort": model.form_entry[0].ac_expected_cohort, + "automation_objects": [], + "tag_objects": [], + "automations": model.form_entry[0].automations, + "browser_lang": model.form_entry[0].browser_lang, + "city": model.form_entry[0].city, + "country": model.form_entry[0].country, + "course": model.form_entry[0].course, + "client_comments": model.form_entry[0].client_comments, + "email": model.form_entry[0].email, + "first_name": model.form_entry[0].first_name, + "gclid": model.form_entry[0].gclid, + "id": model.form_entry[0].id, + "language": model.form_entry[0].language, + "last_name": model.form_entry[0].last_name, + "lead_type": model.form_entry[0].lead_type, + "location": model.form_entry[0].location, + "storage_status": model.form_entry[0].storage_status, + "tags": model.form_entry[0].tags, + "utm_campaign": model.form_entry[0].utm_campaign, + "utm_medium": model.form_entry[0].utm_medium, + "utm_content": model.form_entry[0].utm_content, + "utm_source": model.form_entry[0].utm_source, + "utm_url": model.form_entry[0].utm_url, + "utm_placement": model.form_entry[0].utm_placement, + "utm_term": model.form_entry[0].utm_term, + "utm_plan": model.form_entry[0].utm_plan, + "custom_fields": model.form_entry[0].custom_fields, + "sex": model.form_entry[0].sex, + "latitude": model.form_entry[0].latitude, + "longitude": model.form_entry[0].longitude, + "phone": model.form_entry[0].phone, + "user": model.form_entry[0].user, + "referral_key": model.form_entry[0].referral_key, + "state": model.form_entry[0].state, + "storage_status_text": model.form_entry[0].storage_status_text, + "street_address": model.form_entry[0].street_address, + "won_at": model.form_entry[0].won_at, + "updated_at": self.bc.datetime.to_iso_string(model.form_entry[0].updated_at), + 
"lead_generation_app": model.form_entry[0].lead_generation_app, + "fb_page_id": model.form_entry[0].fb_page_id, + "fb_leadgen_id": model.form_entry[0].fb_leadgen_id, + "fb_form_id": model.form_entry[0].fb_form_id, + "fb_adgroup_id": model.form_entry[0].fb_adgroup_id, + "fb_ad_id": model.form_entry[0].fb_ad_id, + "deal_status": model.form_entry[0].deal_status, + "current_download": model.form_entry[0].current_download, + "contact": model.form_entry[0].contact, + "ac_deal_id": model.form_entry[0].ac_deal_id, + "ac_contact_id": model.form_entry[0].ac_contact_id, + "sentiment": model.form_entry[0].sentiment, + "ac_expected_cohort_date": model.form_entry[0].ac_expected_cohort_date, + "ac_deal_location": model.form_entry[0].ac_deal_location, + "ac_deal_course": model.form_entry[0].ac_deal_course, + "ac_deal_owner_id": model.form_entry[0].ac_deal_owner_id, + "ac_deal_owner_full_name": model.form_entry[0].ac_deal_owner_full_name, + "academy": { + "id": model.form_entry[0].academy.id, + "name": model.form_entry[0].academy.name, + "slug": model.form_entry[0].academy.slug, }, - 'zip_code': model.form_entry[0].zip_code + "zip_code": model.form_entry[0].zip_code, } self.assertEqual(json, expected) @@ -218,25 +220,25 @@ def test_single_lead(self): 🔽🔽🔽 Update lead with wrong id """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_update_lead_wrong_id(self): """Test /lead/:id/ with data wrong id""" self.headers(academy=1) - url = reverse_lazy('marketing:academy_lead_id', kwargs={'lead_id': 1}) + url = reverse_lazy("marketing:academy_lead_id", kwargs={"lead_id": 1}) model = self.generate_models( authenticate=True, profile_academy=True, - capability='crud_lead', - role='potato', + capability="crud_lead", + role="potato", ) - data = {'first_name': 'Juan'} - response = self.client.put(url, data, format='json') + data = {"first_name": "Juan"} + response = self.client.put(url, data, format="json") json = response.json() - expected = {'detail': 'lead-not-found', 'status_code': 400} + expected = {"detail": "lead-not-found", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, 400) @@ -245,103 +247,103 @@ def test_update_lead_wrong_id(self): 🔽🔽🔽 Update lead """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_update_lead(self): """Test /lead/:id/ with data wrong id""" self.headers(academy=1) - url = reverse_lazy('marketing:academy_lead_id', kwargs={'lead_id': 1}) + url = reverse_lazy("marketing:academy_lead_id", kwargs={"lead_id": 1}) model = self.generate_models( authenticate=True, profile_academy=True, - capability='crud_lead', - role='potato', + capability="crud_lead", + role="potato", form_entry=True, ) data = { - 'id': 1, - 'first_name': self.bc.fake.first_name(), - 'last_name': self.bc.fake.last_name(), - 'email': 
self.bc.fake.email(), - 'utm_url': self.bc.fake.url(), - 'utm_medium': self.bc.fake.slug(), - 'utm_campaign': self.bc.fake.slug(), - 'utm_source': self.bc.fake.slug(), - 'gclid': random_string(), + "id": 1, + "first_name": self.bc.fake.first_name(), + "last_name": self.bc.fake.last_name(), + "email": self.bc.fake.email(), + "utm_url": self.bc.fake.url(), + "utm_medium": self.bc.fake.slug(), + "utm_campaign": self.bc.fake.slug(), + "utm_source": self.bc.fake.slug(), + "gclid": random_string(), } - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() - self.assertDatetime(json['created_at']) - del json['created_at'] - self.assertDatetime(json['updated_at']) - del json['updated_at'] - del json['custom_fields'] + self.assertDatetime(json["created_at"]) + del json["created_at"] + self.assertDatetime(json["updated_at"]) + del json["updated_at"] + del json["custom_fields"] expected = { - 'ac_contact_id': model.form_entry.ac_contact_id, - 'ac_deal_id': model.form_entry.ac_deal_id, - 'ac_expected_cohort': model.form_entry.ac_expected_cohort, - 'academy': { - 'id': model.form_entry.academy.id, - 'name': model.form_entry.academy.name, - 'slug': model.form_entry.academy.slug + "ac_contact_id": model.form_entry.ac_contact_id, + "ac_deal_id": model.form_entry.ac_deal_id, + "ac_expected_cohort": model.form_entry.ac_expected_cohort, + "academy": { + "id": model.form_entry.academy.id, + "name": model.form_entry.academy.name, + "slug": model.form_entry.academy.slug, }, - 'automation_objects': [], - 'tag_objects': [], - 'automations': model.form_entry.automations, - 'fb_ad_id': model.form_entry.fb_ad_id, - 'fb_adgroup_id': model.form_entry.fb_adgroup_id, - 'fb_form_id': model.form_entry.fb_form_id, - 'fb_leadgen_id': model.form_entry.fb_leadgen_id, - 'fb_page_id': model.form_entry.fb_page_id, - 'current_download': model.form_entry.current_download, - 'contact': model.form_entry.contact, - 'deal_status': model.form_entry.deal_status, - 'browser_lang': model.form_entry.browser_lang, - 'city': model.form_entry.city, - 'country': model.form_entry.country, - 'course': model.form_entry.course, - 'client_comments': model.form_entry.client_comments, - 'email': model.form_entry.email, - 'first_name': model.form_entry.first_name, - 'gclid': model.form_entry.gclid, - 'id': model.form_entry.id, - 'language': model.form_entry.language, - 'last_name': model.form_entry.last_name, - 'lead_type': model.form_entry.lead_type, - 'location': model.form_entry.location, - 'storage_status': model.form_entry.storage_status, - 'tags': model.form_entry.tags, - 'utm_campaign': model.form_entry.utm_campaign, - 'utm_medium': model.form_entry.utm_medium, - 'utm_content': model.form_entry.utm_content, - 'utm_source': model.form_entry.utm_source, - 'utm_placement': model.form_entry.utm_placement, - 'utm_term': model.form_entry.utm_term, - 'utm_plan': model.form_entry.utm_plan, - 'sex': model.form_entry.sex, - 'utm_url': model.form_entry.utm_url, - 'latitude': model.form_entry.latitude, - 'longitude': model.form_entry.longitude, - 'phone': model.form_entry.phone, - 'user': model.form_entry.user, - 'referral_key': model.form_entry.referral_key, - 'state': model.form_entry.state, - 'storage_status_text': model.form_entry.storage_status_text, - 'street_address': model.form_entry.street_address, - 'won_at': model.form_entry.won_at, - 'zip_code': model.form_entry.zip_code, - 'sentiment': model.form_entry.sentiment, - 'lead_generation_app': 
model.form_entry.ac_deal_location, - 'ac_deal_location': model.form_entry.ac_deal_owner_full_name, - 'ac_deal_course': model.form_entry.ac_deal_course, - 'ac_deal_owner_full_name': model.form_entry.ac_deal_owner_full_name, - 'ac_deal_owner_id': model.form_entry.ac_deal_owner_id, - 'ac_expected_cohort_date': model.form_entry.ac_expected_cohort_date, - **data + "automation_objects": [], + "tag_objects": [], + "automations": model.form_entry.automations, + "fb_ad_id": model.form_entry.fb_ad_id, + "fb_adgroup_id": model.form_entry.fb_adgroup_id, + "fb_form_id": model.form_entry.fb_form_id, + "fb_leadgen_id": model.form_entry.fb_leadgen_id, + "fb_page_id": model.form_entry.fb_page_id, + "current_download": model.form_entry.current_download, + "contact": model.form_entry.contact, + "deal_status": model.form_entry.deal_status, + "browser_lang": model.form_entry.browser_lang, + "city": model.form_entry.city, + "country": model.form_entry.country, + "course": model.form_entry.course, + "client_comments": model.form_entry.client_comments, + "email": model.form_entry.email, + "first_name": model.form_entry.first_name, + "gclid": model.form_entry.gclid, + "id": model.form_entry.id, + "language": model.form_entry.language, + "last_name": model.form_entry.last_name, + "lead_type": model.form_entry.lead_type, + "location": model.form_entry.location, + "storage_status": model.form_entry.storage_status, + "tags": model.form_entry.tags, + "utm_campaign": model.form_entry.utm_campaign, + "utm_medium": model.form_entry.utm_medium, + "utm_content": model.form_entry.utm_content, + "utm_source": model.form_entry.utm_source, + "utm_placement": model.form_entry.utm_placement, + "utm_term": model.form_entry.utm_term, + "utm_plan": model.form_entry.utm_plan, + "sex": model.form_entry.sex, + "utm_url": model.form_entry.utm_url, + "latitude": model.form_entry.latitude, + "longitude": model.form_entry.longitude, + "phone": model.form_entry.phone, + "user": model.form_entry.user, + "referral_key": model.form_entry.referral_key, + "state": model.form_entry.state, + "storage_status_text": model.form_entry.storage_status_text, + "street_address": model.form_entry.street_address, + "won_at": model.form_entry.won_at, + "zip_code": model.form_entry.zip_code, + "sentiment": model.form_entry.sentiment, + "lead_generation_app": model.form_entry.ac_deal_location, + "ac_deal_location": model.form_entry.ac_deal_owner_full_name, + "ac_deal_course": model.form_entry.ac_deal_course, + "ac_deal_owner_full_name": model.form_entry.ac_deal_owner_full_name, + "ac_deal_owner_id": model.form_entry.ac_deal_owner_id, + "ac_expected_cohort_date": model.form_entry.ac_expected_cohort_date, + **data, } self.assertEqual(json, expected) diff --git a/breathecode/marketing/tests/urls/tests_upload.py b/breathecode/marketing/tests/urls/tests_upload.py index 35f3cf5b9..e06810586 100644 --- a/breathecode/marketing/tests/urls/tests_upload.py +++ b/breathecode/marketing/tests/urls/tests_upload.py @@ -1,6 +1,7 @@ """ Test /v1/marketing/upload """ + import csv import tempfile import os @@ -21,7 +22,7 @@ class MarketingTestSuite(MarketingTestCase): def setUp(self): super().setUp() - self.file_name = '' + self.file_name = "" def tearDown(self): if self.file_name: @@ -32,11 +33,11 @@ def test_upload_without_auth(self): self.headers(content_disposition='attachment; filename="filename.csv"') - url = reverse_lazy('marketing:upload') + url = reverse_lazy("marketing:upload") data = {} response = self.client.put(url, data) json = response.json() - expected = 
{'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) @@ -46,11 +47,11 @@ def test_upload_wrong_academy(self): self.headers(academy=1, content_disposition='attachment; filename="filename.csv"') - url = reverse_lazy('marketing:upload') + url = reverse_lazy("marketing:upload") data = {} response = self.client.put(url, data) json = response.json() - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) @@ -60,71 +61,79 @@ def test_upload_without_capability(self): self.headers(academy=1, content_disposition='attachment; filename="filename.csv"') - url = reverse_lazy('marketing:upload') + url = reverse_lazy("marketing:upload") self.generate_models(authenticate=True) data = {} response = self.client.put(url, data) json = response.json() - expected = {'detail': "You (user: 1) don't have this capability: crud_media for academy 1", 'status_code': 403} + expected = {"detail": "You (user: 1) don't have this capability: crud_media for academy 1", "status_code": 403} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) - @patch.multiple('breathecode.services.google_cloud.Storage', - __init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) - @patch.multiple('breathecode.services.google_cloud.File', - __init__=MagicMock(return_value=None), - bucket=PropertyMock(), - file_name=PropertyMock(), - upload=MagicMock(), - url=MagicMock(return_value='https://storage.cloud.google.com/media-breathecode/hardcoded_url'), - create=True) + @patch.multiple( + "breathecode.services.google_cloud.Storage", + __init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, + ) + @patch.multiple( + "breathecode.services.google_cloud.File", + __init__=MagicMock(return_value=None), + bucket=PropertyMock(), + file_name=PropertyMock(), + upload=MagicMock(), + url=MagicMock(return_value="https://storage.cloud.google.com/media-breathecode/hardcoded_url"), + create=True, + ) def test_upload_with_csv_file(self): from breathecode.services.google_cloud import Storage, File self.headers(academy=1) - model = self.generate_models(authenticate=True, profile_academy=True, capability='crud_media', role='potato') - url = reverse_lazy('marketing:upload') + model = self.generate_models(authenticate=True, profile_academy=True, capability="crud_media", role="potato") + url = reverse_lazy("marketing:upload") response = self.client.put(url, {}) json = response.json() self.assertEqual(json, []) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('monitoring.CSVUpload'), []) + self.assertEqual(self.bc.database.list_of("monitoring.CSVUpload"), []) self.assertEqual(Storage.__init__.call_args_list, []) self.assertEqual(File.__init__.call_args_list, []) self.assertEqual(File.upload.call_args_list, []) self.assertEqual(File.url.call_args_list, []) - @patch('breathecode.marketing.tasks.create_form_entry.delay', MagicMock()) - @patch.multiple('breathecode.services.google_cloud.Storage', - __init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) - 
@patch.multiple('breathecode.services.google_cloud.File', - __init__=MagicMock(return_value=None), - bucket=PropertyMock(), - file_name=PropertyMock(), - upload=MagicMock(), - url=MagicMock(return_value='https://storage.cloud.google.com/media-breathecode/hardcoded_url'), - create=True) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("breathecode.marketing.tasks.create_form_entry.delay", MagicMock()) + @patch.multiple( + "breathecode.services.google_cloud.Storage", + __init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, + ) + @patch.multiple( + "breathecode.services.google_cloud.File", + __init__=MagicMock(return_value=None), + bucket=PropertyMock(), + file_name=PropertyMock(), + upload=MagicMock(), + url=MagicMock(return_value="https://storage.cloud.google.com/media-breathecode/hardcoded_url"), + create=True, + ) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_upload_random(self): from breathecode.services.google_cloud import Storage, File from breathecode.marketing.tasks import create_form_entry self.headers(academy=1) - model = self.generate_models(authenticate=True, profile_academy=True, capability='crud_media', role='potato') + model = self.generate_models(authenticate=True, profile_academy=True, capability="crud_media", role="potato") - url = reverse_lazy('marketing:upload') + url = reverse_lazy("marketing:upload") - file = tempfile.NamedTemporaryFile(suffix='.csv', delete=False, mode='w+') + file = tempfile.NamedTemporaryFile(suffix=".csv", delete=False, mode="w+") # list of name, degree, score first_names = [self.bc.fake.first_name() for _ in range(0, 3)] @@ -136,12 +145,12 @@ def test_upload_random(self): # dictionary of lists obj = { - 'first_name': first_names, - 'last_name': last_names, - 'email': emails, - 'location': locations, - 'phone': phone_numbers, - 'language': languages, + "first_name": first_names, + "last_name": last_names, + "email": emails, + "location": locations, + "phone": phone_numbers, + "language": languages, } df = pd.DataFrame(obj) @@ -152,65 +161,81 @@ def test_upload_random(self): df.to_csv(file.name) - with open(file.name, 'rb') as data: + with open(file.name, "rb") as data: hash = hashlib.sha256(data.read()).hexdigest() - with open(file.name, 'rb') as data: - response = self.client.put(url, {'name': file.name, 'file': data}) + with open(file.name, "rb") as data: + response = self.client.put(url, {"name": file.name, "file": data}) json = response.json() - file_name = file.name.split('/')[-1] - expected = [{'file_name': file_name, 'message': 'Despues', 'status': 'PENDING'}] + file_name = file.name.split("/")[-1] + expected = [{"file_name": file_name, "message": "Despues", "status": "PENDING"}] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(create_form_entry.delay.call_args_list, [ - call( - 1, **{ - 'first_name': df.iloc[0]['first_name'], - 'last_name': df.iloc[0]['last_name'], - 'email': df.iloc[0]['email'], - 'location': df.iloc[0]['location'], - 'phone': df.iloc[0]['phone'], - 'language': df.iloc[0]['language'], - }), - call( - 1, **{ - 'first_name': df.iloc[1]['first_name'], - 'last_name': df.iloc[1]['last_name'], - 'email': df.iloc[1]['email'], - 'location': df.iloc[1]['location'], - 'phone': df.iloc[1]['phone'], - 'language': df.iloc[1]['language'], - }), - call( - 1, **{ - 'first_name': df.iloc[2]['first_name'], - 'last_name': df.iloc[2]['last_name'], - 'email': df.iloc[2]['email'], - 'location': 
df.iloc[2]['location'], - 'phone': df.iloc[2]['phone'], - 'language': df.iloc[2]['language'], - }) - ]) - - self.assertEqual(self.bc.database.list_of('monitoring.CSVUpload'), - [{ - 'academy_id': 1, - 'hash': hash, - 'finished_at': UTC_NOW, - 'id': 1, - 'name': file_name, - 'status': 'PENDING', - 'log': '', - 'status_message': None, - 'url': 'https://storage.cloud.google.com/media-breathecode/hardcoded_url' - }]) + self.assertEqual( + create_form_entry.delay.call_args_list, + [ + call( + 1, + **{ + "first_name": df.iloc[0]["first_name"], + "last_name": df.iloc[0]["last_name"], + "email": df.iloc[0]["email"], + "location": df.iloc[0]["location"], + "phone": df.iloc[0]["phone"], + "language": df.iloc[0]["language"], + } + ), + call( + 1, + **{ + "first_name": df.iloc[1]["first_name"], + "last_name": df.iloc[1]["last_name"], + "email": df.iloc[1]["email"], + "location": df.iloc[1]["location"], + "phone": df.iloc[1]["phone"], + "language": df.iloc[1]["language"], + } + ), + call( + 1, + **{ + "first_name": df.iloc[2]["first_name"], + "last_name": df.iloc[2]["last_name"], + "email": df.iloc[2]["email"], + "location": df.iloc[2]["location"], + "phone": df.iloc[2]["phone"], + "language": df.iloc[2]["language"], + } + ), + ], + ) + + self.assertEqual( + self.bc.database.list_of("monitoring.CSVUpload"), + [ + { + "academy_id": 1, + "hash": hash, + "finished_at": UTC_NOW, + "id": 1, + "name": file_name, + "status": "PENDING", + "log": "", + "status_message": None, + "url": "https://storage.cloud.google.com/media-breathecode/hardcoded_url", + } + ], + ) self.assertEqual(Storage.__init__.call_args_list, [call()]) - self.assertEqual(File.__init__.call_args_list, [ - call(Storage().client.bucket('bucket'), hash), - ]) + self.assertEqual( + File.__init__.call_args_list, + [ + call(Storage().client.bucket("bucket"), hash), + ], + ) args, kwargs = File.upload.call_args_list[0] @@ -218,6 +243,6 @@ def test_upload_random(self): self.assertEqual(len(args), 1) self.assertEqual(args[0].name, os.path.basename(file.name)) - self.assertEqual(kwargs, {'content_type': 'text/csv'}) + self.assertEqual(kwargs, {"content_type": "text/csv"}) self.assertEqual(File.url.call_args_list, [call()]) diff --git a/breathecode/marketing/urls.py b/breathecode/marketing/urls.py index 3d5f6b102..186975e28 100644 --- a/breathecode/marketing/urls.py +++ b/breathecode/marketing/urls.py @@ -1,51 +1,74 @@ from django.urls import path -from .views import (CourseView, create_lead, create_lead_captcha, sync_tags_with_active_campaign, - sync_automations_with_active_campaign, receive_facebook_lead, get_leads, get_leads_report, - AcademyLeadView, AcademyWonLeadView, AcademyTagView, AcademyAutomationView, activecampaign_webhook, - googleads_enrollments, googleads_csv, get_downloadable, ShortLinkView, create_lead_from_app, - UTMView, AcademyProcessView, AcademyAppView, AcademyAliasView, ActiveCampaignView, UploadView, - validate_email_from_app, get_alias) +from .views import ( + CourseView, + create_lead, + create_lead_captcha, + sync_tags_with_active_campaign, + sync_automations_with_active_campaign, + receive_facebook_lead, + get_leads, + get_leads_report, + AcademyLeadView, + AcademyWonLeadView, + AcademyTagView, + AcademyAutomationView, + activecampaign_webhook, + googleads_enrollments, + googleads_csv, + get_downloadable, + ShortLinkView, + create_lead_from_app, + UTMView, + AcademyProcessView, + AcademyAppView, + AcademyAliasView, + ActiveCampaignView, + UploadView, + validate_email_from_app, + get_alias, +) -app_name = 'marketing' 
+app_name = "marketing" urlpatterns = [ - path('lead', create_lead, name='lead'), - path('lead-captcha', create_lead_captcha, name='lead_captcha'), - path('app', AcademyAppView.as_view(), name='app'), - path('app/<slug:app_slug>/lead', create_lead_from_app, name='app_slug_lead'), - path('app/validateemail', validate_email_from_app, name='app_email_validate'), - path('app/lead', create_lead_from_app, name='app_lead'), - path('lead/all', get_leads, name='lead_all'), - path('alias', get_alias, name='alias'), - path('academy/lead', AcademyLeadView.as_view(), name='academy_lead'), - path('academy/upload', UploadView.as_view(), name='upload'), - path('academy/lead/<int:lead_id>', AcademyLeadView.as_view(), name='academy_lead_id'), - path('academy/lead/process', AcademyProcessView.as_view(), name='academy_process_lead'), - path('academy/lead/won', AcademyWonLeadView.as_view(), name='academy_won_lead'), - path('academy/app', AcademyAppView.as_view(), name='app'), - path('academy/alias', AcademyAliasView.as_view(), name='academy_alias'), - path('academy/<int:academy_id>/tag/sync', sync_tags_with_active_campaign, name='academy_id_tag_sync'), - path('academy/<int:academt_id>/automation/sync', - sync_automations_with_active_campaign, - name='academy_id_automation_sync'), - path('academy/tag', AcademyTagView.as_view(), name='academy_tag'), - path('academy/tag/<str:tag_slug>', AcademyTagView.as_view(), name='academy_tag_slug'), - path('academy/automation', AcademyAutomationView.as_view(), name='academy_automation'), - path('academy/automation/<int:automation_id>', AcademyAutomationView.as_view(), name='academy_automation_id'), - path('academy/short', ShortLinkView.as_view(), name='short'), - path('academy/short/<slug:short_slug>', ShortLinkView.as_view(), name='short-slug'), - path('academy/utm', UTMView.as_view(), name='academy_utm'), - path('facebook/lead', receive_facebook_lead, name='facebook_all'), - path('report/lead', get_leads_report, name='report_lead'), - path('downloadable', get_downloadable, name='downloadable'), - path('downloadable/<str:slug>', get_downloadable, name='single_downloadable'), - + path("lead", create_lead, name="lead"), + path("lead-captcha", create_lead_captcha, name="lead_captcha"), + path("app", AcademyAppView.as_view(), name="app"), + path("app/<slug:app_slug>/lead", create_lead_from_app, name="app_slug_lead"), + path("app/validateemail", validate_email_from_app, name="app_email_validate"), + path("app/lead", create_lead_from_app, name="app_lead"), + path("lead/all", get_leads, name="lead_all"), + path("alias", get_alias, name="alias"), + path("academy/lead", AcademyLeadView.as_view(), name="academy_lead"), + path("academy/upload", UploadView.as_view(), name="upload"), + path("academy/lead/<int:lead_id>", AcademyLeadView.as_view(), name="academy_lead_id"), + path("academy/lead/process", AcademyProcessView.as_view(), name="academy_process_lead"), + path("academy/lead/won", AcademyWonLeadView.as_view(), name="academy_won_lead"), + path("academy/app", AcademyAppView.as_view(), name="app"), + path("academy/alias", AcademyAliasView.as_view(), name="academy_alias"), + path("academy/<int:academy_id>/tag/sync", sync_tags_with_active_campaign, name="academy_id_tag_sync"), + path( + "academy/<int:academt_id>/automation/sync", + sync_automations_with_active_campaign, + name="academy_id_automation_sync", + ), + path("academy/tag", AcademyTagView.as_view(), name="academy_tag"), + path("academy/tag/<str:tag_slug>", AcademyTagView.as_view(), name="academy_tag_slug"), + 
path("academy/automation", AcademyAutomationView.as_view(), name="academy_automation"), + path("academy/automation/<int:automation_id>", AcademyAutomationView.as_view(), name="academy_automation_id"), + path("academy/short", ShortLinkView.as_view(), name="short"), + path("academy/short/<slug:short_slug>", ShortLinkView.as_view(), name="short-slug"), + path("academy/utm", UTMView.as_view(), name="academy_utm"), + path("facebook/lead", receive_facebook_lead, name="facebook_all"), + path("report/lead", get_leads_report, name="report_lead"), + path("downloadable", get_downloadable, name="downloadable"), + path("downloadable/<str:slug>", get_downloadable, name="single_downloadable"), # path('report/summary', get_summary, name="report_summary"), - path('activecampaign', ActiveCampaignView.as_view(), name='activecampaign'), - path('activecampaign/<int:ac_id>', ActiveCampaignView.as_view(), name='activecampaign_id'), - path('activecampaign/webhook/<int:ac_academy_id>', activecampaign_webhook, name='activecampaign_webhook'), - path('activecampaign/webhook/<str:academy_slug>', activecampaign_webhook, name='activecampaign_webhook'), - path('googleads/enrollments/<str:academy_slugs>', googleads_enrollments, name='activecampaign_webhook'), - path('googleads/data', googleads_csv, name='googleads_csv'), - path('course', CourseView.as_view(), name='course'), - path('course/<slug:course_slug>', CourseView.as_view(), name='course_slug'), + path("activecampaign", ActiveCampaignView.as_view(), name="activecampaign"), + path("activecampaign/<int:ac_id>", ActiveCampaignView.as_view(), name="activecampaign_id"), + path("activecampaign/webhook/<int:ac_academy_id>", activecampaign_webhook, name="activecampaign_webhook"), + path("activecampaign/webhook/<str:academy_slug>", activecampaign_webhook, name="activecampaign_webhook"), + path("googleads/enrollments/<str:academy_slugs>", googleads_enrollments, name="activecampaign_webhook"), + path("googleads/data", googleads_csv, name="googleads_csv"), + path("course", CourseView.as_view(), name="course"), + path("course/<slug:course_slug>", CourseView.as_view(), name="course_slug"), ] diff --git a/breathecode/marketing/urls_shortner.py b/breathecode/marketing/urls_shortner.py index a0439df30..051b44982 100644 --- a/breathecode/marketing/urls_shortner.py +++ b/breathecode/marketing/urls_shortner.py @@ -1,7 +1,7 @@ from django.urls import path from .views import redirect_link -app_name = 'marketing' +app_name = "marketing" urlpatterns = [ - path('<str:link_slug>', redirect_link, name='slug'), + path("<str:link_slug>", redirect_link, name="slug"), ] diff --git a/breathecode/marketing/views.py b/breathecode/marketing/views.py index f41d99bfd..6e2eb557d 100644 --- a/breathecode/marketing/views.py +++ b/breathecode/marketing/views.py @@ -74,70 +74,70 @@ from .tasks import async_activecampaign_webhook, persist_single_lead, update_link_viewcount logger = logging.getLogger(__name__) -MIME_ALLOW = 'text/csv' -SYSTEM_EMAIL = os.getenv('SYSTEM_EMAIL') +MIME_ALLOW = "text/csv" +SYSTEM_EMAIL = os.getenv("SYSTEM_EMAIL") # Create your views here. 
-@api_view(['GET']) +@api_view(["GET"]) @permission_classes([AllowAny]) def get_downloadable(request, slug=None): if slug is not None: download = Downloadable.objects.filter(slug=slug).first() if download is None: - raise ValidationException('Document not found', 404, slug='not-found') + raise ValidationException("Document not found", 404, slug="not-found") - if request.GET.get('raw', None) == 'true': + if request.GET.get("raw", None) == "true": return HttpResponseRedirect(redirect_to=download.destination_url) seri = DownloadableSerializer(download, many=False) return Response(seri.data) items = Downloadable.objects.all() - academy = request.GET.get('academy', None) + academy = request.GET.get("academy", None) if academy is not None: - items = items.filter(academy__slug__in=academy.split(',')) + items = items.filter(academy__slug__in=academy.split(",")) - active = request.GET.get('active', None) + active = request.GET.get("active", None) if active is not None: - if active == 'true': + if active == "true": active = True else: active = False items = items.filter(active=active) - items = items.order_by('created_at') + items = items.order_by("created_at") serializer = DownloadableSerializer(items, many=True) return Response(serializer.data) -@api_view(['GET']) +@api_view(["GET"]) @permission_classes([AllowAny]) def get_alias(request): items = AcademyAlias.objects.all() - academy = request.GET.get('academy', None) + academy = request.GET.get("academy", None) if academy is not None: - items = items.filter(academy__id__in=academy.split(',')) + items = items.filter(academy__id__in=academy.split(",")) serializer = AcademyAliasSmallSerializer(items, many=True) return Response(serializer.data) -@api_view(['POST']) +@api_view(["POST"]) @permission_classes([AllowAny]) @validate_captcha_challenge def create_lead(request): data = request.data.copy() # remove spaces from phone - if 'phone' in data: - data['phone'] = data['phone'].replace(' ', '') + if "phone" in data: + data["phone"] = data["phone"].replace(" ", "") - if 'utm_url' in data and ('//localhost:' in data['utm_url'] or 'gitpod.io' in data['utm_url']): - print('Ignoring lead because its coming from development team') + if "utm_url" in data and ("//localhost:" in data["utm_url"] or "gitpod.io" in data["utm_url"]): + print("Ignoring lead because its coming from development team") return Response(data, status=status.HTTP_201_CREATED) serializer = PostFormEntrySerializer(data=data) @@ -151,18 +151,18 @@ def create_lead(request): return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) -@api_view(['POST']) +@api_view(["POST"]) @permission_classes([AllowAny]) @validate_captcha def create_lead_captcha(request): data = request.data.copy() # remove spaces from phone - if 'phone' in data: - data['phone'] = data['phone'].replace(' ', '') + if "phone" in data: + data["phone"] = data["phone"].replace(" ", "") - if 'utm_url' in data and ('//localhost:' in data['utm_url'] or 'gitpod.io' in data['utm_url']): - print('Ignoring lead because its coming from development team') + if "utm_url" in data and ("//localhost:" in data["utm_url"] or "gitpod.io" in data["utm_url"]): + print("Ignoring lead because its coming from development team") return Response(data, status=status.HTTP_201_CREATED) serializer = PostFormEntrySerializer(data=data) @@ -176,25 +176,25 @@ def create_lead_captcha(request): return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) -@api_view(['POST']) +@api_view(["POST"]) @permission_classes([AllowAny]) def 
create_lead_from_app(request, app_slug=None): - app_id = request.GET.get('app_id', None) + app_id = request.GET.get("app_id", None) if app_id is None: - raise ValidationException('Invalid app slug and/or id', code=400, slug='without-app-slug-or-app-id') + raise ValidationException("Invalid app slug and/or id", code=400, slug="without-app-slug-or-app-id") if app_slug is None: # try get the slug from the encoded app_id decoded_id = parse.unquote(app_id) - if ':' not in decoded_id: - raise ValidationException('Missing app slug', code=400, slug='without-app-slug-or-app-id') + if ":" not in decoded_id: + raise ValidationException("Missing app slug", code=400, slug="without-app-slug-or-app-id") else: - app_slug, app_id = decoded_id.split(':') + app_slug, app_id = decoded_id.split(":") app = LeadGenerationApp.objects.filter(slug=app_slug, app_id=app_id).first() if app is None: - raise ValidationException('App not found with those credentials', code=401, slug='without-app-id') + raise ValidationException("App not found with those credentials", code=401, slug="without-app-id") app.hits += 1 app.last_call_at = timezone.now() @@ -202,27 +202,27 @@ def create_lead_from_app(request, app_slug=None): ## apply defaults from the app payload = { - 'location': app.location, - 'language': app.language, - 'utm_url': app.utm_url, - 'utm_medium': app.utm_medium, - 'utm_campaign': app.utm_campaign, - 'utm_source': app.utm_source, - 'utm_plan': app.utm_plan, - 'academy': app.academy.id, - 'lead_generation_app': app.id + "location": app.location, + "language": app.language, + "utm_url": app.utm_url, + "utm_medium": app.utm_medium, + "utm_campaign": app.utm_campaign, + "utm_source": app.utm_source, + "utm_plan": app.utm_plan, + "academy": app.academy.id, + "lead_generation_app": app.id, } payload.update(request.data) - if 'automations' not in request.data: - payload['automations'] = ','.join([str(auto.slug) for auto in app.default_automations.all()]) + if "automations" not in request.data: + payload["automations"] = ",".join([str(auto.slug) for auto in app.default_automations.all()]) - if 'tags' not in request.data: - payload['tags'] = ','.join([tag.slug for tag in app.default_tags.all()]) + if "tags" not in request.data: + payload["tags"] = ",".join([tag.slug for tag in app.default_tags.all()]) # remove spaces from phone - if 'phone' in request.data: - payload['phone'] = payload['phone'].replace(' ', '') + if "phone" in request.data: + payload["phone"] = payload["phone"].replace(" ", "") serializer = PostFormEntrySerializer(data=payload) if serializer.is_valid(): @@ -230,29 +230,29 @@ def create_lead_from_app(request, app_slug=None): tasks.persist_single_lead.delay(serializer.data) - app.last_call_status = 'OK' + app.last_call_status = "OK" app.save() return Response(serializer.data, status=status.HTTP_201_CREATED) else: - app.last_call_status = 'ERROR' + app.last_call_status = "ERROR" app.last_call_log = json.dumps(serializer.errors) app.save() return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) -@api_view(['POST']) +@api_view(["POST"]) @permission_classes([AllowAny]) def validate_email_from_app(request): lang = get_user_language(request) data = request.data.copy() - email = data['email'] if 'email' in data else None + email = data["email"] if "email" in data else None if email is None: - raise ValidationException('Please provide an email to validate', code=400, slug='without-email') + raise ValidationException("Please provide an email to validate", code=400, slug="without-email") try: payload = 
validate_email(email, lang) @@ -262,13 +262,16 @@ def validate_email_from_app(request): except Exception: raise ValidationException( - translation(lang, - en='Error while validating email address', - es='Se ha producido un error validando tu dirección de correo electrónico', - slug='email-validation-error')) + translation( + lang, + en="Error while validating email address", + es="Se ha producido un error validando tu dirección de correo electrónico", + slug="email-validation-error", + ) + ) -@api_view(['POST']) +@api_view(["POST"]) @permission_classes([AllowAny]) @renderer_classes([PlainTextRenderer]) def activecampaign_webhook(request, ac_academy_id=None, academy_slug=None): @@ -276,32 +279,32 @@ def activecampaign_webhook(request, ac_academy_id=None, academy_slug=None): if ac_academy_id is not None: a = Academy.objects.filter(slug=ac_academy_id).first() if a is None: - raise APIException(f'Academy not found (id:{ac_academy_id}) ') + raise APIException(f"Academy not found (id:{ac_academy_id}) ") webhook = ActiveCampaign.add_webhook_to_log(request.data, a.slug) elif academy_slug is not None: webhook = ActiveCampaign.add_webhook_to_log(request.data, academy_slug) else: - raise APIException('Please specify a valid academy slug or id') + raise APIException("Please specify a valid academy slug or id") if webhook: async_activecampaign_webhook.delay(webhook.id) else: - logger.debug('One request cannot be parsed, maybe you should update `ActiveCampaign' - '.add_webhook_to_log`') - #logger.debug(request.data) + logger.debug("One request cannot be parsed, maybe you should update `ActiveCampaign" ".add_webhook_to_log`") + # logger.debug(request.data) # async_eventbrite_webhook(request.data) - return Response('ok', content_type='text/plain') + return Response("ok", content_type="text/plain") -@api_view(['GET']) +@api_view(["GET"]) @permission_classes([AllowAny]) @renderer_classes([CSVRenderer]) def googleads_enrollments(request, academy_slugs): - slugs = academy_slugs.split(',') - academies = FormEntry.objects.filter(Q(academy__slug__in=slugs) - | Q(ac_academy__academy__slug__in=slugs)).exclude(gclid__isnull=True) + slugs = academy_slugs.split(",") + academies = FormEntry.objects.filter(Q(academy__slug__in=slugs) | Q(ac_academy__academy__slug__in=slugs)).exclude( + gclid__isnull=True + ) serializer = FormEntrySerializer(academies, many=True) return Response(serializer.data) @@ -310,53 +313,54 @@ def googleads_enrollments(request, academy_slugs): # Create your views here. 
-@api_view(['POST', 'GET']) +@api_view(["POST", "GET"]) @permission_classes([AllowAny]) def receive_facebook_lead(request): - if request.method == 'GET': + if request.method == "GET": - challenge = 'no challenge' - if 'hub.challenge' in request.GET: - challenge = request.GET['hub.challenge'] + challenge = "no challenge" + if "hub.challenge" in request.GET: + challenge = request.GET["hub.challenge"] - verify_token = '' - if 'hub.verify_token' in request.GET: - verify_token = request.GET['hub.verify_token'] + verify_token = "" + if "hub.verify_token" in request.GET: + verify_token = request.GET["hub.verify_token"] - if verify_token == os.getenv('FACEBOOK_VERIFY_TOKEN', ''): + if verify_token == os.getenv("FACEBOOK_VERIFY_TOKEN", ""): return Response(int(challenge), status=status.HTTP_200_OK) else: return Response(int(challenge), status=status.HTTP_400_BAD_REQUEST) else: - if 'object' in request.data: - if request.data['object'] == 'page': - for entry in request.data['entry']: - for changes in entry['changes']: - if changes['field'] == 'leadgen': + if "object" in request.data: + if request.data["object"] == "page": + for entry in request.data["entry"]: + for changes in entry["changes"]: + if changes["field"] == "leadgen": serializer = PostFormEntrySerializer( data={ - 'fb_leadgen_id': changes['value']['leadgen_id'], - 'fb_page_id': changes['value']['page_id'], - 'fb_form_id': changes['value']['form_id'], - 'fb_adgroup_id': changes['value']['adgroup_id'], - 'fb_ad_id': changes['value']['ad_id'] - }) + "fb_leadgen_id": changes["value"]["leadgen_id"], + "fb_page_id": changes["value"]["page_id"], + "fb_form_id": changes["value"]["form_id"], + "fb_adgroup_id": changes["value"]["adgroup_id"], + "fb_ad_id": changes["value"]["ad_id"], + } + ) if serializer.is_valid(): serializer.save() # persist_single_lead.delay(request.data) return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - return Response({'details': 'No leads found'}, status=status.HTTP_400_BAD_REQUEST) + return Response({"details": "No leads found"}, status=status.HTTP_400_BAD_REQUEST) # Create your views here. -@api_view(['GET']) +@api_view(["GET"]) def sync_tags_with_active_campaign(request, academy_id): academy = ActiveCampaignAcademy.objects.filter(academy__id=academy_id).first() if academy is None: - raise APIException('Academy not found') + raise APIException("Academy not found") tags = sync_tags(academy) return Response(tags, status=status.HTTP_200_OK) @@ -365,12 +369,12 @@ def sync_tags_with_active_campaign(request, academy_id): # Create your views here. 
-@api_view(['GET']) +@api_view(["GET"]) def sync_automations_with_active_campaign(request, academy_id): academy = ActiveCampaignAcademy.objects.filter(academy__id=academy_id).first() if academy is None: - raise APIException('Academy not found') + raise APIException("Academy not found") tags = sync_automations(academy) return Response(tags, status=status.HTTP_200_OK) @@ -379,56 +383,56 @@ def sync_automations_with_active_campaign(request, academy_id): def redirect_link(request, link_slug): short_link = ShortLink.objects.filter(slug=link_slug, active=True).first() if short_link is None: - return HttpResponseNotFound('URL not found') + return HttpResponseNotFound("URL not found") update_link_viewcount.delay(short_link.slug) params = {} if short_link.utm_source is not None: - params['utm_source'] = short_link.utm_source + params["utm_source"] = short_link.utm_source if short_link.utm_content is not None: - params['utm_content'] = short_link.utm_content + params["utm_content"] = short_link.utm_content if short_link.utm_medium is not None: - params['utm_medium'] = short_link.utm_medium + params["utm_medium"] = short_link.utm_medium if short_link.utm_campaign is not None: - params['utm_campaign'] = short_link.utm_campaign + params["utm_campaign"] = short_link.utm_campaign destination_params = {} - url_parts = short_link.destination.split('?') + url_parts = short_link.destination.split("?") if len(url_parts) > 1: destination_params = dict(parse.parse_qsl(url_parts[1])) params = {**destination_params, **params} - return HttpResponseRedirect(redirect_to=url_parts[0] + '?' + parse.urlencode(params)) + return HttpResponseRedirect(redirect_to=url_parts[0] + "?" + parse.urlencode(params)) -@api_view(['GET']) +@api_view(["GET"]) def get_leads(request, id=None): items = FormEntry.objects.all() if isinstance(request.user, AnonymousUser) == False: items = localize_query(items, request) - academy = request.GET.get('academy', None) + academy = request.GET.get("academy", None) if academy is not None: - items = items.filter(academy__slug__in=academy.split(',')) + items = items.filter(academy__slug__in=academy.split(",")) - start = request.GET.get('start', None) + start = request.GET.get("start", None) if start is not None: - start_date = datetime.datetime.strptime(start, '%Y-%m-%d').date() + start_date = datetime.datetime.strptime(start, "%Y-%m-%d").date() items = items.filter(created_at__gte=start_date) - end = request.GET.get('end', None) + end = request.GET.get("end", None) if end is not None: - end_date = datetime.datetime.strptime(end, '%Y-%m-%d').date() + end_date = datetime.datetime.strptime(end, "%Y-%m-%d").date() items = items.filter(created_at__lte=end_date) - items = items.order_by('created_at') + items = items.order_by("created_at") serializer = FormEntrySerializer(items, many=True) return Response(serializer.data) -@api_view(['GET']) +@api_view(["GET"]) def get_leads_report(request, id=None): items = FormEntry.objects.all() @@ -437,31 +441,32 @@ def get_leads_report(request, id=None): # filter only to the local academy items = localize_query(items, request) - group_by = request.GET.get('by', 'location,created_at__date,course') - if group_by != '': - group_by = group_by.split(',') + group_by = request.GET.get("by", "location,created_at__date,course") + if group_by != "": + group_by = group_by.split(",") else: - group_by = ['location', 'created_at__date', 'course'] + group_by = ["location", "created_at__date", "course"] - academy = request.GET.get('academy', None) + academy = 
request.GET.get("academy", None) if academy is not None: - items = items.filter(location__in=academy.split(',')) + items = items.filter(location__in=academy.split(",")) - start = request.GET.get('start', None) + start = request.GET.get("start", None) if start is not None: - start_date = datetime.datetime.strptime(start, '%Y-%m-%d').date() + start_date = datetime.datetime.strptime(start, "%Y-%m-%d").date() items = items.filter(created_at__gte=start_date) - end = request.GET.get('end', None) + end = request.GET.get("end", None) if end is not None: - end_date = datetime.datetime.strptime(end, '%Y-%m-%d').date() + end_date = datetime.datetime.strptime(end, "%Y-%m-%d").date() items = items.filter(created_at__lte=end_date) - items = items.values(*group_by).annotate(total_leads=Count('location')) + items = items.values(*group_by).annotate(total_leads=Count("location")) - if 'created_at__date' in group_by: + if "created_at__date" in group_by: items = items.annotate( - created_date=Func(F('created_at'), Value('YYYYMMDD'), function='to_char', output_field=CharField())) + created_date=Func(F("created_at"), Value("YYYYMMDD"), function="to_char", output_field=CharField()) + ) # items = items.order_by('created_at') return Response(items) @@ -470,65 +475,61 @@ class AcademyTagView(APIView, GenerateLookupsMixin): """ List all snippets, or create a new snippet. """ - extensions = APIViewExtensions(sort='-created_at', paginate=True) - @capable_of('read_tag') + extensions = APIViewExtensions(sort="-created_at", paginate=True) + + @capable_of("read_tag") def get(self, request, format=None, academy_id=None): handler = self.extensions(request) items = Tag.objects.filter(ac_academy__academy__id=academy_id) - like = request.GET.get('like', None) + like = request.GET.get("like", None) if like is not None: items = items.filter(slug__icontains=like) - status = request.GET.get('status', None) + status = request.GET.get("status", None) if status is not None: aproved = True - if status == 'DISPUTED': + if status == "DISPUTED": aproved = False items = items.filter(disputed_at__isnull=aproved) - types = request.GET.get('type', None) + types = request.GET.get("type", None) if types is not None: - _types = types.split(',') + _types = types.split(",") items = items.filter(tag_type__in=[x.upper() for x in _types]) items = handler.queryset(items) serializer = TagSmallSerializer(items, many=True) return handler.response(serializer.data) - @capable_of('crud_tag') + @capable_of("crud_tag") def put(self, request, tag_slug=None, academy_id=None): many = isinstance(request.data, list) if not many: tag = Tag.objects.filter(slug=tag_slug, ac_academy__academy__id=academy_id).first() if tag is None: - raise ValidationException(f'Tag {tag_slug} not found for this academy', slug='tag-not-found') + raise ValidationException(f"Tag {tag_slug} not found for this academy", slug="tag-not-found") else: tag = [] index = -1 for x in request.data: index = index + 1 - if 'id' not in x: - raise ValidationException('Cannot determine tag in ' - f'index {index}', slug='without-id') + if "id" not in x: + raise ValidationException("Cannot determine tag in " f"index {index}", slug="without-id") - instance = Tag.objects.filter(id=x['id'], ac_academy__academy__id=academy_id).first() + instance = Tag.objects.filter(id=x["id"], ac_academy__academy__id=academy_id).first() if not instance: - raise ValidationException(f'Tag({x["id"]}) does not exist on this academy', - code=404, - slug='not-found') + raise ValidationException( + f'Tag({x["id"]}) does not exist 
on this academy', code=404, slug="not-found" + ) tag.append(instance) - serializer = PUTTagSerializer(tag, - data=request.data, - context={ - 'request': request, - 'academy': academy_id - }, - many=many) + serializer = PUTTagSerializer( + tag, data=request.data, context={"request": request, "academy": academy_id}, many=many + ) if serializer.is_valid(): serializer.save() return Response(serializer.data, status=status.HTTP_200_OK) @@ -539,58 +540,55 @@ class AcademyAutomationView(APIView, GenerateLookupsMixin): """ List all snippets, or create a new snippet. """ - extensions = APIViewExtensions(sort='-created_at', paginate=True) - @capable_of('read_tag') + extensions = APIViewExtensions(sort="-created_at", paginate=True) + + @capable_of("read_tag") def get(self, request, format=None, academy_id=None): handler = self.extensions(request) items = Automation.objects.filter(ac_academy__academy__id=academy_id) - like = request.GET.get('like', None) + like = request.GET.get("like", None) if like is not None: items = items.filter(Q(slug__icontains=like) | Q(name__icontains=like)) - status = request.GET.get('status', None) + status = request.GET.get("status", None) if status is not None: - _status = status.split(',') + _status = status.split(",") items = items.filter(status__in=[x.upper() for x in _status]) items = handler.queryset(items) serializer = AutomationSmallSerializer(items, many=True) return handler.response(serializer.data) - @capable_of('crud_tag') + @capable_of("crud_tag") def put(self, request, automation_id=None, academy_id=None): many = isinstance(request.data, list) if not many: automation = Automation.objects.filter(id=automation_id, ac_academy__academy__id=academy_id).first() if automation is None: - raise ValidationException(f'Automation {automation_id} not found for this academy', - slug='automation-not-found') + raise ValidationException( + f"Automation {automation_id} not found for this academy", slug="automation-not-found" + ) else: automation = [] index = -1 for x in request.data: index = index + 1 - if 'id' not in x: - raise ValidationException('Cannot determine automation in ' - f'index {index}', slug='without-id') + if "id" not in x: + raise ValidationException("Cannot determine automation in " f"index {index}", slug="without-id") - instance = Automation.objects.filter(id=x['id'], ac_academy__academy__id=academy_id).first() + instance = Automation.objects.filter(id=x["id"], ac_academy__academy__id=academy_id).first() if not instance: - raise ValidationException(f'Automation({x["id"]}) does not exist on this academy', - code=404, - slug='not-found') + raise ValidationException( + f'Automation({x["id"]}) does not exist on this academy', code=404, slug="not-found" + ) automation.append(instance) - serializer = PUTAutomationSerializer(automation, - data=request.data, - context={ - 'request': request, - 'academy': academy_id - }, - many=many) + serializer = PUTAutomationSerializer( + automation, data=request.data, context={"request": request, "academy": academy_id}, many=many + ) if serializer.is_valid(): serializer.save() return Response(serializer.data, status=status.HTTP_200_OK) @@ -602,7 +600,7 @@ class AcademyAppView(APIView, GenerateLookupsMixin): List all snippets, or create a new snippet. 
""" - @capable_of('read_lead_gen_app') + @capable_of("read_lead_gen_app") def get(self, request, academy_id=None): apps = LeadGenerationApp.objects.filter(academy__id=academy_id) @@ -616,7 +614,7 @@ class AcademyAliasView(APIView, GenerateLookupsMixin): List all snippets, or create a new snippet. """ - @capable_of('read_my_academy') + @capable_of("read_my_academy") def get(self, request, academy_id): alias = AcademyAlias.objects.filter(academy__id=academy_id) @@ -630,18 +628,18 @@ class UTMView(APIView, GenerateLookupsMixin): List all snippets, or create a new snippet. """ - @capable_of('read_lead') + @capable_of("read_lead") def get(self, request, format=None, academy_id=None): utms = UTMField.objects.filter(academy__id=academy_id) - like = request.GET.get('like', None) + like = request.GET.get("like", None) if like is not None: utms = utms.filter(slug__icontains=like) - types = request.GET.get('type', None) + types = request.GET.get("type", None) if types is not None: - _types = types.split(',') + _types = types.split(",") utms = utms.filter(utm_type__in=[x.upper() for x in _types]) serializer = UTMSmallSerializer(utms, many=True) @@ -653,42 +651,42 @@ class AcademyWonLeadView(APIView, HeaderLimitOffsetPagination, GenerateLookupsMi List all snippets, or create a new snippet. """ - @capable_of('read_won_lead') + @capable_of("read_won_lead") def get(self, request, format=None, academy_id=None): academy = Academy.objects.get(id=academy_id) - items = FormEntry.objects.filter(academy__id=academy.id, deal_status='WON') + items = FormEntry.objects.filter(academy__id=academy.id, deal_status="WON") lookup = {} - start = request.GET.get('start', '') - if start != '': - start_date = datetime.datetime.strptime(start, '%Y-%m-%d').date() - lookup['created_at__gte'] = start_date + start = request.GET.get("start", "") + if start != "": + start_date = datetime.datetime.strptime(start, "%Y-%m-%d").date() + lookup["created_at__gte"] = start_date - end = request.GET.get('end', '') - if end != '': - end_date = datetime.datetime.strptime(end, '%Y-%m-%d').date() - lookup['created_at__lte'] = end_date + end = request.GET.get("end", "") + if end != "": + end_date = datetime.datetime.strptime(end, "%Y-%m-%d").date() + lookup["created_at__lte"] = end_date - if 'storage_status' in self.request.GET: - param = self.request.GET.get('storage_status') - lookup['storage_status'] = param + if "storage_status" in self.request.GET: + param = self.request.GET.get("storage_status") + lookup["storage_status"] = param - course = request.GET.get('course', '') - if course != '': - lookup['course__in'] = course.split(',') + course = request.GET.get("course", "") + if course != "": + lookup["course__in"] = course.split(",") - location = request.GET.get('location', '') - if location != '': - lookup['location__in'] = location.split(',') + location = request.GET.get("location", "") + if location != "": + lookup["location__in"] = location.split(",") - sort_by = '-created_at' - if 'sort' in self.request.GET and self.request.GET['sort'] != '': - sort_by = self.request.GET.get('sort') + sort_by = "-created_at" + if "sort" in self.request.GET and self.request.GET["sort"] != "": + sort_by = self.request.GET.get("sort") items = items.filter(**lookup).order_by(sort_by) - like = request.GET.get('like', None) + like = request.GET.get("like", None) if like is not None: items = query_like_by_full_name(like=like, items=items) @@ -703,17 +701,17 @@ def get(self, request, format=None, academy_id=None): class AcademyProcessView(APIView, 
GenerateLookupsMixin): - @capable_of('crud_lead') + @capable_of("crud_lead") def put(self, request, academy_id=None): - lookups = self.generate_lookups(request, many_fields=['id']) + lookups = self.generate_lookups(request, many_fields=["id"]) if not lookups: - raise ValidationException('Missing id parameters in the querystring', code=400) + raise ValidationException("Missing id parameters in the querystring", code=400) items = FormEntry.objects.filter(**lookups, academy__id=academy_id) for item in items: persist_single_lead.delay(item.to_form_data()) - return Response({'details': f'{items.count()} leads added to the processing queue'}, status=status.HTTP_200_OK) + return Response({"details": f"{items.count()} leads added to the processing queue"}, status=status.HTTP_200_OK) class AcademyLeadView(APIView, GenerateLookupsMixin): @@ -721,16 +719,16 @@ class AcademyLeadView(APIView, GenerateLookupsMixin): List all snippets, or create a new snippet. """ - extensions = APIViewExtensions(sort='-created_at', paginate=True) + extensions = APIViewExtensions(sort="-created_at", paginate=True) - @capable_of('read_lead') + @capable_of("read_lead") def get(self, request, academy_id=None, lead_id=None): handler = self.extensions(request) if lead_id is not None: single_lead = FormEntry.objects.filter(id=lead_id, academy__id=academy_id).first() if single_lead is None: - raise ValidationException(f'Lead {lead_id} not found', 404, slug='lead-not-found') + raise ValidationException(f"Lead {lead_id} not found", 404, slug="lead-not-found") serializer = FormEntryBigSerializer(single_lead, many=False) return handler.response(serializer.data) @@ -739,79 +737,82 @@ def get(self, request, academy_id=None, lead_id=None): items = FormEntry.objects.filter(academy__id=academy.id) lookup = {} - start = request.GET.get('start', None) + start = request.GET.get("start", None) if start is not None: - start_date = datetime.datetime.strptime(start, '%Y-%m-%d').date() - lookup['created_at__gte'] = start_date + start_date = datetime.datetime.strptime(start, "%Y-%m-%d").date() + lookup["created_at__gte"] = start_date - end = request.GET.get('end', None) + end = request.GET.get("end", None) if end is not None: - end_date = datetime.datetime.strptime(end, '%Y-%m-%d').date() - lookup['created_at__lte'] = end_date - - if 'storage_status' in self.request.GET: - param = self.request.GET.get('storage_status') - lookup['storage_status'] = param - - if 'deal_status' in self.request.GET: - param = self.request.GET.get('deal_status') - lookup['deal_status'] = param.upper() - - if 'course' in self.request.GET: - param = self.request.GET.get('course') - lookup['course__in'] = [x.strip() for x in param.split(',')] - - if 'ac_deal_id' in self.request.GET: - param = self.request.GET.get('ac_deal_id') - lookup['ac_deal_id'] = param - - if 'location' in self.request.GET or 'location_alias' in self.request.GET: - param = self.request.GET.get('location') if self.request.GET.get( - 'location') is not None else self.request.GET.get('location_alias') - lookup['location__in'] = [x.strip() for x in param.split(',')] - - if 'deal_location' in self.request.GET: - param = self.request.GET.get('deal_location') - lookup['ac_deal_location__in'] = [x.strip() for x in param.split(',')] - - if 'deal_course' in self.request.GET: - param = self.request.GET.get('deal_course') - lookup['ac_deal_course__in'] = [x.strip() for x in param.split(',')] - - if 'utm_medium' in self.request.GET: - param = self.request.GET.get('utm_medium') + end_date = 
datetime.datetime.strptime(end, "%Y-%m-%d").date() + lookup["created_at__lte"] = end_date + + if "storage_status" in self.request.GET: + param = self.request.GET.get("storage_status") + lookup["storage_status"] = param + + if "deal_status" in self.request.GET: + param = self.request.GET.get("deal_status") + lookup["deal_status"] = param.upper() + + if "course" in self.request.GET: + param = self.request.GET.get("course") + lookup["course__in"] = [x.strip() for x in param.split(",")] + + if "ac_deal_id" in self.request.GET: + param = self.request.GET.get("ac_deal_id") + lookup["ac_deal_id"] = param + + if "location" in self.request.GET or "location_alias" in self.request.GET: + param = ( + self.request.GET.get("location") + if self.request.GET.get("location") is not None + else self.request.GET.get("location_alias") + ) + lookup["location__in"] = [x.strip() for x in param.split(",")] + + if "deal_location" in self.request.GET: + param = self.request.GET.get("deal_location") + lookup["ac_deal_location__in"] = [x.strip() for x in param.split(",")] + + if "deal_course" in self.request.GET: + param = self.request.GET.get("deal_course") + lookup["ac_deal_course__in"] = [x.strip() for x in param.split(",")] + + if "utm_medium" in self.request.GET: + param = self.request.GET.get("utm_medium") items = items.filter(utm_medium__icontains=param) - if 'utm_url' in self.request.GET: - param = self.request.GET.get('utm_url') + if "utm_url" in self.request.GET: + param = self.request.GET.get("utm_url") items = items.filter(utm_url__icontains=param) - if 'utm_campaign' in self.request.GET: - param = self.request.GET.get('utm_campaign') + if "utm_campaign" in self.request.GET: + param = self.request.GET.get("utm_campaign") items = items.filter(utm_campaign__icontains=param) - if 'utm_source' in self.request.GET: - param = self.request.GET.get('utm_source') + if "utm_source" in self.request.GET: + param = self.request.GET.get("utm_source") items = items.filter(utm_source__icontains=param) - if 'utm_term' in self.request.GET: - param = self.request.GET.get('utm_term') + if "utm_term" in self.request.GET: + param = self.request.GET.get("utm_term") items = items.filter(utm_term__icontains=param) - if 'tags' in self.request.GET: - lookups = self.generate_lookups(request, many_fields=['tags']) - items = items.filter(tag_objects__slug__in=lookups['tags__in']) + if "tags" in self.request.GET: + lookups = self.generate_lookups(request, many_fields=["tags"]) + items = items.filter(tag_objects__slug__in=lookups["tags__in"]) items = items.filter(**lookup) - like = request.GET.get('like', None) + like = request.GET.get("like", None) if like is not None: items = query_like_by_full_name(like=like, items=items) items = handler.queryset(items) - only_first = request.GET.get('only_first', None) - if only_first is not None and only_first.lower() == 'true': + only_first = request.GET.get("only_first", None) + if only_first is not None and only_first.lower() == "true": first = items.first() first = [first] if first is not None else [] serializer = FormEntryHookSerializer(first, many=True) @@ -821,47 +822,47 @@ def get(self, request, academy_id=None, lead_id=None): return handler.response(serializer.data) - @capable_of('crud_lead') + @capable_of("crud_lead") def post(self, request, academy_id=None): academy = Academy.objects.filter(id=academy_id).first() if academy is None: - raise ValidationException(f'Academy {academy_id} not found', slug='academy-not-found') + raise ValidationException(f"Academy {academy_id} not found", 
slug="academy-not-found") # ignore the incoming location information and override with the session academy - data = {**request.data, 'location': academy.active_campaign_slug} + data = {**request.data, "location": academy.active_campaign_slug} - serializer = PostFormEntrySerializer(data=data, context={'request': request, 'academy': academy_id}) + serializer = PostFormEntrySerializer(data=data, context={"request": request, "academy": academy_id}) if serializer.is_valid(): serializer.save() big_serializer = FormEntryBigSerializer(serializer.instance) return Response(big_serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - @capable_of('crud_lead') + @capable_of("crud_lead") def put(self, request, academy_id=None, lead_id=None): academy = Academy.objects.filter(id=academy_id).first() if academy is None: - raise ValidationException(f'Academy {academy_id} not found', slug='academy-not-found') + raise ValidationException(f"Academy {academy_id} not found", slug="academy-not-found") lookups = None if lead_id is not None: - lookups = {'id': lead_id} + lookups = {"id": lead_id} else: - lookups = self.generate_lookups(request, many_fields=['id']) + lookups = self.generate_lookups(request, many_fields=["id"]) if not lookups and lead_id is None: - raise ValidationException('Missing lead ids parameters in the querystring', code=400) + raise ValidationException("Missing lead ids parameters in the querystring", code=400) leads = FormEntry.objects.filter(**lookups, academy__id=academy_id) if leads.count() == 0: - raise ValidationException('Leads not found', slug='lead-not-found') + raise ValidationException("Leads not found", slug="lead-not-found") data = {**request.data} serializers = [] for lead in leads: - serializer = PostFormEntrySerializer(lead, data=data, context={'request': request, 'academy': academy_id}) + serializer = PostFormEntrySerializer(lead, data=data, context={"request": request, "academy": academy_id}) serializers.append(serializer) if not serializer.is_valid(): return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) @@ -876,14 +877,14 @@ def put(self, request, academy_id=None, lead_id=None): return Response(big_serializer.data, status=status.HTTP_200_OK) - @capable_of('crud_lead') + @capable_of("crud_lead") def delete(self, request, academy_id=None): # TODO: here i don't add one single delete, because i don't know if it is required - lookups = self.generate_lookups(request, many_fields=['id']) + lookups = self.generate_lookups(request, many_fields=["id"]) # automation_objects if not lookups: - raise ValidationException('Missing parameters in the querystring', code=400) + raise ValidationException("Missing parameters in the querystring", code=400) items = FormEntry.objects.filter(**lookups, academy__id=academy_id) @@ -898,42 +899,37 @@ class ActiveCampaignView(APIView, GenerateLookupsMixin): List all snippets, or create a new snippet. 
""" - @capable_of('read_lead') + @capable_of("read_lead") def get(self, request, academy_id=None): ac_academy = ActiveCampaignAcademy.objects.filter(academy__id=academy_id).first() if ac_academy is None: - raise ValidationException('Active Campaign Academy not found', 404) + raise ValidationException("Active Campaign Academy not found", 404) serializer = ActiveCampaignAcademyBigSerializer(ac_academy) return Response(serializer.data, status=200) - @capable_of('crud_lead') + @capable_of("crud_lead") def post(self, request, academy_id=None): - serializer = ActiveCampaignAcademySerializer(data=request.data, - context={ - 'request': request, - 'academy': academy_id - }) + serializer = ActiveCampaignAcademySerializer( + data=request.data, context={"request": request, "academy": academy_id} + ) if serializer.is_valid(): serializer.save() return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - @capable_of('crud_lead') + @capable_of("crud_lead") def put(self, request, ac_id, academy_id=None): ac_academy = ActiveCampaignAcademy.objects.filter(id=ac_id, academy__id=academy_id).first() if ac_academy is None: - raise ValidationException(f'Active Campaign {ac_id} not found', slug='active-campaign-not-found') - serializer = ActiveCampaignAcademySerializer(ac_academy, - data=request.data, - context={ - 'request': request, - 'academy': academy_id - }) + raise ValidationException(f"Active Campaign {ac_id} not found", slug="active-campaign-not-found") + serializer = ActiveCampaignAcademySerializer( + ac_academy, data=request.data, context={"request": request, "academy": academy_id} + ) if serializer.is_valid(): serializer.save() @@ -946,31 +942,32 @@ class ShortLinkView(APIView, HeaderLimitOffsetPagination, GenerateLookupsMixin): List all snippets, or create a new snippet. 
""" - @capable_of('read_shortlink') + @capable_of("read_shortlink") def get(self, request, slug=None, academy_id=None): if slug is not None: link = ShortLink.objects.filter(slug=slug).first() if link is None or (link.private and link.academy.id != academy_id): raise ValidationException( - f'Shortlink with slug {slug} not found or its private and it belongs to another academy', - slug='shortlink-not-found') + f"Shortlink with slug {slug} not found or its private and it belongs to another academy", + slug="shortlink-not-found", + ) academy = Academy.objects.get(id=academy_id) items = ShortLink.objects.filter(Q(academy__id=academy.id) | Q(private=False)) lookup = {} - private = request.GET.get('private', None) - if private == 'true': - lookup['private'] = True + private = request.GET.get("private", None) + if private == "true": + lookup["private"] = True - sort_by = '-created_at' - if 'sort' in self.request.GET and self.request.GET['sort'] != '': - sort_by = self.request.GET.get('sort') + sort_by = "-created_at" + if "sort" in self.request.GET and self.request.GET["sort"] != "": + sort_by = self.request.GET.get("sort") items = items.filter(**lookup).order_by(sort_by) - like = request.GET.get('like', None) + like = request.GET.get("like", None) if like is not None: items = items.filter(slug__icontains=like) @@ -982,36 +979,36 @@ def get(self, request, slug=None, academy_id=None): else: return Response(serializer.data, status=200) - @capable_of('crud_shortlink') + @capable_of("crud_shortlink") def post(self, request, academy_id=None): - serializer = ShortLinkSerializer(data=request.data, context={'request': request, 'academy': academy_id}) + serializer = ShortLinkSerializer(data=request.data, context={"request": request, "academy": academy_id}) if serializer.is_valid(): serializer.save() return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - @capable_of('crud_shortlink') + @capable_of("crud_shortlink") def put(self, request, short_slug, academy_id=None): short = ShortLink.objects.filter(slug=short_slug, academy__id=academy_id).first() if short is None: - raise ValidationException(f'ShortLink {short_slug} not found', slug='short-not-found') + raise ValidationException(f"ShortLink {short_slug} not found", slug="short-not-found") - serializer = ShortLinkSerializer(short, data=request.data, context={'request': request, 'academy': academy_id}) + serializer = ShortLinkSerializer(short, data=request.data, context={"request": request, "academy": academy_id}) if serializer.is_valid(): serializer.save() return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - @capable_of('crud_shortlink') + @capable_of("crud_shortlink") def delete(self, request, academy_id=None): # TODO: here i don't add one single delete, because i don't know if it is required - lookups = self.generate_lookups(request, many_fields=['id']) + lookups = self.generate_lookups(request, many_fields=["id"]) # automation_objects if not lookups: - raise ValidationException('Missing parameters in the querystring', code=400) + raise ValidationException("Missing parameters in the querystring", code=400) items = ShortLink.objects.filter(**lookups, academy__id=academy_id) for i in items: @@ -1019,8 +1016,9 @@ def delete(self, request, academy_id=None): days_ago = i.created_at + timedelta(days=1) if days_ago < utc_now: raise ValidationException( - 'You cannot update or delete short links that 
have been created more than 1 day ago, create a new link instead', - slug='update-days-ago') + "You cannot update or delete short links that have been created more than 1 day ago, create a new link instead", + slug="update-days-ago", + ) items.delete() @@ -1032,6 +1030,7 @@ class UploadView(APIView): put: Upload a file to Google Cloud. """ + parser_classes = [MultiPartParser, FileUploadParser] # permission_classes = [AllowAny] @@ -1042,35 +1041,35 @@ def upload(self, file, lang, academy_id=None, update=False): from ..services.google_cloud import Storage if not file: - raise ValidationException('Missing file in request', code=400) + raise ValidationException("Missing file in request", code=400) # files validation below if file.content_type != MIME_ALLOW: - raise ValidationException(f'You can upload only files on the following formats: {MIME_ALLOW}') + raise ValidationException(f"You can upload only files on the following formats: {MIME_ALLOW}") file_bytes = file.read() file_name = hashlib.sha256(file_bytes).hexdigest() - file_bytes = file_bytes.decode('utf-8') + file_bytes = file_bytes.decode("utf-8") - with open(file.name, 'w') as f: + with open(file.name, "w") as f: f.write(file_bytes) df = pd.read_csv(file.name) os.remove(file.name) - required_fields = ['first_name', 'last_name', 'email', 'location', 'phone', 'language'] + required_fields = ["first_name", "last_name", "email", "location", "phone", "language"] # Think about uploading correct files and leaving out incorrect ones for item in required_fields: if item not in df.keys(): - return ValidationException(f'{item} field missing inside of csv') + return ValidationException(f"{item} field missing inside of csv") - data = {'file_name': file.name, 'status': 'PENDING', 'message': 'Despues'} + data = {"file_name": file.name, "status": "PENDING", "message": "Despues"} # upload file section try: storage = Storage() - cloud_file = storage.file(os.getenv('DOWNLOADS_BUCKET', None), file_name) + cloud_file = storage.file(os.getenv("DOWNLOADS_BUCKET", None), file_name) cloud_file.upload(file, content_type=file.content_type) csv_upload = CSVUpload() @@ -1081,15 +1080,18 @@ def upload(self, file, lang, academy_id=None, update=False): csv_upload.save() except CircuitBreakerError: - raise ValidationException(translation( - lang, - en='The circuit breaker is open due to an error, please try again later', - es='El circuit breaker está abierto debido a un error, por favor intente más tarde', - slug='circuit-breaker-open'), - slug='circuit-breaker-open', - data={'service': 'Google Cloud Storage'}, - silent=True, - code=503) + raise ValidationException( + translation( + lang, + en="The circuit breaker is open due to an error, please try again later", + es="El circuit breaker está abierto debido a un error, por favor intente más tarde", + slug="circuit-breaker-open", + ), + slug="circuit-breaker-open", + data={"service": "Google Cloud Storage"}, + silent=True, + code=503, + ) for num in range(len(df)): value = df.iloc[num] @@ -1099,10 +1101,10 @@ def upload(self, file, lang, academy_id=None, update=False): return data - @capable_of('crud_media') + @capable_of("crud_media") def put(self, request, academy_id=None): lang = get_user_language(request) - files = request.data.getlist('file') + files = request.data.getlist("file") result = [] for file in files: upload = self.upload(file, lang, academy_id, update=True) @@ -1112,11 +1114,11 @@ def put(self, request, academy_id=None): def get_real_conversion_name(slug): mapper = { - 'Website Lead': 'Application 
Submitted', + "Website Lead": "Application Submitted", } - words = re.split(' |_|-', slug) + words = re.split(" |_|-", slug) words = [w.capitalize() for w in words] - words = ' '.join(words) + words = " ".join(words) if words in mapper: words = mapper[words] @@ -1125,29 +1127,30 @@ def get_real_conversion_name(slug): def googleads_csv(request): - ids = request.GET.get('academy', '') - slugs = request.GET.get('academy_slug', '') + ids = request.GET.get("academy", "") + slugs = request.GET.get("academy_slug", "") - ids = ids.split(',') if ids else [] - slugs = slugs.split(',') if slugs else [] + ids = ids.split(",") if ids else [] + slugs = slugs.split(",") if slugs else [] if ids: - form_entries = FormEntry.objects.filter(academy__id__in=ids).order_by('id') + form_entries = FormEntry.objects.filter(academy__id__in=ids).order_by("id") elif slugs: - form_entries = FormEntry.objects.filter(academy__slug__in=slugs).order_by('id') + form_entries = FormEntry.objects.filter(academy__slug__in=slugs).order_by("id") else: form_entries = FormEntry.objects.all() - if (Academy.objects.filter(id__in=ids).count() != len(ids) - or Academy.objects.filter(slug__in=slugs).count() != len(slugs)): - raise ValidationException('Some academy not exist', slug='academy-not-found') + if Academy.objects.filter(id__in=ids).count() != len(ids) or Academy.objects.filter(slug__in=slugs).count() != len( + slugs + ): + raise ValidationException("Some academy not exist", slug="academy-not-found") data = [] response = HttpResponse( - content_type='text/csv', - headers={'Content-Disposition': 'attachment; filename="googleads.csv"'}, + content_type="text/csv", + headers={"Content-Disposition": 'attachment; filename="googleads.csv"'}, ) for entry in form_entries: @@ -1155,24 +1158,25 @@ def googleads_csv(request): if entry.gclid: entry_gclid = entry.gclid[-4:] - if (entry_gclid == '_BwE' and entry.deal_status == 'WON'): + if entry_gclid == "_BwE" and entry.deal_status == "WON": gclid = entry.gclid convertion_name = get_real_conversion_name(entry.tags) - timezone = pytz.timezone('US/Eastern') + timezone = pytz.timezone("US/Eastern") if entry.won_at is not None: convertion_time = entry.won_at.astimezone(timezone) else: convertion_time = entry.updated_at.astimezone(timezone) - convertion_time = convertion_time.strftime('%Y-%m-%d %H:%M:%S') + convertion_time = convertion_time.strftime("%Y-%m-%d %H:%M:%S") data.append([gclid, convertion_name, convertion_time, None, None]) writer = csv.writer(response) - writer.writerow(['Parameters:TimeZone=US/Eastern']) + writer.writerow(["Parameters:TimeZone=US/Eastern"]) writer.writerow( - ['Google Click ID', 'Conversion Name', 'Conversion Time', 'Conversion Value', 'Conversion Currency']) + ["Google Click ID", "Conversion Name", "Conversion Time", "Conversion Value", "Conversion Currency"] + ) for d in data: writer.writerow(d) @@ -1182,15 +1186,15 @@ def googleads_csv(request): class CourseView(APIView): permission_classes = [AllowAny] - extensions = APIViewExtensions(cache=CourseCache, sort='-updated_at', paginate=True) + extensions = APIViewExtensions(cache=CourseCache, sort="-updated_at", paginate=True) def get_lookup(self, key, value): args = () kwargs = {} - slug_key = f'{key}__slug__in' - pk_key = f'{key}__id__in' + slug_key = f"{key}__slug__in" + pk_key = f"{key}__id__in" - for v in value.split(','): + for v in value.split(","): if slug_key not in kwargs and not v.isnumeric(): kwargs[slug_key] = [] @@ -1204,7 +1208,7 @@ def get_lookup(self, key, value): kwargs[slug_key].append(v) if 
len(kwargs) > 1: - args = (Q(**{slug_key: kwargs[slug_key]}) | Q(**{pk_key: kwargs[pk_key]}), ) + args = (Q(**{slug_key: kwargs[slug_key]}) | Q(**{pk_key: kwargs[pk_key]}),) kwargs = {} return args, kwargs @@ -1216,45 +1220,49 @@ def get(self, request, course_slug=None): if cache is not None: return cache - lang = request.GET.get('lang') + lang = request.GET.get("lang") if lang is None: lang = get_user_language(request) if course_slug: - item = Course.objects.filter(slug=course_slug).annotate(lang=Value(lang, output_field=CharField())).exclude( - status='DELETED').exclude(visibility='PRIVATE').first() + item = ( + Course.objects.filter(slug=course_slug) + .annotate(lang=Value(lang, output_field=CharField())) + .exclude(status="DELETED") + .exclude(visibility="PRIVATE") + .first() + ) if not item: - raise ValidationException(translation(lang, - en='Course not found', - es='Curso no encontrado', - slug='course-not-found'), - code=404) + raise ValidationException( + translation(lang, en="Course not found", es="Curso no encontrado", slug="course-not-found"), + code=404, + ) - serializer = GetCourseSerializer(item, context={'lang': lang}, many=False) + serializer = GetCourseSerializer(item, context={"lang": lang}, many=False) return handler.response(serializer.data) - items = Course.objects.filter().exclude(status='DELETED').exclude(visibility='PRIVATE') + items = Course.objects.filter().exclude(status="DELETED").exclude(visibility="PRIVATE") - if academy := request.GET.get('academy'): - args, kwargs = self.get_lookup('academy', academy) + if academy := request.GET.get("academy"): + args, kwargs = self.get_lookup("academy", academy) items = items.filter(*args, **kwargs) - if syllabus := request.GET.get('syllabus'): - args, kwargs = self.get_lookup('syllabus', syllabus) + if syllabus := request.GET.get("syllabus"): + args, kwargs = self.get_lookup("syllabus", syllabus) items = items.filter(*args, **kwargs) - if s := request.GET.get('status'): - items = items.filter(status__in=s.split(',')) + if s := request.GET.get("status"): + items = items.filter(status__in=s.split(",")) else: - items = items.exclude(status='ARCHIVED') + items = items.exclude(status="ARCHIVED") - if icon_url := request.GET.get('icon_url'): + if icon_url := request.GET.get("icon_url"): items = items.filter(icon_url__icontains=icon_url) - if technologies := request.GET.get('technologies'): - technologies = technologies.split(',') + if technologies := request.GET.get("technologies"): + technologies = technologies.split(",") query = Q(technologies__icontains=technologies[0]) for technology in technologies[1:]: query |= Q(technologies__icontains=technology) @@ -1262,7 +1270,7 @@ def get(self, request, course_slug=None): items = items.filter(query) items = items.annotate(lang=Value(lang, output_field=CharField())) - items = items.order_by('created_at') + items = items.order_by("created_at") items = handler.queryset(items) - serializer = GetCourseSerializer(items, context={'lang': lang}, many=True) + serializer = GetCourseSerializer(items, context={"lang": lang}, many=True) return handler.response(serializer.data) diff --git a/breathecode/media/admin.py b/breathecode/media/admin.py index 8723cd9ed..184f82cbc 100644 --- a/breathecode/media/admin.py +++ b/breathecode/media/admin.py @@ -5,9 +5,9 @@ @admin.register(Media) class MediaAdmin(admin.ModelAdmin): - search_fields = ['slug', 'name'] - list_display = ('slug', 'name', 'mime', 'hits', 'academy', 'open_url') - list_filter = ['categories', 'mime', 'academy'] + search_fields = 
["slug", "name"] + list_display = ("slug", "name", "mime", "hits", "academy", "open_url") + list_filter = ["categories", "mime", "academy"] def open_url(self, obj): return format_html(f"<a target='blank' href='/v1/media/file/{obj.slug}'>/v1/media/file/{obj.slug}</span>") @@ -15,10 +15,10 @@ def open_url(self, obj): @admin.register(Category) class MediaCategoryAdmin(admin.ModelAdmin): - list_display = ('slug', 'name', 'created_at') + list_display = ("slug", "name", "created_at") @admin.register(MediaResolution) class MediaResolutionAdmin(admin.ModelAdmin): - list_display = ('hash', 'width', 'height', 'hits') - list_filter = ['hash', 'width', 'height', 'hits'] + list_display = ("hash", "width", "height", "hits") + list_filter = ["hash", "width", "height", "hits"] diff --git a/breathecode/media/apps.py b/breathecode/media/apps.py index 96d56e97c..1af807055 100644 --- a/breathecode/media/apps.py +++ b/breathecode/media/apps.py @@ -2,4 +2,4 @@ class MediaConfig(AppConfig): - name = 'breathecode.media' + name = "breathecode.media" diff --git a/breathecode/media/migrations/0001_initial.py b/breathecode/media/migrations/0001_initial.py index ba876636b..7b9712695 100644 --- a/breathecode/media/migrations/0001_initial.py +++ b/breathecode/media/migrations/0001_initial.py @@ -15,32 +15,33 @@ class Migration(migrations.Migration): operations = [ migrations.CreateModel( - name='Category', + name="Category", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('slug', models.SlugField(max_length=150, unique=True)), - ('name', models.CharField(max_length=150)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("slug", models.SlugField(max_length=150, unique=True)), + ("name", models.CharField(max_length=150)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), ], ), migrations.CreateModel( - name='Media', + name="Media", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('slug', models.SlugField(max_length=150, unique=True)), - ('name', models.CharField(max_length=150)), - ('mime', models.CharField(max_length=60)), - ('url', models.URLField(max_length=255)), - ('hash', models.CharField(max_length=64)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('categories', models.ManyToManyField(to='media.Category')), - ('owner', - models.ForeignKey(blank=True, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to=settings.AUTH_USER_MODEL)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("slug", models.SlugField(max_length=150, unique=True)), + ("name", models.CharField(max_length=150)), + ("mime", models.CharField(max_length=60)), + ("url", models.URLField(max_length=255)), + ("hash", models.CharField(max_length=64)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("categories", models.ManyToManyField(to="media.Category")), + ( + "owner", + models.ForeignKey( + blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL + ), + ), ], ), ] diff --git a/breathecode/media/migrations/0002_auto_20210330_0754.py 
b/breathecode/media/migrations/0002_auto_20210330_0754.py index 75233f1b4..71b146530 100644 --- a/breathecode/media/migrations/0002_auto_20210330_0754.py +++ b/breathecode/media/migrations/0002_auto_20210330_0754.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('media', '0001_initial'), + ("media", "0001_initial"), ] operations = [ migrations.AlterField( - model_name='media', - name='categories', - field=models.ManyToManyField(blank=True, to='media.Category'), + model_name="media", + name="categories", + field=models.ManyToManyField(blank=True, to="media.Category"), ), ] diff --git a/breathecode/media/migrations/0003_media_hits.py b/breathecode/media/migrations/0003_media_hits.py index 5ce93208e..7425e2d12 100644 --- a/breathecode/media/migrations/0003_media_hits.py +++ b/breathecode/media/migrations/0003_media_hits.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('media', '0002_auto_20210330_0754'), + ("media", "0002_auto_20210330_0754"), ] operations = [ migrations.AddField( - model_name='media', - name='hits', + model_name="media", + name="hits", field=models.IntegerField(default=0), ), ] diff --git a/breathecode/media/migrations/0004_auto_20210401_0249.py b/breathecode/media/migrations/0004_auto_20210401_0249.py index d831284f6..53d15556d 100644 --- a/breathecode/media/migrations/0004_auto_20210401_0249.py +++ b/breathecode/media/migrations/0004_auto_20210401_0249.py @@ -7,21 +7,20 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0015_auto_20210302_0254'), - ('media', '0003_media_hits'), + ("admissions", "0015_auto_20210302_0254"), + ("media", "0003_media_hits"), ] operations = [ migrations.RemoveField( - model_name='media', - name='owner', + model_name="media", + name="owner", ), migrations.AddField( - model_name='media', - name='academy', - field=models.ForeignKey(blank=True, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='admissions.academy'), + model_name="media", + name="academy", + field=models.ForeignKey( + blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to="admissions.academy" + ), ), ] diff --git a/breathecode/media/migrations/0005_auto_20210524_1426.py b/breathecode/media/migrations/0005_auto_20210524_1426.py index 21d7a1562..8fe04c1aa 100644 --- a/breathecode/media/migrations/0005_auto_20210524_1426.py +++ b/breathecode/media/migrations/0005_auto_20210524_1426.py @@ -6,25 +6,25 @@ class Migration(migrations.Migration): dependencies = [ - ('media', '0004_auto_20210401_0249'), + ("media", "0004_auto_20210401_0249"), ] operations = [ migrations.CreateModel( - name='MediaResolution', + name="MediaResolution", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('hash', models.CharField(max_length=64)), - ('width', models.IntegerField()), - ('height', models.IntegerField()), - ('hits', models.IntegerField(default=0)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("hash", models.CharField(max_length=64)), + ("width", models.IntegerField()), + ("height", models.IntegerField()), + ("hits", models.IntegerField(default=0)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), ], ), migrations.AddField( - model_name='media', - name='thumbnail', + model_name="media", + 
name="thumbnail", field=models.URLField(blank=True, max_length=255, null=True), ), ] diff --git a/breathecode/media/models.py b/breathecode/media/models.py index 17f19c021..13bfa94ef 100644 --- a/breathecode/media/models.py +++ b/breathecode/media/models.py @@ -1,7 +1,7 @@ from breathecode.admissions.models import Academy from django.db import models -__all__ = ['Category', 'Media', 'MediaResolution'] +__all__ = ["Category", "Media", "MediaResolution"] class Category(models.Model): @@ -12,7 +12,7 @@ class Category(models.Model): updated_at = models.DateTimeField(auto_now=True, editable=False) def __str__(self): - return f'{self.name} ({self.id})' + return f"{self.name} ({self.id})" class Media(models.Model): @@ -31,7 +31,7 @@ class Media(models.Model): updated_at = models.DateTimeField(auto_now=True, editable=False) def __str__(self): - return f'{self.name} ({self.id} - {self.slug})' + return f"{self.name} ({self.id} - {self.slug})" class MediaResolution(models.Model): @@ -44,4 +44,4 @@ class MediaResolution(models.Model): updated_at = models.DateTimeField(auto_now=True, editable=False) def __str__(self): - return f'{self.hash} ({self.width}x{self.height})' + return f"{self.hash} ({self.width}x{self.height})" diff --git a/breathecode/media/schemas.py b/breathecode/media/schemas.py index f7db7f885..4fd62feb8 100644 --- a/breathecode/media/schemas.py +++ b/breathecode/media/schemas.py @@ -5,15 +5,15 @@ class GlobalSchema(AutoSchema): def get_operation(self, path, method): operation = super().get_operation(path, method) - operation['parameters'].append({ - 'name': 'Authorization', - 'in': 'header', - 'required': True, - 'description': 'Token', - 'schema': { - 'type': 'string' + operation["parameters"].append( + { + "name": "Authorization", + "in": "header", + "required": True, + "description": "Token", + "schema": {"type": "string"}, } - }) + ) return operation @@ -21,15 +21,15 @@ class MediaSchema(GlobalSchema): def get_operation(self, path, method): operation = super().get_operation(path, method) - operation['parameters'].append({ - 'name': 'Academy', - 'in': 'header', - 'required': True, - 'description': 'What foo does...', - 'schema': { - 'type': 'string' + operation["parameters"].append( + { + "name": "Academy", + "in": "header", + "required": True, + "description": "What foo does...", + "schema": {"type": "string"}, } - }) + ) return operation @@ -37,22 +37,22 @@ class FileSchema(AutoSchema): def get_operation(self, path, method): operation = super().get_operation(path, method) - operation['parameters'].append({ - 'name': 'width', - 'in': 'query', - 'required': False, - 'description': 'Width of image', - 'schema': { - 'type': 'integer' + operation["parameters"].append( + { + "name": "width", + "in": "query", + "required": False, + "description": "Width of image", + "schema": {"type": "integer"}, } - }) - operation['parameters'].append({ - 'name': 'height', - 'in': 'query', - 'required': False, - 'description': 'Height of image', - 'schema': { - 'type': 'integer' + ) + operation["parameters"].append( + { + "name": "height", + "in": "query", + "required": False, + "description": "Height of image", + "schema": {"type": "integer"}, } - }) + ) return operation diff --git a/breathecode/media/serializers.py b/breathecode/media/serializers.py index d3aa3818d..69b3e9a7b 100644 --- a/breathecode/media/serializers.py +++ b/breathecode/media/serializers.py @@ -41,7 +41,7 @@ class GetMediaSerializer(serpy.Serializer): academy = GetAcademySerializer(required=False) def get_thumbnail(self, obj): - 
return obj.url + '-thumbnail' + return obj.url + "-thumbnail" def get_categories(self, obj): return [GetCategorySerializer(x).data for x in obj.categories.all()] @@ -56,7 +56,7 @@ class GetResolutionSerializer(serializers.ModelSerializer): class Meta: model = MediaResolution - fields = ('id', 'hash', 'width', 'height', 'hits') + fields = ("id", "hash", "width", "height", "hits") class MediaListSerializer(serializers.ListSerializer): @@ -65,16 +65,16 @@ def update(self, instance, validated_data): ret = [] for data in validated_data: - item = [x for x in instance if x.id == data['id']] + item = [x for x in instance if x.id == data["id"]] item = item[0] if len(item) else None - if 'id' in data and not data['id']: - del data['id'] + if "id" in data and not data["id"]: + del data["id"] - if 'id' in data: - if item and 'categories' in data and data['categories']: - item.categories.set(data['categories']) - del data['categories'] + if "id" in data: + if item and "categories" in data and data["categories"]: + item.categories.set(data["categories"]) + del data["categories"] ret.append(self.child.update(item, data)) else: @@ -94,7 +94,7 @@ class MediaSerializer(serializers.ModelSerializer): class Meta: model = Media - fields = ('id', 'url', 'thumbnail', 'hash', 'hits', 'slug', 'mime', 'name', 'categories', 'academy') + fields = ("id", "url", "thumbnail", "hash", "hits", "slug", "mime", "name", "categories", "academy") exclude = () list_serializer_class = MediaListSerializer @@ -110,14 +110,17 @@ class MediaPUTSerializer(serializers.ModelSerializer): class Meta: model = Media - fields = ('id', 'url', 'thumbnail', 'hash', 'hits', 'slug', 'mime', 'name', 'categories', 'academy') + fields = ("id", "url", "thumbnail", "hash", "hits", "slug", "mime", "name", "categories", "academy") exclude = () list_serializer_class = MediaListSerializer def validate(self, data): - if 'hash' in data and 'academy' in data and isinstance(data['academy'], Academy): - data['id'] = Media.objects.filter(hash=data['hash'], - academy__id=data['academy'].id).values_list('id', flat=True).first() + if "hash" in data and "academy" in data and isinstance(data["academy"], Academy): + data["id"] = ( + Media.objects.filter(hash=data["hash"], academy__id=data["academy"].id) + .values_list("id", flat=True) + .first() + ) return data @@ -130,10 +133,10 @@ class CategorySerializer(serializers.ModelSerializer): class Meta: model = Category - fields = ('name', 'slug', 'created_at', 'id') + fields = ("name", "slug", "created_at", "id") def create(self, validated_data): - _slug = slugify(validated_data['name']) - result = super().create({**validated_data, 'slug': _slug}) + _slug = slugify(validated_data["name"]) + result = super().create({**validated_data, "slug": _slug}) return result diff --git a/breathecode/media/tests/mixins/__init__.py b/breathecode/media/tests/mixins/__init__.py index b73c178bc..8f66ab70d 100644 --- a/breathecode/media/tests/mixins/__init__.py +++ b/breathecode/media/tests/mixins/__init__.py @@ -1,4 +1,5 @@ """ Media mixins """ + from .media_test_case import MediaTestCase # noqa: F401 diff --git a/breathecode/media/tests/mixins/media_test_case.py b/breathecode/media/tests/mixins/media_test_case.py index 6cef70255..c2e15c20d 100644 --- a/breathecode/media/tests/mixins/media_test_case.py +++ b/breathecode/media/tests/mixins/media_test_case.py @@ -1,23 +1,41 @@ """ Collections of mixins used to login in authorize microservice """ + import os from rest_framework.test import APITestCase -from breathecode.tests.mixins import 
(GenerateModelsMixin, CacheMixin, TokenMixin, GenerateQueriesMixin, HeadersMixin, - DatetimeMixin, Sha256Mixin, BreathecodeMixin) +from breathecode.tests.mixins import ( + GenerateModelsMixin, + CacheMixin, + TokenMixin, + GenerateQueriesMixin, + HeadersMixin, + DatetimeMixin, + Sha256Mixin, + BreathecodeMixin, +) from breathecode.media.models import Media from breathecode.media.serializers import GetMediaSerializer -class MediaTestCase(APITestCase, GenerateModelsMixin, CacheMixin, TokenMixin, GenerateQueriesMixin, HeadersMixin, - DatetimeMixin, Sha256Mixin, BreathecodeMixin): +class MediaTestCase( + APITestCase, + GenerateModelsMixin, + CacheMixin, + TokenMixin, + GenerateQueriesMixin, + HeadersMixin, + DatetimeMixin, + Sha256Mixin, + BreathecodeMixin, +): """FeedbackTestCase with auth methods""" def tearDown(self): self.clear_cache() def setUp(self): - os.environ['MEDIA_GALLERY_BUCKET'] = 'bucket-name' + os.environ["MEDIA_GALLERY_BUCKET"] = "bucket-name" self.generate_queries() self.set_test_instance(self) diff --git a/breathecode/media/tests/urls/tests_category.py b/breathecode/media/tests/urls/tests_category.py index 60c32a77a..5b2bdd9d9 100644 --- a/breathecode/media/tests/urls/tests_category.py +++ b/breathecode/media/tests/urls/tests_category.py @@ -1,6 +1,7 @@ """ Test /answer """ + import re, urllib from unittest.mock import MagicMock, call, patch from django.urls.base import reverse_lazy @@ -18,53 +19,52 @@ class MediaTestSuite(MediaTestCase): """Test /answer""" - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_category_without_auth(self): """Test /answer without auth""" - url = reverse_lazy('media:category') + url = reverse_lazy("media:category") response = self.client.get(url) json = response.json() self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_category_wrong_academy(self): """Test /answer without auth""" - url = reverse_lazy('media:category') - response = self.client.get(url, **{'HTTP_Academy': 1}) + url = reverse_lazy("media:category") + response = self.client.get(url, **{"HTTP_Academy": 1}) json = response.json() self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_category_without_capability(self): """Test /cohort/:id without auth""" self.headers(academy=1) - url = reverse_lazy('media:category') + url = 
reverse_lazy("media:category") self.generate_models(authenticate=True) response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': "You (user: 1) don't have this capability: read_media for academy 1", - 'status_code': 403 - }) + self.assertEqual( + json, {"detail": "You (user: 1) don't have this capability: read_media for academy 1", "status_code": 403} + ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_category_without_data(self): """Test /answer without auth""" self.headers(academy=1) - models = self.generate_models(authenticate=True, profile_academy=True, capability='read_media', role='potato') - url = reverse_lazy('media:category') + models = self.generate_models(authenticate=True, profile_academy=True, capability="read_media", role="potato") + url = reverse_lazy("media:category") response = self.client.get(url) json = response.json() @@ -72,108 +72,119 @@ def test_category_without_data(self): self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(self.all_category_dict(), []) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_category(self): """Test /answer without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_media', - role='potato', - category=True) - url = reverse_lazy('media:category') + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_media", role="potato", category=True + ) + url = reverse_lazy("media:category") response = self.client.get(url) json = response.json() - self.assertEqual(json, [{ - 'id': 1, - 'medias': 0, - 'name': model['category'].name, - 'slug': model['category'].slug, - }]) + self.assertEqual( + json, + [ + { + "id": 1, + "medias": 0, + "name": model["category"].name, + "slug": model["category"].slug, + } + ], + ) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_category_dict(), [{**self.model_to_dict(model, 'category')}]) + self.assertEqual(self.all_category_dict(), [{**self.model_to_dict(model, "category")}]) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_category_with_media(self): """Test /answer without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_media', - role='potato', - 
media=True, - category=True) - url = reverse_lazy('media:category') + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_media", role="potato", media=True, category=True + ) + url = reverse_lazy("media:category") response = self.client.get(url) json = response.json() - self.assertEqual(json, [{ - 'id': 1, - 'medias': 1, - 'name': model['category'].name, - 'slug': model['category'].slug, - }]) - self.assertEqual(self.all_category_dict(), [{**self.model_to_dict(model, 'category')}]) - - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + self.assertEqual( + json, + [ + { + "id": 1, + "medias": 1, + "name": model["category"].name, + "slug": model["category"].slug, + } + ], + ) + self.assertEqual(self.all_category_dict(), [{**self.model_to_dict(model, "category")}]) + + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_category_post(self): """Test /answer without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, profile_academy=True, capability='crud_media', role='potato') - url = reverse_lazy('media:category') + model = self.generate_models(authenticate=True, profile_academy=True, capability="crud_media", role="potato") + url = reverse_lazy("media:category") data = { - 'name': 'They killed kenny', - 'slug': 'they-killed-kenny', + "name": "They killed kenny", + "slug": "they-killed-kenny", } response = self.client.post(url, data) json = response.json() expected = { - 'id': 1, + "id": 1, **data, } - self.assertDatetime(json['created_at']) - del json['created_at'] + self.assertDatetime(json["created_at"]) + del json["created_at"] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) self.assertEqual(self.all_category_dict(), [expected]) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch.object(APIViewExtensionHandlers, '_spy_extensions', MagicMock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch.object(APIViewExtensionHandlers, "_spy_extensions", MagicMock()) def test_category__spy_extensions(self): """Test /answer without auth""" self.headers(academy=1) - models = self.generate_models(authenticate=True, profile_academy=True, capability='read_media', role='potato') + models = self.generate_models(authenticate=True, profile_academy=True, capability="read_media", role="potato") - url = reverse_lazy('media:category') + url = reverse_lazy("media:category") self.client.get(url) - self.assertEqual(APIViewExtensionHandlers._spy_extensions.call_args_list, [ - call(['LanguageExtension', 'LookupExtension', 'PaginationExtension']), - ]) - - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch.object(APIViewExtensionHandlers, '_spy_extension_arguments', MagicMock()) + 
self.assertEqual( + APIViewExtensionHandlers._spy_extensions.call_args_list, + [ + call(["LanguageExtension", "LookupExtension", "PaginationExtension"]), + ], + ) + + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch.object(APIViewExtensionHandlers, "_spy_extension_arguments", MagicMock()) def test_category__spy_extension_arguments(self): """Test /answer without auth""" self.headers(academy=1) - models = self.generate_models(authenticate=True, profile_academy=True, capability='read_media', role='potato') + models = self.generate_models(authenticate=True, profile_academy=True, capability="read_media", role="potato") - url = reverse_lazy('media:category') + url = reverse_lazy("media:category") self.client.get(url) - self.assertEqual(APIViewExtensionHandlers._spy_extension_arguments.call_args_list, [ - call(paginate=True), - ]) + self.assertEqual( + APIViewExtensionHandlers._spy_extension_arguments.call_args_list, + [ + call(paginate=True), + ], + ) diff --git a/breathecode/media/tests/urls/tests_category_id.py b/breathecode/media/tests/urls/tests_category_id.py index 73eae612d..b1b04f108 100644 --- a/breathecode/media/tests/urls/tests_category_id.py +++ b/breathecode/media/tests/urls/tests_category_id.py @@ -1,6 +1,7 @@ """ Test /answer """ + import re, urllib from unittest.mock import patch from django.urls.base import reverse_lazy @@ -17,105 +18,105 @@ class MediaTestSuite(MediaTestCase): """Test /answer""" - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_category_id_without_auth(self): """Test /answer without auth""" - url = reverse_lazy('media:category_id', kwargs={'category_id': 1}) + url = reverse_lazy("media:category_id", kwargs={"category_id": 1}) response = self.client.get(url) json = response.json() self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_category_id_wrong_academy(self): """Test /answer without auth""" - url = reverse_lazy('media:category_id', kwargs={'category_id': 1}) - response = self.client.get(url, **{'HTTP_Academy': 1}) + url = reverse_lazy("media:category_id", kwargs={"category_id": 1}) + response = self.client.get(url, **{"HTTP_Academy": 1}) json = response.json() self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], 
apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_category_id_without_capability(self): """Test /cohort/:id without auth""" self.headers(academy=1) - url = reverse_lazy('media:category_id', kwargs={'category_id': 1}) + url = reverse_lazy("media:category_id", kwargs={"category_id": 1}) self.generate_models(authenticate=True) response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': "You (user: 1) don't have this capability: read_media for academy 1", - 'status_code': 403 - }) + self.assertEqual( + json, {"detail": "You (user: 1) don't have this capability: read_media for academy 1", "status_code": 403} + ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_category_id_without_data(self): """Test /answer without auth""" self.headers(academy=1) - models = self.generate_models(authenticate=True, profile_academy=True, capability='read_media', role='potato') - url = reverse_lazy('media:category_id', kwargs={'category_id': 1}) + models = self.generate_models(authenticate=True, profile_academy=True, capability="read_media", role="potato") + url = reverse_lazy("media:category_id", kwargs={"category_id": 1}) response = self.client.get(url) json = response.json() - self.assertEqual(json, {'detail': 'Category not found', 'status_code': 404}) + self.assertEqual(json, {"detail": "Category not found", "status_code": 404}) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) self.assertEqual(self.all_category_dict(), []) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_root(self): """Test /answer without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_media', - role='potato', - category=True) - url = reverse_lazy('media:category_id', kwargs={'category_id': 1}) + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_media", role="potato", category=True + ) + url = reverse_lazy("media:category_id", kwargs={"category_id": 1}) response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'id': 1, - 'medias': 0, - 'name': model['category'].name, - 'slug': model['category'].slug, - }) + self.assertEqual( + json, + { + "id": 1, + "medias": 0, + "name": model["category"].name, + "slug": model["category"].slug, + }, + ) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_category_dict(), [{**self.model_to_dict(model, 'category')}]) + self.assertEqual(self.all_category_dict(), [{**self.model_to_dict(model, "category")}]) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - 
@patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_category_id_with_media(self): """Test /answer without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_media', - role='potato', - media=True, - category=True) - url = reverse_lazy('media:category_id', kwargs={'category_id': 1}) + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_media", role="potato", media=True, category=True + ) + url = reverse_lazy("media:category_id", kwargs={"category_id": 1}) response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'id': 1, - 'medias': 1, - 'name': model['category'].name, - 'slug': model['category'].slug, - }) + self.assertEqual( + json, + { + "id": 1, + "medias": 1, + "name": model["category"].name, + "slug": model["category"].slug, + }, + ) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_category_dict(), [{**self.model_to_dict(model, 'category')}]) + self.assertEqual(self.all_category_dict(), [{**self.model_to_dict(model, "category")}]) diff --git a/breathecode/media/tests/urls/tests_category_slug.py b/breathecode/media/tests/urls/tests_category_slug.py index a38dabc0c..682febf30 100644 --- a/breathecode/media/tests/urls/tests_category_slug.py +++ b/breathecode/media/tests/urls/tests_category_slug.py @@ -1,6 +1,7 @@ """ Test /answer """ + import re, urllib from unittest.mock import patch from django.urls.base import reverse_lazy @@ -17,231 +18,231 @@ class MediaTestSuite(MediaTestCase): """Test /answer""" - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_category_slug_without_auth(self): """Test /answer without auth""" - url = reverse_lazy('media:category_slug', kwargs={'category_slug': 'they-killed-kenny'}) + url = reverse_lazy("media:category_slug", kwargs={"category_slug": "they-killed-kenny"}) response = self.client.get(url) json = response.json() self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_category_slug_wrong_academy(self): """Test /answer without auth""" - url = reverse_lazy('media:category_slug', kwargs={'category_slug': 'they-killed-kenny'}) - response = self.client.get(url, **{'HTTP_Academy': 1}) + url = reverse_lazy("media:category_slug", kwargs={"category_slug": "they-killed-kenny"}) + response = self.client.get(url, **{"HTTP_Academy": 1}) json = response.json() 
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_category_slug_without_capability(self): """Test /cohort/:id without auth""" self.headers(academy=1) - url = reverse_lazy('media:category_slug', kwargs={'category_slug': 'they-killed-kenny'}) + url = reverse_lazy("media:category_slug", kwargs={"category_slug": "they-killed-kenny"}) self.generate_models(authenticate=True) response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': "You (user: 1) don't have this capability: read_media for academy 1", - 'status_code': 403 - }) + self.assertEqual( + json, {"detail": "You (user: 1) don't have this capability: read_media for academy 1", "status_code": 403} + ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_category_slug_without_data(self): """Test /answer without auth""" self.headers(academy=1) - models = self.generate_models(authenticate=True, profile_academy=True, capability='read_media', role='potato') - url = reverse_lazy('media:category_slug', kwargs={'category_slug': 'they-killed-kenny'}) + models = self.generate_models(authenticate=True, profile_academy=True, capability="read_media", role="potato") + url = reverse_lazy("media:category_slug", kwargs={"category_slug": "they-killed-kenny"}) response = self.client.get(url) json = response.json() - self.assertEqual(json, {'detail': 'Category not found', 'status_code': 404}) + self.assertEqual(json, {"detail": "Category not found", "status_code": 404}) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) self.assertEqual(self.all_category_dict(), []) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_root(self): """Test /answer without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_media', - role='potato', - category=True) - url = reverse_lazy('media:category_slug', kwargs={'category_slug': model['category'].slug}) + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_media", role="potato", category=True + ) + url = reverse_lazy("media:category_slug", kwargs={"category_slug": model["category"].slug}) response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'id': 1, - 'medias': 0, - 'name': 
model['category'].name, - 'slug': model['category'].slug, - }) + self.assertEqual( + json, + { + "id": 1, + "medias": 0, + "name": model["category"].name, + "slug": model["category"].slug, + }, + ) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_category_dict(), [{**self.model_to_dict(model, 'category')}]) + self.assertEqual(self.all_category_dict(), [{**self.model_to_dict(model, "category")}]) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_category_slug_with_media(self): """Test /answer without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_media', - role='potato', - media=True, - category=True) - url = reverse_lazy('media:category_slug', kwargs={'category_slug': model['category'].slug}) + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_media", role="potato", media=True, category=True + ) + url = reverse_lazy("media:category_slug", kwargs={"category_slug": model["category"].slug}) response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'id': 1, - 'medias': 1, - 'name': model['category'].name, - 'slug': model['category'].slug, - }) + self.assertEqual( + json, + { + "id": 1, + "medias": 1, + "name": model["category"].name, + "slug": model["category"].slug, + }, + ) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_category_dict(), [{**self.model_to_dict(model, 'category')}]) + self.assertEqual(self.all_category_dict(), [{**self.model_to_dict(model, "category")}]) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_academy_slug_put_without_auth(self): """Test /answer without auth""" - url = reverse_lazy('media:category_slug', kwargs={'category_slug': 'they-killed-kenny'}) + url = reverse_lazy("media:category_slug", kwargs={"category_slug": "they-killed-kenny"}) data = {} response = self.client.put(url, data) json = response.json() self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_academy_slug_put_wrong_academy(self): """Test /answer without auth""" self.headers(academy=1) - url = reverse_lazy('media:category_slug', kwargs={'category_slug': 'they-killed-kenny'}) + url = reverse_lazy("media:category_slug", kwargs={"category_slug": "they-killed-kenny"}) data = {} response = 
self.client.put(url, data) json = response.json() self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_academy_slug_put_without_capability(self): """Test /cohort/:id without auth""" self.headers(academy=1) - url = reverse_lazy('media:category_slug', kwargs={'category_slug': 'they-killed-kenny'}) + url = reverse_lazy("media:category_slug", kwargs={"category_slug": "they-killed-kenny"}) self.generate_models(authenticate=True) data = {} response = self.client.put(url, data) json = response.json() - self.assertEqual(json, { - 'detail': "You (user: 1) don't have this capability: crud_media for academy 1", - 'status_code': 403 - }) + self.assertEqual( + json, {"detail": "You (user: 1) don't have this capability: crud_media for academy 1", "status_code": 403} + ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_academy_slug_put_without_data(self): """Test /answer without auth""" self.headers(academy=1) - models = self.generate_models(authenticate=True, profile_academy=True, capability='crud_media', role='potato') - url = reverse_lazy('media:category_slug', kwargs={'category_slug': 'they-killed-kenny'}) + models = self.generate_models(authenticate=True, profile_academy=True, capability="crud_media", role="potato") + url = reverse_lazy("media:category_slug", kwargs={"category_slug": "they-killed-kenny"}) data = {} response = self.client.put(url, data) json = response.json() - self.assertEqual(json, {'detail': 'Category not found', 'status_code': 404}) + self.assertEqual(json, {"detail": "Category not found", "status_code": 404}) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) self.assertEqual(self.all_category_dict(), []) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_academy_slug_put(self): """Test /answer without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_media', - role='potato', - category=True) - url = reverse_lazy('media:category_slug', kwargs={'category_slug': model['category'].slug}) - data = {'slug': 'they-killed-kenny', 'name': 'They killed kenny'} + model = self.generate_models( + authenticate=True, profile_academy=True, capability="crud_media", role="potato", category=True + ) + url = reverse_lazy("media:category_slug", 
kwargs={"category_slug": model["category"].slug}) + data = {"slug": "they-killed-kenny", "name": "They killed kenny"} response = self.client.put(url, data) json = response.json() category = self.get_category(1) - self.assertDatetime(json['created_at']) - del json['created_at'] + self.assertDatetime(json["created_at"]) + del json["created_at"] - self.assertEqual(json, { - 'id': 1, - **data, - }) + self.assertEqual( + json, + { + "id": 1, + **data, + }, + ) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_category_dict(), [{ - 'id': 1, - **data, - }]) - - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + self.assertEqual( + self.all_category_dict(), + [ + { + "id": 1, + **data, + } + ], + ) + + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_category_slug_delete_with_media(self): """Test /answer without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_media', - role='potato', - media=True, - category=True) - url = reverse_lazy('media:category_slug', kwargs={'category_slug': model['category'].slug}) + model = self.generate_models( + authenticate=True, profile_academy=True, capability="crud_media", role="potato", media=True, category=True + ) + url = reverse_lazy("media:category_slug", kwargs={"category_slug": model["category"].slug}) response = self.client.delete(url) json = response.json() - self.assertEqual(json, {'detail': 'Category contain some medias', 'status_code': 403}) + self.assertEqual(json, {"detail": "Category contain some medias", "status_code": 403}) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) - self.assertEqual(self.all_category_dict(), [{**self.model_to_dict(model, 'category')}]) + self.assertEqual(self.all_category_dict(), [{**self.model_to_dict(model, "category")}]) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_category_slug_delete(self): """Test /answer without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_media', - role='potato', - category=True) - url = reverse_lazy('media:category_slug', kwargs={'category_slug': model['category'].slug}) + model = self.generate_models( + authenticate=True, profile_academy=True, capability="crud_media", role="potato", category=True + ) + url = reverse_lazy("media:category_slug", kwargs={"category_slug": model["category"].slug}) response = self.client.delete(url) self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) diff --git a/breathecode/media/tests/urls/tests_file_id.py b/breathecode/media/tests/urls/tests_file_id.py index 4ce71b6b3..6d774969d 100644 --- a/breathecode/media/tests/urls/tests_file_id.py +++ b/breathecode/media/tests/urls/tests_file_id.py @@ -1,6 +1,7 @@ """ Test /answer """ + from 
breathecode.tests.mocks.requests import apply_requests_request_mock from unittest.mock import MagicMock, call, patch from django.urls.base import reverse_lazy @@ -11,7 +12,7 @@ ) from ..mixins import MediaTestCase -RESIZE_IMAGE_URL = 'https://us-central1-labor-day-story.cloudfunctions.net/resize-image' +RESIZE_IMAGE_URL = "https://us-central1-labor-day-story.cloudfunctions.net/resize-image" def apply_get_env(configuration={}): @@ -23,29 +24,30 @@ def get_env(key, value=None): def bad_mime_response(): - data = {'message': 'File type not allowed', 'status_code': 400} + data = {"message": "File type not allowed", "status_code": 400} return (400, RESIZE_IMAGE_URL, data) def bad_size_response(): - data = {'message': 'Incorrect width or height', 'status_code': 400} + data = {"message": "Incorrect width or height", "status_code": 400} return (400, RESIZE_IMAGE_URL, data) def bad_server_response(): - data = {'message': 'They killed Kenny', 'status_code': 400} + data = {"message": "They killed Kenny", "status_code": 400} return (500, RESIZE_IMAGE_URL, data) def resized_response(width=1000, height=1000): - data = {'message': 'Ok', 'status_code': 200, 'width': width, 'height': height} + data = {"message": "Ok", "status_code": 200, "width": width, "height": height} return (200, RESIZE_IMAGE_URL, data) # MEDIA_GALLERY_BUCKET -@patch.dict('os.environ', {'GOOGLE_CLOUD_TOKEN': 'blablabla'}) +@patch.dict("os.environ", {"GOOGLE_CLOUD_TOKEN": "blablabla"}) class MediaTestSuite(MediaTestCase): """Test /answer""" + """ 🔽🔽🔽 Without data """ @@ -54,30 +56,35 @@ def test_file_id__without_data(self): """Test /answer without auth""" self.headers(academy=1) models = self.generate_models(academy=True) - url = reverse_lazy('media:file_id', kwargs={'media_id': 1}) + url = reverse_lazy("media:file_id", kwargs={"media_id": 1}) response = self.client.get(url) json = response.json() - self.assertEqual(json, {'detail': 'Resource not found', 'status_code': 404}) + self.assertEqual(json, {"detail": "Resource not found", "status_code": 404}) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) self.assertEqual(self.all_media_dict(), []) self.assertEqual(self.all_media_resolution_dict(), []) @patch( - 'os.getenv', - MagicMock(side_effect=apply_get_env({ - 'GOOGLE_PROJECT_ID': 'labor-day-story', - 'MEDIA_GALLERY_BUCKET': 'bucket-name', - }))) + "os.getenv", + MagicMock( + side_effect=apply_get_env( + { + "GOOGLE_PROJECT_ID": "labor-day-story", + "MEDIA_GALLERY_BUCKET": "bucket-name", + } + ) + ), + ) def test_file_id__without_data__with_mask_true(self): """Test /answer without auth""" self.headers(academy=1) models = self.generate_models(academy=True) - url = reverse_lazy('media:file_id', kwargs={'media_id': 1}) + '?mask=true' + url = reverse_lazy("media:file_id", kwargs={"media_id": 1}) + "?mask=true" response = self.client.get(url) json = response.json() - self.assertEqual(json, {'detail': 'Resource not found', 'status_code': 404}) + self.assertEqual(json, {"detail": "Resource not found", "status_code": 404}) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) self.assertEqual(self.all_media_dict(), []) self.assertEqual(self.all_media_resolution_dict(), []) @@ -87,47 +94,67 @@ def test_file_id__without_data__with_mask_true(self): """ @patch( - 'os.getenv', - MagicMock(side_effect=apply_get_env({ - 'GOOGLE_PROJECT_ID': 'labor-day-story', - 'MEDIA_GALLERY_BUCKET': 'bucket-name', - }))) + "os.getenv", + MagicMock( + side_effect=apply_get_env( + { + "GOOGLE_PROJECT_ID": "labor-day-story", + 
"MEDIA_GALLERY_BUCKET": "bucket-name", + } + ) + ), + ) def test_file_id(self): """Test /answer without auth""" self.headers(academy=1) model = self.generate_models(academy=True, media=True) - url = reverse_lazy('media:file_id', kwargs={'media_id': 1}) + url = reverse_lazy("media:file_id", kwargs={"media_id": 1}) response = self.client.get(url) - self.assertEqual(response.url, model['media'].url) + self.assertEqual(response.url, model["media"].url) self.assertEqual(response.status_code, status.HTTP_301_MOVED_PERMANENTLY) - self.assertEqual(self.all_media_dict(), [{ - **self.model_to_dict(model, 'media'), - 'hits': model['media'].hits + 1, - }]) + self.assertEqual( + self.all_media_dict(), + [ + { + **self.model_to_dict(model, "media"), + "hits": model["media"].hits + 1, + } + ], + ) self.assertEqual(self.all_media_resolution_dict(), []) @patch( - 'os.getenv', - MagicMock(side_effect=apply_get_env({ - 'GOOGLE_PROJECT_ID': 'labor-day-story', - 'MEDIA_GALLERY_BUCKET': 'bucket-name', - }))) - @patch(REQUESTS_PATH['get'], apply_requests_get_mock([(200, 'https://potato.io', 'ok')])) + "os.getenv", + MagicMock( + side_effect=apply_get_env( + { + "GOOGLE_PROJECT_ID": "labor-day-story", + "MEDIA_GALLERY_BUCKET": "bucket-name", + } + ) + ), + ) + @patch(REQUESTS_PATH["get"], apply_requests_get_mock([(200, "https://potato.io", "ok")])) def test_file_id_with_mask_true(self): """Test /answer without auth""" self.headers(academy=1) - media_kwargs = {'url': 'https://potato.io'} + media_kwargs = {"url": "https://potato.io"} model = self.generate_models(academy=True, media=True, media_kwargs=media_kwargs) - url = reverse_lazy('media:file_id', kwargs={'media_id': 1}) + '?mask=true' + url = reverse_lazy("media:file_id", kwargs={"media_id": 1}) + "?mask=true" response = self.client.get(url) - self.assertEqual(response.getvalue().decode('utf-8'), 'ok') + self.assertEqual(response.getvalue().decode("utf-8"), "ok") self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_media_dict(), [{ - **self.model_to_dict(model, 'media'), - 'hits': model['media'].hits + 1, - }]) + self.assertEqual( + self.all_media_dict(), + [ + { + **self.model_to_dict(model, "media"), + "hits": model["media"].hits + 1, + } + ], + ) self.assertEqual(self.all_media_resolution_dict(), []) """ @@ -135,205 +162,281 @@ def test_file_id_with_mask_true(self): """ @patch( - 'os.getenv', - MagicMock(side_effect=apply_get_env({ - 'GOOGLE_PROJECT_ID': 'labor-day-story', - 'MEDIA_GALLERY_BUCKET': 'bucket-name', - }))) + "os.getenv", + MagicMock( + side_effect=apply_get_env( + { + "GOOGLE_PROJECT_ID": "labor-day-story", + "MEDIA_GALLERY_BUCKET": "bucket-name", + } + ) + ), + ) def test_file_id__with_width_in_querystring__bad_mime(self): """Test /answer without auth""" self.headers(academy=1) - media_kwargs = {'url': 'https://potato.io', 'mime': 'application/json'} + media_kwargs = {"url": "https://potato.io", "mime": "application/json"} model = self.generate_models(academy=True, media=True, media_kwargs=media_kwargs) - url = reverse_lazy('media:file_id', kwargs={'media_id': 1}) + '?width=1000' + url = reverse_lazy("media:file_id", kwargs={"media_id": 1}) + "?width=1000" response = self.client.get(url) json = response.json() - expected = {'detail': 'cannot-resize-media', 'status_code': 400} + expected = {"detail": "cannot-resize-media", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.all_media_dict(), [{ - 
**self.model_to_dict(model, 'media'), - }]) + self.assertEqual( + self.all_media_dict(), + [ + { + **self.model_to_dict(model, "media"), + } + ], + ) self.assertEqual(self.all_media_resolution_dict(), []) @patch( - 'os.getenv', - MagicMock(side_effect=apply_get_env({ - 'GOOGLE_PROJECT_ID': 'labor-day-story', - 'MEDIA_GALLERY_BUCKET': 'bucket-name', - }))) + "os.getenv", + MagicMock( + side_effect=apply_get_env( + { + "GOOGLE_PROJECT_ID": "labor-day-story", + "MEDIA_GALLERY_BUCKET": "bucket-name", + } + ) + ), + ) def test_file_id__with_width_in_querystring(self): """Test /answer without auth""" self.headers(academy=1) - media_kwargs = {'url': 'https://potato.io/harcoded', 'mime': 'image/png', 'hash': 'harcoded'} + media_kwargs = {"url": "https://potato.io/harcoded", "mime": "image/png", "hash": "harcoded"} model = self.generate_models(academy=True, media=True, media_kwargs=media_kwargs) - with patch('google.oauth2.id_token.fetch_id_token') as token_mock: - token_mock.return_value = 'blablabla' + with patch("google.oauth2.id_token.fetch_id_token") as token_mock: + token_mock.return_value = "blablabla" - with patch(REQUESTS_PATH['request'], apply_requests_request_mock([resized_response()])) as mock: - url = reverse_lazy('media:file_id', kwargs={'media_id': 1}) + '?width=1000' + with patch(REQUESTS_PATH["request"], apply_requests_request_mock([resized_response()])) as mock: + url = reverse_lazy("media:file_id", kwargs={"media_id": 1}) + "?width=1000" response = self.client.get(url) - self.assertEqual(response.url, 'https://potato.io/harcoded-1000x1000') + self.assertEqual(response.url, "https://potato.io/harcoded-1000x1000") self.assertEqual(response.status_code, status.HTTP_301_MOVED_PERMANENTLY) - self.assertEqual(mock.call_args_list, [ - call('POST', - 'https://us-central1-labor-day-story.cloudfunctions.net/resize-image', - data='{"width": "1000", "height": null, "filename": "harcoded", "bucket": "bucket-name"}', - headers={ - 'Authorization': 'Bearer blablabla', - 'Content-Type': 'application/json', - 'Accept': 'application/json' - }, - params={}, - timeout=2) - ]) - - self.assertEqual(self.all_media_dict(), [{ - **self.model_to_dict(model, 'media'), - 'hits': model['media'].hits + 1, - }]) - - self.assertEqual(self.all_media_resolution_dict(), [{ - 'hash': model.media.hash, - 'height': 1000, - 'hits': 1, - 'id': 1, - 'width': 1000, - }]) + self.assertEqual( + mock.call_args_list, + [ + call( + "POST", + "https://us-central1-labor-day-story.cloudfunctions.net/resize-image", + data='{"width": "1000", "height": null, "filename": "harcoded", "bucket": "bucket-name"}', + headers={ + "Authorization": "Bearer blablabla", + "Content-Type": "application/json", + "Accept": "application/json", + }, + params={}, + timeout=2, + ) + ], + ) + + self.assertEqual( + self.all_media_dict(), + [ + { + **self.model_to_dict(model, "media"), + "hits": model["media"].hits + 1, + } + ], + ) + + self.assertEqual( + self.all_media_resolution_dict(), + [ + { + "hash": model.media.hash, + "height": 1000, + "hits": 1, + "id": 1, + "width": 1000, + } + ], + ) @patch( - 'os.getenv', - MagicMock(side_effect=apply_get_env({ - 'GOOGLE_PROJECT_ID': 'labor-day-story', - 'MEDIA_GALLERY_BUCKET': 'bucket-name', - }))) + "os.getenv", + MagicMock( + side_effect=apply_get_env( + { + "GOOGLE_PROJECT_ID": "labor-day-story", + "MEDIA_GALLERY_BUCKET": "bucket-name", + } + ) + ), + ) def test_file_id__with_width_in_querystring__resolution_exist(self): """Test /answer without auth""" self.headers(academy=1) - media_kwargs = {'url': 
'https://potato.io/harcoded', 'mime': 'image/png', 'hash': 'harcoded'} - media_resolution_kwargs = {'width': 1000, 'height': 1000, 'hash': 'harcoded'} - model = self.generate_models(academy=True, - media=True, - media_resolution=True, - media_kwargs=media_kwargs, - media_resolution_kwargs=media_resolution_kwargs) - - with patch(REQUESTS_PATH['request'], apply_requests_request_mock([resized_response()])) as mock: - url = reverse_lazy('media:file_id', kwargs={'media_id': 1}) + '?width=1000' + media_kwargs = {"url": "https://potato.io/harcoded", "mime": "image/png", "hash": "harcoded"} + media_resolution_kwargs = {"width": 1000, "height": 1000, "hash": "harcoded"} + model = self.generate_models( + academy=True, + media=True, + media_resolution=True, + media_kwargs=media_kwargs, + media_resolution_kwargs=media_resolution_kwargs, + ) + + with patch(REQUESTS_PATH["request"], apply_requests_request_mock([resized_response()])) as mock: + url = reverse_lazy("media:file_id", kwargs={"media_id": 1}) + "?width=1000" response = self.client.get(url) - self.assertEqual(response.url, 'https://potato.io/harcoded-1000x1000') + self.assertEqual(response.url, "https://potato.io/harcoded-1000x1000") self.assertEqual(response.status_code, status.HTTP_301_MOVED_PERMANENTLY) self.assertEqual(mock.call_args_list, []) - self.assertEqual(self.all_media_dict(), [{ - **self.model_to_dict(model, 'media'), - 'hits': model['media'].hits + 1, - }]) - - self.assertEqual(self.all_media_resolution_dict(), - [{ - **self.model_to_dict(model, 'media_resolution'), - 'hits': model['media_resolution'].hits + 1, - }]) + self.assertEqual( + self.all_media_dict(), + [ + { + **self.model_to_dict(model, "media"), + "hits": model["media"].hits + 1, + } + ], + ) + + self.assertEqual( + self.all_media_resolution_dict(), + [ + { + **self.model_to_dict(model, "media_resolution"), + "hits": model["media_resolution"].hits + 1, + } + ], + ) @patch( - 'os.getenv', - MagicMock(side_effect=apply_get_env({ - 'GOOGLE_PROJECT_ID': 'labor-day-story', - 'MEDIA_GALLERY_BUCKET': 'bucket-name', - }))) + "os.getenv", + MagicMock( + side_effect=apply_get_env( + { + "GOOGLE_PROJECT_ID": "labor-day-story", + "MEDIA_GALLERY_BUCKET": "bucket-name", + } + ) + ), + ) def test_file_id__with_width_in_querystring__bad_mime(self): """Test /answer without auth""" self.headers(academy=1) - media_kwargs = {'url': 'https://potato.io/harcoded', 'mime': 'image/png', 'hash': 'harcoded'} + media_kwargs = {"url": "https://potato.io/harcoded", "mime": "image/png", "hash": "harcoded"} model = self.generate_models(academy=True, media=True, media_kwargs=media_kwargs) - with patch('google.oauth2.id_token.fetch_id_token') as token_mock: - token_mock.return_value = 'blablabla' + with patch("google.oauth2.id_token.fetch_id_token") as token_mock: + token_mock.return_value = "blablabla" - with patch(REQUESTS_PATH['request'], apply_requests_request_mock([bad_size_response()])) as mock: - url = reverse_lazy('media:file_id', kwargs={'media_id': 1}) + '?width=1000' + with patch(REQUESTS_PATH["request"], apply_requests_request_mock([bad_size_response()])) as mock: + url = reverse_lazy("media:file_id", kwargs={"media_id": 1}) + "?width=1000" response = self.client.get(url) json = response.json() expected = { - 'detail': 'cloud-function-bad-input', - 'status_code': 500, + "detail": "cloud-function-bad-input", + "status_code": 500, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR) - self.assertEqual(mock.call_args_list, [ - 
call('POST', - 'https://us-central1-labor-day-story.cloudfunctions.net/resize-image', - data='{"width": "1000", "height": null, "filename": "harcoded", "bucket": "bucket-name"}', - headers={ - 'Authorization': 'Bearer blablabla', - 'Content-Type': 'application/json', - 'Accept': 'application/json' - }, - params={}, - timeout=2) - ]) - - self.assertEqual(self.all_media_dict(), [{ - **self.model_to_dict(model, 'media'), - 'hits': model['media'].hits + 1, - }]) + self.assertEqual( + mock.call_args_list, + [ + call( + "POST", + "https://us-central1-labor-day-story.cloudfunctions.net/resize-image", + data='{"width": "1000", "height": null, "filename": "harcoded", "bucket": "bucket-name"}', + headers={ + "Authorization": "Bearer blablabla", + "Content-Type": "application/json", + "Accept": "application/json", + }, + params={}, + timeout=2, + ) + ], + ) + + self.assertEqual( + self.all_media_dict(), + [ + { + **self.model_to_dict(model, "media"), + "hits": model["media"].hits + 1, + } + ], + ) self.assertEqual(self.all_media_resolution_dict(), []) @patch( - 'os.getenv', - MagicMock(side_effect=apply_get_env({ - 'GOOGLE_PROJECT_ID': 'labor-day-story', - 'MEDIA_GALLERY_BUCKET': 'bucket-name', - }))) + "os.getenv", + MagicMock( + side_effect=apply_get_env( + { + "GOOGLE_PROJECT_ID": "labor-day-story", + "MEDIA_GALLERY_BUCKET": "bucket-name", + } + ) + ), + ) def test_file_id__with_width_in_querystring__cloud_function_error(self): """Test /answer without auth""" self.headers(academy=1) - media_kwargs = {'url': 'https://potato.io/harcoded', 'mime': 'image/png', 'hash': 'harcoded'} + media_kwargs = {"url": "https://potato.io/harcoded", "mime": "image/png", "hash": "harcoded"} model = self.generate_models(academy=True, media=True, media_kwargs=media_kwargs) - with patch('google.oauth2.id_token.fetch_id_token') as token_mock: - token_mock.return_value = 'blablabla' + with patch("google.oauth2.id_token.fetch_id_token") as token_mock: + token_mock.return_value = "blablabla" - with patch(REQUESTS_PATH['request'], apply_requests_request_mock([bad_server_response()])) as mock: - url = reverse_lazy('media:file_id', kwargs={'media_id': 1}) + '?width=1000' + with patch(REQUESTS_PATH["request"], apply_requests_request_mock([bad_server_response()])) as mock: + url = reverse_lazy("media:file_id", kwargs={"media_id": 1}) + "?width=1000" response = self.client.get(url) json = response.json() expected = { - 'detail': 'cloud-function-bad-input', - 'status_code': 500, + "detail": "cloud-function-bad-input", + "status_code": 500, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR) - self.assertEqual(mock.call_args_list, [ - call('POST', - 'https://us-central1-labor-day-story.cloudfunctions.net/resize-image', - data='{"width": "1000", "height": null, "filename": "harcoded", "bucket": "bucket-name"}', - headers={ - 'Authorization': 'Bearer blablabla', - 'Content-Type': 'application/json', - 'Accept': 'application/json' - }, - params={}, - timeout=2) - ]) - - self.assertEqual(self.all_media_dict(), [{ - **self.model_to_dict(model, 'media'), - 'hits': model['media'].hits + 1, - }]) + self.assertEqual( + mock.call_args_list, + [ + call( + "POST", + "https://us-central1-labor-day-story.cloudfunctions.net/resize-image", + data='{"width": "1000", "height": null, "filename": "harcoded", "bucket": "bucket-name"}', + headers={ + "Authorization": "Bearer blablabla", + "Content-Type": "application/json", + "Accept": "application/json", + }, + params={}, + timeout=2, + ) + 
], + ) + + self.assertEqual( + self.all_media_dict(), + [ + { + **self.model_to_dict(model, "media"), + "hits": model["media"].hits + 1, + } + ], + ) self.assertEqual(self.all_media_resolution_dict(), []) @@ -342,204 +445,280 @@ def test_file_id__with_width_in_querystring__cloud_function_error(self): """ @patch( - 'os.getenv', - MagicMock(side_effect=apply_get_env({ - 'GOOGLE_PROJECT_ID': 'labor-day-story', - 'MEDIA_GALLERY_BUCKET': 'bucket-name', - }))) + "os.getenv", + MagicMock( + side_effect=apply_get_env( + { + "GOOGLE_PROJECT_ID": "labor-day-story", + "MEDIA_GALLERY_BUCKET": "bucket-name", + } + ) + ), + ) def test_file_id__with_height_in_querystring__bad_mime(self): """Test /answer without auth""" self.headers(academy=1) - media_kwargs = {'url': 'https://potato.io', 'mime': 'application/json'} + media_kwargs = {"url": "https://potato.io", "mime": "application/json"} model = self.generate_models(academy=True, media=True, media_kwargs=media_kwargs) - url = reverse_lazy('media:file_id', kwargs={'media_id': 1}) + '?height=1000' + url = reverse_lazy("media:file_id", kwargs={"media_id": 1}) + "?height=1000" response = self.client.get(url) json = response.json() - expected = {'detail': 'cannot-resize-media', 'status_code': 400} + expected = {"detail": "cannot-resize-media", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.all_media_dict(), [{ - **self.model_to_dict(model, 'media'), - }]) + self.assertEqual( + self.all_media_dict(), + [ + { + **self.model_to_dict(model, "media"), + } + ], + ) self.assertEqual(self.all_media_resolution_dict(), []) @patch( - 'os.getenv', - MagicMock(side_effect=apply_get_env({ - 'GOOGLE_PROJECT_ID': 'labor-day-story', - 'MEDIA_GALLERY_BUCKET': 'bucket-name', - }))) + "os.getenv", + MagicMock( + side_effect=apply_get_env( + { + "GOOGLE_PROJECT_ID": "labor-day-story", + "MEDIA_GALLERY_BUCKET": "bucket-name", + } + ) + ), + ) def test_file_id__with_height_in_querystring(self): """Test /answer without auth""" self.headers(academy=1) - media_kwargs = {'url': 'https://potato.io/harcoded', 'mime': 'image/png', 'hash': 'harcoded'} + media_kwargs = {"url": "https://potato.io/harcoded", "mime": "image/png", "hash": "harcoded"} model = self.generate_models(academy=True, media=True, media_kwargs=media_kwargs) - with patch('google.oauth2.id_token.fetch_id_token') as token_mock: - token_mock.return_value = 'blablabla' + with patch("google.oauth2.id_token.fetch_id_token") as token_mock: + token_mock.return_value = "blablabla" - with patch(REQUESTS_PATH['request'], apply_requests_request_mock([resized_response()])) as mock: - url = reverse_lazy('media:file_id', kwargs={'media_id': 1}) + '?height=1000' + with patch(REQUESTS_PATH["request"], apply_requests_request_mock([resized_response()])) as mock: + url = reverse_lazy("media:file_id", kwargs={"media_id": 1}) + "?height=1000" response = self.client.get(url) - self.assertEqual(response.url, 'https://potato.io/harcoded-1000x1000') + self.assertEqual(response.url, "https://potato.io/harcoded-1000x1000") self.assertEqual(response.status_code, status.HTTP_301_MOVED_PERMANENTLY) - self.assertEqual(mock.call_args_list, [ - call('POST', - 'https://us-central1-labor-day-story.cloudfunctions.net/resize-image', - data='{"width": null, "height": "1000", "filename": "harcoded", "bucket": "bucket-name"}', - headers={ - 'Authorization': 'Bearer blablabla', - 'Content-Type': 'application/json', - 'Accept': 'application/json' - }, - params={}, - 
timeout=2) - ]) - - self.assertEqual(self.all_media_dict(), [{ - **self.model_to_dict(model, 'media'), - 'hits': model['media'].hits + 1, - }]) - - self.assertEqual(self.all_media_resolution_dict(), [{ - 'hash': model.media.hash, - 'height': 1000, - 'hits': 1, - 'id': 1, - 'width': 1000, - }]) + self.assertEqual( + mock.call_args_list, + [ + call( + "POST", + "https://us-central1-labor-day-story.cloudfunctions.net/resize-image", + data='{"width": null, "height": "1000", "filename": "harcoded", "bucket": "bucket-name"}', + headers={ + "Authorization": "Bearer blablabla", + "Content-Type": "application/json", + "Accept": "application/json", + }, + params={}, + timeout=2, + ) + ], + ) + + self.assertEqual( + self.all_media_dict(), + [ + { + **self.model_to_dict(model, "media"), + "hits": model["media"].hits + 1, + } + ], + ) + + self.assertEqual( + self.all_media_resolution_dict(), + [ + { + "hash": model.media.hash, + "height": 1000, + "hits": 1, + "id": 1, + "width": 1000, + } + ], + ) @patch( - 'os.getenv', - MagicMock(side_effect=apply_get_env({ - 'GOOGLE_PROJECT_ID': 'labor-day-story', - 'MEDIA_GALLERY_BUCKET': 'bucket-name', - }))) + "os.getenv", + MagicMock( + side_effect=apply_get_env( + { + "GOOGLE_PROJECT_ID": "labor-day-story", + "MEDIA_GALLERY_BUCKET": "bucket-name", + } + ) + ), + ) def test_file_id__with_height_in_querystring__resolution_exist(self): """Test /answer without auth""" self.headers(academy=1) - media_kwargs = {'url': 'https://potato.io/harcoded', 'mime': 'image/png', 'hash': 'harcoded'} - media_resolution_kwargs = {'width': 1000, 'height': 1000, 'hash': 'harcoded'} - model = self.generate_models(academy=True, - media=True, - media_resolution=True, - media_kwargs=media_kwargs, - media_resolution_kwargs=media_resolution_kwargs) - - with patch(REQUESTS_PATH['request'], apply_requests_request_mock([resized_response()])) as mock: - url = reverse_lazy('media:file_id', kwargs={'media_id': 1}) + '?height=1000' + media_kwargs = {"url": "https://potato.io/harcoded", "mime": "image/png", "hash": "harcoded"} + media_resolution_kwargs = {"width": 1000, "height": 1000, "hash": "harcoded"} + model = self.generate_models( + academy=True, + media=True, + media_resolution=True, + media_kwargs=media_kwargs, + media_resolution_kwargs=media_resolution_kwargs, + ) + + with patch(REQUESTS_PATH["request"], apply_requests_request_mock([resized_response()])) as mock: + url = reverse_lazy("media:file_id", kwargs={"media_id": 1}) + "?height=1000" response = self.client.get(url) - self.assertEqual(response.url, 'https://potato.io/harcoded-1000x1000') + self.assertEqual(response.url, "https://potato.io/harcoded-1000x1000") self.assertEqual(response.status_code, status.HTTP_301_MOVED_PERMANENTLY) self.assertEqual(mock.call_args_list, []) - self.assertEqual(self.all_media_dict(), [{ - **self.model_to_dict(model, 'media'), - 'hits': model['media'].hits + 1, - }]) - - self.assertEqual(self.all_media_resolution_dict(), - [{ - **self.model_to_dict(model, 'media_resolution'), - 'hits': model['media_resolution'].hits + 1, - }]) + self.assertEqual( + self.all_media_dict(), + [ + { + **self.model_to_dict(model, "media"), + "hits": model["media"].hits + 1, + } + ], + ) + + self.assertEqual( + self.all_media_resolution_dict(), + [ + { + **self.model_to_dict(model, "media_resolution"), + "hits": model["media_resolution"].hits + 1, + } + ], + ) @patch( - 'os.getenv', - MagicMock(side_effect=apply_get_env({ - 'GOOGLE_PROJECT_ID': 'labor-day-story', - 'MEDIA_GALLERY_BUCKET': 'bucket-name', - }))) + "os.getenv", 
+ MagicMock( + side_effect=apply_get_env( + { + "GOOGLE_PROJECT_ID": "labor-day-story", + "MEDIA_GALLERY_BUCKET": "bucket-name", + } + ) + ), + ) def test_file_id__with_height_in_querystring__bad_mime(self): """Test /answer without auth""" self.headers(academy=1) - media_kwargs = {'url': 'https://potato.io/harcoded', 'mime': 'image/png', 'hash': 'harcoded'} + media_kwargs = {"url": "https://potato.io/harcoded", "mime": "image/png", "hash": "harcoded"} model = self.generate_models(academy=True, media=True, media_kwargs=media_kwargs) - with patch('google.oauth2.id_token.fetch_id_token') as token_mock: - token_mock.return_value = 'blablabla' + with patch("google.oauth2.id_token.fetch_id_token") as token_mock: + token_mock.return_value = "blablabla" - with patch(REQUESTS_PATH['request'], apply_requests_request_mock([bad_size_response()])) as mock: - url = reverse_lazy('media:file_id', kwargs={'media_id': 1}) + '?height=1000' + with patch(REQUESTS_PATH["request"], apply_requests_request_mock([bad_size_response()])) as mock: + url = reverse_lazy("media:file_id", kwargs={"media_id": 1}) + "?height=1000" response = self.client.get(url) json = response.json() expected = { - 'detail': 'cloud-function-bad-input', - 'status_code': 500, + "detail": "cloud-function-bad-input", + "status_code": 500, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR) - self.assertEqual(mock.call_args_list, [ - call('POST', - 'https://us-central1-labor-day-story.cloudfunctions.net/resize-image', - data='{"width": null, "height": "1000", "filename": "harcoded", "bucket": "bucket-name"}', - headers={ - 'Authorization': 'Bearer blablabla', - 'Content-Type': 'application/json', - 'Accept': 'application/json' - }, - params={}, - timeout=2) - ]) - - self.assertEqual(self.all_media_dict(), [{ - **self.model_to_dict(model, 'media'), - 'hits': model['media'].hits + 1, - }]) + self.assertEqual( + mock.call_args_list, + [ + call( + "POST", + "https://us-central1-labor-day-story.cloudfunctions.net/resize-image", + data='{"width": null, "height": "1000", "filename": "harcoded", "bucket": "bucket-name"}', + headers={ + "Authorization": "Bearer blablabla", + "Content-Type": "application/json", + "Accept": "application/json", + }, + params={}, + timeout=2, + ) + ], + ) + + self.assertEqual( + self.all_media_dict(), + [ + { + **self.model_to_dict(model, "media"), + "hits": model["media"].hits + 1, + } + ], + ) self.assertEqual(self.all_media_resolution_dict(), []) @patch( - 'os.getenv', - MagicMock(side_effect=apply_get_env({ - 'GOOGLE_PROJECT_ID': 'labor-day-story', - 'MEDIA_GALLERY_BUCKET': 'bucket-name', - }))) + "os.getenv", + MagicMock( + side_effect=apply_get_env( + { + "GOOGLE_PROJECT_ID": "labor-day-story", + "MEDIA_GALLERY_BUCKET": "bucket-name", + } + ) + ), + ) def test_file_id__with_height_in_querystring__cloud_function_error(self): """Test /answer without auth""" self.headers(academy=1) - media_kwargs = {'url': 'https://potato.io/harcoded', 'mime': 'image/png', 'hash': 'harcoded'} + media_kwargs = {"url": "https://potato.io/harcoded", "mime": "image/png", "hash": "harcoded"} model = self.generate_models(academy=True, media=True, media_kwargs=media_kwargs) - with patch('google.oauth2.id_token.fetch_id_token') as token_mock: - token_mock.return_value = 'blablabla' + with patch("google.oauth2.id_token.fetch_id_token") as token_mock: + token_mock.return_value = "blablabla" - with patch(REQUESTS_PATH['request'], apply_requests_request_mock([bad_server_response()])) as 
mock: - url = reverse_lazy('media:file_id', kwargs={'media_id': 1}) + '?height=1000' + with patch(REQUESTS_PATH["request"], apply_requests_request_mock([bad_server_response()])) as mock: + url = reverse_lazy("media:file_id", kwargs={"media_id": 1}) + "?height=1000" response = self.client.get(url) json = response.json() expected = { - 'detail': 'cloud-function-bad-input', - 'status_code': 500, + "detail": "cloud-function-bad-input", + "status_code": 500, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR) - self.assertEqual(mock.call_args_list, [ - call('POST', - 'https://us-central1-labor-day-story.cloudfunctions.net/resize-image', - data='{"width": null, "height": "1000", "filename": "harcoded", "bucket": "bucket-name"}', - headers={ - 'Authorization': 'Bearer blablabla', - 'Content-Type': 'application/json', - 'Accept': 'application/json' - }, - params={}, - timeout=2) - ]) - - self.assertEqual(self.all_media_dict(), [{ - **self.model_to_dict(model, 'media'), - 'hits': model['media'].hits + 1, - }]) + self.assertEqual( + mock.call_args_list, + [ + call( + "POST", + "https://us-central1-labor-day-story.cloudfunctions.net/resize-image", + data='{"width": null, "height": "1000", "filename": "harcoded", "bucket": "bucket-name"}', + headers={ + "Authorization": "Bearer blablabla", + "Content-Type": "application/json", + "Accept": "application/json", + }, + params={}, + timeout=2, + ) + ], + ) + + self.assertEqual( + self.all_media_dict(), + [ + { + **self.model_to_dict(model, "media"), + "hits": model["media"].hits + 1, + } + ], + ) self.assertEqual(self.all_media_resolution_dict(), []) diff --git a/breathecode/media/tests/urls/tests_file_slug.py b/breathecode/media/tests/urls/tests_file_slug.py index 8d63eddad..e8166c271 100644 --- a/breathecode/media/tests/urls/tests_file_slug.py +++ b/breathecode/media/tests/urls/tests_file_slug.py @@ -1,6 +1,7 @@ """ Test /answer """ + from breathecode.tests.mocks.requests import apply_requests_request_mock from unittest.mock import MagicMock, call, patch from django.urls.base import reverse_lazy @@ -8,7 +9,7 @@ from breathecode.tests.mocks import REQUESTS_PATH, apply_requests_get_mock from ..mixins import MediaTestCase -RESIZE_IMAGE_URL = 'https://us-central1-labor-day-story.cloudfunctions.net/resize-image' +RESIZE_IMAGE_URL = "https://us-central1-labor-day-story.cloudfunctions.net/resize-image" def apply_get_env(configuration={}): @@ -20,28 +21,29 @@ def get_env(key, value=None): def bad_mime_response(): - data = {'message': 'File type not allowed', 'status_code': 400} + data = {"message": "File type not allowed", "status_code": 400} return (400, RESIZE_IMAGE_URL, data) def bad_size_response(): - data = {'message': 'Incorrect width or height', 'status_code': 400} + data = {"message": "Incorrect width or height", "status_code": 400} return (400, RESIZE_IMAGE_URL, data) def bad_server_response(): - data = {'message': 'They killed Kenny', 'status_code': 400} + data = {"message": "They killed Kenny", "status_code": 400} return (500, RESIZE_IMAGE_URL, data) def resized_response(width=1000, height=1000): - data = {'message': 'Ok', 'status_code': 200, 'width': width, 'height': height} + data = {"message": "Ok", "status_code": 200, "width": width, "height": height} return (200, RESIZE_IMAGE_URL, data) -@patch.dict('os.environ', {'GOOGLE_CLOUD_TOKEN': 'blablabla'}) +@patch.dict("os.environ", {"GOOGLE_CLOUD_TOKEN": "blablabla"}) class MediaTestSuite(MediaTestCase): """Test /answer""" + """ 🔽🔽🔽 Without 
data """ @@ -50,30 +52,35 @@ def test_file_slug__without_data(self): """Test /answer without auth""" self.headers(academy=1) models = self.generate_models(academy=True) - url = reverse_lazy('media:file_slug', kwargs={'media_slug': 'they-killed-kenny'}) + url = reverse_lazy("media:file_slug", kwargs={"media_slug": "they-killed-kenny"}) response = self.client.get(url) json = response.json() - self.assertEqual(json, {'detail': 'Resource not found', 'status_code': 404}) + self.assertEqual(json, {"detail": "Resource not found", "status_code": 404}) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) self.assertEqual(self.all_media_dict(), []) self.assertEqual(self.all_media_resolution_dict(), []) @patch( - 'os.getenv', - MagicMock(side_effect=apply_get_env({ - 'GOOGLE_PROJECT_ID': 'labor-day-story', - 'MEDIA_GALLERY_BUCKET': 'bucket-name', - }))) + "os.getenv", + MagicMock( + side_effect=apply_get_env( + { + "GOOGLE_PROJECT_ID": "labor-day-story", + "MEDIA_GALLERY_BUCKET": "bucket-name", + } + ) + ), + ) def test_file_slug_without_data_with_mask_true(self): """Test /answer without auth""" self.headers(academy=1) model = self.generate_models(academy=True) - url = reverse_lazy('media:file_slug', kwargs={'media_slug': 'they-killed-kenny'}) + '?mask=true' + url = reverse_lazy("media:file_slug", kwargs={"media_slug": "they-killed-kenny"}) + "?mask=true" response = self.client.get(url) json = response.json() - self.assertEqual(json, {'detail': 'Resource not found', 'status_code': 404}) + self.assertEqual(json, {"detail": "Resource not found", "status_code": 404}) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) self.assertEqual(self.all_media_dict(), []) self.assertEqual(self.all_media_resolution_dict(), []) @@ -83,47 +90,67 @@ def test_file_slug_without_data_with_mask_true(self): """ @patch( - 'os.getenv', - MagicMock(side_effect=apply_get_env({ - 'GOOGLE_PROJECT_ID': 'labor-day-story', - 'MEDIA_GALLERY_BUCKET': 'bucket-name', - }))) + "os.getenv", + MagicMock( + side_effect=apply_get_env( + { + "GOOGLE_PROJECT_ID": "labor-day-story", + "MEDIA_GALLERY_BUCKET": "bucket-name", + } + ) + ), + ) def test_file_slug(self): """Test /answer without auth""" self.headers(academy=1) model = self.generate_models(academy=True, media=True) - url = reverse_lazy('media:file_slug', kwargs={'media_slug': model['media'].slug}) + url = reverse_lazy("media:file_slug", kwargs={"media_slug": model["media"].slug}) response = self.client.get(url) - self.assertEqual(response.url, model['media'].url) + self.assertEqual(response.url, model["media"].url) self.assertEqual(response.status_code, status.HTTP_301_MOVED_PERMANENTLY) - self.assertEqual(self.all_media_dict(), [{ - **self.model_to_dict(model, 'media'), - 'hits': model['media'].hits + 1, - }]) + self.assertEqual( + self.all_media_dict(), + [ + { + **self.model_to_dict(model, "media"), + "hits": model["media"].hits + 1, + } + ], + ) self.assertEqual(self.all_media_resolution_dict(), []) - @patch(REQUESTS_PATH['get'], apply_requests_get_mock([(200, 'https://potato.io', 'ok')])) + @patch(REQUESTS_PATH["get"], apply_requests_get_mock([(200, "https://potato.io", "ok")])) @patch( - 'os.getenv', - MagicMock(side_effect=apply_get_env({ - 'GOOGLE_PROJECT_ID': 'labor-day-story', - 'MEDIA_GALLERY_BUCKET': 'bucket-name', - }))) + "os.getenv", + MagicMock( + side_effect=apply_get_env( + { + "GOOGLE_PROJECT_ID": "labor-day-story", + "MEDIA_GALLERY_BUCKET": "bucket-name", + } + ) + ), + ) def test_file_slug_with_mask_true(self): """Test /answer without 
auth""" self.headers(academy=1) - media_kwargs = {'url': 'https://potato.io'} + media_kwargs = {"url": "https://potato.io"} model = self.generate_models(academy=True, media=True, media_kwargs=media_kwargs) - url = reverse_lazy('media:file_slug', kwargs={'media_slug': model['media'].slug}) + '?mask=true' + url = reverse_lazy("media:file_slug", kwargs={"media_slug": model["media"].slug}) + "?mask=true" response = self.client.get(url) - self.assertEqual(response.getvalue().decode('utf-8'), 'ok') + self.assertEqual(response.getvalue().decode("utf-8"), "ok") self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_media_dict(), [{ - **self.model_to_dict(model, 'media'), - 'hits': model['media'].hits + 1, - }]) + self.assertEqual( + self.all_media_dict(), + [ + { + **self.model_to_dict(model, "media"), + "hits": model["media"].hits + 1, + } + ], + ) self.assertEqual(self.all_media_resolution_dict(), []) """ @@ -131,183 +158,249 @@ def test_file_slug_with_mask_true(self): """ @patch( - 'os.getenv', - MagicMock(side_effect=apply_get_env({ - 'GOOGLE_PROJECT_ID': 'labor-day-story', - 'MEDIA_GALLERY_BUCKET': 'bucket-name', - }))) + "os.getenv", + MagicMock( + side_effect=apply_get_env( + { + "GOOGLE_PROJECT_ID": "labor-day-story", + "MEDIA_GALLERY_BUCKET": "bucket-name", + } + ) + ), + ) def test_file_slug__with_width_in_querystring__bad_mime(self): """Test /answer without auth""" self.headers(academy=1) - media_kwargs = {'url': 'https://potato.io', 'mime': 'application/json'} + media_kwargs = {"url": "https://potato.io", "mime": "application/json"} model = self.generate_models(academy=True, media=True, media_kwargs=media_kwargs) - url = reverse_lazy('media:file_slug', kwargs={'media_slug': model['media'].slug}) + '?width=1000' + url = reverse_lazy("media:file_slug", kwargs={"media_slug": model["media"].slug}) + "?width=1000" response = self.client.get(url) json = response.json() - expected = {'detail': 'cannot-resize-media', 'status_code': 400} + expected = {"detail": "cannot-resize-media", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.all_media_dict(), [{ - **self.model_to_dict(model, 'media'), - }]) + self.assertEqual( + self.all_media_dict(), + [ + { + **self.model_to_dict(model, "media"), + } + ], + ) self.assertEqual(self.all_media_resolution_dict(), []) @patch( - 'os.getenv', - MagicMock(side_effect=apply_get_env({ - 'GOOGLE_PROJECT_ID': 'labor-day-story', - 'MEDIA_GALLERY_BUCKET': 'bucket-name', - }))) + "os.getenv", + MagicMock( + side_effect=apply_get_env( + { + "GOOGLE_PROJECT_ID": "labor-day-story", + "MEDIA_GALLERY_BUCKET": "bucket-name", + } + ) + ), + ) def test_file_slug__with_width_in_querystring(self): """Test /answer without auth""" self.headers(academy=1) - media_kwargs = {'url': 'https://potato.io/harcoded', 'mime': 'image/png', 'hash': 'harcoded'} + media_kwargs = {"url": "https://potato.io/harcoded", "mime": "image/png", "hash": "harcoded"} model = self.generate_models(academy=True, media=True, media_kwargs=media_kwargs) - with patch('google.oauth2.id_token.fetch_id_token') as token_mock: - token_mock.return_value = 'blablabla' + with patch("google.oauth2.id_token.fetch_id_token") as token_mock: + token_mock.return_value = "blablabla" - with patch(REQUESTS_PATH['request'], apply_requests_request_mock([resized_response()])) as mock: - url = reverse_lazy('media:file_slug', kwargs={'media_slug': model['media'].slug}) + '?width=1000' + with 
patch(REQUESTS_PATH["request"], apply_requests_request_mock([resized_response()])) as mock: + url = reverse_lazy("media:file_slug", kwargs={"media_slug": model["media"].slug}) + "?width=1000" response = self.client.get(url) - self.assertEqual(response.url, 'https://potato.io/harcoded-1000x1000') + self.assertEqual(response.url, "https://potato.io/harcoded-1000x1000") self.assertEqual(response.status_code, status.HTTP_301_MOVED_PERMANENTLY) - self.assertEqual(mock.call_args_list, [ - call('POST', - 'https://us-central1-labor-day-story.cloudfunctions.net/resize-image', - data='{"width": "1000", "height": null, "filename": "harcoded", "bucket": "bucket-name"}', - headers={ - 'Authorization': 'Bearer blablabla', - 'Content-Type': 'application/json', - 'Accept': 'application/json' - }, - params={}, - timeout=2) - ]) - - self.assertEqual(self.all_media_dict(), [{ - **self.model_to_dict(model, 'media'), - 'hits': model['media'].hits + 1, - }]) - - self.assertEqual(self.all_media_resolution_dict(), [{ - 'hash': model.media.hash, - 'height': 1000, - 'hits': 1, - 'id': 1, - 'width': 1000, - }]) + self.assertEqual( + mock.call_args_list, + [ + call( + "POST", + "https://us-central1-labor-day-story.cloudfunctions.net/resize-image", + data='{"width": "1000", "height": null, "filename": "harcoded", "bucket": "bucket-name"}', + headers={ + "Authorization": "Bearer blablabla", + "Content-Type": "application/json", + "Accept": "application/json", + }, + params={}, + timeout=2, + ) + ], + ) + + self.assertEqual( + self.all_media_dict(), + [ + { + **self.model_to_dict(model, "media"), + "hits": model["media"].hits + 1, + } + ], + ) + + self.assertEqual( + self.all_media_resolution_dict(), + [ + { + "hash": model.media.hash, + "height": 1000, + "hits": 1, + "id": 1, + "width": 1000, + } + ], + ) @patch( - 'os.getenv', - MagicMock(side_effect=apply_get_env({ - 'GOOGLE_PROJECT_ID': 'labor-day-story', - 'MEDIA_GALLERY_BUCKET': 'bucket-name', - }))) + "os.getenv", + MagicMock( + side_effect=apply_get_env( + { + "GOOGLE_PROJECT_ID": "labor-day-story", + "MEDIA_GALLERY_BUCKET": "bucket-name", + } + ) + ), + ) def test_file_slug__with_width_in_querystring__resolution_exist(self): """Test /answer without auth""" self.headers(academy=1) - media_kwargs = {'url': 'https://potato.io/harcoded', 'mime': 'image/png', 'hash': 'harcoded'} - media_resolution_kwargs = {'width': 1000, 'height': 1000, 'hash': 'harcoded'} - model = self.generate_models(academy=True, - media=True, - media_resolution=True, - media_kwargs=media_kwargs, - media_resolution_kwargs=media_resolution_kwargs) - - with patch(REQUESTS_PATH['request'], apply_requests_request_mock([resized_response()])) as mock: - url = reverse_lazy('media:file_slug', kwargs={'media_slug': model['media'].slug}) + '?width=1000' + media_kwargs = {"url": "https://potato.io/harcoded", "mime": "image/png", "hash": "harcoded"} + media_resolution_kwargs = {"width": 1000, "height": 1000, "hash": "harcoded"} + model = self.generate_models( + academy=True, + media=True, + media_resolution=True, + media_kwargs=media_kwargs, + media_resolution_kwargs=media_resolution_kwargs, + ) + + with patch(REQUESTS_PATH["request"], apply_requests_request_mock([resized_response()])) as mock: + url = reverse_lazy("media:file_slug", kwargs={"media_slug": model["media"].slug}) + "?width=1000" response = self.client.get(url) - self.assertEqual(response.url, 'https://potato.io/harcoded-1000x1000') + self.assertEqual(response.url, "https://potato.io/harcoded-1000x1000") self.assertEqual(response.status_code, 
status.HTTP_301_MOVED_PERMANENTLY) self.assertEqual(mock.call_args_list, []) - self.assertEqual(self.all_media_dict(), [{ - **self.model_to_dict(model, 'media'), - 'hits': model['media'].hits + 1, - }]) + self.assertEqual( + self.all_media_dict(), + [ + { + **self.model_to_dict(model, "media"), + "hits": model["media"].hits + 1, + } + ], + ) - self.assertEqual(self.all_media_resolution_dict(), - [{ - **self.model_to_dict(model, 'media_resolution'), - 'hits': model['media_resolution'].hits + 1, - }]) + self.assertEqual( + self.all_media_resolution_dict(), + [ + { + **self.model_to_dict(model, "media_resolution"), + "hits": model["media_resolution"].hits + 1, + } + ], + ) @patch( - 'os.getenv', - MagicMock(side_effect=apply_get_env({ - 'GOOGLE_PROJECT_ID': 'labor-day-story', - 'MEDIA_GALLERY_BUCKET': 'bucket-name', - }))) + "os.getenv", + MagicMock( + side_effect=apply_get_env( + { + "GOOGLE_PROJECT_ID": "labor-day-story", + "MEDIA_GALLERY_BUCKET": "bucket-name", + } + ) + ), + ) def test_file_slug__with_width_in_querystring__bad_mime(self): """Test /answer without auth""" self.headers(academy=1) - media_kwargs = {'url': 'https://potato.io/harcoded', 'mime': 'image/png', 'hash': 'harcoded'} + media_kwargs = {"url": "https://potato.io/harcoded", "mime": "image/png", "hash": "harcoded"} model = self.generate_models(academy=True, media=True, media_kwargs=media_kwargs) - with patch('google.oauth2.id_token.fetch_id_token') as token_mock: - token_mock.return_value = 'blablabla' + with patch("google.oauth2.id_token.fetch_id_token") as token_mock: + token_mock.return_value = "blablabla" - with patch(REQUESTS_PATH['request'], apply_requests_request_mock([bad_size_response()])) as mock: - url = reverse_lazy('media:file_slug', kwargs={'media_slug': model['media'].slug}) + '?width=1000' + with patch(REQUESTS_PATH["request"], apply_requests_request_mock([bad_size_response()])) as mock: + url = reverse_lazy("media:file_slug", kwargs={"media_slug": model["media"].slug}) + "?width=1000" response = self.client.get(url) json = response.json() expected = { - 'detail': 'cloud-function-bad-input', - 'status_code': 500, + "detail": "cloud-function-bad-input", + "status_code": 500, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR) - self.assertEqual(mock.call_args_list, [ - call('POST', - 'https://us-central1-labor-day-story.cloudfunctions.net/resize-image', - data='{"width": "1000", "height": null, "filename": "harcoded", "bucket": "bucket-name"}', - headers={ - 'Authorization': 'Bearer blablabla', - 'Content-Type': 'application/json', - 'Accept': 'application/json' - }, - params={}, - timeout=2) - ]) - - self.assertEqual(self.all_media_dict(), [{ - **self.model_to_dict(model, 'media'), - 'hits': model['media'].hits + 1, - }]) + self.assertEqual( + mock.call_args_list, + [ + call( + "POST", + "https://us-central1-labor-day-story.cloudfunctions.net/resize-image", + data='{"width": "1000", "height": null, "filename": "harcoded", "bucket": "bucket-name"}', + headers={ + "Authorization": "Bearer blablabla", + "Content-Type": "application/json", + "Accept": "application/json", + }, + params={}, + timeout=2, + ) + ], + ) + + self.assertEqual( + self.all_media_dict(), + [ + { + **self.model_to_dict(model, "media"), + "hits": model["media"].hits + 1, + } + ], + ) self.assertEqual(self.all_media_resolution_dict(), []) @patch( - 'os.getenv', - MagicMock(side_effect=apply_get_env({ - 'GOOGLE_PROJECT_ID': 'labor-day-story', - 'MEDIA_GALLERY_BUCKET': 
'bucket-name', - }))) + "os.getenv", + MagicMock( + side_effect=apply_get_env( + { + "GOOGLE_PROJECT_ID": "labor-day-story", + "MEDIA_GALLERY_BUCKET": "bucket-name", + } + ) + ), + ) def test_file_slug__with_width_in_querystring__cloud_function_error(self): """Test /answer without auth""" self.headers(academy=1) - media_kwargs = {'url': 'https://potato.io/harcoded', 'mime': 'image/png', 'hash': 'harcoded'} + media_kwargs = {"url": "https://potato.io/harcoded", "mime": "image/png", "hash": "harcoded"} model = self.generate_models(academy=True, media=True, media_kwargs=media_kwargs) - with patch('google.oauth2.id_token.fetch_id_token') as token_mock: - token_mock.return_value = 'blablabla' + with patch("google.oauth2.id_token.fetch_id_token") as token_mock: + token_mock.return_value = "blablabla" - with patch(REQUESTS_PATH['request'], apply_requests_request_mock([bad_server_response()])) as mock: - url = reverse_lazy('media:file_slug', kwargs={'media_slug': model['media'].slug}) + '?width=1000' + with patch(REQUESTS_PATH["request"], apply_requests_request_mock([bad_server_response()])) as mock: + url = reverse_lazy("media:file_slug", kwargs={"media_slug": model["media"].slug}) + "?width=1000" response = self.client.get(url) json = response.json() expected = { - 'detail': 'cloud-function-bad-input', - 'status_code': 500, + "detail": "cloud-function-bad-input", + "status_code": 500, } self.assertEqual(json, expected) @@ -315,23 +408,33 @@ def test_file_slug__with_width_in_querystring__cloud_function_error(self): self.assertEqual( str(mock.call_args_list), - str([ - call('POST', - 'https://us-central1-labor-day-story.cloudfunctions.net/resize-image', - data='{"width": "1000", "height": null, "filename": "harcoded", "bucket": "bucket-name"}', - headers={ - 'Authorization': 'Bearer blablabla', - 'Content-Type': 'application/json', - 'Accept': 'application/json' - }, - params={}, - timeout=2) - ])) - - self.assertEqual(self.all_media_dict(), [{ - **self.model_to_dict(model, 'media'), - 'hits': model['media'].hits + 1, - }]) + str( + [ + call( + "POST", + "https://us-central1-labor-day-story.cloudfunctions.net/resize-image", + data='{"width": "1000", "height": null, "filename": "harcoded", "bucket": "bucket-name"}', + headers={ + "Authorization": "Bearer blablabla", + "Content-Type": "application/json", + "Accept": "application/json", + }, + params={}, + timeout=2, + ) + ] + ), + ) + + self.assertEqual( + self.all_media_dict(), + [ + { + **self.model_to_dict(model, "media"), + "hits": model["media"].hits + 1, + } + ], + ) self.assertEqual(self.all_media_resolution_dict(), []) @@ -340,204 +443,280 @@ def test_file_slug__with_width_in_querystring__cloud_function_error(self): """ @patch( - 'os.getenv', - MagicMock(side_effect=apply_get_env({ - 'GOOGLE_PROJECT_ID': 'labor-day-story', - 'MEDIA_GALLERY_BUCKET': 'bucket-name', - }))) + "os.getenv", + MagicMock( + side_effect=apply_get_env( + { + "GOOGLE_PROJECT_ID": "labor-day-story", + "MEDIA_GALLERY_BUCKET": "bucket-name", + } + ) + ), + ) def test_file_slug__with_height_in_querystring__bad_mime(self): """Test /answer without auth""" self.headers(academy=1) - media_kwargs = {'url': 'https://potato.io', 'mime': 'application/json'} + media_kwargs = {"url": "https://potato.io", "mime": "application/json"} model = self.generate_models(academy=True, media=True, media_kwargs=media_kwargs) - url = reverse_lazy('media:file_slug', kwargs={'media_slug': model['media'].slug}) + '?height=1000' + url = reverse_lazy("media:file_slug", kwargs={"media_slug": 
model["media"].slug}) + "?height=1000" response = self.client.get(url) json = response.json() - expected = {'detail': 'cannot-resize-media', 'status_code': 400} + expected = {"detail": "cannot-resize-media", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.all_media_dict(), [{ - **self.model_to_dict(model, 'media'), - }]) + self.assertEqual( + self.all_media_dict(), + [ + { + **self.model_to_dict(model, "media"), + } + ], + ) self.assertEqual(self.all_media_resolution_dict(), []) @patch( - 'os.getenv', - MagicMock(side_effect=apply_get_env({ - 'GOOGLE_PROJECT_ID': 'labor-day-story', - 'MEDIA_GALLERY_BUCKET': 'bucket-name', - }))) + "os.getenv", + MagicMock( + side_effect=apply_get_env( + { + "GOOGLE_PROJECT_ID": "labor-day-story", + "MEDIA_GALLERY_BUCKET": "bucket-name", + } + ) + ), + ) def test_file_slug__with_height_in_querystring(self): """Test /answer without auth""" self.headers(academy=1) - media_kwargs = {'url': 'https://potato.io/harcoded', 'mime': 'image/png', 'hash': 'harcoded'} + media_kwargs = {"url": "https://potato.io/harcoded", "mime": "image/png", "hash": "harcoded"} model = self.generate_models(academy=True, media=True, media_kwargs=media_kwargs) - with patch('google.oauth2.id_token.fetch_id_token') as token_mock: - token_mock.return_value = 'blablabla' + with patch("google.oauth2.id_token.fetch_id_token") as token_mock: + token_mock.return_value = "blablabla" - with patch(REQUESTS_PATH['request'], apply_requests_request_mock([resized_response()])) as mock: - url = reverse_lazy('media:file_slug', kwargs={'media_slug': model['media'].slug}) + '?height=1000' + with patch(REQUESTS_PATH["request"], apply_requests_request_mock([resized_response()])) as mock: + url = reverse_lazy("media:file_slug", kwargs={"media_slug": model["media"].slug}) + "?height=1000" response = self.client.get(url) - self.assertEqual(response.url, 'https://potato.io/harcoded-1000x1000') + self.assertEqual(response.url, "https://potato.io/harcoded-1000x1000") self.assertEqual(response.status_code, status.HTTP_301_MOVED_PERMANENTLY) - self.assertEqual(mock.call_args_list, [ - call('POST', - 'https://us-central1-labor-day-story.cloudfunctions.net/resize-image', - data='{"width": null, "height": "1000", "filename": "harcoded", "bucket": "bucket-name"}', - headers={ - 'Authorization': 'Bearer blablabla', - 'Content-Type': 'application/json', - 'Accept': 'application/json' - }, - params={}, - timeout=2) - ]) - - self.assertEqual(self.all_media_dict(), [{ - **self.model_to_dict(model, 'media'), - 'hits': model['media'].hits + 1, - }]) - - self.assertEqual(self.all_media_resolution_dict(), [{ - 'hash': model.media.hash, - 'height': 1000, - 'hits': 1, - 'id': 1, - 'width': 1000, - }]) + self.assertEqual( + mock.call_args_list, + [ + call( + "POST", + "https://us-central1-labor-day-story.cloudfunctions.net/resize-image", + data='{"width": null, "height": "1000", "filename": "harcoded", "bucket": "bucket-name"}', + headers={ + "Authorization": "Bearer blablabla", + "Content-Type": "application/json", + "Accept": "application/json", + }, + params={}, + timeout=2, + ) + ], + ) + + self.assertEqual( + self.all_media_dict(), + [ + { + **self.model_to_dict(model, "media"), + "hits": model["media"].hits + 1, + } + ], + ) + + self.assertEqual( + self.all_media_resolution_dict(), + [ + { + "hash": model.media.hash, + "height": 1000, + "hits": 1, + "id": 1, + "width": 1000, + } + ], + ) @patch( - 'os.getenv', - 
MagicMock(side_effect=apply_get_env({ - 'GOOGLE_PROJECT_ID': 'labor-day-story', - 'MEDIA_GALLERY_BUCKET': 'bucket-name', - }))) + "os.getenv", + MagicMock( + side_effect=apply_get_env( + { + "GOOGLE_PROJECT_ID": "labor-day-story", + "MEDIA_GALLERY_BUCKET": "bucket-name", + } + ) + ), + ) def test_file_slug__with_height_in_querystring__resolution_exist(self): """Test /answer without auth""" self.headers(academy=1) - media_kwargs = {'url': 'https://potato.io/harcoded', 'mime': 'image/png', 'hash': 'harcoded'} - media_resolution_kwargs = {'width': 1000, 'height': 1000, 'hash': 'harcoded'} - model = self.generate_models(academy=True, - media=True, - media_resolution=True, - media_kwargs=media_kwargs, - media_resolution_kwargs=media_resolution_kwargs) - - with patch(REQUESTS_PATH['request'], apply_requests_request_mock([resized_response()])) as mock: - url = reverse_lazy('media:file_slug', kwargs={'media_slug': model['media'].slug}) + '?height=1000' + media_kwargs = {"url": "https://potato.io/harcoded", "mime": "image/png", "hash": "harcoded"} + media_resolution_kwargs = {"width": 1000, "height": 1000, "hash": "harcoded"} + model = self.generate_models( + academy=True, + media=True, + media_resolution=True, + media_kwargs=media_kwargs, + media_resolution_kwargs=media_resolution_kwargs, + ) + + with patch(REQUESTS_PATH["request"], apply_requests_request_mock([resized_response()])) as mock: + url = reverse_lazy("media:file_slug", kwargs={"media_slug": model["media"].slug}) + "?height=1000" response = self.client.get(url) - self.assertEqual(response.url, 'https://potato.io/harcoded-1000x1000') + self.assertEqual(response.url, "https://potato.io/harcoded-1000x1000") self.assertEqual(response.status_code, status.HTTP_301_MOVED_PERMANENTLY) self.assertEqual(mock.call_args_list, []) - self.assertEqual(self.all_media_dict(), [{ - **self.model_to_dict(model, 'media'), - 'hits': model['media'].hits + 1, - }]) + self.assertEqual( + self.all_media_dict(), + [ + { + **self.model_to_dict(model, "media"), + "hits": model["media"].hits + 1, + } + ], + ) - self.assertEqual(self.all_media_resolution_dict(), - [{ - **self.model_to_dict(model, 'media_resolution'), - 'hits': model['media_resolution'].hits + 1, - }]) + self.assertEqual( + self.all_media_resolution_dict(), + [ + { + **self.model_to_dict(model, "media_resolution"), + "hits": model["media_resolution"].hits + 1, + } + ], + ) @patch( - 'os.getenv', - MagicMock(side_effect=apply_get_env({ - 'GOOGLE_PROJECT_ID': 'labor-day-story', - 'MEDIA_GALLERY_BUCKET': 'bucket-name', - }))) + "os.getenv", + MagicMock( + side_effect=apply_get_env( + { + "GOOGLE_PROJECT_ID": "labor-day-story", + "MEDIA_GALLERY_BUCKET": "bucket-name", + } + ) + ), + ) def test_file_slug__with_height_in_querystring__bad_mime(self): """Test /answer without auth""" self.headers(academy=1) - media_kwargs = {'url': 'https://potato.io/harcoded', 'mime': 'image/png', 'hash': 'harcoded'} + media_kwargs = {"url": "https://potato.io/harcoded", "mime": "image/png", "hash": "harcoded"} model = self.generate_models(academy=True, media=True, media_kwargs=media_kwargs) - with patch('google.oauth2.id_token.fetch_id_token') as token_mock: - token_mock.return_value = 'blablabla' + with patch("google.oauth2.id_token.fetch_id_token") as token_mock: + token_mock.return_value = "blablabla" - with patch(REQUESTS_PATH['request'], apply_requests_request_mock([bad_size_response()])) as mock: - url = reverse_lazy('media:file_slug', kwargs={'media_slug': model['media'].slug}) + '?height=1000' + with 
patch(REQUESTS_PATH["request"], apply_requests_request_mock([bad_size_response()])) as mock: + url = reverse_lazy("media:file_slug", kwargs={"media_slug": model["media"].slug}) + "?height=1000" response = self.client.get(url) json = response.json() expected = { - 'detail': 'cloud-function-bad-input', - 'status_code': 500, + "detail": "cloud-function-bad-input", + "status_code": 500, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR) - self.assertEqual(mock.call_args_list, [ - call('POST', - 'https://us-central1-labor-day-story.cloudfunctions.net/resize-image', - data='{"width": null, "height": "1000", "filename": "harcoded", "bucket": "bucket-name"}', - headers={ - 'Authorization': 'Bearer blablabla', - 'Content-Type': 'application/json', - 'Accept': 'application/json' - }, - params={}, - timeout=2) - ]) - - self.assertEqual(self.all_media_dict(), [{ - **self.model_to_dict(model, 'media'), - 'hits': model['media'].hits + 1, - }]) + self.assertEqual( + mock.call_args_list, + [ + call( + "POST", + "https://us-central1-labor-day-story.cloudfunctions.net/resize-image", + data='{"width": null, "height": "1000", "filename": "harcoded", "bucket": "bucket-name"}', + headers={ + "Authorization": "Bearer blablabla", + "Content-Type": "application/json", + "Accept": "application/json", + }, + params={}, + timeout=2, + ) + ], + ) + + self.assertEqual( + self.all_media_dict(), + [ + { + **self.model_to_dict(model, "media"), + "hits": model["media"].hits + 1, + } + ], + ) self.assertEqual(self.all_media_resolution_dict(), []) @patch( - 'os.getenv', - MagicMock(side_effect=apply_get_env({ - 'GOOGLE_PROJECT_ID': 'labor-day-story', - 'MEDIA_GALLERY_BUCKET': 'bucket-name', - }))) + "os.getenv", + MagicMock( + side_effect=apply_get_env( + { + "GOOGLE_PROJECT_ID": "labor-day-story", + "MEDIA_GALLERY_BUCKET": "bucket-name", + } + ) + ), + ) def test_file_slug__with_height_in_querystring__cloud_function_error(self): """Test /answer without auth""" self.headers(academy=1) - media_kwargs = {'url': 'https://potato.io/harcoded', 'mime': 'image/png', 'hash': 'harcoded'} + media_kwargs = {"url": "https://potato.io/harcoded", "mime": "image/png", "hash": "harcoded"} model = self.generate_models(academy=True, media=True, media_kwargs=media_kwargs) - with patch('google.oauth2.id_token.fetch_id_token') as token_mock: - token_mock.return_value = 'blablabla' + with patch("google.oauth2.id_token.fetch_id_token") as token_mock: + token_mock.return_value = "blablabla" - with patch(REQUESTS_PATH['request'], apply_requests_request_mock([bad_server_response()])) as mock: - url = reverse_lazy('media:file_slug', kwargs={'media_slug': model['media'].slug}) + '?height=1000' + with patch(REQUESTS_PATH["request"], apply_requests_request_mock([bad_server_response()])) as mock: + url = reverse_lazy("media:file_slug", kwargs={"media_slug": model["media"].slug}) + "?height=1000" response = self.client.get(url) json = response.json() expected = { - 'detail': 'cloud-function-bad-input', - 'status_code': 500, + "detail": "cloud-function-bad-input", + "status_code": 500, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR) - self.assertEqual(mock.call_args_list, [ - call('POST', - 'https://us-central1-labor-day-story.cloudfunctions.net/resize-image', - data='{"width": null, "height": "1000", "filename": "harcoded", "bucket": "bucket-name"}', - headers={ - 'Authorization': 'Bearer blablabla', - 'Content-Type': 
'application/json', - 'Accept': 'application/json' - }, - params={}, - timeout=2) - ]) - - self.assertEqual(self.all_media_dict(), [{ - **self.model_to_dict(model, 'media'), - 'hits': model['media'].hits + 1, - }]) + self.assertEqual( + mock.call_args_list, + [ + call( + "POST", + "https://us-central1-labor-day-story.cloudfunctions.net/resize-image", + data='{"width": null, "height": "1000", "filename": "harcoded", "bucket": "bucket-name"}', + headers={ + "Authorization": "Bearer blablabla", + "Content-Type": "application/json", + "Accept": "application/json", + }, + params={}, + timeout=2, + ) + ], + ) + + self.assertEqual( + self.all_media_dict(), + [ + { + **self.model_to_dict(model, "media"), + "hits": model["media"].hits + 1, + } + ], + ) self.assertEqual(self.all_media_resolution_dict(), []) diff --git a/breathecode/media/tests/urls/tests_info.py b/breathecode/media/tests/urls/tests_info.py index 0965210a4..8910c0786 100644 --- a/breathecode/media/tests/urls/tests_info.py +++ b/breathecode/media/tests/urls/tests_info.py @@ -12,432 +12,448 @@ class MediaTestSuite(MediaTestCase): - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_info_put_without_args_in_url_or_bulk(self): self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_media', - media=True, - role='potato') - url = reverse_lazy('media:info') + model = self.generate_models( + authenticate=True, profile_academy=True, capability="crud_media", media=True, role="potato" + ) + url = reverse_lazy("media:info") response = self.client.put(url) json = response.json() - expected = {'detail': 'no-args', 'status_code': 400} + expected = {"detail": "no-args", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.all_media_dict(), [{ - **self.model_to_dict(model, 'media'), - }]) - - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + self.assertEqual( + self.all_media_dict(), + [ + { + **self.model_to_dict(model, "media"), + } + ], + ) + + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_info_put_without_category_in_url_or_bulk(self): self.headers(academy=1) - url = reverse_lazy('media:info') - model = self.generate_models(authenticate=True, - media=True, - profile_academy=True, - capability='crud_media', - role='potato') - data = [{'slug': 'they-killed-kenny'}] - response = self.client.put(url, data, format='json') + url = reverse_lazy("media:info") + model = self.generate_models( + authenticate=True, media=True, profile_academy=True, capability="crud_media", role="potato" + ) + data = [{"slug": "they-killed-kenny"}] + response = self.client.put(url, data, format="json") json = response.json() - expected = {'detail': 'categories-not-in-bulk', 'status_code': 400} + expected = {"detail": 
"categories-not-in-bulk", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.all_media_dict(), [{ - **self.model_to_dict(model, 'media'), - }]) - - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + self.assertEqual( + self.all_media_dict(), + [ + { + **self.model_to_dict(model, "media"), + } + ], + ) + + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_info_put_without_id_in_url_or_bulk(self): self.headers(academy=1) - url = reverse_lazy('media:info') - model = self.generate_models(authenticate=True, - media=True, - profile_academy=True, - capability='crud_media', - role='potato', - category=True) - data = [{'slug': 'they-killed-kenny', 'categories': [1]}] - response = self.client.put(url, data, format='json') + url = reverse_lazy("media:info") + model = self.generate_models( + authenticate=True, media=True, profile_academy=True, capability="crud_media", role="potato", category=True + ) + data = [{"slug": "they-killed-kenny", "categories": [1]}] + response = self.client.put(url, data, format="json") json = response.json() - expected = {'detail': 'id-not-in-bulk', 'status_code': 400} + expected = {"detail": "id-not-in-bulk", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.all_media_dict(), [{ - **self.model_to_dict(model, 'media'), - }]) - - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + self.assertEqual( + self.all_media_dict(), + [ + { + **self.model_to_dict(model, "media"), + } + ], + ) + + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_info__put__in_bulk__without_categories(self): """Test /cohort/user without auth""" self.headers(academy=1) - url = reverse_lazy('media:info') - model = self.generate_models(authenticate=True, - media=True, - profile_academy=True, - capability='crud_media', - role='potato') - data = [{ - 'id': model['media'].id, - 'hash': model['media'].hash, - 'slug': 'they-killed-kenny', - 'name': model['media'].name, - 'mime': model['media'].mime - }] - response = self.client.put(url, data, format='json') + url = reverse_lazy("media:info") + model = self.generate_models( + authenticate=True, media=True, profile_academy=True, capability="crud_media", role="potato" + ) + data = [ + { + "id": model["media"].id, + "hash": model["media"].hash, + "slug": "they-killed-kenny", + "name": model["media"].name, + "mime": model["media"].mime, + } + ] + response = self.client.put(url, data, format="json") json = response.json() - expected = {'detail': 'categories-not-in-bulk', 'status_code': 400} + expected = {"detail": "categories-not-in-bulk", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.all_media_dict(), [{ - **self.model_to_dict(model, 
'media'), - }]) - - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + self.assertEqual( + self.all_media_dict(), + [ + { + **self.model_to_dict(model, "media"), + } + ], + ) + + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_info__put__in_bulk__with_more_arguments(self): """Test /cohort/user without auth""" self.headers(academy=1) - url = reverse_lazy('media:info') - model = self.generate_models(authenticate=True, - media=True, - profile_academy=True, - capability='crud_media', - role='potato', - category=True) - data = [{ - 'id': model['media'].id, - 'categories': [1, 2], - 'hash': model['media'].hash, - }] - response = self.client.put(url, data, format='json') + url = reverse_lazy("media:info") + model = self.generate_models( + authenticate=True, media=True, profile_academy=True, capability="crud_media", role="potato", category=True + ) + data = [ + { + "id": model["media"].id, + "categories": [1, 2], + "hash": model["media"].hash, + } + ] + response = self.client.put(url, data, format="json") json = response.json() - expected = {'detail': 'extra-args-bulk-mode', 'status_code': 400} + expected = {"detail": "extra-args-bulk-mode", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.all_media_dict(), [{ - **self.model_to_dict(model, 'media'), - }]) - - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + self.assertEqual( + self.all_media_dict(), + [ + { + **self.model_to_dict(model, "media"), + } + ], + ) + + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_info_put_in_bulk_from_different_academy(self): """Test /answer without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_media', - role='potato', - media=True) + model = self.generate_models( + authenticate=True, profile_academy=True, capability="crud_media", role="potato", media=True + ) model2 = self.generate_models(media=True) - data = [{'id': 2, 'categories': [1]}] - url = reverse_lazy('media:info') - response = self.client.put(url, data, format='json') + data = [{"id": 2, "categories": [1]}] + url = reverse_lazy("media:info") + response = self.client.put(url, data, format="json") json = response.json() - self.assertEqual(json, {'detail': 'different-academy-media-put', 'status_code': 400}) + self.assertEqual(json, {"detail": "different-academy-media-put", "status_code": 400}) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.all_media_dict(), [{ - **self.model_to_dict(model, 'media') - }, { - **self.model_to_dict(model2, 'media') - }]) - - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + self.assertEqual( + 
self.all_media_dict(), [{**self.model_to_dict(model, "media")}, {**self.model_to_dict(model2, "media")}] + ) + + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_info__put__in_bulk__with_one_item(self): """Test /cohort/user without auth""" self.headers(academy=1) - url = reverse_lazy('media:info') - model = self.generate_models(authenticate=True, - media=True, - profile_academy=True, - capability='crud_media', - role='potato', - category=True) - data = [{'id': model['media'].id, 'categories': [1]}] - response = self.client.put(url, data, format='json') + url = reverse_lazy("media:info") + model = self.generate_models( + authenticate=True, media=True, profile_academy=True, capability="crud_media", role="potato", category=True + ) + data = [{"id": model["media"].id, "categories": [1]}] + response = self.client.put(url, data, format="json") json = response.json() - self.assertEqual(json, [{ - 'categories': [1], - 'academy': 1, - 'hash': model['media'].hash, - 'hits': model['media'].hits, - 'id': model['media'].id, - 'slug': model['media'].slug, - 'mime': model['media'].mime, - 'name': model['media'].name, - 'thumbnail': None, - 'url': model['media'].url, - }]) + self.assertEqual( + json, + [ + { + "categories": [1], + "academy": 1, + "hash": model["media"].hash, + "hits": model["media"].hits, + "id": model["media"].id, + "slug": model["media"].slug, + "mime": model["media"].mime, + "name": model["media"].name, + "thumbnail": None, + "url": model["media"].url, + } + ], + ) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.full_media_dict(), [{ - 'categories': [{ - 'id': model['category'].id, - 'medias': 1, - 'name': model['category'].name, - 'slug': model['category'].slug - }], - 'hash': - model['media'].hash, - 'hits': - model['media'].hits, - 'id': - 1, - 'slug': - model['media'].slug, - 'mime': - model['media'].mime, - 'name': - model['media'].name, - 'thumbnail': - f"{model['media'].url}-thumbnail", - 'url': - model['media'].url, - 'academy': { - 'id': model['academy'].id, - 'slug': model['academy'].slug, - 'name': model['academy'].name, - } - }]) - - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + self.assertEqual( + self.full_media_dict(), + [ + { + "categories": [ + { + "id": model["category"].id, + "medias": 1, + "name": model["category"].name, + "slug": model["category"].slug, + } + ], + "hash": model["media"].hash, + "hits": model["media"].hits, + "id": 1, + "slug": model["media"].slug, + "mime": model["media"].mime, + "name": model["media"].name, + "thumbnail": f"{model['media'].url}-thumbnail", + "url": model["media"].url, + "academy": { + "id": model["academy"].id, + "slug": model["academy"].slug, + "name": model["academy"].name, + }, + } + ], + ) + + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_info__put__in_bulk__with_two_item(self): """Test /cohort/user without auth""" self.headers(academy=1) - url = reverse_lazy('media:info') + url = reverse_lazy("media:info") model = [ - self.generate_models(authenticate=True, - media=True, - 
profile_academy=True, - capability='crud_media', - role='potato', - category=True) + self.generate_models( + authenticate=True, + media=True, + profile_academy=True, + capability="crud_media", + role="potato", + category=True, + ) ] base = model[0].copy() - del base['user'] - del base['profile_academy'] - del base['media'] - del base['category'] + del base["user"] + del base["profile_academy"] + del base["media"] + del base["category"] model = model + [self.generate_models(media=True, profile_academy=True, category=True, models=base)] - data = [{'id': 1, 'categories': [1, 2]}, {'id': 2, 'categories': [1, 2]}] - response = self.client.put(url, data, format='json') + data = [{"id": 1, "categories": [1, 2]}, {"id": 2, "categories": [1, 2]}] + response = self.client.put(url, data, format="json") json = response.json() - self.assertEqual(json, [{ - 'categories': [1, 2], - 'academy': 1, - 'hash': model[0]['media'].hash, - 'hits': model[0]['media'].hits, - 'id': 1, - 'slug': model[0]['media'].slug, - 'mime': model[0]['media'].mime, - 'name': model[0]['media'].name, - 'thumbnail': None, - 'url': model[0]['media'].url, - 'academy': model[0]['academy'].id, - }, { - 'categories': [1, 2], - 'academy': 1, - 'hash': model[1]['media'].hash, - 'hits': model[1]['media'].hits, - 'id': 2, - 'slug': model[1]['media'].slug, - 'mime': model[1]['media'].mime, - 'name': model[1]['media'].name, - 'thumbnail': None, - 'url': model[1]['media'].url, - 'academy': model[1]['academy'].id, - }]) + self.assertEqual( + json, + [ + { + "categories": [1, 2], + "academy": 1, + "hash": model[0]["media"].hash, + "hits": model[0]["media"].hits, + "id": 1, + "slug": model[0]["media"].slug, + "mime": model[0]["media"].mime, + "name": model[0]["media"].name, + "thumbnail": None, + "url": model[0]["media"].url, + "academy": model[0]["academy"].id, + }, + { + "categories": [1, 2], + "academy": 1, + "hash": model[1]["media"].hash, + "hits": model[1]["media"].hits, + "id": 2, + "slug": model[1]["media"].slug, + "mime": model[1]["media"].mime, + "name": model[1]["media"].name, + "thumbnail": None, + "url": model[1]["media"].url, + "academy": model[1]["academy"].id, + }, + ], + ) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.full_media_dict(), [{ - 'categories': [{ - 'id': model[0]['category'].id, - 'medias': 2, - 'name': model[0]['category'].name, - 'slug': model[0]['category'].slug - }, { - 'id': model[1]['category'].id, - 'medias': 2, - 'name': model[1]['category'].name, - 'slug': model[1]['category'].slug - }], - 'hash': - model[0]['media'].hash, - 'hits': - model[0]['media'].hits, - 'id': - 1, - 'slug': - model[0]['media'].slug, - 'mime': - model[0]['media'].mime, - 'name': - model[0]['media'].name, - 'thumbnail': - f"{model[0]['media'].url}-thumbnail", - 'url': - model[0]['media'].url, - 'academy': { - 'id': model[0]['academy'].id, - 'slug': model[0]['academy'].slug, - 'name': model[0]['academy'].name, - } - }, { - 'categories': [{ - 'id': model[0]['category'].id, - 'medias': 2, - 'name': model[0]['category'].name, - 'slug': model[0]['category'].slug - }, { - 'id': model[1]['category'].id, - 'medias': 2, - 'name': model[1]['category'].name, - 'slug': model[1]['category'].slug - }], - 'hash': - model[1]['media'].hash, - 'hits': - model[1]['media'].hits, - 'id': - 2, - 'slug': - model[1]['media'].slug, - 'mime': - model[1]['media'].mime, - 'name': - model[1]['media'].name, - 'thumbnail': - f"{model[1]['media'].url}-thumbnail", - 'url': - model[1]['media'].url, - 'academy': { - 'id': 
model[1]['academy'].id, - 'slug': model[1]['academy'].slug, - 'name': model[1]['academy'].name, - } - }]) + self.assertEqual( + self.full_media_dict(), + [ + { + "categories": [ + { + "id": model[0]["category"].id, + "medias": 2, + "name": model[0]["category"].name, + "slug": model[0]["category"].slug, + }, + { + "id": model[1]["category"].id, + "medias": 2, + "name": model[1]["category"].name, + "slug": model[1]["category"].slug, + }, + ], + "hash": model[0]["media"].hash, + "hits": model[0]["media"].hits, + "id": 1, + "slug": model[0]["media"].slug, + "mime": model[0]["media"].mime, + "name": model[0]["media"].name, + "thumbnail": f"{model[0]['media'].url}-thumbnail", + "url": model[0]["media"].url, + "academy": { + "id": model[0]["academy"].id, + "slug": model[0]["academy"].slug, + "name": model[0]["academy"].name, + }, + }, + { + "categories": [ + { + "id": model[0]["category"].id, + "medias": 2, + "name": model[0]["category"].name, + "slug": model[0]["category"].slug, + }, + { + "id": model[1]["category"].id, + "medias": 2, + "name": model[1]["category"].name, + "slug": model[1]["category"].slug, + }, + ], + "hash": model[1]["media"].hash, + "hits": model[1]["media"].hits, + "id": 2, + "slug": model[1]["media"].slug, + "mime": model[1]["media"].mime, + "name": model[1]["media"].name, + "thumbnail": f"{model[1]['media'].url}-thumbnail", + "url": model[1]["media"].url, + "academy": { + "id": model[1]["academy"].id, + "slug": model[1]["academy"].slug, + "name": model[1]["academy"].name, + }, + }, + ], + ) """ 🔽🔽🔽 Bulk delete """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_info__delete__without_bulk(self): """Test /answer without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_media', - role='potato', - media=True) + model = self.generate_models( + authenticate=True, profile_academy=True, capability="crud_media", role="potato", media=True + ) - url = reverse_lazy('media:info') + url = reverse_lazy("media:info") response = self.client.delete(url) self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) - self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, 'media')}]) + self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, "media")}]) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_info__delete__bad_id(self): """Test /answer without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_media', - role='potato', - media=True) - url = reverse_lazy('media:info') + '?id=0' + model = self.generate_models( + authenticate=True, profile_academy=True, capability="crud_media", role="potato", media=True + ) + url = reverse_lazy("media:info") + "?id=0" 
response = self.client.delete(url) self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) - self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, 'media')}]) + self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, "media")}]) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_info__delete(self): """Test /answer without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_media', - role='potato', - media=True) + model = self.generate_models( + authenticate=True, profile_academy=True, capability="crud_media", role="potato", media=True + ) - url = reverse_lazy('media:info') + '?id=1' + url = reverse_lazy("media:info") + "?id=1" response = self.client.delete(url) self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) self.assertEqual(self.all_media_dict(), []) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_info__delete__media_that_belongs_to_a_different_academy(self): """Test /answer without auth""" self.headers(academy=1) - model1 = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_media', - role='potato', - media=True) + model1 = self.generate_models( + authenticate=True, profile_academy=True, capability="crud_media", role="potato", media=True + ) model2 = self.generate_models(media=True, academy=True) - url = reverse_lazy('media:info') + '?id=1,2' + url = reverse_lazy("media:info") + "?id=1,2" response = self.client.delete(url) json = response.json() expected = { - 'detail': 'academy-different-than-media-academy', - 'status_code': 400, + "detail": "academy-different-than-media-academy", + "status_code": 400, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.all_media_dict(), [{ - **self.model_to_dict(model1, 'media'), - }, { - **self.model_to_dict(model2, 'media'), - }]) - - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + self.assertEqual( + self.all_media_dict(), + [ + { + **self.model_to_dict(model1, "media"), + }, + { + **self.model_to_dict(model2, "media"), + }, + ], + ) + + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_info__delete__two_media(self): """Test /answer without auth""" self.headers(academy=1) - base = self.generate_models(authenticate=True, profile_academy=True, capability='crud_media', role='potato') + base = self.generate_models(authenticate=True, 
profile_academy=True, capability="crud_media", role="potato") for _ in range(0, 2): self.generate_models(media=True, models=base) - url = reverse_lazy('media:info') + '?id=1,2' + url = reverse_lazy("media:info") + "?id=1,2" response = self.client.delete(url) self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) diff --git a/breathecode/media/tests/urls/tests_info_id.py b/breathecode/media/tests/urls/tests_info_id.py index f1f8fc992..ced506767 100644 --- a/breathecode/media/tests/urls/tests_info_id.py +++ b/breathecode/media/tests/urls/tests_info_id.py @@ -1,6 +1,7 @@ """ Test /answer """ + from unittest.mock import Mock, call, patch from django.urls.base import reverse_lazy from rest_framework import status @@ -13,7 +14,7 @@ from ..mixins import MediaTestCase -class FileMock(): +class FileMock: def delete(*args, **kwargs): pass @@ -22,7 +23,7 @@ def delete(*args, **kwargs): file_mock = Mock(side_effect=FileMock) -class StorageMock(): +class StorageMock: def file(*args, **kwargs): return file_mock @@ -34,354 +35,347 @@ def file(*args, **kwargs): class MediaTestSuite(MediaTestCase): """Test /answer""" - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_info_id_without_auth(self): """Test /answer without auth""" - url = reverse_lazy('media:info_id', kwargs={'media_id': 1}) + url = reverse_lazy("media:info_id", kwargs={"media_id": 1}) response = self.client.get(url) json = response.json() self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_info_id_wrong_academy(self): """Test /answer without auth""" - url = reverse_lazy('media:info_id', kwargs={'media_id': 1}) - response = self.client.get(url, **{'HTTP_Academy': 1}) + url = reverse_lazy("media:info_id", kwargs={"media_id": 1}) + response = self.client.get(url, **{"HTTP_Academy": 1}) json = response.json() self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_info_id_without_capability(self): """Test /cohort/:id without auth""" self.headers(academy=1) - url = reverse_lazy('media:info_id', kwargs={'media_id': 1}) + url = reverse_lazy("media:info_id", kwargs={"media_id": 1}) self.generate_models(authenticate=True) response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': "You (user: 1) don't have this capability: read_media for academy 1", - 
'status_code': 403 - }) + self.assertEqual( + json, {"detail": "You (user: 1) don't have this capability: read_media for academy 1", "status_code": 403} + ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_info_id_without_data(self): """Test /answer without auth""" self.headers(academy=1) - models = self.generate_models(authenticate=True, profile_academy=True, capability='read_media', role='potato') - url = reverse_lazy('media:info_id', kwargs={'media_id': 1}) + models = self.generate_models(authenticate=True, profile_academy=True, capability="read_media", role="potato") + url = reverse_lazy("media:info_id", kwargs={"media_id": 1}) response = self.client.get(url) json = response.json() - self.assertEqual(json, {'detail': 'Media not found', 'status_code': 404}) + self.assertEqual(json, {"detail": "Media not found", "status_code": 404}) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) self.assertEqual(self.all_media_dict(), []) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_root(self): """Test /answer without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_media', - role='potato', - media=True) - url = reverse_lazy('media:info_id', kwargs={'media_id': 1}) + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_media", role="potato", media=True + ) + url = reverse_lazy("media:info_id", kwargs={"media_id": 1}) response = self.client.get(url) json = response.json() self.assertEqual( - json, { - 'categories': [], - 'hash': model['media'].hash, - 'hits': model['media'].hits, - 'id': model['media'].id, - 'mime': model['media'].mime, - 'name': model['media'].name, - 'slug': model['media'].slug, - 'thumbnail': f'{model.media.url}-thumbnail', - 'url': model['media'].url, - 'academy': { - 'id': model['academy'].id, - 'slug': model['academy'].slug, - 'name': model['academy'].name, - } - }) + json, + { + "categories": [], + "hash": model["media"].hash, + "hits": model["media"].hits, + "id": model["media"].id, + "mime": model["media"].mime, + "name": model["media"].name, + "slug": model["media"].slug, + "thumbnail": f"{model.media.url}-thumbnail", + "url": model["media"].url, + "academy": { + "id": model["academy"].id, + "slug": model["academy"].slug, + "name": model["academy"].name, + }, + }, + ) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, 'media')}]) + self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, "media")}]) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], 
apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_info_id_with_category(self): """Test /answer without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_media', - role='potato', - media=True, - category=True) - url = reverse_lazy('media:info_id', kwargs={'media_id': 1}) + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_media", role="potato", media=True, category=True + ) + url = reverse_lazy("media:info_id", kwargs={"media_id": 1}) response = self.client.get(url) json = response.json() self.assertEqual( - json, { - 'categories': [{ - 'id': 1, - 'medias': 1, - 'name': model['category'].name, - 'slug': model['category'].slug, - }], - 'hash': model['media'].hash, - 'hits': model['media'].hits, - 'id': model['media'].id, - 'mime': model['media'].mime, - 'name': model['media'].name, - 'slug': model['media'].slug, - 'thumbnail': f'{model.media.url}-thumbnail', - 'url': model['media'].url, - 'academy': { - 'id': model['academy'].id, - 'slug': model['academy'].slug, - 'name': model['academy'].name, - } - }) + json, + { + "categories": [ + { + "id": 1, + "medias": 1, + "name": model["category"].name, + "slug": model["category"].slug, + } + ], + "hash": model["media"].hash, + "hits": model["media"].hits, + "id": model["media"].id, + "mime": model["media"].mime, + "name": model["media"].name, + "slug": model["media"].slug, + "thumbnail": f"{model.media.url}-thumbnail", + "url": model["media"].url, + "academy": { + "id": model["academy"].id, + "slug": model["academy"].slug, + "name": model["academy"].name, + }, + }, + ) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, 'media')}]) + self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, "media")}]) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_info_id_put_without_auth(self): """Test /answer without auth""" - url = reverse_lazy('media:info_id', kwargs={'media_id': 1}) + url = reverse_lazy("media:info_id", kwargs={"media_id": 1}) data = {} response = self.client.put(url, data) json = response.json() self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_info_id_put_wrong_academy(self): """Test /answer without auth""" self.headers(academy=1) - url = reverse_lazy('media:info_id', kwargs={'media_id': 1}) + url = reverse_lazy("media:info_id", kwargs={"media_id": 
1}) data = {} response = self.client.put(url, data) json = response.json() self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_info_id_put_without_capability(self): """Test /cohort/:id without auth""" self.headers(academy=1) - url = reverse_lazy('media:info_id', kwargs={'media_id': 1}) + url = reverse_lazy("media:info_id", kwargs={"media_id": 1}) self.generate_models(authenticate=True) data = {} response = self.client.put(url, data) json = response.json() - self.assertEqual(json, { - 'detail': "You (user: 1) don't have this capability: crud_media for academy 1", - 'status_code': 403 - }) + self.assertEqual( + json, {"detail": "You (user: 1) don't have this capability: crud_media for academy 1", "status_code": 403} + ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_info_id_put_without_data(self): """Test /answer without auth""" self.headers(academy=1) - models = self.generate_models(authenticate=True, profile_academy=True, capability='crud_media', role='potato') - url = reverse_lazy('media:info_id', kwargs={'media_id': 1}) + models = self.generate_models(authenticate=True, profile_academy=True, capability="crud_media", role="potato") + url = reverse_lazy("media:info_id", kwargs={"media_id": 1}) data = {} response = self.client.put(url, data) json = response.json() - self.assertEqual(json, {'detail': 'media-not-found', 'status_code': 404}) + self.assertEqual(json, {"detail": "media-not-found", "status_code": 404}) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) self.assertEqual(self.all_media_dict(), []) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_info_id_put_from_different_academy(self): """Test /answer without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_media', - role='potato', - media=True) + model = self.generate_models( + authenticate=True, profile_academy=True, capability="crud_media", role="potato", media=True + ) model2 = self.generate_models(media=True) - url = reverse_lazy('media:info_id', kwargs={'media_id': 2}) + url = reverse_lazy("media:info_id", kwargs={"media_id": 2}) response = self.client.put(url) json = response.json() - self.assertEqual(json, {'detail': 'different-academy-media-put', 'status_code': 400}) + 
self.assertEqual(json, {"detail": "different-academy-media-put", "status_code": 400}) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.all_media_dict(), [{ - **self.model_to_dict(model, 'media') - }, { - **self.model_to_dict(model2, 'media') - }]) - - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + self.assertEqual( + self.all_media_dict(), [{**self.model_to_dict(model, "media")}, {**self.model_to_dict(model2, "media")}] + ) + + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_info_id_put(self): """Test /answer without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_media', - role='potato', - media=True) - url = reverse_lazy('media:info_id', kwargs={'media_id': 1}) + model = self.generate_models( + authenticate=True, profile_academy=True, capability="crud_media", role="potato", media=True + ) + url = reverse_lazy("media:info_id", kwargs={"media_id": 1}) data = { - 'slug': 'they-killed-kenny', - 'name': 'they-killed-kenny.exe', + "slug": "they-killed-kenny", + "name": "they-killed-kenny.exe", } ignored_data = { - 'url': 'https://www.google.com/', - 'mime': 'application/hitman', - 'hits': 9999, - 'mime': '1234567890123456789012345678901234567890123456', + "url": "https://www.google.com/", + "mime": "application/hitman", + "hits": 9999, + "mime": "1234567890123456789012345678901234567890123456", } response = self.client.put(url, {**data, **ignored_data}) json = response.json() self.assertEqual( - json, { - 'categories': [], - 'academy': 1, - 'hash': model['media'].hash, - 'hits': model['media'].hits, - 'id': model['media'].id, - 'mime': model['media'].mime, - 'name': model['media'].name, - 'thumbnail': None, - 'url': model['media'].url, + json, + { + "categories": [], + "academy": 1, + "hash": model["media"].hash, + "hits": model["media"].hits, + "id": model["media"].id, + "mime": model["media"].mime, + "name": model["media"].name, + "thumbnail": None, + "url": model["media"].url, **data, - }) + }, + ) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_media_dict(), [{ - **self.model_to_dict(model, 'media'), - **data, - }]) - - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + self.assertEqual( + self.all_media_dict(), + [ + { + **self.model_to_dict(model, "media"), + **data, + } + ], + ) + + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_info_id_delete_without_data(self): """Test /answer without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, profile_academy=True, capability='crud_media', role='potato') - url = reverse_lazy('media:info_id', kwargs={'media_id': 1}) + model = self.generate_models(authenticate=True, profile_academy=True, capability="crud_media", role="potato") + url = reverse_lazy("media:info_id", kwargs={"media_id": 1}) response = 
self.client.delete(url) json = response.json() - self.assertEqual(json, {'detail': 'Media not found', 'status_code': 404}) + self.assertEqual(json, {"detail": "Media not found", "status_code": 404}) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) self.assertEqual(self.all_media_dict(), []) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_info_id_delete_from_different_academy(self): """Test /answer without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_media', - role='potato', - media=True) + model = self.generate_models( + authenticate=True, profile_academy=True, capability="crud_media", role="potato", media=True + ) model2 = self.generate_models(media=True) - url = reverse_lazy('media:info_id', kwargs={'media_id': 2}) + url = reverse_lazy("media:info_id", kwargs={"media_id": 2}) response = self.client.delete(url) json = response.json() - self.assertEqual(json, {'detail': 'academy-different-than-media-academy', 'status_code': 400}) + self.assertEqual(json, {"detail": "academy-different-than-media-academy", "status_code": 400}) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.all_media_dict(), [{ - **self.model_to_dict(model, 'media') - }, { - **self.model_to_dict(model2, 'media') - }]) - - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + self.assertEqual( + self.all_media_dict(), [{**self.model_to_dict(model, "media")}, {**self.model_to_dict(model2, "media")}] + ) + + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_info_id_delete(self): """Test /answer without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_media', - role='potato', - media=True) - url = reverse_lazy('media:info_id', kwargs={'media_id': 1}) + model = self.generate_models( + authenticate=True, profile_academy=True, capability="crud_media", role="potato", media=True + ) + url = reverse_lazy("media:info_id", kwargs={"media_id": 1}) response = self.client.delete(url) self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) self.assertEqual(self.all_media_dict(), []) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_info_id_delete_with_resolution(self): """Test /answer without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_media', - role='potato', - 
media=True, - media_resolution=True, - media_kwargs={'hash': 'abc'}, - media_resolution_kwargs={'hash': 'abc'}) - url = reverse_lazy('media:info_id', kwargs={'media_id': 1}) + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="crud_media", + role="potato", + media=True, + media_resolution=True, + media_kwargs={"hash": "abc"}, + media_resolution_kwargs={"hash": "abc"}, + ) + url = reverse_lazy("media:info_id", kwargs={"media_id": 1}) response = self.client.delete(url) self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) self.assertEqual(self.all_media_dict(), []) self.assertEqual(self.all_media_resolution_dict(), []) - @patch('breathecode.services.google_cloud.Storage', storage_mock) + @patch("breathecode.services.google_cloud.Storage", storage_mock) def test_info_id_delete_with_category(self): """Test /answer without auth""" self.headers(academy=1) storage_mock.call_args_list = [] file_mock.call_args_list = [] - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_media', - role='potato', - media=True, - category=True) - url = reverse_lazy('media:info_id', kwargs={'media_id': 1}) + model = self.generate_models( + authenticate=True, profile_academy=True, capability="crud_media", role="potato", media=True, category=True + ) + url = reverse_lazy("media:info_id", kwargs={"media_id": 1}) response = self.client.delete(url) self.assertEqual(storage_mock.call_args_list, [call()]) @@ -389,24 +383,22 @@ def test_info_id_delete_with_category(self): self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) self.assertEqual(self.all_media_dict(), []) - @patch('breathecode.services.google_cloud.Storage', storage_mock) + @patch("breathecode.services.google_cloud.Storage", storage_mock) def test_info_id_delete_with_category_with_two_media(self): """Test /answer without auth""" self.headers(academy=1) storage_mock.call_args_list = [] file_mock.delete.call_args_list = [] - base = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_media', - role='potato', - category=True) + base = self.generate_models( + authenticate=True, profile_academy=True, capability="crud_media", role="potato", category=True + ) - media_kwargs = {'hash': '1234567890123456789012345678901234567890123456'} + media_kwargs = {"hash": "1234567890123456789012345678901234567890123456"} models = [self.generate_models(media=True, media_kwargs=media_kwargs, models=base) for _ in range(0, 2)] - url = reverse_lazy('media:info_id', kwargs={'media_id': 1}) + url = reverse_lazy("media:info_id", kwargs={"media_id": 1}) response = self.client.delete(url) self.assertEqual(storage_mock.call_args_list, []) self.assertEqual(file_mock.delete.call_args_list, []) self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) - self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(models[1], 'media')}]) + self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(models[1], "media")}]) diff --git a/breathecode/media/tests/urls/tests_info_id_resolution.py b/breathecode/media/tests/urls/tests_info_id_resolution.py index 3a8b6844f..7e1f7c23d 100644 --- a/breathecode/media/tests/urls/tests_info_id_resolution.py +++ b/breathecode/media/tests/urls/tests_info_id_resolution.py @@ -1,6 +1,7 @@ """ Test /answer """ + from unittest.mock import patch from django.urls.base import reverse_lazy from rest_framework import status @@ -15,86 +16,92 @@ class MediaTestSuite(MediaTestCase): - @patch(GOOGLE_CLOUD_PATH['client'], 
apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_info_id_resolution_without_auth(self): """Test /answer without auth""" - url = reverse_lazy('media:info_id_resolution', kwargs={'media_id': 1}) + url = reverse_lazy("media:info_id_resolution", kwargs={"media_id": 1}) response = self.client.get(url) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_info_id_resolution_wrong_academy(self): """Test /answer without auth""" - url = reverse_lazy('media:info_id_resolution', kwargs={'media_id': 1}) - response = self.client.get(url, **{'HTTP_Academy': 1}) + url = reverse_lazy("media:info_id_resolution", kwargs={"media_id": 1}) + response = self.client.get(url, **{"HTTP_Academy": 1}) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_info_id_resolution_without_capability(self): """Test /cohort/:id without auth""" self.headers(academy=1) - url = reverse_lazy('media:info_id_resolution', kwargs={'media_id': 1}) + url = reverse_lazy("media:info_id_resolution", kwargs={"media_id": 1}) self.generate_models(authenticate=True) response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': "You (user: 1) don't have this capability: read_media_resolution for academy 1", - 'status_code': 403 - }) + self.assertEqual( + json, + { + "detail": "You (user: 1) don't have this capability: read_media_resolution for academy 1", + "status_code": 403, + }, + ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_info_id_without_data(self): """Test /answer without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_media_resolution', - role='potato') - url = reverse_lazy('media:info_id_resolution', kwargs={'media_id': 1}) + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_media_resolution", role="potato" + ) + url = 
reverse_lazy("media:info_id_resolution", kwargs={"media_id": 1}) response = self.client.get(url) json = response.json() - self.assertEqual(json, {'detail': 'media-not-found', 'status_code': 404}) + self.assertEqual(json, {"detail": "media-not-found", "status_code": 404}) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) self.assertEqual(self.all_media_dict(), []) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_info_id_resolution_get_with_id(self): """Test /info/media:id/resolution""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - media_resolution=True, - media=True, - capability='read_media_resolution', - role='potato', - profile_academy=True, - media_kwargs={'hash': 'abc'}, - media_resolution_kwargs={'hash': 'abc'}) - model_dict = self.remove_dinamics_fields(model['media_resolution'].__dict__) - url = reverse_lazy('media:info_id_resolution', kwargs={'media_id': model['media'].id}) + model = self.generate_models( + authenticate=True, + media_resolution=True, + media=True, + capability="read_media_resolution", + role="potato", + profile_academy=True, + media_kwargs={"hash": "abc"}, + media_resolution_kwargs={"hash": "abc"}, + ) + model_dict = self.remove_dinamics_fields(model["media_resolution"].__dict__) + url = reverse_lazy("media:info_id_resolution", kwargs={"media_id": model["media"].id}) response = self.client.get(url) json = response.json() - expected = [{ - 'id': model['media_resolution'].id, - 'hash': model['media'].hash, - 'width': model['media_resolution'].width, - 'height': model['media_resolution'].height, - 'hits': model['media_resolution'].hits, - }] + expected = [ + { + "id": model["media_resolution"].id, + "hash": model["media"].hash, + "width": model["media_resolution"].width, + "height": model["media_resolution"].height, + "hits": model["media_resolution"].hits, + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) diff --git a/breathecode/media/tests/urls/tests_info_name.py b/breathecode/media/tests/urls/tests_info_name.py index 4b6427110..1b808891a 100644 --- a/breathecode/media/tests/urls/tests_info_name.py +++ b/breathecode/media/tests/urls/tests_info_name.py @@ -1,6 +1,7 @@ """ Test /answer """ + import re, urllib from unittest.mock import patch from django.urls.base import reverse_lazy @@ -17,132 +18,132 @@ class MediaTestSuite(MediaTestCase): """Test /answer""" - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_info_id_without_auth(self): """Test /answer without auth""" - url = reverse_lazy('media:info_name', kwargs={'media_name': 'they-killed-kenny.exe'}) + url = reverse_lazy("media:info_name", kwargs={"media_name": "they-killed-kenny.exe"}) response = self.client.get(url) json = response.json() 
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_info_id_wrong_academy(self): """Test /answer without auth""" - url = reverse_lazy('media:info_name', kwargs={'media_name': 'they-killed-kenny.exe'}) - response = self.client.get(url, **{'HTTP_Academy': 1}) + url = reverse_lazy("media:info_name", kwargs={"media_name": "they-killed-kenny.exe"}) + response = self.client.get(url, **{"HTTP_Academy": 1}) json = response.json() self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_info_id_without_capability(self): """Test /cohort/:id without auth""" self.headers(academy=1) - url = reverse_lazy('media:info_name', kwargs={'media_name': 'they-killed-kenny.exe'}) + url = reverse_lazy("media:info_name", kwargs={"media_name": "they-killed-kenny.exe"}) self.generate_models(authenticate=True) response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': "You (user: 1) don't have this capability: read_media for academy 1", - 'status_code': 403 - }) + self.assertEqual( + json, {"detail": "You (user: 1) don't have this capability: read_media for academy 1", "status_code": 403} + ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_info_id_without_data(self): """Test /answer without auth""" self.headers(academy=1) - models = self.generate_models(authenticate=True, profile_academy=True, capability='read_media', role='potato') - url = reverse_lazy('media:info_name', kwargs={'media_name': 'they-killed-kenny.exe'}) + models = self.generate_models(authenticate=True, profile_academy=True, capability="read_media", role="potato") + url = reverse_lazy("media:info_name", kwargs={"media_name": "they-killed-kenny.exe"}) response = self.client.get(url) json = response.json() - self.assertEqual(json, {'detail': 'Media not found', 'status_code': 404}) + self.assertEqual(json, {"detail": "Media not found", "status_code": 404}) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) self.assertEqual(self.all_media_dict(), []) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + 
@patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_root(self): """Test /answer without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_media', - role='potato', - media=True) - url = reverse_lazy('media:info_name', kwargs={'media_name': model['media'].name}) + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_media", role="potato", media=True + ) + url = reverse_lazy("media:info_name", kwargs={"media_name": model["media"].name}) response = self.client.get(url) json = response.json() self.assertEqual( - json, { - 'categories': [], - 'hash': model['media'].hash, - 'hits': model['media'].hits, - 'id': model['media'].id, - 'mime': model['media'].mime, - 'name': model['media'].name, - 'slug': model['media'].slug, - 'thumbnail': f'{model.media.url}-thumbnail', - 'url': model['media'].url, - 'academy': { - 'id': model['academy'].id, - 'slug': model['academy'].slug, - 'name': model['academy'].name, - } - }) + json, + { + "categories": [], + "hash": model["media"].hash, + "hits": model["media"].hits, + "id": model["media"].id, + "mime": model["media"].mime, + "name": model["media"].name, + "slug": model["media"].slug, + "thumbnail": f"{model.media.url}-thumbnail", + "url": model["media"].url, + "academy": { + "id": model["academy"].id, + "slug": model["academy"].slug, + "name": model["academy"].name, + }, + }, + ) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, 'media')}]) + self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, "media")}]) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_info_id_with_category(self): """Test /answer without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_media', - role='potato', - media=True, - category=True) - url = reverse_lazy('media:info_name', kwargs={'media_name': model['media'].name}) + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_media", role="potato", media=True, category=True + ) + url = reverse_lazy("media:info_name", kwargs={"media_name": model["media"].name}) response = self.client.get(url) json = response.json() self.assertEqual( - json, { - 'categories': [{ - 'id': 1, - 'medias': 1, - 'name': model['category'].name, - 'slug': model['category'].slug, - }], - 'hash': model['media'].hash, - 'hits': model['media'].hits, - 'id': model['media'].id, - 'mime': model['media'].mime, - 'name': model['media'].name, - 'slug': model['media'].slug, - 'thumbnail': f'{model.media.url}-thumbnail', - 'url': model['media'].url, - 'academy': { - 'id': model['academy'].id, - 'slug': model['academy'].slug, - 'name': model['academy'].name, - } - }) + json, + { + "categories": [ + { + "id": 1, + "medias": 1, + "name": model["category"].name, + "slug": model["category"].slug, + } + ], + "hash": 
model["media"].hash, + "hits": model["media"].hits, + "id": model["media"].id, + "mime": model["media"].mime, + "name": model["media"].name, + "slug": model["media"].slug, + "thumbnail": f"{model.media.url}-thumbnail", + "url": model["media"].url, + "academy": { + "id": model["academy"].id, + "slug": model["academy"].slug, + "name": model["academy"].name, + }, + }, + ) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, 'media')}]) + self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, "media")}]) diff --git a/breathecode/media/tests/urls/tests_info_slug.py b/breathecode/media/tests/urls/tests_info_slug.py index c9333d936..5feccf83d 100644 --- a/breathecode/media/tests/urls/tests_info_slug.py +++ b/breathecode/media/tests/urls/tests_info_slug.py @@ -1,6 +1,7 @@ """ Test /answer """ + import re, urllib from unittest.mock import patch from django.urls.base import reverse_lazy @@ -17,134 +18,134 @@ class MediaTestSuite(MediaTestCase): """Test /answer""" - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_info_id_without_auth(self): """Test /answer without auth""" - url = reverse_lazy('media:info_slug', kwargs={'media_slug': 'they-killed-kenny'}) + url = reverse_lazy("media:info_slug", kwargs={"media_slug": "they-killed-kenny"}) response = self.client.get(url) json = response.json() self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_info_id_wrong_academy(self): """Test /answer without auth""" - url = reverse_lazy('media:info_slug', kwargs={'media_slug': 'they-killed-kenny'}) - response = self.client.get(url, **{'HTTP_Academy': 1}) + url = reverse_lazy("media:info_slug", kwargs={"media_slug": "they-killed-kenny"}) + response = self.client.get(url, **{"HTTP_Academy": 1}) json = response.json() self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_info_id_without_capability(self): """Test /cohort/:id without auth""" self.headers(academy=1) - url = reverse_lazy('media:info_slug', kwargs={'media_slug': 'they-killed-kenny'}) + url = reverse_lazy("media:info_slug", kwargs={"media_slug": "they-killed-kenny"}) self.generate_models(authenticate=True) response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': "You (user: 
1) don't have this capability: read_media for academy 1", - 'status_code': 403 - }) + self.assertEqual( + json, {"detail": "You (user: 1) don't have this capability: read_media for academy 1", "status_code": 403} + ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_info_id_without_data(self): """Test /answer without auth""" self.headers(academy=1) - models = self.generate_models(authenticate=True, profile_academy=True, capability='read_media', role='potato') - url = reverse_lazy('media:info_slug', kwargs={'media_slug': 'they-killed-kenny'}) + models = self.generate_models(authenticate=True, profile_academy=True, capability="read_media", role="potato") + url = reverse_lazy("media:info_slug", kwargs={"media_slug": "they-killed-kenny"}) response = self.client.get(url) json = response.json() - self.assertEqual(json, {'detail': 'Media not found', 'status_code': 404}) + self.assertEqual(json, {"detail": "Media not found", "status_code": 404}) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) self.assertEqual(self.all_media_dict(), []) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_root(self): """Test /answer without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_media', - role='potato', - media=True) - url = reverse_lazy('media:info_slug', kwargs={'media_slug': model['media'].slug}) + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_media", role="potato", media=True + ) + url = reverse_lazy("media:info_slug", kwargs={"media_slug": model["media"].slug}) response = self.client.get(url) json = response.json() self.assertEqual( - json, { - 'categories': [], - 'hash': model['media'].hash, - 'hits': model['media'].hits, - 'id': model['media'].id, - 'mime': model['media'].mime, - 'name': model['media'].name, - 'slug': model['media'].slug, - 'thumbnail': f'{model.media.url}-thumbnail', - 'url': model['media'].url, - 'academy': { - 'id': model['academy'].id, - 'slug': model['academy'].slug, - 'name': model['academy'].name, - } - }) + json, + { + "categories": [], + "hash": model["media"].hash, + "hits": model["media"].hits, + "id": model["media"].id, + "mime": model["media"].mime, + "name": model["media"].name, + "slug": model["media"].slug, + "thumbnail": f"{model.media.url}-thumbnail", + "url": model["media"].url, + "academy": { + "id": model["academy"].id, + "slug": model["academy"].slug, + "name": model["academy"].name, + }, + }, + ) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, 'media')}]) + self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, 
"media")}]) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_info_id_with_category(self): """Test /answer without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_media', - role='potato', - media=True, - category=True) - url = reverse_lazy('media:info_slug', kwargs={'media_slug': model['media'].slug}) + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_media", role="potato", media=True, category=True + ) + url = reverse_lazy("media:info_slug", kwargs={"media_slug": model["media"].slug}) response = self.client.get(url) json = response.json() - self.print_model(model, 'media') - self.print_model(model, 'category') + self.print_model(model, "media") + self.print_model(model, "category") self.assertEqual( - json, { - 'categories': [{ - 'id': 1, - 'medias': 1, - 'name': model['category'].name, - 'slug': model['category'].slug, - }], - 'hash': model['media'].hash, - 'hits': model['media'].hits, - 'id': model['media'].id, - 'mime': model['media'].mime, - 'name': model['media'].name, - 'slug': model['media'].slug, - 'thumbnail': f'{model.media.url}-thumbnail', - 'url': model['media'].url, - 'academy': { - 'id': model['academy'].id, - 'slug': model['academy'].slug, - 'name': model['academy'].name, - } - }) + json, + { + "categories": [ + { + "id": 1, + "medias": 1, + "name": model["category"].name, + "slug": model["category"].slug, + } + ], + "hash": model["media"].hash, + "hits": model["media"].hits, + "id": model["media"].id, + "mime": model["media"].mime, + "name": model["media"].name, + "slug": model["media"].slug, + "thumbnail": f"{model.media.url}-thumbnail", + "url": model["media"].url, + "academy": { + "id": model["academy"].id, + "slug": model["academy"].slug, + "name": model["academy"].name, + }, + }, + ) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, 'media')}]) + self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, "media")}]) diff --git a/breathecode/media/tests/urls/tests_resolution_id.py b/breathecode/media/tests/urls/tests_resolution_id.py index 9f0a2a6be..bdcbc6695 100644 --- a/breathecode/media/tests/urls/tests_resolution_id.py +++ b/breathecode/media/tests/urls/tests_resolution_id.py @@ -1,6 +1,7 @@ """ Test /answer """ + import re, urllib from unittest.mock import MagicMock, Mock, call, patch from django.urls.base import reverse_lazy @@ -16,134 +17,141 @@ class MediaTestSuite(MediaTestCase): - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_resolution_id_without_auth(self): """Test /answer without auth""" - url = reverse_lazy('media:resolution_id', kwargs={'resolution_id': 1}) + url = 
reverse_lazy("media:resolution_id", kwargs={"resolution_id": 1}) response = self.client.delete(url) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_resolution_id_wrong_academy(self): """Test /answer without auth""" - url = reverse_lazy('media:resolution_id', kwargs={'resolution_id': 1}) - response = self.client.delete(url, **{'HTTP_Academy': 1}) + url = reverse_lazy("media:resolution_id", kwargs={"resolution_id": 1}) + response = self.client.delete(url, **{"HTTP_Academy": 1}) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_resolution_id_without_data(self): """Test /answer without auth""" self.headers(academy=1) - models = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_media_resolution', - role='potato') - url = reverse_lazy('media:resolution_id', kwargs={'resolution_id': 1}) + models = self.generate_models( + authenticate=True, profile_academy=True, capability="read_media_resolution", role="potato" + ) + url = reverse_lazy("media:resolution_id", kwargs={"resolution_id": 1}) response = self.client.get(url) json = response.json() - self.assertEqual(json, {'detail': 'resolution-not-found', 'status_code': 404}) + self.assertEqual(json, {"detail": "resolution-not-found", "status_code": 404}) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) self.assertEqual(self.all_media_resolution_dict(), []) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_resolution_id_delete_without_capability(self): """Test /cohort/:id without auth""" self.headers(academy=1) - url = reverse_lazy('media:resolution_id', kwargs={'resolution_id': 1}) + url = reverse_lazy("media:resolution_id", kwargs={"resolution_id": 1}) self.generate_models(authenticate=True) data = {} response = self.client.delete(url, data) json = response.json() - self.assertEqual(json, { - 'detail': "You (user: 1) don't have this capability: crud_media_resolution for academy 1", - 'status_code': 403 - }) + self.assertEqual( + json, + { + "detail": "You (user: 1) don't have this capability: crud_media_resolution for academy 1", + "status_code": 403, + }, + ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], 
apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_resolution_id_get_without_media(self): """Test /answer without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_media_resolution', - role='potato', - media_resolution=True) - url = reverse_lazy('media:resolution_id', kwargs={'resolution_id': 1}) + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="read_media_resolution", + role="potato", + media_resolution=True, + ) + url = reverse_lazy("media:resolution_id", kwargs={"resolution_id": 1}) response = self.client.get(url) json = response.json() - self.assertEqual(json, {'detail': 'resolution-media-not-found', 'status_code': 404}) + self.assertEqual(json, {"detail": "resolution-media-not-found", "status_code": 404}) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) self.assertEqual(self.all_media_resolution_dict(), []) """Test /answer""" - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_resolution_id_delete_without_data(self): """Test /answer without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_media_resolution', - role='potato') - url = reverse_lazy('media:resolution_id', kwargs={'resolution_id': 1}) + model = self.generate_models( + authenticate=True, profile_academy=True, capability="crud_media_resolution", role="potato" + ) + url = reverse_lazy("media:resolution_id", kwargs={"resolution_id": 1}) response = self.client.delete(url) json = response.json() - self.assertEqual(json, {'detail': 'resolution-not-found', 'status_code': 404}) + self.assertEqual(json, {"detail": "resolution-not-found", "status_code": 404}) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) self.assertEqual(self.all_media_resolution_dict(), []) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_resolution_id_delete_without_media(self): """Test /answer without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_media_resolution', - role='potato', - media_resolution=True) - url = reverse_lazy('media:resolution_id', kwargs={'resolution_id': 1}) + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="crud_media_resolution", + role="potato", + media_resolution=True, + ) + url = reverse_lazy("media:resolution_id", kwargs={"resolution_id": 1}) response = self.client.delete(url) json 
= response.json() - self.assertEqual(json, {'detail': 'resolution-media-not-found', 'status_code': 404}) + self.assertEqual(json, {"detail": "resolution-media-not-found", "status_code": 404}) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) self.assertEqual(self.all_media_resolution_dict(), []) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_resolution_id_delete(self): """Test /answer without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_media_resolution', - role='potato', - media_resolution=True, - media=True, - media_kwargs={'hash': 'abc'}, - media_resolution_kwargs={'hash': 'abc'}) - url = reverse_lazy('media:resolution_id', kwargs={'resolution_id': 1}) + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="crud_media_resolution", + role="potato", + media_resolution=True, + media=True, + media_kwargs={"hash": "abc"}, + media_resolution_kwargs={"hash": "abc"}, + ) + url = reverse_lazy("media:resolution_id", kwargs={"resolution_id": 1}) response = self.client.delete(url) self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) diff --git a/breathecode/media/tests/urls/tests_root.py b/breathecode/media/tests/urls/tests_root.py index 33f49a48b..289f193b5 100644 --- a/breathecode/media/tests/urls/tests_root.py +++ b/breathecode/media/tests/urls/tests_root.py @@ -1,6 +1,7 @@ """ Test /answer """ + import re, urllib from unittest.mock import MagicMock, call, patch from django.urls.base import reverse_lazy @@ -17,61 +18,61 @@ class MediaTestSuite(MediaTestCase): """Test /answer""" + """ 🔽🔽🔽 Auth """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_root__without_auth(self): """Test /answer without auth""" - url = reverse_lazy('media:root') + url = reverse_lazy("media:root") response = self.client.get(url) json = response.json() self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_root__wrong_academy(self): """Test /answer without auth""" - url = reverse_lazy('media:root') - response = self.client.get(url, **{'HTTP_Academy': 1}) + url = reverse_lazy("media:root") + response = self.client.get(url, **{"HTTP_Academy": 1}) json = response.json() self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) - @patch(GOOGLE_CLOUD_PATH['client'], 
apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_root__without_capability(self): """Test /cohort/:id without auth""" self.headers(academy=1) - url = reverse_lazy('media:root') + url = reverse_lazy("media:root") self.generate_models(authenticate=True) response = self.client.get(url) json = response.json() - self.assertEqual(json, { - 'detail': "You (user: 1) don't have this capability: read_media for academy 1", - 'status_code': 403 - }) + self.assertEqual( + json, {"detail": "You (user: 1) don't have this capability: read_media for academy 1", "status_code": 403} + ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) """ 🔽🔽🔽 Without data """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_root__without_data(self): """Test /answer without auth""" self.headers(academy=1) - models = self.generate_models(authenticate=True, profile_academy=True, capability='read_media', role='potato') - url = reverse_lazy('media:root') + models = self.generate_models(authenticate=True, profile_academy=True, capability="read_media", role="potato") + url = reverse_lazy("media:root") response = self.client.get(url) json = response.json() @@ -83,1173 +84,1194 @@ def test_root__without_data(self): 🔽🔽🔽 With data """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_root(self): """Test /answer without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_media', - role='potato', - media=True) - url = reverse_lazy('media:root') + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_media", role="potato", media=True + ) + url = reverse_lazy("media:root") response = self.client.get(url) json = response.json() - self.assertEqual(json, [{ - 'categories': [], - 'hash': model['media'].hash, - 'hits': model['media'].hits, - 'id': model['media'].id, - 'mime': model['media'].mime, - 'name': model['media'].name, - 'slug': model['media'].slug, - 'thumbnail': f'{model.media.url}-thumbnail', - 'url': model['media'].url, - 'academy': { - 'id': model['academy'].id, - 'slug': model['academy'].slug, - 'name': model['academy'].name, - } - }]) + self.assertEqual( + json, + [ + { + "categories": [], + "hash": model["media"].hash, + "hits": model["media"].hits, + "id": model["media"].id, + "mime": model["media"].mime, + "name": model["media"].name, + "slug": model["media"].slug, + "thumbnail": f"{model.media.url}-thumbnail", + 
"url": model["media"].url, + "academy": { + "id": model["academy"].id, + "slug": model["academy"].slug, + "name": model["academy"].name, + }, + } + ], + ) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, 'media')}]) + self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, "media")}]) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_root__with_category(self): """Test /answer without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_media', - role='potato', - media=True, - category=True) - url = reverse_lazy('media:root') + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_media", role="potato", media=True, category=True + ) + url = reverse_lazy("media:root") response = self.client.get(url) json = response.json() self.assertEqual( json, - [{ - 'categories': [{ - 'id': 1, - 'medias': 1, - 'name': model['category'].name, - 'slug': model['category'].slug, - }], - 'hash': model['media'].hash, - 'hits': model['media'].hits, - 'id': model['media'].id, - 'mime': model['media'].mime, - 'name': model['media'].name, - 'slug': model['media'].slug, - 'thumbnail': f'{model.media.url}-thumbnail', - 'url': model['media'].url, - 'academy': { - 'id': model['academy'].id, - 'slug': model['academy'].slug, - 'name': model['academy'].name, + [ + { + "categories": [ + { + "id": 1, + "medias": 1, + "name": model["category"].name, + "slug": model["category"].slug, + } + ], + "hash": model["media"].hash, + "hits": model["media"].hits, + "id": model["media"].id, + "mime": model["media"].mime, + "name": model["media"].name, + "slug": model["media"].slug, + "thumbnail": f"{model.media.url}-thumbnail", + "url": model["media"].url, + "academy": { + "id": model["academy"].id, + "slug": model["academy"].slug, + "name": model["academy"].name, + }, } - }]) + ], + ) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, 'media')}]) + self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, "media")}]) """ 🔽🔽🔽 Academy in querystring """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_root__with_category__with_bad_academy_in_querystring(self): """Test /answer without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_media', - role='potato', - media=True, - category=True) - url = reverse_lazy('media:root') + '?academy=0' + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_media", role="potato", media=True, category=True + ) + url = reverse_lazy("media:root") + 
"?academy=0" response = self.client.get(url) json = response.json() self.assertEqual(json, []) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, 'media')}]) + self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, "media")}]) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_root__with_category__with_academy_in_querystring(self): """Test /answer without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_media', - role='potato', - media=True, - category=True) - url = reverse_lazy('media:root') + '?academy=1' + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_media", role="potato", media=True, category=True + ) + url = reverse_lazy("media:root") + "?academy=1" response = self.client.get(url) json = response.json() self.assertEqual( json, - [{ - 'categories': [{ - 'id': 1, - 'medias': 1, - 'name': model['category'].name, - 'slug': model['category'].slug, - }], - 'hash': model['media'].hash, - 'hits': model['media'].hits, - 'id': model['media'].id, - 'mime': model['media'].mime, - 'name': model['media'].name, - 'slug': model['media'].slug, - 'thumbnail': f'{model.media.url}-thumbnail', - 'url': model['media'].url, - 'academy': { - 'id': model['academy'].id, - 'slug': model['academy'].slug, - 'name': model['academy'].name, + [ + { + "categories": [ + { + "id": 1, + "medias": 1, + "name": model["category"].name, + "slug": model["category"].slug, + } + ], + "hash": model["media"].hash, + "hits": model["media"].hits, + "id": model["media"].id, + "mime": model["media"].mime, + "name": model["media"].name, + "slug": model["media"].slug, + "thumbnail": f"{model.media.url}-thumbnail", + "url": model["media"].url, + "academy": { + "id": model["academy"].id, + "slug": model["academy"].slug, + "name": model["academy"].name, + }, } - }]) + ], + ) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, 'media')}]) + self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, "media")}]) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_root__with_category__with_two_academy_in_querystring(self): """Test /answer without auth""" self.headers(academy=1) - base = self.generate_models(authenticate=True, profile_academy=True, capability='read_media', role='potato') + base = self.generate_models(authenticate=True, profile_academy=True, capability="read_media", role="potato") - del base['academy'] + del base["academy"] models = [self.generate_models(academy=True, media=True, category=True, models=base) for _ in range(0, 2)] - ordened_models = sorted(models, key=lambda x: 
x['media'].created_at, reverse=True) + ordened_models = sorted(models, key=lambda x: x["media"].created_at, reverse=True) - url = (reverse_lazy('media:root') + '?academy=' + str(models[0]['media'].academy.id) + ',' + - str(models[1]['media'].academy.id)) + url = ( + reverse_lazy("media:root") + + "?academy=" + + str(models[0]["media"].academy.id) + + "," + + str(models[1]["media"].academy.id) + ) response = self.client.get(url) json = response.json() - self.assertEqual(json, [{ - 'categories': [{ - 'id': model['category'].id, - 'medias': 1, - 'name': model['category'].name, - 'slug': model['category'].slug, - }], - 'hash': - model['media'].hash, - 'hits': - model['media'].hits, - 'id': - model['media'].id, - 'mime': - model['media'].mime, - 'name': - model['media'].name, - 'slug': - model['media'].slug, - 'thumbnail': - f'{model.media.url}-thumbnail', - 'url': - model['media'].url, - 'academy': { - 'id': model['academy'].id, - 'slug': model['academy'].slug, - 'name': model['academy'].name, - } - } for model in ordened_models]) + self.assertEqual( + json, + [ + { + "categories": [ + { + "id": model["category"].id, + "medias": 1, + "name": model["category"].name, + "slug": model["category"].slug, + } + ], + "hash": model["media"].hash, + "hits": model["media"].hits, + "id": model["media"].id, + "mime": model["media"].mime, + "name": model["media"].name, + "slug": model["media"].slug, + "thumbnail": f"{model.media.url}-thumbnail", + "url": model["media"].url, + "academy": { + "id": model["academy"].id, + "slug": model["academy"].slug, + "name": model["academy"].name, + }, + } + for model in ordened_models + ], + ) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, 'media')} for model in models]) + self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, "media")} for model in models]) """ 🔽🔽🔽 Mime in querystring """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_root__with_category__with_bad_mime_in_querystring(self): """Test /answer without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_media', - role='potato', - media=True, - category=True) - url = reverse_lazy('media:root') + '?mime=application/hitman' + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_media", role="potato", media=True, category=True + ) + url = reverse_lazy("media:root") + "?mime=application/hitman" response = self.client.get(url) json = response.json() self.assertEqual(json, []) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, 'media')}]) + self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, "media")}]) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], 
apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_root__with_category__with_mime_in_querystring(self): """Test /answer without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_media', - role='potato', - media=True, - category=True) - url = reverse_lazy('media:root') + '?mime=' + model['media'].mime + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_media", role="potato", media=True, category=True + ) + url = reverse_lazy("media:root") + "?mime=" + model["media"].mime response = self.client.get(url) json = response.json() self.assertEqual( json, - [{ - 'categories': [{ - 'id': 1, - 'medias': 1, - 'name': model['category'].name, - 'slug': model['category'].slug, - }], - 'hash': model['media'].hash, - 'hits': model['media'].hits, - 'id': model['media'].id, - 'mime': model['media'].mime, - 'name': model['media'].name, - 'slug': model['media'].slug, - 'thumbnail': f'{model.media.url}-thumbnail', - 'url': model['media'].url, - 'academy': { - 'id': model['academy'].id, - 'slug': model['academy'].slug, - 'name': model['academy'].name, + [ + { + "categories": [ + { + "id": 1, + "medias": 1, + "name": model["category"].name, + "slug": model["category"].slug, + } + ], + "hash": model["media"].hash, + "hits": model["media"].hits, + "id": model["media"].id, + "mime": model["media"].mime, + "name": model["media"].name, + "slug": model["media"].slug, + "thumbnail": f"{model.media.url}-thumbnail", + "url": model["media"].url, + "academy": { + "id": model["academy"].id, + "slug": model["academy"].slug, + "name": model["academy"].name, + }, } - }]) + ], + ) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, 'media')}]) + self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, "media")}]) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_root__with_category__with_two_mime_in_querystring(self): """Test /answer without auth""" self.headers(academy=1) - base = self.generate_models(authenticate=True, profile_academy=True, capability='read_media', role='potato') + base = self.generate_models(authenticate=True, profile_academy=True, capability="read_media", role="potato") models = [self.generate_models(media=True, category=True, models=base) for _ in range(0, 2)] - ordened_models = sorted(models, key=lambda x: x['media'].created_at, reverse=True) + ordened_models = sorted(models, key=lambda x: x["media"].created_at, reverse=True) - url = reverse_lazy('media:root') + '?mime=' + models[0]['media'].mime + ',' + models[1]['media'].mime + url = reverse_lazy("media:root") + "?mime=" + models[0]["media"].mime + "," + models[1]["media"].mime response = self.client.get(url) json = response.json() - self.assertEqual(json, [{ - 'categories': [{ - 'id': model['category'].id, - 'medias': 1, - 'name': model['category'].name, - 'slug': model['category'].slug, - }], - 'hash': - model['media'].hash, - 'hits': - model['media'].hits, - 'id': - model['media'].id, - 'mime': - 
model['media'].mime, - 'name': - model['media'].name, - 'slug': - model['media'].slug, - 'thumbnail': - f'{model.media.url}-thumbnail', - 'url': - model['media'].url, - 'academy': { - 'id': model['academy'].id, - 'slug': model['academy'].slug, - 'name': model['academy'].name, - } - } for model in ordened_models]) + self.assertEqual( + json, + [ + { + "categories": [ + { + "id": model["category"].id, + "medias": 1, + "name": model["category"].name, + "slug": model["category"].slug, + } + ], + "hash": model["media"].hash, + "hits": model["media"].hits, + "id": model["media"].id, + "mime": model["media"].mime, + "name": model["media"].name, + "slug": model["media"].slug, + "thumbnail": f"{model.media.url}-thumbnail", + "url": model["media"].url, + "academy": { + "id": model["academy"].id, + "slug": model["academy"].slug, + "name": model["academy"].name, + }, + } + for model in ordened_models + ], + ) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, 'media')} for model in models]) + self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, "media")} for model in models]) """ 🔽🔽🔽 Name in querystring """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_root__with_category__with_bad_name_in_querystring(self): """Test /answer without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_media', - role='potato', - media=True, - category=True) - url = reverse_lazy('media:root') + '?name=hitman' + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_media", role="potato", media=True, category=True + ) + url = reverse_lazy("media:root") + "?name=hitman" response = self.client.get(url) json = response.json() self.assertEqual(json, []) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, 'media')}]) + self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, "media")}]) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_root__with_category__with_name_in_querystring(self): """Test /answer without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_media', - role='potato', - media=True, - category=True) - url = reverse_lazy('media:root') + '?name=' + model['media'].name + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_media", role="potato", media=True, category=True + ) + url = reverse_lazy("media:root") + "?name=" + model["media"].name response = self.client.get(url) json = response.json() self.assertEqual( json, - [{ - 'categories': [{ - 'id': 1, - 
'medias': 1, - 'name': model['category'].name, - 'slug': model['category'].slug, - }], - 'hash': model['media'].hash, - 'hits': model['media'].hits, - 'id': model['media'].id, - 'mime': model['media'].mime, - 'name': model['media'].name, - 'slug': model['media'].slug, - 'thumbnail': f'{model.media.url}-thumbnail', - 'url': model['media'].url, - 'academy': { - 'id': model['academy'].id, - 'slug': model['academy'].slug, - 'name': model['academy'].name, + [ + { + "categories": [ + { + "id": 1, + "medias": 1, + "name": model["category"].name, + "slug": model["category"].slug, + } + ], + "hash": model["media"].hash, + "hits": model["media"].hits, + "id": model["media"].id, + "mime": model["media"].mime, + "name": model["media"].name, + "slug": model["media"].slug, + "thumbnail": f"{model.media.url}-thumbnail", + "url": model["media"].url, + "academy": { + "id": model["academy"].id, + "slug": model["academy"].slug, + "name": model["academy"].name, + }, } - }]) + ], + ) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, 'media')}]) + self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, "media")}]) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_root__with_category__with_two_name_in_querystring(self): """Test /answer without auth""" self.headers(academy=1) - base = self.generate_models(authenticate=True, profile_academy=True, capability='read_media', role='potato') + base = self.generate_models(authenticate=True, profile_academy=True, capability="read_media", role="potato") models = [self.generate_models(media=True, category=True, models=base) for _ in range(0, 2)] - ordened_models = sorted(models, key=lambda x: x['media'].created_at, reverse=True) + ordened_models = sorted(models, key=lambda x: x["media"].created_at, reverse=True) - url = (reverse_lazy('media:root') + '?name=' + models[0]['media'].name + ',' + models[1]['media'].name) + url = reverse_lazy("media:root") + "?name=" + models[0]["media"].name + "," + models[1]["media"].name response = self.client.get(url) json = response.json() - self.assertEqual(json, [{ - 'categories': [{ - 'id': model['category'].id, - 'medias': 1, - 'name': model['category'].name, - 'slug': model['category'].slug, - }], - 'hash': - model['media'].hash, - 'hits': - model['media'].hits, - 'id': - model['media'].id, - 'mime': - model['media'].mime, - 'name': - model['media'].name, - 'slug': - model['media'].slug, - 'thumbnail': - f'{model.media.url}-thumbnail', - 'url': - model['media'].url, - 'academy': { - 'id': model['academy'].id, - 'slug': model['academy'].slug, - 'name': model['academy'].name, - } - } for model in ordened_models]) + self.assertEqual( + json, + [ + { + "categories": [ + { + "id": model["category"].id, + "medias": 1, + "name": model["category"].name, + "slug": model["category"].slug, + } + ], + "hash": model["media"].hash, + "hits": model["media"].hits, + "id": model["media"].id, + "mime": model["media"].mime, + "name": model["media"].name, + "slug": model["media"].slug, + "thumbnail": f"{model.media.url}-thumbnail", + "url": model["media"].url, + "academy": { + "id": 
model["academy"].id, + "slug": model["academy"].slug, + "name": model["academy"].name, + }, + } + for model in ordened_models + ], + ) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, 'media')} for model in models]) + self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, "media")} for model in models]) """ 🔽🔽🔽 Slug in querystring """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_root__with_category__with_bad_slug_in_querystring(self): """Test /answer without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_media', - role='potato', - media=True, - category=True) - url = reverse_lazy('media:root') + '?slug=hitman' + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_media", role="potato", media=True, category=True + ) + url = reverse_lazy("media:root") + "?slug=hitman" response = self.client.get(url) json = response.json() self.assertEqual(json, []) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, 'media')}]) + self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, "media")}]) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_root__with_category__with_slug_in_querystring(self): """Test /answer without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_media', - role='potato', - media=True, - category=True) - url = reverse_lazy('media:root') + '?slug=' + model['media'].slug + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_media", role="potato", media=True, category=True + ) + url = reverse_lazy("media:root") + "?slug=" + model["media"].slug response = self.client.get(url) json = response.json() self.assertEqual( json, - [{ - 'categories': [{ - 'id': 1, - 'medias': 1, - 'name': model['category'].name, - 'slug': model['category'].slug, - }], - 'hash': model['media'].hash, - 'hits': model['media'].hits, - 'id': model['media'].id, - 'mime': model['media'].mime, - 'name': model['media'].name, - 'slug': model['media'].slug, - 'thumbnail': f'{model.media.url}-thumbnail', - 'url': model['media'].url, - 'academy': { - 'id': model['academy'].id, - 'slug': model['academy'].slug, - 'name': model['academy'].name, + [ + { + "categories": [ + { + "id": 1, + "medias": 1, + "name": model["category"].name, + "slug": model["category"].slug, + } + ], + "hash": model["media"].hash, + "hits": model["media"].hits, + "id": model["media"].id, + "mime": model["media"].mime, + "name": model["media"].name, + "slug": model["media"].slug, + 
"thumbnail": f"{model.media.url}-thumbnail", + "url": model["media"].url, + "academy": { + "id": model["academy"].id, + "slug": model["academy"].slug, + "name": model["academy"].name, + }, } - }]) + ], + ) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, 'media')}]) + self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, "media")}]) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_root__with_category__with_two_slug_in_querystring(self): """Test /answer without auth""" self.headers(academy=1) - base = self.generate_models(authenticate=True, profile_academy=True, capability='read_media', role='potato') + base = self.generate_models(authenticate=True, profile_academy=True, capability="read_media", role="potato") models = [self.generate_models(media=True, category=True, models=base) for _ in range(0, 2)] - ordened_models = sorted(models, key=lambda x: x['media'].created_at, reverse=True) + ordened_models = sorted(models, key=lambda x: x["media"].created_at, reverse=True) - url = (reverse_lazy('media:root') + '?slug=' + models[0]['media'].slug + ',' + models[1]['media'].slug) + url = reverse_lazy("media:root") + "?slug=" + models[0]["media"].slug + "," + models[1]["media"].slug response = self.client.get(url) json = response.json() - expected = [{ - 'categories': [{ - 'id': model['category'].id, - 'medias': 1, - 'name': model['category'].name, - 'slug': model['category'].slug, - }], - 'hash': - model['media'].hash, - 'hits': - model['media'].hits, - 'id': - model['media'].id, - 'mime': - model['media'].mime, - 'name': - model['media'].name, - 'slug': - model['media'].slug, - 'thumbnail': - f'{model.media.url}-thumbnail', - 'url': - model['media'].url, - 'academy': { - 'id': model['academy'].id, - 'slug': model['academy'].slug, - 'name': model['academy'].name, + expected = [ + { + "categories": [ + { + "id": model["category"].id, + "medias": 1, + "name": model["category"].name, + "slug": model["category"].slug, + } + ], + "hash": model["media"].hash, + "hits": model["media"].hits, + "id": model["media"].id, + "mime": model["media"].mime, + "name": model["media"].name, + "slug": model["media"].slug, + "thumbnail": f"{model.media.url}-thumbnail", + "url": model["media"].url, + "academy": { + "id": model["academy"].id, + "slug": model["academy"].slug, + "name": model["academy"].name, + }, } - } for model in ordened_models] + for model in ordened_models + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, 'media')} for model in models]) + self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, "media")} for model in models]) """ 🔽🔽🔽 Id in querystring """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], 
apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_root__with_category__with_bad_id_in_querystring(self): """Test /answer without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_media', - role='potato', - media=True, - category=True) - url = reverse_lazy('media:root') + '?id=0' + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_media", role="potato", media=True, category=True + ) + url = reverse_lazy("media:root") + "?id=0" response = self.client.get(url) json = response.json() self.assertEqual(json, []) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, 'media')}]) + self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, "media")}]) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_root__with_category__with_id_in_querystring(self): """Test /answer without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_media', - role='potato', - media=True, - category=True) - url = reverse_lazy('media:root') + '?id=' + str(model['media'].id) + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_media", role="potato", media=True, category=True + ) + url = reverse_lazy("media:root") + "?id=" + str(model["media"].id) response = self.client.get(url) json = response.json() self.assertEqual( json, - [{ - 'categories': [{ - 'id': 1, - 'medias': 1, - 'name': model['category'].name, - 'slug': model['category'].slug, - }], - 'hash': model['media'].hash, - 'hits': model['media'].hits, - 'id': model['media'].id, - 'mime': model['media'].mime, - 'name': model['media'].name, - 'slug': model['media'].slug, - 'thumbnail': f'{model.media.url}-thumbnail', - 'url': model['media'].url, - 'academy': { - 'id': model['academy'].id, - 'slug': model['academy'].slug, - 'name': model['academy'].name, + [ + { + "categories": [ + { + "id": 1, + "medias": 1, + "name": model["category"].name, + "slug": model["category"].slug, + } + ], + "hash": model["media"].hash, + "hits": model["media"].hits, + "id": model["media"].id, + "mime": model["media"].mime, + "name": model["media"].name, + "slug": model["media"].slug, + "thumbnail": f"{model.media.url}-thumbnail", + "url": model["media"].url, + "academy": { + "id": model["academy"].id, + "slug": model["academy"].slug, + "name": model["academy"].name, + }, } - }]) + ], + ) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, 'media')}]) + self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, "media")}]) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], 
apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_root__with_category__with_two_id_in_querystring(self): """Test /answer without auth""" self.headers(academy=1) - base = self.generate_models(authenticate=True, profile_academy=True, capability='read_media', role='potato') + base = self.generate_models(authenticate=True, profile_academy=True, capability="read_media", role="potato") models = [self.generate_models(media=True, category=True, models=base) for _ in range(0, 2)] - ordened_models = sorted(models, key=lambda x: x['media'].created_at, reverse=True) + ordened_models = sorted(models, key=lambda x: x["media"].created_at, reverse=True) - url = (reverse_lazy('media:root') + '?id=' + str(models[0]['media'].id) + ',' + str(models[1]['media'].id)) + url = reverse_lazy("media:root") + "?id=" + str(models[0]["media"].id) + "," + str(models[1]["media"].id) response = self.client.get(url) json = response.json() - self.assertEqual(json, [{ - 'categories': [{ - 'id': model['category'].id, - 'medias': 1, - 'name': model['category'].name, - 'slug': model['category'].slug, - }], - 'hash': - model['media'].hash, - 'hits': - model['media'].hits, - 'id': - model['media'].id, - 'mime': - model['media'].mime, - 'name': - model['media'].name, - 'slug': - model['media'].slug, - 'thumbnail': - f'{model.media.url}-thumbnail', - 'url': - model['media'].url, - 'academy': { - 'id': model['academy'].id, - 'slug': model['academy'].slug, - 'name': model['academy'].name, - } - } for model in ordened_models]) + self.assertEqual( + json, + [ + { + "categories": [ + { + "id": model["category"].id, + "medias": 1, + "name": model["category"].name, + "slug": model["category"].slug, + } + ], + "hash": model["media"].hash, + "hits": model["media"].hits, + "id": model["media"].id, + "mime": model["media"].mime, + "name": model["media"].name, + "slug": model["media"].slug, + "thumbnail": f"{model.media.url}-thumbnail", + "url": model["media"].url, + "academy": { + "id": model["academy"].id, + "slug": model["academy"].slug, + "name": model["academy"].name, + }, + } + for model in ordened_models + ], + ) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, 'media')} for model in models]) + self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, "media")} for model in models]) """ 🔽🔽🔽 Categories in querystring """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_root__with_category__with_bad_categories_in_querystring(self): """Test /answer without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_media', - role='potato', - media=True, - category=True) - url = reverse_lazy('media:root') + '?categories=0' + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_media", role="potato", media=True, category=True + ) + url = reverse_lazy("media:root") + "?categories=0" response = self.client.get(url) json = response.json() self.assertEqual(json, []) self.assertEqual(response.status_code, 
status.HTTP_200_OK) - self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, 'media')}]) + self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, "media")}]) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_root__with_category__with_categories_in_querystring(self): """Test /answer without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_media', - role='potato', - media=True, - category=True) - url = reverse_lazy('media:root') + '?categories=1' + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_media", role="potato", media=True, category=True + ) + url = reverse_lazy("media:root") + "?categories=1" response = self.client.get(url) json = response.json() self.assertEqual( json, - [{ - 'categories': [{ - 'id': 1, - 'medias': 1, - 'name': model['category'].name, - 'slug': model['category'].slug, - }], - 'hash': model['media'].hash, - 'hits': model['media'].hits, - 'id': model['media'].id, - 'mime': model['media'].mime, - 'name': model['media'].name, - 'slug': model['media'].slug, - 'thumbnail': f'{model.media.url}-thumbnail', - 'url': model['media'].url, - 'academy': { - 'id': model['academy'].id, - 'slug': model['academy'].slug, - 'name': model['academy'].name, + [ + { + "categories": [ + { + "id": 1, + "medias": 1, + "name": model["category"].name, + "slug": model["category"].slug, + } + ], + "hash": model["media"].hash, + "hits": model["media"].hits, + "id": model["media"].id, + "mime": model["media"].mime, + "name": model["media"].name, + "slug": model["media"].slug, + "thumbnail": f"{model.media.url}-thumbnail", + "url": model["media"].url, + "academy": { + "id": model["academy"].id, + "slug": model["academy"].slug, + "name": model["academy"].name, + }, } - }]) + ], + ) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, 'media')}]) + self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, "media")}]) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_root__with_category__with_two_categories_in_querystring__return_nothing(self): """Test /answer without auth""" self.headers(academy=1) - base = self.generate_models(authenticate=True, profile_academy=True, capability='read_media', role='potato') + base = self.generate_models(authenticate=True, profile_academy=True, capability="read_media", role="potato") models = [self.generate_models(media=True, category=True, models=base) for _ in range(0, 2)] - url = (reverse_lazy('media:root') + '?categories=1,2') + url = reverse_lazy("media:root") + "?categories=1,2" response = self.client.get(url) json = response.json() self.assertEqual(json, []) 
self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, 'media')} for model in models]) + self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, "media")} for model in models]) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_root__with_category__with_two_categories_in_querystring(self): """Test /answer without auth""" self.headers(academy=1) - base = self.generate_models(authenticate=True, profile_academy=True, capability='read_media', role='potato') + base = self.generate_models(authenticate=True, profile_academy=True, capability="read_media", role="potato") categories = [self.generate_models(category=True).category for _ in range(0, 2)] category1 = categories[0] category2 = categories[1] - media_kwargs = {'categories': [x.id for x in categories]} + media_kwargs = {"categories": [x.id for x in categories]} models = [self.generate_models(media=True, models=base, media_kwargs=media_kwargs) for _ in range(0, 2)] - ordened_models = sorted(models, key=lambda x: x['media'].created_at, reverse=True) + ordened_models = sorted(models, key=lambda x: x["media"].created_at, reverse=True) - url = (reverse_lazy('media:root') + '?categories=1,2') + url = reverse_lazy("media:root") + "?categories=1,2" response = self.client.get(url) json = response.json() - self.assertEqual(json, [{ - 'categories': [{ - 'id': category1.id, - 'medias': 2, - 'name': category1.name, - 'slug': category1.slug, - }, { - 'id': category2.id, - 'medias': 2, - 'name': category2.name, - 'slug': category2.slug, - }], - 'hash': - model['media'].hash, - 'hits': - model['media'].hits, - 'id': - model['media'].id, - 'mime': - model['media'].mime, - 'name': - model['media'].name, - 'slug': - model['media'].slug, - 'thumbnail': - f'{model.media.url}-thumbnail', - 'url': - model['media'].url, - 'academy': { - 'id': model['academy'].id, - 'slug': model['academy'].slug, - 'name': model['academy'].name, - } - } for model in ordened_models]) + self.assertEqual( + json, + [ + { + "categories": [ + { + "id": category1.id, + "medias": 2, + "name": category1.name, + "slug": category1.slug, + }, + { + "id": category2.id, + "medias": 2, + "name": category2.name, + "slug": category2.slug, + }, + ], + "hash": model["media"].hash, + "hits": model["media"].hits, + "id": model["media"].id, + "mime": model["media"].mime, + "name": model["media"].name, + "slug": model["media"].slug, + "thumbnail": f"{model.media.url}-thumbnail", + "url": model["media"].url, + "academy": { + "id": model["academy"].id, + "slug": model["academy"].slug, + "name": model["academy"].name, + }, + } + for model in ordened_models + ], + ) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, 'media')} for model in models]) + self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, "media")} for model in models]) """ 🔽🔽🔽 Type in querystring """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], 
apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_root__with_category__with_bad_type_in_querystring(self): """Test /answer without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_media', - role='potato', - media=True, - category=True) - url = reverse_lazy('media:root') + '?type=freyja' + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_media", role="potato", media=True, category=True + ) + url = reverse_lazy("media:root") + "?type=freyja" response = self.client.get(url) json = response.json() self.assertEqual(json, []) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, 'media')}]) + self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, "media")}]) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_root__with_category__with_type_in_querystring(self): """Test /answer without auth""" self.headers(academy=1) - media_kwargs = {'mime': 'application/pdf'} - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_media', - role='potato', - media=True, - category=True, - media_kwargs=media_kwargs) - url = reverse_lazy('media:root') + '?type=pdf' + media_kwargs = {"mime": "application/pdf"} + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="read_media", + role="potato", + media=True, + category=True, + media_kwargs=media_kwargs, + ) + url = reverse_lazy("media:root") + "?type=pdf" response = self.client.get(url) json = response.json() self.assertEqual( json, - [{ - 'categories': [{ - 'id': 1, - 'medias': 1, - 'name': model['category'].name, - 'slug': model['category'].slug, - }], - 'hash': model['media'].hash, - 'hits': model['media'].hits, - 'id': model['media'].id, - 'mime': model['media'].mime, - 'name': model['media'].name, - 'slug': model['media'].slug, - 'thumbnail': f'{model.media.url}-thumbnail', - 'url': model['media'].url, - 'academy': { - 'id': model['academy'].id, - 'slug': model['academy'].slug, - 'name': model['academy'].name, + [ + { + "categories": [ + { + "id": 1, + "medias": 1, + "name": model["category"].name, + "slug": model["category"].slug, + } + ], + "hash": model["media"].hash, + "hits": model["media"].hits, + "id": model["media"].id, + "mime": model["media"].mime, + "name": model["media"].name, + "slug": model["media"].slug, + "thumbnail": f"{model.media.url}-thumbnail", + "url": model["media"].url, + "academy": { + "id": model["academy"].id, + "slug": model["academy"].slug, + "name": model["academy"].name, + }, } - }]) + ], + ) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, 'media')}]) + self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, "media")}]) """ 🔽🔽🔽 Like in querystring """ - 
@patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_root__with_category__with_bad_like_in_querystring(self): """Test /answer without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_media', - role='potato', - media=True, - category=True) - url = reverse_lazy('media:root') + '?like=freyja' + model = self.generate_models( + authenticate=True, profile_academy=True, capability="read_media", role="potato", media=True, category=True + ) + url = reverse_lazy("media:root") + "?like=freyja" response = self.client.get(url) json = response.json() self.assertEqual(json, []) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, 'media')}]) + self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, "media")}]) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_root__with_category__with_like_in_querystring__like_match_name(self): """Test /answer without auth""" self.headers(academy=1) - media_kwargs = {'name': 'Freyja'} - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_media', - role='potato', - media=True, - category=True, - media_kwargs=media_kwargs) - url = reverse_lazy('media:root') + '?like=fre' + media_kwargs = {"name": "Freyja"} + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="read_media", + role="potato", + media=True, + category=True, + media_kwargs=media_kwargs, + ) + url = reverse_lazy("media:root") + "?like=fre" response = self.client.get(url) json = response.json() self.assertEqual( json, - [{ - 'categories': [{ - 'id': 1, - 'medias': 1, - 'name': model['category'].name, - 'slug': model['category'].slug, - }], - 'hash': model['media'].hash, - 'hits': model['media'].hits, - 'id': model['media'].id, - 'mime': model['media'].mime, - 'name': model['media'].name, - 'slug': model['media'].slug, - 'thumbnail': f'{model.media.url}-thumbnail', - 'url': model['media'].url, - 'academy': { - 'id': model['academy'].id, - 'slug': model['academy'].slug, - 'name': model['academy'].name, + [ + { + "categories": [ + { + "id": 1, + "medias": 1, + "name": model["category"].name, + "slug": model["category"].slug, + } + ], + "hash": model["media"].hash, + "hits": model["media"].hits, + "id": model["media"].id, + "mime": model["media"].mime, + "name": model["media"].name, + "slug": model["media"].slug, + "thumbnail": f"{model.media.url}-thumbnail", + "url": model["media"].url, + "academy": { + "id": model["academy"].id, + "slug": model["academy"].slug, + "name": model["academy"].name, + }, } - }]) + ], + ) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_media_dict(), 
[{**self.model_to_dict(model, 'media')}]) + self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, "media")}]) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_root__with_category__with_like_in_querystring__like_match_slug(self): """Test /answer without auth""" self.headers(academy=1) - media_kwargs = {'slug': 'freyja'} - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='read_media', - role='potato', - media=True, - category=True, - media_kwargs=media_kwargs) - url = reverse_lazy('media:root') + '?like=Fre' + media_kwargs = {"slug": "freyja"} + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="read_media", + role="potato", + media=True, + category=True, + media_kwargs=media_kwargs, + ) + url = reverse_lazy("media:root") + "?like=Fre" response = self.client.get(url) json = response.json() self.assertEqual( json, - [{ - 'categories': [{ - 'id': 1, - 'medias': 1, - 'name': model['category'].name, - 'slug': model['category'].slug, - }], - 'hash': model['media'].hash, - 'hits': model['media'].hits, - 'id': model['media'].id, - 'mime': model['media'].mime, - 'name': model['media'].name, - 'slug': model['media'].slug, - 'thumbnail': f'{model.media.url}-thumbnail', - 'url': model['media'].url, - 'academy': { - 'id': model['academy'].id, - 'slug': model['academy'].slug, - 'name': model['academy'].name, - }, - }]) + [ + { + "categories": [ + { + "id": 1, + "medias": 1, + "name": model["category"].name, + "slug": model["category"].slug, + } + ], + "hash": model["media"].hash, + "hits": model["media"].hits, + "id": model["media"].id, + "mime": model["media"].mime, + "name": model["media"].name, + "slug": model["media"].slug, + "thumbnail": f"{model.media.url}-thumbnail", + "url": model["media"].url, + "academy": { + "id": model["academy"].id, + "slug": model["academy"].slug, + "name": model["academy"].name, + }, + } + ], + ) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, 'media')}]) + self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, "media")}]) """ 🔽🔽🔽 Sort in querystring """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_root__with_category__with_sort_in_querystring(self): """Test /answer without auth""" self.headers(academy=1) - media_kwargs = {'name': 'Freyja'} - base = self.generate_models(authenticate=True, profile_academy=True, capability='read_media', role='potato') + media_kwargs = {"name": "Freyja"} + base = self.generate_models(authenticate=True, profile_academy=True, capability="read_media", role="potato") models = [ self.generate_models(media=True, category=True, models=base, media_kwargs=media_kwargs) for _ in range(2) ] - ordened_models = 
sorted(models, key=lambda x: x['media'].id, reverse=True) + ordened_models = sorted(models, key=lambda x: x["media"].id, reverse=True) - url = reverse_lazy('media:root') + '?sort=-id' + url = reverse_lazy("media:root") + "?sort=-id" response = self.client.get(url) json = response.json() - self.assertEqual(json, [{ - 'categories': [{ - 'id': model['category'].id, - 'medias': 1, - 'name': model['category'].name, - 'slug': model['category'].slug, - }], - 'hash': - model['media'].hash, - 'hits': - model['media'].hits, - 'id': - model['media'].id, - 'mime': - model['media'].mime, - 'name': - model['media'].name, - 'slug': - model['media'].slug, - 'thumbnail': - f'{model.media.url}-thumbnail', - 'url': - model['media'].url, - 'academy': { - 'id': model['academy'].id, - 'slug': model['academy'].slug, - 'name': model['academy'].name, - } - } for model in ordened_models]) + self.assertEqual( + json, + [ + { + "categories": [ + { + "id": model["category"].id, + "medias": 1, + "name": model["category"].name, + "slug": model["category"].slug, + } + ], + "hash": model["media"].hash, + "hits": model["media"].hits, + "id": model["media"].id, + "mime": model["media"].mime, + "name": model["media"].name, + "slug": model["media"].slug, + "thumbnail": f"{model.media.url}-thumbnail", + "url": model["media"].url, + "academy": { + "id": model["academy"].id, + "slug": model["academy"].slug, + "name": model["academy"].name, + }, + } + for model in ordened_models + ], + ) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, 'media')} for model in models]) + self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, "media")} for model in models]) """ 🔽🔽🔽 Bulk delete """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_root__delete__without_bulk(self): """Test /answer without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_media', - role='potato', - media=True) + model = self.generate_models( + authenticate=True, profile_academy=True, capability="crud_media", role="potato", media=True + ) - url = reverse_lazy('media:root') + url = reverse_lazy("media:root") response = self.client.delete(url) self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) - self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, 'media')}]) + self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, "media")}]) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_root__delete__bad_id(self): """Test /answer without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_media', - role='potato', - media=True) - url = 
reverse_lazy('media:root') + '?id=0' + model = self.generate_models( + authenticate=True, profile_academy=True, capability="crud_media", role="potato", media=True + ) + url = reverse_lazy("media:root") + "?id=0" response = self.client.delete(url) self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) - self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, 'media')}]) + self.assertEqual(self.all_media_dict(), [{**self.model_to_dict(model, "media")}]) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_root__delete(self): """Test /answer without auth""" self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_media', - role='potato', - media=True) + model = self.generate_models( + authenticate=True, profile_academy=True, capability="crud_media", role="potato", media=True + ) - url = reverse_lazy('media:root') + '?id=1' + url = reverse_lazy("media:root") + "?id=1" response = self.client.delete(url) self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) self.assertEqual(self.all_media_dict(), []) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_root__delete__media_that_belongs_to_a_different_academy(self): """Test /answer without auth""" self.headers(academy=1) - model1 = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_media', - role='potato', - media=True) + model1 = self.generate_models( + authenticate=True, profile_academy=True, capability="crud_media", role="potato", media=True + ) model2 = self.generate_models(media=True, academy=True) - url = reverse_lazy('media:root') + '?id=1,2' + url = reverse_lazy("media:root") + "?id=1,2" response = self.client.delete(url) json = response.json() expected = { - 'detail': 'academy-different-than-media-academy', - 'status_code': 400, + "detail": "academy-different-than-media-academy", + "status_code": 400, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.all_media_dict(), [{ - **self.model_to_dict(model1, 'media'), - }, { - **self.model_to_dict(model2, 'media'), - }]) - - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + self.assertEqual( + self.all_media_dict(), + [ + { + **self.model_to_dict(model1, "media"), + }, + { + **self.model_to_dict(model2, "media"), + }, + ], + ) + + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def test_root__delete__two_media(self): """Test 
/answer without auth""" self.headers(academy=1) - base = self.generate_models(authenticate=True, profile_academy=True, capability='crud_media', role='potato') + base = self.generate_models(authenticate=True, profile_academy=True, capability="crud_media", role="potato") for _ in range(0, 2): self.generate_models(media=True, models=base) - url = reverse_lazy('media:root') + '?id=1,2' + url = reverse_lazy("media:root") + "?id=1,2" response = self.client.delete(url) self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) self.assertEqual(self.all_media_dict(), []) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch.object(APIViewExtensionHandlers, '_spy_extensions', MagicMock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch.object(APIViewExtensionHandlers, "_spy_extensions", MagicMock()) def test_root__spy_extensions(self): """Test /answer without auth""" self.headers(academy=1) - models = self.generate_models(authenticate=True, profile_academy=True, capability='read_media', role='potato') + models = self.generate_models(authenticate=True, profile_academy=True, capability="read_media", role="potato") - url = reverse_lazy('media:root') + url = reverse_lazy("media:root") self.client.get(url) - self.assertEqual(APIViewExtensionHandlers._spy_extensions.call_args_list, [ - call(['LanguageExtension', 'LookupExtension', 'PaginationExtension', 'SortExtension']), - ]) - - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch.object(APIViewExtensionHandlers, '_spy_extension_arguments', MagicMock()) + self.assertEqual( + APIViewExtensionHandlers._spy_extensions.call_args_list, + [ + call(["LanguageExtension", "LookupExtension", "PaginationExtension", "SortExtension"]), + ], + ) + + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch.object(APIViewExtensionHandlers, "_spy_extension_arguments", MagicMock()) def test_root__spy_extension_arguments(self): """Test /answer without auth""" self.headers(academy=1) - models = self.generate_models(authenticate=True, profile_academy=True, capability='read_media', role='potato') + models = self.generate_models(authenticate=True, profile_academy=True, capability="read_media", role="potato") - url = reverse_lazy('media:root') + url = reverse_lazy("media:root") self.client.get(url) - self.assertEqual(APIViewExtensionHandlers._spy_extension_arguments.call_args_list, [ - call(sort='-created_at', paginate=True), - ]) + self.assertEqual( + APIViewExtensionHandlers._spy_extension_arguments.call_args_list, + [ + call(sort="-created_at", paginate=True), + ], + ) diff --git a/breathecode/media/tests/urls/tests_upload.py b/breathecode/media/tests/urls/tests_upload.py index 3b538daf4..2831a8179 100644 --- a/breathecode/media/tests/urls/tests_upload.py +++ b/breathecode/media/tests/urls/tests_upload.py @@ -1,6 +1,7 @@ """ Test /v1/media/upload """ + import hashlib import os import tempfile @@ -22,7 +23,7 @@ 
def test_upload_without_auth(self): self.headers(content_disposition='attachment; filename="filename.jpg"') - url = reverse_lazy('media:upload') + url = reverse_lazy("media:upload") data = {} response = self.client.put(url, data) json = response.json() @@ -34,7 +35,7 @@ def test_upload_wrong_academy(self): self.headers(academy=1, content_disposition='attachment; filename="filename.jpg"') - url = reverse_lazy('media:upload') + url = reverse_lazy("media:upload") data = {} response = self.client.put(url, data) json = response.json() @@ -46,44 +47,50 @@ def test_upload_without_capability(self): self.headers(academy=1, content_disposition='attachment; filename="filename.jpg"') - url = reverse_lazy('media:upload') + url = reverse_lazy("media:upload") self.generate_models(authenticate=True) data = {} response = self.client.put(url, data) json = response.json() - self.assertEqual(json, { - 'detail': "You (user: 1) don't have this capability: crud_media for academy 1", - 'status_code': 403 - }) + self.assertEqual( + json, {"detail": "You (user: 1) don't have this capability: crud_media for academy 1", "status_code": 403} + ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) - @patch.multiple('breathecode.services.google_cloud.Storage', - __init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) - @patch.multiple('breathecode.services.google_cloud.File', - __init__=MagicMock(return_value=None), - bucket=PropertyMock(), - file_name=PropertyMock(), - upload=MagicMock(), - url=MagicMock(return_value='https://storage.cloud.google.com/media-breathecode/hardcoded_url'), - create=True) + @patch.multiple( + "breathecode.services.google_cloud.Storage", + __init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, + ) + @patch.multiple( + "breathecode.services.google_cloud.File", + __init__=MagicMock(return_value=None), + bucket=PropertyMock(), + file_name=PropertyMock(), + upload=MagicMock(), + url=MagicMock(return_value="https://storage.cloud.google.com/media-breathecode/hardcoded_url"), + create=True, + ) def test_upload_without_data(self): from breathecode.services.google_cloud import File, Storage self.headers(academy=1) - model = self.generate_models(authenticate=True, profile_academy=True, capability='crud_media', role='potato') - url = reverse_lazy('media:upload') + model = self.generate_models(authenticate=True, profile_academy=True, capability="crud_media", role="potato") + url = reverse_lazy("media:upload") data = {} response = self.client.put(url, data) json = response.json() - self.assertEqual(json, { - 'detail': 'Missing file in request', - 'status_code': 400, - }) + self.assertEqual( + json, + { + "detail": "Missing file in request", + "status_code": 400, + }, + ) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual(self.all_media_dict(), []) @@ -92,70 +99,83 @@ def test_upload_without_data(self): self.assertEqual(File.upload.call_args_list, []) self.assertEqual(File.url.call_args_list, []) - @patch.multiple('breathecode.services.google_cloud.Storage', - __init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) - @patch.multiple('breathecode.services.google_cloud.File', - __init__=MagicMock(return_value=None), - bucket=PropertyMock(), - file_name=PropertyMock(), - upload=MagicMock(), - url=MagicMock(return_value='https://storage.cloud.google.com/media-breathecode/hardcoded_url'), - create=True) + @patch.multiple( + "breathecode.services.google_cloud.Storage", + 
__init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, + ) + @patch.multiple( + "breathecode.services.google_cloud.File", + __init__=MagicMock(return_value=None), + bucket=PropertyMock(), + file_name=PropertyMock(), + upload=MagicMock(), + url=MagicMock(return_value="https://storage.cloud.google.com/media-breathecode/hardcoded_url"), + create=True, + ) def test_upload(self): from breathecode.services.google_cloud import File, Storage self.headers(academy=1) - model = self.generate_models(authenticate=True, profile_academy=True, capability='crud_media', role='potato') - url = reverse_lazy('media:upload') + model = self.generate_models(authenticate=True, profile_academy=True, capability="crud_media", role="potato") + url = reverse_lazy("media:upload") - file = tempfile.NamedTemporaryFile(suffix='.png', delete=False) + file = tempfile.NamedTemporaryFile(suffix=".png", delete=False) file.write(os.urandom(1024)) file.close() - with open(file.name, 'rb') as data: + with open(file.name, "rb") as data: hash = hashlib.sha256(data.read()).hexdigest() - with open(file.name, 'rb') as data: - response = self.client.put(url, {'name': 'filename.png', 'file': data}) + with open(file.name, "rb") as data: + response = self.client.put(url, {"name": "filename.png", "file": data}) json = response.json() self.assertHash(hash) - expected = [{ - 'academy': 1, - 'categories': [], - 'hash': hash, - 'hits': 0, - 'id': 1, - 'mime': 'image/png', - 'name': 'filename.png', - 'slug': 'filename-png', - 'thumbnail': 'https://storage.cloud.google.com/media-breathecode/hardcoded_url-thumbnail', - 'url': 'https://storage.cloud.google.com/media-breathecode/hardcoded_url' - }] + expected = [ + { + "academy": 1, + "categories": [], + "hash": hash, + "hits": 0, + "id": 1, + "mime": "image/png", + "name": "filename.png", + "slug": "filename-png", + "thumbnail": "https://storage.cloud.google.com/media-breathecode/hardcoded_url-thumbnail", + "url": "https://storage.cloud.google.com/media-breathecode/hardcoded_url", + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.all_media_dict(), [{ - 'academy_id': 1, - 'hash': hash, - 'hits': 0, - 'id': 1, - 'mime': 'image/png', - 'name': 'filename.png', - 'slug': 'filename-png', - 'thumbnail': 'https://storage.cloud.google.com/media-breathecode/hardcoded_url-thumbnail', - 'url': 'https://storage.cloud.google.com/media-breathecode/hardcoded_url' - }]) + self.all_media_dict(), + [ + { + "academy_id": 1, + "hash": hash, + "hits": 0, + "id": 1, + "mime": "image/png", + "name": "filename.png", + "slug": "filename-png", + "thumbnail": "https://storage.cloud.google.com/media-breathecode/hardcoded_url-thumbnail", + "url": "https://storage.cloud.google.com/media-breathecode/hardcoded_url", + } + ], + ) self.assertEqual(Storage.__init__.call_args_list, [call()]) - self.assertEqual(File.__init__.call_args_list, [ - call(Storage().client.bucket('bucket'), hash), - ]) + self.assertEqual( + File.__init__.call_args_list, + [ + call(Storage().client.bucket("bucket"), hash), + ], + ) args, kwargs = File.upload.call_args_list[0] @@ -165,145 +185,174 @@ def test_upload(self): self.assertEqual(args[0].name, os.path.basename(file.name)) self.assertEqual(args[0].size, 1024) - self.assertEqual(kwargs, {'content_type': 'image/png'}) + self.assertEqual(kwargs, {"content_type": "image/png"}) self.assertEqual(File.url.call_args_list, [call()]) - @patch.multiple('breathecode.services.google_cloud.Storage', - 
__init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) - @patch.multiple('breathecode.services.google_cloud.File', - __init__=MagicMock(return_value=None), - bucket=PropertyMock(), - file_name=PropertyMock(), - upload=MagicMock(), - url=MagicMock(return_value='https://storage.cloud.google.com/media-breathecode/hardcoded_url'), - create=True) + @patch.multiple( + "breathecode.services.google_cloud.Storage", + __init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, + ) + @patch.multiple( + "breathecode.services.google_cloud.File", + __init__=MagicMock(return_value=None), + bucket=PropertyMock(), + file_name=PropertyMock(), + upload=MagicMock(), + url=MagicMock(return_value="https://storage.cloud.google.com/media-breathecode/hardcoded_url"), + create=True, + ) def test_upload_with_media(self): from breathecode.services.google_cloud import File, Storage self.headers(academy=1) - file = tempfile.NamedTemporaryFile(suffix='.png', delete=False) + file = tempfile.NamedTemporaryFile(suffix=".png", delete=False) file.write(os.urandom(1024)) file.close() - with open(file.name, 'rb') as data: + with open(file.name, "rb") as data: hash = hashlib.sha256(data.read()).hexdigest() - media_kwargs = {'hash': hash} - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_media', - role='potato', - media=True, - media_kwargs=media_kwargs) - url = reverse_lazy('media:upload') - - with open(file.name, 'rb') as data: - response = self.client.put(url, {'name': ['filename.jpg'], 'file': [data]}) + media_kwargs = {"hash": hash} + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="crud_media", + role="potato", + media=True, + media_kwargs=media_kwargs, + ) + url = reverse_lazy("media:upload") + + with open(file.name, "rb") as data: + response = self.client.put(url, {"name": ["filename.jpg"], "file": [data]}) json = response.json() self.assertHash(hash) - expected = [{ - 'academy': model['media'].academy.id, - 'categories': [], - 'hash': hash, - 'hits': model['media'].hits, - 'id': model['media'].id, - 'mime': 'image/png', - 'name': 'filename.jpg', - 'slug': 'filename-jpg', - 'thumbnail': None, - 'url': model['media'].url, - }] + expected = [ + { + "academy": model["media"].academy.id, + "categories": [], + "hash": hash, + "hits": model["media"].hits, + "id": model["media"].id, + "mime": "image/png", + "name": "filename.jpg", + "slug": "filename-jpg", + "thumbnail": None, + "url": model["media"].url, + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_media_dict(), [{ - **self.model_to_dict(model, 'media'), - 'hash': hash, - 'mime': 'image/png', - 'name': 'filename.jpg', - 'slug': 'filename-jpg', - }]) + self.assertEqual( + self.all_media_dict(), + [ + { + **self.model_to_dict(model, "media"), + "hash": hash, + "mime": "image/png", + "name": "filename.jpg", + "slug": "filename-jpg", + } + ], + ) self.assertEqual(Storage.__init__.call_args_list, []) self.assertEqual(File.__init__.call_args_list, []) self.assertEqual(File.upload.call_args_list, []) self.assertEqual(File.url.call_args_list, []) - @patch.multiple('breathecode.services.google_cloud.Storage', - __init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) - @patch.multiple('breathecode.services.google_cloud.File', - __init__=MagicMock(return_value=None), - bucket=PropertyMock(), - file_name=PropertyMock(), - upload=MagicMock(), - 
url=MagicMock(return_value='https://storage.cloud.google.com/media-breathecode/hardcoded_url'), - create=True) + @patch.multiple( + "breathecode.services.google_cloud.Storage", + __init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, + ) + @patch.multiple( + "breathecode.services.google_cloud.File", + __init__=MagicMock(return_value=None), + bucket=PropertyMock(), + file_name=PropertyMock(), + upload=MagicMock(), + url=MagicMock(return_value="https://storage.cloud.google.com/media-breathecode/hardcoded_url"), + create=True, + ) def test_upload_with_media_with_same_slug(self): from breathecode.services.google_cloud import File, Storage self.headers(academy=1) - file = tempfile.NamedTemporaryFile(suffix='.png', delete=False) + file = tempfile.NamedTemporaryFile(suffix=".png", delete=False) file.write(os.urandom(1024)) file.close() - with open(file.name, 'rb') as data: + with open(file.name, "rb") as data: hash = hashlib.sha256(data.read()).hexdigest() - media_kwargs = {'slug': 'filename-jpg'} - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_media', - role='potato', - media=True, - media_kwargs=media_kwargs) - url = reverse_lazy('media:upload') - - with open(file.name, 'rb') as data: - response = self.client.put(url, {'name': 'filename.jpg', 'file': data}) + media_kwargs = {"slug": "filename-jpg"} + model = self.generate_models( + authenticate=True, + profile_academy=True, + capability="crud_media", + role="potato", + media=True, + media_kwargs=media_kwargs, + ) + url = reverse_lazy("media:upload") + + with open(file.name, "rb") as data: + response = self.client.put(url, {"name": "filename.jpg", "file": data}) json = response.json() - expected = [{ - 'academy': 1, - 'categories': [], - 'hash': hash, - 'hits': 0, - 'id': 2, - 'mime': 'image/png', - 'name': 'filename.jpg', - 'slug': 'filename-jpg-ii', - 'thumbnail': 'https://storage.cloud.google.com/media-breathecode/hardcoded_url-thumbnail', - 'url': 'https://storage.cloud.google.com/media-breathecode/hardcoded_url' - }] + expected = [ + { + "academy": 1, + "categories": [], + "hash": hash, + "hits": 0, + "id": 2, + "mime": "image/png", + "name": "filename.jpg", + "slug": "filename-jpg-ii", + "thumbnail": "https://storage.cloud.google.com/media-breathecode/hardcoded_url-thumbnail", + "url": "https://storage.cloud.google.com/media-breathecode/hardcoded_url", + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.all_media_dict(), [{ - **self.model_to_dict(model, 'media'), - }, { - 'academy_id': 1, - 'hash': hash, - 'hits': 0, - 'id': 2, - 'mime': 'image/png', - 'name': 'filename.jpg', - 'slug': 'filename-jpg-ii', - 'thumbnail': 'https://storage.cloud.google.com/media-breathecode/hardcoded_url-thumbnail', - 'url': 'https://storage.cloud.google.com/media-breathecode/hardcoded_url' - }]) + self.all_media_dict(), + [ + { + **self.model_to_dict(model, "media"), + }, + { + "academy_id": 1, + "hash": hash, + "hits": 0, + "id": 2, + "mime": "image/png", + "name": "filename.jpg", + "slug": "filename-jpg-ii", + "thumbnail": "https://storage.cloud.google.com/media-breathecode/hardcoded_url-thumbnail", + "url": "https://storage.cloud.google.com/media-breathecode/hardcoded_url", + }, + ], + ) self.assertEqual(Storage.__init__.call_args_list, [call()]) - self.assertEqual(File.__init__.call_args_list, [ - call(Storage().client.bucket('bucket'), hash), - ]) + self.assertEqual( + File.__init__.call_args_list, + [ + 
call(Storage().client.bucket("bucket"), hash), + ], + ) args, kwargs = File.upload.call_args_list[0] @@ -313,79 +362,90 @@ def test_upload_with_media_with_same_slug(self): self.assertEqual(args[0].name, os.path.basename(file.name)) self.assertEqual(args[0].size, 1024) - self.assertEqual(kwargs, {'content_type': 'image/png'}) + self.assertEqual(kwargs, {"content_type": "image/png"}) self.assertEqual(File.url.call_args_list, [call()]) - @patch.multiple('breathecode.services.google_cloud.Storage', - __init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) - @patch.multiple('breathecode.services.google_cloud.File', - __init__=MagicMock(return_value=None), - bucket=PropertyMock(), - file_name=PropertyMock(), - upload=MagicMock(), - url=MagicMock(return_value='https://storage.cloud.google.com/media-breathecode/hardcoded_url'), - create=True) + @patch.multiple( + "breathecode.services.google_cloud.Storage", + __init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, + ) + @patch.multiple( + "breathecode.services.google_cloud.File", + __init__=MagicMock(return_value=None), + bucket=PropertyMock(), + file_name=PropertyMock(), + upload=MagicMock(), + url=MagicMock(return_value="https://storage.cloud.google.com/media-breathecode/hardcoded_url"), + create=True, + ) def test_upload_categories(self): from breathecode.services.google_cloud import File, Storage self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_media', - role='potato', - category=True) - url = reverse_lazy('media:upload') + model = self.generate_models( + authenticate=True, profile_academy=True, capability="crud_media", role="potato", category=True + ) + url = reverse_lazy("media:upload") - file = tempfile.NamedTemporaryFile(suffix='.png', delete=False) + file = tempfile.NamedTemporaryFile(suffix=".png", delete=False) file.write(os.urandom(1024)) file.close() - with open(file.name, 'rb') as file: + with open(file.name, "rb") as file: hash = hashlib.sha256(file.read()).hexdigest() - with open(file.name, 'rb') as file: - data = {'name': 'filename.jpg', 'file': file, 'categories': '1'} - response = self.client.put(url, data, format='multipart') + with open(file.name, "rb") as file: + data = {"name": "filename.jpg", "file": file, "categories": "1"} + response = self.client.put(url, data, format="multipart") json = response.json() self.assertHash(hash) - expected = [{ - 'academy': 1, - 'categories': [1], - 'hash': hash, - 'hits': 0, - 'id': 1, - 'mime': 'image/png', - 'name': 'filename.jpg', - 'slug': 'filename-jpg', - 'thumbnail': 'https://storage.cloud.google.com/media-breathecode/hardcoded_url-thumbnail', - 'url': 'https://storage.cloud.google.com/media-breathecode/hardcoded_url' - }] + expected = [ + { + "academy": 1, + "categories": [1], + "hash": hash, + "hits": 0, + "id": 1, + "mime": "image/png", + "name": "filename.jpg", + "slug": "filename-jpg", + "thumbnail": "https://storage.cloud.google.com/media-breathecode/hardcoded_url-thumbnail", + "url": "https://storage.cloud.google.com/media-breathecode/hardcoded_url", + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.all_media_dict(), [{ - 'academy_id': 1, - 'hash': hash, - 'hits': 0, - 'id': 1, - 'mime': 'image/png', - 'name': 'filename.jpg', - 'slug': 'filename-jpg', - 'thumbnail': 'https://storage.cloud.google.com/media-breathecode/hardcoded_url-thumbnail', - 'url': 
'https://storage.cloud.google.com/media-breathecode/hardcoded_url' - }]) + self.all_media_dict(), + [ + { + "academy_id": 1, + "hash": hash, + "hits": 0, + "id": 1, + "mime": "image/png", + "name": "filename.jpg", + "slug": "filename-jpg", + "thumbnail": "https://storage.cloud.google.com/media-breathecode/hardcoded_url-thumbnail", + "url": "https://storage.cloud.google.com/media-breathecode/hardcoded_url", + } + ], + ) self.assertEqual(Storage.__init__.call_args_list, [call()]) - self.assertEqual(File.__init__.call_args_list, [ - call(Storage().client.bucket('bucket'), hash), - ]) + self.assertEqual( + File.__init__.call_args_list, + [ + call(Storage().client.bucket("bucket"), hash), + ], + ) args, kwargs = File.upload.call_args_list[0] @@ -395,80 +455,91 @@ def test_upload_categories(self): self.assertEqual(args[0].name, os.path.basename(file.name)) self.assertEqual(args[0].size, 1024) - self.assertEqual(kwargs, {'content_type': 'image/png'}) + self.assertEqual(kwargs, {"content_type": "image/png"}) self.assertEqual(File.url.call_args_list, [call()]) - @patch.multiple('breathecode.services.google_cloud.Storage', - __init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) - @patch.multiple('breathecode.services.google_cloud.File', - __init__=MagicMock(return_value=None), - bucket=PropertyMock(), - file_name=PropertyMock(), - upload=MagicMock(), - url=MagicMock(return_value='https://storage.cloud.google.com/media-breathecode/hardcoded_url'), - create=True) + @patch.multiple( + "breathecode.services.google_cloud.Storage", + __init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, + ) + @patch.multiple( + "breathecode.services.google_cloud.File", + __init__=MagicMock(return_value=None), + bucket=PropertyMock(), + file_name=PropertyMock(), + upload=MagicMock(), + url=MagicMock(return_value="https://storage.cloud.google.com/media-breathecode/hardcoded_url"), + create=True, + ) def test_upload_categories_in_headers(self): from breathecode.services.google_cloud import File, Storage self.headers(academy=1, categories=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_media', - role='potato', - category=True) - url = reverse_lazy('media:upload') + model = self.generate_models( + authenticate=True, profile_academy=True, capability="crud_media", role="potato", category=True + ) + url = reverse_lazy("media:upload") - file = tempfile.NamedTemporaryFile(suffix='.png', delete=False) + file = tempfile.NamedTemporaryFile(suffix=".png", delete=False) file.write(os.urandom(1024)) file.close() - with open(file.name, 'rb') as file: + with open(file.name, "rb") as file: file_bytes = file.read() hash = hashlib.sha256(file_bytes).hexdigest() - with open(file.name, 'rb') as file: - data = {'name': 'filename.jpg', 'file': file} - response = self.client.put(url, data, format='multipart') + with open(file.name, "rb") as file: + data = {"name": "filename.jpg", "file": file} + response = self.client.put(url, data, format="multipart") json = response.json() self.assertHash(hash) - expected = [{ - 'academy': 1, - 'categories': [1], - 'hash': hash, - 'hits': 0, - 'id': 1, - 'mime': 'image/png', - 'name': 'filename.jpg', - 'slug': 'filename-jpg', - 'thumbnail': 'https://storage.cloud.google.com/media-breathecode/hardcoded_url-thumbnail', - 'url': 'https://storage.cloud.google.com/media-breathecode/hardcoded_url' - }] + expected = [ + { + "academy": 1, + "categories": [1], + "hash": hash, + "hits": 0, + "id": 1, + "mime": "image/png", + 
"name": "filename.jpg", + "slug": "filename-jpg", + "thumbnail": "https://storage.cloud.google.com/media-breathecode/hardcoded_url-thumbnail", + "url": "https://storage.cloud.google.com/media-breathecode/hardcoded_url", + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.all_media_dict(), [{ - 'academy_id': 1, - 'hash': hash, - 'hits': 0, - 'id': 1, - 'mime': 'image/png', - 'name': 'filename.jpg', - 'slug': 'filename-jpg', - 'thumbnail': 'https://storage.cloud.google.com/media-breathecode/hardcoded_url-thumbnail', - 'url': 'https://storage.cloud.google.com/media-breathecode/hardcoded_url' - }]) + self.all_media_dict(), + [ + { + "academy_id": 1, + "hash": hash, + "hits": 0, + "id": 1, + "mime": "image/png", + "name": "filename.jpg", + "slug": "filename-jpg", + "thumbnail": "https://storage.cloud.google.com/media-breathecode/hardcoded_url-thumbnail", + "url": "https://storage.cloud.google.com/media-breathecode/hardcoded_url", + } + ], + ) self.assertEqual(Storage.__init__.call_args_list, [call()]) - self.assertEqual(File.__init__.call_args_list, [ - call(Storage().client.bucket('bucket'), hash), - ]) + self.assertEqual( + File.__init__.call_args_list, + [ + call(Storage().client.bucket("bucket"), hash), + ], + ) args, kwargs = File.upload.call_args_list[0] @@ -478,111 +549,124 @@ def test_upload_categories_in_headers(self): self.assertEqual(args[0].name, os.path.basename(file.name)) self.assertEqual(args[0].size, 1024) - self.assertEqual(kwargs, {'content_type': 'image/png'}) + self.assertEqual(kwargs, {"content_type": "image/png"}) self.assertEqual(File.url.call_args_list, [call()]) - @patch.multiple('breathecode.services.google_cloud.Storage', - __init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) - @patch.multiple('breathecode.services.google_cloud.File', - __init__=MagicMock(return_value=None), - bucket=PropertyMock(), - file_name=PropertyMock(), - upload=MagicMock(), - url=MagicMock(return_value='https://storage.cloud.google.com/media-breathecode/hardcoded_url'), - create=True) + @patch.multiple( + "breathecode.services.google_cloud.Storage", + __init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, + ) + @patch.multiple( + "breathecode.services.google_cloud.File", + __init__=MagicMock(return_value=None), + bucket=PropertyMock(), + file_name=PropertyMock(), + upload=MagicMock(), + url=MagicMock(return_value="https://storage.cloud.google.com/media-breathecode/hardcoded_url"), + create=True, + ) def test_upload_categories_in_headers__two_items(self): """Test /answer without auth""" from breathecode.services.google_cloud import File, Storage self.headers(academy=1, categories=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - capability='crud_media', - role='potato', - category=True) - url = reverse_lazy('media:upload') + model = self.generate_models( + authenticate=True, profile_academy=True, capability="crud_media", role="potato", category=True + ) + url = reverse_lazy("media:upload") - file1 = tempfile.NamedTemporaryFile(suffix='.png', delete=False) + file1 = tempfile.NamedTemporaryFile(suffix=".png", delete=False) file1.write(os.urandom(1024)) file1.close() - file2 = tempfile.NamedTemporaryFile(suffix='.png', delete=False) + file2 = tempfile.NamedTemporaryFile(suffix=".png", delete=False) file2.write(os.urandom(1024)) file2.close() - with open(file1.name, 'rb') as file1: + with open(file1.name, "rb") as file1: file_bytes1 = file1.read() 
hash1 = hashlib.sha256(file_bytes1).hexdigest() - with open(file2.name, 'rb') as file2: + with open(file2.name, "rb") as file2: file_bytes2 = file2.read() hash2 = hashlib.sha256(file_bytes2).hexdigest() - file1 = open(file1.name, 'rb') - file2 = open(file2.name, 'rb') + file1 = open(file1.name, "rb") + file2 = open(file2.name, "rb") - data = {'name': ['filename1.jpg', 'filename2.jpg'], 'file': [file1, file2]} - response = self.client.put(url, data, format='multipart') + data = {"name": ["filename1.jpg", "filename2.jpg"], "file": [file1, file2]} + response = self.client.put(url, data, format="multipart") json = response.json() - expected = [{ - 'academy': 1, - 'categories': [1], - 'hash': hash1, - 'hits': 0, - 'id': 1, - 'mime': 'image/png', - 'name': 'filename1.jpg', - 'slug': 'filename1-jpg', - 'thumbnail': 'https://storage.cloud.google.com/media-breathecode/hardcoded_url-thumbnail', - 'url': 'https://storage.cloud.google.com/media-breathecode/hardcoded_url' - }, { - 'academy': 1, - 'categories': [1], - 'hash': hash2, - 'hits': 0, - 'id': 2, - 'mime': 'image/png', - 'name': 'filename2.jpg', - 'slug': 'filename2-jpg', - 'thumbnail': 'https://storage.cloud.google.com/media-breathecode/hardcoded_url-thumbnail', - 'url': 'https://storage.cloud.google.com/media-breathecode/hardcoded_url' - }] + expected = [ + { + "academy": 1, + "categories": [1], + "hash": hash1, + "hits": 0, + "id": 1, + "mime": "image/png", + "name": "filename1.jpg", + "slug": "filename1-jpg", + "thumbnail": "https://storage.cloud.google.com/media-breathecode/hardcoded_url-thumbnail", + "url": "https://storage.cloud.google.com/media-breathecode/hardcoded_url", + }, + { + "academy": 1, + "categories": [1], + "hash": hash2, + "hits": 0, + "id": 2, + "mime": "image/png", + "name": "filename2.jpg", + "slug": "filename2-jpg", + "thumbnail": "https://storage.cloud.google.com/media-breathecode/hardcoded_url-thumbnail", + "url": "https://storage.cloud.google.com/media-breathecode/hardcoded_url", + }, + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.all_media_dict(), - [{ - 'academy_id': 1, - 'hash': hash1, - 'hits': 0, - 'id': 1, - 'mime': 'image/png', - 'name': 'filename1.jpg', - 'slug': 'filename1-jpg', - 'thumbnail': 'https://storage.cloud.google.com/media-breathecode/hardcoded_url-thumbnail', - 'url': 'https://storage.cloud.google.com/media-breathecode/hardcoded_url' - }, { - 'academy_id': 1, - 'hash': hash2, - 'hits': 0, - 'id': 2, - 'mime': 'image/png', - 'name': 'filename2.jpg', - 'slug': 'filename2-jpg', - 'thumbnail': 'https://storage.cloud.google.com/media-breathecode/hardcoded_url-thumbnail', - 'url': 'https://storage.cloud.google.com/media-breathecode/hardcoded_url' - }]) + self.assertEqual( + self.all_media_dict(), + [ + { + "academy_id": 1, + "hash": hash1, + "hits": 0, + "id": 1, + "mime": "image/png", + "name": "filename1.jpg", + "slug": "filename1-jpg", + "thumbnail": "https://storage.cloud.google.com/media-breathecode/hardcoded_url-thumbnail", + "url": "https://storage.cloud.google.com/media-breathecode/hardcoded_url", + }, + { + "academy_id": 1, + "hash": hash2, + "hits": 0, + "id": 2, + "mime": "image/png", + "name": "filename2.jpg", + "slug": "filename2-jpg", + "thumbnail": "https://storage.cloud.google.com/media-breathecode/hardcoded_url-thumbnail", + "url": "https://storage.cloud.google.com/media-breathecode/hardcoded_url", + }, + ], + ) self.assertEqual(Storage.__init__.call_args_list, [call(), call()]) - 
self.assertEqual(File.__init__.call_args_list, [ - call(Storage().client.bucket('bucket'), hash1), - call(Storage().client.bucket('bucket'), hash2), - ]) + self.assertEqual( + File.__init__.call_args_list, + [ + call(Storage().client.bucket("bucket"), hash1), + call(Storage().client.bucket("bucket"), hash2), + ], + ) args1, kwargs1 = File.upload.call_args_list[0] args2, kwargs2 = File.upload.call_args_list[1] @@ -595,69 +679,82 @@ def test_upload_categories_in_headers__two_items(self): self.assertEqual(args1[0].size, 1024) self.assertEqual(args2[0].name, os.path.basename(file2.name)) self.assertEqual(args2[0].size, 1024) - self.assertEqual(kwargs1, {'content_type': 'image/png'}) - self.assertEqual(kwargs2, {'content_type': 'image/png'}) + self.assertEqual(kwargs1, {"content_type": "image/png"}) + self.assertEqual(kwargs2, {"content_type": "image/png"}) self.assertEqual(File.url.call_args_list, [call(), call()]) - @patch.multiple('breathecode.services.google_cloud.Storage', - __init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) - @patch.multiple('breathecode.services.google_cloud.File', - __init__=MagicMock(return_value=None), - bucket=PropertyMock(), - file_name=PropertyMock(), - upload=MagicMock(), - url=MagicMock(return_value='https://storage.cloud.google.com/media-breathecode/hardcoded_url'), - create=True) + @patch.multiple( + "breathecode.services.google_cloud.Storage", + __init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, + ) + @patch.multiple( + "breathecode.services.google_cloud.File", + __init__=MagicMock(return_value=None), + bucket=PropertyMock(), + file_name=PropertyMock(), + upload=MagicMock(), + url=MagicMock(return_value="https://storage.cloud.google.com/media-breathecode/hardcoded_url"), + create=True, + ) def test_upload_valid_format(self): from breathecode.services.google_cloud import File, Storage self.headers(academy=1) - model = self.generate_models(authenticate=True, profile_academy=True, capability='crud_media', role='potato') - url = reverse_lazy('media:upload') - file = tempfile.NamedTemporaryFile(suffix='.jpg', delete=False) + model = self.generate_models(authenticate=True, profile_academy=True, capability="crud_media", role="potato") + url = reverse_lazy("media:upload") + file = tempfile.NamedTemporaryFile(suffix=".jpg", delete=False) file.write(os.urandom(1024)) file.close() - with open(file.name, 'rb') as data: + with open(file.name, "rb") as data: hash = hashlib.sha256(data.read()).hexdigest() - with open(file.name, 'rb') as data: - response = self.client.put(url, {'name': 'filename.jpg', 'file': data}) + with open(file.name, "rb") as data: + response = self.client.put(url, {"name": "filename.jpg", "file": data}) json = response.json() self.assertHash(hash) - expected = [{ - 'academy': 1, - 'categories': [], - 'hash': hash, - 'hits': 0, - 'id': 1, - 'mime': 'image/jpeg', - 'name': 'filename.jpg', - 'slug': 'filename-jpg', - 'thumbnail': 'https://storage.cloud.google.com/media-breathecode/hardcoded_url-thumbnail', - 'url': 'https://storage.cloud.google.com/media-breathecode/hardcoded_url' - }] + expected = [ + { + "academy": 1, + "categories": [], + "hash": hash, + "hits": 0, + "id": 1, + "mime": "image/jpeg", + "name": "filename.jpg", + "slug": "filename-jpg", + "thumbnail": "https://storage.cloud.google.com/media-breathecode/hardcoded_url-thumbnail", + "url": "https://storage.cloud.google.com/media-breathecode/hardcoded_url", + } + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, 
status.HTTP_200_OK) self.assertEqual( - self.all_media_dict(), [{ - 'academy_id': 1, - 'hash': hash, - 'hits': 0, - 'id': 1, - 'mime': 'image/jpeg', - 'name': 'filename.jpg', - 'slug': 'filename-jpg', - 'thumbnail': 'https://storage.cloud.google.com/media-breathecode/hardcoded_url-thumbnail', - 'url': 'https://storage.cloud.google.com/media-breathecode/hardcoded_url' - }]) + self.all_media_dict(), + [ + { + "academy_id": 1, + "hash": hash, + "hits": 0, + "id": 1, + "mime": "image/jpeg", + "name": "filename.jpg", + "slug": "filename-jpg", + "thumbnail": "https://storage.cloud.google.com/media-breathecode/hardcoded_url-thumbnail", + "url": "https://storage.cloud.google.com/media-breathecode/hardcoded_url", + } + ], + ) self.assertEqual(Storage.__init__.call_args_list, [call()]) - self.assertEqual(File.__init__.call_args_list, [ - call(Storage().client.bucket('bucket'), hash), - ]) + self.assertEqual( + File.__init__.call_args_list, + [ + call(Storage().client.bucket("bucket"), hash), + ], + ) args, kwargs = File.upload.call_args_list[0] @@ -667,47 +764,51 @@ def test_upload_valid_format(self): self.assertEqual(args[0].name, os.path.basename(file.name)) self.assertEqual(args[0].size, 1024) - self.assertEqual(kwargs, {'content_type': 'image/jpeg'}) + self.assertEqual(kwargs, {"content_type": "image/jpeg"}) self.assertEqual(File.url.call_args_list, [call()]) - @patch.multiple('breathecode.services.google_cloud.Storage', - __init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) - @patch.multiple('breathecode.services.google_cloud.File', - __init__=MagicMock(return_value=None), - bucket=PropertyMock(), - file_name=PropertyMock(), - upload=MagicMock(), - url=MagicMock(return_value='https://storage.cloud.google.com/media-breathecode/hardcoded_url'), - create=True) + @patch.multiple( + "breathecode.services.google_cloud.Storage", + __init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, + ) + @patch.multiple( + "breathecode.services.google_cloud.File", + __init__=MagicMock(return_value=None), + bucket=PropertyMock(), + file_name=PropertyMock(), + upload=MagicMock(), + url=MagicMock(return_value="https://storage.cloud.google.com/media-breathecode/hardcoded_url"), + create=True, + ) def test_upload_invalid_format(self): from breathecode.services.google_cloud import File, Storage self.headers(academy=1) - model = self.generate_models(authenticate=True, profile_academy=True, capability='crud_media', role='potato') - url = reverse_lazy('media:upload') + model = self.generate_models(authenticate=True, profile_academy=True, capability="crud_media", role="potato") + url = reverse_lazy("media:upload") - file = tempfile.NamedTemporaryFile(suffix='.txt', delete=False) + file = tempfile.NamedTemporaryFile(suffix=".txt", delete=False) text = self.bc.fake.text() - file.write(text.encode('utf-8')) + file.write(text.encode("utf-8")) file.close() - with open(file.name, 'rb') as data: + with open(file.name, "rb") as data: hash = hashlib.sha256(data.read()).hexdigest() - with open(file.name, 'rb') as data: - response = self.client.put(url, {'name': 'filename.lbs', 'file': data}) + with open(file.name, "rb") as data: + response = self.client.put(url, {"name": "filename.lbs", "file": data}) json = response.json() self.assertHash(hash) expected = { - 'detail': f'You can upload only files on the following formats: {",".join(MIME_ALLOW)}, got text/plain', - 'status_code': 400 + "detail": f'You can upload only files on the following formats: {",".join(MIME_ALLOW)}, got 
text/plain', + "status_code": 400, } self.assertEqual(json, expected) diff --git a/breathecode/media/urls.py b/breathecode/media/urls.py index b10188f3e..8f5a58116 100644 --- a/breathecode/media/urls.py +++ b/breathecode/media/urls.py @@ -1,90 +1,94 @@ from django.urls.conf import path from .views import MediaView, CategoryView, UploadView, MaskingUrlView, ResolutionView -media_view = MediaView.as_view({'get': 'get', 'put': 'put', 'delete': 'delete'}) +media_view = MediaView.as_view({"get": "get", "put": "put", "delete": "delete"}) -media_by_id_view = MediaView.as_view({'get': 'get_id', 'put': 'put_id', 'delete': 'delete_id'}) +media_by_id_view = MediaView.as_view({"get": "get_id", "put": "put_id", "delete": "delete_id"}) -media_by_slug_view = MediaView.as_view({ - 'get': 'get_slug', -}) +media_by_slug_view = MediaView.as_view( + { + "get": "get_slug", + } +) -media_by_name_view = MediaView.as_view({ - 'get': 'get_name', -}) +media_by_name_view = MediaView.as_view( + { + "get": "get_name", + } +) -category_view = CategoryView.as_view({'get': 'get', 'post': 'post'}) -category_by_id_view = CategoryView.as_view({'get': 'get_id'}) -category_by_slug_view = CategoryView.as_view({'get': 'get_slug', 'put': 'put', 'delete': 'delete'}) +category_view = CategoryView.as_view({"get": "get", "post": "post"}) +category_by_id_view = CategoryView.as_view({"get": "get_id"}) +category_by_slug_view = CategoryView.as_view({"get": "get_slug", "put": "put", "delete": "delete"}) -resolution_by_id_view = ResolutionView.as_view({'get': 'get_id', 'delete': 'delete'}) -resolution_by_media_id_view = ResolutionView.as_view({'get': 'get_media_id'}) +resolution_by_id_view = ResolutionView.as_view({"get": "get_id", "delete": "delete"}) +resolution_by_media_id_view = ResolutionView.as_view({"get": "get_media_id"}) -app_name = 'media' +app_name = "media" urlpatterns = [ path( - '', + "", media_view, - name='root', + name="root", ), path( - 'info', + "info", media_view, - name='info', + name="info", ), path( - 'info/<int:media_id>', + "info/<int:media_id>", media_by_id_view, - name='info_id', + name="info_id", ), path( - 'info/<int:media_id>/resolution', + "info/<int:media_id>/resolution", resolution_by_media_id_view, - name='info_id_resolution', + name="info_id_resolution", ), path( - 'info/<slug:media_slug>', + "info/<slug:media_slug>", media_by_slug_view, - name='info_slug', + name="info_slug", ), path( - 'info/<str:media_name>', + "info/<str:media_name>", media_by_name_view, - name='info_name', + name="info_name", ), path( - 'resolution/<int:resolution_id>', + "resolution/<int:resolution_id>", resolution_by_id_view, - name='resolution_id', + name="resolution_id", ), path( - 'file/<int:media_id>', + "file/<int:media_id>", MaskingUrlView.as_view(), - name='file_id', + name="file_id", ), path( - 'file/<str:media_slug>', + "file/<str:media_slug>", MaskingUrlView.as_view(), - name='file_slug', + name="file_slug", ), path( - 'upload', + "upload", UploadView.as_view(), - name='upload', + name="upload", ), path( - 'category', + "category", category_view, - name='category', + name="category", ), path( - 'category/<int:category_id>', + "category/<int:category_id>", category_by_id_view, - name='category_id', + name="category_id", ), path( - 'category/<str:category_slug>', + "category/<str:category_slug>", category_by_slug_view, - name='category_slug', + name="category_slug", ), ] diff --git a/breathecode/media/views.py b/breathecode/media/views.py index b71b1a0c0..307eed743 100644 --- a/breathecode/media/views.py +++ 
b/breathecode/media/views.py @@ -36,17 +36,25 @@ logger = logging.getLogger(__name__) MIME_ALLOW = [ - 'image/png', 'image/svg+xml', 'image/jpeg', 'image/gif', 'video/quicktime', 'video/mp4', 'audio/mpeg', - 'application/pdf', 'image/jpg', 'application/octet-stream' + "image/png", + "image/svg+xml", + "image/jpeg", + "image/gif", + "video/quicktime", + "video/mp4", + "audio/mpeg", + "application/pdf", + "image/jpg", + "application/octet-stream", ] def media_gallery_bucket(): - return os.getenv('MEDIA_GALLERY_BUCKET') + return os.getenv("MEDIA_GALLERY_BUCKET") def google_project_id(): - return os.getenv('GOOGLE_PROJECT_ID', '') + return os.getenv("GOOGLE_PROJECT_ID", "") class MediaView(ViewSet, GenerateLookupsMixin): @@ -75,44 +83,45 @@ class MediaView(ViewSet, GenerateLookupsMixin): get_name: Media by name. """ + schema = MediaSchema() - extensions = APIViewExtensions(sort='-created_at', paginate=True) + extensions = APIViewExtensions(sort="-created_at", paginate=True) - @capable_of('read_media') + @capable_of("read_media") def get(self, request, academy_id=None): handler = self.extensions(request) - lookups = self.generate_lookups(request, - many_fields=['mime', 'name', 'slug', 'id'], - many_relationships=['academy']) + lookups = self.generate_lookups( + request, many_fields=["mime", "name", "slug", "id"], many_relationships=["academy"] + ) items = Media.objects.filter(**lookups) # filter media by all categories, if one request have category 1 and 2, # if just get the media is in all the categories passed - categories = request.GET.get('categories') + categories = request.GET.get("categories") if categories: - categories = categories.split(',') + categories = categories.split(",") for category in categories: items = items.filter(categories__pk=category) - start = request.GET.get('start', None) + start = request.GET.get("start", None) if start is not None: - start_date = datetime.datetime.strptime(start, '%Y-%m-%d').date() - lookups['created_at__gte'] = start_date + start_date = datetime.datetime.strptime(start, "%Y-%m-%d").date() + lookups["created_at__gte"] = start_date - end = request.GET.get('end', None) + end = request.GET.get("end", None) if end is not None: - end_date = datetime.datetime.strptime(end, '%Y-%m-%d').date() - lookups['created_at__lte'] = end_date + end_date = datetime.datetime.strptime(end, "%Y-%m-%d").date() + lookups["created_at__lte"] = end_date - tp = request.GET.get('type') + tp = request.GET.get("type") if tp: items = items.filter(mime__icontains=tp) items = items.filter(**lookups) - like = request.GET.get('like') + like = request.GET.get("like") if like: items = items.filter(Q(name__icontains=like) | Q(slug__icontains=like)) @@ -121,70 +130,73 @@ def get(self, request, academy_id=None): return handler.response(serializer.data) - @capable_of('read_media') + @capable_of("read_media") def get_id(self, request, media_id: int, academy_id=None): item = Media.objects.filter(id=media_id).first() if not item: - raise ValidationException('Media not found', code=404) + raise ValidationException("Media not found", code=404) serializer = GetMediaSerializer(item, many=False) return Response(serializer.data, status=status.HTTP_200_OK) - @capable_of('read_media') + @capable_of("read_media") def get_slug(self, request, media_slug: str, academy_id=None): item = Media.objects.filter(slug=media_slug).first() if not item: - raise ValidationException('Media not found', code=404) + raise ValidationException("Media not found", code=404) serializer = GetMediaSerializer(item, many=False) 
return Response(serializer.data, status=status.HTTP_200_OK) - @capable_of('read_media') + @capable_of("read_media") def get_name(self, request, media_name: str, academy_id=None): item = Media.objects.filter(name=media_name).first() if not item: - raise ValidationException('Media not found', code=404) + raise ValidationException("Media not found", code=404) serializer = GetMediaSerializer(item, many=False) return Response(serializer.data, status=status.HTTP_200_OK) - @capable_of('crud_media') + @capable_of("crud_media") def put(self, request, academy_id=None): many = isinstance(request.data, list) current = [] context = { - 'request': request, - 'media_id': None, - 'many': True, + "request": request, + "media_id": None, + "many": True, } if not request.data: - raise ValidationException('Please input data to use request', slug='no-args') + raise ValidationException("Please input data to use request", slug="no-args") for x in request.data: - if not 'categories' in x: - raise ValidationException('For bulk mode, please input category in the request', - slug='categories-not-in-bulk') + if not "categories" in x: + raise ValidationException( + "For bulk mode, please input category in the request", slug="categories-not-in-bulk" + ) if len(x) > 2: - raise ValidationException('Bulk mode its only to edit categories, ' + - 'please change to single put for more', - slug='extra-args-bulk-mode') + raise ValidationException( + "Bulk mode its only to edit categories, " + "please change to single put for more", + slug="extra-args-bulk-mode", + ) - if not 'id' in x: - raise ValidationException('Please input id in body for bulk mode', slug='id-not-in-bulk') + if not "id" in x: + raise ValidationException("Please input id in body for bulk mode", slug="id-not-in-bulk") - media = Media.objects.filter(id=x['id']).first() + media = Media.objects.filter(id=x["id"]).first() if not media: - raise ValidationException('Media not found', code=404, slug='media-not-found') + raise ValidationException("Media not found", code=404, slug="media-not-found") if media.academy_id != int(academy_id): - raise ValidationException("You can't edit media belonging to other academies", - slug='different-academy-media-put') + raise ValidationException( + "You can't edit media belonging to other academies", slug="different-academy-media-put" + ) current.append(media) @@ -194,22 +206,23 @@ def put(self, request, academy_id=None): return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - @capable_of('crud_media') + @capable_of("crud_media") def put_id(self, request, media_id, academy_id=None): context = { - 'request': request, - 'media_id': media_id, - 'many': False, + "request": request, + "media_id": media_id, + "many": False, } current = Media.objects.filter(id=media_id).first() if not current: - raise ValidationException('Media not found', code=404, slug='media-not-found') + raise ValidationException("Media not found", code=404, slug="media-not-found") if current.academy_id != int(academy_id): - raise ValidationException("You can't edit media belonging to other academies", - slug='different-academy-media-put') + raise ValidationException( + "You can't edit media belonging to other academies", slug="different-academy-media-put" + ) serializer = MediaSerializer(current, data=request.data, context=context, many=False) if serializer.is_valid(): @@ -217,20 +230,22 @@ def put_id(self, request, media_id, academy_id=None): return Response(serializer.data, 
status=status.HTTP_200_OK) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - @capable_of('crud_media') + @capable_of("crud_media") def delete(self, request, academy_id=None): from ..services.google_cloud import Storage lang = get_user_language(request) - lookups = self.generate_lookups(request, many_fields=['id']) + lookups = self.generate_lookups(request, many_fields=["id"]) if lookups: items = Media.objects.filter(**lookups) if items.filter(academy__id=academy_id).count() != len(items): - raise ValidationException('You may not delete media that belongs to a different academy', - slug='academy-different-than-media-academy') + raise ValidationException( + "You may not delete media that belongs to a different academy", + slug="academy-different-than-media-academy", + ) for item in items: url = item.url @@ -244,19 +259,22 @@ def delete(self, request, academy_id=None): file.delete() except CircuitBreakerError: - raise ValidationException(translation( - lang, - en='The circuit breaker is open due to an error, please try again later', - es='El circuit breaker está abierto debido a un error, por favor intente más tarde', - slug='circuit-breaker-open'), - slug='circuit-breaker-open', - data={'service': 'Google Cloud Storage'}, - silent=True, - code=503) + raise ValidationException( + translation( + lang, + en="The circuit breaker is open due to an error, please try again later", + es="El circuit breaker está abierto debido a un error, por favor intente más tarde", + slug="circuit-breaker-open", + ), + slug="circuit-breaker-open", + data={"service": "Google Cloud Storage"}, + silent=True, + code=503, + ) resolution = MediaResolution.objects.filter(hash=hash).first() if resolution: - resolution_url = f'{url}-{resolution.width}x{resolution.height}' + resolution_url = f"{url}-{resolution.width}x{resolution.height}" resolution_file = storage.file(media_gallery_bucket(), resolution_url) resolution_file.delete() @@ -266,7 +284,7 @@ def delete(self, request, academy_id=None): return Response(None, status=status.HTTP_204_NO_CONTENT) - @capable_of('crud_media') + @capable_of("crud_media") def delete_id(self, request, media_id=None, academy_id=None): from ..services.google_cloud import Storage @@ -274,11 +292,13 @@ def delete_id(self, request, media_id=None, academy_id=None): data = Media.objects.filter(id=media_id).first() if not data: - raise ValidationException('Media not found', code=404) + raise ValidationException("Media not found", code=404) if not data.academy or data.academy.id != int(academy_id): - raise ValidationException('You may not delete media that belongs to a different academy', - slug='academy-different-than-media-academy') + raise ValidationException( + "You may not delete media that belongs to a different academy", + slug="academy-different-than-media-academy", + ) url = data.url hash = data.hash @@ -291,19 +311,22 @@ def delete_id(self, request, media_id=None, academy_id=None): file.delete() except CircuitBreakerError: - raise ValidationException(translation( - lang, - en='The circuit breaker is open due to an error, please try again later', - es='El circuit breaker está abierto debido a un error, por favor intente más tarde', - slug='circuit-breaker-open'), - slug='circuit-breaker-open', - data={'service': 'Google Cloud Storage'}, - silent=True, - code=503) + raise ValidationException( + translation( + lang, + en="The circuit breaker is open due to an error, please try again later", + es="El circuit breaker está abierto debido a un error, por favor intente más 
tarde", + slug="circuit-breaker-open", + ), + slug="circuit-breaker-open", + data={"service": "Google Cloud Storage"}, + silent=True, + code=503, + ) resolution = MediaResolution.objects.filter(hash=hash).first() if resolution: - resolution_url = f'{url}-{resolution.width}x{resolution.height}' + resolution_url = f"{url}-{resolution.width}x{resolution.height}" resolution_file = storage.file(media_gallery_bucket(), resolution_url) resolution_file.delete() @@ -337,9 +360,10 @@ class CategoryView(ViewSet): delete: Delete a Category by slug. """ + extensions = APIViewExtensions(paginate=True) - @capable_of('read_media') + @capable_of("read_media") def get(self, request, category_id=None, category_slug=None, academy_id=None): handler = self.extensions(request) @@ -349,27 +373,27 @@ def get(self, request, category_id=None, category_slug=None, academy_id=None): return handler.response(serializer.data) - @capable_of('read_media') + @capable_of("read_media") def get_id(self, request, category_id=None, academy_id=None): item = Category.objects.filter(id=category_id).first() if not item: - raise ValidationException('Category not found', code=404) + raise ValidationException("Category not found", code=404) serializer = GetCategorySerializer(item, many=False) return Response(serializer.data, status=status.HTTP_200_OK) - @capable_of('read_media') + @capable_of("read_media") def get_slug(self, request, category_slug=None, academy_id=None): item = Category.objects.filter(slug=category_slug).first() if not item: - raise ValidationException('Category not found', code=404) + raise ValidationException("Category not found", code=404) serializer = GetCategorySerializer(item, many=False) return Response(serializer.data, status=status.HTTP_200_OK) - @capable_of('crud_media') + @capable_of("crud_media") def post(self, request, academy_id=None): serializer = CategorySerializer(data=request.data, many=False) if serializer.is_valid(): @@ -377,11 +401,11 @@ def post(self, request, academy_id=None): return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - @capable_of('crud_media') + @capable_of("crud_media") def put(self, request, category_slug=None, academy_id=None): data = Category.objects.filter(slug=category_slug).first() if not data: - raise ValidationException('Category not found', code=404) + raise ValidationException("Category not found", code=404) serializer = CategorySerializer(data, data=request.data, many=False) if serializer.is_valid(): @@ -389,14 +413,14 @@ def put(self, request, category_slug=None, academy_id=None): return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - @capable_of('crud_media') + @capable_of("crud_media") def delete(self, request, category_slug=None, academy_id=None): data = Category.objects.filter(slug=category_slug).first() if not data: - raise ValidationException('Category not found', code=404) + raise ValidationException("Category not found", code=404) if Media.objects.filter(categories__slug=category_slug).count(): - raise ValidationException('Category contain some medias', code=403) + raise ValidationException("Category contain some medias", code=403) data.delete() @@ -408,6 +432,7 @@ class UploadView(APIView): put: Upload a file to Google Cloud. 
""" + parser_classes = [MultiPartParser, FileUploadParser] # permission_classes = [AllowAny] @@ -417,28 +442,28 @@ class UploadView(APIView): def upload(self, request, lang, academy_id=None, update=False): from ..services.google_cloud import Storage - files = request.data.getlist('file') - names = request.data.getlist('name') + files = request.data.getlist("file") + names = request.data.getlist("name") result = { - 'data': [], - 'instance': [], + "data": [], + "instance": [], } - file = request.data.get('file') + file = request.data.get("file") slugs = [] if not file: - raise ValidationException('Missing file in request', code=400) + raise ValidationException("Missing file in request", code=400) if not len(files): - raise ValidationException('empty files in request') + raise ValidationException("empty files in request") if not len(names): for file in files: names.append(file.name) elif len(files) != len(names): - raise ValidationException('numbers of files and names not match') + raise ValidationException("numbers of files and names not match") # files validation below for index in range(0, len(files)): @@ -446,7 +471,8 @@ def upload(self, request, lang, academy_id=None, update=False): if file.content_type not in MIME_ALLOW: raise ValidationException( f'You can upload only files on the following formats: {",".join(MIME_ALLOW)}, got {file.content_type}', - code=400) + code=400, + ) for index in range(0, len(files)): file = files[index] @@ -459,39 +485,39 @@ def upload(self, request, lang, academy_id=None, update=False): if slug_number > 1: while True: roman_number = num_to_roman(slug_number, lower=True) - slug = f'{slug}-{roman_number}' + slug = f"{slug}-{roman_number}" if not slug in slugs: break slug_number = slug_number + 1 slugs.append(slug) data = { - 'hash': hash, - 'slug': slug, - 'mime': file.content_type, - 'name': name, - 'categories': [], - 'academy': academy_id, + "hash": hash, + "slug": slug, + "mime": file.content_type, + "name": name, + "categories": [], + "academy": academy_id, } # it is receive in url encoded - if 'categories' in request.data: - data['categories'] = request.data['categories'].split(',') - elif 'Categories' in request.headers: - data['categories'] = request.headers['Categories'].split(',') + if "categories" in request.data: + data["categories"] = request.data["categories"].split(",") + elif "Categories" in request.headers: + data["categories"] = request.headers["Categories"].split(",") media = Media.objects.filter(hash=hash, academy__id=academy_id).first() if media: - data['id'] = media.id + data["id"] = media.id - url = Media.objects.filter(hash=hash).values_list('url', flat=True).first() + url = Media.objects.filter(hash=hash).values_list("url", flat=True).first() if url: - data['url'] = url + data["url"] = url else: - url = Media.objects.filter(hash=hash).values_list('url', flat=True).first() + url = Media.objects.filter(hash=hash).values_list("url", flat=True).first() if url: - data['url'] = url + data["url"] = url else: # upload file section @@ -499,41 +525,45 @@ def upload(self, request, lang, academy_id=None, update=False): storage = Storage() cloud_file = storage.file(media_gallery_bucket(), hash) cloud_file.upload(file, content_type=file.content_type) - data['url'] = cloud_file.url() - data['thumbnail'] = data['url'] + '-thumbnail' + data["url"] = cloud_file.url() + data["thumbnail"] = data["url"] + "-thumbnail" except CircuitBreakerError: - raise ValidationException(translation( - lang, - en='The circuit breaker is open due to an error, please try 
again later', - es='El circuit breaker está abierto debido a un error, por favor intente más tarde', - slug='circuit-breaker-open'), - slug='circuit-breaker-open', - data={'service': 'Google Cloud Storage'}, - silent=True, - code=503) - - result['data'].append(data) + raise ValidationException( + translation( + lang, + en="The circuit breaker is open due to an error, please try again later", + es="El circuit breaker está abierto debido a un error, por favor intente más tarde", + slug="circuit-breaker-open", + ), + slug="circuit-breaker-open", + data={"service": "Google Cloud Storage"}, + silent=True, + code=503, + ) + + result["data"].append(data) from django.db.models import Q + query = None - datas_with_id = [x for x in result['data'] if 'id' in x] + datas_with_id = [x for x in result["data"] if "id" in x] for x in datas_with_id: if query: - query = query | Q(id=x['id']) + query = query | Q(id=x["id"]) else: - query = Q(id=x['id']) + query = Q(id=x["id"]) if query: - result['instance'] = Media.objects.filter(query) + result["instance"] = Media.objects.filter(query) return result - @capable_of('crud_media') + @capable_of("crud_media") def put(self, request, academy_id=None): lang = get_user_language(request) upload = self.upload(request, lang, academy_id, update=True) - serializer = MediaPUTSerializer(upload['instance'], data=upload['data'], context=upload['data'], many=True) + serializer = MediaPUTSerializer(upload["instance"], data=upload["data"], context=upload["data"], many=True) if serializer.is_valid(): serializer.save() @@ -546,6 +576,7 @@ class MaskingUrlView(APIView): get: Get file from Google Cloud. """ + parser_classes = [FileUploadParser] permission_classes = [AllowAny] schema = FileSchema() @@ -553,27 +584,28 @@ class MaskingUrlView(APIView): def get(self, request, media_id=None, media_slug=None): lookups = {} if media_id: - lookups['id'] = media_id + lookups["id"] = media_id elif media_slug: - lookups['slug'] = media_slug.split('.')[0] #ignore extension + lookups["slug"] = media_slug.split(".")[0] # ignore extension - width = request.GET.get('width') - height = request.GET.get('height') + width = request.GET.get("width") + height = request.GET.get("height") media = Media.objects.filter(**lookups).first() if not media: - raise ValidationException('Resource not found', code=404) + raise ValidationException("Resource not found", code=404) url = media.url if width and height: raise ValidationException( - 'You need to pass either width or height, not both, in order to avoid losing aspect ratio', + "You need to pass either width or height, not both, in order to avoid losing aspect ratio", code=400, - slug='width-and-height-in-querystring') + slug="width-and-height-in-querystring", + ) - if (width or height) and not media.mime.startswith('image/'): - raise ValidationException('cannot resize this resource', code=400, slug='cannot-resize-media') + if (width or height) and not media.mime.startswith("image/"): + raise ValidationException("cannot resize this resource", code=400, slug="cannot-resize-media") # register click media.hits = media.hits + 1 @@ -582,39 +614,41 @@ def get(self, request, media_id=None, media_slug=None): resolution = MediaResolution.objects.filter(Q(width=width) | Q(height=height), hash=media.hash).first() if (width or height) and not resolution: - func = FunctionV1(region='us-central1', project_id=google_project_id(), name='resize-image') + func = FunctionV1(region="us-central1", project_id=google_project_id(), name="resize-image") - func_request = func.call({ - 
'width': width, - 'height': height, - 'filename': media.hash, - 'bucket': media_gallery_bucket(), - }) + func_request = func.call( + { + "width": width, + "height": height, + "filename": media.hash, + "bucket": media_gallery_bucket(), + } + ) res = func_request.json() - if not res['status_code'] == 200 or not res['message'] == 'Ok': - if 'message' in res: - raise ValidationException(res['message'], code=500, slug='cloud-function-bad-input') + if not res["status_code"] == 200 or not res["message"] == "Ok": + if "message" in res: + raise ValidationException(res["message"], code=500, slug="cloud-function-bad-input") - raise ValidationException('Unhandled request from cloud functions', - code=500, - slug='unhandled-cloud-function') + raise ValidationException( + "Unhandled request from cloud functions", code=500, slug="unhandled-cloud-function" + ) - width = res['width'] - height = res['height'] + width = res["width"] + height = res["height"] resolution = MediaResolution(width=width, height=height, hash=media.hash) resolution.save() - if (width or height): + if width or height: width = resolution.width height = resolution.height - url = f'{url}-{width}x{height}' + url = f"{url}-{width}x{height}" resolution.hits = resolution.hits + 1 resolution.save() - if request.GET.get('mask') != 'true': + if request.GET.get("mask") != "true": return redirect(url, permanent=True) response = requests.get(url, stream=True) @@ -625,8 +659,9 @@ def get(self, request, media_id=None, media_slug=None): ) header_keys = [ - x for x in response.headers.keys() - if x != 'Transfer-Encoding' and x != 'Content-Encoding' and x != 'Keep-Alive' and x != 'Connection' + x + for x in response.headers.keys() + if x != "Transfer-Encoding" and x != "Content-Encoding" and x != "Keep-Alive" and x != "Connection" ] for header in header_keys: @@ -647,37 +682,37 @@ class ResolutionView(ViewSet): Delete a Resolution by id. 
""" - @capable_of('read_media_resolution') + @capable_of("read_media_resolution") def get_id(self, request, resolution_id: int, academy_id=None): resolutions = MediaResolution.objects.filter(id=resolution_id).first() if not resolutions: - raise ValidationException('Resolution was not found', code=404, slug='resolution-not-found') + raise ValidationException("Resolution was not found", code=404, slug="resolution-not-found") media = Media.objects.filter(hash=resolutions.hash).first() if not media: resolutions.delete() - raise ValidationException('Resolution was deleted for not having parent element', - slug='resolution-media-not-found', - code=404) + raise ValidationException( + "Resolution was deleted for not having parent element", slug="resolution-media-not-found", code=404 + ) serializer = GetResolutionSerializer(resolutions) return Response(serializer.data, status=status.HTTP_200_OK) - @capable_of('read_media_resolution') + @capable_of("read_media_resolution") def get_media_id(self, request, media_id: int, academy_id=None): media = Media.objects.filter(id=media_id).first() if not media: - raise ValidationException('Media not found', code=404, slug='media-not-found') + raise ValidationException("Media not found", code=404, slug="media-not-found") resolutions = MediaResolution.objects.filter(hash=media.hash) if not resolutions: - raise ValidationException('Resolution was not found', code=404, slug='resolution-not-found') + raise ValidationException("Resolution was not found", code=404, slug="resolution-not-found") serializer = GetResolutionSerializer(resolutions, many=True) return Response(serializer.data, status=status.HTTP_200_OK) - @capable_of('crud_media_resolution') + @capable_of("crud_media_resolution") def delete(self, request, resolution_id=None, academy_id=None): from ..services.google_cloud import Storage @@ -685,18 +720,18 @@ def delete(self, request, resolution_id=None, academy_id=None): resolution = MediaResolution.objects.filter(id=resolution_id).first() if not resolution: - raise ValidationException('Resolution was not found', code=404, slug='resolution-not-found') + raise ValidationException("Resolution was not found", code=404, slug="resolution-not-found") media = Media.objects.filter(hash=resolution.hash).first() if not media: resolution.delete() - raise ValidationException('Resolution was deleted for not having parent element', - slug='resolution-media-not-found', - code=404) + raise ValidationException( + "Resolution was deleted for not having parent element", slug="resolution-media-not-found", code=404 + ) hash = resolution.hash url = media.url - url = f'{url}-{resolution.width}x{resolution.height}' + url = f"{url}-{resolution.width}x{resolution.height}" resolution.delete() @@ -707,14 +742,17 @@ def delete(self, request, resolution_id=None, academy_id=None): file.delete() except CircuitBreakerError: - raise ValidationException(translation( - lang, - en='The circuit breaker is open due to an error, please try again later', - es='El circuit breaker está abierto debido a un error, por favor intente más tarde', - slug='circuit-breaker-open'), - slug='circuit-breaker-open', - data={'service': 'Google Cloud Storage'}, - silent=True, - code=503) + raise ValidationException( + translation( + lang, + en="The circuit breaker is open due to an error, please try again later", + es="El circuit breaker está abierto debido a un error, por favor intente más tarde", + slug="circuit-breaker-open", + ), + slug="circuit-breaker-open", + data={"service": "Google Cloud Storage"}, + 
silent=True, + code=503, + ) return Response(None, status=status.HTTP_204_NO_CONTENT) diff --git a/breathecode/mentorship/actions.py b/breathecode/mentorship/actions.py index 204bcd54b..51cafdcc8 100644 --- a/breathecode/mentorship/actions.py +++ b/breathecode/mentorship/actions.py @@ -27,12 +27,15 @@ def close_older_sessions(): now = timezone.now() diff = timedelta(hours=2) - sessions = MentorshipSession.objects.filter(status__in=['PENDING', 'STARTED'], ends_at__lt=now - diff) - - close_mentoring_sessions(sessions, { - 'summary': 'Automatically closed because its ends was two hours ago or more', - 'status': 'FAILED', - }) + sessions = MentorshipSession.objects.filter(status__in=["PENDING", "STARTED"], ends_at__lt=now - diff) + + close_mentoring_sessions( + sessions, + { + "summary": "Automatically closed because its ends was two hours ago or more", + "status": "FAILED", + }, + ) def get_pending_sessions_or_create(token, mentor, service, mentee=None): @@ -41,29 +44,26 @@ def get_pending_sessions_or_create(token, mentor, service, mentee=None): # starting to pick pending sessions pending_sessions = [] if mentee is not None: - unfinished_with_mentee = MentorshipSession.objects.filter(mentor__id=mentor.id, - mentee__id=mentee.id, - service__id=service.id, - status__in=['PENDING', 'STARTED']) + unfinished_with_mentee = MentorshipSession.objects.filter( + mentor__id=mentor.id, mentee__id=mentee.id, service__id=service.id, status__in=["PENDING", "STARTED"] + ) if unfinished_with_mentee.count() > 0: - pending_sessions += unfinished_with_mentee.values_list('pk', flat=True) + pending_sessions += unfinished_with_mentee.values_list("pk", flat=True) # if its a mentor, I will force him to close pending sessions if mentor.user.id == token.user.id: - unfinished_sessions = MentorshipSession.objects.filter(mentor__id=mentor.id, - service__id=service.id, - status__in=['PENDING', - 'STARTED']).exclude(id__in=pending_sessions) + unfinished_sessions = MentorshipSession.objects.filter( + mentor__id=mentor.id, service__id=service.id, status__in=["PENDING", "STARTED"] + ).exclude(id__in=pending_sessions) # if it has unishined meetings with already started if unfinished_sessions.count() > 0: - pending_sessions += unfinished_sessions.values_list('pk', flat=True) + pending_sessions += unfinished_sessions.values_list("pk", flat=True) # if its a mentee, and there are pending sessions without mentee assigned elif mentee is not None and mentee.id == token.user.id: - unfinished_sessions = MentorshipSession.objects.filter(mentor__id=mentor.id, - mentee__isnull=True, - service__id=service.id, - status__in=['PENDING']).order_by('-mentor_joined_at') + unfinished_sessions = MentorshipSession.objects.filter( + mentor__id=mentor.id, mentee__isnull=True, service__id=service.id, status__in=["PENDING"] + ).order_by("-mentor_joined_at") if unfinished_sessions.count() > 0: # grab the last one the mentor joined @@ -71,11 +71,13 @@ def get_pending_sessions_or_create(token, mentor, service, mentee=None): pending_sessions += [last_one.id] # close the rest close_mentoring_sessions( - unfinished_sessions.exclude(id=last_one.id), { - 'summary': 'Automatically closed, not enough information on the meeting the mentor forgot to ' - 'specify the mentee and the mentee never joined', - 'status': 'FAILED', - }) + unfinished_sessions.exclude(id=last_one.id), + { + "summary": "Automatically closed, not enough information on the meeting the mentor forgot to " + "specify the mentee and the mentee never joined", + "status": "FAILED", + }, + ) # return 
all the collected pending sessions if len(pending_sessions) > 0: @@ -89,11 +91,9 @@ def get_pending_sessions_or_create(token, mentor, service, mentee=None): if service.duration is not None: duration = service.duration - session = MentorshipSession(mentor=mentor, - mentee=mentee, - is_online=True, - service=service, - ends_at=timezone.now() + duration) + session = MentorshipSession( + mentor=mentor, mentee=mentee, is_online=True, service=service, ends_at=timezone.now() + duration + ) if session.service.video_provider == MentorshipService.VideoProvider.GOOGLE_MEET: create_room_on_google_meet(session) @@ -101,19 +101,18 @@ def get_pending_sessions_or_create(token, mentor, service, mentee=None): elif session.service.video_provider == MentorshipService.VideoProvider.DAILY: daily = DailyClient() room = daily.create_room(exp_in_seconds=service.duration.seconds) - session.online_meeting_url = room['url'] - session.name = room['name'] + session.online_meeting_url = room["url"] + session.name = room["name"] session.mentee = mentee session.save() else: - raise Exception('Invalid video provider') + raise Exception("Invalid video provider") if mentee: - tasks_activity.add_activity.delay(mentee.id, - 'mentorship_session_checkin', - related_type='mentorship.MentorshipSession', - related_id=session.id) + tasks_activity.add_activity.delay( + mentee.id, "mentorship_session_checkin", related_type="mentorship.MentorshipSession", related_id=session.id + ) return MentorshipSession.objects.filter(id=session.id) @@ -143,20 +142,21 @@ def extend_session(session: MentorshipSession, duration_in_minutes=None, exp_in_ def render_session(request, session, token): from .serializers import GETSessionReportSerializer + data = { - 'subject': session.service.name, - 'room_url': session.online_meeting_url, - 'session': GETSessionReportSerializer(session, many=False).data, - 'userName': (token.user.first_name + ' ' + token.user.last_name).strip(), - 'backup_room_url': session.mentor.online_meeting_url, + "subject": session.service.name, + "room_url": session.online_meeting_url, + "session": GETSessionReportSerializer(session, many=False).data, + "userName": (token.user.first_name + " " + token.user.last_name).strip(), + "backup_room_url": session.mentor.online_meeting_url, } if token.user.id == session.mentor.user.id: - data['leave_url'] = '/mentor/session/' + str(session.id) + '?token=' + token.key + data["leave_url"] = "/mentor/session/" + str(session.id) + "?token=" + token.key else: - data['leave_url'] = 'close' + data["leave_url"] = "close" - return render(request, 'daily.html', data) + return render(request, "daily.html", data) def close_mentoring_sessions(sessions: QuerySet[MentorshipSession], data: dict): @@ -166,7 +166,7 @@ def close_mentoring_sessions(sessions: QuerySet[MentorshipSession], data: dict): def close_mentoring_session(session: MentorshipSession, data: dict): sessions_to_close = MentorshipSession.objects.filter(id=session.id) - sessions_to_close.update(summary=data['summary'], status=data['status'].upper(), ended_at=timezone.now()) + sessions_to_close.update(summary=data["summary"], status=data["status"].upper(), ended_at=timezone.now()) return sessions_to_close @@ -174,100 +174,110 @@ def close_mentoring_session(session: MentorshipSession, data: dict): def get_accounted_time(_session): def get_duration(session): - response = {'accounted_duration': 0, 'status_message': ''} + response = {"accounted_duration": 0, "status_message": ""} if session.started_at is None and session.mentor_joined_at is not None: - 
response['status_message'] = 'Mentor joined but mentee never did, ' + response["status_message"] = "Mentor joined but mentee never did, " if session.service.missed_meeting_duration.seconds > 0: - response['accounted_duration'] = session.service.missed_meeting_duration - response['status_message'] += (f'{duration_to_str(response["accounted_duration"])} will be ' - 'accounted for the bill.') + response["accounted_duration"] = session.service.missed_meeting_duration + response["status_message"] += ( + f'{duration_to_str(response["accounted_duration"])} will be ' "accounted for the bill." + ) else: - response['accounted_duration'] = timedelta(seconds=0) - response['status_message'] += 'No time will be included on the bill.' + response["accounted_duration"] = timedelta(seconds=0) + response["status_message"] += "No time will be included on the bill." return response elif session.started_at is not None: if session.mentor_joined_at is None: - response['accounted_duration'] = timedelta(seconds=0) - response['status_message'] = ('The mentor never joined the meeting, no time will ' - 'be accounted for.') + response["accounted_duration"] = timedelta(seconds=0) + response["status_message"] = "The mentor never joined the meeting, no time will " "be accounted for." return response if session.ended_at is None: if session.ends_at is not None and session.ends_at > session.started_at: - response['accounted_duration'] = session.ends_at - session.started_at - response['status_message'] = ( - 'The session never ended, accounting for the expected meeting duration ' - f'that was {duration_to_str(response["accounted_duration"])}.') + response["accounted_duration"] = session.ends_at - session.started_at + response["status_message"] = ( + "The session never ended, accounting for the expected meeting duration " + f'that was {duration_to_str(response["accounted_duration"])}.' + ) return response elif session.mentee_left_at is not None: - response['accounted_duration'] = session.mentee_left_at - session.started_at - response['status_message'] = ( - 'The session never ended, accounting duration based on the time where ' - f'the mentee left the meeting {duration_to_str(response["accounted_duration"])}.') + response["accounted_duration"] = session.mentee_left_at - session.started_at + response["status_message"] = ( + "The session never ended, accounting duration based on the time where " + f'the mentee left the meeting {duration_to_str(response["accounted_duration"])}.' + ) return response elif session.mentor_left_at is not None: - response['accounted_duration'] = session.mentor_left_at - session.started_at - response['status_message'] = ( - 'The session never ended, accounting duration based on the time where the mentor ' - f'left the meeting {duration_to_str(response["accounted_duration"])}.') + response["accounted_duration"] = session.mentor_left_at - session.started_at + response["status_message"] = ( + "The session never ended, accounting duration based on the time where the mentor " + f'left the meeting {duration_to_str(response["accounted_duration"])}.' + ) return response else: - response['accounted_duration'] = session.service.duration - response['status_message'] = ('The session never ended, accounting for the standard duration ' - f'{duration_to_str(response["accounted_duration"])}.') + response["accounted_duration"] = session.service.duration + response["status_message"] = ( + "The session never ended, accounting for the standard duration " + f'{duration_to_str(response["accounted_duration"])}.' 
+ ) return response if session.started_at > session.ended_at: - response['accounted_duration'] = timedelta(seconds=0) - response['status_message'] = ('Meeting started before it ended? No duration ' - 'will be accounted for.') + response["accounted_duration"] = timedelta(seconds=0) + response["status_message"] = "Meeting started before it ended? No duration " "will be accounted for." return response if (session.ended_at - session.started_at).days >= 1: if session.mentee_left_at is not None: - response['accounted_duration'] = session.mentee_left_at - session.started_at - response['status_message'] = ( - 'The lasted way more than it should, accounting duration based on the time where ' - f'the mentee left the meeting {duration_to_str(response["accounted_duration"])}.') + response["accounted_duration"] = session.mentee_left_at - session.started_at + response["status_message"] = ( + "The lasted way more than it should, accounting duration based on the time where " + f'the mentee left the meeting {duration_to_str(response["accounted_duration"])}.' + ) return response else: - response['accounted_duration'] = session.service.duration - response['status_message'] = ( - 'This session lasted more than a day, no one ever left, was probably never closed, ' + response["accounted_duration"] = session.service.duration + response["status_message"] = ( + "This session lasted more than a day, no one ever left, was probably never closed, " f'accounting for standard duration {duration_to_str(response["accounted_duration"])}' - '.') + "." + ) return response - response['accounted_duration'] = session.ended_at - session.started_at - if response['accounted_duration'] > session.service.max_duration: + response["accounted_duration"] = session.ended_at - session.started_at + if response["accounted_duration"] > session.service.max_duration: if session.service.max_duration.seconds == 0: - response['accounted_duration'] = session.service.duration - response['status_message'] = ( - 'No extra time is allowed for session, accounting for standard duration ' - f'of {duration_to_str(response["accounted_duration"])}.') + response["accounted_duration"] = session.service.duration + response["status_message"] = ( + "No extra time is allowed for session, accounting for standard duration " + f'of {duration_to_str(response["accounted_duration"])}.' + ) return response else: - response['accounted_duration'] = session.service.max_duration - response['status_message'] = ( - 'The duration of the session is bigger than the maximum allowed, accounting ' - f'for max duration of {duration_to_str(response["accounted_duration"])}.') + response["accounted_duration"] = session.service.max_duration + response["status_message"] = ( + "The duration of the session is bigger than the maximum allowed, accounting " + f'for max duration of {duration_to_str(response["accounted_duration"])}.' + ) return response else: # everything perfect, we account for the expected return response else: - response['accounted_duration'] = timedelta(seconds=0) - response['status_message'] = 'No one joined this session, nothing will be accounted for.' + response["accounted_duration"] = timedelta(seconds=0) + response["status_message"] = "No one joined this session, nothing will be accounted for." 
return response _duration = get_duration(_session) - if _duration['accounted_duration'] > _session.service.max_duration: - _duration['accounted_duration'] = _session.service.max_duration - _duration['status_message'] += (' The session accounted duration was limited to the maximum allowed ' - f'{duration_to_str(_duration["accounted_duration"])}.') + if _duration["accounted_duration"] > _session.service.max_duration: + _duration["accounted_duration"] = _session.service.max_duration + _duration["status_message"] += ( + " The session accounted duration was limited to the maximum allowed " + f'{duration_to_str(_duration["accounted_duration"])}.' + ) return _duration @@ -275,12 +285,13 @@ def last_month_date(current_date): # getting next month # using replace to get to last day + offset # to reach next month - nxt_mnth = current_date.replace(day=28, hour=23, minute=59, second=59, - microsecond=999999) + datetime.timedelta(days=4) + nxt_mnth = current_date.replace(day=28, hour=23, minute=59, second=59, microsecond=999999) + datetime.timedelta( + days=4 + ) # subtracting the days from next month date to # get last date of current Month - last_datetime = (nxt_mnth - datetime.timedelta(days=nxt_mnth.day)) + last_datetime = nxt_mnth - datetime.timedelta(days=nxt_mnth.day) return last_datetime @@ -291,45 +302,50 @@ def generate_mentor_bills(mentor, reset=False): def get_unpaid_sessions(): return MentorshipSession.objects.filter( Q(bill__isnull=True) - | Q(bill__status='DUE', bill__academy=mentor.academy, bill__paid_at__isnull=True) - | Q(bill__status='RECALCULATE', bill__academy=mentor.academy, bill__paid_at__isnull=True), + | Q(bill__status="DUE", bill__academy=mentor.academy, bill__paid_at__isnull=True) + | Q(bill__status="RECALCULATE", bill__academy=mentor.academy, bill__paid_at__isnull=True), service__isnull=False, allow_billing=True, mentor__id=mentor.id, - status__in=['COMPLETED', 'FAILED'], + status__in=["COMPLETED", "FAILED"], started_at__isnull=False, - ).order_by('started_at') + ).order_by("started_at") without_service = MentorshipSession.objects.filter( Q(bill__isnull=True) - | Q(bill__status='DUE', bill__academy=mentor.academy, bill__paid_at__isnull=True) - | Q(bill__status='RECALCULATE', bill__academy=mentor.academy, bill__paid_at__isnull=True), + | Q(bill__status="DUE", bill__academy=mentor.academy, bill__paid_at__isnull=True) + | Q(bill__status="RECALCULATE", bill__academy=mentor.academy, bill__paid_at__isnull=True), mentor=mentor, - service__isnull=True).count() + service__isnull=True, + ).count() if without_service: raise ValidationException( - f'This mentor has {without_service} sessions without an associated service that need to be fixed', - slug='session_without_service') + f"This mentor has {without_service} sessions without an associated service that need to be fixed", + slug="session_without_service", + ) - recalculate_bills = MentorshipBill.objects.filter(Q(status='DUE') | Q(status='RECALCULATE'), - mentor__id=mentor.id, - academy__id=mentor.academy.id) + recalculate_bills = MentorshipBill.objects.filter( + Q(status="DUE") | Q(status="RECALCULATE"), mentor__id=mentor.id, academy__id=mentor.academy.id + ) unpaid_sessions = get_unpaid_sessions() if not unpaid_sessions: if recalculate_bills: for bill in recalculate_bills: - bill.status = 'DUE' + bill.status = "DUE" bill.save() return [] - pending_months = sorted({(x.year, x.month) for x in unpaid_sessions.values_list('started_at', flat=True)}) + pending_months = sorted({(x.year, x.month) for x in 
unpaid_sessions.values_list("started_at", flat=True)}) for year, month in pending_months: sessions_of_month = unpaid_sessions.filter(started_at__month=month, started_at__year=year) start_at = datetime.datetime(year, month, 1, 0, 0, 0, 0, tzinfo=pytz.UTC) - end_at = datetime.datetime(year, month, 1, 0, 0, 0, 0, - tzinfo=pytz.UTC) + relativedelta(months=1) - datetime.timedelta(microseconds=1) + end_at = ( + datetime.datetime(year, month, 1, 0, 0, 0, 0, tzinfo=pytz.UTC) + + relativedelta(months=1) + - datetime.timedelta(microseconds=1) + ) open_bill = None if recalculate_bills: @@ -339,7 +355,7 @@ def get_unpaid_sessions(): open_bill = MentorshipBill(mentor=mentor, academy=mentor.academy, started_at=start_at, ended_at=end_at) open_bill.save() else: - open_bill.status = 'DUE' + open_bill.status = "DUE" open_bill = generate_mentor_bill(mentor, open_bill, sessions_of_month, reset) @@ -349,36 +365,36 @@ def get_unpaid_sessions(): def generate_mentor_bill(mentor, bill, sessions, reset=False): - total = {'minutes': 0, 'overtime_minutes': 0} + total = {"minutes": 0, "overtime_minutes": 0} for session in sessions: session.bill = bill _result = get_accounted_time(session) - session.suggested_accounted_duration = _result['accounted_duration'] - session.status_message = _result['status_message'] + session.suggested_accounted_duration = _result["accounted_duration"] + session.status_message = _result["status_message"] # if is null and reset=true all the sessions durations will be rest to the suggested one if session.accounted_duration is None or reset == True: - session.accounted_duration = _result['accounted_duration'] + session.accounted_duration = _result["accounted_duration"] extra_minutes = 0 if session.accounted_duration > session.service.duration: extra_minutes = (session.accounted_duration - session.service.duration).seconds / 60 - total['minutes'] = total['minutes'] + (session.accounted_duration.seconds / 60) - total['overtime_minutes'] = total['overtime_minutes'] + extra_minutes + total["minutes"] = total["minutes"] + (session.accounted_duration.seconds / 60) + total["overtime_minutes"] = total["overtime_minutes"] + extra_minutes session.save() - total['hours'] = round(total['minutes'] / 60, 2) - total['price'] = total['hours'] * mentor.price_per_hour + total["hours"] = round(total["minutes"] / 60, 2) + total["price"] = total["hours"] * mentor.price_per_hour - bill.total_duration_in_hours = total['hours'] - bill.total_duration_in_minutes = total['minutes'] - bill.overtime_minutes = total['overtime_minutes'] - bill.total_price = total['price'] + bill.total_duration_in_hours = total["hours"] + bill.total_duration_in_minutes = total["minutes"] + bill.overtime_minutes = total["overtime_minutes"] + bill.total_price = total["price"] bill.save() return bill @@ -386,21 +402,25 @@ def generate_mentor_bill(mentor, bill, sessions, reset=False): def mentor_is_ready(mentor: MentorProfile): - if mentor.online_meeting_url is None or mentor.online_meeting_url == '': + if mentor.online_meeting_url is None or mentor.online_meeting_url == "": raise Exception( - f'Mentor {mentor.name} does not have backup online_meeting_url, update the value before activating.') + f"Mentor {mentor.name} does not have backup online_meeting_url, update the value before activating." 
+ ) - elif mentor.booking_url is None or 'https://calendly.com' not in mentor.booking_url: - raise Exception(f'Mentor {mentor.name} booking_url must point to calendly, update the value before activating.') + elif mentor.booking_url is None or "https://calendly.com" not in mentor.booking_url: + raise Exception(f"Mentor {mentor.name} booking_url must point to calendly, update the value before activating.") elif len(mentor.syllabus.all()) == 0: - raise Exception(f'Mentor {mentor.name} has no syllabus associated, update the value before activating.') + raise Exception(f"Mentor {mentor.name} has no syllabus associated, update the value before activating.") - elif 'no-booking-url' not in mentor.availability_report and 'bad-booking-url' in mentor.availability_report: - raise Exception(f'Mentor {mentor.name} booking URL is failing.') + elif "no-booking-url" not in mentor.availability_report and "bad-booking-url" in mentor.availability_report: + raise Exception(f"Mentor {mentor.name} booking URL is failing.") - elif 'no-online-meeting-url' not in mentor.availability_report and 'bad-online-meeting-url' in mentor.availability_report: - raise Exception(f'Mentor {mentor.name} online meeting URL is failing.') + elif ( + "no-online-meeting-url" not in mentor.availability_report + and "bad-online-meeting-url" in mentor.availability_report + ): + raise Exception(f"Mentor {mentor.name} online meeting URL is failing.") return True @@ -409,13 +429,13 @@ def create_room_on_google_meet(session: MentorshipSession) -> None: """Create a room on google meet for a mentorship session.""" if isinstance(session, MentorshipSession) is False: - raise Exception('session argument must be a MentorshipSession') + raise Exception("session argument must be a MentorshipSession") if session.service.video_provider != session.service.VideoProvider.GOOGLE_MEET: - raise Exception('Video provider must be Google Meet') + raise Exception("Video provider must be Google Meet") if not session.service: - raise Exception('Mentorship session doesn\'t have a service associated with it') + raise Exception("Mentorship session doesn't have a service associated with it") mentor = session.mentor @@ -423,8 +443,7 @@ def create_room_on_google_meet(session: MentorshipSession) -> None: if session.id is None: session.save() - title = (f'{session.service.name} {session.id} | ' - f'{mentor.user.first_name} {mentor.user.last_name}') + title = f"{session.service.name} {session.id} | " f"{mentor.user.first_name} {mentor.user.last_name}" s = Space( name=title, config=SpaceConfig(access_type=SpaceConfig.AccessType.OPEN), diff --git a/breathecode/mentorship/admin.py b/breathecode/mentorship/admin.py index c102bc4a7..eb8309c00 100644 --- a/breathecode/mentorship/admin.py +++ b/breathecode/mentorship/admin.py @@ -1,8 +1,17 @@ import pytz, logging, requests, re from django.contrib import admin, messages from django import forms -from .models import (MentorProfile, MentorshipService, MentorshipSession, MentorshipBill, SupportAgent, SupportChannel, - ChatBot, CalendlyOrganization, CalendlyWebhook) +from .models import ( + MentorProfile, + MentorshipService, + MentorshipSession, + MentorshipBill, + SupportAgent, + SupportChannel, + ChatBot, + CalendlyOrganization, + CalendlyWebhook, +) from breathecode.services.calendly import Calendly from django.utils.html import format_html import breathecode.mentorship.tasks as tasks @@ -16,9 +25,9 @@ @admin.register(MentorshipService) class ServiceAdmin(admin.ModelAdmin): - list_display = ['slug', 'name', 'status', 'academy'] - 
search_fields = ['slug', 'name'] - list_filter = ['academy__slug', 'status'] + list_display = ["slug", "name", "status", "academy"] + search_fields = ["slug", "name"] + list_filter = ["academy__slug", "status"] # raw_id_fields = ['academy', 'github_user'] # actions = [sync_issues, generate_bill] @@ -30,7 +39,7 @@ class MentorForm(forms.ModelForm): def __init__(self, *args, **kwargs): super(MentorForm, self).__init__(*args, **kwargs) - self.fields['timezone'] = forms.ChoiceField(choices=timezones) + self.fields["timezone"] = forms.ChoiceField(choices=timezones) def generate_bill(modeladmin, request, queryset): @@ -47,14 +56,14 @@ def mark_as_active(modeladmin, request, queryset): connection_errors = [] exceptions = {} - connection_error_message = 'Booking or meeting URL for mentor is failing ({})' + connection_error_message = "Booking or meeting URL for mentor is failing ({})" for entry in entries: try: actions.mentor_is_ready(entry) except requests.exceptions.ConnectionError: - message = 'Error: Booking or meeting URL for mentor is failing' + message = "Error: Booking or meeting URL for mentor is failing" logger.fatal(message) connection_errors.append(entry.slug) @@ -68,7 +77,7 @@ def mark_as_active(modeladmin, request, queryset): exceptions[error].append(entry.slug) if connection_errors and exceptions: - all_errors = 'Error:' + all_errors = "Error:" all_errors = f'{all_errors} {connection_error_message.format(", ".join(connection_errors))}.' @@ -82,7 +91,7 @@ def mark_as_active(modeladmin, request, queryset): messages.error(request, f'Error: {connection_error_message.format(", ".join(connection_errors))}.') if exceptions: - all_errors = 'Error:' + all_errors = "Error:" for error, slugs in exceptions.items(): all_errors = f'{all_errors} {error} ({", ".join(slugs)}).' 
@@ -90,7 +99,7 @@ def mark_as_active(modeladmin, request, queryset): messages.error(request, all_errors) if not connection_errors and not exceptions: - messages.success(request, 'Mentor updated successfully') + messages.success(request, "Mentor updated successfully") def generate_slug_based_on_calendly(modeladmin, request, queryset): @@ -98,12 +107,12 @@ def generate_slug_based_on_calendly(modeladmin, request, queryset): for entry in entries: if entry.booking_url is None: - messages.error(request, f'Mentor {entry.id} has no booking url') + messages.error(request, f"Mentor {entry.id} has no booking url") continue - result = re.search(r'^https?:\/\/calendly.com\/([\w\-]+)\/?.*', entry.booking_url) + result = re.search(r"^https?:\/\/calendly.com\/([\w\-]+)\/?.*", entry.booking_url) if result is None: - messages.error(request, f'Mentor {entry.id} booking url is not calendly: {entry.booking_url}') + messages.error(request, f"Mentor {entry.id} booking url is not calendly: {entry.booking_url}") continue calendly_username = result.group(1) @@ -113,28 +122,28 @@ def generate_slug_based_on_calendly(modeladmin, request, queryset): @admin.register(SupportChannel) class SupportChannelAdmin(admin.ModelAdmin): - list_display = ['id', 'slug', 'slack_channel', 'academy'] - raw_id_fields = ['slack_channel', 'academy', 'syllabis'] - search_fields = ['slug', 'slack_channel__slack_id', 'slack_channel__name'] - list_filter = ['syllabis'] + list_display = ["id", "slug", "slack_channel", "academy"] + raw_id_fields = ["slack_channel", "academy", "syllabis"] + search_fields = ["slug", "slack_channel__slack_id", "slack_channel__name"] + list_filter = ["syllabis"] @admin.register(SupportAgent) class AgentAdmin(admin.ModelAdmin): - list_display = ['id', 'user', 'channel', 'email', 'current_status'] - raw_id_fields = ['user', 'channel'] - search_fields = ['email', 'user__first_name', 'user__last_name', 'user__email'] - list_filter = ['channel__academy__slug', 'status', 'channel__syllabis__slug', 'channel__slug'] - readonly_fields = ('token', ) - actions = change_field(['INNACTIVE', 'INVITED'], name='status') + list_display = ["id", "user", "channel", "email", "current_status"] + raw_id_fields = ["user", "channel"] + search_fields = ["email", "user__first_name", "user__last_name", "user__email"] + list_filter = ["channel__academy__slug", "status", "channel__syllabis__slug", "channel__slug"] + readonly_fields = ("token",) + actions = change_field(["INNACTIVE", "INVITED"], name="status") def current_status(self, obj): colors = { - 'ACTIVE': 'bg-success', - 'INVITED': 'bg-warning', - 'UNLISTED': 'bg-warning', - 'INNACTIVE': 'bg-error', - None: 'bg-warning', + "ACTIVE": "bg-success", + "INVITED": "bg-warning", + "UNLISTED": "bg-warning", + "INNACTIVE": "bg-error", + None: "bg-warning", } return format_html(f"<span class='badge {colors[obj.status]}'>{obj.status}</span>") @@ -143,22 +152,23 @@ def current_status(self, obj): @admin.register(MentorProfile) class MentorAdmin(admin.ModelAdmin): form = MentorForm - list_display = ['slug', 'user', 'name', 'email', 'current_status', 'unique_url', 'meet_url', 'academy'] - raw_id_fields = ['user', 'services'] - search_fields = ['name', 'user__first_name', 'user__last_name', 'email', 'user__email', 'slug'] - list_filter = ['services__academy__slug', 'status', 'services__slug'] - readonly_fields = ('token', ) - filter_horizontal = ('syllabus', 'services') - actions = [generate_bill, mark_as_active, generate_slug_based_on_calendly] + change_field(['INNACTIVE', 'INVITED'], - 
name='status') + list_display = ["slug", "user", "name", "email", "current_status", "unique_url", "meet_url", "academy"] + raw_id_fields = ["user", "services"] + search_fields = ["name", "user__first_name", "user__last_name", "email", "user__email", "slug"] + list_filter = ["services__academy__slug", "status", "services__slug"] + readonly_fields = ("token",) + filter_horizontal = ("syllabus", "services") + actions = [generate_bill, mark_as_active, generate_slug_based_on_calendly] + change_field( + ["INNACTIVE", "INVITED"], name="status" + ) def current_status(self, obj): colors = { - 'ACTIVE': 'bg-success', - 'INVITED': 'bg-warning', - 'UNLISTED': 'bg-warning', - 'INNACTIVE': 'bg-error', - None: 'bg-warning', + "ACTIVE": "bg-success", + "INVITED": "bg-warning", + "UNLISTED": "bg-warning", + "INNACTIVE": "bg-error", + None: "bg-warning", } if obj.online_meeting_url is None: @@ -185,17 +195,17 @@ def allow_billing_this_session(modeladmin, request, queryset): class BilledFilter(SimpleListFilter): - title = 'billed' - parameter_name = 'billed' + title = "billed" + parameter_name = "billed" def lookups(self, request, model_admin): return [ - ('false', 'Not yet billed'), - ('true', 'Already billed'), + ("false", "Not yet billed"), + ("true", "Already billed"), ] def queryset(self, request, queryset): - if self.value() == 'false': + if self.value() == "false": return queryset.filter(bill__isnull=True) if self.value(): return queryset.filter(bill__isnull=False) @@ -203,25 +213,30 @@ def queryset(self, request, queryset): @admin.register(MentorshipSession) class SessionAdmin(admin.ModelAdmin): - list_display = ['id', 'mentor', 'mentee', 'stats', 'started_at', 'mentor_joined_at', 'openurl'] - raw_id_fields = ['mentor', 'mentee'] + list_display = ["id", "mentor", "mentee", "stats", "started_at", "mentor_joined_at", "openurl"] + raw_id_fields = ["mentor", "mentee"] search_fields = [ - 'mentee__first_name', 'mentee__last_name', 'mentee__email', 'mentor__user__first_name', - 'mentor__user__last_name', 'mentor__user__email' + "mentee__first_name", + "mentee__last_name", + "mentee__email", + "mentor__user__first_name", + "mentor__user__last_name", + "mentor__user__email", ] - list_filter = [BilledFilter, 'allow_billing', 'status', 'mentor__services__academy', 'mentor__services__slug'] + list_filter = [BilledFilter, "allow_billing", "status", "mentor__services__academy", "mentor__services__slug"] actions = [avoid_billing_this_session, allow_billing_this_session] + change_field( - ['COMPLETED', 'FAILED', 'STARTED', 'PENDING'], name='status') + ["COMPLETED", "FAILED", "STARTED", "PENDING"], name="status" + ) def stats(self, obj): colors = { - 'COMPLETED': 'bg-success', - 'FAILED': 'bg-error', - 'STARTED': 'bg-warning', - 'PENDING': 'bg-secondary', - 'CANCELED': '', - 'IGNORED': 'bg-secondary', + "COMPLETED": "bg-success", + "FAILED": "bg-error", + "STARTED": "bg-warning", + "PENDING": "bg-secondary", + "CANCELED": "", + "IGNORED": "bg-secondary", } return format_html(f"<span class='badge {colors[obj.status]}'>{obj.status}</span>") @@ -252,20 +267,21 @@ def release_sessions_from_bill(modeladmin, request, queryset): @admin.register(MentorshipBill) class MentorshipBillAdmin(admin.ModelAdmin): - list_display = ('id', 'mentor', 'status', 'total_duration_in_hours', 'total_price', 'paid_at', 'invoice_url') - list_filter = ['status'] - actions = [release_sessions_from_bill] + change_field(['DUE', 'APPROVED', 'PAID', 'IGNORED'], name='status') + list_display = ("id", "mentor", "status", "total_duration_in_hours", 
"total_price", "paid_at", "invoice_url") + list_filter = ["status"] + actions = [release_sessions_from_bill] + change_field(["DUE", "APPROVED", "PAID", "IGNORED"], name="status") def invoice_url(self, obj): return format_html( "<a rel='noopener noreferrer' target='_blank' href='/v1/mentorship/academy/bill/{id}/html'>open</a>", - id=obj.id) + id=obj.id, + ) @admin.register(ChatBot) class ChatBotAdmin(admin.ModelAdmin): - list_display = ('slug', 'name', 'academy') - list_filter = ['academy'] + list_display = ("slug", "name", "academy") + list_filter = ["academy"] # actions = [release_sessions_from_bill] + change_field(['DUE', 'APPROVED', 'PAID', 'IGNORED'], # name='status') @@ -289,15 +305,15 @@ def get_subscription_webhooks(modeladmin, request, queryset): for org in entries: cal = Calendly(token=org.access_token) data = cal.get_subscriptions(org.uri) - print('subscriptions', data) + print("subscriptions", data) @admin.register(CalendlyOrganization) class CalendlyOrganizationAdmin(admin.ModelAdmin): - list_display = ('username', 'academy', 'hash', 'sync_status', 'sync_desc') - list_filter = ['sync_status', 'academy'] - search_fields = ['username'] - readonly_fields = ('hash', ) + list_display = ("username", "academy", "hash", "sync_status", "sync_desc") + list_filter = ["sync_status", "academy"] + search_fields = ["username"] + readonly_fields = ("hash",) actions = [subscribe_to_webhooks, get_subscription_webhooks, unsubscribe_to_all_webhooks] @@ -310,8 +326,16 @@ def reattempt_calendly_webhook(modeladmin, request, queryset): @admin.register(CalendlyWebhook) class CalendlyWebhookAdmin(admin.ModelAdmin): - list_display = ('id', 'status', 'event', 'organization', 'organization_hash', 'created_by', 'status_text', - 'created_at') - list_filter = ['organization', 'status', 'event'] - search_fields = ['organization__username'] + list_display = ( + "id", + "status", + "event", + "organization", + "organization_hash", + "created_by", + "status_text", + "created_at", + ) + list_filter = ["organization", "status", "event"] + search_fields = ["organization__username"] actions = [reattempt_calendly_webhook] diff --git a/breathecode/mentorship/apps.py b/breathecode/mentorship/apps.py index 51c8fdc0a..f680ed816 100644 --- a/breathecode/mentorship/apps.py +++ b/breathecode/mentorship/apps.py @@ -2,7 +2,7 @@ class MediaConfig(AppConfig): - name = 'breathecode.mentorship' + name = "breathecode.mentorship" def ready(self): from . import receivers # noqa: F401 diff --git a/breathecode/mentorship/exceptions.py b/breathecode/mentorship/exceptions.py index 3b59301ea..082e7666f 100644 --- a/breathecode/mentorship/exceptions.py +++ b/breathecode/mentorship/exceptions.py @@ -1,2 +1 @@ -class ExtendSessionException(Exception): - ... +class ExtendSessionException(Exception): ... 
diff --git a/breathecode/mentorship/forms.py b/breathecode/mentorship/forms.py index 04e67da53..5f1d0ec38 100644 --- a/breathecode/mentorship/forms.py +++ b/breathecode/mentorship/forms.py @@ -5,30 +5,40 @@ class CloseMentoringSessionForm(forms.Form): token = forms.CharField(widget=forms.HiddenInput()) session_id = forms.CharField(widget=forms.HiddenInput()) - student_name = forms.CharField(widget=forms.TextInput(attrs={ - 'class': 'form-control', - 'readonly': 'readonly', - })) - status = forms.CharField(label='Meeting status', - widget=forms.Select(choices=MENTORSHIP_STATUS, attrs={ - 'class': 'form-control', - })) - summary = forms.CharField(widget=forms.Textarea(attrs={'rows': 5, 'cols': 20, 'class': 'form-control'})) + student_name = forms.CharField( + widget=forms.TextInput( + attrs={ + "class": "form-control", + "readonly": "readonly", + } + ) + ) + status = forms.CharField( + label="Meeting status", + widget=forms.Select( + choices=MENTORSHIP_STATUS, + attrs={ + "class": "form-control", + }, + ), + ) + summary = forms.CharField(widget=forms.Textarea(attrs={"rows": 5, "cols": 20, "class": "form-control"})) def __init__(self, params, *args, **kwargs): super(forms.Form, self).__init__(params, *args, **kwargs) - self.fields['token'].widget.attrs.update({'initial': params.get('token')}) + self.fields["token"].widget.attrs.update({"initial": params.get("token")}) def clean(self): super(CloseMentoringSessionForm, self).clean() - status = self.cleaned_data.get('status') + status = self.cleaned_data.get("status") # if status == 'PENDING': # raise ValidationError('You need to chose either Completed or Failed on the session status', # code='invalid') - if status in ['PENDING', 'STARTED']: - self._errors['status'] = self.error_class( - ['You need to chose either Completed or Failed on the session status']) + if status in ["PENDING", "STARTED"]: + self._errors["status"] = self.error_class( + ["You need to chose either Completed or Failed on the session status"] + ) return self.cleaned_data diff --git a/breathecode/mentorship/management/commands/mentorship.py b/breathecode/mentorship/management/commands/mentorship.py index 67e5afdbb..f3d0fbb72 100644 --- a/breathecode/mentorship/management/commands/mentorship.py +++ b/breathecode/mentorship/management/commands/mentorship.py @@ -6,15 +6,15 @@ from breathecode.mentorship import tasks from breathecode.mentorship.models import MentorProfile -IS_DJANGO_REDIS = hasattr(cache, 'delete_pattern') +IS_DJANGO_REDIS = hasattr(cache, "delete_pattern") def db_backup_bucket(): - return os.getenv('DB_BACKUP_BUCKET') + return os.getenv("DB_BACKUP_BUCKET") def get_activity_sampling_rate(): - env = os.getenv('ACTIVITY_SAMPLING_RATE') + env = os.getenv("ACTIVITY_SAMPLING_RATE") if env: return int(env) @@ -22,16 +22,16 @@ def get_activity_sampling_rate(): class Command(BaseCommand): - help = 'Delete duplicate cohort users imported from old breathecode' + help = "Delete duplicate cohort users imported from old breathecode" def handle(self, *args, **options): self.check_mentorship_profiles() def check_mentorship_profiles(self): - self.stdout.write(self.style.SUCCESS('Checking mentorship profiles')) - mentor_profiles = MentorProfile.objects.filter(status__in=['ACTIVE', 'UNLISTED']).only('id') + self.stdout.write(self.style.SUCCESS("Checking mentorship profiles")) + mentor_profiles = MentorProfile.objects.filter(status__in=["ACTIVE", "UNLISTED"]).only("id") for mentor_profile in mentor_profiles: tasks.check_mentorship_profile.delay(mentor_profile.id) - 
self.stdout.write(self.style.SUCCESS(f'Scheduled {len(mentor_profiles)} mentorship profiles')) + self.stdout.write(self.style.SUCCESS(f"Scheduled {len(mentor_profiles)} mentorship profiles")) diff --git a/breathecode/mentorship/migrations/0001_initial.py b/breathecode/mentorship/migrations/0001_initial.py index d08504d48..3f6ed51b8 100644 --- a/breathecode/mentorship/migrations/0001_initial.py +++ b/breathecode/mentorship/migrations/0001_initial.py @@ -10,93 +10,124 @@ class Migration(migrations.Migration): initial = True dependencies = [ - ('admissions', '0025_merge_20211018_2259'), + ("admissions", "0025_merge_20211018_2259"), migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( - name='MentorProfile', + name="MentorProfile", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('name', models.CharField(max_length=150)), - ('price_per_hour', models.FloatField()), - ('token', models.CharField(help_text='Used for the invitation', max_length=255, unique=True)), - ('status', - models.CharField(choices=[('INVITED', 'Invited'), ('ACTIVE', 'Active'), ('INNACTIVE', 'Innactive')], - default='INVITED', - help_text='Options are: INVITEDACTIVEINNACTIVE', - max_length=15)), - ('email', - models.CharField(default=None, - help_text='Only use this if the user does not exist on breathecode already', - max_length=150, - null=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("name", models.CharField(max_length=150)), + ("price_per_hour", models.FloatField()), + ("token", models.CharField(help_text="Used for the invitation", max_length=255, unique=True)), + ( + "status", + models.CharField( + choices=[("INVITED", "Invited"), ("ACTIVE", "Active"), ("INNACTIVE", "Innactive")], + default="INVITED", + help_text="Options are: INVITEDACTIVEINNACTIVE", + max_length=15, + ), + ), + ( + "email", + models.CharField( + default=None, + help_text="Only use this if the user does not exist on breathecode already", + max_length=150, + null=True, + ), + ), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), ], ), migrations.CreateModel( - name='MentorshipSession', + name="MentorshipSession", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('is_online', models.BooleanField()), - ('latitude', models.FloatField(blank=True, default=None, null=True)), - ('longitude', models.FloatField(blank=True, default=None, null=True)), - ('online_meeting_url', models.CharField(blank=True, default=None, max_length=255, null=True)), - ('online_recording_url', - models.CharField( - blank=True, - default=None, - help_text='We encourace the mentors to record the session and share them with the students', - max_length=255, - null=True)), - ('status', - models.CharField(choices=[('INVITED', 'Invited'), ('ACTIVE', 'Active'), ('INNACTIVE', 'Innactive')], - default='INVITED', - help_text='Options are: INVITEDACTIVEINNACTIVE', - max_length=15)), - ('agenda', - models.TextField(blank=True, default=None, help_text='What will this mentorship be about', null=True)), - ('summary', - models.TextField(blank=True, - default=None, - help_text='Describe briefly what happened at the mentorship session', - null=True)), - ('started_at', 
models.DateTimeField(blank=True, default=None, null=True)), - ('ended_at', models.DateTimeField(blank=True, default=None, null=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('mentee', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), - ('mentor', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, - to='mentorship.mentorprofile')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("is_online", models.BooleanField()), + ("latitude", models.FloatField(blank=True, default=None, null=True)), + ("longitude", models.FloatField(blank=True, default=None, null=True)), + ("online_meeting_url", models.CharField(blank=True, default=None, max_length=255, null=True)), + ( + "online_recording_url", + models.CharField( + blank=True, + default=None, + help_text="We encourace the mentors to record the session and share them with the students", + max_length=255, + null=True, + ), + ), + ( + "status", + models.CharField( + choices=[("INVITED", "Invited"), ("ACTIVE", "Active"), ("INNACTIVE", "Innactive")], + default="INVITED", + help_text="Options are: INVITEDACTIVEINNACTIVE", + max_length=15, + ), + ), + ( + "agenda", + models.TextField( + blank=True, default=None, help_text="What will this mentorship be about", null=True + ), + ), + ( + "summary", + models.TextField( + blank=True, + default=None, + help_text="Describe briefly what happened at the mentorship session", + null=True, + ), + ), + ("started_at", models.DateTimeField(blank=True, default=None, null=True)), + ("ended_at", models.DateTimeField(blank=True, default=None, null=True)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("mentee", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), + ( + "mentor", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="mentorship.mentorprofile"), + ), ], ), migrations.CreateModel( - name='MentorshipService', + name="MentorshipService", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('slug', models.SlugField(max_length=150, unique=True)), - ('name', models.CharField(max_length=150)), - ('status', - models.CharField(choices=[('DRAFT', 'Draft'), ('ACTIVE', 'Active'), ('INNACTIVE', 'Innactive')], - default='DRAFT', - max_length=15)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('academy', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='admissions.academy')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("slug", models.SlugField(max_length=150, unique=True)), + ("name", models.CharField(max_length=150)), + ( + "status", + models.CharField( + choices=[("DRAFT", "Draft"), ("ACTIVE", "Active"), ("INNACTIVE", "Innactive")], + default="DRAFT", + max_length=15, + ), + ), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("academy", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="admissions.academy")), ], ), migrations.AddField( - model_name='mentorprofile', - name='service', - field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='mentorship.mentorshipservice'), + model_name="mentorprofile", + 
name="service", + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="mentorship.mentorshipservice"), ), migrations.AddField( - model_name='mentorprofile', - name='user', - field=models.ForeignKey(help_text='If the user does not exist, you can use the email field instead', - on_delete=django.db.models.deletion.CASCADE, - to=settings.AUTH_USER_MODEL), + model_name="mentorprofile", + name="user", + field=models.ForeignKey( + help_text="If the user does not exist, you can use the email field instead", + on_delete=django.db.models.deletion.CASCADE, + to=settings.AUTH_USER_MODEL, + ), ), ] diff --git a/breathecode/mentorship/migrations/0002_auto_20211109_2203.py b/breathecode/mentorship/migrations/0002_auto_20211109_2203.py index d0f544ddf..f511bbf6b 100644 --- a/breathecode/mentorship/migrations/0002_auto_20211109_2203.py +++ b/breathecode/mentorship/migrations/0002_auto_20211109_2203.py @@ -6,127 +6,135 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0025_merge_20211018_2259'), - ('mentorship', '0001_initial'), + ("admissions", "0025_merge_20211018_2259"), + ("mentorship", "0001_initial"), ] operations = [ migrations.AddField( - model_name='mentorprofile', - name='booking_url', + model_name="mentorprofile", + name="booking_url", field=models.URLField( blank=True, default=None, - help_text='URL where this mentor profile can be booked, E.g: calendly.com/my_username', - null=True), + help_text="URL where this mentor profile can be booked, E.g: calendly.com/my_username", + null=True, + ), ), migrations.AddField( - model_name='mentorprofile', - name='online_meeting_url', + model_name="mentorprofile", + name="online_meeting_url", field=models.URLField( blank=True, default=None, help_text="If set, it will be default for all session's unless the session.online_meeting_url is set", - null=True), + null=True, + ), ), migrations.AddField( - model_name='mentorprofile', - name='slug', + model_name="mentorprofile", + name="slug", field=models.SlugField( default=None, - help_text= - 'Will be used as unique public booking URL with the students, for example: 4geeks.com/meet/bob', + help_text="Will be used as unique public booking URL with the students, for example: 4geeks.com/meet/bob", max_length=150, - unique=True), + unique=True, + ), preserve_default=False, ), migrations.AddField( - model_name='mentorprofile', - name='syllabus', - field=models.ManyToManyField(blank=True, - default=None, - help_text='What syllabis is this mentor going to be menting to?', - null=True, - to='admissions.Syllabus'), + model_name="mentorprofile", + name="syllabus", + field=models.ManyToManyField( + blank=True, + default=None, + help_text="What syllabis is this mentor going to be menting to?", + null=True, + to="admissions.Syllabus", + ), ), migrations.AddField( - model_name='mentorprofile', - name='timezone', - field=models.CharField(default=None, - help_text="Knowing the mentor's timezone helps with more accurrate booking", - max_length=50, - null=True), + model_name="mentorprofile", + name="timezone", + field=models.CharField( + default=None, + help_text="Knowing the mentor's timezone helps with more accurrate booking", + max_length=50, + null=True, + ), ), migrations.AddField( - model_name='mentorshipsession', - name='ends_at', - field=models.DateTimeField(blank=True, default=None, help_text='Scheduled end date', null=True), + model_name="mentorshipsession", + name="ends_at", + field=models.DateTimeField(blank=True, default=None, help_text="Scheduled end date", null=True), ), 
migrations.AddField( - model_name='mentorshipsession', - name='starts_at', - field=models.DateTimeField(blank=True, default=None, help_text='Scheduled start date', null=True), + model_name="mentorshipsession", + name="starts_at", + field=models.DateTimeField(blank=True, default=None, help_text="Scheduled start date", null=True), ), migrations.AlterField( - model_name='mentorprofile', - name='email', - field=models.CharField(blank=True, - default=None, - help_text='Only use this if the user does not exist on breathecode already', - max_length=150, - null=True), + model_name="mentorprofile", + name="email", + field=models.CharField( + blank=True, + default=None, + help_text="Only use this if the user does not exist on breathecode already", + max_length=150, + null=True, + ), ), migrations.AlterField( - model_name='mentorprofile', - name='name', + model_name="mentorprofile", + name="name", field=models.CharField(blank=True, default=None, max_length=150, null=True), ), migrations.AlterField( - model_name='mentorprofile', - name='token', - field=models.CharField(help_text='Used for inviting the user to become a mentor', - max_length=255, - unique=True), + model_name="mentorprofile", + name="token", + field=models.CharField( + help_text="Used for inviting the user to become a mentor", max_length=255, unique=True + ), ), migrations.AlterField( - model_name='mentorshipsession', - name='ended_at', - field=models.DateTimeField(blank=True, - default=None, - help_text='Real start date (only if it started)', - null=True), + model_name="mentorshipsession", + name="ended_at", + field=models.DateTimeField( + blank=True, default=None, help_text="Real start date (only if it started)", null=True + ), ), migrations.AlterField( - model_name='mentorshipsession', - name='online_meeting_url', - field=models.URLField(blank=True, - default=None, - help_text='Overrides the mentor.online_meeting_url if set', - null=True), + model_name="mentorshipsession", + name="online_meeting_url", + field=models.URLField( + blank=True, default=None, help_text="Overrides the mentor.online_meeting_url if set", null=True + ), ), migrations.AlterField( - model_name='mentorshipsession', - name='online_recording_url', + model_name="mentorshipsession", + name="online_recording_url", field=models.URLField( blank=True, default=None, - help_text='We encourace the mentors to record the session and share them with the students', - null=True), + help_text="We encourace the mentors to record the session and share them with the students", + null=True, + ), ), migrations.AlterField( - model_name='mentorshipsession', - name='started_at', - field=models.DateTimeField(blank=True, - default=None, - help_text='Real start date (only if it started)', - null=True), + model_name="mentorshipsession", + name="started_at", + field=models.DateTimeField( + blank=True, default=None, help_text="Real start date (only if it started)", null=True + ), ), migrations.AlterField( - model_name='mentorshipsession', - name='status', - field=models.CharField(choices=[('PENDING', 'Pending'), ('COMPLETED', 'Completed'), ('FAILED', 'Failed')], - default='PENDING', - help_text='Options are: PENDINGCOMPLETEDFAILED', - max_length=15), + model_name="mentorshipsession", + name="status", + field=models.CharField( + choices=[("PENDING", "Pending"), ("COMPLETED", "Completed"), ("FAILED", "Failed")], + default="PENDING", + help_text="Options are: PENDINGCOMPLETEDFAILED", + max_length=15, + ), ), ] diff --git a/breathecode/mentorship/migrations/0003_auto_20211110_2055.py 
b/breathecode/mentorship/migrations/0003_auto_20211110_2055.py index f18d8dfa0..fdae3aa33 100644 --- a/breathecode/mentorship/migrations/0003_auto_20211110_2055.py +++ b/breathecode/mentorship/migrations/0003_auto_20211110_2055.py @@ -6,21 +6,23 @@ class Migration(migrations.Migration): dependencies = [ - ('mentorship', '0002_auto_20211109_2203'), + ("mentorship", "0002_auto_20211109_2203"), ] operations = [ migrations.AddField( - model_name='mentorshipservice', - name='language', - field=models.CharField(default='en', max_length=2), + model_name="mentorshipservice", + name="language", + field=models.CharField(default="en", max_length=2), ), migrations.AlterField( - model_name='mentorshipsession', - name='status', - field=models.CharField(choices=[('PENDING', 'Pending'), ('COMPLETED', 'Completed'), ('FAILED', 'Failed')], - default='PENDING', - help_text='Options are: PENDING, COMPLETED, FAILED', - max_length=15), + model_name="mentorshipsession", + name="status", + field=models.CharField( + choices=[("PENDING", "Pending"), ("COMPLETED", "Completed"), ("FAILED", "Failed")], + default="PENDING", + help_text="Options are: PENDING, COMPLETED, FAILED", + max_length=15, + ), ), ] diff --git a/breathecode/mentorship/migrations/0004_alter_mentorshipsession_status.py b/breathecode/mentorship/migrations/0004_alter_mentorshipsession_status.py index 8493f177c..d1ab8c0b2 100644 --- a/breathecode/mentorship/migrations/0004_alter_mentorshipsession_status.py +++ b/breathecode/mentorship/migrations/0004_alter_mentorshipsession_status.py @@ -6,17 +6,23 @@ class Migration(migrations.Migration): dependencies = [ - ('mentorship', '0003_auto_20211110_2055'), + ("mentorship", "0003_auto_20211110_2055"), ] operations = [ migrations.AlterField( - model_name='mentorshipsession', - name='status', - field=models.CharField(choices=[('PENDING', 'Pending'), ('STARTED', 'Started'), ('COMPLETED', 'Completed'), - ('FAILED', 'Failed')], - default='PENDING', - help_text='Options are: PENDING, STARTED, COMPLETED, FAILED', - max_length=15), + model_name="mentorshipsession", + name="status", + field=models.CharField( + choices=[ + ("PENDING", "Pending"), + ("STARTED", "Started"), + ("COMPLETED", "Completed"), + ("FAILED", "Failed"), + ], + default="PENDING", + help_text="Options are: PENDING, STARTED, COMPLETED, FAILED", + max_length=15, + ), ), ] diff --git a/breathecode/mentorship/migrations/0005_auto_20211111_2157.py b/breathecode/mentorship/migrations/0005_auto_20211111_2157.py index e6ec5e2b8..73fbd0f72 100644 --- a/breathecode/mentorship/migrations/0005_auto_20211111_2157.py +++ b/breathecode/mentorship/migrations/0005_auto_20211111_2157.py @@ -6,18 +6,18 @@ class Migration(migrations.Migration): dependencies = [ - ('mentorship', '0004_alter_mentorshipsession_status'), + ("mentorship", "0004_alter_mentorshipsession_status"), ] operations = [ migrations.AddField( - model_name='mentorprofile', - name='bio', + model_name="mentorprofile", + name="bio", field=models.TextField(blank=True, default=None, max_length=500, null=True), ), migrations.AddField( - model_name='mentorshipservice', - name='description', + model_name="mentorshipservice", + name="description", field=models.TextField(blank=True, default=None, max_length=500, null=True), ), ] diff --git a/breathecode/mentorship/migrations/0006_auto_20220225_2059.py b/breathecode/mentorship/migrations/0006_auto_20220225_2059.py index 40ac32f07..a4e7200bb 100644 --- a/breathecode/mentorship/migrations/0006_auto_20220225_2059.py +++ 
b/breathecode/mentorship/migrations/0006_auto_20220225_2059.py @@ -10,69 +10,79 @@ class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ('mentorship', '0005_auto_20211111_2157'), + ("mentorship", "0005_auto_20211111_2157"), ] operations = [ migrations.AddField( - model_name='mentorshipservice', - name='duration', - field=models.DurationField(default=datetime.timedelta(seconds=3600), - help_text='Default duration for mentorship sessions of this service'), + model_name="mentorshipservice", + name="duration", + field=models.DurationField( + default=datetime.timedelta(seconds=3600), + help_text="Default duration for mentorship sessions of this service", + ), ), migrations.AddField( - model_name='mentorshipservice', - name='logo_url', + model_name="mentorshipservice", + name="logo_url", field=models.CharField(blank=True, default=None, max_length=150, null=True), ), migrations.AddField( - model_name='mentorshipsession', - name='mentee_left_at', - field=models.DateTimeField(blank=True, - default=None, - help_text='Exact moment the mentee left the meeting for the last time', - null=True), + model_name="mentorshipsession", + name="mentee_left_at", + field=models.DateTimeField( + blank=True, + default=None, + help_text="Exact moment the mentee left the meeting for the last time", + null=True, + ), ), migrations.AddField( - model_name='mentorshipsession', - name='mentor_joined_at', - field=models.DateTimeField(blank=True, - default=None, - help_text='Exact moment the mentor joined the meeting for the first time', - null=True), + model_name="mentorshipsession", + name="mentor_joined_at", + field=models.DateTimeField( + blank=True, + default=None, + help_text="Exact moment the mentor joined the meeting for the first time", + null=True, + ), ), migrations.AddField( - model_name='mentorshipsession', - name='mentor_left_at', - field=models.DateTimeField(blank=True, - default=None, - help_text='Exact moment the mentor left the meeting for the last time', - null=True), + model_name="mentorshipsession", + name="mentor_left_at", + field=models.DateTimeField( + blank=True, + default=None, + help_text="Exact moment the mentor left the meeting for the last time", + null=True, + ), ), migrations.AddField( - model_name='mentorshipsession', - name='name', - field=models.CharField(blank=True, - default=None, - help_text='Room name, used on daily.co', - max_length=255, - null=True), + model_name="mentorshipsession", + name="name", + field=models.CharField( + blank=True, default=None, help_text="Room name, used on daily.co", max_length=255, null=True + ), ), migrations.AlterField( - model_name='mentorshipsession', - name='ends_at', - field=models.DateTimeField(blank=True, - default=None, - help_text='Scheduled end date, will be used as meeting expiration as well', - null=True), + model_name="mentorshipsession", + name="ends_at", + field=models.DateTimeField( + blank=True, + default=None, + help_text="Scheduled end date, will be used as meeting expiration as well", + null=True, + ), ), migrations.AlterField( - model_name='mentorshipsession', - name='mentee', - field=models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to=settings.AUTH_USER_MODEL), + model_name="mentorshipsession", + name="mentee", + field=models.ForeignKey( + blank=True, + default=None, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to=settings.AUTH_USER_MODEL, + ), ), ] diff --git 
a/breathecode/mentorship/migrations/0007_auto_20220227_1654.py b/breathecode/mentorship/migrations/0007_auto_20220227_1654.py index 100eac983..88b188162 100644 --- a/breathecode/mentorship/migrations/0007_auto_20220227_1654.py +++ b/breathecode/mentorship/migrations/0007_auto_20220227_1654.py @@ -6,20 +6,22 @@ class Migration(migrations.Migration): dependencies = [ - ('mentorship', '0006_auto_20220225_2059'), + ("mentorship", "0006_auto_20220225_2059"), ] operations = [ migrations.AddField( - model_name='mentorshipservice', - name='allow_mentee_to_extend', - field=models.BooleanField(default=True, - help_text='If true, mentees will be able to extend mentorship session'), + model_name="mentorshipservice", + name="allow_mentee_to_extend", + field=models.BooleanField( + default=True, help_text="If true, mentees will be able to extend mentorship session" + ), ), migrations.AddField( - model_name='mentorshipservice', - name='allow_mentors_to_extend', - field=models.BooleanField(default=True, - help_text='If true, mentors will be able to extend mentorship session'), + model_name="mentorshipservice", + name="allow_mentors_to_extend", + field=models.BooleanField( + default=True, help_text="If true, mentors will be able to extend mentorship session" + ), ), ] diff --git a/breathecode/mentorship/migrations/0008_auto_20220311_0423.py b/breathecode/mentorship/migrations/0008_auto_20220311_0423.py index 953b9ae5e..2dbec6cfb 100644 --- a/breathecode/mentorship/migrations/0008_auto_20220311_0423.py +++ b/breathecode/mentorship/migrations/0008_auto_20220311_0423.py @@ -10,93 +10,119 @@ class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ('admissions', '0037_alter_cohort_schedule'), - ('mentorship', '0007_auto_20220227_1654'), + ("admissions", "0037_alter_cohort_schedule"), + ("mentorship", "0007_auto_20220227_1654"), ] operations = [ migrations.AddField( - model_name='mentorshipservice', - name='max_duration', + model_name="mentorshipservice", + name="max_duration", field=models.DurationField( default=datetime.timedelta(seconds=7200), - help_text='Maximum allowed duration or extra time, make it 0 for unlimited meetings'), + help_text="Maximum allowed duration or extra time, make it 0 for unlimited meetings", + ), ), migrations.AddField( - model_name='mentorshipservice', - name='missed_meeting_duration', + model_name="mentorshipservice", + name="missed_meeting_duration", field=models.DurationField( default=datetime.timedelta(seconds=600), - help_text="Duration that will be paid when the mentee doesn't come to the session"), + help_text="Duration that will be paid when the mentee doesn't come to the session", + ), ), migrations.AddField( - model_name='mentorshipsession', - name='accounted_duration', - field=models.DurationField(blank=True, - default=None, - help_text='The duration that will be paid to the mentor for this session', - null=True), + model_name="mentorshipsession", + name="accounted_duration", + field=models.DurationField( + blank=True, + default=None, + help_text="The duration that will be paid to the mentor for this session", + null=True, + ), ), migrations.AddField( - model_name='mentorshipsession', - name='status_message', + model_name="mentorshipsession", + name="status_message", field=models.TextField(blank=True, default=None, null=True), ), migrations.AlterField( - model_name='mentorshipsession', - name='status', + model_name="mentorshipsession", + name="status", field=models.CharField( - choices=[('PENDING', 'Pending'), 
('STARTED', 'Started'), ('COMPLETED', 'Completed'), - ('FAILED', 'Failed'), ('IGNORED', 'Ignored')], - default='PENDING', - help_text= - 'Options are: PENDING, STARTED, COMPLETED, FAILED, IGNORED. Ignored sessions will not be billed.', - max_length=15), + choices=[ + ("PENDING", "Pending"), + ("STARTED", "Started"), + ("COMPLETED", "Completed"), + ("FAILED", "Failed"), + ("IGNORED", "Ignored"), + ], + default="PENDING", + help_text="Options are: PENDING, STARTED, COMPLETED, FAILED, IGNORED. Ignored sessions will not be billed.", + max_length=15, + ), ), migrations.CreateModel( - name='MentorshipBill', + name="MentorshipBill", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('status', - models.CharField(choices=[('DUE', 'Due'), ('APPROVED', 'Approved'), ('PAID', 'Paid')], - default='DUE', - max_length=20)), - ('status_mesage', - models.TextField(blank=True, - default=None, - help_text='Any important information about the bill', - null=True)), - ('total_duration_in_minutes', models.FloatField(default=0)), - ('total_duration_in_hours', models.FloatField(default=0)), - ('total_price', models.FloatField(default=0)), - ('overtime_minutes', - models.FloatField( - default=0, help_text='Additional time mentorships took based on the expected default duration')), - ('paid_at', models.DateTimeField(blank=True, default=None, null=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('academy', - models.ForeignKey(default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='admissions.academy')), - ('mentor', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, - to='mentorship.mentorprofile')), - ('reviewer', - models.ForeignKey(default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to=settings.AUTH_USER_MODEL)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "status", + models.CharField( + choices=[("DUE", "Due"), ("APPROVED", "Approved"), ("PAID", "Paid")], + default="DUE", + max_length=20, + ), + ), + ( + "status_mesage", + models.TextField( + blank=True, default=None, help_text="Any important information about the bill", null=True + ), + ), + ("total_duration_in_minutes", models.FloatField(default=0)), + ("total_duration_in_hours", models.FloatField(default=0)), + ("total_price", models.FloatField(default=0)), + ( + "overtime_minutes", + models.FloatField( + default=0, help_text="Additional time mentorships took based on the expected default duration" + ), + ), + ("paid_at", models.DateTimeField(blank=True, default=None, null=True)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ( + "academy", + models.ForeignKey( + default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to="admissions.academy" + ), + ), + ( + "mentor", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="mentorship.mentorprofile"), + ), + ( + "reviewer", + models.ForeignKey( + default=None, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to=settings.AUTH_USER_MODEL, + ), + ), ], ), migrations.AddField( - model_name='mentorshipsession', - name='bill', - field=models.ForeignKey(blank=True, - default=None, - help_text='If null, it has not been billed by the mentor yet', - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to='mentorship.mentorshipbill'), + 
model_name="mentorshipsession", + name="bill", + field=models.ForeignKey( + blank=True, + default=None, + help_text="If null, it has not been billed by the mentor yet", + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to="mentorship.mentorshipbill", + ), ), ] diff --git a/breathecode/mentorship/migrations/0009_mentorshipsession_allow_billing.py b/breathecode/mentorship/migrations/0009_mentorshipsession_allow_billing.py index 6d3e5b37b..ae50a8dbf 100644 --- a/breathecode/mentorship/migrations/0009_mentorshipsession_allow_billing.py +++ b/breathecode/mentorship/migrations/0009_mentorshipsession_allow_billing.py @@ -6,14 +6,15 @@ class Migration(migrations.Migration): dependencies = [ - ('mentorship', '0008_auto_20220311_0423'), + ("mentorship", "0008_auto_20220311_0423"), ] operations = [ migrations.AddField( - model_name='mentorshipsession', - name='allow_billing', - field=models.BooleanField(default=True, - help_text='If false it will not be included when generating mentorship bills'), + model_name="mentorshipsession", + name="allow_billing", + field=models.BooleanField( + default=True, help_text="If false it will not be included when generating mentorship bills" + ), ), ] diff --git a/breathecode/mentorship/migrations/0010_auto_20220314_1542.py b/breathecode/mentorship/migrations/0010_auto_20220314_1542.py index 0f9a3d9ff..ddbefb6f7 100644 --- a/breathecode/mentorship/migrations/0010_auto_20220314_1542.py +++ b/breathecode/mentorship/migrations/0010_auto_20220314_1542.py @@ -6,33 +6,38 @@ class Migration(migrations.Migration): dependencies = [ - ('mentorship', '0009_mentorshipsession_allow_billing'), + ("mentorship", "0009_mentorshipsession_allow_billing"), ] operations = [ migrations.AddField( - model_name='mentorshipbill', - name='ended_at', - field=models.DateTimeField(blank=True, - default=None, - help_text='The bill includes all sessions from started_at to ended_at', - null=True), + model_name="mentorshipbill", + name="ended_at", + field=models.DateTimeField( + blank=True, + default=None, + help_text="The bill includes all sessions from started_at to ended_at", + null=True, + ), ), migrations.AddField( - model_name='mentorshipbill', - name='started_at', - field=models.DateTimeField(blank=True, - default=None, - help_text='The bill includes all sessions from started_at to ended_at', - null=True), + model_name="mentorshipbill", + name="started_at", + field=models.DateTimeField( + blank=True, + default=None, + help_text="The bill includes all sessions from started_at to ended_at", + null=True, + ), ), migrations.AddField( - model_name='mentorshipsession', - name='suggested_accounted_duration', + model_name="mentorshipsession", + name="suggested_accounted_duration", field=models.DurationField( blank=True, default=None, - help_text='The automatic suggested duration to be paid to the mentor for this session', - null=True), + help_text="The automatic suggested duration to be paid to the mentor for this session", + null=True, + ), ), ] diff --git a/breathecode/mentorship/migrations/0011_auto_20220402_0003.py b/breathecode/mentorship/migrations/0011_auto_20220402_0003.py index 9f64e3c83..4a49657e9 100644 --- a/breathecode/mentorship/migrations/0011_auto_20220402_0003.py +++ b/breathecode/mentorship/migrations/0011_auto_20220402_0003.py @@ -6,25 +6,28 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0038_alter_cohort_syllabus_version'), - ('mentorship', '0010_auto_20220314_1542'), + ("admissions", "0038_alter_cohort_syllabus_version"), + ("mentorship", 
"0010_auto_20220314_1542"), ] operations = [ migrations.AlterField( - model_name='mentorprofile', - name='syllabus', - field=models.ManyToManyField(blank=True, - default=None, - help_text='What syllabis is this mentor going to be menting to?', - to='admissions.Syllabus'), + model_name="mentorprofile", + name="syllabus", + field=models.ManyToManyField( + blank=True, + default=None, + help_text="What syllabis is this mentor going to be menting to?", + to="admissions.Syllabus", + ), ), migrations.AlterField( - model_name='mentorshipbill', - name='status', - field=models.CharField(choices=[('DUE', 'Due'), ('APPROVED', 'Approved'), ('PAID', 'Paid'), - ('IGNORED', 'Ignored')], - default='DUE', - max_length=20), + model_name="mentorshipbill", + name="status", + field=models.CharField( + choices=[("DUE", "Due"), ("APPROVED", "Approved"), ("PAID", "Paid"), ("IGNORED", "Ignored")], + default="DUE", + max_length=20, + ), ), ] diff --git a/breathecode/mentorship/migrations/0012_alter_mentorshipsession_is_online.py b/breathecode/mentorship/migrations/0012_alter_mentorshipsession_is_online.py index 38ef00334..e660edbef 100644 --- a/breathecode/mentorship/migrations/0012_alter_mentorshipsession_is_online.py +++ b/breathecode/mentorship/migrations/0012_alter_mentorshipsession_is_online.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('mentorship', '0011_auto_20220402_0003'), + ("mentorship", "0011_auto_20220402_0003"), ] operations = [ migrations.AlterField( - model_name='mentorshipsession', - name='is_online', + model_name="mentorshipsession", + name="is_online", field=models.BooleanField(default=False), ), ] diff --git a/breathecode/mentorship/migrations/0013_auto_20220408_2052.py b/breathecode/mentorship/migrations/0013_auto_20220408_2052.py index c6b1d6083..94632a808 100644 --- a/breathecode/mentorship/migrations/0013_auto_20220408_2052.py +++ b/breathecode/mentorship/migrations/0013_auto_20220408_2052.py @@ -6,25 +6,37 @@ class Migration(migrations.Migration): dependencies = [ - ('mentorship', '0012_alter_mentorshipsession_is_online'), + ("mentorship", "0012_alter_mentorshipsession_is_online"), ] operations = [ migrations.AlterField( - model_name='mentorprofile', - name='status', - field=models.CharField(choices=[('INVITED', 'Invited'), ('ACTIVE', 'Active'), ('UNLISTED', 'Unlisted'), - ('INNACTIVE', 'Innactive')], - default='INVITED', - help_text='Options are: INVITEDACTIVEUNLISTEDINNACTIVE', - max_length=15), + model_name="mentorprofile", + name="status", + field=models.CharField( + choices=[ + ("INVITED", "Invited"), + ("ACTIVE", "Active"), + ("UNLISTED", "Unlisted"), + ("INNACTIVE", "Innactive"), + ], + default="INVITED", + help_text="Options are: INVITEDACTIVEUNLISTEDINNACTIVE", + max_length=15, + ), ), migrations.AlterField( - model_name='mentorshipservice', - name='status', - field=models.CharField(choices=[('DRAFT', 'Draft'), ('ACTIVE', 'Active'), ('UNLISTED', 'Unlisted'), - ('INNACTIVE', 'Innactive')], - default='DRAFT', - max_length=15), + model_name="mentorshipservice", + name="status", + field=models.CharField( + choices=[ + ("DRAFT", "Draft"), + ("ACTIVE", "Active"), + ("UNLISTED", "Unlisted"), + ("INNACTIVE", "Innactive"), + ], + default="DRAFT", + max_length=15, + ), ), ] diff --git a/breathecode/mentorship/migrations/0014_auto_20220719_0759.py b/breathecode/mentorship/migrations/0014_auto_20220719_0759.py index c62cd69ac..58a72d366 100644 --- a/breathecode/mentorship/migrations/0014_auto_20220719_0759.py +++ 
b/breathecode/mentorship/migrations/0014_auto_20220719_0759.py @@ -7,34 +7,32 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0041_cohortuser_watching'), - ('mentorship', '0013_auto_20220408_2052'), + ("admissions", "0041_cohortuser_watching"), + ("mentorship", "0013_auto_20220408_2052"), ] operations = [ migrations.RemoveField( - model_name='mentorprofile', - name='service', + model_name="mentorprofile", + name="service", ), migrations.AddField( - model_name='mentorprofile', - name='academy', - field=models.ForeignKey(default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='admissions.academy'), + model_name="mentorprofile", + name="academy", + field=models.ForeignKey( + default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to="admissions.academy" + ), ), migrations.AddField( - model_name='mentorprofile', - name='services', - field=models.ManyToManyField(to='mentorship.MentorshipService'), + model_name="mentorprofile", + name="services", + field=models.ManyToManyField(to="mentorship.MentorshipService"), ), migrations.AddField( - model_name='mentorshipsession', - name='service', - field=models.ForeignKey(blank=True, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='mentorship.mentorshipservice'), + model_name="mentorshipsession", + name="service", + field=models.ForeignKey( + blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to="mentorship.mentorshipservice" + ), ), ] diff --git a/breathecode/mentorship/migrations/0015_auto_20220817_0355.py b/breathecode/mentorship/migrations/0015_auto_20220817_0355.py index 6ff973181..d64267ed3 100644 --- a/breathecode/mentorship/migrations/0015_auto_20220817_0355.py +++ b/breathecode/mentorship/migrations/0015_auto_20220817_0355.py @@ -6,26 +6,25 @@ class Migration(migrations.Migration): dependencies = [ - ('mentorship', '0014_auto_20220719_0759'), + ("mentorship", "0014_auto_20220719_0759"), ] operations = [ migrations.AddField( - model_name='mentorprofile', - name='one_line_bio', - field=models.TextField(blank=True, - default=None, - help_text='Will be shown to showcase the mentor', - max_length=60, - null=True), + model_name="mentorprofile", + name="one_line_bio", + field=models.TextField( + blank=True, default=None, help_text="Will be shown to showcase the mentor", max_length=60, null=True + ), ), migrations.AddField( - model_name='mentorprofile', - name='rating', + model_name="mentorprofile", + name="rating", field=models.FloatField( blank=True, default=None, - help_text='Automatically filled when new survey responses are collected about this mentor', - null=True), + help_text="Automatically filled when new survey responses are collected about this mentor", + null=True, + ), ), ] diff --git a/breathecode/mentorship/migrations/0016_alter_mentorshipbill_status.py b/breathecode/mentorship/migrations/0016_alter_mentorshipbill_status.py index 9d4f92cc8..29551c636 100644 --- a/breathecode/mentorship/migrations/0016_alter_mentorshipbill_status.py +++ b/breathecode/mentorship/migrations/0016_alter_mentorshipbill_status.py @@ -6,16 +6,23 @@ class Migration(migrations.Migration): dependencies = [ - ('mentorship', '0015_auto_20220817_0355'), + ("mentorship", "0015_auto_20220817_0355"), ] operations = [ migrations.AlterField( - model_name='mentorshipbill', - name='status', - field=models.CharField(choices=[('RECALCULATE', 'Recalculate'), ('DUE', 'Due'), ('APPROVED', 'Approved'), - ('PAID', 'Paid'), ('IGNORED', 'Ignored')], - default='DUE', - max_length=20), + 
model_name="mentorshipbill", + name="status", + field=models.CharField( + choices=[ + ("RECALCULATE", "Recalculate"), + ("DUE", "Due"), + ("APPROVED", "Approved"), + ("PAID", "Paid"), + ("IGNORED", "Ignored"), + ], + default="DUE", + max_length=20, + ), ), ] diff --git a/breathecode/mentorship/migrations/0017_auto_20221130_0504.py b/breathecode/mentorship/migrations/0017_auto_20221130_0504.py index ec8b7b165..95fcdf761 100644 --- a/breathecode/mentorship/migrations/0017_auto_20221130_0504.py +++ b/breathecode/mentorship/migrations/0017_auto_20221130_0504.py @@ -8,67 +8,99 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0047_merge_20220924_0611'), - ('notify', '0010_auto_20220901_0323'), + ("admissions", "0047_merge_20220924_0611"), + ("notify", "0010_auto_20220901_0323"), migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ('mentorship', '0016_alter_mentorshipbill_status'), + ("mentorship", "0016_alter_mentorshipbill_status"), ] operations = [ migrations.AlterField( - model_name='mentorprofile', - name='status', - field=models.CharField(choices=[('INVITED', 'Invited'), ('ACTIVE', 'Active'), ('UNLISTED', 'Unlisted'), - ('INNACTIVE', 'Innactive')], - default='INVITED', - help_text='Options are: INVITED, ACTIVE, UNLISTED, INNACTIVE', - max_length=15), + model_name="mentorprofile", + name="status", + field=models.CharField( + choices=[ + ("INVITED", "Invited"), + ("ACTIVE", "Active"), + ("UNLISTED", "Unlisted"), + ("INNACTIVE", "Innactive"), + ], + default="INVITED", + help_text="Options are: INVITED, ACTIVE, UNLISTED, INNACTIVE", + max_length=15, + ), ), migrations.CreateModel( - name='SupportChannel', + name="SupportChannel", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('slug', models.SlugField(max_length=150)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('academy', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='admissions.academy')), - ('slack_channel', - models.ForeignKey(blank=True, on_delete=django.db.models.deletion.CASCADE, to='notify.slackchannel')), - ('syllabis', models.ManyToManyField(related_name='support_channels', to='admissions.Syllabus')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("slug", models.SlugField(max_length=150)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("academy", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="admissions.academy")), + ( + "slack_channel", + models.ForeignKey( + blank=True, on_delete=django.db.models.deletion.CASCADE, to="notify.slackchannel" + ), + ), + ("syllabis", models.ManyToManyField(related_name="support_channels", to="admissions.Syllabus")), ], ), migrations.CreateModel( - name='SupportAgent', + name="SupportAgent", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('token', - models.CharField(help_text='Used for inviting the user to become a support agent', - max_length=255, - unique=True)), - ('status', - models.CharField(choices=[('INVITED', 'Invited'), ('ACTIVE', 'Active'), ('UNLISTED', 'Unlisted'), - ('INNACTIVE', 'Innactive')], - default='INVITED', - help_text='Options are: INVITED, ACTIVE, UNLISTED, INNACTIVE', - max_length=15)), - ('email', - models.CharField(blank=True, - default=None, - help_text='Only use this if the user 
does not exist on 4geeks already', - max_length=150, - null=True)), - ('one_line_bio', models.TextField(blank=True, default=None, max_length=60, null=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('channel', - models.ForeignKey(null=True, - on_delete=django.db.models.deletion.SET_NULL, - related_name='agents', - to='mentorship.supportchannel')), - ('user', - models.ForeignKey(help_text='If the user does not exist, you can use the email field instead', - on_delete=django.db.models.deletion.CASCADE, - to=settings.AUTH_USER_MODEL)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "token", + models.CharField( + help_text="Used for inviting the user to become a support agent", max_length=255, unique=True + ), + ), + ( + "status", + models.CharField( + choices=[ + ("INVITED", "Invited"), + ("ACTIVE", "Active"), + ("UNLISTED", "Unlisted"), + ("INNACTIVE", "Innactive"), + ], + default="INVITED", + help_text="Options are: INVITED, ACTIVE, UNLISTED, INNACTIVE", + max_length=15, + ), + ), + ( + "email", + models.CharField( + blank=True, + default=None, + help_text="Only use this if the user does not exist on 4geeks already", + max_length=150, + null=True, + ), + ), + ("one_line_bio", models.TextField(blank=True, default=None, max_length=60, null=True)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ( + "channel", + models.ForeignKey( + null=True, + on_delete=django.db.models.deletion.SET_NULL, + related_name="agents", + to="mentorship.supportchannel", + ), + ), + ( + "user", + models.ForeignKey( + help_text="If the user does not exist, you can use the email field instead", + on_delete=django.db.models.deletion.CASCADE, + to=settings.AUTH_USER_MODEL, + ), + ), ], ), ] diff --git a/breathecode/mentorship/migrations/0018_chatbot.py b/breathecode/mentorship/migrations/0018_chatbot.py index d477bc58c..36e502701 100644 --- a/breathecode/mentorship/migrations/0018_chatbot.py +++ b/breathecode/mentorship/migrations/0018_chatbot.py @@ -7,22 +7,22 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0047_merge_20220924_0611'), - ('mentorship', '0017_auto_20221130_0504'), + ("admissions", "0047_merge_20220924_0611"), + ("mentorship", "0017_auto_20221130_0504"), ] operations = [ migrations.CreateModel( - name='ChatBot', + name="ChatBot", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('name', models.CharField(max_length=100, unique=True)), - ('slug', models.SlugField(max_length=100, unique=True)), - ('description', models.TextField(blank=True, default=None, null=True)), - ('api_key', models.CharField(blank=True, max_length=250)), - ('api_organization', models.CharField(blank=True, max_length=250)), - ('academy', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='admissions.academy')), - ('syllabus', models.ManyToManyField(blank=True, to='admissions.Syllabus')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("name", models.CharField(max_length=100, unique=True)), + ("slug", models.SlugField(max_length=100, unique=True)), + ("description", models.TextField(blank=True, default=None, null=True)), + ("api_key", models.CharField(blank=True, max_length=250)), + ("api_organization", models.CharField(blank=True, max_length=250)), + ("academy", 
models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="admissions.academy")), + ("syllabus", models.ManyToManyField(blank=True, to="admissions.Syllabus")), ], ), ] diff --git a/breathecode/mentorship/migrations/0019_alter_supportchannel_slack_channel.py b/breathecode/mentorship/migrations/0019_alter_supportchannel_slack_channel.py index 05be967a5..8cb3a35d1 100644 --- a/breathecode/mentorship/migrations/0019_alter_supportchannel_slack_channel.py +++ b/breathecode/mentorship/migrations/0019_alter_supportchannel_slack_channel.py @@ -7,18 +7,20 @@ class Migration(migrations.Migration): dependencies = [ - ('notify', '0010_auto_20220901_0323'), - ('mentorship', '0018_chatbot'), + ("notify", "0010_auto_20220901_0323"), + ("mentorship", "0018_chatbot"), ] operations = [ migrations.AlterField( - model_name='supportchannel', - name='slack_channel', - field=models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='notify.slackchannel'), + model_name="supportchannel", + name="slack_channel", + field=models.ForeignKey( + blank=True, + default=None, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="notify.slackchannel", + ), ), ] diff --git a/breathecode/mentorship/migrations/0020_alter_mentorshipservice_language.py b/breathecode/mentorship/migrations/0020_alter_mentorshipservice_language.py index d221b2607..22182dcc3 100644 --- a/breathecode/mentorship/migrations/0020_alter_mentorshipservice_language.py +++ b/breathecode/mentorship/migrations/0020_alter_mentorshipservice_language.py @@ -7,16 +7,18 @@ class Migration(migrations.Migration): dependencies = [ - ('mentorship', '0019_alter_supportchannel_slack_channel'), + ("mentorship", "0019_alter_supportchannel_slack_channel"), ] operations = [ migrations.AlterField( - model_name='mentorshipservice', - name='language', - field=models.CharField(default='en', - help_text='ISO 639-1 language code + ISO 3166-1 alpha-2 country code, e.g. en-US', - max_length=5, - validators=[breathecode.utils.validators.language.validate_language_code]), + model_name="mentorshipservice", + name="language", + field=models.CharField( + default="en", + help_text="ISO 639-1 language code + ISO 3166-1 alpha-2 country code, e.g. 
en-US", + max_length=5, + validators=[breathecode.utils.validators.language.validate_language_code], + ), ), ] diff --git a/breathecode/mentorship/migrations/0021_calendlyorganization_calendlywebhook.py b/breathecode/mentorship/migrations/0021_calendlyorganization_calendlywebhook.py index 8f9bceeaf..13d6c1683 100644 --- a/breathecode/mentorship/migrations/0021_calendlyorganization_calendlywebhook.py +++ b/breathecode/mentorship/migrations/0021_calendlyorganization_calendlywebhook.py @@ -7,57 +7,74 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0056_auto_20230317_1657'), - ('mentorship', '0020_alter_mentorshipservice_language'), + ("admissions", "0056_auto_20230317_1657"), + ("mentorship", "0020_alter_mentorshipservice_language"), ] operations = [ migrations.CreateModel( - name='CalendlyOrganization', + name="CalendlyOrganization", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('username', models.CharField(help_text='Calendly username', max_length=100)), - ('access_token', models.TextField(blank=True, default=None, null=True)), - ('hash', models.CharField(max_length=40, unique=True)), - ('sync_status', - models.CharField( - choices=[('PENDING', 'Pending'), ('PERSISTED', 'Persisted'), ('ERROR', 'Error'), - ('WARNING', 'Warning'), ('SYNCHED', 'Synched')], - default='PENDING', - help_text='One of: PENDING, PERSISTED or ERROR depending on how the calendly sync status', - max_length=9)), - ('sync_desc', models.TextField(blank=True, default=None, max_length=255, null=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('academy', - models.ForeignKey(blank=True, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='admissions.academy')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("username", models.CharField(help_text="Calendly username", max_length=100)), + ("access_token", models.TextField(blank=True, default=None, null=True)), + ("hash", models.CharField(max_length=40, unique=True)), + ( + "sync_status", + models.CharField( + choices=[ + ("PENDING", "Pending"), + ("PERSISTED", "Persisted"), + ("ERROR", "Error"), + ("WARNING", "Warning"), + ("SYNCHED", "Synched"), + ], + default="PENDING", + help_text="One of: PENDING, PERSISTED or ERROR depending on how the calendly sync status", + max_length=9, + ), + ), + ("sync_desc", models.TextField(blank=True, default=None, max_length=255, null=True)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ( + "academy", + models.ForeignKey( + blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to="admissions.academy" + ), + ), ], ), migrations.CreateModel( - name='CalendlyWebhook', + name="CalendlyWebhook", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('organization_hash', models.CharField(max_length=50)), - ('created_by', models.CharField(max_length=2500)), - ('event', models.CharField(max_length=100)), - ('called_at', models.DateTimeField()), - ('payload', models.JSONField()), - ('status', - models.CharField(choices=[('PENDING', 'Pending'), ('DONE', 'Done'), ('ERROR', 'Error')], - default='PENDING', - max_length=9)), - ('status_text', models.CharField(blank=True, default=None, max_length=255, null=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - 
('updated_at', models.DateTimeField(auto_now=True)), - ('organization', - models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='mentorship.calendlyorganization')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("organization_hash", models.CharField(max_length=50)), + ("created_by", models.CharField(max_length=2500)), + ("event", models.CharField(max_length=100)), + ("called_at", models.DateTimeField()), + ("payload", models.JSONField()), + ( + "status", + models.CharField( + choices=[("PENDING", "Pending"), ("DONE", "Done"), ("ERROR", "Error")], + default="PENDING", + max_length=9, + ), + ), + ("status_text", models.CharField(blank=True, default=None, max_length=255, null=True)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ( + "organization", + models.ForeignKey( + blank=True, + default=None, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="mentorship.calendlyorganization", + ), + ), ], ), ] diff --git a/breathecode/mentorship/migrations/0022_auto_20230512_2327.py b/breathecode/mentorship/migrations/0022_auto_20230512_2327.py index c706faefb..3710069ce 100644 --- a/breathecode/mentorship/migrations/0022_auto_20230512_2327.py +++ b/breathecode/mentorship/migrations/0022_auto_20230512_2327.py @@ -6,26 +6,22 @@ class Migration(migrations.Migration): dependencies = [ - ('mentorship', '0021_calendlyorganization_calendlywebhook'), + ("mentorship", "0021_calendlyorganization_calendlywebhook"), ] operations = [ migrations.AddField( - model_name='mentorprofile', - name='calendly_uuid', - field=models.CharField(blank=True, - default=None, - help_text='To be used by the calendly API', - max_length=150, - null=True), + model_name="mentorprofile", + name="calendly_uuid", + field=models.CharField( + blank=True, default=None, help_text="To be used by the calendly API", max_length=150, null=True + ), ), migrations.AddField( - model_name='mentorshipsession', - name='calendly_uuid', - field=models.CharField(blank=True, - default=None, - help_text='To be used by the calendly API', - max_length=150, - null=True), + model_name="mentorshipsession", + name="calendly_uuid", + field=models.CharField( + blank=True, default=None, help_text="To be used by the calendly API", max_length=150, null=True + ), ), ] diff --git a/breathecode/mentorship/migrations/0023_auto_20230512_2338.py b/breathecode/mentorship/migrations/0023_auto_20230512_2338.py index 7bcfe7070..2c965fdfb 100644 --- a/breathecode/mentorship/migrations/0023_auto_20230512_2338.py +++ b/breathecode/mentorship/migrations/0023_auto_20230512_2338.py @@ -6,31 +6,27 @@ class Migration(migrations.Migration): dependencies = [ - ('mentorship', '0022_auto_20230512_2327'), + ("mentorship", "0022_auto_20230512_2327"), ] operations = [ migrations.AlterField( - model_name='calendlywebhook', - name='status_text', + model_name="calendlywebhook", + name="status_text", field=models.TextField(blank=True, default=None, null=True), ), migrations.AlterField( - model_name='mentorprofile', - name='calendly_uuid', - field=models.CharField(blank=True, - default=None, - help_text='To be used by the calendly API', - max_length=255, - null=True), + model_name="mentorprofile", + name="calendly_uuid", + field=models.CharField( + blank=True, default=None, help_text="To be used by the calendly API", max_length=255, null=True + ), ), migrations.AlterField( - 
model_name='mentorshipsession', - name='calendly_uuid', - field=models.CharField(blank=True, - default=None, - help_text='To be used by the calendly API', - max_length=255, - null=True), + model_name="mentorshipsession", + name="calendly_uuid", + field=models.CharField( + blank=True, default=None, help_text="To be used by the calendly API", max_length=255, null=True + ), ), ] diff --git a/breathecode/mentorship/migrations/0024_auto_20230821_1922.py b/breathecode/mentorship/migrations/0024_auto_20230821_1922.py index ee1dd16c8..e7575fab5 100644 --- a/breathecode/mentorship/migrations/0024_auto_20230821_1922.py +++ b/breathecode/mentorship/migrations/0024_auto_20230821_1922.py @@ -6,28 +6,33 @@ class Migration(migrations.Migration): dependencies = [ - ('mentorship', '0023_auto_20230512_2338'), + ("mentorship", "0023_auto_20230512_2338"), ] operations = [ migrations.AddField( - model_name='calendlyorganization', - name='max_concurrent_sessions', + model_name="calendlyorganization", + name="max_concurrent_sessions", field=models.IntegerField( default=None, - help_text= - 'For example: Users will only be allowed to book 2 sessions per service at a time, they will have to wait for sessions to complete (or cancel) before booking again' + help_text="For example: Users will only be allowed to book 2 sessions per service at a time, they will have to wait for sessions to complete (or cancel) before booking again", ), ), migrations.AlterField( - model_name='mentorshipsession', - name='status', + model_name="mentorshipsession", + name="status", field=models.CharField( - choices=[('PENDING', 'Pending'), ('STARTED', 'Started'), ('COMPLETED', 'Completed'), - ('CANCELED', 'Canceled'), ('FAILED', 'Failed'), ('IGNORED', 'Ignored')], - default='PENDING', - help_text= - 'Options are: PENDING, STARTED, COMPLETED, CANCELED, FAILED, IGNORED. Ignored sessions will not be billed.', - max_length=15), + choices=[ + ("PENDING", "Pending"), + ("STARTED", "Started"), + ("COMPLETED", "Completed"), + ("CANCELED", "Canceled"), + ("FAILED", "Failed"), + ("IGNORED", "Ignored"), + ], + default="PENDING", + help_text="Options are: PENDING, STARTED, COMPLETED, CANCELED, FAILED, IGNORED. 
Ignored sessions will not be billed.", + max_length=15, + ), ), ] diff --git a/breathecode/mentorship/migrations/0025_alter_calendlyorganization_max_concurrent_sessions.py b/breathecode/mentorship/migrations/0025_alter_calendlyorganization_max_concurrent_sessions.py index 801075ed7..acff80491 100644 --- a/breathecode/mentorship/migrations/0025_alter_calendlyorganization_max_concurrent_sessions.py +++ b/breathecode/mentorship/migrations/0025_alter_calendlyorganization_max_concurrent_sessions.py @@ -6,18 +6,18 @@ class Migration(migrations.Migration): dependencies = [ - ('mentorship', '0024_auto_20230821_1922'), + ("mentorship", "0024_auto_20230821_1922"), ] operations = [ migrations.AlterField( - model_name='calendlyorganization', - name='max_concurrent_sessions', + model_name="calendlyorganization", + name="max_concurrent_sessions", field=models.IntegerField( blank=True, default=None, - help_text= - 'For example: Users will only be allowed to book 2 sessions per service at a time, they will have to wait for sessions to complete (or cancel) before booking again', - null=True), + help_text="For example: Users will only be allowed to book 2 sessions per service at a time, they will have to wait for sessions to complete (or cancel) before booking again", + null=True, + ), ), ] diff --git a/breathecode/mentorship/migrations/0026_calendlyorganization_uri.py b/breathecode/mentorship/migrations/0026_calendlyorganization_uri.py index edd14a2d1..a92966f09 100644 --- a/breathecode/mentorship/migrations/0026_calendlyorganization_uri.py +++ b/breathecode/mentorship/migrations/0026_calendlyorganization_uri.py @@ -6,15 +6,16 @@ class Migration(migrations.Migration): dependencies = [ - ('mentorship', '0025_alter_calendlyorganization_max_concurrent_sessions'), + ("mentorship", "0025_alter_calendlyorganization_max_concurrent_sessions"), ] operations = [ migrations.AddField( - model_name='calendlyorganization', - name='uri', - field=models.URLField(default='https://calendly.com', - help_text='Automatically collected from calendly API'), + model_name="calendlyorganization", + name="uri", + field=models.URLField( + default="https://calendly.com", help_text="Automatically collected from calendly API" + ), preserve_default=False, ), ] diff --git a/breathecode/mentorship/migrations/0027_mentorprofile_availability_report.py b/breathecode/mentorship/migrations/0027_mentorprofile_availability_report.py index c503cf914..0cfc004f2 100644 --- a/breathecode/mentorship/migrations/0027_mentorprofile_availability_report.py +++ b/breathecode/mentorship/migrations/0027_mentorprofile_availability_report.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('mentorship', '0026_calendlyorganization_uri'), + ("mentorship", "0026_calendlyorganization_uri"), ] operations = [ migrations.AddField( - model_name='mentorprofile', - name='availability_report', - field=models.JSONField(blank=True, default=[], help_text='Mentor availability report'), + model_name="mentorprofile", + name="availability_report", + field=models.JSONField(blank=True, default=[], help_text="Mentor availability report"), ), ] diff --git a/breathecode/mentorship/migrations/0028_mentorshipsession_questions_and_answers.py b/breathecode/mentorship/migrations/0028_mentorshipsession_questions_and_answers.py index 1b5cff20e..1f87f68df 100644 --- a/breathecode/mentorship/migrations/0028_mentorshipsession_questions_and_answers.py +++ b/breathecode/mentorship/migrations/0028_mentorshipsession_questions_and_answers.py @@ -6,13 +6,13 @@ class 
Migration(migrations.Migration): dependencies = [ - ('mentorship', '0027_mentorprofile_availability_report'), + ("mentorship", "0027_mentorprofile_availability_report"), ] operations = [ migrations.AddField( - model_name='mentorshipsession', - name='questions_and_answers', + model_name="mentorshipsession", + name="questions_and_answers", field=models.JSONField(blank=True, default=None, null=True), ), ] diff --git a/breathecode/mentorship/migrations/0029_mentorshipservice_video_provider_and_more.py b/breathecode/mentorship/migrations/0029_mentorshipservice_video_provider_and_more.py index 371abe86a..92317caab 100644 --- a/breathecode/mentorship/migrations/0029_mentorshipservice_video_provider_and_more.py +++ b/breathecode/mentorship/migrations/0029_mentorshipservice_video_provider_and_more.py @@ -9,98 +9,128 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0064_academy_legal_name'), - ('mentorship', '0028_mentorshipsession_questions_and_answers'), + ("admissions", "0064_academy_legal_name"), + ("mentorship", "0028_mentorshipsession_questions_and_answers"), ] operations = [ migrations.AddField( - model_name='mentorshipservice', - name='video_provider', - field=models.CharField(blank=True, - choices=[('DAILY', 'Daily'), ('GOOGLE_MEET', 'Google Meet')], - default='GOOGLE_MEET', - max_length=15), + model_name="mentorshipservice", + name="video_provider", + field=models.CharField( + blank=True, + choices=[("DAILY", "Daily"), ("GOOGLE_MEET", "Google Meet")], + default="GOOGLE_MEET", + max_length=15, + ), ), migrations.AlterField( - model_name='mentorshipservice', - name='allow_mentee_to_extend', - field=models.BooleanField(blank=True, - default=None, - help_text='If true, mentees will be able to extend mentorship session'), + model_name="mentorshipservice", + name="allow_mentee_to_extend", + field=models.BooleanField( + blank=True, default=None, help_text="If true, mentees will be able to extend mentorship session" + ), ), migrations.AlterField( - model_name='mentorshipservice', - name='allow_mentors_to_extend', - field=models.BooleanField(blank=True, - default=None, - help_text='If true, mentors will be able to extend mentorship session'), + model_name="mentorshipservice", + name="allow_mentors_to_extend", + field=models.BooleanField( + blank=True, default=None, help_text="If true, mentors will be able to extend mentorship session" + ), ), migrations.AlterField( - model_name='mentorshipservice', - name='duration', - field=models.DurationField(blank=True, - default=None, - help_text='Default duration for mentorship sessions of this service'), + model_name="mentorshipservice", + name="duration", + field=models.DurationField( + blank=True, default=None, help_text="Default duration for mentorship sessions of this service" + ), ), migrations.AlterField( - model_name='mentorshipservice', - name='language', - field=models.CharField(blank=True, - default=None, - help_text='ISO 639-1 language code + ISO 3166-1 alpha-2 country code, e.g. en-US', - max_length=5, - validators=[breathecode.utils.validators.language.validate_language_code]), + model_name="mentorshipservice", + name="language", + field=models.CharField( + blank=True, + default=None, + help_text="ISO 639-1 language code + ISO 3166-1 alpha-2 country code, e.g. 
en-US", + max_length=5, + validators=[breathecode.utils.validators.language.validate_language_code], + ), ), migrations.AlterField( - model_name='mentorshipservice', - name='max_duration', + model_name="mentorshipservice", + name="max_duration", field=models.DurationField( blank=True, default=None, - help_text='Maximum allowed duration or extra time, make it 0 for unlimited meetings'), + help_text="Maximum allowed duration or extra time, make it 0 for unlimited meetings", + ), ), migrations.AlterField( - model_name='mentorshipservice', - name='missed_meeting_duration', + model_name="mentorshipservice", + name="missed_meeting_duration", field=models.DurationField( blank=True, default=None, - help_text="Duration that will be paid when the mentee doesn't come to the session"), + help_text="Duration that will be paid when the mentee doesn't come to the session", + ), ), migrations.CreateModel( - name='AcademyMentorshipSettings', + name="AcademyMentorshipSettings", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('duration', - models.DurationField(default=datetime.timedelta(seconds=3600), - help_text='Default duration for mentorship sessions of this service')), - ('max_duration', - models.DurationField( - default=datetime.timedelta(seconds=7200), - help_text='Maximum allowed duration or extra time, make it 0 for unlimited meetings')), - ('missed_meeting_duration', - models.DurationField( - default=datetime.timedelta(seconds=600), - help_text="Duration that will be paid when the mentee doesn't come to the session")), - ('language', - models.CharField(default='en', - help_text='ISO 639-1 language code + ISO 3166-1 alpha-2 country code, e.g. en-US', - max_length=5, - validators=[breathecode.utils.validators.language.validate_language_code])), - ('allow_mentee_to_extend', - models.BooleanField(default=True, - help_text='If true, mentees will be able to extend mentorship session')), - ('allow_mentors_to_extend', - models.BooleanField(default=True, - help_text='If true, mentors will be able to extend mentorship session')), - ('video_provider', - models.CharField(choices=[('DAILY', 'Daily'), ('GOOGLE_MEET', 'Google Meet')], - default='GOOGLE_MEET', - max_length=15)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('academy', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='admissions.academy')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "duration", + models.DurationField( + default=datetime.timedelta(seconds=3600), + help_text="Default duration for mentorship sessions of this service", + ), + ), + ( + "max_duration", + models.DurationField( + default=datetime.timedelta(seconds=7200), + help_text="Maximum allowed duration or extra time, make it 0 for unlimited meetings", + ), + ), + ( + "missed_meeting_duration", + models.DurationField( + default=datetime.timedelta(seconds=600), + help_text="Duration that will be paid when the mentee doesn't come to the session", + ), + ), + ( + "language", + models.CharField( + default="en", + help_text="ISO 639-1 language code + ISO 3166-1 alpha-2 country code, e.g. 
en-US", + max_length=5, + validators=[breathecode.utils.validators.language.validate_language_code], + ), + ), + ( + "allow_mentee_to_extend", + models.BooleanField( + default=True, help_text="If true, mentees will be able to extend mentorship session" + ), + ), + ( + "allow_mentors_to_extend", + models.BooleanField( + default=True, help_text="If true, mentors will be able to extend mentorship session" + ), + ), + ( + "video_provider", + models.CharField( + choices=[("DAILY", "Daily"), ("GOOGLE_MEET", "Google Meet")], + default="GOOGLE_MEET", + max_length=15, + ), + ), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("academy", models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to="admissions.academy")), ], ), ] diff --git a/breathecode/mentorship/migrations/0030_alter_mentorshipservice_video_provider.py b/breathecode/mentorship/migrations/0030_alter_mentorshipservice_video_provider.py index 13ea932ab..ca46c9fb8 100644 --- a/breathecode/mentorship/migrations/0030_alter_mentorshipservice_video_provider.py +++ b/breathecode/mentorship/migrations/0030_alter_mentorshipservice_video_provider.py @@ -6,16 +6,15 @@ class Migration(migrations.Migration): dependencies = [ - ('mentorship', '0029_mentorshipservice_video_provider_and_more'), + ("mentorship", "0029_mentorshipservice_video_provider_and_more"), ] operations = [ migrations.AlterField( - model_name='mentorshipservice', - name='video_provider', - field=models.CharField(blank=True, - choices=[('DAILY', 'Daily'), ('GOOGLE_MEET', 'Google Meet')], - default=None, - max_length=15), + model_name="mentorshipservice", + name="video_provider", + field=models.CharField( + blank=True, choices=[("DAILY", "Daily"), ("GOOGLE_MEET", "Google Meet")], default=None, max_length=15 + ), ), ] diff --git a/breathecode/mentorship/models.py b/breathecode/mentorship/models.py index c09fd8d21..fe30a6408 100644 --- a/breathecode/mentorship/models.py +++ b/breathecode/mentorship/models.py @@ -14,18 +14,18 @@ class VideoProvider(models.TextChoices): - DAILY = ('DAILY', 'Daily') - GOOGLE_MEET = ('GOOGLE_MEET', 'Google Meet') + DAILY = ("DAILY", "Daily") + GOOGLE_MEET = ("GOOGLE_MEET", "Google Meet") MENTORSHIP_SETTINGS = { - 'duration': timedelta(hours=1), - 'max_duration': timedelta(hours=2), - 'missed_meeting_duration': timedelta(minutes=10), - 'language': 'en', - 'allow_mentee_to_extend': True, - 'allow_mentors_to_extend': True, - 'video_provider': VideoProvider.GOOGLE_MEET, + "duration": timedelta(hours=1), + "max_duration": timedelta(hours=2), + "missed_meeting_duration": timedelta(minutes=10), + "language": "en", + "allow_mentee_to_extend": True, + "allow_mentors_to_extend": True, + "video_provider": VideoProvider.GOOGLE_MEET, } @@ -33,27 +33,35 @@ class AcademyMentorshipSettings(models.Model): VideoProvider = VideoProvider academy = models.OneToOneField(Academy, on_delete=models.CASCADE) - duration = models.DurationField(default=MENTORSHIP_SETTINGS['duration'], - help_text='Default duration for mentorship sessions of this service') + duration = models.DurationField( + default=MENTORSHIP_SETTINGS["duration"], help_text="Default duration for mentorship sessions of this service" + ) max_duration = models.DurationField( - default=MENTORSHIP_SETTINGS['max_duration'], - help_text='Maximum allowed duration or extra time, make it 0 for unlimited meetings') + default=MENTORSHIP_SETTINGS["max_duration"], + help_text="Maximum allowed duration or extra time, make it 0 for unlimited meetings", + 
) missed_meeting_duration = models.DurationField( - default=MENTORSHIP_SETTINGS['missed_meeting_duration'], - help_text='Duration that will be paid when the mentee doesn\'t come to the session') + default=MENTORSHIP_SETTINGS["missed_meeting_duration"], + help_text="Duration that will be paid when the mentee doesn't come to the session", + ) - language = models.CharField(max_length=5, - default=MENTORSHIP_SETTINGS['language'], - validators=[validate_language_code], - help_text='ISO 639-1 language code + ISO 3166-1 alpha-2 country code, e.g. en-US') + language = models.CharField( + max_length=5, + default=MENTORSHIP_SETTINGS["language"], + validators=[validate_language_code], + help_text="ISO 639-1 language code + ISO 3166-1 alpha-2 country code, e.g. en-US", + ) - allow_mentee_to_extend = models.BooleanField(default=MENTORSHIP_SETTINGS['allow_mentee_to_extend'], - help_text='If true, mentees will be able to extend mentorship session') + allow_mentee_to_extend = models.BooleanField( + default=MENTORSHIP_SETTINGS["allow_mentee_to_extend"], + help_text="If true, mentees will be able to extend mentorship session", + ) allow_mentors_to_extend = models.BooleanField( - default=MENTORSHIP_SETTINGS['allow_mentors_to_extend'], - help_text='If true, mentors will be able to extend mentorship session') + default=MENTORSHIP_SETTINGS["allow_mentors_to_extend"], + help_text="If true, mentors will be able to extend mentorship session", + ) video_provider = models.CharField(max_length=15, choices=VideoProvider, default=VideoProvider.GOOGLE_MEET) @@ -74,39 +82,44 @@ class MentorshipService(models.Model): VideoProvider = VideoProvider class Status(models.TextChoices): - DRAFT = ('DRAFT', 'Draft') - ACTIVE = ('ACTIVE', 'Active') - UNLISTED = ('UNLISTED', 'Unlisted') - INNACTIVE = ('INNACTIVE', 'Innactive') + DRAFT = ("DRAFT", "Draft") + ACTIVE = ("ACTIVE", "Active") + UNLISTED = ("UNLISTED", "Unlisted") + INNACTIVE = ("INNACTIVE", "Innactive") slug = models.SlugField(max_length=150, unique=True) name = models.CharField(max_length=150) logo_url = models.CharField(max_length=150, default=None, blank=True, null=True) description = models.TextField(max_length=500, default=None, blank=True, null=True) - duration = models.DurationField(default=None, - blank=True, - help_text='Default duration for mentorship sessions of this service') + duration = models.DurationField( + default=None, blank=True, help_text="Default duration for mentorship sessions of this service" + ) max_duration = models.DurationField( - default=None, blank=True, help_text='Maximum allowed duration or extra time, make it 0 for unlimited meetings') + default=None, blank=True, help_text="Maximum allowed duration or extra time, make it 0 for unlimited meetings" + ) missed_meeting_duration = models.DurationField( - default=None, blank=True, help_text='Duration that will be paid when the mentee doesn\'t come to the session') + default=None, blank=True, help_text="Duration that will be paid when the mentee doesn't come to the session" + ) status = models.CharField(max_length=15, choices=Status, default=Status.DRAFT) - language = models.CharField(max_length=5, - default=None, - blank=True, - validators=[validate_language_code], - help_text='ISO 639-1 language code + ISO 3166-1 alpha-2 country code, e.g. en-US') + language = models.CharField( + max_length=5, + default=None, + blank=True, + validators=[validate_language_code], + help_text="ISO 639-1 language code + ISO 3166-1 alpha-2 country code, e.g. 
en-US", + ) - allow_mentee_to_extend = models.BooleanField(blank=True, - default=None, - help_text='If true, mentees will be able to extend mentorship session') + allow_mentee_to_extend = models.BooleanField( + blank=True, default=None, help_text="If true, mentees will be able to extend mentorship session" + ) allow_mentors_to_extend = models.BooleanField( - default=None, blank=True, help_text='If true, mentors will be able to extend mentorship session') + default=None, blank=True, help_text="If true, mentors will be able to extend mentorship session" + ) academy = models.ForeignKey(Academy, on_delete=models.CASCADE) video_provider = models.CharField(max_length=15, default=None, choices=VideoProvider, blank=True) @@ -115,7 +128,7 @@ class Status(models.TextChoices): updated_at = models.DateTimeField(auto_now=True, editable=False) def __str__(self): - return f'{self.name} ({self.slug})' + return f"{self.name} ({self.slug})" def clean(self) -> None: fetched = False @@ -145,40 +158,44 @@ class SupportChannel(models.Model): slug = models.SlugField(max_length=150) slack_channel = models.ForeignKey(SlackChannel, on_delete=models.CASCADE, blank=True, default=None, null=True) academy = models.ForeignKey(Academy, on_delete=models.CASCADE) - syllabis = models.ManyToManyField(Syllabus, related_name='support_channels') + syllabis = models.ManyToManyField(Syllabus, related_name="support_channels") created_at = models.DateTimeField(auto_now_add=True, editable=False) updated_at = models.DateTimeField(auto_now=True, editable=False) class MentorStatus(models.TextChoices): - INVITED = ('INVITED', 'Invited') - ACTIVE = ('ACTIVE', 'Active') - UNLISTED = ('UNLISTED', 'Unlisted') - INNACTIVE = ('INNACTIVE', 'Innactive') + INVITED = ("INVITED", "Invited") + ACTIVE = ("ACTIVE", "Active") + UNLISTED = ("UNLISTED", "Unlisted") + INNACTIVE = ("INNACTIVE", "Innactive") class SupportAgent(models.Model): - user = models.ForeignKey(User, - on_delete=models.CASCADE, - help_text='If the user does not exist, you can use the email field instead') - token = models.CharField(max_length=255, - unique=True, - help_text='Used for inviting the user to become a support agent') - status = models.CharField(max_length=15, - choices=MentorStatus, - default=MentorStatus.INVITED, - help_text=f'Options are: {", ".join([key for key,label in MentorStatus.choices])}') - - email = models.CharField(blank=True, - max_length=150, - null=True, - default=None, - help_text='Only use this if the user does not exist on 4geeks already') + user = models.ForeignKey( + User, on_delete=models.CASCADE, help_text="If the user does not exist, you can use the email field instead" + ) + token = models.CharField( + max_length=255, unique=True, help_text="Used for inviting the user to become a support agent" + ) + status = models.CharField( + max_length=15, + choices=MentorStatus, + default=MentorStatus.INVITED, + help_text=f'Options are: {", ".join([key for key,label in MentorStatus.choices])}', + ) + + email = models.CharField( + blank=True, + max_length=150, + null=True, + default=None, + help_text="Only use this if the user does not exist on 4geeks already", + ) one_line_bio = models.TextField(max_length=60, default=None, blank=True, null=True) - channel = models.ForeignKey(SupportChannel, related_name='agents', on_delete=models.SET_NULL, null=True) + channel = models.ForeignKey(SupportChannel, related_name="agents", on_delete=models.SET_NULL, null=True) created_at = models.DateTimeField(auto_now_add=True, editable=False) updated_at = 
models.DateTimeField(auto_now=True, editable=False) @@ -195,66 +212,70 @@ class MentorProfile(models.Model): slug = models.SlugField( max_length=150, unique=True, - help_text='Will be used as unique public booking URL with the students, for example: 4geeks.com/meet/bob') + help_text="Will be used as unique public booking URL with the students, for example: 4geeks.com/meet/bob", + ) price_per_hour = models.FloatField() - one_line_bio = models.TextField(max_length=60, - default=None, - blank=True, - null=True, - help_text='Will be shown to showcase the mentor') + one_line_bio = models.TextField( + max_length=60, default=None, blank=True, null=True, help_text="Will be shown to showcase the mentor" + ) bio = models.TextField(max_length=500, default=None, blank=True, null=True) services = models.ManyToManyField(to=MentorshipService) academy = models.ForeignKey(Academy, on_delete=models.CASCADE, null=True, default=None) - timezone = models.CharField(max_length=50, - null=True, - default=None, - help_text='Knowing the mentor\'s timezone helps with more accurrate booking') + timezone = models.CharField( + max_length=50, + null=True, + default=None, + help_text="Knowing the mentor's timezone helps with more accurrate booking", + ) online_meeting_url = models.URLField( blank=True, null=True, default=None, - help_text="If set, it will be default for all session's unless the session.online_meeting_url is set") + help_text="If set, it will be default for all session's unless the session.online_meeting_url is set", + ) - token = models.CharField(max_length=255, unique=True, help_text='Used for inviting the user to become a mentor') + token = models.CharField(max_length=255, unique=True, help_text="Used for inviting the user to become a mentor") booking_url = models.URLField( blank=True, null=True, default=None, - help_text='URL where this mentor profile can be booked, E.g: calendly.com/my_username') + help_text="URL where this mentor profile can be booked, E.g: calendly.com/my_username", + ) - syllabus = models.ManyToManyField(to=Syllabus, - blank=True, - default=None, - help_text='What syllabis is this mentor going to be menting to?') + syllabus = models.ManyToManyField( + to=Syllabus, blank=True, default=None, help_text="What syllabis is this mentor going to be menting to?" 
+ ) - status = models.CharField(max_length=15, - choices=MentorStatus, - default=MentorStatus.INVITED, - help_text=f'Options are: {", ".join([key for key,label in MentorStatus.choices])}') + status = models.CharField( + max_length=15, + choices=MentorStatus, + default=MentorStatus.INVITED, + help_text=f'Options are: {", ".join([key for key,label in MentorStatus.choices])}', + ) - email = models.CharField(blank=True, - max_length=150, - null=True, - default=None, - help_text='Only use this if the user does not exist on breathecode already') + email = models.CharField( + blank=True, + max_length=150, + null=True, + default=None, + help_text="Only use this if the user does not exist on breathecode already", + ) - availability_report = models.JSONField(blank=True, null=False, default=[], help_text='Mentor availability report') + availability_report = models.JSONField(blank=True, null=False, default=[], help_text="Mentor availability report") - user = models.ForeignKey(User, - on_delete=models.CASCADE, - help_text='If the user does not exist, you can use the email field instead') + user = models.ForeignKey( + User, on_delete=models.CASCADE, help_text="If the user does not exist, you can use the email field instead" + ) - calendly_uuid = models.CharField(blank=True, - max_length=255, - null=True, - default=None, - help_text='To be used by the calendly API') + calendly_uuid = models.CharField( + blank=True, max_length=255, null=True, default=None, help_text="To be used by the calendly API" + ) created_at = models.DateTimeField(auto_now_add=True, editable=False) updated_at = models.DateTimeField(auto_now=True, editable=False) @@ -263,63 +284,62 @@ class MentorProfile(models.Model): null=True, blank=True, default=None, - help_text='Automatically filled when new survey responses are collected about this mentor') + help_text="Automatically filled when new survey responses are collected about this mentor", + ) def save(self, *args, **kwargs): utc_now = timezone.now() - if self.token is None or self.token == '': - self.token = hashlib.sha1((str(self.user.id) + str(utc_now)).encode('UTF-8')).hexdigest() + if self.token is None or self.token == "": + self.token = hashlib.sha1((str(self.user.id) + str(utc_now)).encode("UTF-8")).hexdigest() super().save(*args, **kwargs) # Call the "real" save() method. def __str__(self): name = self.name - if self.user is not None and self.user.first_name is not None and self.user.first_name != '': - name = self.user.first_name + ' ' + self.user.last_name + if self.user is not None and self.user.first_name is not None and self.user.first_name != "": + name = self.user.first_name + " " + self.user.last_name - return f'{name} ({self.id})' + return f"{name} ({self.id})" -RECALCULATE = 'RECALCULATE' -DUE = 'DUE' -APPROVED = 'APPROVED' -PAID = 'PAID' -IGNORED = 'IGNORED' +RECALCULATE = "RECALCULATE" +DUE = "DUE" +APPROVED = "APPROVED" +PAID = "PAID" +IGNORED = "IGNORED" BILL_STATUS = ( - (RECALCULATE, 'Recalculate'), - (DUE, 'Due'), - (APPROVED, 'Approved'), - (PAID, 'Paid'), - (IGNORED, 'Ignored'), + (RECALCULATE, "Recalculate"), + (DUE, "Due"), + (APPROVED, "Approved"), + (PAID, "Paid"), + (IGNORED, "Ignored"), ) class MentorshipBill(models.Model): status = models.CharField(max_length=20, choices=BILL_STATUS, default=DUE) - #FIXME: it's right? - status_mesage = models.TextField(blank=True, - null=True, - default=None, - help_text='Any important information about the bill') + # FIXME: it's right? 
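The MentorProfile.save() override shown above only fills token when it is empty, hashing the user id plus the current timestamp with SHA-1. A standalone approximation of that scheme, in plain Python; the real method uses django.utils.timezone.now() and self.user.id, and build_mentor_token is only an illustrative name:

import hashlib
from datetime import datetime, timezone


def build_mentor_token(user_id, now=None):
    """Return a 40-character hex token derived from the user id and a timestamp."""
    now = now or datetime.now(timezone.utc)
    return hashlib.sha1((str(user_id) + str(now)).encode("UTF-8")).hexdigest()


# Tokens differ per call because the timestamp changes.
print(build_mentor_token(42))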
+ status_mesage = models.TextField( + blank=True, null=True, default=None, help_text="Any important information about the bill" + ) total_duration_in_minutes = models.FloatField(default=0) total_duration_in_hours = models.FloatField(default=0) total_price = models.FloatField(default=0) overtime_minutes = models.FloatField( - default=0, help_text='Additional time mentorships took based on the expected default duration') + default=0, help_text="Additional time mentorships took based on the expected default duration" + ) academy = models.ForeignKey(Academy, on_delete=models.CASCADE, null=True, default=None) - started_at = models.DateTimeField(blank=True, - null=True, - default=None, - help_text='The bill includes all sessions from started_at to ended_at') - ended_at = models.DateTimeField(blank=True, - null=True, - default=None, - help_text='The bill includes all sessions from started_at to ended_at') + started_at = models.DateTimeField( + blank=True, null=True, default=None, help_text="The bill includes all sessions from started_at to ended_at" + ) + ended_at = models.DateTimeField( + blank=True, null=True, default=None, help_text="The bill includes all sessions from started_at to ended_at" + ) reviewer = models.ForeignKey(User, on_delete=models.CASCADE, null=True, default=None) mentor = models.ForeignKey(MentorProfile, on_delete=models.CASCADE) @@ -329,19 +349,19 @@ class MentorshipBill(models.Model): updated_at = models.DateTimeField(auto_now=True, editable=False) -PENDING = 'PENDING' -STARTED = 'STARTED' -COMPLETED = 'COMPLETED' -FAILED = 'FAILED' -IGNORED = 'IGNORED' -CANCELED = 'CANCELED' +PENDING = "PENDING" +STARTED = "STARTED" +COMPLETED = "COMPLETED" +FAILED = "FAILED" +IGNORED = "IGNORED" +CANCELED = "CANCELED" MENTORSHIP_STATUS = ( - (PENDING, 'Pending'), - (STARTED, 'Started'), - (COMPLETED, 'Completed'), - (CANCELED, 'Canceled'), - (FAILED, 'Failed'), - (IGNORED, 'Ignored'), # will not be included on the bills + (PENDING, "Pending"), + (STARTED, "Started"), + (COMPLETED, "Completed"), + (CANCELED, "Canceled"), + (FAILED, "Failed"), + (IGNORED, "Ignored"), # will not be included on the bills ) @@ -351,11 +371,9 @@ def __init__(self, *args, **kwargs): super(MentorshipSession, self).__init__(*args, **kwargs) self.__old_status = self.status - name = models.CharField(max_length=255, - help_text='Room name, used on daily.co', - blank=True, - null=True, - default=None) + name = models.CharField( + max_length=255, help_text="Room name, used on daily.co", blank=True, null=True, default=None + ) is_online = models.BooleanField(default=False) latitude = models.FloatField(blank=True, null=True, default=None) @@ -365,84 +383,78 @@ def __init__(self, *args, **kwargs): service = models.ForeignKey(MentorshipService, on_delete=models.CASCADE, blank=True, null=True) mentee = models.ForeignKey(User, on_delete=models.CASCADE, blank=True, null=True, default=None) - online_meeting_url = models.URLField(blank=True, - null=True, - default=None, - help_text='Overrides the mentor.online_meeting_url if set') + online_meeting_url = models.URLField( + blank=True, null=True, default=None, help_text="Overrides the mentor.online_meeting_url if set" + ) online_recording_url = models.URLField( blank=True, null=True, default=None, - help_text='We encourace the mentors to record the session and share them with the students') + help_text="We encourace the mentors to record the session and share them with the students", + ) status = models.CharField( max_length=15, choices=MENTORSHIP_STATUS, default=PENDING, - 
help_text= - f'Options are: {", ".join([key for key,label in MENTORSHIP_STATUS])}. Ignored sessions will not be billed.') + help_text=f'Options are: {", ".join([key for key,label in MENTORSHIP_STATUS])}. Ignored sessions will not be billed.', + ) status_message = models.TextField(default=None, null=True, blank=True) - allow_billing = models.BooleanField(default=True, - help_text='If false it will not be included when generating mentorship bills') - bill = models.ForeignKey(MentorshipBill, - on_delete=models.SET_NULL, - null=True, - default=None, - blank=True, - help_text='If null, it has not been billed by the mentor yet') + allow_billing = models.BooleanField( + default=True, help_text="If false it will not be included when generating mentorship bills" + ) + bill = models.ForeignKey( + MentorshipBill, + on_delete=models.SET_NULL, + null=True, + default=None, + blank=True, + help_text="If null, it has not been billed by the mentor yet", + ) suggested_accounted_duration = models.DurationField( blank=True, null=True, default=None, - help_text='The automatic suggested duration to be paid to the mentor for this session') - - accounted_duration = models.DurationField(blank=True, - null=True, - default=None, - help_text='The duration that will be paid to the mentor for this session') - - agenda = models.TextField(blank=True, null=True, default=None, help_text='What will this mentorship be about') - summary = models.TextField(blank=True, - null=True, - default=None, - help_text='Describe briefly what happened at the mentorship session') - - starts_at = models.DateTimeField(blank=True, null=True, default=None, help_text='Scheduled start date') - ends_at = models.DateTimeField(blank=True, - null=True, - default=None, - help_text='Scheduled end date, will be used as meeting expiration as well') - - started_at = models.DateTimeField(blank=True, - null=True, - default=None, - help_text='Real start date (only if it started)') - ended_at = models.DateTimeField(blank=True, - null=True, - default=None, - help_text='Real start date (only if it started)') - - mentor_joined_at = models.DateTimeField(blank=True, - null=True, - default=None, - help_text='Exact moment the mentor joined the meeting for the first time') - - mentor_left_at = models.DateTimeField(blank=True, - null=True, - default=None, - help_text='Exact moment the mentor left the meeting for the last time') - - mentee_left_at = models.DateTimeField(blank=True, - null=True, - default=None, - help_text='Exact moment the mentee left the meeting for the last time') - - calendly_uuid = models.CharField(blank=True, - max_length=255, - null=True, - default=None, - help_text='To be used by the calendly API') + help_text="The automatic suggested duration to be paid to the mentor for this session", + ) + + accounted_duration = models.DurationField( + blank=True, null=True, default=None, help_text="The duration that will be paid to the mentor for this session" + ) + + agenda = models.TextField(blank=True, null=True, default=None, help_text="What will this mentorship be about") + summary = models.TextField( + blank=True, null=True, default=None, help_text="Describe briefly what happened at the mentorship session" + ) + + starts_at = models.DateTimeField(blank=True, null=True, default=None, help_text="Scheduled start date") + ends_at = models.DateTimeField( + blank=True, null=True, default=None, help_text="Scheduled end date, will be used as meeting expiration as well" + ) + + started_at = models.DateTimeField( + blank=True, null=True, default=None, 
help_text="Real start date (only if it started)" + ) + ended_at = models.DateTimeField( + blank=True, null=True, default=None, help_text="Real start date (only if it started)" + ) + + mentor_joined_at = models.DateTimeField( + blank=True, null=True, default=None, help_text="Exact moment the mentor joined the meeting for the first time" + ) + + mentor_left_at = models.DateTimeField( + blank=True, null=True, default=None, help_text="Exact moment the mentor left the meeting for the last time" + ) + + mentee_left_at = models.DateTimeField( + blank=True, null=True, default=None, help_text="Exact moment the mentee left the meeting for the last time" + ) + + calendly_uuid = models.CharField( + blank=True, max_length=255, null=True, default=None, help_text="To be used by the calendly API" + ) questions_and_answers = models.JSONField(null=True, blank=True, default=None) @@ -450,7 +462,7 @@ def __init__(self, *args, **kwargs): updated_at = models.DateTimeField(auto_now=True, editable=False) def __str__(self): - return f'(Session {self.id} with {str(self.mentor)} and {str(self.mentee)})' + return f"(Session {self.id} with {str(self.mentor)} and {str(self.mentee)})" def save(self, *args, **kwargs): @@ -480,32 +492,31 @@ def __str__(self): return self.name -PENDING = 'PENDING' -PERSISTED = 'PERSISTED' -ERROR = 'ERROR' -WARNING = 'WARNING' -SYNCHED = 'SYNCHED' +PENDING = "PENDING" +PERSISTED = "PERSISTED" +ERROR = "ERROR" +WARNING = "WARNING" +SYNCHED = "SYNCHED" SYNC_STATUS = ( - (PENDING, 'Pending'), - (PERSISTED, 'Persisted'), - (ERROR, 'Error'), - (WARNING, 'Warning'), - (SYNCHED, 'Synched'), + (PENDING, "Pending"), + (PERSISTED, "Persisted"), + (ERROR, "Error"), + (WARNING, "Warning"), + (SYNCHED, "Synched"), ) class CalendlyOrganization(models.Model): - username = models.CharField(max_length=100, help_text='Calendly username') + username = models.CharField(max_length=100, help_text="Calendly username") academy = models.ForeignKey(Academy, on_delete=models.CASCADE, blank=True, null=True) access_token = models.TextField(blank=True, null=True, default=None) - uri = models.URLField(help_text='Automatically collected from calendly API') + uri = models.URLField(help_text="Automatically collected from calendly API") max_concurrent_sessions = models.IntegerField( default=None, blank=True, null=True, - help_text= - 'For example: Users will only be allowed to book 2 sessions per service at a time, they will have to wait for sessions to complete (or cancel) before booking again' + help_text="For example: Users will only be allowed to book 2 sessions per service at a time, they will have to wait for sessions to complete (or cancel) before booking again", ) # this should be use in the future to create automatically the permalinks @@ -515,14 +526,15 @@ class CalendlyOrganization(models.Model): max_length=9, choices=SYNC_STATUS, default=PENDING, - help_text='One of: PENDING, PERSISTED or ERROR depending on how the calendly sync status') + help_text="One of: PENDING, PERSISTED or ERROR depending on how the calendly sync status", + ) sync_desc = models.TextField(max_length=255, null=True, default=None, blank=True) created_at = models.DateTimeField(auto_now_add=True, editable=False) updated_at = models.DateTimeField(auto_now=True, editable=False) def __str__(self): - return self.username or 'Nameless calendly org' + return self.username or "Nameless calendly org" def save(self, *args, **kwargs): if not self.pk: @@ -536,11 +548,11 @@ def reset_hash(self): # PENDING = 'PENDING' -DONE = 'DONE' +DONE = "DONE" 
WEBHOOK_STATUS = ( - (PENDING, 'Pending'), - (DONE, 'Done'), - (ERROR, 'Error'), + (PENDING, "Pending"), + (DONE, "Done"), + (ERROR, "Error"), ) @@ -551,11 +563,9 @@ class CalendlyWebhook(models.Model): called_at = models.DateTimeField() payload = models.JSONField() - organization = models.ForeignKey(CalendlyOrganization, - on_delete=models.CASCADE, - null=True, - default=None, - blank=True) + organization = models.ForeignKey( + CalendlyOrganization, on_delete=models.CASCADE, null=True, default=None, blank=True + ) status = models.CharField(max_length=9, choices=WEBHOOK_STATUS, default=PENDING) status_text = models.TextField(default=None, null=True, blank=True) @@ -564,4 +574,4 @@ class CalendlyWebhook(models.Model): updated_at = models.DateTimeField(auto_now=True, editable=False) def __str__(self): - return f'Event {self.event} {self.status} => {self.created_by}' + return f"Event {self.event} {self.status} => {self.created_by}" diff --git a/breathecode/mentorship/permissions/consumers.py b/breathecode/mentorship/permissions/consumers.py index 30ccfd90b..ffd033a94 100644 --- a/breathecode/mentorship/permissions/consumers.py +++ b/breathecode/mentorship/permissions/consumers.py @@ -14,86 +14,113 @@ def mentorship_service_by_url_param(context: ServiceContext, args: tuple, kwargs: dict) -> tuple[dict, tuple, dict]: - context['price'] = 0 - request = context['request'] + context["price"] = 0 + request = context["request"] consumable = None lang = get_user_language(request) - slug = kwargs.get('mentor_slug') + slug = kwargs.get("mentor_slug") mentor_profile = MentorProfile.objects.filter(slug=slug).first() if mentor_profile is None: - raise ValidationException(translation(lang, - en=f'No mentor found with slug {slug}', - es=f'No se encontró mentor con slug {slug}'), - code=404) + raise ValidationException( + translation(lang, en=f"No mentor found with slug {slug}", es=f"No se encontró mentor con slug {slug}"), + code=404, + ) - slug = kwargs.get('service_slug') + slug = kwargs.get("service_slug") mentorship_service = MentorshipService.objects.filter(slug=slug).first() if mentorship_service is None: - raise ValidationException(translation(lang, - en=f'No service found with slug {slug}', - es=f'No se encontró el servicio con slug {slug}'), - code=404) + raise ValidationException( + translation( + lang, en=f"No service found with slug {slug}", es=f"No se encontró el servicio con slug {slug}" + ), + code=404, + ) - kwargs['mentor_profile'] = mentor_profile - kwargs['mentorship_service'] = mentorship_service + kwargs["mentor_profile"] = mentor_profile + kwargs["mentorship_service"] = mentorship_service - del kwargs['mentor_slug'] - del kwargs['service_slug'] + del kwargs["mentor_slug"] + del kwargs["service_slug"] # avoid do more stuff if it's a consumption session - if context['is_consumption_session']: + if context["is_consumption_session"]: return (context, args, kwargs) - context['request'] + context["request"] is_saas = mentorship_service and mentorship_service.academy.available_as_saas # avoid call LaunchDarkly if mentorship_service is empty if mentor_profile.user.id != request.user.id and is_saas: - context['price'] = 1 + context["price"] = 1 - if context['price'] == 0 and is_no_saas_student_up_to_date_in_any_cohort(context['request'].user, - academy=mentor_profile.academy) is False: + if ( + context["price"] == 0 + and is_no_saas_student_up_to_date_in_any_cohort(context["request"].user, academy=mentor_profile.academy) + is False + ): raise PaymentException( - translation(lang, - en='You 
can\'t access this asset because your finantial status is not up to date', - es='No puedes acceder a este recurso porque tu estado financiero no está al dia', - slug='cohort-user-status-later')) - - context['consumables'] = context['consumables'].filter( - mentorship_service_set__mentorship_services=mentorship_service) - - if context['price']: - context['lifetime'] = mentorship_service.max_duration - - if (mentor_profile.user.id == request.user.id and is_saas and (mentee := request.GET.get('mentee')) - and not mentee.isdigit()): - raise ValidationException(translation(lang, en='mentee must be a number', es='mentee debe ser un número'), - code=400) - - if (mentor_profile.user.id == request.user.id and is_saas and mentee - and not (mentee := User.objects.filter(id=mentee).first())): - raise ValidationException(translation(lang, - en=f'Mentee not found with id {mentee}', - es=f'No se encontró el mentee con id {mentee}'), - code=400) - - if (mentor_profile.user.id == request.user.id and is_saas and mentee and not (consumable := Consumable.get( - lang=lang, - user=mentee, - service=context['service'], - extra={'mentorship_service_set__mentorship_services': mentorship_service}, - ))): - - raise ValidationException(translation( - lang, - en=f'Mentee do not have enough credits to access this service: {context["service"]}', - es='El mentee no tiene suficientes créditos para acceder a este servicio: ' - f'{context["service"]}'), - slug='mentee-not-enough-consumables', - code=402) + translation( + lang, + en="You can't access this asset because your finantial status is not up to date", + es="No puedes acceder a este recurso porque tu estado financiero no está al dia", + slug="cohort-user-status-later", + ) + ) + + context["consumables"] = context["consumables"].filter( + mentorship_service_set__mentorship_services=mentorship_service + ) + + if context["price"]: + context["lifetime"] = mentorship_service.max_duration + + if ( + mentor_profile.user.id == request.user.id + and is_saas + and (mentee := request.GET.get("mentee")) + and not mentee.isdigit() + ): + raise ValidationException( + translation(lang, en="mentee must be a number", es="mentee debe ser un número"), code=400 + ) + + if ( + mentor_profile.user.id == request.user.id + and is_saas + and mentee + and not (mentee := User.objects.filter(id=mentee).first()) + ): + raise ValidationException( + translation(lang, en=f"Mentee not found with id {mentee}", es=f"No se encontró el mentee con id {mentee}"), + code=400, + ) + + if ( + mentor_profile.user.id == request.user.id + and is_saas + and mentee + and not ( + consumable := Consumable.get( + lang=lang, + user=mentee, + service=context["service"], + extra={"mentorship_service_set__mentorship_services": mentorship_service}, + ) + ) + ): + + raise ValidationException( + translation( + lang, + en=f'Mentee do not have enough credits to access this service: {context["service"]}', + es="El mentee no tiene suficientes créditos para acceder a este servicio: " f'{context["service"]}', + ), + slug="mentee-not-enough-consumables", + code=402, + ) if consumable: session = ConsumptionSession.build_session(request, consumable, mentorship_service.max_duration, mentee) diff --git a/breathecode/mentorship/permissions/contexts.py b/breathecode/mentorship/permissions/contexts.py index 1093046c7..3b114f9e3 100644 --- a/breathecode/mentorship/permissions/contexts.py +++ b/breathecode/mentorship/permissions/contexts.py @@ -3,15 +3,15 @@ def mentorship_service(client: LaunchDarkly, mentorship_service: 
MentorshipService): - key = f'{mentorship_service.id}' - name = f'{mentorship_service.name} ({mentorship_service.slug})' - kind = 'mentoring-service' + key = f"{mentorship_service.id}" + name = f"{mentorship_service.name} ({mentorship_service.slug})" + kind = "mentoring-service" context = { - 'id': mentorship_service.id, - 'slug': mentorship_service.slug, - 'max_duration': mentorship_service.max_duration, - 'language': mentorship_service.language, - 'academy': mentorship_service.academy.slug, + "id": mentorship_service.id, + "slug": mentorship_service.slug, + "max_duration": mentorship_service.max_duration, + "language": mentorship_service.language, + "academy": mentorship_service.academy.slug, } return client.context(key, name, kind, context) diff --git a/breathecode/mentorship/permissions/flags.py b/breathecode/mentorship/permissions/flags.py index 9200a91e2..20c7c9e82 100644 --- a/breathecode/mentorship/permissions/flags.py +++ b/breathecode/mentorship/permissions/flags.py @@ -6,7 +6,7 @@ from breathecode.services import LaunchDarkly -__all__ = ['api'] +__all__ = ["api"] class Release: @@ -21,7 +21,7 @@ def enable_consume_mentorships(user: User, mentorship_service: MentorshipService context = ld.join_contexts(user_context, mentorship_service_context, academy_context) - return ld.get('api.release.enable_consume_mentorships', context, False) + return ld.get("api.release.enable_consume_mentorships", context, False) class API: diff --git a/breathecode/mentorship/serializers.py b/breathecode/mentorship/serializers.py index 3a426870b..98468c6c9 100644 --- a/breathecode/mentorship/serializers.py +++ b/breathecode/mentorship/serializers.py @@ -40,6 +40,7 @@ class AnswerSmallSerializer(serpy.Serializer): class ProfileSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. avatar_url = serpy.Field() github_username = serpy.Field() @@ -47,12 +48,14 @@ class ProfileSerializer(serpy.Serializer): class ProfilePublicSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. avatar_url = serpy.Field() class GetSyllabusSmallSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. id = serpy.Field() slug = serpy.Field() @@ -217,6 +220,7 @@ def get_services(self, obj): class GETBillSmallSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. id = serpy.Field() status = serpy.Field() @@ -235,6 +239,7 @@ class GETBillSmallSerializer(serpy.Serializer): class BigBillSerializer(GETBillSmallSerializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. 
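These serpy serializers are read-only: serpy.Field() copies an attribute verbatim, while serpy.MethodField() resolves through a get_<name>() method, as get_overtime_hours() does below. A minimal sketch of that pattern, assuming the serpy package is installed and using a plain object in place of a Django model instance:

import serpy


class Bill:
    def __init__(self, id, overtime_minutes):
        self.id = id
        self.overtime_minutes = overtime_minutes


class BillSketchSerializer(serpy.Serializer):
    # Field reads the attribute as-is; a subclass like IntField would also coerce it.
    id = serpy.Field()
    overtime_hours = serpy.MethodField()  # resolved via get_overtime_hours()

    def get_overtime_hours(self, obj):
        return round(obj.overtime_minutes / 60, 2)


print(BillSketchSerializer(Bill(1, 90)).data)  # {'id': 1, 'overtime_hours': 1.5}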
overtime_hours = serpy.MethodField() sessions = serpy.MethodField() @@ -247,17 +252,17 @@ def get_overtime_hours(self, obj): return round(obj.overtime_minutes / 60, 2) def get_sessions(self, obj): - _sessions = obj.mentorshipsession_set.order_by('created_at').all() + _sessions = obj.mentorshipsession_set.order_by("created_at").all() return BillSessionSerializer(_sessions, many=True).data def get_unfinished_sessions(self, obj): _sessions = MentorshipSession.objects.filter( - mentor=obj.mentor, bill__isnull=True, allow_billing=True, - bill__academy=obj.academy).exclude(status__in=['COMPLETED', 'FAILED']) + mentor=obj.mentor, bill__isnull=True, allow_billing=True, bill__academy=obj.academy + ).exclude(status__in=["COMPLETED", "FAILED"]) return BillSessionSerializer(_sessions, many=True).data def get_public_url(self, obj): - return '/v1/mentorship/academy/bill/1/html' + return "/v1/mentorship/academy/bill/1/html" class GETMentorBigSerializer(serpy.Serializer): @@ -405,50 +410,52 @@ def get_tooltip(self, obj): service = obj.service if service is None: - return 'Please ser service for this mentorship' + return "Please ser service for this mentorship" - message = f'This mentorship should last no longer than {int(service.duration.seconds/60)} min. <br />' + message = f"This mentorship should last no longer than {int(service.duration.seconds/60)} min. <br />" if obj.started_at is None: - message += 'The mentee never joined the session. <br />' + message += "The mentee never joined the session. <br />" else: message += f'Started on {obj.started_at.strftime("%m/%d/%Y at %H:%M:%S")}. <br />' if obj.mentor_joined_at is None: - message += 'The mentor never joined' + message += "The mentor never joined" elif obj.mentor_joined_at > obj.started_at: - message += f'The mentor joined {duration_to_str(obj.mentor_joined_at - obj.started_at)} before. <br />' + message += f"The mentor joined {duration_to_str(obj.mentor_joined_at - obj.started_at)} before. <br />" elif obj.started_at > obj.mentor_joined_at: - message += f'The mentor joined {duration_to_str(obj.started_at - obj.mentor_joined_at)} after. <br />' + message += f"The mentor joined {duration_to_str(obj.started_at - obj.mentor_joined_at)} after. <br />" if obj.ended_at is not None: - message += f'The mentorship lasted {duration_to_str(obj.ended_at - obj.started_at)}. <br />' + message += f"The mentorship lasted {duration_to_str(obj.ended_at - obj.started_at)}. <br />" if (obj.ended_at - obj.started_at) > service.duration: extra_time = (obj.ended_at - obj.started_at) - service.duration - message += f'With extra time of {duration_to_str(extra_time)}. <br />' + message += f"With extra time of {duration_to_str(extra_time)}. <br />" else: - message += 'No extra time detected <br />' + message += "No extra time detected <br />" else: - message += 'The mentorship has not ended yet. <br />' + message += "The mentorship has not ended yet. 
<br />" if obj.ends_at is not None: - message += f'But it was supposed to end after {duration_to_str(obj.ends_at - obj.started_at)} <br />' + message += ( + f"But it was supposed to end after {duration_to_str(obj.ends_at - obj.started_at)} <br />" + ) return message def get_duration_string(self, obj): if obj.started_at is None: - return 'Never started' + return "Never started" end_date = obj.ended_at if end_date is None: - return 'Never ended' + return "Never ended" if obj.started_at > end_date: - return 'Ended before it started' + return "Ended before it started" if (end_date - obj.started_at).days > 1: - return 'Many days' + return "Many days" return duration_to_str(obj.ended_at - obj.started_at) @@ -464,14 +471,14 @@ def get_extra_time(self, obj): return None if (obj.ended_at - obj.started_at).days > 1: - return 'Many days of extra time, probably it was never closed' + return "Many days of extra time, probably it was never closed" if obj.service is None: - return 'Please setup service for this session' + return "Please setup service for this session" if (obj.ended_at - obj.started_at) > obj.service.duration: extra_time = (obj.ended_at - obj.started_at) - obj.service.duration - return f'Extra time of {duration_to_str(extra_time)}, the expected duration was {duration_to_str(obj.service.duration)}' + return f"Extra time of {duration_to_str(extra_time)}, the expected duration was {duration_to_str(obj.service.duration)}" else: return None @@ -481,14 +488,14 @@ def get_mentor_late(self, obj): return None if obj.started_at > obj.mentor_joined_at and (obj.started_at - obj.mentor_joined_at).seconds > (60 * 4): - return f'The mentor joined {duration_to_str(obj.started_at - obj.mentor_joined_at)} after. <br />' + return f"The mentor joined {duration_to_str(obj.started_at - obj.mentor_joined_at)} after. 
<br />" else: return None def get_mentee_joined(self, obj): if obj.started_at is None: - return 'Session did not start because mentee never joined' + return "Session did not start because mentee never joined" else: return True @@ -505,15 +512,15 @@ class ServicePOSTSerializer(serializers.ModelSerializer): class Meta: model = MentorshipService - exclude = ('created_at', 'updated_at', 'academy') + exclude = ("created_at", "updated_at", "academy") def validate(self, data): - academy = Academy.objects.filter(id=self.context['academy_id']).first() + academy = Academy.objects.filter(id=self.context["academy_id"]).first() if academy is None: - raise ValidationException(f'Academy {self.context["academy"]} not found', slug='academy-not-found') + raise ValidationException(f'Academy {self.context["academy"]} not found', slug="academy-not-found") - return {**data, 'academy': academy} + return {**data, "academy": academy} class ServicePUTSerializer(serializers.ModelSerializer): @@ -521,18 +528,18 @@ class ServicePUTSerializer(serializers.ModelSerializer): class Meta: model = MentorshipService - exclude = ('created_at', 'updated_at', 'academy', 'slug') + exclude = ("created_at", "updated_at", "academy", "slug") def validate(self, data): - academy = Academy.objects.filter(id=self.context['academy_id']).first() + academy = Academy.objects.filter(id=self.context["academy_id"]).first() if academy is None: - raise ValidationException(f'Academy {self.context["academy"]} not found', slug='academy-not-found') + raise ValidationException(f'Academy {self.context["academy"]} not found', slug="academy-not-found") - if 'slug' in data: - raise ValidationException('The service slug cannot be updated', slug='service-cannot-be-updated') + if "slug" in data: + raise ValidationException("The service slug cannot be updated", slug="service-cannot-be-updated") - return {**data, 'academy': academy} + return {**data, "academy": academy} class MentorSerializer(serializers.ModelSerializer): @@ -540,51 +547,60 @@ class MentorSerializer(serializers.ModelSerializer): class Meta: model = MentorProfile - exclude = ('created_at', 'updated_at') + exclude = ("created_at", "updated_at") def validate(self, data): - lang = data.get('lang', 'en') - user = data['user'] - profile_academy = ProfileAcademy.objects.filter(user__id=data['user'].id, - academy__id=data['academy'].id).first() + lang = data.get("lang", "en") + user = data["user"] + profile_academy = ProfileAcademy.objects.filter( + user__id=data["user"].id, academy__id=data["academy"].id + ).first() - if 'name' not in data: - data['name'] = '' + if "name" not in data: + data["name"] = "" - if not data['name'] and profile_academy: - data['name'] = f'{profile_academy.first_name} {profile_academy.last_name}' + if not data["name"] and profile_academy: + data["name"] = f"{profile_academy.first_name} {profile_academy.last_name}" - if not data['name']: - data['name'] = user.first_name + ' ' + user.last_name - data['name'] = data['name'].strip() + if not data["name"]: + data["name"] = user.first_name + " " + user.last_name + data["name"] = data["name"].strip() - if 'None' in data['name']: - data['name'] = '' + if "None" in data["name"]: + data["name"] = "" - if not data['name']: - raise ValidationException(translation(lang, - en='Unable to find name on this user', - es='imposible encontrar el nombre en este usuario', - slug='name-not-found'), - code=400) + if not data["name"]: + raise ValidationException( + translation( + lang, + en="Unable to find name on this user", + es="imposible 
encontrar el nombre en este usuario", + slug="name-not-found", + ), + code=400, + ) - if 'email' not in data: - data['email'] = '' + if "email" not in data: + data["email"] = "" - if not data['email'] and profile_academy: + if not data["email"] and profile_academy: - data['email'] = profile_academy.email + data["email"] = profile_academy.email - if not data['email']: + if not data["email"]: - data['email'] = data['user'].email + data["email"] = data["user"].email - if not data['email']: - raise ValidationException(translation(lang, - en='Unable to find email on this user', - es='Imposible encontrar el email en este usuario', - slug='email-not-found'), - code=400) + if not data["email"]: + raise ValidationException( + translation( + lang, + en="Unable to find email on this user", + es="Imposible encontrar el email en este usuario", + slug="email-not-found", + ), + code=400, + ) return data @@ -597,51 +613,59 @@ class MentorUpdateSerializer(serializers.ModelSerializer): class Meta: model = MentorProfile - exclude = ('created_at', 'updated_at', 'user', 'token') + exclude = ("created_at", "updated_at", "user", "token") def validate(self, data): - lang = data.get('lang', 'en') - if 'status' in data and data['status'] in ['ACTIVE', 'UNLISTED'] and self.instance.status != data['status']: + lang = data.get("lang", "en") + if "status" in data and data["status"] in ["ACTIVE", "UNLISTED"] and self.instance.status != data["status"]: try: actions.mentor_is_ready(self.instance) except Exception as e: raise ValidationException(str(e)) - user = data['user'] if 'user' in data else self.instance.user - academy = data['academy'] if 'academy' in data else self.instance.academy + user = data["user"] if "user" in data else self.instance.user + academy = data["academy"] if "academy" in data else self.instance.academy profile_academy = ProfileAcademy.objects.filter(user__id=user.id, academy=academy).first() - if 'name' not in data: - data['name'] = '' + if "name" not in data: + data["name"] = "" - if not data['name'] and profile_academy: + if not data["name"] and profile_academy: - data['name'] = profile_academy.first_name + ' ' + profile_academy.last_name + data["name"] = profile_academy.first_name + " " + profile_academy.last_name - if 'None' in data['name']: - data['name'] = '' + if "None" in data["name"]: + data["name"] = "" - data['name'] = data['name'].strip() - if not data['name']: - raise ValidationException(translation(lang, - en='Unable to find name on this user', - es='Imposible encotrar el nombre en este usuario', - slug='name-not-found'), - code=400) + data["name"] = data["name"].strip() + if not data["name"]: + raise ValidationException( + translation( + lang, + en="Unable to find name on this user", + es="Imposible encotrar el nombre en este usuario", + slug="name-not-found", + ), + code=400, + ) - if 'email' not in data: - data['email'] = self.instance.email + if "email" not in data: + data["email"] = self.instance.email - if not data['email'] and profile_academy: + if not data["email"] and profile_academy: - data['email'] = profile_academy.email + data["email"] = profile_academy.email - if not data['email']: - raise ValidationException(translation(lang, - en='Unable to find email on this user', - es='Imposible encontrar el email en este usuario', - slug='email-imposible-to-find'), - code=400) + if not data["email"]: + raise ValidationException( + translation( + lang, + en="Unable to find email on this user", + es="Imposible encontrar el email en este usuario", + slug="email-imposible-to-find", + 
), + code=400, + ) return data @@ -663,27 +687,28 @@ class Meta: model = MentorshipSession list_serializer_class = SessionListSerializer exclude = ( - 'created_at', - 'updated_at', - 'suggested_accounted_duration', - 'status_message', + "created_at", + "updated_at", + "suggested_accounted_duration", + "status_message", ) def validate(self, data): - #is_online - if 'is_online' in data and data['is_online'] == True: + # is_online + if "is_online" in data and data["is_online"] == True: online_read_only = [ - 'mentor_joined_at', - 'mentor_left_at', - 'mentee_left_at', - 'started_at', - 'ended_at', + "mentor_joined_at", + "mentor_left_at", + "mentee_left_at", + "started_at", + "ended_at", ] for field in online_read_only: if field in data: raise ValidationException( - f'The field {field} is automatically set by the system during online mentorships', - slug='read-only-field-online') + f"The field {field} is automatically set by the system during online mentorships", + slug="read-only-field-online", + ) return super().validate(data) @@ -698,7 +723,7 @@ def update(self, instance, validated_data): sessions = bill.mentorshipsession_set.all() - success_status = ['APPROVED', 'PAID', 'IGNORED'] + success_status = ["APPROVED", "PAID", "IGNORED"] is_dirty = [x for x in sessions if x.bill.status not in success_status and not x.service] # this prevent errors 500 @@ -706,32 +731,40 @@ def update(self, instance, validated_data): generate_mentor_bill(mentor, bill, bill.mentorshipsession_set.all()) else: - bill.status = 'RECALCULATE' + bill.status = "RECALCULATE" bill.save() - if instance and 'started_at' in validated_data and instance.started_at is None: - tasks_activity.add_activity.delay(instance.mentee, - 'mentorship_session_checkin', - related_type='mentorship.MentorshipSession', - related_id=instance.id) - - if instance and 'mentor_joined_at' in validated_data and instance.mentor_joined_at is None: - tasks_activity.add_activity.delay(instance.mentor, - 'mentorship_session_checkin', - related_type='mentorship.MentorshipSession', - related_id=instance.id) - - if instance and 'mentee_left_at' in validated_data and instance.mentee_left_at is None: - tasks_activity.add_activity.delay(instance.mentee, - 'mentorship_session_checkout', - related_type='mentorship.MentorshipSession', - related_id=instance.id) - - if instance and 'mentor_left_at' in validated_data and instance.mentor_left_at is None: - tasks_activity.add_activity.delay(instance.mentor, - 'mentorship_session_checkout', - related_type='mentorship.MentorshipSession', - related_id=instance.id) + if instance and "started_at" in validated_data and instance.started_at is None: + tasks_activity.add_activity.delay( + instance.mentee, + "mentorship_session_checkin", + related_type="mentorship.MentorshipSession", + related_id=instance.id, + ) + + if instance and "mentor_joined_at" in validated_data and instance.mentor_joined_at is None: + tasks_activity.add_activity.delay( + instance.mentor, + "mentorship_session_checkin", + related_type="mentorship.MentorshipSession", + related_id=instance.id, + ) + + if instance and "mentee_left_at" in validated_data and instance.mentee_left_at is None: + tasks_activity.add_activity.delay( + instance.mentee, + "mentorship_session_checkout", + related_type="mentorship.MentorshipSession", + related_id=instance.id, + ) + + if instance and "mentor_left_at" in validated_data and instance.mentor_left_at is None: + tasks_activity.add_activity.delay( + instance.mentor, + "mentorship_session_checkout", + 
related_type="mentorship.MentorshipSession", + related_id=instance.id, + ) return result @@ -744,70 +777,84 @@ class SessionSerializer(SessionPUTSerializer): def validate(self, data): - lang = data.get('lang', 'en') + lang = data.get("lang", "en") service = None - if 'service' in data and data['service'] and isinstance(data['service'], - str) and not data['service'].isnumeric(): - service = MentorshipService.objects.filter(academy=self.context['academy_id'], slug=data['service']).first() + if ( + "service" in data + and data["service"] + and isinstance(data["service"], str) + and not data["service"].isnumeric() + ): + service = MentorshipService.objects.filter(academy=self.context["academy_id"], slug=data["service"]).first() else: - service = MentorshipService.objects.filter(academy=self.context['academy_id'], id=data['service']).first() + service = MentorshipService.objects.filter(academy=self.context["academy_id"], id=data["service"]).first() if service is None: - raise ValidationException(f'Service {data["service"]} not found', slug='service-not-found') + raise ValidationException(f'Service {data["service"]} not found', slug="service-not-found") mentor = None - if 'mentor' in data and data['mentor'] and isinstance(data['mentor'], str) and not data['mentor'].isnumeric(): - mentor = MentorProfile.objects.filter(academy=self.context['academy_id'], - user__email=data['mentor']).first() + if "mentor" in data and data["mentor"] and isinstance(data["mentor"], str) and not data["mentor"].isnumeric(): + mentor = MentorProfile.objects.filter( + academy=self.context["academy_id"], user__email=data["mentor"] + ).first() else: - mentor = MentorProfile.objects.filter(academy=self.context['academy_id'], id=data['mentor']).first() + mentor = MentorProfile.objects.filter(academy=self.context["academy_id"], id=data["mentor"]).first() if mentor is None: - raise ValidationException(f'Mentor {data["mentor"]} not found', slug='mentor-not-found') + raise ValidationException(f'Mentor {data["mentor"]} not found', slug="mentor-not-found") mentee = None - if 'mentee' in data: - if not data['mentee'].isnumeric(): - mentee = Consumable.objects.filter(mentorship_service_set__mentorship_services__id=service.id, - user__email=data['mentee']).first() + if "mentee" in data: + if not data["mentee"].isnumeric(): + mentee = Consumable.objects.filter( + mentorship_service_set__mentorship_services__id=service.id, user__email=data["mentee"] + ).first() else: - mentee = Consumable.objects.filter(mentorship_service_set__mentorship_services__id=service.id, - user__id=data['mentee']).first() + mentee = Consumable.objects.filter( + mentorship_service_set__mentorship_services__id=service.id, user__id=data["mentee"] + ).first() if mentee is None: - raise ValidationException(translation( - lang, - en='The session mentee is not a member of 4Geeks.com', - es='El usuario que quieres agregar a la mentoría no pertenece a 4Geeks.com', - slug='mentee-not-found'), - code=400) + raise ValidationException( + translation( + lang, + en="The session mentee is not a member of 4Geeks.com", + es="El usuario que quieres agregar a la mentoría no pertenece a 4Geeks.com", + slug="mentee-not-found", + ), + code=400, + ) mentee = mentee.user if mentee is not None and mentor.id == mentee.id: - raise ValidationException(translation(lang, - en='Mentee and mentor cannot be the same person in the same session', - es='El mentor y el estudiante no pueden ser la misma persona', - slug='mentor-mentee-same-person'), - code=400) - - calendly_organization = 
CalendlyOrganization.objects.filter(academy=self.context['academy_id']).first() + raise ValidationException( + translation( + lang, + en="Mentee and mentor cannot be the same person in the same session", + es="El mentor y el estudiante no pueden ser la misma persona", + slug="mentor-mentee-same-person", + ), + code=400, + ) + + calendly_organization = CalendlyOrganization.objects.filter(academy=self.context["academy_id"]).first() if calendly_organization is not None: max_sessions = calendly_organization.max_concurrent_sessions if max_sessions is not None and max_sessions > 0: - total_service_mentorships = MentorshipSession.objects.filter(academy=self.context['academy_id'], - status='PENDING', - mentee=mentee, - service=service).count() + total_service_mentorships = MentorshipSession.objects.filter( + academy=self.context["academy_id"], status="PENDING", mentee=mentee, service=service + ).count() if max_sessions <= total_service_mentorships: - raise ValidationException(translation( - lang, - en= - f'You can only schedule {max_sessions} mentoring sessions in advanced. Fix this by cancelling an upcoming session or waiting for it to happen before booking a new one. ', - es= - f'Sólo puedes agendar un máximo de {max_sessions} sessiones de mentoría por adelantado. Soluciona esto cancelando una de tus próximas sesiones o espera a que alguna ocurra antes de volver a agendar', - slug='max-concurrent-sessions'), - code=400) + raise ValidationException( + translation( + lang, + en=f"You can only schedule {max_sessions} mentoring sessions in advanced. Fix this by cancelling an upcoming session or waiting for it to happen before booking a new one. ", + es=f"Sólo puedes agendar un máximo de {max_sessions} sessiones de mentoría por adelantado. Soluciona esto cancelando una de tus próximas sesiones o espera a que alguna ocurra antes de volver a agendar", + slug="max-concurrent-sessions", + ), + code=400, + ) - return super().validate({**data, 'service': service, 'mentor': mentor, 'mentee': mentee}) + return super().validate({**data, "service": service, "mentor": mentor, "mentee": mentee}) class MentorshipBillPUTListSerializer(serializers.ListSerializer): @@ -825,54 +872,68 @@ class MentorshipBillPUTSerializer(serializers.ModelSerializer): class Meta: model = MentorshipBill - exclude = ('created_at', 'updated_at', 'academy', 'mentor', 'reviewer', 'total_duration_in_minutes', - 'total_duration_in_hours', 'total_price', 'overtime_minutes') + exclude = ( + "created_at", + "updated_at", + "academy", + "mentor", + "reviewer", + "total_duration_in_minutes", + "total_duration_in_hours", + "total_price", + "overtime_minutes", + ) list_serializer_class = MentorshipBillPUTListSerializer def validate(self, data): - academy = Academy.objects.filter(id=self.context['academy_id']).first() + academy = Academy.objects.filter(id=self.context["academy_id"]).first() if academy is None: - raise ValidationException(f'Academy {self.context["academy_id"]} not found', slug='academy-not-found') + raise ValidationException(f'Academy {self.context["academy_id"]} not found', slug="academy-not-found") - return {**data, 'academy': academy} + return {**data, "academy": academy} class CalendlyOrganizationSerializer(serializers.ModelSerializer): class Meta: model = CalendlyOrganization - fields = ('access_token', 'sync_status', 'sync_desc', 'username') + fields = ("access_token", "sync_status", "sync_desc", "username") def validate(self, data): - if 'access_token' not in data: + if "access_token" not in data: ValidationException( 
translation( - self.context['lang'], - en='You need to specify the access token to be used by the calendly organization credentials', - es='Por favor especifíca el access_token para conectar la organización con el API de calendly', - slug='missing-access-token')) - - if 'username' not in data: + self.context["lang"], + en="You need to specify the access token to be used by the calendly organization credentials", + es="Por favor especifíca el access_token para conectar la organización con el API de calendly", + slug="missing-access-token", + ) + ) + + if "username" not in data: ValidationException( - translation(self.context['lang'], - en='You need to specify the organization calendly username or handle', - es='Por favor especifíca el nombre de usuario o handle para la organizacion en calendly', - slug='missing-access-token')) + translation( + self.context["lang"], + en="You need to specify the organization calendly username or handle", + es="Por favor especifíca el nombre de usuario o handle para la organizacion en calendly", + slug="missing-access-token", + ) + ) - academy = Academy.objects.get(pk=self.context['academy_id']) + academy = Academy.objects.get(pk=self.context["academy_id"]) - return super().validate({**data.copy(), 'academy': academy}) + return super().validate({**data.copy(), "academy": academy}) def create(self, validated_data): - cal = Calendly(token=validated_data['access_token']) + cal = Calendly(token=validated_data["access_token"]) try: organization = cal.get_organization() - validated_data['uri'] = organization['resource']['current_organization'] + validated_data["uri"] = organization["resource"]["current_organization"] except Exception as e: - raise ValidationException('Organization not found for the given access token: ' + str(e)) + raise ValidationException("Organization not found for the given access token: " + str(e)) org = super().create(validated_data) @@ -881,11 +942,11 @@ def create(self, validated_data): try: cal.subscribe(org.uri, org.hash) except Exception as e: - raise ValidationException('Error while creating calendly organization: ' + str(e)) + raise ValidationException("Error while creating calendly organization: " + str(e)) try: cal.get_subscriptions(org.uri) except Exception as e: - raise ValidationException('Error retrieving organization subscriptions: ' + str(e)) + raise ValidationException("Error retrieving organization subscriptions: " + str(e)) return org diff --git a/breathecode/mentorship/tasks.py b/breathecode/mentorship/tasks.py index de13eb5e8..bd3ef133f 100644 --- a/breathecode/mentorship/tasks.py +++ b/breathecode/mentorship/tasks.py @@ -19,8 +19,8 @@ @shared_task(bind=True, priority=TaskPriority.STUDENT.value) def async_calendly_webhook(self, calendly_webhook_id): - logger.debug('Starting async_calendly_webhook') - status = 'ok' + logger.debug("Starting async_calendly_webhook") + status = "ok" webhook = CalendlyWebhook.objects.filter(id=calendly_webhook_id).first() organization = webhook.organization @@ -32,40 +32,40 @@ def async_calendly_webhook(self, calendly_webhook_id): client = Calendly(organization.access_token) client.execute_action(calendly_webhook_id) except Exception as e: - logger.debug('Calendly webhook exception') + logger.debug("Calendly webhook exception") logger.debug(str(e)) - status = 'error' + status = "error" else: - message = f"Calendly Organization {organization.id} doesn\'t exist" + message = f"Calendly Organization {organization.id} doesn't exist" - webhook.status = 'ERROR' + webhook.status = "ERROR" 
webhook.status_text = message webhook.save() logger.debug(message) - status = 'error' + status = "error" - logger.debug(f'Calendly status: {status}') + logger.debug(f"Calendly status: {status}") @shared_task(bind=True, priority=TaskPriority.STUDENT.value) def async_mentorship_session_calendly_webhook(self, calendly_webhook_id): - logger.debug('Starting async_mentorship_session_calendly_webhook') + logger.debug("Starting async_mentorship_session_calendly_webhook") webhook = CalendlyWebhook.objects.filter(id=calendly_webhook_id).first() payload = webhook.payload - calendly_token = os.getenv('CALENDLY_TOKEN') + calendly_token = os.getenv("CALENDLY_TOKEN") client = Calendly(calendly_token) - payload['tracking']['utm_campaign'] = 'geekpal' + payload["tracking"]["utm_campaign"] = "geekpal" mentorship_session = invitee_created(client, webhook, payload) if mentorship_session is not None: tasks_activity.add_activity.delay( mentorship_session.mentee.id, - 'mentoring_session_scheduled', - related_type='mentorship.MentorshipSession', + "mentoring_session_scheduled", + related_type="mentorship.MentorshipSession", related_id=mentorship_session.id, timestamp=webhook.called_at, ) @@ -75,28 +75,28 @@ def async_mentorship_session_calendly_webhook(self, calendly_webhook_id): def check_mentorship_profile(mentor_id: int, **_: Any): mentor = MentorProfile.objects.filter(id=mentor_id).first() if mentor is None: - raise AbortTask(f'Mentorship profile {mentor_id} not found') + raise AbortTask(f"Mentorship profile {mentor_id} not found") status = [] - if mentor.online_meeting_url is None or mentor.online_meeting_url == '': - status.append('no-online-meeting-url') + if mentor.online_meeting_url is None or mentor.online_meeting_url == "": + status.append("no-online-meeting-url") - if mentor.booking_url is None or 'https://calendly.com' not in mentor.booking_url: - status.append('no-booking-url') + if mentor.booking_url is None or "https://calendly.com" not in mentor.booking_url: + status.append("no-booking-url") if len(mentor.syllabus.all()) == 0: - status.append('no-syllabus') + status.append("no-syllabus") - if 'no-online-meeting-url' not in status: + if "no-online-meeting-url" not in status: response = requests.head(mentor.online_meeting_url, timeout=30) if response.status_code > 399: - status.append('bad-online-meeting-url') + status.append("bad-online-meeting-url") - if 'no-booking-url' not in status: + if "no-booking-url" not in status: response = requests.head(mentor.booking_url, timeout=30) if response.status_code > 399: - status.append('bad-booking-url') + status.append("bad-booking-url") mentor.availability_report = status mentor.save() diff --git a/breathecode/mentorship/tests/actions/tests_close_older_sessions.py b/breathecode/mentorship/tests/actions/tests_close_older_sessions.py index e08d3c4f3..e630b45ad 100644 --- a/breathecode/mentorship/tests/actions/tests_close_older_sessions.py +++ b/breathecode/mentorship/tests/actions/tests_close_older_sessions.py @@ -1,6 +1,7 @@ """ Test mentorhips """ + import random from unittest.mock import patch from django.utils import timezone @@ -21,27 +22,31 @@ class GetOrCreateSessionTestSuite(MentorshipTestCase): def test_without_mentorship_session(self): close_older_sessions() - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), []) + self.assertEqual(self.bc.database.list_of("mentorship.MentorshipSession"), []) """ 🔽🔽🔽 With two MentorshipSession, all statuses, ends_at less than two hours ago """ - @patch('django.utils.timezone.now', 
MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_with_two_mentorship_session__all_statuses__ends_at_less_than_two_hours_ago(self): - statuses = ['PENDING', 'STARTED', 'COMPLETED', 'FAILED', 'IGNORED'] + statuses = ["PENDING", "STARTED", "COMPLETED", "FAILED", "IGNORED"] for current in statuses: cases = [ - [{ - 'ends_at': None, - 'status': current, - } for _ in range(0, 2)], [ { - 'ends_at': UTC_NOW - timedelta(seconds=random.randint(0, 7200)), # less than 2 hours - 'status': current, - } for _ in range(0, 2) + "ends_at": None, + "status": current, + } + for _ in range(0, 2) + ], + [ + { + "ends_at": UTC_NOW - timedelta(seconds=random.randint(0, 7200)), # less than 2 hours + "status": current, + } + for _ in range(0, 2) ], ] @@ -50,78 +55,89 @@ def test_with_two_mentorship_session__all_statuses__ends_at_less_than_two_hours_ close_older_sessions() - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - self.bc.format.to_dict(model.mentorship_session[0]), - self.bc.format.to_dict(model.mentorship_session[1]), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + self.bc.format.to_dict(model.mentorship_session[0]), + self.bc.format.to_dict(model.mentorship_session[1]), + ], + ) # teardown - self.bc.database.delete('mentorship.MentorshipSession') + self.bc.database.delete("mentorship.MentorshipSession") """ 🔽🔽🔽 With two MentorshipSession, unfinished statuses, ends_at two hours ago or more """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_with_two_mentorship_session__unfinished_statuses__ends_at_two_hours_ago_or_more(self): - statuses = ['PENDING', 'STARTED'] + statuses = ["PENDING", "STARTED"] for current in statuses: mentorship_sessions = [ { - 'ends_at': UTC_NOW - timedelta(seconds=random.randint(7201, 10000)), # eq or gt than 2 hours - 'status': current, - } for _ in range(0, 2) + "ends_at": UTC_NOW - timedelta(seconds=random.randint(7201, 10000)), # eq or gt than 2 hours + "status": current, + } + for _ in range(0, 2) ] model = self.bc.database.create(mentorship_session=mentorship_sessions) close_older_sessions() - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - { - **self.bc.format.to_dict(model.mentorship_session[0]), - 'status': 'FAILED', - 'summary': 'Automatically closed because its ends was two hours ago or more', - 'ended_at': UTC_NOW, - }, - { - **self.bc.format.to_dict(model.mentorship_session[1]), - 'status': 'FAILED', - 'summary': 'Automatically closed because its ends was two hours ago or more', - 'ended_at': UTC_NOW, - }, - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + { + **self.bc.format.to_dict(model.mentorship_session[0]), + "status": "FAILED", + "summary": "Automatically closed because its ends was two hours ago or more", + "ended_at": UTC_NOW, + }, + { + **self.bc.format.to_dict(model.mentorship_session[1]), + "status": "FAILED", + "summary": "Automatically closed because its ends was two hours ago or more", + "ended_at": UTC_NOW, + }, + ], + ) # teardown - self.bc.database.delete('mentorship.MentorshipSession') + self.bc.database.delete("mentorship.MentorshipSession") """ 🔽🔽🔽 With two MentorshipSession, finished statuses, ends_at two hours ago or more """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", 
MagicMock(return_value=UTC_NOW)) def test_with_two_mentorship_session__finished_statuses__ends_at_two_hours_ago_or_more(self): - statuses = ['COMPLETED', 'FAILED', 'IGNORED'] + statuses = ["COMPLETED", "FAILED", "IGNORED"] for current in statuses: mentorship_sessions = [ { - 'ends_at': UTC_NOW - timedelta(seconds=random.randint(7201, 10000)), # eq or gt than 2 hours - 'status': current, - } for _ in range(0, 2) + "ends_at": UTC_NOW - timedelta(seconds=random.randint(7201, 10000)), # eq or gt than 2 hours + "status": current, + } + for _ in range(0, 2) ] model = self.bc.database.create(mentorship_session=mentorship_sessions) close_older_sessions() - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - { - **self.bc.format.to_dict(model.mentorship_session[0]), - }, - { - **self.bc.format.to_dict(model.mentorship_session[1]), - }, - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + { + **self.bc.format.to_dict(model.mentorship_session[0]), + }, + { + **self.bc.format.to_dict(model.mentorship_session[1]), + }, + ], + ) # teardown - self.bc.database.delete('mentorship.MentorshipSession') + self.bc.database.delete("mentorship.MentorshipSession") diff --git a/breathecode/mentorship/tests/actions/tests_extend_session.py b/breathecode/mentorship/tests/actions/tests_extend_session.py index dbe4fcd43..3255f10f1 100644 --- a/breathecode/mentorship/tests/actions/tests_extend_session.py +++ b/breathecode/mentorship/tests/actions/tests_extend_session.py @@ -1,6 +1,7 @@ """ Test mentorships """ + from datetime import datetime, timedelta import random from unittest.mock import patch @@ -27,10 +28,10 @@ def get_env(key, default=None): return get_env -ENV = {'DAILY_API_URL': 'https://netscape.bankruptcy.story'} -SESSION_NAME = 'luxray' -URL = f'https://netscape.bankruptcy.story/v1/rooms/{SESSION_NAME}' -DATA = {'x': 2} +ENV = {"DAILY_API_URL": "https://netscape.bankruptcy.story"} +SESSION_NAME = "luxray" +URL = f"https://netscape.bankruptcy.story/v1/rooms/{SESSION_NAME}" +DATA = {"x": 2} class GenerateMentorBillsTestCase(MentorshipTestCase): @@ -38,7 +39,7 @@ class GenerateMentorBillsTestCase(MentorshipTestCase): 🔽🔽🔽 without MentorshipSession without name """ - @patch('os.getenv', MagicMock(side_effect=apply_get_env(ENV))) + @patch("os.getenv", MagicMock(side_effect=apply_get_env(ENV))) def test__without_name(self): model = self.bc.database.create(mentorship_session=1) @@ -46,108 +47,123 @@ def test__without_name(self): with self.assertRaisesMessage(ExtendSessionException, "Can't extend sessions not have a name"): extend_session(model.mentorship_session) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - self.bc.format.to_dict(model.mentorship_session), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + self.bc.format.to_dict(model.mentorship_session), + ], + ) """ 🔽🔽🔽 without MentorshipSession without name and ends_at """ - @patch('os.getenv', MagicMock(side_effect=apply_get_env(ENV))) - @patch('requests.request', apply_requests_request_mock([(201, URL, DATA)])) + @patch("os.getenv", MagicMock(side_effect=apply_get_env(ENV))) + @patch("requests.request", apply_requests_request_mock([(201, URL, DATA)])) def test__with_name__without_ends_at(self): - mentorship_session = {'name': SESSION_NAME} + mentorship_session = {"name": SESSION_NAME} model = self.bc.database.create(mentorship_session=mentorship_session) mentorship_session_db = 
self.bc.format.to_dict(model.mentorship_session) result = extend_session(model.mentorship_session) self.bc.check.queryset_with_pks(result, [1]) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - mentorship_session_db, - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + mentorship_session_db, + ], + ) """ 🔽🔽🔽 without MentorshipSession without name with ends_at """ - @patch('os.getenv', MagicMock(side_effect=apply_get_env(ENV))) - @patch('requests.request', apply_requests_request_mock([(201, URL, DATA)])) + @patch("os.getenv", MagicMock(side_effect=apply_get_env(ENV))) + @patch("requests.request", apply_requests_request_mock([(201, URL, DATA)])) def test__with_name____with_ends_at(self): now = timezone.now() - mentorship_session = {'name': SESSION_NAME, 'ends_at': now} + mentorship_session = {"name": SESSION_NAME, "ends_at": now} model = self.bc.database.create(mentorship_session=mentorship_session) mentorship_session_db = self.bc.format.to_dict(model.mentorship_session) result = extend_session(model.mentorship_session) self.bc.check.queryset_with_pks(result, [1]) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - { - **mentorship_session_db, - 'ends_at': now + timedelta(minutes=30), - }, - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + { + **mentorship_session_db, + "ends_at": now + timedelta(minutes=30), + }, + ], + ) """ 🔽🔽🔽 without MentorshipSession without name with ends_at, passing duration_in_minutes """ - @patch('os.getenv', MagicMock(side_effect=apply_get_env(ENV))) - @patch('requests.request', apply_requests_request_mock([(201, URL, DATA)])) + @patch("os.getenv", MagicMock(side_effect=apply_get_env(ENV))) + @patch("requests.request", apply_requests_request_mock([(201, URL, DATA)])) def test__with_name____with_ends_at__passing_duration_in_minutes(self): now = timezone.now() duration_in_minutes = random.randint(1, 1000) - mentorship_session = {'name': SESSION_NAME, 'ends_at': now} + mentorship_session = {"name": SESSION_NAME, "ends_at": now} model = self.bc.database.create(mentorship_session=mentorship_session) mentorship_session_db = self.bc.format.to_dict(model.mentorship_session) result = extend_session(model.mentorship_session, duration_in_minutes=duration_in_minutes) self.bc.check.queryset_with_pks(result, [1]) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - { - **mentorship_session_db, - 'ends_at': now + timedelta(minutes=duration_in_minutes), - }, - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + { + **mentorship_session_db, + "ends_at": now + timedelta(minutes=duration_in_minutes), + }, + ], + ) """ 🔽🔽🔽 without MentorshipSession without name with ends_at, passing exp_in_epoch """ - @patch('os.getenv', MagicMock(side_effect=apply_get_env(ENV))) - @patch('requests.request', apply_requests_request_mock([(201, URL, DATA)])) + @patch("os.getenv", MagicMock(side_effect=apply_get_env(ENV))) + @patch("requests.request", apply_requests_request_mock([(201, URL, DATA)])) def test__with_name____with_ends_at__passing_exp_in_epoch(self): now = timezone.now() diff = timedelta(minutes=random.randint(1, 1000)) timestamp = datetime.timestamp(now + diff) - mentorship_session = {'name': SESSION_NAME, 'ends_at': now} + mentorship_session = {"name": SESSION_NAME, "ends_at": now} model = self.bc.database.create(mentorship_session=mentorship_session) mentorship_session_db = 
self.bc.format.to_dict(model.mentorship_session) result = extend_session(model.mentorship_session, exp_in_epoch=timestamp) self.bc.check.queryset_with_pks(result, [1]) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - { - **mentorship_session_db, - 'ends_at': now + diff, - }, - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + { + **mentorship_session_db, + "ends_at": now + diff, + }, + ], + ) """ 🔽🔽🔽 without MentorshipSession without name with ends_at, passing exp_in_epoch and tz """ - @patch('os.getenv', MagicMock(side_effect=apply_get_env(ENV))) - @patch('requests.request', apply_requests_request_mock([(201, URL, DATA)])) + @patch("os.getenv", MagicMock(side_effect=apply_get_env(ENV))) + @patch("requests.request", apply_requests_request_mock([(201, URL, DATA)])) def test__with_name____with_ends_at__passing_exp_in_epoch__passing_tz(self): timezones = [] @@ -162,19 +178,22 @@ def test__with_name____with_ends_at__passing_exp_in_epoch__passing_tz(self): diff = timedelta(minutes=random.randint(1, 1000)) timestamp = datetime.timestamp(now + diff) - mentorship_session = {'name': SESSION_NAME, 'ends_at': now} + mentorship_session = {"name": SESSION_NAME, "ends_at": now} model = self.bc.database.create(mentorship_session=mentorship_session) mentorship_session_db = self.bc.format.to_dict(model.mentorship_session) result = extend_session(model.mentorship_session, exp_in_epoch=timestamp, tz=tz) self.bc.check.queryset_with_pks(result, [model.mentorship_session.id]) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - { - **mentorship_session_db, - 'ends_at': now + diff, - }, - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + { + **mentorship_session_db, + "ends_at": now + diff, + }, + ], + ) # teardown - self.bc.database.delete('mentorship.MentorshipSession') + self.bc.database.delete("mentorship.MentorshipSession") diff --git a/breathecode/mentorship/tests/actions/tests_generate_mentor_bills.py b/breathecode/mentorship/tests/actions/tests_generate_mentor_bills.py index 327c1ae78..f12f47ec2 100644 --- a/breathecode/mentorship/tests/actions/tests_generate_mentor_bills.py +++ b/breathecode/mentorship/tests/actions/tests_generate_mentor_bills.py @@ -1,6 +1,7 @@ """ Test mentorships """ + import datetime from unittest.mock import MagicMock, patch @@ -16,58 +17,58 @@ def mentorship_bill_field(data={}): return { - 'academy_id': 0, - 'ended_at': None, - 'id': 0, - 'mentor_id': 0, - 'overtime_minutes': 0.0, - 'paid_at': None, - 'reviewer_id': None, - 'started_at': None, - 'status': 'DUE', - 'status_mesage': None, - 'total_duration_in_hours': 0.0, - 'total_duration_in_minutes': 0.0, - 'total_price': 0.0, + "academy_id": 0, + "ended_at": None, + "id": 0, + "mentor_id": 0, + "overtime_minutes": 0.0, + "paid_at": None, + "reviewer_id": None, + "started_at": None, + "status": "DUE", + "status_mesage": None, + "total_duration_in_hours": 0.0, + "total_duration_in_minutes": 0.0, + "total_price": 0.0, **data, } def mentorship_session_field(data={}): return { - 'name': None, - 'is_online': False, - 'latitude': None, - 'longitude': None, - 'mentor_id': 0, - 'service_id': None, - 'calendly_uuid': None, - 'mentee_id': None, - 'online_meeting_url': None, - 'online_recording_url': None, - 'status': 'PENDING', - 'status_message': None, - 'allow_billing': True, - 'bill_id': None, - 'accounted_duration': None, - 'agenda': None, - 'summary': None, - 'starts_at': None, - 'ends_at': None, - 
'started_at': None, - 'ended_at': None, - 'mentor_joined_at': None, - 'mentor_left_at': None, - 'mentee_left_at': None, - 'suggested_accounted_duration': None, - 'questions_and_answers': None, + "name": None, + "is_online": False, + "latitude": None, + "longitude": None, + "mentor_id": 0, + "service_id": None, + "calendly_uuid": None, + "mentee_id": None, + "online_meeting_url": None, + "online_recording_url": None, + "status": "PENDING", + "status_message": None, + "allow_billing": True, + "bill_id": None, + "accounted_duration": None, + "agenda": None, + "summary": None, + "starts_at": None, + "ends_at": None, + "started_at": None, + "ended_at": None, + "mentor_joined_at": None, + "mentor_left_at": None, + "mentee_left_at": None, + "suggested_accounted_duration": None, + "questions_and_answers": None, **data, } class GenerateMentorBillsTestCase(MentorshipTestCase): - @patch('django.utils.timezone.now', MagicMock(return_value=NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=NOW)) def test_generate_bills_with_no_previous_bills_no_unpaid_sessions__session_without_service(self): """ First bill generate, with no previous bills. @@ -76,15 +77,18 @@ def test_generate_bills_with_no_previous_bills_no_unpaid_sessions__session_witho models = self.bc.database.create(mentor_profile=1, user=1, mentorship_session=1) mentor = models.mentor_profile - with self.assertRaisesMessage(ValidationException, 'session_without_service'): + with self.assertRaisesMessage(ValidationException, "session_without_service"): generate_mentor_bills(mentor) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipBill'), []) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - self.bc.format.to_dict(models.mentorship_session), - ]) + self.assertEqual(self.bc.database.list_of("mentorship.MentorshipBill"), []) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + self.bc.format.to_dict(models.mentorship_session), + ], + ) - @patch('django.utils.timezone.now', MagicMock(return_value=NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=NOW)) def test_generate_bills_with_no_previous_bills_no_unpaid_sessions__session_with_service(self): """ First bill generate, with no previous bills. 
@@ -97,58 +101,57 @@ def test_generate_bills_with_no_previous_bills_no_unpaid_sessions__session_with_ self.assertEqual(bills, []) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipBill'), []) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - self.bc.format.to_dict(models.mentorship_session), - ]) + self.assertEqual(self.bc.database.list_of("mentorship.MentorshipBill"), []) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + self.bc.format.to_dict(models.mentorship_session), + ], + ) - @patch('django.utils.timezone.now', MagicMock(return_value=NOW)) - @patch('breathecode.notify.actions.send_email_message', MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=NOW)) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) def test_generate_bills_with_no_previous_bills_pending_sessions(self): """ Generate bills with no previous billing history and 3 previous sessions """ - models_a = self.bc.database.create(mentor_profile=1, - user=1, - mentorship_service=1, - mentorship_session={ - 'status': 'COMPLETED', - 'started_at': datetime.datetime(2021, 10, 16, 22, 0, tzinfo=pytz.UTC), - 'ended_at': datetime.datetime(2021, 10, 16, 23, 0, tzinfo=pytz.UTC), - 'accounted_duration': datetime.timedelta(hours=1) - }) - models = self.bc.database.create(mentor_profile=1, - user=1, - mentorship_service=1, - mentorship_session=[{ - 'status': - 'COMPLETED', - 'started_at': - datetime.datetime(2021, 10, 16, 22, 0, tzinfo=pytz.UTC), - 'ended_at': - datetime.datetime(2021, 10, 16, 23, 0, tzinfo=pytz.UTC), - 'accounted_duration': - datetime.timedelta(hours=1) - }, { - 'status': - 'COMPLETED', - 'started_at': - datetime.datetime(2021, 10, 17, 21, 0, tzinfo=pytz.UTC), - 'ended_at': - datetime.datetime(2021, 10, 17, 23, 0, tzinfo=pytz.UTC), - 'accounted_duration': - datetime.timedelta(hours=2) - }, { - 'status': - 'COMPLETED', - 'started_at': - datetime.datetime(2021, 11, 25, 21, 0, tzinfo=pytz.UTC), - 'ended_at': - datetime.datetime(2021, 11, 25, 23, 0, tzinfo=pytz.UTC), - 'accounted_duration': - datetime.timedelta(hours=2) - }]) + models_a = self.bc.database.create( + mentor_profile=1, + user=1, + mentorship_service=1, + mentorship_session={ + "status": "COMPLETED", + "started_at": datetime.datetime(2021, 10, 16, 22, 0, tzinfo=pytz.UTC), + "ended_at": datetime.datetime(2021, 10, 16, 23, 0, tzinfo=pytz.UTC), + "accounted_duration": datetime.timedelta(hours=1), + }, + ) + models = self.bc.database.create( + mentor_profile=1, + user=1, + mentorship_service=1, + mentorship_session=[ + { + "status": "COMPLETED", + "started_at": datetime.datetime(2021, 10, 16, 22, 0, tzinfo=pytz.UTC), + "ended_at": datetime.datetime(2021, 10, 16, 23, 0, tzinfo=pytz.UTC), + "accounted_duration": datetime.timedelta(hours=1), + }, + { + "status": "COMPLETED", + "started_at": datetime.datetime(2021, 10, 17, 21, 0, tzinfo=pytz.UTC), + "ended_at": datetime.datetime(2021, 10, 17, 23, 0, tzinfo=pytz.UTC), + "accounted_duration": datetime.timedelta(hours=2), + }, + { + "status": "COMPLETED", + "started_at": datetime.datetime(2021, 11, 25, 21, 0, tzinfo=pytz.UTC), + "ended_at": datetime.datetime(2021, 11, 25, 23, 0, tzinfo=pytz.UTC), + "accounted_duration": datetime.timedelta(hours=2), + }, + ], + ) mentor = models.mentor_profile bills = generate_mentor_bills(mentor) @@ -157,127 +160,164 @@ def test_generate_bills_with_no_previous_bills_pending_sessions(self): first = sorted(models.mentorship_session, key=lambda x: 
x.started_at)[0].started_at latest = sorted(models.mentorship_session, key=lambda x: x.ended_at, reverse=True)[0].ended_at - bill1 = round(models_a.mentorship_session.accounted_duration.seconds / 60 / 60, - 2) * models.mentor_profile.price_per_hour - - bill2 = round((models.mentorship_session[0].accounted_duration.seconds + - models.mentorship_session[1].accounted_duration.seconds) / 60 / 60, - 2) * models.mentor_profile.price_per_hour - - bill3 = round((models.mentorship_session[2].accounted_duration.seconds) / 60 / 60, - 2) * models.mentor_profile.price_per_hour - - self.assertEqual(list_bills, [ - mentorship_bill_field({ - 'academy_id': 2, - 'started_at': datetime.datetime(2021, 10, 1, 0, 0, 0, 0, tzinfo=pytz.UTC), - 'ended_at': datetime.datetime(2021, 10, 31, 23, 59, 59, 999999, tzinfo=pytz.UTC), - 'id': 1, - 'mentor_id': 2, - 'overtime_minutes': 60.0, - 'total_duration_in_hours': 3.0, - 'total_duration_in_minutes': 180.0, - 'total_price': bill2, - }), - mentorship_bill_field({ - 'academy_id': 2, - 'started_at': datetime.datetime(2021, 11, 1, 0, 0, 0, 0, tzinfo=pytz.UTC), - 'ended_at': datetime.datetime(2021, 11, 30, 23, 59, 59, 999999, tzinfo=pytz.UTC), - 'id': 2, - 'mentor_id': 2, - 'overtime_minutes': 60.0, - 'total_duration_in_hours': 2.0, - 'total_duration_in_minutes': 120.0, - 'total_price': bill3, - }), - ]) - - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipBill'), [ - mentorship_bill_field({ - 'academy_id': 2, - 'started_at': datetime.datetime(2021, 10, 1, 0, 0, 0, 0, tzinfo=pytz.UTC), - 'ended_at': datetime.datetime(2021, 10, 31, 23, 59, 59, 999999, tzinfo=pytz.UTC), - 'id': 1, - 'mentor_id': 2, - 'overtime_minutes': 60.0, - 'total_duration_in_hours': 3.0, - 'total_duration_in_minutes': 180.0, - 'total_price': bill2, - }), - mentorship_bill_field({ - 'academy_id': 2, - 'started_at': datetime.datetime(2021, 11, 1, 0, 0, 0, 0, tzinfo=pytz.UTC), - 'ended_at': datetime.datetime(2021, 11, 30, 23, 59, 59, 999999, tzinfo=pytz.UTC), - 'id': 2, - 'mentor_id': 2, - 'overtime_minutes': 60.0, - 'total_duration_in_hours': 2.0, - 'total_duration_in_minutes': 120.0, - 'total_price': bill3, - }), - ]) - - status_message = ('The mentor never joined the meeting, no time will be ' - 'accounted for.') - - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - mentorship_session_field({ - 'accounted_duration': datetime.timedelta(seconds=3600), - 'id': 1, - 'mentee_id': 1, - 'mentor_id': 1, - 'service_id': 1, - 'status': 'COMPLETED', - 'started_at': datetime.datetime(2021, 10, 16, 22, 0, tzinfo=pytz.UTC), - 'ended_at': datetime.datetime(2021, 10, 16, 23, 0, tzinfo=pytz.UTC), - 'bill_id': None, - }), - mentorship_session_field({ - 'accounted_duration': datetime.timedelta(seconds=3600), - 'id': 2, - 'mentee_id': 2, - 'mentor_id': 2, - 'service_id': 2, - 'status': 'COMPLETED', - 'status_message': status_message, - 'suggested_accounted_duration': datetime.timedelta(0), - 'started_at': datetime.datetime(2021, 10, 16, 22, 0, tzinfo=pytz.UTC), - 'ended_at': datetime.datetime(2021, 10, 16, 23, 0, tzinfo=pytz.UTC), - 'summary': None, - 'bill_id': 1, - }), - mentorship_session_field({ - 'accounted_duration': datetime.timedelta(seconds=7200), - 'id': 3, - 'mentee_id': 2, - 'mentor_id': 2, - 'service_id': 2, - 'status': 'COMPLETED', - 'status_message': status_message, - 'suggested_accounted_duration': datetime.timedelta(0), - 'started_at': datetime.datetime(2021, 10, 17, 21, 0, tzinfo=pytz.UTC), - 'ended_at': datetime.datetime(2021, 10, 17, 23, 0, tzinfo=pytz.UTC), - 
'summary': None, - 'bill_id': 1, - }), - mentorship_session_field({ - 'accounted_duration': datetime.timedelta(seconds=7200), - 'id': 4, - 'mentee_id': 2, - 'mentor_id': 2, - 'service_id': 2, - 'status': 'COMPLETED', - 'status_message': status_message, - 'suggested_accounted_duration': datetime.timedelta(0), - 'started_at': datetime.datetime(2021, 11, 25, 21, 0, tzinfo=pytz.UTC), - 'ended_at': datetime.datetime(2021, 11, 25, 23, 0, tzinfo=pytz.UTC), - 'summary': None, - 'bill_id': 2, - }), - ]) - - @patch('django.utils.timezone.now', MagicMock(return_value=NOW)) - @patch('breathecode.notify.actions.send_email_message', MagicMock()) + bill1 = ( + round(models_a.mentorship_session.accounted_duration.seconds / 60 / 60, 2) + * models.mentor_profile.price_per_hour + ) + + bill2 = ( + round( + ( + models.mentorship_session[0].accounted_duration.seconds + + models.mentorship_session[1].accounted_duration.seconds + ) + / 60 + / 60, + 2, + ) + * models.mentor_profile.price_per_hour + ) + + bill3 = ( + round((models.mentorship_session[2].accounted_duration.seconds) / 60 / 60, 2) + * models.mentor_profile.price_per_hour + ) + + self.assertEqual( + list_bills, + [ + mentorship_bill_field( + { + "academy_id": 2, + "started_at": datetime.datetime(2021, 10, 1, 0, 0, 0, 0, tzinfo=pytz.UTC), + "ended_at": datetime.datetime(2021, 10, 31, 23, 59, 59, 999999, tzinfo=pytz.UTC), + "id": 1, + "mentor_id": 2, + "overtime_minutes": 60.0, + "total_duration_in_hours": 3.0, + "total_duration_in_minutes": 180.0, + "total_price": bill2, + } + ), + mentorship_bill_field( + { + "academy_id": 2, + "started_at": datetime.datetime(2021, 11, 1, 0, 0, 0, 0, tzinfo=pytz.UTC), + "ended_at": datetime.datetime(2021, 11, 30, 23, 59, 59, 999999, tzinfo=pytz.UTC), + "id": 2, + "mentor_id": 2, + "overtime_minutes": 60.0, + "total_duration_in_hours": 2.0, + "total_duration_in_minutes": 120.0, + "total_price": bill3, + } + ), + ], + ) + + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipBill"), + [ + mentorship_bill_field( + { + "academy_id": 2, + "started_at": datetime.datetime(2021, 10, 1, 0, 0, 0, 0, tzinfo=pytz.UTC), + "ended_at": datetime.datetime(2021, 10, 31, 23, 59, 59, 999999, tzinfo=pytz.UTC), + "id": 1, + "mentor_id": 2, + "overtime_minutes": 60.0, + "total_duration_in_hours": 3.0, + "total_duration_in_minutes": 180.0, + "total_price": bill2, + } + ), + mentorship_bill_field( + { + "academy_id": 2, + "started_at": datetime.datetime(2021, 11, 1, 0, 0, 0, 0, tzinfo=pytz.UTC), + "ended_at": datetime.datetime(2021, 11, 30, 23, 59, 59, 999999, tzinfo=pytz.UTC), + "id": 2, + "mentor_id": 2, + "overtime_minutes": 60.0, + "total_duration_in_hours": 2.0, + "total_duration_in_minutes": 120.0, + "total_price": bill3, + } + ), + ], + ) + + status_message = "The mentor never joined the meeting, no time will be " "accounted for." 
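+        # Reading the expected sessions below: generate_mentor_bills() was called for the second
+        # mentor only, so session 1 (owned by the first mentor) keeps bill_id=None, the two October
+        # sessions (1h and 2h) share bill 1, and the November session lands on bill 2. The
+        # "mentor never joined" status_message and suggested_accounted_duration=timedelta(0)
+        # presumably follow from mentor_joined_at never being set in these fixtures.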
+ + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + mentorship_session_field( + { + "accounted_duration": datetime.timedelta(seconds=3600), + "id": 1, + "mentee_id": 1, + "mentor_id": 1, + "service_id": 1, + "status": "COMPLETED", + "started_at": datetime.datetime(2021, 10, 16, 22, 0, tzinfo=pytz.UTC), + "ended_at": datetime.datetime(2021, 10, 16, 23, 0, tzinfo=pytz.UTC), + "bill_id": None, + } + ), + mentorship_session_field( + { + "accounted_duration": datetime.timedelta(seconds=3600), + "id": 2, + "mentee_id": 2, + "mentor_id": 2, + "service_id": 2, + "status": "COMPLETED", + "status_message": status_message, + "suggested_accounted_duration": datetime.timedelta(0), + "started_at": datetime.datetime(2021, 10, 16, 22, 0, tzinfo=pytz.UTC), + "ended_at": datetime.datetime(2021, 10, 16, 23, 0, tzinfo=pytz.UTC), + "summary": None, + "bill_id": 1, + } + ), + mentorship_session_field( + { + "accounted_duration": datetime.timedelta(seconds=7200), + "id": 3, + "mentee_id": 2, + "mentor_id": 2, + "service_id": 2, + "status": "COMPLETED", + "status_message": status_message, + "suggested_accounted_duration": datetime.timedelta(0), + "started_at": datetime.datetime(2021, 10, 17, 21, 0, tzinfo=pytz.UTC), + "ended_at": datetime.datetime(2021, 10, 17, 23, 0, tzinfo=pytz.UTC), + "summary": None, + "bill_id": 1, + } + ), + mentorship_session_field( + { + "accounted_duration": datetime.timedelta(seconds=7200), + "id": 4, + "mentee_id": 2, + "mentor_id": 2, + "service_id": 2, + "status": "COMPLETED", + "status_message": status_message, + "suggested_accounted_duration": datetime.timedelta(0), + "started_at": datetime.datetime(2021, 11, 25, 21, 0, tzinfo=pytz.UTC), + "ended_at": datetime.datetime(2021, 11, 25, 23, 0, tzinfo=pytz.UTC), + "summary": None, + "bill_id": 2, + } + ), + ], + ) + + @patch("django.utils.timezone.now", MagicMock(return_value=NOW)) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) def test_generate_bills_with_previous_bills_and_pending_sessions__status_due(self): """ Generate bills with no previous billing history and 3 previous sessions @@ -286,205 +326,242 @@ def test_generate_bills_with_previous_bills_and_pending_sessions__status_due(sel end = NOW - datetime.timedelta(days=80, hours=1) start_month = start.replace(day=28, hour=23, minute=59, second=59) end_month = start.replace(day=28, hour=23, minute=59, second=59) + datetime.timedelta(days=4) - models_a = self.bc.database.create(mentor_profile=1, - user=1, - mentorship_session={ - 'status': 'COMPLETED', - 'started_at': datetime.datetime(2021, 10, 16, 22, 0, tzinfo=pytz.UTC), - 'ended_at': datetime.datetime(2021, 10, 16, 23, 0, tzinfo=pytz.UTC), - 'accounted_duration': datetime.timedelta(hours=1) - }, - mentorship_service=1, - mentorship_bill={ - 'status': 'DUE', - 'started_at': start_month, - 'ended_at': end_month, - 'reviewer_id': None, - }) - models = self.bc.database.create(mentor_profile=models_a['mentor_profile'], - user=models_a['user'], - mentorship_service=1, - mentorship_session=[{ - 'status': - 'COMPLETED', - 'started_at': - datetime.datetime(2021, 11, 16, 22, 0, tzinfo=pytz.UTC), - 'ended_at': - datetime.datetime(2021, 11, 16, 23, 0, tzinfo=pytz.UTC), - 'accounted_duration': - datetime.timedelta(hours=1) - }, { - 'status': - 'COMPLETED', - 'started_at': - datetime.datetime(2021, 11, 17, 21, 0, tzinfo=pytz.UTC), - 'ended_at': - datetime.datetime(2021, 11, 17, 23, 0, tzinfo=pytz.UTC), - 'accounted_duration': - datetime.timedelta(hours=2) - }, { - 'status': - 
'COMPLETED', - 'started_at': - datetime.datetime(2021, 12, 30, 21, 0, tzinfo=pytz.UTC), - 'ended_at': - datetime.datetime(2021, 12, 30, 23, 0, tzinfo=pytz.UTC), - 'accounted_duration': - datetime.timedelta(hours=2) - }]) + models_a = self.bc.database.create( + mentor_profile=1, + user=1, + mentorship_session={ + "status": "COMPLETED", + "started_at": datetime.datetime(2021, 10, 16, 22, 0, tzinfo=pytz.UTC), + "ended_at": datetime.datetime(2021, 10, 16, 23, 0, tzinfo=pytz.UTC), + "accounted_duration": datetime.timedelta(hours=1), + }, + mentorship_service=1, + mentorship_bill={ + "status": "DUE", + "started_at": start_month, + "ended_at": end_month, + "reviewer_id": None, + }, + ) + models = self.bc.database.create( + mentor_profile=models_a["mentor_profile"], + user=models_a["user"], + mentorship_service=1, + mentorship_session=[ + { + "status": "COMPLETED", + "started_at": datetime.datetime(2021, 11, 16, 22, 0, tzinfo=pytz.UTC), + "ended_at": datetime.datetime(2021, 11, 16, 23, 0, tzinfo=pytz.UTC), + "accounted_duration": datetime.timedelta(hours=1), + }, + { + "status": "COMPLETED", + "started_at": datetime.datetime(2021, 11, 17, 21, 0, tzinfo=pytz.UTC), + "ended_at": datetime.datetime(2021, 11, 17, 23, 0, tzinfo=pytz.UTC), + "accounted_duration": datetime.timedelta(hours=2), + }, + { + "status": "COMPLETED", + "started_at": datetime.datetime(2021, 12, 30, 21, 0, tzinfo=pytz.UTC), + "ended_at": datetime.datetime(2021, 12, 30, 23, 0, tzinfo=pytz.UTC), + "accounted_duration": datetime.timedelta(hours=2), + }, + ], + ) mentor = models.mentor_profile bills = generate_mentor_bills(mentor) list_bills = [self.bc.format.to_dict(x) for x in bills] - bill1 = round(models_a.mentorship_session.accounted_duration.seconds / 60 / 60, - 2) * models.mentor_profile.price_per_hour - - bill2 = round((models.mentorship_session[0].accounted_duration.seconds + - models.mentorship_session[1].accounted_duration.seconds) / 60 / 60, - 2) * models.mentor_profile.price_per_hour - - bill3 = round((models.mentorship_session[2].accounted_duration.seconds) / 60 / 60, - 2) * models.mentor_profile.price_per_hour - - self.assertEqual(list_bills, [ - mentorship_bill_field({ - 'academy_id': 1, - 'started_at': start_month, - 'ended_at': end_month, - 'id': 1, - 'mentor_id': 1, - 'overtime_minutes': 0, - 'total_duration_in_hours': 1.0, - 'total_duration_in_minutes': 60.0, - 'total_price': bill1, - 'reviewer_id': None, - }), - mentorship_bill_field({ - 'academy_id': 1, - 'started_at': datetime.datetime(2021, 11, 1, 0, 0, 0, 0, tzinfo=pytz.UTC), - 'ended_at': datetime.datetime(2021, 11, 30, 23, 59, 59, 999999, tzinfo=pytz.UTC), - 'id': 2, - 'mentor_id': 1, - 'overtime_minutes': 60.0, - 'total_duration_in_hours': 3.0, - 'total_duration_in_minutes': 180.0, - 'total_price': bill2, - }), - mentorship_bill_field({ - 'academy_id': 1, - 'started_at': datetime.datetime(2021, 12, 1, 0, 0, 0, 0, tzinfo=pytz.UTC), - 'ended_at': datetime.datetime(2021, 12, 31, 23, 59, 59, 999999, tzinfo=pytz.UTC), - 'id': 3, - 'mentor_id': 1, - 'overtime_minutes': 60.0, - 'total_duration_in_hours': 2.0, - 'total_duration_in_minutes': 120.0, - 'total_price': bill3, - }), - ]) - - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipBill'), [ - mentorship_bill_field({ - 'academy_id': 1, - 'started_at': start_month, - 'ended_at': end_month, - 'id': 1, - 'mentor_id': 1, - 'overtime_minutes': 0.0, - 'total_duration_in_hours': 1.0, - 'total_duration_in_minutes': 60.0, - 'total_price': bill1, - 'reviewer_id': None, - }), - mentorship_bill_field({ - 
'academy_id': 1, - 'started_at': datetime.datetime(2021, 11, 1, 0, 0, 0, 0, tzinfo=pytz.UTC), - 'ended_at': datetime.datetime(2021, 11, 30, 23, 59, 59, 999999, tzinfo=pytz.UTC), - 'id': 2, - 'mentor_id': 1, - 'overtime_minutes': 60.0, - 'total_duration_in_hours': 3.0, - 'total_duration_in_minutes': 180.0, - 'total_price': bill2, - }), - mentorship_bill_field({ - 'academy_id': 1, - 'started_at': datetime.datetime(2021, 12, 1, 0, 0, 0, 0, tzinfo=pytz.UTC), - 'ended_at': datetime.datetime(2021, 12, 31, 23, 59, 59, 999999, tzinfo=pytz.UTC), - 'id': 3, - 'mentor_id': 1, - 'overtime_minutes': 60.0, - 'total_duration_in_hours': 2.0, - 'total_duration_in_minutes': 120.0, - 'total_price': bill3, - }), - ]) - - status_message = ('The mentor never joined the meeting, no time will be ' - 'accounted for.') - - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - mentorship_session_field({ - 'accounted_duration': datetime.timedelta(seconds=3600), - 'id': 1, - 'mentee_id': 1, - 'mentor_id': 1, - 'service_id': 1, - 'status': 'COMPLETED', - 'status_message': status_message, - 'suggested_accounted_duration': datetime.timedelta(0), - 'started_at': datetime.datetime(2021, 10, 16, 22, 0, tzinfo=pytz.UTC), - 'ended_at': datetime.datetime(2021, 10, 16, 23, 0, tzinfo=pytz.UTC), - 'summary': None, - 'bill_id': 1, - }), - mentorship_session_field({ - 'accounted_duration': datetime.timedelta(seconds=3600), - 'id': 2, - 'mentee_id': 1, - 'mentor_id': 1, - 'service_id': 2, - 'status': 'COMPLETED', - 'status_message': status_message, - 'suggested_accounted_duration': datetime.timedelta(0), - 'started_at': datetime.datetime(2021, 11, 16, 22, 0, tzinfo=pytz.UTC), - 'ended_at': datetime.datetime(2021, 11, 16, 23, 0, tzinfo=pytz.UTC), - 'summary': None, - 'bill_id': 2, - }), - mentorship_session_field({ - 'accounted_duration': datetime.timedelta(seconds=7200), - 'id': 3, - 'mentee_id': 1, - 'mentor_id': 1, - 'service_id': 2, - 'status': 'COMPLETED', - 'status_message': status_message, - 'suggested_accounted_duration': datetime.timedelta(0), - 'started_at': datetime.datetime(2021, 11, 17, 21, 0, tzinfo=pytz.UTC), - 'ended_at': datetime.datetime(2021, 11, 17, 23, 0, tzinfo=pytz.UTC), - 'summary': None, - 'bill_id': 2, - }), - mentorship_session_field({ - 'accounted_duration': datetime.timedelta(seconds=7200), - 'id': 4, - 'mentee_id': 1, - 'mentor_id': 1, - 'service_id': 2, - 'status': 'COMPLETED', - 'status_message': status_message, - 'suggested_accounted_duration': datetime.timedelta(0), - 'started_at': datetime.datetime(2021, 12, 30, 21, 0, tzinfo=pytz.UTC), - 'ended_at': datetime.datetime(2021, 12, 30, 23, 0, tzinfo=pytz.UTC), - 'summary': None, - 'bill_id': 3, - }), - ]) - - @patch('django.utils.timezone.now', MagicMock(return_value=NOW)) - @patch('breathecode.notify.actions.send_email_message', MagicMock()) + bill1 = ( + round(models_a.mentorship_session.accounted_duration.seconds / 60 / 60, 2) + * models.mentor_profile.price_per_hour + ) + + bill2 = ( + round( + ( + models.mentorship_session[0].accounted_duration.seconds + + models.mentorship_session[1].accounted_duration.seconds + ) + / 60 + / 60, + 2, + ) + * models.mentor_profile.price_per_hour + ) + + bill3 = ( + round((models.mentorship_session[2].accounted_duration.seconds) / 60 / 60, 2) + * models.mentor_profile.price_per_hour + ) + + self.assertEqual( + list_bills, + [ + mentorship_bill_field( + { + "academy_id": 1, + "started_at": start_month, + "ended_at": end_month, + "id": 1, + "mentor_id": 1, + "overtime_minutes": 0, + 
"total_duration_in_hours": 1.0, + "total_duration_in_minutes": 60.0, + "total_price": bill1, + "reviewer_id": None, + } + ), + mentorship_bill_field( + { + "academy_id": 1, + "started_at": datetime.datetime(2021, 11, 1, 0, 0, 0, 0, tzinfo=pytz.UTC), + "ended_at": datetime.datetime(2021, 11, 30, 23, 59, 59, 999999, tzinfo=pytz.UTC), + "id": 2, + "mentor_id": 1, + "overtime_minutes": 60.0, + "total_duration_in_hours": 3.0, + "total_duration_in_minutes": 180.0, + "total_price": bill2, + } + ), + mentorship_bill_field( + { + "academy_id": 1, + "started_at": datetime.datetime(2021, 12, 1, 0, 0, 0, 0, tzinfo=pytz.UTC), + "ended_at": datetime.datetime(2021, 12, 31, 23, 59, 59, 999999, tzinfo=pytz.UTC), + "id": 3, + "mentor_id": 1, + "overtime_minutes": 60.0, + "total_duration_in_hours": 2.0, + "total_duration_in_minutes": 120.0, + "total_price": bill3, + } + ), + ], + ) + + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipBill"), + [ + mentorship_bill_field( + { + "academy_id": 1, + "started_at": start_month, + "ended_at": end_month, + "id": 1, + "mentor_id": 1, + "overtime_minutes": 0.0, + "total_duration_in_hours": 1.0, + "total_duration_in_minutes": 60.0, + "total_price": bill1, + "reviewer_id": None, + } + ), + mentorship_bill_field( + { + "academy_id": 1, + "started_at": datetime.datetime(2021, 11, 1, 0, 0, 0, 0, tzinfo=pytz.UTC), + "ended_at": datetime.datetime(2021, 11, 30, 23, 59, 59, 999999, tzinfo=pytz.UTC), + "id": 2, + "mentor_id": 1, + "overtime_minutes": 60.0, + "total_duration_in_hours": 3.0, + "total_duration_in_minutes": 180.0, + "total_price": bill2, + } + ), + mentorship_bill_field( + { + "academy_id": 1, + "started_at": datetime.datetime(2021, 12, 1, 0, 0, 0, 0, tzinfo=pytz.UTC), + "ended_at": datetime.datetime(2021, 12, 31, 23, 59, 59, 999999, tzinfo=pytz.UTC), + "id": 3, + "mentor_id": 1, + "overtime_minutes": 60.0, + "total_duration_in_hours": 2.0, + "total_duration_in_minutes": 120.0, + "total_price": bill3, + } + ), + ], + ) + + status_message = "The mentor never joined the meeting, no time will be " "accounted for." 
+ + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + mentorship_session_field( + { + "accounted_duration": datetime.timedelta(seconds=3600), + "id": 1, + "mentee_id": 1, + "mentor_id": 1, + "service_id": 1, + "status": "COMPLETED", + "status_message": status_message, + "suggested_accounted_duration": datetime.timedelta(0), + "started_at": datetime.datetime(2021, 10, 16, 22, 0, tzinfo=pytz.UTC), + "ended_at": datetime.datetime(2021, 10, 16, 23, 0, tzinfo=pytz.UTC), + "summary": None, + "bill_id": 1, + } + ), + mentorship_session_field( + { + "accounted_duration": datetime.timedelta(seconds=3600), + "id": 2, + "mentee_id": 1, + "mentor_id": 1, + "service_id": 2, + "status": "COMPLETED", + "status_message": status_message, + "suggested_accounted_duration": datetime.timedelta(0), + "started_at": datetime.datetime(2021, 11, 16, 22, 0, tzinfo=pytz.UTC), + "ended_at": datetime.datetime(2021, 11, 16, 23, 0, tzinfo=pytz.UTC), + "summary": None, + "bill_id": 2, + } + ), + mentorship_session_field( + { + "accounted_duration": datetime.timedelta(seconds=7200), + "id": 3, + "mentee_id": 1, + "mentor_id": 1, + "service_id": 2, + "status": "COMPLETED", + "status_message": status_message, + "suggested_accounted_duration": datetime.timedelta(0), + "started_at": datetime.datetime(2021, 11, 17, 21, 0, tzinfo=pytz.UTC), + "ended_at": datetime.datetime(2021, 11, 17, 23, 0, tzinfo=pytz.UTC), + "summary": None, + "bill_id": 2, + } + ), + mentorship_session_field( + { + "accounted_duration": datetime.timedelta(seconds=7200), + "id": 4, + "mentee_id": 1, + "mentor_id": 1, + "service_id": 2, + "status": "COMPLETED", + "status_message": status_message, + "suggested_accounted_duration": datetime.timedelta(0), + "started_at": datetime.datetime(2021, 12, 30, 21, 0, tzinfo=pytz.UTC), + "ended_at": datetime.datetime(2021, 12, 30, 23, 0, tzinfo=pytz.UTC), + "summary": None, + "bill_id": 3, + } + ), + ], + ) + + @patch("django.utils.timezone.now", MagicMock(return_value=NOW)) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) def test_generate_bills_with_previous_bills_and_pending_sessions__status_recalculate(self): """ Generate bills with no previous billing history and 3 previous sessions @@ -493,197 +570,234 @@ def test_generate_bills_with_previous_bills_and_pending_sessions__status_recalcu end = NOW - datetime.timedelta(days=80, hours=1) start_month = start.replace(day=28, hour=23, minute=59, second=59) end_month = start.replace(day=28, hour=23, minute=59, second=59) + datetime.timedelta(days=4) - models_a = self.bc.database.create(mentor_profile=1, - user=1, - mentorship_session={ - 'status': 'COMPLETED', - 'started_at': datetime.datetime(2021, 10, 16, 22, 0, tzinfo=pytz.UTC), - 'ended_at': datetime.datetime(2021, 10, 16, 23, 0, tzinfo=pytz.UTC), - 'accounted_duration': datetime.timedelta(hours=1) - }, - mentorship_service=1, - mentorship_bill={ - 'status': 'RECALCULATE', - 'started_at': start_month, - 'ended_at': end_month, - 'reviewer_id': None, - }) - models = self.bc.database.create(mentor_profile=models_a['mentor_profile'], - user=models_a['user'], - mentorship_service=1, - mentorship_session=[{ - 'status': - 'COMPLETED', - 'started_at': - datetime.datetime(2021, 11, 16, 22, 0, tzinfo=pytz.UTC), - 'ended_at': - datetime.datetime(2021, 11, 16, 23, 0, tzinfo=pytz.UTC), - 'accounted_duration': - datetime.timedelta(hours=1) - }, { - 'status': - 'COMPLETED', - 'started_at': - datetime.datetime(2021, 11, 17, 21, 0, tzinfo=pytz.UTC), - 'ended_at': - 
datetime.datetime(2021, 11, 17, 23, 0, tzinfo=pytz.UTC), - 'accounted_duration': - datetime.timedelta(hours=2) - }, { - 'status': - 'COMPLETED', - 'started_at': - datetime.datetime(2021, 12, 30, 21, 0, tzinfo=pytz.UTC), - 'ended_at': - datetime.datetime(2021, 12, 30, 23, 0, tzinfo=pytz.UTC), - 'accounted_duration': - datetime.timedelta(hours=2) - }]) + models_a = self.bc.database.create( + mentor_profile=1, + user=1, + mentorship_session={ + "status": "COMPLETED", + "started_at": datetime.datetime(2021, 10, 16, 22, 0, tzinfo=pytz.UTC), + "ended_at": datetime.datetime(2021, 10, 16, 23, 0, tzinfo=pytz.UTC), + "accounted_duration": datetime.timedelta(hours=1), + }, + mentorship_service=1, + mentorship_bill={ + "status": "RECALCULATE", + "started_at": start_month, + "ended_at": end_month, + "reviewer_id": None, + }, + ) + models = self.bc.database.create( + mentor_profile=models_a["mentor_profile"], + user=models_a["user"], + mentorship_service=1, + mentorship_session=[ + { + "status": "COMPLETED", + "started_at": datetime.datetime(2021, 11, 16, 22, 0, tzinfo=pytz.UTC), + "ended_at": datetime.datetime(2021, 11, 16, 23, 0, tzinfo=pytz.UTC), + "accounted_duration": datetime.timedelta(hours=1), + }, + { + "status": "COMPLETED", + "started_at": datetime.datetime(2021, 11, 17, 21, 0, tzinfo=pytz.UTC), + "ended_at": datetime.datetime(2021, 11, 17, 23, 0, tzinfo=pytz.UTC), + "accounted_duration": datetime.timedelta(hours=2), + }, + { + "status": "COMPLETED", + "started_at": datetime.datetime(2021, 12, 30, 21, 0, tzinfo=pytz.UTC), + "ended_at": datetime.datetime(2021, 12, 30, 23, 0, tzinfo=pytz.UTC), + "accounted_duration": datetime.timedelta(hours=2), + }, + ], + ) mentor = models.mentor_profile bills = generate_mentor_bills(mentor) list_bills = [self.bc.format.to_dict(x) for x in bills] - bill1 = round(models_a.mentorship_session.accounted_duration.seconds / 60 / 60, - 2) * models.mentor_profile.price_per_hour - - bill2 = round((models.mentorship_session[0].accounted_duration.seconds + - models.mentorship_session[1].accounted_duration.seconds) / 60 / 60, - 2) * models.mentor_profile.price_per_hour - - bill3 = round((models.mentorship_session[2].accounted_duration.seconds) / 60 / 60, - 2) * models.mentor_profile.price_per_hour - - self.assertEqual(list_bills, [ - mentorship_bill_field({ - 'academy_id': 1, - 'started_at': start_month, - 'ended_at': end_month, - 'id': 1, - 'mentor_id': 1, - 'overtime_minutes': 0, - 'total_duration_in_hours': 1.0, - 'total_duration_in_minutes': 60.0, - 'total_price': bill1, - }), - mentorship_bill_field({ - 'academy_id': 1, - 'started_at': datetime.datetime(2021, 11, 1, 0, 0, 0, 0, tzinfo=pytz.UTC), - 'ended_at': datetime.datetime(2021, 11, 30, 23, 59, 59, 999999, tzinfo=pytz.UTC), - 'id': 2, - 'mentor_id': 1, - 'overtime_minutes': 60.0, - 'total_duration_in_hours': 3.0, - 'total_duration_in_minutes': 180.0, - 'total_price': bill2, - }), - mentorship_bill_field({ - 'academy_id': 1, - 'started_at': datetime.datetime(2021, 12, 1, 0, 0, 0, 0, tzinfo=pytz.UTC), - 'ended_at': datetime.datetime(2021, 12, 31, 23, 59, 59, 999999, tzinfo=pytz.UTC), - 'id': 3, - 'mentor_id': 1, - 'overtime_minutes': 60.0, - 'total_duration_in_hours': 2.0, - 'total_duration_in_minutes': 120.0, - 'total_price': bill3, - }), - ]) - - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipBill'), [ - mentorship_bill_field({ - 'academy_id': 1, - 'started_at': start_month, - 'ended_at': end_month, - 'id': 1, - 'mentor_id': 1, - 'overtime_minutes': 0.0, - 'total_duration_in_hours': 1.0, - 
'total_duration_in_minutes': 60.0, - 'total_price': bill1, - }), - mentorship_bill_field({ - 'academy_id': 1, - 'started_at': datetime.datetime(2021, 11, 1, 0, 0, 0, 0, tzinfo=pytz.UTC), - 'ended_at': datetime.datetime(2021, 11, 30, 23, 59, 59, 999999, tzinfo=pytz.UTC), - 'id': 2, - 'mentor_id': 1, - 'overtime_minutes': 60.0, - 'total_duration_in_hours': 3.0, - 'total_duration_in_minutes': 180.0, - 'total_price': bill2, - }), - mentorship_bill_field({ - 'academy_id': 1, - 'started_at': datetime.datetime(2021, 12, 1, 0, 0, 0, 0, tzinfo=pytz.UTC), - 'ended_at': datetime.datetime(2021, 12, 31, 23, 59, 59, 999999, tzinfo=pytz.UTC), - 'id': 3, - 'mentor_id': 1, - 'overtime_minutes': 60.0, - 'total_duration_in_hours': 2.0, - 'total_duration_in_minutes': 120.0, - 'total_price': bill3, - }), - ]) - - status_message = ('The mentor never joined the meeting, no time will be ' - 'accounted for.') - - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - mentorship_session_field({ - 'accounted_duration': datetime.timedelta(seconds=3600), - 'id': 1, - 'mentee_id': 1, - 'mentor_id': 1, - 'service_id': 1, - 'status': 'COMPLETED', - 'status_message': status_message, - 'suggested_accounted_duration': datetime.timedelta(0), - 'started_at': datetime.datetime(2021, 10, 16, 22, 0, tzinfo=pytz.UTC), - 'ended_at': datetime.datetime(2021, 10, 16, 23, 0, tzinfo=pytz.UTC), - 'summary': None, - 'bill_id': 1, - }), - mentorship_session_field({ - 'accounted_duration': datetime.timedelta(seconds=3600), - 'id': 2, - 'mentee_id': 1, - 'mentor_id': 1, - 'service_id': 2, - 'status': 'COMPLETED', - 'status_message': status_message, - 'suggested_accounted_duration': datetime.timedelta(0), - 'started_at': datetime.datetime(2021, 11, 16, 22, 0, tzinfo=pytz.UTC), - 'ended_at': datetime.datetime(2021, 11, 16, 23, 0, tzinfo=pytz.UTC), - 'summary': None, - 'bill_id': 2, - }), - mentorship_session_field({ - 'accounted_duration': datetime.timedelta(seconds=7200), - 'id': 3, - 'mentee_id': 1, - 'mentor_id': 1, - 'service_id': 2, - 'status': 'COMPLETED', - 'status_message': status_message, - 'suggested_accounted_duration': datetime.timedelta(0), - 'started_at': datetime.datetime(2021, 11, 17, 21, 0, tzinfo=pytz.UTC), - 'ended_at': datetime.datetime(2021, 11, 17, 23, 0, tzinfo=pytz.UTC), - 'summary': None, - 'bill_id': 2, - }), - mentorship_session_field({ - 'accounted_duration': datetime.timedelta(seconds=7200), - 'id': 4, - 'mentee_id': 1, - 'mentor_id': 1, - 'service_id': 2, - 'status': 'COMPLETED', - 'status_message': status_message, - 'suggested_accounted_duration': datetime.timedelta(0), - 'started_at': datetime.datetime(2021, 12, 30, 21, 0, tzinfo=pytz.UTC), - 'ended_at': datetime.datetime(2021, 12, 30, 23, 0, tzinfo=pytz.UTC), - 'summary': None, - 'bill_id': 3, - }), - ]) + bill1 = ( + round(models_a.mentorship_session.accounted_duration.seconds / 60 / 60, 2) + * models.mentor_profile.price_per_hour + ) + + bill2 = ( + round( + ( + models.mentorship_session[0].accounted_duration.seconds + + models.mentorship_session[1].accounted_duration.seconds + ) + / 60 + / 60, + 2, + ) + * models.mentor_profile.price_per_hour + ) + + bill3 = ( + round((models.mentorship_session[2].accounted_duration.seconds) / 60 / 60, 2) + * models.mentor_profile.price_per_hour + ) + + self.assertEqual( + list_bills, + [ + mentorship_bill_field( + { + "academy_id": 1, + "started_at": start_month, + "ended_at": end_month, + "id": 1, + "mentor_id": 1, + "overtime_minutes": 0, + "total_duration_in_hours": 1.0, + 
"total_duration_in_minutes": 60.0, + "total_price": bill1, + } + ), + mentorship_bill_field( + { + "academy_id": 1, + "started_at": datetime.datetime(2021, 11, 1, 0, 0, 0, 0, tzinfo=pytz.UTC), + "ended_at": datetime.datetime(2021, 11, 30, 23, 59, 59, 999999, tzinfo=pytz.UTC), + "id": 2, + "mentor_id": 1, + "overtime_minutes": 60.0, + "total_duration_in_hours": 3.0, + "total_duration_in_minutes": 180.0, + "total_price": bill2, + } + ), + mentorship_bill_field( + { + "academy_id": 1, + "started_at": datetime.datetime(2021, 12, 1, 0, 0, 0, 0, tzinfo=pytz.UTC), + "ended_at": datetime.datetime(2021, 12, 31, 23, 59, 59, 999999, tzinfo=pytz.UTC), + "id": 3, + "mentor_id": 1, + "overtime_minutes": 60.0, + "total_duration_in_hours": 2.0, + "total_duration_in_minutes": 120.0, + "total_price": bill3, + } + ), + ], + ) + + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipBill"), + [ + mentorship_bill_field( + { + "academy_id": 1, + "started_at": start_month, + "ended_at": end_month, + "id": 1, + "mentor_id": 1, + "overtime_minutes": 0.0, + "total_duration_in_hours": 1.0, + "total_duration_in_minutes": 60.0, + "total_price": bill1, + } + ), + mentorship_bill_field( + { + "academy_id": 1, + "started_at": datetime.datetime(2021, 11, 1, 0, 0, 0, 0, tzinfo=pytz.UTC), + "ended_at": datetime.datetime(2021, 11, 30, 23, 59, 59, 999999, tzinfo=pytz.UTC), + "id": 2, + "mentor_id": 1, + "overtime_minutes": 60.0, + "total_duration_in_hours": 3.0, + "total_duration_in_minutes": 180.0, + "total_price": bill2, + } + ), + mentorship_bill_field( + { + "academy_id": 1, + "started_at": datetime.datetime(2021, 12, 1, 0, 0, 0, 0, tzinfo=pytz.UTC), + "ended_at": datetime.datetime(2021, 12, 31, 23, 59, 59, 999999, tzinfo=pytz.UTC), + "id": 3, + "mentor_id": 1, + "overtime_minutes": 60.0, + "total_duration_in_hours": 2.0, + "total_duration_in_minutes": 120.0, + "total_price": bill3, + } + ), + ], + ) + + status_message = "The mentor never joined the meeting, no time will be " "accounted for." 
+ + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + mentorship_session_field( + { + "accounted_duration": datetime.timedelta(seconds=3600), + "id": 1, + "mentee_id": 1, + "mentor_id": 1, + "service_id": 1, + "status": "COMPLETED", + "status_message": status_message, + "suggested_accounted_duration": datetime.timedelta(0), + "started_at": datetime.datetime(2021, 10, 16, 22, 0, tzinfo=pytz.UTC), + "ended_at": datetime.datetime(2021, 10, 16, 23, 0, tzinfo=pytz.UTC), + "summary": None, + "bill_id": 1, + } + ), + mentorship_session_field( + { + "accounted_duration": datetime.timedelta(seconds=3600), + "id": 2, + "mentee_id": 1, + "mentor_id": 1, + "service_id": 2, + "status": "COMPLETED", + "status_message": status_message, + "suggested_accounted_duration": datetime.timedelta(0), + "started_at": datetime.datetime(2021, 11, 16, 22, 0, tzinfo=pytz.UTC), + "ended_at": datetime.datetime(2021, 11, 16, 23, 0, tzinfo=pytz.UTC), + "summary": None, + "bill_id": 2, + } + ), + mentorship_session_field( + { + "accounted_duration": datetime.timedelta(seconds=7200), + "id": 3, + "mentee_id": 1, + "mentor_id": 1, + "service_id": 2, + "status": "COMPLETED", + "status_message": status_message, + "suggested_accounted_duration": datetime.timedelta(0), + "started_at": datetime.datetime(2021, 11, 17, 21, 0, tzinfo=pytz.UTC), + "ended_at": datetime.datetime(2021, 11, 17, 23, 0, tzinfo=pytz.UTC), + "summary": None, + "bill_id": 2, + } + ), + mentorship_session_field( + { + "accounted_duration": datetime.timedelta(seconds=7200), + "id": 4, + "mentee_id": 1, + "mentor_id": 1, + "service_id": 2, + "status": "COMPLETED", + "status_message": status_message, + "suggested_accounted_duration": datetime.timedelta(0), + "started_at": datetime.datetime(2021, 12, 30, 21, 0, tzinfo=pytz.UTC), + "ended_at": datetime.datetime(2021, 12, 30, 23, 0, tzinfo=pytz.UTC), + "summary": None, + "bill_id": 3, + } + ), + ], + ) diff --git a/breathecode/mentorship/tests/actions/tests_get_accounted_time.py b/breathecode/mentorship/tests/actions/tests_get_accounted_time.py index 28fe857f0..ec4e21991 100644 --- a/breathecode/mentorship/tests/actions/tests_get_accounted_time.py +++ b/breathecode/mentorship/tests/actions/tests_get_accounted_time.py @@ -1,6 +1,7 @@ """ Test mentorships """ + from datetime import datetime, timedelta import random from unittest.mock import patch @@ -27,10 +28,10 @@ def get_env(key, default=None): return get_env -ENV = {'DAILY_API_URL': 'https://netscape.bankruptcy.story'} -SESSION_NAME = 'luxray' -URL = f'https://netscape.bankruptcy.story/v1/rooms/{SESSION_NAME}' -DATA = {'x': 2} +ENV = {"DAILY_API_URL": "https://netscape.bankruptcy.story"} +SESSION_NAME = "luxray" +URL = f"https://netscape.bankruptcy.story/v1/rooms/{SESSION_NAME}" +DATA = {"x": 2} class GenerateMentorBillsTestCase(MentorshipTestCase): @@ -40,23 +41,26 @@ class GenerateMentorBillsTestCase(MentorshipTestCase): def test__without_started_at__without_mentor_joined_at(self): mentorship_session = { - 'started_at': None, - 'mentor_joined_at': None, + "started_at": None, + "mentor_joined_at": None, } - mentorship_service = {'missed_meeting_duration': timedelta(minutes=10)} + mentorship_service = {"missed_meeting_duration": timedelta(minutes=10)} model = self.bc.database.create(mentorship_session=mentorship_session, mentorship_service=mentorship_service) mentorship_session_db = self.bc.format.to_dict(model.mentorship_session) result = get_accounted_time(model.mentorship_session) expected = { - 'accounted_duration': 
timedelta(0), - 'status_message': 'No one joined this session, nothing will be accounted for.', + "accounted_duration": timedelta(0), + "status_message": "No one joined this session, nothing will be accounted for.", } self.assertEqual(result, expected) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - mentorship_session_db, - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + mentorship_session_db, + ], + ) """ 🔽🔽🔽 without MentorshipSession without started_at and with mentor_joined_at @@ -65,23 +69,26 @@ def test__without_started_at__without_mentor_joined_at(self): def test__without_started_at__with_mentor_joined_at__with_missed_meeting_duration_eq_zero(self): now = timezone.now() mentorship_session = { - 'started_at': None, - 'mentor_joined_at': now, + "started_at": None, + "mentor_joined_at": now, } - mentorship_service = {'missed_meeting_duration': timedelta(minutes=0)} + mentorship_service = {"missed_meeting_duration": timedelta(minutes=0)} model = self.bc.database.create(mentorship_session=mentorship_session, mentorship_service=mentorship_service) mentorship_session_db = self.bc.format.to_dict(model.mentorship_session) result = get_accounted_time(model.mentorship_session) expected = { - 'accounted_duration': timedelta(0), - 'status_message': 'Mentor joined but mentee never did, No time will be included on the bill.', + "accounted_duration": timedelta(0), + "status_message": "Mentor joined but mentee never did, No time will be included on the bill.", } self.assertEqual(result, expected) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - mentorship_session_db, - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + mentorship_session_db, + ], + ) """ 🔽🔽🔽 without MentorshipSession without started_at and with mentor_joined_at @@ -90,23 +97,26 @@ def test__without_started_at__with_mentor_joined_at__with_missed_meeting_duratio def test__without_started_at__with_mentor_joined_at__with_missed_meeting_duration_eq_ten(self): now = timezone.now() mentorship_session = { - 'started_at': None, - 'mentor_joined_at': now, + "started_at": None, + "mentor_joined_at": now, } - mentorship_service = {'missed_meeting_duration': timedelta(minutes=10)} + mentorship_service = {"missed_meeting_duration": timedelta(minutes=10)} model = self.bc.database.create(mentorship_session=mentorship_session, mentorship_service=mentorship_service) mentorship_session_db = self.bc.format.to_dict(model.mentorship_session) result = get_accounted_time(model.mentorship_session) expected = { - 'accounted_duration': timedelta(seconds=600), - 'status_message': 'Mentor joined but mentee never did, 10 min will be accounted for the bill.', + "accounted_duration": timedelta(seconds=600), + "status_message": "Mentor joined but mentee never did, 10 min will be accounted for the bill.", } self.assertEqual(result, expected) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - mentorship_session_db, - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + mentorship_session_db, + ], + ) """ 🔽🔽🔽 without MentorshipSession with started_at and without mentor_joined_at @@ -115,23 +125,26 @@ def test__without_started_at__with_mentor_joined_at__with_missed_meeting_duratio def test__with_started_at__without_mentor_joined_at(self): now = timezone.now() mentorship_session = { - 'started_at': now, - 'mentor_joined_at': None, + "started_at": now, + 
"mentor_joined_at": None, } - mentorship_service = {'missed_meeting_duration': timedelta(minutes=10)} + mentorship_service = {"missed_meeting_duration": timedelta(minutes=10)} model = self.bc.database.create(mentorship_session=mentorship_session, mentorship_service=mentorship_service) mentorship_session_db = self.bc.format.to_dict(model.mentorship_session) result = get_accounted_time(model.mentorship_session) expected = { - 'accounted_duration': timedelta(0), - 'status_message': 'The mentor never joined the meeting, no time will be accounted for.', + "accounted_duration": timedelta(0), + "status_message": "The mentor never joined the meeting, no time will be accounted for.", } self.assertEqual(result, expected) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - mentorship_session_db, - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + mentorship_session_db, + ], + ) """ 🔽🔽🔽 without MentorshipSession with started_at and mentor_joined_at @@ -140,23 +153,26 @@ def test__with_started_at__without_mentor_joined_at(self): def test__with_started_at__with_mentor_joined_at(self): now = timezone.now() mentorship_session = { - 'started_at': now, - 'mentor_joined_at': now, + "started_at": now, + "mentor_joined_at": now, } - mentorship_service = {'missed_meeting_duration': timedelta(minutes=10)} + mentorship_service = {"missed_meeting_duration": timedelta(minutes=10)} model = self.bc.database.create(mentorship_session=mentorship_session, mentorship_service=mentorship_service) mentorship_session_db = self.bc.format.to_dict(model.mentorship_session) result = get_accounted_time(model.mentorship_session) expected = { - 'accounted_duration': timedelta(seconds=3600), - 'status_message': 'The session never ended, accounting for the standard duration 1 hr.', + "accounted_duration": timedelta(seconds=3600), + "status_message": "The session never ended, accounting for the standard duration 1 hr.", } self.assertEqual(result, expected) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - mentorship_session_db, - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + mentorship_session_db, + ], + ) """ 🔽🔽🔽 without MentorshipSession with started_at, mentor_joined_at and ends_at @@ -168,30 +184,35 @@ def test__with_started_at__with_mentor_joined_at__with_ends_at(self): now = timezone.now() diff = timedelta(seconds=n) mentorship_session = { - 'started_at': now, - 'mentor_joined_at': now, - 'ends_at': now + diff, + "started_at": now, + "mentor_joined_at": now, + "ends_at": now + diff, } - mentorship_service = {'missed_meeting_duration': timedelta(minutes=10)} - model = self.bc.database.create(mentorship_session=mentorship_session, - mentorship_service=mentorship_service) + mentorship_service = {"missed_meeting_duration": timedelta(minutes=10)} + model = self.bc.database.create( + mentorship_session=mentorship_session, mentorship_service=mentorship_service + ) mentorship_session_db = self.bc.format.to_dict(model.mentorship_session) result = get_accounted_time(model.mentorship_session) expected = { - 'accounted_duration': - diff, - 'status_message': ('The session never ended, accounting for the expected meeting duration ' - f'that was {duration_to_str(diff)}.'), + "accounted_duration": diff, + "status_message": ( + "The session never ended, accounting for the expected meeting duration " + f"that was {duration_to_str(diff)}." 
+ ), } self.assertEqual(result, expected) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - mentorship_session_db, - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + mentorship_session_db, + ], + ) # teardown - self.bc.database.delete('mentorship.MentorshipSession') + self.bc.database.delete("mentorship.MentorshipSession") """ 🔽🔽🔽 without MentorshipSession with started_at, mentor_joined_at and mentee_left_at @@ -203,30 +224,35 @@ def test__with_started_at__with_mentor_joined_at__with_mentee_left_at(self): now = timezone.now() diff = timedelta(seconds=n) mentorship_session = { - 'started_at': now, - 'mentor_joined_at': now, - 'mentee_left_at': now + diff, + "started_at": now, + "mentor_joined_at": now, + "mentee_left_at": now + diff, } - mentorship_service = {'missed_meeting_duration': timedelta(minutes=10)} - model = self.bc.database.create(mentorship_session=mentorship_session, - mentorship_service=mentorship_service) + mentorship_service = {"missed_meeting_duration": timedelta(minutes=10)} + model = self.bc.database.create( + mentorship_session=mentorship_session, mentorship_service=mentorship_service + ) mentorship_session_db = self.bc.format.to_dict(model.mentorship_session) result = get_accounted_time(model.mentorship_session) expected = { - 'accounted_duration': - diff, - 'status_message': ('The session never ended, accounting duration based on the time where ' - f'the mentee left the meeting {duration_to_str(diff)}.'), + "accounted_duration": diff, + "status_message": ( + "The session never ended, accounting duration based on the time where " + f"the mentee left the meeting {duration_to_str(diff)}." + ), } self.assertEqual(result, expected) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - mentorship_session_db, - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + mentorship_session_db, + ], + ) # teardown - self.bc.database.delete('mentorship.MentorshipSession') + self.bc.database.delete("mentorship.MentorshipSession") """ 🔽🔽🔽 without MentorshipSession with started_at, mentor_joined_at and mentor_left_at @@ -238,30 +264,35 @@ def test__with_started_at__with_mentor_joined_at__with_mentor_left_at(self): now = timezone.now() diff = timedelta(seconds=n) mentorship_session = { - 'started_at': now, - 'mentor_joined_at': now, - 'mentor_left_at': now + diff, + "started_at": now, + "mentor_joined_at": now, + "mentor_left_at": now + diff, } - mentorship_service = {'missed_meeting_duration': timedelta(minutes=10)} - model = self.bc.database.create(mentorship_session=mentorship_session, - mentorship_service=mentorship_service) + mentorship_service = {"missed_meeting_duration": timedelta(minutes=10)} + model = self.bc.database.create( + mentorship_session=mentorship_session, mentorship_service=mentorship_service + ) mentorship_session_db = self.bc.format.to_dict(model.mentorship_session) result = get_accounted_time(model.mentorship_session) expected = { - 'accounted_duration': - diff, - 'status_message': ('The session never ended, accounting duration based on the time where ' - f'the mentor left the meeting {duration_to_str(diff)}.'), + "accounted_duration": diff, + "status_message": ( + "The session never ended, accounting duration based on the time where " + f"the mentor left the meeting {duration_to_str(diff)}." 
+ ), } self.assertEqual(result, expected) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - mentorship_session_db, - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + mentorship_session_db, + ], + ) # teardown - self.bc.database.delete('mentorship.MentorshipSession') + self.bc.database.delete("mentorship.MentorshipSession") """ 🔽🔽🔽 without MentorshipSession with started_at, mentor_joined_at and ended_at, ended in the pass @@ -270,24 +301,27 @@ def test__with_started_at__with_mentor_joined_at__with_mentor_left_at(self): def test__with_started_at__with_mentor_joined_at__with_ended_at__ended_in_the_pass(self): now = timezone.now() mentorship_session = { - 'started_at': now, - 'mentor_joined_at': now, - 'ended_at': now - timedelta(seconds=1), + "started_at": now, + "mentor_joined_at": now, + "ended_at": now - timedelta(seconds=1), } - mentorship_service = {'missed_meeting_duration': timedelta(minutes=10)} + mentorship_service = {"missed_meeting_duration": timedelta(minutes=10)} model = self.bc.database.create(mentorship_session=mentorship_session, mentorship_service=mentorship_service) mentorship_session_db = self.bc.format.to_dict(model.mentorship_session) result = get_accounted_time(model.mentorship_session) expected = { - 'accounted_duration': timedelta(0), - 'status_message': 'Meeting started before it ended? No duration will be accounted for.', + "accounted_duration": timedelta(0), + "status_message": "Meeting started before it ended? No duration will be accounted for.", } self.assertEqual(result, expected) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - mentorship_session_db, - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + mentorship_session_db, + ], + ) """ 🔽🔽🔽 without MentorshipSession with started_at, mentor_joined_at and ended_at, one days of duration @@ -296,26 +330,30 @@ def test__with_started_at__with_mentor_joined_at__with_ended_at__ended_in_the_pa def test__with_started_at__with_mentor_joined_at__with_ended_at__one_days_of_duration(self): now = timezone.now() mentorship_session = { - 'started_at': now, - 'mentor_joined_at': now, - 'ended_at': now + timedelta(days=1), + "started_at": now, + "mentor_joined_at": now, + "ended_at": now + timedelta(days=1), } - mentorship_service = {'missed_meeting_duration': timedelta(minutes=10)} + mentorship_service = {"missed_meeting_duration": timedelta(minutes=10)} model = self.bc.database.create(mentorship_session=mentorship_session, mentorship_service=mentorship_service) mentorship_session_db = self.bc.format.to_dict(model.mentorship_session) result = get_accounted_time(model.mentorship_session) expected = { - 'accounted_duration': - timedelta(seconds=3600), - 'status_message': ('This session lasted more than a day, no one ever left, was probably never ' - 'closed, accounting for standard duration 1 hr.'), + "accounted_duration": timedelta(seconds=3600), + "status_message": ( + "This session lasted more than a day, no one ever left, was probably never " + "closed, accounting for standard duration 1 hr." 
+ ), } self.assertEqual(result, expected) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - mentorship_session_db, - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + mentorship_session_db, + ], + ) """ 🔽🔽🔽 without MentorshipSession with started_at, mentor_joined_at, ended_at and mentee_left_at, one days of @@ -326,33 +364,39 @@ def test__with_started_at__with_mentor_joined_at__with_ended_at__with_mentee_lef now = timezone.now() diff = timedelta(seconds=random.randint(0, 10000)) mentorship_session = { - 'started_at': now, - 'mentor_joined_at': now, - 'mentee_left_at': now + diff, - 'ended_at': now + timedelta(days=1), + "started_at": now, + "mentor_joined_at": now, + "mentee_left_at": now + diff, + "ended_at": now + timedelta(days=1), } - mentorship_service = {'missed_meeting_duration': timedelta(minutes=10)} + mentorship_service = {"missed_meeting_duration": timedelta(minutes=10)} model = self.bc.database.create(mentorship_session=mentorship_session, mentorship_service=mentorship_service) mentorship_session_db = self.bc.format.to_dict(model.mentorship_session) result = get_accounted_time(model.mentorship_session) - extended_message = (' The session accounted duration was limited to the maximum allowed ' - f'{duration_to_str(model.mentorship_service.max_duration)}.') + extended_message = ( + " The session accounted duration was limited to the maximum allowed " + f"{duration_to_str(model.mentorship_service.max_duration)}." + ) maximum = timedelta(hours=2) expected = { - 'accounted_duration': - diff if diff < maximum else maximum, - 'status_message': ('The lasted way more than it should, accounting duration based on the time ' - f'where the mentee left the meeting {duration_to_str(diff)}.'), + "accounted_duration": diff if diff < maximum else maximum, + "status_message": ( + "The lasted way more than it should, accounting duration based on the time " + f"where the mentee left the meeting {duration_to_str(diff)}." 
+ ), } if diff > model.mentorship_service.max_duration: - expected['status_message'] += extended_message + expected["status_message"] += extended_message self.assertEqual(result, expected) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - mentorship_session_db, - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + mentorship_session_db, + ], + ) """ 🔽🔽🔽 without MentorshipSession with started_at, mentor_joined_at and ended_at, less one days of duration, @@ -362,27 +406,31 @@ def test__with_started_at__with_mentor_joined_at__with_ended_at__with_mentee_lef def test__with_started_at__with_mentor_joined_at__with_ended_at__less_one_days_of_duration__with_max_duration(self): now = timezone.now() mentorship_session = { - 'started_at': now, - 'mentor_joined_at': now, - 'ended_at': now + timedelta(seconds=random.randint(7201, 85399)), # less one day + "started_at": now, + "mentor_joined_at": now, + "ended_at": now + timedelta(seconds=random.randint(7201, 85399)), # less one day } - mentorship_service = {'missed_meeting_duration': timedelta(minutes=10)} + mentorship_service = {"missed_meeting_duration": timedelta(minutes=10)} model = self.bc.database.create(mentorship_session=mentorship_session, mentorship_service=mentorship_service) mentorship_session_db = self.bc.format.to_dict(model.mentorship_session) result = get_accounted_time(model.mentorship_session) expected = { - 'accounted_duration': - model.mentorship_service.max_duration, - 'status_message': ('The duration of the session is bigger than the maximum allowed, accounting ' - 'for max duration of ' - f'{duration_to_str(model.mentorship_service.max_duration)}.'), + "accounted_duration": model.mentorship_service.max_duration, + "status_message": ( + "The duration of the session is bigger than the maximum allowed, accounting " + "for max duration of " + f"{duration_to_str(model.mentorship_service.max_duration)}." 
+ ), } self.assertEqual(result, expected) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - mentorship_session_db, - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + mentorship_session_db, + ], + ) """ 🔽🔽🔽 without MentorshipSession with started_at, mentor_joined_at and ended_at, less one days of duration, @@ -390,29 +438,36 @@ def test__with_started_at__with_mentor_joined_at__with_ended_at__less_one_days_o """ def test__with_started_at__with_mentor_joined_at__with_ended_at__less_one_days_of_duration__without_max_duration( - self): + self, + ): now = timezone.now() diff = timedelta(seconds=random.randint(0, 85399)) # less one day mentorship_session = { - 'started_at': now, - 'mentor_joined_at': now, - 'ended_at': now + diff, + "started_at": now, + "mentor_joined_at": now, + "ended_at": now + diff, } - mentorship_service = {'missed_meeting_duration': timedelta(minutes=10), 'max_duration': timedelta(0)} + mentorship_service = {"missed_meeting_duration": timedelta(minutes=10), "max_duration": timedelta(0)} model = self.bc.database.create(mentorship_session=mentorship_session, mentorship_service=mentorship_service) mentorship_session_db = self.bc.format.to_dict(model.mentorship_session) result = get_accounted_time(model.mentorship_session) - extended_message = (' The session accounted duration was limited to the maximum allowed ' - f'{duration_to_str(model.mentorship_service.max_duration)}.') + extended_message = ( + " The session accounted duration was limited to the maximum allowed " + f"{duration_to_str(model.mentorship_service.max_duration)}." + ) expected = { - 'accounted_duration': - model.mentorship_service.max_duration, - 'status_message': ('No extra time is allowed for session, accounting for standard duration of ' - f'{duration_to_str(model.mentorship_service.duration)}.' + extended_message), + "accounted_duration": model.mentorship_service.max_duration, + "status_message": ( + "No extra time is allowed for session, accounting for standard duration of " + f"{duration_to_str(model.mentorship_service.duration)}." 
+ extended_message + ), } self.assertEqual(result, expected) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - mentorship_session_db, - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + mentorship_session_db, + ], + ) diff --git a/breathecode/mentorship/tests/actions/tests_get_pending_sessions_or_create.py b/breathecode/mentorship/tests/actions/tests_get_pending_sessions_or_create.py index 3da199ca1..c89bef7fd 100644 --- a/breathecode/mentorship/tests/actions/tests_get_pending_sessions_or_create.py +++ b/breathecode/mentorship/tests/actions/tests_get_pending_sessions_or_create.py @@ -1,6 +1,7 @@ """ Test mentorhips """ + from datetime import timedelta from unittest.mock import MagicMock, call, patch @@ -14,148 +15,160 @@ from ...models import MentorshipSession from ..mixins import MentorshipTestCase -daily_url = '/v1/rooms' -daily_payload = {'url': 'https://4geeks.daily.com/asdasd', 'name': 'asdasd'} +daily_url = "/v1/rooms" +daily_payload = {"url": "https://4geeks.daily.com/asdasd", "name": "asdasd"} ENDS_AT = timezone.now() def format_mentorship_session_attrs(attrs={}): return { - 'accounted_duration': None, - 'agenda': None, - 'allow_billing': True, - 'bill_id': None, - 'ended_at': None, - 'calendly_uuid': None, - 'ends_at': None, - 'id': 0, - 'is_online': False, - 'latitude': None, - 'longitude': None, - 'mentee_id': None, - 'service_id': None, - 'mentee_left_at': None, - 'mentor_id': 0, - 'mentor_joined_at': None, - 'mentor_left_at': None, - 'name': None, - 'online_meeting_url': None, - 'online_recording_url': None, - 'started_at': None, - 'starts_at': None, - 'status': 'PENDING', - 'status_message': None, - 'suggested_accounted_duration': None, - 'summary': None, - 'questions_and_answers': None, + "accounted_duration": None, + "agenda": None, + "allow_billing": True, + "bill_id": None, + "ended_at": None, + "calendly_uuid": None, + "ends_at": None, + "id": 0, + "is_online": False, + "latitude": None, + "longitude": None, + "mentee_id": None, + "service_id": None, + "mentee_left_at": None, + "mentor_id": 0, + "mentor_joined_at": None, + "mentor_left_at": None, + "name": None, + "online_meeting_url": None, + "online_recording_url": None, + "started_at": None, + "starts_at": None, + "status": "PENDING", + "status_message": None, + "suggested_accounted_duration": None, + "summary": None, + "questions_and_answers": None, **attrs, } class GoogleMeetMock: - def __init__(self, meeting_uri='https://meet.google.com/fake'): + def __init__(self, meeting_uri="https://meet.google.com/fake"): self.meeting_uri = meeting_uri def get_title(pk, service, mentor) -> str: - return (f'{service.name} {pk} | {mentor.user.first_name} {mentor.user.last_name}') + return f"{service.name} {pk} | {mentor.user.first_name} {mentor.user.last_name}" class GetOrCreateSessionTestSuite(MentorshipTestCase): - @patch(REQUESTS_PATH['request'], apply_requests_request_mock([(200, daily_url, daily_payload)])) - @patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=ENDS_AT)) - @patch('breathecode.mentorship.actions.close_older_sessions', MagicMock()) + @patch(REQUESTS_PATH["request"], apply_requests_request_mock([(200, daily_url, daily_payload)])) + @patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=ENDS_AT)) + 
@patch("breathecode.mentorship.actions.close_older_sessions", MagicMock()) def test_create_session_mentor_first_no_previous_nothing__daily(self): """ When the mentor gets into the room before the mentee if should create a room with status 'pending' """ - models = self.bc.database.create(mentor_profile=1, user=1, mentorship_service={'video_provider': 'DAILY'}) + models = self.bc.database.create(mentor_profile=1, user=1, mentorship_service={"video_provider": "DAILY"}) mentor = models.mentor_profile - mentor_token, created = Token.get_or_create(mentor.user, token_type='permanent') + mentor_token, created = Token.get_or_create(mentor.user, token_type="permanent") pending_sessions = get_pending_sessions_or_create(mentor_token, mentor, models.mentorship_service, mentee=None) self.bc.check.queryset_of(pending_sessions, MentorshipSession) self.bc.check.queryset_with_pks(pending_sessions, [1]) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - format_mentorship_session_attrs({ - 'id': 1, - 'status': 'PENDING', - 'mentor_id': 1, - 'mentee_id': None, - 'service_id': 1, - 'is_online': True, - 'name': 'asdasd', - 'online_meeting_url': 'https://4geeks.daily.com/asdasd', - 'ends_at': ENDS_AT + timedelta(seconds=3600), - }), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + format_mentorship_session_attrs( + { + "id": 1, + "status": "PENDING", + "mentor_id": 1, + "mentee_id": None, + "service_id": 1, + "is_online": True, + "name": "asdasd", + "online_meeting_url": "https://4geeks.daily.com/asdasd", + "ends_at": ENDS_AT + timedelta(seconds=3600), + } + ), + ], + ) self.assertEqual(actions.close_older_sessions.call_args_list, [call()]) - @patch.multiple('breathecode.services.google_meet.google_meet.GoogleMeet', - __init__=MagicMock(return_value=None), - create_space=MagicMock(return_value=GoogleMeetMock(meeting_uri='https://meet.google.com/fake'))) - @patch('breathecode.mentorship.signals.mentorship_session_status.send', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=ENDS_AT)) - @patch('breathecode.mentorship.actions.close_older_sessions', MagicMock()) + @patch.multiple( + "breathecode.services.google_meet.google_meet.GoogleMeet", + __init__=MagicMock(return_value=None), + create_space=MagicMock(return_value=GoogleMeetMock(meeting_uri="https://meet.google.com/fake")), + ) + @patch("breathecode.mentorship.signals.mentorship_session_status.send", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=ENDS_AT)) + @patch("breathecode.mentorship.actions.close_older_sessions", MagicMock()) def test_create_session_mentor_first_no_previous_nothing__google_meet(self): """ When the mentor gets into the room before the mentee if should create a room with status 'pending' """ - models = self.bc.database.create(mentor_profile=1, user=1, mentorship_service={'video_provider': 'GOOGLE_MEET'}) + models = self.bc.database.create(mentor_profile=1, user=1, mentorship_service={"video_provider": "GOOGLE_MEET"}) mentor = models.mentor_profile - mentor_token, created = Token.get_or_create(mentor.user, token_type='permanent') + mentor_token, created = Token.get_or_create(mentor.user, token_type="permanent") pending_sessions = get_pending_sessions_or_create(mentor_token, mentor, models.mentorship_service, mentee=None) self.bc.check.queryset_of(pending_sessions, MentorshipSession) self.bc.check.queryset_with_pks(pending_sessions, [1]) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - 
format_mentorship_session_attrs({ - 'id': 1, - 'status': 'PENDING', - 'mentor_id': 1, - 'mentee_id': None, - 'service_id': 1, - 'is_online': True, - 'name': get_title(1, models.mentorship_service, models.mentor_profile), - 'online_meeting_url': 'https://meet.google.com/fake', - 'ends_at': ENDS_AT + timedelta(seconds=3600), - }), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + format_mentorship_session_attrs( + { + "id": 1, + "status": "PENDING", + "mentor_id": 1, + "mentee_id": None, + "service_id": 1, + "is_online": True, + "name": get_title(1, models.mentorship_service, models.mentor_profile), + "online_meeting_url": "https://meet.google.com/fake", + "ends_at": ENDS_AT + timedelta(seconds=3600), + } + ), + ], + ) self.assertEqual(actions.close_older_sessions.call_args_list, [call()]) - @patch(REQUESTS_PATH['request'], apply_requests_request_mock([(200, daily_url, daily_payload)])) - @patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=ENDS_AT)) - @patch('breathecode.mentorship.actions.close_older_sessions', MagicMock()) + @patch(REQUESTS_PATH["request"], apply_requests_request_mock([(200, daily_url, daily_payload)])) + @patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=ENDS_AT)) + @patch("breathecode.mentorship.actions.close_older_sessions", MagicMock()) def test_create_session_mentor_first_previous_pending_without_mentee(self): """ When the mentor gets into the room before the mentee but there was a previous unfinished without mentee it should re-use that previous room """ - mentorship_session = {'mentee_id': None} - models = self.bc.database.create(mentor_profile=1, - mentorship_session=mentorship_session, - mentorship_service={'video_provider': 'DAILY'}) + mentorship_session = {"mentee_id": None} + models = self.bc.database.create( + mentor_profile=1, mentorship_session=mentorship_session, mentorship_service={"video_provider": "DAILY"} + ) mentor = models.mentor_profile - mentor_token, created = Token.get_or_create(mentor.user, token_type='permanent') + mentor_token, created = Token.get_or_create(mentor.user, token_type="permanent") # since there is a previous session without mentee, it should re use it pending_sessions = get_pending_sessions_or_create(mentor_token, mentor, models.mentorship_service, mentee=None) @@ -163,215 +176,252 @@ def test_create_session_mentor_first_previous_pending_without_mentee(self): self.bc.check.queryset_of(pending_sessions, MentorshipSession) self.bc.check.queryset_with_pks(pending_sessions, [1]) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - format_mentorship_session_attrs({ - 'id': 1, - 'status': 'PENDING', - 'mentor_id': 1, - 'service_id': 1, - 'mentee_id': None, - 'is_online': False, - 'ends_at': None, - }), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + format_mentorship_session_attrs( + { + "id": 1, + "status": "PENDING", + "mentor_id": 1, + "service_id": 1, + "mentee_id": None, + "is_online": False, + "ends_at": None, + } + ), + ], + ) self.assertEqual(actions.close_older_sessions.call_args_list, [call()]) - @patch(REQUESTS_PATH['request'], apply_requests_request_mock([(200, daily_url, daily_payload)])) - @patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) - 
@patch('django.utils.timezone.now', MagicMock(return_value=ENDS_AT)) - @patch('breathecode.mentorship.actions.close_older_sessions', MagicMock()) + @patch(REQUESTS_PATH["request"], apply_requests_request_mock([(200, daily_url, daily_payload)])) + @patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=ENDS_AT)) + @patch("breathecode.mentorship.actions.close_older_sessions", MagicMock()) def test_create_session_mentor_first_previous_pending_with_mentee(self): """ Mentor comes first, there is a previous non-started session with a mentee, it should return that previouse one (because it needs to be closed) instead of creating a new one """ - mentorship_session = {'status': 'PENDING'} - models = self.bc.database.create(mentor_profile=1, - user=1, - mentorship_session=mentorship_session, - mentorship_service={'video_provider': 'DAILY'}) + mentorship_session = {"status": "PENDING"} + models = self.bc.database.create( + mentor_profile=1, + user=1, + mentorship_session=mentorship_session, + mentorship_service={"video_provider": "DAILY"}, + ) mentor = models.mentor_profile session = models.mentorship_session - mentor_token, created = Token.get_or_create(mentor.user, token_type='permanent') + mentor_token, created = Token.get_or_create(mentor.user, token_type="permanent") sessions = get_pending_sessions_or_create(mentor_token, mentor, models.mentorship_service) self.bc.check.queryset_of(sessions, MentorshipSession) self.bc.check.queryset_with_pks(sessions, [1]) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - format_mentorship_session_attrs({ - 'id': 1, - 'status': 'PENDING', - 'mentor_id': 1, - 'mentee_id': 1, - 'service_id': 1, - 'is_online': False, - 'ends_at': None, - }), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + format_mentorship_session_attrs( + { + "id": 1, + "status": "PENDING", + "mentor_id": 1, + "mentee_id": 1, + "service_id": 1, + "is_online": False, + "ends_at": None, + } + ), + ], + ) self.assertEqual(actions.close_older_sessions.call_args_list, [call()]) - @patch(REQUESTS_PATH['request'], apply_requests_request_mock([(200, daily_url, daily_payload)])) - @patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=ENDS_AT)) - @patch('breathecode.mentorship.actions.close_older_sessions', MagicMock()) - #TODO: without mentee or with mentee? + @patch(REQUESTS_PATH["request"], apply_requests_request_mock([(200, daily_url, daily_payload)])) + @patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=ENDS_AT)) + @patch("breathecode.mentorship.actions.close_older_sessions", MagicMock()) + # TODO: without mentee or with mentee? 
def test_create_session_mentor_first_started_without_mentee(self): """ Mentor comes first, there is a previous started session with a mentee, it should return that previouse one (because it needs to be closed) instead of creating a new one """ - mentorship_session = {'status': 'STARTED', 'started_at': timezone.now(), 'mentee_id': None} - models = self.bc.database.create(mentor_profile=1, - user=1, - mentorship_session=mentorship_session, - mentorship_service={'video_provider': 'DAILY'}) + mentorship_session = {"status": "STARTED", "started_at": timezone.now(), "mentee_id": None} + models = self.bc.database.create( + mentor_profile=1, + user=1, + mentorship_session=mentorship_session, + mentorship_service={"video_provider": "DAILY"}, + ) mentor = models.mentor_profile - mentor_token, created = Token.get_or_create(mentor.user, token_type='permanent') + mentor_token, created = Token.get_or_create(mentor.user, token_type="permanent") sessions = get_pending_sessions_or_create(mentor_token, mentor, models.mentorship_service) self.bc.check.queryset_of(sessions, MentorshipSession) self.bc.check.queryset_with_pks(sessions, [1]) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - format_mentorship_session_attrs({ - 'id': 1, - 'status': 'STARTED', - 'mentor_id': 1, - 'mentee_id': None, - 'service_id': 1, - 'is_online': False, - 'ends_at': None, - 'started_at': ENDS_AT, - }), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + format_mentorship_session_attrs( + { + "id": 1, + "status": "STARTED", + "mentor_id": 1, + "mentee_id": None, + "service_id": 1, + "is_online": False, + "ends_at": None, + "started_at": ENDS_AT, + } + ), + ], + ) self.assertEqual(actions.close_older_sessions.call_args_list, [call()]) - @patch(REQUESTS_PATH['request'], apply_requests_request_mock([(200, daily_url, daily_payload)])) - @patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=ENDS_AT)) - @patch('breathecode.mentorship.actions.close_older_sessions', MagicMock()) + @patch(REQUESTS_PATH["request"], apply_requests_request_mock([(200, daily_url, daily_payload)])) + @patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=ENDS_AT)) + @patch("breathecode.mentorship.actions.close_older_sessions", MagicMock()) def test_create_session_mentee_first_no_previous_nothing__daily(self): """ Mentee comes first, there is nothing previously created it should return a brand new sessions with started at already started """ - models = self.bc.database.create(mentor_profile=1, user=2, mentorship_service={'video_provider': 'DAILY'}) + models = self.bc.database.create(mentor_profile=1, user=2, mentorship_service={"video_provider": "DAILY"}) mentor = models.mentor_profile mentee = models.user[1] - mentee_token, created = Token.get_or_create(mentee, token_type='permanent') + mentee_token, created = Token.get_or_create(mentee, token_type="permanent") sessions = get_pending_sessions_or_create(mentee_token, mentor, models.mentorship_service, mentee) self.bc.check.queryset_of(sessions, MentorshipSession) self.bc.check.queryset_with_pks(sessions, [1]) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - format_mentorship_session_attrs({ - 'id': 1, - 'status': 'PENDING', - 'mentor_id': 1, - 'mentee_id': 2, - 'service_id': 1, - 'is_online': True, - 'ends_at': ENDS_AT 
+ timedelta(seconds=3600), - 'name': 'asdasd', - 'online_meeting_url': 'https://4geeks.daily.com/asdasd', - }), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + format_mentorship_session_attrs( + { + "id": 1, + "status": "PENDING", + "mentor_id": 1, + "mentee_id": 2, + "service_id": 1, + "is_online": True, + "ends_at": ENDS_AT + timedelta(seconds=3600), + "name": "asdasd", + "online_meeting_url": "https://4geeks.daily.com/asdasd", + } + ), + ], + ) self.assertEqual(actions.close_older_sessions.call_args_list, [call()]) - @patch.multiple('breathecode.services.google_meet.google_meet.GoogleMeet', - __init__=MagicMock(return_value=None), - create_space=MagicMock(return_value=GoogleMeetMock(meeting_uri='https://meet.google.com/fake'))) - @patch('breathecode.mentorship.signals.mentorship_session_status.send', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=ENDS_AT)) - @patch('breathecode.mentorship.actions.close_older_sessions', MagicMock()) + @patch.multiple( + "breathecode.services.google_meet.google_meet.GoogleMeet", + __init__=MagicMock(return_value=None), + create_space=MagicMock(return_value=GoogleMeetMock(meeting_uri="https://meet.google.com/fake")), + ) + @patch("breathecode.mentorship.signals.mentorship_session_status.send", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=ENDS_AT)) + @patch("breathecode.mentorship.actions.close_older_sessions", MagicMock()) def test_create_session_mentee_first_no_previous_nothing__google_meet(self): """ Mentee comes first, there is nothing previously created it should return a brand new sessions with started at already started """ - models = self.bc.database.create(mentor_profile=1, user=2, mentorship_service={'video_provider': 'GOOGLE_MEET'}) + models = self.bc.database.create(mentor_profile=1, user=2, mentorship_service={"video_provider": "GOOGLE_MEET"}) mentor = models.mentor_profile mentee = models.user[1] - mentee_token, created = Token.get_or_create(mentee, token_type='permanent') + mentee_token, created = Token.get_or_create(mentee, token_type="permanent") sessions = get_pending_sessions_or_create(mentee_token, mentor, models.mentorship_service, mentee) self.bc.check.queryset_of(sessions, MentorshipSession) self.bc.check.queryset_with_pks(sessions, [1]) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - format_mentorship_session_attrs({ - 'id': 1, - 'status': 'PENDING', - 'mentor_id': 1, - 'mentee_id': 2, - 'service_id': 1, - 'is_online': True, - 'ends_at': ENDS_AT + timedelta(seconds=3600), - 'name': get_title(1, models.mentorship_service, models.mentor_profile), - 'online_meeting_url': 'https://meet.google.com/fake', - }), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + format_mentorship_session_attrs( + { + "id": 1, + "status": "PENDING", + "mentor_id": 1, + "mentee_id": 2, + "service_id": 1, + "is_online": True, + "ends_at": ENDS_AT + timedelta(seconds=3600), + "name": get_title(1, models.mentorship_service, models.mentor_profile), + "online_meeting_url": "https://meet.google.com/fake", + } + ), + ], + ) self.assertEqual(actions.close_older_sessions.call_args_list, [call()]) - @patch(REQUESTS_PATH['request'], apply_requests_request_mock([(200, daily_url, daily_payload)])) - @patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=ENDS_AT)) - 
@patch('breathecode.mentorship.actions.close_older_sessions', MagicMock()) + @patch(REQUESTS_PATH["request"], apply_requests_request_mock([(200, daily_url, daily_payload)])) + @patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=ENDS_AT)) + @patch("breathecode.mentorship.actions.close_older_sessions", MagicMock()) def test_create_session_mentee_first_with_wihout_mentee(self): """ Mentee comes first, there is nothing previously created it should reuse the previous pending session """ - mentorship_session = {'status': 'PENDING', 'mentee_id': None} - models = self.bc.database.create(mentor_profile=1, - user=2, - mentorship_session=mentorship_session, - mentorship_service={'video_provider': 'DAILY'}) + mentorship_session = {"status": "PENDING", "mentee_id": None} + models = self.bc.database.create( + mentor_profile=1, + user=2, + mentorship_session=mentorship_session, + mentorship_service={"video_provider": "DAILY"}, + ) new_mentee = models.user[1] - mentee_token, created = Token.get_or_create(new_mentee, token_type='permanent') - sessions = get_pending_sessions_or_create(mentee_token, - models.mentor_profile, - models.mentorship_service, - mentee=new_mentee) + mentee_token, created = Token.get_or_create(new_mentee, token_type="permanent") + sessions = get_pending_sessions_or_create( + mentee_token, models.mentor_profile, models.mentorship_service, mentee=new_mentee + ) self.bc.check.queryset_of(sessions, MentorshipSession) self.bc.check.queryset_with_pks(sessions, [1]) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - format_mentorship_session_attrs({ - 'id': 1, - 'status': 'PENDING', - 'mentor_id': 1, - 'service_id': 1, - 'mentee_id': None, - 'is_online': False, - }), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + format_mentorship_session_attrs( + { + "id": 1, + "status": "PENDING", + "mentor_id": 1, + "service_id": 1, + "mentee_id": None, + "is_online": False, + } + ), + ], + ) self.assertEqual(actions.close_older_sessions.call_args_list, [call()]) - @patch(REQUESTS_PATH['request'], apply_requests_request_mock([(200, daily_url, daily_payload)])) - @patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=ENDS_AT)) - @patch('breathecode.mentorship.actions.close_older_sessions', MagicMock()) + @patch(REQUESTS_PATH["request"], apply_requests_request_mock([(200, daily_url, daily_payload)])) + @patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=ENDS_AT)) + @patch("breathecode.mentorship.actions.close_older_sessions", MagicMock()) def test_create_session_mentee_first_with_another_mentee__daily(self): """ Mentee comes first, there is a previous pending meeting with another mentee @@ -380,63 +430,77 @@ def test_create_session_mentee_first_with_another_mentee__daily(self): # other random mentoring session precreated just for better testing - mentorship_session = {'status': 'PENDING'} - self.bc.database.create(mentor_profile=1, - user=1, - mentorship_session=mentorship_session, - mentorship_service={'video_provider': 'DAILY'}) - - models = self.bc.database.create(mentor_profile=1, - user=1, - mentorship_session=mentorship_session, - mentorship_service={'video_provider': 'DAILY'}) + mentorship_session = {"status": "PENDING"} + 
self.bc.database.create( + mentor_profile=1, + user=1, + mentorship_session=mentorship_session, + mentorship_service={"video_provider": "DAILY"}, + ) + + models = self.bc.database.create( + mentor_profile=1, + user=1, + mentorship_session=mentorship_session, + mentorship_service={"video_provider": "DAILY"}, + ) new_mentee = self.bc.database.create(user=1).user - mentee_token, created = Token.get_or_create(new_mentee, token_type='permanent') - sessions_to_render = get_pending_sessions_or_create(mentee_token, - models.mentor_profile, - models.mentorship_service, - mentee=new_mentee) + mentee_token, created = Token.get_or_create(new_mentee, token_type="permanent") + sessions_to_render = get_pending_sessions_or_create( + mentee_token, models.mentor_profile, models.mentorship_service, mentee=new_mentee + ) self.bc.check.queryset_of(sessions_to_render, MentorshipSession) self.bc.check.queryset_with_pks(sessions_to_render, [3]) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - format_mentorship_session_attrs({ - 'id': 1, - 'ends_at': None, - 'mentee_id': 1, - 'mentor_id': 1, - 'service_id': 1, - }), - format_mentorship_session_attrs({ - 'id': 2, - 'ends_at': None, - 'mentee_id': 2, - 'mentor_id': 2, - 'service_id': 2, - }), - format_mentorship_session_attrs({ - 'id': 3, - 'status': 'PENDING', - 'mentor_id': 2, - 'mentee_id': 3, - 'is_online': True, - 'ends_at': ENDS_AT + timedelta(seconds=3600), - 'name': 'asdasd', - 'online_meeting_url': 'https://4geeks.daily.com/asdasd', - 'service_id': 2, - }), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + format_mentorship_session_attrs( + { + "id": 1, + "ends_at": None, + "mentee_id": 1, + "mentor_id": 1, + "service_id": 1, + } + ), + format_mentorship_session_attrs( + { + "id": 2, + "ends_at": None, + "mentee_id": 2, + "mentor_id": 2, + "service_id": 2, + } + ), + format_mentorship_session_attrs( + { + "id": 3, + "status": "PENDING", + "mentor_id": 2, + "mentee_id": 3, + "is_online": True, + "ends_at": ENDS_AT + timedelta(seconds=3600), + "name": "asdasd", + "online_meeting_url": "https://4geeks.daily.com/asdasd", + "service_id": 2, + } + ), + ], + ) self.assertEqual(actions.close_older_sessions.call_args_list, [call()]) - @patch.multiple('breathecode.services.google_meet.google_meet.GoogleMeet', - __init__=MagicMock(return_value=None), - create_space=MagicMock(return_value=GoogleMeetMock(meeting_uri='https://meet.google.com/fake'))) - @patch('breathecode.mentorship.signals.mentorship_session_status.send', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=ENDS_AT)) - @patch('breathecode.mentorship.actions.close_older_sessions', MagicMock()) + @patch.multiple( + "breathecode.services.google_meet.google_meet.GoogleMeet", + __init__=MagicMock(return_value=None), + create_space=MagicMock(return_value=GoogleMeetMock(meeting_uri="https://meet.google.com/fake")), + ) + @patch("breathecode.mentorship.signals.mentorship_session_status.send", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=ENDS_AT)) + @patch("breathecode.mentorship.actions.close_older_sessions", MagicMock()) def test_create_session_mentee_first_with_another_mentee__google_meet(self): """ Mentee comes first, there is a previous pending meeting with another mentee @@ -445,61 +509,73 @@ def test_create_session_mentee_first_with_another_mentee__google_meet(self): # other random mentoring session precreated just for better testing - mentorship_session = {'status': 'PENDING'} - 
self.bc.database.create(mentor_profile=1, - user=1, - mentorship_session=mentorship_session, - mentorship_service={'video_provider': 'DAILY'}) - - models = self.bc.database.create(mentor_profile=1, - user=1, - mentorship_session=mentorship_session, - mentorship_service={'video_provider': 'GOOGLE_MEET'}) + mentorship_session = {"status": "PENDING"} + self.bc.database.create( + mentor_profile=1, + user=1, + mentorship_session=mentorship_session, + mentorship_service={"video_provider": "DAILY"}, + ) + + models = self.bc.database.create( + mentor_profile=1, + user=1, + mentorship_session=mentorship_session, + mentorship_service={"video_provider": "GOOGLE_MEET"}, + ) new_mentee = self.bc.database.create(user=1).user - mentee_token, created = Token.get_or_create(new_mentee, token_type='permanent') - sessions_to_render = get_pending_sessions_or_create(mentee_token, - models.mentor_profile, - models.mentorship_service, - mentee=new_mentee) + mentee_token, created = Token.get_or_create(new_mentee, token_type="permanent") + sessions_to_render = get_pending_sessions_or_create( + mentee_token, models.mentor_profile, models.mentorship_service, mentee=new_mentee + ) self.bc.check.queryset_of(sessions_to_render, MentorshipSession) self.bc.check.queryset_with_pks(sessions_to_render, [3]) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - format_mentorship_session_attrs({ - 'id': 1, - 'ends_at': None, - 'mentee_id': 1, - 'mentor_id': 1, - 'service_id': 1, - }), - format_mentorship_session_attrs({ - 'id': 2, - 'ends_at': None, - 'mentee_id': 2, - 'mentor_id': 2, - 'service_id': 2, - }), - format_mentorship_session_attrs({ - 'id': 3, - 'status': 'PENDING', - 'mentor_id': 2, - 'mentee_id': 3, - 'is_online': True, - 'ends_at': ENDS_AT + timedelta(seconds=3600), - 'name': get_title(3, models.mentorship_service, models.mentor_profile), - 'online_meeting_url': 'https://meet.google.com/fake', - 'service_id': 2, - }), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + format_mentorship_session_attrs( + { + "id": 1, + "ends_at": None, + "mentee_id": 1, + "mentor_id": 1, + "service_id": 1, + } + ), + format_mentorship_session_attrs( + { + "id": 2, + "ends_at": None, + "mentee_id": 2, + "mentor_id": 2, + "service_id": 2, + } + ), + format_mentorship_session_attrs( + { + "id": 3, + "status": "PENDING", + "mentor_id": 2, + "mentee_id": 3, + "is_online": True, + "ends_at": ENDS_AT + timedelta(seconds=3600), + "name": get_title(3, models.mentorship_service, models.mentor_profile), + "online_meeting_url": "https://meet.google.com/fake", + "service_id": 2, + } + ), + ], + ) self.assertEqual(actions.close_older_sessions.call_args_list, [call()]) - @patch(REQUESTS_PATH['request'], apply_requests_request_mock([(200, daily_url, daily_payload)])) - @patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=ENDS_AT)) - @patch('breathecode.mentorship.actions.close_older_sessions', MagicMock()) + @patch(REQUESTS_PATH["request"], apply_requests_request_mock([(200, daily_url, daily_payload)])) + @patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=ENDS_AT)) + @patch("breathecode.mentorship.actions.close_older_sessions", MagicMock()) def test_create_session_mentee_first_with_another_same_mentee(self): """ Mentee comes second, there is a previous pending meeting with same 
mentee @@ -507,44 +583,54 @@ def test_create_session_mentee_first_with_another_same_mentee(self): """ # some old meeting with another mentee, should be ignored - self.bc.database.create(mentor_profile=1, - user=1, - mentorship_session={'status': 'PENDING'}, - mentorship_service={'video_provider': 'DAILY'}) + self.bc.database.create( + mentor_profile=1, + user=1, + mentorship_session={"status": "PENDING"}, + mentorship_service={"video_provider": "DAILY"}, + ) # old meeting with SAME mentee, should be re-used - models = self.bc.database.create(mentor_profile=1, - user=1, - mentorship_session={'status': 'PENDING'}, - mentorship_service={'video_provider': 'DAILY'}) + models = self.bc.database.create( + mentor_profile=1, + user=1, + mentorship_session={"status": "PENDING"}, + mentorship_service={"video_provider": "DAILY"}, + ) same_mentee = models.user - mentee_token, created = Token.get_or_create(same_mentee, token_type='permanent') - sessions_to_render = get_pending_sessions_or_create(mentee_token, - models.mentor_profile, - models.mentorship_service, - mentee=same_mentee) + mentee_token, created = Token.get_or_create(same_mentee, token_type="permanent") + sessions_to_render = get_pending_sessions_or_create( + mentee_token, models.mentor_profile, models.mentorship_service, mentee=same_mentee + ) self.bc.check.queryset_of(sessions_to_render, MentorshipSession) self.bc.check.queryset_with_pks(sessions_to_render, [2]) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - format_mentorship_session_attrs({ - 'id': 1, - 'ends_at': None, - 'mentee_id': 1, - 'mentor_id': 1, - 'service_id': 1, - }), - format_mentorship_session_attrs({ - 'id': 2, - 'status': 'PENDING', - 'mentor_id': 2, - 'mentee_id': 2, - 'is_online': False, - 'ends_at': None, - 'service_id': 2, - }), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + format_mentorship_session_attrs( + { + "id": 1, + "ends_at": None, + "mentee_id": 1, + "mentor_id": 1, + "service_id": 1, + } + ), + format_mentorship_session_attrs( + { + "id": 2, + "status": "PENDING", + "mentor_id": 2, + "mentee_id": 2, + "is_online": False, + "ends_at": None, + "service_id": 2, + } + ), + ], + ) self.assertEqual(actions.close_older_sessions.call_args_list, [call()]) diff --git a/breathecode/mentorship/tests/actions/tests_mentor_is_ready.py b/breathecode/mentorship/tests/actions/tests_mentor_is_ready.py index f38f3b396..ebe455270 100644 --- a/breathecode/mentorship/tests/actions/tests_mentor_is_ready.py +++ b/breathecode/mentorship/tests/actions/tests_mentor_is_ready.py @@ -1,6 +1,7 @@ """ Test mentorships """ + import random from datetime import datetime, timedelta from unittest.mock import MagicMock, patch @@ -14,8 +15,8 @@ from ...actions import mentor_is_ready from ..mixins import MentorshipTestCase -BOOKING_URL = 'https://calendly.com/abc-xyz' -ONLINE_MEETING_URL = 'https://hardcoded.url/abc-xyz' +BOOKING_URL = "https://calendly.com/abc-xyz" +ONLINE_MEETING_URL = "https://hardcoded.url/abc-xyz" class GenerateMentorBillsTestCase(MentorshipTestCase): @@ -23,152 +24,205 @@ class GenerateMentorBillsTestCase(MentorshipTestCase): 🔽🔽🔽 with MentorProfile without online_meeting_url """ - @patch('requests.head', apply_requests_head_mock([ - (400, BOOKING_URL, None), - (400, ONLINE_MEETING_URL, None), - ])) + @patch( + "requests.head", + apply_requests_head_mock( + [ + (400, BOOKING_URL, None), + (400, ONLINE_MEETING_URL, None), + ] + ), + ) def test__without_online_meeting_url(self): - cases = 
[{'online_meeting_url': x} for x in [None, '']] + cases = [{"online_meeting_url": x} for x in [None, ""]] for mentor_profile in cases: model = self.bc.database.create(mentor_profile=mentor_profile) mentor_profile_db = self.bc.format.to_dict(model.mentor_profile) with self.assertRaisesMessage( - Exception, - f'Mentor {model.mentor_profile.name} does not have backup online_meeting_url, update the ' - 'value before activating.'): + Exception, + f"Mentor {model.mentor_profile.name} does not have backup online_meeting_url, update the " + "value before activating.", + ): mentor_is_ready(model.mentor_profile) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), [ - mentor_profile_db, - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorProfile"), + [ + mentor_profile_db, + ], + ) # teardown - self.bc.database.delete('mentorship.MentorProfile') + self.bc.database.delete("mentorship.MentorProfile") """ 🔽🔽🔽 with MentorProfile with online_meeting_url with bad booking_url """ - @patch('requests.head', apply_requests_head_mock([ - (400, BOOKING_URL, None), - (400, ONLINE_MEETING_URL, None), - ])) + @patch( + "requests.head", + apply_requests_head_mock( + [ + (400, BOOKING_URL, None), + (400, ONLINE_MEETING_URL, None), + ] + ), + ) def test__with_online_meeting_url__with_bad_booking_url(self): - cases = [{ - 'online_meeting_url': self.bc.fake.url(), - 'booking_url': x, - } for x in [None, '', self.bc.fake.url()]] + cases = [ + { + "online_meeting_url": self.bc.fake.url(), + "booking_url": x, + } + for x in [None, "", self.bc.fake.url()] + ] for mentor_profile in cases: model = self.bc.database.create(mentor_profile=mentor_profile) mentor_profile_db = self.bc.format.to_dict(model.mentor_profile) with self.assertRaisesMessage( - Exception, f'Mentor {model.mentor_profile.name} booking_url must point to calendly, update the ' - 'value before activating.'): + Exception, + f"Mentor {model.mentor_profile.name} booking_url must point to calendly, update the " + "value before activating.", + ): mentor_is_ready(model.mentor_profile) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), [ - mentor_profile_db, - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorProfile"), + [ + mentor_profile_db, + ], + ) # teardown - self.bc.database.delete('mentorship.MentorProfile') + self.bc.database.delete("mentorship.MentorProfile") """ 🔽🔽🔽 with MentorProfile with online_meeting_url with booking_url """ - @patch('requests.head', apply_requests_head_mock([ - (400, BOOKING_URL, None), - (400, ONLINE_MEETING_URL, None), - ])) + @patch( + "requests.head", + apply_requests_head_mock( + [ + (400, BOOKING_URL, None), + (400, ONLINE_MEETING_URL, None), + ] + ), + ) def test__with_online_meeting_url__with_booking_url(self): mentor_profile = { - 'online_meeting_url': self.bc.fake.url(), - 'booking_url': BOOKING_URL, + "online_meeting_url": self.bc.fake.url(), + "booking_url": BOOKING_URL, } model = self.bc.database.create(mentor_profile=mentor_profile) mentor_profile_db = self.bc.format.to_dict(model.mentor_profile) with self.assertRaisesMessage( - Exception, f'Mentor {model.mentor_profile.name} has no syllabus associated, update the value before ' - 'activating.'): + Exception, + f"Mentor {model.mentor_profile.name} has no syllabus associated, update the value before " "activating.", + ): mentor_is_ready(model.mentor_profile) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), [ - mentor_profile_db, - ]) + self.assertEqual( + 
self.bc.database.list_of("mentorship.MentorProfile"), + [ + mentor_profile_db, + ], + ) """ 🔽🔽🔽 with MentorProfile and Syllabus with online_meeting_url with booking_url """ - @patch('requests.head', apply_requests_head_mock([ - (400, BOOKING_URL, None), - (400, ONLINE_MEETING_URL, None), - ])) + @patch( + "requests.head", + apply_requests_head_mock( + [ + (400, BOOKING_URL, None), + (400, ONLINE_MEETING_URL, None), + ] + ), + ) def test__with_online_meeting_url__with_booking_url__with_syllabus(self): mentor_profile = { - 'online_meeting_url': self.bc.fake.url(), - 'booking_url': BOOKING_URL, - 'availability_report': ['bad-booking-url'], + "online_meeting_url": self.bc.fake.url(), + "booking_url": BOOKING_URL, + "availability_report": ["bad-booking-url"], } model = self.bc.database.create(mentor_profile=mentor_profile, syllabus=1) mentor_profile_db = self.bc.format.to_dict(model.mentor_profile) - with self.assertRaisesMessage(Exception, f'Mentor {model.mentor_profile.name} booking URL is failing'): + with self.assertRaisesMessage(Exception, f"Mentor {model.mentor_profile.name} booking URL is failing"): mentor_is_ready(model.mentor_profile) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), [ - mentor_profile_db, - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorProfile"), + [ + mentor_profile_db, + ], + ) """ 🔽🔽🔽 with MentorProfile and Syllabus with online_meeting_url with booking_url, booking status 200 """ - @patch('requests.head', apply_requests_head_mock([ - (200, BOOKING_URL, None), - (400, ONLINE_MEETING_URL, None), - ])) + @patch( + "requests.head", + apply_requests_head_mock( + [ + (200, BOOKING_URL, None), + (400, ONLINE_MEETING_URL, None), + ] + ), + ) def test__with_online_meeting_url__with_booking_url__with_syllabus__booking_status_200(self): mentor_profile = { - 'online_meeting_url': self.bc.fake.url(), - 'booking_url': BOOKING_URL, - 'online_meeting_url': ONLINE_MEETING_URL, - 'availability_report': ['bad-online-meeting-url'], + "online_meeting_url": self.bc.fake.url(), + "booking_url": BOOKING_URL, + "online_meeting_url": ONLINE_MEETING_URL, + "availability_report": ["bad-online-meeting-url"], } model = self.bc.database.create(mentor_profile=mentor_profile, syllabus=1) mentor_profile_db = self.bc.format.to_dict(model.mentor_profile) - with self.assertRaisesMessage(Exception, f'Mentor {model.mentor_profile.name} online meeting URL is failing'): + with self.assertRaisesMessage(Exception, f"Mentor {model.mentor_profile.name} online meeting URL is failing"): mentor_is_ready(model.mentor_profile) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), [ - mentor_profile_db, - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorProfile"), + [ + mentor_profile_db, + ], + ) """ 🔽🔽🔽 with MentorProfile and Syllabus with online_meeting_url with booking_url, booking and online meeting status 200 """ - @patch('requests.head', apply_requests_head_mock([ - (200, BOOKING_URL, None), - (200, ONLINE_MEETING_URL, None), - ])) + @patch( + "requests.head", + apply_requests_head_mock( + [ + (200, BOOKING_URL, None), + (200, ONLINE_MEETING_URL, None), + ] + ), + ) def test__with_online_meeting_url__with_booking_url__with_syllabus__booking_status_200__online_meeting_status_200( - self): + self, + ): mentor_profile = { - 'online_meeting_url': self.bc.fake.url(), - 'booking_url': BOOKING_URL, - 'online_meeting_url': ONLINE_MEETING_URL, + "online_meeting_url": self.bc.fake.url(), + "booking_url": BOOKING_URL, + 
"online_meeting_url": ONLINE_MEETING_URL, } model = self.bc.database.create(mentor_profile=mentor_profile, syllabus=1) @@ -177,6 +231,9 @@ def test__with_online_meeting_url__with_booking_url__with_syllabus__booking_stat result = mentor_is_ready(model.mentor_profile) self.assertTrue(result) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), [ - mentor_profile_db, - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorProfile"), + [ + mentor_profile_db, + ], + ) diff --git a/breathecode/mentorship/tests/admin/tests_allow_billing_this_session.py b/breathecode/mentorship/tests/admin/tests_allow_billing_this_session.py index 13f1db9f1..36bf4ec60 100644 --- a/breathecode/mentorship/tests/admin/tests_allow_billing_this_session.py +++ b/breathecode/mentorship/tests/admin/tests_allow_billing_this_session.py @@ -12,43 +12,50 @@ class GenerateMentorBillsTestCase(MentorshipTestCase): """ def test_with_zero_mentorship_sessions(self): - MentorshipSession = self.bc.database.get_model('mentorship.MentorshipSession') + MentorshipSession = self.bc.database.get_model("mentorship.MentorshipSession") queryset = MentorshipSession.objects.filter() allow_billing_this_session(None, None, queryset) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), []) + self.assertEqual(self.bc.database.list_of("mentorship.MentorshipSession"), []) """ 🔽🔽🔽 With two MentorshipSession, allow_billing equal to False """ def test_with_two_mentorship_sessions__allow_billing_equal_to_false(self): - mentorship_sessions = [{'allow_billing': False} for _ in range(0, 2)] + mentorship_sessions = [{"allow_billing": False} for _ in range(0, 2)] model = self.bc.database.create(mentorship_session=mentorship_sessions) - MentorshipSession = self.bc.database.get_model('mentorship.MentorshipSession') + MentorshipSession = self.bc.database.get_model("mentorship.MentorshipSession") queryset = MentorshipSession.objects.filter() allow_billing_this_session(None, None, queryset) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [{ - **x, - 'allow_billing': True, - } for x in self.bc.format.to_dict(model.mentorship_session)]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + { + **x, + "allow_billing": True, + } + for x in self.bc.format.to_dict(model.mentorship_session) + ], + ) """ 🔽🔽🔽 With two MentorshipSession, allow_billing equal to True """ def test_with_two_mentorship_sessions__allow_billing_equal_to_true(self): - mentor_profiles = [{'allow_billing': True} for _ in range(0, 2)] + mentor_profiles = [{"allow_billing": True} for _ in range(0, 2)] model = self.bc.database.create(mentorship_session=mentor_profiles) - MentorshipSession = self.bc.database.get_model('mentorship.MentorshipSession') + MentorshipSession = self.bc.database.get_model("mentorship.MentorshipSession") queryset = MentorshipSession.objects.filter() allow_billing_this_session(None, None, queryset) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), - self.bc.format.to_dict(model.mentorship_session)) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), self.bc.format.to_dict(model.mentorship_session) + ) diff --git a/breathecode/mentorship/tests/admin/tests_avoid_billing_this_session.py b/breathecode/mentorship/tests/admin/tests_avoid_billing_this_session.py index 31cc82df1..9ad98025d 100644 --- a/breathecode/mentorship/tests/admin/tests_avoid_billing_this_session.py +++ 
b/breathecode/mentorship/tests/admin/tests_avoid_billing_this_session.py @@ -12,43 +12,50 @@ class GenerateMentorBillsTestCase(MentorshipTestCase): """ def test_with_zero_mentorship_sessions(self): - MentorshipSession = self.bc.database.get_model('mentorship.MentorshipSession') + MentorshipSession = self.bc.database.get_model("mentorship.MentorshipSession") queryset = MentorshipSession.objects.filter() avoid_billing_this_session(None, None, queryset) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), []) + self.assertEqual(self.bc.database.list_of("mentorship.MentorshipSession"), []) """ 🔽🔽🔽 With two MentorshipSession, allow_billing equal to False """ def test_with_two_mentorship_sessions__allow_billing_equal_to_false(self): - mentorship_sessions = [{'allow_billing': False} for _ in range(0, 2)] + mentorship_sessions = [{"allow_billing": False} for _ in range(0, 2)] model = self.bc.database.create(mentorship_session=mentorship_sessions) - MentorshipSession = self.bc.database.get_model('mentorship.MentorshipSession') + MentorshipSession = self.bc.database.get_model("mentorship.MentorshipSession") queryset = MentorshipSession.objects.filter() avoid_billing_this_session(None, None, queryset) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), - self.bc.format.to_dict(model.mentorship_session)) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), self.bc.format.to_dict(model.mentorship_session) + ) """ 🔽🔽🔽 With two MentorshipSession, allow_billing equal to True """ def test_with_two_mentorship_sessions__allow_billing_equal_to_true(self): - mentor_profiles = [{'allow_billing': True} for _ in range(0, 2)] + mentor_profiles = [{"allow_billing": True} for _ in range(0, 2)] model = self.bc.database.create(mentorship_session=mentor_profiles) - MentorshipSession = self.bc.database.get_model('mentorship.MentorshipSession') + MentorshipSession = self.bc.database.get_model("mentorship.MentorshipSession") queryset = MentorshipSession.objects.filter() avoid_billing_this_session(None, None, queryset) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [{ - **x, - 'allow_billing': False, - } for x in self.bc.format.to_dict(model.mentorship_session)]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + { + **x, + "allow_billing": False, + } + for x in self.bc.format.to_dict(model.mentorship_session) + ], + ) diff --git a/breathecode/mentorship/tests/admin/tests_generate_bill.py b/breathecode/mentorship/tests/admin/tests_generate_bill.py index 90ae88d37..032272aa7 100644 --- a/breathecode/mentorship/tests/admin/tests_generate_bill.py +++ b/breathecode/mentorship/tests/admin/tests_generate_bill.py @@ -1,6 +1,7 @@ """ Test mentorships """ + from unittest.mock import call, patch from unittest.mock import MagicMock, patch @@ -15,30 +16,31 @@ class GenerateMentorBillsTestCase(MentorshipTestCase): 🔽🔽🔽 With zero MentorProfile """ - @patch('breathecode.mentorship.actions.generate_mentor_bills', MagicMock()) + @patch("breathecode.mentorship.actions.generate_mentor_bills", MagicMock()) def test_with_zero_mentor_profiles(self): - MentorProfile = self.bc.database.get_model('mentorship.MentorProfile') + MentorProfile = self.bc.database.get_model("mentorship.MentorProfile") queryset = MentorProfile.objects.filter() generate_bill(None, None, queryset) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), []) + 
self.assertEqual(self.bc.database.list_of("mentorship.MentorProfile"), []) self.assertEqual(actions.generate_mentor_bills.call_args_list, []) """ 🔽🔽🔽 With two MentorProfile """ - @patch('breathecode.mentorship.actions.generate_mentor_bills', MagicMock()) + @patch("breathecode.mentorship.actions.generate_mentor_bills", MagicMock()) def test_with_two_mentor_profiles(self): model = self.bc.database.create(mentor_profile=2) - MentorProfile = self.bc.database.get_model('mentorship.MentorProfile') + MentorProfile = self.bc.database.get_model("mentorship.MentorProfile") queryset = MentorProfile.objects.filter() generate_bill(None, None, queryset) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), - self.bc.format.to_dict(model.mentor_profile)) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorProfile"), self.bc.format.to_dict(model.mentor_profile) + ) self.assertEqual( actions.generate_mentor_bills.call_args_list, [call(x, reset=True) for x in model.mentor_profile], diff --git a/breathecode/mentorship/tests/admin/tests_generate_slug_based_on_calendly.py b/breathecode/mentorship/tests/admin/tests_generate_slug_based_on_calendly.py index 1a667da82..c447b3a6f 100644 --- a/breathecode/mentorship/tests/admin/tests_generate_slug_based_on_calendly.py +++ b/breathecode/mentorship/tests/admin/tests_generate_slug_based_on_calendly.py @@ -1,6 +1,7 @@ """ Test mentorships """ + from unittest.mock import call, patch from django.http import HttpRequest from unittest.mock import MagicMock, patch @@ -16,88 +17,100 @@ class GenerateMentorBillsTestCase(MentorshipTestCase): 🔽🔽🔽 With zero MentorProfile """ - @patch('django.contrib.messages.error', MagicMock()) + @patch("django.contrib.messages.error", MagicMock()) def test_with_zero_mentor_profiles(self): - MentorProfile = self.bc.database.get_model('mentorship.MentorProfile') + MentorProfile = self.bc.database.get_model("mentorship.MentorProfile") queryset = MentorProfile.objects.filter() request = HttpRequest() generate_slug_based_on_calendly(None, request, queryset) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), []) + self.assertEqual(self.bc.database.list_of("mentorship.MentorProfile"), []) self.assertEqual(messages.error.call_args_list, []) """ 🔽🔽🔽 With two MentorProfile without booking_url """ - @patch('django.contrib.messages.error', MagicMock()) + @patch("django.contrib.messages.error", MagicMock()) def test_with_two_mentor_profiles__without_booking_url(self): model = self.bc.database.create(mentor_profile=2) - MentorProfile = self.bc.database.get_model('mentorship.MentorProfile') + MentorProfile = self.bc.database.get_model("mentorship.MentorProfile") queryset = MentorProfile.objects.filter() request = HttpRequest() generate_slug_based_on_calendly(None, request, queryset) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), - self.bc.format.to_dict(model.mentor_profile)) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorProfile"), self.bc.format.to_dict(model.mentor_profile) + ) - self.assertEqual(messages.error.call_args_list, - [call(request, f'Mentor {x.id} has no booking url') for x in model.mentor_profile]) + self.assertEqual( + messages.error.call_args_list, + [call(request, f"Mentor {x.id} has no booking url") for x in model.mentor_profile], + ) """ 🔽🔽🔽 With two MentorProfile with booking_url """ - @patch('django.contrib.messages.error', MagicMock()) + @patch("django.contrib.messages.error", MagicMock()) def 
test_with_two_mentor_profiles__with_booking_url__different_of_calendly(self): - mentor_profiles = [{'booking_url': self.bc.fake.url()} for _ in range(0, 2)] + mentor_profiles = [{"booking_url": self.bc.fake.url()} for _ in range(0, 2)] model = self.bc.database.create(mentor_profile=mentor_profiles) - MentorProfile = self.bc.database.get_model('mentorship.MentorProfile') + MentorProfile = self.bc.database.get_model("mentorship.MentorProfile") queryset = MentorProfile.objects.filter() request = HttpRequest() generate_slug_based_on_calendly(None, request, queryset) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), - self.bc.format.to_dict(model.mentor_profile)) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorProfile"), self.bc.format.to_dict(model.mentor_profile) + ) - self.assertEqual(messages.error.call_args_list, [ - call(request, f'Mentor {x.id} booking url is not calendly: {x.booking_url}') for x in model.mentor_profile - ]) + self.assertEqual( + messages.error.call_args_list, + [ + call(request, f"Mentor {x.id} booking url is not calendly: {x.booking_url}") + for x in model.mentor_profile + ], + ) """ 🔽🔽🔽 With two MentorProfile with booking_url of calendly """ - @patch('django.contrib.messages.error', MagicMock()) + @patch("django.contrib.messages.error", MagicMock()) def test_with_two_mentor_profiles__with_booking_url__of_calendly(self): mentor_slug1 = self.bc.fake.slug() mentor_slug2 = self.bc.fake.slug() service_slug1 = self.bc.fake.slug() service_slug2 = self.bc.fake.slug() - mentor_profiles = [{ - 'booking_url': f'https://calendly.com/{mentor_slug}/{service_slug}' - } for mentor_slug, service_slug in [(mentor_slug1, service_slug1), (mentor_slug2, service_slug2)]] + mentor_profiles = [ + {"booking_url": f"https://calendly.com/{mentor_slug}/{service_slug}"} + for mentor_slug, service_slug in [(mentor_slug1, service_slug1), (mentor_slug2, service_slug2)] + ] model = self.bc.database.create(mentor_profile=mentor_profiles) - MentorProfile = self.bc.database.get_model('mentorship.MentorProfile') + MentorProfile = self.bc.database.get_model("mentorship.MentorProfile") queryset = MentorProfile.objects.filter() request = HttpRequest() generate_slug_based_on_calendly(None, request, queryset) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), [ - { - **self.bc.format.to_dict(model.mentor_profile[0]), - 'slug': mentor_slug1, - }, - { - **self.bc.format.to_dict(model.mentor_profile[1]), - 'slug': mentor_slug2, - }, - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorProfile"), + [ + { + **self.bc.format.to_dict(model.mentor_profile[0]), + "slug": mentor_slug1, + }, + { + **self.bc.format.to_dict(model.mentor_profile[1]), + "slug": mentor_slug2, + }, + ], + ) self.assertEqual(messages.error.call_args_list, []) diff --git a/breathecode/mentorship/tests/admin/tests_mark_as_active.py b/breathecode/mentorship/tests/admin/tests_mark_as_active.py index 28396e740..bc3b15f6d 100644 --- a/breathecode/mentorship/tests/admin/tests_mark_as_active.py +++ b/breathecode/mentorship/tests/admin/tests_mark_as_active.py @@ -1,6 +1,7 @@ """ Test mentorships """ + from unittest.mock import call, patch from django.http import HttpRequest from unittest.mock import MagicMock, patch @@ -18,17 +19,17 @@ class GenerateMentorBillsTestCase(MentorshipTestCase): 🔽🔽🔽 With zero MentorProfile """ - @patch('django.contrib.messages.success', MagicMock()) - @patch('django.contrib.messages.error', MagicMock()) - 
@patch('breathecode.mentorship.actions.mentor_is_ready', MagicMock()) + @patch("django.contrib.messages.success", MagicMock()) + @patch("django.contrib.messages.error", MagicMock()) + @patch("breathecode.mentorship.actions.mentor_is_ready", MagicMock()) def test_with_zero_mentor_profiles(self): - MentorProfile = self.bc.database.get_model('mentorship.MentorProfile') + MentorProfile = self.bc.database.get_model("mentorship.MentorProfile") queryset = MentorProfile.objects.filter() request = HttpRequest() mark_as_active(None, request, queryset) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), []) + self.assertEqual(self.bc.database.list_of("mentorship.MentorProfile"), []) self.assertEqual(actions.mentor_is_ready.call_args_list, []) self.assertEqual(messages.success.call_args_list, []) @@ -38,99 +39,121 @@ def test_with_zero_mentor_profiles(self): 🔽🔽🔽 With two MentorProfile """ - @patch('django.contrib.messages.success', MagicMock()) - @patch('django.contrib.messages.error', MagicMock()) - @patch('breathecode.mentorship.actions.mentor_is_ready', MagicMock()) + @patch("django.contrib.messages.success", MagicMock()) + @patch("django.contrib.messages.error", MagicMock()) + @patch("breathecode.mentorship.actions.mentor_is_ready", MagicMock()) def test_with_two_mentor_profiles(self): model = self.bc.database.create(mentor_profile=2) - MentorProfile = self.bc.database.get_model('mentorship.MentorProfile') + MentorProfile = self.bc.database.get_model("mentorship.MentorProfile") queryset = MentorProfile.objects.filter() request = HttpRequest() mark_as_active(None, request, queryset) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), - self.bc.format.to_dict(model.mentor_profile)) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorProfile"), self.bc.format.to_dict(model.mentor_profile) + ) self.assertEqual(actions.mentor_is_ready.call_args_list, [call(x) for x in model.mentor_profile]) - self.assertEqual(messages.success.call_args_list, [ - call(request, 'Mentor updated successfully'), - ]) + self.assertEqual( + messages.success.call_args_list, + [ + call(request, "Mentor updated successfully"), + ], + ) self.assertEqual(messages.error.call_args_list, []) """ 🔽🔽🔽 With two MentorProfile, with ConnectionError """ - @patch('django.contrib.messages.success', MagicMock()) - @patch('django.contrib.messages.error', MagicMock()) - @patch('breathecode.mentorship.actions.mentor_is_ready', MagicMock(side_effect=exceptions.ConnectionError())) + @patch("django.contrib.messages.success", MagicMock()) + @patch("django.contrib.messages.error", MagicMock()) + @patch("breathecode.mentorship.actions.mentor_is_ready", MagicMock(side_effect=exceptions.ConnectionError())) def test_with_two_mentor_profiles__with_connection_error(self): model = self.bc.database.create(mentor_profile=2) - MentorProfile = self.bc.database.get_model('mentorship.MentorProfile') + MentorProfile = self.bc.database.get_model("mentorship.MentorProfile") queryset = MentorProfile.objects.filter() request = HttpRequest() mark_as_active(None, request, queryset) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), - self.bc.format.to_dict(model.mentor_profile)) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorProfile"), self.bc.format.to_dict(model.mentor_profile) + ) self.assertEqual(actions.mentor_is_ready.call_args_list, [call(x) for x in model.mentor_profile]) self.assertEqual(messages.success.call_args_list, []) - 
self.assertEqual(messages.error.call_args_list, [ - call( - request, 'Error: Booking or meeting URL for mentor is failing ' - f'({", ".join([x.slug for x in model.mentor_profile])}).'), - ]) + self.assertEqual( + messages.error.call_args_list, + [ + call( + request, + "Error: Booking or meeting URL for mentor is failing " + f'({", ".join([x.slug for x in model.mentor_profile])}).', + ), + ], + ) """ 🔽🔽🔽 With two MentorProfile, with Exception """ - @patch('django.contrib.messages.success', MagicMock()) - @patch('django.contrib.messages.error', MagicMock()) - @patch('breathecode.mentorship.actions.mentor_is_ready', MagicMock(side_effect=Exception('xyz'))) + @patch("django.contrib.messages.success", MagicMock()) + @patch("django.contrib.messages.error", MagicMock()) + @patch("breathecode.mentorship.actions.mentor_is_ready", MagicMock(side_effect=Exception("xyz"))) def test_with_two_mentor_profiles__with_exception(self): model = self.bc.database.create(mentor_profile=2) - MentorProfile = self.bc.database.get_model('mentorship.MentorProfile') + MentorProfile = self.bc.database.get_model("mentorship.MentorProfile") queryset = MentorProfile.objects.filter() request = HttpRequest() mark_as_active(None, request, queryset) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), - self.bc.format.to_dict(model.mentor_profile)) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorProfile"), self.bc.format.to_dict(model.mentor_profile) + ) self.assertEqual(actions.mentor_is_ready.call_args_list, [call(x) for x in model.mentor_profile]) self.assertEqual(messages.success.call_args_list, []) - self.assertEqual(messages.error.call_args_list, [ - call(request, f'Error: xyz ({", ".join([x.slug for x in model.mentor_profile])}).'), - ]) + self.assertEqual( + messages.error.call_args_list, + [ + call(request, f'Error: xyz ({", ".join([x.slug for x in model.mentor_profile])}).'), + ], + ) """ 🔽🔽🔽 With two MentorProfile, with ConnectionError and Exception """ - @patch('django.contrib.messages.success', MagicMock()) - @patch('django.contrib.messages.error', MagicMock()) - @patch('breathecode.mentorship.actions.mentor_is_ready', - MagicMock(side_effect=[exceptions.ConnectionError(), Exception('xyz')])) + @patch("django.contrib.messages.success", MagicMock()) + @patch("django.contrib.messages.error", MagicMock()) + @patch( + "breathecode.mentorship.actions.mentor_is_ready", + MagicMock(side_effect=[exceptions.ConnectionError(), Exception("xyz")]), + ) def test_with_three_mentor_profiles__with_connection_error__with_exception(self): model = self.bc.database.create(mentor_profile=2) - MentorProfile = self.bc.database.get_model('mentorship.MentorProfile') + MentorProfile = self.bc.database.get_model("mentorship.MentorProfile") queryset = MentorProfile.objects.filter() request = HttpRequest() mark_as_active(None, request, queryset) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), - self.bc.format.to_dict(model.mentor_profile)) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorProfile"), self.bc.format.to_dict(model.mentor_profile) + ) self.assertEqual(actions.mentor_is_ready.call_args_list, [call(x) for x in model.mentor_profile]) self.assertEqual(messages.success.call_args_list, []) - self.assertEqual(messages.error.call_args_list, [ - call( - request, 'Error: Booking or meeting URL for mentor is failing ' - f'({model.mentor_profile[0].slug}). 
xyz ({model.mentor_profile[1].slug}).'), - ]) + self.assertEqual( + messages.error.call_args_list, + [ + call( + request, + "Error: Booking or meeting URL for mentor is failing " + f"({model.mentor_profile[0].slug}). xyz ({model.mentor_profile[1].slug}).", + ), + ], + ) diff --git a/breathecode/mentorship/tests/admin/tests_release_sessions_from_bill.py b/breathecode/mentorship/tests/admin/tests_release_sessions_from_bill.py index a62a31df9..e6ffa56ec 100644 --- a/breathecode/mentorship/tests/admin/tests_release_sessions_from_bill.py +++ b/breathecode/mentorship/tests/admin/tests_release_sessions_from_bill.py @@ -1,6 +1,7 @@ """ Test mentorships """ + from datetime import timedelta import random @@ -15,72 +16,99 @@ class GenerateMentorBillsTestCase(MentorshipTestCase): """ def test_with_zero_mentorship_bills(self): - MentorshipBill = self.bc.database.get_model('mentorship.MentorshipBill') + MentorshipBill = self.bc.database.get_model("mentorship.MentorshipBill") queryset = MentorshipBill.objects.filter() release_sessions_from_bill(None, None, queryset) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipBill'), []) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), []) + self.assertEqual(self.bc.database.list_of("mentorship.MentorshipBill"), []) + self.assertEqual(self.bc.database.list_of("mentorship.MentorshipSession"), []) """ 🔽🔽🔽 With two MentorshipBill and MentorshipSession """ def test_with_two_mentorship_bills(self): - mentorship_sessions = [{'bill_id': n} for n in range(1, 3)] + mentorship_sessions = [{"bill_id": n} for n in range(1, 3)] model = self.bc.database.create(mentorship_bill=2, mentorship_session=mentorship_sessions) - MentorshipBill = self.bc.database.get_model('mentorship.MentorshipBill') + MentorshipBill = self.bc.database.get_model("mentorship.MentorshipBill") queryset = MentorshipBill.objects.filter() release_sessions_from_bill(None, None, queryset) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipBill'), [{ - **x, - } for x in self.bc.format.to_dict(model.mentorship_bill)]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipBill"), + [ + { + **x, + } + for x in self.bc.format.to_dict(model.mentorship_bill) + ], + ) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [{ - **x, - 'bill_id': None, - } for x in self.bc.format.to_dict(model.mentorship_session)]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + { + **x, + "bill_id": None, + } + for x in self.bc.format.to_dict(model.mentorship_session) + ], + ) """ 🔽🔽🔽 With two MentorshipBill and MentorshipSession, clean all values """ def test_with_two_mentorship_bills__clean_all_values(self): - mentorship_bills = [{ - 'total_price': random.randint(0, 10000), - 'total_duration_in_hours': random.randint(0, 10000), - 'total_duration_in_minutes': random.randint(0, 10000), - 'overtime_minutes': random.randint(0, 10000), - } for _ in range(1, 3)] - - mentorship_sessions = [{ - 'bill_id': n, - 'accounted_duration': timedelta(hours=random.randint(0, 10000)), - } for n in range(1, 3)] + mentorship_bills = [ + { + "total_price": random.randint(0, 10000), + "total_duration_in_hours": random.randint(0, 10000), + "total_duration_in_minutes": random.randint(0, 10000), + "overtime_minutes": random.randint(0, 10000), + } + for _ in range(1, 3) + ] + + mentorship_sessions = [ + { + "bill_id": n, + "accounted_duration": timedelta(hours=random.randint(0, 10000)), + } + for n in range(1, 3) + ] model = 
self.bc.database.create(mentorship_bill=mentorship_bills, mentorship_session=mentorship_sessions) - MentorshipBill = self.bc.database.get_model('mentorship.MentorshipBill') + MentorshipBill = self.bc.database.get_model("mentorship.MentorshipBill") queryset = MentorshipBill.objects.filter() release_sessions_from_bill(None, None, queryset) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipBill'), [{ - **x, - 'total_price': 0.0, - 'total_duration_in_hours': 0.0, - 'total_duration_in_minutes': 0.0, - 'overtime_minutes': 0.0, - } for x in self.bc.format.to_dict(model.mentorship_bill)]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipBill"), + [ + { + **x, + "total_price": 0.0, + "total_duration_in_hours": 0.0, + "total_duration_in_minutes": 0.0, + "overtime_minutes": 0.0, + } + for x in self.bc.format.to_dict(model.mentorship_bill) + ], + ) self.assertEqual( - self.bc.database.list_of('mentorship.MentorshipSession'), - [{ - **x, - 'bill_id': None, - 'accounted_duration': None, - } for x in self.bc.format.to_dict(model.mentorship_session)], + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + { + **x, + "bill_id": None, + "accounted_duration": None, + } + for x in self.bc.format.to_dict(model.mentorship_session) + ], ) diff --git a/breathecode/mentorship/tests/management/commands/tests_mentorship.py b/breathecode/mentorship/tests/management/commands/tests_mentorship.py index 5bc782c01..bb0f933f8 100644 --- a/breathecode/mentorship/tests/management/commands/tests_mentorship.py +++ b/breathecode/mentorship/tests/management/commands/tests_mentorship.py @@ -1,6 +1,7 @@ """ This file just can contains duck tests refert to AcademyInviteView """ + from unittest.mock import MagicMock, call import pytest @@ -15,7 +16,7 @@ @pytest.fixture(autouse=True) def setup(db, monkeypatch): - monkeypatch.setattr(check_mentorship_profile, 'delay', MagicMock()) + monkeypatch.setattr(check_mentorship_profile, "delay", MagicMock()) yield @@ -26,23 +27,23 @@ def test_no_mentors(bc: Breathecode): assert check_mentorship_profile.delay.call_args_list == [] -@pytest.mark.parametrize('status', ['ACTIVE', 'UNLISTED']) +@pytest.mark.parametrize("status", ["ACTIVE", "UNLISTED"]) def test_valid_statuses(bc: Breathecode, status): - model = bc.database.create(mentor_profile=(3, {'status': status})) + model = bc.database.create(mentor_profile=(3, {"status": status})) command = Command() command.handle() assert check_mentorship_profile.delay.call_args_list == [call(n) for n in range(1, 4)] - assert bc.database.list_of('mentorship.MentorProfile') == bc.format.to_dict(model.mentor_profile) + assert bc.database.list_of("mentorship.MentorProfile") == bc.format.to_dict(model.mentor_profile) -@pytest.mark.parametrize('status', ['INNACTIVE', 'INVITED']) +@pytest.mark.parametrize("status", ["INNACTIVE", "INVITED"]) def test_wrong_statuses(bc: Breathecode, status): - model = bc.database.create(mentor_profile=(3, {'status': status})) + model = bc.database.create(mentor_profile=(3, {"status": status})) command = Command() command.handle() assert check_mentorship_profile.delay.call_args_list == [] - assert bc.database.list_of('mentorship.MentorProfile') == bc.format.to_dict(model.mentor_profile) + assert bc.database.list_of("mentorship.MentorProfile") == bc.format.to_dict(model.mentor_profile) diff --git a/breathecode/mentorship/tests/mixins/__init__.py b/breathecode/mentorship/tests/mixins/__init__.py index 535e3f07c..8605e524f 100644 --- a/breathecode/mentorship/tests/mixins/__init__.py +++ 
b/breathecode/mentorship/tests/mixins/__init__.py @@ -1,4 +1,5 @@ """ Mentorship Mixins """ + from .mentorship_test_case import MentorshipTestCase # noqa: F401 diff --git a/breathecode/mentorship/tests/mixins/mentorship_test_case.py b/breathecode/mentorship/tests/mixins/mentorship_test_case.py index 76d4b4005..77303e3cb 100644 --- a/breathecode/mentorship/tests/mixins/mentorship_test_case.py +++ b/breathecode/mentorship/tests/mixins/mentorship_test_case.py @@ -1,14 +1,30 @@ """ Collections of mixins used to login in authorize microservice """ + from rest_framework.test import APITestCase -from breathecode.tests.mixins import (GenerateModelsMixin, CacheMixin, TokenMixin, GenerateQueriesMixin, HeadersMixin, - DatetimeMixin, BreathecodeMixin) +from breathecode.tests.mixins import ( + GenerateModelsMixin, + CacheMixin, + TokenMixin, + GenerateQueriesMixin, + HeadersMixin, + DatetimeMixin, + BreathecodeMixin, +) from breathecode.authenticate.models import Token -class MentorshipTestCase(APITestCase, GenerateModelsMixin, CacheMixin, TokenMixin, GenerateQueriesMixin, HeadersMixin, - DatetimeMixin, BreathecodeMixin): +class MentorshipTestCase( + APITestCase, + GenerateModelsMixin, + CacheMixin, + TokenMixin, + GenerateQueriesMixin, + HeadersMixin, + DatetimeMixin, + BreathecodeMixin, +): """FeedbackTestCase with auth methods""" def tearDown(self): @@ -20,5 +36,5 @@ def setUp(self): def get_token_key(self, id=None): kwargs = {} if id: - kwargs['id'] = id - return Token.objects.filter(**kwargs).values_list('key', flat=True).first() + kwargs["id"] = id + return Token.objects.filter(**kwargs).values_list("key", flat=True).first() diff --git a/breathecode/mentorship/tests/permissions/contexts/tests_mentorship_service.py b/breathecode/mentorship/tests/permissions/contexts/tests_mentorship_service.py index 3ae680f63..a078efef9 100644 --- a/breathecode/mentorship/tests/permissions/contexts/tests_mentorship_service.py +++ b/breathecode/mentorship/tests/permissions/contexts/tests_mentorship_service.py @@ -8,11 +8,11 @@ def serializer(mentorship_service): return { - 'id': mentorship_service.id, - 'slug': mentorship_service.slug, - 'max_duration': mentorship_service.max_duration, - 'language': mentorship_service.language, - 'academy': mentorship_service.academy.slug, + "id": mentorship_service.id, + "slug": mentorship_service.slug, + "max_duration": mentorship_service.max_duration, + "language": mentorship_service.language, + "academy": mentorship_service.academy.slug, } @@ -21,23 +21,33 @@ def serializer(mentorship_service): class AcademyEventTestSuite(MentorshipTestCase): - @patch('ldclient.get', MagicMock()) - @patch('breathecode.services.launch_darkly.client.LaunchDarkly.context', MagicMock(return_value=value)) + @patch("ldclient.get", MagicMock()) + @patch("breathecode.services.launch_darkly.client.LaunchDarkly.context", MagicMock(return_value=value)) def test_make_right_calls(self): model = self.bc.database.create(mentorship_service=1) ld = LaunchDarkly() result = mentorship_service(ld, model.mentorship_service) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipService'), [ - self.bc.format.to_dict(model.mentorship_service), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipService"), + [ + self.bc.format.to_dict(model.mentorship_service), + ], + ) contexts = serializer(model.mentorship_service) - self.assertEqual(LaunchDarkly.context.call_args_list, [ - call('1', f'{model.mentorship_service.name} ({model.mentorship_service.slug})', 'mentoring-service', - 
contexts), - ]) + self.assertEqual( + LaunchDarkly.context.call_args_list, + [ + call( + "1", + f"{model.mentorship_service.name} ({model.mentorship_service.slug})", + "mentoring-service", + contexts, + ), + ], + ) self.assertEqual(result, value) diff --git a/breathecode/mentorship/tests/permissions/flags/release/tests_enable_consume_mentorships.py b/breathecode/mentorship/tests/permissions/flags/release/tests_enable_consume_mentorships.py index e0d52aca5..9fde4650c 100644 --- a/breathecode/mentorship/tests/permissions/flags/release/tests_enable_consume_mentorships.py +++ b/breathecode/mentorship/tests/permissions/flags/release/tests_enable_consume_mentorships.py @@ -28,21 +28,26 @@ def assert_context_was_call(self, fn, model): class AcademyEventTestSuite(MentorshipTestCase): - @patch('ldclient.get', MagicMock()) - @patch('breathecode.services.launch_darkly.client.LaunchDarkly.get', MagicMock(return_value=value)) - @patch('breathecode.services.launch_darkly.client.LaunchDarkly.join_contexts', - MagicMock(return_value=join_contexts_value)) - @patch('breathecode.authenticate.permissions.contexts.user', MagicMock(return_value=context1)) - @patch('breathecode.mentorship.permissions.contexts.mentorship_service', MagicMock(return_value=context2)) - @patch('breathecode.admissions.permissions.contexts.academy', MagicMock(return_value=context3)) + @patch("ldclient.get", MagicMock()) + @patch("breathecode.services.launch_darkly.client.LaunchDarkly.get", MagicMock(return_value=value)) + @patch( + "breathecode.services.launch_darkly.client.LaunchDarkly.join_contexts", + MagicMock(return_value=join_contexts_value), + ) + @patch("breathecode.authenticate.permissions.contexts.user", MagicMock(return_value=context1)) + @patch("breathecode.mentorship.permissions.contexts.mentorship_service", MagicMock(return_value=context2)) + @patch("breathecode.admissions.permissions.contexts.academy", MagicMock(return_value=context3)) def test_make_right_calls(self): model = self.bc.database.create(user=1, mentorship_service=1) result = api.release.enable_consume_mentorships(model.user, model.mentorship_service) - self.assertEqual(self.bc.database.list_of('auth.User'), [ - self.bc.format.to_dict(model.user), - ]) + self.assertEqual( + self.bc.database.list_of("auth.User"), + [ + self.bc.format.to_dict(model.user), + ], + ) assert_context_was_call(self, authenticate_contexts.user, model.user) assert_context_was_call(self, admissions_contexts.academy, model.academy) @@ -50,10 +55,16 @@ def test_make_right_calls(self): self.assertEqual(result, value) - self.assertEqual(LaunchDarkly.join_contexts.call_args_list, [ - call(context1, context2, context3), - ]) + self.assertEqual( + LaunchDarkly.join_contexts.call_args_list, + [ + call(context1, context2, context3), + ], + ) - self.assertEqual(LaunchDarkly.get.call_args_list, [ - call('api.release.enable_consume_mentorships', join_contexts_value, False), - ]) + self.assertEqual( + LaunchDarkly.get.call_args_list, + [ + call("api.release.enable_consume_mentorships", join_contexts_value, False), + ], + ) diff --git a/breathecode/mentorship/tests/signals/tests_mentorship_session_status.py b/breathecode/mentorship/tests/signals/tests_mentorship_session_status.py index fe85418a9..db5a52af4 100644 --- a/breathecode/mentorship/tests/signals/tests_mentorship_session_status.py +++ b/breathecode/mentorship/tests/signals/tests_mentorship_session_status.py @@ -11,19 +11,22 @@ class TestLead(LegacyAPITestCase): 🔽🔽🔽 With status PENDING """ - 
@patch('breathecode.feedback.tasks.send_mentorship_session_survey.delay', MagicMock()) - @patch('breathecode.payments.tasks.refund_mentoring_session.delay', MagicMock()) + @patch("breathecode.feedback.tasks.send_mentorship_session_survey.delay", MagicMock()) + @patch("breathecode.payments.tasks.refund_mentoring_session.delay", MagicMock()) def test_mentorship_session_status__with_status_pending(self, enable_signals): enable_signals() from breathecode.feedback.tasks import send_mentorship_session_survey - mentorship_session = {'status': 'PENDING'} + mentorship_session = {"status": "PENDING"} model = self.bc.database.create(mentorship_session=mentorship_session, mentorship_service=1) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - self.bc.format.to_dict(model.mentorship_session), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + self.bc.format.to_dict(model.mentorship_session), + ], + ) self.assertEqual(send_mentorship_session_survey.delay.call_args_list, []) self.assertEqual(refund_mentoring_session.delay.call_args_list, []) @@ -32,19 +35,22 @@ def test_mentorship_session_status__with_status_pending(self, enable_signals): 🔽🔽🔽 With status STARTED """ - @patch('breathecode.feedback.tasks.send_mentorship_session_survey.delay', MagicMock()) - @patch('breathecode.payments.tasks.refund_mentoring_session.delay', MagicMock()) + @patch("breathecode.feedback.tasks.send_mentorship_session_survey.delay", MagicMock()) + @patch("breathecode.payments.tasks.refund_mentoring_session.delay", MagicMock()) def test_mentorship_session_status__with_status_started(self, enable_signals): enable_signals() from breathecode.feedback.tasks import send_mentorship_session_survey - mentorship_session = {'status': 'STARTED'} + mentorship_session = {"status": "STARTED"} model = self.bc.database.create(mentorship_session=mentorship_session, mentorship_service=1) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - self.bc.format.to_dict(model.mentorship_session), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + self.bc.format.to_dict(model.mentorship_session), + ], + ) self.assertEqual(send_mentorship_session_survey.delay.call_args_list, []) self.assertEqual(refund_mentoring_session.delay.call_args_list, []) @@ -53,39 +59,44 @@ def test_mentorship_session_status__with_status_started(self, enable_signals): 🔽🔽🔽 With status FAILED """ - @patch('breathecode.feedback.tasks.send_mentorship_session_survey.delay', MagicMock()) - @patch('breathecode.payments.tasks.refund_mentoring_session.delay', MagicMock()) + @patch("breathecode.feedback.tasks.send_mentorship_session_survey.delay", MagicMock()) + @patch("breathecode.payments.tasks.refund_mentoring_session.delay", MagicMock()) def test_mentorship_session_status__with_status_failed(self, enable_signals): enable_signals() from breathecode.feedback.tasks import send_mentorship_session_survey - mentorship_session = {'status': 'FAILED'} + mentorship_session = {"status": "FAILED"} model = self.bc.database.create(mentorship_session=mentorship_session, mentorship_service=1) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - self.bc.format.to_dict(model.mentorship_session), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + self.bc.format.to_dict(model.mentorship_session), + ], + ) self.assertEqual(send_mentorship_session_survey.delay.call_args_list, []) 
self.assertEqual(refund_mentoring_session.delay.call_args_list, []) - @patch('breathecode.feedback.tasks.send_mentorship_session_survey.delay', MagicMock()) - @patch('breathecode.payments.tasks.refund_mentoring_session.delay', MagicMock()) + @patch("breathecode.feedback.tasks.send_mentorship_session_survey.delay", MagicMock()) + @patch("breathecode.payments.tasks.refund_mentoring_session.delay", MagicMock()) def test_mentorship_session_status__with_status_failed__with_mentor_and_mentee(self, enable_signals): enable_signals() from breathecode.feedback.tasks import send_mentorship_session_survey - mentorship_session = {'status': 'FAILED'} - model = self.bc.database.create(mentorship_session=mentorship_session, - mentorship_service=1, - mentor_profile=1, - user=1) + mentorship_session = {"status": "FAILED"} + model = self.bc.database.create( + mentorship_session=mentorship_session, mentorship_service=1, mentor_profile=1, user=1 + ) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - self.bc.format.to_dict(model.mentorship_session), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + self.bc.format.to_dict(model.mentorship_session), + ], + ) self.assertEqual(send_mentorship_session_survey.delay.call_args_list, []) self.assertEqual(refund_mentoring_session.delay.call_args_list, [call(1)]) @@ -94,39 +105,44 @@ def test_mentorship_session_status__with_status_failed__with_mentor_and_mentee(s 🔽🔽🔽 With status IGNORED """ - @patch('breathecode.feedback.tasks.send_mentorship_session_survey.delay', MagicMock()) - @patch('breathecode.payments.tasks.refund_mentoring_session.delay', MagicMock()) + @patch("breathecode.feedback.tasks.send_mentorship_session_survey.delay", MagicMock()) + @patch("breathecode.payments.tasks.refund_mentoring_session.delay", MagicMock()) def test_mentorship_session_status__with_status_ignored(self, enable_signals): enable_signals() from breathecode.feedback.tasks import send_mentorship_session_survey - mentorship_session = {'status': 'IGNORED'} + mentorship_session = {"status": "IGNORED"} model = self.bc.database.create(mentorship_session=mentorship_session, mentorship_service=1) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - self.bc.format.to_dict(model.mentorship_session), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + self.bc.format.to_dict(model.mentorship_session), + ], + ) self.assertEqual(send_mentorship_session_survey.delay.call_args_list, []) self.assertEqual(refund_mentoring_session.delay.call_args_list, []) - @patch('breathecode.feedback.tasks.send_mentorship_session_survey.delay', MagicMock()) - @patch('breathecode.payments.tasks.refund_mentoring_session.delay', MagicMock()) + @patch("breathecode.feedback.tasks.send_mentorship_session_survey.delay", MagicMock()) + @patch("breathecode.payments.tasks.refund_mentoring_session.delay", MagicMock()) def test_mentorship_session_status__with_status_ignored__with_mentor_and_mentee(self, enable_signals): enable_signals() from breathecode.feedback.tasks import send_mentorship_session_survey - mentorship_session = {'status': 'IGNORED'} - model = self.bc.database.create(mentorship_session=mentorship_session, - mentorship_service=1, - mentor_profile=1, - user=1) + mentorship_session = {"status": "IGNORED"} + model = self.bc.database.create( + mentorship_session=mentorship_session, mentorship_service=1, mentor_profile=1, user=1 + ) - 
self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - self.bc.format.to_dict(model.mentorship_session), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + self.bc.format.to_dict(model.mentorship_session), + ], + ) self.assertEqual(send_mentorship_session_survey.delay.call_args_list, []) self.assertEqual(refund_mentoring_session.delay.call_args_list, [call(1)]) @@ -135,19 +151,22 @@ def test_mentorship_session_status__with_status_ignored__with_mentor_and_mentee( 🔽🔽🔽 With status COMPLETED and duration 0:00:00 because it not have started_at and ended_at """ - @patch('breathecode.feedback.tasks.send_mentorship_session_survey.delay', MagicMock()) - @patch('breathecode.payments.tasks.refund_mentoring_session.delay', MagicMock()) + @patch("breathecode.feedback.tasks.send_mentorship_session_survey.delay", MagicMock()) + @patch("breathecode.payments.tasks.refund_mentoring_session.delay", MagicMock()) def test_mentorship_session_status__with_status_completed__duration_equal_to_zero(self, enable_signals): enable_signals() from breathecode.feedback.tasks import send_mentorship_session_survey - mentorship_session = {'status': 'COMPLETED'} + mentorship_session = {"status": "COMPLETED"} model = self.bc.database.create(mentorship_session=mentorship_session, mentorship_service=1) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - self.bc.format.to_dict(model.mentorship_session), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + self.bc.format.to_dict(model.mentorship_session), + ], + ) self.assertEqual(send_mentorship_session_survey.delay.call_args_list, []) self.assertEqual(refund_mentoring_session.delay.call_args_list, []) @@ -156,8 +175,8 @@ def test_mentorship_session_status__with_status_completed__duration_equal_to_zer 🔽🔽🔽 With status COMPLETED and duration 0:05:00 """ - @patch('breathecode.feedback.tasks.send_mentorship_session_survey.delay', MagicMock()) - @patch('breathecode.payments.tasks.refund_mentoring_session.delay', MagicMock()) + @patch("breathecode.feedback.tasks.send_mentorship_session_survey.delay", MagicMock()) + @patch("breathecode.payments.tasks.refund_mentoring_session.delay", MagicMock()) def test_mentorship_session_status__with_status_completed__duration_equal_to_five_minutes(self, enable_signals): enable_signals() @@ -165,15 +184,18 @@ def test_mentorship_session_status__with_status_completed__duration_equal_to_fiv utc_now = timezone.now() mentorship_session = { - 'status': 'COMPLETED', - 'started_at': utc_now, - 'ended_at': utc_now + timedelta(minutes=5), + "status": "COMPLETED", + "started_at": utc_now, + "ended_at": utc_now + timedelta(minutes=5), } model = self.bc.database.create(mentorship_session=mentorship_session, mentorship_service=1) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - self.bc.format.to_dict(model.mentorship_session), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + self.bc.format.to_dict(model.mentorship_session), + ], + ) self.assertEqual(send_mentorship_session_survey.delay.call_args_list, []) self.assertEqual(refund_mentoring_session.delay.call_args_list, []) @@ -182,8 +204,8 @@ def test_mentorship_session_status__with_status_completed__duration_equal_to_fiv 🔽🔽🔽 With status COMPLETED and duration greater than 0:05:00 but without mentee and mentor """ - @patch('breathecode.feedback.tasks.send_mentorship_session_survey.delay', MagicMock()) - 
@patch('breathecode.payments.tasks.refund_mentoring_session.delay', MagicMock()) + @patch("breathecode.feedback.tasks.send_mentorship_session_survey.delay", MagicMock()) + @patch("breathecode.payments.tasks.refund_mentoring_session.delay", MagicMock()) def test_mentorship_session_status__with_status_completed__duration_greater_than_five_minutes(self, enable_signals): enable_signals() @@ -191,15 +213,18 @@ def test_mentorship_session_status__with_status_completed__duration_greater_than utc_now = timezone.now() mentorship_session = { - 'status': 'COMPLETED', - 'started_at': utc_now, - 'ended_at': utc_now + timedelta(minutes=5, seconds=1), + "status": "COMPLETED", + "started_at": utc_now, + "ended_at": utc_now + timedelta(minutes=5, seconds=1), } model = self.bc.database.create(mentorship_session=mentorship_session, mentorship_service=1) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - self.bc.format.to_dict(model.mentorship_session), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + self.bc.format.to_dict(model.mentorship_session), + ], + ) self.assertEqual(send_mentorship_session_survey.delay.call_args_list, []) self.assertEqual(refund_mentoring_session.delay.call_args_list, []) @@ -208,8 +233,8 @@ def test_mentorship_session_status__with_status_completed__duration_greater_than 🔽🔽🔽 With status COMPLETED, duration greater than 0:05:00, with mentee and with mentor """ - @patch('breathecode.feedback.tasks.send_mentorship_session_survey.delay', MagicMock()) - @patch('breathecode.payments.tasks.refund_mentoring_session.delay', MagicMock()) + @patch("breathecode.feedback.tasks.send_mentorship_session_survey.delay", MagicMock()) + @patch("breathecode.payments.tasks.refund_mentoring_session.delay", MagicMock()) def test_mentorship_session_status__with_status_completed__with_mentee__with_mentor(self, enable_signals): enable_signals() @@ -217,15 +242,18 @@ def test_mentorship_session_status__with_status_completed__with_mentee__with_men utc_now = timezone.now() mentorship_session = { - 'status': 'COMPLETED', - 'started_at': utc_now, - 'ended_at': utc_now + timedelta(minutes=5, seconds=1), + "status": "COMPLETED", + "started_at": utc_now, + "ended_at": utc_now + timedelta(minutes=5, seconds=1), } model = self.bc.database.create(mentorship_session=mentorship_session, user=1, mentorship_service=1) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - self.bc.format.to_dict(model.mentorship_session), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + self.bc.format.to_dict(model.mentorship_session), + ], + ) self.assertEqual(send_mentorship_session_survey.delay.call_args_list, [call(1)]) self.assertEqual(refund_mentoring_session.delay.call_args_list, []) diff --git a/breathecode/mentorship/tests/urls/tests_academy_bill.py b/breathecode/mentorship/tests/urls/tests_academy_bill.py index 3b310de7e..e29c5e622 100644 --- a/breathecode/mentorship/tests/urls/tests_academy_bill.py +++ b/breathecode/mentorship/tests/urls/tests_academy_bill.py @@ -1,6 +1,7 @@ """ This file just can contains duck tests refert to AcademyInviteView """ + import hashlib import random from datetime import timedelta @@ -27,204 +28,180 @@ def format_datetime(self, date): def get_serializer(self, mentorship_bill, mentor_profile, mentorship_services, user, academy, data={}): return { - 'created_at': format_datetime(self, mentorship_bill.created_at), - 'ended_at': format_datetime(self, 
mentorship_bill.ended_at), - 'id': mentorship_bill.id, - 'mentor': { - 'booking_url': - mentor_profile.booking_url, - 'id': - mentor_profile.id, - 'services': [{ - 'academy': { - 'icon_url': academy.icon_url, - 'id': academy.id, - 'logo_url': academy.logo_url, - 'name': academy.name, - 'slug': academy.slug, - }, - 'allow_mentee_to_extend': - mentorship_service.allow_mentee_to_extend, - 'allow_mentors_to_extend': - mentorship_service.allow_mentors_to_extend, - 'created_at': - self.bc.datetime.to_iso_string(mentorship_service.created_at), - 'duration': - self.bc.datetime.from_timedelta(mentorship_service.duration), - 'id': - mentorship_service.id, - 'language': - mentorship_service.language, - 'logo_url': - mentorship_service.logo_url, - 'max_duration': - self.bc.datetime.from_timedelta(mentorship_service.max_duration), - 'missed_meeting_duration': - self.bc.datetime.from_timedelta(mentorship_service.missed_meeting_duration), - 'name': - mentorship_service.name, - 'slug': - mentorship_service.slug, - 'status': - mentorship_service.status, - 'updated_at': - self.bc.datetime.to_iso_string(mentorship_service.updated_at), - } for mentorship_service in mentorship_services], - 'slug': - mentor_profile.slug, - 'status': - mentor_profile.status, - 'user': { - 'email': user.email, - 'first_name': user.first_name, - 'id': user.id, - 'last_name': user.last_name, + "created_at": format_datetime(self, mentorship_bill.created_at), + "ended_at": format_datetime(self, mentorship_bill.ended_at), + "id": mentorship_bill.id, + "mentor": { + "booking_url": mentor_profile.booking_url, + "id": mentor_profile.id, + "services": [ + { + "academy": { + "icon_url": academy.icon_url, + "id": academy.id, + "logo_url": academy.logo_url, + "name": academy.name, + "slug": academy.slug, + }, + "allow_mentee_to_extend": mentorship_service.allow_mentee_to_extend, + "allow_mentors_to_extend": mentorship_service.allow_mentors_to_extend, + "created_at": self.bc.datetime.to_iso_string(mentorship_service.created_at), + "duration": self.bc.datetime.from_timedelta(mentorship_service.duration), + "id": mentorship_service.id, + "language": mentorship_service.language, + "logo_url": mentorship_service.logo_url, + "max_duration": self.bc.datetime.from_timedelta(mentorship_service.max_duration), + "missed_meeting_duration": self.bc.datetime.from_timedelta( + mentorship_service.missed_meeting_duration + ), + "name": mentorship_service.name, + "slug": mentorship_service.slug, + "status": mentorship_service.status, + "updated_at": self.bc.datetime.to_iso_string(mentorship_service.updated_at), + } + for mentorship_service in mentorship_services + ], + "slug": mentor_profile.slug, + "status": mentor_profile.status, + "user": { + "email": user.email, + "first_name": user.first_name, + "id": user.id, + "last_name": user.last_name, }, }, - 'overtime_minutes': float(mentorship_bill.overtime_minutes), - 'paid_at': format_datetime(self, mentorship_bill.ended_at), - 'reviewer': { - 'email': user.email, - 'first_name': user.first_name, - 'id': user.id, - 'last_name': user.last_name, + "overtime_minutes": float(mentorship_bill.overtime_minutes), + "paid_at": format_datetime(self, mentorship_bill.ended_at), + "reviewer": { + "email": user.email, + "first_name": user.first_name, + "id": user.id, + "last_name": user.last_name, }, - 'started_at': format_datetime(self, mentorship_bill.ended_at), - 'status': mentorship_bill.status, - 'total_duration_in_hours': float(mentorship_bill.total_duration_in_hours), - 'total_duration_in_minutes': 
float(mentorship_bill.total_duration_in_minutes), - 'total_price': float(mentorship_bill.total_price), + "started_at": format_datetime(self, mentorship_bill.ended_at), + "status": mentorship_bill.status, + "total_duration_in_hours": float(mentorship_bill.total_duration_in_hours), + "total_duration_in_minutes": float(mentorship_bill.total_duration_in_minutes), + "total_price": float(mentorship_bill.total_price), **data, } def post_serializer(data={}): return { - 'accounted_duration': None, - 'agenda': None, - 'allow_billing': False, - 'bill': None, - 'ended_at': None, - 'ends_at': None, - 'id': 1, - 'is_online': False, - 'latitude': None, - 'longitude': None, - 'mentee': None, - 'mentee_left_at': None, - 'mentor': 1, - 'mentor_joined_at': None, - 'mentor_left_at': None, - 'name': None, - 'online_meeting_url': None, - 'online_recording_url': None, - 'started_at': None, - 'starts_at': None, - 'status': 'PENDING', - 'summary': None, + "accounted_duration": None, + "agenda": None, + "allow_billing": False, + "bill": None, + "ended_at": None, + "ends_at": None, + "id": 1, + "is_online": False, + "latitude": None, + "longitude": None, + "mentee": None, + "mentee_left_at": None, + "mentor": 1, + "mentor_joined_at": None, + "mentor_left_at": None, + "name": None, + "online_meeting_url": None, + "online_recording_url": None, + "started_at": None, + "starts_at": None, + "status": "PENDING", + "summary": None, **data, } def put_serializer(self, mentorship_bill, mentor_profile, mentorship_services, user, academy, data={}): return { - 'created_at': format_datetime(self, mentorship_bill.created_at), - 'ended_at': format_datetime(self, mentorship_bill.ended_at), - 'id': mentorship_bill.id, - 'mentor': { - 'booking_url': - mentor_profile.booking_url, - 'id': - mentor_profile.id, - 'services': [{ - 'academy': { - 'icon_url': academy.icon_url, - 'id': academy.id, - 'logo_url': academy.logo_url, - 'name': academy.name, - 'slug': academy.slug, - }, - 'allow_mentee_to_extend': - mentorship_service.allow_mentee_to_extend, - 'allow_mentors_to_extend': - mentorship_service.allow_mentors_to_extend, - 'created_at': - self.bc.datetime.to_iso_string(mentorship_service.created_at), - 'duration': - self.bc.datetime.from_timedelta(mentorship_service.duration), - 'id': - mentorship_service.id, - 'language': - mentorship_service.language, - 'logo_url': - mentorship_service.logo_url, - 'max_duration': - self.bc.datetime.from_timedelta(mentorship_service.max_duration), - 'missed_meeting_duration': - self.bc.datetime.from_timedelta(mentorship_service.missed_meeting_duration), - 'name': - mentorship_service.name, - 'slug': - mentorship_service.slug, - 'status': - mentorship_service.status, - 'updated_at': - self.bc.datetime.to_iso_string(mentorship_service.updated_at), - } for mentorship_service in mentorship_services], - 'slug': - mentor_profile.slug, - 'status': - mentor_profile.status, - 'user': { - 'email': user.email, - 'first_name': user.first_name, - 'id': user.id, - 'last_name': user.last_name, + "created_at": format_datetime(self, mentorship_bill.created_at), + "ended_at": format_datetime(self, mentorship_bill.ended_at), + "id": mentorship_bill.id, + "mentor": { + "booking_url": mentor_profile.booking_url, + "id": mentor_profile.id, + "services": [ + { + "academy": { + "icon_url": academy.icon_url, + "id": academy.id, + "logo_url": academy.logo_url, + "name": academy.name, + "slug": academy.slug, + }, + "allow_mentee_to_extend": mentorship_service.allow_mentee_to_extend, + "allow_mentors_to_extend": 
mentorship_service.allow_mentors_to_extend, + "created_at": self.bc.datetime.to_iso_string(mentorship_service.created_at), + "duration": self.bc.datetime.from_timedelta(mentorship_service.duration), + "id": mentorship_service.id, + "language": mentorship_service.language, + "logo_url": mentorship_service.logo_url, + "max_duration": self.bc.datetime.from_timedelta(mentorship_service.max_duration), + "missed_meeting_duration": self.bc.datetime.from_timedelta( + mentorship_service.missed_meeting_duration + ), + "name": mentorship_service.name, + "slug": mentorship_service.slug, + "status": mentorship_service.status, + "updated_at": self.bc.datetime.to_iso_string(mentorship_service.updated_at), + } + for mentorship_service in mentorship_services + ], + "slug": mentor_profile.slug, + "status": mentor_profile.status, + "user": { + "email": user.email, + "first_name": user.first_name, + "id": user.id, + "last_name": user.last_name, }, }, - 'overtime_minutes': float(mentorship_bill.overtime_minutes), - 'paid_at': format_datetime(self, mentorship_bill.ended_at), - 'reviewer': { - 'email': user.email, - 'first_name': user.first_name, - 'id': user.id, - 'last_name': user.last_name, + "overtime_minutes": float(mentorship_bill.overtime_minutes), + "paid_at": format_datetime(self, mentorship_bill.ended_at), + "reviewer": { + "email": user.email, + "first_name": user.first_name, + "id": user.id, + "last_name": user.last_name, }, - 'started_at': format_datetime(self, mentorship_bill.ended_at), - 'status': mentorship_bill.status, - 'total_duration_in_hours': float(mentorship_bill.total_duration_in_hours), - 'total_duration_in_minutes': float(mentorship_bill.total_duration_in_minutes), - 'total_price': float(mentorship_bill.total_price), + "started_at": format_datetime(self, mentorship_bill.ended_at), + "status": mentorship_bill.status, + "total_duration_in_hours": float(mentorship_bill.total_duration_in_hours), + "total_duration_in_minutes": float(mentorship_bill.total_duration_in_minutes), + "total_price": float(mentorship_bill.total_price), **data, } def mentorship_session_columns(data={}): return { - 'accounted_duration': None, - 'agenda': None, - 'allow_billing': False, - 'bill_id': None, - 'ended_at': None, - 'ends_at': None, - 'id': 1, - 'is_online': False, - 'latitude': None, - 'longitude': None, - 'mentee_id': None, - 'mentee_left_at': None, - 'mentor_id': 1, - 'mentor_joined_at': None, - 'mentor_left_at': None, - 'name': None, - 'online_meeting_url': None, - 'online_recording_url': None, - 'started_at': None, - 'starts_at': None, - 'status': 'PENDING', - 'status_message': None, - 'suggested_accounted_duration': None, - 'summary': None, + "accounted_duration": None, + "agenda": None, + "allow_billing": False, + "bill_id": None, + "ended_at": None, + "ends_at": None, + "id": 1, + "is_online": False, + "latitude": None, + "longitude": None, + "mentee_id": None, + "mentee_left_at": None, + "mentor_id": 1, + "mentor_joined_at": None, + "mentor_left_at": None, + "name": None, + "online_meeting_url": None, + "online_recording_url": None, + "started_at": None, + "starts_at": None, + "status": "PENDING", + "status_message": None, + "suggested_accounted_duration": None, + "summary": None, **data, } @@ -243,13 +220,13 @@ class AcademyServiceTestSuite(MentorshipTestCase): """ def test__get__without_auth(self): - url = reverse_lazy('mentorship:academy_bill') + url = reverse_lazy("mentorship:academy_bill") response = self.client.get(url) json = response.json() expected = { - 'detail': 'Authentication 
credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED, + "detail": "Authentication credentials were not provided.", + "status_code": status.HTTP_401_UNAUTHORIZED, } self.assertEqual(json, expected) @@ -260,13 +237,13 @@ def test__get__without_academy_header(self): self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_bill') + url = reverse_lazy("mentorship:academy_bill") response = self.client.get(url) json = response.json() expected = { - 'detail': "Missing academy_id parameter expected for the endpoint url or 'Academy' header", - 'status_code': 403, + "detail": "Missing academy_id parameter expected for the endpoint url or 'Academy' header", + "status_code": 403, } self.assertEqual(json, expected) @@ -282,13 +259,13 @@ def test__get__without_capabilities(self): self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_bill') + url = reverse_lazy("mentorship:academy_bill") response = self.client.get(url) json = response.json() expected = { - 'detail': "You (user: 1) don't have this capability: read_mentorship_bill for academy 1", - 'status_code': 403, + "detail": "You (user: 1) don't have this capability: read_mentorship_bill for academy 1", + "status_code": 403, } self.assertEqual(json, expected) @@ -299,12 +276,12 @@ def test__get__without_capabilities(self): """ def test__get__without_data(self): - model = self.bc.database.create(user=1, role=1, capability='read_mentorship_bill', profile_academy=1) + model = self.bc.database.create(user=1, role=1, capability="read_mentorship_bill", profile_academy=1) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_bill') + url = reverse_lazy("mentorship:academy_bill") response = self.client.get(url) json = response.json() @@ -318,72 +295,86 @@ def test__get__without_data(self): """ def test__get__with_one_mentor_profile(self): - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_bill', - mentorship_session=1, - mentor_profile=1, - mentorship_service=1, - mentorship_bill=1, - profile_academy=1) + model = self.bc.database.create( + user=1, + role=1, + capability="read_mentorship_bill", + mentorship_session=1, + mentor_profile=1, + mentorship_service=1, + mentorship_bill=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_bill') + url = reverse_lazy("mentorship:academy_bill") response = self.client.get(url) json = response.json() expected = [ - get_serializer(self, - model.mentorship_bill, - model.mentor_profile, [model.mentorship_service], - model.user, - model.academy, - data={}), + get_serializer( + self, + model.mentorship_bill, + model.mentor_profile, + [model.mentorship_service], + model.user, + model.academy, + data={}, + ), ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipBill'), [ - self.bc.format.to_dict(model.mentorship_bill), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipBill"), + [ + self.bc.format.to_dict(model.mentorship_bill), + ], + ) """ 🔽🔽🔽 GET with two MentorshipSession, one MentorProfile and one MentorshipService """ def test__get__with_two_mentor_profile(self): - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_bill', - mentorship_session=1, - 
mentorship_bill=2, - mentor_profile=1, - mentorship_service=1, - profile_academy=1) + model = self.bc.database.create( + user=1, + role=1, + capability="read_mentorship_bill", + mentorship_session=1, + mentorship_bill=2, + mentor_profile=1, + mentorship_service=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_bill') + url = reverse_lazy("mentorship:academy_bill") response = self.client.get(url) json = response.json() mentorship_bill_list = sorted(model.mentorship_bill, key=lambda x: x.created_at, reverse=True) expected = [ - get_serializer(self, - mentorship_bill, - model.mentor_profile, [model.mentorship_service], - model.user, - model.academy, - data={}) for mentorship_bill in mentorship_bill_list + get_serializer( + self, + mentorship_bill, + model.mentor_profile, + [model.mentorship_service], + model.user, + model.academy, + data={}, + ) + for mentorship_bill in mentorship_bill_list ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('mentorship.MentorshipBill'), + self.bc.database.list_of("mentorship.MentorshipBill"), self.bc.format.to_dict(model.mentorship_bill), ) @@ -391,31 +382,32 @@ def test__get__with_two_mentor_profile(self): 🔽🔽🔽 GET with two MentorshipSession, one MentorProfile and one MentorshipService, passing status """ - @patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) + @patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) def test__get__with_two_mentor_profile__passing_bad_status(self): - statuses = ['DUE', 'APPROVED', 'PAID', 'IGNORED'] + statuses = ["DUE", "APPROVED", "PAID", "IGNORED"] for n in range(0, 3): first_status = statuses[n] second_status = statuses[n + 1] choices = [first_status, second_status] - mentorship_bills = [{'status': x} for x in choices] - bad_statuses = ','.join([x for x in statuses if x not in choices]) - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_bill', - mentorship_session=1, - mentorship_bill=mentorship_bills, - mentor_profile=1, - mentorship_service=1, - profile_academy=1) + mentorship_bills = [{"status": x} for x in choices] + bad_statuses = ",".join([x for x in statuses if x not in choices]) + model = self.bc.database.create( + user=1, + role=1, + capability="read_mentorship_bill", + mentorship_session=1, + mentorship_bill=mentorship_bills, + mentor_profile=1, + mentorship_service=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=model.academy.id) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_bill') + \ - f'?status={bad_statuses}' + url = reverse_lazy("mentorship:academy_bill") + f"?status={bad_statuses}" response = self.client.get(url) json = response.json() @@ -424,86 +416,98 @@ def test__get__with_two_mentor_profile__passing_bad_status(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('mentorship.MentorshipBill'), + self.bc.database.list_of("mentorship.MentorshipBill"), self.bc.format.to_dict(model.mentorship_bill), ) # teardown - self.bc.database.delete('mentorship.MentorshipBill') + self.bc.database.delete("mentorship.MentorshipBill") - @patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) + 
@patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) def test__get__with_two_mentor_profile__passing_status(self): - statuses = ['DUE', 'APPROVED', 'PAID', 'IGNORED'] + statuses = ["DUE", "APPROVED", "PAID", "IGNORED"] for n in range(0, 3): first_status = statuses[n] second_status = statuses[n + 1] choices = [first_status, second_status] - mentorship_bills = [{'status': x} for x in choices] - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_bill', - mentorship_session=1, - mentorship_bill=mentorship_bills, - mentor_profile=1, - mentorship_service=1, - profile_academy=1) + mentorship_bills = [{"status": x} for x in choices] + model = self.bc.database.create( + user=1, + role=1, + capability="read_mentorship_bill", + mentorship_session=1, + mentorship_bill=mentorship_bills, + mentor_profile=1, + mentorship_service=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=model.academy.id) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_bill') + f'?status={first_status},{second_status}' + url = reverse_lazy("mentorship:academy_bill") + f"?status={first_status},{second_status}" response = self.client.get(url) json = response.json() mentorship_bill_list = sorted(model.mentorship_bill, key=lambda x: x.created_at, reverse=True) expected = [ - get_serializer(self, - mentorship_bill_list[0], - model.mentor_profile, [model.mentorship_service], - model.user, - model.academy, - data={'status': second_status}), - get_serializer(self, - mentorship_bill_list[1], - model.mentor_profile, [model.mentorship_service], - model.user, - model.academy, - data={'status': first_status}), + get_serializer( + self, + mentorship_bill_list[0], + model.mentor_profile, + [model.mentorship_service], + model.user, + model.academy, + data={"status": second_status}, + ), + get_serializer( + self, + mentorship_bill_list[1], + model.mentor_profile, + [model.mentorship_service], + model.user, + model.academy, + data={"status": first_status}, + ), ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('mentorship.MentorshipBill'), + self.bc.database.list_of("mentorship.MentorshipBill"), self.bc.format.to_dict(model.mentorship_bill), ) # teardown - self.bc.database.delete('mentorship.MentorshipBill') + self.bc.database.delete("mentorship.MentorshipBill") """ 🔽🔽🔽 GET with two MentorshipSession, one MentorProfile and one MentorshipService, passing started_after """ - @patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__get__with_two_mentor_profile__passing_bad_started_after(self): - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_bill', - mentorship_session=1, - mentor_profile=1, - mentorship_service=1, - mentorship_bill=1, - profile_academy=1) + model = self.bc.database.create( + user=1, + role=1, + capability="read_mentorship_bill", + mentorship_session=1, + mentor_profile=1, + mentorship_service=1, + mentorship_bill=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=model.academy.id) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_bill') + \ - 
f'?after={self.bc.datetime.to_iso_string(UTC_NOW + timedelta(seconds=1))}' + url = ( + reverse_lazy("mentorship:academy_bill") + + f"?after={self.bc.datetime.to_iso_string(UTC_NOW + timedelta(seconds=1))}" + ) response = self.client.get(url) json = response.json() @@ -511,66 +515,83 @@ def test__get__with_two_mentor_profile__passing_bad_started_after(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipBill'), [ - self.bc.format.to_dict(model.mentorship_bill), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipBill"), + [ + self.bc.format.to_dict(model.mentorship_bill), + ], + ) - @patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__get__with_two_mentor_profile__passing_started_after(self): - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_bill', - mentorship_bill=1, - mentorship_session=1, - mentor_profile=1, - mentorship_service=1, - profile_academy=1) + model = self.bc.database.create( + user=1, + role=1, + capability="read_mentorship_bill", + mentorship_bill=1, + mentorship_session=1, + mentor_profile=1, + mentorship_service=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=model.academy.id) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_bill') + \ - f'?after={self.bc.datetime.to_iso_string(UTC_NOW - timedelta(seconds=1))}' + url = ( + reverse_lazy("mentorship:academy_bill") + + f"?after={self.bc.datetime.to_iso_string(UTC_NOW - timedelta(seconds=1))}" + ) response = self.client.get(url) json = response.json() expected = [ - get_serializer(self, - model.mentorship_bill, - model.mentor_profile, [model.mentorship_service], - model.user, - model.academy, - data={}), + get_serializer( + self, + model.mentorship_bill, + model.mentor_profile, + [model.mentorship_service], + model.user, + model.academy, + data={}, + ), ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipBill'), [ - self.bc.format.to_dict(model.mentorship_bill), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipBill"), + [ + self.bc.format.to_dict(model.mentorship_bill), + ], + ) """ 🔽🔽🔽 GET with two MentorshipSession, one MentorProfile and one MentorshipService, passing ended_before """ - @patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__get__with_two_mentor_profile__passing_bad_ended_before(self): - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_bill', - mentorship_session=1, - mentor_profile=1, - mentorship_service=1, - mentorship_bill=1, - profile_academy=1) + model = self.bc.database.create( + user=1, + role=1, + capability="read_mentorship_bill", + mentorship_session=1, + mentor_profile=1, + mentorship_service=1, + mentorship_bill=1, + profile_academy=1, + ) 
self.bc.request.set_headers(academy=model.academy.id) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_bill') + \ - f'?before={self.bc.datetime.to_iso_string(UTC_NOW - timedelta(seconds=1))}' + url = ( + reverse_lazy("mentorship:academy_bill") + + f"?before={self.bc.datetime.to_iso_string(UTC_NOW - timedelta(seconds=1))}" + ) response = self.client.get(url) json = response.json() @@ -578,66 +599,81 @@ def test__get__with_two_mentor_profile__passing_bad_ended_before(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipBill'), [ - self.bc.format.to_dict(model.mentorship_bill), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipBill"), + [ + self.bc.format.to_dict(model.mentorship_bill), + ], + ) - @patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__get__with_two_mentor_profile__passing_ended_before(self): - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_bill', - mentorship_session=1, - mentor_profile=1, - mentorship_bill=1, - mentorship_service=1, - profile_academy=1) + model = self.bc.database.create( + user=1, + role=1, + capability="read_mentorship_bill", + mentorship_session=1, + mentor_profile=1, + mentorship_bill=1, + mentorship_service=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=model.academy.id) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_bill') + \ - f'?before={self.bc.datetime.to_iso_string(UTC_NOW + timedelta(seconds=1))}' + url = ( + reverse_lazy("mentorship:academy_bill") + + f"?before={self.bc.datetime.to_iso_string(UTC_NOW + timedelta(seconds=1))}" + ) response = self.client.get(url) json = response.json() expected = [ - get_serializer(self, - model.mentorship_bill, - model.mentor_profile, [model.mentorship_service], - model.user, - model.academy, - data={}), + get_serializer( + self, + model.mentorship_bill, + model.mentor_profile, + [model.mentorship_service], + model.user, + model.academy, + data={}, + ), ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipBill'), [ - self.bc.format.to_dict(model.mentorship_bill), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipBill"), + [ + self.bc.format.to_dict(model.mentorship_bill), + ], + ) """ 🔽🔽🔽 GET with four MentorshipSession, MentorProfile and MentorshipService, passing mentor """ def test__get__with_four_elements__padding_bad_mentor(self): - mentorship_sessions = [{'mentee_id': x, 'mentor_id': x} for x in range(1, 5)] - mentor_profiles = [{'user_id': x, 'service_id': x} for x in range(1, 5)] - mentorship_bills = [{'reviewer_id': x, 'mentor_id': x} for x in range(1, 5)] - model = self.bc.database.create(user=4, - role=1, - capability='read_mentorship_bill', - mentorship_session=mentorship_sessions, - mentor_profile=mentor_profiles, - mentorship_bill=mentorship_bills, - mentorship_service=4, - profile_academy=1) + mentorship_sessions = [{"mentee_id": x, "mentor_id": x} for x in range(1, 5)] + mentor_profiles = [{"user_id": x, "service_id": x} for x in range(1, 
5)] + mentorship_bills = [{"reviewer_id": x, "mentor_id": x} for x in range(1, 5)] + model = self.bc.database.create( + user=4, + role=1, + capability="read_mentorship_bill", + mentorship_session=mentorship_sessions, + mentor_profile=mentor_profiles, + mentorship_bill=mentorship_bills, + mentorship_service=4, + profile_academy=1, + ) self.bc.request.set_headers(academy=1) self.bc.request.authenticate(model.user[0]) - url = reverse_lazy('mentorship:academy_bill') + f'?mentor=5,6' + url = reverse_lazy("mentorship:academy_bill") + f"?mentor=5,6" response = self.client.get(url) json = response.json() @@ -646,51 +682,57 @@ def test__get__with_four_elements__padding_bad_mentor(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('mentorship.MentorshipBill'), + self.bc.database.list_of("mentorship.MentorshipBill"), self.bc.format.to_dict(model.mentorship_bill), ) def test__get__with_four_elements__padding_mentor(self): - mentorship_sessions = [{'mentee_id': x, 'mentor_id': x} for x in range(1, 5)] - mentor_profiles = [{'user_id': x, 'service_id': x} for x in range(1, 5)] - mentorship_bills = [{'reviewer_id': x, 'mentor_id': x} for x in range(1, 5)] - model = self.bc.database.create(user=4, - role=1, - capability='read_mentorship_bill', - mentorship_session=mentorship_sessions, - mentorship_bill=mentorship_bills, - mentor_profile=mentor_profiles, - mentorship_service=4, - profile_academy=1) + mentorship_sessions = [{"mentee_id": x, "mentor_id": x} for x in range(1, 5)] + mentor_profiles = [{"user_id": x, "service_id": x} for x in range(1, 5)] + mentorship_bills = [{"reviewer_id": x, "mentor_id": x} for x in range(1, 5)] + model = self.bc.database.create( + user=4, + role=1, + capability="read_mentorship_bill", + mentorship_session=mentorship_sessions, + mentorship_bill=mentorship_bills, + mentor_profile=mentor_profiles, + mentorship_service=4, + profile_academy=1, + ) self.bc.request.set_headers(academy=1) self.bc.request.authenticate(model.user[0]) - url = reverse_lazy('mentorship:academy_bill') + f'?mentor=1,3' + url = reverse_lazy("mentorship:academy_bill") + f"?mentor=1,3" response = self.client.get(url) json = response.json() expected = [ - get_serializer(self, - model.mentorship_bill[2], - model.mentor_profile[2], - model.mentorship_service, - model.user[2], - model.academy, - data={}), - get_serializer(self, - model.mentorship_bill[0], - model.mentor_profile[0], - model.mentorship_service, - model.user[0], - model.academy, - data={}), + get_serializer( + self, + model.mentorship_bill[2], + model.mentor_profile[2], + model.mentorship_service, + model.user[2], + model.academy, + data={}, + ), + get_serializer( + self, + model.mentorship_bill[0], + model.mentor_profile[0], + model.mentorship_service, + model.user[0], + model.academy, + data={}, + ), ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('mentorship.MentorshipBill'), + self.bc.database.list_of("mentorship.MentorshipBill"), self.bc.format.to_dict(model.mentorship_bill), ) @@ -698,28 +740,32 @@ def test__get__with_four_elements__padding_mentor(self): 🔽🔽🔽 Spy the extensions """ - @patch.object(APIViewExtensionHandlers, '_spy_extensions', MagicMock()) - @patch.object(APIViewExtensionHandlers, '_spy_extension_arguments', MagicMock()) + @patch.object(APIViewExtensionHandlers, "_spy_extensions", MagicMock()) + @patch.object(APIViewExtensionHandlers, 
"_spy_extension_arguments", MagicMock()) def test__get__spy_extensions(self): - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_bill', - mentorship_session=1, - profile_academy=1) + model = self.bc.database.create( + user=1, role=1, capability="read_mentorship_bill", mentorship_session=1, profile_academy=1 + ) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_bill') + url = reverse_lazy("mentorship:academy_bill") self.client.get(url) - self.assertEqual(APIViewExtensionHandlers._spy_extensions.call_args_list, [ - call(['LanguageExtension', 'LookupExtension', 'PaginationExtension', 'SortExtension']), - ]) + self.assertEqual( + APIViewExtensionHandlers._spy_extensions.call_args_list, + [ + call(["LanguageExtension", "LookupExtension", "PaginationExtension", "SortExtension"]), + ], + ) - self.assertEqual(APIViewExtensionHandlers._spy_extension_arguments.call_args_list, [ - call(sort='-created_at', paginate=True), - ]) + self.assertEqual( + APIViewExtensionHandlers._spy_extension_arguments.call_args_list, + [ + call(sort="-created_at", paginate=True), + ], + ) """ 🔽🔽🔽 POST capability @@ -731,13 +777,13 @@ def test__post__without_capabilities(self): self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_bill') + url = reverse_lazy("mentorship:academy_bill") response = self.client.post(url) json = response.json() expected = { - 'detail': "You (user: 1) don't have this capability: crud_mentorship_bill for academy 1", - 'status_code': 403, + "detail": "You (user: 1) don't have this capability: crud_mentorship_bill for academy 1", + "status_code": 403, } self.assertEqual(json, expected) @@ -748,20 +794,20 @@ def test__post__without_capabilities(self): """ def test__post__missing_fields(self): - model = self.bc.database.create(user=1, role=1, capability='crud_mentorship_bill', profile_academy=1) + model = self.bc.database.create(user=1, role=1, capability="crud_mentorship_bill", profile_academy=1) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_bill') + url = reverse_lazy("mentorship:academy_bill") response = self.client.post(url) json = response.json() - expected = {'detail': 'argument-not-provided', 'status_code': 404} + expected = {"detail": "argument-not-provided", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipBill'), []) + self.assertEqual(self.bc.database.list_of("mentorship.MentorshipBill"), []) """ 🔽🔽🔽 PUT capability @@ -773,13 +819,13 @@ def test__put__without_capabilities(self): self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_bill') + url = reverse_lazy("mentorship:academy_bill") response = self.client.put(url) json = response.json() expected = { - 'detail': "You (user: 1) don't have this capability: crud_mentorship_bill for academy 1", - 'status_code': 403, + "detail": "You (user: 1) don't have this capability: crud_mentorship_bill for academy 1", + "status_code": 403, } self.assertEqual(json, expected) @@ -790,32 +836,32 @@ def test__put__without_capabilities(self): """ def test__put__without_data__without_bulk(self): - model = self.bc.database.create(user=1, role=1, capability='crud_mentorship_bill', profile_academy=1) + model = 
self.bc.database.create(user=1, role=1, capability="crud_mentorship_bill", profile_academy=1) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_bill') + url = reverse_lazy("mentorship:academy_bill") response = self.client.put(url) json = response.json() - expected = {'detail': 'without-bulk-mode-and-bill-id', 'status_code': 404} + expected = {"detail": "without-bulk-mode-and-bill-id", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) def test__put__without_data__with_bulk(self): - model = self.bc.database.create(user=1, role=1, capability='crud_mentorship_bill', profile_academy=1) + model = self.bc.database.create(user=1, role=1, capability="crud_mentorship_bill", profile_academy=1) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_bill') - data = [{'id': 1}, {'id': 2}] - response = self.client.put(url, data, format='json') + url = reverse_lazy("mentorship:academy_bill") + data = [{"id": 1}, {"id": 2}] + response = self.client.put(url, data, format="json") json = response.json() - expected = {'detail': 'some-not-found', 'status_code': 404} + expected = {"detail": "some-not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) @@ -825,17 +871,17 @@ def test__put__without_data__with_bulk(self): """ def test__put__without_data__bulk_without_ids(self): - model = self.bc.database.create(user=1, role=1, capability='crud_mentorship_bill', profile_academy=1) + model = self.bc.database.create(user=1, role=1, capability="crud_mentorship_bill", profile_academy=1) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_bill') - data = [{'slug': self.bc.fake.slug()}, {'slug': self.bc.fake.slug()}] - response = self.client.put(url, data, format='json') + url = reverse_lazy("mentorship:academy_bill") + data = [{"slug": self.bc.fake.slug()}, {"slug": self.bc.fake.slug()}] + response = self.client.put(url, data, format="json") json = response.json() - expected = {'detail': 'missing-some-id-in-body', 'status_code': 404} + expected = {"detail": "missing-some-id-in-body", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) @@ -845,102 +891,116 @@ def test__put__without_data__bulk_without_ids(self): """ def test__put__with_two_mentor_profile__passing_all_forbidden_fields(self): - mentorship_sessions = [{'mentor_id': n, 'bill_id': n} for n in range(1, 3)] - mentor_profiles = [{'service_id': n} for n in range(1, 3)] - mentorship_bills = [{'mentor_id': n} for n in range(1, 3)] - model = self.bc.database.create(user=1, - role=1, - capability='crud_mentorship_bill', - mentorship_session=mentorship_sessions, - mentor_profile=mentor_profiles, - mentorship_service=2, - mentorship_bill=mentorship_bills, - profile_academy=1) + mentorship_sessions = [{"mentor_id": n, "bill_id": n} for n in range(1, 3)] + mentor_profiles = [{"service_id": n} for n in range(1, 3)] + mentorship_bills = [{"mentor_id": n} for n in range(1, 3)] + model = self.bc.database.create( + user=1, + role=1, + capability="crud_mentorship_bill", + mentorship_session=mentorship_sessions, + mentor_profile=mentor_profiles, + mentorship_service=2, + mentorship_bill=mentorship_bills, + profile_academy=1, + ) 
self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) created_at = timezone.now() updated_at = timezone.now() - data = [{ - 'id': n, - 'created_at': self.bc.datetime.to_iso_string(created_at), - 'updated_at': self.bc.datetime.to_iso_string(updated_at), - 'academy': 2, - 'reviewer': 2, - 'total_duration_in_minutes': random.random() * 100, - 'total_duration_in_hours': random.random() * 100, - 'total_price': random.random() * 100, - 'overtime_minutes': random.random() * 100, - } for n in range(1, 3)] - - url = reverse_lazy('mentorship:academy_bill') - response = self.client.put(url, data, format='json') + data = [ + { + "id": n, + "created_at": self.bc.datetime.to_iso_string(created_at), + "updated_at": self.bc.datetime.to_iso_string(updated_at), + "academy": 2, + "reviewer": 2, + "total_duration_in_minutes": random.random() * 100, + "total_duration_in_hours": random.random() * 100, + "total_price": random.random() * 100, + "overtime_minutes": random.random() * 100, + } + for n in range(1, 3) + ] + + url = reverse_lazy("mentorship:academy_bill") + response = self.client.put(url, data, format="json") json = response.json() expected = [ - put_serializer(self, - model.mentorship_bill[index], - model.mentor_profile[index], - model.mentorship_service, - model.user, - model.academy, - data={}) for index in range(0, 2) + put_serializer( + self, + model.mentorship_bill[index], + model.mentor_profile[index], + model.mentorship_service, + model.user, + model.academy, + data={}, + ) + for index in range(0, 2) ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipBill'), - self.bc.format.to_dict(model.mentorship_bill)) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipBill"), self.bc.format.to_dict(model.mentorship_bill) + ) """ 🔽🔽🔽 PUT with two MentorshipSession, MentorProfile and MentorshipService, edit status of dirty bill """ def test__put__with_two_mentor_profile__edit_status_of_dirty_bill(self): - mentorship_sessions = [{'mentor_id': n, 'bill_id': n} for n in range(1, 3)] - mentor_profiles = [{'service_id': n} for n in range(1, 3)] - mentorship_bills = [{'mentor_id': n, 'status': 'RECALCULATE'} for n in range(1, 3)] - model = self.bc.database.create(user=1, - role=1, - capability='crud_mentorship_bill', - mentorship_session=mentorship_sessions, - mentor_profile=mentor_profiles, - mentorship_service=2, - mentorship_bill=mentorship_bills, - profile_academy=1) + mentorship_sessions = [{"mentor_id": n, "bill_id": n} for n in range(1, 3)] + mentor_profiles = [{"service_id": n} for n in range(1, 3)] + mentorship_bills = [{"mentor_id": n, "status": "RECALCULATE"} for n in range(1, 3)] + model = self.bc.database.create( + user=1, + role=1, + capability="crud_mentorship_bill", + mentorship_session=mentorship_sessions, + mentor_profile=mentor_profiles, + mentorship_service=2, + mentorship_bill=mentorship_bills, + profile_academy=1, + ) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_bill') - data = [{'id': 1, 'status': 'PAID'}] - response = self.client.put(url, data, format='json') + url = reverse_lazy("mentorship:academy_bill") + data = [{"id": 1, "status": "PAID"}] + response = self.client.put(url, data, format="json") json = response.json() - expected = {'detail': 'trying-edit-status-to-dirty-bill', 'status_code': 400} + expected = {"detail": "trying-edit-status-to-dirty-bill", 
"status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipBill'), - self.bc.format.to_dict(model.mentorship_bill)) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipBill"), self.bc.format.to_dict(model.mentorship_bill) + ) """ 🔽🔽🔽 PUT with two MentorshipSession, MentorProfile and MentorshipService, passing all valid fields """ def test__put__with_two_mentor_profile__passing_all_fields(self): - mentorship_sessions = [{'mentor_id': n, 'bill_id': n} for n in range(1, 3)] - mentor_profiles = [{'service_id': n} for n in range(1, 3)] - mentorship_bills = [{'mentor_id': n} for n in range(1, 3)] - model = self.bc.database.create(user=1, - role=1, - capability='crud_mentorship_bill', - mentorship_session=mentorship_sessions, - mentor_profile=mentor_profiles, - mentorship_service=2, - mentorship_bill=mentorship_bills, - profile_academy=1) + mentorship_sessions = [{"mentor_id": n, "bill_id": n} for n in range(1, 3)] + mentor_profiles = [{"service_id": n} for n in range(1, 3)] + mentorship_bills = [{"mentor_id": n} for n in range(1, 3)] + model = self.bc.database.create( + user=1, + role=1, + capability="crud_mentorship_bill", + mentorship_session=mentorship_sessions, + mentor_profile=mentor_profiles, + mentorship_service=2, + mentorship_bill=mentorship_bills, + profile_academy=1, + ) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) @@ -948,43 +1008,54 @@ def test__put__with_two_mentor_profile__passing_all_fields(self): started_at = timezone.now() ended_at = timezone.now() paid_at = timezone.now() - data = [{ - 'id': n, - 'status': random.choice(['DUE', 'APPROVED', 'PAID', 'IGNORED']), - 'status_mesage': self.bc.fake.text(), - 'started_at': self.bc.datetime.to_iso_string(started_at), - 'ended_at': self.bc.datetime.to_iso_string(ended_at), - 'paid_at': self.bc.datetime.to_iso_string(paid_at), - } for n in range(1, 3)] - - url = reverse_lazy('mentorship:academy_bill') - response = self.client.put(url, data, format='json') + data = [ + { + "id": n, + "status": random.choice(["DUE", "APPROVED", "PAID", "IGNORED"]), + "status_mesage": self.bc.fake.text(), + "started_at": self.bc.datetime.to_iso_string(started_at), + "ended_at": self.bc.datetime.to_iso_string(ended_at), + "paid_at": self.bc.datetime.to_iso_string(paid_at), + } + for n in range(1, 3) + ] + + url = reverse_lazy("mentorship:academy_bill") + response = self.client.put(url, data, format="json") data_fixed_first_element = data[0].copy() - del data_fixed_first_element['status_mesage'] + del data_fixed_first_element["status_mesage"] data_fixed_second_element = data[1].copy() - del data_fixed_second_element['status_mesage'] + del data_fixed_second_element["status_mesage"] json = response.json() elements = [(0, data_fixed_first_element), (1, data_fixed_second_element)] expected = [ - put_serializer(self, - model.mentorship_bill[index], - model.mentor_profile[index], - model.mentorship_service, - model.user, - model.academy, - data=current_data) for index, current_data in elements + put_serializer( + self, + model.mentorship_bill[index], + model.mentor_profile[index], + model.mentorship_service, + model.user, + model.academy, + data=current_data, + ) + for index, current_data in elements ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipBill'), - [{ - 
**self.bc.format.to_dict(model.mentorship_bill[i]), - **data[i], - 'started_at': started_at, - 'ended_at': ended_at, - 'paid_at': paid_at, - } for i in range(0, 2)]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipBill"), + [ + { + **self.bc.format.to_dict(model.mentorship_bill[i]), + **data[i], + "started_at": started_at, + "ended_at": ended_at, + "paid_at": paid_at, + } + for i in range(0, 2) + ], + ) diff --git a/breathecode/mentorship/tests/urls/tests_academy_bill_id.py b/breathecode/mentorship/tests/urls/tests_academy_bill_id.py index 2cca7a604..f353e2163 100644 --- a/breathecode/mentorship/tests/urls/tests_academy_bill_id.py +++ b/breathecode/mentorship/tests/urls/tests_academy_bill_id.py @@ -1,6 +1,7 @@ """ This file just can contains duck tests refert to AcademyInviteView """ + import hashlib import random from datetime import timedelta @@ -27,29 +28,29 @@ def format_datetime(self, date): def get_tooltip(obj): - message = f'This mentorship should last no longer than {int(obj.service.duration.seconds/60)} min. <br />' + message = f"This mentorship should last no longer than {int(obj.service.duration.seconds/60)} min. <br />" if obj.started_at is None: - message += 'The mentee never joined the session. <br />' + message += "The mentee never joined the session. <br />" else: message += f'Started on {obj.started_at.strftime("%m/%d/%Y at %H:%M:%S")}. <br />' if obj.mentor_joined_at is None: - message += f'The mentor never joined' + message += f"The mentor never joined" elif obj.mentor_joined_at > obj.started_at: - message += f'The mentor joined {duration_to_str(obj.mentor_joined_at - obj.started_at)} before. <br />' + message += f"The mentor joined {duration_to_str(obj.mentor_joined_at - obj.started_at)} before. <br />" elif obj.started_at > obj.mentor_joined_at: - message += f'The mentor joined {duration_to_str(obj.started_at - obj.mentor_joined_at)} after. <br />' + message += f"The mentor joined {duration_to_str(obj.started_at - obj.mentor_joined_at)} after. <br />" if obj.ended_at is not None: - message += f'The mentorship lasted {duration_to_str(obj.ended_at - obj.started_at)}. <br />' + message += f"The mentorship lasted {duration_to_str(obj.ended_at - obj.started_at)}. <br />" if (obj.ended_at - obj.started_at) > obj.mentor.service.duration: extra_time = (obj.ended_at - obj.started_at) - obj.mentor.service.duration - message += f'With extra time of {duration_to_str(extra_time)}. <br />' + message += f"With extra time of {duration_to_str(extra_time)}. <br />" else: - message += f'No extra time detected <br />' + message += f"No extra time detected <br />" else: - message += f'The mentorship has not ended yet. <br />' + message += f"The mentorship has not ended yet. 
<br />" if obj.ends_at is not None: - message += f'But it was supposed to end after {duration_to_str(obj.ends_at - obj.started_at)} <br />' + message += f"But it was supposed to end after {duration_to_str(obj.ends_at - obj.started_at)} <br />" return message @@ -57,17 +58,17 @@ def get_tooltip(obj): def get_duration_string(obj): if obj.started_at is None: - return 'Never started' + return "Never started" end_date = obj.ended_at if end_date is None: - return 'Never ended' + return "Never ended" if obj.started_at > end_date: - return 'Ended before it started' + return "Ended before it started" if (end_date - obj.started_at).days > 1: - return f'Many days' + return f"Many days" return duration_to_str(obj.ended_at - obj.started_at) @@ -86,11 +87,11 @@ def get_extra_time(obj): return None if (obj.ended_at - obj.started_at).days > 1: - return f'Many days of extra time, probably it was never closed' + return f"Many days of extra time, probably it was never closed" if (obj.ended_at - obj.started_at) > obj.mentor.service.duration: extra_time = (obj.ended_at - obj.started_at) - obj.mentor.service.duration - return f'Extra time of {duration_to_str(extra_time)}, the expected duration was {duration_to_str(obj.mentor.service.duration)}' + return f"Extra time of {duration_to_str(extra_time)}, the expected duration was {duration_to_str(obj.mentor.service.duration)}" else: return None @@ -101,7 +102,7 @@ def get_mentor_late(obj): return None if obj.started_at > obj.mentor_joined_at and (obj.started_at - obj.mentor_joined_at).seconds > (60 * 4): - return f'The mentor joined {duration_to_str(obj.started_at - obj.mentor_joined_at)} after. <br />' + return f"The mentor joined {duration_to_str(obj.started_at - obj.mentor_joined_at)} after. <br />" else: return None @@ -109,7 +110,7 @@ def get_mentor_late(obj): def get_mente_joined(obj): if obj.started_at is None: - return 'Session did not start because mentee never joined' + return "Session did not start because mentee never joined" else: return True @@ -128,100 +129,82 @@ def get_overtime_hours(obj): def get_sessions(self, obj): - sessions = obj.mentorshipsession_set.order_by('created_at').all() - return [{ - 'accounted_duration': session.accounted_duration, - 'billed_str': get_billed_str(session), - 'duration_string': get_duration_string(session), - 'ended_at': session.ended_at, - 'extra_time': get_extra_time(session), - 'id': session.id, - 'mentee_joined': get_mente_joined(session), - 'mentee': { - 'email': session.mentee.email, - 'first_name': session.mentee.first_name, - 'id': session.mentee.id, - 'last_name': session.mentee.last_name, - }, - 'mentee_left_at': session.mentee_left_at, - 'mentor': { - 'booking_url': - session.mentor.booking_url, - 'created_at': - format_datetime(self, session.mentor.created_at), - 'email': - session.mentor.email, - 'id': - session.mentor.id, - 'one_line_bio': - session.mentor.one_line_bio, - 'online_meeting_url': - session.mentor.online_meeting_url, - 'price_per_hour': - session.mentor.price_per_hour, - 'rating': - session.mentor.rating, - 'services': [{ - 'academy': { - 'icon_url': session.service.academy.icon_url, - 'id': session.service.academy.id, - 'logo_url': session.service.academy.logo_url, - 'name': session.service.academy.name, - 'slug': session.service.academy.slug, + sessions = obj.mentorshipsession_set.order_by("created_at").all() + return [ + { + "accounted_duration": session.accounted_duration, + "billed_str": get_billed_str(session), + "duration_string": get_duration_string(session), + "ended_at": 
session.ended_at, + "extra_time": get_extra_time(session), + "id": session.id, + "mentee_joined": get_mente_joined(session), + "mentee": { + "email": session.mentee.email, + "first_name": session.mentee.first_name, + "id": session.mentee.id, + "last_name": session.mentee.last_name, + }, + "mentee_left_at": session.mentee_left_at, + "mentor": { + "booking_url": session.mentor.booking_url, + "created_at": format_datetime(self, session.mentor.created_at), + "email": session.mentor.email, + "id": session.mentor.id, + "one_line_bio": session.mentor.one_line_bio, + "online_meeting_url": session.mentor.online_meeting_url, + "price_per_hour": session.mentor.price_per_hour, + "rating": session.mentor.rating, + "services": [ + { + "academy": { + "icon_url": session.service.academy.icon_url, + "id": session.service.academy.id, + "logo_url": session.service.academy.logo_url, + "name": session.service.academy.name, + "slug": session.service.academy.slug, + }, + "allow_mentee_to_extend": session.service.allow_mentee_to_extend, + "allow_mentors_to_extend": session.service.allow_mentors_to_extend, + "created_at": format_datetime(self, session.service.created_at), + "duration": self.bc.datetime.from_timedelta(session.service.duration), + "id": session.service.id, + "language": session.service.language, + "logo_url": session.service.logo_url, + "max_duration": self.bc.datetime.from_timedelta(session.service.max_duration), + "missed_meeting_duration": self.bc.datetime.from_timedelta( + session.service.missed_meeting_duration + ), + "name": session.service.name, + "slug": session.service.slug, + "status": session.service.status, + "updated_at": self.bc.datetime.to_iso_string(session.service.updated_at), + } + ], + "slug": session.mentor.slug, + "status": session.mentor.status, + "timezone": session.mentor.timezone, + "updated_at": format_datetime(self, session.mentor.updated_at), + "user": { + "email": session.mentor.user.email, + "first_name": session.mentor.user.first_name, + "id": session.mentor.user.id, + "last_name": session.mentor.user.last_name, }, - 'allow_mentee_to_extend': - session.service.allow_mentee_to_extend, - 'allow_mentors_to_extend': - session.service.allow_mentors_to_extend, - 'created_at': - format_datetime(self, session.service.created_at), - 'duration': - self.bc.datetime.from_timedelta(session.service.duration), - 'id': - session.service.id, - 'language': - session.service.language, - 'logo_url': - session.service.logo_url, - 'max_duration': - self.bc.datetime.from_timedelta(session.service.max_duration), - 'missed_meeting_duration': - self.bc.datetime.from_timedelta(session.service.missed_meeting_duration), - 'name': - session.service.name, - 'slug': - session.service.slug, - 'status': - session.service.status, - 'updated_at': - self.bc.datetime.to_iso_string(session.service.updated_at), - }], - 'slug': - session.mentor.slug, - 'status': - session.mentor.status, - 'timezone': - session.mentor.timezone, - 'updated_at': - format_datetime(self, session.mentor.updated_at), - 'user': { - 'email': session.mentor.user.email, - 'first_name': session.mentor.user.first_name, - 'id': session.mentor.user.id, - 'last_name': session.mentor.user.last_name, - } - }, - 'mentor_joined_at': session.mentor_joined_at, - 'mentor_late': get_mentor_late(session), - 'mentor_left_at': session.mentor_left_at, - 'rating': get_rating(session), - 'started_at': session.started_at, - 'status': session.status, - 'status_message': session.status_message, - 'suggested_accounted_duration': 
session.suggested_accounted_duration, - 'summary': session.summary, - 'tooltip': get_tooltip(session), - } for session in sessions] + }, + "mentor_joined_at": session.mentor_joined_at, + "mentor_late": get_mentor_late(session), + "mentor_left_at": session.mentor_left_at, + "rating": get_rating(session), + "started_at": session.started_at, + "status": session.status, + "status_message": session.status_message, + "suggested_accounted_duration": session.suggested_accounted_duration, + "summary": session.summary, + "tooltip": get_tooltip(session), + } + for session in sessions + ] def get_unfinished_sessions(obj): @@ -229,208 +212,174 @@ def get_unfinished_sessions(obj): def get_public_url(): - return '/v1/mentorship/academy/bill/1/html' + return "/v1/mentorship/academy/bill/1/html" def get_serializer(self, mentorship_bill, mentor_profile, mentorship_service, user, academy, data={}): return { - 'academy': { - 'icon_url': academy.icon_url, - 'id': academy.id, - 'logo_url': academy.logo_url, - 'name': academy.name, - 'slug': academy.slug, + "academy": { + "icon_url": academy.icon_url, + "id": academy.id, + "logo_url": academy.logo_url, + "name": academy.name, + "slug": academy.slug, }, - 'overtime_hours': get_overtime_hours(mentorship_bill), - 'sessions': get_sessions(self, mentorship_bill), - 'unfinished_sessions': get_unfinished_sessions(mentorship_bill), - 'public_url': get_public_url(), - 'created_at': format_datetime(self, mentorship_bill.created_at), - 'ended_at': format_datetime(self, mentorship_bill.ended_at), - 'id': mentorship_bill.id, - 'mentor': { - 'booking_url': - mentor_profile.booking_url, - 'created_at': - format_datetime(self, mentor_profile.created_at), - 'id': - mentor_profile.id, - 'one_line_bio': - mentor_profile.one_line_bio, - 'email': - mentor_profile.email, - 'online_meeting_url': - mentor_profile.online_meeting_url, - 'price_per_hour': - mentor_profile.price_per_hour, - 'rating': - mentor_profile.rating, - 'services': [{ - 'academy': { - 'icon_url': academy.icon_url, - 'id': academy.id, - 'logo_url': academy.logo_url, - 'name': academy.name, - 'slug': academy.slug, - }, - 'allow_mentee_to_extend': - mentorship_service.allow_mentee_to_extend, - 'allow_mentors_to_extend': - mentorship_service.allow_mentors_to_extend, - 'created_at': - format_datetime(self, mentorship_service.created_at), - 'duration': - self.bc.datetime.from_timedelta(mentorship_service.duration), - 'id': - mentorship_service.id, - 'language': - mentorship_service.language, - 'logo_url': - mentorship_service.logo_url, - 'max_duration': - self.bc.datetime.from_timedelta(mentorship_service.max_duration), - 'missed_meeting_duration': - self.bc.datetime.from_timedelta(mentorship_service.missed_meeting_duration), - 'name': - mentorship_service.name, - 'slug': - mentorship_service.slug, - 'status': - mentorship_service.status, - 'updated_at': - format_datetime(self, mentorship_service.updated_at), - }], - 'slug': - mentor_profile.slug, - 'timezone': - mentor_profile.timezone, - 'status': - mentor_profile.status, - 'updated_at': - format_datetime(self, mentor_profile.updated_at), - 'user': { - 'email': user.email, - 'first_name': user.first_name, - 'id': user.id, - 'last_name': user.last_name, + "overtime_hours": get_overtime_hours(mentorship_bill), + "sessions": get_sessions(self, mentorship_bill), + "unfinished_sessions": get_unfinished_sessions(mentorship_bill), + "public_url": get_public_url(), + "created_at": format_datetime(self, mentorship_bill.created_at), + "ended_at": format_datetime(self, 
mentorship_bill.ended_at), + "id": mentorship_bill.id, + "mentor": { + "booking_url": mentor_profile.booking_url, + "created_at": format_datetime(self, mentor_profile.created_at), + "id": mentor_profile.id, + "one_line_bio": mentor_profile.one_line_bio, + "email": mentor_profile.email, + "online_meeting_url": mentor_profile.online_meeting_url, + "price_per_hour": mentor_profile.price_per_hour, + "rating": mentor_profile.rating, + "services": [ + { + "academy": { + "icon_url": academy.icon_url, + "id": academy.id, + "logo_url": academy.logo_url, + "name": academy.name, + "slug": academy.slug, + }, + "allow_mentee_to_extend": mentorship_service.allow_mentee_to_extend, + "allow_mentors_to_extend": mentorship_service.allow_mentors_to_extend, + "created_at": format_datetime(self, mentorship_service.created_at), + "duration": self.bc.datetime.from_timedelta(mentorship_service.duration), + "id": mentorship_service.id, + "language": mentorship_service.language, + "logo_url": mentorship_service.logo_url, + "max_duration": self.bc.datetime.from_timedelta(mentorship_service.max_duration), + "missed_meeting_duration": self.bc.datetime.from_timedelta( + mentorship_service.missed_meeting_duration + ), + "name": mentorship_service.name, + "slug": mentorship_service.slug, + "status": mentorship_service.status, + "updated_at": format_datetime(self, mentorship_service.updated_at), + } + ], + "slug": mentor_profile.slug, + "timezone": mentor_profile.timezone, + "status": mentor_profile.status, + "updated_at": format_datetime(self, mentor_profile.updated_at), + "user": { + "email": user.email, + "first_name": user.first_name, + "id": user.id, + "last_name": user.last_name, }, }, - 'overtime_minutes': float(mentorship_bill.overtime_minutes), - 'paid_at': format_datetime(self, mentorship_bill.ended_at), - 'reviewer': { - 'email': user.email, - 'first_name': user.first_name, - 'id': user.id, - 'last_name': user.last_name, + "overtime_minutes": float(mentorship_bill.overtime_minutes), + "paid_at": format_datetime(self, mentorship_bill.ended_at), + "reviewer": { + "email": user.email, + "first_name": user.first_name, + "id": user.id, + "last_name": user.last_name, }, - 'started_at': format_datetime(self, mentorship_bill.ended_at), - 'status': mentorship_bill.status, - 'total_duration_in_hours': float(mentorship_bill.total_duration_in_hours), - 'total_duration_in_minutes': float(mentorship_bill.total_duration_in_minutes), - 'total_price': float(mentorship_bill.total_price), + "started_at": format_datetime(self, mentorship_bill.ended_at), + "status": mentorship_bill.status, + "total_duration_in_hours": float(mentorship_bill.total_duration_in_hours), + "total_duration_in_minutes": float(mentorship_bill.total_duration_in_minutes), + "total_price": float(mentorship_bill.total_price), **data, } def put_serializer(self, mentorship_bill, mentor_profile, mentorship_service, user, academy, data={}): return { - 'created_at': format_datetime(self, mentorship_bill.created_at), - 'ended_at': format_datetime(self, mentorship_bill.ended_at), - 'id': mentorship_bill.id, - 'mentor': { - 'booking_url': - mentor_profile.booking_url, - 'id': - mentor_profile.id, - 'services': [{ - 'academy': { - 'icon_url': academy.icon_url, - 'id': academy.id, - 'logo_url': academy.logo_url, - 'name': academy.name, - 'slug': academy.slug, - }, - 'allow_mentee_to_extend': - mentorship_service.allow_mentee_to_extend, - 'allow_mentors_to_extend': - mentorship_service.allow_mentors_to_extend, - 'created_at': - 
self.bc.datetime.to_iso_string(mentorship_service.created_at), - 'duration': - self.bc.datetime.from_timedelta(mentorship_service.duration), - 'id': - mentorship_service.id, - 'language': - mentorship_service.language, - 'logo_url': - mentorship_service.logo_url, - 'max_duration': - self.bc.datetime.from_timedelta(mentorship_service.max_duration), - 'missed_meeting_duration': - self.bc.datetime.from_timedelta(mentorship_service.missed_meeting_duration), - 'name': - mentorship_service.name, - 'slug': - mentorship_service.slug, - 'status': - mentorship_service.status, - 'updated_at': - self.bc.datetime.to_iso_string(mentorship_service.updated_at), - }], - 'slug': - mentor_profile.slug, - 'status': - mentor_profile.status, - 'user': { - 'email': user.email, - 'first_name': user.first_name, - 'id': user.id, - 'last_name': user.last_name, + "created_at": format_datetime(self, mentorship_bill.created_at), + "ended_at": format_datetime(self, mentorship_bill.ended_at), + "id": mentorship_bill.id, + "mentor": { + "booking_url": mentor_profile.booking_url, + "id": mentor_profile.id, + "services": [ + { + "academy": { + "icon_url": academy.icon_url, + "id": academy.id, + "logo_url": academy.logo_url, + "name": academy.name, + "slug": academy.slug, + }, + "allow_mentee_to_extend": mentorship_service.allow_mentee_to_extend, + "allow_mentors_to_extend": mentorship_service.allow_mentors_to_extend, + "created_at": self.bc.datetime.to_iso_string(mentorship_service.created_at), + "duration": self.bc.datetime.from_timedelta(mentorship_service.duration), + "id": mentorship_service.id, + "language": mentorship_service.language, + "logo_url": mentorship_service.logo_url, + "max_duration": self.bc.datetime.from_timedelta(mentorship_service.max_duration), + "missed_meeting_duration": self.bc.datetime.from_timedelta( + mentorship_service.missed_meeting_duration + ), + "name": mentorship_service.name, + "slug": mentorship_service.slug, + "status": mentorship_service.status, + "updated_at": self.bc.datetime.to_iso_string(mentorship_service.updated_at), + } + ], + "slug": mentor_profile.slug, + "status": mentor_profile.status, + "user": { + "email": user.email, + "first_name": user.first_name, + "id": user.id, + "last_name": user.last_name, }, }, - 'overtime_minutes': float(mentorship_bill.overtime_minutes), - 'paid_at': format_datetime(self, mentorship_bill.ended_at), - 'reviewer': { - 'email': user.email, - 'first_name': user.first_name, - 'id': user.id, - 'last_name': user.last_name, + "overtime_minutes": float(mentorship_bill.overtime_minutes), + "paid_at": format_datetime(self, mentorship_bill.ended_at), + "reviewer": { + "email": user.email, + "first_name": user.first_name, + "id": user.id, + "last_name": user.last_name, }, - 'started_at': format_datetime(self, mentorship_bill.ended_at), - 'status': mentorship_bill.status, - 'total_duration_in_hours': float(mentorship_bill.total_duration_in_hours), - 'total_duration_in_minutes': float(mentorship_bill.total_duration_in_minutes), - 'total_price': float(mentorship_bill.total_price), + "started_at": format_datetime(self, mentorship_bill.ended_at), + "status": mentorship_bill.status, + "total_duration_in_hours": float(mentorship_bill.total_duration_in_hours), + "total_duration_in_minutes": float(mentorship_bill.total_duration_in_minutes), + "total_price": float(mentorship_bill.total_price), **data, } def mentorship_session_columns(data={}): return { - 'accounted_duration': None, - 'agenda': None, - 'allow_billing': False, - 'bill_id': None, - 'ended_at': None, - 
'ends_at': None, - 'id': 1, - 'is_online': False, - 'latitude': None, - 'longitude': None, - 'mentee_id': None, - 'mentee_left_at': None, - 'mentor_id': 1, - 'mentor_joined_at': None, - 'mentor_left_at': None, - 'name': None, - 'online_meeting_url': None, - 'online_recording_url': None, - 'started_at': None, - 'starts_at': None, - 'status': 'PENDING', - 'status_message': None, - 'suggested_accounted_duration': None, - 'summary': None, + "accounted_duration": None, + "agenda": None, + "allow_billing": False, + "bill_id": None, + "ended_at": None, + "ends_at": None, + "id": 1, + "is_online": False, + "latitude": None, + "longitude": None, + "mentee_id": None, + "mentee_left_at": None, + "mentor_id": 1, + "mentor_joined_at": None, + "mentor_left_at": None, + "name": None, + "online_meeting_url": None, + "online_recording_url": None, + "started_at": None, + "starts_at": None, + "status": "PENDING", + "status_message": None, + "suggested_accounted_duration": None, + "summary": None, **data, } @@ -449,13 +398,13 @@ class AcademyServiceTestSuite(MentorshipTestCase): """ def test__get__without_auth(self): - url = reverse_lazy('mentorship:academy_bill_id', kwargs={'bill_id': 1}) + url = reverse_lazy("mentorship:academy_bill_id", kwargs={"bill_id": 1}) response = self.client.get(url) json = response.json() expected = { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED, + "detail": "Authentication credentials were not provided.", + "status_code": status.HTTP_401_UNAUTHORIZED, } self.assertEqual(json, expected) @@ -466,13 +415,13 @@ def test__get__without_academy_header(self): self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_bill_id', kwargs={'bill_id': 1}) + url = reverse_lazy("mentorship:academy_bill_id", kwargs={"bill_id": 1}) response = self.client.get(url) json = response.json() expected = { - 'detail': "Missing academy_id parameter expected for the endpoint url or 'Academy' header", - 'status_code': 403, + "detail": "Missing academy_id parameter expected for the endpoint url or 'Academy' header", + "status_code": 403, } self.assertEqual(json, expected) @@ -488,13 +437,13 @@ def test__get__without_capabilities(self): self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_bill_id', kwargs={'bill_id': 1}) + url = reverse_lazy("mentorship:academy_bill_id", kwargs={"bill_id": 1}) response = self.client.get(url) json = response.json() expected = { - 'detail': "You (user: 1) don't have this capability: read_mentorship_bill for academy 1", - 'status_code': 403, + "detail": "You (user: 1) don't have this capability: read_mentorship_bill for academy 1", + "status_code": 403, } self.assertEqual(json, expected) @@ -505,16 +454,16 @@ def test__get__without_capabilities(self): """ def test__get__without_data(self): - model = self.bc.database.create(user=1, role=1, capability='read_mentorship_bill', profile_academy=1) + model = self.bc.database.create(user=1, role=1, capability="read_mentorship_bill", profile_academy=1) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_bill_id', kwargs={'bill_id': 1}) + url = reverse_lazy("mentorship:academy_bill_id", kwargs={"bill_id": 1}) response = self.client.get(url) json = response.json() - expected = {'detail': 'not-found', 'status_code': 404} + expected = {"detail": "not-found", "status_code": 404} self.assertEqual(json, expected) 
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) @@ -524,61 +473,70 @@ def test__get__without_data(self): """ def test__get__with_one_mentor_profile(self): - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_bill', - mentorship_session=1, - mentor_profile=1, - mentorship_service=1, - mentorship_bill=1, - profile_academy=1) + model = self.bc.database.create( + user=1, + role=1, + capability="read_mentorship_bill", + mentorship_session=1, + mentor_profile=1, + mentorship_service=1, + mentorship_bill=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_bill_id', kwargs={'bill_id': 1}) + url = reverse_lazy("mentorship:academy_bill_id", kwargs={"bill_id": 1}) response = self.client.get(url) json = response.json() - expected = get_serializer(self, - model.mentorship_bill, - model.mentor_profile, - model.mentorship_service, - model.user, - model.academy, - data={}) + expected = get_serializer( + self, + model.mentorship_bill, + model.mentor_profile, + model.mentorship_service, + model.user, + model.academy, + data={}, + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipBill'), - [self.bc.format.to_dict(model.mentorship_bill)]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipBill"), [self.bc.format.to_dict(model.mentorship_bill)] + ) """ 🔽🔽🔽 Spy the extensions """ - @patch.object(APIViewExtensionHandlers, '_spy_extensions', MagicMock()) - @patch.object(APIViewExtensionHandlers, '_spy_extension_arguments', MagicMock()) + @patch.object(APIViewExtensionHandlers, "_spy_extensions", MagicMock()) + @patch.object(APIViewExtensionHandlers, "_spy_extension_arguments", MagicMock()) def test__get__spy_extensions(self): - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_bill', - mentorship_session=1, - profile_academy=1) + model = self.bc.database.create( + user=1, role=1, capability="read_mentorship_bill", mentorship_session=1, profile_academy=1 + ) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_bill_id', kwargs={'bill_id': 1}) + url = reverse_lazy("mentorship:academy_bill_id", kwargs={"bill_id": 1}) self.client.get(url) - self.assertEqual(APIViewExtensionHandlers._spy_extensions.call_args_list, [ - call(['LanguageExtension', 'LookupExtension', 'PaginationExtension', 'SortExtension']), - ]) + self.assertEqual( + APIViewExtensionHandlers._spy_extensions.call_args_list, + [ + call(["LanguageExtension", "LookupExtension", "PaginationExtension", "SortExtension"]), + ], + ) - self.assertEqual(APIViewExtensionHandlers._spy_extension_arguments.call_args_list, [ - call(sort='-created_at', paginate=True), - ]) + self.assertEqual( + APIViewExtensionHandlers._spy_extension_arguments.call_args_list, + [ + call(sort="-created_at", paginate=True), + ], + ) """ 🔽🔽🔽 PUT capability @@ -590,13 +548,13 @@ def test__put__without_capabilities(self): self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_bill_id', kwargs={'bill_id': 1}) + url = reverse_lazy("mentorship:academy_bill_id", kwargs={"bill_id": 1}) response = self.client.put(url) json = response.json() expected = { - 'detail': "You (user: 1) don't have this capability: crud_mentorship_bill for academy 1", - 'status_code': 403, 
+ "detail": "You (user: 1) don't have this capability: crud_mentorship_bill for academy 1", + "status_code": 403, } self.assertEqual(json, expected) @@ -607,16 +565,16 @@ def test__put__without_capabilities(self): """ def test__put__without_data(self): - model = self.bc.database.create(user=1, role=1, capability='crud_mentorship_bill', profile_academy=1) + model = self.bc.database.create(user=1, role=1, capability="crud_mentorship_bill", profile_academy=1) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_bill_id', kwargs={'bill_id': 1}) + url = reverse_lazy("mentorship:academy_bill_id", kwargs={"bill_id": 1}) response = self.client.put(url) json = response.json() - expected = {'detail': 'not-found', 'status_code': 404} + expected = {"detail": "not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) @@ -626,80 +584,94 @@ def test__put__without_data(self): """ def test__put__with_one_mentor_profile(self): - model = self.bc.database.create(user=1, - role=1, - capability='crud_mentorship_bill', - mentorship_session=1, - mentor_profile=1, - mentorship_service=1, - mentorship_bill=1, - profile_academy=1) + model = self.bc.database.create( + user=1, + role=1, + capability="crud_mentorship_bill", + mentorship_session=1, + mentor_profile=1, + mentorship_service=1, + mentorship_bill=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_bill_id', kwargs={'bill_id': 1}) + url = reverse_lazy("mentorship:academy_bill_id", kwargs={"bill_id": 1}) response = self.client.put(url) json = response.json() - expected = put_serializer(self, - model.mentorship_bill, - model.mentor_profile, - model.mentorship_service, - model.user, - model.academy, - data={}) + expected = put_serializer( + self, + model.mentorship_bill, + model.mentor_profile, + model.mentorship_service, + model.user, + model.academy, + data={}, + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipBill'), [ - self.bc.format.to_dict(model.mentorship_bill), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipBill"), + [ + self.bc.format.to_dict(model.mentorship_bill), + ], + ) """ 🔽🔽🔽 PUT with one MentorshipSession, MentorProfile and MentorshipService, edit status of dirty bill """ def test__put__with_one_mentor_profile__edit_status_of_dirty_bill(self): - mentorship_bill = {'status': 'RECALCULATE'} - model = self.bc.database.create(user=1, - role=1, - capability='crud_mentorship_bill', - mentorship_session=1, - mentor_profile=1, - mentorship_service=1, - mentorship_bill=mentorship_bill, - profile_academy=1) + mentorship_bill = {"status": "RECALCULATE"} + model = self.bc.database.create( + user=1, + role=1, + capability="crud_mentorship_bill", + mentorship_session=1, + mentor_profile=1, + mentorship_service=1, + mentorship_bill=mentorship_bill, + profile_academy=1, + ) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_bill_id', kwargs={'bill_id': 1}) - data = {'status': 'PAID'} - response = self.client.put(url, data, format='json') + url = reverse_lazy("mentorship:academy_bill_id", kwargs={"bill_id": 1}) + data = {"status": "PAID"} + response = self.client.put(url, data, format="json") json = response.json() 
- expected = {'detail': 'trying-edit-status-to-dirty-bill', 'status_code': 400} + expected = {"detail": "trying-edit-status-to-dirty-bill", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipBill'), [ - self.bc.format.to_dict(model.mentorship_bill), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipBill"), + [ + self.bc.format.to_dict(model.mentorship_bill), + ], + ) """ 🔽🔽🔽 PUT with one MentorshipSession, MentorProfile and MentorshipService, passing forbidden fields """ def test__put__with_one_mentor_profile__passing_all_forbidden_fields(self): - model = self.bc.database.create(user=1, - role=1, - capability='crud_mentorship_bill', - mentorship_session=1, - mentor_profile=1, - mentorship_service=1, - mentorship_bill=1, - profile_academy=1) + model = self.bc.database.create( + user=1, + role=1, + capability="crud_mentorship_bill", + mentorship_session=1, + mentor_profile=1, + mentorship_service=1, + mentorship_bill=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) @@ -707,47 +679,54 @@ def test__put__with_one_mentor_profile__passing_all_forbidden_fields(self): created_at = timezone.now() updated_at = timezone.now() data = { - 'created_at': self.bc.datetime.to_iso_string(created_at), - 'updated_at': self.bc.datetime.to_iso_string(updated_at), - 'academy': 2, - 'reviewer': 2, - 'total_duration_in_minutes': random.random() * 100, - 'total_duration_in_hours': random.random() * 100, - 'total_price': random.random() * 100, - 'overtime_minutes': random.random() * 100, + "created_at": self.bc.datetime.to_iso_string(created_at), + "updated_at": self.bc.datetime.to_iso_string(updated_at), + "academy": 2, + "reviewer": 2, + "total_duration_in_minutes": random.random() * 100, + "total_duration_in_hours": random.random() * 100, + "total_price": random.random() * 100, + "overtime_minutes": random.random() * 100, } - url = reverse_lazy('mentorship:academy_bill_id', kwargs={'bill_id': 1}) + url = reverse_lazy("mentorship:academy_bill_id", kwargs={"bill_id": 1}) response = self.client.put(url, data) json = response.json() - expected = put_serializer(self, - model.mentorship_bill, - model.mentor_profile, - model.mentorship_service, - model.user, - model.academy, - data={}) + expected = put_serializer( + self, + model.mentorship_bill, + model.mentor_profile, + model.mentorship_service, + model.user, + model.academy, + data={}, + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipBill'), [ - self.bc.format.to_dict(model.mentorship_bill), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipBill"), + [ + self.bc.format.to_dict(model.mentorship_bill), + ], + ) """ 🔽🔽🔽 PUT with one MentorshipSession, MentorProfile and MentorshipService, passing all valid fields """ def test__put__with_one_mentor_profile__passing_all_fields(self): - model = self.bc.database.create(user=1, - role=1, - capability='crud_mentorship_bill', - mentorship_session=1, - mentor_profile=1, - mentorship_service=1, - mentorship_bill=1, - profile_academy=1) + model = self.bc.database.create( + user=1, + role=1, + capability="crud_mentorship_bill", + mentorship_session=1, + mentor_profile=1, + mentorship_service=1, + mentorship_bill=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=1) 
self.client.force_authenticate(model.user) @@ -756,56 +735,61 @@ def test__put__with_one_mentor_profile__passing_all_fields(self): ended_at = timezone.now() paid_at = timezone.now() data = { - 'status': random.choice(['DUE', 'APPROVED', 'PAID', 'IGNORED']), - 'status_mesage': self.bc.fake.text(), - 'started_at': self.bc.datetime.to_iso_string(started_at), - 'ended_at': self.bc.datetime.to_iso_string(ended_at), - 'paid_at': self.bc.datetime.to_iso_string(paid_at), + "status": random.choice(["DUE", "APPROVED", "PAID", "IGNORED"]), + "status_mesage": self.bc.fake.text(), + "started_at": self.bc.datetime.to_iso_string(started_at), + "ended_at": self.bc.datetime.to_iso_string(ended_at), + "paid_at": self.bc.datetime.to_iso_string(paid_at), } - url = reverse_lazy('mentorship:academy_bill_id', kwargs={'bill_id': 1}) + url = reverse_lazy("mentorship:academy_bill_id", kwargs={"bill_id": 1}) response = self.client.put(url, data) data_fixed = data.copy() - del data_fixed['status_mesage'] + del data_fixed["status_mesage"] json = response.json() - expected = put_serializer(self, - model.mentorship_bill, - model.mentor_profile, - model.mentorship_service, - model.user, - model.academy, - data=data_fixed) + expected = put_serializer( + self, + model.mentorship_bill, + model.mentor_profile, + model.mentorship_service, + model.user, + model.academy, + data=data_fixed, + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipBill'), [ - { - **self.bc.format.to_dict(model.mentorship_bill), - **data, - 'started_at': started_at, - 'ended_at': ended_at, - 'paid_at': paid_at, - }, - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipBill"), + [ + { + **self.bc.format.to_dict(model.mentorship_bill), + **data, + "started_at": started_at, + "ended_at": ended_at, + "paid_at": paid_at, + }, + ], + ) """ 🔽🔽🔽 PUT trying bulk mode """ def test__put__trying_bulk_mode(self): - model = self.bc.database.create(user=1, role=1, capability='crud_mentorship_bill', profile_academy=1) + model = self.bc.database.create(user=1, role=1, capability="crud_mentorship_bill", profile_academy=1) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_bill_id', kwargs={'bill_id': 1}) - data = [{'id': 1}, {'id': 2}] - response = self.client.put(url, data, format='json') + url = reverse_lazy("mentorship:academy_bill_id", kwargs={"bill_id": 1}) + data = [{"id": 1}, {"id": 2}] + response = self.client.put(url, data, format="json") json = response.json() - expected = {'detail': 'bulk-mode-and-bill-id', 'status_code': 404} + expected = {"detail": "bulk-mode-and-bill-id", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) @@ -820,13 +804,13 @@ def test__delete__without_capabilities(self): self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_bill_id', kwargs={'bill_id': 1}) + url = reverse_lazy("mentorship:academy_bill_id", kwargs={"bill_id": 1}) response = self.client.delete(url) json = response.json() expected = { - 'detail': "You (user: 1) don't have this capability: crud_mentorship_bill for academy 1", - 'status_code': 403, + "detail": "You (user: 1) don't have this capability: crud_mentorship_bill for academy 1", + "status_code": 403, } self.assertEqual(json, expected) @@ -837,67 +821,66 @@ def 
test__delete__without_capabilities(self): """ def test__delete__without_data(self): - model = self.bc.database.create(user=1, role=1, capability='crud_mentorship_bill', profile_academy=1) + model = self.bc.database.create(user=1, role=1, capability="crud_mentorship_bill", profile_academy=1) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_bill_id', kwargs={'bill_id': 1}) + url = reverse_lazy("mentorship:academy_bill_id", kwargs={"bill_id": 1}) response = self.client.delete(url) json = response.json() - expected = {'detail': 'not-found', 'status_code': 404} + expected = {"detail": "not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipBill'), []) + self.assertEqual(self.bc.database.list_of("mentorship.MentorshipBill"), []) """ 🔽🔽🔽 DELETE with valid status """ def test__delete__with_data(self): - statuses = ['DUE', 'APPROVED', 'IGNORED'] + statuses = ["DUE", "APPROVED", "IGNORED"] for current in statuses: - mentorship_bill = {'status': current} - model = self.bc.database.create(user=1, - role=1, - capability='crud_mentorship_bill', - profile_academy=1, - mentorship_bill=mentorship_bill) + mentorship_bill = {"status": current} + model = self.bc.database.create( + user=1, role=1, capability="crud_mentorship_bill", profile_academy=1, mentorship_bill=mentorship_bill + ) self.bc.request.set_headers(academy=model.academy.id) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_bill_id', kwargs={'bill_id': model.mentorship_bill.id}) + url = reverse_lazy("mentorship:academy_bill_id", kwargs={"bill_id": model.mentorship_bill.id}) response = self.client.delete(url) self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipBill'), []) + self.assertEqual(self.bc.database.list_of("mentorship.MentorshipBill"), []) """ 🔽🔽🔽 DELETE with status PAID """ def test__delete__with_data__status_paid(self): - mentorship_bill = {'status': 'PAID'} - model = self.bc.database.create(user=1, - role=1, - capability='crud_mentorship_bill', - profile_academy=1, - mentorship_bill=mentorship_bill) + mentorship_bill = {"status": "PAID"} + model = self.bc.database.create( + user=1, role=1, capability="crud_mentorship_bill", profile_academy=1, mentorship_bill=mentorship_bill + ) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_bill_id', kwargs={'bill_id': 1}) + url = reverse_lazy("mentorship:academy_bill_id", kwargs={"bill_id": 1}) response = self.client.delete(url) json = response.json() - expected = {'detail': 'paid-bill', 'status_code': 400} + expected = {"detail": "paid-bill", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipBill'), [ - self.bc.format.to_dict(model.mentorship_bill), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipBill"), + [ + self.bc.format.to_dict(model.mentorship_bill), + ], + ) diff --git a/breathecode/mentorship/tests/urls/tests_academy_bill_id_html.py b/breathecode/mentorship/tests/urls/tests_academy_bill_id_html.py index 460127e48..fdd142f1f 100644 --- a/breathecode/mentorship/tests/urls/tests_academy_bill_id_html.py +++ 
b/breathecode/mentorship/tests/urls/tests_academy_bill_id_html.py @@ -1,6 +1,7 @@ """ Test cases for /academy/:id/member/:id """ + import os import urllib.parse @@ -23,29 +24,29 @@ def format_datetime(self, date): def get_tooltip(obj): - message = f'This mentorship should last no longer than {int(obj.mentor.service.duration.seconds/60)} min. <br />' + message = f"This mentorship should last no longer than {int(obj.mentor.service.duration.seconds/60)} min. <br />" if obj.started_at is None: - message += 'The mentee never joined the session. <br />' + message += "The mentee never joined the session. <br />" else: message += f'Started on {obj.started_at.strftime("%m/%d/%Y at %H:%M:%S")}. <br />' if obj.mentor_joined_at is None: - message += f'The mentor never joined' + message += f"The mentor never joined" elif obj.mentor_joined_at > obj.started_at: - message += f'The mentor joined {duration_to_str(obj.mentor_joined_at - obj.started_at)} before. <br />' + message += f"The mentor joined {duration_to_str(obj.mentor_joined_at - obj.started_at)} before. <br />" elif obj.started_at > obj.mentor_joined_at: - message += f'The mentor joined {duration_to_str(obj.started_at - obj.mentor_joined_at)} after. <br />' + message += f"The mentor joined {duration_to_str(obj.started_at - obj.mentor_joined_at)} after. <br />" if obj.ended_at is not None: - message += f'The mentorship lasted {duration_to_str(obj.ended_at - obj.started_at)}. <br />' + message += f"The mentorship lasted {duration_to_str(obj.ended_at - obj.started_at)}. <br />" if (obj.ended_at - obj.started_at) > obj.mentor.service.duration: extra_time = (obj.ended_at - obj.started_at) - obj.mentor.service.duration - message += f'With extra time of {duration_to_str(extra_time)}. <br />' + message += f"With extra time of {duration_to_str(extra_time)}. <br />" else: - message += f'No extra time detected <br />' + message += f"No extra time detected <br />" else: - message += f'The mentorship has not ended yet. <br />' + message += f"The mentorship has not ended yet. 
<br />" if obj.ends_at is not None: - message += f'But it was supposed to end after {duration_to_str(obj.ends_at - obj.started_at)} <br />' + message += f"But it was supposed to end after {duration_to_str(obj.ends_at - obj.started_at)} <br />" return message @@ -53,17 +54,17 @@ def get_tooltip(obj): def get_duration_string(obj): if obj.started_at is None: - return 'Never started' + return "Never started" end_date = obj.ended_at if end_date is None: - return 'Never ended' + return "Never ended" if obj.started_at > end_date: - return 'Ended before it started' + return "Ended before it started" if (end_date - obj.started_at).days > 1: - return f'Many days' + return f"Many days" return duration_to_str(obj.ended_at - obj.started_at) @@ -82,11 +83,11 @@ def get_extra_time(obj): return None if (obj.ended_at - obj.started_at).days > 1: - return f'Many days of extra time, probably it was never closed' + return f"Many days of extra time, probably it was never closed" if (obj.ended_at - obj.started_at) > obj.mentor.service.duration: extra_time = (obj.ended_at - obj.started_at) - obj.mentor.service.duration - return f'Extra time of {duration_to_str(extra_time)}, the expected duration was {duration_to_str(obj.mentor.service.duration)}' + return f"Extra time of {duration_to_str(extra_time)}, the expected duration was {duration_to_str(obj.mentor.service.duration)}" else: return None @@ -97,7 +98,7 @@ def get_mentor_late(obj): return None if obj.started_at > obj.mentor_joined_at and (obj.started_at - obj.mentor_joined_at).seconds > (60 * 4): - return f'The mentor joined {duration_to_str(obj.started_at - obj.mentor_joined_at)} after. <br />' + return f"The mentor joined {duration_to_str(obj.started_at - obj.mentor_joined_at)} after. <br />" else: return None @@ -105,7 +106,7 @@ def get_mentor_late(obj): def get_mente_joined(obj): if obj.started_at is None: - return 'Session did not start because mentee never joined' + return "Session did not start because mentee never joined" else: return None @@ -124,74 +125,78 @@ def get_overtime_hours(obj): def get_sessions(self, obj): - sessions = obj.mentorshipsession_set.order_by('created_at').all() - return [{ - 'accounted_duration': session.accounted_duration, - 'billed_str': get_billed_str(session), - 'duration_string': get_duration_string(session), - 'ended_at': session.ended_at, - 'extra_time': get_extra_time(session), - 'id': session.id, - 'mente_joined': get_mente_joined(session), - 'mentee': { - 'email': session.mentee.email, - 'first_name': session.mentee.first_name, - 'id': session.mentee.id, - 'last_name': session.mentee.last_name, - }, - 'mentee_left_at': session.mentee_left_at, - 'mentor': { - 'booking_url': session.mentor.booking_url, - 'created_at': format_datetime(self, session.mentor.created_at), - 'email': session.mentor.email, - 'id': session.mentor.id, - 'online_meeting_url': session.mentor.online_meeting_url, - 'price_per_hour': session.mentor.price_per_hour, - 'service': { - 'academy': { - 'icon_url': session.mentor.service.academy.icon_url, - 'id': session.mentor.service.academy.id, - 'logo_url': session.mentor.service.academy.logo_url, - 'name': session.mentor.service.academy.name, - 'slug': session.mentor.service.academy.slug, + sessions = obj.mentorshipsession_set.order_by("created_at").all() + return [ + { + "accounted_duration": session.accounted_duration, + "billed_str": get_billed_str(session), + "duration_string": get_duration_string(session), + "ended_at": session.ended_at, + "extra_time": get_extra_time(session), + "id": session.id, + 
"mente_joined": get_mente_joined(session), + "mentee": { + "email": session.mentee.email, + "first_name": session.mentee.first_name, + "id": session.mentee.id, + "last_name": session.mentee.last_name, + }, + "mentee_left_at": session.mentee_left_at, + "mentor": { + "booking_url": session.mentor.booking_url, + "created_at": format_datetime(self, session.mentor.created_at), + "email": session.mentor.email, + "id": session.mentor.id, + "online_meeting_url": session.mentor.online_meeting_url, + "price_per_hour": session.mentor.price_per_hour, + "service": { + "academy": { + "icon_url": session.mentor.service.academy.icon_url, + "id": session.mentor.service.academy.id, + "logo_url": session.mentor.service.academy.logo_url, + "name": session.mentor.service.academy.name, + "slug": session.mentor.service.academy.slug, + }, + "allow_mentee_to_extend": session.mentor.service.allow_mentee_to_extend, + "allow_mentors_to_extend": session.mentor.service.allow_mentors_to_extend, + "created_at": format_datetime(self, session.mentor.service.created_at), + "duration": self.bc.datetime.from_timedelta(session.mentor.service.duration), + "id": session.mentor.service.id, + "language": session.mentor.service.language, + "logo_url": session.mentor.service.logo_url, + "max_duration": self.bc.datetime.from_timedelta(session.mentor.service.max_duration), + "missed_meeting_duration": self.bc.datetime.from_timedelta( + session.mentor.service.missed_meeting_duration + ), + "name": session.mentor.service.name, + "slug": session.mentor.service.slug, + "status": session.mentor.service.status, + "updated_at": self.bc.datetime.to_iso_string(session.mentor.service.updated_at), + }, + "slug": session.mentor.slug, + "status": session.mentor.status, + "timezone": session.mentor.timezone, + "updated_at": format_datetime(self, session.mentor.updated_at), + "user": { + "email": session.mentor.user.email, + "first_name": session.mentor.user.first_name, + "id": session.mentor.user.id, + "last_name": session.mentor.user.last_name, }, - 'allow_mentee_to_extend': session.mentor.service.allow_mentee_to_extend, - 'allow_mentors_to_extend': session.mentor.service.allow_mentors_to_extend, - 'created_at': format_datetime(self, session.mentor.service.created_at), - 'duration': self.bc.datetime.from_timedelta(session.mentor.service.duration), - 'id': session.mentor.service.id, - 'language': session.mentor.service.language, - 'logo_url': session.mentor.service.logo_url, - 'max_duration': self.bc.datetime.from_timedelta(session.mentor.service.max_duration), - 'missed_meeting_duration': - self.bc.datetime.from_timedelta(session.mentor.service.missed_meeting_duration), - 'name': session.mentor.service.name, - 'slug': session.mentor.service.slug, - 'status': session.mentor.service.status, - 'updated_at': self.bc.datetime.to_iso_string(session.mentor.service.updated_at), }, - 'slug': session.mentor.slug, - 'status': session.mentor.status, - 'timezone': session.mentor.timezone, - 'updated_at': format_datetime(self, session.mentor.updated_at), - 'user': { - 'email': session.mentor.user.email, - 'first_name': session.mentor.user.first_name, - 'id': session.mentor.user.id, - 'last_name': session.mentor.user.last_name, - } - }, - 'mentor_joined_at': session.mentor_joined_at, - 'mentor_late': get_mentor_late(session), - 'mentor_left_at': session.mentor_left_at, - 'rating': get_rating(session), - 'started_at': session.started_at, - 'status': session.status, - 'status_message': session.status_message, - 'suggested_accounted_duration': 
session.suggested_accounted_duration, - 'summary': session.summary, - 'tooltip': get_tooltip(session), - } for session in sessions] + "mentor_joined_at": session.mentor_joined_at, + "mentor_late": get_mentor_late(session), + "mentor_left_at": session.mentor_left_at, + "rating": get_rating(session), + "started_at": session.started_at, + "status": session.status, + "status_message": session.status_message, + "suggested_accounted_duration": session.suggested_accounted_duration, + "summary": session.summary, + "tooltip": get_tooltip(session), + } + for session in sessions + ] def get_unfinished_sessions(obj): @@ -199,78 +204,78 @@ def get_unfinished_sessions(obj): def get_public_url(): - return '/v1/mentorship/academy/bill/1/html' + return "/v1/mentorship/academy/bill/1/html" def get_serializer(self, mentorship_bill, mentor_profile, mentorship_service, user, academy, data={}): return { - 'academy': { - 'icon_url': academy.icon_url, - 'id': academy.id, - 'logo_url': academy.logo_url, - 'name': academy.name, - 'slug': academy.slug, + "academy": { + "icon_url": academy.icon_url, + "id": academy.id, + "logo_url": academy.logo_url, + "name": academy.name, + "slug": academy.slug, }, - 'overtime_hours': get_overtime_hours(mentorship_bill), - 'sessions': get_sessions(self, mentorship_bill), - 'unfinished_sessions': get_unfinished_sessions(mentorship_bill), - 'public_url': get_public_url(), - 'created_at': mentorship_bill.created_at, - 'ended_at': format_datetime(self, mentorship_bill.ended_at), - 'id': mentorship_bill.id, - 'mentor': { - 'booking_url': mentor_profile.booking_url, - 'created_at': format_datetime(self, mentor_profile.created_at), - 'id': mentor_profile.id, - 'email': mentor_profile.email, - 'online_meeting_url': mentor_profile.online_meeting_url, - 'price_per_hour': mentor_profile.price_per_hour, - 'service': { - 'academy': { - 'icon_url': academy.icon_url, - 'id': academy.id, - 'logo_url': academy.logo_url, - 'name': academy.name, - 'slug': academy.slug, + "overtime_hours": get_overtime_hours(mentorship_bill), + "sessions": get_sessions(self, mentorship_bill), + "unfinished_sessions": get_unfinished_sessions(mentorship_bill), + "public_url": get_public_url(), + "created_at": mentorship_bill.created_at, + "ended_at": format_datetime(self, mentorship_bill.ended_at), + "id": mentorship_bill.id, + "mentor": { + "booking_url": mentor_profile.booking_url, + "created_at": format_datetime(self, mentor_profile.created_at), + "id": mentor_profile.id, + "email": mentor_profile.email, + "online_meeting_url": mentor_profile.online_meeting_url, + "price_per_hour": mentor_profile.price_per_hour, + "service": { + "academy": { + "icon_url": academy.icon_url, + "id": academy.id, + "logo_url": academy.logo_url, + "name": academy.name, + "slug": academy.slug, }, - 'allow_mentee_to_extend': mentorship_service.allow_mentee_to_extend, - 'allow_mentors_to_extend': mentorship_service.allow_mentors_to_extend, - 'created_at': format_datetime(self, mentorship_service.created_at), - 'duration': self.bc.datetime.from_timedelta(mentorship_service.duration), - 'id': mentorship_service.id, - 'language': mentorship_service.language, - 'logo_url': mentorship_service.logo_url, - 'max_duration': self.bc.datetime.from_timedelta(mentorship_service.max_duration), - 'missed_meeting_duration': self.bc.datetime.from_timedelta(mentorship_service.missed_meeting_duration), - 'name': mentorship_service.name, - 'slug': mentorship_service.slug, - 'status': mentorship_service.status, - 'updated_at': format_datetime(self, 
mentorship_service.updated_at), + "allow_mentee_to_extend": mentorship_service.allow_mentee_to_extend, + "allow_mentors_to_extend": mentorship_service.allow_mentors_to_extend, + "created_at": format_datetime(self, mentorship_service.created_at), + "duration": self.bc.datetime.from_timedelta(mentorship_service.duration), + "id": mentorship_service.id, + "language": mentorship_service.language, + "logo_url": mentorship_service.logo_url, + "max_duration": self.bc.datetime.from_timedelta(mentorship_service.max_duration), + "missed_meeting_duration": self.bc.datetime.from_timedelta(mentorship_service.missed_meeting_duration), + "name": mentorship_service.name, + "slug": mentorship_service.slug, + "status": mentorship_service.status, + "updated_at": format_datetime(self, mentorship_service.updated_at), }, - 'slug': mentor_profile.slug, - 'timezone': mentor_profile.timezone, - 'status': mentor_profile.status, - 'updated_at': format_datetime(self, mentor_profile.updated_at), - 'user': { - 'email': user.email, - 'first_name': user.first_name, - 'id': user.id, - 'last_name': user.last_name, + "slug": mentor_profile.slug, + "timezone": mentor_profile.timezone, + "status": mentor_profile.status, + "updated_at": format_datetime(self, mentor_profile.updated_at), + "user": { + "email": user.email, + "first_name": user.first_name, + "id": user.id, + "last_name": user.last_name, }, }, - 'overtime_minutes': float(mentorship_bill.overtime_minutes), - 'paid_at': mentorship_bill.ended_at, - 'reviewer': { - 'email': user.email, - 'first_name': user.first_name, - 'id': user.id, - 'last_name': user.last_name, + "overtime_minutes": float(mentorship_bill.overtime_minutes), + "paid_at": mentorship_bill.ended_at, + "reviewer": { + "email": user.email, + "first_name": user.first_name, + "id": user.id, + "last_name": user.last_name, }, - 'started_at': format_datetime(self, mentorship_bill.ended_at), - 'status': mentorship_bill.status, - 'total_duration_in_hours': float(mentorship_bill.total_duration_in_hours), - 'total_duration_in_minutes': float(mentorship_bill.total_duration_in_minutes), - 'total_price': float(mentorship_bill.total_price), + "started_at": format_datetime(self, mentorship_bill.ended_at), + "status": mentorship_bill.status, + "total_duration_in_hours": float(mentorship_bill.total_duration_in_hours), + "total_duration_in_minutes": float(mentorship_bill.total_duration_in_minutes), + "total_price": float(mentorship_bill.total_price), **data, } @@ -278,41 +283,32 @@ def get_serializer(self, mentorship_bill, mentor_profile, mentorship_service, us # IMPORTANT: the loader.render_to_string in a function is inside of function render def render_successfully(self, mentorship_bill, mentor_profile, mentorship_service, user, academy, data={}): request = None - APP_URL = os.getenv('APP_URL', '') + APP_URL = os.getenv("APP_URL", "") - template = loader.get_template('mentorship_invoice.html') - status_map = {'DUE': 'UNDER_REVIEW', 'APPROVED': 'READY_TO_PAY', 'PAID': 'ALREADY PAID'} + template = loader.get_template("mentorship_invoice.html") + status_map = {"DUE": "UNDER_REVIEW", "APPROVED": "READY_TO_PAY", "PAID": "ALREADY PAID"} data = { - 'API_URL': - None, - 'COMPANY_NAME': - '', - 'COMPANY_CONTACT_URL': - '', - 'COMPANY_LEGAL_NAME': - '', - 'COMPANY_ADDRESS': - '', - 'style__success': - '#99ccff', - 'style__danger': - '#ffcccc', - 'style__secondary': - '#ededed', + "API_URL": None, + "COMPANY_NAME": "", + "COMPANY_CONTACT_URL": "", + "COMPANY_LEGAL_NAME": "", + "COMPANY_ADDRESS": "", + "style__success": "#99ccff", 
+ "style__danger": "#ffcccc", + "style__secondary": "#ededed", **get_serializer(self, mentorship_bill, mentor_profile, mentorship_service, user, academy, data={}), - 'status': - status_map[mentorship_bill.status], + "status": status_map[mentorship_bill.status], } if academy: - data['COMPANY_INFO_EMAIL'] = academy.feedback_email - data['COMPANY_LEGAL_NAME'] = academy.legal_name or academy.name - data['COMPANY_LOGO'] = academy.logo_url - data['COMPANY_NAME'] = academy.name + data["COMPANY_INFO_EMAIL"] = academy.feedback_email + data["COMPANY_LEGAL_NAME"] = academy.legal_name or academy.name + data["COMPANY_LOGO"] = academy.logo_url + data["COMPANY_NAME"] = academy.name - if 'heading' not in data: - data['heading'] = academy.name + if "heading" not in data: + data["heading"] = academy.name return template.render(data) @@ -320,13 +316,8 @@ def render_successfully(self, mentorship_bill, mentor_profile, mentorship_servic def render(message): request = None return loader.render_to_string( - 'message.html', - { - 'MESSAGE': message, - 'BUTTON': None, - 'BUTTON_TARGET': '_blank', - 'LINK': None - }, + "message.html", + {"MESSAGE": message, "BUTTON": None, "BUTTON_TARGET": "_blank", "LINK": None}, request, using=None, ) @@ -334,60 +325,66 @@ def render(message): def render_page_with_pending_invites(model): request = None - APP_URL = os.getenv('APP_URL', '') + APP_URL = os.getenv("APP_URL", "") profile_academies = [] - if 'profile_academy' in model: - profile_academies = model.profile_academy if isinstance(model.profile_academy, - list) else [model.profile_academy] + if "profile_academy" in model: + profile_academies = ( + model.profile_academy if isinstance(model.profile_academy, list) else [model.profile_academy] + ) # excluding the accepted invited - profile_academies = [x for x in profile_academies if x.status != 'ACTIVE'] + profile_academies = [x for x in profile_academies if x.status != "ACTIVE"] - querystr = urllib.parse.urlencode({'callback': APP_URL, 'token': model.token.key}) - url = os.getenv('API_URL') + '/v1/auth/academy/html/invite?' + querystr + querystr = urllib.parse.urlencode({"callback": APP_URL, "token": model.token.key}) + url = os.getenv("API_URL") + "/v1/auth/academy/html/invite?" 
+ querystr return loader.render_to_string( - 'academy_invite.html', { - 'subject': - f'Invitation to study at 4Geeks.com', - 'invites': [{ - 'id': profile_academy.id, - 'academy': { - 'id': profile_academy.academy.id, - 'name': profile_academy.academy.name, - 'slug': profile_academy.academy.slug, - 'timezone': profile_academy.academy.timezone, - }, - 'role': profile_academy.role.slug, - 'created_at': profile_academy.created_at, - } for profile_academy in profile_academies], - 'LINK': - url, - 'user': { - 'id': model.user.id, - 'email': model.user.email, - 'first_name': model.user.first_name, - } - }, request) + "academy_invite.html", + { + "subject": f"Invitation to study at 4Geeks.com", + "invites": [ + { + "id": profile_academy.id, + "academy": { + "id": profile_academy.academy.id, + "name": profile_academy.academy.name, + "slug": profile_academy.academy.slug, + "timezone": profile_academy.academy.timezone, + }, + "role": profile_academy.role.slug, + "created_at": profile_academy.created_at, + } + for profile_academy in profile_academies + ], + "LINK": url, + "user": { + "id": model.user.id, + "email": model.user.email, + "first_name": model.user.first_name, + }, + }, + request, + ) class AuthenticateTestSuite(MentorshipTestCase): """Authentication test suite""" + """ 🔽🔽🔽 Auth """ def test_without_auth(self): - url = reverse_lazy('mentorship:academy_bill_id_html', kwargs={'id': 1}) + url = reverse_lazy("mentorship:academy_bill_id_html", kwargs={"id": 1}) response = self.client.get(url) - hash = self.bc.format.to_base64('/v1/mentorship/academy/bill/1/html') + hash = self.bc.format.to_base64("/v1/mentorship/academy/bill/1/html") content = self.bc.format.from_bytes(response.content) - expected = '' + expected = "" self.assertEqual(content, expected) - self.assertEqual(response.url, f'/v1/auth/view/login?attempt=1&url={hash}') + self.assertEqual(response.url, f"/v1/auth/view/login?attempt=1&url={hash}") self.assertEqual(response.status_code, status.HTTP_302_FOUND) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), []) + self.assertEqual(self.bc.database.list_of("authenticate.ProfileAcademy"), []) """ 🔽🔽🔽 GET without MentorshipBill @@ -396,24 +393,24 @@ def test_without_auth(self): def test_without_mentorship_bill(self): model = self.bc.database.create(user=1, token=1) - querystring = self.bc.format.to_querystring({'token': model.token.key}) - url = reverse_lazy('mentorship:academy_bill_id_html', kwargs={'id': 1}) + f'?{querystring}' + querystring = self.bc.format.to_querystring({"token": model.token.key}) + url = reverse_lazy("mentorship:academy_bill_id_html", kwargs={"id": 1}) + f"?{querystring}" response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - expected = render('Bill not found') + expected = render("Bill not found") # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), []) + self.assertEqual(self.bc.database.list_of("authenticate.ProfileAcademy"), []) """ 🔽🔽🔽 GET with one MentorshipBill @@ -422,27 +419,29 @@ def test_without_mentorship_bill(self): def test_with_mentorship_bill(self): model = self.bc.database.create(user=1, token=1, mentorship_bill=1, 
mentorship_service=1) - querystring = self.bc.format.to_querystring({'token': model.token.key}) - url = reverse_lazy('mentorship:academy_bill_id_html', kwargs={'id': 1}) + f'?{querystring}' + querystring = self.bc.format.to_querystring({"token": model.token.key}) + url = reverse_lazy("mentorship:academy_bill_id_html", kwargs={"id": 1}) + f"?{querystring}" response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - expected = render_successfully(self, - model.mentorship_bill, - model.mentor_profile, - model.mentorship_service, - model.user, - model.academy, - data={}) + expected = render_successfully( + self, + model.mentorship_bill, + model.mentor_profile, + model.mentorship_service, + model.user, + model.academy, + data={}, + ) # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), []) + self.assertEqual(self.bc.database.list_of("authenticate.ProfileAcademy"), []) diff --git a/breathecode/mentorship/tests/urls/tests_academy_mentor.py b/breathecode/mentorship/tests/urls/tests_academy_mentor.py index 388cdbc98..f17e782e1 100644 --- a/breathecode/mentorship/tests/urls/tests_academy_mentor.py +++ b/breathecode/mentorship/tests/urls/tests_academy_mentor.py @@ -1,6 +1,7 @@ """ This file just can contains duck tests refert to AcademyInviteView """ + import hashlib from datetime import timedelta from unittest.mock import MagicMock, call, patch @@ -20,70 +21,47 @@ def get_serializer(self, mentor_profile, mentorship_service, user, data={}): return { - 'booking_url': - mentor_profile.booking_url, - 'created_at': - self.bc.datetime.to_iso_string(mentor_profile.created_at), - 'email': - mentor_profile.email, - 'id': - mentor_profile.id, - 'one_line_bio': - mentor_profile.one_line_bio, - 'online_meeting_url': - mentor_profile.online_meeting_url, - 'price_per_hour': - mentor_profile.price_per_hour, - 'rating': - mentor_profile.rating, - 'services': [{ - 'academy': { - 'icon_url': mentorship_service.academy.icon_url, - 'id': mentorship_service.academy.id, - 'logo_url': mentorship_service.academy.logo_url, - 'name': mentorship_service.academy.name, - 'slug': mentorship_service.academy.slug, - }, - 'allow_mentee_to_extend': - mentorship_service.allow_mentee_to_extend, - 'allow_mentors_to_extend': - mentorship_service.allow_mentors_to_extend, - 'created_at': - self.bc.datetime.to_iso_string(mentorship_service.created_at), - 'duration': - self.bc.datetime.from_timedelta(mentorship_service.duration), - 'id': - mentorship_service.id, - 'language': - mentorship_service.language, - 'logo_url': - mentorship_service.logo_url, - 'max_duration': - self.bc.datetime.from_timedelta(mentorship_service.max_duration), - 'missed_meeting_duration': - self.bc.datetime.from_timedelta(mentorship_service.missed_meeting_duration), - 'name': - mentorship_service.name, - 'slug': - mentorship_service.slug, - 'status': - mentorship_service.status, - 'updated_at': - self.bc.datetime.to_iso_string(mentorship_service.updated_at), - }], - 'slug': - mentor_profile.slug, - 'status': - mentor_profile.status, - 'timezone': - mentor_profile.timezone, - 'updated_at': - self.bc.datetime.to_iso_string(mentor_profile.updated_at), - 'user': { - 'email': 
user.email, - 'first_name': user.first_name, - 'id': user.id, - 'last_name': user.last_name, + "booking_url": mentor_profile.booking_url, + "created_at": self.bc.datetime.to_iso_string(mentor_profile.created_at), + "email": mentor_profile.email, + "id": mentor_profile.id, + "one_line_bio": mentor_profile.one_line_bio, + "online_meeting_url": mentor_profile.online_meeting_url, + "price_per_hour": mentor_profile.price_per_hour, + "rating": mentor_profile.rating, + "services": [ + { + "academy": { + "icon_url": mentorship_service.academy.icon_url, + "id": mentorship_service.academy.id, + "logo_url": mentorship_service.academy.logo_url, + "name": mentorship_service.academy.name, + "slug": mentorship_service.academy.slug, + }, + "allow_mentee_to_extend": mentorship_service.allow_mentee_to_extend, + "allow_mentors_to_extend": mentorship_service.allow_mentors_to_extend, + "created_at": self.bc.datetime.to_iso_string(mentorship_service.created_at), + "duration": self.bc.datetime.from_timedelta(mentorship_service.duration), + "id": mentorship_service.id, + "language": mentorship_service.language, + "logo_url": mentorship_service.logo_url, + "max_duration": self.bc.datetime.from_timedelta(mentorship_service.max_duration), + "missed_meeting_duration": self.bc.datetime.from_timedelta(mentorship_service.missed_meeting_duration), + "name": mentorship_service.name, + "slug": mentorship_service.slug, + "status": mentorship_service.status, + "updated_at": self.bc.datetime.to_iso_string(mentorship_service.updated_at), + } + ], + "slug": mentor_profile.slug, + "status": mentor_profile.status, + "timezone": mentor_profile.timezone, + "updated_at": self.bc.datetime.to_iso_string(mentor_profile.updated_at), + "user": { + "email": user.email, + "first_name": user.first_name, + "id": user.id, + "last_name": user.last_name, }, **data, } @@ -91,98 +69,74 @@ def get_serializer(self, mentor_profile, mentorship_service, user, data={}): def post_serializer(self, mentorship_service, user, data={}): return { - 'id': - 0, - 'one_line_bio': - None, - 'slug': - '', - 'user': { - 'id': user.id, - 'first_name': user.first_name, - 'last_name': user.last_name, - 'email': user.email, + "id": 0, + "one_line_bio": None, + "slug": "", + "user": { + "id": user.id, + "first_name": user.first_name, + "last_name": user.last_name, + "email": user.email, }, - 'services': [{ - 'id': - mentorship_service.id, - 'slug': - mentorship_service.slug, - 'name': - mentorship_service.name, - 'status': - mentorship_service.status, - 'academy': { - 'id': mentorship_service.academy.id, - 'slug': mentorship_service.academy.slug, - 'name': mentorship_service.academy.name, - 'logo_url': mentorship_service.academy.logo_url, - 'icon_url': mentorship_service.academy.icon_url, - }, - 'logo_url': - mentorship_service.logo_url, - 'duration': - self.bc.datetime.from_timedelta(mentorship_service.duration), - 'language': - mentorship_service.language, - 'allow_mentee_to_extend': - mentorship_service.allow_mentee_to_extend, - 'allow_mentors_to_extend': - mentorship_service.allow_mentors_to_extend, - 'max_duration': - self.bc.datetime.from_timedelta(mentorship_service.max_duration), - 'missed_meeting_duration': - self.bc.datetime.from_timedelta(mentorship_service.missed_meeting_duration), - 'created_at': - self.bc.datetime.to_iso_string(mentorship_service.created_at), - 'updated_at': - self.bc.datetime.to_iso_string(mentorship_service.updated_at), - 'description': - mentorship_service.description, - }], - 'status': - 'INVITED', - 'price_per_hour': - 20.0, - 
'rating': - None, - 'booking_url': - None, - 'online_meeting_url': - None, - 'timezone': - None, - 'syllabus': [], - 'email': - None, - 'created_at': - self.bc.datetime.to_iso_string(UTC_NOW), - 'updated_at': - self.bc.datetime.to_iso_string(UTC_NOW), + "services": [ + { + "id": mentorship_service.id, + "slug": mentorship_service.slug, + "name": mentorship_service.name, + "status": mentorship_service.status, + "academy": { + "id": mentorship_service.academy.id, + "slug": mentorship_service.academy.slug, + "name": mentorship_service.academy.name, + "logo_url": mentorship_service.academy.logo_url, + "icon_url": mentorship_service.academy.icon_url, + }, + "logo_url": mentorship_service.logo_url, + "duration": self.bc.datetime.from_timedelta(mentorship_service.duration), + "language": mentorship_service.language, + "allow_mentee_to_extend": mentorship_service.allow_mentee_to_extend, + "allow_mentors_to_extend": mentorship_service.allow_mentors_to_extend, + "max_duration": self.bc.datetime.from_timedelta(mentorship_service.max_duration), + "missed_meeting_duration": self.bc.datetime.from_timedelta(mentorship_service.missed_meeting_duration), + "created_at": self.bc.datetime.to_iso_string(mentorship_service.created_at), + "updated_at": self.bc.datetime.to_iso_string(mentorship_service.updated_at), + "description": mentorship_service.description, + } + ], + "status": "INVITED", + "price_per_hour": 20.0, + "rating": None, + "booking_url": None, + "online_meeting_url": None, + "timezone": None, + "syllabus": [], + "email": None, + "created_at": self.bc.datetime.to_iso_string(UTC_NOW), + "updated_at": self.bc.datetime.to_iso_string(UTC_NOW), **data, } def mentor_profile_columns(data={}): - token = hashlib.sha1((str(data['slug'] if 'slug' in data else '') + str(UTC_NOW)).encode('UTF-8')).hexdigest() + token = hashlib.sha1((str(data["slug"] if "slug" in data else "") + str(UTC_NOW)).encode("UTF-8")).hexdigest() return { - 'bio': None, - 'booking_url': None, - 'email': None, - 'id': 0, - 'name': '', - 'one_line_bio': None, - 'calendly_uuid': None, - 'online_meeting_url': None, - 'price_per_hour': 0, - 'rating': None, - 'slug': 'mirai-nikki', - 'status': 'INVITED', - 'timezone': None, - 'token': token, - 'user_id': 0, - 'academy_id': 0, - 'availability_report': [], + "bio": None, + "booking_url": None, + "email": None, + "id": 0, + "name": "", + "one_line_bio": None, + "calendly_uuid": None, + "online_meeting_url": None, + "price_per_hour": 0, + "rating": None, + "slug": "mirai-nikki", + "status": "INVITED", + "timezone": None, + "token": token, + "user_id": 0, + "academy_id": 0, + "availability_report": [], **data, } @@ -193,13 +147,13 @@ class AcademyServiceTestSuite(MentorshipTestCase): """ def test__get__without_auth(self): - url = reverse_lazy('mentorship:academy_mentor') + url = reverse_lazy("mentorship:academy_mentor") response = self.client.get(url) json = response.json() expected = { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED, + "detail": "Authentication credentials were not provided.", + "status_code": status.HTTP_401_UNAUTHORIZED, } self.assertEqual(json, expected) @@ -210,13 +164,13 @@ def test__get__without_academy_header(self): self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_mentor') + url = reverse_lazy("mentorship:academy_mentor") response = self.client.get(url) json = response.json() expected = { - 'detail': "Missing academy_id parameter expected for the endpoint url or 'Academy' header", 
- 'status_code': 403, + "detail": "Missing academy_id parameter expected for the endpoint url or 'Academy' header", + "status_code": 403, } self.assertEqual(json, expected) @@ -232,13 +186,13 @@ def test__get__without_capabilities(self): self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_mentor') + url = reverse_lazy("mentorship:academy_mentor") response = self.client.get(url) json = response.json() expected = { - 'detail': "You (user: 1) don't have this capability: read_mentorship_mentor for academy 1", - 'status_code': 403, + "detail": "You (user: 1) don't have this capability: read_mentorship_mentor for academy 1", + "status_code": 403, } self.assertEqual(json, expected) @@ -249,12 +203,12 @@ def test__get__without_capabilities(self): """ def test__get__without_data(self): - model = self.bc.database.create(user=1, role=1, capability='read_mentorship_mentor', profile_academy=1) + model = self.bc.database.create(user=1, role=1, capability="read_mentorship_mentor", profile_academy=1) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_mentor') + url = reverse_lazy("mentorship:academy_mentor") response = self.client.get(url) json = response.json() @@ -268,17 +222,19 @@ def test__get__without_data(self): """ def test__get__with_one_mentor_profile(self): - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_mentor', - mentor_profile=1, - mentorship_service=1, - profile_academy=1) + model = self.bc.database.create( + user=1, + role=1, + capability="read_mentorship_mentor", + mentor_profile=1, + mentorship_service=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_mentor') + url = reverse_lazy("mentorship:academy_mentor") response = self.client.get(url) json = response.json() @@ -288,26 +244,31 @@ def test__get__with_one_mentor_profile(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), [ - self.bc.format.to_dict(model.mentor_profile), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorProfile"), + [ + self.bc.format.to_dict(model.mentor_profile), + ], + ) """ 🔽🔽🔽 GET with two MentorProfile """ def test__get__with_two_mentor_profile(self): - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_mentor', - mentor_profile=2, - mentorship_service=1, - profile_academy=1) + model = self.bc.database.create( + user=1, + role=1, + capability="read_mentorship_mentor", + mentor_profile=2, + mentorship_service=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_mentor') + url = reverse_lazy("mentorship:academy_mentor") response = self.client.get(url) json = response.json() @@ -319,26 +280,29 @@ def test__get__with_two_mentor_profile(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), - self.bc.format.to_dict(model.mentor_profile)) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorProfile"), self.bc.format.to_dict(model.mentor_profile) + ) """ 🔽🔽🔽 GET with two MentorProfile passing service in querystring """ def 
test__get__with_two_mentor_profile__passing_bad_service(self): - mentorship_service = {'slug': self.bc.fake.slug()} - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_mentor', - mentor_profile=2, - mentorship_service=mentorship_service, - profile_academy=1) + mentorship_service = {"slug": self.bc.fake.slug()} + model = self.bc.database.create( + user=1, + role=1, + capability="read_mentorship_mentor", + mentor_profile=2, + mentorship_service=mentorship_service, + profile_academy=1, + ) self.bc.request.set_headers(academy=model.academy.id) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_mentor') + f'?services={self.bc.fake.slug()}' + url = reverse_lazy("mentorship:academy_mentor") + f"?services={self.bc.fake.slug()}" response = self.client.get(url) json = response.json() @@ -347,26 +311,28 @@ def test__get__with_two_mentor_profile__passing_bad_service(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('mentorship.MentorProfile'), + self.bc.database.list_of("mentorship.MentorProfile"), self.bc.format.to_dict(model.mentor_profile), ) - self.bc.database.delete('mentorship.MentorProfile') + self.bc.database.delete("mentorship.MentorProfile") def test__get__with_two_mentor_profile__passing_service(self): slug = self.bc.fake.slug() - mentorship_service = {'slug': slug} - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_mentor', - mentor_profile=2, - mentorship_service=mentorship_service, - profile_academy=1) + mentorship_service = {"slug": slug} + model = self.bc.database.create( + user=1, + role=1, + capability="read_mentorship_mentor", + mentor_profile=2, + mentorship_service=mentorship_service, + profile_academy=1, + ) self.bc.request.set_headers(academy=model.academy.id) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_mentor') + f'?service={slug}' + url = reverse_lazy("mentorship:academy_mentor") + f"?service={slug}" response = self.client.get(url) json = response.json() @@ -379,40 +345,48 @@ def test__get__with_two_mentor_profile__passing_service(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('mentorship.MentorProfile'), + self.bc.database.list_of("mentorship.MentorProfile"), self.bc.format.to_dict(model.mentor_profile), ) - self.bc.database.delete('mentorship.MentorProfile') + self.bc.database.delete("mentorship.MentorProfile") """ 🔽🔽🔽 GET passing like """ def test__get__mentor__passing_like_wrong(self): - model = self.bc.database.create(user=[{ - 'id': 1, - 'first_name': 'John', - 'email': 'john@example.com', - }, { - 'id': 2, - 'first_name': 'Carl', - 'email': 'carl@example.com', - }], - role=1, - capability='read_mentorship_mentor', - mentor_profile=[{ - 'user_id': 1, - }, { - 'user_id': 2, - }], - mentorship_service=1, - profile_academy=1) + model = self.bc.database.create( + user=[ + { + "id": 1, + "first_name": "John", + "email": "john@example.com", + }, + { + "id": 2, + "first_name": "Carl", + "email": "carl@example.com", + }, + ], + role=1, + capability="read_mentorship_mentor", + mentor_profile=[ + { + "user_id": 1, + }, + { + "user_id": 2, + }, + ], + mentorship_service=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=model.academy.id) self.bc.request.authenticate(model.user[0]) - url = reverse_lazy('mentorship:academy_mentor') + f'?like=luke' + 
url = reverse_lazy("mentorship:academy_mentor") + f"?like=luke" response = self.client.get(url) json = response.json() @@ -421,32 +395,40 @@ def test__get__mentor__passing_like_wrong(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.bc.database.delete('mentorship.MentorProfile') + self.bc.database.delete("mentorship.MentorProfile") def test__get__mentor__passing_like(self): - model = self.bc.database.create(user=[{ - 'id': 1, - 'first_name': 'John', - 'email': 'john@example.com', - }, { - 'id': 2, - 'first_name': 'Carl', - 'email': 'carl@example.com', - }], - role=1, - capability='read_mentorship_mentor', - mentor_profile=[{ - 'user_id': 1, - }, { - 'user_id': 2, - }], - mentorship_service=1, - profile_academy=1) + model = self.bc.database.create( + user=[ + { + "id": 1, + "first_name": "John", + "email": "john@example.com", + }, + { + "id": 2, + "first_name": "Carl", + "email": "carl@example.com", + }, + ], + role=1, + capability="read_mentorship_mentor", + mentor_profile=[ + { + "user_id": 1, + }, + { + "user_id": 2, + }, + ], + mentorship_service=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=model.academy.id) self.bc.request.authenticate(model.user[0]) - url = reverse_lazy('mentorship:academy_mentor') + f'?like=john' + url = reverse_lazy("mentorship:academy_mentor") + f"?like=john" response = self.client.get(url) json = response.json() @@ -457,29 +439,31 @@ def test__get__mentor__passing_like(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.bc.database.delete('mentorship.MentorProfile') + self.bc.database.delete("mentorship.MentorProfile") """ 🔽🔽🔽 GET with two MentorProfile passing status in querystring """ def test__get__with_two_mentor_profile__passing_bad_status(self): - statuses = ['INVITED', 'ACTIVE', 'UNLISTED', 'INNACTIVE'] + statuses = ["INVITED", "ACTIVE", "UNLISTED", "INNACTIVE"] for current_status in range(0, 3): - bad_statuses = ','.join([x for x in statuses if x != current_status]) - - mentor_profile = {'status': current_status} - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_mentor', - mentor_profile=(2, mentor_profile), - profile_academy=1) + bad_statuses = ",".join([x for x in statuses if x != current_status]) + + mentor_profile = {"status": current_status} + model = self.bc.database.create( + user=1, + role=1, + capability="read_mentorship_mentor", + mentor_profile=(2, mentor_profile), + profile_academy=1, + ) self.bc.request.set_headers(academy=model.academy.id) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_mentor') + f'?status={bad_statuses}' + url = reverse_lazy("mentorship:academy_mentor") + f"?status={bad_statuses}" response = self.client.get(url) json = response.json() @@ -488,63 +472,61 @@ def test__get__with_two_mentor_profile__passing_bad_status(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('mentorship.MentorProfile'), + self.bc.database.list_of("mentorship.MentorProfile"), self.bc.format.to_dict(model.mentor_profile), ) - self.bc.database.delete('mentorship.MentorProfile') + self.bc.database.delete("mentorship.MentorProfile") def test__get__with_two_mentor_profile__passing_status(self): - statuses = ['INVITED', 'ACTIVE', 'UNLISTED', 'INNACTIVE'] + statuses = ["INVITED", "ACTIVE", "UNLISTED", "INNACTIVE"] for n in range(0, 4): # 0, 1, 10, 11, 0 - first_bin_key = 
bin(n).replace('0b', '')[-2:] + first_bin_key = bin(n).replace("0b", "")[-2:] first_key = int(first_bin_key, 2) first_status = statuses[first_key] # 0, 1, 10, 11, 0 - second_bin_key = bin(n + 1).replace('0b', '')[-2:] + second_bin_key = bin(n + 1).replace("0b", "")[-2:] second_key = int(second_bin_key, 2) second_status = statuses[second_key] - mentor_profiles = [{'status': x} for x in [first_status, second_status]] - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_mentor', - mentor_profile=mentor_profiles, - mentorship_service=1, - profile_academy=1) + mentor_profiles = [{"status": x} for x in [first_status, second_status]] + model = self.bc.database.create( + user=1, + role=1, + capability="read_mentorship_mentor", + mentor_profile=mentor_profiles, + mentorship_service=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=model.academy.id) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_mentor') + f'?status={first_status},{second_status}' + url = reverse_lazy("mentorship:academy_mentor") + f"?status={first_status},{second_status}" response = self.client.get(url) json = response.json() mentor_profile = sorted(model.mentor_profile, key=lambda x: x.created_at, reverse=True) expected = [ - get_serializer(self, - mentor_profile[0], - model.mentorship_service, - model.user, - data={'status': second_status}), - get_serializer(self, - mentor_profile[1], - model.mentorship_service, - model.user, - data={'status': first_status}), + get_serializer( + self, mentor_profile[0], model.mentorship_service, model.user, data={"status": second_status} + ), + get_serializer( + self, mentor_profile[1], model.mentorship_service, model.user, data={"status": first_status} + ), ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('mentorship.MentorProfile'), + self.bc.database.list_of("mentorship.MentorProfile"), self.bc.format.to_dict(model.mentor_profile), ) - self.bc.database.delete('mentorship.MentorProfile') + self.bc.database.delete("mentorship.MentorProfile") """ 🔽🔽🔽 GET with two MentorProfile passing syllabus in querystring @@ -552,17 +534,14 @@ def test__get__with_two_mentor_profile__passing_status(self): def test__get__with_two_mentor_profile__passing_bad_syllabus(self): slug = self.bc.fake.slug() - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_mentor', - mentor_profile=1, - syllabus=1, - profile_academy=1) + model = self.bc.database.create( + user=1, role=1, capability="read_mentorship_mentor", mentor_profile=1, syllabus=1, profile_academy=1 + ) self.bc.request.set_headers(academy=model.academy.id) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_mentor') + f'?syllabus={slug}' + url = reverse_lazy("mentorship:academy_mentor") + f"?syllabus={slug}" response = self.client.get(url) json = response.json() @@ -570,32 +549,37 @@ def test__get__with_two_mentor_profile__passing_bad_syllabus(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), [ - self.bc.format.to_dict(model.mentor_profile), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorProfile"), + [ + self.bc.format.to_dict(model.mentor_profile), + ], + ) - self.bc.database.delete('mentorship.MentorProfile') + self.bc.database.delete("mentorship.MentorProfile") def 
test__get__with_two_mentor_profile__passing_syllabus(self): slug = self.bc.fake.slug() - syllabus = {'slug': slug} + syllabus = {"slug": slug} profile_academy = { - 'first_name': self.bc.fake.first_name(), - 'last_name': self.bc.fake.last_name(), - 'email': self.bc.fake.email(), + "first_name": self.bc.fake.first_name(), + "last_name": self.bc.fake.last_name(), + "email": self.bc.fake.email(), } - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_mentor', - mentor_profile=1, - mentorship_service=1, - syllabus=syllabus, - profile_academy=profile_academy) + model = self.bc.database.create( + user=1, + role=1, + capability="read_mentorship_mentor", + mentor_profile=1, + mentorship_service=1, + syllabus=syllabus, + profile_academy=profile_academy, + ) self.bc.request.set_headers(academy=model.academy.id) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_mentor') + f'?syllabus={slug}' + url = reverse_lazy("mentorship:academy_mentor") + f"?syllabus={slug}" response = self.client.get(url) json = response.json() @@ -605,35 +589,39 @@ def test__get__with_two_mentor_profile__passing_syllabus(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), [ - self.bc.format.to_dict(model.mentor_profile), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorProfile"), + [ + self.bc.format.to_dict(model.mentor_profile), + ], + ) - self.bc.database.delete('mentorship.MentorProfile') + self.bc.database.delete("mentorship.MentorProfile") def test__get__with_two_mentor_profile_with_two_syllabus__passing__syllabus(self): slug_1 = self.bc.fake.slug() slug_2 = self.bc.fake.slug() - syllabus = [{'slug': slug_1}, {'slug': slug_2}] + syllabus = [{"slug": slug_1}, {"slug": slug_2}] profile_academy = { - 'first_name': self.bc.fake.first_name(), - 'last_name': self.bc.fake.last_name(), - 'email': self.bc.fake.email(), + "first_name": self.bc.fake.first_name(), + "last_name": self.bc.fake.last_name(), + "email": self.bc.fake.email(), } model_syllabus = self.bc.database.create(syllabus=syllabus) model = self.bc.database.create( user=1, role=1, - capability='read_mentorship_mentor', - mentor_profile={'syllabus': [model_syllabus.syllabus[0], model_syllabus.syllabus[1]]}, + capability="read_mentorship_mentor", + mentor_profile={"syllabus": [model_syllabus.syllabus[0], model_syllabus.syllabus[1]]}, mentorship_service=1, syllabus=syllabus, - profile_academy=profile_academy) + profile_academy=profile_academy, + ) self.bc.request.set_headers(academy=model.academy.id) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_mentor') + f'?syllabus={slug_1},{slug_2}' + url = reverse_lazy("mentorship:academy_mentor") + f"?syllabus={slug_1},{slug_2}" response = self.client.get(url) json = response.json() @@ -643,38 +631,47 @@ def test__get__with_two_mentor_profile_with_two_syllabus__passing__syllabus(self self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), [ - self.bc.format.to_dict(model.mentor_profile), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorProfile"), + [ + self.bc.format.to_dict(model.mentor_profile), + ], + ) - self.bc.database.delete('mentorship.MentorProfile') + self.bc.database.delete("mentorship.MentorProfile") """ 🔽🔽🔽 Spy the extensions """ - 
@patch.object(APIViewExtensionHandlers, '_spy_extensions', MagicMock()) - @patch.object(APIViewExtensionHandlers, '_spy_extension_arguments', MagicMock()) + @patch.object(APIViewExtensionHandlers, "_spy_extensions", MagicMock()) + @patch.object(APIViewExtensionHandlers, "_spy_extension_arguments", MagicMock()) def test__get__spy_extensions(self): - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_mentor', - mentor_profile=1, - profile_academy=1) + model = self.bc.database.create( + user=1, role=1, capability="read_mentorship_mentor", mentor_profile=1, profile_academy=1 + ) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_mentor') + url = reverse_lazy("mentorship:academy_mentor") self.client.get(url) - self.assertEqual(APIViewExtensionHandlers._spy_extensions.call_args_list, [ - call(['CacheExtension', 'LanguageExtension', 'LookupExtension', 'PaginationExtension', 'SortExtension']), - ]) + self.assertEqual( + APIViewExtensionHandlers._spy_extensions.call_args_list, + [ + call( + ["CacheExtension", "LanguageExtension", "LookupExtension", "PaginationExtension", "SortExtension"] + ), + ], + ) - self.bc.check.calls(APIViewExtensionHandlers._spy_extension_arguments.call_args_list, [ - call(cache=MentorProfileCache, sort='-created_at', paginate=True), - ]) + self.bc.check.calls( + APIViewExtensionHandlers._spy_extension_arguments.call_args_list, + [ + call(cache=MentorProfileCache, sort="-created_at", paginate=True), + ], + ) """ 🔽🔽🔽 POST capability @@ -686,13 +683,13 @@ def test__post__without_capabilities(self): self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_mentor') + url = reverse_lazy("mentorship:academy_mentor") response = self.client.post(url) json = response.json() expected = { - 'detail': "You (user: 1) don't have this capability: crud_mentorship_mentor for academy 1", - 'status_code': 403, + "detail": "You (user: 1) don't have this capability: crud_mentorship_mentor for academy 1", + "status_code": 403, } self.assertEqual(json, expected) @@ -703,16 +700,16 @@ def test__post__without_capabilities(self): """ def test__post__without_slug_fields_in_body(self): - model = self.bc.database.create(user=1, role=1, capability='crud_mentorship_mentor', profile_academy=1) + model = self.bc.database.create(user=1, role=1, capability="crud_mentorship_mentor", profile_academy=1) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_mentor') + url = reverse_lazy("mentorship:academy_mentor") response = self.client.post(url) json = response.json() - expected = {'detail': 'missing-slug-field', 'status_code': 400} + expected = {"detail": "missing-slug-field", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -722,156 +719,164 @@ def test__post__without_slug_fields_in_body(self): """ def test__post__without_required_fields_in_body(self): - model = self.bc.database.create(user=1, role=1, capability='crud_mentorship_mentor', profile_academy=1) + model = self.bc.database.create(user=1, role=1, capability="crud_mentorship_mentor", profile_academy=1) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_mentor') - data = {'slug': 'mirai-nikki', 'name': 'Mirai Nikki'} - response = self.client.post(url, data, format='json') + 
url = reverse_lazy("mentorship:academy_mentor") + data = {"slug": "mirai-nikki", "name": "Mirai Nikki"} + response = self.client.post(url, data, format="json") json = response.json() expected = { - 'price_per_hour': ['This field is required.'], - 'services': ['This field is required.'], - 'user': ['This field is required.'], + "price_per_hour": ["This field is required."], + "services": ["This field is required."], + "user": ["This field is required."], } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), []) + self.assertEqual(self.bc.database.list_of("mentorship.MentorProfile"), []) """ 🔽🔽🔽 POST creating a element """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__post__creating_a_elements(self): email = self.bc.fake.email() - profile_academy = {'name': self.bc.fake.name(), 'email': email} - model = self.bc.database.create(user=1, - role=1, - capability='crud_mentorship_mentor', - profile_academy=profile_academy, - mentorship_service=1) + profile_academy = {"name": self.bc.fake.name(), "email": email} + model = self.bc.database.create( + user=1, role=1, capability="crud_mentorship_mentor", profile_academy=profile_academy, mentorship_service=1 + ) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_mentor') - data = {'slug': 'mirai-nikki', 'name': 'Mirai Nikki', 'price_per_hour': 20, 'services': [1], 'user': 1} - response = self.client.post(url, data, format='json') + url = reverse_lazy("mentorship:academy_mentor") + data = {"slug": "mirai-nikki", "name": "Mirai Nikki", "price_per_hour": 20, "services": [1], "user": 1} + response = self.client.post(url, data, format="json") json = response.json() - expected = post_serializer(self, - model.mentorship_service, - model.user, - data={ - 'id': 1, - 'slug': 'mirai-nikki', - 'email': email, - }) + expected = post_serializer( + self, + model.mentorship_service, + model.user, + data={ + "id": 1, + "slug": "mirai-nikki", + "email": email, + }, + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), [ - mentor_profile_columns({ - 'id': 1, - 'name': 'Mirai Nikki', - 'slug': 'mirai-nikki', - 'bio': None, - 'user_id': 1, - 'academy_id': 1, - 'price_per_hour': 20.0, - 'email': email, - }), - ]) - - mentor_profile = self.bc.database.get('mentorship.MentorProfile', 1, dict=False) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorProfile"), + [ + mentor_profile_columns( + { + "id": 1, + "name": "Mirai Nikki", + "slug": "mirai-nikki", + "bio": None, + "user_id": 1, + "academy_id": 1, + "price_per_hour": 20.0, + "email": email, + } + ), + ], + ) + + mentor_profile = self.bc.database.get("mentorship.MentorProfile", 1, dict=False) self.bc.check.queryset_with_pks(mentor_profile.services.all(), [1]) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__post__creating_a_element_taking_email_from_user(self): profile_academy = { - 'first_name': self.bc.fake.name(), - 'last_name': self.bc.fake.name(), + "first_name": self.bc.fake.name(), + "last_name": self.bc.fake.name(), } - model = self.bc.database.create(user=1, - role=1, - 
capability='crud_mentorship_mentor', - profile_academy=profile_academy, - mentorship_service=1) + model = self.bc.database.create( + user=1, role=1, capability="crud_mentorship_mentor", profile_academy=profile_academy, mentorship_service=1 + ) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_mentor') - data = {'slug': 'mirai-nikki', 'name': 'Mirai Nikki', 'price_per_hour': 20, 'services': [1], 'user': 1} - response = self.client.post(url, data, format='json') + url = reverse_lazy("mentorship:academy_mentor") + data = {"slug": "mirai-nikki", "name": "Mirai Nikki", "price_per_hour": 20, "services": [1], "user": 1} + response = self.client.post(url, data, format="json") json = response.json() - expected = post_serializer(self, - model.mentorship_service, - model.user, - data={ - 'id': 1, - 'slug': 'mirai-nikki', - 'email': model.user.email - }) + expected = post_serializer( + self, model.mentorship_service, model.user, data={"id": 1, "slug": "mirai-nikki", "email": model.user.email} + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), [ - mentor_profile_columns({ - 'id': 1, - 'name': 'Mirai Nikki', - 'slug': 'mirai-nikki', - 'bio': None, - 'user_id': 1, - 'academy_id': 1, - 'price_per_hour': 20.0, - 'email': model.user.email, - }), - ]) - - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.mentorship.actions.mentor_is_ready', MagicMock(side_effect=Exception('hello'))) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorProfile"), + [ + mentor_profile_columns( + { + "id": 1, + "name": "Mirai Nikki", + "slug": "mirai-nikki", + "bio": None, + "user_id": 1, + "academy_id": 1, + "price_per_hour": 20.0, + "email": model.user.email, + } + ), + ], + ) + + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.mentorship.actions.mentor_is_ready", MagicMock(side_effect=Exception("hello"))) def test__post__with_one_mentor_profile__changing_to_a_success_status__without_property_set(self): - statuses = ['INVITED', 'ACTIVE', 'UNLISTED', 'INNACTIVE'] - valid_statuses = ['ACTIVE', 'UNLISTED'] + statuses = ["INVITED", "ACTIVE", "UNLISTED", "INNACTIVE"] + valid_statuses = ["ACTIVE", "UNLISTED"] for db_status in statuses: - mentor_profile = {'status': db_status} - model = self.bc.database.create(user=1, - role=1, - academy=1, - capability='crud_mentorship_mentor', - mentorship_service=1, - profile_academy=1, - mentor_profile=mentor_profile) + mentor_profile = {"status": db_status} + model = self.bc.database.create( + user=1, + role=1, + academy=1, + capability="crud_mentorship_mentor", + mentorship_service=1, + profile_academy=1, + mentor_profile=mentor_profile, + ) self.bc.request.set_headers(academy=model.academy.id) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_mentor_id', kwargs={'mentor_id': model.mentor_profile.id}) + url = reverse_lazy("mentorship:academy_mentor_id", kwargs={"mentor_id": model.mentor_profile.id}) good_statuses = [x for x in statuses if x != db_status and x in valid_statuses] for current_status in good_statuses: model.mentor_profile.status = db_status model.mentor_profile.save() - data = {'status': current_status} + data = {"status": current_status} response = self.client.put(url, data) json = response.json() - expected = {'detail': 'without-first-name', 'status_code': 
400} + expected = {"detail": "without-first-name", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), [ - self.bc.format.to_dict(model.mentor_profile), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorProfile"), + [ + self.bc.format.to_dict(model.mentor_profile), + ], + ) self.assertEqual(actions.mentor_is_ready.call_args_list, []) @@ -879,52 +884,52 @@ def test__post__with_one_mentor_profile__changing_to_a_success_status__without_p actions.mentor_is_ready.call_args_list = [] # teardown - self.bc.database.delete('mentorship.MentorProfile') + self.bc.database.delete("mentorship.MentorProfile") - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__post__creating_a_elements_without_name(self): email = self.bc.fake.email() - profile_academy = {'name': self.bc.fake.name(), 'email': email} - model = self.bc.database.create(user=1, - role=1, - capability='crud_mentorship_mentor', - profile_academy=profile_academy, - mentorship_service=1) + profile_academy = {"name": self.bc.fake.name(), "email": email} + model = self.bc.database.create( + user=1, role=1, capability="crud_mentorship_mentor", profile_academy=profile_academy, mentorship_service=1 + ) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_mentor') - data = {'slug': 'mirai-nikki', 'price_per_hour': 20, 'services': [1], 'user': 1} - response = self.client.post(url, data, format='json') + url = reverse_lazy("mentorship:academy_mentor") + data = {"slug": "mirai-nikki", "price_per_hour": 20, "services": [1], "user": 1} + response = self.client.post(url, data, format="json") json = response.json() - expected = {'detail': 'name-not-found', 'status_code': 400} + expected = {"detail": "name-not-found", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__post__creating_a_elements_without_email(self): profile_academy = { - 'name': self.bc.fake.name(), + "name": self.bc.fake.name(), } - user = {'first_name': self.bc.fake.name(), 'last_name': self.bc.fake.name(), 'email': ''} - model = self.bc.database.create(user=user, - role=1, - capability='crud_mentorship_mentor', - profile_academy=profile_academy, - mentorship_service=1) + user = {"first_name": self.bc.fake.name(), "last_name": self.bc.fake.name(), "email": ""} + model = self.bc.database.create( + user=user, + role=1, + capability="crud_mentorship_mentor", + profile_academy=profile_academy, + mentorship_service=1, + ) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_mentor') - data = {'slug': 'mirai-nikki', 'price_per_hour': 20, 'services': [1], 'user': 1, 'name': 'Mirai Nikki'} - response = self.client.post(url, data, format='json') + url = reverse_lazy("mentorship:academy_mentor") + data = {"slug": "mirai-nikki", "price_per_hour": 20, "services": [1], "user": 1, "name": "Mirai Nikki"} + response = self.client.post(url, data, format="json") json = response.json() - expected = {'detail': 'email-not-found', 'status_code': 400} + expected = {"detail": "email-not-found", 
"status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) diff --git a/breathecode/mentorship/tests/urls/tests_academy_mentor_id.py b/breathecode/mentorship/tests/urls/tests_academy_mentor_id.py index 351967e9a..66358451f 100644 --- a/breathecode/mentorship/tests/urls/tests_academy_mentor_id.py +++ b/breathecode/mentorship/tests/urls/tests_academy_mentor_id.py @@ -1,6 +1,7 @@ """ This file just can contains duck tests refert to AcademyInviteView """ + import hashlib from datetime import timedelta from random import choices, random @@ -21,73 +22,49 @@ def get_serializer(self, mentor_profile, mentorship_service, user, data={}): return { - 'booking_url': - mentor_profile.booking_url, - 'created_at': - self.bc.datetime.to_iso_string(mentor_profile.created_at), - 'email': - mentor_profile.email, - 'id': - mentor_profile.id, - 'one_line_bio': - mentor_profile.one_line_bio, - 'online_meeting_url': - mentor_profile.online_meeting_url, - 'price_per_hour': - mentor_profile.price_per_hour, - 'rating': - mentor_profile.rating, - 'services': [{ - 'academy': { - 'icon_url': mentorship_service.academy.icon_url, - 'id': mentorship_service.academy.id, - 'logo_url': mentorship_service.academy.logo_url, - 'name': mentorship_service.academy.name, - 'slug': mentorship_service.academy.slug, - }, - 'description': - mentorship_service.description, - 'allow_mentee_to_extend': - mentorship_service.allow_mentee_to_extend, - 'allow_mentors_to_extend': - mentorship_service.allow_mentors_to_extend, - 'created_at': - self.bc.datetime.to_iso_string(mentorship_service.created_at), - 'duration': - self.bc.datetime.from_timedelta(mentorship_service.duration), - 'id': - mentorship_service.id, - 'language': - mentorship_service.language, - 'logo_url': - mentorship_service.logo_url, - 'max_duration': - self.bc.datetime.from_timedelta(mentorship_service.max_duration), - 'missed_meeting_duration': - self.bc.datetime.from_timedelta(mentorship_service.missed_meeting_duration), - 'name': - mentorship_service.name, - 'slug': - mentorship_service.slug, - 'status': - mentorship_service.status, - 'updated_at': - self.bc.datetime.to_iso_string(mentorship_service.updated_at), - }], - 'slug': - mentor_profile.slug, - 'status': - mentor_profile.status, - 'timezone': - mentor_profile.timezone, - 'syllabus': [], - 'updated_at': - self.bc.datetime.to_iso_string(mentor_profile.updated_at), - 'user': { - 'email': user.email, - 'first_name': user.first_name, - 'id': user.id, - 'last_name': user.last_name, + "booking_url": mentor_profile.booking_url, + "created_at": self.bc.datetime.to_iso_string(mentor_profile.created_at), + "email": mentor_profile.email, + "id": mentor_profile.id, + "one_line_bio": mentor_profile.one_line_bio, + "online_meeting_url": mentor_profile.online_meeting_url, + "price_per_hour": mentor_profile.price_per_hour, + "rating": mentor_profile.rating, + "services": [ + { + "academy": { + "icon_url": mentorship_service.academy.icon_url, + "id": mentorship_service.academy.id, + "logo_url": mentorship_service.academy.logo_url, + "name": mentorship_service.academy.name, + "slug": mentorship_service.academy.slug, + }, + "description": mentorship_service.description, + "allow_mentee_to_extend": mentorship_service.allow_mentee_to_extend, + "allow_mentors_to_extend": mentorship_service.allow_mentors_to_extend, + "created_at": self.bc.datetime.to_iso_string(mentorship_service.created_at), + "duration": self.bc.datetime.from_timedelta(mentorship_service.duration), + "id": 
mentorship_service.id, + "language": mentorship_service.language, + "logo_url": mentorship_service.logo_url, + "max_duration": self.bc.datetime.from_timedelta(mentorship_service.max_duration), + "missed_meeting_duration": self.bc.datetime.from_timedelta(mentorship_service.missed_meeting_duration), + "name": mentorship_service.name, + "slug": mentorship_service.slug, + "status": mentorship_service.status, + "updated_at": self.bc.datetime.to_iso_string(mentorship_service.updated_at), + } + ], + "slug": mentor_profile.slug, + "status": mentor_profile.status, + "timezone": mentor_profile.timezone, + "syllabus": [], + "updated_at": self.bc.datetime.to_iso_string(mentor_profile.updated_at), + "user": { + "email": user.email, + "first_name": user.first_name, + "id": user.id, + "last_name": user.last_name, }, **data, } @@ -95,99 +72,70 @@ def get_serializer(self, mentor_profile, mentorship_service, user, data={}): def put_serializer(self, mentor_profile, mentorship_service, user, syllabus=[], data={}): return { - 'id': - mentor_profile.id, - 'one_line_bio': - mentor_profile.one_line_bio, - 'slug': - mentor_profile.slug, - 'user': { - 'id': user.id, - 'first_name': user.first_name, - 'last_name': user.last_name, - 'email': user.email, + "id": mentor_profile.id, + "one_line_bio": mentor_profile.one_line_bio, + "slug": mentor_profile.slug, + "user": { + "id": user.id, + "first_name": user.first_name, + "last_name": user.last_name, + "email": user.email, }, - 'services': [{ - 'id': - mentorship_service.id, - 'slug': - mentorship_service.slug, - 'name': - mentorship_service.name, - 'status': - mentorship_service.status, - 'academy': { - 'id': mentorship_service.academy.id, - 'slug': mentorship_service.academy.slug, - 'name': mentorship_service.academy.name, - 'logo_url': mentorship_service.academy.logo_url, - 'icon_url': mentorship_service.academy.icon_url, - }, - 'logo_url': - mentorship_service.logo_url, - 'duration': - self.bc.datetime.from_timedelta(mentorship_service.duration), - 'language': - mentorship_service.language, - 'allow_mentee_to_extend': - mentorship_service.allow_mentee_to_extend, - 'allow_mentors_to_extend': - mentorship_service.allow_mentors_to_extend, - 'max_duration': - self.bc.datetime.from_timedelta(mentorship_service.max_duration), - 'missed_meeting_duration': - self.bc.datetime.from_timedelta(mentorship_service.missed_meeting_duration), - 'created_at': - self.bc.datetime.to_iso_string(mentorship_service.created_at), - 'updated_at': - self.bc.datetime.to_iso_string(mentorship_service.updated_at), - 'description': - mentorship_service.description, - }], - 'status': - mentor_profile.status, - 'price_per_hour': - mentor_profile.price_per_hour, - 'rating': - mentor_profile.rating, - 'booking_url': - mentor_profile.booking_url, - 'online_meeting_url': - mentor_profile.online_meeting_url, - 'timezone': - mentor_profile.timezone, - 'syllabus': [{ - 'id': x.id, - 'logo': x.logo, - 'name': x.name, - 'slug': x.slug - } for x in syllabus], - 'email': - mentor_profile.email, - 'created_at': - self.bc.datetime.to_iso_string(UTC_NOW), - 'updated_at': - self.bc.datetime.to_iso_string(UTC_NOW), + "services": [ + { + "id": mentorship_service.id, + "slug": mentorship_service.slug, + "name": mentorship_service.name, + "status": mentorship_service.status, + "academy": { + "id": mentorship_service.academy.id, + "slug": mentorship_service.academy.slug, + "name": mentorship_service.academy.name, + "logo_url": mentorship_service.academy.logo_url, + "icon_url": mentorship_service.academy.icon_url, + }, 
+ "logo_url": mentorship_service.logo_url, + "duration": self.bc.datetime.from_timedelta(mentorship_service.duration), + "language": mentorship_service.language, + "allow_mentee_to_extend": mentorship_service.allow_mentee_to_extend, + "allow_mentors_to_extend": mentorship_service.allow_mentors_to_extend, + "max_duration": self.bc.datetime.from_timedelta(mentorship_service.max_duration), + "missed_meeting_duration": self.bc.datetime.from_timedelta(mentorship_service.missed_meeting_duration), + "created_at": self.bc.datetime.to_iso_string(mentorship_service.created_at), + "updated_at": self.bc.datetime.to_iso_string(mentorship_service.updated_at), + "description": mentorship_service.description, + } + ], + "status": mentor_profile.status, + "price_per_hour": mentor_profile.price_per_hour, + "rating": mentor_profile.rating, + "booking_url": mentor_profile.booking_url, + "online_meeting_url": mentor_profile.online_meeting_url, + "timezone": mentor_profile.timezone, + "syllabus": [{"id": x.id, "logo": x.logo, "name": x.name, "slug": x.slug} for x in syllabus], + "email": mentor_profile.email, + "created_at": self.bc.datetime.to_iso_string(UTC_NOW), + "updated_at": self.bc.datetime.to_iso_string(UTC_NOW), **data, } def mentor_profile_columns(data={}): - token = hashlib.sha1((str(data['slug'] if 'slug' in data else '') + str(UTC_NOW)).encode('UTF-8')).hexdigest() + token = hashlib.sha1((str(data["slug"] if "slug" in data else "") + str(UTC_NOW)).encode("UTF-8")).hexdigest() return { - 'bio': None, - 'booking_url': None, - 'email': None, - 'id': 0, - 'name': '', - 'online_meeting_url': None, - 'price_per_hour': 0, - 'service_id': 0, - 'slug': 'mirai-nikki', - 'status': 'INVITED', - 'timezone': None, - 'token': token, - 'user_id': 0, + "bio": None, + "booking_url": None, + "email": None, + "id": 0, + "name": "", + "online_meeting_url": None, + "price_per_hour": 0, + "service_id": 0, + "slug": "mirai-nikki", + "status": "INVITED", + "timezone": None, + "token": token, + "user_id": 0, **data, } @@ -198,13 +146,13 @@ class AcademyServiceTestSuite(MentorshipTestCase): """ def test__get__without_auth(self): - url = reverse_lazy('mentorship:academy_mentor_id', kwargs={'mentor_id': 1}) + url = reverse_lazy("mentorship:academy_mentor_id", kwargs={"mentor_id": 1}) response = self.client.get(url) json = response.json() expected = { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED, + "detail": "Authentication credentials were not provided.", + "status_code": status.HTTP_401_UNAUTHORIZED, } self.assertEqual(json, expected) @@ -215,13 +163,13 @@ def test__get__without_academy_header(self): self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_mentor_id', kwargs={'mentor_id': 1}) + url = reverse_lazy("mentorship:academy_mentor_id", kwargs={"mentor_id": 1}) response = self.client.get(url) json = response.json() expected = { - 'detail': "Missing academy_id parameter expected for the endpoint url or 'Academy' header", - 'status_code': 403, + "detail": "Missing academy_id parameter expected for the endpoint url or 'Academy' header", + "status_code": 403, } self.assertEqual(json, expected) @@ -237,13 +185,13 @@ def test__get__without_capabilities(self): self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_mentor_id', kwargs={'mentor_id': 1}) + url = reverse_lazy("mentorship:academy_mentor_id", kwargs={"mentor_id": 1}) response = self.client.get(url) json = 
response.json() expected = { - 'detail': "You (user: 1) don't have this capability: read_mentorship_mentor for academy 1", - 'status_code': 403, + "detail": "You (user: 1) don't have this capability: read_mentorship_mentor for academy 1", + "status_code": 403, } self.assertEqual(json, expected) @@ -254,16 +202,16 @@ def test__get__without_capabilities(self): """ def test__get__without_data(self): - model = self.bc.database.create(user=1, role=1, capability='read_mentorship_mentor', profile_academy=1) + model = self.bc.database.create(user=1, role=1, capability="read_mentorship_mentor", profile_academy=1) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_mentor_id', kwargs={'mentor_id': 1}) + url = reverse_lazy("mentorship:academy_mentor_id", kwargs={"mentor_id": 1}) response = self.client.get(url) json = response.json() - expected = {'detail': 'This mentor does not exist on this academy', 'status_code': 404} + expected = {"detail": "This mentor does not exist on this academy", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) @@ -273,17 +221,19 @@ def test__get__without_data(self): """ def test__get__with_one_mentor_profile(self): - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_mentor', - mentor_profile=1, - mentorship_service=1, - profile_academy=1) + model = self.bc.database.create( + user=1, + role=1, + capability="read_mentorship_mentor", + mentor_profile=1, + mentorship_service=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_mentor_id', kwargs={'mentor_id': 1}) + url = reverse_lazy("mentorship:academy_mentor_id", kwargs={"mentor_id": 1}) response = self.client.get(url) json = response.json() @@ -291,36 +241,45 @@ def test__get__with_one_mentor_profile(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), [ - self.bc.format.to_dict(model.mentor_profile), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorProfile"), + [ + self.bc.format.to_dict(model.mentor_profile), + ], + ) """ 🔽🔽🔽 Spy the extensions """ - @patch.object(APIViewExtensionHandlers, '_spy_extensions', MagicMock()) - @patch.object(APIViewExtensionHandlers, '_spy_extension_arguments', MagicMock()) + @patch.object(APIViewExtensionHandlers, "_spy_extensions", MagicMock()) + @patch.object(APIViewExtensionHandlers, "_spy_extension_arguments", MagicMock()) def test__get__spy_extensions(self): - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_mentor', - mentor_profile=1, - profile_academy=1) + model = self.bc.database.create( + user=1, role=1, capability="read_mentorship_mentor", mentor_profile=1, profile_academy=1 + ) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_mentor_id', kwargs={'mentor_id': 1}) + url = reverse_lazy("mentorship:academy_mentor_id", kwargs={"mentor_id": 1}) self.client.get(url) - self.assertEqual(APIViewExtensionHandlers._spy_extensions.call_args_list, [ - call(['CacheExtension', 'LanguageExtension', 'LookupExtension', 'PaginationExtension', 'SortExtension']), - ]) - - self.assertEqual(APIViewExtensionHandlers._spy_extension_arguments.call_args_list, [ - call(cache=MentorProfileCache, 
sort='-created_at', paginate=True), - ]) + self.assertEqual( + APIViewExtensionHandlers._spy_extensions.call_args_list, + [ + call( + ["CacheExtension", "LanguageExtension", "LookupExtension", "PaginationExtension", "SortExtension"] + ), + ], + ) + + self.assertEqual( + APIViewExtensionHandlers._spy_extension_arguments.call_args_list, + [ + call(cache=MentorProfileCache, sort="-created_at", paginate=True), + ], + ) """ 🔽🔽🔽 PUT capability @@ -332,13 +291,13 @@ def test__post__without_capabilities(self): self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_mentor_id', kwargs={'mentor_id': 1}) + url = reverse_lazy("mentorship:academy_mentor_id", kwargs={"mentor_id": 1}) response = self.client.put(url) json = response.json() expected = { - 'detail': "You (user: 1) don't have this capability: crud_mentorship_mentor for academy 1", - 'status_code': 403, + "detail": "You (user: 1) don't have this capability: crud_mentorship_mentor for academy 1", + "status_code": 403, } self.assertEqual(json, expected) @@ -348,237 +307,265 @@ def test__post__without_capabilities(self): 🔽🔽🔽 PUT MentorProfile not found """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.mentorship.actions.mentor_is_ready', MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.mentorship.actions.mentor_is_ready", MagicMock()) def test__post__not_found(self): - model = self.bc.database.create(user=1, role=1, capability='crud_mentorship_mentor', profile_academy=1) + model = self.bc.database.create(user=1, role=1, capability="crud_mentorship_mentor", profile_academy=1) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_mentor_id', kwargs={'mentor_id': 1}) + url = reverse_lazy("mentorship:academy_mentor_id", kwargs={"mentor_id": 1}) response = self.client.put(url) json = response.json() - expected = {'detail': 'not-found', 'status_code': 404} + expected = {"detail": "not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), []) + self.assertEqual(self.bc.database.list_of("mentorship.MentorProfile"), []) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.mentorship.actions.mentor_is_ready', MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.mentorship.actions.mentor_is_ready", MagicMock()) def test__post__not_found__belong_to_another_academy(self): - mentorship_service = {'academy_id': 2} - model = self.bc.database.create(user=1, - role=1, - academy=2, - capability='crud_mentorship_mentor', - mentorship_service=mentorship_service, - profile_academy=1, - mentor_profile=1) + mentorship_service = {"academy_id": 2} + model = self.bc.database.create( + user=1, + role=1, + academy=2, + capability="crud_mentorship_mentor", + mentorship_service=mentorship_service, + profile_academy=1, + mentor_profile=1, + ) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_mentor_id', kwargs={'mentor_id': 1}) + url = reverse_lazy("mentorship:academy_mentor_id", kwargs={"mentor_id": 1}) response = self.client.put(url) json = response.json() - expected = {'detail': 'not-found', 'status_code': 404} + expected = 
{"detail": "not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), [ - self.bc.format.to_dict(model.mentor_profile), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorProfile"), + [ + self.bc.format.to_dict(model.mentor_profile), + ], + ) """ 🔽🔽🔽 PUT with one MentorProfile """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.mentorship.actions.mentor_is_ready', MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.mentorship.actions.mentor_is_ready", MagicMock()) def test__post__with_one_mentor_profile(self): profile_academy = { - 'first_name': self.bc.fake.name(), - 'last_name': self.bc.fake.name(), - 'email': self.bc.fake.email() + "first_name": self.bc.fake.name(), + "last_name": self.bc.fake.name(), + "email": self.bc.fake.email(), } - model = self.bc.database.create(user=1, - role=1, - academy=1, - capability='crud_mentorship_mentor', - mentorship_service=1, - profile_academy=profile_academy, - mentor_profile=1) + model = self.bc.database.create( + user=1, + role=1, + academy=1, + capability="crud_mentorship_mentor", + mentorship_service=1, + profile_academy=profile_academy, + mentor_profile=1, + ) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_mentor_id', kwargs={'mentor_id': 1}) + url = reverse_lazy("mentorship:academy_mentor_id", kwargs={"mentor_id": 1}) response = self.client.put(url) json = response.json() - expected = put_serializer(self, - model.mentor_profile, - model.mentorship_service, - model.user, - data={'email': model.profile_academy.email}) + expected = put_serializer( + self, + model.mentor_profile, + model.mentorship_service, + model.user, + data={"email": model.profile_academy.email}, + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), - [{ - **self.bc.format.to_dict(model.mentor_profile), - 'email': model.profile_academy.email, - 'name': model.profile_academy.first_name + ' ' + model.profile_academy.last_name, - }]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorProfile"), + [ + { + **self.bc.format.to_dict(model.mentor_profile), + "email": model.profile_academy.email, + "name": model.profile_academy.first_name + " " + model.profile_academy.last_name, + } + ], + ) """ 🔽🔽🔽 PUT with one MentorProfile passing readonly fields """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.mentorship.actions.mentor_is_ready', MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.mentorship.actions.mentor_is_ready", MagicMock()) def test__post__with_one_mentor_profile__passing_randomly_fields(self): profile_academy = { - 'first_name': self.bc.fake.name(), - 'last_name': self.bc.fake.name(), - 'email': self.bc.fake.email() + "first_name": self.bc.fake.name(), + "last_name": self.bc.fake.name(), + "email": self.bc.fake.email(), } - model = self.bc.database.create(user=1, - role=1, - academy=1, - capability='crud_mentorship_mentor', - mentorship_service=1, - profile_academy=profile_academy, - mentor_profile=1) + model = self.bc.database.create( + user=1, + role=1, + academy=1, + capability="crud_mentorship_mentor", + 
mentorship_service=1, + profile_academy=profile_academy, + mentor_profile=1, + ) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_mentor_id', kwargs={'mentor_id': 1}) + url = reverse_lazy("mentorship:academy_mentor_id", kwargs={"mentor_id": 1}) - cases = ['user', 'token'] + cases = ["user", "token"] for case in cases: data = {case: 1} response = self.client.put(url, data) json = response.json() - expected = {'detail': f'{case}-read-only', 'status_code': 400} + expected = {"detail": f"{case}-read-only", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), [ - self.bc.format.to_dict(model.mentor_profile), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorProfile"), + [ + self.bc.format.to_dict(model.mentor_profile), + ], + ) """ 🔽🔽🔽 PUT with one MentorProfile changing to a success status """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.mentorship.actions.mentor_is_ready', MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.mentorship.actions.mentor_is_ready", MagicMock()) def test__post__with_one_mentor_profile__changing_to_a_success_status(self): - statuses = ['INVITED', 'ACTIVE', 'UNLISTED', 'INNACTIVE'] - valid_statuses = ['ACTIVE', 'UNLISTED'] + statuses = ["INVITED", "ACTIVE", "UNLISTED", "INNACTIVE"] + valid_statuses = ["ACTIVE", "UNLISTED"] profile_academy = { - 'first_name': self.bc.fake.name(), - 'last_name': self.bc.fake.name(), - 'email': self.bc.fake.email() + "first_name": self.bc.fake.name(), + "last_name": self.bc.fake.name(), + "email": self.bc.fake.email(), } for db_status in statuses: - mentor_profile = {'status': db_status} - model = self.bc.database.create(user=1, - role=1, - academy=1, - capability='crud_mentorship_mentor', - mentorship_service=1, - profile_academy=profile_academy, - mentor_profile=mentor_profile) + mentor_profile = {"status": db_status} + model = self.bc.database.create( + user=1, + role=1, + academy=1, + capability="crud_mentorship_mentor", + mentorship_service=1, + profile_academy=profile_academy, + mentor_profile=mentor_profile, + ) self.bc.request.set_headers(academy=model.academy.id) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_mentor_id', kwargs={'mentor_id': model.mentor_profile.id}) + url = reverse_lazy("mentorship:academy_mentor_id", kwargs={"mentor_id": model.mentor_profile.id}) good_statuses = [x for x in statuses if x != db_status and x in valid_statuses] for current_status in good_statuses: model.mentor_profile.status = db_status model.mentor_profile.save() - data = {'status': current_status} + data = {"status": current_status} response = self.client.put(url, data) json = response.json() - expected = put_serializer(self, - model.mentor_profile, - model.mentorship_service, - model.user, - data={ - 'email': model.profile_academy.email, - 'status': current_status - }) + expected = put_serializer( + self, + model.mentor_profile, + model.mentorship_service, + model.user, + data={"email": model.profile_academy.email, "status": current_status}, + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), - [{ - **self.bc.format.to_dict(model.mentor_profile), - **data, - 
'email': model.profile_academy.email, - 'name': model.profile_academy.first_name + ' ' + model.profile_academy.last_name, - }]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorProfile"), + [ + { + **self.bc.format.to_dict(model.mentor_profile), + **data, + "email": model.profile_academy.email, + "name": model.profile_academy.first_name + " " + model.profile_academy.last_name, + } + ], + ) self.assertEqual(actions.mentor_is_ready.call_args_list, [call(model.mentor_profile)]) # teardown actions.mentor_is_ready.call_args_list = [] # teardown - self.bc.database.delete('mentorship.MentorProfile') + self.bc.database.delete("mentorship.MentorProfile") - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.mentorship.actions.mentor_is_ready', MagicMock(side_effect=Exception('hello'))) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.mentorship.actions.mentor_is_ready", MagicMock(side_effect=Exception("hello"))) def test__post__with_one_mentor_profile__changing_to_a_success_status__raise_a_exception(self): - statuses = ['INVITED', 'ACTIVE', 'UNLISTED', 'INNACTIVE'] - valid_statuses = ['ACTIVE', 'UNLISTED'] + statuses = ["INVITED", "ACTIVE", "UNLISTED", "INNACTIVE"] + valid_statuses = ["ACTIVE", "UNLISTED"] profile_academy = { - 'first_name': self.bc.fake.name(), - 'last_name': self.bc.fake.name(), - 'email': self.bc.fake.email() + "first_name": self.bc.fake.name(), + "last_name": self.bc.fake.name(), + "email": self.bc.fake.email(), } for db_status in statuses: - mentor_profile = {'status': db_status} - model = self.bc.database.create(user=1, - role=1, - academy=1, - capability='crud_mentorship_mentor', - mentorship_service=1, - profile_academy=profile_academy, - mentor_profile=mentor_profile) + mentor_profile = {"status": db_status} + model = self.bc.database.create( + user=1, + role=1, + academy=1, + capability="crud_mentorship_mentor", + mentorship_service=1, + profile_academy=profile_academy, + mentor_profile=mentor_profile, + ) self.bc.request.set_headers(academy=model.academy.id) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_mentor_id', kwargs={'mentor_id': model.mentor_profile.id}) + url = reverse_lazy("mentorship:academy_mentor_id", kwargs={"mentor_id": model.mentor_profile.id}) good_statuses = [x for x in statuses if x != db_status and x in valid_statuses] for current_status in good_statuses: model.mentor_profile.status = db_status model.mentor_profile.save() - data = {'status': current_status} + data = {"status": current_status} response = self.client.put(url, data) json = response.json() - expected = {'detail': 'hello', 'status_code': 400} + expected = {"detail": "hello", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), [ - self.bc.format.to_dict(model.mentor_profile), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorProfile"), + [ + self.bc.format.to_dict(model.mentor_profile), + ], + ) self.assertEqual(actions.mentor_is_ready.call_args_list, [call(model.mentor_profile)]) @@ -586,64 +573,69 @@ def test__post__with_one_mentor_profile__changing_to_a_success_status__raise_a_e actions.mentor_is_ready.call_args_list = [] # teardown - self.bc.database.delete('mentorship.MentorProfile') + self.bc.database.delete("mentorship.MentorProfile") """ 🔽🔽🔽 PUT with one MentorProfile changing to a failure 
status """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.mentorship.actions.mentor_is_ready', MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.mentorship.actions.mentor_is_ready", MagicMock()) def test__post__with_one_mentor_profile__changing_to_a_failure_status(self): - statuses = ['INVITED', 'ACTIVE', 'UNLISTED', 'INNACTIVE'] - valid_statuses = ['ACTIVE', 'UNLISTED'] + statuses = ["INVITED", "ACTIVE", "UNLISTED", "INNACTIVE"] + valid_statuses = ["ACTIVE", "UNLISTED"] profile_academy = { - 'first_name': self.bc.fake.name(), - 'last_name': self.bc.fake.name(), - 'email': self.bc.fake.email() + "first_name": self.bc.fake.name(), + "last_name": self.bc.fake.name(), + "email": self.bc.fake.email(), } for db_status in statuses: - mentor_profile = {'status': db_status} - model = self.bc.database.create(user=1, - role=1, - academy=1, - capability='crud_mentorship_mentor', - mentorship_service=1, - profile_academy=profile_academy, - mentor_profile=mentor_profile) + mentor_profile = {"status": db_status} + model = self.bc.database.create( + user=1, + role=1, + academy=1, + capability="crud_mentorship_mentor", + mentorship_service=1, + profile_academy=profile_academy, + mentor_profile=mentor_profile, + ) self.bc.request.set_headers(academy=model.academy.id) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_mentor_id', kwargs={'mentor_id': model.profile_academy.id}) + url = reverse_lazy("mentorship:academy_mentor_id", kwargs={"mentor_id": model.profile_academy.id}) good_statuses = [x for x in statuses if x != db_status and x not in valid_statuses] for current_status in good_statuses: model.mentor_profile.status = db_status model.mentor_profile.save() - data = {'status': current_status} + data = {"status": current_status} response = self.client.put(url, data) json = response.json() - expected = put_serializer(self, - model.mentor_profile, - model.mentorship_service, - model.user, - data={ - 'status': current_status, - 'email': model.profile_academy.email - }) + expected = put_serializer( + self, + model.mentor_profile, + model.mentorship_service, + model.user, + data={"status": current_status, "email": model.profile_academy.email}, + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), - [{ - **self.bc.format.to_dict(model.mentor_profile), - 'status': current_status, - 'email': model.profile_academy.email, - 'name': model.profile_academy.first_name + ' ' + model.profile_academy.last_name, - }]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorProfile"), + [ + { + **self.bc.format.to_dict(model.mentor_profile), + "status": current_status, + "email": model.profile_academy.email, + "name": model.profile_academy.first_name + " " + model.profile_academy.last_name, + } + ], + ) self.assertEqual(actions.mentor_is_ready.call_args_list, []) @@ -651,203 +643,217 @@ def test__post__with_one_mentor_profile__changing_to_a_failure_status(self): actions.mentor_is_ready.call_args_list = [] # teardown - self.bc.database.delete('mentorship.MentorProfile') + self.bc.database.delete("mentorship.MentorProfile") """ 🔽🔽🔽 PUT with one MentorProfile changing all the values """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.mentorship.actions.mentor_is_ready', MagicMock()) + @patch("django.utils.timezone.now", 
MagicMock(return_value=UTC_NOW)) + @patch("breathecode.mentorship.actions.mentor_is_ready", MagicMock()) def test__post__with_one_mentor_profile__changing_all_the_values(self): - bad_statuses = ['INVITED', 'INNACTIVE'] + bad_statuses = ["INVITED", "INNACTIVE"] profile_academy = { - 'first_name': self.bc.fake.name(), - 'last_name': self.bc.fake.name(), - 'email': self.bc.fake.email() + "first_name": self.bc.fake.name(), + "last_name": self.bc.fake.name(), + "email": self.bc.fake.email(), } - model = self.bc.database.create(user=1, - role=1, - academy=1, - capability='crud_mentorship_mentor', - mentorship_service=2, - syllabus=2, - profile_academy=profile_academy, - mentor_profile=1) + model = self.bc.database.create( + user=1, + role=1, + academy=1, + capability="crud_mentorship_mentor", + mentorship_service=2, + syllabus=2, + profile_academy=profile_academy, + mentor_profile=1, + ) self.bc.request.set_headers(academy=model.academy.id) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_mentor_id', kwargs={'mentor_id': model.mentor_profile.id}) + url = reverse_lazy("mentorship:academy_mentor_id", kwargs={"mentor_id": model.mentor_profile.id}) name = self.bc.fake.name() bio = self.bc.fake.text() data = { - 'status': choices(bad_statuses)[0], - 'slug': self.bc.fake.slug(), - 'name': name, - 'bio': bio, - 'email': self.bc.fake.email(), - 'booking_url': self.bc.fake.url(), - 'online_meeting_url': self.bc.fake.url(), - 'timezone': self.bc.fake.name(), - 'price_per_hour': random() * 100, - 'services': [2], - 'syllabus': [2], + "status": choices(bad_statuses)[0], + "slug": self.bc.fake.slug(), + "name": name, + "bio": bio, + "email": self.bc.fake.email(), + "booking_url": self.bc.fake.url(), + "online_meeting_url": self.bc.fake.url(), + "timezone": self.bc.fake.name(), + "price_per_hour": random() * 100, + "services": [2], + "syllabus": [2], } - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() - del data['services'] - del data['syllabus'] - del data['bio'] - del data['name'] + del data["services"] + del data["syllabus"] + del data["bio"] + del data["name"] - expected = put_serializer(self, - model.mentor_profile, - model.mentorship_service[1], - model.user, [model.syllabus[1]], - data=data) + expected = put_serializer( + self, model.mentor_profile, model.mentorship_service[1], model.user, [model.syllabus[1]], data=data + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), - [{ - **self.bc.format.to_dict(model.mentor_profile), - **data, - 'name': name, - 'bio': bio, - }]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorProfile"), + [ + { + **self.bc.format.to_dict(model.mentor_profile), + **data, + "name": name, + "bio": bio, + } + ], + ) self.bc.check.queryset_with_pks(model.mentor_profile.services.all(), [2]) self.assertEqual(actions.mentor_is_ready.call_args_list, []) - ''' + """ 🔽🔽🔽 Post - ''' + """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.mentorship.actions.mentor_is_ready', MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.mentorship.actions.mentor_is_ready", MagicMock()) def test__post__with_one_mentor_without_profile_academy(self): - model = self.bc.database.create(user=1, - role=1, - academy=1, - capability='crud_mentorship_mentor', - 
mentorship_service=1, - profile_academy=1, - mentor_profile=1) + model = self.bc.database.create( + user=1, + role=1, + academy=1, + capability="crud_mentorship_mentor", + mentorship_service=1, + profile_academy=1, + mentor_profile=1, + ) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_mentor_id', kwargs={'mentor_id': 1}) + url = reverse_lazy("mentorship:academy_mentor_id", kwargs={"mentor_id": 1}) response = self.client.put(url) json = response.json() - expected = {'detail': 'without-first-name', 'status_code': 400} + expected = {"detail": "without-first-name", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.mentorship.actions.mentor_is_ready', MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.mentorship.actions.mentor_is_ready", MagicMock()) def test__post__with_one_mentor_without_last_name(self): - profile_academy = {'first_name': self.bc.fake.name(), 'email': self.bc.fake.email()} - model = self.bc.database.create(user=1, - role=1, - academy=1, - capability='crud_mentorship_mentor', - mentorship_service=1, - profile_academy=profile_academy, - mentor_profile=1) + profile_academy = {"first_name": self.bc.fake.name(), "email": self.bc.fake.email()} + model = self.bc.database.create( + user=1, + role=1, + academy=1, + capability="crud_mentorship_mentor", + mentorship_service=1, + profile_academy=profile_academy, + mentor_profile=1, + ) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_mentor_id', kwargs={'mentor_id': 1}) + url = reverse_lazy("mentorship:academy_mentor_id", kwargs={"mentor_id": 1}) response = self.client.put(url) json = response.json() - expected = {'detail': 'without-last-name', 'status_code': 400} + expected = {"detail": "without-last-name", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.mentorship.actions.mentor_is_ready', MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.mentorship.actions.mentor_is_ready", MagicMock()) def test__post__with_one_mentor_without_email(self): - profile_academy = {'first_name': self.bc.fake.name(), 'last_name': self.bc.fake.last_name()} - model = self.bc.database.create(user=1, - role=1, - academy=1, - capability='crud_mentorship_mentor', - mentorship_service=1, - profile_academy=profile_academy, - mentor_profile=1) + profile_academy = {"first_name": self.bc.fake.name(), "last_name": self.bc.fake.last_name()} + model = self.bc.database.create( + user=1, + role=1, + academy=1, + capability="crud_mentorship_mentor", + mentorship_service=1, + profile_academy=profile_academy, + mentor_profile=1, + ) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_mentor_id', kwargs={'mentor_id': 1}) + url = reverse_lazy("mentorship:academy_mentor_id", kwargs={"mentor_id": 1}) response = self.client.put(url) json = response.json() - expected = {'detail': 'email-imposible-to-find', 'status_code': 400} + expected = {"detail": "email-imposible-to-find", "status_code": 400} self.assertEqual(json, expected) 
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.mentorship.actions.mentor_is_ready', MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.mentorship.actions.mentor_is_ready", MagicMock()) def test__post__with_one_mentor_profile_name_not_found(self): - profile_academy = {'first_name': '', 'last_name': '', 'email': self.bc.fake.email()} - user = {'first_name': '', 'last_name': '', 'email': self.bc.fake.email()} - model = self.bc.database.create(user=user, - role=1, - academy=1, - capability='crud_mentorship_mentor', - mentorship_service=1, - profile_academy=profile_academy, - mentor_profile=1) + profile_academy = {"first_name": "", "last_name": "", "email": self.bc.fake.email()} + user = {"first_name": "", "last_name": "", "email": self.bc.fake.email()} + model = self.bc.database.create( + user=user, + role=1, + academy=1, + capability="crud_mentorship_mentor", + mentorship_service=1, + profile_academy=profile_academy, + mentor_profile=1, + ) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_mentor_id', kwargs={'mentor_id': 1}) + url = reverse_lazy("mentorship:academy_mentor_id", kwargs={"mentor_id": 1}) response = self.client.put(url) json = response.json() - expected = {'detail': 'without-first-name', 'status_code': 400} + expected = {"detail": "without-first-name", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.mentorship.actions.mentor_is_ready', MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.mentorship.actions.mentor_is_ready", MagicMock()) def test__post__with_one_mentor_profile_with_only_first_name(self): - profile_academy = {'first_name': self.bc.fake.first_name(), 'last_name': '', 'email': self.bc.fake.email()} - user = {'first_name': self.bc.fake.first_name(), 'last_name': '', 'email': self.bc.fake.email()} - model = self.bc.database.create(user=user, - role=1, - academy=1, - capability='crud_mentorship_mentor', - mentorship_service=1, - profile_academy=profile_academy, - mentor_profile=1) + profile_academy = {"first_name": self.bc.fake.first_name(), "last_name": "", "email": self.bc.fake.email()} + user = {"first_name": self.bc.fake.first_name(), "last_name": "", "email": self.bc.fake.email()} + model = self.bc.database.create( + user=user, + role=1, + academy=1, + capability="crud_mentorship_mentor", + mentorship_service=1, + profile_academy=profile_academy, + mentor_profile=1, + ) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_mentor_id', kwargs={'mentor_id': 1}) + url = reverse_lazy("mentorship:academy_mentor_id", kwargs={"mentor_id": 1}) response = self.client.put(url) json = response.json() - expected = {'detail': 'without-last-name', 'status_code': 400} + expected = {"detail": "without-last-name", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) diff --git a/breathecode/mentorship/tests/urls/tests_academy_mentor_id_bill.py b/breathecode/mentorship/tests/urls/tests_academy_mentor_id_bill.py index 19ceae3a7..51133750e 100644 --- 
a/breathecode/mentorship/tests/urls/tests_academy_mentor_id_bill.py +++ b/breathecode/mentorship/tests/urls/tests_academy_mentor_id_bill.py @@ -1,6 +1,7 @@ """ This file just can contains duck tests refert to AcademyInviteView """ + import hashlib import random from datetime import timedelta @@ -27,139 +28,126 @@ def format_datetime(self, date): def get_serializer(self, mentorship_bill, mentor_profile, mentorship_service, user, data={}): return { - 'created_at': format_datetime(self, mentorship_bill.created_at), - 'ended_at': format_datetime(self, mentorship_bill.ended_at), - 'id': mentorship_bill.id, - 'mentor': { - 'booking_url': mentor_profile.booking_url, - 'id': mentor_profile.id, - 'service': { - 'allow_mentee_to_extend': mentorship_service.allow_mentee_to_extend, - 'allow_mentors_to_extend': mentorship_service.allow_mentors_to_extend, - 'duration': self.bc.datetime.from_timedelta(mentorship_service.duration), - 'id': mentorship_service.id, - 'language': mentorship_service.language, - 'max_duration': self.bc.datetime.from_timedelta(mentorship_service.max_duration), - 'missed_meeting_duration': self.bc.datetime.from_timedelta(mentorship_service.missed_meeting_duration), - 'name': mentorship_service.name, - 'slug': mentorship_service.slug, - 'status': mentorship_service.status, + "created_at": format_datetime(self, mentorship_bill.created_at), + "ended_at": format_datetime(self, mentorship_bill.ended_at), + "id": mentorship_bill.id, + "mentor": { + "booking_url": mentor_profile.booking_url, + "id": mentor_profile.id, + "service": { + "allow_mentee_to_extend": mentorship_service.allow_mentee_to_extend, + "allow_mentors_to_extend": mentorship_service.allow_mentors_to_extend, + "duration": self.bc.datetime.from_timedelta(mentorship_service.duration), + "id": mentorship_service.id, + "language": mentorship_service.language, + "max_duration": self.bc.datetime.from_timedelta(mentorship_service.max_duration), + "missed_meeting_duration": self.bc.datetime.from_timedelta(mentorship_service.missed_meeting_duration), + "name": mentorship_service.name, + "slug": mentorship_service.slug, + "status": mentorship_service.status, }, - 'slug': mentor_profile.slug, - 'status': mentor_profile.status, - 'user': { - 'email': user.email, - 'first_name': user.first_name, - 'id': user.id, - 'last_name': user.last_name, + "slug": mentor_profile.slug, + "status": mentor_profile.status, + "user": { + "email": user.email, + "first_name": user.first_name, + "id": user.id, + "last_name": user.last_name, }, }, - 'overtime_minutes': float(mentorship_bill.overtime_minutes), - 'paid_at': format_datetime(self, mentorship_bill.ended_at), - 'reviewer': { - 'email': user.email, - 'first_name': user.first_name, - 'id': user.id, - 'last_name': user.last_name, + "overtime_minutes": float(mentorship_bill.overtime_minutes), + "paid_at": format_datetime(self, mentorship_bill.ended_at), + "reviewer": { + "email": user.email, + "first_name": user.first_name, + "id": user.id, + "last_name": user.last_name, }, - 'started_at': format_datetime(self, mentorship_bill.ended_at), - 'status': mentorship_bill.status, - 'total_duration_in_hours': float(mentorship_bill.total_duration_in_hours), - 'total_duration_in_minutes': float(mentorship_bill.total_duration_in_minutes), - 'total_price': float(mentorship_bill.total_price), + "started_at": format_datetime(self, mentorship_bill.ended_at), + "status": mentorship_bill.status, + "total_duration_in_hours": float(mentorship_bill.total_duration_in_hours), + "total_duration_in_minutes": 
float(mentorship_bill.total_duration_in_minutes), + "total_price": float(mentorship_bill.total_price), **data, } def post_serializer(self, mentor_profile, mentorship_service, user, data={}): nxt_mnth = UTC_NOW.replace(day=28, hour=23, minute=59, second=59, microsecond=999999) + timedelta(days=4) - ended_at = (nxt_mnth - timedelta(days=nxt_mnth.day)) + ended_at = nxt_mnth - timedelta(days=nxt_mnth.day) return { - 'created_at': format_datetime(self, UTC_NOW), - 'ended_at': format_datetime(self, ended_at), - 'id': 0, - 'mentor': { - 'booking_url': - mentor_profile.booking_url, - 'id': - mentor_profile.id, - 'services': [{ - 'academy': { - 'icon_url': mentorship_service.academy.icon_url, - 'id': mentorship_service.academy.id, - 'logo_url': mentorship_service.academy.logo_url, - 'name': mentorship_service.academy.name, - 'slug': mentorship_service.academy.slug, - }, - 'allow_mentee_to_extend': - mentorship_service.allow_mentee_to_extend, - 'allow_mentors_to_extend': - mentorship_service.allow_mentors_to_extend, - 'created_at': - self.bc.datetime.to_iso_string(mentorship_service.created_at), - 'duration': - self.bc.datetime.from_timedelta(mentorship_service.duration), - 'id': - mentorship_service.id, - 'language': - mentorship_service.language, - 'logo_url': - mentorship_service.logo_url, - 'max_duration': - self.bc.datetime.from_timedelta(mentorship_service.max_duration), - 'missed_meeting_duration': - self.bc.datetime.from_timedelta(mentorship_service.missed_meeting_duration), - 'name': - mentorship_service.name, - 'slug': - mentorship_service.slug, - 'status': - mentorship_service.status, - 'updated_at': - self.bc.datetime.to_iso_string(mentorship_service.updated_at), - }], - 'slug': - mentor_profile.slug, - 'status': - mentor_profile.status, - 'user': { - 'email': user.email, - 'first_name': user.first_name, - 'id': user.id, - 'last_name': user.last_name, + "created_at": format_datetime(self, UTC_NOW), + "ended_at": format_datetime(self, ended_at), + "id": 0, + "mentor": { + "booking_url": mentor_profile.booking_url, + "id": mentor_profile.id, + "services": [ + { + "academy": { + "icon_url": mentorship_service.academy.icon_url, + "id": mentorship_service.academy.id, + "logo_url": mentorship_service.academy.logo_url, + "name": mentorship_service.academy.name, + "slug": mentorship_service.academy.slug, + }, + "allow_mentee_to_extend": mentorship_service.allow_mentee_to_extend, + "allow_mentors_to_extend": mentorship_service.allow_mentors_to_extend, + "created_at": self.bc.datetime.to_iso_string(mentorship_service.created_at), + "duration": self.bc.datetime.from_timedelta(mentorship_service.duration), + "id": mentorship_service.id, + "language": mentorship_service.language, + "logo_url": mentorship_service.logo_url, + "max_duration": self.bc.datetime.from_timedelta(mentorship_service.max_duration), + "missed_meeting_duration": self.bc.datetime.from_timedelta( + mentorship_service.missed_meeting_duration + ), + "name": mentorship_service.name, + "slug": mentorship_service.slug, + "status": mentorship_service.status, + "updated_at": self.bc.datetime.to_iso_string(mentorship_service.updated_at), + } + ], + "slug": mentor_profile.slug, + "status": mentor_profile.status, + "user": { + "email": user.email, + "first_name": user.first_name, + "id": user.id, + "last_name": user.last_name, }, }, - 'overtime_minutes': 0, - 'paid_at': None, - 'reviewer': None, - 'started_at': format_datetime(self, UTC_NOW), - 'status': 'DUE', - 'total_duration_in_hours': 0.0, - 'total_duration_in_minutes': 0.0, - 
'total_price': 0.0, + "overtime_minutes": 0, + "paid_at": None, + "reviewer": None, + "started_at": format_datetime(self, UTC_NOW), + "status": "DUE", + "total_duration_in_hours": 0.0, + "total_duration_in_minutes": 0.0, + "total_price": 0.0, **data, } def mentorship_bill_columns(data={}): nxt_mnth = UTC_NOW.replace(day=28, hour=23, minute=59, second=59, microsecond=999999) + timedelta(days=4) - ended_at = (nxt_mnth - timedelta(days=nxt_mnth.day)) + ended_at = nxt_mnth - timedelta(days=nxt_mnth.day) return { - 'id': 0, - 'status': 'DUE', - 'status_mesage': None, - 'total_duration_in_minutes': 0.0, - 'total_duration_in_hours': 0.0, - 'total_price': -0.0, - 'overtime_minutes': 0.0, - 'academy_id': 0, - 'started_at': UTC_NOW, - 'ended_at': ended_at, - 'reviewer_id': None, - 'mentor_id': 0, - 'paid_at': None, + "id": 0, + "status": "DUE", + "status_mesage": None, + "total_duration_in_minutes": 0.0, + "total_duration_in_hours": 0.0, + "total_price": -0.0, + "overtime_minutes": 0.0, + "academy_id": 0, + "started_at": UTC_NOW, + "ended_at": ended_at, + "reviewer_id": None, + "mentor_id": 0, + "paid_at": None, **data, } @@ -178,13 +166,13 @@ class AcademyServiceTestSuite(MentorshipTestCase): """ def test__post__without_auth(self): - url = reverse_lazy('mentorship:academy_mentor_id_bill', kwargs={'mentor_id': 1}) + url = reverse_lazy("mentorship:academy_mentor_id_bill", kwargs={"mentor_id": 1}) response = self.client.post(url) json = response.json() expected = { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED, + "detail": "Authentication credentials were not provided.", + "status_code": status.HTTP_401_UNAUTHORIZED, } self.assertEqual(json, expected) @@ -195,13 +183,13 @@ def test__post__without_academy_header(self): self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_mentor_id_bill', kwargs={'mentor_id': 1}) + url = reverse_lazy("mentorship:academy_mentor_id_bill", kwargs={"mentor_id": 1}) response = self.client.post(url) json = response.json() expected = { - 'detail': "Missing academy_id parameter expected for the endpoint url or 'Academy' header", - 'status_code': 403, + "detail": "Missing academy_id parameter expected for the endpoint url or 'Academy' header", + "status_code": 403, } self.assertEqual(json, expected) @@ -217,13 +205,13 @@ def test__post__without_capabilities(self): self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_mentor_id_bill', kwargs={'mentor_id': 1}) + url = reverse_lazy("mentorship:academy_mentor_id_bill", kwargs={"mentor_id": 1}) response = self.client.post(url) json = response.json() expected = { - 'detail': "You (user: 1) don't have this capability: crud_mentorship_bill for academy 1", - 'status_code': 403, + "detail": "You (user: 1) don't have this capability: crud_mentorship_bill for academy 1", + "status_code": 403, } self.assertEqual(json, expected) @@ -234,37 +222,34 @@ def test__post__without_capabilities(self): """ def test__post__without_data(self): - model = self.bc.database.create(user=1, role=1, capability='crud_mentorship_bill', profile_academy=1) + model = self.bc.database.create(user=1, role=1, capability="crud_mentorship_bill", profile_academy=1) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_mentor_id_bill', kwargs={'mentor_id': 1}) + url = reverse_lazy("mentorship:academy_mentor_id_bill", kwargs={"mentor_id": 1}) 
response = self.client.post(url) json = response.json() - expected = {'detail': 'not-found', 'status_code': 404} + expected = {"detail": "not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipBill'), []) + self.assertEqual(self.bc.database.list_of("mentorship.MentorshipBill"), []) """ 🔽🔽🔽 POST with one MentorProfile, without MentorshipSession """ def test__post__with_one_mentor_profile(self): - model = self.bc.database.create(user=1, - role=1, - capability='crud_mentorship_bill', - mentor_profile=1, - mentorship_service=1, - profile_academy=1) + model = self.bc.database.create( + user=1, role=1, capability="crud_mentorship_bill", mentor_profile=1, mentorship_service=1, profile_academy=1 + ) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_mentor_id_bill', kwargs={'mentor_id': 1}) + url = reverse_lazy("mentorship:academy_mentor_id_bill", kwargs={"mentor_id": 1}) response = self.client.post(url) json = response.json() @@ -272,65 +257,72 @@ def test__post__with_one_mentor_profile(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipBill'), []) + self.assertEqual(self.bc.database.list_of("mentorship.MentorshipBill"), []) """ 🔽🔽🔽 POST with one MentorProfile, with MentorshipSession, many cases where bill was created """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__post__with_one_mentor_profile__with_mentorship_session__allowed_statuses(self): - statuses = ['COMPLETED', 'FAILED'] + statuses = ["COMPLETED", "FAILED"] for current in range(0, 2): started_at = timezone.now() mentorship_session = { - 'started_at': None, - 'allow_billing': True, - 'status': statuses[current], - 'started_at': started_at, + "started_at": None, + "allow_billing": True, + "status": statuses[current], + "started_at": started_at, } - model = self.bc.database.create(user=1, - role=1, - capability='crud_mentorship_bill', - mentor_profile=1, - mentorship_session=mentorship_session, - mentorship_service=1, - profile_academy=1) + model = self.bc.database.create( + user=1, + role=1, + capability="crud_mentorship_bill", + mentor_profile=1, + mentorship_session=mentorship_session, + mentorship_service=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=model.academy.id) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_mentor_id_bill', kwargs={'mentor_id': model.mentor_profile.id}) + url = reverse_lazy("mentorship:academy_mentor_id_bill", kwargs={"mentor_id": model.mentor_profile.id}) response = self.client.post(url) json = response.json() expected = [ - post_serializer(self, - model.mentor_profile, - model.mentorship_service, - model.user, - data={ - 'id': - current + 1, - 'started_at': - self.bc.datetime.to_iso_string( - UTC_NOW.replace(day=1, hour=0, minute=0, second=0, microsecond=0)), - }), + post_serializer( + self, + model.mentor_profile, + model.mentorship_service, + model.user, + data={ + "id": current + 1, + "started_at": self.bc.datetime.to_iso_string( + UTC_NOW.replace(day=1, hour=0, minute=0, second=0, microsecond=0) + ), + }, + ), ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - 
self.assertEqual(self.bc.database.list_of('mentorship.MentorshipBill'), [ - mentorship_bill_columns( - data={ - 'id': current + 1, - 'mentor_id': current + 1, - 'academy_id': current + 1, - 'started_at': UTC_NOW.replace(day=1, hour=0, minute=0, second=0, microsecond=0), - }), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipBill"), + [ + mentorship_bill_columns( + data={ + "id": current + 1, + "mentor_id": current + 1, + "academy_id": current + 1, + "started_at": UTC_NOW.replace(day=1, hour=0, minute=0, second=0, microsecond=0), + } + ), + ], + ) # teardown - self.bc.database.delete('mentorship.MentorProfile') - self.bc.database.delete('mentorship.MentorshipBill') + self.bc.database.delete("mentorship.MentorProfile") + self.bc.database.delete("mentorship.MentorshipBill") diff --git a/breathecode/mentorship/tests/urls/tests_academy_mentor_id_session.py b/breathecode/mentorship/tests/urls/tests_academy_mentor_id_session.py index ea8d308e1..5a8993462 100644 --- a/breathecode/mentorship/tests/urls/tests_academy_mentor_id_session.py +++ b/breathecode/mentorship/tests/urls/tests_academy_mentor_id_session.py @@ -1,6 +1,7 @@ """ This file just can contains duck tests refert to AcademyInviteView """ + import hashlib from datetime import timedelta from unittest.mock import MagicMock, call, patch @@ -19,29 +20,29 @@ def get_tooltip(obj): - message = f'This mentorship should last no longer than {int(obj.service.duration.seconds/60)} min. <br />' + message = f"This mentorship should last no longer than {int(obj.service.duration.seconds/60)} min. <br />" if obj.started_at is None: - message += 'The mentee never joined the session. <br />' + message += "The mentee never joined the session. <br />" else: message += f'Started on {obj.started_at.strftime("%m/%d/%Y at %H:%M:%S")}. <br />' if obj.mentor_joined_at is None: - message += f'The mentor never joined' + message += f"The mentor never joined" elif obj.mentor_joined_at > obj.started_at: - message += f'The mentor joined {duration_to_str(obj.mentor_joined_at - obj.started_at)} before. <br />' + message += f"The mentor joined {duration_to_str(obj.mentor_joined_at - obj.started_at)} before. <br />" elif obj.started_at > obj.mentor_joined_at: - message += f'The mentor joined {duration_to_str(obj.started_at - obj.mentor_joined_at)} after. <br />' + message += f"The mentor joined {duration_to_str(obj.started_at - obj.mentor_joined_at)} after. <br />" if obj.ended_at is not None: - message += f'The mentorship lasted {duration_to_str(obj.ended_at - obj.started_at)}. <br />' + message += f"The mentorship lasted {duration_to_str(obj.ended_at - obj.started_at)}. <br />" if (obj.ended_at - obj.started_at) > obj.mentor.service.duration: extra_time = (obj.ended_at - obj.started_at) - obj.mentor.service.duration - message += f'With extra time of {duration_to_str(extra_time)}. <br />' + message += f"With extra time of {duration_to_str(extra_time)}. <br />" else: - message += f'No extra time detected <br />' + message += f"No extra time detected <br />" else: - message += f'The mentorship has not ended yet. <br />' + message += f"The mentorship has not ended yet. 
<br />" if obj.ends_at is not None: - message += f'But it was supposed to end after {duration_to_str(obj.ends_at - obj.started_at)} <br />' + message += f"But it was supposed to end after {duration_to_str(obj.ends_at - obj.started_at)} <br />" return message @@ -49,17 +50,17 @@ def get_tooltip(obj): def get_duration_string(obj): if obj.started_at is None: - return 'Never started' + return "Never started" end_date = obj.ended_at if end_date is None: - return 'Never ended' + return "Never ended" if obj.started_at > end_date: - return 'Ended before it started' + return "Ended before it started" if (end_date - obj.started_at).days > 1: - return f'Many days' + return f"Many days" return duration_to_str(obj.ended_at - obj.started_at) @@ -78,11 +79,11 @@ def get_extra_time(obj): return None if (obj.ended_at - obj.started_at).days > 1: - return f'Many days of extra time, probably it was never closed' + return f"Many days of extra time, probably it was never closed" if (obj.ended_at - obj.started_at) > obj.mentor.service.duration: extra_time = (obj.ended_at - obj.started_at) - obj.mentor.service.duration - return f'Extra time of {duration_to_str(extra_time)}, the expected duration was {duration_to_str(obj.mentor.service.duration)}' + return f"Extra time of {duration_to_str(extra_time)}, the expected duration was {duration_to_str(obj.mentor.service.duration)}" else: return None @@ -93,7 +94,7 @@ def get_mentor_late(obj): return None if obj.started_at > obj.mentor_joined_at and (obj.started_at - obj.mentor_joined_at).seconds > (60 * 4): - return f'The mentor joined {duration_to_str(obj.started_at - obj.mentor_joined_at)} after. <br />' + return f"The mentor joined {duration_to_str(obj.started_at - obj.mentor_joined_at)} after. <br />" else: return None @@ -101,7 +102,7 @@ def get_mentor_late(obj): def get_mente_joined(obj): if obj.started_at is None: - return 'Session did not start because mentee never joined' + return "Session did not start because mentee never joined" else: return True @@ -119,119 +120,100 @@ def get_rating(obj): def get_serializer(self, mentorship_session, mentor_profile, mentorship_service, user, data={}): return { - 'accounted_duration': mentorship_session.accounted_duration, - 'billed_str': get_billed_str(mentorship_session), - 'duration_string': get_duration_string(mentorship_session), - 'ended_at': - self.bc.datetime.to_iso_string(mentorship_session.ended_at) if mentorship_session.ended_at else None, - 'extra_time': get_extra_time(mentorship_session), - 'id': mentorship_session.id, - 'mentee_joined': get_mente_joined(mentorship_session), - 'mentee': { - 'email': user.email, - 'first_name': user.first_name, - 'id': user.id, - 'last_name': user.last_name, + "accounted_duration": mentorship_session.accounted_duration, + "billed_str": get_billed_str(mentorship_session), + "duration_string": get_duration_string(mentorship_session), + "ended_at": ( + self.bc.datetime.to_iso_string(mentorship_session.ended_at) if mentorship_session.ended_at else None + ), + "extra_time": get_extra_time(mentorship_session), + "id": mentorship_session.id, + "mentee_joined": get_mente_joined(mentorship_session), + "mentee": { + "email": user.email, + "first_name": user.first_name, + "id": user.id, + "last_name": user.last_name, }, - 'mentee_left_at': mentorship_session.mentee_left_at, - 'mentor': { - 'booking_url': - mentor_profile.booking_url, - 'created_at': - self.bc.datetime.to_iso_string(mentor_profile.created_at), - 'email': - mentor_profile.email, - 'id': - mentor_profile.id, - 'one_line_bio': - 
mentor_profile.one_line_bio, - 'online_meeting_url': - mentor_profile.online_meeting_url, - 'price_per_hour': - mentor_profile.price_per_hour, - 'rating': - mentor_profile.rating, - 'services': [{ - 'academy': { - 'icon_url': mentorship_service.academy.icon_url, - 'id': mentorship_service.academy.id, - 'logo_url': mentorship_service.academy.logo_url, - 'name': mentorship_service.academy.name, - 'slug': mentorship_service.academy.slug, - }, - 'allow_mentee_to_extend': - mentorship_service.allow_mentee_to_extend, - 'allow_mentors_to_extend': - mentorship_service.allow_mentors_to_extend, - 'created_at': - self.bc.datetime.to_iso_string(mentorship_service.created_at), - 'duration': - self.bc.datetime.from_timedelta(mentorship_service.duration), - 'id': - mentorship_service.id, - 'language': - mentorship_service.language, - 'logo_url': - mentorship_service.logo_url, - 'max_duration': - self.bc.datetime.from_timedelta(mentorship_service.max_duration), - 'missed_meeting_duration': - self.bc.datetime.from_timedelta(mentorship_service.missed_meeting_duration), - 'name': - mentorship_service.name, - 'slug': - mentorship_service.slug, - 'status': - mentorship_service.status, - 'updated_at': - self.bc.datetime.to_iso_string(mentorship_service.updated_at), - }], - 'slug': - mentor_profile.slug, - 'status': - mentor_profile.status, - 'timezone': - mentor_profile.timezone, - 'updated_at': - self.bc.datetime.to_iso_string(mentor_profile.updated_at), - 'user': { - 'email': user.email, - 'first_name': user.first_name, - 'id': user.id, - 'last_name': user.last_name, - } + "mentee_left_at": mentorship_session.mentee_left_at, + "mentor": { + "booking_url": mentor_profile.booking_url, + "created_at": self.bc.datetime.to_iso_string(mentor_profile.created_at), + "email": mentor_profile.email, + "id": mentor_profile.id, + "one_line_bio": mentor_profile.one_line_bio, + "online_meeting_url": mentor_profile.online_meeting_url, + "price_per_hour": mentor_profile.price_per_hour, + "rating": mentor_profile.rating, + "services": [ + { + "academy": { + "icon_url": mentorship_service.academy.icon_url, + "id": mentorship_service.academy.id, + "logo_url": mentorship_service.academy.logo_url, + "name": mentorship_service.academy.name, + "slug": mentorship_service.academy.slug, + }, + "allow_mentee_to_extend": mentorship_service.allow_mentee_to_extend, + "allow_mentors_to_extend": mentorship_service.allow_mentors_to_extend, + "created_at": self.bc.datetime.to_iso_string(mentorship_service.created_at), + "duration": self.bc.datetime.from_timedelta(mentorship_service.duration), + "id": mentorship_service.id, + "language": mentorship_service.language, + "logo_url": mentorship_service.logo_url, + "max_duration": self.bc.datetime.from_timedelta(mentorship_service.max_duration), + "missed_meeting_duration": self.bc.datetime.from_timedelta( + mentorship_service.missed_meeting_duration + ), + "name": mentorship_service.name, + "slug": mentorship_service.slug, + "status": mentorship_service.status, + "updated_at": self.bc.datetime.to_iso_string(mentorship_service.updated_at), + } + ], + "slug": mentor_profile.slug, + "status": mentor_profile.status, + "timezone": mentor_profile.timezone, + "updated_at": self.bc.datetime.to_iso_string(mentor_profile.updated_at), + "user": { + "email": user.email, + "first_name": user.first_name, + "id": user.id, + "last_name": user.last_name, + }, }, - 'mentor_joined_at': mentorship_session.mentor_joined_at, - 'mentor_late': get_mentor_late(mentorship_session), - 'mentor_left_at': 
mentorship_session.mentor_left_at, - 'rating': get_rating(mentorship_session), - 'started_at': - self.bc.datetime.to_iso_string(mentorship_session.started_at) if mentorship_session.started_at else None, - 'status': mentorship_session.status, - 'status_message': mentorship_session.status_message, - 'suggested_accounted_duration': mentorship_session.suggested_accounted_duration, - 'summary': mentorship_session.summary, - 'tooltip': get_tooltip(mentorship_session), + "mentor_joined_at": mentorship_session.mentor_joined_at, + "mentor_late": get_mentor_late(mentorship_session), + "mentor_left_at": mentorship_session.mentor_left_at, + "rating": get_rating(mentorship_session), + "started_at": ( + self.bc.datetime.to_iso_string(mentorship_session.started_at) if mentorship_session.started_at else None + ), + "status": mentorship_session.status, + "status_message": mentorship_session.status_message, + "suggested_accounted_duration": mentorship_session.suggested_accounted_duration, + "summary": mentorship_session.summary, + "tooltip": get_tooltip(mentorship_session), **data, } def mentor_profile_columns(data={}): - token = hashlib.sha1((str(data['slug'] if 'slug' in data else '') + str(UTC_NOW)).encode('UTF-8')).hexdigest() + token = hashlib.sha1((str(data["slug"] if "slug" in data else "") + str(UTC_NOW)).encode("UTF-8")).hexdigest() return { - 'bio': None, - 'booking_url': None, - 'email': None, - 'id': 0, - 'name': '', - 'online_meeting_url': None, - 'price_per_hour': 0, - 'service_id': 0, - 'slug': 'mirai-nikki', - 'status': 'INVITED', - 'timezone': None, - 'token': token, - 'user_id': 0, + "bio": None, + "booking_url": None, + "email": None, + "id": 0, + "name": "", + "online_meeting_url": None, + "price_per_hour": 0, + "service_id": 0, + "slug": "mirai-nikki", + "status": "INVITED", + "timezone": None, + "token": token, + "user_id": 0, **data, } @@ -242,13 +224,13 @@ class AcademyServiceTestSuite(MentorshipTestCase): """ def test__get__without_auth(self): - url = reverse_lazy('mentorship:academy_mentor_id_session', kwargs={'mentor_id': 1}) + url = reverse_lazy("mentorship:academy_mentor_id_session", kwargs={"mentor_id": 1}) response = self.client.get(url) json = response.json() expected = { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED, + "detail": "Authentication credentials were not provided.", + "status_code": status.HTTP_401_UNAUTHORIZED, } self.assertEqual(json, expected) @@ -259,13 +241,13 @@ def test__get__without_academy_header(self): self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_mentor_id_session', kwargs={'mentor_id': 1}) + url = reverse_lazy("mentorship:academy_mentor_id_session", kwargs={"mentor_id": 1}) response = self.client.get(url) json = response.json() expected = { - 'detail': "Missing academy_id parameter expected for the endpoint url or 'Academy' header", - 'status_code': 403, + "detail": "Missing academy_id parameter expected for the endpoint url or 'Academy' header", + "status_code": 403, } self.assertEqual(json, expected) @@ -281,13 +263,13 @@ def test__get__without_capabilities(self): self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_mentor_id_session', kwargs={'mentor_id': 1}) + url = reverse_lazy("mentorship:academy_mentor_id_session", kwargs={"mentor_id": 1}) response = self.client.get(url) json = response.json() expected = { - 'detail': "You (user: 1) don't have this capability: 
read_mentorship_session for academy 1", - 'status_code': 403, + "detail": "You (user: 1) don't have this capability: read_mentorship_session for academy 1", + "status_code": 403, } self.assertEqual(json, expected) @@ -298,12 +280,12 @@ def test__get__without_capabilities(self): """ def test__get__without_data(self): - model = self.bc.database.create(user=1, role=1, capability='read_mentorship_session', profile_academy=1) + model = self.bc.database.create(user=1, role=1, capability="read_mentorship_session", profile_academy=1) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_mentor_id_session', kwargs={'mentor_id': 1}) + url = reverse_lazy("mentorship:academy_mentor_id_session", kwargs={"mentor_id": 1}) response = self.client.get(url) json = response.json() @@ -317,70 +299,72 @@ def test__get__without_data(self): """ def test__get__with_one_mentor_profile(self): - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_session', - mentorship_session=1, - mentor_profile=1, - mentorship_service=1, - profile_academy=1) + model = self.bc.database.create( + user=1, + role=1, + capability="read_mentorship_session", + mentorship_session=1, + mentor_profile=1, + mentorship_service=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_mentor_id_session', kwargs={'mentor_id': 1}) + url = reverse_lazy("mentorship:academy_mentor_id_session", kwargs={"mentor_id": 1}) response = self.client.get(url) json = response.json() expected = [ - get_serializer(self, - model.mentorship_session, - model.mentor_profile, - model.mentorship_service, - model.user, - data={}), + get_serializer( + self, model.mentorship_session, model.mentor_profile, model.mentorship_service, model.user, data={} + ), ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - self.bc.format.to_dict(model.mentorship_session), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + self.bc.format.to_dict(model.mentorship_session), + ], + ) """ 🔽🔽🔽 GET with two MentorshipSession, one MentorProfile and one MentorshipService """ def test__get__with_two_mentor_profile(self): - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_session', - mentorship_session=2, - mentor_profile=1, - mentorship_service=1, - profile_academy=1) + model = self.bc.database.create( + user=1, + role=1, + capability="read_mentorship_session", + mentorship_session=2, + mentor_profile=1, + mentorship_service=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_mentor_id_session', kwargs={'mentor_id': 1}) + url = reverse_lazy("mentorship:academy_mentor_id_session", kwargs={"mentor_id": 1}) response = self.client.get(url) json = response.json() mentorship_session_list = sorted(model.mentorship_session, key=lambda x: x.created_at, reverse=True) expected = [ - get_serializer(self, - mentorship_session, - model.mentor_profile, - model.mentorship_service, - model.user, - data={}) for mentorship_session in mentorship_session_list + get_serializer( + self, mentorship_session, model.mentor_profile, model.mentorship_service, model.user, data={} + ) + for mentorship_session in mentorship_session_list ] 
self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('mentorship.MentorshipSession'), + self.bc.database.list_of("mentorship.MentorshipSession"), self.bc.format.to_dict(model.mentorship_session), ) @@ -388,30 +372,34 @@ def test__get__with_two_mentor_profile(self): 🔽🔽🔽 GET with two MentorshipSession, one MentorProfile and one MentorshipService, passing status """ - @patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) + @patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) def test__get__with_two_mentor_profile__passing_bad_status(self): - statuses = ['PENDING', 'STARTED', 'COMPLETED', 'FAILED', 'IGNORED'] + statuses = ["PENDING", "STARTED", "COMPLETED", "FAILED", "IGNORED"] for n in range(0, 4): first_status = statuses[n] second_status = statuses[n + 1] choices = [first_status, second_status] - mentorship_sessions = [{'status': x} for x in choices] - bad_statuses = ','.join([x for x in statuses if x not in choices]) - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_session', - mentorship_session=mentorship_sessions, - mentor_profile=1, - mentorship_service=1, - profile_academy=1) + mentorship_sessions = [{"status": x} for x in choices] + bad_statuses = ",".join([x for x in statuses if x not in choices]) + model = self.bc.database.create( + user=1, + role=1, + capability="read_mentorship_session", + mentorship_session=mentorship_sessions, + mentor_profile=1, + mentorship_service=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=model.academy.id) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_mentor_id_session', kwargs={'mentor_id': 1}) + \ - f'?status={bad_statuses}' + url = ( + reverse_lazy("mentorship:academy_mentor_id_session", kwargs={"mentor_id": 1}) + + f"?status={bad_statuses}" + ) response = self.client.get(url) json = response.json() @@ -420,84 +408,93 @@ def test__get__with_two_mentor_profile__passing_bad_status(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('mentorship.MentorshipSession'), + self.bc.database.list_of("mentorship.MentorshipSession"), self.bc.format.to_dict(model.mentorship_session), ) # teardown - self.bc.database.delete('mentorship.MentorshipSession') + self.bc.database.delete("mentorship.MentorshipSession") - @patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) + @patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) def test__get__with_two_mentor_profile__passing_status(self): - statuses = ['PENDING', 'STARTED', 'COMPLETED', 'FAILED', 'IGNORED'] + statuses = ["PENDING", "STARTED", "COMPLETED", "FAILED", "IGNORED"] for n in range(0, 4): first_status = statuses[n] second_status = statuses[n + 1] choices = [first_status, second_status] - mentorship_sessions = [{'status': x} for x in choices] - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_session', - mentorship_session=mentorship_sessions, - mentor_profile=1, - mentorship_service=1, - profile_academy=1) + mentorship_sessions = [{"status": x} for x in choices] + model = self.bc.database.create( + user=1, + role=1, + capability="read_mentorship_session", + mentorship_session=mentorship_sessions, + mentor_profile=1, + mentorship_service=1, + profile_academy=1, + ) 
self.bc.request.set_headers(academy=model.academy.id) self.client.force_authenticate(model.user) - url = (reverse_lazy('mentorship:academy_mentor_id_session', kwargs={'mentor_id': model.mentor_profile.id}) + - f'?status={first_status},{second_status}') + url = ( + reverse_lazy("mentorship:academy_mentor_id_session", kwargs={"mentor_id": model.mentor_profile.id}) + + f"?status={first_status},{second_status}" + ) response = self.client.get(url) json = response.json() mentorship_session_list = sorted(model.mentorship_session, key=lambda x: x.created_at, reverse=True) expected = [ - get_serializer(self, - mentorship_session_list[0], - model.mentor_profile, - model.mentorship_service, - model.user, - data={'status': second_status}), - get_serializer(self, - mentorship_session_list[1], - model.mentor_profile, - model.mentorship_service, - model.user, - data={'status': first_status}), + get_serializer( + self, + mentorship_session_list[0], + model.mentor_profile, + model.mentorship_service, + model.user, + data={"status": second_status}, + ), + get_serializer( + self, + mentorship_session_list[1], + model.mentor_profile, + model.mentorship_service, + model.user, + data={"status": first_status}, + ), ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('mentorship.MentorshipSession'), + self.bc.database.list_of("mentorship.MentorshipSession"), self.bc.format.to_dict(model.mentorship_session), ) # teardown - self.bc.database.delete('mentorship.MentorshipSession') + self.bc.database.delete("mentorship.MentorshipSession") """ 🔽🔽🔽 GET with two MentorshipSession, one MentorProfile and one MentorshipService, passing billed """ - @patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) + @patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) def test__get__with_two_mentor_profile__passing_billed_as_true__without_mentorship_bill(self): - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_session', - mentorship_session=1, - mentor_profile=1, - mentorship_service=1, - profile_academy=1) + model = self.bc.database.create( + user=1, + role=1, + capability="read_mentorship_session", + mentorship_session=1, + mentor_profile=1, + mentorship_service=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=model.academy.id) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_mentor_id_session', kwargs={'mentor_id': 1}) + \ - f'?billed=true' + url = reverse_lazy("mentorship:academy_mentor_id_session", kwargs={"mentor_id": 1}) + f"?billed=true" response = self.client.get(url) json = response.json() @@ -505,60 +502,65 @@ def test__get__with_two_mentor_profile__passing_billed_as_true__without_mentorsh self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - self.bc.format.to_dict(model.mentorship_session), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + self.bc.format.to_dict(model.mentorship_session), + ], + ) - @patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) + @patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) def test__get__with_two_mentor_profile__passing_billed_as_true__with_mentorship_bill(self): - model = self.bc.database.create(user=1, - role=1, - 
capability='read_mentorship_session', - mentorship_session=1, - mentor_profile=1, - mentorship_bill=1, - mentorship_service=1, - profile_academy=1) + model = self.bc.database.create( + user=1, + role=1, + capability="read_mentorship_session", + mentorship_session=1, + mentor_profile=1, + mentorship_bill=1, + mentorship_service=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=model.academy.id) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_mentor_id_session', kwargs={'mentor_id': 1}) + \ - f'?billed=true' + url = reverse_lazy("mentorship:academy_mentor_id_session", kwargs={"mentor_id": 1}) + f"?billed=true" response = self.client.get(url) json = response.json() expected = [ - get_serializer(self, - model.mentorship_session, - model.mentor_profile, - model.mentorship_service, - model.user, - data={}), + get_serializer( + self, model.mentorship_session, model.mentor_profile, model.mentorship_service, model.user, data={} + ), ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - self.bc.format.to_dict(model.mentorship_session), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + self.bc.format.to_dict(model.mentorship_session), + ], + ) - @patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) + @patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) def test__get__with_two_mentor_profile__passing_billed_as_false__with_mentorship_bill(self): - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_session', - mentorship_session=1, - mentor_profile=1, - mentorship_bill=1, - mentorship_service=1, - profile_academy=1) + model = self.bc.database.create( + user=1, + role=1, + capability="read_mentorship_session", + mentorship_session=1, + mentor_profile=1, + mentorship_bill=1, + mentorship_service=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=model.academy.id) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_mentor_id_session', kwargs={'mentor_id': 1}) + \ - f'?billed=false' + url = reverse_lazy("mentorship:academy_mentor_id_session", kwargs={"mentor_id": 1}) + f"?billed=false" response = self.client.get(url) json = response.json() @@ -566,64 +568,72 @@ def test__get__with_two_mentor_profile__passing_billed_as_false__with_mentorship self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - self.bc.format.to_dict(model.mentorship_session), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + self.bc.format.to_dict(model.mentorship_session), + ], + ) - @patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) + @patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) def test__get__with_two_mentor_profile__passing_billed_as_false__without_mentorship_bill(self): - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_session', - mentorship_session=1, - mentor_profile=1, - mentorship_service=1, - profile_academy=1) + model = self.bc.database.create( + user=1, + role=1, + capability="read_mentorship_session", + mentorship_session=1, + mentor_profile=1, + mentorship_service=1, + profile_academy=1, + ) 
self.bc.request.set_headers(academy=model.academy.id) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_mentor_id_session', kwargs={'mentor_id': 1}) + \ - f'?billed=false' + url = reverse_lazy("mentorship:academy_mentor_id_session", kwargs={"mentor_id": 1}) + f"?billed=false" response = self.client.get(url) json = response.json() expected = [ - get_serializer(self, - model.mentorship_session, - model.mentor_profile, - model.mentorship_service, - model.user, - data={}), + get_serializer( + self, model.mentorship_session, model.mentor_profile, model.mentorship_service, model.user, data={} + ), ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - self.bc.format.to_dict(model.mentorship_session), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + self.bc.format.to_dict(model.mentorship_session), + ], + ) """ 🔽🔽🔽 GET with two MentorshipSession, one MentorProfile and one MentorshipService, passing started_after """ - @patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) + @patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) def test__get__with_two_mentor_profile__passing_bad_started_after(self): utc_now = timezone.now() - mentorship_session = {'started_at': utc_now} - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_session', - mentorship_session=mentorship_session, - mentor_profile=1, - mentorship_service=1, - profile_academy=1) + mentorship_session = {"started_at": utc_now} + model = self.bc.database.create( + user=1, + role=1, + capability="read_mentorship_session", + mentorship_session=mentorship_session, + mentor_profile=1, + mentorship_service=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=model.academy.id) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_mentor_id_session', kwargs={'mentor_id': 1}) + \ - f'?started_after={self.bc.datetime.to_iso_string(utc_now + timedelta(seconds=1))}' + url = ( + reverse_lazy("mentorship:academy_mentor_id_session", kwargs={"mentor_id": 1}) + + f"?started_after={self.bc.datetime.to_iso_string(utc_now + timedelta(seconds=1))}" + ) response = self.client.get(url) json = response.json() @@ -631,55 +641,64 @@ def test__get__with_two_mentor_profile__passing_bad_started_after(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - self.bc.format.to_dict(model.mentorship_session), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + self.bc.format.to_dict(model.mentorship_session), + ], + ) - @patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) + @patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) def test__get__with_two_mentor_profile__passing_started_after(self): utc_now = timezone.now() - mentorship_session = {'started_at': utc_now} - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_session', - mentorship_session=mentorship_session, - mentor_profile=1, - mentorship_service=1, - profile_academy=1) + mentorship_session = {"started_at": utc_now} + model = self.bc.database.create( + user=1, + role=1, + capability="read_mentorship_session", + 
mentorship_session=mentorship_session, + mentor_profile=1, + mentorship_service=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=model.academy.id) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_mentor_id_session', kwargs={'mentor_id': 1}) + \ - f'?started_after={self.bc.datetime.to_iso_string(utc_now - timedelta(seconds=1))}' + url = ( + reverse_lazy("mentorship:academy_mentor_id_session", kwargs={"mentor_id": 1}) + + f"?started_after={self.bc.datetime.to_iso_string(utc_now - timedelta(seconds=1))}" + ) response = self.client.get(url) json = response.json() expected = [ - get_serializer(self, - model.mentorship_session, - model.mentor_profile, - model.mentorship_service, - model.user, - data={}), + get_serializer( + self, model.mentorship_session, model.mentor_profile, model.mentorship_service, model.user, data={} + ), ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - self.bc.format.to_dict(model.mentorship_session), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + self.bc.format.to_dict(model.mentorship_session), + ], + ) - @patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) + @patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) def test__get__with_two_mentor_profile__passing_started_after__without_started_at(self): utc_now = timezone.now() - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_session', - mentorship_session=1, - mentor_profile=1, - mentorship_service=1, - profile_academy=1) + model = self.bc.database.create( + user=1, + role=1, + capability="read_mentorship_session", + mentorship_session=1, + mentor_profile=1, + mentorship_service=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=model.academy.id) self.client.force_authenticate(model.user) @@ -689,47 +708,52 @@ def test__get__with_two_mentor_profile__passing_started_after__without_started_a self.bc.datetime.to_iso_string(utc_now + timedelta(seconds=1)), ] for case in cases: - url = reverse_lazy('mentorship:academy_mentor_id_session', kwargs={'mentor_id': 1}) + \ - f'?started_after={case}' + url = ( + reverse_lazy("mentorship:academy_mentor_id_session", kwargs={"mentor_id": 1}) + f"?started_after={case}" + ) response = self.client.get(url) json = response.json() expected = [ - get_serializer(self, - model.mentorship_session, - model.mentor_profile, - model.mentorship_service, - model.user, - data={}), + get_serializer( + self, model.mentorship_session, model.mentor_profile, model.mentorship_service, model.user, data={} + ), ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - self.bc.format.to_dict(model.mentorship_session), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + self.bc.format.to_dict(model.mentorship_session), + ], + ) """ 🔽🔽🔽 GET with two MentorshipSession, one MentorProfile and one MentorshipService, passing ended_before """ - @patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) + @patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) def test__get__with_two_mentor_profile__passing_bad_ended_before(self): utc_now = timezone.now() - 
mentorship_session = {'ended_at': utc_now} - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_session', - mentorship_session=mentorship_session, - mentor_profile=1, - mentorship_service=1, - profile_academy=1) + mentorship_session = {"ended_at": utc_now} + model = self.bc.database.create( + user=1, + role=1, + capability="read_mentorship_session", + mentorship_session=mentorship_session, + mentor_profile=1, + mentorship_service=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=model.academy.id) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_mentor_id_session', kwargs={'mentor_id': 1}) + \ - f'?ended_before={self.bc.datetime.to_iso_string(utc_now - timedelta(seconds=1))}' + url = ( + reverse_lazy("mentorship:academy_mentor_id_session", kwargs={"mentor_id": 1}) + + f"?ended_before={self.bc.datetime.to_iso_string(utc_now - timedelta(seconds=1))}" + ) response = self.client.get(url) json = response.json() @@ -737,55 +761,64 @@ def test__get__with_two_mentor_profile__passing_bad_ended_before(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - self.bc.format.to_dict(model.mentorship_session), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + self.bc.format.to_dict(model.mentorship_session), + ], + ) - @patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) + @patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) def test__get__with_two_mentor_profile__passing_ended_before(self): utc_now = timezone.now() - mentorship_session = {'ended_at': utc_now} - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_session', - mentorship_session=mentorship_session, - mentor_profile=1, - mentorship_service=1, - profile_academy=1) + mentorship_session = {"ended_at": utc_now} + model = self.bc.database.create( + user=1, + role=1, + capability="read_mentorship_session", + mentorship_session=mentorship_session, + mentor_profile=1, + mentorship_service=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=model.academy.id) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_mentor_id_session', kwargs={'mentor_id': 1}) + \ - f'?ended_before={self.bc.datetime.to_iso_string(utc_now + timedelta(seconds=1))}' + url = ( + reverse_lazy("mentorship:academy_mentor_id_session", kwargs={"mentor_id": 1}) + + f"?ended_before={self.bc.datetime.to_iso_string(utc_now + timedelta(seconds=1))}" + ) response = self.client.get(url) json = response.json() expected = [ - get_serializer(self, - model.mentorship_session, - model.mentor_profile, - model.mentorship_service, - model.user, - data={}), + get_serializer( + self, model.mentorship_session, model.mentor_profile, model.mentorship_service, model.user, data={} + ), ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - self.bc.format.to_dict(model.mentorship_session), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + self.bc.format.to_dict(model.mentorship_session), + ], + ) - @patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) + 
@patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) def test__get__with_two_mentor_profile__passing_ended_before__without_ended_at(self): utc_now = timezone.now() - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_session', - mentorship_session=1, - mentor_profile=1, - mentorship_service=1, - profile_academy=1) + model = self.bc.database.create( + user=1, + role=1, + capability="read_mentorship_session", + mentorship_session=1, + mentor_profile=1, + mentorship_service=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=model.academy.id) self.client.force_authenticate(model.user) @@ -796,49 +829,54 @@ def test__get__with_two_mentor_profile__passing_ended_before__without_ended_at(s ] for case in cases: - url = reverse_lazy('mentorship:academy_mentor_id_session', kwargs={'mentor_id': 1}) + \ - f'?ended_before={case}' + url = ( + reverse_lazy("mentorship:academy_mentor_id_session", kwargs={"mentor_id": 1}) + f"?ended_before={case}" + ) response = self.client.get(url) json = response.json() expected = [ - get_serializer(self, - model.mentorship_session, - model.mentor_profile, - model.mentorship_service, - model.user, - data={}), + get_serializer( + self, model.mentorship_session, model.mentor_profile, model.mentorship_service, model.user, data={} + ), ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - self.bc.format.to_dict(model.mentorship_session), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + self.bc.format.to_dict(model.mentorship_session), + ], + ) """ 🔽🔽🔽 Spy the extensions """ - @patch.object(APIViewExtensionHandlers, '_spy_extensions', MagicMock()) - @patch.object(APIViewExtensionHandlers, '_spy_extension_arguments', MagicMock()) + @patch.object(APIViewExtensionHandlers, "_spy_extensions", MagicMock()) + @patch.object(APIViewExtensionHandlers, "_spy_extension_arguments", MagicMock()) def test__get__spy_extensions(self): - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_session', - mentorship_session=1, - profile_academy=1) + model = self.bc.database.create( + user=1, role=1, capability="read_mentorship_session", mentorship_session=1, profile_academy=1 + ) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_mentor_id_session', kwargs={'mentor_id': 1}) + url = reverse_lazy("mentorship:academy_mentor_id_session", kwargs={"mentor_id": 1}) self.client.get(url) - self.assertEqual(APIViewExtensionHandlers._spy_extensions.call_args_list, [ - call(['LanguageExtension', 'LookupExtension', 'PaginationExtension', 'SortExtension']), - ]) + self.assertEqual( + APIViewExtensionHandlers._spy_extensions.call_args_list, + [ + call(["LanguageExtension", "LookupExtension", "PaginationExtension", "SortExtension"]), + ], + ) - self.assertEqual(APIViewExtensionHandlers._spy_extension_arguments.call_args_list, [ - call(sort='-created_at', paginate=True), - ]) + self.assertEqual( + APIViewExtensionHandlers._spy_extension_arguments.call_args_list, + [ + call(sort="-created_at", paginate=True), + ], + ) diff --git a/breathecode/mentorship/tests/urls/tests_academy_service.py b/breathecode/mentorship/tests/urls/tests_academy_service.py index e5502a12d..718aa4ff0 100644 --- a/breathecode/mentorship/tests/urls/tests_academy_service.py +++ 
b/breathecode/mentorship/tests/urls/tests_academy_service.py
@@ -1,6 +1,7 @@
"""
This file just can contains duck tests refert to AcademyInviteView
"""
+
from datetime import timedelta
from unittest.mock import MagicMock, call, patch
@@ -14,65 +15,65 @@
def get_serializer(self, mentorship_service, academy, data={}):
    return {
-        'academy': {
-            'icon_url': academy.icon_url,
-            'id': academy.id,
-            'logo_url': academy.logo_url,
-            'name': academy.name,
-            'slug': academy.slug,
+        "academy": {
+            "icon_url": academy.icon_url,
+            "id": academy.id,
+            "logo_url": academy.logo_url,
+            "name": academy.name,
+            "slug": academy.slug,
        },
-        'allow_mentee_to_extend': mentorship_service.allow_mentee_to_extend,
-        'allow_mentors_to_extend': mentorship_service.allow_mentors_to_extend,
-        'created_at': self.bc.datetime.to_iso_string(mentorship_service.created_at),
-        'duration': self.bc.datetime.from_timedelta(mentorship_service.duration),
-        'id': mentorship_service.id,
-        'language': mentorship_service.language,
-        'logo_url': mentorship_service.logo_url,
-        'max_duration': self.bc.datetime.from_timedelta(mentorship_service.max_duration),
-        'missed_meeting_duration': self.bc.datetime.from_timedelta(mentorship_service.missed_meeting_duration),
-        'name': mentorship_service.name,
-        'slug': mentorship_service.slug,
-        'status': mentorship_service.status,
-        'updated_at': self.bc.datetime.to_iso_string(mentorship_service.updated_at),
+        "allow_mentee_to_extend": mentorship_service.allow_mentee_to_extend,
+        "allow_mentors_to_extend": mentorship_service.allow_mentors_to_extend,
+        "created_at": self.bc.datetime.to_iso_string(mentorship_service.created_at),
+        "duration": self.bc.datetime.from_timedelta(mentorship_service.duration),
+        "id": mentorship_service.id,
+        "language": mentorship_service.language,
+        "logo_url": mentorship_service.logo_url,
+        "max_duration": self.bc.datetime.from_timedelta(mentorship_service.max_duration),
+        "missed_meeting_duration": self.bc.datetime.from_timedelta(mentorship_service.missed_meeting_duration),
+        "name": mentorship_service.name,
+        "slug": mentorship_service.slug,
+        "status": mentorship_service.status,
+        "updated_at": self.bc.datetime.to_iso_string(mentorship_service.updated_at),
        **data,
    }
def post_serializer(data={}):
    return {
-        'allow_mentee_to_extend': True,
-        'allow_mentors_to_extend': True,
-        'description': None,
-        'duration': '01:00:00',
-        'id': 0,
-        'language': 'en',
-        'logo_url': None,
-        'max_duration': '02:00:00',
-        'missed_meeting_duration': '00:10:00',
-        'name': '',
-        'slug': '',
-        'status': 'DRAFT',
-        'video_provider': 'GOOGLE_MEET',
+        "allow_mentee_to_extend": True,
+        "allow_mentors_to_extend": True,
+        "description": None,
+        "duration": "01:00:00",
+        "id": 0,
+        "language": "en",
+        "logo_url": None,
+        "max_duration": "02:00:00",
+        "missed_meeting_duration": "00:10:00",
+        "name": "",
+        "slug": "",
+        "status": "DRAFT",
+        "video_provider": "GOOGLE_MEET",
        **data,
    }
def mentorship_service_columns(data={}):
    return {
-        'academy_id': 0,
-        'allow_mentee_to_extend': True,
-        'allow_mentors_to_extend': True,
-        'description': None,
-        'duration': timedelta(seconds=3600),
-        'id': 0,
-        'language': 'en',
-        'logo_url': None,
-        'max_duration': timedelta(seconds=7200),
-        'missed_meeting_duration': timedelta(seconds=600),
-        'name': '',
-        'slug': '',
-        'status': 'DRAFT',
-        'video_provider': 'GOOGLE_MEET',
+        "academy_id": 0,
+        "allow_mentee_to_extend": True,
+        "allow_mentors_to_extend": True,
+        "description": None,
+        "duration": timedelta(seconds=3600),
+        "id": 0,
+        "language": "en",
+        "logo_url": None,
+
"max_duration": timedelta(seconds=7200), + "missed_meeting_duration": timedelta(seconds=600), + "name": "", + "slug": "", + "status": "DRAFT", + "video_provider": "GOOGLE_MEET", **data, } @@ -83,13 +84,13 @@ class AcademyServiceTestSuite(MentorshipTestCase): """ def test__get__without_auth(self): - url = reverse_lazy('mentorship:academy_service') + url = reverse_lazy("mentorship:academy_service") response = self.client.get(url) json = response.json() expected = { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED, + "detail": "Authentication credentials were not provided.", + "status_code": status.HTTP_401_UNAUTHORIZED, } self.assertEqual(json, expected) @@ -100,13 +101,13 @@ def test__get__without_academy_header(self): self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_service') + url = reverse_lazy("mentorship:academy_service") response = self.client.get(url) json = response.json() expected = { - 'detail': "Missing academy_id parameter expected for the endpoint url or 'Academy' header", - 'status_code': 403, + "detail": "Missing academy_id parameter expected for the endpoint url or 'Academy' header", + "status_code": 403, } self.assertEqual(json, expected) @@ -122,13 +123,13 @@ def test__get__without_capabilities(self): self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_service') + url = reverse_lazy("mentorship:academy_service") response = self.client.get(url) json = response.json() expected = { - 'detail': "You (user: 1) don't have this capability: read_mentorship_service for academy 1", - 'status_code': 403, + "detail": "You (user: 1) don't have this capability: read_mentorship_service for academy 1", + "status_code": 403, } self.assertEqual(json, expected) @@ -139,12 +140,12 @@ def test__get__without_capabilities(self): """ def test__get__without_data(self): - model = self.bc.database.create(user=1, role=1, capability='read_mentorship_service', profile_academy=1) + model = self.bc.database.create(user=1, role=1, capability="read_mentorship_service", profile_academy=1) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_service') + url = reverse_lazy("mentorship:academy_service") response = self.client.get(url) json = response.json() @@ -158,16 +159,14 @@ def test__get__without_data(self): """ def test__get__with_one_mentorship_service(self): - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_service', - mentorship_service=1, - profile_academy=1) + model = self.bc.database.create( + user=1, role=1, capability="read_mentorship_service", mentorship_service=1, profile_academy=1 + ) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_service') + url = reverse_lazy("mentorship:academy_service") response = self.client.get(url) json = response.json() @@ -177,25 +176,26 @@ def test__get__with_one_mentorship_service(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipService'), [ - self.bc.format.to_dict(model.mentorship_service), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipService"), + [ + self.bc.format.to_dict(model.mentorship_service), + ], + ) """ 🔽🔽🔽 GET with two MentorshipService """ def 
test__get__with_two_mentorship_service(self): - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_service', - mentorship_service=2, - profile_academy=1) + model = self.bc.database.create( + user=1, role=1, capability="read_mentorship_service", mentorship_service=2, profile_academy=1 + ) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_service') + url = reverse_lazy("mentorship:academy_service") response = self.client.get(url) json = response.json() @@ -207,38 +207,41 @@ def test__get__with_two_mentorship_service(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipService'), - self.bc.format.to_dict(model.mentorship_service)) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipService"), self.bc.format.to_dict(model.mentorship_service) + ) """ 🔽🔽🔽 GET with two MentorshipService passing status in querystring """ def test__get__with_two_mentorship_service__passing_bad_status(self): - statuses = ['DRAFT', 'ACTIVE', 'UNLISTED', 'INNACTIVE'] + statuses = ["DRAFT", "ACTIVE", "UNLISTED", "INNACTIVE"] for n in range(0, 3): # 0, 1, 10, 11, 0 - current_bin_key = bin(n).replace('0b', '')[-2:] + current_bin_key = bin(n).replace("0b", "")[-2:] current_key = int(current_bin_key, 2) current_status = statuses[current_key] # 0, 1, 10, 11, 0 - bad_bin_key = bin(n + 1).replace('0b', '')[-2:] + bad_bin_key = bin(n + 1).replace("0b", "")[-2:] bad_key = int(bad_bin_key, 2) bad_status = statuses[bad_key] - mentorship_service = {'status': current_status} - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_service', - mentorship_service=(2, mentorship_service), - profile_academy=1) + mentorship_service = {"status": current_status} + model = self.bc.database.create( + user=1, + role=1, + capability="read_mentorship_service", + mentorship_service=(2, mentorship_service), + profile_academy=1, + ) self.bc.request.set_headers(academy=model.academy.id) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_service') + f'?status={bad_status}' + url = reverse_lazy("mentorship:academy_service") + f"?status={bad_status}" response = self.client.get(url) json = response.json() @@ -247,44 +250,46 @@ def test__get__with_two_mentorship_service__passing_bad_status(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('mentorship.MentorshipService'), + self.bc.database.list_of("mentorship.MentorshipService"), self.bc.format.to_dict(model.mentorship_service), ) - self.bc.database.delete('mentorship.MentorshipService') + self.bc.database.delete("mentorship.MentorshipService") def test__get__with_two_mentorship_service__passing_status(self): - statuses = ['DRAFT', 'ACTIVE', 'UNLISTED', 'INNACTIVE'] + statuses = ["DRAFT", "ACTIVE", "UNLISTED", "INNACTIVE"] for current_status in statuses: - mentorship_service = {'status': current_status} - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_service', - mentorship_service=(2, mentorship_service), - profile_academy=1) + mentorship_service = {"status": current_status} + model = self.bc.database.create( + user=1, + role=1, + capability="read_mentorship_service", + mentorship_service=(2, mentorship_service), + profile_academy=1, + ) self.bc.request.set_headers(academy=model.academy.id) 
self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_service') + f'?status={current_status}' + url = reverse_lazy("mentorship:academy_service") + f"?status={current_status}" response = self.client.get(url) json = response.json() mentorship_service = sorted(model.mentorship_service, key=lambda x: x.created_at, reverse=True) expected = [ - get_serializer(self, mentorship_service[0], model.academy, data={'status': current_status}), - get_serializer(self, mentorship_service[1], model.academy, data={'status': current_status}), + get_serializer(self, mentorship_service[0], model.academy, data={"status": current_status}), + get_serializer(self, mentorship_service[1], model.academy, data={"status": current_status}), ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('mentorship.MentorshipService'), + self.bc.database.list_of("mentorship.MentorshipService"), self.bc.format.to_dict(model.mentorship_service), ) - self.bc.database.delete('mentorship.MentorshipService') + self.bc.database.delete("mentorship.MentorshipService") """ 🔽🔽🔽 GET passing name in querystring @@ -292,20 +297,18 @@ def test__get__with_two_mentorship_service__passing_status(self): def test__get__mentorship_service__passing_name_wrong(self): - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_service', - mentorship_service=[{ - 'name': 'first' - }, { - 'name': 'second' - }], - profile_academy=1) + model = self.bc.database.create( + user=1, + role=1, + capability="read_mentorship_service", + mentorship_service=[{"name": "first"}, {"name": "second"}], + profile_academy=1, + ) self.bc.request.set_headers(academy=model.academy.id) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_service') + f'?name=g' + url = reverse_lazy("mentorship:academy_service") + f"?name=g" response = self.client.get(url) json = response.json() @@ -315,28 +318,26 @@ def test__get__mentorship_service__passing_name_wrong(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('mentorship.MentorshipService'), + self.bc.database.list_of("mentorship.MentorshipService"), self.bc.format.to_dict(model.mentorship_service), ) - self.bc.database.delete('mentorship.MentorshipService') + self.bc.database.delete("mentorship.MentorshipService") def test__get__mentorship_service__passing_name(self): - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_service', - mentorship_service=[{ - 'name': 'first' - }, { - 'name': 'second' - }], - profile_academy=1) + model = self.bc.database.create( + user=1, + role=1, + capability="read_mentorship_service", + mentorship_service=[{"name": "first"}, {"name": "second"}], + profile_academy=1, + ) self.bc.request.set_headers(academy=model.academy.id) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_service') + f'?name=f' + url = reverse_lazy("mentorship:academy_service") + f"?name=f" response = self.client.get(url) json = response.json() @@ -348,34 +349,40 @@ def test__get__mentorship_service__passing_name(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('mentorship.MentorshipService'), + self.bc.database.list_of("mentorship.MentorshipService"), self.bc.format.to_dict(model.mentorship_service), ) - 
self.bc.database.delete('mentorship.MentorshipService') + self.bc.database.delete("mentorship.MentorshipService") """ 🔽🔽🔽 Spy the extensions """ - @patch.object(APIViewExtensionHandlers, '_spy_extensions', MagicMock()) - @patch.object(APIViewExtensionHandlers, '_spy_extension_arguments', MagicMock()) + @patch.object(APIViewExtensionHandlers, "_spy_extensions", MagicMock()) + @patch.object(APIViewExtensionHandlers, "_spy_extension_arguments", MagicMock()) def test__get__spy_extensions(self): - model = self.bc.database.create(user=1, role=1, capability='read_mentorship_service', profile_academy=1) + model = self.bc.database.create(user=1, role=1, capability="read_mentorship_service", profile_academy=1) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_service') + url = reverse_lazy("mentorship:academy_service") self.client.get(url) - self.assertEqual(APIViewExtensionHandlers._spy_extensions.call_args_list, [ - call(['LanguageExtension', 'LookupExtension', 'PaginationExtension', 'SortExtension']), - ]) + self.assertEqual( + APIViewExtensionHandlers._spy_extensions.call_args_list, + [ + call(["LanguageExtension", "LookupExtension", "PaginationExtension", "SortExtension"]), + ], + ) - self.assertEqual(APIViewExtensionHandlers._spy_extension_arguments.call_args_list, [ - call(sort='-created_at', paginate=True), - ]) + self.assertEqual( + APIViewExtensionHandlers._spy_extension_arguments.call_args_list, + [ + call(sort="-created_at", paginate=True), + ], + ) """ 🔽🔽🔽 POST capability @@ -387,13 +394,13 @@ def test__post__without_capabilities(self): self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_service') + url = reverse_lazy("mentorship:academy_service") response = self.client.post(url) json = response.json() expected = { - 'detail': "You (user: 1) don't have this capability: crud_mentorship_service for academy 1", - 'status_code': 403, + "detail": "You (user: 1) don't have this capability: crud_mentorship_service for academy 1", + "status_code": 403, } self.assertEqual(json, expected) @@ -404,16 +411,16 @@ def test__post__without_capabilities(self): """ def test__post__without_required_fields_in_body(self): - model = self.bc.database.create(user=1, role=1, capability='crud_mentorship_service', profile_academy=1) + model = self.bc.database.create(user=1, role=1, capability="crud_mentorship_service", profile_academy=1) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_service') + url = reverse_lazy("mentorship:academy_service") response = self.client.post(url) json = response.json() - expected = {'name': ['This field is required.'], 'slug': ['This field is required.']} + expected = {"name": ["This field is required."], "slug": ["This field is required."]} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -423,27 +430,32 @@ def test__post__without_required_fields_in_body(self): """ def test__post__creating_a_element(self): - model = self.bc.database.create(user=1, role=1, capability='crud_mentorship_service', profile_academy=1) + model = self.bc.database.create(user=1, role=1, capability="crud_mentorship_service", profile_academy=1) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_service') - data = {'slug': 'mirai-nikki', 'name': 'Mirai Nikki'} - response 
= self.client.post(url, data, format='json') + url = reverse_lazy("mentorship:academy_service") + data = {"slug": "mirai-nikki", "name": "Mirai Nikki"} + response = self.client.post(url, data, format="json") json = response.json() - expected = post_serializer({**data, 'id': 1}) + expected = post_serializer({**data, "id": 1}) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipService'), [ - mentorship_service_columns({ - **data, - 'id': 1, - 'academy_id': 1, - }), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipService"), + [ + mentorship_service_columns( + { + **data, + "id": 1, + "academy_id": 1, + } + ), + ], + ) """ 🔽🔽🔽 DELETE @@ -452,133 +464,149 @@ def test__post__creating_a_element(self): def test_delete__service__without_lookups(self): self.headers(academy=1) - model = self.generate_models(authenticate=True, - role=1, - capability='crud_event', - profile_academy=1, - mentorship_service=(2)) + model = self.generate_models( + authenticate=True, role=1, capability="crud_event", profile_academy=1, mentorship_service=(2) + ) - url = reverse_lazy('mentorship:academy_service') + url = reverse_lazy("mentorship:academy_service") response = self.client.delete(url) json = response.json() - expected = {'detail': 'without-lookups-and-service-id', 'status_code': 400} + expected = {"detail": "without-lookups-and-service-id", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, 400) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipService'), - self.bc.format.to_dict(model.mentorship_service)) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipService"), self.bc.format.to_dict(model.mentorship_service) + ) def test_service__delete__can_delete(self): self.headers(academy=1) - model = self.generate_models(authenticate=True, - role=1, - capability='crud_event', - profile_academy=1, - mentorship_service=(2)) + model = self.generate_models( + authenticate=True, role=1, capability="crud_event", profile_academy=1, mentorship_service=(2) + ) - url = reverse_lazy( - 'mentorship:academy_service') + f'?id={",".join([str(x.id) for x in model.mentorship_service])}' + url = ( + reverse_lazy("mentorship:academy_service") + + f'?id={",".join([str(x.id) for x in model.mentorship_service])}' + ) response = self.client.delete(url) self.assertEqual(response.status_code, 204) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipService'), []) + self.assertEqual(self.bc.database.list_of("mentorship.MentorshipService"), []) def test_service__delete__all_errors_and_success_cases(self): - can_delete_services = [{ - 'slug': self.bc.fake.slug(), - 'academy_id': 1, - }] - services_from_other_academy = [{ - 'academy_id': 2, - 'slug': self.bc.fake.slug(), - }, { - 'academy_id': 2, - 'slug': self.bc.fake.slug(), - }] - services_with_mentor = [{ - 'academy_id': 1, - 'slug': self.bc.fake.slug(), - }] - services_with_session = [{ - 'academy_id': 1, - 'slug': self.bc.fake.slug(), - }] + can_delete_services = [ + { + "slug": self.bc.fake.slug(), + "academy_id": 1, + } + ] + services_from_other_academy = [ + { + "academy_id": 2, + "slug": self.bc.fake.slug(), + }, + { + "academy_id": 2, + "slug": self.bc.fake.slug(), + }, + ] + services_with_mentor = [ + { + "academy_id": 1, + "slug": self.bc.fake.slug(), + } + ] + services_with_session = [ + { + "academy_id": 1, + "slug": self.bc.fake.slug(), + } + ] services = 
can_delete_services + services_from_other_academy + services_with_mentor + services_with_session - model = self.generate_models(user=1, - role=1, - academy=2, - capability='crud_event', - profile_academy=1, - mentorship_service=services, - mentor_profile={ - 'slug': 1, - 'services': '4' - }, - mentorship_session={ - 'slug': 1, - 'service_id': 5 - }) + model = self.generate_models( + user=1, + role=1, + academy=2, + capability="crud_event", + profile_academy=1, + mentorship_service=services, + mentor_profile={"slug": 1, "services": "4"}, + mentorship_session={"slug": 1, "service_id": 5}, + ) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy( - 'mentorship:academy_service') + f'?id={",".join([str(x.id) for x in model.mentorship_service])}' + url = ( + reverse_lazy("mentorship:academy_service") + + f'?id={",".join([str(x.id) for x in model.mentorship_service])}' + ) response = self.client.delete(url) json = response.json() expected = { - 'success': [{ - 'status_code': - 204, - 'resources': [{ - 'pk': model.mentorship_service[0].id, - 'display_field': 'slug', - 'display_value': model.mentorship_service[0].slug, - }], - }], - 'failure': [{ - 'detail': - 'not-found', - 'status_code': - 400, - 'resources': [{ - 'pk': model.mentorship_service[1].id, - 'display_field': 'slug', - 'display_value': model.mentorship_service[1].slug, - }, { - 'pk': model.mentorship_service[2].id, - 'display_field': 'slug', - 'display_value': model.mentorship_service[2].slug, - }], - }, { - 'detail': - 'service-with-mentor', - 'status_code': - 400, - 'resources': [{ - 'pk': model.mentorship_service[3].id, - 'display_field': 'slug', - 'display_value': model.mentorship_service[3].slug, - }], - }, { - 'detail': - 'service-with-session', - 'status_code': - 400, - 'resources': [{ - 'pk': model.mentorship_service[4].id, - 'display_field': 'slug', - 'display_value': model.mentorship_service[4].slug, - }], - }] + "success": [ + { + "status_code": 204, + "resources": [ + { + "pk": model.mentorship_service[0].id, + "display_field": "slug", + "display_value": model.mentorship_service[0].slug, + } + ], + } + ], + "failure": [ + { + "detail": "not-found", + "status_code": 400, + "resources": [ + { + "pk": model.mentorship_service[1].id, + "display_field": "slug", + "display_value": model.mentorship_service[1].slug, + }, + { + "pk": model.mentorship_service[2].id, + "display_field": "slug", + "display_value": model.mentorship_service[2].slug, + }, + ], + }, + { + "detail": "service-with-mentor", + "status_code": 400, + "resources": [ + { + "pk": model.mentorship_service[3].id, + "display_field": "slug", + "display_value": model.mentorship_service[3].slug, + } + ], + }, + { + "detail": "service-with-session", + "status_code": 400, + "resources": [ + { + "pk": model.mentorship_service[4].id, + "display_field": "slug", + "display_value": model.mentorship_service[4].slug, + } + ], + }, + ], } self.assertEqual(json, expected) self.assertEqual(response.status_code, 207) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipService'), - self.bc.format.to_dict(model.mentorship_service[1:])) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipService"), + self.bc.format.to_dict(model.mentorship_service[1:]), + ) diff --git a/breathecode/mentorship/tests/urls/tests_academy_service_id.py b/breathecode/mentorship/tests/urls/tests_academy_service_id.py index ce081fc58..2647ec8fd 100644 --- a/breathecode/mentorship/tests/urls/tests_academy_service_id.py +++ 
b/breathecode/mentorship/tests/urls/tests_academy_service_id.py
@@ -1,6 +1,7 @@
"""
This file just can contains duck tests refert to AcademyInviteView
"""
+
import random
from datetime import timedelta
from unittest.mock import MagicMock, call, patch
@@ -18,74 +19,74 @@
def get_serializer(self, mentorship_service, academy, data={}):
    return {
-        'academy': {
-            'icon_url': academy.icon_url,
-            'id': academy.id,
-            'logo_url': academy.logo_url,
-            'name': academy.name,
-            'slug': academy.slug,
+        "academy": {
+            "icon_url": academy.icon_url,
+            "id": academy.id,
+            "logo_url": academy.logo_url,
+            "name": academy.name,
+            "slug": academy.slug,
        },
-        'allow_mentee_to_extend': mentorship_service.allow_mentee_to_extend,
-        'allow_mentors_to_extend': mentorship_service.allow_mentors_to_extend,
-        'created_at': self.bc.datetime.to_iso_string(mentorship_service.created_at),
-        'description': mentorship_service.description,
-        'duration': self.bc.datetime.from_timedelta(mentorship_service.duration),
-        'id': mentorship_service.id,
-        'language': mentorship_service.language,
-        'logo_url': mentorship_service.logo_url,
-        'max_duration': self.bc.datetime.from_timedelta(mentorship_service.max_duration),
-        'missed_meeting_duration': self.bc.datetime.from_timedelta(mentorship_service.missed_meeting_duration),
-        'name': mentorship_service.name,
-        'slug': mentorship_service.slug,
-        'status': mentorship_service.status,
-        'updated_at': self.bc.datetime.to_iso_string(mentorship_service.updated_at),
+        "allow_mentee_to_extend": mentorship_service.allow_mentee_to_extend,
+        "allow_mentors_to_extend": mentorship_service.allow_mentors_to_extend,
+        "created_at": self.bc.datetime.to_iso_string(mentorship_service.created_at),
+        "description": mentorship_service.description,
+        "duration": self.bc.datetime.from_timedelta(mentorship_service.duration),
+        "id": mentorship_service.id,
+        "language": mentorship_service.language,
+        "logo_url": mentorship_service.logo_url,
+        "max_duration": self.bc.datetime.from_timedelta(mentorship_service.max_duration),
+        "missed_meeting_duration": self.bc.datetime.from_timedelta(mentorship_service.missed_meeting_duration),
+        "name": mentorship_service.name,
+        "slug": mentorship_service.slug,
+        "status": mentorship_service.status,
+        "updated_at": self.bc.datetime.to_iso_string(mentorship_service.updated_at),
        **data,
    }
def put_serializer(self, mentorship_service, academy, data={}):
    return {
-        'academy': {
-            'icon_url': academy.icon_url,
-            'id': academy.id,
-            'logo_url': academy.logo_url,
-            'name': academy.name,
-            'slug': academy.slug,
+        "academy": {
+            "icon_url": academy.icon_url,
+            "id": academy.id,
+            "logo_url": academy.logo_url,
+            "name": academy.name,
+            "slug": academy.slug,
        },
-        'allow_mentee_to_extend': mentorship_service.allow_mentee_to_extend,
-        'allow_mentors_to_extend': mentorship_service.allow_mentors_to_extend,
-        'created_at': self.bc.datetime.to_iso_string(mentorship_service.created_at),
-        'description': mentorship_service.description,
-        'duration': self.bc.datetime.from_timedelta(mentorship_service.duration),
-        'id': mentorship_service.id,
-        'language': mentorship_service.language,
-        'logo_url': mentorship_service.logo_url,
-        'max_duration': self.bc.datetime.from_timedelta(mentorship_service.max_duration),
-        'missed_meeting_duration': self.bc.datetime.from_timedelta(mentorship_service.missed_meeting_duration),
-        'name': mentorship_service.name,
-        'slug': mentorship_service.slug,
-        'status': mentorship_service.status,
-        'updated_at': self.bc.datetime.to_iso_string(UTC_NOW),
+
"allow_mentee_to_extend": mentorship_service.allow_mentee_to_extend, + "allow_mentors_to_extend": mentorship_service.allow_mentors_to_extend, + "created_at": self.bc.datetime.to_iso_string(mentorship_service.created_at), + "description": mentorship_service.description, + "duration": self.bc.datetime.from_timedelta(mentorship_service.duration), + "id": mentorship_service.id, + "language": mentorship_service.language, + "logo_url": mentorship_service.logo_url, + "max_duration": self.bc.datetime.from_timedelta(mentorship_service.max_duration), + "missed_meeting_duration": self.bc.datetime.from_timedelta(mentorship_service.missed_meeting_duration), + "name": mentorship_service.name, + "slug": mentorship_service.slug, + "status": mentorship_service.status, + "updated_at": self.bc.datetime.to_iso_string(UTC_NOW), **data, } def mentorship_service_columns(mentorship_service, data={}): return { - 'academy_id': mentorship_service.academy_id, - 'allow_mentee_to_extend': mentorship_service.allow_mentee_to_extend, - 'allow_mentors_to_extend': mentorship_service.allow_mentors_to_extend, - 'description': mentorship_service.description, - 'duration': mentorship_service.duration, - 'id': mentorship_service.id, - 'language': mentorship_service.language, - 'logo_url': mentorship_service.logo_url, - 'max_duration': mentorship_service.max_duration, - 'missed_meeting_duration': mentorship_service.missed_meeting_duration, - 'name': mentorship_service.name, - 'slug': mentorship_service.slug, - 'status': mentorship_service.status, - 'video_provider': mentorship_service.video_provider, + "academy_id": mentorship_service.academy_id, + "allow_mentee_to_extend": mentorship_service.allow_mentee_to_extend, + "allow_mentors_to_extend": mentorship_service.allow_mentors_to_extend, + "description": mentorship_service.description, + "duration": mentorship_service.duration, + "id": mentorship_service.id, + "language": mentorship_service.language, + "logo_url": mentorship_service.logo_url, + "max_duration": mentorship_service.max_duration, + "missed_meeting_duration": mentorship_service.missed_meeting_duration, + "name": mentorship_service.name, + "slug": mentorship_service.slug, + "status": mentorship_service.status, + "video_provider": mentorship_service.video_provider, **data, } @@ -96,13 +97,13 @@ class AcademyServiceTestSuite(MentorshipTestCase): """ def test__get__without_auth(self): - url = reverse_lazy('mentorship:academy_service_id', kwargs={'service_id': 1}) + url = reverse_lazy("mentorship:academy_service_id", kwargs={"service_id": 1}) response = self.client.get(url) json = response.json() expected = { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED, + "detail": "Authentication credentials were not provided.", + "status_code": status.HTTP_401_UNAUTHORIZED, } self.assertEqual(json, expected) @@ -113,13 +114,13 @@ def test__get__without_academy_header(self): self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_service_id', kwargs={'service_id': 1}) + url = reverse_lazy("mentorship:academy_service_id", kwargs={"service_id": 1}) response = self.client.get(url) json = response.json() expected = { - 'detail': "Missing academy_id parameter expected for the endpoint url or 'Academy' header", - 'status_code': 403, + "detail": "Missing academy_id parameter expected for the endpoint url or 'Academy' header", + "status_code": 403, } self.assertEqual(json, expected) @@ -135,13 +136,13 @@ def test__get__without_capabilities(self): 
self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_service_id', kwargs={'service_id': 1}) + url = reverse_lazy("mentorship:academy_service_id", kwargs={"service_id": 1}) response = self.client.get(url) json = response.json() expected = { - 'detail': "You (user: 1) don't have this capability: read_mentorship_service for academy 1", - 'status_code': 403, + "detail": "You (user: 1) don't have this capability: read_mentorship_service for academy 1", + "status_code": 403, } self.assertEqual(json, expected) @@ -152,36 +153,34 @@ def test__get__without_capabilities(self): """ def test__get__without_data(self): - model = self.bc.database.create(user=1, role=1, capability='read_mentorship_service', profile_academy=1) + model = self.bc.database.create(user=1, role=1, capability="read_mentorship_service", profile_academy=1) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_service_id', kwargs={'service_id': 1}) + url = reverse_lazy("mentorship:academy_service_id", kwargs={"service_id": 1}) response = self.client.get(url) json = response.json() - expected = {'detail': 'not-found', 'status_code': 404} + expected = {"detail": "not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipService'), []) + self.assertEqual(self.bc.database.list_of("mentorship.MentorshipService"), []) """ 🔽🔽🔽 GET MentorshipService found """ def test__get__with_one_mentorship_service(self): - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_service', - mentorship_service=1, - profile_academy=1) + model = self.bc.database.create( + user=1, role=1, capability="read_mentorship_service", mentorship_service=1, profile_academy=1 + ) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_service_id', kwargs={'service_id': 1}) + url = reverse_lazy("mentorship:academy_service_id", kwargs={"service_id": 1}) response = self.client.get(url) json = response.json() @@ -189,32 +188,41 @@ def test__get__with_one_mentorship_service(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipService'), [ - self.bc.format.to_dict(model.mentorship_service), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipService"), + [ + self.bc.format.to_dict(model.mentorship_service), + ], + ) """ 🔽🔽🔽 Spy the extensions """ - @patch.object(APIViewExtensionHandlers, '_spy_extensions', MagicMock()) - @patch.object(APIViewExtensionHandlers, '_spy_extension_arguments', MagicMock()) + @patch.object(APIViewExtensionHandlers, "_spy_extensions", MagicMock()) + @patch.object(APIViewExtensionHandlers, "_spy_extension_arguments", MagicMock()) def test__get__spy_extensions(self): - model = self.bc.database.create(user=1, role=1, capability='read_mentorship_service', profile_academy=1) + model = self.bc.database.create(user=1, role=1, capability="read_mentorship_service", profile_academy=1) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_service_id', kwargs={'service_id': 1}) + url = reverse_lazy("mentorship:academy_service_id", kwargs={"service_id": 1}) self.client.get(url) - 
self.assertEqual(APIViewExtensionHandlers._spy_extensions.call_args_list, [ - call(['LanguageExtension', 'LookupExtension', 'PaginationExtension', 'SortExtension']), - ]) + self.assertEqual( + APIViewExtensionHandlers._spy_extensions.call_args_list, + [ + call(["LanguageExtension", "LookupExtension", "PaginationExtension", "SortExtension"]), + ], + ) - self.assertEqual(APIViewExtensionHandlers._spy_extension_arguments.call_args_list, [ - call(sort='-created_at', paginate=True), - ]) + self.assertEqual( + APIViewExtensionHandlers._spy_extension_arguments.call_args_list, + [ + call(sort="-created_at", paginate=True), + ], + ) """ 🔽🔽🔽 PUT capability @@ -226,13 +234,13 @@ def test__put__without_capabilities(self): self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_service_id', kwargs={'service_id': 1}) + url = reverse_lazy("mentorship:academy_service_id", kwargs={"service_id": 1}) response = self.client.put(url) json = response.json() expected = { - 'detail': "You (user: 1) don't have this capability: crud_mentorship_service for academy 1", - 'status_code': 403, + "detail": "You (user: 1) don't have this capability: crud_mentorship_service for academy 1", + "status_code": 403, } self.assertEqual(json, expected) @@ -243,37 +251,35 @@ def test__put__without_capabilities(self): """ def test__put__without_data(self): - model = self.bc.database.create(user=1, role=1, capability='crud_mentorship_service', profile_academy=1) + model = self.bc.database.create(user=1, role=1, capability="crud_mentorship_service", profile_academy=1) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_service_id', kwargs={'service_id': 1}) + url = reverse_lazy("mentorship:academy_service_id", kwargs={"service_id": 1}) response = self.client.put(url) json = response.json() - expected = {'detail': 'not-found', 'status_code': 404} + expected = {"detail": "not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipService'), []) + self.assertEqual(self.bc.database.list_of("mentorship.MentorshipService"), []) """ 🔽🔽🔽 PUT MentorshipService found """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__put__with_one_mentorship_service(self): - model = self.bc.database.create(user=1, - role=1, - capability='crud_mentorship_service', - mentorship_service=1, - profile_academy=1) + model = self.bc.database.create( + user=1, role=1, capability="crud_mentorship_service", mentorship_service=1, profile_academy=1 + ) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_service_id', kwargs={'service_id': 1}) + url = reverse_lazy("mentorship:academy_service_id", kwargs={"service_id": 1}) response = self.client.put(url) json = response.json() @@ -281,43 +287,47 @@ def test__put__with_one_mentorship_service(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipService'), [ - self.bc.format.to_dict(model.mentorship_service), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipService"), + [ + self.bc.format.to_dict(model.mentorship_service), + ], + ) """ 🔽🔽🔽 PUT 
MentorshipService found """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__put__with_one_mentorship_service__passing_arguments(self): - model = self.bc.database.create(user=1, - role=1, - capability='crud_mentorship_service', - mentorship_service=1, - profile_academy=1) + model = self.bc.database.create( + user=1, role=1, capability="crud_mentorship_service", mentorship_service=1, profile_academy=1 + ) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_service_id', kwargs={'service_id': 1}) + url = reverse_lazy("mentorship:academy_service_id", kwargs={"service_id": 1}) data = { - 'name': self.bc.fake.name(), - 'logo_url': self.bc.fake.url(), - 'logo_url': self.bc.fake.url(), - 'allow_mentee_to_extend': bool(random.getrandbits(1)), - 'allow_mentors_to_extend': bool(random.getrandbits(1)), - 'language': 'es', - 'status': random.choices(['DRAFT', 'ACTIVE', 'UNLISTED', 'INNACTIVE'])[0], - 'description': self.bc.fake.text(), + "name": self.bc.fake.name(), + "logo_url": self.bc.fake.url(), + "logo_url": self.bc.fake.url(), + "allow_mentee_to_extend": bool(random.getrandbits(1)), + "allow_mentors_to_extend": bool(random.getrandbits(1)), + "language": "es", + "status": random.choices(["DRAFT", "ACTIVE", "UNLISTED", "INNACTIVE"])[0], + "description": self.bc.fake.text(), } - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() expected = put_serializer(self, model.mentorship_service, model.academy, data=data) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipService'), [ - mentorship_service_columns(model.mentorship_service, data), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipService"), + [ + mentorship_service_columns(model.mentorship_service, data), + ], + ) diff --git a/breathecode/mentorship/tests/urls/tests_academy_service_id_session.py b/breathecode/mentorship/tests/urls/tests_academy_service_id_session.py index aad95d4d9..de5b73b71 100644 --- a/breathecode/mentorship/tests/urls/tests_academy_service_id_session.py +++ b/breathecode/mentorship/tests/urls/tests_academy_service_id_session.py @@ -1,6 +1,7 @@ """ This file just can contains duck tests refert to AcademyInviteView """ + import hashlib from datetime import timedelta from unittest.mock import MagicMock, call, patch @@ -19,29 +20,29 @@ def get_tooltip(obj): - message = f'This mentorship should last no longer than {int(obj.service.duration.seconds/60)} min. <br />' + message = f"This mentorship should last no longer than {int(obj.service.duration.seconds/60)} min. <br />" if obj.started_at is None: - message += 'The mentee never joined the session. <br />' + message += "The mentee never joined the session. <br />" else: message += f'Started on {obj.started_at.strftime("%m/%d/%Y at %H:%M:%S")}. <br />' if obj.mentor_joined_at is None: - message += f'The mentor never joined' + message += f"The mentor never joined" elif obj.mentor_joined_at > obj.started_at: - message += f'The mentor joined {duration_to_str(obj.mentor_joined_at - obj.started_at)} before. <br />' + message += f"The mentor joined {duration_to_str(obj.mentor_joined_at - obj.started_at)} before. 
<br />" elif obj.started_at > obj.mentor_joined_at: - message += f'The mentor joined {duration_to_str(obj.started_at - obj.mentor_joined_at)} after. <br />' + message += f"The mentor joined {duration_to_str(obj.started_at - obj.mentor_joined_at)} after. <br />" if obj.ended_at is not None: - message += f'The mentorship lasted {duration_to_str(obj.ended_at - obj.started_at)}. <br />' + message += f"The mentorship lasted {duration_to_str(obj.ended_at - obj.started_at)}. <br />" if (obj.ended_at - obj.started_at) > obj.mentor.service.duration: extra_time = (obj.ended_at - obj.started_at) - obj.mentor.service.duration - message += f'With extra time of {duration_to_str(extra_time)}. <br />' + message += f"With extra time of {duration_to_str(extra_time)}. <br />" else: - message += f'No extra time detected <br />' + message += f"No extra time detected <br />" else: - message += f'The mentorship has not ended yet. <br />' + message += f"The mentorship has not ended yet. <br />" if obj.ends_at is not None: - message += f'But it was supposed to end after {duration_to_str(obj.ends_at - obj.started_at)} <br />' + message += f"But it was supposed to end after {duration_to_str(obj.ends_at - obj.started_at)} <br />" return message @@ -49,17 +50,17 @@ def get_tooltip(obj): def get_duration_string(obj): if obj.started_at is None: - return 'Never started' + return "Never started" end_date = obj.ended_at if end_date is None: - return 'Never ended' + return "Never ended" if obj.started_at > end_date: - return 'Ended before it started' + return "Ended before it started" if (end_date - obj.started_at).days > 1: - return f'Many days' + return f"Many days" return duration_to_str(obj.ended_at - obj.started_at) @@ -78,11 +79,11 @@ def get_extra_time(obj): return None if (obj.ended_at - obj.started_at).days > 1: - return f'Many days of extra time, probably it was never closed' + return f"Many days of extra time, probably it was never closed" if (obj.ended_at - obj.started_at) > obj.mentor.service.duration: extra_time = (obj.ended_at - obj.started_at) - obj.mentor.service.duration - return f'Extra time of {duration_to_str(extra_time)}, the expected duration was {duration_to_str(obj.mentor.service.duration)}' + return f"Extra time of {duration_to_str(extra_time)}, the expected duration was {duration_to_str(obj.mentor.service.duration)}" else: return None @@ -93,7 +94,7 @@ def get_mentor_late(obj): return None if obj.started_at > obj.mentor_joined_at and (obj.started_at - obj.mentor_joined_at).seconds > (60 * 4): - return f'The mentor joined {duration_to_str(obj.started_at - obj.mentor_joined_at)} after. <br />' + return f"The mentor joined {duration_to_str(obj.started_at - obj.mentor_joined_at)} after. 
<br />" else: return None @@ -101,7 +102,7 @@ def get_mentor_late(obj): def get_mente_joined(obj): if obj.started_at is None: - return 'Session did not start because mentee never joined' + return "Session did not start because mentee never joined" else: return True @@ -119,119 +120,100 @@ def get_rating(obj): def get_serializer(self, mentorship_session, mentor_profile, mentorship_service, user, data={}): return { - 'accounted_duration': mentorship_session.accounted_duration, - 'billed_str': get_billed_str(mentorship_session), - 'duration_string': get_duration_string(mentorship_session), - 'ended_at': - self.bc.datetime.to_iso_string(mentorship_session.ended_at) if mentorship_session.ended_at else None, - 'extra_time': get_extra_time(mentorship_session), - 'id': mentorship_session.id, - 'mentee_joined': get_mente_joined(mentorship_session), - 'mentee': { - 'email': user.email, - 'first_name': user.first_name, - 'id': user.id, - 'last_name': user.last_name, + "accounted_duration": mentorship_session.accounted_duration, + "billed_str": get_billed_str(mentorship_session), + "duration_string": get_duration_string(mentorship_session), + "ended_at": ( + self.bc.datetime.to_iso_string(mentorship_session.ended_at) if mentorship_session.ended_at else None + ), + "extra_time": get_extra_time(mentorship_session), + "id": mentorship_session.id, + "mentee_joined": get_mente_joined(mentorship_session), + "mentee": { + "email": user.email, + "first_name": user.first_name, + "id": user.id, + "last_name": user.last_name, }, - 'mentee_left_at': mentorship_session.mentee_left_at, - 'mentor': { - 'booking_url': - mentor_profile.booking_url, - 'created_at': - self.bc.datetime.to_iso_string(mentor_profile.created_at), - 'email': - mentor_profile.email, - 'id': - mentor_profile.id, - 'one_line_bio': - mentor_profile.one_line_bio, - 'online_meeting_url': - mentor_profile.online_meeting_url, - 'price_per_hour': - mentor_profile.price_per_hour, - 'rating': - mentor_profile.rating, - 'services': [{ - 'academy': { - 'icon_url': mentorship_service.academy.icon_url, - 'id': mentorship_service.academy.id, - 'logo_url': mentorship_service.academy.logo_url, - 'name': mentorship_service.academy.name, - 'slug': mentorship_service.academy.slug, - }, - 'allow_mentee_to_extend': - mentorship_service.allow_mentee_to_extend, - 'allow_mentors_to_extend': - mentorship_service.allow_mentors_to_extend, - 'created_at': - self.bc.datetime.to_iso_string(mentorship_service.created_at), - 'duration': - self.bc.datetime.from_timedelta(mentorship_service.duration), - 'id': - mentorship_service.id, - 'language': - mentorship_service.language, - 'logo_url': - mentorship_service.logo_url, - 'max_duration': - self.bc.datetime.from_timedelta(mentorship_service.max_duration), - 'missed_meeting_duration': - self.bc.datetime.from_timedelta(mentorship_service.missed_meeting_duration), - 'name': - mentorship_service.name, - 'slug': - mentorship_service.slug, - 'status': - mentorship_service.status, - 'updated_at': - self.bc.datetime.to_iso_string(mentorship_service.updated_at), - }], - 'slug': - mentor_profile.slug, - 'status': - mentor_profile.status, - 'timezone': - mentor_profile.timezone, - 'updated_at': - self.bc.datetime.to_iso_string(mentor_profile.updated_at), - 'user': { - 'email': user.email, - 'first_name': user.first_name, - 'id': user.id, - 'last_name': user.last_name, - } + "mentee_left_at": mentorship_session.mentee_left_at, + "mentor": { + "booking_url": mentor_profile.booking_url, + "created_at": 
self.bc.datetime.to_iso_string(mentor_profile.created_at), + "email": mentor_profile.email, + "id": mentor_profile.id, + "one_line_bio": mentor_profile.one_line_bio, + "online_meeting_url": mentor_profile.online_meeting_url, + "price_per_hour": mentor_profile.price_per_hour, + "rating": mentor_profile.rating, + "services": [ + { + "academy": { + "icon_url": mentorship_service.academy.icon_url, + "id": mentorship_service.academy.id, + "logo_url": mentorship_service.academy.logo_url, + "name": mentorship_service.academy.name, + "slug": mentorship_service.academy.slug, + }, + "allow_mentee_to_extend": mentorship_service.allow_mentee_to_extend, + "allow_mentors_to_extend": mentorship_service.allow_mentors_to_extend, + "created_at": self.bc.datetime.to_iso_string(mentorship_service.created_at), + "duration": self.bc.datetime.from_timedelta(mentorship_service.duration), + "id": mentorship_service.id, + "language": mentorship_service.language, + "logo_url": mentorship_service.logo_url, + "max_duration": self.bc.datetime.from_timedelta(mentorship_service.max_duration), + "missed_meeting_duration": self.bc.datetime.from_timedelta( + mentorship_service.missed_meeting_duration + ), + "name": mentorship_service.name, + "slug": mentorship_service.slug, + "status": mentorship_service.status, + "updated_at": self.bc.datetime.to_iso_string(mentorship_service.updated_at), + } + ], + "slug": mentor_profile.slug, + "status": mentor_profile.status, + "timezone": mentor_profile.timezone, + "updated_at": self.bc.datetime.to_iso_string(mentor_profile.updated_at), + "user": { + "email": user.email, + "first_name": user.first_name, + "id": user.id, + "last_name": user.last_name, + }, }, - 'mentor_joined_at': mentorship_session.mentor_joined_at, - 'mentor_late': get_mentor_late(mentorship_session), - 'mentor_left_at': mentorship_session.mentor_left_at, - 'rating': get_rating(mentorship_session), - 'started_at': - self.bc.datetime.to_iso_string(mentorship_session.started_at) if mentorship_session.started_at else None, - 'status': mentorship_session.status, - 'status_message': mentorship_session.status_message, - 'suggested_accounted_duration': mentorship_session.suggested_accounted_duration, - 'summary': mentorship_session.summary, - 'tooltip': get_tooltip(mentorship_session), + "mentor_joined_at": mentorship_session.mentor_joined_at, + "mentor_late": get_mentor_late(mentorship_session), + "mentor_left_at": mentorship_session.mentor_left_at, + "rating": get_rating(mentorship_session), + "started_at": ( + self.bc.datetime.to_iso_string(mentorship_session.started_at) if mentorship_session.started_at else None + ), + "status": mentorship_session.status, + "status_message": mentorship_session.status_message, + "suggested_accounted_duration": mentorship_session.suggested_accounted_duration, + "summary": mentorship_session.summary, + "tooltip": get_tooltip(mentorship_session), **data, } def mentor_profile_columns(data={}): - token = hashlib.sha1((str(data['slug'] if 'slug' in data else '') + str(UTC_NOW)).encode('UTF-8')).hexdigest() + token = hashlib.sha1((str(data["slug"] if "slug" in data else "") + str(UTC_NOW)).encode("UTF-8")).hexdigest() return { - 'bio': None, - 'booking_url': None, - 'email': None, - 'id': 0, - 'name': '', - 'online_meeting_url': None, - 'price_per_hour': 0, - 'service_id': 0, - 'slug': 'mirai-nikki', - 'status': 'INVITED', - 'timezone': None, - 'token': token, - 'user_id': 0, + "bio": None, + "booking_url": None, + "email": None, + "id": 0, + "name": "", + "online_meeting_url": None, + 
"price_per_hour": 0, + "service_id": 0, + "slug": "mirai-nikki", + "status": "INVITED", + "timezone": None, + "token": token, + "user_id": 0, **data, } @@ -242,13 +224,13 @@ class AcademyServiceTestSuite(MentorshipTestCase): """ def test__get__without_auth(self): - url = reverse_lazy('mentorship:academy_service_id_session', kwargs={'service_id': 1}) + url = reverse_lazy("mentorship:academy_service_id_session", kwargs={"service_id": 1}) response = self.client.get(url) json = response.json() expected = { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED, + "detail": "Authentication credentials were not provided.", + "status_code": status.HTTP_401_UNAUTHORIZED, } self.assertEqual(json, expected) @@ -259,13 +241,13 @@ def test__get__without_academy_header(self): self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_service_id_session', kwargs={'service_id': 1}) + url = reverse_lazy("mentorship:academy_service_id_session", kwargs={"service_id": 1}) response = self.client.get(url) json = response.json() expected = { - 'detail': "Missing academy_id parameter expected for the endpoint url or 'Academy' header", - 'status_code': 403, + "detail": "Missing academy_id parameter expected for the endpoint url or 'Academy' header", + "status_code": 403, } self.assertEqual(json, expected) @@ -281,13 +263,13 @@ def test__get__without_capabilities(self): self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_service_id_session', kwargs={'service_id': 1}) + url = reverse_lazy("mentorship:academy_service_id_session", kwargs={"service_id": 1}) response = self.client.get(url) json = response.json() expected = { - 'detail': "You (user: 1) don't have this capability: read_mentorship_session for academy 1", - 'status_code': 403, + "detail": "You (user: 1) don't have this capability: read_mentorship_session for academy 1", + "status_code": 403, } self.assertEqual(json, expected) @@ -298,12 +280,12 @@ def test__get__without_capabilities(self): """ def test__get__without_data(self): - model = self.bc.database.create(user=1, role=1, capability='read_mentorship_session', profile_academy=1) + model = self.bc.database.create(user=1, role=1, capability="read_mentorship_session", profile_academy=1) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_service_id_session', kwargs={'service_id': 1}) + url = reverse_lazy("mentorship:academy_service_id_session", kwargs={"service_id": 1}) response = self.client.get(url) json = response.json() @@ -317,70 +299,72 @@ def test__get__without_data(self): """ def test__get__with_one_mentor_profile(self): - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_session', - mentorship_session=1, - mentor_profile=1, - mentorship_service=1, - profile_academy=1) + model = self.bc.database.create( + user=1, + role=1, + capability="read_mentorship_session", + mentorship_session=1, + mentor_profile=1, + mentorship_service=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_service_id_session', kwargs={'service_id': 1}) + url = reverse_lazy("mentorship:academy_service_id_session", kwargs={"service_id": 1}) response = self.client.get(url) json = response.json() expected = [ - get_serializer(self, - model.mentorship_session, - model.mentor_profile, - 
model.mentorship_service, - model.user, - data={}), + get_serializer( + self, model.mentorship_session, model.mentor_profile, model.mentorship_service, model.user, data={} + ), ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - self.bc.format.to_dict(model.mentorship_session), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + self.bc.format.to_dict(model.mentorship_session), + ], + ) """ 🔽🔽🔽 GET with two MentorshipSession, one MentorProfile and one MentorshipService """ def test__get__with_two_mentor_profile(self): - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_session', - mentorship_session=2, - mentor_profile=1, - mentorship_service=1, - profile_academy=1) + model = self.bc.database.create( + user=1, + role=1, + capability="read_mentorship_session", + mentorship_session=2, + mentor_profile=1, + mentorship_service=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_service_id_session', kwargs={'service_id': 1}) + url = reverse_lazy("mentorship:academy_service_id_session", kwargs={"service_id": 1}) response = self.client.get(url) json = response.json() mentorship_session_list = sorted(model.mentorship_session, key=lambda x: x.created_at, reverse=True) expected = [ - get_serializer(self, - mentorship_session, - model.mentor_profile, - model.mentorship_service, - model.user, - data={}) for mentorship_session in mentorship_session_list + get_serializer( + self, mentorship_session, model.mentor_profile, model.mentorship_service, model.user, data={} + ) + for mentorship_session in mentorship_session_list ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('mentorship.MentorshipSession'), + self.bc.database.list_of("mentorship.MentorshipSession"), self.bc.format.to_dict(model.mentorship_session), ) @@ -388,30 +372,34 @@ def test__get__with_two_mentor_profile(self): 🔽🔽🔽 GET with two MentorshipSession, one MentorProfile and one MentorshipService, passing status """ - @patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) + @patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) def test__get__with_two_mentor_profile__passing_bad_status(self): - statuses = ['PENDING', 'STARTED', 'COMPLETED', 'FAILED', 'IGNORED'] + statuses = ["PENDING", "STARTED", "COMPLETED", "FAILED", "IGNORED"] for n in range(0, 4): first_status = statuses[n] second_status = statuses[n + 1] choices = [first_status, second_status] - mentorship_sessions = [{'status': x} for x in choices] - bad_statuses = ','.join([x for x in statuses if x not in choices]) - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_session', - mentorship_session=mentorship_sessions, - mentor_profile=1, - mentorship_service=1, - profile_academy=1) + mentorship_sessions = [{"status": x} for x in choices] + bad_statuses = ",".join([x for x in statuses if x not in choices]) + model = self.bc.database.create( + user=1, + role=1, + capability="read_mentorship_session", + mentorship_session=mentorship_sessions, + mentor_profile=1, + mentorship_service=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=model.academy.id) self.client.force_authenticate(model.user) - url = 
reverse_lazy('mentorship:academy_service_id_session', kwargs={'service_id': 1}) + \ - f'?status={bad_statuses}' + url = ( + reverse_lazy("mentorship:academy_service_id_session", kwargs={"service_id": 1}) + + f"?status={bad_statuses}" + ) response = self.client.get(url) json = response.json() @@ -420,85 +408,93 @@ def test__get__with_two_mentor_profile__passing_bad_status(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('mentorship.MentorshipSession'), + self.bc.database.list_of("mentorship.MentorshipSession"), self.bc.format.to_dict(model.mentorship_session), ) # teardown - self.bc.database.delete('mentorship.MentorshipSession') + self.bc.database.delete("mentorship.MentorshipSession") - @patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) + @patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) def test__get__with_two_mentor_profile__passing_status(self): - statuses = ['PENDING', 'STARTED', 'COMPLETED', 'FAILED', 'IGNORED'] + statuses = ["PENDING", "STARTED", "COMPLETED", "FAILED", "IGNORED"] for n in range(0, 4): first_status = statuses[n] second_status = statuses[n + 1] choices = [first_status, second_status] - mentorship_sessions = [{'status': x} for x in choices] - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_session', - mentorship_session=mentorship_sessions, - mentor_profile=1, - mentorship_service=1, - profile_academy=1) + mentorship_sessions = [{"status": x} for x in choices] + model = self.bc.database.create( + user=1, + role=1, + capability="read_mentorship_session", + mentorship_session=mentorship_sessions, + mentor_profile=1, + mentorship_service=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=model.academy.id) self.client.force_authenticate(model.user) url = ( - reverse_lazy('mentorship:academy_service_id_session', kwargs={'service_id': model.mentor_profile.id}) + - f'?status={first_status},{second_status}') + reverse_lazy("mentorship:academy_service_id_session", kwargs={"service_id": model.mentor_profile.id}) + + f"?status={first_status},{second_status}" + ) response = self.client.get(url) json = response.json() mentorship_session_list = sorted(model.mentorship_session, key=lambda x: x.created_at, reverse=True) expected = [ - get_serializer(self, - mentorship_session_list[0], - model.mentor_profile, - model.mentorship_service, - model.user, - data={'status': second_status}), - get_serializer(self, - mentorship_session_list[1], - model.mentor_profile, - model.mentorship_service, - model.user, - data={'status': first_status}), + get_serializer( + self, + mentorship_session_list[0], + model.mentor_profile, + model.mentorship_service, + model.user, + data={"status": second_status}, + ), + get_serializer( + self, + mentorship_session_list[1], + model.mentor_profile, + model.mentorship_service, + model.user, + data={"status": first_status}, + ), ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('mentorship.MentorshipSession'), + self.bc.database.list_of("mentorship.MentorshipSession"), self.bc.format.to_dict(model.mentorship_session), ) # teardown - self.bc.database.delete('mentorship.MentorshipSession') + self.bc.database.delete("mentorship.MentorshipSession") """ 🔽🔽🔽 GET with two MentorshipSession, one MentorProfile and one MentorshipService, passing billed """ - 
@patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) + @patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) def test__get__with_two_mentor_profile__passing_billed_as_true__without_mentorship_bill(self): - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_session', - mentorship_session=1, - mentor_profile=1, - mentorship_service=1, - profile_academy=1) + model = self.bc.database.create( + user=1, + role=1, + capability="read_mentorship_session", + mentorship_session=1, + mentor_profile=1, + mentorship_service=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=model.academy.id) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_service_id_session', kwargs={'service_id': 1}) + \ - f'?billed=true' + url = reverse_lazy("mentorship:academy_service_id_session", kwargs={"service_id": 1}) + f"?billed=true" response = self.client.get(url) json = response.json() @@ -506,60 +502,65 @@ def test__get__with_two_mentor_profile__passing_billed_as_true__without_mentorsh self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - self.bc.format.to_dict(model.mentorship_session), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + self.bc.format.to_dict(model.mentorship_session), + ], + ) - @patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) + @patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) def test__get__with_two_mentor_profile__passing_billed_as_true__with_mentorship_bill(self): - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_session', - mentorship_session=1, - mentor_profile=1, - mentorship_bill=1, - mentorship_service=1, - profile_academy=1) + model = self.bc.database.create( + user=1, + role=1, + capability="read_mentorship_session", + mentorship_session=1, + mentor_profile=1, + mentorship_bill=1, + mentorship_service=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=model.academy.id) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_service_id_session', kwargs={'service_id': 1}) + \ - f'?billed=true' + url = reverse_lazy("mentorship:academy_service_id_session", kwargs={"service_id": 1}) + f"?billed=true" response = self.client.get(url) json = response.json() expected = [ - get_serializer(self, - model.mentorship_session, - model.mentor_profile, - model.mentorship_service, - model.user, - data={}), + get_serializer( + self, model.mentorship_session, model.mentor_profile, model.mentorship_service, model.user, data={} + ), ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - self.bc.format.to_dict(model.mentorship_session), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + self.bc.format.to_dict(model.mentorship_session), + ], + ) - @patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) + @patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) def test__get__with_two_mentor_profile__passing_billed_as_false__with_mentorship_bill(self): - model = self.bc.database.create(user=1, - role=1, - 
capability='read_mentorship_session', - mentorship_session=1, - mentor_profile=1, - mentorship_bill=1, - mentorship_service=1, - profile_academy=1) + model = self.bc.database.create( + user=1, + role=1, + capability="read_mentorship_session", + mentorship_session=1, + mentor_profile=1, + mentorship_bill=1, + mentorship_service=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=model.academy.id) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_service_id_session', kwargs={'service_id': 1}) + \ - f'?billed=false' + url = reverse_lazy("mentorship:academy_service_id_session", kwargs={"service_id": 1}) + f"?billed=false" response = self.client.get(url) json = response.json() @@ -567,64 +568,72 @@ def test__get__with_two_mentor_profile__passing_billed_as_false__with_mentorship self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - self.bc.format.to_dict(model.mentorship_session), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + self.bc.format.to_dict(model.mentorship_session), + ], + ) - @patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) + @patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) def test__get__with_two_mentor_profile__passing_billed_as_false__without_mentorship_bill(self): - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_session', - mentorship_session=1, - mentor_profile=1, - mentorship_service=1, - profile_academy=1) + model = self.bc.database.create( + user=1, + role=1, + capability="read_mentorship_session", + mentorship_session=1, + mentor_profile=1, + mentorship_service=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=model.academy.id) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_service_id_session', kwargs={'service_id': 1}) + \ - f'?billed=false' + url = reverse_lazy("mentorship:academy_service_id_session", kwargs={"service_id": 1}) + f"?billed=false" response = self.client.get(url) json = response.json() expected = [ - get_serializer(self, - model.mentorship_session, - model.mentor_profile, - model.mentorship_service, - model.user, - data={}), + get_serializer( + self, model.mentorship_session, model.mentor_profile, model.mentorship_service, model.user, data={} + ), ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - self.bc.format.to_dict(model.mentorship_session), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + self.bc.format.to_dict(model.mentorship_session), + ], + ) """ 🔽🔽🔽 GET with two MentorshipSession, one MentorProfile and one MentorshipService, passing started_after """ - @patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) + @patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) def test__get__with_two_mentor_profile__passing_bad_started_after(self): utc_now = timezone.now() - mentorship_session = {'started_at': utc_now} - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_session', - mentorship_session=mentorship_session, - mentor_profile=1, - mentorship_service=1, - profile_academy=1) + mentorship_session = {"started_at": 
utc_now} + model = self.bc.database.create( + user=1, + role=1, + capability="read_mentorship_session", + mentorship_session=mentorship_session, + mentor_profile=1, + mentorship_service=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=model.academy.id) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_service_id_session', kwargs={'service_id': 1}) + \ - f'?started_after={self.bc.datetime.to_iso_string(utc_now + timedelta(seconds=1))}' + url = ( + reverse_lazy("mentorship:academy_service_id_session", kwargs={"service_id": 1}) + + f"?started_after={self.bc.datetime.to_iso_string(utc_now + timedelta(seconds=1))}" + ) response = self.client.get(url) json = response.json() @@ -632,55 +641,64 @@ def test__get__with_two_mentor_profile__passing_bad_started_after(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - self.bc.format.to_dict(model.mentorship_session), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + self.bc.format.to_dict(model.mentorship_session), + ], + ) - @patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) + @patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) def test__get__with_two_mentor_profile__passing_started_after(self): utc_now = timezone.now() - mentorship_session = {'started_at': utc_now} - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_session', - mentorship_session=mentorship_session, - mentor_profile=1, - mentorship_service=1, - profile_academy=1) + mentorship_session = {"started_at": utc_now} + model = self.bc.database.create( + user=1, + role=1, + capability="read_mentorship_session", + mentorship_session=mentorship_session, + mentor_profile=1, + mentorship_service=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=model.academy.id) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_service_id_session', kwargs={'service_id': 1}) + \ - f'?started_after={self.bc.datetime.to_iso_string(utc_now - timedelta(seconds=1))}' + url = ( + reverse_lazy("mentorship:academy_service_id_session", kwargs={"service_id": 1}) + + f"?started_after={self.bc.datetime.to_iso_string(utc_now - timedelta(seconds=1))}" + ) response = self.client.get(url) json = response.json() expected = [ - get_serializer(self, - model.mentorship_session, - model.mentor_profile, - model.mentorship_service, - model.user, - data={}), + get_serializer( + self, model.mentorship_session, model.mentor_profile, model.mentorship_service, model.user, data={} + ), ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - self.bc.format.to_dict(model.mentorship_session), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + self.bc.format.to_dict(model.mentorship_session), + ], + ) - @patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) + @patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) def test__get__with_two_mentor_profile__passing_started_after__without_started_at(self): utc_now = timezone.now() - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_session', - mentorship_session=1, - 
mentor_profile=1, - mentorship_service=1, - profile_academy=1) + model = self.bc.database.create( + user=1, + role=1, + capability="read_mentorship_session", + mentorship_session=1, + mentor_profile=1, + mentorship_service=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=model.academy.id) self.client.force_authenticate(model.user) @@ -690,47 +708,53 @@ def test__get__with_two_mentor_profile__passing_started_after__without_started_a self.bc.datetime.to_iso_string(utc_now + timedelta(seconds=1)), ] for case in cases: - url = reverse_lazy('mentorship:academy_service_id_session', kwargs={'service_id': 1}) + \ - f'?started_after={case}' + url = ( + reverse_lazy("mentorship:academy_service_id_session", kwargs={"service_id": 1}) + + f"?started_after={case}" + ) response = self.client.get(url) json = response.json() expected = [ - get_serializer(self, - model.mentorship_session, - model.mentor_profile, - model.mentorship_service, - model.user, - data={}), + get_serializer( + self, model.mentorship_session, model.mentor_profile, model.mentorship_service, model.user, data={} + ), ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - self.bc.format.to_dict(model.mentorship_session), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + self.bc.format.to_dict(model.mentorship_session), + ], + ) """ 🔽🔽🔽 GET with two MentorshipSession, one MentorProfile and one MentorshipService, passing ended_before """ - @patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) + @patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) def test__get__with_two_mentor_profile__passing_bad_ended_before(self): utc_now = timezone.now() - mentorship_session = {'ended_at': utc_now} - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_session', - mentorship_session=mentorship_session, - mentor_profile=1, - mentorship_service=1, - profile_academy=1) + mentorship_session = {"ended_at": utc_now} + model = self.bc.database.create( + user=1, + role=1, + capability="read_mentorship_session", + mentorship_session=mentorship_session, + mentor_profile=1, + mentorship_service=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=model.academy.id) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_service_id_session', kwargs={'service_id': 1}) + \ - f'?ended_before={self.bc.datetime.to_iso_string(utc_now - timedelta(seconds=1))}' + url = ( + reverse_lazy("mentorship:academy_service_id_session", kwargs={"service_id": 1}) + + f"?ended_before={self.bc.datetime.to_iso_string(utc_now - timedelta(seconds=1))}" + ) response = self.client.get(url) json = response.json() @@ -738,55 +762,64 @@ def test__get__with_two_mentor_profile__passing_bad_ended_before(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - self.bc.format.to_dict(model.mentorship_session), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + self.bc.format.to_dict(model.mentorship_session), + ], + ) - @patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) + @patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) def 
test__get__with_two_mentor_profile__passing_ended_before(self): utc_now = timezone.now() - mentorship_session = {'ended_at': utc_now} - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_session', - mentorship_session=mentorship_session, - mentor_profile=1, - mentorship_service=1, - profile_academy=1) + mentorship_session = {"ended_at": utc_now} + model = self.bc.database.create( + user=1, + role=1, + capability="read_mentorship_session", + mentorship_session=mentorship_session, + mentor_profile=1, + mentorship_service=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=model.academy.id) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_service_id_session', kwargs={'service_id': 1}) + \ - f'?ended_before={self.bc.datetime.to_iso_string(utc_now + timedelta(seconds=1))}' + url = ( + reverse_lazy("mentorship:academy_service_id_session", kwargs={"service_id": 1}) + + f"?ended_before={self.bc.datetime.to_iso_string(utc_now + timedelta(seconds=1))}" + ) response = self.client.get(url) json = response.json() expected = [ - get_serializer(self, - model.mentorship_session, - model.mentor_profile, - model.mentorship_service, - model.user, - data={}), + get_serializer( + self, model.mentorship_session, model.mentor_profile, model.mentorship_service, model.user, data={} + ), ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - self.bc.format.to_dict(model.mentorship_session), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + self.bc.format.to_dict(model.mentorship_session), + ], + ) - @patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) + @patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) def test__get__with_two_mentor_profile__passing_ended_before__without_ended_at(self): utc_now = timezone.now() - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_session', - mentorship_session=1, - mentor_profile=1, - mentorship_service=1, - profile_academy=1) + model = self.bc.database.create( + user=1, + role=1, + capability="read_mentorship_session", + mentorship_session=1, + mentor_profile=1, + mentorship_service=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=model.academy.id) self.client.force_authenticate(model.user) @@ -797,45 +830,49 @@ def test__get__with_two_mentor_profile__passing_ended_before__without_ended_at(s ] for case in cases: - url = reverse_lazy('mentorship:academy_service_id_session', kwargs={'service_id': 1}) + \ - f'?ended_before={case}' + url = ( + reverse_lazy("mentorship:academy_service_id_session", kwargs={"service_id": 1}) + + f"?ended_before={case}" + ) response = self.client.get(url) json = response.json() expected = [ - get_serializer(self, - model.mentorship_session, - model.mentor_profile, - model.mentorship_service, - model.user, - data={}), + get_serializer( + self, model.mentorship_session, model.mentor_profile, model.mentorship_service, model.user, data={} + ), ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - self.bc.format.to_dict(model.mentorship_session), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + self.bc.format.to_dict(model.mentorship_session), + 
], + ) """ 🔽🔽🔽 GET with four MentorshipSession, MentorProfile and MentorshipService, passing mentor """ def test__get__with_four_elements__padding_bad_mentor(self): - mentorship_sessions = [{'mentee_id': x, 'mentor_id': x} for x in range(1, 5)] - mentor_profiles = [{'user_id': x} for x in range(1, 5)] - model = self.bc.database.create(user=4, - role=1, - capability='read_mentorship_session', - mentorship_session=mentorship_sessions, - mentor_profile=mentor_profiles, - mentorship_service=1, - profile_academy=1) + mentorship_sessions = [{"mentee_id": x, "mentor_id": x} for x in range(1, 5)] + mentor_profiles = [{"user_id": x} for x in range(1, 5)] + model = self.bc.database.create( + user=4, + role=1, + capability="read_mentorship_session", + mentorship_session=mentorship_sessions, + mentor_profile=mentor_profiles, + mentorship_service=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=1) self.bc.request.authenticate(model.user[0]) - url = reverse_lazy('mentorship:academy_service_id_session', kwargs={'service_id': 1}) + f'?mentor=5,6' + url = reverse_lazy("mentorship:academy_service_id_session", kwargs={"service_id": 1}) + f"?mentor=5,6" response = self.client.get(url) json = response.json() @@ -844,47 +881,53 @@ def test__get__with_four_elements__padding_bad_mentor(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('mentorship.MentorshipSession'), + self.bc.database.list_of("mentorship.MentorshipSession"), self.bc.format.to_dict(model.mentorship_session), ) def test__get__with_four_elements__padding_mentor(self): - mentorship_sessions = [{'mentee_id': x, 'mentor_id': x} for x in range(1, 5)] - mentor_profiles = [{'user_id': x} for x in range(1, 5)] - model = self.bc.database.create(user=4, - role=1, - capability='read_mentorship_session', - mentorship_session=mentorship_sessions, - mentor_profile=mentor_profiles, - mentorship_service=1, - profile_academy=1) + mentorship_sessions = [{"mentee_id": x, "mentor_id": x} for x in range(1, 5)] + mentor_profiles = [{"user_id": x} for x in range(1, 5)] + model = self.bc.database.create( + user=4, + role=1, + capability="read_mentorship_session", + mentorship_session=mentorship_sessions, + mentor_profile=mentor_profiles, + mentorship_service=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=1) self.bc.request.authenticate(model.user[0]) - url = reverse_lazy('mentorship:academy_service_id_session', kwargs={'service_id': 1}) + f'?mentor=1,3' + url = reverse_lazy("mentorship:academy_service_id_session", kwargs={"service_id": 1}) + f"?mentor=1,3" response = self.client.get(url) json = response.json() expected = [ - get_serializer(self, - model.mentorship_session[2], - model.mentor_profile[2], - model.mentorship_service, - model.user[2], - data={}), - get_serializer(self, - model.mentorship_session[0], - model.mentor_profile[0], - model.mentorship_service, - model.user[0], - data={}), + get_serializer( + self, + model.mentorship_session[2], + model.mentor_profile[2], + model.mentorship_service, + model.user[2], + data={}, + ), + get_serializer( + self, + model.mentorship_session[0], + model.mentor_profile[0], + model.mentorship_service, + model.user[0], + data={}, + ), ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('mentorship.MentorshipSession'), + self.bc.database.list_of("mentorship.MentorshipSession"), 
self.bc.format.to_dict(model.mentorship_session), ) @@ -892,25 +935,29 @@ def test__get__with_four_elements__padding_mentor(self): 🔽🔽🔽 Spy the extensions """ - @patch.object(APIViewExtensionHandlers, '_spy_extensions', MagicMock()) - @patch.object(APIViewExtensionHandlers, '_spy_extension_arguments', MagicMock()) + @patch.object(APIViewExtensionHandlers, "_spy_extensions", MagicMock()) + @patch.object(APIViewExtensionHandlers, "_spy_extension_arguments", MagicMock()) def test__get__spy_extensions(self): - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_session', - mentorship_session=1, - profile_academy=1) + model = self.bc.database.create( + user=1, role=1, capability="read_mentorship_session", mentorship_session=1, profile_academy=1 + ) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_service_id_session', kwargs={'service_id': 1}) + url = reverse_lazy("mentorship:academy_service_id_session", kwargs={"service_id": 1}) self.client.get(url) - self.assertEqual(APIViewExtensionHandlers._spy_extensions.call_args_list, [ - call(['LanguageExtension', 'LookupExtension', 'PaginationExtension', 'SortExtension']), - ]) + self.assertEqual( + APIViewExtensionHandlers._spy_extensions.call_args_list, + [ + call(["LanguageExtension", "LookupExtension", "PaginationExtension", "SortExtension"]), + ], + ) - self.assertEqual(APIViewExtensionHandlers._spy_extension_arguments.call_args_list, [ - call(sort='-created_at', paginate=True), - ]) + self.assertEqual( + APIViewExtensionHandlers._spy_extension_arguments.call_args_list, + [ + call(sort="-created_at", paginate=True), + ], + ) diff --git a/breathecode/mentorship/tests/urls/tests_academy_session.py b/breathecode/mentorship/tests/urls/tests_academy_session.py index 62c7d38fd..4d75926fe 100644 --- a/breathecode/mentorship/tests/urls/tests_academy_session.py +++ b/breathecode/mentorship/tests/urls/tests_academy_session.py @@ -1,6 +1,7 @@ """ This file just can contains duck tests refert to AcademyInviteView """ + import random from datetime import timedelta from unittest.mock import MagicMock, call, patch @@ -26,176 +27,164 @@ def format_datetime(self, date): def get_serializer(self, mentorship_session, mentor_profile, mentorship_service, user, academy, data={}): return { - 'accounted_duration': mentorship_session.accounted_duration, - 'allow_billing': mentorship_session.allow_billing, - 'ended_at': format_datetime(self, mentorship_session.ended_at), - 'id': mentorship_session.id, - 'bill': mentorship_session.bill if mentorship_session.bill is None else { - 'id': mentorship_session.bill.id, - 'status': mentorship_session.bill.status - }, - 'mentee': { - 'email': user.email, - 'first_name': user.first_name, - 'id': user.id, - 'last_name': user.last_name, + "accounted_duration": mentorship_session.accounted_duration, + "allow_billing": mentorship_session.allow_billing, + "ended_at": format_datetime(self, mentorship_session.ended_at), + "id": mentorship_session.id, + "bill": ( + mentorship_session.bill + if mentorship_session.bill is None + else {"id": mentorship_session.bill.id, "status": mentorship_session.bill.status} + ), + "mentee": { + "email": user.email, + "first_name": user.first_name, + "id": user.id, + "last_name": user.last_name, }, - 'mentee_left_at': mentorship_session.mentee_left_at, - 'mentor': { - 'booking_url': - mentor_profile.booking_url, - 'id': - mentor_profile.id, - 'services': [{ - 'academy': { - 'icon_url': academy.icon_url, - 'id': 
academy.id, - 'logo_url': academy.logo_url, - 'name': academy.name, - 'slug': academy.slug, - }, - 'allow_mentee_to_extend': - mentorship_service.allow_mentee_to_extend, - 'allow_mentors_to_extend': - mentorship_service.allow_mentors_to_extend, - 'duration': - self.bc.datetime.from_timedelta(mentorship_service.duration), - 'created_at': - self.bc.datetime.to_iso_string(mentorship_service.created_at), - 'id': - mentorship_service.id, - 'language': - mentorship_service.language, - 'logo_url': - mentorship_service.logo_url, - 'max_duration': - self.bc.datetime.from_timedelta(mentorship_service.max_duration), - 'missed_meeting_duration': - self.bc.datetime.from_timedelta(mentorship_service.missed_meeting_duration), - 'name': - mentorship_service.name, - 'slug': - mentorship_service.slug, - 'status': - mentorship_service.status, - 'updated_at': - self.bc.datetime.to_iso_string(mentorship_service.updated_at), - }], - 'slug': - mentor_profile.slug, - 'status': - mentor_profile.status, - 'user': { - 'email': user.email, - 'first_name': user.first_name, - 'id': user.id, - 'last_name': user.last_name, - } + "mentee_left_at": mentorship_session.mentee_left_at, + "mentor": { + "booking_url": mentor_profile.booking_url, + "id": mentor_profile.id, + "services": [ + { + "academy": { + "icon_url": academy.icon_url, + "id": academy.id, + "logo_url": academy.logo_url, + "name": academy.name, + "slug": academy.slug, + }, + "allow_mentee_to_extend": mentorship_service.allow_mentee_to_extend, + "allow_mentors_to_extend": mentorship_service.allow_mentors_to_extend, + "duration": self.bc.datetime.from_timedelta(mentorship_service.duration), + "created_at": self.bc.datetime.to_iso_string(mentorship_service.created_at), + "id": mentorship_service.id, + "language": mentorship_service.language, + "logo_url": mentorship_service.logo_url, + "max_duration": self.bc.datetime.from_timedelta(mentorship_service.max_duration), + "missed_meeting_duration": self.bc.datetime.from_timedelta( + mentorship_service.missed_meeting_duration + ), + "name": mentorship_service.name, + "slug": mentorship_service.slug, + "status": mentorship_service.status, + "updated_at": self.bc.datetime.to_iso_string(mentorship_service.updated_at), + } + ], + "slug": mentor_profile.slug, + "status": mentor_profile.status, + "user": { + "email": user.email, + "first_name": user.first_name, + "id": user.id, + "last_name": user.last_name, + }, }, - 'mentor_joined_at': mentorship_session.mentor_joined_at, - 'mentor_left_at': mentorship_session.mentor_left_at, - 'service': { - 'id': mentorship_service.id, - 'name': mentorship_service.name, - 'slug': mentorship_service.slug, - 'duration': str(mentorship_service.duration.total_seconds()), + "mentor_joined_at": mentorship_session.mentor_joined_at, + "mentor_left_at": mentorship_session.mentor_left_at, + "service": { + "id": mentorship_service.id, + "name": mentorship_service.name, + "slug": mentorship_service.slug, + "duration": str(mentorship_service.duration.total_seconds()), }, - 'started_at': format_datetime(self, mentorship_session.started_at), - 'status': mentorship_session.status, - 'summary': mentorship_session.summary, + "started_at": format_datetime(self, mentorship_session.started_at), + "status": mentorship_session.status, + "summary": mentorship_session.summary, **data, } def post_serializer(data={}): return { - 'accounted_duration': None, - 'agenda': None, - 'allow_billing': False, - 'bill': None, - 'ended_at': None, - 'ends_at': None, - 'id': 1, - 'is_online': False, - 'latitude': None, - 
'longitude': None, - 'mentee': None, - 'mentee_left_at': None, - 'mentor': 1, - 'service': None, - 'mentor_joined_at': None, - 'mentor_left_at': None, - 'name': None, - 'online_meeting_url': None, - 'online_recording_url': None, - 'started_at': None, - 'starts_at': None, - 'status': 'PENDING', - 'summary': None, + "accounted_duration": None, + "agenda": None, + "allow_billing": False, + "bill": None, + "ended_at": None, + "ends_at": None, + "id": 1, + "is_online": False, + "latitude": None, + "longitude": None, + "mentee": None, + "mentee_left_at": None, + "mentor": 1, + "service": None, + "mentor_joined_at": None, + "mentor_left_at": None, + "name": None, + "online_meeting_url": None, + "online_recording_url": None, + "started_at": None, + "starts_at": None, + "status": "PENDING", + "summary": None, **data, } def put_serializer(data={}): return { - 'accounted_duration': None, - 'agenda': None, - 'allow_billing': True, - 'bill': None, - 'ended_at': None, - 'ends_at': None, - 'calendly_uuid': None, - 'id': 0, - 'is_online': False, - 'latitude': None, - 'longitude': None, - 'mentee': 0, - 'mentee_left_at': None, - 'mentor': 0, - 'mentor_joined_at': None, - 'mentor_left_at': None, - 'name': None, - 'online_meeting_url': None, - 'online_recording_url': None, - 'service': 0, - 'started_at': None, - 'starts_at': None, - 'status': 'PENDING', - 'summary': None, - 'questions_and_answers': None, + "accounted_duration": None, + "agenda": None, + "allow_billing": True, + "bill": None, + "ended_at": None, + "ends_at": None, + "calendly_uuid": None, + "id": 0, + "is_online": False, + "latitude": None, + "longitude": None, + "mentee": 0, + "mentee_left_at": None, + "mentor": 0, + "mentor_joined_at": None, + "mentor_left_at": None, + "name": None, + "online_meeting_url": None, + "online_recording_url": None, + "service": 0, + "started_at": None, + "starts_at": None, + "status": "PENDING", + "summary": None, + "questions_and_answers": None, **data, } def mentorship_session_columns(data={}): return { - 'accounted_duration': None, - 'agenda': None, - 'allow_billing': False, - 'bill_id': None, - 'ended_at': None, - 'calendly_uuid': None, - 'ends_at': None, - 'id': 1, - 'is_online': False, - 'latitude': None, - 'longitude': None, - 'mentee_id': None, - 'service_id': None, - 'mentee_left_at': None, - 'mentor_id': 1, - 'mentor_joined_at': None, - 'mentor_left_at': None, - 'name': None, - 'online_meeting_url': None, - 'online_recording_url': None, - 'started_at': None, - 'starts_at': None, - 'status': 'PENDING', - 'status_message': None, - 'suggested_accounted_duration': None, - 'summary': None, - 'questions_and_answers': None, + "accounted_duration": None, + "agenda": None, + "allow_billing": False, + "bill_id": None, + "ended_at": None, + "calendly_uuid": None, + "ends_at": None, + "id": 1, + "is_online": False, + "latitude": None, + "longitude": None, + "mentee_id": None, + "service_id": None, + "mentee_left_at": None, + "mentor_id": 1, + "mentor_joined_at": None, + "mentor_left_at": None, + "name": None, + "online_meeting_url": None, + "online_recording_url": None, + "started_at": None, + "starts_at": None, + "status": "PENDING", + "status_message": None, + "suggested_accounted_duration": None, + "summary": None, + "questions_and_answers": None, **data, } @@ -214,13 +203,13 @@ class AcademyServiceTestSuite(MentorshipTestCase): """ def test__get__without_auth(self): - url = reverse_lazy('mentorship:academy_session') + url = reverse_lazy("mentorship:academy_session") response = self.client.get(url) json = 
response.json() expected = { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED, + "detail": "Authentication credentials were not provided.", + "status_code": status.HTTP_401_UNAUTHORIZED, } self.assertEqual(json, expected) @@ -231,13 +220,13 @@ def test__get__without_academy_header(self): self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_session') + url = reverse_lazy("mentorship:academy_session") response = self.client.get(url) json = response.json() expected = { - 'detail': "Missing academy_id parameter expected for the endpoint url or 'Academy' header", - 'status_code': 403, + "detail": "Missing academy_id parameter expected for the endpoint url or 'Academy' header", + "status_code": 403, } self.assertEqual(json, expected) @@ -253,13 +242,13 @@ def test__get__without_capabilities(self): self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_session') + url = reverse_lazy("mentorship:academy_session") response = self.client.get(url) json = response.json() expected = { - 'detail': "You (user: 1) don't have this capability: read_mentorship_session for academy 1", - 'status_code': 403, + "detail": "You (user: 1) don't have this capability: read_mentorship_session for academy 1", + "status_code": 403, } self.assertEqual(json, expected) @@ -270,12 +259,12 @@ def test__get__without_capabilities(self): """ def test__get__without_data(self): - model = self.bc.database.create(user=1, role=1, capability='read_mentorship_session', profile_academy=1) + model = self.bc.database.create(user=1, role=1, capability="read_mentorship_session", profile_academy=1) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_session') + url = reverse_lazy("mentorship:academy_session") response = self.client.get(url) json = response.json() @@ -289,72 +278,84 @@ def test__get__without_data(self): """ def test__get__with_one_mentor_profile(self): - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_session', - mentorship_session=1, - mentor_profile=1, - mentorship_service=1, - profile_academy=1) + model = self.bc.database.create( + user=1, + role=1, + capability="read_mentorship_session", + mentorship_session=1, + mentor_profile=1, + mentorship_service=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_session') + url = reverse_lazy("mentorship:academy_session") response = self.client.get(url) json = response.json() expected = [ - get_serializer(self, - model.mentorship_session, - model.mentor_profile, - model.mentorship_service, - model.user, - model.academy, - data={}), + get_serializer( + self, + model.mentorship_session, + model.mentor_profile, + model.mentorship_service, + model.user, + model.academy, + data={}, + ), ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - self.bc.format.to_dict(model.mentorship_session), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + self.bc.format.to_dict(model.mentorship_session), + ], + ) """ 🔽🔽🔽 GET with two MentorshipSession, one MentorProfile and one MentorshipService """ def test__get__with_two_mentor_profile(self): - model = self.bc.database.create(user=1, - 
role=1, - capability='read_mentorship_session', - mentorship_session=2, - mentor_profile=1, - mentorship_service=1, - profile_academy=1) + model = self.bc.database.create( + user=1, + role=1, + capability="read_mentorship_session", + mentorship_session=2, + mentor_profile=1, + mentorship_service=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_session') + url = reverse_lazy("mentorship:academy_session") response = self.client.get(url) json = response.json() mentorship_session_list = sorted(model.mentorship_session, key=lambda x: x.created_at, reverse=True) expected = [ - get_serializer(self, - mentorship_session, - model.mentor_profile, - model.mentorship_service, - model.user, - model.academy, - data={}) for mentorship_session in mentorship_session_list + get_serializer( + self, + mentorship_session, + model.mentor_profile, + model.mentorship_service, + model.user, + model.academy, + data={}, + ) + for mentorship_session in mentorship_session_list ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('mentorship.MentorshipSession'), + self.bc.database.list_of("mentorship.MentorshipSession"), self.bc.format.to_dict(model.mentorship_session), ) @@ -362,30 +363,31 @@ def test__get__with_two_mentor_profile(self): 🔽🔽🔽 GET with two MentorshipSession, one MentorProfile and one MentorshipService, passing status """ - @patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) + @patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) def test__get__with_two_mentor_profile__passing_bad_status(self): - statuses = ['PENDING', 'STARTED', 'COMPLETED', 'FAILED', 'IGNORED'] + statuses = ["PENDING", "STARTED", "COMPLETED", "FAILED", "IGNORED"] for n in range(0, 4): first_status = statuses[n] second_status = statuses[n + 1] choices = [first_status, second_status] - mentorship_sessions = [{'status': x} for x in choices] - bad_statuses = ','.join([x for x in statuses if x not in choices]) - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_session', - mentorship_session=mentorship_sessions, - mentor_profile=1, - mentorship_service=1, - profile_academy=1) + mentorship_sessions = [{"status": x} for x in choices] + bad_statuses = ",".join([x for x in statuses if x not in choices]) + model = self.bc.database.create( + user=1, + role=1, + capability="read_mentorship_session", + mentorship_session=mentorship_sessions, + mentor_profile=1, + mentorship_service=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=model.academy.id) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_session') + \ - f'?status={bad_statuses}' + url = reverse_lazy("mentorship:academy_session") + f"?status={bad_statuses}" response = self.client.get(url) json = response.json() @@ -394,85 +396,92 @@ def test__get__with_two_mentor_profile__passing_bad_status(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('mentorship.MentorshipSession'), + self.bc.database.list_of("mentorship.MentorshipSession"), self.bc.format.to_dict(model.mentorship_session), ) # teardown - self.bc.database.delete('mentorship.MentorshipSession') + self.bc.database.delete("mentorship.MentorshipSession") - 
@patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) + @patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) def test__get__with_two_mentor_profile__passing_status(self): - statuses = ['PENDING', 'STARTED', 'COMPLETED', 'FAILED', 'IGNORED'] + statuses = ["PENDING", "STARTED", "COMPLETED", "FAILED", "IGNORED"] for n in range(0, 4): first_status = statuses[n] second_status = statuses[n + 1] choices = [first_status, second_status] - mentorship_sessions = [{'status': x} for x in choices] - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_session', - mentorship_session=mentorship_sessions, - mentor_profile=1, - mentorship_service=1, - profile_academy=1) + mentorship_sessions = [{"status": x} for x in choices] + model = self.bc.database.create( + user=1, + role=1, + capability="read_mentorship_session", + mentorship_session=mentorship_sessions, + mentor_profile=1, + mentorship_service=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=model.academy.id) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_session') + f'?status={first_status},{second_status}' + url = reverse_lazy("mentorship:academy_session") + f"?status={first_status},{second_status}" response = self.client.get(url) json = response.json() mentorship_session_list = sorted(model.mentorship_session, key=lambda x: x.created_at, reverse=True) expected = [ - get_serializer(self, - mentorship_session_list[0], - model.mentor_profile, - model.mentorship_service, - model.user, - model.academy, - data={'status': second_status}), - get_serializer(self, - mentorship_session_list[1], - model.mentor_profile, - model.mentorship_service, - model.user, - model.academy, - data={'status': first_status}), + get_serializer( + self, + mentorship_session_list[0], + model.mentor_profile, + model.mentorship_service, + model.user, + model.academy, + data={"status": second_status}, + ), + get_serializer( + self, + mentorship_session_list[1], + model.mentor_profile, + model.mentorship_service, + model.user, + model.academy, + data={"status": first_status}, + ), ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('mentorship.MentorshipSession'), + self.bc.database.list_of("mentorship.MentorshipSession"), self.bc.format.to_dict(model.mentorship_session), ) # teardown - self.bc.database.delete('mentorship.MentorshipSession') + self.bc.database.delete("mentorship.MentorshipSession") """ 🔽🔽🔽 GET with two MentorshipSession, one MentorProfile and one MentorshipService, passing billed """ - @patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) + @patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) def test__get__with_two_mentor_profile__passing_billed_as_true__without_mentorship_bill(self): - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_session', - mentorship_session=1, - mentor_profile=1, - mentorship_service=1, - profile_academy=1) + model = self.bc.database.create( + user=1, + role=1, + capability="read_mentorship_session", + mentorship_session=1, + mentor_profile=1, + mentorship_service=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=model.academy.id) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_session') + \ - f'?billed=true' + url = 
reverse_lazy("mentorship:academy_session") + f"?billed=true" response = self.client.get(url) json = response.json() @@ -480,61 +489,71 @@ def test__get__with_two_mentor_profile__passing_billed_as_true__without_mentorsh self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - self.bc.format.to_dict(model.mentorship_session), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + self.bc.format.to_dict(model.mentorship_session), + ], + ) - @patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) + @patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) def test__get__with_two_mentor_profile__passing_billed_as_true__with_mentorship_bill(self): - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_session', - mentorship_session=1, - mentor_profile=1, - mentorship_bill=1, - mentorship_service=1, - profile_academy=1) + model = self.bc.database.create( + user=1, + role=1, + capability="read_mentorship_session", + mentorship_session=1, + mentor_profile=1, + mentorship_bill=1, + mentorship_service=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=model.academy.id) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_session') + \ - f'?billed=true' + url = reverse_lazy("mentorship:academy_session") + f"?billed=true" response = self.client.get(url) json = response.json() expected = [ - get_serializer(self, - model.mentorship_session, - model.mentor_profile, - model.mentorship_service, - model.user, - model.academy, - data={}), + get_serializer( + self, + model.mentorship_session, + model.mentor_profile, + model.mentorship_service, + model.user, + model.academy, + data={}, + ), ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - self.bc.format.to_dict(model.mentorship_session), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + self.bc.format.to_dict(model.mentorship_session), + ], + ) - @patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) + @patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) def test__get__with_two_mentor_profile__passing_billed_as_false__with_mentorship_bill(self): - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_session', - mentorship_session=1, - mentor_profile=1, - mentorship_bill=1, - mentorship_service=1, - profile_academy=1) + model = self.bc.database.create( + user=1, + role=1, + capability="read_mentorship_session", + mentorship_session=1, + mentor_profile=1, + mentorship_bill=1, + mentorship_service=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=model.academy.id) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_session') + \ - f'?billed=false' + url = reverse_lazy("mentorship:academy_session") + f"?billed=false" response = self.client.get(url) json = response.json() @@ -542,65 +561,78 @@ def test__get__with_two_mentor_profile__passing_billed_as_false__with_mentorship self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - 
self.bc.format.to_dict(model.mentorship_session), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + self.bc.format.to_dict(model.mentorship_session), + ], + ) - @patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) + @patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) def test__get__with_two_mentor_profile__passing_billed_as_false__without_mentorship_bill(self): - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_session', - mentorship_session=1, - mentor_profile=1, - mentorship_service=1, - profile_academy=1) + model = self.bc.database.create( + user=1, + role=1, + capability="read_mentorship_session", + mentorship_session=1, + mentor_profile=1, + mentorship_service=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=model.academy.id) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_session') + \ - f'?billed=false' + url = reverse_lazy("mentorship:academy_session") + f"?billed=false" response = self.client.get(url) json = response.json() expected = [ - get_serializer(self, - model.mentorship_session, - model.mentor_profile, - model.mentorship_service, - model.user, - model.academy, - data={}), + get_serializer( + self, + model.mentorship_session, + model.mentor_profile, + model.mentorship_service, + model.user, + model.academy, + data={}, + ), ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - self.bc.format.to_dict(model.mentorship_session), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + self.bc.format.to_dict(model.mentorship_session), + ], + ) """ 🔽🔽🔽 GET with two MentorshipSession, one MentorProfile and one MentorshipService, passing started_after """ - @patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) + @patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) def test__get__with_two_mentor_profile__passing_bad_started_after(self): utc_now = timezone.now() - mentorship_session = {'started_at': utc_now} - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_session', - mentorship_session=mentorship_session, - mentor_profile=1, - mentorship_service=1, - profile_academy=1) + mentorship_session = {"started_at": utc_now} + model = self.bc.database.create( + user=1, + role=1, + capability="read_mentorship_session", + mentorship_session=mentorship_session, + mentor_profile=1, + mentorship_service=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=model.academy.id) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_session') + \ - f'?started_after={self.bc.datetime.to_iso_string(utc_now + timedelta(seconds=1))}' + url = ( + reverse_lazy("mentorship:academy_session") + + f"?started_after={self.bc.datetime.to_iso_string(utc_now + timedelta(seconds=1))}" + ) response = self.client.get(url) json = response.json() @@ -608,56 +640,70 @@ def test__get__with_two_mentor_profile__passing_bad_started_after(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - self.bc.format.to_dict(model.mentorship_session), - ]) + self.assertEqual( + 
self.bc.database.list_of("mentorship.MentorshipSession"), + [ + self.bc.format.to_dict(model.mentorship_session), + ], + ) - @patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) + @patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) def test__get__with_two_mentor_profile__passing_started_after(self): utc_now = timezone.now() - mentorship_session = {'started_at': utc_now} - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_session', - mentorship_session=mentorship_session, - mentor_profile=1, - mentorship_service=1, - profile_academy=1) + mentorship_session = {"started_at": utc_now} + model = self.bc.database.create( + user=1, + role=1, + capability="read_mentorship_session", + mentorship_session=mentorship_session, + mentor_profile=1, + mentorship_service=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=model.academy.id) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_session') + \ - f'?started_after={self.bc.datetime.to_iso_string(utc_now - timedelta(seconds=1))}' + url = ( + reverse_lazy("mentorship:academy_session") + + f"?started_after={self.bc.datetime.to_iso_string(utc_now - timedelta(seconds=1))}" + ) response = self.client.get(url) json = response.json() expected = [ - get_serializer(self, - model.mentorship_session, - model.mentor_profile, - model.mentorship_service, - model.user, - model.academy, - data={}), + get_serializer( + self, + model.mentorship_session, + model.mentor_profile, + model.mentorship_service, + model.user, + model.academy, + data={}, + ), ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - self.bc.format.to_dict(model.mentorship_session), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + self.bc.format.to_dict(model.mentorship_session), + ], + ) - @patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) + @patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) def test__get__with_two_mentor_profile__passing_started_after__without_started_at(self): utc_now = timezone.now() - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_session', - mentorship_session=1, - mentor_profile=1, - mentorship_service=1, - profile_academy=1) + model = self.bc.database.create( + user=1, + role=1, + capability="read_mentorship_session", + mentorship_session=1, + mentor_profile=1, + mentorship_service=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=model.academy.id) self.client.force_authenticate(model.user) @@ -667,48 +713,56 @@ def test__get__with_two_mentor_profile__passing_started_after__without_started_a self.bc.datetime.to_iso_string(utc_now + timedelta(seconds=1)), ] for case in cases: - url = reverse_lazy('mentorship:academy_session') + \ - f'?started_after={case}' + url = reverse_lazy("mentorship:academy_session") + f"?started_after={case}" response = self.client.get(url) json = response.json() expected = [ - get_serializer(self, - model.mentorship_session, - model.mentor_profile, - model.mentorship_service, - model.user, - model.academy, - data={}), + get_serializer( + self, + model.mentorship_session, + model.mentor_profile, + model.mentorship_service, + model.user, + model.academy, + data={}, + ), ] self.assertEqual(json, expected) 
self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - self.bc.format.to_dict(model.mentorship_session), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + self.bc.format.to_dict(model.mentorship_session), + ], + ) """ 🔽🔽🔽 GET with two MentorshipSession, one MentorProfile and one MentorshipService, passing ended_before """ - @patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) + @patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) def test__get__with_two_mentor_profile__passing_bad_ended_before(self): utc_now = timezone.now() - mentorship_session = {'ended_at': utc_now} - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_session', - mentorship_session=mentorship_session, - mentor_profile=1, - mentorship_service=1, - profile_academy=1) + mentorship_session = {"ended_at": utc_now} + model = self.bc.database.create( + user=1, + role=1, + capability="read_mentorship_session", + mentorship_session=mentorship_session, + mentor_profile=1, + mentorship_service=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=model.academy.id) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_session') + \ - f'?ended_before={self.bc.datetime.to_iso_string(utc_now - timedelta(seconds=1))}' + url = ( + reverse_lazy("mentorship:academy_session") + + f"?ended_before={self.bc.datetime.to_iso_string(utc_now - timedelta(seconds=1))}" + ) response = self.client.get(url) json = response.json() @@ -716,56 +770,70 @@ def test__get__with_two_mentor_profile__passing_bad_ended_before(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - self.bc.format.to_dict(model.mentorship_session), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + self.bc.format.to_dict(model.mentorship_session), + ], + ) - @patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) + @patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) def test__get__with_two_mentor_profile__passing_ended_before(self): utc_now = timezone.now() - mentorship_session = {'ended_at': utc_now} - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_session', - mentorship_session=mentorship_session, - mentor_profile=1, - mentorship_service=1, - profile_academy=1) + mentorship_session = {"ended_at": utc_now} + model = self.bc.database.create( + user=1, + role=1, + capability="read_mentorship_session", + mentorship_session=mentorship_session, + mentor_profile=1, + mentorship_service=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=model.academy.id) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_session') + \ - f'?ended_before={self.bc.datetime.to_iso_string(utc_now + timedelta(seconds=1))}' + url = ( + reverse_lazy("mentorship:academy_session") + + f"?ended_before={self.bc.datetime.to_iso_string(utc_now + timedelta(seconds=1))}" + ) response = self.client.get(url) json = response.json() expected = [ - get_serializer(self, - model.mentorship_session, - model.mentor_profile, - model.mentorship_service, - model.user, - model.academy, - data={}), + get_serializer( + self, + model.mentorship_session, 
+ model.mentor_profile, + model.mentorship_service, + model.user, + model.academy, + data={}, + ), ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - self.bc.format.to_dict(model.mentorship_session), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + self.bc.format.to_dict(model.mentorship_session), + ], + ) - @patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) + @patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) def test__get__with_two_mentor_profile__passing_ended_before__without_ended_at(self): utc_now = timezone.now() - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_session', - mentorship_session=1, - mentor_profile=1, - mentorship_service=1, - profile_academy=1) + model = self.bc.database.create( + user=1, + role=1, + capability="read_mentorship_session", + mentorship_session=1, + mentor_profile=1, + mentorship_service=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=model.academy.id) self.client.force_authenticate(model.user) @@ -776,46 +844,52 @@ def test__get__with_two_mentor_profile__passing_ended_before__without_ended_at(s ] for case in cases: - url = reverse_lazy('mentorship:academy_session') + \ - f'?ended_before={case}' + url = reverse_lazy("mentorship:academy_session") + f"?ended_before={case}" response = self.client.get(url) json = response.json() expected = [ - get_serializer(self, - model.mentorship_session, - model.mentor_profile, - model.mentorship_service, - model.user, - model.academy, - data={}), + get_serializer( + self, + model.mentorship_session, + model.mentor_profile, + model.mentorship_service, + model.user, + model.academy, + data={}, + ), ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - self.bc.format.to_dict(model.mentorship_session), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + self.bc.format.to_dict(model.mentorship_session), + ], + ) """ 🔽🔽🔽 GET with four MentorshipSession, MentorProfile and MentorshipService, passing mentor """ def test__get__with_four_elements__padding_bad_mentor(self): - mentorship_sessions = [{'mentee_id': x, 'mentor_id': x} for x in range(1, 5)] - mentor_profiles = [{'user_id': x, 'services': [x]} for x in range(1, 5)] - model = self.bc.database.create(user=4, - role=1, - capability='read_mentorship_session', - mentorship_session=mentorship_sessions, - mentor_profile=mentor_profiles, - mentorship_service=4, - profile_academy=1) + mentorship_sessions = [{"mentee_id": x, "mentor_id": x} for x in range(1, 5)] + mentor_profiles = [{"user_id": x, "services": [x]} for x in range(1, 5)] + model = self.bc.database.create( + user=4, + role=1, + capability="read_mentorship_session", + mentorship_session=mentorship_sessions, + mentor_profile=mentor_profiles, + mentorship_service=4, + profile_academy=1, + ) self.bc.request.set_headers(academy=1) self.bc.request.authenticate(model.user[0]) - url = reverse_lazy('mentorship:academy_session') + f'?mentor=5,6' + url = reverse_lazy("mentorship:academy_session") + f"?mentor=5,6" response = self.client.get(url) json = response.json() @@ -824,49 +898,55 @@ def test__get__with_four_elements__padding_bad_mentor(self): self.assertEqual(json, expected) 
self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('mentorship.MentorshipSession'), + self.bc.database.list_of("mentorship.MentorshipSession"), self.bc.format.to_dict(model.mentorship_session), ) def test__get__with_four_elements__padding_mentor(self): - mentorship_sessions = [{'mentee_id': x, 'mentor_id': x, 'service_id': x} for x in range(1, 5)] - mentor_profiles = [{'user_id': x, 'services': [x]} for x in range(1, 5)] - model = self.bc.database.create(user=4, - role=1, - capability='read_mentorship_session', - mentorship_session=mentorship_sessions, - mentor_profile=mentor_profiles, - mentorship_service=4, - profile_academy=1) + mentorship_sessions = [{"mentee_id": x, "mentor_id": x, "service_id": x} for x in range(1, 5)] + mentor_profiles = [{"user_id": x, "services": [x]} for x in range(1, 5)] + model = self.bc.database.create( + user=4, + role=1, + capability="read_mentorship_session", + mentorship_session=mentorship_sessions, + mentor_profile=mentor_profiles, + mentorship_service=4, + profile_academy=1, + ) self.bc.request.set_headers(academy=1) self.bc.request.authenticate(model.user[0]) - url = reverse_lazy('mentorship:academy_session') + f'?mentor=1,3' + url = reverse_lazy("mentorship:academy_session") + f"?mentor=1,3" response = self.client.get(url) json = response.json() expected = [ - get_serializer(self, - model.mentorship_session[2], - model.mentor_profile[2], - model.mentorship_service[2], - model.user[2], - model.academy, - data={}), - get_serializer(self, - model.mentorship_session[0], - model.mentor_profile[0], - model.mentorship_service[0], - model.user[0], - model.academy, - data={}), + get_serializer( + self, + model.mentorship_session[2], + model.mentor_profile[2], + model.mentorship_service[2], + model.user[2], + model.academy, + data={}, + ), + get_serializer( + self, + model.mentorship_session[0], + model.mentor_profile[0], + model.mentorship_service[0], + model.user[0], + model.academy, + data={}, + ), ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('mentorship.MentorshipSession'), + self.bc.database.list_of("mentorship.MentorshipSession"), self.bc.format.to_dict(model.mentorship_session), ) @@ -874,28 +954,32 @@ def test__get__with_four_elements__padding_mentor(self): 🔽🔽🔽 Spy the extensions """ - @patch.object(APIViewExtensionHandlers, '_spy_extensions', MagicMock()) - @patch.object(APIViewExtensionHandlers, '_spy_extension_arguments', MagicMock()) + @patch.object(APIViewExtensionHandlers, "_spy_extensions", MagicMock()) + @patch.object(APIViewExtensionHandlers, "_spy_extension_arguments", MagicMock()) def test__get__spy_extensions(self): - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_session', - mentorship_session=1, - profile_academy=1) + model = self.bc.database.create( + user=1, role=1, capability="read_mentorship_session", mentorship_session=1, profile_academy=1 + ) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_session') + url = reverse_lazy("mentorship:academy_session") self.client.get(url) - self.assertEqual(APIViewExtensionHandlers._spy_extensions.call_args_list, [ - call(['LanguageExtension', 'LookupExtension', 'PaginationExtension', 'SortExtension']), - ]) + self.assertEqual( + APIViewExtensionHandlers._spy_extensions.call_args_list, + [ + call(["LanguageExtension", "LookupExtension", 
"PaginationExtension", "SortExtension"]), + ], + ) - self.assertEqual(APIViewExtensionHandlers._spy_extension_arguments.call_args_list, [ - call(sort='-created_at', paginate=True), - ]) + self.assertEqual( + APIViewExtensionHandlers._spy_extension_arguments.call_args_list, + [ + call(sort="-created_at", paginate=True), + ], + ) """ 🔽🔽🔽 POST capability @@ -907,13 +991,13 @@ def test__post__without_capabilities(self): self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_session') + url = reverse_lazy("mentorship:academy_session") response = self.client.post(url) json = response.json() expected = { - 'detail': "You (user: 1) don't have this capability: crud_mentorship_session for academy 1", - 'status_code': 403, + "detail": "You (user: 1) don't have this capability: crud_mentorship_session for academy 1", + "status_code": 403, } self.assertEqual(json, expected) @@ -924,69 +1008,78 @@ def test__post__without_capabilities(self): """ def test__post__missing_fields(self): - model = self.bc.database.create(user=1, role=1, capability='crud_mentorship_session', profile_academy=1) + model = self.bc.database.create(user=1, role=1, capability="crud_mentorship_session", profile_academy=1) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_session') + url = reverse_lazy("mentorship:academy_session") response = self.client.post(url) json = response.json() - expected = {'mentor': ['This field is required.'], 'service': ['This field is required.']} + expected = {"mentor": ["This field is required."], "service": ["This field is required."]} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), []) + self.assertEqual(self.bc.database.list_of("mentorship.MentorshipSession"), []) """ 🔽🔽🔽 POST creating a element """ def test__post__creating_a_element(self): - model = self.bc.database.create(user=1, - role=1, - capability='crud_mentorship_session', - mentor_profile=1, - mentorship_service=1, - profile_academy=1) + model = self.bc.database.create( + user=1, + role=1, + capability="crud_mentorship_session", + mentor_profile=1, + mentorship_service=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_session') - data = {'mentor': 1, 'service': 1} + url = reverse_lazy("mentorship:academy_session") + data = {"mentor": 1, "service": 1} response = self.client.post(url, data) json = response.json() - expected = post_serializer({ - 'allow_billing': False, - 'service': { - 'id': model.mentorship_service.id, - 'name': model.mentorship_service.name, - 'slug': model.mentorship_service.slug, - }, - 'mentor': { - 'id': model.mentor_profile.id, - 'slug': model.mentor_profile.slug, - 'status': model.mentor_profile.status, - 'user': { - 'first_name': model.mentor_profile.user.first_name, - 'last_name': model.mentor_profile.user.last_name, - 'email': model.mentor_profile.user.email, - 'id': model.mentor_profile.user.id, - } + expected = post_serializer( + { + "allow_billing": False, + "service": { + "id": model.mentorship_service.id, + "name": model.mentorship_service.name, + "slug": model.mentorship_service.slug, + }, + "mentor": { + "id": model.mentor_profile.id, + "slug": model.mentor_profile.slug, + "status": model.mentor_profile.status, + "user": { + "first_name": 
model.mentor_profile.user.first_name, + "last_name": model.mentor_profile.user.last_name, + "email": model.mentor_profile.user.email, + "id": model.mentor_profile.user.id, + }, + }, } - }) + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - mentorship_session_columns({ - 'allow_billing': False, - 'service_id': 1, - }), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + mentorship_session_columns( + { + "allow_billing": False, + "service_id": 1, + } + ), + ], + ) """ 🔽🔽🔽 POST creating a element, passing the forbidden fields @@ -994,58 +1087,67 @@ def test__post__creating_a_element(self): def test__post__creating_a_element__passing_the_forbidden_fields(self): utc_now = timezone.now() - model = self.bc.database.create(user=1, - role=1, - capability='crud_mentorship_session', - mentor_profile=1, - mentorship_service=1, - profile_academy=1) + model = self.bc.database.create( + user=1, + role=1, + capability="crud_mentorship_session", + mentor_profile=1, + mentorship_service=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) data = { - 'mentor': 1, - 'service': 1, + "mentor": 1, + "service": 1, # readonly fields - 'created_at': utc_now, - 'updated_at': utc_now, - 'suggested_accounted_duration': '20', - 'status_message': '101010101010101010101', + "created_at": utc_now, + "updated_at": utc_now, + "suggested_accounted_duration": "20", + "status_message": "101010101010101010101", } - url = reverse_lazy('mentorship:academy_session') + url = reverse_lazy("mentorship:academy_session") response = self.client.post(url, data) json = response.json() - expected = post_serializer({ - 'allow_billing': False, - 'service': { - 'id': model.mentorship_service.id, - 'name': model.mentorship_service.name, - 'slug': model.mentorship_service.slug, - }, - 'mentor': { - 'id': model.mentor_profile.id, - 'slug': model.mentor_profile.slug, - 'status': model.mentor_profile.status, - 'user': { - 'first_name': model.mentor_profile.user.first_name, - 'last_name': model.mentor_profile.user.last_name, - 'email': model.mentor_profile.user.email, - 'id': model.mentor_profile.user.id, - } + expected = post_serializer( + { + "allow_billing": False, + "service": { + "id": model.mentorship_service.id, + "name": model.mentorship_service.name, + "slug": model.mentorship_service.slug, + }, + "mentor": { + "id": model.mentor_profile.id, + "slug": model.mentor_profile.slug, + "status": model.mentor_profile.status, + "user": { + "first_name": model.mentor_profile.user.first_name, + "last_name": model.mentor_profile.user.last_name, + "email": model.mentor_profile.user.email, + "id": model.mentor_profile.user.id, + }, + }, } - }) + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - mentorship_session_columns({ - 'allow_billing': False, - 'service_id': 1, - }), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + mentorship_session_columns( + { + "allow_billing": False, + "service_id": 1, + } + ), + ], + ) """ 🔽🔽🔽 POST creating a element, passing readonly fields @@ -1053,115 +1155,128 @@ def test__post__creating_a_element__passing_the_forbidden_fields(self): def test__post__creating_a_element__passing_readonly_fields__is_online_as_true(self): utc_now = 
timezone.now() - model = self.bc.database.create(user=1, - role=1, - capability='crud_mentorship_session', - mentor_profile=1, - mentorship_service=1, - profile_academy=1) + model = self.bc.database.create( + user=1, + role=1, + capability="crud_mentorship_session", + mentor_profile=1, + mentorship_service=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - fields = ['mentor_joined_at', 'mentor_left_at', 'mentee_left_at', 'started_at', 'ended_at'] + fields = ["mentor_joined_at", "mentor_left_at", "mentee_left_at", "started_at", "ended_at"] for field in fields: data = { - 'mentor': 1, - 'service': 1, - 'is_online': True, + "mentor": 1, + "service": 1, + "is_online": True, # readonly fields field: self.bc.datetime.to_iso_string(append_delta_to_datetime(utc_now)), } - url = reverse_lazy('mentorship:academy_session') + url = reverse_lazy("mentorship:academy_session") response = self.client.post(url, data) json = response.json() - expected = {'detail': 'read-only-field-online', 'status_code': 400} + expected = {"detail": "read-only-field-online", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), []) + self.assertEqual(self.bc.database.list_of("mentorship.MentorshipSession"), []) def test__post__creating_a_element__passing_readonly_fields__is_online_as_false(self): utc_now = timezone.now() - model = self.bc.database.create(user=1, - role=1, - capability='crud_mentorship_session', - mentor_profile=1, - mentorship_service=1, - profile_academy=1) + model = self.bc.database.create( + user=1, + role=1, + capability="crud_mentorship_session", + mentor_profile=1, + mentorship_service=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - fields = ['mentor_joined_at', 'mentor_left_at', 'mentee_left_at', 'started_at', 'ended_at'] + fields = ["mentor_joined_at", "mentor_left_at", "mentee_left_at", "started_at", "ended_at"] id = 0 for field in fields: id += 1 date = append_delta_to_datetime(utc_now) data = { - 'mentor': 1, - 'service': 1, - 'is_online': False, + "mentor": 1, + "service": 1, + "is_online": False, # readonly fields field: self.bc.datetime.to_iso_string(date), } - url = reverse_lazy('mentorship:academy_session') + url = reverse_lazy("mentorship:academy_session") response = self.client.post(url, data) json = response.json() - expected = post_serializer({ - 'allow_billing': False, - 'id': id, - 'service': { - 'id': model.mentorship_service.id, - 'name': model.mentorship_service.name, - 'slug': model.mentorship_service.slug, - }, - 'mentor': { - 'id': model.mentor_profile.id, - 'slug': model.mentor_profile.slug, - 'status': model.mentor_profile.status, - 'user': { - 'first_name': model.mentor_profile.user.first_name, - 'last_name': model.mentor_profile.user.last_name, - 'email': model.mentor_profile.user.email, - 'id': model.mentor_profile.user.id, - } - }, - field: self.bc.datetime.to_iso_string(date), - }) + expected = post_serializer( + { + "allow_billing": False, + "id": id, + "service": { + "id": model.mentorship_service.id, + "name": model.mentorship_service.name, + "slug": model.mentorship_service.slug, + }, + "mentor": { + "id": model.mentor_profile.id, + "slug": model.mentor_profile.slug, + "status": model.mentor_profile.status, + "user": { + "first_name": model.mentor_profile.user.first_name, + "last_name": 
model.mentor_profile.user.last_name, + "email": model.mentor_profile.user.email, + "id": model.mentor_profile.user.id, + }, + }, + field: self.bc.datetime.to_iso_string(date), + } + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - mentorship_session_columns({ - 'allow_billing': False, - 'id': id, - 'service_id': 1, - field: date, - }), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + mentorship_session_columns( + { + "allow_billing": False, + "id": id, + "service_id": 1, + field: date, + } + ), + ], + ) # teardown - self.bc.database.delete('mentorship.MentorshipSession') + self.bc.database.delete("mentorship.MentorshipSession") """ 🔽🔽🔽 POST creating a element, no cohort user """ - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) def test__post__creating_a_element__no_cohort_user(self): utc_now = timezone.now() - model = self.bc.database.create(user=1, - role=1, - capability='crud_mentorship_session', - mentor_profile=1, - mentorship_bill=1, - mentorship_service=1, - profile_academy=1) + model = self.bc.database.create( + user=1, + role=1, + capability="crud_mentorship_session", + mentor_profile=1, + mentorship_bill=1, + mentorship_service=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) @@ -1170,53 +1285,55 @@ def test__post__creating_a_element__no_cohort_user(self): starts_at = append_delta_to_datetime(utc_now) ends_at = append_delta_to_datetime(utc_now) data = { - 'mentor': 1, - 'service': 1, - 'mentee': 1, - 'bill': 1, - 'name': self.bc.fake.name(), - 'is_online': bool(random.getrandbits(1)), - 'latitude': get_base_number() * random.random() * 1000, - 'longitude': get_base_number() * random.random() * 1000, - 'online_meeting_url': self.bc.fake.url(), - 'online_recording_url': self.bc.fake.url(), - 'status': random.choice(['PENDING', 'STARTED', 'COMPLETED', 'FAILED', 'IGNORED']), - 'online_recording_url': self.bc.fake.url(), - 'allow_billing': bool(random.getrandbits(1)), - 'accounted_duration': '0' + str(accounted_duration), - 'agenda': self.bc.fake.text(), - 'summary': self.bc.fake.text(), - 'starts_at': self.bc.datetime.to_iso_string(starts_at), - 'ends_at': self.bc.datetime.to_iso_string(ends_at), + "mentor": 1, + "service": 1, + "mentee": 1, + "bill": 1, + "name": self.bc.fake.name(), + "is_online": bool(random.getrandbits(1)), + "latitude": get_base_number() * random.random() * 1000, + "longitude": get_base_number() * random.random() * 1000, + "online_meeting_url": self.bc.fake.url(), + "online_recording_url": self.bc.fake.url(), + "status": random.choice(["PENDING", "STARTED", "COMPLETED", "FAILED", "IGNORED"]), + "online_recording_url": self.bc.fake.url(), + "allow_billing": bool(random.getrandbits(1)), + "accounted_duration": "0" + str(accounted_duration), + "agenda": self.bc.fake.text(), + "summary": self.bc.fake.text(), + "starts_at": self.bc.datetime.to_iso_string(starts_at), + "ends_at": self.bc.datetime.to_iso_string(ends_at), } - url = reverse_lazy('mentorship:academy_session') + url = reverse_lazy("mentorship:academy_session") response = self.client.post(url, data) json = response.json() - expected = {'detail': 'mentee-not-found', 'status_code': 400} + expected = {"detail": "mentee-not-found", 
"status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), []) + self.assertEqual(self.bc.database.list_of("mentorship.MentorshipSession"), []) """ 🔽🔽🔽 POST creating a element, mentor and mentee can be same person """ - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) def test__post__creating_a_element__mentor_and_mentee_can_be_same_person(self): utc_now = timezone.now() - model = self.bc.database.create(user=1, - role=1, - capability='crud_mentorship_session', - mentor_profile=1, - consumable=1, - mentorship_bill=1, - mentorship_service=1, - mentorship_service_set=1, - profile_academy=1) + model = self.bc.database.create( + user=1, + role=1, + capability="crud_mentorship_session", + mentor_profile=1, + consumable=1, + mentorship_bill=1, + mentorship_service=1, + mentorship_service_set=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) @@ -1225,54 +1342,56 @@ def test__post__creating_a_element__mentor_and_mentee_can_be_same_person(self): starts_at = append_delta_to_datetime(utc_now) ends_at = append_delta_to_datetime(utc_now) data = { - 'mentor': 1, - 'service': 1, - 'mentee': 1, - 'bill': 1, - 'name': self.bc.fake.name(), - 'is_online': bool(random.getrandbits(1)), - 'latitude': get_base_number() * random.random() * 1000, - 'longitude': get_base_number() * random.random() * 1000, - 'online_meeting_url': self.bc.fake.url(), - 'online_recording_url': self.bc.fake.url(), - 'status': random.choice(['PENDING', 'STARTED', 'COMPLETED', 'FAILED', 'IGNORED']), - 'online_recording_url': self.bc.fake.url(), - 'allow_billing': bool(random.getrandbits(1)), - 'accounted_duration': '0' + str(accounted_duration), - 'agenda': self.bc.fake.text(), - 'summary': self.bc.fake.text(), - 'starts_at': self.bc.datetime.to_iso_string(starts_at), - 'ends_at': self.bc.datetime.to_iso_string(ends_at), + "mentor": 1, + "service": 1, + "mentee": 1, + "bill": 1, + "name": self.bc.fake.name(), + "is_online": bool(random.getrandbits(1)), + "latitude": get_base_number() * random.random() * 1000, + "longitude": get_base_number() * random.random() * 1000, + "online_meeting_url": self.bc.fake.url(), + "online_recording_url": self.bc.fake.url(), + "status": random.choice(["PENDING", "STARTED", "COMPLETED", "FAILED", "IGNORED"]), + "online_recording_url": self.bc.fake.url(), + "allow_billing": bool(random.getrandbits(1)), + "accounted_duration": "0" + str(accounted_duration), + "agenda": self.bc.fake.text(), + "summary": self.bc.fake.text(), + "starts_at": self.bc.datetime.to_iso_string(starts_at), + "ends_at": self.bc.datetime.to_iso_string(ends_at), } - url = reverse_lazy('mentorship:academy_session') + url = reverse_lazy("mentorship:academy_session") response = self.client.post(url, data) json = response.json() - expected = {'detail': 'mentor-mentee-same-person', 'status_code': 400} + expected = {"detail": "mentor-mentee-same-person", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), []) + self.assertEqual(self.bc.database.list_of("mentorship.MentorshipSession"), []) """ 🔽🔽🔽 POST creating a element, passing all the fields """ - 
@patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) def test__post__creating_a_element__passing_all_the_fields(self): utc_now = timezone.now() - consumable = {'user_id': 2} - model = self.bc.database.create(user=2, - role=1, - capability='crud_mentorship_session', - mentor_profile=1, - consumable=consumable, - mentorship_bill=1, - mentorship_service=1, - mentorship_service_set=1, - profile_academy=1) + consumable = {"user_id": 2} + model = self.bc.database.create( + user=2, + role=1, + capability="crud_mentorship_session", + mentor_profile=1, + consumable=consumable, + mentorship_bill=1, + mentorship_service=1, + mentorship_service_set=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=1) self.bc.request.authenticate(model.user[0]) @@ -1282,77 +1401,81 @@ def test__post__creating_a_element__passing_all_the_fields(self): starts_at = append_delta_to_datetime(utc_now) ends_at = append_delta_to_datetime(utc_now) data = { - 'mentor': 1, - 'service': 1, - 'mentee': 2, - 'bill': 1, - 'name': self.bc.fake.name(), - 'is_online': bool(random.getrandbits(1)), - 'latitude': get_base_number() * random.random() * 1000, - 'longitude': get_base_number() * random.random() * 1000, - 'online_meeting_url': self.bc.fake.url(), - 'online_recording_url': self.bc.fake.url(), - 'status': random.choice(['PENDING', 'STARTED', 'COMPLETED', 'FAILED', 'IGNORED']), - 'online_recording_url': self.bc.fake.url(), - 'allow_billing': bool(random.getrandbits(1)), - 'accounted_duration': '0' + str(accounted_duration), - 'agenda': self.bc.fake.text(), - 'summary': self.bc.fake.text(), - 'starts_at': self.bc.datetime.to_iso_string(starts_at), - 'ends_at': self.bc.datetime.to_iso_string(ends_at), + "mentor": 1, + "service": 1, + "mentee": 2, + "bill": 1, + "name": self.bc.fake.name(), + "is_online": bool(random.getrandbits(1)), + "latitude": get_base_number() * random.random() * 1000, + "longitude": get_base_number() * random.random() * 1000, + "online_meeting_url": self.bc.fake.url(), + "online_recording_url": self.bc.fake.url(), + "status": random.choice(["PENDING", "STARTED", "COMPLETED", "FAILED", "IGNORED"]), + "online_recording_url": self.bc.fake.url(), + "allow_billing": bool(random.getrandbits(1)), + "accounted_duration": "0" + str(accounted_duration), + "agenda": self.bc.fake.text(), + "summary": self.bc.fake.text(), + "starts_at": self.bc.datetime.to_iso_string(starts_at), + "ends_at": self.bc.datetime.to_iso_string(ends_at), } - url = reverse_lazy('mentorship:academy_session') + url = reverse_lazy("mentorship:academy_session") response = self.client.post(url, data) json = response.json() - expected = post_serializer({ - **data, - 'accounted_duration': f'{minutes * 60}.0', - 'bill': { - 'id': 1, - 'status': 'DUE' - }, - 'service': { - 'id': model.mentorship_service.id, - 'name': model.mentorship_service.name, - 'slug': model.mentorship_service.slug, - }, - 'mentor': { - 'id': model.mentor_profile.id, - 'slug': model.mentor_profile.slug, - 'status': model.mentor_profile.status, - 'user': { - 'first_name': model.mentor_profile.user.first_name, - 'last_name': model.mentor_profile.user.last_name, - 'email': model.mentor_profile.user.email, - 'id': model.mentor_profile.user.id, - } - }, - 'mentee': { - 'email': model.user[1].email, - 'first_name': model.user[1].first_name, - 'id': model.user[1].id, - 'last_name': model.user[1].last_name, - }, - }) + expected = post_serializer( + 
{ + **data, + "accounted_duration": f"{minutes * 60}.0", + "bill": {"id": 1, "status": "DUE"}, + "service": { + "id": model.mentorship_service.id, + "name": model.mentorship_service.name, + "slug": model.mentorship_service.slug, + }, + "mentor": { + "id": model.mentor_profile.id, + "slug": model.mentor_profile.slug, + "status": model.mentor_profile.status, + "user": { + "first_name": model.mentor_profile.user.first_name, + "last_name": model.mentor_profile.user.last_name, + "email": model.mentor_profile.user.email, + "id": model.mentor_profile.user.id, + }, + }, + "mentee": { + "email": model.user[1].email, + "first_name": model.user[1].first_name, + "id": model.user[1].id, + "last_name": model.user[1].last_name, + }, + } + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - fields = ['bill', 'mentee', 'mentor', 'service'] + fields = ["bill", "mentee", "mentor", "service"] for field in fields: - data[f'{field}_id'] = data[field] + data[f"{field}_id"] = data[field] del data[field] - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - mentorship_session_columns({ - **data, - 'accounted_duration': accounted_duration, - 'starts_at': starts_at, - 'ends_at': ends_at, - }), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + mentorship_session_columns( + { + **data, + "accounted_duration": accounted_duration, + "starts_at": starts_at, + "ends_at": ends_at, + } + ), + ], + ) """ 🔽🔽🔽 PUT capability @@ -1364,13 +1487,13 @@ def test__put__without_capabilities(self): self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_session') + url = reverse_lazy("mentorship:academy_session") response = self.client.put(url) json = response.json() expected = { - 'detail': "You (user: 1) don't have this capability: crud_mentorship_session for academy 1", - 'status_code': 403, + "detail": "You (user: 1) don't have this capability: crud_mentorship_session for academy 1", + "status_code": 403, } self.assertEqual(json, expected) @@ -1380,391 +1503,455 @@ def test__put__without_capabilities(self): 🔽🔽🔽 PUT without id """ - @patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) + @patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) def test__put__without_id(self): - model = self.bc.database.create(user=1, role=1, capability='crud_mentorship_session', profile_academy=1) + model = self.bc.database.create(user=1, role=1, capability="crud_mentorship_session", profile_academy=1) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_session') + url = reverse_lazy("mentorship:academy_session") data = [{}] - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() - expected = {'detail': 'without-id', 'status_code': 400} + expected = {"detail": "without-id", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), []) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipBill'), []) + self.assertEqual(self.bc.database.list_of("mentorship.MentorshipSession"), []) + self.assertEqual(self.bc.database.list_of("mentorship.MentorshipBill"), []) """ 🔽🔽🔽 PUT not found the MentorshipSession """ 
- @patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) + @patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) def test__put__not_found(self): cases = [ (1, {}, False), - (2, { - 'mentorship_session': 1 - }, True), + (2, {"mentorship_session": 1}, True), ] for id, kwargs, has_instance_db in cases: - model = self.bc.database.create(user=1, - role=1, - capability='crud_mentorship_session', - profile_academy=1, - **kwargs) + model = self.bc.database.create( + user=1, role=1, capability="crud_mentorship_session", profile_academy=1, **kwargs + ) self.bc.request.set_headers(academy=id) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_session') - data = [{'id': id}] - response = self.client.put(url, data, format='json') + url = reverse_lazy("mentorship:academy_session") + data = [{"id": id}] + response = self.client.put(url, data, format="json") json = response.json() - expected = {'detail': 'not-found', 'status_code': 404} + expected = {"detail": "not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - self.bc.format.to_dict(model.mentorship_session), - ] if has_instance_db else []) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipBill'), []) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + ( + [ + self.bc.format.to_dict(model.mentorship_session), + ] + if has_instance_db + else [] + ), + ) + self.assertEqual(self.bc.database.list_of("mentorship.MentorshipBill"), []) # teardown - self.bc.database.delete('mentorship.MentorshipSession') + self.bc.database.delete("mentorship.MentorshipSession") """ 🔽🔽🔽 PUT found a MentorshipSession, with one MentorProfile and MentorshipService """ - @patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) + @patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) def test__put__found__without_required_fields(self): - model = self.bc.database.create(user=1, - role=1, - capability='crud_mentorship_session', - mentorship_session=1, - mentorship_service=1, - mentor_profile=1, - profile_academy=1) + model = self.bc.database.create( + user=1, + role=1, + capability="crud_mentorship_session", + mentorship_session=1, + mentorship_service=1, + mentor_profile=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_session') - data = [{'id': 1}] - response = self.client.put(url, data, format='json') + url = reverse_lazy("mentorship:academy_session") + data = [{"id": 1}] + response = self.client.put(url, data, format="json") json = response.json() - expected = [{'mentor': ['This field is required.']}] + expected = [{"mentor": ["This field is required."]}] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - self.bc.format.to_dict(model.mentorship_session), - ]) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipBill'), []) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + self.bc.format.to_dict(model.mentorship_session), + ], + ) + self.assertEqual(self.bc.database.list_of("mentorship.MentorshipBill"), []) - 
@patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) + @patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) def test__put__found__with_required_fields(self): - model = self.bc.database.create(user=1, - role=1, - capability='crud_mentorship_session', - mentorship_session=1, - mentorship_service=1, - mentor_profile=1, - profile_academy=1) + model = self.bc.database.create( + user=1, + role=1, + capability="crud_mentorship_session", + mentorship_session=1, + mentorship_service=1, + mentor_profile=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_session') - data = [{'id': 1, 'mentor': 1}] - response = self.client.put(url, data, format='json') + url = reverse_lazy("mentorship:academy_session") + data = [{"id": 1, "mentor": 1}] + response = self.client.put(url, data, format="json") json = response.json() - expected = [put_serializer({ - 'id': 1, - 'mentee': 1, - 'service': 1, - 'calendly_uuid': None, - **data[0], - })] + expected = [ + put_serializer( + { + "id": 1, + "mentee": 1, + "service": 1, + "calendly_uuid": None, + **data[0], + } + ) + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - self.bc.format.to_dict(model.mentorship_session), - ]) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipBill'), []) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + self.bc.format.to_dict(model.mentorship_session), + ], + ) + self.assertEqual(self.bc.database.list_of("mentorship.MentorshipBill"), []) """ 🔽🔽🔽 PUT with all required fields, is_online is False """ - @patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) + @patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) def test__put__found__with_all_required_fields__is_online_as_false(self): - mentorship_bill = {'status': random.choice(['RECALCULATE', 'DUE'])} - statuses = ['PENDING', 'STARTED', 'COMPLETED', 'FAILED', 'IGNORED'] + mentorship_bill = {"status": random.choice(["RECALCULATE", "DUE"])} + statuses = ["PENDING", "STARTED", "COMPLETED", "FAILED", "IGNORED"] current_status = random.choice(statuses) statuses.remove(current_status) - mentorship_session = {'status': current_status} - - model = self.bc.database.create(user=1, - role=1, - capability='crud_mentorship_session', - mentorship_session=mentorship_session, - mentorship_service=1, - mentorship_bill=mentorship_bill, - mentor_profile=1, - profile_academy=1) + mentorship_session = {"status": current_status} + + model = self.bc.database.create( + user=1, + role=1, + capability="crud_mentorship_session", + mentorship_session=mentorship_session, + mentorship_service=1, + mentorship_bill=mentorship_bill, + mentor_profile=1, + profile_academy=1, + ) signals.mentorship_session_status.send_robust.call_args_list = [] self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_session_id', kwargs={'session_id': 1}) + url = reverse_lazy("mentorship:academy_session_id", kwargs={"session_id": 1}) date = timezone.now() - data = [{ - 'name': self.bc.fake.name(), - 'is_online': False, - 'latitude': random.random() * 180 * random.choice([1, -1]), - 'longitude': random.random() * 90 * random.choice([1, -1]), - 'longitude': 
random.random() * 90 * random.choice([1, -1]), - 'service': 1, - 'mentee': 1, - 'id': 1, - 'online_meeting_url': self.bc.fake.url(), - 'online_recording_url': self.bc.fake.url(), - 'status': random.choice(statuses), - 'allow_billing': bool(random.randint(0, 1)), - 'bill': 1, - 'agenda': self.bc.fake.text(), - 'summary': self.bc.fake.text(), - 'starts_at': self.bc.datetime.to_iso_string(date), - 'ends_at': self.bc.datetime.to_iso_string(date), - 'started_at': self.bc.datetime.to_iso_string(date), - 'ended_at': self.bc.datetime.to_iso_string(date), - 'mentor_joined_at': self.bc.datetime.to_iso_string(date), - 'mentor_left_at': self.bc.datetime.to_iso_string(date), - 'mentee_left_at': self.bc.datetime.to_iso_string(date), - 'mentor': 1, - }] - response = self.client.put(url, data, format='json') + data = [ + { + "name": self.bc.fake.name(), + "is_online": False, + "latitude": random.random() * 180 * random.choice([1, -1]), + "longitude": random.random() * 90 * random.choice([1, -1]), + "longitude": random.random() * 90 * random.choice([1, -1]), + "service": 1, + "mentee": 1, + "id": 1, + "online_meeting_url": self.bc.fake.url(), + "online_recording_url": self.bc.fake.url(), + "status": random.choice(statuses), + "allow_billing": bool(random.randint(0, 1)), + "bill": 1, + "agenda": self.bc.fake.text(), + "summary": self.bc.fake.text(), + "starts_at": self.bc.datetime.to_iso_string(date), + "ends_at": self.bc.datetime.to_iso_string(date), + "started_at": self.bc.datetime.to_iso_string(date), + "ended_at": self.bc.datetime.to_iso_string(date), + "mentor_joined_at": self.bc.datetime.to_iso_string(date), + "mentor_left_at": self.bc.datetime.to_iso_string(date), + "mentee_left_at": self.bc.datetime.to_iso_string(date), + "mentor": 1, + } + ] + response = self.client.put(url, data, format="json") json = response.json() - expected = [put_serializer({ - 'id': 1, - 'mentee': 1, - 'service': 1, - 'calendly_uuid': None, - **data[0], - })] + expected = [ + put_serializer( + { + "id": 1, + "mentee": 1, + "service": 1, + "calendly_uuid": None, + **data[0], + } + ) + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - update_fields = ['bill', 'mentee', 'mentor', 'service'] + update_fields = ["bill", "mentee", "mentor", "service"] for key in update_fields: - data[0][f'{key}_id'] = data[0].pop(key) + data[0][f"{key}_id"] = data[0].pop(key) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - { - **self.bc.format.to_dict(model.mentorship_session), - **data[0], - 'starts_at': date, - 'ends_at': date, - 'started_at': date, - 'ended_at': date, - 'mentor_joined_at': date, - 'mentor_left_at': date, - 'mentee_left_at': date, - 'suggested_accounted_duration': timedelta(0), - 'accounted_duration': timedelta(0), - 'status_message': '', - }, - ]) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipBill'), [ - self.bc.format.to_dict(model.mentorship_bill), - ]) - self.assertEqual(signals.mentorship_session_status.send_robust.call_args_list, [ - call(instance=model.mentorship_session, sender=model.mentorship_session.__class__), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + { + **self.bc.format.to_dict(model.mentorship_session), + **data[0], + "starts_at": date, + "ends_at": date, + "started_at": date, + "ended_at": date, + "mentor_joined_at": date, + "mentor_left_at": date, + "mentee_left_at": date, + "suggested_accounted_duration": timedelta(0), + "accounted_duration": timedelta(0), + 
"status_message": "", + }, + ], + ) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipBill"), + [ + self.bc.format.to_dict(model.mentorship_bill), + ], + ) + self.assertEqual( + signals.mentorship_session_status.send_robust.call_args_list, + [ + call(instance=model.mentorship_session, sender=model.mentorship_session.__class__), + ], + ) """ 🔽🔽🔽 PUT with all required fields, is_online is False, MentorshipBill finished """ - @patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) + @patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) def test__put__found__with_all_required_fields__is_online_as_false__bill_finished(self): - mentorship_bill = {'status': random.choice(['APPROVED', 'PAID', 'IGNORED'])} - statuses = ['PENDING', 'STARTED', 'COMPLETED', 'FAILED', 'IGNORED'] + mentorship_bill = {"status": random.choice(["APPROVED", "PAID", "IGNORED"])} + statuses = ["PENDING", "STARTED", "COMPLETED", "FAILED", "IGNORED"] current_status = random.choice(statuses) statuses.remove(current_status) - mentorship_session = {'status': current_status} - - model = self.bc.database.create(user=1, - role=1, - capability='crud_mentorship_session', - mentorship_session=mentorship_session, - mentorship_service=1, - mentorship_bill=mentorship_bill, - mentor_profile=1, - profile_academy=1) + mentorship_session = {"status": current_status} + + model = self.bc.database.create( + user=1, + role=1, + capability="crud_mentorship_session", + mentorship_session=mentorship_session, + mentorship_service=1, + mentorship_bill=mentorship_bill, + mentor_profile=1, + profile_academy=1, + ) signals.mentorship_session_status.send_robust.call_args_list = [] self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_session_id', kwargs={'session_id': 1}) + url = reverse_lazy("mentorship:academy_session_id", kwargs={"session_id": 1}) date = timezone.now() - data = [{ - 'name': self.bc.fake.name(), - 'is_online': False, - 'latitude': random.random() * 180 * random.choice([1, -1]), - 'longitude': random.random() * 90 * random.choice([1, -1]), - 'longitude': random.random() * 90 * random.choice([1, -1]), - 'service': 1, - 'mentee': 1, - 'online_meeting_url': self.bc.fake.url(), - 'online_recording_url': self.bc.fake.url(), - 'status': random.choice(statuses), - 'allow_billing': bool(random.randint(0, 1)), - 'bill': 1, - 'id': 1, - 'agenda': self.bc.fake.text(), - 'summary': self.bc.fake.text(), - 'starts_at': self.bc.datetime.to_iso_string(date), - 'ends_at': self.bc.datetime.to_iso_string(date), - 'started_at': self.bc.datetime.to_iso_string(date), - 'ended_at': self.bc.datetime.to_iso_string(date), - 'mentor_joined_at': self.bc.datetime.to_iso_string(date), - 'mentor_left_at': self.bc.datetime.to_iso_string(date), - 'mentee_left_at': self.bc.datetime.to_iso_string(date), - 'mentor': 1, - }] - response = self.client.put(url, data, format='json') + data = [ + { + "name": self.bc.fake.name(), + "is_online": False, + "latitude": random.random() * 180 * random.choice([1, -1]), + "longitude": random.random() * 90 * random.choice([1, -1]), + "longitude": random.random() * 90 * random.choice([1, -1]), + "service": 1, + "mentee": 1, + "online_meeting_url": self.bc.fake.url(), + "online_recording_url": self.bc.fake.url(), + "status": random.choice(statuses), + "allow_billing": bool(random.randint(0, 1)), + "bill": 1, + "id": 1, + "agenda": self.bc.fake.text(), + "summary": 
self.bc.fake.text(), + "starts_at": self.bc.datetime.to_iso_string(date), + "ends_at": self.bc.datetime.to_iso_string(date), + "started_at": self.bc.datetime.to_iso_string(date), + "ended_at": self.bc.datetime.to_iso_string(date), + "mentor_joined_at": self.bc.datetime.to_iso_string(date), + "mentor_left_at": self.bc.datetime.to_iso_string(date), + "mentee_left_at": self.bc.datetime.to_iso_string(date), + "mentor": 1, + } + ] + response = self.client.put(url, data, format="json") json = response.json() - expected = {'detail': 'trying-to-change-a-closed-bill', 'status_code': 400} + expected = {"detail": "trying-to-change-a-closed-bill", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - { - **self.bc.format.to_dict(model.mentorship_session), - }, - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + { + **self.bc.format.to_dict(model.mentorship_session), + }, + ], + ) self.assertEqual(signals.mentorship_session_status.send_robust.call_args_list, []) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipBill'), [ - self.bc.format.to_dict(model.mentorship_bill), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipBill"), + [ + self.bc.format.to_dict(model.mentorship_bill), + ], + ) """ 🔽🔽🔽 PUT passing a MentorshipBill with some MentorshipSession without MentorshipService """ - @patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) + @patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) def test__put__found__passing_a_bill_with_some_session_without_service(self): - mentorship_bill = {'status': 'DUE'} - statuses = ['PENDING', 'STARTED', 'COMPLETED', 'FAILED', 'IGNORED'] + mentorship_bill = {"status": "DUE"} + statuses = ["PENDING", "STARTED", "COMPLETED", "FAILED", "IGNORED"] current_status = random.choice(statuses) statuses.remove(current_status) - mentorship_sessions = [{ - 'status': current_status, - 'bill_id': 1, - 'service_id': 1, - }, { - 'status': current_status, - 'bill_id': 1, - 'service_id': None, - }] - - model = self.bc.database.create(user=1, - role=1, - capability='crud_mentorship_session', - mentorship_session=mentorship_sessions, - mentorship_service=1, - mentorship_bill=mentorship_bill, - mentor_profile=1, - profile_academy=1) + mentorship_sessions = [ + { + "status": current_status, + "bill_id": 1, + "service_id": 1, + }, + { + "status": current_status, + "bill_id": 1, + "service_id": None, + }, + ] + + model = self.bc.database.create( + user=1, + role=1, + capability="crud_mentorship_session", + mentorship_session=mentorship_sessions, + mentorship_service=1, + mentorship_bill=mentorship_bill, + mentor_profile=1, + profile_academy=1, + ) signals.mentorship_session_status.send_robust.call_args_list = [] self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_session_id', kwargs={'session_id': 1}) + url = reverse_lazy("mentorship:academy_session_id", kwargs={"session_id": 1}) date = timezone.now() - data = [{ - 'name': self.bc.fake.name(), - 'is_online': False, - 'latitude': random.random() * 180 * random.choice([1, -1]), - 'longitude': random.random() * 90 * random.choice([1, -1]), - 'longitude': random.random() * 90 * random.choice([1, -1]), - 'service': 1, - 'mentee': 1, - 'id': 1, - 'online_meeting_url': 
self.bc.fake.url(), - 'online_recording_url': self.bc.fake.url(), - 'status': random.choice(statuses), - 'allow_billing': bool(random.randint(0, 1)), - 'bill': 1, - 'agenda': self.bc.fake.text(), - 'summary': self.bc.fake.text(), - 'starts_at': self.bc.datetime.to_iso_string(date), - 'ends_at': self.bc.datetime.to_iso_string(date), - 'started_at': self.bc.datetime.to_iso_string(date), - 'ended_at': self.bc.datetime.to_iso_string(date), - 'mentor_joined_at': self.bc.datetime.to_iso_string(date), - 'mentor_left_at': self.bc.datetime.to_iso_string(date), - 'mentee_left_at': self.bc.datetime.to_iso_string(date), - 'mentor': 1, - }] - response = self.client.put(url, data, format='json') + data = [ + { + "name": self.bc.fake.name(), + "is_online": False, + "latitude": random.random() * 180 * random.choice([1, -1]), + "longitude": random.random() * 90 * random.choice([1, -1]), + "longitude": random.random() * 90 * random.choice([1, -1]), + "service": 1, + "mentee": 1, + "id": 1, + "online_meeting_url": self.bc.fake.url(), + "online_recording_url": self.bc.fake.url(), + "status": random.choice(statuses), + "allow_billing": bool(random.randint(0, 1)), + "bill": 1, + "agenda": self.bc.fake.text(), + "summary": self.bc.fake.text(), + "starts_at": self.bc.datetime.to_iso_string(date), + "ends_at": self.bc.datetime.to_iso_string(date), + "started_at": self.bc.datetime.to_iso_string(date), + "ended_at": self.bc.datetime.to_iso_string(date), + "mentor_joined_at": self.bc.datetime.to_iso_string(date), + "mentor_left_at": self.bc.datetime.to_iso_string(date), + "mentee_left_at": self.bc.datetime.to_iso_string(date), + "mentor": 1, + } + ] + response = self.client.put(url, data, format="json") json = response.json() - expected = [put_serializer({ - 'id': 1, - 'mentee': 1, - 'service': 1, - **data[0], - })] + expected = [ + put_serializer( + { + "id": 1, + "mentee": 1, + "service": 1, + **data[0], + } + ) + ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - update_fields = ['bill', 'mentee', 'mentor', 'service'] + update_fields = ["bill", "mentee", "mentor", "service"] for key in update_fields: - data[0][f'{key}_id'] = data[0].pop(key) + data[0][f"{key}_id"] = data[0].pop(key) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - { - **self.bc.format.to_dict(model.mentorship_session[0]), - **data[0], - 'starts_at': date, - 'ends_at': date, - 'started_at': date, - 'ended_at': date, - 'mentor_joined_at': date, - 'mentor_left_at': date, - 'mentee_left_at': date, - }, - self.bc.format.to_dict(model.mentorship_session[1]), - ]) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipBill'), [ - { - **self.bc.format.to_dict(model.mentorship_bill), - 'status': 'RECALCULATE', - }, - ]) - self.assertEqual(signals.mentorship_session_status.send_robust.call_args_list, [ - call(instance=model.mentorship_session[0], sender=model.mentorship_session[0].__class__), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + { + **self.bc.format.to_dict(model.mentorship_session[0]), + **data[0], + "starts_at": date, + "ends_at": date, + "started_at": date, + "ended_at": date, + "mentor_joined_at": date, + "mentor_left_at": date, + "mentee_left_at": date, + }, + self.bc.format.to_dict(model.mentorship_session[1]), + ], + ) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipBill"), + [ + { + **self.bc.format.to_dict(model.mentorship_bill), + "status": "RECALCULATE", + }, + ], + ) + 
self.assertEqual( + signals.mentorship_session_status.send_robust.call_args_list, + [ + call(instance=model.mentorship_session[0], sender=model.mentorship_session[0].__class__), + ], + ) diff --git a/breathecode/mentorship/tests/urls/tests_academy_session_id.py b/breathecode/mentorship/tests/urls/tests_academy_session_id.py index edb5f3b8a..181a3c7b4 100644 --- a/breathecode/mentorship/tests/urls/tests_academy_session_id.py +++ b/breathecode/mentorship/tests/urls/tests_academy_session_id.py @@ -1,6 +1,7 @@ """ This file just can contains duck tests refert to AcademyInviteView """ + import random from datetime import timedelta from unittest.mock import MagicMock, call, patch @@ -25,79 +26,79 @@ def format_datetime(self, date): def get_serializer(self, mentorship_session, mentor_profile, mentorship_service, user, data={}): return { - 'accounted_duration': mentorship_session.accounted_duration, - 'agenda': mentorship_session.agenda, - 'bill': mentorship_session.bill, - 'allow_billing': mentorship_session.allow_billing, - 'starts_at': format_datetime(self, mentorship_session.starts_at), - 'ends_at': format_datetime(self, mentorship_session.ends_at), - 'started_at': format_datetime(self, mentorship_session.started_at), - 'ended_at': format_datetime(self, mentorship_session.ended_at), - 'id': mentorship_session.id, - 'mentee': { - 'email': user.email, - 'first_name': user.first_name, - 'last_name': user.last_name, - 'id': user.id, + "accounted_duration": mentorship_session.accounted_duration, + "agenda": mentorship_session.agenda, + "bill": mentorship_session.bill, + "allow_billing": mentorship_session.allow_billing, + "starts_at": format_datetime(self, mentorship_session.starts_at), + "ends_at": format_datetime(self, mentorship_session.ends_at), + "started_at": format_datetime(self, mentorship_session.started_at), + "ended_at": format_datetime(self, mentorship_session.ended_at), + "id": mentorship_session.id, + "mentee": { + "email": user.email, + "first_name": user.first_name, + "last_name": user.last_name, + "id": user.id, }, - 'service': { - 'id': mentorship_service.id, - 'name': mentorship_service.name, - 'slug': mentorship_service.slug, + "service": { + "id": mentorship_service.id, + "name": mentorship_service.name, + "slug": mentorship_service.slug, }, - 'mentee_left_at': mentorship_session.mentee_left_at, - 'mentor': { - 'id': mentor_profile.id, - 'slug': mentor_profile.slug, - 'status': mentor_profile.status, - 'user': { - 'first_name': mentor_profile.user.first_name, - 'last_name': mentor_profile.user.last_name, - 'id': mentor_profile.user.id, - 'email': mentor_profile.user.email, - } + "mentee_left_at": mentorship_session.mentee_left_at, + "mentor": { + "id": mentor_profile.id, + "slug": mentor_profile.slug, + "status": mentor_profile.status, + "user": { + "first_name": mentor_profile.user.first_name, + "last_name": mentor_profile.user.last_name, + "id": mentor_profile.user.id, + "email": mentor_profile.user.email, + }, }, - 'is_online': mentorship_session.is_online, - 'latitude': mentorship_session.latitude, - 'longitude': mentorship_session.longitude, - 'mentor_joined_at': mentorship_session.mentor_joined_at, - 'mentor_left_at': mentorship_session.mentor_left_at, - 'status': mentorship_session.status, - 'summary': mentorship_session.summary, - 'name': mentorship_session.name, - 'online_meeting_url': mentorship_session.online_meeting_url, - 'online_recording_url': mentorship_session.online_recording_url, + "is_online": mentorship_session.is_online, + "latitude": 
mentorship_session.latitude, + "longitude": mentorship_session.longitude, + "mentor_joined_at": mentorship_session.mentor_joined_at, + "mentor_left_at": mentorship_session.mentor_left_at, + "status": mentorship_session.status, + "summary": mentorship_session.summary, + "name": mentorship_session.name, + "online_meeting_url": mentorship_session.online_meeting_url, + "online_recording_url": mentorship_session.online_recording_url, **data, } def put_serializer(data={}): return { - 'accounted_duration': None, - 'agenda': None, - 'allow_billing': True, - 'bill': None, - 'ended_at': None, - 'ends_at': None, - 'id': 0, - 'is_online': False, - 'latitude': None, - 'calendly_uuid': None, - 'longitude': None, - 'mentee': 0, - 'mentee_left_at': None, - 'mentor': 0, - 'mentor_joined_at': None, - 'mentor_left_at': None, - 'name': None, - 'online_meeting_url': None, - 'online_recording_url': None, - 'service': 0, - 'started_at': None, - 'starts_at': None, - 'status': 'PENDING', - 'summary': None, - 'questions_and_answers': None, + "accounted_duration": None, + "agenda": None, + "allow_billing": True, + "bill": None, + "ended_at": None, + "ends_at": None, + "id": 0, + "is_online": False, + "latitude": None, + "calendly_uuid": None, + "longitude": None, + "mentee": 0, + "mentee_left_at": None, + "mentor": 0, + "mentor_joined_at": None, + "mentor_left_at": None, + "name": None, + "online_meeting_url": None, + "online_recording_url": None, + "service": 0, + "started_at": None, + "starts_at": None, + "status": "PENDING", + "summary": None, + "questions_and_answers": None, **data, } @@ -108,13 +109,13 @@ class AcademyServiceTestSuite(MentorshipTestCase): """ def test__get__without_auth(self): - url = reverse_lazy('mentorship:academy_session_id', kwargs={'session_id': 1}) + url = reverse_lazy("mentorship:academy_session_id", kwargs={"session_id": 1}) response = self.client.get(url) json = response.json() expected = { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED, + "detail": "Authentication credentials were not provided.", + "status_code": status.HTTP_401_UNAUTHORIZED, } self.assertEqual(json, expected) @@ -125,13 +126,13 @@ def test__get__without_academy_header(self): self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_session_id', kwargs={'session_id': 1}) + url = reverse_lazy("mentorship:academy_session_id", kwargs={"session_id": 1}) response = self.client.get(url) json = response.json() expected = { - 'detail': "Missing academy_id parameter expected for the endpoint url or 'Academy' header", - 'status_code': 403, + "detail": "Missing academy_id parameter expected for the endpoint url or 'Academy' header", + "status_code": 403, } self.assertEqual(json, expected) @@ -147,13 +148,13 @@ def test__get__without_capabilities(self): self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_session_id', kwargs={'session_id': 1}) + url = reverse_lazy("mentorship:academy_session_id", kwargs={"session_id": 1}) response = self.client.get(url) json = response.json() expected = { - 'detail': "You (user: 1) don't have this capability: read_mentorship_session for academy 1", - 'status_code': 403, + "detail": "You (user: 1) don't have this capability: read_mentorship_session for academy 1", + "status_code": 403, } self.assertEqual(json, expected) @@ -164,16 +165,16 @@ def test__get__without_capabilities(self): """ def test__get__without_data(self): - model = 
self.bc.database.create(user=1, role=1, capability='read_mentorship_session', profile_academy=1) + model = self.bc.database.create(user=1, role=1, capability="read_mentorship_session", profile_academy=1) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_session_id', kwargs={'session_id': 1}) + url = reverse_lazy("mentorship:academy_session_id", kwargs={"session_id": 1}) response = self.client.get(url) json = response.json() - expected = {'detail': 'not-found', 'status_code': 404} + expected = {"detail": "not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) @@ -183,34 +184,36 @@ def test__get__without_data(self): """ def test__get__with_one_mentor_profile(self): - model = self.bc.database.create(user=1, - role=1, - capability='read_mentorship_session', - mentorship_session=1, - mentor_profile=1, - mentorship_service=1, - profile_academy=1) + model = self.bc.database.create( + user=1, + role=1, + capability="read_mentorship_session", + mentorship_session=1, + mentor_profile=1, + mentorship_service=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_session_id', kwargs={'session_id': 1}) + url = reverse_lazy("mentorship:academy_session_id", kwargs={"session_id": 1}) response = self.client.get(url) json = response.json() - expected = get_serializer(self, - model.mentorship_session, - model.mentor_profile, - model.mentorship_service, - model.user, - data={}) + expected = get_serializer( + self, model.mentorship_session, model.mentor_profile, model.mentorship_service, model.user, data={} + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - self.bc.format.to_dict(model.mentorship_session), - ]) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipBill'), []) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + self.bc.format.to_dict(model.mentorship_session), + ], + ) + self.assertEqual(self.bc.database.list_of("mentorship.MentorshipBill"), []) """ 🔽🔽🔽 PUT capability @@ -222,13 +225,13 @@ def test__put__without_capabilities(self): self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_session_id', kwargs={'session_id': 1}) + url = reverse_lazy("mentorship:academy_session_id", kwargs={"session_id": 1}) response = self.client.put(url) json = response.json() expected = { - 'detail': "You (user: 1) don't have this capability: crud_mentorship_session for academy 1", - 'status_code': 403, + "detail": "You (user: 1) don't have this capability: crud_mentorship_session for academy 1", + "status_code": 403, } self.assertEqual(json, expected) @@ -238,594 +241,675 @@ def test__put__without_capabilities(self): 🔽🔽🔽 PUT not found the MentorshipSession """ - @patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) + @patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) def test__put__not_found(self): cases = [ (1, {}, False), - (2, { - 'mentorship_session': 1 - }, True), + (2, {"mentorship_session": 1}, True), ] for id, kwargs, has_instance_db in cases: - model = self.bc.database.create(user=1, - role=1, - capability='crud_mentorship_session', - 
profile_academy=1, - **kwargs) + model = self.bc.database.create( + user=1, role=1, capability="crud_mentorship_session", profile_academy=1, **kwargs + ) self.bc.request.set_headers(academy=id) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_session_id', kwargs={'session_id': id}) + url = reverse_lazy("mentorship:academy_session_id", kwargs={"session_id": id}) response = self.client.put(url) json = response.json() - expected = {'detail': 'not-found', 'status_code': 404} + expected = {"detail": "not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - self.bc.format.to_dict(model.mentorship_session), - ] if has_instance_db else []) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipBill'), []) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + ( + [ + self.bc.format.to_dict(model.mentorship_session), + ] + if has_instance_db + else [] + ), + ) + self.assertEqual(self.bc.database.list_of("mentorship.MentorshipBill"), []) # teardown - self.bc.database.delete('mentorship.MentorshipSession') + self.bc.database.delete("mentorship.MentorshipSession") """ 🔽🔽🔽 PUT found a MentorshipSession, with one MentorProfile and MentorshipService """ - @patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) + @patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) def test__put__found__without_required_fields(self): - model = self.bc.database.create(user=1, - role=1, - capability='crud_mentorship_session', - mentorship_session=1, - mentorship_service=1, - mentor_profile=1, - profile_academy=1) + model = self.bc.database.create( + user=1, + role=1, + capability="crud_mentorship_session", + mentorship_session=1, + mentorship_service=1, + mentor_profile=1, + profile_academy=1, + ) self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_session_id', kwargs={'session_id': 1}) + url = reverse_lazy("mentorship:academy_session_id", kwargs={"session_id": 1}) response = self.client.put(url) json = response.json() - expected = {'mentor': ['This field is required.']} + expected = {"mentor": ["This field is required."]} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - self.bc.format.to_dict(model.mentorship_session), - ]) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipBill'), []) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + self.bc.format.to_dict(model.mentorship_session), + ], + ) + self.assertEqual(self.bc.database.list_of("mentorship.MentorshipBill"), []) - @patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) + @patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) def test__put__found__with_required_fields(self): - model = self.bc.database.create(user=1, - role=1, - capability='crud_mentorship_session', - mentorship_session=1, - mentorship_service=1, - mentor_profile=1, - profile_academy=1) + model = self.bc.database.create( + user=1, + role=1, + capability="crud_mentorship_session", + mentorship_session=1, + mentorship_service=1, + mentor_profile=1, + profile_academy=1, + ) 
self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_session_id', kwargs={'session_id': 1}) - data = {'mentor': 1} - response = self.client.put(url, data, format='json') + url = reverse_lazy("mentorship:academy_session_id", kwargs={"session_id": 1}) + data = {"mentor": 1} + response = self.client.put(url, data, format="json") json = response.json() - expected = put_serializer({ - 'id': 1, - 'mentee': 1, - 'service': 1, - **data, - }) + expected = put_serializer( + { + "id": 1, + "mentee": 1, + "service": 1, + **data, + } + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - self.bc.format.to_dict(model.mentorship_session), - ]) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipBill'), []) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + self.bc.format.to_dict(model.mentorship_session), + ], + ) + self.assertEqual(self.bc.database.list_of("mentorship.MentorshipBill"), []) """ 🔽🔽🔽 PUT with all required fields, is_online is False """ - @patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) + @patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) def test__put__found__with_all_required_fields__is_online_as_false(self): - mentorship_bill = {'status': random.choice(['RECALCULATE', 'DUE'])} - statuses = ['PENDING', 'STARTED', 'COMPLETED', 'FAILED', 'IGNORED'] + mentorship_bill = {"status": random.choice(["RECALCULATE", "DUE"])} + statuses = ["PENDING", "STARTED", "COMPLETED", "FAILED", "IGNORED"] current_status = random.choice(statuses) statuses.remove(current_status) - mentorship_session = {'status': current_status} - - model = self.bc.database.create(user=1, - role=1, - capability='crud_mentorship_session', - mentorship_session=mentorship_session, - mentorship_service=1, - mentorship_bill=mentorship_bill, - mentor_profile=1, - profile_academy=1) + mentorship_session = {"status": current_status} + + model = self.bc.database.create( + user=1, + role=1, + capability="crud_mentorship_session", + mentorship_session=mentorship_session, + mentorship_service=1, + mentorship_bill=mentorship_bill, + mentor_profile=1, + profile_academy=1, + ) signals.mentorship_session_status.send_robust.call_args_list = [] self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_session_id', kwargs={'session_id': 1}) + url = reverse_lazy("mentorship:academy_session_id", kwargs={"session_id": 1}) date = timezone.now() data = { - 'name': self.bc.fake.name(), - 'is_online': False, - 'latitude': random.random() * 180 * random.choice([1, -1]), - 'longitude': random.random() * 90 * random.choice([1, -1]), - 'longitude': random.random() * 90 * random.choice([1, -1]), - 'service': 1, - 'mentee': 1, - 'online_meeting_url': self.bc.fake.url(), - 'online_recording_url': self.bc.fake.url(), - 'status': random.choice(statuses), - 'allow_billing': bool(random.randint(0, 1)), - 'bill': 1, - 'agenda': self.bc.fake.text(), - 'summary': self.bc.fake.text(), - 'starts_at': self.bc.datetime.to_iso_string(date), - 'ends_at': self.bc.datetime.to_iso_string(date), - 'started_at': self.bc.datetime.to_iso_string(date), - 'ended_at': self.bc.datetime.to_iso_string(date), - 'mentor_joined_at': self.bc.datetime.to_iso_string(date), - 'mentor_left_at': 
self.bc.datetime.to_iso_string(date), - 'mentee_left_at': self.bc.datetime.to_iso_string(date), - 'mentor': 1, + "name": self.bc.fake.name(), + "is_online": False, + "latitude": random.random() * 180 * random.choice([1, -1]), + "longitude": random.random() * 90 * random.choice([1, -1]), + "longitude": random.random() * 90 * random.choice([1, -1]), + "service": 1, + "mentee": 1, + "online_meeting_url": self.bc.fake.url(), + "online_recording_url": self.bc.fake.url(), + "status": random.choice(statuses), + "allow_billing": bool(random.randint(0, 1)), + "bill": 1, + "agenda": self.bc.fake.text(), + "summary": self.bc.fake.text(), + "starts_at": self.bc.datetime.to_iso_string(date), + "ends_at": self.bc.datetime.to_iso_string(date), + "started_at": self.bc.datetime.to_iso_string(date), + "ended_at": self.bc.datetime.to_iso_string(date), + "mentor_joined_at": self.bc.datetime.to_iso_string(date), + "mentor_left_at": self.bc.datetime.to_iso_string(date), + "mentee_left_at": self.bc.datetime.to_iso_string(date), + "mentor": 1, } - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() - expected = put_serializer({ - 'id': 1, - 'mentee': 1, - 'service': 1, - **data, - }) + expected = put_serializer( + { + "id": 1, + "mentee": 1, + "service": 1, + **data, + } + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - update_fields = ['bill', 'mentee', 'mentor', 'service'] + update_fields = ["bill", "mentee", "mentor", "service"] for key in update_fields: - data[f'{key}_id'] = data.pop(key) + data[f"{key}_id"] = data.pop(key) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - { - **self.bc.format.to_dict(model.mentorship_session), - **data, - 'starts_at': date, - 'ends_at': date, - 'started_at': date, - 'ended_at': date, - 'mentor_joined_at': date, - 'mentor_left_at': date, - 'mentee_left_at': date, - 'suggested_accounted_duration': timedelta(0), - 'accounted_duration': timedelta(0), - 'status_message': '', - }, - ]) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipBill'), [ - self.bc.format.to_dict(model.mentorship_bill), - ]) - self.assertEqual(signals.mentorship_session_status.send_robust.call_args_list, [ - call(instance=model.mentorship_session, sender=model.mentorship_session.__class__), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + { + **self.bc.format.to_dict(model.mentorship_session), + **data, + "starts_at": date, + "ends_at": date, + "started_at": date, + "ended_at": date, + "mentor_joined_at": date, + "mentor_left_at": date, + "mentee_left_at": date, + "suggested_accounted_duration": timedelta(0), + "accounted_duration": timedelta(0), + "status_message": "", + }, + ], + ) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipBill"), + [ + self.bc.format.to_dict(model.mentorship_bill), + ], + ) + self.assertEqual( + signals.mentorship_session_status.send_robust.call_args_list, + [ + call(instance=model.mentorship_session, sender=model.mentorship_session.__class__), + ], + ) """ 🔽🔽🔽 PUT with all required fields, is_online is False, MentorshipBill finished """ - @patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) + @patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) def test__put__found__with_all_required_fields__is_online_as_false__bill_finished(self): - mentorship_bill = {'status': 
random.choice(['APPROVED', 'PAID', 'IGNORED'])} - statuses = ['PENDING', 'STARTED', 'COMPLETED', 'FAILED', 'IGNORED'] + mentorship_bill = {"status": random.choice(["APPROVED", "PAID", "IGNORED"])} + statuses = ["PENDING", "STARTED", "COMPLETED", "FAILED", "IGNORED"] current_status = random.choice(statuses) statuses.remove(current_status) - mentorship_session = {'status': current_status} - - model = self.bc.database.create(user=1, - role=1, - capability='crud_mentorship_session', - mentorship_session=mentorship_session, - mentorship_service=1, - mentorship_bill=mentorship_bill, - mentor_profile=1, - profile_academy=1) + mentorship_session = {"status": current_status} + + model = self.bc.database.create( + user=1, + role=1, + capability="crud_mentorship_session", + mentorship_session=mentorship_session, + mentorship_service=1, + mentorship_bill=mentorship_bill, + mentor_profile=1, + profile_academy=1, + ) signals.mentorship_session_status.send_robust.call_args_list = [] self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_session_id', kwargs={'session_id': 1}) + url = reverse_lazy("mentorship:academy_session_id", kwargs={"session_id": 1}) date = timezone.now() data = { - 'name': self.bc.fake.name(), - 'is_online': False, - 'latitude': random.random() * 180 * random.choice([1, -1]), - 'longitude': random.random() * 90 * random.choice([1, -1]), - 'longitude': random.random() * 90 * random.choice([1, -1]), - 'service': 1, - 'mentee': 1, - 'online_meeting_url': self.bc.fake.url(), - 'online_recording_url': self.bc.fake.url(), - 'status': random.choice(statuses), - 'allow_billing': bool(random.randint(0, 1)), - 'bill': 1, - 'agenda': self.bc.fake.text(), - 'summary': self.bc.fake.text(), - 'starts_at': self.bc.datetime.to_iso_string(date), - 'ends_at': self.bc.datetime.to_iso_string(date), - 'started_at': self.bc.datetime.to_iso_string(date), - 'ended_at': self.bc.datetime.to_iso_string(date), - 'mentor_joined_at': self.bc.datetime.to_iso_string(date), - 'mentor_left_at': self.bc.datetime.to_iso_string(date), - 'mentee_left_at': self.bc.datetime.to_iso_string(date), - 'mentor': 1, + "name": self.bc.fake.name(), + "is_online": False, + "latitude": random.random() * 180 * random.choice([1, -1]), + "longitude": random.random() * 90 * random.choice([1, -1]), + "longitude": random.random() * 90 * random.choice([1, -1]), + "service": 1, + "mentee": 1, + "online_meeting_url": self.bc.fake.url(), + "online_recording_url": self.bc.fake.url(), + "status": random.choice(statuses), + "allow_billing": bool(random.randint(0, 1)), + "bill": 1, + "agenda": self.bc.fake.text(), + "summary": self.bc.fake.text(), + "starts_at": self.bc.datetime.to_iso_string(date), + "ends_at": self.bc.datetime.to_iso_string(date), + "started_at": self.bc.datetime.to_iso_string(date), + "ended_at": self.bc.datetime.to_iso_string(date), + "mentor_joined_at": self.bc.datetime.to_iso_string(date), + "mentor_left_at": self.bc.datetime.to_iso_string(date), + "mentee_left_at": self.bc.datetime.to_iso_string(date), + "mentor": 1, } - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() - expected = {'detail': 'trying-to-change-a-closed-bill', 'status_code': 400} + expected = {"detail": "trying-to-change-a-closed-bill", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - 
self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - { - **self.bc.format.to_dict(model.mentorship_session), - }, - ]) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipBill'), [ - self.bc.format.to_dict(model.mentorship_bill), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + { + **self.bc.format.to_dict(model.mentorship_session), + }, + ], + ) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipBill"), + [ + self.bc.format.to_dict(model.mentorship_bill), + ], + ) self.assertEqual(str(signals.mentorship_session_status.send_robust.call_args_list), str([])) """ 🔽🔽🔽 PUT with all required fields, is_online is True, trying to edit readonly fields """ - @patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) + @patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) def test__put__found__with_all_required_fields__is_online_as_true__trying_to_edit_readonly_fields(self): - statuses = ['PENDING', 'STARTED', 'COMPLETED', 'FAILED', 'IGNORED'] - - model = self.bc.database.create(user=1, - role=1, - capability='crud_mentorship_session', - mentorship_session=1, - mentorship_service=1, - mentorship_bill=1, - mentor_profile=1, - profile_academy=1) + statuses = ["PENDING", "STARTED", "COMPLETED", "FAILED", "IGNORED"] + + model = self.bc.database.create( + user=1, + role=1, + capability="crud_mentorship_session", + mentorship_session=1, + mentorship_service=1, + mentorship_bill=1, + mentor_profile=1, + profile_academy=1, + ) signals.mentorship_session_status.send_robust.call_args_list = [] self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_session_id', kwargs={'session_id': 1}) + url = reverse_lazy("mentorship:academy_session_id", kwargs={"session_id": 1}) date = timezone.now() cases = [ { - 'mentor_joined_at': self.bc.datetime.to_iso_string(date), + "mentor_joined_at": self.bc.datetime.to_iso_string(date), }, { - 'mentor_left_at': self.bc.datetime.to_iso_string(date), + "mentor_left_at": self.bc.datetime.to_iso_string(date), }, { - 'mentee_left_at': self.bc.datetime.to_iso_string(date), + "mentee_left_at": self.bc.datetime.to_iso_string(date), }, { - 'started_at': self.bc.datetime.to_iso_string(date), + "started_at": self.bc.datetime.to_iso_string(date), }, { - 'ended_at': self.bc.datetime.to_iso_string(date), + "ended_at": self.bc.datetime.to_iso_string(date), }, ] for kwargs in cases: data = { - 'name': self.bc.fake.name(), - 'is_online': True, - 'latitude': random.random() * 180 * random.choice([1, -1]), - 'longitude': random.random() * 90 * random.choice([1, -1]), - 'longitude': random.random() * 90 * random.choice([1, -1]), - 'service': 1, - 'mentee': 1, - 'online_meeting_url': self.bc.fake.url(), - 'online_recording_url': self.bc.fake.url(), - 'status': random.choice(statuses), - 'allow_billing': bool(random.randint(0, 1)), - 'bill': 1, - 'agenda': self.bc.fake.text(), - 'summary': self.bc.fake.text(), - 'starts_at': self.bc.datetime.to_iso_string(date), - 'ends_at': self.bc.datetime.to_iso_string(date), - 'mentor': 1, + "name": self.bc.fake.name(), + "is_online": True, + "latitude": random.random() * 180 * random.choice([1, -1]), + "longitude": random.random() * 90 * random.choice([1, -1]), + "longitude": random.random() * 90 * random.choice([1, -1]), + "service": 1, + "mentee": 1, + "online_meeting_url": self.bc.fake.url(), + "online_recording_url": 
self.bc.fake.url(), + "status": random.choice(statuses), + "allow_billing": bool(random.randint(0, 1)), + "bill": 1, + "agenda": self.bc.fake.text(), + "summary": self.bc.fake.text(), + "starts_at": self.bc.datetime.to_iso_string(date), + "ends_at": self.bc.datetime.to_iso_string(date), + "mentor": 1, **kwargs, } - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() - expected = {'detail': 'read-only-field-online', 'status_code': 400} + expected = {"detail": "read-only-field-online", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - { - **self.bc.format.to_dict(model.mentorship_session), - }, - ]) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipBill'), [ - self.bc.format.to_dict(model.mentorship_bill), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + { + **self.bc.format.to_dict(model.mentorship_session), + }, + ], + ) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipBill"), + [ + self.bc.format.to_dict(model.mentorship_bill), + ], + ) self.assertEqual(str(signals.mentorship_session_status.send_robust.call_args_list), str([])) """ 🔽🔽🔽 PUT with all required fields, is_online is True """ - @patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) + @patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) def test__put__found__with_all_required_fields__is_online_as_true(self): - mentorship_bill = {'status': random.choice(['RECALCULATE', 'DUE'])} - statuses = ['PENDING', 'STARTED', 'COMPLETED', 'FAILED', 'IGNORED'] + mentorship_bill = {"status": random.choice(["RECALCULATE", "DUE"])} + statuses = ["PENDING", "STARTED", "COMPLETED", "FAILED", "IGNORED"] current_status = random.choice(statuses) statuses.remove(current_status) - mentorship_session = {'status': current_status} - - model = self.bc.database.create(user=1, - role=1, - capability='crud_mentorship_session', - mentorship_session=mentorship_session, - mentorship_service=1, - mentorship_bill=mentorship_bill, - mentor_profile=1, - profile_academy=1) + mentorship_session = {"status": current_status} + + model = self.bc.database.create( + user=1, + role=1, + capability="crud_mentorship_session", + mentorship_session=mentorship_session, + mentorship_service=1, + mentorship_bill=mentorship_bill, + mentor_profile=1, + profile_academy=1, + ) signals.mentorship_session_status.send_robust.call_args_list = [] self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_session_id', kwargs={'session_id': 1}) + url = reverse_lazy("mentorship:academy_session_id", kwargs={"session_id": 1}) date = timezone.now() data = { - 'name': self.bc.fake.name(), - 'is_online': True, - 'latitude': random.random() * 180 * random.choice([1, -1]), - 'longitude': random.random() * 90 * random.choice([1, -1]), - 'longitude': random.random() * 90 * random.choice([1, -1]), - 'service': 1, - 'mentee': 1, - 'online_meeting_url': self.bc.fake.url(), - 'online_recording_url': self.bc.fake.url(), - 'status': random.choice(statuses), - 'allow_billing': bool(random.randint(0, 1)), - 'bill': 1, - 'agenda': self.bc.fake.text(), - 'summary': self.bc.fake.text(), - 'starts_at': self.bc.datetime.to_iso_string(date), - 'ends_at': 
self.bc.datetime.to_iso_string(date), - 'mentor': 1, + "name": self.bc.fake.name(), + "is_online": True, + "latitude": random.random() * 180 * random.choice([1, -1]), + "longitude": random.random() * 90 * random.choice([1, -1]), + "longitude": random.random() * 90 * random.choice([1, -1]), + "service": 1, + "mentee": 1, + "online_meeting_url": self.bc.fake.url(), + "online_recording_url": self.bc.fake.url(), + "status": random.choice(statuses), + "allow_billing": bool(random.randint(0, 1)), + "bill": 1, + "agenda": self.bc.fake.text(), + "summary": self.bc.fake.text(), + "starts_at": self.bc.datetime.to_iso_string(date), + "ends_at": self.bc.datetime.to_iso_string(date), + "mentor": 1, } - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() - expected = put_serializer({ - 'id': 1, - 'mentee': 1, - 'service': 1, - **data, - }) + expected = put_serializer( + { + "id": 1, + "mentee": 1, + "service": 1, + **data, + } + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - update_fields = ['bill', 'mentee', 'mentor', 'service'] + update_fields = ["bill", "mentee", "mentor", "service"] for key in update_fields: - data[f'{key}_id'] = data.pop(key) + data[f"{key}_id"] = data.pop(key) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - { - **self.bc.format.to_dict(model.mentorship_session), - **data, - 'starts_at': date, - 'ends_at': date, - 'suggested_accounted_duration': timedelta(0), - 'accounted_duration': timedelta(0), - 'status_message': 'No one joined this session, nothing will be accounted for.', - }, - ]) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipBill'), [ - self.bc.format.to_dict(model.mentorship_bill), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + { + **self.bc.format.to_dict(model.mentorship_session), + **data, + "starts_at": date, + "ends_at": date, + "suggested_accounted_duration": timedelta(0), + "accounted_duration": timedelta(0), + "status_message": "No one joined this session, nothing will be accounted for.", + }, + ], + ) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipBill"), + [ + self.bc.format.to_dict(model.mentorship_bill), + ], + ) - self.assertEqual(signals.mentorship_session_status.send_robust.call_args_list, [ - call(instance=model.mentorship_session, sender=model.mentorship_session.__class__), - ]) + self.assertEqual( + signals.mentorship_session_status.send_robust.call_args_list, + [ + call(instance=model.mentorship_session, sender=model.mentorship_session.__class__), + ], + ) """ 🔽🔽🔽 PUT with all required fields, is_online is True, MentorshipBill finished """ - @patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) + @patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) def test__put__found__with_all_required_fields__is_online_as_true__bill_finished(self): - mentorship_bill = {'status': random.choice(['APPROVED', 'PAID', 'IGNORED'])} - statuses = ['PENDING', 'STARTED', 'COMPLETED', 'FAILED', 'IGNORED'] - - model = self.bc.database.create(user=1, - role=1, - capability='crud_mentorship_session', - mentorship_session=1, - mentorship_service=1, - mentorship_bill=mentorship_bill, - mentor_profile=1, - profile_academy=1) + mentorship_bill = {"status": random.choice(["APPROVED", "PAID", "IGNORED"])} + statuses = ["PENDING", "STARTED", "COMPLETED", "FAILED", 
"IGNORED"] + + model = self.bc.database.create( + user=1, + role=1, + capability="crud_mentorship_session", + mentorship_session=1, + mentorship_service=1, + mentorship_bill=mentorship_bill, + mentor_profile=1, + profile_academy=1, + ) signals.mentorship_session_status.send_robust.call_args_list = [] self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_session_id', kwargs={'session_id': 1}) + url = reverse_lazy("mentorship:academy_session_id", kwargs={"session_id": 1}) date = timezone.now() data = { - 'name': self.bc.fake.name(), - 'is_online': True, - 'latitude': random.random() * 180 * random.choice([1, -1]), - 'longitude': random.random() * 90 * random.choice([1, -1]), - 'longitude': random.random() * 90 * random.choice([1, -1]), - 'service': 1, - 'mentee': 1, - 'online_meeting_url': self.bc.fake.url(), - 'online_recording_url': self.bc.fake.url(), - 'status': random.choice(statuses), - 'allow_billing': bool(random.randint(0, 1)), - 'bill': 1, - 'agenda': self.bc.fake.text(), - 'summary': self.bc.fake.text(), - 'starts_at': self.bc.datetime.to_iso_string(date), - 'ends_at': self.bc.datetime.to_iso_string(date), - 'mentor': 1, + "name": self.bc.fake.name(), + "is_online": True, + "latitude": random.random() * 180 * random.choice([1, -1]), + "longitude": random.random() * 90 * random.choice([1, -1]), + "longitude": random.random() * 90 * random.choice([1, -1]), + "service": 1, + "mentee": 1, + "online_meeting_url": self.bc.fake.url(), + "online_recording_url": self.bc.fake.url(), + "status": random.choice(statuses), + "allow_billing": bool(random.randint(0, 1)), + "bill": 1, + "agenda": self.bc.fake.text(), + "summary": self.bc.fake.text(), + "starts_at": self.bc.datetime.to_iso_string(date), + "ends_at": self.bc.datetime.to_iso_string(date), + "mentor": 1, } - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() - expected = {'detail': 'trying-to-change-a-closed-bill', 'status_code': 400} + expected = {"detail": "trying-to-change-a-closed-bill", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - update_fields = ['bill', 'mentee', 'mentor', 'service'] + update_fields = ["bill", "mentee", "mentor", "service"] for key in update_fields: - data[f'{key}_id'] = data.pop(key) + data[f"{key}_id"] = data.pop(key) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - { - **self.bc.format.to_dict(model.mentorship_session), - }, - ]) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipBill'), [ - self.bc.format.to_dict(model.mentorship_bill), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + { + **self.bc.format.to_dict(model.mentorship_session), + }, + ], + ) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipBill"), + [ + self.bc.format.to_dict(model.mentorship_bill), + ], + ) self.assertEqual(str(signals.mentorship_session_status.send_robust.call_args_list), str([])) """ 🔽🔽🔽 PUT passing a MentorshipBill with some MentorshipSession without MentorshipService """ - @patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', MagicMock()) + @patch("breathecode.mentorship.signals.mentorship_session_status.send_robust", MagicMock()) # @patch('breathecode.mentorship.signals.mentorship_session_status.send_robust', # MagicMock(side_effect=[None, None, Exception('error')])) 
def test__put__found__passing_a_bill_with_some_session_without_service(self): - mentorship_bill = {'status': 'DUE'} - statuses = ['PENDING', 'STARTED', 'COMPLETED', 'FAILED', 'IGNORED'] + mentorship_bill = {"status": "DUE"} + statuses = ["PENDING", "STARTED", "COMPLETED", "FAILED", "IGNORED"] current_status = random.choice(statuses) statuses.remove(current_status) - mentorship_sessions = [{ - 'status': current_status, - 'bill_id': 1, - 'service_id': 1, - }, { - 'status': current_status, - 'bill_id': 1, - 'service_id': None, - }] - - model = self.bc.database.create(user=1, - role=1, - capability='crud_mentorship_session', - mentorship_session=mentorship_sessions, - mentorship_service=1, - mentorship_bill=mentorship_bill, - mentor_profile=1, - profile_academy=1) + mentorship_sessions = [ + { + "status": current_status, + "bill_id": 1, + "service_id": 1, + }, + { + "status": current_status, + "bill_id": 1, + "service_id": None, + }, + ] + + model = self.bc.database.create( + user=1, + role=1, + capability="crud_mentorship_session", + mentorship_session=mentorship_sessions, + mentorship_service=1, + mentorship_bill=mentorship_bill, + mentor_profile=1, + profile_academy=1, + ) signals.mentorship_session_status.send_robust.call_args_list = [] self.bc.request.set_headers(academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('mentorship:academy_session_id', kwargs={'session_id': 1}) + url = reverse_lazy("mentorship:academy_session_id", kwargs={"session_id": 1}) date = timezone.now() data = { - 'name': self.bc.fake.name(), - 'is_online': False, - 'latitude': random.random() * 180 * random.choice([1, -1]), - 'longitude': random.random() * 90 * random.choice([1, -1]), - 'longitude': random.random() * 90 * random.choice([1, -1]), - 'service': 1, - 'mentee': 1, - 'online_meeting_url': self.bc.fake.url(), - 'online_recording_url': self.bc.fake.url(), - 'status': random.choice(statuses), - 'allow_billing': bool(random.randint(0, 1)), - 'bill': 1, - 'agenda': self.bc.fake.text(), - 'summary': self.bc.fake.text(), - 'starts_at': self.bc.datetime.to_iso_string(date), - 'ends_at': self.bc.datetime.to_iso_string(date), - 'started_at': self.bc.datetime.to_iso_string(date), - 'ended_at': self.bc.datetime.to_iso_string(date), - 'mentor_joined_at': self.bc.datetime.to_iso_string(date), - 'mentor_left_at': self.bc.datetime.to_iso_string(date), - 'mentee_left_at': self.bc.datetime.to_iso_string(date), - 'mentor': 1, + "name": self.bc.fake.name(), + "is_online": False, + "latitude": random.random() * 180 * random.choice([1, -1]), + "longitude": random.random() * 90 * random.choice([1, -1]), + "longitude": random.random() * 90 * random.choice([1, -1]), + "service": 1, + "mentee": 1, + "online_meeting_url": self.bc.fake.url(), + "online_recording_url": self.bc.fake.url(), + "status": random.choice(statuses), + "allow_billing": bool(random.randint(0, 1)), + "bill": 1, + "agenda": self.bc.fake.text(), + "summary": self.bc.fake.text(), + "starts_at": self.bc.datetime.to_iso_string(date), + "ends_at": self.bc.datetime.to_iso_string(date), + "started_at": self.bc.datetime.to_iso_string(date), + "ended_at": self.bc.datetime.to_iso_string(date), + "mentor_joined_at": self.bc.datetime.to_iso_string(date), + "mentor_left_at": self.bc.datetime.to_iso_string(date), + "mentee_left_at": self.bc.datetime.to_iso_string(date), + "mentor": 1, } - response = self.client.put(url, data, format='json') + response = self.client.put(url, data, format="json") json = response.json() - expected = put_serializer({ - 'id': 
1, - 'mentee': 1, - 'service': 1, - **data, - }) + expected = put_serializer( + { + "id": 1, + "mentee": 1, + "service": 1, + **data, + } + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - update_fields = ['bill', 'mentee', 'mentor', 'service'] + update_fields = ["bill", "mentee", "mentor", "service"] for key in update_fields: - data[f'{key}_id'] = data.pop(key) + data[f"{key}_id"] = data.pop(key) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - { - **self.bc.format.to_dict(model.mentorship_session[0]), - **data, - 'starts_at': date, - 'ends_at': date, - 'started_at': date, - 'ended_at': date, - 'mentor_joined_at': date, - 'mentor_left_at': date, - 'mentee_left_at': date, - }, - self.bc.format.to_dict(model.mentorship_session[1]), - ]) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipBill'), [ - { - **self.bc.format.to_dict(model.mentorship_bill), - 'status': 'RECALCULATE', - }, - ]) - - self.assertEqual(signals.mentorship_session_status.send_robust.call_args_list, [ - call(instance=model.mentorship_session[0], sender=model.mentorship_session[0].__class__), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + { + **self.bc.format.to_dict(model.mentorship_session[0]), + **data, + "starts_at": date, + "ends_at": date, + "started_at": date, + "ended_at": date, + "mentor_joined_at": date, + "mentor_left_at": date, + "mentee_left_at": date, + }, + self.bc.format.to_dict(model.mentorship_session[1]), + ], + ) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipBill"), + [ + { + **self.bc.format.to_dict(model.mentorship_bill), + "status": "RECALCULATE", + }, + ], + ) + + self.assertEqual( + signals.mentorship_session_status.send_robust.call_args_list, + [ + call(instance=model.mentorship_session[0], sender=model.mentorship_session[0].__class__), + ], + ) diff --git a/breathecode/mentorship/tests/urls/tests_check_mentorship_profile.py b/breathecode/mentorship/tests/urls/tests_check_mentorship_profile.py index ed1cc4be5..e71d1b468 100644 --- a/breathecode/mentorship/tests/urls/tests_check_mentorship_profile.py +++ b/breathecode/mentorship/tests/urls/tests_check_mentorship_profile.py @@ -1,6 +1,7 @@ """ This file just can contains duck tests refert to AcademyInviteView """ + import logging import random from unittest.mock import MagicMock, call @@ -18,7 +19,7 @@ @pytest.fixture(autouse=True) def setup(db, monkeypatch): m1 = MagicMock() - monkeypatch.setattr('logging.Logger.error', m1) + monkeypatch.setattr("logging.Logger.error", m1) yield m1 @@ -40,7 +41,7 @@ def wrapper(request1=True, request2=True): m = MagicMock(side_effect=[request1, request2]) - monkeypatch.setattr(requests, 'head', m) + monkeypatch.setattr(requests, "head", m) yield wrapper @@ -50,7 +51,7 @@ def test_no_mentors(bc: Breathecode, mock_head_request): check_mentorship_profile.delay(1) - assert logging.Logger.error.call_args_list == [call('Mentorship profile 1 not found', exc_info=True)] + assert logging.Logger.error.call_args_list == [call("Mentorship profile 1 not found", exc_info=True)] assert requests.head.call_args_list == [] @@ -62,10 +63,12 @@ def test_no_urls__no_syllabus(bc: Breathecode, mock_head_request): check_mentorship_profile.delay(1) assert logging.Logger.error.call_args_list == [] - assert bc.database.list_of('mentorship.MentorProfile') == [{ - **bc.format.to_dict(model.mentor_profile), - 'availability_report': ['no-online-meeting-url', 'no-booking-url', 
'no-syllabus'], - }] + assert bc.database.list_of("mentorship.MentorProfile") == [ + { + **bc.format.to_dict(model.mentor_profile), + "availability_report": ["no-online-meeting-url", "no-booking-url", "no-syllabus"], + } + ] assert requests.head.call_args_list == [] @@ -73,15 +76,17 @@ def test_no_urls__no_syllabus(bc: Breathecode, mock_head_request): def test_with_online_meeting_url(bc: Breathecode, fake, mock_head_request): mock_head_request(request1=False, request2=False) - model = bc.database.create(mentor_profile={'online_meeting_url': fake.url()}) + model = bc.database.create(mentor_profile={"online_meeting_url": fake.url()}) check_mentorship_profile.delay(1) assert logging.Logger.error.call_args_list == [] - assert bc.database.list_of('mentorship.MentorProfile') == [{ - **bc.format.to_dict(model.mentor_profile), - 'availability_report': ['no-booking-url', 'no-syllabus', 'bad-online-meeting-url'], - }] + assert bc.database.list_of("mentorship.MentorProfile") == [ + { + **bc.format.to_dict(model.mentor_profile), + "availability_report": ["no-booking-url", "no-syllabus", "bad-online-meeting-url"], + } + ] assert requests.head.call_args_list == [ call(model.mentor_profile.online_meeting_url, timeout=30), @@ -91,15 +96,17 @@ def test_with_online_meeting_url(bc: Breathecode, fake, mock_head_request): def test_with_booking_url(bc: Breathecode, fake, mock_head_request): mock_head_request(request1=False, request2=False) - model = bc.database.create(mentor_profile={'booking_url': 'https://calendly.com/' + fake.slug()}) + model = bc.database.create(mentor_profile={"booking_url": "https://calendly.com/" + fake.slug()}) check_mentorship_profile.delay(1) assert logging.Logger.error.call_args_list == [] - assert bc.database.list_of('mentorship.MentorProfile') == [{ - **bc.format.to_dict(model.mentor_profile), - 'availability_report': ['no-online-meeting-url', 'no-syllabus', 'bad-booking-url'], - }] + assert bc.database.list_of("mentorship.MentorProfile") == [ + { + **bc.format.to_dict(model.mentor_profile), + "availability_report": ["no-online-meeting-url", "no-syllabus", "bad-booking-url"], + } + ] assert requests.head.call_args_list == [ call(model.mentor_profile.booking_url, timeout=30), @@ -114,10 +121,12 @@ def test_with_syllabus(bc: Breathecode, fake, mock_head_request): check_mentorship_profile.delay(1) assert logging.Logger.error.call_args_list == [] - assert bc.database.list_of('mentorship.MentorProfile') == [{ - **bc.format.to_dict(model.mentor_profile), - 'availability_report': ['no-online-meeting-url', 'no-booking-url'], - }] + assert bc.database.list_of("mentorship.MentorProfile") == [ + { + **bc.format.to_dict(model.mentor_profile), + "availability_report": ["no-online-meeting-url", "no-booking-url"], + } + ] assert requests.head.call_args_list == [] @@ -125,19 +134,23 @@ def test_with_syllabus(bc: Breathecode, fake, mock_head_request): def test_all_ok(bc: Breathecode, fake, mock_head_request): mock_head_request(request1=True, request2=True) - model = bc.database.create(mentor_profile={ - 'online_meeting_url': fake.url(), - 'booking_url': 'https://calendly.com/' + fake.slug(), - }, - syllabus=1) + model = bc.database.create( + mentor_profile={ + "online_meeting_url": fake.url(), + "booking_url": "https://calendly.com/" + fake.slug(), + }, + syllabus=1, + ) check_mentorship_profile.delay(1) assert logging.Logger.error.call_args_list == [] - assert bc.database.list_of('mentorship.MentorProfile') == [{ - **bc.format.to_dict(model.mentor_profile), - 'availability_report': [], - }] + 
assert bc.database.list_of("mentorship.MentorProfile") == [ + { + **bc.format.to_dict(model.mentor_profile), + "availability_report": [], + } + ] assert requests.head.call_args_list == [ call(model.mentor_profile.online_meeting_url, timeout=30), diff --git a/breathecode/mentorship/tests/urls_shortner/tests_meet_slug.py b/breathecode/mentorship/tests/urls_shortner/tests_meet_slug.py index 5a6f7bc90..360fbd061 100644 --- a/breathecode/mentorship/tests/urls_shortner/tests_meet_slug.py +++ b/breathecode/mentorship/tests/urls_shortner/tests_meet_slug.py @@ -1,6 +1,7 @@ """ Test cases for /academy/:id/member/:id """ + import random from unittest.mock import MagicMock, patch @@ -14,79 +15,81 @@ from ..mixins import MentorshipTestCase UTC_NOW = timezone.now() -URL = 'https://netscape.bankruptcy.story' -ROOM_NAME = 'carlos-two' -ROOM_URL = '' +URL = "https://netscape.bankruptcy.story" +ROOM_NAME = "carlos-two" +ROOM_URL = "" API_KEY = random.randint(1, 1000000000) -def render(message, - mentor_profile=None, - token=None, - mentorship_session=None, - mentorship_service=None, - fix_logo=False, - start_session=False, - session_expired=False, - academy=None): - mentor_profile_slug = mentor_profile.slug if mentor_profile else 'asd' - mentorship_service_slug = mentorship_service.slug if mentorship_service else 'asd' +def render( + message, + mentor_profile=None, + token=None, + mentorship_session=None, + mentorship_service=None, + fix_logo=False, + start_session=False, + session_expired=False, + academy=None, +): + mentor_profile_slug = mentor_profile.slug if mentor_profile else "asd" + mentorship_service_slug = mentorship_service.slug if mentorship_service else "asd" environ = { - 'HTTP_COOKIE': '', - 'PATH_INFO': f'/mentor/{mentor_profile_slug}/service/{mentorship_service_slug}', - 'REMOTE_ADDR': '127.0.0.1', - 'REQUEST_METHOD': 'GET', - 'SCRIPT_NAME': '', - 'SERVER_NAME': 'testserver', - 'SERVER_PORT': '80', - 'SERVER_PROTOCOL': 'HTTP/1.1', - 'wsgi.version': (1, 0), - 'wsgi.url_scheme': 'http', - 'wsgi.input': FakePayload(b''), - 'wsgi.errors': None, - 'wsgi.multiprocess': True, - 'wsgi.multithread': False, - 'wsgi.run_once': False, - 'QUERY_STRING': f'token={token and token.key or ""}', - 'CONTENT_TYPE': 'application/octet-stream' + "HTTP_COOKIE": "", + "PATH_INFO": f"/mentor/{mentor_profile_slug}/service/{mentorship_service_slug}", + "REMOTE_ADDR": "127.0.0.1", + "REQUEST_METHOD": "GET", + "SCRIPT_NAME": "", + "SERVER_NAME": "testserver", + "SERVER_PORT": "80", + "SERVER_PROTOCOL": "HTTP/1.1", + "wsgi.version": (1, 0), + "wsgi.url_scheme": "http", + "wsgi.input": FakePayload(b""), + "wsgi.errors": None, + "wsgi.multiprocess": True, + "wsgi.multithread": False, + "wsgi.run_once": False, + "QUERY_STRING": f'token={token and token.key or ""}', + "CONTENT_TYPE": "application/octet-stream", } request = WSGIRequest(environ) context = { - 'MESSAGE': message, - 'BUTTON': None, - 'BUTTON_TARGET': '_blank', - 'LINK': None, + "MESSAGE": message, + "BUTTON": None, + "BUTTON_TARGET": "_blank", + "LINK": None, } if academy: - context['COMPANY_INFO_EMAIL'] = academy.feedback_email - context['COMPANY_LEGAL_NAME'] = academy.legal_name or academy.name - context['COMPANY_LOGO'] = academy.logo_url - context['COMPANY_NAME'] = academy.name + context["COMPANY_INFO_EMAIL"] = academy.feedback_email + context["COMPANY_LEGAL_NAME"] = academy.legal_name or academy.name + context["COMPANY_LOGO"] = academy.logo_url + context["COMPANY_NAME"] = academy.name - if 'heading' not in context: - context['heading'] = academy.name + if 
"heading" not in context: + context["heading"] = academy.name if start_session: context = { **context, - 'SUBJECT': 'Mentoring Session', - 'BUTTON': 'Start Session', - 'BUTTON_TARGET': '_self', - 'LINK': f'?token={token.key}&redirect=true', + "SUBJECT": "Mentoring Session", + "BUTTON": "Start Session", + "BUTTON_TARGET": "_self", + "LINK": f"?token={token.key}&redirect=true", } if session_expired: context = { **context, - 'BUTTON': 'End Session', - 'BUTTON_TARGET': '_self', - 'LINK': f'/mentor/session/{mentorship_session.id}?token={token.key}&extend=true', + "BUTTON": "End Session", + "BUTTON_TARGET": "_self", + "LINK": f"/mentor/session/{mentorship_session.id}?token={token.key}&extend=true", } string = loader.render_to_string( - 'message.html', + "message.html", context, request, using=None, @@ -96,52 +99,52 @@ def render(message, string = string.replace('src="/static/assets/logo.png"', 'src="/static/icons/picture.png"') if session_expired: - string = string.replace('&extend=true', '') + string = string.replace("&extend=true", "") return string def render_pick_service(mentor_profile, token, mentorship_services=[], fix_logo=False, academy=None): environ = { - 'HTTP_COOKIE': '', - 'PATH_INFO': f'/mentor/meet/{mentor_profile.slug}', - 'REMOTE_ADDR': '127.0.0.1', - 'REQUEST_METHOD': 'GET', - 'SCRIPT_NAME': '', - 'SERVER_NAME': 'testserver', - 'SERVER_PORT': '80', - 'SERVER_PROTOCOL': 'HTTP/1.1', - 'wsgi.version': (1, 0), - 'wsgi.url_scheme': 'http', - 'wsgi.input': FakePayload(b''), - 'wsgi.errors': None, - 'wsgi.multiprocess': True, - 'wsgi.multithread': False, - 'wsgi.run_once': False, - 'QUERY_STRING': f'token={token and token.key or ""}', - 'CONTENT_TYPE': 'application/octet-stream' + "HTTP_COOKIE": "", + "PATH_INFO": f"/mentor/meet/{mentor_profile.slug}", + "REMOTE_ADDR": "127.0.0.1", + "REQUEST_METHOD": "GET", + "SCRIPT_NAME": "", + "SERVER_NAME": "testserver", + "SERVER_PORT": "80", + "SERVER_PROTOCOL": "HTTP/1.1", + "wsgi.version": (1, 0), + "wsgi.url_scheme": "http", + "wsgi.input": FakePayload(b""), + "wsgi.errors": None, + "wsgi.multiprocess": True, + "wsgi.multithread": False, + "wsgi.run_once": False, + "QUERY_STRING": f'token={token and token.key or ""}', + "CONTENT_TYPE": "application/octet-stream", } request = WSGIRequest(environ) - base_url = f'/mentor/meet/{mentor_profile.slug}' + base_url = f"/mentor/meet/{mentor_profile.slug}" context = { - 'token': token.key, - 'services': mentorship_services, - 'mentor': mentor_profile, - 'baseUrl': base_url, + "token": token.key, + "services": mentorship_services, + "mentor": mentor_profile, + "baseUrl": base_url, } if academy: - context['COMPANY_INFO_EMAIL'] = academy.feedback_email - context['COMPANY_LEGAL_NAME'] = academy.legal_name or academy.name - context['COMPANY_LOGO'] = academy.logo_url - context['COMPANY_NAME'] = academy.name + context["COMPANY_INFO_EMAIL"] = academy.feedback_email + context["COMPANY_LEGAL_NAME"] = academy.legal_name or academy.name + context["COMPANY_LOGO"] = academy.logo_url + context["COMPANY_NAME"] = academy.name - if 'heading' not in context: - context['heading'] = academy.name + if "heading" not in context: + context["heading"] = academy.name - string = loader.render_to_string('pick_service.html', context, request) + string = loader.render_to_string("pick_service.html", context, request) if fix_logo: return string.replace('src="/static/assets/logo.png"', 'src="/static/icons/picture.png"') @@ -151,25 +154,29 @@ def render_pick_service(mentor_profile, token, mentorship_services=[], fix_logo= class 
AuthenticateTestSuite(MentorshipTestCase): """Authentication test suite""" + """ 🔽🔽🔽 Auth """ def test_without_auth(self): - url = reverse_lazy('mentorship_shortner:meet_slug', kwargs={ - 'mentor_slug': 'asd', - }) + url = reverse_lazy( + "mentorship_shortner:meet_slug", + kwargs={ + "mentor_slug": "asd", + }, + ) response = self.client.get(url) - hash = self.bc.format.to_base64('/mentor/meet/asd') + hash = self.bc.format.to_base64("/mentor/meet/asd") content = self.bc.format.from_bytes(response.content) - expected = '' + expected = "" self.assertEqual(content, expected) - self.assertEqual(response.url, f'/v1/auth/view/login?attempt=1&url={hash}') + self.assertEqual(response.url, f"/v1/auth/view/login?attempt=1&url={hash}") self.assertEqual(response.status_code, status.HTTP_302_FOUND) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), []) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipService'), []) + self.assertEqual(self.bc.database.list_of("mentorship.MentorProfile"), []) + self.assertEqual(self.bc.database.list_of("mentorship.MentorshipService"), []) """ 🔽🔽🔽 GET without MentorProfile @@ -178,125 +185,162 @@ def test_without_auth(self): def test_without_mentor_profile(self): model = self.bc.database.create(user=1, token=1) - querystring = self.bc.format.to_querystring({'token': model.token.key}) - url = reverse_lazy('mentorship_shortner:meet_slug', kwargs={ - 'mentor_slug': 'asd', - }) + f'?{querystring}' + querystring = self.bc.format.to_querystring({"token": model.token.key}) + url = ( + reverse_lazy( + "mentorship_shortner:meet_slug", + kwargs={ + "mentor_slug": "asd", + }, + ) + + f"?{querystring}" + ) response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - expected = render(f'No mentor found with slug asd') + expected = render(f"No mentor found with slug asd") # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), []) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipService'), []) + self.assertEqual(self.bc.database.list_of("mentorship.MentorProfile"), []) + self.assertEqual(self.bc.database.list_of("mentorship.MentorshipService"), []) """ 🔽🔽🔽 GET with MentorProfile """ - @patch('breathecode.mentorship.actions.mentor_is_ready', MagicMock(side_effect=Exception('kjhgf'))) + @patch("breathecode.mentorship.actions.mentor_is_ready", MagicMock(side_effect=Exception("kjhgf"))) def test_with_mentor_profile__mentor_is_not_ready(self): model = self.bc.database.create(user=1, token=1, mentor_profile=1) - querystring = self.bc.format.to_querystring({'token': model.token.key}) - url = reverse_lazy('mentorship_shortner:meet_slug', kwargs={ - 'mentor_slug': model.mentor_profile.slug, - }) + f'?{querystring}' + querystring = self.bc.format.to_querystring({"token": model.token.key}) + url = ( + reverse_lazy( + "mentorship_shortner:meet_slug", + kwargs={ + "mentor_slug": model.mentor_profile.slug, + }, + ) + + f"?{querystring}" + ) response = self.client.get(url) content = self.bc.format.from_bytes(response.content) expected = render( - 'This mentor is not ready, please contact the mentor directly or anyone from the academy staff.', - academy=model.academy) 
+ "This mentor is not ready, please contact the mentor directly or anyone from the academy staff.", + academy=model.academy, + ) # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), [ - self.bc.format.to_dict(model.mentor_profile), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorProfile"), + [ + self.bc.format.to_dict(model.mentor_profile), + ], + ) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipService'), []) + self.assertEqual(self.bc.database.list_of("mentorship.MentorshipService"), []) - @patch('breathecode.mentorship.actions.mentor_is_ready', MagicMock()) + @patch("breathecode.mentorship.actions.mentor_is_ready", MagicMock()) def test_with_mentor_profile__mentor_ready(self): model = self.bc.database.create(user=1, token=1, mentor_profile=1) - querystring = self.bc.format.to_querystring({'token': model.token.key}) - url = reverse_lazy('mentorship_shortner:meet_slug', kwargs={ - 'mentor_slug': model.mentor_profile.slug, - }) + f'?{querystring}' + querystring = self.bc.format.to_querystring({"token": model.token.key}) + url = ( + reverse_lazy( + "mentorship_shortner:meet_slug", + kwargs={ + "mentor_slug": model.mentor_profile.slug, + }, + ) + + f"?{querystring}" + ) response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - expected = render(f'This mentor is not available on any service', academy=model.academy) + expected = render(f"This mentor is not available on any service", academy=model.academy) # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), [ - self.bc.format.to_dict(model.mentor_profile), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorProfile"), + [ + self.bc.format.to_dict(model.mentor_profile), + ], + ) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipService'), []) + self.assertEqual(self.bc.database.list_of("mentorship.MentorshipService"), []) """ 🔽🔽🔽 GET with MentorProfile and MentorshipService """ - @patch('breathecode.mentorship.actions.mentor_is_ready', MagicMock()) + @patch("breathecode.mentorship.actions.mentor_is_ready", MagicMock()) def test_with_mentor_profile__with_mentorship_service(self): model = self.bc.database.create(user=1, token=1, mentor_profile=1, mentorship_service=1) - querystring = self.bc.format.to_querystring({'token': model.token.key}) - url = reverse_lazy('mentorship_shortner:meet_slug', kwargs={ - 'mentor_slug': model.mentor_profile.slug, - }) + f'?{querystring}' + querystring = self.bc.format.to_querystring({"token": model.token.key}) + url = ( + reverse_lazy( + "mentorship_shortner:meet_slug", + kwargs={ + "mentor_slug": model.mentor_profile.slug, + }, + ) + + f"?{querystring}" + ) response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - expected = render_pick_service(model.mentor_profile, - 
model.token, [model.mentorship_service], - academy=model.academy) + expected = render_pick_service( + model.mentor_profile, model.token, [model.mentorship_service], academy=model.academy + ) # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), [ - self.bc.format.to_dict(model.mentor_profile), - ]) - - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipService'), [ - self.bc.format.to_dict(model.mentorship_service), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorProfile"), + [ + self.bc.format.to_dict(model.mentor_profile), + ], + ) + + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipService"), + [ + self.bc.format.to_dict(model.mentorship_service), + ], + ) diff --git a/breathecode/mentorship/tests/urls_shortner/tests_meet_slug_service_slug.py b/breathecode/mentorship/tests/urls_shortner/tests_meet_slug_service_slug.py index 90bf7b093..939901c0e 100644 --- a/breathecode/mentorship/tests/urls_shortner/tests_meet_slug_service_slug.py +++ b/breathecode/mentorship/tests/urls_shortner/tests_meet_slug_service_slug.py @@ -1,6 +1,7 @@ """ Test cases for /academy/:id/member/:id """ + import os import random from datetime import timedelta @@ -25,61 +26,59 @@ from ..mixins import MentorshipTestCase UTC_NOW = timezone.now() -URL = 'https://netscape.bankruptcy.story' -ROOM_NAME = 'carlos-two' -ROOM_URL = '' +URL = "https://netscape.bankruptcy.story" +ROOM_NAME = "carlos-two" +ROOM_URL = "" API_KEY = random.randint(1, 1000000000) @pytest.fixture(autouse=True) def setup(db, fake, monkeypatch: pytest.MonkeyPatch): # os.environ['APP_URL'] = fake.url() - monkeypatch.setenv('APP_URL', fake.url()) + monkeypatch.setenv("APP_URL", fake.url()) yield def format_consumable(data={}): return { - 'cohort_set_id': None, - 'event_type_set_id': None, - 'how_many': 0, - 'id': 0, - 'mentorship_service_set_id': 0, - 'service_item_id': 0, - 'unit_type': 'UNIT', - 'user_id': 0, - 'valid_until': None, - 'sort_priority': 1, + "cohort_set_id": None, + "event_type_set_id": None, + "how_many": 0, + "id": 0, + "mentorship_service_set_id": 0, + "service_item_id": 0, + "unit_type": "UNIT", + "user_id": 0, + "valid_until": None, + "sort_priority": 1, **data, } def format_consumption_session(mentorship_service, mentor_profile, mentorship_service_set, user, consumable, data={}): return { - 'consumable_id': consumable.id, - 'duration': timedelta(), - 'eta': ..., - 'how_many': 1.0, - 'id': 0, - 'operation_code': 'default', - 'path': 'payments.MentorshipServiceSet', - 'related_id': mentorship_service_set.id, - 'related_slug': mentorship_service_set.slug, - 'request': { - 'args': [], - 'headers': { - 'academy': None - }, - 'kwargs': { - 'mentor_slug': mentor_profile.slug, - 'service_slug': mentorship_service.slug, + "consumable_id": consumable.id, + "duration": timedelta(), + "eta": ..., + "how_many": 1.0, + "id": 0, + "operation_code": "default", + "path": "payments.MentorshipServiceSet", + "related_id": mentorship_service_set.id, + "related_slug": mentorship_service_set.slug, + "request": { + "args": [], + "headers": {"academy": None}, + "kwargs": { + "mentor_slug": mentor_profile.slug, + "service_slug": 
mentorship_service.slug, }, - 'user': user.id + "user": user.id, }, - 'status': 'PENDING', - 'user_id': user.id, - 'was_discounted': False, + "status": "PENDING", + "user_id": user.id, + "was_discounted": False, **data, } @@ -94,18 +93,18 @@ def get_env(key, value=None): def render_message(message, data={}, academy=None): request = None - context = {'MESSAGE': message, 'BUTTON': None, 'BUTTON_TARGET': '_blank', 'LINK': None, **data} + context = {"MESSAGE": message, "BUTTON": None, "BUTTON_TARGET": "_blank", "LINK": None, **data} if academy: - context['COMPANY_INFO_EMAIL'] = academy.feedback_email - context['COMPANY_LEGAL_NAME'] = academy.legal_name or academy.name - context['COMPANY_LOGO'] = academy.logo_url - context['COMPANY_NAME'] = academy.name + context["COMPANY_INFO_EMAIL"] = academy.feedback_email + context["COMPANY_LEGAL_NAME"] = academy.legal_name or academy.name + context["COMPANY_LOGO"] = academy.logo_url + context["COMPANY_NAME"] = academy.name - if 'heading' not in data: - context['heading'] = academy.name + if "heading" not in data: + context["heading"] = academy.name - return loader.render_to_string('message.html', context, request) + return loader.render_to_string("message.html", context, request) def get_empty_mentorship_session_queryset(*args, **kwargs): @@ -119,69 +118,71 @@ def format_datetime(self, date): return self.bc.datetime.to_iso_string(date) -def render(message, - mentor_profile=None, - token=None, - mentorship_session=None, - mentorship_service=None, - fix_logo=False, - start_session=False, - session_expired=False, - academy=None, - data={}): - mentor_profile_slug = mentor_profile.slug if mentor_profile else 'asd' - mentorship_service_slug = mentorship_service.slug if mentorship_service else 'asd' +def render( + message, + mentor_profile=None, + token=None, + mentorship_session=None, + mentorship_service=None, + fix_logo=False, + start_session=False, + session_expired=False, + academy=None, + data={}, +): + mentor_profile_slug = mentor_profile.slug if mentor_profile else "asd" + mentorship_service_slug = mentorship_service.slug if mentorship_service else "asd" environ = { - 'HTTP_COOKIE': '', - 'PATH_INFO': f'/mentor/{mentor_profile_slug}/service/{mentorship_service_slug}', - 'REMOTE_ADDR': '127.0.0.1', - 'REQUEST_METHOD': 'GET', - 'SCRIPT_NAME': '', - 'SERVER_NAME': 'testserver', - 'SERVER_PORT': '80', - 'SERVER_PROTOCOL': 'HTTP/1.1', - 'wsgi.version': (1, 0), - 'wsgi.url_scheme': 'http', - 'wsgi.input': FakePayload(b''), - 'wsgi.errors': None, - 'wsgi.multiprocess': True, - 'wsgi.multithread': False, - 'wsgi.run_once': False, - 'QUERY_STRING': f'token={token and token.key or ""}', - 'CONTENT_TYPE': 'application/octet-stream' + "HTTP_COOKIE": "", + "PATH_INFO": f"/mentor/{mentor_profile_slug}/service/{mentorship_service_slug}", + "REMOTE_ADDR": "127.0.0.1", + "REQUEST_METHOD": "GET", + "SCRIPT_NAME": "", + "SERVER_NAME": "testserver", + "SERVER_PORT": "80", + "SERVER_PROTOCOL": "HTTP/1.1", + "wsgi.version": (1, 0), + "wsgi.url_scheme": "http", + "wsgi.input": FakePayload(b""), + "wsgi.errors": None, + "wsgi.multiprocess": True, + "wsgi.multithread": False, + "wsgi.run_once": False, + "QUERY_STRING": f'token={token and token.key or ""}', + "CONTENT_TYPE": "application/octet-stream", } request = WSGIRequest(environ) - context = {'MESSAGE': message, 'BUTTON': None, 'BUTTON_TARGET': '_blank', 'LINK': None, **data} + context = {"MESSAGE": message, "BUTTON": None, "BUTTON_TARGET": "_blank", "LINK": None, **data} if start_session: context = { **context, - 'SUBJECT': 
'Mentoring Session', - 'BUTTON': 'Start Session', - 'BUTTON_TARGET': '_self', - 'LINK': f'?token={token.key}&redirect=true', + "SUBJECT": "Mentoring Session", + "BUTTON": "Start Session", + "BUTTON_TARGET": "_self", + "LINK": f"?token={token.key}&redirect=true", } if session_expired: context = { **context, - 'BUTTON': 'End Session', - 'BUTTON_TARGET': '_self', - 'LINK': f'/mentor/session/{mentorship_session.id}?token={token.key}&extend=true', + "BUTTON": "End Session", + "BUTTON_TARGET": "_self", + "LINK": f"/mentor/session/{mentorship_session.id}?token={token.key}&extend=true", } if academy: - context['COMPANY_INFO_EMAIL'] = academy.feedback_email - context['COMPANY_LEGAL_NAME'] = academy.legal_name or academy.name - context['COMPANY_LOGO'] = academy.logo_url - context['COMPANY_NAME'] = academy.name + context["COMPANY_INFO_EMAIL"] = academy.feedback_email + context["COMPANY_LEGAL_NAME"] = academy.legal_name or academy.name + context["COMPANY_LOGO"] = academy.logo_url + context["COMPANY_NAME"] = academy.name - if 'heading' not in context: - context['heading'] = academy.name + if "heading" not in context: + context["heading"] = academy.name string = loader.render_to_string( - 'message.html', + "message.html", context, request, using=None, @@ -191,141 +192,143 @@ def render(message, string = string.replace('src="/static/assets/logo.png"', 'src="/static/icons/picture.png"') if session_expired: - string = string.replace('&extend=true', '') + string = string.replace("&extend=true", "") return string def mentor_serializer(mentor_profile, user, academy): return { - 'id': mentor_profile.id, - 'slug': mentor_profile.slug, - 'user': { - 'id': user.id, - 'first_name': user.first_name, - 'last_name': user.last_name, - 'email': user.email, + "id": mentor_profile.id, + "slug": mentor_profile.slug, + "user": { + "id": user.id, + "first_name": user.first_name, + "last_name": user.last_name, + "email": user.email, }, - 'service': { - 'id': 1, - 'slug': 'everybody-small', - 'name': 'Savannah Holden DDS', - 'status': 'DRAFT', - 'academy': { - 'id': academy.id, - 'slug': academy.slug, - 'name': academy.name, - 'logo_url': academy.logo_url, - 'icon_url': academy.icon_url, + "service": { + "id": 1, + "slug": "everybody-small", + "name": "Savannah Holden DDS", + "status": "DRAFT", + "academy": { + "id": academy.id, + "slug": academy.slug, + "name": academy.name, + "logo_url": academy.logo_url, + "icon_url": academy.icon_url, }, - 'logo_url': None, - 'duration': timedelta(seconds=3600), - 'language': 'en', - 'allow_mentee_to_extend': True, - 'allow_mentors_to_extend': True, - 'max_duration': timedelta(seconds=7200), - 'missed_meeting_duration': timedelta(seconds=600), - 'created_at': ..., - 'updated_at': ..., - 'description': None + "logo_url": None, + "duration": timedelta(seconds=3600), + "language": "en", + "allow_mentee_to_extend": True, + "allow_mentors_to_extend": True, + "max_duration": timedelta(seconds=7200), + "missed_meeting_duration": timedelta(seconds=600), + "created_at": ..., + "updated_at": ..., + "description": None, }, - 'status': mentor_profile.status, - 'price_per_hour': mentor_profile.price_per_hour, - 'booking_url': mentor_profile.booking_url, - 'online_meeting_url': mentor_profile.online_meeting_url, - 'timezone': mentor_profile.timezone, - 'syllabus': mentor_profile.syllabus, - 'email': mentor_profile.email, - 'created_at': mentor_profile.created_at, - 'updated_at': mentor_profile.updated_at, + "status": mentor_profile.status, + "price_per_hour": mentor_profile.price_per_hour, + 
"booking_url": mentor_profile.booking_url, + "online_meeting_url": mentor_profile.online_meeting_url, + "timezone": mentor_profile.timezone, + "syllabus": mentor_profile.syllabus, + "email": mentor_profile.email, + "created_at": mentor_profile.created_at, + "updated_at": mentor_profile.updated_at, } def session_serializer(mentor_profile, user, academy, mentorship_service): - return [{ - 'id': academy.id, - 'status': 'PENDING', - 'started_at': None, - 'ended_at': None, - 'starts_at': None, - 'ends_at': ..., - 'mentor_joined_at': None, - 'mentor_left_at': None, - 'mentee_left_at': None, - 'allow_billing': True, - 'accounted_duration': None, - 'suggested_accounted_duration': None, - 'mentor': { - 'id': mentor_profile.id, - 'slug': mentor_profile.id, - 'user': { - 'id': user.id, - 'first_name': user.first_name, - 'last_name': user.last_name, - 'email': user.email, - }, - 'service': { - 'id': mentorship_service.id, - 'slug': mentorship_service.slug, - 'name': mentorship_service.name, - 'status': mentorship_service.status, - 'academy': { - 'id': academy.id, - 'slug': academy.slug, - 'name': academy.name, - 'logo_url': academy.logo_url, - 'icon_url': academy.icon_url, + return [ + { + "id": academy.id, + "status": "PENDING", + "started_at": None, + "ended_at": None, + "starts_at": None, + "ends_at": ..., + "mentor_joined_at": None, + "mentor_left_at": None, + "mentee_left_at": None, + "allow_billing": True, + "accounted_duration": None, + "suggested_accounted_duration": None, + "mentor": { + "id": mentor_profile.id, + "slug": mentor_profile.id, + "user": { + "id": user.id, + "first_name": user.first_name, + "last_name": user.last_name, + "email": user.email, + }, + "service": { + "id": mentorship_service.id, + "slug": mentorship_service.slug, + "name": mentorship_service.name, + "status": mentorship_service.status, + "academy": { + "id": academy.id, + "slug": academy.slug, + "name": academy.name, + "logo_url": academy.logo_url, + "icon_url": academy.icon_url, + }, + "logo_url": mentorship_service.logo_url, + "duration": mentorship_service.duration, + "language": mentorship_service.language, + "allow_mentee_to_extend": mentorship_service.allow_mentee_to_extend, + "allow_mentors_to_extend": mentorship_service.allow_mentors_to_extend, + "max_duration": mentorship_service.max_duration, + "missed_meeting_duration": mentorship_service.missed_meeting_duration, + "created_at": mentorship_service.created_at, + "updated_at": mentorship_service.updated_at, + "description": mentorship_service.description, }, - 'logo_url': mentorship_service.logo_url, - 'duration': mentorship_service.duration, - 'language': mentorship_service.language, - 'allow_mentee_to_extend': mentorship_service.allow_mentee_to_extend, - 'allow_mentors_to_extend': mentorship_service.allow_mentors_to_extend, - 'max_duration': mentorship_service.max_duration, - 'missed_meeting_duration': mentorship_service.missed_meeting_duration, - 'created_at': mentorship_service.created_at, - 'updated_at': mentorship_service.updated_at, - 'description': mentorship_service.description, + "status": mentor_profile.status, + "price_per_hour": mentor_profile.price_per_hour, + "booking_url": mentor_profile.booking_url, + "online_meeting_url": mentor_profile.online_meeting_url, + "timezone": mentor_profile.timezone, + "syllabus": mentor_profile.syllabus, + "email": mentor_profile.email, + "created_at": mentor_profile.created_at, + "updated_at": mentor_profile.updated_at, }, - 'status': mentor_profile.status, - 'price_per_hour': mentor_profile.price_per_hour, - 
'booking_url': mentor_profile.booking_url, - 'online_meeting_url': mentor_profile.online_meeting_url, - 'timezone': mentor_profile.timezone, - 'syllabus': mentor_profile.syllabus, - 'email': mentor_profile.email, - 'created_at': mentor_profile.created_at, - 'updated_at': mentor_profile.updated_at, - }, - 'mentee': None - }] + "mentee": None, + } + ] def render_pick_session(mentor_profile, user, token, academy, mentorship_service, fix_logo=False): request = None - base_url = f'/mentor/meet/{mentor_profile.slug}/service/{mentorship_service.slug}?token={token.key}' + base_url = f"/mentor/meet/{mentor_profile.slug}/service/{mentorship_service.slug}?token={token.key}" booking_url = mentor_profile.booking_url - if not booking_url.endswith('?'): - booking_url += '?' + if not booking_url.endswith("?"): + booking_url += "?" context = { - 'token': token.key, - 'mentor': mentor_serializer(mentor_profile, user, academy), - 'SUBJECT': 'Mentoring Session', - 'sessions': session_serializer(mentor_profile, user, academy, mentorship_service), - 'baseUrl': base_url, + "token": token.key, + "mentor": mentor_serializer(mentor_profile, user, academy), + "SUBJECT": "Mentoring Session", + "sessions": session_serializer(mentor_profile, user, academy, mentorship_service), + "baseUrl": base_url, } if academy: - context['COMPANY_INFO_EMAIL'] = academy.feedback_email - context['COMPANY_LEGAL_NAME'] = academy.legal_name or academy.name - context['COMPANY_LOGO'] = academy.logo_url - context['COMPANY_NAME'] = academy.name + context["COMPANY_INFO_EMAIL"] = academy.feedback_email + context["COMPANY_LEGAL_NAME"] = academy.legal_name or academy.name + context["COMPANY_LOGO"] = academy.logo_url + context["COMPANY_NAME"] = academy.name - if 'heading' not in context: - context['heading'] = academy.name + if "heading" not in context: + context["heading"] = academy.name - string = loader.render_to_string('pick_session.html', context, request) + string = loader.render_to_string("pick_session.html", context, request) if fix_logo: return string.replace('src="/static/assets/logo.png"', 'src="/static/icons/picture.png"') @@ -335,20 +338,22 @@ def render_pick_session(mentor_profile, user, token, academy, mentorship_service def render_pick_mentee(mentor_profile, user, token, academy, mentorship_service, fix_logo=False): request = None - base_url = f'/mentor/meet/{mentor_profile.slug}/service/{mentorship_service.slug}?token={token.key}&session={academy.id}' + base_url = ( + f"/mentor/meet/{mentor_profile.slug}/service/{mentorship_service.slug}?token={token.key}&session={academy.id}" + ) booking_url = mentor_profile.booking_url - if not booking_url.endswith('?'): - booking_url += '?' + if not booking_url.endswith("?"): + booking_url += "?" 
context = { - 'token': token.key, - 'mentor': mentor_serializer(mentor_profile, user, academy), - 'SUBJECT': 'Mentoring Session', - 'sessions': session_serializer(mentor_profile, user, academy, mentorship_service), - 'baseUrl': base_url, + "token": token.key, + "mentor": mentor_serializer(mentor_profile, user, academy), + "SUBJECT": "Mentoring Session", + "sessions": session_serializer(mentor_profile, user, academy, mentorship_service), + "baseUrl": base_url, } - string = loader.render_to_string('pick_mentee.html', context, request) + string = loader.render_to_string("pick_mentee.html", context, request) if fix_logo: return string.replace('src="/static/assets/logo.png"', 'src="/static/icons/picture.png"') @@ -358,65 +363,65 @@ def render_pick_mentee(mentor_profile, user, token, academy, mentorship_service, def get_mentorship_session_serializer(mentorship_session, mentor_profile, user, mentorship_service, academy): return { - 'id': mentorship_session.id, - 'status': mentorship_session.status, - 'started_at': mentorship_session.started_at, - 'ended_at': mentorship_session.ended_at, - 'starts_at': mentorship_session.starts_at, - 'ends_at': mentorship_session.ends_at, - 'mentor_joined_at': mentorship_session.mentor_joined_at, - 'mentor_left_at': mentorship_session.mentor_left_at, - 'mentee_left_at': mentorship_session.mentee_left_at, - 'allow_billing': mentorship_session.allow_billing, - 'accounted_duration': mentorship_session.accounted_duration, - 'suggested_accounted_duration': mentorship_session.suggested_accounted_duration, - 'mentor': { - 'id': mentor_profile.id, - 'slug': mentor_profile.slug, - 'user': { - 'id': user.id, - 'first_name': user.first_name, - 'last_name': user.last_name, - 'email': user.email, + "id": mentorship_session.id, + "status": mentorship_session.status, + "started_at": mentorship_session.started_at, + "ended_at": mentorship_session.ended_at, + "starts_at": mentorship_session.starts_at, + "ends_at": mentorship_session.ends_at, + "mentor_joined_at": mentorship_session.mentor_joined_at, + "mentor_left_at": mentorship_session.mentor_left_at, + "mentee_left_at": mentorship_session.mentee_left_at, + "allow_billing": mentorship_session.allow_billing, + "accounted_duration": mentorship_session.accounted_duration, + "suggested_accounted_duration": mentorship_session.suggested_accounted_duration, + "mentor": { + "id": mentor_profile.id, + "slug": mentor_profile.slug, + "user": { + "id": user.id, + "first_name": user.first_name, + "last_name": user.last_name, + "email": user.email, }, - 'service': { - 'id': mentorship_service.id, - 'slug': mentorship_service.slug, - 'name': mentorship_service.name, - 'status': mentorship_service.status, - 'academy': { - 'id': academy.id, - 'slug': academy.slug, - 'name': academy.name, - 'logo_url': academy.logo_url, - 'icon_url': academy.icon_url, + "service": { + "id": mentorship_service.id, + "slug": mentorship_service.slug, + "name": mentorship_service.name, + "status": mentorship_service.status, + "academy": { + "id": academy.id, + "slug": academy.slug, + "name": academy.name, + "logo_url": academy.logo_url, + "icon_url": academy.icon_url, }, - 'logo_url': mentorship_service.logo_url, - 'duration': mentorship_service.duration, - 'language': mentorship_service.language, - 'allow_mentee_to_extend': mentorship_service.allow_mentee_to_extend, - 'allow_mentors_to_extend': mentorship_service.allow_mentors_to_extend, - 'max_duration': mentorship_service.max_duration, - 'missed_meeting_duration': mentorship_service.missed_meeting_duration, - 
'created_at': mentorship_service.created_at, - 'updated_at': mentorship_service.updated_at, - 'description': mentorship_service.description, + "logo_url": mentorship_service.logo_url, + "duration": mentorship_service.duration, + "language": mentorship_service.language, + "allow_mentee_to_extend": mentorship_service.allow_mentee_to_extend, + "allow_mentors_to_extend": mentorship_service.allow_mentors_to_extend, + "max_duration": mentorship_service.max_duration, + "missed_meeting_duration": mentorship_service.missed_meeting_duration, + "created_at": mentorship_service.created_at, + "updated_at": mentorship_service.updated_at, + "description": mentorship_service.description, }, - 'status': mentor_profile.status, - 'price_per_hour': mentor_profile.price_per_hour, - 'booking_url': mentor_profile.booking_url, - 'online_meeting_url': mentor_profile.online_meeting_url, - 'timezone': mentor_profile.timezone, - 'syllabus': [], - 'email': mentor_profile.email, - 'created_at': mentor_profile.created_at, - 'updated_at': mentor_profile.updated_at, + "status": mentor_profile.status, + "price_per_hour": mentor_profile.price_per_hour, + "booking_url": mentor_profile.booking_url, + "online_meeting_url": mentor_profile.online_meeting_url, + "timezone": mentor_profile.timezone, + "syllabus": [], + "email": mentor_profile.email, + "created_at": mentor_profile.created_at, + "updated_at": mentor_profile.updated_at, }, - 'mentee': { - 'id': user.id, - 'first_name': user.first_name, - 'last_name': user.last_name, - 'email': user.email, + "mentee": { + "id": user.id, + "first_name": user.first_name, + "last_name": user.last_name, + "email": user.email, }, } @@ -425,20 +430,21 @@ def render_session(mentorship_session, mentor_profile, user, mentorship_service, request = None data = { - 'subject': mentorship_session.service.name, - 'room_url': mentorship_session.online_meeting_url, - 'session': get_mentorship_session_serializer(mentorship_session, mentor_profile, user, mentorship_service, - academy), - 'userName': (token.user.first_name + ' ' + token.user.last_name).strip(), - 'backup_room_url': mentorship_session.mentor.online_meeting_url, + "subject": mentorship_session.service.name, + "room_url": mentorship_session.online_meeting_url, + "session": get_mentorship_session_serializer( + mentorship_session, mentor_profile, user, mentorship_service, academy + ), + "userName": (token.user.first_name + " " + token.user.last_name).strip(), + "backup_room_url": mentorship_session.mentor.online_meeting_url, } if token.user.id == mentorship_session.mentor.user.id: - data['leave_url'] = '/mentor/session/' + str(mentorship_session.id) + '?token=' + token.key + data["leave_url"] = "/mentor/session/" + str(mentorship_session.id) + "?token=" + token.key else: - data['leave_url'] = 'close' + data["leave_url"] = "close" - string = loader.render_to_string('daily.html', data, request) + string = loader.render_to_string("daily.html", data, request) if fix_logo: string = string.replace('src="/static/icons/picture.png"', 'src="/static/assets/icon.png"') @@ -448,61 +454,61 @@ def render_session(mentorship_session, mentor_profile, user, mentorship_service, class AuthenticateTestSuite(MentorshipTestCase): """Authentication test suite""" + """ 🔽🔽🔽 Auth """ def test_without_auth(self): - url = reverse_lazy('mentorship_shortner:meet_slug_service_slug', - kwargs={ - 'mentor_slug': 'asd', - 'service_slug': 'asd' - }) + url = reverse_lazy( + "mentorship_shortner:meet_slug_service_slug", kwargs={"mentor_slug": "asd", "service_slug": "asd"} + ) 
response = self.client.get(url) - hash = self.bc.format.to_base64('/mentor/meet/asd/service/asd') + hash = self.bc.format.to_base64("/mentor/meet/asd/service/asd") content = self.bc.format.from_bytes(response.content) - expected = '' + expected = "" self.assertEqual(content, expected) - self.assertEqual(response.url, f'/v1/auth/view/login?attempt=1&url={hash}') + self.assertEqual(response.url, f"/v1/auth/view/login?attempt=1&url={hash}") self.assertEqual(response.status_code, status.HTTP_302_FOUND) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), []) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.assertEqual(self.bc.database.list_of('payments.ConsumptionSession'), []) + self.assertEqual(self.bc.database.list_of("authenticate.ProfileAcademy"), []) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.assertEqual(self.bc.database.list_of("payments.ConsumptionSession"), []) """ 🔽🔽🔽 GET without MentorProfile """ def test_without_mentor_profile(self): - service = {'slug': 'join_mentorship'} + service = {"slug": "join_mentorship"} model = self.bc.database.create(user=1, token=1, service=service) - querystring = self.bc.format.to_querystring({'token': model.token.key}) - url = reverse_lazy('mentorship_shortner:meet_slug_service_slug', - kwargs={ - 'mentor_slug': 'asd', - 'service_slug': 'asd' - }) + f'?{querystring}' + querystring = self.bc.format.to_querystring({"token": model.token.key}) + url = ( + reverse_lazy( + "mentorship_shortner:meet_slug_service_slug", kwargs={"mentor_slug": "asd", "service_slug": "asd"} + ) + + f"?{querystring}" + ) response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - expected = render(f'No mentor found with slug asd') + expected = render(f"No mentor found with slug asd") # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), []) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.assertEqual(self.bc.database.list_of('payments.ConsumptionSession'), []) + self.assertEqual(self.bc.database.list_of("mentorship.MentorProfile"), []) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.assertEqual(self.bc.database.list_of("payments.ConsumptionSession"), []) """ 🔽🔽🔽 GET without MentorProfile @@ -510,282 +516,347 @@ def test_without_mentor_profile(self): def test_no_mentorship_service(self): slug = self.bc.fake.slug() - service = {'slug': 'join_mentorship'} + service = {"slug": "join_mentorship"} model = self.bc.database.create(user=1, token=1, mentor_profile=1, service=service) - querystring = self.bc.format.to_querystring({'token': model.token.key}) - url = reverse_lazy('mentorship_shortner:meet_slug_service_slug', - kwargs={ - 'mentor_slug': model.mentor_profile.slug, - 'service_slug': slug, - }) + f'?{querystring}' + querystring = self.bc.format.to_querystring({"token": model.token.key}) + url = ( + reverse_lazy( + "mentorship_shortner:meet_slug_service_slug", + kwargs={ + "mentor_slug": model.mentor_profile.slug, + "service_slug": slug, + }, + ) + + f"?{querystring}" + ) response = self.client.get(url) content = 
self.bc.format.from_bytes(response.content) - expected = render(f'No service found with slug {slug}', model.mentor_profile, model.token, fix_logo=False) + expected = render(f"No service found with slug {slug}", model.mentor_profile, model.token, fix_logo=False) # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), [ - self.bc.format.to_dict(model.mentor_profile), - ]) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.assertEqual(self.bc.database.list_of('payments.ConsumptionSession'), []) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorProfile"), + [ + self.bc.format.to_dict(model.mentor_profile), + ], + ) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.assertEqual(self.bc.database.list_of("payments.ConsumptionSession"), []) """ 🔽🔽🔽 GET without MentorProfile """ def test_with_mentor_profile(self): - service = {'slug': 'join_mentorship'} - model = self.bc.database.create(user=1, - token=1, - mentor_profile=1, - mentorship_service={ - 'language': 'en', - 'video_provider': 'DAILY' - }, - service=service) - - querystring = self.bc.format.to_querystring({'token': model.token.key}) - url = reverse_lazy('mentorship_shortner:meet_slug_service_slug', - kwargs={ - 'mentor_slug': model.mentor_profile.slug, - 'service_slug': model.mentorship_service.slug - }) + f'?{querystring}' + service = {"slug": "join_mentorship"} + model = self.bc.database.create( + user=1, + token=1, + mentor_profile=1, + mentorship_service={"language": "en", "video_provider": "DAILY"}, + service=service, + ) + + querystring = self.bc.format.to_querystring({"token": model.token.key}) + url = ( + reverse_lazy( + "mentorship_shortner:meet_slug_service_slug", + kwargs={"mentor_slug": model.mentor_profile.slug, "service_slug": model.mentorship_service.slug}, + ) + + f"?{querystring}" + ) response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - expected = render(f'This mentor is not active at the moment', - model.mentor_profile, - model.token, - fix_logo=True, - academy=model.academy) + expected = render( + f"This mentor is not active at the moment", + model.mentor_profile, + model.token, + fix_logo=True, + academy=model.academy, + ) # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), [ - self.bc.format.to_dict(model.mentor_profile), - ]) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.assertEqual(self.bc.database.list_of('payments.ConsumptionSession'), []) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorProfile"), + [ + self.bc.format.to_dict(model.mentor_profile), + ], + ) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.assertEqual(self.bc.database.list_of("payments.ConsumptionSession"), []) """ 🔽🔽🔽 GET without 
MentorProfile, bad statuses """ def test_with_mentor_profile__bad_statuses(self): - cases = [{'status': x} for x in ['INVITED', 'INNACTIVE']] + cases = [{"status": x} for x in ["INVITED", "INNACTIVE"]] for mentor_profile in cases: - service = {'slug': 'join_mentorship'} - model = self.bc.database.create(user=1, - token=1, - mentor_profile=mentor_profile, - mentorship_service={ - 'language': 'en', - 'video_provider': 'DAILY' - }, - service=service) - - querystring = self.bc.format.to_querystring({'token': model.token.key}) - url = reverse_lazy('mentorship_shortner:meet_slug_service_slug', - kwargs={ - 'mentor_slug': model.mentor_profile.slug, - 'service_slug': model.mentorship_service.slug - }) + f'?{querystring}' + service = {"slug": "join_mentorship"} + model = self.bc.database.create( + user=1, + token=1, + mentor_profile=mentor_profile, + mentorship_service={"language": "en", "video_provider": "DAILY"}, + service=service, + ) + + querystring = self.bc.format.to_querystring({"token": model.token.key}) + url = ( + reverse_lazy( + "mentorship_shortner:meet_slug_service_slug", + kwargs={"mentor_slug": model.mentor_profile.slug, "service_slug": model.mentorship_service.slug}, + ) + + f"?{querystring}" + ) response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - expected = render(f'This mentor is not active at the moment', - model.mentor_profile, - model.token, - fix_logo=True, - academy=model.academy) + expected = render( + f"This mentor is not active at the moment", + model.mentor_profile, + model.token, + fix_logo=True, + academy=model.academy, + ) # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), [ - self.bc.format.to_dict(model.mentor_profile), - ]) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.assertEqual(self.bc.database.list_of('payments.ConsumptionSession'), []) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorProfile"), + [ + self.bc.format.to_dict(model.mentor_profile), + ], + ) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.assertEqual(self.bc.database.list_of("payments.ConsumptionSession"), []) # teardown - self.bc.database.delete('mentorship.MentorProfile') - self.bc.database.delete('auth.Permission') - self.bc.database.delete('payments.Service') + self.bc.database.delete("mentorship.MentorProfile") + self.bc.database.delete("auth.Permission") + self.bc.database.delete("payments.Service") """ 🔽🔽🔽 GET without MentorProfile, good statuses without mentor urls """ - @patch('breathecode.mentorship.actions.mentor_is_ready', MagicMock(side_effect=Exception())) + @patch("breathecode.mentorship.actions.mentor_is_ready", MagicMock(side_effect=Exception())) def test_with_mentor_profile__good_statuses__without_mentor_urls(self): - cases = [{'status': x} for x in ['ACTIVE', 'UNLISTED']] + cases = [{"status": x} for x in ["ACTIVE", "UNLISTED"]] for mentor_profile in cases: - service = {'slug': 'join_mentorship'} - model = self.bc.database.create(user=1, - token=1, - mentor_profile=mentor_profile, - mentorship_service={ - 'language': 'en', - 'video_provider': 'DAILY' - }, - service=service) - - querystring = 
self.bc.format.to_querystring({'token': model.token.key}) - url = reverse_lazy('mentorship_shortner:meet_slug_service_slug', - kwargs={ - 'mentor_slug': model.mentor_profile.slug, - 'service_slug': model.mentorship_service.slug - }) + f'?{querystring}' + service = {"slug": "join_mentorship"} + model = self.bc.database.create( + user=1, + token=1, + mentor_profile=mentor_profile, + mentorship_service={"language": "en", "video_provider": "DAILY"}, + service=service, + ) + + querystring = self.bc.format.to_querystring({"token": model.token.key}) + url = ( + reverse_lazy( + "mentorship_shortner:meet_slug_service_slug", + kwargs={"mentor_slug": model.mentor_profile.slug, "service_slug": model.mentorship_service.slug}, + ) + + f"?{querystring}" + ) response = self.client.get(url) content = self.bc.format.from_bytes(response.content) expected = render( - f'This mentor is not ready, please contact the mentor directly or anyone from the academy ' - 'staff.', + f"This mentor is not ready, please contact the mentor directly or anyone from the academy " "staff.", model.mentor_profile, model.token, fix_logo=True, - academy=model.academy) + academy=model.academy, + ) # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), [ - self.bc.format.to_dict(model.mentor_profile), - ]) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.assertEqual(self.bc.database.list_of('payments.ConsumptionSession'), []) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorProfile"), + [ + self.bc.format.to_dict(model.mentor_profile), + ], + ) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.assertEqual(self.bc.database.list_of("payments.ConsumptionSession"), []) # teardown - self.bc.database.delete('mentorship.MentorProfile') - self.bc.database.delete('auth.Permission') - self.bc.database.delete('payments.Service') + self.bc.database.delete("mentorship.MentorProfile") + self.bc.database.delete("auth.Permission") + self.bc.database.delete("payments.Service") """ 🔽🔽🔽 GET without MentorProfile, good statuses with mentor urls, with mentee """ - @patch('breathecode.mentorship.actions.mentor_is_ready', MagicMock()) - @patch('os.getenv', MagicMock(side_effect=apply_get_env({ - 'DAILY_API_URL': URL, - 'DAILY_API_KEY': API_KEY, - }))) - @patch('requests.request', - apply_requests_request_mock([(201, f'{URL}/v1/rooms', { - 'name': ROOM_NAME, - 'url': ROOM_URL, - })])) + @patch("breathecode.mentorship.actions.mentor_is_ready", MagicMock()) + @patch( + "os.getenv", + MagicMock( + side_effect=apply_get_env( + { + "DAILY_API_URL": URL, + "DAILY_API_KEY": API_KEY, + } + ) + ), + ) + @patch( + "requests.request", + apply_requests_request_mock( + [ + ( + 201, + f"{URL}/v1/rooms", + { + "name": ROOM_NAME, + "url": ROOM_URL, + }, + ) + ] + ), + ) def test_with_mentor_profile__good_statuses__with_mentor_urls__with_mentee(self): - cases = [{ - 'status': x, - 'online_meeting_url': self.bc.fake.url(), - 'booking_url': self.bc.fake.url(), - } for x in ['ACTIVE', 'UNLISTED']] + cases = [ + { + "status": x, + "online_meeting_url": self.bc.fake.url(), + "booking_url": self.bc.fake.url(), + } + for x in ["ACTIVE", 
"UNLISTED"] + ] for mentor_profile in cases: - service = {'slug': 'join_mentorship'} - model = self.bc.database.create(user=1, - token=1, - mentor_profile=mentor_profile, - mentorship_service={ - 'language': 'en', - 'video_provider': 'DAILY' - }, - service=service) - - querystring = self.bc.format.to_querystring({'token': model.token.key}) - url = reverse_lazy('mentorship_shortner:meet_slug_service_slug', - kwargs={ - 'mentor_slug': model.mentor_profile.slug, - 'service_slug': model.mentorship_service.slug - }) + f'?{querystring}' + service = {"slug": "join_mentorship"} + model = self.bc.database.create( + user=1, + token=1, + mentor_profile=mentor_profile, + mentorship_service={"language": "en", "video_provider": "DAILY"}, + service=service, + ) + + querystring = self.bc.format.to_querystring({"token": model.token.key}) + url = ( + reverse_lazy( + "mentorship_shortner:meet_slug_service_slug", + kwargs={"mentor_slug": model.mentor_profile.slug, "service_slug": model.mentorship_service.slug}, + ) + + f"?{querystring}" + ) response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - expected = render_pick_session(model.mentor_profile, - model.user, - model.token, - model.academy, - model.mentorship_service, - fix_logo=True) + expected = render_pick_session( + model.mentor_profile, model.user, model.token, model.academy, model.mentorship_service, fix_logo=True + ) # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), [ - self.bc.format.to_dict(model.mentor_profile), - ]) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.assertEqual(self.bc.database.list_of('payments.ConsumptionSession'), []) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorProfile"), + [ + self.bc.format.to_dict(model.mentor_profile), + ], + ) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.assertEqual(self.bc.database.list_of("payments.ConsumptionSession"), []) # teardown - self.bc.database.delete('mentorship.MentorProfile') - self.bc.database.delete('auth.Permission') - self.bc.database.delete('payments.Service') + self.bc.database.delete("mentorship.MentorProfile") + self.bc.database.delete("auth.Permission") + self.bc.database.delete("payments.Service") """ 🔽🔽🔽 GET without MentorProfile, good statuses with mentor urls, with mentee of other user """ - @patch('breathecode.mentorship.actions.mentor_is_ready', MagicMock()) - @patch('os.getenv', MagicMock(side_effect=apply_get_env({ - 'DAILY_API_URL': URL, - 'DAILY_API_KEY': API_KEY, - }))) - @patch('requests.request', - apply_requests_request_mock([(201, f'{URL}/v1/rooms', { - 'name': ROOM_NAME, - 'url': ROOM_URL, - })])) + @patch("breathecode.mentorship.actions.mentor_is_ready", MagicMock()) + @patch( + "os.getenv", + MagicMock( + side_effect=apply_get_env( + { + "DAILY_API_URL": URL, + "DAILY_API_KEY": API_KEY, + } + ) + ), + ) + @patch( + "requests.request", + apply_requests_request_mock( + [ + ( + 201, + f"{URL}/v1/rooms", + { + "name": ROOM_NAME, + "url": ROOM_URL, + }, + ) + ] + ), + ) def test_with_mentor_profile__good_statuses__with_mentor_urls__with_mentee__not_the_same_user(self): - cases = [{ - 'status': x, - 
'online_meeting_url': self.bc.fake.url(), - 'booking_url': self.bc.fake.url(), - } for x in ['ACTIVE', 'UNLISTED']] + cases = [ + { + "status": x, + "online_meeting_url": self.bc.fake.url(), + "booking_url": self.bc.fake.url(), + } + for x in ["ACTIVE", "UNLISTED"] + ] user = self.bc.database.create(user=1).user @@ -793,233 +864,314 @@ def test_with_mentor_profile__good_statuses__with_mentor_urls__with_mentee__not_ for args in cases: id += 1 - mentor_profile = {**args, 'user_id': 1} - service = {'slug': 'join_mentorship'} - model = self.bc.database.create(user=1, - token=1, - mentor_profile=mentor_profile, - mentorship_service={ - 'language': 'en', - 'video_provider': 'DAILY' - }, - service=service) - - querystring = self.bc.format.to_querystring({'token': model.token.key}) - url = reverse_lazy('mentorship_shortner:meet_slug_service_slug', - kwargs={ - 'mentor_slug': model.mentor_profile.slug, - 'service_slug': model.mentorship_service.slug - }) + f'?{querystring}' + mentor_profile = {**args, "user_id": 1} + service = {"slug": "join_mentorship"} + model = self.bc.database.create( + user=1, + token=1, + mentor_profile=mentor_profile, + mentorship_service={"language": "en", "video_provider": "DAILY"}, + service=service, + ) + + querystring = self.bc.format.to_querystring({"token": model.token.key}) + url = ( + reverse_lazy( + "mentorship_shortner:meet_slug_service_slug", + kwargs={"mentor_slug": model.mentor_profile.slug, "service_slug": model.mentorship_service.slug}, + ) + + f"?{querystring}" + ) response = self.client.get(url) content = self.bc.format.from_bytes(response.content) expected = render( - f'Hello {model.user.first_name} {model.user.last_name}, you are about to start a ' - f'{model.mentorship_service.name} with {user.first_name} {user.last_name}.', + f"Hello {model.user.first_name} {model.user.last_name}, you are about to start a " + f"{model.mentorship_service.name} with {user.first_name} {user.last_name}.", model.mentor_profile, model.token, fix_logo=True, start_session=True, - academy=model.academy) + academy=model.academy, + ) # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), [ - self.bc.format.to_dict(model.mentor_profile), - ]) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.assertEqual(self.bc.database.list_of('payments.ConsumptionSession'), []) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorProfile"), + [ + self.bc.format.to_dict(model.mentor_profile), + ], + ) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.assertEqual(self.bc.database.list_of("payments.ConsumptionSession"), []) # teardown - self.bc.database.delete('mentorship.MentorProfile') - self.bc.database.delete('auth.Permission') - self.bc.database.delete('payments.Service') + self.bc.database.delete("mentorship.MentorProfile") + self.bc.database.delete("auth.Permission") + self.bc.database.delete("payments.Service") """ 🔽🔽🔽 GET without MentorProfile, good statuses with mentor urls, MentorshipSession without mentee passing session """ - @patch('breathecode.mentorship.actions.mentor_is_ready', MagicMock()) - @patch('os.getenv', MagicMock(side_effect=apply_get_env({ - 
'DAILY_API_URL': URL, - 'DAILY_API_KEY': API_KEY, - }))) - @patch('requests.request', - apply_requests_request_mock([(201, f'{URL}/v1/rooms', { - 'name': ROOM_NAME, - 'url': ROOM_URL, - })])) + @patch("breathecode.mentorship.actions.mentor_is_ready", MagicMock()) + @patch( + "os.getenv", + MagicMock( + side_effect=apply_get_env( + { + "DAILY_API_URL": URL, + "DAILY_API_KEY": API_KEY, + } + ) + ), + ) + @patch( + "requests.request", + apply_requests_request_mock( + [ + ( + 201, + f"{URL}/v1/rooms", + { + "name": ROOM_NAME, + "url": ROOM_URL, + }, + ) + ] + ), + ) def test_with_mentor_profile__good_statuses__with_mentor_urls__session_without_mentee__passing_session(self): - cases = [{ - 'status': x, - 'online_meeting_url': self.bc.fake.url(), - 'booking_url': self.bc.fake.url(), - } for x in ['ACTIVE', 'UNLISTED']] + cases = [ + { + "status": x, + "online_meeting_url": self.bc.fake.url(), + "booking_url": self.bc.fake.url(), + } + for x in ["ACTIVE", "UNLISTED"] + ] - service = {'slug': 'join_mentorship'} + service = {"slug": "join_mentorship"} base = self.bc.database.create(user=1, token=1, service=service) id = 0 for mentor_profile in cases: id += 1 - mentorship_session = {'mentee_id': None} - model = self.bc.database.create(mentor_profile=mentor_profile, - mentorship_session=mentorship_session, - mentorship_service=1) + mentorship_session = {"mentee_id": None} + model = self.bc.database.create( + mentor_profile=mentor_profile, mentorship_session=mentorship_session, mentorship_service=1 + ) model.mentorship_session.mentee = None model.mentorship_session.save() - querystring = self.bc.format.to_querystring({ - 'token': base.token.key, - 'session': model.mentorship_session.id, - }) - url = reverse_lazy('mentorship_shortner:meet_slug_service_slug', - kwargs={ - 'mentor_slug': model.mentor_profile.slug, - 'service_slug': model.mentorship_service.slug - }) + f'?{querystring}' + querystring = self.bc.format.to_querystring( + { + "token": base.token.key, + "session": model.mentorship_session.id, + } + ) + url = ( + reverse_lazy( + "mentorship_shortner:meet_slug_service_slug", + kwargs={"mentor_slug": model.mentor_profile.slug, "service_slug": model.mentorship_service.slug}, + ) + + f"?{querystring}" + ) response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - expected = render_pick_mentee(model.mentor_profile, - base.user, - base.token, - model.academy, - model.mentorship_service, - fix_logo=True) + expected = render_pick_mentee( + model.mentor_profile, base.user, base.token, model.academy, model.mentorship_service, fix_logo=True + ) # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), [ - self.bc.format.to_dict(model.mentor_profile), - ]) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.assertEqual(self.bc.database.list_of('payments.ConsumptionSession'), []) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorProfile"), + [ + self.bc.format.to_dict(model.mentor_profile), + ], + ) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.assertEqual(self.bc.database.list_of("payments.ConsumptionSession"), []) # teardown - 
self.bc.database.delete('mentorship.MentorProfile') + self.bc.database.delete("mentorship.MentorProfile") """ 🔽🔽🔽 GET without MentorProfile, good statuses with mentor urls, MentorshipSession without mentee passing session and mentee but mentee does not exist """ - @patch('breathecode.mentorship.actions.mentor_is_ready', MagicMock()) - @patch('os.getenv', MagicMock(side_effect=apply_get_env({ - 'DAILY_API_URL': URL, - 'DAILY_API_KEY': API_KEY, - }))) - @patch('requests.request', - apply_requests_request_mock([(201, f'{URL}/v1/rooms', { - 'name': ROOM_NAME, - 'url': ROOM_URL, - })])) + @patch("breathecode.mentorship.actions.mentor_is_ready", MagicMock()) + @patch( + "os.getenv", + MagicMock( + side_effect=apply_get_env( + { + "DAILY_API_URL": URL, + "DAILY_API_KEY": API_KEY, + } + ) + ), + ) + @patch( + "requests.request", + apply_requests_request_mock( + [ + ( + 201, + f"{URL}/v1/rooms", + { + "name": ROOM_NAME, + "url": ROOM_URL, + }, + ) + ] + ), + ) def test_with_mentor_profile__good_statuses__with_mentor_urls__session_without__passing_session__passing_mentee_does_not_exits( - self): - cases = [{ - 'status': x, - 'online_meeting_url': self.bc.fake.url(), - 'booking_url': self.bc.fake.url(), - } for x in ['ACTIVE', 'UNLISTED']] - - service = {'slug': 'join_mentorship'} + self, + ): + cases = [ + { + "status": x, + "online_meeting_url": self.bc.fake.url(), + "booking_url": self.bc.fake.url(), + } + for x in ["ACTIVE", "UNLISTED"] + ] + + service = {"slug": "join_mentorship"} base = self.bc.database.create(user=1, token=1, service=service) id = 0 for mentor_profile in cases: id += 1 - mentorship_session = {'mentee_id': None} - model = self.bc.database.create(mentor_profile=mentor_profile, - mentorship_session=mentorship_session, - mentorship_service=1) + mentorship_session = {"mentee_id": None} + model = self.bc.database.create( + mentor_profile=mentor_profile, mentorship_session=mentorship_session, mentorship_service=1 + ) model.mentorship_session.mentee = None model.mentorship_session.save() - querystring = self.bc.format.to_querystring({ - 'token': base.token.key, - 'session': model.mentorship_session.id, - 'mentee': 10, - }) - url = reverse_lazy('mentorship_shortner:meet_slug_service_slug', - kwargs={ - 'mentor_slug': model.mentor_profile.slug, - 'service_slug': model.mentorship_service.slug - }) + f'?{querystring}' + querystring = self.bc.format.to_querystring( + { + "token": base.token.key, + "session": model.mentorship_session.id, + "mentee": 10, + } + ) + url = ( + reverse_lazy( + "mentorship_shortner:meet_slug_service_slug", + kwargs={"mentor_slug": model.mentor_profile.slug, "service_slug": model.mentorship_service.slug}, + ) + + f"?{querystring}" + ) response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - url = (f'/mentor/meet/{model.mentor_profile.slug}/service/{model.mentorship_service.slug}?' - f'token={base.token.key}&session={model.academy.id}&mentee=10') + url = ( + f"/mentor/meet/{model.mentor_profile.slug}/service/{model.mentorship_service.slug}?" 
+ f"token={base.token.key}&session={model.academy.id}&mentee=10" + ) expected = render( f'Mentee with user id 10 was not found, <a href="{url}&mentee=undefined">click ' - 'here to start the session anyway.</a>', + "here to start the session anyway.</a>", model.mentor_profile, base.token, fix_logo=True, - academy=model.academy) + academy=model.academy, + ) # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), [ - self.bc.format.to_dict(model.mentor_profile), - ]) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.assertEqual(self.bc.database.list_of('payments.ConsumptionSession'), []) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorProfile"), + [ + self.bc.format.to_dict(model.mentor_profile), + ], + ) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.assertEqual(self.bc.database.list_of("payments.ConsumptionSession"), []) # teardown - self.bc.database.delete('mentorship.MentorProfile') + self.bc.database.delete("mentorship.MentorProfile") """ 🔽🔽🔽 GET without MentorProfile, good statuses with mentor urls, MentorshipSession without mentee passing session and mentee, MentorshipSession with bad status """ - @patch('breathecode.mentorship.actions.mentor_is_ready', MagicMock()) - @patch('os.getenv', MagicMock(side_effect=apply_get_env({ - 'DAILY_API_URL': URL, - 'DAILY_API_KEY': API_KEY, - }))) - @patch('requests.request', - apply_requests_request_mock([(201, f'{URL}/v1/rooms', { - 'name': ROOM_NAME, - 'url': ROOM_URL, - })])) + @patch("breathecode.mentorship.actions.mentor_is_ready", MagicMock()) + @patch( + "os.getenv", + MagicMock( + side_effect=apply_get_env( + { + "DAILY_API_URL": URL, + "DAILY_API_KEY": API_KEY, + } + ) + ), + ) + @patch( + "requests.request", + apply_requests_request_mock( + [ + ( + 201, + f"{URL}/v1/rooms", + { + "name": ROOM_NAME, + "url": ROOM_URL, + }, + ) + ] + ), + ) def test_with_mentor_profile__good_statuses__with_mentor_urls__session_without__passing_session__passing_mentee__bad_status( - self): - mentor_cases = [{ - 'status': x, - 'online_meeting_url': self.bc.fake.url(), - 'booking_url': self.bc.fake.url(), - } for x in ['ACTIVE', 'UNLISTED']] + self, + ): + mentor_cases = [ + { + "status": x, + "online_meeting_url": self.bc.fake.url(), + "booking_url": self.bc.fake.url(), + } + for x in ["ACTIVE", "UNLISTED"] + ] base = self.bc.database.create(user=1, token=1) @@ -1027,309 +1179,410 @@ def test_with_mentor_profile__good_statuses__with_mentor_urls__session_without__ for mentor_profile in mentor_cases: id += 1 - session_cases = [{ - 'status': x, - 'mentee_id': None, - } for x in ['COMPLETED', 'FAILED', 'IGNORED']] + session_cases = [ + { + "status": x, + "mentee_id": None, + } + for x in ["COMPLETED", "FAILED", "IGNORED"] + ] for mentorship_session in session_cases: - service = {'slug': 'join_mentorship'} + service = {"slug": "join_mentorship"} base = self.bc.database.create(user=1, token=1, service=service) - model = self.bc.database.create(mentor_profile=mentor_profile, - mentorship_session=mentorship_session, - mentorship_service=1) + model = self.bc.database.create( + mentor_profile=mentor_profile, 
mentorship_session=mentorship_session, mentorship_service=1 + ) model.mentorship_session.mentee = None model.mentorship_session.save() - querystring = self.bc.format.to_querystring({ - 'token': base.token.key, - 'session': model.mentorship_session.id, - 'mentee': base.user.id, - }) - url = reverse_lazy('mentorship_shortner:meet_slug_service_slug', - kwargs={ - 'mentor_slug': model.mentor_profile.slug, - 'service_slug': model.mentorship_service.slug - }) + f'?{querystring}' + querystring = self.bc.format.to_querystring( + { + "token": base.token.key, + "session": model.mentorship_session.id, + "mentee": base.user.id, + } + ) + url = ( + reverse_lazy( + "mentorship_shortner:meet_slug_service_slug", + kwargs={ + "mentor_slug": model.mentor_profile.slug, + "service_slug": model.mentorship_service.slug, + }, + ) + + f"?{querystring}" + ) response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - url = (f'/mentor/meet/{model.mentor_profile.slug}?token={base.token.key}&session=' - f'{model.academy.id}&mentee=10') + url = ( + f"/mentor/meet/{model.mentor_profile.slug}?token={base.token.key}&session=" + f"{model.academy.id}&mentee=10" + ) expected = render( - f'This mentoring session has ended ({model.mentorship_session.status}), would you like ' + f"This mentoring session has ended ({model.mentorship_session.status}), would you like " f'<a href="/mentor/meet/{model.mentor_profile.slug}">to start a new one?</a>.', model.mentor_profile, base.token, fix_logo=True, - academy=model.academy) + academy=model.academy, + ) # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), [ - self.bc.format.to_dict(model.mentor_profile), - ]) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.assertEqual(self.bc.database.list_of('payments.ConsumptionSession'), []) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorProfile"), + [ + self.bc.format.to_dict(model.mentor_profile), + ], + ) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.assertEqual(self.bc.database.list_of("payments.ConsumptionSession"), []) # teardown - self.bc.database.delete('mentorship.MentorProfile') - self.bc.database.delete('auth.Permission') - self.bc.database.delete('payments.Service') + self.bc.database.delete("mentorship.MentorProfile") + self.bc.database.delete("auth.Permission") + self.bc.database.delete("payments.Service") """ 🔽🔽🔽 GET without MentorProfile, good statuses with mentor urls, MentorshipSession without mentee passing session and mentee but mentee does not exist """ - @patch('breathecode.mentorship.actions.mentor_is_ready', MagicMock()) - @patch('os.getenv', MagicMock(side_effect=apply_get_env({ - 'DAILY_API_URL': URL, - 'DAILY_API_KEY': API_KEY, - }))) - @patch('requests.request', - apply_requests_request_mock([(201, f'{URL}/v1/rooms', { - 'name': ROOM_NAME, - 'url': ROOM_URL, - })])) + @patch("breathecode.mentorship.actions.mentor_is_ready", MagicMock()) + @patch( + "os.getenv", + MagicMock( + side_effect=apply_get_env( + { + "DAILY_API_URL": URL, + "DAILY_API_KEY": API_KEY, + } + ) + ), + ) + @patch( + "requests.request", + apply_requests_request_mock( + [ + ( + 
201, + f"{URL}/v1/rooms", + { + "name": ROOM_NAME, + "url": ROOM_URL, + }, + ) + ] + ), + ) def test_with_mentor_profile__passing_session__passing_mentee__passing_redirect(self): - cases = [{ - 'status': x, - 'online_meeting_url': self.bc.fake.url(), - 'booking_url': self.bc.fake.url(), - } for x in ['ACTIVE', 'UNLISTED']] + cases = [ + { + "status": x, + "online_meeting_url": self.bc.fake.url(), + "booking_url": self.bc.fake.url(), + } + for x in ["ACTIVE", "UNLISTED"] + ] id = 0 for mentor_profile in cases: id += 1 - service = {'slug': 'join_mentorship'} + service = {"slug": "join_mentorship"} base = self.bc.database.create(user=1, token=1, service=service) - mentorship_session = {'mentee_id': None} - model = self.bc.database.create(mentor_profile=mentor_profile, - mentorship_session=mentorship_session, - mentorship_service=1) + mentorship_session = {"mentee_id": None} + model = self.bc.database.create( + mentor_profile=mentor_profile, mentorship_session=mentorship_session, mentorship_service=1 + ) model.mentorship_session.mentee = None model.mentorship_session.save() - querystring = self.bc.format.to_querystring({ - 'token': base.token.key, - 'session': model.mentorship_session.id, - 'mentee': base.user.id, - 'redirect': 'true', - }) - url = reverse_lazy('mentorship_shortner:meet_slug_service_slug', - kwargs={ - 'mentor_slug': model.mentor_profile.slug, - 'service_slug': model.mentorship_service.slug - }) + f'?{querystring}' + querystring = self.bc.format.to_querystring( + { + "token": base.token.key, + "session": model.mentorship_session.id, + "mentee": base.user.id, + "redirect": "true", + } + ) + url = ( + reverse_lazy( + "mentorship_shortner:meet_slug_service_slug", + kwargs={"mentor_slug": model.mentor_profile.slug, "service_slug": model.mentorship_service.slug}, + ) + + f"?{querystring}" + ) response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - expected = render_session(model.mentorship_session, - model.mentor_profile, - base.user, - model.mentorship_service, - model.academy, - base.token, - fix_logo=True) + expected = render_session( + model.mentorship_session, + model.mentor_profile, + base.user, + model.mentorship_service, + model.academy, + base.token, + fix_logo=True, + ) # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), [ - self.bc.format.to_dict(model.mentor_profile), - ]) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.assertEqual(self.bc.database.list_of('payments.ConsumptionSession'), []) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorProfile"), + [ + self.bc.format.to_dict(model.mentor_profile), + ], + ) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.assertEqual(self.bc.database.list_of("payments.ConsumptionSession"), []) # teardown - self.bc.database.delete('mentorship.MentorProfile') - self.bc.database.delete('auth.Permission') - self.bc.database.delete('payments.Service') + self.bc.database.delete("mentorship.MentorProfile") + self.bc.database.delete("auth.Permission") + self.bc.database.delete("payments.Service") """ 🔽🔽🔽 GET without MentorProfile, good statuses with mentor urls, 
MentorshipSession without mentee passing session and mentee but mentee does not exist, user without name """ - @patch('breathecode.mentorship.actions.mentor_is_ready', MagicMock()) - @patch('os.getenv', MagicMock(side_effect=apply_get_env({ - 'DAILY_API_URL': URL, - 'DAILY_API_KEY': API_KEY, - }))) - @patch('requests.request', - apply_requests_request_mock([(201, f'{URL}/v1/rooms', { - 'name': ROOM_NAME, - 'url': ROOM_URL, - })])) + @patch("breathecode.mentorship.actions.mentor_is_ready", MagicMock()) + @patch( + "os.getenv", + MagicMock( + side_effect=apply_get_env( + { + "DAILY_API_URL": URL, + "DAILY_API_KEY": API_KEY, + } + ) + ), + ) + @patch( + "requests.request", + apply_requests_request_mock( + [ + ( + 201, + f"{URL}/v1/rooms", + { + "name": ROOM_NAME, + "url": ROOM_URL, + }, + ) + ] + ), + ) def test_with_mentor_profile__without_user_name(self): - cases = [{ - 'status': x, - 'online_meeting_url': self.bc.fake.url(), - 'booking_url': self.bc.fake.url(), - } for x in ['ACTIVE', 'UNLISTED']] - service = {'slug': 'join_mentorship'} + cases = [ + { + "status": x, + "online_meeting_url": self.bc.fake.url(), + "booking_url": self.bc.fake.url(), + } + for x in ["ACTIVE", "UNLISTED"] + ] + service = {"slug": "join_mentorship"} id = 0 for mentor_profile in cases: id += 1 - user = {'first_name': '', 'last_name': ''} + user = {"first_name": "", "last_name": ""} base = self.bc.database.create(user=user, token=1, service=service) - mentorship_session = {'mentee_id': None} - academy = {'available_as_saas': False} - model = self.bc.database.create(mentor_profile=mentor_profile, - mentorship_session=mentorship_session, - user=user, - mentorship_service={ - 'language': 'en', - 'video_provider': 'DAILY' - }, - academy=academy) + mentorship_session = {"mentee_id": None} + academy = {"available_as_saas": False} + model = self.bc.database.create( + mentor_profile=mentor_profile, + mentorship_session=mentorship_session, + user=user, + mentorship_service={"language": "en", "video_provider": "DAILY"}, + academy=academy, + ) model.mentorship_session.mentee = None model.mentorship_session.save() - querystring = self.bc.format.to_querystring({ - 'token': base.token.key, - }) - url = reverse_lazy('mentorship_shortner:meet_slug_service_slug', - kwargs={ - 'mentor_slug': model.mentor_profile.slug, - 'service_slug': model.mentorship_service.slug - }) + f'?{querystring}' + querystring = self.bc.format.to_querystring( + { + "token": base.token.key, + } + ) + url = ( + reverse_lazy( + "mentorship_shortner:meet_slug_service_slug", + kwargs={"mentor_slug": model.mentor_profile.slug, "service_slug": model.mentorship_service.slug}, + ) + + f"?{querystring}" + ) response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - expected = render(f'Hello student, you are about to start a {model.mentorship_service.name} with a mentor.', - model.mentor_profile, - base.token, - fix_logo=True, - start_session=True, - academy=model.academy) + expected = render( + f"Hello student, you are about to start a {model.mentorship_service.name} with a mentor.", + model.mentor_profile, + base.token, + fix_logo=True, + start_session=True, + academy=model.academy, + ) # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - 
self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), [ - self.bc.format.to_dict(model.mentor_profile), - ]) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.assertEqual(self.bc.database.list_of('payments.ConsumptionSession'), []) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorProfile"), + [ + self.bc.format.to_dict(model.mentor_profile), + ], + ) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.assertEqual(self.bc.database.list_of("payments.ConsumptionSession"), []) # teardown - self.bc.database.delete('mentorship.MentorProfile') - self.bc.database.delete('auth.Permission') - self.bc.database.delete('auth.User') - self.bc.database.delete('payments.Service') + self.bc.database.delete("mentorship.MentorProfile") + self.bc.database.delete("auth.Permission") + self.bc.database.delete("auth.User") + self.bc.database.delete("payments.Service") """ 🔽🔽🔽 GET without MentorProfile, good statuses with mentor urls, MentorshipSession without mentee passing session and mentee but mentee does not exist, user without name """ - @patch('breathecode.mentorship.actions.get_pending_sessions_or_create', - MagicMock(side_effect=Exception('Error inside get_pending_sessions_or_create'))) - @patch('breathecode.mentorship.actions.mentor_is_ready', MagicMock()) - @patch('os.getenv', MagicMock(side_effect=apply_get_env({ - 'DAILY_API_URL': URL, - 'DAILY_API_KEY': API_KEY, - }))) - @patch('requests.request', - apply_requests_request_mock([(201, f'{URL}/v1/rooms', { - 'name': ROOM_NAME, - 'url': ROOM_URL, - })])) + @patch( + "breathecode.mentorship.actions.get_pending_sessions_or_create", + MagicMock(side_effect=Exception("Error inside get_pending_sessions_or_create")), + ) + @patch("breathecode.mentorship.actions.mentor_is_ready", MagicMock()) + @patch( + "os.getenv", + MagicMock( + side_effect=apply_get_env( + { + "DAILY_API_URL": URL, + "DAILY_API_KEY": API_KEY, + } + ) + ), + ) + @patch( + "requests.request", + apply_requests_request_mock( + [ + ( + 201, + f"{URL}/v1/rooms", + { + "name": ROOM_NAME, + "url": ROOM_URL, + }, + ) + ] + ), + ) def test_error_inside_get_pending_sessions_or_create(self): - cases = [{ - 'status': x, - 'online_meeting_url': self.bc.fake.url(), - 'booking_url': self.bc.fake.url(), - } for x in ['ACTIVE', 'UNLISTED']] - permission = {'codename': 'join_mentorship'} + cases = [ + { + "status": x, + "online_meeting_url": self.bc.fake.url(), + "booking_url": self.bc.fake.url(), + } + for x in ["ACTIVE", "UNLISTED"] + ] + permission = {"codename": "join_mentorship"} id = 0 for mentor_profile in cases: id += 1 - user = {'first_name': '', 'last_name': ''} + user = {"first_name": "", "last_name": ""} base = self.bc.database.create(user=user, token=1, group=1, permission=permission) - mentorship_session = {'mentee_id': None} - academy = {'available_as_saas': False} - model = self.bc.database.create(mentor_profile=mentor_profile, - mentorship_session=mentorship_session, - user=user, - mentorship_service={ - 'language': 'en', - 'video_provider': 'DAILY' - }, - academy=academy) + mentorship_session = {"mentee_id": None} + academy = {"available_as_saas": False} + model = self.bc.database.create( + mentor_profile=mentor_profile, + mentorship_session=mentorship_session, + user=user, + mentorship_service={"language": "en", "video_provider": "DAILY"}, + academy=academy, + ) model.mentorship_session.mentee = None model.mentorship_session.save() - querystring = self.bc.format.to_querystring({ - 'token': 
base.token.key, - }) - url = reverse_lazy('mentorship_shortner:meet_slug_service_slug', - kwargs={ - 'mentor_slug': model.mentor_profile.slug, - 'service_slug': model.mentorship_service.slug - }) + f'?{querystring}' + querystring = self.bc.format.to_querystring( + { + "token": base.token.key, + } + ) + url = ( + reverse_lazy( + "mentorship_shortner:meet_slug_service_slug", + kwargs={"mentor_slug": model.mentor_profile.slug, "service_slug": model.mentorship_service.slug}, + ) + + f"?{querystring}" + ) response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - expected = render_message('Error inside get_pending_sessions_or_create', academy=model.academy) + expected = render_message("Error inside get_pending_sessions_or_create", academy=model.academy) # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), [ - self.bc.format.to_dict(model.mentor_profile), - ]) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.assertEqual(self.bc.database.list_of('payments.ConsumptionSession'), []) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorProfile"), + [ + self.bc.format.to_dict(model.mentor_profile), + ], + ) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.assertEqual(self.bc.database.list_of("payments.ConsumptionSession"), []) # teardown - self.bc.database.delete('mentorship.MentorProfile') - self.bc.database.delete('auth.Permission') - self.bc.database.delete('auth.User') + self.bc.database.delete("mentorship.MentorProfile") + self.bc.database.delete("auth.Permission") + self.bc.database.delete("auth.User") # TODO: disabled until have a new feature flags manager # """ @@ -1416,1245 +1669,1682 @@ def test_error_inside_get_pending_sessions_or_create(self): # self.bc.database.delete('auth.User') # self.bc.database.delete('payments.Service') - @patch('breathecode.mentorship.actions.mentor_is_ready', MagicMock()) - @patch('os.getenv', MagicMock(side_effect=apply_get_env({ - 'DAILY_API_URL': URL, - 'DAILY_API_KEY': API_KEY, - }))) - @patch('requests.request', - apply_requests_request_mock([(201, f'{URL}/v1/rooms', { - 'name': ROOM_NAME, - 'url': ROOM_URL, - })])) - @patch('breathecode.mentorship.permissions.flags.Release.enable_consume_mentorships', MagicMock(return_value=True)) + @patch("breathecode.mentorship.actions.mentor_is_ready", MagicMock()) + @patch( + "os.getenv", + MagicMock( + side_effect=apply_get_env( + { + "DAILY_API_URL": URL, + "DAILY_API_KEY": API_KEY, + } + ) + ), + ) + @patch( + "requests.request", + apply_requests_request_mock( + [ + ( + 201, + f"{URL}/v1/rooms", + { + "name": ROOM_NAME, + "url": ROOM_URL, + }, + ) + ] + ), + ) + @patch("breathecode.mentorship.permissions.flags.Release.enable_consume_mentorships", MagicMock(return_value=True)) def test_with_mentor_profile__academy_available_as_saas__flag_eq_true__mentee_with_no_consumables(self): - cases = [{ - 'status': x, - 'online_meeting_url': self.bc.fake.url(), - 'booking_url': self.bc.fake.url(), - } for x in ['ACTIVE', 'UNLISTED']] - service = {'slug': 'join_mentorship'} + cases = [ + { + "status": x, + "online_meeting_url": self.bc.fake.url(), + "booking_url": 
self.bc.fake.url(), + } + for x in ["ACTIVE", "UNLISTED"] + ] + service = {"slug": "join_mentorship"} id = 0 for mentor_profile in cases: id += 1 - user = {'first_name': '', 'last_name': ''} + user = {"first_name": "", "last_name": ""} base = self.bc.database.create(user=user, token=1, service=service) - mentorship_session = {'mentee_id': None} - academy = {'available_as_saas': True} - model = self.bc.database.create(mentor_profile=mentor_profile, - mentorship_session=mentorship_session, - user=user, - mentorship_service={ - 'language': 'en', - 'video_provider': 'DAILY' - }, - academy=academy) + mentorship_session = {"mentee_id": None} + academy = {"available_as_saas": True} + model = self.bc.database.create( + mentor_profile=mentor_profile, + mentorship_session=mentorship_session, + user=user, + mentorship_service={"language": "en", "video_provider": "DAILY"}, + academy=academy, + ) model.mentorship_session.mentee = None model.mentorship_session.save() - querystring = self.bc.format.to_querystring({ - 'token': base.token.key, - }) - url = reverse_lazy('mentorship_shortner:meet_slug_service_slug', - kwargs={ - 'mentor_slug': model.mentor_profile.slug, - 'service_slug': model.mentorship_service.slug - }) + f'?{querystring}' + querystring = self.bc.format.to_querystring( + { + "token": base.token.key, + } + ) + url = ( + reverse_lazy( + "mentorship_shortner:meet_slug_service_slug", + kwargs={"mentor_slug": model.mentor_profile.slug, "service_slug": model.mentorship_service.slug}, + ) + + f"?{querystring}" + ) response = self.client.get(url) content = self.bc.format.from_bytes(response.content) template_data = {} - template_data['GO_BACK'] = 'Go back to Dashboard' - template_data['URL_BACK'] = 'https://4geeks.com/choose-program' - template_data['BUTTON'] = 'Get a plan' - template_data['LINK'] = f'https://4geeks.com/checkout?plan=basic&token={base.token.key}' - expected = render('You must get a plan in order to access this service', data=template_data, academy=None) + template_data["GO_BACK"] = "Go back to Dashboard" + template_data["URL_BACK"] = "https://4geeks.com/choose-program" + template_data["BUTTON"] = "Get a plan" + template_data["LINK"] = f"https://4geeks.com/checkout?plan=basic&token={base.token.key}" + expected = render("You must get a plan in order to access this service", data=template_data, academy=None) # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_402_PAYMENT_REQUIRED) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), [ - self.bc.format.to_dict(model.mentor_profile), - ]) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.assertEqual(self.bc.database.list_of('payments.ConsumptionSession'), []) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorProfile"), + [ + self.bc.format.to_dict(model.mentor_profile), + ], + ) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.assertEqual(self.bc.database.list_of("payments.ConsumptionSession"), []) # teardown - self.bc.database.delete('mentorship.MentorProfile') - self.bc.database.delete('auth.Permission') - self.bc.database.delete('auth.User') - self.bc.database.delete('payments.Service') - - @patch('breathecode.mentorship.actions.mentor_is_ready', MagicMock()) - 
@patch('os.getenv', MagicMock(side_effect=apply_get_env({ - 'DAILY_API_URL': URL, - 'DAILY_API_KEY': API_KEY, - }))) - @patch('requests.request', - apply_requests_request_mock([(201, f'{URL}/v1/rooms', { - 'name': ROOM_NAME, - 'url': ROOM_URL, - })])) - @patch('breathecode.mentorship.permissions.flags.Release.enable_consume_mentorships', MagicMock(return_value=True)) + self.bc.database.delete("mentorship.MentorProfile") + self.bc.database.delete("auth.Permission") + self.bc.database.delete("auth.User") + self.bc.database.delete("payments.Service") + + @patch("breathecode.mentorship.actions.mentor_is_ready", MagicMock()) + @patch( + "os.getenv", + MagicMock( + side_effect=apply_get_env( + { + "DAILY_API_URL": URL, + "DAILY_API_KEY": API_KEY, + } + ) + ), + ) + @patch( + "requests.request", + apply_requests_request_mock( + [ + ( + 201, + f"{URL}/v1/rooms", + { + "name": ROOM_NAME, + "url": ROOM_URL, + }, + ) + ] + ), + ) + @patch("breathecode.mentorship.permissions.flags.Release.enable_consume_mentorships", MagicMock(return_value=True)) def test_with_mentor_profile__academy_available_as_saas__flag_eq_true__mentee_with_no_consumables_with_subcription( - self): - cases = [{ - 'status': x, - 'online_meeting_url': self.bc.fake.url(), - 'booking_url': self.bc.fake.url(), - } for x in ['ACTIVE', 'UNLISTED']] - service = {'slug': 'join_mentorship'} + self, + ): + cases = [ + { + "status": x, + "online_meeting_url": self.bc.fake.url(), + "booking_url": self.bc.fake.url(), + } + for x in ["ACTIVE", "UNLISTED"] + ] + service = {"slug": "join_mentorship"} id = 0 for mentor_profile in cases: id += 1 - user = {'first_name': '', 'last_name': ''} + user = {"first_name": "", "last_name": ""} base = self.bc.database.create(user=user, token=1, service=service, mentorship_service_set=1) - mentorship_session = {'mentee_id': None} - academy = {'available_as_saas': True} - model = self.bc.database.create(mentor_profile=mentor_profile, - mentorship_session=mentorship_session, - user=user, - mentorship_service={ - 'language': 'en', - 'video_provider': 'DAILY' - }, - academy=academy, - plan={ - 'is_renewable': False, - 'mentorship_service_set': base.mentorship_service_set - }, - service=1, - subscription={ - 'user': base.user, - 'selected_mentorship_service_set': base.mentorship_service_set - }) + mentorship_session = {"mentee_id": None} + academy = {"available_as_saas": True} + model = self.bc.database.create( + mentor_profile=mentor_profile, + mentorship_session=mentorship_session, + user=user, + mentorship_service={"language": "en", "video_provider": "DAILY"}, + academy=academy, + plan={"is_renewable": False, "mentorship_service_set": base.mentorship_service_set}, + service=1, + subscription={"user": base.user, "selected_mentorship_service_set": base.mentorship_service_set}, + ) model.mentorship_session.mentee = None model.mentorship_session.save() model.subscription.selected_mentorship_service_set.mentorship_services.add(model.mentorship_service) model.subscription.save() - querystring = self.bc.format.to_querystring({ - 'token': base.token.key, - }) - url = reverse_lazy('mentorship_shortner:meet_slug_service_slug', - kwargs={ - 'mentor_slug': model.mentor_profile.slug, - 'service_slug': model.mentorship_service.slug - }) + f'?{querystring}' + querystring = self.bc.format.to_querystring( + { + "token": base.token.key, + } + ) + url = ( + reverse_lazy( + "mentorship_shortner:meet_slug_service_slug", + kwargs={"mentor_slug": model.mentor_profile.slug, "service_slug": model.mentorship_service.slug}, + ) + + 
f"?{querystring}" + ) response = self.client.get(url) content = self.bc.format.from_bytes(response.content) template_data = {} - template_data['GO_BACK'] = 'Go back to Dashboard' - template_data['URL_BACK'] = 'https://4geeks.com/choose-program' - template_data['BUTTON'] = 'Get more consumables' - template_data[ - 'LINK'] = f'https://4geeks.com/checkout?mentorship_service_set={base.mentorship_service_set.slug}&token={base.token.key}' - expected = render('with-consumer-not-enough-consumables', data=template_data, academy=None) + template_data["GO_BACK"] = "Go back to Dashboard" + template_data["URL_BACK"] = "https://4geeks.com/choose-program" + template_data["BUTTON"] = "Get more consumables" + template_data["LINK"] = ( + f"https://4geeks.com/checkout?mentorship_service_set={base.mentorship_service_set.slug}&token={base.token.key}" + ) + expected = render("with-consumer-not-enough-consumables", data=template_data, academy=None) # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_402_PAYMENT_REQUIRED) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), [ - self.bc.format.to_dict(model.mentor_profile), - ]) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.assertEqual(self.bc.database.list_of('payments.ConsumptionSession'), []) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorProfile"), + [ + self.bc.format.to_dict(model.mentor_profile), + ], + ) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.assertEqual(self.bc.database.list_of("payments.ConsumptionSession"), []) # teardown - self.bc.database.delete('mentorship.MentorProfile') - self.bc.database.delete('auth.Permission') - self.bc.database.delete('auth.User') - self.bc.database.delete('payments.Service') - - @patch('breathecode.mentorship.actions.mentor_is_ready', MagicMock()) - @patch('os.getenv', MagicMock(side_effect=apply_get_env({ - 'DAILY_API_URL': URL, - 'DAILY_API_KEY': API_KEY, - }))) - @patch('requests.request', - apply_requests_request_mock([(201, f'{URL}/v1/rooms', { - 'name': ROOM_NAME, - 'url': ROOM_URL, - })])) - @patch('breathecode.mentorship.permissions.flags.Release.enable_consume_mentorships', MagicMock(return_value=True)) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.payments.tasks.end_the_consumption_session.apply_async', MagicMock(return_value=None)) + self.bc.database.delete("mentorship.MentorProfile") + self.bc.database.delete("auth.Permission") + self.bc.database.delete("auth.User") + self.bc.database.delete("payments.Service") + + @patch("breathecode.mentorship.actions.mentor_is_ready", MagicMock()) + @patch( + "os.getenv", + MagicMock( + side_effect=apply_get_env( + { + "DAILY_API_URL": URL, + "DAILY_API_KEY": API_KEY, + } + ) + ), + ) + @patch( + "requests.request", + apply_requests_request_mock( + [ + ( + 201, + f"{URL}/v1/rooms", + { + "name": ROOM_NAME, + "url": ROOM_URL, + }, + ) + ] + ), + ) + @patch("breathecode.mentorship.permissions.flags.Release.enable_consume_mentorships", MagicMock(return_value=True)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.payments.tasks.end_the_consumption_session.apply_async", MagicMock(return_value=None)) def 
test_with_mentor_profile__academy_available_as_saas__flag_eq_true__mentee_with_consumables(self): - cases = [{ - 'status': x, - 'online_meeting_url': self.bc.fake.url(), - 'booking_url': self.bc.fake.url(), - } for x in ['ACTIVE', 'UNLISTED']] - service = {'slug': 'join_mentorship'} + cases = [ + { + "status": x, + "online_meeting_url": self.bc.fake.url(), + "booking_url": self.bc.fake.url(), + } + for x in ["ACTIVE", "UNLISTED"] + ] + service = {"slug": "join_mentorship"} id = 0 for mentor_profile in cases: id += 1 - user = {'first_name': '', 'last_name': ''} + user = {"first_name": "", "last_name": ""} - mentorship_session = {'mentee_id': None} - academy = {'available_as_saas': True} + mentorship_session = {"mentee_id": None} + academy = {"available_as_saas": True} how_many = random.randint(1, 100) - consumable = {'how_many': how_many} + consumable = {"how_many": how_many} delta = timedelta(seconds=random.randint(1, 1000)) - mentorship_service = {'max_duration': delta, 'language': 'en'} - model = self.bc.database.create(mentor_profile=mentor_profile, - mentorship_session=mentorship_session, - user=user, - mentorship_service=mentorship_service, - mentorship_service_set=1, - academy=academy) - - base = self.bc.database.create(user=user, - token=1, - service=service, - mentorship_service=model.mentorship_service, - mentorship_service_set=1, - consumable=consumable) + mentorship_service = {"max_duration": delta, "language": "en"} + model = self.bc.database.create( + mentor_profile=mentor_profile, + mentorship_session=mentorship_session, + user=user, + mentorship_service=mentorship_service, + mentorship_service_set=1, + academy=academy, + ) + + base = self.bc.database.create( + user=user, + token=1, + service=service, + mentorship_service=model.mentorship_service, + mentorship_service_set=1, + consumable=consumable, + ) model.mentorship_session.mentee = None model.mentorship_session.save() - querystring = self.bc.format.to_querystring({ - 'token': base.token.key, - }) - url = reverse_lazy('mentorship_shortner:meet_slug_service_slug', - kwargs={ - 'mentor_slug': model.mentor_profile.slug, - 'service_slug': model.mentorship_service.slug - }) + f'?{querystring}' + querystring = self.bc.format.to_querystring( + { + "token": base.token.key, + } + ) + url = ( + reverse_lazy( + "mentorship_shortner:meet_slug_service_slug", + kwargs={"mentor_slug": model.mentor_profile.slug, "service_slug": model.mentorship_service.slug}, + ) + + f"?{querystring}" + ) response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - expected = render(f'Hello student, you are about to start a {model.mentorship_service.name} with a mentor.', - model.mentor_profile, - base.token, - fix_logo=True, - start_session=True, - academy=model.academy) + expected = render( + f"Hello student, you are about to start a {model.mentorship_service.name} with a mentor.", + model.mentor_profile, + base.token, + fix_logo=True, + start_session=True, + academy=model.academy, + ) # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), [ - self.bc.format.to_dict(model.mentor_profile), - ]) self.assertEqual( - self.bc.database.list_of('payments.Consumable'), + 
self.bc.database.list_of("mentorship.MentorProfile"), + [ + self.bc.format.to_dict(model.mentor_profile), + ], + ) + self.assertEqual( + self.bc.database.list_of("payments.Consumable"), + [ + format_consumable( + { + "id": base.user.id // 2, + "user_id": base.user.id, + "how_many": how_many, # this has not discounted yet + "mentorship_service_set_id": base.mentorship_service_set.id, + "service_item_id": base.consumable.id, + } + ) + ], + ) + self.assertEqual( + self.bc.database.list_of("payments.ConsumptionSession"), + [ + format_consumption_session( + model.mentorship_service, + model.mentor_profile, + base.mentorship_service_set, + base.user, + base.consumable, + data={ + "id": base.user.id // 2, + "duration": delta, + "eta": UTC_NOW + delta, + }, + ), + ], + ) + + self.bc.check.calls( + tasks.end_the_consumption_session.apply_async.call_args_list, [ - format_consumable({ - 'id': base.user.id // 2, - 'user_id': base.user.id, - 'how_many': how_many, # this has not discounted yet - 'mentorship_service_set_id': base.mentorship_service_set.id, - 'service_item_id': base.consumable.id, - }) - ]) - self.assertEqual(self.bc.database.list_of('payments.ConsumptionSession'), [ - format_consumption_session(model.mentorship_service, - model.mentor_profile, - base.mentorship_service_set, - base.user, - base.consumable, - data={ - 'id': base.user.id // 2, - 'duration': delta, - 'eta': UTC_NOW + delta, - }), - ]) - - self.bc.check.calls(tasks.end_the_consumption_session.apply_async.call_args_list, [ - call(args=(id, 1), eta=UTC_NOW + delta), - ]) + call(args=(id, 1), eta=UTC_NOW + delta), + ], + ) # teardown - self.bc.database.delete('mentorship.MentorProfile') - self.bc.database.delete('auth.Permission') - self.bc.database.delete('auth.User') - self.bc.database.delete('payments.Service') + self.bc.database.delete("mentorship.MentorProfile") + self.bc.database.delete("auth.Permission") + self.bc.database.delete("auth.User") + self.bc.database.delete("payments.Service") tasks.end_the_consumption_session.apply_async.call_args_list = [] - @patch('breathecode.mentorship.actions.mentor_is_ready', MagicMock()) - @patch('os.getenv', MagicMock(side_effect=apply_get_env({ - 'DAILY_API_URL': URL, - 'DAILY_API_KEY': API_KEY, - }))) - @patch('requests.request', - apply_requests_request_mock([(201, f'{URL}/v1/rooms', { - 'name': ROOM_NAME, - 'url': ROOM_URL, - })])) - @patch('breathecode.mentorship.permissions.flags.Release.enable_consume_mentorships', MagicMock(return_value=True)) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("breathecode.mentorship.actions.mentor_is_ready", MagicMock()) + @patch( + "os.getenv", + MagicMock( + side_effect=apply_get_env( + { + "DAILY_API_URL": URL, + "DAILY_API_KEY": API_KEY, + } + ) + ), + ) + @patch( + "requests.request", + apply_requests_request_mock( + [ + ( + 201, + f"{URL}/v1/rooms", + { + "name": ROOM_NAME, + "url": ROOM_URL, + }, + ) + ] + ), + ) + @patch("breathecode.mentorship.permissions.flags.Release.enable_consume_mentorships", MagicMock(return_value=True)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_with_mentor_profile__academy_available_as_saas__flag_eq_true__bypass_mentor_consume(self): - cases = [{ - 'status': x, - 'online_meeting_url': self.bc.fake.url(), - 'booking_url': self.bc.fake.url(), - } for x in ['ACTIVE', 'UNLISTED']] - service = {'slug': 'join_mentorship'} + cases = [ + { + "status": x, + "online_meeting_url": self.bc.fake.url(), + "booking_url": self.bc.fake.url(), + } + for x in 
["ACTIVE", "UNLISTED"] + ] + service = {"slug": "join_mentorship"} id = 0 for mentor_profile in cases: id += 1 - user = {'first_name': '', 'last_name': ''} + user = {"first_name": "", "last_name": ""} - mentorship_session = {'mentee_id': None} - academy = {'available_as_saas': True} + mentorship_session = {"mentee_id": None} + academy = {"available_as_saas": True} delta = timedelta(seconds=random.randint(1, 1000)) - mentorship_service = {'max_duration': delta, 'language': 'en'} - model = self.bc.database.create(mentor_profile=mentor_profile, - mentorship_session=mentorship_session, - user=user, - token=1, - service=service, - mentorship_service=mentorship_service, - mentorship_service_set=1, - academy=academy) + mentorship_service = {"max_duration": delta, "language": "en"} + model = self.bc.database.create( + mentor_profile=mentor_profile, + mentorship_session=mentorship_session, + user=user, + token=1, + service=service, + mentorship_service=mentorship_service, + mentorship_service_set=1, + academy=academy, + ) model.mentorship_session.mentee = None model.mentorship_session.save() - querystring = self.bc.format.to_querystring({ - 'token': model.token.key, - }) - url = reverse_lazy('mentorship_shortner:meet_slug_service_slug', - kwargs={ - 'mentor_slug': model.mentor_profile.slug, - 'service_slug': model.mentorship_service.slug - }) + f'?{querystring}' + querystring = self.bc.format.to_querystring( + { + "token": model.token.key, + } + ) + url = ( + reverse_lazy( + "mentorship_shortner:meet_slug_service_slug", + kwargs={"mentor_slug": model.mentor_profile.slug, "service_slug": model.mentorship_service.slug}, + ) + + f"?{querystring}" + ) response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - expected = render_pick_session(model.mentor_profile, - model.user, - model.token, - model.academy, - model.mentorship_service, - fix_logo=True) + expected = render_pick_session( + model.mentor_profile, model.user, model.token, model.academy, model.mentorship_service, fix_logo=True + ) # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), [ - self.bc.format.to_dict(model.mentor_profile), - ]) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.assertEqual(self.bc.database.list_of('payments.ConsumptionSession'), []) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorProfile"), + [ + self.bc.format.to_dict(model.mentor_profile), + ], + ) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.assertEqual(self.bc.database.list_of("payments.ConsumptionSession"), []) # teardown - self.bc.database.delete('mentorship.MentorProfile') - self.bc.database.delete('auth.Permission') - self.bc.database.delete('auth.User') - self.bc.database.delete('payments.Service') + self.bc.database.delete("mentorship.MentorProfile") + self.bc.database.delete("auth.Permission") + self.bc.database.delete("auth.User") + self.bc.database.delete("payments.Service") """ 🔽🔽🔽 GET without MentorProfile, good statuses with mentor urls, MentorshipSession without mentee passing session and mentee but mentee does not exist, with ends_at """ - 
@patch('breathecode.mentorship.actions.mentor_is_ready', MagicMock()) - @patch('os.getenv', MagicMock(side_effect=apply_get_env({ - 'DAILY_API_URL': URL, - 'DAILY_API_KEY': API_KEY, - }))) - @patch('requests.request', - apply_requests_request_mock([(201, f'{URL}/v1/rooms', { - 'name': ROOM_NAME, - 'url': ROOM_URL, - })])) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("breathecode.mentorship.actions.mentor_is_ready", MagicMock()) + @patch( + "os.getenv", + MagicMock( + side_effect=apply_get_env( + { + "DAILY_API_URL": URL, + "DAILY_API_KEY": API_KEY, + } + ) + ), + ) + @patch( + "requests.request", + apply_requests_request_mock( + [ + ( + 201, + f"{URL}/v1/rooms", + { + "name": ROOM_NAME, + "url": ROOM_URL, + }, + ) + ] + ), + ) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_with_mentor_profile__ends_at_less_now(self): - cases = [{ - 'status': x, - 'online_meeting_url': self.bc.fake.url(), - 'booking_url': self.bc.fake.url(), - } for x in ['ACTIVE', 'UNLISTED']] + cases = [ + { + "status": x, + "online_meeting_url": self.bc.fake.url(), + "booking_url": self.bc.fake.url(), + } + for x in ["ACTIVE", "UNLISTED"] + ] id = 0 for mentor_profile in cases: id += 1 - user = {'first_name': '', 'last_name': ''} - service = {'slug': 'join_mentorship'} + user = {"first_name": "", "last_name": ""} + service = {"slug": "join_mentorship"} base = self.bc.database.create(user=user, token=1, service=service) ends_at = UTC_NOW - timedelta(seconds=10) - mentorship_session = {'mentee_id': None, 'ends_at': ends_at} - model = self.bc.database.create(mentor_profile=mentor_profile, - mentorship_session=mentorship_session, - user=user, - mentorship_service=1) + mentorship_session = {"mentee_id": None, "ends_at": ends_at} + model = self.bc.database.create( + mentor_profile=mentor_profile, mentorship_session=mentorship_session, user=user, mentorship_service=1 + ) model.mentorship_session.mentee = None model.mentorship_session.save() - querystring = self.bc.format.to_querystring({ - 'token': base.token.key, - }) - url = reverse_lazy('mentorship_shortner:meet_slug_service_slug', - kwargs={ - 'mentor_slug': model.mentor_profile.slug, - 'service_slug': model.mentorship_service.slug - }) + f'?{querystring}' + querystring = self.bc.format.to_querystring( + { + "token": base.token.key, + } + ) + url = ( + reverse_lazy( + "mentorship_shortner:meet_slug_service_slug", + kwargs={"mentor_slug": model.mentor_profile.slug, "service_slug": model.mentorship_service.slug}, + ) + + f"?{querystring}" + ) response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - url = (f'/mentor/meet/{model.mentor_profile.slug}/service/{model.mentorship_service.slug}?' - f'token={base.token.key}&extend=true') + url = ( + f"/mentor/meet/{model.mentor_profile.slug}/service/{model.mentorship_service.slug}?" 
+ f"token={base.token.key}&extend=true" + ) expected = render( f'The mentoring session expired {timeago.format(ends_at, UTC_NOW)}: You can <a href="{url}">' - 'extend it for another 30 minutes</a> or end the session right now.', + "extend it for another 30 minutes</a> or end the session right now.", model.mentor_profile, base.token, mentorship_session=model.mentorship_session, fix_logo=True, session_expired=True, - academy=model.academy) + academy=model.academy, + ) # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), [ - self.bc.format.to_dict(model.mentor_profile), - ]) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.assertEqual(self.bc.database.list_of('payments.ConsumptionSession'), []) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorProfile"), + [ + self.bc.format.to_dict(model.mentor_profile), + ], + ) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.assertEqual(self.bc.database.list_of("payments.ConsumptionSession"), []) # teardown - self.bc.database.delete('mentorship.MentorProfile') - self.bc.database.delete('auth.Permission') - self.bc.database.delete('payments.Service') + self.bc.database.delete("mentorship.MentorProfile") + self.bc.database.delete("auth.Permission") + self.bc.database.delete("payments.Service") """ 🔽🔽🔽 GET without MentorProfile, good statuses with mentor urls, MentorshipSession without mentee passing session and mentee but mentee does not exist, with ends_at, with extend true """ - @patch('breathecode.mentorship.actions.mentor_is_ready', MagicMock()) - @patch('os.getenv', MagicMock(side_effect=apply_get_env({ - 'DAILY_API_URL': URL, - 'DAILY_API_KEY': API_KEY, - }))) - @patch('requests.request', - apply_requests_request_mock([(201, f'{URL}/v1/rooms', { - 'name': ROOM_NAME, - 'url': ROOM_URL, - })])) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.mentorship.actions.extend_session', MagicMock(side_effect=lambda x: x)) + @patch("breathecode.mentorship.actions.mentor_is_ready", MagicMock()) + @patch( + "os.getenv", + MagicMock( + side_effect=apply_get_env( + { + "DAILY_API_URL": URL, + "DAILY_API_KEY": API_KEY, + } + ) + ), + ) + @patch( + "requests.request", + apply_requests_request_mock( + [ + ( + 201, + f"{URL}/v1/rooms", + { + "name": ROOM_NAME, + "url": ROOM_URL, + }, + ) + ] + ), + ) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.mentorship.actions.extend_session", MagicMock(side_effect=lambda x: x)) def test_with_mentor_profile__ends_at_less_now__with_extend_true(self): - mentor_profile_cases = [{ - 'status': x, - 'online_meeting_url': self.bc.fake.url(), - 'booking_url': self.bc.fake.url(), - } for x in ['ACTIVE', 'UNLISTED']] + mentor_profile_cases = [ + { + "status": x, + "online_meeting_url": self.bc.fake.url(), + "booking_url": self.bc.fake.url(), + } + for x in ["ACTIVE", "UNLISTED"] + ] id = 0 for mentor_profile in mentor_profile_cases: id += 1 - user = {'first_name': '', 'last_name': ''} - service = {'slug': 'join_mentorship'} + user = {"first_name": "", "last_name": ""} + service = {"slug": "join_mentorship"} base = 
self.bc.database.create(user=user, token=1, service=service) ends_at = UTC_NOW - timedelta(seconds=10) - mentorship_session_base = {'mentee_id': base.user.id, 'ends_at': ends_at} + mentorship_session_base = {"mentee_id": base.user.id, "ends_at": ends_at} # session, token cases = [ - ({ - **mentorship_session_base, - 'allow_mentee_to_extend': True, - 'allow_mentors_to_extend': False, - }, None), - ({ - **mentorship_session_base, - 'allow_mentee_to_extend': False, - 'allow_mentors_to_extend': True, - }, 1), + ( + { + **mentorship_session_base, + "allow_mentee_to_extend": True, + "allow_mentors_to_extend": False, + }, + None, + ), + ( + { + **mentorship_session_base, + "allow_mentee_to_extend": False, + "allow_mentors_to_extend": True, + }, + 1, + ), ] for mentorship_session, token in cases: - model = self.bc.database.create(mentor_profile=mentor_profile, - mentorship_session=mentorship_session, - user=user, - token=token, - mentorship_service={ - 'language': 'en', - 'video_provider': 'DAILY' - }, - service=base.service) + model = self.bc.database.create( + mentor_profile=mentor_profile, + mentorship_session=mentorship_session, + user=user, + token=token, + mentorship_service={"language": "en", "video_provider": "DAILY"}, + service=base.service, + ) model.mentorship_session.mentee = None model.mentorship_session.save() - token = model.token if 'token' in model else base.token - - querystring = self.bc.format.to_querystring({ - 'token': token.key, - 'extend': 'true', - 'mentee': base.user.id, - 'session': model.mentorship_session.id, - }) - url = reverse_lazy('mentorship_shortner:meet_slug_service_slug', - kwargs={ - 'mentor_slug': model.mentor_profile.slug, - 'service_slug': model.mentorship_service.slug - }) + f'?{querystring}' + token = model.token if "token" in model else base.token + + querystring = self.bc.format.to_querystring( + { + "token": token.key, + "extend": "true", + "mentee": base.user.id, + "session": model.mentorship_session.id, + } + ) + url = ( + reverse_lazy( + "mentorship_shortner:meet_slug_service_slug", + kwargs={ + "mentor_slug": model.mentor_profile.slug, + "service_slug": model.mentorship_service.slug, + }, + ) + + f"?{querystring}" + ) response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - url = (f'/mentor/meet/{model.mentor_profile.slug}/service/{model.mentorship_service.slug}?' - f'token={token.key}&extend=true&mentee={base.user.id}&session={model.mentorship_session.id}') + url = ( + f"/mentor/meet/{model.mentor_profile.slug}/service/{model.mentorship_service.slug}?" 
+ f"token={token.key}&extend=true&mentee={base.user.id}&session={model.mentorship_session.id}" + ) expected = render( - f'The mentoring session expired {timeago.format(ends_at, UTC_NOW)}: You can ' + f"The mentoring session expired {timeago.format(ends_at, UTC_NOW)}: You can " f'<a href="{url}">extend it for another 30 minutes</a> or end the session right now.', model.mentor_profile, token, mentorship_session=model.mentorship_session, fix_logo=True, session_expired=True, - academy=model.academy) + academy=model.academy, + ) # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), [ - self.bc.format.to_dict(model.mentor_profile), - ]) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.assertEqual(self.bc.database.list_of('payments.ConsumptionSession'), []) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorProfile"), + [ + self.bc.format.to_dict(model.mentor_profile), + ], + ) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.assertEqual(self.bc.database.list_of("payments.ConsumptionSession"), []) # teardown - self.bc.database.delete('mentorship.MentorProfile') + self.bc.database.delete("mentorship.MentorProfile") - self.bc.database.delete('auth.Permission') - self.bc.database.delete('payments.Service') + self.bc.database.delete("auth.Permission") + self.bc.database.delete("payments.Service") """ 🔽🔽🔽 GET without MentorProfile, with ends_at, with extend true, extend_session raise exception """ - @patch('breathecode.mentorship.actions.mentor_is_ready', MagicMock()) - @patch('os.getenv', MagicMock(side_effect=apply_get_env({ - 'DAILY_API_URL': URL, - 'DAILY_API_KEY': API_KEY, - }))) - @patch('requests.request', - apply_requests_request_mock([(201, f'{URL}/v1/rooms', { - 'name': ROOM_NAME, - 'url': ROOM_URL, - })])) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.mentorship.actions.extend_session', MagicMock(side_effect=ExtendSessionException('xyz'))) + @patch("breathecode.mentorship.actions.mentor_is_ready", MagicMock()) + @patch( + "os.getenv", + MagicMock( + side_effect=apply_get_env( + { + "DAILY_API_URL": URL, + "DAILY_API_KEY": API_KEY, + } + ) + ), + ) + @patch( + "requests.request", + apply_requests_request_mock( + [ + ( + 201, + f"{URL}/v1/rooms", + { + "name": ROOM_NAME, + "url": ROOM_URL, + }, + ) + ] + ), + ) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.mentorship.actions.extend_session", MagicMock(side_effect=ExtendSessionException("xyz"))) def test_with_mentor_profile__ends_at_less_now__with_extend_true__extend_session_raise_exception(self): - mentor_profile_cases = [{ - 'status': x, - 'online_meeting_url': self.bc.fake.url(), - 'booking_url': self.bc.fake.url(), - } for x in ['ACTIVE', 'UNLISTED']] + mentor_profile_cases = [ + { + "status": x, + "online_meeting_url": self.bc.fake.url(), + "booking_url": self.bc.fake.url(), + } + for x in ["ACTIVE", "UNLISTED"] + ] id = 0 for mentor_profile in mentor_profile_cases: id += 1 - user = {'first_name': '', 'last_name': ''} - service = {'slug': 'join_mentorship'} + user = {"first_name": "", "last_name": ""} + service = 
{"slug": "join_mentorship"} base = self.bc.database.create(user=user, token=1, service=service) ends_at = UTC_NOW - timedelta(seconds=10) - mentorship_session = {'mentee_id': base.user.id, 'ends_at': ends_at} + mentorship_session = {"mentee_id": base.user.id, "ends_at": ends_at} # session, token - cases = [({ - 'allow_mentors_to_extend': True, - 'allow_mentee_to_extend': False, - 'language': 'en', - }, 1), ({ - 'allow_mentee_to_extend': False, - 'allow_mentee_to_extend': True, - 'language': 'en', - }, None)] + cases = [ + ( + { + "allow_mentors_to_extend": True, + "allow_mentee_to_extend": False, + "language": "en", + }, + 1, + ), + ( + { + "allow_mentee_to_extend": False, + "allow_mentee_to_extend": True, + "language": "en", + }, + None, + ), + ] for mentorship_service, token in cases: - model = self.bc.database.create(mentor_profile=mentor_profile, - mentorship_session=mentorship_session, - user=user, - token=token, - mentorship_service=mentorship_service, - service=base.service) + model = self.bc.database.create( + mentor_profile=mentor_profile, + mentorship_session=mentorship_session, + user=user, + token=token, + mentorship_service=mentorship_service, + service=base.service, + ) model.mentorship_session.mentee = None model.mentorship_session.save() - token = model.token if 'token' in model else base.token - - querystring = self.bc.format.to_querystring({ - 'token': token.key, - 'extend': 'true', - 'mentee': base.user.id, - 'session': model.mentorship_session.id, - }) - url = reverse_lazy('mentorship_shortner:meet_slug_service_slug', - kwargs={ - 'mentor_slug': model.mentor_profile.slug, - 'service_slug': model.mentorship_service.slug - }) + f'?{querystring}' + token = model.token if "token" in model else base.token + + querystring = self.bc.format.to_querystring( + { + "token": token.key, + "extend": "true", + "mentee": base.user.id, + "session": model.mentorship_session.id, + } + ) + url = ( + reverse_lazy( + "mentorship_shortner:meet_slug_service_slug", + kwargs={ + "mentor_slug": model.mentor_profile.slug, + "service_slug": model.mentorship_service.slug, + }, + ) + + f"?{querystring}" + ) response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - url = (f'/mentor/meet/{model.mentor_profile.slug}?token={token.key}&extend=true&' - f'mentee={base.user.id}&session={model.mentorship_session.id}') - expected = render('xyz', - model.mentor_profile, - token, - mentorship_session=model.mentorship_session, - fix_logo=True, - session_expired=True, - academy=model.academy) + url = ( + f"/mentor/meet/{model.mentor_profile.slug}?token={token.key}&extend=true&" + f"mentee={base.user.id}&session={model.mentorship_session.id}" + ) + expected = render( + "xyz", + model.mentor_profile, + token, + mentorship_session=model.mentorship_session, + fix_logo=True, + session_expired=True, + academy=model.academy, + ) # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), [ - self.bc.format.to_dict(model.mentor_profile), - ]) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.assertEqual(self.bc.database.list_of('payments.ConsumptionSession'), []) + self.assertEqual( + 
self.bc.database.list_of("mentorship.MentorProfile"), + [ + self.bc.format.to_dict(model.mentor_profile), + ], + ) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.assertEqual(self.bc.database.list_of("payments.ConsumptionSession"), []) # teardown - self.bc.database.delete('mentorship.MentorProfile') + self.bc.database.delete("mentorship.MentorProfile") - self.bc.database.delete('auth.Permission') - self.bc.database.delete('payments.Service') + self.bc.database.delete("auth.Permission") + self.bc.database.delete("payments.Service") """ 🔽🔽🔽 GET without MentorProfile, with ends_at, with extend true, extend_session raise exception, session can't be extended """ - @patch('breathecode.mentorship.actions.mentor_is_ready', MagicMock()) - @patch('os.getenv', MagicMock(side_effect=apply_get_env({ - 'DAILY_API_URL': URL, - 'DAILY_API_KEY': API_KEY, - }))) - @patch('requests.request', - apply_requests_request_mock([(201, f'{URL}/v1/rooms', { - 'name': ROOM_NAME, - 'url': ROOM_URL, - })])) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("breathecode.mentorship.actions.mentor_is_ready", MagicMock()) + @patch( + "os.getenv", + MagicMock( + side_effect=apply_get_env( + { + "DAILY_API_URL": URL, + "DAILY_API_KEY": API_KEY, + } + ) + ), + ) + @patch( + "requests.request", + apply_requests_request_mock( + [ + ( + 201, + f"{URL}/v1/rooms", + { + "name": ROOM_NAME, + "url": ROOM_URL, + }, + ) + ] + ), + ) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_with_mentor_profile__ends_at_less_now__with_extend_true__session_can_not_be_extended(self): - mentor_profile_cases = [{ - 'status': x, - 'online_meeting_url': self.bc.fake.url(), - 'booking_url': self.bc.fake.url(), - } for x in ['ACTIVE', 'UNLISTED']] + mentor_profile_cases = [ + { + "status": x, + "online_meeting_url": self.bc.fake.url(), + "booking_url": self.bc.fake.url(), + } + for x in ["ACTIVE", "UNLISTED"] + ] id = 0 for mentor_profile in mentor_profile_cases: id += 1 - user = {'first_name': '', 'last_name': ''} - service = {'slug': 'join_mentorship'} + user = {"first_name": "", "last_name": ""} + service = {"slug": "join_mentorship"} base = self.bc.database.create(user=user, token=1, service=service) ends_at = UTC_NOW - timedelta(seconds=10) - mentorship_session = {'mentee_id': base.user.id, 'ends_at': ends_at} + mentorship_session = {"mentee_id": base.user.id, "ends_at": ends_at} # service, token cases = [ - ({ - 'allow_mentors_to_extend': False, - 'allow_mentee_to_extend': False, - 'language': 'en', - }, None), - ({ - 'allow_mentors_to_extend': False, - 'allow_mentee_to_extend': False, - 'language': 'en', - }, 1), + ( + { + "allow_mentors_to_extend": False, + "allow_mentee_to_extend": False, + "language": "en", + }, + None, + ), + ( + { + "allow_mentors_to_extend": False, + "allow_mentee_to_extend": False, + "language": "en", + }, + 1, + ), ] for mentorship_service, token in cases: - model = self.bc.database.create(mentor_profile=mentor_profile, - mentorship_session=mentorship_session, - user=user, - token=token, - mentorship_service=mentorship_service, - service=base.service) + model = self.bc.database.create( + mentor_profile=mentor_profile, + mentorship_session=mentorship_session, + user=user, + token=token, + mentorship_service=mentorship_service, + service=base.service, + ) model.mentorship_session.mentee = None model.mentorship_session.save() - token = model.token if 'token' in model else base.token - - querystring = self.bc.format.to_querystring({ - 
'token': token.key, - 'extend': 'true', - 'mentee': base.user.id, - 'session': model.mentorship_session.id, - }) - url = reverse_lazy('mentorship_shortner:meet_slug_service_slug', - kwargs={ - 'mentor_slug': model.mentor_profile.slug, - 'service_slug': model.mentorship_service.slug - }) + f'?{querystring}' + token = model.token if "token" in model else base.token + + querystring = self.bc.format.to_querystring( + { + "token": token.key, + "extend": "true", + "mentee": base.user.id, + "session": model.mentorship_session.id, + } + ) + url = ( + reverse_lazy( + "mentorship_shortner:meet_slug_service_slug", + kwargs={ + "mentor_slug": model.mentor_profile.slug, + "service_slug": model.mentorship_service.slug, + }, + ) + + f"?{querystring}" + ) response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - url = (f'/mentor/meet/{model.mentor_profile.slug}?token={token.key}&extend=true&' - f'mentee={base.user.id}&session={model.mentorship_session.id}') - expected = render('The mentoring session expired 10 seconds ago and it cannot be extended.', - model.mentor_profile, - token, - mentorship_session=model.mentorship_session, - fix_logo=True, - academy=model.academy) + url = ( + f"/mentor/meet/{model.mentor_profile.slug}?token={token.key}&extend=true&" + f"mentee={base.user.id}&session={model.mentorship_session.id}" + ) + expected = render( + "The mentoring session expired 10 seconds ago and it cannot be extended.", + model.mentor_profile, + token, + mentorship_session=model.mentorship_session, + fix_logo=True, + academy=model.academy, + ) # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), [ - self.bc.format.to_dict(model.mentor_profile), - ]) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.assertEqual(self.bc.database.list_of('payments.ConsumptionSession'), []) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorProfile"), + [ + self.bc.format.to_dict(model.mentor_profile), + ], + ) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.assertEqual(self.bc.database.list_of("payments.ConsumptionSession"), []) # teardown - self.bc.database.delete('mentorship.MentorProfile') + self.bc.database.delete("mentorship.MentorProfile") # teardown - self.bc.database.delete('auth.Permission') - self.bc.database.delete('payments.Service') + self.bc.database.delete("auth.Permission") + self.bc.database.delete("payments.Service") """ 🔽🔽🔽 GET without MentorProfile, with ends_at, with extend true, extend_session raise exception, redirect to session """ - @patch('breathecode.mentorship.actions.mentor_is_ready', MagicMock()) - @patch('os.getenv', MagicMock(side_effect=apply_get_env({ - 'DAILY_API_URL': URL, - 'DAILY_API_KEY': API_KEY, - }))) - @patch('requests.request', - apply_requests_request_mock([(201, f'{URL}/v1/rooms', { - 'name': ROOM_NAME, - 'url': ROOM_URL, - })])) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("breathecode.mentorship.actions.mentor_is_ready", MagicMock()) + @patch( + "os.getenv", + MagicMock( + side_effect=apply_get_env( + { + "DAILY_API_URL": URL, + "DAILY_API_KEY": API_KEY, + } + ) + ), + ) + 
@patch( + "requests.request", + apply_requests_request_mock( + [ + ( + 201, + f"{URL}/v1/rooms", + { + "name": ROOM_NAME, + "url": ROOM_URL, + }, + ) + ] + ), + ) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_with_mentor_profile__ends_at_less_now__with_extend_true__redirect_to_session(self): - mentor_profile_cases = [{ - 'status': x, - 'online_meeting_url': self.bc.fake.url(), - 'booking_url': self.bc.fake.url(), - } for x in ['ACTIVE', 'UNLISTED']] + mentor_profile_cases = [ + { + "status": x, + "online_meeting_url": self.bc.fake.url(), + "booking_url": self.bc.fake.url(), + } + for x in ["ACTIVE", "UNLISTED"] + ] id = 0 for mentor_profile in mentor_profile_cases: id += 1 - user = {'first_name': '', 'last_name': ''} - service = {'slug': 'join_mentorship'} + user = {"first_name": "", "last_name": ""} + service = {"slug": "join_mentorship"} base = self.bc.database.create(user=user, token=1, service=service) ends_at = UTC_NOW - timedelta(seconds=3600 / 2 + 1) - mentorship_session_base = {'mentee_id': base.user.id, 'ends_at': ends_at} + mentorship_session_base = {"mentee_id": base.user.id, "ends_at": ends_at} # session, token - cases = [({ - **mentorship_session_base, - 'allow_mentors_to_extend': True, - }, None), ({ - **mentorship_session_base, - 'allow_mentee_to_extend': True, - }, 1)] + cases = [ + ( + { + **mentorship_session_base, + "allow_mentors_to_extend": True, + }, + None, + ), + ( + { + **mentorship_session_base, + "allow_mentee_to_extend": True, + }, + 1, + ), + ] for mentorship_session, token in cases: - model = self.bc.database.create(mentor_profile=mentor_profile, - mentorship_session=mentorship_session, - user=user, - token=token, - mentorship_service={ - 'language': 'en', - 'video_provider': 'DAILY' - }, - service=base.service) + model = self.bc.database.create( + mentor_profile=mentor_profile, + mentorship_session=mentorship_session, + user=user, + token=token, + mentorship_service={"language": "en", "video_provider": "DAILY"}, + service=base.service, + ) model.mentorship_session.mentee = None model.mentorship_session.save() - token = model.token if 'token' in model else base.token - - querystring = self.bc.format.to_querystring({ - 'token': token.key, - 'extend': 'true', - 'mentee': base.user.id, - 'session': model.mentorship_session.id, - }) - url = reverse_lazy('mentorship_shortner:meet_slug_service_slug', - kwargs={ - 'mentor_slug': model.mentor_profile.slug, - 'service_slug': model.mentorship_service.slug - }) + f'?{querystring}' + token = model.token if "token" in model else base.token + + querystring = self.bc.format.to_querystring( + { + "token": token.key, + "extend": "true", + "mentee": base.user.id, + "session": model.mentorship_session.id, + } + ) + url = ( + reverse_lazy( + "mentorship_shortner:meet_slug_service_slug", + kwargs={ + "mentor_slug": model.mentor_profile.slug, + "service_slug": model.mentorship_service.slug, + }, + ) + + f"?{querystring}" + ) response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - url = (f'/mentor/meet/{model.mentor_profile.slug}?token={token.key}&extend=true&' - f'mentee={base.user.id}&session={model.mentorship_session.id}') - expected = '' + url = ( + f"/mentor/meet/{model.mentor_profile.slug}?token={token.key}&extend=true&" + f"mentee={base.user.id}&session={model.mentorship_session.id}" + ) + expected = "" # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with 
open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) expired_at = timeago.format(model.mentorship_session.ends_at, UTC_NOW) minutes = round(((model.mentorship_session.service.duration.total_seconds() / 3600) * 60) / 2) - message = (f'You have a session that expired {expired_at}. Only sessions with less than ' - f'{minutes}min from expiration can be extended (if allowed by the academy)').replace( - ' ', '%20') + message = ( + f"You have a session that expired {expired_at}. Only sessions with less than " + f"{minutes}min from expiration can be extended (if allowed by the academy)" + ).replace(" ", "%20") self.assertEqual( - response.url, f'/mentor/session/{model.mentorship_session.id}?token=' - f'{token.key}&message={message}') + response.url, + f"/mentor/session/{model.mentorship_session.id}?token=" f"{token.key}&message={message}", + ) self.assertEqual(response.status_code, status.HTTP_302_FOUND) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), [ - self.bc.format.to_dict(model.mentor_profile), - ]) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.assertEqual(self.bc.database.list_of('payments.ConsumptionSession'), []) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorProfile"), + [ + self.bc.format.to_dict(model.mentor_profile), + ], + ) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.assertEqual(self.bc.database.list_of("payments.ConsumptionSession"), []) # teardown - self.bc.database.delete('mentorship.MentorProfile') + self.bc.database.delete("mentorship.MentorProfile") - self.bc.database.delete('auth.Permission') - self.bc.database.delete('payments.Service') + self.bc.database.delete("auth.Permission") + self.bc.database.delete("payments.Service") """ 🔽🔽🔽 GET without MentorProfile, with ends_at, with extend true, extend_session raise exception, redirect to session, no saas """ - @patch('breathecode.mentorship.actions.mentor_is_ready', MagicMock()) - @patch('os.getenv', MagicMock(side_effect=apply_get_env({ - 'DAILY_API_URL': URL, - 'DAILY_API_KEY': API_KEY, - }))) - @patch('requests.request', - apply_requests_request_mock([(201, f'{URL}/v1/rooms', { - 'name': ROOM_NAME, - 'url': ROOM_URL, - })])) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("breathecode.mentorship.actions.mentor_is_ready", MagicMock()) + @patch( + "os.getenv", + MagicMock( + side_effect=apply_get_env( + { + "DAILY_API_URL": URL, + "DAILY_API_KEY": API_KEY, + } + ) + ), + ) + @patch( + "requests.request", + apply_requests_request_mock( + [ + ( + 201, + f"{URL}/v1/rooms", + { + "name": ROOM_NAME, + "url": ROOM_URL, + }, + ) + ] + ), + ) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_with_mentor_profile__redirect_to_session__no_saas(self): - mentor_profile_cases = [{ - 'status': x, - 'online_meeting_url': self.bc.fake.url(), - 'booking_url': self.bc.fake.url(), - } for x in ['ACTIVE', 'UNLISTED']] + mentor_profile_cases = [ + { + "status": x, + "online_meeting_url": self.bc.fake.url(), + "booking_url": self.bc.fake.url(), + } + for x in ["ACTIVE", "UNLISTED"] + ] id = 0 for mentor_profile in mentor_profile_cases: id += 1 - user = {'first_name': '', 'last_name': ''} - service = {'slug': 'join_mentorship'} + user = {"first_name": "", "last_name": ""} + service = {"slug": "join_mentorship"} base = self.bc.database.create(user=user, token=1, service=service) ends_at = UTC_NOW - 
timedelta(seconds=3600 / 2 + 1) - mentorship_session_base = {'mentee_id': base.user.id, 'ends_at': ends_at} + mentorship_session_base = {"mentee_id": base.user.id, "ends_at": ends_at} mentorship_session = { **mentorship_session_base, - 'allow_mentee_to_extend': True, + "allow_mentee_to_extend": True, } token = 1 - model = self.bc.database.create(mentor_profile=mentor_profile, - mentorship_session=mentorship_session, - user=user, - token=token, - mentorship_service={ - 'language': 'en', - 'video_provider': 'DAILY' - }, - service=base.service) + model = self.bc.database.create( + mentor_profile=mentor_profile, + mentorship_session=mentorship_session, + user=user, + token=token, + mentorship_service={"language": "en", "video_provider": "DAILY"}, + service=base.service, + ) model.mentorship_session.mentee = None model.mentorship_session.save() - token = model.token if 'token' in model else base.token - - querystring = self.bc.format.to_querystring({ - 'token': token.key, - 'extend': 'true', - 'mentee': base.user.id, - 'session': model.mentorship_session.id, - }) - url = reverse_lazy('mentorship_shortner:meet_slug_service_slug', - kwargs={ - 'mentor_slug': model.mentor_profile.slug, - 'service_slug': model.mentorship_service.slug - }) + f'?{querystring}' + token = model.token if "token" in model else base.token + + querystring = self.bc.format.to_querystring( + { + "token": token.key, + "extend": "true", + "mentee": base.user.id, + "session": model.mentorship_session.id, + } + ) + url = ( + reverse_lazy( + "mentorship_shortner:meet_slug_service_slug", + kwargs={"mentor_slug": model.mentor_profile.slug, "service_slug": model.mentorship_service.slug}, + ) + + f"?{querystring}" + ) response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - url = (f'/mentor/meet/{model.mentor_profile.slug}?token={token.key}&extend=true&' - f'mentee={base.user.id}&session={model.mentorship_session.id}') - expected = '' + url = ( + f"/mentor/meet/{model.mentor_profile.slug}?token={token.key}&extend=true&" + f"mentee={base.user.id}&session={model.mentorship_session.id}" + ) + expected = "" # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) expired_at = timeago.format(model.mentorship_session.ends_at, UTC_NOW) minutes = round(((model.mentorship_session.service.duration.total_seconds() / 3600) * 60) / 2) - message = (f'You have a session that expired {expired_at}. Only sessions with less than ' - f'{minutes}min from expiration can be extended (if allowed by the academy)').replace(' ', '%20') - self.assertEqual(response.url, f'/mentor/session/{model.mentorship_session.id}?token=' - f'{token.key}&message={message}') + message = ( + f"You have a session that expired {expired_at}. 
Only sessions with less than " + f"{minutes}min from expiration can be extended (if allowed by the academy)" + ).replace(" ", "%20") + self.assertEqual( + response.url, f"/mentor/session/{model.mentorship_session.id}?token=" f"{token.key}&message={message}" + ) self.assertEqual(response.status_code, status.HTTP_302_FOUND) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), [ - self.bc.format.to_dict(model.mentor_profile), - ]) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.assertEqual(self.bc.database.list_of('payments.ConsumptionSession'), []) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorProfile"), + [ + self.bc.format.to_dict(model.mentor_profile), + ], + ) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.assertEqual(self.bc.database.list_of("payments.ConsumptionSession"), []) # teardown - self.bc.database.delete('mentorship.MentorProfile') + self.bc.database.delete("mentorship.MentorProfile") - self.bc.database.delete('auth.Permission') - self.bc.database.delete('payments.Service') + self.bc.database.delete("auth.Permission") + self.bc.database.delete("payments.Service") """ 🔽🔽🔽 GET without MentorProfile, with ends_at, with extend true, extend_session raise exception, redirect to session, saas """ - @patch('breathecode.mentorship.actions.mentor_is_ready', MagicMock()) - @patch('os.getenv', MagicMock(side_effect=apply_get_env({ - 'DAILY_API_URL': URL, - 'DAILY_API_KEY': API_KEY, - }))) - @patch('requests.request', - apply_requests_request_mock([(201, f'{URL}/v1/rooms', { - 'name': ROOM_NAME, - 'url': ROOM_URL, - })])) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("breathecode.mentorship.actions.mentor_is_ready", MagicMock()) + @patch( + "os.getenv", + MagicMock( + side_effect=apply_get_env( + { + "DAILY_API_URL": URL, + "DAILY_API_KEY": API_KEY, + } + ) + ), + ) + @patch( + "requests.request", + apply_requests_request_mock( + [ + ( + 201, + f"{URL}/v1/rooms", + { + "name": ROOM_NAME, + "url": ROOM_URL, + }, + ) + ] + ), + ) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_with_mentor_profile__redirect_to_session__saas(self): - mentor_profile_cases = [{ - 'status': x, - 'online_meeting_url': self.bc.fake.url(), - 'booking_url': self.bc.fake.url(), - } for x in ['ACTIVE', 'UNLISTED']] + mentor_profile_cases = [ + { + "status": x, + "online_meeting_url": self.bc.fake.url(), + "booking_url": self.bc.fake.url(), + } + for x in ["ACTIVE", "UNLISTED"] + ] id = 0 for mentor_profile in mentor_profile_cases: id += 1 - user = {'first_name': '', 'last_name': ''} - service = {'slug': 'join_mentorship'} + user = {"first_name": "", "last_name": ""} + service = {"slug": "join_mentorship"} base = self.bc.database.create(user=user, token=1, service=service) ends_at = UTC_NOW - timedelta(seconds=3600 / 2 + 1) - academy = {'available_as_saas': True} + academy = {"available_as_saas": True} mentorship_session = { - 'mentee_id': base.user.id, - 'ends_at': ends_at, - 'allow_mentee_to_extend': True, + "mentee_id": base.user.id, + "ends_at": ends_at, + "allow_mentee_to_extend": True, } token = 1 - model = self.bc.database.create(mentor_profile=mentor_profile, - mentorship_session=mentorship_session, - user=user, - token=token, - mentorship_service={ - 'language': 'en', - 'video_provider': 'DAILY' - }, - service=base.service, - academy=academy) + model = self.bc.database.create( + mentor_profile=mentor_profile, + 
mentorship_session=mentorship_session, + user=user, + token=token, + mentorship_service={"language": "en", "video_provider": "DAILY"}, + service=base.service, + academy=academy, + ) model.mentorship_session.mentee = None model.mentorship_session.save() - token = model.token if 'token' in model else base.token - - querystring = self.bc.format.to_querystring({ - 'token': token.key, - 'extend': 'true', - 'mentee': base.user.id, - 'session': model.mentorship_session.id, - }) - url = reverse_lazy('mentorship_shortner:meet_slug_service_slug', - kwargs={ - 'mentor_slug': model.mentor_profile.slug, - 'service_slug': model.mentorship_service.slug - }) + f'?{querystring}' + token = model.token if "token" in model else base.token + + querystring = self.bc.format.to_querystring( + { + "token": token.key, + "extend": "true", + "mentee": base.user.id, + "session": model.mentorship_session.id, + } + ) + url = ( + reverse_lazy( + "mentorship_shortner:meet_slug_service_slug", + kwargs={"mentor_slug": model.mentor_profile.slug, "service_slug": model.mentorship_service.slug}, + ) + + f"?{querystring}" + ) response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - expected = render('mentee-not-enough-consumables') + expected = render("mentee-not-enough-consumables") # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_402_PAYMENT_REQUIRED) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), [ - self.bc.format.to_dict(model.mentor_profile), - ]) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.assertEqual(self.bc.database.list_of('payments.ConsumptionSession'), []) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorProfile"), + [ + self.bc.format.to_dict(model.mentor_profile), + ], + ) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.assertEqual(self.bc.database.list_of("payments.ConsumptionSession"), []) # teardown - self.bc.database.delete('mentorship.MentorProfile') - - self.bc.database.delete('auth.Permission') - self.bc.database.delete('payments.Service') - - @patch('breathecode.mentorship.actions.mentor_is_ready', MagicMock()) - @patch('os.getenv', MagicMock(side_effect=apply_get_env({ - 'DAILY_API_URL': URL, - 'DAILY_API_KEY': API_KEY, - }))) - @patch('requests.request', - apply_requests_request_mock([(201, f'{URL}/v1/rooms', { - 'name': ROOM_NAME, - 'url': ROOM_URL, - })])) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.payments.tasks.end_the_consumption_session.apply_async', MagicMock(return_value=None)) + self.bc.database.delete("mentorship.MentorProfile") + + self.bc.database.delete("auth.Permission") + self.bc.database.delete("payments.Service") + + @patch("breathecode.mentorship.actions.mentor_is_ready", MagicMock()) + @patch( + "os.getenv", + MagicMock( + side_effect=apply_get_env( + { + "DAILY_API_URL": URL, + "DAILY_API_KEY": API_KEY, + } + ) + ), + ) + @patch( + "requests.request", + apply_requests_request_mock( + [ + ( + 201, + f"{URL}/v1/rooms", + { + "name": ROOM_NAME, + "url": ROOM_URL, + }, + ) + ] + ), + ) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + 
@patch("breathecode.payments.tasks.end_the_consumption_session.apply_async", MagicMock(return_value=None)) def test_with_mentor_profile__redirect_to_session__saas__(self): - mentor_profile_cases = [{ - 'status': x, - 'online_meeting_url': self.bc.fake.url(), - 'booking_url': self.bc.fake.url(), - } for x in ['ACTIVE', 'UNLISTED']] + mentor_profile_cases = [ + { + "status": x, + "online_meeting_url": self.bc.fake.url(), + "booking_url": self.bc.fake.url(), + } + for x in ["ACTIVE", "UNLISTED"] + ] id = 0 for mentor_profile in mentor_profile_cases: id += 1 - user = {'first_name': '', 'last_name': ''} - service = {'slug': 'join_mentorship'} - academy = {'available_as_saas': True} + user = {"first_name": "", "last_name": ""} + service = {"slug": "join_mentorship"} + academy = {"available_as_saas": True} how_many = random.randint(1, 100) - consumable = {'how_many': how_many} + consumable = {"how_many": how_many} delta = timedelta(seconds=random.randint(1, 1000)) - mentorship_service = {'max_duration': delta, 'language': 'en'} - base = self.bc.database.create(user=user, - token=1, - service=service, - consumable=consumable, - mentorship_service=mentorship_service, - mentorship_service_set=1, - academy=academy) + mentorship_service = {"max_duration": delta, "language": "en"} + base = self.bc.database.create( + user=user, + token=1, + service=service, + consumable=consumable, + mentorship_service=mentorship_service, + mentorship_service_set=1, + academy=academy, + ) ends_at = UTC_NOW - timedelta(seconds=3600 / 2 + 1) mentorship_session = { - 'mentee_id': base.user.id, - 'ends_at': ends_at, - 'allow_mentee_to_extend': True, + "mentee_id": base.user.id, + "ends_at": ends_at, + "allow_mentee_to_extend": True, } token = 1 - model = self.bc.database.create(mentor_profile=mentor_profile, - mentorship_session=mentorship_session, - user=user, - token=token, - mentorship_service=base.mentorship_service, - service=base.service) + model = self.bc.database.create( + mentor_profile=mentor_profile, + mentorship_session=mentorship_session, + user=user, + token=token, + mentorship_service=base.mentorship_service, + service=base.service, + ) model.mentorship_session.mentee = None model.mentorship_session.save() - token = model.token if 'token' in model else base.token - - querystring = self.bc.format.to_querystring({ - 'token': token.key, - 'extend': 'true', - 'mentee': base.user.id, - 'session': model.mentorship_session.id, - }) - url = reverse_lazy('mentorship_shortner:meet_slug_service_slug', - kwargs={ - 'mentor_slug': model.mentor_profile.slug, - 'service_slug': model.mentorship_service.slug - }) + f'?{querystring}' + token = model.token if "token" in model else base.token + + querystring = self.bc.format.to_querystring( + { + "token": token.key, + "extend": "true", + "mentee": base.user.id, + "session": model.mentorship_session.id, + } + ) + url = ( + reverse_lazy( + "mentorship_shortner:meet_slug_service_slug", + kwargs={"mentor_slug": model.mentor_profile.slug, "service_slug": model.mentorship_service.slug}, + ) + + f"?{querystring}" + ) response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - url = (f'/mentor/meet/{model.mentor_profile.slug}?token={token.key}&extend=true&' - f'mentee={base.user.id}&session={model.mentorship_session.id}') - expected = '' + url = ( + f"/mentor/meet/{model.mentor_profile.slug}?token={token.key}&extend=true&" + f"mentee={base.user.id}&session={model.mentorship_session.id}" + ) + expected = "" # dump error in external files if content != 
expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) expired_at = timeago.format(model.mentorship_session.ends_at, UTC_NOW) minutes = round(((model.mentorship_session.service.duration.total_seconds() / 3600) * 60) / 2) - message = (f'You have a session that expired {expired_at}. Only sessions with less than ' - f'{minutes}min from expiration can be extended (if allowed by the academy)').replace(' ', '%20') - self.assertEqual(response.url, f'/mentor/session/{model.mentorship_session.id}?token=' - f'{token.key}&message={message}') + message = ( + f"You have a session that expired {expired_at}. Only sessions with less than " + f"{minutes}min from expiration can be extended (if allowed by the academy)" + ).replace(" ", "%20") + self.assertEqual( + response.url, f"/mentor/session/{model.mentorship_session.id}?token=" f"{token.key}&message={message}" + ) self.assertEqual(response.status_code, status.HTTP_302_FOUND) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), [ - self.bc.format.to_dict(model.mentor_profile), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorProfile"), + [ + self.bc.format.to_dict(model.mentor_profile), + ], + ) self.assertEqual( - self.bc.database.list_of('payments.Consumable'), + self.bc.database.list_of("payments.Consumable"), + [ + format_consumable( + { + "id": base.user.id if base.user.id == 1 else 2, + "user_id": base.user.id, + "how_many": how_many, # this has not discounted yet + "mentorship_service_set_id": base.mentorship_service_set.id, + "service_item_id": base.consumable.id, + } + ) + ], + ) + self.assertEqual( + self.bc.database.list_of("payments.ConsumptionSession"), [ - format_consumable({ - 'id': base.user.id if base.user.id == 1 else 2, - 'user_id': base.user.id, - 'how_many': how_many, # this has not discounted yet - 'mentorship_service_set_id': base.mentorship_service_set.id, - 'service_item_id': base.consumable.id, - }) - ]) - self.assertEqual(self.bc.database.list_of('payments.ConsumptionSession'), [ - format_consumption_session(model.mentorship_service, - model.mentor_profile, - base.mentorship_service_set, - base.user, - base.consumable, - data={ - 'id': base.user.id if base.user.id == 1 else 2, - 'duration': delta, - 'eta': UTC_NOW + delta, - }), - ]) + format_consumption_session( + model.mentorship_service, + model.mentor_profile, + base.mentorship_service_set, + base.user, + base.consumable, + data={ + "id": base.user.id if base.user.id == 1 else 2, + "duration": delta, + "eta": UTC_NOW + delta, + }, + ), + ], + ) # teardown - self.bc.database.delete('mentorship.MentorProfile') - self.bc.database.delete('auth.Permission') - self.bc.database.delete('payments.ConsumptionSession') - self.bc.database.delete('payments.Consumable') - self.bc.database.delete('payments.Service') + self.bc.database.delete("mentorship.MentorProfile") + self.bc.database.delete("auth.Permission") + self.bc.database.delete("payments.ConsumptionSession") + self.bc.database.delete("payments.Consumable") + self.bc.database.delete("payments.Service") """ 🔽🔽🔽 GET mock get_pending_sessions_or_create to get a empty queryset """ - @patch('breathecode.mentorship.actions.mentor_is_ready', MagicMock()) - @patch('os.getenv', MagicMock(side_effect=apply_get_env({ - 'DAILY_API_URL': URL, - 'DAILY_API_KEY': API_KEY, - }))) - @patch('requests.request', - 
apply_requests_request_mock([(201, f'{URL}/v1/rooms', { - 'name': ROOM_NAME, - 'url': ROOM_URL, - })])) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.mentorship.actions.get_pending_sessions_or_create', - MagicMock(side_effect=get_empty_mentorship_session_queryset)) + @patch("breathecode.mentorship.actions.mentor_is_ready", MagicMock()) + @patch( + "os.getenv", + MagicMock( + side_effect=apply_get_env( + { + "DAILY_API_URL": URL, + "DAILY_API_KEY": API_KEY, + } + ) + ), + ) + @patch( + "requests.request", + apply_requests_request_mock( + [ + ( + 201, + f"{URL}/v1/rooms", + { + "name": ROOM_NAME, + "url": ROOM_URL, + }, + ) + ] + ), + ) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch( + "breathecode.mentorship.actions.get_pending_sessions_or_create", + MagicMock(side_effect=get_empty_mentorship_session_queryset), + ) def test_get_pending_sessions_or_create_returns_empty_queryset(self): - cases = [{ - 'status': x, - 'online_meeting_url': self.bc.fake.url(), - 'booking_url': self.bc.fake.url(), - } for x in ['ACTIVE', 'UNLISTED']] + cases = [ + { + "status": x, + "online_meeting_url": self.bc.fake.url(), + "booking_url": self.bc.fake.url(), + } + for x in ["ACTIVE", "UNLISTED"] + ] id = 0 for mentor_profile in cases: @@ -2663,340 +3353,396 @@ def test_get_pending_sessions_or_create_returns_empty_queryset(self): first_name = self.bc.fake.first_name() last_name = self.bc.fake.last_name() cases = [ - ({ - 'first_name': '', - 'last_name': '' - }, 'the mentor'), - ({ - 'first_name': first_name, - 'last_name': last_name - }, f'{first_name} {last_name}'), + ({"first_name": "", "last_name": ""}, "the mentor"), + ({"first_name": first_name, "last_name": last_name}, f"{first_name} {last_name}"), ] for user, name in cases: - service = {'slug': 'join_mentorship'} + service = {"slug": "join_mentorship"} base = self.bc.database.create(user=user, token=1, service=service) ends_at = UTC_NOW - timedelta(seconds=10) - mentorship_session = {'mentee_id': None, 'ends_at': ends_at} - model = self.bc.database.create(mentor_profile=mentor_profile, - mentorship_session=mentorship_session, - user=user, - mentorship_service=1) + mentorship_session = {"mentee_id": None, "ends_at": ends_at} + model = self.bc.database.create( + mentor_profile=mentor_profile, + mentorship_session=mentorship_session, + user=user, + mentorship_service=1, + ) model.mentorship_session.mentee = None model.mentorship_session.save() - querystring = self.bc.format.to_querystring({ - 'token': base.token.key, - }) - url = reverse_lazy('mentorship_shortner:meet_slug_service_slug', - kwargs={ - 'mentor_slug': model.mentor_profile.slug, - 'service_slug': model.mentorship_service.slug - }) + f'?{querystring}' + querystring = self.bc.format.to_querystring( + { + "token": base.token.key, + } + ) + url = ( + reverse_lazy( + "mentorship_shortner:meet_slug_service_slug", + kwargs={ + "mentor_slug": model.mentor_profile.slug, + "service_slug": model.mentorship_service.slug, + }, + ) + + f"?{querystring}" + ) response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - url = f'/mentor/meet/{model.mentor_profile.slug}?token={base.token.key}&extend=true' - expected = render(f'Impossible to create or retrieve mentoring session with {name}.', - model.mentor_profile, - base.token, - mentorship_session=model.mentorship_session, - fix_logo=True, - academy=model.academy) + url = f"/mentor/meet/{model.mentor_profile.slug}?token={base.token.key}&extend=true" + 
expected = render( + f"Impossible to create or retrieve mentoring session with {name}.", + model.mentor_profile, + base.token, + mentorship_session=model.mentorship_session, + fix_logo=True, + academy=model.academy, + ) # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), [ - self.bc.format.to_dict(model.mentor_profile), - ]) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.assertEqual(self.bc.database.list_of('payments.ConsumptionSession'), []) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorProfile"), + [ + self.bc.format.to_dict(model.mentor_profile), + ], + ) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.assertEqual(self.bc.database.list_of("payments.ConsumptionSession"), []) # teardown - self.bc.database.delete('mentorship.MentorProfile') - self.bc.database.delete('auth.Permission') - self.bc.database.delete('payments.Service') + self.bc.database.delete("mentorship.MentorProfile") + self.bc.database.delete("auth.Permission") + self.bc.database.delete("payments.Service") # Given: A no SAAS student who has paid # When: auth # Then: response 200 -@pytest.mark.parametrize('cohort_user', [ - { - 'finantial_status': 'FULLY_PAID', - 'educational_status': 'ACTIVE', - }, - { - 'finantial_status': 'UP_TO_DATE', - 'educational_status': 'ACTIVE', - }, - { - 'finantial_status': 'FULLY_PAID', - 'educational_status': 'GRADUATED', - }, - { - 'finantial_status': 'UP_TO_DATE', - 'educational_status': 'GRADUATED', - }, -]) -@pytest.mark.parametrize('academy, cohort', [ - ( +@pytest.mark.parametrize( + "cohort_user", + [ { - 'available_as_saas': True + "finantial_status": "FULLY_PAID", + "educational_status": "ACTIVE", }, { - 'available_as_saas': False + "finantial_status": "UP_TO_DATE", + "educational_status": "ACTIVE", }, - ), - ( { - 'available_as_saas': False + "finantial_status": "FULLY_PAID", + "educational_status": "GRADUATED", }, { - 'available_as_saas': None + "finantial_status": "UP_TO_DATE", + "educational_status": "GRADUATED", }, + ], +) +@pytest.mark.parametrize( + "academy, cohort", + [ + ( + {"available_as_saas": True}, + {"available_as_saas": False}, + ), + ( + {"available_as_saas": False}, + {"available_as_saas": None}, + ), + ], +) +@patch("breathecode.mentorship.actions.mentor_is_ready", MagicMock()) +@patch( + "os.getenv", + MagicMock( + side_effect=apply_get_env( + { + "DAILY_API_URL": URL, + "DAILY_API_KEY": API_KEY, + } + ) + ), +) +@patch( + "requests.request", + apply_requests_request_mock( + [ + ( + 201, + f"{URL}/v1/rooms", + { + "name": ROOM_NAME, + "url": ROOM_URL, + }, + ) + ] ), -]) -@patch('breathecode.mentorship.actions.mentor_is_ready', MagicMock()) -@patch('os.getenv', MagicMock(side_effect=apply_get_env({ - 'DAILY_API_URL': URL, - 'DAILY_API_KEY': API_KEY, -}))) -@patch('requests.request', - apply_requests_request_mock([(201, f'{URL}/v1/rooms', { - 'name': ROOM_NAME, - 'url': ROOM_URL, - })])) -@patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) -@patch('breathecode.mentorship.actions.get_pending_sessions_or_create', - MagicMock(side_effect=get_empty_mentorship_session_queryset)) -def 
test__post__auth__no_saas__finantial_status_no_late(bc: Breathecode, client: fx.Client, academy, cohort, - cohort_user): - - mentor_profile_cases = [{ - 'status': x, - 'online_meeting_url': bc.fake.url(), - 'booking_url': bc.fake.url(), - } for x in ['ACTIVE', 'UNLISTED']] +) +@patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) +@patch( + "breathecode.mentorship.actions.get_pending_sessions_or_create", + MagicMock(side_effect=get_empty_mentorship_session_queryset), +) +def test__post__auth__no_saas__finantial_status_no_late( + bc: Breathecode, client: fx.Client, academy, cohort, cohort_user +): + + mentor_profile_cases = [ + { + "status": x, + "online_meeting_url": bc.fake.url(), + "booking_url": bc.fake.url(), + } + for x in ["ACTIVE", "UNLISTED"] + ] id = 0 for mentor_profile in mentor_profile_cases: id += 1 - user = {'first_name': '', 'last_name': ''} - service = {'slug': 'join_mentorship'} - base = bc.database.create(user=user, - token=1, - service=service, - academy=academy, - cohort=cohort, - cohort_user=cohort_user) + user = {"first_name": "", "last_name": ""} + service = {"slug": "join_mentorship"} + base = bc.database.create( + user=user, token=1, service=service, academy=academy, cohort=cohort, cohort_user=cohort_user + ) ends_at = UTC_NOW - timedelta(seconds=3600 / 2 + 1) - mentorship_session_base = {'mentee_id': base.user.id, 'ends_at': ends_at} + mentorship_session_base = {"mentee_id": base.user.id, "ends_at": ends_at} mentorship_session = { **mentorship_session_base, - 'allow_mentee_to_extend': True, - 'name': 'Session 1', + "allow_mentee_to_extend": True, + "name": "Session 1", } token = 1 - model = bc.database.create(mentor_profile=mentor_profile, - mentorship_session=mentorship_session, - user=user, - token=token, - mentorship_service={ - 'language': 'en', - 'video_provider': 'DAILY' - }, - service=base.service) + model = bc.database.create( + mentor_profile=mentor_profile, + mentorship_session=mentorship_session, + user=user, + token=token, + mentorship_service={"language": "en", "video_provider": "DAILY"}, + service=base.service, + ) model.mentorship_session.mentee = None model.mentorship_session.save() token = base.token - querystring = bc.format.to_querystring({ - 'token': token.key, - 'extend': 'true', - 'mentee': base.user.id, - 'session': model.mentorship_session.id, - }) - url = reverse_lazy('mentorship_shortner:meet_slug_service_slug', - kwargs={ - 'mentor_slug': model.mentor_profile.slug, - 'service_slug': model.mentorship_service.slug - }) + f'?{querystring}' + querystring = bc.format.to_querystring( + { + "token": token.key, + "extend": "true", + "mentee": base.user.id, + "session": model.mentorship_session.id, + } + ) + url = ( + reverse_lazy( + "mentorship_shortner:meet_slug_service_slug", + kwargs={"mentor_slug": model.mentor_profile.slug, "service_slug": model.mentorship_service.slug}, + ) + + f"?{querystring}" + ) response = client.get(url) content = bc.format.from_bytes(response.content) - url = (f'/mentor/meet/{model.mentor_profile.slug}?token={token.key}&extend=true&' - f'mentee={base.user.id}&session={model.mentorship_session.id}') - expected = '' + url = ( + f"/mentor/meet/{model.mentor_profile.slug}?token={token.key}&extend=true&" + f"mentee={base.user.id}&session={model.mentorship_session.id}" + ) + expected = "" # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") 
as f: f.write(expected) assert content == expected expired_at = timeago.format(model.mentorship_session.ends_at, UTC_NOW) minutes = round(((model.mentorship_session.service.duration.total_seconds() / 3600) * 60) / 2) - message = (f'You have a session that expired {expired_at}. Only sessions with less than ' - f'{minutes}min from expiration can be extended (if allowed by the academy)').replace(' ', '%20') - assert response.url == f'/mentor/session/{model.mentorship_session.id}?token={token.key}&message={message}' + message = ( + f"You have a session that expired {expired_at}. Only sessions with less than " + f"{minutes}min from expiration can be extended (if allowed by the academy)" + ).replace(" ", "%20") + assert response.url == f"/mentor/session/{model.mentorship_session.id}?token={token.key}&message={message}" assert response.status_code, status.HTTP_302_FOUND - assert bc.database.list_of('mentorship.MentorProfile') == [ + assert bc.database.list_of("mentorship.MentorProfile") == [ bc.format.to_dict(model.mentor_profile), ] - assert bc.database.list_of('payments.Consumable') == [] - assert bc.database.list_of('payments.ConsumptionSession') == [] + assert bc.database.list_of("payments.Consumable") == [] + assert bc.database.list_of("payments.ConsumptionSession") == [] # teardown - bc.database.delete('mentorship.MentorProfile') - bc.database.delete('auth.Permission') - bc.database.delete('payments.Service') + bc.database.delete("mentorship.MentorProfile") + bc.database.delete("auth.Permission") + bc.database.delete("payments.Service") # Given: A no SAAS student who hasn't paid # When: auth # Then: response 402 -@pytest.mark.parametrize('academy, cohort', [ - ( - { - 'available_as_saas': True - }, - { - 'available_as_saas': False - }, +@pytest.mark.parametrize( + "academy, cohort", + [ + ( + {"available_as_saas": True}, + {"available_as_saas": False}, + ), + ( + {"available_as_saas": False}, + {"available_as_saas": None}, + ), + ], +) +@patch("breathecode.mentorship.actions.mentor_is_ready", MagicMock()) +@patch( + "os.getenv", + MagicMock( + side_effect=apply_get_env( + { + "DAILY_API_URL": URL, + "DAILY_API_KEY": API_KEY, + } + ) ), - ( - { - 'available_as_saas': False - }, - { - 'available_as_saas': None - }, +) +@patch( + "requests.request", + apply_requests_request_mock( + [ + ( + 201, + f"{URL}/v1/rooms", + { + "name": ROOM_NAME, + "url": ROOM_URL, + }, + ) + ] ), -]) -@patch('breathecode.mentorship.actions.mentor_is_ready', MagicMock()) -@patch('os.getenv', MagicMock(side_effect=apply_get_env({ - 'DAILY_API_URL': URL, - 'DAILY_API_KEY': API_KEY, -}))) -@patch('requests.request', - apply_requests_request_mock([(201, f'{URL}/v1/rooms', { - 'name': ROOM_NAME, - 'url': ROOM_URL, - })])) -@patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) -@patch('breathecode.mentorship.actions.get_pending_sessions_or_create', - MagicMock(side_effect=get_empty_mentorship_session_queryset)) +) +@patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) +@patch( + "breathecode.mentorship.actions.get_pending_sessions_or_create", + MagicMock(side_effect=get_empty_mentorship_session_queryset), +) def test__post__auth__no_saas__finantial_status_late(bc: Breathecode, client: fx.Client, academy, cohort): - mentor_profile_cases = [{ - 'status': x, - 'online_meeting_url': bc.fake.url(), - 'booking_url': bc.fake.url(), - } for x in ['ACTIVE', 'UNLISTED']] + mentor_profile_cases = [ + { + "status": x, + "online_meeting_url": bc.fake.url(), + "booking_url": bc.fake.url(), + } + for x in 
["ACTIVE", "UNLISTED"] + ] id = 0 for mentor_profile in mentor_profile_cases: id += 1 - user = {'first_name': '', 'last_name': ''} - service = {'slug': 'join_mentorship'} - cohort_user = {'finantial_status': 'LATE', 'educational_status': 'ACTIVE'} - base = bc.database.create(user=user, - token=1, - service=service, - academy=academy, - cohort=cohort, - cohort_user=cohort_user) + user = {"first_name": "", "last_name": ""} + service = {"slug": "join_mentorship"} + cohort_user = {"finantial_status": "LATE", "educational_status": "ACTIVE"} + base = bc.database.create( + user=user, token=1, service=service, academy=academy, cohort=cohort, cohort_user=cohort_user + ) ends_at = UTC_NOW - timedelta(seconds=3600 / 2 + 1) - mentorship_session_base = {'mentee_id': base.user.id, 'ends_at': ends_at} + mentorship_session_base = {"mentee_id": base.user.id, "ends_at": ends_at} mentorship_session = { **mentorship_session_base, - 'allow_mentee_to_extend': True, + "allow_mentee_to_extend": True, } token = 1 - model = bc.database.create(mentor_profile=mentor_profile, - academy=base.academy, - mentorship_session=mentorship_session, - user=user, - token=token, - mentorship_service={ - 'language': 'en', - 'video_provider': 'DAILY' - }) + model = bc.database.create( + mentor_profile=mentor_profile, + academy=base.academy, + mentorship_session=mentorship_session, + user=user, + token=token, + mentorship_service={"language": "en", "video_provider": "DAILY"}, + ) model.mentorship_session.mentee = None model.mentorship_session.save() token = base.token - querystring = bc.format.to_querystring({ - 'token': token.key, - 'extend': 'true', - 'mentee': base.user.id, - 'session': model.mentorship_session.id, - }) - url = reverse_lazy('mentorship_shortner:meet_slug_service_slug', - kwargs={ - 'mentor_slug': model.mentor_profile.slug, - 'service_slug': model.mentorship_service.slug - }) + f'?{querystring}' + querystring = bc.format.to_querystring( + { + "token": token.key, + "extend": "true", + "mentee": base.user.id, + "session": model.mentorship_session.id, + } + ) + url = ( + reverse_lazy( + "mentorship_shortner:meet_slug_service_slug", + kwargs={"mentor_slug": model.mentor_profile.slug, "service_slug": model.mentorship_service.slug}, + ) + + f"?{querystring}" + ) response = client.get(url) content = bc.format.from_bytes(response.content) - url = (f'/mentor/meet/{model.mentor_profile.slug}?token={token.key}&extend=true&' - f'mentee={base.user.id}&session={model.mentorship_session.id}') - expected = render_message('You must get a plan in order to access this service', - data={ - 'GO_BACK': 'Go back to Dashboard', - 'URL_BACK': 'https://4geeks.com/choose-program', - 'BUTTON': 'Get a plan', - 'LINK': f'https://4geeks.com/checkout?plan=basic&token={base.token.key}', - }) + url = ( + f"/mentor/meet/{model.mentor_profile.slug}?token={token.key}&extend=true&" + f"mentee={base.user.id}&session={model.mentorship_session.id}" + ) + expected = render_message( + "You must get a plan in order to access this service", + data={ + "GO_BACK": "Go back to Dashboard", + "URL_BACK": "https://4geeks.com/choose-program", + "BUTTON": "Get a plan", + "LINK": f"https://4geeks.com/checkout?plan=basic&token={base.token.key}", + }, + ) # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) assert content == expected expired_at = 
timeago.format(model.mentorship_session.ends_at, UTC_NOW) minutes = round(((model.mentorship_session.service.duration.total_seconds() / 3600) * 60) / 2) - message = (f'You have a session that expired {expired_at}. Only sessions with less than ' - f'{minutes}min from expiration can be extended (if allowed by the academy)').replace(' ', '%20') + message = ( + f"You have a session that expired {expired_at}. Only sessions with less than " + f"{minutes}min from expiration can be extended (if allowed by the academy)" + ).replace(" ", "%20") assert response.status_code, status.HTTP_402_PAYMENT_REQUIRED - assert bc.database.list_of('mentorship.MentorProfile') == [ + assert bc.database.list_of("mentorship.MentorProfile") == [ bc.format.to_dict(model.mentor_profile), ] - assert bc.database.list_of('payments.Consumable') == [] - assert bc.database.list_of('payments.ConsumptionSession') == [] + assert bc.database.list_of("payments.Consumable") == [] + assert bc.database.list_of("payments.ConsumptionSession") == [] # teardown - bc.database.delete('mentorship.MentorProfile') - bc.database.delete('auth.Permission') - bc.database.delete('payments.Service') + bc.database.delete("mentorship.MentorProfile") + bc.database.delete("auth.Permission") + bc.database.delete("payments.Service") diff --git a/breathecode/mentorship/tests/urls_shortner/tests_session_id.py b/breathecode/mentorship/tests/urls_shortner/tests_session_id.py index 2984c7dae..19a904271 100644 --- a/breathecode/mentorship/tests/urls_shortner/tests_session_id.py +++ b/breathecode/mentorship/tests/urls_shortner/tests_session_id.py @@ -1,6 +1,7 @@ """ Test cases for /academy/:id/member/:id """ + from random import randint from unittest.mock import MagicMock, call, patch @@ -36,34 +37,29 @@ def __new__(cls, _, *args, **kwargs): def render(message, mentorship_session=None, token=None): pk = mentorship_session.id if mentorship_session else 1 environ = { - 'HTTP_COOKIE': '', - 'PATH_INFO': f'/mentor/session/{pk}', - 'REMOTE_ADDR': '127.0.0.1', - 'REQUEST_METHOD': 'GET', - 'SCRIPT_NAME': '', - 'SERVER_NAME': 'testserver', - 'SERVER_PORT': '80', - 'SERVER_PROTOCOL': 'HTTP/1.1', - 'wsgi.version': (1, 0), - 'wsgi.url_scheme': 'http', - 'wsgi.input': FakePayload(b''), - 'wsgi.errors': None, - 'wsgi.multiprocess': True, - 'wsgi.multithread': False, - 'wsgi.run_once': False, - 'QUERY_STRING': f'token={token and token.key or ""}', - 'CONTENT_TYPE': 'application/octet-stream' + "HTTP_COOKIE": "", + "PATH_INFO": f"/mentor/session/{pk}", + "REMOTE_ADDR": "127.0.0.1", + "REQUEST_METHOD": "GET", + "SCRIPT_NAME": "", + "SERVER_NAME": "testserver", + "SERVER_PORT": "80", + "SERVER_PROTOCOL": "HTTP/1.1", + "wsgi.version": (1, 0), + "wsgi.url_scheme": "http", + "wsgi.input": FakePayload(b""), + "wsgi.errors": None, + "wsgi.multiprocess": True, + "wsgi.multithread": False, + "wsgi.run_once": False, + "QUERY_STRING": f'token={token and token.key or ""}', + "CONTENT_TYPE": "application/octet-stream", } request = WSGIRequest(environ) string = loader.render_to_string( - 'message.html', - { - 'MESSAGE': message, - 'BUTTON': None, - 'BUTTON_TARGET': '_blank', - 'LINK': None - }, + "message.html", + {"MESSAGE": message, "BUTTON": None, "BUTTON_TARGET": "_blank", "LINK": None}, request, using=None, ) @@ -74,54 +70,54 @@ def render(message, mentorship_session=None, token=None): def render_form(self, mentorship_session=None, token=None, data={}, post=False, fix_logo=False): mentee = mentorship_session.mentee environ = { - 'HTTP_COOKIE': '', - 'PATH_INFO': 
f'/mentor/session/{mentorship_session.id}', - 'REMOTE_ADDR': '127.0.0.1', - 'REQUEST_METHOD': 'GET', - 'SCRIPT_NAME': '', - 'SERVER_NAME': 'testserver', - 'SERVER_PORT': '80', - 'SERVER_PROTOCOL': 'HTTP/1.1', - 'wsgi.version': (1, 0), - 'wsgi.url_scheme': 'http', - 'wsgi.input': FakePayload(b''), - 'wsgi.errors': None, - 'wsgi.multiprocess': True, - 'wsgi.multithread': False, - 'wsgi.run_once': False, - 'QUERY_STRING': f'token={token and token.key or ""}', - 'CONTENT_TYPE': 'application/octet-stream' + "HTTP_COOKIE": "", + "PATH_INFO": f"/mentor/session/{mentorship_session.id}", + "REMOTE_ADDR": "127.0.0.1", + "REQUEST_METHOD": "GET", + "SCRIPT_NAME": "", + "SERVER_NAME": "testserver", + "SERVER_PORT": "80", + "SERVER_PROTOCOL": "HTTP/1.1", + "wsgi.version": (1, 0), + "wsgi.url_scheme": "http", + "wsgi.input": FakePayload(b""), + "wsgi.errors": None, + "wsgi.multiprocess": True, + "wsgi.multithread": False, + "wsgi.run_once": False, + "QUERY_STRING": f'token={token and token.key or ""}', + "CONTENT_TYPE": "application/octet-stream", } if post: - environ['REQUEST_METHOD'] = 'POST' - environ['CONTENT_TYPE'] = 'multipart/form-data; boundary=BoUnDaRyStRiNg; charset=utf-8' + environ["REQUEST_METHOD"] = "POST" + environ["CONTENT_TYPE"] = "multipart/form-data; boundary=BoUnDaRyStRiNg; charset=utf-8" request = WSGIRequest(environ) querystring = self.bc.format.to_querystring(data) data = QueryDict(querystring, mutable=True) - data['token'] = token.key if token else '' + data["token"] = token.key if token else "" if not post: - data['status'] = 'COMPLETED' - data['summary'] = mentorship_session.summary + data["status"] = "COMPLETED" + data["summary"] = mentorship_session.summary - data['session_id'] = mentorship_session.id + data["session_id"] = mentorship_session.id if mentee and not post: - data['student_name'] = f'{mentee.first_name} {mentee.last_name}, {mentee.email}' + data["student_name"] = f"{mentee.first_name} {mentee.last_name}, {mentee.email}" form = CloseMentoringSessionForm(data) string = loader.render_to_string( - 'form.html', + "form.html", { - 'form': form, - 'disabled': False, - 'btn_lable': 'End Mentoring Session', - 'intro': 'Please fill the following information to formally end the session', - 'title': 'End Mentoring Session', + "form": form, + "disabled": False, + "btn_lable": "End Mentoring Session", + "intro": "Please fill the following information to formally end the session", + "title": "End Mentoring Session", }, request, using=None, @@ -137,23 +133,23 @@ def render_form(self, mentorship_session=None, token=None, data={}, post=False, def render_post_form(self, messages=[], mentorship_session=None, token=None, data={}, fix_logo=False): pk = mentorship_session.id if mentorship_session else 1 environ = { - 'HTTP_COOKIE': '', - 'PATH_INFO': f'/mentor/session/{pk}', - 'REMOTE_ADDR': '127.0.0.1', - 'REQUEST_METHOD': 'POST', - 'SCRIPT_NAME': '', - 'SERVER_NAME': 'testserver', - 'SERVER_PORT': '80', - 'SERVER_PROTOCOL': 'HTTP/1.1', - 'wsgi.version': (1, 0), - 'wsgi.url_scheme': 'http', - 'wsgi.input': FakePayload(b''), - 'wsgi.errors': None, - 'wsgi.multiprocess': True, - 'wsgi.multithread': False, - 'wsgi.run_once': False, - 'QUERY_STRING': f'token={token and token.key or ""}', - 'CONTENT_TYPE': 'multipart/form-data; boundary=BoUnDaRyStRiNg; charset=utf-8', + "HTTP_COOKIE": "", + "PATH_INFO": f"/mentor/session/{pk}", + "REMOTE_ADDR": "127.0.0.1", + "REQUEST_METHOD": "POST", + "SCRIPT_NAME": "", + "SERVER_NAME": "testserver", + "SERVER_PORT": "80", + "SERVER_PROTOCOL": "HTTP/1.1", + 
"wsgi.version": (1, 0), + "wsgi.url_scheme": "http", + "wsgi.input": FakePayload(b""), + "wsgi.errors": None, + "wsgi.multiprocess": True, + "wsgi.multithread": False, + "wsgi.run_once": False, + "QUERY_STRING": f'token={token and token.key or ""}', + "CONTENT_TYPE": "multipart/form-data; boundary=BoUnDaRyStRiNg; charset=utf-8", } request = WSGIRequest(environ) @@ -161,12 +157,12 @@ def render_post_form(self, messages=[], mentorship_session=None, token=None, dat data = QueryDict(querystring, mutable=True) form = CloseMentoringSessionForm(data) - context = {'form': form} + context = {"form": form} if messages: - context['messages'] = messages + context["messages"] = messages - string = loader.render_to_string('form.html', context, request, using=None) + string = loader.render_to_string("form.html", context, request, using=None) if fix_logo: return string.replace('src="/static/assets/logo.png"', 'src="/static/icons/picture.png"') @@ -176,115 +172,119 @@ def render_post_form(self, messages=[], mentorship_session=None, token=None, dat def mentor_profile_serializer(mentor_profile, user, mentorship_service, academy): return { - 'id': mentor_profile.id, - 'slug': mentor_profile.slug, - 'user': { - 'id': user.id, - 'first_name': user.first_name, - 'last_name': user.last_name, - 'email': user.email, + "id": mentor_profile.id, + "slug": mentor_profile.slug, + "user": { + "id": user.id, + "first_name": user.first_name, + "last_name": user.last_name, + "email": user.email, }, - 'service': { - 'id': mentorship_service.id, - 'slug': mentorship_service.slug, - 'name': mentorship_service.name, - 'status': mentorship_service.status, - 'academy': { - 'id': academy.id, - 'slug': academy.slug, - 'name': academy.name, - 'logo_url': academy.logo_url, - 'icon_url': academy.icon_url, + "service": { + "id": mentorship_service.id, + "slug": mentorship_service.slug, + "name": mentorship_service.name, + "status": mentorship_service.status, + "academy": { + "id": academy.id, + "slug": academy.slug, + "name": academy.name, + "logo_url": academy.logo_url, + "icon_url": academy.icon_url, }, - 'logo_url': mentorship_service.logo_url, - 'duration': mentorship_service.duration, - 'language': mentorship_service.language, - 'allow_mentee_to_extend': mentorship_service.allow_mentee_to_extend, - 'allow_mentors_to_extend': mentorship_service.allow_mentors_to_extend, - 'max_duration': mentorship_service.max_duration, - 'missed_meeting_duration': mentorship_service.missed_meeting_duration, - 'created_at': mentorship_service.created_at, - 'updated_at': mentorship_service.updated_at, - 'description': mentorship_service.description, + "logo_url": mentorship_service.logo_url, + "duration": mentorship_service.duration, + "language": mentorship_service.language, + "allow_mentee_to_extend": mentorship_service.allow_mentee_to_extend, + "allow_mentors_to_extend": mentorship_service.allow_mentors_to_extend, + "max_duration": mentorship_service.max_duration, + "missed_meeting_duration": mentorship_service.missed_meeting_duration, + "created_at": mentorship_service.created_at, + "updated_at": mentorship_service.updated_at, + "description": mentorship_service.description, }, - 'status': mentor_profile.status, - 'price_per_hour': mentor_profile.price_per_hour, - 'booking_url': mentor_profile.booking_url, - 'online_meeting_url': mentor_profile.online_meeting_url, - 'timezone': mentor_profile.timezone, - 'syllabus': mentor_profile.syllabus, - 'email': mentor_profile.email, - 'created_at': mentor_profile.created_at, - 'updated_at': 
mentor_profile.updated_at, + "status": mentor_profile.status, + "price_per_hour": mentor_profile.price_per_hour, + "booking_url": mentor_profile.booking_url, + "online_meeting_url": mentor_profile.online_meeting_url, + "timezone": mentor_profile.timezone, + "syllabus": mentor_profile.syllabus, + "email": mentor_profile.email, + "created_at": mentor_profile.created_at, + "updated_at": mentor_profile.updated_at, } def mentorship_session_serializer(mentor_profile, mentorship_service, academy, user): - mentorship_sessions = MentorshipSession.objects.filter(mentor__id=mentor_profile.id, - status__in=['STARTED', 'PENDING']) - - return [{ - 'id': mentorship_session.id, - 'status': mentorship_session.status, - 'started_at': mentorship_session.started_at, - 'ended_at': mentorship_session.ended_at, - 'starts_at': mentorship_session.starts_at, - 'ends_at': mentorship_session.ends_at, - 'mentor_joined_at': mentorship_session.mentor_joined_at, - 'mentor_left_at': mentorship_session.mentor_left_at, - 'mentee_left_at': mentorship_session.mentee_left_at, - 'allow_billing': mentorship_session.allow_billing, - 'accounted_duration': mentorship_session.accounted_duration, - 'suggested_accounted_duration': mentorship_session.suggested_accounted_duration, - 'mentor': { - 'id': mentor_profile.id, - 'slug': mentor_profile.slug, - 'user': { - 'id': user.id, - 'first_name': user.first_name, - 'last_name': user.last_name, - 'email': user.email, - }, - 'service': { - 'id': mentorship_service.id, - 'slug': mentorship_service.slug, - 'name': mentorship_service.name, - 'status': mentorship_service.status, - 'academy': { - 'id': academy.id, - 'slug': academy.slug, - 'name': academy.name, - 'logo_url': academy.logo_url, - 'icon_url': academy.icon_url, + mentorship_sessions = MentorshipSession.objects.filter( + mentor__id=mentor_profile.id, status__in=["STARTED", "PENDING"] + ) + + return [ + { + "id": mentorship_session.id, + "status": mentorship_session.status, + "started_at": mentorship_session.started_at, + "ended_at": mentorship_session.ended_at, + "starts_at": mentorship_session.starts_at, + "ends_at": mentorship_session.ends_at, + "mentor_joined_at": mentorship_session.mentor_joined_at, + "mentor_left_at": mentorship_session.mentor_left_at, + "mentee_left_at": mentorship_session.mentee_left_at, + "allow_billing": mentorship_session.allow_billing, + "accounted_duration": mentorship_session.accounted_duration, + "suggested_accounted_duration": mentorship_session.suggested_accounted_duration, + "mentor": { + "id": mentor_profile.id, + "slug": mentor_profile.slug, + "user": { + "id": user.id, + "first_name": user.first_name, + "last_name": user.last_name, + "email": user.email, + }, + "service": { + "id": mentorship_service.id, + "slug": mentorship_service.slug, + "name": mentorship_service.name, + "status": mentorship_service.status, + "academy": { + "id": academy.id, + "slug": academy.slug, + "name": academy.name, + "logo_url": academy.logo_url, + "icon_url": academy.icon_url, + }, + "logo_url": mentorship_service.logo_url, + "duration": mentorship_service.duration, + "language": mentorship_service.language, + "allow_mentee_to_extend": mentorship_service.allow_mentee_to_extend, + "allow_mentors_to_extend": mentorship_service.allow_mentors_to_extend, + "max_duration": mentorship_service.max_duration, + "missed_meeting_duration": mentorship_service.missed_meeting_duration, + "created_at": mentorship_service.created_at, + "updated_at": mentorship_service.updated_at, + "description": mentorship_service.description, }, - 
'logo_url': mentorship_service.logo_url, - 'duration': mentorship_service.duration, - 'language': mentorship_service.language, - 'allow_mentee_to_extend': mentorship_service.allow_mentee_to_extend, - 'allow_mentors_to_extend': mentorship_service.allow_mentors_to_extend, - 'max_duration': mentorship_service.max_duration, - 'missed_meeting_duration': mentorship_service.missed_meeting_duration, - 'created_at': mentorship_service.created_at, - 'updated_at': mentorship_service.updated_at, - 'description': mentorship_service.description, + "status": mentor_profile.status, + "price_per_hour": mentor_profile.price_per_hour, + "booking_url": mentor_profile.booking_url, + "online_meeting_url": mentor_profile.online_meeting_url, + "timezone": mentor_profile.timezone, + "syllabus": mentor_profile.syllabus, + "email": mentor_profile.email, + "created_at": mentor_profile.created_at, + "updated_at": mentor_profile.updated_at, + }, + "mentee": { + "id": user.id, + "first_name": user.first_name, + "last_name": user.last_name, + "email": user.email, }, - 'status': mentor_profile.status, - 'price_per_hour': mentor_profile.price_per_hour, - 'booking_url': mentor_profile.booking_url, - 'online_meeting_url': mentor_profile.online_meeting_url, - 'timezone': mentor_profile.timezone, - 'syllabus': mentor_profile.syllabus, - 'email': mentor_profile.email, - 'created_at': mentor_profile.created_at, - 'updated_at': mentor_profile.updated_at, - }, - 'mentee': { - 'id': user.id, - 'first_name': user.first_name, - 'last_name': user.last_name, - 'email': user.email, } - } for mentorship_session in mentorship_sessions] + for mentorship_session in mentorship_sessions + ] def render_close_session(message, mentor_profile, user, token, mentorship_service, academy, fix_logo=False): @@ -292,25 +292,28 @@ def render_close_session(message, mentor_profile, user, token, mentorship_servic context = {} if academy: - context['COMPANY_INFO_EMAIL'] = academy.feedback_email - context['COMPANY_LEGAL_NAME'] = academy.legal_name or academy.name - context['COMPANY_LOGO'] = academy.logo_url - context['COMPANY_NAME'] = academy.name + context["COMPANY_INFO_EMAIL"] = academy.feedback_email + context["COMPANY_LEGAL_NAME"] = academy.legal_name or academy.name + context["COMPANY_LOGO"] = academy.logo_url + context["COMPANY_NAME"] = academy.name - if 'heading' not in context: - context['heading'] = academy.name + if "heading" not in context: + context["heading"] = academy.name string = loader.render_to_string( - 'close_session.html', { - 'token': token.key, - 'message': message, - 'mentor': mentor_profile_serializer(mentor_profile, user, mentorship_service, academy), - 'mentee': user, - 'SUBJECT': 'Close Mentoring Session', - 'sessions': mentorship_session_serializer(mentor_profile, mentorship_service, academy, user), - 'baseUrl': f'/mentor/session/{mentor_profile.id}', + "close_session.html", + { + "token": token.key, + "message": message, + "mentor": mentor_profile_serializer(mentor_profile, user, mentorship_service, academy), + "mentee": user, + "SUBJECT": "Close Mentoring Session", + "sessions": mentorship_session_serializer(mentor_profile, mentorship_service, academy, user), + "baseUrl": f"/mentor/session/{mentor_profile.id}", **context, - }, request) + }, + request, + ) if fix_logo: return string.replace('src="/static/assets/logo.png"', 'src="/static/icons/picture.png"') @@ -320,6 +323,7 @@ def render_close_session(message, mentor_profile, user, token, mentorship_servic class TestAuthenticate(LegacyAPITestCase): """Authentication test suite""" 
+ """ 🔽🔽🔽 Auth """ @@ -327,65 +331,67 @@ class TestAuthenticate(LegacyAPITestCase): def test__get__without_auth(self, enable_signals): enable_signals() - url = reverse_lazy('mentorship_shortner:session_id', kwargs={'session_id': 1}) + url = reverse_lazy("mentorship_shortner:session_id", kwargs={"session_id": 1}) response = self.client.get(url) - hash = self.bc.format.to_base64('/mentor/session/1') + hash = self.bc.format.to_base64("/mentor/session/1") content = self.bc.format.from_bytes(response.content) - expected = '' + expected = "" self.assertEqual(content, expected) - self.assertEqual(response.url, f'/v1/auth/view/login?attempt=1&url={hash}') + self.assertEqual(response.url, f"/v1/auth/view/login?attempt=1&url={hash}") self.assertEqual(response.status_code, status.HTTP_302_FOUND) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), []) + self.assertEqual(self.bc.database.list_of("mentorship.MentorshipSession"), []) """ 🔽🔽🔽 GET without MentorshipSession """ - @patch('django.template.context_processors.get_token', MagicMock(return_value='predicabletoken')) - @patch('django.contrib.messages.storage.fallback.FallbackStorage.add', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.template.context_processors.get_token", MagicMock(return_value="predicabletoken")) + @patch("django.contrib.messages.storage.fallback.FallbackStorage.add", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__get__without_mentorship_session(self, enable_signals): enable_signals() model = self.bc.database.create(user=1, token=1) - querystring = self.bc.format.to_querystring({'token': model.token.key}) - url = reverse_lazy('mentorship_shortner:session_id', kwargs={'session_id': 1}) + f'?{querystring}' + querystring = self.bc.format.to_querystring({"token": model.token.key}) + url = reverse_lazy("mentorship_shortner:session_id", kwargs={"session_id": 1}) + f"?{querystring}" response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - expected = render(f'Session not found with id 1', token=model.token) + expected = render(f"Session not found with id 1", token=model.token) # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), []) + self.assertEqual(self.bc.database.list_of("mentorship.MentorshipSession"), []) self.assertEqual(FallbackStorage.add.call_args_list, []) """ 🔽🔽🔽 GET with MentorshipSession """ - @patch('django.template.context_processors.get_token', MagicMock(return_value='predicabletoken')) - @patch('django.contrib.messages.storage.fallback.FallbackStorage.add', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.template.context_processors.get_token", MagicMock(return_value="predicabletoken")) + @patch("django.contrib.messages.storage.fallback.FallbackStorage.add", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__get__with_mentorship_session(self, enable_signals): enable_signals() model = self.bc.database.create(user=1, token=1, mentorship_session=1) - querystring = self.bc.format.to_querystring({'token': 
model.token.key}) - url = reverse_lazy('mentorship_shortner:session_id', kwargs={'session_id': model.mentorship_session.id - }) + f'?{querystring}' + querystring = self.bc.format.to_querystring({"token": model.token.key}) + url = ( + reverse_lazy("mentorship_shortner:session_id", kwargs={"session_id": model.mentorship_session.id}) + + f"?{querystring}" + ) response = self.client.get(url) content = self.bc.format.from_bytes(response.content) @@ -393,41 +399,49 @@ def test__get__with_mentorship_session(self, enable_signals): # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), - [{ - **self.bc.format.to_dict(model.mentorship_session), - 'mentor_left_at': UTC_NOW, - }]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + { + **self.bc.format.to_dict(model.mentorship_session), + "mentor_left_at": UTC_NOW, + } + ], + ) self.assertEqual(FallbackStorage.add.call_args_list, []) """ 🔽🔽🔽 GET with MentorshipSession, passing message """ - @patch('django.template.context_processors.get_token', MagicMock(return_value='predicabletoken')) - @patch('django.contrib.messages.storage.fallback.FallbackStorage.add', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.template.context_processors.get_token", MagicMock(return_value="predicabletoken")) + @patch("django.contrib.messages.storage.fallback.FallbackStorage.add", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__get__with_mentorship_session__passing_message(self, enable_signals): enable_signals() model = self.bc.database.create(user=1, token=1, mentorship_session=1) message = self.bc.fake.slug() - querystring = self.bc.format.to_querystring({ - 'token': model.token.key, - 'message': message, - }) + querystring = self.bc.format.to_querystring( + { + "token": model.token.key, + "message": message, + } + ) - url = reverse_lazy('mentorship_shortner:session_id', kwargs={'session_id': model.mentorship_session.id - }) + f'?{querystring}' + url = ( + reverse_lazy("mentorship_shortner:session_id", kwargs={"session_id": model.mentorship_session.id}) + + f"?{querystring}" + ) response = self.client.get(url) content = self.bc.format.from_bytes(response.content) @@ -435,380 +449,412 @@ def test__get__with_mentorship_session__passing_message(self, enable_signals): # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), - [{ - **self.bc.format.to_dict(model.mentorship_session), - 'mentor_left_at': UTC_NOW, - }]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + { + **self.bc.format.to_dict(model.mentorship_session), + "mentor_left_at": UTC_NOW, + } + ], + ) - self.assertEqual(FallbackStorage.add.call_args_list, [call(20, message, '')]) + self.assertEqual(FallbackStorage.add.call_args_list, 
[call(20, message, "")]) """ 🔽🔽🔽 GET with MentorshipSession """ - @patch('django.template.context_processors.get_token', MagicMock(return_value='predicabletoken')) - @patch('django.contrib.messages.storage.fallback.FallbackStorage.add', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.template.context_processors.get_token", MagicMock(return_value="predicabletoken")) + @patch("django.contrib.messages.storage.fallback.FallbackStorage.add", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__get__with_mentorship_session__without_mentee(self, enable_signals): enable_signals() statuses = [ # 'PENDING', # 'STARTED', - 'COMPLETED', - 'FAILED', - 'IGNORED', + "COMPLETED", + "FAILED", + "IGNORED", ] for c in statuses: - mentorship_sessions = [{'mentee_id': None}, {'status': c}] - model = self.bc.database.create(user=1, - token=1, - mentorship_session=mentorship_sessions, - mentorship_service=1) - - querystring = self.bc.format.to_querystring({'token': model.token.key}) - url = reverse_lazy('mentorship_shortner:session_id', kwargs={'session_id': model.mentorship_session[0].id - }) + f'?{querystring}' + mentorship_sessions = [{"mentee_id": None}, {"status": c}] + model = self.bc.database.create( + user=1, token=1, mentorship_session=mentorship_sessions, mentorship_service=1 + ) + + querystring = self.bc.format.to_querystring({"token": model.token.key}) + url = ( + reverse_lazy("mentorship_shortner:session_id", kwargs={"session_id": model.mentorship_session[0].id}) + + f"?{querystring}" + ) response = self.client.get(url) content = self.bc.format.from_bytes(response.content) expected = render_close_session( - 'Previous session expired without assigned mentee, it probably means the mentee ' - 'never came. It was marked as failed. Try the mentor meeting URL again.', + "Previous session expired without assigned mentee, it probably means the mentee " + "never came. It was marked as failed. Try the mentor meeting URL again.", model.mentor_profile, model.user, model.token, model.mentorship_service, model.academy, - fix_logo=True) + fix_logo=True, + ) # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [{ - **self.bc.format.to_dict(model.mentorship_session[0]), - 'mentor_left_at': - UTC_NOW, - 'status': - 'FAILED', - 'summary': ('This session expired without assigned mentee, it probably ' - 'means the mentee never came. It will be marked as failed'), - }, { - **self.bc.format.to_dict(model.mentorship_session[1]), - }]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + { + **self.bc.format.to_dict(model.mentorship_session[0]), + "mentor_left_at": UTC_NOW, + "status": "FAILED", + "summary": ( + "This session expired without assigned mentee, it probably " + "means the mentee never came. 
It will be marked as failed" + ), + }, + { + **self.bc.format.to_dict(model.mentorship_session[1]), + }, + ], + ) self.assertEqual(FallbackStorage.add.call_args_list, []) # teardown - self.bc.database.delete('mentorship.MentorshipSession') + self.bc.database.delete("mentorship.MentorshipSession") """ 🔽🔽🔽 GET with MentorshipSession """ - @patch('django.template.context_processors.get_token', MagicMock(return_value='predicabletoken')) - @patch('django.contrib.messages.storage.fallback.FallbackStorage.add', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.notify.actions.send_email_message', MagicMock()) + @patch("django.template.context_processors.get_token", MagicMock(return_value="predicabletoken")) + @patch("django.contrib.messages.storage.fallback.FallbackStorage.add", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) def test__get__with_mentorship_session__without_mentee__(self, enable_signals): enable_signals() statuses = [ - 'PENDING', - 'STARTED', + "PENDING", + "STARTED", ] for c in statuses: - mentorship_sessions = [{'mentee_id': None}, {'status': c}] - model = self.bc.database.create(user=1, - token=1, - mentorship_session=mentorship_sessions, - mentorship_service=1) - - querystring = self.bc.format.to_querystring({'token': model.token.key}) - url = reverse_lazy('mentorship_shortner:session_id', kwargs={'session_id': model.mentorship_session[0].id - }) + f'?{querystring}' + mentorship_sessions = [{"mentee_id": None}, {"status": c}] + model = self.bc.database.create( + user=1, token=1, mentorship_session=mentorship_sessions, mentorship_service=1 + ) + + querystring = self.bc.format.to_querystring({"token": model.token.key}) + url = ( + reverse_lazy("mentorship_shortner:session_id", kwargs={"session_id": model.mentorship_session[0].id}) + + f"?{querystring}" + ) response = self.client.get(url) content = self.bc.format.from_bytes(response.content) expected = render_close_session( - 'Previous session expired without assigned mentee, it probably means the mentee ' - 'never came. It was marked as failed. Try the mentor meeting URL again.', + "Previous session expired without assigned mentee, it probably means the mentee " + "never came. It was marked as failed. Try the mentor meeting URL again.", model.mentor_profile, model.user, model.token, model.mentorship_service, model.academy, - fix_logo=True) + fix_logo=True, + ) # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [{ - **self.bc.format.to_dict(model.mentorship_session[0]), - 'mentor_left_at': - UTC_NOW, - 'status': - 'FAILED', - 'summary': ('This session expired without assigned mentee, it probably ' - 'means the mentee never came. 
It will be marked as failed'), - }, { - **self.bc.format.to_dict(model.mentorship_session[1]), - }]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + { + **self.bc.format.to_dict(model.mentorship_session[0]), + "mentor_left_at": UTC_NOW, + "status": "FAILED", + "summary": ( + "This session expired without assigned mentee, it probably " + "means the mentee never came. It will be marked as failed" + ), + }, + { + **self.bc.format.to_dict(model.mentorship_session[1]), + }, + ], + ) self.assertEqual(FallbackStorage.add.call_args_list, []) # teardown - self.bc.database.delete('mentorship.MentorshipSession') - - Token = self.bc.database.get_model('authenticate.Token') - token = Token.objects.filter(user=model.user, token_type='temporal').last() - - calls = [] if c != 'STARTED' else [ - call( - 'message', - model.mentor_profile.user.email, - { - 'SUBJECT': 'Mentorship session starting', - 'MESSAGE': - f'Mentee {model.user.first_name} {model.user.last_name} is joining your session, please come back to this email when the session is over to marke it as completed', - 'BUTTON': f'Finish and review this session', - 'LINK': f'/mentor/session/4?token={token.key}', - }, - academy=model.academy, - ) - ] + self.bc.database.delete("mentorship.MentorshipSession") + + Token = self.bc.database.get_model("authenticate.Token") + token = Token.objects.filter(user=model.user, token_type="temporal").last() + + calls = ( + [] + if c != "STARTED" + else [ + call( + "message", + model.mentor_profile.user.email, + { + "SUBJECT": "Mentorship session starting", + "MESSAGE": f"Mentee {model.user.first_name} {model.user.last_name} is joining your session, please come back to this email when the session is over to marke it as completed", + "BUTTON": f"Finish and review this session", + "LINK": f"/mentor/session/4?token={token.key}", + }, + academy=model.academy, + ) + ] + ) self.bc.check.calls(actions.send_email_message.call_args_list, calls) """ 🔽🔽🔽 POST without MentorshipSession, passing nothing """ - @patch('django.template.context_processors.get_token', MagicMock(return_value='predicabletoken')) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.template.context_processors.get_token", MagicMock(return_value="predicabletoken")) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__post__without_mentorship_session__passing_nothing(self, enable_signals): enable_signals() model = self.bc.database.create(user=1, token=1) - querystring = self.bc.format.to_querystring({'token': model.token.key}) - url = reverse_lazy('mentorship_shortner:session_id', kwargs={'session_id': 1}) + f'?{querystring}' + querystring = self.bc.format.to_querystring({"token": model.token.key}) + url = reverse_lazy("mentorship_shortner:session_id", kwargs={"session_id": 1}) + f"?{querystring}" data = {} - response = self.client.post(url, data, format='multipart') + response = self.client.post(url, data, format="multipart") content = self.bc.format.from_bytes(response.content) - expected = render_post_form(self, - messages=[Message('alert-danger', f'Invalid or expired deliver token.')], - token=model.token, - data=data) + expected = render_post_form( + self, messages=[Message("alert-danger", f"Invalid or expired deliver token.")], token=model.token, data=data + ) # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with 
open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), []) + self.assertEqual(self.bc.database.list_of("mentorship.MentorshipSession"), []) """ 🔽🔽🔽 POST without MentorshipSession, passing token """ - @patch('django.template.context_processors.get_token', MagicMock(return_value='predicabletoken')) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.template.context_processors.get_token", MagicMock(return_value="predicabletoken")) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__post__without_mentorship_session__passing_token(self, enable_signals): enable_signals() model = self.bc.database.create(user=1, token=1) - querystring = self.bc.format.to_querystring({'token': model.token.key}) - url = reverse_lazy('mentorship_shortner:session_id', kwargs={'session_id': 1}) + f'?{querystring}' - data = {'token': model.token.key} - response = self.client.post(url, data, format='multipart') + querystring = self.bc.format.to_querystring({"token": model.token.key}) + url = reverse_lazy("mentorship_shortner:session_id", kwargs={"session_id": 1}) + f"?{querystring}" + data = {"token": model.token.key} + response = self.client.post(url, data, format="multipart") content = self.bc.format.from_bytes(response.content) - expected = render_post_form(self, - messages=[Message('alert-danger', f'Invalid session id.')], - token=model.token, - data=data) + expected = render_post_form( + self, messages=[Message("alert-danger", f"Invalid session id.")], token=model.token, data=data + ) # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), []) + self.assertEqual(self.bc.database.list_of("mentorship.MentorshipSession"), []) """ 🔽🔽🔽 POST with MentorshipSession, passing token and session_id, without requires field """ - @patch('django.template.context_processors.get_token', MagicMock(return_value='predicabletoken')) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.template.context_processors.get_token", MagicMock(return_value="predicabletoken")) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__post__with_mentorship_session__passing_token__passing_session_id__without_requires_field( - self, enable_signals): + self, enable_signals + ): enable_signals() model = self.bc.database.create(user=1, token=1, mentorship_session=1) - querystring = self.bc.format.to_querystring({'token': model.token.key}) - url = reverse_lazy('mentorship_shortner:session_id', kwargs={'session_id': 1}) + f'?{querystring}' - data = {'token': model.token.key, 'session_id': 1} - response = self.client.post(url, data, format='multipart') + querystring = self.bc.format.to_querystring({"token": model.token.key}) + url = reverse_lazy("mentorship_shortner:session_id", kwargs={"session_id": 1}) + f"?{querystring}" + data = {"token": model.token.key, "session_id": 1} + response = self.client.post(url, data, format="multipart") content = self.bc.format.from_bytes(response.content) expected = 
render_form(self, model.mentorship_session, model.token, data=data, post=True, fix_logo=False) # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - self.bc.format.to_dict(model.mentorship_session), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + self.bc.format.to_dict(model.mentorship_session), + ], + ) """ 🔽🔽🔽 POST with MentorshipSession, passing token and session_id, with requires field, good statuses """ - @patch('django.template.context_processors.get_token', MagicMock(return_value='predicabletoken')) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.template.context_processors.get_token", MagicMock(return_value="predicabletoken")) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__post__with_mentorship_session__passing_token__passing_session_id__with_requires_field__good_statuses( - self, enable_signals): + self, enable_signals + ): enable_signals() - statuses = ['COMPLETED', 'FAILED', 'IGNORED'] + statuses = ["COMPLETED", "FAILED", "IGNORED"] for s in statuses: model = self.bc.database.create(user=1, token=1, mentorship_session=1, mentorship_service=1) - querystring = self.bc.format.to_querystring({'token': model.token.key}) - url = reverse_lazy('mentorship_shortner:session_id', kwargs={'session_id': 1}) + f'?{querystring}' + querystring = self.bc.format.to_querystring({"token": model.token.key}) + url = reverse_lazy("mentorship_shortner:session_id", kwargs={"session_id": 1}) + f"?{querystring}" data = { - 'token': model.token.key, - 'session_id': model.mentorship_session.id, - 'summary': self.bc.fake.text(), - 'student_name': 'Aaaaa', - 'status': s, + "token": model.token.key, + "session_id": model.mentorship_session.id, + "summary": self.bc.fake.text(), + "student_name": "Aaaaa", + "status": s, } - response = self.client.post(url, data, format='multipart') + response = self.client.post(url, data, format="multipart") content = self.bc.format.from_bytes(response.content) - url = f'/mentor/meet/{model.mentor_profile.slug}?token={model.token.key}' + url = f"/mentor/meet/{model.mentor_profile.slug}?token={model.token.key}" expected = render_close_session( f'The mentoring session was closed successfully, you can close this window or <a href="{url}">' - 'go back to your meeting room.</a>', + "go back to your meeting room.</a>", model.mentor_profile, model.user, model.token, model.mentorship_service, model.academy, - fix_logo=False) + fix_logo=False, + ) # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorshipSession'), [ - { - **self.bc.format.to_dict(model.mentorship_session), - 'ended_at': UTC_NOW, - 'status': data['status'], - 'summary': data['summary'], - }, - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorshipSession"), + [ + { + 
**self.bc.format.to_dict(model.mentorship_session), + "ended_at": UTC_NOW, + "status": data["status"], + "summary": data["summary"], + }, + ], + ) # teardown - self.bc.database.delete('mentorship.MentorshipSession') + self.bc.database.delete("mentorship.MentorshipSession") """ 🔽🔽🔽 POST with MentorshipSession, passing token and session_id, with requires field, bad statuses """ - @patch('django.template.context_processors.get_token', MagicMock(return_value='predicabletoken')) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.template.context_processors.get_token", MagicMock(return_value="predicabletoken")) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__post__with_mentorship_session__passing_token__passing_session_id__with_requires_field__bad_statuses( - self, enable_signals): + self, enable_signals + ): enable_signals() - statuses = ['PENDING', 'STARTED'] + statuses = ["PENDING", "STARTED"] for s in statuses: model = self.bc.database.create(user=1, token=1, mentorship_session=1) - querystring = self.bc.format.to_querystring({'token': model.token.key}) - url = reverse_lazy('mentorship_shortner:session_id', kwargs={'session_id': 1}) + f'?{querystring}' + querystring = self.bc.format.to_querystring({"token": model.token.key}) + url = reverse_lazy("mentorship_shortner:session_id", kwargs={"session_id": 1}) + f"?{querystring}" data = { - 'token': model.token.key, - 'session_id': model.mentorship_session.id, - 'summary': self.bc.fake.text(), - 'student_name': 'Aaaaa', - 'status': s, + "token": model.token.key, + "session_id": model.mentorship_session.id, + "summary": self.bc.fake.text(), + "student_name": "Aaaaa", + "status": s, } - response = self.client.post(url, data, format='multipart') + response = self.client.post(url, data, format="multipart") content = self.bc.format.from_bytes(response.content) expected = render_form(self, model.mentorship_session, model.token, data=data, post=True, fix_logo=False) # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('mentorship.MentorshipSession'), + self.bc.database.list_of("mentorship.MentorshipSession"), [ { **self.bc.format.to_dict(model.mentorship_session), # 'status': data['status'], }, - ]) + ], + ) # teardown - self.bc.database.delete('mentorship.MentorshipSession') + self.bc.database.delete("mentorship.MentorshipSession") diff --git a/breathecode/mentorship/tests/urls_shortner/tests_slug.py b/breathecode/mentorship/tests/urls_shortner/tests_slug.py index 9ffd8b969..f362c987b 100644 --- a/breathecode/mentorship/tests/urls_shortner/tests_slug.py +++ b/breathecode/mentorship/tests/urls_shortner/tests_slug.py @@ -1,6 +1,7 @@ """ Test cases for /academy/:id/member/:id """ + from unittest.mock import MagicMock, patch from django.core.handlers.wsgi import WSGIRequest @@ -23,46 +24,46 @@ def format_datetime(self, date): def render(message, mentor_profile=None, token=None, fix_logo=False, academy=None): - slug = mentor_profile.slug if mentor_profile else 'asd' + slug = mentor_profile.slug if mentor_profile else "asd" environ = { - 'HTTP_COOKIE': '', - 'PATH_INFO': f'/mentor/{slug}', - 'REMOTE_ADDR': '127.0.0.1', - 'REQUEST_METHOD': 'GET', - 'SCRIPT_NAME': 
'', - 'SERVER_NAME': 'testserver', - 'SERVER_PORT': '80', - 'SERVER_PROTOCOL': 'HTTP/1.1', - 'wsgi.version': (1, 0), - 'wsgi.url_scheme': 'http', - 'wsgi.input': FakePayload(b''), - 'wsgi.errors': None, - 'wsgi.multiprocess': True, - 'wsgi.multithread': False, - 'wsgi.run_once': False, - 'QUERY_STRING': f'token={token and token.key or ""}', - 'CONTENT_TYPE': 'application/octet-stream' + "HTTP_COOKIE": "", + "PATH_INFO": f"/mentor/{slug}", + "REMOTE_ADDR": "127.0.0.1", + "REQUEST_METHOD": "GET", + "SCRIPT_NAME": "", + "SERVER_NAME": "testserver", + "SERVER_PORT": "80", + "SERVER_PROTOCOL": "HTTP/1.1", + "wsgi.version": (1, 0), + "wsgi.url_scheme": "http", + "wsgi.input": FakePayload(b""), + "wsgi.errors": None, + "wsgi.multiprocess": True, + "wsgi.multithread": False, + "wsgi.run_once": False, + "QUERY_STRING": f'token={token and token.key or ""}', + "CONTENT_TYPE": "application/octet-stream", } request = WSGIRequest(environ) data = {} if academy: - data['COMPANY_INFO_EMAIL'] = academy.feedback_email - data['COMPANY_LEGAL_NAME'] = academy.legal_name or academy.name - data['COMPANY_LOGO'] = academy.logo_url - data['COMPANY_NAME'] = academy.name + data["COMPANY_INFO_EMAIL"] = academy.feedback_email + data["COMPANY_LEGAL_NAME"] = academy.legal_name or academy.name + data["COMPANY_LOGO"] = academy.logo_url + data["COMPANY_NAME"] = academy.name - if 'heading' not in data: - data['heading'] = academy.name + if "heading" not in data: + data["heading"] = academy.name string = loader.render_to_string( - 'message.html', + "message.html", { - 'MESSAGE': message, - 'BUTTON': None, - 'BUTTON_TARGET': '_blank', - 'LINK': None, + "MESSAGE": message, + "BUTTON": None, + "BUTTON_TARGET": "_blank", + "LINK": None, **data, }, request, @@ -78,27 +79,27 @@ def render(message, mentor_profile=None, token=None, fix_logo=False, academy=Non def render_successfully(mentor_profile, user, fix_logo=False, academy=None): request = None booking_url = mentor_profile.booking_url - if not booking_url.endswith('?'): - booking_url += '?' + if not booking_url.endswith("?"): + booking_url += "?" 
data = { - 'SUBJECT': 'Mentoring Session', - 'mentor': mentor_profile, - 'mentee': user, - 'booking_url': booking_url, - 'LOGO_IN_CONTENT': True, + "SUBJECT": "Mentoring Session", + "mentor": mentor_profile, + "mentee": user, + "booking_url": booking_url, + "LOGO_IN_CONTENT": True, } if academy: - data['COMPANY_INFO_EMAIL'] = academy.feedback_email - data['COMPANY_LEGAL_NAME'] = academy.legal_name or academy.name - data['COMPANY_LOGO'] = academy.logo_url - data['COMPANY_NAME'] = academy.name + data["COMPANY_INFO_EMAIL"] = academy.feedback_email + data["COMPANY_LEGAL_NAME"] = academy.legal_name or academy.name + data["COMPANY_LOGO"] = academy.logo_url + data["COMPANY_NAME"] = academy.name - if 'heading' not in data: - data['heading'] = academy.name + if "heading" not in data: + data["heading"] = academy.name - string = loader.render_to_string('book_session.html', data, request) + string = loader.render_to_string("book_session.html", data, request) if fix_logo: return string.replace('src="/static/assets/logo.png"', 'src="/static/icons/picture.png"') @@ -108,22 +109,23 @@ def render_successfully(mentor_profile, user, fix_logo=False, academy=None): class AuthenticateTestSuite(MentorshipTestCase): """Authentication test suite""" + """ 🔽🔽🔽 Auth """ def test_without_auth(self): - url = reverse_lazy('mentorship_shortner:slug', kwargs={'mentor_slug': 'asd'}) + url = reverse_lazy("mentorship_shortner:slug", kwargs={"mentor_slug": "asd"}) response = self.client.get(url) - hash = self.bc.format.to_base64('/mentor/asd') + hash = self.bc.format.to_base64("/mentor/asd") content = self.bc.format.from_bytes(response.content) - expected = '' + expected = "" self.assertEqual(content, expected) - self.assertEqual(response.url, f'/v1/auth/view/login?attempt=1&url={hash}') + self.assertEqual(response.url, f"/v1/auth/view/login?attempt=1&url={hash}") self.assertEqual(response.status_code, status.HTTP_302_FOUND) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), []) + self.assertEqual(self.bc.database.list_of("authenticate.ProfileAcademy"), []) """ 🔽🔽🔽 GET without MentorProfile @@ -132,24 +134,24 @@ def test_without_auth(self): def test_without_mentor_profile(self): model = self.bc.database.create(user=1, token=1) - querystring = self.bc.format.to_querystring({'token': model.token.key}) - url = reverse_lazy('mentorship_shortner:slug', kwargs={'mentor_slug': 'asd'}) + f'?{querystring}' + querystring = self.bc.format.to_querystring({"token": model.token.key}) + url = reverse_lazy("mentorship_shortner:slug", kwargs={"mentor_slug": "asd"}) + f"?{querystring}" response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - expected = render(f'No mentor found with slug asd') + expected = render(f"No mentor found with slug asd") # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), []) + self.assertEqual(self.bc.database.list_of("mentorship.MentorProfile"), []) """ 🔽🔽🔽 GET without MentorProfile @@ -158,130 +160,147 @@ def test_without_mentor_profile(self): def test_with_mentor_profile(self): model = self.bc.database.create(user=1, token=1, mentor_profile=1) - querystring = self.bc.format.to_querystring({'token': 
model.token.key}) - url = reverse_lazy('mentorship_shortner:slug', kwargs={'mentor_slug': model.mentor_profile.slug - }) + f'?{querystring}' + querystring = self.bc.format.to_querystring({"token": model.token.key}) + url = ( + reverse_lazy("mentorship_shortner:slug", kwargs={"mentor_slug": model.mentor_profile.slug}) + + f"?{querystring}" + ) response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - expected = render(f'This mentor is not active', - model.mentor_profile, - model.token, - fix_logo=True, - academy=model.academy) + expected = render( + f"This mentor is not active", model.mentor_profile, model.token, fix_logo=True, academy=model.academy + ) # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), [ - self.bc.format.to_dict(model.mentor_profile), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorProfile"), + [ + self.bc.format.to_dict(model.mentor_profile), + ], + ) """ 🔽🔽🔽 GET without MentorProfile, bad statuses """ def test_with_mentor_profile__bad_statuses(self): - cases = [{'status': x} for x in ['INVITED', 'INNACTIVE']] + cases = [{"status": x} for x in ["INVITED", "INNACTIVE"]] for mentor_profile in cases: model = self.bc.database.create(user=1, token=1, mentor_profile=mentor_profile) - querystring = self.bc.format.to_querystring({'token': model.token.key}) - url = reverse_lazy('mentorship_shortner:slug', kwargs={'mentor_slug': model.mentor_profile.slug - }) + f'?{querystring}' + querystring = self.bc.format.to_querystring({"token": model.token.key}) + url = ( + reverse_lazy("mentorship_shortner:slug", kwargs={"mentor_slug": model.mentor_profile.slug}) + + f"?{querystring}" + ) response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - expected = render(f'This mentor is not active', - model.mentor_profile, - model.token, - fix_logo=True, - academy=model.academy) + expected = render( + f"This mentor is not active", model.mentor_profile, model.token, fix_logo=True, academy=model.academy + ) # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), [ - self.bc.format.to_dict(model.mentor_profile), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorProfile"), + [ + self.bc.format.to_dict(model.mentor_profile), + ], + ) # teardown - self.bc.database.delete('mentorship.MentorProfile') + self.bc.database.delete("mentorship.MentorProfile") """ 🔽🔽🔽 GET without MentorProfile, good statuses without mentor urls """ - @patch('breathecode.mentorship.actions.mentor_is_ready', MagicMock(side_effect=Exception())) + @patch("breathecode.mentorship.actions.mentor_is_ready", MagicMock(side_effect=Exception())) def test_with_mentor_profile__good_statuses__without_mentor_urls(self): - cases = [{'status': x} for x in ['ACTIVE', 'UNLISTED']] + cases = [{"status": x} for x in ["ACTIVE", "UNLISTED"]] 
for mentor_profile in cases: model = self.bc.database.create(user=1, token=1, mentor_profile=mentor_profile) - querystring = self.bc.format.to_querystring({'token': model.token.key}) - url = reverse_lazy('mentorship_shortner:slug', kwargs={'mentor_slug': model.mentor_profile.slug - }) + f'?{querystring}' + querystring = self.bc.format.to_querystring({"token": model.token.key}) + url = ( + reverse_lazy("mentorship_shortner:slug", kwargs={"mentor_slug": model.mentor_profile.slug}) + + f"?{querystring}" + ) response = self.client.get(url) content = self.bc.format.from_bytes(response.content) expected = render( - f'This mentor is not ready, please contact the mentor directly or anyone from the academy staff.', + f"This mentor is not ready, please contact the mentor directly or anyone from the academy staff.", model.mentor_profile, model.token, fix_logo=True, - academy=model.academy) + academy=model.academy, + ) # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), [ - self.bc.format.to_dict(model.mentor_profile), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorProfile"), + [ + self.bc.format.to_dict(model.mentor_profile), + ], + ) # teardown - self.bc.database.delete('mentorship.MentorProfile') + self.bc.database.delete("mentorship.MentorProfile") """ 🔽🔽🔽 GET without MentorProfile, good statuses with mentor urls """ - @patch('breathecode.mentorship.actions.mentor_is_ready', MagicMock()) + @patch("breathecode.mentorship.actions.mentor_is_ready", MagicMock()) def test_with_mentor_profile__good_statuses__with_mentor_urls(self): - cases = [{ - 'status': x, - 'online_meeting_url': self.bc.fake.url(), - 'booking_url': self.bc.fake.url(), - } for x in ['ACTIVE', 'UNLISTED']] + cases = [ + { + "status": x, + "online_meeting_url": self.bc.fake.url(), + "booking_url": self.bc.fake.url(), + } + for x in ["ACTIVE", "UNLISTED"] + ] for mentor_profile in cases: model = self.bc.database.create(user=1, token=1, mentor_profile=mentor_profile) - querystring = self.bc.format.to_querystring({'token': model.token.key}) - url = reverse_lazy('mentorship_shortner:slug', kwargs={'mentor_slug': model.mentor_profile.slug - }) + f'?{querystring}' + querystring = self.bc.format.to_querystring({"token": model.token.key}) + url = ( + reverse_lazy("mentorship_shortner:slug", kwargs={"mentor_slug": model.mentor_profile.slug}) + + f"?{querystring}" + ) response = self.client.get(url) content = self.bc.format.from_bytes(response.content) @@ -289,17 +308,20 @@ def test_with_mentor_profile__good_statuses__with_mentor_urls(self): # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('mentorship.MentorProfile'), [ - self.bc.format.to_dict(model.mentor_profile), - ]) + self.assertEqual( + self.bc.database.list_of("mentorship.MentorProfile"), + [ + self.bc.format.to_dict(model.mentor_profile), + ], + ) # teardown - 
self.bc.database.delete('mentorship.MentorProfile') + self.bc.database.delete("mentorship.MentorProfile") diff --git a/breathecode/mentorship/urls.py b/breathecode/mentorship/urls.py index cdad8fc7a..71b4d0aa3 100644 --- a/breathecode/mentorship/urls.py +++ b/breathecode/mentorship/urls.py @@ -1,31 +1,42 @@ from django.urls import path -from .views import (ServiceView, MentorView, SessionView, render_html_bill, BillView, ServiceSessionView, - MentorSessionView, UserMeSessionView, UserMeBillView, PublicMentorView, AgentView, - SupportChannelView, calendly_webhook, AcademyCalendlyOrgView) +from .views import ( + ServiceView, + MentorView, + SessionView, + render_html_bill, + BillView, + ServiceSessionView, + MentorSessionView, + UserMeSessionView, + UserMeBillView, + PublicMentorView, + AgentView, + SupportChannelView, + calendly_webhook, + AcademyCalendlyOrgView, +) -app_name = 'mentorship' +app_name = "mentorship" urlpatterns = [ - path('academy/service', ServiceView.as_view(), name='academy_service'), - path('academy/service/<int:service_id>', ServiceView.as_view(), name='academy_service_id'), - path('academy/mentor', MentorView.as_view(), name='academy_mentor'), - path('academy/agent', AgentView.as_view(), name='academy_agent'), - path('academy/supportchannel', SupportChannelView.as_view(), name='academy_supportchannel'), - path('academy/mentor/<int:mentor_id>', MentorView.as_view(), name='academy_mentor_id'), - path('academy/mentor/<int:mentor_id>/session', MentorSessionView.as_view(), name='academy_mentor_id_session'), - path('academy/session', SessionView.as_view(), name='academy_session'), - path('academy/session/<int:session_id>', SessionView.as_view(), name='academy_session_id'), - path('academy/service/<int:service_id>/session', ServiceSessionView.as_view(), name='academy_service_id_session'), - path('academy/bill', BillView.as_view(), name='academy_bill'), - path('academy/bill/<int:bill_id>', BillView.as_view(), name='academy_bill_id'), - path('academy/bill/<int:id>/html', render_html_bill, name='academy_bill_id_html'), - path('academy/mentor/<int:mentor_id>/bill', BillView.as_view(), name='academy_mentor_id_bill'), - path('user/me/session', UserMeSessionView.as_view(), name='user_session'), - path('user/me/bill', UserMeBillView.as_view(), name='user_bill'), - + path("academy/service", ServiceView.as_view(), name="academy_service"), + path("academy/service/<int:service_id>", ServiceView.as_view(), name="academy_service_id"), + path("academy/mentor", MentorView.as_view(), name="academy_mentor"), + path("academy/agent", AgentView.as_view(), name="academy_agent"), + path("academy/supportchannel", SupportChannelView.as_view(), name="academy_supportchannel"), + path("academy/mentor/<int:mentor_id>", MentorView.as_view(), name="academy_mentor_id"), + path("academy/mentor/<int:mentor_id>/session", MentorSessionView.as_view(), name="academy_mentor_id_session"), + path("academy/session", SessionView.as_view(), name="academy_session"), + path("academy/session/<int:session_id>", SessionView.as_view(), name="academy_session_id"), + path("academy/service/<int:service_id>/session", ServiceSessionView.as_view(), name="academy_service_id_session"), + path("academy/bill", BillView.as_view(), name="academy_bill"), + path("academy/bill/<int:bill_id>", BillView.as_view(), name="academy_bill_id"), + path("academy/bill/<int:id>/html", render_html_bill, name="academy_bill_id_html"), + path("academy/mentor/<int:mentor_id>/bill", BillView.as_view(), name="academy_mentor_id_bill"), + 
path("user/me/session", UserMeSessionView.as_view(), name="user_session"), + path("user/me/bill", UserMeBillView.as_view(), name="user_bill"), # Public Endpoints for marketing purposes - path('public/mentor', PublicMentorView.as_view(), name='public_mentor'), - + path("public/mentor", PublicMentorView.as_view(), name="public_mentor"), # hash belongs to the calendly organization - path('calendly/webhook/<str:org_hash>', calendly_webhook, name='calendly_webhook_id'), - path('academy/calendly/organization', AcademyCalendlyOrgView.as_view(), name='academy_calendly_organization'), + path("calendly/webhook/<str:org_hash>", calendly_webhook, name="calendly_webhook_id"), + path("academy/calendly/organization", AcademyCalendlyOrgView.as_view(), name="academy_calendly_organization"), ] diff --git a/breathecode/mentorship/urls_shortner.py b/breathecode/mentorship/urls_shortner.py index 09c31ef4d..977379054 100644 --- a/breathecode/mentorship/urls_shortner.py +++ b/breathecode/mentorship/urls_shortner.py @@ -1,12 +1,17 @@ from django.urls import path -from .views import (forward_booking_url, forward_booking_url_by_service, forward_meet_url, end_mentoring_session, - pick_mentorship_service) +from .views import ( + forward_booking_url, + forward_booking_url_by_service, + forward_meet_url, + end_mentoring_session, + pick_mentorship_service, +) -app_name = 'mentorship' +app_name = "mentorship" urlpatterns = [ - path('<slug:mentor_slug>', forward_booking_url, name='slug'), - path('<slug:mentor_slug>/service/<slug:service_slug>', forward_booking_url_by_service, name='slug_service_slug'), - path('meet/<slug:mentor_slug>', pick_mentorship_service, name='meet_slug'), - path('meet/<slug:mentor_slug>/service/<slug:service_slug>', forward_meet_url, name='meet_slug_service_slug'), - path('session/<int:session_id>', end_mentoring_session, name='session_id'), + path("<slug:mentor_slug>", forward_booking_url, name="slug"), + path("<slug:mentor_slug>/service/<slug:service_slug>", forward_booking_url_by_service, name="slug_service_slug"), + path("meet/<slug:mentor_slug>", pick_mentorship_service, name="meet_slug"), + path("meet/<slug:mentor_slug>/service/<slug:service_slug>", forward_meet_url, name="meet_slug_service_slug"), + path("session/<int:session_id>", end_mentoring_session, name="session_id"), ] diff --git a/breathecode/mentorship/views.py b/breathecode/mentorship/views.py index 26dc5a85c..ddb98a037 100644 --- a/breathecode/mentorship/views.py +++ b/breathecode/mentorship/views.py @@ -78,36 +78,36 @@ logger = logging.getLogger(__name__) -@api_view(['POST']) +@api_view(["POST"]) @permission_classes([AllowAny]) @renderer_classes([PlainTextRenderer]) def calendly_webhook(request, org_hash): # Your application's webhook signing key - webhook_signing_key = os.getenv('CALENDLY_WEBHOOK_SIGNING_KEY') + webhook_signing_key = os.getenv("CALENDLY_WEBHOOK_SIGNING_KEY") # Extract the timestamp and signature from the header - calendly_signature = request.headers.get('Calendly-Webhook-Signature') - signature_hash = dict(item.split('=') for item in calendly_signature.split(',')) + calendly_signature = request.headers.get("Calendly-Webhook-Signature") + signature_hash = dict(item.split("=") for item in calendly_signature.split(",")) - t = signature_hash.get('t') # UNIX timestamp - signature = signature_hash.get('v1') + t = signature_hash.get("t") # UNIX timestamp + signature = signature_hash.get("v1") if t is None or signature is None: - raise ValidationException('Missing timestamp or signature', code=400, 
slug='missing-timestamp-or-signature') + raise ValidationException("Missing timestamp or signature", code=400, slug="missing-timestamp-or-signature") # Create the signed payload by concatenating the timestamp (t), the character '.', and the request body's JSON payload. signed_payload = f"{t}.{request.body.decode('utf-8')}" digest = hashlib.sha256 - hmac_obj = hmac.new(webhook_signing_key.encode('utf-8'), msg=signed_payload.encode('utf-8'), digestmod=digest) + hmac_obj = hmac.new(webhook_signing_key.encode("utf-8"), msg=signed_payload.encode("utf-8"), digestmod=digest) # Determine the expected signature by computing an HMAC with the SHA256 hash function. expected_signature = hmac_obj.hexdigest() if expected_signature != signature: # Signature is invalid! - raise ValidationException('Invalid webhook signature', code=400, slug='invalid-webhook-signature') + raise ValidationException("Invalid webhook signature", code=400, slug="invalid-webhook-signature") ### Prevent replay attacks ### @@ -122,25 +122,26 @@ def calendly_webhook(request, org_hash): if time.gmtime(int(t)) < time.gmtime(time.time() - tolerance): # Signature is invalid! # The signature's timestamp is outside of the tolerance zone defined above. - raise ValidationException('Invalid Signature. The signature\'s timestamp is outside of the tolerance zone.', - code=400, - slug='invalid-webhook-signature') + raise ValidationException( + "Invalid Signature. The signature's timestamp is outside of the tolerance zone.", + code=400, + slug="invalid-webhook-signature", + ) webhook = Calendly.add_webhook_to_log(request.data, org_hash) if webhook: async_calendly_webhook.delay(webhook.id) - if webhook.event == 'invitee.created': + if webhook.event == "invitee.created": async_mentorship_session_calendly_webhook.delay(webhook.id) else: - logger.debug('One request cannot be parsed, maybe you should update `Calendly' - '.add_webhook_to_log`') + logger.debug("One request cannot be parsed, maybe you should update `Calendly" ".add_webhook_to_log`") logger.debug(request.data) # async_eventbrite_webhook(request.data) - return Response('ok', content_type='text/plain') + return Response("ok", content_type="text/plain") # TODO: Use decorator with permissions @private_view(permission='view_mentorshipbill') @@ -148,21 +149,19 @@ def calendly_webhook(request, org_hash): def render_html_bill(request, token, id=None): item = MentorshipBill.objects.filter(id=id).first() if item is None: - return render_message(request, 'Bill not found') + return render_message(request, "Bill not found") serializer = BigBillSerializer(item, many=False) - status_map = {'DUE': 'UNDER_REVIEW', 'APPROVED': 'READY_TO_PAY', 'PAID': 'ALREADY PAID'} + status_map = {"DUE": "UNDER_REVIEW", "APPROVED": "READY_TO_PAY", "PAID": "ALREADY PAID"} data = { **serializer.data, - 'status': - status_map[serializer.data['status']], - 'title': - f'Mentor { serializer.data["mentor"]["user"]["first_name"] } ' + "status": status_map[serializer.data["status"]], + "title": f'Mentor { serializer.data["mentor"]["user"]["first_name"] } ' f'{ serializer.data["mentor"]["user"]["last_name"] } - Invoice { item.id }', } - template = get_template_content('mentorship_invoice', data, academy=item.academy) - return HttpResponse(template['html']) + template = get_template_content("mentorship_invoice", data, academy=item.academy) + return HttpResponse(template["html"]) @private_view() @@ -173,13 +172,13 @@ def forward_booking_url(request, mentor_slug, token): mentor = 
MentorProfile.objects.filter(slug=mentor_slug).first() if mentor is None: - return render_message(request, f'No mentor found with slug {mentor_slug}') + return render_message(request, f"No mentor found with slug {mentor_slug}") # add academy to session, will be available on html templates - request.session['academy'] = GetAcademySmallSerializer(mentor.academy).data + request.session["academy"] = GetAcademySmallSerializer(mentor.academy).data - if mentor.status not in ['ACTIVE', 'UNLISTED']: - return render_message(request, 'This mentor is not active', academy=mentor.academy) + if mentor.status not in ["ACTIVE", "UNLISTED"]: + return render_message(request, "This mentor is not active", academy=mentor.academy) try: actions.mentor_is_ready(mentor) @@ -188,36 +187,40 @@ def forward_booking_url(request, mentor_slug, token): logger.exception(e) return render_message( request, - 'This mentor is not ready, please contact the mentor directly or anyone from the academy staff.', - academy=mentor.academy) + "This mentor is not ready, please contact the mentor directly or anyone from the academy staff.", + academy=mentor.academy, + ) booking_url = mentor.booking_url - if '?' not in booking_url: - booking_url += '?' + if "?" not in booking_url: + booking_url += "?" obj = {} if mentor.academy: - obj['COMPANY_INFO_EMAIL'] = mentor.academy.feedback_email - obj['COMPANY_LEGAL_NAME'] = mentor.academy.legal_name or mentor.academy.name - obj['COMPANY_LOGO'] = mentor.academy.logo_url - obj['COMPANY_NAME'] = mentor.academy.name + obj["COMPANY_INFO_EMAIL"] = mentor.academy.feedback_email + obj["COMPANY_LEGAL_NAME"] = mentor.academy.legal_name or mentor.academy.name + obj["COMPANY_LOGO"] = mentor.academy.logo_url + obj["COMPANY_NAME"] = mentor.academy.name - if 'heading' not in obj: - obj['heading'] = mentor.academy.name + if "heading" not in obj: + obj["heading"] = mentor.academy.name return render( - request, 'book_session.html', { - 'SUBJECT': 'Mentoring Session', - 'mentor': mentor, - 'mentee': token.user, - 'booking_url': booking_url, - 'LOGO_IN_CONTENT': True, + request, + "book_session.html", + { + "SUBJECT": "Mentoring Session", + "mentor": mentor, + "mentee": token.user, + "booking_url": booking_url, + "LOGO_IN_CONTENT": True, **obj, - }) + }, + ) @private_view() -@has_permission('join_mentorship') +@has_permission("join_mentorship") def forward_booking_url_by_service(request, mentor_slug, token): # now = timezone.now() if isinstance(token, HttpResponseRedirect): @@ -225,13 +228,13 @@ def forward_booking_url_by_service(request, mentor_slug, token): mentor = MentorProfile.objects.filter(slug=mentor_slug).first() if mentor is None: - return render_message(request, f'No mentor found with slug {mentor_slug}') + return render_message(request, f"No mentor found with slug {mentor_slug}") # add academy to session, will be available on html templates - request.session['academy'] = GetAcademySmallSerializer(mentor.academy).data + request.session["academy"] = GetAcademySmallSerializer(mentor.academy).data - if mentor.status not in ['ACTIVE', 'UNLISTED']: - return render_message(request, 'This mentor is not active', academy=mentor.academy) + if mentor.status not in ["ACTIVE", "UNLISTED"]: + return render_message(request, "This mentor is not active", academy=mentor.academy) try: actions.mentor_is_ready(mentor) @@ -240,38 +243,43 @@ def forward_booking_url_by_service(request, mentor_slug, token): logger.exception(e) return render_message( request, - 'This mentor is not ready, please contact the mentor directly or 
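Editor's aside: both booking views append a bare `?` when the mentor's `booking_url` has no query string, so that later `&key=value` concatenation stays valid (the `ForwardMeetUrl` class below does the same for `baseUrl`). A generic `urllib.parse` equivalent is sketched here only as an illustration; it is not the project's `set_query_parameter` helper.

```python
from urllib.parse import parse_qsl, urlencode, urlsplit, urlunsplit


def with_query_param(url: str, key: str, value: str) -> str:
    """Append or replace one query parameter, whether or not the URL already has a '?'."""
    scheme, netloc, path, query, fragment = urlsplit(url)
    params = dict(parse_qsl(query))
    params[key] = value
    return urlunsplit((scheme, netloc, path, urlencode(params), fragment))


# e.g. with_query_param("https://calendly.com/mentor", "utm_source", "academy")
#  -> "https://calendly.com/mentor?utm_source=academy"
```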
anyone from the academy staff.', - academy=mentor.academy) + "This mentor is not ready, please contact the mentor directly or anyone from the academy staff.", + academy=mentor.academy, + ) booking_url = mentor.booking_url - if '?' not in booking_url: - booking_url += '?' + if "?" not in booking_url: + booking_url += "?" obj = {} if mentor.academy: - obj['COMPANY_INFO_EMAIL'] = mentor.academy.feedback_email - obj['COMPANY_LEGAL_NAME'] = mentor.academy.legal_name or mentor.academy.name - obj['COMPANY_LOGO'] = mentor.academy.logo_url - obj['COMPANY_NAME'] = mentor.academy.name + obj["COMPANY_INFO_EMAIL"] = mentor.academy.feedback_email + obj["COMPANY_LEGAL_NAME"] = mentor.academy.legal_name or mentor.academy.name + obj["COMPANY_LOGO"] = mentor.academy.logo_url + obj["COMPANY_NAME"] = mentor.academy.name - if 'heading' not in obj: - obj['heading'] = mentor.academy.name + if "heading" not in obj: + obj["heading"] = mentor.academy.name - return render(request, 'book_session.html', { - 'SUBJECT': 'Mentoring Session', - 'mentor': mentor, - 'mentee': token.user, - 'booking_url': booking_url, - **obj, - }) + return render( + request, + "book_session.html", + { + "SUBJECT": "Mentoring Session", + "mentor": mentor, + "mentee": token.user, + "booking_url": booking_url, + **obj, + }, + ) @private_view() def pick_mentorship_service(request, token, mentor_slug): - base_url = request.get_full_path().split('?')[0] + base_url = request.get_full_path().split("?")[0] mentor = MentorProfile.objects.filter(slug=mentor_slug).first() if mentor is None: - return render_message(request, f'No mentor found with slug {mentor_slug}') + return render_message(request, f"No mentor found with slug {mentor_slug}") try: actions.mentor_is_ready(mentor) @@ -281,30 +289,35 @@ def pick_mentorship_service(request, token, mentor_slug): return render_message( request, - 'This mentor is not ready, please contact the mentor directly or anyone from the academy staff.', - academy=mentor.academy) + "This mentor is not ready, please contact the mentor directly or anyone from the academy staff.", + academy=mentor.academy, + ) services = mentor.services.all() if not services: - return render_message(request, 'This mentor is not available on any service', academy=mentor.academy) + return render_message(request, "This mentor is not available on any service", academy=mentor.academy) obj = {} if mentor.academy: - obj['COMPANY_INFO_EMAIL'] = mentor.academy.feedback_email - obj['COMPANY_LEGAL_NAME'] = mentor.academy.legal_name or mentor.academy.name - obj['COMPANY_LOGO'] = mentor.academy.logo_url - obj['COMPANY_NAME'] = mentor.academy.name + obj["COMPANY_INFO_EMAIL"] = mentor.academy.feedback_email + obj["COMPANY_LEGAL_NAME"] = mentor.academy.legal_name or mentor.academy.name + obj["COMPANY_LOGO"] = mentor.academy.logo_url + obj["COMPANY_NAME"] = mentor.academy.name - if 'heading' not in obj: - obj['heading'] = mentor.academy.name + if "heading" not in obj: + obj["heading"] = mentor.academy.name - return render(request, 'pick_service.html', { - 'token': token.key, - 'services': services, - 'mentor': mentor, - 'baseUrl': base_url, - **obj, - }) + return render( + request, + "pick_service.html", + { + "token": token.key, + "services": services, + "mentor": mentor, + "baseUrl": base_url, + **obj, + }, + ) class ForwardMeetUrl: @@ -318,11 +331,11 @@ def __init__(self, request, mentor_profile, mentorship_service, token): self.mentor = mentor_profile self.service = mentorship_service - if '?' not in self.baseUrl: - self.baseUrl += '?' + if "?" 
not in self.baseUrl: + self.baseUrl += "?" def querystring(self): - params = ['redirect', 'extend', 'session', 'mentee'] + params = ["redirect", "extend", "session", "mentee"] result = {} for param in params: @@ -333,117 +346,129 @@ def querystring(self): def render_pick_session(self, mentor, sessions): obj = {} if self.mentor.academy: - obj['COMPANY_INFO_EMAIL'] = self.mentor.academy.feedback_email - obj['COMPANY_LEGAL_NAME'] = self.mentor.academy.legal_name or self.mentor.academy.name - obj['COMPANY_LOGO'] = self.mentor.academy.logo_url - obj['COMPANY_NAME'] = self.mentor.academy.name + obj["COMPANY_INFO_EMAIL"] = self.mentor.academy.feedback_email + obj["COMPANY_LEGAL_NAME"] = self.mentor.academy.legal_name or self.mentor.academy.name + obj["COMPANY_LOGO"] = self.mentor.academy.logo_url + obj["COMPANY_NAME"] = self.mentor.academy.name - if 'heading' not in obj: - obj['heading'] = self.mentor.academy.name + if "heading" not in obj: + obj["heading"] = self.mentor.academy.name return render( - self.request, 'pick_session.html', { - 'token': self.token.key, - 'mentor': GETMentorBigSerializer(mentor, many=False).data, - 'SUBJECT': 'Mentoring Session', - 'sessions': GETSessionReportSerializer(sessions, many=True).data, - 'baseUrl': self.baseUrl, + self.request, + "pick_session.html", + { + "token": self.token.key, + "mentor": GETMentorBigSerializer(mentor, many=False).data, + "SUBJECT": "Mentoring Session", + "sessions": GETSessionReportSerializer(sessions, many=True).data, + "baseUrl": self.baseUrl, **obj, - }) + }, + ) def render_pick_mentee(self, mentor, session): obj = {} if mentor.academy: - obj['COMPANY_INFO_EMAIL'] = mentor.academy.feedback_email - obj['COMPANY_LEGAL_NAME'] = mentor.academy.legal_name or mentor.academy.name - obj['COMPANY_LOGO'] = mentor.academy.logo_url - obj['COMPANY_NAME'] = mentor.academy.name + obj["COMPANY_INFO_EMAIL"] = mentor.academy.feedback_email + obj["COMPANY_LEGAL_NAME"] = mentor.academy.legal_name or mentor.academy.name + obj["COMPANY_LOGO"] = mentor.academy.logo_url + obj["COMPANY_NAME"] = mentor.academy.name - if 'heading' not in obj: - obj['heading'] = mentor.academy.name + if "heading" not in obj: + obj["heading"] = mentor.academy.name return render( - self.request, 'pick_mentee.html', { - 'token': self.token.key, - 'mentor': GETMentorBigSerializer(mentor, many=False).data, - 'SUBJECT': 'Mentoring Session', - 'sessions': GETSessionReportSerializer(session, many=False).data, - 'baseUrl': self.baseUrl, - }) + self.request, + "pick_mentee.html", + { + "token": self.token.key, + "mentor": GETMentorBigSerializer(mentor, many=False).data, + "SUBJECT": "Mentoring Session", + "sessions": GETSessionReportSerializer(session, many=False).data, + "baseUrl": self.baseUrl, + }, + ) def render_end_session(self, message, btn_url, status=200): - return render_message(self.request, - message, - btn_label='End Session', - btn_url=btn_url, - btn_target='_self', - status=status, - academy=self.mentor.academy) + return render_message( + self.request, + message, + btn_label="End Session", + btn_url=btn_url, + btn_target="_self", + status=status, + academy=self.mentor.academy, + ) def get_user_name(self, user, default): - name = '' + name = "" if user.first_name: name = user.first_name if user.last_name: - name += ' ' + user.last_name + name += " " + user.last_name - name = re.sub(r'(\S) +(\S)', r'\1 \2', name).strip() + name = re.sub(r"(\S) +(\S)", r"\1 \2", name).strip() if not name: name = default return name def render_start_session(self, session): - student_name = 
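Editor's aside: the `COMPANY_INFO_EMAIL` / `COMPANY_LEGAL_NAME` / `COMPANY_LOGO` / `COMPANY_NAME` / `heading` context block is rebuilt by hand in most of the render paths above and below. A hypothetical consolidation, not part of the diff, could look like this:

```python
def academy_branding(academy) -> dict:
    """Template context shared by the mentorship HTML views (illustrative helper)."""
    if academy is None:
        return {}
    return {
        "COMPANY_INFO_EMAIL": academy.feedback_email,
        "COMPANY_LEGAL_NAME": academy.legal_name or academy.name,
        "COMPANY_LOGO": academy.logo_url,
        "COMPANY_NAME": academy.name,
        "heading": academy.name,
    }
```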
self.get_user_name(session.mentee, 'student') - mentor_name = self.get_user_name(session.mentor.user, 'a mentor') - link = set_query_parameter('?' + self.request.GET.urlencode(), 'redirect', 'true') - message = (f'Hello {student_name}, you are about to start a {session.service.name} ' - f'with {mentor_name}.') + student_name = self.get_user_name(session.mentee, "student") + mentor_name = self.get_user_name(session.mentor.user, "a mentor") + link = set_query_parameter("?" + self.request.GET.urlencode(), "redirect", "true") + message = f"Hello {student_name}, you are about to start a {session.service.name} " f"with {mentor_name}." obj = {} if session.mentor.academy: - obj['COMPANY_INFO_EMAIL'] = session.mentor.academy.feedback_email - obj['COMPANY_LEGAL_NAME'] = session.mentor.academy.legal_name or session.mentor.academy.name - obj['COMPANY_LOGO'] = session.mentor.academy.logo_url - obj['COMPANY_NAME'] = session.mentor.academy.name + obj["COMPANY_INFO_EMAIL"] = session.mentor.academy.feedback_email + obj["COMPANY_LEGAL_NAME"] = session.mentor.academy.legal_name or session.mentor.academy.name + obj["COMPANY_LOGO"] = session.mentor.academy.logo_url + obj["COMPANY_NAME"] = session.mentor.academy.name - if 'heading' not in obj: - obj['heading'] = session.mentor.academy.name + if "heading" not in obj: + obj["heading"] = session.mentor.academy.name return render( - self.request, 'message.html', { - 'SUBJECT': 'Mentoring Session', - 'BUTTON': 'Start Session', - 'BUTTON_TARGET': '_self', - 'LINK': link, - 'MESSAGE': message, + self.request, + "message.html", + { + "SUBJECT": "Mentoring Session", + "BUTTON": "Start Session", + "BUTTON_TARGET": "_self", + "LINK": link, + "MESSAGE": message, **obj, - }) + }, + ) def get_pending_sessions_or_create(self, mentor, service, mentee): # if specific sessions is being loaded - if self.query_params['session'] is not None: - sessions = MentorshipSession.objects.filter(id=self.query_params['session']) + if self.query_params["session"] is not None: + sessions = MentorshipSession.objects.filter(id=self.query_params["session"]) if sessions.count() == 0: - return render_message(self.request, - f'Session with id {self.query_params["session"]} not found', - status=404, - academy=mentor.academy) + return render_message( + self.request, + f'Session with id {self.query_params["session"]} not found', + status=404, + academy=mentor.academy, + ) # set service if is null sessions.filter(service__isnull=True).update(service=service) else: sessions = actions.get_pending_sessions_or_create(self.token, mentor, service, mentee) - logger.debug(f'Found {sessions.count()} sessions to close or create') + logger.debug(f"Found {sessions.count()} sessions to close or create") return sessions def get_session(self, sessions, mentee): - if self.query_params['session'] is not None: - session = sessions.filter(id=self.query_params['session']).first() + if self.query_params["session"] is not None: + session = sessions.filter(id=self.query_params["session"]).first() else: session = sessions.filter(Q(mentee=mentee) | Q(mentee__isnull=True)).first() # if the session.mentee is None it means the mentor had some pending unstarted session @@ -451,10 +476,12 @@ def get_session(self, sessions, mentee): if session and session.mentee is None and mentee is not None: session.mentee = mentee if self.token.user.id == mentee.id: - tasks_activity.add_activity.delay(mentee.id, - 'mentorship_session_checkin', - related_type='mentorship.MentorshipSession', - related_id=session.id) + 
tasks_activity.add_activity.delay( + mentee.id, + "mentorship_session_checkin", + related_type="mentorship.MentorshipSession", + related_id=session.id, + ) return session @@ -466,13 +493,12 @@ def __call__(self): service = self.service # add academy to session, will be available on html templates - self.request.session['academy'] = GetAcademySmallSerializer(mentor.academy).data + self.request.session["academy"] = GetAcademySmallSerializer(mentor.academy).data - if mentor.status not in ['ACTIVE', 'UNLISTED']: - return render_message(self.request, - 'This mentor is not active at the moment', - status=400, - academy=mentor.academy) + if mentor.status not in ["ACTIVE", "UNLISTED"]: + return render_message( + self.request, "This mentor is not active at the moment", status=400, academy=mentor.academy + ) try: actions.mentor_is_ready(mentor) @@ -480,9 +506,10 @@ def __call__(self): except Exception: return render_message( self.request, - 'This mentor is not ready, please contact the mentor directly or anyone from the academy staff.', + "This mentor is not ready, please contact the mentor directly or anyone from the academy staff.", status=400, - academy=mentor.academy) + academy=mentor.academy, + ) is_token_of_mentee = mentor.user.id != self.token.user.id @@ -493,52 +520,60 @@ def __call__(self): except Exception as e: return render_message(self.request, str(e), status=400, academy=mentor.academy) - if not is_token_of_mentee and sessions.count() > 0 and str(sessions.first().id) != self.query_params['session']: + if not is_token_of_mentee and sessions.count() > 0 and str(sessions.first().id) != self.query_params["session"]: return self.render_pick_session(mentor, sessions) # this also set the session.mentee session = self.get_session(sessions, mentee) if session is None: - name = self.get_user_name(mentor.user, 'the mentor') + name = self.get_user_name(mentor.user, "the mentor") - return render_message(self.request, - f'Impossible to create or retrieve mentoring session with {name}.', - status=400, - academy=mentor.academy) + return render_message( + self.request, + f"Impossible to create or retrieve mentoring session with {name}.", + status=400, + academy=mentor.academy, + ) - is_mentee_params_set = bool(self.query_params['mentee']) - is_mentee_params_undefined = self.query_params['mentee'] == 'undefined' + is_mentee_params_set = bool(self.query_params["mentee"]) + is_mentee_params_undefined = self.query_params["mentee"] == "undefined" # passing mentee query param if session.mentee is None and is_mentee_params_set and not is_mentee_params_undefined: - session.mentee = User.objects.filter(id=self.query_params['mentee']).first() + session.mentee = User.objects.filter(id=self.query_params["mentee"]).first() if session.mentee is None: return render_message( - self.request, f'Mentee with user id {self.query_params["mentee"]} was not found, ' + self.request, + f'Mentee with user id {self.query_params["mentee"]} was not found, ' f'<a href="{self.baseUrl}&mentee=undefined">click here to start the session anyway.</a>', - academy=mentor.academy) + academy=mentor.academy, + ) # passing a invalid mentee query param if session.mentee is None and not is_mentee_params_undefined: return self.render_pick_mentee(mentor, session) # session ended - if session.status not in ['PENDING', 'STARTED']: - return render_message(self.request, f'This mentoring session has ended ({session.status}), would you like ' - f'<a href="/mentor/meet/{mentor.slug}">to start a new one?</a>.', - status=400, - 
academy=session.mentor.academy) + if session.status not in ["PENDING", "STARTED"]: + return render_message( + self.request, + f"This mentoring session has ended ({session.status}), would you like " + f'<a href="/mentor/meet/{mentor.slug}">to start a new one?</a>.', + status=400, + academy=session.mentor.academy, + ) # Who is joining? Set meeting join in dates if not is_token_of_mentee: # only reset the joined_at it has ben more than 5min and the session has not started yey - if session.mentor_joined_at is None or (session.started_at is None and - ((self.now - session.mentor_joined_at).seconds > 300)): + if session.mentor_joined_at is None or ( + session.started_at is None and ((self.now - session.mentor_joined_at).seconds > 300) + ): session.mentor_joined_at = self.now - elif self.query_params['redirect'] is not None and session.mentee.id == self.token.user.id: + elif self.query_params["redirect"] is not None and session.mentee.id == self.token.user.id: if session.started_at is None: session.started_at = self.now - session.status = 'STARTED' + session.status = "STARTED" # if it expired already you could extend it service = session.service @@ -547,36 +582,42 @@ def __call__(self): # can extend this session? if session_ends_in_the_pass and (self.now - session.ends_at).total_seconds() > (service.duration.seconds / 2): return HttpResponseRedirect( - redirect_to=f'/mentor/session/{str(session.id)}?token={self.token.key}&message=You have a session that ' - f'expired {timeago.format(session.ends_at, self.now)}. Only sessions with less than ' - f'{round(((session.service.duration.total_seconds() / 3600) * 60)/2)}min from ' - 'expiration can be extended (if allowed by the academy)') + redirect_to=f"/mentor/session/{str(session.id)}?token={self.token.key}&message=You have a session that " + f"expired {timeago.format(session.ends_at, self.now)}. 
Only sessions with less than " + f"{round(((session.service.duration.total_seconds() / 3600) * 60)/2)}min from " + "expiration can be extended (if allowed by the academy)" + ) - if session_ends_in_the_pass and ((is_token_of_mentee and service.allow_mentee_to_extend) or - (not is_token_of_mentee and service.allow_mentors_to_extend)): + if session_ends_in_the_pass and ( + (is_token_of_mentee and service.allow_mentee_to_extend) + or (not is_token_of_mentee and service.allow_mentors_to_extend) + ): - if self.query_params['extend'] == 'true': + if self.query_params["extend"] == "true": try: session = actions.extend_session(session) except ExtendSessionException as e: - return self.render_end_session(str(e), - btn_url=f'/mentor/session/{str(session.id)}?token={self.token.key}', - status=400) + return self.render_end_session( + str(e), btn_url=f"/mentor/session/{str(session.id)}?token={self.token.key}", status=400 + ) - extend_url = set_query_parameter(self.request.get_full_path(), 'extend', 'true') + extend_url = set_query_parameter(self.request.get_full_path(), "extend", "true") return self.render_end_session( - f'The mentoring session expired {timeago.format(session.ends_at, self.now)}: You can ' + f"The mentoring session expired {timeago.format(session.ends_at, self.now)}: You can " f'<a href="{extend_url}">extend it for another 30 minutes</a> or end the session right ' - 'now.', - btn_url=f'/mentor/session/{str(session.id)}?token={self.token.key}') + "now.", + btn_url=f"/mentor/session/{str(session.id)}?token={self.token.key}", + ) elif session_ends_in_the_pass: - return render_message(self.request, - f'The mentoring session expired {timeago.format(session.ends_at, self.now)} and it ' - 'cannot be extended.', - status=400, - academy=mentor.academy) + return render_message( + self.request, + f"The mentoring session expired {timeago.format(session.ends_at, self.now)} and it " + "cannot be extended.", + status=400, + academy=mentor.academy, + ) # save progress so far, we are about to render the session below session.save() @@ -584,78 +625,82 @@ def __call__(self): if session.mentee is None: return render_session(self.request, session, token=self.token) - if self.query_params['redirect'] is not None or self.token.user.id == session.mentor.user.id: + if self.query_params["redirect"] is not None or self.token.user.id == session.mentor.user.id: return render_session(self.request, session, token=self.token) return self.render_start_session(session) @private_view() -@consume('join_mentorship', consumer=mentorship_service_by_url_param, format='html') +@consume("join_mentorship", consumer=mentorship_service_by_url_param, format="html") def forward_meet_url(request, mentor_profile, mentorship_service, token): handler = ForwardMeetUrl(request, mentor_profile, mentorship_service, token) return handler() -#FIXME: create a endpoint to consume the service, split the function in two +# FIXME: create a endpoint to consume the service, split the function in two @private_view() def end_mentoring_session(request, session_id, token): now = timezone.now() - if request.method == 'POST': + if request.method == "POST": _dict = request.POST.copy() form = CloseMentoringSessionForm(_dict) - token_key = _dict.get('token') + token_key = _dict.get("token") token = Token.objects.filter(key=token_key).first() if token is None or (token.expires_at is not None and token.expires_at < now): - messages.error(request, 'Invalid or expired deliver token.') - return render(request, 'form.html', {'form': form}) + 
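Editor's aside: the expiry/extension branch above boils down to a single rule: an already-ended session may only be extended while it has been over for less than half of the service duration, which is also what the `round(((... / 3600) * 60) / 2)` expression reports in minutes. A self-contained restatement under that assumption (note the view compares against `service.duration.seconds`, which differs from `total_seconds()` for durations of a day or more):

```python
from datetime import datetime, timedelta


def can_still_extend(now: datetime, ends_at: datetime, service_duration: timedelta) -> bool:
    """An expired session is extendable only within half of the service duration past its end."""
    overdue = (now - ends_at).total_seconds()
    return 0 <= overdue <= service_duration.total_seconds() / 2


# A 1-hour session that ended 20 minutes ago can still be extended; 40 minutes ago it cannot.
assert can_still_extend(datetime(2024, 1, 1, 12, 20), datetime(2024, 1, 1, 12, 0), timedelta(hours=1))
assert not can_still_extend(datetime(2024, 1, 1, 12, 40), datetime(2024, 1, 1, 12, 0), timedelta(hours=1))
```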
messages.error(request, "Invalid or expired deliver token.") + return render(request, "form.html", {"form": form}) - session_id_from_body = _dict.get('session_id') + session_id_from_body = _dict.get("session_id") session = MentorshipSession.objects.filter(id=session_id_from_body).first() if session is None: - messages.error(request, 'Invalid session id.') - return render(request, 'form.html', {'form': form}) + messages.error(request, "Invalid session id.") + return render(request, "form.html", {"form": form}) if form.is_valid(): if close_mentoring_session(session=session, data=_dict): - pending_sessions = MentorshipSession.objects.filter(mentor__id=session.mentor.id, - status__in=['STARTED', 'PENDING']) + pending_sessions = MentorshipSession.objects.filter( + mentor__id=session.mentor.id, status__in=["STARTED", "PENDING"] + ) obj = {} if session.mentor.academy: - obj['COMPANY_INFO_EMAIL'] = session.mentor.academy.feedback_email - obj['COMPANY_LEGAL_NAME'] = session.mentor.academy.legal_name or session.mentor.academy.name - obj['COMPANY_LOGO'] = session.mentor.academy.logo_url - obj['COMPANY_NAME'] = session.mentor.academy.name + obj["COMPANY_INFO_EMAIL"] = session.mentor.academy.feedback_email + obj["COMPANY_LEGAL_NAME"] = session.mentor.academy.legal_name or session.mentor.academy.name + obj["COMPANY_LOGO"] = session.mentor.academy.logo_url + obj["COMPANY_NAME"] = session.mentor.academy.name - if 'heading' not in obj: - obj['heading'] = session.mentor.academy.name + if "heading" not in obj: + obj["heading"] = session.mentor.academy.name return render( - request, 'close_session.html', { - 'token': token.key, - 'message': - f'The mentoring session was closed successfully, you can close this window or <a href="/mentor/meet/{session.mentor.slug}?token={token.key}">go back to your meeting room.</a>', - 'mentor': GETMentorBigSerializer(session.mentor, many=False).data, - 'SUBJECT': 'Close Mentoring Session', - 'sessions': GETSessionReportSerializer(pending_sessions, many=True).data, - 'baseUrl': request.get_full_path(), + request, + "close_session.html", + { + "token": token.key, + "message": f'The mentoring session was closed successfully, you can close this window or <a href="/mentor/meet/{session.mentor.slug}?token={token.key}">go back to your meeting room.</a>', + "mentor": GETMentorBigSerializer(session.mentor, many=False).data, + "SUBJECT": "Close Mentoring Session", + "sessions": GETSessionReportSerializer(pending_sessions, many=True).data, + "baseUrl": request.get_full_path(), **obj, - }) + }, + ) else: - return render_message(request, - 'There was a problem ending the mentoring session', - academy=session.mentor.academy) + return render_message( + request, "There was a problem ending the mentoring session", academy=session.mentor.academy + ) - elif request.method == 'GET': + elif request.method == "GET": session = MentorshipSession.objects.filter(id=session_id).first() if session is None: - return render_message(request, f'Session not found with id {str(session_id)}') + return render_message(request, f"Session not found with id {str(session_id)}") # add academy to session, will be available on html templates - request.session['academy'] = (GetAcademySmallSerializer(session.service.academy).data - if session.service else None) + request.session["academy"] = ( + GetAcademySmallSerializer(session.service.academy).data if session.service else None + ) # this GET request occurs when the mentor leaves the session session.mentor_left_at = now @@ -664,86 +709,92 @@ def 
end_mentoring_session(request, session_id, token): mentee = session.mentee if mentee is None: - session.status = 'FAILED' - session.summary = ('This session expired without assigned mentee, it probably means the mentee ' - 'never came. It will be marked as failed') + session.status = "FAILED" + session.summary = ( + "This session expired without assigned mentee, it probably means the mentee " + "never came. It will be marked as failed" + ) session.save() - pending_sessions = MentorshipSession.objects.filter(mentor__id=session.mentor.id, - status__in=['STARTED', 'PENDING']) + pending_sessions = MentorshipSession.objects.filter( + mentor__id=session.mentor.id, status__in=["STARTED", "PENDING"] + ) obj = {} if session.mentor.academy: - obj['COMPANY_INFO_EMAIL'] = session.mentor.academy.feedback_email - obj['COMPANY_LEGAL_NAME'] = session.mentor.academy.legal_name or session.mentor.academy.name - obj['COMPANY_LOGO'] = session.mentor.academy.logo_url - obj['COMPANY_NAME'] = session.mentor.academy.name + obj["COMPANY_INFO_EMAIL"] = session.mentor.academy.feedback_email + obj["COMPANY_LEGAL_NAME"] = session.mentor.academy.legal_name or session.mentor.academy.name + obj["COMPANY_LOGO"] = session.mentor.academy.logo_url + obj["COMPANY_NAME"] = session.mentor.academy.name - if 'heading' not in obj: - obj['heading'] = session.mentor.academy.name + if "heading" not in obj: + obj["heading"] = session.mentor.academy.name return render( - request, 'close_session.html', { - 'token': - token.key, - 'message': - 'Previous session expired without assigned mentee, it probably means the mentee never came. It was ' - 'marked as failed. Try the mentor meeting URL again.', - 'mentor': - GETMentorBigSerializer(session.mentor, many=False).data, - 'SUBJECT': - 'Close Mentoring Session', - 'sessions': - GETSessionReportSerializer(pending_sessions, many=True).data, - 'baseUrl': - request.get_full_path(), + request, + "close_session.html", + { + "token": token.key, + "message": "Previous session expired without assigned mentee, it probably means the mentee never came. It was " + "marked as failed. 
Try the mentor meeting URL again.", + "mentor": GETMentorBigSerializer(session.mentor, many=False).data, + "SUBJECT": "Close Mentoring Session", + "sessions": GETSessionReportSerializer(pending_sessions, many=True).data, + "baseUrl": request.get_full_path(), **obj, - }) + }, + ) _dict = request.GET.copy() - _dict['token'] = request.GET.get('token', None) - _dict['status'] = 'COMPLETED' - _dict['summary'] = session.summary - _dict['student_name'] = f'{mentee.first_name} {mentee.last_name}, {mentee.email}' - _dict['session_id'] = session.id + _dict["token"] = request.GET.get("token", None) + _dict["status"] = "COMPLETED" + _dict["summary"] = session.summary + _dict["student_name"] = f"{mentee.first_name} {mentee.last_name}, {mentee.email}" + _dict["session_id"] = session.id form = CloseMentoringSessionForm(_dict) - msg = request.GET.get('message', None) - if msg is not None and msg != '': + msg = request.GET.get("message", None) + if msg is not None and msg != "": messages.info(request, msg) obj = {} if session.mentor.academy: - obj['COMPANY_INFO_EMAIL'] = session.mentor.academy.feedback_email - obj['COMPANY_LEGAL_NAME'] = session.mentor.academy.legal_name or session.mentor.academy.name - obj['COMPANY_LOGO'] = session.mentor.academy.logo_url - obj['COMPANY_NAME'] = session.mentor.academy.name + obj["COMPANY_INFO_EMAIL"] = session.mentor.academy.feedback_email + obj["COMPANY_LEGAL_NAME"] = session.mentor.academy.legal_name or session.mentor.academy.name + obj["COMPANY_LOGO"] = session.mentor.academy.logo_url + obj["COMPANY_NAME"] = session.mentor.academy.name - if 'heading' not in obj: - obj['heading'] = session.mentor.academy.name + if "heading" not in obj: + obj["heading"] = session.mentor.academy.name return render( - request, 'form.html', { - 'form': form, - 'disabled': session.status not in ['PENDING', 'STARTED'], - 'btn_lable': - 'End Mentoring Session' if session.status in ['PENDING', 'STARTED'] else 'Mentoring session already ended', - 'intro': 'Please fill the following information to formally end the session', - 'title': 'End Mentoring Session', + request, + "form.html", + { + "form": form, + "disabled": session.status not in ["PENDING", "STARTED"], + "btn_lable": ( + "End Mentoring Session" + if session.status in ["PENDING", "STARTED"] + else "Mentoring session already ended" + ), + "intro": "Please fill the following information to formally end the session", + "title": "End Mentoring Session", **obj, - }) + }, + ) class ServiceView(APIView, HeaderLimitOffsetPagination, GenerateLookupsMixin): - extensions = APIViewExtensions(sort='-created_at', paginate=True) + extensions = APIViewExtensions(sort="-created_at", paginate=True) - @capable_of('read_mentorship_service') + @capable_of("read_mentorship_service") def get(self, request, service_id=None, academy_id=None): handler = self.extensions(request) if service_id is not None: service = MentorshipService.objects.filter(id=service_id, academy__id=academy_id).first() if service is None: - raise ValidationException('This service does not exist on this academy', code=404, slug='not-found') + raise ValidationException("This service does not exist on this academy", code=404, slug="not-found") serializer = GETServiceBigSerializer(service) return Response(serializer.data, status=status.HTTP_200_OK) @@ -751,13 +802,13 @@ def get(self, request, service_id=None, academy_id=None): items = MentorshipService.objects.filter(academy__id=academy_id) lookup = {} - if 'status' in self.request.GET: - param = self.request.GET.get('status') - 
lookup['status__in'] = param.split(',') + if "status" in self.request.GET: + param = self.request.GET.get("status") + lookup["status__in"] = param.split(",") - name = request.GET.get('name', None) + name = request.GET.get("name", None) if name is not None: - lookup['name__icontains'] = name + lookup["name__icontains"] = name items = items.filter(name__icontains=name) items = items.filter(**lookup) @@ -767,48 +818,46 @@ def get(self, request, service_id=None, academy_id=None): return handler.response(serializer.data) - @capable_of('crud_mentorship_service') + @capable_of("crud_mentorship_service") def post(self, request, academy_id=None): - serializer = ServicePOSTSerializer(data=request.data, context={'request': request, 'academy_id': academy_id}) + serializer = ServicePOSTSerializer(data=request.data, context={"request": request, "academy_id": academy_id}) if serializer.is_valid(): serializer.save() return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - @capable_of('crud_mentorship_service') + @capable_of("crud_mentorship_service") def put(self, request, service_id=None, academy_id=None): if service_id is None: - raise ValidationException('Missing service_id') + raise ValidationException("Missing service_id") service = MentorshipService.objects.filter(id=service_id, academy__id=academy_id).first() if service is None: - raise ValidationException('This service does not exist', code=404, slug='not-found') - - serializer = ServicePUTSerializer(service, - data=request.data, - context={ - 'request': request, - 'academy_id': academy_id - }) + raise ValidationException("This service does not exist", code=404, slug="not-found") + + serializer = ServicePUTSerializer( + service, data=request.data, context={"request": request, "academy_id": academy_id} + ) if serializer.is_valid(): serializer.save() serializer = GETServiceBigSerializer(service, many=False) return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - @capable_of('crud_event') + @capable_of("crud_event") def delete(self, request, academy_id=None, service_id=None): - lookups = self.generate_lookups(request, many_fields=['id']) + lookups = self.generate_lookups(request, many_fields=["id"]) if not lookups and not service_id: - raise ValidationException('provide arguments in the url', code=400, slug='without-lookups-and-service-id') + raise ValidationException("provide arguments in the url", code=400, slug="without-lookups-and-service-id") if lookups and service_id: - raise ValidationException('service_id in url ' - 'in bulk mode request, use querystring style instead', - code=400, - slug='lookups-and-session-id-together') + raise ValidationException( + "service_id in url " "in bulk mode request, use querystring style instead", + code=400, + slug="lookups-and-session-id-together", + ) if lookups: alls = MentorshipService.objects.filter(**lookups) @@ -816,7 +865,7 @@ def delete(self, request, academy_id=None, service_id=None): from_other_academy = alls.exclude(academy__id=academy_id) with_mentor = MentorshipService.objects.none() with_sessions = MentorshipService.objects.none() - for id in lookups['id__in']: + for id in lookups["id__in"]: mentor = MentorProfile.objects.filter(academy__id=academy_id, services=id).first() if mentor is not None: @@ -827,8 +876,8 @@ def delete(self, request, academy_id=None, service_id=None): with_sessions |= 
MentorshipService.objects.filter(id=session.service.id) valids = alls.exclude( - Q(id__in=with_mentor.all()) | Q(id__in=with_sessions.all()) - | Q(id__in=from_other_academy.all())) + Q(id__in=with_mentor.all()) | Q(id__in=with_sessions.all()) | Q(id__in=from_other_academy.all()) + ) responses = [] if valids: @@ -836,27 +885,36 @@ def delete(self, request, academy_id=None, service_id=None): if from_other_academy: responses.append( - MultiStatusResponse('Service doest not exist or does not belong to this academy', - code=400, - slug='not-found', - queryset=from_other_academy)) + MultiStatusResponse( + "Service doest not exist or does not belong to this academy", + code=400, + slug="not-found", + queryset=from_other_academy, + ) + ) if with_mentor: responses.append( - MultiStatusResponse('Only services that are not assigned to a mentor can be deleted.', - code=400, - slug='service-with-mentor', - queryset=with_mentor)) + MultiStatusResponse( + "Only services that are not assigned to a mentor can be deleted.", + code=400, + slug="service-with-mentor", + queryset=with_mentor, + ) + ) if with_sessions: responses.append( - MultiStatusResponse('Only services without a session can be deleted.', - code=400, - slug='service-with-session', - queryset=with_sessions)) + MultiStatusResponse( + "Only services without a session can be deleted.", + code=400, + slug="service-with-session", + queryset=with_sessions, + ) + ) if from_other_academy or with_mentor or with_sessions: - response = response_207(responses, 'slug') + response = response_207(responses, "slug") valids.delete() return response @@ -865,26 +923,27 @@ def delete(self, request, academy_id=None, service_id=None): service = MentorshipService.objects.filter(academy__id=academy_id, id=service_id).first() if service is None: - raise ValidationException('Service doest not exist or does not belong to this academy', slug='not-found') + raise ValidationException("Service doest not exist or does not belong to this academy", slug="not-found") mentor = MentorProfile.objects.filter(academy__id=academy_id, services=service.id).first() if mentor is not None: - raise ValidationException('Only services that are not assigned to a mentor can be deleted.', - slug='service-with-mentor') + raise ValidationException( + "Only services that are not assigned to a mentor can be deleted.", slug="service-with-mentor" + ) session = MentorshipSession.objects.filter(service=service.id).first() if session is not None: - raise ValidationException('Only services without a session can be deleted.', slug='service-with-session') + raise ValidationException("Only services without a session can be deleted.", slug="service-with-session") service.delete() return Response(None, status=status.HTTP_204_NO_CONTENT) class MentorView(APIView, HeaderLimitOffsetPagination): - extensions = APIViewExtensions(cache=MentorProfileCache, sort='-created_at', paginate=True) + extensions = APIViewExtensions(cache=MentorProfileCache, sort="-created_at", paginate=True) - @capable_of('read_mentorship_mentor') + @capable_of("read_mentorship_mentor") def get(self, request, mentor_id=None, academy_id=None): handler = self.extensions(request) @@ -895,7 +954,7 @@ def get(self, request, mentor_id=None, academy_id=None): if mentor_id is not None: mentor = MentorProfile.objects.filter(id=mentor_id, services__academy__id=academy_id).first() if mentor is None: - raise ValidationException('This mentor does not exist on this academy', code=404) + raise ValidationException("This mentor does not exist on this 
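Editor's aside: `ServiceView.delete` partitions the requested ids into deletable rows and several rejection groups, then answers with HTTP 207 so the caller learns the outcome per id. The `MultiStatusResponse` / `response_207` helpers are the project's own; the sketch below only illustrates the partitioning idea with hypothetical callables.

```python
def partition_for_bulk_delete(all_items, academy_id, has_mentor, has_session):
    """Split items into deletable rows and keyed rejection buckets (illustrative only)."""
    buckets = {"not-found": [], "service-with-mentor": [], "service-with-session": []}
    deletable = []
    for item in all_items:
        if item.academy_id != academy_id:
            buckets["not-found"].append(item)
        elif has_mentor(item):
            buckets["service-with-mentor"].append(item)
        elif has_session(item):
            buckets["service-with-session"].append(item)
        else:
            deletable.append(item)
    # Deletable rows get deleted; any non-empty bucket turns the response into a 207 multi-status.
    return deletable, {slug: items for slug, items in buckets.items() if items}
```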
academy", code=404) serializer = GETMentorBigSerializer(mentor) return Response(serializer.data, status=status.HTTP_200_OK) @@ -903,21 +962,21 @@ def get(self, request, mentor_id=None, academy_id=None): items = MentorProfile.objects.filter(academy__id=academy_id) lookup = {} - if 'services' in self.request.GET: - param = self.request.GET.get('services', '').split(',') - lookup['services__slug__in'] = param + if "services" in self.request.GET: + param = self.request.GET.get("services", "").split(",") + lookup["services__slug__in"] = param - if 'status' in self.request.GET: - param = self.request.GET.get('status', 'ACTIVE') - lookup['status__in'] = [s.strip().upper() for s in param.split(',')] + if "status" in self.request.GET: + param = self.request.GET.get("status", "ACTIVE") + lookup["status__in"] = [s.strip().upper() for s in param.split(",")] - if 'syllabus' in self.request.GET: - param = self.request.GET.get('syllabus') - lookup['syllabus__slug__in'] = [s.strip().lower() for s in param.split(',')] + if "syllabus" in self.request.GET: + param = self.request.GET.get("syllabus") + lookup["syllabus__slug__in"] = [s.strip().lower() for s in param.split(",")] - like = request.GET.get('like', None) + like = request.GET.get("like", None) if like is not None: - items = query_like_by_full_name(like=like, items=items, prefix='user__') + items = query_like_by_full_name(like=like, items=items, prefix="user__") items = items.filter(**lookup).distinct() items = handler.queryset(items) @@ -925,20 +984,22 @@ def get(self, request, mentor_id=None, academy_id=None): return handler.response(serializer.data) - @capable_of('crud_mentorship_mentor') + @capable_of("crud_mentorship_mentor") def post(self, request, academy_id=None): utc_now = timezone.now() - if not 'slug' in request.data: - raise ValidationException('Missing slug field in the request', slug='missing-slug-field') + if not "slug" in request.data: + raise ValidationException("Missing slug field in the request", slug="missing-slug-field") - token = hashlib.sha1((str(request.data['slug']) + str(utc_now)).encode('UTF-8')).hexdigest() + token = hashlib.sha1((str(request.data["slug"]) + str(utc_now)).encode("UTF-8")).hexdigest() - serializer = MentorSerializer(data={ - **request.data, - 'token': token, - 'academy': academy_id, - }) + serializer = MentorSerializer( + data={ + **request.data, + "token": token, + "academy": academy_id, + } + ) if serializer.is_valid(): mentor = serializer.save() @@ -947,58 +1008,65 @@ def post(self, request, academy_id=None): return Response(_serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - @capable_of('crud_mentorship_mentor') + @capable_of("crud_mentorship_mentor") def put(self, request, mentor_id=None, academy_id=None): lang = get_user_language(request) if mentor_id is None: - raise ValidationException('Missing mentor ID on the URL', 404) + raise ValidationException("Missing mentor ID on the URL", 404) mentor = MentorProfile.objects.filter(id=mentor_id, services__academy__id=academy_id).first() if mentor is None: - raise ValidationException('This mentor does not exist for this academy', code=404, slug='not-found') + raise ValidationException("This mentor does not exist for this academy", code=404, slug="not-found") user = ProfileAcademy.objects.filter(user__id=mentor.user.id, academy__id=academy_id).first() if user is None: raise ValidationException( - translation(lang, - en='This user does not exist for this academy', - es='Este usuario no 
existe para esta academia', - slug='profile-academy-not-found'), + translation( + lang, + en="This user does not exist for this academy", + es="Este usuario no existe para esta academia", + slug="profile-academy-not-found", + ), code=400, ) - if user.first_name is None or user.first_name.strip() == '': + if user.first_name is None or user.first_name.strip() == "": raise ValidationException( - translation(lang, - en='This mentor does not have a first name', - es='Este mentor no tiene nombre', - slug='without-first-name'), + translation( + lang, + en="This mentor does not have a first name", + es="Este mentor no tiene nombre", + slug="without-first-name", + ), code=400, ) - if user.last_name is None or user.last_name.strip() == '': + if user.last_name is None or user.last_name.strip() == "": raise ValidationException( - translation(lang, - en='This mentor does not have a last name', - es='Este mentor no tiene apellido', - slug='without-last-name'), + translation( + lang, + en="This mentor does not have a last name", + es="Este mentor no tiene apellido", + slug="without-last-name", + ), code=400, ) - if 'user' in request.data: - raise ValidationException('Mentor user cannot be updated, please create a new mentor instead', - slug='user-read-only') + if "user" in request.data: + raise ValidationException( + "Mentor user cannot be updated, please create a new mentor instead", slug="user-read-only" + ) - if 'token' in request.data: - raise ValidationException('Mentor token cannot be updated', slug='token-read-only') + if "token" in request.data: + raise ValidationException("Mentor token cannot be updated", slug="token-read-only") data = {} for key in request.data.keys(): data[key] = request.data[key] - serializer = MentorUpdateSerializer(mentor, data=data, context={'request': request, 'academy_id': academy_id}) + serializer = MentorUpdateSerializer(mentor, data=data, context={"request": request, "academy_id": academy_id}) if serializer.is_valid(): mentor = serializer.save() _serializer = GETMentorBigSerializer(mentor) @@ -1009,16 +1077,16 @@ def put(self, request, mentor_id=None, academy_id=None): class AgentView(APIView, HeaderLimitOffsetPagination): - extensions = APIViewExtensions(sort='-created_at', paginate=True) + extensions = APIViewExtensions(sort="-created_at", paginate=True) - @capable_of('read_mentorship_agent') + @capable_of("read_mentorship_agent") def get(self, request, agent_id=None, academy_id=None): handler = self.extensions(request) if agent_id is not None: agent = SupportAgent.objects.filter(id=agent_id, channel__academy__id=academy_id).first() if agent is None: - raise ValidationException('This agent does not exist on this academy', code=404) + raise ValidationException("This agent does not exist on this academy", code=404) serializer = GETAgentSmallSerializer(agent) return Response(serializer.data, status=status.HTTP_200_OK) @@ -1026,17 +1094,17 @@ def get(self, request, agent_id=None, academy_id=None): items = SupportAgent.objects.filter(channel__academy__id=academy_id) lookup = {} - if 'channel' in self.request.GET: - param = self.request.GET.get('channel', '').split(',') - lookup['channel__slug__in'] = param + if "channel" in self.request.GET: + param = self.request.GET.get("channel", "").split(",") + lookup["channel__slug__in"] = param - if 'status' in self.request.GET: - param = self.request.GET.get('status', 'ACTIVE') - lookup['status__in'] = [s.strip().upper() for s in param.split(',')] + if "status" in self.request.GET: + param = self.request.GET.get("status", 
"ACTIVE") + lookup["status__in"] = [s.strip().upper() for s in param.split(",")] - if 'syllabus' in self.request.GET: - param = self.request.GET.get('syllabus') - lookup['channel__syllabis__slug'] = param + if "syllabus" in self.request.GET: + param = self.request.GET.get("syllabus") + lookup["channel__syllabis__slug"] = param items = items.filter(**lookup) items = handler.queryset(items) @@ -1047,16 +1115,16 @@ def get(self, request, agent_id=None, academy_id=None): class SupportChannelView(APIView, HeaderLimitOffsetPagination): - extensions = APIViewExtensions(sort='-created_at', paginate=True) + extensions = APIViewExtensions(sort="-created_at", paginate=True) - @capable_of('read_mentorship_agent') + @capable_of("read_mentorship_agent") def get(self, request, supportchannel_id=None, academy_id=None): handler = self.extensions(request) if supportchannel_id is not None: channel = SupportChannel.objects.filter(id=supportchannel_id, academy__id=academy_id).first() if channel is None: - raise ValidationException('This support channel does not exist on this academy', code=404) + raise ValidationException("This support channel does not exist on this academy", code=404) serializer = GETSupportChannelSerializer(channel) return Response(serializer.data, status=status.HTTP_200_OK) @@ -1064,9 +1132,9 @@ def get(self, request, supportchannel_id=None, academy_id=None): items = SupportChannel.objects.filter(academy__id=academy_id) lookup = {} - if 'syllabus' in self.request.GET: - param = self.request.GET.get('syllabus') - lookup['syllabis__slug'] = param + if "syllabus" in self.request.GET: + param = self.request.GET.get("syllabus") + lookup["syllabis__slug"] = param items = items.filter(**lookup) items = handler.queryset(items) @@ -1076,16 +1144,16 @@ def get(self, request, supportchannel_id=None, academy_id=None): class SessionView(APIView, HeaderLimitOffsetPagination): - extensions = APIViewExtensions(sort='-created_at', paginate=True) + extensions = APIViewExtensions(sort="-created_at", paginate=True) - @capable_of('read_mentorship_session') + @capable_of("read_mentorship_session") def get(self, request, session_id=None, academy_id=None): handler = self.extensions(request) if session_id is not None: session = MentorshipSession.objects.filter(id=session_id, mentor__services__academy__id=academy_id).first() if session is None: - raise ValidationException('This session does not exist on this academy', code=404, slug='not-found') + raise ValidationException("This session does not exist on this academy", code=404, slug="not-found") serializer = SessionBigSerializer(session) return Response(serializer.data, status=status.HTTP_200_OK) @@ -1093,40 +1161,40 @@ def get(self, request, session_id=None, academy_id=None): items = MentorshipSession.objects.filter(mentor__services__academy__id=academy_id) lookup = {} - _status = request.GET.get('status', '') - if _status != '': - _status = [s.strip().upper() for s in _status.split(',')] - _status = list(filter(lambda s: s != '', _status)) + _status = request.GET.get("status", "") + if _status != "": + _status = [s.strip().upper() for s in _status.split(",")] + _status = list(filter(lambda s: s != "", _status)) items = items.filter(status__in=_status) - billed = request.GET.get('billed', '') - if billed == 'true': + billed = request.GET.get("billed", "") + if billed == "true": items = items.filter(bill__isnull=False) - elif billed == 'false': + elif billed == "false": items = items.filter(bill__isnull=True) - started_after = request.GET.get('started_after', '') - 
if started_after != '': + started_after = request.GET.get("started_after", "") + if started_after != "": items = items.filter(Q(started_at__gte=started_after) | Q(started_at__isnull=True)) - ended_before = request.GET.get('ended_before', '') - if ended_before != '': + ended_before = request.GET.get("ended_before", "") + if ended_before != "": items = items.filter(Q(ended_at__lte=ended_before) | Q(ended_at__isnull=True)) - mentor = request.GET.get('mentor', None) + mentor = request.GET.get("mentor", None) if mentor is not None: - if ',' in mentor or mentor.isnumeric(): - lookup['mentor__id__in'] = mentor.split(',') + if "," in mentor or mentor.isnumeric(): + lookup["mentor__id__in"] = mentor.split(",") else: - items = query_like_by_full_name(like=mentor, items=items, prefix='mentor__user__') + items = query_like_by_full_name(like=mentor, items=items, prefix="mentor__user__") - mentee = request.GET.get('student', None) + mentee = request.GET.get("student", None) if mentee is not None: - items = query_like_by_full_name(like=mentee, items=items, prefix='mentee__') + items = query_like_by_full_name(like=mentee, items=items, prefix="mentee__") - service = request.GET.get('service', None) + service = request.GET.get("service", None) if service is not None: - lookup['service__slug__icontains'] = service + lookup["service__slug__icontains"] = service items = items.filter(**lookup).distinct() items = handler.queryset(items) @@ -1134,29 +1202,32 @@ def get(self, request, session_id=None, academy_id=None): return handler.response(serializer.data) - @capable_of('crud_mentorship_session') + @capable_of("crud_mentorship_session") def post(self, request, academy_id=None): - serializer = SessionSerializer(data=request.data, context={'request': request, 'academy_id': academy_id}) + serializer = SessionSerializer(data=request.data, context={"request": request, "academy_id": academy_id}) if serializer.is_valid(): session = serializer.save() return Response(SessionBigSerializer(session).data, status=status.HTTP_201_CREATED) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - @capable_of('crud_mentorship_session') + @capable_of("crud_mentorship_session") def put(self, request, academy_id=None, session_id=None): many = isinstance(request.data, list) if not many: current = MentorshipSession.objects.filter(id=session_id, mentor__services__academy__id=academy_id).first() if current is None: - raise ValidationException('This session does not exist on this academy', code=404, slug='not-found') + raise ValidationException("This session does not exist on this academy", code=404, slug="not-found") - if current.bill and (current.bill.status == 'APPROVED' or current.bill.status == 'PAID' - or current.bill.status == 'IGNORED'): - raise ValidationException('Sessions associated with a closed bill cannot be edited', - code=400, - slug='trying-to-change-a-closed-bill') + if current.bill and ( + current.bill.status == "APPROVED" or current.bill.status == "PAID" or current.bill.status == "IGNORED" + ): + raise ValidationException( + "Sessions associated with a closed bill cannot be edited", + code=400, + slug="trying-to-change-a-closed-bill", + ) data = {} for key in request.data.keys(): @@ -1168,33 +1239,33 @@ def put(self, request, academy_id=None, session_id=None): for x in request.data: index = index + 1 - if 'id' not in x: - raise ValidationException('Cannot determine session in ' - f'index {index}', slug='without-id') + if "id" not in x: + raise ValidationException("Cannot determine session in " 
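Editor's aside: `SessionView.get` above, and `ServiceSessionView`, `MentorSessionView`, and `BillView` below, repeat the same querystring-to-queryset filters. A hypothetical helper capturing the shared part, written against the same GET parameters:

```python
from django.db.models import Q, QuerySet


def apply_session_filters(items: QuerySet, params) -> QuerySet:
    """Apply the shared ?status=, ?billed=, ?started_after= and ?ended_before= filters (illustrative)."""
    status = [s.strip().upper() for s in params.get("status", "").split(",") if s.strip()]
    if status:
        items = items.filter(status__in=status)

    billed = params.get("billed", "")
    if billed == "true":
        items = items.filter(bill__isnull=False)
    elif billed == "false":
        items = items.filter(bill__isnull=True)

    started_after = params.get("started_after", "")
    if started_after:
        items = items.filter(Q(started_at__gte=started_after) | Q(started_at__isnull=True))

    ended_before = params.get("ended_before", "")
    if ended_before:
        items = items.filter(Q(ended_at__lte=ended_before) | Q(ended_at__isnull=True))

    return items


# usage sketch: items = apply_session_filters(MentorshipSession.objects.filter(...), request.GET)
```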
f"index {index}", slug="without-id") - instance = MentorshipSession.objects.filter(id=x['id'], - mentor__services__academy__id=academy_id).first() + instance = MentorshipSession.objects.filter( + id=x["id"], mentor__services__academy__id=academy_id + ).first() if not instance: - raise ValidationException(f'Session({x["id"]}) does not exist on this academy', - code=404, - slug='not-found') + raise ValidationException( + f'Session({x["id"]}) does not exist on this academy', code=404, slug="not-found" + ) current.append(instance) - if instance.bill and (instance.bill.status == 'APPROVED' or instance.bill.status == 'PAID' - or instance.bill.status == 'IGNORED'): + if instance.bill and ( + instance.bill.status == "APPROVED" + or instance.bill.status == "PAID" + or instance.bill.status == "IGNORED" + ): raise ValidationException( - f'Sessions associated with a closed bill cannot be edited (index {index})', + f"Sessions associated with a closed bill cannot be edited (index {index})", code=400, - slug='trying-to-change-a-closed-bill') - - serializer = SessionPUTSerializer(current, - data=data, - context={ - 'request': request, - 'academy_id': academy_id - }, - many=many) + slug="trying-to-change-a-closed-bill", + ) + + serializer = SessionPUTSerializer( + current, data=data, context={"request": request, "academy_id": academy_id}, many=many + ) if serializer.is_valid(): serializer.save() return Response(serializer.data, status=status.HTTP_200_OK) @@ -1202,42 +1273,43 @@ def put(self, request, academy_id=None, session_id=None): class ServiceSessionView(APIView, HeaderLimitOffsetPagination): - extensions = APIViewExtensions(sort='-created_at', paginate=True) + extensions = APIViewExtensions(sort="-created_at", paginate=True) - @capable_of('read_mentorship_session') + @capable_of("read_mentorship_session") def get(self, request, service_id, academy_id=None): handler = self.extensions(request) if service_id is None: - raise ValidationException('Missing service id', code=404) + raise ValidationException("Missing service id", code=404) - items = MentorshipSession.objects.filter(mentor__services__id=service_id, - mentor__services__academy__id=academy_id) + items = MentorshipSession.objects.filter( + mentor__services__id=service_id, mentor__services__academy__id=academy_id + ) lookup = {} - _status = request.GET.get('status', '') - if _status != '': - _status = [s.strip().upper() for s in _status.split(',')] - _status = list(filter(lambda s: s != '', _status)) + _status = request.GET.get("status", "") + if _status != "": + _status = [s.strip().upper() for s in _status.split(",")] + _status = list(filter(lambda s: s != "", _status)) items = items.filter(status__in=_status) - billed = request.GET.get('billed', '') - if billed == 'true': + billed = request.GET.get("billed", "") + if billed == "true": items = items.filter(bill__isnull=False) - elif billed == 'false': + elif billed == "false": items = items.filter(bill__isnull=True) - started_after = request.GET.get('started_after', '') - if started_after != '': + started_after = request.GET.get("started_after", "") + if started_after != "": items = items.filter(Q(started_at__gte=started_after) | Q(started_at__isnull=True)) - ended_before = request.GET.get('ended_before', '') - if ended_before != '': + ended_before = request.GET.get("ended_before", "") + if ended_before != "": items = items.filter(Q(ended_at__lte=ended_before) | Q(ended_at__isnull=True)) - mentor = request.GET.get('mentor', None) + mentor = request.GET.get("mentor", None) if mentor is not None: 
- lookup['mentor__id__in'] = mentor.split(',') + lookup["mentor__id__in"] = mentor.split(",") items = items.filter(**lookup) items = handler.queryset(items) @@ -1247,36 +1319,36 @@ def get(self, request, service_id, academy_id=None): class MentorSessionView(APIView, HeaderLimitOffsetPagination): - extensions = APIViewExtensions(sort='-created_at', paginate=True) + extensions = APIViewExtensions(sort="-created_at", paginate=True) - @capable_of('read_mentorship_session') + @capable_of("read_mentorship_session") def get(self, request, mentor_id, academy_id=None): handler = self.extensions(request) if mentor_id is None: - raise ValidationException('Missing mentor id', code=404) + raise ValidationException("Missing mentor id", code=404) items = MentorshipSession.objects.filter(mentor__id=mentor_id, mentor__services__academy__id=academy_id) lookup = {} - _status = request.GET.get('status', '') - if _status != '': - _status = [s.strip().upper() for s in _status.split(',')] - _status = list(filter(lambda s: s != '', _status)) + _status = request.GET.get("status", "") + if _status != "": + _status = [s.strip().upper() for s in _status.split(",")] + _status = list(filter(lambda s: s != "", _status)) items = items.filter(status__in=_status) - billed = request.GET.get('billed', '') - if billed == 'true': + billed = request.GET.get("billed", "") + if billed == "true": items = items.filter(bill__isnull=False) - elif billed == 'false': + elif billed == "false": items = items.filter(bill__isnull=True) - started_after = request.GET.get('started_after', '') - if started_after != '': + started_after = request.GET.get("started_after", "") + if started_after != "": items = items.filter(Q(started_at__gte=started_after) | Q(started_at__isnull=True)) - ended_before = request.GET.get('ended_before', '') - if ended_before != '': + ended_before = request.GET.get("ended_before", "") + if ended_before != "": items = items.filter(Q(ended_at__lte=ended_before) | Q(ended_at__isnull=True)) items = items.filter(**lookup) @@ -1287,18 +1359,18 @@ def get(self, request, mentor_id, academy_id=None): class BillView(APIView, HeaderLimitOffsetPagination): - extensions = APIViewExtensions(sort='-created_at', paginate=True) + extensions = APIViewExtensions(sort="-created_at", paginate=True) - @capable_of('read_mentorship_bill') + @capable_of("read_mentorship_bill") def get(self, request, bill_id=None, academy_id=None): handler = self.extensions(request) if bill_id is not None: bill = MentorshipBill.objects.filter(id=bill_id, academy__id=academy_id).first() if bill is None: - raise ValidationException('This mentorship bill does not exist on this academy', - code=404, - slug='not-found') + raise ValidationException( + "This mentorship bill does not exist on this academy", code=404, slug="not-found" + ) serializer = BigBillSerializer(bill) return Response(serializer.data, status=status.HTTP_200_OK) @@ -1306,23 +1378,23 @@ def get(self, request, bill_id=None, academy_id=None): items = MentorshipBill.objects.filter(academy__id=academy_id) lookup = {} - _status = request.GET.get('status', '') - if _status != '': - _status = [s.strip().upper() for s in _status.split(',')] - _status = list(filter(lambda s: s != '', _status)) + _status = request.GET.get("status", "") + if _status != "": + _status = [s.strip().upper() for s in _status.split(",")] + _status = list(filter(lambda s: s != "", _status)) items = items.filter(status__in=_status) - after = request.GET.get('after', '') - if after != '': + after = request.GET.get("after", "") + if 
after != "": items = items.filter(created_at__gte=after) - before = request.GET.get('before', '') - if before != '': + before = request.GET.get("before", "") + if before != "": items = items.filter(created_at__lte=before) - mentor = request.GET.get('mentor', None) + mentor = request.GET.get("mentor", None) if mentor is not None: - lookup['mentor__id__in'] = mentor.split(',') + lookup["mentor__id__in"] = mentor.split(",") items = items.filter(**lookup) items = handler.queryset(items) @@ -1330,84 +1402,84 @@ def get(self, request, bill_id=None, academy_id=None): return handler.response(serializer.data) - @capable_of('crud_mentorship_bill') + @capable_of("crud_mentorship_bill") def post(self, request, academy_id=None, mentor_id=None): if mentor_id is None: - raise ValidationException('Missing mentor ID on the URL', code=404, slug='argument-not-provided') + raise ValidationException("Missing mentor ID on the URL", code=404, slug="argument-not-provided") mentor = MentorProfile.objects.filter(id=mentor_id, services__academy__id=academy_id).first() if mentor is None: - raise ValidationException('This mentor does not exist for this academy', code=404, slug='not-found') + raise ValidationException("This mentor does not exist for this academy", code=404, slug="not-found") bills = generate_mentor_bills(mentor) serializer = GETBillSmallSerializer(bills, many=True) return Response(serializer.data, status=status.HTTP_200_OK) - @capable_of('crud_mentorship_bill') + @capable_of("crud_mentorship_bill") def put(self, request, bill_id=None, academy_id=None): many = isinstance(request.data, list) if many and bill_id: - raise ValidationException('Avoid using bulk mode passing id in the url', - code=404, - slug='bulk-mode-and-bill-id') + raise ValidationException( + "Avoid using bulk mode passing id in the url", code=404, slug="bulk-mode-and-bill-id" + ) if many: bill = [] for obj in request.data: - if 'id' not in obj: - raise ValidationException('Bill id must be provided in bulk mode', - code=404, - slug='missing-some-id-in-body') + if "id" not in obj: + raise ValidationException( + "Bill id must be provided in bulk mode", code=404, slug="missing-some-id-in-body" + ) - if not (elem := MentorshipBill.objects.filter(id=obj['id']).first()): - raise ValidationException(f'Bill {obj["id"]} not found', code=404, slug='some-not-found') + if not (elem := MentorshipBill.objects.filter(id=obj["id"]).first()): + raise ValidationException(f'Bill {obj["id"]} not found', code=404, slug="some-not-found") - if elem.status == 'RECALCULATE' and 'status' in obj and obj['status'] != 'RECALCULATE': - raise ValidationException('This bill must be regenerated before you can update its status', - code=400, - slug='trying-edit-status-to-dirty-bill') + if elem.status == "RECALCULATE" and "status" in obj and obj["status"] != "RECALCULATE": + raise ValidationException( + "This bill must be regenerated before you can update its status", + code=400, + slug="trying-edit-status-to-dirty-bill", + ) bill.append(elem) else: if bill_id is None: - raise ValidationException('Missing bill ID on the URL', code=404, slug='without-bulk-mode-and-bill-id') + raise ValidationException("Missing bill ID on the URL", code=404, slug="without-bulk-mode-and-bill-id") bill = MentorshipBill.objects.filter(id=bill_id, academy__id=academy_id).first() if bill is None: - raise ValidationException('This bill does not exist for this academy', code=404, slug='not-found') - - if bill.status == 'RECALCULATE' and 'status' in request.data and request.data['status'] != 
'RECALCULATE': - raise ValidationException('This bill must be regenerated before you can update its status', - code=400, - slug='trying-edit-status-to-dirty-bill') - - serializer = MentorshipBillPUTSerializer(bill, - data=request.data, - many=many, - context={ - 'request': request, - 'academy_id': academy_id - }) + raise ValidationException("This bill does not exist for this academy", code=404, slug="not-found") + + if bill.status == "RECALCULATE" and "status" in request.data and request.data["status"] != "RECALCULATE": + raise ValidationException( + "This bill must be regenerated before you can update its status", + code=400, + slug="trying-edit-status-to-dirty-bill", + ) + + serializer = MentorshipBillPUTSerializer( + bill, data=request.data, many=many, context={"request": request, "academy_id": academy_id} + ) if serializer.is_valid(): serializer.save() _serializer = GETBillSmallSerializer(bill, many=many) return Response(_serializer.data, status=status.HTTP_200_OK) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - @capable_of('crud_mentorship_bill') + @capable_of("crud_mentorship_bill") def delete(self, request, bill_id=None, academy_id=None): if bill_id is None: - raise ValidationException('Missing bill ID on the URL', 404) + raise ValidationException("Missing bill ID on the URL", 404) bill = MentorshipBill.objects.filter(id=bill_id, academy__id=academy_id).first() if bill is None: - raise ValidationException('This bill does not exist for this academy', code=404, slug='not-found') + raise ValidationException("This bill does not exist for this academy", code=404, slug="not-found") - if bill.status == 'PAID': - raise ValidationException('Paid bills cannot be deleted', slug='paid-bill') + if bill.status == "PAID": + raise ValidationException("Paid bills cannot be deleted", slug="paid-bill") bill.delete() return Response(None, status=status.HTTP_204_NO_CONTENT) @@ -1418,41 +1490,41 @@ class UserMeSessionView(APIView, HeaderLimitOffsetPagination): List all snippets, or create a new snippet. 
""" - @has_permission('get_my_mentoring_sessions') + @has_permission("get_my_mentoring_sessions") def get(self, request): items = MentorshipSession.objects.filter(Q(mentor__user__id=request.user.id) | Q(mentee__id=request.user.id)) lookup = {} - _status = request.GET.get('status', '') - if _status != '': - _status = [s.strip().upper() for s in _status.split(',')] - _status = list(filter(lambda s: s != '', _status)) + _status = request.GET.get("status", "") + if _status != "": + _status = [s.strip().upper() for s in _status.split(",")] + _status = list(filter(lambda s: s != "", _status)) items = items.filter(status__in=_status) - billed = request.GET.get('billed', '') - if billed == 'true': + billed = request.GET.get("billed", "") + if billed == "true": items = items.filter(bill__isnull=False) - elif billed == 'false': + elif billed == "false": items = items.filter(bill__isnull=True) - started_after = request.GET.get('started_after', '') - if started_after != '': + started_after = request.GET.get("started_after", "") + if started_after != "": items = items.filter(Q(started_at__gte=started_after) | Q(started_at__isnull=True)) - ended_before = request.GET.get('ended_before', '') - if ended_before != '': + ended_before = request.GET.get("ended_before", "") + if ended_before != "": items = items.filter(Q(ended_at__lte=ended_before) | Q(ended_at__isnull=True)) - mentee = request.GET.get('mentee', None) + mentee = request.GET.get("mentee", None) if mentee is not None: - lookup['mentee__id__in'] = mentee.split(',') + lookup["mentee__id__in"] = mentee.split(",") - mentor = request.GET.get('mentor', None) + mentor = request.GET.get("mentor", None) if mentee is not None: - lookup['mentor__id__in'] = mentor.split(',') + lookup["mentor__id__in"] = mentor.split(",") - items = items.filter(**lookup).order_by('-created_at') + items = items.filter(**lookup).order_by("-created_at") page = self.paginate_queryset(items, request) serializer = BillSessionSerializer(page, many=True) @@ -1465,13 +1537,13 @@ def get(self, request): class UserMeBillView(APIView, HeaderLimitOffsetPagination): - @has_permission('get_my_mentoring_sessions') + @has_permission("get_my_mentoring_sessions") def get(self, request, bill_id=None): if bill_id is not None: bill = MentorshipBill.objects.filter(id=bill_id, mentor__user__id=request.user.id).first() if bill is None: - raise ValidationException('This mentorship bill does not exist', code=404) + raise ValidationException("This mentorship bill does not exist", code=404) serializer = BigBillSerializer(bill) return Response(serializer.data, status=status.HTTP_200_OK) @@ -1479,25 +1551,25 @@ def get(self, request, bill_id=None): items = MentorshipBill.objects.filter(mentor__user__id=request.user.id) lookup = {} - _status = request.GET.get('status', '') - if _status != '': - _status = [s.strip().upper() for s in _status.split(',')] - _status = list(filter(lambda s: s != '', _status)) + _status = request.GET.get("status", "") + if _status != "": + _status = [s.strip().upper() for s in _status.split(",")] + _status = list(filter(lambda s: s != "", _status)) items = items.filter(status__in=_status) - after = request.GET.get('after', '') - if after != '': + after = request.GET.get("after", "") + if after != "": items = items.filter(created_at__gte=after) - before = request.GET.get('before', '') - if before != '': + before = request.GET.get("before", "") + if before != "": items = items.filter(created_at__lte=before) - mentee = request.GET.get('mentee', None) + mentee = 
request.GET.get("mentee", None) if mentee is not None: - lookup['mentee__id__in'] = mentee.split(',') + lookup["mentee__id__in"] = mentee.split(",") - items = items.filter(**lookup).order_by('-created_at') + items = items.filter(**lookup).order_by("-created_at") page = self.paginate_queryset(items, request) serializer = GETBillSmallSerializer(page, many=True) @@ -1508,22 +1580,22 @@ def get(self, request, bill_id=None): class PublicMentorView(APIView, HeaderLimitOffsetPagination): - extensions = APIViewExtensions(sort='-created_at', paginate=True) + extensions = APIViewExtensions(sort="-created_at", paginate=True) permission_classes = [AllowAny] def get(self, request): handler = self.extensions(request) - items = MentorProfile.objects.filter(status='ACTIVE') + items = MentorProfile.objects.filter(status="ACTIVE") lookup = {} - if 'services' in self.request.GET: - param = self.request.GET.get('services', '').split(',') - lookup['services__slug__in'] = param + if "services" in self.request.GET: + param = self.request.GET.get("services", "").split(",") + lookup["services__slug__in"] = param - if 'syllabus' in self.request.GET: - param = self.request.GET.get('syllabus') - lookup['syllabus__slug'] = param + if "syllabus" in self.request.GET: + param = self.request.GET.get("syllabus") + lookup["syllabus__slug"] = param items = items.filter(**lookup) items = handler.queryset(items) @@ -1538,12 +1610,12 @@ class AcademyCalendlyOrgView(APIView): Manage the calendly integration for academy """ - @capable_of('read_calendly_organization') + @capable_of("read_calendly_organization") def get(self, request, academy_id): org = CalendlyOrganization.objects.filter(academy__id=academy_id).first() if org is None: - raise ValidationException('Organization not found for this academy', 404) + raise ValidationException("Organization not found for this academy", 404) serializer = CalendlyOrganizationBigSerializer(org, many=False) @@ -1551,28 +1623,28 @@ def get(self, request, academy_id): subscriptions = cal.get_subscriptions(org.uri) org_dict = { - 'subscriptions': subscriptions, + "subscriptions": subscriptions, **serializer.data, } return Response(org_dict) - @capable_of('create_calendly_organization') + @capable_of("create_calendly_organization") def post(self, request, academy_id): lang = get_user_language(request) organization = CalendlyOrganization.objects.filter(academy__id=academy_id).first() if organization is not None: - raise ValidationException('Academy already has a calendly organization associated', slug='already-created') + raise ValidationException("Academy already has a calendly organization associated", slug="already-created") - serializer = CalendlyOrganizationSerializer(data={ - **request.data, 'academy': academy_id - }, - context={ - 'lang': lang, - 'academy_id': academy_id, - }) + serializer = CalendlyOrganizationSerializer( + data={**request.data, "academy": academy_id}, + context={ + "lang": lang, + "academy_id": academy_id, + }, + ) if serializer.is_valid(): organization = serializer.save() @@ -1581,12 +1653,12 @@ def post(self, request, academy_id): return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - @capable_of('reset_calendly_organization') + @capable_of("reset_calendly_organization") def put(self, request, academy_id): organization = CalendlyOrganization.objects.filter(academy__id=academy_id).first() if not organization: - raise ValidationException('Calendly Organization not found for this academy', 
slug='org-not-found') + raise ValidationException("Calendly Organization not found for this academy", slug="org-not-found") organization.reset_hash() @@ -1597,12 +1669,12 @@ def put(self, request, academy_id): serializer = CalendlyOrganizationBigSerializer(organization) return Response(serializer.data, status=status.HTTP_400_BAD_REQUEST) - @capable_of('delete_calendly_organization') + @capable_of("delete_calendly_organization") def delete(self, request, academy_id): organization = CalendlyOrganization.objects.filter(academy__id=academy_id).first() if not organization: - raise ValidationException('Calendly Organization not found for this academy', slug='org-not-found') + raise ValidationException("Calendly Organization not found for this academy", slug="org-not-found") cal = Calendly(token=organization.access_token) cal.unsubscribe_all(organization.uri) diff --git a/breathecode/middlewares.py b/breathecode/middlewares.py index 59491627e..33c77bf76 100644 --- a/breathecode/middlewares.py +++ b/breathecode/middlewares.py @@ -11,20 +11,20 @@ from django.utils.decorators import sync_and_async_middleware from django.utils.deprecation import MiddlewareMixin -ENV = os.getenv('ENV', '') -IS_TEST = ENV not in ['production', 'staging', 'development'] -IS_DEV = ENV != 'production' -ENABLE_LIST_OPTIONS = ['true', '1', 'yes', 'y'] +ENV = os.getenv("ENV", "") +IS_TEST = ENV not in ["production", "staging", "development"] +IS_DEV = ENV != "production" +ENABLE_LIST_OPTIONS = ["true", "1", "yes", "y"] @functools.lru_cache(maxsize=1) def is_compression_enabled(): - return os.getenv('COMPRESSION', '1').lower() in ENABLE_LIST_OPTIONS + return os.getenv("COMPRESSION", "1").lower() in ENABLE_LIST_OPTIONS @functools.lru_cache(maxsize=1) def min_compression_size(): - return int(os.getenv('MIN_COMPRESSION_SIZE', '10')) + return int(os.getenv("MIN_COMPRESSION_SIZE", "10")) def must_compress(data): @@ -37,7 +37,7 @@ def must_compress(data): @functools.lru_cache(maxsize=1) def use_gzip(): - return os.getenv('USE_GZIP', '0').lower() in ENABLE_LIST_OPTIONS + return os.getenv("USE_GZIP", "0").lower() in ENABLE_LIST_OPTIONS class CompressResponseMiddleware(MiddlewareMixin): @@ -51,11 +51,11 @@ def _compress(self, response, encoding, alg): compressed_content = alg(response.streaming_content) response.streaming_content = compressed_content - response['Content-Encoding'] = encoding + response["Content-Encoding"] = encoding # response['Content-Length'] = str(len(compressed_content)) def _must_compress(self, response): - self._has_content = hasattr(response, 'content') + self._has_content = hasattr(response, "content") if self._has_content: return must_compress(response.content) @@ -65,39 +65,43 @@ def _must_compress(self, response): def process_response(self, request, response): # If the response is already compressed, do nothing - if 'Content-Encoding' in response.headers or is_compression_enabled() is False or self._must_compress( - response) is False or IS_TEST: + if ( + "Content-Encoding" in response.headers + or is_compression_enabled() is False + or self._must_compress(response) is False + or IS_TEST + ): return response # Compress the response if it's large enough if response.content: - accept_encoding = request.META.get('HTTP_ACCEPT_ENCODING', '') + accept_encoding = request.META.get("HTTP_ACCEPT_ENCODING", "") dont_force_gzip = not use_gzip() # sort by compression ratio and speed - if 'zstd' in accept_encoding and dont_force_gzip: - self._compress(response, 'zstd', zstandard.compress) + if "zstd" in accept_encoding 
and dont_force_gzip: + self._compress(response, "zstd", zstandard.compress) - elif ('deflate' in accept_encoding or '*' in accept_encoding) and dont_force_gzip: - self._compress(response, 'deflate', zlib.compress) + elif ("deflate" in accept_encoding or "*" in accept_encoding) and dont_force_gzip: + self._compress(response, "deflate", zlib.compress) - elif 'gzip' in accept_encoding: - self._compress(response, 'gzip', gzip.compress) + elif "gzip" in accept_encoding: + self._compress(response, "gzip", gzip.compress) - elif IS_DEV and 'br' in accept_encoding and 'PostmanRuntime' in request.META.get('HTTP_USER_AGENT', ''): - self._compress(response, 'br', brotli.compress) + elif IS_DEV and "br" in accept_encoding and "PostmanRuntime" in request.META.get("HTTP_USER_AGENT", ""): + self._compress(response, "br", brotli.compress) return response @sync_and_async_middleware def static_redirect_middleware(get_response): - path = '/static' + path = "/static" def redirect(request): - bucket = os.getenv('STATIC_BUCKET') - gcs_base_url = f'https://storage.googleapis.com/{bucket}' + bucket = os.getenv("STATIC_BUCKET") + gcs_base_url = f"https://storage.googleapis.com/{bucket}" full_url = f"{gcs_base_url}{request.path.replace(path, '')}" return HttpResponseRedirect(full_url) @@ -105,7 +109,7 @@ def redirect(request): if iscoroutinefunction(get_response): async def middleware(request): - if request.path.startswith(f'{path}/'): + if request.path.startswith(f"{path}/"): return redirect(request) response = await get_response(request) @@ -114,7 +118,7 @@ async def middleware(request): else: def middleware(request): - if request.path.startswith(f'{path}/'): + if request.path.startswith(f"{path}/"): return redirect(request) response = get_response(request) diff --git a/breathecode/monitoring/actions.py b/breathecode/monitoring/actions.py index efcaf30f3..11ad184a8 100644 --- a/breathecode/monitoring/actions.py +++ b/breathecode/monitoring/actions.py @@ -22,7 +22,7 @@ logger = logging.getLogger(__name__) -USER_AGENT = 'BreathecodeMonitoring/1.0' +USER_AGENT = "BreathecodeMonitoring/1.0" SCRIPT_HEADER = """ # from django.conf import settings # import breathecode.settings as app_settings @@ -36,37 +36,42 @@ def test_link(url, test_pattern=None): - headers = {'User-Agent': USER_AGENT} + headers = {"User-Agent": USER_AGENT} result = { - 'url': url, - 'status_code': 404, - 'status_text': '', - 'payload': None, + "url": url, + "status_code": 404, + "status_text": "", + "payload": None, } try: r = requests.get(url, headers=headers, timeout=2) length = 0 - if 'content-length' in r.headers: - length = r.headers['content-length'] - result['status_code'] = r.status_code + if "content-length" in r.headers: + length = r.headers["content-length"] + result["status_code"] = r.status_code # if status is one error, we should need see the status text - result['payload'] = r.text - - if (test_pattern is None and not (result['status_code'] >= 200 and result['status_code'] <= 299) - and int(length) > 3000): - result['status_code'] = 400 - result['status_text'] = ('Timeout: The payload of this request is too long ' - '(more than 3 MB), remove the test_pattern to avoid timeout') + result["payload"] = r.text + + if ( + test_pattern is None + and not (result["status_code"] >= 200 and result["status_code"] <= 299) + and int(length) > 3000 + ): + result["status_code"] = 400 + result["status_text"] = ( + "Timeout: The payload of this request is too long " + "(more than 3 MB), remove the test_pattern to avoid timeout" + ) except 
requests.Timeout: - result['status_code'] = 500 - result['status_text'] = 'Connection Timeout' + result["status_code"] = 500 + result["status_text"] = "Connection Timeout" except requests.ConnectionError: - result['status_code'] = 404 - result['status_text'] = 'Connection Error 404' + result["status_code"] = 404 + result["status_text"] = "Connection Error 404" logger.debug(f'Tested {url} {result["status_text"]} with {result["status_code"]}') return result @@ -77,28 +82,29 @@ def subscribe_repository(subs_id, settings=None): subscription = RepositorySubscription.objects.filter(id=subs_id).first() try: if subscription is None: - raise Exception(f'Invalid subscription id {subs_id}') + raise Exception(f"Invalid subscription id {subs_id}") if settings is None: settings = AcademyAuthSettings.objects.filter(academy__id=subscription.owner.id).first() if settings is None: raise Exception( - f'Github credentials and settings have not been found for the academy {subscription.owner.id}') + f"Github credentials and settings have not been found for the academy {subscription.owner.id}" + ) if settings.academy.id != subscription.owner.id: - raise Exception('Provided auth settings don\'t belong to the academy subscription owner') + raise Exception("Provided auth settings don't belong to the academy subscription owner") _owner, _repo_name = subscription.get_repo_name() gb = Github(org=settings.github_username, token=settings.github_owner.credentialsgithub.token) result = gb.subscribe_to_repo(_owner, _repo_name, subscription.token) - subscription.status = 'OPERATIONAL' - subscription.status_message = 'OK' - subscription.hook_id = result['id'] + subscription.status = "OPERATIONAL" + subscription.status_message = "OK" + subscription.hook_id = result["id"] subscription.save() except Exception as e: - subscription.status = 'CRITICAL' - subscription.status_message = 'Error subscribing to repo: ' + str(e) + subscription.status = "CRITICAL" + subscription.status_message = "Error subscribing to repo: " + str(e) subscription.save() return subscription @@ -107,37 +113,37 @@ def get_website_text(endp): """Make a request to get the content of the given URL.""" res = test_link(endp.url, endp.test_pattern) - status_code = res['status_code'] - payload = res['payload'] + status_code = res["status_code"] + payload = res["payload"] endp.last_check = timezone.now() if status_code > 399: - endp.status = 'CRITICAL' + endp.status = "CRITICAL" endp.severity_level = 100 - endp.status_text = 'Status above 399' + endp.status_text = "Status above 399" elif status_code > 299: - endp.status = 'MINOR' + endp.status = "MINOR" endp.severity_level = 5 - endp.status_text = 'Status in the 3xx range, maybe a cached reponse?' + endp.status_text = "Status in the 3xx range, maybe a cached reponse?" 
elif status_code > 199: endp.severity_level = 5 - endp.status = 'OPERATIONAL' - endp.status_text = 'Status withing the 2xx range' + endp.status = "OPERATIONAL" + endp.status_text = "Status withing the 2xx range" else: - endp.status = 'MINOR' + endp.status = "MINOR" endp.severity_level = 0 - endp.status_text = 'Uknown status code, lower than 200' + endp.status_text = "Uknown status code, lower than 200" if endp.test_pattern and status_code == 200 and payload: if not re.search(endp.test_pattern, payload): endp.response_text = payload - endp.status = 'MINOR' + endp.status = "MINOR" endp.severity_level = 5 - endp.status_text = f'Status is 200 but regex {endp.test_pattern} was rejected' + endp.status_text = f"Status is 200 but regex {endp.test_pattern} was rejected" else: endp.response_text = None @@ -155,54 +161,55 @@ def get_website_text(endp): def run_app_diagnostic(app, report=False): failed_endpoints = [] # data to be send to slack - results = {'severity_level': 0, 'details': ''} - logger.debug(f'Testing application {app.title}') + results = {"severity_level": 0, "details": ""} + logger.debug(f"Testing application {app.title}") now = timezone.now() _endpoints = app.endpoint_set.all() for endpoint in _endpoints: if endpoint.last_check is not None and endpoint.last_check > now - timezone.timedelta( - minutes=endpoint.frequency_in_minutes): - logger.debug(f'Ignoring {endpoint.url} because frequency hast not been met') - endpoint.status_text = 'Ignored because its paused' + minutes=endpoint.frequency_in_minutes + ): + logger.debug(f"Ignoring {endpoint.url} because frequency hast not been met") + endpoint.status_text = "Ignored because its paused" endpoint.save() continue if endpoint.paused_until is not None and endpoint.paused_until > now: - logger.debug(f'Ignoring endpoint:{endpoint.url} monitor because its paused') - endpoint.status_text = 'Ignored because its paused' + logger.debug(f"Ignoring endpoint:{endpoint.url} monitor because its paused") + endpoint.status_text = "Ignored because its paused" endpoint.save() continue # Starting the test - logger.debug(f'Testing endpoint: {endpoint.url}') - endpoint.status = 'LOADING' + logger.debug(f"Testing endpoint: {endpoint.url}") + endpoint.status = "LOADING" endpoint.save() e = get_website_text(endpoint) - if e.status != 'OPERATIONAL': - if e.severity_level > results['severity_level']: - results['severity_level'] = e.severity_level + if e.status != "OPERATIONAL": + if e.severity_level > results["severity_level"]: + results["severity_level"] = e.severity_level if e.special_status_text: - results['details'] += e.special_status_text + results["details"] += e.special_status_text if e.status not in results: results[e.status] = [] results[e.status].append(e.url) failed_endpoints.append(e) - if results['severity_level'] == 0: - results['status'] = 'OPERATIONAL' - elif results['severity_level'] > 10: - results['status'] = 'CRITICAL' + if results["severity_level"] == 0: + results["status"] = "OPERATIONAL" + elif results["severity_level"] > 10: + results["status"] = "CRITICAL" else: - results['status'] = 'MINOR' + results["status"] = "MINOR" - results['slack_payload'] = render_snooze_text_endpoint(failed_endpoints) # converting to json to send to slack + results["slack_payload"] = render_snooze_text_endpoint(failed_endpoints) # converting to json to send to slack # JSON Details to be shown on the error report - results['details'] = json.dumps(results, indent=4) + results["details"] = json.dumps(results, indent=4) - app.status = results['status'] - 
app.response_text = results['text'] + app.status = results["status"] + app.response_text = results["text"] app.save() return results @@ -210,50 +217,50 @@ def run_app_diagnostic(app, report=False): def run_endpoint_diagnostic(endpoint_id): endpoint = Endpoint.objects.get(id=endpoint_id) - results = {'severity_level': 0, 'details': '', 'log': ''} + results = {"severity_level": 0, "details": "", "log": ""} - logger.debug(f'Testing endpoint {endpoint.url}') + logger.debug(f"Testing endpoint {endpoint.url}") now = timezone.now() - if (endpoint.last_check and endpoint.last_check > now - timezone.timedelta(minutes=endpoint.frequency_in_minutes)): - logger.debug(f'Ignoring {endpoint.url} because frequency hast not been met') - endpoint.status_text = 'Ignored because its paused' + if endpoint.last_check and endpoint.last_check > now - timezone.timedelta(minutes=endpoint.frequency_in_minutes): + logger.debug(f"Ignoring {endpoint.url} because frequency hast not been met") + endpoint.status_text = "Ignored because its paused" endpoint.save() return False if endpoint.paused_until and endpoint.paused_until > now: - logger.debug(f'Ignoring endpoint:{endpoint.url} monitor because its paused') - endpoint.status_text = 'Ignored because its paused' + logger.debug(f"Ignoring endpoint:{endpoint.url} monitor because its paused") + endpoint.status_text = "Ignored because its paused" endpoint.save() return False # Starting the test - logger.debug(f'Testing endpoint: {endpoint.url}') - endpoint.status = 'LOADING' + logger.debug(f"Testing endpoint: {endpoint.url}") + endpoint.status = "LOADING" endpoint.save() e = get_website_text(endpoint) - results['text'] = e.response_text - if e.status != 'OPERATIONAL': - if e.severity_level > results['severity_level']: - results['severity_level'] = e.severity_level + results["text"] = e.response_text + if e.status != "OPERATIONAL": + if e.severity_level > results["severity_level"]: + results["severity_level"] = e.severity_level if e.special_status_text: - results['details'] += e.special_status_text + results["details"] += e.special_status_text if e.status not in results: results[e.status] = [] results[e.status].append(e.url) - if results['severity_level'] == 0: - results['status'] = 'OPERATIONAL' - elif results['severity_level'] > 10: - results['status'] = 'CRITICAL' + if results["severity_level"] == 0: + results["status"] = "OPERATIONAL" + elif results["severity_level"] > 10: + results["status"] = "CRITICAL" else: - results['status'] = 'MINOR' + results["status"] = "MINOR" - results['slack_payload'] = render_snooze_text_endpoint([endpoint]) # converting to json to send to slack + results["slack_payload"] = render_snooze_text_endpoint([endpoint]) # converting to json to send to slack - results['details'] = json.dumps(results, indent=4) - endpoint.response_text = results['text'] + results["details"] = json.dumps(results, indent=4) + endpoint.response_text = results["text"] endpoint.save() return results @@ -261,7 +268,7 @@ def run_endpoint_diagnostic(endpoint_id): def run_script(script): results = { - 'severity_level': 0, + "severity_level": 0, } import contextlib @@ -278,38 +285,40 @@ def stdout_io(stdout=None): content = None exception = None - if script.script_slug and script.script_slug != 'other': + if script.script_slug and script.script_slug != "other": dir_path = os.path.dirname(os.path.realpath(__file__)) header = SCRIPT_HEADER - content = header + \ - open(f'{dir_path}/scripts/{script.script_slug}.py').read() + content = header + 
open(f"{dir_path}/scripts/{script.script_slug}.py").read() elif script.script_body: content = script.script_body else: - exception = WrongScriptConfiguration(f'Script not found or its body is empty: {script.script_slug}') + exception = WrongScriptConfiguration(f"Script not found or its body is empty: {script.script_slug}") if content or exception: - local = {'result': {'status': 'OPERATIONAL'}} + local = {"result": {"status": "OPERATIONAL"}} with stdout_io() as s: try: if exception: raise exception if script.application is None: - raise Exception(f'Script {script.script_slug} does not belong to any application') + raise Exception(f"Script {script.script_slug} does not belong to any application") exec( - content, { - 'academy': script.application.academy, - 'ADMIN_URL': os.getenv('ADMIN_URL', ''), - 'API_URL': os.getenv('API_URL', ''), - }, local) + content, + { + "academy": script.application.academy, + "ADMIN_URL": os.getenv("ADMIN_URL", ""), + "API_URL": os.getenv("API_URL", ""), + }, + local, + ) script.status_code = 0 - script.status = 'OPERATIONAL' - script.special_status_text = 'OK' - results['severity_level'] = 5 + script.status = "OPERATIONAL" + script.special_status_text = "OK" + results["severity_level"] = 5 script.response_text = s.getvalue() except ScriptNotification as e: @@ -319,46 +328,47 @@ def stdout_io(stdout=None): script.special_status_text = e.title if e.btn_url is not None: - results['btn'] = {'url': e.btn_url, 'label': 'More details'} + results["btn"] = {"url": e.btn_url, "label": "More details"} if e.btn_label is not None: - results['btn']['label'] = e.btn_label + results["btn"]["label"] = e.btn_label else: - results['btn'] = None + results["btn"] = None if e.status is not None: script.status = e.status - results['severity_level'] = 5 if e.status != 'CRITICAL' else 100 + results["severity_level"] = 5 if e.status != "CRITICAL" else 100 else: - script.status = 'MINOR' - results['severity_level'] = 5 - results['error_slug'] = e.slug + script.status = "MINOR" + results["severity_level"] = 5 + results["error_slug"] = e.slug except WrongScriptConfiguration as e: script.special_status_text = str(e)[:255] script.response_text = str(e) script.status_code = 1 - script.status = 'CRITICAL' - results['error_slug'] = 'wrong-configuration' - results['btn'] = None - results['severity_level'] = 100 + script.status = "CRITICAL" + results["error_slug"] = "wrong-configuration" + results["btn"] = None + results["severity_level"] = 100 except Exception as e: import traceback + script.special_status_text = str(e)[:255] - script.response_text = ''.join(traceback.format_exception(None, e, e.__traceback__)) + script.response_text = "".join(traceback.format_exception(None, e, e.__traceback__)) script.status_code = 1 - script.status = 'CRITICAL' - results['error_slug'] = 'unknown' - results['btn'] = None - results['severity_level'] = 100 + script.status = "CRITICAL" + results["error_slug"] = "unknown" + results["btn"] = None + results["severity_level"] = 100 script.last_run = timezone.now() script.save() - results['status'] = script.status - results['text'] = script.response_text - results['title'] = script.special_status_text - results['slack_payload'] = render_snooze_script([script]) # converting to json to send to slack + results["status"] = script.status + results["text"] = script.response_text + results["title"] = script.special_status_text + results["slack_payload"] = render_snooze_script([script]) # converting to json to send to slack return results @@ -370,23 +380,24 @@ def 
download_csv(module, model_name, ids_to_download, academy_id=None): download = CSVDownload() try: - downloads_bucket = os.getenv('DOWNLOADS_BUCKET', None) + downloads_bucket = os.getenv("DOWNLOADS_BUCKET", None) if downloads_bucket is None: - raise Exception('Unknown DOWNLOADS_BUCKET configuration, please set env variable') + raise Exception("Unknown DOWNLOADS_BUCKET configuration, please set env variable") # separated downloads by academy academy = Academy.objects.filter(id=academy_id).first() - download.name = '' + download.name = "" if academy is not None: download.academy = academy download.name += academy.slug # import model (table) being downloaded import importlib + model = getattr(importlib.import_module(module), model_name) # finish the file name with <academy_slug>+<model_name>+<epoc_time>.csv - download.name = model_name + str(int(time.time())) + '.csv' + download.name = model_name + str(int(time.time())) + ".csv" download.save() meta = model._meta @@ -394,7 +405,7 @@ def download_csv(module, model_name, ids_to_download, academy_id=None): # rebuild query from the admin queryset = model.objects.filter(pk__in=ids_to_download) - #write csv + # write csv buffer = StringIO() writer = csv.writer(buffer) writer.writerow(field_names) @@ -403,15 +414,16 @@ def download_csv(module, model_name, ids_to_download, academy_id=None): # upload to google cloud bucket from ..services.google_cloud import Storage + storage = Storage() - cloud_file = storage.file(os.getenv('DOWNLOADS_BUCKET', None), download.name) - cloud_file.upload(buffer.getvalue(), content_type='text/csv') + cloud_file = storage.file(os.getenv("DOWNLOADS_BUCKET", None), download.name) + cloud_file.upload(buffer.getvalue(), content_type="text/csv") download.url = cloud_file.url() - download.status = 'DONE' + download.status = "DONE" download.save() return True except Exception as e: - download.status = 'ERROR' + download.status = "ERROR" download.status_message = str(e) download.save() return False @@ -422,7 +434,7 @@ def unsubscribe_repository(subs_id, force_delete=True): subs = RepositorySubscription.objects.filter(id=subs_id).first() if subs.hook_id is None: - raise Exception('Subscription is missing a github hook id') + raise Exception("Subscription is missing a github hook id") settings = AcademyAuthSettings.objects.filter(academy__id=subs.owner.id).first() gb = Github(org=settings.github_username, token=settings.github_owner.credentialsgithub.token) @@ -431,18 +443,19 @@ def unsubscribe_repository(subs_id, force_delete=True): gb.unsubscribe_from_repo(_owner, _repo_name, subs.hook_id) # you can delete the subscription after unsubscribing - if force_delete: subs.delete() + if force_delete: + subs.delete() else: - subs.status = 'DISABLED' + subs.status = "DISABLED" subs.hook_id = None - subs.status_message = 'disabled successfully' + subs.status_message = "disabled successfully" subs.save() return subs return True except Exception as e: - subs.status = 'CRITICAL' - subs.status_message = 'Cannot unsubscribe subscription: ' + str(e) + subs.status = "CRITICAL" + subs.status_message = "Cannot unsubscribe subscription: " + str(e) subs.save() return False @@ -451,24 +464,24 @@ def add_github_webhook(context: dict, academy_slug: str): """Add one incoming webhook request to log""" if not context or not len(context): - logger.error('Missing webhook payload') + logger.error("Missing webhook payload") return None - if 'action' not in context: - if context['scope'] == 'push': - context['action'] = 'push' + if "action" not in context: + if 
context["scope"] == "push": + context["action"] = "push" else: - logger.error('Missing action param on the webhook payload') + logger.error("Missing action param on the webhook payload") logger.error(context) return None - webhook = RepositoryWebhook(webhook_action=context['action'], scope=context['scope'], academy_slug=academy_slug) + webhook = RepositoryWebhook(webhook_action=context["action"], scope=context["scope"], academy_slug=academy_slug) - if 'repository' in context: - webhook.repository = context['repository']['html_url'] + if "repository" in context: + webhook.repository = context["repository"]["html_url"] webhook.payload = json.dumps(context) - webhook.status = 'PENDING' + webhook.status = "PENDING" webhook.save() return webhook @@ -477,15 +490,15 @@ def add_github_webhook(context: dict, academy_slug: str): def add_stripe_webhook(context: dict) -> StripeEvent: try: event = StripeEvent( - stripe_id=context['id'], - type=context['type'], - status='PENDING', - data=context['data'], - request=context['request'], + stripe_id=context["id"], + type=context["type"], + status="PENDING", + data=context["data"], + request=context["request"], ) event.save() except Exception: - raise ValidationException('Invalid stripe webhook payload', code=400, slug='invalid-stripe-webhook-payload') + raise ValidationException("Invalid stripe webhook payload", code=400, slug="invalid-stripe-webhook-payload") return event diff --git a/breathecode/monitoring/admin.py b/breathecode/monitoring/admin.py index c717d9f58..fd8ed7883 100644 --- a/breathecode/monitoring/admin.py +++ b/breathecode/monitoring/admin.py @@ -25,7 +25,7 @@ from .tasks import async_unsubscribe_repo -@admin.display(description='Run Applications Diagnostic') +@admin.display(description="Run Applications Diagnostic") def test_app(modeladmin, request, queryset): # stay this here for use the poor mocking system from .tasks import monitor_app @@ -38,30 +38,31 @@ class CustomAppModelForm(forms.ModelForm): class Meta: model = Application - fields = '__all__' + fields = "__all__" def __init__(self, *args, **kwargs): super(CustomAppModelForm, self).__init__(*args, **kwargs) - if hasattr(self.instance, 'academy') and self.instance.academy is not None: - self.fields['notify_slack_channel'].queryset = SlackChannel.objects.filter( - team__academy__id=self.instance.academy.id) # or something else + if hasattr(self.instance, "academy") and self.instance.academy is not None: + self.fields["notify_slack_channel"].queryset = SlackChannel.objects.filter( + team__academy__id=self.instance.academy.id + ) # or something else # Register your models here. 
@admin.register(Application) class ApplicationAdmin(admin.ModelAdmin): form = CustomAppModelForm - list_display = ('title', 'current_status', 'academy', 'paused_until', 'status_text') + list_display = ("title", "current_status", "academy", "paused_until", "status_text") actions = [test_app] - list_filter = ['status', 'academy__slug'] - raw_id_fields = ['notify_slack_channel'] + list_filter = ["status", "academy__slug"] + raw_id_fields = ["notify_slack_channel"] def current_status(self, obj): colors = { - 'OPERATIONAL': 'bg-success', - 'CRITICAL': 'bg-error', - 'MINOR': 'bg-warning', - 'LOADING': 'bg-warning', + "OPERATIONAL": "bg-success", + "CRITICAL": "bg-error", + "MINOR": "bg-warning", + "LOADING": "bg-warning", } now = timezone.now() if obj.paused_until is not None and obj.paused_until > now: @@ -70,7 +71,7 @@ def current_status(self, obj): return format_html(f"<span class='badge {colors[obj.status]}'>{obj.status}</span>") -@admin.display(description='Test Endpoint') +@admin.display(description="Test Endpoint") def test_endpoint(modeladmin, request, queryset): # stay this here for use the poor mocking system from .tasks import test_endpoint @@ -79,7 +80,7 @@ def test_endpoint(modeladmin, request, queryset): test_endpoint.delay(end.id) -@admin.display(description='PAUSE for 1 day') +@admin.display(description="PAUSE for 1 day") def pause_for_one_day(modeladmin, request, queryset): for end in queryset.all(): end.paused_until = timezone.now() + timezone.timedelta(days=1) @@ -89,18 +90,18 @@ def pause_for_one_day(modeladmin, request, queryset): # Register your models here. @admin.register(Endpoint) class EndpointAdmin(admin.ModelAdmin): - list_display = ('url', 'current_status', 'test_pattern', 'status_code', 'paused_until', 'last_check') + list_display = ("url", "current_status", "test_pattern", "status_code", "paused_until", "last_check") actions = [test_endpoint, pause_for_one_day] - list_filter = ['status', 'application__title'] + list_filter = ["status", "application__title"] def get_readonly_fields(self, request, obj=None): - return ['status_text'] + return ["status_text"] def current_status(self, obj): colors = { - 'OPERATIONAL': 'bg-success', - 'CRITICAL': 'bg-error', - 'MINOR': 'bg-warning', + "OPERATIONAL": "bg-success", + "CRITICAL": "bg-error", + "MINOR": "bg-warning", } now = timezone.now() if obj.paused_until is not None and obj.paused_until > now: @@ -109,7 +110,7 @@ def current_status(self, obj): return format_html(f"<span class='badge {colors[obj.status]}'>{obj.status}</span>") -@admin.display(description='Run Script') +@admin.display(description="Run Script") def run_single_script(modeladmin, request, queryset): # stay this here for use the poor mocking system from .tasks import execute_scripts @@ -125,34 +126,41 @@ def __init__(self, *args, **kwargs): options = [] dir_path = os.path.dirname(os.path.realpath(__file__)) - files = os.listdir(dir_path + '/scripts') + files = os.listdir(dir_path + "/scripts") for file_name in files: - if '.py' not in file_name: + if ".py" not in file_name: continue doc = file_name - with open(dir_path + '/scripts/' + file_name) as f: + with open(dir_path + "/scripts/" + file_name) as f: doc = ast.get_docstring(ast.parse(f.read())) options.append((file_name[0:-3], doc)) - options.append(('other', 'other')) + options.append(("other", "other")) # timezones = [(x, x) for x in pytz.common_timezones] - self.fields['script_slug'] = forms.ChoiceField(choices=options) + self.fields["script_slug"] = forms.ChoiceField(choices=options) 
@admin.register(MonitorScript) class MonitorScriptAdmin(admin.ModelAdmin): form = CustomForm - list_display = ('script_slug', 'application', 'current_status', 'frequency_delta', 'status_code', 'paused_until', - 'last_run') + list_display = ( + "script_slug", + "application", + "current_status", + "frequency_delta", + "status_code", + "paused_until", + "last_run", + ) actions = [run_single_script] - list_filter = ['status', 'application__title'] + list_filter = ["status", "application__title"] def current_status(self, obj): colors = { - 'OPERATIONAL': 'bg-success', - 'CRITICAL': 'bg-error', - 'FATAL': 'bg-error', # important: this status was deprecated and deleted! - 'MINOR': 'bg-warning', + "OPERATIONAL": "bg-success", + "CRITICAL": "bg-error", + "FATAL": "bg-error", # important: this status was deprecated and deleted! + "MINOR": "bg-warning", } now = timezone.now() if obj.paused_until is not None and obj.paused_until > now: @@ -163,28 +171,28 @@ def current_status(self, obj): @admin.register(CSVDownload) class CSVDownloadAdmin(admin.ModelAdmin): - list_display = ('id', 'name', 'current_status', 'created_at', 'finished_at', 'download') - list_filter = ['academy', 'status'] + list_display = ("id", "name", "current_status", "created_at", "finished_at", "download") + list_filter = ["academy", "status"] def current_status(self, obj): colors = { - 'DONE': 'bg-success', - 'ERROR': 'bg-error', - 'LOADING': 'bg-warning', + "DONE": "bg-success", + "ERROR": "bg-error", + "LOADING": "bg-warning", } return format_html(f"<span class='badge {colors[obj.status]}'>{obj.status}</span>") def download(self, obj): - if obj.status == 'DONE': + if obj.status == "DONE": return format_html(f"<a href='/v1/monitoring/download/{obj.id}?raw=true' target='_blank'>download</span>") - return format_html('nothing to download') + return format_html("nothing to download") @admin.register(CSVUpload) class CSVUploadAdmin(admin.ModelAdmin): - list_display = ('name', 'url', 'status', 'academy', 'hash') - list_filter = ['academy', 'status'] - search_fields = ['name', 'url', 'hash'] + list_display = ("name", "url", "status", "academy", "hash") + list_filter = ["academy", "status"] + search_fields = ["name", "url", "hash"] def delete_subscription(modeladmin, request, queryset): @@ -197,10 +205,10 @@ def delete_subscription(modeladmin, request, queryset): def disable_subscription(modeladmin, request, queryset): # stay this here for use the poor mocking system for subs in queryset.all(): - if subs.hook_id is not None and subs.hook_id != '': + if subs.hook_id is not None and subs.hook_id != "": unsubscribe_repository(subs.id, force_delete=False) else: - subs.status = 'DISABLED' + subs.status = "DISABLED" subs.save() @@ -209,7 +217,7 @@ def activate_subscription(modeladmin, request, queryset): for subs in queryset.all(): try: subscription = subscribe_repository(subs.id) - if subscription.status != 'OPERATIONAL': + if subscription.status != "OPERATIONAL": raise Exception(subscription.status_message) except Exception as e: messages.error(request, str(e)) @@ -218,41 +226,43 @@ def activate_subscription(modeladmin, request, queryset): @admin.register(RepositorySubscription) class RepositorySubscriptionAdmin(admin.ModelAdmin): - list_display = ('id', 'current_status', 'hook_id', 'repo', 'owner', 'shared') - list_filter = ['status', 'owner'] - search_fields = ['repository', 'token', 'hook_id'] - readonly_fields = ['token'] + list_display = ("id", "current_status", "hook_id", "repo", "owner", "shared") + list_filter = ["status", "owner"] + 
search_fields = ["repository", "token", "hook_id"] + readonly_fields = ["token"] actions = [delete_subscription, disable_subscription, activate_subscription] def get_actions(self, request): actions = super(RepositorySubscriptionAdmin, self).get_actions(request) - if 'delete_selected' in actions: - del actions['delete_selected'] + if "delete_selected" in actions: + del actions["delete_selected"] return actions def has_delete_permission(self, request, obj=None): # Return False to remove the "Delete" button from the update form. # You can add additional logic here if you want to conditionally # enable the delete button for certain cases. - if obj and obj.status == 'DISABLED': + if obj and obj.status == "DISABLED": return True return False def repo(self, obj): - return format_html(f""" + return format_html( + f""" <a rel='noopener noreferrer' target='_blank' href='{obj.repository}/settings/hooks'>{obj.repository}</a> - """) + """ + ) def shared(self, obj): - return format_html(''.join([o.name for o in obj.shared_with.all()])) + return format_html("".join([o.name for o in obj.shared_with.all()])) def current_status(self, obj): colors = { - 'OPERATIONAL': 'bg-success', - 'CRITICAL': 'bg-error', - 'DISABLED': 'bg-warning', - None: 'bg-warning', + "OPERATIONAL": "bg-success", + "CRITICAL": "bg-error", + "DISABLED": "bg-warning", + None: "bg-warning", } return format_html(f"<span class='badge {colors[obj.status]}'>{obj.status}</span>") @@ -265,27 +275,27 @@ def process_webhook(modeladmin, request, queryset): @admin.register(RepositoryWebhook) class RepositoryWebhookAdmin(admin.ModelAdmin): - list_display = ('id', 'webhook_action', 'scope', 'current_status', 'run_at', 'academy_slug', 'diff') - list_filter = ['status', 'webhook_action', 'scope', 'academy_slug'] + list_display = ("id", "webhook_action", "scope", "current_status", "run_at", "academy_slug", "diff") + list_filter = ["status", "webhook_action", "scope", "academy_slug"] actions = [process_webhook] def current_status(self, obj): colors = { - 'DONE': 'bg-success', - 'ERROR': 'bg-error', - 'PENDING': 'bg-warning', + "DONE": "bg-success", + "ERROR": "bg-error", + "PENDING": "bg-warning", } return format_html(f"<span class='badge {colors[obj.status]}'>{obj.status}</span>") def diff(self, obj): - label = 'nothing to compare' + label = "nothing to compare" if obj.payload: _payload = json.loads(obj.payload) - if 'compare' in _payload: - label = 'compare' - if 'head_commit' in _payload: - _l = len(_payload['head_commit']['id']) - label = _payload['head_commit']['id'][_l - 8:] + if "compare" in _payload: + label = "compare" + if "head_commit" in _payload: + _l = len(_payload["head_commit"]["id"]) + label = _payload["head_commit"]["id"][_l - 8 :] return format_html(f"<a target='_blank' href='{_payload['compare']}'>{label}</a>") return label @@ -293,15 +303,15 @@ def diff(self, obj): @admin.register(Supervisor) class SupervisorAdmin(admin.ModelAdmin): - list_display = ('task_module', 'task_name', 'delta', 'ran_at') + list_display = ("task_module", "task_name", "delta", "ran_at") list_filter = [] - search_fields = ['task_module', 'task_name'] + search_fields = ["task_module", "task_name"] actions = [] @admin.register(SupervisorIssue) class SupervisorIssueAdmin(admin.ModelAdmin): - list_display = ('supervisor', 'occurrences', 'error', 'ran_at') - list_filter = ['supervisor'] - search_fields = ['supervisor__task_module', 'supervisor__task_name'] + list_display = ("supervisor", "occurrences", "error", "ran_at") + list_filter = ["supervisor"] + 
search_fields = ["supervisor__task_module", "supervisor__task_name"] actions = [] diff --git a/breathecode/monitoring/apps.py b/breathecode/monitoring/apps.py index 80269e938..fbe5a4903 100644 --- a/breathecode/monitoring/apps.py +++ b/breathecode/monitoring/apps.py @@ -5,8 +5,8 @@ class MonitoringConfig(AppConfig): - name = 'breathecode.monitoring' + name = "breathecode.monitoring" def ready(self): - logger.debug('Loading monitoring.receivers') + logger.debug("Loading monitoring.receivers") from . import receivers # noqa: F401 diff --git a/breathecode/monitoring/decorators.py b/breathecode/monitoring/decorators.py index d909d9bc7..9bbf5f2d5 100644 --- a/breathecode/monitoring/decorators.py +++ b/breathecode/monitoring/decorators.py @@ -9,14 +9,14 @@ class WebhookTask(Task): - pending_status = 'pending...' + pending_status = "pending..." def initialize(self, webhook_id): webhook = RepositoryWebhook.objects.filter(id=webhook_id).first() if webhook is None: - raise Exception(f'Github Webhook with id {webhook_id} not found') - webhook.status = 'PENDING' + raise Exception(f"Github Webhook with id {webhook_id} not found") + webhook.status = "PENDING" webhook.status_text = self.pending_status webhook.save() return webhook @@ -33,19 +33,19 @@ def __call__(self, *args, **kwargs): if isinstance(_webhook, RepositoryWebhook): webhook = _webhook - webhook.status = 'DONE' + webhook.status = "DONE" else: - raise Exception('Error while running async webhook task: type != ' + str(type(_webhook))) + raise Exception("Error while running async webhook task: type != " + str(type(_webhook))) except Exception as ex: - webhook.status = 'ERROR' + webhook.status = "ERROR" webhook.status_text = str(ex)[:255] logger.exception(ex) webhook.run_at = timezone.now() if webhook.status_text == self.pending_status: - webhook.status_text = 'finished' + webhook.status_text = "finished" webhook.save() - logger.debug(f'Github Webook processing status: {webhook.status}') + logger.debug(f"Github Webook processing status: {webhook.status}") return webhook.status diff --git a/breathecode/monitoring/management/commands/garbage_collect_monitoring.py b/breathecode/monitoring/management/commands/garbage_collect_monitoring.py index 1a808263f..e017dbfe2 100644 --- a/breathecode/monitoring/management/commands/garbage_collect_monitoring.py +++ b/breathecode/monitoring/management/commands/garbage_collect_monitoring.py @@ -5,7 +5,7 @@ class Command(BaseCommand): - help = 'Delete logs and other garbage' + help = "Delete logs and other garbage" def handle(self, *args, **options): diff --git a/breathecode/monitoring/management/commands/monitor.py b/breathecode/monitoring/management/commands/monitor.py index 96a4d8eb9..64fa6a630 100644 --- a/breathecode/monitoring/management/commands/monitor.py +++ b/breathecode/monitoring/management/commands/monitor.py @@ -17,49 +17,51 @@ def __init__(self, expression, **extra): class Command(BaseCommand): - help = 'Sync academies from old breathecode' + help = "Sync academies from old breathecode" def add_arguments(self, parser): - parser.add_argument('entity', type=str) + parser.add_argument("entity", type=str) parser.add_argument( - '--override', - action='store_true', - help='Delete and add again', + "--override", + action="store_true", + help="Delete and add again", ) - parser.add_argument('--limit', action='store', dest='limit', type=int, default=0, help='How many to import') + parser.add_argument("--limit", action="store", dest="limit", type=int, default=0, help="How many to import") def handle(self, 
*args, **options): try: - func = getattr(self, options['entity'], None) + func = getattr(self, options["entity"], None) except TypeError: self.stderr.write(self.style.ERROR(f'Sync method for {options["entity"]} no Found!')) return except KeyError: - self.stderr.write(self.style.ERROR('Entity arguments is not set')) + self.stderr.write(self.style.ERROR("Entity arguments is not set")) return if not callable(func): - self.stderr.write(self.style.ERROR('Entity not found')) + self.stderr.write(self.style.ERROR("Entity not found")) return func(options) def apps(self, options): - apps = Application.objects.all().values_list('id', flat=True) + apps = Application.objects.all().values_list("id", flat=True) for app_id in apps: monitor_app.delay(app_id) - self.stdout.write(self.style.SUCCESS(f'Enqueued {len(apps)} apps for diagnostic')) + self.stdout.write(self.style.SUCCESS(f"Enqueued {len(apps)} apps for diagnostic")) def scripts(self, options): now = timezone.now() - scripts = MonitorScript.objects\ - .filter(Q(last_run__isnull=True) | Q(last_run__lte= now - F('frequency_delta')))\ - .exclude(application__paused_until__isnull=False, application__paused_until__gte=now)\ - .exclude(paused_until__isnull=False, paused_until__gte=now).values_list('id', flat=True) + scripts = ( + MonitorScript.objects.filter(Q(last_run__isnull=True) | Q(last_run__lte=now - F("frequency_delta"))) + .exclude(application__paused_until__isnull=False, application__paused_until__gte=now) + .exclude(paused_until__isnull=False, paused_until__gte=now) + .values_list("id", flat=True) + ) for script_id in scripts: execute_scripts.delay(script_id) - self.stdout.write(self.style.SUCCESS(f'Enqueued {len(scripts)} scripts for execution')) + self.stdout.write(self.style.SUCCESS(f"Enqueued {len(scripts)} scripts for execution")) diff --git a/breathecode/monitoring/management/commands/run_script.py b/breathecode/monitoring/management/commands/run_script.py index 3807c4d9d..6a1db3690 100644 --- a/breathecode/monitoring/management/commands/run_script.py +++ b/breathecode/monitoring/management/commands/run_script.py @@ -5,16 +5,16 @@ class Command(BaseCommand): - help = 'Sync academies from old breathecode' + help = "Sync academies from old breathecode" def add_arguments(self, parser): - parser.add_argument('path', type=str) + parser.add_argument("path", type=str) def mock_application(self): - academy = Academy.objects.filter(slug='fake-sample-academy-delete-me-wililii').first() + academy = Academy.objects.filter(slug="fake-sample-academy-delete-me-wililii").first() if academy is None: - academy = Academy(slug='fake-sample-academy-delete-me-wililii') + academy = Academy(slug="fake-sample-academy-delete-me-wililii") academy.save() if academy.application_set.count() == 0: app = Application(academy=academy) @@ -23,32 +23,34 @@ def mock_application(self): return academy.application_set.first() def handle(self, *args, **options): - if options['path'] is None: - print('Please specify the script path') - script_slug = options['path'].split('.')[0] + if options["path"] is None: + print("Please specify the script path") + script_slug = options["path"].split(".")[0] application = self.mock_application() - print('Attempting to run script: ' + self.style.WARNING(script_slug)) - print('Academy for this test is set to: ' + self.style.WARNING(application.academy.slug)) + print("Attempting to run script: " + self.style.WARNING(script_slug)) + print("Academy for this test is set to: " + self.style.WARNING(application.academy.slug)) script = 
MonitorScript.objects.filter(script_slug=script_slug).first() if script is None: script = MonitorScript(script_slug=script_slug, application=application) result = run_script(script) - self.stdout.write(self.style.SUCCESS('The script was tested with the following outcome:')) + self.stdout.write(self.style.SUCCESS("The script was tested with the following outcome:")) - stdout = result['text'] - del result['text'] - del result['slack_payload'] + stdout = result["text"] + del result["text"] + del result["slack_payload"] - if 'details' in result: - del result['details'] + if "details" in result: + del result["details"] - print('Details: ', result) - print('\nStdout: ') + print("Details: ", result) + print("\nStdout: ") print(stdout) - if 'btn' in result and result['btn'] is not None: + if "btn" in result and result["btn"] is not None: self.stdout.write( - self.style.SUCCESS('The script has this call to action, ' + result['btn']['label'] + ': ' + - result['btn']['url'])) + self.style.SUCCESS( + "The script has this call to action, " + result["btn"]["label"] + ": " + result["btn"]["url"] + ) + ) diff --git a/breathecode/monitoring/management/commands/supervisor.py b/breathecode/monitoring/management/commands/supervisor.py index 548360875..c098afb53 100644 --- a/breathecode/monitoring/management/commands/supervisor.py +++ b/breathecode/monitoring/management/commands/supervisor.py @@ -9,7 +9,7 @@ class Command(BaseCommand): - help = 'Run all supervisors' + help = "Run all supervisors" def handle(self, *args, **options): self.utc_now = timezone.now() @@ -17,12 +17,14 @@ def handle(self, *args, **options): SupervisorIssue.objects.filter(ran_at__lte=self.utc_now - timedelta(days=7)).delete() for fn_module, fn_name, delta in paths: - Supervisor.objects.get_or_create(task_module=fn_module, - task_name=fn_name, - defaults={ - 'delta': delta, - 'ran_at': None, - }) + Supervisor.objects.get_or_create( + task_module=fn_module, + task_name=fn_name, + defaults={ + "delta": delta, + "ran_at": None, + }, + ) self.run_supervisors() self.fix_issues() @@ -34,7 +36,8 @@ def run_supervisors(self): if supervisor.ran_at is None or self.utc_now - supervisor.delta > supervisor.ran_at: run_supervisor.delay(supervisor.id) self.stdout.write( - self.style.SUCCESS(f'Supervisor {supervisor.task_module}.{supervisor.task_name} scheduled')) + self.style.SUCCESS(f"Supervisor {supervisor.task_module}.{supervisor.task_name} scheduled") + ) def fix_issues(self): issues = SupervisorIssue.objects.filter(fixed=None, attempts__lt=3) @@ -42,4 +45,6 @@ def fix_issues(self): fix_issue.delay(issue.id) self.stdout.write( self.style.SUCCESS( - f'Issue {issue.supervisor.task_module}.{issue.supervisor.task_name} scheduled to be fixed')) + f"Issue {issue.supervisor.task_module}.{issue.supervisor.task_name} scheduled to be fixed" + ) + ) diff --git a/breathecode/monitoring/migrations/0001_initial.py b/breathecode/monitoring/migrations/0001_initial.py index 1efb8ff3f..96612b545 100644 --- a/breathecode/monitoring/migrations/0001_initial.py +++ b/breathecode/monitoring/migrations/0001_initial.py @@ -11,36 +11,42 @@ class Migration(migrations.Migration): operations = [ migrations.CreateModel( - name='Application', + name="Application", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('title', models.CharField(max_length=100)), - ('status', - models.CharField(choices=[('OPERATIONAL', 'Operational'), ('MINOR', 'Minor'), - ('CRITICAL', 'Critical')], - default='OPERATIONAL', - 
max_length=20)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("title", models.CharField(max_length=100)), + ( + "status", + models.CharField( + choices=[("OPERATIONAL", "Operational"), ("MINOR", "Minor"), ("CRITICAL", "Critical")], + default="OPERATIONAL", + max_length=20, + ), + ), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), ], ), migrations.CreateModel( - name='Endpoint', + name="Endpoint", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('url', models.CharField(max_length=255)), - ('test_pattern', models.CharField(max_length=100)), - ('frequency_in_minutes', models.FloatField(default=0)), - ('status_code', models.FloatField(default=0)), - ('response_text', models.TextField()), - ('last_check', models.DateTimeField()), - ('status', - models.CharField(choices=[('OPERATIONAL', 'Operational'), ('MINOR', 'Minor'), - ('CRITICAL', 'Critical')], - default='OPERATIONAL', - max_length=20)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("url", models.CharField(max_length=255)), + ("test_pattern", models.CharField(max_length=100)), + ("frequency_in_minutes", models.FloatField(default=0)), + ("status_code", models.FloatField(default=0)), + ("response_text", models.TextField()), + ("last_check", models.DateTimeField()), + ( + "status", + models.CharField( + choices=[("OPERATIONAL", "Operational"), ("MINOR", "Minor"), ("CRITICAL", "Critical")], + default="OPERATIONAL", + max_length=20, + ), + ), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), ], ), ] diff --git a/breathecode/monitoring/migrations/0002_auto_20201021_0027.py b/breathecode/monitoring/migrations/0002_auto_20201021_0027.py index 085fc7705..552dbf6ab 100644 --- a/breathecode/monitoring/migrations/0002_auto_20201021_0027.py +++ b/breathecode/monitoring/migrations/0002_auto_20201021_0027.py @@ -7,22 +7,23 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0011_auto_20201006_0058'), - ('monitoring', '0001_initial'), + ("admissions", "0011_auto_20201006_0058"), + ("monitoring", "0001_initial"), ] operations = [ migrations.AddField( - model_name='application', - name='academy', - field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to='admissions.academy'), + model_name="application", + name="academy", + field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to="admissions.academy"), preserve_default=False, ), migrations.AddField( - model_name='endpoint', - name='application', - field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, - to='monitoring.application'), + model_name="endpoint", + name="application", + field=models.ForeignKey( + default=1, on_delete=django.db.models.deletion.CASCADE, to="monitoring.application" + ), preserve_default=False, ), ] diff --git a/breathecode/monitoring/migrations/0003_auto_20201021_2100.py b/breathecode/monitoring/migrations/0003_auto_20201021_2100.py index 3529c5612..72dbc5a60 100644 --- a/breathecode/monitoring/migrations/0003_auto_20201021_2100.py +++ 
b/breathecode/monitoring/migrations/0003_auto_20201021_2100.py @@ -6,37 +6,35 @@ class Migration(migrations.Migration): dependencies = [ - ('monitoring', '0002_auto_20201021_0027'), + ("monitoring", "0002_auto_20201021_0027"), ] operations = [ migrations.AlterField( - model_name='endpoint', - name='frequency_in_minutes', + model_name="endpoint", + name="frequency_in_minutes", field=models.FloatField(default=30), ), migrations.AlterField( - model_name='endpoint', - name='last_check', + model_name="endpoint", + name="last_check", field=models.DateTimeField(blank=True, default=None, null=True), ), migrations.AlterField( - model_name='endpoint', - name='response_text', + model_name="endpoint", + name="response_text", field=models.TextField(blank=True, default=None, null=True), ), migrations.AlterField( - model_name='endpoint', - name='status_code', + model_name="endpoint", + name="status_code", field=models.FloatField(default=200), ), migrations.AlterField( - model_name='endpoint', - name='test_pattern', - field=models.CharField(blank=True, - default=None, - help_text='If left blank sys will only ping', - max_length=100, - null=True), + model_name="endpoint", + name="test_pattern", + field=models.CharField( + blank=True, default=None, help_text="If left blank sys will only ping", max_length=100, null=True + ), ), ] diff --git a/breathecode/monitoring/migrations/0004_auto_20201021_2126.py b/breathecode/monitoring/migrations/0004_auto_20201021_2126.py index e09a4cc3d..51eb11f5a 100644 --- a/breathecode/monitoring/migrations/0004_auto_20201021_2126.py +++ b/breathecode/monitoring/migrations/0004_auto_20201021_2126.py @@ -6,18 +6,18 @@ class Migration(migrations.Migration): dependencies = [ - ('monitoring', '0003_auto_20201021_2100'), + ("monitoring", "0003_auto_20201021_2100"), ] operations = [ migrations.AddField( - model_name='application', - name='status_text', + model_name="application", + name="status_text", field=models.CharField(blank=True, default=None, max_length=255, null=True), ), migrations.AddField( - model_name='endpoint', - name='status_text', + model_name="endpoint", + name="status_text", field=models.CharField(blank=True, default=None, max_length=255, null=True), ), ] diff --git a/breathecode/monitoring/migrations/0005_auto_20201021_2152.py b/breathecode/monitoring/migrations/0005_auto_20201021_2152.py index 20ca67951..ac9719ec1 100644 --- a/breathecode/monitoring/migrations/0005_auto_20201021_2152.py +++ b/breathecode/monitoring/migrations/0005_auto_20201021_2152.py @@ -6,18 +6,18 @@ class Migration(migrations.Migration): dependencies = [ - ('monitoring', '0004_auto_20201021_2126'), + ("monitoring", "0004_auto_20201021_2126"), ] operations = [ migrations.AddField( - model_name='application', - name='notify_email', + model_name="application", + name="notify_email", field=models.CharField(default=None, max_length=255, null=True), ), migrations.AddField( - model_name='endpoint', - name='severity_level', + model_name="endpoint", + name="severity_level", field=models.IntegerField(default=0), ), ] diff --git a/breathecode/monitoring/migrations/0006_auto_20201222_0127.py b/breathecode/monitoring/migrations/0006_auto_20201222_0127.py index caad16ea8..4d168a7c3 100644 --- a/breathecode/monitoring/migrations/0006_auto_20201222_0127.py +++ b/breathecode/monitoring/migrations/0006_auto_20201222_0127.py @@ -7,48 +7,50 @@ class Migration(migrations.Migration): dependencies = [ - ('notify', '0007_auto_20201111_2218'), - ('monitoring', '0005_auto_20201021_2152'), + ("notify", 
"0007_auto_20201111_2218"), + ("monitoring", "0005_auto_20201021_2152"), ] operations = [ migrations.AddField( - model_name='application', - name='notify_slack_channel', - field=models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to='notify.slackchannel'), + model_name="application", + name="notify_slack_channel", + field=models.ForeignKey( + blank=True, + default=None, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to="notify.slackchannel", + ), ), migrations.AddField( - model_name='application', - name='paused_until', - field=models.DateTimeField(blank=True, - default=None, - help_text='if you want to stop checking for a period of time', - null=True), + model_name="application", + name="paused_until", + field=models.DateTimeField( + blank=True, default=None, help_text="if you want to stop checking for a period of time", null=True + ), ), migrations.AddField( - model_name='endpoint', - name='paused_until', - field=models.DateTimeField(blank=True, - default=None, - help_text='if you want to stop checking for a period of time', - null=True), + model_name="endpoint", + name="paused_until", + field=models.DateTimeField( + blank=True, default=None, help_text="if you want to stop checking for a period of time", null=True + ), ), migrations.AddField( - model_name='endpoint', - name='special_status_text', - field=models.CharField(blank=True, - default=None, - help_text='Add a message for people to see when is down', - max_length=255, - null=True), + model_name="endpoint", + name="special_status_text", + field=models.CharField( + blank=True, + default=None, + help_text="Add a message for people to see when is down", + max_length=255, + null=True, + ), ), migrations.AlterField( - model_name='endpoint', - name='status_text', + model_name="endpoint", + name="status_text", field=models.CharField(blank=True, default=None, editable=False, max_length=255, null=True), ), ] diff --git a/breathecode/monitoring/migrations/0007_auto_20201222_0949.py b/breathecode/monitoring/migrations/0007_auto_20201222_0949.py index 2eabac0d8..b3b7fe2cb 100644 --- a/breathecode/monitoring/migrations/0007_auto_20201222_0949.py +++ b/breathecode/monitoring/migrations/0007_auto_20201222_0949.py @@ -7,25 +7,26 @@ class Migration(migrations.Migration): dependencies = [ - ('notify', '0008_remove_slackteam_credentials'), - ('monitoring', '0006_auto_20201222_0127'), + ("notify", "0008_remove_slackteam_credentials"), + ("monitoring", "0006_auto_20201222_0127"), ] operations = [ migrations.AlterField( - model_name='application', - name='notify_email', + model_name="application", + name="notify_email", field=models.CharField(blank=True, default=None, max_length=255, null=True), ), migrations.AlterField( - model_name='application', - name='notify_slack_channel', + model_name="application", + name="notify_slack_channel", field=models.ForeignKey( blank=True, default=None, - help_text='Please pick an academy first to be able to see the available slack channels to notify', + help_text="Please pick an academy first to be able to see the available slack channels to notify", null=True, on_delete=django.db.models.deletion.SET_NULL, - to='notify.slackchannel'), + to="notify.slackchannel", + ), ), ] diff --git a/breathecode/monitoring/migrations/0008_monitorscript.py b/breathecode/monitoring/migrations/0008_monitorscript.py index 8237d3d0f..3c22c829c 100644 --- a/breathecode/monitoring/migrations/0008_monitorscript.py +++ 
b/breathecode/monitoring/migrations/0008_monitorscript.py @@ -7,44 +7,60 @@ class Migration(migrations.Migration): dependencies = [ - ('monitoring', '0007_auto_20201222_0949'), + ("monitoring", "0007_auto_20201222_0949"), ] operations = [ migrations.CreateModel( - name='MonitorScript', + name="MonitorScript", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('script_slug', models.SlugField(blank=True, default=None, null=True)), - ('script_body', models.TextField(blank=True, default=None, null=True)), - ('frequency_delta', - models.DurationField(default='00:30:00', - help_text='How long to wait for the next execution, defaults to 30 minutes')), - ('status_code', models.FloatField(default=200)), - ('severity_level', models.IntegerField(default=0)), - ('status_text', models.CharField(blank=True, default=None, editable=False, max_length=255, null=True)), - ('special_status_text', - models.CharField(blank=True, - default=None, - help_text='Add a message for people to see when is down', - max_length=255, - null=True)), - ('response_text', models.TextField(blank=True, default=None, null=True)), - ('last_run', models.DateTimeField(blank=True, default=None, null=True)), - ('status', - models.CharField(choices=[('OPERATIONAL', 'Operational'), ('MINOR', 'Minor'), - ('CRITICAL', 'Critical')], - default='OPERATIONAL', - max_length=20)), - ('paused_until', - models.DateTimeField(blank=True, - default=None, - help_text='if you want to stop checking for a period of time', - null=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('application', - models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='monitoring.application')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("script_slug", models.SlugField(blank=True, default=None, null=True)), + ("script_body", models.TextField(blank=True, default=None, null=True)), + ( + "frequency_delta", + models.DurationField( + default="00:30:00", help_text="How long to wait for the next execution, defaults to 30 minutes" + ), + ), + ("status_code", models.FloatField(default=200)), + ("severity_level", models.IntegerField(default=0)), + ("status_text", models.CharField(blank=True, default=None, editable=False, max_length=255, null=True)), + ( + "special_status_text", + models.CharField( + blank=True, + default=None, + help_text="Add a message for people to see when is down", + max_length=255, + null=True, + ), + ), + ("response_text", models.TextField(blank=True, default=None, null=True)), + ("last_run", models.DateTimeField(blank=True, default=None, null=True)), + ( + "status", + models.CharField( + choices=[("OPERATIONAL", "Operational"), ("MINOR", "Minor"), ("CRITICAL", "Critical")], + default="OPERATIONAL", + max_length=20, + ), + ), + ( + "paused_until", + models.DateTimeField( + blank=True, + default=None, + help_text="if you want to stop checking for a period of time", + null=True, + ), + ), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ( + "application", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="monitoring.application"), + ), ], ), ] diff --git a/breathecode/monitoring/migrations/0009_auto_20210220_1651.py b/breathecode/monitoring/migrations/0009_auto_20210220_1651.py index 51c29dbf3..4ae2ad890 100644 --- 
a/breathecode/monitoring/migrations/0009_auto_20210220_1651.py +++ b/breathecode/monitoring/migrations/0009_auto_20210220_1651.py @@ -6,32 +6,50 @@ class Migration(migrations.Migration): dependencies = [ - ('monitoring', '0008_monitorscript'), + ("monitoring", "0008_monitorscript"), ] operations = [ migrations.AlterField( - model_name='application', - name='status', - field=models.CharField(choices=[('LOADING', 'Loading'), ('OPERATIONAL', 'Operational'), ('MINOR', 'Minor'), - ('CRITICAL', 'Critical')], - default='OPERATIONAL', - max_length=20), + model_name="application", + name="status", + field=models.CharField( + choices=[ + ("LOADING", "Loading"), + ("OPERATIONAL", "Operational"), + ("MINOR", "Minor"), + ("CRITICAL", "Critical"), + ], + default="OPERATIONAL", + max_length=20, + ), ), migrations.AlterField( - model_name='endpoint', - name='status', - field=models.CharField(choices=[('LOADING', 'Loading'), ('OPERATIONAL', 'Operational'), ('MINOR', 'Minor'), - ('CRITICAL', 'Critical')], - default='OPERATIONAL', - max_length=20), + model_name="endpoint", + name="status", + field=models.CharField( + choices=[ + ("LOADING", "Loading"), + ("OPERATIONAL", "Operational"), + ("MINOR", "Minor"), + ("CRITICAL", "Critical"), + ], + default="OPERATIONAL", + max_length=20, + ), ), migrations.AlterField( - model_name='monitorscript', - name='status', - field=models.CharField(choices=[('LOADING', 'Loading'), ('OPERATIONAL', 'Operational'), ('MINOR', 'Minor'), - ('CRITICAL', 'Critical')], - default='OPERATIONAL', - max_length=20), + model_name="monitorscript", + name="status", + field=models.CharField( + choices=[ + ("LOADING", "Loading"), + ("OPERATIONAL", "Operational"), + ("MINOR", "Minor"), + ("CRITICAL", "Critical"), + ], + default="OPERATIONAL", + max_length=20, + ), ), ] diff --git a/breathecode/monitoring/migrations/0009_auto_20210225_0142.py b/breathecode/monitoring/migrations/0009_auto_20210225_0142.py index d9d880d9c..21c4a1213 100644 --- a/breathecode/monitoring/migrations/0009_auto_20210225_0142.py +++ b/breathecode/monitoring/migrations/0009_auto_20210225_0142.py @@ -6,32 +6,50 @@ class Migration(migrations.Migration): dependencies = [ - ('monitoring', '0008_monitorscript'), + ("monitoring", "0008_monitorscript"), ] operations = [ migrations.AlterField( - model_name='application', - name='status', - field=models.CharField(choices=[('LOADING', 'Loading'), ('OPERATIONAL', 'Operational'), ('MINOR', 'Minor'), - ('CRITICAL', 'Critical')], - default='OPERATIONAL', - max_length=20), + model_name="application", + name="status", + field=models.CharField( + choices=[ + ("LOADING", "Loading"), + ("OPERATIONAL", "Operational"), + ("MINOR", "Minor"), + ("CRITICAL", "Critical"), + ], + default="OPERATIONAL", + max_length=20, + ), ), migrations.AlterField( - model_name='endpoint', - name='status', - field=models.CharField(choices=[('LOADING', 'Loading'), ('OPERATIONAL', 'Operational'), ('MINOR', 'Minor'), - ('CRITICAL', 'Critical')], - default='OPERATIONAL', - max_length=20), + model_name="endpoint", + name="status", + field=models.CharField( + choices=[ + ("LOADING", "Loading"), + ("OPERATIONAL", "Operational"), + ("MINOR", "Minor"), + ("CRITICAL", "Critical"), + ], + default="OPERATIONAL", + max_length=20, + ), ), migrations.AlterField( - model_name='monitorscript', - name='status', - field=models.CharField(choices=[('LOADING', 'Loading'), ('OPERATIONAL', 'Operational'), ('MINOR', 'Minor'), - ('CRITICAL', 'Critical')], - default='OPERATIONAL', - max_length=20), + model_name="monitorscript", + 
name="status", + field=models.CharField( + choices=[ + ("LOADING", "Loading"), + ("OPERATIONAL", "Operational"), + ("MINOR", "Minor"), + ("CRITICAL", "Critical"), + ], + default="OPERATIONAL", + max_length=20, + ), ), ] diff --git a/breathecode/monitoring/migrations/0010_merge_20210302_0120.py b/breathecode/monitoring/migrations/0010_merge_20210302_0120.py index 085d768bf..eb9efc056 100644 --- a/breathecode/monitoring/migrations/0010_merge_20210302_0120.py +++ b/breathecode/monitoring/migrations/0010_merge_20210302_0120.py @@ -6,8 +6,8 @@ class Migration(migrations.Migration): dependencies = [ - ('monitoring', '0009_auto_20210225_0142'), - ('monitoring', '0009_auto_20210220_1651'), + ("monitoring", "0009_auto_20210225_0142"), + ("monitoring", "0009_auto_20210220_1651"), ] operations = [] diff --git a/breathecode/monitoring/migrations/0011_auto_20210306_0548.py b/breathecode/monitoring/migrations/0011_auto_20210306_0548.py index 84cfef6ae..8a05f94f5 100644 --- a/breathecode/monitoring/migrations/0011_auto_20210306_0548.py +++ b/breathecode/monitoring/migrations/0011_auto_20210306_0548.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('monitoring', '0010_merge_20210302_0120'), + ("monitoring", "0010_merge_20210302_0120"), ] operations = [ migrations.AlterField( - model_name='endpoint', - name='status_code', + model_name="endpoint", + name="status_code", field=models.IntegerField(default=200), ), ] diff --git a/breathecode/monitoring/migrations/0012_auto_20210312_0618.py b/breathecode/monitoring/migrations/0012_auto_20210312_0618.py index 49331abab..5937e9b36 100644 --- a/breathecode/monitoring/migrations/0012_auto_20210312_0618.py +++ b/breathecode/monitoring/migrations/0012_auto_20210312_0618.py @@ -7,19 +7,21 @@ class Migration(migrations.Migration): dependencies = [ - ('monitoring', '0011_auto_20210306_0548'), + ("monitoring", "0011_auto_20210306_0548"), ] operations = [ migrations.AlterField( - model_name='monitorscript', - name='frequency_delta', - field=models.DurationField(default=datetime.timedelta(seconds=1800), - help_text='How long to wait for the next execution, defaults to 30 minutes'), + model_name="monitorscript", + name="frequency_delta", + field=models.DurationField( + default=datetime.timedelta(seconds=1800), + help_text="How long to wait for the next execution, defaults to 30 minutes", + ), ), migrations.AlterField( - model_name='monitorscript', - name='status_code', + model_name="monitorscript", + name="status_code", field=models.IntegerField(default=200), ), ] diff --git a/breathecode/monitoring/migrations/0013_auto_20210923_0305.py b/breathecode/monitoring/migrations/0013_auto_20210923_0305.py index a7ebddc0b..dc5488a5c 100644 --- a/breathecode/monitoring/migrations/0013_auto_20210923_0305.py +++ b/breathecode/monitoring/migrations/0013_auto_20210923_0305.py @@ -6,28 +6,26 @@ class Migration(migrations.Migration): dependencies = [ - ('monitoring', '0012_auto_20210312_0618'), + ("monitoring", "0012_auto_20210312_0618"), ] operations = [ migrations.AddField( - model_name='monitorscript', - name='notify_email', + model_name="monitorscript", + name="notify_email", field=models.CharField( blank=True, default=None, - help_text= - 'Only specify if need to override the application.notify_email, you can add many comma separated.', + help_text="Only specify if need to override the application.notify_email, you can add many comma separated.", max_length=255, - null=True), + null=True, + ), ), migrations.AlterField( - model_name='application', - 
name='notify_email', - field=models.CharField(blank=True, - default=None, - help_text='Comma separated list of emails', - max_length=255, - null=True), + model_name="application", + name="notify_email", + field=models.CharField( + blank=True, default=None, help_text="Comma separated list of emails", max_length=255, null=True + ), ), ] diff --git a/breathecode/monitoring/migrations/0014_csvdownload.py b/breathecode/monitoring/migrations/0014_csvdownload.py index 1c73153c1..b1ac27c15 100644 --- a/breathecode/monitoring/migrations/0014_csvdownload.py +++ b/breathecode/monitoring/migrations/0014_csvdownload.py @@ -7,25 +7,29 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0031_academy_icon_url'), - ('monitoring', '0013_auto_20210923_0305'), + ("admissions", "0031_academy_icon_url"), + ("monitoring", "0013_auto_20210923_0305"), ] operations = [ migrations.CreateModel( - name='CSVDownload', + name="CSVDownload", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('name', models.CharField(max_length=255)), - ('url', models.URLField()), - ('status', - models.CharField(choices=[('LOADING', 'Loading'), ('ERROR', 'Error'), ('DONE', 'Done')], - default='LOADING', - max_length=20)), - ('status_message', models.TextField(blank=True, default=None, null=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('finished_at', models.DateTimeField(auto_now=True)), - ('academy', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='admissions.academy')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("name", models.CharField(max_length=255)), + ("url", models.URLField()), + ( + "status", + models.CharField( + choices=[("LOADING", "Loading"), ("ERROR", "Error"), ("DONE", "Done")], + default="LOADING", + max_length=20, + ), + ), + ("status_message", models.TextField(blank=True, default=None, null=True)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("finished_at", models.DateTimeField(auto_now=True)), + ("academy", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="admissions.academy")), ], ), ] diff --git a/breathecode/monitoring/migrations/0015_alter_csvdownload_academy.py b/breathecode/monitoring/migrations/0015_alter_csvdownload_academy.py index 51f6e59c1..f634114a9 100644 --- a/breathecode/monitoring/migrations/0015_alter_csvdownload_academy.py +++ b/breathecode/monitoring/migrations/0015_alter_csvdownload_academy.py @@ -7,18 +7,20 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0031_academy_icon_url'), - ('monitoring', '0014_csvdownload'), + ("admissions", "0031_academy_icon_url"), + ("monitoring", "0014_csvdownload"), ] operations = [ migrations.AlterField( - model_name='csvdownload', - name='academy', - field=models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='admissions.academy'), + model_name="csvdownload", + name="academy", + field=models.ForeignKey( + blank=True, + default=None, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="admissions.academy", + ), ), ] diff --git a/breathecode/monitoring/migrations/0016_csvupload.py b/breathecode/monitoring/migrations/0016_csvupload.py index 4c228b7fc..56d8b99b4 100644 --- a/breathecode/monitoring/migrations/0016_csvupload.py +++ b/breathecode/monitoring/migrations/0016_csvupload.py @@ -7,32 +7,40 @@ class Migration(migrations.Migration): dependencies = [ 
- ('admissions', '0041_cohortuser_watching'), - ('monitoring', '0015_alter_csvdownload_academy'), + ("admissions", "0041_cohortuser_watching"), + ("monitoring", "0015_alter_csvdownload_academy"), ] operations = [ migrations.CreateModel( - name='CSVUpload', + name="CSVUpload", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('name', models.CharField(max_length=255)), - ('url', models.URLField()), - ('status', - models.CharField(choices=[('PENDING', 'Pending'), ('ERROR', 'Error'), ('DONE', 'Done')], - default='PENDING', - max_length=20)), - ('status_message', models.TextField(blank=True, default=None, null=True)), - ('log', models.CharField(max_length=50)), - ('hash', models.CharField(max_length=64)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('finished_at', models.DateTimeField(auto_now=True)), - ('academy', - models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='admissions.academy')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("name", models.CharField(max_length=255)), + ("url", models.URLField()), + ( + "status", + models.CharField( + choices=[("PENDING", "Pending"), ("ERROR", "Error"), ("DONE", "Done")], + default="PENDING", + max_length=20, + ), + ), + ("status_message", models.TextField(blank=True, default=None, null=True)), + ("log", models.CharField(max_length=50)), + ("hash", models.CharField(max_length=64)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("finished_at", models.DateTimeField(auto_now=True)), + ( + "academy", + models.ForeignKey( + blank=True, + default=None, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="admissions.academy", + ), + ), ], ), ] diff --git a/breathecode/monitoring/migrations/0017_repositorysubscription_repositorywebhook.py b/breathecode/monitoring/migrations/0017_repositorysubscription_repositorywebhook.py index 60092a783..5226d360c 100644 --- a/breathecode/monitoring/migrations/0017_repositorysubscription_repositorywebhook.py +++ b/breathecode/monitoring/migrations/0017_repositorysubscription_repositorywebhook.py @@ -7,55 +7,75 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0052_alter_cohort_kickoff_date'), - ('monitoring', '0016_csvupload'), + ("admissions", "0052_alter_cohort_kickoff_date"), + ("monitoring", "0016_csvupload"), ] operations = [ migrations.CreateModel( - name='RepositoryWebhook', + name="RepositoryWebhook", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('webhook_action', - models.CharField(blank=True, - default=None, - help_text='The specific action that was triggered on github for this webhook', - max_length=100, - null=True)), - ('scope', - models.CharField( - blank=True, - default=None, - help_text= - 'The specific entity that triggered this webhook, for example: issues, issues_comment, etc.', - max_length=100, - null=True)), - ('run_at', - models.DateTimeField(blank=True, default=None, help_text='Date/time that the webhook ran', null=True)), - ('repository', models.URLField(help_text='Github repo where the event occured', max_length=255)), - ('payload', - models.JSONField(help_text='Info that came on the request, it varies depending on the webhook type')), - ('academy_slug', models.SlugField()), - ('status', - models.CharField(choices=[('PENDING', 'Pending'), ('DONE', 'Done'), ('ERROR', 'Error')], - 
default='PENDING', - max_length=9)), - ('status_text', models.CharField(blank=True, default=None, max_length=255, null=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "webhook_action", + models.CharField( + blank=True, + default=None, + help_text="The specific action that was triggered on github for this webhook", + max_length=100, + null=True, + ), + ), + ( + "scope", + models.CharField( + blank=True, + default=None, + help_text="The specific entity that triggered this webhook, for example: issues, issues_comment, etc.", + max_length=100, + null=True, + ), + ), + ( + "run_at", + models.DateTimeField( + blank=True, default=None, help_text="Date/time that the webhook ran", null=True + ), + ), + ("repository", models.URLField(help_text="Github repo where the event occured", max_length=255)), + ( + "payload", + models.JSONField( + help_text="Info that came on the request, it varies depending on the webhook type" + ), + ), + ("academy_slug", models.SlugField()), + ( + "status", + models.CharField( + choices=[("PENDING", "Pending"), ("DONE", "Done"), ("ERROR", "Error")], + default="PENDING", + max_length=9, + ), + ), + ("status_text", models.CharField(blank=True, default=None, max_length=255, null=True)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), ], ), migrations.CreateModel( - name='RepositorySubscription', + name="RepositorySubscription", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('repository', models.URLField(help_text='Github repo where the event occured', max_length=255)), - ('token', models.CharField(max_length=255, unique=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('owner', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='admissions.academy')), - ('shared_with', - models.ManyToManyField(blank=True, related_name='repo_subscription', to='admissions.Academy')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("repository", models.URLField(help_text="Github repo where the event occured", max_length=255)), + ("token", models.CharField(max_length=255, unique=True)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("owner", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="admissions.academy")), + ( + "shared_with", + models.ManyToManyField(blank=True, related_name="repo_subscription", to="admissions.Academy"), + ), ], ), ] diff --git a/breathecode/monitoring/migrations/0018_auto_20230705_1635.py b/breathecode/monitoring/migrations/0018_auto_20230705_1635.py index d1ba966b3..054d99bea 100644 --- a/breathecode/monitoring/migrations/0018_auto_20230705_1635.py +++ b/breathecode/monitoring/migrations/0018_auto_20230705_1635.py @@ -6,34 +6,37 @@ class Migration(migrations.Migration): dependencies = [ - ('monitoring', '0017_repositorysubscription_repositorywebhook'), + ("monitoring", "0017_repositorysubscription_repositorywebhook"), ] operations = [ migrations.CreateModel( - name='StripeEvent', + name="StripeEvent", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('stripe_id', 
models.CharField(blank=True, - default=None, - help_text='Stripe id', - max_length=32, - null=True)), - ('type', models.CharField(help_text='Stripe event type', max_length=50)), - ('status', - models.CharField(choices=[('PENDING', 'Pending'), ('DONE', 'Done'), ('ERROR', 'Error')], - default='PENDING', - max_length=9)), - ('status_texts', models.JSONField(blank=True, default=dict)), - ('data', models.JSONField(blank=True, default=dict)), - ('request', models.JSONField(blank=True, default=dict)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "stripe_id", + models.CharField(blank=True, default=None, help_text="Stripe id", max_length=32, null=True), + ), + ("type", models.CharField(help_text="Stripe event type", max_length=50)), + ( + "status", + models.CharField( + choices=[("PENDING", "Pending"), ("DONE", "Done"), ("ERROR", "Error")], + default="PENDING", + max_length=9, + ), + ), + ("status_texts", models.JSONField(blank=True, default=dict)), + ("data", models.JSONField(blank=True, default=dict)), + ("request", models.JSONField(blank=True, default=dict)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), ], ), migrations.AlterField( - model_name='repositorysubscription', - name='repository', - field=models.URLField(help_text='Github repo where the event ocurred', max_length=255), + model_name="repositorysubscription", + name="repository", + field=models.URLField(help_text="Github repo where the event ocurred", max_length=255), ), ] diff --git a/breathecode/monitoring/migrations/0019_repositorysubscription_hook_id_and_more.py b/breathecode/monitoring/migrations/0019_repositorysubscription_hook_id_and_more.py index 68ed0f0e5..7b45925ea 100644 --- a/breathecode/monitoring/migrations/0019_repositorysubscription_hook_id_and_more.py +++ b/breathecode/monitoring/migrations/0019_repositorysubscription_hook_id_and_more.py @@ -6,25 +6,25 @@ class Migration(migrations.Migration): dependencies = [ - ('monitoring', '0018_auto_20230705_1635'), + ("monitoring", "0018_auto_20230705_1635"), ] operations = [ migrations.AddField( - model_name='repositorysubscription', - name='hook_id', - field=models.IntegerField(blank=True, default=None, help_text='Assigned from github', null=True), + model_name="repositorysubscription", + name="hook_id", + field=models.IntegerField(blank=True, default=None, help_text="Assigned from github", null=True), ), migrations.AddField( - model_name='repositorysubscription', - name='status', - field=models.CharField(choices=[('OPERATIONAL', 'Operational'), ('CRITICAL', 'Critical')], - default='CRITICAL', - max_length=20), + model_name="repositorysubscription", + name="status", + field=models.CharField( + choices=[("OPERATIONAL", "Operational"), ("CRITICAL", "Critical")], default="CRITICAL", max_length=20 + ), ), migrations.AddField( - model_name='repositorysubscription', - name='status_message', - field=models.TextField(blank=True, default='Waiting for ping', null=True), + model_name="repositorysubscription", + name="status_message", + field=models.TextField(blank=True, default="Waiting for ping", null=True), ), ] diff --git a/breathecode/monitoring/migrations/0020_alter_repositorysubscription_status.py b/breathecode/monitoring/migrations/0020_alter_repositorysubscription_status.py index 917d54813..1806455f8 100644 --- 
a/breathecode/monitoring/migrations/0020_alter_repositorysubscription_status.py +++ b/breathecode/monitoring/migrations/0020_alter_repositorysubscription_status.py @@ -6,16 +6,17 @@ class Migration(migrations.Migration): dependencies = [ - ('monitoring', '0019_repositorysubscription_hook_id_and_more'), + ("monitoring", "0019_repositorysubscription_hook_id_and_more"), ] operations = [ migrations.AlterField( - model_name='repositorysubscription', - name='status', - field=models.CharField(choices=[('OPERATIONAL', 'Operational'), ('CRITICAL', 'Critical'), - ('DISABLED', 'Disabled')], - default='CRITICAL', - max_length=20), + model_name="repositorysubscription", + name="status", + field=models.CharField( + choices=[("OPERATIONAL", "Operational"), ("CRITICAL", "Critical"), ("DISABLED", "Disabled")], + default="CRITICAL", + max_length=20, + ), ), ] diff --git a/breathecode/monitoring/migrations/0021_supervisor_supervisorissue.py b/breathecode/monitoring/migrations/0021_supervisor_supervisorissue.py index 5d9da2cb2..b50e4d679 100644 --- a/breathecode/monitoring/migrations/0021_supervisor_supervisorissue.py +++ b/breathecode/monitoring/migrations/0021_supervisor_supervisorissue.py @@ -8,31 +8,37 @@ class Migration(migrations.Migration): dependencies = [ - ('monitoring', '0020_alter_repositorysubscription_status'), + ("monitoring", "0020_alter_repositorysubscription_status"), ] operations = [ migrations.CreateModel( - name='Supervisor', + name="Supervisor", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('task_module', models.CharField(max_length=200)), - ('task_name', models.CharField(max_length=200)), - ('delta', - models.DurationField(default=datetime.timedelta(seconds=1800), - help_text='How long to wait for the next execution, defaults to 30 minutes')), - ('ran_at', models.DateTimeField(blank=True, default=None, null=True)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("task_module", models.CharField(max_length=200)), + ("task_name", models.CharField(max_length=200)), + ( + "delta", + models.DurationField( + default=datetime.timedelta(seconds=1800), + help_text="How long to wait for the next execution, defaults to 30 minutes", + ), + ), + ("ran_at", models.DateTimeField(blank=True, default=None, null=True)), ], ), migrations.CreateModel( - name='SupervisorIssue', + name="SupervisorIssue", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('occurrences', models.PositiveIntegerField(blank=True, default=1)), - ('error', models.TextField(max_length=255)), - ('ran_at', models.DateTimeField(blank=True, default=None, null=True)), - ('supervisor', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, - to='monitoring.supervisor')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("occurrences", models.PositiveIntegerField(blank=True, default=1)), + ("error", models.TextField(max_length=255)), + ("ran_at", models.DateTimeField(blank=True, default=None, null=True)), + ( + "supervisor", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="monitoring.supervisor"), + ), ], ), ] diff --git a/breathecode/monitoring/migrations/0022_supervisorissue_attempts_supervisorissue_code_and_more.py b/breathecode/monitoring/migrations/0022_supervisorissue_attempts_supervisorissue_code_and_more.py index 0e69887b4..861fb25cc 100644 --- 
a/breathecode/monitoring/migrations/0022_supervisorissue_attempts_supervisorissue_code_and_more.py +++ b/breathecode/monitoring/migrations/0022_supervisorissue_attempts_supervisorissue_code_and_more.py @@ -6,28 +6,28 @@ class Migration(migrations.Migration): dependencies = [ - ('monitoring', '0021_supervisor_supervisorissue'), + ("monitoring", "0021_supervisor_supervisorissue"), ] operations = [ migrations.AddField( - model_name='supervisorissue', - name='attempts', + model_name="supervisorissue", + name="attempts", field=models.PositiveIntegerField(blank=True, default=0), ), migrations.AddField( - model_name='supervisorissue', - name='code', + model_name="supervisorissue", + name="code", field=models.SlugField(blank=True, default=None, null=True), ), migrations.AddField( - model_name='supervisorissue', - name='fixed', + model_name="supervisorissue", + name="fixed", field=models.BooleanField(blank=True, default=None, null=True), ), migrations.AddField( - model_name='supervisorissue', - name='params', + model_name="supervisorissue", + name="params", field=models.JSONField(blank=True, default=None, null=True), ), ] diff --git a/breathecode/monitoring/migrations/0023_repositorysubscription_last_call.py b/breathecode/monitoring/migrations/0023_repositorysubscription_last_call.py index 1b1b0e505..deb83fe79 100644 --- a/breathecode/monitoring/migrations/0023_repositorysubscription_last_call.py +++ b/breathecode/monitoring/migrations/0023_repositorysubscription_last_call.py @@ -6,16 +6,18 @@ class Migration(migrations.Migration): dependencies = [ - ('monitoring', '0022_supervisorissue_attempts_supervisorissue_code_and_more'), + ("monitoring", "0022_supervisorissue_attempts_supervisorissue_code_and_more"), ] operations = [ migrations.AddField( - model_name='repositorysubscription', - name='last_call', - field=models.DateTimeField(blank=True, - default=None, - help_text='Last time github notified updates on this repo subscription', - null=True), + model_name="repositorysubscription", + name="last_call", + field=models.DateTimeField( + blank=True, + default=None, + help_text="Last time github notified updates on this repo subscription", + null=True, + ), ), ] diff --git a/breathecode/monitoring/models.py b/breathecode/monitoring/models.py index 22ca89335..e564e5e93 100644 --- a/breathecode/monitoring/models.py +++ b/breathecode/monitoring/models.py @@ -12,19 +12,19 @@ from breathecode.admissions.models import Academy from breathecode.notify.models import SlackChannel -__all__ = ['Application', 'Endpoint', 'MonitorScript'] +__all__ = ["Application", "Endpoint", "MonitorScript"] -GITHUB_URL_PATTERN = re.compile(r'https:\/\/github\.com\/(?P<user>[^\/]+)\/(?P<repo>[^\/]+)\/?') +GITHUB_URL_PATTERN = re.compile(r"https:\/\/github\.com\/(?P<user>[^\/]+)\/(?P<repo>[^\/]+)\/?") -LOADING = 'LOADING' -OPERATIONAL = 'OPERATIONAL' -MINOR = 'MINOR' -CRITICAL = 'CRITICAL' +LOADING = "LOADING" +OPERATIONAL = "OPERATIONAL" +MINOR = "MINOR" +CRITICAL = "CRITICAL" STATUS = ( - (LOADING, 'Loading'), - (OPERATIONAL, 'Operational'), - (MINOR, 'Minor'), - (CRITICAL, 'Critical'), + (LOADING, "Loading"), + (OPERATIONAL, "Operational"), + (MINOR, "Minor"), + (CRITICAL, "Critical"), ) @@ -33,25 +33,23 @@ class Application(models.Model): academy = models.ForeignKey(Academy, on_delete=models.CASCADE) status_text = models.CharField(max_length=255, default=None, null=True, blank=True) - notify_email = models.CharField(max_length=255, - blank=True, - default=None, - null=True, - help_text='Comma separated list of emails') + 
notify_email = models.CharField( + max_length=255, blank=True, default=None, null=True, help_text="Comma separated list of emails" + ) notify_slack_channel = models.ForeignKey( SlackChannel, on_delete=models.SET_NULL, blank=True, default=None, null=True, - help_text='Please pick an academy first to be able to see the available slack channels to notify') + help_text="Please pick an academy first to be able to see the available slack channels to notify", + ) status = models.CharField(max_length=20, choices=STATUS, default=OPERATIONAL) - paused_until = models.DateTimeField(null=True, - blank=True, - default=None, - help_text='if you want to stop checking for a period of time') + paused_until = models.DateTimeField( + null=True, blank=True, default=None, help_text="if you want to stop checking for a period of time" + ) created_at = models.DateTimeField(auto_now_add=True, editable=False) updated_at = models.DateTimeField(auto_now=True, editable=False) @@ -63,20 +61,16 @@ def __str__(self): class Endpoint(models.Model): url = models.CharField(max_length=255) - test_pattern = models.CharField(max_length=100, - default=None, - null=True, - blank=True, - help_text='If left blank sys will only ping') + test_pattern = models.CharField( + max_length=100, default=None, null=True, blank=True, help_text="If left blank sys will only ping" + ) frequency_in_minutes = models.FloatField(default=30) status_code = models.IntegerField(default=200) severity_level = models.IntegerField(default=0) status_text = models.CharField(max_length=255, default=None, null=True, blank=True, editable=False) - special_status_text = models.CharField(max_length=255, - default=None, - null=True, - blank=True, - help_text='Add a message for people to see when is down') + special_status_text = models.CharField( + max_length=255, default=None, null=True, blank=True, help_text="Add a message for people to see when is down" + ) response_text = models.TextField(default=None, null=True, blank=True) last_check = models.DateTimeField(default=None, null=True, blank=True) @@ -84,10 +78,9 @@ class Endpoint(models.Model): application = models.ForeignKey(Application, on_delete=models.CASCADE) - paused_until = models.DateTimeField(null=True, - blank=True, - default=None, - help_text='if you want to stop checking for a period of time') + paused_until = models.DateTimeField( + null=True, blank=True, default=None, help_text="if you want to stop checking for a period of time" + ) created_at = models.DateTimeField(auto_now_add=True, editable=False) updated_at = models.DateTimeField(auto_now=True, editable=False) @@ -101,8 +94,9 @@ class MonitorScript(models.Model): script_slug = models.SlugField(default=None, null=True, blank=True) script_body = models.TextField(default=None, null=True, blank=True) - frequency_delta = models.DurationField(default=timedelta(minutes=30), - help_text='How long to wait for the next execution, defaults to 30 minutes') + frequency_delta = models.DurationField( + default=timedelta(minutes=30), help_text="How long to wait for the next execution, defaults to 30 minutes" + ) status_code = models.IntegerField(default=200) severity_level = models.IntegerField(default=0) notify_email = models.CharField( @@ -110,13 +104,12 @@ class MonitorScript(models.Model): blank=True, default=None, null=True, - help_text='Only specify if need to override the application.notify_email, you can add many comma separated.') + help_text="Only specify if need to override the application.notify_email, you can add many comma separated.", + ) 
status_text = models.CharField(max_length=255, default=None, null=True, blank=True, editable=False) - special_status_text = models.CharField(max_length=255, - default=None, - null=True, - blank=True, - help_text='Add a message for people to see when is down') + special_status_text = models.CharField( + max_length=255, default=None, null=True, blank=True, help_text="Add a message for people to see when is down" + ) response_text = models.TextField(default=None, null=True, blank=True) last_run = models.DateTimeField(default=None, null=True, blank=True) @@ -124,26 +117,25 @@ class MonitorScript(models.Model): application = models.ForeignKey(Application, on_delete=models.CASCADE) - paused_until = models.DateTimeField(null=True, - blank=True, - default=None, - help_text='if you want to stop checking for a period of time') + paused_until = models.DateTimeField( + null=True, blank=True, default=None, help_text="if you want to stop checking for a period of time" + ) created_at = models.DateTimeField(auto_now_add=True, editable=False) updated_at = models.DateTimeField(auto_now=True, editable=False) def __str__(self): - slug = 'unknown' if not self.script_slug else self.script_slug - return f'{slug}({self.id})' + slug = "unknown" if not self.script_slug else self.script_slug + return f"{slug}({self.id})" -LOADING = 'LOADING' -ERROR = 'ERROR' -DONE = 'DONE' +LOADING = "LOADING" +ERROR = "ERROR" +DONE = "DONE" DOWNLOAD_STATUS = ( - (LOADING, 'Loading'), - (ERROR, 'Error'), - (DONE, 'Done'), + (LOADING, "Loading"), + (ERROR, "Error"), + (DONE, "Done"), ) @@ -159,11 +151,11 @@ class CSVDownload(models.Model): finished_at = models.DateTimeField(auto_now=True, editable=False) -PENDING = 'PENDING' +PENDING = "PENDING" UPLOAD_STATUS = ( - (PENDING, 'Pending'), - (ERROR, 'Error'), - (DONE, 'Done'), + (PENDING, "Pending"), + (ERROR, "Error"), + (DONE, "Done"), ) @@ -179,27 +171,26 @@ class CSVUpload(models.Model): finished_at = models.DateTimeField(auto_now=True, editable=False) -DISABLED = 'DISABLED' -SUBSCRIPTION_STATUS = ((OPERATIONAL, 'Operational'), (CRITICAL, 'Critical'), (DISABLED, 'Disabled')) +DISABLED = "DISABLED" +SUBSCRIPTION_STATUS = ((OPERATIONAL, "Operational"), (CRITICAL, "Critical"), (DISABLED, "Disabled")) class RepositorySubscription(models.Model): - repository = models.URLField(max_length=255, help_text='Github repo where the event ocurred') + repository = models.URLField(max_length=255, help_text="Github repo where the event ocurred") token = models.CharField(max_length=255, unique=True) owner = models.ForeignKey(Academy, on_delete=models.CASCADE) - shared_with = models.ManyToManyField(Academy, blank=True, related_name='repo_subscription') + shared_with = models.ManyToManyField(Academy, blank=True, related_name="repo_subscription") - hook_id = models.IntegerField(default=None, null=True, blank=True, help_text='Assigned from github') + hook_id = models.IntegerField(default=None, null=True, blank=True, help_text="Assigned from github") - last_call = models.DateTimeField(default=None, - null=True, - blank=True, - help_text='Last time github notified updates on this repo subscription') + last_call = models.DateTimeField( + default=None, null=True, blank=True, help_text="Last time github notified updates on this repo subscription" + ) # disabled means it will be ignored from now on status = models.CharField(max_length=20, choices=SUBSCRIPTION_STATUS, default=CRITICAL) - status_message = models.TextField(null=True, blank=True, default='Waiting for ping') + status_message = 
models.TextField(null=True, blank=True, default="Waiting for ping") created_at = models.DateTimeField(auto_now_add=True, editable=False) updated_at = models.DateTimeField(auto_now=True, editable=False) @@ -209,17 +200,18 @@ def get_repo_name(self): # Split the path to get the repository name # The path usually is "/username/repository_name" - path_parts = parsed_url.path.strip('/').split('/') + path_parts = parsed_url.path.strip("/").split("/") # Check if the URL path has at least two parts (username and repository_name) if len(path_parts) >= 2: # The repository name is the second part of the path return path_parts[0], path_parts[1] else: - raise Exception(f'Invalid URL format for: {self.repository}') + raise Exception(f"Invalid URL format for: {self.repository}") def save(self, *args, **kwargs): from breathecode.assignments.models import RepositoryDeletionOrder + if not self.pk: self.token = binascii.hexlify(os.urandom(20)).decode() @@ -227,30 +219,32 @@ def save(self, *args, **kwargs): match = GITHUB_URL_PATTERN.search(self.repository) if match: - user = match.group('user') - repo_name = match.group('repo') + user = match.group("user") + repo_name = match.group("repo") - RepositoryDeletionOrder.objects.filter(provider=RepositoryDeletionOrder.Provider.GITHUB, - repository_user__iexact=user, - repository_name__iexact=repo_name).exclude( - Q(status=RepositoryDeletionOrder.Status.DELETED) - | Q(status=RepositoryDeletionOrder.Status.CANCELLED)).delete() + RepositoryDeletionOrder.objects.filter( + provider=RepositoryDeletionOrder.Provider.GITHUB, + repository_user__iexact=user, + repository_name__iexact=repo_name, + ).exclude( + Q(status=RepositoryDeletionOrder.Status.DELETED) | Q(status=RepositoryDeletionOrder.Status.CANCELLED) + ).delete() -PENDING = 'PENDING' -DONE = 'DONE' -ERROR = 'ERROR' +PENDING = "PENDING" +DONE = "DONE" +ERROR = "ERROR" WEBHOOK_STATUS = ( - (PENDING, 'Pending'), - (DONE, 'Done'), - (ERROR, 'Error'), + (PENDING, "Pending"), + (DONE, "Done"), + (ERROR, "Error"), ) class StripeEvent(models.Model): - stripe_id = models.CharField(max_length=32, null=True, default=None, blank=True, help_text='Stripe id') + stripe_id = models.CharField(max_length=32, null=True, default=None, blank=True, help_text="Stripe id") - type = models.CharField(max_length=50, help_text='Stripe event type') + type = models.CharField(max_length=50, help_text="Stripe event type") status = models.CharField(max_length=9, choices=WEBHOOK_STATUS, default=PENDING) status_texts = models.JSONField(default=dict, blank=True) @@ -277,21 +271,24 @@ def save(self, *args, **kwargs): class RepositoryWebhook(models.Model): - webhook_action = models.CharField(max_length=100, - blank=True, - null=True, - default=None, - help_text='The specific action that was triggered on github for this webhook') + webhook_action = models.CharField( + max_length=100, + blank=True, + null=True, + default=None, + help_text="The specific action that was triggered on github for this webhook", + ) scope = models.CharField( max_length=100, blank=True, null=True, default=None, - help_text='The specific entity that triggered this webhook, for example: issues, issues_comment, etc.') - run_at = models.DateTimeField(help_text='Date/time that the webhook ran', blank=True, null=True, default=None) - repository = models.URLField(max_length=255, help_text='Github repo where the event occured') + help_text="The specific entity that triggered this webhook, for example: issues, issues_comment, etc.", + ) + run_at = 
models.DateTimeField(help_text="Date/time that the webhook ran", blank=True, null=True, default=None) + repository = models.URLField(max_length=255, help_text="Github repo where the event occured") - payload = models.JSONField(help_text='Info that came on the request, it varies depending on the webhook type') + payload = models.JSONField(help_text="Info that came on the request, it varies depending on the webhook type") academy_slug = models.SlugField() @@ -302,7 +299,7 @@ class RepositoryWebhook(models.Model): updated_at = models.DateTimeField(auto_now=True, editable=False) def __str__(self): - return f'Webhook {self.webhook_action} {self.status} => {self.status_text}' + return f"Webhook {self.webhook_action} {self.status} => {self.status_text}" def get_payload(self): return json.loads(self.payload) @@ -311,13 +308,14 @@ def get_payload(self): class Supervisor(models.Model): task_module = models.CharField(max_length=200) task_name = models.CharField(max_length=200) - delta = models.DurationField(default=timedelta(minutes=30), - help_text='How long to wait for the next execution, defaults to 30 minutes') + delta = models.DurationField( + default=timedelta(minutes=30), help_text="How long to wait for the next execution, defaults to 30 minutes" + ) ran_at = models.DateTimeField(default=None, null=True, blank=True) def __str__(self): - return f'{self.task_module}.{self.task_name} ({self.delta})' + return f"{self.task_module}.{self.task_name} ({self.delta})" def save(self, *args, **kwargs): self.full_clean() diff --git a/breathecode/monitoring/scripts/alert_gitpoduser_deletion.py b/breathecode/monitoring/scripts/alert_gitpoduser_deletion.py index 446b709e9..19fa581e1 100644 --- a/breathecode/monitoring/scripts/alert_gitpoduser_deletion.py +++ b/breathecode/monitoring/scripts/alert_gitpoduser_deletion.py @@ -14,23 +14,25 @@ in_three_days = timezone.now() + datetime.timedelta(days=3) -gitpod_users_to_delete = GitpodUser.objects.filter( - expires_at__lt=in_three_days).filter(Q(academy__id=academy.id) | Q(academy__isnull=True)) +gitpod_users_to_delete = GitpodUser.objects.filter(expires_at__lt=in_three_days).filter( + Q(academy__id=academy.id) | Q(academy__isnull=True) +) -content_html = '' +content_html = "" for u in gitpod_users_to_delete: # beware!! 
from_now cannot be used inside a map or join function, you have to do a traditional for loop if u.user is not None: - content_html += f'- {u.user.first_name} {u.user.last_name} ({u.github_username}) in {from_now(u.expires_at, include_days=True)}: {u.delete_status} \n' + content_html += f"- {u.user.first_name} {u.user.last_name} ({u.github_username}) in {from_now(u.expires_at, include_days=True)}: {u.delete_status} \n" else: - content_html += f'- {u.github_username} in {from_now(u.expires_at, include_days=True)}: {u.delete_status} \n' + content_html += f"- {u.github_username} in {from_now(u.expires_at, include_days=True)}: {u.delete_status} \n" if len(gitpod_users_to_delete) > 0: raise ScriptNotification( - f'The following {len(gitpod_users_to_delete)} Gitpod users will be delete soon: \n\n' + content_html, - status='CRITICAL', - title=f'{str(len(gitpod_users_to_delete))} Gitpod users from {academy.name} will be deleted', - slug='gitpod-users-to-delete', - btn_url=ADMIN_URL + '/admin/gitpod?location=' + academy.slug) + f"The following {len(gitpod_users_to_delete)} Gitpod users will be delete soon: \n\n" + content_html, + status="CRITICAL", + title=f"{str(len(gitpod_users_to_delete))} Gitpod users from {academy.name} will be deleted", + slug="gitpod-users-to-delete", + btn_url=ADMIN_URL + "/admin/gitpod?location=" + academy.slug, + ) -print(f'No gitpod users to delete for {academy.name}') +print(f"No gitpod users to delete for {academy.name}") diff --git a/breathecode/monitoring/scripts/alert_pending_leads.py b/breathecode/monitoring/scripts/alert_pending_leads.py index 916a70fd4..ee50625b6 100644 --- a/breathecode/monitoring/scripts/alert_pending_leads.py +++ b/breathecode/monitoring/scripts/alert_pending_leads.py @@ -10,21 +10,23 @@ from breathecode.utils import ScriptNotification from breathecode.utils.datetime_integer import from_now -pending_leads = FormEntry.objects.filter( - storage_status='PENDING').filter(Q(academy__id=academy.id) | Q(location=academy.slug)) +pending_leads = FormEntry.objects.filter(storage_status="PENDING").filter( + Q(academy__id=academy.id) | Q(location=academy.slug) +) -leads_html = '' +leads_html = "" for l in pending_leads: # beware!! from_now cannot be used inside a map or join function, you have to do a traditional for loop - leads_html += f'- {l.first_name} {l.last_name} {l.email} added {from_now(l.created_at)} ago. \n' + leads_html += f"- {l.first_name} {l.last_name} {l.email} added {from_now(l.created_at)} ago. 
\n" if len(pending_leads) > 0: raise ScriptNotification( - f'The following {len(pending_leads)} leads could not be added to CRM and need to be reviewed: \n\n' + - leads_html, - status='CRITICAL', - title=f'{str(len(pending_leads))} leads from {academy.name} could not be added to CRM', - slug='pending-academy-leads', - btn_url=ADMIN_URL + '/growth/leads?location=' + academy.slug + '&limit=10&offset=0&storage_status=PENDING') + f"The following {len(pending_leads)} leads could not be added to CRM and need to be reviewed: \n\n" + + leads_html, + status="CRITICAL", + title=f"{str(len(pending_leads))} leads from {academy.name} could not be added to CRM", + slug="pending-academy-leads", + btn_url=ADMIN_URL + "/growth/leads?location=" + academy.slug + "&limit=10&offset=0&storage_status=PENDING", + ) -print(f'No pending leads for {academy.name}') +print(f"No pending leads for {academy.name}") diff --git a/breathecode/monitoring/scripts/alert_tags_with_no_type.py b/breathecode/monitoring/scripts/alert_tags_with_no_type.py index 32b0c2cff..e5879c426 100644 --- a/breathecode/monitoring/scripts/alert_tags_with_no_type.py +++ b/breathecode/monitoring/scripts/alert_tags_with_no_type.py @@ -20,8 +20,9 @@ - Tags with type=OTHER are not involved in the marketing process, probably used internaly for other things. - The tag functionality keeps being developed and there may be other cases not specified in this email. """, - status='CRITICAL', - title=f'There are {str(total_pending_tags)} tags without type in {academy.name}', - slug='academy-has-tags-without-type') + status="CRITICAL", + title=f"There are {str(total_pending_tags)} tags without type in {academy.name}", + slug="academy-has-tags-without-type", + ) -print(f'No tags without a type from {str(tags.count())} records') +print(f"No tags without a type from {str(tags.count())} records") diff --git a/breathecode/monitoring/scripts/check_cohort_status_ended_cohort.py b/breathecode/monitoring/scripts/check_cohort_status_ended_cohort.py index 2261c3fb2..2ca30a17a 100644 --- a/breathecode/monitoring/scripts/check_cohort_status_ended_cohort.py +++ b/breathecode/monitoring/scripts/check_cohort_status_ended_cohort.py @@ -9,16 +9,21 @@ from breathecode.admissions.models import Cohort from django.utils import timezone -to_fix_cohort_stage = Cohort.objects.filter(ending_date__lt=timezone.now(), academy__id=academy.id)\ - .exclude(stage__in=['ENDED','DELETED']).values_list('name', flat=True) +to_fix_cohort_stage = ( + Cohort.objects.filter(ending_date__lt=timezone.now(), academy__id=academy.id) + .exclude(stage__in=["ENDED", "DELETED"]) + .values_list("name", flat=True) +) if len(to_fix_cohort_stage) > 0: - to_fix_cohort_name = ('\n').join(['- ' + cohort_name for cohort_name in to_fix_cohort_stage]) + to_fix_cohort_name = ("\n").join(["- " + cohort_name for cohort_name in to_fix_cohort_stage]) - raise ScriptNotification(f'These cohorts ended but their stage is different that ENDED: \n {to_fix_cohort_name} ', - status='CRITICAL', - title=f'There are {str(len(to_fix_cohort_stage))} cohorts that should be marked as ENDED', - slug='cohort-stage-should-be-ended') + raise ScriptNotification( + f"These cohorts ended but their stage is different that ENDED: \n {to_fix_cohort_name} ", + status="CRITICAL", + title=f"There are {str(len(to_fix_cohort_stage))} cohorts that should be marked as ENDED", + slug="cohort-stage-should-be-ended", + ) else: - print('Everything up to date') + print("Everything up to date") diff --git 
a/breathecode/monitoring/scripts/check_cohort_status_started.py b/breathecode/monitoring/scripts/check_cohort_status_started.py index 96caaec15..b23d10cec 100644 --- a/breathecode/monitoring/scripts/check_cohort_status_started.py +++ b/breathecode/monitoring/scripts/check_cohort_status_started.py @@ -9,17 +9,21 @@ from breathecode.admissions.models import Cohort from django.utils import timezone -to_fix_cohort_stage = Cohort.objects.filter(kickoff_date__lt=timezone.now(), academy__id=academy.id, - stage='PREWORK').exclude(never_ends=True).values_list('name', flat=True) +to_fix_cohort_stage = ( + Cohort.objects.filter(kickoff_date__lt=timezone.now(), academy__id=academy.id, stage="PREWORK") + .exclude(never_ends=True) + .values_list("name", flat=True) +) if len(to_fix_cohort_stage) > 0: - to_fix_cohort_name = ('\n').join(['- ' + cohort_name for cohort_name in to_fix_cohort_stage]) + to_fix_cohort_name = ("\n").join(["- " + cohort_name for cohort_name in to_fix_cohort_stage]) raise ScriptNotification( - f'These cohorts need to me marked as started or any other further status because the starting date already passed: \n {to_fix_cohort_name} ', - status='CRITICAL', - title=f'There are {str(len(to_fix_cohort_stage))} cohort that should be marked as STARTED or further', - slug='cohort-stage-should-be-started') + f"These cohorts need to me marked as started or any other further status because the starting date already passed: \n {to_fix_cohort_name} ", + status="CRITICAL", + title=f"There are {str(len(to_fix_cohort_stage))} cohort that should be marked as STARTED or further", + slug="cohort-stage-should-be-started", + ) else: - print('All cohort status are consistent with the kickoff date') + print("All cohort status are consistent with the kickoff date") diff --git a/breathecode/monitoring/scripts/check_cohort_user_status_ended_cohort.py b/breathecode/monitoring/scripts/check_cohort_user_status_ended_cohort.py index e45e87724..417090ffb 100644 --- a/breathecode/monitoring/scripts/check_cohort_user_status_ended_cohort.py +++ b/breathecode/monitoring/scripts/check_cohort_user_status_ended_cohort.py @@ -8,19 +8,21 @@ from breathecode.utils import ScriptNotification from breathecode.admissions.models import CohortUser -active_user_on_ended_cohort = CohortUser.objects.filter(cohort__stage='ENDED', - educational_status='ACTIVE', - cohort__academy__id=academy.id).exclude(cohort__never_ends=True) +active_user_on_ended_cohort = CohortUser.objects.filter( + cohort__stage="ENDED", educational_status="ACTIVE", cohort__academy__id=academy.id +).exclude(cohort__never_ends=True) active_user_on_ended_cohort_list = [ - '- ' + item.user.first_name + ' ' + item.user.last_name + ' (' + item.user.email + ') => ' + item.cohort.name + "- " + item.user.first_name + " " + item.user.last_name + " (" + item.user.email + ") => " + item.cohort.name for item in active_user_on_ended_cohort ] -active_user_on_ended_cohort_list_names = ('\n').join(active_user_on_ended_cohort_list) +active_user_on_ended_cohort_list_names = ("\n").join(active_user_on_ended_cohort_list) if len(active_user_on_ended_cohort_list): - raise ScriptNotification(f'This users: {active_user_on_ended_cohort_list_names} are active on ended cohorts', - slug='ended-cohort-had-active-users') + raise ScriptNotification( + f"This users: {active_user_on_ended_cohort_list_names} are active on ended cohorts", + slug="ended-cohort-had-active-users", + ) -print('Everything up to date') +print("Everything up to date") diff --git 
a/breathecode/monitoring/scripts/check_shortlinks_with_destination_status_error.py b/breathecode/monitoring/scripts/check_shortlinks_with_destination_status_error.py index 177dea952..d9739614b 100644 --- a/breathecode/monitoring/scripts/check_shortlinks_with_destination_status_error.py +++ b/breathecode/monitoring/scripts/check_shortlinks_with_destination_status_error.py @@ -12,23 +12,30 @@ from breathecode.marketing.models import ShortLink from breathecode.utils import ScriptNotification from django.db.models import Q + # start your code here # Filtered list of shortlink objects with destination status error or not found destination_status_error_or_not_found = ShortLink.objects.filter( - Q(destination_status='ERROR') | Q(destination_status='NOT_FOUND')) + Q(destination_status="ERROR") | Q(destination_status="NOT_FOUND") +) destination_status_error_or_not_found_list = [ - '- URL: ' + item.destination + ' Status: ' + item.destination_status + ' Last clicked: ' + - f'{item.lastclick_at.strftime("%m/%d/%Y, %H:%M:%S") if item.lastclick_at != None else "never"}' + "- URL: " + + item.destination + + " Status: " + + item.destination_status + + " Last clicked: " + + f'{item.lastclick_at.strftime("%m/%d/%Y, %H:%M:%S") if item.lastclick_at != None else "never"}' for item in destination_status_error_or_not_found ] # Joining the list together for a display format -destination_status_error_or_not_found_list_display = ('\n').join(destination_status_error_or_not_found_list) +destination_status_error_or_not_found_list_display = ("\n").join(destination_status_error_or_not_found_list) if len(destination_status_error_or_not_found_list_display) > 0: raise ScriptNotification( - f'These shortlinks: {destination_status_error_or_not_found_list_display} are not working properly.', - slug='short-link-bad-destination-status') + f"These shortlinks: {destination_status_error_or_not_found_list_display} are not working properly.", + slug="short-link-bad-destination-status", + ) -print('All shortlinks working properly') +print("All shortlinks working properly") diff --git a/breathecode/monitoring/scripts/count_duplicate_githubacademyusers.py b/breathecode/monitoring/scripts/count_duplicate_githubacademyusers.py index df3349539..c524d4800 100644 --- a/breathecode/monitoring/scripts/count_duplicate_githubacademyusers.py +++ b/breathecode/monitoring/scripts/count_duplicate_githubacademyusers.py @@ -9,30 +9,35 @@ from breathecode.authenticate.models import GithubAcademyUser # Query to find duplicate users for the same academy -duplicate_users = GithubAcademyUser.objects.values( - 'user__id', 'academy__id').annotate(user_count=Count('user__id')).filter(user_count__gt=1) +duplicate_users = ( + GithubAcademyUser.objects.values("user__id", "academy__id") + .annotate(user_count=Count("user__id")) + .filter(user_count__gt=1) +) # Extract duplicate user_ids and academy_ids from the query result -duplicate_user_ids = [entry['user__id'] for entry in duplicate_users] -duplicate_academy_ids = [entry['academy__id'] for entry in duplicate_users] +duplicate_user_ids = [entry["user__id"] for entry in duplicate_users] +duplicate_academy_ids = [entry["academy__id"] for entry in duplicate_users] # Find the actual duplicate records duplicate_records = GithubAcademyUser.objects.filter( - Q(user__id__in=duplicate_user_ids) & Q(academy__id__in=duplicate_academy_ids)) + Q(user__id__in=duplicate_user_ids) & Q(academy__id__in=duplicate_academy_ids) +) duplicate_amount = duplicate_records.count() if duplicate_amount > 0: def to_string(_gu): - return 
_gu.user.first_name + ' ' + _gu.user.last_name + ' from academy: ' + _gu.academy.name + return _gu.user.first_name + " " + _gu.user.last_name + " from academy: " + _gu.academy.name - duplicates = ('\n').join(['- ' + to_string(gu) for gu in duplicate_records]) + duplicates = ("\n").join(["- " + to_string(gu) for gu in duplicate_records]) raise ScriptNotification( - f'Following users have duplicate academy users, this problem needs to be fixed before we can bill the provisioning services: \n {duplicates} ', - status='CRITICAL', - title=f'There are {str(len(duplicate_amount))} duplicate Github Academy Users', - slug='duplicate-github-academy-users') + f"Following users have duplicate academy users, this problem needs to be fixed before we can bill the provisioning services: \n {duplicates} ", + status="CRITICAL", + title=f"There are {str(len(duplicate_amount))} duplicate Github Academy Users", + slug="duplicate-github-academy-users", + ) else: - print('No github academy users are duplicated') + print("No github academy users are duplicated") diff --git a/breathecode/monitoring/scripts/event_marked_as_draft.py b/breathecode/monitoring/scripts/event_marked_as_draft.py index c9407cc12..58a177856 100644 --- a/breathecode/monitoring/scripts/event_marked_as_draft.py +++ b/breathecode/monitoring/scripts/event_marked_as_draft.py @@ -9,18 +9,20 @@ from breathecode.utils import ScriptNotification from breathecode.utils.datetime_integer import from_now -pendings = Event.objects.filter(status='DRAFT', academy__id=academy.id) +pendings = Event.objects.filter(status="DRAFT", academy__id=academy.id) total_pendings = pendings.count() if total_pendings > 0: - msg = '' + msg = "" for event in pendings: msg += f'- <a href="{ADMIN_URL}/events/event/{event.id}?location={academy.slug}">{event.title}</a> added {from_now(event.created_at)} ago. \n' # noqa: F821 - raise ScriptNotification(f'There are {total_pendings} pending event to published or deleted \n\n' + msg, - status='CRITICAL', - title=f'There are {total_pendings} draft events to published or deleted in {academy.name}', - slug='draft-events', - btn_url=ADMIN_URL + '/events/list?location=' + academy.slug) + raise ScriptNotification( + f"There are {total_pendings} pending event to published or deleted \n\n" + msg, + status="CRITICAL", + title=f"There are {total_pendings} draft events to published or deleted in {academy.name}", + slug="draft-events", + btn_url=ADMIN_URL + "/events/list?location=" + academy.slug, + ) -print(f'There are no draft events for {academy.slug}') +print(f"There are no draft events for {academy.slug}") diff --git a/breathecode/monitoring/scripts/event_published_without_tags.py b/breathecode/monitoring/scripts/event_published_without_tags.py index bff5d9033..1d02d3699 100644 --- a/breathecode/monitoring/scripts/event_published_without_tags.py +++ b/breathecode/monitoring/scripts/event_published_without_tags.py @@ -10,21 +10,22 @@ from django.utils import timezone from breathecode.utils.datetime_integer import from_now -published_without_tags = Event.objects.filter(status='ACTIVE', - academy__id=academy.id, - tags='', - ending_at__gt=timezone.now()) +published_without_tags = Event.objects.filter( + status="ACTIVE", academy__id=academy.id, tags="", ending_at__gt=timezone.now() +) total = published_without_tags.count() if total > 0: - msg = '' + msg = "" for event in published_without_tags: msg += f'- <a href="{ADMIN_URL}/events/event/{event.id}?location={academy.slug}">{event.title}</a> added {from_now(event.created_at)} ago. 
\n' # noqa: F821 - raise ScriptNotification(f'There are {total} published events without tags \n\n' + msg, - status='CRITICAL', - title=f'There are {total} events published without tags at {academy.name}', - slug='events-without-tags', - btn_url=ADMIN_URL + '/events/list?location=' + academy.slug) + raise ScriptNotification( + f"There are {total} published events without tags \n\n" + msg, + status="CRITICAL", + title=f"There are {total} events published without tags at {academy.name}", + slug="events-without-tags", + btn_url=ADMIN_URL + "/events/list?location=" + academy.slug, + ) -print(f'There are no events without tags for {academy.slug}') +print(f"There are no events without tags for {academy.slug}") diff --git a/breathecode/monitoring/scripts/monitor_slack_integration.py b/breathecode/monitoring/scripts/monitor_slack_integration.py index 3eccd6703..e4d1a3c13 100644 --- a/breathecode/monitoring/scripts/monitor_slack_integration.py +++ b/breathecode/monitoring/scripts/monitor_slack_integration.py @@ -13,17 +13,20 @@ slack = SlackTeam.objects.filter(academy__id=academy.id).first() if slack is None: - raise ScriptNotification(f'No slack integration has been found for academy={academy.id}', - title='No slack integration has been found', - status='MINOR') + raise ScriptNotification( + f"No slack integration has been found for academy={academy.id}", + title="No slack integration has been found", + status="MINOR", + ) owner_credentials = CredentialsSlack.objects.filter(user__id=slack.owner.id).first() if owner_credentials is None: raise ScriptNotification( - 'The academy slack integration is not finished, the team owner needs to connect with slack', - title='The academy slack integration is not finished, the team owner needs to connect with slack', - status='MINOR') + "The academy slack integration is not finished, the team owner needs to connect with slack", + title="The academy slack integration is not finished, the team owner needs to connect with slack", + status="MINOR", + ) api = client.Slack(owner_credentials.token) -data = api.post('api.test') -print('data', data) +data = api.post("api.test") +print("data", data) diff --git a/breathecode/monitoring/scripts/pending_academy_github_users.py b/breathecode/monitoring/scripts/pending_academy_github_users.py index b9ccb7b7b..2eb750ad2 100644 --- a/breathecode/monitoring/scripts/pending_academy_github_users.py +++ b/breathecode/monitoring/scripts/pending_academy_github_users.py @@ -8,17 +8,17 @@ from breathecode.authenticate.models import GithubAcademyUser from breathecode.utils import ScriptNotification -pending = GithubAcademyUser.objects.filter(academy=academy).exclude(storage_action__in=['ADD', 'DELETE']) +pending = GithubAcademyUser.objects.filter(academy=academy).exclude(storage_action__in=["ADD", "DELETE"]) if pending.exists(): - invite = pending.filter(storage_action='INVITE') - ignore = pending.filter(storage_action='IGNORE') + invite = pending.filter(storage_action="INVITE") + ignore = pending.filter(storage_action="IGNORE") raise ScriptNotification( - f'There are {str(invite.count())} github users marked as invite and {str(ignore.count())} ' - 'marked as ignore', - status='CRITICAL', - title=f'There are {str(invite.count())} github users marked as invite and {str(ignore.count())} ' - 'marked as ignore', - slug=f'{str(invite.count())}-invite-and-{str(ignore.count())}-ignore') + f"There are {str(invite.count())} github users marked as invite and {str(ignore.count())} " "marked as ignore", + status="CRITICAL", + title=f"There are 
{str(invite.count())} github users marked as invite and {str(ignore.count())} " + "marked as ignore", + slug=f"{str(invite.count())}-invite-and-{str(ignore.count())}-ignore", + ) -print('All good') +print("All good") diff --git a/breathecode/monitoring/scripts/pending_asset_errors.py b/breathecode/monitoring/scripts/pending_asset_errors.py index c39193290..0123820a4 100644 --- a/breathecode/monitoring/scripts/pending_asset_errors.py +++ b/breathecode/monitoring/scripts/pending_asset_errors.py @@ -6,19 +6,20 @@ from breathecode.registry.models import AssetErrorLog from breathecode.utils.datetime_integer import from_now -errors = AssetErrorLog.objects.filter(status='ERROR') +errors = AssetErrorLog.objects.filter(status="ERROR") total_errors = errors.count() if total_errors == 0: - print('No asset errors found') + print("No asset errors found") else: - content = '' + content = "" for e in errors: - content += f'- {e.slug} with path {str(e.path)} since {from_now(e.created_at)} ago \n' + content += f"- {e.slug} with path {str(e.path)} since {from_now(e.created_at)} ago \n" - raise ScriptNotification(f'There are {str(total_errors)} erros on the asset log: \n\n' - f'{content}', - status='CRITICAL', - title=f'There are {str(total_errors)} erros on the asset log:', - slug='asset-errors') + raise ScriptNotification( + f"There are {str(total_errors)} erros on the asset log: \n\n" f"{content}", + status="CRITICAL", + title=f"There are {str(total_errors)} erros on the asset log:", + slug="asset-errors", + ) diff --git a/breathecode/monitoring/scripts/pending_provisioning_bills.py b/breathecode/monitoring/scripts/pending_provisioning_bills.py index ff917fa0e..c3c51ed83 100644 --- a/breathecode/monitoring/scripts/pending_provisioning_bills.py +++ b/breathecode/monitoring/scripts/pending_provisioning_bills.py @@ -8,18 +8,19 @@ from breathecode.utils import ScriptNotification from django.db.models import Q -bills = ProvisioningBill.objects.filter(academy__id=academy.id, status='ERROR') -activities = ProvisioningUserConsumption.objects.filter(Q(bills__academy__id=academy.id), status='ERROR') +bills = ProvisioningBill.objects.filter(academy__id=academy.id, status="ERROR") +activities = ProvisioningUserConsumption.objects.filter(Q(bills__academy__id=academy.id), status="ERROR") how_many_bills = bills.count() how_many_activities = activities.count() if how_many_bills > 0 or how_many_activities > 0: raise ScriptNotification( - f'There are {str(bills.count())} provisioning bills and {str(activities.count())} provisioning ' - 'user consumptions with errors', - status='CRITICAL', - title=f'There are {str(bills.count())} bills and {str(activities.count())} user consumptions with errors', - slug=f'{how_many_bills}-bills-and-{how_many_activities}-activities-with-errors') + f"There are {str(bills.count())} provisioning bills and {str(activities.count())} provisioning " + "user consumptions with errors", + status="CRITICAL", + title=f"There are {str(bills.count())} bills and {str(activities.count())} user consumptions with errors", + slug=f"{how_many_bills}-bills-and-{how_many_activities}-activities-with-errors", + ) -print('All good') +print("All good") diff --git a/breathecode/monitoring/scripts/review_reminder.py b/breathecode/monitoring/scripts/review_reminder.py index ac692fccd..dd400f9e1 100644 --- a/breathecode/monitoring/scripts/review_reminder.py +++ b/breathecode/monitoring/scripts/review_reminder.py @@ -21,32 +21,52 @@ def calculate_weeks(date_created, current_date): EIGHT_WEEKS_AGO = TODAY - 
timedelta(weeks=8) # Cohorts that ended no more than 4 weeks ago -reviews = Review.objects.filter(status='PENDING', - cohort__academy__id=academy.id, - cohort__ending_date__gte=EIGHT_WEEKS_AGO, - cohort__kickoff_date__lte=TODAY) +reviews = Review.objects.filter( + status="PENDING", + cohort__academy__id=academy.id, + cohort__ending_date__gte=EIGHT_WEEKS_AGO, + cohort__kickoff_date__lte=TODAY, +) -call_to_action = f'Click here to <a href="{ADMIN_URL}/growth/reviews?location={academy.slug}">see a more detailed list</a>' +call_to_action = ( + f'Click here to <a href="{ADMIN_URL}/growth/reviews?location={academy.slug}">see a more detailed list</a>' +) help_info = f'🆘 Need help? Learn more about <a href="https://4geeksacademy.notion.site/Student-Reviews-762eb87ae8d84c26b305d7f5c677776f">how reviews work at 4Geeks</a>' # exclude cohorts that never end -reviews = reviews.exclude(cohort__never_ends=True).exclude(cohort__stage__in=['DELETED', 'INACTIVE']) +reviews = reviews.exclude(cohort__never_ends=True).exclude(cohort__stage__in=["DELETED", "INACTIVE"]) total_reviews = reviews.count() if total_reviews == 0: - print(f'No Pending Reviews for academy {academy.slug}') + print(f"No Pending Reviews for academy {academy.slug}") else: - review_names = ('\n').join([ - '- Ask ' + (r.author.first_name + ' ' + r.author.last_name + ' (' + - str(r.nps_previous_rating if not None else '0') + '/10) from ' + r.cohort.name + - ' to review us at ' + '<a href="' + r.platform.review_signup + '">' + r.platform.name + '</a>') - for r in reviews - ]) + review_names = ("\n").join( + [ + "- Ask " + + ( + r.author.first_name + + " " + + r.author.last_name + + " (" + + str(r.nps_previous_rating if not None else "0") + + "/10) from " + + r.cohort.name + + " to review us at " + + '<a href="' + + r.platform.review_signup + + '">' + + r.platform.name + + "</a>" + ) + for r in reviews + ] + ) raise ScriptNotification( - f'There are {str(total_reviews)} reviews to be requested because the students gave us 8 or more on the NPS survey: ' - f'\n {review_names} \n\n {call_to_action} \n\n {help_info}', - status='CRITICAL', - title=f'There are {str(total_reviews)} reviews pending to be requested at {academy.name}', - slug='cohort-have-pending-reviews') + f"There are {str(total_reviews)} reviews to be requested because the students gave us 8 or more on the NPS survey: " + f"\n {review_names} \n\n {call_to_action} \n\n {help_info}", + status="CRITICAL", + title=f"There are {str(total_reviews)} reviews pending to be requested at {academy.name}", + slug="cohort-have-pending-reviews", + ) diff --git a/breathecode/monitoring/scripts/send_survey_reminder.py b/breathecode/monitoring/scripts/send_survey_reminder.py index f73895936..73fc37730 100644 --- a/breathecode/monitoring/scripts/send_survey_reminder.py +++ b/breathecode/monitoring/scripts/send_survey_reminder.py @@ -27,12 +27,12 @@ def calculate_weeks(date_created, current_date): cohorts = cohorts.filter(ending_date__gte=TWO_WEEKS_AGO, kickoff_date__lte=TODAY) # exclude cohorts that never end -cohorts = cohorts.exclude(never_ends=True).exclude(stage__in=['DELETED', 'INACTIVE']) +cohorts = cohorts.exclude(never_ends=True).exclude(stage__in=["DELETED", "INACTIVE"]) cohorts_with_pending_surveys = [] if not cohorts: - print('No Active cohorts found for this academy') + print("No Active cohorts found for this academy") not_sent = Survey.objects.filter(cohort__academy__id=ACADEMY_ID, cohort__isnull=False, sent_at__isnull=True) not_sent = [ @@ -40,38 +40,42 @@ def calculate_weeks(date_created, 
current_date): for sur in not_sent ] if len(not_sent) > 0: - not_sent = ('\n').join(not_sent) + not_sent = ("\n").join(not_sent) else: - not_sent = 'No other surveys have issues' + not_sent = "No other surveys have issues" for cohort in cohorts: - lastest_survey = Survey.objects.filter(cohort__id=cohort.id, status__in=['SENT', 'PARTIAL'], - sent_at__isnull=False).order_by('-sent_at').first() + lastest_survey = ( + Survey.objects.filter(cohort__id=cohort.id, status__in=["SENT", "PARTIAL"], sent_at__isnull=False) + .order_by("-sent_at") + .first() + ) if lastest_survey is None: sent_at = cohort.kickoff_date.date() num_weeks = calculate_weeks(sent_at, timezone.now().date()) if num_weeks > 2: - cohorts_with_pending_surveys.append(cohort.name + f': No previous survey was found.') + cohorts_with_pending_surveys.append(cohort.name + f": No previous survey was found.") else: sent_at = lastest_survey.sent_at.date() num_weeks = calculate_weeks(sent_at, timezone.now().date()) if num_weeks > 2: cohorts_with_pending_surveys.append( - cohort.name + - f': Last survey was <a href="{ADMIN_URL}/feedback/surveys/{cohort.slug}/{lastest_survey.id}?location={academy.slug}">{num_weeks} weeks ago</a>' + cohort.name + + f': Last survey was <a href="{ADMIN_URL}/feedback/surveys/{cohort.slug}/{lastest_survey.id}?location={academy.slug}">{num_weeks} weeks ago</a>' ) if len(cohorts_with_pending_surveys) > 0: - cohort_names = ('\n').join(['- ' + cohort_name for cohort_name in cohorts_with_pending_surveys]) + cohort_names = ("\n").join(["- " + cohort_name for cohort_name in cohorts_with_pending_surveys]) raise ScriptNotification( - f'There are {str(len(cohorts_with_pending_surveys))} surveys pending to be sent on these cohorts: ' - f'\n {cohort_names}' - f'\n\n Also, the following surveys have no sent date, you should delete or resolve their issues: \n' - f'\n {not_sent}', - status='MINOR', - title=f'There are {str(len(cohorts_with_pending_surveys))} surveys pending to be sent at {academy.name}', - slug='cohort-have-pending-surveys') + f"There are {str(len(cohorts_with_pending_surveys))} surveys pending to be sent on these cohorts: " + f"\n {cohort_names}" + f"\n\n Also, the following surveys have no sent date, you should delete or resolve their issues: \n" + f"\n {not_sent}", + status="MINOR", + title=f"There are {str(len(cohorts_with_pending_surveys))} surveys pending to be sent at {academy.name}", + slug="cohort-have-pending-surveys", + ) -print('No reminders') +print("No reminders") diff --git a/breathecode/monitoring/serializers.py b/breathecode/monitoring/serializers.py index 98dc4a837..51594aee2 100644 --- a/breathecode/monitoring/serializers.py +++ b/breathecode/monitoring/serializers.py @@ -61,87 +61,104 @@ class RepositorySubscriptionSerializer(serializers.ModelSerializer): class Meta: model = RepositorySubscription - fields = '__all__' + fields = "__all__" def validate(self, data): - academy_id = self.context['academy'] - lang = self.context['lang'] + academy_id = self.context["academy"] + lang = self.context["lang"] # If creating if self.instance is None: - if 'repository' not in data or data['repository'] == '': + if "repository" not in data or data["repository"] == "": raise ValidationException( - translation(lang, - en='You must specify a repository url', - es='Debes especificar el URL del repositorio a subscribir', - slug='missing-repo')) + translation( + lang, + en="You must specify a repository url", + es="Debes especificar el URL del repositorio a subscribir", + slug="missing-repo", + ) + ) 
url_validator = URLValidator() try: - url_validator(data['repository']) - if 'github.com' not in data['repository']: - raise serializers.ValidationError('Only GitHub repositories can be subscribed to') + url_validator(data["repository"]) + if "github.com" not in data["repository"]: + raise serializers.ValidationError("Only GitHub repositories can be subscribed to") except serializers.ValidationError as e: raise ValidationException( - translation(lang, - en=str(e), - es='La URL del repositorio debe ser valida y apuntar a github.com', - slug='invalid-repo-url')) + translation( + lang, + en=str(e), + es="La URL del repositorio debe ser valida y apuntar a github.com", + slug="invalid-repo-url", + ) + ) - subs = RepositorySubscription.objects.filter(owner__id=academy_id, repository=data['repository']).first() + subs = RepositorySubscription.objects.filter(owner__id=academy_id, repository=data["repository"]).first() # Sabe repo and academy subscription cannot be CREATED twice if subs is not None: raise ValidationException( translation( lang, - en= - 'There is already another subscription for the same repository and owner, make sure you have access?', - es='Ya existe una subscripcion para este mismo repositorio y owner, asegurate de tener accesso', - slug='duplicated-repo-subscription')) + en="There is already another subscription for the same repository and owner, make sure you have access?", + es="Ya existe una subscripcion para este mismo repositorio y owner, asegurate de tener accesso", + slug="duplicated-repo-subscription", + ) + ) # If updating if self.instance: - if 'status' in data and data['status'] != self.instance.status and data['status'] not in [ - 'DISABLED', 'OPERATIONAL' - ]: + if ( + "status" in data + and data["status"] != self.instance.status + and data["status"] not in ["DISABLED", "OPERATIONAL"] + ): raise ValidationException( - translation(lang, - en='Repo Subscription status cannot be manually set to ' + data['status'], - es='El status de esta subscripción no puede asignarse manualmente como ' + - data['status'], - slug='cannot-manually-set-status')) + translation( + lang, + en="Repo Subscription status cannot be manually set to " + data["status"], + es="El status de esta subscripción no puede asignarse manualmente como " + data["status"], + slug="cannot-manually-set-status", + ) + ) - if 'repository' in data and data['repository'] != self.instance.repository: + if "repository" in data and data["repository"] != self.instance.repository: raise ValidationException( translation( lang, - en='You cannot update a subscription repository, create a new one instead', - es= - 'No puedes modificar el repositorio de una subscripción, crea una nueva subscripción en su lugar', - slug='cannot-manually-update-repo')) + en="You cannot update a subscription repository, create a new one instead", + es="No puedes modificar el repositorio de una subscripción, crea una nueva subscripción en su lugar", + slug="cannot-manually-update-repo", + ) + ) return super().validate(data) def create(self, validated_data): - academy_id = self.context['academy'] - lang = self.context['lang'] + academy_id = self.context["academy"] + lang = self.context["lang"] settings = AcademyAuthSettings.objects.filter(academy__id=academy_id).first() if settings is None: raise ValidationException( - translation(lang, - en='Github credentials and settings have not been found for the academy', - es='No se han encontrado credenciales y configuración de Github para esta academia', - slug='github-settings-not-found')) - - 
instance = super(RepositorySubscriptionSerializer, self).create({ - **validated_data, - 'owner': settings.academy, - }) + translation( + lang, + en="Github credentials and settings have not been found for the academy", + es="No se han encontrado credenciales y configuración de Github para esta academia", + slug="github-settings-not-found", + ) + ) + + instance = super(RepositorySubscriptionSerializer, self).create( + { + **validated_data, + "owner": settings.academy, + } + ) try: subscription = subscribe_repository(instance.id, settings) - if subscription.status != 'OPERATIONAL': + if subscription.status != "OPERATIONAL": raise Exception(subscription.status_message) except Exception as e: logger.error(str(e)) @@ -149,16 +166,18 @@ def create(self, validated_data): translation( lang, en=str(e), - es='Error al intentar subscribirse al repositorio, revisa la subscripción para mas detalles', - slug='github-error')) - + es="Error al intentar subscribirse al repositorio, revisa la subscripción para mas detalles", + slug="github-error", + ) + ) + return instance def update(self, instance, validated_data): - if instance.status == 'DISABLED' and validated_data['status'] == 'OPERATIONAL': + if instance.status == "DISABLED" and validated_data["status"] == "OPERATIONAL": async_subscribe_repo.delay(instance.id) - elif instance.status == 'OPERATIONAL' and validated_data['status'] == 'DISABLED': + elif instance.status == "OPERATIONAL" and validated_data["status"] == "DISABLED": async_unsubscribe_repo.delay(instance.id, force_delete=False) return super().update(instance, validated_data) diff --git a/breathecode/monitoring/signals.py b/breathecode/monitoring/signals.py index af01e5082..cbf244ef7 100644 --- a/breathecode/monitoring/signals.py +++ b/breathecode/monitoring/signals.py @@ -2,6 +2,7 @@ For each signal you want other apps to be able to receive, you have to declare a new variable here like this: """ + from django import dispatch github_webhook = dispatch.Signal() diff --git a/breathecode/monitoring/tasks.py b/breathecode/monitoring/tasks.py index 7efdab948..e1da1bbb1 100644 --- a/breathecode/monitoring/tasks.py +++ b/breathecode/monitoring/tasks.py @@ -19,48 +19,56 @@ @shared_task(bind=True, priority=TaskPriority.MONITORING.value) def test_endpoint(self, endpoint_id): - logger.debug('Starting monitor_app') + logger.debug("Starting monitor_app") endpoint = Endpoint.objects.get(id=endpoint_id) now = timezone.now() if endpoint.paused_until is not None and endpoint.paused_until > now: - logger.debug(f'Ignoring App: {endpoint.url} monitor because its paused') + logger.debug(f"Ignoring App: {endpoint.url} monitor because its paused") return True - logger.debug(f'Running diagnostic for: {endpoint.url} ') + logger.debug(f"Running diagnostic for: {endpoint.url} ") result = run_endpoint_diagnostic(endpoint.id) if not result: # the endpoint diagnostic did not run. 
return False - if result['status'] != 'OPERATIONAL': + if result["status"] != "OPERATIONAL": if endpoint.application.notify_email: send_email_message( - 'diagnostic', - endpoint.application.notify_email, { - 'subject': f'Errors found on app {endpoint.application.title} endpoint {endpoint.url}', - 'details': result['details'], + "diagnostic", + endpoint.application.notify_email, + { + "subject": f"Errors found on app {endpoint.application.title} endpoint {endpoint.url}", + "details": result["details"], }, - academy=endpoint.application.academy) - - if (endpoint.application.notify_slack_channel and endpoint.application.academy - and hasattr(endpoint.application.academy, 'slackteam') - and hasattr(endpoint.application.academy.slackteam.owner, 'credentialsslack')): - - send_slack_raw('diagnostic', - endpoint.application.academy.slackteam.owner.credentialsslack.token, - endpoint.application.notify_slack_channel.slack_id, { - 'subject': f'Errors found on app {endpoint.application.title} endpoint {endpoint.url}', - **result, - }, - academy=endpoint.application.academy) + academy=endpoint.application.academy, + ) + + if ( + endpoint.application.notify_slack_channel + and endpoint.application.academy + and hasattr(endpoint.application.academy, "slackteam") + and hasattr(endpoint.application.academy.slackteam.owner, "credentialsslack") + ): + + send_slack_raw( + "diagnostic", + endpoint.application.academy.slackteam.owner.credentialsslack.token, + endpoint.application.notify_slack_channel.slack_id, + { + "subject": f"Errors found on app {endpoint.application.title} endpoint {endpoint.url}", + **result, + }, + academy=endpoint.application.academy, + ) @shared_task(bind=True, priority=TaskPriority.MONITORING.value) def monitor_app(self, app_id): - logger.debug('Starting monitor_app') - endpoints = Endpoint.objects.filter(application__id=app_id).values_list('id', flat=True) + logger.debug("Starting monitor_app") + endpoints = Endpoint.objects.filter(application__id=app_id).values_list("id", flat=True) for endpoint_id in endpoints: test_endpoint.delay(endpoint_id) @@ -68,20 +76,20 @@ def monitor_app(self, app_id): @shared_task(bind=True, priority=TaskPriority.MONITORING.value) def execute_scripts(self, script_id): script = MonitorScript.objects.get(id=script_id) - logger.debug(f'Starting execute_scripts for {script.script_slug}') + logger.debug(f"Starting execute_scripts for {script.script_slug}") app = script.application now = timezone.now() if script.paused_until is not None and script.paused_until > now: - logger.debug('Ignoring script exec because its paused') + logger.debug("Ignoring script exec because its paused") return True result = run_script(script) - if result['status'] != 'OPERATIONAL': - logger.debug('Errors found, sending script report to ') - subject = f'Errors have been found on {app.title} script {script.id} (slug: {script.script_slug})' - if 'title' in result and result['title'] is not None and result['title'] != '': - subject = result['title'] + if result["status"] != "OPERATIONAL": + logger.debug("Errors found, sending script report to ") + subject = f"Errors have been found on {app.title} script {script.id} (slug: {script.script_slug})" + if "title" in result and result["title"] is not None and result["title"] != "": + subject = result["title"] email = None if script.notify_email is not None: @@ -91,28 +99,39 @@ def execute_scripts(self, script_id): if email is None: logger.debug( - f'No email set for monitoring app or script, skiping email notification for {script.script_slug}') + 
f"No email set for monitoring app or script, skiping email notification for {script.script_slug}" + ) else: - logger.debug(f'Sending script notification report to {email}') - - send_email_message('diagnostic', - email, { - 'subject': subject, - 'details': result['text'], - 'button': result['btn'], - }, - academy=script.application.academy) - - if (app.notify_slack_channel and app.academy and hasattr(app.academy, 'slackteam') - and hasattr(app.academy.slackteam.owner, 'credentialsslack')): + logger.debug(f"Sending script notification report to {email}") + + send_email_message( + "diagnostic", + email, + { + "subject": subject, + "details": result["text"], + "button": result["btn"], + }, + academy=script.application.academy, + ) + + if ( + app.notify_slack_channel + and app.academy + and hasattr(app.academy, "slackteam") + and hasattr(app.academy.slackteam.owner, "credentialsslack") + ): try: - send_slack_raw('diagnostic', - app.academy.slackteam.owner.credentialsslack.token, - app.notify_slack_channel.slack_id, { - 'subject': subject, - **result, - }, - academy=script.application.academy) + send_slack_raw( + "diagnostic", + app.academy.slackteam.owner.credentialsslack.token, + app.notify_slack_channel.slack_id, + { + "subject": subject, + **result, + }, + academy=script.application.academy, + ) except Exception: return False return False @@ -122,39 +141,39 @@ def execute_scripts(self, script_id): @shared_task(bind=True, priority=TaskPriority.MARKETING.value) def async_download_csv(self, module, model_name, ids_to_download): - logger.debug('Starting to download csv for ') + logger.debug("Starting to download csv for ") return download_csv(module, model_name, ids_to_download) @shared_task(bind=True, priority=TaskPriority.MARKETING.value) def async_unsubscribe_repo(self, subs_id, force_delete): - logger.debug('Async unsubscribe from repo') + logger.debug("Async unsubscribe from repo") return unsubscribe_repository(subs_id, force_delete) != False @shared_task(bind=True, priority=TaskPriority.MARKETING.value) def async_subscribe_repo(self, subs_id): - logger.debug('Async subscribe to repo') + logger.debug("Async subscribe to repo") subscription = subscribe_repository(subs_id) - return subscription != False and subscription.status != 'OPERATIONAL' + return subscription != False and subscription.status != "OPERATIONAL" @task(priority=TaskPriority.MARKETING.value) def run_supervisor(supervisor_id: int, **_: Any): - logger.debug(f'Run supervisor {supervisor_id}') + logger.debug(f"Run supervisor {supervisor_id}") supervisor = Supervisor.objects.filter(id=supervisor_id).first() if not supervisor: - raise RetryTask(f'Supervisor {supervisor_id} not found') + raise RetryTask(f"Supervisor {supervisor_id} not found") try: module = importlib.import_module(supervisor.task_module) except ModuleNotFoundError: - raise AbortTask(f'Module {supervisor.task_module} not found') + raise AbortTask(f"Module {supervisor.task_module} not found") try: func = getattr(module, supervisor.task_name) except AttributeError: - raise AbortTask(f'Supervisor {supervisor.task_module}.{supervisor.task_name} not found') + raise AbortTask(f"Supervisor {supervisor.task_module}.{supervisor.task_name} not found") supervisor.ran_at = timezone.now() func() @@ -163,29 +182,29 @@ def run_supervisor(supervisor_id: int, **_: Any): @task(priority=TaskPriority.MARKETING.value) def fix_issue(issue_id: int, **_: Any): - logger.debug(f'Fix issue {issue_id}') + logger.debug(f"Fix issue {issue_id}") issue = 
SupervisorIssue.objects.filter(id=issue_id).first() if not issue: - raise RetryTask(f'Issue {issue_id} not found') + raise RetryTask(f"Issue {issue_id} not found") if not issue.code: - raise AbortTask(f'Issue {issue_id} has no code') + raise AbortTask(f"Issue {issue_id} has no code") supervisor = issue.supervisor try: module = importlib.import_module(supervisor.task_module) except ModuleNotFoundError: - raise AbortTask(f'Module {supervisor.task_module} not found') + raise AbortTask(f"Module {supervisor.task_module} not found") - fn_name = issue.code.replace('-', '_') + fn_name = issue.code.replace("-", "_") try: func = getattr(module, fn_name) except AttributeError: - raise AbortTask(f'Supervisor {supervisor.task_module}.{fn_name} not found') + raise AbortTask(f"Supervisor {supervisor.task_module}.{fn_name} not found") if issue.attempts >= func.attempts: - raise AbortTask(f'Supervisor {supervisor.task_module}.{fn_name} has reached max attempts') + raise AbortTask(f"Supervisor {supervisor.task_module}.{fn_name} has reached max attempts") issue.ran_at = timezone.now() issue.attempts += 1 diff --git a/breathecode/monitoring/tests/admin/tests_pause_for_one_day.py b/breathecode/monitoring/tests/admin/tests_pause_for_one_day.py index f812bb2f1..f32472123 100644 --- a/breathecode/monitoring/tests/admin/tests_pause_for_one_day.py +++ b/breathecode/monitoring/tests/admin/tests_pause_for_one_day.py @@ -28,12 +28,14 @@ def tests_pause_for_one_day_length_1(self): self.assertEqual(result, None) - endpoints = [{ - **endpoint, 'paused_until': None - } for endpoint in self.all_endpoint_dict() if self.assertDatetime(endpoint['paused_until'])] - self.assertEqual(endpoints, [{ - **self.model_to_dict(model, 'endpoint'), 'frequency_in_minutes': 30.0 - } for model in models]) + endpoints = [ + {**endpoint, "paused_until": None} + for endpoint in self.all_endpoint_dict() + if self.assertDatetime(endpoint["paused_until"]) + ] + self.assertEqual( + endpoints, [{**self.model_to_dict(model, "endpoint"), "frequency_in_minutes": 30.0} for model in models] + ) def tests_pause_for_one_day_length_3(self): request = HttpRequest() @@ -43,9 +45,11 @@ def tests_pause_for_one_day_length_3(self): self.assertEqual(result, None) - endpoints = [{ - **endpoint, 'paused_until': None - } for endpoint in self.all_endpoint_dict() if self.assertDatetime(endpoint['paused_until'])] - self.assertEqual(endpoints, [{ - **self.model_to_dict(model, 'endpoint'), 'frequency_in_minutes': 30.0 - } for model in models]) + endpoints = [ + {**endpoint, "paused_until": None} + for endpoint in self.all_endpoint_dict() + if self.assertDatetime(endpoint["paused_until"]) + ] + self.assertEqual( + endpoints, [{**self.model_to_dict(model, "endpoint"), "frequency_in_minutes": 30.0} for model in models] + ) diff --git a/breathecode/monitoring/tests/admin/tests_run_single_script.py b/breathecode/monitoring/tests/admin/tests_run_single_script.py index 3f3277369..49f7fd2a3 100644 --- a/breathecode/monitoring/tests/admin/tests_run_single_script.py +++ b/breathecode/monitoring/tests/admin/tests_run_single_script.py @@ -8,7 +8,7 @@ from ...models import MonitorScript RUN_SCRIPT_MOCK = MagicMock() -RUN_SCRIPT_PATH = 'breathecode.monitoring.tasks.execute_scripts' +RUN_SCRIPT_PATH = "breathecode.monitoring.tasks.execute_scripts" # This tests check functions are called, remember that this functions are @@ -38,7 +38,7 @@ def tests_run_single_script_length_1(self): result = run_single_script(None, request, MonitorScript.objects.all()) self.assertEqual(result, None) - 
self.assertEqual(mock_run_script.call_args_list, [call(model['monitor_script'].id) for model in models]) + self.assertEqual(mock_run_script.call_args_list, [call(model["monitor_script"].id) for model in models]) @patch(RUN_SCRIPT_PATH, RUN_SCRIPT_MOCK) def tests_run_single_script_length_3(self): @@ -51,4 +51,4 @@ def tests_run_single_script_length_3(self): result = run_single_script(None, request, MonitorScript.objects.all()) self.assertEqual(result, None) - self.assertEqual(mock_run_script.call_args_list, [call(model['monitor_script'].id) for model in models]) + self.assertEqual(mock_run_script.call_args_list, [call(model["monitor_script"].id) for model in models]) diff --git a/breathecode/monitoring/tests/admin/tests_test_app.py b/breathecode/monitoring/tests/admin/tests_test_app.py index d1c0e451a..c32a739ea 100644 --- a/breathecode/monitoring/tests/admin/tests_test_app.py +++ b/breathecode/monitoring/tests/admin/tests_test_app.py @@ -2,12 +2,13 @@ from django.http.request import HttpRequest from ..mixins import MonitoringTestCase + # that 'import as' is thanks pytest think 'test_app' is one fixture from ...admin import test_app as check_app from ...models import Application CURRENT_MOCK = MagicMock() -CURRENT_PATH = 'breathecode.monitoring.tasks.monitor_app' +CURRENT_PATH = "breathecode.monitoring.tasks.monitor_app" # This tests check functions are called, remember that this functions are @@ -37,7 +38,7 @@ def tests_test_app_length_1(self): result = check_app(None, request, Application.objects.all()) self.assertEqual(result, None) - self.assertEqual(mock.call_args_list, [call(model['application'].id) for model in models]) + self.assertEqual(mock.call_args_list, [call(model["application"].id) for model in models]) @patch(CURRENT_PATH, CURRENT_MOCK) def tests_test_app_length_3(self): @@ -50,4 +51,4 @@ def tests_test_app_length_3(self): result = check_app(None, request, Application.objects.all()) self.assertEqual(result, None) - self.assertEqual(mock.call_args_list, [call(model['application'].id) for model in models]) + self.assertEqual(mock.call_args_list, [call(model["application"].id) for model in models]) diff --git a/breathecode/monitoring/tests/admin/tests_test_endpoint.py b/breathecode/monitoring/tests/admin/tests_test_endpoint.py index 4f40396dd..c225f3bda 100644 --- a/breathecode/monitoring/tests/admin/tests_test_endpoint.py +++ b/breathecode/monitoring/tests/admin/tests_test_endpoint.py @@ -2,12 +2,13 @@ from django.http.request import HttpRequest from ..mixins import MonitoringTestCase + # that 'import as' is thanks pytest think 'test_endpoint' is one fixture from ...admin import test_endpoint as check_endpoint from ...models import Endpoint CURRENT_MOCK = MagicMock() -CURRENT_PATH = 'breathecode.monitoring.tasks.test_endpoint' +CURRENT_PATH = "breathecode.monitoring.tasks.test_endpoint" # This tests check functions are called, remember that this functions are @@ -37,7 +38,7 @@ def tests_test_endpoint_length_1(self): result = check_endpoint(None, request, Endpoint.objects.all()) self.assertEqual(result, None) - self.assertEqual(mock.call_args_list, [call(model['endpoint'].id) for model in models]) + self.assertEqual(mock.call_args_list, [call(model["endpoint"].id) for model in models]) @patch(CURRENT_PATH, CURRENT_MOCK) def tests_test_endpoint_length_3(self): @@ -50,4 +51,4 @@ def tests_test_endpoint_length_3(self): result = check_endpoint(None, request, Endpoint.objects.all()) self.assertEqual(result, None) - self.assertEqual(mock.call_args_list, [call(model['endpoint'].id) 
for model in models]) + self.assertEqual(mock.call_args_list, [call(model["endpoint"].id) for model in models]) diff --git a/breathecode/monitoring/tests/management/commands/tests_monitor.py b/breathecode/monitoring/tests/management/commands/tests_monitor.py index 287c3a118..c59d42cbe 100644 --- a/breathecode/monitoring/tests/management/commands/tests_monitor.py +++ b/breathecode/monitoring/tests/management/commands/tests_monitor.py @@ -1,12 +1,24 @@ from datetime import timedelta from django.utils import timezone from unittest.mock import patch, MagicMock, call, mock_open -from breathecode.tests.mocks import (GOOGLE_CLOUD_PATH, apply_google_cloud_client_mock, apply_google_cloud_bucket_mock, - apply_google_cloud_blob_mock, MAILGUN_PATH, MAILGUN_INSTANCES, - apply_mailgun_requests_post_mock, SLACK_PATH, SLACK_INSTANCES, - apply_slack_requests_request_mock, REQUESTS_PATH, REQUESTS_INSTANCES, - apply_requests_get_mock, LOGGING_PATH, LOGGING_INSTANCES, - apply_logging_logger_mock) +from breathecode.tests.mocks import ( + GOOGLE_CLOUD_PATH, + apply_google_cloud_client_mock, + apply_google_cloud_bucket_mock, + apply_google_cloud_blob_mock, + MAILGUN_PATH, + MAILGUN_INSTANCES, + apply_mailgun_requests_post_mock, + SLACK_PATH, + SLACK_INSTANCES, + apply_slack_requests_request_mock, + REQUESTS_PATH, + REQUESTS_INSTANCES, + apply_requests_get_mock, + LOGGING_PATH, + LOGGING_INSTANCES, + apply_logging_logger_mock, +) from ...mixins import MonitoringTestCase from ....management.commands.monitor import Command @@ -16,20 +28,20 @@ class AcademyCohortTestSuite(MonitoringTestCase): 🔽🔽🔽 With bad entity 🔽🔽🔽 """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch(MAILGUN_PATH['post'], apply_mailgun_requests_post_mock()) - @patch(SLACK_PATH['request'], apply_slack_requests_request_mock()) - @patch(REQUESTS_PATH['get'], apply_requests_get_mock([(200, 'https://potato.io', {})])) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch(MAILGUN_PATH["post"], apply_mailgun_requests_post_mock()) + @patch(SLACK_PATH["request"], apply_slack_requests_request_mock()) + @patch(REQUESTS_PATH["get"], apply_requests_get_mock([(200, "https://potato.io", {})])) def tests_monitor_without_entity(self): - mock_mailgun = MAILGUN_INSTANCES['post'] + mock_mailgun = MAILGUN_INSTANCES["post"] mock_mailgun.call_args_list = [] - mock_slack = SLACK_INSTANCES['request'] + mock_slack = SLACK_INSTANCES["request"] mock_slack.call_args_list = [] - mock_breathecode = REQUESTS_INSTANCES['get'] + mock_breathecode = REQUESTS_INSTANCES["get"] mock_breathecode.call_args_list = [] command = Command() @@ -38,36 +50,36 @@ def tests_monitor_without_entity(self): self.assertEqual(command.handle(), None) self.assertEqual(command.stdout.write.call_args_list, []) - self.assertEqual(command.stderr.write.call_args_list, [call('Entity arguments is not set')]) + self.assertEqual(command.stderr.write.call_args_list, [call("Entity arguments is not set")]) self.assertEqual(self.all_endpoint_dict(), []) self.assertEqual(mock_mailgun.call_args_list, []) self.assertEqual(mock_slack.call_args_list, []) self.assertEqual(mock_breathecode.call_args_list, []) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - 
@patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch(MAILGUN_PATH['post'], apply_mailgun_requests_post_mock()) - @patch(SLACK_PATH['request'], apply_slack_requests_request_mock()) - @patch(REQUESTS_PATH['get'], apply_requests_get_mock([(200, 'https://potato.io', {})])) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch(MAILGUN_PATH["post"], apply_mailgun_requests_post_mock()) + @patch(SLACK_PATH["request"], apply_slack_requests_request_mock()) + @patch(REQUESTS_PATH["get"], apply_requests_get_mock([(200, "https://potato.io", {})])) def tests_monitor_with_bad_entity(self): - mock_mailgun = MAILGUN_INSTANCES['post'] + mock_mailgun = MAILGUN_INSTANCES["post"] mock_mailgun.call_args_list = [] - mock_slack = SLACK_INSTANCES['request'] + mock_slack = SLACK_INSTANCES["request"] mock_slack.call_args_list = [] - mock_breathecode = REQUESTS_INSTANCES['get'] + mock_breathecode = REQUESTS_INSTANCES["get"] mock_breathecode.call_args_list = [] command = Command() command.stdout.write = MagicMock() command.stderr.write = MagicMock() - self.assertEqual(command.handle(entity='they-killed-kenny'), None) + self.assertEqual(command.handle(entity="they-killed-kenny"), None) self.assertEqual(command.stdout.write.call_args_list, []) - self.assertEqual(command.stderr.write.call_args_list, [call('Entity not found')]) + self.assertEqual(command.stderr.write.call_args_list, [call("Entity not found")]) self.assertEqual(self.all_endpoint_dict(), []) self.assertEqual(mock_mailgun.call_args_list, []) @@ -78,30 +90,31 @@ def tests_monitor_with_bad_entity(self): # 🔽🔽🔽 App entity 🔽🔽🔽 # """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch(MAILGUN_PATH['post'], apply_mailgun_requests_post_mock()) - @patch(SLACK_PATH['request'], apply_slack_requests_request_mock()) - @patch(REQUESTS_PATH['get'], apply_requests_get_mock([(200, 'https://potato.io', {})])) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch(MAILGUN_PATH["post"], apply_mailgun_requests_post_mock()) + @patch(SLACK_PATH["request"], apply_slack_requests_request_mock()) + @patch(REQUESTS_PATH["get"], apply_requests_get_mock([(200, "https://potato.io", {})])) def tests_monitor_with_entity_apps_without_application(self): - mock_mailgun = MAILGUN_INSTANCES['post'] + mock_mailgun = MAILGUN_INSTANCES["post"] mock_mailgun.call_args_list = [] - mock_slack = SLACK_INSTANCES['request'] + mock_slack = SLACK_INSTANCES["request"] mock_slack.call_args_list = [] command = Command() command.stdout.write = MagicMock() command.stderr.write = MagicMock() - self.assertEqual(command.handle(entity='apps'), None) - self.assertEqual(command.stdout.write.call_args_list, [call('Enqueued 0 apps for diagnostic')]) + self.assertEqual(command.handle(entity="apps"), None) + self.assertEqual(command.stdout.write.call_args_list, [call("Enqueued 0 apps for diagnostic")]) self.assertEqual(command.stderr.write.call_args_list, []) self.assertEqual(self.all_application_dict(), []) 
self.assertEqual(self.all_endpoint_dict(), []) import requests + mock_breathecode = requests.get mock_breathecode.call_args_list = [] @@ -109,17 +122,17 @@ def tests_monitor_with_entity_apps_without_application(self): self.assertEqual(mock_slack.call_args_list, []) self.assertEqual(mock_breathecode.call_args_list, []) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch(MAILGUN_PATH['post'], apply_mailgun_requests_post_mock()) - @patch(SLACK_PATH['request'], apply_slack_requests_request_mock()) - @patch(REQUESTS_PATH['get'], apply_requests_get_mock([(200, 'https://potato.io', {})])) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch(MAILGUN_PATH["post"], apply_mailgun_requests_post_mock()) + @patch(SLACK_PATH["request"], apply_slack_requests_request_mock()) + @patch(REQUESTS_PATH["get"], apply_requests_get_mock([(200, "https://potato.io", {})])) def tests_monitor_with_entity_apps_without_endpoints(self): - mock_mailgun = MAILGUN_INSTANCES['post'] + mock_mailgun = MAILGUN_INSTANCES["post"] mock_mailgun.call_args_list = [] - mock_slack = SLACK_INSTANCES['request'] + mock_slack = SLACK_INSTANCES["request"] mock_slack.call_args_list = [] model = self.generate_models(application=True) @@ -127,15 +140,21 @@ def tests_monitor_with_entity_apps_without_endpoints(self): command.stdout.write = MagicMock() command.stderr.write = MagicMock() - self.assertEqual(command.handle(entity='apps'), None) - self.assertEqual(command.stdout.write.call_args_list, [call('Enqueued 1 apps for diagnostic')]) + self.assertEqual(command.handle(entity="apps"), None) + self.assertEqual(command.stdout.write.call_args_list, [call("Enqueued 1 apps for diagnostic")]) self.assertEqual(command.stderr.write.call_args_list, []) - self.assertEqual(self.all_application_dict(), [{ - **self.model_to_dict(model, 'application'), - }]) + self.assertEqual( + self.all_application_dict(), + [ + { + **self.model_to_dict(model, "application"), + } + ], + ) self.assertEqual(self.all_endpoint_dict(), []) import requests + mock_breathecode = requests.get mock_breathecode.call_args_list = [] @@ -143,22 +162,22 @@ def tests_monitor_with_entity_apps_without_endpoints(self): self.assertEqual(mock_slack.call_args_list, []) self.assertEqual(mock_breathecode.call_args_list, []) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch(MAILGUN_PATH['post'], apply_mailgun_requests_post_mock()) - @patch(SLACK_PATH['request'], apply_slack_requests_request_mock()) - @patch(REQUESTS_PATH['get'], apply_requests_get_mock([(200, 'https://potato.io', {})])) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch(MAILGUN_PATH["post"], apply_mailgun_requests_post_mock()) + @patch(SLACK_PATH["request"], apply_slack_requests_request_mock()) + @patch(REQUESTS_PATH["get"], apply_requests_get_mock([(200, "https://potato.io", {})])) def tests_monitor_with_entity_apps_with_bad_endpoint_paused_until(self): - 
mock_mailgun = MAILGUN_INSTANCES['post'] + mock_mailgun = MAILGUN_INSTANCES["post"] mock_mailgun.call_args_list = [] - mock_slack = SLACK_INSTANCES['request'] + mock_slack = SLACK_INSTANCES["request"] mock_slack.call_args_list = [] endpoint_kwargs = { - 'url': 'https://potato.io', - 'paused_until': timezone.now() + timedelta(minutes=2), + "url": "https://potato.io", + "paused_until": timezone.now() + timedelta(minutes=2), } model = self.generate_models(application=True, endpoint=True, endpoint_kwargs=endpoint_kwargs) @@ -166,21 +185,32 @@ def tests_monitor_with_entity_apps_with_bad_endpoint_paused_until(self): command.stdout.write = MagicMock() command.stderr.write = MagicMock() - self.assertEqual(command.handle(entity='apps'), None) - self.assertEqual(command.stdout.write.call_args_list, [call('Enqueued 1 apps for diagnostic')]) + self.assertEqual(command.handle(entity="apps"), None) + self.assertEqual(command.stdout.write.call_args_list, [call("Enqueued 1 apps for diagnostic")]) self.assertEqual(command.stderr.write.call_args_list, []) - self.assertEqual(self.all_application_dict(), [{ - **self.model_to_dict(model, 'application'), - }]) + self.assertEqual( + self.all_application_dict(), + [ + { + **self.model_to_dict(model, "application"), + } + ], + ) - self.assertEqual(self.all_endpoint_dict(), [{ - **self.model_to_dict(model, 'endpoint'), - 'frequency_in_minutes': 30.0, - 'severity_level': 0, - 'status_text': None, - }]) + self.assertEqual( + self.all_endpoint_dict(), + [ + { + **self.model_to_dict(model, "endpoint"), + "frequency_in_minutes": 30.0, + "severity_level": 0, + "status_text": None, + } + ], + ) import requests + mock_breathecode = requests.get mock_breathecode.call_args_list = [] @@ -188,22 +218,22 @@ def tests_monitor_with_entity_apps_with_bad_endpoint_paused_until(self): self.assertEqual(mock_slack.call_args_list, []) self.assertEqual(mock_breathecode.call_args_list, []) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch(MAILGUN_PATH['post'], apply_mailgun_requests_post_mock()) - @patch(SLACK_PATH['request'], apply_slack_requests_request_mock()) - @patch(REQUESTS_PATH['get'], apply_requests_get_mock([(200, 'https://potato.io', {})])) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch(MAILGUN_PATH["post"], apply_mailgun_requests_post_mock()) + @patch(SLACK_PATH["request"], apply_slack_requests_request_mock()) + @patch(REQUESTS_PATH["get"], apply_requests_get_mock([(200, "https://potato.io", {})])) def tests_monitor_with_entity_apps_with_endpoint_paused_until(self): - mock_mailgun = MAILGUN_INSTANCES['post'] + mock_mailgun = MAILGUN_INSTANCES["post"] mock_mailgun.call_args_list = [] - mock_slack = SLACK_INSTANCES['request'] + mock_slack = SLACK_INSTANCES["request"] mock_slack.call_args_list = [] endpoint_kwargs = { - 'url': 'https://potato.io', - 'paused_until': timezone.now(), + "url": "https://potato.io", + "paused_until": timezone.now(), } model = self.generate_models(application=True, endpoint=True, endpoint_kwargs=endpoint_kwargs) @@ -211,25 +241,38 @@ def tests_monitor_with_entity_apps_with_endpoint_paused_until(self): command.stdout.write = MagicMock() command.stderr.write = MagicMock() - 
self.assertEqual(command.handle(entity='apps'), None) - self.assertEqual(command.stdout.write.call_args_list, [call('Enqueued 1 apps for diagnostic')]) + self.assertEqual(command.handle(entity="apps"), None) + self.assertEqual(command.stdout.write.call_args_list, [call("Enqueued 1 apps for diagnostic")]) self.assertEqual(command.stderr.write.call_args_list, []) - self.assertEqual(self.all_application_dict(), [{ - **self.model_to_dict(model, 'application'), - }]) - - endpoints = [{ - **endpoint, 'last_check': None - } for endpoint in self.all_endpoint_dict() if self.assertDatetime(endpoint['last_check'])] - self.assertEqual(endpoints, [{ - **self.model_to_dict(model, 'endpoint'), - 'frequency_in_minutes': 30.0, - 'response_text': None, - 'severity_level': 5, - 'status_text': 'Status withing the 2xx range', - }]) + self.assertEqual( + self.all_application_dict(), + [ + { + **self.model_to_dict(model, "application"), + } + ], + ) + + endpoints = [ + {**endpoint, "last_check": None} + for endpoint in self.all_endpoint_dict() + if self.assertDatetime(endpoint["last_check"]) + ] + self.assertEqual( + endpoints, + [ + { + **self.model_to_dict(model, "endpoint"), + "frequency_in_minutes": 30.0, + "response_text": None, + "severity_level": 5, + "status_text": "Status withing the 2xx range", + } + ], + ) import requests + mock_breathecode = requests.get mock_breathecode.call_args_list = [] @@ -237,22 +280,22 @@ def tests_monitor_with_entity_apps_with_endpoint_paused_until(self): self.assertEqual(mock_slack.call_args_list, []) self.assertEqual(mock_breathecode.call_args_list, []) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch(MAILGUN_PATH['post'], apply_mailgun_requests_post_mock()) - @patch(SLACK_PATH['request'], apply_slack_requests_request_mock()) - @patch(REQUESTS_PATH['get'], apply_requests_get_mock([(200, 'https://potato.io', {})])) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch(MAILGUN_PATH["post"], apply_mailgun_requests_post_mock()) + @patch(SLACK_PATH["request"], apply_slack_requests_request_mock()) + @patch(REQUESTS_PATH["get"], apply_requests_get_mock([(200, "https://potato.io", {})])) def tests_monitor_with_entity_apps_with_bad_application_paused_until(self): - mock_mailgun = MAILGUN_INSTANCES['post'] + mock_mailgun = MAILGUN_INSTANCES["post"] mock_mailgun.call_args_list = [] - mock_slack = SLACK_INSTANCES['request'] + mock_slack = SLACK_INSTANCES["request"] mock_slack.call_args_list = [] endpoint_kwargs = { - 'url': 'https://potato.io', - 'paused_until': timezone.now() + timedelta(minutes=2), + "url": "https://potato.io", + "paused_until": timezone.now() + timedelta(minutes=2), } model = self.generate_models(application=True, endpoint=True, endpoint_kwargs=endpoint_kwargs) @@ -260,462 +303,607 @@ def tests_monitor_with_entity_apps_with_bad_application_paused_until(self): command.stdout.write = MagicMock() command.stderr.write = MagicMock() - self.assertEqual(command.handle(entity='apps'), None) - self.assertEqual(command.stdout.write.call_args_list, [call('Enqueued 1 apps for diagnostic')]) + self.assertEqual(command.handle(entity="apps"), None) + self.assertEqual(command.stdout.write.call_args_list, [call("Enqueued 1 apps for diagnostic")]) 
self.assertEqual(command.stderr.write.call_args_list, []) - self.assertEqual(self.all_application_dict(), [{ - **self.model_to_dict(model, 'application'), - }]) + self.assertEqual( + self.all_application_dict(), + [ + { + **self.model_to_dict(model, "application"), + } + ], + ) - self.assertEqual(self.all_endpoint_dict(), [{ - **self.model_to_dict(model, 'endpoint'), - 'frequency_in_minutes': 30.0, - 'severity_level': 0, - }]) + self.assertEqual( + self.all_endpoint_dict(), + [ + { + **self.model_to_dict(model, "endpoint"), + "frequency_in_minutes": 30.0, + "severity_level": 0, + } + ], + ) import requests + mock_breathecode = requests.get self.assertEqual(mock_mailgun.call_args_list, []) self.assertEqual(mock_slack.call_args_list, []) self.assertEqual(mock_breathecode.call_args_list, []) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch(MAILGUN_PATH['post'], apply_mailgun_requests_post_mock()) - @patch(SLACK_PATH['request'], apply_slack_requests_request_mock()) - @patch(REQUESTS_PATH['get'], apply_requests_get_mock([(100, 'https://potato.io', {})])) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch(MAILGUN_PATH["post"], apply_mailgun_requests_post_mock()) + @patch(SLACK_PATH["request"], apply_slack_requests_request_mock()) + @patch(REQUESTS_PATH["get"], apply_requests_get_mock([(100, "https://potato.io", {})])) def tests_monitor_with_entity_apps_status_100(self): - mock_mailgun = MAILGUN_INSTANCES['post'] + mock_mailgun = MAILGUN_INSTANCES["post"] mock_mailgun.call_args_list = [] - mock_slack = SLACK_INSTANCES['request'] + mock_slack = SLACK_INSTANCES["request"] mock_slack.call_args_list = [] - model = self.generate_models(application=True, endpoint=True, endpoint_kwargs={'url': 'https://potato.io'}) + model = self.generate_models(application=True, endpoint=True, endpoint_kwargs={"url": "https://potato.io"}) command = Command() command.stdout.write = MagicMock() command.stderr.write = MagicMock() - self.assertEqual(command.handle(entity='apps'), None) - self.assertEqual(command.stdout.write.call_args_list, [call('Enqueued 1 apps for diagnostic')]) + self.assertEqual(command.handle(entity="apps"), None) + self.assertEqual(command.stdout.write.call_args_list, [call("Enqueued 1 apps for diagnostic")]) self.assertEqual(command.stderr.write.call_args_list, []) - self.assertEqual(self.all_application_dict(), [{ - **self.model_to_dict(model, 'application'), - }]) - - endpoints = [{ - **endpoint, 'last_check': None - } for endpoint in self.all_endpoint_dict() if self.assertDatetime(endpoint['last_check'])] - self.assertEqual(endpoints, [{ - **self.model_to_dict(model, 'endpoint'), - 'frequency_in_minutes': 30.0, - 'response_text': '{}', - 'severity_level': 0, - 'status': 'MINOR', - 'status_code': 100, - 'status_text': 'Uknown status code, lower than 200', - }]) + self.assertEqual( + self.all_application_dict(), + [ + { + **self.model_to_dict(model, "application"), + } + ], + ) + + endpoints = [ + {**endpoint, "last_check": None} + for endpoint in self.all_endpoint_dict() + if self.assertDatetime(endpoint["last_check"]) + ] + self.assertEqual( + endpoints, + [ + { + **self.model_to_dict(model, "endpoint"), + "frequency_in_minutes": 30.0, + "response_text": "{}", 
+ "severity_level": 0, + "status": "MINOR", + "status_code": 100, + "status_text": "Uknown status code, lower than 200", + } + ], + ) self.assertEqual(mock_mailgun.call_args_list, []) self.assertEqual(mock_slack.call_args_list, []) import requests - mock_breathecode = requests.get - self.assertEqual(mock_breathecode.call_args_list, - [call('https://potato.io', headers={'User-Agent': 'BreathecodeMonitoring/1.0'}, timeout=2)]) + mock_breathecode = requests.get - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch(MAILGUN_PATH['post'], apply_mailgun_requests_post_mock()) - @patch(SLACK_PATH['request'], apply_slack_requests_request_mock()) - @patch(REQUESTS_PATH['get'], apply_requests_get_mock([(200, 'https://potato.io', {})])) + self.assertEqual( + mock_breathecode.call_args_list, + [call("https://potato.io", headers={"User-Agent": "BreathecodeMonitoring/1.0"}, timeout=2)], + ) + + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch(MAILGUN_PATH["post"], apply_mailgun_requests_post_mock()) + @patch(SLACK_PATH["request"], apply_slack_requests_request_mock()) + @patch(REQUESTS_PATH["get"], apply_requests_get_mock([(200, "https://potato.io", {})])) def tests_monitor_with_entity_apps_status_200(self): - mock_mailgun = MAILGUN_INSTANCES['post'] + mock_mailgun = MAILGUN_INSTANCES["post"] mock_mailgun.call_args_list = [] - mock_slack = SLACK_INSTANCES['request'] + mock_slack = SLACK_INSTANCES["request"] mock_slack.call_args_list = [] - model = self.generate_models(application=True, endpoint=True, endpoint_kwargs={'url': 'https://potato.io'}) + model = self.generate_models(application=True, endpoint=True, endpoint_kwargs={"url": "https://potato.io"}) command = Command() command.stdout.write = MagicMock() command.stderr.write = MagicMock() - self.assertEqual(command.handle(entity='apps'), None) - self.assertEqual(command.stdout.write.call_args_list, [call('Enqueued 1 apps for diagnostic')]) + self.assertEqual(command.handle(entity="apps"), None) + self.assertEqual(command.stdout.write.call_args_list, [call("Enqueued 1 apps for diagnostic")]) self.assertEqual(command.stderr.write.call_args_list, []) - self.assertEqual(self.all_application_dict(), [{ - **self.model_to_dict(model, 'application'), - }]) - - endpoints = [{ - **endpoint, 'last_check': None - } for endpoint in self.all_endpoint_dict() if self.assertDatetime(endpoint['last_check'])] - self.assertEqual(endpoints, [{ - **self.model_to_dict(model, 'endpoint'), - 'frequency_in_minutes': 30.0, - 'response_text': None, - 'severity_level': 5, - 'status_text': 'Status withing the 2xx range', - }]) + self.assertEqual( + self.all_application_dict(), + [ + { + **self.model_to_dict(model, "application"), + } + ], + ) + + endpoints = [ + {**endpoint, "last_check": None} + for endpoint in self.all_endpoint_dict() + if self.assertDatetime(endpoint["last_check"]) + ] + self.assertEqual( + endpoints, + [ + { + **self.model_to_dict(model, "endpoint"), + "frequency_in_minutes": 30.0, + "response_text": None, + "severity_level": 5, + "status_text": "Status withing the 2xx range", + } + ], + ) import requests + mock_breathecode = requests.get self.assertEqual(mock_mailgun.call_args_list, []) self.assertEqual(mock_slack.call_args_list, 
[]) - self.assertEqual(mock_breathecode.call_args_list, - [call('https://potato.io', headers={'User-Agent': 'BreathecodeMonitoring/1.0'}, timeout=2)]) - - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch(MAILGUN_PATH['post'], apply_mailgun_requests_post_mock()) - @patch(SLACK_PATH['request'], apply_slack_requests_request_mock()) - @patch(REQUESTS_PATH['get'], apply_requests_get_mock([(200, 'https://potato.io', 'is not ok')])) + self.assertEqual( + mock_breathecode.call_args_list, + [call("https://potato.io", headers={"User-Agent": "BreathecodeMonitoring/1.0"}, timeout=2)], + ) + + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch(MAILGUN_PATH["post"], apply_mailgun_requests_post_mock()) + @patch(SLACK_PATH["request"], apply_slack_requests_request_mock()) + @patch(REQUESTS_PATH["get"], apply_requests_get_mock([(200, "https://potato.io", "is not ok")])) def tests_monitor_with_entity_apps_status_200_with_bad_regex(self): - mock_mailgun = MAILGUN_INSTANCES['post'] + mock_mailgun = MAILGUN_INSTANCES["post"] mock_mailgun.call_args_list = [] - mock_slack = SLACK_INSTANCES['request'] + mock_slack = SLACK_INSTANCES["request"] mock_slack.call_args_list = [] - endpoint_kwargs = {'url': 'https://potato.io', 'test_pattern': '^ok$'} + endpoint_kwargs = {"url": "https://potato.io", "test_pattern": "^ok$"} model = self.generate_models(application=True, endpoint=True, endpoint_kwargs=endpoint_kwargs) command = Command() command.stdout.write = MagicMock() command.stderr.write = MagicMock() - self.assertEqual(command.handle(entity='apps'), None) - self.assertEqual(command.stdout.write.call_args_list, [call('Enqueued 1 apps for diagnostic')]) + self.assertEqual(command.handle(entity="apps"), None) + self.assertEqual(command.stdout.write.call_args_list, [call("Enqueued 1 apps for diagnostic")]) self.assertEqual(command.stderr.write.call_args_list, []) - self.assertEqual(self.all_application_dict(), [{ - **self.model_to_dict(model, 'application'), - }]) - - endpoints = [{ - **endpoint, 'last_check': None - } for endpoint in self.all_endpoint_dict() if self.assertDatetime(endpoint['last_check'])] - self.assertEqual(endpoints, [{ - **self.model_to_dict(model, 'endpoint'), - 'frequency_in_minutes': 30.0, - 'response_text': 'is not ok', - 'severity_level': 5, - 'status': 'MINOR', - 'status_text': 'Status is 200 but regex ^ok$ was rejected', - }]) + self.assertEqual( + self.all_application_dict(), + [ + { + **self.model_to_dict(model, "application"), + } + ], + ) + + endpoints = [ + {**endpoint, "last_check": None} + for endpoint in self.all_endpoint_dict() + if self.assertDatetime(endpoint["last_check"]) + ] + self.assertEqual( + endpoints, + [ + { + **self.model_to_dict(model, "endpoint"), + "frequency_in_minutes": 30.0, + "response_text": "is not ok", + "severity_level": 5, + "status": "MINOR", + "status_text": "Status is 200 but regex ^ok$ was rejected", + } + ], + ) import requests + mock_breathecode = requests.get self.assertEqual(mock_mailgun.call_args_list, []) self.assertEqual(mock_slack.call_args_list, []) - self.assertEqual(mock_breathecode.call_args_list, - [call('https://potato.io', headers={'User-Agent': 'BreathecodeMonitoring/1.0'}, timeout=2)]) - - 
@patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch(MAILGUN_PATH['post'], apply_mailgun_requests_post_mock()) - @patch(SLACK_PATH['request'], apply_slack_requests_request_mock()) - @patch(REQUESTS_PATH['get'], apply_requests_get_mock([(200, 'https://potato.io', 'ok')])) + self.assertEqual( + mock_breathecode.call_args_list, + [call("https://potato.io", headers={"User-Agent": "BreathecodeMonitoring/1.0"}, timeout=2)], + ) + + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch(MAILGUN_PATH["post"], apply_mailgun_requests_post_mock()) + @patch(SLACK_PATH["request"], apply_slack_requests_request_mock()) + @patch(REQUESTS_PATH["get"], apply_requests_get_mock([(200, "https://potato.io", "ok")])) def tests_monitor_with_entity_apps_status_200_with_regex(self): - mock_mailgun = MAILGUN_INSTANCES['post'] + mock_mailgun = MAILGUN_INSTANCES["post"] mock_mailgun.call_args_list = [] - mock_slack = SLACK_INSTANCES['request'] + mock_slack = SLACK_INSTANCES["request"] mock_slack.call_args_list = [] - endpoint_kwargs = {'url': 'https://potato.io', 'test_pattern': '^ok$'} + endpoint_kwargs = {"url": "https://potato.io", "test_pattern": "^ok$"} model = self.generate_models(application=True, endpoint=True, endpoint_kwargs=endpoint_kwargs) command = Command() command.stdout.write = MagicMock() command.stderr.write = MagicMock() - self.assertEqual(command.handle(entity='apps'), None) - self.assertEqual(command.stdout.write.call_args_list, [call('Enqueued 1 apps for diagnostic')]) + self.assertEqual(command.handle(entity="apps"), None) + self.assertEqual(command.stdout.write.call_args_list, [call("Enqueued 1 apps for diagnostic")]) self.assertEqual(command.stderr.write.call_args_list, []) - self.assertEqual(self.all_application_dict(), [{ - **self.model_to_dict(model, 'application'), - }]) - - endpoints = [{ - **endpoint, 'last_check': None - } for endpoint in self.all_endpoint_dict() if self.assertDatetime(endpoint['last_check'])] - self.assertEqual(endpoints, [{ - **self.model_to_dict(model, 'endpoint'), - 'frequency_in_minutes': 30.0, - 'severity_level': 5, - 'status_text': 'Status withing the 2xx range', - }]) + self.assertEqual( + self.all_application_dict(), + [ + { + **self.model_to_dict(model, "application"), + } + ], + ) + + endpoints = [ + {**endpoint, "last_check": None} + for endpoint in self.all_endpoint_dict() + if self.assertDatetime(endpoint["last_check"]) + ] + self.assertEqual( + endpoints, + [ + { + **self.model_to_dict(model, "endpoint"), + "frequency_in_minutes": 30.0, + "severity_level": 5, + "status_text": "Status withing the 2xx range", + } + ], + ) import requests + mock_breathecode = requests.get self.assertEqual(mock_mailgun.call_args_list, []) self.assertEqual(mock_slack.call_args_list, []) - self.assertEqual(mock_breathecode.call_args_list, - [call('https://potato.io', headers={'User-Agent': 'BreathecodeMonitoring/1.0'}, timeout=2)]) - - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch(MAILGUN_PATH['post'], apply_mailgun_requests_post_mock()) - @patch(SLACK_PATH['request'], 
apply_slack_requests_request_mock()) - @patch(REQUESTS_PATH['get'], apply_requests_get_mock([(300, 'https://potato.io', {})])) + self.assertEqual( + mock_breathecode.call_args_list, + [call("https://potato.io", headers={"User-Agent": "BreathecodeMonitoring/1.0"}, timeout=2)], + ) + + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch(MAILGUN_PATH["post"], apply_mailgun_requests_post_mock()) + @patch(SLACK_PATH["request"], apply_slack_requests_request_mock()) + @patch(REQUESTS_PATH["get"], apply_requests_get_mock([(300, "https://potato.io", {})])) def tests_monitor_with_entity_apps_status_300(self): - mock_mailgun = MAILGUN_INSTANCES['post'] + mock_mailgun = MAILGUN_INSTANCES["post"] mock_mailgun.call_args_list = [] - mock_slack = SLACK_INSTANCES['request'] + mock_slack = SLACK_INSTANCES["request"] mock_slack.call_args_list = [] - model = self.generate_models(application=True, endpoint=True, endpoint_kwargs={'url': 'https://potato.io'}) + model = self.generate_models(application=True, endpoint=True, endpoint_kwargs={"url": "https://potato.io"}) command = Command() command.stdout.write = MagicMock() command.stderr.write = MagicMock() - self.assertEqual(command.handle(entity='apps'), None) - self.assertEqual(command.stdout.write.call_args_list, [call('Enqueued 1 apps for diagnostic')]) + self.assertEqual(command.handle(entity="apps"), None) + self.assertEqual(command.stdout.write.call_args_list, [call("Enqueued 1 apps for diagnostic")]) self.assertEqual(command.stderr.write.call_args_list, []) - self.assertEqual(self.all_application_dict(), [{ - **self.model_to_dict(model, 'application'), - }]) - - endpoints = [{ - **endpoint, 'last_check': None - } for endpoint in self.all_endpoint_dict() if self.assertDatetime(endpoint['last_check'])] - self.assertEqual(endpoints, [{ - **self.model_to_dict(model, 'endpoint'), - 'frequency_in_minutes': 30.0, - 'response_text': '{}', - 'severity_level': 5, - 'status': 'MINOR', - 'status_code': 300, - 'status_text': 'Status in the 3xx range, maybe a cached reponse?', - }]) + self.assertEqual( + self.all_application_dict(), + [ + { + **self.model_to_dict(model, "application"), + } + ], + ) + + endpoints = [ + {**endpoint, "last_check": None} + for endpoint in self.all_endpoint_dict() + if self.assertDatetime(endpoint["last_check"]) + ] + self.assertEqual( + endpoints, + [ + { + **self.model_to_dict(model, "endpoint"), + "frequency_in_minutes": 30.0, + "response_text": "{}", + "severity_level": 5, + "status": "MINOR", + "status_code": 300, + "status_text": "Status in the 3xx range, maybe a cached reponse?", + } + ], + ) import requests + mock_breathecode = requests.get self.assertEqual(mock_mailgun.call_args_list, []) self.assertEqual(mock_slack.call_args_list, []) - self.assertEqual(mock_breathecode.call_args_list, - [call('https://potato.io', headers={'User-Agent': 'BreathecodeMonitoring/1.0'}, timeout=2)]) - - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch(MAILGUN_PATH['post'], apply_mailgun_requests_post_mock()) - @patch(SLACK_PATH['request'], apply_slack_requests_request_mock()) - @patch(REQUESTS_PATH['get'], apply_requests_get_mock([(400, 'https://potato.io', {})])) + self.assertEqual( + mock_breathecode.call_args_list, + 
[call("https://potato.io", headers={"User-Agent": "BreathecodeMonitoring/1.0"}, timeout=2)], + ) + + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch(MAILGUN_PATH["post"], apply_mailgun_requests_post_mock()) + @patch(SLACK_PATH["request"], apply_slack_requests_request_mock()) + @patch(REQUESTS_PATH["get"], apply_requests_get_mock([(400, "https://potato.io", {})])) def tests_monitor_with_entity_apps_status_400(self): - mock_mailgun = MAILGUN_INSTANCES['post'] + mock_mailgun = MAILGUN_INSTANCES["post"] mock_mailgun.call_args_list = [] - mock_slack = SLACK_INSTANCES['request'] + mock_slack = SLACK_INSTANCES["request"] mock_slack.call_args_list = [] - model = self.generate_models(application=True, endpoint=True, endpoint_kwargs={'url': 'https://potato.io'}) + model = self.generate_models(application=True, endpoint=True, endpoint_kwargs={"url": "https://potato.io"}) command = Command() command.stdout.write = MagicMock() command.stderr.write = MagicMock() - self.assertEqual(command.handle(entity='apps'), None) - self.assertEqual(command.stdout.write.call_args_list, [call('Enqueued 1 apps for diagnostic')]) + self.assertEqual(command.handle(entity="apps"), None) + self.assertEqual(command.stdout.write.call_args_list, [call("Enqueued 1 apps for diagnostic")]) self.assertEqual(command.stderr.write.call_args_list, []) - self.assertEqual(self.all_application_dict(), [{ - **self.model_to_dict(model, 'application'), - }]) - - endpoints = [{ - **endpoint, 'last_check': None - } for endpoint in self.all_endpoint_dict() if self.assertDatetime(endpoint['last_check'])] - self.assertEqual(endpoints, [{ - **self.model_to_dict(model, 'endpoint'), - 'frequency_in_minutes': 30.0, - 'response_text': '{}', - 'severity_level': 100, - 'status': 'CRITICAL', - 'status_code': 400, - 'status_text': 'Status above 399', - }]) + self.assertEqual( + self.all_application_dict(), + [ + { + **self.model_to_dict(model, "application"), + } + ], + ) + + endpoints = [ + {**endpoint, "last_check": None} + for endpoint in self.all_endpoint_dict() + if self.assertDatetime(endpoint["last_check"]) + ] + self.assertEqual( + endpoints, + [ + { + **self.model_to_dict(model, "endpoint"), + "frequency_in_minutes": 30.0, + "response_text": "{}", + "severity_level": 100, + "status": "CRITICAL", + "status_code": 400, + "status_text": "Status above 399", + } + ], + ) import requests + mock_breathecode = requests.get self.assertEqual(mock_mailgun.call_args_list, []) self.assertEqual(mock_slack.call_args_list, []) - self.assertEqual(mock_breathecode.call_args_list, - [call('https://potato.io', headers={'User-Agent': 'BreathecodeMonitoring/1.0'}, timeout=2)]) - - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch(MAILGUN_PATH['post'], apply_mailgun_requests_post_mock()) - @patch(SLACK_PATH['request'], apply_slack_requests_request_mock()) - @patch(REQUESTS_PATH['get'], apply_requests_get_mock([(404, 'https://potato.io', 'ok')])) + self.assertEqual( + mock_breathecode.call_args_list, + [call("https://potato.io", headers={"User-Agent": "BreathecodeMonitoring/1.0"}, timeout=2)], + ) + + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], 
apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch(MAILGUN_PATH["post"], apply_mailgun_requests_post_mock()) + @patch(SLACK_PATH["request"], apply_slack_requests_request_mock()) + @patch(REQUESTS_PATH["get"], apply_requests_get_mock([(404, "https://potato.io", "ok")])) def tests_monitor_with_entity_apps_status_404_with_regex(self): - mock_mailgun = MAILGUN_INSTANCES['post'] + mock_mailgun = MAILGUN_INSTANCES["post"] mock_mailgun.call_args_list = [] - mock_slack = SLACK_INSTANCES['request'] + mock_slack = SLACK_INSTANCES["request"] mock_slack.call_args_list = [] - endpoint_kwargs = {'url': 'https://potato.io', 'test_pattern': '^ok$'} + endpoint_kwargs = {"url": "https://potato.io", "test_pattern": "^ok$"} model = self.generate_models(application=True, endpoint=True, endpoint_kwargs=endpoint_kwargs) command = Command() command.stdout.write = MagicMock() command.stderr.write = MagicMock() - self.assertEqual(command.handle(entity='apps'), None) - self.assertEqual(command.stdout.write.call_args_list, [call('Enqueued 1 apps for diagnostic')]) + self.assertEqual(command.handle(entity="apps"), None) + self.assertEqual(command.stdout.write.call_args_list, [call("Enqueued 1 apps for diagnostic")]) self.assertEqual(command.stderr.write.call_args_list, []) - self.assertEqual(self.all_application_dict(), [{ - **self.model_to_dict(model, 'application'), - }]) - - endpoints = [{ - **endpoint, 'last_check': None - } for endpoint in self.all_endpoint_dict() if self.assertDatetime(endpoint['last_check'])] - self.assertEqual(endpoints, [{ - **self.model_to_dict(model, 'endpoint'), - 'frequency_in_minutes': 30.0, - 'response_text': 'ok', - 'severity_level': 100, - 'status': 'CRITICAL', - 'status_code': 404, - 'status_text': 'Status above 399', - }]) + self.assertEqual( + self.all_application_dict(), + [ + { + **self.model_to_dict(model, "application"), + } + ], + ) + + endpoints = [ + {**endpoint, "last_check": None} + for endpoint in self.all_endpoint_dict() + if self.assertDatetime(endpoint["last_check"]) + ] + self.assertEqual( + endpoints, + [ + { + **self.model_to_dict(model, "endpoint"), + "frequency_in_minutes": 30.0, + "response_text": "ok", + "severity_level": 100, + "status": "CRITICAL", + "status_code": 404, + "status_text": "Status above 399", + } + ], + ) import requests + mock_breathecode = requests.get self.assertEqual(mock_mailgun.call_args_list, []) self.assertEqual(mock_slack.call_args_list, []) - self.assertEqual(mock_breathecode.call_args_list, - [call('https://potato.io', headers={'User-Agent': 'BreathecodeMonitoring/1.0'}, timeout=2)]) - - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch(MAILGUN_PATH['post'], apply_mailgun_requests_post_mock()) - @patch(SLACK_PATH['request'], apply_slack_requests_request_mock()) - @patch(REQUESTS_PATH['get'], apply_requests_get_mock([(500, 'https://potato.io', {})])) + self.assertEqual( + mock_breathecode.call_args_list, + [call("https://potato.io", headers={"User-Agent": "BreathecodeMonitoring/1.0"}, timeout=2)], + ) + + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch(MAILGUN_PATH["post"], apply_mailgun_requests_post_mock()) + @patch(SLACK_PATH["request"], 
apply_slack_requests_request_mock()) + @patch(REQUESTS_PATH["get"], apply_requests_get_mock([(500, "https://potato.io", {})])) def tests_monitor_with_entity_apps_status_500(self): - mock_mailgun = MAILGUN_INSTANCES['post'] + mock_mailgun = MAILGUN_INSTANCES["post"] mock_mailgun.call_args_list = [] - mock_slack = SLACK_INSTANCES['request'] + mock_slack = SLACK_INSTANCES["request"] mock_slack.call_args_list = [] - model = self.generate_models(application=True, endpoint=True, endpoint_kwargs={'url': 'https://potato.io'}) + model = self.generate_models(application=True, endpoint=True, endpoint_kwargs={"url": "https://potato.io"}) command = Command() command.stdout.write = MagicMock() command.stderr.write = MagicMock() - self.assertEqual(command.handle(entity='apps'), None) - self.assertEqual(command.stdout.write.call_args_list, [call('Enqueued 1 apps for diagnostic')]) + self.assertEqual(command.handle(entity="apps"), None) + self.assertEqual(command.stdout.write.call_args_list, [call("Enqueued 1 apps for diagnostic")]) self.assertEqual(command.stderr.write.call_args_list, []) - self.assertEqual(self.all_application_dict(), [{ - **self.model_to_dict(model, 'application'), - }]) - - endpoints = [{ - **endpoint, 'last_check': None - } for endpoint in self.all_endpoint_dict() if self.assertDatetime(endpoint['last_check'])] - self.assertEqual(endpoints, [{ - **self.model_to_dict(model, 'endpoint'), - 'frequency_in_minutes': 30.0, - 'response_text': '{}', - 'severity_level': 100, - 'status': 'CRITICAL', - 'status_code': 500, - 'status_text': 'Status above 399', - }]) + self.assertEqual( + self.all_application_dict(), + [ + { + **self.model_to_dict(model, "application"), + } + ], + ) + + endpoints = [ + {**endpoint, "last_check": None} + for endpoint in self.all_endpoint_dict() + if self.assertDatetime(endpoint["last_check"]) + ] + self.assertEqual( + endpoints, + [ + { + **self.model_to_dict(model, "endpoint"), + "frequency_in_minutes": 30.0, + "response_text": "{}", + "severity_level": 100, + "status": "CRITICAL", + "status_code": 500, + "status_text": "Status above 399", + } + ], + ) import requests + mock_breathecode = requests.get self.assertEqual(mock_mailgun.call_args_list, []) self.assertEqual(mock_slack.call_args_list, []) - self.assertEqual(mock_breathecode.call_args_list, - [call('https://potato.io', headers={'User-Agent': 'BreathecodeMonitoring/1.0'}, timeout=2)]) - - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch(MAILGUN_PATH['post'], apply_mailgun_requests_post_mock()) - @patch(SLACK_PATH['request'], apply_slack_requests_request_mock()) - @patch(REQUESTS_PATH['get'], apply_requests_get_mock([(500, 'https://potato.io', {})])) + self.assertEqual( + mock_breathecode.call_args_list, + [call("https://potato.io", headers={"User-Agent": "BreathecodeMonitoring/1.0"}, timeout=2)], + ) + + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch(MAILGUN_PATH["post"], apply_mailgun_requests_post_mock()) + @patch(SLACK_PATH["request"], apply_slack_requests_request_mock()) + @patch(REQUESTS_PATH["get"], apply_requests_get_mock([(500, "https://potato.io", {})])) def tests_monitor_with_entity_apps_status_500_with_email(self): - mock_mailgun = MAILGUN_INSTANCES['post'] + 
mock_mailgun = MAILGUN_INSTANCES["post"] mock_mailgun.call_args_list = [] - mock_slack = SLACK_INSTANCES['request'] + mock_slack = SLACK_INSTANCES["request"] mock_slack.call_args_list = [] - application_kwargs = {'notify_email': 'pokemon@potato.io'} + application_kwargs = {"notify_email": "pokemon@potato.io"} - endpoint_kwargs = {'url': 'https://potato.io'} + endpoint_kwargs = {"url": "https://potato.io"} - model = self.generate_models(application=True, - endpoint=True, - application_kwargs=application_kwargs, - endpoint_kwargs=endpoint_kwargs) + model = self.generate_models( + application=True, endpoint=True, application_kwargs=application_kwargs, endpoint_kwargs=endpoint_kwargs + ) command = Command() command.stdout.write = MagicMock() command.stderr.write = MagicMock() - self.assertEqual(command.handle(entity='apps'), None) - self.assertEqual(command.stdout.write.call_args_list, [call('Enqueued 1 apps for diagnostic')]) + self.assertEqual(command.handle(entity="apps"), None) + self.assertEqual(command.stdout.write.call_args_list, [call("Enqueued 1 apps for diagnostic")]) self.assertEqual(command.stderr.write.call_args_list, []) - self.assertEqual(self.all_application_dict(), [{ - **self.model_to_dict(model, 'application'), - }]) - - endpoints = [{ - **endpoint, 'last_check': None - } for endpoint in self.all_endpoint_dict() if self.assertDatetime(endpoint['last_check'])] - self.assertEqual(endpoints, [{ - **self.model_to_dict(model, 'endpoint'), - 'frequency_in_minutes': 30.0, - 'response_text': '{}', - 'severity_level': 100, - 'status': 'CRITICAL', - 'status_code': 500, - 'status_text': 'Status above 399', - }]) + self.assertEqual( + self.all_application_dict(), + [ + { + **self.model_to_dict(model, "application"), + } + ], + ) + + endpoints = [ + {**endpoint, "last_check": None} + for endpoint in self.all_endpoint_dict() + if self.assertDatetime(endpoint["last_check"]) + ] + self.assertEqual( + endpoints, + [ + { + **self.model_to_dict(model, "endpoint"), + "frequency_in_minutes": 30.0, + "response_text": "{}", + "severity_level": 100, + "status": "CRITICAL", + "status_code": 500, + "status_text": "Status above 399", + } + ], + ) import requests + mock_breathecode = requests.get self.assertEqual(len(mock_mailgun.call_args_list), 1) self.assertEqual(mock_slack.call_args_list, []) - self.assertEqual(mock_breathecode.call_args_list, - [call('https://potato.io', headers={'User-Agent': 'BreathecodeMonitoring/1.0'}, timeout=2)]) - - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch(MAILGUN_PATH['post'], apply_mailgun_requests_post_mock()) - @patch(SLACK_PATH['request'], apply_slack_requests_request_mock()) - @patch(REQUESTS_PATH['get'], apply_requests_get_mock([(500, 'https://potato.io', {})])) + self.assertEqual( + mock_breathecode.call_args_list, + [call("https://potato.io", headers={"User-Agent": "BreathecodeMonitoring/1.0"}, timeout=2)], + ) + + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch(MAILGUN_PATH["post"], apply_mailgun_requests_post_mock()) + @patch(SLACK_PATH["request"], apply_slack_requests_request_mock()) + @patch(REQUESTS_PATH["get"], apply_requests_get_mock([(500, "https://potato.io", {})])) def 
tests_monitor_with_entity_apps_status_500_with_notify_slack_channel_without_slack_team(self): - mock_mailgun = MAILGUN_INSTANCES['post'] + mock_mailgun = MAILGUN_INSTANCES["post"] mock_mailgun.call_args_list = [] - mock_slack = SLACK_INSTANCES['request'] + mock_slack = SLACK_INSTANCES["request"] mock_slack.call_args_list = [] - endpoint_kwargs = {'url': 'https://potato.io'} + endpoint_kwargs = {"url": "https://potato.io"} model = self.generate_models( application=True, @@ -723,186 +911,232 @@ def tests_monitor_with_entity_apps_status_500_with_notify_slack_channel_without_ slack_channel=True, credentials_slack=True, # academy=True, slack_team=True, - endpoint_kwargs=endpoint_kwargs) + endpoint_kwargs=endpoint_kwargs, + ) command = Command() command.stdout.write = MagicMock() command.stderr.write = MagicMock() - self.assertEqual(command.handle(entity='apps'), None) - self.assertEqual(command.stdout.write.call_args_list, [call('Enqueued 1 apps for diagnostic')]) + self.assertEqual(command.handle(entity="apps"), None) + self.assertEqual(command.stdout.write.call_args_list, [call("Enqueued 1 apps for diagnostic")]) self.assertEqual(command.stderr.write.call_args_list, []) - self.assertEqual(self.all_application_dict(), [{ - **self.model_to_dict(model, 'application'), - }]) - - endpoints = [{ - **endpoint, 'last_check': None - } for endpoint in self.all_endpoint_dict() if self.assertDatetime(endpoint['last_check'])] - self.assertEqual(endpoints, [{ - **self.model_to_dict(model, 'endpoint'), - 'frequency_in_minutes': 30.0, - 'response_text': '{}', - 'severity_level': 100, - 'status': 'CRITICAL', - 'status_code': 500, - 'status_text': 'Status above 399', - }]) + self.assertEqual( + self.all_application_dict(), + [ + { + **self.model_to_dict(model, "application"), + } + ], + ) + + endpoints = [ + {**endpoint, "last_check": None} + for endpoint in self.all_endpoint_dict() + if self.assertDatetime(endpoint["last_check"]) + ] + self.assertEqual( + endpoints, + [ + { + **self.model_to_dict(model, "endpoint"), + "frequency_in_minutes": 30.0, + "response_text": "{}", + "severity_level": 100, + "status": "CRITICAL", + "status_code": 500, + "status_text": "Status above 399", + } + ], + ) import requests + mock_breathecode = requests.get self.assertEqual(mock_mailgun.call_args_list, []) self.assertEqual(mock_slack.call_args_list, []) - self.assertEqual(mock_breathecode.call_args_list, - [call('https://potato.io', headers={'User-Agent': 'BreathecodeMonitoring/1.0'}, timeout=2)]) - - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch(MAILGUN_PATH['post'], apply_mailgun_requests_post_mock()) - @patch(SLACK_PATH['request'], apply_slack_requests_request_mock()) - @patch(REQUESTS_PATH['get'], apply_requests_get_mock([(500, 'https://potato.io', {})])) + self.assertEqual( + mock_breathecode.call_args_list, + [call("https://potato.io", headers={"User-Agent": "BreathecodeMonitoring/1.0"}, timeout=2)], + ) + + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch(MAILGUN_PATH["post"], apply_mailgun_requests_post_mock()) + @patch(SLACK_PATH["request"], apply_slack_requests_request_mock()) + @patch(REQUESTS_PATH["get"], apply_requests_get_mock([(500, "https://potato.io", {})])) def 
tests_monitor_with_entity_apps_status_500_with_notify_slack_channel_without_slack_models(self): - mock_mailgun = MAILGUN_INSTANCES['post'] + mock_mailgun = MAILGUN_INSTANCES["post"] mock_mailgun.call_args_list = [] - mock_slack = SLACK_INSTANCES['request'] + mock_slack = SLACK_INSTANCES["request"] mock_slack.call_args_list = [] - endpoint_kwargs = {'url': 'https://potato.io'} + endpoint_kwargs = {"url": "https://potato.io"} - model = self.generate_models(application=True, - endpoint=True, - slack_channel=True, - credentials_slack=True, - endpoint_kwargs=endpoint_kwargs) + model = self.generate_models( + application=True, endpoint=True, slack_channel=True, credentials_slack=True, endpoint_kwargs=endpoint_kwargs + ) command = Command() command.stdout.write = MagicMock() command.stderr.write = MagicMock() - self.assertEqual(command.handle(entity='apps'), None) - self.assertEqual(command.stdout.write.call_args_list, [call('Enqueued 1 apps for diagnostic')]) + self.assertEqual(command.handle(entity="apps"), None) + self.assertEqual(command.stdout.write.call_args_list, [call("Enqueued 1 apps for diagnostic")]) self.assertEqual(command.stderr.write.call_args_list, []) - self.assertEqual(self.all_application_dict(), [{ - **self.model_to_dict(model, 'application'), - }]) - - endpoints = [{ - **endpoint, 'last_check': None - } for endpoint in self.all_endpoint_dict() if self.assertDatetime(endpoint['last_check'])] - self.assertEqual(endpoints, [{ - **self.model_to_dict(model, 'endpoint'), - 'frequency_in_minutes': 30.0, - 'response_text': '{}', - 'severity_level': 100, - 'status': 'CRITICAL', - 'status_code': 500, - 'status_text': 'Status above 399', - }]) + self.assertEqual( + self.all_application_dict(), + [ + { + **self.model_to_dict(model, "application"), + } + ], + ) + + endpoints = [ + {**endpoint, "last_check": None} + for endpoint in self.all_endpoint_dict() + if self.assertDatetime(endpoint["last_check"]) + ] + self.assertEqual( + endpoints, + [ + { + **self.model_to_dict(model, "endpoint"), + "frequency_in_minutes": 30.0, + "response_text": "{}", + "severity_level": 100, + "status": "CRITICAL", + "status_code": 500, + "status_text": "Status above 399", + } + ], + ) import requests + mock_breathecode = requests.get self.assertEqual(mock_mailgun.call_args_list, []) self.assertEqual(mock_slack.call_args_list, []) - self.assertEqual(mock_breathecode.call_args_list, - [call('https://potato.io', headers={'User-Agent': 'BreathecodeMonitoring/1.0'}, timeout=2)]) - - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch(MAILGUN_PATH['post'], apply_mailgun_requests_post_mock()) - @patch(SLACK_PATH['request'], apply_slack_requests_request_mock()) - @patch(REQUESTS_PATH['get'], apply_requests_get_mock([(500, 'https://potato.io', {})])) + self.assertEqual( + mock_breathecode.call_args_list, + [call("https://potato.io", headers={"User-Agent": "BreathecodeMonitoring/1.0"}, timeout=2)], + ) + + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch(MAILGUN_PATH["post"], apply_mailgun_requests_post_mock()) + @patch(SLACK_PATH["request"], apply_slack_requests_request_mock()) + @patch(REQUESTS_PATH["get"], apply_requests_get_mock([(500, "https://potato.io", {})])) def 
tests_monitor_with_entity_apps_status_500_with_notify_slack_channel_with_slack_models(self): - mock_mailgun = MAILGUN_INSTANCES['post'] + mock_mailgun = MAILGUN_INSTANCES["post"] mock_mailgun.call_args_list = [] - mock_slack = SLACK_INSTANCES['request'] + mock_slack = SLACK_INSTANCES["request"] mock_slack.call_args_list = [] - endpoint_kwargs = {'url': 'https://potato.io'} + endpoint_kwargs = {"url": "https://potato.io"} - model = self.generate_models(application=True, - endpoint=True, - slack_channel=True, - credentials_slack=True, - slack_team=True, - academy=True, - endpoint_kwargs=endpoint_kwargs) + model = self.generate_models( + application=True, + endpoint=True, + slack_channel=True, + credentials_slack=True, + slack_team=True, + academy=True, + endpoint_kwargs=endpoint_kwargs, + ) command = Command() command.stdout.write = MagicMock() command.stderr.write = MagicMock() - self.assertEqual(command.handle(entity='apps'), None) - self.assertEqual(command.stdout.write.call_args_list, [call('Enqueued 1 apps for diagnostic')]) + self.assertEqual(command.handle(entity="apps"), None) + self.assertEqual(command.stdout.write.call_args_list, [call("Enqueued 1 apps for diagnostic")]) self.assertEqual(command.stderr.write.call_args_list, []) - self.assertEqual(self.all_application_dict(), [{ - **self.model_to_dict(model, 'application'), - }]) - - endpoints = [{ - **endpoint, 'last_check': None - } for endpoint in self.all_endpoint_dict() if self.assertDatetime(endpoint['last_check'])] - self.assertEqual(endpoints, [{ - **self.model_to_dict(model, 'endpoint'), - 'frequency_in_minutes': 30.0, - 'response_text': '{}', - 'severity_level': 100, - 'status': 'CRITICAL', - 'status_code': 500, - 'status_text': 'Status above 399', - }]) + self.assertEqual( + self.all_application_dict(), + [ + { + **self.model_to_dict(model, "application"), + } + ], + ) + + endpoints = [ + {**endpoint, "last_check": None} + for endpoint in self.all_endpoint_dict() + if self.assertDatetime(endpoint["last_check"]) + ] + self.assertEqual( + endpoints, + [ + { + **self.model_to_dict(model, "endpoint"), + "frequency_in_minutes": 30.0, + "response_text": "{}", + "severity_level": 100, + "status": "CRITICAL", + "status_code": 500, + "status_text": "Status above 399", + } + ], + ) import requests + mock_breathecode = requests.get self.assertEqual(mock_mailgun.call_args_list, []) self.assertEqual(len(mock_slack.call_args_list), 1) - self.assertEqual(mock_breathecode.call_args_list, - [call('https://potato.io', headers={'User-Agent': 'BreathecodeMonitoring/1.0'}, timeout=2)]) + self.assertEqual( + mock_breathecode.call_args_list, + [call("https://potato.io", headers={"User-Agent": "BreathecodeMonitoring/1.0"}, timeout=2)], + ) """ 🔽🔽🔽 Scripts entity 🔽🔽🔽 """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch(MAILGUN_PATH['post'], apply_mailgun_requests_post_mock()) - @patch(SLACK_PATH['request'], apply_slack_requests_request_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch(MAILGUN_PATH["post"], apply_mailgun_requests_post_mock()) + @patch(SLACK_PATH["request"], apply_slack_requests_request_mock()) def tests_monitor_with_entity_scripts_without_data(self): - mock_mailgun = 
MAILGUN_INSTANCES['post'] + mock_mailgun = MAILGUN_INSTANCES["post"] mock_mailgun.call_args_list = [] - mock_slack = SLACK_INSTANCES['request'] + mock_slack = SLACK_INSTANCES["request"] mock_slack.call_args_list = [] command = Command() command.stdout.write = MagicMock() command.stderr.write = MagicMock() - self.assertEqual(command.handle(entity='scripts'), None) - self.assertEqual(command.stdout.write.call_args_list, [call('Enqueued 0 scripts for execution')]) + self.assertEqual(command.handle(entity="scripts"), None) + self.assertEqual(command.stdout.write.call_args_list, [call("Enqueued 0 scripts for execution")]) self.assertEqual(command.stderr.write.call_args_list, []) self.assertEqual(self.all_monitor_script_dict(), []) self.assertEqual(mock_mailgun.call_args_list, []) self.assertEqual(mock_slack.call_args_list, []) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch(MAILGUN_PATH['post'], apply_mailgun_requests_post_mock()) - @patch(SLACK_PATH['request'], apply_slack_requests_request_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch(MAILGUN_PATH["post"], apply_mailgun_requests_post_mock()) + @patch(SLACK_PATH["request"], apply_slack_requests_request_mock()) def tests_monitor_with_entity_scripts_doesnt_exist_or_not_have_body(self): - mock_mailgun = MAILGUN_INSTANCES['post'] + mock_mailgun = MAILGUN_INSTANCES["post"] mock_mailgun.call_args_list = [] - mock_slack = SLACK_INSTANCES['request'] + mock_slack = SLACK_INSTANCES["request"] mock_slack.call_args_list = [] model = self.generate_models(monitor_script=True) @@ -911,40 +1145,43 @@ def tests_monitor_with_entity_scripts_doesnt_exist_or_not_have_body(self): command.stdout.write = MagicMock() command.stderr.write = MagicMock() - self.assertEqual(command.handle(entity='scripts'), None) - self.assertEqual(command.stdout.write.call_args_list, [call('Enqueued 1 scripts for execution')]) + self.assertEqual(command.handle(entity="scripts"), None) + self.assertEqual(command.stdout.write.call_args_list, [call("Enqueued 1 scripts for execution")]) self.assertEqual(command.stderr.write.call_args_list, []) - monitor_scripts = [{ - **x, 'last_run': None - } for x in self.all_monitor_script_dict() if self.assertDatetime(x['last_run'])] + monitor_scripts = [ + {**x, "last_run": None} for x in self.all_monitor_script_dict() if self.assertDatetime(x["last_run"]) + ] - self.assertEqual(monitor_scripts, [ - { - **self.model_to_dict(model, 'monitor_script'), - 'status_code': 1, - 'status': 'CRITICAL', - 'special_status_text': 'Script not found or its body is empty: None', - 'response_text': 'Script not found or its body is empty: None', - }, - ]) + self.assertEqual( + monitor_scripts, + [ + { + **self.model_to_dict(model, "monitor_script"), + "status_code": 1, + "status": "CRITICAL", + "special_status_text": "Script not found or its body is empty: None", + "response_text": "Script not found or its body is empty: None", + }, + ], + ) self.assertEqual(mock_mailgun.call_args_list, []) self.assertEqual(mock_slack.call_args_list, []) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], 
apply_google_cloud_blob_mock()) - @patch(MAILGUN_PATH['post'], apply_mailgun_requests_post_mock()) - @patch(SLACK_PATH['request'], apply_slack_requests_request_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch(MAILGUN_PATH["post"], apply_mailgun_requests_post_mock()) + @patch(SLACK_PATH["request"], apply_slack_requests_request_mock()) def tests_monitor_with_entity_scripts_in_body_with_successful_execution(self): - mock_mailgun = MAILGUN_INSTANCES['post'] + mock_mailgun = MAILGUN_INSTANCES["post"] mock_mailgun.call_args_list = [] - mock_slack = SLACK_INSTANCES['request'] + mock_slack = SLACK_INSTANCES["request"] mock_slack.call_args_list = [] - monitor_script_kwargs = {'script_body': 'print(\'aaaa\')'} + monitor_script_kwargs = {"script_body": "print('aaaa')"} model = self.generate_models(monitor_script=True, monitor_script_kwargs=monitor_script_kwargs) @@ -952,41 +1189,47 @@ def tests_monitor_with_entity_scripts_in_body_with_successful_execution(self): command.stdout.write = MagicMock() command.stderr.write = MagicMock() - self.assertEqual(command.handle(entity='scripts'), None) - self.assertEqual(command.stdout.write.call_args_list, [call('Enqueued 1 scripts for execution')]) + self.assertEqual(command.handle(entity="scripts"), None) + self.assertEqual(command.stdout.write.call_args_list, [call("Enqueued 1 scripts for execution")]) self.assertEqual(command.stderr.write.call_args_list, []) - monitor_scripts = [{ - **x, 'last_run': None - } for x in self.all_monitor_script_dict() if self.assertDatetime(x['last_run'])] - self.assertEqual(monitor_scripts, [{ - **self.model_to_dict(model, 'monitor_script'), - 'response_text': 'aaaa\n', - 'status_code': 0, - 'special_status_text': 'OK', - }]) + monitor_scripts = [ + {**x, "last_run": None} for x in self.all_monitor_script_dict() if self.assertDatetime(x["last_run"]) + ] + self.assertEqual( + monitor_scripts, + [ + { + **self.model_to_dict(model, "monitor_script"), + "response_text": "aaaa\n", + "status_code": 0, + "special_status_text": "OK", + } + ], + ) self.assertEqual(mock_mailgun.call_args_list, []) self.assertEqual(mock_slack.call_args_list, []) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch(MAILGUN_PATH['post'], apply_mailgun_requests_post_mock()) - @patch(SLACK_PATH['request'], apply_slack_requests_request_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch(MAILGUN_PATH["post"], apply_mailgun_requests_post_mock()) + @patch(SLACK_PATH["request"], apply_slack_requests_request_mock()) def tests_monitor_with_entity_scripts_in_body_with_minor_error(self): - mock_mailgun = MAILGUN_INSTANCES['post'] + mock_mailgun = MAILGUN_INSTANCES["post"] mock_mailgun.call_args_list = [] - mock_slack = SLACK_INSTANCES['request'] + mock_slack = SLACK_INSTANCES["request"] mock_slack.call_args_list = [] monitor_script_kwargs = { - 'script_body': - '\n'.join([ - 'from breathecode.utils import ScriptNotification', - "raise ScriptNotification('thus spoke kishibe rohan', status='MINOR')" - ]) + "script_body": "\n".join( + [ + "from 
breathecode.utils import ScriptNotification", + "raise ScriptNotification('thus spoke kishibe rohan', status='MINOR')", + ] + ) } model = self.generate_models(monitor_script=True, monitor_script_kwargs=monitor_script_kwargs) @@ -995,43 +1238,47 @@ def tests_monitor_with_entity_scripts_in_body_with_minor_error(self): command.stdout.write = MagicMock() command.stderr.write = MagicMock() - self.assertEqual(command.handle(entity='scripts'), None) - self.assertEqual(command.stdout.write.call_args_list, [call('Enqueued 1 scripts for execution')]) + self.assertEqual(command.handle(entity="scripts"), None) + self.assertEqual(command.stdout.write.call_args_list, [call("Enqueued 1 scripts for execution")]) self.assertEqual(command.stderr.write.call_args_list, []) - monitor_scripts = [{ - **x, 'last_run': None - } for x in self.all_monitor_script_dict() if self.assertDatetime(x['last_run'])] + monitor_scripts = [ + {**x, "last_run": None} for x in self.all_monitor_script_dict() if self.assertDatetime(x["last_run"]) + ] self.assertEqual( monitor_scripts, - [{ - **self.model_to_dict(model, 'monitor_script'), - 'response_text': monitor_scripts[0]['response_text'], - 'status': 'MINOR', - 'status_code': 1, - }]) + [ + { + **self.model_to_dict(model, "monitor_script"), + "response_text": monitor_scripts[0]["response_text"], + "status": "MINOR", + "status_code": 1, + } + ], + ) self.assertEqual(mock_mailgun.call_args_list, []) self.assertEqual(mock_slack.call_args_list, []) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch(MAILGUN_PATH['post'], apply_mailgun_requests_post_mock()) - @patch(SLACK_PATH['request'], apply_slack_requests_request_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch(MAILGUN_PATH["post"], apply_mailgun_requests_post_mock()) + @patch(SLACK_PATH["request"], apply_slack_requests_request_mock()) def tests_monitor_with_entity_scripts_in_body_with_critical_error(self): - mock_mailgun = MAILGUN_INSTANCES['post'] + mock_mailgun = MAILGUN_INSTANCES["post"] mock_mailgun.call_args_list = [] - mock_slack = SLACK_INSTANCES['request'] + mock_slack = SLACK_INSTANCES["request"] mock_slack.call_args_list = [] monitor_script_kwargs = { - 'script_body': - '\n'.join([ - 'from breathecode.utils import ScriptNotification', - "raise ScriptNotification('thus spoke kishibe rohan', status='CRITICAL')" - ]) + "script_body": "\n".join( + [ + "from breathecode.utils import ScriptNotification", + "raise ScriptNotification('thus spoke kishibe rohan', status='CRITICAL')", + ] + ) } model = self.generate_models(monitor_script=True, monitor_script_kwargs=monitor_script_kwargs) @@ -1040,39 +1287,42 @@ def tests_monitor_with_entity_scripts_in_body_with_critical_error(self): command.stdout.write = MagicMock() command.stderr.write = MagicMock() - self.assertEqual(command.handle(entity='scripts'), None) - self.assertEqual(command.stdout.write.call_args_list, [call('Enqueued 1 scripts for execution')]) + self.assertEqual(command.handle(entity="scripts"), None) + self.assertEqual(command.stdout.write.call_args_list, [call("Enqueued 1 scripts for execution")]) self.assertEqual(command.stderr.write.call_args_list, []) - monitor_scripts = [{ - **x, 'last_run': None - } for x in 
self.all_monitor_script_dict() if self.assertDatetime(x['last_run'])] + monitor_scripts = [ + {**x, "last_run": None} for x in self.all_monitor_script_dict() if self.assertDatetime(x["last_run"]) + ] self.assertEqual( monitor_scripts, - [{ - **self.model_to_dict(model, 'monitor_script'), - 'response_text': monitor_scripts[0]['response_text'], - 'status': 'CRITICAL', - 'status_code': 1, - }]) + [ + { + **self.model_to_dict(model, "monitor_script"), + "response_text": monitor_scripts[0]["response_text"], + "status": "CRITICAL", + "status_code": 1, + } + ], + ) self.assertEqual(mock_mailgun.call_args_list, []) self.assertEqual(mock_slack.call_args_list, []) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch(MAILGUN_PATH['post'], apply_mailgun_requests_post_mock()) - @patch(SLACK_PATH['request'], apply_slack_requests_request_mock()) - @patch('builtins.open', mock_open(read_data="print(\'aaaa\')")) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch(MAILGUN_PATH["post"], apply_mailgun_requests_post_mock()) + @patch(SLACK_PATH["request"], apply_slack_requests_request_mock()) + @patch("builtins.open", mock_open(read_data="print('aaaa')")) def tests_monitor_with_entity_scripts_in_file_with_successful_execution(self): - mock_mailgun = MAILGUN_INSTANCES['post'] + mock_mailgun = MAILGUN_INSTANCES["post"] mock_mailgun.call_args_list = [] - mock_slack = SLACK_INSTANCES['request'] + mock_slack = SLACK_INSTANCES["request"] mock_slack.call_args_list = [] - monitor_script_kwargs = {'script_slug': 'they-killed-kenny'} + monitor_script_kwargs = {"script_slug": "they-killed-kenny"} model = self.generate_models(monitor_script=True, monitor_script_kwargs=monitor_script_kwargs) @@ -1080,42 +1330,52 @@ def tests_monitor_with_entity_scripts_in_file_with_successful_execution(self): command.stdout.write = MagicMock() command.stderr.write = MagicMock() - self.assertEqual(command.handle(entity='scripts'), None) - self.assertEqual(command.stdout.write.call_args_list, [call('Enqueued 1 scripts for execution')]) + self.assertEqual(command.handle(entity="scripts"), None) + self.assertEqual(command.stdout.write.call_args_list, [call("Enqueued 1 scripts for execution")]) self.assertEqual(command.stderr.write.call_args_list, []) - monitor_scripts = [{ - **x, 'last_run': None - } for x in self.all_monitor_script_dict() if self.assertDatetime(x['last_run'])] - self.assertEqual(monitor_scripts, [{ - **self.model_to_dict(model, 'monitor_script'), - 'response_text': 'aaaa\n', - 'status_code': 0, - 'special_status_text': 'OK', - }]) + monitor_scripts = [ + {**x, "last_run": None} for x in self.all_monitor_script_dict() if self.assertDatetime(x["last_run"]) + ] + self.assertEqual( + monitor_scripts, + [ + { + **self.model_to_dict(model, "monitor_script"), + "response_text": "aaaa\n", + "status_code": 0, + "special_status_text": "OK", + } + ], + ) self.assertEqual(mock_mailgun.call_args_list, []) self.assertEqual(mock_slack.call_args_list, []) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch(MAILGUN_PATH['post'], 
apply_mailgun_requests_post_mock()) - @patch(SLACK_PATH['request'], apply_slack_requests_request_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch(MAILGUN_PATH["post"], apply_mailgun_requests_post_mock()) + @patch(SLACK_PATH["request"], apply_slack_requests_request_mock()) @patch( - 'builtins.open', - mock_open(read_data='\n'.join([ - 'from breathecode.utils import ScriptNotification', - "raise ScriptNotification('thus spoke kishibe rohan', status='MINOR')" - ]))) + "builtins.open", + mock_open( + read_data="\n".join( + [ + "from breathecode.utils import ScriptNotification", + "raise ScriptNotification('thus spoke kishibe rohan', status='MINOR')", + ] + ) + ), + ) def tests_monitor_with_entity_scripts_in_file_with_minor_error(self): - mock_mailgun = MAILGUN_INSTANCES['post'] + mock_mailgun = MAILGUN_INSTANCES["post"] mock_mailgun.call_args_list = [] - mock_slack = SLACK_INSTANCES['request'] + mock_slack = SLACK_INSTANCES["request"] mock_slack.call_args_list = [] - monitor_script_kwargs = {'script_slug': 'they-killed-kenny'} + monitor_script_kwargs = {"script_slug": "they-killed-kenny"} model = self.generate_models(monitor_script=True, monitor_script_kwargs=monitor_script_kwargs) @@ -1123,44 +1383,52 @@ def tests_monitor_with_entity_scripts_in_file_with_minor_error(self): command.stdout.write = MagicMock() command.stderr.write = MagicMock() - self.assertEqual(command.handle(entity='scripts'), None) - self.assertEqual(command.stdout.write.call_args_list, [call('Enqueued 1 scripts for execution')]) + self.assertEqual(command.handle(entity="scripts"), None) + self.assertEqual(command.stdout.write.call_args_list, [call("Enqueued 1 scripts for execution")]) self.assertEqual(command.stderr.write.call_args_list, []) - monitor_scripts = [{ - **x, 'last_run': None - } for x in self.all_monitor_script_dict() if self.assertDatetime(x['last_run'])] + monitor_scripts = [ + {**x, "last_run": None} for x in self.all_monitor_script_dict() if self.assertDatetime(x["last_run"]) + ] self.assertEqual( monitor_scripts, - [{ - **self.model_to_dict(model, 'monitor_script'), - 'response_text': monitor_scripts[0]['response_text'], - 'status': 'MINOR', - 'status_code': 1, - }]) + [ + { + **self.model_to_dict(model, "monitor_script"), + "response_text": monitor_scripts[0]["response_text"], + "status": "MINOR", + "status_code": 1, + } + ], + ) self.assertEqual(mock_mailgun.call_args_list, []) self.assertEqual(mock_slack.call_args_list, []) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch(MAILGUN_PATH['post'], apply_mailgun_requests_post_mock()) - @patch(SLACK_PATH['request'], apply_slack_requests_request_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch(MAILGUN_PATH["post"], apply_mailgun_requests_post_mock()) + @patch(SLACK_PATH["request"], apply_slack_requests_request_mock()) @patch( - 'builtins.open', - mock_open(read_data='\n'.join([ - 'from breathecode.utils import ScriptNotification', - "raise ScriptNotification('thus spoke kishibe rohan', status='CRITICAL')" - ]))) + "builtins.open", + 
mock_open( + read_data="\n".join( + [ + "from breathecode.utils import ScriptNotification", + "raise ScriptNotification('thus spoke kishibe rohan', status='CRITICAL')", + ] + ) + ), + ) def tests_monitor_with_entity_scripts_in_file_with_critical_error(self): - mock_mailgun = MAILGUN_INSTANCES['post'] + mock_mailgun = MAILGUN_INSTANCES["post"] mock_mailgun.call_args_list = [] - mock_slack = SLACK_INSTANCES['request'] + mock_slack = SLACK_INSTANCES["request"] mock_slack.call_args_list = [] - monitor_script_kwargs = {'script_slug': 'they-killed-kenny'} + monitor_script_kwargs = {"script_slug": "they-killed-kenny"} model = self.generate_models(monitor_script=True, monitor_script_kwargs=monitor_script_kwargs) @@ -1168,75 +1436,88 @@ def tests_monitor_with_entity_scripts_in_file_with_critical_error(self): command.stdout.write = MagicMock() command.stderr.write = MagicMock() - self.assertEqual(command.handle(entity='scripts'), None) - self.assertEqual(command.stdout.write.call_args_list, [call('Enqueued 1 scripts for execution')]) + self.assertEqual(command.handle(entity="scripts"), None) + self.assertEqual(command.stdout.write.call_args_list, [call("Enqueued 1 scripts for execution")]) self.assertEqual(command.stderr.write.call_args_list, []) - monitor_scripts = [{ - **x, 'last_run': None - } for x in self.all_monitor_script_dict() if self.assertDatetime(x['last_run'])] + monitor_scripts = [ + {**x, "last_run": None} for x in self.all_monitor_script_dict() if self.assertDatetime(x["last_run"]) + ] self.assertEqual( monitor_scripts, - [{ - **self.model_to_dict(model, 'monitor_script'), - 'response_text': monitor_scripts[0]['response_text'], - 'status': 'CRITICAL', - 'status_code': 1, - }]) + [ + { + **self.model_to_dict(model, "monitor_script"), + "response_text": monitor_scripts[0]["response_text"], + "status": "CRITICAL", + "status_code": 1, + } + ], + ) self.assertEqual(mock_mailgun.call_args_list, []) self.assertEqual(mock_slack.call_args_list, []) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) - @patch(MAILGUN_PATH['post'], apply_mailgun_requests_post_mock()) - @patch(SLACK_PATH['request'], apply_slack_requests_request_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) + @patch(MAILGUN_PATH["post"], apply_mailgun_requests_post_mock()) + @patch(SLACK_PATH["request"], apply_slack_requests_request_mock()) @patch( - 'builtins.open', - mock_open(read_data='\n'.join([ - 'from breathecode.utils import ScriptNotification', - "raise ScriptNotification('thus spoke kishibe rohan', status='CRITICAL')" - ]))) + "builtins.open", + mock_open( + read_data="\n".join( + [ + "from breathecode.utils import ScriptNotification", + "raise ScriptNotification('thus spoke kishibe rohan', status='CRITICAL')", + ] + ) + ), + ) def tests_monitor_with_entity_scripts_in_file_with_critical_error_with_notify(self): - mock_mailgun = MAILGUN_INSTANCES['post'] + mock_mailgun = MAILGUN_INSTANCES["post"] mock_mailgun.call_args_list = [] - mock_slack = SLACK_INSTANCES['request'] + mock_slack = SLACK_INSTANCES["request"] mock_slack.call_args_list = [] - application_kwargs = {'notify_email': 'pokemon@potato.io'} + application_kwargs = {"notify_email": "pokemon@potato.io"} - monitor_script_kwargs = 
{'script_slug': 'they-killed-kenny'} + monitor_script_kwargs = {"script_slug": "they-killed-kenny"} - model = self.generate_models(monitor_script=True, - slack_channel=True, - credentials_slack=True, - slack_team=True, - academy=True, - application=True, - application_kwargs=application_kwargs, - monitor_script_kwargs=monitor_script_kwargs) + model = self.generate_models( + monitor_script=True, + slack_channel=True, + credentials_slack=True, + slack_team=True, + academy=True, + application=True, + application_kwargs=application_kwargs, + monitor_script_kwargs=monitor_script_kwargs, + ) command = Command() command.stdout.write = MagicMock() command.stderr.write = MagicMock() - self.assertEqual(command.handle(entity='scripts'), None) - self.assertEqual(command.stdout.write.call_args_list, [call('Enqueued 1 scripts for execution')]) + self.assertEqual(command.handle(entity="scripts"), None) + self.assertEqual(command.stdout.write.call_args_list, [call("Enqueued 1 scripts for execution")]) self.assertEqual(command.stderr.write.call_args_list, []) - monitor_scripts = [{ - **x, 'last_run': None - } for x in self.all_monitor_script_dict() if self.assertDatetime(x['last_run'])] + monitor_scripts = [ + {**x, "last_run": None} for x in self.all_monitor_script_dict() if self.assertDatetime(x["last_run"]) + ] self.assertEqual( monitor_scripts, - [{ - **self.model_to_dict(model, 'monitor_script'), - 'response_text': monitor_scripts[0]['response_text'], - 'status': 'CRITICAL', - 'status_code': 1, - }]) + [ + { + **self.model_to_dict(model, "monitor_script"), + "response_text": monitor_scripts[0]["response_text"], + "status": "CRITICAL", + "status_code": 1, + } + ], + ) self.assertEqual(len(mock_mailgun.call_args_list), 1) self.assertEqual(len(mock_slack.call_args_list), 1) diff --git a/breathecode/monitoring/tests/management/commands/tests_supervisor.py b/breathecode/monitoring/tests/management/commands/tests_supervisor.py index 445e90df3..42f38c93d 100644 --- a/breathecode/monitoring/tests/management/commands/tests_supervisor.py +++ b/breathecode/monitoring/tests/management/commands/tests_supervisor.py @@ -23,10 +23,13 @@ def __init__(self, bc: Breathecode): def list(self): supervisors = SupervisorModel.objects.all() - return [{ - 'task_module': supervisor.task_module, - 'task_name': supervisor.task_name, - } for supervisor in supervisors] + return [ + { + "task_module": supervisor.task_module, + "task_name": supervisor.task_name, + } + for supervisor in supervisors + ] @sync_to_async def alist(self): @@ -65,18 +68,18 @@ def supervisor(db, bc: Breathecode): def patch(monkeypatch: pytest.MonkeyPatch): m1 = MagicMock() m2 = MagicMock() - monkeypatch.setattr('breathecode.monitoring.tasks.run_supervisor.delay', m1) - monkeypatch.setattr('breathecode.monitoring.tasks.fix_issue.delay', m2) + monkeypatch.setattr("breathecode.monitoring.tasks.run_supervisor.delay", m1) + monkeypatch.setattr("breathecode.monitoring.tasks.fix_issue.delay", m2) keys = paths.copy() for x in keys: - if '.tests.' in x[0]: + if ".tests." 
in x[0]: paths.remove(x) yield m1, m2 -@pytest.fixture(autouse=True, scope='module') +@pytest.fixture(autouse=True, scope="module") def setup(): yield @@ -87,38 +90,42 @@ def setup(): def db(data={}): return { - 'delta': timedelta(seconds=3600), - 'ran_at': None, - 'task_module': '', - 'task_name': '', + "delta": timedelta(seconds=3600), + "ran_at": None, + "task_module": "", + "task_name": "", **data, } def remove_ids(dbs): - return [x for x in dbs if x.pop('id')] + return [x for x in dbs if x.pop("id")] class TestIssue: - @pytest.mark.parametrize('with_supervisor, with_issues', [ - (False, False), - (True, False), - (True, True), - ]) - def tests_older_issues_are_removed(self, database: dfx.Database, supervisor: Supervisor, patch, with_supervisor, - with_issues, utc_now): + @pytest.mark.parametrize( + "with_supervisor, with_issues", + [ + (False, False), + (True, False), + (True, True), + ], + ) + def tests_older_issues_are_removed( + self, database: dfx.Database, supervisor: Supervisor, patch, with_supervisor, with_issues, utc_now + ): extra = {} if with_supervisor: - extra['supervisor'] = { - 'delta': timedelta(seconds=3600), - 'ran_at': None, - 'task_module': 'breathecode.payments.supervisors', - 'task_name': 'supervise_all_consumption_sessions', + extra["supervisor"] = { + "delta": timedelta(seconds=3600), + "ran_at": None, + "task_module": "breathecode.payments.supervisors", + "task_name": "supervise_all_consumption_sessions", } if with_issues: - extra['supervisor_issue'] = (2, {'ran_at': utc_now - timedelta(days=7, seconds=1)}) + extra["supervisor_issue"] = (2, {"ran_at": utc_now - timedelta(days=7, seconds=1)}) model = database.create(**extra) @@ -129,31 +136,41 @@ def tests_older_issues_are_removed(self, database: dfx.Database, supervisor: Sup assert command.handle() == None assert { - 'task_module': 'breathecode.payments.supervisors', - 'task_name': 'supervise_all_consumption_sessions', + "task_module": "breathecode.payments.supervisors", + "task_name": "supervise_all_consumption_sessions", } in supervisor.list() - assert supervisor.log('breathecode.payments.supervisors', 'supervise_all_consumption_sessions') == [] - assert db({ - 'task_module': 'breathecode.payments.supervisors', - 'task_name': 'supervise_all_consumption_sessions', - }) in remove_ids(database.list_of('monitoring.Supervisor')) - assert database.list_of('monitoring.SupervisorIssue') == [] - assert call(supervisor.id('breathecode.payments.supervisors', - 'supervise_all_consumption_sessions')) in run_supervisor_mock.call_args_list + assert supervisor.log("breathecode.payments.supervisors", "supervise_all_consumption_sessions") == [] + assert db( + { + "task_module": "breathecode.payments.supervisors", + "task_name": "supervise_all_consumption_sessions", + } + ) in remove_ids(database.list_of("monitoring.Supervisor")) + assert database.list_of("monitoring.SupervisorIssue") == [] + assert ( + call(supervisor.id("breathecode.payments.supervisors", "supervise_all_consumption_sessions")) + in run_supervisor_mock.call_args_list + ) assert fix_issue_mock.call_args_list == [] - def tests_recent_issues_keeps__available_attempts(self, bc: Breathecode, database: dfx.Database, - supervisor: Supervisor, patch, utc_now, random: cfx.Random): - model = database.create(supervisor={ - 'delta': timedelta(seconds=3600), - 'ran_at': None, - 'task_module': 'breathecode.payments.supervisors', - 'task_name': 'supervise_all_consumption_sessions', - }, - supervisor_issue=(2, { - 'ran_at': utc_now - timedelta(days=random.int(0, 6)), - 
'attempts': 0, - })) + def tests_recent_issues_keeps__available_attempts( + self, bc: Breathecode, database: dfx.Database, supervisor: Supervisor, patch, utc_now, random: cfx.Random + ): + model = database.create( + supervisor={ + "delta": timedelta(seconds=3600), + "ran_at": None, + "task_module": "breathecode.payments.supervisors", + "task_name": "supervise_all_consumption_sessions", + }, + supervisor_issue=( + 2, + { + "ran_at": utc_now - timedelta(days=random.int(0, 6)), + "attempts": 0, + }, + ), + ) run_supervisor_mock, fix_issue_mock = patch run_supervisor_mock.call_args_list = [] @@ -162,32 +179,43 @@ def tests_recent_issues_keeps__available_attempts(self, bc: Breathecode, databas assert command.handle() == None assert { - 'task_module': 'breathecode.payments.supervisors', - 'task_name': 'supervise_all_consumption_sessions', + "task_module": "breathecode.payments.supervisors", + "task_name": "supervise_all_consumption_sessions", } in supervisor.list() - assert supervisor.log('breathecode.payments.supervisors', - 'supervise_all_consumption_sessions') == [x.error for x in model.supervisor_issue] - assert db({ - 'task_module': 'breathecode.payments.supervisors', - 'task_name': 'supervise_all_consumption_sessions', - }) in remove_ids(database.list_of('monitoring.Supervisor')) - assert database.list_of('monitoring.SupervisorIssue') == bc.format.to_dict(model.supervisor_issue) - assert call(supervisor.id('breathecode.payments.supervisors', - 'supervise_all_consumption_sessions')) in run_supervisor_mock.call_args_list + assert supervisor.log("breathecode.payments.supervisors", "supervise_all_consumption_sessions") == [ + x.error for x in model.supervisor_issue + ] + assert db( + { + "task_module": "breathecode.payments.supervisors", + "task_name": "supervise_all_consumption_sessions", + } + ) in remove_ids(database.list_of("monitoring.Supervisor")) + assert database.list_of("monitoring.SupervisorIssue") == bc.format.to_dict(model.supervisor_issue) + assert ( + call(supervisor.id("breathecode.payments.supervisors", "supervise_all_consumption_sessions")) + in run_supervisor_mock.call_args_list + ) assert fix_issue_mock.call_args_list == [call(1), call(2)] - def tests_recent_issues_keeps__no_available_attempts(self, bc: Breathecode, database: dfx.Database, - supervisor: Supervisor, patch, utc_now, random: cfx.Random): - model = database.create(supervisor={ - 'delta': timedelta(seconds=3600), - 'ran_at': None, - 'task_module': 'breathecode.payments.supervisors', - 'task_name': 'supervise_all_consumption_sessions', - }, - supervisor_issue=(2, { - 'ran_at': utc_now - timedelta(days=random.int(0, 6)), - 'attempts': 3, - })) + def tests_recent_issues_keeps__no_available_attempts( + self, bc: Breathecode, database: dfx.Database, supervisor: Supervisor, patch, utc_now, random: cfx.Random + ): + model = database.create( + supervisor={ + "delta": timedelta(seconds=3600), + "ran_at": None, + "task_module": "breathecode.payments.supervisors", + "task_name": "supervise_all_consumption_sessions", + }, + supervisor_issue=( + 2, + { + "ran_at": utc_now - timedelta(days=random.int(0, 6)), + "attempts": 3, + }, + ), + ) run_supervisor_mock, fix_issue_mock = patch run_supervisor_mock.call_args_list = [] @@ -196,40 +224,49 @@ def tests_recent_issues_keeps__no_available_attempts(self, bc: Breathecode, data assert command.handle() == None assert { - 'task_module': 'breathecode.payments.supervisors', - 'task_name': 'supervise_all_consumption_sessions', + "task_module": "breathecode.payments.supervisors", + 
"task_name": "supervise_all_consumption_sessions", } in supervisor.list() - assert supervisor.log('breathecode.payments.supervisors', - 'supervise_all_consumption_sessions') == [x.error for x in model.supervisor_issue] - assert db({ - 'task_module': 'breathecode.payments.supervisors', - 'task_name': 'supervise_all_consumption_sessions', - }) in remove_ids(database.list_of('monitoring.Supervisor')) - assert database.list_of('monitoring.SupervisorIssue') == bc.format.to_dict(model.supervisor_issue) - assert call(supervisor.id('breathecode.payments.supervisors', - 'supervise_all_consumption_sessions')) in run_supervisor_mock.call_args_list + assert supervisor.log("breathecode.payments.supervisors", "supervise_all_consumption_sessions") == [ + x.error for x in model.supervisor_issue + ] + assert db( + { + "task_module": "breathecode.payments.supervisors", + "task_name": "supervise_all_consumption_sessions", + } + ) in remove_ids(database.list_of("monitoring.Supervisor")) + assert database.list_of("monitoring.SupervisorIssue") == bc.format.to_dict(model.supervisor_issue) + assert ( + call(supervisor.id("breathecode.payments.supervisors", "supervise_all_consumption_sessions")) + in run_supervisor_mock.call_args_list + ) assert fix_issue_mock.call_args_list == [] class TestSupervision: - @pytest.mark.parametrize('supervised_at', [ - None, - timedelta(days=2, seconds=1), - ]) - def tests_pending_to_be_scheduled(self, database: dfx.Database, supervisor: Supervisor, patch, supervised_at, - utc_now): + @pytest.mark.parametrize( + "supervised_at", + [ + None, + timedelta(days=2, seconds=1), + ], + ) + def tests_pending_to_be_scheduled( + self, database: dfx.Database, supervisor: Supervisor, patch, supervised_at, utc_now + ): delta = timedelta(days=2) ran_at = utc_now - supervised_at if supervised_at else None s = { - 'delta': delta, - 'ran_at': ran_at, - 'task_module': 'breathecode.payments.supervisors', - 'task_name': 'supervise_all_consumption_sessions', + "delta": delta, + "ran_at": ran_at, + "task_module": "breathecode.payments.supervisors", + "task_name": "supervise_all_consumption_sessions", } - supervisor_issues = (2, {'ran_at': utc_now - timedelta(days=7, seconds=1)}) + supervisor_issues = (2, {"ran_at": utc_now - timedelta(days=7, seconds=1)}) model = database.create(supervisor=s, supervisor_issue=supervisor_issues) @@ -240,19 +277,23 @@ def tests_pending_to_be_scheduled(self, database: dfx.Database, supervisor: Supe assert command.handle() == None assert { - 'task_module': 'breathecode.payments.supervisors', - 'task_name': 'supervise_all_consumption_sessions', + "task_module": "breathecode.payments.supervisors", + "task_name": "supervise_all_consumption_sessions", } in supervisor.list() - assert supervisor.log('breathecode.payments.supervisors', 'supervise_all_consumption_sessions') == [] - assert db({ - 'delta': delta, - 'ran_at': ran_at, - 'task_module': 'breathecode.payments.supervisors', - 'task_name': 'supervise_all_consumption_sessions', - }) in remove_ids(database.list_of('monitoring.Supervisor')) - assert database.list_of('monitoring.SupervisorIssue') == [] - assert call(supervisor.id('breathecode.payments.supervisors', - 'supervise_all_consumption_sessions')) in run_supervisor_mock.call_args_list + assert supervisor.log("breathecode.payments.supervisors", "supervise_all_consumption_sessions") == [] + assert db( + { + "delta": delta, + "ran_at": ran_at, + "task_module": "breathecode.payments.supervisors", + "task_name": "supervise_all_consumption_sessions", + } + ) in 
remove_ids(database.list_of("monitoring.Supervisor")) + assert database.list_of("monitoring.SupervisorIssue") == [] + assert ( + call(supervisor.id("breathecode.payments.supervisors", "supervise_all_consumption_sessions")) + in run_supervisor_mock.call_args_list + ) assert fix_issue_mock.call_args_list == [] def tests_in_cooldown(self, database: dfx.Database, supervisor: Supervisor, patch, utc_now): @@ -260,13 +301,13 @@ def tests_in_cooldown(self, database: dfx.Database, supervisor: Supervisor, patc delta = timedelta(days=2) ran_at = utc_now - timedelta(days=1) s = { - 'delta': delta, - 'ran_at': ran_at, - 'task_module': 'breathecode.payments.supervisors', - 'task_name': 'supervise_all_consumption_sessions', + "delta": delta, + "ran_at": ran_at, + "task_module": "breathecode.payments.supervisors", + "task_name": "supervise_all_consumption_sessions", } - supervisor_issues = (2, {'ran_at': utc_now - timedelta(days=7, seconds=1)}) + supervisor_issues = (2, {"ran_at": utc_now - timedelta(days=7, seconds=1)}) model = database.create(supervisor=s, supervisor_issue=supervisor_issues) @@ -277,17 +318,21 @@ def tests_in_cooldown(self, database: dfx.Database, supervisor: Supervisor, patc assert command.handle() == None assert { - 'task_module': 'breathecode.payments.supervisors', - 'task_name': 'supervise_all_consumption_sessions', + "task_module": "breathecode.payments.supervisors", + "task_name": "supervise_all_consumption_sessions", } in supervisor.list() - assert supervisor.log('breathecode.payments.supervisors', 'supervise_all_consumption_sessions') == [] - assert db({ - 'delta': delta, - 'ran_at': ran_at, - 'task_module': 'breathecode.payments.supervisors', - 'task_name': 'supervise_all_consumption_sessions', - }) in remove_ids(database.list_of('monitoring.Supervisor')) - assert database.list_of('monitoring.SupervisorIssue') == [] - assert call(supervisor.id('breathecode.payments.supervisors', - 'supervise_all_consumption_sessions')) not in run_supervisor_mock.call_args_list + assert supervisor.log("breathecode.payments.supervisors", "supervise_all_consumption_sessions") == [] + assert db( + { + "delta": delta, + "ran_at": ran_at, + "task_module": "breathecode.payments.supervisors", + "task_name": "supervise_all_consumption_sessions", + } + ) in remove_ids(database.list_of("monitoring.Supervisor")) + assert database.list_of("monitoring.SupervisorIssue") == [] + assert ( + call(supervisor.id("breathecode.payments.supervisors", "supervise_all_consumption_sessions")) + not in run_supervisor_mock.call_args_list + ) assert fix_issue_mock.call_args_list == [] diff --git a/breathecode/monitoring/tests/mixins/__init__.py b/breathecode/monitoring/tests/mixins/__init__.py index 64a6f6bf0..11fcd1349 100644 --- a/breathecode/monitoring/tests/mixins/__init__.py +++ b/breathecode/monitoring/tests/mixins/__init__.py @@ -1,4 +1,5 @@ """ Mixins """ + from .monitoring_test_case import MonitoringTestCase # noqa: F401 diff --git a/breathecode/monitoring/tests/mixins/monitoring_test_case.py b/breathecode/monitoring/tests/mixins/monitoring_test_case.py index 97fe73381..b1b4c36f6 100644 --- a/breathecode/monitoring/tests/mixins/monitoring_test_case.py +++ b/breathecode/monitoring/tests/mixins/monitoring_test_case.py @@ -1,14 +1,22 @@ """ Collections of mixins used to login in authorize microservice """ + from rest_framework.test import APITestCase -from breathecode.tests.mixins import (GenerateModelsMixin, CacheMixin, GenerateQueriesMixin, HeadersMixin, - DatetimeMixin, BreathecodeMixin) +from 
breathecode.tests.mixins import ( + GenerateModelsMixin, + CacheMixin, + GenerateQueriesMixin, + HeadersMixin, + DatetimeMixin, + BreathecodeMixin, +) from breathecode.authenticate.models import Token -class MonitoringTestCase(APITestCase, GenerateModelsMixin, CacheMixin, GenerateQueriesMixin, HeadersMixin, - DatetimeMixin, BreathecodeMixin): +class MonitoringTestCase( + APITestCase, GenerateModelsMixin, CacheMixin, GenerateQueriesMixin, HeadersMixin, DatetimeMixin, BreathecodeMixin +): """AdmissionsTestCase with auth methods""" def setUp(self): @@ -21,109 +29,105 @@ def tearDown(self): def get_token_key(self, id=None): kwargs = {} if id: - kwargs['id'] = id - return Token.objects.filter(**kwargs).values_list('key', flat=True).first() + kwargs["id"] = id + return Token.objects.filter(**kwargs).values_list("key", flat=True).first() def check_email_contain_a_correct_token(self, lang, dicts, mock, model): token = self.get_token_key() - question = dicts[0]['title'] + question = dicts[0]["title"] link = f"https://nps.4geeks.com/{dicts[0]['id']}?token={token}" args_list = mock.call_args_list template = get_template_content( - 'nps', { - 'QUESTION': question, - 'HIGHEST': dicts[0]['highest'], - 'LOWEST': dicts[0]['lowest'], - 'SUBJECT': question, - 'ANSWER_ID': dicts[0]['id'], - 'BUTTON': strings[lang]['button_label'], - 'LINK': link, - }, ['email']) + "nps", + { + "QUESTION": question, + "HIGHEST": dicts[0]["highest"], + "LOWEST": dicts[0]["lowest"], + "SUBJECT": question, + "ANSWER_ID": dicts[0]["id"], + "BUTTON": strings[lang]["button_label"], + "LINK": link, + }, + ["email"], + ) - self.assertEqual(args_list, [ - call(f'https://api.mailgun.net/v3/{os.environ.get("MAILGUN_DOMAIN")}/messages', - auth=('api', os.environ.get('MAILGUN_API_KEY', '')), - data={ - 'from': f"4Geeks <mailgun@{os.environ.get('MAILGUN_DOMAIN')}>", - 'to': model['user'].email, - 'subject': template['subject'], - 'text': template['text'], - 'html': template['html'] - }) - ]) + self.assertEqual( + args_list, + [ + call( + f'https://api.mailgun.net/v3/{os.environ.get("MAILGUN_DOMAIN")}/messages', + auth=("api", os.environ.get("MAILGUN_API_KEY", "")), + data={ + "from": f"4Geeks <mailgun@{os.environ.get('MAILGUN_DOMAIN')}>", + "to": model["user"].email, + "subject": template["subject"], + "text": template["text"], + "html": template["html"], + }, + ) + ], + ) - html = template['html'] - del template['html'] + html = template["html"] + del template["html"] self.assertEqual( - template, { - 'SUBJECT': - question, - 'subject': - question, - 'text': - '\n' - '\n' - 'Please take 2 min to answer the following question:\n' - '\n' - f'{question}\n' - '\n' - 'Click here to vote: ' - f'{link}' - '\n' - '\n' - '\n' - '\n' - 'The 4Geeks Team' - }) + template, + { + "SUBJECT": question, + "subject": question, + "text": "\n" + "\n" + "Please take 2 min to answer the following question:\n" + "\n" + f"{question}\n" + "\n" + "Click here to vote: " + f"{link}" + "\n" + "\n" + "\n" + "\n" + "The 4Geeks Team", + }, + ) self.assertToken(token) self.assertTrue(link in html) def check_slack_contain_a_correct_token(self, lang, dicts, mock, model): token = self.get_token_key() - slack_token = model['slack_team'].owner.credentialsslack.token - slack_id = model['slack_user'].slack_id + slack_token = model["slack_team"].owner.credentialsslack.token + slack_id = model["slack_user"].slack_id args_list = mock.call_args_list - question = dicts[0]['title'] - answer = strings[lang]['button_label'] + question = dicts[0]["title"] + answer = 
strings[lang]["button_label"] expected = [ - call(method='POST', - url='https://slack.com/api/chat.postMessage', - headers={ - 'Authorization': f'Bearer {slack_token}', - 'Content-type': 'application/json' - }, - params=None, - json={ - 'channel': - slack_id, - 'private_metadata': - '', - 'blocks': [{ - 'type': 'header', - 'text': { - 'type': 'plain_text', - 'text': question, - 'emoji': True - } - }, { - 'type': - 'actions', - 'elements': [{ - 'type': 'button', - 'text': { - 'type': 'plain_text', - 'text': answer, - 'emoji': True - }, - 'url': f'https://nps.4geeks.com/1?token={token}' - }] - }], - 'parse': - 'full' - }) + call( + method="POST", + url="https://slack.com/api/chat.postMessage", + headers={"Authorization": f"Bearer {slack_token}", "Content-type": "application/json"}, + params=None, + json={ + "channel": slack_id, + "private_metadata": "", + "blocks": [ + {"type": "header", "text": {"type": "plain_text", "text": question, "emoji": True}}, + { + "type": "actions", + "elements": [ + { + "type": "button", + "text": {"type": "plain_text", "text": answer, "emoji": True}, + "url": f"https://nps.4geeks.com/1?token={token}", + } + ], + }, + ], + "parse": "full", + }, + ) ] self.assertEqual(args_list, expected) diff --git a/breathecode/monitoring/tests/scripts/tests_check_cohort_status_ended_cohort.py b/breathecode/monitoring/tests/scripts/tests_check_cohort_status_ended_cohort.py index 41a161467..5a147c535 100644 --- a/breathecode/monitoring/tests/scripts/tests_check_cohort_status_ended_cohort.py +++ b/breathecode/monitoring/tests/scripts/tests_check_cohort_status_ended_cohort.py @@ -1,12 +1,24 @@ from datetime import timedelta from django.utils import timezone from unittest.mock import patch, MagicMock, call, mock_open -from breathecode.tests.mocks import (GOOGLE_CLOUD_PATH, apply_google_cloud_client_mock, apply_google_cloud_bucket_mock, - apply_google_cloud_blob_mock, MAILGUN_PATH, MAILGUN_INSTANCES, - apply_mailgun_requests_post_mock, SLACK_PATH, SLACK_INSTANCES, - apply_slack_requests_request_mock, REQUESTS_PATH, REQUESTS_INSTANCES, - apply_requests_get_mock, LOGGING_PATH, LOGGING_INSTANCES, - apply_logging_logger_mock) +from breathecode.tests.mocks import ( + GOOGLE_CLOUD_PATH, + apply_google_cloud_client_mock, + apply_google_cloud_bucket_mock, + apply_google_cloud_blob_mock, + MAILGUN_PATH, + MAILGUN_INSTANCES, + apply_mailgun_requests_post_mock, + SLACK_PATH, + SLACK_INSTANCES, + apply_slack_requests_request_mock, + REQUESTS_PATH, + REQUESTS_INSTANCES, + apply_requests_get_mock, + LOGGING_PATH, + LOGGING_INSTANCES, + apply_logging_logger_mock, +) from ..mixins import MonitoringTestCase from breathecode.monitoring.actions import run_script from breathecode.admissions.models import Cohort, Academy @@ -17,95 +29,105 @@ class AcademyCohortTestSuite(MonitoringTestCase): 🔽🔽🔽 Check for cohort.stage == 'ENDED' """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def tests_check_cohort__status_ended_date_greater_than_now(self): - monitor_script_kwargs = {'script_slug': 'check_cohort_status_ended_cohort'} + monitor_script_kwargs = {"script_slug": "check_cohort_status_ended_cohort"} ending_date = timezone.now() + 
timedelta(weeks=2) - model = self.generate_models(cohort=True, - academy=True, - monitor_script=True, - monitor_script_kwargs=monitor_script_kwargs, - cohort_kwargs={'ending_date': ending_date}) + model = self.generate_models( + cohort=True, + academy=True, + monitor_script=True, + monitor_script_kwargs=monitor_script_kwargs, + cohort_kwargs={"ending_date": ending_date}, + ) script = run_script(model.monitor_script) - del script['slack_payload'] - del script['text'] - del script['title'] + del script["slack_payload"] + del script["text"] + del script["title"] expected = { - 'severity_level': 5, - 'status': 'OPERATIONAL', + "severity_level": 5, + "status": "OPERATIONAL", } self.assertEqual(script, expected) - self.assertEqual(self.all_monitor_script_dict(), [{**self.model_to_dict(model, 'monitor_script')}]) + self.assertEqual(self.all_monitor_script_dict(), [{**self.model_to_dict(model, "monitor_script")}]) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def tests_check_cohort__ending_date_passed_with_status_ended(self): - monitor_script_kwargs = {'script_slug': 'check_cohort_status_ended_cohort'} + monitor_script_kwargs = {"script_slug": "check_cohort_status_ended_cohort"} ending_date = timezone.now() - timedelta(weeks=2) - model = self.generate_models(cohort=True, - academy=True, - monitor_script=True, - monitor_script_kwargs=monitor_script_kwargs, - cohort_kwargs={ - 'ending_date': ending_date, - 'stage': 'ENDED' - }) + model = self.generate_models( + cohort=True, + academy=True, + monitor_script=True, + monitor_script_kwargs=monitor_script_kwargs, + cohort_kwargs={"ending_date": ending_date, "stage": "ENDED"}, + ) script = run_script(model.monitor_script) - del script['slack_payload'] - del script['text'] - del script['title'] + del script["slack_payload"] + del script["text"] + del script["title"] - expected = {'severity_level': 5, 'status': 'OPERATIONAL'} + expected = {"severity_level": 5, "status": "OPERATIONAL"} self.assertEqual(script, expected) - self.assertEqual(self.all_monitor_script_dict(), [{ - **self.model_to_dict(model, 'monitor_script'), - }]) - - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + self.assertEqual( + self.all_monitor_script_dict(), + [ + { + **self.model_to_dict(model, "monitor_script"), + } + ], + ) + + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def tests_check_cohort__ending_date_passed_with_status_final_project(self): - monitor_script_kwargs = {'script_slug': 'check_cohort_status_ended_cohort'} + monitor_script_kwargs = {"script_slug": "check_cohort_status_ended_cohort"} ending_date = timezone.now() - timedelta(weeks=2) - model = self.generate_models(cohort=True, - academy=True, - monitor_script=True, - monitor_script_kwargs=monitor_script_kwargs, - cohort_kwargs={ - 'ending_date': ending_date, - 'stage': 'FINAL_PROJECT' - }) + model = self.generate_models( + 
cohort=True, + academy=True, + monitor_script=True, + monitor_script_kwargs=monitor_script_kwargs, + cohort_kwargs={"ending_date": ending_date, "stage": "FINAL_PROJECT"}, + ) script = run_script(model.monitor_script) - del script['slack_payload'] - del script['text'] - del script['title'] + del script["slack_payload"] + del script["text"] + del script["title"] expected = { - 'btn': None, - 'severity_level': 100, - 'status': 'CRITICAL', - 'error_slug': 'cohort-stage-should-be-ended', + "btn": None, + "severity_level": 100, + "status": "CRITICAL", + "error_slug": "cohort-stage-should-be-ended", } self.assertEqual(script, expected) - self.assertEqual(self.all_monitor_script_dict(), [{ - **self.model_to_dict(model, 'monitor_script'), - }]) + self.assertEqual( + self.all_monitor_script_dict(), + [ + { + **self.model_to_dict(model, "monitor_script"), + } + ], + ) diff --git a/breathecode/monitoring/tests/scripts/tests_check_cohort_user_status_ended_cohort.py b/breathecode/monitoring/tests/scripts/tests_check_cohort_user_status_ended_cohort.py index 3d2bcd8d9..1ec545cdb 100644 --- a/breathecode/monitoring/tests/scripts/tests_check_cohort_user_status_ended_cohort.py +++ b/breathecode/monitoring/tests/scripts/tests_check_cohort_user_status_ended_cohort.py @@ -16,233 +16,279 @@ class AcademyCohortTestSuite(MonitoringTestCase): 🔽🔽🔽 Bad educational status """ - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def tests_check_user_status__postponed_ended_cohort(self): - monitor_script_kwargs = {'script_slug': 'check_cohort_user_status_ended_cohort'} + monitor_script_kwargs = {"script_slug": "check_cohort_user_status_ended_cohort"} - model = self.generate_models(cohort_user=True, - cohort=True, - academy=True, - monitor_script=True, - monitor_script_kwargs=monitor_script_kwargs, - cohort_user_kwargs={'educational_status': 'POSTPONED'}, - cohort_kwargs={'stage': 'ENDED'}) + model = self.generate_models( + cohort_user=True, + cohort=True, + academy=True, + monitor_script=True, + monitor_script_kwargs=monitor_script_kwargs, + cohort_user_kwargs={"educational_status": "POSTPONED"}, + cohort_kwargs={"stage": "ENDED"}, + ) script = run_script(model.monitor_script) - del script['slack_payload'] - del script['text'] - del script['title'] + del script["slack_payload"] + del script["text"] + del script["title"] expected = { - 'severity_level': 5, - 'status': 'OPERATIONAL', + "severity_level": 5, + "status": "OPERATIONAL", } self.assertEqual(script, expected) - self.assertEqual(self.all_monitor_script_dict(), [{ - **self.model_to_dict(model, 'monitor_script'), - }]) - - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + self.assertEqual( + self.all_monitor_script_dict(), + [ + { + **self.model_to_dict(model, 
"monitor_script"), + } + ], + ) + + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def tests_check_user_status__suspended_ended_cohort(self): - monitor_script_kwargs = {'script_slug': 'check_cohort_user_status_ended_cohort'} + monitor_script_kwargs = {"script_slug": "check_cohort_user_status_ended_cohort"} - model = self.generate_models(cohort_user=True, - cohort=True, - academy=True, - monitor_script=True, - monitor_script_kwargs=monitor_script_kwargs, - cohort_user_kwargs={'educational_status': 'SUSPENDED'}, - cohort_kwargs={'stage': 'ENDED'}) + model = self.generate_models( + cohort_user=True, + cohort=True, + academy=True, + monitor_script=True, + monitor_script_kwargs=monitor_script_kwargs, + cohort_user_kwargs={"educational_status": "SUSPENDED"}, + cohort_kwargs={"stage": "ENDED"}, + ) script = run_script(model.monitor_script) - del script['slack_payload'] - del script['text'] - del script['title'] + del script["slack_payload"] + del script["text"] + del script["title"] expected = { - 'severity_level': 5, - 'status': script['status'], + "severity_level": 5, + "status": script["status"], } self.assertEqual(script, expected) - self.assertEqual(self.all_monitor_script_dict(), [{ - **self.model_to_dict(model, 'monitor_script'), - }]) - - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + self.assertEqual( + self.all_monitor_script_dict(), + [ + { + **self.model_to_dict(model, "monitor_script"), + } + ], + ) + + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def tests_check_user_status__graduated_ended_cohort(self): - monitor_script_kwargs = {'script_slug': 'check_cohort_user_status_ended_cohort'} + monitor_script_kwargs = {"script_slug": "check_cohort_user_status_ended_cohort"} - model = self.generate_models(cohort_user=True, - cohort=True, - academy=True, - monitor_script=True, - monitor_script_kwargs=monitor_script_kwargs, - cohort_user_kwargs={'educational_status': 'GRADUATED'}, - cohort_kwargs={'stage': 'ENDED'}) + model = self.generate_models( + cohort_user=True, + cohort=True, + academy=True, + monitor_script=True, + monitor_script_kwargs=monitor_script_kwargs, + cohort_user_kwargs={"educational_status": "GRADUATED"}, + cohort_kwargs={"stage": "ENDED"}, + ) script = run_script(model.monitor_script) - del script['slack_payload'] - del script['text'] - del script['title'] + del script["slack_payload"] + del script["text"] + del script["title"] expected = { - 'severity_level': 5, - 'status': 'OPERATIONAL', + "severity_level": 5, + "status": "OPERATIONAL", } self.assertEqual(script, expected) - self.assertEqual(self.all_monitor_script_dict(), [{ - **self.model_to_dict(model, 'monitor_script'), - }]) - - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - 
@patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + self.assertEqual( + self.all_monitor_script_dict(), + [ + { + **self.model_to_dict(model, "monitor_script"), + } + ], + ) + + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def tests_check_user_status__dropped_ended_cohort(self): - monitor_script_kwargs = {'script_slug': 'check_cohort_user_status_ended_cohort'} + monitor_script_kwargs = {"script_slug": "check_cohort_user_status_ended_cohort"} - model = self.generate_models(cohort_user=True, - cohort=True, - academy=True, - monitor_script=True, - monitor_script_kwargs=monitor_script_kwargs, - cohort_user_kwargs={'educational_status': 'DROPPED'}, - cohort_kwargs={'stage': 'ENDED'}) + model = self.generate_models( + cohort_user=True, + cohort=True, + academy=True, + monitor_script=True, + monitor_script_kwargs=monitor_script_kwargs, + cohort_user_kwargs={"educational_status": "DROPPED"}, + cohort_kwargs={"stage": "ENDED"}, + ) script = run_script(model.monitor_script) - del script['slack_payload'] - del script['text'] - del script['title'] + del script["slack_payload"] + del script["text"] + del script["title"] expected = { - 'severity_level': 5, - 'status': 'OPERATIONAL', + "severity_level": 5, + "status": "OPERATIONAL", } self.assertEqual(script, expected) - self.assertEqual(self.all_monitor_script_dict(), [{ - **self.model_to_dict(model, 'monitor_script'), - }]) + self.assertEqual( + self.all_monitor_script_dict(), + [ + { + **self.model_to_dict(model, "monitor_script"), + } + ], + ) """ 🔽🔽🔽 Active student in one ended cohort """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def tests_check_user_status__active_ended_cohort(self): - monitor_script_kwargs = {'script_slug': 'check_cohort_user_status_ended_cohort'} + monitor_script_kwargs = {"script_slug": "check_cohort_user_status_ended_cohort"} - model = self.generate_models(cohort_user=True, - cohort=True, - academy=True, - monitor_script=True, - monitor_script_kwargs=monitor_script_kwargs, - cohort_user_kwargs={'educational_status': 'ACTIVE'}, - cohort_kwargs={'stage': 'ENDED'}) + model = self.generate_models( + cohort_user=True, + cohort=True, + academy=True, + monitor_script=True, + monitor_script_kwargs=monitor_script_kwargs, + cohort_user_kwargs={"educational_status": "ACTIVE"}, + cohort_kwargs={"stage": "ENDED"}, + ) script = run_script(model.monitor_script) - del script['slack_payload'] - del script['text'] - del script['title'] + del script["slack_payload"] + del script["text"] + del script["title"] expected = { - 'btn': None, - 'severity_level': 5, - 'status': 'MINOR', - 'error_slug': 'ended-cohort-had-active-users', + "btn": None, + "severity_level": 5, + "status": "MINOR", + "error_slug": "ended-cohort-had-active-users", } self.assertEqual(script, 
expected) - self.assertEqual(self.all_monitor_script_dict(), [{ - **self.model_to_dict(model, 'monitor_script'), - }]) - - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + self.assertEqual( + self.all_monitor_script_dict(), + [ + { + **self.model_to_dict(model, "monitor_script"), + } + ], + ) + + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def tests_check_user_status__active_non_ended_cohort(self): - monitor_script_kwargs = {'script_slug': 'check_cohort_user_status_ended_cohort'} + monitor_script_kwargs = {"script_slug": "check_cohort_user_status_ended_cohort"} - model = self.generate_models(cohort_user=True, - cohort=True, - academy=True, - monitor_script=True, - monitor_script_kwargs=monitor_script_kwargs, - cohort_user_kwargs={'educational_status': 'ACTIVE'}, - cohort_kwargs={'stage': 'FINAL_PROJECT'}) + model = self.generate_models( + cohort_user=True, + cohort=True, + academy=True, + monitor_script=True, + monitor_script_kwargs=monitor_script_kwargs, + cohort_user_kwargs={"educational_status": "ACTIVE"}, + cohort_kwargs={"stage": "FINAL_PROJECT"}, + ) script = run_script(model.monitor_script) - del script['slack_payload'] - del script['text'] - del script['title'] + del script["slack_payload"] + del script["text"] + del script["title"] expected = { - 'severity_level': 5, - 'status': 'OPERATIONAL', + "severity_level": 5, + "status": "OPERATIONAL", } self.assertEqual(script, expected) - self.assertEqual(self.all_monitor_script_dict(), [{ - **self.model_to_dict(model, 'monitor_script'), - }]) + self.assertEqual( + self.all_monitor_script_dict(), + [ + { + **self.model_to_dict(model, "monitor_script"), + } + ], + ) """ 🔽🔽🔽 Active student in one never ends cohort """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def tests_check_user_status__cohort_never_ends(self): - monitor_script_kwargs = {'script_slug': 'check_cohort_user_status_ended_cohort'} + monitor_script_kwargs = {"script_slug": "check_cohort_user_status_ended_cohort"} - model = self.generate_models(cohort_user=True, - cohort=True, - academy=True, - monitor_script=True, - monitor_script_kwargs=monitor_script_kwargs, - cohort_user_kwargs={'educational_status': 'ACTIVE'}, - cohort_kwargs={ - 'stage': 'ENDED', - 'never_ends': True - }) + model = self.generate_models( + cohort_user=True, + cohort=True, + academy=True, + monitor_script=True, + monitor_script_kwargs=monitor_script_kwargs, + cohort_user_kwargs={"educational_status": "ACTIVE"}, + cohort_kwargs={"stage": "ENDED", "never_ends": True}, + ) script = run_script(model.monitor_script) - del script['slack_payload'] - del script['text'] - del script['title'] + del script["slack_payload"] + del script["text"] + del script["title"] - expected = {'severity_level': 5, 'status': 'OPERATIONAL'} + expected = {"severity_level": 5, "status": "OPERATIONAL"} self.assertEqual(script, expected) - 
self.assertEqual(self.all_monitor_script_dict(), [{ - **self.model_to_dict(model, 'monitor_script'), - }]) + self.assertEqual( + self.all_monitor_script_dict(), + [ + { + **self.model_to_dict(model, "monitor_script"), + } + ], + ) diff --git a/breathecode/monitoring/tests/scripts/tests_check_shortlink_destination_status.py b/breathecode/monitoring/tests/scripts/tests_check_shortlink_destination_status.py index bf92f1ec1..9bd5996c9 100644 --- a/breathecode/monitoring/tests/scripts/tests_check_shortlink_destination_status.py +++ b/breathecode/monitoring/tests/scripts/tests_check_shortlink_destination_status.py @@ -10,262 +10,271 @@ def tests_send_list_of_shortlinks_when_operational(self): Checking to see if shortlink script runs with properly with working shortlinks """ - monitor_script_kwargs = {'script_slug': 'check_shortlinks_with_destination_status_error'} + monitor_script_kwargs = {"script_slug": "check_shortlinks_with_destination_status_error"} model = self.generate_models(academy=True, monitor_script=True, monitor_script_kwargs=monitor_script_kwargs) script = run_script(model.monitor_script) - del script['slack_payload'] + del script["slack_payload"] expected = { - 'severity_level': 5, - 'status': 'OPERATIONAL', - 'text': 'All shortlinks working properly\n', - 'title': 'OK' + "severity_level": 5, + "status": "OPERATIONAL", + "text": "All shortlinks working properly\n", + "title": "OK", } self.assertEqual(script, expected) - self.assertEqual(self.all_monitor_script_dict(), [{ - **self.model_to_dict(model, 'monitor_script'), - }]) + self.assertEqual( + self.all_monitor_script_dict(), + [ + { + **self.model_to_dict(model, "monitor_script"), + } + ], + ) def tests_send_list_of_shortlinks_when_operational_with_shortlink_model(self): """ Checking to see if any ShortLink has destination status = 'ERROR' """ - monitor_script_kwargs = {'script_slug': 'check_shortlinks_with_destination_status_error'} + monitor_script_kwargs = {"script_slug": "check_shortlinks_with_destination_status_error"} - model = self.generate_models(academy=True, - monitor_script=True, - short_link=True, - monitor_script_kwargs=monitor_script_kwargs) + model = self.generate_models( + academy=True, monitor_script=True, short_link=True, monitor_script_kwargs=monitor_script_kwargs + ) script = run_script(model.monitor_script) - del script['slack_payload'] + del script["slack_payload"] expected = { - 'severity_level': 5, - 'status': 'OPERATIONAL', - 'text': 'All shortlinks working properly\n', - 'title': 'OK' + "severity_level": 5, + "status": "OPERATIONAL", + "text": "All shortlinks working properly\n", + "title": "OK", } self.assertEqual(script, expected) - self.assertEqual(self.all_monitor_script_dict(), [{ - **self.model_to_dict(model, 'monitor_script'), - }]) + self.assertEqual( + self.all_monitor_script_dict(), + [ + { + **self.model_to_dict(model, "monitor_script"), + } + ], + ) def tests_send_list_of_shortlinks_when_destination_status_error(self): """ Checking to see if any ShortLink has destination status = 'ERROR' """ - monitor_script_kwargs = {'script_slug': 'check_shortlinks_with_destination_status_error'} + monitor_script_kwargs = {"script_slug": "check_shortlinks_with_destination_status_error"} - short_link_kwargs = {'destination_status': 'ERROR'} + short_link_kwargs = {"destination_status": "ERROR"} - model = self.generate_models(academy=True, - monitor_script=True, - short_link=True, - short_link_kwargs=short_link_kwargs, - monitor_script_kwargs=monitor_script_kwargs) + model = self.generate_models( + 
academy=True, + monitor_script=True, + short_link=True, + short_link_kwargs=short_link_kwargs, + monitor_script_kwargs=monitor_script_kwargs, + ) - db = self.model_to_dict(model, 'monitor_script') + db = self.model_to_dict(model, "monitor_script") script = run_script(model.monitor_script) - del script['slack_payload'] + del script["slack_payload"] expected = { - 'btn': - None, - 'severity_level': - 5, - 'title': - None, - 'error_slug': - 'short-link-bad-destination-status', - 'status': - 'MINOR', - 'text': - f'These shortlinks: - URL: {model.short_link.destination} Status: ' - f'{model.short_link.destination_status} Last clicked: never are not working properly.', + "btn": None, + "severity_level": 5, + "title": None, + "error_slug": "short-link-bad-destination-status", + "status": "MINOR", + "text": f"These shortlinks: - URL: {model.short_link.destination} Status: " + f"{model.short_link.destination_status} Last clicked: never are not working properly.", } self.assertEqual(script, expected) db_values = self.all_monitor_script_dict() - self.assertDatetime(db_values[0]['last_run']) - del db_values[0]['last_run'] - del db['last_run'] - self.assertEqual(db_values, - [{ - **db, 'status': 'MINOR', - 'response_text': f'These shortlinks: - URL: {model.short_link.destination} Status: ' - f'{model.short_link.destination_status} Last clicked: never are not working properly.', - 'status_code': 1 - }]) + self.assertDatetime(db_values[0]["last_run"]) + del db_values[0]["last_run"] + del db["last_run"] + self.assertEqual( + db_values, + [ + { + **db, + "status": "MINOR", + "response_text": f"These shortlinks: - URL: {model.short_link.destination} Status: " + f"{model.short_link.destination_status} Last clicked: never are not working properly.", + "status_code": 1, + } + ], + ) def tests_send_list_of_shortlinks_when_destination_status_not_found(self): """ Checking to see if any ShortLink has destination status = 'NOT_FOUND' """ - monitor_script_kwargs = {'script_slug': 'check_shortlinks_with_destination_status_error'} + monitor_script_kwargs = {"script_slug": "check_shortlinks_with_destination_status_error"} - short_link_kwargs = {'destination_status': 'NOT_FOUND'} + short_link_kwargs = {"destination_status": "NOT_FOUND"} - model = self.generate_models(academy=True, - monitor_script=True, - short_link=True, - short_link_kwargs=short_link_kwargs, - monitor_script_kwargs=monitor_script_kwargs) + model = self.generate_models( + academy=True, + monitor_script=True, + short_link=True, + short_link_kwargs=short_link_kwargs, + monitor_script_kwargs=monitor_script_kwargs, + ) - db = self.model_to_dict(model, 'monitor_script') + db = self.model_to_dict(model, "monitor_script") script = run_script(model.monitor_script) - del script['slack_payload'] + del script["slack_payload"] expected = { - 'btn': - None, - 'severity_level': - 5, - 'title': - None, - 'error_slug': - 'short-link-bad-destination-status', - 'status': - 'MINOR', - 'text': - f'These shortlinks: - URL: {model.short_link.destination} Status: ' - f'{model.short_link.destination_status} Last clicked: never are not working properly.', + "btn": None, + "severity_level": 5, + "title": None, + "error_slug": "short-link-bad-destination-status", + "status": "MINOR", + "text": f"These shortlinks: - URL: {model.short_link.destination} Status: " + f"{model.short_link.destination_status} Last clicked: never are not working properly.", } self.assertEqual(script, expected) db_values = self.all_monitor_script_dict() - self.assertDatetime(db_values[0]['last_run']) - 
del db_values[0]['last_run'] - del db['last_run'] - self.assertEqual(db_values, - [{ - **db, 'status': 'MINOR', - 'response_text': f'These shortlinks: - URL: {model.short_link.destination} Status: ' - f'{model.short_link.destination_status} Last clicked: never are not working properly.', - 'status_code': 1 - }]) + self.assertDatetime(db_values[0]["last_run"]) + del db_values[0]["last_run"] + del db["last_run"] + self.assertEqual( + db_values, + [ + { + **db, + "status": "MINOR", + "response_text": f"These shortlinks: - URL: {model.short_link.destination} Status: " + f"{model.short_link.destination_status} Last clicked: never are not working properly.", + "status_code": 1, + } + ], + ) def tests_send_list_of_shortlinks_when_destination_status_error_with_lastclick_at(self): """ Checking to see if any ShortLink has destination_status = 'ERROR' with lastclick_at information """ - monitor_script_kwargs = {'script_slug': 'check_shortlinks_with_destination_status_error'} + monitor_script_kwargs = {"script_slug": "check_shortlinks_with_destination_status_error"} dt = datetime.datetime.now() - short_link_kwargs = {'destination_status': 'ERROR', 'lastclick_at': dt} + short_link_kwargs = {"destination_status": "ERROR", "lastclick_at": dt} - model = self.generate_models(academy=True, - monitor_script=True, - short_link=True, - short_link_kwargs=short_link_kwargs, - monitor_script_kwargs=monitor_script_kwargs) + model = self.generate_models( + academy=True, + monitor_script=True, + short_link=True, + short_link_kwargs=short_link_kwargs, + monitor_script_kwargs=monitor_script_kwargs, + ) - db = self.model_to_dict(model, 'monitor_script') + db = self.model_to_dict(model, "monitor_script") script = run_script(model.monitor_script) - del script['slack_payload'] + del script["slack_payload"] expected = { - 'btn': - None, - 'severity_level': - 5, - 'title': - None, - 'error_slug': - 'short-link-bad-destination-status', - 'status': - 'MINOR', - 'text': - f'These shortlinks: - URL: {model.short_link.destination} Status: ' - f'{model.short_link.destination_status} Last clicked: ' + "btn": None, + "severity_level": 5, + "title": None, + "error_slug": "short-link-bad-destination-status", + "status": "MINOR", + "text": f"These shortlinks: - URL: {model.short_link.destination} Status: " + f"{model.short_link.destination_status} Last clicked: " f'{model.short_link.lastclick_at.strftime("%m/%d/%Y, %H:%M:%S")} are not working properly.', } self.assertEqual(script, expected) db_values = self.all_monitor_script_dict() - self.assertDatetime(db_values[0]['last_run']) - del db_values[0]['last_run'] - del db['last_run'] - - self.assertEqual(db_values, [{ - **db, 'status': - 'MINOR', - 'response_text': - f'These shortlinks: - URL: {model.short_link.destination} Status: ' - f'{model.short_link.destination_status} Last clicked: {model.short_link.lastclick_at.strftime("%m/%d/%Y, %H:%M:%S")} are not working properly.', - 'status_code': - 1 - }]) + self.assertDatetime(db_values[0]["last_run"]) + del db_values[0]["last_run"] + del db["last_run"] + + self.assertEqual( + db_values, + [ + { + **db, + "status": "MINOR", + "response_text": f"These shortlinks: - URL: {model.short_link.destination} Status: " + f'{model.short_link.destination_status} Last clicked: {model.short_link.lastclick_at.strftime("%m/%d/%Y, %H:%M:%S")} are not working properly.', + "status_code": 1, + } + ], + ) def tests_send_list_of_shortlinks_when_destination_status_not_found_with_lastclick_at(self): """ Checking to see if any ShortLink has destination_status = 
'NOT_FOUND' with lastclick_at information """ - monitor_script_kwargs = {'script_slug': 'check_shortlinks_with_destination_status_error'} + monitor_script_kwargs = {"script_slug": "check_shortlinks_with_destination_status_error"} dt = datetime.datetime.now() - short_link_kwargs = {'destination_status': 'NOT_FOUND', 'lastclick_at': dt} + short_link_kwargs = {"destination_status": "NOT_FOUND", "lastclick_at": dt} - model = self.generate_models(academy=True, - monitor_script=True, - short_link=True, - short_link_kwargs=short_link_kwargs, - monitor_script_kwargs=monitor_script_kwargs) + model = self.generate_models( + academy=True, + monitor_script=True, + short_link=True, + short_link_kwargs=short_link_kwargs, + monitor_script_kwargs=monitor_script_kwargs, + ) - db = self.model_to_dict(model, 'monitor_script') + db = self.model_to_dict(model, "monitor_script") script = run_script(model.monitor_script) - del script['slack_payload'] + del script["slack_payload"] expected = { - 'btn': - None, - 'severity_level': - 5, - 'title': - None, - 'error_slug': - 'short-link-bad-destination-status', - 'status': - 'MINOR', - 'text': - f'These shortlinks: - URL: {model.short_link.destination} Status: ' - f'{model.short_link.destination_status} Last clicked: ' + "btn": None, + "severity_level": 5, + "title": None, + "error_slug": "short-link-bad-destination-status", + "status": "MINOR", + "text": f"These shortlinks: - URL: {model.short_link.destination} Status: " + f"{model.short_link.destination_status} Last clicked: " f'{model.short_link.lastclick_at.strftime("%m/%d/%Y, %H:%M:%S")} are not working properly.', } self.assertEqual(script, expected) db_values = self.all_monitor_script_dict() - self.assertDatetime(db_values[0]['last_run']) - del db_values[0]['last_run'] - del db['last_run'] - - self.assertEqual(db_values, [{ - **db, 'status': - 'MINOR', - 'response_text': - f'These shortlinks: - URL: {model.short_link.destination} Status: ' - f'{model.short_link.destination_status} Last clicked: {model.short_link.lastclick_at.strftime("%m/%d/%Y, %H:%M:%S")} are not working properly.', - 'status_code': - 1 - }]) + self.assertDatetime(db_values[0]["last_run"]) + del db_values[0]["last_run"] + del db["last_run"] + + self.assertEqual( + db_values, + [ + { + **db, + "status": "MINOR", + "response_text": f"These shortlinks: - URL: {model.short_link.destination} Status: " + f'{model.short_link.destination_status} Last clicked: {model.short_link.lastclick_at.strftime("%m/%d/%Y, %H:%M:%S")} are not working properly.', + "status_code": 1, + } + ], + ) diff --git a/breathecode/monitoring/tests/scripts/tests_event_marked_as_draft.py b/breathecode/monitoring/tests/scripts/tests_event_marked_as_draft.py index 9517a22fa..acbe5871c 100644 --- a/breathecode/monitoring/tests/scripts/tests_event_marked_as_draft.py +++ b/breathecode/monitoring/tests/scripts/tests_event_marked_as_draft.py @@ -25,24 +25,24 @@ def test_event_marked_as_draft__zero_events(self): application: Application(id=1) """ - monitor_script = {'script_slug': 'event_marked_as_draft'} + monitor_script = {"script_slug": "event_marked_as_draft"} model = self.bc.database.create(monitor_script=monitor_script) script = run_script(model.monitor_script) - del script['slack_payload'] - del script['text'] + del script["slack_payload"] + del script["text"] expected = { - 'severity_level': 5, - 'status': 'OPERATIONAL', - 'title': 'OK', + "severity_level": 5, + "status": "OPERATIONAL", + "title": "OK", } self.assertEqual(script, expected) - 
self.assertEqual(self.bc.database.list_of('monitoring.MonitorScript'), [{ - **self.bc.format.to_dict(model.monitor_script) - }]) + self.assertEqual( + self.bc.database.list_of("monitoring.MonitorScript"), [{**self.bc.format.to_dict(model.monitor_script)}] + ) """ 🔽🔽🔽 With one Event with status ACTIVE @@ -66,25 +66,25 @@ def test_event_marked_as_draft__one_event__status_active(self): application: Application(id=1) """ - monitor_script = {'script_slug': 'event_marked_as_draft'} - event = {'status': 'ACTIVE'} + monitor_script = {"script_slug": "event_marked_as_draft"} + event = {"status": "ACTIVE"} model = self.bc.database.create(monitor_script=monitor_script, event=event) script = run_script(model.monitor_script) - del script['slack_payload'] - del script['text'] + del script["slack_payload"] + del script["text"] expected = { - 'severity_level': 5, - 'status': 'OPERATIONAL', - 'title': 'OK', + "severity_level": 5, + "status": "OPERATIONAL", + "title": "OK", } self.assertEqual(script, expected) - self.assertEqual(self.bc.database.list_of('monitoring.MonitorScript'), [{ - **self.bc.format.to_dict(model.monitor_script) - }]) + self.assertEqual( + self.bc.database.list_of("monitoring.MonitorScript"), [{**self.bc.format.to_dict(model.monitor_script)}] + ) """ 🔽🔽🔽 With one Event with status DELETED @@ -108,25 +108,25 @@ def test_event_marked_as_draft__one_event__status_deleted(self): application: Application(id=1) """ - monitor_script = {'script_slug': 'event_marked_as_draft'} - event = {'status': 'DELETED'} + monitor_script = {"script_slug": "event_marked_as_draft"} + event = {"status": "DELETED"} model = self.bc.database.create(monitor_script=monitor_script, event=event) script = run_script(model.monitor_script) - del script['slack_payload'] - del script['text'] + del script["slack_payload"] + del script["text"] expected = { - 'severity_level': 5, - 'status': 'OPERATIONAL', - 'title': 'OK', + "severity_level": 5, + "status": "OPERATIONAL", + "title": "OK", } self.assertEqual(script, expected) - self.assertEqual(self.bc.database.list_of('monitoring.MonitorScript'), [{ - **self.bc.format.to_dict(model.monitor_script) - }]) + self.assertEqual( + self.bc.database.list_of("monitoring.MonitorScript"), [{**self.bc.format.to_dict(model.monitor_script)}] + ) """ 🔽🔽🔽 With one Event with status DRAFT @@ -150,34 +150,34 @@ def test_event_marked_as_draft__one_event__status_draft(self): application: Application(id=1) """ - monitor_script = {'script_slug': 'event_marked_as_draft'} - event = {'status': 'DRAFT'} + monitor_script = {"script_slug": "event_marked_as_draft"} + event = {"status": "DRAFT"} model = self.bc.database.create(monitor_script=monitor_script, event=event) script = run_script(model.monitor_script) - del script['slack_payload'] - del script['text'] + del script["slack_payload"] + del script["text"] expected = { - 'btn': { - 'label': 'More details', - 'url': f'/events/list?location={model.academy.slug}' - }, - 'severity_level': 100, - 'status': 'CRITICAL', - 'error_slug': 'draft-events', - 'title': f'There are 1 draft events to published or deleted in {model.academy.name}', + "btn": {"label": "More details", "url": f"/events/list?location={model.academy.slug}"}, + "severity_level": 100, + "status": "CRITICAL", + "error_slug": "draft-events", + "title": f"There are 1 draft events to published or deleted in {model.academy.name}", } self.assertEqual(script, expected) - self.assertEqual(self.bc.database.list_of('monitoring.MonitorScript'), [ - { - **self.bc.format.to_dict(model.monitor_script), - 
'status': 'CRITICAL', - 'status_text': None, - }, - ]) + self.assertEqual( + self.bc.database.list_of("monitoring.MonitorScript"), + [ + { + **self.bc.format.to_dict(model.monitor_script), + "status": "CRITICAL", + "status_text": None, + }, + ], + ) """ 🔽🔽🔽 With two Event with status DRAFT @@ -204,31 +204,31 @@ def test_event_marked_as_draft__two_events__status_draft(self): application: Application(id=1) """ - monitor_script = {'script_slug': 'event_marked_as_draft'} - event = {'status': 'DRAFT'} + monitor_script = {"script_slug": "event_marked_as_draft"} + event = {"status": "DRAFT"} model = self.bc.database.create(monitor_script=monitor_script, event=(2, event)) script = run_script(model.monitor_script) - del script['slack_payload'] - del script['text'] + del script["slack_payload"] + del script["text"] expected = { - 'btn': { - 'label': 'More details', - 'url': f'/events/list?location={model.academy.slug}' - }, - 'severity_level': 100, - 'status': 'CRITICAL', - 'error_slug': 'draft-events', - 'title': f'There are 2 draft events to published or deleted in {model.academy.name}', + "btn": {"label": "More details", "url": f"/events/list?location={model.academy.slug}"}, + "severity_level": 100, + "status": "CRITICAL", + "error_slug": "draft-events", + "title": f"There are 2 draft events to published or deleted in {model.academy.name}", } self.assertEqual(script, expected) - self.assertEqual(self.bc.database.list_of('monitoring.MonitorScript'), [ - { - **self.bc.format.to_dict(model.monitor_script), - 'status': 'CRITICAL', - 'status_text': None, - }, - ]) + self.assertEqual( + self.bc.database.list_of("monitoring.MonitorScript"), + [ + { + **self.bc.format.to_dict(model.monitor_script), + "status": "CRITICAL", + "status_text": None, + }, + ], + ) diff --git a/breathecode/monitoring/tests/scripts/tests_pending_academy_github_users.py b/breathecode/monitoring/tests/scripts/tests_pending_academy_github_users.py index fea907d13..de5e35241 100644 --- a/breathecode/monitoring/tests/scripts/tests_pending_academy_github_users.py +++ b/breathecode/monitoring/tests/scripts/tests_pending_academy_github_users.py @@ -9,104 +9,116 @@ class AcademyCohortTestSuite(MonitoringTestCase): # When: nothing was provided # Then: nothing happens def test_nothing_provided(self): - monitor_script = {'script_slug': 'pending_academy_github_users'} + monitor_script = {"script_slug": "pending_academy_github_users"} model = self.bc.database.create(monitor_script=monitor_script) script = run_script(model.monitor_script) - del script['slack_payload'] + del script["slack_payload"] expected = { - 'severity_level': 5, - 'status': 'OPERATIONAL', - 'text': 'All good\n', - 'title': 'OK', + "severity_level": 5, + "status": "OPERATIONAL", + "text": "All good\n", + "title": "OK", } self.assertEqual(script, expected) - self.assertEqual(self.bc.database.list_of('monitoring.MonitorScript'), [ - self.bc.format.to_dict(model.monitor_script), - ]) + self.assertEqual( + self.bc.database.list_of("monitoring.MonitorScript"), + [ + self.bc.format.to_dict(model.monitor_script), + ], + ) # Given: 4 GithubAcademyUser # When: academy 1 run script and 2 are marked as add and 2 as delete # Then: Don't report anything because the academy is different def test_all_right_cases(self): - github_academy_users = [{'storage_action': 'ADD'} for _ in range(2)] - github_academy_users += [{'storage_action': 'DELETE'} for _ in range(2)] - monitor_script = {'script_slug': 'pending_academy_github_users'} - model = 
self.bc.database.create(monitor_script=monitor_script, - academy=1, - github_academy_user=github_academy_users) + github_academy_users = [{"storage_action": "ADD"} for _ in range(2)] + github_academy_users += [{"storage_action": "DELETE"} for _ in range(2)] + monitor_script = {"script_slug": "pending_academy_github_users"} + model = self.bc.database.create( + monitor_script=monitor_script, academy=1, github_academy_user=github_academy_users + ) script = run_script(model.monitor_script) - del script['slack_payload'] + del script["slack_payload"] expected = { - 'severity_level': 5, - 'status': 'OPERATIONAL', - 'text': 'All good\n', - 'title': 'OK', + "severity_level": 5, + "status": "OPERATIONAL", + "text": "All good\n", + "title": "OK", } self.assertEqual(script, expected) - self.assertEqual(self.bc.database.list_of('monitoring.MonitorScript'), [ - self.bc.format.to_dict(model.monitor_script), - ]) + self.assertEqual( + self.bc.database.list_of("monitoring.MonitorScript"), + [ + self.bc.format.to_dict(model.monitor_script), + ], + ) # Given: 4 GithubAcademyUser # When: academy 1 run script and 2 are marked as invite and 2 as ignore # Then: Don't report anything because the academy is different def test_all_wrong_cases(self): - github_academy_users = [{'storage_action': 'INVITE'} for _ in range(2)] - github_academy_users += [{'storage_action': 'IGNORE'} for _ in range(2)] - monitor_script = {'script_slug': 'pending_academy_github_users'} - model = self.bc.database.create(monitor_script=monitor_script, - academy=1, - github_academy_user=github_academy_users) + github_academy_users = [{"storage_action": "INVITE"} for _ in range(2)] + github_academy_users += [{"storage_action": "IGNORE"} for _ in range(2)] + monitor_script = {"script_slug": "pending_academy_github_users"} + model = self.bc.database.create( + monitor_script=monitor_script, academy=1, github_academy_user=github_academy_users + ) script = run_script(model.monitor_script) - del script['slack_payload'] + del script["slack_payload"] expected = { - 'btn': None, - 'error_slug': '2-invite-and-2-ignore', - 'severity_level': 100, - 'status': 'CRITICAL', - 'text': 'There are 2 github users marked as invite and 2 marked as ignore', - 'title': 'There are 2 github users marked as invite and 2 marked as ignore', + "btn": None, + "error_slug": "2-invite-and-2-ignore", + "severity_level": 100, + "status": "CRITICAL", + "text": "There are 2 github users marked as invite and 2 marked as ignore", + "title": "There are 2 github users marked as invite and 2 marked as ignore", } self.assertEqual(script, expected) - self.assertEqual(self.bc.database.list_of('monitoring.MonitorScript'), [ - self.bc.format.to_dict(model.monitor_script), - ]) + self.assertEqual( + self.bc.database.list_of("monitoring.MonitorScript"), + [ + self.bc.format.to_dict(model.monitor_script), + ], + ) # Given: 4 GithubAcademyUser from other academy # When: academy 1 run script and 2 are marked as invite and 2 as ignore # Then: Don't report anything because the academy is different def test_errors_from_other_academies_are_ignored(self): - github_academy_users = [{'storage_action': 'INVITE', 'academy_id': 2} for _ in range(2)] - github_academy_users += [{'storage_action': 'IGNORE', 'academy_id': 2} for _ in range(2)] - monitor_script = {'script_slug': 'pending_academy_github_users'} - model = self.bc.database.create(monitor_script=monitor_script, - academy=2, - github_academy_user=github_academy_users) + github_academy_users = [{"storage_action": "INVITE", "academy_id": 2} for _ in 
range(2)] + github_academy_users += [{"storage_action": "IGNORE", "academy_id": 2} for _ in range(2)] + monitor_script = {"script_slug": "pending_academy_github_users"} + model = self.bc.database.create( + monitor_script=monitor_script, academy=2, github_academy_user=github_academy_users + ) script = run_script(model.monitor_script) - del script['slack_payload'] + del script["slack_payload"] expected = { - 'severity_level': 5, - 'status': 'OPERATIONAL', - 'text': 'All good\n', - 'title': 'OK', + "severity_level": 5, + "status": "OPERATIONAL", + "text": "All good\n", + "title": "OK", } self.assertEqual(script, expected) - self.assertEqual(self.bc.database.list_of('monitoring.MonitorScript'), [ - self.bc.format.to_dict(model.monitor_script), - ]) + self.assertEqual( + self.bc.database.list_of("monitoring.MonitorScript"), + [ + self.bc.format.to_dict(model.monitor_script), + ], + ) diff --git a/breathecode/monitoring/tests/scripts/tests_pending_provisioning_bills.py b/breathecode/monitoring/tests/scripts/tests_pending_provisioning_bills.py index fe25abf2c..4aa97c5a0 100644 --- a/breathecode/monitoring/tests/scripts/tests_pending_provisioning_bills.py +++ b/breathecode/monitoring/tests/scripts/tests_pending_provisioning_bills.py @@ -9,124 +9,142 @@ class AcademyCohortTestSuite(MonitoringTestCase): # When: nothing was provided # Then: nothing happens def test_nothing_provided(self): - monitor_script = {'script_slug': 'pending_provisioning_bills'} + monitor_script = {"script_slug": "pending_provisioning_bills"} model = self.bc.database.create(monitor_script=monitor_script) script = run_script(model.monitor_script) - del script['slack_payload'] + del script["slack_payload"] expected = { - 'severity_level': 5, - 'status': 'OPERATIONAL', - 'text': 'All good\n', - 'title': 'OK', + "severity_level": 5, + "status": "OPERATIONAL", + "text": "All good\n", + "title": "OK", } self.assertEqual(script, expected) - self.assertEqual(self.bc.database.list_of('monitoring.MonitorScript'), [ - self.bc.format.to_dict(model.monitor_script), - ]) + self.assertEqual( + self.bc.database.list_of("monitoring.MonitorScript"), + [ + self.bc.format.to_dict(model.monitor_script), + ], + ) # Given: 1 ProvisioningBill per status and academy and # -> 1 ProvisioningActivity per status and academy # When: academy 1 run script # Then: nothing to report def test_all_right_cases(self): - provisioning_bill_statuses = ['DUE', 'DISPUTED', 'IGNORED', 'PENDING', 'PAID'] - provisioning_bills = [{'status': s, 'academy_id': 1} for s in provisioning_bill_statuses] + provisioning_bill_statuses = ["DUE", "DISPUTED", "IGNORED", "PENDING", "PAID"] + provisioning_bills = [{"status": s, "academy_id": 1} for s in provisioning_bill_statuses] - provisioning_activity_statuses = ['PENDING', 'PERSISTED'] - provisioning_activities = [{'status': s, 'bill_id': 1} for s in provisioning_activity_statuses] - provisioning_activities += [{'status': s, 'bill_id': None} for s in provisioning_activity_statuses] + provisioning_activity_statuses = ["PENDING", "PERSISTED"] + provisioning_activities = [{"status": s, "bill_id": 1} for s in provisioning_activity_statuses] + provisioning_activities += [{"status": s, "bill_id": None} for s in provisioning_activity_statuses] - monitor_script = {'script_slug': 'pending_provisioning_bills'} - model = self.bc.database.create(monitor_script=monitor_script, - academy=2, - provisioning_bill=provisioning_bills, - provisioning_user_consumption=provisioning_activities) + monitor_script = {"script_slug": 
"pending_provisioning_bills"} + model = self.bc.database.create( + monitor_script=monitor_script, + academy=2, + provisioning_bill=provisioning_bills, + provisioning_user_consumption=provisioning_activities, + ) script = run_script(model.monitor_script) - del script['slack_payload'] + del script["slack_payload"] expected = { - 'severity_level': 5, - 'status': 'OPERATIONAL', - 'text': 'All good\n', - 'title': 'OK', + "severity_level": 5, + "status": "OPERATIONAL", + "text": "All good\n", + "title": "OK", } self.assertEqual(script, expected) - self.assertEqual(self.bc.database.list_of('monitoring.MonitorScript'), [ - self.bc.format.to_dict(model.monitor_script), - ]) + self.assertEqual( + self.bc.database.list_of("monitoring.MonitorScript"), + [ + self.bc.format.to_dict(model.monitor_script), + ], + ) # Given: 1 ProvisioningBill per wrong status and academy and # -> 1 ProvisioningActivity per wrong status and academy # When: academy 1 run script # Then: report 1 bill and 2 activities with errors def test_all_wrong_cases(self): - provisioning_bill_statuses = ['ERROR'] - provisioning_bills = [{'status': s, 'academy_id': 1} for s in provisioning_bill_statuses] + provisioning_bill_statuses = ["ERROR"] + provisioning_bills = [{"status": s, "academy_id": 1} for s in provisioning_bill_statuses] - provisioning_activity_statuses = ['ERROR'] - provisioning_activities = [{'status': s, 'bill_id': 1} for s in provisioning_activity_statuses] - provisioning_activities += [{'status': s, 'bill_id': None} for s in provisioning_activity_statuses] + provisioning_activity_statuses = ["ERROR"] + provisioning_activities = [{"status": s, "bill_id": 1} for s in provisioning_activity_statuses] + provisioning_activities += [{"status": s, "bill_id": None} for s in provisioning_activity_statuses] - monitor_script = {'script_slug': 'pending_provisioning_bills'} - model = self.bc.database.create(monitor_script=monitor_script, - academy=2, - provisioning_bill=provisioning_bills, - provisioning_user_consumption=provisioning_activities) + monitor_script = {"script_slug": "pending_provisioning_bills"} + model = self.bc.database.create( + monitor_script=monitor_script, + academy=2, + provisioning_bill=provisioning_bills, + provisioning_user_consumption=provisioning_activities, + ) script = run_script(model.monitor_script) - del script['slack_payload'] + del script["slack_payload"] expected = { - 'btn': None, - 'error_slug': '1-bills-and-2-activities-with-errors', - 'severity_level': 100, - 'status': 'CRITICAL', - 'text': 'There are 1 provisioning bills and 2 provisioning user consumptions with errors', - 'title': 'There are 1 bills and 2 user consumptions with errors', + "btn": None, + "error_slug": "1-bills-and-2-activities-with-errors", + "severity_level": 100, + "status": "CRITICAL", + "text": "There are 1 provisioning bills and 2 provisioning user consumptions with errors", + "title": "There are 1 bills and 2 user consumptions with errors", } self.assertEqual(script, expected) - self.assertEqual(self.bc.database.list_of('monitoring.MonitorScript'), [ - self.bc.format.to_dict(model.monitor_script), - ]) + self.assertEqual( + self.bc.database.list_of("monitoring.MonitorScript"), + [ + self.bc.format.to_dict(model.monitor_script), + ], + ) # Given: 1 ProvisioningBill per wrong status and academy 2 and # -> 1 ProvisioningActivity per wrong status and academy 2 # When: academy 1 run script # Then: Don't report anything because the academy is different def test_errors_from_other_academies_are_ignored(self): - 
provisioning_bill_statuses = ['ERROR'] - provisioning_bills = [{'status': s, 'academy_id': 2} for s in provisioning_bill_statuses] + provisioning_bill_statuses = ["ERROR"] + provisioning_bills = [{"status": s, "academy_id": 2} for s in provisioning_bill_statuses] - provisioning_activity_statuses = ['ERROR'] - provisioning_activities = [{'status': s, 'bill_id': 1} for s in provisioning_activity_statuses] + provisioning_activity_statuses = ["ERROR"] + provisioning_activities = [{"status": s, "bill_id": 1} for s in provisioning_activity_statuses] - monitor_script = {'script_slug': 'pending_provisioning_bills'} - model = self.bc.database.create(monitor_script=monitor_script, - academy=2, - provisioning_bill=provisioning_bills, - provisioning_user_consumption=provisioning_activities) + monitor_script = {"script_slug": "pending_provisioning_bills"} + model = self.bc.database.create( + monitor_script=monitor_script, + academy=2, + provisioning_bill=provisioning_bills, + provisioning_user_consumption=provisioning_activities, + ) script = run_script(model.monitor_script) - del script['slack_payload'] + del script["slack_payload"] expected = { - 'severity_level': 5, - 'status': 'OPERATIONAL', - 'text': 'All good\n', - 'title': 'OK', + "severity_level": 5, + "status": "OPERATIONAL", + "text": "All good\n", + "title": "OK", } self.assertEqual(script, expected) - self.assertEqual(self.bc.database.list_of('monitoring.MonitorScript'), [ - self.bc.format.to_dict(model.monitor_script), - ]) + self.assertEqual( + self.bc.database.list_of("monitoring.MonitorScript"), + [ + self.bc.format.to_dict(model.monitor_script), + ], + ) diff --git a/breathecode/monitoring/tests/scripts/tests_send_survey_reminder.py b/breathecode/monitoring/tests/scripts/tests_send_survey_reminder.py index ceda93217..e41a5431e 100644 --- a/breathecode/monitoring/tests/scripts/tests_send_survey_reminder.py +++ b/breathecode/monitoring/tests/scripts/tests_send_survey_reminder.py @@ -1,8 +1,12 @@ from datetime import timedelta from django.utils import timezone from unittest.mock import patch -from breathecode.tests.mocks import (GOOGLE_CLOUD_PATH, apply_google_cloud_client_mock, apply_google_cloud_bucket_mock, - apply_google_cloud_blob_mock) +from breathecode.tests.mocks import ( + GOOGLE_CLOUD_PATH, + apply_google_cloud_client_mock, + apply_google_cloud_bucket_mock, + apply_google_cloud_blob_mock, +) from ..mixins import MonitoringTestCase from breathecode.monitoring.actions import run_script @@ -12,121 +16,133 @@ class AcademyCohortTestSuite(MonitoringTestCase): 🔽🔽🔽 With bad entity 🔽🔽🔽 """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def tests_send_survey__reminder_no_survey(self): - monitor_script_kwargs = {'script_slug': 'send_survey_reminder'} + monitor_script_kwargs = {"script_slug": "send_survey_reminder"} - model = self.generate_models(academy=True, - skip_cohort=True, - monitor_script=True, - monitor_script_kwargs=monitor_script_kwargs) + model = self.generate_models( + academy=True, skip_cohort=True, monitor_script=True, monitor_script_kwargs=monitor_script_kwargs + ) script = run_script(model.monitor_script) - del script['slack_payload'] - del 
script['text'] - del script['title'] + del script["slack_payload"] + del script["text"] + del script["title"] expected = { - 'severity_level': 5, - 'status': 'OPERATIONAL', + "severity_level": 5, + "status": "OPERATIONAL", } self.assertEqual(script, expected) - self.assertEqual(self.all_monitor_script_dict(), [{ - **self.model_to_dict(model, 'monitor_script'), - }]) + self.assertEqual( + self.all_monitor_script_dict(), + [ + { + **self.model_to_dict(model, "monitor_script"), + } + ], + ) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def tests_send_survey__ending_date_less_than_now(self): - monitor_script_kwargs = {'script_slug': 'send_survey_reminder'} + monitor_script_kwargs = {"script_slug": "send_survey_reminder"} ending_date = timezone.now() - timedelta(weeks=1) sent_at = timezone.now() - timedelta(weeks=5) - model = self.generate_models(cohort=True, - monitor_script=True, - monitor_script_kwargs=monitor_script_kwargs, - cohort_kwargs={'ending_date': ending_date}, - survey=True, - survey_kwargs={'sent_at': sent_at}) + model = self.generate_models( + cohort=True, + monitor_script=True, + monitor_script_kwargs=monitor_script_kwargs, + cohort_kwargs={"ending_date": ending_date}, + survey=True, + survey_kwargs={"sent_at": sent_at}, + ) script = run_script(model.monitor_script) - del script['slack_payload'] - del script['text'] - del script['title'] + del script["slack_payload"] + del script["text"] + del script["title"] expected = { - 'severity_level': 5, - 'status': 'OPERATIONAL', + "severity_level": 5, + "status": "OPERATIONAL", } self.assertEqual(script, expected) - self.assertEqual(self.all_monitor_script_dict(), [{ - **self.model_to_dict(model, 'monitor_script'), - }]) + self.assertEqual( + self.all_monitor_script_dict(), + [ + { + **self.model_to_dict(model, "monitor_script"), + } + ], + ) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def tests_send_survey__kickoff_date_greater_than_now(self): - monitor_script_kwargs = {'script_slug': 'send_survey_reminder'} + monitor_script_kwargs = {"script_slug": "send_survey_reminder"} kickoff_date = timezone.now() + timedelta(weeks=1) sent_at = timezone.now() - timedelta(weeks=5) - model = self.generate_models(cohort=True, - monitor_script=True, - monitor_script_kwargs=monitor_script_kwargs, - cohort_kwargs={'kickoff_date': kickoff_date}, - survey=True, - survey_kwargs={'sent_at': sent_at}) + model = self.generate_models( + cohort=True, + monitor_script=True, + monitor_script_kwargs=monitor_script_kwargs, + cohort_kwargs={"kickoff_date": kickoff_date}, + survey=True, + survey_kwargs={"sent_at": sent_at}, + ) script = run_script(model.monitor_script) - del script['slack_payload'] - del script['text'] - del script['title'] + del script["slack_payload"] + del script["text"] + del script["title"] - expected 
= {'severity_level': 5, 'status': 'OPERATIONAL'} + expected = {"severity_level": 5, "status": "OPERATIONAL"} self.assertEqual(script, expected) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def tests_send_survey__latest_survey_less_four_weeks(self): - monitor_script_kwargs = {'script_slug': 'send_survey_reminder'} + monitor_script_kwargs = {"script_slug": "send_survey_reminder"} ending_date = timezone.now() + timedelta(weeks=4) kickoff_date = timezone.now() - timedelta(weeks=12) - base = self.generate_models(academy=True, - cohort=True, - monitor_script=True, - monitor_script_kwargs=monitor_script_kwargs, - cohort_kwargs={ - 'ending_date': ending_date, - 'kickoff_date': kickoff_date - }) + base = self.generate_models( + academy=True, + cohort=True, + monitor_script=True, + monitor_script_kwargs=monitor_script_kwargs, + cohort_kwargs={"ending_date": ending_date, "kickoff_date": kickoff_date}, + ) sent_at = timezone.now() - timedelta(weeks=1) models = [ - self.generate_models(survey=True, survey_kwargs={'sent_at': sent_at}, models=base) for _ in range(0, 2) + self.generate_models(survey=True, survey_kwargs={"sent_at": sent_at}, models=base) for _ in range(0, 2) ] script = run_script(models[1].monitor_script) - del script['slack_payload'] - del script['text'] - del script['title'] + del script["slack_payload"] + del script["text"] + del script["title"] expected = { - 'severity_level': 5, - 'status': 'OPERATIONAL', + "severity_level": 5, + "status": "OPERATIONAL", } self.assertEqual(script, expected) @@ -134,55 +150,55 @@ def tests_send_survey__latest_survey_less_four_weeks(self): 🔽🔽🔽 Cohort have pending surveys to send """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def tests_send_survey__latest_survey_greater_four_weeks(self): - monitor_script_kwargs = {'script_slug': 'send_survey_reminder'} + monitor_script_kwargs = {"script_slug": "send_survey_reminder"} ending_date = timezone.now() + timedelta(days=2) kickoff_date = timezone.now() - timedelta(weeks=8) - base = self.generate_models(academy=True, - cohort=True, - monitor_script=True, - monitor_script_kwargs=monitor_script_kwargs, - cohort_kwargs={ - 'ending_date': ending_date, - 'kickoff_date': kickoff_date, - 'stage': 'STARTED', - }) + base = self.generate_models( + academy=True, + cohort=True, + monitor_script=True, + monitor_script_kwargs=monitor_script_kwargs, + cohort_kwargs={ + "ending_date": ending_date, + "kickoff_date": kickoff_date, + "stage": "STARTED", + }, + ) sent_at = timezone.now() - timedelta(weeks=6) models = [ - self.generate_models(survey=True, survey_kwargs={ - 'status': 'SENT', - 'sent_at': sent_at - }, models=base) for _ in range(0, 2) + self.generate_models(survey=True, survey_kwargs={"status": "SENT", "sent_at": sent_at}, models=base) + for _ in range(0, 2) ] script = 
run_script(models[1].monitor_script) - del script['slack_payload'] - del script['text'] - del script['title'] + del script["slack_payload"] + del script["text"] + del script["title"] expected = { - 'btn': None, - 'severity_level': 5, - 'error_slug': 'cohort-have-pending-surveys', - 'status': 'MINOR', + "btn": None, + "severity_level": 5, + "error_slug": "cohort-have-pending-surveys", + "status": "MINOR", } self.assertEqual(script, expected) - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def tests_send_survey__latest_survey_greater_four_weeks__two_cohorts__two_survey(self): - monitor_script_kwargs = {'script_slug': 'send_survey_reminder'} + monitor_script_kwargs = {"script_slug": "send_survey_reminder"} ending_date = timezone.now() + timedelta(days=2) kickoff_date = timezone.now() - timedelta(days=12) @@ -196,30 +212,30 @@ def tests_send_survey__latest_survey_greater_four_weeks__two_cohorts__two_survey sent_at = timezone.now() - timedelta(weeks=6) models = [ - self.generate_models(survey=True, - cohort=True, - survey_kwargs={ - 'status': 'SENT', - 'sent_at': sent_at - }, - models=base, - cohort_kwargs={ - 'ending_date': ending_date, - 'kickoff_date': kickoff_date, - 'stage': 'FINAL_PROJECT', - }) for _ in range(0, 2) + self.generate_models( + survey=True, + cohort=True, + survey_kwargs={"status": "SENT", "sent_at": sent_at}, + models=base, + cohort_kwargs={ + "ending_date": ending_date, + "kickoff_date": kickoff_date, + "stage": "FINAL_PROJECT", + }, + ) + for _ in range(0, 2) ] script = run_script(models[1].monitor_script) - del script['slack_payload'] - del script['text'] - del script['title'] + del script["slack_payload"] + del script["text"] + del script["title"] expected = { - 'btn': None, - 'severity_level': 5, - 'error_slug': 'cohort-have-pending-surveys', - 'status': 'MINOR', + "btn": None, + "severity_level": 5, + "error_slug": "cohort-have-pending-surveys", + "status": "MINOR", } self.assertEqual(script, expected) @@ -228,12 +244,12 @@ def tests_send_survey__latest_survey_greater_four_weeks__two_cohorts__two_survey 🔽🔽🔽 Cohort that never ends """ - @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock()) - @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock()) - @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock()) + @patch(GOOGLE_CLOUD_PATH["client"], apply_google_cloud_client_mock()) + @patch(GOOGLE_CLOUD_PATH["bucket"], apply_google_cloud_bucket_mock()) + @patch(GOOGLE_CLOUD_PATH["blob"], apply_google_cloud_blob_mock()) def tests_send_survey__latest_survey_greater_four_weeks__cohort_never_ends(self): - monitor_script_kwargs = {'script_slug': 'send_survey_reminder'} + monitor_script_kwargs = {"script_slug": "send_survey_reminder"} ending_date = timezone.now() + timedelta(days=2) kickoff_date = timezone.now() - timedelta(days=2) @@ -244,25 +260,26 @@ def tests_send_survey__latest_survey_greater_four_weeks__cohort_never_ends(self) monitor_script_kwargs=monitor_script_kwargs, cohort_kwargs={ # 'ending_date': ending_date, - 'kickoff_date': kickoff_date, - 'never_ends': True - }) + "kickoff_date": kickoff_date, + "never_ends": True, + }, + ) sent_at = timezone.now() - 
timedelta(weeks=6) models = [ - self.generate_models(survey=True, survey_kwargs={'sent_at': sent_at}, models=base) for _ in range(0, 2) + self.generate_models(survey=True, survey_kwargs={"sent_at": sent_at}, models=base) for _ in range(0, 2) ] script = run_script(models[1].monitor_script) - del script['slack_payload'] - del script['text'] - del script['title'] + del script["slack_payload"] + del script["text"] + del script["title"] expected = { - 'severity_level': 5, - 'status': 'OPERATIONAL', + "severity_level": 5, + "status": "OPERATIONAL", } self.assertEqual(script, expected) diff --git a/breathecode/monitoring/tests/tasks/tests_fix_issue.py b/breathecode/monitoring/tests/tasks/tests_fix_issue.py index a2c139f20..cc46daee3 100644 --- a/breathecode/monitoring/tests/tasks/tests_fix_issue.py +++ b/breathecode/monitoring/tests/tasks/tests_fix_issue.py @@ -17,6 +17,7 @@ PATHS = paths.copy() + class Supervisor: def __init__(self, bc: Breathecode): @@ -24,10 +25,13 @@ def __init__(self, bc: Breathecode): def list(self): supervisors = SupervisorModel.objects.all() - return [{ - 'task_module': supervisor.task_module, - 'task_name': supervisor.task_name, - } for supervisor in supervisors] + return [ + { + "task_module": supervisor.task_module, + "task_name": supervisor.task_name, + } + for supervisor in supervisors + ] @sync_to_async def alist(self): @@ -48,33 +52,40 @@ def log(self, module, name): def alog(self, module, name): return self.log(module, name) + @pytest.fixture def supervisor(db, bc: Breathecode): yield Supervisor(bc) + @supervisor_decorator(delta=timedelta(days=1)) def my_supervisor(): pass + @issue_decorator(my_supervisor) def issue_returns_none(): return None + @issue_decorator(my_supervisor) def issue_returns_false(): return False + @issue_decorator(my_supervisor) def issue_returns_true(): return True + @pytest.fixture(autouse=True) def setup(db, monkeypatch: pytest.MonkeyPatch): - monkeypatch.setattr('logging.Logger.error', MagicMock()) + monkeypatch.setattr("logging.Logger.error", MagicMock()) yield -@pytest.fixture(autouse=True, scope='module') + +@pytest.fixture(autouse=True, scope="module") def setup_module(): yield @@ -83,140 +94,172 @@ def setup_module(): if x not in PATHS: paths.remove(x) + def db(data={}): return { - 'delta': timedelta(seconds=3600), - 'id': 0, - 'ran_at': None, - 'task_module': '', - 'task_name': '', + "delta": timedelta(seconds=3600), + "id": 0, + "ran_at": None, + "task_module": "", + "task_name": "", **data, } + def tests_not_found(database: capy.Database): database.create() fix_issue.delay(1) - assert database.list_of('monitoring.Supervisor') == [] - assert database.list_of('monitoring.SupervisorIssue') == [] + assert database.list_of("monitoring.Supervisor") == [] + assert database.list_of("monitoring.SupervisorIssue") == [] assert Logger.error.call_args_list == [ - call('Issue 1 not found', exc_info=True), + call("Issue 1 not found", exc_info=True), ] + def tests_issue_with_no_code(database: capy.Database, format: capy.Format): - task_module = 'breathecode.monitoring.tests.tasks.tests_fix_issue' - task_name = 'my_supervisor' - model = database.create(supervisor={ - 'task_module': 'breathecode.monitoring.tests.tasks.tests_fix_issue', - 'task_name': 'my_supervisor', - 'delta': timedelta(days=1), - }, - supervisor_issue=1) + task_module = "breathecode.monitoring.tests.tasks.tests_fix_issue" + task_name = "my_supervisor" + model = database.create( + supervisor={ + "task_module": "breathecode.monitoring.tests.tasks.tests_fix_issue", + "task_name": 
"my_supervisor", + "delta": timedelta(days=1), + }, + supervisor_issue=1, + ) fix_issue.delay(1) - assert database.list_of('monitoring.Supervisor') == [ - db({ - 'id': 1, - 'task_module': task_module, - 'task_name': task_name, - 'delta': timedelta(days=1), - }), + assert database.list_of("monitoring.Supervisor") == [ + db( + { + "id": 1, + "task_module": task_module, + "task_name": task_name, + "delta": timedelta(days=1), + } + ), ] - assert database.list_of('monitoring.SupervisorIssue') == [format.to_obj_repr(model.supervisor_issue)] + assert database.list_of("monitoring.SupervisorIssue") == [format.to_obj_repr(model.supervisor_issue)] assert Logger.error.call_args_list == [ - call('Issue 1 has no code', exc_info=True), + call("Issue 1 has no code", exc_info=True), ] -@pytest.mark.parametrize('task_module, code, error', [ - ('x', 'all_ok_supervisor', 'Module x not found'), - ('breathecode.monitoring.tests.tasks.tests_fix_issue', 'x', - 'Supervisor breathecode.monitoring.tests.tasks.tests_fix_issue.x not found'), -]) -def tests_supervisor_handler_not_found(database: capy.Database, format: capy.Format, error: str, task_module: str, - code: str): - task_name = 'my_supervisor' - model = database.create(supervisor={ - 'task_module': task_module, - 'task_name': task_name, - 'delta': timedelta(days=1), - }, - supervisor_issue={'code': code}) + +@pytest.mark.parametrize( + "task_module, code, error", + [ + ("x", "all_ok_supervisor", "Module x not found"), + ( + "breathecode.monitoring.tests.tasks.tests_fix_issue", + "x", + "Supervisor breathecode.monitoring.tests.tasks.tests_fix_issue.x not found", + ), + ], +) +def tests_supervisor_handler_not_found( + database: capy.Database, format: capy.Format, error: str, task_module: str, code: str +): + task_name = "my_supervisor" + model = database.create( + supervisor={ + "task_module": task_module, + "task_name": task_name, + "delta": timedelta(days=1), + }, + supervisor_issue={"code": code}, + ) fix_issue.delay(1) - assert database.list_of('monitoring.Supervisor') == [ - db({ - 'id': 1, - 'task_module': task_module, - 'task_name': task_name, - 'delta': timedelta(days=1), - }), + assert database.list_of("monitoring.Supervisor") == [ + db( + { + "id": 1, + "task_module": task_module, + "task_name": task_name, + "delta": timedelta(days=1), + } + ), ] - assert database.list_of('monitoring.SupervisorIssue') == [format.to_obj_repr(model.supervisor_issue)] + assert database.list_of("monitoring.SupervisorIssue") == [format.to_obj_repr(model.supervisor_issue)] assert Logger.error.call_args_list == [ call(error, exc_info=True), ] -@pytest.mark.parametrize('res', [None, False, True]) -def tests_issue_returns_x(database: capy.Database, format: capy.Format, - res: str, utc_now: datetime): - model = database.create(supervisor={ - 'task_module': 'breathecode.monitoring.tests.tasks.tests_fix_issue', - 'task_name': 'my_supervisor', - 'delta': timedelta(days=1), - }, - supervisor_issue={'code': f'issue-returns-{str(res).lower()}'}) + +@pytest.mark.parametrize("res", [None, False, True]) +def tests_issue_returns_x(database: capy.Database, format: capy.Format, res: str, utc_now: datetime): + model = database.create( + supervisor={ + "task_module": "breathecode.monitoring.tests.tasks.tests_fix_issue", + "task_name": "my_supervisor", + "delta": timedelta(days=1), + }, + supervisor_issue={"code": f"issue-returns-{str(res).lower()}"}, + ) fix_issue.delay(1) - assert database.list_of('monitoring.Supervisor') == [ - db({ - 'id': 1, - 'task_module': 
'breathecode.monitoring.tests.tasks.tests_fix_issue', - 'task_name': 'my_supervisor', - 'delta': timedelta(days=1), - 'ran_at': None, - }), + assert database.list_of("monitoring.Supervisor") == [ + db( + { + "id": 1, + "task_module": "breathecode.monitoring.tests.tasks.tests_fix_issue", + "task_name": "my_supervisor", + "delta": timedelta(days=1), + "ran_at": None, + } + ), + ] + assert database.list_of("monitoring.SupervisorIssue") == [ + { + **format.to_obj_repr(model.supervisor_issue), + "attempts": 1, + "fixed": res, + "ran_at": utc_now, + } ] - assert database.list_of('monitoring.SupervisorIssue') == [{ - **format.to_obj_repr(model.supervisor_issue), - 'attempts': 1, - 'fixed': res, - 'ran_at': utc_now, - }] assert Logger.error.call_args_list == [] + def tests_issue_with_3_attempts(database: capy.Database, format: capy.Format): - code = 'issue-returns-true' - task_module = 'breathecode.monitoring.tests.tasks.tests_fix_issue' - model = database.create(supervisor={ - 'task_module': task_module, - 'task_name': 'my_supervisor', - 'delta': timedelta(days=1), - }, - supervisor_issue={ - 'code': code, - 'attempts': 3, - }) + code = "issue-returns-true" + task_module = "breathecode.monitoring.tests.tasks.tests_fix_issue" + model = database.create( + supervisor={ + "task_module": task_module, + "task_name": "my_supervisor", + "delta": timedelta(days=1), + }, + supervisor_issue={ + "code": code, + "attempts": 3, + }, + ) fix_issue.delay(1) - assert database.list_of('monitoring.Supervisor') == [ - db({ - 'id': 1, - 'task_module': task_module, - 'task_name': 'my_supervisor', - 'delta': timedelta(days=1), - 'ran_at': None, - }), + assert database.list_of("monitoring.Supervisor") == [ + db( + { + "id": 1, + "task_module": task_module, + "task_name": "my_supervisor", + "delta": timedelta(days=1), + "ran_at": None, + } + ), + ] + assert database.list_of("monitoring.SupervisorIssue") == [ + { + **format.to_obj_repr(model.supervisor_issue), + "attempts": 3, + "fixed": None, + } ] - assert database.list_of('monitoring.SupervisorIssue') == [{ - **format.to_obj_repr(model.supervisor_issue), - 'attempts': 3, - 'fixed': None, - }] assert Logger.error.call_args_list == [ - call(f'Supervisor {task_module}.{code.replace('-', '_')} has reached max attempts', exc_info=True), + call(f"Supervisor {task_module}.{code.replace('-', '_')} has reached max attempts", exc_info=True), ] diff --git a/breathecode/monitoring/tests/tasks/tests_run_supervisor.py b/breathecode/monitoring/tests/tasks/tests_run_supervisor.py index 7812849ab..30a979d26 100644 --- a/breathecode/monitoring/tests/tasks/tests_run_supervisor.py +++ b/breathecode/monitoring/tests/tasks/tests_run_supervisor.py @@ -23,10 +23,13 @@ def __init__(self, bc: Breathecode): def list(self): supervisors = SupervisorModel.objects.all() - return [{ - 'task_module': supervisor.task_module, - 'task_name': supervisor.task_name, - } for supervisor in supervisors] + return [ + { + "task_module": supervisor.task_module, + "task_name": supervisor.task_name, + } + for supervisor in supervisors + ] @sync_to_async def alist(self): @@ -61,17 +64,17 @@ def all_ok_supervisor(): @supervisor_decorator(delta=timedelta(days=1)) def something_went_wrong_supervisor(): for n in range(3): - yield f'Something went wrong {n}' + yield f"Something went wrong {n}" @pytest.fixture(autouse=True) def setup(db, monkeypatch: pytest.MonkeyPatch): - monkeypatch.setattr('logging.Logger.error', MagicMock()) + monkeypatch.setattr("logging.Logger.error", MagicMock()) yield 
-@pytest.fixture(autouse=True, scope='module') +@pytest.fixture(autouse=True, scope="module") def setup_module(): yield @@ -83,11 +86,11 @@ def setup_module(): def db(data={}): return { - 'delta': timedelta(seconds=3600), - 'id': 0, - 'ran_at': None, - 'task_module': '', - 'task_name': '', + "delta": timedelta(seconds=3600), + "id": 0, + "ran_at": None, + "task_module": "", + "task_name": "", **data, } @@ -98,43 +101,53 @@ def tests_not_found(database: dfx.Database, supervisor: Supervisor): run_supervisor.delay(1) assert supervisor.list() == [] - assert database.list_of('monitoring.Supervisor') == [] - assert database.list_of('monitoring.SupervisorIssue') == [] + assert database.list_of("monitoring.Supervisor") == [] + assert database.list_of("monitoring.SupervisorIssue") == [] assert Logger.error.call_args_list == [ - call('Supervisor 1 not found', exc_info=True), + call("Supervisor 1 not found", exc_info=True), ] -@pytest.mark.parametrize('task_module, task_name, error', [ - ('x', 'all_ok_supervisor', 'Module x not found'), - ('breathecode.monitoring.tests.tasks.tests_run_supervisor', 'x', - 'Supervisor breathecode.monitoring.tests.tasks.tests_run_supervisor.x not found'), -]) +@pytest.mark.parametrize( + "task_module, task_name, error", + [ + ("x", "all_ok_supervisor", "Module x not found"), + ( + "breathecode.monitoring.tests.tasks.tests_run_supervisor", + "x", + "Supervisor breathecode.monitoring.tests.tasks.tests_run_supervisor.x not found", + ), + ], +) def tests_supervisor_handler_not_found(database: dfx.Database, supervisor: Supervisor, task_module, task_name, error): - database.create(supervisor={ - 'task_module': task_module, - 'task_name': task_name, - 'delta': timedelta(days=1), - }) + database.create( + supervisor={ + "task_module": task_module, + "task_name": task_name, + "delta": timedelta(days=1), + } + ) run_supervisor.delay(1) assert supervisor.list() == [ { - 'task_module': task_module, - 'task_name': task_name, + "task_module": task_module, + "task_name": task_name, }, ] assert supervisor.log(task_module, task_name) == [] - assert database.list_of('monitoring.Supervisor') == [ - db({ - 'id': 1, - 'task_module': task_module, - 'task_name': task_name, - 'delta': timedelta(days=1), - }), + assert database.list_of("monitoring.Supervisor") == [ + db( + { + "id": 1, + "task_module": task_module, + "task_name": task_name, + "delta": timedelta(days=1), + } + ), ] - assert database.list_of('monitoring.SupervisorIssue') == [] + assert database.list_of("monitoring.SupervisorIssue") == [] assert Logger.error.call_args_list == [ call(error, exc_info=True), ] @@ -143,93 +156,100 @@ def tests_supervisor_handler_not_found(database: dfx.Database, supervisor: Super def tests_supervision_with_not_issues(database: dfx.Database, supervisor: Supervisor, utc_now: datetime): database.create( supervisor={ - 'task_module': 'breathecode.monitoring.tests.tasks.tests_run_supervisor', - 'task_name': 'all_ok_supervisor', - 'delta': timedelta(days=1), - }) + "task_module": "breathecode.monitoring.tests.tasks.tests_run_supervisor", + "task_name": "all_ok_supervisor", + "delta": timedelta(days=1), + } + ) run_supervisor.delay(1) assert supervisor.list() == [ { - 'task_module': 'breathecode.monitoring.tests.tasks.tests_run_supervisor', - 'task_name': 'all_ok_supervisor', + "task_module": "breathecode.monitoring.tests.tasks.tests_run_supervisor", + "task_name": "all_ok_supervisor", }, ] - assert supervisor.log('breathecode.monitoring.tests.tasks.tests_run_supervisor', 'all_ok_supervisor') == [] - assert 
database.list_of('monitoring.Supervisor') == [ - db({ - 'id': 1, - 'task_module': 'breathecode.monitoring.tests.tasks.tests_run_supervisor', - 'task_name': 'all_ok_supervisor', - 'delta': timedelta(days=1), - 'ran_at': utc_now, - }), + assert supervisor.log("breathecode.monitoring.tests.tasks.tests_run_supervisor", "all_ok_supervisor") == [] + assert database.list_of("monitoring.Supervisor") == [ + db( + { + "id": 1, + "task_module": "breathecode.monitoring.tests.tasks.tests_run_supervisor", + "task_name": "all_ok_supervisor", + "delta": timedelta(days=1), + "ran_at": utc_now, + } + ), ] - assert database.list_of('monitoring.SupervisorIssue') == [] + assert database.list_of("monitoring.SupervisorIssue") == [] assert Logger.error.call_args_list == [] def tests_supervision_with_issues(database: dfx.Database, supervisor: Supervisor, utc_now: datetime): database.create( supervisor={ - 'task_module': 'breathecode.monitoring.tests.tasks.tests_run_supervisor', - 'task_name': 'something_went_wrong_supervisor', - 'delta': timedelta(days=1), - }) + "task_module": "breathecode.monitoring.tests.tasks.tests_run_supervisor", + "task_name": "something_went_wrong_supervisor", + "delta": timedelta(days=1), + } + ) run_supervisor.delay(1) assert supervisor.list() == [ { - 'task_module': 'breathecode.monitoring.tests.tasks.tests_run_supervisor', - 'task_name': 'something_went_wrong_supervisor', + "task_module": "breathecode.monitoring.tests.tasks.tests_run_supervisor", + "task_name": "something_went_wrong_supervisor", }, ] - assert supervisor.log('breathecode.monitoring.tests.tasks.tests_run_supervisor', - 'something_went_wrong_supervisor') == [f'Something went wrong {x}' for x in range(3)] - assert database.list_of('monitoring.Supervisor') == [ - db({ - 'id': 1, - 'task_module': 'breathecode.monitoring.tests.tasks.tests_run_supervisor', - 'task_name': 'something_went_wrong_supervisor', - 'delta': timedelta(days=1), - 'ran_at': utc_now, - }), + assert supervisor.log( + "breathecode.monitoring.tests.tasks.tests_run_supervisor", "something_went_wrong_supervisor" + ) == [f"Something went wrong {x}" for x in range(3)] + assert database.list_of("monitoring.Supervisor") == [ + db( + { + "id": 1, + "task_module": "breathecode.monitoring.tests.tasks.tests_run_supervisor", + "task_name": "something_went_wrong_supervisor", + "delta": timedelta(days=1), + "ran_at": utc_now, + } + ), ] - assert database.list_of('monitoring.SupervisorIssue') == [ + assert database.list_of("monitoring.SupervisorIssue") == [ { - 'error': 'Something went wrong 0', - 'id': 1, - 'occurrences': 1, - 'ran_at': utc_now, - 'supervisor_id': 1, - 'attempts': 0, - 'code': None, - 'fixed': None, - 'params': None, + "error": "Something went wrong 0", + "id": 1, + "occurrences": 1, + "ran_at": utc_now, + "supervisor_id": 1, + "attempts": 0, + "code": None, + "fixed": None, + "params": None, }, { - 'error': 'Something went wrong 1', - 'id': 2, - 'occurrences': 1, - 'ran_at': utc_now, - 'supervisor_id': 1, - 'attempts': 0, - 'code': None, - 'fixed': None, - 'params': None, + "error": "Something went wrong 1", + "id": 2, + "occurrences": 1, + "ran_at": utc_now, + "supervisor_id": 1, + "attempts": 0, + "code": None, + "fixed": None, + "params": None, }, { - 'error': 'Something went wrong 2', - 'id': 3, - 'occurrences': 1, - 'ran_at': utc_now, - 'supervisor_id': 1, - 'attempts': 0, - 'code': None, - 'fixed': None, - 'params': None, + "error": "Something went wrong 2", + "id": 3, + "occurrences": 1, + "ran_at": utc_now, + "supervisor_id": 1, + "attempts": 0, 
+ "code": None, + "fixed": None, + "params": None, }, ] assert Logger.error.call_args_list == [] diff --git a/breathecode/monitoring/tests/urls/tests_stripe_webhook.py b/breathecode/monitoring/tests/urls/tests_stripe_webhook.py index 7fe237504..6031b487d 100644 --- a/breathecode/monitoring/tests/urls/tests_stripe_webhook.py +++ b/breathecode/monitoring/tests/urls/tests_stripe_webhook.py @@ -12,7 +12,7 @@ from ..mixins import MonitoringTestCase CURRENT_MOCK = MagicMock() -CURRENT_PATH = 'breathecode.monitoring.tasks.test_endpoint' +CURRENT_PATH = "breathecode.monitoring.tasks.test_endpoint" # This tests check functions are called, remember that this functions are @@ -22,92 +22,94 @@ class AcademyCohortTestSuite(MonitoringTestCase): # When: no signature # Then: return 403 - @patch('breathecode.monitoring.signals.stripe_webhook.send_robust', MagicMock(return_value=None)) + @patch("breathecode.monitoring.signals.stripe_webhook.send_robust", MagicMock(return_value=None)) def tests_no_signature(self): # model = self.bc.database.create() - url = reverse_lazy('monitoring:stripe_webhook') + url = reverse_lazy("monitoring:stripe_webhook") response = self.client.post(url) json = response.json() - expected = {'detail': 'not-allowed', 'status_code': 403} + expected = {"detail": "not-allowed", "status_code": 403} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) - self.assertEqual(self.bc.database.list_of('monitoring.StripeEvent'), []) + self.assertEqual(self.bc.database.list_of("monitoring.StripeEvent"), []) # When: invalid payload # Then: return 400 - @patch('breathecode.monitoring.signals.stripe_webhook.send_robust', MagicMock(return_value=None)) - @patch('stripe.Webhook.construct_event', MagicMock(side_effect=ValueError('x'))) + @patch("breathecode.monitoring.signals.stripe_webhook.send_robust", MagicMock(return_value=None)) + @patch("stripe.Webhook.construct_event", MagicMock(side_effect=ValueError("x"))) def tests_invalid_payload(self): - url = reverse_lazy('monitoring:stripe_webhook') - self.bc.request.set_headers(stripe_signature='123') + url = reverse_lazy("monitoring:stripe_webhook") + self.bc.request.set_headers(stripe_signature="123") response = self.client.post(url) json = response.json() - expected = {'detail': 'invalid-payload', 'status_code': 400} + expected = {"detail": "invalid-payload", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('monitoring.StripeEvent'), []) + self.assertEqual(self.bc.database.list_of("monitoring.StripeEvent"), []) # When: invalid payload, inside action # Then: return 400 - @patch('breathecode.monitoring.signals.stripe_webhook.send_robust', MagicMock(return_value=None)) - @patch('stripe.Webhook.construct_event', MagicMock(return_value={})) + @patch("breathecode.monitoring.signals.stripe_webhook.send_robust", MagicMock(return_value=None)) + @patch("stripe.Webhook.construct_event", MagicMock(return_value={})) def tests_invalid_payload__inside_action(self): - url = reverse_lazy('monitoring:stripe_webhook') - self.bc.request.set_headers(stripe_signature='123') + url = reverse_lazy("monitoring:stripe_webhook") + self.bc.request.set_headers(stripe_signature="123") response = self.client.post(url) json = response.json() - expected = {'detail': 'invalid-stripe-webhook-payload', 'status_code': 400} + expected = {"detail": "invalid-stripe-webhook-payload", "status_code": 400} self.assertEqual(json, expected) 
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('monitoring.StripeEvent'), []) + self.assertEqual(self.bc.database.list_of("monitoring.StripeEvent"), []) # When: success # Then: return 200 - @patch('breathecode.monitoring.signals.stripe_webhook.send_robust', MagicMock(return_value=None)) + @patch("breathecode.monitoring.signals.stripe_webhook.send_robust", MagicMock(return_value=None)) def tests_success(self): - url = reverse_lazy('monitoring:stripe_webhook') - self.bc.request.set_headers(stripe_signature='123') + url = reverse_lazy("monitoring:stripe_webhook") + self.bc.request.set_headers(stripe_signature="123") data = { - 'id': self.bc.fake.slug(), - 'type': self.bc.fake.slug(), - 'data': { - 'object': 'x' - }, - 'request': { - 'object': 'x' - }, + "id": self.bc.fake.slug(), + "type": self.bc.fake.slug(), + "data": {"object": "x"}, + "request": {"object": "x"}, } - with patch('stripe.Webhook.construct_event', MagicMock(side_effect=[data])): + with patch("stripe.Webhook.construct_event", MagicMock(side_effect=[data])): response = self.client.post(url) json = response.json() - expected = {'success': True} + expected = {"success": True} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - data['stripe_id'] = data.pop('id') - self.assertEqual(self.bc.database.list_of('monitoring.StripeEvent'), [ - { - **data, - 'id': 1, - 'status': 'PENDING', - 'status_texts': {}, - }, - ]) - - StripeEvent = self.bc.database.get_model('monitoring.StripeEvent') + data["stripe_id"] = data.pop("id") + self.assertEqual( + self.bc.database.list_of("monitoring.StripeEvent"), + [ + { + **data, + "id": 1, + "status": "PENDING", + "status_texts": {}, + }, + ], + ) + + StripeEvent = self.bc.database.get_model("monitoring.StripeEvent") event = StripeEvent.objects.filter(id=1).first() - self.bc.check.calls(signals.stripe_webhook.send_robust.call_args_list, [ - call(event=event, sender=event.__class__), - ]) + self.bc.check.calls( + signals.stripe_webhook.send_robust.call_args_list, + [ + call(event=event, sender=event.__class__), + ], + ) diff --git a/breathecode/monitoring/urls.py b/breathecode/monitoring/urls.py index e10001056..7e36ade6d 100644 --- a/breathecode/monitoring/urls.py +++ b/breathecode/monitoring/urls.py @@ -1,17 +1,24 @@ from django.urls import path -from .views import (get_apps, get_endpoints, get_download, get_upload, process_github_webhook, process_stripe_webhook, - RepositorySubscriptionView) +from .views import ( + get_apps, + get_endpoints, + get_download, + get_upload, + process_github_webhook, + process_stripe_webhook, + RepositorySubscriptionView, +) -app_name = 'monitoring' +app_name = "monitoring" urlpatterns = [ - path('application', get_apps), - path('endpoint', get_endpoints), - path('download', get_download), - path('download/<int:download_id>', get_download), - path('upload', get_upload), - path('upload/<int:upload_id>', get_upload), - path('reposubscription', RepositorySubscriptionView.as_view()), - path('reposubscription/<int:subscription_id>', RepositorySubscriptionView.as_view()), - path('github/webhook/<str:subscription_token>', process_github_webhook), - path('stripe/webhook', process_stripe_webhook, name='stripe_webhook'), + path("application", get_apps), + path("endpoint", get_endpoints), + path("download", get_download), + path("download/<int:download_id>", get_download), + path("upload", get_upload), + path("upload/<int:upload_id>", get_upload), + path("reposubscription", 
RepositorySubscriptionView.as_view()), + path("reposubscription/<int:subscription_id>", RepositorySubscriptionView.as_view()), + path("github/webhook/<str:subscription_token>", process_github_webhook), + path("stripe/webhook", process_stripe_webhook, name="stripe_webhook"), ] diff --git a/breathecode/monitoring/views.py b/breathecode/monitoring/views.py index 1e73e45e5..eab4ab1bf 100644 --- a/breathecode/monitoring/views.py +++ b/breathecode/monitoring/views.py @@ -35,22 +35,22 @@ def get_stripe_webhook_secret(): - return os.getenv('STRIPE_WEBHOOK_SECRET', '') + return os.getenv("STRIPE_WEBHOOK_SECRET", "") -@api_view(['GET']) +@api_view(["GET"]) @permission_classes([AllowAny]) def get_endpoints(request): return Response([], status=status.HTTP_200_OK) -@api_view(['GET']) +@api_view(["GET"]) @permission_classes([AllowAny]) def get_apps(request): return Response([], status=status.HTTP_200_OK) -@api_view(['GET']) +@api_view(["GET"]) @permission_classes([AllowAny]) def get_download(request, download_id=None): lang = get_user_language(request) @@ -62,32 +62,35 @@ def get_download(request, download_id=None): if download_id is not None: download = CSVDownload.objects.filter(id=download_id).first() if download is None: - raise ValidationException(f'CSV Download {download_id} not found', code=status.HTTP_404_NOT_FOUND) + raise ValidationException(f"CSV Download {download_id} not found", code=status.HTTP_404_NOT_FOUND) - raw = request.GET.get('raw', '') - if raw == 'true': + raw = request.GET.get("raw", "") + if raw == "true": from ..services.google_cloud import Storage try: storage = Storage() - cloud_file = storage.file(os.getenv('DOWNLOADS_BUCKET', None), download.name) + cloud_file = storage.file(os.getenv("DOWNLOADS_BUCKET", None), download.name) buffer = cloud_file.stream_download() except CircuitBreakerError: - raise ValidationException(translation( - lang, - en='The circuit breaker is open due to an error, please try again later', - es='El circuit breaker está abierto debido a un error, por favor intente más tarde', - slug='circuit-breaker-open'), - slug='circuit-breaker-open', - data={'service': 'Google Cloud Storage'}, - silent=True, - code=503) + raise ValidationException( + translation( + lang, + en="The circuit breaker is open due to an error, please try again later", + es="El circuit breaker está abierto debido a un error, por favor intente más tarde", + slug="circuit-breaker-open", + ), + slug="circuit-breaker-open", + data={"service": "Google Cloud Storage"}, + silent=True, + code=503, + ) return StreamingHttpResponse( buffer.all(), - content_type='text/csv', - headers={'Content-Disposition': f'attachment; filename={download.name}'}, + content_type="text/csv", + headers={"Content-Disposition": f"attachment; filename={download.name}"}, ) else: serializer = CSVDownloadSmallSerializer(download, many=False) @@ -99,7 +102,7 @@ def get_download(request, download_id=None): return Response(serializer.data, status=status.HTTP_200_OK) -@api_view(['GET']) +@api_view(["GET"]) @permission_classes([AllowAny]) def get_upload(request, upload_id=None): @@ -110,7 +113,7 @@ def get_upload(request, upload_id=None): if upload_id is not None: upload = CSVUpload.objects.filter(id=upload_id).first() if upload is None: - raise ValidationException(f'CSV Upload {upload_id} not found', code=status.HTTP_404_NOT_FOUND) + raise ValidationException(f"CSV Upload {upload_id} not found", code=status.HTTP_404_NOT_FOUND) serializer = CSVUploadSmallSerializer(upload, many=False) return Response(serializer.data, 
status=status.HTTP_200_OK) @@ -121,52 +124,54 @@ def get_upload(request, upload_id=None): return Response(serializer.data, status=status.HTTP_200_OK) -@api_view(['POST']) +@api_view(["POST"]) @permission_classes([AllowAny]) def process_github_webhook(request, subscription_token): subscription = RepositorySubscription.objects.filter(token=subscription_token).first() if subscription is None: - raise ValidationException(f'Subscription not found with token {subscription_token}') + raise ValidationException(f"Subscription not found with token {subscription_token}") - if subscription.status == 'DISABLED': - logger.debug('Ignored because subscription has been disabled') + if subscription.status == "DISABLED": + logger.debug("Ignored because subscription has been disabled") async_unsubscribe_repo.delayed(subscription.hook_id, force_delete=False) - return Response('Ignored because subscription has been disabled', status=status.HTTP_200_OK) + return Response("Ignored because subscription has been disabled", status=status.HTTP_200_OK) academy_slugs = set([subscription.owner.slug] + [academy.slug for academy in subscription.shared_with.all()]) payload = request.data.copy() - payload['scope'] = request.headers['X-GitHub-Event'] + payload["scope"] = request.headers["X-GitHub-Event"] - if 'repository' in payload and subscription.repository != payload['repository']['html_url']: - raise ValidationException('Webhook was called from a different repository than its original subscription: ' + - payload['repository']['html_url']) + if "repository" in payload and subscription.repository != payload["repository"]["html_url"]: + raise ValidationException( + "Webhook was called from a different repository than its original subscription: " + + payload["repository"]["html_url"] + ) - if payload['scope'] == 'ping': - subscription.status = 'OPERATIONAL' - subscription.status_message = 'Answered github ping successfully' + if payload["scope"] == "ping": + subscription.status = "OPERATIONAL" + subscription.status_message = "Answered github ping successfully" subscription.save() - return Response('Ready', status=status.HTTP_200_OK) + return Response("Ready", status=status.HTTP_200_OK) subscription.last_call = timezone.now() subscription.save() for academy_slug in academy_slugs: webhook = add_github_webhook(payload, academy_slug) if webhook: - logger.debug('triggering signal github_webhook: ' + payload['scope']) + logger.debug("triggering signal github_webhook: " + payload["scope"]) github_webhook.send_robust(instance=webhook, sender=RepositoryWebhook) return Response(payload, status=status.HTTP_200_OK) else: - logger.debug(f'Error at processing github webhook from academy {academy_slug}') - raise ValidationException(f'Error at processing github webhook from academy {academy_slug}') + logger.debug(f"Error at processing github webhook from academy {academy_slug}") + raise ValidationException(f"Error at processing github webhook from academy {academy_slug}") -@api_view(['POST']) +@api_view(["POST"]) @permission_classes([AllowAny]) def process_stripe_webhook(request): event = None payload = request.data - sig_header = request.headers.get('Stripe-Signature', None) + sig_header = request.headers.get("Stripe-Signature", None) endpoint_secret = get_stripe_webhook_secret() try: @@ -176,24 +181,25 @@ def process_stripe_webhook(request): event = stripe.Webhook.construct_event(payload, sig_header, endpoint_secret) except ValueError: - raise ValidationException('Invalid payload', code=400, slug='invalid-payload') + raise 
ValidationException("Invalid payload", code=400, slug="invalid-payload") except stripe.error.SignatureVerificationError: - raise ValidationException('Not allowed', code=403, slug='not-allowed') + raise ValidationException("Not allowed", code=403, slug="not-allowed") if event := add_stripe_webhook(event): signals.stripe_webhook.send_robust(event=event, sender=event.__class__) - return Response({'success': True}) + return Response({"success": True}) class RepositorySubscriptionView(APIView, GenerateLookupsMixin): """ List all snippets, or create a new snippet. """ - extensions = APIViewExtensions(sort='-created_at', paginate=True) - @capable_of('read_asset') + extensions = APIViewExtensions(sort="-created_at", paginate=True) + + @capable_of("read_asset") def get(self, request, academy_id=None): handler = self.extensions(request) @@ -201,8 +207,8 @@ def get(self, request, academy_id=None): items = RepositorySubscription.objects.filter(Q(shared_with=_academy) | Q(owner=_academy)) lookup = {} - if 'repository' in self.request.GET: - param = self.request.GET.get('repository') + if "repository" in self.request.GET: + param = self.request.GET.get("repository") items = items.filter(repository=param) items = items.filter(**lookup) @@ -212,40 +218,36 @@ def get(self, request, academy_id=None): return handler.response(serializer.data) - @capable_of('crud_asset') + @capable_of("crud_asset") def post(self, request, academy_id=None): lang = get_user_language(request) - serializer = RepositorySubscriptionSerializer(data=request.data, - context={ - 'request': request, - 'academy': academy_id, - 'lang': lang - }) + serializer = RepositorySubscriptionSerializer( + data=request.data, context={"request": request, "academy": academy_id, "lang": lang} + ) if serializer.is_valid(): instance = serializer.save() return Response(RepoSubscriptionSmallSerializer(instance).data, status=status.HTTP_201_CREATED) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - @capable_of('crud_asset') + @capable_of("crud_asset") def put(self, request, academy_id=None, subscription_id=None): lang = get_user_language(request) subs = RepositorySubscription.objects.filter(id=subscription_id).first() if subs is None: raise ValidationException( - translation(lang, - en=f'No subscription has been found with id {subscription_id}', - es=f'No se ha encontrado una subscripcion con id {subscription_id}', - slug='subscription-not-found')) - - serializer = RepositorySubscriptionSerializer(subs, - data=request.data, - context={ - 'request': request, - 'academy': academy_id, - 'lang': lang - }) + translation( + lang, + en=f"No subscription has been found with id {subscription_id}", + es=f"No se ha encontrado una subscripcion con id {subscription_id}", + slug="subscription-not-found", + ) + ) + + serializer = RepositorySubscriptionSerializer( + subs, data=request.data, context={"request": request, "academy": academy_id, "lang": lang} + ) if serializer.is_valid(): instance = serializer.save() return Response(RepoSubscriptionSmallSerializer(instance).data, status=status.HTTP_201_CREATED) diff --git a/breathecode/notify/actions.py b/breathecode/notify/actions.py index 3879facc9..4c4564cde 100644 --- a/breathecode/notify/actions.py +++ b/breathecode/notify/actions.py @@ -17,8 +17,8 @@ from .models import Device, SlackChannel, SlackTeam, SlackUser, SlackUserTeam push_service = None -FIREBASE_KEY = os.getenv('FIREBASE_KEY', None) -if FIREBASE_KEY is not None and FIREBASE_KEY != '': +FIREBASE_KEY = os.getenv("FIREBASE_KEY", None) +if 
FIREBASE_KEY is not None and FIREBASE_KEY != "": push_service = FCMNotification(api_key=FIREBASE_KEY) logger = logging.getLogger(__name__) @@ -29,35 +29,37 @@ def send_email_message(template_slug, to, data=None, force=False, inline_css=Fal if data is None: data = {} - if to is None or to == '' or (isinstance(to, list) and len(to) == 0): - raise ValidationException(f'Invalid email to send notification to {str(to)}') + if to is None or to == "" or (isinstance(to, list) and len(to) == 0): + raise ValidationException(f"Invalid email to send notification to {str(to)}") if isinstance(to, list) == False: to = [to] - if os.getenv('EMAIL_NOTIFICATIONS_ENABLED', False) == 'TRUE' or force: - template = get_template_content(template_slug, data, ['email'], inline_css=inline_css, academy=academy) - - result = requests.post(f"https://api.mailgun.net/v3/{os.environ.get('MAILGUN_DOMAIN')}/messages", - auth=('api', os.environ.get('MAILGUN_API_KEY', '')), - data={ - 'from': f"4Geeks <mailgun@{os.environ.get('MAILGUN_DOMAIN')}>", - 'to': to, - 'subject': template['subject'], - 'text': template['text'], - 'html': template['html'] - }, - timeout=2) + if os.getenv("EMAIL_NOTIFICATIONS_ENABLED", False) == "TRUE" or force: + template = get_template_content(template_slug, data, ["email"], inline_css=inline_css, academy=academy) + + result = requests.post( + f"https://api.mailgun.net/v3/{os.environ.get('MAILGUN_DOMAIN')}/messages", + auth=("api", os.environ.get("MAILGUN_API_KEY", "")), + data={ + "from": f"4Geeks <mailgun@{os.environ.get('MAILGUN_DOMAIN')}>", + "to": to, + "subject": template["subject"], + "text": template["text"], + "html": template["html"], + }, + timeout=2, + ) if result.status_code != 200: - logger.error(f'Error sending email, mailgun status code: {str(result.status_code)}') + logger.error(f"Error sending email, mailgun status code: {str(result.status_code)}") logger.error(result.text) else: - logger.debug('Email notification ' + template_slug + ' sent') + logger.debug("Email notification " + template_slug + " sent") return result.status_code == 200 else: - logger.warning(f'Email to {to} not sent because EMAIL_NOTIFICATIONS_ENABLED != TRUE') + logger.warning(f"Email to {to} not sent because EMAIL_NOTIFICATIONS_ENABLED != TRUE") return True @@ -66,15 +68,15 @@ def send_sms(slug, phone_number, data=None, academy=None): if data is None: data = {} - template = get_template_content(slug, data, ['sms'], academy=academy) + template = get_template_content(slug, data, ["sms"], academy=academy) # Your Account Sid and Auth Token from twilio.com/console # DANGER! This is insecure. 
See http://twil.io/secure - twillio_sid = os.environ.get('TWILLIO_SID') - twillio_secret = os.environ.get('TWILLIO_SECRET') + twillio_sid = os.environ.get("TWILLIO_SID") + twillio_secret = os.environ.get("TWILLIO_SECRET") client = Client(twillio_sid, twillio_secret) try: - client.messages.create(body=template['sms'], from_='+15017122661', to='+1' + phone_number) + client.messages.create(body=template["sms"], from_="+15017122661", to="+1" + phone_number) return True except Exception: return False @@ -87,22 +89,22 @@ def send_slack(slug, slackuser=None, team=None, slackchannel=None, data=None, ac data = {} if academy: - data['COMPANY_INFO_EMAIL'] = academy.feedback_email - data['COMPANY_LEGAL_NAME'] = academy.legal_name or academy.name - data['COMPANY_LOGO'] = academy.logo_url - data['COMPANY_NAME'] = academy.name + data["COMPANY_INFO_EMAIL"] = academy.feedback_email + data["COMPANY_LEGAL_NAME"] = academy.legal_name or academy.name + data["COMPANY_LOGO"] = academy.logo_url + data["COMPANY_NAME"] = academy.name - if 'heading' not in data: - data['heading'] = academy.name + if "heading" not in data: + data["heading"] = academy.name remitent_id = None if slackuser is None and slackchannel is None: - message = 'No slack entity (user or cohort) was found or given' + message = "No slack entity (user or cohort) was found or given" logger.error(message) raise Exception(message) credentials = None - if team is not None and hasattr(team.owner, 'credentialsslack'): + if team is not None and hasattr(team.owner, "credentialsslack"): credentials = team.owner.credentialsslack if slackuser is not None: @@ -113,7 +115,7 @@ def send_slack(slug, slackuser=None, team=None, slackchannel=None, data=None, ac remitent_id = slackchannel.slack_id if slackchannel.team is None: - message = f'The slack channel {slackchannel.name} must belong to a slack team' + message = f"The slack channel {slackchannel.name} must belong to a slack team" logger.error(message) raise Exception(message) elif credentials is None: @@ -123,43 +125,40 @@ def send_slack(slug, slackuser=None, team=None, slackchannel=None, data=None, ac return send_slack_raw(slug, credentials.token, remitent_id, data) else: - message = 'Team owner not has slack credentials' + message = "Team owner not has slack credentials" logger.error(message) raise Exception(message) # if would like to specify slack channel or user id and team def send_slack_raw(slug, token, channel_id, data=None, academy=None): - logger.debug(f'Sending slack message to {str(channel_id)}') + logger.debug(f"Sending slack message to {str(channel_id)}") if data is None: data = {} try: - if 'slack_payload' in data: - payload = data['slack_payload'] + if "slack_payload" in data: + payload = data["slack_payload"] else: - template = get_template_content(slug, data, ['slack'], academy=academy) - payload = json.loads(template['slack']) - if 'blocks' in payload: - payload = payload['blocks'] + template = get_template_content(slug, data, ["slack"], academy=academy) + payload = json.loads(template["slack"]) + if "blocks" in payload: + payload = payload["blocks"] # for modals mainly - meta = '' - if 'private_metadata' in payload: - meta = payload['private_metadata'] + meta = "" + if "private_metadata" in payload: + meta = payload["private_metadata"] api = client.Slack(token) - data = api.post('chat.postMessage', { - 'channel': channel_id, - 'private_metadata': meta, - 'blocks': payload, - 'parse': 'full' - }) - logger.debug(f'Notification to {str(channel_id)} sent') + data = api.post( + "chat.postMessage", 
{"channel": channel_id, "private_metadata": meta, "blocks": payload, "parse": "full"} + ) + logger.debug(f"Notification to {str(channel_id)} sent") return True except Exception: - logger.exception(f'Error sending notification to {str(channel_id)}') + logger.exception(f"Error sending notification to {str(channel_id)}") return False @@ -168,22 +167,24 @@ def send_fcm(slug, registration_ids, data=None, academy=None): if data is None: data = {} - if (len(registration_ids) > 0 and push_service): - template = get_template_content(slug, data, ['email', 'fms'], academy=academy) + if len(registration_ids) > 0 and push_service: + template = get_template_content(slug, data, ["email", "fms"], academy=academy) - if 'fms' not in template: - raise APIException('The template ' + slug + ' does not seem to have a valid FMS version') + if "fms" not in template: + raise APIException("The template " + slug + " does not seem to have a valid FMS version") - message_title = template['subject'] - message_body = template['fms'] - if 'DATA' not in data: - raise Exception('There is no data for the notification') - message_data = data['DATA'] + message_title = template["subject"] + message_body = template["fms"] + if "DATA" not in data: + raise Exception("There is no data for the notification") + message_data = data["DATA"] - result = push_service.notify_multiple_devices(registration_ids=registration_ids, - message_title=message_title, - message_body=message_body, - data_message=message_data) + result = push_service.notify_multiple_devices( + registration_ids=registration_ids, + message_title=message_title, + message_body=message_body, + data_message=message_data, + ) # if(result["failure"] or not result["success"]): # raise APIException("Problem sending the notification") @@ -208,16 +209,16 @@ def get_template_content(slug, data=None, formats=None, inline_css=False, academ if data is None: data = {} - #d = Context({ 'username': username }) + # d = Context({ 'username': username }) con = { - 'API_URL': os.environ.get('API_URL'), - 'COMPANY_NAME': os.environ.get('COMPANY_NAME', ''), - 'COMPANY_CONTACT_URL': os.environ.get('COMPANY_CONTACT_URL', ''), - 'COMPANY_LEGAL_NAME': os.environ.get('COMPANY_LEGAL_NAME', ''), - 'COMPANY_ADDRESS': os.environ.get('COMPANY_ADDRESS', ''), - 'style__success': '#99ccff', - 'style__danger': '#ffcccc', - 'style__secondary': '#ededed', + "API_URL": os.environ.get("API_URL"), + "COMPANY_NAME": os.environ.get("COMPANY_NAME", ""), + "COMPANY_CONTACT_URL": os.environ.get("COMPANY_CONTACT_URL", ""), + "COMPANY_LEGAL_NAME": os.environ.get("COMPANY_LEGAL_NAME", ""), + "COMPANY_ADDRESS": os.environ.get("COMPANY_ADDRESS", ""), + "style__success": "#99ccff", + "style__danger": "#ffcccc", + "style__secondary": "#ededed", } z = con.copy() # start with x's keys and values @@ -226,48 +227,48 @@ def get_template_content(slug, data=None, formats=None, inline_css=False, academ templates = {} if academy: - z['COMPANY_INFO_EMAIL'] = academy.feedback_email - z['COMPANY_LEGAL_NAME'] = academy.legal_name or academy.name - z['COMPANY_LOGO'] = academy.logo_url - z['COMPANY_NAME'] = academy.name - - if 'heading' not in z: - z['heading'] = academy.name - - if formats is None or 'email' in formats: - if 'SUBJECT' in z: - templates['SUBJECT'] = z['SUBJECT'] - templates['subject'] = z['SUBJECT'] - elif 'subject' in z: - templates['SUBJECT'] = z['subject'] - templates['subject'] = z['subject'] + z["COMPANY_INFO_EMAIL"] = academy.feedback_email + z["COMPANY_LEGAL_NAME"] = academy.legal_name or academy.name + 
z["COMPANY_LOGO"] = academy.logo_url + z["COMPANY_NAME"] = academy.name + + if "heading" not in z: + z["heading"] = academy.name + + if formats is None or "email" in formats: + if "SUBJECT" in z: + templates["SUBJECT"] = z["SUBJECT"] + templates["subject"] = z["SUBJECT"] + elif "subject" in z: + templates["SUBJECT"] = z["subject"] + templates["subject"] = z["subject"] else: - templates['SUBJECT'] = 'No subject specified', - templates['subject'] = 'No subject specified' + templates["SUBJECT"] = ("No subject specified",) + templates["subject"] = "No subject specified" - plaintext = get_template(slug + '.txt') - html = get_template(slug + '.html') - templates['text'] = plaintext.render(z) - templates['html'] = html.render(z) + plaintext = get_template(slug + ".txt") + html = get_template(slug + ".html") + templates["text"] = plaintext.render(z) + templates["html"] = html.render(z) - if formats is not None and 'html' in formats: - html = get_template(slug + '.html') - templates['html'] = html.render(z) + if formats is not None and "html" in formats: + html = get_template(slug + ".html") + templates["html"] = html.render(z) - if 'html' in templates and inline_css: - templates['html'] = transform(templates['html']) + if "html" in templates and inline_css: + templates["html"] = transform(templates["html"]) - if formats is not None and 'slack' in formats: - fms = get_template(slug + '.slack') - templates['slack'] = fms.render(z) + if formats is not None and "slack" in formats: + fms = get_template(slug + ".slack") + templates["slack"] = fms.render(z) - if formats is not None and 'fms' in formats: - fms = get_template(slug + '.fms') - templates['fms'] = fms.render(z) + if formats is not None and "fms" in formats: + fms = get_template(slug + ".fms") + templates["fms"] = fms.render(z) - if formats is not None and 'sms' in formats: - sms = get_template(slug + '.sms') - templates['sms'] = sms.render(z) + if formats is not None and "sms" in formats: + sms = get_template(slug + ".sms") + templates["sms"] = sms.render(z) return templates @@ -275,49 +276,58 @@ def get_template_content(slug, data=None, formats=None, inline_css=False, academ def sync_slack_team_channel(team_id): from breathecode.authenticate.models import CredentialsSlack - logger.debug(f'Sync slack team {team_id}: looking for channels') + logger.debug(f"Sync slack team {team_id}: looking for channels") team = SlackTeam.objects.filter(id=team_id).first() if team is None: - raise Exception('Invalid team id: ' + str(team_id)) + raise Exception("Invalid team id: " + str(team_id)) credentials = CredentialsSlack.objects.filter(team_id=team.slack_id).first() if credentials is None or credentials.token is None: - raise Exception(f'No credentials found for this team {team_id}') + raise Exception(f"No credentials found for this team {team_id}") # Starting to sync, I need to reset the status - team.sync_status = 'INCOMPLETED' + team.sync_status = "INCOMPLETED" team.synqued_at = timezone.now() team.save() api = client.Slack(credentials.token) - data = api.get('conversations.list', { - 'types': 'public_channel,private_channel', - 'limit': 300, - }) - - channels = data['channels'] - while 'response_metadata' in data and 'next_cursor' in data['response_metadata'] and data['response_metadata'][ - 'next_cursor'] != '': - data = api.get('conversations.list', { - 'limit': 300, - 'cursor': data['response_metadata']['next_cursor'], - 'types': 'public_channel,private_channel', - }) - channels = channels + data['channels'] - - logger.debug(f'Found 
{str(len(channels))} channels, starting to sync') + data = api.get( + "conversations.list", + { + "types": "public_channel,private_channel", + "limit": 300, + }, + ) + + channels = data["channels"] + while ( + "response_metadata" in data + and "next_cursor" in data["response_metadata"] + and data["response_metadata"]["next_cursor"] != "" + ): + data = api.get( + "conversations.list", + { + "limit": 300, + "cursor": data["response_metadata"]["next_cursor"], + "types": "public_channel,private_channel", + }, + ) + channels = channels + data["channels"] + + logger.debug(f"Found {str(len(channels))} channels, starting to sync") for channel in channels: # only sync channels - if channel['is_channel'] == False and channel['is_group'] == False and channel['is_general'] == False: + if channel["is_channel"] == False and channel["is_group"] == False and channel["is_general"] == False: continue # will raise exception if it fails sync_slack_channel(channel, team) # finished sync, status back to normal - team.sync_status = 'COMPLETED' + team.sync_status = "COMPLETED" team.save() return True @@ -326,42 +336,45 @@ def sync_slack_team_channel(team_id): def sync_slack_team_users(team_id): from breathecode.authenticate.models import CredentialsSlack - logger.debug(f'Sync slack team {team_id}: looking for users') + logger.debug(f"Sync slack team {team_id}: looking for users") team = SlackTeam.objects.filter(id=team_id).first() if team is None: - raise Exception('Invalid team id: ' + str(team_id)) + raise Exception("Invalid team id: " + str(team_id)) credentials = CredentialsSlack.objects.filter(team_id=team.slack_id).first() if credentials is None: - raise Exception(f'No credentials found for this team {team_id}') + raise Exception(f"No credentials found for this team {team_id}") # Starting to sync, I need to reset the status - team.sync_status = 'INCOMPLETED' + team.sync_status = "INCOMPLETED" team.synqued_at = timezone.now() team.save() api = client.Slack(credentials.token) - data = api.get('users.list', {'limit': 300}) - - members = data['members'] - while 'response_metadata' in data and 'next_cursor' in data['response_metadata'] and data['response_metadata'][ - 'next_cursor'] != '': - data = api.get('users.list', {'limit': 300, 'cursor': data['response_metadata']['next_cursor']}) - members = members + data['members'] - - logger.debug(f'Found {str(len(members))} members, starting to sync') + data = api.get("users.list", {"limit": 300}) + + members = data["members"] + while ( + "response_metadata" in data + and "next_cursor" in data["response_metadata"] + and data["response_metadata"]["next_cursor"] != "" + ): + data = api.get("users.list", {"limit": 300, "cursor": data["response_metadata"]["next_cursor"]}) + members = members + data["members"] + + logger.debug(f"Found {str(len(members))} members, starting to sync") for member in members: # ignore bots - if member['is_bot'] or member['name'] == 'slackbot': + if member["is_bot"] or member["name"] == "slackbot": continue # will raise exception if it fails sync_slack_user(member, team) # finished sync, status back to normal - team.sync_status = 'COMPLETED' + team.sync_status = "COMPLETED" team.save() return True @@ -369,26 +382,29 @@ def sync_slack_team_users(team_id): def sync_slack_user(payload, team=None): - if team is None and 'team_id' in payload: - team = SlackTeam.objects.filter(id=payload['team_id']).first() + if team is None and "team_id" in payload: + team = SlackTeam.objects.filter(id=payload["team_id"]).first() if team is None: - raise 
Exception('Invalid or missing team') + raise Exception("Invalid or missing team") - slack_user = SlackUser.objects.filter(slack_id=payload['id']).first() + slack_user = SlackUser.objects.filter(slack_id=payload["id"]).first() user = None if slack_user is None: - slack_user = SlackUser(slack_id=payload['id'], ) + slack_user = SlackUser( + slack_id=payload["id"], + ) slack_user.save() - if 'email' not in payload['profile']: - logger.fatal('User without email') + if "email" not in payload["profile"]: + logger.fatal("User without email") logger.fatal(payload) - raise Exception('Slack users are not coming with emails from the API') + raise Exception("Slack users are not coming with emails from the API") - cohort_user = CohortUser.objects.filter(user__email=payload['profile']['email'], - cohort__academy__id=team.academy.id).first() + cohort_user = CohortUser.objects.filter( + user__email=payload["profile"]["email"], cohort__academy__id=team.academy.id + ).first() if cohort_user is not None: user = cohort_user.user else: @@ -405,21 +421,21 @@ def sync_slack_user(payload, team=None): ) if user is None: - user_team.sync_status = 'INCOMPLETED' - user_team.sync_message = 'No user found on breathecode with this email' + user_team.sync_status = "INCOMPLETED" + user_team.sync_message = "No user found on breathecode with this email" else: - user_team.sync_status = 'COMPLETED' + user_team.sync_status = "COMPLETED" user_team.save() - slack_user.status_text = payload['profile']['status_text'] - slack_user.status_emoji = payload['profile']['status_emoji'] + slack_user.status_text = payload["profile"]["status_text"] + slack_user.status_emoji = payload["profile"]["status_emoji"] - if 'real_name' in payload: - slack_user.real_name = payload['real_name'] + if "real_name" in payload: + slack_user.real_name = payload["real_name"] - slack_user.display_name = payload['name'] + slack_user.display_name = payload["name"] slack_user.user = user - slack_user.email = payload['profile']['email'] + slack_user.email = payload["profile"]["email"] slack_user.synqued_at = timezone.now() slack_user.save() @@ -430,32 +446,32 @@ def sync_slack_channel(payload, team=None): logger.debug(f"Synching channel {payload['name_normalized']}...") - if team is None and 'team_id' in payload: - team = SlackTeam.objects.filter(id=payload['team_id']).first() + if team is None and "team_id" in payload: + team = SlackTeam.objects.filter(id=payload["team_id"]).first() if team is None: - raise Exception('Invalid or missing team') + raise Exception("Invalid or missing team") - slack_channel = SlackChannel.objects.filter(slack_id=payload['id']).first() + slack_channel = SlackChannel.objects.filter(slack_id=payload["id"]).first() if slack_channel is None: - cohort = Cohort.objects.filter(slug=payload['name_normalized']).first() + cohort = Cohort.objects.filter(slug=payload["name_normalized"]).first() if cohort is None: logger.warning(f"Slack channel {payload['name_normalized']} has no corresponding cohort in breathecode") slack_channel = SlackChannel( - slack_id=payload['id'], + slack_id=payload["id"], team=team, - sync_status='INCOMPLETED', + sync_status="INCOMPLETED", cohort=cohort, ) - slack_channel.name = payload['name_normalized'] - slack_channel.topic = payload['topic'] - slack_channel.purpose = payload['purpose'] + slack_channel.name = payload["name_normalized"] + slack_channel.topic = payload["topic"] + slack_channel.purpose = payload["purpose"] slack_channel.synqued_at = timezone.now() - slack_channel.sync_status = 'COMPLETED' + 
slack_channel.sync_status = "COMPLETED" slack_channel.save() return slack_channel diff --git a/breathecode/notify/admin.py b/breathecode/notify/admin.py index e180461cb..662cbef2a 100644 --- a/breathecode/notify/admin.py +++ b/breathecode/notify/admin.py @@ -21,20 +21,20 @@ # Register your models here. @admin.register(Device) class DeviceAdmin(admin.ModelAdmin): - list_display = ('user', 'registration_id') + list_display = ("user", "registration_id") -@admin.display(description='Import channels from slack') +@admin.display(description="Import channels from slack") def sync_channels(modeladmin, request, queryset): - logger.debug('Bulk sync channels') + logger.debug("Bulk sync channels") teams = queryset.all() for team in teams: sync_slack_team_channel(team.id) -@admin.display(description='Import users from slack') +@admin.display(description="Import users from slack") def sync_users(modeladmin, request, queryset): - logger.debug('Bulk sync channels') + logger.debug("Bulk sync channels") teams = queryset.all() for team in teams: async_slack_team_users.delay(team.id) @@ -42,89 +42,101 @@ def sync_users(modeladmin, request, queryset): @admin.register(SlackTeam) class SlackTeamAdmin(admin.ModelAdmin): - list_display = ('slack_id', 'sync_status', 'synqued_at', 'academy', 'name', 'owner', 'updated_at') + list_display = ("slack_id", "sync_status", "synqued_at", "academy", "name", "owner", "updated_at") actions = [sync_channels, sync_users] @admin.register(SlackUser) class SlackUserAdmin(admin.ModelAdmin, AdminExportCsvMixin): search_fields = [ - 'slack_id', 'display_name', 'real_name', 'email', 'user__email', 'user__first_name', 'user__last_name' + "slack_id", + "display_name", + "real_name", + "email", + "user__email", + "user__first_name", + "user__last_name", ] - raw_id_fields = ['user'] - list_display = ('slack_id', 'user_link', 'display_name', 'real_name', 'email', 'updated_at') - actions = ['export_as_csv'] + raw_id_fields = ["user"] + list_display = ("slack_id", "user_link", "display_name", "real_name", "email", "updated_at") + actions = ["export_as_csv"] def user_link(self, obj): if obj.user is not None: - return format_html('<a href="%s">%s</a>' % - (reverse('admin:auth_user_change', args=(obj.user.id, )), escape(obj))) + return format_html( + '<a href="%s">%s</a>' % (reverse("admin:auth_user_change", args=(obj.user.id,)), escape(obj)) + ) else: - return 'Missing BC user' + return "Missing BC user" @admin.register(SlackUserTeam) class SlackUserTeamAdmin(admin.ModelAdmin, AdminExportCsvMixin): search_fields = [ - 'slack_user__email', 'slack_user__user__first_name', 'slack_user__user__last_name', 'slack_team__id', - 'slack_team__name' + "slack_user__email", + "slack_user__user__first_name", + "slack_user__user__last_name", + "slack_team__id", + "slack_team__name", ] - raw_id_fields = ['slack_user'] - list_display = ('slack_user', 'sync_status', 'breathecode_user', 'slack_team', 'created_at') - list_filter = ['slack_team__academy__slug', 'slack_team__name', 'sync_status'] - actions = ['export_as_csv'] + raw_id_fields = ["slack_user"] + list_display = ("slack_user", "sync_status", "breathecode_user", "slack_team", "created_at") + list_filter = ["slack_team__academy__slug", "slack_team__name", "sync_status"] + actions = ["export_as_csv"] def breathecode_user(self, obj): if obj.slack_user.user is not None: return format_html( - '<a href="%s">%s</a>' % - (reverse('admin:auth_user_change', args=(obj.slack_user.user.id, )), escape(obj.slack_user.user))) + '<a href="%s">%s</a>' + % 
(reverse("admin:auth_user_change", args=(obj.slack_user.user.id,)), escape(obj.slack_user.user)) + ) else: - return 'Missing BC user' + return "Missing BC user" @admin.register(SlackChannel) class SlackChannelAdmin(admin.ModelAdmin, AdminExportCsvMixin): - search_fields = ['name', 'cohort__name'] - list_display = ('slack_id', 'sync_status', 'cohort_link', 'name', 'synqued_at') - list_filter = ['sync_status', 'team__slack_id', 'team__academy__slug'] - actions = ['export_as_csv'] + search_fields = ["name", "cohort__name"] + list_display = ("slack_id", "sync_status", "cohort_link", "name", "synqued_at") + list_filter = ["sync_status", "team__slack_id", "team__academy__slug"] + actions = ["export_as_csv"] def cohort_link(self, obj): if obj.cohort is not None: - return format_html('<a href="%s">%s</a>' % - (reverse('admin:auth_user_change', args=(obj.cohort.id, )), escape(obj))) + return format_html( + '<a href="%s">%s</a>' % (reverse("admin:auth_user_change", args=(obj.cohort.id,)), escape(obj)) + ) else: - return 'No BC cohort' + return "No BC cohort" -@admin.display(description='💬 Send slack test notification') +@admin.display(description="💬 Send slack test notification") def test_user_notification(modeladmin, request, queryset): users = queryset.all() for u in users: - logger.debug(f'Testing slack notification for {u.id}') - send_slack('test_message', slackuser=u.slackuser, data={'MESSAGE': 'Hello World'}) + logger.debug(f"Testing slack notification for {u.id}") + send_slack("test_message", slackuser=u.slackuser, data={"MESSAGE": "Hello World"}) @admin.register(UserProxy) class UserAdmin(UserAdmin): - list_display = ('username', 'email', 'first_name', 'last_name') + list_display = ("username", "email", "first_name", "last_name") actions = [test_user_notification] -@admin.display(description='💬 Send slack test notification') +@admin.display(description="💬 Send slack test notification") def test_cohort_notification(modeladmin, request, queryset): cohorts = queryset.all() for c in cohorts: - logger.debug(f'Testing slack notification for cohort {c.id}') - send_slack('test_message', slackchannel=c.slackchannel, data={'MESSAGE': 'Hello World'}) + logger.debug(f"Testing slack notification for cohort {c.id}") + send_slack("test_message", slackchannel=c.slackchannel, data={"MESSAGE": "Hello World"}) @admin.register(CohortProxy) class CohortAdmin(AdmissionsCohortAdmin): - list_display = ('id', 'slug', 'stage', 'name', 'kickoff_date', 'syllabus_version', 'schedule') + list_display = ("id", "slug", "stage", "name", "kickoff_date", "syllabus_version", "schedule") actions = [test_cohort_notification] @@ -144,7 +156,7 @@ class Meta: def __init__(self, *args, **kwargs): super(HookForm, self).__init__(*args, **kwargs) - self.fields['event'] = forms.ChoiceField(choices=self.get_admin_events()) + self.fields["event"] = forms.ChoiceField(choices=self.get_admin_events()) @classmethod def get_admin_events(cls): @@ -152,11 +164,11 @@ def get_admin_events(cls): class HookAdmin(admin.ModelAdmin): - list_display = ['user', 'target', 'event', 'service_id', 'total_calls', 'last_response_code', 'last_call_at'] - search_fields = ['user__username', 'event', 'target', 'service_id'] - list_filter = ['event', 'last_response_code'] + list_display = ["user", "target", "event", "service_id", "total_calls", "last_response_code", "last_call_at"] + search_fields = ["user__username", "event", "target", "service_id"] + list_filter = ["event", "last_response_code"] raw_id_fields = [ - 'user', + "user", ] form = HookForm @@ -166,6 
+178,6 @@ class HookAdmin(admin.ModelAdmin): @admin.register(HookError) class HookErrorAdmin(admin.ModelAdmin): - list_display = ['event', 'message', 'created_at', 'updated_at'] - search_fields = ['message', 'event'] - list_filter = ['event'] + list_display = ["event", "message", "created_at", "updated_at"] + search_fields = ["message", "event"] + list_filter = ["event"] diff --git a/breathecode/notify/apps.py b/breathecode/notify/apps.py index a042c13f4..a5092cd55 100644 --- a/breathecode/notify/apps.py +++ b/breathecode/notify/apps.py @@ -2,7 +2,7 @@ class NotifyConfig(AppConfig): - name = 'breathecode.notify' + name = "breathecode.notify" def ready(self): from . import receivers # noqa: F401 diff --git a/breathecode/notify/management/commands/sync_messaging.py b/breathecode/notify/management/commands/sync_messaging.py index 2f5664278..c65f4eb01 100644 --- a/breathecode/notify/management/commands/sync_messaging.py +++ b/breathecode/notify/management/commands/sync_messaging.py @@ -3,32 +3,32 @@ from ...models import SlackTeam from ...tasks import async_slack_team_users, async_slack_team_channel -HOST = os.environ.get('OLD_BREATHECODE_API') -DATETIME_FORMAT = '%Y-%m-%d' +HOST = os.environ.get("OLD_BREATHECODE_API") +DATETIME_FORMAT = "%Y-%m-%d" class Command(BaseCommand): - help = 'Sync academies from old breathecode' + help = "Sync academies from old breathecode" def add_arguments(self, parser): - parser.add_argument('entity', type=str) + parser.add_argument("entity", type=str) parser.add_argument( - '--cohorts', + "--cohorts", type=str, default=None, - help='Cohorts slugs to sync', + help="Cohorts slugs to sync", ) parser.add_argument( - '--students', + "--students", type=str, default=None, - help='Cohorts slugs to sync', + help="Cohorts slugs to sync", ) - parser.add_argument('--limit', action='store', dest='limit', type=int, default=0, help='How many to import') + parser.add_argument("--limit", action="store", dest="limit", type=int, default=0, help="How many to import") def handle(self, *args, **options): try: - func = getattr(self, options['entity'], 'entity_not_found') + func = getattr(self, options["entity"], "entity_not_found") except TypeError: print(f'Sync method for {options["entity"]} no Found!') func(options) diff --git a/breathecode/notify/migrations/0001_initial.py b/breathecode/notify/migrations/0001_initial.py index 378c79a46..74f1e53b2 100644 --- a/breathecode/notify/migrations/0001_initial.py +++ b/breathecode/notify/migrations/0001_initial.py @@ -10,115 +10,149 @@ class Migration(migrations.Migration): initial = True dependencies = [ - ('admissions', '0011_auto_20201006_0058'), - ('authenticate', '0010_auto_20201105_0531'), + ("admissions", "0011_auto_20201006_0058"), + ("authenticate", "0010_auto_20201105_0531"), migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( - name='SlackTeam', + name="SlackTeam", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('slack_id', models.CharField(max_length=50)), - ('name', models.CharField(max_length=100)), - ('sync_status', - models.CharField(choices=[('INCOMPLETED', 'Incompleted'), ('COMPLETED', 'Completed')], - default='INCOMPLETED', - help_text='Automatically set when synqued from slack', - max_length=15)), - ('sync_message', - models.CharField(blank=True, - default=None, - help_text='Contains any success or error messages depending on the status', - max_length=100, - null=True)), - ('synqued_at', 
models.DateTimeField(blank=True, default=None, null=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('academy', - models.OneToOneField(blank=True, on_delete=django.db.models.deletion.CASCADE, - to='admissions.academy')), - ('credentials', - models.OneToOneField(blank=True, - on_delete=django.db.models.deletion.CASCADE, - to='authenticate.credentialsslack')), - ('owner', - models.OneToOneField(blank=True, - on_delete=django.db.models.deletion.CASCADE, - to=settings.AUTH_USER_MODEL)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("slack_id", models.CharField(max_length=50)), + ("name", models.CharField(max_length=100)), + ( + "sync_status", + models.CharField( + choices=[("INCOMPLETED", "Incompleted"), ("COMPLETED", "Completed")], + default="INCOMPLETED", + help_text="Automatically set when synqued from slack", + max_length=15, + ), + ), + ( + "sync_message", + models.CharField( + blank=True, + default=None, + help_text="Contains any success or error messages depending on the status", + max_length=100, + null=True, + ), + ), + ("synqued_at", models.DateTimeField(blank=True, default=None, null=True)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ( + "academy", + models.OneToOneField( + blank=True, on_delete=django.db.models.deletion.CASCADE, to="admissions.academy" + ), + ), + ( + "credentials", + models.OneToOneField( + blank=True, on_delete=django.db.models.deletion.CASCADE, to="authenticate.credentialsslack" + ), + ), + ( + "owner", + models.OneToOneField( + blank=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL + ), + ), ], ), migrations.CreateModel( - name='SlackUser', + name="SlackUser", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('slack_id', models.CharField(max_length=50)), - ('status_text', models.CharField(blank=True, max_length=255, null=True)), - ('status_emoji', models.CharField(blank=True, max_length=100, null=True)), - ('real_name', models.CharField(blank=True, max_length=100, null=True)), - ('display_name', models.CharField(blank=True, max_length=100, null=True)), - ('email', models.CharField(blank=True, max_length=100, null=True)), - ('sync_status', - models.CharField(choices=[('INCOMPLETED', 'Incompleted'), ('COMPLETED', 'Completed')], - default='INCOMPLETED', - max_length=15)), - ('sync_message', - models.CharField(blank=True, - default=None, - help_text='Contains any success or error messages depending on the status', - max_length=100, - null=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('team', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='notify.slackteam')), - ('user', - models.ForeignKey(blank=True, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to=settings.AUTH_USER_MODEL)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("slack_id", models.CharField(max_length=50)), + ("status_text", models.CharField(blank=True, max_length=255, null=True)), + ("status_emoji", models.CharField(blank=True, max_length=100, null=True)), + ("real_name", models.CharField(blank=True, max_length=100, null=True)), + ("display_name", models.CharField(blank=True, max_length=100, null=True)), + ("email", 
models.CharField(blank=True, max_length=100, null=True)), + ( + "sync_status", + models.CharField( + choices=[("INCOMPLETED", "Incompleted"), ("COMPLETED", "Completed")], + default="INCOMPLETED", + max_length=15, + ), + ), + ( + "sync_message", + models.CharField( + blank=True, + default=None, + help_text="Contains any success or error messages depending on the status", + max_length=100, + null=True, + ), + ), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="notify.slackteam")), + ( + "user", + models.ForeignKey( + blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL + ), + ), ], ), migrations.CreateModel( - name='SlackChannel', + name="SlackChannel", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('slack_id', models.CharField(max_length=50)), - ('name', models.CharField(blank=True, max_length=100, null=True)), - ('topic', models.CharField(blank=True, max_length=255, null=True)), - ('purpose', models.CharField(blank=True, max_length=100, null=True)), - ('sync_status', - models.CharField(choices=[('INCOMPLETED', 'Incompleted'), ('COMPLETED', 'Completed')], - default='INCOMPLETED', - max_length=15)), - ('sync_message', - models.CharField(blank=True, - default=None, - help_text='Contains any success or error messages depending on the status', - max_length=100, - null=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('cohort', - models.ForeignKey(blank=True, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to=settings.AUTH_USER_MODEL)), - ('team', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='notify.slackteam')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("slack_id", models.CharField(max_length=50)), + ("name", models.CharField(blank=True, max_length=100, null=True)), + ("topic", models.CharField(blank=True, max_length=255, null=True)), + ("purpose", models.CharField(blank=True, max_length=100, null=True)), + ( + "sync_status", + models.CharField( + choices=[("INCOMPLETED", "Incompleted"), ("COMPLETED", "Completed")], + default="INCOMPLETED", + max_length=15, + ), + ), + ( + "sync_message", + models.CharField( + blank=True, + default=None, + help_text="Contains any success or error messages depending on the status", + max_length=100, + null=True, + ), + ), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ( + "cohort", + models.ForeignKey( + blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL + ), + ), + ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="notify.slackteam")), ], ), migrations.CreateModel( - name='Device', + name="Device", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('registration_id', models.TextField(unique=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('user', - models.ForeignKey(blank=True, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to=settings.AUTH_USER_MODEL)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + 
("registration_id", models.TextField(unique=True)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ( + "user", + models.ForeignKey( + blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL + ), + ), ], ), ] diff --git a/breathecode/notify/migrations/0002_auto_20201105_0612.py b/breathecode/notify/migrations/0002_auto_20201105_0612.py index 0524e6410..293488986 100644 --- a/breathecode/notify/migrations/0002_auto_20201105_0612.py +++ b/breathecode/notify/migrations/0002_auto_20201105_0612.py @@ -7,28 +7,27 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0011_auto_20201006_0058'), - ('notify', '0001_initial'), + ("admissions", "0011_auto_20201006_0058"), + ("notify", "0001_initial"), ] operations = [ migrations.AddField( - model_name='slackchannel', - name='synqued_at', + model_name="slackchannel", + name="synqued_at", field=models.DateTimeField(blank=True, default=None, null=True), ), migrations.AddField( - model_name='slackuser', - name='synqued_at', + model_name="slackuser", + name="synqued_at", field=models.DateTimeField(blank=True, default=None, null=True), ), migrations.AlterField( - model_name='slackchannel', - name='cohort', - field=models.OneToOneField(blank=True, - default=1, - on_delete=django.db.models.deletion.CASCADE, - to='admissions.cohort'), + model_name="slackchannel", + name="cohort", + field=models.OneToOneField( + blank=True, default=1, on_delete=django.db.models.deletion.CASCADE, to="admissions.cohort" + ), preserve_default=False, ), ] diff --git a/breathecode/notify/migrations/0003_auto_20201105_0619.py b/breathecode/notify/migrations/0003_auto_20201105_0619.py index bd3f57316..6a7c4f353 100644 --- a/breathecode/notify/migrations/0003_auto_20201105_0619.py +++ b/breathecode/notify/migrations/0003_auto_20201105_0619.py @@ -6,18 +6,18 @@ class Migration(migrations.Migration): dependencies = [ - ('notify', '0002_auto_20201105_0612'), + ("notify", "0002_auto_20201105_0612"), ] operations = [ migrations.AlterField( - model_name='slackchannel', - name='purpose', + model_name="slackchannel", + name="purpose", field=models.CharField(blank=True, max_length=500, null=True), ), migrations.AlterField( - model_name='slackchannel', - name='topic', + model_name="slackchannel", + name="topic", field=models.CharField(blank=True, max_length=500, null=True), ), ] diff --git a/breathecode/notify/migrations/0004_auto_20201105_0620.py b/breathecode/notify/migrations/0004_auto_20201105_0620.py index 8fade2cd6..b373fe63f 100644 --- a/breathecode/notify/migrations/0004_auto_20201105_0620.py +++ b/breathecode/notify/migrations/0004_auto_20201105_0620.py @@ -7,18 +7,16 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0011_auto_20201006_0058'), - ('notify', '0003_auto_20201105_0619'), + ("admissions", "0011_auto_20201006_0058"), + ("notify", "0003_auto_20201105_0619"), ] operations = [ migrations.AlterField( - model_name='slackchannel', - name='cohort', - field=models.OneToOneField(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='admissions.cohort'), + model_name="slackchannel", + name="cohort", + field=models.OneToOneField( + blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to="admissions.cohort" + ), ), ] diff --git a/breathecode/notify/migrations/0005_auto_20201105_0650.py b/breathecode/notify/migrations/0005_auto_20201105_0650.py index 
86ea4d50b..45e17d741 100644 --- a/breathecode/notify/migrations/0005_auto_20201105_0650.py +++ b/breathecode/notify/migrations/0005_auto_20201105_0650.py @@ -10,42 +10,44 @@ class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ('admissions', '0011_auto_20201006_0058'), - ('auth', '0012_alter_user_first_name_max_length'), - ('notify', '0004_auto_20201105_0620'), + ("admissions", "0011_auto_20201006_0058"), + ("auth", "0012_alter_user_first_name_max_length"), + ("notify", "0004_auto_20201105_0620"), ] operations = [ migrations.CreateModel( - name='CohortProxy', + name="CohortProxy", fields=[], options={ - 'proxy': True, - 'indexes': [], - 'constraints': [], + "proxy": True, + "indexes": [], + "constraints": [], }, - bases=('admissions.cohort', ), + bases=("admissions.cohort",), ), migrations.CreateModel( - name='UserProxy', + name="UserProxy", fields=[], options={ - 'proxy': True, - 'indexes': [], - 'constraints': [], + "proxy": True, + "indexes": [], + "constraints": [], }, - bases=('auth.user', ), + bases=("auth.user",), managers=[ - ('objects', django.contrib.auth.models.UserManager()), + ("objects", django.contrib.auth.models.UserManager()), ], ), migrations.AlterField( - model_name='slackuser', - name='user', - field=models.OneToOneField(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to=settings.AUTH_USER_MODEL), + model_name="slackuser", + name="user", + field=models.OneToOneField( + blank=True, + default=None, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to=settings.AUTH_USER_MODEL, + ), ), ] diff --git a/breathecode/notify/migrations/0006_auto_20201111_2031.py b/breathecode/notify/migrations/0006_auto_20201111_2031.py index 6740d287c..cff2ecdc0 100644 --- a/breathecode/notify/migrations/0006_auto_20201111_2031.py +++ b/breathecode/notify/migrations/0006_auto_20201111_2031.py @@ -9,50 +9,58 @@ class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ('notify', '0005_auto_20201105_0650'), + ("notify", "0005_auto_20201105_0650"), ] operations = [ migrations.RemoveField( - model_name='slackuser', - name='sync_message', + model_name="slackuser", + name="sync_message", ), migrations.RemoveField( - model_name='slackuser', - name='sync_status', + model_name="slackuser", + name="sync_status", ), migrations.RemoveField( - model_name='slackuser', - name='synqued_at', + model_name="slackuser", + name="synqued_at", ), migrations.RemoveField( - model_name='slackuser', - name='team', + model_name="slackuser", + name="team", ), migrations.AlterField( - model_name='slackteam', - name='owner', + model_name="slackteam", + name="owner", field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL), ), migrations.CreateModel( - name='SlackUserTeam', + name="SlackUserTeam", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('sync_status', - models.CharField(choices=[('INCOMPLETED', 'Incompleted'), ('COMPLETED', 'Completed')], - default='INCOMPLETED', - max_length=15)), - ('sync_message', - models.CharField(blank=True, - default=None, - help_text='Contains any success or error messages depending on the status', - max_length=100, - null=True)), - ('synqued_at', models.DateTimeField(blank=True, default=None, null=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', 
models.DateTimeField(auto_now=True)), - ('slack_team', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='notify.slackteam')), - ('slack_user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='notify.slackuser')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "sync_status", + models.CharField( + choices=[("INCOMPLETED", "Incompleted"), ("COMPLETED", "Completed")], + default="INCOMPLETED", + max_length=15, + ), + ), + ( + "sync_message", + models.CharField( + blank=True, + default=None, + help_text="Contains any success or error messages depending on the status", + max_length=100, + null=True, + ), + ), + ("synqued_at", models.DateTimeField(blank=True, default=None, null=True)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("slack_team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="notify.slackteam")), + ("slack_user", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="notify.slackuser")), ], ), ] diff --git a/breathecode/notify/migrations/0007_auto_20201111_2218.py b/breathecode/notify/migrations/0007_auto_20201111_2218.py index a4704c7d3..2dab722cc 100644 --- a/breathecode/notify/migrations/0007_auto_20201111_2218.py +++ b/breathecode/notify/migrations/0007_auto_20201111_2218.py @@ -7,18 +7,20 @@ class Migration(migrations.Migration): dependencies = [ - ('authenticate', '0012_auto_20201110_0727'), - ('notify', '0006_auto_20201111_2031'), + ("authenticate", "0012_auto_20201110_0727"), + ("notify", "0006_auto_20201111_2031"), ] operations = [ migrations.AlterField( - model_name='slackteam', - name='credentials', - field=models.OneToOneField(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to='authenticate.credentialsslack'), + model_name="slackteam", + name="credentials", + field=models.OneToOneField( + blank=True, + default=None, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to="authenticate.credentialsslack", + ), ), ] diff --git a/breathecode/notify/migrations/0008_remove_slackteam_credentials.py b/breathecode/notify/migrations/0008_remove_slackteam_credentials.py index 15d11e088..003013817 100644 --- a/breathecode/notify/migrations/0008_remove_slackteam_credentials.py +++ b/breathecode/notify/migrations/0008_remove_slackteam_credentials.py @@ -6,12 +6,12 @@ class Migration(migrations.Migration): dependencies = [ - ('notify', '0007_auto_20201111_2218'), + ("notify", "0007_auto_20201111_2218"), ] operations = [ migrations.RemoveField( - model_name='slackteam', - name='credentials', + model_name="slackteam", + name="credentials", ), ] diff --git a/breathecode/notify/migrations/0009_hook.py b/breathecode/notify/migrations/0009_hook.py index 201b473d5..339ac0973 100644 --- a/breathecode/notify/migrations/0009_hook.py +++ b/breathecode/notify/migrations/0009_hook.py @@ -9,26 +9,28 @@ class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ('notify', '0008_remove_slackteam_credentials'), + ("notify", "0008_remove_slackteam_credentials"), ] operations = [ migrations.CreateModel( - name='Hook', + name="Hook", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('created', models.DateTimeField(auto_now_add=True)), - ('updated', models.DateTimeField(auto_now=True)), - ('event', models.CharField(db_index=True, 
max_length=64, verbose_name='Event')), - ('target', models.URLField(max_length=255, verbose_name='Target URL')), - ('user', - models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, - related_name='hooks', - to=settings.AUTH_USER_MODEL)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("created", models.DateTimeField(auto_now_add=True)), + ("updated", models.DateTimeField(auto_now=True)), + ("event", models.CharField(db_index=True, max_length=64, verbose_name="Event")), + ("target", models.URLField(max_length=255, verbose_name="Target URL")), + ( + "user", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, related_name="hooks", to=settings.AUTH_USER_MODEL + ), + ), ], options={ - 'abstract': False, - 'swappable': 'HOOK_CUSTOM_MODEL', + "abstract": False, + "swappable": "HOOK_CUSTOM_MODEL", }, ), ] diff --git a/breathecode/notify/migrations/0010_auto_20220901_0323.py b/breathecode/notify/migrations/0010_auto_20220901_0323.py index 5a6c0b511..af5cdee68 100644 --- a/breathecode/notify/migrations/0010_auto_20220901_0323.py +++ b/breathecode/notify/migrations/0010_auto_20220901_0323.py @@ -6,46 +6,45 @@ class Migration(migrations.Migration): dependencies = [ - ('notify', '0009_hook'), + ("notify", "0009_hook"), ] operations = [ migrations.RenameField( - model_name='hook', - old_name='created', - new_name='created_at', + model_name="hook", + old_name="created", + new_name="created_at", ), migrations.RenameField( - model_name='hook', - old_name='updated', - new_name='updated_at', + model_name="hook", + old_name="updated", + new_name="updated_at", ), migrations.AddField( - model_name='hook', - name='last_call_at', + model_name="hook", + name="last_call_at", field=models.DateTimeField(blank=True, default=None, null=True), ), migrations.AddField( - model_name='hook', - name='last_response_code', + model_name="hook", + name="last_response_code", field=models.IntegerField(blank=True, default=None, null=True), ), migrations.AddField( - model_name='hook', - name='sample_data', - field=models.JSONField(blank=True, - default=None, - help_text='Use this as an example on what you will be receiving', - null=True), + model_name="hook", + name="sample_data", + field=models.JSONField( + blank=True, default=None, help_text="Use this as an example on what you will be receiving", null=True + ), ), migrations.AddField( - model_name='hook', - name='service_id', - field=models.CharField(blank=True, default=None, max_length=64, null=True, verbose_name='Service ID'), + model_name="hook", + name="service_id", + field=models.CharField(blank=True, default=None, max_length=64, null=True, verbose_name="Service ID"), ), migrations.AddField( - model_name='hook', - name='total_calls', + model_name="hook", + name="total_calls", field=models.IntegerField(default=0), ), ] diff --git a/breathecode/notify/migrations/0011_alter_hook_user.py b/breathecode/notify/migrations/0011_alter_hook_user.py index 3abba2f32..df352dd2b 100644 --- a/breathecode/notify/migrations/0011_alter_hook_user.py +++ b/breathecode/notify/migrations/0011_alter_hook_user.py @@ -8,16 +8,16 @@ class Migration(migrations.Migration): dependencies = [ - ('notify', '0010_auto_20220901_0323'), + ("notify", "0010_auto_20220901_0323"), migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.AlterField( - model_name='hook', - name='user', - field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, - related_name='%(class)ss', - 
to=settings.AUTH_USER_MODEL), + model_name="hook", + name="user", + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, related_name="%(class)ss", to=settings.AUTH_USER_MODEL + ), ), ] diff --git a/breathecode/notify/migrations/0012_hookerror.py b/breathecode/notify/migrations/0012_hookerror.py index 2a33a48da..5dcd0d0e4 100644 --- a/breathecode/notify/migrations/0012_hookerror.py +++ b/breathecode/notify/migrations/0012_hookerror.py @@ -7,19 +7,19 @@ class Migration(migrations.Migration): dependencies = [ - ('notify', '0011_alter_hook_user'), + ("notify", "0011_alter_hook_user"), ] operations = [ migrations.CreateModel( - name='HookError', + name="HookError", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('message', models.CharField(max_length=255)), - ('event', models.CharField(db_index=True, max_length=64, verbose_name='Event')), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('hooks', models.ManyToManyField(blank=True, related_name='errors', to=settings.HOOK_CUSTOM_MODEL)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("message", models.CharField(max_length=255)), + ("event", models.CharField(db_index=True, max_length=64, verbose_name="Event")), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("hooks", models.ManyToManyField(blank=True, related_name="errors", to=settings.HOOK_CUSTOM_MODEL)), ], ), ] diff --git a/breathecode/notify/models.py b/breathecode/notify/models.py index 023d1a61d..cdebf3c70 100644 --- a/breathecode/notify/models.py +++ b/breathecode/notify/models.py @@ -8,10 +8,10 @@ from breathecode.admissions.models import Academy, Cohort -__all__ = ['UserProxy', 'CohortProxy', 'Device', 'SlackTeam', 'SlackUser', 'SlackUserTeam', 'SlackChannel', 'Hook'] -AUTH_USER_MODEL = getattr(settings, 'AUTH_USER_MODEL', 'auth.User') -if getattr(settings, 'HOOK_CUSTOM_MODEL', None) is None: - settings.HOOK_CUSTOM_MODEL = 'notify.Hook' +__all__ = ["UserProxy", "CohortProxy", "Device", "SlackTeam", "SlackUser", "SlackUserTeam", "SlackChannel", "Hook"] +AUTH_USER_MODEL = getattr(settings, "AUTH_USER_MODEL", "auth.User") +if getattr(settings, "HOOK_CUSTOM_MODEL", None) is None: + settings.HOOK_CUSTOM_MODEL = "notify.Hook" class UserProxy(User): @@ -36,11 +36,11 @@ def __str__(self): return self.user.registration_id -INCOMPLETED = 'INCOMPLETED' -COMPLETED = 'COMPLETED' +INCOMPLETED = "INCOMPLETED" +COMPLETED = "COMPLETED" SYNC_STATUS = ( - (INCOMPLETED, 'Incompleted'), - (COMPLETED, 'Completed'), + (INCOMPLETED, "Incompleted"), + (COMPLETED, "Completed"), ) @@ -55,22 +55,23 @@ class SlackTeam(models.Model): academy = models.OneToOneField(Academy, on_delete=models.CASCADE, blank=True) - sync_status = models.CharField(max_length=15, - choices=SYNC_STATUS, - default=INCOMPLETED, - help_text='Automatically set when synqued from slack') - sync_message = models.CharField(max_length=100, - blank=True, - null=True, - default=None, - help_text='Contains any success or error messages depending on the status') + sync_status = models.CharField( + max_length=15, choices=SYNC_STATUS, default=INCOMPLETED, help_text="Automatically set when synqued from slack" + ) + sync_message = models.CharField( + max_length=100, + blank=True, + null=True, + default=None, + help_text="Contains any success or error messages depending on the status", 
+ ) synqued_at = models.DateTimeField(default=None, blank=True, null=True) created_at = models.DateTimeField(auto_now_add=True, editable=False) updated_at = models.DateTimeField(auto_now=True, editable=False) def __str__(self): - return f'{self.name} ({self.slack_id})' + return f"{self.name} ({self.slack_id})" class SlackUser(models.Model): @@ -95,11 +96,13 @@ class SlackUserTeam(models.Model): slack_team = models.ForeignKey(SlackTeam, on_delete=models.CASCADE) sync_status = models.CharField(max_length=15, choices=SYNC_STATUS, default=INCOMPLETED) - sync_message = models.CharField(max_length=100, - blank=True, - null=True, - default=None, - help_text='Contains any success or error messages depending on the status') + sync_message = models.CharField( + max_length=100, + blank=True, + null=True, + default=None, + help_text="Contains any success or error messages depending on the status", + ) synqued_at = models.DateTimeField(default=None, blank=True, null=True) created_at = models.DateTimeField(auto_now_add=True, editable=False) @@ -118,36 +121,38 @@ class SlackChannel(models.Model): purpose = models.CharField(max_length=500, blank=True, null=True) sync_status = models.CharField(max_length=15, choices=SYNC_STATUS, default=INCOMPLETED) - sync_message = models.CharField(max_length=100, - blank=True, - null=True, - default=None, - help_text='Contains any success or error messages depending on the status') + sync_message = models.CharField( + max_length=100, + blank=True, + null=True, + default=None, + help_text="Contains any success or error messages depending on the status", + ) synqued_at = models.DateTimeField(default=None, blank=True, null=True) created_at = models.DateTimeField(auto_now_add=True, editable=False) updated_at = models.DateTimeField(auto_now=True, editable=False) def __str__(self): - name = self.name if self.name else 'Unknown' - return f'{name}({self.slack_id})' + name = self.name if self.name else "Unknown" + return f"{name}({self.slack_id})" class AbstractHook(models.Model): """ Stores a representation of a Hook. """ + created_at = models.DateTimeField(auto_now_add=True) updated_at = models.DateTimeField(auto_now=True) - user = models.ForeignKey(AUTH_USER_MODEL, related_name='%(class)ss', on_delete=models.CASCADE) - event = models.CharField('Event', max_length=64, db_index=True) - target = models.URLField('Target URL', max_length=255) - service_id = models.CharField('Service ID', max_length=64, null=True, default=None, blank=True) - sample_data = models.JSONField(null=True, - default=None, - blank=True, - help_text='Use this as an example on what you will be receiving') + user = models.ForeignKey(AUTH_USER_MODEL, related_name="%(class)ss", on_delete=models.CASCADE) + event = models.CharField("Event", max_length=64, db_index=True) + target = models.URLField("Target URL", max_length=255) + service_id = models.CharField("Service ID", max_length=64, null=True, default=None, blank=True) + sample_data = models.JSONField( + null=True, default=None, blank=True, help_text="Use this as an example on what you will be receiving" + ) total_calls = models.IntegerField(default=0) last_call_at = models.DateTimeField(null=True, blank=True, default=None) @@ -158,12 +163,13 @@ class Meta: def clean(self): from .utils.hook_manager import HookManager + """ Validation for events. 
""" if self.event not in HookManager.HOOK_EVENTS.keys(): - raise ValidationError('Invalid hook event {evt}.'.format(evt=self.event)) + raise ValidationError("Invalid hook event {evt}.".format(evt=self.event)) def dict(self): - return {'id': self.id, 'event': self.event, 'target': self.target} + return {"id": self.id, "event": self.event, "target": self.target} def serialize_hook(self, instance): """ @@ -172,13 +178,13 @@ def serialize_hook(self, instance): """ from .utils.hook_manager import HookManager - if getattr(instance, 'serialize_hook', None) and callable(instance.serialize_hook): + if getattr(instance, "serialize_hook", None) and callable(instance.serialize_hook): return instance.serialize_hook(hook=self) - if getattr(settings, 'HOOK_SERIALIZER', None): + if getattr(settings, "HOOK_SERIALIZER", None): serializer = HookManager.get_module(settings.HOOK_SERIALIZER) return serializer(instance, hook=self) # if no user defined serializers, fallback to the django builtin! - data = serializers.serialize('python', [instance])[0] + data = serializers.serialize("python", [instance])[0] for k, v in data.items(): if isinstance(v, OrderedDict): data[k] = dict(v) @@ -187,25 +193,25 @@ def serialize_hook(self, instance): data = dict(data) return { - 'hook': self.dict(), - 'data': data, + "hook": self.dict(), + "data": data, } def __unicode__(self): - return u'{} => {}'.format(self.event, self.target) + return "{} => {}".format(self.event, self.target) class Hook(AbstractHook): class Meta(AbstractHook.Meta): - swappable = 'HOOK_CUSTOM_MODEL' + swappable = "HOOK_CUSTOM_MODEL" class HookError(models.Model): """Hook Error.""" message = models.CharField(max_length=255) - event = models.CharField('Event', max_length=64, db_index=True) - hooks = models.ManyToManyField(Hook, related_name='errors', blank=True) + event = models.CharField("Event", max_length=64, db_index=True) + hooks = models.ManyToManyField(Hook, related_name="errors", blank=True) created_at = models.DateTimeField(auto_now_add=True) updated_at = models.DateTimeField(auto_now=True) diff --git a/breathecode/notify/receivers.py b/breathecode/notify/receivers.py index 6e330ad96..a4ef3b1e7 100644 --- a/breathecode/notify/receivers.py +++ b/breathecode/notify/receivers.py @@ -35,17 +35,19 @@ @receiver(mentorship_session_status, sender=MentorshipSession) def post_mentoring_session_status(sender: Type[MentorshipSession], instance: MentorshipSession, **kwargs): - if instance.status == 'STARTED': - logger.debug('Mentorship has started, notifying the mentor') + if instance.status == "STARTED": + logger.debug("Mentorship has started, notifying the mentor") send_mentorship_starting_notification.delay(instance.id) model_label = get_model_label(instance) serializer = SessionHookSerializer(instance) - HookManager.process_model_event(instance, - model_label, - 'mentorship_session_status', - payload_override=serializer.data, - academy_override=instance.mentor.academy) + HookManager.process_model_event( + instance, + model_label, + "mentorship_session_status", + payload_override=serializer.data, + academy_override=instance.mentor.academy, + ) def get_model_label(instance): @@ -55,38 +57,38 @@ def get_model_label(instance): try: return opts.label except AttributeError: - return '.'.join([opts.app_label, opts.object_name]) + return ".".join([opts.app_label, opts.object_name]) # Django Rest Hooks Receivers -@receiver(post_save, dispatch_uid='instance-saved-hook') +@receiver(post_save, dispatch_uid="instance-saved-hook") def model_saved(sender, instance, 
created, raw, using, **kwargs): """ Automatically triggers "created" and "updated" actions. """ model_label = get_model_label(instance) - action = 'created' if created else 'updated' + action = "created" if created else "updated" HookManager.process_model_event(instance, model_label, action) -@receiver(post_delete, dispatch_uid='instance-deleted-hook') +@receiver(post_delete, dispatch_uid="instance-deleted-hook") def model_deleted(sender, instance, using, **kwargs): """ Automatically triggers "deleted" actions. """ model_label = get_model_label(instance) - HookManager.process_model_event(instance, model_label, 'deleted') + HookManager.process_model_event(instance, model_label, "deleted") @receiver(form_entry_won_or_lost, sender=FormEntry) def form_entry_updated(sender, instance, **kwargs): - logger.debug('Sending formentry to hook') + logger.debug("Sending formentry to hook") model_label = get_model_label(instance) serializer = FormEntryHookSerializer(instance) - HookManager.process_model_event(instance, model_label, 'won_or_lost', payload_override=serializer.data) + HookManager.process_model_event(instance, model_label, "won_or_lost", payload_override=serializer.data) @receiver(cohort_stage_updated, sender=Cohort) @@ -95,40 +97,44 @@ def new_cohort_stage_updated(sender, instance, **kwargs): model_label = get_model_label(instance) serializer = CohortHookSerializer(instance) - HookManager.process_model_event(instance, model_label, 'cohort_stage_updated', payload_override=serializer.data) + HookManager.process_model_event(instance, model_label, "cohort_stage_updated", payload_override=serializer.data) @receiver(new_form_entry_deal, sender=FormEntry) def new_form_entry_deal(sender, instance, **kwargs): - logger.debug('Sending formentry with new deal to hook') + logger.debug("Sending formentry with new deal to hook") model_label = get_model_label(instance) serializer = FormEntryHookSerializer(instance) - HookManager.process_model_event(instance, model_label, 'new_deal', payload_override=serializer.data) + HookManager.process_model_event(instance, model_label, "new_deal", payload_override=serializer.data) @receiver(new_event_attendee, sender=EventCheckin) def handle_new_event_attendee(sender, instance, **kwargs): - logger.debug('Sending new event attendance') + logger.debug("Sending new event attendance") model_label = get_model_label(instance) serializer = EventHookCheckinSerializer(instance) - HookManager.process_model_event(instance, - model_label, - 'new_event_attendee', - payload_override=serializer.data, - academy_override=instance.event.academy) + HookManager.process_model_event( + instance, + model_label, + "new_event_attendee", + payload_override=serializer.data, + academy_override=instance.event.academy, + ) @receiver(new_event_order, sender=EventCheckin) def handle_new_event_order(sender, instance, **kwargs): - logger.debug('Sending new event order') + logger.debug("Sending new event order") model_label = get_model_label(instance) serializer = EventHookCheckinSerializer(instance) - HookManager.process_model_event(instance, - model_label, - 'new_event_order', - payload_override=serializer.data, - academy_override=instance.event.academy) + HookManager.process_model_event( + instance, + model_label, + "new_event_order", + payload_override=serializer.data, + academy_override=instance.event.academy, + ) @receiver(event_status_updated, sender=Event) @@ -136,58 +142,60 @@ def handle_event_status_updated(sender, instance, **kwargs): # logger.debug('Sending event_status_updated hook with new 
event status') model_label = get_model_label(instance) serializer = EventJoinSmallSerializer(instance) - HookManager.process_model_event(instance, model_label, 'event_status_updated', payload_override=serializer.data) + HookManager.process_model_event(instance, model_label, "event_status_updated", payload_override=serializer.data) @receiver(asset_status_updated, sender=Asset) def handle_asset_status_updated(sender, instance, **kwargs): - logger.debug('Sending asset to hook with new status') + logger.debug("Sending asset to hook with new status") model_label = get_model_label(instance) serializer = AssetHookSerializer(instance) - HookManager.process_model_event(instance, model_label, 'asset_status_updated', payload_override=serializer.data) + HookManager.process_model_event(instance, model_label, "asset_status_updated", payload_override=serializer.data) @receiver(invite_status_updated, sender=UserInvite) def handle_invite_accepted(sender, instance, **kwargs): model_label = get_model_label(instance) - HookManager.process_model_event(instance, model_label, 'invite_status_updated') + HookManager.process_model_event(instance, model_label, "invite_status_updated") @receiver(student_edu_status_updated, sender=CohortUser) def edu_status_updated(sender, instance, **kwargs): - logger.debug('Sending student to hook with new edu status') + logger.debug("Sending student to hook with new edu status") academy = instance.cohort.academy if instance.cohort is not None else None model_label = get_model_label(instance) serializer = CohortUserHookSerializer(instance) - HookManager.process_model_event(instance, - model_label, - 'edu_status_updated', - payload_override=serializer.data, - academy_override=academy) + HookManager.process_model_event( + instance, model_label, "edu_status_updated", payload_override=serializer.data, academy_override=academy + ) @receiver(planfinancing_created, sender=PlanFinancing) def new_planfinancing_created(sender, instance, **kwargs): - logger.debug('Sending new PlanFinancing to hook') + logger.debug("Sending new PlanFinancing to hook") model_label = get_model_label(instance) serializer = GetPlanFinancingSerializer(instance) - HookManager.process_model_event(instance, - model_label, - 'planfinancing_created', - payload_override=serializer.data, - academy_override=instance.academy) + HookManager.process_model_event( + instance, + model_label, + "planfinancing_created", + payload_override=serializer.data, + academy_override=instance.academy, + ) @receiver(subscription_created, sender=Subscription) def new_subscription_created(sender, instance, **kwargs): - logger.debug('Sending new Subscription to hook') + logger.debug("Sending new Subscription to hook") model_label = get_model_label(instance) serializer = GetSubscriptionHookSerializer(instance) - HookManager.process_model_event(instance, - model_label, - 'subscription_created', - payload_override=serializer.data, - academy_override=instance.academy) + HookManager.process_model_event( + instance, + model_label, + "subscription_created", + payload_override=serializer.data, + academy_override=instance.academy, + ) @receiver(m2m_changed, sender=HookError.hooks.through) diff --git a/breathecode/notify/serializers.py b/breathecode/notify/serializers.py index 2b55203e8..c4ed0dd45 100644 --- a/breathecode/notify/serializers.py +++ b/breathecode/notify/serializers.py @@ -24,21 +24,21 @@ class HookSerializer(serializers.ModelSerializer): class Meta: model = Hook - read_only_fields = ('user', ) - exclude = ['sample_data'] + read_only_fields 
= ("user",) + exclude = ["sample_data"] def validate(self, data): - if data['event'] not in settings.HOOK_EVENTS: - err_msg = 'Unexpected event {}'.format(data['event']) - raise ValidationException(err_msg, slug='invalid-event') + if data["event"] not in settings.HOOK_EVENTS: + err_msg = "Unexpected event {}".format(data["event"]) + raise ValidationException(err_msg, slug="invalid-event") # superadmins can subscribe to any hook without needed an academy token - if not self.context['request'].user.is_superuser: - academy = Academy.objects.filter(slug=self.context['request'].user.username).first() + if not self.context["request"].user.is_superuser: + academy = Academy.objects.filter(slug=self.context["request"].user.username).first() if academy is None: - raise ValidationException('No valid academy token found', slug='invalid-academy-token') + raise ValidationException("No valid academy token found", slug="invalid-academy-token") - data['user'] = self.context['request'].user + data["user"] = self.context["request"].user return super().validate(data) diff --git a/breathecode/notify/tasks.py b/breathecode/notify/tasks.py index 304342de5..4f843dc67 100644 --- a/breathecode/notify/tasks.py +++ b/breathecode/notify/tasks.py @@ -21,7 +21,7 @@ def get_api_url(): - return os.getenv('API_URL', '') + return os.getenv("API_URL", "") logger = logging.getLogger(__name__) @@ -29,74 +29,75 @@ def get_api_url(): @shared_task(priority=TaskPriority.REALTIME.value) def async_slack_team_channel(team_id): - logger.debug('Starting async_slack_team_channel') + logger.debug("Starting async_slack_team_channel") return sync_slack_team_channel(team_id) @shared_task(priority=TaskPriority.REALTIME.value) def send_mentorship_starting_notification(session_id): - logger.debug('Starting send_mentorship_starting_notification') + logger.debug("Starting send_mentorship_starting_notification") session = MentorshipSession.objects.filter(id=session_id, mentee__isnull=False).first() if not session: - logger.error(f'No mentorship session found for {session_id}') + logger.error(f"No mentorship session found for {session_id}") return False - token, created = Token.get_or_create(session.mentor.user, token_type='temporal', hours_length=2) + token, created = Token.get_or_create(session.mentor.user, token_type="temporal", hours_length=2) actions.send_email_message( - 'message', - session.mentor.user.email, { - 'SUBJECT': 'Mentorship session starting', - 'MESSAGE': - f'Mentee {session.mentee.first_name} {session.mentee.last_name} is joining your session, please come back to this email when the session is over to marke it as completed', - 'BUTTON': 'Finish and review this session', - 'LINK': f'{get_api_url()}/mentor/session/{session.id}?token={token.key}', + "message", + session.mentor.user.email, + { + "SUBJECT": "Mentorship session starting", + "MESSAGE": f"Mentee {session.mentee.first_name} {session.mentee.last_name} is joining your session, please come back to this email when the session is over to marke it as completed", + "BUTTON": "Finish and review this session", + "LINK": f"{get_api_url()}/mentor/session/{session.id}?token={token.key}", }, - academy=session.mentor.academy) + academy=session.mentor.academy, + ) return True @shared_task(priority=TaskPriority.REALTIME.value) def async_slack_team_users(team_id): - logger.debug('Starting async_slack_team_users') + logger.debug("Starting async_slack_team_users") return sync_slack_team_users(team_id) @shared_task(priority=TaskPriority.REALTIME.value) def async_slack_action(post_data): - 
logger.debug('Starting async_slack_action') + logger.debug("Starting async_slack_action") try: client = Slack() success = client.execute_action(context=post_data) if success: - logger.debug('Successfully process slack action') + logger.debug("Successfully process slack action") return True else: - logger.error('Error processing slack action') + logger.error("Error processing slack action") return False except Exception: - logger.exception('Error processing slack action') + logger.exception("Error processing slack action") return False @shared_task(priority=TaskPriority.REALTIME.value) def async_slack_command(post_data): - logger.debug('Starting async_slack_command') + logger.debug("Starting async_slack_command") try: client = Slack() success = client.execute_command(context=post_data) if success: - logger.debug('Successfully process slack command') + logger.debug("Successfully process slack command") return True else: - logger.error('Error processing slack command') + logger.error("Error processing slack command") return False except Exception: - logger.exception('Error processing slack command') + logger.exception("Error processing slack command") return False @@ -120,7 +121,7 @@ def parse_payload(payload: dict): for key in payload.keys(): # TypeError("string indices must be integers, not 'str'") if isinstance(payload[key], datetime): - payload[key] = payload[key].isoformat().replace('+00:00', 'Z') + payload[key] = payload[key].isoformat().replace("+00:00", "Z") elif isinstance(payload[key], Decimal): payload[key] = str(payload[key]) @@ -137,7 +138,7 @@ def parse_payload(payload: dict): return payload - logger.info('Starting async_deliver_hook') + logger.info("Starting async_deliver_hook") has_response = False @@ -153,10 +154,9 @@ def parse_payload(payload: dict): payload = l encoded_payload = json.dumps(payload, cls=DjangoJSONEncoder) - response = requests.post(url=target, - data=encoded_payload, - headers={'Content-Type': 'application/json'}, - timeout=2) + response = requests.post( + url=target, data=encoded_payload, headers={"Content-Type": "application/json"}, timeout=2 + ) has_response = True if hook_id: @@ -171,8 +171,8 @@ def parse_payload(payload: dict): if not isinstance(data, list): data = [] - if 'data' in payload and isinstance(payload['data'], dict): - data.append(payload['data']) + if "data" in payload and isinstance(payload["data"], dict): + data.append(payload["data"]) elif isinstance(payload, dict): data.append(json.loads(encoded_payload)) @@ -188,6 +188,6 @@ def parse_payload(payload: dict): except Exception as e: logger.error(payload) if has_response: - raise AbortTask(f'Error while trying to save hook call with status code {response.status_code}. {payload}') + raise AbortTask(f"Error while trying to save hook call with status code {response.status_code}. 
{payload}") raise e diff --git a/breathecode/notify/tests/mixins/__init__.py b/breathecode/notify/tests/mixins/__init__.py index 100c4c2a9..491e1482d 100644 --- a/breathecode/notify/tests/mixins/__init__.py +++ b/breathecode/notify/tests/mixins/__init__.py @@ -1,4 +1,5 @@ """ Notify mixins """ + from .notify_test_case import NotifyTestCase # noqa: F401 diff --git a/breathecode/notify/tests/mixins/notify_test_case.py b/breathecode/notify/tests/mixins/notify_test_case.py index 8554cf567..695f814bb 100644 --- a/breathecode/notify/tests/mixins/notify_test_case.py +++ b/breathecode/notify/tests/mixins/notify_test_case.py @@ -3,12 +3,29 @@ """ from rest_framework.test import APITestCase -from breathecode.tests.mixins import (GenerateModelsMixin, CacheMixin, TokenMixin, GenerateQueriesMixin, HeadersMixin, - DatetimeMixin, Sha256Mixin, BreathecodeMixin) +from breathecode.tests.mixins import ( + GenerateModelsMixin, + CacheMixin, + TokenMixin, + GenerateQueriesMixin, + HeadersMixin, + DatetimeMixin, + Sha256Mixin, + BreathecodeMixin, +) -class NotifyTestCase(APITestCase, GenerateModelsMixin, CacheMixin, TokenMixin, GenerateQueriesMixin, HeadersMixin, - DatetimeMixin, Sha256Mixin, BreathecodeMixin): +class NotifyTestCase( + APITestCase, + GenerateModelsMixin, + CacheMixin, + TokenMixin, + GenerateQueriesMixin, + HeadersMixin, + DatetimeMixin, + Sha256Mixin, + BreathecodeMixin, +): """FeedbackTestCase with auth methods""" def tearDown(self): diff --git a/breathecode/notify/tests/tasks/tests_async_deliver_hook.py b/breathecode/notify/tests/tasks/tests_async_deliver_hook.py index aaf2890cd..6b3d0cdba 100644 --- a/breathecode/notify/tests/tasks/tests_async_deliver_hook.py +++ b/breathecode/notify/tests/tasks/tests_async_deliver_hook.py @@ -1,6 +1,7 @@ """ Test /answer """ + import re, urllib from unittest.mock import patch, MagicMock, call from django.urls.base import reverse_lazy @@ -29,17 +30,17 @@ def test_no_hook_id(self, fake, enable_hook_manager): fake.slug(): fake.slug(), fake.slug(): fake.slug(), fake.slug(): fake.slug(), - 'latitude': Decimal('25.758059600000000'), - 'longitude': Decimal('-80.377022000000000'), - 'date': timezone.now(), + "latitude": Decimal("25.758059600000000"), + "longitude": Decimal("-80.377022000000000"), + "date": timezone.now(), } url = fake.url() - with patch('requests.post', apply_requests_post_mock([(201, url, {})])): + with patch("requests.post", apply_requests_post_mock([(201, url, {})])): res = async_deliver_hook(url, data) assert res == None - assert self.bc.database.list_of('notify.Hook') == [] + assert self.bc.database.list_of("notify.Hook") == [] def test_hook_not_found(self, fake, enable_hook_manager): enable_hook_manager() @@ -48,19 +49,19 @@ def test_hook_not_found(self, fake, enable_hook_manager): fake.slug(): fake.slug(), fake.slug(): fake.slug(), fake.slug(): fake.slug(), - 'latitude': Decimal('25.758059600000000'), - 'longitude': Decimal('-80.377022000000000'), - 'date': timezone.now(), + "latitude": Decimal("25.758059600000000"), + "longitude": Decimal("-80.377022000000000"), + "date": timezone.now(), } url = fake.url() - with patch('requests.post', apply_requests_post_mock([(201, url, {})])): - with self.assertRaisesMessage(Hook.DoesNotExist, 'Hook matching query does not exist.'): + with patch("requests.post", apply_requests_post_mock([(201, url, {})])): + with self.assertRaisesMessage(Hook.DoesNotExist, "Hook matching query does not exist."): async_deliver_hook(url, data, hook_id=1) - assert self.bc.database.list_of('notify.Hook') == [] + assert 
self.bc.database.list_of("notify.Hook") == [] - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_with_hook(self, fake, enable_hook_manager): enable_hook_manager() @@ -68,29 +69,29 @@ def test_with_hook(self, fake, enable_hook_manager): fake.slug(): fake.slug(), fake.slug(): fake.slug(), fake.slug(): fake.slug(), - 'latitude': Decimal('25.758059600000000'), - 'longitude': Decimal('-80.377022000000000'), - 'date': timezone.now(), + "latitude": Decimal("25.758059600000000"), + "longitude": Decimal("-80.377022000000000"), + "date": timezone.now(), } model = self.bc.database.create(hook=1) url = fake.url() - with patch('requests.post', apply_requests_post_mock([(201, url, {})])): + with patch("requests.post", apply_requests_post_mock([(201, url, {})])): res = async_deliver_hook(url, data, hook_id=1) assert res == None - assert self.bc.database.list_of('notify.Hook') == [ + assert self.bc.database.list_of("notify.Hook") == [ { **self.bc.format.to_dict(model.hook), - 'total_calls': model.hook.total_calls + 1, - 'last_call_at': UTC_NOW, - 'last_response_code': 201, - 'sample_data': [ + "total_calls": model.hook.total_calls + 1, + "last_call_at": UTC_NOW, + "last_response_code": 201, + "sample_data": [ { **data, - 'latitude': str(data['latitude']), - 'longitude': str(data['longitude']), + "latitude": str(data["latitude"]), + "longitude": str(data["longitude"]), }, ], }, @@ -103,16 +104,16 @@ def test_with_hook__returns_410(self, fake, enable_hook_manager): fake.slug(): fake.slug(), fake.slug(): fake.slug(), fake.slug(): fake.slug(), - 'latitude': Decimal('25.758059600000000'), - 'longitude': Decimal('-80.377022000000000'), - 'date': timezone.now(), + "latitude": Decimal("25.758059600000000"), + "longitude": Decimal("-80.377022000000000"), + "date": timezone.now(), } model = self.bc.database.create(hook=1) url = fake.url() - with patch('requests.post', apply_requests_post_mock([(410, url, {})])): + with patch("requests.post", apply_requests_post_mock([(410, url, {})])): res = async_deliver_hook(url, data, hook_id=1) assert res == None - assert self.bc.database.list_of('notify.Hook') == [] + assert self.bc.database.list_of("notify.Hook") == [] diff --git a/breathecode/notify/tests/urls/tests_slack_command.py b/breathecode/notify/tests/urls/tests_slack_command.py index 7a3647223..b95350bac 100644 --- a/breathecode/notify/tests/urls/tests_slack_command.py +++ b/breathecode/notify/tests/urls/tests_slack_command.py @@ -1,6 +1,7 @@ """ Test /answer """ + import re, urllib from unittest.mock import patch, MagicMock, call from django.urls.base import reverse_lazy @@ -13,32 +14,32 @@ class NotifyTestSuite(NotifyTestCase): """Test /answer""" - @patch('breathecode.services.slack.client.Slack.__init__', MagicMock(return_value=None)) - @patch('breathecode.services.slack.client.Slack.execute_command', MagicMock(return_value='potato')) + @patch("breathecode.services.slack.client.Slack.__init__", MagicMock(return_value=None)) + @patch("breathecode.services.slack.client.Slack.execute_command", MagicMock(return_value="potato")) def test_slack_command___return_correct_value(self): """Testing when any other word than the implemented command is entered.""" - url = reverse_lazy('notify:slack_command') - data = {'text': ''} - response = self.client.post(url, data, format='json') + url = reverse_lazy("notify:slack_command") + data = {"text": ""} + response = self.client.post(url, data, format="json") json = response.json() - 
expected = 'Processing...' + expected = "Processing..." self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(Slack.__init__.call_args_list, [call()]) self.assertEqual(Slack.execute_command.call_args_list, [call(context=data)]) - @patch('breathecode.services.slack.client.Slack.__init__', MagicMock(return_value=None)) - @patch('breathecode.services.slack.client.Slack.execute_command', MagicMock(side_effect=Exception('pokemon'))) + @patch("breathecode.services.slack.client.Slack.__init__", MagicMock(return_value=None)) + @patch("breathecode.services.slack.client.Slack.execute_command", MagicMock(side_effect=Exception("pokemon"))) def test_slack_command___raise_exception(self): """Testing when exception is prompted.""" - url = reverse_lazy('notify:slack_command') - data = {'text': ''} - response = self.client.post(url, data, format='json') + url = reverse_lazy("notify:slack_command") + data = {"text": ""} + response = self.client.post(url, data, format="json") json = response.json() - expected = 'Processing...' + expected = "Processing..." self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) diff --git a/breathecode/notify/tests/utils/tests_hook_manager.py b/breathecode/notify/tests/utils/tests_hook_manager.py index 899348a5e..5441bdef4 100644 --- a/breathecode/notify/tests/utils/tests_hook_manager.py +++ b/breathecode/notify/tests/utils/tests_hook_manager.py @@ -11,7 +11,7 @@ @pytest.fixture(autouse=True) def mocks(db, monkeypatch): m1 = MagicMock() - monkeypatch.setattr(tasks.async_deliver_hook, 'delay', m1) + monkeypatch.setattr(tasks.async_deliver_hook, "delay", m1) yield m1 @@ -22,59 +22,53 @@ def get_model_label(instance): try: return opts.label except AttributeError: - return '.'.join([opts.app_label, opts.object_name]) + return ".".join([opts.app_label, opts.object_name]) def test_transform_timedeltas(bc: Breathecode, enable_signals, enable_hook_manager, mocks): enable_hook_manager() mock = mocks - model = bc.database.create(hook={'event': 'survey.created'}, - user={ - 'username': 'test', - 'is_superuser': True - }, - academy={ - 'slug': 'test', - 'available_as_saas': True, - }, - survey=1) + model = bc.database.create( + hook={"event": "survey.created"}, + user={"username": "test", "is_superuser": True}, + academy={ + "slug": "test", + "available_as_saas": True, + }, + survey=1, + ) - x = {'feedback.Survey': {'created': ('survey.created', False)}} - HookManager.HOOK_EVENTS = {'survey.created': 'feedback.Survey.created+'} + x = {"feedback.Survey": {"created": ("survey.created", False)}} + HookManager.HOOK_EVENTS = {"survey.created": "feedback.Survey.created+"} # HookManager._HOOK_EVENT_ACTIONS_CONFIG = x HookManager._HOOK_EVENT_ACTIONS_CONFIG = None HookManager.process_model_event( model.survey, get_model_label(model.survey.__class__), - 'created', + "created", payload_override={ - 'delta': timedelta(days=3), - 'children': [ - { - 'delta': timedelta(days=4) - }, - { - 'delta': timedelta(days=5) - }, - ] + "delta": timedelta(days=3), + "children": [ + {"delta": timedelta(days=4)}, + {"delta": timedelta(days=5)}, + ], }, ) - assert bc.database.list_of('feedback.Survey') == [bc.format.to_dict(model.survey)] + assert bc.database.list_of("feedback.Survey") == [bc.format.to_dict(model.survey)] assert mock.call_args_list == [ - call(model.hook.target, { - 'delta': 259200.0, - 'children': [ - { - 'delta': 345600.0 - }, - { - 'delta': 432000.0 - }, - ] - }, - hook_id=1), + call( + model.hook.target, + 
{ + "delta": 259200.0, + "children": [ + {"delta": 345600.0}, + {"delta": 432000.0}, + ], + }, + hook_id=1, + ), ] diff --git a/breathecode/notify/urls.py b/breathecode/notify/urls.py index 1247119a5..e8b8752db 100644 --- a/breathecode/notify/urls.py +++ b/breathecode/notify/urls.py @@ -1,16 +1,23 @@ from django.urls import path -from .views import (test_email, preview_template, process_interaction, slack_command, preview_slack_template, HooksView, - get_sample_data) +from .views import ( + test_email, + preview_template, + process_interaction, + slack_command, + preview_slack_template, + HooksView, + get_sample_data, +) -app_name = 'notify' +app_name = "notify" urlpatterns = [ - path('preview/<slug>', preview_template), - path('preview/slack/<slug>', preview_slack_template), - path('test/email/<email>', test_email), - path('slack/interaction', process_interaction), - path('hook/subscribe', HooksView.as_view()), - path('hook/subscribe/<int:hook_id>', HooksView.as_view()), - path('hook/sample', get_sample_data), - path('hook/<int:hook_id>/sample', get_sample_data), - path('slack/command', slack_command, name='slack_command'), + path("preview/<slug>", preview_template), + path("preview/slack/<slug>", preview_slack_template), + path("test/email/<email>", test_email), + path("slack/interaction", process_interaction), + path("hook/subscribe", HooksView.as_view()), + path("hook/subscribe/<int:hook_id>", HooksView.as_view()), + path("hook/sample", get_sample_data), + path("hook/<int:hook_id>/sample", get_sample_data), + path("slack/command", slack_command, name="slack_command"), ] diff --git a/breathecode/notify/utils/hook_manager.py b/breathecode/notify/utils/hook_manager.py index 528092dbe..a47098a9a 100644 --- a/breathecode/notify/utils/hook_manager.py +++ b/breathecode/notify/utils/hook_manager.py @@ -17,9 +17,9 @@ class HookManagerClass(object): HOOK_EVENTS = {} def __init__(self): - self.HOOK_EVENTS = getattr(settings, 'HOOK_EVENTS', None) + self.HOOK_EVENTS = getattr(settings, "HOOK_EVENTS", None) if self.HOOK_EVENTS is None: - raise Exception('You need to define settings.HOOK_EVENTS!') + raise Exception("You need to define settings.HOOK_EVENTS!") def get_event_actions_config(self): if self._HOOK_EVENT_ACTIONS_CONFIG is None: @@ -27,8 +27,8 @@ def get_event_actions_config(self): for event_name, auto in self.HOOK_EVENTS.items(): if not auto: continue - model_label, action = auto.rsplit('.', 1) - action_parts = action.rsplit('+', 1) + model_label, action = auto.rsplit(".", 1) + action_parts = action.rsplit("+", 1) action = action_parts[0] ignore_user_override = False if len(action_parts) == 2: @@ -36,8 +36,10 @@ def get_event_actions_config(self): model_config = self._HOOK_EVENT_ACTIONS_CONFIG.setdefault(model_label, {}) if action in model_config: - raise ImproperlyConfigured('settings.HOOK_EVENTS have a dublicate {action} for model ' - '{model_label}'.format(action=action, model_label=model_label)) + raise ImproperlyConfigured( + "settings.HOOK_EVENTS have a dublicate {action} for model " + "{model_label}".format(action=action, model_label=model_label) + ) model_config[action] = ( event_name, ignore_user_override, @@ -54,7 +56,7 @@ def get_module(self, path): from importlib import import_module try: - mod_name, func_name = path.rsplit('.', 1) + mod_name, func_name = path.rsplit(".", 1) mod = import_module(mod_name) except ImportError as e: raise ImportError('Error importing alert function {0}: "{1}"'.format(mod_name, e)) @@ -71,36 +73,37 @@ def get_hook_model(self): Returns the Custom Hook 
model if defined in settings, otherwise the default Hook model. """ - model_label = getattr(settings, 'HOOK_CUSTOM_MODEL', None) + model_label = getattr(settings, "HOOK_CUSTOM_MODEL", None) if django_apps: - model_label = (model_label or 'notify.Hook').replace('.models.', '.') + model_label = (model_label or "notify.Hook").replace(".models.", ".") try: return django_apps.get_model(model_label, require_ready=False) except ValueError: raise ImproperlyConfigured( - f"Invalid model {model_label}, HOOK_CUSTOM_MODEL must be of the form 'app_label.model_name'") + f"Invalid model {model_label}, HOOK_CUSTOM_MODEL must be of the form 'app_label.model_name'" + ) except LookupError: - raise ImproperlyConfigured("HOOK_CUSTOM_MODEL refers to model '%s' that has not been installed" % - model_label) + raise ImproperlyConfigured( + "HOOK_CUSTOM_MODEL refers to model '%s' that has not been installed" % model_label + ) else: - if model_label not in (None, 'notify.Hook'): + if model_label not in (None, "notify.Hook"): try: self.get_module(settings.HOOK_CUSTOM_MODEL) except ImportError: - raise ImproperlyConfigured("HOOK_CUSTOM_MODEL refers to model '%s' that cannot be imported" % - model_label) - - def find_and_fire_hook(self, - event_name, - instance, - user_override=None, - payload_override=None, - academy_override=None): + raise ImproperlyConfigured( + "HOOK_CUSTOM_MODEL refers to model '%s' that cannot be imported" % model_label + ) + + def find_and_fire_hook( + self, event_name, instance, user_override=None, payload_override=None, academy_override=None + ): """ Look up Hooks that apply """ try: from django.contrib.auth import get_user_model + user_cls = get_user_model() except ImportError: from django.contrib.auth.models import User as user_cls # noqa: N813 @@ -108,20 +111,21 @@ def find_and_fire_hook(self, if event_name not in self.HOOK_EVENTS.keys(): raise Exception('"{}" does not exist in `settings.HOOK_EVENTS`.'.format(event_name)) - filters = {'event': event_name} + filters = {"event": event_name} # only process hooks from instances from the same academy if academy_override is not None: - superadmins = user_cls.objects.filter(is_superuser=True).values_list('username', flat=True) - filters['user__username__in'] = [academy_override.slug] + list(superadmins) - elif hasattr(instance, 'academy') and instance.academy is not None: - superadmins = user_cls.objects.filter(is_superuser=True).values_list('username', flat=True) - filters['user__username__in'] = [instance.academy.slug] + list(superadmins) + superadmins = user_cls.objects.filter(is_superuser=True).values_list("username", flat=True) + filters["user__username__in"] = [academy_override.slug] + list(superadmins) + elif hasattr(instance, "academy") and instance.academy is not None: + superadmins = user_cls.objects.filter(is_superuser=True).values_list("username", flat=True) + filters["user__username__in"] = [instance.academy.slug] + list(superadmins) else: logger.debug( - f'Only admin will receive hook notification for {event_name} because entity has not academy property') + f"Only admin will receive hook notification for {event_name} because entity has not academy property" + ) # Only the admin can retrieve events from objects that don't belong to any academy - filters['user__is_superuser'] = True + filters["user__is_superuser"] = True # Ignore the user if the user_override is False # if user_override is not False: @@ -161,17 +165,18 @@ def process_model_event( """ if event_name is False and (model is False or action is False): - raise 
TypeError('process_model_event() requires either `event_name` argument or ' - 'both `model` and `action` arguments.') + raise TypeError( + "process_model_event() requires either `event_name` argument or " "both `model` and `action` arguments." + ) if event_name: if trust_event_name: pass elif event_name in self.HOOK_EVENTS: auto = self.HOOK_EVENTS[event_name] if auto: - allowed_model, allowed_action = auto.rsplit('.', 1) + allowed_model, allowed_action = auto.rsplit(".", 1) - allowed_action_parts = allowed_action.rsplit('+', 1) + allowed_action_parts = allowed_action.rsplit("+", 1) allowed_action = allowed_action_parts[0] model = model or allowed_model @@ -189,12 +194,14 @@ def process_model_event( user_override = False if event_name: - logger.debug(f'process_model_event for event_name={event_name}, user_override={user_override}') - self.find_and_fire_hook(event_name, - instance, - user_override=user_override, - payload_override=payload_override, - academy_override=academy_override) + logger.debug(f"process_model_event for event_name={event_name}, user_override={user_override}") + self.find_and_fire_hook( + event_name, + instance, + user_override=user_override, + payload_override=payload_override, + academy_override=academy_override, + ) def serialize(self, payload: dict | list | tuple) -> dict | list | tuple: @@ -236,7 +243,7 @@ def deliver_hook(self, hook, instance, payload_override=None, academy_override=N if callable(payload): payload = payload(hook, instance) - logger.debug(f'Calling delayed task deliver_hook for hook {hook.id}') + logger.debug(f"Calling delayed task deliver_hook for hook {hook.id}") self.serialize(payload) diff --git a/breathecode/notify/views.py b/breathecode/notify/views.py index 0c6e7f4b5..84bd135de 100644 --- a/breathecode/notify/views.py +++ b/breathecode/notify/views.py @@ -19,21 +19,21 @@ logger = logging.getLogger(__name__) -@api_view(['GET']) +@api_view(["GET"]) @permission_classes([AllowAny]) def preview_template(request, slug): - template = get_template_content(slug, request.GET, formats=['html']) - return HttpResponse(template['html']) + template = get_template_content(slug, request.GET, formats=["html"]) + return HttpResponse(template["html"]) -@api_view(['GET']) +@api_view(["GET"]) @permission_classes([AllowAny]) def preview_slack_template(request, slug): - template = get_template_content(slug, request.GET, ['slack']) - return HttpResponse(template['slack']) + template = get_template_content(slug, request.GET, ["slack"]) + return HttpResponse(template["slack"]) -@api_view(['GET']) +@api_view(["GET"]) @permission_classes([AllowAny]) def test_email(request, email): # tags = sync_user_issues() @@ -41,39 +41,40 @@ def test_email(request, email): pass -@api_view(['POST']) +@api_view(["POST"]) @permission_classes([AllowAny]) def process_interaction(request): try: async_slack_action.delay(request.POST) - logger.debug('Slack action enqueued') - return Response('Processing...', status=status.HTTP_200_OK) + logger.debug("Slack action enqueued") + return Response("Processing...", status=status.HTTP_200_OK) except Exception as e: - logger.exception('Error processing slack action') + logger.exception("Error processing slack action") return Response(str(e), status=status.HTTP_200_OK) -@api_view(['POST']) +@api_view(["POST"]) @permission_classes([AllowAny]) def slack_command(request): try: async_slack_command.delay(request.data) - logger.debug('Slack command enqueued') - return Response('Processing...', status=status.HTTP_200_OK) + logger.debug("Slack command 
enqueued") + return Response("Processing...", status=status.HTTP_200_OK) except Exception as e: - logger.exception('Error processing slack command') + logger.exception("Error processing slack command") return Response(str(e), status=status.HTTP_200_OK) -@api_view(['GET']) +@api_view(["GET"]) def get_sample_data(request, hook_id=None): if hook_id is not None: hook = Hook.objects.filter(user__id=request.user.id, id=hook_id).first() if hook is None: - return Response({'details': 'No hook found with this filters for sample data'}, - status=status.HTTP_400_BAD_REQUEST) + return Response( + {"details": "No hook found with this filters for sample data"}, status=status.HTTP_400_BAD_REQUEST + ) if hook.sample_data is None: return Response([]) @@ -82,29 +83,32 @@ def get_sample_data(request, hook_id=None): items = Hook.objects.filter(user__id=request.user.id) filtered = False - event = request.GET.get('event', None) + event = request.GET.get("event", None) if event is not None: filtered = True - items = items.filter(event__in=event.split(',')) + items = items.filter(event__in=event.split(",")) - service_id = request.GET.get('service_id', None) + service_id = request.GET.get("service_id", None) if service_id is not None: filtered = True - items = items.filter(service_id__in=service_id.split(',')) + items = items.filter(service_id__in=service_id.split(",")) - like = request.GET.get('like', None) + like = request.GET.get("like", None) if like is not None: items = items.filter(Q(event__icontains=like) | Q(target__icontains=like)) if not filtered: - return Response({'details': 'Please specify hook id or filters get have an idea on what sample data you want'}, - status=status.HTTP_400_BAD_REQUEST) + return Response( + {"details": "Please specify hook id or filters get have an idea on what sample data you want"}, + status=status.HTTP_400_BAD_REQUEST, + ) single = items.first() if single is None: - return Response({'details': 'No hook found with this filters for sample data'}, - status=status.HTTP_400_BAD_REQUEST) + return Response( + {"details": "No hook found with this filters for sample data"}, status=status.HTTP_400_BAD_REQUEST + ) return Response(single.sample_data) @@ -114,22 +118,22 @@ class HooksView(APIView, GenerateLookupsMixin): List all snippets, or create a new snippet. 
""" - extensions = APIViewExtensions(sort='-created_at', paginate=True) + extensions = APIViewExtensions(sort="-created_at", paginate=True) def get(self, request): handler = self.extensions(request) items = Hook.objects.filter(user__id=request.user.id) - event = request.GET.get('event', None) + event = request.GET.get("event", None) if event is not None: - items = items.filter(event__in=event.split(',')) + items = items.filter(event__in=event.split(",")) - service_id = request.GET.get('service_id', None) + service_id = request.GET.get("service_id", None) if service_id is not None: - items = items.filter(service_id__in=service_id.split(',')) + items = items.filter(service_id__in=service_id.split(",")) - like = request.GET.get('like', None) + like = request.GET.get("like", None) if like is not None: items = items.filter(Q(event__icontains=like) | Q(target__icontains=like)) @@ -140,7 +144,7 @@ def get(self, request): def post(self, request): - serializer = HookSerializer(data=request.data, context={'request': request}) + serializer = HookSerializer(data=request.data, context={"request": request}) if serializer.is_valid(): serializer.save() return Response(serializer.data, status=status.HTTP_201_CREATED) @@ -150,11 +154,15 @@ def put(self, request, hook_id): hook = Hook.objects.filter(id=hook_id, user__id=request.user.id).first() if hook is None: - raise ValidationException(f'Hook {hook_id} not found for this user', slug='hook-not-found') - - serializer = HookSerializer(instance=hook, data=request.data, context={ - 'request': request, - }) + raise ValidationException(f"Hook {hook_id} not found for this user", slug="hook-not-found") + + serializer = HookSerializer( + instance=hook, + data=request.data, + context={ + "request": request, + }, + ) if serializer.is_valid(): serializer.save() return Response(serializer.data, status=status.HTTP_201_CREATED) @@ -168,21 +176,21 @@ def delete(self, request, hook_id=None): items = items.filter(id=hook_id) filtered = True else: - event = request.GET.get('event', None) + event = request.GET.get("event", None) if event is not None: filtered = True - items = items.filter(event__in=event.split(',')) + items = items.filter(event__in=event.split(",")) - service_id = request.GET.get('service_id', None) + service_id = request.GET.get("service_id", None) if service_id is not None: filtered = True - items = items.filter(service_id__in=service_id.split(',')) + items = items.filter(service_id__in=service_id.split(",")) if not filtered: - raise ValidationException('Please include some filter in the URL') + raise ValidationException("Please include some filter in the URL") total = items.count() for item in items: item.delete() - return Response({'details': f'Unsubscribed from {total} hooks'}, status=status.HTTP_200_OK) + return Response({"details": f"Unsubscribed from {total} hooks"}, status=status.HTTP_200_OK) diff --git a/breathecode/payments/actions.py b/breathecode/payments/actions.py index e313dec70..ec42423ce 100644 --- a/breathecode/payments/actions.py +++ b/breathecode/payments/actions.py @@ -37,17 +37,17 @@ def calculate_relative_delta(unit: float, unit_type: str): delta_args = {} - if unit_type == 'DAY': - delta_args['days'] = unit + if unit_type == "DAY": + delta_args["days"] = unit - elif unit_type == 'WEEK': - delta_args['weeks'] = unit + elif unit_type == "WEEK": + delta_args["weeks"] = unit - elif unit_type == 'MONTH': - delta_args['months'] = unit + elif unit_type == "MONTH": + delta_args["months"] = unit - elif unit_type == 'YEAR': - 
delta_args['years'] = unit + elif unit_type == "YEAR": + delta_args["years"] = unit return relativedelta(**delta_args) @@ -64,21 +64,21 @@ def __init__(self, request: Request, lang: Optional[str] = None, query: Optional self.lang = lang else: - self.lang = request.META.get('HTTP_ACCEPT_LANGUAGE') + self.lang = request.META.get("HTTP_ACCEPT_LANGUAGE") if not self.lang and request.user.id: settings = get_user_settings(request.user.id) self.lang = settings.lang if not self.lang: - self.lang = 'en' + self.lang = "en" - self.academy_slug = request.GET.get('academy') or request.data.get('academy') + self.academy_slug = request.GET.get("academy") or request.data.get("academy") - if cohort := request.GET.get('cohort') or request.data.get('cohort'): + if cohort := request.GET.get("cohort") or request.data.get("cohort"): self.cohort = self._get_instance(Cohort, cohort, self.academy_slug) - if syllabus := request.GET.get('syllabus') or request.data.get('syllabus'): + if syllabus := request.GET.get("syllabus") or request.data.get("syllabus"): self.syllabus = self._get_instance(Syllabus, syllabus, self.academy_slug) def _get_pk(self, pk): @@ -87,17 +87,16 @@ def _get_pk(self, pk): return 0 - def _get_instance(self, - model: Type[Cohort | Syllabus], - pk: str, - academy: Optional[str] = None) -> Optional[Cohort | Syllabus]: + def _get_instance( + self, model: Type[Cohort | Syllabus], pk: str, academy: Optional[str] = None + ) -> Optional[Cohort | Syllabus]: args = [] kwargs = {} if isinstance(pk, int) or pk.isnumeric(): - kwargs['id'] = int(pk) + kwargs["id"] = int(pk) else: - kwargs['slug'] = pk + kwargs["slug"] = pk if academy and model == Syllabus: args.append(Q(academy_owner__slug=academy) | Q(academy_owner__id=self._get_pk(academy)) | Q(private=False)) @@ -108,10 +107,13 @@ def _get_instance(self, resource = model.objects.filter(*args, **kwargs).first() if not resource: raise ValidationException( - translation(self.lang, - en=f'{model.__name__} not found', - es=f'{model.__name__} no encontrada', - slug=f'{model.__name__.lower()}-not-found')) + translation( + self.lang, + en=f"{model.__name__} not found", + es=f"{model.__name__} no encontrada", + slug=f"{model.__name__.lower()}-not-found", + ) + ) return resource @@ -119,20 +121,23 @@ def _cohort_handler(self, on_boarding: Optional[bool] = None, auto: bool = False additional_args = {} if on_boarding is not None: - additional_args['is_onboarding'] = on_boarding + additional_args["is_onboarding"] = on_boarding if not self.cohort.syllabus_version: return Plan.objects.none() if not additional_args and auto: - additional_args['is_onboarding'] = not CohortUser.objects.filter( - cohort__syllabus_version__syllabus=self.cohort.syllabus_version.syllabus).exists() - - args = (self.query, ) if self.query else tuple() - plans = Plan.objects.filter(*args, - cohort_set__cohorts__id=self.cohort.id, - cohort_set__cohorts__stage__in=['INACTIVE', 'PREWORK'], - **additional_args).distinct() + additional_args["is_onboarding"] = not CohortUser.objects.filter( + cohort__syllabus_version__syllabus=self.cohort.syllabus_version.syllabus + ).exists() + + args = (self.query,) if self.query else tuple() + plans = Plan.objects.filter( + *args, + cohort_set__cohorts__id=self.cohort.id, + cohort_set__cohorts__stage__in=["INACTIVE", "PREWORK"], + **additional_args, + ).distinct() return plans @@ -140,17 +145,20 @@ def _syllabus_handler(self, on_boarding: Optional[bool] = None, auto: bool = Fal additional_args = {} if on_boarding is not None: - additional_args['is_onboarding'] = 
on_boarding + additional_args["is_onboarding"] = on_boarding if not additional_args and auto: - additional_args['is_onboarding'] = not CohortUser.objects.filter( - cohort__syllabus_version__syllabus=self.syllabus).exists() - - args = (self.query, ) if self.query else tuple() - plans = Plan.objects.filter(*args, - cohort_set__cohorts__syllabus_version__syllabus=self.syllabus, - cohort_set__cohorts__stage__in=['INACTIVE', 'PREWORK'], - **additional_args).distinct() + additional_args["is_onboarding"] = not CohortUser.objects.filter( + cohort__syllabus_version__syllabus=self.syllabus + ).exists() + + args = (self.query,) if self.query else tuple() + plans = Plan.objects.filter( + *args, + cohort_set__cohorts__syllabus_version__syllabus=self.syllabus, + cohort_set__cohorts__stage__in=["INACTIVE", "PREWORK"], + **additional_args, + ).distinct() return plans @@ -161,18 +169,18 @@ def get_plans_belongs(self, on_boarding: Optional[bool] = None, auto: bool = Fal if self.cohort: return self._cohort_handler(on_boarding, auto) - raise NotImplementedError('Resource handler not implemented') + raise NotImplementedError("Resource handler not implemented") def get_plans_belongs_from_request(self): - is_onboarding = self.request.data.get('is_onboarding') or self.request.GET.get('is_onboarding') + is_onboarding = self.request.data.get("is_onboarding") or self.request.GET.get("is_onboarding") additional_args = {} if is_onboarding: - additional_args['is_onboarding'] = is_onboarding + additional_args["is_onboarding"] = is_onboarding if not additional_args: - additional_args['auto'] = True + additional_args["auto"] = True return self.get_plans_belongs(**additional_args) @@ -195,42 +203,55 @@ def ask_to_add_plan_and_charge_it_in_the_bag(plan: Plan, user: User, lang: str): # avoid bought a free trial for financing if this was bought before if not price and plan_have_free_trial and not plan.is_renewable and subscriptions.filter(valid_until__gte=utc_now): raise ValidationException( - translation(lang, - en='Free trial plans can\'t be bought again', - es='Los planes de prueba no pueden ser comprados de nuevo', - slug='free-trial-plan-for-financing'), + translation( + lang, + en="Free trial plans can't be bought again", + es="Los planes de prueba no pueden ser comprados de nuevo", + slug="free-trial-plan-for-financing", + ), code=400, ) # avoid bought a plan if it doesn't have a price yet after free trial if not price and subscriptions: raise ValidationException( - translation(lang, - en='Free trial plans can\'t be bought more than once', - es='Los planes de prueba no pueden ser comprados más de una vez', - slug='free-trial-already-bought'), + translation( + lang, + en="Free trial plans can't be bought more than once", + es="Los planes de prueba no pueden ser comprados más de una vez", + slug="free-trial-already-bought", + ), code=400, ) # avoid financing plans if it was financed before if not plan.is_renewable and PlanFinancing.objects.filter(user=user, plans=plan): raise ValidationException( - translation(lang, - en='You already have or had a financing on this plan', - es='Ya tienes o tuviste un financiamiento en este plan', - slug='plan-already-financed'), + translation( + lang, + en="You already have or had a financing on this plan", + es="Ya tienes o tuviste un financiamiento en este plan", + slug="plan-already-financed", + ), code=400, ) # avoid to buy a plan if exists a subscription with same plan with remaining days - if price and plan.is_renewable and subscriptions.filter( - Q(Q(status='CANCELLED') | 
Q(status='DEPRECATED'), valid_until=None, next_payment_at__gte=utc_now) - | Q(valid_until__gte=utc_now)): + if ( + price + and plan.is_renewable + and subscriptions.filter( + Q(Q(status="CANCELLED") | Q(status="DEPRECATED"), valid_until=None, next_payment_at__gte=utc_now) + | Q(valid_until__gte=utc_now) + ) + ): raise ValidationException( - translation(lang, - en='You already have a subscription to this plan', - es='Ya tienes una suscripción a este plan', - slug='plan-already-bought'), + translation( + lang, + en="You already have a subscription to this plan", + es="Ya tienes una suscripción a este plan", + slug="plan-already-bought", + ), code=400, ) @@ -249,23 +270,23 @@ def __init__(self, request: Request, bag: Bag, lang: str) -> None: self.lang = lang self.bag = bag - self.service_items = request.data.get('service_items') - self.plans = request.data.get('plans') - self.selected_cohort_set = request.data.get('cohort_set') - self.selected_event_type_set = request.data.get('event_type_set') - self.selected_mentorship_service_set = request.data.get('mentorship_service_set') + self.service_items = request.data.get("service_items") + self.plans = request.data.get("plans") + self.selected_cohort_set = request.data.get("cohort_set") + self.selected_event_type_set = request.data.get("event_type_set") + self.selected_mentorship_service_set = request.data.get("mentorship_service_set") self.plans_not_found = set() self.service_items_not_found = set() self.cohort_sets_not_found = set() - def _lookups(self, value, offset=''): + def _lookups(self, value, offset=""): args = () kwargs = {} - slug_key = f'{offset}slug__in' - pk_key = f'{offset}id__in' + slug_key = f"{offset}slug__in" + pk_key = f"{offset}id__in" - values = value.split(',') if isinstance(value, str) and ',' in value else [value] + values = value.split(",") if isinstance(value, str) and "," in value else [value] for v in values: if slug_key not in kwargs and (not isinstance(v, str) or not v.isnumeric()): kwargs[slug_key] = [] @@ -280,42 +301,60 @@ def _lookups(self, value, offset=''): kwargs[slug_key].append(v) if len(kwargs) > 1: - args = (Q(**{slug_key: kwargs[slug_key]}) | Q(**{pk_key: kwargs[pk_key]}), ) + args = (Q(**{slug_key: kwargs[slug_key]}) | Q(**{pk_key: kwargs[pk_key]}),) kwargs = {} return args, kwargs def _more_than_one_generator(self, en, es): - return translation(self.lang, - en=f'You can only select one {en}', - es=f'Solo puedes seleccionar una {es}', - slug=f'more-than-one-{en}-selected') + return translation( + self.lang, + en=f"You can only select one {en}", + es=f"Solo puedes seleccionar una {es}", + slug=f"more-than-one-{en}-selected", + ) def _validate_selected_resources(self): - if self.selected_cohort_set and not isinstance(self.selected_cohort_set, int) and not isinstance( - self.selected_cohort_set, str): - raise ValidationException(translation(self.lang, - en='The cohort needs to be a id or slug', - es='El cohort debe ser un id o slug'), - slug='cohort-not-id-or-slug') - - if self.selected_event_type_set and not isinstance(self.selected_event_type_set, int) and not isinstance( - self.selected_event_type_set, str): - raise ValidationException(translation(self.lang, - en='The event type set needs to be a id or slug', - es='El event type set debe ser un id o slug'), - slug='event-type-set-not-id-or-slug') - - if self.selected_mentorship_service_set and not isinstance(self.selected_mentorship_service_set, - int) and not isinstance( - self.selected_mentorship_service_set, str): - raise 
ValidationException(translation(self.lang, - en='The mentorship service set needs to be a id or slug', - es='El mentorship service set debe ser un id o slug'), - slug='mentorship-service-set-not-id-or-slug') + if ( + self.selected_cohort_set + and not isinstance(self.selected_cohort_set, int) + and not isinstance(self.selected_cohort_set, str) + ): + raise ValidationException( + translation(self.lang, en="The cohort needs to be a id or slug", es="El cohort debe ser un id o slug"), + slug="cohort-not-id-or-slug", + ) + + if ( + self.selected_event_type_set + and not isinstance(self.selected_event_type_set, int) + and not isinstance(self.selected_event_type_set, str) + ): + raise ValidationException( + translation( + self.lang, + en="The event type set needs to be a id or slug", + es="El event type set debe ser un id o slug", + ), + slug="event-type-set-not-id-or-slug", + ) + + if ( + self.selected_mentorship_service_set + and not isinstance(self.selected_mentorship_service_set, int) + and not isinstance(self.selected_mentorship_service_set, str) + ): + raise ValidationException( + translation( + self.lang, + en="The mentorship service set needs to be a id or slug", + es="El mentorship service set debe ser un id o slug", + ), + slug="mentorship-service-set-not-id-or-slug", + ) def _reset_bag(self): - if 'checking' in self.request.build_absolute_uri(): + if "checking" in self.request.build_absolute_uri(): self.bag.service_items.clear() self.bag.plans.clear() self.bag.token = None @@ -325,32 +364,44 @@ def _validate_service_items_format(self): if isinstance(self.service_items, list): for item in self.service_items: if not isinstance(item, dict): - raise ValidationException(translation(self.lang, - en='The service item needs to be a object', - es='El service item debe ser un objeto'), - slug='service-item-not-object') - - if 'how_many' not in item or 'service' not in item or not isinstance( - item['how_many'], int) or not isinstance(item['service'], int): - raise ValidationException(translation( - self.lang, - en='The service item needs to have the keys of the integer type how_many and service', - es='El service item debe tener las llaves de tipo entero how_many y service'), - slug='service-item-malformed') + raise ValidationException( + translation( + self.lang, + en="The service item needs to be a object", + es="El service item debe ser un objeto", + ), + slug="service-item-not-object", + ) + + if ( + "how_many" not in item + or "service" not in item + or not isinstance(item["how_many"], int) + or not isinstance(item["service"], int) + ): + raise ValidationException( + translation( + self.lang, + en="The service item needs to have the keys of the integer type how_many and service", + es="El service item debe tener las llaves de tipo entero how_many y service", + ), + slug="service-item-malformed", + ) def _get_service_items_that_not_found(self): if isinstance(self.service_items, list): for service_item in self.service_items: kwargs = {} - if service_item['service'] and (isinstance(service_item['service'], int) - or service_item['service'].isnumeric()): - kwargs['id'] = int(service_item['service']) + if service_item["service"] and ( + isinstance(service_item["service"], int) or service_item["service"].isnumeric() + ): + kwargs["id"] = int(service_item["service"]) else: - kwargs['slug'] = service_item['service'] + kwargs["slug"] = service_item["service"] if not Service.objects.filter(**kwargs): - self.service_items_not_found.add(service_item['service']) + 
self.service_items_not_found.add(service_item["service"]) def _get_plans_that_not_found(self): if isinstance(self.plans, list): @@ -359,37 +410,40 @@ def _get_plans_that_not_found(self): exclude = {} if plan and (isinstance(plan, int) or plan.isnumeric()): - kwargs['id'] = int(plan) + kwargs["id"] = int(plan) else: - kwargs['slug'] = plan + kwargs["slug"] = plan if self.selected_cohort_set and isinstance(self.selected_cohort_set, int): - kwargs['cohort_set'] = self.selected_cohort_set + kwargs["cohort_set"] = self.selected_cohort_set elif self.selected_cohort_set and isinstance(self.selected_cohort_set, str): - kwargs['cohort_set__slug'] = self.selected_cohort_set + kwargs["cohort_set__slug"] = self.selected_cohort_set if not Plan.objects.filter(**kwargs).exclude(**exclude): self.plans_not_found.add(plan) def _report_items_not_found(self): if self.service_items_not_found or self.plans_not_found or self.cohort_sets_not_found: - raise ValidationException(translation( - self.lang, - en=f'Items not found: services={self.service_items_not_found}, plans={self.plans_not_found}, ' - f'cohorts={self.cohort_sets_not_found}', - es=f'Elementos no encontrados: servicios={self.service_items_not_found}, ' - f'planes={self.plans_not_found}, cohortes={self.cohort_sets_not_found}', - slug='some-items-not-found'), - code=404) + raise ValidationException( + translation( + self.lang, + en=f"Items not found: services={self.service_items_not_found}, plans={self.plans_not_found}, " + f"cohorts={self.cohort_sets_not_found}", + es=f"Elementos no encontrados: servicios={self.service_items_not_found}, " + f"planes={self.plans_not_found}, cohortes={self.cohort_sets_not_found}", + slug="some-items-not-found", + ), + code=404, + ) def _add_service_items_to_bag(self): if isinstance(self.service_items, list): for service_item in self.service_items: - args, kwargs = self._lookups(service_item['service']) + args, kwargs = self._lookups(service_item["service"]) service = Service.objects.filter(*args, **kwargs).first() - service_item, _ = ServiceItem.objects.get_or_create(service=service, how_many=service_item['how_many']) + service_item, _ = ServiceItem.objects.get_or_create(service=service, how_many=service_item["how_many"]) self.bag.service_items.add(service_item) def _add_plans_to_bag(self): @@ -409,15 +463,19 @@ def _validate_just_one_plan(self): if how_many_plans > 1: - raise ValidationException(self._more_than_one_generator(en='plan', es='plan'), code=400) + raise ValidationException(self._more_than_one_generator(en="plan", es="plan"), code=400) def _validate_buy_plans_or_service_items(self): if self.bag.plans.count() and self.bag.service_items.count(): - raise ValidationException(translation(self.lang, - en="You can't select a plan and a services at the same time", - es='No puedes seleccionar un plan y servicios al mismo tiempo', - slug='one-plan-and-many-services'), - code=400) + raise ValidationException( + translation( + self.lang, + en="You can't select a plan and a services at the same time", + es="No puedes seleccionar un plan y servicios al mismo tiempo", + slug="one-plan-and-many-services", + ), + code=400, + ) def _ask_to_add_plan_and_charge_it_in_the_bag(self): for plan in self.bag.plans.all(): @@ -455,7 +513,7 @@ def get_amount(bag: Bag, currency: Currency, lang: str) -> tuple[float, float, f price_per_year = 0 if not currency: - currency, _ = Currency.objects.get_or_create(code='USD', name='United States dollar') + currency, _ = Currency.objects.get_or_create(code="USD", name="United States dollar") for 
service_item in bag.service_items.all(): if service_item.service.currency != currency: @@ -475,11 +533,11 @@ def get_amount(bag: Bag, currency: Currency, lang: str) -> tuple[float, float, f must_it_be_charged = ask_to_add_plan_and_charge_it_in_the_bag(plan, user, lang) # this prices is just used if it are generating a subscription - if not bag.how_many_installments and (bag.chosen_period != 'NO_SET' or must_it_be_charged): - price_per_month += (plan.price_per_month or 0) - price_per_quarter += (plan.price_per_quarter or 0) - price_per_half += (plan.price_per_half or 0) - price_per_year += (plan.price_per_year or 0) + if not bag.how_many_installments and (bag.chosen_period != "NO_SET" or must_it_be_charged): + price_per_month += plan.price_per_month or 0 + price_per_quarter += plan.price_per_quarter or 0 + price_per_half += plan.price_per_half or 0 + price_per_year += plan.price_per_year or 0 return price_per_month, price_per_quarter, price_per_half, price_per_year @@ -487,32 +545,36 @@ def get_amount(bag: Bag, currency: Currency, lang: str) -> tuple[float, float, f def get_amount_by_chosen_period(bag: Bag, chosen_period: str, lang: str) -> float: amount = 0 - if chosen_period == 'MONTH' and bag.amount_per_month: + if chosen_period == "MONTH" and bag.amount_per_month: amount = bag.amount_per_month - elif chosen_period == 'QUARTER' and bag.amount_per_quarter: + elif chosen_period == "QUARTER" and bag.amount_per_quarter: amount = bag.amount_per_quarter - elif chosen_period == 'HALF' and bag.amount_per_half: + elif chosen_period == "HALF" and bag.amount_per_half: amount = bag.amount_per_half - elif chosen_period == 'YEAR' and bag.amount_per_year: + elif chosen_period == "YEAR" and bag.amount_per_year: amount = bag.amount_per_year # free trial if not amount and (bag.amount_per_month or bag.amount_per_quarter or bag.amount_per_half or bag.amount_per_year): - raise ValidationException(translation(lang, - en=f'The period {chosen_period} is disabled for this bag', - es=f'El periodo {chosen_period} está deshabilitado para esta bolsa', - slug='period-disabled-for-bag'), - code=400) + raise ValidationException( + translation( + lang, + en=f"The period {chosen_period} is disabled for this bag", + es=f"El periodo {chosen_period} está deshabilitado para esta bolsa", + slug="period-disabled-for-bag", + ), + code=400, + ) return amount -def get_bag_from_subscription(subscription: Subscription, - settings: Optional[UserSetting] = None, - lang: Optional[str] = None) -> Bag: +def get_bag_from_subscription( + subscription: Subscription, settings: Optional[UserSetting] = None, lang: Optional[str] = None +) -> Bag: bag = Bag() if not lang and not settings: @@ -524,13 +586,16 @@ def get_bag_from_subscription(subscription: Subscription, last_invoice = subscription.invoices.filter().last() if not last_invoice: raise Exception( - translation(lang, - en='Invalid subscription, this has no invoices', - es='Suscripción invalida, esta no tiene facturas', - slug='subscription-has-no-invoices')) + translation( + lang, + en="Invalid subscription, this has no invoices", + es="Suscripción invalida, esta no tiene facturas", + slug="subscription-has-no-invoices", + ) + ) - bag.status = 'RENEWAL' - bag.type = 'CHARGE' + bag.status = "RENEWAL" + bag.type = "CHARGE" bag.academy = subscription.academy bag.currency = last_invoice.currency bag.user = subscription.user @@ -544,7 +609,8 @@ def get_bag_from_subscription(subscription: Subscription, bag.plans.add(plan) bag.amount_per_month, bag.amount_per_quarter, bag.amount_per_half, 
bag.amount_per_year = get_amount( - bag, last_invoice.currency, lang) + bag, last_invoice.currency, lang + ) bag.save() @@ -560,13 +626,16 @@ def get_bag_from_plan_financing(plan_financing: PlanFinancing, settings: Optiona last_invoice = plan_financing.invoices.filter().last() if not last_invoice: raise Exception( - translation(settings.lang, - en='Invalid plan financing, this has not charge', - es='Plan financing es invalido, este no tiene cargos', - slug='plan-financing-has-no-invoices')) + translation( + settings.lang, + en="Invalid plan financing, this has not charge", + es="Plan financing es invalido, este no tiene cargos", + slug="plan-financing-has-no-invoices", + ) + ) - bag.status = 'RENEWAL' - bag.type = 'CHARGE' + bag.status = "RENEWAL" + bag.type = "CHARGE" bag.academy = plan_financing.academy bag.currency = last_invoice.currency bag.user = plan_financing.user @@ -579,30 +648,32 @@ def get_bag_from_plan_financing(plan_financing: PlanFinancing, settings: Optiona return bag -def filter_consumables(request: WSGIRequest, - items: QuerySet[Consumable], - queryset: QuerySet, - key: str, - custom_query_key: Optional[str] = None): +def filter_consumables( + request: WSGIRequest, + items: QuerySet[Consumable], + queryset: QuerySet, + key: str, + custom_query_key: Optional[str] = None, +): if ids := request.GET.get(key): try: - ids = [int(x) for x in ids.split(',')] + ids = [int(x) for x in ids.split(",")] except Exception: - raise ValidationException(f'{key} param must be integer') + raise ValidationException(f"{key} param must be integer") query_key = custom_query_key or key - queryset |= items.filter(**{f'{query_key}__id__in': ids}) + queryset |= items.filter(**{f"{query_key}__id__in": ids}) - if slugs := request.GET.get(f'{key}_slug'): - slugs = slugs.split(',') + if slugs := request.GET.get(f"{key}_slug"): + slugs = slugs.split(",") query_key = custom_query_key or key - queryset |= items.filter(**{f'{query_key}__slug__in': slugs}) + queryset |= items.filter(**{f"{query_key}__slug__in": slugs}) if not ids and not slugs: query_key = custom_query_key or key - queryset |= items.filter(**{f'{query_key}__isnull': False}) + queryset |= items.filter(**{f"{query_key}__isnull": False}) queryset = queryset.distinct() return queryset @@ -613,41 +684,46 @@ def get_balance_by_resource(queryset: QuerySet, key: str): ids = {getattr(x, key).id for x in queryset} for id in ids: - current = queryset.filter(**{f'{key}__id': id}) + current = queryset.filter(**{f"{key}__id": id}) instance = current.first() balance = {} items = [] units = {x[0] for x in SERVICE_UNITS} for unit in units: per_unit = current.filter(unit_type=unit) - balance[unit.lower()] = -1 if per_unit.filter( - how_many=-1).exists() else per_unit.aggregate(Sum('how_many'))['how_many__sum'] + balance[unit.lower()] = ( + -1 if per_unit.filter(how_many=-1).exists() else per_unit.aggregate(Sum("how_many"))["how_many__sum"] + ) for x in queryset: valid_until = x.valid_until if valid_until: - valid_until = re.sub(r'\+00:00$', 'Z', valid_until.replace(tzinfo=UTC).isoformat()) - - items.append({ - 'id': x.id, - 'how_many': x.how_many, - 'unit_type': x.unit_type, - 'valid_until': x.valid_until, - }) - - result.append({ - 'id': getattr(instance, key).id, - 'slug': getattr(instance, key).slug, - 'balance': balance, - 'items': items, - }) + valid_until = re.sub(r"\+00:00$", "Z", valid_until.replace(tzinfo=UTC).isoformat()) + + items.append( + { + "id": x.id, + "how_many": x.how_many, + "unit_type": x.unit_type, + "valid_until": x.valid_until, + } + ) 
+ + result.append( + { + "id": getattr(instance, key).id, + "slug": getattr(instance, key).slug, + "balance": balance, + "items": items, + } + ) return result @lru_cache(maxsize=1) def max_coupons_allowed(): try: - return int(os.getenv('MAX_COUPONS_ALLOWED', '1')) + return int(os.getenv("MAX_COUPONS_ALLOWED", "1")) except Exception: return 1 @@ -656,12 +732,12 @@ def max_coupons_allowed(): def get_available_coupons(plan: Plan, coupons: Optional[list[str]] = None) -> list[Coupon]: def get_total_spent_coupons(coupon: Coupon) -> int: - sub_kwargs = {'invoices__bag__coupons': coupon} + sub_kwargs = {"invoices__bag__coupons": coupon} if coupon.offered_at: - sub_kwargs['created_at__gte'] = coupon.offered_at + sub_kwargs["created_at__gte"] = coupon.offered_at if coupon.expires_at: - sub_kwargs['created_at__lte'] = coupon.expires_at + sub_kwargs["created_at__lte"] = coupon.expires_at how_many_subscriptions = Subscription.objects.filter(**sub_kwargs).count() how_many_plan_financings = PlanFinancing.objects.filter(**sub_kwargs).count() @@ -694,17 +770,20 @@ def manage_coupon(coupon: Coupon) -> None: Q(offered_at=None) | Q(offered_at__lte=timezone.now()), Q(expires_at=None) | Q(expires_at__gte=timezone.now()), ) - cou_fields = ('id', 'slug', 'how_many_offers', 'offered_at', 'expires_at') + cou_fields = ("id", "slug", "how_many_offers", "offered_at", "expires_at") - special_offers = Coupon.objects.filter( - *cou_args, - auto=True).exclude(Q(how_many_offers=0) | Q(discount_type=Coupon.Discount.NO_DISCOUNT)).only(*cou_fields) + special_offers = ( + Coupon.objects.filter(*cou_args, auto=True) + .exclude(Q(how_many_offers=0) | Q(discount_type=Coupon.Discount.NO_DISCOUNT)) + .only(*cou_fields) + ) for coupon in special_offers: manage_coupon(coupon) - valid_coupons = Coupon.objects.filter(*cou_args, slug__in=coupons, - auto=False).exclude(how_many_offers=0).only(*cou_fields) + valid_coupons = ( + Coupon.objects.filter(*cou_args, slug__in=coupons, auto=False).exclude(how_many_offers=0).only(*cou_fields) + ) max = max_coupons_allowed() for coupon in valid_coupons[0:max]: diff --git a/breathecode/payments/admin.py b/breathecode/payments/admin.py index c3e408254..4a8b9d134 100644 --- a/breathecode/payments/admin.py +++ b/breathecode/payments/admin.py @@ -45,63 +45,68 @@ @admin.register(Currency) class CurrencyAdmin(admin.ModelAdmin): - list_display = ('id', 'code', 'name', 'decimals') - search_fields = ['code', 'code'] + list_display = ("id", "code", "name", "decimals") + search_fields = ["code", "code"] @admin.register(Service) class ServiceAdmin(admin.ModelAdmin): - list_display = ('id', 'slug', 'owner', 'private') - list_filter = ['owner'] - search_fields = ['slug', 'title', 'groups__name'] + list_display = ("id", "slug", "owner", "private") + list_filter = ["owner"] + search_fields = ["slug", "title", "groups__name"] @admin.register(ServiceTranslation) class ServiceTranslationAdmin(admin.ModelAdmin): - list_display = ('id', 'lang', 'title', 'description', 'service') - list_filter = ['service__owner', 'lang'] - search_fields = ['service__slug', 'title', 'service__groups__name'] + list_display = ("id", "lang", "title", "description", "service") + list_filter = ["service__owner", "lang"] + search_fields = ["service__slug", "title", "service__groups__name"] @admin.register(ServiceItem) class ServiceItemAdmin(admin.ModelAdmin): - list_display = ('id', 'unit_type', 'how_many', 'service') - list_filter = ['service__owner'] + list_display = ("id", "unit_type", "how_many", "service") + list_filter = 
["service__owner"] search_fields = [ - 'service__slug', 'service__title', 'service__groups__name', 'service__cohorts__slug', - 'service__mentorship_services__slug' + "service__slug", + "service__title", + "service__groups__name", + "service__cohorts__slug", + "service__mentorship_services__slug", ] @admin.register(ServiceItemFeature) class ServiceItemFeatureAdmin(admin.ModelAdmin): - list_display = ('id', 'service_item', 'lang', 'one_line_desc') - list_filter = ['service_item__service__owner', 'lang'] + list_display = ("id", "service_item", "lang", "one_line_desc") + list_filter = ["service_item__service__owner", "lang"] search_fields = [ - 'service_item__service__slug', 'service_item__service__title', 'service_item__service__groups__name' + "service_item__service__slug", + "service_item__service__title", + "service_item__service__groups__name", ] @admin.register(FinancingOption) class FinancingOptionAdmin(admin.ModelAdmin): - list_display = ('id', 'monthly_price', 'currency', 'how_many_months') - list_filter = ['currency__code'] + list_display = ("id", "monthly_price", "currency", "how_many_months") + list_filter = ["currency__code"] @admin.register(Plan) class PlanAdmin(admin.ModelAdmin): - list_display = ('id', 'slug', 'status', 'trial_duration', 'trial_duration_unit', 'owner') - list_filter = ['trial_duration_unit', 'owner'] - search_fields = ['lang', 'title'] - raw_id_fields = ['owner'] - filter_horizontal = ('invites', ) + list_display = ("id", "slug", "status", "trial_duration", "trial_duration_unit", "owner") + list_filter = ["trial_duration_unit", "owner"] + search_fields = ["lang", "title"] + raw_id_fields = ["owner"] + filter_horizontal = ("invites",) @admin.register(PlanTranslation) class PlanTranslationAdmin(admin.ModelAdmin): - list_display = ('id', 'lang', 'title', 'description', 'plan') - list_filter = ['plan__owner', 'lang'] - search_fields = ['title', 'plan__slug'] + list_display = ("id", "lang", "title", "description", "plan") + list_filter = ["plan__owner", "lang"] + search_fields = ["title", "plan__slug"] def grant_service_permissions(modeladmin, request, queryset): @@ -111,18 +116,18 @@ def grant_service_permissions(modeladmin, request, queryset): @admin.register(Consumable) class ConsumableAdmin(admin.ModelAdmin): - list_display = ('id', 'unit_type', 'how_many', 'service_item', 'user', 'valid_until') - list_filter = ['unit_type', 'service_item__service__slug'] - search_fields = ['service_item__service__slug'] - raw_id_fields = ['user', 'service_item', 'cohort_set', 'event_type_set', 'mentorship_service_set'] + list_display = ("id", "unit_type", "how_many", "service_item", "user", "valid_until") + list_filter = ["unit_type", "service_item__service__slug"] + search_fields = ["service_item__service__slug"] + raw_id_fields = ["user", "service_item", "cohort_set", "event_type_set", "mentorship_service_set"] actions = [grant_service_permissions] @admin.register(Invoice) class InvoiceAdmin(admin.ModelAdmin): - list_display = ('id', 'amount', 'currency', 'paid_at', 'status', 'stripe_id', 'user', 'academy') - list_filter = ['status', 'academy'] - raw_id_fields = ['user', 'currency', 'bag', 'academy'] + list_display = ("id", "amount", "currency", "paid_at", "status", "stripe_id", "user", "academy") + list_filter = ["status", "academy"] + raw_id_fields = ["user", "currency", "bag", "academy"] def renew_subscription_consumables(modeladmin, request, queryset): @@ -132,20 +137,32 @@ def renew_subscription_consumables(modeladmin, request, queryset): @admin.register(Subscription) 
class SubscriptionAdmin(admin.ModelAdmin): - list_display = ('id', 'paid_at', 'status', 'is_refundable', 'next_payment_at', 'pay_every', 'pay_every_unit', - 'user') - list_filter = ['status', 'is_refundable', 'pay_every_unit'] - search_fields = ['user__email', 'user__first_name', 'user__last_name'] + list_display = ( + "id", + "paid_at", + "status", + "is_refundable", + "next_payment_at", + "pay_every", + "pay_every_unit", + "user", + ) + list_filter = ["status", "is_refundable", "pay_every_unit"] + search_fields = ["user__email", "user__first_name", "user__last_name"] raw_id_fields = [ - 'user', 'academy', 'selected_cohort_set', 'selected_mentorship_service_set', 'selected_event_type_set' + "user", + "academy", + "selected_cohort_set", + "selected_mentorship_service_set", + "selected_event_type_set", ] actions = [renew_subscription_consumables] @admin.register(SubscriptionServiceItem) class SubscriptionServiceItemAdmin(admin.ModelAdmin): - list_display = ('id', 'subscription', 'service_item') - list_filter = ['subscription__user__email', 'subscription__user__first_name', 'subscription__user__last_name'] + list_display = ("id", "subscription", "service_item") + list_filter = ["subscription__user__email", "subscription__user__first_name", "subscription__user__last_name"] def renew_plan_financing_consumables(modeladmin, request, queryset): @@ -155,26 +172,30 @@ def renew_plan_financing_consumables(modeladmin, request, queryset): @admin.register(PlanFinancing) class PlanFinancingAdmin(admin.ModelAdmin): - list_display = ('id', 'next_payment_at', 'valid_until', 'status', 'user') - list_filter = ['status'] - search_fields = ['user__email', 'user__first_name', 'user__last_name'] + list_display = ("id", "next_payment_at", "valid_until", "status", "user") + list_filter = ["status"] + search_fields = ["user__email", "user__first_name", "user__last_name"] raw_id_fields = [ - 'user', 'academy', 'selected_cohort_set', 'selected_mentorship_service_set', 'selected_event_type_set' + "user", + "academy", + "selected_cohort_set", + "selected_mentorship_service_set", + "selected_event_type_set", ] actions = [renew_plan_financing_consumables] def add_cohort_set_to_the_subscriptions(modeladmin, request, queryset): if queryset.count() > 1: - raise forms.ValidationError('You just can select one subscription at a time') + raise forms.ValidationError("You just can select one subscription at a time") - cohort_set_id = queryset.values_list('id', flat=True).first() + cohort_set_id = queryset.values_list("id", flat=True).first() if not cohort_set_id: return subscriptions = Subscription.objects.filter( - Q(valid_until__isnull=True) - | Q(valid_until__gt=timezone.now()), selected_cohort_set=None).exclude(status__in=['CANCELLED', 'DEPRECATED']) + Q(valid_until__isnull=True) | Q(valid_until__gt=timezone.now()), selected_cohort_set=None + ).exclude(status__in=["CANCELLED", "DEPRECATED"]) for item in subscriptions: tasks.add_cohort_set_to_subscription.delay(item.id, cohort_set_id) @@ -182,71 +203,71 @@ def add_cohort_set_to_the_subscriptions(modeladmin, request, queryset): @admin.register(CohortSet) class CohortSetAdmin(admin.ModelAdmin): - list_display = ('id', 'slug', 'academy') - list_filter = ['academy__slug'] - search_fields = ['slug', 'academy__slug', 'academy__name'] + list_display = ("id", "slug", "academy") + list_filter = ["academy__slug"] + search_fields = ["slug", "academy__slug", "academy__name"] actions = [add_cohort_set_to_the_subscriptions] def formfield_for_manytomany(self, db_field, request, **kwargs): - 
if db_field.name == 'cohorts': - kwargs['widget'] = admin.widgets.FilteredSelectMultiple(db_field.verbose_name, False) + if db_field.name == "cohorts": + kwargs["widget"] = admin.widgets.FilteredSelectMultiple(db_field.verbose_name, False) return super().formfield_for_manytomany(db_field, request, **kwargs) @admin.register(CohortSetTranslation) class CohortSetTranslationAdmin(admin.ModelAdmin): - list_display = ('id', 'cohort_set', 'lang', 'title', 'description', 'short_description') - list_filter = ['lang'] - search_fields = ['slug', 'academy__slug', 'academy__name'] + list_display = ("id", "cohort_set", "lang", "title", "description", "short_description") + list_filter = ["lang"] + search_fields = ["slug", "academy__slug", "academy__name"] @admin.register(MentorshipServiceSet) class MentorshipServiceSetAdmin(admin.ModelAdmin): - list_display = ('id', 'slug', 'academy') - list_filter = ['academy__slug'] - search_fields = ['slug', 'academy__slug', 'academy__name'] + list_display = ("id", "slug", "academy") + list_filter = ["academy__slug"] + search_fields = ["slug", "academy__slug", "academy__name"] @admin.register(CohortSetCohort) class CohortSetCohortAdmin(admin.ModelAdmin): - list_display = ('id', 'cohort_set', 'cohort') - list_filter = ['cohort_set__academy__slug'] - search_fields = ['cohort_set__slug', 'cohort_set__name', 'cohort__slug', 'cohort__name'] - raw_id_fields = ['cohort'] + list_display = ("id", "cohort_set", "cohort") + list_filter = ["cohort_set__academy__slug"] + search_fields = ["cohort_set__slug", "cohort_set__name", "cohort__slug", "cohort__name"] + raw_id_fields = ["cohort"] @admin.register(MentorshipServiceSetTranslation) class MentorshipServiceSetTranslationAdmin(admin.ModelAdmin): - list_display = ('id', 'mentorship_service_set', 'lang', 'title', 'description', 'short_description') - list_filter = ['lang'] - search_fields = ['slug', 'academy__slug', 'academy__name'] + list_display = ("id", "mentorship_service_set", "lang", "title", "description", "short_description") + list_filter = ["lang"] + search_fields = ["slug", "academy__slug", "academy__name"] @admin.register(EventTypeSet) class EventTypeSetAdmin(admin.ModelAdmin): - list_display = ('id', 'slug', 'academy') - list_filter = ['academy__slug'] - search_fields = ['slug', 'academy__slug', 'academy__name'] - raw_id_fields = ['academy'] + list_display = ("id", "slug", "academy") + list_filter = ["academy__slug"] + search_fields = ["slug", "academy__slug", "academy__name"] + raw_id_fields = ["academy"] @admin.register(EventTypeSetTranslation) class EventTypeSetTranslationAdmin(admin.ModelAdmin): - list_display = ('id', 'event_type_set', 'lang', 'title', 'description', 'short_description') - list_filter = ['lang'] - search_fields = ['slug', 'academy__slug', 'academy__name'] - raw_id_fields = ['event_type_set'] + list_display = ("id", "event_type_set", "lang", "title", "description", "short_description") + list_filter = ["lang"] + search_fields = ["slug", "academy__slug", "academy__name"] + raw_id_fields = ["event_type_set"] @admin.register(PlanServiceItem) class PlanServiceItemAdmin(admin.ModelAdmin): - list_display = ('id', 'plan', 'service_item') - list_filter = ['plan__slug', 'plan__owner__slug'] + list_display = ("id", "plan", "service_item") + list_filter = ["plan__slug", "plan__owner__slug"] @admin.register(PlanServiceItemHandler) class PlanServiceItemHandlerAdmin(admin.ModelAdmin): - list_display = ('id', 'handler', 'subscription', 'plan_financing') + list_display = ("id", "handler", "subscription", 
"plan_financing") def renew_consumables(modeladmin, request, queryset): @@ -256,7 +277,7 @@ def renew_consumables(modeladmin, request, queryset): @admin.register(ServiceStockScheduler) class ServiceStockSchedulerAdmin(admin.ModelAdmin): - list_display = ('id', 'subscription', 'service_item', 'plan_financing', 'valid_until') + list_display = ("id", "subscription", "service_item", "plan_financing", "valid_until") actions = [renew_consumables] def subscription(self, obj): @@ -280,86 +301,117 @@ def plan_financing(self, obj): @admin.register(PaymentContact) class PaymentContactAdmin(admin.ModelAdmin): - list_display = ('id', 'user', 'stripe_id') - search_fields = ['user__email', 'user__first_name', 'user__last_name'] - raw_id_fields = ['user'] + list_display = ("id", "user", "stripe_id") + search_fields = ["user__email", "user__first_name", "user__last_name"] + raw_id_fields = ["user"] @admin.register(FinancialReputation) class FinancialReputationAdmin(admin.ModelAdmin): - list_display = ('id', 'user', 'in_4geeks', 'in_stripe') - list_filter = ['in_4geeks', 'in_stripe'] - search_fields = ['user__email', 'user__first_name', 'user__last_name'] - raw_id_fields = ['user'] + list_display = ("id", "user", "in_4geeks", "in_stripe") + list_filter = ["in_4geeks", "in_stripe"] + search_fields = ["user__email", "user__first_name", "user__last_name"] + raw_id_fields = ["user"] @admin.register(Bag) class BagAdmin(admin.ModelAdmin): - list_display = ('id', 'status', 'type', 'chosen_period', 'academy', 'user', 'is_recurrent', 'was_delivered') - list_filter = ['status', 'type', 'chosen_period', 'academy', 'is_recurrent'] - search_fields = ['user__email', 'user__first_name', 'user__last_name'] - raw_id_fields = ['user', 'academy'] + list_display = ("id", "status", "type", "chosen_period", "academy", "user", "is_recurrent", "was_delivered") + list_filter = ["status", "type", "chosen_period", "academy", "is_recurrent"] + search_fields = ["user__email", "user__first_name", "user__last_name"] + raw_id_fields = ["user", "academy"] @admin.register(PlanOffer) class PlanOfferAdmin(admin.ModelAdmin): - list_display = ('id', 'original_plan', 'suggested_plan', 'show_modal', 'expires_at') - list_filter = ['show_modal'] - search_fields = ['original_plan__slug', 'suggested_plan__slug'] - raw_id_fields = ['original_plan', 'suggested_plan'] + list_display = ("id", "original_plan", "suggested_plan", "show_modal", "expires_at") + list_filter = ["show_modal"] + search_fields = ["original_plan__slug", "suggested_plan__slug"] + raw_id_fields = ["original_plan", "suggested_plan"] @admin.register(PlanOfferTranslation) class PlanOfferTranslationAdmin(admin.ModelAdmin): - list_display = ('id', 'offer', 'lang', 'title', 'description', 'short_description') - list_filter = ['lang'] - search_fields = ['title'] - raw_id_fields = ['offer'] + list_display = ("id", "offer", "lang", "title", "description", "short_description") + list_filter = ["lang"] + search_fields = ["title"] + raw_id_fields = ["offer"] @admin.register(AcademyService) class AcademyServiceAdmin(admin.ModelAdmin): - list_display = ('service', 'academy', 'price_per_unit', 'currency', 'bundle_size', 'max_amount') - list_filter = ['academy', 'currency'] - search_fields = ['service'] - raw_id_fields = ['service', 'academy'] + list_display = ("service", "academy", "price_per_unit", "currency", "bundle_size", "max_amount") + list_filter = ["academy", "currency"] + search_fields = ["service"] + raw_id_fields = ["service", "academy"] @admin.register(ConsumptionSession) class 
ConsumptionSessionAdmin(admin.ModelAdmin): - list_display = ('id', 'user', 'consumable', 'eta', 'duration', 'how_many', 'status', 'was_discounted', 'path', - 'related_id', 'related_slug') - list_filter = ['was_discounted', 'status', 'duration'] + list_display = ( + "id", + "user", + "consumable", + "eta", + "duration", + "how_many", + "status", + "was_discounted", + "path", + "related_id", + "related_slug", + ) + list_filter = ["was_discounted", "status", "duration"] search_fields = [ - 'user__email', 'user__id', 'user__first_name', 'user__last_name', 'path', 'related_slug', 'related_id', - 'consumable__service_item__service__slug' + "user__email", + "user__id", + "user__first_name", + "user__last_name", + "path", + "related_slug", + "related_id", + "consumable__service_item__service__slug", ] - raw_id_fields = ['user', 'consumable'] + raw_id_fields = ["user", "consumable"] @admin.register(Seller) class SellerAdmin(admin.ModelAdmin): - list_display = ('id', 'name', 'user', 'is_active') - list_filter = ['is_active'] - search_fields = ['name', 'user__email', 'user__id', 'user__first_name', 'user__last_name'] - raw_id_fields = ['user'] + list_display = ("id", "name", "user", "is_active") + list_filter = ["is_active"] + search_fields = ["name", "user__email", "user__id", "user__first_name", "user__last_name"] + raw_id_fields = ["user"] @admin.register(Coupon) class CouponAdmin(admin.ModelAdmin): - list_display = ('id', 'slug', 'discount_type', 'discount_value', 'referral_type', 'referral_value', 'auto', - 'seller', 'offered_at', 'expires_at') - list_filter = ['discount_type', 'referral_type', 'auto'] + list_display = ( + "id", + "slug", + "discount_type", + "discount_value", + "referral_type", + "referral_value", + "auto", + "seller", + "offered_at", + "expires_at", + ) + list_filter = ["discount_type", "referral_type", "auto"] search_fields = [ - 'slug', 'seller__name', 'seller__user__email', 'seller__user__id', 'seller__user__first_name', - 'seller__user__last_name' + "slug", + "seller__name", + "seller__user__email", + "seller__user__id", + "seller__user__first_name", + "seller__user__last_name", ] - raw_id_fields = ['seller'] + raw_id_fields = ["seller"] @admin.register(PaymentMethod) class PaymentMethodAdmin(admin.ModelAdmin): - list_display = ('title', 'description', 'academy', 'third_party_link', 'lang') - list_filter = ['academy__name', 'lang'] - raw_id_fields = ['academy'] - search_fields = ['title', 'academy__name'] + list_display = ("title", "description", "academy", "third_party_link", "lang") + list_filter = ["academy__name", "lang"] + raw_id_fields = ["academy"] + search_fields = ["title", "academy__name"] diff --git a/breathecode/payments/apps.py b/breathecode/payments/apps.py index b1df6b8ce..83529ece9 100644 --- a/breathecode/payments/apps.py +++ b/breathecode/payments/apps.py @@ -2,8 +2,8 @@ class PaymentsConfig(AppConfig): - default_auto_field = 'django.db.models.BigAutoField' - name = 'breathecode.payments' + default_auto_field = "django.db.models.BigAutoField" + name = "breathecode.payments" def ready(self): from . 
import receivers  # noqa: F401
diff --git a/breathecode/payments/management/commands/make_charges.py b/breathecode/payments/management/commands/make_charges.py
index 933a6dd70..cd905ad40 100644
--- a/breathecode/payments/management/commands/make_charges.py
+++ b/breathecode/payments/management/commands/make_charges.py
@@ -11,30 +11,41 @@
 # renew the subscriptions every 1 hours
 class Command(BaseCommand):
-    help = 'Renew credits'
+    help = "Renew credits"
     def handle(self, *args, **options):
         utc_now = timezone.now()
-        statuses = ['CANCELLED', 'DEPRECATED', 'FREE_TRIAL', 'EXPIRED']
-
-        avoid_expire_these_statuses = Q(status='EXPIRED') | Q(status='ERROR') | Q(status='PAYMENT_ISSUE') | Q(
-            status='FULLY_PAID') | Q(status='FREE_TRIAL') | Q(status='CANCELLED') | Q(status='DEPRECATED')
-
-        subscription_args = (Q(valid_until__isnull=True) | Q(valid_until__gt=utc_now), )
-        financing_args = (Q(plan_expires_at__isnull=True) | Q(plan_expires_at__gt=utc_now), )
+        statuses = ["CANCELLED", "DEPRECATED", "FREE_TRIAL", "EXPIRED"]
+
+        avoid_expire_these_statuses = (
+            Q(status="EXPIRED")
+            | Q(status="ERROR")
+            | Q(status="PAYMENT_ISSUE")
+            | Q(status="FULLY_PAID")
+            | Q(status="FREE_TRIAL")
+            | Q(status="CANCELLED")
+            | Q(status="DEPRECATED")
+        )
+
+        subscription_args = (Q(valid_until__isnull=True) | Q(valid_until__gt=utc_now),)
+        financing_args = (Q(plan_expires_at__isnull=True) | Q(plan_expires_at__gt=utc_now),)
         params = {
-            'next_payment_at__lte': utc_now + timedelta(days=1),
+            "next_payment_at__lte": utc_now + timedelta(days=1),
         }
-        Subscription.objects.filter(Q(valid_until__gte=utc_now) | Q(valid_until__isnull=True),
-                                    next_payment_at__lte=utc_now - timedelta(days=7),
-                                    status='PAYMENT_ISSUE').update(status='EXPIRED')
+        Subscription.objects.filter(
+            Q(valid_until__gte=utc_now) | Q(valid_until__isnull=True),
+            next_payment_at__lte=utc_now - timedelta(days=7),
+            status="PAYMENT_ISSUE",
+        ).update(status="EXPIRED")
         Subscription.objects.filter(valid_until__lte=utc_now).exclude(avoid_expire_these_statuses).update(
-            status='EXPIRED')
+            status="EXPIRED"
+        )
         PlanFinancing.objects.filter(plan_expires_at__lte=utc_now).exclude(avoid_expire_these_statuses).update(
-            status='EXPIRED')
+            status="EXPIRED"
+        )
         subscriptions = Subscription.objects.filter(*subscription_args, **params)
         plan_financings = PlanFinancing.objects.filter(*financing_args, **params)
@@ -42,7 +53,7 @@ def handle(self, *args, **options):
         for status in statuses:
             subscriptions = subscriptions.exclude(status=status)
-        statuses.append('FULLY_PAID')
+        statuses.append("FULLY_PAID")
         for status in statuses:
             plan_financings = plan_financings.exclude(status=status)
diff --git a/breathecode/payments/management/commands/renew_consumables.py b/breathecode/payments/management/commands/renew_consumables.py
index ddc3957cb..12b167f51 100644
--- a/breathecode/payments/management/commands/renew_consumables.py
+++ b/breathecode/payments/management/commands/renew_consumables.py
@@ -8,7 +8,7 @@
 # renew the credits every 1 hours
 class Command(BaseCommand):
-    help = 'Renew credits'
+    help = "Renew credits"
     def handle(self, *args, **options):
         self.utc_now = timezone.now()
@@ -16,18 +16,19 @@ def handle(self, *args, **options):
         self.plan_financing()
     def subscriptions(self):
-        subscriptions = Subscription.objects.filter().exclude(status='CANCELLED').exclude(status='DEPRECATED')
+        subscriptions = Subscription.objects.filter().exclude(status="CANCELLED").exclude(status="DEPRECATED")
         if subscriptions.count() == 0:
             return
-        subscription_ids = list(subscriptions.values_list('id', flat=True))
+        subscription_ids = list(subscriptions.values_list("id", flat=True))
-        no_need_to_renew = ServiceStockScheduler.objects.filter(
-            consumables__valid_until__gte=self.utc_now +
-            timedelta(hours=2)).exclude(plan_handler__subscription__status='CANCELLED').exclude(
-                plan_handler__subscription__status='DEPRECATED').exclude(
-                    plan_handler__subscription__status='PAYMENT_ISSUE')
+        no_need_to_renew = (
+            ServiceStockScheduler.objects.filter(consumables__valid_until__gte=self.utc_now + timedelta(hours=2))
+            .exclude(plan_handler__subscription__status="CANCELLED")
+            .exclude(plan_handler__subscription__status="DEPRECATED")
+            .exclude(plan_handler__subscription__status="PAYMENT_ISSUE")
+        )
         for subscription in no_need_to_renew:
             subscription_ids.remove(subscription.id)
@@ -36,17 +37,18 @@ def subscriptions(self):
             tasks.renew_subscription_consumables.delay(subscription_id)
     def plan_financing(self):
-        plan_financings = PlanFinancing.objects.filter().exclude(status__in=['CANCELLED', 'DEPRECATED']).only(
-            'id', 'user')
+        plan_financings = (
+            PlanFinancing.objects.filter().exclude(status__in=["CANCELLED", "DEPRECATED"]).only("id", "user")
+        )
         if plan_financings.count() == 0:
             return
-        plan_financing_ids = list(plan_financings.values_list('id', flat=True))
+        plan_financing_ids = list(plan_financings.values_list("id", flat=True))
         no_need_to_renew = ServiceStockScheduler.objects.filter(
-            consumables__valid_until__gte=self.utc_now + timedelta(hours=2)).exclude(
-                plan_handler__plan_financing__status__in=['CANCELLED', 'DEPRECATED', 'PAYMENT_ISSUE'])
+            consumables__valid_until__gte=self.utc_now + timedelta(hours=2)
+        ).exclude(plan_handler__plan_financing__status__in=["CANCELLED", "DEPRECATED", "PAYMENT_ISSUE"])
         for plan_financing in no_need_to_renew:
             plan_financing_ids.remove(plan_financing.id)
diff --git a/breathecode/payments/management/commands/retry_pending_bags.py b/breathecode/payments/management/commands/retry_pending_bags.py
index ee095c446..c94b621a9 100644
--- a/breathecode/payments/management/commands/retry_pending_bags.py
+++ b/breathecode/payments/management/commands/retry_pending_bags.py
@@ -9,11 +9,11 @@
 # renew the credits every 1 hours
 class Command(BaseCommand):
-    help = 'Renew credits'
+    help = "Renew credits"
     def handle(self, *args, **options):
         now = timezone.now()
-        bags = Bag.objects.filter(was_delivered=False, created_at__lte=now - timedelta(minutes=10), status='PAID')
+        bags = Bag.objects.filter(was_delivered=False, created_at__lte=now - timedelta(minutes=10), status="PAID")
         hm_processed = 0
         hm_failed = 0
@@ -34,4 +34,6 @@ def handle(self, *args, **options):
         total = hm_processed + hm_failed
         self.stdout.write(
             self.style.SUCCESS(
-                f'Rescheduled {total} bags where {hm_processed} were processed and {hm_failed} were failed.'))
+                f"Rescheduled {total} bags where {hm_processed} were processed and {hm_failed} were failed."
+            )
+        )
diff --git a/breathecode/payments/management/commands/set_currencies.py b/breathecode/payments/management/commands/set_currencies.py
index ebb0357ff..c1e5d6abf 100644
--- a/breathecode/payments/management/commands/set_currencies.py
+++ b/breathecode/payments/management/commands/set_currencies.py
@@ -4,81 +4,82 @@
 from breathecode.payments.models import Currency
 usa = {
-    'code': 'us',
-    'name': 'USA',
+    "code": "us",
+    "name": "USA",
 }
 venezuela = {
-    'code': 've',
-    'name': 'Venezuela',
+    "code": "ve",
+    "name": "Venezuela",
 }
 uruguay = {
-    'code': 'uy',
-    'name': 'Uruguay',
+    "code": "uy",
+    "name": "Uruguay",
 }
 online = {
-    'code': 'on',
-    'name': 'Online',
+    "code": "on",
+    "name": "Online",
 }
 spain = {
-    'code': 'es',
-    'name': 'Spain',
+    "code": "es",
+    "name": "Spain",
 }
 costa_rica = {
-    'code': 'cr',
-    'name': 'Costa Rica',
+    "code": "cr",
+    "name": "Costa Rica",
 }
 chile = {
-    'code': 'cl',
-    'name': 'Chile',
+    "code": "cl",
+    "name": "Chile",
 }
 canada = {
-    'code': 'ca',
-    'name': 'Canada',
+    "code": "ca",
+    "name": "Canada",
 }
 currencies = [
     {
-        'code': 'USD',
-        'name': 'United States dollar',
-        'decimals': 2,
-        'countries': {
-            'main': [usa, online, venezuela, uruguay, chile, canada, costa_rica],
+        "code": "USD",
+        "name": "United States dollar",
+        "decimals": 2,
+        "countries": {
+            "main": [usa, online, venezuela, uruguay, chile, canada, costa_rica],
         },
     },
     {
-        'code': 'EUR',
-        'name': 'Euro',
-        'decimals': 2,
-        'countries': {
-            'main': [spain],
+        "code": "EUR",
+        "name": "Euro",
+        "decimals": 2,
+        "countries": {
+            "main": [spain],
         },
     },
     {
-        'code': 'CLP',
-        'name': 'peso chileno',
-        'decimals': 0,
-        'countries': {
-            'main': [chile],
+        "code": "CLP",
+        "name": "peso chileno",
+        "decimals": 0,
+        "countries": {
+            "main": [chile],
         },
     },
 ]
 class Command(BaseCommand):
-    help = 'Set currencies'
+    help = "Set currencies"
     def handle(self, *args, **options):
         for currency in currencies:
-            c, _ = Currency.objects.get_or_create(code=currency['code'],
-                                                  name=currency['name'],
-                                                  decimals=currency['decimals'])
-            for country in currency['countries']['main']:
-                Academy.objects.filter(country__code=country['code'],
-                                       country__name=country['name']).update(main_currency=c)
+            c, _ = Currency.objects.get_or_create(
+                code=currency["code"], name=currency["name"], decimals=currency["decimals"]
+            )
+            for country in currency["countries"]["main"]:
+                Academy.objects.filter(country__code=country["code"], country__name=country["name"]).update(
+                    main_currency=c
+                )
diff --git a/breathecode/payments/management/commands/set_services.py b/breathecode/payments/management/commands/set_services.py
index b8efa1502..803f3b912 100644
--- a/breathecode/payments/management/commands/set_services.py
+++ b/breathecode/payments/management/commands/set_services.py
@@ -2,69 +2,69 @@
 from breathecode.payments.models import Service, ServiceTranslation
-GROUP = {'codename': 'CLASSROOM'}
+GROUP = {"codename": "CLASSROOM"}
 SERVICES = [
     {
-        'slug': 'backend-with-django',
-        'private': False,
-        'price_per_unit': 4000,
-        'currency': 'USD',
-        'groups': ['CLASSROOM'],
+        "slug": "backend-with-django",
+        "private": False,
+        "price_per_unit": 4000,
+        "currency": "USD",
+        "groups": ["CLASSROOM"],
         # 'cohorts': '^miami-backend-\w+$',
-        'translations': {
-            'en': {
-                'title': 'Backend with DJango',
-                'description': '...',
+        "translations": {
+            "en": {
+                "title": "Backend with DJango",
+                "description": "...",
             },
-            'es': {
-                'title': 'Backend con DJango',
-                'description': '...',
+            "es": {
+                "title": "Backend con DJango",
+                "description": 
"...", }, - } + }, }, { - 'slug': 'frontend-with-react', - 'private': False, - 'price_per_unit': 4000, - 'currency': 'USD', - 'groups': ['CLASSROOM'], - 'translations': { - 'en': { - 'title': 'Backend with DJango', - 'description': '...', + "slug": "frontend-with-react", + "private": False, + "price_per_unit": 4000, + "currency": "USD", + "groups": ["CLASSROOM"], + "translations": { + "en": { + "title": "Backend with DJango", + "description": "...", }, - 'es': { - 'title': 'Backend con DJango', - 'description': '...', + "es": { + "title": "Backend con DJango", + "description": "...", }, - } + }, }, ] class Command(BaseCommand): - help = 'Set currencies' + help = "Set currencies" def handle(self, *args, **options): # groups for service in SERVICES: s, _ = Service.objects.get_or_create( - slug=service['slug'], + slug=service["slug"], defaults={ - 'price_per_unit': service['price_per_unit'], - 'owner': None, - 'private': False, - 'groups': [], + "price_per_unit": service["price_per_unit"], + "owner": None, + "private": False, + "groups": [], }, ) - for lang in service['translations']: + for lang in service["translations"]: ServiceTranslation.objects.get_or_create( service=s, lang=lang, defaults={ - 'title': service['translations'][lang]['title'], - 'description': service['translations'][lang]['description'], + "title": service["translations"][lang]["title"], + "description": service["translations"][lang]["description"], }, ) diff --git a/breathecode/payments/migrations/0001_initial.py b/breathecode/payments/migrations/0001_initial.py index 011318e82..14c7c3286 100644 --- a/breathecode/payments/migrations/0001_initial.py +++ b/breathecode/payments/migrations/0001_initial.py @@ -12,329 +12,441 @@ class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ('admissions', '0047_merge_20220924_0611'), - ('auth', '0012_alter_user_first_name_max_length'), - ('mentorship', '0016_alter_mentorshipbill_status'), + ("admissions", "0047_merge_20220924_0611"), + ("auth", "0012_alter_user_first_name_max_length"), + ("mentorship", "0016_alter_mentorshipbill_status"), ] operations = [ migrations.CreateModel( - name='Bag', + name="Bag", fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('amount_per_month', models.FloatField(default=0)), - ('amount_per_quarter', models.FloatField(default=0)), - ('amount_per_half', models.FloatField(default=0)), - ('amount_per_year', models.FloatField(default=0)), - ('status', - models.CharField(choices=[('RENEWAL', 'Renewal'), ('CHECKING', 'Checking'), ('PAID', 'Paid')], - default='CHECKING', - max_length=8)), - ('type', models.CharField(choices=[('BAG', 'Bag'), ('PREVIEW', 'Preview')], default='BAG', - max_length=7)), - ('chosen_period', - models.CharField(choices=[('MONTH', 'Month'), ('QUARTER', 'Quarter'), ('HALF', 'Half'), - ('YEAR', 'Year')], - default='MONTH', - max_length=7)), - ('is_recurrent', models.BooleanField(default=False)), - ('was_delivered', models.BooleanField(default=False)), - ('token', models.CharField(blank=True, db_index=True, default=None, max_length=40, null=True)), - ('expires_at', models.DateTimeField(blank=True, default=None, null=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('academy', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='admissions.academy')), + ("id", models.BigAutoField(auto_created=True, primary_key=True, 
serialize=False, verbose_name="ID")), + ("amount_per_month", models.FloatField(default=0)), + ("amount_per_quarter", models.FloatField(default=0)), + ("amount_per_half", models.FloatField(default=0)), + ("amount_per_year", models.FloatField(default=0)), + ( + "status", + models.CharField( + choices=[("RENEWAL", "Renewal"), ("CHECKING", "Checking"), ("PAID", "Paid")], + default="CHECKING", + max_length=8, + ), + ), + ( + "type", + models.CharField(choices=[("BAG", "Bag"), ("PREVIEW", "Preview")], default="BAG", max_length=7), + ), + ( + "chosen_period", + models.CharField( + choices=[("MONTH", "Month"), ("QUARTER", "Quarter"), ("HALF", "Half"), ("YEAR", "Year")], + default="MONTH", + max_length=7, + ), + ), + ("is_recurrent", models.BooleanField(default=False)), + ("was_delivered", models.BooleanField(default=False)), + ("token", models.CharField(blank=True, db_index=True, default=None, max_length=40, null=True)), + ("expires_at", models.DateTimeField(blank=True, default=None, null=True)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("academy", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="admissions.academy")), ], options={ - 'abstract': False, + "abstract": False, }, ), migrations.CreateModel( - name='Consumable', + name="Consumable", fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('unit_type', models.CharField(choices=[('UNIT', 'Unit')], default='UNIT', max_length=10)), - ('how_many', models.IntegerField(default=-1)), - ('valid_until', models.DateTimeField(blank=True, default=None, null=True)), - ('cohort', - models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='admissions.cohort')), - ('mentorship_service', - models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='mentorship.mentorshipservice')), + ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("unit_type", models.CharField(choices=[("UNIT", "Unit")], default="UNIT", max_length=10)), + ("how_many", models.IntegerField(default=-1)), + ("valid_until", models.DateTimeField(blank=True, default=None, null=True)), + ( + "cohort", + models.ForeignKey( + blank=True, + default=None, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="admissions.cohort", + ), + ), + ( + "mentorship_service", + models.ForeignKey( + blank=True, + default=None, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="mentorship.mentorshipservice", + ), + ), ], options={ - 'abstract': False, + "abstract": False, }, ), migrations.CreateModel( - name='Currency', + name="Currency", fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('code', models.CharField(max_length=3, unique=True)), - ('name', models.CharField(max_length=20, unique=True)), - ('countries', - models.ManyToManyField(help_text='Countries that use this currency officially', - related_name='currencies', - to='admissions.Country')), + ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("code", models.CharField(max_length=3, unique=True)), + ("name", models.CharField(max_length=20, unique=True)), + ( + "countries", + models.ManyToManyField( + help_text="Countries that use this currency officially", + related_name="currencies", + 
to="admissions.Country", + ), + ), ], ), migrations.CreateModel( - name='Invoice', + name="Invoice", fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('amount', - models.FloatField( - default=0, - help_text='If amount is 0, transaction will not be sent to stripe or any other payment processor.') - ), - ('paid_at', models.DateTimeField()), - ('status', - models.CharField(choices=[('FULFILLED', 'Fulfilled'), ('REJECTED', 'Rejected'), ('PENDING', 'Pending'), - ('REFUNDED', 'Refunded'), ('DISPUTED_AS_FRAUD', 'Disputed as fraud')], - default='PENDING', - max_length=17)), - ('stripe_id', models.CharField(blank=True, default=None, max_length=32, null=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('academy', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='admissions.academy')), - ('bag', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='payments.bag')), - ('currency', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='payments.currency')), - ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), + ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "amount", + models.FloatField( + default=0, + help_text="If amount is 0, transaction will not be sent to stripe or any other payment processor.", + ), + ), + ("paid_at", models.DateTimeField()), + ( + "status", + models.CharField( + choices=[ + ("FULFILLED", "Fulfilled"), + ("REJECTED", "Rejected"), + ("PENDING", "Pending"), + ("REFUNDED", "Refunded"), + ("DISPUTED_AS_FRAUD", "Disputed as fraud"), + ], + default="PENDING", + max_length=17, + ), + ), + ("stripe_id", models.CharField(blank=True, default=None, max_length=32, null=True)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("academy", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="admissions.academy")), + ("bag", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="payments.bag")), + ("currency", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="payments.currency")), + ("user", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ], ), migrations.CreateModel( - name='Plan', + name="Plan", fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('price_per_month', models.FloatField(default=0)), - ('price_per_quarter', models.FloatField(default=0)), - ('price_per_half', models.FloatField(default=0)), - ('price_per_year', models.FloatField(default=0)), - ('slug', models.CharField(max_length=60, unique=True)), - ('status', - models.CharField(choices=[('DRAFT', 'Draft'), ('ACTIVE', 'Active'), ('UNLISTED', 'Unlisted'), - ('DELETED', 'Deleted'), ('DISCONTINUED', 'Discontinued')], - default='DRAFT', - max_length=12)), - ('pay_every', models.IntegerField(default=1)), - ('pay_every_unit', - models.CharField(choices=[('DAY', 'Day'), ('WEEK', 'Week'), ('MONTH', 'Month'), ('YEAR', 'Year')], - default='MONTH', - max_length=10)), - ('trial_duration', models.IntegerField(default=1)), - ('trial_duration_unit', - models.CharField(choices=[('DAY', 'Day'), ('WEEK', 'Week'), ('MONTH', 'Month'), ('YEAR', 'Year')], - default='MONTH', - max_length=10)), - ('currency', 
models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='payments.currency')), - ('owner', - models.ForeignKey(blank=True, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='admissions.academy')), + ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("price_per_month", models.FloatField(default=0)), + ("price_per_quarter", models.FloatField(default=0)), + ("price_per_half", models.FloatField(default=0)), + ("price_per_year", models.FloatField(default=0)), + ("slug", models.CharField(max_length=60, unique=True)), + ( + "status", + models.CharField( + choices=[ + ("DRAFT", "Draft"), + ("ACTIVE", "Active"), + ("UNLISTED", "Unlisted"), + ("DELETED", "Deleted"), + ("DISCONTINUED", "Discontinued"), + ], + default="DRAFT", + max_length=12, + ), + ), + ("pay_every", models.IntegerField(default=1)), + ( + "pay_every_unit", + models.CharField( + choices=[("DAY", "Day"), ("WEEK", "Week"), ("MONTH", "Month"), ("YEAR", "Year")], + default="MONTH", + max_length=10, + ), + ), + ("trial_duration", models.IntegerField(default=1)), + ( + "trial_duration_unit", + models.CharField( + choices=[("DAY", "Day"), ("WEEK", "Week"), ("MONTH", "Month"), ("YEAR", "Year")], + default="MONTH", + max_length=10, + ), + ), + ("currency", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="payments.currency")), + ( + "owner", + models.ForeignKey( + blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to="admissions.academy" + ), + ), ], options={ - 'abstract': False, + "abstract": False, }, ), migrations.CreateModel( - name='Service', + name="Service", fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('price_per_unit', models.FloatField(default=0)), - ('slug', models.CharField(max_length=60, unique=True)), - ('private', models.BooleanField(default=True)), - ('trial_duration', models.IntegerField(default=1)), - ('trial_duration_unit', - models.CharField(choices=[('DAY', 'Day'), ('WEEK', 'Week'), ('MONTH', 'Month'), ('YEAR', 'Year')], - default='MONTH', - max_length=10)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('currency', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='payments.currency')), - ('groups', models.ManyToManyField(to='auth.Group')), - ('owner', - models.ForeignKey(blank=True, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='admissions.academy')), + ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("price_per_unit", models.FloatField(default=0)), + ("slug", models.CharField(max_length=60, unique=True)), + ("private", models.BooleanField(default=True)), + ("trial_duration", models.IntegerField(default=1)), + ( + "trial_duration_unit", + models.CharField( + choices=[("DAY", "Day"), ("WEEK", "Week"), ("MONTH", "Month"), ("YEAR", "Year")], + default="MONTH", + max_length=10, + ), + ), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("currency", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="payments.currency")), + ("groups", models.ManyToManyField(to="auth.Group")), + ( + "owner", + models.ForeignKey( + blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to="admissions.academy" + ), + ), ], options={ - 'abstract': False, + "abstract": False, }, ), migrations.CreateModel( - 
name='ServiceItem', + name="ServiceItem", fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('unit_type', models.CharField(choices=[('UNIT', 'Unit')], default='UNIT', max_length=10)), - ('how_many', models.IntegerField(default=-1)), - ('is_renewable', models.BooleanField(default=False)), - ('renew_at', models.IntegerField(default=1)), - ('renew_at_unit', - models.CharField(choices=[('DAY', 'Day'), ('WEEK', 'Week'), ('MONTH', 'Month'), ('YEAR', 'Year')], - default='MONTH', - max_length=10)), - ('service', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='payments.service')), + ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("unit_type", models.CharField(choices=[("UNIT", "Unit")], default="UNIT", max_length=10)), + ("how_many", models.IntegerField(default=-1)), + ("is_renewable", models.BooleanField(default=False)), + ("renew_at", models.IntegerField(default=1)), + ( + "renew_at_unit", + models.CharField( + choices=[("DAY", "Day"), ("WEEK", "Week"), ("MONTH", "Month"), ("YEAR", "Year")], + default="MONTH", + max_length=10, + ), + ), + ("service", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="payments.service")), ], options={ - 'abstract': False, + "abstract": False, }, ), migrations.CreateModel( - name='Subscription', + name="Subscription", fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('paid_at', models.DateTimeField()), - ('status', - models.CharField(choices=[('FREE_TRIAL', 'Free trial'), ('ACTIVE', 'Active'), - ('CANCELLED', 'Cancelled'), ('DEPRECATED', 'Deprecated'), - ('PAYMENT_ISSUE', 'Payment issue'), ('ERROR', 'Error')], - default='ACTIVE', - max_length=13)), - ('status_message', models.CharField(blank=True, default=None, max_length=150, null=True)), - ('is_refundable', models.BooleanField(default=True)), - ('valid_until', models.DateTimeField()), - ('pay_every', models.IntegerField(default=1)), - ('pay_every_unit', - models.CharField(choices=[('DAY', 'Day'), ('WEEK', 'Week'), ('MONTH', 'Month'), ('YEAR', 'Year')], - default='MONTH', - max_length=10)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('academy', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='admissions.academy')), - ('invoices', models.ManyToManyField(to='payments.Invoice')), - ('plans', models.ManyToManyField(to='payments.Plan')), - ('service_items', models.ManyToManyField(to='payments.ServiceItem')), - ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), + ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("paid_at", models.DateTimeField()), + ( + "status", + models.CharField( + choices=[ + ("FREE_TRIAL", "Free trial"), + ("ACTIVE", "Active"), + ("CANCELLED", "Cancelled"), + ("DEPRECATED", "Deprecated"), + ("PAYMENT_ISSUE", "Payment issue"), + ("ERROR", "Error"), + ], + default="ACTIVE", + max_length=13, + ), + ), + ("status_message", models.CharField(blank=True, default=None, max_length=150, null=True)), + ("is_refundable", models.BooleanField(default=True)), + ("valid_until", models.DateTimeField()), + ("pay_every", models.IntegerField(default=1)), + ( + "pay_every_unit", + models.CharField( + choices=[("DAY", "Day"), ("WEEK", "Week"), ("MONTH", "Month"), ("YEAR", "Year")], + default="MONTH", + 
max_length=10, + ), + ), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("academy", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="admissions.academy")), + ("invoices", models.ManyToManyField(to="payments.Invoice")), + ("plans", models.ManyToManyField(to="payments.Plan")), + ("service_items", models.ManyToManyField(to="payments.ServiceItem")), + ("user", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ], ), migrations.CreateModel( - name='ServiceTranslation', + name="ServiceTranslation", fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('lang', - models.CharField(max_length=5, - validators=[breathecode.utils.validators.language.validate_language_code])), - ('title', models.CharField(max_length=60)), - ('description', models.CharField(max_length=255)), - ('service', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='payments.service')), + ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "lang", + models.CharField( + max_length=5, validators=[breathecode.utils.validators.language.validate_language_code] + ), + ), + ("title", models.CharField(max_length=60)), + ("description", models.CharField(max_length=255)), + ("service", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="payments.service")), ], ), migrations.CreateModel( - name='ServiceStockScheduler', + name="ServiceStockScheduler", fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('last_renew', models.DateTimeField(default=None, null=True)), - ('is_belongs_to_plan', models.BooleanField(default=False)), - ('consumables', models.ManyToManyField(to='payments.Consumable')), - ('service_item', - models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='payments.serviceitem')), - ('subscription', - models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='payments.subscription')), + ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("last_renew", models.DateTimeField(default=None, null=True)), + ("is_belongs_to_plan", models.BooleanField(default=False)), + ("consumables", models.ManyToManyField(to="payments.Consumable")), + ( + "service_item", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="payments.serviceitem"), + ), + ( + "subscription", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="payments.subscription"), + ), ], ), migrations.CreateModel( - name='PlanTranslation', + name="PlanTranslation", fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('lang', - models.CharField(max_length=5, - validators=[breathecode.utils.validators.language.validate_language_code])), - ('title', models.CharField(max_length=60)), - ('description', models.CharField(max_length=255)), - ('plan', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='payments.plan')), + ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "lang", + models.CharField( + max_length=5, validators=[breathecode.utils.validators.language.validate_language_code] + ), + ), + ("title", models.CharField(max_length=60)), + ("description", models.CharField(max_length=255)), + ("plan", 
models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="payments.plan")), ], ), migrations.AddField( - model_name='plan', - name='service_items', - field=models.ManyToManyField(to='payments.ServiceItem'), + model_name="plan", + name="service_items", + field=models.ManyToManyField(to="payments.ServiceItem"), ), migrations.CreateModel( - name='PaymentContact', + name="PaymentContact", fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('stripe_id', models.CharField(max_length=20)), - ('user', - models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, - related_name='payment_contact', - to=settings.AUTH_USER_MODEL)), + ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("stripe_id", models.CharField(max_length=20)), + ( + "user", + models.OneToOneField( + on_delete=django.db.models.deletion.CASCADE, + related_name="payment_contact", + to=settings.AUTH_USER_MODEL, + ), + ), ], ), migrations.CreateModel( - name='Fixture', + name="Fixture", fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('cohort_pattern', models.CharField(blank=True, default=None, max_length=80, null=True)), - ('renew_every', models.IntegerField(default=1)), - ('renew_every_unit', - models.CharField(choices=[('DAY', 'Day'), ('WEEK', 'Week'), ('MONTH', 'Month'), ('YEAR', 'Year')], - default='MONTH', - max_length=10)), - ('academy', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='admissions.academy')), - ('cohorts', models.ManyToManyField(to='admissions.Cohort')), - ('mentorship_services', models.ManyToManyField(to='mentorship.MentorshipService')), - ('service', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='payments.service')), + ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("cohort_pattern", models.CharField(blank=True, default=None, max_length=80, null=True)), + ("renew_every", models.IntegerField(default=1)), + ( + "renew_every_unit", + models.CharField( + choices=[("DAY", "Day"), ("WEEK", "Week"), ("MONTH", "Month"), ("YEAR", "Year")], + default="MONTH", + max_length=10, + ), + ), + ("academy", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="admissions.academy")), + ("cohorts", models.ManyToManyField(to="admissions.Cohort")), + ("mentorship_services", models.ManyToManyField(to="mentorship.MentorshipService")), + ("service", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="payments.service")), ], ), migrations.CreateModel( - name='FinancialReputation', + name="FinancialReputation", fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('in_4geeks', - models.CharField(choices=[('FULFILLED', 'Fulfilled'), ('REJECTED', 'Rejected'), ('PENDING', 'Pending'), - ('REFUNDED', 'Refunded'), ('DISPUTED_AS_FRAUD', 'Disputed as fraud')], - default='GOOD', - max_length=17)), - ('in_stripe', - models.CharField(choices=[('FULFILLED', 'Fulfilled'), ('REJECTED', 'Rejected'), ('PENDING', 'Pending'), - ('REFUNDED', 'Refunded'), ('DISPUTED_AS_FRAUD', 'Disputed as fraud')], - default='GOOD', - max_length=17)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('user', - models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, - related_name='reputation', - 
to=settings.AUTH_USER_MODEL)), + ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "in_4geeks", + models.CharField( + choices=[ + ("FULFILLED", "Fulfilled"), + ("REJECTED", "Rejected"), + ("PENDING", "Pending"), + ("REFUNDED", "Refunded"), + ("DISPUTED_AS_FRAUD", "Disputed as fraud"), + ], + default="GOOD", + max_length=17, + ), + ), + ( + "in_stripe", + models.CharField( + choices=[ + ("FULFILLED", "Fulfilled"), + ("REJECTED", "Rejected"), + ("PENDING", "Pending"), + ("REFUNDED", "Refunded"), + ("DISPUTED_AS_FRAUD", "Disputed as fraud"), + ], + default="GOOD", + max_length=17, + ), + ), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ( + "user", + models.OneToOneField( + on_delete=django.db.models.deletion.CASCADE, + related_name="reputation", + to=settings.AUTH_USER_MODEL, + ), + ), ], ), migrations.AddField( - model_name='consumable', - name='service_item', - field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='payments.serviceitem'), + model_name="consumable", + name="service_item", + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="payments.serviceitem"), ), migrations.AddField( - model_name='consumable', - name='user', + model_name="consumable", + name="user", field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL), ), migrations.AddField( - model_name='bag', - name='currency', - field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='payments.currency'), + model_name="bag", + name="currency", + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="payments.currency"), ), migrations.AddField( - model_name='bag', - name='plans', - field=models.ManyToManyField(to='payments.Plan'), + model_name="bag", + name="plans", + field=models.ManyToManyField(to="payments.Plan"), ), migrations.AddField( - model_name='bag', - name='service_items', - field=models.ManyToManyField(to='payments.ServiceItem'), + model_name="bag", + name="service_items", + field=models.ManyToManyField(to="payments.ServiceItem"), ), migrations.AddField( - model_name='bag', - name='user', + model_name="bag", + name="user", field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL), ), ] diff --git a/breathecode/payments/migrations/0002_consumable_event_type.py b/breathecode/payments/migrations/0002_consumable_event_type.py index dc9700b03..ca98198d2 100644 --- a/breathecode/payments/migrations/0002_consumable_event_type.py +++ b/breathecode/payments/migrations/0002_consumable_event_type.py @@ -7,18 +7,16 @@ class Migration(migrations.Migration): dependencies = [ - ('events', '0033_auto_20221208_1246'), - ('payments', '0001_initial'), + ("events", "0033_auto_20221208_1246"), + ("payments", "0001_initial"), ] operations = [ migrations.AddField( - model_name='consumable', - name='event_type', - field=models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='events.eventtype'), + model_name="consumable", + name="event_type", + field=models.ForeignKey( + blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to="events.eventtype" + ), ), ] diff --git a/breathecode/payments/migrations/0003_auto_20221213_0942.py b/breathecode/payments/migrations/0003_auto_20221213_0942.py index de709783b..eaf4216ac 100644 --- a/breathecode/payments/migrations/0003_auto_20221213_0942.py 
+++ b/breathecode/payments/migrations/0003_auto_20221213_0942.py @@ -8,232 +8,259 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0048_academy_main_currency'), - ('mentorship', '0017_auto_20221130_0504'), - ('auth', '0012_alter_user_first_name_max_length'), + ("admissions", "0048_academy_main_currency"), + ("mentorship", "0017_auto_20221130_0504"), + ("auth", "0012_alter_user_first_name_max_length"), migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ('payments', '0002_consumable_event_type'), + ("payments", "0002_consumable_event_type"), ] operations = [ migrations.CreateModel( - name='FinancingOption', + name="FinancingOption", fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('monthly_price', models.IntegerField(default=1)), - ('how_many_months', models.IntegerField(default=1)), + ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("monthly_price", models.IntegerField(default=1)), + ("how_many_months", models.IntegerField(default=1)), ], ), migrations.CreateModel( - name='PaymentServiceScheduler', + name="PaymentServiceScheduler", fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('cohort_pattern', models.CharField(blank=True, default=None, max_length=80, null=True)), - ('renew_every', models.IntegerField(default=1)), - ('renew_every_unit', - models.CharField(choices=[('DAY', 'Day'), ('WEEK', 'Week'), ('MONTH', 'Month'), ('YEAR', 'Year')], - default='MONTH', - max_length=10)), - ('academy', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='admissions.academy')), - ('cohorts', models.ManyToManyField(blank=True, to='admissions.Cohort')), - ('mentorship_services', models.ManyToManyField(blank=True, to='mentorship.MentorshipService')), + ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("cohort_pattern", models.CharField(blank=True, default=None, max_length=80, null=True)), + ("renew_every", models.IntegerField(default=1)), + ( + "renew_every_unit", + models.CharField( + choices=[("DAY", "Day"), ("WEEK", "Week"), ("MONTH", "Month"), ("YEAR", "Year")], + default="MONTH", + max_length=10, + ), + ), + ("academy", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="admissions.academy")), + ("cohorts", models.ManyToManyField(blank=True, to="admissions.Cohort")), + ("mentorship_services", models.ManyToManyField(blank=True, to="mentorship.MentorshipService")), ], ), migrations.CreateModel( - name='PlanFinancing', + name="PlanFinancing", fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('status', - models.CharField(choices=[('FREE_TRIAL', 'Free trial'), ('ACTIVE', 'Active'), - ('CANCELLED', 'Cancelled'), ('DEPRECATED', 'Deprecated'), - ('PAYMENT_ISSUE', 'Payment issue'), ('ERROR', 'Error')], - default='ACTIVE', - max_length=13)), - ('status_message', models.CharField(blank=True, default=None, max_length=150, null=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('paid_at', models.DateTimeField()), - ('pay_until', models.DateTimeField()), - ('academy', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='admissions.academy')), - ('invoices', models.ManyToManyField(blank=True, to='payments.Invoice')), + ("id", 
models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "status", + models.CharField( + choices=[ + ("FREE_TRIAL", "Free trial"), + ("ACTIVE", "Active"), + ("CANCELLED", "Cancelled"), + ("DEPRECATED", "Deprecated"), + ("PAYMENT_ISSUE", "Payment issue"), + ("ERROR", "Error"), + ], + default="ACTIVE", + max_length=13, + ), + ), + ("status_message", models.CharField(blank=True, default=None, max_length=150, null=True)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("paid_at", models.DateTimeField()), + ("pay_until", models.DateTimeField()), + ("academy", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="admissions.academy")), + ("invoices", models.ManyToManyField(blank=True, to="payments.Invoice")), ], options={ - 'abstract': False, + "abstract": False, }, ), migrations.CreateModel( - name='PlanServiceItem', + name="PlanServiceItem", fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), ], ), migrations.CreateModel( - name='SubscriptionServiceItem', + name="SubscriptionServiceItem", fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('service_item', - models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='payments.serviceitem')), + ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "service_item", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="payments.serviceitem"), + ), ], ), migrations.RemoveField( - model_name='servicestockscheduler', - name='is_belongs_to_plan', + model_name="servicestockscheduler", + name="is_belongs_to_plan", ), migrations.RemoveField( - model_name='servicestockscheduler', - name='service_item', + model_name="servicestockscheduler", + name="service_item", ), migrations.RemoveField( - model_name='servicestockscheduler', - name='subscription', + model_name="servicestockscheduler", + name="subscription", ), migrations.AddField( - model_name='plan', - name='is_onboarding', + model_name="plan", + name="is_onboarding", field=models.BooleanField(default=False), ), migrations.AlterField( - model_name='bag', - name='plans', - field=models.ManyToManyField(blank=True, to='payments.Plan'), + model_name="bag", + name="plans", + field=models.ManyToManyField(blank=True, to="payments.Plan"), ), migrations.AlterField( - model_name='bag', - name='service_items', - field=models.ManyToManyField(blank=True, to='payments.ServiceItem'), + model_name="bag", + name="service_items", + field=models.ManyToManyField(blank=True, to="payments.ServiceItem"), ), migrations.AlterField( - model_name='currency', - name='countries', - field=models.ManyToManyField(blank=True, - help_text='Countries that use this currency officially', - related_name='currencies', - to='admissions.Country'), + model_name="currency", + name="countries", + field=models.ManyToManyField( + blank=True, + help_text="Countries that use this currency officially", + related_name="currencies", + to="admissions.Country", + ), ), migrations.AlterField( - model_name='service', - name='groups', - field=models.ManyToManyField(blank=True, to='auth.Group'), + model_name="service", + name="groups", + field=models.ManyToManyField(blank=True, to="auth.Group"), ), migrations.AlterField( - 
model_name='servicestockscheduler', - name='consumables', - field=models.ManyToManyField(blank=True, to='payments.Consumable'), + model_name="servicestockscheduler", + name="consumables", + field=models.ManyToManyField(blank=True, to="payments.Consumable"), ), migrations.AlterField( - model_name='servicestockscheduler', - name='last_renew', + model_name="servicestockscheduler", + name="last_renew", field=models.DateTimeField(blank=True, default=None, null=True), ), migrations.AlterField( - model_name='subscription', - name='invoices', - field=models.ManyToManyField(blank=True, to='payments.Invoice'), + model_name="subscription", + name="invoices", + field=models.ManyToManyField(blank=True, to="payments.Invoice"), ), migrations.AlterField( - model_name='subscription', - name='plans', - field=models.ManyToManyField(blank=True, to='payments.Plan'), + model_name="subscription", + name="plans", + field=models.ManyToManyField(blank=True, to="payments.Plan"), + ), + migrations.DeleteModel( + name="Fixture", ), - migrations.DeleteModel(name='Fixture', ), migrations.AddField( - model_name='subscriptionserviceitem', - name='subscription', - field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='payments.subscription'), + model_name="subscriptionserviceitem", + name="subscription", + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="payments.subscription"), ), migrations.AddField( - model_name='planserviceitem', - name='plan', - field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='payments.plan'), + model_name="planserviceitem", + name="plan", + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="payments.plan"), ), migrations.AddField( - model_name='planserviceitem', - name='plan_financing', - field=models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='payments.planfinancing'), + model_name="planserviceitem", + name="plan_financing", + field=models.ForeignKey( + blank=True, + default=None, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="payments.planfinancing", + ), ), migrations.AddField( - model_name='planserviceitem', - name='service_item', - field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='payments.serviceitem'), + model_name="planserviceitem", + name="service_item", + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="payments.serviceitem"), ), migrations.AddField( - model_name='planserviceitem', - name='subscription', - field=models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='payments.subscription'), + model_name="planserviceitem", + name="subscription", + field=models.ForeignKey( + blank=True, + default=None, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="payments.subscription", + ), ), migrations.AddField( - model_name='planfinancing', - name='plans', - field=models.ManyToManyField(blank=True, to='payments.Plan'), + model_name="planfinancing", + name="plans", + field=models.ManyToManyField(blank=True, to="payments.Plan"), ), migrations.AddField( - model_name='planfinancing', - name='user', + model_name="planfinancing", + name="user", field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL), ), migrations.AddField( - model_name='paymentservicescheduler', - name='service', - field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='payments.service'), + 
model_name="paymentservicescheduler", + name="service", + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="payments.service"), ), migrations.AddField( - model_name='financingoption', - name='currency', - field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='payments.currency'), + model_name="financingoption", + name="currency", + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="payments.currency"), ), migrations.AddField( - model_name='plan', - name='financing_options', - field=models.ManyToManyField(blank=True, to='payments.FinancingOption'), + model_name="plan", + name="financing_options", + field=models.ManyToManyField(blank=True, to="payments.FinancingOption"), ), migrations.AddField( - model_name='servicestockscheduler', - name='plan_handler', - field=models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='payments.planserviceitem'), + model_name="servicestockscheduler", + name="plan_handler", + field=models.ForeignKey( + blank=True, + default=None, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="payments.planserviceitem", + ), ), migrations.AddField( - model_name='servicestockscheduler', - name='subscription_handler', - field=models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='payments.subscriptionserviceitem'), + model_name="servicestockscheduler", + name="subscription_handler", + field=models.ForeignKey( + blank=True, + default=None, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="payments.subscriptionserviceitem", + ), ), migrations.RemoveField( - model_name='plan', - name='service_items', - field=models.ManyToManyField(blank=True, through='payments.PlanServiceItem', to='payments.ServiceItem'), + model_name="plan", + name="service_items", + field=models.ManyToManyField(blank=True, through="payments.PlanServiceItem", to="payments.ServiceItem"), ), migrations.AddField( - model_name='plan', - name='service_items', - field=models.ManyToManyField(blank=True, through='payments.PlanServiceItem', to='payments.ServiceItem'), + model_name="plan", + name="service_items", + field=models.ManyToManyField(blank=True, through="payments.PlanServiceItem", to="payments.ServiceItem"), ), migrations.RemoveField( - model_name='subscription', - name='service_items', - field=models.ManyToManyField(blank=True, - through='payments.SubscriptionServiceItem', - to='payments.ServiceItem'), + model_name="subscription", + name="service_items", + field=models.ManyToManyField( + blank=True, through="payments.SubscriptionServiceItem", to="payments.ServiceItem" + ), ), migrations.AddField( - model_name='subscription', - name='service_items', - field=models.ManyToManyField(blank=True, - through='payments.SubscriptionServiceItem', - to='payments.ServiceItem'), + model_name="subscription", + name="service_items", + field=models.ManyToManyField( + blank=True, through="payments.SubscriptionServiceItem", to="payments.ServiceItem" + ), ), ] diff --git a/breathecode/payments/migrations/0004_auto_20221214_1309.py b/breathecode/payments/migrations/0004_auto_20221214_1309.py index 7745d26f4..4a5f28d8b 100644 --- a/breathecode/payments/migrations/0004_auto_20221214_1309.py +++ b/breathecode/payments/migrations/0004_auto_20221214_1309.py @@ -8,63 +8,80 @@ class Migration(migrations.Migration): dependencies = [ - ('payments', '0003_auto_20221213_0942'), + ("payments", "0003_auto_20221213_0942"), ] operations = [ 
migrations.RemoveField( - model_name='planserviceitem', - name='plan_financing', + model_name="planserviceitem", + name="plan_financing", ), migrations.RemoveField( - model_name='planserviceitem', - name='subscription', + model_name="planserviceitem", + name="subscription", ), migrations.CreateModel( - name='ServiceItemFeature', + name="ServiceItemFeature", fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('unit_type', models.CharField(choices=[('UNIT', 'Unit')], default='UNIT', max_length=10)), - ('how_many', models.IntegerField(default=-1)), - ('lang', - models.CharField(max_length=5, - validators=[breathecode.utils.validators.language.validate_language_code])), - ('description', models.CharField(max_length=255)), - ('one_line_desc', models.CharField(max_length=30)), - ('service_item', - models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='payments.serviceitem')), + ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("unit_type", models.CharField(choices=[("UNIT", "Unit")], default="UNIT", max_length=10)), + ("how_many", models.IntegerField(default=-1)), + ( + "lang", + models.CharField( + max_length=5, validators=[breathecode.utils.validators.language.validate_language_code] + ), + ), + ("description", models.CharField(max_length=255)), + ("one_line_desc", models.CharField(max_length=30)), + ( + "service_item", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="payments.serviceitem"), + ), ], options={ - 'abstract': False, + "abstract": False, }, ), migrations.CreateModel( - name='PlanServiceItemHandler', + name="PlanServiceItemHandler", fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('handler', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, - to='payments.planserviceitem')), - ('plan_financing', - models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='payments.planfinancing')), - ('subscription', - models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='payments.subscription')), + ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "handler", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="payments.planserviceitem"), + ), + ( + "plan_financing", + models.ForeignKey( + blank=True, + default=None, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="payments.planfinancing", + ), + ), + ( + "subscription", + models.ForeignKey( + blank=True, + default=None, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="payments.subscription", + ), + ), ], ), migrations.AlterField( - model_name='servicestockscheduler', - name='plan_handler', - field=models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='payments.planserviceitemhandler'), + model_name="servicestockscheduler", + name="plan_handler", + field=models.ForeignKey( + blank=True, + default=None, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="payments.planserviceitemhandler", + ), ), ] diff --git a/breathecode/payments/migrations/0005_auto_20221214_1411.py b/breathecode/payments/migrations/0005_auto_20221214_1411.py index 027da3264..b63d6cdcb 100644 --- a/breathecode/payments/migrations/0005_auto_20221214_1411.py +++ 
b/breathecode/payments/migrations/0005_auto_20221214_1411.py @@ -6,16 +6,16 @@ class Migration(migrations.Migration): dependencies = [ - ('payments', '0004_auto_20221214_1309'), + ("payments", "0004_auto_20221214_1309"), ] operations = [ migrations.RemoveField( - model_name='serviceitemfeature', - name='how_many', + model_name="serviceitemfeature", + name="how_many", ), migrations.RemoveField( - model_name='serviceitemfeature', - name='unit_type', + model_name="serviceitemfeature", + name="unit_type", ), ] diff --git a/breathecode/payments/migrations/0006_auto_20221223_2218.py b/breathecode/payments/migrations/0006_auto_20221223_2218.py index cb66140f4..790124ea4 100644 --- a/breathecode/payments/migrations/0006_auto_20221223_2218.py +++ b/breathecode/payments/migrations/0006_auto_20221223_2218.py @@ -6,42 +6,42 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0048_academy_main_currency'), - ('payments', '0005_auto_20221214_1411'), + ("admissions", "0048_academy_main_currency"), + ("payments", "0005_auto_20221214_1411"), ] operations = [ migrations.RemoveField( - model_name='plan', - name='pay_every', + model_name="plan", + name="pay_every", ), migrations.RemoveField( - model_name='plan', - name='pay_every_unit', + model_name="plan", + name="pay_every_unit", ), migrations.AddField( - model_name='bag', - name='selected_cohorts', - field=models.ManyToManyField(blank=True, to='admissions.Cohort'), + model_name="bag", + name="selected_cohorts", + field=models.ManyToManyField(blank=True, to="admissions.Cohort"), ), migrations.AlterField( - model_name='plan', - name='price_per_half', + model_name="plan", + name="price_per_half", field=models.FloatField(blank=True, default=None, null=True), ), migrations.AlterField( - model_name='plan', - name='price_per_month', + model_name="plan", + name="price_per_month", field=models.FloatField(blank=True, default=None, null=True), ), migrations.AlterField( - model_name='plan', - name='price_per_quarter', + model_name="plan", + name="price_per_quarter", field=models.FloatField(blank=True, default=None, null=True), ), migrations.AlterField( - model_name='plan', - name='price_per_year', + model_name="plan", + name="price_per_year", field=models.FloatField(blank=True, default=None, null=True), ), ] diff --git a/breathecode/payments/migrations/0007_auto_20221229_1133.py b/breathecode/payments/migrations/0007_auto_20221229_1133.py index 25e56c9df..ac0bc1681 100644 --- a/breathecode/payments/migrations/0007_auto_20221229_1133.py +++ b/breathecode/payments/migrations/0007_auto_20221229_1133.py @@ -8,58 +8,62 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0048_academy_main_currency'), - ('payments', '0006_auto_20221223_2218'), + ("admissions", "0048_academy_main_currency"), + ("payments", "0006_auto_20221223_2218"), ] operations = [ migrations.CreateModel( - name='PlanOffer', + name="PlanOffer", fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('from_syllabus', models.ManyToManyField(to='admissions.Syllabus')), + ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("from_syllabus", models.ManyToManyField(to="admissions.Syllabus")), ], ), migrations.AddField( - model_name='bag', - name='how_many_installments', + model_name="bag", + name="how_many_installments", field=models.IntegerField(default=0), ), migrations.AddField( - model_name='plan', - name='is_renewable', + model_name="plan", + 
name="is_renewable", field=models.BooleanField( default=True, - help_text='Is if true, it will create a reneweval subscription instead of a plan financing'), + help_text="Is if true, it will create a reneweval subscription instead of a plan financing", + ), ), migrations.AlterField( - model_name='plan', - name='financing_options', - field=models.ManyToManyField(blank=True, - help_text='If the plan is renew, it would be ignore', - to='payments.FinancingOption'), + model_name="plan", + name="financing_options", + field=models.ManyToManyField( + blank=True, help_text="If the plan is renew, it would be ignore", to="payments.FinancingOption" + ), ), migrations.CreateModel( - name='PlanOfferTranslation', + name="PlanOfferTranslation", fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('lang', - models.CharField(max_length=5, - validators=[breathecode.utils.validators.language.validate_language_code])), - ('title', models.CharField(max_length=60)), - ('description', models.CharField(max_length=255)), - ('short_description', models.CharField(max_length=255)), - ('offer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='payments.planoffer')), + ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "lang", + models.CharField( + max_length=5, validators=[breathecode.utils.validators.language.validate_language_code] + ), + ), + ("title", models.CharField(max_length=60)), + ("description", models.CharField(max_length=255)), + ("short_description", models.CharField(max_length=255)), + ("offer", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="payments.planoffer")), ], ), migrations.AddField( - model_name='planoffer', - name='original_plan', - field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='payments.plan'), + model_name="planoffer", + name="original_plan", + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="payments.plan"), ), migrations.AddField( - model_name='planoffer', - name='suggested_plans', - field=models.ManyToManyField(related_name='_payments_planoffer_suggested_plans_+', to='payments.Plan'), + model_name="planoffer", + name="suggested_plans", + field=models.ManyToManyField(related_name="_payments_planoffer_suggested_plans_+", to="payments.Plan"), ), ] diff --git a/breathecode/payments/migrations/0008_auto_20221230_1044.py b/breathecode/payments/migrations/0008_auto_20221230_1044.py index 4d93297f4..5e41b61f7 100644 --- a/breathecode/payments/migrations/0008_auto_20221230_1044.py +++ b/breathecode/payments/migrations/0008_auto_20221230_1044.py @@ -6,18 +6,18 @@ class Migration(migrations.Migration): dependencies = [ - ('payments', '0007_auto_20221229_1133'), + ("payments", "0007_auto_20221229_1133"), ] operations = [ migrations.AddField( - model_name='plan', - name='schedulers', - field=models.ManyToManyField(blank=True, to='payments.PaymentServiceScheduler'), + model_name="plan", + name="schedulers", + field=models.ManyToManyField(blank=True, to="payments.PaymentServiceScheduler"), ), migrations.AlterField( - model_name='financingoption', - name='monthly_price', + model_name="financingoption", + name="monthly_price", field=models.FloatField(default=1), ), ] diff --git a/breathecode/payments/migrations/0009_auto_20221231_0202.py b/breathecode/payments/migrations/0009_auto_20221231_0202.py index 99b143f51..f4aa48f03 100644 --- a/breathecode/payments/migrations/0009_auto_20221231_0202.py +++ 
b/breathecode/payments/migrations/0009_auto_20221231_0202.py @@ -7,43 +7,44 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0048_academy_main_currency'), - ('mentorship', '0017_auto_20221130_0504'), - ('payments', '0008_auto_20221230_1044'), + ("admissions", "0048_academy_main_currency"), + ("mentorship", "0017_auto_20221130_0504"), + ("payments", "0008_auto_20221230_1044"), ] operations = [ migrations.CreateModel( - name='MentorshipServiceSet', + name="MentorshipServiceSet", fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('slug', models.SlugField(max_length=100, unique=True)), - ('name', models.CharField(max_length=150)), - ('academy', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='admissions.academy')), - ('mentorship_services', models.ManyToManyField(blank=True, to='mentorship.MentorshipService')), + ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("slug", models.SlugField(max_length=100, unique=True)), + ("name", models.CharField(max_length=150)), + ("academy", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="admissions.academy")), + ("mentorship_services", models.ManyToManyField(blank=True, to="mentorship.MentorshipService")), ], ), migrations.RemoveField( - model_name='plan', - name='schedulers', + model_name="plan", + name="schedulers", ), migrations.AddField( - model_name='planserviceitem', - name='cohort_pattern', + model_name="planserviceitem", + name="cohort_pattern", field=models.CharField(blank=True, default=None, max_length=80, null=True), ), migrations.AddField( - model_name='planserviceitem', - name='cohorts', - field=models.ManyToManyField(blank=True, to='admissions.Cohort'), + model_name="planserviceitem", + name="cohorts", + field=models.ManyToManyField(blank=True, to="admissions.Cohort"), + ), + migrations.DeleteModel( + name="PaymentServiceScheduler", ), - migrations.DeleteModel(name='PaymentServiceScheduler', ), migrations.AddField( - model_name='planserviceitem', - name='mentorship_service_set', - field=models.ForeignKey(blank=True, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='payments.mentorshipserviceset'), + model_name="planserviceitem", + name="mentorship_service_set", + field=models.ForeignKey( + blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to="payments.mentorshipserviceset" + ), ), ] diff --git a/breathecode/payments/migrations/0010_asyncconsumable.py b/breathecode/payments/migrations/0010_asyncconsumable.py index a0acba2b2..6ef00bec0 100644 --- a/breathecode/payments/migrations/0010_asyncconsumable.py +++ b/breathecode/payments/migrations/0010_asyncconsumable.py @@ -7,19 +7,19 @@ class Migration(migrations.Migration): dependencies = [ - ('payments', '0009_auto_20221231_0202'), + ("payments", "0009_auto_20221231_0202"), ] operations = [ migrations.CreateModel( - name='AsyncConsumable', + name="AsyncConsumable", fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('expires_at', models.DateTimeField()), - ('path', models.CharField(max_length=200)), - ('related_pk', models.CharField(blank=True, default=None, max_length=200, null=True)), - ('related_slug', models.CharField(blank=True, default=None, max_length=200, null=True)), - ('service', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='payments.service')), + ("id", 
models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("expires_at", models.DateTimeField()), + ("path", models.CharField(max_length=200)), + ("related_pk", models.CharField(blank=True, default=None, max_length=200, null=True)), + ("related_slug", models.CharField(blank=True, default=None, max_length=200, null=True)), + ("service", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="payments.service")), ], ), ] diff --git a/breathecode/payments/migrations/0011_auto_20230106_0029.py b/breathecode/payments/migrations/0011_auto_20230106_0029.py index 2b46ca766..36512d7be 100644 --- a/breathecode/payments/migrations/0011_auto_20230106_0029.py +++ b/breathecode/payments/migrations/0011_auto_20230106_0029.py @@ -9,30 +9,38 @@ class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ('payments', '0010_asyncconsumable'), + ("payments", "0010_asyncconsumable"), ] operations = [ migrations.CreateModel( - name='ConsumptionSession', + name="ConsumptionSession", fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('eta', models.DateTimeField()), - ('duration', models.DurationField()), - ('how_many', models.FloatField(default=0)), - ('status', - models.CharField(choices=[('PENDING', 'Pending'), ('DONE', 'Done'), ('CANCELLED', 'Cancelled')], - default='PENDING', - max_length=12)), - ('was_discounted', models.BooleanField(default=False)), - ('request', models.JSONField()), - ('path', models.CharField(blank=True, max_length=200)), - ('related_id', models.IntegerField(blank=True, default=None, max_length=200, null=True)), - ('related_slug', models.CharField(blank=True, default=None, max_length=200, null=True)), - ('consumable', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, - to='payments.consumable')), - ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), + ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("eta", models.DateTimeField()), + ("duration", models.DurationField()), + ("how_many", models.FloatField(default=0)), + ( + "status", + models.CharField( + choices=[("PENDING", "Pending"), ("DONE", "Done"), ("CANCELLED", "Cancelled")], + default="PENDING", + max_length=12, + ), + ), + ("was_discounted", models.BooleanField(default=False)), + ("request", models.JSONField()), + ("path", models.CharField(blank=True, max_length=200)), + ("related_id", models.IntegerField(blank=True, default=None, max_length=200, null=True)), + ("related_slug", models.CharField(blank=True, default=None, max_length=200, null=True)), + ( + "consumable", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="payments.consumable"), + ), + ("user", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ], ), - migrations.DeleteModel(name='AsyncConsumable', ), + migrations.DeleteModel( + name="AsyncConsumable", + ), ] diff --git a/breathecode/payments/migrations/0012_auto_20230115_0509.py b/breathecode/payments/migrations/0012_auto_20230115_0509.py index 31b0c70c9..bff0b73cc 100644 --- a/breathecode/payments/migrations/0012_auto_20230115_0509.py +++ b/breathecode/payments/migrations/0012_auto_20230115_0509.py @@ -7,18 +7,18 @@ class Migration(migrations.Migration): dependencies = [ - ('payments', '0011_auto_20230106_0029'), + ("payments", "0011_auto_20230106_0029"), ] operations = [ 
migrations.AlterField( - model_name='consumptionsession', - name='duration', + model_name="consumptionsession", + name="duration", field=models.DurationField(default=datetime.timedelta), ), migrations.AlterField( - model_name='consumptionsession', - name='request', + model_name="consumptionsession", + name="request", field=models.JSONField(blank=True, default=dict), ), ] diff --git a/breathecode/payments/migrations/0012_auto_20230117_2124.py b/breathecode/payments/migrations/0012_auto_20230117_2124.py index 564ca09d8..8beae246a 100644 --- a/breathecode/payments/migrations/0012_auto_20230117_2124.py +++ b/breathecode/payments/migrations/0012_auto_20230117_2124.py @@ -7,18 +7,18 @@ class Migration(migrations.Migration): dependencies = [ - ('payments', '0011_auto_20230106_0029'), + ("payments", "0011_auto_20230106_0029"), ] operations = [ migrations.AlterField( - model_name='consumptionsession', - name='duration', + model_name="consumptionsession", + name="duration", field=models.DurationField(default=datetime.timedelta), ), migrations.AlterField( - model_name='consumptionsession', - name='request', + model_name="consumptionsession", + name="request", field=models.JSONField(blank=True, default=dict), ), ] diff --git a/breathecode/payments/migrations/0012_auto_20230123_1621.py b/breathecode/payments/migrations/0012_auto_20230123_1621.py index d72aa8d30..07b995de6 100644 --- a/breathecode/payments/migrations/0012_auto_20230123_1621.py +++ b/breathecode/payments/migrations/0012_auto_20230123_1621.py @@ -7,18 +7,18 @@ class Migration(migrations.Migration): dependencies = [ - ('payments', '0011_auto_20230106_0029'), + ("payments", "0011_auto_20230106_0029"), ] operations = [ migrations.AlterField( - model_name='consumptionsession', - name='duration', + model_name="consumptionsession", + name="duration", field=models.DurationField(default=datetime.timedelta), ), migrations.AlterField( - model_name='consumptionsession', - name='request', + model_name="consumptionsession", + name="request", field=models.JSONField(blank=True, default=dict), ), ] diff --git a/breathecode/payments/migrations/0013_currency_decimals.py b/breathecode/payments/migrations/0013_currency_decimals.py index c493149e1..6ce5da808 100644 --- a/breathecode/payments/migrations/0013_currency_decimals.py +++ b/breathecode/payments/migrations/0013_currency_decimals.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('payments', '0012_auto_20230115_0509'), + ("payments", "0012_auto_20230115_0509"), ] operations = [ migrations.AddField( - model_name='currency', - name='decimals', + model_name="currency", + name="decimals", field=models.IntegerField(default=0), ), ] diff --git a/breathecode/payments/migrations/0013_merge_0012_auto_20230115_0509_0012_auto_20230117_2124.py b/breathecode/payments/migrations/0013_merge_0012_auto_20230115_0509_0012_auto_20230117_2124.py index 6b4ecf06f..496e9a792 100644 --- a/breathecode/payments/migrations/0013_merge_0012_auto_20230115_0509_0012_auto_20230117_2124.py +++ b/breathecode/payments/migrations/0013_merge_0012_auto_20230115_0509_0012_auto_20230117_2124.py @@ -6,8 +6,8 @@ class Migration(migrations.Migration): dependencies = [ - ('payments', '0012_auto_20230115_0509'), - ('payments', '0012_auto_20230117_2124'), + ("payments", "0012_auto_20230115_0509"), + ("payments", "0012_auto_20230117_2124"), ] operations = [] diff --git a/breathecode/payments/migrations/0013_rename_valid_until_subscription_next_payment_at.py 
b/breathecode/payments/migrations/0013_rename_valid_until_subscription_next_payment_at.py index f27a89a63..50a819fe3 100644 --- a/breathecode/payments/migrations/0013_rename_valid_until_subscription_next_payment_at.py +++ b/breathecode/payments/migrations/0013_rename_valid_until_subscription_next_payment_at.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('payments', '0012_auto_20230115_0509'), + ("payments", "0012_auto_20230115_0509"), ] operations = [ migrations.RenameField( - model_name='subscription', - old_name='valid_until', - new_name='next_payment_at', + model_name="subscription", + old_name="valid_until", + new_name="next_payment_at", ), ] diff --git a/breathecode/payments/migrations/0014_merge_20230120_2311.py b/breathecode/payments/migrations/0014_merge_20230120_2311.py index 482350892..ecb1f384e 100644 --- a/breathecode/payments/migrations/0014_merge_20230120_2311.py +++ b/breathecode/payments/migrations/0014_merge_20230120_2311.py @@ -6,8 +6,8 @@ class Migration(migrations.Migration): dependencies = [ - ('payments', '0013_currency_decimals'), - ('payments', '0013_merge_0012_auto_20230115_0509_0012_auto_20230117_2124'), + ("payments", "0013_currency_decimals"), + ("payments", "0013_merge_0012_auto_20230115_0509_0012_auto_20230117_2124"), ] operations = [] diff --git a/breathecode/payments/migrations/0014_subscription_valid_until.py b/breathecode/payments/migrations/0014_subscription_valid_until.py index 55ac4b30f..ade72babf 100644 --- a/breathecode/payments/migrations/0014_subscription_valid_until.py +++ b/breathecode/payments/migrations/0014_subscription_valid_until.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('payments', '0013_rename_valid_until_subscription_next_payment_at'), + ("payments", "0013_rename_valid_until_subscription_next_payment_at"), ] operations = [ migrations.AddField( - model_name='subscription', - name='valid_until', + model_name="subscription", + name="valid_until", field=models.DateTimeField(blank=True, default=None, null=True), ), ] diff --git a/breathecode/payments/migrations/0015_merge_20230123_2138.py b/breathecode/payments/migrations/0015_merge_20230123_2138.py index 71c4c47ee..07b6feb60 100644 --- a/breathecode/payments/migrations/0015_merge_20230123_2138.py +++ b/breathecode/payments/migrations/0015_merge_20230123_2138.py @@ -6,8 +6,8 @@ class Migration(migrations.Migration): dependencies = [ - ('payments', '0014_merge_20230120_2311'), - ('payments', '0014_subscription_valid_until'), + ("payments", "0014_merge_20230120_2311"), + ("payments", "0014_subscription_valid_until"), ] operations = [] diff --git a/breathecode/payments/migrations/0015_merge_20230125_0054.py b/breathecode/payments/migrations/0015_merge_20230125_0054.py index ac9cb2561..30a751fa5 100644 --- a/breathecode/payments/migrations/0015_merge_20230125_0054.py +++ b/breathecode/payments/migrations/0015_merge_20230125_0054.py @@ -6,8 +6,8 @@ class Migration(migrations.Migration): dependencies = [ - ('payments', '0014_merge_20230120_2311'), - ('payments', '0014_subscription_valid_until'), + ("payments", "0014_merge_20230120_2311"), + ("payments", "0014_subscription_valid_until"), ] operations = [] diff --git a/breathecode/payments/migrations/0016_merge_20230125_2040.py b/breathecode/payments/migrations/0016_merge_20230125_2040.py index cb02c11a1..e0aedb1a7 100644 --- a/breathecode/payments/migrations/0016_merge_20230125_2040.py +++ b/breathecode/payments/migrations/0016_merge_20230125_2040.py @@ -6,8 +6,8 @@ class 
Migration(migrations.Migration): dependencies = [ - ('payments', '0012_auto_20230123_1621'), - ('payments', '0015_merge_20230125_0054'), + ("payments", "0012_auto_20230123_1621"), + ("payments", "0015_merge_20230125_0054"), ] operations = [] diff --git a/breathecode/payments/migrations/0017_merge_20230126_2006.py b/breathecode/payments/migrations/0017_merge_20230126_2006.py index 34526a883..ead8c885a 100644 --- a/breathecode/payments/migrations/0017_merge_20230126_2006.py +++ b/breathecode/payments/migrations/0017_merge_20230126_2006.py @@ -6,8 +6,8 @@ class Migration(migrations.Migration): dependencies = [ - ('payments', '0015_merge_20230123_2138'), - ('payments', '0016_merge_20230125_2040'), + ("payments", "0015_merge_20230123_2138"), + ("payments", "0016_merge_20230125_2040"), ] operations = [] diff --git a/breathecode/payments/migrations/0018_alter_consumptionsession_related_id.py b/breathecode/payments/migrations/0018_alter_consumptionsession_related_id.py index a693bc14d..79917c139 100644 --- a/breathecode/payments/migrations/0018_alter_consumptionsession_related_id.py +++ b/breathecode/payments/migrations/0018_alter_consumptionsession_related_id.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('payments', '0017_merge_20230126_2006'), + ("payments", "0017_merge_20230126_2006"), ] operations = [ migrations.AlterField( - model_name='consumptionsession', - name='related_id', + model_name="consumptionsession", + name="related_id", field=models.IntegerField(blank=True, default=None, null=True), ), ] diff --git a/breathecode/payments/migrations/0019_auto_20230214_0438.py b/breathecode/payments/migrations/0019_auto_20230214_0438.py index 398755f27..2b5f29075 100644 --- a/breathecode/payments/migrations/0019_auto_20230214_0438.py +++ b/breathecode/payments/migrations/0019_auto_20230214_0438.py @@ -7,135 +7,157 @@ class Migration(migrations.Migration): dependencies = [ - ('events', '0040_alter_venue_zip_code'), - ('admissions', '0053_alter_cohort_kickoff_date'), - ('payments', '0018_alter_consumptionsession_related_id'), + ("events", "0040_alter_venue_zip_code"), + ("admissions", "0053_alter_cohort_kickoff_date"), + ("payments", "0018_alter_consumptionsession_related_id"), ] operations = [ migrations.RenameField( - model_name='planfinancing', - old_name='paid_at', - new_name='next_payment_at', + model_name="planfinancing", + old_name="paid_at", + new_name="next_payment_at", ), migrations.RenameField( - model_name='planfinancing', - old_name='pay_until', - new_name='valid_until', + model_name="planfinancing", + old_name="pay_until", + new_name="valid_until", ), migrations.RenameField( - model_name='servicestockscheduler', - old_name='last_renew', - new_name='valid_until', + model_name="servicestockscheduler", + old_name="last_renew", + new_name="valid_until", ), migrations.AddField( - model_name='invoice', - name='refund_stripe_id', + model_name="invoice", + name="refund_stripe_id", field=models.CharField(blank=True, default=None, max_length=32, null=True), ), migrations.AddField( - model_name='invoice', - name='refunded_at', + model_name="invoice", + name="refunded_at", field=models.DateTimeField(blank=True, default=None, null=True), ), migrations.AddField( - model_name='plan', - name='time_of_life', + model_name="plan", + name="time_of_life", field=models.IntegerField(blank=True, default=1, null=True), ), migrations.AddField( - model_name='plan', - name='time_of_life_unit', - field=models.CharField(blank=True, - choices=[('DAY', 'Day'), ('WEEK', 'Week'), 
('MONTH', 'Month'), ('YEAR', 'Year')], - default='MONTH', - max_length=10, - null=True), + model_name="plan", + name="time_of_life_unit", + field=models.CharField( + blank=True, + choices=[("DAY", "Day"), ("WEEK", "Week"), ("MONTH", "Month"), ("YEAR", "Year")], + default="MONTH", + max_length=10, + null=True, + ), ), migrations.AddField( - model_name='planfinancing', - name='monthly_price', + model_name="planfinancing", + name="monthly_price", field=models.FloatField(default=0), ), migrations.AddField( - model_name='planfinancing', - name='plan_expires_at', + model_name="planfinancing", + name="plan_expires_at", field=models.DateTimeField(default=None, null=True), ), migrations.AddField( - model_name='subscriptionserviceitem', - name='cohorts', - field=models.ManyToManyField(blank=True, to='admissions.Cohort'), + model_name="subscriptionserviceitem", + name="cohorts", + field=models.ManyToManyField(blank=True, to="admissions.Cohort"), ), migrations.AddField( - model_name='subscriptionserviceitem', - name='mentorship_service_set', - field=models.ForeignKey(blank=True, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='payments.mentorshipserviceset'), + model_name="subscriptionserviceitem", + name="mentorship_service_set", + field=models.ForeignKey( + blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to="payments.mentorshipserviceset" + ), ), migrations.AlterField( - model_name='bag', - name='chosen_period', - field=models.CharField(choices=[('NO_SET', 'No set'), ('MONTH', 'Month'), ('QUARTER', 'Quarter'), - ('HALF', 'Half'), ('YEAR', 'Year')], - default='NO_SET', - max_length=7), + model_name="bag", + name="chosen_period", + field=models.CharField( + choices=[ + ("NO_SET", "No set"), + ("MONTH", "Month"), + ("QUARTER", "Quarter"), + ("HALF", "Half"), + ("YEAR", "Year"), + ], + default="NO_SET", + max_length=7, + ), ), migrations.AlterField( - model_name='bag', - name='type', - field=models.CharField(choices=[('BAG', 'Bag'), ('CHARGE', 'Charge'), ('PREVIEW', 'Preview')], - default='BAG', - max_length=7), + model_name="bag", + name="type", + field=models.CharField( + choices=[("BAG", "Bag"), ("CHARGE", "Charge"), ("PREVIEW", "Preview")], default="BAG", max_length=7 + ), ), migrations.AlterField( - model_name='consumable', - name='cohort', - field=models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to='admissions.cohort'), + model_name="consumable", + name="cohort", + field=models.ForeignKey( + blank=True, + default=None, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to="admissions.cohort", + ), ), migrations.AlterField( - model_name='consumable', - name='event_type', - field=models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to='events.eventtype'), + model_name="consumable", + name="event_type", + field=models.ForeignKey( + blank=True, default=None, null=True, on_delete=django.db.models.deletion.SET_NULL, to="events.eventtype" + ), ), migrations.AlterField( - model_name='planfinancing', - name='status', - field=models.CharField(choices=[('FREE_TRIAL', 'Free trial'), ('ACTIVE', 'Active'), - ('CANCELLED', 'Cancelled'), ('DEPRECATED', 'Deprecated'), - ('PAYMENT_ISSUE', 'Payment issue'), ('ERROR', 'Error'), - ('FULLY_PAID', 'Fully Paid')], - default='ACTIVE', - max_length=13), + model_name="planfinancing", + name="status", + field=models.CharField( + choices=[ + ("FREE_TRIAL", "Free trial"), + ("ACTIVE", "Active"), + ("CANCELLED", 
"Cancelled"), + ("DEPRECATED", "Deprecated"), + ("PAYMENT_ISSUE", "Payment issue"), + ("ERROR", "Error"), + ("FULLY_PAID", "Fully Paid"), + ], + default="ACTIVE", + max_length=13, + ), ), migrations.AlterField( - model_name='planfinancing', - name='status_message', + model_name="planfinancing", + name="status_message", field=models.CharField(blank=True, default=None, max_length=250, null=True), ), migrations.AlterField( - model_name='subscription', - name='status', - field=models.CharField(choices=[('FREE_TRIAL', 'Free trial'), ('ACTIVE', 'Active'), - ('CANCELLED', 'Cancelled'), ('DEPRECATED', 'Deprecated'), - ('PAYMENT_ISSUE', 'Payment issue'), ('ERROR', 'Error'), - ('FULLY_PAID', 'Fully Paid')], - default='ACTIVE', - max_length=13), + model_name="subscription", + name="status", + field=models.CharField( + choices=[ + ("FREE_TRIAL", "Free trial"), + ("ACTIVE", "Active"), + ("CANCELLED", "Cancelled"), + ("DEPRECATED", "Deprecated"), + ("PAYMENT_ISSUE", "Payment issue"), + ("ERROR", "Error"), + ("FULLY_PAID", "Fully Paid"), + ], + default="ACTIVE", + max_length=13, + ), ), migrations.AlterField( - model_name='subscription', - name='status_message', + model_name="subscription", + name="status_message", field=models.CharField(blank=True, default=None, max_length=250, null=True), ), ] diff --git a/breathecode/payments/migrations/0020_auto_20230223_0634.py b/breathecode/payments/migrations/0020_auto_20230223_0634.py index eb1e1781e..a5b18fa5f 100644 --- a/breathecode/payments/migrations/0020_auto_20230223_0634.py +++ b/breathecode/payments/migrations/0020_auto_20230223_0634.py @@ -11,1147 +11,1288 @@ class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ('auth', '0012_alter_user_first_name_max_length'), - ('admissions', '0054_cohortuser_history_log'), - ('events', '0042_alter_eventtype_icon_url'), - ('mentorship', '0020_alter_mentorshipservice_language'), - ('payments', '0019_auto_20230214_0438'), + ("auth", "0012_alter_user_first_name_max_length"), + ("admissions", "0054_cohortuser_history_log"), + ("events", "0042_alter_eventtype_icon_url"), + ("mentorship", "0020_alter_mentorshipservice_language"), + ("payments", "0019_auto_20230214_0438"), ] operations = [ migrations.CreateModel( - name='EventTypeSet', + name="EventTypeSet", fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('slug', - models.SlugField( - help_text= - 'A human-readable identifier, it must be unique and it can only contain letters, numbers and hyphens', - max_length=100, - unique=True)), - ('academy', - models.ForeignKey(help_text='Academy owner', - on_delete=django.db.models.deletion.CASCADE, - to='admissions.academy')), - ('event_types', models.ManyToManyField(blank=True, help_text='Event types', to='events.EventType')), + ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "slug", + models.SlugField( + help_text="A human-readable identifier, it must be unique and it can only contain letters, numbers and hyphens", + max_length=100, + unique=True, + ), + ), + ( + "academy", + models.ForeignKey( + help_text="Academy owner", on_delete=django.db.models.deletion.CASCADE, to="admissions.academy" + ), + ), + ("event_types", models.ManyToManyField(blank=True, help_text="Event types", to="events.EventType")), ], ), migrations.RemoveField( - model_name='mentorshipserviceset', - name='name', + model_name="mentorshipserviceset", + name="name", ), 
migrations.RemoveField( - model_name='planserviceitem', - name='mentorship_service_set', + model_name="planserviceitem", + name="mentorship_service_set", ), migrations.AddField( - model_name='bag', - name='selected_mentorship_service_sets', - field=models.ManyToManyField(blank=True, - help_text='Selected mentorship service sets for the plans of services', - to='payments.MentorshipServiceSet'), + model_name="bag", + name="selected_mentorship_service_sets", + field=models.ManyToManyField( + blank=True, + help_text="Selected mentorship service sets for the plans of services", + to="payments.MentorshipServiceSet", + ), ), migrations.AddField( - model_name='planfinancing', - name='cohort_selected', - field=models.ForeignKey(blank=True, - default=None, - help_text='Cohort which the plans and services is for', - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='admissions.cohort'), + model_name="planfinancing", + name="cohort_selected", + field=models.ForeignKey( + blank=True, + default=None, + help_text="Cohort which the plans and services is for", + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="admissions.cohort", + ), ), migrations.AddField( - model_name='planfinancing', - name='mentorship_service_set_selected', - field=models.ForeignKey(blank=True, - default=None, - help_text='Mentorship service set which the plans and services is for', - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='payments.mentorshipserviceset'), + model_name="planfinancing", + name="mentorship_service_set_selected", + field=models.ForeignKey( + blank=True, + default=None, + help_text="Mentorship service set which the plans and services is for", + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="payments.mentorshipserviceset", + ), ), migrations.AddField( - model_name='planserviceitem', - name='mentorship_service_sets', + model_name="planserviceitem", + name="mentorship_service_sets", field=models.ManyToManyField( blank=True, - help_text='Available mentorship service sets to be sold in this service and plan', - to='payments.MentorshipServiceSet'), + help_text="Available mentorship service sets to be sold in this service and plan", + to="payments.MentorshipServiceSet", + ), ), migrations.AddField( - model_name='subscription', - name='cohort_selected', - field=models.ForeignKey(blank=True, - default=None, - help_text='Cohort which the plans and services is for', - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='admissions.cohort'), + model_name="subscription", + name="cohort_selected", + field=models.ForeignKey( + blank=True, + default=None, + help_text="Cohort which the plans and services is for", + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="admissions.cohort", + ), ), migrations.AddField( - model_name='subscription', - name='mentorship_service_set_selected', - field=models.ForeignKey(blank=True, - default=None, - help_text='Mentorship service set which the plans and services is for', - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='payments.mentorshipserviceset'), + model_name="subscription", + name="mentorship_service_set_selected", + field=models.ForeignKey( + blank=True, + default=None, + help_text="Mentorship service set which the plans and services is for", + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="payments.mentorshipserviceset", + ), ), migrations.AlterField( - model_name='bag', - name='academy', - field=models.ForeignKey(help_text='Academy owner', - 
on_delete=django.db.models.deletion.CASCADE, - to='admissions.academy'), + model_name="bag", + name="academy", + field=models.ForeignKey( + help_text="Academy owner", on_delete=django.db.models.deletion.CASCADE, to="admissions.academy" + ), ), migrations.AlterField( - model_name='bag', - name='amount_per_half', - field=models.FloatField(default=0, help_text='Amount per half'), + model_name="bag", + name="amount_per_half", + field=models.FloatField(default=0, help_text="Amount per half"), ), migrations.AlterField( - model_name='bag', - name='amount_per_month', - field=models.FloatField(default=0, help_text='Amount per month'), + model_name="bag", + name="amount_per_month", + field=models.FloatField(default=0, help_text="Amount per month"), ), migrations.AlterField( - model_name='bag', - name='amount_per_quarter', - field=models.FloatField(default=0, help_text='Amount per quarter'), + model_name="bag", + name="amount_per_quarter", + field=models.FloatField(default=0, help_text="Amount per quarter"), ), migrations.AlterField( - model_name='bag', - name='amount_per_year', - field=models.FloatField(default=0, help_text='Amount per year'), + model_name="bag", + name="amount_per_year", + field=models.FloatField(default=0, help_text="Amount per year"), ), migrations.AlterField( - model_name='bag', - name='chosen_period', - field=models.CharField(choices=[('NO_SET', 'No set'), ('MONTH', 'Month'), ('QUARTER', 'Quarter'), - ('HALF', 'Half'), ('YEAR', 'Year')], - default='NO_SET', - help_text='Chosen period used to calculate the amount and build the subscription', - max_length=7), + model_name="bag", + name="chosen_period", + field=models.CharField( + choices=[ + ("NO_SET", "No set"), + ("MONTH", "Month"), + ("QUARTER", "Quarter"), + ("HALF", "Half"), + ("YEAR", "Year"), + ], + default="NO_SET", + help_text="Chosen period used to calculate the amount and build the subscription", + max_length=7, + ), ), migrations.AlterField( - model_name='bag', - name='currency', - field=models.ForeignKey(help_text='Currency', - on_delete=django.db.models.deletion.CASCADE, - to='payments.currency'), + model_name="bag", + name="currency", + field=models.ForeignKey( + help_text="Currency", on_delete=django.db.models.deletion.CASCADE, to="payments.currency" + ), ), migrations.AlterField( - model_name='bag', - name='expires_at', + model_name="bag", + name="expires_at", field=models.DateTimeField( blank=True, default=None, - help_text='Expiration date of the bag, used for preview bag together with the token', - null=True), + help_text="Expiration date of the bag, used for preview bag together with the token", + null=True, + ), ), migrations.AlterField( - model_name='bag', - name='how_many_installments', - field=models.IntegerField(default=0, - help_text='How many installments to collect and build the plan financing'), + model_name="bag", + name="how_many_installments", + field=models.IntegerField( + default=0, help_text="How many installments to collect and build the plan financing" + ), ), migrations.AlterField( - model_name='bag', - name='is_recurrent', - field=models.BooleanField(default=False, help_text='will it be a recurrent payment?'), + model_name="bag", + name="is_recurrent", + field=models.BooleanField(default=False, help_text="will it be a recurrent payment?"), ), migrations.AlterField( - model_name='bag', - name='plans', - field=models.ManyToManyField(blank=True, help_text='Plans', to='payments.Plan'), + model_name="bag", + name="plans", + field=models.ManyToManyField(blank=True, help_text="Plans", 
to="payments.Plan"), ), migrations.AlterField( - model_name='bag', - name='selected_cohorts', - field=models.ManyToManyField(blank=True, - help_text='Selected cohorts for the plans of services', - to='admissions.Cohort'), + model_name="bag", + name="selected_cohorts", + field=models.ManyToManyField( + blank=True, help_text="Selected cohorts for the plans of services", to="admissions.Cohort" + ), ), migrations.AlterField( - model_name='bag', - name='service_items', - field=models.ManyToManyField(blank=True, help_text='Service items', to='payments.ServiceItem'), + model_name="bag", + name="service_items", + field=models.ManyToManyField(blank=True, help_text="Service items", to="payments.ServiceItem"), ), migrations.AlterField( - model_name='bag', - name='status', - field=models.CharField(choices=[('RENEWAL', 'Renewal'), ('CHECKING', 'Checking'), ('PAID', 'Paid')], - default='CHECKING', - help_text='Bag status', - max_length=8), + model_name="bag", + name="status", + field=models.CharField( + choices=[("RENEWAL", "Renewal"), ("CHECKING", "Checking"), ("PAID", "Paid")], + default="CHECKING", + help_text="Bag status", + max_length=8, + ), ), migrations.AlterField( - model_name='bag', - name='token', - field=models.CharField(blank=True, - db_index=True, - default=None, - help_text='Token of the bag', - max_length=40, - null=True), + model_name="bag", + name="token", + field=models.CharField( + blank=True, db_index=True, default=None, help_text="Token of the bag", max_length=40, null=True + ), ), migrations.AlterField( - model_name='bag', - name='type', - field=models.CharField(choices=[('BAG', 'Bag'), ('CHARGE', 'Charge'), ('PREVIEW', 'Preview')], - default='BAG', - help_text='Bag type', - max_length=7), + model_name="bag", + name="type", + field=models.CharField( + choices=[("BAG", "Bag"), ("CHARGE", "Charge"), ("PREVIEW", "Preview")], + default="BAG", + help_text="Bag type", + max_length=7, + ), ), migrations.AlterField( - model_name='bag', - name='user', - field=models.ForeignKey(help_text='Customer', - on_delete=django.db.models.deletion.CASCADE, - to=settings.AUTH_USER_MODEL), + model_name="bag", + name="user", + field=models.ForeignKey( + help_text="Customer", on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL + ), ), migrations.AlterField( - model_name='bag', - name='was_delivered', - field=models.BooleanField(default=False, help_text='Was it delivered to the user?'), + model_name="bag", + name="was_delivered", + field=models.BooleanField(default=False, help_text="Was it delivered to the user?"), ), migrations.AlterField( - model_name='consumable', - name='cohort', - field=models.ForeignKey(blank=True, - default=None, - help_text='Cohort which the consumable belongs to', - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to='admissions.cohort'), + model_name="consumable", + name="cohort", + field=models.ForeignKey( + blank=True, + default=None, + help_text="Cohort which the consumable belongs to", + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to="admissions.cohort", + ), ), migrations.AlterField( - model_name='consumable', - name='event_type', - field=models.ForeignKey(blank=True, - default=None, - help_text='Event type which the consumable belongs to', - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to='events.eventtype'), + model_name="consumable", + name="event_type", + field=models.ForeignKey( + blank=True, + default=None, + help_text="Event type which the consumable belongs to", + null=True, + 
on_delete=django.db.models.deletion.SET_NULL, + to="events.eventtype", + ), ), migrations.AlterField( - model_name='consumable', - name='how_many', - field=models.IntegerField(default=-1, help_text='How many units of this service can be used'), + model_name="consumable", + name="how_many", + field=models.IntegerField(default=-1, help_text="How many units of this service can be used"), ), migrations.AlterField( - model_name='consumable', - name='mentorship_service', - field=models.ForeignKey(blank=True, - default=None, - help_text='Mentorship service which the consumable belongs to', - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='mentorship.mentorshipservice'), + model_name="consumable", + name="mentorship_service", + field=models.ForeignKey( + blank=True, + default=None, + help_text="Mentorship service which the consumable belongs to", + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="mentorship.mentorshipservice", + ), ), migrations.AlterField( - model_name='consumable', - name='service_item', + model_name="consumable", + name="service_item", field=models.ForeignKey( - help_text='Service item, we remind the service item to know how many units was issued', + help_text="Service item, we remind the service item to know how many units was issued", on_delete=django.db.models.deletion.CASCADE, - to='payments.serviceitem'), + to="payments.serviceitem", + ), ), migrations.AlterField( - model_name='consumable', - name='unit_type', - field=models.CharField(choices=[('UNIT', 'Unit')], - default='UNIT', - help_text='Unit type (e.g. UNIT))', - max_length=10), + model_name="consumable", + name="unit_type", + field=models.CharField( + choices=[("UNIT", "Unit")], default="UNIT", help_text="Unit type (e.g. UNIT))", max_length=10 + ), ), migrations.AlterField( - model_name='consumable', - name='user', - field=models.ForeignKey(help_text='Customer', - on_delete=django.db.models.deletion.CASCADE, - to=settings.AUTH_USER_MODEL), + model_name="consumable", + name="user", + field=models.ForeignKey( + help_text="Customer", on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL + ), ), migrations.AlterField( - model_name='consumable', - name='valid_until', + model_name="consumable", + name="valid_until", field=models.DateTimeField( blank=True, default=None, - help_text='Valid until, this is null if the consumable is valid until resources are exhausted', - null=True), + help_text="Valid until, this is null if the consumable is valid until resources are exhausted", + null=True, + ), ), migrations.AlterField( - model_name='consumptionsession', - name='consumable', - field=models.ForeignKey(help_text='Consumable', - on_delete=django.db.models.deletion.CASCADE, - to='payments.consumable'), + model_name="consumptionsession", + name="consumable", + field=models.ForeignKey( + help_text="Consumable", on_delete=django.db.models.deletion.CASCADE, to="payments.consumable" + ), ), migrations.AlterField( - model_name='consumptionsession', - name='duration', - field=models.DurationField(default=datetime.timedelta, help_text='Duration of the session'), + model_name="consumptionsession", + name="duration", + field=models.DurationField(default=datetime.timedelta, help_text="Duration of the session"), ), migrations.AlterField( - model_name='consumptionsession', - name='eta', - field=models.DateTimeField(help_text='Estimated time of arrival'), + model_name="consumptionsession", + name="eta", + field=models.DateTimeField(help_text="Estimated time of arrival"), ), 
migrations.AlterField( - model_name='consumptionsession', - name='how_many', - field=models.FloatField(default=0, help_text='How many units of this service can be used'), + model_name="consumptionsession", + name="how_many", + field=models.FloatField(default=0, help_text="How many units of this service can be used"), ), migrations.AlterField( - model_name='consumptionsession', - name='path', - field=models.CharField(blank=True, help_text='Path of the request', max_length=200), + model_name="consumptionsession", + name="path", + field=models.CharField(blank=True, help_text="Path of the request", max_length=200), ), migrations.AlterField( - model_name='consumptionsession', - name='related_id', - field=models.IntegerField(blank=True, default=None, help_text='Related id', null=True), + model_name="consumptionsession", + name="related_id", + field=models.IntegerField(blank=True, default=None, help_text="Related id", null=True), ), migrations.AlterField( - model_name='consumptionsession', - name='related_slug', + model_name="consumptionsession", + name="related_slug", field=models.CharField( blank=True, default=None, - help_text= - "Related slug, it's human-readable identifier, it must be unique and it can only contain letters, numbers and hyphens", + help_text="Related slug, it's human-readable identifier, it must be unique and it can only contain letters, numbers and hyphens", max_length=200, - null=True), + null=True, + ), ), migrations.AlterField( - model_name='consumptionsession', - name='request', + model_name="consumptionsession", + name="request", field=models.JSONField( blank=True, default=dict, - help_text="Request parameters, it's used to remind and recover and consumption session"), + help_text="Request parameters, it's used to remind and recover and consumption session", + ), ), migrations.AlterField( - model_name='consumptionsession', - name='status', - field=models.CharField(choices=[('PENDING', 'Pending'), ('DONE', 'Done'), ('CANCELLED', 'Cancelled')], - default='PENDING', - help_text='Status of the session', - max_length=12), + model_name="consumptionsession", + name="status", + field=models.CharField( + choices=[("PENDING", "Pending"), ("DONE", "Done"), ("CANCELLED", "Cancelled")], + default="PENDING", + help_text="Status of the session", + max_length=12, + ), ), migrations.AlterField( - model_name='consumptionsession', - name='user', - field=models.ForeignKey(help_text='Customer', - on_delete=django.db.models.deletion.CASCADE, - to=settings.AUTH_USER_MODEL), + model_name="consumptionsession", + name="user", + field=models.ForeignKey( + help_text="Customer", on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL + ), ), migrations.AlterField( - model_name='consumptionsession', - name='was_discounted', - field=models.BooleanField(default=False, help_text='Was it discounted'), + model_name="consumptionsession", + name="was_discounted", + field=models.BooleanField(default=False, help_text="Was it discounted"), ), migrations.AlterField( - model_name='currency', - name='code', - field=models.CharField(help_text='ISO 4217 currency code (e.g. USD, EUR, MXN)', max_length=3, unique=True), + model_name="currency", + name="code", + field=models.CharField(help_text="ISO 4217 currency code (e.g. USD, EUR, MXN)", max_length=3, unique=True), ), migrations.AlterField( - model_name='currency', - name='decimals', - field=models.IntegerField(default=0, help_text='Number of decimals (e.g. 
2 for USD and EUR, 0 for JPY)'), + model_name="currency", + name="decimals", + field=models.IntegerField(default=0, help_text="Number of decimals (e.g. 2 for USD and EUR, 0 for JPY)"), ), migrations.AlterField( - model_name='currency', - name='name', - field=models.CharField(help_text='Currency name (e.g. US Dollar, Euro, Mexican Peso)', - max_length=20, - unique=True), + model_name="currency", + name="name", + field=models.CharField( + help_text="Currency name (e.g. US Dollar, Euro, Mexican Peso)", max_length=20, unique=True + ), ), migrations.AlterField( - model_name='financialreputation', - name='in_4geeks', - field=models.CharField(choices=[('FULFILLED', 'Fulfilled'), ('REJECTED', 'Rejected'), - ('PENDING', 'Pending'), ('REFUNDED', 'Refunded'), - ('DISPUTED_AS_FRAUD', 'Disputed as fraud')], - default='GOOD', - help_text='4Geeks reputation', - max_length=17), + model_name="financialreputation", + name="in_4geeks", + field=models.CharField( + choices=[ + ("FULFILLED", "Fulfilled"), + ("REJECTED", "Rejected"), + ("PENDING", "Pending"), + ("REFUNDED", "Refunded"), + ("DISPUTED_AS_FRAUD", "Disputed as fraud"), + ], + default="GOOD", + help_text="4Geeks reputation", + max_length=17, + ), ), migrations.AlterField( - model_name='financialreputation', - name='in_stripe', - field=models.CharField(choices=[('FULFILLED', 'Fulfilled'), ('REJECTED', 'Rejected'), - ('PENDING', 'Pending'), ('REFUNDED', 'Refunded'), - ('DISPUTED_AS_FRAUD', 'Disputed as fraud')], - default='GOOD', - help_text='Stripe reputation', - max_length=17), + model_name="financialreputation", + name="in_stripe", + field=models.CharField( + choices=[ + ("FULFILLED", "Fulfilled"), + ("REJECTED", "Rejected"), + ("PENDING", "Pending"), + ("REFUNDED", "Refunded"), + ("DISPUTED_AS_FRAUD", "Disputed as fraud"), + ], + default="GOOD", + help_text="Stripe reputation", + max_length=17, + ), ), migrations.AlterField( - model_name='financialreputation', - name='user', - field=models.OneToOneField(help_text='Customer', - on_delete=django.db.models.deletion.CASCADE, - related_name='reputation', - to=settings.AUTH_USER_MODEL), + model_name="financialreputation", + name="user", + field=models.OneToOneField( + help_text="Customer", + on_delete=django.db.models.deletion.CASCADE, + related_name="reputation", + to=settings.AUTH_USER_MODEL, + ), ), migrations.AlterField( - model_name='financingoption', - name='currency', - field=models.ForeignKey(help_text='Currency', - on_delete=django.db.models.deletion.CASCADE, - to='payments.currency'), + model_name="financingoption", + name="currency", + field=models.ForeignKey( + help_text="Currency", on_delete=django.db.models.deletion.CASCADE, to="payments.currency" + ), ), migrations.AlterField( - model_name='financingoption', - name='how_many_months', - field=models.IntegerField(default=1, - help_text='How many months and installments to collect (e.g. 1, 2, 3, ...)'), + model_name="financingoption", + name="how_many_months", + field=models.IntegerField( + default=1, help_text="How many months and installments to collect (e.g. 1, 2, 3, ...)" + ), ), migrations.AlterField( - model_name='financingoption', - name='monthly_price', - field=models.FloatField(default=1, help_text='Monthly price (e.g. 1, 2, 3, ...)'), + model_name="financingoption", + name="monthly_price", + field=models.FloatField(default=1, help_text="Monthly price (e.g. 
1, 2, 3, ...)"), ), migrations.AlterField( - model_name='invoice', - name='academy', - field=models.ForeignKey(help_text='Academy owner', - on_delete=django.db.models.deletion.CASCADE, - to='admissions.academy'), + model_name="invoice", + name="academy", + field=models.ForeignKey( + help_text="Academy owner", on_delete=django.db.models.deletion.CASCADE, to="admissions.academy" + ), ), migrations.AlterField( - model_name='invoice', - name='bag', - field=models.ForeignKey(help_text='Bag', on_delete=django.db.models.deletion.CASCADE, to='payments.bag'), + model_name="invoice", + name="bag", + field=models.ForeignKey(help_text="Bag", on_delete=django.db.models.deletion.CASCADE, to="payments.bag"), ), migrations.AlterField( - model_name='invoice', - name='currency', - field=models.ForeignKey(help_text='Currency of the invoice', - on_delete=django.db.models.deletion.CASCADE, - to='payments.currency'), + model_name="invoice", + name="currency", + field=models.ForeignKey( + help_text="Currency of the invoice", on_delete=django.db.models.deletion.CASCADE, to="payments.currency" + ), ), migrations.AlterField( - model_name='invoice', - name='paid_at', - field=models.DateTimeField(help_text='Date when the invoice was paid'), + model_name="invoice", + name="paid_at", + field=models.DateTimeField(help_text="Date when the invoice was paid"), ), migrations.AlterField( - model_name='invoice', - name='refund_stripe_id', - field=models.CharField(blank=True, - default=None, - help_text='Stripe id for refunding', - max_length=32, - null=True), + model_name="invoice", + name="refund_stripe_id", + field=models.CharField( + blank=True, default=None, help_text="Stripe id for refunding", max_length=32, null=True + ), ), migrations.AlterField( - model_name='invoice', - name='refunded_at', - field=models.DateTimeField(blank=True, - default=None, - help_text='Date when the invoice was refunded', - null=True), + model_name="invoice", + name="refunded_at", + field=models.DateTimeField( + blank=True, default=None, help_text="Date when the invoice was refunded", null=True + ), ), migrations.AlterField( - model_name='invoice', - name='status', - field=models.CharField(choices=[('FULFILLED', 'Fulfilled'), ('REJECTED', 'Rejected'), - ('PENDING', 'Pending'), ('REFUNDED', 'Refunded'), - ('DISPUTED_AS_FRAUD', 'Disputed as fraud')], - default='PENDING', - help_text='Invoice status', - max_length=17), + model_name="invoice", + name="status", + field=models.CharField( + choices=[ + ("FULFILLED", "Fulfilled"), + ("REJECTED", "Rejected"), + ("PENDING", "Pending"), + ("REFUNDED", "Refunded"), + ("DISPUTED_AS_FRAUD", "Disputed as fraud"), + ], + default="PENDING", + help_text="Invoice status", + max_length=17, + ), ), migrations.AlterField( - model_name='invoice', - name='stripe_id', - field=models.CharField(blank=True, default=None, help_text='Stripe id', max_length=32, null=True), + model_name="invoice", + name="stripe_id", + field=models.CharField(blank=True, default=None, help_text="Stripe id", max_length=32, null=True), ), migrations.AlterField( - model_name='invoice', - name='user', - field=models.ForeignKey(help_text='Customer', - on_delete=django.db.models.deletion.CASCADE, - to=settings.AUTH_USER_MODEL), + model_name="invoice", + name="user", + field=models.ForeignKey( + help_text="Customer", on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL + ), ), migrations.AlterField( - model_name='mentorshipserviceset', - name='slug', + model_name="mentorshipserviceset", + name="slug", field=models.SlugField( - 
help_text= - 'A human-readable identifier, it must be unique and it can only contain letters, numbers and hyphens', + help_text="A human-readable identifier, it must be unique and it can only contain letters, numbers and hyphens", max_length=100, - unique=True), + unique=True, + ), ), migrations.AlterField( - model_name='paymentcontact', - name='stripe_id', - field=models.CharField(help_text='Stripe id', max_length=20), + model_name="paymentcontact", + name="stripe_id", + field=models.CharField(help_text="Stripe id", max_length=20), ), migrations.AlterField( - model_name='paymentcontact', - name='user', - field=models.OneToOneField(help_text='Customer', - on_delete=django.db.models.deletion.CASCADE, - related_name='payment_contact', - to=settings.AUTH_USER_MODEL), + model_name="paymentcontact", + name="user", + field=models.OneToOneField( + help_text="Customer", + on_delete=django.db.models.deletion.CASCADE, + related_name="payment_contact", + to=settings.AUTH_USER_MODEL, + ), ), migrations.AlterField( - model_name='plan', - name='currency', - field=models.ForeignKey(help_text='Currency', - on_delete=django.db.models.deletion.CASCADE, - to='payments.currency'), + model_name="plan", + name="currency", + field=models.ForeignKey( + help_text="Currency", on_delete=django.db.models.deletion.CASCADE, to="payments.currency" + ), ), migrations.AlterField( - model_name='plan', - name='financing_options', - field=models.ManyToManyField(blank=True, - help_text='Available financing options', - to='payments.FinancingOption'), + model_name="plan", + name="financing_options", + field=models.ManyToManyField( + blank=True, help_text="Available financing options", to="payments.FinancingOption" + ), ), migrations.AlterField( - model_name='plan', - name='is_onboarding', - field=models.BooleanField(default=False, help_text='Is onboarding plan?'), + model_name="plan", + name="is_onboarding", + field=models.BooleanField(default=False, help_text="Is onboarding plan?"), ), migrations.AlterField( - model_name='plan', - name='owner', - field=models.ForeignKey(blank=True, - help_text='Academy owner', - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='admissions.academy'), + model_name="plan", + name="owner", + field=models.ForeignKey( + blank=True, + help_text="Academy owner", + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="admissions.academy", + ), ), migrations.AlterField( - model_name='plan', - name='price_per_half', - field=models.FloatField(blank=True, default=None, help_text='Price per half', null=True), + model_name="plan", + name="price_per_half", + field=models.FloatField(blank=True, default=None, help_text="Price per half", null=True), ), migrations.AlterField( - model_name='plan', - name='price_per_month', - field=models.FloatField(blank=True, default=None, help_text='Price per month', null=True), + model_name="plan", + name="price_per_month", + field=models.FloatField(blank=True, default=None, help_text="Price per month", null=True), ), migrations.AlterField( - model_name='plan', - name='price_per_quarter', - field=models.FloatField(blank=True, default=None, help_text='Price per quarter', null=True), + model_name="plan", + name="price_per_quarter", + field=models.FloatField(blank=True, default=None, help_text="Price per quarter", null=True), ), migrations.AlterField( - model_name='plan', - name='price_per_year', - field=models.FloatField(blank=True, default=None, help_text='Price per year', null=True), + model_name="plan", + name="price_per_year", + 
field=models.FloatField(blank=True, default=None, help_text="Price per year", null=True), ), migrations.AlterField( - model_name='plan', - name='slug', + model_name="plan", + name="slug", field=models.CharField( - help_text= - 'A human-readable identifier, it must be unique and it can only contain letters, numbers and hyphens', + help_text="A human-readable identifier, it must be unique and it can only contain letters, numbers and hyphens", max_length=60, - unique=True), + unique=True, + ), ), migrations.AlterField( - model_name='plan', - name='status', - field=models.CharField(choices=[('DRAFT', 'Draft'), ('ACTIVE', 'Active'), ('UNLISTED', 'Unlisted'), - ('DELETED', 'Deleted'), ('DISCONTINUED', 'Discontinued')], - default='DRAFT', - help_text='Status', - max_length=12), + model_name="plan", + name="status", + field=models.CharField( + choices=[ + ("DRAFT", "Draft"), + ("ACTIVE", "Active"), + ("UNLISTED", "Unlisted"), + ("DELETED", "Deleted"), + ("DISCONTINUED", "Discontinued"), + ], + default="DRAFT", + help_text="Status", + max_length=12, + ), ), migrations.AlterField( - model_name='plan', - name='time_of_life', - field=models.IntegerField(blank=True, - default=1, - help_text='Timelife of plan (e.g. 1, 2, 3, ...)', - null=True), + model_name="plan", + name="time_of_life", + field=models.IntegerField( + blank=True, default=1, help_text="Timelife of plan (e.g. 1, 2, 3, ...)", null=True + ), ), migrations.AlterField( - model_name='plan', - name='time_of_life_unit', - field=models.CharField(blank=True, - choices=[('DAY', 'Day'), ('WEEK', 'Week'), ('MONTH', 'Month'), ('YEAR', 'Year')], - default='MONTH', - help_text='Timelife unit (e.g. DAY, WEEK, MONTH or YEAR)', - max_length=10, - null=True), + model_name="plan", + name="time_of_life_unit", + field=models.CharField( + blank=True, + choices=[("DAY", "Day"), ("WEEK", "Week"), ("MONTH", "Month"), ("YEAR", "Year")], + default="MONTH", + help_text="Timelife unit (e.g. DAY, WEEK, MONTH or YEAR)", + max_length=10, + null=True, + ), ), migrations.AlterField( - model_name='plan', - name='trial_duration', - field=models.IntegerField(default=1, help_text='Trial duration (e.g. 1, 2, 3, ...)'), + model_name="plan", + name="trial_duration", + field=models.IntegerField(default=1, help_text="Trial duration (e.g. 1, 2, 3, ...)"), ), migrations.AlterField( - model_name='plan', - name='trial_duration_unit', - field=models.CharField(choices=[('DAY', 'Day'), ('WEEK', 'Week'), ('MONTH', 'Month'), ('YEAR', 'Year')], - default='MONTH', - help_text='Trial duration unit (e.g. DAY, WEEK, MONTH or YEAR)', - max_length=10), + model_name="plan", + name="trial_duration_unit", + field=models.CharField( + choices=[("DAY", "Day"), ("WEEK", "Week"), ("MONTH", "Month"), ("YEAR", "Year")], + default="MONTH", + help_text="Trial duration unit (e.g. 
DAY, WEEK, MONTH or YEAR)", + max_length=10, + ), ), migrations.AlterField( - model_name='planfinancing', - name='academy', - field=models.ForeignKey(help_text='Academy owner', - on_delete=django.db.models.deletion.CASCADE, - to='admissions.academy'), + model_name="planfinancing", + name="academy", + field=models.ForeignKey( + help_text="Academy owner", on_delete=django.db.models.deletion.CASCADE, to="admissions.academy" + ), ), migrations.AlterField( - model_name='planfinancing', - name='invoices', - field=models.ManyToManyField(blank=True, help_text='Invoices', to='payments.Invoice'), + model_name="planfinancing", + name="invoices", + field=models.ManyToManyField(blank=True, help_text="Invoices", to="payments.Invoice"), ), migrations.AlterField( - model_name='planfinancing', - name='monthly_price', - field=models.FloatField(default=0, - help_text='Monthly price, we keep this to avoid we changes him/her amount'), + model_name="planfinancing", + name="monthly_price", + field=models.FloatField( + default=0, help_text="Monthly price, we keep this to avoid we changes him/her amount" + ), ), migrations.AlterField( - model_name='planfinancing', - name='next_payment_at', - field=models.DateTimeField(help_text='Next payment date'), + model_name="planfinancing", + name="next_payment_at", + field=models.DateTimeField(help_text="Next payment date"), ), migrations.AlterField( - model_name='planfinancing', - name='plan_expires_at', - field=models.DateTimeField(default=None, - help_text='Plan expires at, after this date the plan will not be renewed', - null=True), + model_name="planfinancing", + name="plan_expires_at", + field=models.DateTimeField( + default=None, help_text="Plan expires at, after this date the plan will not be renewed", null=True + ), ), migrations.AlterField( - model_name='planfinancing', - name='plans', - field=models.ManyToManyField(blank=True, help_text='Plans to be suplied', to='payments.Plan'), + model_name="planfinancing", + name="plans", + field=models.ManyToManyField(blank=True, help_text="Plans to be suplied", to="payments.Plan"), ), migrations.AlterField( - model_name='planfinancing', - name='status', - field=models.CharField(choices=[('FREE_TRIAL', 'Free trial'), ('ACTIVE', 'Active'), - ('CANCELLED', 'Cancelled'), ('DEPRECATED', 'Deprecated'), - ('PAYMENT_ISSUE', 'Payment issue'), ('ERROR', 'Error'), - ('FULLY_PAID', 'Fully Paid')], - default='ACTIVE', - help_text='Status', - max_length=13), + model_name="planfinancing", + name="status", + field=models.CharField( + choices=[ + ("FREE_TRIAL", "Free trial"), + ("ACTIVE", "Active"), + ("CANCELLED", "Cancelled"), + ("DEPRECATED", "Deprecated"), + ("PAYMENT_ISSUE", "Payment issue"), + ("ERROR", "Error"), + ("FULLY_PAID", "Fully Paid"), + ], + default="ACTIVE", + help_text="Status", + max_length=13, + ), ), migrations.AlterField( - model_name='planfinancing', - name='status_message', - field=models.CharField(blank=True, - default=None, - help_text='Error message if status is ERROR', - max_length=250, - null=True), + model_name="planfinancing", + name="status_message", + field=models.CharField( + blank=True, default=None, help_text="Error message if status is ERROR", max_length=250, null=True + ), ), migrations.AlterField( - model_name='planfinancing', - name='user', - field=models.ForeignKey(help_text='Customer', - on_delete=django.db.models.deletion.CASCADE, - to=settings.AUTH_USER_MODEL), + model_name="planfinancing", + name="user", + field=models.ForeignKey( + help_text="Customer", on_delete=django.db.models.deletion.CASCADE, 
to=settings.AUTH_USER_MODEL + ), ), migrations.AlterField( - model_name='planfinancing', - name='valid_until', + model_name="planfinancing", + name="valid_until", field=models.DateTimeField( - help_text= - 'Valid until, before this date each month the customer must pay, after this date the plan financing will be destroyed and if it is belonging to a cohort, the certificate will be issued after pay every installments' + help_text="Valid until, before this date each month the customer must pay, after this date the plan financing will be destroyed and if it is belonging to a cohort, the certificate will be issued after pay every installments" ), ), migrations.AlterField( - model_name='planoffer', - name='from_syllabus', - field=models.ManyToManyField(help_text='Syllabus from which the plan is offered', to='admissions.Syllabus'), + model_name="planoffer", + name="from_syllabus", + field=models.ManyToManyField(help_text="Syllabus from which the plan is offered", to="admissions.Syllabus"), ), migrations.AlterField( - model_name='planoffer', - name='original_plan', - field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, - related_name='original_plan', - to='payments.plan'), + model_name="planoffer", + name="original_plan", + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, related_name="original_plan", to="payments.plan" + ), ), migrations.AlterField( - model_name='planoffer', - name='suggested_plans', - field=models.ManyToManyField(help_text='Suggested plans', - related_name='_payments_planoffer_suggested_plans_+', - to='payments.Plan'), + model_name="planoffer", + name="suggested_plans", + field=models.ManyToManyField( + help_text="Suggested plans", related_name="_payments_planoffer_suggested_plans_+", to="payments.Plan" + ), ), migrations.AlterField( - model_name='planoffertranslation', - name='description', - field=models.CharField(help_text='Description of the plan offer', max_length=255), + model_name="planoffertranslation", + name="description", + field=models.CharField(help_text="Description of the plan offer", max_length=255), ), migrations.AlterField( - model_name='planoffertranslation', - name='lang', - field=models.CharField(help_text='ISO 639-1 language code + ISO 3166-1 alpha-2 country code, e.g. en-US', - max_length=5, - validators=[breathecode.utils.validators.language.validate_language_code]), + model_name="planoffertranslation", + name="lang", + field=models.CharField( + help_text="ISO 639-1 language code + ISO 3166-1 alpha-2 country code, e.g. 
en-US", + max_length=5, + validators=[breathecode.utils.validators.language.validate_language_code], + ), ), migrations.AlterField( - model_name='planoffertranslation', - name='offer', - field=models.ForeignKey(help_text='Plan offer', - on_delete=django.db.models.deletion.CASCADE, - to='payments.planoffer'), + model_name="planoffertranslation", + name="offer", + field=models.ForeignKey( + help_text="Plan offer", on_delete=django.db.models.deletion.CASCADE, to="payments.planoffer" + ), ), migrations.AlterField( - model_name='planoffertranslation', - name='short_description', - field=models.CharField(help_text='Short description of the plan offer', max_length=255), + model_name="planoffertranslation", + name="short_description", + field=models.CharField(help_text="Short description of the plan offer", max_length=255), ), migrations.AlterField( - model_name='planoffertranslation', - name='title', - field=models.CharField(help_text='Title of the plan offer', max_length=60), + model_name="planoffertranslation", + name="title", + field=models.CharField(help_text="Title of the plan offer", max_length=60), ), migrations.AlterField( - model_name='planserviceitem', - name='cohort_pattern', - field=models.CharField(blank=True, - default=None, - help_text='Cohort pattern to find cohorts to be sold in this plan', - max_length=80, - null=True), + model_name="planserviceitem", + name="cohort_pattern", + field=models.CharField( + blank=True, + default=None, + help_text="Cohort pattern to find cohorts to be sold in this plan", + max_length=80, + null=True, + ), ), migrations.AlterField( - model_name='planserviceitem', - name='cohorts', - field=models.ManyToManyField(blank=True, - help_text='Available cohorts to be sold in this this service and plan', - to='admissions.Cohort'), + model_name="planserviceitem", + name="cohorts", + field=models.ManyToManyField( + blank=True, + help_text="Available cohorts to be sold in this this service and plan", + to="admissions.Cohort", + ), ), migrations.AlterField( - model_name='planserviceitem', - name='plan', - field=models.ForeignKey(help_text='Plan', on_delete=django.db.models.deletion.CASCADE, to='payments.plan'), + model_name="planserviceitem", + name="plan", + field=models.ForeignKey(help_text="Plan", on_delete=django.db.models.deletion.CASCADE, to="payments.plan"), ), migrations.AlterField( - model_name='planserviceitem', - name='service_item', - field=models.ForeignKey(help_text='Service item', - on_delete=django.db.models.deletion.CASCADE, - to='payments.serviceitem'), + model_name="planserviceitem", + name="service_item", + field=models.ForeignKey( + help_text="Service item", on_delete=django.db.models.deletion.CASCADE, to="payments.serviceitem" + ), ), migrations.AlterField( - model_name='planserviceitemhandler', - name='handler', - field=models.ForeignKey(help_text='Plan service item', - on_delete=django.db.models.deletion.CASCADE, - to='payments.planserviceitem'), + model_name="planserviceitemhandler", + name="handler", + field=models.ForeignKey( + help_text="Plan service item", + on_delete=django.db.models.deletion.CASCADE, + to="payments.planserviceitem", + ), ), migrations.AlterField( - model_name='planserviceitemhandler', - name='plan_financing', - field=models.ForeignKey(blank=True, - default=None, - help_text='Plan financing', - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='payments.planfinancing'), + model_name="planserviceitemhandler", + name="plan_financing", + field=models.ForeignKey( + blank=True, + default=None, + 
help_text="Plan financing", + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="payments.planfinancing", + ), ), migrations.AlterField( - model_name='planserviceitemhandler', - name='subscription', - field=models.ForeignKey(blank=True, - default=None, - help_text='Subscription', - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='payments.subscription'), + model_name="planserviceitemhandler", + name="subscription", + field=models.ForeignKey( + blank=True, + default=None, + help_text="Subscription", + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="payments.subscription", + ), ), migrations.AlterField( - model_name='plantranslation', - name='description', - field=models.CharField(help_text='Description of the plan', max_length=255), + model_name="plantranslation", + name="description", + field=models.CharField(help_text="Description of the plan", max_length=255), ), migrations.AlterField( - model_name='plantranslation', - name='lang', - field=models.CharField(help_text='ISO 639-1 language code + ISO 3166-1 alpha-2 country code, e.g. en-US', - max_length=5, - validators=[breathecode.utils.validators.language.validate_language_code]), + model_name="plantranslation", + name="lang", + field=models.CharField( + help_text="ISO 639-1 language code + ISO 3166-1 alpha-2 country code, e.g. en-US", + max_length=5, + validators=[breathecode.utils.validators.language.validate_language_code], + ), ), migrations.AlterField( - model_name='plantranslation', - name='title', - field=models.CharField(help_text='Title of the plan', max_length=60), + model_name="plantranslation", + name="title", + field=models.CharField(help_text="Title of the plan", max_length=60), ), migrations.AlterField( - model_name='service', - name='currency', - field=models.ForeignKey(help_text='Currency', - on_delete=django.db.models.deletion.CASCADE, - to='payments.currency'), + model_name="service", + name="currency", + field=models.ForeignKey( + help_text="Currency", on_delete=django.db.models.deletion.CASCADE, to="payments.currency" + ), ), migrations.AlterField( - model_name='service', - name='groups', - field=models.ManyToManyField(blank=True, - help_text='Groups that can access the customer that bought this service', - to='auth.Group'), + model_name="service", + name="groups", + field=models.ManyToManyField( + blank=True, help_text="Groups that can access the customer that bought this service", to="auth.Group" + ), ), migrations.AlterField( - model_name='service', - name='owner', - field=models.ForeignKey(blank=True, - help_text='Academy owner', - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='admissions.academy'), + model_name="service", + name="owner", + field=models.ForeignKey( + blank=True, + help_text="Academy owner", + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="admissions.academy", + ), ), migrations.AlterField( - model_name='service', - name='price_per_unit', - field=models.FloatField(default=0, help_text='Price per unit'), + model_name="service", + name="price_per_unit", + field=models.FloatField(default=0, help_text="Price per unit"), ), migrations.AlterField( - model_name='service', - name='private', - field=models.BooleanField(default=True, help_text='If the asset is private or not'), + model_name="service", + name="private", + field=models.BooleanField(default=True, help_text="If the asset is private or not"), ), migrations.AlterField( - model_name='service', - name='slug', + model_name="service", + name="slug", field=models.CharField( - 
help_text= - 'A human-readable identifier, it must be unique and it can only contain letters, numbers and hyphens', + help_text="A human-readable identifier, it must be unique and it can only contain letters, numbers and hyphens", max_length=60, - unique=True), + unique=True, + ), ), migrations.AlterField( - model_name='service', - name='trial_duration', - field=models.IntegerField(default=1, help_text='Trial duration (e.g. 1, 2, 3, ...)'), + model_name="service", + name="trial_duration", + field=models.IntegerField(default=1, help_text="Trial duration (e.g. 1, 2, 3, ...)"), ), migrations.AlterField( - model_name='service', - name='trial_duration_unit', - field=models.CharField(choices=[('DAY', 'Day'), ('WEEK', 'Week'), ('MONTH', 'Month'), ('YEAR', 'Year')], - default='MONTH', - help_text='Trial duration unit (e.g. DAY, WEEK, MONTH or YEAR)', - max_length=10), + model_name="service", + name="trial_duration_unit", + field=models.CharField( + choices=[("DAY", "Day"), ("WEEK", "Week"), ("MONTH", "Month"), ("YEAR", "Year")], + default="MONTH", + help_text="Trial duration unit (e.g. DAY, WEEK, MONTH or YEAR)", + max_length=10, + ), ), migrations.AlterField( - model_name='serviceitem', - name='how_many', - field=models.IntegerField(default=-1, help_text='How many units of this service can be used'), + model_name="serviceitem", + name="how_many", + field=models.IntegerField(default=-1, help_text="How many units of this service can be used"), ), migrations.AlterField( - model_name='serviceitem', - name='is_renewable', - field=models.BooleanField(default=False, help_text='If the service is renewable or not'), + model_name="serviceitem", + name="is_renewable", + field=models.BooleanField(default=False, help_text="If the service is renewable or not"), ), migrations.AlterField( - model_name='serviceitem', - name='renew_at', + model_name="serviceitem", + name="renew_at", field=models.IntegerField( - default=1, - help_text='Renew at (e.g. 1, 2, 3, ...) it going to be used to build the balance of customer'), + default=1, help_text="Renew at (e.g. 1, 2, 3, ...) it going to be used to build the balance of customer" + ), ), migrations.AlterField( - model_name='serviceitem', - name='renew_at_unit', - field=models.CharField(choices=[('DAY', 'Day'), ('WEEK', 'Week'), ('MONTH', 'Month'), ('YEAR', 'Year')], - default='MONTH', - help_text='Renew at unit (e.g. DAY, WEEK, MONTH or YEAR)', - max_length=10), + model_name="serviceitem", + name="renew_at_unit", + field=models.CharField( + choices=[("DAY", "Day"), ("WEEK", "Week"), ("MONTH", "Month"), ("YEAR", "Year")], + default="MONTH", + help_text="Renew at unit (e.g. DAY, WEEK, MONTH or YEAR)", + max_length=10, + ), ), migrations.AlterField( - model_name='serviceitem', - name='service', - field=models.ForeignKey(help_text='Service', - on_delete=django.db.models.deletion.CASCADE, - to='payments.service'), + model_name="serviceitem", + name="service", + field=models.ForeignKey( + help_text="Service", on_delete=django.db.models.deletion.CASCADE, to="payments.service" + ), ), migrations.AlterField( - model_name='serviceitem', - name='unit_type', - field=models.CharField(choices=[('UNIT', 'Unit')], - default='UNIT', - help_text='Unit type (e.g. UNIT))', - max_length=10), + model_name="serviceitem", + name="unit_type", + field=models.CharField( + choices=[("UNIT", "Unit")], default="UNIT", help_text="Unit type (e.g. 
UNIT))", max_length=10 + ), ), migrations.AlterField( - model_name='serviceitemfeature', - name='description', - field=models.CharField(help_text='Description of the service item', max_length=255), + model_name="serviceitemfeature", + name="description", + field=models.CharField(help_text="Description of the service item", max_length=255), ), migrations.AlterField( - model_name='serviceitemfeature', - name='lang', - field=models.CharField(help_text='ISO 639-1 language code + ISO 3166-1 alpha-2 country code, e.g. en-US', - max_length=5, - validators=[breathecode.utils.validators.language.validate_language_code]), + model_name="serviceitemfeature", + name="lang", + field=models.CharField( + help_text="ISO 639-1 language code + ISO 3166-1 alpha-2 country code, e.g. en-US", + max_length=5, + validators=[breathecode.utils.validators.language.validate_language_code], + ), ), migrations.AlterField( - model_name='serviceitemfeature', - name='one_line_desc', - field=models.CharField(help_text='One line description of the service item', max_length=30), + model_name="serviceitemfeature", + name="one_line_desc", + field=models.CharField(help_text="One line description of the service item", max_length=30), ), migrations.AlterField( - model_name='serviceitemfeature', - name='service_item', - field=models.ForeignKey(help_text='Service item', - on_delete=django.db.models.deletion.CASCADE, - to='payments.serviceitem'), + model_name="serviceitemfeature", + name="service_item", + field=models.ForeignKey( + help_text="Service item", on_delete=django.db.models.deletion.CASCADE, to="payments.serviceitem" + ), ), migrations.AlterField( - model_name='servicestockscheduler', - name='consumables', - field=models.ManyToManyField(blank=True, help_text='Consumables', to='payments.Consumable'), + model_name="servicestockscheduler", + name="consumables", + field=models.ManyToManyField(blank=True, help_text="Consumables", to="payments.Consumable"), ), migrations.AlterField( - model_name='servicestockscheduler', - name='plan_handler', - field=models.ForeignKey(blank=True, - default=None, - help_text='Plan service item handler', - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='payments.planserviceitemhandler'), + model_name="servicestockscheduler", + name="plan_handler", + field=models.ForeignKey( + blank=True, + default=None, + help_text="Plan service item handler", + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="payments.planserviceitemhandler", + ), ), migrations.AlterField( - model_name='servicestockscheduler', - name='subscription_handler', - field=models.ForeignKey(blank=True, - default=None, - help_text='Subscription service item', - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='payments.subscriptionserviceitem'), + model_name="servicestockscheduler", + name="subscription_handler", + field=models.ForeignKey( + blank=True, + default=None, + help_text="Subscription service item", + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="payments.subscriptionserviceitem", + ), ), migrations.AlterField( - model_name='servicestockscheduler', - name='valid_until', - field=models.DateTimeField(blank=True, - default=None, - help_text='Valid until, after this date the consumables will be renewed', - null=True), + model_name="servicestockscheduler", + name="valid_until", + field=models.DateTimeField( + blank=True, + default=None, + help_text="Valid until, after this date the consumables will be renewed", + null=True, + ), ), migrations.AlterField( - 
model_name='servicetranslation', - name='description', - field=models.CharField(help_text='Description of the service', max_length=255), + model_name="servicetranslation", + name="description", + field=models.CharField(help_text="Description of the service", max_length=255), ), migrations.AlterField( - model_name='servicetranslation', - name='lang', - field=models.CharField(help_text='ISO 639-1 language code + ISO 3166-1 alpha-2 country code, e.g. en-US', - max_length=5, - validators=[breathecode.utils.validators.language.validate_language_code]), + model_name="servicetranslation", + name="lang", + field=models.CharField( + help_text="ISO 639-1 language code + ISO 3166-1 alpha-2 country code, e.g. en-US", + max_length=5, + validators=[breathecode.utils.validators.language.validate_language_code], + ), ), migrations.AlterField( - model_name='servicetranslation', - name='service', - field=models.ForeignKey(help_text='Service', - on_delete=django.db.models.deletion.CASCADE, - to='payments.service'), + model_name="servicetranslation", + name="service", + field=models.ForeignKey( + help_text="Service", on_delete=django.db.models.deletion.CASCADE, to="payments.service" + ), ), migrations.AlterField( - model_name='servicetranslation', - name='title', - field=models.CharField(help_text='Title of the service', max_length=60), + model_name="servicetranslation", + name="title", + field=models.CharField(help_text="Title of the service", max_length=60), ), migrations.AlterField( - model_name='subscription', - name='academy', - field=models.ForeignKey(help_text='Academy owner', - on_delete=django.db.models.deletion.CASCADE, - to='admissions.academy'), + model_name="subscription", + name="academy", + field=models.ForeignKey( + help_text="Academy owner", on_delete=django.db.models.deletion.CASCADE, to="admissions.academy" + ), ), migrations.AlterField( - model_name='subscription', - name='invoices', - field=models.ManyToManyField(blank=True, help_text='Invoices', to='payments.Invoice'), + model_name="subscription", + name="invoices", + field=models.ManyToManyField(blank=True, help_text="Invoices", to="payments.Invoice"), ), migrations.AlterField( - model_name='subscription', - name='is_refundable', - field=models.BooleanField(default=True, help_text='Is it refundable?'), + model_name="subscription", + name="is_refundable", + field=models.BooleanField(default=True, help_text="Is it refundable?"), ), migrations.AlterField( - model_name='subscription', - name='next_payment_at', - field=models.DateTimeField(help_text='Next payment date'), + model_name="subscription", + name="next_payment_at", + field=models.DateTimeField(help_text="Next payment date"), ), migrations.AlterField( - model_name='subscription', - name='paid_at', - field=models.DateTimeField(help_text='Last time the subscription was paid'), + model_name="subscription", + name="paid_at", + field=models.DateTimeField(help_text="Last time the subscription was paid"), ), migrations.AlterField( - model_name='subscription', - name='pay_every', - field=models.IntegerField(default=1, help_text='Pay every X units (e.g. 1, 2, 3, ...)'), + model_name="subscription", + name="pay_every", + field=models.IntegerField(default=1, help_text="Pay every X units (e.g. 1, 2, 3, ...)"), ), migrations.AlterField( - model_name='subscription', - name='pay_every_unit', - field=models.CharField(choices=[('DAY', 'Day'), ('WEEK', 'Week'), ('MONTH', 'Month'), ('YEAR', 'Year')], - default='MONTH', - help_text='Pay every unit (e.g. 
DAY, WEEK, MONTH or YEAR)', - max_length=10), + model_name="subscription", + name="pay_every_unit", + field=models.CharField( + choices=[("DAY", "Day"), ("WEEK", "Week"), ("MONTH", "Month"), ("YEAR", "Year")], + default="MONTH", + help_text="Pay every unit (e.g. DAY, WEEK, MONTH or YEAR)", + max_length=10, + ), ), migrations.AlterField( - model_name='subscription', - name='plans', - field=models.ManyToManyField(blank=True, help_text='Plans to be suplied', to='payments.Plan'), + model_name="subscription", + name="plans", + field=models.ManyToManyField(blank=True, help_text="Plans to be suplied", to="payments.Plan"), ), migrations.AlterField( - model_name='subscription', - name='service_items', - field=models.ManyToManyField(blank=True, - help_text='Service items to be suplied', - through='payments.SubscriptionServiceItem', - to='payments.ServiceItem'), + model_name="subscription", + name="service_items", + field=models.ManyToManyField( + blank=True, + help_text="Service items to be suplied", + through="payments.SubscriptionServiceItem", + to="payments.ServiceItem", + ), ), migrations.AlterField( - model_name='subscription', - name='status', - field=models.CharField(choices=[('FREE_TRIAL', 'Free trial'), ('ACTIVE', 'Active'), - ('CANCELLED', 'Cancelled'), ('DEPRECATED', 'Deprecated'), - ('PAYMENT_ISSUE', 'Payment issue'), ('ERROR', 'Error'), - ('FULLY_PAID', 'Fully Paid')], - default='ACTIVE', - help_text='Status', - max_length=13), + model_name="subscription", + name="status", + field=models.CharField( + choices=[ + ("FREE_TRIAL", "Free trial"), + ("ACTIVE", "Active"), + ("CANCELLED", "Cancelled"), + ("DEPRECATED", "Deprecated"), + ("PAYMENT_ISSUE", "Payment issue"), + ("ERROR", "Error"), + ("FULLY_PAID", "Fully Paid"), + ], + default="ACTIVE", + help_text="Status", + max_length=13, + ), ), migrations.AlterField( - model_name='subscription', - name='status_message', - field=models.CharField(blank=True, - default=None, - help_text='Error message if status is ERROR', - max_length=250, - null=True), + model_name="subscription", + name="status_message", + field=models.CharField( + blank=True, default=None, help_text="Error message if status is ERROR", max_length=250, null=True + ), ), migrations.AlterField( - model_name='subscription', - name='user', - field=models.ForeignKey(help_text='Customer', - on_delete=django.db.models.deletion.CASCADE, - to=settings.AUTH_USER_MODEL), + model_name="subscription", + name="user", + field=models.ForeignKey( + help_text="Customer", on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL + ), ), migrations.AlterField( - model_name='subscription', - name='valid_until', - field=models.DateTimeField(blank=True, - default=None, - help_text='Valid until, after this date the subscription will be destroyed', - null=True), + model_name="subscription", + name="valid_until", + field=models.DateTimeField( + blank=True, + default=None, + help_text="Valid until, after this date the subscription will be destroyed", + null=True, + ), ), migrations.AlterField( - model_name='subscriptionserviceitem', - name='cohorts', - field=models.ManyToManyField(blank=True, help_text='Cohorts', to='admissions.Cohort'), + model_name="subscriptionserviceitem", + name="cohorts", + field=models.ManyToManyField(blank=True, help_text="Cohorts", to="admissions.Cohort"), ), migrations.AlterField( - model_name='subscriptionserviceitem', - name='mentorship_service_set', - field=models.ForeignKey(blank=True, - help_text='Mentorship service set', - null=True, - 
on_delete=django.db.models.deletion.CASCADE, - to='payments.mentorshipserviceset'), + model_name="subscriptionserviceitem", + name="mentorship_service_set", + field=models.ForeignKey( + blank=True, + help_text="Mentorship service set", + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="payments.mentorshipserviceset", + ), ), migrations.AlterField( - model_name='subscriptionserviceitem', - name='service_item', - field=models.ForeignKey(help_text='Service item', - on_delete=django.db.models.deletion.CASCADE, - to='payments.serviceitem'), + model_name="subscriptionserviceitem", + name="service_item", + field=models.ForeignKey( + help_text="Service item", on_delete=django.db.models.deletion.CASCADE, to="payments.serviceitem" + ), ), migrations.AlterField( - model_name='subscriptionserviceitem', - name='subscription', - field=models.ForeignKey(help_text='Subscription', - on_delete=django.db.models.deletion.CASCADE, - to='payments.subscription'), + model_name="subscriptionserviceitem", + name="subscription", + field=models.ForeignKey( + help_text="Subscription", on_delete=django.db.models.deletion.CASCADE, to="payments.subscription" + ), ), migrations.CreateModel( - name='MentorshipServiceSetTranslation', + name="MentorshipServiceSetTranslation", fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('lang', - models.CharField(help_text='ISO 639-1 language code + ISO 3166-1 alpha-2 country code, e.g. en-US', - max_length=5, - validators=[breathecode.utils.validators.language.validate_language_code])), - ('title', models.CharField(help_text='Title of the mentorship service set', max_length=60)), - ('description', models.CharField(help_text='Description of the mentorship service set', - max_length=255)), - ('short_description', - models.CharField(help_text='Short description of the mentorship service set', max_length=255)), - ('mentorship_service_set', - models.ForeignKey(help_text='Mentorship service set', - on_delete=django.db.models.deletion.CASCADE, - to='payments.mentorshipserviceset')), + ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "lang", + models.CharField( + help_text="ISO 639-1 language code + ISO 3166-1 alpha-2 country code, e.g. en-US", + max_length=5, + validators=[breathecode.utils.validators.language.validate_language_code], + ), + ), + ("title", models.CharField(help_text="Title of the mentorship service set", max_length=60)), + ( + "description", + models.CharField(help_text="Description of the mentorship service set", max_length=255), + ), + ( + "short_description", + models.CharField(help_text="Short description of the mentorship service set", max_length=255), + ), + ( + "mentorship_service_set", + models.ForeignKey( + help_text="Mentorship service set", + on_delete=django.db.models.deletion.CASCADE, + to="payments.mentorshipserviceset", + ), + ), ], ), migrations.CreateModel( - name='EventTypeSetTranslation', + name="EventTypeSetTranslation", fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('lang', - models.CharField(help_text='ISO 639-1 language code + ISO 3166-1 alpha-2 country code, e.g. 
en-US', - max_length=5, - validators=[breathecode.utils.validators.language.validate_language_code])), - ('title', models.CharField(help_text='Title of the event type set', max_length=60)), - ('description', models.CharField(help_text='Description of the event type set', max_length=255)), - ('short_description', - models.CharField(help_text='Short description of the event type set', max_length=255)), - ('event_type_set', - models.ForeignKey(help_text='Event type set', - on_delete=django.db.models.deletion.CASCADE, - to='payments.eventtypeset')), + ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "lang", + models.CharField( + help_text="ISO 639-1 language code + ISO 3166-1 alpha-2 country code, e.g. en-US", + max_length=5, + validators=[breathecode.utils.validators.language.validate_language_code], + ), + ), + ("title", models.CharField(help_text="Title of the event type set", max_length=60)), + ("description", models.CharField(help_text="Description of the event type set", max_length=255)), + ( + "short_description", + models.CharField(help_text="Short description of the event type set", max_length=255), + ), + ( + "event_type_set", + models.ForeignKey( + help_text="Event type set", + on_delete=django.db.models.deletion.CASCADE, + to="payments.eventtypeset", + ), + ), ], ), migrations.AddField( - model_name='bag', - name='selected_event_type_sets', - field=models.ManyToManyField(blank=True, - help_text='Selected event type sets for the plans of services', - to='payments.EventTypeSet'), + model_name="bag", + name="selected_event_type_sets", + field=models.ManyToManyField( + blank=True, help_text="Selected event type sets for the plans of services", to="payments.EventTypeSet" + ), ), migrations.AddField( - model_name='planfinancing', - name='event_type_set_selected', - field=models.ForeignKey(blank=True, - default=None, - help_text='Event type set which the plans and services is for', - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='payments.eventtypeset'), + model_name="planfinancing", + name="event_type_set_selected", + field=models.ForeignKey( + blank=True, + default=None, + help_text="Event type set which the plans and services is for", + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="payments.eventtypeset", + ), ), migrations.AddField( - model_name='planserviceitem', - name='event_type_sets', + model_name="planserviceitem", + name="event_type_sets", field=models.ManyToManyField( blank=True, - help_text='Available mentorship service sets to be sold in this service and plan', - to='payments.EventTypeSet'), + help_text="Available mentorship service sets to be sold in this service and plan", + to="payments.EventTypeSet", + ), ), migrations.AddField( - model_name='subscription', - name='event_type_set_selected', - field=models.ForeignKey(blank=True, - default=None, - help_text='Event type set which the plans and services is for', - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='payments.eventtypeset'), + model_name="subscription", + name="event_type_set_selected", + field=models.ForeignKey( + blank=True, + default=None, + help_text="Event type set which the plans and services is for", + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="payments.eventtypeset", + ), ), ] diff --git a/breathecode/payments/migrations/0021_auto_20230228_0343.py b/breathecode/payments/migrations/0021_auto_20230228_0343.py index b2c50d045..43a524fca 100644 --- 
a/breathecode/payments/migrations/0021_auto_20230228_0343.py +++ b/breathecode/payments/migrations/0021_auto_20230228_0343.py @@ -7,171 +7,208 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0055_cohort_available_as_saas'), - ('payments', '0020_auto_20230223_0634'), + ("admissions", "0055_cohort_available_as_saas"), + ("payments", "0020_auto_20230223_0634"), ] operations = [ migrations.RenameField( - model_name='planfinancing', - old_name='cohort_selected', - new_name='selected_cohort', + model_name="planfinancing", + old_name="cohort_selected", + new_name="selected_cohort", ), migrations.RenameField( - model_name='planfinancing', - old_name='event_type_set_selected', - new_name='selected_event_type_set', + model_name="planfinancing", + old_name="event_type_set_selected", + new_name="selected_event_type_set", ), migrations.RenameField( - model_name='planfinancing', - old_name='mentorship_service_set_selected', - new_name='selected_mentorship_service_set', + model_name="planfinancing", + old_name="mentorship_service_set_selected", + new_name="selected_mentorship_service_set", ), migrations.RenameField( - model_name='subscription', - old_name='cohort_selected', - new_name='selected_cohort', + model_name="subscription", + old_name="cohort_selected", + new_name="selected_cohort", ), migrations.RenameField( - model_name='subscription', - old_name='event_type_set_selected', - new_name='selected_event_type_set', + model_name="subscription", + old_name="event_type_set_selected", + new_name="selected_event_type_set", ), migrations.RenameField( - model_name='subscription', - old_name='mentorship_service_set_selected', - new_name='selected_mentorship_service_set', + model_name="subscription", + old_name="mentorship_service_set_selected", + new_name="selected_mentorship_service_set", ), migrations.RemoveField( - model_name='consumable', - name='event_type', + model_name="consumable", + name="event_type", ), migrations.RemoveField( - model_name='consumable', - name='mentorship_service', + model_name="consumable", + name="mentorship_service", ), migrations.RemoveField( - model_name='planserviceitem', - name='cohort_pattern', + model_name="planserviceitem", + name="cohort_pattern", ), migrations.RemoveField( - model_name='planserviceitem', - name='cohorts', + model_name="planserviceitem", + name="cohorts", ), migrations.RemoveField( - model_name='planserviceitem', - name='event_type_sets', + model_name="planserviceitem", + name="event_type_sets", ), migrations.RemoveField( - model_name='planserviceitem', - name='mentorship_service_sets', + model_name="planserviceitem", + name="mentorship_service_sets", ), migrations.AddField( - model_name='consumable', - name='event_type_set', - field=models.ForeignKey(blank=True, - default=None, - help_text='Event type which the consumable belongs to', - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='payments.eventtypeset'), + model_name="consumable", + name="event_type_set", + field=models.ForeignKey( + blank=True, + default=None, + help_text="Event type which the consumable belongs to", + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="payments.eventtypeset", + ), ), migrations.AddField( - model_name='consumable', - name='mentorship_service_set', - field=models.ForeignKey(blank=True, - default=None, - help_text='Mentorship service which the consumable belongs to', - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='payments.mentorshipserviceset'), + model_name="consumable", + 
name="mentorship_service_set", + field=models.ForeignKey( + blank=True, + default=None, + help_text="Mentorship service which the consumable belongs to", + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="payments.mentorshipserviceset", + ), ), migrations.AddField( - model_name='plan', - name='available_cohorts', - field=models.ManyToManyField(blank=True, - help_text='Available cohorts to be sold in this this service and plan', - to='admissions.Cohort'), + model_name="plan", + name="available_cohorts", + field=models.ManyToManyField( + blank=True, + help_text="Available cohorts to be sold in this this service and plan", + to="admissions.Cohort", + ), ), migrations.AddField( - model_name='plan', - name='available_event_type_sets', + model_name="plan", + name="available_event_type_sets", field=models.ManyToManyField( blank=True, - help_text='Available mentorship service sets to be sold in this service and plan', - to='payments.EventTypeSet'), + help_text="Available mentorship service sets to be sold in this service and plan", + to="payments.EventTypeSet", + ), ), migrations.AddField( - model_name='plan', - name='available_mentorship_service_sets', + model_name="plan", + name="available_mentorship_service_sets", field=models.ManyToManyField( blank=True, - help_text='Available mentorship service sets to be sold in this service and plan', - to='payments.MentorshipServiceSet'), + help_text="Available mentorship service sets to be sold in this service and plan", + to="payments.MentorshipServiceSet", + ), ), migrations.AddField( - model_name='plan', - name='cohort_pattern', - field=models.CharField(blank=True, - default=None, - help_text='Cohort pattern to find cohorts to be sold in this plan', - max_length=80, - null=True), + model_name="plan", + name="cohort_pattern", + field=models.CharField( + blank=True, + default=None, + help_text="Cohort pattern to find cohorts to be sold in this plan", + max_length=80, + null=True, + ), ), migrations.AddField( - model_name='service', - name='type', - field=models.CharField(choices=[('COHORT', 'Cohort'), ('MENTORSHIP_SERVICE_SET', 'Mentorship service set'), - ('EVENT_TYPE_SET', 'Event type set')], - default='COHORT', - help_text='Service type', - max_length=22), + model_name="service", + name="type", + field=models.CharField( + choices=[ + ("COHORT", "Cohort"), + ("MENTORSHIP_SERVICE_SET", "Mentorship service set"), + ("EVENT_TYPE_SET", "Event type set"), + ], + default="COHORT", + help_text="Service type", + max_length=22, + ), ), migrations.AlterField( - model_name='consumable', - name='cohort', - field=models.ForeignKey(blank=True, - default=None, - help_text='Cohort which the consumable belongs to', - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='admissions.cohort'), + model_name="consumable", + name="cohort", + field=models.ForeignKey( + blank=True, + default=None, + help_text="Cohort which the consumable belongs to", + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="admissions.cohort", + ), ), migrations.CreateModel( - name='AcademyService', + name="AcademyService", fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('price_per_unit', models.FloatField(default=1, help_text='Price per unit (e.g. 
1, 2, 3, ...)')), - ('cohort_patterns', - models.JSONField(blank=True, - default=[], - help_text='Array of cohort patterns to find cohorts to be sold in this plan')), - ('academy', - models.ForeignKey(help_text='Academy', - on_delete=django.db.models.deletion.CASCADE, - to='admissions.academy')), - ('available_cohorts', - models.ManyToManyField(blank=True, - help_text='Available cohorts to be sold in this this service and plan', - to='admissions.Cohort')), - ('available_event_type_sets', - models.ManyToManyField( - blank=True, - help_text='Available mentorship service sets to be sold in this service and plan', - to='payments.EventTypeSet')), - ('available_mentorship_service_sets', - models.ManyToManyField( - blank=True, - help_text='Available mentorship service sets to be sold in this service and plan', - to='payments.MentorshipServiceSet')), - ('currency', - models.ForeignKey(help_text='Currency', - on_delete=django.db.models.deletion.CASCADE, - to='payments.currency')), - ('service', - models.OneToOneField(help_text='Service', - on_delete=django.db.models.deletion.CASCADE, - to='payments.service')), + ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("price_per_unit", models.FloatField(default=1, help_text="Price per unit (e.g. 1, 2, 3, ...)")), + ( + "cohort_patterns", + models.JSONField( + blank=True, + default=[], + help_text="Array of cohort patterns to find cohorts to be sold in this plan", + ), + ), + ( + "academy", + models.ForeignKey( + help_text="Academy", on_delete=django.db.models.deletion.CASCADE, to="admissions.academy" + ), + ), + ( + "available_cohorts", + models.ManyToManyField( + blank=True, + help_text="Available cohorts to be sold in this this service and plan", + to="admissions.Cohort", + ), + ), + ( + "available_event_type_sets", + models.ManyToManyField( + blank=True, + help_text="Available mentorship service sets to be sold in this service and plan", + to="payments.EventTypeSet", + ), + ), + ( + "available_mentorship_service_sets", + models.ManyToManyField( + blank=True, + help_text="Available mentorship service sets to be sold in this service and plan", + to="payments.MentorshipServiceSet", + ), + ), + ( + "currency", + models.ForeignKey( + help_text="Currency", on_delete=django.db.models.deletion.CASCADE, to="payments.currency" + ), + ), + ( + "service", + models.OneToOneField( + help_text="Service", on_delete=django.db.models.deletion.CASCADE, to="payments.service" + ), + ), ], ), ] diff --git a/breathecode/payments/migrations/0022_auto_20230302_0633.py b/breathecode/payments/migrations/0022_auto_20230302_0633.py index 09dcf808e..412d464ee 100644 --- a/breathecode/payments/migrations/0022_auto_20230302_0633.py +++ b/breathecode/payments/migrations/0022_auto_20230302_0633.py @@ -7,36 +7,40 @@ class Migration(migrations.Migration): dependencies = [ - ('payments', '0021_auto_20230228_0343'), + ("payments", "0021_auto_20230228_0343"), ] operations = [ migrations.RemoveField( - model_name='plan', - name='available_event_type_sets', + model_name="plan", + name="available_event_type_sets", ), migrations.RemoveField( - model_name='plan', - name='available_mentorship_service_sets', + model_name="plan", + name="available_mentorship_service_sets", ), migrations.AddField( - model_name='plan', - name='event_type_set', - field=models.ForeignKey(blank=True, - default=None, - help_text='Event type sets to be sold in this service and plan', - null=True, - on_delete=django.db.models.deletion.SET_NULL, - 
to='payments.eventtypeset'), + model_name="plan", + name="event_type_set", + field=models.ForeignKey( + blank=True, + default=None, + help_text="Event type sets to be sold in this service and plan", + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to="payments.eventtypeset", + ), ), migrations.AddField( - model_name='plan', - name='mentorship_service_set', - field=models.ForeignKey(blank=True, - default=None, - help_text='Mentorship service sets to be sold in this service and plan', - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to='payments.mentorshipserviceset'), + model_name="plan", + name="mentorship_service_set", + field=models.ForeignKey( + blank=True, + default=None, + help_text="Mentorship service sets to be sold in this service and plan", + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to="payments.mentorshipserviceset", + ), ), ] diff --git a/breathecode/payments/migrations/0023_auto_20230308_0703.py b/breathecode/payments/migrations/0023_auto_20230308_0703.py index 60652712f..b01790560 100644 --- a/breathecode/payments/migrations/0023_auto_20230308_0703.py +++ b/breathecode/payments/migrations/0023_auto_20230308_0703.py @@ -7,42 +7,44 @@ class Migration(migrations.Migration): dependencies = [ - ('payments', '0022_auto_20230302_0633'), + ("payments", "0022_auto_20230302_0633"), ] operations = [ migrations.RemoveField( - model_name='planoffer', - name='from_syllabus', + model_name="planoffer", + name="from_syllabus", ), migrations.RemoveField( - model_name='planoffer', - name='suggested_plans', + model_name="planoffer", + name="suggested_plans", ), migrations.AddField( - model_name='planoffer', - name='expires_at', + model_name="planoffer", + name="expires_at", field=models.DateTimeField(blank=True, default=None, null=True), ), migrations.AddField( - model_name='planoffer', - name='show_modal', + model_name="planoffer", + name="show_modal", field=models.BooleanField(default=False), ), migrations.AddField( - model_name='planoffer', - name='suggested_plan', - field=models.ForeignKey(help_text='Suggested plans', - null=True, - on_delete=django.db.models.deletion.CASCADE, - related_name='plan_offer_to', - to='payments.plan'), + model_name="planoffer", + name="suggested_plan", + field=models.ForeignKey( + help_text="Suggested plans", + null=True, + on_delete=django.db.models.deletion.CASCADE, + related_name="plan_offer_to", + to="payments.plan", + ), ), migrations.AlterField( - model_name='planoffer', - name='original_plan', - field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, - related_name='plan_offer_from', - to='payments.plan'), + model_name="planoffer", + name="original_plan", + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, related_name="plan_offer_from", to="payments.plan" + ), ), ] diff --git a/breathecode/payments/migrations/0023_plan_has_waiting_list.py b/breathecode/payments/migrations/0023_plan_has_waiting_list.py index d88286eda..569575cde 100644 --- a/breathecode/payments/migrations/0023_plan_has_waiting_list.py +++ b/breathecode/payments/migrations/0023_plan_has_waiting_list.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('payments', '0022_auto_20230302_0633'), + ("payments", "0022_auto_20230302_0633"), ] operations = [ migrations.AddField( - model_name='plan', - name='has_waiting_list', - field=models.BooleanField(default=False, help_text='Has waiting list?'), + model_name="plan", + name="has_waiting_list", + field=models.BooleanField(default=False, 
help_text="Has waiting list?"), ), ] diff --git a/breathecode/payments/migrations/0024_merge_20230315_2252.py b/breathecode/payments/migrations/0024_merge_20230315_2252.py index 16129b4b7..72035b1ab 100644 --- a/breathecode/payments/migrations/0024_merge_20230315_2252.py +++ b/breathecode/payments/migrations/0024_merge_20230315_2252.py @@ -6,8 +6,8 @@ class Migration(migrations.Migration): dependencies = [ - ('payments', '0023_auto_20230308_0703'), - ('payments', '0023_plan_has_waiting_list'), + ("payments", "0023_auto_20230308_0703"), + ("payments", "0023_plan_has_waiting_list"), ] operations = [] diff --git a/breathecode/payments/migrations/0025_auto_20230317_0702.py b/breathecode/payments/migrations/0025_auto_20230317_0702.py index e921e29c4..afde47288 100644 --- a/breathecode/payments/migrations/0025_auto_20230317_0702.py +++ b/breathecode/payments/migrations/0025_auto_20230317_0702.py @@ -6,30 +6,46 @@ class Migration(migrations.Migration): dependencies = [ - ('payments', '0024_merge_20230315_2252'), + ("payments", "0024_merge_20230315_2252"), ] operations = [ migrations.AlterField( - model_name='planfinancing', - name='status', - field=models.CharField(choices=[('FREE_TRIAL', 'Free trial'), ('ACTIVE', 'Active'), - ('CANCELLED', 'Cancelled'), ('DEPRECATED', 'Deprecated'), - ('PAYMENT_ISSUE', 'Payment issue'), ('ERROR', 'Error'), - ('FULLY_PAID', 'Fully Paid'), ('EXPIRED', 'Expired')], - default='ACTIVE', - help_text='Status', - max_length=13), + model_name="planfinancing", + name="status", + field=models.CharField( + choices=[ + ("FREE_TRIAL", "Free trial"), + ("ACTIVE", "Active"), + ("CANCELLED", "Cancelled"), + ("DEPRECATED", "Deprecated"), + ("PAYMENT_ISSUE", "Payment issue"), + ("ERROR", "Error"), + ("FULLY_PAID", "Fully Paid"), + ("EXPIRED", "Expired"), + ], + default="ACTIVE", + help_text="Status", + max_length=13, + ), ), migrations.AlterField( - model_name='subscription', - name='status', - field=models.CharField(choices=[('FREE_TRIAL', 'Free trial'), ('ACTIVE', 'Active'), - ('CANCELLED', 'Cancelled'), ('DEPRECATED', 'Deprecated'), - ('PAYMENT_ISSUE', 'Payment issue'), ('ERROR', 'Error'), - ('FULLY_PAID', 'Fully Paid'), ('EXPIRED', 'Expired')], - default='ACTIVE', - help_text='Status', - max_length=13), + model_name="subscription", + name="status", + field=models.CharField( + choices=[ + ("FREE_TRIAL", "Free trial"), + ("ACTIVE", "Active"), + ("CANCELLED", "Cancelled"), + ("DEPRECATED", "Deprecated"), + ("PAYMENT_ISSUE", "Payment issue"), + ("ERROR", "Error"), + ("FULLY_PAID", "Fully Paid"), + ("EXPIRED", "Expired"), + ], + default="ACTIVE", + help_text="Status", + max_length=13, + ), ), ] diff --git a/breathecode/payments/migrations/0026_auto_20230502_2225.py b/breathecode/payments/migrations/0026_auto_20230502_2225.py index 258db4789..dcc2d82d5 100644 --- a/breathecode/payments/migrations/0026_auto_20230502_2225.py +++ b/breathecode/payments/migrations/0026_auto_20230502_2225.py @@ -6,51 +6,51 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0056_auto_20230317_1657'), - ('payments', '0025_auto_20230317_0702'), + ("admissions", "0056_auto_20230317_1657"), + ("payments", "0025_auto_20230317_0702"), ] operations = [ migrations.RemoveField( - model_name='academyservice', - name='available_cohorts', + model_name="academyservice", + name="available_cohorts", ), migrations.RemoveField( - model_name='academyservice', - name='cohort_patterns', + model_name="academyservice", + name="cohort_patterns", ), migrations.AddField( - model_name='academyservice', - 
name='bundle_size', + model_name="academyservice", + name="bundle_size", field=models.FloatField( default=1, - help_text= - 'Minimum unit size allowed to be bought, example: bundle_size=5, then you are allowed to buy a minimum of 5 units. Related to the discount ratio' + help_text="Minimum unit size allowed to be bought, example: bundle_size=5, then you are allowed to buy a minimum of 5 units. Related to the discount ratio", ), ), migrations.AddField( - model_name='academyservice', - name='discount_ratio', - field=models.FloatField(default=1, help_text='Will be used when calculated by the final price'), + model_name="academyservice", + name="discount_ratio", + field=models.FloatField(default=1, help_text="Will be used when calculated by the final price"), ), migrations.AddField( - model_name='academyservice', - name='max_amount', + model_name="academyservice", + name="max_amount", field=models.FloatField(default=1, help_text="Limit total amount, it doesn't matter the bundle size"), ), migrations.AddField( - model_name='academyservice', - name='max_items', + model_name="academyservice", + name="max_items", field=models.FloatField( - default=1, help_text="How many items can be bought in total, it doens't matter the bundle size"), + default=1, help_text="How many items can be bought in total, it doens't matter the bundle size" + ), ), migrations.AlterField( - model_name='plan', - name='available_cohorts', + model_name="plan", + name="available_cohorts", field=models.ManyToManyField( blank=True, - help_text= - 'Minimum unit size allowed to be bought, example: bundle_size=5, then you are allowed to buy a minimum of 5 units. Related to the discount ratio', - to='admissions.Cohort'), + help_text="Minimum unit size allowed to be bought, example: bundle_size=5, then you are allowed to buy a minimum of 5 units. 
Related to the discount ratio", + to="admissions.Cohort", + ), ), ] diff --git a/breathecode/payments/migrations/0026_plan_invites.py b/breathecode/payments/migrations/0026_plan_invites.py index 775882332..d48f00d68 100644 --- a/breathecode/payments/migrations/0026_plan_invites.py +++ b/breathecode/payments/migrations/0026_plan_invites.py @@ -6,17 +6,16 @@ class Migration(migrations.Migration): dependencies = [ - ('authenticate', '0036_githubacademyuserlog'), - ('payments', '0025_auto_20230317_0702'), + ("authenticate", "0036_githubacademyuserlog"), + ("payments", "0025_auto_20230317_0702"), ] operations = [ migrations.AddField( - model_name='plan', - name='invites', - field=models.ManyToManyField(blank=True, - help_text="Plan's invites", - related_name='plans', - to='authenticate.UserInvite'), + model_name="plan", + name="invites", + field=models.ManyToManyField( + blank=True, help_text="Plan's invites", related_name="plans", to="authenticate.UserInvite" + ), ), ] diff --git a/breathecode/payments/migrations/0027_merge_0026_auto_20230502_2225_0026_plan_invites.py b/breathecode/payments/migrations/0027_merge_0026_auto_20230502_2225_0026_plan_invites.py index 85e694560..1ccf5bc24 100644 --- a/breathecode/payments/migrations/0027_merge_0026_auto_20230502_2225_0026_plan_invites.py +++ b/breathecode/payments/migrations/0027_merge_0026_auto_20230502_2225_0026_plan_invites.py @@ -6,8 +6,8 @@ class Migration(migrations.Migration): dependencies = [ - ('payments', '0026_auto_20230502_2225'), - ('payments', '0026_plan_invites'), + ("payments", "0026_auto_20230502_2225"), + ("payments", "0026_plan_invites"), ] operations = [] diff --git a/breathecode/payments/migrations/0028_auto_20230607_2028.py b/breathecode/payments/migrations/0028_auto_20230607_2028.py index 9818e6930..1fd73bb0f 100644 --- a/breathecode/payments/migrations/0028_auto_20230607_2028.py +++ b/breathecode/payments/migrations/0028_auto_20230607_2028.py @@ -6,22 +6,23 @@ class Migration(migrations.Migration): dependencies = [ - ('payments', '0027_merge_0026_auto_20230502_2225_0026_plan_invites'), + ("payments", "0027_merge_0026_auto_20230502_2225_0026_plan_invites"), ] operations = [ migrations.RemoveField( - model_name='service', - name='currency', + model_name="service", + name="currency", ), migrations.RemoveField( - model_name='service', - name='price_per_unit', + model_name="service", + name="price_per_unit", ), migrations.AlterField( - model_name='academyservice', - name='max_items', + model_name="academyservice", + name="max_items", field=models.FloatField( - default=1, help_text="How many items can be bought in total, it doesn't matter the bundle size"), + default=1, help_text="How many items can be bought in total, it doesn't matter the bundle size" + ), ), ] diff --git a/breathecode/payments/migrations/0029_alter_bag_type.py b/breathecode/payments/migrations/0029_alter_bag_type.py index 90bebad28..cbad93ff6 100644 --- a/breathecode/payments/migrations/0029_alter_bag_type.py +++ b/breathecode/payments/migrations/0029_alter_bag_type.py @@ -6,17 +6,18 @@ class Migration(migrations.Migration): dependencies = [ - ('payments', '0028_auto_20230607_2028'), + ("payments", "0028_auto_20230607_2028"), ] operations = [ migrations.AlterField( - model_name='bag', - name='type', - field=models.CharField(choices=[('BAG', 'Bag'), ('CHARGE', 'Charge'), ('PREVIEW', 'Preview'), - ('INVITED', 'Invited')], - default='BAG', - help_text='Bag type', - max_length=7), + model_name="bag", + name="type", + field=models.CharField( + choices=[("BAG", 
"Bag"), ("CHARGE", "Charge"), ("PREVIEW", "Preview"), ("INVITED", "Invited")], + default="BAG", + help_text="Bag type", + max_length=7, + ), ), ] diff --git a/breathecode/payments/migrations/0029_service_title.py b/breathecode/payments/migrations/0029_service_title.py index 9af23ae2e..953aa101d 100644 --- a/breathecode/payments/migrations/0029_service_title.py +++ b/breathecode/payments/migrations/0029_service_title.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('payments', '0028_auto_20230607_2028'), + ("payments", "0028_auto_20230607_2028"), ] operations = [ migrations.AddField( - model_name='service', - name='title', + model_name="service", + name="title", field=models.CharField(blank=True, default=None, max_length=60, null=True), ), ] diff --git a/breathecode/payments/migrations/0030_auto_20230912_0555.py b/breathecode/payments/migrations/0030_auto_20230912_0555.py index 73c5c5de3..d10f776a7 100644 --- a/breathecode/payments/migrations/0030_auto_20230912_0555.py +++ b/breathecode/payments/migrations/0030_auto_20230912_0555.py @@ -8,143 +8,169 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0060_alter_cohortuser_educational_status'), - ('payments', '0029_alter_bag_type'), + ("admissions", "0060_alter_cohortuser_educational_status"), + ("payments", "0029_alter_bag_type"), ] operations = [ migrations.CreateModel( - name='CohortSet', + name="CohortSet", fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('slug', - models.SlugField( - help_text= - 'A human-readable identifier, it must be unique and it can only contain letters, numbers and hyphens', - max_length=100, - unique=True)), - ('academy', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='admissions.academy')), - ('cohorts', models.ManyToManyField(blank=True, to='admissions.Cohort')), + ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "slug", + models.SlugField( + help_text="A human-readable identifier, it must be unique and it can only contain letters, numbers and hyphens", + max_length=100, + unique=True, + ), + ), + ("academy", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="admissions.academy")), + ("cohorts", models.ManyToManyField(blank=True, to="admissions.Cohort")), ], ), migrations.RemoveField( - model_name='bag', - name='selected_cohorts', + model_name="bag", + name="selected_cohorts", ), migrations.RemoveField( - model_name='bag', - name='selected_event_type_sets', + model_name="bag", + name="selected_event_type_sets", ), migrations.RemoveField( - model_name='bag', - name='selected_mentorship_service_sets', + model_name="bag", + name="selected_mentorship_service_sets", ), migrations.RemoveField( - model_name='consumable', - name='cohort', + model_name="consumable", + name="cohort", ), migrations.RemoveField( - model_name='plan', - name='available_cohorts', + model_name="plan", + name="available_cohorts", ), migrations.RemoveField( - model_name='plan', - name='cohort_pattern', + model_name="plan", + name="cohort_pattern", ), migrations.RemoveField( - model_name='planfinancing', - name='selected_cohort', + model_name="planfinancing", + name="selected_cohort", ), migrations.RemoveField( - model_name='subscription', - name='selected_cohort', + model_name="subscription", + name="selected_cohort", ), migrations.AlterField( - model_name='plan', - name='event_type_set', - field=models.ForeignKey(blank=True, - 
default=None, - help_text='Event type set to be sold in this service and plan', - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to='payments.eventtypeset'), + model_name="plan", + name="event_type_set", + field=models.ForeignKey( + blank=True, + default=None, + help_text="Event type set to be sold in this service and plan", + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to="payments.eventtypeset", + ), ), migrations.AlterField( - model_name='plan', - name='mentorship_service_set', - field=models.ForeignKey(blank=True, - default=None, - help_text='Mentorship service set to be sold in this service and plan', - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to='payments.mentorshipserviceset'), + model_name="plan", + name="mentorship_service_set", + field=models.ForeignKey( + blank=True, + default=None, + help_text="Mentorship service set to be sold in this service and plan", + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to="payments.mentorshipserviceset", + ), ), migrations.AlterField( - model_name='service', - name='type', - field=models.CharField(choices=[('COHORT_SET', 'Cohort set'), - ('MENTORSHIP_SERVICE_SET', 'Mentorship service set'), - ('EVENT_TYPE_SET', 'Event type set')], - default='COHORT_SET', - help_text='Service type', - max_length=22), + model_name="service", + name="type", + field=models.CharField( + choices=[ + ("COHORT_SET", "Cohort set"), + ("MENTORSHIP_SERVICE_SET", "Mentorship service set"), + ("EVENT_TYPE_SET", "Event type set"), + ], + default="COHORT_SET", + help_text="Service type", + max_length=22, + ), ), migrations.CreateModel( - name='CohortSetTranslation', + name="CohortSetTranslation", fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('lang', - models.CharField(help_text='ISO 639-1 language code + ISO 3166-1 alpha-2 country code, e.g. en-US', - max_length=5, - validators=[breathecode.utils.validators.language.validate_language_code])), - ('title', models.CharField(help_text='Title of the cohort set', max_length=60)), - ('description', models.CharField(help_text='Description of the cohort set', max_length=255)), - ('short_description', models.CharField(help_text='Short description of the cohort set', - max_length=255)), - ('cohort_set', - models.ForeignKey(help_text='Cohort set', - on_delete=django.db.models.deletion.CASCADE, - to='payments.cohortset')), + ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "lang", + models.CharField( + help_text="ISO 639-1 language code + ISO 3166-1 alpha-2 country code, e.g. 
en-US", + max_length=5, + validators=[breathecode.utils.validators.language.validate_language_code], + ), + ), + ("title", models.CharField(help_text="Title of the cohort set", max_length=60)), + ("description", models.CharField(help_text="Description of the cohort set", max_length=255)), + ( + "short_description", + models.CharField(help_text="Short description of the cohort set", max_length=255), + ), + ( + "cohort_set", + models.ForeignKey( + help_text="Cohort set", on_delete=django.db.models.deletion.CASCADE, to="payments.cohortset" + ), + ), ], ), migrations.AddField( - model_name='consumable', - name='cohort_set', - field=models.ForeignKey(blank=True, - default=None, - help_text='Cohort which the consumable belongs to', - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='payments.cohortset'), + model_name="consumable", + name="cohort_set", + field=models.ForeignKey( + blank=True, + default=None, + help_text="Cohort which the consumable belongs to", + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="payments.cohortset", + ), ), migrations.AddField( - model_name='plan', - name='cohort_set', - field=models.ForeignKey(blank=True, - default=None, - help_text='Cohort sets to be sold in this service and plan', - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to='payments.cohortset'), + model_name="plan", + name="cohort_set", + field=models.ForeignKey( + blank=True, + default=None, + help_text="Cohort sets to be sold in this service and plan", + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to="payments.cohortset", + ), ), migrations.AddField( - model_name='planfinancing', - name='selected_cohort_set', - field=models.ForeignKey(blank=True, - default=None, - help_text='Cohort which the plans and services is for', - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='payments.cohortset'), + model_name="planfinancing", + name="selected_cohort_set", + field=models.ForeignKey( + blank=True, + default=None, + help_text="Cohort which the plans and services is for", + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="payments.cohortset", + ), ), migrations.AddField( - model_name='subscription', - name='selected_cohort_set', - field=models.ForeignKey(blank=True, - default=None, - help_text='Cohort which the plans and services is for', - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='payments.cohortset'), + model_name="subscription", + name="selected_cohort_set", + field=models.ForeignKey( + blank=True, + default=None, + help_text="Cohort which the plans and services is for", + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="payments.cohortset", + ), ), ] diff --git a/breathecode/payments/migrations/0030_merge_0029_alter_bag_type_0029_service_title.py b/breathecode/payments/migrations/0030_merge_0029_alter_bag_type_0029_service_title.py index 47173b894..0d2f8063a 100644 --- a/breathecode/payments/migrations/0030_merge_0029_alter_bag_type_0029_service_title.py +++ b/breathecode/payments/migrations/0030_merge_0029_alter_bag_type_0029_service_title.py @@ -6,8 +6,8 @@ class Migration(migrations.Migration): dependencies = [ - ('payments', '0029_alter_bag_type'), - ('payments', '0029_service_title'), + ("payments", "0029_alter_bag_type"), + ("payments", "0029_service_title"), ] operations = [] diff --git a/breathecode/payments/migrations/0031_merge_20230912_2353.py b/breathecode/payments/migrations/0031_merge_20230912_2353.py index 4e7016571..e9fe530ea 100644 --- 
a/breathecode/payments/migrations/0031_merge_20230912_2353.py +++ b/breathecode/payments/migrations/0031_merge_20230912_2353.py @@ -6,8 +6,8 @@ class Migration(migrations.Migration): dependencies = [ - ('payments', '0030_auto_20230912_0555'), - ('payments', '0030_merge_0029_alter_bag_type_0029_service_title'), + ("payments", "0030_auto_20230912_0555"), + ("payments", "0030_merge_0029_alter_bag_type_0029_service_title"), ] operations = [] diff --git a/breathecode/payments/migrations/0032_auto_20230915_0702.py b/breathecode/payments/migrations/0032_auto_20230915_0702.py index 7c8599052..9f2a9a639 100644 --- a/breathecode/payments/migrations/0032_auto_20230915_0702.py +++ b/breathecode/payments/migrations/0032_auto_20230915_0702.py @@ -7,152 +7,185 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0061_academy_white_label_url'), - ('payments', '0031_merge_20230912_2353'), + ("admissions", "0061_academy_white_label_url"), + ("payments", "0031_merge_20230912_2353"), ] operations = [ migrations.AddField( - model_name='planfinancing', - name='joined_cohorts', - field=models.ManyToManyField(blank=True, - help_text='Cohorts those that he/she joined', - to='admissions.Cohort'), + model_name="planfinancing", + name="joined_cohorts", + field=models.ManyToManyField( + blank=True, help_text="Cohorts those that he/she joined", to="admissions.Cohort" + ), ), migrations.AddField( - model_name='subscription', - name='joined_cohorts', - field=models.ManyToManyField(blank=True, - help_text='Cohorts those that he/she joined', - to='admissions.Cohort'), + model_name="subscription", + name="joined_cohorts", + field=models.ManyToManyField( + blank=True, help_text="Cohorts those that he/she joined", to="admissions.Cohort" + ), ), migrations.AlterField( - model_name='bag', - name='status', - field=models.CharField(choices=[('RENEWAL', 'Renewal'), ('CHECKING', 'Checking'), ('PAID', 'Paid')], - db_index=True, - default='CHECKING', - help_text='Bag status', - max_length=8), + model_name="bag", + name="status", + field=models.CharField( + choices=[("RENEWAL", "Renewal"), ("CHECKING", "Checking"), ("PAID", "Paid")], + db_index=True, + default="CHECKING", + help_text="Bag status", + max_length=8, + ), ), migrations.AlterField( - model_name='consumable', - name='unit_type', - field=models.CharField(choices=[('UNIT', 'Unit')], - db_index=True, - default='UNIT', - help_text='Unit type (e.g. UNIT))', - max_length=10), + model_name="consumable", + name="unit_type", + field=models.CharField( + choices=[("UNIT", "Unit")], + db_index=True, + default="UNIT", + help_text="Unit type (e.g. UNIT))", + max_length=10, + ), ), migrations.AlterField( - model_name='currency', - name='code', - field=models.CharField(db_index=True, - help_text='ISO 4217 currency code (e.g. USD, EUR, MXN)', - max_length=3, - unique=True), + model_name="currency", + name="code", + field=models.CharField( + db_index=True, help_text="ISO 4217 currency code (e.g. 
USD, EUR, MXN)", max_length=3, unique=True + ), ), migrations.AlterField( - model_name='invoice', - name='status', - field=models.CharField(choices=[('FULFILLED', 'Fulfilled'), ('REJECTED', 'Rejected'), - ('PENDING', 'Pending'), ('REFUNDED', 'Refunded'), - ('DISPUTED_AS_FRAUD', 'Disputed as fraud')], - db_index=True, - default='PENDING', - help_text='Invoice status', - max_length=17), + model_name="invoice", + name="status", + field=models.CharField( + choices=[ + ("FULFILLED", "Fulfilled"), + ("REJECTED", "Rejected"), + ("PENDING", "Pending"), + ("REFUNDED", "Refunded"), + ("DISPUTED_AS_FRAUD", "Disputed as fraud"), + ], + db_index=True, + default="PENDING", + help_text="Invoice status", + max_length=17, + ), ), migrations.AlterField( - model_name='plan', - name='is_onboarding', - field=models.BooleanField(db_index=True, default=False, help_text='Is onboarding plan?'), + model_name="plan", + name="is_onboarding", + field=models.BooleanField(db_index=True, default=False, help_text="Is onboarding plan?"), ), migrations.AlterField( - model_name='plan', - name='slug', + model_name="plan", + name="slug", field=models.CharField( db_index=True, - help_text= - 'A human-readable identifier, it must be unique and it can only contain letters, numbers and hyphens', + help_text="A human-readable identifier, it must be unique and it can only contain letters, numbers and hyphens", max_length=60, - unique=True), + unique=True, + ), ), migrations.AlterField( - model_name='planfinancing', - name='selected_cohort_set', - field=models.ForeignKey(blank=True, - default=None, - help_text='Cohort set which the plans and services is for', - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='payments.cohortset'), + model_name="planfinancing", + name="selected_cohort_set", + field=models.ForeignKey( + blank=True, + default=None, + help_text="Cohort set which the plans and services is for", + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="payments.cohortset", + ), ), migrations.AlterField( - model_name='planfinancing', - name='status', - field=models.CharField(choices=[('FREE_TRIAL', 'Free trial'), ('ACTIVE', 'Active'), - ('CANCELLED', 'Cancelled'), ('DEPRECATED', 'Deprecated'), - ('PAYMENT_ISSUE', 'Payment issue'), ('ERROR', 'Error'), - ('FULLY_PAID', 'Fully Paid'), ('EXPIRED', 'Expired')], - db_index=True, - default='ACTIVE', - help_text='Status', - max_length=13), + model_name="planfinancing", + name="status", + field=models.CharField( + choices=[ + ("FREE_TRIAL", "Free trial"), + ("ACTIVE", "Active"), + ("CANCELLED", "Cancelled"), + ("DEPRECATED", "Deprecated"), + ("PAYMENT_ISSUE", "Payment issue"), + ("ERROR", "Error"), + ("FULLY_PAID", "Fully Paid"), + ("EXPIRED", "Expired"), + ], + db_index=True, + default="ACTIVE", + help_text="Status", + max_length=13, + ), ), migrations.AlterField( - model_name='service', - name='private', - field=models.BooleanField(db_index=True, default=True, help_text='If the asset is private or not'), + model_name="service", + name="private", + field=models.BooleanField(db_index=True, default=True, help_text="If the asset is private or not"), ), migrations.AlterField( - model_name='service', - name='slug', + model_name="service", + name="slug", field=models.CharField( db_index=True, - help_text= - 'A human-readable identifier, it must be unique and it can only contain letters, numbers and hyphens', + help_text="A human-readable identifier, it must be unique and it can only contain letters, numbers and hyphens", max_length=60, - unique=True), + 
unique=True, + ), ), migrations.AlterField( - model_name='serviceitem', - name='unit_type', - field=models.CharField(choices=[('UNIT', 'Unit')], - db_index=True, - default='UNIT', - help_text='Unit type (e.g. UNIT))', - max_length=10), + model_name="serviceitem", + name="unit_type", + field=models.CharField( + choices=[("UNIT", "Unit")], + db_index=True, + default="UNIT", + help_text="Unit type (e.g. UNIT))", + max_length=10, + ), ), migrations.AlterField( - model_name='subscription', - name='selected_cohort_set', - field=models.ForeignKey(blank=True, - default=None, - help_text='Cohort set which the plans and services is for', - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='payments.cohortset'), + model_name="subscription", + name="selected_cohort_set", + field=models.ForeignKey( + blank=True, + default=None, + help_text="Cohort set which the plans and services is for", + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="payments.cohortset", + ), ), migrations.AlterField( - model_name='subscription', - name='status', - field=models.CharField(choices=[('FREE_TRIAL', 'Free trial'), ('ACTIVE', 'Active'), - ('CANCELLED', 'Cancelled'), ('DEPRECATED', 'Deprecated'), - ('PAYMENT_ISSUE', 'Payment issue'), ('ERROR', 'Error'), - ('FULLY_PAID', 'Fully Paid'), ('EXPIRED', 'Expired')], - db_index=True, - default='ACTIVE', - help_text='Status', - max_length=13), + model_name="subscription", + name="status", + field=models.CharField( + choices=[ + ("FREE_TRIAL", "Free trial"), + ("ACTIVE", "Active"), + ("CANCELLED", "Cancelled"), + ("DEPRECATED", "Deprecated"), + ("PAYMENT_ISSUE", "Payment issue"), + ("ERROR", "Error"), + ("FULLY_PAID", "Fully Paid"), + ("EXPIRED", "Expired"), + ], + db_index=True, + default="ACTIVE", + help_text="Status", + max_length=13, + ), ), migrations.AlterField( - model_name='subscription', - name='valid_until', - field=models.DateTimeField(blank=True, - db_index=True, - default=None, - help_text='Valid until, after this date the subscription will be destroyed', - null=True), + model_name="subscription", + name="valid_until", + field=models.DateTimeField( + blank=True, + db_index=True, + default=None, + help_text="Valid until, after this date the subscription will be destroyed", + null=True, + ), ), ] diff --git a/breathecode/payments/migrations/0033_auto_20230920_2023.py b/breathecode/payments/migrations/0033_auto_20230920_2023.py index c199c8968..cfd8f7c80 100644 --- a/breathecode/payments/migrations/0033_auto_20230920_2023.py +++ b/breathecode/payments/migrations/0033_auto_20230920_2023.py @@ -7,27 +7,31 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0062_syllabus_is_documentation'), - ('payments', '0032_auto_20230915_0702'), + ("admissions", "0062_syllabus_is_documentation"), + ("payments", "0032_auto_20230915_0702"), ] operations = [ migrations.RemoveField( - model_name='cohortset', - name='cohorts', + model_name="cohortset", + name="cohorts", ), migrations.CreateModel( - name='CohortSetCohort', + name="CohortSetCohort", fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('cohort', - models.ForeignKey(help_text='Cohort', - on_delete=django.db.models.deletion.CASCADE, - to='admissions.cohort')), - ('cohort_set', - models.ForeignKey(help_text='Cohort set', - on_delete=django.db.models.deletion.CASCADE, - to='payments.cohortset')), + ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "cohort", + 
models.ForeignKey( + help_text="Cohort", on_delete=django.db.models.deletion.CASCADE, to="admissions.cohort" + ), + ), + ( + "cohort_set", + models.ForeignKey( + help_text="Cohort set", on_delete=django.db.models.deletion.CASCADE, to="payments.cohortset" + ), + ), ], ), ] diff --git a/breathecode/payments/migrations/0033_serviceitemfeature_title.py b/breathecode/payments/migrations/0033_serviceitemfeature_title.py index d06976acf..f36f9bb22 100644 --- a/breathecode/payments/migrations/0033_serviceitemfeature_title.py +++ b/breathecode/payments/migrations/0033_serviceitemfeature_title.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('payments', '0032_auto_20230915_0702'), + ("payments", "0032_auto_20230915_0702"), ] operations = [ migrations.AddField( - model_name='serviceitemfeature', - name='title', - field=models.CharField(default=None, help_text='Title of the service item', max_length=30, null=True), + model_name="serviceitemfeature", + name="title", + field=models.CharField(default=None, help_text="Title of the service item", max_length=30, null=True), ), ] diff --git a/breathecode/payments/migrations/0034_cohortset_cohorts.py b/breathecode/payments/migrations/0034_cohortset_cohorts.py index 29e1bf98f..389eabc2d 100644 --- a/breathecode/payments/migrations/0034_cohortset_cohorts.py +++ b/breathecode/payments/migrations/0034_cohortset_cohorts.py @@ -6,14 +6,14 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0062_syllabus_is_documentation'), - ('payments', '0033_auto_20230920_2023'), + ("admissions", "0062_syllabus_is_documentation"), + ("payments", "0033_auto_20230920_2023"), ] operations = [ migrations.AddField( - model_name='cohortset', - name='cohorts', - field=models.ManyToManyField(blank=True, through='payments.CohortSetCohort', to='admissions.Cohort'), + model_name="cohortset", + name="cohorts", + field=models.ManyToManyField(blank=True, through="payments.CohortSetCohort", to="admissions.Cohort"), ), ] diff --git a/breathecode/payments/migrations/0035_merge_20230921_2255.py b/breathecode/payments/migrations/0035_merge_20230921_2255.py index 9242009f6..bf51966f7 100644 --- a/breathecode/payments/migrations/0035_merge_20230921_2255.py +++ b/breathecode/payments/migrations/0035_merge_20230921_2255.py @@ -6,8 +6,8 @@ class Migration(migrations.Migration): dependencies = [ - ('payments', '0033_serviceitemfeature_title'), - ('payments', '0034_cohortset_cohorts'), + ("payments", "0033_serviceitemfeature_title"), + ("payments", "0034_cohortset_cohorts"), ] operations = [] diff --git a/breathecode/payments/migrations/0035_merge_20230923_0701.py b/breathecode/payments/migrations/0035_merge_20230923_0701.py index af14309e0..81d0aa013 100644 --- a/breathecode/payments/migrations/0035_merge_20230923_0701.py +++ b/breathecode/payments/migrations/0035_merge_20230923_0701.py @@ -6,8 +6,8 @@ class Migration(migrations.Migration): dependencies = [ - ('payments', '0033_serviceitemfeature_title'), - ('payments', '0034_cohortset_cohorts'), + ("payments", "0033_serviceitemfeature_title"), + ("payments", "0034_cohortset_cohorts"), ] operations = [] diff --git a/breathecode/payments/migrations/0036_merge_20230926_0017.py b/breathecode/payments/migrations/0036_merge_20230926_0017.py index aceaf3a7d..ce91399ef 100644 --- a/breathecode/payments/migrations/0036_merge_20230926_0017.py +++ b/breathecode/payments/migrations/0036_merge_20230926_0017.py @@ -6,8 +6,8 @@ class Migration(migrations.Migration): dependencies = [ - ('payments', 
'0035_merge_20230921_2255'), - ('payments', '0035_merge_20230923_0701'), + ("payments", "0035_merge_20230921_2255"), + ("payments", "0035_merge_20230923_0701"), ] operations = [] diff --git a/breathecode/payments/migrations/0037_auto_20231025_1723.py b/breathecode/payments/migrations/0037_auto_20231025_1723.py index b7ba5dff5..9bceb5dea 100644 --- a/breathecode/payments/migrations/0037_auto_20231025_1723.py +++ b/breathecode/payments/migrations/0037_auto_20231025_1723.py @@ -6,20 +6,22 @@ class Migration(migrations.Migration): dependencies = [ - ('payments', '0036_merge_20230926_0017'), + ("payments", "0036_merge_20230926_0017"), ] operations = [ migrations.AddField( - model_name='consumable', - name='sort_priority', + model_name="consumable", + name="sort_priority", field=models.IntegerField( - default=1, help_text='(e.g. 1, 2, 3, ...) It is going to be used to sort the items on the frontend'), + default=1, help_text="(e.g. 1, 2, 3, ...) It is going to be used to sort the items on the frontend" + ), ), migrations.AddField( - model_name='serviceitem', - name='sort_priority', + model_name="serviceitem", + name="sort_priority", field=models.IntegerField( - default=1, help_text='(e.g. 1, 2, 3, ...) It is going to be used to sort the items on the frontend'), + default=1, help_text="(e.g. 1, 2, 3, ...) It is going to be used to sort the items on the frontend" + ), ), ] diff --git a/breathecode/payments/migrations/0038_service_icon_url.py b/breathecode/payments/migrations/0038_service_icon_url.py index 8b791dda6..8e855ff74 100644 --- a/breathecode/payments/migrations/0038_service_icon_url.py +++ b/breathecode/payments/migrations/0038_service_icon_url.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('payments', '0037_auto_20231025_1723'), + ("payments", "0037_auto_20231025_1723"), ] operations = [ migrations.AddField( - model_name='service', - name='icon_url', + model_name="service", + name="icon_url", field=models.URLField(blank=True, default=None, null=True), ), ] diff --git a/breathecode/payments/migrations/0039_auto_20231128_0239.py b/breathecode/payments/migrations/0039_auto_20231128_0239.py index db1e143a0..2383487bb 100644 --- a/breathecode/payments/migrations/0039_auto_20231128_0239.py +++ b/breathecode/payments/migrations/0039_auto_20231128_0239.py @@ -6,48 +6,53 @@ class Migration(migrations.Migration): dependencies = [ - ('payments', '0038_service_icon_url'), + ("payments", "0038_service_icon_url"), ] operations = [ migrations.AlterField( - model_name='plan', - name='is_renewable', + model_name="plan", + name="is_renewable", field=models.BooleanField( default=True, - help_text='Is if true, it will create a renewable subscription instead of a plan financing'), + help_text="Is if true, it will create a renewable subscription instead of a plan financing", + ), ), migrations.AlterField( - model_name='plan', - name='time_of_life', - field=models.IntegerField(blank=True, default=1, help_text='Plan lifetime (e.g. 1, 2, 3, ...)', null=True), + model_name="plan", + name="time_of_life", + field=models.IntegerField(blank=True, default=1, help_text="Plan lifetime (e.g. 1, 2, 3, ...)", null=True), ), migrations.AlterField( - model_name='plan', - name='time_of_life_unit', - field=models.CharField(blank=True, - choices=[('DAY', 'Day'), ('WEEK', 'Week'), ('MONTH', 'Month'), ('YEAR', 'Year')], - default='MONTH', - help_text='Lifetime unit (e.g. 
DAY, WEEK, MONTH or YEAR)', - max_length=10, - null=True), + model_name="plan", + name="time_of_life_unit", + field=models.CharField( + blank=True, + choices=[("DAY", "Day"), ("WEEK", "Week"), ("MONTH", "Month"), ("YEAR", "Year")], + default="MONTH", + help_text="Lifetime unit (e.g. DAY, WEEK, MONTH or YEAR)", + max_length=10, + null=True, + ), ), migrations.AlterField( - model_name='planfinancing', - name='plans', - field=models.ManyToManyField(blank=True, help_text='Plans to be supplied', to='payments.Plan'), + model_name="planfinancing", + name="plans", + field=models.ManyToManyField(blank=True, help_text="Plans to be supplied", to="payments.Plan"), ), migrations.AlterField( - model_name='subscription', - name='plans', - field=models.ManyToManyField(blank=True, help_text='Plans to be supplied', to='payments.Plan'), + model_name="subscription", + name="plans", + field=models.ManyToManyField(blank=True, help_text="Plans to be supplied", to="payments.Plan"), ), migrations.AlterField( - model_name='subscription', - name='service_items', - field=models.ManyToManyField(blank=True, - help_text='Service items to be supplied', - through='payments.SubscriptionServiceItem', - to='payments.ServiceItem'), + model_name="subscription", + name="service_items", + field=models.ManyToManyField( + blank=True, + help_text="Service items to be supplied", + through="payments.SubscriptionServiceItem", + to="payments.ServiceItem", + ), ), ] diff --git a/breathecode/payments/migrations/0040_alter_serviceitem_is_renewable.py b/breathecode/payments/migrations/0040_alter_serviceitem_is_renewable.py index c98c01842..7d4de7213 100644 --- a/breathecode/payments/migrations/0040_alter_serviceitem_is_renewable.py +++ b/breathecode/payments/migrations/0040_alter_serviceitem_is_renewable.py @@ -6,16 +6,16 @@ class Migration(migrations.Migration): dependencies = [ - ('payments', '0039_auto_20231128_0239'), + ("payments", "0039_auto_20231128_0239"), ] operations = [ migrations.AlterField( - model_name='serviceitem', - name='is_renewable', + model_name="serviceitem", + name="is_renewable", field=models.BooleanField( default=False, - help_text= - "If it's marked, the consumables will be renewed according to the renew_at and renew_at_unit values."), + help_text="If it's marked, the consumables will be renewed according to the renew_at and renew_at_unit values.", + ), ), ] diff --git a/breathecode/payments/migrations/0041_auto_20231218_1945.py b/breathecode/payments/migrations/0041_auto_20231218_1945.py index 11d79a3e0..5eea58006 100644 --- a/breathecode/payments/migrations/0041_auto_20231218_1945.py +++ b/breathecode/payments/migrations/0041_auto_20231218_1945.py @@ -7,75 +7,93 @@ class Migration(migrations.Migration): dependencies = [ - ('authenticate', '0048_auto_20231128_1224'), - ('payments', '0040_alter_serviceitem_is_renewable'), + ("authenticate", "0048_auto_20231128_1224"), + ("payments", "0040_alter_serviceitem_is_renewable"), ] operations = [ migrations.AlterField( - model_name='consumable', - name='cohort_set', - field=models.ForeignKey(blank=True, - default=None, - help_text='Cohort set which the consumable belongs to', - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='payments.cohortset'), + model_name="consumable", + name="cohort_set", + field=models.ForeignKey( + blank=True, + default=None, + help_text="Cohort set which the consumable belongs to", + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="payments.cohortset", + ), ), migrations.AlterField( - model_name='consumable', - 
name='event_type_set', - field=models.ForeignKey(blank=True, - default=None, - help_text='Event type set which the consumable belongs to', - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='payments.eventtypeset'), + model_name="consumable", + name="event_type_set", + field=models.ForeignKey( + blank=True, + default=None, + help_text="Event type set which the consumable belongs to", + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="payments.eventtypeset", + ), ), migrations.AlterField( - model_name='consumable', - name='mentorship_service_set', - field=models.ForeignKey(blank=True, - default=None, - help_text='Mentorship service set which the consumable belongs to', - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='payments.mentorshipserviceset'), + model_name="consumable", + name="mentorship_service_set", + field=models.ForeignKey( + blank=True, + default=None, + help_text="Mentorship service set which the consumable belongs to", + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="payments.mentorshipserviceset", + ), ), migrations.AlterField( - model_name='service', - name='type', - field=models.CharField(choices=[('COHORT_SET', 'Cohort set'), - ('MENTORSHIP_SERVICE_SET', 'Mentorship service set'), - ('EVENT_TYPE_SET', 'Event type set'), ('CHAT_SUPPORT', 'Chat support'), - ('CODE_REVIEW', 'Code review'), ('BUILD_PROJECT', 'Build project'), - ('TEST_PROJECT', 'Test project')], - default='COHORT_SET', - help_text='Service type', - max_length=22), + model_name="service", + name="type", + field=models.CharField( + choices=[ + ("COHORT_SET", "Cohort set"), + ("MENTORSHIP_SERVICE_SET", "Mentorship service set"), + ("EVENT_TYPE_SET", "Event type set"), + ("CHAT_SUPPORT", "Chat support"), + ("CODE_REVIEW", "Code review"), + ("BUILD_PROJECT", "Build project"), + ("TEST_PROJECT", "Test project"), + ], + default="COHORT_SET", + help_text="Service type", + max_length=22, + ), ), migrations.CreateModel( - name='AppService', + name="AppService", fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('service', models.SlugField(help_text='Microservice slug')), - ('app', - models.ForeignKey(blank=True, - default=None, - help_text='Subscription', - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='authenticate.app')), + ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("service", models.SlugField(help_text="Microservice slug")), + ( + "app", + models.ForeignKey( + blank=True, + default=None, + help_text="Subscription", + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="authenticate.app", + ), + ), ], ), migrations.AddField( - model_name='consumable', - name='app_service', - field=models.ForeignKey(blank=True, - default=None, - help_text='App service which the consumable belongs to', - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='payments.appservice'), + model_name="consumable", + name="app_service", + field=models.ForeignKey( + blank=True, + default=None, + help_text="App service which the consumable belongs to", + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="payments.appservice", + ), ), ] diff --git a/breathecode/payments/migrations/0042_auto_20231219_0504.py b/breathecode/payments/migrations/0042_auto_20231219_0504.py index fcce121a0..fbd471a9a 100644 --- a/breathecode/payments/migrations/0042_auto_20231219_0504.py +++ 
b/breathecode/payments/migrations/0042_auto_20231219_0504.py @@ -6,26 +6,28 @@ class Migration(migrations.Migration): dependencies = [ - ('payments', '0041_auto_20231218_1945'), + ("payments", "0041_auto_20231218_1945"), ] operations = [ migrations.AlterField( - model_name='financialreputation', - name='in_4geeks', - field=models.CharField(choices=[('GOOD', 'Good'), ('BAD', 'BAD'), ('FRAUD', 'Fraud'), - ('UNKNOWN', 'Unknown')], - default='GOOD', - help_text='4Geeks reputation', - max_length=17), + model_name="financialreputation", + name="in_4geeks", + field=models.CharField( + choices=[("GOOD", "Good"), ("BAD", "BAD"), ("FRAUD", "Fraud"), ("UNKNOWN", "Unknown")], + default="GOOD", + help_text="4Geeks reputation", + max_length=17, + ), ), migrations.AlterField( - model_name='financialreputation', - name='in_stripe', - field=models.CharField(choices=[('GOOD', 'Good'), ('BAD', 'BAD'), ('FRAUD', 'Fraud'), - ('UNKNOWN', 'Unknown')], - default='GOOD', - help_text='Stripe reputation', - max_length=17), + model_name="financialreputation", + name="in_stripe", + field=models.CharField( + choices=[("GOOD", "Good"), ("BAD", "BAD"), ("FRAUD", "Fraud"), ("UNKNOWN", "Unknown")], + default="GOOD", + help_text="Stripe reputation", + max_length=17, + ), ), ] diff --git a/breathecode/payments/migrations/0043_remove_consumable_app_service_serviceset_and_more.py b/breathecode/payments/migrations/0043_remove_consumable_app_service_serviceset_and_more.py index 30971892b..08ea8caa4 100644 --- a/breathecode/payments/migrations/0043_remove_consumable_app_service_serviceset_and_more.py +++ b/breathecode/payments/migrations/0043_remove_consumable_app_service_serviceset_and_more.py @@ -8,62 +8,80 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0064_academy_legal_name'), - ('payments', '0042_auto_20231219_0504'), + ("admissions", "0064_academy_legal_name"), + ("payments", "0042_auto_20231219_0504"), ] operations = [ migrations.RemoveField( - model_name='consumable', - name='app_service', + model_name="consumable", + name="app_service", ), migrations.CreateModel( - name='ServiceSet', + name="ServiceSet", fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('slug', - models.SlugField( - help_text= - 'A human-readable identifier, it must be unique and it can only contain letters, numbers and hyphens', - max_length=100, - unique=True)), - ('academy', - models.ForeignKey(blank=True, - default=None, - help_text='Academy', - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='admissions.academy')), - ('services', models.ManyToManyField(blank=True, help_text='Services', to='payments.service')), + ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "slug", + models.SlugField( + help_text="A human-readable identifier, it must be unique and it can only contain letters, numbers and hyphens", + max_length=100, + unique=True, + ), + ), + ( + "academy", + models.ForeignKey( + blank=True, + default=None, + help_text="Academy", + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="admissions.academy", + ), + ), + ("services", models.ManyToManyField(blank=True, help_text="Services", to="payments.service")), ], ), migrations.AddField( - model_name='consumable', - name='service_set', - field=models.ForeignKey(blank=True, - default=None, - help_text='Service set which the consumable belongs to', - null=True, - on_delete=django.db.models.deletion.CASCADE, - 
to='payments.serviceset'), + model_name="consumable", + name="service_set", + field=models.ForeignKey( + blank=True, + default=None, + help_text="Service set which the consumable belongs to", + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="payments.serviceset", + ), ), migrations.CreateModel( - name='ServiceSetTranslation', + name="ServiceSetTranslation", fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('lang', - models.CharField(help_text='ISO 639-1 language code + ISO 3166-1 alpha-2 country code, e.g. en-US', - max_length=5, - validators=[breathecode.utils.validators.language.validate_language_code])), - ('title', models.CharField(help_text='Title of the cohort set', max_length=60)), - ('description', models.CharField(help_text='Description of the cohort set', max_length=255)), - ('short_description', models.CharField(help_text='Short description of the cohort set', - max_length=255)), - ('service_set', - models.ForeignKey(help_text='Service set', - on_delete=django.db.models.deletion.CASCADE, - to='payments.serviceset')), + ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "lang", + models.CharField( + help_text="ISO 639-1 language code + ISO 3166-1 alpha-2 country code, e.g. en-US", + max_length=5, + validators=[breathecode.utils.validators.language.validate_language_code], + ), + ), + ("title", models.CharField(help_text="Title of the cohort set", max_length=60)), + ("description", models.CharField(help_text="Description of the cohort set", max_length=255)), + ( + "short_description", + models.CharField(help_text="Short description of the cohort set", max_length=255), + ), + ( + "service_set", + models.ForeignKey( + help_text="Service set", on_delete=django.db.models.deletion.CASCADE, to="payments.serviceset" + ), + ), ], ), - migrations.DeleteModel(name='AppService', ), + migrations.DeleteModel( + name="AppService", + ), ] diff --git a/breathecode/payments/migrations/0044_coupon_bag_coupons_seller_coupon_seller.py b/breathecode/payments/migrations/0044_coupon_bag_coupons_seller_coupon_seller.py index d3db27885..1afb1e5d6 100644 --- a/breathecode/payments/migrations/0044_coupon_bag_coupons_seller_coupon_seller.py +++ b/breathecode/payments/migrations/0044_coupon_bag_coupons_seller_coupon_seller.py @@ -9,85 +9,119 @@ class Migration(migrations.Migration): dependencies = [ - ('payments', '0043_remove_consumable_app_service_serviceset_and_more'), + ("payments", "0043_remove_consumable_app_service_serviceset_and_more"), migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( - name='Coupon', + name="Coupon", fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('slug', models.SlugField()), - ('discount_type', - models.CharField(choices=[('NO_DISCOUNT', 'No discount'), ('PERCENT_OFF', 'Percent off'), - ('FIXED_PRICE', 'Fixed price'), ('HAGGLING', 'Haggling')], - db_index=True, - default='PERCENT_OFF', - max_length=13)), - ('discount_value', models.FloatField(help_text="if type is PERCENT_OFF it's a percentage (range 0-1)")), - ('referral_type', - models.CharField(choices=[('NO_REFERRAL', 'No referral'), ('PERCENTAGE', 'Percentage'), - ('FIXED_PRICE', 'Fixed price')], - db_index=True, - default='NO_REFERRAL', - max_length=13)), - ('referral_value', models.FloatField(default=0, help_text='If set, the seller will receive a reward')), - ('auto', - 
models.BooleanField(db_index=True, - default=False, - help_text='Automatically apply this coupon (like a special offer)')), - ('how_many_offers', - models.IntegerField( - default=-1, - help_text="if -1 means no limits in the offers provided, if 0 nobody can't use this coupon")), - ('offered_at', models.DateTimeField(blank=True, default=None, null=True)), - ('expires_at', models.DateTimeField(blank=True, default=None, null=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('plans', - models.ManyToManyField( - blank=True, - help_text= - 'Available plans, if refferal type is not NO_REFERRAL it should keep empty, so, in this case, all plans will be available', - to='payments.plan')), + ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("slug", models.SlugField()), + ( + "discount_type", + models.CharField( + choices=[ + ("NO_DISCOUNT", "No discount"), + ("PERCENT_OFF", "Percent off"), + ("FIXED_PRICE", "Fixed price"), + ("HAGGLING", "Haggling"), + ], + db_index=True, + default="PERCENT_OFF", + max_length=13, + ), + ), + ("discount_value", models.FloatField(help_text="if type is PERCENT_OFF it's a percentage (range 0-1)")), + ( + "referral_type", + models.CharField( + choices=[ + ("NO_REFERRAL", "No referral"), + ("PERCENTAGE", "Percentage"), + ("FIXED_PRICE", "Fixed price"), + ], + db_index=True, + default="NO_REFERRAL", + max_length=13, + ), + ), + ("referral_value", models.FloatField(default=0, help_text="If set, the seller will receive a reward")), + ( + "auto", + models.BooleanField( + db_index=True, default=False, help_text="Automatically apply this coupon (like a special offer)" + ), + ), + ( + "how_many_offers", + models.IntegerField( + default=-1, + help_text="if -1 means no limits in the offers provided, if 0 nobody can't use this coupon", + ), + ), + ("offered_at", models.DateTimeField(blank=True, default=None, null=True)), + ("expires_at", models.DateTimeField(blank=True, default=None, null=True)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ( + "plans", + models.ManyToManyField( + blank=True, + help_text="Available plans, if refferal type is not NO_REFERRAL it should keep empty, so, in this case, all plans will be available", + to="payments.plan", + ), + ), ], ), migrations.AddField( - model_name='bag', - name='coupons', - field=models.ManyToManyField(blank=True, - help_text='Coupons applied during the sale', - limit_choices_to=breathecode.payments.models.limit_coupon_choices, - to='payments.coupon'), + model_name="bag", + name="coupons", + field=models.ManyToManyField( + blank=True, + help_text="Coupons applied during the sale", + limit_choices_to=breathecode.payments.models.limit_coupon_choices, + to="payments.coupon", + ), ), migrations.CreateModel( - name='Seller', + name="Seller", fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('name', models.CharField(help_text='Company name or person name', max_length=30)), - ('type', - models.CharField(choices=[('INDIVIDUAL', 'Individual'), ('BUSINESS', 'Business')], - db_index=True, - default='INDIVIDUAL', - max_length=13)), - ('is_active', models.BooleanField(default=True, help_text='Is the seller active to be selected?')), - ('user', - models.ForeignKey(blank=True, - limit_choices_to={'is_active': True}, - null=True, - on_delete=django.db.models.deletion.CASCADE, - 
to=settings.AUTH_USER_MODEL)), + ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("name", models.CharField(help_text="Company name or person name", max_length=30)), + ( + "type", + models.CharField( + choices=[("INDIVIDUAL", "Individual"), ("BUSINESS", "Business")], + db_index=True, + default="INDIVIDUAL", + max_length=13, + ), + ), + ("is_active", models.BooleanField(default=True, help_text="Is the seller active to be selected?")), + ( + "user", + models.ForeignKey( + blank=True, + limit_choices_to={"is_active": True}, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to=settings.AUTH_USER_MODEL, + ), + ), ], ), migrations.AddField( - model_name='coupon', - name='seller', - field=models.ForeignKey(blank=True, - help_text='Seller', - limit_choices_to={'is_active': True}, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='payments.seller'), + model_name="coupon", + name="seller", + field=models.ForeignKey( + blank=True, + help_text="Seller", + limit_choices_to={"is_active": True}, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="payments.seller", + ), ), ] diff --git a/breathecode/payments/migrations/0045_plan_service_set_planfinancing_selected_service_set_and_more.py b/breathecode/payments/migrations/0045_plan_service_set_planfinancing_selected_service_set_and_more.py index 05ed428ba..b06fa41d2 100644 --- a/breathecode/payments/migrations/0045_plan_service_set_planfinancing_selected_service_set_and_more.py +++ b/breathecode/payments/migrations/0045_plan_service_set_planfinancing_selected_service_set_and_more.py @@ -7,48 +7,59 @@ class Migration(migrations.Migration): dependencies = [ - ('payments', '0044_coupon_bag_coupons_seller_coupon_seller'), + ("payments", "0044_coupon_bag_coupons_seller_coupon_seller"), ] operations = [ migrations.AddField( - model_name='plan', - name='service_set', - field=models.ForeignKey(blank=True, - default=None, - help_text='Service set to be sold in this service and plan', - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to='payments.serviceset'), + model_name="plan", + name="service_set", + field=models.ForeignKey( + blank=True, + default=None, + help_text="Service set to be sold in this service and plan", + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to="payments.serviceset", + ), ), migrations.AddField( - model_name='planfinancing', - name='selected_service_set', - field=models.ForeignKey(blank=True, - default=None, - help_text='Service set which the plans and services is for', - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='payments.serviceset'), + model_name="planfinancing", + name="selected_service_set", + field=models.ForeignKey( + blank=True, + default=None, + help_text="Service set which the plans and services is for", + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="payments.serviceset", + ), ), migrations.AddField( - model_name='subscription', - name='selected_service_set', - field=models.ForeignKey(blank=True, - default=None, - help_text='Service set which the plans and services is for', - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='payments.serviceset'), + model_name="subscription", + name="selected_service_set", + field=models.ForeignKey( + blank=True, + default=None, + help_text="Service set which the plans and services is for", + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="payments.serviceset", + ), ), migrations.AlterField( - 
model_name='service', - name='type', - field=models.CharField(choices=[('COHORT_SET', 'Cohort set'), - ('MENTORSHIP_SERVICE_SET', 'Mentorship service set'), - ('EVENT_TYPE_SET', 'Event type set'), ('SERVICE_SET', 'Service set')], - default='COHORT_SET', - help_text='Service type', - max_length=22), + model_name="service", + name="type", + field=models.CharField( + choices=[ + ("COHORT_SET", "Cohort set"), + ("MENTORSHIP_SERVICE_SET", "Mentorship service set"), + ("EVENT_TYPE_SET", "Event type set"), + ("SERVICE_SET", "Service set"), + ], + default="COHORT_SET", + help_text="Service type", + max_length=22, + ), ), ] diff --git a/breathecode/payments/migrations/0046_consumptionsession_operation_code_and_more.py b/breathecode/payments/migrations/0046_consumptionsession_operation_code_and_more.py index 8043d7dd6..ed7a634e7 100644 --- a/breathecode/payments/migrations/0046_consumptionsession_operation_code_and_more.py +++ b/breathecode/payments/migrations/0046_consumptionsession_operation_code_and_more.py @@ -6,22 +6,22 @@ class Migration(migrations.Migration): dependencies = [ - ('payments', '0045_plan_service_set_planfinancing_selected_service_set_and_more'), + ("payments", "0045_plan_service_set_planfinancing_selected_service_set_and_more"), ] operations = [ migrations.AddField( - model_name='consumptionsession', - name='operation_code', - field=models.SlugField(default='default', - help_text='Code that identifies the operation, it could be repeated'), + model_name="consumptionsession", + name="operation_code", + field=models.SlugField( + default="default", help_text="Code that identifies the operation, it could be repeated" + ), ), migrations.AddField( - model_name='service', - name='session_duration', - field=models.DurationField(blank=True, - default=None, - help_text='Session duration, used in consumption sessions', - null=True), + model_name="service", + name="session_duration", + field=models.DurationField( + blank=True, default=None, help_text="Session duration, used in consumption sessions", null=True + ), ), ] diff --git a/breathecode/payments/migrations/0047_paymentmethod.py b/breathecode/payments/migrations/0047_paymentmethod.py index dedb35145..a8f65bf0c 100644 --- a/breathecode/payments/migrations/0047_paymentmethod.py +++ b/breathecode/payments/migrations/0047_paymentmethod.py @@ -8,30 +8,41 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0064_academy_legal_name'), - ('payments', '0046_consumptionsession_operation_code_and_more'), + ("admissions", "0064_academy_legal_name"), + ("payments", "0046_consumptionsession_operation_code_and_more"), ] operations = [ migrations.CreateModel( - name='PaymentMethod', + name="PaymentMethod", fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('title', models.CharField(max_length=120)), - ('description', models.CharField(help_text='Description of the payment method', max_length=255)), - ('third_party_link', - models.URLField(blank=True, default=None, help_text='Link of a third party payment method', - null=True)), - ('lang', - models.CharField(help_text='ISO 639-1 language code + ISO 3166-1 alpha-2 country code, e.g. 
en-US', - max_length=5, - validators=[breathecode.utils.validators.language.validate_language_code])), - ('academy', - models.ForeignKey(blank=True, - help_text='Academy owner', - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='admissions.academy')), + ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("title", models.CharField(max_length=120)), + ("description", models.CharField(help_text="Description of the payment method", max_length=255)), + ( + "third_party_link", + models.URLField( + blank=True, default=None, help_text="Link of a third party payment method", null=True + ), + ), + ( + "lang", + models.CharField( + help_text="ISO 639-1 language code + ISO 3166-1 alpha-2 country code, e.g. en-US", + max_length=5, + validators=[breathecode.utils.validators.language.validate_language_code], + ), + ), + ( + "academy", + models.ForeignKey( + blank=True, + help_text="Academy owner", + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="admissions.academy", + ), + ), ], ), ] diff --git a/breathecode/payments/migrations/0048_remove_serviceset_academy_remove_serviceset_services_and_more.py b/breathecode/payments/migrations/0048_remove_serviceset_academy_remove_serviceset_services_and_more.py index 51edf2d4b..8e2d2936c 100644 --- a/breathecode/payments/migrations/0048_remove_serviceset_academy_remove_serviceset_services_and_more.py +++ b/breathecode/payments/migrations/0048_remove_serviceset_academy_remove_serviceset_services_and_more.py @@ -6,44 +6,53 @@ class Migration(migrations.Migration): dependencies = [ - ('payments', '0047_paymentmethod'), + ("payments", "0047_paymentmethod"), ] operations = [ migrations.RemoveField( - model_name='serviceset', - name='academy', + model_name="serviceset", + name="academy", ), migrations.RemoveField( - model_name='serviceset', - name='services', + model_name="serviceset", + name="services", ), migrations.RemoveField( - model_name='plan', - name='service_set', + model_name="plan", + name="service_set", ), migrations.RemoveField( - model_name='subscription', - name='selected_service_set', + model_name="subscription", + name="selected_service_set", ), migrations.RemoveField( - model_name='planfinancing', - name='selected_service_set', + model_name="planfinancing", + name="selected_service_set", ), migrations.RemoveField( - model_name='consumable', - name='service_set', + model_name="consumable", + name="service_set", ), migrations.AlterField( - model_name='service', - name='type', - field=models.CharField(choices=[('COHORT_SET', 'Cohort set'), - ('MENTORSHIP_SERVICE_SET', 'Mentorship service set'), - ('EVENT_TYPE_SET', 'Event type set'), ('VOID', 'Void')], - default='COHORT_SET', - help_text='Service type', - max_length=22), - ), - migrations.DeleteModel(name='ServiceSetTranslation', ), - migrations.DeleteModel(name='ServiceSet', ), + model_name="service", + name="type", + field=models.CharField( + choices=[ + ("COHORT_SET", "Cohort set"), + ("MENTORSHIP_SERVICE_SET", "Mentorship service set"), + ("EVENT_TYPE_SET", "Event type set"), + ("VOID", "Void"), + ], + default="COHORT_SET", + help_text="Service type", + max_length=22, + ), + ), + migrations.DeleteModel( + name="ServiceSetTranslation", + ), + migrations.DeleteModel( + name="ServiceSet", + ), ] diff --git a/breathecode/payments/migrations/0049_paymentmethod_is_credit_card.py b/breathecode/payments/migrations/0049_paymentmethod_is_credit_card.py index c48de3ca2..52d7faf7d 100644 --- 
a/breathecode/payments/migrations/0049_paymentmethod_is_credit_card.py +++ b/breathecode/payments/migrations/0049_paymentmethod_is_credit_card.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('payments', '0048_remove_serviceset_academy_remove_serviceset_services_and_more'), + ("payments", "0048_remove_serviceset_academy_remove_serviceset_services_and_more"), ] operations = [ migrations.AddField( - model_name='paymentmethod', - name='is_credit_card', + model_name="paymentmethod", + name="is_credit_card", field=models.BooleanField(default=False), ), ] diff --git a/breathecode/payments/migrations/0050_planfinancing_conversion_info_and_more.py b/breathecode/payments/migrations/0050_planfinancing_conversion_info_and_more.py index 1118b1cdc..52bd4338d 100644 --- a/breathecode/payments/migrations/0050_planfinancing_conversion_info_and_more.py +++ b/breathecode/payments/migrations/0050_planfinancing_conversion_info_and_more.py @@ -6,24 +6,22 @@ class Migration(migrations.Migration): dependencies = [ - ('payments', '0049_paymentmethod_is_credit_card'), + ("payments", "0049_paymentmethod_is_credit_card"), ] operations = [ migrations.AddField( - model_name='planfinancing', - name='conversion_info', - field=models.JSONField(blank=True, - default=None, - help_text='UTMs and other conversion information.', - null=True), + model_name="planfinancing", + name="conversion_info", + field=models.JSONField( + blank=True, default=None, help_text="UTMs and other conversion information.", null=True + ), ), migrations.AddField( - model_name='subscription', - name='conversion_info', - field=models.JSONField(blank=True, - default=None, - help_text='UTMs and other conversion information.', - null=True), + model_name="subscription", + name="conversion_info", + field=models.JSONField( + blank=True, default=None, help_text="UTMs and other conversion information.", null=True + ), ), ] diff --git a/breathecode/payments/models.py b/breathecode/payments/models.py index 034640b2f..1ef9d8f5e 100644 --- a/breathecode/payments/models.py +++ b/breathecode/payments/models.py @@ -38,20 +38,18 @@ class Currency(models.Model): """Represents a currency.""" - code = models.CharField(max_length=3, - unique=True, - db_index=True, - help_text='ISO 4217 currency code (e.g. USD, EUR, MXN)') - name = models.CharField(max_length=20, unique=True, help_text='Currency name (e.g. US Dollar, Euro, Mexican Peso)') - decimals = models.IntegerField(default=0, help_text='Number of decimals (e.g. 2 for USD and EUR, 0 for JPY)') - - countries = models.ManyToManyField(Country, - blank=True, - related_name='currencies', - help_text='Countries that use this currency officially') + code = models.CharField( + max_length=3, unique=True, db_index=True, help_text="ISO 4217 currency code (e.g. USD, EUR, MXN)" + ) + name = models.CharField(max_length=20, unique=True, help_text="Currency name (e.g. US Dollar, Euro, Mexican Peso)") + decimals = models.IntegerField(default=0, help_text="Number of decimals (e.g. 
2 for USD and EUR, 0 for JPY)") + + countries = models.ManyToManyField( + Country, blank=True, related_name="currencies", help_text="Countries that use this currency officially" + ) def format_price(self, value): - currency = CurrencyFormatter('USD') + currency = CurrencyFormatter("USD") currency.get_money_currency() return currency.get_money_format(value) @@ -60,14 +58,14 @@ def clean(self) -> None: return super().clean() def __str__(self) -> str: - return f'{self.name} ({self.code})' + return f"{self.name} ({self.code})" class AbstractPriceByUnit(models.Model): """This model is used to store the price of a Product or a Service.""" - price_per_unit = models.FloatField(default=0, help_text='Price per unit') - currency = models.ForeignKey(Currency, on_delete=models.CASCADE, help_text='Currency') + price_per_unit = models.FloatField(default=0, help_text="Price per unit") + currency = models.ForeignKey(Currency, on_delete=models.CASCADE, help_text="Currency") def format_price(self): return self.currency.format_price(self.price) @@ -79,11 +77,11 @@ class Meta: class AbstractPriceByTime(models.Model): """This model is used to store the price of a Product or a Service.""" - price_per_month = models.FloatField(default=None, blank=True, null=True, help_text='Price per month') - price_per_quarter = models.FloatField(default=None, blank=True, null=True, help_text='Price per quarter') - price_per_half = models.FloatField(default=None, blank=True, null=True, help_text='Price per half') - price_per_year = models.FloatField(default=None, blank=True, null=True, help_text='Price per year') - currency = models.ForeignKey(Currency, on_delete=models.CASCADE, help_text='Currency') + price_per_month = models.FloatField(default=None, blank=True, null=True, help_text="Price per month") + price_per_quarter = models.FloatField(default=None, blank=True, null=True, help_text="Price per quarter") + price_per_half = models.FloatField(default=None, blank=True, null=True, help_text="Price per half") + price_per_year = models.FloatField(default=None, blank=True, null=True, help_text="Price per year") + currency = models.ForeignKey(Currency, on_delete=models.CASCADE, help_text="Currency") def format_price(self): return self.currency.format_price(self.price) @@ -95,11 +93,11 @@ class Meta: class AbstractAmountByTime(models.Model): """This model is used to store the price of a Product or a Service.""" - amount_per_month = models.FloatField(default=0, help_text='Amount per month') - amount_per_quarter = models.FloatField(default=0, help_text='Amount per quarter') - amount_per_half = models.FloatField(default=0, help_text='Amount per half') - amount_per_year = models.FloatField(default=0, help_text='Amount per year') - currency = models.ForeignKey(Currency, on_delete=models.CASCADE, help_text='Currency') + amount_per_month = models.FloatField(default=0, help_text="Amount per month") + amount_per_quarter = models.FloatField(default=0, help_text="Amount per quarter") + amount_per_half = models.FloatField(default=0, help_text="Amount per half") + amount_per_year = models.FloatField(default=0, help_text="Amount per year") + currency = models.ForeignKey(Currency, on_delete=models.CASCADE, help_text="Currency") def format_price(self): return self.currency.format_price(self.price) @@ -108,38 +106,42 @@ class Meta: abstract = True -DAY = 'DAY' -WEEK = 'WEEK' -MONTH = 'MONTH' -YEAR = 'YEAR' +DAY = "DAY" +WEEK = "WEEK" +MONTH = "MONTH" +YEAR = "YEAR" PAY_EVERY_UNIT = [ - (DAY, 'Day'), - (WEEK, 'Week'), - (MONTH, 'Month'), - (YEAR, 
'Year'), + (DAY, "Day"), + (WEEK, "Week"), + (MONTH, "Month"), + (YEAR, "Year"), ] class AbstractAsset(models.Model): """This model represents a product or a service that can be sold.""" - slug = models.CharField(max_length=60, - unique=True, - db_index=True, - help_text='A human-readable identifier, it must be unique and it can only contain letters, ' - 'numbers and hyphens') + slug = models.CharField( + max_length=60, + unique=True, + db_index=True, + help_text="A human-readable identifier, it must be unique and it can only contain letters, " + "numbers and hyphens", + ) title = models.CharField(max_length=60, default=None, null=True, blank=True) - owner = models.ForeignKey(Academy, on_delete=models.CASCADE, blank=True, null=True, help_text='Academy owner') - #TODO: visibility and the capacities of disable a asset - private = models.BooleanField(default=True, help_text='If the asset is private or not', db_index=True) + owner = models.ForeignKey(Academy, on_delete=models.CASCADE, blank=True, null=True, help_text="Academy owner") + # TODO: visibility and the capacities of disable a asset + private = models.BooleanField(default=True, help_text="If the asset is private or not", db_index=True) - trial_duration = models.IntegerField(default=1, help_text='Trial duration (e.g. 1, 2, 3, ...)') - trial_duration_unit = models.CharField(max_length=10, - choices=PAY_EVERY_UNIT, - default=MONTH, - help_text='Trial duration unit (e.g. DAY, WEEK, MONTH or YEAR)') + trial_duration = models.IntegerField(default=1, help_text="Trial duration (e.g. 1, 2, 3, ...)") + trial_duration_unit = models.CharField( + max_length=10, + choices=PAY_EVERY_UNIT, + default=MONTH, + help_text="Trial duration unit (e.g. DAY, WEEK, MONTH or YEAR)", + ) icon_url = models.URLField(blank=True, null=True, default=None) @@ -154,20 +156,19 @@ class Service(AbstractAsset): """Represents the service that can be purchased by the customer.""" class Type(models.TextChoices): - COHORT_SET = ('COHORT_SET', 'Cohort set') - MENTORSHIP_SERVICE_SET = ('MENTORSHIP_SERVICE_SET', 'Mentorship service set') - EVENT_TYPE_SET = ('EVENT_TYPE_SET', 'Event type set') - VOID = ('VOID', 'Void') + COHORT_SET = ("COHORT_SET", "Cohort set") + MENTORSHIP_SERVICE_SET = ("MENTORSHIP_SERVICE_SET", "Mentorship service set") + EVENT_TYPE_SET = ("EVENT_TYPE_SET", "Event type set") + VOID = ("VOID", "Void") - groups = models.ManyToManyField(Group, - blank=True, - help_text='Groups that can access the customer that bought this service') + groups = models.ManyToManyField( + Group, blank=True, help_text="Groups that can access the customer that bought this service" + ) - session_duration = models.DurationField(default=None, - null=True, - blank=True, - help_text='Session duration, used in consumption sessions') - type = models.CharField(max_length=22, choices=Type, default=Type.COHORT_SET, help_text='Service type') + session_duration = models.DurationField( + default=None, null=True, blank=True, help_text="Session duration, used in consumption sessions" + ) + type = models.CharField(max_length=22, choices=Type, default=Type.COHORT_SET, help_text="Service type") def __str__(self): return self.slug @@ -179,20 +180,22 @@ def save(self, *args, **kwargs): class ServiceTranslation(models.Model): - service = models.ForeignKey(Service, on_delete=models.CASCADE, help_text='Service') - lang = models.CharField(max_length=5, - validators=[validate_language_code], - help_text='ISO 639-1 language code + ISO 3166-1 alpha-2 country code, e.g. 
en-US') - title = models.CharField(max_length=60, help_text='Title of the service') - description = models.CharField(max_length=255, help_text='Description of the service') + service = models.ForeignKey(Service, on_delete=models.CASCADE, help_text="Service") + lang = models.CharField( + max_length=5, + validators=[validate_language_code], + help_text="ISO 639-1 language code + ISO 3166-1 alpha-2 country code, e.g. en-US", + ) + title = models.CharField(max_length=60, help_text="Title of the service") + description = models.CharField(max_length=255, help_text="Description of the service") def __str__(self) -> str: - return f'{self.lang}: {self.title}' + return f"{self.lang}: {self.title}" -UNIT = 'UNIT' +UNIT = "UNIT" SERVICE_UNITS = [ - (UNIT, 'Unit'), + (UNIT, "Unit"), ] @@ -200,14 +203,13 @@ class AbstractServiceItem(models.Model): """Common fields for ServiceItem and Consumable.""" # the unit between a service and a product are different - unit_type = models.CharField(max_length=10, - choices=SERVICE_UNITS, - default=UNIT, - db_index=True, - help_text='Unit type (e.g. UNIT))') - how_many = models.IntegerField(default=-1, help_text='How many units of this service can be used') + unit_type = models.CharField( + max_length=10, choices=SERVICE_UNITS, default=UNIT, db_index=True, help_text="Unit type (e.g. UNIT))" + ) + how_many = models.IntegerField(default=-1, help_text="How many units of this service can be used") sort_priority = models.IntegerField( - default=1, help_text='(e.g. 1, 2, 3, ...) It is going to be used to sort the items on the frontend') + default=1, help_text="(e.g. 1, 2, 3, ...) It is going to be used to sort the items on the frontend" + ) class Meta: abstract = True @@ -217,26 +219,25 @@ class Meta: class ServiceItem(AbstractServiceItem): """This model is used as referenced of units of a service can be used.""" - service = models.ForeignKey(Service, on_delete=models.CASCADE, help_text='Service') + service = models.ForeignKey(Service, on_delete=models.CASCADE, help_text="Service") is_renewable = models.BooleanField( default=False, - help_text='If it\'s marked, the consumables will be renewed according to the renew_at and renew_at_unit values.' + help_text="If it's marked, the consumables will be renewed according to the renew_at and renew_at_unit values.", ) # the below fields are useless when is_renewable=False - renew_at = models.IntegerField(default=1, - help_text='Renew at (e.g. 1, 2, 3, ...) it going to be used to build the balance of ' - 'customer') - renew_at_unit = models.CharField(max_length=10, - choices=PAY_EVERY_UNIT, - default=MONTH, - help_text='Renew at unit (e.g. DAY, WEEK, MONTH or YEAR)') + renew_at = models.IntegerField( + default=1, help_text="Renew at (e.g. 1, 2, 3, ...) it going to be used to build the balance of " "customer" + ) + renew_at_unit = models.CharField( + max_length=10, choices=PAY_EVERY_UNIT, default=MONTH, help_text="Renew at unit (e.g. 
DAY, WEEK, MONTH or YEAR)" + ) def clean(self): - is_test_env = os.getenv('ENV') == 'test' - inside_mixer = hasattr(self, '__mixer__') + is_test_env = os.getenv("ENV") == "test" + inside_mixer = hasattr(self, "__mixer__") if self.id and (not inside_mixer or (inside_mixer and not is_test_env)): - raise forms.ValidationError('You cannot update a service item') + raise forms.ValidationError("You cannot update a service item") def save(self, *args, **kwargs): self.full_clean() @@ -244,45 +245,51 @@ def save(self, *args, **kwargs): super().save(*args, **kwargs) def delete(self): - raise forms.ValidationError('You cannot delete a service item') + raise forms.ValidationError("You cannot delete a service item") def __str__(self) -> str: - return f'{self.service.slug} ({self.how_many})' + return f"{self.service.slug} ({self.how_many})" class ServiceItemFeature(models.Model): """This model is used as referenced of units of a service can be used.""" - service_item = models.ForeignKey(ServiceItem, on_delete=models.CASCADE, help_text='Service item') - lang = models.CharField(max_length=5, - validators=[validate_language_code], - help_text='ISO 639-1 language code + ISO 3166-1 alpha-2 country code, e.g. en-US') - title = models.CharField(max_length=30, help_text='Title of the service item', default=None, null=True) - description = models.CharField(max_length=255, help_text='Description of the service item') - one_line_desc = models.CharField(max_length=30, help_text='One line description of the service item') + service_item = models.ForeignKey(ServiceItem, on_delete=models.CASCADE, help_text="Service item") + lang = models.CharField( + max_length=5, + validators=[validate_language_code], + help_text="ISO 639-1 language code + ISO 3166-1 alpha-2 country code, e.g. en-US", + ) + title = models.CharField(max_length=30, help_text="Title of the service item", default=None, null=True) + description = models.CharField(max_length=255, help_text="Description of the service item") + one_line_desc = models.CharField(max_length=30, help_text="One line description of the service item") def __str__(self) -> str: - return f'{self.lang} {self.service_item.service.slug} ({self.service_item.how_many})' + return f"{self.lang} {self.service_item.service.slug} ({self.service_item.how_many})" class FinancingOption(models.Model): """This model is used as referenced of units of a service can be used.""" - _lang = 'en' + _lang = "en" - monthly_price = models.FloatField(default=1, help_text='Monthly price (e.g. 1, 2, 3, ...)') - currency = models.ForeignKey(Currency, on_delete=models.CASCADE, help_text='Currency') + monthly_price = models.FloatField(default=1, help_text="Monthly price (e.g. 1, 2, 3, ...)") + currency = models.ForeignKey(Currency, on_delete=models.CASCADE, help_text="Currency") - how_many_months = models.IntegerField(default=1, - help_text='How many months and installments to collect (e.g. 1, 2, 3, ...)') + how_many_months = models.IntegerField( + default=1, help_text="How many months and installments to collect (e.g. 
1, 2, 3, ...)" + ) def clean(self) -> None: if not self.monthly_price: raise forms.ValidationError( - translation(self._lang, - en='Monthly price is required', - es='El precio mensual es requerido', - slug='monthly-price-required')) + translation( + self._lang, + en="Monthly price is required", + es="El precio mensual es requerido", + slug="monthly-price-required", + ) + ) return super().clean() @@ -291,32 +298,36 @@ def save(self, *args, **kwargs) -> None: return super().save(*args, **kwargs) def __str__(self) -> str: - return f'{self.monthly_price} {self.currency.code} per {self.how_many_months} months' + return f"{self.monthly_price} {self.currency.code} per {self.how_many_months} months" class CohortSet(models.Model): """Cohort set.""" - _lang = 'en' + _lang = "en" - slug = models.SlugField(max_length=100, - unique=True, - db_index=True, - help_text='A human-readable identifier, it must be unique and it can only contain letters, ' - 'numbers and hyphens') + slug = models.SlugField( + max_length=100, + unique=True, + db_index=True, + help_text="A human-readable identifier, it must be unique and it can only contain letters, " + "numbers and hyphens", + ) academy = models.ForeignKey(Academy, on_delete=models.CASCADE) - cohorts = models.ManyToManyField(Cohort, - blank=True, - through='CohortSetCohort', - through_fields=('cohort_set', 'cohort')) + cohorts = models.ManyToManyField( + Cohort, blank=True, through="CohortSetCohort", through_fields=("cohort_set", "cohort") + ) def clean(self) -> None: if self.academy.available_as_saas == False: raise forms.ValidationError( - translation(self._lang, - en='Academy is not available as SaaS', - es='La academia no está disponible como SaaS', - slug='academy-not-available-as-saas')) + translation( + self._lang, + en="Academy is not available as SaaS", + es="La academia no está disponible como SaaS", + slug="academy-not-available-as-saas", + ) + ) return super().clean() @@ -326,38 +337,47 @@ def save(self, *args, **kwargs) -> None: class CohortSetTranslation(models.Model): - cohort_set = models.ForeignKey(CohortSet, on_delete=models.CASCADE, help_text='Cohort set') - lang = models.CharField(max_length=5, - validators=[validate_language_code], - help_text='ISO 639-1 language code + ISO 3166-1 alpha-2 country code, e.g. en-US') - title = models.CharField(max_length=60, help_text='Title of the cohort set') - description = models.CharField(max_length=255, help_text='Description of the cohort set') - short_description = models.CharField(max_length=255, help_text='Short description of the cohort set') + cohort_set = models.ForeignKey(CohortSet, on_delete=models.CASCADE, help_text="Cohort set") + lang = models.CharField( + max_length=5, + validators=[validate_language_code], + help_text="ISO 639-1 language code + ISO 3166-1 alpha-2 country code, e.g. 
en-US", + ) + title = models.CharField(max_length=60, help_text="Title of the cohort set") + description = models.CharField(max_length=255, help_text="Description of the cohort set") + short_description = models.CharField(max_length=255, help_text="Short description of the cohort set") class CohortSetCohort(models.Model): """M2M between CohortSet and Cohort.""" - _lang = 'en' + _lang = "en" - cohort_set = models.ForeignKey(CohortSet, on_delete=models.CASCADE, help_text='Cohort set') - cohort = models.ForeignKey(Cohort, on_delete=models.CASCADE, help_text='Cohort') + cohort_set = models.ForeignKey(CohortSet, on_delete=models.CASCADE, help_text="Cohort set") + cohort = models.ForeignKey(Cohort, on_delete=models.CASCADE, help_text="Cohort") def clean(self) -> None: - if self.cohort.available_as_saas is False or (self.cohort.available_as_saas == None - and self.cohort.academy.available_as_saas is False): + if self.cohort.available_as_saas is False or ( + self.cohort.available_as_saas == None and self.cohort.academy.available_as_saas is False + ): raise forms.ValidationError( - translation(self._lang, - en='Cohort is not available as SaaS', - es='El cohort no está disponible como SaaS', - slug='cohort-not-available-as-saas')) + translation( + self._lang, + en="Cohort is not available as SaaS", + es="El cohort no está disponible como SaaS", + slug="cohort-not-available-as-saas", + ) + ) if self.cohort_set.academy != self.cohort.academy: raise forms.ValidationError( - translation(self._lang, - en='Cohort and cohort set must be from the same academy', - es='El cohort y el cohort set deben ser de la misma academia', - slug='cohort-and-cohort-set-must-be-from-the-same-academy')) + translation( + self._lang, + en="Cohort and cohort set must be from the same academy", + es="El cohort y el cohort set deben ser de la misma academia", + slug="cohort-and-cohort-set-must-be-from-the-same-academy", + ) + ) return super().clean() @@ -369,79 +389,91 @@ def save(self, *args, **kwargs) -> None: class MentorshipServiceSet(models.Model): """M2M between plan and ServiceItem.""" - slug = models.SlugField(max_length=100, - unique=True, - db_index=True, - help_text='A human-readable identifier, it must be unique and it can only contain letters, ' - 'numbers and hyphens') + slug = models.SlugField( + max_length=100, + unique=True, + db_index=True, + help_text="A human-readable identifier, it must be unique and it can only contain letters, " + "numbers and hyphens", + ) academy = models.ForeignKey(Academy, on_delete=models.CASCADE) mentorship_services = models.ManyToManyField(MentorshipService, blank=True) class MentorshipServiceSetTranslation(models.Model): - mentorship_service_set = models.ForeignKey(MentorshipServiceSet, - on_delete=models.CASCADE, - help_text='Mentorship service set') - lang = models.CharField(max_length=5, - validators=[validate_language_code], - help_text='ISO 639-1 language code + ISO 3166-1 alpha-2 country code, e.g. 
en-US') - title = models.CharField(max_length=60, help_text='Title of the mentorship service set') - description = models.CharField(max_length=255, help_text='Description of the mentorship service set') - short_description = models.CharField(max_length=255, help_text='Short description of the mentorship service set') + mentorship_service_set = models.ForeignKey( + MentorshipServiceSet, on_delete=models.CASCADE, help_text="Mentorship service set" + ) + lang = models.CharField( + max_length=5, + validators=[validate_language_code], + help_text="ISO 639-1 language code + ISO 3166-1 alpha-2 country code, e.g. en-US", + ) + title = models.CharField(max_length=60, help_text="Title of the mentorship service set") + description = models.CharField(max_length=255, help_text="Description of the mentorship service set") + short_description = models.CharField(max_length=255, help_text="Short description of the mentorship service set") class EventTypeSet(models.Model): """M2M between plan and ServiceItem.""" - slug = models.SlugField(max_length=100, - unique=True, - db_index=True, - help_text='A human-readable identifier, it must be unique and it can only contain letters, ' - 'numbers and hyphens') - academy = models.ForeignKey(Academy, on_delete=models.CASCADE, help_text='Academy owner') - event_types = models.ManyToManyField(EventType, blank=True, help_text='Event types') + slug = models.SlugField( + max_length=100, + unique=True, + db_index=True, + help_text="A human-readable identifier, it must be unique and it can only contain letters, " + "numbers and hyphens", + ) + academy = models.ForeignKey(Academy, on_delete=models.CASCADE, help_text="Academy owner") + event_types = models.ManyToManyField(EventType, blank=True, help_text="Event types") class EventTypeSetTranslation(models.Model): - event_type_set = models.ForeignKey(EventTypeSet, on_delete=models.CASCADE, help_text='Event type set') - lang = models.CharField(max_length=5, - validators=[validate_language_code], - help_text='ISO 639-1 language code + ISO 3166-1 alpha-2 country code, e.g. en-US') - title = models.CharField(max_length=60, help_text='Title of the event type set') - description = models.CharField(max_length=255, help_text='Description of the event type set') - short_description = models.CharField(max_length=255, help_text='Short description of the event type set') + event_type_set = models.ForeignKey(EventTypeSet, on_delete=models.CASCADE, help_text="Event type set") + lang = models.CharField( + max_length=5, + validators=[validate_language_code], + help_text="ISO 639-1 language code + ISO 3166-1 alpha-2 country code, e.g. en-US", + ) + title = models.CharField(max_length=60, help_text="Title of the event type set") + description = models.CharField(max_length=255, help_text="Description of the event type set") + short_description = models.CharField(max_length=255, help_text="Short description of the event type set") class AcademyService(models.Model): - service = models.OneToOneField(Service, on_delete=models.CASCADE, help_text='Service') - academy = models.ForeignKey(Academy, on_delete=models.CASCADE, help_text='Academy') + service = models.OneToOneField(Service, on_delete=models.CASCADE, help_text="Service") + academy = models.ForeignKey(Academy, on_delete=models.CASCADE, help_text="Academy") - price_per_unit = models.FloatField(default=1, help_text='Price per unit (e.g. 
1, 2, 3, ...)') - currency = models.ForeignKey(Currency, on_delete=models.CASCADE, help_text='Currency') + price_per_unit = models.FloatField(default=1, help_text="Price per unit (e.g. 1, 2, 3, ...)") + currency = models.ForeignKey(Currency, on_delete=models.CASCADE, help_text="Currency") bundle_size = models.FloatField( default=1, - help_text='Minimum unit size allowed to be bought, example: bundle_size=5, then you are ' - 'allowed to buy a minimum of 5 units. Related to the discount ratio') - max_items = models.FloatField(default=1, - help_text="How many items can be bought in total, it doesn't matter the bundle size") + help_text="Minimum unit size allowed to be bought, example: bundle_size=5, then you are " + "allowed to buy a minimum of 5 units. Related to the discount ratio", + ) + max_items = models.FloatField( + default=1, help_text="How many items can be bought in total, it doesn't matter the bundle size" + ) max_amount = models.FloatField(default=1, help_text="Limit total amount, it doesn't matter the bundle size") - discount_ratio = models.FloatField(default=1, help_text='Will be used when calculated by the final price') + discount_ratio = models.FloatField(default=1, help_text="Will be used when calculated by the final price") available_mentorship_service_sets = models.ManyToManyField( MentorshipServiceSet, blank=True, - help_text='Available mentorship service sets to be sold in this service and plan') + help_text="Available mentorship service sets to be sold in this service and plan", + ) available_event_type_sets = models.ManyToManyField( - EventTypeSet, blank=True, help_text='Available mentorship service sets to be sold in this service and plan') + EventTypeSet, blank=True, help_text="Available mentorship service sets to be sold in this service and plan" + ) def __str__(self) -> str: - return f'{self.academy.slug} -> {self.service.slug}' + return f"{self.academy.slug} -> {self.service.slug}" def get_discounted_price(self, num_items) -> float: if num_items > self.max_items: - raise ValueError('num_items cannot be greater than max_items') + raise ValueError("num_items cannot be greater than max_items") total_discount_ratio = 0 current_discount_ratio = self.discount_ratio @@ -464,19 +496,28 @@ def get_discounted_price(self, num_items) -> float: return amount - discount def clean(self) -> None: - if self.id and len( - [x for x in [self.available_mentorship_service_sets.count(), - self.available_event_type_sets.count()] if x]) > 1: - raise forms.ValidationError('Only one of available_mentorship_service_sets or ' - 'available_event_type_sets must be set') + if ( + self.id + and len( + [ + x + for x in [self.available_mentorship_service_sets.count(), self.available_event_type_sets.count()] + if x + ] + ) + > 1 + ): + raise forms.ValidationError( + "Only one of available_mentorship_service_sets or " "available_event_type_sets must be set" + ) - required_integer_fields = self.service.type in ['MENTORSHIP_SERVICE_SET', 'EVENT_TYPE_SET'] + required_integer_fields = self.service.type in ["MENTORSHIP_SERVICE_SET", "EVENT_TYPE_SET"] if required_integer_fields and not self.bundle_size.is_integer(): - raise forms.ValidationError('bundle_size must be an integer') + raise forms.ValidationError("bundle_size must be an integer") if required_integer_fields and not self.max_items.is_integer(): - raise forms.ValidationError('max_items must be an integer') + raise forms.ValidationError("max_items must be an integer") return super().clean() @@ -485,79 +526,91 @@ def save(self, *args, **kwargs) -> 
None: return super().save(*args, **kwargs) -ACTIVE = 'ACTIVE' -UNLISTED = 'UNLISTED' -DELETED = 'DELETED' -DISCONTINUED = 'DISCONTINUED' +ACTIVE = "ACTIVE" +UNLISTED = "UNLISTED" +DELETED = "DELETED" +DISCONTINUED = "DISCONTINUED" PLAN_STATUS = [ - (DRAFT, 'Draft'), - (ACTIVE, 'Active'), - (UNLISTED, 'Unlisted'), - (DELETED, 'Deleted'), - (DISCONTINUED, 'Discontinued'), + (DRAFT, "Draft"), + (ACTIVE, "Active"), + (UNLISTED, "Unlisted"), + (DELETED, "Deleted"), + (DISCONTINUED, "Discontinued"), ] class Plan(AbstractPriceByTime): """A plan is a group of services that can be purchased by a user.""" - slug = models.CharField(max_length=60, - unique=True, - db_index=True, - help_text='A human-readable identifier, it must be unique and it can only contain letters, ' - 'numbers and hyphens') - financing_options = models.ManyToManyField(FinancingOption, blank=True, help_text='Available financing options') + slug = models.CharField( + max_length=60, + unique=True, + db_index=True, + help_text="A human-readable identifier, it must be unique and it can only contain letters, " + "numbers and hyphens", + ) + financing_options = models.ManyToManyField(FinancingOption, blank=True, help_text="Available financing options") is_renewable = models.BooleanField( - default=True, help_text='Is if true, it will create a renewable subscription instead of a plan financing') - - status = models.CharField(max_length=12, choices=PLAN_STATUS, default=DRAFT, help_text='Status') - - time_of_life = models.IntegerField(default=1, blank=True, null=True, help_text='Plan lifetime (e.g. 1, 2, 3, ...)') - time_of_life_unit = models.CharField(max_length=10, - choices=PAY_EVERY_UNIT, - blank=True, - null=True, - default=MONTH, - help_text='Lifetime unit (e.g. DAY, WEEK, MONTH or YEAR)') - - trial_duration = models.IntegerField(default=1, help_text='Trial duration (e.g. 1, 2, 3, ...)') - trial_duration_unit = models.CharField(max_length=10, - choices=PAY_EVERY_UNIT, - default=MONTH, - help_text='Trial duration unit (e.g. DAY, WEEK, MONTH or YEAR)') - - service_items = models.ManyToManyField(ServiceItem, - blank=True, - through='PlanServiceItem', - through_fields=('plan', 'service_item')) - - owner = models.ForeignKey(Academy, on_delete=models.CASCADE, blank=True, null=True, help_text='Academy owner') - is_onboarding = models.BooleanField(default=False, help_text='Is onboarding plan?', db_index=True) - has_waiting_list = models.BooleanField(default=False, help_text='Has waiting list?') - - cohort_set = models.ForeignKey(CohortSet, - on_delete=models.SET_NULL, - blank=True, - null=True, - default=None, - help_text='Cohort sets to be sold in this service and plan') - - mentorship_service_set = models.ForeignKey(MentorshipServiceSet, - on_delete=models.SET_NULL, - blank=True, - null=True, - default=None, - help_text='Mentorship service set to be sold in this service and plan') - - event_type_set = models.ForeignKey(EventTypeSet, - on_delete=models.SET_NULL, - blank=True, - null=True, - default=None, - help_text='Event type set to be sold in this service and plan') - - invites = models.ManyToManyField(UserInvite, blank=True, help_text='Plan\'s invites', related_name='plans') + default=True, help_text="Is if true, it will create a renewable subscription instead of a plan financing" + ) + + status = models.CharField(max_length=12, choices=PLAN_STATUS, default=DRAFT, help_text="Status") + + time_of_life = models.IntegerField(default=1, blank=True, null=True, help_text="Plan lifetime (e.g. 
1, 2, 3, ...)") + time_of_life_unit = models.CharField( + max_length=10, + choices=PAY_EVERY_UNIT, + blank=True, + null=True, + default=MONTH, + help_text="Lifetime unit (e.g. DAY, WEEK, MONTH or YEAR)", + ) + + trial_duration = models.IntegerField(default=1, help_text="Trial duration (e.g. 1, 2, 3, ...)") + trial_duration_unit = models.CharField( + max_length=10, + choices=PAY_EVERY_UNIT, + default=MONTH, + help_text="Trial duration unit (e.g. DAY, WEEK, MONTH or YEAR)", + ) + + service_items = models.ManyToManyField( + ServiceItem, blank=True, through="PlanServiceItem", through_fields=("plan", "service_item") + ) + + owner = models.ForeignKey(Academy, on_delete=models.CASCADE, blank=True, null=True, help_text="Academy owner") + is_onboarding = models.BooleanField(default=False, help_text="Is onboarding plan?", db_index=True) + has_waiting_list = models.BooleanField(default=False, help_text="Has waiting list?") + + cohort_set = models.ForeignKey( + CohortSet, + on_delete=models.SET_NULL, + blank=True, + null=True, + default=None, + help_text="Cohort sets to be sold in this service and plan", + ) + + mentorship_service_set = models.ForeignKey( + MentorshipServiceSet, + on_delete=models.SET_NULL, + blank=True, + null=True, + default=None, + help_text="Mentorship service set to be sold in this service and plan", + ) + + event_type_set = models.ForeignKey( + EventTypeSet, + on_delete=models.SET_NULL, + blank=True, + null=True, + default=None, + help_text="Event type set to be sold in this service and plan", + ) + + invites = models.ManyToManyField(UserInvite, blank=True, help_text="Plan's invites", related_name="plans") def __str__(self) -> str: return self.slug @@ -565,27 +618,38 @@ def __str__(self) -> str: def clean(self) -> None: if not self.is_renewable and (not self.time_of_life or not self.time_of_life_unit): - raise forms.ValidationError('If the plan is not renewable, you must set time_of_life and time_of_life_unit') + raise forms.ValidationError("If the plan is not renewable, you must set time_of_life and time_of_life_unit") - have_price = (self.price_per_month or self.price_per_year or self.price_per_quarter or self.price_per_half) + have_price = self.price_per_month or self.price_per_year or self.price_per_quarter or self.price_per_half if self.is_renewable and have_price and (self.time_of_life or self.time_of_life_unit): - raise forms.ValidationError('If the plan is renewable and have price, you must not set time_of_life and ' - 'time_of_life_unit') + raise forms.ValidationError( + "If the plan is renewable and have price, you must not set time_of_life and " "time_of_life_unit" + ) free_trial_available = self.trial_duration - if self.is_renewable and not have_price and free_trial_available and (self.time_of_life - or self.time_of_life_unit): + if ( + self.is_renewable + and not have_price + and free_trial_available + and (self.time_of_life or self.time_of_life_unit) + ): raise forms.ValidationError( - 'If the plan is renewable and a have free trial available, you must not set time_of_life ' - 'and time_of_life_unit') - - if self.is_renewable and not have_price and not free_trial_available and (not self.time_of_life - or not self.time_of_life_unit): + "If the plan is renewable and a have free trial available, you must not set time_of_life " + "and time_of_life_unit" + ) + + if ( + self.is_renewable + and not have_price + and not free_trial_available + and (not self.time_of_life or not self.time_of_life_unit) + ): raise forms.ValidationError( - 'If the plan is renewable and a not 
have free trial available, you must set time_of_life ' - 'and time_of_life_unit') + "If the plan is renewable and a not have free trial available, you must set time_of_life " + "and time_of_life_unit" + ) return super().clean() @@ -597,11 +661,13 @@ def save(self, *args, **kwargs) -> None: class PlanTranslation(models.Model): plan = models.ForeignKey(Plan, on_delete=models.CASCADE) - lang = models.CharField(max_length=5, - validators=[validate_language_code], - help_text='ISO 639-1 language code + ISO 3166-1 alpha-2 country code, e.g. en-US') - title = models.CharField(max_length=60, help_text='Title of the plan') - description = models.CharField(max_length=255, help_text='Description of the plan') + lang = models.CharField( + max_length=5, + validators=[validate_language_code], + help_text="ISO 639-1 language code + ISO 3166-1 alpha-2 country code, e.g. en-US", + ) + title = models.CharField(max_length=60, help_text="Title of the plan") + description = models.CharField(max_length=255, help_text="Description of the plan") def save(self, *args, **kwargs): self.full_clean() @@ -609,30 +675,33 @@ def save(self, *args, **kwargs): super().save(*args, **kwargs) def __str__(self) -> str: - return f'{self.lang} {self.title}: ({self.plan.slug})' + return f"{self.lang} {self.title}: ({self.plan.slug})" class PlanOffer(models.Model): - original_plan = models.ForeignKey(Plan, on_delete=models.CASCADE, related_name='plan_offer_from') - suggested_plan = models.ForeignKey(Plan, - related_name='plan_offer_to', - help_text='Suggested plans', - null=True, - blank=False, - on_delete=models.CASCADE) + original_plan = models.ForeignKey(Plan, on_delete=models.CASCADE, related_name="plan_offer_from") + suggested_plan = models.ForeignKey( + Plan, + related_name="plan_offer_to", + help_text="Suggested plans", + null=True, + blank=False, + on_delete=models.CASCADE, + ) show_modal = models.BooleanField(default=False) expires_at = models.DateTimeField(default=None, blank=True, null=True) def clean(self) -> None: utc_now = timezone.now() - others = self.__class__.objects.filter(Q(expires_at=None) | Q(expires_at__gt=utc_now), - original_plan=self.original_plan) + others = self.__class__.objects.filter( + Q(expires_at=None) | Q(expires_at__gt=utc_now), original_plan=self.original_plan + ) if self.pk: others = others.exclude(pk=self.pk) if others.exists(): - raise forms.ValidationError('There is already an active plan offer for this plan') + raise forms.ValidationError("There is already an active plan offer for this plan") return super().clean() @@ -643,40 +712,40 @@ def save(self, *args, **kwargs) -> None: class PlanOfferTranslation(models.Model): - offer = models.ForeignKey(PlanOffer, on_delete=models.CASCADE, help_text='Plan offer') - lang = models.CharField(max_length=5, - validators=[validate_language_code], - help_text='ISO 639-1 language code + ISO 3166-1 alpha-2 country code, e.g. en-US') - title = models.CharField(max_length=60, help_text='Title of the plan offer') - description = models.CharField(max_length=255, help_text='Description of the plan offer') - short_description = models.CharField(max_length=255, help_text='Short description of the plan offer') + offer = models.ForeignKey(PlanOffer, on_delete=models.CASCADE, help_text="Plan offer") + lang = models.CharField( + max_length=5, + validators=[validate_language_code], + help_text="ISO 639-1 language code + ISO 3166-1 alpha-2 country code, e.g. 
en-US", + ) + title = models.CharField(max_length=60, help_text="Title of the plan offer") + description = models.CharField(max_length=255, help_text="Description of the plan offer") + short_description = models.CharField(max_length=255, help_text="Short description of the plan offer") class Seller(models.Model): class Partner(models.TextChoices): - INDIVIDUAL = ('INDIVIDUAL', 'Individual') - BUSINESS = ('BUSINESS', 'Business') + INDIVIDUAL = ("INDIVIDUAL", "Individual") + BUSINESS = ("BUSINESS", "Business") - name = models.CharField(max_length=30, help_text='Company name or person name') - user = models.ForeignKey(User, - on_delete=models.CASCADE, - blank=True, - null=True, - limit_choices_to={'is_active': True}) + name = models.CharField(max_length=30, help_text="Company name or person name") + user = models.ForeignKey( + User, on_delete=models.CASCADE, blank=True, null=True, limit_choices_to={"is_active": True} + ) type = models.CharField(max_length=13, choices=Partner, default=Partner.INDIVIDUAL, db_index=True) - is_active = models.BooleanField(default=True, help_text='Is the seller active to be selected?') + is_active = models.BooleanField(default=True, help_text="Is the seller active to be selected?") def clean(self) -> None: if self.user and self.__class__.objects.filter(user=self.user).count() > 0: - raise forms.ValidationError('User already registered as seller') + raise forms.ValidationError("User already registered as seller") if len(self.name) < 3: - raise forms.ValidationError('Name must be at least 3 characters long') + raise forms.ValidationError("Name must be at least 3 characters long") if self.type == self.Partner.BUSINESS and self.__class__.objects.filter(name=self.name).count() > 0: - raise forms.ValidationError('Name already registered as seller') + raise forms.ValidationError("Name already registered as seller") return super().clean() @@ -695,41 +764,45 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: self._how_many_offers = self.how_many_offers class Discount(models.TextChoices): - NO_DISCOUNT = ('NO_DISCOUNT', 'No discount') - PERCENT_OFF = ('PERCENT_OFF', 'Percent off') - FIXED_PRICE = ('FIXED_PRICE', 'Fixed price') - HAGGLING = ('HAGGLING', 'Haggling') + NO_DISCOUNT = ("NO_DISCOUNT", "No discount") + PERCENT_OFF = ("PERCENT_OFF", "Percent off") + FIXED_PRICE = ("FIXED_PRICE", "Fixed price") + HAGGLING = ("HAGGLING", "Haggling") class Referral(models.TextChoices): - NO_REFERRAL = ('NO_REFERRAL', 'No referral') - PERCENTAGE = ('PERCENTAGE', 'Percentage') - FIXED_PRICE = ('FIXED_PRICE', 'Fixed price') + NO_REFERRAL = ("NO_REFERRAL", "No referral") + PERCENTAGE = ("PERCENTAGE", "Percentage") + FIXED_PRICE = ("FIXED_PRICE", "Fixed price") slug = models.SlugField() discount_type = models.CharField(max_length=13, choices=Discount, default=Discount.PERCENT_OFF, db_index=True) - discount_value = models.FloatField(help_text='if type is PERCENT_OFF it\'s a percentage (range 0-1)') + discount_value = models.FloatField(help_text="if type is PERCENT_OFF it's a percentage (range 0-1)") referral_type = models.CharField(max_length=13, choices=Referral, default=Referral.NO_REFERRAL, db_index=True) - referral_value = models.FloatField(help_text='If set, the seller will receive a reward', default=0) + referral_value = models.FloatField(help_text="If set, the seller will receive a reward", default=0) - auto = models.BooleanField(default=False, - db_index=True, - help_text='Automatically apply this coupon (like a special offer)') + auto = models.BooleanField( + default=False, 
db_index=True, help_text="Automatically apply this coupon (like a special offer)" + ) how_many_offers = models.IntegerField( - default=-1, help_text='if -1 means no limits in the offers provided, if 0 nobody can\'t use this coupon') - - seller = models.ForeignKey(Seller, - on_delete=models.CASCADE, - blank=True, - null=True, - limit_choices_to={'is_active': True}, - help_text='Seller') + default=-1, help_text="if -1 means no limits in the offers provided, if 0 nobody can't use this coupon" + ) + + seller = models.ForeignKey( + Seller, + on_delete=models.CASCADE, + blank=True, + null=True, + limit_choices_to={"is_active": True}, + help_text="Seller", + ) plans = models.ManyToManyField( Plan, blank=True, - help_text='Available plans, if refferal type is not NO_REFERRAL it should keep empty, ' - 'so, in this case, all plans will be available') + help_text="Available plans, if refferal type is not NO_REFERRAL it should keep empty, " + "so, in this case, all plans will be available", + ) offered_at = models.DateTimeField(default=None, null=True, blank=True) expires_at = models.DateTimeField(default=None, null=True, blank=True) @@ -738,28 +811,29 @@ class Referral(models.TextChoices): def clean(self) -> None: if self.discount_value < 0: - raise forms.ValidationError('Discount value must be positive') + raise forms.ValidationError("Discount value must be positive") if self.referral_value < 0: - raise forms.ValidationError('Referral value must be positive') + raise forms.ValidationError("Referral value must be positive") if self.referral_value != 0 and self.referral_type == self.Referral.NO_REFERRAL: - raise forms.ValidationError('If referral type is NO_REFERRAL, referral value must be None') + raise forms.ValidationError("If referral type is NO_REFERRAL, referral value must be None") elif self.referral_value is None: - raise forms.ValidationError('Referral value must be set if referral status is not NO_REFERRAL') + raise forms.ValidationError("Referral value must be set if referral status is not NO_REFERRAL") - available_with_slug = self.__class__.objects.filter(Q(expires_at=None) | Q(expires_at__gt=timezone.now()), - slug=self.slug) + available_with_slug = self.__class__.objects.filter( + Q(expires_at=None) | Q(expires_at__gt=timezone.now()), slug=self.slug + ) if self.id: available_with_slug = available_with_slug.exclude(id=self.id) if available_with_slug.count() > 0: - raise forms.ValidationError('a valid coupon with this name already exists') + raise forms.ValidationError("a valid coupon with this name already exists") if self.auto and self.discount_type == self.Discount.NO_DISCOUNT: - raise forms.ValidationError('If auto is True, discount type must not be NO_DISCOUNT') + raise forms.ValidationError("If auto is True, discount type must not be NO_DISCOUNT") if self._how_many_offers != self.how_many_offers or self.offered_at is None: self.offered_at = timezone.now() @@ -785,75 +859,75 @@ def limit_coupon_choices(): ) -RENEWAL = 'RENEWAL' -CHECKING = 'CHECKING' -PAID = 'PAID' +RENEWAL = "RENEWAL" +CHECKING = "CHECKING" +PAID = "PAID" BAG_STATUS = [ - (RENEWAL, 'Renewal'), - (CHECKING, 'Checking'), - (PAID, 'Paid'), + (RENEWAL, "Renewal"), + (CHECKING, "Checking"), + (PAID, "Paid"), ] -BAG = 'BAG' -CHARGE = 'CHARGE' -PREVIEW = 'PREVIEW' -INVITED = 'INVITED' +BAG = "BAG" +CHARGE = "CHARGE" +PREVIEW = "PREVIEW" +INVITED = "INVITED" BAG_TYPE = [ - (BAG, 'Bag'), - (CHARGE, 'Charge'), - (PREVIEW, 'Preview'), - (INVITED, 'Invited'), + (BAG, "Bag"), + (CHARGE, "Charge"), + (PREVIEW, "Preview"), + 
(INVITED, "Invited"), ] -NO_SET = 'NO_SET' -QUARTER = 'QUARTER' -HALF = 'HALF' -YEAR = 'YEAR' +NO_SET = "NO_SET" +QUARTER = "QUARTER" +HALF = "HALF" +YEAR = "YEAR" CHOSEN_PERIOD = [ - (NO_SET, 'No set'), - (MONTH, 'Month'), - (QUARTER, 'Quarter'), - (HALF, 'Half'), - (YEAR, 'Year'), + (NO_SET, "No set"), + (MONTH, "Month"), + (QUARTER, "Quarter"), + (HALF, "Half"), + (YEAR, "Year"), ] class Bag(AbstractAmountByTime): """Represents a credit that can be used by a user to use a service.""" - status = models.CharField(max_length=8, choices=BAG_STATUS, default=CHECKING, help_text='Bag status', db_index=True) - type = models.CharField(max_length=7, choices=BAG_TYPE, default=BAG, help_text='Bag type') - chosen_period = models.CharField(max_length=7, - choices=CHOSEN_PERIOD, - default=NO_SET, - help_text='Chosen period used to calculate the amount and build the subscription') + status = models.CharField(max_length=8, choices=BAG_STATUS, default=CHECKING, help_text="Bag status", db_index=True) + type = models.CharField(max_length=7, choices=BAG_TYPE, default=BAG, help_text="Bag type") + chosen_period = models.CharField( + max_length=7, + choices=CHOSEN_PERIOD, + default=NO_SET, + help_text="Chosen period used to calculate the amount and build the subscription", + ) how_many_installments = models.IntegerField( - default=0, help_text='How many installments to collect and build the plan financing') - - coupons = models.ManyToManyField(Coupon, - blank=True, - help_text='Coupons applied during the sale', - limit_choices_to=limit_coupon_choices) - - academy = models.ForeignKey('admissions.Academy', on_delete=models.CASCADE, help_text='Academy owner') - user = models.ForeignKey(User, on_delete=models.CASCADE, help_text='Customer') - service_items = models.ManyToManyField(ServiceItem, blank=True, help_text='Service items') - plans = models.ManyToManyField(Plan, blank=True, help_text='Plans') - - is_recurrent = models.BooleanField(default=False, help_text='will it be a recurrent payment?') - was_delivered = models.BooleanField(default=False, help_text='Was it delivered to the user?') - - token = models.CharField(max_length=40, - db_index=True, - default=None, - null=True, - blank=True, - help_text='Token of the bag') + default=0, help_text="How many installments to collect and build the plan financing" + ) + + coupons = models.ManyToManyField( + Coupon, blank=True, help_text="Coupons applied during the sale", limit_choices_to=limit_coupon_choices + ) + + academy = models.ForeignKey("admissions.Academy", on_delete=models.CASCADE, help_text="Academy owner") + user = models.ForeignKey(User, on_delete=models.CASCADE, help_text="Customer") + service_items = models.ManyToManyField(ServiceItem, blank=True, help_text="Service items") + plans = models.ManyToManyField(Plan, blank=True, help_text="Plans") + + is_recurrent = models.BooleanField(default=False, help_text="will it be a recurrent payment?") + was_delivered = models.BooleanField(default=False, help_text="Was it delivered to the user?") + + token = models.CharField( + max_length=40, db_index=True, default=None, null=True, blank=True, help_text="Token of the bag" + ) expires_at = models.DateTimeField( default=None, blank=True, null=True, - help_text='Expiration date of the bag, used for preview bag together with the token') + help_text="Expiration date of the bag, used for preview bag together with the token", + ) created_at = models.DateTimeField(auto_now_add=True, editable=False) updated_at = models.DateTimeField(auto_now=True, editable=False) @@ -864,26 
+938,25 @@ def save(self, *args, **kwargs): super().save(*args, **kwargs) if created: - tasks_activity.add_activity.delay(self.user.id, - 'bag_created', - related_type='payments.Bag', - related_id=self.id) + tasks_activity.add_activity.delay( + self.user.id, "bag_created", related_type="payments.Bag", related_id=self.id + ) def __str__(self) -> str: - return f'{self.type} {self.status} {self.chosen_period}' + return f"{self.type} {self.status} {self.chosen_period}" -FULFILLED = 'FULFILLED' -REJECTED = 'REJECTED' -PENDING = 'PENDING' -REFUNDED = 'REFUNDED' -DISPUTED_AS_FRAUD = 'DISPUTED_AS_FRAUD' +FULFILLED = "FULFILLED" +REJECTED = "REJECTED" +PENDING = "PENDING" +REFUNDED = "REFUNDED" +DISPUTED_AS_FRAUD = "DISPUTED_AS_FRAUD" INVOICE_STATUS = [ - (FULFILLED, 'Fulfilled'), - (REJECTED, 'Rejected'), - (PENDING, 'Pending'), - (REFUNDED, 'Refunded'), - (DISPUTED_AS_FRAUD, 'Disputed as fraud'), + (FULFILLED, "Fulfilled"), + (REJECTED, "Rejected"), + (PENDING, "Pending"), + (REFUNDED, "Refunded"), + (DISPUTED_AS_FRAUD, "Disputed as fraud"), ] @@ -891,33 +964,29 @@ class Invoice(models.Model): """Represents a payment made by a user.""" amount = models.FloatField( - default=0, help_text='If amount is 0, transaction will not be sent to stripe or any other payment processor.') - currency = models.ForeignKey(Currency, on_delete=models.CASCADE, help_text='Currency of the invoice') - paid_at = models.DateTimeField(help_text='Date when the invoice was paid') - refunded_at = models.DateTimeField(null=True, - blank=True, - default=None, - help_text='Date when the invoice was refunded') - status = models.CharField(max_length=17, - choices=INVOICE_STATUS, - default=PENDING, - db_index=True, - help_text='Invoice status') - - bag = models.ForeignKey('Bag', on_delete=models.CASCADE, help_text='Bag') + default=0, help_text="If amount is 0, transaction will not be sent to stripe or any other payment processor." 
+ ) + currency = models.ForeignKey(Currency, on_delete=models.CASCADE, help_text="Currency of the invoice") + paid_at = models.DateTimeField(help_text="Date when the invoice was paid") + refunded_at = models.DateTimeField( + null=True, blank=True, default=None, help_text="Date when the invoice was refunded" + ) + status = models.CharField( + max_length=17, choices=INVOICE_STATUS, default=PENDING, db_index=True, help_text="Invoice status" + ) + + bag = models.ForeignKey("Bag", on_delete=models.CASCADE, help_text="Bag") # actually return 27 characters - stripe_id = models.CharField(max_length=32, null=True, default=None, blank=True, help_text='Stripe id') + stripe_id = models.CharField(max_length=32, null=True, default=None, blank=True, help_text="Stripe id") # actually return 27 characters - refund_stripe_id = models.CharField(max_length=32, - null=True, - default=None, - blank=True, - help_text='Stripe id for refunding') + refund_stripe_id = models.CharField( + max_length=32, null=True, default=None, blank=True, help_text="Stripe id for refunding" + ) - user = models.ForeignKey(User, on_delete=models.CASCADE, help_text='Customer') - academy = models.ForeignKey(Academy, on_delete=models.CASCADE, help_text='Academy owner') + user = models.ForeignKey(User, on_delete=models.CASCADE, help_text="Customer") + academy = models.ForeignKey(Academy, on_delete=models.CASCADE, help_text="Academy owner") created_at = models.DateTimeField(auto_now_add=True, editable=False) updated_at = models.DateTimeField(auto_now=True, editable=False) @@ -928,75 +997,75 @@ def save(self, *args, **kwargs): super().save(*args, **kwargs) def __str__(self) -> str: - return f'{self.user.email} {self.amount} ({self.currency.code})' + return f"{self.user.email} {self.amount} ({self.currency.code})" -FREE_TRIAL = 'FREE_TRIAL' -ACTIVE = 'ACTIVE' -CANCELLED = 'CANCELLED' -DEPRECATED = 'DEPRECATED' -PAYMENT_ISSUE = 'PAYMENT_ISSUE' -ERROR = 'ERROR' -FULLY_PAID = 'FULLY_PAID' -EXPIRED = 'EXPIRED' +FREE_TRIAL = "FREE_TRIAL" +ACTIVE = "ACTIVE" +CANCELLED = "CANCELLED" +DEPRECATED = "DEPRECATED" +PAYMENT_ISSUE = "PAYMENT_ISSUE" +ERROR = "ERROR" +FULLY_PAID = "FULLY_PAID" +EXPIRED = "EXPIRED" SUBSCRIPTION_STATUS = [ - (FREE_TRIAL, 'Free trial'), - (ACTIVE, 'Active'), - (CANCELLED, 'Cancelled'), - (DEPRECATED, 'Deprecated'), - (PAYMENT_ISSUE, 'Payment issue'), - (ERROR, 'Error'), - (FULLY_PAID, 'Fully Paid'), - (EXPIRED, 'Expired'), + (FREE_TRIAL, "Free trial"), + (ACTIVE, "Active"), + (CANCELLED, "Cancelled"), + (DEPRECATED, "Deprecated"), + (PAYMENT_ISSUE, "Payment issue"), + (ERROR, "Error"), + (FULLY_PAID, "Fully Paid"), + (EXPIRED, "Expired"), ] class AbstractIOweYou(models.Model): """Common fields for all I owe you.""" - status = models.CharField(max_length=13, - choices=SUBSCRIPTION_STATUS, - default=ACTIVE, - help_text='Status', - db_index=True) - status_message = models.CharField(max_length=250, - null=True, - blank=True, - default=None, - help_text='Error message if status is ERROR') - - invoices = models.ManyToManyField(Invoice, blank=True, help_text='Invoices') - - user = models.ForeignKey(User, on_delete=models.CASCADE, help_text='Customer') - academy = models.ForeignKey(Academy, on_delete=models.CASCADE, help_text='Academy owner') - - selected_cohort_set = models.ForeignKey(CohortSet, - on_delete=models.CASCADE, - null=True, - blank=True, - default=None, - help_text='Cohort set which the plans and services is for') - joined_cohorts = models.ManyToManyField(Cohort, blank=True, help_text='Cohorts those that he/she joined') + 
status = models.CharField( + max_length=13, choices=SUBSCRIPTION_STATUS, default=ACTIVE, help_text="Status", db_index=True + ) + status_message = models.CharField( + max_length=250, null=True, blank=True, default=None, help_text="Error message if status is ERROR" + ) + + invoices = models.ManyToManyField(Invoice, blank=True, help_text="Invoices") + + user = models.ForeignKey(User, on_delete=models.CASCADE, help_text="Customer") + academy = models.ForeignKey(Academy, on_delete=models.CASCADE, help_text="Academy owner") + + selected_cohort_set = models.ForeignKey( + CohortSet, + on_delete=models.CASCADE, + null=True, + blank=True, + default=None, + help_text="Cohort set which the plans and services is for", + ) + joined_cohorts = models.ManyToManyField(Cohort, blank=True, help_text="Cohorts those that he/she joined") selected_mentorship_service_set = models.ForeignKey( MentorshipServiceSet, on_delete=models.CASCADE, null=True, blank=True, default=None, - help_text='Mentorship service set which the plans and services is for') - selected_event_type_set = models.ForeignKey(EventTypeSet, - on_delete=models.CASCADE, - null=True, - blank=True, - default=None, - help_text='Event type set which the plans and services is for') + help_text="Mentorship service set which the plans and services is for", + ) + selected_event_type_set = models.ForeignKey( + EventTypeSet, + on_delete=models.CASCADE, + null=True, + blank=True, + default=None, + help_text="Event type set which the plans and services is for", + ) # this reminds the plans to change the stock scheduler on change - plans = models.ManyToManyField(Plan, blank=True, help_text='Plans to be supplied') - conversion_info = models.JSONField(default=None, - blank=True, - null=True, - help_text='UTMs and other conversion information.') + plans = models.ManyToManyField(Plan, blank=True, help_text="Plans to be supplied") + conversion_info = models.JSONField( + default=None, blank=True, null=True, help_text="UTMs and other conversion information." 
+ ) created_at = models.DateTimeField(auto_now_add=True, editable=False) updated_at = models.DateTimeField(auto_now=True, editable=False) @@ -1009,37 +1078,40 @@ class PlanFinancing(AbstractIOweYou): """Allows to financing a plan.""" # in this day the financing needs being paid again - next_payment_at = models.DateTimeField(help_text='Next payment date') + next_payment_at = models.DateTimeField(help_text="Next payment date") # in this moment the subscription will be expired valid_until = models.DateTimeField( - help_text='Valid until, before this date each month the customer must pay, after this ' - 'date the plan financing will be destroyed and if it is belonging to a cohort, the certificate will ' - 'be issued after pay every installments') + help_text="Valid until, before this date each month the customer must pay, after this " + "date the plan financing will be destroyed and if it is belonging to a cohort, the certificate will " + "be issued after pay every installments" + ) # in this moment the subscription will be expired - plan_expires_at = models.DateTimeField(default=None, - null=True, - blank=False, - help_text='Plan expires at, after this date the plan will not be renewed') + plan_expires_at = models.DateTimeField( + default=None, null=True, blank=False, help_text="Plan expires at, after this date the plan will not be renewed" + ) # this remember the current price per month - monthly_price = models.FloatField(default=0, - help_text='Monthly price, we keep this to avoid we changes him/her amount') + monthly_price = models.FloatField( + default=0, help_text="Monthly price, we keep this to avoid we changes him/her amount" + ) def __str__(self) -> str: - return f'{self.user.email} ({self.valid_until})' + return f"{self.user.email} ({self.valid_until})" def clean(self) -> None: settings = get_user_settings(self.user.id) if not self.monthly_price: raise forms.ValidationError( - translation(settings.lang, en='Monthly price is required', es='Precio mensual es requerido')) + translation(settings.lang, en="Monthly price is required", es="Precio mensual es requerido") + ) if not self.plan_expires_at: raise forms.ValidationError( - translation(settings.lang, en='Plan expires at is required', es='Plan expires at es requerido')) + translation(settings.lang, en="Plan expires at is required", es="Plan expires at es requerido") + ) return super().clean() @@ -1056,48 +1128,54 @@ def save(self, *args, **kwargs) -> None: class Subscription(AbstractIOweYou): """Allows to create a subscription to a plan and services.""" - _lang = 'en' + _lang = "en" # last time the subscription was paid - paid_at = models.DateTimeField(help_text='Last time the subscription was paid') + paid_at = models.DateTimeField(help_text="Last time the subscription was paid") - is_refundable = models.BooleanField(default=True, help_text='Is it refundable?') + is_refundable = models.BooleanField(default=True, help_text="Is it refundable?") # in this day the subscription needs being paid again - next_payment_at = models.DateTimeField(help_text='Next payment date') + next_payment_at = models.DateTimeField(help_text="Next payment date") # in this moment the subscription will be expired - valid_until = models.DateTimeField(default=None, - null=True, - blank=True, - db_index=True, - help_text='Valid until, after this date the subscription will be destroyed') + valid_until = models.DateTimeField( + default=None, + null=True, + blank=True, + db_index=True, + help_text="Valid until, after this date the subscription will be destroyed", + 
) # this reminds the service items to change the stock scheduler on change # only for consuming single items without having a plan, when you buy consumable quantities - service_items = models.ManyToManyField(ServiceItem, - blank=True, - through='SubscriptionServiceItem', - through_fields=('subscription', 'service_item'), - help_text='Service items to be supplied') + service_items = models.ManyToManyField( + ServiceItem, + blank=True, + through="SubscriptionServiceItem", + through_fields=("subscription", "service_item"), + help_text="Service items to be supplied", + ) # remember the chosen period to pay again - pay_every = models.IntegerField(default=1, help_text='Pay every X units (e.g. 1, 2, 3, ...)') - pay_every_unit = models.CharField(max_length=10, - choices=PAY_EVERY_UNIT, - default=MONTH, - help_text='Pay every unit (e.g. DAY, WEEK, MONTH or YEAR)') + pay_every = models.IntegerField(default=1, help_text="Pay every X units (e.g. 1, 2, 3, ...)") + pay_every_unit = models.CharField( + max_length=10, choices=PAY_EVERY_UNIT, default=MONTH, help_text="Pay every unit (e.g. DAY, WEEK, MONTH or YEAR)" + ) def __str__(self) -> str: - return f'{self.user.email} ({self.valid_until})' + return f"{self.user.email} ({self.valid_until})" def clean(self) -> None: - if self.status == 'FULLY_PAID': + if self.status == "FULLY_PAID": raise forms.ValidationError( - translation(self._lang, - en='Subscription cannot have fully paid as status', - es='La suscripción no puede tener pagado como estado', - slug='subscription-as-fully-paid')) + translation( + self._lang, + en="Subscription cannot have fully paid as status", + es="La suscripción no puede tener pagado como estado", + slug="subscription-as-fully-paid", + ) + ) return super().clean() @@ -1112,22 +1190,23 @@ def save(self, *args, **kwargs) -> None: class SubscriptionServiceItem(models.Model): - subscription = models.ForeignKey(Subscription, on_delete=models.CASCADE, help_text='Subscription') - service_item = models.ForeignKey(ServiceItem, on_delete=models.CASCADE, help_text='Service item') + subscription = models.ForeignKey(Subscription, on_delete=models.CASCADE, help_text="Subscription") + service_item = models.ForeignKey(ServiceItem, on_delete=models.CASCADE, help_text="Service item") - cohorts = models.ManyToManyField(Cohort, blank=True, help_text='Cohorts') - mentorship_service_set = models.ForeignKey(MentorshipServiceSet, - on_delete=models.CASCADE, - blank=True, - null=True, - help_text='Mentorship service set') + cohorts = models.ManyToManyField(Cohort, blank=True, help_text="Cohorts") + mentorship_service_set = models.ForeignKey( + MentorshipServiceSet, on_delete=models.CASCADE, blank=True, null=True, help_text="Mentorship service set" + ) def clean(self): if self.id and self.mentorship_service_set and self.cohorts.count(): raise forms.ValidationError( - translation(self._lang, - en='You can not set cohorts and mentorship service set at the same time', - es='No puedes establecer cohortes y conjunto de servicios de mentoría al mismo tiempo')) + translation( + self._lang, + en="You can not set cohorts and mentorship service set at the same time", + es="No puedes establecer cohortes y conjunto de servicios de mentoría al mismo tiempo", + ) + ) def save(self, *args, **kwargs): self.full_clean() @@ -1144,45 +1223,55 @@ class Consumable(AbstractServiceItem): service_item = models.ForeignKey( ServiceItem, on_delete=models.CASCADE, - help_text='Service item, we remind the service item to know how many units was issued') + help_text="Service item, we 
remind the service item to know how many units was issued", + ) # if null, this is valid until resources are exhausted - user = models.ForeignKey(User, on_delete=models.CASCADE, help_text='Customer') + user = models.ForeignKey(User, on_delete=models.CASCADE, help_text="Customer") # this could be used for the queries on the consumer, to recognize which resource is belong the consumable - cohort_set = models.ForeignKey(CohortSet, - on_delete=models.CASCADE, - default=None, - blank=True, - null=True, - help_text='Cohort set which the consumable belongs to') - event_type_set = models.ForeignKey(EventTypeSet, - on_delete=models.CASCADE, - default=None, - blank=True, - null=True, - help_text='Event type set which the consumable belongs to') - mentorship_service_set = models.ForeignKey(MentorshipServiceSet, - on_delete=models.CASCADE, - default=None, - blank=True, - null=True, - help_text='Mentorship service set which the consumable belongs to') + cohort_set = models.ForeignKey( + CohortSet, + on_delete=models.CASCADE, + default=None, + blank=True, + null=True, + help_text="Cohort set which the consumable belongs to", + ) + event_type_set = models.ForeignKey( + EventTypeSet, + on_delete=models.CASCADE, + default=None, + blank=True, + null=True, + help_text="Event type set which the consumable belongs to", + ) + mentorship_service_set = models.ForeignKey( + MentorshipServiceSet, + on_delete=models.CASCADE, + default=None, + blank=True, + null=True, + help_text="Mentorship service set which the consumable belongs to", + ) valid_until = models.DateTimeField( null=True, blank=True, default=None, - help_text='Valid until, this is null if the consumable is valid until resources are exhausted') + help_text="Valid until, this is null if the consumable is valid until resources are exhausted", + ) @classmethod - def list(cls, - *, - user: User | str | int, - lang: str = 'en', - service: Optional[Service | str | int] = None, - permission: Optional[Permission | str | int] = None, - extra: dict = None) -> QuerySet[Consumable]: + def list( + cls, + *, + user: User | str | int, + lang: str = "en", + service: Optional[Service | str | int] = None, + permission: Optional[Permission | str | int] = None, + extra: dict = None, + ) -> QuerySet[Consumable]: if extra is None: extra = {} @@ -1193,71 +1282,79 @@ def list(cls, # User if isinstance(user, str) and not user.isdigit(): raise ValidationException( - translation(lang, - en='Client user id must be an integer', - es='El id del cliente debe ser un entero', - slug='client-user-id-must-be-an-integer')) + translation( + lang, + en="Client user id must be an integer", + es="El id del cliente debe ser un entero", + slug="client-user-id-must-be-an-integer", + ) + ) if isinstance(user, str): - param['user__id'] = int(user) + param["user__id"] = int(user) elif isinstance(user, int): - param['user__id'] = user + param["user__id"] = user elif isinstance(user, User): - param['user'] = user + param["user"] = user # Service if service and isinstance(service, str) and not service.isdigit(): - param['service_item__service__slug'] = service + param["service_item__service__slug"] = service elif service and isinstance(service, str) and service.isdigit(): - param['service_item__service__id'] = int(service) + param["service_item__service__id"] = int(service) elif service and isinstance(service, int): - param['service_item__service__id'] = service + param["service_item__service__id"] = service elif isinstance(service, Service): - param['service_item__service'] = service + 
param["service_item__service"] = service # Permission if permission and isinstance(permission, str) and not permission.isdigit(): - param['service_item__service__groups__permissions__codename'] = permission + param["service_item__service__groups__permissions__codename"] = permission elif permission and isinstance(permission, str) and permission.isdigit(): - param['service_item__service__groups__permissions__id'] = int(permission) + param["service_item__service__groups__permissions__id"] = int(permission) elif permission and isinstance(permission, int): - param['service_item__service__groups__permissions__id'] = permission + param["service_item__service__groups__permissions__id"] = permission elif isinstance(permission, Permission): - param['service_item__service__groups__permissions'] = permission + param["service_item__service__groups__permissions"] = permission - return cls.objects.filter(Q(valid_until__gte=utc_now) | Q(valid_until=None), **{ - **param, - **extra - }).exclude(how_many=0).order_by('id') + return ( + cls.objects.filter(Q(valid_until__gte=utc_now) | Q(valid_until=None), **{**param, **extra}) + .exclude(how_many=0) + .order_by("id") + ) @classmethod @sync_to_async - def alist(cls, - *, - user: User | str | int, - lang: str = 'en', - service: Optional[Service | str | int] = None, - permission: Optional[Permission | str | int] = None, - extra: dict = None) -> QuerySet[Consumable]: + def alist( + cls, + *, + user: User | str | int, + lang: str = "en", + service: Optional[Service | str | int] = None, + permission: Optional[Permission | str | int] = None, + extra: dict = None, + ) -> QuerySet[Consumable]: return cls.list(user=user, lang=lang, service=service, permission=permission, extra=extra) @classmethod - def get(cls, - *, - user: User | str | int, - lang: str = 'en', - service: Optional[Service | str | int] = None, - permission: Optional[Permission | str | int] = None, - extra: Optional[dict] = None) -> Consumable | None: + def get( + cls, + *, + user: User | str | int, + lang: str = "en", + service: Optional[Service | str | int] = None, + permission: Optional[Permission | str | int] = None, + extra: Optional[dict] = None, + ) -> Consumable | None: if extra is None: extra = {} @@ -1266,13 +1363,15 @@ def get(cls, @classmethod @sync_to_async - def aget(cls, - *, - user: User | str | int, - lang: str = 'en', - service: Optional[Service | str | int] = None, - permission: Optional[Permission | str | int] = None, - extra: Optional[dict] = None) -> Consumable | None: + def aget( + cls, + *, + user: User | str | int, + lang: str = "en", + service: Optional[Service | str | int] = None, + permission: Optional[Permission | str | int] = None, + extra: Optional[dict] = None, + ) -> Consumable | None: return cls.get(user=user, lang=lang, service=service, permission=permission, extra=extra) def clean(self) -> None: @@ -1285,15 +1384,19 @@ def clean(self) -> None: raise forms.ValidationError( translation( settings.lang, - en='A consumable can only be associated with one resource', - es='Un consumible solo se puede asociar con un recurso', - )) + en="A consumable can only be associated with one resource", + es="Un consumible solo se puede asociar con un recurso", + ) + ) if self.service_item is None: raise forms.ValidationError( - translation(settings.lang, - en='A consumable must be associated with a service item', - es='Un consumible debe estar asociado con un artículo de un servicio')) + translation( + settings.lang, + en="A consumable must be associated with a service item", + es="Un 
consumible debe estar asociado con un artículo de un servicio", + ) + ) return super().clean() @@ -1308,47 +1411,50 @@ def save(self, *args, **kwargs): super().save(*args, **kwargs) def __str__(self): - return f'{self.user.email}: {self.service_item.service.slug} ({self.how_many})' + return f"{self.user.email}: {self.service_item.service.slug} ({self.how_many})" -PENDING = 'PENDING' -DONE = 'DONE' -CANCELLED = 'CANCELLED' +PENDING = "PENDING" +DONE = "DONE" +CANCELLED = "CANCELLED" CONSUMPTION_SESSION_STATUS = [ - (PENDING, 'Pending'), - (DONE, 'Done'), - (CANCELLED, 'Cancelled'), + (PENDING, "Pending"), + (DONE, "Done"), + (CANCELLED, "Cancelled"), ] class ConsumptionSession(models.Model): - operation_code = models.SlugField(default='default', - help_text='Code that identifies the operation, it could be repeated') - consumable = models.ForeignKey(Consumable, on_delete=models.CASCADE, help_text='Consumable') - user = models.ForeignKey('auth.User', on_delete=models.CASCADE, help_text='Customer') - eta = models.DateTimeField(help_text='Estimated time of arrival') - duration = models.DurationField(blank=False, default=timedelta, help_text='Duration of the session') - how_many = models.FloatField(default=0, help_text='How many units of this service can be used') - status = models.CharField(max_length=12, - choices=CONSUMPTION_SESSION_STATUS, - default=PENDING, - help_text='Status of the session') - was_discounted = models.BooleanField(default=False, help_text='Was it discounted') - - request = models.JSONField(default=dict, - blank=True, - help_text='Request parameters, it\'s used to remind and recover and consumption session') + operation_code = models.SlugField( + default="default", help_text="Code that identifies the operation, it could be repeated" + ) + consumable = models.ForeignKey(Consumable, on_delete=models.CASCADE, help_text="Consumable") + user = models.ForeignKey("auth.User", on_delete=models.CASCADE, help_text="Customer") + eta = models.DateTimeField(help_text="Estimated time of arrival") + duration = models.DurationField(blank=False, default=timedelta, help_text="Duration of the session") + how_many = models.FloatField(default=0, help_text="How many units of this service can be used") + status = models.CharField( + max_length=12, choices=CONSUMPTION_SESSION_STATUS, default=PENDING, help_text="Status of the session" + ) + was_discounted = models.BooleanField(default=False, help_text="Was it discounted") + + request = models.JSONField( + default=dict, + blank=True, + help_text="Request parameters, it's used to remind and recover and consumption session", + ) # this should be used to get - path = models.CharField(max_length=200, blank=True, help_text='Path of the request') - related_id = models.IntegerField(default=None, blank=True, null=True, help_text='Related id') + path = models.CharField(max_length=200, blank=True, help_text="Path of the request") + related_id = models.IntegerField(default=None, blank=True, null=True, help_text="Related id") related_slug = models.CharField( max_length=200, default=None, blank=True, null=True, - help_text='Related slug, it\'s human-readable identifier, it must be unique and it can only contain ' - 'letters, numbers and hyphens') + help_text="Related slug, it's human-readable identifier, it must be unique and it can only contain " + "letters, numbers and hyphens", + ) def clean(self): self.request = self.sort_dict(self.request or {}) @@ -1369,99 +1475,107 @@ def sort_dict(cls, d): return d @classmethod - def build_session(cls, - request: 
WSGIRequest, - consumable: Consumable, - delta: timedelta, - user: Optional[User] = None, - operation_code: Optional[str] = None) -> 'ConsumptionSession': - assert request, 'You must provide a request' - assert consumable, 'You must provide a consumable' - assert delta, 'You must provide a delta' + def build_session( + cls, + request: WSGIRequest, + consumable: Consumable, + delta: timedelta, + user: Optional[User] = None, + operation_code: Optional[str] = None, + ) -> "ConsumptionSession": + assert request, "You must provide a request" + assert consumable, "You must provide a consumable" + assert delta, "You must provide a delta" if operation_code is None: - operation_code = 'default' + operation_code = "default" utc_now = timezone.now() resource = consumable.mentorship_service_set or consumable.event_type_set or consumable.cohort_set id = resource.id if resource else 0 - slug = resource.slug if resource else '' + slug = resource.slug if resource else "" - path = resource.__class__._meta.app_label + '.' + resource.__class__.__name__ if resource else '' + path = resource.__class__._meta.app_label + "." + resource.__class__.__name__ if resource else "" user = user or request.user - if hasattr(request, 'parser_context'): - args = request.parser_context['args'] - kwargs = request.parser_context['kwargs'] + if hasattr(request, "parser_context"): + args = request.parser_context["args"] + kwargs = request.parser_context["kwargs"] else: args = request.resolver_match.args kwargs = request.resolver_match.kwargs data = { - 'args': args, - 'kwargs': kwargs, - 'headers': { - 'academy': request.META.get('HTTP_ACADEMY') - }, - 'user': user.id, + "args": args, + "kwargs": kwargs, + "headers": {"academy": request.META.get("HTTP_ACADEMY")}, + "user": user.id, } # assert path, 'You must provide a path' - assert delta, 'You must provide a delta' - - session = cls.objects.filter(eta__gte=utc_now, - request=data, - path=path, - duration=delta, - related_id=id, - related_slug=slug, - operation_code=operation_code, - user=user).exclude(eta__lte=utc_now).first() + assert delta, "You must provide a delta" + + session = ( + cls.objects.filter( + eta__gte=utc_now, + request=data, + path=path, + duration=delta, + related_id=id, + related_slug=slug, + operation_code=operation_code, + user=user, + ) + .exclude(eta__lte=utc_now) + .first() + ) if session: return session - return cls.objects.create(request=data, - consumable=consumable, - eta=utc_now + delta, - path=path, - duration=delta, - related_id=id, - related_slug=slug, - operation_code=operation_code, - user=user) + return cls.objects.create( + request=data, + consumable=consumable, + eta=utc_now + delta, + path=path, + duration=delta, + related_id=id, + related_slug=slug, + operation_code=operation_code, + user=user, + ) @classmethod @sync_to_async - def abuild_session(cls, - request: WSGIRequest, - consumable: Consumable, - delta: timedelta, - user: Optional[User] = None, - operation_code: Optional[str] = None) -> 'ConsumptionSession': + def abuild_session( + cls, + request: WSGIRequest, + consumable: Consumable, + delta: timedelta, + user: Optional[User] = None, + operation_code: Optional[str] = None, + ) -> "ConsumptionSession": return cls.build_session(request, consumable, delta, user, operation_code) @classmethod - def get_session(cls, request: WSGIRequest) -> 'ConsumptionSession': + def get_session(cls, request: WSGIRequest) -> "ConsumptionSession": if not request.user.id: return None utc_now = timezone.now() - if hasattr(request, 'parser_context'): - args 
= request.parser_context['args'] - kwargs = request.parser_context['kwargs'] + if hasattr(request, "parser_context"): + args = request.parser_context["args"] + kwargs = request.parser_context["kwargs"] else: args = request.resolver_match.args kwargs = request.resolver_match.kwargs data = { - 'args': list(args), - 'kwargs': kwargs, - 'headers': { - 'academy': request.META.get('HTTP_ACADEMY') - }, - 'user': request.user.id, + "args": list(args), + "kwargs": kwargs, + "headers": {"academy": request.META.get("HTTP_ACADEMY")}, + "user": request.user.id, } data = cls.sort_dict(data) @@ -1469,7 +1583,7 @@ def get_session(cls, request: WSGIRequest) -> 'ConsumptionSession': @classmethod @sync_to_async - def aget_session(cls, request: WSGIRequest) -> 'ConsumptionSession': + def aget_session(cls, request: WSGIRequest) -> "ConsumptionSession": return cls.get_session(request) def will_consume(self, how_many: float = 1.0) -> None: @@ -1489,40 +1603,34 @@ def awill_consume(self, how_many: float = 1.0) -> None: class PlanServiceItem(models.Model): """M2M between plan and ServiceItem.""" - _lang = 'en' + _lang = "en" - plan = models.ForeignKey(Plan, on_delete=models.CASCADE, help_text='Plan') - service_item = models.ForeignKey(ServiceItem, on_delete=models.CASCADE, help_text='Service item') + plan = models.ForeignKey(Plan, on_delete=models.CASCADE, help_text="Plan") + service_item = models.ForeignKey(ServiceItem, on_delete=models.CASCADE, help_text="Service item") class PlanServiceItemHandler(models.Model): """M2M between plan and ServiceItem.""" - handler = models.ForeignKey(PlanServiceItem, on_delete=models.CASCADE, help_text='Plan service item') + handler = models.ForeignKey(PlanServiceItem, on_delete=models.CASCADE, help_text="Plan service item") # resources associated to this service item, one is required - subscription = models.ForeignKey(Subscription, - on_delete=models.CASCADE, - null=True, - blank=True, - default=None, - help_text='Subscription') - plan_financing = models.ForeignKey(PlanFinancing, - on_delete=models.CASCADE, - null=True, - blank=True, - default=None, - help_text='Plan financing') + subscription = models.ForeignKey( + Subscription, on_delete=models.CASCADE, null=True, blank=True, default=None, help_text="Subscription" + ) + plan_financing = models.ForeignKey( + PlanFinancing, on_delete=models.CASCADE, null=True, blank=True, default=None, help_text="Plan financing" + ) def clean(self) -> None: resources = [self.subscription, self.plan_financing] how_many_resources_are_set = len([r for r in resources if r is not None]) if how_many_resources_are_set == 0: - raise forms.ValidationError('A PlanServiceItem must be associated with one resource') + raise forms.ValidationError("A PlanServiceItem must be associated with one resource") if how_many_resources_are_set != 1: - raise forms.ValidationError('A PlanServiceItem can only be associated with one resource') + raise forms.ValidationError("A PlanServiceItem can only be associated with one resource") return super().clean() @@ -1532,7 +1640,7 @@ def save(self, *args, **kwargs): super().save(*args, **kwargs) def __str__(self) -> str: - return str(self.subscription or self.plan_financing or 'Unset') + return str(self.subscription or self.plan_financing or "Unset") class ServiceStockScheduler(models.Model): @@ -1540,35 +1648,38 @@ class ServiceStockScheduler(models.Model): # all this section are M2M service items, in the first case we have a query with subscription and service # item for schedule the renovations - subscription_handler = 
models.ForeignKey(SubscriptionServiceItem, - on_delete=models.CASCADE, - default=None, - blank=True, - null=True, - help_text='Subscription service item') - plan_handler = models.ForeignKey(PlanServiceItemHandler, - on_delete=models.CASCADE, - default=None, - blank=True, - null=True, - help_text='Plan service item handler') + subscription_handler = models.ForeignKey( + SubscriptionServiceItem, + on_delete=models.CASCADE, + default=None, + blank=True, + null=True, + help_text="Subscription service item", + ) + plan_handler = models.ForeignKey( + PlanServiceItemHandler, + on_delete=models.CASCADE, + default=None, + blank=True, + null=True, + help_text="Plan service item handler", + ) # this reminds which scheduler generated the consumable - consumables = models.ManyToManyField(Consumable, blank=True, help_text='Consumables') - valid_until = models.DateTimeField(null=True, - blank=True, - default=None, - help_text='Valid until, after this date the consumables will be renewed') + consumables = models.ManyToManyField(Consumable, blank=True, help_text="Consumables") + valid_until = models.DateTimeField( + null=True, blank=True, default=None, help_text="Valid until, after this date the consumables will be renewed" + ) def clean(self) -> None: resources = [self.subscription_handler, self.plan_handler] how_many_resources_are_set = len([r for r in resources if r is not None]) if how_many_resources_are_set == 0: - raise forms.ValidationError('A ServiceStockScheduler must be associated with one resource') + raise forms.ValidationError("A ServiceStockScheduler must be associated with one resource") if how_many_resources_are_set != 1: - raise forms.ValidationError('A ServiceStockScheduler can only be associated with one resource') + raise forms.ValidationError("A ServiceStockScheduler can only be associated with one resource") return super().clean() @@ -1579,34 +1690,34 @@ def save(self, *args, **kwargs): def __str__(self) -> str: if self.subscription_handler and self.subscription_handler.subscription: - return f'{self.subscription_handler.subscription.user.email} - {self.subscription_handler.service_item}' + return f"{self.subscription_handler.subscription.user.email} - {self.subscription_handler.service_item}" if self.plan_handler and self.plan_handler.subscription: - return f'{self.plan_handler.subscription.user.email} - {self.plan_handler.handler.service_item}' + return f"{self.plan_handler.subscription.user.email} - {self.plan_handler.handler.service_item}" if self.plan_handler and self.plan_handler.plan_financing: - return f'{self.plan_handler.plan_financing.user.email} - {self.plan_handler.handler.service_item}' + return f"{self.plan_handler.plan_financing.user.email} - {self.plan_handler.handler.service_item}" - return 'Unset' + return "Unset" class PaymentContact(models.Model): - user = models.OneToOneField(User, on_delete=models.CASCADE, related_name='payment_contact', help_text='Customer') - stripe_id = models.CharField(max_length=20, help_text='Stripe id') # actually return 18 characters + user = models.OneToOneField(User, on_delete=models.CASCADE, related_name="payment_contact", help_text="Customer") + stripe_id = models.CharField(max_length=20, help_text="Stripe id") # actually return 18 characters def __str__(self) -> str: - return f'{self.user.email} ({self.stripe_id})' + return f"{self.user.email} ({self.stripe_id})" -GOOD = 'GOOD' -BAD = 'BAD' -FRAUD = 'FRAUD' -UNKNOWN = 'UNKNOWN' +GOOD = "GOOD" +BAD = "BAD" +FRAUD = "FRAUD" +UNKNOWN = "UNKNOWN" REPUTATION_STATUS = [ - (GOOD, 'Good'), 
- (BAD, 'BAD'), - (FRAUD, 'Fraud'), - (UNKNOWN, 'Unknown'), + (GOOD, "Good"), + (BAD, "BAD"), + (FRAUD, "Fraud"), + (UNKNOWN, "Unknown"), ] @@ -1617,10 +1728,10 @@ class FinancialReputation(models.Model): If the user has a bad reputation, the user will not be able to buy services. """ - user = models.OneToOneField(User, on_delete=models.CASCADE, related_name='reputation', help_text='Customer') + user = models.OneToOneField(User, on_delete=models.CASCADE, related_name="reputation", help_text="Customer") - in_4geeks = models.CharField(max_length=17, choices=REPUTATION_STATUS, default=GOOD, help_text='4Geeks reputation') - in_stripe = models.CharField(max_length=17, choices=REPUTATION_STATUS, default=GOOD, help_text='Stripe reputation') + in_4geeks = models.CharField(max_length=17, choices=REPUTATION_STATUS, default=GOOD, help_text="4Geeks reputation") + in_stripe = models.CharField(max_length=17, choices=REPUTATION_STATUS, default=GOOD, help_text="Stripe reputation") created_at = models.DateTimeField(auto_now_add=True, editable=False) updated_at = models.DateTimeField(auto_now=True, editable=False) @@ -1640,21 +1751,23 @@ def get_reputation(self): return UNKNOWN def __str__(self) -> str: - return f'{self.user.email} -> {self.get_reputation()}' + return f"{self.user.email} -> {self.get_reputation()}" class PaymentMethod(models.Model): """ Different payment methods of each academy have. """ - academy = models.ForeignKey(Academy, on_delete=models.CASCADE, blank=True, null=True, help_text='Academy owner') + + academy = models.ForeignKey(Academy, on_delete=models.CASCADE, blank=True, null=True, help_text="Academy owner") title = models.CharField(max_length=120, null=False, blank=False) is_credit_card = models.BooleanField(default=False, null=False, blank=False) - description = models.CharField(max_length=255, help_text='Description of the payment method') - third_party_link = models.URLField(blank=True, - null=True, - default=None, - help_text='Link of a third party payment method') - lang = models.CharField(max_length=5, - validators=[validate_language_code], - help_text='ISO 639-1 language code + ISO 3166-1 alpha-2 country code, e.g. en-US') + description = models.CharField(max_length=255, help_text="Description of the payment method") + third_party_link = models.URLField( + blank=True, null=True, default=None, help_text="Link of a third party payment method" + ) + lang = models.CharField( + max_length=5, + validators=[validate_language_code], + help_text="ISO 639-1 language code + ISO 3166-1 alpha-2 country code, e.g. 
en-US", + ) diff --git a/breathecode/payments/receivers.py b/breathecode/payments/receivers.py index 82906a4d2..ac0cbda37 100644 --- a/breathecode/payments/receivers.py +++ b/breathecode/payments/receivers.py @@ -59,8 +59,9 @@ def lose_service_permissions_receiver(sender: Type[Consumable], instance: Consum if instance.how_many != 0: return - consumables = Consumable.objects.filter(Q(valid_until__lte=now) | Q(valid_until=None), - user=instance.user).exclude(how_many=0) + consumables = Consumable.objects.filter(Q(valid_until__lte=now) | Q(valid_until=None), user=instance.user).exclude( + how_many=0 + ) # for group in instance.user.groups.all(): for group in instance.service_item.service.groups.all(): @@ -81,13 +82,13 @@ def grant_service_permissions_receiver(sender: Type[Consumable], instance: Consu @receiver(mentorship_session_status, sender=MentorshipSession) def post_mentoring_session_ended(sender: Type[MentorshipSession], instance: MentorshipSession, **kwargs): - if instance.mentee and instance.service and instance.status in ['FAILED', 'IGNORED']: + if instance.mentee and instance.service and instance.status in ["FAILED", "IGNORED"]: tasks.refund_mentoring_session.delay(instance.id) @receiver(m2m_changed, sender=Plan.service_items.through) def plan_m2m_wrapper(sender: Type[Plan.service_items.through], instance: Plan, **kwargs): - if kwargs['action'] != 'post_add': + if kwargs["action"] != "post_add": return update_plan_m2m_service_items.send_robust(sender=sender, instance=instance) diff --git a/breathecode/payments/serializers.py b/breathecode/payments/serializers.py index 510762f01..bcc3b3529 100644 --- a/breathecode/payments/serializers.py +++ b/breathecode/payments/serializers.py @@ -108,8 +108,8 @@ class GetServiceItemWithFeaturesSerializer(GetServiceItemSerializer): def get_features(self, obj): query_args = [] - query_kwargs = {'service_item': obj} - obj.lang = obj.lang or 'en' + query_kwargs = {"service_item": obj} + obj.lang = obj.lang or "en" query_args.append(Q(lang=obj.lang) | Q(lang=obj.lang[:2]) | Q(lang__startswith=obj.lang[:2])) @@ -188,8 +188,8 @@ class GetPlanOfferSerializer(serpy.Serializer): def get_details(self, obj): query_args = [] - query_kwargs = {'offer': obj} - obj.lang = obj.lang or 'en' + query_kwargs = {"offer": obj} + obj.lang = obj.lang or "en" query_args.append(Q(lang=obj.lang) | Q(lang=obj.lang[:2]) | Q(lang__startswith=obj.lang[:2])) @@ -269,8 +269,8 @@ class Meta: exclude = () def validate(self, data): - if 'price_per_unit' not in data: - raise ValidationError('You must specify a price per unit') + if "price_per_unit" not in data: + raise ValidationError("You must specify a price per unit") return data @@ -287,7 +287,7 @@ class PUTAcademyServiceSerializer(serializers.ModelSerializer): class Meta: model = AcademyService - fields = '__all__' + fields = "__all__" def validate(self, data): @@ -371,8 +371,8 @@ class GetAbstractIOweYouSerializer(serpy.Serializer): selected_mentorship_service_set = GetMentorshipServiceSetSerializer(many=False, required=False) selected_event_type_set = GetEventTypeSetSerializer(many=False, required=False) - plans = serpy.ManyToManyField(GetPlanSmallSerializer(attr='plans', many=True)) - invoices = serpy.ManyToManyField(GetInvoiceSerializer(attr='invoices', many=True)) + plans = serpy.ManyToManyField(GetPlanSmallSerializer(attr="plans", many=True)) + invoices = serpy.ManyToManyField(GetInvoiceSerializer(attr="invoices", many=True)) next_payment_at = serpy.Field() valid_until = serpy.Field() @@ -434,29 +434,29 @@ class 
ServiceSerializer(serializers.Serializer): class Meta: model = Service - fields = '__all__' + fields = "__all__" def validate(self, attrs): return attrs class ServiceItemSerializer(serializers.Serializer): - status_fields = ['unit_type'] + status_fields = ["unit_type"] class Meta: model = ServiceItem - fields = '__all__' + fields = "__all__" def validate(self, attrs): return attrs class PlanSerializer(serializers.ModelSerializer): - status_fields = ['status', 'renew_every_unit', 'trial_duration_unit', 'time_of_life_unit'] + status_fields = ["status", "renew_every_unit", "trial_duration_unit", "time_of_life_unit"] class Meta: model = Plan - fields = '__all__' + fields = "__all__" def validate(self, attrs): return attrs @@ -473,11 +473,11 @@ def update(self, instance, validated_data): class PutPlanSerializer(PlanSerializer): - status_fields = ['status', 'renew_every_unit', 'trial_duration_unit', 'time_of_life_unit'] + status_fields = ["status", "renew_every_unit", "trial_duration_unit", "time_of_life_unit"] class Meta: model = Plan - fields = '__all__' + fields = "__all__" def validate(self, attrs): return attrs diff --git a/breathecode/payments/services/stripe.py b/breathecode/payments/services/stripe.py index 3017f06f9..de05eff02 100644 --- a/breathecode/payments/services/stripe.py +++ b/breathecode/payments/services/stripe.py @@ -13,7 +13,7 @@ logger = getLogger(__name__) -__all__ = ['Stripe'] +__all__ = ["Stripe"] class Stripe: @@ -23,8 +23,8 @@ class Stripe: language: str def __init__(self, api_key=None) -> None: - self.api_key = api_key or os.getenv('STRIPE_API_KEY') - self.language = 'en' + self.api_key = api_key or os.getenv("STRIPE_API_KEY") + self.language = "en" def set_language(self, lang: str) -> None: """Set the language for the error messages.""" @@ -41,12 +41,14 @@ def create_card_token(self, card_number: str, exp_month: int, exp_year: int, cvc stripe.api_key = self.api_key def callback(): - return stripe.Token.create(card={ - 'number': card_number, - 'exp_month': exp_month, - 'exp_year': exp_year, - 'cvc': cvc, - }) + return stripe.Token.create( + card={ + "number": card_number, + "exp_month": exp_month, + "exp_year": exp_year, + "cvc": cvc, + } + ) return self._i18n_validations(callback).id @@ -75,14 +77,14 @@ def add_contact(self, user: User): contact = PaymentContact(user=user) name = user.first_name - name += f' {user.last_name}' if name and user.last_name else f'{user.last_name}' + name += f" {user.last_name}" if name and user.last_name else f"{user.last_name}" def callback(): return stripe.Customer.create(email=user.email, name=name) response = self._i18n_validations(callback) - contact.stripe_id = response['id'] + contact.stripe_id = response["id"] contact.save() FinancialReputation.objects.get_or_create(user=user) @@ -100,39 +102,39 @@ def _i18n_validations(self, callback: callable, attempts=0): except stripe.error.CardError as e: logger.error(str(e)) - raise PaymentException(translation(self.language, - en='Card declined', - es='Tarjeta rechazada', - slug='card-error'), - slug='card-error', - silent=True) + raise PaymentException( + translation(self.language, en="Card declined", es="Tarjeta rechazada", slug="card-error"), + slug="card-error", + silent=True, + ) except stripe.error.RateLimitError as e: logger.error(str(e)) - raise PaymentException(translation(self.language, - en='Too many requests', - es='Demasiadas solicitudes', - slug='rate-limit-error'), - slug='rate-limit-error', - silent=True) + raise PaymentException( + translation( + self.language, en="Too 
many requests", es="Demasiadas solicitudes", slug="rate-limit-error" + ), + slug="rate-limit-error", + silent=True, + ) except stripe.error.InvalidRequestError as e: logger.error(str(e)) - raise PaymentException(translation(self.language, - en='Invalid request', - es='Solicitud invalida', - slug='invalid-request'), - slug='invalid-request', - silent=True) + raise PaymentException( + translation(self.language, en="Invalid request", es="Solicitud invalida", slug="invalid-request"), + slug="invalid-request", + silent=True, + ) except stripe.error.AuthenticationError as e: logger.error(str(e)) - raise PaymentException(translation(self.language, - en='Authentication error', - es='Error de autenticación', - slug='authentication-error'), - slug='authentication-error', - silent=True) + raise PaymentException( + translation( + self.language, en="Authentication error", es="Error de autenticación", slug="authentication-error" + ), + slug="authentication-error", + silent=True, + ) except stripe.error.APIConnectionError as e: attempts += 1 @@ -141,42 +143,52 @@ def _i18n_validations(self, callback: callable, attempts=0): logger.error(str(e)) - raise PaymentException(translation(self.language, - en='Payment service are down, try again later', - es='El servicio de pago está caído, inténtalo de nuevo más tarde', - slug='payment-service-are-down'), - slug='payment-service-are-down', - silent=True) + raise PaymentException( + translation( + self.language, + en="Payment service are down, try again later", + es="El servicio de pago está caído, inténtalo de nuevo más tarde", + slug="payment-service-are-down", + ), + slug="payment-service-are-down", + silent=True, + ) except stripe.error.StripeError as e: logger.error(str(e)) - raise PaymentException(translation( - self.language, - en='We have problems with the payment provider, try again later', - es='Tenemos problemas con el proveedor de pago, inténtalo de nuevo más tarde', - slug='stripe-error'), - slug='stripe-error', - silent=True) + raise PaymentException( + translation( + self.language, + en="We have problems with the payment provider, try again later", + es="Tenemos problemas con el proveedor de pago, inténtalo de nuevo más tarde", + slug="stripe-error", + ), + slug="stripe-error", + silent=True, + ) except Exception as e: # Something else happened, completely unrelated to Stripe logger.error(str(e)) - raise PaymentException(translation( - self.language, - en='A unexpected error occur during the payment process, please contact support', - es='Ocurrió un error inesperado durante el proceso de pago, comuníquese con soporte', - slug='unexpected-exception'), - slug='unexpected-exception', - silent=True) + raise PaymentException( + translation( + self.language, + en="A unexpected error occur during the payment process, please contact support", + es="Ocurrió un error inesperado durante el proceso de pago, comuníquese con soporte", + slug="unexpected-exception", + ), + slug="unexpected-exception", + silent=True, + ) def pay( self, user: User, bag: Bag, amount: int, - currency: str | Currency = 'usd', - description: str = '', + currency: str | Currency = "usd", + description: str = "", ) -> Invoice: """Pay for a given bag.""" @@ -188,9 +200,9 @@ def pay( raise ValidationException( translation( self.language, - en='Cannot determine the currency during process of payment', - es='No se puede determinar la moneda durante el proceso de pago', - slug='currency', + en="Cannot determine the currency during process of payment", + es="No se puede determinar la moneda 
durante el proceso de pago", + slug="currency", ), code=500, ) @@ -208,16 +220,18 @@ def pay( invoice_amount = math.ceil(amount) def callback(): - return stripe.Charge.create(customer=customer.stripe_id, - amount=math.ceil(stripe_amount), - currency=currency.code.lower(), - description=description) + return stripe.Charge.create( + customer=customer.stripe_id, + amount=math.ceil(stripe_amount), + currency=currency.code.lower(), + description=description, + ) charge = self._i18n_validations(callback) utc_now = timezone.now() - invoice = Invoice(user=user, amount=invoice_amount, stripe_id=charge['id'], paid_at=utc_now) - invoice.status = 'FULFILLED' + invoice = Invoice(user=user, amount=invoice_amount, stripe_id=charge["id"], paid_at=utc_now) + invoice.status = "FULFILLED" invoice.currency = currency invoice.bag = bag invoice.academy = bag.academy @@ -238,9 +252,9 @@ def callback(): refund = self._i18n_validations(callback) - invoice.refund_stripe_id = refund['id'] + invoice.refund_stripe_id = refund["id"] invoice.refunded_at = timezone.now() - invoice.status = 'REFUNDED' + invoice.status = "REFUNDED" invoice.save() return invoice @@ -251,13 +265,15 @@ def create_payment_link(self, price_id: str, quantity: int) -> tuple[str, str]: stripe.api_key = self.api_key def callback(): - return stripe.PaymentLink.create(line_items=[ - { - 'price': price_id, - 'quantity': quantity, - }, - ], ) + return stripe.PaymentLink.create( + line_items=[ + { + "price": price_id, + "quantity": quantity, + }, + ], + ) refund = self._i18n_validations(callback) - return refund['id'], refund['url'] + return refund["id"], refund["url"] diff --git a/breathecode/payments/signals.py b/breathecode/payments/signals.py index b93e27c60..514aee6fd 100644 --- a/breathecode/payments/signals.py +++ b/breathecode/payments/signals.py @@ -2,22 +2,22 @@ from task_manager.django.dispatch import Emisor -emisor = Emisor('breathecode.payments') +emisor = Emisor("breathecode.payments") # consume a service -consume_service = emisor.signal('consume_service') +consume_service = emisor.signal("consume_service") # refund the units in case of error -reimburse_service_units = emisor.signal('reimburse_service_units') +reimburse_service_units = emisor.signal("reimburse_service_units") # manage of permissions for the service -lose_service_permissions = emisor.signal('lose_service_permissions') -grant_service_permissions = emisor.signal('grant_service_permissions') -revoke_service_permissions = emisor.signal('revoke_service_permissions') +lose_service_permissions = emisor.signal("lose_service_permissions") +grant_service_permissions = emisor.signal("grant_service_permissions") +revoke_service_permissions = emisor.signal("revoke_service_permissions") # proxy to m2m_changed in Event.service_items -update_plan_m2m_service_items = emisor.signal('update_plan_m2m_service_items') +update_plan_m2m_service_items = emisor.signal("update_plan_m2m_service_items") # Plan adquired -planfinancing_created = emisor.signal('planfinancing_created') -subscription_created = emisor.signal('subscription_created') +planfinancing_created = emisor.signal("planfinancing_created") +subscription_created = emisor.signal("subscription_created") diff --git a/breathecode/payments/supervisors.py b/breathecode/payments/supervisors.py index 7f4c743f1..a5cf47150 100644 --- a/breathecode/payments/supervisors.py +++ b/breathecode/payments/supervisors.py @@ -14,42 +14,55 @@ def supervise_all_consumption_sessions(): utc_now = timezone.now() - done_sessions = 
ConsumptionSession.objects.filter(status='DONE', - eta__lte=utc_now, - eta__gte=utc_now - timedelta(days=1)) - pending_sessions = ConsumptionSession.objects.filter(status='PENDING', - eta__lte=utc_now, - eta__gte=utc_now - timedelta(days=1)) + done_sessions = ConsumptionSession.objects.filter( + status="DONE", eta__lte=utc_now, eta__gte=utc_now - timedelta(days=1) + ) + pending_sessions = ConsumptionSession.objects.filter( + status="PENDING", eta__lte=utc_now, eta__gte=utc_now - timedelta(days=1) + ) done_amount = done_sessions.count() pending_amount = pending_sessions.count() - if pending_amount and done_amount and (rate := - pending_amount / done_amount) >= 0.3 and done_amount > MIN_PENDING_SESSIONS: - yield f'There has so much pending consumption sessions, {pending_amount} pending and rate {round(rate * 100, 2)}%' + if ( + pending_amount + and done_amount + and (rate := pending_amount / done_amount) >= 0.3 + and done_amount > MIN_PENDING_SESSIONS + ): + yield f"There has so much pending consumption sessions, {pending_amount} pending and rate {round(rate * 100, 2)}%" - users = User.objects.filter(consumptionsession__status='CANCELLED', - consumptionsession__eta__lte=utc_now, - consumptionsession__eta__gte=utc_now - timedelta(days=1)).distinct() + users = User.objects.filter( + consumptionsession__status="CANCELLED", + consumptionsession__eta__lte=utc_now, + consumptionsession__eta__gte=utc_now - timedelta(days=1), + ).distinct() for user in users: - done_sessions = ConsumptionSession.objects.filter(user=user, - status='DONE', - operation_code='unsafe-consume-service-set', - consumable__service_item__service__type=Service.Type.VOID, - eta__lte=utc_now - timedelta(minutes=10)) + done_sessions = ConsumptionSession.objects.filter( + user=user, + status="DONE", + operation_code="unsafe-consume-service-set", + consumable__service_item__service__type=Service.Type.VOID, + eta__lte=utc_now - timedelta(minutes=10), + ) cancelled_sessions = ConsumptionSession.objects.filter( user=user, - status='CANCELLED', - operation_code='unsafe-consume-service-set', + status="CANCELLED", + operation_code="unsafe-consume-service-set", consumable__service_item__service__type=Service.Type.VOID, eta__lte=utc_now, - eta__gte=utc_now - timedelta(days=1)) + eta__gte=utc_now - timedelta(days=1), + ) done_amount = done_sessions.count() cancelled_amount = cancelled_sessions.count() # this client should be a cheater - if cancelled_amount and done_amount and (rate := cancelled_amount / - done_amount) > 0.1 and done_amount >= MIN_CANCELLED_SESSIONS: - yield f'There has {round(rate * 100, 2)}% cancelled consumption sessions, due to a bug or a cheater, user {user.email}' + if ( + cancelled_amount + and done_amount + and (rate := cancelled_amount / done_amount) > 0.1 + and done_amount >= MIN_CANCELLED_SESSIONS + ): + yield f"There has {round(rate * 100, 2)}% cancelled consumption sessions, due to a bug or a cheater, user {user.email}" diff --git a/breathecode/payments/tasks.py b/breathecode/payments/tasks.py index 16264feb5..417feefa9 100644 --- a/breathecode/payments/tasks.py +++ b/breathecode/payments/tasks.py @@ -38,7 +38,7 @@ ) logger = logging.getLogger(__name__) -IS_DJANGO_REDIS = hasattr(cache, 'delete_pattern') +IS_DJANGO_REDIS = hasattr(cache, "delete_pattern") @task(bind=True, priority=TaskPriority.WEB_SERVICE_PAYMENT.value) @@ -49,55 +49,78 @@ def get_resource_lookup(i_owe_you: AbstractIOweYou, service: Service): lookups = {} key = service.type.lower() - value = getattr(i_owe_you, f'selected_{key}', None) + value = 
getattr(i_owe_you, f"selected_{key}", None) if value: lookups[key] = value return lookups - logger.info(f'Starting renew_consumables for service stock scheduler {scheduler_id}') + logger.info(f"Starting renew_consumables for service stock scheduler {scheduler_id}") if not (scheduler := ServiceStockScheduler.objects.filter(id=scheduler_id).first()): - raise RetryTask(f'ServiceStockScheduler with id {scheduler_id} not found') + raise RetryTask(f"ServiceStockScheduler with id {scheduler_id} not found") utc_now = timezone.now() # is over - if (scheduler.plan_handler and scheduler.plan_handler.subscription - and scheduler.plan_handler.subscription.valid_until - and scheduler.plan_handler.subscription.valid_until < utc_now): - raise AbortTask(f'The subscription {scheduler.plan_handler.subscription.id} is over') + if ( + scheduler.plan_handler + and scheduler.plan_handler.subscription + and scheduler.plan_handler.subscription.valid_until + and scheduler.plan_handler.subscription.valid_until < utc_now + ): + raise AbortTask(f"The subscription {scheduler.plan_handler.subscription.id} is over") # it needs to be paid - if (scheduler.plan_handler and scheduler.plan_handler.subscription - and scheduler.plan_handler.subscription.next_payment_at < utc_now): - raise AbortTask(f'The subscription {scheduler.plan_handler.subscription.id} needs to be paid to renew the ' - 'consumables') + if ( + scheduler.plan_handler + and scheduler.plan_handler.subscription + and scheduler.plan_handler.subscription.next_payment_at < utc_now + ): + raise AbortTask( + f"The subscription {scheduler.plan_handler.subscription.id} needs to be paid to renew the " "consumables" + ) # is over - if (scheduler.plan_handler and scheduler.plan_handler.plan_financing - and scheduler.plan_handler.plan_financing.plan_expires_at < utc_now): - raise AbortTask(f'The plan financing {scheduler.plan_handler.plan_financing.id} is over') + if ( + scheduler.plan_handler + and scheduler.plan_handler.plan_financing + and scheduler.plan_handler.plan_financing.plan_expires_at < utc_now + ): + raise AbortTask(f"The plan financing {scheduler.plan_handler.plan_financing.id} is over") # it needs to be paid - if (scheduler.plan_handler and scheduler.plan_handler.plan_financing - and scheduler.plan_handler.plan_financing.next_payment_at < utc_now): - raise AbortTask(f'The plan financing {scheduler.plan_handler.plan_financing.id} needs to be paid to renew ' - 'the consumables') + if ( + scheduler.plan_handler + and scheduler.plan_handler.plan_financing + and scheduler.plan_handler.plan_financing.next_payment_at < utc_now + ): + raise AbortTask( + f"The plan financing {scheduler.plan_handler.plan_financing.id} needs to be paid to renew " + "the consumables" + ) # is over - if (scheduler.subscription_handler and scheduler.subscription_handler.subscription - and scheduler.subscription_handler.subscription.valid_until < utc_now): - raise AbortTask(f'The subscription {scheduler.subscription_handler.subscription.id} is over') + if ( + scheduler.subscription_handler + and scheduler.subscription_handler.subscription + and scheduler.subscription_handler.subscription.valid_until < utc_now + ): + raise AbortTask(f"The subscription {scheduler.subscription_handler.subscription.id} is over") # it needs to be paid - if (scheduler.subscription_handler and scheduler.subscription_handler.subscription - and scheduler.subscription_handler.subscription.next_payment_at < utc_now): - raise AbortTask(f'The subscription {scheduler.subscription_handler.subscription.id} needs to be 
paid to renew ' - 'the consumables') - - if (scheduler.valid_until and scheduler.valid_until - timedelta(days=1) < utc_now): - logger.info(f'The scheduler {scheduler.id} don\'t needs to be renewed') + if ( + scheduler.subscription_handler + and scheduler.subscription_handler.subscription + and scheduler.subscription_handler.subscription.next_payment_at < utc_now + ): + raise AbortTask( + f"The subscription {scheduler.subscription_handler.subscription.id} needs to be paid to renew " + "the consumables" + ) + + if scheduler.valid_until and scheduler.valid_until - timedelta(days=1) < utc_now: + logger.info(f"The scheduler {scheduler.id} don't needs to be renewed") return service_item = None @@ -138,16 +161,17 @@ def get_resource_lookup(i_owe_you: AbstractIOweYou, service: Service): scheduler.save() if not selected_lookup: - logger.error('The Plan not have a resource linked to it ' - f'for the ServiceStockScheduler {scheduler.id}') + logger.error("The Plan not have a resource linked to it " f"for the ServiceStockScheduler {scheduler.id}") return - consumable = Consumable(service_item=service_item, - user=user, - unit_type=service_item.unit_type, - how_many=service_item.how_many, - valid_until=scheduler.valid_until, - **selected_lookup) + consumable = Consumable( + service_item=service_item, + user=user, + unit_type=service_item.unit_type, + how_many=service_item.how_many, + valid_until=scheduler.valid_until, + **selected_lookup, + ) consumable.save() @@ -155,27 +179,27 @@ def get_resource_lookup(i_owe_you: AbstractIOweYou, service: Service): key = list(selected_lookup.keys())[0] id = selected_lookup[key].id - name = key.replace('selected_', '').replace('_', ' ') + name = key.replace("selected_", "").replace("_", " ") - logger.info(f'The consumable {consumable.id} for {name} {id} was built') - logger.info(f'The scheduler {scheduler.id} was renewed') + logger.info(f"The consumable {consumable.id} for {name} {id} was built") + logger.info(f"The scheduler {scheduler.id} was renewed") @task(bind=True, priority=TaskPriority.WEB_SERVICE_PAYMENT.value) def renew_subscription_consumables(self, subscription_id: int, **_: Any): """Renew consumables belongs to a subscription.""" - logger.info(f'Starting renew_subscription_consumables for id {subscription_id}') + logger.info(f"Starting renew_subscription_consumables for id {subscription_id}") if not (subscription := Subscription.objects.filter(id=subscription_id).first()): - raise RetryTask(f'Subscription with id {subscription_id} not found') + raise RetryTask(f"Subscription with id {subscription_id} not found") utc_now = timezone.now() if subscription.valid_until and subscription.valid_until < utc_now: - raise AbortTask(f'The subscription {subscription.id} is over') + raise AbortTask(f"The subscription {subscription.id} is over") if subscription.next_payment_at < utc_now: - raise AbortTask(f'The subscription {subscription.id} needs to be paid to renew the consumables') + raise AbortTask(f"The subscription {subscription.id} needs to be paid to renew the consumables") for scheduler in ServiceStockScheduler.objects.filter(subscription_handler__subscription=subscription): renew_consumables.delay(scheduler.id) @@ -188,17 +212,17 @@ def renew_subscription_consumables(self, subscription_id: int, **_: Any): def renew_plan_financing_consumables(self, plan_financing_id: int, **_: Any): """Renew consumables belongs to a plan financing.""" - logger.info(f'Starting renew_plan_financing_consumables for id {plan_financing_id}') + logger.info(f"Starting 
renew_plan_financing_consumables for id {plan_financing_id}") if not (plan_financing := PlanFinancing.objects.filter(id=plan_financing_id).first()): - raise RetryTask(f'PlanFinancing with id {plan_financing_id} not found') + raise RetryTask(f"PlanFinancing with id {plan_financing_id} not found") utc_now = timezone.now() if plan_financing.next_payment_at < utc_now: - raise AbortTask(f'The PlanFinancing {plan_financing.id} needs to be paid to renew the consumables') + raise AbortTask(f"The PlanFinancing {plan_financing.id} needs to be paid to renew the consumables") if plan_financing.plan_expires_at and plan_financing.plan_expires_at < utc_now: - logger.info(f'The services related to PlanFinancing {plan_financing.id} is over') + logger.info(f"The services related to PlanFinancing {plan_financing.id} is over") return for scheduler in ServiceStockScheduler.objects.filter(plan_handler__plan_financing=plan_financing): @@ -212,14 +236,14 @@ def fallback_charge_subscription(self, subscription_id: int, exception: Exceptio settings = get_user_settings(subscription.user.id) utc_now = timezone.now() - message = f'charge_subscription is failing for the subscription {subscription.id}: ' - message += str(exception)[:250 - len(message)] + message = f"charge_subscription is failing for the subscription {subscription.id}: " + message += str(exception)[: 250 - len(message)] - subscription.status = 'ERROR' + subscription.status = "ERROR" subscription.status_message = message subscription.save() - invoice = subscription.invoices.filter(paid_at__gte=utc_now - timedelta(days=1)).order_by('-id').first() + invoice = subscription.invoices.filter(paid_at__gte=utc_now - timedelta(days=1)).order_by("-id").first() if invoice: s = Stripe() @@ -227,41 +251,40 @@ def fallback_charge_subscription(self, subscription_id: int, exception: Exceptio s.refund_payment(invoice) -@task(bind=True, - transaction=True, - fallback=fallback_charge_subscription, - priority=TaskPriority.WEB_SERVICE_PAYMENT.value) +@task( + bind=True, transaction=True, fallback=fallback_charge_subscription, priority=TaskPriority.WEB_SERVICE_PAYMENT.value +) def charge_subscription(self, subscription_id: int, **_: Any): """Renews a subscription.""" - logger.info(f'Starting charge_subscription for subscription {subscription_id}') + logger.info(f"Starting charge_subscription for subscription {subscription_id}") client = None if IS_DJANGO_REDIS: - client = get_redis_connection('default') + client = get_redis_connection("default") try: - with Lock(client, f'lock:subscription:{subscription_id}', timeout=30, blocking_timeout=30): + with Lock(client, f"lock:subscription:{subscription_id}", timeout=30, blocking_timeout=30): if not (subscription := Subscription.objects.filter(id=subscription_id).first()): - raise AbortTask(f'Subscription with id {subscription_id} not found') + raise AbortTask(f"Subscription with id {subscription_id} not found") utc_now = timezone.now() if subscription.valid_until and subscription.valid_until < utc_now: - raise AbortTask(f'The subscription {subscription.id} is over') + raise AbortTask(f"The subscription {subscription.id} is over") if subscription.next_payment_at > utc_now: - raise AbortTask(f'The subscription with id {subscription_id} was paid this month') + raise AbortTask(f"The subscription with id {subscription_id} was paid this month") settings = get_user_settings(subscription.user.id) try: bag = actions.get_bag_from_subscription(subscription, settings) except Exception as e: - subscription.status = 'ERROR' + subscription.status = 
"ERROR" subscription.status_message = str(e) subscription.save() - raise AbortTask(f'Error getting bag from subscription {subscription_id}: {e}') + raise AbortTask(f"Error getting bag from subscription {subscription_id}: {e}") amount = actions.get_amount_by_chosen_period(bag, bag.chosen_period, settings.lang) @@ -271,30 +294,35 @@ def charge_subscription(self, subscription_id: int, **_: Any): invoice = s.pay(subscription.user, bag, amount, currency=bag.currency) except Exception: - subject = translation(settings.lang, - en='Your 4Geeks subscription could not be renewed', - es='Tu suscripción 4Geeks no pudo ser renovada') - - message = translation(settings.lang, - en='Please update your payment methods', - es='Por favor actualiza tus métodos de pago') - - button = translation(settings.lang, - en='Please update your payment methods', - es='Por favor actualiza tus métodos de pago') - - notify_actions.send_email_message('message', - subscription.user.email, { - 'SUBJECT': subject, - 'MESSAGE': message, - 'BUTTON': button, - 'LINK': f'{get_app_url()}/subscription/{subscription.id}', - }, - academy=subscription.academy) + subject = translation( + settings.lang, + en="Your 4Geeks subscription could not be renewed", + es="Tu suscripción 4Geeks no pudo ser renovada", + ) + + message = translation( + settings.lang, en="Please update your payment methods", es="Por favor actualiza tus métodos de pago" + ) + + button = translation( + settings.lang, en="Please update your payment methods", es="Por favor actualiza tus métodos de pago" + ) + + notify_actions.send_email_message( + "message", + subscription.user.email, + { + "SUBJECT": subject, + "MESSAGE": message, + "BUTTON": button, + "LINK": f"{get_app_url()}/subscription/{subscription.id}", + }, + academy=subscription.academy, + ) bag.delete() - subscription.status = 'PAYMENT_ISSUE' + subscription.status = "PAYMENT_ISSUE" subscription.save() return @@ -312,27 +340,32 @@ def charge_subscription(self, subscription_id: int, **_: Any): subscription.save() value = invoice.currency.format_price(invoice.amount) - subject = translation(settings.lang, - en='Your 4Geeks subscription was successfully renewed', - es='Tu suscripción 4Geeks fue renovada exitosamente') + subject = translation( + settings.lang, + en="Your 4Geeks subscription was successfully renewed", + es="Tu suscripción 4Geeks fue renovada exitosamente", + ) - message = translation(settings.lang, en=f'The amount was {value}', es=f'El monto fue {value}') + message = translation(settings.lang, en=f"The amount was {value}", es=f"El monto fue {value}") - button = translation(settings.lang, en='See the invoice', es='Ver la factura') + button = translation(settings.lang, en="See the invoice", es="Ver la factura") - notify_actions.send_email_message('message', - invoice.user.email, { - 'SUBJECT': subject, - 'MESSAGE': message, - 'BUTTON': button, - 'LINK': f'{get_app_url()}/subscription/{subscription.id}', - }, - academy=subscription.academy) + notify_actions.send_email_message( + "message", + invoice.user.email, + { + "SUBJECT": subject, + "MESSAGE": message, + "BUTTON": button, + "LINK": f"{get_app_url()}/subscription/{subscription.id}", + }, + academy=subscription.academy, + ) renew_subscription_consumables.delay(subscription.id) except LockError: - raise RetryTask('Could not acquire lock for activity, operation timed out.') + raise RetryTask("Could not acquire lock for activity, operation timed out.") def fallback_charge_plan_financing(self, plan_financing_id: int, exception: Exception, **_: Any): @@ 
-342,14 +375,14 @@ def fallback_charge_plan_financing(self, plan_financing_id: int, exception: Exce settings = get_user_settings(plan_financing.user.id) utc_now = timezone.now() - message = f'charge_plan_financing is failing for the plan financing {plan_financing.id}: ' - message += str(exception)[:250 - len(message)] + message = f"charge_plan_financing is failing for the plan financing {plan_financing.id}: " + message += str(exception)[: 250 - len(message)] - plan_financing.status = 'ERROR' + plan_financing.status = "ERROR" plan_financing.status_message = message plan_financing.save() - invoice = plan_financing.invoices.filter(paid_at__gte=utc_now - timedelta(days=1)).order_by('-id').first() + invoice = plan_financing.invoices.filter(paid_at__gte=utc_now - timedelta(days=1)).order_by("-id").first() if invoice: s = Stripe() @@ -357,54 +390,56 @@ def fallback_charge_plan_financing(self, plan_financing_id: int, exception: Exce s.refund_payment(invoice) -@task(bind=True, - transaction=True, - fallback=fallback_charge_plan_financing, - priority=TaskPriority.WEB_SERVICE_PAYMENT.value) +@task( + bind=True, + transaction=True, + fallback=fallback_charge_plan_financing, + priority=TaskPriority.WEB_SERVICE_PAYMENT.value, +) def charge_plan_financing(self, plan_financing_id: int, **_: Any): """Renew a plan financing.""" - logger.info(f'Starting charge_plan_financing for id {plan_financing_id}') + logger.info(f"Starting charge_plan_financing for id {plan_financing_id}") client = None if IS_DJANGO_REDIS: - client = get_redis_connection('default') + client = get_redis_connection("default") try: - with Lock(client, f'lock:plan_financing:{plan_financing_id}', timeout=30, blocking_timeout=30): + with Lock(client, f"lock:plan_financing:{plan_financing_id}", timeout=30, blocking_timeout=30): if not (plan_financing := PlanFinancing.objects.filter(id=plan_financing_id).first()): - raise AbortTask(f'PlanFinancing with id {plan_financing_id} not found') + raise AbortTask(f"PlanFinancing with id {plan_financing_id} not found") utc_now = timezone.now() if plan_financing.plan_expires_at < utc_now: - raise AbortTask(f'PlanFinancing with id {plan_financing_id} is over') + raise AbortTask(f"PlanFinancing with id {plan_financing_id} is over") if plan_financing.next_payment_at > utc_now: - raise AbortTask(f'PlanFinancing with id {plan_financing_id} was paid this month') + raise AbortTask(f"PlanFinancing with id {plan_financing_id} was paid this month") settings = get_user_settings(plan_financing.user.id) try: bag = actions.get_bag_from_plan_financing(plan_financing, settings) except Exception as e: - plan_financing.status = 'ERROR' + plan_financing.status = "ERROR" plan_financing.status_message = str(e) plan_financing.save() - raise AbortTask(f'Error getting bag from plan financing {plan_financing_id}: {e}') + raise AbortTask(f"Error getting bag from plan financing {plan_financing_id}: {e}") amount = plan_financing.monthly_price - invoices = plan_financing.invoices.order_by('created_at') + invoices = plan_financing.invoices.order_by("created_at") first_invoice = invoices.first() last_invoice = invoices.last() installments = first_invoice.bag.how_many_installments if utc_now - last_invoice.created_at < timedelta(days=5): - raise AbortTask(f'PlanFinancing with id {plan_financing_id} was paid earlier') + raise AbortTask(f"PlanFinancing with id {plan_financing_id} was paid earlier") remaining_installments = installments - invoices.count() @@ -416,30 +451,39 @@ def charge_plan_financing(self, plan_financing_id: int, **_: 
Any): invoice = s.pay(plan_financing.user, bag, amount, currency=bag.currency) except Exception: - subject = translation(settings.lang, - en='Your 4Geeks subscription could not be renewed', - es='Tu suscripción 4Geeks no pudo ser renovada') - - message = translation(settings.lang, - en='Please update your payment methods', - es='Por favor actualiza tus métodos de pago') - - button = translation(settings.lang, - en='Please update your payment methods', - es='Por favor actualiza tus métodos de pago') - - notify_actions.send_email_message('message', - plan_financing.user.email, { - 'SUBJECT': subject, - 'MESSAGE': message, - 'BUTTON': button, - 'LINK': f'{get_app_url()}/plan-financing/{plan_financing.id}', - }, - academy=plan_financing.academy) + subject = translation( + settings.lang, + en="Your 4Geeks subscription could not be renewed", + es="Tu suscripción 4Geeks no pudo ser renovada", + ) + + message = translation( + settings.lang, + en="Please update your payment methods", + es="Por favor actualiza tus métodos de pago", + ) + + button = translation( + settings.lang, + en="Please update your payment methods", + es="Por favor actualiza tus métodos de pago", + ) + + notify_actions.send_email_message( + "message", + plan_financing.user.email, + { + "SUBJECT": subject, + "MESSAGE": message, + "BUTTON": button, + "LINK": f"{get_app_url()}/plan-financing/{plan_financing.id}", + }, + academy=plan_financing.academy, + ) bag.delete() - plan_financing.status = 'PAYMENT_ISSUE' + plan_financing.status = "PAYMENT_ISSUE" plan_financing.save() return @@ -451,22 +495,27 @@ def charge_plan_financing(self, plan_financing_id: int, **_: Any): value = invoice.currency.format_price(invoice.amount) - subject = translation(settings.lang, - en='Your installment at 4Geeks was successfully charged', - es='Tu cuota en 4Geeks fue cobrada exitosamente') + subject = translation( + settings.lang, + en="Your installment at 4Geeks was successfully charged", + es="Tu cuota en 4Geeks fue cobrada exitosamente", + ) - message = translation(settings.lang, en=f'The amount was {value}', es=f'El monto fue {value}') + message = translation(settings.lang, en=f"The amount was {value}", es=f"El monto fue {value}") - button = translation(settings.lang, en='See the invoice', es='Ver la factura') + button = translation(settings.lang, en="See the invoice", es="Ver la factura") - notify_actions.send_email_message('message', - invoice.user.email, { - 'SUBJECT': subject, - 'MESSAGE': message, - 'BUTTON': button, - 'LINK': f'{get_app_url()}/plan-financing/{plan_financing.id}', - }, - academy=plan_financing.academy) + notify_actions.send_email_message( + "message", + invoice.user.email, + { + "SUBJECT": subject, + "MESSAGE": message, + "BUTTON": button, + "LINK": f"{get_app_url()}/plan-financing/{plan_financing.id}", + }, + academy=plan_financing.academy, + ) delta = relativedelta(months=1) @@ -479,45 +528,41 @@ def charge_plan_financing(self, plan_financing_id: int, **_: Any): renew_plan_financing_consumables.delay(plan_financing.id) except LockError: - raise RetryTask('Could not acquire lock for activity, operation timed out.') + raise RetryTask("Could not acquire lock for activity, operation timed out.") @task(bind=True, priority=TaskPriority.WEB_SERVICE_PAYMENT.value) -def build_service_stock_scheduler_from_subscription(self, - subscription_id: int, - user_id: Optional[int] = None, - update_mode: Optional[bool] = False, - **_: Any): +def build_service_stock_scheduler_from_subscription( + self, subscription_id: int, user_id: Optional[int] 
= None, update_mode: Optional[bool] = False, **_: Any +): """Build service stock scheduler for a subscription.""" - logger.info(f'Starting build_service_stock_scheduler_from_subscription for subscription {subscription_id}') + logger.info(f"Starting build_service_stock_scheduler_from_subscription for subscription {subscription_id}") k = { - 'subscription': 'user__id', + "subscription": "user__id", # service items of - 'handlers': { - 'of_subscription': 'subscription_handler__subscription__user__id', - 'of_plan': 'plan_handler__subscription__user__id', + "handlers": { + "of_subscription": "subscription_handler__subscription__user__id", + "of_plan": "plan_handler__subscription__user__id", }, } additional_args = { - 'subscription': { - k['subscription']: user_id - } if user_id else {}, + "subscription": {k["subscription"]: user_id} if user_id else {}, # service items of - 'handlers': { - 'of_subscription': { - k['handlers']['of_subscription']: user_id, + "handlers": { + "of_subscription": { + k["handlers"]["of_subscription"]: user_id, }, - 'of_plan': { - k['handlers']['of_plan']: user_id, + "of_plan": { + k["handlers"]["of_plan"]: user_id, }, }, } - if not (subscription := Subscription.objects.filter(id=subscription_id, **additional_args['subscription']).first()): - raise RetryTask(f'Subscription with id {subscription_id} not found') + if not (subscription := Subscription.objects.filter(id=subscription_id, **additional_args["subscription"]).first()): + raise RetryTask(f"Subscription with id {subscription_id} not found") utc_now = timezone.now() @@ -557,38 +602,38 @@ def build_service_stock_scheduler_from_subscription(self, @task(bind=True, priority=TaskPriority.WEB_SERVICE_PAYMENT.value) -def build_service_stock_scheduler_from_plan_financing(self, - plan_financing_id: int, - user_id: Optional[int] = None, - **_: Any): +def build_service_stock_scheduler_from_plan_financing( + self, plan_financing_id: int, user_id: Optional[int] = None, **_: Any +): """Build service stock scheduler for a plan financing.""" - logger.info(f'Starting build_service_stock_scheduler_from_plan_financing for subscription {plan_financing_id}') + logger.info(f"Starting build_service_stock_scheduler_from_plan_financing for subscription {plan_financing_id}") k = { - 'plan_financing': 'user__id', + "plan_financing": "user__id", # service items of - 'handlers': { - 'of_subscription': 'subscription_handler__subscription__user__id', - 'of_plan': 'plan_handler__subscription__user__id', + "handlers": { + "of_subscription": "subscription_handler__subscription__user__id", + "of_plan": "plan_handler__subscription__user__id", }, } additional_args = { - 'plan_financing': { - k['plan_financing']: user_id - } if user_id else {}, + "plan_financing": {k["plan_financing"]: user_id} if user_id else {}, # service items of - 'handlers': { - 'of_plan': { - k['handlers']['of_plan']: user_id, + "handlers": { + "of_plan": { + k["handlers"]["of_plan"]: user_id, }, }, } - if not (plan_financing := PlanFinancing.objects.filter(id=plan_financing_id, ** - additional_args['plan_financing']).first()): - raise RetryTask(f'PlanFinancing with id {plan_financing_id} not found') + if not ( + plan_financing := PlanFinancing.objects.filter( + id=plan_financing_id, **additional_args["plan_financing"] + ).first() + ): + raise RetryTask(f"PlanFinancing with id {plan_financing_id} not found") for plan in plan_financing.plans.all(): for handler in PlanServiceItem.objects.filter(plan=plan): @@ -614,29 +659,31 @@ def 
build_service_stock_scheduler_from_plan_financing(self, @task(bind=True, priority=TaskPriority.WEB_SERVICE_PAYMENT.value) -def build_subscription(self, - bag_id: int, - invoice_id: int, - start_date: Optional[datetime] = None, - conversion_info: Optional[str] = '', - **_: Any): - logger.info(f'Starting build_subscription for bag {bag_id}') - - if not (bag := Bag.objects.filter(id=bag_id, status='PAID', was_delivered=False).first()): - raise RetryTask(f'Bag with id {bag_id} not found') - - if not (invoice := Invoice.objects.filter(id=invoice_id, status='FULFILLED').first()): - raise RetryTask(f'Invoice with id {invoice_id} not found') +def build_subscription( + self, + bag_id: int, + invoice_id: int, + start_date: Optional[datetime] = None, + conversion_info: Optional[str] = "", + **_: Any, +): + logger.info(f"Starting build_subscription for bag {bag_id}") + + if not (bag := Bag.objects.filter(id=bag_id, status="PAID", was_delivered=False).first()): + raise RetryTask(f"Bag with id {bag_id} not found") + + if not (invoice := Invoice.objects.filter(id=invoice_id, status="FULFILLED").first()): + raise RetryTask(f"Invoice with id {invoice_id} not found") months = 1 - if bag.chosen_period == 'QUARTER': + if bag.chosen_period == "QUARTER": months = 3 - elif bag.chosen_period == 'HALF': + elif bag.chosen_period == "HALF": months = 6 - elif bag.chosen_period == 'YEAR': + elif bag.chosen_period == "YEAR": months = 12 plan = bag.plans.first() @@ -653,17 +700,19 @@ def build_subscription(self, subscription_start_at = start_date or invoice.paid_at - parsed_conversion_info = ast.literal_eval(conversion_info) if conversion_info != '' else None - subscription = Subscription.objects.create(user=bag.user, - paid_at=invoice.paid_at, - academy=bag.academy, - selected_cohort_set=cohort_set, - selected_event_type_set=event_type_set, - selected_mentorship_service_set=mentorship_service_set, - valid_until=None, - next_payment_at=subscription_start_at + relativedelta(months=months), - status='ACTIVE', - conversion_info=parsed_conversion_info) + parsed_conversion_info = ast.literal_eval(conversion_info) if conversion_info != "" else None + subscription = Subscription.objects.create( + user=bag.user, + paid_at=invoice.paid_at, + academy=bag.academy, + selected_cohort_set=cohort_set, + selected_event_type_set=event_type_set, + selected_mentorship_service_set=mentorship_service_set, + valid_until=None, + next_payment_at=subscription_start_at + relativedelta(months=months), + status="ACTIVE", + conversion_info=parsed_conversion_info, + ) subscription.plans.set(bag.plans.all()) subscription.service_items.set(bag.service_items.all()) @@ -676,26 +725,23 @@ def build_subscription(self, build_service_stock_scheduler_from_subscription.delay(subscription.id) - logger.info(f'Subscription was created with id {subscription.id}') + logger.info(f"Subscription was created with id {subscription.id}") @task(bind=True, priority=TaskPriority.WEB_SERVICE_PAYMENT.value) -def build_plan_financing(self, - bag_id: int, - invoice_id: int, - is_free: bool = False, - conversion_info: Optional[str] = '', - **_: Any): - logger.info(f'Starting build_plan_financing for bag {bag_id}') +def build_plan_financing( + self, bag_id: int, invoice_id: int, is_free: bool = False, conversion_info: Optional[str] = "", **_: Any +): + logger.info(f"Starting build_plan_financing for bag {bag_id}") - if not (bag := Bag.objects.filter(id=bag_id, status='PAID', was_delivered=False).first()): - raise RetryTask(f'Bag with id {bag_id} not found') + if not (bag := 
Bag.objects.filter(id=bag_id, status="PAID", was_delivered=False).first()): + raise RetryTask(f"Bag with id {bag_id} not found") - if not (invoice := Invoice.objects.filter(id=invoice_id, status='FULFILLED').first()): - raise RetryTask(f'Invoice with id {invoice_id} not found') + if not (invoice := Invoice.objects.filter(id=invoice_id, status="FULFILLED").first()): + raise RetryTask(f"Invoice with id {invoice_id} not found") if not is_free and not invoice.amount: - raise AbortTask(f'An invoice without amount is prohibited (id: {invoice_id})') + raise AbortTask(f"An invoice without amount is prohibited (id: {invoice_id})") utc_now = timezone.now() months = bag.how_many_installments @@ -725,20 +771,22 @@ def build_plan_financing(self, event_type_set = None mentorship_service_set = None - print('conversion_info') + print("conversion_info") print(conversion_info) - parsed_conversion_info = ast.literal_eval(conversion_info) if conversion_info != '' else None - financing = PlanFinancing.objects.create(user=bag.user, - next_payment_at=invoice.paid_at + relativedelta(months=1), - academy=bag.academy, - selected_cohort_set=cohort_set, - selected_event_type_set=event_type_set, - selected_mentorship_service_set=mentorship_service_set, - valid_until=invoice.paid_at + relativedelta(months=months - 1), - plan_expires_at=invoice.paid_at + delta, - monthly_price=invoice.amount, - status='ACTIVE', - conversion_info=parsed_conversion_info) + parsed_conversion_info = ast.literal_eval(conversion_info) if conversion_info != "" else None + financing = PlanFinancing.objects.create( + user=bag.user, + next_payment_at=invoice.paid_at + relativedelta(months=1), + academy=bag.academy, + selected_cohort_set=cohort_set, + selected_event_type_set=event_type_set, + selected_mentorship_service_set=mentorship_service_set, + valid_until=invoice.paid_at + relativedelta(months=months - 1), + plan_expires_at=invoice.paid_at + delta, + monthly_price=invoice.amount, + status="ACTIVE", + conversion_info=parsed_conversion_info, + ) financing.plans.set(plans) @@ -750,26 +798,26 @@ def build_plan_financing(self, build_service_stock_scheduler_from_plan_financing.delay(financing.id) - logger.info(f'PlanFinancing was created with id {financing.id}') + logger.info(f"PlanFinancing was created with id {financing.id}") @task(bind=True, priority=TaskPriority.WEB_SERVICE_PAYMENT.value) -def build_free_subscription(self, bag_id: int, invoice_id: int, conversion_info: Optional[str] = '', **_: Any): - logger.info(f'Starting build_free_subscription for bag {bag_id}') +def build_free_subscription(self, bag_id: int, invoice_id: int, conversion_info: Optional[str] = "", **_: Any): + logger.info(f"Starting build_free_subscription for bag {bag_id}") - if not (bag := Bag.objects.filter(id=bag_id, status='PAID', was_delivered=False).first()): - raise RetryTask(f'Bag with id {bag_id} not found') + if not (bag := Bag.objects.filter(id=bag_id, status="PAID", was_delivered=False).first()): + raise RetryTask(f"Bag with id {bag_id} not found") - if not (invoice := Invoice.objects.filter(id=invoice_id, status='FULFILLED').first()): - raise RetryTask(f'Invoice with id {invoice_id} not found') + if not (invoice := Invoice.objects.filter(id=invoice_id, status="FULFILLED").first()): + raise RetryTask(f"Invoice with id {invoice_id} not found") if invoice.amount != 0: - raise AbortTask(f'The invoice with id {invoice_id} is invalid for a free subscription') + raise AbortTask(f"The invoice with id {invoice_id} is invalid for a free subscription") plans = 
bag.plans.all() if not plans: - raise AbortTask(f'Not have plans to associated to this free subscription in the bag {bag_id}') + raise AbortTask(f"Not have plans to associated to this free subscription in the bag {bag_id}") for plan in plans: is_free_trial = True @@ -797,32 +845,34 @@ def build_free_subscription(self, bag_id: int, invoice_id: int, conversion_info: if is_free_trial: extra = { - 'status': 'FREE_TRIAL', - 'valid_until': until, + "status": "FREE_TRIAL", + "valid_until": until, } elif not is_free_trial and plan.is_renewable: extra = { - 'status': 'ACTIVE', - 'valid_until': None, + "status": "ACTIVE", + "valid_until": None, } else: extra = { - 'status': 'ACTIVE', - 'valid_until': until, + "status": "ACTIVE", + "valid_until": until, } - parsed_conversion_info = ast.literal_eval(conversion_info) if conversion_info != '' else None - subscription = Subscription.objects.create(user=bag.user, - paid_at=invoice.paid_at, - academy=bag.academy, - selected_cohort_set=cohort_set, - selected_event_type_set=event_type_set, - selected_mentorship_service_set=mentorship_service_set, - next_payment_at=until, - conversion_info=parsed_conversion_info, - **extra) + parsed_conversion_info = ast.literal_eval(conversion_info) if conversion_info != "" else None + subscription = Subscription.objects.create( + user=bag.user, + paid_at=invoice.paid_at, + academy=bag.academy, + selected_cohort_set=cohort_set, + selected_event_type_set=event_type_set, + selected_mentorship_service_set=mentorship_service_set, + next_payment_at=until, + conversion_info=parsed_conversion_info, + **extra, + ) subscription.plans.add(plan) @@ -831,7 +881,7 @@ def build_free_subscription(self, bag_id: int, invoice_id: int, conversion_info: build_service_stock_scheduler_from_subscription.delay(subscription.id) - logger.info(f'Free subscription was created with id {subscription.id} for plan {plan.id}') + logger.info(f"Free subscription was created with id {subscription.id} for plan {plan.id}") bag.was_delivered = True bag.save() @@ -839,20 +889,20 @@ def build_free_subscription(self, bag_id: int, invoice_id: int, conversion_info: @task(bind=True, priority=TaskPriority.WEB_SERVICE_PAYMENT.value) def end_the_consumption_session(self, consumption_session_id: int, how_many: float = 1.0, **_: Any): - logger.info(f'Starting end_the_consumption_session for ConsumptionSession {consumption_session_id}') + logger.info(f"Starting end_the_consumption_session for ConsumptionSession {consumption_session_id}") session = ConsumptionSession.objects.filter(id=consumption_session_id).first() if not session: - raise AbortTask(f'ConsumptionSession with id {consumption_session_id} not found') + raise AbortTask(f"ConsumptionSession with id {consumption_session_id} not found") - if session.status != 'PENDING': - raise AbortTask(f'ConsumptionSession with id {consumption_session_id} already processed') + if session.status != "PENDING": + raise AbortTask(f"ConsumptionSession with id {consumption_session_id} already processed") consumable = session.consumable consume_service.send_robust(instance=consumable, sender=consumable.__class__, how_many=how_many) session.was_discounted = True - session.status = 'DONE' + session.status = "DONE" session.save() @@ -860,35 +910,38 @@ def end_the_consumption_session(self, consumption_session_id: int, how_many: flo # you need fix the logic about the consumable valid until, maybe this must be removed @task(bind=True, priority=TaskPriority.WEB_SERVICE_PAYMENT.value) def build_consumables_from_bag(bag_id: int, **_: Any): - 
logger.info(f'Starting build_consumables_from_bag for bag {bag_id}') + logger.info(f"Starting build_consumables_from_bag for bag {bag_id}") - if not (bag := Bag.objects.filter(id=bag_id, status='PAID', was_delivered=False).first()): - raise RetryTask(f'Bag with id {bag_id} not found') + if not (bag := Bag.objects.filter(id=bag_id, status="PAID", was_delivered=False).first()): + raise RetryTask(f"Bag with id {bag_id} not found") mentorship_service_set = bag.selected_mentorship_service_sets.first() event_type_set = bag.selected_event_type_sets.first() if [mentorship_service_set, event_type_set].count(None) != 1: - raise AbortTask(f'Bag with id {bag_id} not have a resource associated') + raise AbortTask(f"Bag with id {bag_id} not have a resource associated") consumables = [] for service_item in bag.service_items.all(): kwargs = {} - if service_item.service_item_type == 'MENTORSHIP_SERVICE_SET': - kwargs['mentorship_service_set'] = mentorship_service_set + if service_item.service_item_type == "MENTORSHIP_SERVICE_SET": + kwargs["mentorship_service_set"] = mentorship_service_set - if service_item.service_item_type == 'EVENT_TYPE_SET': - kwargs['event_type_set'] = event_type_set + if service_item.service_item_type == "EVENT_TYPE_SET": + kwargs["event_type_set"] = event_type_set if not kwargs: - raise AbortTask(f'Bag with id {bag_id} have a resource associated opposite to the service item type') + raise AbortTask(f"Bag with id {bag_id} have a resource associated opposite to the service item type") consumables.append( - Consumable(service_item=service_item, - unit_type=service_item.unit_type, - how_many=service_item.how_many, - user=bag.user, - **kwargs)) + Consumable( + service_item=service_item, + unit_type=service_item.unit_type, + how_many=service_item.how_many, + user=bag.user, + **kwargs, + ) + ) for consumable in consumables: consumable.save() @@ -901,56 +954,65 @@ def build_consumables_from_bag(bag_id: int, **_: Any): def refund_mentoring_session(session_id: int, **_: Any): from breathecode.mentorship.models import MentorshipSession - logger.info(f'Starting refund_mentoring_session for mentoring session {session_id}') + logger.info(f"Starting refund_mentoring_session for mentoring session {session_id}") - if not (mentorship_session := MentorshipSession.objects.filter( - id=session_id, mentee__isnull=False, service__isnull=False, status__in=['FAILED', 'IGNORED']).first()): - raise AbortTask(f'MentoringSession with id {session_id} not found or is invalid') + if not ( + mentorship_session := MentorshipSession.objects.filter( + id=session_id, mentee__isnull=False, service__isnull=False, status__in=["FAILED", "IGNORED"] + ).first() + ): + raise AbortTask(f"MentoringSession with id {session_id} not found or is invalid") mentee = mentorship_session.mentee service = mentorship_session.service - consumption_session = ConsumptionSession.objects.filter( - consumable__user=mentee, - consumable__mentorship_service_set__mentorship_services=service).exclude(status='CANCELLED').first() + consumption_session = ( + ConsumptionSession.objects.filter( + consumable__user=mentee, consumable__mentorship_service_set__mentorship_services=service + ) + .exclude(status="CANCELLED") + .first() + ) if not consumption_session: - raise AbortTask(f'ConsumptionSession not found for mentorship session {session_id}') + raise AbortTask(f"ConsumptionSession not found for mentorship session {session_id}") - if consumption_session.status == 'CANCELLED': - raise AbortTask(f'ConsumptionSession already cancelled for mentorship 
session {session_id}') + if consumption_session.status == "CANCELLED": + raise AbortTask(f"ConsumptionSession already cancelled for mentorship session {session_id}") - if consumption_session.status == 'DONE': - logger.info('Refunding consumption session because it was discounted') + if consumption_session.status == "DONE": + logger.info("Refunding consumption session because it was discounted") how_many = consumption_session.how_many consumable = consumption_session.consumable reimburse_service_units.send_robust(instance=consumable, sender=consumable.__class__, how_many=how_many) - consumption_session.status = 'CANCELLED' + consumption_session.status = "CANCELLED" consumption_session.save() @task(bind=False, priority=TaskPriority.ACADEMY.value) def add_cohort_set_to_subscription(subscription_id: int, cohort_set_id: int, **_: Any): logger.info( - f'Starting add_cohort_set_to_subscription for subscription {subscription_id} cohort_set {cohort_set_id}') + f"Starting add_cohort_set_to_subscription for subscription {subscription_id} cohort_set {cohort_set_id}" + ) - subscription = Subscription.objects.filter(id=subscription_id).exclude( - status__in=['CANCELLED', 'DEPRECATED']).first() + subscription = ( + Subscription.objects.filter(id=subscription_id).exclude(status__in=["CANCELLED", "DEPRECATED"]).first() + ) if not subscription: - raise RetryTask(f'Subscription with id {subscription_id} not found') + raise RetryTask(f"Subscription with id {subscription_id} not found") if subscription.valid_until and subscription.valid_until < timezone.now(): - raise AbortTask(f'The subscription {subscription.id} is over') + raise AbortTask(f"The subscription {subscription.id} is over") if subscription.selected_cohort_set: - raise AbortTask(f'Subscription with id {subscription_id} already have a cohort set') + raise AbortTask(f"Subscription with id {subscription_id} already have a cohort set") cohort_set = CohortSet.objects.filter(id=cohort_set_id).first() if not cohort_set: - raise RetryTask(f'CohortSet with id {cohort_set_id} not found') + raise RetryTask(f"CohortSet with id {cohort_set_id} not found") subscription.selected_cohort_set = cohort_set subscription.save() @@ -958,15 +1020,16 @@ def add_cohort_set_to_subscription(subscription_id: int, cohort_set_id: int, **_ @task(bind=False, priority=TaskPriority.WEB_SERVICE_PAYMENT.value) def update_subscription_service_stock_schedulers(plan_id: int, subscription_id: int, **_: Any): - plan = Plan.objects.filter(id=plan_id).only('id').prefetch_related('service_items').first() - subscription = Subscription.objects.filter(plans__id=subscription_id).only('id', 'next_payment_at').first() + plan = Plan.objects.filter(id=plan_id).only("id").prefetch_related("service_items").first() + subscription = Subscription.objects.filter(plans__id=subscription_id).only("id", "next_payment_at").first() - for plan_service_item in PlanServiceItem.objects.filter(plan=plan).prefetch_related('service_item'): + for plan_service_item in PlanServiceItem.objects.filter(plan=plan).prefetch_related("service_item"): service_item = plan_service_item.service_item scheduler = ServiceStockScheduler.objects.filter( plan_handler__subscription__id=subscription_id, plan_handler__handler__plan=plan, - plan_handler__handler__service_item__id=service_item.id).first() + plan_handler__handler__service_item__id=service_item.id, + ).first() if not scheduler: unit = service_item.renew_at @@ -980,23 +1043,25 @@ def update_subscription_service_stock_schedulers(plan_id: int, subscription_id: if 
subscription.valid_until and valid_until > subscription.valid_until: valid_until = subscription.valid_until - handler, _ = PlanServiceItemHandler.objects.get_or_create(subscription=subscription, - handler=plan_service_item) + handler, _ = PlanServiceItemHandler.objects.get_or_create( + subscription=subscription, handler=plan_service_item + ) ServiceStockScheduler.objects.get_or_create(plan_handler=handler) @task(bind=False, priority=TaskPriority.WEB_SERVICE_PAYMENT.value) def update_plan_financing_service_stock_schedulers(plan_id: int, subscription_id: int, **_: Any): - plan = Plan.objects.filter(id=plan_id).only('id').prefetch_related('service_items').first() - plan_financing = PlanFinancing.objects.filter(plans__id=subscription_id).only('id', 'next_payment_at').first() + plan = Plan.objects.filter(id=plan_id).only("id").prefetch_related("service_items").first() + plan_financing = PlanFinancing.objects.filter(plans__id=subscription_id).only("id", "next_payment_at").first() - for plan_service_item in PlanServiceItem.objects.filter(plan=plan).prefetch_related('service_item'): + for plan_service_item in PlanServiceItem.objects.filter(plan=plan).prefetch_related("service_item"): service_item = plan_service_item.service_item scheduler = ServiceStockScheduler.objects.filter( plan_handler__plan_financing__id=subscription_id, plan_handler__handler__plan=plan, - plan_handler__handler__service_item__id=service_item.id).first() + plan_handler__handler__service_item__id=service_item.id, + ).first() if not scheduler: unit = service_item.renew_at @@ -1010,16 +1075,17 @@ def update_plan_financing_service_stock_schedulers(plan_id: int, subscription_id if plan_financing.valid_until and valid_until > plan_financing.valid_until: valid_until = plan_financing.valid_until - handler, _ = PlanServiceItemHandler.objects.get_or_create(plan_financing=plan_financing, - handler=plan_service_item) + handler, _ = PlanServiceItemHandler.objects.get_or_create( + plan_financing=plan_financing, handler=plan_service_item + ) ServiceStockScheduler.objects.get_or_create(plan_handler=handler) @task(bind=False, priority=TaskPriority.WEB_SERVICE_PAYMENT.value) def update_service_stock_schedulers(plan_id: int, **_: Any): - for subscription in Subscription.objects.filter(plans__id=plan_id).only('id'): + for subscription in Subscription.objects.filter(plans__id=plan_id).only("id"): update_subscription_service_stock_schedulers.delay(plan_id, subscription.id) - for plan_financing in PlanFinancing.objects.filter(plans__id=plan_id).only('id'): + for plan_financing in PlanFinancing.objects.filter(plans__id=plan_id).only("id"): update_plan_financing_service_stock_schedulers.delay(plan_id, plan_financing.id) diff --git a/breathecode/payments/tests/admin/tests_add_cohort_set_to_the_subscriptions.py b/breathecode/payments/tests/admin/tests_add_cohort_set_to_the_subscriptions.py index 2f12a936e..b46bf2b7d 100644 --- a/breathecode/payments/tests/admin/tests_add_cohort_set_to_the_subscriptions.py +++ b/breathecode/payments/tests/admin/tests_add_cohort_set_to_the_subscriptions.py @@ -1,6 +1,7 @@ """ Test /answer """ + from django import forms import random import pytest @@ -14,12 +15,12 @@ UTC_NOW = timezone.now() # enable this file to use the database -pytestmark = pytest.mark.usefixtures('db') +pytestmark = pytest.mark.usefixtures("db") @pytest.fixture(autouse=True) def setup(monkeypatch): - monkeypatch.setattr('breathecode.payments.tasks.add_cohort_set_to_subscription.delay', MagicMock()) + 
monkeypatch.setattr("breathecode.payments.tasks.add_cohort_set_to_subscription.delay", MagicMock()) yield @@ -28,14 +29,14 @@ def setup(monkeypatch): # Then: shouldn't to do anything def test_no_cohort_sets(bc: Breathecode): - CohortSet = bc.database.get_model('payments.CohortSet') + CohortSet = bc.database.get_model("payments.CohortSet") queryset = CohortSet.objects.all() add_cohort_set_to_the_subscriptions(None, None, queryset) - assert bc.database.list_of('payments.CohortSet') == [] - assert bc.database.list_of('payments.CohortSetCohort') == [] - assert bc.database.list_of('payments.Subscription') == [] + assert bc.database.list_of("payments.CohortSet") == [] + assert bc.database.list_of("payments.CohortSetCohort") == [] + assert bc.database.list_of("payments.Subscription") == [] assert tasks.add_cohort_set_to_subscription.delay.call_args_list == [] @@ -45,25 +46,25 @@ def test_no_cohort_sets(bc: Breathecode): def test_two_cohort_sets(bc: Breathecode): # with pytest.raises(forms.ValidationError, match='academy-not-available-as-saas'): if random.randint(0, 1): - academy = {'available_as_saas': True} - cohort = {'available_as_saas': None} + academy = {"available_as_saas": True} + cohort = {"available_as_saas": None} else: - academy = {'available_as_saas': True} - cohort = {'available_as_saas': True} + academy = {"available_as_saas": True} + cohort = {"available_as_saas": True} - cohort_set_cohorts = [{'cohort_id': x + 1} for x in range(2)] + cohort_set_cohorts = [{"cohort_id": x + 1} for x in range(2)] model = bc.database.create(cohort=(2, cohort), academy=academy, cohort_set=2, cohort_set_cohort=cohort_set_cohorts) - CohortSet = bc.database.get_model('payments.CohortSet') + CohortSet = bc.database.get_model("payments.CohortSet") queryset = CohortSet.objects.all() - with pytest.raises(forms.ValidationError, match='You just can select one subscription at a time'): + with pytest.raises(forms.ValidationError, match="You just can select one subscription at a time"): add_cohort_set_to_the_subscriptions(None, None, queryset) - assert bc.database.list_of('payments.CohortSet') == bc.format.to_dict(model.cohort_set) - assert bc.database.list_of('payments.CohortSetCohort') == bc.format.to_dict(model.cohort_set_cohort) - assert bc.database.list_of('payments.Subscription') == [] + assert bc.database.list_of("payments.CohortSet") == bc.format.to_dict(model.cohort_set) + assert bc.database.list_of("payments.CohortSetCohort") == bc.format.to_dict(model.cohort_set_cohort) + assert bc.database.list_of("payments.Subscription") == [] assert tasks.add_cohort_set_to_subscription.delay.call_args_list == [] @@ -73,22 +74,22 @@ def test_two_cohort_sets(bc: Breathecode): def test_one_cohort_set(bc: Breathecode): # with pytest.raises(forms.ValidationError, match='academy-not-available-as-saas'): if random.randint(0, 1): - academy = {'available_as_saas': True} - cohort = {'available_as_saas': None} + academy = {"available_as_saas": True} + cohort = {"available_as_saas": None} else: - academy = {'available_as_saas': True} - cohort = {'available_as_saas': True} + academy = {"available_as_saas": True} + cohort = {"available_as_saas": True} model = bc.database.create(cohort=cohort, academy=academy, cohort_set=1, cohort_set_cohort=1) - CohortSet = bc.database.get_model('payments.CohortSet') + CohortSet = bc.database.get_model("payments.CohortSet") queryset = CohortSet.objects.all() add_cohort_set_to_the_subscriptions(None, None, queryset) - assert bc.database.list_of('payments.CohortSet') == 
[bc.format.to_dict(model.cohort_set)] - assert bc.database.list_of('payments.CohortSetCohort') == [bc.format.to_dict(model.cohort_set_cohort)] - assert bc.database.list_of('payments.Subscription') == [] + assert bc.database.list_of("payments.CohortSet") == [bc.format.to_dict(model.cohort_set)] + assert bc.database.list_of("payments.CohortSetCohort") == [bc.format.to_dict(model.cohort_set_cohort)] + assert bc.database.list_of("payments.Subscription") == [] assert tasks.add_cohort_set_to_subscription.delay.call_args_list == [] @@ -98,28 +99,26 @@ def test_one_cohort_set(bc: Breathecode): def test_one_cohort_set__two_subscriptions(bc: Breathecode): # with pytest.raises(forms.ValidationError, match='academy-not-available-as-saas'): if random.randint(0, 1): - academy = {'available_as_saas': True} - cohort = {'available_as_saas': None} + academy = {"available_as_saas": True} + cohort = {"available_as_saas": None} else: - academy = {'available_as_saas': True} - cohort = {'available_as_saas': True} + academy = {"available_as_saas": True} + cohort = {"available_as_saas": True} - subscriptions = [{'selected_cohort_set_id': None} for _ in range(2)] + subscriptions = [{"selected_cohort_set_id": None} for _ in range(2)] - model = bc.database.create(cohort=cohort, - academy=academy, - cohort_set=1, - cohort_set_cohort=1, - subscription=subscriptions) + model = bc.database.create( + cohort=cohort, academy=academy, cohort_set=1, cohort_set_cohort=1, subscription=subscriptions + ) - CohortSet = bc.database.get_model('payments.CohortSet') + CohortSet = bc.database.get_model("payments.CohortSet") queryset = CohortSet.objects.all() add_cohort_set_to_the_subscriptions(None, None, queryset) - assert bc.database.list_of('payments.CohortSet') == [bc.format.to_dict(model.cohort_set)] - assert bc.database.list_of('payments.CohortSetCohort') == [bc.format.to_dict(model.cohort_set_cohort)] - assert bc.database.list_of('payments.Subscription') == bc.format.to_dict(model.subscription) + assert bc.database.list_of("payments.CohortSet") == [bc.format.to_dict(model.cohort_set)] + assert bc.database.list_of("payments.CohortSetCohort") == [bc.format.to_dict(model.cohort_set_cohort)] + assert bc.database.list_of("payments.Subscription") == bc.format.to_dict(model.subscription) assert tasks.add_cohort_set_to_subscription.delay.call_args_list == [call(1, 1), call(2, 1)] @@ -129,31 +128,31 @@ def test_one_cohort_set__two_subscriptions(bc: Breathecode): def test_one_cohort_set__two_subscriptions__cohort_set_already_selected(bc: Breathecode): # with pytest.raises(forms.ValidationError, match='academy-not-available-as-saas'): if random.randint(0, 1): - academy = {'available_as_saas': True} - cohort = {'available_as_saas': None} + academy = {"available_as_saas": True} + cohort = {"available_as_saas": None} else: - academy = {'available_as_saas': True} - cohort = {'available_as_saas': True} + academy = {"available_as_saas": True} + cohort = {"available_as_saas": True} - subscriptions = [{'selected_cohort_set_id': 1} for _ in range(2)] + subscriptions = [{"selected_cohort_set_id": 1} for _ in range(2)] - model = bc.database.create(cohort=cohort, - academy=academy, - cohort_set=1, - cohort_set_cohort=1, - subscription=subscriptions) + model = bc.database.create( + cohort=cohort, academy=academy, cohort_set=1, cohort_set_cohort=1, subscription=subscriptions + ) - CohortSet = bc.database.get_model('payments.CohortSet') + CohortSet = bc.database.get_model("payments.CohortSet") queryset = CohortSet.objects.all() 
add_cohort_set_to_the_subscriptions(None, None, queryset) - assert bc.database.list_of('payments.CohortSet') == [bc.format.to_dict(model.cohort_set)] - assert bc.database.list_of('payments.CohortSetCohort') == [bc.format.to_dict(model.cohort_set_cohort)] - assert bc.database.list_of('payments.Subscription') == [{ - **bc.format.to_dict(model.subscription)[n - 1], - 'selected_cohort_set_id': - 1, - } for n in range(1, 3)] + assert bc.database.list_of("payments.CohortSet") == [bc.format.to_dict(model.cohort_set)] + assert bc.database.list_of("payments.CohortSetCohort") == [bc.format.to_dict(model.cohort_set_cohort)] + assert bc.database.list_of("payments.Subscription") == [ + { + **bc.format.to_dict(model.subscription)[n - 1], + "selected_cohort_set_id": 1, + } + for n in range(1, 3) + ] assert tasks.add_cohort_set_to_subscription.delay.call_args_list == [] diff --git a/breathecode/payments/tests/admin/tests_grant_service_permissions.py b/breathecode/payments/tests/admin/tests_grant_service_permissions.py index 62c740ea8..89d0c1517 100644 --- a/breathecode/payments/tests/admin/tests_grant_service_permissions.py +++ b/breathecode/payments/tests/admin/tests_grant_service_permissions.py @@ -18,40 +18,35 @@ class PaymentsTestSuite(PaymentsTestCase): def test_no_consumables(self): - groups = [{'permissions': [1, 2]}, {'permissions': [3, 4]}] - groups = [{'permissions': [1, 2]}, {'permissions': [3, 4]}] - services = [{'groups': [1]}, {'groups': [2]}] - service_items = [{'service_id': n + 1} for n in range(2)] - model = self.bc.database.create(user=1, - group=groups, - permission=4, - service_item=service_items, - service=services) - Consumable = self.bc.database.get_model('payments.Consumable') + groups = [{"permissions": [1, 2]}, {"permissions": [3, 4]}] + groups = [{"permissions": [1, 2]}, {"permissions": [3, 4]}] + services = [{"groups": [1]}, {"groups": [2]}] + service_items = [{"service_id": n + 1} for n in range(2)] + model = self.bc.database.create( + user=1, group=groups, permission=4, service_item=service_items, service=services + ) + Consumable = self.bc.database.get_model("payments.Consumable") queryset = Consumable.objects.all() grant_service_permissions(None, None, queryset) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) def test_with_consumables(self): - groups = [{'permissions': [1, 2]}, {'permissions': [3, 4]}] - services = [{'groups': [1]}, {'groups': [2]}] - consumables = [{'service_item_id': 1}, {'service_item_id': 2}] - service_items = [{'service_id': n + 1} for n in range(2)] - model = self.bc.database.create(user=1, - group=groups, - permission=4, - consumable=consumables, - service_item=service_items, - service=services) + groups = [{"permissions": [1, 2]}, {"permissions": [3, 4]}] + services = [{"groups": [1]}, {"groups": [2]}] + consumables = [{"service_item_id": 1}, {"service_item_id": 2}] + service_items = [{"service_id": n + 1} for n in range(2)] + model = self.bc.database.create( + user=1, group=groups, permission=4, consumable=consumables, service_item=service_items, service=services + ) db = self.bc.format.to_dict(model.consumable) - Consumable = self.bc.database.get_model('payments.Consumable') + Consumable = self.bc.database.get_model("payments.Consumable") queryset = Consumable.objects.all() grant_service_permissions(None, None, queryset) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), db) + 
self.assertEqual(self.bc.database.list_of("payments.Consumable"), db) self.bc.check.queryset_with_pks(model.user.groups.all(), [1, 2]) diff --git a/breathecode/payments/tests/management/commands/tests_make_charges.py b/breathecode/payments/tests/management/commands/tests_make_charges.py index d93fdd307..f59e79aea 100644 --- a/breathecode/payments/tests/management/commands/tests_make_charges.py +++ b/breathecode/payments/tests/management/commands/tests_make_charges.py @@ -14,8 +14,8 @@ @pytest.fixture(autouse=True) def setup(db: None, monkeypatch: pytest.MonkeyPatch): - monkeypatch.setattr(tasks.charge_subscription, 'delay', MagicMock()) - monkeypatch.setattr(tasks.charge_plan_financing, 'delay', MagicMock()) + monkeypatch.setattr(tasks.charge_subscription, "delay", MagicMock()) + monkeypatch.setattr(tasks.charge_plan_financing, "delay", MagicMock()) def test_with_zero_subscriptions(bc: Breathecode): @@ -23,21 +23,24 @@ def test_with_zero_subscriptions(bc: Breathecode): result = command.handle() assert result == None - assert bc.database.list_of('payments.Subscription') == [] + assert bc.database.list_of("payments.Subscription") == [] assert tasks.charge_subscription.delay.call_args_list == [] -@pytest.mark.parametrize('delta, status', [ - (relativedelta(days=1, minutes=1), 'ACTIVE'), - (relativedelta(days=1, minutes=1), 'ERROR'), - (relativedelta(days=1, minutes=1), 'PAYMENT_ISSUE'), - (-relativedelta(days=1, seconds=1), 'CANCELLED'), - (-relativedelta(days=1, seconds=1), 'FREE_TRIAL'), - (-relativedelta(days=1, seconds=1), 'DEPRECATED'), -]) +@pytest.mark.parametrize( + "delta, status", + [ + (relativedelta(days=1, minutes=1), "ACTIVE"), + (relativedelta(days=1, minutes=1), "ERROR"), + (relativedelta(days=1, minutes=1), "PAYMENT_ISSUE"), + (-relativedelta(days=1, seconds=1), "CANCELLED"), + (-relativedelta(days=1, seconds=1), "FREE_TRIAL"), + (-relativedelta(days=1, seconds=1), "DEPRECATED"), + ], +) def test_with_two_subscriptions__wrong_cases(bc: Breathecode, delta, status, utc_now): valid_until = utc_now + delta - subscription = {'valid_until': valid_until, 'next_payment_at': valid_until, 'status': status} + subscription = {"valid_until": valid_until, "next_payment_at": valid_until, "status": status} model = bc.database.create(subscription=(2, subscription)) @@ -45,18 +48,21 @@ def test_with_two_subscriptions__wrong_cases(bc: Breathecode, delta, status, utc result = command.handle() assert result == None - assert bc.database.list_of('payments.Subscription') == bc.format.to_dict(model.subscription) + assert bc.database.list_of("payments.Subscription") == bc.format.to_dict(model.subscription) assert tasks.charge_subscription.delay.call_args_list == [] -@pytest.mark.parametrize('delta, status, status_changed', [ - (-relativedelta(days=1, seconds=1), 'ACTIVE', True), - (-relativedelta(days=1, seconds=1), 'ERROR', False), - (-relativedelta(days=1, seconds=1), 'PAYMENT_ISSUE', False), -]) +@pytest.mark.parametrize( + "delta, status, status_changed", + [ + (-relativedelta(days=1, seconds=1), "ACTIVE", True), + (-relativedelta(days=1, seconds=1), "ERROR", False), + (-relativedelta(days=1, seconds=1), "PAYMENT_ISSUE", False), + ], +) def test_with_two_subscriptions__expired(bc: Breathecode, delta, status, status_changed, utc_now): valid_until = utc_now + delta - subscription = {'valid_until': valid_until, 'next_payment_at': valid_until, 'status': status} + subscription = {"valid_until": valid_until, "next_payment_at": valid_until, "status": status} model = bc.database.create(subscription=(2, 
subscription)) @@ -67,18 +73,22 @@ def test_with_two_subscriptions__expired(bc: Breathecode, delta, status, status_ db = bc.format.to_dict(model.subscription) for i in range(len(db)): if status_changed: - db[i]['status'] = 'EXPIRED' + db[i]["status"] = "EXPIRED" - assert bc.database.list_of('payments.Subscription') == db + assert bc.database.list_of("payments.Subscription") == db assert tasks.charge_subscription.delay.call_args_list == [] -@pytest.mark.parametrize('delta_valid_until, delta_next_payment_at', [ - (relativedelta(0), -relativedelta(days=7, seconds=1)), - (relativedelta(days=28), -relativedelta(days=7, seconds=1)), -]) -def test_with_two_subscriptions__payment_issue__gt_7_days(bc: Breathecode, delta_valid_until, delta_next_payment_at, - utc_now): +@pytest.mark.parametrize( + "delta_valid_until, delta_next_payment_at", + [ + (relativedelta(0), -relativedelta(days=7, seconds=1)), + (relativedelta(days=28), -relativedelta(days=7, seconds=1)), + ], +) +def test_with_two_subscriptions__payment_issue__gt_7_days( + bc: Breathecode, delta_valid_until, delta_next_payment_at, utc_now +): if delta_valid_until: valid_until = utc_now + delta_valid_until @@ -88,7 +98,7 @@ def test_with_two_subscriptions__payment_issue__gt_7_days(bc: Breathecode, delta next_payment_at = utc_now + delta_next_payment_at - subscription = {'valid_until': valid_until, 'next_payment_at': next_payment_at, 'status': 'PAYMENT_ISSUE'} + subscription = {"valid_until": valid_until, "next_payment_at": next_payment_at, "status": "PAYMENT_ISSUE"} model = bc.database.create(subscription=(2, subscription)) @@ -98,18 +108,22 @@ def test_with_two_subscriptions__payment_issue__gt_7_days(bc: Breathecode, delta assert result == None db = bc.format.to_dict(model.subscription) for i in range(len(db)): - db[i]['status'] = 'EXPIRED' + db[i]["status"] = "EXPIRED" - assert bc.database.list_of('payments.Subscription') == db + assert bc.database.list_of("payments.Subscription") == db assert tasks.charge_subscription.delay.call_args_list == [] -@pytest.mark.parametrize('delta_valid_until, delta_next_payment_at', [ - (relativedelta(0), -relativedelta(days=6)), - (relativedelta(days=28), -relativedelta(days=6)), -]) -def test_with_two_subscriptions__payment_issue__lt_7_days(bc: Breathecode, delta_valid_until, delta_next_payment_at, - utc_now): +@pytest.mark.parametrize( + "delta_valid_until, delta_next_payment_at", + [ + (relativedelta(0), -relativedelta(days=6)), + (relativedelta(days=28), -relativedelta(days=6)), + ], +) +def test_with_two_subscriptions__payment_issue__lt_7_days( + bc: Breathecode, delta_valid_until, delta_next_payment_at, utc_now +): if delta_valid_until: valid_until = utc_now + delta_valid_until @@ -119,7 +133,7 @@ def test_with_two_subscriptions__payment_issue__lt_7_days(bc: Breathecode, delta next_payment_at = utc_now + delta_next_payment_at - subscription = {'valid_until': valid_until, 'next_payment_at': next_payment_at, 'status': 'PAYMENT_ISSUE'} + subscription = {"valid_until": valid_until, "next_payment_at": next_payment_at, "status": "PAYMENT_ISSUE"} model = bc.database.create(subscription=(2, subscription)) @@ -129,19 +143,22 @@ def test_with_two_subscriptions__payment_issue__lt_7_days(bc: Breathecode, delta assert result == None db = bc.format.to_dict(model.subscription) - assert bc.database.list_of('payments.Subscription') == db + assert bc.database.list_of("payments.Subscription") == db assert tasks.charge_subscription.delay.call_args_list == [call(1), call(2)] -@pytest.mark.parametrize('delta, status', [ - 
(relativedelta(days=2, seconds=1), 'ACTIVE'), - (relativedelta(days=2, seconds=1), 'ERROR'), - (relativedelta(days=2, seconds=1), 'PAYMENT_ISSUE'), -]) +@pytest.mark.parametrize( + "delta, status", + [ + (relativedelta(days=2, seconds=1), "ACTIVE"), + (relativedelta(days=2, seconds=1), "ERROR"), + (relativedelta(days=2, seconds=1), "PAYMENT_ISSUE"), + ], +) def test_with_two_subscriptions__valid_cases(bc: Breathecode, delta, status, utc_now): valid_until = utc_now + delta next_payment_at = utc_now - delta - subscription = {'valid_until': valid_until, 'next_payment_at': next_payment_at, 'status': status} + subscription = {"valid_until": valid_until, "next_payment_at": next_payment_at, "status": status} model = bc.database.create(subscription=(2, subscription)) @@ -149,7 +166,7 @@ def test_with_two_subscriptions__valid_cases(bc: Breathecode, delta, status, utc result = command.handle() assert result == None - assert bc.database.list_of('payments.Subscription') == bc.format.to_dict(model.subscription) + assert bc.database.list_of("payments.Subscription") == bc.format.to_dict(model.subscription) assert tasks.charge_subscription.delay.call_args_list == [ call(model.subscription[0].id), call(model.subscription[1].id), @@ -164,26 +181,29 @@ def test_with_zero_plan_financings(bc: Breathecode): result = command.handle() assert result == None - assert bc.database.list_of('payments.PlanFinancing') == [] + assert bc.database.list_of("payments.PlanFinancing") == [] assert tasks.charge_plan_financing.delay.call_args_list == [] -@pytest.mark.parametrize('delta, status', [ - (relativedelta(days=1, minutes=1), 'ACTIVE'), - (relativedelta(days=1, minutes=1), 'ERROR'), - (relativedelta(days=1, minutes=1), 'PAYMENT_ISSUE'), - (-relativedelta(days=1, seconds=1), 'CANCELLED'), - (-relativedelta(days=1, seconds=1), 'FREE_TRIAL'), - (-relativedelta(days=1, seconds=1), 'DEPRECATED'), -]) +@pytest.mark.parametrize( + "delta, status", + [ + (relativedelta(days=1, minutes=1), "ACTIVE"), + (relativedelta(days=1, minutes=1), "ERROR"), + (relativedelta(days=1, minutes=1), "PAYMENT_ISSUE"), + (-relativedelta(days=1, seconds=1), "CANCELLED"), + (-relativedelta(days=1, seconds=1), "FREE_TRIAL"), + (-relativedelta(days=1, seconds=1), "DEPRECATED"), + ], +) def test_with_two_plan_financings__wrong_cases(bc: Breathecode, delta, status, utc_now): valid_until = utc_now + delta plan_financing = { - 'next_payment_at': valid_until, - 'valid_until': UTC_NOW + relativedelta(months=random.randint(1, 12)), - 'status': status, - 'monthly_price': (random.random() * 99) + 1, - 'plan_expires_at': UTC_NOW + relativedelta(months=random.randint(12, 24)), + "next_payment_at": valid_until, + "valid_until": UTC_NOW + relativedelta(months=random.randint(1, 12)), + "status": status, + "monthly_price": (random.random() * 99) + 1, + "plan_expires_at": UTC_NOW + relativedelta(months=random.randint(12, 24)), } model = bc.database.create(plan_financing=(2, plan_financing)) @@ -192,23 +212,26 @@ def test_with_two_plan_financings__wrong_cases(bc: Breathecode, delta, status, u result = command.handle() assert result == None - assert bc.database.list_of('payments.PlanFinancing') == bc.format.to_dict(model.plan_financing) + assert bc.database.list_of("payments.PlanFinancing") == bc.format.to_dict(model.plan_financing) assert tasks.charge_plan_financing.delay.call_args_list == [] -@pytest.mark.parametrize('delta, status, status_changed', [ - (-relativedelta(days=1, seconds=1), 'ACTIVE', True), - (-relativedelta(days=1, seconds=1), 'ERROR', False), - 
(-relativedelta(days=1, seconds=1), 'PAYMENT_ISSUE', False), -]) +@pytest.mark.parametrize( + "delta, status, status_changed", + [ + (-relativedelta(days=1, seconds=1), "ACTIVE", True), + (-relativedelta(days=1, seconds=1), "ERROR", False), + (-relativedelta(days=1, seconds=1), "PAYMENT_ISSUE", False), + ], +) def test_with_two_plan_financings__expired(bc: Breathecode, delta, status, status_changed, utc_now): valid_until = utc_now + delta plan_financing = { - 'next_payment_at': valid_until, - 'valid_until': UTC_NOW + relativedelta(months=random.randint(1, 12)), - 'status': status, - 'monthly_price': (random.random() * 99) + 1, - 'plan_expires_at': valid_until, + "next_payment_at": valid_until, + "valid_until": UTC_NOW + relativedelta(months=random.randint(1, 12)), + "status": status, + "monthly_price": (random.random() * 99) + 1, + "plan_expires_at": valid_until, } model = bc.database.create(plan_financing=(2, plan_financing)) @@ -221,26 +244,29 @@ def test_with_two_plan_financings__expired(bc: Breathecode, delta, status, statu db = bc.format.to_dict(model.plan_financing) for i in range(len(db)): if status_changed: - db[i]['status'] = 'EXPIRED' + db[i]["status"] = "EXPIRED" - assert bc.database.list_of('payments.PlanFinancing') == db + assert bc.database.list_of("payments.PlanFinancing") == db assert tasks.charge_plan_financing.delay.call_args_list == [] -@pytest.mark.parametrize('delta, status', [ - (relativedelta(days=2, seconds=1), 'ACTIVE'), - (relativedelta(days=2, seconds=1), 'ERROR'), - (relativedelta(days=2, seconds=1), 'PAYMENT_ISSUE'), -]) +@pytest.mark.parametrize( + "delta, status", + [ + (relativedelta(days=2, seconds=1), "ACTIVE"), + (relativedelta(days=2, seconds=1), "ERROR"), + (relativedelta(days=2, seconds=1), "PAYMENT_ISSUE"), + ], +) def test_with_two_plan_financings__valid_cases(bc: Breathecode, delta, status, utc_now): valid_until = utc_now + delta next_payment_at = utc_now - delta plan_financing = { - 'next_payment_at': next_payment_at, - 'valid_until': UTC_NOW + relativedelta(months=random.randint(1, 12)), - 'status': status, - 'monthly_price': (random.random() * 99) + 1, - 'plan_expires_at': valid_until, + "next_payment_at": next_payment_at, + "valid_until": UTC_NOW + relativedelta(months=random.randint(1, 12)), + "status": status, + "monthly_price": (random.random() * 99) + 1, + "plan_expires_at": valid_until, } model = bc.database.create(plan_financing=(2, plan_financing)) @@ -249,7 +275,7 @@ def test_with_two_plan_financings__valid_cases(bc: Breathecode, delta, status, u result = command.handle() assert result == None - assert bc.database.list_of('payments.PlanFinancing') == bc.format.to_dict(model.plan_financing) + assert bc.database.list_of("payments.PlanFinancing") == bc.format.to_dict(model.plan_financing) assert tasks.charge_plan_financing.delay.call_args_list == [ call(model.plan_financing[0].id), call(model.plan_financing[1].id), diff --git a/breathecode/payments/tests/management/commands/tests_renew_consumables.py b/breathecode/payments/tests/management/commands/tests_renew_consumables.py index 31ecd72d0..c9ca4c5a9 100644 --- a/breathecode/payments/tests/management/commands/tests_renew_consumables.py +++ b/breathecode/payments/tests/management/commands/tests_renew_consumables.py @@ -17,8 +17,8 @@ def apply_patch(db, monkeypatch): m1 = MagicMock() m2 = MagicMock() - monkeypatch.setattr(tasks.renew_subscription_consumables, 'delay', m1) - monkeypatch.setattr(tasks.renew_plan_financing_consumables, 'delay', m2) + 
monkeypatch.setattr(tasks.renew_subscription_consumables, "delay", m1) + monkeypatch.setattr(tasks.renew_plan_financing_consumables, "delay", m2) yield m1, m2 @@ -28,28 +28,28 @@ def test_no_related_entities(bc: Breathecode): assert result == None - assert bc.database.list_of('payments.Subscription') == [] - assert bc.database.list_of('payments.PlanFinancing') == [] + assert bc.database.list_of("payments.Subscription") == [] + assert bc.database.list_of("payments.PlanFinancing") == [] assert tasks.renew_subscription_consumables.delay.call_args_list == [] assert tasks.renew_plan_financing_consumables.delay.call_args_list == [] def invalid_statuses_params(): - for entity in ['subscription', 'plan_financing']: - for status in ['CANCELLED', 'DEPRECATED']: + for entity in ["subscription", "plan_financing"]: + for status in ["CANCELLED", "DEPRECATED"]: entity_attrs = { - 'status': status, + "status": status, } yield entity, entity_attrs -@pytest.mark.parametrize('entity,entity_attrs', invalid_statuses_params()) +@pytest.mark.parametrize("entity,entity_attrs", invalid_statuses_params()) def test_no_schedulers__invalid_statuses(bc: Breathecode, entity, entity_attrs): - if entity == 'plan_financing': - entity_attrs['monthly_price'] = 10 - entity_attrs['plan_expires_at'] = bc.datetime.now() + if entity == "plan_financing": + entity_attrs["monthly_price"] = 10 + entity_attrs["plan_expires_at"] = bc.datetime.now() extra = {entity: (2, entity_attrs)} @@ -60,37 +60,37 @@ def test_no_schedulers__invalid_statuses(bc: Breathecode, entity, entity_attrs): assert result == None - if entity == 'subscription': - assert bc.database.list_of('payments.Subscription') == bc.format.to_dict(model.subscription) - assert bc.database.list_of('payments.PlanFinancing') == [] + if entity == "subscription": + assert bc.database.list_of("payments.Subscription") == bc.format.to_dict(model.subscription) + assert bc.database.list_of("payments.PlanFinancing") == [] - elif entity == 'plan_financing': - assert bc.database.list_of('payments.Subscription') == [] - assert bc.database.list_of('payments.PlanFinancing') == bc.format.to_dict(model.plan_financing) + elif entity == "plan_financing": + assert bc.database.list_of("payments.Subscription") == [] + assert bc.database.list_of("payments.PlanFinancing") == bc.format.to_dict(model.plan_financing) assert tasks.renew_subscription_consumables.delay.call_args_list == [] assert tasks.renew_plan_financing_consumables.delay.call_args_list == [] def valid_statuses_params(): - for entity in ['subscription', 'plan_financing']: - statuses = ['FREE_TRIAL', 'ACTIVE', 'ERROR', 'EXPIRED'] - if entity != 'subscription': - statuses.append('FULLY_PAID') + for entity in ["subscription", "plan_financing"]: + statuses = ["FREE_TRIAL", "ACTIVE", "ERROR", "EXPIRED"] + if entity != "subscription": + statuses.append("FULLY_PAID") for status in statuses: entity_attrs = { - 'status': status, + "status": status, } yield entity, entity_attrs -@pytest.mark.parametrize('entity,entity_attrs', valid_statuses_params()) +@pytest.mark.parametrize("entity,entity_attrs", valid_statuses_params()) def test_no_schedulers__valid_statuses(bc: Breathecode, entity, entity_attrs): - if entity == 'plan_financing': - entity_attrs['monthly_price'] = 10 - entity_attrs['plan_expires_at'] = bc.datetime.now() + if entity == "plan_financing": + entity_attrs["monthly_price"] = 10 + entity_attrs["plan_expires_at"] = bc.datetime.now() extra = {entity: (2, entity_attrs)} @@ -101,129 +101,133 @@ def test_no_schedulers__valid_statuses(bc: 
Breathecode, entity, entity_attrs): assert result == None - if entity == 'subscription': - assert bc.database.list_of('payments.Subscription') == bc.format.to_dict(model.subscription) - assert bc.database.list_of('payments.PlanFinancing') == [] + if entity == "subscription": + assert bc.database.list_of("payments.Subscription") == bc.format.to_dict(model.subscription) + assert bc.database.list_of("payments.PlanFinancing") == [] assert tasks.renew_subscription_consumables.delay.call_args_list == [call(1), call(2)] assert tasks.renew_plan_financing_consumables.delay.call_args_list == [] - elif entity == 'plan_financing': - assert bc.database.list_of('payments.Subscription') == [] - assert bc.database.list_of('payments.PlanFinancing') == bc.format.to_dict(model.plan_financing) + elif entity == "plan_financing": + assert bc.database.list_of("payments.Subscription") == [] + assert bc.database.list_of("payments.PlanFinancing") == bc.format.to_dict(model.plan_financing) assert tasks.renew_subscription_consumables.delay.call_args_list == [] assert tasks.renew_plan_financing_consumables.delay.call_args_list == [call(1), call(2)] def valid_statuses_params(): - for entity in ['subscription', 'plan_financing']: - statuses = ['FREE_TRIAL', 'ACTIVE', 'ERROR', 'EXPIRED'] - if entity != 'subscription': - statuses.append('FULLY_PAID') + for entity in ["subscription", "plan_financing"]: + statuses = ["FREE_TRIAL", "ACTIVE", "ERROR", "EXPIRED"] + if entity != "subscription": + statuses.append("FULLY_PAID") for status in statuses: entity_attrs = { - 'status': status, + "status": status, } yield entity, entity_attrs -@pytest.mark.parametrize('entity,entity_attrs', valid_statuses_params()) +@pytest.mark.parametrize("entity,entity_attrs", valid_statuses_params()) def test_this_resource_does_not_requires_a_renovation(bc: Breathecode, entity, entity_attrs): - if entity == 'plan_financing': - entity_attrs['monthly_price'] = 10 - entity_attrs['plan_expires_at'] = bc.datetime.now() + if entity == "plan_financing": + entity_attrs["monthly_price"] = 10 + entity_attrs["plan_expires_at"] = bc.datetime.now() extra = {entity: (2, entity_attrs)} consumable = { - 'valid_until': bc.datetime.now() + relativedelta(hours=2, days=random.randint(1, 31)), + "valid_until": bc.datetime.now() + relativedelta(hours=2, days=random.randint(1, 31)), } plan = { - 'is_renewable': False, - 'time_of_life': random.randint(1, 31), - 'time_of_life_unit': random.choice(['DAY', 'WEEK', 'MONTH', 'YEAR']), - 'price_per_month': random.randint(1, 31), - 'price_per_year': random.randint(1, 31), - 'price_per_quarter': random.randint(1, 31), - 'price_per_half': random.randint(1, 31), + "is_renewable": False, + "time_of_life": random.randint(1, 31), + "time_of_life_unit": random.choice(["DAY", "WEEK", "MONTH", "YEAR"]), + "price_per_month": random.randint(1, 31), + "price_per_year": random.randint(1, 31), + "price_per_quarter": random.randint(1, 31), + "price_per_half": random.randint(1, 31), } - service_stock_schedulers = [{'consumables': [n]} for n in range(1, 3)] - plan_service_item_handlers = [{entity + '_id': n} for n in range(1, 3)] + service_stock_schedulers = [{"consumables": [n]} for n in range(1, 3)] + plan_service_item_handlers = [{entity + "_id": n} for n in range(1, 3)] - model = bc.database.create(**extra, - consumable=(2, consumable), - service_stock_scheduler=service_stock_schedulers, - plan_service_item_handler=plan_service_item_handlers, - plan=plan) + model = bc.database.create( + **extra, + consumable=(2, consumable), + 
service_stock_scheduler=service_stock_schedulers, + plan_service_item_handler=plan_service_item_handlers, + plan=plan + ) command = Command() result = command.handle() assert result == None - if entity == 'subscription': - assert bc.database.list_of('payments.Subscription') == bc.format.to_dict(model.subscription) - assert bc.database.list_of('payments.PlanFinancing') == [] + if entity == "subscription": + assert bc.database.list_of("payments.Subscription") == bc.format.to_dict(model.subscription) + assert bc.database.list_of("payments.PlanFinancing") == [] else: - assert bc.database.list_of('payments.Subscription') == [] - assert bc.database.list_of('payments.PlanFinancing') == bc.format.to_dict(model.plan_financing) + assert bc.database.list_of("payments.Subscription") == [] + assert bc.database.list_of("payments.PlanFinancing") == bc.format.to_dict(model.plan_financing) assert tasks.renew_subscription_consumables.delay.call_args_list == [] assert tasks.renew_plan_financing_consumables.delay.call_args_list == [] -@pytest.mark.parametrize('entity,entity_attrs', valid_statuses_params()) +@pytest.mark.parametrize("entity,entity_attrs", valid_statuses_params()) def test_this_resource_requires_a_renovation(bc: Breathecode, entity, entity_attrs): - if entity == 'plan_financing': - entity_attrs['monthly_price'] = 10 - entity_attrs['plan_expires_at'] = bc.datetime.now() + if entity == "plan_financing": + entity_attrs["monthly_price"] = 10 + entity_attrs["plan_expires_at"] = bc.datetime.now() extra = {entity: (2, entity_attrs)} consumable = { - 'valid_until': bc.datetime.now() - relativedelta(hours=2, days=random.randint(1, 31)), + "valid_until": bc.datetime.now() - relativedelta(hours=2, days=random.randint(1, 31)), } plan = { - 'is_renewable': False, - 'time_of_life': random.randint(1, 31), - 'time_of_life_unit': random.choice(['DAY', 'WEEK', 'MONTH', 'YEAR']), - 'price_per_month': random.randint(1, 31), - 'price_per_year': random.randint(1, 31), - 'price_per_quarter': random.randint(1, 31), - 'price_per_half': random.randint(1, 31), + "is_renewable": False, + "time_of_life": random.randint(1, 31), + "time_of_life_unit": random.choice(["DAY", "WEEK", "MONTH", "YEAR"]), + "price_per_month": random.randint(1, 31), + "price_per_year": random.randint(1, 31), + "price_per_quarter": random.randint(1, 31), + "price_per_half": random.randint(1, 31), } - service_stock_schedulers = [{'consumables': [n]} for n in range(1, 3)] - plan_service_item_handlers = [{entity + '_id': n} for n in range(1, 3)] + service_stock_schedulers = [{"consumables": [n]} for n in range(1, 3)] + plan_service_item_handlers = [{entity + "_id": n} for n in range(1, 3)] - model = bc.database.create(**extra, - consumable=(2, consumable), - service_stock_scheduler=service_stock_schedulers, - plan_service_item_handler=plan_service_item_handlers, - plan=plan) + model = bc.database.create( + **extra, + consumable=(2, consumable), + service_stock_scheduler=service_stock_schedulers, + plan_service_item_handler=plan_service_item_handlers, + plan=plan + ) command = Command() result = command.handle() assert result == None - if entity == 'subscription': - assert bc.database.list_of('payments.Subscription') == bc.format.to_dict(model.subscription) - assert bc.database.list_of('payments.PlanFinancing') == [] + if entity == "subscription": + assert bc.database.list_of("payments.Subscription") == bc.format.to_dict(model.subscription) + assert bc.database.list_of("payments.PlanFinancing") == [] assert 
tasks.renew_subscription_consumables.delay.call_args_list == [call(1), call(2)] assert tasks.renew_plan_financing_consumables.delay.call_args_list == [] - elif entity == 'plan_financing': - assert bc.database.list_of('payments.Subscription') == [] - assert bc.database.list_of('payments.PlanFinancing') == bc.format.to_dict(model.plan_financing) + elif entity == "plan_financing": + assert bc.database.list_of("payments.Subscription") == [] + assert bc.database.list_of("payments.PlanFinancing") == bc.format.to_dict(model.plan_financing) assert tasks.renew_subscription_consumables.delay.call_args_list == [] assert tasks.renew_plan_financing_consumables.delay.call_args_list == [call(1), call(2)] diff --git a/breathecode/payments/tests/management/commands/tests_retry_pending_bags.py b/breathecode/payments/tests/management/commands/tests_retry_pending_bags.py index 3d6d3b8d5..bed3bfec9 100644 --- a/breathecode/payments/tests/management/commands/tests_retry_pending_bags.py +++ b/breathecode/payments/tests/management/commands/tests_retry_pending_bags.py @@ -17,44 +17,89 @@ def apply_patch(db, monkeypatch: pytest.MonkeyPatch): m2 = MagicMock() m3 = MagicMock() - monkeypatch.setattr(tasks.build_plan_financing, 'delay', m1) - monkeypatch.setattr(tasks.build_subscription, 'delay', m2) - monkeypatch.setattr(tasks.build_free_subscription, 'delay', m3) + monkeypatch.setattr(tasks.build_plan_financing, "delay", m1) + monkeypatch.setattr(tasks.build_subscription, "delay", m2) + monkeypatch.setattr(tasks.build_free_subscription, "delay", m3) yield m1, m2, m3 -@pytest.mark.parametrize('bags, in_the_past', [ - (0, False), - ((2, { - 'was_delivered': True, - 'status': 'RENEWAL', - }), False), - ((2, { - 'was_delivered': True, - 'status': 'CHECKING', - }), False), - ((2, { - 'was_delivered': True, - 'status': 'PAID', - }), False), - ((2, { - 'was_delivered': False, - 'status': 'RENEWAL', - }), True), - ((2, { - 'was_delivered': False, - 'status': 'CHECKING', - }), True), - ((2, { - 'was_delivered': False, - 'status': 'RENEWAL', - }), False), - ((2, { - 'was_delivered': False, - 'status': 'CHECKING', - }), False), -]) +@pytest.mark.parametrize( + "bags, in_the_past", + [ + (0, False), + ( + ( + 2, + { + "was_delivered": True, + "status": "RENEWAL", + }, + ), + False, + ), + ( + ( + 2, + { + "was_delivered": True, + "status": "CHECKING", + }, + ), + False, + ), + ( + ( + 2, + { + "was_delivered": True, + "status": "PAID", + }, + ), + False, + ), + ( + ( + 2, + { + "was_delivered": False, + "status": "RENEWAL", + }, + ), + True, + ), + ( + ( + 2, + { + "was_delivered": False, + "status": "CHECKING", + }, + ), + True, + ), + ( + ( + 2, + { + "was_delivered": False, + "status": "RENEWAL", + }, + ), + False, + ), + ( + ( + 2, + { + "was_delivered": False, + "status": "CHECKING", + }, + ), + False, + ), + ], +) def test_nothing_to_process(bc: Breathecode, bags, in_the_past, utc_now, set_datetime): model = bc.database.create(bag=bags) if in_the_past: @@ -68,36 +113,60 @@ def test_nothing_to_process(bc: Breathecode, bags, in_the_past, utc_now, set_dat db = [] if bags: db = bc.format.to_dict(model.bag) - assert bc.database.list_of('payments.Bag') == db + assert bc.database.list_of("payments.Bag") == db assert tasks.build_plan_financing.delay.call_args_list == [] assert tasks.build_subscription.delay.call_args_list == [] assert tasks.build_free_subscription.delay.call_args_list == [] -@pytest.mark.parametrize('bags, invoices, type', [ - ((2, { - 'was_delivered': False, - 'status': 'PAID', - 'how_many_installments': 0, - }), 
{ - 'amount': 0, - }, 'free'), - ((2, { - 'was_delivered': False, - 'status': 'PAID', - 'how_many_installments': 2, - }), { - 'amount': 0, - }, 'financing'), - ((2, { - 'was_delivered': False, - 'status': 'PAID', - 'how_many_installments': 0, - }), { - 'amount': 2, - }, 'subscription'), -]) +@pytest.mark.parametrize( + "bags, invoices, type", + [ + ( + ( + 2, + { + "was_delivered": False, + "status": "PAID", + "how_many_installments": 0, + }, + ), + { + "amount": 0, + }, + "free", + ), + ( + ( + 2, + { + "was_delivered": False, + "status": "PAID", + "how_many_installments": 2, + }, + ), + { + "amount": 0, + }, + "financing", + ), + ( + ( + 2, + { + "was_delivered": False, + "status": "PAID", + "how_many_installments": 0, + }, + ), + { + "amount": 2, + }, + "subscription", + ), + ], +) def test_rescheduling_bags(bc: Breathecode, bags, invoices, type, utc_now, set_datetime): model = bc.database.create(bag=bags, invoice=invoices) set_datetime(utc_now + timedelta(minutes=11)) @@ -108,22 +177,22 @@ def test_rescheduling_bags(bc: Breathecode, bags, invoices, type, utc_now, set_d assert result == None db = bc.format.to_dict(model.bag) - assert bc.database.list_of('payments.Bag') == db + assert bc.database.list_of("payments.Bag") == db - if type == 'free': + if type == "free": assert tasks.build_plan_financing.delay.call_args_list == [] assert tasks.build_subscription.delay.call_args_list == [] assert tasks.build_free_subscription.delay.call_args_list == [call(1, 1)] - elif type == 'financing': + elif type == "financing": assert tasks.build_plan_financing.delay.call_args_list == [call(1, 1)] assert tasks.build_subscription.delay.call_args_list == [] assert tasks.build_free_subscription.delay.call_args_list == [] - elif type == 'subscription': + elif type == "subscription": assert tasks.build_plan_financing.delay.call_args_list == [] assert tasks.build_subscription.delay.call_args_list == [call(1, 1)] assert tasks.build_free_subscription.delay.call_args_list == [] else: - assert 0, 'type value is mandatory' + assert 0, "type value is mandatory" diff --git a/breathecode/payments/tests/mixins/__init__.py b/breathecode/payments/tests/mixins/__init__.py index dea3a86dc..ee827eaa3 100644 --- a/breathecode/payments/tests/mixins/__init__.py +++ b/breathecode/payments/tests/mixins/__init__.py @@ -1,4 +1,5 @@ """ commons mixins """ + from .payments_test_case import PaymentsTestCase # noqa: F401 diff --git a/breathecode/payments/tests/mixins/payments_test_case.py b/breathecode/payments/tests/mixins/payments_test_case.py index 9d2765231..e528c8234 100644 --- a/breathecode/payments/tests/mixins/payments_test_case.py +++ b/breathecode/payments/tests/mixins/payments_test_case.py @@ -1,14 +1,22 @@ """ Collections of mixins used to login in authorize microservice """ + from rest_framework.test import APITestCase -from breathecode.tests.mixins import (BreathecodeMixin, CacheMixin, DatetimeMixin, GenerateModelsMixin, - GenerateQueriesMixin, TokenMixin) +from breathecode.tests.mixins import ( + BreathecodeMixin, + CacheMixin, + DatetimeMixin, + GenerateModelsMixin, + GenerateQueriesMixin, + TokenMixin, +) -class PaymentsTestCase(APITestCase, GenerateModelsMixin, CacheMixin, TokenMixin, GenerateQueriesMixin, DatetimeMixin, - BreathecodeMixin): +class PaymentsTestCase( + APITestCase, GenerateModelsMixin, CacheMixin, TokenMixin, GenerateQueriesMixin, DatetimeMixin, BreathecodeMixin +): """MarketingTestCase with auth methods""" def tearDown(self): diff --git a/breathecode/payments/tests/models/tests_subscription.py 
b/breathecode/payments/tests/models/tests_subscription.py index 2c87962f5..b1275dd02 100644 --- a/breathecode/payments/tests/models/tests_subscription.py +++ b/breathecode/payments/tests/models/tests_subscription.py @@ -12,5 +12,5 @@ def test__without_auth(self): model = self.bc.database.create(subscription=1) with self.assertRaisesMessage(forms.ValidationError, "{'__all__': ['subscription-as-fully-paid']}"): - model.subscription.status = 'FULLY_PAID' + model.subscription.status = "FULLY_PAID" model.subscription.save() diff --git a/breathecode/payments/tests/signals/tests_consume_service.py b/breathecode/payments/tests/signals/tests_consume_service.py index d07aaac89..09f780f67 100644 --- a/breathecode/payments/tests/signals/tests_consume_service.py +++ b/breathecode/payments/tests/signals/tests_consume_service.py @@ -7,70 +7,82 @@ class TestSignal(LegacyAPITestCase): - @patch('breathecode.payments.signals.lose_service_permissions.send_robust', MagicMock()) + @patch("breathecode.payments.signals.lose_service_permissions.send_robust", MagicMock()) def test__consumable_how_many_minus_1__consume_gte_1(self, enable_signals): enable_signals() how_many_consume = random.randint(1, 100) how_many = -1 - consumable = {'how_many': how_many} + consumable = {"how_many": how_many} model = self.bc.database.create(consumable=consumable) consumable_db = self.bc.format.to_dict(model.consumable) - signals.consume_service.send(sender=model.consumable.__class__, - instance=model.consumable, - how_many=how_many_consume) + signals.consume_service.send( + sender=model.consumable.__class__, instance=model.consumable, how_many=how_many_consume + ) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), [ - { - **consumable_db, - 'how_many': how_many, - }, - ]) + self.assertEqual( + self.bc.database.list_of("payments.Consumable"), + [ + { + **consumable_db, + "how_many": how_many, + }, + ], + ) self.assertEqual(signals.lose_service_permissions.send_robust.call_args_list, []) - @patch('breathecode.payments.signals.lose_service_permissions.send_robust', MagicMock()) + @patch("breathecode.payments.signals.lose_service_permissions.send_robust", MagicMock()) def test__consumable_how_many_0__consume_gte_1(self, enable_signals): enable_signals() how_many_consume = random.randint(1, 100) how_many = 0 - consumable = {'how_many': how_many} + consumable = {"how_many": how_many} model = self.bc.database.create(consumable=consumable) consumable_db = self.bc.format.to_dict(model.consumable) - signals.consume_service.send(sender=model.consumable.__class__, - instance=model.consumable, - how_many=how_many_consume) + signals.consume_service.send( + sender=model.consumable.__class__, instance=model.consumable, how_many=how_many_consume + ) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), [ - { - **consumable_db, - 'how_many': how_many, - }, - ]) - self.assertEqual(signals.lose_service_permissions.send_robust.call_args_list, [ - call(sender=model.consumable.__class__, instance=model.consumable), - ]) + self.assertEqual( + self.bc.database.list_of("payments.Consumable"), + [ + { + **consumable_db, + "how_many": how_many, + }, + ], + ) + self.assertEqual( + signals.lose_service_permissions.send_robust.call_args_list, + [ + call(sender=model.consumable.__class__, instance=model.consumable), + ], + ) - @patch('breathecode.payments.signals.lose_service_permissions.send_robust', MagicMock()) + @patch("breathecode.payments.signals.lose_service_permissions.send_robust", MagicMock()) def 
test__consumable_how_many_gte_1__consume_gte_1(self, enable_signals): enable_signals() how_many_consume = random.randint(1, 100) how_many = random.randint(1, 100) - consumable = {'how_many': how_many + how_many_consume} + consumable = {"how_many": how_many + how_many_consume} model = self.bc.database.create(consumable=consumable) consumable_db = self.bc.format.to_dict(model.consumable) - signals.consume_service.send(sender=model.consumable.__class__, - instance=model.consumable, - how_many=how_many_consume) + signals.consume_service.send( + sender=model.consumable.__class__, instance=model.consumable, how_many=how_many_consume + ) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), [ - { - **consumable_db, - 'how_many': how_many, - }, - ]) + self.assertEqual( + self.bc.database.list_of("payments.Consumable"), + [ + { + **consumable_db, + "how_many": how_many, + }, + ], + ) self.assertEqual(signals.lose_service_permissions.send_robust.call_args_list, []) diff --git a/breathecode/payments/tests/signals/tests_lose_service_permissions.py b/breathecode/payments/tests/signals/tests_lose_service_permissions.py index 8428e7f00..7114ed1e0 100644 --- a/breathecode/payments/tests/signals/tests_lose_service_permissions.py +++ b/breathecode/payments/tests/signals/tests_lose_service_permissions.py @@ -13,21 +13,24 @@ def test__with_consumable(self, enable_signals): enable_signals() how_many = -1 - consumable = {'how_many': how_many} + consumable = {"how_many": how_many} model = self.bc.database.create(consumable=consumable) consumable_db = self.bc.format.to_dict(model.consumable) signals.lose_service_permissions.send(sender=model.consumable.__class__, instance=model.consumable) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), [ - { - **consumable_db, - 'how_many': how_many, - }, - ]) - self.assertEqual(self.bc.database.list_of('auth.User'), [self.bc.format.to_dict(model.user)]) + self.assertEqual( + self.bc.database.list_of("payments.Consumable"), + [ + { + **consumable_db, + "how_many": how_many, + }, + ], + ) + self.assertEqual(self.bc.database.list_of("auth.User"), [self.bc.format.to_dict(model.user)]) self.bc.check.queryset_with_pks(model.user.groups.all(), []) - self.assertEqual(self.bc.database.list_of('auth.Group'), []) + self.assertEqual(self.bc.database.list_of("auth.Group"), []) """ 🔽🔽🔽 With one Consumable, User and Group @@ -37,21 +40,24 @@ def test__with_consumable__with_group(self, enable_signals): enable_signals() how_many = -1 - consumable = {'how_many': how_many} + consumable = {"how_many": how_many} model = self.bc.database.create(consumable=consumable, group=1) consumable_db = self.bc.format.to_dict(model.consumable) signals.lose_service_permissions.send(sender=model.consumable.__class__, instance=model.consumable) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), [ - { - **consumable_db, - 'how_many': how_many, - }, - ]) - self.assertEqual(self.bc.database.list_of('auth.User'), [self.bc.format.to_dict(model.user)]) + self.assertEqual( + self.bc.database.list_of("payments.Consumable"), + [ + { + **consumable_db, + "how_many": how_many, + }, + ], + ) + self.assertEqual(self.bc.database.list_of("auth.User"), [self.bc.format.to_dict(model.user)]) self.bc.check.queryset_with_pks(model.user.groups.all(), [1]) - self.assertEqual(self.bc.database.list_of('auth.Group'), [self.bc.format.to_dict(model.group)]) + self.assertEqual(self.bc.database.list_of("auth.Group"), [self.bc.format.to_dict(model.group)]) """ 🔽🔽🔽 With two 
Consumable(how_many=-1), User and Group @@ -61,16 +67,16 @@ def test__with_two_consumables__with_group__how_many_minus_1(self, enable_signal enable_signals() how_many = -1 - consumable = {'how_many': how_many} + consumable = {"how_many": how_many} model = self.bc.database.create(consumable=(2, consumable), group=1) consumable_db = self.bc.format.to_dict(model.consumable) signals.lose_service_permissions.send(sender=model.consumable[0].__class__, instance=model.consumable[0]) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), consumable_db) - self.assertEqual(self.bc.database.list_of('auth.User'), [self.bc.format.to_dict(model.user)]) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), consumable_db) + self.assertEqual(self.bc.database.list_of("auth.User"), [self.bc.format.to_dict(model.user)]) self.bc.check.queryset_with_pks(model.user.groups.all(), [1]) - self.assertEqual(self.bc.database.list_of('auth.Group'), [self.bc.format.to_dict(model.group)]) + self.assertEqual(self.bc.database.list_of("auth.Group"), [self.bc.format.to_dict(model.group)]) """ 🔽🔽🔽 With two Consumable(how_many__gte=1), User and Group @@ -80,16 +86,16 @@ def test__with_two_consumables__with_group__how_many_gte_1(self, enable_signals) enable_signals() how_many = random.randint(1, 100) - consumable = {'how_many': how_many} + consumable = {"how_many": how_many} model = self.bc.database.create(consumable=(2, consumable), group=1) consumable_db = self.bc.format.to_dict(model.consumable) signals.lose_service_permissions.send(sender=model.consumable[0].__class__, instance=model.consumable[0]) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), consumable_db) - self.assertEqual(self.bc.database.list_of('auth.User'), [self.bc.format.to_dict(model.user)]) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), consumable_db) + self.assertEqual(self.bc.database.list_of("auth.User"), [self.bc.format.to_dict(model.user)]) self.bc.check.queryset_with_pks(model.user.groups.all(), [1]) - self.assertEqual(self.bc.database.list_of('auth.Group'), [self.bc.format.to_dict(model.group)]) + self.assertEqual(self.bc.database.list_of("auth.Group"), [self.bc.format.to_dict(model.group)]) """ 🔽🔽🔽 With two Consumable(how_many=0), User and Group @@ -99,16 +105,16 @@ def test__with_two_consumables__with_group__how_many_0(self, enable_signals): enable_signals() how_many = 0 - consumable = {'how_many': how_many} + consumable = {"how_many": how_many} model = self.bc.database.create(consumable=(2, consumable), group=1) consumable_db = self.bc.format.to_dict(model.consumable) signals.lose_service_permissions.send(sender=model.consumable[0].__class__, instance=model.consumable[0]) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), consumable_db) - self.assertEqual(self.bc.database.list_of('auth.User'), [self.bc.format.to_dict(model.user)]) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), consumable_db) + self.assertEqual(self.bc.database.list_of("auth.User"), [self.bc.format.to_dict(model.user)]) self.bc.check.queryset_with_pks(model.user.groups.all(), []) - self.assertEqual(self.bc.database.list_of('auth.Group'), [self.bc.format.to_dict(model.group)]) + self.assertEqual(self.bc.database.list_of("auth.Group"), [self.bc.format.to_dict(model.group)]) """ 🔽🔽🔽 With two Consumable[(how_many=0), (how_many__gte=1)], User and Group @@ -119,16 +125,16 @@ def test__with_two_consumables__with_group__first_with_how_many_0__second_with_h first_how_many = 0 
second_how_many = random.randint(1, 100) - consumables = [{'how_many': n} for n in [first_how_many, second_how_many]] + consumables = [{"how_many": n} for n in [first_how_many, second_how_many]] model = self.bc.database.create(consumable=consumables, group=1) consumable_db = self.bc.format.to_dict(model.consumable) signals.lose_service_permissions.send(sender=model.consumable[0].__class__, instance=model.consumable[0]) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), consumable_db) - self.assertEqual(self.bc.database.list_of('auth.User'), [self.bc.format.to_dict(model.user)]) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), consumable_db) + self.assertEqual(self.bc.database.list_of("auth.User"), [self.bc.format.to_dict(model.user)]) self.bc.check.queryset_with_pks(model.user.groups.all(), [1]) - self.assertEqual(self.bc.database.list_of('auth.Group'), [self.bc.format.to_dict(model.group)]) + self.assertEqual(self.bc.database.list_of("auth.Group"), [self.bc.format.to_dict(model.group)]) """ 🔽🔽🔽 With two Consumable[(how_many=0), ...(how_many__gte=1)], User and Group @@ -138,37 +144,38 @@ def test__with_two_consumables__with_group__first_with_how_many_0__rest_with_how enable_signals() length = random.randint(2, 5) - consumables = [{'how_many': 0 if n == 0 else random.randint(1, 100)} for n in range(length)] + consumables = [{"how_many": 0 if n == 0 else random.randint(1, 100)} for n in range(length)] model = self.bc.database.create(consumable=consumables, group=1) consumable_db = self.bc.format.to_dict(model.consumable) signals.lose_service_permissions.send(sender=model.consumable[0].__class__, instance=model.consumable[0]) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), consumable_db) - self.assertEqual(self.bc.database.list_of('auth.User'), [self.bc.format.to_dict(model.user)]) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), consumable_db) + self.assertEqual(self.bc.database.list_of("auth.User"), [self.bc.format.to_dict(model.user)]) self.bc.check.queryset_with_pks(model.user.groups.all(), [1]) - self.assertEqual(self.bc.database.list_of('auth.Group'), [self.bc.format.to_dict(model.group)]) + self.assertEqual(self.bc.database.list_of("auth.Group"), [self.bc.format.to_dict(model.group)]) """ 🔽🔽🔽 With two Consumable[(how_many=0), (how_many=-1)], User and Group """ def test__with_two_consumables__with_group__first_with_how_many_0__second_with_how_many_minus_1( - self, enable_signals): + self, enable_signals + ): enable_signals() first_how_many = 0 second_how_many = -1 - consumables = [{'how_many': n} for n in [first_how_many, second_how_many]] + consumables = [{"how_many": n} for n in [first_how_many, second_how_many]] model = self.bc.database.create(consumable=consumables, group=1) consumable_db = self.bc.format.to_dict(model.consumable) signals.lose_service_permissions.send(sender=model.consumable[0].__class__, instance=model.consumable[0]) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), consumable_db) - self.assertEqual(self.bc.database.list_of('auth.User'), [self.bc.format.to_dict(model.user)]) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), consumable_db) + self.assertEqual(self.bc.database.list_of("auth.User"), [self.bc.format.to_dict(model.user)]) self.bc.check.queryset_with_pks(model.user.groups.all(), [1]) - self.assertEqual(self.bc.database.list_of('auth.Group'), [self.bc.format.to_dict(model.group)]) + self.assertEqual(self.bc.database.list_of("auth.Group"), 
[self.bc.format.to_dict(model.group)]) """ 🔽🔽🔽 With two Consumable[(how_many=0), ...(how_many=-1)], User and Group @@ -178,16 +185,16 @@ def test__with_two_consumables__with_group__first_with_how_many_0__rest_with_how enable_signals() length = random.randint(2, 5) - consumables = [{'how_many': 0 if n == 0 else -1} for n in range(length)] + consumables = [{"how_many": 0 if n == 0 else -1} for n in range(length)] model = self.bc.database.create(consumable=consumables, group=1) consumable_db = self.bc.format.to_dict(model.consumable) signals.lose_service_permissions.send(sender=model.consumable[0].__class__, instance=model.consumable[0]) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), consumable_db) - self.assertEqual(self.bc.database.list_of('auth.User'), [self.bc.format.to_dict(model.user)]) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), consumable_db) + self.assertEqual(self.bc.database.list_of("auth.User"), [self.bc.format.to_dict(model.user)]) self.bc.check.queryset_with_pks(model.user.groups.all(), [1]) - self.assertEqual(self.bc.database.list_of('auth.Group'), [self.bc.format.to_dict(model.group)]) + self.assertEqual(self.bc.database.list_of("auth.Group"), [self.bc.format.to_dict(model.group)]) """ 🔽🔽🔽 With two Consumable[(how_many=0), (how_many=-1)], User and Group @@ -198,16 +205,16 @@ def test__with_two_consumables__with_group__first_with_how_many_0__second_with_h first_how_many = 0 second_how_many = 0 - consumables = [{'how_many': n} for n in [first_how_many, second_how_many]] + consumables = [{"how_many": n} for n in [first_how_many, second_how_many]] model = self.bc.database.create(consumable=consumables, group=1) consumable_db = self.bc.format.to_dict(model.consumable) signals.lose_service_permissions.send(sender=model.consumable[0].__class__, instance=model.consumable[0]) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), consumable_db) - self.assertEqual(self.bc.database.list_of('auth.User'), [self.bc.format.to_dict(model.user)]) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), consumable_db) + self.assertEqual(self.bc.database.list_of("auth.User"), [self.bc.format.to_dict(model.user)]) self.bc.check.queryset_with_pks(model.user.groups.all(), []) - self.assertEqual(self.bc.database.list_of('auth.Group'), [self.bc.format.to_dict(model.group)]) + self.assertEqual(self.bc.database.list_of("auth.Group"), [self.bc.format.to_dict(model.group)]) """ 🔽🔽🔽 With two Consumable[(how_many=0), ...(how_many=-1)], User and Group @@ -217,16 +224,16 @@ def test__with_two_consumables__with_group__first_with_how_many_0__rest_with_how enable_signals() length = random.randint(2, 5) - consumables = [{'how_many': 0 if n == 0 else 0} for n in range(length)] + consumables = [{"how_many": 0 if n == 0 else 0} for n in range(length)] model = self.bc.database.create(consumable=consumables, group=1) consumable_db = self.bc.format.to_dict(model.consumable) signals.lose_service_permissions.send(sender=model.consumable[0].__class__, instance=model.consumable[0]) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), consumable_db) - self.assertEqual(self.bc.database.list_of('auth.User'), [self.bc.format.to_dict(model.user)]) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), consumable_db) + self.assertEqual(self.bc.database.list_of("auth.User"), [self.bc.format.to_dict(model.user)]) self.bc.check.queryset_with_pks(model.user.groups.all(), []) - self.assertEqual(self.bc.database.list_of('auth.Group'), 
[self.bc.format.to_dict(model.group)]) + self.assertEqual(self.bc.database.list_of("auth.Group"), [self.bc.format.to_dict(model.group)]) """ 🔽🔽🔽 With two Consumable[(how_many=0, service=1), ...(how_many=0, service=2)], one User, two @@ -237,23 +244,29 @@ def test__with_two_consumables__with_two_group__first_with_how_many_0__rest_with enable_signals() length = random.randint(2, 5) - consumables = [{ - 'how_many': 0 if n == 0 else 0, - 'service_item_id': 1 if n == 0 else 2, - } for n in range(length)] - services = [{ - 'groups': [1] if n == 0 else [2], - } for n in range(length)] - service_items = [{'service_id': x} for x in range(1, 3)] + consumables = [ + { + "how_many": 0 if n == 0 else 0, + "service_item_id": 1 if n == 0 else 2, + } + for n in range(length) + ] + services = [ + { + "groups": [1] if n == 0 else [2], + } + for n in range(length) + ] + service_items = [{"service_id": x} for x in range(1, 3)] model = self.bc.database.create(consumable=consumables, group=2, service=services, service_item=service_items) consumable_db = self.bc.format.to_dict(model.consumable) signals.lose_service_permissions.send(sender=model.consumable[0].__class__, instance=model.consumable[0]) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), consumable_db) - self.assertEqual(self.bc.database.list_of('auth.User'), [self.bc.format.to_dict(model.user)]) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), consumable_db) + self.assertEqual(self.bc.database.list_of("auth.User"), [self.bc.format.to_dict(model.user)]) self.bc.check.queryset_with_pks(model.user.groups.all(), [2]) - self.assertEqual(self.bc.database.list_of('auth.Group'), self.bc.format.to_dict(model.group)) + self.assertEqual(self.bc.database.list_of("auth.Group"), self.bc.format.to_dict(model.group)) """ 🔽🔽🔽 With two Consumable[(how_many=0, service=1), ...(how_many=-1, service=2)], one User, two @@ -261,27 +274,34 @@ def test__with_two_consumables__with_two_group__first_with_how_many_0__rest_with """ def test__with_two_consumables__with_two_group__first_with_how_many_0__rest_with_how_many_minus_1( - self, enable_signals): + self, enable_signals + ): enable_signals() length = random.randint(2, 5) - consumables = [{ - 'how_many': 0 if n == 0 else -1, - 'service_item_id': 1 if n == 0 else 2, - } for n in range(length)] - services = [{ - 'groups': [1] if n == 0 else [2], - } for n in range(length)] - service_items = [{'service_id': x} for x in range(1, 3)] + consumables = [ + { + "how_many": 0 if n == 0 else -1, + "service_item_id": 1 if n == 0 else 2, + } + for n in range(length) + ] + services = [ + { + "groups": [1] if n == 0 else [2], + } + for n in range(length) + ] + service_items = [{"service_id": x} for x in range(1, 3)] model = self.bc.database.create(consumable=consumables, group=2, service=services, service_item=service_items) consumable_db = self.bc.format.to_dict(model.consumable) signals.lose_service_permissions.send(sender=model.consumable[0].__class__, instance=model.consumable[0]) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), consumable_db) - self.assertEqual(self.bc.database.list_of('auth.User'), [self.bc.format.to_dict(model.user)]) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), consumable_db) + self.assertEqual(self.bc.database.list_of("auth.User"), [self.bc.format.to_dict(model.user)]) self.bc.check.queryset_with_pks(model.user.groups.all(), [2]) - self.assertEqual(self.bc.database.list_of('auth.Group'), self.bc.format.to_dict(model.group)) + 
self.assertEqual(self.bc.database.list_of("auth.Group"), self.bc.format.to_dict(model.group)) """ 🔽🔽🔽 With two Consumable[(how_many=0, service=1), ...(how_many__gte=1, service=2)], one User, two @@ -289,24 +309,31 @@ def test__with_two_consumables__with_two_group__first_with_how_many_0__rest_with """ def test__with_two_consumables__with_two_group__first_with_how_many_0__rest_with_how_many_gte_1( - self, enable_signals): + self, enable_signals + ): enable_signals() length = random.randint(2, 5) - consumables = [{ - 'how_many': 0 if n == 0 else random.randint(1, 100), - 'service_item_id': 1 if n == 0 else 2, - } for n in range(length)] - services = [{ - 'groups': [1] if n == 0 else [2], - } for n in range(length)] - service_items = [{'service_id': x} for x in range(1, 3)] + consumables = [ + { + "how_many": 0 if n == 0 else random.randint(1, 100), + "service_item_id": 1 if n == 0 else 2, + } + for n in range(length) + ] + services = [ + { + "groups": [1] if n == 0 else [2], + } + for n in range(length) + ] + service_items = [{"service_id": x} for x in range(1, 3)] model = self.bc.database.create(consumable=consumables, group=2, service=services, service_item=service_items) consumable_db = self.bc.format.to_dict(model.consumable) signals.lose_service_permissions.send(sender=model.consumable[0].__class__, instance=model.consumable[0]) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), consumable_db) - self.assertEqual(self.bc.database.list_of('auth.User'), [self.bc.format.to_dict(model.user)]) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), consumable_db) + self.assertEqual(self.bc.database.list_of("auth.User"), [self.bc.format.to_dict(model.user)]) self.bc.check.queryset_with_pks(model.user.groups.all(), [2]) - self.assertEqual(self.bc.database.list_of('auth.Group'), self.bc.format.to_dict(model.group)) + self.assertEqual(self.bc.database.list_of("auth.Group"), self.bc.format.to_dict(model.group)) diff --git a/breathecode/payments/tests/signals/tests_subscription_created.py b/breathecode/payments/tests/signals/tests_subscription_created.py index 5cff0766d..c60cd4d62 100644 --- a/breathecode/payments/tests/signals/tests_subscription_created.py +++ b/breathecode/payments/tests/signals/tests_subscription_created.py @@ -10,18 +10,20 @@ @pytest.fixture(autouse=True) def mocks(db, monkeypatch): m1 = MagicMock() - monkeypatch.setattr(tasks.async_deliver_hook, 'delay', m1) + monkeypatch.setattr(tasks.async_deliver_hook, "delay", m1) yield m1 @pytest.fixture(autouse=True) def base(db, bc: Breathecode): - model = bc.database.create(hook={'event': 'subscription.subscription_created'}, - user={'username': 'test'}, - academy={ - 'slug': 'test', - 'available_as_saas': True, - }) + model = bc.database.create( + hook={"event": "subscription.subscription_created"}, + user={"username": "test"}, + academy={ + "slug": "test", + "available_as_saas": True, + }, + ) yield model @@ -29,41 +31,41 @@ def serializer(subscription, user=None, academy=None): academy_obj = None if academy: academy_obj = { - 'id': academy.id, - 'name': academy.name, - 'slug': academy.slug, + "id": academy.id, + "name": academy.name, + "slug": academy.slug, } user_obj = None if user: user_obj = { - 'first_name': user.first_name, - 'last_name': user.last_name, - 'email': user.email, + "first_name": user.first_name, + "last_name": user.last_name, + "email": user.email, } return { - 'id': subscription.id, - 'status': subscription.status, - 'status_message': subscription.status_message, - 'user': user_obj, - 
'academy': academy_obj, - 'selected_cohort_set': subscription.selected_cohort_set, - 'selected_mentorship_service_set': subscription.selected_mentorship_service_set, - 'selected_event_type_set': subscription.selected_event_type_set, - 'plans': [], - 'invoices': [], - 'next_payment_at': subscription.next_payment_at, - 'valid_until': subscription.valid_until, - 'paid_at': subscription.paid_at, - 'is_refundable': subscription.is_refundable, - 'pay_every': subscription.pay_every, - 'pay_every_unit': subscription.pay_every_unit, + "id": subscription.id, + "status": subscription.status, + "status_message": subscription.status_message, + "user": user_obj, + "academy": academy_obj, + "selected_cohort_set": subscription.selected_cohort_set, + "selected_mentorship_service_set": subscription.selected_mentorship_service_set, + "selected_event_type_set": subscription.selected_event_type_set, + "plans": [], + "invoices": [], + "next_payment_at": subscription.next_payment_at, + "valid_until": subscription.valid_until, + "paid_at": subscription.paid_at, + "is_refundable": subscription.is_refundable, + "pay_every": subscription.pay_every, + "pay_every_unit": subscription.pay_every_unit, } def test_nothing_happens(bc: Breathecode, enable_signals, enable_hook_manager, mocks, base): - enable_signals('breathecode.payments.signals.subscription_created') + enable_signals("breathecode.payments.signals.subscription_created") enable_hook_manager() mock = mocks @@ -73,7 +75,7 @@ def test_nothing_happens(bc: Breathecode, enable_signals, enable_hook_manager, m academy=base.academy, ) - assert bc.database.list_of('payments.subscription') == bc.format.to_dict(model.subscription) + assert bc.database.list_of("payments.subscription") == bc.format.to_dict(model.subscription) assert mock.call_args_list == [ call(base.hook.target, serializer(model.subscription[0], user=model.user, academy=model.academy), hook_id=1), diff --git a/breathecode/payments/tests/signals/tests_update_plan_m2m_service_items.py b/breathecode/payments/tests/signals/tests_update_plan_m2m_service_items.py index eb2ecedc2..3f3de14c5 100644 --- a/breathecode/payments/tests/signals/tests_update_plan_m2m_service_items.py +++ b/breathecode/payments/tests/signals/tests_update_plan_m2m_service_items.py @@ -11,56 +11,72 @@ @pytest.fixture(autouse=True) def mocks(db, monkeypatch): m1 = MagicMock() - monkeypatch.setattr(tasks.update_service_stock_schedulers, 'delay', m1) + monkeypatch.setattr(tasks.update_service_stock_schedulers, "delay", m1) yield m1 -@pytest.mark.parametrize('plan,empty,service_item', [ - (None, True, None), - ((2, { - 'time_of_life': 1, - 'time_of_life_unit': 'MONTH', - 'trial_duration': 0, - }), False, 2), -]) +@pytest.mark.parametrize( + "plan,empty,service_item", + [ + (None, True, None), + ( + ( + 2, + { + "time_of_life": 1, + "time_of_life_unit": "MONTH", + "trial_duration": 0, + }, + ), + False, + 2, + ), + ], +) def test_nothing_happens(bc: Breathecode, enable_signals, mocks, plan, empty, service_item): - enable_signals('django.db.models.signals.m2m_changed', 'breathecode.payments.signals.update_plan_m2m_service_items') + enable_signals("django.db.models.signals.m2m_changed", "breathecode.payments.signals.update_plan_m2m_service_items") mock = mocks model = bc.database.create(plan=plan, service_item=service_item) if empty: - assert bc.database.list_of('payments.Plan') == [] + assert bc.database.list_of("payments.Plan") == [] else: - assert bc.database.list_of('payments.Plan') == bc.format.to_dict(model.plan) + assert 
bc.database.list_of("payments.Plan") == bc.format.to_dict(model.plan) assert mock.call_args_list == [] -@pytest.mark.parametrize('attr,value ', [ - ('subscription', {}), - ('plan_financing', { - 'monthly_price': 10, - }), -]) +@pytest.mark.parametrize( + "attr,value ", + [ + ("subscription", {}), + ( + "plan_financing", + { + "monthly_price": 10, + }, + ), + ], +) def test__consumable_how_many_minus_1__consume_gte_1(bc: Breathecode, enable_signals, mocks, attr, value): - enable_signals('django.db.models.signals.m2m_changed', 'breathecode.payments.signals.update_plan_m2m_service_items') + enable_signals("django.db.models.signals.m2m_changed", "breathecode.payments.signals.update_plan_m2m_service_items") extra = {} - if attr == 'plan_financing': - value['plan_expires_at'] = bc.datetime.now() - extra['plan_financing'] = (2, value) + if attr == "plan_financing": + value["plan_expires_at"] = bc.datetime.now() + extra["plan_financing"] = (2, value) else: - extra['subscription'] = (2, value) + extra["subscription"] = (2, value) mock = mocks plan = { - 'time_of_life': 1, - 'time_of_life_unit': 'MONTH', - 'trial_duration': 0, + "time_of_life": 1, + "time_of_life_unit": "MONTH", + "trial_duration": 0, } model = bc.database.create(plan=(2, plan), service_item=2, **extra) @@ -68,7 +84,7 @@ def test__consumable_how_many_minus_1__consume_gte_1(bc: Breathecode, enable_sig for plan in model.plan: plan.service_items.add(*model.service_item) - assert bc.database.list_of('payments.Plan') == bc.format.to_dict(model.plan) + assert bc.database.list_of("payments.Plan") == bc.format.to_dict(model.plan) assert mock.call_args_list == [call(1), call(2)] assert [x.id for x in model.plan[0].service_items.all()] == [1, 2] diff --git a/breathecode/payments/tests/supervisors/tests_supervise_all_consumption_sessions.py b/breathecode/payments/tests/supervisors/tests_supervise_all_consumption_sessions.py index b9e1b3a00..97c248783 100644 --- a/breathecode/payments/tests/supervisors/tests_supervise_all_consumption_sessions.py +++ b/breathecode/payments/tests/supervisors/tests_supervise_all_consumption_sessions.py @@ -21,10 +21,13 @@ def __init__(self, bc: Breathecode): def list(self): supervisors = SupervisorModel.objects.all() - return [{ - 'task_module': supervisor.task_module, - 'task_name': supervisor.task_name, - } for supervisor in supervisors] + return [ + { + "task_module": supervisor.task_module, + "task_name": supervisor.task_name, + } + for supervisor in supervisors + ] @sync_to_async def alist(self): @@ -55,29 +58,31 @@ def supervisor(db, bc: Breathecode): def setup(db, monkeypatch: pytest.MonkeyPatch): from breathecode.monitoring.models import Supervisor - monkeypatch.setattr('logging.Logger.error', MagicMock()) - monkeypatch.setattr('breathecode.payments.supervisors.MIN_PENDING_SESSIONS', 2) - monkeypatch.setattr('breathecode.payments.supervisors.MIN_CANCELLED_SESSIONS', 2) - - fn_module = 'breathecode.payments.supervisors' - fn_name = 'supervise_all_consumption_sessions' - Supervisor.objects.get_or_create(task_module=fn_module, - task_name=fn_name, - defaults={ - 'delta': timedelta(seconds=3600), - 'ran_at': None, - }) + monkeypatch.setattr("logging.Logger.error", MagicMock()) + monkeypatch.setattr("breathecode.payments.supervisors.MIN_PENDING_SESSIONS", 2) + monkeypatch.setattr("breathecode.payments.supervisors.MIN_CANCELLED_SESSIONS", 2) + + fn_module = "breathecode.payments.supervisors" + fn_name = "supervise_all_consumption_sessions" + Supervisor.objects.get_or_create( + task_module=fn_module, + 
task_name=fn_name, + defaults={ + "delta": timedelta(seconds=3600), + "ran_at": None, + }, + ) yield def db(data={}): return { - 'delta': timedelta(seconds=3600), - 'id': 0, - 'ran_at': None, - 'task_module': '', - 'task_name': '', + "delta": timedelta(seconds=3600), + "id": 0, + "ran_at": None, + "task_module": "", + "task_name": "", **data, } @@ -87,78 +92,73 @@ def tests_no_sessions(supervisor: Supervisor): assert supervisor.list() == [ { - 'task_module': 'breathecode.payments.supervisors', - 'task_name': 'supervise_all_consumption_sessions', + "task_module": "breathecode.payments.supervisors", + "task_name": "supervise_all_consumption_sessions", }, ] - assert supervisor.log('breathecode.payments.supervisors', 'supervise_all_consumption_sessions') == [] + assert supervisor.log("breathecode.payments.supervisors", "supervise_all_consumption_sessions") == [] -def tests_so_much_pending_sessions(database: dfx.Database, supervisor: Supervisor, utc_now: datetime, - random: cfx.Random): +def tests_so_much_pending_sessions( + database: dfx.Database, supervisor: Supervisor, utc_now: datetime, random: cfx.Random +): eta = utc_now - timedelta(seconds=(3600 * random.int(1, 24)) - 1) - x = {'eta': eta} - consumption_sessions = [{'status': 'PENDING', **x} for _ in range(3)] + [{'status': 'DONE', **x} for _ in range(7)] + x = {"eta": eta} + consumption_sessions = [{"status": "PENDING", **x} for _ in range(3)] + [{"status": "DONE", **x} for _ in range(7)] database.create(consumption_session=consumption_sessions) supervise_all_consumption_sessions() assert supervisor.list() == [ { - 'task_module': 'breathecode.payments.supervisors', - 'task_name': 'supervise_all_consumption_sessions', + "task_module": "breathecode.payments.supervisors", + "task_name": "supervise_all_consumption_sessions", }, ] - assert supervisor.log('breathecode.payments.supervisors', 'supervise_all_consumption_sessions') == [ - 'There has so much pending consumption sessions, 3 pending and rate 42.86%', + assert supervisor.log("breathecode.payments.supervisors", "supervise_all_consumption_sessions") == [ + "There has so much pending consumption sessions, 3 pending and rate 42.86%", ] -def tests_so_much_cancelled_sessions__no_unsafe_sessions(database: dfx.Database, supervisor: Supervisor, - utc_now: datetime, random: cfx.Random): +def tests_so_much_cancelled_sessions__no_unsafe_sessions( + database: dfx.Database, supervisor: Supervisor, utc_now: datetime, random: cfx.Random +): eta = utc_now - timedelta(seconds=(3600 * random.int(1, 24)) - 1) - x = {'eta': eta} - consumption_sessions = [{ - 'status': 'CANCELLED', - **x - } for _ in range(2)] + [{ - 'status': 'DONE', - **x - } for _ in range(8)] + x = {"eta": eta} + consumption_sessions = [{"status": "CANCELLED", **x} for _ in range(2)] + [ + {"status": "DONE", **x} for _ in range(8) + ] database.create(consumption_session=consumption_sessions) supervise_all_consumption_sessions() assert supervisor.list() == [ { - 'task_module': 'breathecode.payments.supervisors', - 'task_name': 'supervise_all_consumption_sessions', + "task_module": "breathecode.payments.supervisors", + "task_name": "supervise_all_consumption_sessions", }, ] - assert supervisor.log('breathecode.payments.supervisors', 'supervise_all_consumption_sessions') == [] + assert supervisor.log("breathecode.payments.supervisors", "supervise_all_consumption_sessions") == [] -def tests_so_much_cancelled_sessions__unsafe_sessions(database: dfx.Database, supervisor: Supervisor, utc_now: datetime, - random: cfx.Random): +def 
tests_so_much_cancelled_sessions__unsafe_sessions( + database: dfx.Database, supervisor: Supervisor, utc_now: datetime, random: cfx.Random +): eta = utc_now - timedelta(seconds=(3600 * random.int(1, 24)) - 1) - x = {'eta': eta, 'operation_code': 'unsafe-consume-service-set', 'user_id': 1} - consumption_sessions = [{ - 'status': 'CANCELLED', - **x - } for _ in range(4)] + [{ - 'status': 'DONE', - **x - } for _ in range(6)] - model = database.create(consumption_session=consumption_sessions, user=1, service={'type': 'VOID'}) + x = {"eta": eta, "operation_code": "unsafe-consume-service-set", "user_id": 1} + consumption_sessions = [{"status": "CANCELLED", **x} for _ in range(4)] + [ + {"status": "DONE", **x} for _ in range(6) + ] + model = database.create(consumption_session=consumption_sessions, user=1, service={"type": "VOID"}) supervise_all_consumption_sessions() assert supervisor.list() == [ { - 'task_module': 'breathecode.payments.supervisors', - 'task_name': 'supervise_all_consumption_sessions', + "task_module": "breathecode.payments.supervisors", + "task_name": "supervise_all_consumption_sessions", }, ] - assert supervisor.log('breathecode.payments.supervisors', 'supervise_all_consumption_sessions') == [ - f'There has 66.67% cancelled consumption sessions, due to a bug or a cheater, user {model.user.email}', + assert supervisor.log("breathecode.payments.supervisors", "supervise_all_consumption_sessions") == [ + f"There has 66.67% cancelled consumption sessions, due to a bug or a cheater, user {model.user.email}", ] diff --git a/breathecode/payments/tests/tasks/tests_add_cohort_set_to_subscription.py b/breathecode/payments/tests/tasks/tests_add_cohort_set_to_subscription.py index 1b2ab9dfa..3571c8800 100644 --- a/breathecode/payments/tests/tasks/tests_add_cohort_set_to_subscription.py +++ b/breathecode/payments/tests/tasks/tests_add_cohort_set_to_subscription.py @@ -1,6 +1,7 @@ """ Test /answer """ + import random from logging import Logger from unittest.mock import MagicMock, call @@ -15,15 +16,15 @@ UTC_NOW = timezone.now() # enable this file to use the database -pytestmark = pytest.mark.usefixtures('db') +pytestmark = pytest.mark.usefixtures("db") @pytest.fixture(autouse=True) def setup(monkeypatch): # mock logger with monkeypatch - monkeypatch.setattr('logging.Logger.info', MagicMock()) - monkeypatch.setattr('logging.Logger.error', MagicMock()) + monkeypatch.setattr("logging.Logger.info", MagicMock()) + monkeypatch.setattr("logging.Logger.error", MagicMock()) yield @@ -43,27 +44,27 @@ def wrapper(): def test_subscription_set_not_found(bc: Breathecode, reset_mock_calls): if have_subscription := random.randint(0, 1): - subscription = {'status': random.choice(['CANCELLED', 'DEPRECATED'])} + subscription = {"status": random.choice(["CANCELLED", "DEPRECATED"])} model = bc.database.create(subscription=subscription) reset_mock_calls() tasks.add_cohort_set_to_subscription(1, 1) - assert bc.database.list_of('payments.CohortSet') == [] - assert bc.database.list_of('payments.CohortSetCohort') == [] + assert bc.database.list_of("payments.CohortSet") == [] + assert bc.database.list_of("payments.CohortSetCohort") == [] if have_subscription: - assert bc.database.list_of('payments.Subscription') == [bc.format.to_dict(model.subscription)] + assert bc.database.list_of("payments.Subscription") == [bc.format.to_dict(model.subscription)] else: - assert bc.database.list_of('payments.Subscription') == [] + assert bc.database.list_of("payments.Subscription") == [] assert Logger.info.call_args_list == [ - 
call('Starting add_cohort_set_to_subscription for subscription 1 cohort_set 1'), + call("Starting add_cohort_set_to_subscription for subscription 1 cohort_set 1"), # retry - call('Starting add_cohort_set_to_subscription for subscription 1 cohort_set 1'), + call("Starting add_cohort_set_to_subscription for subscription 1 cohort_set 1"), ] - assert Logger.error.call_args_list == [call('Subscription with id 1 not found', exc_info=True)] + assert Logger.error.call_args_list == [call("Subscription with id 1 not found", exc_info=True)] # When: cohort set not found @@ -75,84 +76,84 @@ def test_cohort_set_not_found(bc: Breathecode, reset_mock_calls): tasks.add_cohort_set_to_subscription(1, 1) - assert bc.database.list_of('payments.CohortSet') == [] - assert bc.database.list_of('payments.CohortSetCohort') == [] - assert bc.database.list_of('payments.Subscription') == [bc.format.to_dict(model.subscription)] + assert bc.database.list_of("payments.CohortSet") == [] + assert bc.database.list_of("payments.CohortSetCohort") == [] + assert bc.database.list_of("payments.Subscription") == [bc.format.to_dict(model.subscription)] assert Logger.info.call_args_list == [ - call('Starting add_cohort_set_to_subscription for subscription 1 cohort_set 1'), + call("Starting add_cohort_set_to_subscription for subscription 1 cohort_set 1"), # retry - call('Starting add_cohort_set_to_subscription for subscription 1 cohort_set 1'), + call("Starting add_cohort_set_to_subscription for subscription 1 cohort_set 1"), ] - assert Logger.error.call_args_list == [call('CohortSet with id 1 not found', exc_info=True)] + assert Logger.error.call_args_list == [call("CohortSet with id 1 not found", exc_info=True)] # When: subscription is over # Then: should abort the execution def test_subscription_is_over(bc: Breathecode, reset_mock_calls): - academy = {'available_as_saas': True} - subscription = {'valid_until': timezone.now() - timezone.timedelta(days=random.randint(1, 1000))} + academy = {"available_as_saas": True} + subscription = {"valid_until": timezone.now() - timezone.timedelta(days=random.randint(1, 1000))} model = bc.database.create(subscription=subscription, cohort_set=1, academy=academy) reset_mock_calls() tasks.add_cohort_set_to_subscription(1, 1) - assert bc.database.list_of('payments.CohortSet') == [bc.format.to_dict(model.cohort_set)] - assert bc.database.list_of('payments.CohortSetCohort') == [] - assert bc.database.list_of('payments.Subscription') == [bc.format.to_dict(model.subscription)] + assert bc.database.list_of("payments.CohortSet") == [bc.format.to_dict(model.cohort_set)] + assert bc.database.list_of("payments.CohortSetCohort") == [] + assert bc.database.list_of("payments.Subscription") == [bc.format.to_dict(model.subscription)] assert Logger.info.call_args_list == [ - call('Starting add_cohort_set_to_subscription for subscription 1 cohort_set 1'), + call("Starting add_cohort_set_to_subscription for subscription 1 cohort_set 1"), ] - assert Logger.error.call_args_list == [call('The subscription 1 is over', exc_info=True)] + assert Logger.error.call_args_list == [call("The subscription 1 is over", exc_info=True)] # When: subscription with selected_cohort_set # Then: should abort the execution def test_subscription_have_a_cohort_set(bc: Breathecode, reset_mock_calls, caplog): - academy = {'available_as_saas': True} - subscription = {'valid_until': timezone.now() + timezone.timedelta(days=random.randint(1, 1000))} + academy = {"available_as_saas": True} + subscription = {"valid_until": timezone.now() + 
timezone.timedelta(days=random.randint(1, 1000))} model = bc.database.create(subscription=subscription, cohort_set=1, academy=academy) reset_mock_calls() tasks.add_cohort_set_to_subscription(1, 1) - assert bc.database.list_of('payments.CohortSet') == [bc.format.to_dict(model.cohort_set)] - assert bc.database.list_of('payments.CohortSetCohort') == [] - assert bc.database.list_of('payments.Subscription') == [bc.format.to_dict(model.subscription)] + assert bc.database.list_of("payments.CohortSet") == [bc.format.to_dict(model.cohort_set)] + assert bc.database.list_of("payments.CohortSetCohort") == [] + assert bc.database.list_of("payments.Subscription") == [bc.format.to_dict(model.subscription)] assert Logger.info.call_args_list == [ - call('Starting add_cohort_set_to_subscription for subscription 1 cohort_set 1'), + call("Starting add_cohort_set_to_subscription for subscription 1 cohort_set 1"), ] - assert Logger.error.call_args_list == [call('Subscription with id 1 already have a cohort set', exc_info=True)] + assert Logger.error.call_args_list == [call("Subscription with id 1 already have a cohort set", exc_info=True)] # When: all is ok # Then: should add the cohort set to the subscription def test_all_is_ok(bc: Breathecode, reset_mock_calls): - academy = {'available_as_saas': True} + academy = {"available_as_saas": True} subscription = { - 'valid_until': timezone.now() + timezone.timedelta(days=random.randint(1, 1000)), - 'selected_cohort_set_id': None, + "valid_until": timezone.now() + timezone.timedelta(days=random.randint(1, 1000)), + "selected_cohort_set_id": None, } model = bc.database.create(subscription=subscription, cohort_set=1, academy=academy) reset_mock_calls() tasks.add_cohort_set_to_subscription(1, 1) - assert bc.database.list_of('payments.CohortSet') == [bc.format.to_dict(model.cohort_set)] - assert bc.database.list_of('payments.CohortSetCohort') == [] - assert bc.database.list_of('payments.Subscription') == [ + assert bc.database.list_of("payments.CohortSet") == [bc.format.to_dict(model.cohort_set)] + assert bc.database.list_of("payments.CohortSetCohort") == [] + assert bc.database.list_of("payments.Subscription") == [ { **bc.format.to_dict(model.subscription), - 'selected_cohort_set_id': 1, + "selected_cohort_set_id": 1, }, ] assert Logger.info.call_args_list == [ - call('Starting add_cohort_set_to_subscription for subscription 1 cohort_set 1'), + call("Starting add_cohort_set_to_subscription for subscription 1 cohort_set 1"), ] assert Logger.error.call_args_list == [] diff --git a/breathecode/payments/tests/tasks/tests_build_free_subscription.py b/breathecode/payments/tests/tasks/tests_build_free_subscription.py index 8291b82f4..4d0480acf 100644 --- a/breathecode/payments/tests/tasks/tests_build_free_subscription.py +++ b/breathecode/payments/tests/tasks/tests_build_free_subscription.py @@ -1,6 +1,7 @@ """ Test /answer """ + import logging import random from unittest.mock import MagicMock, call, patch @@ -20,65 +21,65 @@ def subscription_item(data={}): return { - 'id': 1, - 'academy_id': 1, - 'is_refundable': True, - 'paid_at': UTC_NOW, - 'pay_every': 1, - 'pay_every_unit': 'MONTH', - 'selected_cohort_set_id': None, - 'selected_event_type_set_id': None, - 'selected_mentorship_service_set_id': None, - 'status': 'ACTIVE', - 'status_message': None, - 'user_id': 1, - 'valid_until': UTC_NOW, + "id": 1, + "academy_id": 1, + "is_refundable": True, + "paid_at": UTC_NOW, + "pay_every": 1, + "pay_every_unit": "MONTH", + "selected_cohort_set_id": None, + 
"selected_event_type_set_id": None, + "selected_mentorship_service_set_id": None, + "status": "ACTIVE", + "status_message": None, + "user_id": 1, + "valid_until": UTC_NOW, **data, } @pytest.fixture(autouse=True) def setup(monkeypatch): - monkeypatch.setattr(activity_tasks.add_activity, 'delay', MagicMock()) + monkeypatch.setattr(activity_tasks.add_activity, "delay", MagicMock()) yield -#FIXME: create_v2 fail in this test file +# FIXME: create_v2 fail in this test file class PaymentsTestSuite(PaymentsTestCase): """ 🔽🔽🔽 Bag not found """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) def test_bag_not_found(self): build_free_subscription.delay(1, 1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) self.assertEqual( logging.Logger.info.call_args_list, [ - call('Starting build_free_subscription for bag 1'), + call("Starting build_free_subscription for bag 1"), # retry - call('Starting build_free_subscription for bag 1'), + call("Starting build_free_subscription for bag 1"), ], ) - self.assertEqual(logging.Logger.error.call_args_list, [call('Bag with id 1 not found', exc_info=True)]) + self.assertEqual(logging.Logger.error.call_args_list, [call("Bag with id 1 not found", exc_info=True)]) - self.assertEqual(self.bc.database.list_of('payments.Bag'), []) - self.assertEqual(self.bc.database.list_of('payments.Invoice'), []) - self.assertEqual(self.bc.database.list_of('payments.Subscription'), []) + self.assertEqual(self.bc.database.list_of("payments.Bag"), []) + self.assertEqual(self.bc.database.list_of("payments.Invoice"), []) + self.assertEqual(self.bc.database.list_of("payments.Subscription"), []) self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, []) """ 🔽🔽🔽 With Bag """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) def test_invoice_not_found(self): - bag = {'status': 'PAID', 'was_delivered': False} + bag = {"status": "PAID", "was_delivered": False} model = self.bc.database.create_v2(bag=bag) # remove prints from mixer @@ -87,42 +88,48 @@ def test_invoice_not_found(self): build_free_subscription.delay(1, 1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) self.assertEqual( logging.Logger.info.call_args_list, [ - call('Starting build_free_subscription for bag 1'), + call("Starting build_free_subscription for bag 1"), # retry - call('Starting build_free_subscription for bag 1'), + call("Starting build_free_subscription for bag 1"), + ], + ) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call("Invoice with id 1 not found", exc_info=True), ], ) - self.assertEqual(logging.Logger.error.call_args_list, [ - call('Invoice with id 1 not found', exc_info=True), - ]) - self.assertEqual(self.bc.database.list_of('payments.Bag'), [self.bc.format.to_dict(model.bag)]) - self.assertEqual(self.bc.database.list_of('payments.Invoice'), []) - self.assertEqual(self.bc.database.list_of('payments.Subscription'), []) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + self.assertEqual(self.bc.database.list_of("payments.Bag"), 
[self.bc.format.to_dict(model.bag)]) + self.assertEqual(self.bc.database.list_of("payments.Invoice"), []) + self.assertEqual(self.bc.database.list_of("payments.Subscription"), []) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) """ 🔽🔽🔽 With Bag and Invoice """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch.object(timezone, 'now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.payments.tasks.build_service_stock_scheduler_from_subscription.delay', MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch.object(timezone, "now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.payments.tasks.build_service_stock_scheduler_from_subscription.delay", MagicMock()) def test_without_plan(self): bag = { - 'status': 'PAID', - 'was_delivered': False, - 'chosen_period': random.choice(['MONTH', 'QUARTER', 'HALF', 'YEAR']), + "status": "PAID", + "was_delivered": False, + "chosen_period": random.choice(["MONTH", "QUARTER", "HALF", "YEAR"]), } - invoice = {'status': 'FULFILLED'} + invoice = {"status": "FULFILLED"} model = self.bc.database.create(bag=bag, invoice=invoice) @@ -132,51 +139,69 @@ def test_without_plan(self): build_free_subscription.delay(1, 1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting build_free_subscription for bag 1'), - ]) - self.assertEqual(logging.Logger.error.call_args_list, [ - call('Not have plans to associated to this free subscription in the bag 1', exc_info=True), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting build_free_subscription for bag 1"), + ], + ) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call("Not have plans to associated to this free subscription in the bag 1", exc_info=True), + ], + ) - self.assertEqual(self.bc.database.list_of('payments.Bag'), [ - { - **self.bc.format.to_dict(model.bag), - 'was_delivered': False, - }, - ]) - self.assertEqual(self.bc.database.list_of('payments.Invoice'), [ - self.bc.format.to_dict(model.invoice), - ]) - self.assertEqual(self.bc.database.list_of('payments.Subscription'), []) + self.assertEqual( + self.bc.database.list_of("payments.Bag"), + [ + { + **self.bc.format.to_dict(model.bag), + "was_delivered": False, + }, + ], + ) + self.assertEqual( + self.bc.database.list_of("payments.Invoice"), + [ + self.bc.format.to_dict(model.invoice), + ], + ) + self.assertEqual(self.bc.database.list_of("payments.Subscription"), []) self.assertEqual(tasks.build_service_stock_scheduler_from_subscription.delay.call_args_list, []) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) """ 🔽🔽🔽 With Bag, Invoice and Plan """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch.object(timezone, 'now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.payments.tasks.build_service_stock_scheduler_from_subscription.delay', MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", 
MagicMock()) + @patch.object(timezone, "now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.payments.tasks.build_service_stock_scheduler_from_subscription.delay", MagicMock()) def test_subscription_was_created__is_free_trial(self): bag = { - 'status': 'PAID', - 'was_delivered': False, - 'chosen_period': 'NO_SET', + "status": "PAID", + "was_delivered": False, + "chosen_period": "NO_SET", } - invoice = {'status': 'FULFILLED'} + invoice = {"status": "FULFILLED"} - plans = [{ - 'is_renewable': False, - 'trial_duration': random.randint(1, 100), - 'trial_duration_unit': random.choice(['DAY', 'WEEK', 'MONTH', 'YEAR']), - } for _ in range(2)] + plans = [ + { + "is_renewable": False, + "trial_duration": random.randint(1, 100), + "trial_duration_unit": random.choice(["DAY", "WEEK", "MONTH", "YEAR"]), + } + for _ in range(2) + ] model = self.bc.database.create(bag=bag, invoice=invoice, plan=plans) @@ -186,69 +211,90 @@ def test_subscription_was_created__is_free_trial(self): build_free_subscription.delay(1, 1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting build_free_subscription for bag 1'), - call('Free subscription was created with id 1 for plan 1'), - call('Free subscription was created with id 2 for plan 2'), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting build_free_subscription for bag 1"), + call("Free subscription was created with id 1 for plan 1"), + call("Free subscription was created with id 2 for plan 2"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(self.bc.database.list_of('payments.Bag'), [ - { - **self.bc.format.to_dict(model.bag), - 'was_delivered': True, - }, - ]) - self.assertEqual(self.bc.database.list_of('payments.Invoice'), [ - self.bc.format.to_dict(model.invoice), - ]) + self.assertEqual( + self.bc.database.list_of("payments.Bag"), + [ + { + **self.bc.format.to_dict(model.bag), + "was_delivered": True, + }, + ], + ) + self.assertEqual( + self.bc.database.list_of("payments.Invoice"), + [ + self.bc.format.to_dict(model.invoice), + ], + ) db = [] for plan in model.plan: unit = plan.trial_duration unit_type = plan.trial_duration_unit db.append( - subscription_item({ - 'conversion_info': None, - 'id': plan.id, - 'status': 'FREE_TRIAL', - 'paid_at': model.invoice.paid_at, - 'next_payment_at': model.invoice.paid_at + calculate_relative_delta(unit, unit_type), - 'valid_until': model.invoice.paid_at + calculate_relative_delta(unit, unit_type), - })) - - self.assertEqual(self.bc.database.list_of('payments.Subscription'), db) - self.assertEqual(tasks.build_service_stock_scheduler_from_subscription.delay.call_args_list, [ - call(1), - call(2), - ]) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + subscription_item( + { + "conversion_info": None, + "id": plan.id, + "status": "FREE_TRIAL", + "paid_at": model.invoice.paid_at, + "next_payment_at": model.invoice.paid_at + calculate_relative_delta(unit, unit_type), + "valid_until": model.invoice.paid_at + calculate_relative_delta(unit, unit_type), + } + ) + ) + + self.assertEqual(self.bc.database.list_of("payments.Subscription"), db) + self.assertEqual( + tasks.build_service_stock_scheduler_from_subscription.delay.call_args_list, + [ + call(1), + call(2), + ], + ) + self.bc.check.calls( + 
activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) """ 🔽🔽🔽 With Bag, Invoice with amount and Plan """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch.object(timezone, 'now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.payments.tasks.build_service_stock_scheduler_from_subscription.delay', MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch.object(timezone, "now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.payments.tasks.build_service_stock_scheduler_from_subscription.delay", MagicMock()) def test_invoice_with_amount(self): bag = { - 'status': 'PAID', - 'was_delivered': False, - 'chosen_period': random.choice(['MONTH', 'QUARTER', 'HALF', 'YEAR']), + "status": "PAID", + "was_delivered": False, + "chosen_period": random.choice(["MONTH", "QUARTER", "HALF", "YEAR"]), } - invoice = {'status': 'FULFILLED', 'amount': (random.random() * 99.99) + 0.01} + invoice = {"status": "FULFILLED", "amount": (random.random() * 99.99) + 0.01} - plans = [{ - 'is_renewable': False, - 'trial_duration': random.randint(1, 100), - 'trial_duration_unit': random.choice(['DAY', 'WEEK', 'MONTH', 'YEAR']), - } for _ in range(2)] + plans = [ + { + "is_renewable": False, + "trial_duration": random.randint(1, 100), + "trial_duration_unit": random.choice(["DAY", "WEEK", "MONTH", "YEAR"]), + } + for _ in range(2) + ] model = self.bc.database.create(bag=bag, invoice=invoice, plan=plans) @@ -258,54 +304,72 @@ def test_invoice_with_amount(self): build_free_subscription.delay(1, 1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting build_free_subscription for bag 1'), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting build_free_subscription for bag 1"), + ], + ) - self.assertEqual(logging.Logger.error.call_args_list, [ - call('The invoice with id 1 is invalid for a free subscription', exc_info=True), - ]) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call("The invoice with id 1 is invalid for a free subscription", exc_info=True), + ], + ) - self.assertEqual(self.bc.database.list_of('payments.Bag'), [ - { - **self.bc.format.to_dict(model.bag), - 'was_delivered': False, - }, - ]) - self.assertEqual(self.bc.database.list_of('payments.Invoice'), [ - self.bc.format.to_dict(model.invoice), - ]) - - self.assertEqual(self.bc.database.list_of('payments.Subscription'), []) + self.assertEqual( + self.bc.database.list_of("payments.Bag"), + [ + { + **self.bc.format.to_dict(model.bag), + "was_delivered": False, + }, + ], + ) + self.assertEqual( + self.bc.database.list_of("payments.Invoice"), + [ + self.bc.format.to_dict(model.invoice), + ], + ) + + self.assertEqual(self.bc.database.list_of("payments.Subscription"), []) self.assertEqual(tasks.build_service_stock_scheduler_from_subscription.delay.call_args_list, []) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) """ 🔽🔽🔽 With Bag with Cohort, Invoice and Plan """ - @patch('logging.Logger.info', MagicMock()) - 
@patch('logging.Logger.error', MagicMock()) - @patch.object(timezone, 'now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.payments.tasks.build_service_stock_scheduler_from_subscription.delay', MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch.object(timezone, "now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.payments.tasks.build_service_stock_scheduler_from_subscription.delay", MagicMock()) def test_subscription_was_created__bag_with_cohort(self): bag = { - 'status': 'PAID', - 'was_delivered': False, - 'chosen_period': random.choice(['MONTH', 'QUARTER', 'HALF', 'YEAR']), + "status": "PAID", + "was_delivered": False, + "chosen_period": random.choice(["MONTH", "QUARTER", "HALF", "YEAR"]), } - invoice = {'status': 'FULFILLED'} + invoice = {"status": "FULFILLED"} - plans = [{ - 'is_renewable': False, - 'trial_duration': random.randint(1, 100), - 'trial_duration_unit': random.choice(['DAY', 'WEEK', 'MONTH', 'YEAR']), - } for _ in range(2)] - academy = {'available_as_saas': True} + plans = [ + { + "is_renewable": False, + "trial_duration": random.randint(1, 100), + "trial_duration_unit": random.choice(["DAY", "WEEK", "MONTH", "YEAR"]), + } + for _ in range(2) + ] + academy = {"available_as_saas": True} model = self.bc.database.create(bag=bag, invoice=invoice, plan=plans, cohort=1, cohort_set=1, academy=academy) @@ -315,72 +379,96 @@ def test_subscription_was_created__bag_with_cohort(self): build_free_subscription.delay(1, 1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), [ - self.bc.format.to_dict(model.cohort), - ]) + self.assertEqual( + self.bc.database.list_of("admissions.Cohort"), + [ + self.bc.format.to_dict(model.cohort), + ], + ) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting build_free_subscription for bag 1'), - call('Free subscription was created with id 1 for plan 1'), - call('Free subscription was created with id 2 for plan 2'), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting build_free_subscription for bag 1"), + call("Free subscription was created with id 1 for plan 1"), + call("Free subscription was created with id 2 for plan 2"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(self.bc.database.list_of('payments.Bag'), [ - { - **self.bc.format.to_dict(model.bag), - 'was_delivered': True, - }, - ]) - self.assertEqual(self.bc.database.list_of('payments.Invoice'), [ - self.bc.format.to_dict(model.invoice), - ]) + self.assertEqual( + self.bc.database.list_of("payments.Bag"), + [ + { + **self.bc.format.to_dict(model.bag), + "was_delivered": True, + }, + ], + ) + self.assertEqual( + self.bc.database.list_of("payments.Invoice"), + [ + self.bc.format.to_dict(model.invoice), + ], + ) db = [] for plan in model.plan: unit = plan.trial_duration unit_type = plan.trial_duration_unit db.append( - subscription_item({ - 'conversion_info': None, - 'id': plan.id, - 'selected_cohort_set_id': 1, - 'status': 'FREE_TRIAL', - 'paid_at': model.invoice.paid_at, - 'next_payment_at': model.invoice.paid_at + calculate_relative_delta(unit, unit_type), - 'valid_until': model.invoice.paid_at + calculate_relative_delta(unit, unit_type), - })) - - self.assertEqual(self.bc.database.list_of('payments.Subscription'), db) - self.assertEqual(tasks.build_service_stock_scheduler_from_subscription.delay.call_args_list, [ - call(1), - call(2), - ]) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ 
- call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + subscription_item( + { + "conversion_info": None, + "id": plan.id, + "selected_cohort_set_id": 1, + "status": "FREE_TRIAL", + "paid_at": model.invoice.paid_at, + "next_payment_at": model.invoice.paid_at + calculate_relative_delta(unit, unit_type), + "valid_until": model.invoice.paid_at + calculate_relative_delta(unit, unit_type), + } + ) + ) + + self.assertEqual(self.bc.database.list_of("payments.Subscription"), db) + self.assertEqual( + tasks.build_service_stock_scheduler_from_subscription.delay.call_args_list, + [ + call(1), + call(2), + ], + ) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) """ 🔽🔽🔽 With Bag with EventTypeSet, Invoice and Plan """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch.object(timezone, 'now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.payments.tasks.build_service_stock_scheduler_from_subscription.delay', MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch.object(timezone, "now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.payments.tasks.build_service_stock_scheduler_from_subscription.delay", MagicMock()) def test_subscription_was_created__bag_with_event_type_set(self): bag = { - 'status': 'PAID', - 'was_delivered': False, - 'chosen_period': random.choice(['MONTH', 'QUARTER', 'HALF', 'YEAR']), + "status": "PAID", + "was_delivered": False, + "chosen_period": random.choice(["MONTH", "QUARTER", "HALF", "YEAR"]), } - invoice = {'status': 'FULFILLED'} + invoice = {"status": "FULFILLED"} - plans = [{ - 'is_renewable': False, - 'trial_duration': random.randint(1, 100), - 'trial_duration_unit': random.choice(['DAY', 'WEEK', 'MONTH', 'YEAR']), - } for _ in range(2)] + plans = [ + { + "is_renewable": False, + "trial_duration": random.randint(1, 100), + "trial_duration_unit": random.choice(["DAY", "WEEK", "MONTH", "YEAR"]), + } + for _ in range(2) + ] model = self.bc.database.create(bag=bag, invoice=invoice, plan=plans, event_type_set=1) @@ -390,70 +478,91 @@ def test_subscription_was_created__bag_with_event_type_set(self): build_free_subscription.delay(1, 1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting build_free_subscription for bag 1'), - call('Free subscription was created with id 1 for plan 1'), - call('Free subscription was created with id 2 for plan 2'), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting build_free_subscription for bag 1"), + call("Free subscription was created with id 1 for plan 1"), + call("Free subscription was created with id 2 for plan 2"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(self.bc.database.list_of('payments.Bag'), [ - { - **self.bc.format.to_dict(model.bag), - 'was_delivered': True, - }, - ]) - self.assertEqual(self.bc.database.list_of('payments.Invoice'), [ - self.bc.format.to_dict(model.invoice), - ]) + self.assertEqual( + self.bc.database.list_of("payments.Bag"), + [ + { + **self.bc.format.to_dict(model.bag), + "was_delivered": True, + }, + ], + ) + self.assertEqual( + self.bc.database.list_of("payments.Invoice"), + [ + self.bc.format.to_dict(model.invoice), + ], + ) db = [] 
for plan in model.plan: unit = plan.trial_duration unit_type = plan.trial_duration_unit db.append( - subscription_item({ - 'conversion_info': None, - 'id': plan.id, - 'selected_event_type_set_id': 1, - 'status': 'FREE_TRIAL', - 'paid_at': model.invoice.paid_at, - 'next_payment_at': model.invoice.paid_at + calculate_relative_delta(unit, unit_type), - 'valid_until': model.invoice.paid_at + calculate_relative_delta(unit, unit_type), - })) - - self.assertEqual(self.bc.database.list_of('payments.Subscription'), db) - self.assertEqual(tasks.build_service_stock_scheduler_from_subscription.delay.call_args_list, [ - call(1), - call(2), - ]) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + subscription_item( + { + "conversion_info": None, + "id": plan.id, + "selected_event_type_set_id": 1, + "status": "FREE_TRIAL", + "paid_at": model.invoice.paid_at, + "next_payment_at": model.invoice.paid_at + calculate_relative_delta(unit, unit_type), + "valid_until": model.invoice.paid_at + calculate_relative_delta(unit, unit_type), + } + ) + ) + + self.assertEqual(self.bc.database.list_of("payments.Subscription"), db) + self.assertEqual( + tasks.build_service_stock_scheduler_from_subscription.delay.call_args_list, + [ + call(1), + call(2), + ], + ) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) """ 🔽🔽🔽 With Bag with MentorshipServiceSet, Invoice and Plan """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch.object(timezone, 'now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.payments.tasks.build_service_stock_scheduler_from_subscription.delay', MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch.object(timezone, "now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.payments.tasks.build_service_stock_scheduler_from_subscription.delay", MagicMock()) def test_subscription_was_created__bag_with_mentorship_service_set(self): bag = { - 'status': 'PAID', - 'was_delivered': False, - 'chosen_period': random.choice(['MONTH', 'QUARTER', 'HALF', 'YEAR']), + "status": "PAID", + "was_delivered": False, + "chosen_period": random.choice(["MONTH", "QUARTER", "HALF", "YEAR"]), } - invoice = {'status': 'FULFILLED'} + invoice = {"status": "FULFILLED"} - plans = [{ - 'is_renewable': False, - 'trial_duration': random.randint(1, 100), - 'trial_duration_unit': random.choice(['DAY', 'WEEK', 'MONTH', 'YEAR']), - } for _ in range(2)] + plans = [ + { + "is_renewable": False, + "trial_duration": random.randint(1, 100), + "trial_duration_unit": random.choice(["DAY", "WEEK", "MONTH", "YEAR"]), + } + for _ in range(2) + ] model = self.bc.database.create(bag=bag, invoice=invoice, plan=plans, mentorship_service_set=1) @@ -463,72 +572,93 @@ def test_subscription_was_created__bag_with_mentorship_service_set(self): build_free_subscription.delay(1, 1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting build_free_subscription for bag 1'), - call('Free subscription was created with id 1 for plan 1'), - call('Free subscription was created with id 2 for plan 2'), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting 
build_free_subscription for bag 1"), + call("Free subscription was created with id 1 for plan 1"), + call("Free subscription was created with id 2 for plan 2"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(self.bc.database.list_of('payments.Bag'), [ - { - **self.bc.format.to_dict(model.bag), - 'was_delivered': True, - }, - ]) - self.assertEqual(self.bc.database.list_of('payments.Invoice'), [ - self.bc.format.to_dict(model.invoice), - ]) + self.assertEqual( + self.bc.database.list_of("payments.Bag"), + [ + { + **self.bc.format.to_dict(model.bag), + "was_delivered": True, + }, + ], + ) + self.assertEqual( + self.bc.database.list_of("payments.Invoice"), + [ + self.bc.format.to_dict(model.invoice), + ], + ) db = [] for plan in model.plan: unit = plan.trial_duration unit_type = plan.trial_duration_unit db.append( - subscription_item({ - 'conversion_info': None, - 'id': plan.id, - 'selected_mentorship_service_set_id': 1, - 'status': 'FREE_TRIAL', - 'paid_at': model.invoice.paid_at, - 'next_payment_at': model.invoice.paid_at + calculate_relative_delta(unit, unit_type), - 'valid_until': model.invoice.paid_at + calculate_relative_delta(unit, unit_type), - })) - - self.assertEqual(self.bc.database.list_of('payments.Subscription'), db) - self.assertEqual(tasks.build_service_stock_scheduler_from_subscription.delay.call_args_list, [ - call(1), - call(2), - ]) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + subscription_item( + { + "conversion_info": None, + "id": plan.id, + "selected_mentorship_service_set_id": 1, + "status": "FREE_TRIAL", + "paid_at": model.invoice.paid_at, + "next_payment_at": model.invoice.paid_at + calculate_relative_delta(unit, unit_type), + "valid_until": model.invoice.paid_at + calculate_relative_delta(unit, unit_type), + } + ) + ) + + self.assertEqual(self.bc.database.list_of("payments.Subscription"), db) + self.assertEqual( + tasks.build_service_stock_scheduler_from_subscription.delay.call_args_list, + [ + call(1), + call(2), + ], + ) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) """ 🔽🔽🔽 With Bag, Invoice and Plan with is_renewable=False """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch.object(timezone, 'now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.payments.tasks.build_service_stock_scheduler_from_subscription.delay', MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch.object(timezone, "now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.payments.tasks.build_service_stock_scheduler_from_subscription.delay", MagicMock()) def test_subscription_was_created__is_free__is_not_renewable(self): bag = { - 'status': 'PAID', - 'was_delivered': False, - 'chosen_period': 'NO_SET', + "status": "PAID", + "was_delivered": False, + "chosen_period": "NO_SET", } - invoice = {'status': 'FULFILLED'} + invoice = {"status": "FULFILLED"} - plans = [{ - 'is_renewable': False, - 'trial_duration': 0, - 'trial_duration_unit': random.choice(['DAY', 'WEEK', 'MONTH', 'YEAR']), - 'time_of_life': random.randint(1, 100), - 'time_of_life_unit': random.choice(['DAY', 'WEEK', 'MONTH', 'YEAR']), - } for _ in range(2)] + plans = [ + { + "is_renewable": False, + "trial_duration": 0, + "trial_duration_unit": random.choice(["DAY", 
"WEEK", "MONTH", "YEAR"]), + "time_of_life": random.randint(1, 100), + "time_of_life_unit": random.choice(["DAY", "WEEK", "MONTH", "YEAR"]), + } + for _ in range(2) + ] model = self.bc.database.create(bag=bag, invoice=invoice, plan=plans) @@ -538,71 +668,92 @@ def test_subscription_was_created__is_free__is_not_renewable(self): build_free_subscription.delay(1, 1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting build_free_subscription for bag 1'), - call('Free subscription was created with id 1 for plan 1'), - call('Free subscription was created with id 2 for plan 2'), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting build_free_subscription for bag 1"), + call("Free subscription was created with id 1 for plan 1"), + call("Free subscription was created with id 2 for plan 2"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(self.bc.database.list_of('payments.Bag'), [ - { - **self.bc.format.to_dict(model.bag), - 'was_delivered': True, - }, - ]) - self.assertEqual(self.bc.database.list_of('payments.Invoice'), [ - self.bc.format.to_dict(model.invoice), - ]) + self.assertEqual( + self.bc.database.list_of("payments.Bag"), + [ + { + **self.bc.format.to_dict(model.bag), + "was_delivered": True, + }, + ], + ) + self.assertEqual( + self.bc.database.list_of("payments.Invoice"), + [ + self.bc.format.to_dict(model.invoice), + ], + ) db = [] for plan in model.plan: unit = plan.time_of_life unit_type = plan.time_of_life_unit db.append( - subscription_item({ - 'conversion_info': None, - 'id': plan.id, - 'status': 'ACTIVE', - 'paid_at': model.invoice.paid_at, - 'next_payment_at': model.invoice.paid_at + calculate_relative_delta(unit, unit_type), - 'valid_until': model.invoice.paid_at + calculate_relative_delta(unit, unit_type), - })) - - self.assertEqual(self.bc.database.list_of('payments.Subscription'), db) - self.assertEqual(tasks.build_service_stock_scheduler_from_subscription.delay.call_args_list, [ - call(1), - call(2), - ]) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + subscription_item( + { + "conversion_info": None, + "id": plan.id, + "status": "ACTIVE", + "paid_at": model.invoice.paid_at, + "next_payment_at": model.invoice.paid_at + calculate_relative_delta(unit, unit_type), + "valid_until": model.invoice.paid_at + calculate_relative_delta(unit, unit_type), + } + ) + ) + + self.assertEqual(self.bc.database.list_of("payments.Subscription"), db) + self.assertEqual( + tasks.build_service_stock_scheduler_from_subscription.delay.call_args_list, + [ + call(1), + call(2), + ], + ) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) """ 🔽🔽🔽 With Bag, Invoice and Plan with is_renewable=True """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch.object(timezone, 'now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.payments.tasks.build_service_stock_scheduler_from_subscription.delay', MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch.object(timezone, "now", MagicMock(return_value=UTC_NOW)) + 
@patch("breathecode.payments.tasks.build_service_stock_scheduler_from_subscription.delay", MagicMock()) def test_subscription_was_created__is_free__is_renewable(self): bag = { - 'status': 'PAID', - 'was_delivered': False, - 'chosen_period': 'NO_SET', + "status": "PAID", + "was_delivered": False, + "chosen_period": "NO_SET", } - invoice = {'status': 'FULFILLED'} + invoice = {"status": "FULFILLED"} - plans = [{ - 'is_renewable': True, - 'trial_duration': 0, - 'trial_duration_unit': random.choice(['DAY', 'WEEK', 'MONTH', 'YEAR']), - 'time_of_life': random.randint(1, 100), - 'time_of_life_unit': random.choice(['DAY', 'WEEK', 'MONTH', 'YEAR']), - } for _ in range(2)] + plans = [ + { + "is_renewable": True, + "trial_duration": 0, + "trial_duration_unit": random.choice(["DAY", "WEEK", "MONTH", "YEAR"]), + "time_of_life": random.randint(1, 100), + "time_of_life_unit": random.choice(["DAY", "WEEK", "MONTH", "YEAR"]), + } + for _ in range(2) + ] model = self.bc.database.create(bag=bag, invoice=invoice, plan=plans) @@ -612,71 +763,92 @@ def test_subscription_was_created__is_free__is_renewable(self): build_free_subscription.delay(1, 1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting build_free_subscription for bag 1'), - call('Free subscription was created with id 1 for plan 1'), - call('Free subscription was created with id 2 for plan 2'), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting build_free_subscription for bag 1"), + call("Free subscription was created with id 1 for plan 1"), + call("Free subscription was created with id 2 for plan 2"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(self.bc.database.list_of('payments.Bag'), [ - { - **self.bc.format.to_dict(model.bag), - 'was_delivered': True, - }, - ]) - self.assertEqual(self.bc.database.list_of('payments.Invoice'), [ - self.bc.format.to_dict(model.invoice), - ]) + self.assertEqual( + self.bc.database.list_of("payments.Bag"), + [ + { + **self.bc.format.to_dict(model.bag), + "was_delivered": True, + }, + ], + ) + self.assertEqual( + self.bc.database.list_of("payments.Invoice"), + [ + self.bc.format.to_dict(model.invoice), + ], + ) db = [] for plan in model.plan: unit = plan.time_of_life unit_type = plan.time_of_life_unit db.append( - subscription_item({ - 'conversion_info': None, - 'id': plan.id, - 'status': 'ACTIVE', - 'paid_at': model.invoice.paid_at, - 'next_payment_at': model.invoice.paid_at + calculate_relative_delta(unit, unit_type), - 'valid_until': None, - })) - - self.assertEqual(self.bc.database.list_of('payments.Subscription'), db) - self.assertEqual(tasks.build_service_stock_scheduler_from_subscription.delay.call_args_list, [ - call(1), - call(2), - ]) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + subscription_item( + { + "conversion_info": None, + "id": plan.id, + "status": "ACTIVE", + "paid_at": model.invoice.paid_at, + "next_payment_at": model.invoice.paid_at + calculate_relative_delta(unit, unit_type), + "valid_until": None, + } + ) + ) + + self.assertEqual(self.bc.database.list_of("payments.Subscription"), db) + self.assertEqual( + tasks.build_service_stock_scheduler_from_subscription.delay.call_args_list, + [ + call(1), + call(2), + ], + ) + self.bc.check.calls( + 
activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) """ 🔽🔽🔽 With Bag, Invoice and Plan with is_renewable=True """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch.object(timezone, 'now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.payments.tasks.build_service_stock_scheduler_from_subscription.delay', MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch.object(timezone, "now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.payments.tasks.build_service_stock_scheduler_from_subscription.delay", MagicMock()) def test_subscription_was_created__is_free__is_renewable_with_conversion_info(self): bag = { - 'status': 'PAID', - 'was_delivered': False, - 'chosen_period': 'NO_SET', + "status": "PAID", + "was_delivered": False, + "chosen_period": "NO_SET", } - invoice = {'status': 'FULFILLED'} + invoice = {"status": "FULFILLED"} - plans = [{ - 'is_renewable': True, - 'trial_duration': 0, - 'trial_duration_unit': random.choice(['DAY', 'WEEK', 'MONTH', 'YEAR']), - 'time_of_life': random.randint(1, 100), - 'time_of_life_unit': random.choice(['DAY', 'WEEK', 'MONTH', 'YEAR']), - } for _ in range(2)] + plans = [ + { + "is_renewable": True, + "trial_duration": 0, + "trial_duration_unit": random.choice(["DAY", "WEEK", "MONTH", "YEAR"]), + "time_of_life": random.randint(1, 100), + "time_of_life_unit": random.choice(["DAY", "WEEK", "MONTH", "YEAR"]), + } + for _ in range(2) + ] model = self.bc.database.create(bag=bag, invoice=invoice, plan=plans) @@ -686,46 +858,62 @@ def test_subscription_was_created__is_free__is_renewable_with_conversion_info(se build_free_subscription.delay(1, 1, conversion_info='{"landing_url": "/"}') - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting build_free_subscription for bag 1'), - call('Free subscription was created with id 1 for plan 1'), - call('Free subscription was created with id 2 for plan 2'), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting build_free_subscription for bag 1"), + call("Free subscription was created with id 1 for plan 1"), + call("Free subscription was created with id 2 for plan 2"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(self.bc.database.list_of('payments.Bag'), [ - { - **self.bc.format.to_dict(model.bag), - 'was_delivered': True, - }, - ]) - self.assertEqual(self.bc.database.list_of('payments.Invoice'), [ - self.bc.format.to_dict(model.invoice), - ]) + self.assertEqual( + self.bc.database.list_of("payments.Bag"), + [ + { + **self.bc.format.to_dict(model.bag), + "was_delivered": True, + }, + ], + ) + self.assertEqual( + self.bc.database.list_of("payments.Invoice"), + [ + self.bc.format.to_dict(model.invoice), + ], + ) db = [] for plan in model.plan: unit = plan.time_of_life unit_type = plan.time_of_life_unit db.append( - subscription_item({ - 'conversion_info': { - 'landing_url': '/' - }, - 'id': plan.id, - 'status': 'ACTIVE', - 'paid_at': model.invoice.paid_at, - 'next_payment_at': model.invoice.paid_at + calculate_relative_delta(unit, unit_type), - 'valid_until': None, - })) - - self.assertEqual(self.bc.database.list_of('payments.Subscription'), db) - 
self.assertEqual(tasks.build_service_stock_scheduler_from_subscription.delay.call_args_list, [ - call(1), - call(2), - ]) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + subscription_item( + { + "conversion_info": {"landing_url": "/"}, + "id": plan.id, + "status": "ACTIVE", + "paid_at": model.invoice.paid_at, + "next_payment_at": model.invoice.paid_at + calculate_relative_delta(unit, unit_type), + "valid_until": None, + } + ) + ) + + self.assertEqual(self.bc.database.list_of("payments.Subscription"), db) + self.assertEqual( + tasks.build_service_stock_scheduler_from_subscription.delay.call_args_list, + [ + call(1), + call(2), + ], + ) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) diff --git a/breathecode/payments/tests/tasks/tests_build_plan_financing.py b/breathecode/payments/tests/tasks/tests_build_plan_financing.py index d0f05dca7..e461d19b9 100644 --- a/breathecode/payments/tests/tasks/tests_build_plan_financing.py +++ b/breathecode/payments/tests/tasks/tests_build_plan_financing.py @@ -1,6 +1,7 @@ """ Test /answer """ + import logging import random from unittest.mock import MagicMock, call, patch @@ -21,63 +22,64 @@ def plan_financing_item(data={}): return { - 'id': 1, - 'academy_id': 1, - 'monthly_price': 0, - 'plan_expires_at': UTC_NOW, - 'status': 'ACTIVE', - 'status_message': None, - 'user_id': 1, - 'valid_until': UTC_NOW, - 'next_payment_at': UTC_NOW, - 'selected_cohort_set_id': None, - 'selected_event_type_set_id': None, - 'selected_mentorship_service_set_id': None, + "id": 1, + "academy_id": 1, + "monthly_price": 0, + "plan_expires_at": UTC_NOW, + "status": "ACTIVE", + "status_message": None, + "user_id": 1, + "valid_until": UTC_NOW, + "next_payment_at": UTC_NOW, + "selected_cohort_set_id": None, + "selected_event_type_set_id": None, + "selected_mentorship_service_set_id": None, **data, } @pytest.fixture(autouse=True) def setup(monkeypatch): - monkeypatch.setattr(activity_tasks.add_activity, 'delay', MagicMock()) + monkeypatch.setattr(activity_tasks.add_activity, "delay", MagicMock()) yield -#FIXME: create_v2 fail in this test file +# FIXME: create_v2 fail in this test file class PaymentsTestSuite(PaymentsTestCase): """ 🔽🔽🔽 Bag not found """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) def test_bag_not_found(self): build_plan_financing.delay(1, 1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) self.assertEqual( logging.Logger.info.call_args_list, [ - call('Starting build_plan_financing for bag 1'), + call("Starting build_plan_financing for bag 1"), # retrying - call('Starting build_plan_financing for bag 1'), - ]) - self.assertEqual(logging.Logger.error.call_args_list, [call('Bag with id 1 not found', exc_info=True)]) - - self.assertEqual(self.bc.database.list_of('payments.Bag'), []) - self.assertEqual(self.bc.database.list_of('payments.Invoice'), []) - self.assertEqual(self.bc.database.list_of('payments.PlanFinancing'), []) + call("Starting build_plan_financing for bag 1"), + ], + ) + self.assertEqual(logging.Logger.error.call_args_list, [call("Bag with id 1 not found", exc_info=True)]) + + 
self.assertEqual(self.bc.database.list_of("payments.Bag"), []) + self.assertEqual(self.bc.database.list_of("payments.Invoice"), []) + self.assertEqual(self.bc.database.list_of("payments.PlanFinancing"), []) self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, []) """ 🔽🔽🔽 With Bag """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) def test_invoice_not_found(self): - bag = {'status': 'PAID', 'was_delivered': False} + bag = {"status": "PAID", "was_delivered": False} model = self.bc.database.create_v2(bag=bag) # remove prints from mixer @@ -86,48 +88,52 @@ def test_invoice_not_found(self): build_plan_financing.delay(1, 1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) self.assertEqual( logging.Logger.info.call_args_list, [ - call('Starting build_plan_financing for bag 1'), + call("Starting build_plan_financing for bag 1"), # retrying - call('Starting build_plan_financing for bag 1'), - ]) - self.assertEqual(logging.Logger.error.call_args_list, [call('Invoice with id 1 not found', exc_info=True)]) - - self.assertEqual(self.bc.database.list_of('payments.Bag'), [self.bc.format.to_dict(model.bag)]) - self.assertEqual(self.bc.database.list_of('payments.Invoice'), []) - self.assertEqual(self.bc.database.list_of('payments.PlanFinancing'), []) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + call("Starting build_plan_financing for bag 1"), + ], + ) + self.assertEqual(logging.Logger.error.call_args_list, [call("Invoice with id 1 not found", exc_info=True)]) + + self.assertEqual(self.bc.database.list_of("payments.Bag"), [self.bc.format.to_dict(model.bag)]) + self.assertEqual(self.bc.database.list_of("payments.Invoice"), []) + self.assertEqual(self.bc.database.list_of("payments.PlanFinancing"), []) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) """ 🔽🔽🔽 With Bag and Invoice """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch.object(timezone, 'now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.payments.tasks.build_service_stock_scheduler_from_plan_financing.delay', MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch.object(timezone, "now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.payments.tasks.build_service_stock_scheduler_from_plan_financing.delay", MagicMock()) def test_invoice_with_wrong_amount(self): bag = { - 'status': 'PAID', - 'was_delivered': False, - 'chosen_period': random.choice(['MONTH', 'QUARTER', 'HALF', 'YEAR']), + "status": "PAID", + "was_delivered": False, + "chosen_period": random.choice(["MONTH", "QUARTER", "HALF", "YEAR"]), } - invoice = {'status': 'FULFILLED'} + invoice = {"status": "FULFILLED"} months = 1 - if bag['chosen_period'] == 'QUARTER': + if bag["chosen_period"] == "QUARTER": months = 3 - elif bag['chosen_period'] == 'HALF': + elif bag["chosen_period"] == "HALF": months = 6 - elif bag['chosen_period'] == 'YEAR': + elif bag["chosen_period"] == "YEAR": months = 12 model = self.bc.database.create(bag=bag, invoice=invoice) @@ -138,48 +144,63 @@ def test_invoice_with_wrong_amount(self): 
build_plan_financing.delay(1, 1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) - - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting build_plan_financing for bag 1'), - ]) - self.assertEqual(logging.Logger.error.call_args_list, [ - call('An invoice without amount is prohibited (id: 1)', exc_info=True), - ]) - - self.assertEqual(self.bc.database.list_of('payments.Bag'), [ - { - **self.bc.format.to_dict(model.bag), - 'was_delivered': False, - }, - ]) - self.assertEqual(self.bc.database.list_of('payments.Invoice'), [ - self.bc.format.to_dict(model.invoice), - ]) - self.assertEqual(self.bc.database.list_of('payments.PlanFinancing'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) + + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting build_plan_financing for bag 1"), + ], + ) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call("An invoice without amount is prohibited (id: 1)", exc_info=True), + ], + ) + + self.assertEqual( + self.bc.database.list_of("payments.Bag"), + [ + { + **self.bc.format.to_dict(model.bag), + "was_delivered": False, + }, + ], + ) + self.assertEqual( + self.bc.database.list_of("payments.Invoice"), + [ + self.bc.format.to_dict(model.invoice), + ], + ) + self.assertEqual(self.bc.database.list_of("payments.PlanFinancing"), []) self.assertEqual(tasks.build_service_stock_scheduler_from_plan_financing.delay.call_args_list, []) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) """ 🔽🔽🔽 With Bag and Invoice with amount """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch.object(timezone, 'now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.payments.tasks.build_service_stock_scheduler_from_plan_financing.delay', MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch.object(timezone, "now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.payments.tasks.build_service_stock_scheduler_from_plan_financing.delay", MagicMock()) def test_subscription_was_created(self): amount = (random.random() * 99) + 1 bag = { - 'status': 'PAID', - 'was_delivered': False, - 'chosen_period': random.choice(['MONTH', 'QUARTER', 'HALF', 'YEAR']), + "status": "PAID", + "was_delivered": False, + "chosen_period": random.choice(["MONTH", "QUARTER", "HALF", "YEAR"]), } - invoice = {'status': 'FULFILLED', 'amount': amount} - plan = {'is_renewable': False} + invoice = {"status": "FULFILLED", "amount": amount} + plan = {"is_renewable": False} model = self.bc.database.create(bag=bag, invoice=invoice, plan=plan) @@ -191,66 +212,77 @@ def test_subscription_was_created(self): build_plan_financing.delay(1, 1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting build_plan_financing for bag 1'), - call('PlanFinancing was created with id 1'), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting build_plan_financing for bag 1"), + call("PlanFinancing was created with id 1"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, []) - 
self.assertEqual(self.bc.database.list_of('payments.Bag'), [ - { - **self.bc.format.to_dict(model.bag), - 'was_delivered': True, - }, - ]) self.assertEqual( - self.bc.database.list_of('payments.Invoice'), + self.bc.database.list_of("payments.Bag"), + [ + { + **self.bc.format.to_dict(model.bag), + "was_delivered": True, + }, + ], + ) + self.assertEqual( + self.bc.database.list_of("payments.Invoice"), [ { **self.bc.format.to_dict(model.invoice), # 'monthly_price': amount, }, - ]) - self.assertEqual(self.bc.database.list_of('payments.PlanFinancing'), [ - plan_financing_item({ - 'conversion_info': - None, - 'monthly_price': - model.invoice.amount, - 'valid_until': - model.invoice.paid_at + relativedelta(months=months - 1), - 'next_payment_at': - model.invoice.paid_at + relativedelta(months=1), - 'plan_expires_at': - model.invoice.paid_at + calculate_relative_delta(model.plan.time_of_life, model.plan.time_of_life_unit), - }), - ]) + ], + ) + self.assertEqual( + self.bc.database.list_of("payments.PlanFinancing"), + [ + plan_financing_item( + { + "conversion_info": None, + "monthly_price": model.invoice.amount, + "valid_until": model.invoice.paid_at + relativedelta(months=months - 1), + "next_payment_at": model.invoice.paid_at + relativedelta(months=1), + "plan_expires_at": model.invoice.paid_at + + calculate_relative_delta(model.plan.time_of_life, model.plan.time_of_life_unit), + } + ), + ], + ) self.assertEqual(tasks.build_service_stock_scheduler_from_plan_financing.delay.call_args_list, [call(1)]) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) """ 🔽🔽🔽 With Bag with Cohort and Invoice with amount """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch.object(timezone, 'now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.payments.tasks.build_service_stock_scheduler_from_plan_financing.delay', MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch.object(timezone, "now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.payments.tasks.build_service_stock_scheduler_from_plan_financing.delay", MagicMock()) def test_subscription_was_created__bag_with_cohort(self): amount = (random.random() * 99) + 1 bag = { - 'status': 'PAID', - 'was_delivered': False, - 'chosen_period': random.choice(['MONTH', 'QUARTER', 'HALF', 'YEAR']), + "status": "PAID", + "was_delivered": False, + "chosen_period": random.choice(["MONTH", "QUARTER", "HALF", "YEAR"]), } - invoice = {'status': 'FULFILLED', 'amount': amount} - plan = {'is_renewable': False} - academy = {'available_as_saas': True} + invoice = {"status": "FULFILLED", "amount": amount} + plan = {"is_renewable": False} + academy = {"available_as_saas": True} model = self.bc.database.create(bag=bag, invoice=invoice, plan=plan, cohort=1, cohort_set=1, academy=academy) @@ -262,71 +294,87 @@ def test_subscription_was_created__bag_with_cohort(self): build_plan_financing.delay(1, 1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), [ - self.bc.format.to_dict(model.cohort), - ]) + self.assertEqual( + self.bc.database.list_of("admissions.Cohort"), + [ + self.bc.format.to_dict(model.cohort), + ], + ) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting 
build_plan_financing for bag 1'), - call('PlanFinancing was created with id 1'), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting build_plan_financing for bag 1"), + call("PlanFinancing was created with id 1"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(self.bc.database.list_of('payments.Bag'), [ - { - **self.bc.format.to_dict(model.bag), - 'was_delivered': True, - }, - ]) self.assertEqual( - self.bc.database.list_of('payments.Invoice'), + self.bc.database.list_of("payments.Bag"), + [ + { + **self.bc.format.to_dict(model.bag), + "was_delivered": True, + }, + ], + ) + self.assertEqual( + self.bc.database.list_of("payments.Invoice"), [ { **self.bc.format.to_dict(model.invoice), # 'monthly_price': amount, }, - ]) - self.assertEqual(self.bc.database.list_of('payments.PlanFinancing'), [ - plan_financing_item({ - 'conversion_info': - None, - 'monthly_price': - model.invoice.amount, - 'selected_cohort_set_id': - 1, - 'valid_until': - model.invoice.paid_at + relativedelta(months=months - 1), - 'next_payment_at': - model.invoice.paid_at + relativedelta(months=1), - 'plan_expires_at': - model.invoice.paid_at + calculate_relative_delta(model.plan.time_of_life, model.plan.time_of_life_unit), - }), - ]) - - self.assertEqual(tasks.build_service_stock_scheduler_from_plan_financing.delay.call_args_list, [ - call(1), - ]) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + ], + ) + self.assertEqual( + self.bc.database.list_of("payments.PlanFinancing"), + [ + plan_financing_item( + { + "conversion_info": None, + "monthly_price": model.invoice.amount, + "selected_cohort_set_id": 1, + "valid_until": model.invoice.paid_at + relativedelta(months=months - 1), + "next_payment_at": model.invoice.paid_at + relativedelta(months=1), + "plan_expires_at": model.invoice.paid_at + + calculate_relative_delta(model.plan.time_of_life, model.plan.time_of_life_unit), + } + ), + ], + ) + + self.assertEqual( + tasks.build_service_stock_scheduler_from_plan_financing.delay.call_args_list, + [ + call(1), + ], + ) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) """ 🔽🔽🔽 With Bag with EventTypeSet and Invoice with amount """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch.object(timezone, 'now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.payments.tasks.build_service_stock_scheduler_from_plan_financing.delay', MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch.object(timezone, "now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.payments.tasks.build_service_stock_scheduler_from_plan_financing.delay", MagicMock()) def test_subscription_was_created__bag_with_event_type_set(self): amount = (random.random() * 99) + 1 bag = { - 'status': 'PAID', - 'was_delivered': False, - 'chosen_period': random.choice(['MONTH', 'QUARTER', 'HALF', 'YEAR']), + "status": "PAID", + "was_delivered": False, + "chosen_period": random.choice(["MONTH", "QUARTER", "HALF", "YEAR"]), } - invoice = {'status': 'FULFILLED', 'amount': amount} - plan = {'is_renewable': False} + invoice = {"status": "FULFILLED", "amount": amount} + plan = {"is_renewable": False} model = self.bc.database.create(bag=bag, invoice=invoice, plan=plan, event_type_set=1) @@ -338,69 
+386,82 @@ def test_subscription_was_created__bag_with_event_type_set(self): build_plan_financing.delay(1, 1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting build_plan_financing for bag 1'), - call('PlanFinancing was created with id 1'), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting build_plan_financing for bag 1"), + call("PlanFinancing was created with id 1"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(self.bc.database.list_of('payments.Bag'), [ - { - **self.bc.format.to_dict(model.bag), - 'was_delivered': True, - }, - ]) self.assertEqual( - self.bc.database.list_of('payments.Invoice'), + self.bc.database.list_of("payments.Bag"), + [ + { + **self.bc.format.to_dict(model.bag), + "was_delivered": True, + }, + ], + ) + self.assertEqual( + self.bc.database.list_of("payments.Invoice"), [ { **self.bc.format.to_dict(model.invoice), # 'monthly_price': amount, }, - ]) - self.assertEqual(self.bc.database.list_of('payments.PlanFinancing'), [ - plan_financing_item({ - 'conversion_info': - None, - 'monthly_price': - model.invoice.amount, - 'selected_event_type_set_id': - 1, - 'valid_until': - model.invoice.paid_at + relativedelta(months=months - 1), - 'next_payment_at': - model.invoice.paid_at + relativedelta(months=1), - 'plan_expires_at': - model.invoice.paid_at + calculate_relative_delta(model.plan.time_of_life, model.plan.time_of_life_unit), - }), - ]) - - self.assertEqual(tasks.build_service_stock_scheduler_from_plan_financing.delay.call_args_list, [ - call(1), - ]) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + ], + ) + self.assertEqual( + self.bc.database.list_of("payments.PlanFinancing"), + [ + plan_financing_item( + { + "conversion_info": None, + "monthly_price": model.invoice.amount, + "selected_event_type_set_id": 1, + "valid_until": model.invoice.paid_at + relativedelta(months=months - 1), + "next_payment_at": model.invoice.paid_at + relativedelta(months=1), + "plan_expires_at": model.invoice.paid_at + + calculate_relative_delta(model.plan.time_of_life, model.plan.time_of_life_unit), + } + ), + ], + ) + + self.assertEqual( + tasks.build_service_stock_scheduler_from_plan_financing.delay.call_args_list, + [ + call(1), + ], + ) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) """ 🔽🔽🔽 With Bag with MentorshipServiceSet and Invoice with amount """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch.object(timezone, 'now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.payments.tasks.build_service_stock_scheduler_from_plan_financing.delay', MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch.object(timezone, "now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.payments.tasks.build_service_stock_scheduler_from_plan_financing.delay", MagicMock()) def test_subscription_was_created__bag_with_mentorship_service_set(self): amount = (random.random() * 99) + 1 bag = { - 'status': 'PAID', - 'was_delivered': False, - 'chosen_period': random.choice(['MONTH', 'QUARTER', 'HALF', 'YEAR']), + "status": "PAID", + "was_delivered": False, 
+ "chosen_period": random.choice(["MONTH", "QUARTER", "HALF", "YEAR"]), } - invoice = {'status': 'FULFILLED', 'amount': amount} - plan = {'is_renewable': False} + invoice = {"status": "FULFILLED", "amount": amount} + plan = {"is_renewable": False} model = self.bc.database.create(bag=bag, invoice=invoice, plan=plan, mentorship_service_set=1) @@ -412,69 +473,82 @@ def test_subscription_was_created__bag_with_mentorship_service_set(self): build_plan_financing.delay(1, 1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting build_plan_financing for bag 1'), - call('PlanFinancing was created with id 1'), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting build_plan_financing for bag 1"), + call("PlanFinancing was created with id 1"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(self.bc.database.list_of('payments.Bag'), [ - { - **self.bc.format.to_dict(model.bag), - 'was_delivered': True, - }, - ]) self.assertEqual( - self.bc.database.list_of('payments.Invoice'), + self.bc.database.list_of("payments.Bag"), + [ + { + **self.bc.format.to_dict(model.bag), + "was_delivered": True, + }, + ], + ) + self.assertEqual( + self.bc.database.list_of("payments.Invoice"), [ { **self.bc.format.to_dict(model.invoice), # 'monthly_price': amount, }, - ]) - self.assertEqual(self.bc.database.list_of('payments.PlanFinancing'), [ - plan_financing_item({ - 'conversion_info': - None, - 'monthly_price': - model.invoice.amount, - 'selected_mentorship_service_set_id': - 1, - 'valid_until': - model.invoice.paid_at + relativedelta(months=months - 1), - 'next_payment_at': - model.invoice.paid_at + relativedelta(months=1), - 'plan_expires_at': - model.invoice.paid_at + calculate_relative_delta(model.plan.time_of_life, model.plan.time_of_life_unit), - }), - ]) - - self.assertEqual(tasks.build_service_stock_scheduler_from_plan_financing.delay.call_args_list, [ - call(1), - ]) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + ], + ) + self.assertEqual( + self.bc.database.list_of("payments.PlanFinancing"), + [ + plan_financing_item( + { + "conversion_info": None, + "monthly_price": model.invoice.amount, + "selected_mentorship_service_set_id": 1, + "valid_until": model.invoice.paid_at + relativedelta(months=months - 1), + "next_payment_at": model.invoice.paid_at + relativedelta(months=1), + "plan_expires_at": model.invoice.paid_at + + calculate_relative_delta(model.plan.time_of_life, model.plan.time_of_life_unit), + } + ), + ], + ) + + self.assertEqual( + tasks.build_service_stock_scheduler_from_plan_financing.delay.call_args_list, + [ + call(1), + ], + ) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) """ 🔽🔽🔽 With Bag with MentorshipServiceSet and Invoice with amount and conversion_info """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch.object(timezone, 'now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.payments.tasks.build_service_stock_scheduler_from_plan_financing.delay', MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch.object(timezone, "now", 
MagicMock(return_value=UTC_NOW)) + @patch("breathecode.payments.tasks.build_service_stock_scheduler_from_plan_financing.delay", MagicMock()) def test_subscription_was_created__bag_with_mentorship_service_set_with_conversion_info(self): amount = (random.random() * 99) + 1 bag = { - 'status': 'PAID', - 'was_delivered': False, - 'chosen_period': random.choice(['MONTH', 'QUARTER', 'HALF', 'YEAR']), + "status": "PAID", + "was_delivered": False, + "chosen_period": random.choice(["MONTH", "QUARTER", "HALF", "YEAR"]), } - invoice = {'status': 'FULFILLED', 'amount': amount} - plan = {'is_renewable': False} + invoice = {"status": "FULFILLED", "amount": amount} + plan = {"is_renewable": False} model = self.bc.database.create(bag=bag, invoice=invoice, plan=plan, mentorship_service_set=1) @@ -486,49 +560,61 @@ def test_subscription_was_created__bag_with_mentorship_service_set_with_conversi build_plan_financing.delay(1, 1, conversion_info='{"landing_url": "/home"}') - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting build_plan_financing for bag 1'), - call('PlanFinancing was created with id 1'), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting build_plan_financing for bag 1"), + call("PlanFinancing was created with id 1"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(self.bc.database.list_of('payments.Bag'), [ - { - **self.bc.format.to_dict(model.bag), - 'was_delivered': True, - }, - ]) self.assertEqual( - self.bc.database.list_of('payments.Invoice'), + self.bc.database.list_of("payments.Bag"), + [ + { + **self.bc.format.to_dict(model.bag), + "was_delivered": True, + }, + ], + ) + self.assertEqual( + self.bc.database.list_of("payments.Invoice"), [ { **self.bc.format.to_dict(model.invoice), # 'monthly_price': amount, }, - ]) - self.assertEqual(self.bc.database.list_of('payments.PlanFinancing'), [ - plan_financing_item({ - 'conversion_info': { - 'landing_url': '/home' - }, - 'monthly_price': - model.invoice.amount, - 'selected_mentorship_service_set_id': - 1, - 'valid_until': - model.invoice.paid_at + relativedelta(months=months - 1), - 'next_payment_at': - model.invoice.paid_at + relativedelta(months=1), - 'plan_expires_at': - model.invoice.paid_at + calculate_relative_delta(model.plan.time_of_life, model.plan.time_of_life_unit), - }), - ]) - - self.assertEqual(tasks.build_service_stock_scheduler_from_plan_financing.delay.call_args_list, [ - call(1), - ]) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + ], + ) + self.assertEqual( + self.bc.database.list_of("payments.PlanFinancing"), + [ + plan_financing_item( + { + "conversion_info": {"landing_url": "/home"}, + "monthly_price": model.invoice.amount, + "selected_mentorship_service_set_id": 1, + "valid_until": model.invoice.paid_at + relativedelta(months=months - 1), + "next_payment_at": model.invoice.paid_at + relativedelta(months=1), + "plan_expires_at": model.invoice.paid_at + + calculate_relative_delta(model.plan.time_of_life, model.plan.time_of_life_unit), + } + ), + ], + ) + + self.assertEqual( + tasks.build_service_stock_scheduler_from_plan_financing.delay.call_args_list, + [ + call(1), + ], + ) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", 
related_type="payments.Bag", related_id=1), + ], + ) diff --git a/breathecode/payments/tests/tasks/tests_build_service_stock_scheduler_from_plan_financing.py b/breathecode/payments/tests/tasks/tests_build_service_stock_scheduler_from_plan_financing.py index 17ad043b0..7aaa2f890 100644 --- a/breathecode/payments/tests/tasks/tests_build_service_stock_scheduler_from_plan_financing.py +++ b/breathecode/payments/tests/tasks/tests_build_service_stock_scheduler_from_plan_financing.py @@ -1,6 +1,7 @@ """ Test /answer """ + import logging import random from unittest.mock import MagicMock, call, patch @@ -19,51 +20,55 @@ def service_stock_scheduler_item(data={}): return { - 'id': 1, - 'plan_handler_id': None, - 'subscription_handler_id': None, - 'valid_until': None, + "id": 1, + "plan_handler_id": None, + "subscription_handler_id": None, + "valid_until": None, **data, } -#FIXME: create fail in this test file +# FIXME: create fail in this test file class PaymentsTestSuite(PaymentsTestCase): """ 🔽🔽🔽 Subscription not found """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) def test_subscription_not_found(self): build_service_stock_scheduler_from_plan_financing.delay(1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) self.assertEqual( logging.Logger.info.call_args_list, [ - call('Starting build_service_stock_scheduler_from_plan_financing for subscription 1'), + call("Starting build_service_stock_scheduler_from_plan_financing for subscription 1"), # retrying - call('Starting build_service_stock_scheduler_from_plan_financing for subscription 1'), - ]) - self.assertEqual(logging.Logger.error.call_args_list, [ - call('PlanFinancing with id 1 not found', exc_info=True), - ]) + call("Starting build_service_stock_scheduler_from_plan_financing for subscription 1"), + ], + ) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call("PlanFinancing with id 1 not found", exc_info=True), + ], + ) - self.assertEqual(self.bc.database.list_of('payments.PlanFinancing'), []) + self.assertEqual(self.bc.database.list_of("payments.PlanFinancing"), []) """ 🔽🔽🔽 With Subscription """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.payments.tasks.renew_plan_financing_consumables.delay', MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.payments.tasks.renew_plan_financing_consumables.delay", MagicMock()) def test_subscription_exists(self): subscription = { - 'plan_expires_at': UTC_NOW + relativedelta(months=2), - 'monthly_price': (random.random() * 99.99) + 0.01, + "plan_expires_at": UTC_NOW + relativedelta(months=2), + "monthly_price": (random.random() * 99.99) + 0.01, } model = self.bc.database.create(plan_financing=subscription) @@ -73,36 +78,41 @@ def test_subscription_exists(self): build_service_stock_scheduler_from_plan_financing.delay(1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) - self.assertEqual(logging.Logger.info.call_args_list, - [call('Starting build_service_stock_scheduler_from_plan_financing for subscription 1')]) + self.assertEqual( + logging.Logger.info.call_args_list, + [call("Starting build_service_stock_scheduler_from_plan_financing for 
subscription 1")], + ) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(self.bc.database.list_of('payments.PlanFinancing'), [ - self.bc.format.to_dict(model.plan_financing), - ]) + self.assertEqual( + self.bc.database.list_of("payments.PlanFinancing"), + [ + self.bc.format.to_dict(model.plan_financing), + ], + ) self.assertEqual(tasks.renew_plan_financing_consumables.delay.call_args_list, [call(1)]) self.bc.check.queryset_with_pks(model.plan_financing.plans.all(), []) - self.assertEqual(self.bc.database.list_of('payments.ServiceStockScheduler'), []) + self.assertEqual(self.bc.database.list_of("payments.ServiceStockScheduler"), []) """ 🔽🔽🔽 With Subscription with one ServiceItem """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.payments.tasks.renew_plan_financing_consumables.delay', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.payments.tasks.renew_plan_financing_consumables.delay", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_subscription_with_service_item(self): subscription = { - 'next_payment_at': UTC_NOW + relativedelta(months=1), - 'plan_expires_at': UTC_NOW + relativedelta(months=2), - 'valid_until': UTC_NOW + relativedelta(months=3), - 'monthly_price': (random.random() * 99.99) + 0.01, + "next_payment_at": UTC_NOW + relativedelta(months=1), + "plan_expires_at": UTC_NOW + relativedelta(months=2), + "valid_until": UTC_NOW + relativedelta(months=3), + "monthly_price": (random.random() * 99.99) + 0.01, } - plan = {'is_renewable': False} + plan = {"is_renewable": False} model = self.bc.database.create(plan_financing=subscription, service_item=1, plan_service_item=1, plan=plan) # remove prints from mixer @@ -111,43 +121,54 @@ def test_subscription_with_service_item(self): build_service_stock_scheduler_from_plan_financing.delay(1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting build_service_stock_scheduler_from_plan_financing for subscription 1'), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting build_service_stock_scheduler_from_plan_financing for subscription 1"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(self.bc.database.list_of('payments.PlanFinancing'), [ - self.bc.format.to_dict(model.plan_financing), - ]) + self.assertEqual( + self.bc.database.list_of("payments.PlanFinancing"), + [ + self.bc.format.to_dict(model.plan_financing), + ], + ) self.assertEqual(tasks.renew_plan_financing_consumables.delay.call_args_list, [call(1)]) self.bc.check.queryset_with_pks(model.plan_financing.plans.all(), [1]) - self.assertEqual(self.bc.database.list_of('payments.ServiceStockScheduler'), [ - service_stock_scheduler_item({ - 'valid_until': None, - 'plan_handler_id': 1, - }), - ]) + self.assertEqual( + self.bc.database.list_of("payments.ServiceStockScheduler"), + [ + service_stock_scheduler_item( + { + "valid_until": None, + "plan_handler_id": 1, + } + ), + ], + ) """ 🔽🔽🔽 With Subscription with one Plan with ServiceItem """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - 
@patch('breathecode.payments.tasks.renew_plan_financing_consumables.delay', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.payments.tasks.renew_plan_financing_consumables.delay", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_subscription_with_plan(self): subscription = { - 'next_payment_at': UTC_NOW + relativedelta(months=1), - 'plan_expires_at': UTC_NOW + relativedelta(months=2), - 'valid_until': UTC_NOW + relativedelta(months=3), - 'monthly_price': (random.random() * 99.99) + 0.01, + "next_payment_at": UTC_NOW + relativedelta(months=1), + "plan_expires_at": UTC_NOW + relativedelta(months=2), + "valid_until": UTC_NOW + relativedelta(months=3), + "monthly_price": (random.random() * 99.99) + 0.01, } - plan = {'is_renewable': False} + plan = {"is_renewable": False} model = self.bc.database.create(plan_financing=subscription, plan=plan, plan_service_item=1) @@ -157,53 +178,59 @@ def test_subscription_with_plan(self): build_service_stock_scheduler_from_plan_financing.delay(1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting build_service_stock_scheduler_from_plan_financing for subscription 1'), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting build_service_stock_scheduler_from_plan_financing for subscription 1"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(self.bc.database.list_of('payments.PlanFinancing'), [ - self.bc.format.to_dict(model.plan_financing), - ]) + self.assertEqual( + self.bc.database.list_of("payments.PlanFinancing"), + [ + self.bc.format.to_dict(model.plan_financing), + ], + ) self.assertEqual(tasks.renew_plan_financing_consumables.delay.call_args_list, [call(1)]) self.bc.check.queryset_with_pks(model.plan_financing.plans.all(), [1]) - self.assertEqual(self.bc.database.list_of('payments.ServiceStockScheduler'), [ - service_stock_scheduler_item({ - 'plan_handler_id': 1, - 'valid_until': None, - }), - ]) + self.assertEqual( + self.bc.database.list_of("payments.ServiceStockScheduler"), + [ + service_stock_scheduler_item( + { + "plan_handler_id": 1, + "valid_until": None, + } + ), + ], + ) """ 🔽🔽🔽 With Subscription with one ServiceItem and one Plan with ServiceItem """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.payments.tasks.renew_plan_financing_consumables.delay', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.payments.tasks.renew_plan_financing_consumables.delay", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_subscription_with_plan_and_service_item(self): subscription = { - 'next_payment_at': UTC_NOW + relativedelta(months=1), - 'plan_expires_at': UTC_NOW + relativedelta(months=2), - 'valid_until': UTC_NOW + relativedelta(months=3), - 'monthly_price': (random.random() * 99.99) + 0.01, + "next_payment_at": UTC_NOW + relativedelta(months=1), + "plan_expires_at": UTC_NOW + relativedelta(months=2), + "valid_until": UTC_NOW + relativedelta(months=3), + "monthly_price": (random.random() * 
99.99) + 0.01, } - plan_service_items = [{ - 'plan_id': 1, - 'service_item_id': n - } for n in range(1, 3)] + [{ - 'plan_id': 2, - 'service_item_id': n - } for n in range(3, 5)] - plan = {'is_renewable': False} - model = self.bc.database.create(plan_financing=subscription, - plan_service_item=plan_service_items, - plan=(2, plan), - service_item=4) + plan_service_items = [{"plan_id": 1, "service_item_id": n} for n in range(1, 3)] + [ + {"plan_id": 2, "service_item_id": n} for n in range(3, 5) + ] + plan = {"is_renewable": False} + model = self.bc.database.create( + plan_financing=subscription, plan_service_item=plan_service_items, plan=(2, plan), service_item=4 + ) # remove prints from mixer logging.Logger.info.call_args_list = [] @@ -211,38 +238,55 @@ def test_subscription_with_plan_and_service_item(self): build_service_stock_scheduler_from_plan_financing.delay(1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting build_service_stock_scheduler_from_plan_financing for subscription 1'), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting build_service_stock_scheduler_from_plan_financing for subscription 1"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(self.bc.database.list_of('payments.PlanFinancing'), [ - self.bc.format.to_dict(model.plan_financing), - ]) + self.assertEqual( + self.bc.database.list_of("payments.PlanFinancing"), + [ + self.bc.format.to_dict(model.plan_financing), + ], + ) self.assertEqual(tasks.renew_plan_financing_consumables.delay.call_args_list, [call(1)]) self.bc.check.queryset_with_pks(model.plan_financing.plans.all(), [1, 2]) - self.assertEqual(self.bc.database.list_of('payments.ServiceStockScheduler'), [ - service_stock_scheduler_item({ - 'id': 1, - 'plan_handler_id': 1, - 'valid_until': None, - }), - service_stock_scheduler_item({ - 'id': 2, - 'plan_handler_id': 2, - 'valid_until': None, - }), - service_stock_scheduler_item({ - 'id': 3, - 'plan_handler_id': 3, - 'valid_until': None, - }), - service_stock_scheduler_item({ - 'id': 4, - 'plan_handler_id': 4, - 'valid_until': None, - }), - ]) + self.assertEqual( + self.bc.database.list_of("payments.ServiceStockScheduler"), + [ + service_stock_scheduler_item( + { + "id": 1, + "plan_handler_id": 1, + "valid_until": None, + } + ), + service_stock_scheduler_item( + { + "id": 2, + "plan_handler_id": 2, + "valid_until": None, + } + ), + service_stock_scheduler_item( + { + "id": 3, + "plan_handler_id": 3, + "valid_until": None, + } + ), + service_stock_scheduler_item( + { + "id": 4, + "plan_handler_id": 4, + "valid_until": None, + } + ), + ], + ) diff --git a/breathecode/payments/tests/tasks/tests_build_service_stock_scheduler_from_subscription.py b/breathecode/payments/tests/tasks/tests_build_service_stock_scheduler_from_subscription.py index 6ca6bd4ec..f2578a2f2 100644 --- a/breathecode/payments/tests/tasks/tests_build_service_stock_scheduler_from_subscription.py +++ b/breathecode/payments/tests/tasks/tests_build_service_stock_scheduler_from_subscription.py @@ -1,6 +1,7 @@ """ Test /answer """ + import logging import random from unittest.mock import MagicMock, call, patch @@ -19,47 +20,51 @@ def service_stock_scheduler_item(data={}): return { - 'id': 1, - 'plan_handler_id': None, - 'subscription_handler_id': None, - 'valid_until': None, + "id": 1, + "plan_handler_id": None, + 
"subscription_handler_id": None, + "valid_until": None, **data, } -#FIXME: create fail in this test file +# FIXME: create fail in this test file class PaymentsTestSuite(PaymentsTestCase): """ 🔽🔽🔽 Subscription not found """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) def test_subscription_not_found(self): build_service_stock_scheduler_from_subscription.delay(1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) self.assertEqual( logging.Logger.info.call_args_list, [ - call('Starting build_service_stock_scheduler_from_subscription for subscription 1'), + call("Starting build_service_stock_scheduler_from_subscription for subscription 1"), # retrying - call('Starting build_service_stock_scheduler_from_subscription for subscription 1'), - ]) - self.assertEqual(logging.Logger.error.call_args_list, [ - call('Subscription with id 1 not found', exc_info=True), - ]) + call("Starting build_service_stock_scheduler_from_subscription for subscription 1"), + ], + ) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call("Subscription with id 1 not found", exc_info=True), + ], + ) - self.assertEqual(self.bc.database.list_of('payments.Subscription'), []) + self.assertEqual(self.bc.database.list_of("payments.Subscription"), []) """ 🔽🔽🔽 With Subscription """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.payments.tasks.renew_subscription_consumables.delay', MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.payments.tasks.renew_subscription_consumables.delay", MagicMock()) def test_subscription_exists(self): model = self.bc.database.create(subscription=1) @@ -69,33 +74,38 @@ def test_subscription_exists(self): build_service_stock_scheduler_from_subscription.delay(1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) - self.assertEqual(logging.Logger.info.call_args_list, - [call('Starting build_service_stock_scheduler_from_subscription for subscription 1')]) + self.assertEqual( + logging.Logger.info.call_args_list, + [call("Starting build_service_stock_scheduler_from_subscription for subscription 1")], + ) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(self.bc.database.list_of('payments.Subscription'), [ - self.bc.format.to_dict(model.subscription), - ]) + self.assertEqual( + self.bc.database.list_of("payments.Subscription"), + [ + self.bc.format.to_dict(model.subscription), + ], + ) self.assertEqual(tasks.renew_subscription_consumables.delay.call_args_list, [call(1)]) self.bc.check.queryset_with_pks(model.subscription.service_items.all(), []) self.bc.check.queryset_with_pks(model.subscription.plans.all(), []) - self.assertEqual(self.bc.database.list_of('payments.ServiceStockScheduler'), []) + self.assertEqual(self.bc.database.list_of("payments.ServiceStockScheduler"), []) """ 🔽🔽🔽 With Subscription with one ServiceItem """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.payments.tasks.renew_subscription_consumables.delay', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + 
@patch("logging.Logger.error", MagicMock()) + @patch("breathecode.payments.tasks.renew_subscription_consumables.delay", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_subscription_with_service_item(self): subscription = { - 'next_payment_at': UTC_NOW + relativedelta(months=1), - 'valid_until': UTC_NOW + relativedelta(months=2), + "next_payment_at": UTC_NOW + relativedelta(months=1), + "valid_until": UTC_NOW + relativedelta(months=2), } model = self.bc.database.create(subscription=subscription, service_item=1, subscription_service_item=1) @@ -105,42 +115,53 @@ def test_subscription_with_service_item(self): build_service_stock_scheduler_from_subscription.delay(1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting build_service_stock_scheduler_from_subscription for subscription 1'), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting build_service_stock_scheduler_from_subscription for subscription 1"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(self.bc.database.list_of('payments.Subscription'), [ - self.bc.format.to_dict(model.subscription), - ]) + self.assertEqual( + self.bc.database.list_of("payments.Subscription"), + [ + self.bc.format.to_dict(model.subscription), + ], + ) self.assertEqual(tasks.renew_subscription_consumables.delay.call_args_list, [call(1)]) self.bc.check.queryset_with_pks(model.subscription.service_items.all(), [1]) self.bc.check.queryset_with_pks(model.subscription.plans.all(), []) - self.assertEqual(self.bc.database.list_of('payments.ServiceStockScheduler'), [ - service_stock_scheduler_item({ - 'valid_until': None, - 'subscription_handler_id': 1, - }), - ]) + self.assertEqual( + self.bc.database.list_of("payments.ServiceStockScheduler"), + [ + service_stock_scheduler_item( + { + "valid_until": None, + "subscription_handler_id": 1, + } + ), + ], + ) """ 🔽🔽🔽 With Subscription with one Plan with ServiceItem """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.payments.tasks.renew_subscription_consumables.delay', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.payments.tasks.renew_subscription_consumables.delay", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_subscription_with_plan(self): subscription = { - 'next_payment_at': UTC_NOW + relativedelta(months=1), - 'valid_until': UTC_NOW + relativedelta(months=2), + "next_payment_at": UTC_NOW + relativedelta(months=1), + "valid_until": UTC_NOW + relativedelta(months=2), } - plan = {'is_renewable': False} + plan = {"is_renewable": False} model = self.bc.database.create(subscription=subscription, plan=plan, plan_service_item=1) @@ -150,54 +171,63 @@ def test_subscription_with_plan(self): build_service_stock_scheduler_from_subscription.delay(1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting build_service_stock_scheduler_from_subscription for subscription 1'), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + 
call("Starting build_service_stock_scheduler_from_subscription for subscription 1"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(self.bc.database.list_of('payments.Subscription'), [ - self.bc.format.to_dict(model.subscription), - ]) + self.assertEqual( + self.bc.database.list_of("payments.Subscription"), + [ + self.bc.format.to_dict(model.subscription), + ], + ) self.assertEqual(tasks.renew_subscription_consumables.delay.call_args_list, [call(1)]) self.bc.check.queryset_with_pks(model.subscription.service_items.all(), []) self.bc.check.queryset_with_pks(model.subscription.plans.all(), [1]) - self.assertEqual(self.bc.database.list_of('payments.ServiceStockScheduler'), [ - service_stock_scheduler_item({ - 'plan_handler_id': 1, - 'valid_until': None, - }), - ]) + self.assertEqual( + self.bc.database.list_of("payments.ServiceStockScheduler"), + [ + service_stock_scheduler_item( + { + "plan_handler_id": 1, + "valid_until": None, + } + ), + ], + ) """ 🔽🔽🔽 With Subscription with one ServiceItem and one Plan with ServiceItem """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.payments.tasks.renew_subscription_consumables.delay', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.payments.tasks.renew_subscription_consumables.delay", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_subscription_with_plan_and_service_item(self): subscription = { - 'next_payment_at': UTC_NOW + relativedelta(months=1), - 'valid_until': UTC_NOW + relativedelta(months=2), + "next_payment_at": UTC_NOW + relativedelta(months=1), + "valid_until": UTC_NOW + relativedelta(months=2), } - subscription_service_items = [{'service_item_id': n} for n in range(1, 3)] - plan_service_items = [{ - 'plan_id': 1, - 'service_item_id': n - } for n in range(3, 5)] + [{ - 'plan_id': 2, - 'service_item_id': n - } for n in range(5, 7)] - plan = {'is_renewable': False} - model = self.bc.database.create(subscription=subscription, - subscription_service_item=subscription_service_items, - plan=(2, plan), - plan_service_item=plan_service_items, - service_item=6) + subscription_service_items = [{"service_item_id": n} for n in range(1, 3)] + plan_service_items = [{"plan_id": 1, "service_item_id": n} for n in range(3, 5)] + [ + {"plan_id": 2, "service_item_id": n} for n in range(5, 7) + ] + plan = {"is_renewable": False} + model = self.bc.database.create( + subscription=subscription, + subscription_service_item=subscription_service_items, + plan=(2, plan), + plan_service_item=plan_service_items, + service_item=6, + ) # remove prints from mixer logging.Logger.info.call_args_list = [] @@ -205,49 +235,70 @@ def test_subscription_with_plan_and_service_item(self): build_service_stock_scheduler_from_subscription.delay(1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting build_service_stock_scheduler_from_subscription for subscription 1'), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting build_service_stock_scheduler_from_subscription for subscription 1"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, []) - 
self.assertEqual(self.bc.database.list_of('payments.Subscription'), [ - self.bc.format.to_dict(model.subscription), - ]) + self.assertEqual( + self.bc.database.list_of("payments.Subscription"), + [ + self.bc.format.to_dict(model.subscription), + ], + ) self.assertEqual(tasks.renew_subscription_consumables.delay.call_args_list, [call(1)]) self.bc.check.queryset_with_pks(model.subscription.service_items.all(), [1, 2]) self.bc.check.queryset_with_pks(model.subscription.plans.all(), [1, 2]) - self.assertEqual(self.bc.database.list_of('payments.ServiceStockScheduler'), [ - service_stock_scheduler_item({ - 'id': 1, - 'subscription_handler_id': 1, - 'valid_until': None, - }), - service_stock_scheduler_item({ - 'id': 2, - 'subscription_handler_id': 2, - 'valid_until': None, - }), - service_stock_scheduler_item({ - 'id': 3, - 'plan_handler_id': 1, - 'valid_until': None, - }), - service_stock_scheduler_item({ - 'id': 4, - 'plan_handler_id': 2, - 'valid_until': None, - }), - service_stock_scheduler_item({ - 'id': 5, - 'plan_handler_id': 3, - 'valid_until': None, - }), - service_stock_scheduler_item({ - 'id': 6, - 'plan_handler_id': 4, - 'valid_until': None, - }), - ]) + self.assertEqual( + self.bc.database.list_of("payments.ServiceStockScheduler"), + [ + service_stock_scheduler_item( + { + "id": 1, + "subscription_handler_id": 1, + "valid_until": None, + } + ), + service_stock_scheduler_item( + { + "id": 2, + "subscription_handler_id": 2, + "valid_until": None, + } + ), + service_stock_scheduler_item( + { + "id": 3, + "plan_handler_id": 1, + "valid_until": None, + } + ), + service_stock_scheduler_item( + { + "id": 4, + "plan_handler_id": 2, + "valid_until": None, + } + ), + service_stock_scheduler_item( + { + "id": 5, + "plan_handler_id": 3, + "valid_until": None, + } + ), + service_stock_scheduler_item( + { + "id": 6, + "plan_handler_id": 4, + "valid_until": None, + } + ), + ], + ) diff --git a/breathecode/payments/tests/tasks/tests_build_subscription.py b/breathecode/payments/tests/tasks/tests_build_subscription.py index ed78bf9f3..733eb35d5 100644 --- a/breathecode/payments/tests/tasks/tests_build_subscription.py +++ b/breathecode/payments/tests/tasks/tests_build_subscription.py @@ -1,6 +1,7 @@ """ Test /answer """ + import logging import random from unittest.mock import MagicMock, call, patch @@ -20,66 +21,70 @@ def subscription_item(data={}): return { - 'id': 1, - 'selected_cohort_set_id': None, - 'selected_event_type_set_id': None, - 'selected_mentorship_service_set_id': None, - 'academy_id': 1, - 'is_refundable': True, - 'paid_at': UTC_NOW, - 'pay_every': 1, - 'pay_every_unit': 'MONTH', - 'status': 'ACTIVE', - 'status_message': None, - 'user_id': 1, - 'valid_until': UTC_NOW, + "id": 1, + "selected_cohort_set_id": None, + "selected_event_type_set_id": None, + "selected_mentorship_service_set_id": None, + "academy_id": 1, + "is_refundable": True, + "paid_at": UTC_NOW, + "pay_every": 1, + "pay_every_unit": "MONTH", + "status": "ACTIVE", + "status_message": None, + "user_id": 1, + "valid_until": UTC_NOW, **data, } @pytest.fixture(autouse=True) def setup(monkeypatch): - monkeypatch.setattr(activity_tasks.add_activity, 'delay', MagicMock()) + monkeypatch.setattr(activity_tasks.add_activity, "delay", MagicMock()) yield -#FIXME: create_v2 fail in this test file +# FIXME: create_v2 fail in this test file class PaymentsTestSuite(PaymentsTestCase): """ 🔽🔽🔽 Bag not found """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) + @patch("logging.Logger.info", 
MagicMock()) + @patch("logging.Logger.error", MagicMock()) def test_bag_not_found(self): build_subscription.delay(1, 1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) self.assertEqual( logging.Logger.info.call_args_list, [ - call('Starting build_subscription for bag 1'), + call("Starting build_subscription for bag 1"), # retrying - call('Starting build_subscription for bag 1'), - ]) - self.assertEqual(logging.Logger.error.call_args_list, [ - call('Bag with id 1 not found', exc_info=True), - ]) - - self.assertEqual(self.bc.database.list_of('payments.Bag'), []) - self.assertEqual(self.bc.database.list_of('payments.Invoice'), []) - self.assertEqual(self.bc.database.list_of('payments.Subscription'), []) + call("Starting build_subscription for bag 1"), + ], + ) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call("Bag with id 1 not found", exc_info=True), + ], + ) + + self.assertEqual(self.bc.database.list_of("payments.Bag"), []) + self.assertEqual(self.bc.database.list_of("payments.Invoice"), []) + self.assertEqual(self.bc.database.list_of("payments.Subscription"), []) self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, []) """ 🔽🔽🔽 With Bag """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) def test_invoice_not_found(self): - bag = {'status': 'PAID', 'was_delivered': False} + bag = {"status": "PAID", "was_delivered": False} model = self.bc.database.create_v2(bag=bag) # remove prints from mixer @@ -88,50 +93,57 @@ def test_invoice_not_found(self): build_subscription.delay(1, 1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) self.assertEqual( logging.Logger.info.call_args_list, [ - call('Starting build_subscription for bag 1'), + call("Starting build_subscription for bag 1"), # retrying - call('Starting build_subscription for bag 1'), - ]) - self.assertEqual(logging.Logger.error.call_args_list, [ - call('Invoice with id 1 not found', exc_info=True), - ]) - - self.assertEqual(self.bc.database.list_of('payments.Bag'), [self.bc.format.to_dict(model.bag)]) - self.assertEqual(self.bc.database.list_of('payments.Invoice'), []) - self.assertEqual(self.bc.database.list_of('payments.Subscription'), []) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + call("Starting build_subscription for bag 1"), + ], + ) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call("Invoice with id 1 not found", exc_info=True), + ], + ) + + self.assertEqual(self.bc.database.list_of("payments.Bag"), [self.bc.format.to_dict(model.bag)]) + self.assertEqual(self.bc.database.list_of("payments.Invoice"), []) + self.assertEqual(self.bc.database.list_of("payments.Subscription"), []) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) """ 🔽🔽🔽 With Bag and Invoice """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch.object(timezone, 'now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.payments.tasks.build_service_stock_scheduler_from_subscription.delay', MagicMock()) + @patch("logging.Logger.info", MagicMock()) + 
@patch("logging.Logger.error", MagicMock()) + @patch.object(timezone, "now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.payments.tasks.build_service_stock_scheduler_from_subscription.delay", MagicMock()) def test_subscription_was_created(self): bag = { - 'status': 'PAID', - 'was_delivered': False, - 'chosen_period': random.choice(['MONTH', 'QUARTER', 'HALF', 'YEAR']), + "status": "PAID", + "was_delivered": False, + "chosen_period": random.choice(["MONTH", "QUARTER", "HALF", "YEAR"]), } - invoice = {'status': 'FULFILLED'} + invoice = {"status": "FULFILLED"} months = 1 - if bag['chosen_period'] == 'QUARTER': + if bag["chosen_period"] == "QUARTER": months = 3 - elif bag['chosen_period'] == 'HALF': + elif bag["chosen_period"] == "HALF": months = 6 - elif bag['chosen_period'] == 'YEAR': + elif bag["chosen_period"] == "YEAR": months = 12 model = self.bc.database.create(bag=bag, invoice=invoice) @@ -142,63 +154,83 @@ def test_subscription_was_created(self): build_subscription.delay(1, 1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting build_subscription for bag 1'), - call('Subscription was created with id 1'), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting build_subscription for bag 1"), + call("Subscription was created with id 1"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(self.bc.database.list_of('payments.Bag'), [ - { - **self.bc.format.to_dict(model.bag), - 'was_delivered': True, - }, - ]) - self.assertEqual(self.bc.database.list_of('payments.Invoice'), [ - self.bc.format.to_dict(model.invoice), - ]) - self.assertEqual(self.bc.database.list_of('payments.Subscription'), [ - subscription_item({ - 'conversion_info': None, - 'paid_at': model.invoice.paid_at, - 'valid_until': None, - 'next_payment_at': model.invoice.paid_at + relativedelta(months=months), - }), - ]) - - self.assertEqual(tasks.build_service_stock_scheduler_from_subscription.delay.call_args_list, [ - call(1), - ]) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + self.assertEqual( + self.bc.database.list_of("payments.Bag"), + [ + { + **self.bc.format.to_dict(model.bag), + "was_delivered": True, + }, + ], + ) + self.assertEqual( + self.bc.database.list_of("payments.Invoice"), + [ + self.bc.format.to_dict(model.invoice), + ], + ) + self.assertEqual( + self.bc.database.list_of("payments.Subscription"), + [ + subscription_item( + { + "conversion_info": None, + "paid_at": model.invoice.paid_at, + "valid_until": None, + "next_payment_at": model.invoice.paid_at + relativedelta(months=months), + } + ), + ], + ) + + self.assertEqual( + tasks.build_service_stock_scheduler_from_subscription.delay.call_args_list, + [ + call(1), + ], + ) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) """ 🔽🔽🔽 With Bag and Invoice and conversion_info """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch.object(timezone, 'now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.payments.tasks.build_service_stock_scheduler_from_subscription.delay', MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + 
@patch.object(timezone, "now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.payments.tasks.build_service_stock_scheduler_from_subscription.delay", MagicMock()) def test_subscription_was_created_with_conversion_info(self): bag = { - 'status': 'PAID', - 'was_delivered': False, - 'chosen_period': random.choice(['MONTH', 'QUARTER', 'HALF', 'YEAR']), + "status": "PAID", + "was_delivered": False, + "chosen_period": random.choice(["MONTH", "QUARTER", "HALF", "YEAR"]), } - invoice = {'status': 'FULFILLED'} + invoice = {"status": "FULFILLED"} months = 1 - if bag['chosen_period'] == 'QUARTER': + if bag["chosen_period"] == "QUARTER": months = 3 - elif bag['chosen_period'] == 'HALF': + elif bag["chosen_period"] == "HALF": months = 6 - elif bag['chosen_period'] == 'YEAR': + elif bag["chosen_period"] == "YEAR": months = 12 model = self.bc.database.create(bag=bag, invoice=invoice) @@ -210,72 +242,90 @@ def test_subscription_was_created_with_conversion_info(self): conversion_info = "{ 'landing_url': '/home' }" build_subscription.delay(1, 1, conversion_info=conversion_info) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting build_subscription for bag 1'), - call('Subscription was created with id 1'), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting build_subscription for bag 1"), + call("Subscription was created with id 1"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(self.bc.database.list_of('payments.Bag'), [ - { - **self.bc.format.to_dict(model.bag), - 'was_delivered': True, - }, - ]) - self.assertEqual(self.bc.database.list_of('payments.Invoice'), [ - self.bc.format.to_dict(model.invoice), - ]) - self.assertEqual(self.bc.database.list_of('payments.Subscription'), [ - subscription_item({ - 'conversion_info': { - 'landing_url': '/home' + self.assertEqual( + self.bc.database.list_of("payments.Bag"), + [ + { + **self.bc.format.to_dict(model.bag), + "was_delivered": True, }, - 'paid_at': model.invoice.paid_at, - 'valid_until': None, - 'next_payment_at': model.invoice.paid_at + relativedelta(months=months), - }), - ]) - - self.assertEqual(tasks.build_service_stock_scheduler_from_subscription.delay.call_args_list, [ - call(1), - ]) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + ], + ) + self.assertEqual( + self.bc.database.list_of("payments.Invoice"), + [ + self.bc.format.to_dict(model.invoice), + ], + ) + self.assertEqual( + self.bc.database.list_of("payments.Subscription"), + [ + subscription_item( + { + "conversion_info": {"landing_url": "/home"}, + "paid_at": model.invoice.paid_at, + "valid_until": None, + "next_payment_at": model.invoice.paid_at + relativedelta(months=months), + } + ), + ], + ) + + self.assertEqual( + tasks.build_service_stock_scheduler_from_subscription.delay.call_args_list, + [ + call(1), + ], + ) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) """ 🔽🔽🔽 With Bag, Invoice and Cohort """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch.object(timezone, 'now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.payments.tasks.build_service_stock_scheduler_from_subscription.delay', 
MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch.object(timezone, "now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.payments.tasks.build_service_stock_scheduler_from_subscription.delay", MagicMock()) def test_subscription_was_created__bag_with_cohort(self): bag = { - 'status': 'PAID', - 'was_delivered': False, - 'chosen_period': random.choice(['MONTH', 'QUARTER', 'HALF', 'YEAR']), + "status": "PAID", + "was_delivered": False, + "chosen_period": random.choice(["MONTH", "QUARTER", "HALF", "YEAR"]), } - invoice = {'status': 'FULFILLED'} + invoice = {"status": "FULFILLED"} months = 1 - if bag['chosen_period'] == 'QUARTER': + if bag["chosen_period"] == "QUARTER": months = 3 - elif bag['chosen_period'] == 'HALF': + elif bag["chosen_period"] == "HALF": months = 6 - elif bag['chosen_period'] == 'YEAR': + elif bag["chosen_period"] == "YEAR": months = 12 plan = { - 'time_of_life': None, - 'time_of_life_unit': None, + "time_of_life": None, + "time_of_life_unit": None, } - academy = {'available_as_saas': True} + academy = {"available_as_saas": True} model = self.bc.database.create(bag=bag, invoice=invoice, cohort_set=1, plan=plan, academy=academy) # remove prints from mixer @@ -284,71 +334,94 @@ def test_subscription_was_created__bag_with_cohort(self): build_subscription.delay(1, 1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), [ - self.bc.format.to_dict(model.cohort), - ]) + self.assertEqual( + self.bc.database.list_of("admissions.Cohort"), + [ + self.bc.format.to_dict(model.cohort), + ], + ) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting build_subscription for bag 1'), - call('Subscription was created with id 1'), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting build_subscription for bag 1"), + call("Subscription was created with id 1"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(self.bc.database.list_of('payments.Bag'), [ - { - **self.bc.format.to_dict(model.bag), - 'was_delivered': True, - }, - ]) - self.assertEqual(self.bc.database.list_of('payments.Invoice'), [ - self.bc.format.to_dict(model.invoice), - ]) - self.assertEqual(self.bc.database.list_of('payments.Subscription'), [ - subscription_item({ - 'conversion_info': None, - 'paid_at': model.invoice.paid_at, - 'valid_until': None, - 'selected_cohort_set_id': 1, - 'next_payment_at': model.invoice.paid_at + relativedelta(months=months), - }), - ]) - - self.assertEqual(tasks.build_service_stock_scheduler_from_subscription.delay.call_args_list, [ - call(1), - ]) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + self.assertEqual( + self.bc.database.list_of("payments.Bag"), + [ + { + **self.bc.format.to_dict(model.bag), + "was_delivered": True, + }, + ], + ) + self.assertEqual( + self.bc.database.list_of("payments.Invoice"), + [ + self.bc.format.to_dict(model.invoice), + ], + ) + self.assertEqual( + self.bc.database.list_of("payments.Subscription"), + [ + subscription_item( + { + "conversion_info": None, + "paid_at": model.invoice.paid_at, + "valid_until": None, + "selected_cohort_set_id": 1, + "next_payment_at": model.invoice.paid_at + relativedelta(months=months), + } + ), + ], + ) + + self.assertEqual( + tasks.build_service_stock_scheduler_from_subscription.delay.call_args_list, + [ + call(1), + ], + ) + self.bc.check.calls( + 
activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) """ 🔽🔽🔽 With Bag, Invoice and EventTypeSet """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch.object(timezone, 'now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.payments.tasks.build_service_stock_scheduler_from_subscription.delay', MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch.object(timezone, "now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.payments.tasks.build_service_stock_scheduler_from_subscription.delay", MagicMock()) def test_subscription_was_created__bag_with_event_type_set(self): bag = { - 'status': 'PAID', - 'was_delivered': False, - 'chosen_period': random.choice(['MONTH', 'QUARTER', 'HALF', 'YEAR']), + "status": "PAID", + "was_delivered": False, + "chosen_period": random.choice(["MONTH", "QUARTER", "HALF", "YEAR"]), } - invoice = {'status': 'FULFILLED'} + invoice = {"status": "FULFILLED"} months = 1 - if bag['chosen_period'] == 'QUARTER': + if bag["chosen_period"] == "QUARTER": months = 3 - elif bag['chosen_period'] == 'HALF': + elif bag["chosen_period"] == "HALF": months = 6 - elif bag['chosen_period'] == 'YEAR': + elif bag["chosen_period"] == "YEAR": months = 12 plan = { - 'time_of_life': None, - 'time_of_life_unit': None, + "time_of_life": None, + "time_of_life_unit": None, } model = self.bc.database.create(bag=bag, invoice=invoice, event_type_set=1, plan=plan) @@ -358,69 +431,89 @@ def test_subscription_was_created__bag_with_event_type_set(self): build_subscription.delay(1, 1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting build_subscription for bag 1'), - call('Subscription was created with id 1'), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting build_subscription for bag 1"), + call("Subscription was created with id 1"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(self.bc.database.list_of('payments.Bag'), [ - { - **self.bc.format.to_dict(model.bag), - 'was_delivered': True, - }, - ]) - self.assertEqual(self.bc.database.list_of('payments.Invoice'), [ - self.bc.format.to_dict(model.invoice), - ]) - self.assertEqual(self.bc.database.list_of('payments.Subscription'), [ - subscription_item({ - 'conversion_info': None, - 'paid_at': model.invoice.paid_at, - 'valid_until': None, - 'selected_event_type_set_id': 1, - 'next_payment_at': model.invoice.paid_at + relativedelta(months=months), - }), - ]) - - self.assertEqual(tasks.build_service_stock_scheduler_from_subscription.delay.call_args_list, [ - call(1), - ]) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + self.assertEqual( + self.bc.database.list_of("payments.Bag"), + [ + { + **self.bc.format.to_dict(model.bag), + "was_delivered": True, + }, + ], + ) + self.assertEqual( + self.bc.database.list_of("payments.Invoice"), + [ + self.bc.format.to_dict(model.invoice), + ], + ) + self.assertEqual( + self.bc.database.list_of("payments.Subscription"), + [ + subscription_item( + { + "conversion_info": None, + "paid_at": model.invoice.paid_at, + "valid_until": None, + "selected_event_type_set_id": 1, + "next_payment_at": 
model.invoice.paid_at + relativedelta(months=months), + } + ), + ], + ) + + self.assertEqual( + tasks.build_service_stock_scheduler_from_subscription.delay.call_args_list, + [ + call(1), + ], + ) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) """ 🔽🔽🔽 With Bag, Invoice and MentorshipServiceSet """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch.object(timezone, 'now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.payments.tasks.build_service_stock_scheduler_from_subscription.delay', MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch.object(timezone, "now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.payments.tasks.build_service_stock_scheduler_from_subscription.delay", MagicMock()) def test_subscription_was_created__bag_with_mentorship_service_set(self): bag = { - 'status': 'PAID', - 'was_delivered': False, - 'chosen_period': random.choice(['MONTH', 'QUARTER', 'HALF', 'YEAR']), + "status": "PAID", + "was_delivered": False, + "chosen_period": random.choice(["MONTH", "QUARTER", "HALF", "YEAR"]), } - invoice = {'status': 'FULFILLED'} + invoice = {"status": "FULFILLED"} months = 1 - if bag['chosen_period'] == 'QUARTER': + if bag["chosen_period"] == "QUARTER": months = 3 - elif bag['chosen_period'] == 'HALF': + elif bag["chosen_period"] == "HALF": months = 6 - elif bag['chosen_period'] == 'YEAR': + elif bag["chosen_period"] == "YEAR": months = 12 plan = { - 'time_of_life': None, - 'time_of_life_unit': None, + "time_of_life": None, + "time_of_life_unit": None, } model = self.bc.database.create(bag=bag, invoice=invoice, mentorship_service_set=1, plan=plan) @@ -430,36 +523,56 @@ def test_subscription_was_created__bag_with_mentorship_service_set(self): build_subscription.delay(1, 1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting build_subscription for bag 1'), - call('Subscription was created with id 1'), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting build_subscription for bag 1"), + call("Subscription was created with id 1"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(self.bc.database.list_of('payments.Bag'), [ - { - **self.bc.format.to_dict(model.bag), - 'was_delivered': True, - }, - ]) - self.assertEqual(self.bc.database.list_of('payments.Invoice'), [ - self.bc.format.to_dict(model.invoice), - ]) - self.assertEqual(self.bc.database.list_of('payments.Subscription'), [ - subscription_item({ - 'conversion_info': None, - 'paid_at': model.invoice.paid_at, - 'valid_until': None, - 'selected_mentorship_service_set_id': 1, - 'next_payment_at': model.invoice.paid_at + relativedelta(months=months), - }), - ]) - - self.assertEqual(tasks.build_service_stock_scheduler_from_subscription.delay.call_args_list, [ - call(1), - ]) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + self.assertEqual( + self.bc.database.list_of("payments.Bag"), + [ + { + **self.bc.format.to_dict(model.bag), + "was_delivered": True, + }, + ], + ) + self.assertEqual( + self.bc.database.list_of("payments.Invoice"), + [ + 
self.bc.format.to_dict(model.invoice), + ], + ) + self.assertEqual( + self.bc.database.list_of("payments.Subscription"), + [ + subscription_item( + { + "conversion_info": None, + "paid_at": model.invoice.paid_at, + "valid_until": None, + "selected_mentorship_service_set_id": 1, + "next_payment_at": model.invoice.paid_at + relativedelta(months=months), + } + ), + ], + ) + + self.assertEqual( + tasks.build_service_stock_scheduler_from_subscription.delay.call_args_list, + [ + call(1), + ], + ) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) diff --git a/breathecode/payments/tests/tasks/tests_charge_plan_financing.py b/breathecode/payments/tests/tasks/tests_charge_plan_financing.py index 362b61938..26f1ab8a0 100644 --- a/breathecode/payments/tests/tasks/tests_charge_plan_financing.py +++ b/breathecode/payments/tests/tasks/tests_charge_plan_financing.py @@ -1,6 +1,7 @@ """ Test /answer """ + import logging import os import random @@ -24,129 +25,135 @@ def plan_financing_item(data={}): return { - 'id': 1, - 'is_refundable': True, - 'paid_at': UTC_NOW, - 'pay_every': 1, - 'pay_every_unit': 'MONTH', - 'status': 'ACTIVE', - 'user_id': 1, - 'valid_until': UTC_NOW, + "id": 1, + "is_refundable": True, + "paid_at": UTC_NOW, + "pay_every": 1, + "pay_every_unit": "MONTH", + "status": "ACTIVE", + "user_id": 1, + "valid_until": UTC_NOW, **data, } def bag_item(data={}): return { - 'id': 1, - 'amount_per_month': 0.0, - 'amount_per_quarter': 0.0, - 'amount_per_half': 0.0, - 'amount_per_year': 0.0, - 'currency_id': 0, - 'status': 'CHECKING', - 'type': 'CHARGE', - 'chosen_period': 'NO_SET', - 'how_many_installments': 0, - 'academy_id': 0, - 'user_id': 0, - 'is_recurrent': False, - 'was_delivered': False, - 'token': None, - 'expires_at': None, + "id": 1, + "amount_per_month": 0.0, + "amount_per_quarter": 0.0, + "amount_per_half": 0.0, + "amount_per_year": 0.0, + "currency_id": 0, + "status": "CHECKING", + "type": "CHARGE", + "chosen_period": "NO_SET", + "how_many_installments": 0, + "academy_id": 0, + "user_id": 0, + "is_recurrent": False, + "was_delivered": False, + "token": None, + "expires_at": None, **data, } def invoice_item(data={}): return { - 'academy_id': 0, - 'amount': 0.0, - 'bag_id': 2, - 'currency_id': 2, - 'id': 0, - 'paid_at': None, - 'status': 'PENDING', - 'stripe_id': None, - 'user_id': 0, - 'refund_stripe_id': None, - 'refunded_at': None, + "academy_id": 0, + "amount": 0.0, + "bag_id": 2, + "currency_id": 2, + "id": 0, + "paid_at": None, + "status": "PENDING", + "stripe_id": None, + "user_id": 0, + "refund_stripe_id": None, + "refunded_at": None, **data, } def fake_stripe_pay(**kwargs): - def wrapper(user, bag, amount: int, currency='usd', description=''): - return mixer.blend('payments.Invoice', user=user, bag=bag, **kwargs) + def wrapper(user, bag, amount: int, currency="usd", description=""): + return mixer.blend("payments.Invoice", user=user, bag=bag, **kwargs) return wrapper def calculate_relative_delta(unit: float, unit_type: str): delta_args = {} - if unit_type == 'DAY': - delta_args['days'] = unit + if unit_type == "DAY": + delta_args["days"] = unit - elif unit_type == 'WEEK': - delta_args['weeks'] = unit + elif unit_type == "WEEK": + delta_args["weeks"] = unit - elif unit_type == 'MONTH': - delta_args['months'] = unit + elif unit_type == "MONTH": + delta_args["months"] = unit - elif unit_type == 'YEAR': - delta_args['years'] = unit + elif unit_type == "YEAR": + 
delta_args["years"] = unit return relativedelta(**delta_args) @pytest.fixture(autouse=True) def setup(monkeypatch): - monkeypatch.setattr(activity_tasks.add_activity, 'delay', MagicMock()) + monkeypatch.setattr(activity_tasks.add_activity, "delay", MagicMock()) yield -#FIXME: create_v2 fail in this test file +# FIXME: create_v2 fail in this test file class PaymentsTestSuite(PaymentsTestCase): """ 🔽🔽🔽 PlanFinancing not found """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_plan_financing_not_found(self): charge_plan_financing.delay(1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting charge_plan_financing for id 1'), - ]) - self.assertEqual(logging.Logger.error.call_args_list, [ - call('PlanFinancing with id 1 not found', exc_info=True), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting charge_plan_financing for id 1"), + ], + ) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call("PlanFinancing with id 1 not found", exc_info=True), + ], + ) - self.assertEqual(self.bc.database.list_of('payments.Bag'), []) - self.assertEqual(self.bc.database.list_of('payments.Invoice'), []) - self.assertEqual(self.bc.database.list_of('payments.PlanFinancing'), []) + self.assertEqual(self.bc.database.list_of("payments.Bag"), []) + self.assertEqual(self.bc.database.list_of("payments.Invoice"), []) + self.assertEqual(self.bc.database.list_of("payments.PlanFinancing"), []) self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, []) """ 🔽🔽🔽 PlanFinancing with zero Invoice """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.notify.actions.send_email_message', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_plan_financing_without_invoices(self): plan_financing = { - 'valid_until': UTC_NOW + relativedelta(minutes=3), - 'monthly_price': (random.random() * 99) + 1, - 'plan_expires_at': UTC_NOW + relativedelta(months=random.randint(1, 12)), + "valid_until": UTC_NOW + relativedelta(minutes=3), + "monthly_price": (random.random() * 99) + 1, + "plan_expires_at": UTC_NOW + relativedelta(months=random.randint(1, 12)), } - plan = {'is_renewable': False} + plan = {"is_renewable": False} model = self.bc.database.create(plan_financing=plan_financing, plan=plan, user=1) # remove prints from mixer @@ -155,28 +162,35 @@ def test_plan_financing_without_invoices(self): charge_plan_financing.delay(1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting charge_plan_financing for id 1'), - ]) - self.assertEqual(logging.Logger.error.call_args_list, [ - call('Error getting bag from plan financing 1: plan-financing-has-no-invoices', 
exc_info=True), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting charge_plan_financing for id 1"), + ], + ) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call("Error getting bag from plan financing 1: plan-financing-has-no-invoices", exc_info=True), + ], + ) - self.assertEqual(self.bc.database.list_of('payments.Bag'), []) - self.assertEqual(self.bc.database.list_of('payments.Invoice'), []) + self.assertEqual(self.bc.database.list_of("payments.Bag"), []) + self.assertEqual(self.bc.database.list_of("payments.Invoice"), []) self.assertEqual( - self.bc.database.list_of('payments.PlanFinancing'), + self.bc.database.list_of("payments.PlanFinancing"), [ { **self.bc.format.to_dict(model.plan_financing), # 'status': 'PAYMENT_ISSUE',, - 'status': 'ERROR', - 'status_message': 'plan-financing-has-no-invoices', + "status": "ERROR", + "status_message": "plan-financing-has-no-invoices", }, - ]) + ], + ) self.assertEqual(notify_actions.send_email_message.call_args_list, []) self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, []) @@ -184,449 +198,580 @@ def test_plan_financing_without_invoices(self): 🔽🔽🔽 PlanFinancing process to charge """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.notify.actions.send_email_message', MagicMock()) - @patch('breathecode.payments.tasks.renew_plan_financing_consumables.delay', MagicMock()) - @patch('mixer.main.LOGGER.info', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) + @patch("breathecode.payments.tasks.renew_plan_financing_consumables.delay", MagicMock()) + @patch("mixer.main.LOGGER.info", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_plan_financing_was_paid_this_month(self): delta = relativedelta(months=random.randint(1, 12)) plan_financing = { - 'valid_until': UTC_NOW + delta, - 'next_payment_at': UTC_NOW + delta, - 'monthly_price': (random.random() * 99) + 1, - 'plan_expires_at': UTC_NOW + relativedelta(months=random.randint(1, 12)), + "valid_until": UTC_NOW + delta, + "next_payment_at": UTC_NOW + delta, + "monthly_price": (random.random() * 99) + 1, + "plan_expires_at": UTC_NOW + relativedelta(months=random.randint(1, 12)), } - plan = {'is_renewable': False} - invoice = {'paid_at': UTC_NOW - relativedelta(hours=24, seconds=1)} - bag = {'how_many_installments': 3} - with patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW - relativedelta(months=2))): - model = self.bc.database.create(academy=1, - plan_financing=plan_financing, - invoice=invoice, - plan=plan, - bag=bag) - - with patch('breathecode.payments.services.stripe.Stripe.pay', - MagicMock(side_effect=fake_stripe_pay(paid_at=UTC_NOW, academy=model.academy))): + plan = {"is_renewable": False} + invoice = {"paid_at": UTC_NOW - relativedelta(hours=24, seconds=1)} + bag = {"how_many_installments": 3} + with patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW - relativedelta(months=2))): + model = self.bc.database.create( + academy=1, plan_financing=plan_financing, invoice=invoice, plan=plan, bag=bag + ) + + with patch( + "breathecode.payments.services.stripe.Stripe.pay", + MagicMock(side_effect=fake_stripe_pay(paid_at=UTC_NOW, academy=model.academy)), + ): # remove prints from mixer 
logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] charge_plan_financing.delay(1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) - - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting charge_plan_financing for id 1'), - ]) - self.assertEqual(logging.Logger.error.call_args_list, [ - call('PlanFinancing with id 1 was paid this month', exc_info=True), - ]) - - self.assertEqual(self.bc.database.list_of('payments.Bag'), [ - self.bc.format.to_dict(model.bag), - ]) - self.assertEqual(self.bc.database.list_of('payments.Invoice'), [ - self.bc.format.to_dict(model.invoice), - ]) - - self.assertEqual(self.bc.database.list_of('payments.PlanFinancing'), [ - { - **self.bc.format.to_dict(model.plan_financing), - 'status': 'ACTIVE', - }, - ]) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) + + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting charge_plan_financing for id 1"), + ], + ) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call("PlanFinancing with id 1 was paid this month", exc_info=True), + ], + ) + + self.assertEqual( + self.bc.database.list_of("payments.Bag"), + [ + self.bc.format.to_dict(model.bag), + ], + ) + self.assertEqual( + self.bc.database.list_of("payments.Invoice"), + [ + self.bc.format.to_dict(model.invoice), + ], + ) + + self.assertEqual( + self.bc.database.list_of("payments.PlanFinancing"), + [ + { + **self.bc.format.to_dict(model.plan_financing), + "status": "ACTIVE", + }, + ], + ) self.assertEqual(notify_actions.send_email_message.call_args_list, []) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) """ 🔽🔽🔽 PlanFinancing process to charge """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.notify.actions.send_email_message', MagicMock()) - @patch('breathecode.payments.tasks.renew_plan_financing_consumables.delay', MagicMock()) - @patch('mixer.main.LOGGER.info', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) + @patch("breathecode.payments.tasks.renew_plan_financing_consumables.delay", MagicMock()) + @patch("mixer.main.LOGGER.info", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_plan_financing_process_to_charge(self): delta = relativedelta(months=random.randint(1, 12)) plan_financing = { - 'valid_until': UTC_NOW + delta, - 'next_payment_at': UTC_NOW - delta, - 'monthly_price': (random.random() * 99) + 1, - 'plan_expires_at': UTC_NOW + relativedelta(months=random.randint(1, 12)), + "valid_until": UTC_NOW + delta, + "next_payment_at": UTC_NOW - delta, + "monthly_price": (random.random() * 99) + 1, + "plan_expires_at": UTC_NOW + relativedelta(months=random.randint(1, 12)), } - plan = {'is_renewable': False} - invoice = {'paid_at': UTC_NOW - relativedelta(hours=24, seconds=1)} - bag = {'how_many_installments': 3} - with patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW - relativedelta(months=2))): - model = self.bc.database.create(academy=1, - plan_financing=plan_financing, - 
invoice=invoice, - plan=plan, - bag=bag) - - with patch('breathecode.payments.services.stripe.Stripe.pay', - MagicMock(side_effect=fake_stripe_pay(paid_at=UTC_NOW, academy=model.academy))): + plan = {"is_renewable": False} + invoice = {"paid_at": UTC_NOW - relativedelta(hours=24, seconds=1)} + bag = {"how_many_installments": 3} + with patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW - relativedelta(months=2))): + model = self.bc.database.create( + academy=1, plan_financing=plan_financing, invoice=invoice, plan=plan, bag=bag + ) + + with patch( + "breathecode.payments.services.stripe.Stripe.pay", + MagicMock(side_effect=fake_stripe_pay(paid_at=UTC_NOW, academy=model.academy)), + ): # remove prints from mixer logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] charge_plan_financing.delay(1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting charge_plan_financing for id 1'), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting charge_plan_financing for id 1"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(self.bc.database.list_of('payments.Bag'), [ - self.bc.format.to_dict(model.bag), - bag_item({ - 'academy_id': 1, - 'currency_id': 1, - 'id': 2, - 'is_recurrent': True, - 'status': 'RENEWAL', - 'user_id': 1, - }), - ]) - self.assertEqual(self.bc.database.list_of('payments.Invoice'), [ - self.bc.format.to_dict(model.invoice), - invoice_item({ - 'academy_id': 1, - 'id': 2, - 'user_id': 1, - 'paid_at': UTC_NOW, - }), - ]) - - self.assertEqual(self.bc.database.list_of('payments.PlanFinancing'), [ - { - **self.bc.format.to_dict(model.plan_financing), - 'status': 'ACTIVE', - 'next_payment_at': model.plan_financing.next_payment_at + (delta + relativedelta(months=1)), - }, - ]) - self.assertEqual(notify_actions.send_email_message.call_args_list, [ - call('message', - model.user.email, { - 'SUBJECT': 'Your installment at 4Geeks was successfully charged', - 'MESSAGE': 'The amount was $0.0', - 'BUTTON': 'See the invoice', - 'LINK': os.getenv('APP_URL')[:-1] + '/plan-financing/1' - }, - academy=model.academy) - ]) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - call(1, 'bag_created', related_type='payments.Bag', related_id=2), - ]) + self.assertEqual( + self.bc.database.list_of("payments.Bag"), + [ + self.bc.format.to_dict(model.bag), + bag_item( + { + "academy_id": 1, + "currency_id": 1, + "id": 2, + "is_recurrent": True, + "status": "RENEWAL", + "user_id": 1, + } + ), + ], + ) + self.assertEqual( + self.bc.database.list_of("payments.Invoice"), + [ + self.bc.format.to_dict(model.invoice), + invoice_item( + { + "academy_id": 1, + "id": 2, + "user_id": 1, + "paid_at": UTC_NOW, + } + ), + ], + ) + + self.assertEqual( + self.bc.database.list_of("payments.PlanFinancing"), + [ + { + **self.bc.format.to_dict(model.plan_financing), + "status": "ACTIVE", + "next_payment_at": model.plan_financing.next_payment_at + (delta + relativedelta(months=1)), + }, + ], + ) + self.assertEqual( + notify_actions.send_email_message.call_args_list, + [ + call( + "message", + model.user.email, + { + "SUBJECT": "Your installment at 4Geeks was successfully charged", + "MESSAGE": "The amount was $0.0", + "BUTTON": "See the invoice", + "LINK": 
os.getenv("APP_URL")[:-1] + "/plan-financing/1", + }, + academy=model.academy, + ) + ], + ) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + call(1, "bag_created", related_type="payments.Bag", related_id=2), + ], + ) """ 🔽🔽🔽 PlanFinancing error when try to charge """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.notify.actions.send_email_message', MagicMock()) - @patch('breathecode.payments.tasks.renew_plan_financing_consumables.delay', MagicMock()) - @patch('mixer.main.LOGGER.info', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) + @patch("breathecode.payments.tasks.renew_plan_financing_consumables.delay", MagicMock()) + @patch("mixer.main.LOGGER.info", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_plan_financing_error_when_try_to_charge(self): plan_financing = { - 'valid_until': UTC_NOW + relativedelta(minutes=1), - 'monthly_price': (random.random() * 99) + 1, - 'plan_expires_at': UTC_NOW + relativedelta(months=random.randint(1, 12)), + "valid_until": UTC_NOW + relativedelta(minutes=1), + "monthly_price": (random.random() * 99) + 1, + "plan_expires_at": UTC_NOW + relativedelta(months=random.randint(1, 12)), } - plan = {'is_renewable': False} - bag = {'how_many_installments': 3} - with patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW - relativedelta(months=2))): + plan = {"is_renewable": False} + bag = {"how_many_installments": 3} + with patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW - relativedelta(months=2))): model = self.bc.database.create(plan_financing=plan_financing, invoice=1, plan=plan, bag=bag) - with patch('breathecode.payments.services.stripe.Stripe.pay', MagicMock(side_effect=Exception('fake error'))): + with patch("breathecode.payments.services.stripe.Stripe.pay", MagicMock(side_effect=Exception("fake error"))): # remove prints from mixer logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] charge_plan_financing.delay(1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting charge_plan_financing for id 1'), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting charge_plan_financing for id 1"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(self.bc.database.list_of('payments.Bag'), [ - self.bc.format.to_dict(model.bag), - ]) - self.assertEqual(self.bc.database.list_of('payments.Invoice'), [ - self.bc.format.to_dict(model.invoice), - ]) - - self.assertEqual(self.bc.database.list_of('payments.PlanFinancing'), [ - { - **self.bc.format.to_dict(model.plan_financing), - 'status': 'PAYMENT_ISSUE', - }, - ]) + self.assertEqual( + self.bc.database.list_of("payments.Bag"), + [ + self.bc.format.to_dict(model.bag), + ], + ) + self.assertEqual( + self.bc.database.list_of("payments.Invoice"), + [ + self.bc.format.to_dict(model.invoice), + ], + ) + + self.assertEqual( + self.bc.database.list_of("payments.PlanFinancing"), + [ + { + **self.bc.format.to_dict(model.plan_financing), + "status": 
"PAYMENT_ISSUE", + }, + ], + ) from breathecode.admissions.models import Academy - self.assertEqual(notify_actions.send_email_message.call_args_list, [ - call('message', - model.user.email, { - 'SUBJECT': 'Your 4Geeks subscription could not be renewed', - 'MESSAGE': 'Please update your payment methods', - 'BUTTON': 'Please update your payment methods', - 'LINK': os.getenv('APP_URL')[:-1] + '/plan-financing/1' - }, - academy=model.academy) - ]) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - call(1, 'bag_created', related_type='payments.Bag', related_id=2), - ]) + + self.assertEqual( + notify_actions.send_email_message.call_args_list, + [ + call( + "message", + model.user.email, + { + "SUBJECT": "Your 4Geeks subscription could not be renewed", + "MESSAGE": "Please update your payment methods", + "BUTTON": "Please update your payment methods", + "LINK": os.getenv("APP_URL")[:-1] + "/plan-financing/1", + }, + academy=model.academy, + ) + ], + ) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + call(1, "bag_created", related_type="payments.Bag", related_id=2), + ], + ) """ 🔽🔽🔽 PlanFinancing is over """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.notify.actions.send_email_message', MagicMock()) - @patch('breathecode.payments.tasks.renew_plan_financing_consumables.delay', MagicMock()) - @patch('mixer.main.LOGGER.info', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) + @patch("breathecode.payments.tasks.renew_plan_financing_consumables.delay", MagicMock()) + @patch("mixer.main.LOGGER.info", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_plan_financing_is_over(self): plan_financing = { - 'valid_until': UTC_NOW + relativedelta(months=random.randint(1, 12)), - 'monthly_price': (random.random() * 99) + 1, - 'plan_expires_at': UTC_NOW - relativedelta(minutes=1), + "valid_until": UTC_NOW + relativedelta(months=random.randint(1, 12)), + "monthly_price": (random.random() * 99) + 1, + "plan_expires_at": UTC_NOW - relativedelta(minutes=1), } - plan = {'is_renewable': False} + plan = {"is_renewable": False} model = self.bc.database.create(plan_financing=plan_financing, invoice=1, plan=plan) - with patch('breathecode.payments.services.stripe.Stripe.pay', MagicMock(side_effect=Exception('fake error'))): + with patch("breathecode.payments.services.stripe.Stripe.pay", MagicMock(side_effect=Exception("fake error"))): # remove prints from mixer logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] charge_plan_financing.delay(1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) - - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting charge_plan_financing for id 1'), - ]) - self.assertEqual(logging.Logger.error.call_args_list, [ - call('PlanFinancing with id 1 is over', exc_info=True), - ]) - - self.assertEqual(self.bc.database.list_of('payments.Bag'), [ - self.bc.format.to_dict(model.bag), - ]) - self.assertEqual(self.bc.database.list_of('payments.Invoice'), [ - self.bc.format.to_dict(model.invoice), - ]) - - 
self.assertEqual(self.bc.database.list_of('payments.PlanFinancing'), [ - { - **self.bc.format.to_dict(model.plan_financing), - }, - ]) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) + + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting charge_plan_financing for id 1"), + ], + ) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call("PlanFinancing with id 1 is over", exc_info=True), + ], + ) + + self.assertEqual( + self.bc.database.list_of("payments.Bag"), + [ + self.bc.format.to_dict(model.bag), + ], + ) + self.assertEqual( + self.bc.database.list_of("payments.Invoice"), + [ + self.bc.format.to_dict(model.invoice), + ], + ) + + self.assertEqual( + self.bc.database.list_of("payments.PlanFinancing"), + [ + { + **self.bc.format.to_dict(model.plan_financing), + }, + ], + ) self.assertEqual(notify_actions.send_email_message.call_args_list, []) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) """ 🔽🔽🔽 PlanFinancing was paid """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.notify.actions.send_email_message', MagicMock()) - @patch('breathecode.payments.tasks.renew_plan_financing_consumables.delay', MagicMock()) - @patch('mixer.main.LOGGER.info', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) + @patch("breathecode.payments.tasks.renew_plan_financing_consumables.delay", MagicMock()) + @patch("mixer.main.LOGGER.info", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_plan_financing_was_paid(self): plan_financing = { - 'valid_until': UTC_NOW + relativedelta(minutes=1), - 'next_payment_at': UTC_NOW + relativedelta(minutes=1), - 'monthly_price': (random.random() * 99) + 1, - 'plan_expires_at': UTC_NOW + relativedelta(months=random.randint(1, 12)), + "valid_until": UTC_NOW + relativedelta(minutes=1), + "next_payment_at": UTC_NOW + relativedelta(minutes=1), + "monthly_price": (random.random() * 99) + 1, + "plan_expires_at": UTC_NOW + relativedelta(months=random.randint(1, 12)), } - plan = {'is_renewable': False} + plan = {"is_renewable": False} model = self.bc.database.create(plan_financing=plan_financing, invoice=1, plan=plan) - with patch('breathecode.payments.services.stripe.Stripe.pay', MagicMock(side_effect=Exception('fake error'))): + with patch("breathecode.payments.services.stripe.Stripe.pay", MagicMock(side_effect=Exception("fake error"))): # remove prints from mixer logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] charge_plan_financing.delay(1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) - - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting charge_plan_financing for id 1'), - ]) - self.assertEqual(logging.Logger.error.call_args_list, [ - call('PlanFinancing with id 1 was paid this month', exc_info=True), - ]) - - self.assertEqual(self.bc.database.list_of('payments.Bag'), [ - self.bc.format.to_dict(model.bag), - ]) - self.assertEqual(self.bc.database.list_of('payments.Invoice'), [ - 
self.bc.format.to_dict(model.invoice), - ]) - - self.assertEqual(self.bc.database.list_of('payments.PlanFinancing'), [ - { - **self.bc.format.to_dict(model.plan_financing), - }, - ]) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) + + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting charge_plan_financing for id 1"), + ], + ) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call("PlanFinancing with id 1 was paid this month", exc_info=True), + ], + ) + + self.assertEqual( + self.bc.database.list_of("payments.Bag"), + [ + self.bc.format.to_dict(model.bag), + ], + ) + self.assertEqual( + self.bc.database.list_of("payments.Invoice"), + [ + self.bc.format.to_dict(model.invoice), + ], + ) + + self.assertEqual( + self.bc.database.list_of("payments.PlanFinancing"), + [ + { + **self.bc.format.to_dict(model.plan_financing), + }, + ], + ) self.assertEqual(notify_actions.send_email_message.call_args_list, []) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) """ 🔽🔽🔽 PlanFinancing try to charge, but a undexpected exception is raised, the database is rollbacked and the error is register in PlanFinancing """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.payments.tasks.renew_plan_financing_consumables.delay', MagicMock()) - @patch('mixer.main.LOGGER.info', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.payments.services.stripe.Stripe.refund_payment', MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.payments.tasks.renew_plan_financing_consumables.delay", MagicMock()) + @patch("mixer.main.LOGGER.info", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.payments.services.stripe.Stripe.refund_payment", MagicMock()) def test_plan_financing_process_to_charge__but_a_undexpected_exception_is_raised__not_found_invoice_to_refund(self): plan_financing = { - 'valid_until': UTC_NOW + relativedelta(minutes=1), - 'monthly_price': (random.random() * 99) + 1, - 'plan_expires_at': UTC_NOW + relativedelta(months=random.randint(1, 12)), + "valid_until": UTC_NOW + relativedelta(minutes=1), + "monthly_price": (random.random() * 99) + 1, + "plan_expires_at": UTC_NOW + relativedelta(months=random.randint(1, 12)), } - invoice = {'paid_at': UTC_NOW - relativedelta(hours=24, seconds=1)} - plan = {'is_renewable': False} - bag = {'how_many_installments': 3} - with patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW - relativedelta(months=2))): - model = self.bc.database.create(academy=1, - plan_financing=plan_financing, - invoice=invoice, - plan=plan, - bag=bag) + invoice = {"paid_at": UTC_NOW - relativedelta(hours=24, seconds=1)} + plan = {"is_renewable": False} + bag = {"how_many_installments": 3} + with patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW - relativedelta(months=2))): + model = self.bc.database.create( + academy=1, plan_financing=plan_financing, invoice=invoice, plan=plan, bag=bag + ) error = self.bc.fake.text() - with patch('breathecode.payments.services.stripe.Stripe.pay', - 
MagicMock(side_effect=fake_stripe_pay(paid_at=UTC_NOW, academy=model.academy))): + with patch( + "breathecode.payments.services.stripe.Stripe.pay", + MagicMock(side_effect=fake_stripe_pay(paid_at=UTC_NOW, academy=model.academy)), + ): # remove prints from mixer logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] - with patch('breathecode.notify.actions.send_email_message', MagicMock(side_effect=Exception(error))): + with patch("breathecode.notify.actions.send_email_message", MagicMock(side_effect=Exception(error))): charge_plan_financing.delay(1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting charge_plan_financing for id 1'), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting charge_plan_financing for id 1"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, [call(error, exc_info=True)]) - self.assertEqual(self.bc.database.list_of('payments.Bag'), [ - self.bc.format.to_dict(model.bag), - ]) - self.assertEqual(self.bc.database.list_of('payments.Invoice'), [ - self.bc.format.to_dict(model.invoice), - ]) + self.assertEqual( + self.bc.database.list_of("payments.Bag"), + [ + self.bc.format.to_dict(model.bag), + ], + ) + self.assertEqual( + self.bc.database.list_of("payments.Invoice"), + [ + self.bc.format.to_dict(model.invoice), + ], + ) - message = f'charge_plan_financing is failing for the plan financing {model.plan_financing.id}: ' - message += str(error)[:250 - len(message)] + message = f"charge_plan_financing is failing for the plan financing {model.plan_financing.id}: " + message += str(error)[: 250 - len(message)] - self.assertEqual(self.bc.database.list_of('payments.PlanFinancing'), [ - { - **self.bc.format.to_dict(model.plan_financing), - 'status': 'ERROR', - 'status_message': message, - }, - ]) + self.assertEqual( + self.bc.database.list_of("payments.PlanFinancing"), + [ + { + **self.bc.format.to_dict(model.plan_financing), + "status": "ERROR", + "status_message": message, + }, + ], + ) self.assertEqual(Stripe.refund_payment.call_args_list, []) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - call(1, 'bag_created', related_type='payments.Bag', related_id=2), - ]) - - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.payments.tasks.renew_plan_financing_consumables.delay', MagicMock()) - @patch('mixer.main.LOGGER.info', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.payments.services.stripe.Stripe.refund_payment', MagicMock()) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + call(1, "bag_created", related_type="payments.Bag", related_id=2), + ], + ) + + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.payments.tasks.renew_plan_financing_consumables.delay", MagicMock()) + @patch("mixer.main.LOGGER.info", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.payments.services.stripe.Stripe.refund_payment", MagicMock()) def 
test_plan_financing_process_to_charge__but_a_undexpected_exception_is_raised__found_invoice_to_refund(self): plan_financing = { - 'valid_until': UTC_NOW + relativedelta(minutes=1), - 'monthly_price': (random.random() * 99) + 1, - 'plan_expires_at': UTC_NOW + relativedelta(months=random.randint(1, 12)), + "valid_until": UTC_NOW + relativedelta(minutes=1), + "monthly_price": (random.random() * 99) + 1, + "plan_expires_at": UTC_NOW + relativedelta(months=random.randint(1, 12)), } - invoice = {'paid_at': UTC_NOW - relativedelta(hours=random.randint(1, 23))} - plan = {'is_renewable': False} - bag = {'how_many_installments': 3} - with patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW - relativedelta(months=2))): - model = self.bc.database.create(academy=1, - plan_financing=plan_financing, - invoice=invoice, - plan=plan, - bag=bag) + invoice = {"paid_at": UTC_NOW - relativedelta(hours=random.randint(1, 23))} + plan = {"is_renewable": False} + bag = {"how_many_installments": 3} + with patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW - relativedelta(months=2))): + model = self.bc.database.create( + academy=1, plan_financing=plan_financing, invoice=invoice, plan=plan, bag=bag + ) error = self.bc.fake.text() - with patch('breathecode.payments.services.stripe.Stripe.pay', - MagicMock(side_effect=fake_stripe_pay(paid_at=UTC_NOW, academy=model.academy))): + with patch( + "breathecode.payments.services.stripe.Stripe.pay", + MagicMock(side_effect=fake_stripe_pay(paid_at=UTC_NOW, academy=model.academy)), + ): # remove prints from mixer logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] - with patch('breathecode.notify.actions.send_email_message', MagicMock(side_effect=Exception(error))): + with patch("breathecode.notify.actions.send_email_message", MagicMock(side_effect=Exception(error))): charge_plan_financing.delay(1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting charge_plan_financing for id 1'), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting charge_plan_financing for id 1"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, [call(error, exc_info=True)]) - self.assertEqual(self.bc.database.list_of('payments.Bag'), [ - self.bc.format.to_dict(model.bag), - ]) - self.assertEqual(self.bc.database.list_of('payments.Invoice'), [ - self.bc.format.to_dict(model.invoice), - ]) + self.assertEqual( + self.bc.database.list_of("payments.Bag"), + [ + self.bc.format.to_dict(model.bag), + ], + ) + self.assertEqual( + self.bc.database.list_of("payments.Invoice"), + [ + self.bc.format.to_dict(model.invoice), + ], + ) - message = f'charge_plan_financing is failing for the plan financing {model.plan_financing.id}: ' - message += str(error)[:250 - len(message)] + message = f"charge_plan_financing is failing for the plan financing {model.plan_financing.id}: " + message += str(error)[: 250 - len(message)] - self.assertEqual(self.bc.database.list_of('payments.PlanFinancing'), [ - { - **self.bc.format.to_dict(model.plan_financing), - 'status': 'ERROR', - 'status_message': message, - }, - ]) + self.assertEqual( + self.bc.database.list_of("payments.PlanFinancing"), + [ + { + **self.bc.format.to_dict(model.plan_financing), + "status": "ERROR", + "status_message": message, + }, + ], + ) self.assertEqual(Stripe.refund_payment.call_args_list, 
[call(model.invoice)]) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - call(1, 'bag_created', related_type='payments.Bag', related_id=2), - ]) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + call(1, "bag_created", related_type="payments.Bag", related_id=2), + ], + ) diff --git a/breathecode/payments/tests/tasks/tests_charge_subscription.py b/breathecode/payments/tests/tasks/tests_charge_subscription.py index 9bea0449f..d6a042222 100644 --- a/breathecode/payments/tests/tasks/tests_charge_subscription.py +++ b/breathecode/payments/tests/tasks/tests_charge_subscription.py @@ -1,6 +1,7 @@ """ Test /answer """ + import logging import os import random @@ -24,85 +25,85 @@ def subscription_item(data={}): return { - 'id': 1, - 'is_refundable': True, - 'paid_at': UTC_NOW, - 'pay_every': 1, - 'pay_every_unit': 'MONTH', - 'status': 'ACTIVE', - 'user_id': 1, - 'valid_until': UTC_NOW, + "id": 1, + "is_refundable": True, + "paid_at": UTC_NOW, + "pay_every": 1, + "pay_every_unit": "MONTH", + "status": "ACTIVE", + "user_id": 1, + "valid_until": UTC_NOW, **data, } def bag_item(data={}): return { - 'id': 1, - 'amount_per_month': 0.0, - 'amount_per_quarter': 0.0, - 'amount_per_half': 0.0, - 'amount_per_year': 0.0, - 'currency_id': 0, - 'status': 'CHECKING', - 'type': 'CHARGE', - 'chosen_period': 'NO_SET', - 'how_many_installments': 0, - 'academy_id': 0, - 'user_id': 0, - 'is_recurrent': False, - 'was_delivered': False, - 'token': None, - 'expires_at': None, + "id": 1, + "amount_per_month": 0.0, + "amount_per_quarter": 0.0, + "amount_per_half": 0.0, + "amount_per_year": 0.0, + "currency_id": 0, + "status": "CHECKING", + "type": "CHARGE", + "chosen_period": "NO_SET", + "how_many_installments": 0, + "academy_id": 0, + "user_id": 0, + "is_recurrent": False, + "was_delivered": False, + "token": None, + "expires_at": None, **data, } def invoice_item(data={}): return { - 'academy_id': 0, - 'amount': 0.0, - 'bag_id': 2, - 'currency_id': 2, - 'id': 0, - 'paid_at': None, - 'status': 'PENDING', - 'stripe_id': None, - 'user_id': 0, - 'refund_stripe_id': None, - 'refunded_at': None, + "academy_id": 0, + "amount": 0.0, + "bag_id": 2, + "currency_id": 2, + "id": 0, + "paid_at": None, + "status": "PENDING", + "stripe_id": None, + "user_id": 0, + "refund_stripe_id": None, + "refunded_at": None, **data, } def fake_stripe_pay(**kwargs): - def wrapper(user, bag, amount: int, currency='usd', description=''): - return mixer.blend('payments.Invoice', user=user, bag=bag, **kwargs) + def wrapper(user, bag, amount: int, currency="usd", description=""): + return mixer.blend("payments.Invoice", user=user, bag=bag, **kwargs) return wrapper def calculate_relative_delta(unit: float, unit_type: str): delta_args = {} - if unit_type == 'DAY': - delta_args['days'] = unit + if unit_type == "DAY": + delta_args["days"] = unit - elif unit_type == 'WEEK': - delta_args['weeks'] = unit + elif unit_type == "WEEK": + delta_args["weeks"] = unit - elif unit_type == 'MONTH': - delta_args['months'] = unit + elif unit_type == "MONTH": + delta_args["months"] = unit - elif unit_type == 'YEAR': - delta_args['years'] = unit + elif unit_type == "YEAR": + delta_args["years"] = unit return relativedelta(**delta_args) @pytest.fixture(autouse=True) def setup(monkeypatch): - monkeypatch.setattr(activity_tasks.add_activity, 'delay', MagicMock()) + 
monkeypatch.setattr(activity_tasks.add_activity, "delay", MagicMock()) yield @@ -111,34 +112,40 @@ class PaymentsTestSuite(PaymentsTestCase): 🔽🔽🔽 Subscription not found """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_subscription_not_found(self): charge_subscription.delay(1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting charge_subscription for subscription 1'), - ]) - self.assertEqual(logging.Logger.error.call_args_list, [ - call('Subscription with id 1 not found', exc_info=True), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting charge_subscription for subscription 1"), + ], + ) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call("Subscription with id 1 not found", exc_info=True), + ], + ) - self.assertEqual(self.bc.database.list_of('payments.Bag'), []) - self.assertEqual(self.bc.database.list_of('payments.Invoice'), []) - self.assertEqual(self.bc.database.list_of('payments.Subscription'), []) + self.assertEqual(self.bc.database.list_of("payments.Bag"), []) + self.assertEqual(self.bc.database.list_of("payments.Invoice"), []) + self.assertEqual(self.bc.database.list_of("payments.Subscription"), []) self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, []) """ 🔽🔽🔽 Subscription with zero Invoice """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.notify.actions.send_email_message', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_subscription_without_invoices(self): model = self.bc.database.create_v2(subscription=1) @@ -148,28 +155,35 @@ def test_subscription_without_invoices(self): charge_subscription.delay(1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting charge_subscription for subscription 1'), - ]) - self.assertEqual(logging.Logger.error.call_args_list, [ - call('Error getting bag from subscription 1: subscription-has-no-invoices', exc_info=True), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting charge_subscription for subscription 1"), + ], + ) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call("Error getting bag from subscription 1: subscription-has-no-invoices", exc_info=True), + ], + ) - self.assertEqual(self.bc.database.list_of('payments.Bag'), []) - self.assertEqual(self.bc.database.list_of('payments.Invoice'), []) + self.assertEqual(self.bc.database.list_of("payments.Bag"), []) + self.assertEqual(self.bc.database.list_of("payments.Invoice"), []) self.assertEqual( - self.bc.database.list_of('payments.Subscription'), + self.bc.database.list_of("payments.Subscription"), [ { 
**self.bc.format.to_dict(model.subscription), # 'status': 'PAYMENT_ISSUE',, - 'status': 'ERROR', - 'status_message': 'subscription-has-no-invoices', + "status": "ERROR", + "status_message": "subscription-has-no-invoices", }, - ]) + ], + ) self.assertEqual(notify_actions.send_email_message.call_args_list, []) self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, []) @@ -177,371 +191,483 @@ def test_subscription_without_invoices(self): 🔽🔽🔽 Subscription process to charge """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.notify.actions.send_email_message', MagicMock()) - @patch('breathecode.payments.tasks.renew_subscription_consumables.delay', MagicMock()) - @patch('mixer.main.LOGGER.info', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) + @patch("breathecode.payments.tasks.renew_subscription_consumables.delay", MagicMock()) + @patch("mixer.main.LOGGER.info", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_subscription_process_to_charge(self): unit = random.choice([1, 3, 6, 12]) - unit_type = 'MONTH' + unit_type = "MONTH" subscription = { - 'pay_every': unit, - 'pay_every_unit': unit_type, - 'next_payment_at': UTC_NOW - relativedelta(days=25, months=unit * 2), + "pay_every": unit, + "pay_every_unit": unit_type, + "next_payment_at": UTC_NOW - relativedelta(days=25, months=unit * 2), } model = self.bc.database.create(subscription=subscription, invoice=1) - with patch('breathecode.payments.services.stripe.Stripe.pay', - MagicMock(side_effect=fake_stripe_pay(paid_at=UTC_NOW))): + with patch( + "breathecode.payments.services.stripe.Stripe.pay", MagicMock(side_effect=fake_stripe_pay(paid_at=UTC_NOW)) + ): # remove prints from mixer logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] charge_subscription.delay(1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting charge_subscription for subscription 1'), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting charge_subscription for subscription 1"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(self.bc.database.list_of('payments.Bag'), [ - self.bc.format.to_dict(model.bag), - bag_item({ - 'academy_id': 1, - 'currency_id': 1, - 'id': 2, - 'is_recurrent': True, - 'status': 'RENEWAL', - 'user_id': 1, - }), - ]) - self.assertEqual(self.bc.database.list_of('payments.Invoice'), [ - self.bc.format.to_dict(model.invoice), - invoice_item({ - 'academy_id': 2, - 'id': 2, - 'user_id': 1, - 'paid_at': UTC_NOW, - }), - ]) + self.assertEqual( + self.bc.database.list_of("payments.Bag"), + [ + self.bc.format.to_dict(model.bag), + bag_item( + { + "academy_id": 1, + "currency_id": 1, + "id": 2, + "is_recurrent": True, + "status": "RENEWAL", + "user_id": 1, + } + ), + ], + ) + self.assertEqual( + self.bc.database.list_of("payments.Invoice"), + [ + self.bc.format.to_dict(model.invoice), + invoice_item( + { + "academy_id": 2, + "id": 2, + "user_id": 1, + "paid_at": UTC_NOW, + } + ), + ], + ) next_payment_at = model.subscription.next_payment_at delta = calculate_relative_delta(unit, unit_type) for 
_ in range(3): next_payment_at += delta - self.assertEqual(self.bc.database.list_of('payments.Subscription'), [ - { - **self.bc.format.to_dict(model.subscription), - 'status': 'ACTIVE', - 'paid_at': UTC_NOW, - 'next_payment_at': next_payment_at, - }, - ]) + self.assertEqual( + self.bc.database.list_of("payments.Subscription"), + [ + { + **self.bc.format.to_dict(model.subscription), + "status": "ACTIVE", + "paid_at": UTC_NOW, + "next_payment_at": next_payment_at, + }, + ], + ) assert notify_actions.send_email_message.call_args_list == [ - call('message', - model.user.email, { - 'SUBJECT': 'Your 4Geeks subscription was successfully renewed', - 'MESSAGE': 'The amount was $0.0', - 'BUTTON': 'See the invoice', - 'LINK': os.getenv('APP_URL')[:-1] + '/subscription/1' - }, - academy=model.academy) + call( + "message", + model.user.email, + { + "SUBJECT": "Your 4Geeks subscription was successfully renewed", + "MESSAGE": "The amount was $0.0", + "BUTTON": "See the invoice", + "LINK": os.getenv("APP_URL")[:-1] + "/subscription/1", + }, + academy=model.academy, + ) ] - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - call(1, 'bag_created', related_type='payments.Bag', related_id=2), - ]) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + call(1, "bag_created", related_type="payments.Bag", related_id=2), + ], + ) """ 🔽🔽🔽 Subscription error when try to charge """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.notify.actions.send_email_message', MagicMock()) - @patch('breathecode.payments.tasks.renew_subscription_consumables.delay', MagicMock()) - @patch('mixer.main.LOGGER.info', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) + @patch("breathecode.payments.tasks.renew_subscription_consumables.delay", MagicMock()) + @patch("mixer.main.LOGGER.info", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_subscription_error_when_try_to_charge(self): unit = random.choice([1, 3, 6, 12]) - unit_type = 'MONTH' + unit_type = "MONTH" subscription = { - 'pay_every': unit, - 'pay_every_unit': unit_type, + "pay_every": unit, + "pay_every_unit": unit_type, } model = self.bc.database.create(subscription=subscription, invoice=1) - with patch('breathecode.payments.services.stripe.Stripe.pay', MagicMock(side_effect=Exception('fake error'))): + with patch("breathecode.payments.services.stripe.Stripe.pay", MagicMock(side_effect=Exception("fake error"))): # remove prints from mixer logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] charge_subscription.delay(1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting charge_subscription for subscription 1'), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting charge_subscription for subscription 1"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(self.bc.database.list_of('payments.Bag'), [ - self.bc.format.to_dict(model.bag), - ]) - 
self.assertEqual(self.bc.database.list_of('payments.Invoice'), [ - self.bc.format.to_dict(model.invoice), - ]) - - self.assertEqual(self.bc.database.list_of('payments.Subscription'), [ - { - **self.bc.format.to_dict(model.subscription), - 'status': 'PAYMENT_ISSUE', - }, - ]) - self.assertEqual(notify_actions.send_email_message.call_args_list, [ - call('message', - model.user.email, { - 'SUBJECT': 'Your 4Geeks subscription could not be renewed', - 'MESSAGE': 'Please update your payment methods', - 'BUTTON': 'Please update your payment methods', - 'LINK': os.getenv('APP_URL')[:-1] + '/subscription/1' - }, - academy=model.academy) - ]) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - call(1, 'bag_created', related_type='payments.Bag', related_id=2), - ]) + self.assertEqual( + self.bc.database.list_of("payments.Bag"), + [ + self.bc.format.to_dict(model.bag), + ], + ) + self.assertEqual( + self.bc.database.list_of("payments.Invoice"), + [ + self.bc.format.to_dict(model.invoice), + ], + ) + + self.assertEqual( + self.bc.database.list_of("payments.Subscription"), + [ + { + **self.bc.format.to_dict(model.subscription), + "status": "PAYMENT_ISSUE", + }, + ], + ) + self.assertEqual( + notify_actions.send_email_message.call_args_list, + [ + call( + "message", + model.user.email, + { + "SUBJECT": "Your 4Geeks subscription could not be renewed", + "MESSAGE": "Please update your payment methods", + "BUTTON": "Please update your payment methods", + "LINK": os.getenv("APP_URL")[:-1] + "/subscription/1", + }, + academy=model.academy, + ) + ], + ) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + call(1, "bag_created", related_type="payments.Bag", related_id=2), + ], + ) """ 🔽🔽🔽 Subscription is over """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.notify.actions.send_email_message', MagicMock()) - @patch('breathecode.payments.tasks.renew_subscription_consumables.delay', MagicMock()) - @patch('mixer.main.LOGGER.info', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) + @patch("breathecode.payments.tasks.renew_subscription_consumables.delay", MagicMock()) + @patch("mixer.main.LOGGER.info", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_subscription_is_over(self): unit = random.choice([1, 3, 6, 12]) - unit_type = 'MONTH' + unit_type = "MONTH" subscription = { - 'pay_every': unit, - 'pay_every_unit': unit_type, - 'valid_until': UTC_NOW - relativedelta(seconds=1), + "pay_every": unit, + "pay_every_unit": unit_type, + "valid_until": UTC_NOW - relativedelta(seconds=1), } model = self.bc.database.create(subscription=subscription, invoice=1) - with patch('breathecode.payments.services.stripe.Stripe.pay', MagicMock(side_effect=Exception('fake error'))): + with patch("breathecode.payments.services.stripe.Stripe.pay", MagicMock(side_effect=Exception("fake error"))): # remove prints from mixer logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] charge_subscription.delay(1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) - - 
self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting charge_subscription for subscription 1'), - ]) - self.assertEqual(logging.Logger.error.call_args_list, [ - call('The subscription 1 is over', exc_info=True), - ]) - - self.assertEqual(self.bc.database.list_of('payments.Bag'), [ - self.bc.format.to_dict(model.bag), - ]) - self.assertEqual(self.bc.database.list_of('payments.Invoice'), [ - self.bc.format.to_dict(model.invoice), - ]) - - self.assertEqual(self.bc.database.list_of('payments.Subscription'), [ - { - **self.bc.format.to_dict(model.subscription), - }, - ]) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) + + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting charge_subscription for subscription 1"), + ], + ) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call("The subscription 1 is over", exc_info=True), + ], + ) + + self.assertEqual( + self.bc.database.list_of("payments.Bag"), + [ + self.bc.format.to_dict(model.bag), + ], + ) + self.assertEqual( + self.bc.database.list_of("payments.Invoice"), + [ + self.bc.format.to_dict(model.invoice), + ], + ) + + self.assertEqual( + self.bc.database.list_of("payments.Subscription"), + [ + { + **self.bc.format.to_dict(model.subscription), + }, + ], + ) self.assertEqual(notify_actions.send_email_message.call_args_list, []) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) """ 🔽🔽🔽 Subscription was paid """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.notify.actions.send_email_message', MagicMock()) - @patch('breathecode.payments.tasks.renew_subscription_consumables.delay', MagicMock()) - @patch('mixer.main.LOGGER.info', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.notify.actions.send_email_message", MagicMock()) + @patch("breathecode.payments.tasks.renew_subscription_consumables.delay", MagicMock()) + @patch("mixer.main.LOGGER.info", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_subscription_was_paid(self): unit = random.choice([1, 3, 6, 12]) - unit_type = 'MONTH' + unit_type = "MONTH" subscription = { - 'pay_every': unit, - 'pay_every_unit': unit_type, - 'valid_until': UTC_NOW + relativedelta(seconds=1), - 'next_payment_at': UTC_NOW + relativedelta(seconds=1), + "pay_every": unit, + "pay_every_unit": unit_type, + "valid_until": UTC_NOW + relativedelta(seconds=1), + "next_payment_at": UTC_NOW + relativedelta(seconds=1), } model = self.bc.database.create(subscription=subscription, invoice=1) - with patch('breathecode.payments.services.stripe.Stripe.pay', MagicMock(side_effect=Exception('fake error'))): + with patch("breathecode.payments.services.stripe.Stripe.pay", MagicMock(side_effect=Exception("fake error"))): # remove prints from mixer logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] charge_subscription.delay(1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) - - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting charge_subscription for subscription 1'), - ]) - 
self.assertEqual(logging.Logger.error.call_args_list, [ - call('The subscription with id 1 was paid this month', exc_info=True), - ]) - - self.assertEqual(self.bc.database.list_of('payments.Bag'), [ - self.bc.format.to_dict(model.bag), - ]) - self.assertEqual(self.bc.database.list_of('payments.Invoice'), [ - self.bc.format.to_dict(model.invoice), - ]) - - self.assertEqual(self.bc.database.list_of('payments.Subscription'), [ - { - **self.bc.format.to_dict(model.subscription), - }, - ]) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) + + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting charge_subscription for subscription 1"), + ], + ) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call("The subscription with id 1 was paid this month", exc_info=True), + ], + ) + + self.assertEqual( + self.bc.database.list_of("payments.Bag"), + [ + self.bc.format.to_dict(model.bag), + ], + ) + self.assertEqual( + self.bc.database.list_of("payments.Invoice"), + [ + self.bc.format.to_dict(model.invoice), + ], + ) + + self.assertEqual( + self.bc.database.list_of("payments.Subscription"), + [ + { + **self.bc.format.to_dict(model.subscription), + }, + ], + ) self.assertEqual(notify_actions.send_email_message.call_args_list, []) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) """ 🔽🔽🔽 Subscription try to charge, but a undexpected exception is raised, the database is rollbacked and the error is register in Subscription """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.payments.tasks.renew_subscription_consumables.delay', MagicMock()) - @patch('mixer.main.LOGGER.info', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.payments.services.stripe.Stripe.refund_payment', MagicMock()) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.payments.tasks.renew_subscription_consumables.delay", MagicMock()) + @patch("mixer.main.LOGGER.info", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.payments.services.stripe.Stripe.refund_payment", MagicMock()) def test_plan_financing_process_to_charge__but_a_undexpected_exception_is_raised__not_found_invoice_to_refund(self): unit = random.choice([1, 3, 6, 12]) - unit_type = 'MONTH' + unit_type = "MONTH" subscription = { - 'pay_every': unit, - 'pay_every_unit': unit_type, + "pay_every": unit, + "pay_every_unit": unit_type, } - invoice = {'paid_at': UTC_NOW - relativedelta(hours=24, seconds=1)} + invoice = {"paid_at": UTC_NOW - relativedelta(hours=24, seconds=1)} model = self.bc.database.create(subscription=subscription, invoice=invoice) error = self.bc.fake.text() - with patch('breathecode.payments.services.stripe.Stripe.pay', - MagicMock(side_effect=fake_stripe_pay(paid_at=UTC_NOW))): + with patch( + "breathecode.payments.services.stripe.Stripe.pay", MagicMock(side_effect=fake_stripe_pay(paid_at=UTC_NOW)) + ): # remove prints from mixer logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] - with patch('breathecode.notify.actions.send_email_message', MagicMock(side_effect=Exception(error))): + with 
patch("breathecode.notify.actions.send_email_message", MagicMock(side_effect=Exception(error))): charge_subscription.delay(1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting charge_subscription for subscription 1'), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting charge_subscription for subscription 1"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, [call(error, exc_info=True)]) - self.assertEqual(self.bc.database.list_of('payments.Bag'), [ - self.bc.format.to_dict(model.bag), - ]) - self.assertEqual(self.bc.database.list_of('payments.Invoice'), [ - self.bc.format.to_dict(model.invoice), - ]) + self.assertEqual( + self.bc.database.list_of("payments.Bag"), + [ + self.bc.format.to_dict(model.bag), + ], + ) + self.assertEqual( + self.bc.database.list_of("payments.Invoice"), + [ + self.bc.format.to_dict(model.invoice), + ], + ) - message = f'charge_subscription is failing for the subscription {model.subscription.id}: ' - message += str(error)[:250 - len(message)] + message = f"charge_subscription is failing for the subscription {model.subscription.id}: " + message += str(error)[: 250 - len(message)] - self.assertEqual(self.bc.database.list_of('payments.Subscription'), [ - { - **self.bc.format.to_dict(model.subscription), - 'status': 'ERROR', - 'status_message': message, - }, - ]) + self.assertEqual( + self.bc.database.list_of("payments.Subscription"), + [ + { + **self.bc.format.to_dict(model.subscription), + "status": "ERROR", + "status_message": message, + }, + ], + ) self.assertEqual(Stripe.refund_payment.call_args_list, []) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - call(1, 'bag_created', related_type='payments.Bag', related_id=2), - ]) - - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.payments.tasks.renew_subscription_consumables.delay', MagicMock()) - @patch('mixer.main.LOGGER.info', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.payments.services.stripe.Stripe.refund_payment', MagicMock()) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + call(1, "bag_created", related_type="payments.Bag", related_id=2), + ], + ) + + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.payments.tasks.renew_subscription_consumables.delay", MagicMock()) + @patch("mixer.main.LOGGER.info", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.payments.services.stripe.Stripe.refund_payment", MagicMock()) def test_plan_financing_process_to_charge__but_a_undexpected_exception_is_raised__found_invoice_to_refund(self): unit = random.choice([1, 3, 6, 12]) - unit_type = 'MONTH' + unit_type = "MONTH" subscription = { - 'pay_every': unit, - 'pay_every_unit': unit_type, + "pay_every": unit, + "pay_every_unit": unit_type, } - invoice = {'paid_at': UTC_NOW - relativedelta(hours=random.randint(1, 23))} + invoice = {"paid_at": UTC_NOW - relativedelta(hours=random.randint(1, 23))} model = self.bc.database.create(subscription=subscription, invoice=invoice) error = 
self.bc.fake.text() - with patch('breathecode.payments.services.stripe.Stripe.pay', - MagicMock(side_effect=fake_stripe_pay(paid_at=UTC_NOW))): + with patch( + "breathecode.payments.services.stripe.Stripe.pay", MagicMock(side_effect=fake_stripe_pay(paid_at=UTC_NOW)) + ): # remove prints from mixer logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] - with patch('breathecode.notify.actions.send_email_message', MagicMock(side_effect=Exception(error))): + with patch("breathecode.notify.actions.send_email_message", MagicMock(side_effect=Exception(error))): charge_subscription.delay(1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting charge_subscription for subscription 1'), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting charge_subscription for subscription 1"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, [call(error, exc_info=True)]) - self.assertEqual(self.bc.database.list_of('payments.Bag'), [ - self.bc.format.to_dict(model.bag), - ]) - self.assertEqual(self.bc.database.list_of('payments.Invoice'), [ - self.bc.format.to_dict(model.invoice), - ]) + self.assertEqual( + self.bc.database.list_of("payments.Bag"), + [ + self.bc.format.to_dict(model.bag), + ], + ) + self.assertEqual( + self.bc.database.list_of("payments.Invoice"), + [ + self.bc.format.to_dict(model.invoice), + ], + ) - message = f'charge_subscription is failing for the subscription {model.subscription.id}: ' - message += str(error)[:250 - len(message)] + message = f"charge_subscription is failing for the subscription {model.subscription.id}: " + message += str(error)[: 250 - len(message)] - self.assertEqual(self.bc.database.list_of('payments.Subscription'), [ - { - **self.bc.format.to_dict(model.subscription), - 'status': 'ERROR', - 'status_message': message, - }, - ]) + self.assertEqual( + self.bc.database.list_of("payments.Subscription"), + [ + { + **self.bc.format.to_dict(model.subscription), + "status": "ERROR", + "status_message": message, + }, + ], + ) self.assertEqual(Stripe.refund_payment.call_args_list, [call(model.invoice)]) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - call(1, 'bag_created', related_type='payments.Bag', related_id=2), - ]) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + call(1, "bag_created", related_type="payments.Bag", related_id=2), + ], + ) diff --git a/breathecode/payments/tests/tasks/tests_refund_mentoring_session.py b/breathecode/payments/tests/tasks/tests_refund_mentoring_session.py index fa494279b..a6659810a 100644 --- a/breathecode/payments/tests/tasks/tests_refund_mentoring_session.py +++ b/breathecode/payments/tests/tasks/tests_refund_mentoring_session.py @@ -1,6 +1,7 @@ """ Test /answer """ + import logging import random from unittest.mock import MagicMock, call @@ -24,14 +25,14 @@ def get_queryset_pks(queryset: QuerySet): @pytest.fixture(autouse=True) def setup_db(db, monkeypatch, enable_signals): enable_signals( - 'breathecode.payments.signals.consume_service', - 'breathecode.payments.signals.grant_service_permissions', - 'breathecode.payments.signals.lose_service_permissions', - 'breathecode.payments.signals.reimburse_service_units', # + 
"breathecode.payments.signals.consume_service", + "breathecode.payments.signals.grant_service_permissions", + "breathecode.payments.signals.lose_service_permissions", + "breathecode.payments.signals.reimburse_service_units", # ) - monkeypatch.setattr('logging.Logger.info', MagicMock()) - monkeypatch.setattr('logging.Logger.error', MagicMock()) - monkeypatch.setattr('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + monkeypatch.setattr("logging.Logger.info", MagicMock()) + monkeypatch.setattr("logging.Logger.error", MagicMock()) + monkeypatch.setattr("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) yield @@ -40,16 +41,22 @@ def setup_db(db, monkeypatch, enable_signals): def test_0_items(bc: Breathecode): refund_mentoring_session.delay(1) - bc.check.calls(logging.Logger.info.call_args_list, [ - call('Starting refund_mentoring_session for mentoring session 1'), - ]) - bc.check.calls(logging.Logger.error.call_args_list, [ - call('MentoringSession with id 1 not found or is invalid', exc_info=True), - ]) + bc.check.calls( + logging.Logger.info.call_args_list, + [ + call("Starting refund_mentoring_session for mentoring session 1"), + ], + ) + bc.check.calls( + logging.Logger.error.call_args_list, + [ + call("MentoringSession with id 1 not found or is invalid", exc_info=True), + ], + ) - assert bc.database.list_of('mentorship.MentorshipSession') == [] - assert bc.database.list_of('payments.ConsumptionSession') == [] - assert bc.database.list_of('payments.Consumable') == [] + assert bc.database.list_of("mentorship.MentorshipSession") == [] + assert bc.database.list_of("payments.ConsumptionSession") == [] + assert bc.database.list_of("payments.Consumable") == [] # Given: 1 MentoringSession @@ -64,25 +71,31 @@ def test_1_mentoring_session__nothing_provide(bc: Breathecode): refund_mentoring_session.delay(1) - bc.check.calls(logging.Logger.info.call_args_list, [ - call('Starting refund_mentoring_session for mentoring session 1'), - ]) - bc.check.calls(logging.Logger.error.call_args_list, [ - call('MentoringSession with id 1 not found or is invalid', exc_info=True), - ]) + bc.check.calls( + logging.Logger.info.call_args_list, + [ + call("Starting refund_mentoring_session for mentoring session 1"), + ], + ) + bc.check.calls( + logging.Logger.error.call_args_list, + [ + call("MentoringSession with id 1 not found or is invalid", exc_info=True), + ], + ) - assert bc.database.list_of('mentorship.MentorshipSession') == [ + assert bc.database.list_of("mentorship.MentorshipSession") == [ bc.format.to_dict(model.mentorship_session), ] - assert bc.database.list_of('payments.ConsumptionSession') == [] - assert bc.database.list_of('payments.Consumable') == [] + assert bc.database.list_of("payments.ConsumptionSession") == [] + assert bc.database.list_of("payments.Consumable") == [] # Given: 1 MentoringSession and 1 User # When: have mentee and not have service and have a bad status # Then: not found mentorship session def test_1_mentoring_session__just_with_mentee(bc: Breathecode, get_queryset_pks): - user = {'groups': []} + user = {"groups": []} model = bc.database.create(mentorship_session=1, user=user, group=1, permission=1) # remove prints from mixer @@ -91,18 +104,24 @@ def test_1_mentoring_session__just_with_mentee(bc: Breathecode, get_queryset_pks refund_mentoring_session.delay(1) - bc.check.calls(logging.Logger.info.call_args_list, [ - call('Starting refund_mentoring_session for mentoring session 1'), - ]) - bc.check.calls(logging.Logger.error.call_args_list, [ - 
call('MentoringSession with id 1 not found or is invalid', exc_info=True), - ]) + bc.check.calls( + logging.Logger.info.call_args_list, + [ + call("Starting refund_mentoring_session for mentoring session 1"), + ], + ) + bc.check.calls( + logging.Logger.error.call_args_list, + [ + call("MentoringSession with id 1 not found or is invalid", exc_info=True), + ], + ) - assert bc.database.list_of('mentorship.MentorshipSession') == [ + assert bc.database.list_of("mentorship.MentorshipSession") == [ bc.format.to_dict(model.mentorship_session), ] - assert bc.database.list_of('payments.ConsumptionSession') == [] - assert bc.database.list_of('payments.Consumable') == [] + assert bc.database.list_of("payments.ConsumptionSession") == [] + assert bc.database.list_of("payments.Consumable") == [] get_queryset_pks(model.user.groups.all()) == [] @@ -119,25 +138,31 @@ def test_1_mentoring_session__just_with_service(bc: Breathecode): refund_mentoring_session.delay(1) - bc.check.calls(logging.Logger.info.call_args_list, [ - call('Starting refund_mentoring_session for mentoring session 1'), - ]) - bc.check.calls(logging.Logger.error.call_args_list, [ - call('MentoringSession with id 1 not found or is invalid', exc_info=True), - ]) + bc.check.calls( + logging.Logger.info.call_args_list, + [ + call("Starting refund_mentoring_session for mentoring session 1"), + ], + ) + bc.check.calls( + logging.Logger.error.call_args_list, + [ + call("MentoringSession with id 1 not found or is invalid", exc_info=True), + ], + ) - assert bc.database.list_of('mentorship.MentorshipSession') == [ + assert bc.database.list_of("mentorship.MentorshipSession") == [ bc.format.to_dict(model.mentorship_session), ] - assert bc.database.list_of('payments.ConsumptionSession') == [] - assert bc.database.list_of('payments.Consumable') == [] + assert bc.database.list_of("payments.ConsumptionSession") == [] + assert bc.database.list_of("payments.Consumable") == [] # Given: 1 MentoringSession # When: not have service, mentee and have a right status # Then: not found mentorship session def test_1_mentoring_session__just_with_right_status(bc: Breathecode): - mentorship_session = {'status': random.choice(['PENDING', 'STARTED', 'COMPLETED'])} + mentorship_session = {"status": random.choice(["PENDING", "STARTED", "COMPLETED"])} model = bc.database.create(mentorship_session=mentorship_session) # remove prints from mixer @@ -146,32 +171,36 @@ def test_1_mentoring_session__just_with_right_status(bc: Breathecode): refund_mentoring_session.delay(1) - bc.check.calls(logging.Logger.info.call_args_list, [ - call('Starting refund_mentoring_session for mentoring session 1'), - ]) - bc.check.calls(logging.Logger.error.call_args_list, [ - call('MentoringSession with id 1 not found or is invalid', exc_info=True), - ]) + bc.check.calls( + logging.Logger.info.call_args_list, + [ + call("Starting refund_mentoring_session for mentoring session 1"), + ], + ) + bc.check.calls( + logging.Logger.error.call_args_list, + [ + call("MentoringSession with id 1 not found or is invalid", exc_info=True), + ], + ) - assert bc.database.list_of('mentorship.MentorshipSession') == [ + assert bc.database.list_of("mentorship.MentorshipSession") == [ bc.format.to_dict(model.mentorship_session), ] - assert bc.database.list_of('payments.ConsumptionSession') == [] - assert bc.database.list_of('payments.Consumable') == [] + assert bc.database.list_of("payments.ConsumptionSession") == [] + assert bc.database.list_of("payments.Consumable") == [] # Given: 1 MentoringSession, 1 User and 1 
MentorshipService # When: have service, mentee and have a right status # Then: not found mentorship session def test_1_mentoring_session__all_elements_given(bc: Breathecode, get_queryset_pks): - mentorship_session = {'status': random.choice(['FAILED', 'IGNORED'])} + mentorship_session = {"status": random.choice(["FAILED", "IGNORED"])} - user = {'groups': []} - model = bc.database.create(mentorship_session=mentorship_session, - user=user, - mentorship_service=1, - group=1, - permission=1) + user = {"groups": []} + model = bc.database.create( + mentorship_session=mentorship_session, user=user, mentorship_service=1, group=1, permission=1 + ) # remove prints from mixer logging.Logger.info.call_args_list = [] @@ -179,18 +208,24 @@ def test_1_mentoring_session__all_elements_given(bc: Breathecode, get_queryset_p refund_mentoring_session.delay(1) - bc.check.calls(logging.Logger.info.call_args_list, [ - call('Starting refund_mentoring_session for mentoring session 1'), - ]) - bc.check.calls(logging.Logger.error.call_args_list, [ - call('ConsumptionSession not found for mentorship session 1', exc_info=True), - ]) + bc.check.calls( + logging.Logger.info.call_args_list, + [ + call("Starting refund_mentoring_session for mentoring session 1"), + ], + ) + bc.check.calls( + logging.Logger.error.call_args_list, + [ + call("ConsumptionSession not found for mentorship session 1", exc_info=True), + ], + ) - assert bc.database.list_of('mentorship.MentorshipSession') == [ + assert bc.database.list_of("mentorship.MentorshipSession") == [ bc.format.to_dict(model.mentorship_session), ] - assert bc.database.list_of('payments.ConsumptionSession') == [] - assert bc.database.list_of('payments.Consumable') == [] + assert bc.database.list_of("payments.ConsumptionSession") == [] + assert bc.database.list_of("payments.Consumable") == [] get_queryset_pks(model.user.groups.all()) == [] @@ -199,21 +234,23 @@ def test_1_mentoring_session__all_elements_given(bc: Breathecode, get_queryset_p # When: consumption session is pending # Then: not refund consumable def test_consumption_session_is_pending(bc: Breathecode, get_queryset_pks): - mentorship_session = {'status': random.choice(['FAILED', 'IGNORED'])} + mentorship_session = {"status": random.choice(["FAILED", "IGNORED"])} how_many_consumables = random.randint(1, 10) how_mawy_will_consume = random.randint(1, how_many_consumables) - consumable = {'how_many': how_many_consumables} - consumption_session = {'how_many': how_mawy_will_consume, 'status': 'PENDING'} - - user = {'groups': []} - model = bc.database.create(mentorship_session=mentorship_session, - user=user, - mentorship_service=1, - consumption_session=consumption_session, - consumable=consumable, - mentorship_service_set=1, - group=1, - permission=1) + consumable = {"how_many": how_many_consumables} + consumption_session = {"how_many": how_mawy_will_consume, "status": "PENDING"} + + user = {"groups": []} + model = bc.database.create( + mentorship_session=mentorship_session, + user=user, + mentorship_service=1, + consumption_session=consumption_session, + consumable=consumable, + mentorship_service_set=1, + group=1, + permission=1, + ) # remove prints from mixer logging.Logger.info.call_args_list = [] @@ -221,23 +258,26 @@ def test_consumption_session_is_pending(bc: Breathecode, get_queryset_pks): refund_mentoring_session.delay(1) - bc.check.calls(logging.Logger.info.call_args_list, [ - call('Starting refund_mentoring_session for mentoring session 1'), - ]) + bc.check.calls( + logging.Logger.info.call_args_list, + [ + 
call("Starting refund_mentoring_session for mentoring session 1"), + ], + ) bc.check.calls(logging.Logger.error.call_args_list, []) - assert bc.database.list_of('mentorship.MentorshipSession') == [ + assert bc.database.list_of("mentorship.MentorshipSession") == [ bc.format.to_dict(model.mentorship_session), ] - assert bc.database.list_of('payments.ConsumptionSession') == [ + assert bc.database.list_of("payments.ConsumptionSession") == [ { **bc.format.to_dict(model.consumption_session), - 'status': 'CANCELLED', + "status": "CANCELLED", }, ] - assert bc.database.list_of('payments.Consumable') == [ + assert bc.database.list_of("payments.Consumable") == [ bc.format.to_dict(model.consumable), ] @@ -248,21 +288,23 @@ def test_consumption_session_is_pending(bc: Breathecode, get_queryset_pks): # When: consumption session is done # Then: not refund consumable def test_consumption_session_is_done(bc: Breathecode, get_queryset_pks): - mentorship_session = {'status': random.choice(['FAILED', 'IGNORED'])} + mentorship_session = {"status": random.choice(["FAILED", "IGNORED"])} how_many_consumables = random.randint(1, 10) how_mawy_will_consume = random.randint(1, 10) - consumable = {'how_many': how_many_consumables} - consumption_session = {'how_many': how_mawy_will_consume, 'status': 'DONE'} - - user = {'groups': []} - model = bc.database.create(mentorship_session=mentorship_session, - user=user, - mentorship_service=1, - consumption_session=consumption_session, - consumable=consumable, - mentorship_service_set=1, - group=1, - permission=1) + consumable = {"how_many": how_many_consumables} + consumption_session = {"how_many": how_mawy_will_consume, "status": "DONE"} + + user = {"groups": []} + model = bc.database.create( + mentorship_session=mentorship_session, + user=user, + mentorship_service=1, + consumption_session=consumption_session, + consumable=consumable, + mentorship_service_set=1, + group=1, + permission=1, + ) # remove prints from mixer logging.Logger.info.call_args_list = [] @@ -270,28 +312,32 @@ def test_consumption_session_is_done(bc: Breathecode, get_queryset_pks): refund_mentoring_session.delay(1) - bc.check.calls(logging.Logger.info.call_args_list, [ - call('Starting refund_mentoring_session for mentoring session 1'), - call('Refunding consumption session because it was discounted'), - ]) + bc.check.calls( + logging.Logger.info.call_args_list, + [ + call("Starting refund_mentoring_session for mentoring session 1"), + call("Refunding consumption session because it was discounted"), + ], + ) bc.check.calls(logging.Logger.error.call_args_list, []) - assert bc.database.list_of('mentorship.MentorshipSession') == [ + assert bc.database.list_of("mentorship.MentorshipSession") == [ bc.format.to_dict(model.mentorship_session), ] - assert bc.database.list_of('payments.ConsumptionSession') == [ + assert bc.database.list_of("payments.ConsumptionSession") == [ { **bc.format.to_dict(model.consumption_session), - 'status': 'CANCELLED', + "status": "CANCELLED", }, ] - assert bc.database.list_of('payments.Consumable') == [{ - **bc.format.to_dict(model.consumable), - 'how_many': - how_many_consumables + how_mawy_will_consume, - }] + assert bc.database.list_of("payments.Consumable") == [ + { + **bc.format.to_dict(model.consumable), + "how_many": how_many_consumables + how_mawy_will_consume, + } + ] get_queryset_pks(model.user.groups.all()) == [] @@ -300,21 +346,23 @@ def test_consumption_session_is_done(bc: Breathecode, get_queryset_pks): # When: consumption session is done # Then: not refund consumable 
def test_consumption_session_is_cancelled(bc: Breathecode, get_queryset_pks): - mentorship_session = {'status': random.choice(['FAILED', 'IGNORED'])} + mentorship_session = {"status": random.choice(["FAILED", "IGNORED"])} how_many_consumables = random.randint(1, 10) how_mawy_will_consume = random.randint(1, 10) - consumable = {'how_many': how_many_consumables} - consumption_session = {'how_many': how_mawy_will_consume, 'status': 'CANCELLED'} - - user = {'groups': []} - model = bc.database.create(mentorship_session=mentorship_session, - user=user, - mentorship_service=1, - consumption_session=consumption_session, - consumable=consumable, - mentorship_service_set=1, - group=1, - permission=1) + consumable = {"how_many": how_many_consumables} + consumption_session = {"how_many": how_mawy_will_consume, "status": "CANCELLED"} + + user = {"groups": []} + model = bc.database.create( + mentorship_session=mentorship_session, + user=user, + mentorship_service=1, + consumption_session=consumption_session, + consumable=consumable, + mentorship_service_set=1, + group=1, + permission=1, + ) # remove prints from mixer logging.Logger.info.call_args_list = [] @@ -322,22 +370,28 @@ def test_consumption_session_is_cancelled(bc: Breathecode, get_queryset_pks): refund_mentoring_session.delay(1) - bc.check.calls(logging.Logger.info.call_args_list, [ - call('Starting refund_mentoring_session for mentoring session 1'), - ]) - bc.check.calls(logging.Logger.error.call_args_list, [ - call('ConsumptionSession not found for mentorship session 1', exc_info=True), - ]) + bc.check.calls( + logging.Logger.info.call_args_list, + [ + call("Starting refund_mentoring_session for mentoring session 1"), + ], + ) + bc.check.calls( + logging.Logger.error.call_args_list, + [ + call("ConsumptionSession not found for mentorship session 1", exc_info=True), + ], + ) - assert bc.database.list_of('mentorship.MentorshipSession') == [ + assert bc.database.list_of("mentorship.MentorshipSession") == [ bc.format.to_dict(model.mentorship_session), ] - assert bc.database.list_of('payments.ConsumptionSession') == [ + assert bc.database.list_of("payments.ConsumptionSession") == [ bc.format.to_dict(model.consumption_session), ] - assert bc.database.list_of('payments.Consumable') == [ + assert bc.database.list_of("payments.Consumable") == [ bc.format.to_dict(model.consumable), ] @@ -348,22 +402,24 @@ def test_consumption_session_is_cancelled(bc: Breathecode, get_queryset_pks): # When: consumption session is done and consumable how many is 0 # Then: not refund consumable def test_consumable_wasted(bc: Breathecode, get_queryset_pks): - mentorship_session = {'status': random.choice(['FAILED', 'IGNORED'])} + mentorship_session = {"status": random.choice(["FAILED", "IGNORED"])} how_many_consumables = 0 how_mawy_will_consume = random.randint(1, 10) - consumable = {'how_many': how_many_consumables} - consumption_session = {'how_many': how_mawy_will_consume, 'status': 'DONE'} - - user = {'groups': []} - groups = [{'permissions': n} for n in range(1, 4)] - model = bc.database.create(mentorship_session=mentorship_session, - user=user, - mentorship_service=1, - consumption_session=consumption_session, - consumable=consumable, - mentorship_service_set=1, - group=groups, - permission=2) + consumable = {"how_many": how_many_consumables} + consumption_session = {"how_many": how_mawy_will_consume, "status": "DONE"} + + user = {"groups": []} + groups = [{"permissions": n} for n in range(1, 4)] + model = bc.database.create( + mentorship_session=mentorship_session, 
+ user=user, + mentorship_service=1, + consumption_session=consumption_session, + consumable=consumable, + mentorship_service_set=1, + group=groups, + permission=2, + ) # remove prints from mixer logging.Logger.info.call_args_list = [] @@ -371,27 +427,31 @@ def test_consumable_wasted(bc: Breathecode, get_queryset_pks): refund_mentoring_session.delay(1) - bc.check.calls(logging.Logger.info.call_args_list, [ - call('Starting refund_mentoring_session for mentoring session 1'), - call('Refunding consumption session because it was discounted'), - ]) + bc.check.calls( + logging.Logger.info.call_args_list, + [ + call("Starting refund_mentoring_session for mentoring session 1"), + call("Refunding consumption session because it was discounted"), + ], + ) bc.check.calls(logging.Logger.error.call_args_list, []) - assert bc.database.list_of('mentorship.MentorshipSession') == [ + assert bc.database.list_of("mentorship.MentorshipSession") == [ bc.format.to_dict(model.mentorship_session), ] - assert bc.database.list_of('payments.ConsumptionSession') == [ + assert bc.database.list_of("payments.ConsumptionSession") == [ { **bc.format.to_dict(model.consumption_session), - 'status': 'CANCELLED', + "status": "CANCELLED", }, ] - assert bc.database.list_of('payments.Consumable') == [{ - **bc.format.to_dict(model.consumable), - 'how_many': - how_many_consumables + how_mawy_will_consume, - }] + assert bc.database.list_of("payments.Consumable") == [ + { + **bc.format.to_dict(model.consumable), + "how_many": how_many_consumables + how_mawy_will_consume, + } + ] assert get_queryset_pks(model.user.groups.all()) == [1, 2, 3] diff --git a/breathecode/payments/tests/tasks/tests_renew_consumables.py b/breathecode/payments/tests/tasks/tests_renew_consumables.py index 8411e2466..ee0708336 100644 --- a/breathecode/payments/tests/tasks/tests_renew_consumables.py +++ b/breathecode/payments/tests/tasks/tests_renew_consumables.py @@ -1,6 +1,7 @@ """ Test /answer """ + import logging import random from unittest.mock import MagicMock, call, patch @@ -19,779 +20,902 @@ def consumable_item(data={}): return { - 'cohort_set_id': None, - 'event_type_set_id': None, - 'how_many': -1, - 'id': 0, - 'mentorship_service_set_id': None, - 'service_item_id': 0, - 'unit_type': 'UNIT', - 'user_id': 0, - 'valid_until': UTC_NOW, - 'sort_priority': 1, + "cohort_set_id": None, + "event_type_set_id": None, + "how_many": -1, + "id": 0, + "mentorship_service_set_id": None, + "service_item_id": 0, + "unit_type": "UNIT", + "user_id": 0, + "valid_until": UTC_NOW, + "sort_priority": 1, **data, } -#FIXME: create_v2 fail in this test file +# FIXME: create_v2 fail in this test file class PaymentsTestSuite(PaymentsTestCase): """ 🔽🔽🔽 ServiceStockScheduler not found """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_scheduler_not_found(self): renew_consumables.delay(1) self.assertEqual( logging.Logger.info.call_args_list, [ - call('Starting renew_consumables for service stock scheduler 1'), + call("Starting renew_consumables for service stock scheduler 1"), # retrying - call('Starting renew_consumables for service stock scheduler 1'), - ]) - self.assertEqual(logging.Logger.error.call_args_list, [ - call('ServiceStockScheduler with id 1 not found', exc_info=True), - ]) + 
call("Starting renew_consumables for service stock scheduler 1"), + ], + ) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call("ServiceStockScheduler with id 1 not found", exc_info=True), + ], + ) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) """ 🔽🔽🔽 ServiceStockScheduler with PlanFinancing that is over """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_plan_financing_is_over(self): plan_financing = { - 'monthly_price': random.random() * 99.99 + 0.01, - 'plan_expires_at': UTC_NOW - relativedelta(seconds=1), - 'valid_until': UTC_NOW - relativedelta(seconds=1), + "monthly_price": random.random() * 99.99 + 0.01, + "plan_expires_at": UTC_NOW - relativedelta(seconds=1), + "valid_until": UTC_NOW - relativedelta(seconds=1), } - plan = {'is_renewable': False} + plan = {"is_renewable": False} - model = self.bc.database.create(service_stock_scheduler=1, - plan=plan, - plan_financing=plan_financing, - plan_service_item_handler=1) + model = self.bc.database.create( + service_stock_scheduler=1, plan=plan, plan_financing=plan_financing, plan_service_item_handler=1 + ) logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] renew_consumables.delay(1) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting renew_consumables for service stock scheduler 1'), - ]) - self.assertEqual(logging.Logger.error.call_args_list, [ - call('The plan financing 1 is over', exc_info=True), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting renew_consumables for service stock scheduler 1"), + ], + ) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call("The plan financing 1 is over", exc_info=True), + ], + ) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) """ 🔽🔽🔽 ServiceStockScheduler with PlanFinancing without be paid """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_plan_financing_without_be_paid(self): plan_financing = { - 'monthly_price': random.random() * 99.99 + 0.01, - 'plan_expires_at': UTC_NOW + relativedelta(minutes=3), - 'valid_until': UTC_NOW - relativedelta(seconds=1), + "monthly_price": random.random() * 99.99 + 0.01, + "plan_expires_at": UTC_NOW + relativedelta(minutes=3), + "valid_until": UTC_NOW - relativedelta(seconds=1), } - plan = {'is_renewable': False} + plan = {"is_renewable": False} - model = self.bc.database.create(service_stock_scheduler=1, - plan=plan, - plan_financing=plan_financing, - plan_service_item_handler=1) + model = self.bc.database.create( + service_stock_scheduler=1, plan=plan, plan_financing=plan_financing, plan_service_item_handler=1 + ) logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] renew_consumables.delay(1) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting 
renew_consumables for service stock scheduler 1'), - ]) - self.assertEqual(logging.Logger.error.call_args_list, [ - call('The plan financing 1 needs to be paid to renew the consumables', exc_info=True), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting renew_consumables for service stock scheduler 1"), + ], + ) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call("The plan financing 1 needs to be paid to renew the consumables", exc_info=True), + ], + ) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) """ 🔽🔽🔽 ServiceStockScheduler with PlanFinancing without a PlanServiceItem linked to a resource """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_plan_financing_with_plan_service_item_without_a_resource_linked(self): plan_financing = { - 'monthly_price': random.random() * 99.99 + 0.01, - 'plan_expires_at': UTC_NOW + relativedelta(minutes=3), - 'valid_until': UTC_NOW - relativedelta(seconds=1), - 'next_payment_at': UTC_NOW + relativedelta(minutes=3), + "monthly_price": random.random() * 99.99 + 0.01, + "plan_expires_at": UTC_NOW + relativedelta(minutes=3), + "valid_until": UTC_NOW - relativedelta(seconds=1), + "next_payment_at": UTC_NOW + relativedelta(minutes=3), } - plan = {'is_renewable': False} + plan = {"is_renewable": False} - model = self.bc.database.create(service_stock_scheduler=1, - plan=plan, - plan_financing=plan_financing, - plan_service_item_handler=1) + model = self.bc.database.create( + service_stock_scheduler=1, plan=plan, plan_financing=plan_financing, plan_service_item_handler=1 + ) logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] renew_consumables.delay(1) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting renew_consumables for service stock scheduler 1'), - ]) - self.assertEqual(logging.Logger.error.call_args_list, [ - call('The Plan not have a resource linked to it ' - 'for the ServiceStockScheduler 1'), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting renew_consumables for service stock scheduler 1"), + ], + ) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call("The Plan not have a resource linked to it " "for the ServiceStockScheduler 1"), + ], + ) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) """ 🔽🔽🔽 ServiceStockScheduler with PlanFinancing with a PlanServiceItem linked to a resource """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_plan_financing_with_plan_service_item_with_two_cohorts_linked(self): plan_financing = { - 'monthly_price': random.random() * 99.99 + 0.01, - 'plan_expires_at': UTC_NOW + relativedelta(minutes=5), - 'valid_until': UTC_NOW - relativedelta(seconds=4), - 'next_payment_at': UTC_NOW + relativedelta(minutes=3), + "monthly_price": 
random.random() * 99.99 + 0.01, + "plan_expires_at": UTC_NOW + relativedelta(minutes=5), + "valid_until": UTC_NOW - relativedelta(seconds=4), + "next_payment_at": UTC_NOW + relativedelta(minutes=3), } - plan = {'is_renewable': False} - service_item = {'how_many': -1} + plan = {"is_renewable": False} + service_item = {"how_many": -1} if random.randint(0, 1) == 1: - service_item['how_many'] = random.randint(1, 100) - academy = {'available_as_saas': True} - - model = self.bc.database.create(service_stock_scheduler=1, - plan=plan, - service_item=service_item, - plan_financing=plan_financing, - plan_service_item_handler=1, - cohort=2, - cohort_set=2, - academy=academy) + service_item["how_many"] = random.randint(1, 100) + academy = {"available_as_saas": True} + + model = self.bc.database.create( + service_stock_scheduler=1, + plan=plan, + service_item=service_item, + plan_financing=plan_financing, + plan_service_item_handler=1, + cohort=2, + cohort_set=2, + academy=academy, + ) logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] renew_consumables.delay(1) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting renew_consumables for service stock scheduler 1'), - call('The consumable 1 for cohort set 1 was built'), - call('The scheduler 1 was renewed'), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting renew_consumables for service stock scheduler 1"), + call("The consumable 1 for cohort set 1 was built"), + call("The scheduler 1 was renewed"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), [ - consumable_item({ - 'cohort_set_id': 1, - 'id': 1, - 'service_item_id': 1, - 'user_id': 1, - 'how_many': model.service_item.how_many, - 'valid_until': UTC_NOW + relativedelta(minutes=5), - }), - ]) + self.assertEqual( + self.bc.database.list_of("payments.Consumable"), + [ + consumable_item( + { + "cohort_set_id": 1, + "id": 1, + "service_item_id": 1, + "user_id": 1, + "how_many": model.service_item.how_many, + "valid_until": UTC_NOW + relativedelta(minutes=5), + } + ), + ], + ) """ 🔽🔽🔽 ServiceStockScheduler with PlanFinancing with a PlanServiceItem linked to a resource """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_plan_financing_with_plan_service_item_with_two_mentorship_services_linked(self): plan_financing = { - 'monthly_price': random.random() * 99.99 + 0.01, - 'plan_expires_at': UTC_NOW + relativedelta(minutes=5), - 'valid_until': UTC_NOW - relativedelta(seconds=4), - 'next_payment_at': UTC_NOW + relativedelta(minutes=3), + "monthly_price": random.random() * 99.99 + 0.01, + "plan_expires_at": UTC_NOW + relativedelta(minutes=5), + "valid_until": UTC_NOW - relativedelta(seconds=4), + "next_payment_at": UTC_NOW + relativedelta(minutes=3), } - service = {'type': 'MENTORSHIP_SERVICE_SET'} - plan = {'is_renewable': False} - service_item = {'how_many': -1} + service = {"type": "MENTORSHIP_SERVICE_SET"} + plan = {"is_renewable": False} + service_item = {"how_many": -1} if random.randint(0, 1) == 1: - service_item['how_many'] = random.randint(1, 100) - - model = self.bc.database.create(service_stock_scheduler=1, - plan=plan, - service_item=service_item, - 
plan_financing=plan_financing, - plan_service_item_handler=1, - mentorship_service=2, - mentorship_service_set=1, - service=service) + service_item["how_many"] = random.randint(1, 100) + + model = self.bc.database.create( + service_stock_scheduler=1, + plan=plan, + service_item=service_item, + plan_financing=plan_financing, + plan_service_item_handler=1, + mentorship_service=2, + mentorship_service_set=1, + service=service, + ) logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] renew_consumables.delay(1) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting renew_consumables for service stock scheduler 1'), - call('The consumable 1 for mentorship service set 1 was built'), - call('The scheduler 1 was renewed'), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting renew_consumables for service stock scheduler 1"), + call("The consumable 1 for mentorship service set 1 was built"), + call("The scheduler 1 was renewed"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), [ - consumable_item({ - 'mentorship_service_set_id': 1, - 'id': 1, - 'service_item_id': 1, - 'user_id': 1, - 'how_many': model.service_item.how_many, - 'valid_until': UTC_NOW + relativedelta(minutes=5), - }), - ]) + self.assertEqual( + self.bc.database.list_of("payments.Consumable"), + [ + consumable_item( + { + "mentorship_service_set_id": 1, + "id": 1, + "service_item_id": 1, + "user_id": 1, + "how_many": model.service_item.how_many, + "valid_until": UTC_NOW + relativedelta(minutes=5), + } + ), + ], + ) """ 🔽🔽🔽 ServiceStockScheduler with PlanFinancing, do not needs renew """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_plan_financing_with_plan_service_item__do_not_needs_renew(self): plan_financing = { - 'monthly_price': random.random() * 99.99 + 0.01, - 'plan_expires_at': UTC_NOW + relativedelta(minutes=5), - 'valid_until': UTC_NOW - relativedelta(seconds=4), - 'next_payment_at': UTC_NOW + relativedelta(minutes=3), + "monthly_price": random.random() * 99.99 + 0.01, + "plan_expires_at": UTC_NOW + relativedelta(minutes=5), + "valid_until": UTC_NOW - relativedelta(seconds=4), + "next_payment_at": UTC_NOW + relativedelta(minutes=3), } service_stock_scheduler = { - 'valid_until': UTC_NOW - relativedelta(seconds=1), + "valid_until": UTC_NOW - relativedelta(seconds=1), } - plan = {'is_renewable': False} + plan = {"is_renewable": False} - model = self.bc.database.create(service_stock_scheduler=service_stock_scheduler, - plan=plan, - plan_financing=plan_financing, - plan_service_item_handler=1, - mentorship_service=2, - mentorship_service_set=1) + model = self.bc.database.create( + service_stock_scheduler=service_stock_scheduler, + plan=plan, + plan_financing=plan_financing, + plan_service_item_handler=1, + mentorship_service=2, + mentorship_service_set=1, + ) logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] renew_consumables.delay(1) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting renew_consumables for service stock scheduler 1'), - call("The scheduler 1 don't needs to be renewed"), - ]) + self.assertEqual( + 
logging.Logger.info.call_args_list, + [ + call("Starting renew_consumables for service stock scheduler 1"), + call("The scheduler 1 don't needs to be renewed"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) """ 🔽🔽🔽 ServiceStockScheduler with Subscription that is over """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_subscription__plan__is_over(self): subscription = { - 'monthly_price': random.random() * 99.99 + 0.01, - 'plan_expires_at': UTC_NOW - relativedelta(seconds=1), - 'valid_until': UTC_NOW - relativedelta(seconds=1), + "monthly_price": random.random() * 99.99 + 0.01, + "plan_expires_at": UTC_NOW - relativedelta(seconds=1), + "valid_until": UTC_NOW - relativedelta(seconds=1), } - plan = {'is_renewable': False} + plan = {"is_renewable": False} - model = self.bc.database.create(service_stock_scheduler=1, - plan=plan, - subscription=subscription, - plan_service_item_handler=1) + model = self.bc.database.create( + service_stock_scheduler=1, plan=plan, subscription=subscription, plan_service_item_handler=1 + ) logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] renew_consumables.delay(1) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting renew_consumables for service stock scheduler 1'), - ]) - self.assertEqual(logging.Logger.error.call_args_list, [ - call('The subscription 1 is over', exc_info=True), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting renew_consumables for service stock scheduler 1"), + ], + ) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call("The subscription 1 is over", exc_info=True), + ], + ) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) """ 🔽🔽🔽 ServiceStockScheduler with Subscription without be paid """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_subscription__plan__without_be_paid(self): subscription = { - 'monthly_price': random.random() * 99.99 + 0.01, - 'plan_expires_at': UTC_NOW - relativedelta(seconds=1), - 'valid_until': UTC_NOW + relativedelta(minutes=3), + "monthly_price": random.random() * 99.99 + 0.01, + "plan_expires_at": UTC_NOW - relativedelta(seconds=1), + "valid_until": UTC_NOW + relativedelta(minutes=3), } - plan = {'is_renewable': False} + plan = {"is_renewable": False} - model = self.bc.database.create(service_stock_scheduler=1, - plan=plan, - subscription=subscription, - plan_service_item_handler=1) + model = self.bc.database.create( + service_stock_scheduler=1, plan=plan, subscription=subscription, plan_service_item_handler=1 + ) logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] renew_consumables.delay(1) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting 
renew_consumables for service stock scheduler 1'), - ]) - self.assertEqual(logging.Logger.error.call_args_list, [ - call('The subscription 1 needs to be paid to renew the consumables', exc_info=True), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting renew_consumables for service stock scheduler 1"), + ], + ) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call("The subscription 1 needs to be paid to renew the consumables", exc_info=True), + ], + ) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) """ 🔽🔽🔽 ServiceStockScheduler with Subscription without a PlanServiceItem linked to a resource """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_subscription__plan__with_plan_service_item_without_a_resource_linked(self): subscription = { - 'monthly_price': random.random() * 99.99 + 0.01, - 'plan_expires_at': UTC_NOW - relativedelta(seconds=1), - 'valid_until': UTC_NOW + relativedelta(minutes=3), - 'next_payment_at': UTC_NOW + relativedelta(minutes=3), + "monthly_price": random.random() * 99.99 + 0.01, + "plan_expires_at": UTC_NOW - relativedelta(seconds=1), + "valid_until": UTC_NOW + relativedelta(minutes=3), + "next_payment_at": UTC_NOW + relativedelta(minutes=3), } - plan = {'is_renewable': False} + plan = {"is_renewable": False} - model = self.bc.database.create(service_stock_scheduler=1, - plan=plan, - subscription=subscription, - plan_service_item_handler=1) + model = self.bc.database.create( + service_stock_scheduler=1, plan=plan, subscription=subscription, plan_service_item_handler=1 + ) logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] renew_consumables.delay(1) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting renew_consumables for service stock scheduler 1'), - ]) - self.assertEqual(logging.Logger.error.call_args_list, [ - call('The Plan not have a resource linked to it ' - 'for the ServiceStockScheduler 1'), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting renew_consumables for service stock scheduler 1"), + ], + ) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call("The Plan not have a resource linked to it " "for the ServiceStockScheduler 1"), + ], + ) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) """ 🔽🔽🔽 ServiceStockScheduler with Subscription with a PlanServiceItem linked to a resource """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_subscription__plan__with_plan_service_item_with_two_cohorts_linked(self): subscription = { - 'monthly_price': random.random() * 99.99 + 0.01, - 'plan_expires_at': UTC_NOW - relativedelta(seconds=4), - 'valid_until': UTC_NOW + relativedelta(minutes=5), - 'next_payment_at': UTC_NOW + relativedelta(minutes=3), + "monthly_price": random.random() * 
99.99 + 0.01, + "plan_expires_at": UTC_NOW - relativedelta(seconds=4), + "valid_until": UTC_NOW + relativedelta(minutes=5), + "next_payment_at": UTC_NOW + relativedelta(minutes=3), } - plan = {'is_renewable': False} - service_item = {'how_many': -1} + plan = {"is_renewable": False} + service_item = {"how_many": -1} if random.randint(0, 1) == 1: - service_item['how_many'] = random.randint(1, 100) - academy = {'available_as_saas': True} - - model = self.bc.database.create(service_stock_scheduler=1, - plan=plan, - service_item=service_item, - subscription=subscription, - plan_service_item_handler=1, - cohort=2, - cohort_set=2, - academy=academy) + service_item["how_many"] = random.randint(1, 100) + academy = {"available_as_saas": True} + + model = self.bc.database.create( + service_stock_scheduler=1, + plan=plan, + service_item=service_item, + subscription=subscription, + plan_service_item_handler=1, + cohort=2, + cohort_set=2, + academy=academy, + ) logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] renew_consumables.delay(1) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting renew_consumables for service stock scheduler 1'), - call('The consumable 1 for cohort set 1 was built'), - call('The scheduler 1 was renewed'), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting renew_consumables for service stock scheduler 1"), + call("The consumable 1 for cohort set 1 was built"), + call("The scheduler 1 was renewed"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), [ - consumable_item({ - 'cohort_set_id': 1, - 'id': 1, - 'service_item_id': 1, - 'user_id': 1, - 'how_many': model.service_item.how_many, - 'valid_until': UTC_NOW + relativedelta(minutes=5), - }), - ]) + self.assertEqual( + self.bc.database.list_of("payments.Consumable"), + [ + consumable_item( + { + "cohort_set_id": 1, + "id": 1, + "service_item_id": 1, + "user_id": 1, + "how_many": model.service_item.how_many, + "valid_until": UTC_NOW + relativedelta(minutes=5), + } + ), + ], + ) """ 🔽🔽🔽 ServiceStockScheduler with Subscription with a PlanServiceItem linked to a resource """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_subscription__plan__with_plan_service_item_with_two_mentorship_services_linked(self): subscription = { - 'monthly_price': random.random() * 99.99 + 0.01, - 'plan_expires_at': UTC_NOW - relativedelta(seconds=4), - 'valid_until': UTC_NOW + relativedelta(minutes=5), - 'next_payment_at': UTC_NOW + relativedelta(minutes=3), + "monthly_price": random.random() * 99.99 + 0.01, + "plan_expires_at": UTC_NOW - relativedelta(seconds=4), + "valid_until": UTC_NOW + relativedelta(minutes=5), + "next_payment_at": UTC_NOW + relativedelta(minutes=3), } - service = {'type': 'MENTORSHIP_SERVICE_SET'} - plan = {'is_renewable': False} - service_item = {'how_many': -1} + service = {"type": "MENTORSHIP_SERVICE_SET"} + plan = {"is_renewable": False} + service_item = {"how_many": -1} if random.randint(0, 1) == 1: - service_item['how_many'] = random.randint(1, 100) - - model = self.bc.database.create(service_stock_scheduler=1, - plan=plan, - service_item=service_item, - subscription=subscription, 
- plan_service_item_handler=1, - mentorship_service=2, - mentorship_service_set=1, - service=service) + service_item["how_many"] = random.randint(1, 100) + + model = self.bc.database.create( + service_stock_scheduler=1, + plan=plan, + service_item=service_item, + subscription=subscription, + plan_service_item_handler=1, + mentorship_service=2, + mentorship_service_set=1, + service=service, + ) logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] renew_consumables.delay(1) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting renew_consumables for service stock scheduler 1'), - call('The consumable 1 for mentorship service set 1 was built'), - call('The scheduler 1 was renewed'), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting renew_consumables for service stock scheduler 1"), + call("The consumable 1 for mentorship service set 1 was built"), + call("The scheduler 1 was renewed"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), [ - consumable_item({ - 'mentorship_service_set_id': 1, - 'id': 1, - 'service_item_id': 1, - 'user_id': 1, - 'how_many': model.service_item.how_many, - 'valid_until': UTC_NOW + relativedelta(minutes=5), - }), - ]) + self.assertEqual( + self.bc.database.list_of("payments.Consumable"), + [ + consumable_item( + { + "mentorship_service_set_id": 1, + "id": 1, + "service_item_id": 1, + "user_id": 1, + "how_many": model.service_item.how_many, + "valid_until": UTC_NOW + relativedelta(minutes=5), + } + ), + ], + ) """ 🔽🔽🔽 ServiceStockScheduler with Subscription, do not needs renew """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_subscription__plan__with_plan_service_item__do_not_needs_renew(self): subscription = { - 'monthly_price': random.random() * 99.99 + 0.01, - 'plan_expires_at': UTC_NOW - relativedelta(seconds=4), - 'valid_until': UTC_NOW + relativedelta(minutes=5), - 'next_payment_at': UTC_NOW + relativedelta(minutes=3), + "monthly_price": random.random() * 99.99 + 0.01, + "plan_expires_at": UTC_NOW - relativedelta(seconds=4), + "valid_until": UTC_NOW + relativedelta(minutes=5), + "next_payment_at": UTC_NOW + relativedelta(minutes=3), } service_stock_scheduler = { - 'valid_until': UTC_NOW - relativedelta(seconds=1), + "valid_until": UTC_NOW - relativedelta(seconds=1), } - plan = {'is_renewable': False} + plan = {"is_renewable": False} - model = self.bc.database.create(service_stock_scheduler=service_stock_scheduler, - plan=plan, - subscription=subscription, - plan_service_item_handler=1, - mentorship_service=2, - mentorship_service_set=1) + model = self.bc.database.create( + service_stock_scheduler=service_stock_scheduler, + plan=plan, + subscription=subscription, + plan_service_item_handler=1, + mentorship_service=2, + mentorship_service_set=1, + ) logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] renew_consumables.delay(1) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting renew_consumables for service stock scheduler 1'), - call("The scheduler 1 don't needs to be renewed"), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting 
renew_consumables for service stock scheduler 1"), + call("The scheduler 1 don't needs to be renewed"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) """ 🔽🔽🔽 ServiceStockScheduler with PlanFinancing that is over """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_subscription__service_item__is_over(self): subscription = { - 'monthly_price': random.random() * 99.99 + 0.01, - 'plan_expires_at': UTC_NOW - relativedelta(seconds=1), - 'valid_until': UTC_NOW - relativedelta(seconds=1), + "monthly_price": random.random() * 99.99 + 0.01, + "plan_expires_at": UTC_NOW - relativedelta(seconds=1), + "valid_until": UTC_NOW - relativedelta(seconds=1), } - plan = {'is_renewable': False} + plan = {"is_renewable": False} - model = self.bc.database.create(service_stock_scheduler=1, - plan=plan, - subscription=subscription, - subscription_service_item=1) + model = self.bc.database.create( + service_stock_scheduler=1, plan=plan, subscription=subscription, subscription_service_item=1 + ) logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] renew_consumables.delay(1) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting renew_consumables for service stock scheduler 1'), - ]) - self.assertEqual(logging.Logger.error.call_args_list, [ - call('The subscription 1 is over', exc_info=True), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting renew_consumables for service stock scheduler 1"), + ], + ) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call("The subscription 1 is over", exc_info=True), + ], + ) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) """ 🔽🔽🔽 ServiceStockScheduler with PlanFinancing without be paid """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_subscription__service_item__without_be_paid(self): subscription = { - 'monthly_price': random.random() * 99.99 + 0.01, - 'plan_expires_at': UTC_NOW - relativedelta(seconds=1), - 'valid_until': UTC_NOW + relativedelta(minutes=3), + "monthly_price": random.random() * 99.99 + 0.01, + "plan_expires_at": UTC_NOW - relativedelta(seconds=1), + "valid_until": UTC_NOW + relativedelta(minutes=3), } - model = self.bc.database.create(service_stock_scheduler=1, - subscription=subscription, - subscription_service_item=1) + model = self.bc.database.create( + service_stock_scheduler=1, subscription=subscription, subscription_service_item=1 + ) logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] renew_consumables.delay(1) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting renew_consumables for service stock scheduler 1'), - ]) - self.assertEqual(logging.Logger.error.call_args_list, [ - call('The subscription 1 
needs to be paid to renew the consumables', exc_info=True), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting renew_consumables for service stock scheduler 1"), + ], + ) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call("The subscription 1 needs to be paid to renew the consumables", exc_info=True), + ], + ) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) """ 🔽🔽🔽 ServiceStockScheduler with PlanFinancing without a PlanServiceItem linked to a resource """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_subscription__service_item__with_plan_service_item_without_a_resource_linked(self): subscription = { - 'monthly_price': random.random() * 99.99 + 0.01, - 'plan_expires_at': UTC_NOW - relativedelta(seconds=1), - 'valid_until': UTC_NOW + relativedelta(minutes=3), - 'next_payment_at': UTC_NOW + relativedelta(minutes=3), + "monthly_price": random.random() * 99.99 + 0.01, + "plan_expires_at": UTC_NOW - relativedelta(seconds=1), + "valid_until": UTC_NOW + relativedelta(minutes=3), + "next_payment_at": UTC_NOW + relativedelta(minutes=3), } - model = self.bc.database.create(service_stock_scheduler=1, - subscription=subscription, - subscription_service_item=1) + model = self.bc.database.create( + service_stock_scheduler=1, subscription=subscription, subscription_service_item=1 + ) logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] renew_consumables.delay(1) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting renew_consumables for service stock scheduler 1'), - ]) - self.assertEqual(logging.Logger.error.call_args_list, [ - call('The Plan not have a resource linked to it ' - 'for the ServiceStockScheduler 1'), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting renew_consumables for service stock scheduler 1"), + ], + ) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call("The Plan not have a resource linked to it " "for the ServiceStockScheduler 1"), + ], + ) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) """ 🔽🔽🔽 ServiceStockScheduler with PlanFinancing with a PlanServiceItem linked to a resource """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_subscription__service_item__with_plan_service_item_with_two_cohorts_linked(self): subscription = { - 'monthly_price': random.random() * 99.99 + 0.01, - 'plan_expires_at': UTC_NOW - relativedelta(seconds=2), - 'valid_until': UTC_NOW + relativedelta(minutes=3), - 'next_payment_at': UTC_NOW + relativedelta(minutes=3), + "monthly_price": random.random() * 99.99 + 0.01, + "plan_expires_at": UTC_NOW - relativedelta(seconds=2), + "valid_until": UTC_NOW + relativedelta(minutes=3), + "next_payment_at": UTC_NOW + relativedelta(minutes=3), } - service_item = {'how_many': 
-1} + service_item = {"how_many": -1} if random.randint(0, 1) == 1: - service_item['how_many'] = random.randint(1, 100) - academy = {'available_as_saas': True} - - model = self.bc.database.create(service_stock_scheduler=1, - service_item=service_item, - subscription=subscription, - subscription_service_item=1, - cohort=2, - cohort_set=2, - academy=academy) + service_item["how_many"] = random.randint(1, 100) + academy = {"available_as_saas": True} + + model = self.bc.database.create( + service_stock_scheduler=1, + service_item=service_item, + subscription=subscription, + subscription_service_item=1, + cohort=2, + cohort_set=2, + academy=academy, + ) logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] renew_consumables.delay(1) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting renew_consumables for service stock scheduler 1'), - call('The consumable 1 for cohort set 1 was built'), - call('The scheduler 1 was renewed'), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting renew_consumables for service stock scheduler 1"), + call("The consumable 1 for cohort set 1 was built"), + call("The scheduler 1 was renewed"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), [ - consumable_item({ - 'cohort_set_id': 1, - 'id': 1, - 'service_item_id': 1, - 'user_id': 1, - 'how_many': model.service_item.how_many, - 'valid_until': UTC_NOW + relativedelta(minutes=3), - }), - ]) + self.assertEqual( + self.bc.database.list_of("payments.Consumable"), + [ + consumable_item( + { + "cohort_set_id": 1, + "id": 1, + "service_item_id": 1, + "user_id": 1, + "how_many": model.service_item.how_many, + "valid_until": UTC_NOW + relativedelta(minutes=3), + } + ), + ], + ) """ 🔽🔽🔽 ServiceStockScheduler with PlanFinancing with a PlanServiceItem linked to a resource """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_subscription__service_item__with_plan_service_item_with_two_mentorship_services_linked(self): subscription = { - 'monthly_price': random.random() * 99.99 + 0.01, - 'plan_expires_at': UTC_NOW - relativedelta(seconds=2), - 'valid_until': UTC_NOW + relativedelta(minutes=3), - 'next_payment_at': UTC_NOW + relativedelta(minutes=3), + "monthly_price": random.random() * 99.99 + 0.01, + "plan_expires_at": UTC_NOW - relativedelta(seconds=2), + "valid_until": UTC_NOW + relativedelta(minutes=3), + "next_payment_at": UTC_NOW + relativedelta(minutes=3), } - service = {'type': 'MENTORSHIP_SERVICE_SET'} - service_item = {'how_many': -1} + service = {"type": "MENTORSHIP_SERVICE_SET"} + service_item = {"how_many": -1} if random.randint(0, 1) == 1: - service_item['how_many'] = random.randint(1, 100) - - model = self.bc.database.create(service_stock_scheduler=1, - service_item=service_item, - subscription=subscription, - subscription_service_item=1, - mentorship_service=2, - mentorship_service_set=1, - service=service) + service_item["how_many"] = random.randint(1, 100) + + model = self.bc.database.create( + service_stock_scheduler=1, + service_item=service_item, + subscription=subscription, + subscription_service_item=1, + mentorship_service=2, + mentorship_service_set=1, + 
service=service, + ) logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] renew_consumables.delay(1) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting renew_consumables for service stock scheduler 1'), - call('The consumable 1 for mentorship service set 1 was built'), - call('The scheduler 1 was renewed'), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting renew_consumables for service stock scheduler 1"), + call("The consumable 1 for mentorship service set 1 was built"), + call("The scheduler 1 was renewed"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), [ - consumable_item({ - 'mentorship_service_set_id': 1, - 'id': 1, - 'service_item_id': 1, - 'user_id': 1, - 'how_many': model.service_item.how_many, - 'valid_until': UTC_NOW + relativedelta(minutes=3), - }), - ]) + self.assertEqual( + self.bc.database.list_of("payments.Consumable"), + [ + consumable_item( + { + "mentorship_service_set_id": 1, + "id": 1, + "service_item_id": 1, + "user_id": 1, + "how_many": model.service_item.how_many, + "valid_until": UTC_NOW + relativedelta(minutes=3), + } + ), + ], + ) """ 🔽🔽🔽 ServiceStockScheduler with PlanFinancing, do not needs renew """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_subscription__service_item__with_plan_service_item__do_not_needs_renew(self): subscription = { - 'monthly_price': random.random() * 99.99 + 0.01, - 'plan_expires_at': UTC_NOW - relativedelta(seconds=2), - 'valid_until': UTC_NOW + relativedelta(minutes=3), - 'next_payment_at': UTC_NOW + relativedelta(minutes=3), + "monthly_price": random.random() * 99.99 + 0.01, + "plan_expires_at": UTC_NOW - relativedelta(seconds=2), + "valid_until": UTC_NOW + relativedelta(minutes=3), + "next_payment_at": UTC_NOW + relativedelta(minutes=3), } service_stock_scheduler = { - 'valid_until': UTC_NOW - relativedelta(seconds=1), + "valid_until": UTC_NOW - relativedelta(seconds=1), } - model = self.bc.database.create(service_stock_scheduler=service_stock_scheduler, - subscription=subscription, - subscription_service_item=1, - mentorship_service=2, - mentorship_service_set=1) + model = self.bc.database.create( + service_stock_scheduler=service_stock_scheduler, + subscription=subscription, + subscription_service_item=1, + mentorship_service=2, + mentorship_service_set=1, + ) logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] renew_consumables.delay(1) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting renew_consumables for service stock scheduler 1'), - call("The scheduler 1 don't needs to be renewed"), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting renew_consumables for service stock scheduler 1"), + call("The scheduler 1 don't needs to be renewed"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) diff --git a/breathecode/payments/tests/tasks/tests_renew_plan_financing_consumables.py 
b/breathecode/payments/tests/tasks/tests_renew_plan_financing_consumables.py index 2b9559118..be8ac68f6 100644 --- a/breathecode/payments/tests/tasks/tests_renew_plan_financing_consumables.py +++ b/breathecode/payments/tests/tasks/tests_renew_plan_financing_consumables.py @@ -1,6 +1,7 @@ """ Test /answer """ + import logging import random from unittest.mock import MagicMock, call, patch @@ -19,57 +20,61 @@ def service_stock_scheduler_item(data={}): return { - 'id': 1, - 'plan_handler_id': None, - 'subscription_handler_id': None, - 'valid_until': None, + "id": 1, + "plan_handler_id": None, + "subscription_handler_id": None, + "valid_until": None, **data, } -#FIXME: create fail in this test file +# FIXME: create fail in this test file class PaymentsTestSuite(PaymentsTestCase): """ 🔽🔽🔽 Subscription not found """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_subscription_not_found(self): renew_plan_financing_consumables.delay(1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) self.assertEqual( logging.Logger.info.call_args_list, [ - call('Starting renew_plan_financing_consumables for id 1'), + call("Starting renew_plan_financing_consumables for id 1"), # retrying - call('Starting renew_plan_financing_consumables for id 1'), - ]) - self.assertEqual(logging.Logger.error.call_args_list, [ - call('PlanFinancing with id 1 not found', exc_info=True), - ]) + call("Starting renew_plan_financing_consumables for id 1"), + ], + ) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call("PlanFinancing with id 1 not found", exc_info=True), + ], + ) - self.assertEqual(self.bc.database.list_of('payments.PlanFinancing'), []) + self.assertEqual(self.bc.database.list_of("payments.PlanFinancing"), []) """ 🔽🔽🔽 Subscription was not paid """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.payments.tasks.renew_consumables.delay', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.payments.tasks.renew_consumables.delay", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_subscription_was_not_paid(self): subscription = { - 'next_payment_at': UTC_NOW - relativedelta(seconds=1), - 'valid_until': UTC_NOW + relativedelta(minutes=3), - 'plan_expires_at': UTC_NOW + relativedelta(minutes=3), - 'monthly_price': random.random() * 99.99 + 0.01, + "next_payment_at": UTC_NOW - relativedelta(seconds=1), + "valid_until": UTC_NOW + relativedelta(minutes=3), + "plan_expires_at": UTC_NOW + relativedelta(minutes=3), + "monthly_price": random.random() * 99.99 + 0.01, } - plan = {'is_renewable': False} + plan = {"is_renewable": False} model = self.bc.database.create(plan_financing=subscription, plan=plan) logging.Logger.info.call_args_list = [] @@ -77,38 +82,47 @@ def test_subscription_was_not_paid(self): renew_plan_financing_consumables.delay(1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) - 
self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting renew_plan_financing_consumables for id 1'), - ]) - self.assertEqual(logging.Logger.error.call_args_list, [ - call('The PlanFinancing 1 needs to be paid to renew the consumables', exc_info=True), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting renew_plan_financing_consumables for id 1"), + ], + ) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call("The PlanFinancing 1 needs to be paid to renew the consumables", exc_info=True), + ], + ) self.assertEqual(tasks.renew_consumables.delay.call_args_list, []) - self.assertEqual(self.bc.database.list_of('payments.PlanFinancing'), [ - self.bc.format.to_dict(model.plan_financing), - ]) - self.assertEqual(self.bc.database.list_of('payments.ServiceStockScheduler'), []) + self.assertEqual( + self.bc.database.list_of("payments.PlanFinancing"), + [ + self.bc.format.to_dict(model.plan_financing), + ], + ) + self.assertEqual(self.bc.database.list_of("payments.ServiceStockScheduler"), []) """ 🔽🔽🔽 Subscription was paid """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.payments.tasks.renew_consumables.delay', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.payments.tasks.renew_consumables.delay", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_subscription_was_paid__without_stock_scheduler(self): subscription = { - 'valid_until': UTC_NOW + relativedelta(minutes=3), - 'next_payment_at': UTC_NOW + relativedelta(minutes=3), - 'plan_expires_at': UTC_NOW + relativedelta(minutes=3), - 'monthly_price': random.random() * 99.99 + 0.01, + "valid_until": UTC_NOW + relativedelta(minutes=3), + "next_payment_at": UTC_NOW + relativedelta(minutes=3), + "plan_expires_at": UTC_NOW + relativedelta(minutes=3), + "monthly_price": random.random() * 99.99 + 0.01, } - plan = {'is_renewable': False} + plan = {"is_renewable": False} model = self.bc.database.create(plan_financing=subscription, plan=plan) logging.Logger.info.call_args_list = [] @@ -116,79 +130,107 @@ def test_subscription_was_paid__without_stock_scheduler(self): renew_plan_financing_consumables.delay(1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting renew_plan_financing_consumables for id 1'), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting renew_plan_financing_consumables for id 1"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, []) self.assertEqual(tasks.renew_consumables.delay.call_args_list, []) - self.assertEqual(self.bc.database.list_of('payments.PlanFinancing'), [ - self.bc.format.to_dict(model.plan_financing), - ]) - self.assertEqual(self.bc.database.list_of('payments.ServiceStockScheduler'), []) + self.assertEqual( + self.bc.database.list_of("payments.PlanFinancing"), + [ + self.bc.format.to_dict(model.plan_financing), + ], + ) + self.assertEqual(self.bc.database.list_of("payments.ServiceStockScheduler"), []) """ 🔽🔽🔽 Subscription was not paid """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.payments.tasks.renew_consumables.delay', MagicMock()) - 
@patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.payments.tasks.renew_consumables.delay", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_subscription_was_not_paid__(self): - plan_service_items = [{ - 'plan_id': 1, - 'service_item_id': n, - } for n in range(1, 3)] + [{ - 'plan_id': 2, - 'service_item_id': n, - } for n in range(3, 5)] - plan_service_item_handlers = [{'handler_id': n} for n in range(1, 5)] + plan_service_items = [ + { + "plan_id": 1, + "service_item_id": n, + } + for n in range(1, 3) + ] + [ + { + "plan_id": 2, + "service_item_id": n, + } + for n in range(3, 5) + ] + plan_service_item_handlers = [{"handler_id": n} for n in range(1, 5)] subscription = { - 'next_payment_at': UTC_NOW + relativedelta(minutes=3), - 'valid_until': UTC_NOW + relativedelta(minutes=3), - 'plan_expires_at': UTC_NOW + relativedelta(minutes=3), - 'monthly_price': random.random() * 99.99 + 0.01, + "next_payment_at": UTC_NOW + relativedelta(minutes=3), + "valid_until": UTC_NOW + relativedelta(minutes=3), + "plan_expires_at": UTC_NOW + relativedelta(minutes=3), + "monthly_price": random.random() * 99.99 + 0.01, } - service_stock_schedulers = [{ - 'subscription_handler_id': None, - 'plan_handler_id': n, - } for n in range(1, 5)] - - plan = {'is_renewable': False} - - model = self.bc.database.create(plan_financing=subscription, - service_stock_scheduler=service_stock_schedulers, - plan=(2, plan), - service_item=4, - plan_service_item=plan_service_items, - plan_service_item_handler=plan_service_item_handlers) + service_stock_schedulers = [ + { + "subscription_handler_id": None, + "plan_handler_id": n, + } + for n in range(1, 5) + ] + + plan = {"is_renewable": False} + + model = self.bc.database.create( + plan_financing=subscription, + service_stock_scheduler=service_stock_schedulers, + plan=(2, plan), + service_item=4, + plan_service_item=plan_service_items, + plan_service_item_handler=plan_service_item_handlers, + ) logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] renew_plan_financing_consumables.delay(1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting renew_plan_financing_consumables for id 1'), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting renew_plan_financing_consumables for id 1"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(tasks.renew_consumables.delay.call_args_list, [ - call(1), - call(2), - call(3), - call(4), - ]) - - self.assertEqual(self.bc.database.list_of('payments.PlanFinancing'), [ - self.bc.format.to_dict(model.plan_financing), - ]) - self.assertEqual(self.bc.database.list_of('payments.ServiceStockScheduler'), - self.bc.format.to_dict(model.service_stock_scheduler)) + self.assertEqual( + tasks.renew_consumables.delay.call_args_list, + [ + call(1), + call(2), + call(3), + call(4), + ], + ) + + self.assertEqual( + self.bc.database.list_of("payments.PlanFinancing"), + [ + self.bc.format.to_dict(model.plan_financing), + ], + ) + self.assertEqual( + self.bc.database.list_of("payments.ServiceStockScheduler"), + self.bc.format.to_dict(model.service_stock_scheduler), + ) diff --git 
a/breathecode/payments/tests/tasks/tests_renew_subscription_consumables.py b/breathecode/payments/tests/tasks/tests_renew_subscription_consumables.py index a59bdfa96..00dbf0292 100644 --- a/breathecode/payments/tests/tasks/tests_renew_subscription_consumables.py +++ b/breathecode/payments/tests/tasks/tests_renew_subscription_consumables.py @@ -1,6 +1,7 @@ """ Test /answer """ + import logging import random from unittest.mock import MagicMock, call, patch @@ -19,82 +20,93 @@ def service_stock_scheduler_item(data={}): return { - 'id': 1, - 'plan_handler_id': None, - 'subscription_handler_id': None, - 'valid_until': None, + "id": 1, + "plan_handler_id": None, + "subscription_handler_id": None, + "valid_until": None, **data, } -#FIXME: create fail in this test file +# FIXME: create fail in this test file class PaymentsTestSuite(PaymentsTestCase): """ 🔽🔽🔽 Subscription not found """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_subscription_not_found(self): renew_subscription_consumables.delay(1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) self.assertEqual( logging.Logger.info.call_args_list, [ - call('Starting renew_subscription_consumables for id 1'), + call("Starting renew_subscription_consumables for id 1"), # retrying - call('Starting renew_subscription_consumables for id 1'), + call("Starting renew_subscription_consumables for id 1"), ], ) - self.assertEqual(logging.Logger.error.call_args_list, [call('Subscription with id 1 not found', exc_info=True)]) + self.assertEqual(logging.Logger.error.call_args_list, [call("Subscription with id 1 not found", exc_info=True)]) - self.assertEqual(self.bc.database.list_of('payments.Subscription'), []) + self.assertEqual(self.bc.database.list_of("payments.Subscription"), []) """ 🔽🔽🔽 Subscription was not paid """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.payments.tasks.renew_consumables.delay', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.payments.tasks.renew_consumables.delay", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_subscription_was_not_paid(self): - model = self.bc.database.create(subscription=1, ) + model = self.bc.database.create( + subscription=1, + ) logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] renew_subscription_consumables.delay(1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting renew_subscription_consumables for id 1'), - ]) - self.assertEqual(logging.Logger.error.call_args_list, [ - call('The subscription 1 needs to be paid to renew the consumables', exc_info=True), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting renew_subscription_consumables for id 1"), + ], + ) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call("The subscription 1 needs to be 
paid to renew the consumables", exc_info=True), + ], + ) self.assertEqual(tasks.renew_consumables.delay.call_args_list, []) - self.assertEqual(self.bc.database.list_of('payments.Subscription'), [ - self.bc.format.to_dict(model.subscription), - ]) - self.assertEqual(self.bc.database.list_of('payments.ServiceStockScheduler'), []) + self.assertEqual( + self.bc.database.list_of("payments.Subscription"), + [ + self.bc.format.to_dict(model.subscription), + ], + ) + self.assertEqual(self.bc.database.list_of("payments.ServiceStockScheduler"), []) """ 🔽🔽🔽 Subscription was paid """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.payments.tasks.renew_consumables.delay', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.payments.tasks.renew_consumables.delay", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_subscription_was_paid__without_stock_scheduler(self): - subscription = {'next_payment_at': UTC_NOW + relativedelta(minutes=3)} + subscription = {"next_payment_at": UTC_NOW + relativedelta(minutes=3)} model = self.bc.database.create(subscription=subscription) logging.Logger.info.call_args_list = [] @@ -102,32 +114,38 @@ def test_subscription_was_paid__without_stock_scheduler(self): renew_subscription_consumables.delay(1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting renew_subscription_consumables for id 1'), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting renew_subscription_consumables for id 1"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, []) self.assertEqual(tasks.renew_consumables.delay.call_args_list, []) - self.assertEqual(self.bc.database.list_of('payments.Subscription'), [ - self.bc.format.to_dict(model.subscription), - ]) - self.assertEqual(self.bc.database.list_of('payments.ServiceStockScheduler'), []) + self.assertEqual( + self.bc.database.list_of("payments.Subscription"), + [ + self.bc.format.to_dict(model.subscription), + ], + ) + self.assertEqual(self.bc.database.list_of("payments.ServiceStockScheduler"), []) """ 🔽🔽🔽 Subscription is over """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.payments.tasks.renew_consumables.delay', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.payments.tasks.renew_consumables.delay", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_subscription_is_over(self): subscription = { - 'next_payment_at': UTC_NOW + relativedelta(minutes=1), - 'valid_until': UTC_NOW - relativedelta(seconds=1), + "next_payment_at": UTC_NOW + relativedelta(minutes=1), + "valid_until": UTC_NOW - relativedelta(seconds=1), } model = self.bc.database.create(subscription=subscription) @@ -136,86 +154,120 @@ def test_subscription_is_over(self): renew_subscription_consumables.delay(1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) - 
self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting renew_subscription_consumables for id 1'), - ]) - self.assertEqual(logging.Logger.error.call_args_list, [ - call('The subscription 1 is over', exc_info=True), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting renew_subscription_consumables for id 1"), + ], + ) + self.assertEqual( + logging.Logger.error.call_args_list, + [ + call("The subscription 1 is over", exc_info=True), + ], + ) self.assertEqual(tasks.renew_consumables.delay.call_args_list, []) - self.assertEqual(self.bc.database.list_of('payments.Subscription'), [ - self.bc.format.to_dict(model.subscription), - ]) - self.assertEqual(self.bc.database.list_of('payments.ServiceStockScheduler'), []) + self.assertEqual( + self.bc.database.list_of("payments.Subscription"), + [ + self.bc.format.to_dict(model.subscription), + ], + ) + self.assertEqual(self.bc.database.list_of("payments.ServiceStockScheduler"), []) """ 🔽🔽🔽 Subscription was not paid """ - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.payments.tasks.renew_consumables.delay', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.payments.tasks.renew_consumables.delay", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_subscription_is_right(self): - subscription_service_items = [{'service_item_id': n} for n in range(1, 3)] - plan_service_items = [{ - 'plan_id': 1, - 'service_item_id': n, - } for n in range(3, 5)] + [{ - 'plan_id': 2, - 'service_item_id': n, - } for n in range(5, 7)] - plan_service_item_handlers = [{'handler_id': n} for n in range(1, 5)] + subscription_service_items = [{"service_item_id": n} for n in range(1, 3)] + plan_service_items = [ + { + "plan_id": 1, + "service_item_id": n, + } + for n in range(3, 5) + ] + [ + { + "plan_id": 2, + "service_item_id": n, + } + for n in range(5, 7) + ] + plan_service_item_handlers = [{"handler_id": n} for n in range(1, 5)] subscription = { - 'next_payment_at': UTC_NOW + relativedelta(minutes=3), - 'valid_until': UTC_NOW + relativedelta(minutes=3), + "next_payment_at": UTC_NOW + relativedelta(minutes=3), + "valid_until": UTC_NOW + relativedelta(minutes=3), } - service_stock_schedulers = [{ - 'subscription_handler_id': n, - 'plan_handler_id': None, - } for n in range(1, 3)] + [{ - 'subscription_handler_id': None, - 'plan_handler_id': n, - } for n in range(1, 5)] - - plan = {'is_renewable': False} - - model = self.bc.database.create(subscription=subscription, - service_stock_scheduler=service_stock_schedulers, - plan=(2, plan), - service_item=6, - plan_service_item=plan_service_items, - plan_service_item_handler=plan_service_item_handlers, - subscription_service_item=subscription_service_items) + service_stock_schedulers = [ + { + "subscription_handler_id": n, + "plan_handler_id": None, + } + for n in range(1, 3) + ] + [ + { + "subscription_handler_id": None, + "plan_handler_id": n, + } + for n in range(1, 5) + ] + + plan = {"is_renewable": False} + + model = self.bc.database.create( + subscription=subscription, + service_stock_scheduler=service_stock_schedulers, + plan=(2, plan), + service_item=6, + plan_service_item=plan_service_items, + plan_service_item_handler=plan_service_item_handlers, + subscription_service_item=subscription_service_items, + ) 
logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] renew_subscription_consumables.delay(1) - self.assertEqual(self.bc.database.list_of('admissions.Cohort'), []) + self.assertEqual(self.bc.database.list_of("admissions.Cohort"), []) - self.assertEqual(logging.Logger.info.call_args_list, [ - call('Starting renew_subscription_consumables for id 1'), - ]) + self.assertEqual( + logging.Logger.info.call_args_list, + [ + call("Starting renew_subscription_consumables for id 1"), + ], + ) self.assertEqual(logging.Logger.error.call_args_list, []) - self.assertEqual(tasks.renew_consumables.delay.call_args_list, [ - call(1), - call(2), - call(3), - call(4), - call(5), - call(6), - ]) - - self.assertEqual(self.bc.database.list_of('payments.Subscription'), [ - self.bc.format.to_dict(model.subscription), - ]) - self.assertEqual(self.bc.database.list_of('payments.ServiceStockScheduler'), - self.bc.format.to_dict(model.service_stock_scheduler)) + self.assertEqual( + tasks.renew_consumables.delay.call_args_list, + [ + call(1), + call(2), + call(3), + call(4), + call(5), + call(6), + ], + ) + + self.assertEqual( + self.bc.database.list_of("payments.Subscription"), + [ + self.bc.format.to_dict(model.subscription), + ], + ) + self.assertEqual( + self.bc.database.list_of("payments.ServiceStockScheduler"), + self.bc.format.to_dict(model.service_stock_scheduler), + ) diff --git a/breathecode/payments/tests/tasks/tests_update_plan_financing_service_stock_schedulers.py b/breathecode/payments/tests/tasks/tests_update_plan_financing_service_stock_schedulers.py index 1d3bed276..a6e628a27 100644 --- a/breathecode/payments/tests/tasks/tests_update_plan_financing_service_stock_schedulers.py +++ b/breathecode/payments/tests/tasks/tests_update_plan_financing_service_stock_schedulers.py @@ -13,130 +13,155 @@ def setup(db): def test_nothing_happens(bc: Breathecode): plan = { - 'time_of_life': 1, - 'time_of_life_unit': 'MONTH', - 'trial_duration': 0, + "time_of_life": 1, + "time_of_life_unit": "MONTH", + "trial_duration": 0, } plan_financing = { - 'monthly_price': 10, - 'plan_expires_at': bc.datetime.now(), + "monthly_price": 10, + "plan_expires_at": bc.datetime.now(), } model = bc.database.create(plan=plan, plan_financing=plan_financing) tasks.update_plan_financing_service_stock_schedulers.delay(1, 1) - assert bc.database.list_of('payments.ServiceStockScheduler') == [] - assert bc.database.list_of('payments.PlanServiceItemHandler') == [] + assert bc.database.list_of("payments.ServiceStockScheduler") == [] + assert bc.database.list_of("payments.PlanServiceItemHandler") == [] def test_all_schedulers_must_be_created(bc: Breathecode): plan = { - 'time_of_life': 1, - 'time_of_life_unit': 'MONTH', - 'trial_duration': 0, + "time_of_life": 1, + "time_of_life_unit": "MONTH", + "trial_duration": 0, } plan_financing = { - 'monthly_price': 10, - 'plan_expires_at': bc.datetime.now(), + "monthly_price": 10, + "plan_expires_at": bc.datetime.now(), } - plan_service_items = [{ - 'plan_id': 1, - 'service_item_id': n, - } for n in range(1, 3)] + plan_service_items = [ + { + "plan_id": 1, + "service_item_id": n, + } + for n in range(1, 3) + ] - model = bc.database.create(plan=plan, - plan_financing=plan_financing, - service_item=2, - plan_service_item=plan_service_items) + model = bc.database.create( + plan=plan, plan_financing=plan_financing, service_item=2, plan_service_item=plan_service_items + ) tasks.update_plan_financing_service_stock_schedulers.delay(1, 1) - assert 
bc.database.list_of('payments.ServiceStockScheduler') == [ + assert bc.database.list_of("payments.ServiceStockScheduler") == [ { - 'id': 1, - 'plan_handler_id': 1, - 'subscription_handler_id': None, - 'valid_until': None, + "id": 1, + "plan_handler_id": 1, + "subscription_handler_id": None, + "valid_until": None, }, { - 'id': 2, - 'plan_handler_id': 2, - 'subscription_handler_id': None, - 'valid_until': None, + "id": 2, + "plan_handler_id": 2, + "subscription_handler_id": None, + "valid_until": None, }, ] - assert bc.database.list_of('payments.PlanServiceItem') == [{ - 'id': n, - 'plan_id': 1, - 'service_item_id': n, - } for n in range(1, 3)] - assert bc.database.list_of('payments.PlanServiceItemHandler') == [ + assert bc.database.list_of("payments.PlanServiceItem") == [ { - 'handler_id': 1, - 'id': 1, - 'plan_financing_id': 1, - 'subscription_id': None, + "id": n, + "plan_id": 1, + "service_item_id": n, + } + for n in range(1, 3) + ] + assert bc.database.list_of("payments.PlanServiceItemHandler") == [ + { + "handler_id": 1, + "id": 1, + "plan_financing_id": 1, + "subscription_id": None, }, { - 'handler_id': 2, - 'id': 2, - 'plan_financing_id': 1, - 'subscription_id': None, + "handler_id": 2, + "id": 2, + "plan_financing_id": 1, + "subscription_id": None, }, ] def test_half_schedulers_must_be_created(bc: Breathecode): plan = { - 'time_of_life': 1, - 'time_of_life_unit': 'MONTH', - 'trial_duration': 0, + "time_of_life": 1, + "time_of_life_unit": "MONTH", + "trial_duration": 0, } plan_financing = { - 'monthly_price': 10, - 'plan_expires_at': bc.datetime.now(), + "monthly_price": 10, + "plan_expires_at": bc.datetime.now(), } - plan_service_items = [{ - 'plan_id': 1, - 'service_item_id': n, - } for n in range(1, 5)] - - service_stock_schedulers = [{ - 'plan_handler_id': n, - 'valid_until': None, - } for n in range(1, 3)] - - plan_service_item_handlers = [{ - 'handler_id': n, - 'plan_financing_id': 1, - } for n in range(1, 3)] - - model = bc.database.create(plan=plan, - plan_financing=plan_financing, - service_item=4, - plan_service_item=plan_service_items, - service_stock_scheduler=service_stock_schedulers, - plan_service_item_handler=plan_service_item_handlers) + plan_service_items = [ + { + "plan_id": 1, + "service_item_id": n, + } + for n in range(1, 5) + ] + + service_stock_schedulers = [ + { + "plan_handler_id": n, + "valid_until": None, + } + for n in range(1, 3) + ] + + plan_service_item_handlers = [ + { + "handler_id": n, + "plan_financing_id": 1, + } + for n in range(1, 3) + ] + + model = bc.database.create( + plan=plan, + plan_financing=plan_financing, + service_item=4, + plan_service_item=plan_service_items, + service_stock_scheduler=service_stock_schedulers, + plan_service_item_handler=plan_service_item_handlers, + ) tasks.update_plan_financing_service_stock_schedulers.delay(1, 1) - assert bc.database.list_of('payments.ServiceStockScheduler') == [{ - 'id': n, - 'plan_handler_id': n, - 'subscription_handler_id': None, - 'valid_until': None, - } for n in range(1, 5)] - assert bc.database.list_of('payments.PlanServiceItem') == [{ - 'id': n, - 'plan_id': 1, - 'service_item_id': n, - } for n in range(1, 5)] - assert bc.database.list_of('payments.PlanServiceItemHandler') == [{ - 'handler_id': n, - 'id': n, - 'plan_financing_id': 1, - 'subscription_id': None, - } for n in range(1, 5)] + assert bc.database.list_of("payments.ServiceStockScheduler") == [ + { + "id": n, + "plan_handler_id": n, + "subscription_handler_id": None, + "valid_until": None, + } + for n in range(1, 5) + ] + assert 
bc.database.list_of("payments.PlanServiceItem") == [ + { + "id": n, + "plan_id": 1, + "service_item_id": n, + } + for n in range(1, 5) + ] + assert bc.database.list_of("payments.PlanServiceItemHandler") == [ + { + "handler_id": n, + "id": n, + "plan_financing_id": 1, + "subscription_id": None, + } + for n in range(1, 5) + ] diff --git a/breathecode/payments/tests/tasks/tests_update_service_stock_schedulers.py b/breathecode/payments/tests/tasks/tests_update_service_stock_schedulers.py index cb03da32f..942ef92fa 100644 --- a/breathecode/payments/tests/tasks/tests_update_service_stock_schedulers.py +++ b/breathecode/payments/tests/tasks/tests_update_service_stock_schedulers.py @@ -10,8 +10,8 @@ def mocks(db, monkeypatch): m1 = MagicMock() m2 = MagicMock() - monkeypatch.setattr(tasks.update_subscription_service_stock_schedulers, 'delay', m1) - monkeypatch.setattr(tasks.update_plan_financing_service_stock_schedulers, 'delay', m2) + monkeypatch.setattr(tasks.update_subscription_service_stock_schedulers, "delay", m1) + monkeypatch.setattr(tasks.update_plan_financing_service_stock_schedulers, "delay", m2) yield m1, m2 @@ -20,9 +20,9 @@ def test_nothing_happens(bc: Breathecode, mocks): subscription_mock, plan_financing_mock = mocks plan = { - 'time_of_life': 1, - 'time_of_life_unit': 'MONTH', - 'trial_duration': 0, + "time_of_life": 1, + "time_of_life_unit": "MONTH", + "trial_duration": 0, } model = bc.database.create(plan=plan) @@ -37,29 +37,35 @@ def test_calling_the_builders_for_the_related_content(bc: Breathecode, mocks): subscription_mock, plan_financing_mock = mocks plan = { - 'time_of_life': 1, - 'time_of_life_unit': 'MONTH', - 'trial_duration': 0, + "time_of_life": 1, + "time_of_life_unit": "MONTH", + "trial_duration": 0, } plan_financing = { - 'plan_expires_at': bc.datetime.now(), - 'monthly_price': 10, + "plan_expires_at": bc.datetime.now(), + "monthly_price": 10, } - subscriptions = [{'plans': [1]} for x in range(1, 3)] - subscriptions += [{'plans': []} for x in range(1, 3)] - - plan_financing = [{ - 'plans': [1], - 'plan_expires_at': bc.datetime.now(), - 'monthly_price': 10, - } for x in range(1, 3)] - plan_financing += [{ - 'plans': [], - 'plan_expires_at': bc.datetime.now(), - 'monthly_price': 10, - } for x in range(1, 3)] + subscriptions = [{"plans": [1]} for x in range(1, 3)] + subscriptions += [{"plans": []} for x in range(1, 3)] + + plan_financing = [ + { + "plans": [1], + "plan_expires_at": bc.datetime.now(), + "monthly_price": 10, + } + for x in range(1, 3) + ] + plan_financing += [ + { + "plans": [], + "plan_expires_at": bc.datetime.now(), + "monthly_price": 10, + } + for x in range(1, 3) + ] model = bc.database.create(plan=plan, subscription=subscriptions, plan_financing=plan_financing) tasks.update_service_stock_schedulers.delay(1) diff --git a/breathecode/payments/tests/tasks/tests_update_subscription_service_stock_schedulers.py b/breathecode/payments/tests/tasks/tests_update_subscription_service_stock_schedulers.py index ab70feef3..eb8e61b98 100644 --- a/breathecode/payments/tests/tasks/tests_update_subscription_service_stock_schedulers.py +++ b/breathecode/payments/tests/tasks/tests_update_subscription_service_stock_schedulers.py @@ -13,112 +13,138 @@ def setup(db): def test_nothing_happens(bc: Breathecode): plan = { - 'time_of_life': 1, - 'time_of_life_unit': 'MONTH', - 'trial_duration': 0, + "time_of_life": 1, + "time_of_life_unit": "MONTH", + "trial_duration": 0, } model = bc.database.create(plan=plan, subscription=1) 
tasks.update_subscription_service_stock_schedulers.delay(1, 1) - assert bc.database.list_of('payments.ServiceStockScheduler') == [] - assert bc.database.list_of('payments.PlanServiceItemHandler') == [] + assert bc.database.list_of("payments.ServiceStockScheduler") == [] + assert bc.database.list_of("payments.PlanServiceItemHandler") == [] def test_all_schedulers_must_be_created(bc: Breathecode): plan = { - 'time_of_life': 1, - 'time_of_life_unit': 'MONTH', - 'trial_duration': 0, + "time_of_life": 1, + "time_of_life_unit": "MONTH", + "trial_duration": 0, } - plan_service_items = [{ - 'plan_id': 1, - 'service_item_id': n, - } for n in range(1, 3)] + plan_service_items = [ + { + "plan_id": 1, + "service_item_id": n, + } + for n in range(1, 3) + ] model = bc.database.create(plan=plan, subscription=1, service_item=2, plan_service_item=plan_service_items) tasks.update_subscription_service_stock_schedulers.delay(1, 1) - assert bc.database.list_of('payments.ServiceStockScheduler') == [ + assert bc.database.list_of("payments.ServiceStockScheduler") == [ { - 'id': 1, - 'plan_handler_id': 1, - 'subscription_handler_id': None, - 'valid_until': None, + "id": 1, + "plan_handler_id": 1, + "subscription_handler_id": None, + "valid_until": None, }, { - 'id': 2, - 'plan_handler_id': 2, - 'subscription_handler_id': None, - 'valid_until': None, + "id": 2, + "plan_handler_id": 2, + "subscription_handler_id": None, + "valid_until": None, }, ] - assert bc.database.list_of('payments.PlanServiceItem') == [{ - 'id': n, - 'plan_id': 1, - 'service_item_id': n, - } for n in range(1, 3)] - assert bc.database.list_of('payments.PlanServiceItemHandler') == [ + assert bc.database.list_of("payments.PlanServiceItem") == [ { - 'handler_id': 1, - 'id': 1, - 'plan_financing_id': None, - 'subscription_id': 1, + "id": n, + "plan_id": 1, + "service_item_id": n, + } + for n in range(1, 3) + ] + assert bc.database.list_of("payments.PlanServiceItemHandler") == [ + { + "handler_id": 1, + "id": 1, + "plan_financing_id": None, + "subscription_id": 1, }, { - 'handler_id': 2, - 'id': 2, - 'plan_financing_id': None, - 'subscription_id': 1, + "handler_id": 2, + "id": 2, + "plan_financing_id": None, + "subscription_id": 1, }, ] def test_half_schedulers_must_be_created(bc: Breathecode): plan = { - 'time_of_life': 1, - 'time_of_life_unit': 'MONTH', - 'trial_duration': 0, + "time_of_life": 1, + "time_of_life_unit": "MONTH", + "trial_duration": 0, } - plan_service_items = [{ - 'plan_id': 1, - 'service_item_id': n, - } for n in range(1, 5)] - - service_stock_schedulers = [{ - 'plan_handler_id': n, - 'valid_until': None, - } for n in range(1, 3)] - - plan_service_item_handlers = [{ - 'handler_id': n, - 'subscription_id': 1, - } for n in range(1, 3)] - - model = bc.database.create(plan=plan, - subscription=1, - service_item=4, - plan_service_item=plan_service_items, - service_stock_scheduler=service_stock_schedulers, - plan_service_item_handler=plan_service_item_handlers) + plan_service_items = [ + { + "plan_id": 1, + "service_item_id": n, + } + for n in range(1, 5) + ] + + service_stock_schedulers = [ + { + "plan_handler_id": n, + "valid_until": None, + } + for n in range(1, 3) + ] + + plan_service_item_handlers = [ + { + "handler_id": n, + "subscription_id": 1, + } + for n in range(1, 3) + ] + + model = bc.database.create( + plan=plan, + subscription=1, + service_item=4, + plan_service_item=plan_service_items, + service_stock_scheduler=service_stock_schedulers, + plan_service_item_handler=plan_service_item_handlers, + ) 
tasks.update_subscription_service_stock_schedulers.delay(1, 1) - assert bc.database.list_of('payments.ServiceStockScheduler') == [{ - 'id': n, - 'plan_handler_id': n, - 'subscription_handler_id': None, - 'valid_until': None, - } for n in range(1, 5)] - assert bc.database.list_of('payments.PlanServiceItem') == [{ - 'id': n, - 'plan_id': 1, - 'service_item_id': n, - } for n in range(1, 5)] - assert bc.database.list_of('payments.PlanServiceItemHandler') == [{ - 'handler_id': n, - 'id': n, - 'plan_financing_id': None, - 'subscription_id': 1, - } for n in range(1, 5)] + assert bc.database.list_of("payments.ServiceStockScheduler") == [ + { + "id": n, + "plan_handler_id": n, + "subscription_handler_id": None, + "valid_until": None, + } + for n in range(1, 5) + ] + assert bc.database.list_of("payments.PlanServiceItem") == [ + { + "id": n, + "plan_id": 1, + "service_item_id": n, + } + for n in range(1, 5) + ] + assert bc.database.list_of("payments.PlanServiceItemHandler") == [ + { + "handler_id": n, + "id": n, + "plan_financing_id": None, + "subscription_id": 1, + } + for n in range(1, 5) + ] diff --git a/breathecode/payments/tests/urls/tests_academy_plan.py b/breathecode/payments/tests/urls/tests_academy_plan.py index 2286ab14f..1342b722f 100644 --- a/breathecode/payments/tests/urls/tests_academy_plan.py +++ b/breathecode/payments/tests/urls/tests_academy_plan.py @@ -13,35 +13,35 @@ def academy_serializer(academy): return { - 'id': academy.id, - 'name': academy.name, - 'slug': academy.slug, + "id": academy.id, + "name": academy.name, + "slug": academy.slug, } def service_item_serializer(service_item, service): return { - 'how_many': service_item.how_many, - 'service': { - 'groups': [], - 'private': service.private, - 'slug': service.slug, - 'title': service.title, - 'icon_url': service.icon_url, + "how_many": service_item.how_many, + "service": { + "groups": [], + "private": service.private, + "slug": service.slug, + "title": service.title, + "icon_url": service.icon_url, }, - 'unit_type': service_item.unit_type, - 'sort_priority': service_item.sort_priority, + "unit_type": service_item.unit_type, + "sort_priority": service_item.sort_priority, } def financing_option_serializer(financing_option, currency): return { - 'currency': { - 'code': currency.code, - 'name': currency.name, + "currency": { + "code": currency.code, + "name": currency.name, }, - 'how_many_months': financing_option.how_many_months, - 'monthly_price': financing_option.monthly_price, + "how_many_months": financing_option.how_many_months, + "monthly_price": financing_option.monthly_price, } @@ -56,54 +56,54 @@ def get_serializer(event, currency, service=None, academy=None, service_items=[] academy = academy_serializer(academy) return { - 'slug': event.slug, - 'currency': { - 'code': currency.code, - 'name': currency.name, + "slug": event.slug, + "currency": { + "code": currency.code, + "name": currency.name, }, - 'financing_options': financing_options, - 'has_available_cohorts': len(cohorts) > 0, - 'has_waiting_list': event.has_waiting_list, - 'is_renewable': event.is_renewable, - 'owner': academy, - 'price_per_half': event.price_per_half, - 'price_per_month': event.price_per_month, - 'price_per_quarter': event.price_per_quarter, - 'price_per_year': event.price_per_year, - 'service_items': service_items, - 'slug': event.slug, - 'status': event.status, - 'time_of_life': event.time_of_life, - 'time_of_life_unit': event.time_of_life_unit, - 'trial_duration': event.trial_duration, - 'trial_duration_unit': event.trial_duration_unit, + 
"financing_options": financing_options, + "has_available_cohorts": len(cohorts) > 0, + "has_waiting_list": event.has_waiting_list, + "is_renewable": event.is_renewable, + "owner": academy, + "price_per_half": event.price_per_half, + "price_per_month": event.price_per_month, + "price_per_quarter": event.price_per_quarter, + "price_per_year": event.price_per_year, + "service_items": service_items, + "slug": event.slug, + "status": event.status, + "time_of_life": event.time_of_life, + "time_of_life_unit": event.time_of_life_unit, + "trial_duration": event.trial_duration, + "trial_duration_unit": event.trial_duration_unit, } def post_serializer(currency, service=None, academy=None, service_items=[], financing_options=[], cohorts=[], data={}): return { - 'id': 0, - 'slug': '', - 'currency': currency.id, - 'financing_options': [x.id for x in financing_options], - 'is_renewable': False, - 'owner': academy.id, - 'price_per_half': None, - 'price_per_month': None, - 'price_per_quarter': None, - 'price_per_year': None, - 'has_waiting_list': False, - 'service_items': [x.id for x in service_items], - 'status': 'DRAFT', - 'time_of_life': 0, - 'time_of_life_unit': 'MONTH', - 'trial_duration': 0, - 'trial_duration_unit': 'MONTH', - 'mentorship_service_set': None, - 'cohort_set': None, - 'event_type_set': None, - 'invites': [], + "id": 0, + "slug": "", + "currency": currency.id, + "financing_options": [x.id for x in financing_options], + "is_renewable": False, + "owner": academy.id, + "price_per_half": None, + "price_per_month": None, + "price_per_quarter": None, + "price_per_year": None, + "has_waiting_list": False, + "service_items": [x.id for x in service_items], + "status": "DRAFT", + "time_of_life": 0, + "time_of_life_unit": "MONTH", + "trial_duration": 0, + "trial_duration_unit": "MONTH", + "mentorship_service_set": None, + "cohort_set": None, + "event_type_set": None, + "invites": [], **data, } @@ -111,24 +111,24 @@ def post_serializer(currency, service=None, academy=None, service_items=[], fina def row(currency, academy=None, data={}): return { - 'id': 0, - 'slug': '', - 'currency_id': currency.id, - 'is_renewable': False, - 'owner_id': academy.id, - 'price_per_half': None, - 'price_per_month': None, - 'price_per_quarter': None, - 'price_per_year': None, - 'has_waiting_list': False, - 'status': 'DRAFT', - 'time_of_life': 0, - 'time_of_life_unit': 'MONTH', - 'trial_duration': 0, - 'trial_duration_unit': 'MONTH', - 'mentorship_service_set_id': None, - 'cohort_set_id': None, - 'event_type_set_id': None, + "id": 0, + "slug": "", + "currency_id": currency.id, + "is_renewable": False, + "owner_id": academy.id, + "price_per_half": None, + "price_per_month": None, + "price_per_quarter": None, + "price_per_year": None, + "has_waiting_list": False, + "status": "DRAFT", + "time_of_life": 0, + "time_of_life_unit": "MONTH", + "trial_duration": 0, + "trial_duration_unit": "MONTH", + "mentorship_service_set_id": None, + "cohort_set_id": None, + "event_type_set_id": None, **data, } @@ -142,15 +142,15 @@ class SignalTestSuite(PaymentsTestCase): # When: get with no auth # Then: return 200 def test__no_auth(self): - url = reverse_lazy('payments:academy_plan') + url = reverse_lazy("payments:academy_plan") response = self.client.get(url) json = response.json() - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} self.assertEqual(json, expected) self.assertEqual(response.status_code, 
status.HTTP_401_UNAUTHORIZED) - self.assertEqual(self.bc.database.list_of('payments.Plan'), []) + self.assertEqual(self.bc.database.list_of("payments.Plan"), []) # Given: 0 Plan # When: get with no auth @@ -161,67 +161,69 @@ def test__no_capability(self): self.client.force_authenticate(model.user) self.bc.request.set_headers(academy=1) - url = reverse_lazy('payments:academy_plan') + url = reverse_lazy("payments:academy_plan") response = self.client.get(url) json = response.json() expected = { - 'detail': "You (user: 1) don't have this capability: read_plan for academy 1", - 'status_code': 403, + "detail": "You (user: 1) don't have this capability: read_plan for academy 1", + "status_code": 403, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) - self.assertEqual(self.bc.database.list_of('payments.Plan'), []) + self.assertEqual(self.bc.database.list_of("payments.Plan"), []) # Given: 2 Plan, 4 PlanServiceItem, 2 ServiceItem and 1 Service # When: get with no auth and plan is renewable # Then: return 200 with 2 Plan with no financial options def test__two_items__plan_is_renewable(self): - plan = {'time_of_life': None, 'time_of_life_unit': None, 'is_renewable': True} - plan_service_items = [{ - 'service_item_id': n, - 'plan_id': 1 - } for n in range(1, 3)] + [{ - 'service_item_id': n, - 'plan_id': 2 - } for n in range(1, 3)] - model = self.bc.database.create(plan=(2, plan), - user=1, - capability='read_plan', - role=1, - profile_academy=1, - skip_cohort=True, - service_item=2, - plan_service_item=plan_service_items, - financing_option=2) + plan = {"time_of_life": None, "time_of_life_unit": None, "is_renewable": True} + plan_service_items = [{"service_item_id": n, "plan_id": 1} for n in range(1, 3)] + [ + {"service_item_id": n, "plan_id": 2} for n in range(1, 3) + ] + model = self.bc.database.create( + plan=(2, plan), + user=1, + capability="read_plan", + role=1, + profile_academy=1, + skip_cohort=True, + service_item=2, + plan_service_item=plan_service_items, + financing_option=2, + ) self.client.force_authenticate(model.user) self.bc.request.set_headers(academy=1) - url = reverse_lazy('payments:academy_plan') + url = reverse_lazy("payments:academy_plan") response = self.client.get(url) json = response.json() expected = [ - get_serializer(model.plan[1], - model.currency, - model.service, - academy=model.academy, - service_items=model.service_item, - financing_options=[]), - get_serializer(model.plan[0], - model.currency, - model.service, - academy=model.academy, - service_items=model.service_item, - financing_options=[]), + get_serializer( + model.plan[1], + model.currency, + model.service, + academy=model.academy, + service_items=model.service_item, + financing_options=[], + ), + get_serializer( + model.plan[0], + model.currency, + model.service, + academy=model.academy, + service_items=model.service_item, + financing_options=[], + ), ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('payments.Plan'), + self.bc.database.list_of("payments.Plan"), self.bc.format.to_dict(model.plan), ) @@ -229,50 +231,52 @@ def test__two_items__plan_is_renewable(self): # When: get with no auth and plan is not renewable # Then: return 200 with 2 Plan with financial options def test__two_items__plan_is_not_renewable(self): - plan = {'time_of_life': 1, 'time_of_life_unit': 'WEEK', 'is_renewable': False} - plan_service_items = [{ - 'service_item_id': n, - 'plan_id': 1 - } for n 
in range(1, 3)] + [{ - 'service_item_id': n, - 'plan_id': 2 - } for n in range(1, 3)] - model = self.bc.database.create(plan=(2, plan), - user=1, - capability='read_plan', - role=1, - profile_academy=1, - skip_cohort=True, - service_item=2, - plan_service_item=plan_service_items, - financing_option=2) + plan = {"time_of_life": 1, "time_of_life_unit": "WEEK", "is_renewable": False} + plan_service_items = [{"service_item_id": n, "plan_id": 1} for n in range(1, 3)] + [ + {"service_item_id": n, "plan_id": 2} for n in range(1, 3) + ] + model = self.bc.database.create( + plan=(2, plan), + user=1, + capability="read_plan", + role=1, + profile_academy=1, + skip_cohort=True, + service_item=2, + plan_service_item=plan_service_items, + financing_option=2, + ) self.client.force_authenticate(model.user) self.bc.request.set_headers(academy=1) - url = reverse_lazy('payments:academy_plan') + url = reverse_lazy("payments:academy_plan") response = self.client.get(url) json = response.json() expected = [ - get_serializer(model.plan[1], - model.currency, - model.service, - academy=model.academy, - service_items=model.service_item, - financing_options=model.financing_option), - get_serializer(model.plan[0], - model.currency, - model.service, - academy=model.academy, - service_items=model.service_item, - financing_options=model.financing_option), + get_serializer( + model.plan[1], + model.currency, + model.service, + academy=model.academy, + service_items=model.service_item, + financing_options=model.financing_option, + ), + get_serializer( + model.plan[0], + model.currency, + model.service, + academy=model.academy, + service_items=model.service_item, + financing_options=model.financing_option, + ), ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('payments.Plan'), + self.bc.database.list_of("payments.Plan"), self.bc.format.to_dict(model.plan), ) @@ -284,37 +288,35 @@ def test__two_items__plan_is_not_renewable(self): # When: get with no auth and cohort provided in the querystring # Then: return 400 def test__cohort_not_found(self): - plan = {'time_of_life': None, 'time_of_life_unit': None, 'is_renewable': True} - plan_service_items = [{ - 'service_item_id': n, - 'plan_id': 1 - } for n in range(1, 3)] + [{ - 'service_item_id': n, - 'plan_id': 2 - } for n in range(1, 3)] - model = self.bc.database.create(plan=(2, plan), - user=1, - capability='read_plan', - role=1, - profile_academy=1, - skip_cohort=True, - service_item=2, - plan_service_item=plan_service_items, - financing_option=2) + plan = {"time_of_life": None, "time_of_life_unit": None, "is_renewable": True} + plan_service_items = [{"service_item_id": n, "plan_id": 1} for n in range(1, 3)] + [ + {"service_item_id": n, "plan_id": 2} for n in range(1, 3) + ] + model = self.bc.database.create( + plan=(2, plan), + user=1, + capability="read_plan", + role=1, + profile_academy=1, + skip_cohort=True, + service_item=2, + plan_service_item=plan_service_items, + financing_option=2, + ) self.client.force_authenticate(model.user) self.bc.request.set_headers(academy=1) - url = reverse_lazy('payments:academy_plan') + '?cohort=1' + url = reverse_lazy("payments:academy_plan") + "?cohort=1" response = self.client.get(url) json = response.json() - expected = {'detail': 'cohort-not-found', 'status_code': 400} + expected = {"detail": "cohort-not-found", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual( 
- self.bc.database.list_of('payments.Plan'), + self.bc.database.list_of("payments.Plan"), self.bc.format.to_dict(model.plan), ) @@ -324,29 +326,27 @@ def test__cohort_not_found(self): # -> plan is_onboarding is False # Then: return 200 with 2 Plan with no financial options def test__cohort_exists__is_onboarding_is_false(self): - plan = {'time_of_life': None, 'time_of_life_unit': None, 'is_renewable': True, 'is_onboarding': False} - plan_service_items = [{ - 'service_item_id': n, - 'plan_id': 1 - } for n in range(1, 3)] + [{ - 'service_item_id': n, - 'plan_id': 2 - } for n in range(1, 3)] - model = self.bc.database.create(plan=(2, plan), - user=1, - capability='read_plan', - role=1, - profile_academy=1, - service_item=2, - plan_service_item=plan_service_items, - financing_option=2, - cohort=1, - syllabus_version=1) + plan = {"time_of_life": None, "time_of_life_unit": None, "is_renewable": True, "is_onboarding": False} + plan_service_items = [{"service_item_id": n, "plan_id": 1} for n in range(1, 3)] + [ + {"service_item_id": n, "plan_id": 2} for n in range(1, 3) + ] + model = self.bc.database.create( + plan=(2, plan), + user=1, + capability="read_plan", + role=1, + profile_academy=1, + service_item=2, + plan_service_item=plan_service_items, + financing_option=2, + cohort=1, + syllabus_version=1, + ) self.client.force_authenticate(model.user) self.bc.request.set_headers(academy=1) - url = reverse_lazy('payments:academy_plan') + '?cohort=1' + url = reverse_lazy("payments:academy_plan") + "?cohort=1" response = self.client.get(url) json = response.json() @@ -355,7 +355,7 @@ def test__cohort_exists__is_onboarding_is_false(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('payments.Plan'), + self.bc.database.list_of("payments.Plan"), self.bc.format.to_dict(model.plan), ) @@ -365,28 +365,26 @@ def test__cohort_exists__is_onboarding_is_false(self): # -> plan is_onboarding is True # Then: return 200 with 2 Plan with no financial options def test__cohort_exists__is_onboarding_is_true(self): - plan = {'time_of_life': None, 'time_of_life_unit': None, 'is_renewable': True, 'is_onboarding': True} - plan_service_items = [{ - 'service_item_id': n, - 'plan_id': 1 - } for n in range(1, 3)] + [{ - 'service_item_id': n, - 'plan_id': 2 - } for n in range(1, 3)] - model = self.bc.database.create(plan=(2, plan), - user=1, - capability='read_plan', - role=1, - profile_academy=1, - service_item=2, - plan_service_item=plan_service_items, - financing_option=2, - cohort=1) + plan = {"time_of_life": None, "time_of_life_unit": None, "is_renewable": True, "is_onboarding": True} + plan_service_items = [{"service_item_id": n, "plan_id": 1} for n in range(1, 3)] + [ + {"service_item_id": n, "plan_id": 2} for n in range(1, 3) + ] + model = self.bc.database.create( + plan=(2, plan), + user=1, + capability="read_plan", + role=1, + profile_academy=1, + service_item=2, + plan_service_item=plan_service_items, + financing_option=2, + cohort=1, + ) self.client.force_authenticate(model.user) self.bc.request.set_headers(academy=1) - url = reverse_lazy('payments:academy_plan') + '?cohort=1' + url = reverse_lazy("payments:academy_plan") + "?cohort=1" response = self.client.get(url) json = response.json() @@ -395,7 +393,7 @@ def test__cohort_exists__is_onboarding_is_true(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('payments.Plan'), + 
self.bc.database.list_of("payments.Plan"), self.bc.format.to_dict(model.plan), ) @@ -405,58 +403,60 @@ def test__cohort_exists__is_onboarding_is_true(self): # -> plan is_onboarding is True # Then: return 200 with 2 Plan with no financial options def test__cohort_exists__is_onboarding_is_true(self): - plan = {'time_of_life': None, 'time_of_life_unit': None, 'is_renewable': True, 'is_onboarding': True} - plan_service_items = [{ - 'service_item_id': n, - 'plan_id': 1 - } for n in range(1, 3)] + [{ - 'service_item_id': n, - 'plan_id': 2 - } for n in range(1, 3)] - cohort = {'available_as_saas': True} - academy = {'available_as_saas': True} - model = self.bc.database.create(plan=(2, plan), - user=1, - capability='read_plan', - role=1, - profile_academy=1, - service_item=2, - plan_service_item=plan_service_items, - financing_option=2, - cohort=cohort, - cohort_set=1, - cohort_set_cohort=1, - syllabus_version=1, - academy=academy) + plan = {"time_of_life": None, "time_of_life_unit": None, "is_renewable": True, "is_onboarding": True} + plan_service_items = [{"service_item_id": n, "plan_id": 1} for n in range(1, 3)] + [ + {"service_item_id": n, "plan_id": 2} for n in range(1, 3) + ] + cohort = {"available_as_saas": True} + academy = {"available_as_saas": True} + model = self.bc.database.create( + plan=(2, plan), + user=1, + capability="read_plan", + role=1, + profile_academy=1, + service_item=2, + plan_service_item=plan_service_items, + financing_option=2, + cohort=cohort, + cohort_set=1, + cohort_set_cohort=1, + syllabus_version=1, + academy=academy, + ) self.client.force_authenticate(model.user) self.bc.request.set_headers(academy=1) - url = reverse_lazy('payments:academy_plan') + '?cohort=1' + url = reverse_lazy("payments:academy_plan") + "?cohort=1" response = self.client.get(url) json = response.json() expected = [ - get_serializer(model.plan[1], - model.currency, - model.service, - model.academy, - service_items=model.service_item, - cohorts=[model.cohort], - financing_options=[]), - get_serializer(model.plan[0], - model.currency, - model.service, - model.academy, - service_items=model.service_item, - cohorts=[model.cohort], - financing_options=[]), + get_serializer( + model.plan[1], + model.currency, + model.service, + model.academy, + service_items=model.service_item, + cohorts=[model.cohort], + financing_options=[], + ), + get_serializer( + model.plan[0], + model.currency, + model.service, + model.academy, + service_items=model.service_item, + cohorts=[model.cohort], + financing_options=[], + ), ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('payments.Plan'), + self.bc.database.list_of("payments.Plan"), self.bc.format.to_dict(model.plan), ) @@ -468,37 +468,35 @@ def test__cohort_exists__is_onboarding_is_true(self): # When: get with no auth and cohort provided in the querystring # Then: return 400 def test__syllabus_not_found(self): - plan = {'time_of_life': None, 'time_of_life_unit': None, 'is_renewable': True} - plan_service_items = [{ - 'service_item_id': n, - 'plan_id': 1 - } for n in range(1, 3)] + [{ - 'service_item_id': n, - 'plan_id': 2 - } for n in range(1, 3)] - model = self.bc.database.create(plan=(2, plan), - user=1, - capability='read_plan', - role=1, - profile_academy=1, - skip_cohort=True, - service_item=2, - plan_service_item=plan_service_items, - financing_option=2) + plan = {"time_of_life": None, "time_of_life_unit": None, "is_renewable": True} + plan_service_items = 
[{"service_item_id": n, "plan_id": 1} for n in range(1, 3)] + [ + {"service_item_id": n, "plan_id": 2} for n in range(1, 3) + ] + model = self.bc.database.create( + plan=(2, plan), + user=1, + capability="read_plan", + role=1, + profile_academy=1, + skip_cohort=True, + service_item=2, + plan_service_item=plan_service_items, + financing_option=2, + ) self.client.force_authenticate(model.user) self.bc.request.set_headers(academy=1) - url = reverse_lazy('payments:academy_plan') + '?syllabus=1' + url = reverse_lazy("payments:academy_plan") + "?syllabus=1" response = self.client.get(url) json = response.json() - expected = {'detail': 'syllabus-not-found', 'status_code': 400} + expected = {"detail": "syllabus-not-found", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual( - self.bc.database.list_of('payments.Plan'), + self.bc.database.list_of("payments.Plan"), self.bc.format.to_dict(model.plan), ) @@ -508,29 +506,27 @@ def test__syllabus_not_found(self): # -> plan is_onboarding is False # Then: return 200 with 2 Plan with no financial options def test__syllabus_exists__is_onboarding_is_false(self): - plan = {'time_of_life': None, 'time_of_life_unit': None, 'is_renewable': True, 'is_onboarding': False} - plan_service_items = [{ - 'service_item_id': n, - 'plan_id': 1 - } for n in range(1, 3)] + [{ - 'service_item_id': n, - 'plan_id': 2 - } for n in range(1, 3)] - model = self.bc.database.create(plan=(2, plan), - user=1, - capability='read_plan', - role=1, - profile_academy=1, - service_item=2, - plan_service_item=plan_service_items, - financing_option=2, - cohort=1, - syllabus_version=1) + plan = {"time_of_life": None, "time_of_life_unit": None, "is_renewable": True, "is_onboarding": False} + plan_service_items = [{"service_item_id": n, "plan_id": 1} for n in range(1, 3)] + [ + {"service_item_id": n, "plan_id": 2} for n in range(1, 3) + ] + model = self.bc.database.create( + plan=(2, plan), + user=1, + capability="read_plan", + role=1, + profile_academy=1, + service_item=2, + plan_service_item=plan_service_items, + financing_option=2, + cohort=1, + syllabus_version=1, + ) self.client.force_authenticate(model.user) self.bc.request.set_headers(academy=1) - url = reverse_lazy('payments:academy_plan') + '?syllabus=1' + url = reverse_lazy("payments:academy_plan") + "?syllabus=1" response = self.client.get(url) json = response.json() @@ -539,7 +535,7 @@ def test__syllabus_exists__is_onboarding_is_false(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('payments.Plan'), + self.bc.database.list_of("payments.Plan"), self.bc.format.to_dict(model.plan), ) @@ -549,28 +545,26 @@ def test__syllabus_exists__is_onboarding_is_false(self): # -> plan is_onboarding is True # Then: return 200 with 2 Plan with no financial options def test__syllabus_exists__is_onboarding_is_true(self): - plan = {'time_of_life': None, 'time_of_life_unit': None, 'is_renewable': True, 'is_onboarding': True} - plan_service_items = [{ - 'service_item_id': n, - 'plan_id': 1 - } for n in range(1, 3)] + [{ - 'service_item_id': n, - 'plan_id': 2 - } for n in range(1, 3)] - model = self.bc.database.create(plan=(2, plan), - user=1, - capability='read_plan', - role=1, - profile_academy=1, - service_item=2, - plan_service_item=plan_service_items, - financing_option=2, - cohort=1) + plan = {"time_of_life": None, "time_of_life_unit": None, "is_renewable": True, 
"is_onboarding": True} + plan_service_items = [{"service_item_id": n, "plan_id": 1} for n in range(1, 3)] + [ + {"service_item_id": n, "plan_id": 2} for n in range(1, 3) + ] + model = self.bc.database.create( + plan=(2, plan), + user=1, + capability="read_plan", + role=1, + profile_academy=1, + service_item=2, + plan_service_item=plan_service_items, + financing_option=2, + cohort=1, + ) self.client.force_authenticate(model.user) self.bc.request.set_headers(academy=1) - url = reverse_lazy('payments:academy_plan') + '?syllabus=1' + url = reverse_lazy("payments:academy_plan") + "?syllabus=1" response = self.client.get(url) json = response.json() @@ -579,7 +573,7 @@ def test__syllabus_exists__is_onboarding_is_true(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('payments.Plan'), + self.bc.database.list_of("payments.Plan"), self.bc.format.to_dict(model.plan), ) @@ -589,58 +583,60 @@ def test__syllabus_exists__is_onboarding_is_true(self): # -> plan is_onboarding is True # Then: return 200 with 2 Plan with no financial options def test__syllabus_exists__is_onboarding_is_true(self): - plan = {'time_of_life': None, 'time_of_life_unit': None, 'is_renewable': True, 'is_onboarding': True} - plan_service_items = [{ - 'service_item_id': n, - 'plan_id': 1 - } for n in range(1, 3)] + [{ - 'service_item_id': n, - 'plan_id': 2 - } for n in range(1, 3)] - cohort = {'available_as_saas': True} - academy = {'available_as_saas': True} - model = self.bc.database.create(plan=(2, plan), - user=1, - capability='read_plan', - role=1, - profile_academy=1, - service_item=2, - plan_service_item=plan_service_items, - financing_option=2, - cohort=cohort, - cohort_set=1, - cohort_set_cohort=1, - syllabus_version=1, - academy=academy) + plan = {"time_of_life": None, "time_of_life_unit": None, "is_renewable": True, "is_onboarding": True} + plan_service_items = [{"service_item_id": n, "plan_id": 1} for n in range(1, 3)] + [ + {"service_item_id": n, "plan_id": 2} for n in range(1, 3) + ] + cohort = {"available_as_saas": True} + academy = {"available_as_saas": True} + model = self.bc.database.create( + plan=(2, plan), + user=1, + capability="read_plan", + role=1, + profile_academy=1, + service_item=2, + plan_service_item=plan_service_items, + financing_option=2, + cohort=cohort, + cohort_set=1, + cohort_set_cohort=1, + syllabus_version=1, + academy=academy, + ) self.client.force_authenticate(model.user) self.bc.request.set_headers(academy=1) - url = reverse_lazy('payments:academy_plan') + '?syllabus=1' + url = reverse_lazy("payments:academy_plan") + "?syllabus=1" response = self.client.get(url) json = response.json() expected = [ - get_serializer(model.plan[1], - model.currency, - model.service, - model.academy, - service_items=model.service_item, - cohorts=[model.cohort], - financing_options=[]), - get_serializer(model.plan[0], - model.currency, - model.service, - model.academy, - service_items=model.service_item, - cohorts=[model.cohort], - financing_options=[]), + get_serializer( + model.plan[1], + model.currency, + model.service, + model.academy, + service_items=model.service_item, + cohorts=[model.cohort], + financing_options=[], + ), + get_serializer( + model.plan[0], + model.currency, + model.service, + model.academy, + service_items=model.service_item, + cohorts=[model.cohort], + financing_options=[], + ), ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - 
self.bc.database.list_of('payments.Plan'), + self.bc.database.list_of("payments.Plan"), self.bc.format.to_dict(model.plan), ) @@ -651,62 +647,62 @@ def test__syllabus_exists__is_onboarding_is_true(self): # Given: compile_lookup was mocked # When: the mock is called # Then: the mock should be called with the correct arguments and does not raise an exception - @patch('breathecode.utils.api_view_extensions.extensions.lookup_extension.compile_lookup', - MagicMock(wraps=lookup_extension.compile_lookup)) + @patch( + "breathecode.utils.api_view_extensions.extensions.lookup_extension.compile_lookup", + MagicMock(wraps=lookup_extension.compile_lookup), + ) def test_lookup_extension(self): self.bc.request.set_headers(academy=1) - plan = {'time_of_life': None, 'time_of_life_unit': None} - plan_service_items = [{ - 'service_item_id': n, - 'plan_id': 1 - } for n in range(1, 3)] + [{ - 'service_item_id': n, - 'plan_id': 2 - } for n in range(1, 3)] - model = self.bc.database.create(plan=(2, plan), - user=1, - capability='read_plan', - role=1, - profile_academy=1, - service_item=2, - plan_service_item=plan_service_items) + plan = {"time_of_life": None, "time_of_life_unit": None} + plan_service_items = [{"service_item_id": n, "plan_id": 1} for n in range(1, 3)] + [ + {"service_item_id": n, "plan_id": 2} for n in range(1, 3) + ] + model = self.bc.database.create( + plan=(2, plan), + user=1, + capability="read_plan", + role=1, + profile_academy=1, + service_item=2, + plan_service_item=plan_service_items, + ) self.client.force_authenticate(model.user) self.bc.request.set_headers(academy=1) args, kwargs = self.bc.format.call( - 'en', + "en", strings={ - 'exact': [ - 'service_items__service__slug', + "exact": [ + "service_items__service__slug", ], }, overwrite={ - 'service_slug': 'service_items__service__slug', + "service_slug": "service_items__service__slug", }, - custom_fields={'is_onboarding': lambda: 'true' if random.randint(0, 1) else 'false'}, + custom_fields={"is_onboarding": lambda: "true" if random.randint(0, 1) else "false"}, ) query = self.bc.format.lookup(*args, **kwargs) - url = reverse_lazy('payments:academy_plan') + '?' + self.bc.format.querystring(query) + url = reverse_lazy("payments:academy_plan") + "?" 
+ self.bc.format.querystring(query) - self.assertEqual([x for x in query], ['service_slug', 'is_onboarding']) + self.assertEqual([x for x in query], ["service_slug", "is_onboarding"]) response = self.client.get(url) json = response.json() expected = [] - for x in ['overwrite', 'custom_fields']: + for x in ["overwrite", "custom_fields"]: if x in kwargs: del kwargs[x] - for field in ['ids', 'slugs']: + for field in ["ids", "slugs"]: values = kwargs.get(field, tuple()) kwargs[field] = tuple(values) - for field in ['ints', 'strings', 'bools', 'datetimes']: + for field in ["ints", "strings", "bools", "datetimes"]: modes = kwargs.get(field, {}) for mode in modes: if not isinstance(kwargs[field][mode], tuple): @@ -714,178 +710,194 @@ def test_lookup_extension(self): kwargs[field] = frozenset(modes.items()) - self.bc.check.calls(lookup_extension.compile_lookup.call_args_list, [ - call(**kwargs), - ]) + self.bc.check.calls( + lookup_extension.compile_lookup.call_args_list, + [ + call(**kwargs), + ], + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('payments.Plan'), + self.bc.database.list_of("payments.Plan"), self.bc.format.to_dict(model.plan), ) # When: get is called # Then: it's setup properly - @patch.object(APIViewExtensionHandlers, '_spy_extensions', MagicMock()) - @patch.object(APIViewExtensionHandlers, '_spy_extension_arguments', MagicMock()) + @patch.object(APIViewExtensionHandlers, "_spy_extensions", MagicMock()) + @patch.object(APIViewExtensionHandlers, "_spy_extension_arguments", MagicMock()) def test_get__spy_extensions(self): - plan = {'time_of_life': None, 'time_of_life_unit': None, 'is_renewable': True, 'is_onboarding': True} - plan_service_items = [{ - 'service_item_id': n, - 'plan_id': 1 - } for n in range(1, 3)] + [{ - 'service_item_id': n, - 'plan_id': 2 - } for n in range(1, 3)] - model = self.bc.database.create(plan=(2, plan), - user=1, - capability='read_plan', - role=1, - profile_academy=1, - service_item=2, - plan_service_item=plan_service_items, - financing_option=2, - cohort=1, - syllabus_version=1) + plan = {"time_of_life": None, "time_of_life_unit": None, "is_renewable": True, "is_onboarding": True} + plan_service_items = [{"service_item_id": n, "plan_id": 1} for n in range(1, 3)] + [ + {"service_item_id": n, "plan_id": 2} for n in range(1, 3) + ] + model = self.bc.database.create( + plan=(2, plan), + user=1, + capability="read_plan", + role=1, + profile_academy=1, + service_item=2, + plan_service_item=plan_service_items, + financing_option=2, + cohort=1, + syllabus_version=1, + ) self.client.force_authenticate(model.user) self.bc.request.set_headers(academy=1) - url = reverse_lazy('payments:academy_plan') + '?syllabus=1' + url = reverse_lazy("payments:academy_plan") + "?syllabus=1" response = self.client.get(url) - self.bc.check.calls(APIViewExtensionHandlers._spy_extensions.call_args_list, [ - call(['LanguageExtension', 'LookupExtension', 'PaginationExtension', 'SortExtension']), - ]) + self.bc.check.calls( + APIViewExtensionHandlers._spy_extensions.call_args_list, + [ + call(["LanguageExtension", "LookupExtension", "PaginationExtension", "SortExtension"]), + ], + ) - self.bc.check.calls(APIViewExtensionHandlers._spy_extension_arguments.call_args_list, [ - call(sort='-id', paginate=True), - ]) + self.bc.check.calls( + APIViewExtensionHandlers._spy_extension_arguments.call_args_list, + [ + call(sort="-id", paginate=True), + ], + ) # Given: 2 Plan, 4 PlanServiceItem, 2 ServiceItem 
and 1 Service # When: get with no auth and plan is renewable # Then: return 400 because required fields are missing def test__post__required_fields(self): - plan = {'time_of_life': None, 'time_of_life_unit': None, 'is_renewable': True} - plan_service_items = [{'service_item_id': n, 'plan_id': 1} for n in range(1, 3)] - model = self.bc.database.create(plan=plan, - user=1, - capability='crud_plan', - role=1, - profile_academy=1, - skip_cohort=True, - service_item=2, - plan_service_item=plan_service_items, - financing_option=2) + plan = {"time_of_life": None, "time_of_life_unit": None, "is_renewable": True} + plan_service_items = [{"service_item_id": n, "plan_id": 1} for n in range(1, 3)] + model = self.bc.database.create( + plan=plan, + user=1, + capability="crud_plan", + role=1, + profile_academy=1, + skip_cohort=True, + service_item=2, + plan_service_item=plan_service_items, + financing_option=2, + ) self.client.force_authenticate(model.user) self.bc.request.set_headers(academy=1) data = { - 'slug': self.bc.fake.slug(), - 'is_renewable': random.choice([True, False]), - 'status': random.choice(['DRAFT', 'ACTIVE', 'UNLISTED', 'DELETED', 'DISCONTINUED']), - 'is_onboarding': random.choice([True, False]), + "slug": self.bc.fake.slug(), + "is_renewable": random.choice([True, False]), + "status": random.choice(["DRAFT", "ACTIVE", "UNLISTED", "DELETED", "DISCONTINUED"]), + "is_onboarding": random.choice([True, False]), } if random.choice([True, False]): - data['time_of_life'] = random.randint(1, 100) - data['time_of_life_unit'] = random.choice(['DAY', 'WEEK', 'MONTH', 'YEAR']) - data['is_renewable'] = False + data["time_of_life"] = random.randint(1, 100) + data["time_of_life_unit"] = random.choice(["DAY", "WEEK", "MONTH", "YEAR"]) + data["is_renewable"] = False else: - data['time_of_life'] = None - data['time_of_life_unit'] = None - data['is_renewable'] = True + data["time_of_life"] = None + data["time_of_life_unit"] = None + data["is_renewable"] = True if random.choice([True, False]): - data['trial_duration'] = random.randint(1, 100) - data['trial_duration_unit'] = random.choice(['DAY', 'WEEK', 'MONTH', 'YEAR']) + data["trial_duration"] = random.randint(1, 100) + data["trial_duration_unit"] = random.choice(["DAY", "WEEK", "MONTH", "YEAR"]) else: - data['trial_duration'] = random.randint(1, 100) - data['trial_duration_unit'] = random.choice(['DAY', 'WEEK', 'MONTH', 'YEAR']) + data["trial_duration"] = random.randint(1, 100) + data["trial_duration_unit"] = random.choice(["DAY", "WEEK", "MONTH", "YEAR"]) - url = reverse_lazy('payments:academy_plan') - response = self.client.post(url, data, format='json') + url = reverse_lazy("payments:academy_plan") + response = self.client.post(url, data, format="json") json = response.json() - expected = {'detail': 'currency-not-found', 'status_code': 400} + expected = {"detail": "currency-not-found", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('payments.Plan'), [{ - **self.bc.format.to_dict(model.plan), - }]) + self.assertEqual( + self.bc.database.list_of("payments.Plan"), + [ + { + **self.bc.format.to_dict(model.plan), + } + ], + ) # Given: 2 Plan, 4 PlanServiceItem, 2 ServiceItem and 1 Service # When: get with no auth and plan is renewable # Then: return 200 and change all fields def test__post__all_fields(self): - model = self.bc.database.create(user=1, - capability='crud_plan', - role=1, - profile_academy=1, - skip_cohort=True, - 
service_item=2, - financing_option=2) + model = self.bc.database.create( + user=1, + capability="crud_plan", + role=1, + profile_academy=1, + skip_cohort=True, + service_item=2, + financing_option=2, + ) self.client.force_authenticate(model.user) self.bc.request.set_headers(academy=1) data = { - 'slug': self.bc.fake.slug(), - 'currency': model.currency.code, - 'is_renewable': random.choice([True, False]), - 'has_waiting_list': random.choice([True, False]), - 'status': random.choice(['DRAFT', 'ACTIVE', 'UNLISTED', 'DELETED', 'DISCONTINUED']), - 'is_onboarding': random.choice([True, False]), - 'price_per_half': random.randint(1, 100), - 'price_per_month': random.randint(1, 100), - 'price_per_quarter': random.randint(1, 100), - 'price_per_year': random.randint(1, 100), + "slug": self.bc.fake.slug(), + "currency": model.currency.code, + "is_renewable": random.choice([True, False]), + "has_waiting_list": random.choice([True, False]), + "status": random.choice(["DRAFT", "ACTIVE", "UNLISTED", "DELETED", "DISCONTINUED"]), + "is_onboarding": random.choice([True, False]), + "price_per_half": random.randint(1, 100), + "price_per_month": random.randint(1, 100), + "price_per_quarter": random.randint(1, 100), + "price_per_year": random.randint(1, 100), } if random.choice([True, False]): - data['time_of_life'] = random.randint(1, 100) - data['time_of_life_unit'] = random.choice(['DAY', 'WEEK', 'MONTH', 'YEAR']) - data['is_renewable'] = False + data["time_of_life"] = random.randint(1, 100) + data["time_of_life_unit"] = random.choice(["DAY", "WEEK", "MONTH", "YEAR"]) + data["is_renewable"] = False else: - data['time_of_life'] = None - data['time_of_life_unit'] = None - data['is_renewable'] = True + data["time_of_life"] = None + data["time_of_life_unit"] = None + data["is_renewable"] = True if random.choice([True, False]): - data['trial_duration'] = random.randint(1, 100) - data['trial_duration_unit'] = random.choice(['DAY', 'WEEK', 'MONTH', 'YEAR']) + data["trial_duration"] = random.randint(1, 100) + data["trial_duration_unit"] = random.choice(["DAY", "WEEK", "MONTH", "YEAR"]) else: - data['trial_duration'] = random.randint(1, 100) - data['trial_duration_unit'] = random.choice(['DAY', 'WEEK', 'MONTH', 'YEAR']) + data["trial_duration"] = random.randint(1, 100) + data["trial_duration_unit"] = random.choice(["DAY", "WEEK", "MONTH", "YEAR"]) - url = reverse_lazy('payments:academy_plan') - response = self.client.post(url, data, format='json') + url = reverse_lazy("payments:academy_plan") + response = self.client.post(url, data, format="json") data = { **data, - 'id': 1, - 'currency': 1, + "id": 1, + "currency": 1, } json = response.json() - expected = post_serializer(model.currency, - model.service, - academy=model.academy, - service_items=[], - financing_options=[], - data=data) + expected = post_serializer( + model.currency, model.service, academy=model.academy, service_items=[], financing_options=[], data=data + ) - data['currency_id'] = data.pop('currency') + data["currency_id"] = data.pop("currency") self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - self.assertEqual(self.bc.database.list_of('payments.Plan'), [ - row(model.currency, academy=model.academy, data=data), - ]) + self.assertEqual( + self.bc.database.list_of("payments.Plan"), + [ + row(model.currency, academy=model.academy, data=data), + ], + ) diff --git a/breathecode/payments/tests/urls/tests_academy_plan_id.py b/breathecode/payments/tests/urls/tests_academy_plan_id.py index 41ba9e805..74367325c 
100644 --- a/breathecode/payments/tests/urls/tests_academy_plan_id.py +++ b/breathecode/payments/tests/urls/tests_academy_plan_id.py @@ -12,35 +12,35 @@ def academy_serializer(academy): return { - 'id': academy.id, - 'name': academy.name, - 'slug': academy.slug, + "id": academy.id, + "name": academy.name, + "slug": academy.slug, } def service_item_serializer(service_item, service): return { - 'how_many': service_item.how_many, - 'service': { - 'groups': [], - 'private': service.private, - 'slug': service.slug, - 'title': service.title, - 'icon_url': service.icon_url, + "how_many": service_item.how_many, + "service": { + "groups": [], + "private": service.private, + "slug": service.slug, + "title": service.title, + "icon_url": service.icon_url, }, - 'unit_type': service_item.unit_type, - 'sort_priority': service_item.sort_priority, + "unit_type": service_item.unit_type, + "sort_priority": service_item.sort_priority, } def financing_option_serializer(financing_option, currency): return { - 'currency': { - 'code': currency.code, - 'name': currency.name, + "currency": { + "code": currency.code, + "name": currency.name, }, - 'how_many_months': financing_option.how_many_months, - 'monthly_price': financing_option.monthly_price, + "how_many_months": financing_option.how_many_months, + "monthly_price": financing_option.monthly_price, } @@ -55,62 +55,57 @@ def get_serializer(event, currency, service=None, academy=None, service_items=[] academy = academy_serializer(academy) return { - 'slug': event.slug, - 'currency': { - 'code': currency.code, - 'name': currency.name, + "slug": event.slug, + "currency": { + "code": currency.code, + "name": currency.name, }, - 'financing_options': financing_options, - 'has_available_cohorts': len(cohorts) > 0, - 'has_waiting_list': event.has_waiting_list, - 'is_renewable': event.is_renewable, - 'owner': academy, - 'price_per_half': event.price_per_half, - 'price_per_month': event.price_per_month, - 'price_per_quarter': event.price_per_quarter, - 'price_per_year': event.price_per_year, - 'service_items': service_items, - 'slug': event.slug, - 'status': event.status, - 'time_of_life': event.time_of_life, - 'time_of_life_unit': event.time_of_life_unit, - 'trial_duration': event.trial_duration, - 'trial_duration_unit': event.trial_duration_unit, + "financing_options": financing_options, + "has_available_cohorts": len(cohorts) > 0, + "has_waiting_list": event.has_waiting_list, + "is_renewable": event.is_renewable, + "owner": academy, + "price_per_half": event.price_per_half, + "price_per_month": event.price_per_month, + "price_per_quarter": event.price_per_quarter, + "price_per_year": event.price_per_year, + "service_items": service_items, + "slug": event.slug, + "status": event.status, + "time_of_life": event.time_of_life, + "time_of_life_unit": event.time_of_life_unit, + "trial_duration": event.trial_duration, + "trial_duration_unit": event.trial_duration_unit, } -def put_serializer(event, - currency, - service=None, - academy=None, - service_items=[], - financing_options=[], - cohorts=[], - data={}): +def put_serializer( + event, currency, service=None, academy=None, service_items=[], financing_options=[], cohorts=[], data={} +): return { - 'id': event.id, - 'slug': event.slug, - 'currency': currency.id, - 'financing_options': [x.id for x in financing_options], - 'has_waiting_list': event.has_waiting_list, - 'is_renewable': event.is_renewable, - 'owner': academy.id, - 'price_per_half': event.price_per_half, - 'price_per_month': event.price_per_month, - 
'price_per_quarter': event.price_per_quarter, - 'price_per_year': event.price_per_year, - 'service_items': [x.id for x in service_items], - 'slug': event.slug, - 'status': event.status, - 'time_of_life': event.time_of_life, - 'time_of_life_unit': event.time_of_life_unit, - 'trial_duration': event.trial_duration, - 'trial_duration_unit': event.trial_duration_unit, - 'mentorship_service_set': event.mentorship_service_set, - 'cohort_set': event.cohort_set, - 'event_type_set': event.event_type_set, - 'invites': [], + "id": event.id, + "slug": event.slug, + "currency": currency.id, + "financing_options": [x.id for x in financing_options], + "has_waiting_list": event.has_waiting_list, + "is_renewable": event.is_renewable, + "owner": academy.id, + "price_per_half": event.price_per_half, + "price_per_month": event.price_per_month, + "price_per_quarter": event.price_per_quarter, + "price_per_year": event.price_per_year, + "service_items": [x.id for x in service_items], + "slug": event.slug, + "status": event.status, + "time_of_life": event.time_of_life, + "time_of_life_unit": event.time_of_life_unit, + "trial_duration": event.trial_duration, + "trial_duration_unit": event.trial_duration_unit, + "mentorship_service_set": event.mentorship_service_set, + "cohort_set": event.cohort_set, + "event_type_set": event.event_type_set, + "invites": [], **data, } @@ -121,15 +116,15 @@ class SignalTestSuite(PaymentsTestCase): # When: get with no auth # Then: return 200 def test__no_auth(self): - url = reverse_lazy('payments:academy_plan_id', kwargs={'plan_id': 1}) + url = reverse_lazy("payments:academy_plan_id", kwargs={"plan_id": 1}) response = self.client.get(url) json = response.json() - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) - self.assertEqual(self.bc.database.list_of('payments.Plan'), []) + self.assertEqual(self.bc.database.list_of("payments.Plan"), []) # Given: 0 Plan # When: get with no auth @@ -140,25 +135,25 @@ def test__no_capability(self): self.client.force_authenticate(model.user) self.bc.request.set_headers(academy=1) - url = reverse_lazy('payments:academy_plan_id', kwargs={'plan_id': 1}) + url = reverse_lazy("payments:academy_plan_id", kwargs={"plan_id": 1}) response = self.client.get(url) json = response.json() expected = { - 'detail': "You (user: 1) don't have this capability: read_plan for academy 1", - 'status_code': 403, + "detail": "You (user: 1) don't have this capability: read_plan for academy 1", + "status_code": 403, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) - self.assertEqual(self.bc.database.list_of('payments.Plan'), []) + self.assertEqual(self.bc.database.list_of("payments.Plan"), []) # When: Not found # Then: return 404 def test__not_found(self): model = self.bc.database.create( user=1, - capability='read_plan', + capability="read_plan", role=1, profile_academy=1, skip_cohort=True, @@ -167,200 +162,231 @@ def test__not_found(self): self.client.force_authenticate(model.user) self.bc.request.set_headers(academy=1) - url = reverse_lazy('payments:academy_plan_id', kwargs={'plan_id': 1}) + url = reverse_lazy("payments:academy_plan_id", kwargs={"plan_id": 1}) response = self.client.get(url) json = response.json() - expected = {'detail': 'not-found', 'status_code': 404} + expected = 
{"detail": "not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - self.assertEqual(self.bc.database.list_of('payments.Plan'), []) + self.assertEqual(self.bc.database.list_of("payments.Plan"), []) # Given: 2 Plan, 4 PlanServiceItem, 2 ServiceItem and 1 Service # When: get with no auth and plan is renewable # Then: return 200 with 2 Plan with no financial options def test__two_items__plan_is_renewable(self): - plan = {'time_of_life': None, 'time_of_life_unit': None, 'is_renewable': True} - plan_service_items = [{'service_item_id': n, 'plan_id': 1} for n in range(1, 3)] - model = self.bc.database.create(plan=plan, - user=1, - capability='read_plan', - role=1, - profile_academy=1, - skip_cohort=True, - service_item=2, - plan_service_item=plan_service_items, - financing_option=2) + plan = {"time_of_life": None, "time_of_life_unit": None, "is_renewable": True} + plan_service_items = [{"service_item_id": n, "plan_id": 1} for n in range(1, 3)] + model = self.bc.database.create( + plan=plan, + user=1, + capability="read_plan", + role=1, + profile_academy=1, + skip_cohort=True, + service_item=2, + plan_service_item=plan_service_items, + financing_option=2, + ) self.client.force_authenticate(model.user) self.bc.request.set_headers(academy=1) - url = reverse_lazy('payments:academy_plan_id', kwargs={'plan_id': 1}) + url = reverse_lazy("payments:academy_plan_id", kwargs={"plan_id": 1}) response = self.client.get(url) json = response.json() - expected = get_serializer(model.plan, - model.currency, - model.service, - academy=model.academy, - service_items=model.service_item, - financing_options=[]) + expected = get_serializer( + model.plan, + model.currency, + model.service, + academy=model.academy, + service_items=model.service_item, + financing_options=[], + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('payments.Plan'), [ - self.bc.format.to_dict(model.plan), - ]) + self.assertEqual( + self.bc.database.list_of("payments.Plan"), + [ + self.bc.format.to_dict(model.plan), + ], + ) # When: get is called # Then: it's setup properly - @patch.object(APIViewExtensionHandlers, '_spy_extensions', MagicMock()) - @patch.object(APIViewExtensionHandlers, '_spy_extension_arguments', MagicMock()) + @patch.object(APIViewExtensionHandlers, "_spy_extensions", MagicMock()) + @patch.object(APIViewExtensionHandlers, "_spy_extension_arguments", MagicMock()) def test_get__spy_extensions(self): - plan = {'time_of_life': None, 'time_of_life_unit': None, 'is_renewable': True} - plan_service_items = [{'service_item_id': n, 'plan_id': 1} for n in range(1, 3)] - model = self.bc.database.create(plan=plan, - user=1, - capability='read_plan', - role=1, - profile_academy=1, - skip_cohort=True, - service_item=2, - plan_service_item=plan_service_items, - financing_option=2) + plan = {"time_of_life": None, "time_of_life_unit": None, "is_renewable": True} + plan_service_items = [{"service_item_id": n, "plan_id": 1} for n in range(1, 3)] + model = self.bc.database.create( + plan=plan, + user=1, + capability="read_plan", + role=1, + profile_academy=1, + skip_cohort=True, + service_item=2, + plan_service_item=plan_service_items, + financing_option=2, + ) self.client.force_authenticate(model.user) self.bc.request.set_headers(academy=1) - url = reverse_lazy('payments:academy_plan_id', kwargs={'plan_id': 1}) + url = reverse_lazy("payments:academy_plan_id", kwargs={"plan_id": 
1}) self.client.get(url) - self.bc.check.calls(APIViewExtensionHandlers._spy_extensions.call_args_list, [ - call(['LanguageExtension', 'LookupExtension', 'PaginationExtension', 'SortExtension']), - ]) + self.bc.check.calls( + APIViewExtensionHandlers._spy_extensions.call_args_list, + [ + call(["LanguageExtension", "LookupExtension", "PaginationExtension", "SortExtension"]), + ], + ) - self.bc.check.calls(APIViewExtensionHandlers._spy_extension_arguments.call_args_list, [ - call(sort='-id', paginate=True), - ]) + self.bc.check.calls( + APIViewExtensionHandlers._spy_extension_arguments.call_args_list, + [ + call(sort="-id", paginate=True), + ], + ) # Given: 2 Plan, 4 PlanServiceItem, 2 ServiceItem and 1 Service # When: get with no auth and plan is renewable # Then: return 200 but not found def test__put__not_found(self): - plan = {'time_of_life': None, 'time_of_life_unit': None, 'is_renewable': True} - plan_service_items = [{'service_item_id': n, 'plan_id': 1} for n in range(1, 3)] - model = self.bc.database.create(plan=plan, - user=1, - capability='crud_plan', - role=1, - profile_academy=1, - skip_cohort=True, - service_item=2, - plan_service_item=plan_service_items, - financing_option=2) + plan = {"time_of_life": None, "time_of_life_unit": None, "is_renewable": True} + plan_service_items = [{"service_item_id": n, "plan_id": 1} for n in range(1, 3)] + model = self.bc.database.create( + plan=plan, + user=1, + capability="crud_plan", + role=1, + profile_academy=1, + skip_cohort=True, + service_item=2, + plan_service_item=plan_service_items, + financing_option=2, + ) self.client.force_authenticate(model.user) self.bc.request.set_headers(academy=1) data = { - 'slug': self.bc.fake.slug(), - 'is_renewable': random.choice([True, False]), - 'status': random.choice(['DRAFT', 'ACTIVE', 'UNLISTED', 'DELETED', 'DISCONTINUED']), - 'is_onboarding': random.choice([True, False]), - 'has_waiting_list': random.choice([True, False]), + "slug": self.bc.fake.slug(), + "is_renewable": random.choice([True, False]), + "status": random.choice(["DRAFT", "ACTIVE", "UNLISTED", "DELETED", "DISCONTINUED"]), + "is_onboarding": random.choice([True, False]), + "has_waiting_list": random.choice([True, False]), } if random.choice([True, False]): - data['time_of_life'] = random.randint(1, 100) - data['time_of_life_unit'] = random.choice(['DAY', 'WEEK', 'MONTH', 'YEAR']) - data['is_renewable'] = False + data["time_of_life"] = random.randint(1, 100) + data["time_of_life_unit"] = random.choice(["DAY", "WEEK", "MONTH", "YEAR"]) + data["is_renewable"] = False else: - data['time_of_life'] = None - data['time_of_life_unit'] = None - data['is_renewable'] = True + data["time_of_life"] = None + data["time_of_life_unit"] = None + data["is_renewable"] = True if random.choice([True, False]): - data['trial_duration'] = random.randint(1, 100) - data['trial_duration_unit'] = random.choice(['DAY', 'WEEK', 'MONTH', 'YEAR']) + data["trial_duration"] = random.randint(1, 100) + data["trial_duration_unit"] = random.choice(["DAY", "WEEK", "MONTH", "YEAR"]) else: - data['trial_duration'] = random.randint(1, 100) - data['trial_duration_unit'] = random.choice(['DAY', 'WEEK', 'MONTH', 'YEAR']) + data["trial_duration"] = random.randint(1, 100) + data["trial_duration_unit"] = random.choice(["DAY", "WEEK", "MONTH", "YEAR"]) - url = reverse_lazy('payments:academy_plan_id', kwargs={'plan_id': 2}) - response = self.client.put(url, data, format='json') + url = reverse_lazy("payments:academy_plan_id", kwargs={"plan_id": 2}) + response = self.client.put(url, 
data, format="json") json = response.json() - expected = {'detail': 'not-found', 'status_code': 404} + expected = {"detail": "not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - self.assertEqual(self.bc.database.list_of('payments.Plan'), [{ - **self.bc.format.to_dict(model.plan), - }]) + self.assertEqual( + self.bc.database.list_of("payments.Plan"), + [ + { + **self.bc.format.to_dict(model.plan), + } + ], + ) # Given: 2 Plan, 4 PlanServiceItem, 2 ServiceItem and 1 Service # When: get with no auth and plan is renewable # Then: return 200 and change all fields def test__put__all_fields(self): - plan = {'time_of_life': None, 'time_of_life_unit': None, 'is_renewable': True} - plan_service_items = [{'service_item_id': n, 'plan_id': 1} for n in range(1, 3)] - model = self.bc.database.create(plan=plan, - user=1, - capability='crud_plan', - role=1, - profile_academy=1, - skip_cohort=True, - service_item=2, - plan_service_item=plan_service_items, - financing_option=2) + plan = {"time_of_life": None, "time_of_life_unit": None, "is_renewable": True} + plan_service_items = [{"service_item_id": n, "plan_id": 1} for n in range(1, 3)] + model = self.bc.database.create( + plan=plan, + user=1, + capability="crud_plan", + role=1, + profile_academy=1, + skip_cohort=True, + service_item=2, + plan_service_item=plan_service_items, + financing_option=2, + ) self.client.force_authenticate(model.user) self.bc.request.set_headers(academy=1) data = { - 'slug': self.bc.fake.slug(), - 'is_renewable': random.choice([True, False]), - 'status': random.choice(['DRAFT', 'ACTIVE', 'UNLISTED', 'DELETED', 'DISCONTINUED']), - 'is_onboarding': random.choice([True, False]), - 'has_waiting_list': random.choice([True, False]), + "slug": self.bc.fake.slug(), + "is_renewable": random.choice([True, False]), + "status": random.choice(["DRAFT", "ACTIVE", "UNLISTED", "DELETED", "DISCONTINUED"]), + "is_onboarding": random.choice([True, False]), + "has_waiting_list": random.choice([True, False]), } if random.choice([True, False]): - data['time_of_life'] = random.randint(1, 100) - data['time_of_life_unit'] = random.choice(['DAY', 'WEEK', 'MONTH', 'YEAR']) - data['is_renewable'] = False + data["time_of_life"] = random.randint(1, 100) + data["time_of_life_unit"] = random.choice(["DAY", "WEEK", "MONTH", "YEAR"]) + data["is_renewable"] = False else: - data['time_of_life'] = None - data['time_of_life_unit'] = None - data['is_renewable'] = True + data["time_of_life"] = None + data["time_of_life_unit"] = None + data["is_renewable"] = True if random.choice([True, False]): - data['trial_duration'] = random.randint(1, 100) - data['trial_duration_unit'] = random.choice(['DAY', 'WEEK', 'MONTH', 'YEAR']) + data["trial_duration"] = random.randint(1, 100) + data["trial_duration_unit"] = random.choice(["DAY", "WEEK", "MONTH", "YEAR"]) else: - data['trial_duration'] = random.randint(1, 100) - data['trial_duration_unit'] = random.choice(['DAY', 'WEEK', 'MONTH', 'YEAR']) + data["trial_duration"] = random.randint(1, 100) + data["trial_duration_unit"] = random.choice(["DAY", "WEEK", "MONTH", "YEAR"]) - url = reverse_lazy('payments:academy_plan_id', kwargs={'plan_id': 1}) - response = self.client.put(url, data, format='json') + url = reverse_lazy("payments:academy_plan_id", kwargs={"plan_id": 1}) + response = self.client.put(url, data, format="json") json = response.json() - expected = put_serializer(model.plan, - model.currency, - model.service, - academy=model.academy, - 
service_items=model.service_item, - financing_options=model.financing_option, - data=data) + expected = put_serializer( + model.plan, + model.currency, + model.service, + academy=model.academy, + service_items=model.service_item, + financing_options=model.financing_option, + data=data, + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('payments.Plan'), [{ - **self.bc.format.to_dict(model.plan), - **data, - }]) + self.assertEqual( + self.bc.database.list_of("payments.Plan"), + [ + { + **self.bc.format.to_dict(model.plan), + **data, + } + ], + ) diff --git a/breathecode/payments/tests/urls/tests_academy_plan_slug.py b/breathecode/payments/tests/urls/tests_academy_plan_slug.py index e504b81a9..6d9800782 100644 --- a/breathecode/payments/tests/urls/tests_academy_plan_slug.py +++ b/breathecode/payments/tests/urls/tests_academy_plan_slug.py @@ -12,35 +12,35 @@ def academy_serializer(academy): return { - 'id': academy.id, - 'name': academy.name, - 'slug': academy.slug, + "id": academy.id, + "name": academy.name, + "slug": academy.slug, } def service_item_serializer(service_item, service): return { - 'how_many': service_item.how_many, - 'service': { - 'groups': [], - 'private': service.private, - 'slug': service.slug, - 'title': service.title, - 'icon_url': service.icon_url, + "how_many": service_item.how_many, + "service": { + "groups": [], + "private": service.private, + "slug": service.slug, + "title": service.title, + "icon_url": service.icon_url, }, - 'unit_type': service_item.unit_type, - 'sort_priority': service_item.sort_priority, + "unit_type": service_item.unit_type, + "sort_priority": service_item.sort_priority, } def financing_option_serializer(financing_option, currency): return { - 'currency': { - 'code': currency.code, - 'name': currency.name, + "currency": { + "code": currency.code, + "name": currency.name, }, - 'how_many_months': financing_option.how_many_months, - 'monthly_price': financing_option.monthly_price, + "how_many_months": financing_option.how_many_months, + "monthly_price": financing_option.monthly_price, } @@ -55,27 +55,27 @@ def get_serializer(event, currency, service=None, academy=None, service_items=[] academy = academy_serializer(academy) return { - 'slug': event.slug, - 'currency': { - 'code': currency.code, - 'name': currency.name, + "slug": event.slug, + "currency": { + "code": currency.code, + "name": currency.name, }, - 'financing_options': financing_options, - 'has_available_cohorts': len(cohorts) > 0, - 'has_waiting_list': event.has_waiting_list, - 'is_renewable': event.is_renewable, - 'owner': academy, - 'price_per_half': event.price_per_half, - 'price_per_month': event.price_per_month, - 'price_per_quarter': event.price_per_quarter, - 'price_per_year': event.price_per_year, - 'service_items': service_items, - 'slug': event.slug, - 'status': event.status, - 'time_of_life': event.time_of_life, - 'time_of_life_unit': event.time_of_life_unit, - 'trial_duration': event.trial_duration, - 'trial_duration_unit': event.trial_duration_unit, + "financing_options": financing_options, + "has_available_cohorts": len(cohorts) > 0, + "has_waiting_list": event.has_waiting_list, + "is_renewable": event.is_renewable, + "owner": academy, + "price_per_half": event.price_per_half, + "price_per_month": event.price_per_month, + "price_per_quarter": event.price_per_quarter, + "price_per_year": event.price_per_year, + "service_items": service_items, + "slug": event.slug, + "status": 
event.status, + "time_of_life": event.time_of_life, + "time_of_life_unit": event.time_of_life_unit, + "trial_duration": event.trial_duration, + "trial_duration_unit": event.trial_duration_unit, } @@ -88,15 +88,15 @@ class SignalTestSuite(PaymentsTestCase): # When: get with no auth # Then: return 200 def test__no_auth(self): - url = reverse_lazy('payments:academy_plan_slug', kwargs={'plan_slug': 'plan-1'}) + url = reverse_lazy("payments:academy_plan_slug", kwargs={"plan_slug": "plan-1"}) response = self.client.get(url) json = response.json() - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) - self.assertEqual(self.bc.database.list_of('payments.Plan'), []) + self.assertEqual(self.bc.database.list_of("payments.Plan"), []) # Given: 0 Plan # When: get with no auth @@ -107,25 +107,25 @@ def test__no_capability(self): self.client.force_authenticate(model.user) self.bc.request.set_headers(academy=1) - url = reverse_lazy('payments:academy_plan_slug', kwargs={'plan_slug': 'plan-1'}) + url = reverse_lazy("payments:academy_plan_slug", kwargs={"plan_slug": "plan-1"}) response = self.client.get(url) json = response.json() expected = { - 'detail': "You (user: 1) don't have this capability: read_plan for academy 1", - 'status_code': 403, + "detail": "You (user: 1) don't have this capability: read_plan for academy 1", + "status_code": 403, } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) - self.assertEqual(self.bc.database.list_of('payments.Plan'), []) + self.assertEqual(self.bc.database.list_of("payments.Plan"), []) # When: Not found # Then: return 404 def test__not_found(self): model = self.bc.database.create( user=1, - capability='read_plan', + capability="read_plan", role=1, profile_academy=1, skip_cohort=True, @@ -134,79 +134,94 @@ def test__not_found(self): self.client.force_authenticate(model.user) self.bc.request.set_headers(academy=1) - url = reverse_lazy('payments:academy_plan_slug', kwargs={'plan_slug': 'plan-1'}) + url = reverse_lazy("payments:academy_plan_slug", kwargs={"plan_slug": "plan-1"}) response = self.client.get(url) json = response.json() - expected = {'detail': 'not-found', 'status_code': 404} + expected = {"detail": "not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - self.assertEqual(self.bc.database.list_of('payments.Plan'), []) + self.assertEqual(self.bc.database.list_of("payments.Plan"), []) # Given: 2 Plan, 4 PlanServiceItem, 2 ServiceItem and 1 Service # When: get with no auth and plan is renewable # Then: return 200 with 2 Plan with no financial options def test__two_items__plan_is_renewable(self): - plan = {'time_of_life': None, 'time_of_life_unit': None, 'is_renewable': True} - plan_service_items = [{'service_item_id': n, 'plan_id': 1} for n in range(1, 3)] - model = self.bc.database.create(plan=plan, - user=1, - capability='read_plan', - role=1, - profile_academy=1, - skip_cohort=True, - service_item=2, - plan_service_item=plan_service_items, - financing_option=2) + plan = {"time_of_life": None, "time_of_life_unit": None, "is_renewable": True} + plan_service_items = [{"service_item_id": n, "plan_id": 1} for n in range(1, 3)] + model = self.bc.database.create( + plan=plan, + user=1, + capability="read_plan", + 
role=1, + profile_academy=1, + skip_cohort=True, + service_item=2, + plan_service_item=plan_service_items, + financing_option=2, + ) self.client.force_authenticate(model.user) self.bc.request.set_headers(academy=1) - url = reverse_lazy('payments:academy_plan_slug', kwargs={'plan_slug': model.plan.slug}) + url = reverse_lazy("payments:academy_plan_slug", kwargs={"plan_slug": model.plan.slug}) response = self.client.get(url) json = response.json() - expected = get_serializer(model.plan, - model.currency, - model.service, - academy=model.academy, - service_items=model.service_item, - financing_options=[]) + expected = get_serializer( + model.plan, + model.currency, + model.service, + academy=model.academy, + service_items=model.service_item, + financing_options=[], + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('payments.Plan'), [ - self.bc.format.to_dict(model.plan), - ]) + self.assertEqual( + self.bc.database.list_of("payments.Plan"), + [ + self.bc.format.to_dict(model.plan), + ], + ) # When: get is called # Then: it's setup properly - @patch.object(APIViewExtensionHandlers, '_spy_extensions', MagicMock()) - @patch.object(APIViewExtensionHandlers, '_spy_extension_arguments', MagicMock()) + @patch.object(APIViewExtensionHandlers, "_spy_extensions", MagicMock()) + @patch.object(APIViewExtensionHandlers, "_spy_extension_arguments", MagicMock()) def test_get__spy_extensions(self): - plan = {'time_of_life': None, 'time_of_life_unit': None, 'is_renewable': True} - plan_service_items = [{'service_item_id': n, 'plan_id': 1} for n in range(1, 3)] - model = self.bc.database.create(plan=plan, - user=1, - capability='read_plan', - role=1, - profile_academy=1, - skip_cohort=True, - service_item=2, - plan_service_item=plan_service_items, - financing_option=2) + plan = {"time_of_life": None, "time_of_life_unit": None, "is_renewable": True} + plan_service_items = [{"service_item_id": n, "plan_id": 1} for n in range(1, 3)] + model = self.bc.database.create( + plan=plan, + user=1, + capability="read_plan", + role=1, + profile_academy=1, + skip_cohort=True, + service_item=2, + plan_service_item=plan_service_items, + financing_option=2, + ) self.client.force_authenticate(model.user) self.bc.request.set_headers(academy=1) - url = reverse_lazy('payments:academy_plan_slug', kwargs={'plan_slug': model.plan.slug}) + url = reverse_lazy("payments:academy_plan_slug", kwargs={"plan_slug": model.plan.slug}) self.client.get(url) - self.bc.check.calls(APIViewExtensionHandlers._spy_extensions.call_args_list, [ - call(['LanguageExtension', 'LookupExtension', 'PaginationExtension', 'SortExtension']), - ]) + self.bc.check.calls( + APIViewExtensionHandlers._spy_extensions.call_args_list, + [ + call(["LanguageExtension", "LookupExtension", "PaginationExtension", "SortExtension"]), + ], + ) - self.bc.check.calls(APIViewExtensionHandlers._spy_extension_arguments.call_args_list, [ - call(sort='-id', paginate=True), - ]) + self.bc.check.calls( + APIViewExtensionHandlers._spy_extension_arguments.call_args_list, + [ + call(sort="-id", paginate=True), + ], + ) diff --git a/breathecode/payments/tests/urls/tests_bag_id_coupon.py b/breathecode/payments/tests/urls/tests_bag_id_coupon.py index 81e73336f..173f82de5 100644 --- a/breathecode/payments/tests/urls/tests_bag_id_coupon.py +++ b/breathecode/payments/tests/urls/tests_bag_id_coupon.py @@ -33,7 +33,7 @@ def queryset_with_pks(query: Any, pks: list[int]) -> None: ``` """ - assert 
isinstance(query, QuerySet), 'The first argument is not a QuerySet' + assert isinstance(query, QuerySet), "The first argument is not a QuerySet" assert [x.pk for x in query] == pks @@ -45,194 +45,189 @@ def setup(db): def db_bag(data={}): return { - 'academy_id': 0, - 'amount_per_half': 0.0, - 'amount_per_month': 0.0, - 'amount_per_quarter': 0.0, - 'amount_per_year': 0.0, - 'chosen_period': 'NO_SET', - 'currency_id': 0, - 'expires_at': None, - 'how_many_installments': 0, - 'id': 0, - 'is_recurrent': False, - 'status': 'CHECKING', - 'token': None, - 'type': 'BAG', - 'user_id': 0, - 'was_delivered': False, + "academy_id": 0, + "amount_per_half": 0.0, + "amount_per_month": 0.0, + "amount_per_quarter": 0.0, + "amount_per_year": 0.0, + "chosen_period": "NO_SET", + "currency_id": 0, + "expires_at": None, + "how_many_installments": 0, + "id": 0, + "is_recurrent": False, + "status": "CHECKING", + "token": None, + "type": "BAG", + "user_id": 0, + "was_delivered": False, **data, } def plan_serializer(plan, data={}): return { - 'service_items': [], - 'financing_options': [], - 'slug': plan.slug, - 'status': plan.status, - 'time_of_life': plan.time_of_life, - 'time_of_life_unit': plan.time_of_life_unit, - 'trial_duration': plan.trial_duration, - 'trial_duration_unit': plan.trial_duration_unit, - 'has_available_cohorts': bool(plan.cohort_set), + "service_items": [], + "financing_options": [], + "slug": plan.slug, + "status": plan.status, + "time_of_life": plan.time_of_life, + "time_of_life_unit": plan.time_of_life_unit, + "trial_duration": plan.trial_duration, + "trial_duration_unit": plan.trial_duration_unit, + "has_available_cohorts": bool(plan.cohort_set), **data, } def to_iso(date: datetime) -> str: - return re.sub(r'\+00:00$', 'Z', date.replace(tzinfo=UTC).isoformat()) + return re.sub(r"\+00:00$", "Z", date.replace(tzinfo=UTC).isoformat()) def format_coupon(coupon, data={}): return { - 'auto': coupon.auto, - 'discount_type': coupon.discount_type, - 'discount_value': coupon.discount_value, - 'expires_at': to_iso(coupon.expires_at) if coupon.expires_at else None, - 'offered_at': to_iso(coupon.offered_at) if coupon.offered_at else None, - 'referral_type': coupon.referral_type, - 'referral_value': coupon.referral_value, - 'slug': coupon.slug, + "auto": coupon.auto, + "discount_type": coupon.discount_type, + "discount_value": coupon.discount_value, + "expires_at": to_iso(coupon.expires_at) if coupon.expires_at else None, + "offered_at": to_iso(coupon.offered_at) if coupon.offered_at else None, + "referral_type": coupon.referral_type, + "referral_value": coupon.referral_value, + "slug": coupon.slug, **data, } def put_serializer(bag, plans=[], coupons=[], data={}): return { - 'id': bag.id, - 'amount_per_month': bag.amount_per_month, - 'amount_per_quarter': bag.amount_per_quarter, - 'amount_per_half': bag.amount_per_half, - 'amount_per_year': bag.amount_per_year, - 'expires_at': bag.expires_at, - 'is_recurrent': bag.is_recurrent, - 'plans': [plan_serializer(plan) for plan in plans], - 'service_items': [], - 'status': bag.status, - 'token': bag.token, - 'type': bag.type, - 'was_delivered': bag.was_delivered, - 'coupons': [format_coupon(x) for x in coupons], + "id": bag.id, + "amount_per_month": bag.amount_per_month, + "amount_per_quarter": bag.amount_per_quarter, + "amount_per_half": bag.amount_per_half, + "amount_per_year": bag.amount_per_year, + "expires_at": bag.expires_at, + "is_recurrent": bag.is_recurrent, + "plans": [plan_serializer(plan) for plan in plans], + "service_items": [], + "status": 
bag.status, + "token": bag.token, + "type": bag.type, + "was_delivered": bag.was_delivered, + "coupons": [format_coupon(x) for x in coupons], **data, } -@pytest.mark.parametrize('plan_pk', [None, '']) +@pytest.mark.parametrize("plan_pk", [None, ""]) def test_no_auth(bc: Breathecode, client: rfx.Client, plan_pk): - url = reverse_lazy('payments:bag_id_coupon', kwargs={'bag_id': 1}) + url = reverse_lazy("payments:bag_id_coupon", kwargs={"bag_id": 1}) if plan_pk is not None: - url += f'?plan={plan_pk}' + url += f"?plan={plan_pk}" response = client.put(url) json = response.json() - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} assert json == expected assert response.status_code == status.HTTP_401_UNAUTHORIZED - assert bc.database.list_of('payments.Plan') == [] - assert bc.database.list_of('payments.Coupon') == [] - assert bc.database.list_of('payments.Bag') == [] + assert bc.database.list_of("payments.Plan") == [] + assert bc.database.list_of("payments.Coupon") == [] + assert bc.database.list_of("payments.Bag") == [] -@pytest.mark.parametrize('plan_pk', [None, '']) +@pytest.mark.parametrize("plan_pk", [None, ""]) def test_missing_plan(bc: Breathecode, client: rfx.Client, plan_pk): model = bc.database.create(user=1) client.force_authenticate(user=model.user) - url = reverse_lazy('payments:bag_id_coupon', kwargs={'bag_id': 1}) + url = reverse_lazy("payments:bag_id_coupon", kwargs={"bag_id": 1}) if plan_pk is not None: - url += f'?plan={plan_pk}' + url += f"?plan={plan_pk}" response = client.put(url) json = response.json() - expected = {'detail': 'missing-plan', 'status_code': 404} + expected = {"detail": "missing-plan", "status_code": 404} assert json == expected assert response.status_code == status.HTTP_404_NOT_FOUND - assert bc.database.list_of('payments.Plan') == [] - assert bc.database.list_of('payments.Coupon') == [] - assert bc.database.list_of('payments.Bag') == [] + assert bc.database.list_of("payments.Plan") == [] + assert bc.database.list_of("payments.Coupon") == [] + assert bc.database.list_of("payments.Bag") == [] -@pytest.mark.parametrize('plan_pk', ['my-plan', 1]) +@pytest.mark.parametrize("plan_pk", ["my-plan", 1]) def test_plan_not_found(bc: Breathecode, client: rfx.Client, plan_pk): model = bc.database.create(user=1) client.force_authenticate(user=model.user) - url = reverse_lazy('payments:bag_id_coupon', kwargs={'bag_id': 1}) + url = reverse_lazy("payments:bag_id_coupon", kwargs={"bag_id": 1}) if plan_pk is not None: - url += f'?plan={plan_pk}' + url += f"?plan={plan_pk}" response = client.put(url) json = response.json() - expected = {'detail': 'plan-not-found', 'status_code': 404} + expected = {"detail": "plan-not-found", "status_code": 404} assert json == expected assert response.status_code == status.HTTP_404_NOT_FOUND - assert bc.database.list_of('payments.Plan') == [] - assert bc.database.list_of('payments.Coupon') == [] - assert bc.database.list_of('payments.Bag') == [] + assert bc.database.list_of("payments.Plan") == [] + assert bc.database.list_of("payments.Coupon") == [] + assert bc.database.list_of("payments.Bag") == [] -@pytest.mark.parametrize('plan_pk', ['my-plan', 1]) +@pytest.mark.parametrize("plan_pk", ["my-plan", 1]) def test_no_bag(bc: Breathecode, client: rfx.Client, plan_pk): plan = { - 'is_renewable': False, + "is_renewable": False, } if isinstance(plan_pk, str): - plan['slug'] = plan_pk + plan["slug"] = plan_pk model = 
bc.database.create(plan=plan, user=1) client.force_authenticate(user=model.user) - url = reverse_lazy('payments:bag_id_coupon', kwargs={'bag_id': 1}) - url += f'?plan={plan_pk}&coupons=coupon1,coupon2' + url = reverse_lazy("payments:bag_id_coupon", kwargs={"bag_id": 1}) + url += f"?plan={plan_pk}&coupons=coupon1,coupon2" response = client.put(url) json = response.json() expected = { - 'detail': 'bag-not-found', - 'status_code': 404, + "detail": "bag-not-found", + "status_code": 404, } assert json == expected assert response.status_code == status.HTTP_404_NOT_FOUND - assert bc.database.list_of('payments.Plan') == [bc.format.to_dict(model.plan)] - assert bc.database.list_of('payments.Coupon') == [] - assert bc.database.list_of('payments.Bag') == [] - - -@pytest.mark.parametrize('plan_pk', ['my-plan', 1]) -@pytest.mark.parametrize('bag_type, coupons', [ - ('BAG', 0), - ('BAG', [{ - 'slug': slug, - 'auto': False, - 'discount_value': 1 - } for slug in ['coupon3', 'coupon4']]), - ('PREVIEW', [{ - 'slug': slug, - 'auto': False, - 'discount_value': 1 - } for slug in ['coupon3', 'coupon4']]), -]) + assert bc.database.list_of("payments.Plan") == [bc.format.to_dict(model.plan)] + assert bc.database.list_of("payments.Coupon") == [] + assert bc.database.list_of("payments.Bag") == [] + + +@pytest.mark.parametrize("plan_pk", ["my-plan", 1]) +@pytest.mark.parametrize( + "bag_type, coupons", + [ + ("BAG", 0), + ("BAG", [{"slug": slug, "auto": False, "discount_value": 1} for slug in ["coupon3", "coupon4"]]), + ("PREVIEW", [{"slug": slug, "auto": False, "discount_value": 1} for slug in ["coupon3", "coupon4"]]), + ], +) def test_plan_found__coupons_not_found(bc: Breathecode, client: rfx.Client, bag_type, plan_pk, coupons): plan = { - 'is_renewable': False, + "is_renewable": False, } if isinstance(plan_pk, str): - plan['slug'] = plan_pk + plan["slug"] = plan_pk - model = bc.database.create(plan=plan, coupon=coupons, user=1, bag={'status': 'CHECKING', 'type': bag_type}) + model = bc.database.create(plan=plan, coupon=coupons, user=1, bag={"status": "CHECKING", "type": bag_type}) client.force_authenticate(user=model.user) - url = reverse_lazy('payments:bag_id_coupon', kwargs={'bag_id': 1}) - url += f'?plan={plan_pk}&coupons=coupon1,coupon2' + url = reverse_lazy("payments:bag_id_coupon", kwargs={"bag_id": 1}) + url += f"?plan={plan_pk}&coupons=coupon1,coupon2" response = client.put(url) @@ -241,50 +236,42 @@ def test_plan_found__coupons_not_found(bc: Breathecode, client: rfx.Client, bag_ assert json == expected assert response.status_code == status.HTTP_200_OK - assert bc.database.list_of('payments.Plan') == [bc.format.to_dict(model.plan)] + assert bc.database.list_of("payments.Plan") == [bc.format.to_dict(model.plan)] if coupons: - assert bc.database.list_of('payments.Coupon') == bc.format.to_dict(model.coupon) + assert bc.database.list_of("payments.Coupon") == bc.format.to_dict(model.coupon) else: - assert bc.database.list_of('payments.Coupon') == [] + assert bc.database.list_of("payments.Coupon") == [] - assert bc.database.list_of('payments.Bag') == [bc.format.to_dict(model.bag)] + assert bc.database.list_of("payments.Bag") == [bc.format.to_dict(model.bag)] queryset_with_pks(model.bag.coupons.all(), []) -@pytest.mark.parametrize('plan_pk', ['my-plan', 1]) -@pytest.mark.parametrize('bag_type, max, coupons', [ - ('BAG', 2, [{ - 'slug': slug, - 'auto': True, - 'discount_value': 1 - } for slug in ['coupon3', 'coupon4']]), - ('PREVIEW', 1, [{ - 'slug': slug, - 'auto': False, - 'discount_value': 1 - } for slug in 
['coupon1', 'coupon2']]), - ('BAG', 3, [{ - 'slug': slug, - 'auto': True, - 'discount_value': 1 - } for slug in ['coupon3', 'coupon4']] + [{ - 'slug': slug, - 'auto': False, - 'discount_value': 1 - } for slug in ['coupon1', 'coupon2']]), -]) +@pytest.mark.parametrize("plan_pk", ["my-plan", 1]) +@pytest.mark.parametrize( + "bag_type, max, coupons", + [ + ("BAG", 2, [{"slug": slug, "auto": True, "discount_value": 1} for slug in ["coupon3", "coupon4"]]), + ("PREVIEW", 1, [{"slug": slug, "auto": False, "discount_value": 1} for slug in ["coupon1", "coupon2"]]), + ( + "BAG", + 3, + [{"slug": slug, "auto": True, "discount_value": 1} for slug in ["coupon3", "coupon4"]] + + [{"slug": slug, "auto": False, "discount_value": 1} for slug in ["coupon1", "coupon2"]], + ), + ], +) def test_plan_found__coupons_found(bc: Breathecode, client: rfx.Client, plan_pk, max, coupons, bag_type): plan = { - 'is_renewable': False, + "is_renewable": False, } if isinstance(plan_pk, str): - plan['slug'] = plan_pk + plan["slug"] = plan_pk - model = bc.database.create(plan=plan, coupon=coupons, user=1, bag={'status': 'CHECKING', 'type': bag_type}) + model = bc.database.create(plan=plan, coupon=coupons, user=1, bag={"status": "CHECKING", "type": bag_type}) client.force_authenticate(user=model.user) - url = reverse_lazy('payments:bag_id_coupon', kwargs={'bag_id': 1}) - url += f'?plan={plan_pk}&coupons=coupon1,coupon2' + url = reverse_lazy("payments:bag_id_coupon", kwargs={"bag_id": 1}) + url += f"?plan={plan_pk}&coupons=coupon1,coupon2" response = client.put(url) @@ -293,7 +280,7 @@ def test_plan_found__coupons_found(bc: Breathecode, client: rfx.Client, plan_pk, assert json == expected assert response.status_code == status.HTTP_200_OK - assert bc.database.list_of('payments.Plan') == [bc.format.to_dict(model.plan)] - assert bc.database.list_of('payments.Coupon') == bc.format.to_dict(model.coupon) - assert bc.database.list_of('payments.Bag') == [bc.format.to_dict(model.bag)] + assert bc.database.list_of("payments.Plan") == [bc.format.to_dict(model.plan)] + assert bc.database.list_of("payments.Coupon") == bc.format.to_dict(model.coupon) + assert bc.database.list_of("payments.Bag") == [bc.format.to_dict(model.bag)] queryset_with_pks(model.bag.coupons.all(), [n + 1 for n in range(max)]) diff --git a/breathecode/payments/tests/urls/tests_card.py b/breathecode/payments/tests/urls/tests_card.py index 1ac98786a..bda8d5327 100644 --- a/breathecode/payments/tests/urls/tests_card.py +++ b/breathecode/payments/tests/urls/tests_card.py @@ -15,44 +15,44 @@ def format_user_setting(data={}): return { - 'id': 1, - 'user_id': 1, - 'main_currency_id': None, - 'lang': 'en', + "id": 1, + "user_id": 1, + "main_currency_id": None, + "lang": "en", **data, } def format_invoice_item(data={}): return { - 'academy_id': 1, - 'amount': 0.0, - 'currency_id': 1, - 'bag_id': 1, - 'id': 1, - 'paid_at': UTC_NOW, - 'status': 'FULFILLED', - 'stripe_id': None, - 'user_id': 1, - 'refund_stripe_id': None, - 'refunded_at': None, + "academy_id": 1, + "amount": 0.0, + "currency_id": 1, + "bag_id": 1, + "id": 1, + "paid_at": UTC_NOW, + "status": "FULFILLED", + "stripe_id": None, + "user_id": 1, + "refund_stripe_id": None, + "refunded_at": None, **data, } def get_serializer(self, currency, user, data={}): return { - 'amount': 0, - 'currency': { - 'code': currency.code, - 'name': currency.name, + "amount": 0, + "currency": { + "code": currency.code, + "name": currency.name, }, - 'paid_at': self.bc.datetime.to_iso_string(UTC_NOW), - 'status': 'FULFILLED', - 'user': { - 
'email': user.email,
-        'first_name': user.first_name,
-        'last_name': user.last_name,
+        "paid_at": self.bc.datetime.to_iso_string(UTC_NOW),
+        "status": "FULFILLED",
+        "user": {
+            "email": user.email,
+            "first_name": user.first_name,
+            "last_name": user.last_name,
         },
         **data,
     }
@@ -60,39 +60,41 @@ def get_serializer(self, currency, user, data={}):


 def generate_amounts_by_time():
     return {
-        'amount_per_month': random.random() * 100 + 1,
-        'amount_per_quarter': random.random() * 100 + 1,
-        'amount_per_half': random.random() * 100 + 1,
-        'amount_per_year': random.random() * 100 + 1,
+        "amount_per_month": random.random() * 100 + 1,
+        "amount_per_quarter": random.random() * 100 + 1,
+        "amount_per_half": random.random() * 100 + 1,
+        "amount_per_year": random.random() * 100 + 1,
     }


 def generate_three_amounts_by_time():
-    l = random.shuffle([
-        0,
-        random.random() * 100 + 1,
-        random.random() * 100 + 1,
-        random.random() * 100 + 1,
-    ])
+    # random.shuffle works in place and returns None, so build the list first
+    l = [
+        0,
+        random.random() * 100 + 1,
+        random.random() * 100 + 1,
+        random.random() * 100 + 1,
+    ]
+    random.shuffle(l)

     return {
-        'amount_per_month': l[0],
-        'amount_per_quarter': l[1],
-        'amount_per_half': l[2],
-        'amount_per_year': l[3],
+        "amount_per_month": l[0],
+        "amount_per_quarter": l[1],
+        "amount_per_half": l[2],
+        "amount_per_year": l[3],
     }


 def which_amount_is_zero(data={}):
     for key in data:
-        if key == 'amount_per_quarter':
-            return 'MONTH', 1
+        if key == "amount_per_quarter":
+            return "MONTH", 1


 CHOSEN_PERIOD = {
-    'MONTH': 'amount_per_month',
-    'QUARTER': 'amount_per_quarter',
-    'HALF': 'amount_per_half',
-    'YEAR': 'amount_per_year',
+    "MONTH": "amount_per_month",
+    "QUARTER": "amount_per_quarter",
+    "HALF": "amount_per_half",
+    "YEAR": "amount_per_year",
 }
@@ -102,7 +104,7 @@ def get_amount_per_period(period, data):


 def invoice_mock():
-    class FakeInvoice():
+    class FakeInvoice:
         id = 1

         amount = 100
@@ -113,78 +115,90 @@ class SignalTestSuite(PaymentsTestCase):
     # When: no auth
     # Then: return 401
     def test__no_auth(self):
-        url = reverse_lazy('payments:card')
+        url = reverse_lazy("payments:card")

         response = self.client.post(url)

         json = response.json()
-        expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401}
+        expected = {"detail": "Authentication credentials were not provided.", "status_code": 401}

         self.assertEqual(json, expected)
         self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)

-        self.assertEqual(self.bc.database.list_of('payments.Bag'), [])
-        self.assertEqual(self.bc.database.list_of('authenticate.UserSetting'), [])
+        self.assertEqual(self.bc.database.list_of("payments.Bag"), [])
+        self.assertEqual(self.bc.database.list_of("authenticate.UserSetting"), [])

     # When: no body
     # Then: return 400
-    @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW))
-    @patch('stripe.Token.create', MagicMock(return_value={'id': 1}))
-    @patch('stripe.Customer.create', MagicMock(return_value={'id': 1}))
-    @patch('stripe.Customer.modify', MagicMock(return_value={'id': 1}))
+    @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW))
+    @patch("stripe.Token.create", MagicMock(return_value={"id": 1}))
+    @patch("stripe.Customer.create", MagicMock(return_value={"id": 1}))
+    @patch("stripe.Customer.modify", MagicMock(return_value={"id": 1}))
     def test__no_body(self):
         model = self.bc.database.create(user=1)
         self.client.force_authenticate(model.user)

-        url = reverse_lazy('payments:card')
+        url = reverse_lazy("payments:card")

         response = self.client.post(url)
         self.client.force_authenticate(model.user)
json = response.json() - expected = {'detail': 'missing-card-information', 'status_code': 404} + expected = {"detail": "missing-card-information", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - self.assertEqual(self.bc.database.list_of('authenticate.UserSetting'), [ - format_user_setting({'lang': 'en'}), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.UserSetting"), + [ + format_user_setting({"lang": "en"}), + ], + ) self.bc.check.calls(stripe.Token.create.call_args_list, []) - self.bc.check.calls(stripe.Customer.create.call_args_list, [ - call(email=model.user.email, name=f'{model.user.first_name} {model.user.last_name}'), - ]) + self.bc.check.calls( + stripe.Customer.create.call_args_list, + [ + call(email=model.user.email, name=f"{model.user.first_name} {model.user.last_name}"), + ], + ) self.bc.check.calls(stripe.Customer.modify.call_args_list, []) # When: passing card # Then: return 400 - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('stripe.Token.create', MagicMock(return_value=AttrDict(id=1))) - @patch('stripe.Customer.create', MagicMock(return_value=AttrDict(id=1))) - @patch('stripe.Customer.modify', MagicMock(return_value=AttrDict(id=1))) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("stripe.Token.create", MagicMock(return_value=AttrDict(id=1))) + @patch("stripe.Customer.create", MagicMock(return_value=AttrDict(id=1))) + @patch("stripe.Customer.modify", MagicMock(return_value=AttrDict(id=1))) def test__passing_card(self): model = self.bc.database.create(user=1) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:card') - data = {'card_number': '4242424242424242', 'exp_month': '12', 'exp_year': '2030', 'cvc': '123'} - response = self.client.post(url, data, format='json') + url = reverse_lazy("payments:card") + data = {"card_number": "4242424242424242", "exp_month": "12", "exp_year": "2030", "cvc": "123"} + response = self.client.post(url, data, format="json") self.client.force_authenticate(model.user) json = response.json() - expected = {'status': 'ok'} + expected = {"status": "ok"} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('authenticate.UserSetting'), [ - format_user_setting({'lang': 'en'}), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.UserSetting"), + [ + format_user_setting({"lang": "en"}), + ], + ) - data['number'] = data.pop('card_number') + data["number"] = data.pop("card_number") self.bc.check.calls(stripe.Token.create.call_args_list, [call(card=data)]) - self.bc.check.calls(stripe.Customer.create.call_args_list, [ - call(email=model.user.email, name=f'{model.user.first_name} {model.user.last_name}'), - ]) - - self.bc.check.calls(stripe.Customer.modify.call_args_list, [call('1', source=1)]) + self.bc.check.calls( + stripe.Customer.create.call_args_list, + [ + call(email=model.user.email, name=f"{model.user.first_name} {model.user.last_name}"), + ], + ) + + self.bc.check.calls(stripe.Customer.modify.call_args_list, [call("1", source=1)]) diff --git a/breathecode/payments/tests/urls/tests_checking.py b/breathecode/payments/tests/urls/tests_checking.py index 4e4637646..edc84910a 100644 --- a/breathecode/payments/tests/urls/tests_checking.py +++ b/breathecode/payments/tests/urls/tests_checking.py @@ -38,7 +38,7 @@ def queryset_with_pks(query: Any, pks: list[int]) -> None: ``` """ - assert 
isinstance(query, QuerySet), 'The first argument is not a QuerySet' + assert isinstance(query, QuerySet), "The first argument is not a QuerySet" assert [x.pk for x in query] == pks @@ -48,131 +48,117 @@ def queryset_with_pks(query: Any, pks: list[int]) -> None: def format_user_setting(data={}): return { - 'id': 1, - 'user_id': 1, - 'main_currency_id': None, - 'lang': 'en', + "id": 1, + "user_id": 1, + "main_currency_id": None, + "lang": "en", **data, } def financing_option_serializer(financing_option, currency, data={}): return { - 'currency': { - 'code': currency.code, - 'name': currency.name, + "currency": { + "code": currency.code, + "name": currency.name, }, - 'how_many_months': financing_option.how_many_months, - 'monthly_price': financing_option.monthly_price, + "how_many_months": financing_option.how_many_months, + "monthly_price": financing_option.monthly_price, } def plan_serializer(plan, service_items, service, cohorts=[], financing_options=[], currency=None, data={}): return { - 'service_items': [service_item_serializer(service_item, service, cohorts) for service_item in service_items], - 'financing_options': - [financing_option_serializer(financing_option, currency) for financing_option in financing_options], - 'slug': - plan.slug, - 'status': - plan.status, - 'time_of_life': - plan.time_of_life, - 'time_of_life_unit': - plan.time_of_life_unit, - 'trial_duration': - plan.trial_duration, - 'trial_duration_unit': - plan.trial_duration_unit, - 'has_available_cohorts': - bool(plan.cohort_set), + "service_items": [service_item_serializer(service_item, service, cohorts) for service_item in service_items], + "financing_options": [ + financing_option_serializer(financing_option, currency) for financing_option in financing_options + ], + "slug": plan.slug, + "status": plan.status, + "time_of_life": plan.time_of_life, + "time_of_life_unit": plan.time_of_life_unit, + "trial_duration": plan.trial_duration, + "trial_duration_unit": plan.trial_duration_unit, + "has_available_cohorts": bool(plan.cohort_set), **data, } def service_serializer(service, cohorts=[], data={}): return { - 'groups': [], - 'private': service.private, - 'slug': service.slug, - 'title': service.title, - 'icon_url': service.icon_url, + "groups": [], + "private": service.private, + "slug": service.slug, + "title": service.title, + "icon_url": service.icon_url, **data, } def service_item_serializer(service_item, service, cohorts=[], data={}): return { - 'how_many': service_item.how_many, - 'unit_type': service_item.unit_type, - 'sort_priority': service_item.sort_priority, - 'service': service_serializer(service, cohorts), + "how_many": service_item.how_many, + "unit_type": service_item.unit_type, + "sort_priority": service_item.sort_priority, + "service": service_serializer(service, cohorts), **data, } def to_iso(date): - return re.sub(r'\+00:00$', 'Z', date.replace(tzinfo=UTC).isoformat()) + return re.sub(r"\+00:00$", "Z", date.replace(tzinfo=UTC).isoformat()) def format_coupon(coupon, data={}): return { - 'auto': coupon.auto, - 'discount_type': coupon.discount_type, - 'discount_value': coupon.discount_value, - 'expires_at': to_iso(coupon.expires_at) if coupon.expires_at else None, - 'offered_at': to_iso(coupon.offered_at) if coupon.offered_at else None, - 'referral_type': coupon.referral_type, - 'referral_value': coupon.referral_value, - 'slug': coupon.slug, + "auto": coupon.auto, + "discount_type": coupon.discount_type, + "discount_value": coupon.discount_value, + "expires_at": to_iso(coupon.expires_at) if 
coupon.expires_at else None, + "offered_at": to_iso(coupon.offered_at) if coupon.offered_at else None, + "referral_type": coupon.referral_type, + "referral_value": coupon.referral_value, + "slug": coupon.slug, **data, } -def get_serializer(bag, - plans=[], - plan_service_items=[], - service_items=[], - service=None, - cohorts=[], - financing_options=[], - currency=None, - coupons=[], - data={}): +def get_serializer( + bag, + plans=[], + plan_service_items=[], + service_items=[], + service=None, + cohorts=[], + financing_options=[], + currency=None, + coupons=[], + data={}, +): return { - 'id': - bag.id, - 'amount_per_month': - bag.amount_per_month, - 'amount_per_quarter': - bag.amount_per_quarter, - 'amount_per_half': - bag.amount_per_half, - 'amount_per_year': - bag.amount_per_year, - 'expires_at': - bag.expires_at, - 'is_recurrent': - bag.is_recurrent, - 'plans': - [plan_serializer(plan, plan_service_items, service, cohorts, financing_options, currency) for plan in plans], - 'service_items': [service_item_serializer(service_item, service, cohorts) for service_item in service_items], - 'status': - bag.status, - 'token': - bag.token, - 'type': - bag.type, - 'was_delivered': - bag.was_delivered, - 'coupons': [format_coupon(x) for x in coupons], + "id": bag.id, + "amount_per_month": bag.amount_per_month, + "amount_per_quarter": bag.amount_per_quarter, + "amount_per_half": bag.amount_per_half, + "amount_per_year": bag.amount_per_year, + "expires_at": bag.expires_at, + "is_recurrent": bag.is_recurrent, + "plans": [ + plan_serializer(plan, plan_service_items, service, cohorts, financing_options, currency) for plan in plans + ], + "service_items": [service_item_serializer(service_item, service, cohorts) for service_item in service_items], + "status": bag.status, + "token": bag.token, + "type": bag.type, + "was_delivered": bag.was_delivered, + "coupons": [format_coupon(x) for x in coupons], **data, } @pytest.fixture(autouse=True) def setup(db, monkeypatch, set_datetime): - monkeypatch.setattr(activity_tasks.add_activity, 'delay', MagicMock()) + monkeypatch.setattr(activity_tasks.add_activity, "delay", MagicMock()) set_datetime(UTC_NOW) yield @@ -183,107 +169,121 @@ class SignalTestSuite(PaymentsTestCase): """ def test__without_auth(self): - url = reverse_lazy('payments:checking') + url = reverse_lazy("payments:checking") response = self.client.put(url) json = response.json() - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) - self.assertEqual(self.bc.database.list_of('payments.Bag'), []) - self.assertEqual(self.bc.database.list_of('authenticate.UserSetting'), []) + self.assertEqual(self.bc.database.list_of("payments.Bag"), []) + self.assertEqual(self.bc.database.list_of("authenticate.UserSetting"), []) self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, []) """ 🔽🔽🔽 Get with zero Bag """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__without_bag(self): model = self.bc.database.create(user=1) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:checking') + url = reverse_lazy("payments:checking") response = self.client.put(url) self.client.force_authenticate(model.user) json = response.json() - expected = {'detail': 
'not-found', 'status_code': 404} + expected = {"detail": "not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - self.assertEqual(self.bc.database.list_of('payments.Bag'), []) - self.assertEqual(self.bc.database.list_of('authenticate.UserSetting'), [ - format_user_setting({'lang': 'en'}), - ]) + self.assertEqual(self.bc.database.list_of("payments.Bag"), []) + self.assertEqual( + self.bc.database.list_of("authenticate.UserSetting"), + [ + format_user_setting({"lang": "en"}), + ], + ) self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, []) """ 🔽🔽🔽 Get with one Bag, type is BAG """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__with_bag__type_bag(self): bag = { - 'status': 'CHECKING', - 'type': 'BAG', + "status": "CHECKING", + "type": "BAG", } - cases = [{}, {'type': 'BAG'}] + cases = [{}, {"type": "BAG"}] for case in cases: model = self.bc.database.create(user=1, bag=bag) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:checking') + url = reverse_lazy("payments:checking") token = self.bc.random.string(lower=True, upper=True, number=True, size=40) - with patch('rest_framework.authtoken.models.Token.generate_key', MagicMock(return_value=token)): - response = self.client.put(url, data=case, format='json') + with patch("rest_framework.authtoken.models.Token.generate_key", MagicMock(return_value=token)): + response = self.client.put(url, data=case, format="json") json = response.json() expected = get_serializer( model.bag, data={ - 'amount_per_month': 0.0, - 'amount_per_quarter': 0.0, - 'amount_per_half': 0.0, - 'amount_per_year': 0.0, - 'expires_at': self.bc.datetime.to_iso_string(UTC_NOW + timedelta(minutes=60)), - 'token': token, + "amount_per_month": 0.0, + "amount_per_quarter": 0.0, + "amount_per_half": 0.0, + "amount_per_year": 0.0, + "expires_at": self.bc.datetime.to_iso_string(UTC_NOW + timedelta(minutes=60)), + "token": token, }, ) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('payments.Bag'), [ - { - **self.bc.format.to_dict(model.bag), - 'amount_per_month': 0.0, - 'amount_per_quarter': 0.0, - 'amount_per_half': 0.0, - 'amount_per_year': 0.0, - 'expires_at': UTC_NOW + timedelta(minutes=60), - 'token': token, - }, - ]) - self.assertEqual(self.bc.database.list_of('authenticate.UserSetting'), [ - format_user_setting({ - 'lang': 'en', - 'id': model.user.id, - 'user_id': model.user.id, - }), - ]) + self.assertEqual( + self.bc.database.list_of("payments.Bag"), + [ + { + **self.bc.format.to_dict(model.bag), + "amount_per_month": 0.0, + "amount_per_quarter": 0.0, + "amount_per_half": 0.0, + "amount_per_year": 0.0, + "expires_at": UTC_NOW + timedelta(minutes=60), + "token": token, + }, + ], + ) + self.assertEqual( + self.bc.database.list_of("authenticate.UserSetting"), + [ + format_user_setting( + { + "lang": "en", + "id": model.user.id, + "user_id": model.user.id, + } + ), + ], + ) # teardown - self.bc.database.delete('payments.Bag', model.bag.id) - self.bc.database.delete('authenticate.UserSetting', model.bag.id) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(model.user.id, 'bag_created', related_type='payments.Bag', related_id=model.user.id), - ]) + self.bc.database.delete("payments.Bag", model.bag.id) + 
self.bc.database.delete("authenticate.UserSetting", model.bag.id) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(model.user.id, "bag_created", related_type="payments.Bag", related_id=model.user.id), + ], + ) activity_tasks.add_activity.delay.call_args_list = [] @@ -291,233 +291,276 @@ def test__with_bag__type_bag(self): 🔽🔽🔽 Get with one Bag, type is PREVIEW, passing nothing """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__with_bag__type_bag__passing_nothing(self): bag = { - 'status': 'CHECKING', - 'type': 'PREVIEW', + "status": "CHECKING", + "type": "PREVIEW", } model = self.bc.database.create(user=1, bag=bag) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:checking') + url = reverse_lazy("payments:checking") response = self.client.put(url) json = response.json() - expected = {'detail': 'not-found', 'status_code': 404} + expected = {"detail": "not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - self.assertEqual(self.bc.database.list_of('payments.Bag'), [self.bc.format.to_dict(model.bag)]) - self.assertEqual(self.bc.database.list_of('authenticate.UserSetting'), [ - format_user_setting({ - 'lang': 'en', - 'id': model.user.id, - 'user_id': model.user.id, - }), - ]) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + self.assertEqual(self.bc.database.list_of("payments.Bag"), [self.bc.format.to_dict(model.bag)]) + self.assertEqual( + self.bc.database.list_of("authenticate.UserSetting"), + [ + format_user_setting( + { + "lang": "en", + "id": model.user.id, + "user_id": model.user.id, + } + ), + ], + ) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) """ 🔽🔽🔽 Get with one Bag, type is PREVIEW, passing type preview """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__with_bag__type_bag__passing_type_preview(self): bag = { - 'status': 'CHECKING', - 'type': 'PREVIEW', + "status": "CHECKING", + "type": "PREVIEW", } model = self.bc.database.create(user=1, bag=bag, academy=1, currency=1) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:checking') - data = {'academy': 1, 'type': 'PREVIEW'} + url = reverse_lazy("payments:checking") + data = {"academy": 1, "type": "PREVIEW"} token = self.bc.random.string(lower=True, upper=True, number=True, size=40) - with patch('rest_framework.authtoken.models.Token.generate_key', MagicMock(return_value=token)): - response = self.client.put(url, data, format='json') + with patch("rest_framework.authtoken.models.Token.generate_key", MagicMock(return_value=token)): + response = self.client.put(url, data, format="json") json = response.json() expected = get_serializer( model.bag, data={ - 'amount_per_month': 0.0, - 'amount_per_quarter': 0.0, - 'amount_per_half': 0.0, - 'amount_per_year': 0.0, - 'expires_at': self.bc.datetime.to_iso_string(UTC_NOW + timedelta(minutes=60)), - 'token': token, + "amount_per_month": 0.0, + "amount_per_quarter": 0.0, + "amount_per_half": 0.0, + "amount_per_year": 0.0, + "expires_at": self.bc.datetime.to_iso_string(UTC_NOW + timedelta(minutes=60)), + "token": token, }, ) 
self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('payments.Bag'), [ - { - **self.bc.format.to_dict(model.bag), - 'amount_per_month': 0.0, - 'amount_per_quarter': 0.0, - 'amount_per_half': 0.0, - 'amount_per_year': 0.0, - 'expires_at': UTC_NOW + timedelta(minutes=60), - 'token': token, - }, - ]) - self.assertEqual(self.bc.database.list_of('authenticate.UserSetting'), [ - format_user_setting({ - 'lang': 'en', - 'id': model.user.id, - 'user_id': model.user.id, - }), - ]) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + self.assertEqual( + self.bc.database.list_of("payments.Bag"), + [ + { + **self.bc.format.to_dict(model.bag), + "amount_per_month": 0.0, + "amount_per_quarter": 0.0, + "amount_per_half": 0.0, + "amount_per_year": 0.0, + "expires_at": UTC_NOW + timedelta(minutes=60), + "token": token, + }, + ], + ) + self.assertEqual( + self.bc.database.list_of("authenticate.UserSetting"), + [ + format_user_setting( + { + "lang": "en", + "id": model.user.id, + "user_id": model.user.id, + } + ), + ], + ) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) """ 🔽🔽🔽 Get with one Bag, type is PREVIEW, passing type preview and many ServiceItem and Plan that not found """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__with_bag__type_bag__passing_type_preview__service_item_not_is_object(self): bag = { - 'status': 'CHECKING', - 'type': 'PREVIEW', + "status": "CHECKING", + "type": "PREVIEW", } model = self.bc.database.create(user=1, bag=bag, academy=1, currency=1) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:checking') - data = {'academy': 1, 'type': 'PREVIEW', 'plans': [1], 'service_items': [1]} + url = reverse_lazy("payments:checking") + data = {"academy": 1, "type": "PREVIEW", "plans": [1], "service_items": [1]} token = self.bc.random.string(lower=True, upper=True, number=True, size=40) - with patch('rest_framework.authtoken.models.Token.generate_key', MagicMock(return_value=token)): - response = self.client.put(url, data, format='json') + with patch("rest_framework.authtoken.models.Token.generate_key", MagicMock(return_value=token)): + response = self.client.put(url, data, format="json") json = response.json() - expected = {'detail': 'service-item-not-object', 'status_code': 400} + expected = {"detail": "service-item-not-object", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('payments.Bag'), [self.bc.format.to_dict(model.bag)]) - self.assertEqual(self.bc.database.list_of('authenticate.UserSetting'), [ - format_user_setting({ - 'lang': 'en', - 'id': model.user.id, - 'user_id': model.user.id, - }), - ]) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + self.assertEqual(self.bc.database.list_of("payments.Bag"), [self.bc.format.to_dict(model.bag)]) + self.assertEqual( + self.bc.database.list_of("authenticate.UserSetting"), + [ + format_user_setting( + { + "lang": "en", + "id": model.user.id, + "user_id": model.user.id, + } + ), + ], + ) + self.bc.check.calls( + 
activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) """ 🔽🔽🔽 Get with one Bag, type is PREVIEW, passing type preview and many ServiceItem and Plan that not found """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__with_bag__type_bag__passing_type_preview__service_item_object_malformed(self): bag = { - 'status': 'CHECKING', - 'type': 'PREVIEW', + "status": "CHECKING", + "type": "PREVIEW", } model = self.bc.database.create(user=1, bag=bag, academy=1, currency=1) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:checking') - data = {'academy': 1, 'type': 'PREVIEW', 'plans': [1], 'service_items': [{}]} + url = reverse_lazy("payments:checking") + data = {"academy": 1, "type": "PREVIEW", "plans": [1], "service_items": [{}]} token = self.bc.random.string(lower=True, upper=True, number=True, size=40) - with patch('rest_framework.authtoken.models.Token.generate_key', MagicMock(return_value=token)): - response = self.client.put(url, data, format='json') + with patch("rest_framework.authtoken.models.Token.generate_key", MagicMock(return_value=token)): + response = self.client.put(url, data, format="json") json = response.json() - expected = {'detail': 'service-item-malformed', 'status_code': 400} + expected = {"detail": "service-item-malformed", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('payments.Bag'), [self.bc.format.to_dict(model.bag)]) - self.assertEqual(self.bc.database.list_of('authenticate.UserSetting'), [ - format_user_setting({ - 'lang': 'en', - 'id': model.user.id, - 'user_id': model.user.id, - }), - ]) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + self.assertEqual(self.bc.database.list_of("payments.Bag"), [self.bc.format.to_dict(model.bag)]) + self.assertEqual( + self.bc.database.list_of("authenticate.UserSetting"), + [ + format_user_setting( + { + "lang": "en", + "id": model.user.id, + "user_id": model.user.id, + } + ), + ], + ) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) """ 🔽🔽🔽 Get with one Bag, type is PREVIEW, passing type preview and many ServiceItem and Plan that not found """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__with_bag__type_bag__passing_type_preview__items_not_found(self): bag = { - 'status': 'CHECKING', - 'type': 'PREVIEW', + "status": "CHECKING", + "type": "PREVIEW", } model = self.bc.database.create(user=1, bag=bag, academy=1, currency=1) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:checking') - data = {'academy': 1, 'type': 'PREVIEW', 'plans': [1], 'service_items': [{'how_many': 1, 'service': 1}]} + url = reverse_lazy("payments:checking") + data = {"academy": 1, "type": "PREVIEW", "plans": [1], "service_items": [{"how_many": 1, "service": 1}]} token = self.bc.random.string(lower=True, upper=True, number=True, size=40) - with patch('rest_framework.authtoken.models.Token.generate_key', MagicMock(return_value=token)): - response = self.client.put(url, data, format='json') + with 
patch("rest_framework.authtoken.models.Token.generate_key", MagicMock(return_value=token)): + response = self.client.put(url, data, format="json") json = response.json() - expected = {'detail': 'some-items-not-found', 'status_code': 404} + expected = {"detail": "some-items-not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - self.assertEqual(self.bc.database.list_of('payments.Bag'), [self.bc.format.to_dict(model.bag)]) - self.assertEqual(self.bc.database.list_of('authenticate.UserSetting'), [ - format_user_setting({ - 'lang': 'en', - 'id': model.user.id, - 'user_id': model.user.id, - }), - ]) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + self.assertEqual(self.bc.database.list_of("payments.Bag"), [self.bc.format.to_dict(model.bag)]) + self.assertEqual( + self.bc.database.list_of("authenticate.UserSetting"), + [ + format_user_setting( + { + "lang": "en", + "id": model.user.id, + "user_id": model.user.id, + } + ), + ], + ) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) """ 🔽🔽🔽 Get with one Bag, type is PREVIEW, passing type preview and many ServiceItem and Plan found, without the Currency """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__with_bag__type_bag__passing_type_preview__items_found__academy_without_the_currency(self): bag = { - 'status': 'CHECKING', - 'type': 'PREVIEW', - 'plans': [], - 'service_items': [], + "status": "CHECKING", + "type": "PREVIEW", + "plans": [], + "service_items": [], } - academy = {'main_currency': None} - plan = {'is_renewable': False} + academy = {"main_currency": None} + plan = {"is_renewable": False} model = self.bc.database.create(user=1, bag=bag, service_item=1, plan=plan, academy=academy) self.client.force_authenticate(model.user) @@ -525,188 +568,209 @@ def test__with_bag__type_bag__passing_type_preview__items_found__academy_without self.bc.check.queryset_with_pks(model.bag.service_items.all(), []) self.bc.check.queryset_with_pks(model.bag.plans.all(), []) - url = reverse_lazy('payments:checking') - data = {'academy': 1, 'type': 'PREVIEW', 'plans': [1], 'service_items': [{'how_many': 1, 'service': 1}]} + url = reverse_lazy("payments:checking") + data = {"academy": 1, "type": "PREVIEW", "plans": [1], "service_items": [{"how_many": 1, "service": 1}]} token = self.bc.random.string(lower=True, upper=True, number=True, size=40) - with patch('rest_framework.authtoken.models.Token.generate_key', MagicMock(return_value=token)): - response = self.client.put(url, data, format='json') + with patch("rest_framework.authtoken.models.Token.generate_key", MagicMock(return_value=token)): + response = self.client.put(url, data, format="json") json = response.json() - expected = {'detail': 'not-found', 'status_code': 404} + expected = {"detail": "not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - self.assertEqual(self.bc.database.list_of('payments.Bag'), [self.bc.format.to_dict(model.bag)]) - self.assertEqual(self.bc.database.list_of('authenticate.UserSetting'), [ - format_user_setting({ - 'lang': 'en', - 'id': model.user.id, - 'user_id': model.user.id, - }), - ]) + 
self.assertEqual(self.bc.database.list_of("payments.Bag"), [self.bc.format.to_dict(model.bag)]) + self.assertEqual( + self.bc.database.list_of("authenticate.UserSetting"), + [ + format_user_setting( + { + "lang": "en", + "id": model.user.id, + "user_id": model.user.id, + } + ), + ], + ) self.bc.check.queryset_with_pks(model.bag.service_items.all(), []) self.bc.check.queryset_with_pks(model.bag.plans.all(), []) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) """ 🔽🔽🔽 Get with one Bag, type is PREVIEW, passing type preview and many ServiceItem and Plan found, with the correct Currency and Price """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__with_bag__type_bag__passing_type_preview__items_found__with_the_correct_currency__with_service_item(self): bag = { - 'status': 'CHECKING', - 'type': 'PREVIEW', - 'plans': [], - 'service_items': [], + "status": "CHECKING", + "type": "PREVIEW", + "plans": [], + "service_items": [], } - currency = {'code': 'USD', 'name': 'United States dollar'} + currency = {"code": "USD", "name": "United States dollar"} plan = { - 'price_per_month': random.random() * 100, - 'price_per_quarter': random.random() * 100, - 'price_per_half': random.random() * 100, - 'price_per_year': random.random() * 100, - 'is_renewable': True, - 'time_of_life': 0, - 'time_of_life_unit': None, + "price_per_month": random.random() * 100, + "price_per_quarter": random.random() * 100, + "price_per_half": random.random() * 100, + "price_per_year": random.random() * 100, + "is_renewable": True, + "time_of_life": 0, + "time_of_life_unit": None, } service = { - 'price_per_unit': random.random() * 100, + "price_per_unit": random.random() * 100, } how_many1 = random.randint(1, 5) how_many2 = random.choice([x for x in range(1, 6) if x != how_many1]) - service_item = {'how_many': how_many1} - academy = {'available_as_saas': True} - - model = self.bc.database.create(user=1, - bag=bag, - academy=academy, - cohort=1, - cohort_set=1, - service_item=service_item, - service=service, - plan=plan, - plan_service_item=1, - currency=currency) + service_item = {"how_many": how_many1} + academy = {"available_as_saas": True} + + model = self.bc.database.create( + user=1, + bag=bag, + academy=academy, + cohort=1, + cohort_set=1, + service_item=service_item, + service=service, + plan=plan, + plan_service_item=1, + currency=currency, + ) self.client.force_authenticate(model.user) - service_item = self.bc.database.get('payments.ServiceItem', 1, dict=False) + service_item = self.bc.database.get("payments.ServiceItem", 1, dict=False) service_item.how_many = how_many2 - url = reverse_lazy('payments:checking') + url = reverse_lazy("payments:checking") data = { - 'academy': 1, - 'type': 'PREVIEW', - 'plans': [1], - 'cohort_set': 1, - 'service_items': [{ - 'how_many': how_many2, - 'service': 1 - }] + "academy": 1, + "type": "PREVIEW", + "plans": [1], + "cohort_set": 1, + "service_items": [{"how_many": how_many2, "service": 1}], } token = self.bc.random.string(lower=True, upper=True, number=True, size=40) - with patch('rest_framework.authtoken.models.Token.generate_key', MagicMock(return_value=token)): - response = self.client.put(url, data, format='json') + with 
patch("rest_framework.authtoken.models.Token.generate_key", MagicMock(return_value=token)): + response = self.client.put(url, data, format="json") json = response.json() - expected = {'detail': 'one-plan-and-many-services', 'status_code': 400} + expected = {"detail": "one-plan-and-many-services", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('payments.Bag'), [ - { - **self.bc.format.to_dict(model.bag), - }, - ]) - self.assertEqual(self.bc.database.list_of('authenticate.UserSetting'), [ - format_user_setting({ - 'lang': 'en', - 'id': model.user.id, - 'user_id': model.user.id, - }), - ]) + self.assertEqual( + self.bc.database.list_of("payments.Bag"), + [ + { + **self.bc.format.to_dict(model.bag), + }, + ], + ) + self.assertEqual( + self.bc.database.list_of("authenticate.UserSetting"), + [ + format_user_setting( + { + "lang": "en", + "id": model.user.id, + "user_id": model.user.id, + } + ), + ], + ) self.bc.check.queryset_with_pks(model.bag.service_items.all(), []) self.bc.check.queryset_with_pks(model.bag.plans.all(), []) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) """ 🔽🔽🔽 Get with one Bag, type is PREVIEW, passing type preview and many ServiceItem and Plan found, with the correct Currency and Price """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__with_bag__type_bag__passing_type_preview__items_found__with_the_correct_currency__without_service_item( - self): + self, + ): bag = { - 'status': 'CHECKING', - 'type': 'PREVIEW', - 'plans': [], - 'service_items': [], + "status": "CHECKING", + "type": "PREVIEW", + "plans": [], + "service_items": [], } - currency = {'code': 'USD', 'name': 'United States dollar'} + currency = {"code": "USD", "name": "United States dollar"} plan = { - 'price_per_month': random.random() * 100, - 'price_per_quarter': random.random() * 100, - 'price_per_half': random.random() * 100, - 'price_per_year': random.random() * 100, - 'is_renewable': False, - 'time_of_life': random.randint(1, 100), - 'time_of_life_unit': random.choice(['DAY', 'WEEK', 'MONTH', 'YEAR']), - 'trial_duration': 0, + "price_per_month": random.random() * 100, + "price_per_quarter": random.random() * 100, + "price_per_half": random.random() * 100, + "price_per_year": random.random() * 100, + "is_renewable": False, + "time_of_life": random.randint(1, 100), + "time_of_life_unit": random.choice(["DAY", "WEEK", "MONTH", "YEAR"]), + "trial_duration": 0, } service = { - 'price_per_unit': random.random() * 100, + "price_per_unit": random.random() * 100, } how_many1 = random.randint(1, 5) how_many2 = random.choice([x for x in range(1, 6) if x != how_many1]) - service_item = {'how_many': how_many1} - academy = {'available_as_saas': True} - - model = self.bc.database.create(user=1, - bag=bag, - academy=academy, - cohort=1, - cohort_set=1, - service_item=service_item, - service=service, - plan=plan, - plan_service_item=1, - financing_option=1, - currency=currency) + service_item = {"how_many": how_many1} + academy = {"available_as_saas": True} + + model = self.bc.database.create( + user=1, + bag=bag, + academy=academy, + cohort=1, + 
cohort_set=1, + service_item=service_item, + service=service, + plan=plan, + plan_service_item=1, + financing_option=1, + currency=currency, + ) self.client.force_authenticate(model.user) - service_item = self.bc.database.get('payments.ServiceItem', 1, dict=False) + service_item = self.bc.database.get("payments.ServiceItem", 1, dict=False) service_item.how_many = how_many2 - url = reverse_lazy('payments:checking') + url = reverse_lazy("payments:checking") data = { - 'academy': 1, - 'type': 'PREVIEW', - 'plans': [1], - 'cohort_set': 1, + "academy": 1, + "type": "PREVIEW", + "plans": [1], + "cohort_set": 1, } token = self.bc.random.string(lower=True, upper=True, number=True, size=40) - with patch('rest_framework.authtoken.models.Token.generate_key', MagicMock(return_value=token)): - response = self.client.put(url, data, format='json') + with patch("rest_framework.authtoken.models.Token.generate_key", MagicMock(return_value=token)): + response = self.client.put(url, data, format="json") json = response.json() @@ -724,105 +788,118 @@ def test__with_bag__type_bag__passing_type_preview__items_found__with_the_correc [model.financing_option], model.currency, data={ - 'amount_per_month': price_per_month, - 'amount_per_quarter': price_per_quarter, - 'amount_per_half': price_per_half, - 'amount_per_year': price_per_year, - 'expires_at': self.bc.datetime.to_iso_string(UTC_NOW + timedelta(minutes=60)), - 'token': token, + "amount_per_month": price_per_month, + "amount_per_quarter": price_per_quarter, + "amount_per_half": price_per_half, + "amount_per_year": price_per_year, + "expires_at": self.bc.datetime.to_iso_string(UTC_NOW + timedelta(minutes=60)), + "token": token, }, ) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('payments.Bag'), [ - { - **self.bc.format.to_dict(model.bag), - 'amount_per_month': price_per_month, - 'amount_per_quarter': price_per_quarter, - 'amount_per_half': price_per_half, - 'amount_per_year': price_per_year, - 'expires_at': UTC_NOW + timedelta(minutes=60), - 'token': token, - }, - ]) - self.assertEqual(self.bc.database.list_of('authenticate.UserSetting'), [ - format_user_setting({ - 'lang': 'en', - 'id': model.user.id, - 'user_id': model.user.id, - }), - ]) + self.assertEqual( + self.bc.database.list_of("payments.Bag"), + [ + { + **self.bc.format.to_dict(model.bag), + "amount_per_month": price_per_month, + "amount_per_quarter": price_per_quarter, + "amount_per_half": price_per_half, + "amount_per_year": price_per_year, + "expires_at": UTC_NOW + timedelta(minutes=60), + "token": token, + }, + ], + ) + self.assertEqual( + self.bc.database.list_of("authenticate.UserSetting"), + [ + format_user_setting( + { + "lang": "en", + "id": model.user.id, + "user_id": model.user.id, + } + ), + ], + ) self.bc.check.queryset_with_pks(model.bag.service_items.all(), []) self.bc.check.queryset_with_pks(model.bag.plans.all(), [1]) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) """ 🔽🔽🔽 Get with one Bag, type is PREVIEW, passing type preview and many ServiceItem and Plan found, with the correct Currency and Price, Plan with trial_duration """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", 
MagicMock(return_value=UTC_NOW)) def test__with_bag__type_bag__passing_type_preview__items_found__taking_free_trial(self): bag = { - 'status': 'CHECKING', - 'type': 'PREVIEW', - 'plans': [], - 'service_items': [], + "status": "CHECKING", + "type": "PREVIEW", + "plans": [], + "service_items": [], } - currency = {'code': 'USD', 'name': 'United States dollar'} + currency = {"code": "USD", "name": "United States dollar"} plan = { - 'price_per_month': random.random() * 100, - 'price_per_quarter': random.random() * 100, - 'price_per_half': random.random() * 100, - 'price_per_year': random.random() * 100, - 'is_renewable': False, - 'time_of_life': random.randint(1, 100), - 'time_of_life_unit': random.choice(['DAY', 'WEEK', 'MONTH', 'YEAR']), - 'trial_duration': random.randint(1, 10), + "price_per_month": random.random() * 100, + "price_per_quarter": random.random() * 100, + "price_per_half": random.random() * 100, + "price_per_year": random.random() * 100, + "is_renewable": False, + "time_of_life": random.randint(1, 100), + "time_of_life_unit": random.choice(["DAY", "WEEK", "MONTH", "YEAR"]), + "trial_duration": random.randint(1, 10), } service = { - 'price_per_unit': random.random() * 100, + "price_per_unit": random.random() * 100, } how_many1 = random.randint(1, 5) how_many2 = random.choice([x for x in range(1, 6) if x != how_many1]) - service_item = {'how_many': how_many1} - academy = {'available_as_saas': True} - - model = self.bc.database.create(user=1, - bag=bag, - academy=academy, - cohort=1, - cohort_set=1, - service_item=service_item, - service=service, - plan=plan, - plan_service_item=1, - financing_option=1, - currency=currency) + service_item = {"how_many": how_many1} + academy = {"available_as_saas": True} + + model = self.bc.database.create( + user=1, + bag=bag, + academy=academy, + cohort=1, + cohort_set=1, + service_item=service_item, + service=service, + plan=plan, + plan_service_item=1, + financing_option=1, + currency=currency, + ) self.client.force_authenticate(model.user) - service_item = self.bc.database.get('payments.ServiceItem', 1, dict=False) + service_item = self.bc.database.get("payments.ServiceItem", 1, dict=False) service_item.how_many = how_many2 - url = reverse_lazy('payments:checking') + url = reverse_lazy("payments:checking") data = { - 'academy': 1, - 'type': 'PREVIEW', - 'plans': [1], - 'cohort_set': 1, + "academy": 1, + "type": "PREVIEW", + "plans": [1], + "cohort_set": 1, } token = self.bc.random.string(lower=True, upper=True, number=True, size=40) - with patch('rest_framework.authtoken.models.Token.generate_key', MagicMock(return_value=token)): - response = self.client.put(url, data, format='json') + with patch("rest_framework.authtoken.models.Token.generate_key", MagicMock(return_value=token)): + response = self.client.put(url, data, format="json") json = response.json() @@ -836,100 +913,113 @@ def test__with_bag__type_bag__passing_type_preview__items_found__taking_free_tri [model.financing_option], model.currency, data={ - 'amount_per_month': 0, - 'amount_per_quarter': 0, - 'amount_per_half': 0, - 'amount_per_year': 0, - 'expires_at': self.bc.datetime.to_iso_string(UTC_NOW + timedelta(minutes=60)), - 'token': token, + "amount_per_month": 0, + "amount_per_quarter": 0, + "amount_per_half": 0, + "amount_per_year": 0, + "expires_at": self.bc.datetime.to_iso_string(UTC_NOW + timedelta(minutes=60)), + "token": token, }, ) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - 
self.assertEqual(self.bc.database.list_of('payments.Bag'), [ - { - **self.bc.format.to_dict(model.bag), - 'amount_per_month': 0, - 'amount_per_quarter': 0, - 'amount_per_half': 0, - 'amount_per_year': 0, - 'expires_at': UTC_NOW + timedelta(minutes=60), - 'token': token, - }, - ]) - self.assertEqual(self.bc.database.list_of('authenticate.UserSetting'), [ - format_user_setting({ - 'lang': 'en', - 'id': model.user.id, - 'user_id': model.user.id, - }), - ]) + self.assertEqual( + self.bc.database.list_of("payments.Bag"), + [ + { + **self.bc.format.to_dict(model.bag), + "amount_per_month": 0, + "amount_per_quarter": 0, + "amount_per_half": 0, + "amount_per_year": 0, + "expires_at": UTC_NOW + timedelta(minutes=60), + "token": token, + }, + ], + ) + self.assertEqual( + self.bc.database.list_of("authenticate.UserSetting"), + [ + format_user_setting( + { + "lang": "en", + "id": model.user.id, + "user_id": model.user.id, + } + ), + ], + ) self.bc.check.queryset_with_pks(model.bag.service_items.all(), []) self.bc.check.queryset_with_pks(model.bag.plans.all(), [1]) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) """ 🔽🔽🔽 Get with one Bag, type is PREVIEW, passing type preview and many ServiceItem and Plan found, with the correct Currency and Price, Plan with trial_duration """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__with_bag__type_bag__passing_type_preview__items_found__taking_free_plan__not_renewable(self): bag = { - 'status': 'CHECKING', - 'type': 'PREVIEW', - 'plans': [], - 'service_items': [], + "status": "CHECKING", + "type": "PREVIEW", + "plans": [], + "service_items": [], } - currency = {'code': 'USD', 'name': 'United States dollar'} + currency = {"code": "USD", "name": "United States dollar"} plan = { - 'is_renewable': False, - 'time_of_life': random.randint(1, 100), - 'time_of_life_unit': random.choice(['DAY', 'WEEK', 'MONTH', 'YEAR']), - 'trial_duration': 0, + "is_renewable": False, + "time_of_life": random.randint(1, 100), + "time_of_life_unit": random.choice(["DAY", "WEEK", "MONTH", "YEAR"]), + "trial_duration": 0, } service = { - 'price_per_unit': random.random() * 100, + "price_per_unit": random.random() * 100, } how_many1 = random.randint(1, 5) how_many2 = random.choice([x for x in range(1, 6) if x != how_many1]) - service_item = {'how_many': how_many1} - academy = {'available_as_saas': True} - - model = self.bc.database.create(user=1, - bag=bag, - academy=academy, - cohort=1, - cohort_set=1, - service_item=service_item, - service=service, - plan=plan, - plan_service_item=1, - currency=currency) + service_item = {"how_many": how_many1} + academy = {"available_as_saas": True} + + model = self.bc.database.create( + user=1, + bag=bag, + academy=academy, + cohort=1, + cohort_set=1, + service_item=service_item, + service=service, + plan=plan, + plan_service_item=1, + currency=currency, + ) self.client.force_authenticate(model.user) - service_item = self.bc.database.get('payments.ServiceItem', 1, dict=False) + service_item = self.bc.database.get("payments.ServiceItem", 1, dict=False) service_item.how_many = how_many2 - url = reverse_lazy('payments:checking') + url = reverse_lazy("payments:checking") data = { - 'academy': 1, - 
'type': 'PREVIEW', - 'plans': [1], - 'cohort_set': 1, + "academy": 1, + "type": "PREVIEW", + "plans": [1], + "cohort_set": 1, } token = self.bc.random.string(lower=True, upper=True, number=True, size=40) - with patch('rest_framework.authtoken.models.Token.generate_key', MagicMock(return_value=token)): - response = self.client.put(url, data, format='json') + with patch("rest_framework.authtoken.models.Token.generate_key", MagicMock(return_value=token)): + response = self.client.put(url, data, format="json") json = response.json() @@ -943,95 +1033,108 @@ def test__with_bag__type_bag__passing_type_preview__items_found__taking_free_pla [], model.currency, data={ - 'amount_per_month': 0, - 'amount_per_quarter': 0, - 'amount_per_half': 0, - 'amount_per_year': 0, - 'expires_at': self.bc.datetime.to_iso_string(UTC_NOW + timedelta(minutes=60)), - 'token': token, + "amount_per_month": 0, + "amount_per_quarter": 0, + "amount_per_half": 0, + "amount_per_year": 0, + "expires_at": self.bc.datetime.to_iso_string(UTC_NOW + timedelta(minutes=60)), + "token": token, }, ) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('payments.Bag'), [ - { - **self.bc.format.to_dict(model.bag), - 'amount_per_month': 0, - 'amount_per_quarter': 0, - 'amount_per_half': 0, - 'amount_per_year': 0, - 'expires_at': UTC_NOW + timedelta(minutes=60), - 'token': token, - }, - ]) - self.assertEqual(self.bc.database.list_of('authenticate.UserSetting'), [ - format_user_setting({ - 'lang': 'en', - 'id': model.user.id, - 'user_id': model.user.id, - }), - ]) + self.assertEqual( + self.bc.database.list_of("payments.Bag"), + [ + { + **self.bc.format.to_dict(model.bag), + "amount_per_month": 0, + "amount_per_quarter": 0, + "amount_per_half": 0, + "amount_per_year": 0, + "expires_at": UTC_NOW + timedelta(minutes=60), + "token": token, + }, + ], + ) + self.assertEqual( + self.bc.database.list_of("authenticate.UserSetting"), + [ + format_user_setting( + { + "lang": "en", + "id": model.user.id, + "user_id": model.user.id, + } + ), + ], + ) self.bc.check.queryset_with_pks(model.bag.service_items.all(), []) self.bc.check.queryset_with_pks(model.bag.plans.all(), [1]) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__with_bag__type_bag__passing_type_preview__items_found__taking_free_plan__renewable(self): bag = { - 'status': 'CHECKING', - 'type': 'PREVIEW', - 'plans': [], - 'service_items': [], + "status": "CHECKING", + "type": "PREVIEW", + "plans": [], + "service_items": [], } - currency = {'code': 'USD', 'name': 'United States dollar'} + currency = {"code": "USD", "name": "United States dollar"} plan = { - 'is_renewable': True, - 'time_of_life': random.randint(1, 100), - 'time_of_life_unit': random.choice(['DAY', 'WEEK', 'MONTH', 'YEAR']), - 'trial_duration': 0, + "is_renewable": True, + "time_of_life": random.randint(1, 100), + "time_of_life_unit": random.choice(["DAY", "WEEK", "MONTH", "YEAR"]), + "trial_duration": 0, } service = { - 'price_per_unit': random.random() * 100, + "price_per_unit": random.random() * 100, } how_many1 = random.randint(1, 5) how_many2 
= random.choice([x for x in range(1, 6) if x != how_many1]) - service_item = {'how_many': how_many1} - academy = {'available_as_saas': True} - - model = self.bc.database.create(user=1, - bag=bag, - academy=academy, - cohort=1, - cohort_set=1, - service_item=service_item, - service=service, - plan=plan, - plan_service_item=1, - currency=currency) + service_item = {"how_many": how_many1} + academy = {"available_as_saas": True} + + model = self.bc.database.create( + user=1, + bag=bag, + academy=academy, + cohort=1, + cohort_set=1, + service_item=service_item, + service=service, + plan=plan, + plan_service_item=1, + currency=currency, + ) self.client.force_authenticate(model.user) - service_item = self.bc.database.get('payments.ServiceItem', 1, dict=False) + service_item = self.bc.database.get("payments.ServiceItem", 1, dict=False) service_item.how_many = how_many2 - url = reverse_lazy('payments:checking') + url = reverse_lazy("payments:checking") data = { - 'academy': 1, - 'type': 'PREVIEW', - 'plans': [1], - 'cohort_set': 1, + "academy": 1, + "type": "PREVIEW", + "plans": [1], + "cohort_set": 1, } token = self.bc.random.string(lower=True, upper=True, number=True, size=40) - with patch('rest_framework.authtoken.models.Token.generate_key', MagicMock(return_value=token)): - response = self.client.put(url, data, format='json') + with patch("rest_framework.authtoken.models.Token.generate_key", MagicMock(return_value=token)): + response = self.client.put(url, data, format="json") json = response.json() @@ -1045,107 +1148,120 @@ def test__with_bag__type_bag__passing_type_preview__items_found__taking_free_pla [], model.currency, data={ - 'amount_per_month': 0, - 'amount_per_quarter': 0, - 'amount_per_half': 0, - 'amount_per_year': 0, - 'expires_at': self.bc.datetime.to_iso_string(UTC_NOW + timedelta(minutes=60)), - 'token': token, + "amount_per_month": 0, + "amount_per_quarter": 0, + "amount_per_half": 0, + "amount_per_year": 0, + "expires_at": self.bc.datetime.to_iso_string(UTC_NOW + timedelta(minutes=60)), + "token": token, }, ) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('payments.Bag'), [ - { - **self.bc.format.to_dict(model.bag), - 'amount_per_month': 0, - 'amount_per_quarter': 0, - 'amount_per_half': 0, - 'amount_per_year': 0, - 'expires_at': UTC_NOW + timedelta(minutes=60), - 'token': token, - }, - ]) - self.assertEqual(self.bc.database.list_of('authenticate.UserSetting'), [ - format_user_setting({ - 'lang': 'en', - 'id': model.user.id, - 'user_id': model.user.id, - }), - ]) + self.assertEqual( + self.bc.database.list_of("payments.Bag"), + [ + { + **self.bc.format.to_dict(model.bag), + "amount_per_month": 0, + "amount_per_quarter": 0, + "amount_per_half": 0, + "amount_per_year": 0, + "expires_at": UTC_NOW + timedelta(minutes=60), + "token": token, + }, + ], + ) + self.assertEqual( + self.bc.database.list_of("authenticate.UserSetting"), + [ + format_user_setting( + { + "lang": "en", + "id": model.user.id, + "user_id": model.user.id, + } + ), + ], + ) self.bc.check.queryset_with_pks(model.bag.service_items.all(), []) self.bc.check.queryset_with_pks(model.bag.plans.all(), [1]) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) """ 🔽🔽🔽 Get with one Bag, 
type is PREVIEW, passing type preview and many ServiceItem and Plan found, with the correct Currency and Price, Plan with trial_duration and Subscription """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__with_bag__type_bag__passing_type_preview__items_found__free_trial_already_taken(self): bag = { - 'status': 'CHECKING', - 'type': 'PREVIEW', - 'plans': [], - 'service_items': [], + "status": "CHECKING", + "type": "PREVIEW", + "plans": [], + "service_items": [], } - currency = {'code': 'USD', 'name': 'United States dollar'} + currency = {"code": "USD", "name": "United States dollar"} plan = { - 'price_per_month': random.random() * 100, - 'price_per_quarter': random.random() * 100, - 'price_per_half': random.random() * 100, - 'price_per_year': random.random() * 100, - 'is_renewable': False, - 'time_of_life': random.randint(1, 100), - 'time_of_life_unit': random.choice(['DAY', 'WEEK', 'MONTH', 'YEAR']), - 'trial_duration': random.randint(1, 10), + "price_per_month": random.random() * 100, + "price_per_quarter": random.random() * 100, + "price_per_half": random.random() * 100, + "price_per_year": random.random() * 100, + "is_renewable": False, + "time_of_life": random.randint(1, 100), + "time_of_life_unit": random.choice(["DAY", "WEEK", "MONTH", "YEAR"]), + "trial_duration": random.randint(1, 10), } service = { - 'price_per_unit': random.random() * 100, + "price_per_unit": random.random() * 100, } how_many1 = random.randint(1, 5) how_many2 = random.choice([x for x in range(1, 6) if x != how_many1]) - service_item = {'how_many': how_many1} - subscription = {'valid_until': UTC_NOW - timedelta(seconds=1)} - academy = {'available_as_saas': True} - - model = self.bc.database.create(user=1, - bag=bag, - academy=academy, - subscription=subscription, - cohort=1, - cohort_set=1, - service_item=service_item, - service=service, - plan=plan, - plan_service_item=1, - financing_option=1, - currency=currency) + service_item = {"how_many": how_many1} + subscription = {"valid_until": UTC_NOW - timedelta(seconds=1)} + academy = {"available_as_saas": True} + + model = self.bc.database.create( + user=1, + bag=bag, + academy=academy, + subscription=subscription, + cohort=1, + cohort_set=1, + service_item=service_item, + service=service, + plan=plan, + plan_service_item=1, + financing_option=1, + currency=currency, + ) self.client.force_authenticate(model.user) - service_item = self.bc.database.get('payments.ServiceItem', 1, dict=False) + service_item = self.bc.database.get("payments.ServiceItem", 1, dict=False) service_item.how_many = how_many2 - url = reverse_lazy('payments:checking') + url = reverse_lazy("payments:checking") data = { - 'academy': 1, - 'type': 'PREVIEW', - 'plans': [1], - 'cohort_set': 1, + "academy": 1, + "type": "PREVIEW", + "plans": [1], + "cohort_set": 1, } token = self.bc.random.string(lower=True, upper=True, number=True, size=40) - with patch('rest_framework.authtoken.models.Token.generate_key', MagicMock(return_value=token)): - response = self.client.put(url, data, format='json') + with patch("rest_framework.authtoken.models.Token.generate_key", MagicMock(return_value=token)): + response = self.client.put(url, data, format="json") json = response.json() @@ -1163,131 +1279,155 @@ def test__with_bag__type_bag__passing_type_preview__items_found__free_trial_alre [model.financing_option], model.currency, data={ - 'amount_per_month': price_per_month, - 'amount_per_quarter': price_per_quarter, 
- 'amount_per_half': price_per_half, - 'amount_per_year': price_per_year, - 'expires_at': self.bc.datetime.to_iso_string(UTC_NOW + timedelta(minutes=60)), - 'token': token, + "amount_per_month": price_per_month, + "amount_per_quarter": price_per_quarter, + "amount_per_half": price_per_half, + "amount_per_year": price_per_year, + "expires_at": self.bc.datetime.to_iso_string(UTC_NOW + timedelta(minutes=60)), + "token": token, }, ) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('payments.Bag'), [ - { - **self.bc.format.to_dict(model.bag), - 'amount_per_month': price_per_month, - 'amount_per_quarter': price_per_quarter, - 'amount_per_half': price_per_half, - 'amount_per_year': price_per_year, - 'expires_at': UTC_NOW + timedelta(minutes=60), - 'token': token, - }, - ]) - self.assertEqual(self.bc.database.list_of('authenticate.UserSetting'), [ - format_user_setting({ - 'lang': 'en', - 'id': model.user.id, - 'user_id': model.user.id, - }), - ]) + self.assertEqual( + self.bc.database.list_of("payments.Bag"), + [ + { + **self.bc.format.to_dict(model.bag), + "amount_per_month": price_per_month, + "amount_per_quarter": price_per_quarter, + "amount_per_half": price_per_half, + "amount_per_year": price_per_year, + "expires_at": UTC_NOW + timedelta(minutes=60), + "token": token, + }, + ], + ) + self.assertEqual( + self.bc.database.list_of("authenticate.UserSetting"), + [ + format_user_setting( + { + "lang": "en", + "id": model.user.id, + "user_id": model.user.id, + } + ), + ], + ) self.bc.check.queryset_with_pks(model.bag.service_items.all(), []) self.bc.check.queryset_with_pks(model.bag.plans.all(), [1]) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) """ 🔽🔽🔽 Get with one Bag, type is PREVIEW, passing type preview and many ServiceItem and Plan found, with the correct Currency and Price, Plan with trial_duration and price et 0 and Subscription """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__with_bag__type_bag__passing_type_preview__items_found__free_trial_already_taken__amount_is_0(self): bag = { - 'status': 'CHECKING', - 'type': 'PREVIEW', - 'plans': [], - 'service_items': [], + "status": "CHECKING", + "type": "PREVIEW", + "plans": [], + "service_items": [], } - currency = {'code': 'USD', 'name': 'United States dollar'} + currency = {"code": "USD", "name": "United States dollar"} plan = { - 'price_per_month': 0, - 'price_per_quarter': 0, - 'price_per_half': 0, - 'price_per_year': 0, - 'is_renewable': False, - 'time_of_life': random.randint(1, 100), - 'time_of_life_unit': random.choice(['DAY', 'WEEK', 'MONTH', 'YEAR']), - 'trial_duration': random.randint(1, 10), + "price_per_month": 0, + "price_per_quarter": 0, + "price_per_half": 0, + "price_per_year": 0, + "is_renewable": False, + "time_of_life": random.randint(1, 100), + "time_of_life_unit": random.choice(["DAY", "WEEK", "MONTH", "YEAR"]), + "trial_duration": random.randint(1, 10), } service = { - 'price_per_unit': random.random() * 100, + "price_per_unit": random.random() * 100, } how_many1 = random.randint(1, 5) how_many2 = random.choice([x for x in range(1, 6) if x != how_many1]) - service_item = 
{'how_many': how_many1} - subscription = {'valid_until': UTC_NOW - timedelta(seconds=1)} - academy = {'available_as_saas': True} - - model = self.bc.database.create(user=1, - bag=bag, - academy=academy, - subscription=subscription, - cohort=1, - cohort_set=1, - service_item=service_item, - service=service, - plan=plan, - plan_service_item=1, - currency=currency) + service_item = {"how_many": how_many1} + subscription = {"valid_until": UTC_NOW - timedelta(seconds=1)} + academy = {"available_as_saas": True} + + model = self.bc.database.create( + user=1, + bag=bag, + academy=academy, + subscription=subscription, + cohort=1, + cohort_set=1, + service_item=service_item, + service=service, + plan=plan, + plan_service_item=1, + currency=currency, + ) self.client.force_authenticate(model.user) - service_item = self.bc.database.get('payments.ServiceItem', 1, dict=False) + service_item = self.bc.database.get("payments.ServiceItem", 1, dict=False) service_item.how_many = how_many2 - url = reverse_lazy('payments:checking') + url = reverse_lazy("payments:checking") data = { - 'academy': 1, - 'type': 'PREVIEW', - 'plans': [1], - 'cohort_set': 1, + "academy": 1, + "type": "PREVIEW", + "plans": [1], + "cohort_set": 1, } token = self.bc.random.string(lower=True, upper=True, number=True, size=40) - with patch('rest_framework.authtoken.models.Token.generate_key', MagicMock(return_value=token)): - response = self.client.put(url, data, format='json') + with patch("rest_framework.authtoken.models.Token.generate_key", MagicMock(return_value=token)): + response = self.client.put(url, data, format="json") json = response.json() - expected = {'detail': 'free-trial-already-bought', 'status_code': 400} + expected = {"detail": "free-trial-already-bought", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('payments.Bag'), [ - { - **self.bc.format.to_dict(model.bag), - }, - ]) - self.assertEqual(self.bc.database.list_of('authenticate.UserSetting'), [ - format_user_setting({ - 'lang': 'en', - 'id': model.user.id, - 'user_id': model.user.id, - }), - ]) + self.assertEqual( + self.bc.database.list_of("payments.Bag"), + [ + { + **self.bc.format.to_dict(model.bag), + }, + ], + ) + self.assertEqual( + self.bc.database.list_of("authenticate.UserSetting"), + [ + format_user_setting( + { + "lang": "en", + "id": model.user.id, + "user_id": model.user.id, + } + ), + ], + ) self.bc.check.queryset_with_pks(model.bag.service_items.all(), []) self.bc.check.queryset_with_pks(model.bag.plans.all(), []) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) """ 🔽🔽🔽 Get with one Bag, type is PREVIEW, passing type preview and many ServiceItem and Plan found, @@ -1295,67 +1435,70 @@ def test__with_bag__type_bag__passing_type_preview__items_found__free_trial_alre FinancingOption """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__with_bag__type_bag__passing_type_preview__items_found__free_trial_already_taken__with_financing_option( - self): + self, + ): bag = { - 'status': 'CHECKING', - 'type': 'PREVIEW', - 'plans': [], - 'service_items': [], + "status": "CHECKING", + "type": 
"PREVIEW", + "plans": [], + "service_items": [], } - currency = {'code': 'USD', 'name': 'United States dollar'} + currency = {"code": "USD", "name": "United States dollar"} plan = { - 'price_per_month': 0, - 'price_per_quarter': 0, - 'price_per_half': 0, - 'price_per_year': 0, - 'is_renewable': False, - 'time_of_life': random.randint(1, 100), - 'time_of_life_unit': random.choice(['DAY', 'WEEK', 'MONTH', 'YEAR']), - 'trial_duration': random.randint(1, 10), + "price_per_month": 0, + "price_per_quarter": 0, + "price_per_half": 0, + "price_per_year": 0, + "is_renewable": False, + "time_of_life": random.randint(1, 100), + "time_of_life_unit": random.choice(["DAY", "WEEK", "MONTH", "YEAR"]), + "trial_duration": random.randint(1, 10), } service = { - 'price_per_unit': random.random() * 100, + "price_per_unit": random.random() * 100, } how_many1 = random.randint(1, 5) how_many2 = random.choice([x for x in range(1, 6) if x != how_many1]) - service_item = {'how_many': how_many1} - subscription = {'valid_until': UTC_NOW - timedelta(seconds=1)} - academy = {'available_as_saas': True} - - model = self.bc.database.create(user=1, - bag=bag, - academy=academy, - subscription=subscription, - cohort=1, - cohort_set=1, - service_item=service_item, - service=service, - plan=plan, - plan_service_item=1, - financing_option=1, - currency=currency) + service_item = {"how_many": how_many1} + subscription = {"valid_until": UTC_NOW - timedelta(seconds=1)} + academy = {"available_as_saas": True} + + model = self.bc.database.create( + user=1, + bag=bag, + academy=academy, + subscription=subscription, + cohort=1, + cohort_set=1, + service_item=service_item, + service=service, + plan=plan, + plan_service_item=1, + financing_option=1, + currency=currency, + ) self.client.force_authenticate(model.user) - service_item = self.bc.database.get('payments.ServiceItem', 1, dict=False) + service_item = self.bc.database.get("payments.ServiceItem", 1, dict=False) service_item.how_many = how_many2 - url = reverse_lazy('payments:checking') + url = reverse_lazy("payments:checking") data = { - 'academy': 1, - 'type': 'PREVIEW', - 'plans': [1], - 'cohort_set': 1, + "academy": 1, + "type": "PREVIEW", + "plans": [1], + "cohort_set": 1, } token = self.bc.random.string(lower=True, upper=True, number=True, size=40) - with patch('rest_framework.authtoken.models.Token.generate_key', MagicMock(return_value=token)): - response = self.client.put(url, data, format='json') + with patch("rest_framework.authtoken.models.Token.generate_key", MagicMock(return_value=token)): + response = self.client.put(url, data, format="json") json = response.json() @@ -1373,135 +1516,159 @@ def test__with_bag__type_bag__passing_type_preview__items_found__free_trial_alre [model.financing_option], model.currency, data={ - 'amount_per_month': price_per_month, - 'amount_per_quarter': price_per_quarter, - 'amount_per_half': price_per_half, - 'amount_per_year': price_per_year, - 'expires_at': self.bc.datetime.to_iso_string(UTC_NOW + timedelta(minutes=60)), - 'token': token, + "amount_per_month": price_per_month, + "amount_per_quarter": price_per_quarter, + "amount_per_half": price_per_half, + "amount_per_year": price_per_year, + "expires_at": self.bc.datetime.to_iso_string(UTC_NOW + timedelta(minutes=60)), + "token": token, }, ) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('payments.Bag'), [ - { - **self.bc.format.to_dict(model.bag), - 'amount_per_month': price_per_month, - 
'amount_per_quarter': price_per_quarter, - 'amount_per_half': price_per_half, - 'amount_per_year': price_per_year, - 'expires_at': UTC_NOW + timedelta(minutes=60), - 'token': token, - }, - ]) - self.assertEqual(self.bc.database.list_of('authenticate.UserSetting'), [ - format_user_setting({ - 'lang': 'en', - 'id': model.user.id, - 'user_id': model.user.id, - }), - ]) + self.assertEqual( + self.bc.database.list_of("payments.Bag"), + [ + { + **self.bc.format.to_dict(model.bag), + "amount_per_month": price_per_month, + "amount_per_quarter": price_per_quarter, + "amount_per_half": price_per_half, + "amount_per_year": price_per_year, + "expires_at": UTC_NOW + timedelta(minutes=60), + "token": token, + }, + ], + ) + self.assertEqual( + self.bc.database.list_of("authenticate.UserSetting"), + [ + format_user_setting( + { + "lang": "en", + "id": model.user.id, + "user_id": model.user.id, + } + ), + ], + ) self.bc.check.queryset_with_pks(model.bag.service_items.all(), []) self.bc.check.queryset_with_pks(model.bag.plans.all(), [1]) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) """ 🔽🔽🔽 Get with one Bag, type is PREVIEW, passing type preview and many ServiceItem and Plan found, with the correct Currency and Price, Plan with trial_duration and price et 0 and PlanFinancing """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__with_bag__type_bag__passing_type_preview__items_found__plan_already_financed(self): bag = { - 'status': 'CHECKING', - 'type': 'PREVIEW', - 'plans': [], - 'service_items': [], + "status": "CHECKING", + "type": "PREVIEW", + "plans": [], + "service_items": [], } - currency = {'code': 'USD', 'name': 'United States dollar'} + currency = {"code": "USD", "name": "United States dollar"} plan = { - 'price_per_month': 0, - 'price_per_quarter': 0, - 'price_per_half': 0, - 'price_per_year': 0, - 'is_renewable': False, - 'time_of_life': random.randint(1, 100), - 'time_of_life_unit': random.choice(['DAY', 'WEEK', 'MONTH', 'YEAR']), - 'trial_duration': random.randint(1, 10), + "price_per_month": 0, + "price_per_quarter": 0, + "price_per_half": 0, + "price_per_year": 0, + "is_renewable": False, + "time_of_life": random.randint(1, 100), + "time_of_life_unit": random.choice(["DAY", "WEEK", "MONTH", "YEAR"]), + "trial_duration": random.randint(1, 10), } service = { - 'price_per_unit': random.random() * 100, + "price_per_unit": random.random() * 100, } how_many1 = random.randint(1, 5) how_many2 = random.choice([x for x in range(1, 6) if x != how_many1]) - service_item = {'how_many': how_many1} + service_item = {"how_many": how_many1} plan_financing = { - 'valid_until': UTC_NOW - timedelta(seconds=1), - 'plan_expires_at': UTC_NOW - timedelta(seconds=1), - 'monthly_price': random.randint(1, 100), - } - academy = {'available_as_saas': True} - - model = self.bc.database.create(user=1, - bag=bag, - academy=academy, - plan_financing=plan_financing, - cohort=1, - cohort_set=1, - service_item=service_item, - service=service, - plan=plan, - plan_service_item=1, - currency=currency) + "valid_until": UTC_NOW - timedelta(seconds=1), + "plan_expires_at": UTC_NOW - timedelta(seconds=1), + "monthly_price": random.randint(1, 100), + } + academy = 
{"available_as_saas": True} + + model = self.bc.database.create( + user=1, + bag=bag, + academy=academy, + plan_financing=plan_financing, + cohort=1, + cohort_set=1, + service_item=service_item, + service=service, + plan=plan, + plan_service_item=1, + currency=currency, + ) self.client.force_authenticate(model.user) - service_item = self.bc.database.get('payments.ServiceItem', 1, dict=False) + service_item = self.bc.database.get("payments.ServiceItem", 1, dict=False) service_item.how_many = how_many2 - url = reverse_lazy('payments:checking') + url = reverse_lazy("payments:checking") data = { - 'academy': 1, - 'type': 'PREVIEW', - 'plans': [1], - 'cohort_set': 1, + "academy": 1, + "type": "PREVIEW", + "plans": [1], + "cohort_set": 1, } token = self.bc.random.string(lower=True, upper=True, number=True, size=40) - with patch('rest_framework.authtoken.models.Token.generate_key', MagicMock(return_value=token)): - response = self.client.put(url, data, format='json') + with patch("rest_framework.authtoken.models.Token.generate_key", MagicMock(return_value=token)): + response = self.client.put(url, data, format="json") json = response.json() - expected = {'detail': 'plan-already-financed', 'status_code': 400} + expected = {"detail": "plan-already-financed", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('payments.Bag'), [ - { - **self.bc.format.to_dict(model.bag), - }, - ]) - self.assertEqual(self.bc.database.list_of('authenticate.UserSetting'), [ - format_user_setting({ - 'lang': 'en', - 'id': model.user.id, - 'user_id': model.user.id, - }), - ]) + self.assertEqual( + self.bc.database.list_of("payments.Bag"), + [ + { + **self.bc.format.to_dict(model.bag), + }, + ], + ) + self.assertEqual( + self.bc.database.list_of("authenticate.UserSetting"), + [ + format_user_setting( + { + "lang": "en", + "id": model.user.id, + "user_id": model.user.id, + } + ), + ], + ) self.bc.check.queryset_with_pks(model.bag.service_items.all(), []) self.bc.check.queryset_with_pks(model.bag.plans.all(), []) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) """ 🔽🔽🔽 Get with one Bag, type is PREVIEW, passing type preview and many ServiceItem and Plan found, @@ -1509,95 +1676,108 @@ def test__with_bag__type_bag__passing_type_preview__items_found__plan_already_fi future """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__with_bag__type_bag__passing_type_preview__items_found__plan_already_bought__cancelled(self): bag = { - 'status': 'CHECKING', - 'type': 'PREVIEW', - 'plans': [], - 'service_items': [], + "status": "CHECKING", + "type": "PREVIEW", + "plans": [], + "service_items": [], } - currency = {'code': 'USD', 'name': 'United States dollar'} + currency = {"code": "USD", "name": "United States dollar"} plan = { - 'price_per_month': random.random() * 100, - 'price_per_quarter': random.random() * 100, - 'price_per_half': random.random() * 100, - 'price_per_year': random.random() * 100, - 'is_renewable': True, - 'time_of_life': 0, - 'time_of_life_unit': None, - 'trial_duration': random.randint(1, 10), + "price_per_month": random.random() * 100, + 
"price_per_quarter": random.random() * 100, + "price_per_half": random.random() * 100, + "price_per_year": random.random() * 100, + "is_renewable": True, + "time_of_life": 0, + "time_of_life_unit": None, + "trial_duration": random.randint(1, 10), } service = { - 'price_per_unit': random.random() * 100, + "price_per_unit": random.random() * 100, } how_many1 = random.randint(1, 5) how_many2 = random.choice([x for x in range(1, 6) if x != how_many1]) - service_item = {'how_many': how_many1} + service_item = {"how_many": how_many1} subscription = { - 'valid_until': None, - 'next_payment_at': UTC_NOW + timedelta(seconds=1), - 'status': random.choice(['CANCELLED', 'DEPRECATED']), - } - academy = {'available_as_saas': True} - - model = self.bc.database.create(user=1, - bag=bag, - academy=academy, - subscription=subscription, - cohort=1, - cohort_set=1, - service_item=service_item, - service=service, - plan=plan, - plan_service_item=1, - financing_option=1, - currency=currency) + "valid_until": None, + "next_payment_at": UTC_NOW + timedelta(seconds=1), + "status": random.choice(["CANCELLED", "DEPRECATED"]), + } + academy = {"available_as_saas": True} + + model = self.bc.database.create( + user=1, + bag=bag, + academy=academy, + subscription=subscription, + cohort=1, + cohort_set=1, + service_item=service_item, + service=service, + plan=plan, + plan_service_item=1, + financing_option=1, + currency=currency, + ) self.client.force_authenticate(model.user) - service_item = self.bc.database.get('payments.ServiceItem', 1, dict=False) + service_item = self.bc.database.get("payments.ServiceItem", 1, dict=False) service_item.how_many = how_many2 - url = reverse_lazy('payments:checking') + url = reverse_lazy("payments:checking") data = { - 'academy': 1, - 'type': 'PREVIEW', - 'plans': [1], - 'cohort_set': 1, + "academy": 1, + "type": "PREVIEW", + "plans": [1], + "cohort_set": 1, } token = self.bc.random.string(lower=True, upper=True, number=True, size=40) - with patch('rest_framework.authtoken.models.Token.generate_key', MagicMock(return_value=token)): - response = self.client.put(url, data, format='json') + with patch("rest_framework.authtoken.models.Token.generate_key", MagicMock(return_value=token)): + response = self.client.put(url, data, format="json") json = response.json() - expected = {'detail': 'plan-already-bought', 'status_code': 400} + expected = {"detail": "plan-already-bought", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('payments.Bag'), [ - { - **self.bc.format.to_dict(model.bag), - }, - ]) - self.assertEqual(self.bc.database.list_of('authenticate.UserSetting'), [ - format_user_setting({ - 'lang': 'en', - 'id': model.user.id, - 'user_id': model.user.id, - }), - ]) + self.assertEqual( + self.bc.database.list_of("payments.Bag"), + [ + { + **self.bc.format.to_dict(model.bag), + }, + ], + ) + self.assertEqual( + self.bc.database.list_of("authenticate.UserSetting"), + [ + format_user_setting( + { + "lang": "en", + "id": model.user.id, + "user_id": model.user.id, + } + ), + ], + ) self.bc.check.queryset_with_pks(model.bag.service_items.all(), []) self.bc.check.queryset_with_pks(model.bag.plans.all(), []) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", 
related_id=1), + ], + ) """ 🔽🔽🔽 Get with one Bag, type is PREVIEW, passing type preview and many ServiceItem and Plan found, @@ -1605,154 +1785,169 @@ def test__with_bag__type_bag__passing_type_preview__items_found__plan_already_bo future """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__with_bag__type_bag__passing_type_preview__items_found__plan_already_bought__no_cancelled(self): bag = { - 'status': 'CHECKING', - 'type': 'PREVIEW', - 'plans': [], - 'service_items': [], + "status": "CHECKING", + "type": "PREVIEW", + "plans": [], + "service_items": [], } - currency = {'code': 'USD', 'name': 'United States dollar'} + currency = {"code": "USD", "name": "United States dollar"} plan = { - 'price_per_month': random.random() * 100, - 'price_per_quarter': random.random() * 100, - 'price_per_half': random.random() * 100, - 'price_per_year': random.random() * 100, - 'is_renewable': True, - 'time_of_life': 0, - 'time_of_life_unit': None, - 'trial_duration': random.randint(1, 10), + "price_per_month": random.random() * 100, + "price_per_quarter": random.random() * 100, + "price_per_half": random.random() * 100, + "price_per_year": random.random() * 100, + "is_renewable": True, + "time_of_life": 0, + "time_of_life_unit": None, + "trial_duration": random.randint(1, 10), } service = { - 'price_per_unit': random.random() * 100, + "price_per_unit": random.random() * 100, } how_many1 = random.randint(1, 5) how_many2 = random.choice([x for x in range(1, 6) if x != how_many1]) - service_item = {'how_many': how_many1} + service_item = {"how_many": how_many1} subscription = { - 'valid_until': UTC_NOW + timedelta(seconds=1), - 'status': random.choice(['CANCELLED', 'ACTIVE', 'DEPRECATED', 'PAYMENT_ISSUE', 'ERROR']), - } - academy = {'available_as_saas': True} - - model = self.bc.database.create(user=1, - bag=bag, - academy=academy, - subscription=subscription, - cohort=1, - cohort_set=1, - service_item=service_item, - service=service, - plan=plan, - plan_service_item=1, - financing_option=1, - currency=currency) + "valid_until": UTC_NOW + timedelta(seconds=1), + "status": random.choice(["CANCELLED", "ACTIVE", "DEPRECATED", "PAYMENT_ISSUE", "ERROR"]), + } + academy = {"available_as_saas": True} + + model = self.bc.database.create( + user=1, + bag=bag, + academy=academy, + subscription=subscription, + cohort=1, + cohort_set=1, + service_item=service_item, + service=service, + plan=plan, + plan_service_item=1, + financing_option=1, + currency=currency, + ) self.client.force_authenticate(model.user) - service_item = self.bc.database.get('payments.ServiceItem', 1, dict=False) + service_item = self.bc.database.get("payments.ServiceItem", 1, dict=False) service_item.how_many = how_many2 - url = reverse_lazy('payments:checking') + url = reverse_lazy("payments:checking") data = { - 'academy': 1, - 'type': 'PREVIEW', - 'plans': [1], - 'cohort_set': 1, + "academy": 1, + "type": "PREVIEW", + "plans": [1], + "cohort_set": 1, } token = self.bc.random.string(lower=True, upper=True, number=True, size=40) - with patch('rest_framework.authtoken.models.Token.generate_key', MagicMock(return_value=token)): - response = self.client.put(url, data, format='json') + with patch("rest_framework.authtoken.models.Token.generate_key", MagicMock(return_value=token)): + response = self.client.put(url, data, format="json") json = response.json() - expected = {'detail': 'plan-already-bought', 'status_code': 400} + expected = {"detail": 
"plan-already-bought", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('payments.Bag'), [ - { - **self.bc.format.to_dict(model.bag), - }, - ]) - self.assertEqual(self.bc.database.list_of('authenticate.UserSetting'), [ - format_user_setting({ - 'lang': 'en', - 'id': model.user.id, - 'user_id': model.user.id, - }), - ]) + self.assertEqual( + self.bc.database.list_of("payments.Bag"), + [ + { + **self.bc.format.to_dict(model.bag), + }, + ], + ) + self.assertEqual( + self.bc.database.list_of("authenticate.UserSetting"), + [ + format_user_setting( + { + "lang": "en", + "id": model.user.id, + "user_id": model.user.id, + } + ), + ], + ) self.bc.check.queryset_with_pks(model.bag.service_items.all(), []) self.bc.check.queryset_with_pks(model.bag.plans.all(), []) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) # When: Passing just the plan in the body without academy # -> and the academy have a currency # Then: It should infer the academy from the plan to fill the bag - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__inferring_academy_from_plan__no_linked(self): bag = { - 'status': 'CHECKING', - 'type': 'PREVIEW', - 'plans': [], - 'service_items': [], + "status": "CHECKING", + "type": "PREVIEW", + "plans": [], + "service_items": [], } - currency = {'code': 'USD', 'name': 'United States dollar'} + currency = {"code": "USD", "name": "United States dollar"} plan = { - 'price_per_month': 0, - 'price_per_quarter': 0, - 'price_per_half': 0, - 'price_per_year': 0, - 'is_renewable': False, - 'time_of_life': random.randint(1, 100), - 'time_of_life_unit': random.choice(['DAY', 'WEEK', 'MONTH', 'YEAR']), - 'trial_duration': random.randint(1, 10), + "price_per_month": 0, + "price_per_quarter": 0, + "price_per_half": 0, + "price_per_year": 0, + "is_renewable": False, + "time_of_life": random.randint(1, 100), + "time_of_life_unit": random.choice(["DAY", "WEEK", "MONTH", "YEAR"]), + "trial_duration": random.randint(1, 10), } service = { - 'price_per_unit': random.random() * 100, + "price_per_unit": random.random() * 100, } how_many1 = random.randint(1, 5) how_many2 = random.choice([x for x in range(1, 6) if x != how_many1]) - service_item = {'how_many': how_many1} - subscription = {'valid_until': UTC_NOW - timedelta(seconds=1)} - - model = self.bc.database.create(user=1, - bag=bag, - academy=1, - subscription=subscription, - skip_cohort=1, - service_item=service_item, - service=service, - plan=plan, - plan_service_item=1, - financing_option=1, - currency=currency) + service_item = {"how_many": how_many1} + subscription = {"valid_until": UTC_NOW - timedelta(seconds=1)} + + model = self.bc.database.create( + user=1, + bag=bag, + academy=1, + subscription=subscription, + skip_cohort=1, + service_item=service_item, + service=service, + plan=plan, + plan_service_item=1, + financing_option=1, + currency=currency, + ) self.client.force_authenticate(model.user) - service_item = self.bc.database.get('payments.ServiceItem', 1, dict=False) + service_item = self.bc.database.get("payments.ServiceItem", 1, dict=False) service_item.how_many = how_many2 - url = 
reverse_lazy('payments:checking') + url = reverse_lazy("payments:checking") data = { - 'type': 'PREVIEW', - 'plans': random.choices([model.plan.id, model.plan.slug], k=1), + "type": "PREVIEW", + "plans": random.choices([model.plan.id, model.plan.slug], k=1), } token = self.bc.random.string(lower=True, upper=True, number=True, size=40) - with patch('rest_framework.authtoken.models.Token.generate_key', MagicMock(return_value=token)): - response = self.client.put(url, data, format='json') + with patch("rest_framework.authtoken.models.Token.generate_key", MagicMock(return_value=token)): + response = self.client.put(url, data, format="json") json = response.json() @@ -1770,103 +1965,116 @@ def test__inferring_academy_from_plan__no_linked(self): [model.financing_option], model.currency, data={ - 'amount_per_month': price_per_month, - 'amount_per_quarter': price_per_quarter, - 'amount_per_half': price_per_half, - 'amount_per_year': price_per_year, - 'expires_at': self.bc.datetime.to_iso_string(UTC_NOW + timedelta(minutes=60)), - 'token': token, + "amount_per_month": price_per_month, + "amount_per_quarter": price_per_quarter, + "amount_per_half": price_per_half, + "amount_per_year": price_per_year, + "expires_at": self.bc.datetime.to_iso_string(UTC_NOW + timedelta(minutes=60)), + "token": token, }, ) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('payments.Bag'), [ - { - **self.bc.format.to_dict(model.bag), - 'amount_per_month': price_per_month, - 'amount_per_quarter': price_per_quarter, - 'amount_per_half': price_per_half, - 'amount_per_year': price_per_year, - 'expires_at': UTC_NOW + timedelta(minutes=60), - 'token': token, - }, - ]) - self.assertEqual(self.bc.database.list_of('authenticate.UserSetting'), [ - format_user_setting({ - 'lang': 'en', - 'id': model.user.id, - 'user_id': model.user.id, - }), - ]) + self.assertEqual( + self.bc.database.list_of("payments.Bag"), + [ + { + **self.bc.format.to_dict(model.bag), + "amount_per_month": price_per_month, + "amount_per_quarter": price_per_quarter, + "amount_per_half": price_per_half, + "amount_per_year": price_per_year, + "expires_at": UTC_NOW + timedelta(minutes=60), + "token": token, + }, + ], + ) + self.assertEqual( + self.bc.database.list_of("authenticate.UserSetting"), + [ + format_user_setting( + { + "lang": "en", + "id": model.user.id, + "user_id": model.user.id, + } + ), + ], + ) self.bc.check.queryset_with_pks(model.bag.service_items.all(), []) self.bc.check.queryset_with_pks(model.bag.plans.all(), [1]) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) # When: Passing just the plan in the body without academy # -> and the academy have a currency # -> this plan have a EventTypeSet # Then: It should infer the academy from the plan to fill the bag - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__inferring_academy_from_plan__linked_to_event_type_set(self): bag = { - 'status': 'CHECKING', - 'type': 'PREVIEW', - 'plans': [], - 'service_items': [], + "status": "CHECKING", + "type": "PREVIEW", + "plans": [], + "service_items": [], } - currency = {'code': 'USD', 'name': 'United States dollar'} + currency = 
{"code": "USD", "name": "United States dollar"} plan = { - 'price_per_month': 0, - 'price_per_quarter': 0, - 'price_per_half': 0, - 'price_per_year': 0, - 'is_renewable': False, - 'time_of_life': random.randint(1, 100), - 'time_of_life_unit': random.choice(['DAY', 'WEEK', 'MONTH', 'YEAR']), - 'trial_duration': random.randint(1, 10), + "price_per_month": 0, + "price_per_quarter": 0, + "price_per_half": 0, + "price_per_year": 0, + "is_renewable": False, + "time_of_life": random.randint(1, 100), + "time_of_life_unit": random.choice(["DAY", "WEEK", "MONTH", "YEAR"]), + "trial_duration": random.randint(1, 10), } service = { - 'price_per_unit': random.random() * 100, + "price_per_unit": random.random() * 100, } how_many1 = random.randint(1, 5) how_many2 = random.choice([x for x in range(1, 6) if x != how_many1]) - service_item = {'how_many': how_many1} - subscription = {'valid_until': UTC_NOW - timedelta(seconds=1)} - - model = self.bc.database.create(user=1, - bag=bag, - academy=1, - event_type_set=1, - subscription=subscription, - skip_cohort=1, - service_item=service_item, - service=service, - plan=plan, - plan_service_item=1, - financing_option=1, - currency=currency) + service_item = {"how_many": how_many1} + subscription = {"valid_until": UTC_NOW - timedelta(seconds=1)} + + model = self.bc.database.create( + user=1, + bag=bag, + academy=1, + event_type_set=1, + subscription=subscription, + skip_cohort=1, + service_item=service_item, + service=service, + plan=plan, + plan_service_item=1, + financing_option=1, + currency=currency, + ) self.client.force_authenticate(model.user) - service_item = self.bc.database.get('payments.ServiceItem', 1, dict=False) + service_item = self.bc.database.get("payments.ServiceItem", 1, dict=False) service_item.how_many = how_many2 - url = reverse_lazy('payments:checking') + url = reverse_lazy("payments:checking") data = { - 'type': 'PREVIEW', - 'plans': random.choices([model.plan.id, model.plan.slug], k=1), + "type": "PREVIEW", + "plans": random.choices([model.plan.id, model.plan.slug], k=1), } token = self.bc.random.string(lower=True, upper=True, number=True, size=40) - with patch('rest_framework.authtoken.models.Token.generate_key', MagicMock(return_value=token)): - response = self.client.put(url, data, format='json') + with patch("rest_framework.authtoken.models.Token.generate_key", MagicMock(return_value=token)): + response = self.client.put(url, data, format="json") json = response.json() @@ -1884,101 +2092,114 @@ def test__inferring_academy_from_plan__linked_to_event_type_set(self): [model.financing_option], model.currency, data={ - 'amount_per_month': price_per_month, - 'amount_per_quarter': price_per_quarter, - 'amount_per_half': price_per_half, - 'amount_per_year': price_per_year, - 'expires_at': self.bc.datetime.to_iso_string(UTC_NOW + timedelta(minutes=60)), - 'token': token, + "amount_per_month": price_per_month, + "amount_per_quarter": price_per_quarter, + "amount_per_half": price_per_half, + "amount_per_year": price_per_year, + "expires_at": self.bc.datetime.to_iso_string(UTC_NOW + timedelta(minutes=60)), + "token": token, }, ) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('payments.Bag'), [ - { - **self.bc.format.to_dict(model.bag), - 'amount_per_month': price_per_month, - 'amount_per_quarter': price_per_quarter, - 'amount_per_half': price_per_half, - 'amount_per_year': price_per_year, - 'expires_at': UTC_NOW + timedelta(minutes=60), - 'token': token, - }, - 
]) - self.assertEqual(self.bc.database.list_of('authenticate.UserSetting'), [ - format_user_setting({ - 'lang': 'en', - 'id': model.user.id, - 'user_id': model.user.id, - }), - ]) + self.assertEqual( + self.bc.database.list_of("payments.Bag"), + [ + { + **self.bc.format.to_dict(model.bag), + "amount_per_month": price_per_month, + "amount_per_quarter": price_per_quarter, + "amount_per_half": price_per_half, + "amount_per_year": price_per_year, + "expires_at": UTC_NOW + timedelta(minutes=60), + "token": token, + }, + ], + ) + self.assertEqual( + self.bc.database.list_of("authenticate.UserSetting"), + [ + format_user_setting( + { + "lang": "en", + "id": model.user.id, + "user_id": model.user.id, + } + ), + ], + ) self.bc.check.queryset_with_pks(model.bag.service_items.all(), []) self.bc.check.queryset_with_pks(model.bag.plans.all(), [1]) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) # When: Passing just the plan in the body without academy # -> and the academy have a currency # Then: It should infer the academy from the plan to fill the bag, # -> but the plan have a cohort linked, take it as selected cohort - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__inferring_academy_from_plan__linked_to_1_cohort(self): bag = { - 'status': 'CHECKING', - 'type': 'PREVIEW', - 'plans': [], - 'service_items': [], + "status": "CHECKING", + "type": "PREVIEW", + "plans": [], + "service_items": [], } - currency = {'code': 'USD', 'name': 'United States dollar'} + currency = {"code": "USD", "name": "United States dollar"} plan = { - 'price_per_month': 0, - 'price_per_quarter': 0, - 'price_per_half': 0, - 'price_per_year': 0, - 'is_renewable': False, - 'time_of_life': random.randint(1, 100), - 'time_of_life_unit': random.choice(['DAY', 'WEEK', 'MONTH', 'YEAR']), - 'trial_duration': random.randint(1, 10), + "price_per_month": 0, + "price_per_quarter": 0, + "price_per_half": 0, + "price_per_year": 0, + "is_renewable": False, + "time_of_life": random.randint(1, 100), + "time_of_life_unit": random.choice(["DAY", "WEEK", "MONTH", "YEAR"]), + "trial_duration": random.randint(1, 10), } service = { - 'price_per_unit': random.random() * 100, + "price_per_unit": random.random() * 100, } how_many1 = random.randint(1, 5) how_many2 = random.choice([x for x in range(1, 6) if x != how_many1]) - service_item = {'how_many': how_many1} - subscription = {'valid_until': UTC_NOW - timedelta(seconds=1)} - - model = self.bc.database.create(user=1, - bag=bag, - academy=1, - subscription=subscription, - service_item=service_item, - service=service, - plan=plan, - plan_service_item=1, - financing_option=1, - currency=currency) + service_item = {"how_many": how_many1} + subscription = {"valid_until": UTC_NOW - timedelta(seconds=1)} + + model = self.bc.database.create( + user=1, + bag=bag, + academy=1, + subscription=subscription, + service_item=service_item, + service=service, + plan=plan, + plan_service_item=1, + financing_option=1, + currency=currency, + ) self.client.force_authenticate(model.user) - service_item = self.bc.database.get('payments.ServiceItem', 1, dict=False) + service_item = self.bc.database.get("payments.ServiceItem", 1, dict=False) service_item.how_many = how_many2 
- url = reverse_lazy('payments:checking') + url = reverse_lazy("payments:checking") data = { - 'type': 'PREVIEW', - 'plans': random.choices([model.plan.id, model.plan.slug], k=1), + "type": "PREVIEW", + "plans": random.choices([model.plan.id, model.plan.slug], k=1), } token = self.bc.random.string(lower=True, upper=True, number=True, size=40) - with patch('rest_framework.authtoken.models.Token.generate_key', MagicMock(return_value=token)): - response = self.client.put(url, data, format='json') + with patch("rest_framework.authtoken.models.Token.generate_key", MagicMock(return_value=token)): + response = self.client.put(url, data, format="json") json = response.json() @@ -1996,104 +2217,117 @@ def test__inferring_academy_from_plan__linked_to_1_cohort(self): [model.financing_option], model.currency, data={ - 'amount_per_month': price_per_month, - 'amount_per_quarter': price_per_quarter, - 'amount_per_half': price_per_half, - 'amount_per_year': price_per_year, - 'expires_at': self.bc.datetime.to_iso_string(UTC_NOW + timedelta(minutes=60)), - 'token': token, + "amount_per_month": price_per_month, + "amount_per_quarter": price_per_quarter, + "amount_per_half": price_per_half, + "amount_per_year": price_per_year, + "expires_at": self.bc.datetime.to_iso_string(UTC_NOW + timedelta(minutes=60)), + "token": token, }, ) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('payments.Bag'), [ - { - **self.bc.format.to_dict(model.bag), - 'amount_per_month': price_per_month, - 'amount_per_quarter': price_per_quarter, - 'amount_per_half': price_per_half, - 'amount_per_year': price_per_year, - 'expires_at': UTC_NOW + timedelta(minutes=60), - 'token': token, - }, - ]) - self.assertEqual(self.bc.database.list_of('authenticate.UserSetting'), [ - format_user_setting({ - 'lang': 'en', - 'id': model.user.id, - 'user_id': model.user.id, - }), - ]) + self.assertEqual( + self.bc.database.list_of("payments.Bag"), + [ + { + **self.bc.format.to_dict(model.bag), + "amount_per_month": price_per_month, + "amount_per_quarter": price_per_quarter, + "amount_per_half": price_per_half, + "amount_per_year": price_per_year, + "expires_at": UTC_NOW + timedelta(minutes=60), + "token": token, + }, + ], + ) + self.assertEqual( + self.bc.database.list_of("authenticate.UserSetting"), + [ + format_user_setting( + { + "lang": "en", + "id": model.user.id, + "user_id": model.user.id, + } + ), + ], + ) self.bc.check.queryset_with_pks(model.bag.service_items.all(), []) self.bc.check.queryset_with_pks(model.bag.plans.all(), [1]) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) # When: Passing just the cohort in the body without academy # -> and the academy have a currency # Then: It should infer the academy from the cohort to fill the bag - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__inferring_academy_from_cohort(self): bag = { - 'status': 'CHECKING', - 'type': 'PREVIEW', - 'plans': [], - 'service_items': [], + "status": "CHECKING", + "type": "PREVIEW", + "plans": [], + "service_items": [], } - currency = {'code': 'USD', 'name': 'United States dollar'} + currency = {"code": "USD", "name": "United States 
dollar"} plan = { - 'price_per_month': 0, - 'price_per_quarter': 0, - 'price_per_half': 0, - 'price_per_year': 0, - 'is_renewable': False, - 'time_of_life': random.randint(1, 100), - 'time_of_life_unit': random.choice(['DAY', 'WEEK', 'MONTH', 'YEAR']), - 'trial_duration': random.randint(1, 10), + "price_per_month": 0, + "price_per_quarter": 0, + "price_per_half": 0, + "price_per_year": 0, + "is_renewable": False, + "time_of_life": random.randint(1, 100), + "time_of_life_unit": random.choice(["DAY", "WEEK", "MONTH", "YEAR"]), + "trial_duration": random.randint(1, 10), } service = { - 'price_per_unit': random.random() * 100, + "price_per_unit": random.random() * 100, } how_many1 = random.randint(1, 5) how_many2 = random.choice([x for x in range(1, 6) if x != how_many1]) - service_item = {'how_many': how_many1} - subscription = {'valid_until': UTC_NOW - timedelta(seconds=1)} - academy = {'available_as_saas': True} - - model = self.bc.database.create(user=1, - bag=bag, - academy=academy, - subscription=subscription, - cohort=1, - cohort_set=1, - service_item=service_item, - service=service, - plan=plan, - plan_service_item=1, - financing_option=1, - currency=currency) + service_item = {"how_many": how_many1} + subscription = {"valid_until": UTC_NOW - timedelta(seconds=1)} + academy = {"available_as_saas": True} + + model = self.bc.database.create( + user=1, + bag=bag, + academy=academy, + subscription=subscription, + cohort=1, + cohort_set=1, + service_item=service_item, + service=service, + plan=plan, + plan_service_item=1, + financing_option=1, + currency=currency, + ) self.client.force_authenticate(model.user) - service_item = self.bc.database.get('payments.ServiceItem', 1, dict=False) + service_item = self.bc.database.get("payments.ServiceItem", 1, dict=False) service_item.how_many = how_many2 - url = reverse_lazy('payments:checking') + url = reverse_lazy("payments:checking") data = { - 'type': 'PREVIEW', - 'plans': random.choices([model.plan.id, model.plan.slug], k=1), - 'cohort_set': random.choice([model.cohort_set.id, model.cohort_set.slug]), + "type": "PREVIEW", + "plans": random.choices([model.plan.id, model.plan.slug], k=1), + "cohort_set": random.choice([model.cohort_set.id, model.cohort_set.slug]), } token = self.bc.random.string(lower=True, upper=True, number=True, size=40) - with patch('rest_framework.authtoken.models.Token.generate_key', MagicMock(return_value=token)): - response = self.client.put(url, data, format='json') + with patch("rest_framework.authtoken.models.Token.generate_key", MagicMock(return_value=token)): + response = self.client.put(url, data, format="json") json = response.json() @@ -2111,112 +2345,125 @@ def test__inferring_academy_from_cohort(self): [model.financing_option], model.currency, data={ - 'amount_per_month': price_per_month, - 'amount_per_quarter': price_per_quarter, - 'amount_per_half': price_per_half, - 'amount_per_year': price_per_year, - 'expires_at': self.bc.datetime.to_iso_string(UTC_NOW + timedelta(minutes=60)), - 'token': token, + "amount_per_month": price_per_month, + "amount_per_quarter": price_per_quarter, + "amount_per_half": price_per_half, + "amount_per_year": price_per_year, + "expires_at": self.bc.datetime.to_iso_string(UTC_NOW + timedelta(minutes=60)), + "token": token, }, ) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('payments.Bag'), [ - { - **self.bc.format.to_dict(model.bag), - 'amount_per_month': price_per_month, - 'amount_per_quarter': 
price_per_quarter, - 'amount_per_half': price_per_half, - 'amount_per_year': price_per_year, - 'expires_at': UTC_NOW + timedelta(minutes=60), - 'token': token, - }, - ]) - self.assertEqual(self.bc.database.list_of('authenticate.UserSetting'), [ - format_user_setting({ - 'lang': 'en', - 'id': model.user.id, - 'user_id': model.user.id, - }), - ]) + self.assertEqual( + self.bc.database.list_of("payments.Bag"), + [ + { + **self.bc.format.to_dict(model.bag), + "amount_per_month": price_per_month, + "amount_per_quarter": price_per_quarter, + "amount_per_half": price_per_half, + "amount_per_year": price_per_year, + "expires_at": UTC_NOW + timedelta(minutes=60), + "token": token, + }, + ], + ) + self.assertEqual( + self.bc.database.list_of("authenticate.UserSetting"), + [ + format_user_setting( + { + "lang": "en", + "id": model.user.id, + "user_id": model.user.id, + } + ), + ], + ) self.bc.check.queryset_with_pks(model.bag.service_items.all(), []) self.bc.check.queryset_with_pks(model.bag.plans.all(), [1]) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) def test_exceding_coupon_limit(bc: Breathecode, client: APIClient): bag = { - 'status': 'CHECKING', - 'type': 'PREVIEW', - 'plans': [], - 'service_items': [], - 'coupons': [], + "status": "CHECKING", + "type": "PREVIEW", + "plans": [], + "service_items": [], + "coupons": [], } - currency = {'code': 'USD', 'name': 'United States dollar'} + currency = {"code": "USD", "name": "United States dollar"} plan = { - 'price_per_month': 0, - 'price_per_quarter': 0, - 'price_per_half': 0, - 'price_per_year': 0, - 'is_renewable': False, - 'time_of_life': random.randint(1, 100), - 'time_of_life_unit': random.choice(['DAY', 'WEEK', 'MONTH', 'YEAR']), - 'trial_duration': random.randint(1, 10), + "price_per_month": 0, + "price_per_quarter": 0, + "price_per_half": 0, + "price_per_year": 0, + "is_renewable": False, + "time_of_life": random.randint(1, 100), + "time_of_life_unit": random.choice(["DAY", "WEEK", "MONTH", "YEAR"]), + "trial_duration": random.randint(1, 10), } service = { - 'price_per_unit': random.random() * 100, + "price_per_unit": random.random() * 100, } how_many1 = random.randint(1, 5) how_many2 = random.choice([x for x in range(1, 6) if x != how_many1]) - service_item = {'how_many': how_many1} - subscription = {'valid_until': UTC_NOW - timedelta(seconds=1)} - academy = {'available_as_saas': True} + service_item = {"how_many": how_many1} + subscription = {"valid_until": UTC_NOW - timedelta(seconds=1)} + academy = {"available_as_saas": True} coupon = { - 'discount_value': random.random() * 100, - 'offered_at': None, - 'expires_at': None, - 'auto': False, - 'how_many_offers': random.randint(1, 5), + "discount_value": random.random() * 100, + "offered_at": None, + "expires_at": None, + "auto": False, + "how_many_offers": random.randint(1, 5), } - model = bc.database.create(user=1, - bag=bag, - coupon=(2, coupon), - academy=academy, - subscription=subscription, - cohort=1, - cohort_set=1, - service_item=service_item, - service=service, - plan=plan, - plan_service_item=1, - financing_option=1, - currency=currency) + model = bc.database.create( + user=1, + bag=bag, + coupon=(2, coupon), + academy=academy, + subscription=subscription, + cohort=1, + cohort_set=1, + service_item=service_item, + 
service=service, + plan=plan, + plan_service_item=1, + financing_option=1, + currency=currency, + ) client.force_authenticate(model.user) - service_item = bc.database.get('payments.ServiceItem', 1, dict=False) + service_item = bc.database.get("payments.ServiceItem", 1, dict=False) service_item.how_many = how_many2 - url = reverse_lazy('payments:checking') + url = reverse_lazy("payments:checking") data = { - 'academy': 1, - 'type': 'PREVIEW', - 'plans': [1], - 'cohort_set': 1, - 'coupons': [x.slug for x in model.coupon], + "academy": 1, + "type": "PREVIEW", + "plans": [1], + "cohort_set": 1, + "coupons": [x.slug for x in model.coupon], } token = bc.random.string(lower=True, upper=True, number=True, size=40) - with patch('rest_framework.authtoken.models.Token.generate_key', MagicMock(return_value=token)): - response = client.put(url, data, format='json') + with patch("rest_framework.authtoken.models.Token.generate_key", MagicMock(return_value=token)): + response = client.put(url, data, format="json") json = response.json() @@ -2225,114 +2472,127 @@ def test_exceding_coupon_limit(bc: Breathecode, client: APIClient): price_per_half = model.plan.price_per_half price_per_year = model.plan.price_per_year expected = { - 'detail': 'too-many-coupons', - 'status_code': 400, + "detail": "too-many-coupons", + "status_code": 400, } assert json == expected assert response.status_code == status.HTTP_400_BAD_REQUEST - assert bc.database.list_of('payments.Bag') == [ + assert bc.database.list_of("payments.Bag") == [ { **bc.format.to_dict(model.bag), - 'amount_per_month': price_per_month, - 'amount_per_quarter': price_per_quarter, - 'amount_per_half': price_per_half, - 'amount_per_year': price_per_year, - 'expires_at': None, - 'token': None, + "amount_per_month": price_per_month, + "amount_per_quarter": price_per_quarter, + "amount_per_half": price_per_half, + "amount_per_year": price_per_year, + "expires_at": None, + "token": None, }, ] - assert bc.database.list_of('authenticate.UserSetting') == [ - format_user_setting({ - 'lang': 'en', - 'id': model.user.id, - 'user_id': model.user.id, - }), + assert bc.database.list_of("authenticate.UserSetting") == [ + format_user_setting( + { + "lang": "en", + "id": model.user.id, + "user_id": model.user.id, + } + ), ] queryset_with_pks(model.bag.service_items.all(), []) queryset_with_pks(model.bag.plans.all(), []) queryset_with_pks(model.bag.coupons.all(), []) - bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) -@pytest.mark.parametrize('how_many_offers', [-1, 10]) -@pytest.mark.parametrize('offered_at, expires_at', [ - (None, None), - (UTC_NOW - timedelta(days=10), None), - (UTC_NOW - timedelta(days=10), UTC_NOW + timedelta(days=10)), -]) +@pytest.mark.parametrize("how_many_offers", [-1, 10]) +@pytest.mark.parametrize( + "offered_at, expires_at", + [ + (None, None), + (UTC_NOW - timedelta(days=10), None), + (UTC_NOW - timedelta(days=10), UTC_NOW + timedelta(days=10)), + ], +) def test_providing_coupons(bc: Breathecode, client: APIClient, how_many_offers, offered_at, expires_at): bag = { - 'status': 'CHECKING', - 'type': 'PREVIEW', - 'plans': [], - 'service_items': [], - 'coupons': [], + "status": "CHECKING", + "type": "PREVIEW", + "plans": [], + "service_items": [], + "coupons": [], } - currency = {'code': 'USD', 'name': 'United 
States dollar'} + currency = {"code": "USD", "name": "United States dollar"} plan = { - 'price_per_month': 0, - 'price_per_quarter': 0, - 'price_per_half': 0, - 'price_per_year': 0, - 'is_renewable': False, - 'time_of_life': random.randint(1, 100), - 'time_of_life_unit': random.choice(['DAY', 'WEEK', 'MONTH', 'YEAR']), - 'trial_duration': random.randint(1, 10), + "price_per_month": 0, + "price_per_quarter": 0, + "price_per_half": 0, + "price_per_year": 0, + "is_renewable": False, + "time_of_life": random.randint(1, 100), + "time_of_life_unit": random.choice(["DAY", "WEEK", "MONTH", "YEAR"]), + "trial_duration": random.randint(1, 10), } service = { - 'price_per_unit': random.random() * 100, + "price_per_unit": random.random() * 100, } how_many1 = random.randint(1, 5) how_many2 = random.choice([x for x in range(1, 6) if x != how_many1]) - service_item = {'how_many': how_many1} - subscription = {'valid_until': UTC_NOW - timedelta(seconds=1)} - academy = {'available_as_saas': True} - coupons = [{ - 'discount_value': random.random() * 100, - 'offered_at': offered_at, - 'expires_at': expires_at, - 'auto': auto, - 'how_many_offers': how_many_offers, - } for auto in [True, True, False]] - - model = bc.database.create(user=1, - bag=bag, - coupon=coupons, - academy=academy, - subscription=subscription, - cohort=1, - cohort_set=1, - service_item=service_item, - service=service, - plan=plan, - plan_service_item=1, - financing_option=1, - currency=currency) + service_item = {"how_many": how_many1} + subscription = {"valid_until": UTC_NOW - timedelta(seconds=1)} + academy = {"available_as_saas": True} + coupons = [ + { + "discount_value": random.random() * 100, + "offered_at": offered_at, + "expires_at": expires_at, + "auto": auto, + "how_many_offers": how_many_offers, + } + for auto in [True, True, False] + ] + + model = bc.database.create( + user=1, + bag=bag, + coupon=coupons, + academy=academy, + subscription=subscription, + cohort=1, + cohort_set=1, + service_item=service_item, + service=service, + plan=plan, + plan_service_item=1, + financing_option=1, + currency=currency, + ) client.force_authenticate(model.user) - service_item = bc.database.get('payments.ServiceItem', 1, dict=False) + service_item = bc.database.get("payments.ServiceItem", 1, dict=False) service_item.how_many = how_many2 - url = reverse_lazy('payments:checking') + url = reverse_lazy("payments:checking") data = { - 'academy': 1, - 'type': 'PREVIEW', - 'plans': [1], - 'cohort_set': 1, - 'coupons': [model.coupon[2].slug], + "academy": 1, + "type": "PREVIEW", + "plans": [1], + "cohort_set": 1, + "coupons": [model.coupon[2].slug], } token = bc.random.string(lower=True, upper=True, number=True, size=40) - with patch('rest_framework.authtoken.models.Token.generate_key', MagicMock(return_value=token)): - response = client.put(url, data, format='json') + with patch("rest_framework.authtoken.models.Token.generate_key", MagicMock(return_value=token)): + response = client.put(url, data, format="json") json = response.json() @@ -2351,149 +2611,169 @@ def test_providing_coupons(bc: Breathecode, client: APIClient, how_many_offers, model.currency, coupons=model.coupon, data={ - 'amount_per_month': price_per_month, - 'amount_per_quarter': price_per_quarter, - 'amount_per_half': price_per_half, - 'amount_per_year': price_per_year, - 'expires_at': bc.datetime.to_iso_string(UTC_NOW + timedelta(minutes=60)), - 'token': token, + "amount_per_month": price_per_month, + "amount_per_quarter": price_per_quarter, + "amount_per_half": price_per_half, + 
"amount_per_year": price_per_year, + "expires_at": bc.datetime.to_iso_string(UTC_NOW + timedelta(minutes=60)), + "token": token, }, ) assert json == expected assert response.status_code == status.HTTP_200_OK - assert bc.database.list_of('payments.Bag') == [ + assert bc.database.list_of("payments.Bag") == [ { **bc.format.to_dict(model.bag), - 'amount_per_month': price_per_month, - 'amount_per_quarter': price_per_quarter, - 'amount_per_half': price_per_half, - 'amount_per_year': price_per_year, - 'expires_at': UTC_NOW + timedelta(minutes=60), - 'token': token, + "amount_per_month": price_per_month, + "amount_per_quarter": price_per_quarter, + "amount_per_half": price_per_half, + "amount_per_year": price_per_year, + "expires_at": UTC_NOW + timedelta(minutes=60), + "token": token, }, ] - assert bc.database.list_of('authenticate.UserSetting') == [ - format_user_setting({ - 'lang': 'en', - 'id': model.user.id, - 'user_id': model.user.id, - }), + assert bc.database.list_of("authenticate.UserSetting") == [ + format_user_setting( + { + "lang": "en", + "id": model.user.id, + "user_id": model.user.id, + } + ), ] queryset_with_pks(model.bag.service_items.all(), []) queryset_with_pks(model.bag.plans.all(), [1]) queryset_with_pks(model.bag.coupons.all(), [1, 2, 3]) - bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) - - -@pytest.mark.parametrize('how_many_offers, how_many_subscriptions, how_many_plan_financings', [ - (-1, 0, 0), - (6, 0, 0), - (6, 5, 0), - (6, 0, 5), - (6, 3, 3), -]) -@pytest.mark.parametrize('offered_at, expires_at', [ - (None, None), - (UTC_NOW - timedelta(days=10), None), - (UTC_NOW - timedelta(days=10), UTC_NOW + timedelta(days=10)), -]) -def test_getting_coupons(bc: Breathecode, client: APIClient, how_many_offers, offered_at, expires_at, - how_many_subscriptions, how_many_plan_financings): + bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) + + +@pytest.mark.parametrize( + "how_many_offers, how_many_subscriptions, how_many_plan_financings", + [ + (-1, 0, 0), + (6, 0, 0), + (6, 5, 0), + (6, 0, 5), + (6, 3, 3), + ], +) +@pytest.mark.parametrize( + "offered_at, expires_at", + [ + (None, None), + (UTC_NOW - timedelta(days=10), None), + (UTC_NOW - timedelta(days=10), UTC_NOW + timedelta(days=10)), + ], +) +def test_getting_coupons( + bc: Breathecode, + client: APIClient, + how_many_offers, + offered_at, + expires_at, + how_many_subscriptions, + how_many_plan_financings, +): auto = True bags = [ { - 'status': 'CHECKING', - 'type': 'PREVIEW', - 'plans': [], - 'service_items': [], - 'coupons': [], - 'user_id': 1, + "status": "CHECKING", + "type": "PREVIEW", + "plans": [], + "service_items": [], + "coupons": [], + "user_id": 1, }, { - 'status': 'CHECKING', - 'type': 'PREVIEW', - 'plans': [], - 'service_items': [], - 'user_id': 2, - 'coupons': [1, 2, 3], + "status": "CHECKING", + "type": "PREVIEW", + "plans": [], + "service_items": [], + "user_id": 2, + "coupons": [1, 2, 3], }, ] - invoice = {'user_id': 2, 'bag_id': 2} + invoice = {"user_id": 2, "bag_id": 2} - currency = {'code': 'USD', 'name': 'United States dollar'} + currency = {"code": "USD", "name": "United States dollar"} plan = { - 'price_per_month': 0, - 'price_per_quarter': 0, - 'price_per_half': 0, - 'price_per_year': 0, - 'is_renewable': False, - 'time_of_life': random.randint(1, 100), - 'time_of_life_unit': random.choice(['DAY', 'WEEK', 
'MONTH', 'YEAR']), - 'trial_duration': random.randint(1, 10), + "price_per_month": 0, + "price_per_quarter": 0, + "price_per_half": 0, + "price_per_year": 0, + "is_renewable": False, + "time_of_life": random.randint(1, 100), + "time_of_life_unit": random.choice(["DAY", "WEEK", "MONTH", "YEAR"]), + "trial_duration": random.randint(1, 10), } service = { - 'price_per_unit': random.random() * 100, + "price_per_unit": random.random() * 100, } how_many1 = random.randint(1, 5) how_many2 = random.choice([x for x in range(1, 6) if x != how_many1]) - service_item = {'how_many': how_many1} + service_item = {"how_many": how_many1} subscription = { - 'user_id': 2, - 'valid_until': UTC_NOW - timedelta(seconds=1), + "user_id": 2, + "valid_until": UTC_NOW - timedelta(seconds=1), } plan_financing = { - 'user_id': 2, - 'plan_expires_at': UTC_NOW - timedelta(seconds=1), - 'monthly_price': random.random() * 100, + "user_id": 2, + "plan_expires_at": UTC_NOW - timedelta(seconds=1), + "monthly_price": random.random() * 100, } - academy = {'available_as_saas': True} + academy = {"available_as_saas": True} coupon = { - 'discount_value': random.random() * 100, - 'offered_at': offered_at, - 'expires_at': expires_at, - 'auto': auto, - 'how_many_offers': how_many_offers, + "discount_value": random.random() * 100, + "offered_at": offered_at, + "expires_at": expires_at, + "auto": auto, + "how_many_offers": how_many_offers, } - model = bc.database.create(user=2, - subscription=(how_many_subscriptions, subscription), - plan_financing=(how_many_plan_financings, plan_financing), - bag=bags, - invoice=invoice, - coupon=(3, coupon), - academy=academy, - cohort=1, - cohort_set=1, - service_item=service_item, - service=service, - plan=plan, - plan_service_item=1, - financing_option=1, - currency=currency) + model = bc.database.create( + user=2, + subscription=(how_many_subscriptions, subscription), + plan_financing=(how_many_plan_financings, plan_financing), + bag=bags, + invoice=invoice, + coupon=(3, coupon), + academy=academy, + cohort=1, + cohort_set=1, + service_item=service_item, + service=service, + plan=plan, + plan_service_item=1, + financing_option=1, + currency=currency, + ) client.force_authenticate(model.user[0]) activity_tasks.add_activity.delay.call_args_list = [] - service_item = bc.database.get('payments.ServiceItem', 1, dict=False) + service_item = bc.database.get("payments.ServiceItem", 1, dict=False) service_item.how_many = how_many2 - url = reverse_lazy('payments:checking') + url = reverse_lazy("payments:checking") data = { - 'academy': 1, - 'type': 'PREVIEW', - 'plans': [1], - 'cohort_set': 1, + "academy": 1, + "type": "PREVIEW", + "plans": [1], + "cohort_set": 1, } token = bc.random.string(lower=True, upper=True, number=True, size=40) - with patch('rest_framework.authtoken.models.Token.generate_key', MagicMock(return_value=token)): - response = client.put(url, data, format='json') + with patch("rest_framework.authtoken.models.Token.generate_key", MagicMock(return_value=token)): + response = client.put(url, data, format="json") json = response.json() @@ -2512,41 +2792,45 @@ def test_getting_coupons(bc: Breathecode, client: APIClient, how_many_offers, of model.currency, coupons=model.coupon, data={ - 'amount_per_month': price_per_month, - 'amount_per_quarter': price_per_quarter, - 'amount_per_half': price_per_half, - 'amount_per_year': price_per_year, - 'expires_at': bc.datetime.to_iso_string(UTC_NOW + timedelta(minutes=60)), - 'token': token, + "amount_per_month": price_per_month, + "amount_per_quarter": 
price_per_quarter, + "amount_per_half": price_per_half, + "amount_per_year": price_per_year, + "expires_at": bc.datetime.to_iso_string(UTC_NOW + timedelta(minutes=60)), + "token": token, }, ) assert json == expected assert response.status_code == status.HTTP_200_OK - assert bc.database.list_of('payments.Bag') == [ + assert bc.database.list_of("payments.Bag") == [ { **bc.format.to_dict(model.bag[0]), - 'amount_per_month': price_per_month, - 'amount_per_quarter': price_per_quarter, - 'amount_per_half': price_per_half, - 'amount_per_year': price_per_year, - 'expires_at': UTC_NOW + timedelta(minutes=60), - 'token': token, + "amount_per_month": price_per_month, + "amount_per_quarter": price_per_quarter, + "amount_per_half": price_per_half, + "amount_per_year": price_per_year, + "expires_at": UTC_NOW + timedelta(minutes=60), + "token": token, }, bc.format.to_dict(model.bag[1]), ] - assert bc.database.list_of('authenticate.UserSetting') == [ - format_user_setting({ - 'lang': 'en', - 'id': 1, - 'user_id': model.user[1].id, - }), - format_user_setting({ - 'lang': 'en', - 'id': 2, - 'user_id': model.user[0].id, - }), + assert bc.database.list_of("authenticate.UserSetting") == [ + format_user_setting( + { + "lang": "en", + "id": 1, + "user_id": model.user[1].id, + } + ), + format_user_setting( + { + "lang": "en", + "id": 2, + "user_id": model.user[0].id, + } + ), ] queryset_with_pks(model.bag[0].service_items.all(), []) @@ -2558,109 +2842,124 @@ def test_getting_coupons(bc: Breathecode, client: APIClient, how_many_offers, of bc.check.calls(activity_tasks.add_activity.delay.call_args_list, []) -@pytest.mark.parametrize('how_many_offers, offered_at, expires_at, how_many_subscriptions, how_many_plan_financings', [ - (0, None, None, 0, 0), - (5, None, None, 6, 0), - (5, None, None, 0, 6), - (5, None, None, 3, 3), - (6, UTC_NOW - timedelta(days=20), UTC_NOW - timedelta(days=10), 0, 0), - (5, UTC_NOW - timedelta(days=10), UTC_NOW + timedelta(days=10), 6, 0), - (5, UTC_NOW - timedelta(days=10), UTC_NOW + timedelta(days=10), 0, 6), - (5, UTC_NOW - timedelta(days=10), UTC_NOW + timedelta(days=10), 3, 3), -]) -def test_exausted_coupons(bc: Breathecode, client: APIClient, how_many_offers, offered_at, expires_at, - how_many_subscriptions, how_many_plan_financings): +@pytest.mark.parametrize( + "how_many_offers, offered_at, expires_at, how_many_subscriptions, how_many_plan_financings", + [ + (0, None, None, 0, 0), + (5, None, None, 6, 0), + (5, None, None, 0, 6), + (5, None, None, 3, 3), + (6, UTC_NOW - timedelta(days=20), UTC_NOW - timedelta(days=10), 0, 0), + (5, UTC_NOW - timedelta(days=10), UTC_NOW + timedelta(days=10), 6, 0), + (5, UTC_NOW - timedelta(days=10), UTC_NOW + timedelta(days=10), 0, 6), + (5, UTC_NOW - timedelta(days=10), UTC_NOW + timedelta(days=10), 3, 3), + ], +) +def test_exausted_coupons( + bc: Breathecode, + client: APIClient, + how_many_offers, + offered_at, + expires_at, + how_many_subscriptions, + how_many_plan_financings, +): bags = [ { - 'status': 'CHECKING', - 'type': 'PREVIEW', - 'plans': [], - 'service_items': [], - 'coupons': [], - 'user_id': 1, + "status": "CHECKING", + "type": "PREVIEW", + "plans": [], + "service_items": [], + "coupons": [], + "user_id": 1, }, { - 'status': 'CHECKING', - 'type': 'PREVIEW', - 'plans': [], - 'service_items': [], - 'user_id': 2, - 'coupons': [1, 2, 3], + "status": "CHECKING", + "type": "PREVIEW", + "plans": [], + "service_items": [], + "user_id": 2, + "coupons": [1, 2, 3], }, ] - invoice = {'user_id': 2, 'bag_id': 2} + invoice = {"user_id": 2, 
"bag_id": 2} - currency = {'code': 'USD', 'name': 'United States dollar'} + currency = {"code": "USD", "name": "United States dollar"} plan = { - 'price_per_month': 0, - 'price_per_quarter': 0, - 'price_per_half': 0, - 'price_per_year': 0, - 'is_renewable': False, - 'time_of_life': random.randint(1, 100), - 'time_of_life_unit': random.choice(['DAY', 'WEEK', 'MONTH', 'YEAR']), - 'trial_duration': random.randint(1, 10), + "price_per_month": 0, + "price_per_quarter": 0, + "price_per_half": 0, + "price_per_year": 0, + "is_renewable": False, + "time_of_life": random.randint(1, 100), + "time_of_life_unit": random.choice(["DAY", "WEEK", "MONTH", "YEAR"]), + "trial_duration": random.randint(1, 10), } service = { - 'price_per_unit': random.random() * 100, + "price_per_unit": random.random() * 100, } how_many1 = random.randint(1, 5) how_many2 = random.choice([x for x in range(1, 6) if x != how_many1]) - service_item = {'how_many': how_many1} + service_item = {"how_many": how_many1} subscription = { - 'user_id': 2, - 'valid_until': UTC_NOW - timedelta(seconds=1), + "user_id": 2, + "valid_until": UTC_NOW - timedelta(seconds=1), } plan_financing = { - 'user_id': 2, - 'plan_expires_at': UTC_NOW - timedelta(seconds=1), - 'monthly_price': random.random() * 100, + "user_id": 2, + "plan_expires_at": UTC_NOW - timedelta(seconds=1), + "monthly_price": random.random() * 100, } - academy = {'available_as_saas': True} - coupons = [{ - 'discount_value': random.random() * 100, - 'offered_at': offered_at, - 'expires_at': expires_at, - 'auto': auto, - 'how_many_offers': how_many_offers, - } for auto in [True, True, False]] - - model = bc.database.create(user=2, - bag=bags, - invoice=invoice, - coupon=coupons, - academy=academy, - subscription=(how_many_subscriptions, subscription), - plan_financing=(how_many_plan_financings, plan_financing), - cohort=1, - cohort_set=1, - service_item=service_item, - service=service, - plan=plan, - plan_service_item=1, - financing_option=1, - currency=currency) + academy = {"available_as_saas": True} + coupons = [ + { + "discount_value": random.random() * 100, + "offered_at": offered_at, + "expires_at": expires_at, + "auto": auto, + "how_many_offers": how_many_offers, + } + for auto in [True, True, False] + ] + + model = bc.database.create( + user=2, + bag=bags, + invoice=invoice, + coupon=coupons, + academy=academy, + subscription=(how_many_subscriptions, subscription), + plan_financing=(how_many_plan_financings, plan_financing), + cohort=1, + cohort_set=1, + service_item=service_item, + service=service, + plan=plan, + plan_service_item=1, + financing_option=1, + currency=currency, + ) client.force_authenticate(model.user[0]) activity_tasks.add_activity.delay.call_args_list = [] - service_item = bc.database.get('payments.ServiceItem', 1, dict=False) + service_item = bc.database.get("payments.ServiceItem", 1, dict=False) service_item.how_many = how_many2 - url = reverse_lazy('payments:checking') + url = reverse_lazy("payments:checking") data = { - 'academy': 1, - 'type': 'PREVIEW', - 'plans': [1], - 'cohort_set': 1, - 'coupons': [model.coupon[2].slug], + "academy": 1, + "type": "PREVIEW", + "plans": [1], + "cohort_set": 1, + "coupons": [model.coupon[2].slug], } token = bc.random.string(lower=True, upper=True, number=True, size=40) - with patch('rest_framework.authtoken.models.Token.generate_key', MagicMock(return_value=token)): - response = client.put(url, data, format='json') + with patch("rest_framework.authtoken.models.Token.generate_key", MagicMock(return_value=token)): + 
response = client.put(url, data, format="json") json = response.json() @@ -2678,41 +2977,45 @@ def test_exausted_coupons(bc: Breathecode, client: APIClient, how_many_offers, o [model.financing_option], model.currency, data={ - 'amount_per_month': price_per_month, - 'amount_per_quarter': price_per_quarter, - 'amount_per_half': price_per_half, - 'amount_per_year': price_per_year, - 'expires_at': bc.datetime.to_iso_string(UTC_NOW + timedelta(minutes=60)), - 'token': token, + "amount_per_month": price_per_month, + "amount_per_quarter": price_per_quarter, + "amount_per_half": price_per_half, + "amount_per_year": price_per_year, + "expires_at": bc.datetime.to_iso_string(UTC_NOW + timedelta(minutes=60)), + "token": token, }, ) assert json == expected assert response.status_code == status.HTTP_200_OK - assert bc.database.list_of('payments.Bag') == [ + assert bc.database.list_of("payments.Bag") == [ { **bc.format.to_dict(model.bag[0]), - 'amount_per_month': price_per_month, - 'amount_per_quarter': price_per_quarter, - 'amount_per_half': price_per_half, - 'amount_per_year': price_per_year, - 'expires_at': UTC_NOW + timedelta(minutes=60), - 'token': token, + "amount_per_month": price_per_month, + "amount_per_quarter": price_per_quarter, + "amount_per_half": price_per_half, + "amount_per_year": price_per_year, + "expires_at": UTC_NOW + timedelta(minutes=60), + "token": token, }, bc.format.to_dict(model.bag[1]), ] - assert bc.database.list_of('authenticate.UserSetting') == [ - format_user_setting({ - 'lang': 'en', - 'id': 1, - 'user_id': model.user[1].id, - }), - format_user_setting({ - 'lang': 'en', - 'id': 2, - 'user_id': model.user[0].id, - }), + assert bc.database.list_of("authenticate.UserSetting") == [ + format_user_setting( + { + "lang": "en", + "id": 1, + "user_id": model.user[1].id, + } + ), + format_user_setting( + { + "lang": "en", + "id": 2, + "user_id": model.user[0].id, + } + ), ] queryset_with_pks(model.bag[0].service_items.all(), []) queryset_with_pks(model.bag[0].plans.all(), [1]) diff --git a/breathecode/payments/tests/urls/tests_consumable_checkout.py b/breathecode/payments/tests/urls/tests_consumable_checkout.py index 656d49e90..9e2ab246a 100644 --- a/breathecode/payments/tests/urls/tests_consumable_checkout.py +++ b/breathecode/payments/tests/urls/tests_consumable_checkout.py @@ -17,82 +17,82 @@ def format_user_setting(data={}): return { - 'id': 1, - 'user_id': 1, - 'main_currency_id': None, - 'lang': 'en', + "id": 1, + "user_id": 1, + "main_currency_id": None, + "lang": "en", **data, } def format_consumable_item(data={}): return { - 'cohort_set_id': None, - 'event_type_set_id': None, - 'how_many': -1, - 'id': 1, - 'mentorship_service_set_id': None, - 'service_item_id': 0, - 'unit_type': 'UNIT', - 'user_id': 0, - 'valid_until': None, - 'sort_priority': 1, + "cohort_set_id": None, + "event_type_set_id": None, + "how_many": -1, + "id": 1, + "mentorship_service_set_id": None, + "service_item_id": 0, + "unit_type": "UNIT", + "user_id": 0, + "valid_until": None, + "sort_priority": 1, **data, } def format_bag_item(data={}): return { - 'academy_id': 1, - 'amount_per_half': 0.0, - 'amount_per_month': 0.0, - 'amount_per_quarter': 0.0, - 'amount_per_year': 0.0, - 'chosen_period': 'NO_SET', - 'currency_id': 1, - 'expires_at': None, - 'how_many_installments': 0, - 'id': 1, - 'is_recurrent': False, - 'status': 'PAID', - 'token': None, - 'type': 'CHARGE', - 'user_id': 1, - 'was_delivered': True, + "academy_id": 1, + "amount_per_half": 0.0, + "amount_per_month": 0.0, + "amount_per_quarter": 0.0, 
+ "amount_per_year": 0.0, + "chosen_period": "NO_SET", + "currency_id": 1, + "expires_at": None, + "how_many_installments": 0, + "id": 1, + "is_recurrent": False, + "status": "PAID", + "token": None, + "type": "CHARGE", + "user_id": 1, + "was_delivered": True, **data, } def format_invoice_item(data={}): return { - 'academy_id': 1, - 'amount': 0.0, - 'currency_id': 1, - 'bag_id': 1, - 'id': 1, - 'paid_at': UTC_NOW, - 'status': 'FULFILLED', - 'stripe_id': None, - 'user_id': 1, - 'refund_stripe_id': None, - 'refunded_at': None, + "academy_id": 1, + "amount": 0.0, + "currency_id": 1, + "bag_id": 1, + "id": 1, + "paid_at": UTC_NOW, + "status": "FULFILLED", + "stripe_id": None, + "user_id": 1, + "refund_stripe_id": None, + "refunded_at": None, **data, } def get_serializer(self, currency, user, data={}): return { - 'amount': 0, - 'currency': { - 'code': currency.code, - 'name': currency.name, + "amount": 0, + "currency": { + "code": currency.code, + "name": currency.name, }, - 'paid_at': self.bc.datetime.to_iso_string(UTC_NOW), - 'status': 'FULFILLED', - 'user': { - 'email': user.email, - 'first_name': user.first_name, - 'last_name': user.last_name, + "paid_at": self.bc.datetime.to_iso_string(UTC_NOW), + "status": "FULFILLED", + "user": { + "email": user.email, + "first_name": user.first_name, + "last_name": user.last_name, }, **data, } @@ -100,7 +100,7 @@ def get_serializer(self, currency, user, data={}): def get_discounted_price(academy_service, num_items) -> float: if num_items > academy_service.max_items: - raise ValueError('num_items cannot be greater than max_items') + raise ValueError("num_items cannot be greater than max_items") total_discount_ratio = 0 current_discount_ratio = academy_service.discount_ratio @@ -125,7 +125,7 @@ def get_discounted_price(academy_service, num_items) -> float: @pytest.fixture(autouse=True) def setup(monkeypatch): - monkeypatch.setattr(activity_tasks.add_activity, 'delay', MagicMock()) + monkeypatch.setattr(activity_tasks.add_activity, "delay", MagicMock()) yield @@ -133,238 +133,262 @@ class SignalTestSuite(PaymentsTestCase): # When: no auth # Then: return 401 def test__without_auth(self): - url = reverse_lazy('payments:consumable_checkout') + url = reverse_lazy("payments:consumable_checkout") response = self.client.post(url) json = response.json() - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) - self.assertEqual(self.bc.database.list_of('payments.Bag'), []) - self.assertEqual(self.bc.database.list_of('payments.Invoice'), []) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.assertEqual(self.bc.database.list_of('authenticate.UserSetting'), []) + self.assertEqual(self.bc.database.list_of("payments.Bag"), []) + self.assertEqual(self.bc.database.list_of("payments.Invoice"), []) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.assertEqual(self.bc.database.list_of("authenticate.UserSetting"), []) self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, []) # Given: 1 User # When: is auth and no service in body # Then: return 400 - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__no_service(self): model = self.bc.database.create(user=1) 
self.client.force_authenticate(model.user) - url = reverse_lazy('payments:consumable_checkout') + url = reverse_lazy("payments:consumable_checkout") response = self.client.post(url) self.client.force_authenticate(model.user) json = response.json() - expected = {'detail': 'service-is-required', 'status_code': 400} + expected = {"detail": "service-is-required", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('payments.Bag'), []) - self.assertEqual(self.bc.database.list_of('payments.Invoice'), []) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.assertEqual(self.bc.database.list_of('authenticate.UserSetting'), [ - format_user_setting({'lang': 'en'}), - ]) + self.assertEqual(self.bc.database.list_of("payments.Bag"), []) + self.assertEqual(self.bc.database.list_of("payments.Invoice"), []) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.assertEqual( + self.bc.database.list_of("authenticate.UserSetting"), + [ + format_user_setting({"lang": "en"}), + ], + ) self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, []) # Given: 1 User # When: is auth and service that not found in body # Then: return 400 - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__service_not_found(self): model = self.bc.database.create(user=1) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:consumable_checkout') - data = {'service': 1} - response = self.client.post(url, data, format='json') + url = reverse_lazy("payments:consumable_checkout") + data = {"service": 1} + response = self.client.post(url, data, format="json") self.client.force_authenticate(model.user) json = response.json() - expected = {'detail': 'service-not-found', 'status_code': 400} + expected = {"detail": "service-not-found", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('payments.Bag'), []) - self.assertEqual(self.bc.database.list_of('payments.Invoice'), []) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.assertEqual(self.bc.database.list_of('authenticate.UserSetting'), [ - format_user_setting({'lang': 'en'}), - ]) + self.assertEqual(self.bc.database.list_of("payments.Bag"), []) + self.assertEqual(self.bc.database.list_of("payments.Invoice"), []) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.assertEqual( + self.bc.database.list_of("authenticate.UserSetting"), + [ + format_user_setting({"lang": "en"}), + ], + ) self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, []) # Given: 1 User and 1 Service # When: is auth, with a service in body # Then: return 400 - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__with_service(self): model = self.bc.database.create(user=1, service=1) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:consumable_checkout') - data = {'service': 1} - response = self.client.post(url, data, format='json') + url = reverse_lazy("payments:consumable_checkout") + data = {"service": 1} + response = self.client.post(url, data, format="json") self.client.force_authenticate(model.user) json = response.json() - 
expected = {'detail': 'how-many-is-required', 'status_code': 400} + expected = {"detail": "how-many-is-required", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('payments.Bag'), []) - self.assertEqual(self.bc.database.list_of('payments.Invoice'), []) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.assertEqual(self.bc.database.list_of('authenticate.UserSetting'), [ - format_user_setting({'lang': 'en'}), - ]) + self.assertEqual(self.bc.database.list_of("payments.Bag"), []) + self.assertEqual(self.bc.database.list_of("payments.Invoice"), []) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.assertEqual( + self.bc.database.list_of("authenticate.UserSetting"), + [ + format_user_setting({"lang": "en"}), + ], + ) self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, []) # Given: 1 User and 1 Service # When: is auth, with a service and how_many in body # Then: return 400 - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__academy_is_required(self): model = self.bc.database.create(user=1, service=1) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:consumable_checkout') - data = {'service': 1, 'how_many': 1} - response = self.client.post(url, data, format='json') + url = reverse_lazy("payments:consumable_checkout") + data = {"service": 1, "how_many": 1} + response = self.client.post(url, data, format="json") self.client.force_authenticate(model.user) json = response.json() - expected = {'detail': 'academy-is-required', 'status_code': 400} + expected = {"detail": "academy-is-required", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('payments.Bag'), []) - self.assertEqual(self.bc.database.list_of('payments.Invoice'), []) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.assertEqual(self.bc.database.list_of('authenticate.UserSetting'), [ - format_user_setting({'lang': 'en'}), - ]) + self.assertEqual(self.bc.database.list_of("payments.Bag"), []) + self.assertEqual(self.bc.database.list_of("payments.Invoice"), []) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.assertEqual( + self.bc.database.list_of("authenticate.UserSetting"), + [ + format_user_setting({"lang": "en"}), + ], + ) self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, []) # Given: 1 User and 1 Service # When: is auth, with a service, how_many and academy in body, and academy not found # Then: return 400 - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__academy_not_found(self): model = self.bc.database.create(user=1, service=1) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:consumable_checkout') - data = {'service': 1, 'how_many': 1, 'academy': 1} - response = self.client.post(url, data, format='json') + url = reverse_lazy("payments:consumable_checkout") + data = {"service": 1, "how_many": 1, "academy": 1} + response = self.client.post(url, data, format="json") self.client.force_authenticate(model.user) json = response.json() - expected = {'detail': 'academy-not-found', 'status_code': 400} + expected = 
{"detail": "academy-not-found", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('payments.Bag'), []) - self.assertEqual(self.bc.database.list_of('payments.Invoice'), []) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.assertEqual(self.bc.database.list_of('authenticate.UserSetting'), [ - format_user_setting({'lang': 'en'}), - ]) + self.assertEqual(self.bc.database.list_of("payments.Bag"), []) + self.assertEqual(self.bc.database.list_of("payments.Invoice"), []) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.assertEqual( + self.bc.database.list_of("authenticate.UserSetting"), + [ + format_user_setting({"lang": "en"}), + ], + ) self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, []) # Given: 1 User, 1 Service and 1 Academy # When: is auth, with a service, how_many and academy in body, resource is required # Then: return 400 - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__resourse_is_required(self): model = self.bc.database.create(user=1, service=1, academy=1) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:consumable_checkout') - data = {'service': 1, 'how_many': 1, 'academy': 1} - response = self.client.post(url, data, format='json') + url = reverse_lazy("payments:consumable_checkout") + data = {"service": 1, "how_many": 1, "academy": 1} + response = self.client.post(url, data, format="json") self.client.force_authenticate(model.user) json = response.json() - expected = {'detail': 'mentorship-service-set-or-event-type-set-is-required', 'status_code': 400} + expected = {"detail": "mentorship-service-set-or-event-type-set-is-required", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('payments.Bag'), []) - self.assertEqual(self.bc.database.list_of('payments.Invoice'), []) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.assertEqual(self.bc.database.list_of('authenticate.UserSetting'), [ - format_user_setting({'lang': 'en'}), - ]) + self.assertEqual(self.bc.database.list_of("payments.Bag"), []) + self.assertEqual(self.bc.database.list_of("payments.Invoice"), []) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.assertEqual( + self.bc.database.list_of("authenticate.UserSetting"), + [ + format_user_setting({"lang": "en"}), + ], + ) self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, []) # Given: 1 User, 1 Service and 1 Academy # When: is auth, with a service, how_many, academy and event_type_set in body, # ----> service type is MENTORSHIP_SERVICE_SET # Then: return 400 - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__bad_service_type_for_event_type_set(self): - service = {'type': 'MENTORSHIP_SERVICE_SET'} + service = {"type": "MENTORSHIP_SERVICE_SET"} model = self.bc.database.create(user=1, service=service, academy=1, event_type_set=1) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:consumable_checkout') - data = {'service': 1, 'how_many': 1, 'academy': 1, 'event_type_set': 1} - response = self.client.post(url, data, format='json') + url = 
reverse_lazy("payments:consumable_checkout") + data = {"service": 1, "how_many": 1, "academy": 1, "event_type_set": 1} + response = self.client.post(url, data, format="json") self.client.force_authenticate(model.user) json = response.json() - expected = {'detail': 'bad-service-type-mentorship-service-set', 'status_code': 400} + expected = {"detail": "bad-service-type-mentorship-service-set", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('payments.Bag'), []) - self.assertEqual(self.bc.database.list_of('payments.Invoice'), []) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.assertEqual(self.bc.database.list_of('authenticate.UserSetting'), [ - format_user_setting({'lang': 'en'}), - ]) + self.assertEqual(self.bc.database.list_of("payments.Bag"), []) + self.assertEqual(self.bc.database.list_of("payments.Invoice"), []) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.assertEqual( + self.bc.database.list_of("authenticate.UserSetting"), + [ + format_user_setting({"lang": "en"}), + ], + ) self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, []) # Given: 1 User, 1 Service and 1 Academy # When: is auth, with a service, how_many, academy and mentorship_service_set in body, # ----> service type is EVENT_TYPE_SET # Then: return 400 - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__bad_service_type_for_mentorship_service_set(self): - service = {'type': 'EVENT_TYPE_SET'} + service = {"type": "EVENT_TYPE_SET"} model = self.bc.database.create(user=1, service=service, academy=1, mentorship_service_set=1) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:consumable_checkout') - data = {'service': 1, 'how_many': 1, 'academy': 1, 'mentorship_service_set': 1} - response = self.client.post(url, data, format='json') + url = reverse_lazy("payments:consumable_checkout") + data = {"service": 1, "how_many": 1, "academy": 1, "mentorship_service_set": 1} + response = self.client.post(url, data, format="json") self.client.force_authenticate(model.user) json = response.json() - expected = {'detail': 'bad-service-type-event-type-set', 'status_code': 400} + expected = {"detail": "bad-service-type-event-type-set", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('payments.Bag'), []) - self.assertEqual(self.bc.database.list_of('payments.Invoice'), []) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.assertEqual(self.bc.database.list_of('authenticate.UserSetting'), [ - format_user_setting({'lang': 'en'}), - ]) + self.assertEqual(self.bc.database.list_of("payments.Bag"), []) + self.assertEqual(self.bc.database.list_of("payments.Invoice"), []) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.assertEqual( + self.bc.database.list_of("authenticate.UserSetting"), + [ + format_user_setting({"lang": "en"}), + ], + ) self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, []) # Given: 1 User, 1 Service and 1 Academy @@ -372,36 +396,39 @@ def test__bad_service_type_for_mentorship_service_set(self): # ----> mentorship_service_set or event_type_set in body # ----> service type is COHORT # Then: return 400 - 
@patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__service_is_cohort(self): - service = {'type': 'COHORT_SET'} + service = {"type": "COHORT_SET"} kwargs = {} if random.randint(0, 1) == 0: - kwargs['mentorship_service_set'] = 1 + kwargs["mentorship_service_set"] = 1 else: - kwargs['event_type_set'] = 1 + kwargs["event_type_set"] = 1 model = self.bc.database.create(user=1, service=service, academy=1, **kwargs) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:consumable_checkout') - data = {'service': 1, 'how_many': 1, 'academy': 1, 'mentorship_service_set': 1} - response = self.client.post(url, data, format='json') + url = reverse_lazy("payments:consumable_checkout") + data = {"service": 1, "how_many": 1, "academy": 1, "mentorship_service_set": 1} + response = self.client.post(url, data, format="json") self.client.force_authenticate(model.user) json = response.json() - expected = {'detail': 'service-type-no-implemented', 'status_code': 400} + expected = {"detail": "service-type-no-implemented", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('payments.Bag'), []) - self.assertEqual(self.bc.database.list_of('payments.Invoice'), []) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.assertEqual(self.bc.database.list_of('authenticate.UserSetting'), [ - format_user_setting({'lang': 'en'}), - ]) + self.assertEqual(self.bc.database.list_of("payments.Bag"), []) + self.assertEqual(self.bc.database.list_of("payments.Invoice"), []) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.assertEqual( + self.bc.database.list_of("authenticate.UserSetting"), + [ + format_user_setting({"lang": "en"}), + ], + ) self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, []) # Given: 1 User, 1 Service and 1 Academy @@ -409,37 +436,40 @@ def test__service_is_cohort(self): # ----> mentorship_service_set and service type is MENTORSHIP_SERVICE_SET or # ----> event_type_set in body and service type is EVENT_TYPE_SET # Then: return 400 - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__academy_service_not_found(self): kwargs = {} if random.randint(0, 1) == 0: - service = {'type': 'MENTORSHIP_SERVICE_SET'} - kwargs['mentorship_service_set'] = 1 + service = {"type": "MENTORSHIP_SERVICE_SET"} + kwargs["mentorship_service_set"] = 1 else: - service = {'type': 'EVENT_TYPE_SET'} - kwargs['event_type_set'] = 1 + service = {"type": "EVENT_TYPE_SET"} + kwargs["event_type_set"] = 1 model = self.bc.database.create(user=1, service=service, academy=1, **kwargs) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:consumable_checkout') - data = {'service': 1, 'how_many': 1, 'academy': 1, **kwargs} - response = self.client.post(url, data, format='json') + url = reverse_lazy("payments:consumable_checkout") + data = {"service": 1, "how_many": 1, "academy": 1, **kwargs} + response = self.client.post(url, data, format="json") self.client.force_authenticate(model.user) json = response.json() - expected = {'detail': 'academy-service-not-found', 'status_code': 404} + expected = {"detail": "academy-service-not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, 
status.HTTP_404_NOT_FOUND) - self.assertEqual(self.bc.database.list_of('payments.Bag'), []) - self.assertEqual(self.bc.database.list_of('payments.Invoice'), []) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.assertEqual(self.bc.database.list_of('authenticate.UserSetting'), [ - format_user_setting({'lang': 'en'}), - ]) + self.assertEqual(self.bc.database.list_of("payments.Bag"), []) + self.assertEqual(self.bc.database.list_of("payments.Invoice"), []) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.assertEqual( + self.bc.database.list_of("authenticate.UserSetting"), + [ + format_user_setting({"lang": "en"}), + ], + ) self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, []) # Given: 1 User, 1 Service, 1 Academy and 1 AcademyService @@ -448,46 +478,49 @@ def test__academy_service_not_found(self): # ----> event_type_set in body and service type is EVENT_TYPE_SET, # ----> over academy_service max_items # Then: return 400 - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('stripe.Charge.create', MagicMock(return_value={'id': 1})) - @patch('stripe.Customer.create', MagicMock(return_value={'id': 1})) - @patch('stripe.Refund.create', MagicMock(return_value={'id': 1})) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("stripe.Charge.create", MagicMock(return_value={"id": 1})) + @patch("stripe.Customer.create", MagicMock(return_value={"id": 1})) + @patch("stripe.Refund.create", MagicMock(return_value={"id": 1})) def test__how_many_too_hight(self): kwargs = {} how_many = random.randint(2, 10) if random.randint(0, 1) == 0: - service = {'type': 'MENTORSHIP_SERVICE_SET'} - kwargs['mentorship_service_set'] = 1 + service = {"type": "MENTORSHIP_SERVICE_SET"} + kwargs["mentorship_service_set"] = 1 else: - service = {'type': 'EVENT_TYPE_SET'} - kwargs['event_type_set'] = 1 + service = {"type": "EVENT_TYPE_SET"} + kwargs["event_type_set"] = 1 - academy_service = {'price_per_unit': (0.5 + (random.random() / 2)) / how_many, 'max_amount': 11} + academy_service = {"price_per_unit": (0.5 + (random.random() / 2)) / how_many, "max_amount": 11} # how_many * 1 # how_many / 2 model = self.bc.database.create(user=1, service=service, academy=1, academy_service=academy_service, **kwargs) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:consumable_checkout') - data = {'service': 1, 'how_many': how_many, 'academy': 1, **kwargs} - response = self.client.post(url, data, format='json') + url = reverse_lazy("payments:consumable_checkout") + data = {"service": 1, "how_many": how_many, "academy": 1, **kwargs} + response = self.client.post(url, data, format="json") self.client.force_authenticate(model.user) json = response.json() - expected = {'detail': 'the-amount-of-items-is-too-high', 'status_code': 400} + expected = {"detail": "the-amount-of-items-is-too-high", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('payments.Bag'), []) - self.assertEqual(self.bc.database.list_of('payments.Invoice'), []) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.assertEqual(self.bc.database.list_of('authenticate.UserSetting'), [ - format_user_setting({'lang': 'en'}), - ]) + self.assertEqual(self.bc.database.list_of("payments.Bag"), []) + self.assertEqual(self.bc.database.list_of("payments.Invoice"), []) + 
self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.assertEqual( + self.bc.database.list_of("authenticate.UserSetting"), + [ + format_user_setting({"lang": "en"}), + ], + ) self.assertEqual(stripe.Charge.create.call_args_list, []) self.assertEqual(stripe.Customer.create.call_args_list, []) @@ -500,44 +533,47 @@ def test__how_many_too_hight(self): # ----> event_type_set in body and service type is EVENT_TYPE_SET, # ----> academy_service price_per_unit is less than 0.50 # Then: return 400 - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('stripe.Charge.create', MagicMock(return_value={'id': 1})) - @patch('stripe.Customer.create', MagicMock(return_value={'id': 1})) - @patch('stripe.Refund.create', MagicMock(return_value={'id': 1})) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("stripe.Charge.create", MagicMock(return_value={"id": 1})) + @patch("stripe.Customer.create", MagicMock(return_value={"id": 1})) + @patch("stripe.Refund.create", MagicMock(return_value={"id": 1})) def test__value_too_low(self): kwargs = {} how_many = random.randint(1, 10) if random.randint(0, 1) == 0: - service = {'type': 'MENTORSHIP_SERVICE_SET'} - kwargs['mentorship_service_set'] = 1 + service = {"type": "MENTORSHIP_SERVICE_SET"} + kwargs["mentorship_service_set"] = 1 else: - service = {'type': 'EVENT_TYPE_SET'} - kwargs['event_type_set'] = 1 + service = {"type": "EVENT_TYPE_SET"} + kwargs["event_type_set"] = 1 - academy_service = {'price_per_unit': random.random() / 2.01 / how_many, 'max_items': how_many} + academy_service = {"price_per_unit": random.random() / 2.01 / how_many, "max_items": how_many} model = self.bc.database.create(user=1, service=service, academy=1, academy_service=academy_service, **kwargs) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:consumable_checkout') - data = {'service': 1, 'how_many': how_many, 'academy': 1, **kwargs} - response = self.client.post(url, data, format='json') + url = reverse_lazy("payments:consumable_checkout") + data = {"service": 1, "how_many": how_many, "academy": 1, **kwargs} + response = self.client.post(url, data, format="json") self.client.force_authenticate(model.user) json = response.json() - expected = {'detail': 'the-amount-is-too-low', 'status_code': 400} + expected = {"detail": "the-amount-is-too-low", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('payments.Bag'), []) - self.assertEqual(self.bc.database.list_of('payments.Invoice'), []) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.assertEqual(self.bc.database.list_of('authenticate.UserSetting'), [ - format_user_setting({'lang': 'en'}), - ]) + self.assertEqual(self.bc.database.list_of("payments.Bag"), []) + self.assertEqual(self.bc.database.list_of("payments.Invoice"), []) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.assertEqual( + self.bc.database.list_of("authenticate.UserSetting"), + [ + format_user_setting({"lang": "en"}), + ], + ) self.assertEqual(stripe.Charge.create.call_args_list, []) self.assertEqual(stripe.Customer.create.call_args_list, []) @@ -549,41 +585,42 @@ def test__value_too_low(self): # ----> academy_service price_per_unit is greater than 0.50, # ----> over academy_service max_amount # Then: return 400 - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - 
@patch('stripe.Charge.create', MagicMock(return_value={'id': 1})) - @patch('stripe.Customer.create', MagicMock(return_value={'id': 1})) - @patch('stripe.Refund.create', MagicMock(return_value={'id': 1})) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("stripe.Charge.create", MagicMock(return_value={"id": 1})) + @patch("stripe.Customer.create", MagicMock(return_value={"id": 1})) + @patch("stripe.Refund.create", MagicMock(return_value={"id": 1})) def test__amount_to_hight(self): how_many = random.randint(1, 10) - service = {'type': 'MENTORSHIP_SERVICE_SET'} + service = {"type": "MENTORSHIP_SERVICE_SET"} price_per_unit = (random.random() + 0.50) * 100 / how_many - academy_service = {'price_per_unit': price_per_unit, 'max_items': how_many, 'bundle_size': 2} + academy_service = {"price_per_unit": price_per_unit, "max_items": how_many, "bundle_size": 2} - model = self.bc.database.create(user=1, - service=service, - academy=1, - academy_service=academy_service, - mentorship_service_set=1) + model = self.bc.database.create( + user=1, service=service, academy=1, academy_service=academy_service, mentorship_service_set=1 + ) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:consumable_checkout') - data = {'service': 1, 'how_many': how_many / 2, 'academy': 1, 'mentorship_service_set': 1} - response = self.client.post(url, data, format='json') + url = reverse_lazy("payments:consumable_checkout") + data = {"service": 1, "how_many": how_many / 2, "academy": 1, "mentorship_service_set": 1} + response = self.client.post(url, data, format="json") self.client.force_authenticate(model.user) json = response.json() - expected = {'detail': 'the-amount-is-too-high', 'status_code': 400} + expected = {"detail": "the-amount-is-too-high", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('payments.Bag'), []) - self.assertEqual(self.bc.database.list_of('payments.Invoice'), []) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.assertEqual(self.bc.database.list_of('authenticate.UserSetting'), [ - format_user_setting({'lang': 'en'}), - ]) + self.assertEqual(self.bc.database.list_of("payments.Bag"), []) + self.assertEqual(self.bc.database.list_of("payments.Invoice"), []) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.assertEqual( + self.bc.database.list_of("authenticate.UserSetting"), + [ + format_user_setting({"lang": "en"}), + ], + ) self.bc.check.calls(stripe.Charge.create.call_args_list, []) self.assertEqual(stripe.Customer.create.call_args_list, []) @@ -594,152 +631,207 @@ def test__amount_to_hight(self): # When: is auth, with a service, how_many, academy and mentorship_service_set in body, # ----> academy_service price_per_unit is greater than 0.50 # Then: return 400 - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('stripe.Charge.create', MagicMock(return_value={'id': 1})) - @patch('stripe.Customer.create', MagicMock(return_value={'id': 1})) - @patch('stripe.Refund.create', MagicMock(return_value={'id': 1})) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("stripe.Charge.create", MagicMock(return_value={"id": 1})) + @patch("stripe.Customer.create", MagicMock(return_value={"id": 1})) + @patch("stripe.Refund.create", MagicMock(return_value={"id": 1})) def test__x_mentorship_service_set_bought(self): how_many = random.randint(1, 
10) - service = {'type': 'MENTORSHIP_SERVICE_SET'} + service = {"type": "MENTORSHIP_SERVICE_SET"} price_per_unit = random.random() * 100 / how_many academy_service = { - 'price_per_unit': price_per_unit, - 'max_items': how_many, - 'bundle_size': 2, - 'max_amount': price_per_unit * how_many, - 'discount_ratio': random.random() * 0.2, + "price_per_unit": price_per_unit, + "max_items": how_many, + "bundle_size": 2, + "max_amount": price_per_unit * how_many, + "discount_ratio": random.random() * 0.2, } - model = self.bc.database.create(user=1, - service=service, - academy=1, - academy_service=academy_service, - mentorship_service_set=1) + model = self.bc.database.create( + user=1, service=service, academy=1, academy_service=academy_service, mentorship_service_set=1 + ) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:consumable_checkout') - data = {'service': 1, 'how_many': how_many, 'academy': 1, 'mentorship_service_set': 1} - response = self.client.post(url, data, format='json') + url = reverse_lazy("payments:consumable_checkout") + data = {"service": 1, "how_many": how_many, "academy": 1, "mentorship_service_set": 1} + response = self.client.post(url, data, format="json") self.client.force_authenticate(model.user) json = response.json() amount = get_discounted_price(model.academy_service, how_many) amount = math.ceil(amount) - expected = get_serializer(self, model.currency, model.user, data={ - 'amount': amount, - }) + expected = get_serializer( + self, + model.currency, + model.user, + data={ + "amount": amount, + }, + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - self.assertEqual(self.bc.database.list_of('payments.Bag'), [format_bag_item()]) - self.assertEqual(self.bc.database.list_of('payments.Invoice'), [ - format_invoice_item({ - 'stripe_id': '1', - 'amount': amount, - }), - ]) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), [ - format_consumable_item(data={ - 'mentorship_service_set_id': 1, - 'service_item_id': 1, - 'user_id': 1, - 'how_many': how_many, - }), - ]) - self.assertEqual(self.bc.database.list_of('authenticate.UserSetting'), [ - format_user_setting({'lang': 'en'}), - ]) - - self.bc.check.calls(stripe.Charge.create.call_args_list, [ - call(customer='1', - amount=amount, - currency=model.currency.code.lower(), - description=f'Can join to {int(how_many)} mentorships'), - ]) - self.assertEqual(stripe.Customer.create.call_args_list, [ - call(email=model.user.email, name=f'{model.user.first_name} {model.user.last_name}'), - ]) + self.assertEqual(self.bc.database.list_of("payments.Bag"), [format_bag_item()]) + self.assertEqual( + self.bc.database.list_of("payments.Invoice"), + [ + format_invoice_item( + { + "stripe_id": "1", + "amount": amount, + } + ), + ], + ) + self.assertEqual( + self.bc.database.list_of("payments.Consumable"), + [ + format_consumable_item( + data={ + "mentorship_service_set_id": 1, + "service_item_id": 1, + "user_id": 1, + "how_many": how_many, + } + ), + ], + ) + self.assertEqual( + self.bc.database.list_of("authenticate.UserSetting"), + [ + format_user_setting({"lang": "en"}), + ], + ) + + self.bc.check.calls( + stripe.Charge.create.call_args_list, + [ + call( + customer="1", + amount=amount, + currency=model.currency.code.lower(), + description=f"Can join to {int(how_many)} mentorships", + ), + ], + ) + self.assertEqual( + stripe.Customer.create.call_args_list, + [ + call(email=model.user.email, name=f"{model.user.first_name} {model.user.last_name}"), + ], + 
) self.assertEqual(stripe.Refund.create.call_args_list, []) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) # Given: 1 User, 1 Service, 1 Academy, 1 AcademyService and 1 EventTypeSet # When: is auth, with a service, how_many, academy and event_type_set in body, # ----> academy_service price_per_unit is greater than 0.50 # Then: return 400 - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('stripe.Charge.create', MagicMock(return_value={'id': 1})) - @patch('stripe.Customer.create', MagicMock(return_value={'id': 1})) - @patch('stripe.Refund.create', MagicMock(return_value={'id': 1})) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("stripe.Charge.create", MagicMock(return_value={"id": 1})) + @patch("stripe.Customer.create", MagicMock(return_value={"id": 1})) + @patch("stripe.Refund.create", MagicMock(return_value={"id": 1})) def test__x_event_type_set_bought(self): how_many = random.randint(1, 10) - service = {'type': 'EVENT_TYPE_SET'} + service = {"type": "EVENT_TYPE_SET"} price_per_unit = random.random() * 100 / how_many academy_service = { - 'price_per_unit': price_per_unit, - 'max_items': how_many, - 'bundle_size': 2, - 'max_amount': price_per_unit * how_many, - 'max_items': 11, - 'discount_ratio': random.random() * 0.2, + "price_per_unit": price_per_unit, + "max_items": how_many, + "bundle_size": 2, + "max_amount": price_per_unit * how_many, + "max_items": 11, + "discount_ratio": random.random() * 0.2, } - model = self.bc.database.create(user=1, - service=service, - academy=1, - academy_service=academy_service, - event_type_set=1) + model = self.bc.database.create( + user=1, service=service, academy=1, academy_service=academy_service, event_type_set=1 + ) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:consumable_checkout') - data = {'service': 1, 'how_many': how_many, 'academy': 1, 'event_type_set': 1} - response = self.client.post(url, data, format='json') + url = reverse_lazy("payments:consumable_checkout") + data = {"service": 1, "how_many": how_many, "academy": 1, "event_type_set": 1} + response = self.client.post(url, data, format="json") self.client.force_authenticate(model.user) amount = get_discounted_price(model.academy_service, how_many) amount = math.ceil(amount) json = response.json() - expected = get_serializer(self, model.currency, model.user, data={ - 'amount': amount, - }) + expected = get_serializer( + self, + model.currency, + model.user, + data={ + "amount": amount, + }, + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_201_CREATED) - self.assertEqual(self.bc.database.list_of('payments.Bag'), [format_bag_item()]) - self.assertEqual(self.bc.database.list_of('payments.Invoice'), - [format_invoice_item({ - 'stripe_id': '1', - 'amount': amount, - })]) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), [ - format_consumable_item(data={ - 'event_type_set_id': 1, - 'service_item_id': 1, - 'user_id': 1, - 'how_many': how_many, - }), - ]) - self.assertEqual(self.bc.database.list_of('authenticate.UserSetting'), [ - format_user_setting({'lang': 'en'}), - ]) - - self.bc.check.calls(stripe.Charge.create.call_args_list, [ - call(customer='1', - amount=amount, - 
currency=model.currency.code.lower(), - description=f'Can join to {int(how_many)} events'), - ]) - self.assertEqual(stripe.Customer.create.call_args_list, [ - call(email=model.user.email, name=f'{model.user.first_name} {model.user.last_name}'), - ]) + self.assertEqual(self.bc.database.list_of("payments.Bag"), [format_bag_item()]) + self.assertEqual( + self.bc.database.list_of("payments.Invoice"), + [ + format_invoice_item( + { + "stripe_id": "1", + "amount": amount, + } + ) + ], + ) + self.assertEqual( + self.bc.database.list_of("payments.Consumable"), + [ + format_consumable_item( + data={ + "event_type_set_id": 1, + "service_item_id": 1, + "user_id": 1, + "how_many": how_many, + } + ), + ], + ) + self.assertEqual( + self.bc.database.list_of("authenticate.UserSetting"), + [ + format_user_setting({"lang": "en"}), + ], + ) + + self.bc.check.calls( + stripe.Charge.create.call_args_list, + [ + call( + customer="1", + amount=amount, + currency=model.currency.code.lower(), + description=f"Can join to {int(how_many)} events", + ), + ], + ) + self.assertEqual( + stripe.Customer.create.call_args_list, + [ + call(email=model.user.email, name=f"{model.user.first_name} {model.user.last_name}"), + ], + ) self.assertEqual(stripe.Refund.create.call_args_list, []) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) diff --git a/breathecode/payments/tests/urls/tests_coupon.py b/breathecode/payments/tests/urls/tests_coupon.py index 5a8adc7b0..ba99ccae5 100644 --- a/breathecode/payments/tests/urls/tests_coupon.py +++ b/breathecode/payments/tests/urls/tests_coupon.py @@ -13,71 +13,70 @@ def setup(db): def get_serializer(bc: Breathecode, coupon): return { - 'auto': coupon.auto, - 'discount_type': coupon.discount_type, - 'discount_value': coupon.discount_value, - 'expires_at': coupon.expires_at, - 'offered_at': bc.datetime.to_iso_string(coupon.offered_at), - 'referral_type': coupon.referral_type, - 'referral_value': coupon.referral_value, - 'slug': coupon.slug, + "auto": coupon.auto, + "discount_type": coupon.discount_type, + "discount_value": coupon.discount_value, + "expires_at": coupon.expires_at, + "offered_at": bc.datetime.to_iso_string(coupon.offered_at), + "referral_type": coupon.referral_type, + "referral_value": coupon.referral_value, + "slug": coupon.slug, } -@pytest.mark.parametrize('plan_pk', [None, '']) +@pytest.mark.parametrize("plan_pk", [None, ""]) def test_missing_plan(bc: Breathecode, client: rfx.Client, plan_pk): - url = reverse_lazy('payments:coupon') + url = reverse_lazy("payments:coupon") if plan_pk is not None: - url += f'?plan={plan_pk}' + url += f"?plan={plan_pk}" response = client.get(url) json = response.json() - expected = {'detail': 'missing-plan', 'status_code': 404} + expected = {"detail": "missing-plan", "status_code": 404} assert json == expected assert response.status_code == status.HTTP_404_NOT_FOUND - assert bc.database.list_of('payments.Plan') == [] - assert bc.database.list_of('payments.Coupon') == [] + assert bc.database.list_of("payments.Plan") == [] + assert bc.database.list_of("payments.Coupon") == [] -@pytest.mark.parametrize('plan_pk', ['my-plan', 1]) +@pytest.mark.parametrize("plan_pk", ["my-plan", 1]) def test_plan_not_found(bc: Breathecode, client: rfx.Client, plan_pk): - url = reverse_lazy('payments:coupon') + url 
= reverse_lazy("payments:coupon") if plan_pk is not None: - url += f'?plan={plan_pk}' + url += f"?plan={plan_pk}" response = client.get(url) json = response.json() - expected = {'detail': 'plan-not-found', 'status_code': 404} + expected = {"detail": "plan-not-found", "status_code": 404} assert json == expected assert response.status_code == status.HTTP_404_NOT_FOUND - assert bc.database.list_of('payments.Plan') == [] - assert bc.database.list_of('payments.Coupon') == [] - - -@pytest.mark.parametrize('plan_pk', ['my-plan', 1]) -@pytest.mark.parametrize('coupons', [ - 0, - ([{ - 'slug': slug, - 'auto': False, - 'discount_value': 1 - } for slug in ['coupon3', 'coupon4']]), -]) + assert bc.database.list_of("payments.Plan") == [] + assert bc.database.list_of("payments.Coupon") == [] + + +@pytest.mark.parametrize("plan_pk", ["my-plan", 1]) +@pytest.mark.parametrize( + "coupons", + [ + 0, + ([{"slug": slug, "auto": False, "discount_value": 1} for slug in ["coupon3", "coupon4"]]), + ], +) def test_plan_found__coupons_not_found(bc: Breathecode, client: rfx.Client, plan_pk, coupons): plan = { - 'is_renewable': False, + "is_renewable": False, } if isinstance(plan_pk, str): - plan['slug'] = plan_pk + plan["slug"] = plan_pk model = bc.database.create(plan=plan, coupon=coupons) - url = reverse_lazy('payments:coupon') - url += f'?plan={plan_pk}&coupons=coupon1,coupon2' + url = reverse_lazy("payments:coupon") + url += f"?plan={plan_pk}&coupons=coupon1,coupon2" response = client.get(url) @@ -86,46 +85,37 @@ def test_plan_found__coupons_not_found(bc: Breathecode, client: rfx.Client, plan assert json == expected assert response.status_code == status.HTTP_200_OK - assert bc.database.list_of('payments.Plan') == [bc.format.to_dict(model.plan)] + assert bc.database.list_of("payments.Plan") == [bc.format.to_dict(model.plan)] if coupons: - assert bc.database.list_of('payments.Coupon') == bc.format.to_dict(model.coupon) + assert bc.database.list_of("payments.Coupon") == bc.format.to_dict(model.coupon) else: - assert bc.database.list_of('payments.Coupon') == [] - - -@pytest.mark.parametrize('plan_pk', ['my-plan', 1]) -@pytest.mark.parametrize('max, coupons', [ - (2, [{ - 'slug': slug, - 'auto': True, - 'discount_value': 1 - } for slug in ['coupon3', 'coupon4']]), - (1, [{ - 'slug': slug, - 'auto': False, - 'discount_value': 1 - } for slug in ['coupon1', 'coupon2']]), - (3, [{ - 'slug': slug, - 'auto': True, - 'discount_value': 1 - } for slug in ['coupon3', 'coupon4']] + [{ - 'slug': slug, - 'auto': False, - 'discount_value': 1 - } for slug in ['coupon1', 'coupon2']]), -]) + assert bc.database.list_of("payments.Coupon") == [] + + +@pytest.mark.parametrize("plan_pk", ["my-plan", 1]) +@pytest.mark.parametrize( + "max, coupons", + [ + (2, [{"slug": slug, "auto": True, "discount_value": 1} for slug in ["coupon3", "coupon4"]]), + (1, [{"slug": slug, "auto": False, "discount_value": 1} for slug in ["coupon1", "coupon2"]]), + ( + 3, + [{"slug": slug, "auto": True, "discount_value": 1} for slug in ["coupon3", "coupon4"]] + + [{"slug": slug, "auto": False, "discount_value": 1} for slug in ["coupon1", "coupon2"]], + ), + ], +) def test_plan_found__coupons_found(bc: Breathecode, client: rfx.Client, plan_pk, max, coupons): plan = { - 'is_renewable': False, + "is_renewable": False, } if isinstance(plan_pk, str): - plan['slug'] = plan_pk + plan["slug"] = plan_pk model = bc.database.create(plan=plan, coupon=coupons) - url = reverse_lazy('payments:coupon') - url += f'?plan={plan_pk}&coupons=coupon1,coupon2' + url = 
reverse_lazy("payments:coupon") + url += f"?plan={plan_pk}&coupons=coupon1,coupon2" response = client.get(url) @@ -134,5 +124,5 @@ def test_plan_found__coupons_found(bc: Breathecode, client: rfx.Client, plan_pk, assert json == expected assert response.status_code == status.HTTP_200_OK - assert bc.database.list_of('payments.Plan') == [bc.format.to_dict(model.plan)] - assert bc.database.list_of('payments.Coupon') == bc.format.to_dict(model.coupon) + assert bc.database.list_of("payments.Plan") == [bc.format.to_dict(model.plan)] + assert bc.database.list_of("payments.Coupon") == bc.format.to_dict(model.coupon) diff --git a/breathecode/payments/tests/urls/tests_eventtypeset.py b/breathecode/payments/tests/urls/tests_eventtypeset.py index 58d298616..9fff6b428 100644 --- a/breathecode/payments/tests/urls/tests_eventtypeset.py +++ b/breathecode/payments/tests/urls/tests_eventtypeset.py @@ -9,31 +9,31 @@ def academy_serializer(academy): return { - 'id': academy.id, - 'name': academy.name, - 'slug': academy.slug, + "id": academy.id, + "name": academy.name, + "slug": academy.slug, } def event_type_serializer(event_type, academy): return { # 'academy': academy_serializer(academy), - 'description': event_type.description, - 'lang': event_type.lang, - 'name': event_type.name, - 'id': event_type.id, - 'slug': event_type.slug, - 'icon_url': event_type.icon_url, - 'allow_shared_creation': event_type.allow_shared_creation, + "description": event_type.description, + "lang": event_type.lang, + "name": event_type.name, + "id": event_type.id, + "slug": event_type.slug, + "icon_url": event_type.icon_url, + "allow_shared_creation": event_type.allow_shared_creation, } def get_serializer(event_type_set, event_types, academy): return { - 'id': event_type_set.id, - 'slug': event_type_set.slug, - 'academy': academy_serializer(academy), - 'event_types': [event_type_serializer(event_type, academy) for event_type in event_types], + "id": event_type_set.id, + "slug": event_type_set.slug, + "academy": academy_serializer(academy), + "event_types": [event_type_serializer(event_type, academy) for event_type in event_types], } @@ -46,7 +46,7 @@ class SignalTestSuite(PaymentsTestCase): # When: get with no auth # Then: return 200 def test__no_auth(self): - url = reverse_lazy('payments:eventtypeset') + url = reverse_lazy("payments:eventtypeset") response = self.client.get(url) json = response.json() @@ -54,16 +54,16 @@ def test__no_auth(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('payments.EventTypeSet'), []) + self.assertEqual(self.bc.database.list_of("payments.EventTypeSet"), []) # Given: 2 EventTypeSet, 2 MentorshipService and 1 Academy # When: get with no auth # Then: return 200 with 2 EventTypeSet def test__two_items(self): - event_types = [{'icon_url': self.bc.fake.url()} for _ in range(2)] + event_types = [{"icon_url": self.bc.fake.url()} for _ in range(2)] model = self.bc.database.create(event_type_set=2, event_type=event_types) - url = reverse_lazy('payments:eventtypeset') + url = reverse_lazy("payments:eventtypeset") response = self.client.get(url) json = response.json() @@ -83,54 +83,56 @@ def test__two_items(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('payments.EventTypeSet'), + self.bc.database.list_of("payments.EventTypeSet"), self.bc.format.to_dict(model.event_type_set), ) # Given: compile_lookup was mocked # When: the 
mock is called # Then: the mock should be called with the correct arguments and does not raise an exception - @patch('breathecode.utils.api_view_extensions.extensions.lookup_extension.compile_lookup', - MagicMock(wraps=lookup_extension.compile_lookup)) + @patch( + "breathecode.utils.api_view_extensions.extensions.lookup_extension.compile_lookup", + MagicMock(wraps=lookup_extension.compile_lookup), + ) def test_lookup_extension(self): self.bc.request.set_headers(academy=1) - event_types = [{'icon_url': self.bc.fake.url()} for _ in range(2)] + event_types = [{"icon_url": self.bc.fake.url()} for _ in range(2)] model = self.bc.database.create(event_type_set=2, event_type=event_types) args, kwargs = self.bc.format.call( - 'en', - strings={'exact': ['event_types__lang']}, + "en", + strings={"exact": ["event_types__lang"]}, slugs=[ - '', - 'academy', - 'event_types', + "", + "academy", + "event_types", ], overwrite={ - 'event_type': 'event_types', - 'lang': 'event_types__lang', + "event_type": "event_types", + "lang": "event_types__lang", }, ) query = self.bc.format.lookup(*args, **kwargs) - url = reverse_lazy('payments:eventtypeset') + '?' + self.bc.format.querystring(query) + url = reverse_lazy("payments:eventtypeset") + "?" + self.bc.format.querystring(query) - self.assertEqual([x for x in query], ['id', 'slug', 'academy', 'event_type', 'lang']) + self.assertEqual([x for x in query], ["id", "slug", "academy", "event_type", "lang"]) response = self.client.get(url) json = response.json() expected = [] - for x in ['overwrite', 'custom_fields']: + for x in ["overwrite", "custom_fields"]: if x in kwargs: del kwargs[x] - for field in ['ids', 'slugs']: + for field in ["ids", "slugs"]: values = kwargs.get(field, tuple()) kwargs[field] = tuple(values) - for field in ['ints', 'strings', 'bools', 'datetimes']: + for field in ["ints", "strings", "bools", "datetimes"]: modes = kwargs.get(field, {}) for mode in modes: if not isinstance(kwargs[field][mode], tuple): @@ -138,13 +140,16 @@ def test_lookup_extension(self): kwargs[field] = frozenset(modes.items()) - self.bc.check.calls(lookup_extension.compile_lookup.call_args_list, [ - call(**kwargs), - ]) + self.bc.check.calls( + lookup_extension.compile_lookup.call_args_list, + [ + call(**kwargs), + ], + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, 200) self.assertEqual( - self.bc.database.list_of('payments.EventTypeSet'), + self.bc.database.list_of("payments.EventTypeSet"), self.bc.format.to_dict(model.event_type_set), ) diff --git a/breathecode/payments/tests/urls/tests_eventtypeset_id.py b/breathecode/payments/tests/urls/tests_eventtypeset_id.py index 48d459053..34584d02c 100644 --- a/breathecode/payments/tests/urls/tests_eventtypeset_id.py +++ b/breathecode/payments/tests/urls/tests_eventtypeset_id.py @@ -9,69 +9,66 @@ def academy_serializer(academy): return { - 'id': academy.id, - 'name': academy.name, - 'slug': academy.slug, + "id": academy.id, + "name": academy.name, + "slug": academy.slug, } def event_type_serializer(event_type, academy): return { # 'academy': academy_serializer(academy), - 'description': event_type.description, - 'lang': event_type.lang, - 'name': event_type.name, - 'id': event_type.id, - 'slug': event_type.slug, - 'icon_url': event_type.icon_url, - 'allow_shared_creation': event_type.allow_shared_creation, + "description": event_type.description, + "lang": event_type.lang, + "name": event_type.name, + "id": event_type.id, + "slug": event_type.slug, + "icon_url": event_type.icon_url, + 
"allow_shared_creation": event_type.allow_shared_creation, } def service_serializer(service): return { - 'groups': [], - 'private': service.private, - 'slug': service.slug, - 'title': service.title, - 'icon_url': service.icon_url, + "groups": [], + "private": service.private, + "slug": service.slug, + "title": service.title, + "icon_url": service.icon_url, } def currency_serializer(currency): return { - 'code': currency.code, - 'name': currency.name, + "code": currency.code, + "name": currency.name, } def academy_service_serialize(academy_service, academy, currency, service): return { - 'academy': academy_serializer(academy), - 'currency': currency_serializer(currency), - 'id': academy_service.id, - 'price_per_unit': academy_service.price_per_unit, - 'max_items': academy_service.max_items, - 'bundle_size': academy_service.bundle_size, - 'max_amount': academy_service.max_amount, - 'discount_ratio': academy_service.discount_ratio, - 'service': service_serializer(service), + "academy": academy_serializer(academy), + "currency": currency_serializer(currency), + "id": academy_service.id, + "price_per_unit": academy_service.price_per_unit, + "max_items": academy_service.max_items, + "bundle_size": academy_service.bundle_size, + "max_amount": academy_service.max_amount, + "discount_ratio": academy_service.discount_ratio, + "service": service_serializer(service), } def get_serializer(event_type_set, event_types, academy, academy_services, currency, service): return { - 'academy_services': [ + "academy_services": [ academy_service_serialize(academy_service, academy, currency, service) for academy_service in academy_services ], - 'id': - event_type_set.id, - 'slug': - event_type_set.slug, - 'academy': - academy_serializer(academy), - 'event_types': [event_type_serializer(event_type, academy) for event_type in event_types], + "id": event_type_set.id, + "slug": event_type_set.slug, + "academy": academy_serializer(academy), + "event_types": [event_type_serializer(event_type, academy) for event_type in event_types], } @@ -84,25 +81,25 @@ class SignalTestSuite(PaymentsTestCase): # When: get with no auth # Then: return 404 def test__no_auth(self): - url = reverse_lazy('payments:eventtypeset_id', kwargs={'event_type_set_id': 1}) + url = reverse_lazy("payments:eventtypeset_id", kwargs={"event_type_set_id": 1}) response = self.client.get(url) json = response.json() - expected = {'detail': 'not-found', 'status_code': 404} + expected = {"detail": "not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - self.assertEqual(self.bc.database.list_of('payments.EventTypeSet'), []) + self.assertEqual(self.bc.database.list_of("payments.EventTypeSet"), []) # Given: 1 EventTypeSet, 2 EventType and 1 Academy, 1 AcademyService, 1 Currency and 1 # Service # When: get with no auth # Then: return 200 with 1 EventTypeSet def test__one_item(self): - event_types = [{'icon_url': self.bc.fake.url()} for _ in range(2)] + event_types = [{"icon_url": self.bc.fake.url()} for _ in range(2)] model = self.bc.database.create(event_type_set=1, event_type=event_types, academy_service=1) - url = reverse_lazy('payments:eventtypeset_id', kwargs={'event_type_set_id': 1}) + url = reverse_lazy("payments:eventtypeset_id", kwargs={"event_type_set_id": 1}) response = self.client.get(url) json = response.json() @@ -117,6 +114,9 @@ def test__one_item(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - 
self.assertEqual(self.bc.database.list_of('payments.EventTypeSet'), [ - self.bc.format.to_dict(model.event_type_set), - ]) + self.assertEqual( + self.bc.database.list_of("payments.EventTypeSet"), + [ + self.bc.format.to_dict(model.event_type_set), + ], + ) diff --git a/breathecode/payments/tests/urls/tests_me_service_consumable.py b/breathecode/payments/tests/urls/tests_me_service_consumable.py index ebe78d117..3136ab93d 100644 --- a/breathecode/payments/tests/urls/tests_me_service_consumable.py +++ b/breathecode/payments/tests/urls/tests_me_service_consumable.py @@ -13,42 +13,42 @@ def format_user_setting(data={}): return { - 'id': 1, - 'user_id': 1, - 'main_currency_id': None, - 'lang': 'en', + "id": 1, + "user_id": 1, + "main_currency_id": None, + "lang": "en", **data, } def format_invoice_item(data={}): return { - 'academy_id': None, - 'amount': 0.0, - 'currency_id': 1, - 'bag_id': None, - 'id': 1, - 'paid_at': UTC_NOW, - 'status': 'FULFILLED', - 'stripe_id': None, - 'user_id': 1, + "academy_id": None, + "amount": 0.0, + "currency_id": 1, + "bag_id": None, + "id": 1, + "paid_at": UTC_NOW, + "status": "FULFILLED", + "stripe_id": None, + "user_id": 1, **data, } def get_serializer(self, currency, user, data={}): return { - 'amount': 0, - 'currency': { - 'code': currency.code, - 'name': currency.name, + "amount": 0, + "currency": { + "code": currency.code, + "name": currency.name, }, - 'paid_at': self.bc.datetime.to_iso_string(UTC_NOW), - 'status': 'FULFILLED', - 'user': { - 'email': user.email, - 'first_name': user.first_name, - 'last_name': user.last_name, + "paid_at": self.bc.datetime.to_iso_string(UTC_NOW), + "status": "FULFILLED", + "user": { + "email": user.email, + "first_name": user.first_name, + "last_name": user.last_name, }, **data, } @@ -56,39 +56,41 @@ def get_serializer(self, currency, user, data={}): def generate_amounts_by_time(): return { - 'amount_per_month': random.random() * 100 + 1, - 'amount_per_quarter': random.random() * 100 + 1, - 'amount_per_half': random.random() * 100 + 1, - 'amount_per_year': random.random() * 100 + 1, + "amount_per_month": random.random() * 100 + 1, + "amount_per_quarter": random.random() * 100 + 1, + "amount_per_half": random.random() * 100 + 1, + "amount_per_year": random.random() * 100 + 1, } def generate_three_amounts_by_time(): - l = random.shuffle([ - 0, - random.random() * 100 + 1, - random.random() * 100 + 1, - random.random() * 100 + 1, - ]) + l = random.shuffle( + [ + 0, + random.random() * 100 + 1, + random.random() * 100 + 1, + random.random() * 100 + 1, + ] + ) return { - 'amount_per_month': l[0], - 'amount_per_quarter': l[1], - 'amount_per_half': l[2], - 'amount_per_year': l[3], + "amount_per_month": l[0], + "amount_per_quarter": l[1], + "amount_per_half": l[2], + "amount_per_year": l[3], } def which_amount_is_zero(data={}): for key in data: - if key == 'amount_per_quarter': - return 'MONTH', 1 + if key == "amount_per_quarter": + return "MONTH", 1 CHOSEN_PERIOD = { - 'MONTH': 'amount_per_month', - 'QUARTER': 'amount_per_quarter', - 'HALF': 'amount_per_half', - 'YEAR': 'amount_per_year', + "MONTH": "amount_per_month", + "QUARTER": "amount_per_quarter", + "HALF": "amount_per_half", + "YEAR": "amount_per_year", } @@ -98,10 +100,10 @@ def get_amount_per_period(period, data): def serialize_consumable(consumable, data={}): return { - 'how_many': consumable.how_many, - 'id': consumable.id, - 'unit_type': consumable.unit_type, - 'valid_until': consumable.valid_until, + "how_many": consumable.how_many, + "id": consumable.id, + 
"unit_type": consumable.unit_type, + "valid_until": consumable.valid_until, **data, } @@ -112,212 +114,211 @@ class TestSignal(LegacyAPITestCase): """ def test__without_auth(self): - url = reverse_lazy('payments:me_service_consumable') + url = reverse_lazy("payments:me_service_consumable") response = self.client.get(url) json = response.json() - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} assert json == expected self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) """ 🔽🔽🔽 Get with zero Consumable """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__without_consumables(self): model = self.bc.database.create(user=1) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:me_service_consumable') + url = reverse_lazy("payments:me_service_consumable") response = self.client.get(url) self.client.force_authenticate(model.user) json = response.json() expected = { - 'mentorship_service_sets': [], - 'cohort_sets': [], - 'event_type_sets': [], + "mentorship_service_sets": [], + "cohort_sets": [], + "event_type_sets": [], } assert json == expected self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) """ 🔽🔽🔽 Get with one Consumable, how_many = 0 """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__one_consumable__how_many_is_zero(self): model = self.bc.database.create_v2(user=1, consumable=1) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:me_service_consumable') + url = reverse_lazy("payments:me_service_consumable") response = self.client.get(url) self.client.force_authenticate(model.user) json = response.json() expected = { - 'mentorship_service_sets': [], - 'cohort_sets': [], - 'event_type_sets': [], + "mentorship_service_sets": [], + "cohort_sets": [], + "event_type_sets": [], } assert json == expected self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), [ - self.bc.format.to_dict(model.consumable), - ]) + self.assertEqual( + self.bc.database.list_of("payments.Consumable"), + [ + self.bc.format.to_dict(model.consumable), + ], + ) """ 🔽🔽🔽 Get with nine Consumable and three Cohort, random how_many """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__nine_consumables__random_how_many__related_to_three_cohorts__without_cohorts_in_querystring(self): - consumables = [{'how_many': random.randint(1, 30), 'cohort_set_id': math.floor(n / 3) + 1} for n in range(9)] + consumables = [{"how_many": random.randint(1, 30), "cohort_set_id": math.floor(n / 3) + 1} for n in range(9)] belong_to1 = consumables[:3] belong_to2 = consumables[3:6] belong_to3 = consumables[6:] - how_many_belong_to1 = sum([x['how_many'] for x in belong_to1]) - how_many_belong_to2 = sum([x['how_many'] for x in belong_to2]) - how_many_belong_to3 = sum([x['how_many'] for x in 
belong_to3]) + how_many_belong_to1 = sum([x["how_many"] for x in belong_to1]) + how_many_belong_to2 = sum([x["how_many"] for x in belong_to2]) + how_many_belong_to3 = sum([x["how_many"] for x in belong_to3]) - academy = {'available_as_saas': True} + academy = {"available_as_saas": True} model = self.bc.database.create(user=1, consumable=consumables, cohort_set=3, academy=academy) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:me_service_consumable') + url = reverse_lazy("payments:me_service_consumable") response = self.client.get(url) self.client.force_authenticate(model.user) json = response.json() expected = { - 'mentorship_service_sets': [], - 'cohort_sets': [ + "mentorship_service_sets": [], + "cohort_sets": [ { - 'balance': { - 'unit': how_many_belong_to1 - }, - 'id': model.cohort_set[0].id, - 'slug': model.cohort_set[0].slug, - 'items': [serialize_consumable(model.consumable[n]) for n in range(9)], + "balance": {"unit": how_many_belong_to1}, + "id": model.cohort_set[0].id, + "slug": model.cohort_set[0].slug, + "items": [serialize_consumable(model.consumable[n]) for n in range(9)], }, { - 'balance': { - 'unit': how_many_belong_to2, + "balance": { + "unit": how_many_belong_to2, }, - 'id': model.cohort_set[1].id, - 'slug': model.cohort_set[1].slug, - 'items': [serialize_consumable(model.consumable[n]) for n in range(9)], + "id": model.cohort_set[1].id, + "slug": model.cohort_set[1].slug, + "items": [serialize_consumable(model.consumable[n]) for n in range(9)], }, { - 'balance': { - 'unit': how_many_belong_to3, + "balance": { + "unit": how_many_belong_to3, }, - 'id': model.cohort_set[2].id, - 'slug': model.cohort_set[2].slug, - 'items': [serialize_consumable(model.consumable[n]) for n in range(9)], + "id": model.cohort_set[2].id, + "slug": model.cohort_set[2].slug, + "items": [serialize_consumable(model.consumable[n]) for n in range(9)], }, ], - 'event_type_sets': [], + "event_type_sets": [], } assert json == expected self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('payments.Consumable'), + self.bc.database.list_of("payments.Consumable"), self.bc.format.to_dict(model.consumable), ) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__nine_consumables__random_how_many__related_to_three_cohorts__with_wrong_cohorts_in_querystring(self): - consumables = [{'how_many': random.randint(1, 30), 'cohort_set_id': math.floor(n / 3) + 1} for n in range(9)] + consumables = [{"how_many": random.randint(1, 30), "cohort_set_id": math.floor(n / 3) + 1} for n in range(9)] - academy = {'available_as_saas': True} + academy = {"available_as_saas": True} model = self.bc.database.create(user=1, consumable=consumables, cohort_set=3, academy=academy) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:me_service_consumable') + '?cohort_set=4,5,6' + url = reverse_lazy("payments:me_service_consumable") + "?cohort_set=4,5,6" response = self.client.get(url) self.client.force_authenticate(model.user) json = response.json() expected = { - 'mentorship_service_sets': [], - 'cohort_sets': [], - 'event_type_sets': [], + "mentorship_service_sets": [], + "cohort_sets": [], + "event_type_sets": [], } assert json == expected self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('payments.Consumable'), + self.bc.database.list_of("payments.Consumable"), 
self.bc.format.to_dict(model.consumable), ) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__nine_consumables__random_how_many__related_to_three_cohorts__with_cohorts_in_querystring(self): - consumables = [{'how_many': random.randint(1, 30), 'cohort_set_id': math.floor(n / 3) + 1} for n in range(9)] + consumables = [{"how_many": random.randint(1, 30), "cohort_set_id": math.floor(n / 3) + 1} for n in range(9)] belong_to1 = consumables[:3] belong_to2 = consumables[3:6] belong_to3 = consumables[6:] - how_many_belong_to1 = sum([x['how_many'] for x in belong_to1]) - how_many_belong_to2 = sum([x['how_many'] for x in belong_to2]) - how_many_belong_to3 = sum([x['how_many'] for x in belong_to3]) + how_many_belong_to1 = sum([x["how_many"] for x in belong_to1]) + how_many_belong_to2 = sum([x["how_many"] for x in belong_to2]) + how_many_belong_to3 = sum([x["how_many"] for x in belong_to3]) - academy = {'available_as_saas': True} + academy = {"available_as_saas": True} model = self.bc.database.create(user=1, consumable=consumables, cohort_set=3, academy=academy) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:me_service_consumable') + '?cohort_set=1,2,3' + url = reverse_lazy("payments:me_service_consumable") + "?cohort_set=1,2,3" response = self.client.get(url) self.client.force_authenticate(model.user) json = response.json() expected = { - 'mentorship_service_sets': [], - 'cohort_sets': [ + "mentorship_service_sets": [], + "cohort_sets": [ { - 'balance': { - 'unit': how_many_belong_to1 - }, - 'id': model.cohort_set[0].id, - 'slug': model.cohort_set[0].slug, - 'items': [serialize_consumable(model.consumable[n]) for n in range(9)], + "balance": {"unit": how_many_belong_to1}, + "id": model.cohort_set[0].id, + "slug": model.cohort_set[0].slug, + "items": [serialize_consumable(model.consumable[n]) for n in range(9)], }, { - 'balance': { - 'unit': how_many_belong_to2, + "balance": { + "unit": how_many_belong_to2, }, - 'id': model.cohort_set[1].id, - 'slug': model.cohort_set[1].slug, - 'items': [serialize_consumable(model.consumable[n]) for n in range(9)], + "id": model.cohort_set[1].id, + "slug": model.cohort_set[1].slug, + "items": [serialize_consumable(model.consumable[n]) for n in range(9)], }, { - 'balance': { - 'unit': how_many_belong_to3, + "balance": { + "unit": how_many_belong_to3, }, - 'id': model.cohort_set[2].id, - 'slug': model.cohort_set[2].slug, - 'items': [serialize_consumable(model.consumable[n]) for n in range(9)], + "id": model.cohort_set[2].id, + "slug": model.cohort_set[2].slug, + "items": [serialize_consumable(model.consumable[n]) for n in range(9)], }, ], - 'event_type_sets': [], + "event_type_sets": [], } assert json == expected self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('payments.Consumable'), + self.bc.database.list_of("payments.Consumable"), self.bc.format.to_dict(model.consumable), ) @@ -325,152 +326,149 @@ def test__nine_consumables__random_how_many__related_to_three_cohorts__with_coho 🔽🔽🔽 Get with nine Consumable and three MentorshipService, random how_many """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__nine_consumables__related_to_three_mentorship_services__without_cohorts_in_querystring(self): - consumables = [{ - 'how_many': random.randint(1, 30), - 'mentorship_service_set_id': 
math.floor(n / 3) + 1 - } for n in range(9)] + consumables = [ + {"how_many": random.randint(1, 30), "mentorship_service_set_id": math.floor(n / 3) + 1} for n in range(9) + ] belong_to1 = consumables[:3] belong_to2 = consumables[3:6] belong_to3 = consumables[6:] - how_many_belong_to1 = sum([x['how_many'] for x in belong_to1]) - how_many_belong_to2 = sum([x['how_many'] for x in belong_to2]) - how_many_belong_to3 = sum([x['how_many'] for x in belong_to3]) + how_many_belong_to1 = sum([x["how_many"] for x in belong_to1]) + how_many_belong_to2 = sum([x["how_many"] for x in belong_to2]) + how_many_belong_to3 = sum([x["how_many"] for x in belong_to3]) model = self.bc.database.create(user=1, consumable=consumables, mentorship_service_set=3) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:me_service_consumable') + url = reverse_lazy("payments:me_service_consumable") response = self.client.get(url) self.client.force_authenticate(model.user) json = response.json() expected = { - 'mentorship_service_sets': [ + "mentorship_service_sets": [ { - 'balance': { - 'unit': how_many_belong_to1, + "balance": { + "unit": how_many_belong_to1, }, - 'id': model.mentorship_service_set[0].id, - 'slug': model.mentorship_service_set[0].slug, - 'items': [serialize_consumable(model.consumable[n]) for n in range(9)], + "id": model.mentorship_service_set[0].id, + "slug": model.mentorship_service_set[0].slug, + "items": [serialize_consumable(model.consumable[n]) for n in range(9)], }, { - 'balance': { - 'unit': how_many_belong_to2, + "balance": { + "unit": how_many_belong_to2, }, - 'id': model.mentorship_service_set[1].id, - 'slug': model.mentorship_service_set[1].slug, - 'items': [serialize_consumable(model.consumable[n]) for n in range(9)], + "id": model.mentorship_service_set[1].id, + "slug": model.mentorship_service_set[1].slug, + "items": [serialize_consumable(model.consumable[n]) for n in range(9)], }, { - 'balance': { - 'unit': how_many_belong_to3, + "balance": { + "unit": how_many_belong_to3, }, - 'id': model.mentorship_service_set[2].id, - 'slug': model.mentorship_service_set[2].slug, - 'items': [serialize_consumable(model.consumable[n]) for n in range(9)], + "id": model.mentorship_service_set[2].id, + "slug": model.mentorship_service_set[2].slug, + "items": [serialize_consumable(model.consumable[n]) for n in range(9)], }, ], - 'cohort_sets': [], - 'event_type_sets': [], + "cohort_sets": [], + "event_type_sets": [], } assert json == expected self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('payments.Consumable'), + self.bc.database.list_of("payments.Consumable"), self.bc.format.to_dict(model.consumable), ) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__nine_consumables__related_to_three_mentorship_services__with_wrong_cohorts_in_querystring(self): - consumables = [{ - 'how_many': random.randint(1, 30), - 'mentorship_service_set_id': math.floor(n / 3) + 1 - } for n in range(9)] + consumables = [ + {"how_many": random.randint(1, 30), "mentorship_service_set_id": math.floor(n / 3) + 1} for n in range(9) + ] model = self.bc.database.create(user=1, consumable=consumables, mentorship_service_set=3) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:me_service_consumable') + '?mentorship_service_set=4,5,6' + url = reverse_lazy("payments:me_service_consumable") + "?mentorship_service_set=4,5,6" response = 
self.client.get(url) self.client.force_authenticate(model.user) json = response.json() expected = { - 'cohort_sets': [], - 'mentorship_service_sets': [], - 'event_type_sets': [], + "cohort_sets": [], + "mentorship_service_sets": [], + "event_type_sets": [], } assert json == expected self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('payments.Consumable'), + self.bc.database.list_of("payments.Consumable"), self.bc.format.to_dict(model.consumable), ) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__nine_consumables__related_to_three_mentorship_services__with_cohorts_in_querystring(self): - consumables = [{ - 'how_many': random.randint(1, 30), - 'mentorship_service_set_id': math.floor(n / 3) + 1 - } for n in range(9)] + consumables = [ + {"how_many": random.randint(1, 30), "mentorship_service_set_id": math.floor(n / 3) + 1} for n in range(9) + ] belong_to1 = consumables[:3] belong_to2 = consumables[3:6] belong_to3 = consumables[6:] - how_many_belong_to1 = sum([x['how_many'] for x in belong_to1]) - how_many_belong_to2 = sum([x['how_many'] for x in belong_to2]) - how_many_belong_to3 = sum([x['how_many'] for x in belong_to3]) + how_many_belong_to1 = sum([x["how_many"] for x in belong_to1]) + how_many_belong_to2 = sum([x["how_many"] for x in belong_to2]) + how_many_belong_to3 = sum([x["how_many"] for x in belong_to3]) model = self.bc.database.create(user=1, consumable=consumables, mentorship_service_set=3) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:me_service_consumable') + '?mentorship_service_set=1,2,3' + url = reverse_lazy("payments:me_service_consumable") + "?mentorship_service_set=1,2,3" response = self.client.get(url) self.client.force_authenticate(model.user) json = response.json() expected = { - 'cohort_sets': [], - 'mentorship_service_sets': [ + "cohort_sets": [], + "mentorship_service_sets": [ { - 'balance': { - 'unit': how_many_belong_to1, + "balance": { + "unit": how_many_belong_to1, }, - 'id': model.mentorship_service_set[0].id, - 'slug': model.mentorship_service_set[0].slug, - 'items': [serialize_consumable(model.consumable[n]) for n in range(9)], + "id": model.mentorship_service_set[0].id, + "slug": model.mentorship_service_set[0].slug, + "items": [serialize_consumable(model.consumable[n]) for n in range(9)], }, { - 'balance': { - 'unit': how_many_belong_to2, + "balance": { + "unit": how_many_belong_to2, }, - 'id': model.mentorship_service_set[1].id, - 'slug': model.mentorship_service_set[1].slug, - 'items': [serialize_consumable(model.consumable[n]) for n in range(9)], + "id": model.mentorship_service_set[1].id, + "slug": model.mentorship_service_set[1].slug, + "items": [serialize_consumable(model.consumable[n]) for n in range(9)], }, { - 'balance': { - 'unit': how_many_belong_to3, + "balance": { + "unit": how_many_belong_to3, }, - 'id': model.mentorship_service_set[2].id, - 'slug': model.mentorship_service_set[2].slug, - 'items': [serialize_consumable(model.consumable[n]) for n in range(9)], + "id": model.mentorship_service_set[2].id, + "slug": model.mentorship_service_set[2].slug, + "items": [serialize_consumable(model.consumable[n]) for n in range(9)], }, ], - 'event_type_sets': [], + "event_type_sets": [], } assert json == expected self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('payments.Consumable'), + 
self.bc.database.list_of("payments.Consumable"), self.bc.format.to_dict(model.consumable), ) @@ -478,66 +476,65 @@ def test__nine_consumables__related_to_three_mentorship_services__with_cohorts_i 🔽🔽🔽 Get with nine Consumable and three EventType, random how_many """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__nine_consumables__related_to_three_event_types__without_cohorts_in_querystring(self): - consumables = [{ - 'how_many': random.randint(1, 30), - 'event_type_set_id': math.floor(n / 3) + 1 - } for n in range(9)] + consumables = [ + {"how_many": random.randint(1, 30), "event_type_set_id": math.floor(n / 3) + 1} for n in range(9) + ] belong_to1 = consumables[:3] belong_to2 = consumables[3:6] belong_to3 = consumables[6:] - how_many_belong_to1 = sum([x['how_many'] for x in belong_to1]) - how_many_belong_to2 = sum([x['how_many'] for x in belong_to2]) - how_many_belong_to3 = sum([x['how_many'] for x in belong_to3]) - - event_type_sets = [{'event_type_id': x} for x in range(1, 4)] - - model = self.bc.database.create(user=1, - consumable=consumables, - event_type_set=event_type_sets, - event_type=[{ - 'icon_url': 'https://www.google.com' - }, { - 'icon_url': 'https://www.google.com' - }, { - 'icon_url': 'https://www.google.com' - }]) + how_many_belong_to1 = sum([x["how_many"] for x in belong_to1]) + how_many_belong_to2 = sum([x["how_many"] for x in belong_to2]) + how_many_belong_to3 = sum([x["how_many"] for x in belong_to3]) + + event_type_sets = [{"event_type_id": x} for x in range(1, 4)] + + model = self.bc.database.create( + user=1, + consumable=consumables, + event_type_set=event_type_sets, + event_type=[ + {"icon_url": "https://www.google.com"}, + {"icon_url": "https://www.google.com"}, + {"icon_url": "https://www.google.com"}, + ], + ) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:me_service_consumable') + url = reverse_lazy("payments:me_service_consumable") response = self.client.get(url) self.client.force_authenticate(model.user) json = response.json() expected = { - 'mentorship_service_sets': [], - 'cohort_sets': [], - 'event_type_sets': [ + "mentorship_service_sets": [], + "cohort_sets": [], + "event_type_sets": [ { - 'balance': { - 'unit': how_many_belong_to1, + "balance": { + "unit": how_many_belong_to1, }, - 'id': model.event_type_set[0].id, - 'slug': model.event_type_set[0].slug, - 'items': [serialize_consumable(model.consumable[n]) for n in range(9)], + "id": model.event_type_set[0].id, + "slug": model.event_type_set[0].slug, + "items": [serialize_consumable(model.consumable[n]) for n in range(9)], }, { - 'balance': { - 'unit': how_many_belong_to2, + "balance": { + "unit": how_many_belong_to2, }, - 'id': model.event_type_set[1].id, - 'slug': model.event_type_set[1].slug, - 'items': [serialize_consumable(model.consumable[n]) for n in range(9)], + "id": model.event_type_set[1].id, + "slug": model.event_type_set[1].slug, + "items": [serialize_consumable(model.consumable[n]) for n in range(9)], }, { - 'balance': { - 'unit': how_many_belong_to3, + "balance": { + "unit": how_many_belong_to3, }, - 'id': model.event_type_set[2].id, - 'slug': model.event_type_set[2].slug, - 'items': [serialize_consumable(model.consumable[n]) for n in range(9)], + "id": model.event_type_set[2].id, + "slug": model.event_type_set[2].slug, + "items": [serialize_consumable(model.consumable[n]) for n in range(9)], }, ], } @@ -545,115 +542,113 @@ def 
test__nine_consumables__related_to_three_event_types__without_cohorts_in_que assert json == expected self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('payments.Consumable'), + self.bc.database.list_of("payments.Consumable"), self.bc.format.to_dict(model.consumable), ) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__nine_consumables__related_to_three_event_types__with_wrong_cohorts_in_querystring(self): - consumables = [{ - 'how_many': random.randint(1, 30), - 'event_type_set_id': math.floor(n / 3) + 1 - } for n in range(9)] - - event_type_sets = [{'event_type_id': x} for x in range(1, 4)] - model = self.bc.database.create(user=1, - consumable=consumables, - event_type_set=event_type_sets, - event_type=[{ - 'icon_url': 'https://www.google.com' - }, { - 'icon_url': 'https://www.google.com' - }, { - 'icon_url': 'https://www.google.com' - }]) + consumables = [ + {"how_many": random.randint(1, 30), "event_type_set_id": math.floor(n / 3) + 1} for n in range(9) + ] + + event_type_sets = [{"event_type_id": x} for x in range(1, 4)] + model = self.bc.database.create( + user=1, + consumable=consumables, + event_type_set=event_type_sets, + event_type=[ + {"icon_url": "https://www.google.com"}, + {"icon_url": "https://www.google.com"}, + {"icon_url": "https://www.google.com"}, + ], + ) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:me_service_consumable') + '?event_type_set=4,5,6' + url = reverse_lazy("payments:me_service_consumable") + "?event_type_set=4,5,6" response = self.client.get(url) self.client.force_authenticate(model.user) json = response.json() expected = { - 'cohort_sets': [], - 'event_type_sets': [], - 'mentorship_service_sets': [], + "cohort_sets": [], + "event_type_sets": [], + "mentorship_service_sets": [], } assert json == expected self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('payments.Consumable'), + self.bc.database.list_of("payments.Consumable"), self.bc.format.to_dict(model.consumable), ) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__nine_consumables__related_to_three_event_types__with_cohorts_in_querystring(self): - consumables = [{ - 'how_many': random.randint(1, 30), - 'event_type_set_id': math.floor(n / 3) + 1 - } for n in range(9)] + consumables = [ + {"how_many": random.randint(1, 30), "event_type_set_id": math.floor(n / 3) + 1} for n in range(9) + ] belong_to1 = consumables[:3] belong_to2 = consumables[3:6] belong_to3 = consumables[6:] - how_many_belong_to1 = sum([x['how_many'] for x in belong_to1]) - how_many_belong_to2 = sum([x['how_many'] for x in belong_to2]) - how_many_belong_to3 = sum([x['how_many'] for x in belong_to3]) - - event_type_sets = [{'event_type_id': x} for x in range(1, 4)] - model = self.bc.database.create(user=1, - consumable=consumables, - event_type_set=event_type_sets, - event_type=[{ - 'icon_url': 'https://www.google.com' - }, { - 'icon_url': 'https://www.google.com' - }, { - 'icon_url': 'https://www.google.com' - }]) + how_many_belong_to1 = sum([x["how_many"] for x in belong_to1]) + how_many_belong_to2 = sum([x["how_many"] for x in belong_to2]) + how_many_belong_to3 = sum([x["how_many"] for x in belong_to3]) + + event_type_sets = [{"event_type_id": x} for x in range(1, 4)] + model = 
self.bc.database.create( + user=1, + consumable=consumables, + event_type_set=event_type_sets, + event_type=[ + {"icon_url": "https://www.google.com"}, + {"icon_url": "https://www.google.com"}, + {"icon_url": "https://www.google.com"}, + ], + ) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:me_service_consumable') + '?event_type_set=1,2,3' + url = reverse_lazy("payments:me_service_consumable") + "?event_type_set=1,2,3" response = self.client.get(url) self.client.force_authenticate(model.user) json = response.json() expected = { - 'cohort_sets': [], - 'event_type_sets': [ + "cohort_sets": [], + "event_type_sets": [ { - 'balance': { - 'unit': how_many_belong_to1, + "balance": { + "unit": how_many_belong_to1, }, - 'id': model.event_type_set[0].id, - 'slug': model.event_type_set[0].slug, - 'items': [serialize_consumable(model.consumable[n]) for n in range(9)], + "id": model.event_type_set[0].id, + "slug": model.event_type_set[0].slug, + "items": [serialize_consumable(model.consumable[n]) for n in range(9)], }, { - 'balance': { - 'unit': how_many_belong_to2, + "balance": { + "unit": how_many_belong_to2, }, - 'id': model.event_type_set[1].id, - 'slug': model.event_type_set[1].slug, - 'items': [serialize_consumable(model.consumable[n]) for n in range(9)], + "id": model.event_type_set[1].id, + "slug": model.event_type_set[1].slug, + "items": [serialize_consumable(model.consumable[n]) for n in range(9)], }, { - 'balance': { - 'unit': how_many_belong_to3, + "balance": { + "unit": how_many_belong_to3, }, - 'id': model.event_type_set[2].id, - 'slug': model.event_type_set[2].slug, - 'items': [serialize_consumable(model.consumable[n]) for n in range(9)], + "id": model.event_type_set[2].id, + "slug": model.event_type_set[2].slug, + "items": [serialize_consumable(model.consumable[n]) for n in range(9)], }, ], - 'mentorship_service_sets': [], + "mentorship_service_sets": [], } assert json == expected self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('payments.Consumable'), + self.bc.database.list_of("payments.Consumable"), self.bc.format.to_dict(model.consumable), ) @@ -662,149 +657,151 @@ def test__nine_consumables__related_to_three_event_types__with_cohorts_in_querys 🔽🔽🔽 Get with nine Consumable and three Cohort, random how_many """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__nine_consumables__random_how_many__related_to_three_cohorts__without_cohort_slugs_in_querystring(self): - consumables = [{'how_many': random.randint(1, 30), 'cohort_set_id': math.floor(n / 3) + 1} for n in range(9)] + consumables = [{"how_many": random.randint(1, 30), "cohort_set_id": math.floor(n / 3) + 1} for n in range(9)] belong_to1 = consumables[:3] belong_to2 = consumables[3:6] belong_to3 = consumables[6:] - how_many_belong_to1 = sum([x['how_many'] for x in belong_to1]) - how_many_belong_to2 = sum([x['how_many'] for x in belong_to2]) - how_many_belong_to3 = sum([x['how_many'] for x in belong_to3]) + how_many_belong_to1 = sum([x["how_many"] for x in belong_to1]) + how_many_belong_to2 = sum([x["how_many"] for x in belong_to2]) + how_many_belong_to3 = sum([x["how_many"] for x in belong_to3]) - academy = {'available_as_saas': True} + academy = {"available_as_saas": True} model = self.bc.database.create(user=1, consumable=consumables, cohort_set=3, academy=academy) self.client.force_authenticate(model.user) - url = 
reverse_lazy('payments:me_service_consumable') + url = reverse_lazy("payments:me_service_consumable") response = self.client.get(url) self.client.force_authenticate(model.user) json = response.json() expected = { - 'mentorship_service_sets': [], - 'cohort_sets': [ + "mentorship_service_sets": [], + "cohort_sets": [ { - 'balance': { - 'unit': how_many_belong_to1, + "balance": { + "unit": how_many_belong_to1, }, - 'id': model.cohort_set[0].id, - 'slug': model.cohort_set[0].slug, - 'items': [serialize_consumable(model.consumable[n]) for n in range(9)], + "id": model.cohort_set[0].id, + "slug": model.cohort_set[0].slug, + "items": [serialize_consumable(model.consumable[n]) for n in range(9)], }, { - 'balance': { - 'unit': how_many_belong_to2, + "balance": { + "unit": how_many_belong_to2, }, - 'id': model.cohort_set[1].id, - 'slug': model.cohort_set[1].slug, - 'items': [serialize_consumable(model.consumable[n]) for n in range(9)], + "id": model.cohort_set[1].id, + "slug": model.cohort_set[1].slug, + "items": [serialize_consumable(model.consumable[n]) for n in range(9)], }, { - 'balance': { - 'unit': how_many_belong_to3, + "balance": { + "unit": how_many_belong_to3, }, - 'id': model.cohort_set[2].id, - 'slug': model.cohort_set[2].slug, - 'items': [serialize_consumable(model.consumable[n]) for n in range(9)], + "id": model.cohort_set[2].id, + "slug": model.cohort_set[2].slug, + "items": [serialize_consumable(model.consumable[n]) for n in range(9)], }, ], - 'event_type_sets': [], + "event_type_sets": [], } assert json == expected self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('payments.Consumable'), + self.bc.database.list_of("payments.Consumable"), self.bc.format.to_dict(model.consumable), ) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__nine_consumables__random_how_many__related_to_three_cohorts__with_wrong_cohort_slugs_in_querystring(self): - consumables = [{'how_many': random.randint(1, 30), 'cohort_set_id': math.floor(n / 3) + 1} for n in range(9)] + consumables = [{"how_many": random.randint(1, 30), "cohort_set_id": math.floor(n / 3) + 1} for n in range(9)] - academy = {'available_as_saas': True} + academy = {"available_as_saas": True} model = self.bc.database.create(user=1, consumable=consumables, cohort_set=3, academy=academy) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:me_service_consumable') + f'?cohort_set_slug=blabla1,blabla2,blabla3' + url = reverse_lazy("payments:me_service_consumable") + f"?cohort_set_slug=blabla1,blabla2,blabla3" response = self.client.get(url) self.client.force_authenticate(model.user) json = response.json() expected = { - 'mentorship_service_sets': [], - 'cohort_sets': [], - 'event_type_sets': [], + "mentorship_service_sets": [], + "cohort_sets": [], + "event_type_sets": [], } assert json == expected self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('payments.Consumable'), + self.bc.database.list_of("payments.Consumable"), self.bc.format.to_dict(model.consumable), ) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__nine_consumables__random_how_many__related_to_three_cohorts__with_cohort_slugs_in_querystring(self): - consumables = [{'how_many': random.randint(1, 30), 'cohort_set_id': math.floor(n / 3) + 1} for n in 
range(9)] + consumables = [{"how_many": random.randint(1, 30), "cohort_set_id": math.floor(n / 3) + 1} for n in range(9)] belong_to1 = consumables[:3] belong_to2 = consumables[3:6] belong_to3 = consumables[6:] - how_many_belong_to1 = sum([x['how_many'] for x in belong_to1]) - how_many_belong_to2 = sum([x['how_many'] for x in belong_to2]) - how_many_belong_to3 = sum([x['how_many'] for x in belong_to3]) + how_many_belong_to1 = sum([x["how_many"] for x in belong_to1]) + how_many_belong_to2 = sum([x["how_many"] for x in belong_to2]) + how_many_belong_to3 = sum([x["how_many"] for x in belong_to3]) - academy = {'available_as_saas': True} + academy = {"available_as_saas": True} model = self.bc.database.create(user=1, consumable=consumables, cohort_set=3, academy=academy) self.client.force_authenticate(model.user) - url = reverse_lazy( - 'payments:me_service_consumable') + f'?cohort_set_slug={",".join([x.slug for x in model.cohort_set])}' + url = ( + reverse_lazy("payments:me_service_consumable") + + f'?cohort_set_slug={",".join([x.slug for x in model.cohort_set])}' + ) response = self.client.get(url) self.client.force_authenticate(model.user) json = response.json() expected = { - 'mentorship_service_sets': [], - 'cohort_sets': [ + "mentorship_service_sets": [], + "cohort_sets": [ { - 'balance': { - 'unit': how_many_belong_to1, + "balance": { + "unit": how_many_belong_to1, }, - 'id': model.cohort_set[0].id, - 'slug': model.cohort_set[0].slug, - 'items': [serialize_consumable(model.consumable[n]) for n in range(9)], + "id": model.cohort_set[0].id, + "slug": model.cohort_set[0].slug, + "items": [serialize_consumable(model.consumable[n]) for n in range(9)], }, { - 'balance': { - 'unit': how_many_belong_to2, + "balance": { + "unit": how_many_belong_to2, }, - 'id': model.cohort_set[1].id, - 'slug': model.cohort_set[1].slug, - 'items': [serialize_consumable(model.consumable[n]) for n in range(9)], + "id": model.cohort_set[1].id, + "slug": model.cohort_set[1].slug, + "items": [serialize_consumable(model.consumable[n]) for n in range(9)], }, { - 'balance': { - 'unit': how_many_belong_to3, + "balance": { + "unit": how_many_belong_to3, }, - 'id': model.cohort_set[2].id, - 'slug': model.cohort_set[2].slug, - 'items': [serialize_consumable(model.consumable[n]) for n in range(9)], + "id": model.cohort_set[2].id, + "slug": model.cohort_set[2].slug, + "items": [serialize_consumable(model.consumable[n]) for n in range(9)], }, ], - 'event_type_sets': [], + "event_type_sets": [], } assert json == expected self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('payments.Consumable'), + self.bc.database.list_of("payments.Consumable"), self.bc.format.to_dict(model.consumable), ) @@ -812,153 +809,151 @@ def test__nine_consumables__random_how_many__related_to_three_cohorts__with_coho 🔽🔽🔽 Get with nine Consumable and three MentorshipService, random how_many """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__nine_consumables__related_to_three_mentorship_services__without_cohort_slugs_in_querystring(self): - consumables = [{ - 'how_many': random.randint(1, 30), - 'mentorship_service_set_id': math.floor(n / 3) + 1 - } for n in range(9)] + consumables = [ + {"how_many": random.randint(1, 30), "mentorship_service_set_id": math.floor(n / 3) + 1} for n in range(9) + ] belong_to1 = consumables[:3] belong_to2 = consumables[3:6] belong_to3 = consumables[6:] - how_many_belong_to1 
= sum([x['how_many'] for x in belong_to1]) - how_many_belong_to2 = sum([x['how_many'] for x in belong_to2]) - how_many_belong_to3 = sum([x['how_many'] for x in belong_to3]) + how_many_belong_to1 = sum([x["how_many"] for x in belong_to1]) + how_many_belong_to2 = sum([x["how_many"] for x in belong_to2]) + how_many_belong_to3 = sum([x["how_many"] for x in belong_to3]) model = self.bc.database.create(user=1, consumable=consumables, mentorship_service_set=3) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:me_service_consumable') + url = reverse_lazy("payments:me_service_consumable") response = self.client.get(url) self.client.force_authenticate(model.user) json = response.json() expected = { - 'mentorship_service_sets': [ + "mentorship_service_sets": [ { - 'balance': { - 'unit': how_many_belong_to1, + "balance": { + "unit": how_many_belong_to1, }, - 'id': model.mentorship_service_set[0].id, - 'slug': model.mentorship_service_set[0].slug, - 'items': [serialize_consumable(model.consumable[n]) for n in range(9)], + "id": model.mentorship_service_set[0].id, + "slug": model.mentorship_service_set[0].slug, + "items": [serialize_consumable(model.consumable[n]) for n in range(9)], }, { - 'balance': { - 'unit': how_many_belong_to2, + "balance": { + "unit": how_many_belong_to2, }, - 'id': model.mentorship_service_set[1].id, - 'slug': model.mentorship_service_set[1].slug, - 'items': [serialize_consumable(model.consumable[n]) for n in range(9)], + "id": model.mentorship_service_set[1].id, + "slug": model.mentorship_service_set[1].slug, + "items": [serialize_consumable(model.consumable[n]) for n in range(9)], }, { - 'balance': { - 'unit': how_many_belong_to3, + "balance": { + "unit": how_many_belong_to3, }, - 'id': model.mentorship_service_set[2].id, - 'slug': model.mentorship_service_set[2].slug, - 'items': [serialize_consumable(model.consumable[n]) for n in range(9)], + "id": model.mentorship_service_set[2].id, + "slug": model.mentorship_service_set[2].slug, + "items": [serialize_consumable(model.consumable[n]) for n in range(9)], }, ], - 'cohort_sets': [], - 'event_type_sets': [], + "cohort_sets": [], + "event_type_sets": [], } assert json == expected self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('payments.Consumable'), + self.bc.database.list_of("payments.Consumable"), self.bc.format.to_dict(model.consumable), ) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__nine_consumables__related_to_three_mentorship_services__with_wrong_cohort_slugs_in_querystring(self): - consumables = [{ - 'how_many': random.randint(1, 30), - 'mentorship_service_set_id': math.floor(n / 3) + 1 - } for n in range(9)] + consumables = [ + {"how_many": random.randint(1, 30), "mentorship_service_set_id": math.floor(n / 3) + 1} for n in range(9) + ] model = self.bc.database.create(user=1, consumable=consumables, mentorship_service_set=3) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:me_service_consumable') + f'?mentorship_service_set_slug=blabla1,blabla2,blabla3' + url = reverse_lazy("payments:me_service_consumable") + f"?mentorship_service_set_slug=blabla1,blabla2,blabla3" response = self.client.get(url) self.client.force_authenticate(model.user) json = response.json() expected = { - 'cohort_sets': [], - 'mentorship_service_sets': [], - 'event_type_sets': [], + "cohort_sets": [], + "mentorship_service_sets": [], + 
"event_type_sets": [], } assert json == expected self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('payments.Consumable'), + self.bc.database.list_of("payments.Consumable"), self.bc.format.to_dict(model.consumable), ) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__nine_consumables__related_to_three_mentorship_services__with_cohort_slugs_in_querystring(self): - consumables = [{ - 'how_many': random.randint(1, 30), - 'mentorship_service_set_id': math.floor(n / 3) + 1 - } for n in range(9)] + consumables = [ + {"how_many": random.randint(1, 30), "mentorship_service_set_id": math.floor(n / 3) + 1} for n in range(9) + ] belong_to1 = consumables[:3] belong_to2 = consumables[3:6] belong_to3 = consumables[6:] - how_many_belong_to1 = sum([x['how_many'] for x in belong_to1]) - how_many_belong_to2 = sum([x['how_many'] for x in belong_to2]) - how_many_belong_to3 = sum([x['how_many'] for x in belong_to3]) + how_many_belong_to1 = sum([x["how_many"] for x in belong_to1]) + how_many_belong_to2 = sum([x["how_many"] for x in belong_to2]) + how_many_belong_to3 = sum([x["how_many"] for x in belong_to3]) model = self.bc.database.create(user=1, consumable=consumables, mentorship_service_set=3) self.client.force_authenticate(model.user) - url = reverse_lazy( - 'payments:me_service_consumable' - ) + f'?mentorship_service_set_slug={",".join([x.slug for x in model.mentorship_service_set])}' + url = ( + reverse_lazy("payments:me_service_consumable") + + f'?mentorship_service_set_slug={",".join([x.slug for x in model.mentorship_service_set])}' + ) response = self.client.get(url) self.client.force_authenticate(model.user) json = response.json() expected = { - 'cohort_sets': [], - 'mentorship_service_sets': [ + "cohort_sets": [], + "mentorship_service_sets": [ { - 'balance': { - 'unit': how_many_belong_to1, + "balance": { + "unit": how_many_belong_to1, }, - 'id': model.mentorship_service_set[0].id, - 'slug': model.mentorship_service_set[0].slug, - 'items': [serialize_consumable(model.consumable[n]) for n in range(9)], + "id": model.mentorship_service_set[0].id, + "slug": model.mentorship_service_set[0].slug, + "items": [serialize_consumable(model.consumable[n]) for n in range(9)], }, { - 'balance': { - 'unit': how_many_belong_to2, + "balance": { + "unit": how_many_belong_to2, }, - 'id': model.mentorship_service_set[1].id, - 'slug': model.mentorship_service_set[1].slug, - 'items': [serialize_consumable(model.consumable[n]) for n in range(9)], + "id": model.mentorship_service_set[1].id, + "slug": model.mentorship_service_set[1].slug, + "items": [serialize_consumable(model.consumable[n]) for n in range(9)], }, { - 'balance': { - 'unit': how_many_belong_to3, + "balance": { + "unit": how_many_belong_to3, }, - 'id': model.mentorship_service_set[2].id, - 'slug': model.mentorship_service_set[2].slug, - 'items': [serialize_consumable(model.consumable[n]) for n in range(9)], + "id": model.mentorship_service_set[2].id, + "slug": model.mentorship_service_set[2].slug, + "items": [serialize_consumable(model.consumable[n]) for n in range(9)], }, ], - 'event_type_sets': [], + "event_type_sets": [], } assert json == expected self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('payments.Consumable'), + self.bc.database.list_of("payments.Consumable"), self.bc.format.to_dict(model.consumable), ) @@ -966,65 +961,64 @@ def 
test__nine_consumables__related_to_three_mentorship_services__with_cohort_sl 🔽🔽🔽 Get with nine Consumable and three EventType, random how_many """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__nine_consumables__related_to_three_event_types__without_cohort_slugs_in_querystring(self): - consumables = [{ - 'how_many': random.randint(1, 30), - 'event_type_set_id': math.floor(n / 3) + 1 - } for n in range(9)] + consumables = [ + {"how_many": random.randint(1, 30), "event_type_set_id": math.floor(n / 3) + 1} for n in range(9) + ] belong_to1 = consumables[:3] belong_to2 = consumables[3:6] belong_to3 = consumables[6:] - how_many_belong_to1 = sum([x['how_many'] for x in belong_to1]) - how_many_belong_to2 = sum([x['how_many'] for x in belong_to2]) - how_many_belong_to3 = sum([x['how_many'] for x in belong_to3]) - - event_type_sets = [{'event_type_id': x} for x in range(1, 4)] - model = self.bc.database.create(user=1, - consumable=consumables, - event_type_set=event_type_sets, - event_type=[{ - 'icon_url': 'https://www.google.com' - }, { - 'icon_url': 'https://www.google.com' - }, { - 'icon_url': 'https://www.google.com' - }]) + how_many_belong_to1 = sum([x["how_many"] for x in belong_to1]) + how_many_belong_to2 = sum([x["how_many"] for x in belong_to2]) + how_many_belong_to3 = sum([x["how_many"] for x in belong_to3]) + + event_type_sets = [{"event_type_id": x} for x in range(1, 4)] + model = self.bc.database.create( + user=1, + consumable=consumables, + event_type_set=event_type_sets, + event_type=[ + {"icon_url": "https://www.google.com"}, + {"icon_url": "https://www.google.com"}, + {"icon_url": "https://www.google.com"}, + ], + ) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:me_service_consumable') + url = reverse_lazy("payments:me_service_consumable") response = self.client.get(url) self.client.force_authenticate(model.user) json = response.json() expected = { - 'mentorship_service_sets': [], - 'cohort_sets': [], - 'event_type_sets': [ + "mentorship_service_sets": [], + "cohort_sets": [], + "event_type_sets": [ { - 'balance': { - 'unit': how_many_belong_to1, + "balance": { + "unit": how_many_belong_to1, }, - 'id': model.event_type_set[0].id, - 'slug': model.event_type_set[0].slug, - 'items': [serialize_consumable(model.consumable[n]) for n in range(9)], + "id": model.event_type_set[0].id, + "slug": model.event_type_set[0].slug, + "items": [serialize_consumable(model.consumable[n]) for n in range(9)], }, { - 'balance': { - 'unit': how_many_belong_to2, + "balance": { + "unit": how_many_belong_to2, }, - 'id': model.event_type_set[1].id, - 'slug': model.event_type_set[1].slug, - 'items': [serialize_consumable(model.consumable[n]) for n in range(9)], + "id": model.event_type_set[1].id, + "slug": model.event_type_set[1].slug, + "items": [serialize_consumable(model.consumable[n]) for n in range(9)], }, { - 'balance': { - 'unit': how_many_belong_to3, + "balance": { + "unit": how_many_belong_to3, }, - 'id': model.event_type_set[2].id, - 'slug': model.event_type_set[2].slug, - 'items': [serialize_consumable(model.consumable[n]) for n in range(9)], + "id": model.event_type_set[2].id, + "slug": model.event_type_set[2].slug, + "items": [serialize_consumable(model.consumable[n]) for n in range(9)], }, ], } @@ -1032,115 +1026,115 @@ def test__nine_consumables__related_to_three_event_types__without_cohort_slugs_i assert json == expected self.assertEqual(response.status_code, 
status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('payments.Consumable'), + self.bc.database.list_of("payments.Consumable"), self.bc.format.to_dict(model.consumable), ) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__nine_consumables__related_to_three_event_types__with_wrong_cohort_slugs_in_querystring(self): - consumables = [{ - 'how_many': random.randint(1, 30), - 'event_type_set_id': math.floor(n / 3) + 1 - } for n in range(9)] - - event_type_sets = [{'event_type_id': x} for x in range(1, 4)] - model = self.bc.database.create(user=1, - consumable=consumables, - event_type_set=event_type_sets, - event_type=[{ - 'icon_url': 'https://www.google.com' - }, { - 'icon_url': 'https://www.google.com' - }, { - 'icon_url': 'https://www.google.com' - }]) + consumables = [ + {"how_many": random.randint(1, 30), "event_type_set_id": math.floor(n / 3) + 1} for n in range(9) + ] + + event_type_sets = [{"event_type_id": x} for x in range(1, 4)] + model = self.bc.database.create( + user=1, + consumable=consumables, + event_type_set=event_type_sets, + event_type=[ + {"icon_url": "https://www.google.com"}, + {"icon_url": "https://www.google.com"}, + {"icon_url": "https://www.google.com"}, + ], + ) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:me_service_consumable') + f'?event_type_set_slug=blabla1,blabla2,blabla3' + url = reverse_lazy("payments:me_service_consumable") + f"?event_type_set_slug=blabla1,blabla2,blabla3" response = self.client.get(url) self.client.force_authenticate(model.user) json = response.json() expected = { - 'cohort_sets': [], - 'event_type_sets': [], - 'mentorship_service_sets': [], + "cohort_sets": [], + "event_type_sets": [], + "mentorship_service_sets": [], } assert json == expected self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('payments.Consumable'), + self.bc.database.list_of("payments.Consumable"), self.bc.format.to_dict(model.consumable), ) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__nine_consumables__related_to_three_event_types__with_cohort_slugs_in_querystring(self): - consumables = [{ - 'how_many': random.randint(1, 30), - 'event_type_set_id': math.floor(n / 3) + 1 - } for n in range(9)] + consumables = [ + {"how_many": random.randint(1, 30), "event_type_set_id": math.floor(n / 3) + 1} for n in range(9) + ] belong_to1 = consumables[:3] belong_to2 = consumables[3:6] belong_to3 = consumables[6:] - how_many_belong_to1 = sum([x['how_many'] for x in belong_to1]) - how_many_belong_to2 = sum([x['how_many'] for x in belong_to2]) - how_many_belong_to3 = sum([x['how_many'] for x in belong_to3]) - - event_type_sets = [{'event_type_id': x} for x in range(1, 4)] - model = self.bc.database.create(user=1, - consumable=consumables, - event_type_set=event_type_sets, - event_type=[{ - 'icon_url': 'https://www.google.com' - }, { - 'icon_url': 'https://www.google.com' - }, { - 'icon_url': 'https://www.google.com' - }]) + how_many_belong_to1 = sum([x["how_many"] for x in belong_to1]) + how_many_belong_to2 = sum([x["how_many"] for x in belong_to2]) + how_many_belong_to3 = sum([x["how_many"] for x in belong_to3]) + + event_type_sets = [{"event_type_id": x} for x in range(1, 4)] + model = self.bc.database.create( + user=1, + consumable=consumables, + event_type_set=event_type_sets, + 
event_type=[ + {"icon_url": "https://www.google.com"}, + {"icon_url": "https://www.google.com"}, + {"icon_url": "https://www.google.com"}, + ], + ) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:me_service_consumable' - ) + f'?event_type_set_slug={",".join([x.slug for x in model.event_type_set])}' + url = ( + reverse_lazy("payments:me_service_consumable") + + f'?event_type_set_slug={",".join([x.slug for x in model.event_type_set])}' + ) response = self.client.get(url) self.client.force_authenticate(model.user) json = response.json() expected = { - 'cohort_sets': [], - 'event_type_sets': [ + "cohort_sets": [], + "event_type_sets": [ { - 'balance': { - 'unit': how_many_belong_to1, + "balance": { + "unit": how_many_belong_to1, }, - 'id': model.event_type_set[0].id, - 'slug': model.event_type_set[0].slug, - 'items': [serialize_consumable(model.consumable[n]) for n in range(9)], + "id": model.event_type_set[0].id, + "slug": model.event_type_set[0].slug, + "items": [serialize_consumable(model.consumable[n]) for n in range(9)], }, { - 'balance': { - 'unit': how_many_belong_to2, + "balance": { + "unit": how_many_belong_to2, }, - 'id': model.event_type_set[1].id, - 'slug': model.event_type_set[1].slug, - 'items': [serialize_consumable(model.consumable[n]) for n in range(9)], + "id": model.event_type_set[1].id, + "slug": model.event_type_set[1].slug, + "items": [serialize_consumable(model.consumable[n]) for n in range(9)], }, { - 'balance': { - 'unit': how_many_belong_to3, + "balance": { + "unit": how_many_belong_to3, }, - 'id': model.event_type_set[2].id, - 'slug': model.event_type_set[2].slug, - 'items': [serialize_consumable(model.consumable[n]) for n in range(9)], + "id": model.event_type_set[2].id, + "slug": model.event_type_set[2].slug, + "items": [serialize_consumable(model.consumable[n]) for n in range(9)], }, ], - 'mentorship_service_sets': [], + "mentorship_service_sets": [], } assert json == expected self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('payments.Consumable'), + self.bc.database.list_of("payments.Consumable"), self.bc.format.to_dict(model.consumable), ) diff --git a/breathecode/payments/tests/urls/tests_me_service_slug_cancel_hash.py b/breathecode/payments/tests/urls/tests_me_service_slug_cancel_hash.py index f8aed289e..a8022463a 100644 --- a/breathecode/payments/tests/urls/tests_me_service_slug_cancel_hash.py +++ b/breathecode/payments/tests/urls/tests_me_service_slug_cancel_hash.py @@ -11,8 +11,9 @@ @pytest.fixture(autouse=True) def setup(db, monkeypatch: pytest.MonkeyPatch): - monkeypatch.setattr('breathecode.payments.tasks.end_the_consumption_session.apply_async', - lambda *args, **kwargs: None) + monkeypatch.setattr( + "breathecode.payments.tasks.end_the_consumption_session.apply_async", lambda *args, **kwargs: None + ) yield @@ -25,91 +26,89 @@ def random_duration(): def db_item(service, data={}): return { - 'consumable_id': 1, - 'duration': ..., - 'eta': ..., - 'how_many': 1.0, - 'id': 1, - 'operation_code': 'unsafe-consume-service-set', - 'path': '', - 'related_id': 0, - 'related_slug': '', - 'request': { - 'args': [], - 'headers': { - 'academy': None, + "consumable_id": 1, + "duration": ..., + "eta": ..., + "how_many": 1.0, + "id": 1, + "operation_code": "unsafe-consume-service-set", + "path": "", + "related_id": 0, + "related_slug": "", + "request": { + "args": [], + "headers": { + "academy": None, }, - 'kwargs': { - 'hash': '1234567890123456', - 'service_slug': service.slug, + 
"kwargs": { + "hash": "1234567890123456", + "service_slug": service.slug, }, - 'user': 1, + "user": 1, }, - 'status': 'PENDING', - 'user_id': 1, - 'was_discounted': False, + "status": "PENDING", + "user_id": 1, + "was_discounted": False, **data, } def test_no_auth(bc: Breathecode, client: rfx.Client): - url = reverse_lazy('payments:me_service_slug_cancel_hash', - kwargs={ - 'service_slug': 'my-service', - 'hash': '1234567890123456' - }) + url = reverse_lazy( + "payments:me_service_slug_cancel_hash", kwargs={"service_slug": "my-service", "hash": "1234567890123456"} + ) response = client.put(url) json = response.json() - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} assert json == expected assert response.status_code == status.HTTP_401_UNAUTHORIZED - assert bc.database.list_of('payments.ConsumptionSession') == [] + assert bc.database.list_of("payments.ConsumptionSession") == [] -@pytest.mark.parametrize('with_session', [False, True]) +@pytest.mark.parametrize("with_session", [False, True]) def test_no_sessions(bc: Breathecode, client: rfx.Client, with_session, fake, utc_now): slug = fake.slug() duration = random_duration() - url = reverse_lazy('payments:me_service_slug_cancel_hash', - kwargs={ - 'service_slug': 'my-service', - 'hash': '1234567890123456' - }) + url = reverse_lazy( + "payments:me_service_slug_cancel_hash", kwargs={"service_slug": "my-service", "hash": "1234567890123456"} + ) extra = {} if with_session: - extra.update({ - 'consumable': 1, - 'service': { - 'session_duration': duration, - 'slug': slug, - }, - 'consumption_session': { - 'how_many': 1, - 'eta': utc_now + duration, - 'duration': duration, - 'was_discounted': False, - 'operation_code': 'unsafe-consume-service-set', - 'related_id': 0, - 'related_slug': '', - 'status': 'CANCELLED', - 'path': '', - 'request': { - 'args': [], - 'headers': { - 'academy': None, - }, - 'kwargs': { - 'hash': '1234567890123456', - 'service_slug': slug, + extra.update( + { + "consumable": 1, + "service": { + "session_duration": duration, + "slug": slug, + }, + "consumption_session": { + "how_many": 1, + "eta": utc_now + duration, + "duration": duration, + "was_discounted": False, + "operation_code": "unsafe-consume-service-set", + "related_id": 0, + "related_slug": "", + "status": "CANCELLED", + "path": "", + "request": { + "args": [], + "headers": { + "academy": None, + }, + "kwargs": { + "hash": "1234567890123456", + "service_slug": slug, + }, + "user": 1, }, - 'user': 1, }, - }, - }) + } + ) model = bc.database.create(user=1, **extra) client.force_authenticate(user=model.user) @@ -117,66 +116,69 @@ def test_no_sessions(bc: Breathecode, client: rfx.Client, with_session, fake, ut response = client.put(url) json = response.json() - expected = {'detail': 'session-not-found', 'status_code': 404} + expected = {"detail": "session-not-found", "status_code": 404} assert json == expected assert response.status_code == status.HTTP_404_NOT_FOUND if with_session: - assert bc.database.list_of('payments.ConsumptionSession') == [bc.format.to_dict(model.consumption_session)] + assert bc.database.list_of("payments.ConsumptionSession") == [bc.format.to_dict(model.consumption_session)] else: - assert bc.database.list_of('payments.ConsumptionSession') == [] + assert bc.database.list_of("payments.ConsumptionSession") == [] def test_cancelled(bc: Breathecode, client: rfx.Client, utc_now, fake): slug = fake.slug() duration = 
random_duration() - model = bc.database.create(user=1, - consumable=1, - service={ - 'session_duration': duration, - 'slug': slug, - 'type': 'VOID', - }, - consumption_session={ - 'how_many': 1, - 'eta': utc_now + duration, - 'duration': duration, - 'was_discounted': False, - 'operation_code': 'unsafe-consume-service-set', - 'related_id': 0, - 'related_slug': '', - 'status': 'PENDING', - 'path': '', - 'request': { - 'args': [], - 'headers': { - 'academy': None, - }, - 'kwargs': { - 'hash': '1234567890123456', - 'service_slug': slug, - }, - 'user': 1, - }, - }) - url = reverse_lazy('payments:me_service_slug_cancel_hash', - kwargs={ - 'service_slug': model.service.slug, - 'hash': '1234567890123456' - }) + model = bc.database.create( + user=1, + consumable=1, + service={ + "session_duration": duration, + "slug": slug, + "type": "VOID", + }, + consumption_session={ + "how_many": 1, + "eta": utc_now + duration, + "duration": duration, + "was_discounted": False, + "operation_code": "unsafe-consume-service-set", + "related_id": 0, + "related_slug": "", + "status": "PENDING", + "path": "", + "request": { + "args": [], + "headers": { + "academy": None, + }, + "kwargs": { + "hash": "1234567890123456", + "service_slug": slug, + }, + "user": 1, + }, + }, + ) + url = reverse_lazy( + "payments:me_service_slug_cancel_hash", kwargs={"service_slug": model.service.slug, "hash": "1234567890123456"} + ) client.force_authenticate(user=model.user) response = client.put(url) json = response.json() - expected = {'status': 'reversed'} + expected = {"status": "reversed"} assert json == expected assert response.status_code == status.HTTP_200_OK - assert bc.database.list_of('payments.ConsumptionSession') == [ - db_item(model.service, data={ - 'duration': duration, - 'eta': utc_now + duration, - }) + assert bc.database.list_of("payments.ConsumptionSession") == [ + db_item( + model.service, + data={ + "duration": duration, + "eta": utc_now + duration, + }, + ) ] diff --git a/breathecode/payments/tests/urls/tests_me_service_slug_consume_hash.py b/breathecode/payments/tests/urls/tests_me_service_slug_consume_hash.py index 81bce24de..38e0fa947 100644 --- a/breathecode/payments/tests/urls/tests_me_service_slug_consume_hash.py +++ b/breathecode/payments/tests/urls/tests_me_service_slug_consume_hash.py @@ -11,36 +11,37 @@ @pytest.fixture(autouse=True) def setup(db, monkeypatch: pytest.MonkeyPatch): - monkeypatch.setattr('breathecode.payments.tasks.end_the_consumption_session.apply_async', - lambda *args, **kwargs: None) + monkeypatch.setattr( + "breathecode.payments.tasks.end_the_consumption_session.apply_async", lambda *args, **kwargs: None + ) yield def db_item(service, data={}): return { - 'consumable_id': 1, - 'duration': ..., - 'eta': ..., - 'how_many': 1.0, - 'id': 1, - 'operation_code': 'unsafe-consume-service-set', - 'path': '', - 'related_id': 0, - 'related_slug': '', - 'request': { - 'args': [], - 'headers': { - 'academy': None, + "consumable_id": 1, + "duration": ..., + "eta": ..., + "how_many": 1.0, + "id": 1, + "operation_code": "unsafe-consume-service-set", + "path": "", + "related_id": 0, + "related_slug": "", + "request": { + "args": [], + "headers": { + "academy": None, }, - 'kwargs': { - 'hash': '1234567890123456', - 'service_slug': service.slug, + "kwargs": { + "hash": "1234567890123456", + "service_slug": service.slug, }, - 'user': 1, + "user": 1, }, - 'status': 'PENDING', - 'user_id': 1, - 'was_discounted': False, + "status": "PENDING", + "user_id": 1, + "was_discounted": False, **data, } @@ -53,28 
+54,24 @@ def random_duration(): def test_no_auth(bc: Breathecode, client: rfx.Client): - url = reverse_lazy('payments:me_service_slug_consume_hash', - kwargs={ - 'service_slug': 'my-service', - 'hash': '1234567890123456' - }) + url = reverse_lazy( + "payments:me_service_slug_consume_hash", kwargs={"service_slug": "my-service", "hash": "1234567890123456"} + ) response = client.put(url) json = response.json() - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} assert json == expected assert response.status_code == status.HTTP_401_UNAUTHORIZED - assert bc.database.list_of('payments.ConsumptionSession') == [] + assert bc.database.list_of("payments.ConsumptionSession") == [] def test_no_consumables(bc: Breathecode, client: rfx.Client): - url = reverse_lazy('payments:me_service_slug_consume_hash', - kwargs={ - 'service_slug': 'my-service', - 'hash': '1234567890123456' - }) + url = reverse_lazy( + "payments:me_service_slug_consume_hash", kwargs={"service_slug": "my-service", "hash": "1234567890123456"} + ) model = bc.database.create(user=1) client.force_authenticate(user=model.user) @@ -82,88 +79,92 @@ def test_no_consumables(bc: Breathecode, client: rfx.Client): response = client.put(url) json = response.json() - expected = {'detail': 'insufficient-credits', 'status_code': 402} + expected = {"detail": "insufficient-credits", "status_code": 402} assert json == expected assert response.status_code == status.HTTP_402_PAYMENT_REQUIRED - assert bc.database.list_of('payments.ConsumptionSession') == [] + assert bc.database.list_of("payments.ConsumptionSession") == [] def test_created(bc: Breathecode, client: rfx.Client, utc_now): duration = random_duration() - model = bc.database.create(user=1, consumable=1, service={'session_duration': duration}) - url = reverse_lazy('payments:me_service_slug_consume_hash', - kwargs={ - 'service_slug': model.service.slug, - 'hash': '1234567890123456' - }) + model = bc.database.create(user=1, consumable=1, service={"session_duration": duration}) + url = reverse_lazy( + "payments:me_service_slug_consume_hash", kwargs={"service_slug": model.service.slug, "hash": "1234567890123456"} + ) client.force_authenticate(user=model.user) response = client.put(url) json = response.json() - expected = {'status': 'ok'} + expected = {"status": "ok"} assert json == expected assert response.status_code == status.HTTP_201_CREATED - assert bc.database.list_of('payments.ConsumptionSession') == [ - db_item(model.service, data={ - 'duration': duration, - 'eta': utc_now + duration, - }) + assert bc.database.list_of("payments.ConsumptionSession") == [ + db_item( + model.service, + data={ + "duration": duration, + "eta": utc_now + duration, + }, + ) ] def test_cached(bc: Breathecode, client: rfx.Client, utc_now, fake): slug = fake.slug() duration = random_duration() - model = bc.database.create(user=1, - consumable=1, - service={ - 'session_duration': duration, - 'slug': slug, - }, - consumption_session={ - 'how_many': 1, - 'eta': utc_now + duration, - 'duration': duration, - 'was_discounted': False, - 'operation_code': 'unsafe-consume-service-set', - 'related_id': 0, - 'related_slug': '', - 'status': 'PENDING', - 'path': '', - 'request': { - 'args': [], - 'headers': { - 'academy': None, - }, - 'kwargs': { - 'hash': '1234567890123456', - 'service_slug': slug, - }, - 'user': 1, - }, - }) - url = reverse_lazy('payments:me_service_slug_consume_hash', - kwargs={ - 
'service_slug': model.service.slug, - 'hash': '1234567890123456' - }) + model = bc.database.create( + user=1, + consumable=1, + service={ + "session_duration": duration, + "slug": slug, + }, + consumption_session={ + "how_many": 1, + "eta": utc_now + duration, + "duration": duration, + "was_discounted": False, + "operation_code": "unsafe-consume-service-set", + "related_id": 0, + "related_slug": "", + "status": "PENDING", + "path": "", + "request": { + "args": [], + "headers": { + "academy": None, + }, + "kwargs": { + "hash": "1234567890123456", + "service_slug": slug, + }, + "user": 1, + }, + }, + ) + url = reverse_lazy( + "payments:me_service_slug_consume_hash", kwargs={"service_slug": model.service.slug, "hash": "1234567890123456"} + ) client.force_authenticate(user=model.user) response = client.put(url) json = response.json() - expected = {'status': 'ok'} + expected = {"status": "ok"} assert json == expected assert response.status_code == status.HTTP_200_OK - assert bc.database.list_of('payments.ConsumptionSession') == [ - db_item(model.service, data={ - 'duration': duration, - 'eta': utc_now + duration, - }) + assert bc.database.list_of("payments.ConsumptionSession") == [ + db_item( + model.service, + data={ + "duration": duration, + "eta": utc_now + duration, + }, + ) ] diff --git a/breathecode/payments/tests/urls/tests_me_subscription.py b/breathecode/payments/tests/urls/tests_me_subscription.py index 153668c9a..b241c2e78 100644 --- a/breathecode/payments/tests/urls/tests_me_subscription.py +++ b/breathecode/payments/tests/urls/tests_me_subscription.py @@ -17,288 +17,272 @@ def academy_serializer(academy): return { - 'id': academy.id, - 'name': academy.name, - 'slug': academy.slug, + "id": academy.id, + "name": academy.name, + "slug": academy.slug, } def currency_serializer(currency): return { - 'code': currency.code, - 'name': currency.name, + "code": currency.code, + "name": currency.name, } def user_serializer(user): return { - 'email': user.email, - 'first_name': user.first_name, - 'last_name': user.last_name, + "email": user.email, + "first_name": user.first_name, + "last_name": user.last_name, } def invoice_serializer(self, invoice, currency, user): return { - 'amount': invoice.amount, - 'currency': currency_serializer(currency), - 'paid_at': self.bc.datetime.to_iso_string(invoice.paid_at), - 'status': invoice.status, - 'user': user_serializer(user), + "amount": invoice.amount, + "currency": currency_serializer(currency), + "paid_at": self.bc.datetime.to_iso_string(invoice.paid_at), + "status": invoice.status, + "user": user_serializer(user), } def permission_serializer(permission): return { - 'codename': permission.codename, - 'name': permission.name, + "codename": permission.codename, + "name": permission.name, } def group_serializer(group, permissions=[]): return { - 'name': group.name, - 'permissions': [permission_serializer(permission) for permission in permissions], + "name": group.name, + "permissions": [permission_serializer(permission) for permission in permissions], } def service_serializer(service, groups=[], permissions=[]): return { - 'private': service.private, - 'slug': service.slug, - 'title': service.title, - 'icon_url': service.icon_url, - 'groups': [group_serializer(group, permissions) for group in groups], + "private": service.private, + "slug": service.slug, + "title": service.title, + "icon_url": service.icon_url, + "groups": [group_serializer(group, permissions) for group in groups], } def service_item_serializer(self, service_item, service, groups=[], 
permissions=[]): return { - 'how_many': service_item.how_many, - 'unit_type': service_item.unit_type, - 'sort_priority': service_item.sort_priority, - 'service': service_serializer(service, groups, permissions), + "how_many": service_item.how_many, + "unit_type": service_item.unit_type, + "sort_priority": service_item.sort_priority, + "service": service_serializer(service, groups, permissions), } def plan_serializer(self, plan, service, groups=[], permissions=[], service_items=[]): return { - 'financing_options': [], - 'service_items': - [service_item_serializer(self, service_item, service, groups, permissions) for service_item in service_items], - 'slug': - plan.slug, - 'status': - plan.status, - 'time_of_life': - plan.time_of_life, - 'time_of_life_unit': - plan.time_of_life_unit, - 'trial_duration': - plan.trial_duration, - 'trial_duration_unit': - plan.trial_duration_unit, - 'has_available_cohorts': - bool(plan.cohort_set), + "financing_options": [], + "service_items": [ + service_item_serializer(self, service_item, service, groups, permissions) for service_item in service_items + ], + "slug": plan.slug, + "status": plan.status, + "time_of_life": plan.time_of_life, + "time_of_life_unit": plan.time_of_life_unit, + "trial_duration": plan.trial_duration, + "trial_duration_unit": plan.trial_duration_unit, + "has_available_cohorts": bool(plan.cohort_set), } def get_mentorship_service_serializer(mentorship_service, academy): return { - 'academy': { - 'id': academy.id, - 'name': academy.name, - 'slug': academy.slug, + "academy": { + "id": academy.id, + "name": academy.name, + "slug": academy.slug, }, - 'id': mentorship_service.id, + "id": mentorship_service.id, } def get_mentorship_service_set_serializer(mentorship_service_set, academy, mentorship_services=[]): return { - 'academy': { - 'id': academy.id, - 'name': academy.name, - 'slug': academy.slug, + "academy": { + "id": academy.id, + "name": academy.name, + "slug": academy.slug, }, - 'id': - mentorship_service_set.id, - 'mentorship_services': - [get_mentorship_service_serializer(mentorship_service, academy) for mentorship_service in mentorship_services], - 'slug': - mentorship_service_set.slug, - 'academy_services': [], + "id": mentorship_service_set.id, + "mentorship_services": [ + get_mentorship_service_serializer(mentorship_service, academy) for mentorship_service in mentorship_services + ], + "slug": mentorship_service_set.slug, + "academy_services": [], } def get_event_type_serializer(event_type, academy): return { - 'academy': { - 'id': academy.id, - 'name': academy.name, - 'slug': academy.slug, + "academy": { + "id": academy.id, + "name": academy.name, + "slug": academy.slug, }, - 'id': event_type.id, + "id": event_type.id, } def get_event_type_set_serializer(event_type_set, academy, event_types=[]): return { - 'academy': { - 'id': academy.id, - 'name': academy.name, - 'slug': academy.slug, + "academy": { + "id": academy.id, + "name": academy.name, + "slug": academy.slug, }, - 'id': event_type_set.id, - 'event_types': [get_event_type_serializer(event_type, academy) for event_type in event_types], - 'slug': event_type_set.slug, - 'academy_services': [], + "id": event_type_set.id, + "event_types": [get_event_type_serializer(event_type, academy) for event_type in event_types], + "slug": event_type_set.slug, + "academy_services": [], } def get_academy_serializer(academy): return { - 'id': academy.id, - 'slug': academy.slug, - 'name': academy.name, + "id": academy.id, + "slug": academy.slug, + "name": academy.name, } def 
get_cohort_serializer(cohort): return { - 'id': cohort.id, - 'slug': cohort.slug, - 'name': cohort.name, + "id": cohort.id, + "slug": cohort.slug, + "name": cohort.name, } def get_cohort_set_serializer(cohort_set, academy, cohorts=[]): return { - 'academy': get_academy_serializer(academy), - 'cohorts': [get_cohort_serializer(cohort) for cohort in cohorts], - 'id': cohort_set.id, - 'slug': cohort_set.slug, + "academy": get_academy_serializer(academy), + "cohorts": [get_cohort_serializer(cohort) for cohort in cohorts], + "id": cohort_set.id, + "slug": cohort_set.slug, } -def get_plan_financing_serializer(self, - plan_financing, - academy, - currency, - user, - service, - mentorship_service_set=None, - event_type_set=None, - cohort_set=None, - invoices=[], - financing_options=[], - plans=[], - groups=[], - permissions=[], - service_items=[], - mentorship_services=[], - event_types=[], - cohorts=[]): +def get_plan_financing_serializer( + self, + plan_financing, + academy, + currency, + user, + service, + mentorship_service_set=None, + event_type_set=None, + cohort_set=None, + invoices=[], + financing_options=[], + plans=[], + groups=[], + permissions=[], + service_items=[], + mentorship_services=[], + event_types=[], + cohorts=[], +): if cohort_set: cohort_set = get_cohort_set_serializer(cohort_set, academy, cohorts=cohorts) if mentorship_service_set: - mentorship_service_set = get_mentorship_service_set_serializer(mentorship_service_set, - academy, - mentorship_services=mentorship_services) + mentorship_service_set = get_mentorship_service_set_serializer( + mentorship_service_set, academy, mentorship_services=mentorship_services + ) if event_type_set: event_type_set = get_event_type_set_serializer(event_type_set, academy, event_types=event_types) return { - 'id': plan_financing.id, - 'academy': academy_serializer(academy), - 'invoices': [invoice_serializer(self, invoice, currency, user) for invoice in invoices], - 'valid_until': self.bc.datetime.to_iso_string(plan_financing.valid_until), - 'next_payment_at': self.bc.datetime.to_iso_string(plan_financing.next_payment_at), - 'plan_expires_at': self.bc.datetime.to_iso_string(plan_financing.plan_expires_at), - 'plans': [plan_serializer(self, plan, service, groups, permissions, service_items) for plan in plans], - 'selected_mentorship_service_set': mentorship_service_set, - 'selected_event_type_set': event_type_set, - 'selected_cohort_set': cohort_set, - 'status': plan_financing.status, - 'monthly_price': plan_financing.monthly_price, - 'status_message': plan_financing.status_message, - 'user': user_serializer(user), + "id": plan_financing.id, + "academy": academy_serializer(academy), + "invoices": [invoice_serializer(self, invoice, currency, user) for invoice in invoices], + "valid_until": self.bc.datetime.to_iso_string(plan_financing.valid_until), + "next_payment_at": self.bc.datetime.to_iso_string(plan_financing.next_payment_at), + "plan_expires_at": self.bc.datetime.to_iso_string(plan_financing.plan_expires_at), + "plans": [plan_serializer(self, plan, service, groups, permissions, service_items) for plan in plans], + "selected_mentorship_service_set": mentorship_service_set, + "selected_event_type_set": event_type_set, + "selected_cohort_set": cohort_set, + "status": plan_financing.status, + "monthly_price": plan_financing.monthly_price, + "status_message": plan_financing.status_message, + "user": user_serializer(user), } -def get_subscription_serializer(self, - subscription, - academy, - currency, - user, - service, - 
mentorship_service_set=None, - event_type_set=None, - cohort_set=None, - invoices=[], - financing_options=[], - plans=[], - groups=[], - permissions=[], - service_items=[], - mentorship_services=[], - event_types=[], - cohorts=[]): +def get_subscription_serializer( + self, + subscription, + academy, + currency, + user, + service, + mentorship_service_set=None, + event_type_set=None, + cohort_set=None, + invoices=[], + financing_options=[], + plans=[], + groups=[], + permissions=[], + service_items=[], + mentorship_services=[], + event_types=[], + cohorts=[], +): valid_until = self.bc.datetime.to_iso_string(subscription.valid_until) if subscription.valid_until else None if cohort_set: cohort_set = get_cohort_set_serializer(cohort_set, academy, cohorts=cohorts) if mentorship_service_set: - mentorship_service_set = get_mentorship_service_set_serializer(mentorship_service_set, - academy, - mentorship_services=mentorship_services) + mentorship_service_set = get_mentorship_service_set_serializer( + mentorship_service_set, academy, mentorship_services=mentorship_services + ) if event_type_set: event_type_set = get_event_type_set_serializer(event_type_set, academy, event_types=event_types) return { - 'id': - subscription.id, - 'academy': - academy_serializer(academy), - 'invoices': [invoice_serializer(self, invoice, currency, user) for invoice in invoices], - 'paid_at': - self.bc.datetime.to_iso_string(subscription.paid_at), - 'valid_until': - valid_until, - 'plans': [plan_serializer(self, plan, service, groups, permissions, service_items) for plan in plans], - 'status': - subscription.status, - 'status_message': - subscription.status_message, - 'is_refundable': - subscription.is_refundable, - 'next_payment_at': - self.bc.datetime.to_iso_string(subscription.next_payment_at), - 'pay_every': - subscription.pay_every, - 'pay_every_unit': - subscription.pay_every_unit, - 'selected_mentorship_service_set': - mentorship_service_set, - 'selected_event_type_set': - event_type_set, - 'selected_cohort_set': - cohort_set, - 'user': - user_serializer(user), - 'service_items': - [service_item_serializer(self, service_item, service, groups, permissions) for service_item in service_items], + "id": subscription.id, + "academy": academy_serializer(academy), + "invoices": [invoice_serializer(self, invoice, currency, user) for invoice in invoices], + "paid_at": self.bc.datetime.to_iso_string(subscription.paid_at), + "valid_until": valid_until, + "plans": [plan_serializer(self, plan, service, groups, permissions, service_items) for plan in plans], + "status": subscription.status, + "status_message": subscription.status_message, + "is_refundable": subscription.is_refundable, + "next_payment_at": self.bc.datetime.to_iso_string(subscription.next_payment_at), + "pay_every": subscription.pay_every, + "pay_every_unit": subscription.pay_every_unit, + "selected_mentorship_service_set": mentorship_service_set, + "selected_event_type_set": event_type_set, + "selected_cohort_set": cohort_set, + "user": user_serializer(user), + "service_items": [ + service_item_serializer(self, service_item, service, groups, permissions) for service_item in service_items + ], } @pytest.fixture(autouse=True) def setup(monkeypatch): - monkeypatch.setattr(activity_tasks.add_activity, 'delay', MagicMock()) + monkeypatch.setattr(activity_tasks.add_activity, "delay", MagicMock()) yield @@ -308,58 +292,61 @@ class SignalTestSuite(PaymentsTestCase): """ def test__without_auth(self): - url = reverse_lazy('payments:me_subscription') + url = 
reverse_lazy("payments:me_subscription") response = self.client.get(url) json = response.json() - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, []) """ 🔽🔽🔽 Get without items """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__without_items(self): model = self.bc.database.create(user=1) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:me_subscription') + url = reverse_lazy("payments:me_subscription") response = self.client.get(url) self.client.force_authenticate(model.user) json = response.json() - expected = {'plan_financings': [], 'subscriptions': []} + expected = {"plan_financings": [], "subscriptions": []} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, []) """ 🔽🔽🔽 Get with many PlanFinancing and Subscription """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__with_many_items(self): - subscriptions = [{ - 'valid_until': x, - } for x in [None, UTC_NOW + timedelta(days=1)]] + subscriptions = [ + { + "valid_until": x, + } + for x in [None, UTC_NOW + timedelta(days=1)] + ] plan_financing = { - 'valid_until': UTC_NOW + timedelta(days=1), - 'plan_expires_at': UTC_NOW + timedelta(days=1), - 'monthly_price': random.random() * 99.99 + 0.01, + "valid_until": UTC_NOW + timedelta(days=1), + "plan_expires_at": UTC_NOW + timedelta(days=1), + "monthly_price": random.random() * 99.99 + 0.01, } - plan_service_items = [{'service_item_id': x, 'plan_id': 1} for x in range(1, 3)] - plan_service_items += [{'service_item_id': x, 'plan_id': 2} for x in range(1, 3)] - subscription_service_items = [{'service_item_id': x, 'subscription_id': 1} for x in range(1, 3)] - subscription_service_items += [{'service_item_id': x, 'subscription_id': 2} for x in range(1, 3)] - plan = {'is_renewable': False} + plan_service_items = [{"service_item_id": x, "plan_id": 1} for x in range(1, 3)] + plan_service_items += [{"service_item_id": x, "plan_id": 2} for x in range(1, 3)] + subscription_service_items = [{"service_item_id": x, "subscription_id": 1} for x in range(1, 3)] + subscription_service_items += [{"service_item_id": x, "subscription_id": 2} for x in range(1, 3)] + plan = {"is_renewable": False} model = self.bc.database.create( subscription=subscriptions, plan_financing=(2, plan_financing), @@ -371,84 +358,98 @@ def test__with_many_items(self): ) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:me_subscription') + url = reverse_lazy("payments:me_subscription") response = self.client.get(url) self.client.force_authenticate(model.user) json = response.json() expected = { - 'plan_financings': [ - get_plan_financing_serializer(self, - model.plan_financing[1], - 
model.academy, - model.currency, - model.user, - model.service, - invoices=[model.invoice[0], model.invoice[1]], - financing_options=[], - plans=[model.plan[0], model.plan[1]], - service_items=[model.service_item[0], model.service_item[1]]), - get_plan_financing_serializer(self, - model.plan_financing[0], - model.academy, - model.currency, - model.user, - model.service, - invoices=[model.invoice[0], model.invoice[1]], - financing_options=[], - plans=[model.plan[0], model.plan[1]], - service_items=[model.service_item[0], model.service_item[1]]), + "plan_financings": [ + get_plan_financing_serializer( + self, + model.plan_financing[1], + model.academy, + model.currency, + model.user, + model.service, + invoices=[model.invoice[0], model.invoice[1]], + financing_options=[], + plans=[model.plan[0], model.plan[1]], + service_items=[model.service_item[0], model.service_item[1]], + ), + get_plan_financing_serializer( + self, + model.plan_financing[0], + model.academy, + model.currency, + model.user, + model.service, + invoices=[model.invoice[0], model.invoice[1]], + financing_options=[], + plans=[model.plan[0], model.plan[1]], + service_items=[model.service_item[0], model.service_item[1]], + ), ], - 'subscriptions': [ - get_subscription_serializer(self, - model.subscription[1], - model.academy, - model.currency, - model.user, - model.service, - invoices=[model.invoice[0], model.invoice[1]], - financing_options=[], - plans=[model.plan[0], model.plan[1]], - service_items=[model.service_item[0], model.service_item[1]]), - get_subscription_serializer(self, - model.subscription[0], - model.academy, - model.currency, - model.user, - model.service, - invoices=[model.invoice[0], model.invoice[1]], - financing_options=[], - plans=[model.plan[0], model.plan[1]], - service_items=[model.service_item[0], model.service_item[1]]), + "subscriptions": [ + get_subscription_serializer( + self, + model.subscription[1], + model.academy, + model.currency, + model.user, + model.service, + invoices=[model.invoice[0], model.invoice[1]], + financing_options=[], + plans=[model.plan[0], model.plan[1]], + service_items=[model.service_item[0], model.service_item[1]], + ), + get_subscription_serializer( + self, + model.subscription[0], + model.academy, + model.currency, + model.user, + model.service, + invoices=[model.invoice[0], model.invoice[1]], + financing_options=[], + plans=[model.plan[0], model.plan[1]], + service_items=[model.service_item[0], model.service_item[1]], + ), ], } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) """ 🔽🔽🔽 Get with many PlanFinancing and Subscription, filter by subscription """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__with_many_items__filter_by_subscription(self): - subscriptions = [{ - 'valid_until': x, - } for x in [None, UTC_NOW + timedelta(days=1)]] + subscriptions = [ + { + "valid_until": x, + } + for x in [None, UTC_NOW + timedelta(days=1)] + ] plan_financing = { - 'valid_until': UTC_NOW + 
timedelta(days=1), - 'plan_expires_at': UTC_NOW + timedelta(days=1), - 'monthly_price': random.random() * 99.99 + 0.01, + "valid_until": UTC_NOW + timedelta(days=1), + "plan_expires_at": UTC_NOW + timedelta(days=1), + "monthly_price": random.random() * 99.99 + 0.01, } - plan_service_items = [{'service_item_id': x, 'plan_id': 1} for x in range(1, 3)] - plan_service_items += [{'service_item_id': x, 'plan_id': 2} for x in range(1, 3)] - subscription_service_items = [{'service_item_id': x, 'subscription_id': 1} for x in range(1, 3)] - subscription_service_items += [{'service_item_id': x, 'subscription_id': 2} for x in range(1, 3)] - plan = {'is_renewable': False} + plan_service_items = [{"service_item_id": x, "plan_id": 1} for x in range(1, 3)] + plan_service_items += [{"service_item_id": x, "plan_id": 2} for x in range(1, 3)] + subscription_service_items = [{"service_item_id": x, "subscription_id": 1} for x in range(1, 3)] + subscription_service_items += [{"service_item_id": x, "subscription_id": 2} for x in range(1, 3)] + plan = {"is_renewable": False} model = self.bc.database.create( subscription=subscriptions, plan_financing=(2, plan_financing), @@ -460,53 +461,61 @@ def test__with_many_items__filter_by_subscription(self): ) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:me_subscription') + f'?subscription={model.subscription[0].id}' + url = reverse_lazy("payments:me_subscription") + f"?subscription={model.subscription[0].id}" response = self.client.get(url) self.client.force_authenticate(model.user) json = response.json() expected = { - 'plan_financings': [], - 'subscriptions': [ - get_subscription_serializer(self, - model.subscription[0], - model.academy, - model.currency, - model.user, - model.service, - invoices=[model.invoice[0], model.invoice[1]], - financing_options=[], - plans=[model.plan[0], model.plan[1]], - service_items=[model.service_item[0], model.service_item[1]]), + "plan_financings": [], + "subscriptions": [ + get_subscription_serializer( + self, + model.subscription[0], + model.academy, + model.currency, + model.user, + model.service, + invoices=[model.invoice[0], model.invoice[1]], + financing_options=[], + plans=[model.plan[0], model.plan[1]], + service_items=[model.service_item[0], model.service_item[1]], + ), ], } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) """ 🔽🔽🔽 Get with many PlanFinancing and Subscription, filter by plan financing """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__with_many_items__filter_by_plan_financing(self): - subscriptions = [{ - 'valid_until': x, - } for x in [None, UTC_NOW + timedelta(days=1)]] + subscriptions = [ + { + "valid_until": x, + } + for x in [None, UTC_NOW + timedelta(days=1)] + ] plan_financing = { - 'valid_until': UTC_NOW + timedelta(days=1), - 'plan_expires_at': UTC_NOW + timedelta(days=1), - 'monthly_price': random.random() * 99.99 + 0.01, + "valid_until": UTC_NOW + timedelta(days=1), + "plan_expires_at": 
UTC_NOW + timedelta(days=1), + "monthly_price": random.random() * 99.99 + 0.01, } - plan_service_items = [{'service_item_id': x, 'plan_id': 1} for x in range(1, 3)] - plan_service_items += [{'service_item_id': x, 'plan_id': 2} for x in range(1, 3)] - subscription_service_items = [{'service_item_id': x, 'subscription_id': 1} for x in range(1, 3)] - subscription_service_items += [{'service_item_id': x, 'subscription_id': 2} for x in range(1, 3)] - plan = {'is_renewable': False} + plan_service_items = [{"service_item_id": x, "plan_id": 1} for x in range(1, 3)] + plan_service_items += [{"service_item_id": x, "plan_id": 2} for x in range(1, 3)] + subscription_service_items = [{"service_item_id": x, "subscription_id": 1} for x in range(1, 3)] + subscription_service_items += [{"service_item_id": x, "subscription_id": 2} for x in range(1, 3)] + plan = {"is_renewable": False} model = self.bc.database.create( subscription=subscriptions, plan_financing=(2, plan_financing), @@ -518,53 +527,61 @@ def test__with_many_items__filter_by_plan_financing(self): ) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:me_subscription') + f'?plan-financing={model.plan_financing[0].id}' + url = reverse_lazy("payments:me_subscription") + f"?plan-financing={model.plan_financing[0].id}" response = self.client.get(url) self.client.force_authenticate(model.user) json = response.json() expected = { - 'plan_financings': [ - get_plan_financing_serializer(self, - model.plan_financing[0], - model.academy, - model.currency, - model.user, - model.service, - invoices=[model.invoice[0], model.invoice[1]], - financing_options=[], - plans=[model.plan[0], model.plan[1]], - service_items=[model.service_item[0], model.service_item[1]]), + "plan_financings": [ + get_plan_financing_serializer( + self, + model.plan_financing[0], + model.academy, + model.currency, + model.user, + model.service, + invoices=[model.invoice[0], model.invoice[1]], + financing_options=[], + plans=[model.plan[0], model.plan[1]], + service_items=[model.service_item[0], model.service_item[1]], + ), ], - 'subscriptions': [], + "subscriptions": [], } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) """ 🔽🔽🔽 Get with many PlanFinancing and Subscription, filter by subscription and plan financing """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__with_many_items__filter_by_subscription_and_plan_financing(self): - subscriptions = [{ - 'valid_until': x, - } for x in [None, UTC_NOW + timedelta(days=1)]] + subscriptions = [ + { + "valid_until": x, + } + for x in [None, UTC_NOW + timedelta(days=1)] + ] plan_financing = { - 'valid_until': UTC_NOW + timedelta(days=1), - 'plan_expires_at': UTC_NOW + timedelta(days=1), - 'monthly_price': random.random() * 99.99 + 0.01, + "valid_until": UTC_NOW + timedelta(days=1), + "plan_expires_at": UTC_NOW + timedelta(days=1), + "monthly_price": random.random() * 99.99 + 0.01, } - plan_service_items = [{'service_item_id': x, 
'plan_id': 1} for x in range(1, 3)] - plan_service_items += [{'service_item_id': x, 'plan_id': 2} for x in range(1, 3)] - subscription_service_items = [{'service_item_id': x, 'subscription_id': 1} for x in range(1, 3)] - subscription_service_items += [{'service_item_id': x, 'subscription_id': 2} for x in range(1, 3)] - plan = {'is_renewable': False} + plan_service_items = [{"service_item_id": x, "plan_id": 1} for x in range(1, 3)] + plan_service_items += [{"service_item_id": x, "plan_id": 2} for x in range(1, 3)] + subscription_service_items = [{"service_item_id": x, "subscription_id": 1} for x in range(1, 3)] + subscription_service_items += [{"service_item_id": x, "subscription_id": 2} for x in range(1, 3)] + plan = {"is_renewable": False} model = self.bc.database.create( subscription=subscriptions, plan_financing=(2, plan_financing), @@ -576,68 +593,79 @@ def test__with_many_items__filter_by_subscription_and_plan_financing(self): ) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:me_subscription') + (f'?subscription={model.subscription[0].id}&' - f'plan-financing={model.plan_financing[0].id}') + url = reverse_lazy("payments:me_subscription") + ( + f"?subscription={model.subscription[0].id}&" f"plan-financing={model.plan_financing[0].id}" + ) response = self.client.get(url) self.client.force_authenticate(model.user) json = response.json() expected = { - 'plan_financings': [ - get_plan_financing_serializer(self, - model.plan_financing[0], - model.academy, - model.currency, - model.user, - model.service, - invoices=[model.invoice[0], model.invoice[1]], - financing_options=[], - plans=[model.plan[0], model.plan[1]], - service_items=[model.service_item[0], model.service_item[1]]), + "plan_financings": [ + get_plan_financing_serializer( + self, + model.plan_financing[0], + model.academy, + model.currency, + model.user, + model.service, + invoices=[model.invoice[0], model.invoice[1]], + financing_options=[], + plans=[model.plan[0], model.plan[1]], + service_items=[model.service_item[0], model.service_item[1]], + ), ], - 'subscriptions': [ - get_subscription_serializer(self, - model.subscription[0], - model.academy, - model.currency, - model.user, - model.service, - invoices=[model.invoice[0], model.invoice[1]], - financing_options=[], - plans=[model.plan[0], model.plan[1]], - service_items=[model.service_item[0], model.service_item[1]]), + "subscriptions": [ + get_subscription_serializer( + self, + model.subscription[0], + model.academy, + model.currency, + model.user, + model.service, + invoices=[model.invoice[0], model.invoice[1]], + financing_options=[], + plans=[model.plan[0], model.plan[1]], + service_items=[model.service_item[0], model.service_item[1]], + ), ], } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) """ 🔽🔽🔽 Get with many PlanFinancing and Subscription, with wrong statuses """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__with_many_items__with_wrong_statuses(self): - 
wrong_statuses = ['PAYMENT_ISSUE', 'DEPRECATED', 'CANCELLED'] - subscriptions = [{ - 'valid_until': x, - 'status': random.choice(wrong_statuses), - } for x in [None, UTC_NOW + timedelta(days=1)]] + wrong_statuses = ["PAYMENT_ISSUE", "DEPRECATED", "CANCELLED"] + subscriptions = [ + { + "valid_until": x, + "status": random.choice(wrong_statuses), + } + for x in [None, UTC_NOW + timedelta(days=1)] + ] plan_financing = { - 'valid_until': UTC_NOW + timedelta(days=1), - 'plan_expires_at': UTC_NOW + timedelta(days=1), - 'status': random.choice(wrong_statuses), - 'monthly_price': random.random() * 99.99 + 0.01, + "valid_until": UTC_NOW + timedelta(days=1), + "plan_expires_at": UTC_NOW + timedelta(days=1), + "status": random.choice(wrong_statuses), + "monthly_price": random.random() * 99.99 + 0.01, } - plan_service_items = [{'service_item_id': x, 'plan_id': 1} for x in range(1, 3)] - plan_service_items += [{'service_item_id': x, 'plan_id': 2} for x in range(1, 3)] - subscription_service_items = [{'service_item_id': x, 'subscription_id': 1} for x in range(1, 3)] - subscription_service_items += [{'service_item_id': x, 'subscription_id': 2} for x in range(1, 3)] - plan = {'is_renewable': False} + plan_service_items = [{"service_item_id": x, "plan_id": 1} for x in range(1, 3)] + plan_service_items += [{"service_item_id": x, "plan_id": 2} for x in range(1, 3)] + subscription_service_items = [{"service_item_id": x, "subscription_id": 1} for x in range(1, 3)] + subscription_service_items += [{"service_item_id": x, "subscription_id": 2} for x in range(1, 3)] + plan = {"is_renewable": False} model = self.bc.database.create( subscription=subscriptions, plan_financing=(2, plan_financing), @@ -649,46 +677,52 @@ def test__with_many_items__with_wrong_statuses(self): ) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:me_subscription') + url = reverse_lazy("payments:me_subscription") response = self.client.get(url) self.client.force_authenticate(model.user) json = response.json() expected = { - 'plan_financings': [], - 'subscriptions': [], + "plan_financings": [], + "subscriptions": [], } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) """ 🔽🔽🔽 Get with many PlanFinancing and Subscription, filter by status """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__with_many_items__filter_by_statuses(self): - statuses = ['FREE_TRIAL', 'ACTIVE', 'PAYMENT_ISSUE', 'DEPRECATED', 'CANCELLED', 'ERROR'] + statuses = ["FREE_TRIAL", "ACTIVE", "PAYMENT_ISSUE", "DEPRECATED", "CANCELLED", "ERROR"] chosen_statuses = [random.choice(statuses) for _ in range(2)] - subscriptions = [{ - 'valid_until': x, - 'status': random.choice(chosen_statuses), - } for x in [None, UTC_NOW + timedelta(days=1)]] + subscriptions = [ + { + "valid_until": x, + "status": random.choice(chosen_statuses), + } + for x in [None, UTC_NOW + timedelta(days=1)] + ] plan_financing = { - 'valid_until': UTC_NOW + timedelta(days=1), - 'plan_expires_at': UTC_NOW + 
timedelta(days=1), - 'status': random.choice(chosen_statuses), - 'monthly_price': random.random() * 99.99 + 0.01, + "valid_until": UTC_NOW + timedelta(days=1), + "plan_expires_at": UTC_NOW + timedelta(days=1), + "status": random.choice(chosen_statuses), + "monthly_price": random.random() * 99.99 + 0.01, } - plan_service_items = [{'service_item_id': x, 'plan_id': 1} for x in range(1, 3)] - plan_service_items += [{'service_item_id': x, 'plan_id': 2} for x in range(1, 3)] - subscription_service_items = [{'service_item_id': x, 'subscription_id': 1} for x in range(1, 3)] - subscription_service_items += [{'service_item_id': x, 'subscription_id': 2} for x in range(1, 3)] - plan = {'is_renewable': False} + plan_service_items = [{"service_item_id": x, "plan_id": 1} for x in range(1, 3)] + plan_service_items += [{"service_item_id": x, "plan_id": 2} for x in range(1, 3)] + subscription_service_items = [{"service_item_id": x, "subscription_id": 1} for x in range(1, 3)] + subscription_service_items += [{"service_item_id": x, "subscription_id": 2} for x in range(1, 3)] + plan = {"is_renewable": False} model = self.bc.database.create( subscription=subscriptions, plan_financing=(2, plan_financing), @@ -700,89 +734,102 @@ def test__with_many_items__filter_by_statuses(self): ) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:me_subscription') + (f'?status={chosen_statuses[0]},' - f'{chosen_statuses[1]}') + url = reverse_lazy("payments:me_subscription") + (f"?status={chosen_statuses[0]}," f"{chosen_statuses[1]}") response = self.client.get(url) self.client.force_authenticate(model.user) json = response.json() expected = { - 'plan_financings': [ - get_plan_financing_serializer(self, - model.plan_financing[1], - model.academy, - model.currency, - model.user, - model.service, - invoices=[model.invoice[0], model.invoice[1]], - financing_options=[], - plans=[model.plan[0], model.plan[1]], - service_items=[model.service_item[0], model.service_item[1]]), - get_plan_financing_serializer(self, - model.plan_financing[0], - model.academy, - model.currency, - model.user, - model.service, - invoices=[model.invoice[0], model.invoice[1]], - financing_options=[], - plans=[model.plan[0], model.plan[1]], - service_items=[model.service_item[0], model.service_item[1]]), + "plan_financings": [ + get_plan_financing_serializer( + self, + model.plan_financing[1], + model.academy, + model.currency, + model.user, + model.service, + invoices=[model.invoice[0], model.invoice[1]], + financing_options=[], + plans=[model.plan[0], model.plan[1]], + service_items=[model.service_item[0], model.service_item[1]], + ), + get_plan_financing_serializer( + self, + model.plan_financing[0], + model.academy, + model.currency, + model.user, + model.service, + invoices=[model.invoice[0], model.invoice[1]], + financing_options=[], + plans=[model.plan[0], model.plan[1]], + service_items=[model.service_item[0], model.service_item[1]], + ), ], - 'subscriptions': [ - get_subscription_serializer(self, - model.subscription[1], - model.academy, - model.currency, - model.user, - model.service, - invoices=[model.invoice[0], model.invoice[1]], - financing_options=[], - plans=[model.plan[0], model.plan[1]], - service_items=[model.service_item[0], model.service_item[1]]), - get_subscription_serializer(self, - model.subscription[0], - model.academy, - model.currency, - model.user, - model.service, - invoices=[model.invoice[0], model.invoice[1]], - financing_options=[], - plans=[model.plan[0], model.plan[1]], - 
service_items=[model.service_item[0], model.service_item[1]]), + "subscriptions": [ + get_subscription_serializer( + self, + model.subscription[1], + model.academy, + model.currency, + model.user, + model.service, + invoices=[model.invoice[0], model.invoice[1]], + financing_options=[], + plans=[model.plan[0], model.plan[1]], + service_items=[model.service_item[0], model.service_item[1]], + ), + get_subscription_serializer( + self, + model.subscription[0], + model.academy, + model.currency, + model.user, + model.service, + invoices=[model.invoice[0], model.invoice[1]], + financing_options=[], + plans=[model.plan[0], model.plan[1]], + service_items=[model.service_item[0], model.service_item[1]], + ), ], } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) """ 🔽🔽🔽 Get with many PlanFinancing and Subscription, filter by wrong invoice """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__with_many_items__filter_by_wrong_invoice(self): - statuses = ['FREE_TRIAL', 'ACTIVE', 'PAYMENT_ISSUE', 'DEPRECATED', 'CANCELLED', 'ERROR'] + statuses = ["FREE_TRIAL", "ACTIVE", "PAYMENT_ISSUE", "DEPRECATED", "CANCELLED", "ERROR"] chosen_statuses = [random.choice(statuses) for _ in range(2)] - subscriptions = [{ - 'valid_until': x, - 'status': random.choice(chosen_statuses), - } for x in [None, UTC_NOW + timedelta(days=1)]] + subscriptions = [ + { + "valid_until": x, + "status": random.choice(chosen_statuses), + } + for x in [None, UTC_NOW + timedelta(days=1)] + ] plan_financing = { - 'valid_until': UTC_NOW + timedelta(days=1), - 'plan_expires_at': UTC_NOW + timedelta(days=1), - 'status': random.choice(chosen_statuses), - 'monthly_price': random.random() * 99.99 + 0.01, + "valid_until": UTC_NOW + timedelta(days=1), + "plan_expires_at": UTC_NOW + timedelta(days=1), + "status": random.choice(chosen_statuses), + "monthly_price": random.random() * 99.99 + 0.01, } - plan_service_items = [{'service_item_id': x, 'plan_id': 1} for x in range(1, 3)] - plan_service_items += [{'service_item_id': x, 'plan_id': 2} for x in range(1, 3)] - subscription_service_items = [{'service_item_id': x, 'subscription_id': 1} for x in range(1, 3)] - subscription_service_items += [{'service_item_id': x, 'subscription_id': 2} for x in range(1, 3)] - plan = {'is_renewable': False} + plan_service_items = [{"service_item_id": x, "plan_id": 1} for x in range(1, 3)] + plan_service_items += [{"service_item_id": x, "plan_id": 2} for x in range(1, 3)] + subscription_service_items = [{"service_item_id": x, "subscription_id": 1} for x in range(1, 3)] + subscription_service_items += [{"service_item_id": x, "subscription_id": 2} for x in range(1, 3)] + plan = {"is_renewable": False} model = self.bc.database.create( subscription=subscriptions, plan_financing=(2, plan_financing), @@ -794,42 +841,48 @@ def test__with_many_items__filter_by_wrong_invoice(self): ) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:me_subscription') + (f'?invoice=3,4') + 
url = reverse_lazy("payments:me_subscription") + (f"?invoice=3,4") response = self.client.get(url) self.client.force_authenticate(model.user) json = response.json() expected = { - 'plan_financings': [], - 'subscriptions': [], + "plan_financings": [], + "subscriptions": [], } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) """ 🔽🔽🔽 Get with many PlanFinancing and Subscription, filter by good invoice """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__with_many_items__filter_by_good_invoice(self): - subscriptions = [{ - 'valid_until': x, - } for x in [None, UTC_NOW + timedelta(days=1)]] + subscriptions = [ + { + "valid_until": x, + } + for x in [None, UTC_NOW + timedelta(days=1)] + ] plan_financing = { - 'valid_until': UTC_NOW + timedelta(days=1), - 'plan_expires_at': UTC_NOW + timedelta(days=1), - 'monthly_price': random.random() * 99.99 + 0.01, + "valid_until": UTC_NOW + timedelta(days=1), + "plan_expires_at": UTC_NOW + timedelta(days=1), + "monthly_price": random.random() * 99.99 + 0.01, } - plan_service_items = [{'service_item_id': x, 'plan_id': 1} for x in range(1, 3)] - plan_service_items += [{'service_item_id': x, 'plan_id': 2} for x in range(1, 3)] - subscription_service_items = [{'service_item_id': x, 'subscription_id': 1} for x in range(1, 3)] - subscription_service_items += [{'service_item_id': x, 'subscription_id': 2} for x in range(1, 3)] - plan = {'is_renewable': False} + plan_service_items = [{"service_item_id": x, "plan_id": 1} for x in range(1, 3)] + plan_service_items += [{"service_item_id": x, "plan_id": 2} for x in range(1, 3)] + subscription_service_items = [{"service_item_id": x, "subscription_id": 1} for x in range(1, 3)] + subscription_service_items += [{"service_item_id": x, "subscription_id": 2} for x in range(1, 3)] + plan = {"is_renewable": False} model = self.bc.database.create( subscription=subscriptions, plan_financing=(2, plan_financing), @@ -841,88 +894,102 @@ def test__with_many_items__filter_by_good_invoice(self): ) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:me_subscription') + '?invoice=1,2' + url = reverse_lazy("payments:me_subscription") + "?invoice=1,2" response = self.client.get(url) self.client.force_authenticate(model.user) json = response.json() expected = { - 'plan_financings': [ - get_plan_financing_serializer(self, - model.plan_financing[1], - model.academy, - model.currency, - model.user, - model.service, - invoices=[model.invoice[0], model.invoice[1]], - financing_options=[], - plans=[model.plan[0], model.plan[1]], - service_items=[model.service_item[0], model.service_item[1]]), - get_plan_financing_serializer(self, - model.plan_financing[0], - model.academy, - model.currency, - model.user, - model.service, - invoices=[model.invoice[0], model.invoice[1]], - financing_options=[], - plans=[model.plan[0], model.plan[1]], - service_items=[model.service_item[0], model.service_item[1]]), + "plan_financings": [ + 
get_plan_financing_serializer( + self, + model.plan_financing[1], + model.academy, + model.currency, + model.user, + model.service, + invoices=[model.invoice[0], model.invoice[1]], + financing_options=[], + plans=[model.plan[0], model.plan[1]], + service_items=[model.service_item[0], model.service_item[1]], + ), + get_plan_financing_serializer( + self, + model.plan_financing[0], + model.academy, + model.currency, + model.user, + model.service, + invoices=[model.invoice[0], model.invoice[1]], + financing_options=[], + plans=[model.plan[0], model.plan[1]], + service_items=[model.service_item[0], model.service_item[1]], + ), ], - 'subscriptions': [ - get_subscription_serializer(self, - model.subscription[1], - model.academy, - model.currency, - model.user, - model.service, - invoices=[model.invoice[0], model.invoice[1]], - financing_options=[], - plans=[model.plan[0], model.plan[1]], - service_items=[model.service_item[0], model.service_item[1]]), - get_subscription_serializer(self, - model.subscription[0], - model.academy, - model.currency, - model.user, - model.service, - invoices=[model.invoice[0], model.invoice[1]], - financing_options=[], - plans=[model.plan[0], model.plan[1]], - service_items=[model.service_item[0], model.service_item[1]]), + "subscriptions": [ + get_subscription_serializer( + self, + model.subscription[1], + model.academy, + model.currency, + model.user, + model.service, + invoices=[model.invoice[0], model.invoice[1]], + financing_options=[], + plans=[model.plan[0], model.plan[1]], + service_items=[model.service_item[0], model.service_item[1]], + ), + get_subscription_serializer( + self, + model.subscription[0], + model.academy, + model.currency, + model.user, + model.service, + invoices=[model.invoice[0], model.invoice[1]], + financing_options=[], + plans=[model.plan[0], model.plan[1]], + service_items=[model.service_item[0], model.service_item[1]], + ), ], } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) """ 🔽🔽🔽 Get with many PlanFinancing and Subscription, filter by wrong service """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__with_many_items__filter_by_wrong_service(self): - statuses = ['FREE_TRIAL', 'ACTIVE', 'PAYMENT_ISSUE', 'DEPRECATED', 'CANCELLED', 'ERROR'] + statuses = ["FREE_TRIAL", "ACTIVE", "PAYMENT_ISSUE", "DEPRECATED", "CANCELLED", "ERROR"] chosen_statuses = [random.choice(statuses) for _ in range(2)] - subscriptions = [{ - 'valid_until': x, - 'status': random.choice(chosen_statuses), - } for x in [None, UTC_NOW + timedelta(days=1)]] + subscriptions = [ + { + "valid_until": x, + "status": random.choice(chosen_statuses), + } + for x in [None, UTC_NOW + timedelta(days=1)] + ] plan_financing = { - 'valid_until': UTC_NOW + timedelta(days=1), - 'plan_expires_at': UTC_NOW + timedelta(days=1), - 'status': random.choice(chosen_statuses), - 'monthly_price': random.random() * 99.99 + 0.01, + "valid_until": UTC_NOW + timedelta(days=1), + "plan_expires_at": UTC_NOW + 
timedelta(days=1), + "status": random.choice(chosen_statuses), + "monthly_price": random.random() * 99.99 + 0.01, } - plan_service_items = [{'service_item_id': x, 'plan_id': 1} for x in range(1, 3)] - plan_service_items += [{'service_item_id': x, 'plan_id': 2} for x in range(1, 3)] - subscription_service_items = [{'service_item_id': x, 'subscription_id': 1} for x in range(1, 3)] - subscription_service_items += [{'service_item_id': x, 'subscription_id': 2} for x in range(1, 3)] - plan = {'is_renewable': False} + plan_service_items = [{"service_item_id": x, "plan_id": 1} for x in range(1, 3)] + plan_service_items += [{"service_item_id": x, "plan_id": 2} for x in range(1, 3)] + subscription_service_items = [{"service_item_id": x, "subscription_id": 1} for x in range(1, 3)] + subscription_service_items += [{"service_item_id": x, "subscription_id": 2} for x in range(1, 3)] + plan = {"is_renewable": False} model = self.bc.database.create( subscription=subscriptions, plan_financing=(2, plan_financing), @@ -934,43 +1001,50 @@ def test__with_many_items__filter_by_wrong_service(self): ) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:me_subscription') + (f'?invoice={random.choice([3, "gangsters-i"])},' - f'{random.choice([4, "gangsters-ii"])}') + url = reverse_lazy("payments:me_subscription") + ( + f'?invoice={random.choice([3, "gangsters-i"])},' f'{random.choice([4, "gangsters-ii"])}' + ) response = self.client.get(url) self.client.force_authenticate(model.user) json = response.json() expected = { - 'plan_financings': [], - 'subscriptions': [], + "plan_financings": [], + "subscriptions": [], } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) """ 🔽🔽🔽 Get with many PlanFinancing and Subscription, filter by good service """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__with_many_items__filter_by_good_service(self): - subscriptions = [{ - 'valid_until': x, - } for x in [None, UTC_NOW + timedelta(days=1)]] + subscriptions = [ + { + "valid_until": x, + } + for x in [None, UTC_NOW + timedelta(days=1)] + ] plan_financing = { - 'valid_until': UTC_NOW + timedelta(days=1), - 'plan_expires_at': UTC_NOW + timedelta(days=1), - 'monthly_price': random.random() * 99.99 + 0.01, + "valid_until": UTC_NOW + timedelta(days=1), + "plan_expires_at": UTC_NOW + timedelta(days=1), + "monthly_price": random.random() * 99.99 + 0.01, } - plan_service_items = [{'service_item_id': x, 'plan_id': 1} for x in range(1, 3)] - plan_service_items += [{'service_item_id': x, 'plan_id': 2} for x in range(1, 3)] - subscription_service_items = [{'service_item_id': x, 'subscription_id': 1} for x in range(1, 3)] - subscription_service_items += [{'service_item_id': x, 'subscription_id': 2} for x in range(1, 3)] - plan = {'is_renewable': False} + plan_service_items = [{"service_item_id": x, "plan_id": 1} for x in range(1, 3)] + plan_service_items += [{"service_item_id": x, "plan_id": 2} for x in range(1, 3)] + 
subscription_service_items = [{"service_item_id": x, "subscription_id": 1} for x in range(1, 3)] + subscription_service_items += [{"service_item_id": x, "subscription_id": 2} for x in range(1, 3)] + plan = {"is_renewable": False} model = self.bc.database.create( subscription=subscriptions, plan_financing=(2, plan_financing), @@ -982,90 +1056,105 @@ def test__with_many_items__filter_by_good_service(self): ) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:me_subscription') + ( - f'?service={random.choice([model.service.id, model.service.slug])},' - f'{random.choice([model.service.id, model.service.slug])}') + url = reverse_lazy("payments:me_subscription") + ( + f"?service={random.choice([model.service.id, model.service.slug])}," + f"{random.choice([model.service.id, model.service.slug])}" + ) response = self.client.get(url) self.client.force_authenticate(model.user) json = response.json() expected = { - 'plan_financings': [ - get_plan_financing_serializer(self, - model.plan_financing[1], - model.academy, - model.currency, - model.user, - model.service, - invoices=[model.invoice[0], model.invoice[1]], - financing_options=[], - plans=[model.plan[0], model.plan[1]], - service_items=[model.service_item[0], model.service_item[1]]), - get_plan_financing_serializer(self, - model.plan_financing[0], - model.academy, - model.currency, - model.user, - model.service, - invoices=[model.invoice[0], model.invoice[1]], - financing_options=[], - plans=[model.plan[0], model.plan[1]], - service_items=[model.service_item[0], model.service_item[1]]), + "plan_financings": [ + get_plan_financing_serializer( + self, + model.plan_financing[1], + model.academy, + model.currency, + model.user, + model.service, + invoices=[model.invoice[0], model.invoice[1]], + financing_options=[], + plans=[model.plan[0], model.plan[1]], + service_items=[model.service_item[0], model.service_item[1]], + ), + get_plan_financing_serializer( + self, + model.plan_financing[0], + model.academy, + model.currency, + model.user, + model.service, + invoices=[model.invoice[0], model.invoice[1]], + financing_options=[], + plans=[model.plan[0], model.plan[1]], + service_items=[model.service_item[0], model.service_item[1]], + ), ], - 'subscriptions': [ - get_subscription_serializer(self, - model.subscription[1], - model.academy, - model.currency, - model.user, - model.service, - invoices=[model.invoice[0], model.invoice[1]], - financing_options=[], - plans=[model.plan[0], model.plan[1]], - service_items=[model.service_item[0], model.service_item[1]]), - get_subscription_serializer(self, - model.subscription[0], - model.academy, - model.currency, - model.user, - model.service, - invoices=[model.invoice[0], model.invoice[1]], - financing_options=[], - plans=[model.plan[0], model.plan[1]], - service_items=[model.service_item[0], model.service_item[1]]), + "subscriptions": [ + get_subscription_serializer( + self, + model.subscription[1], + model.academy, + model.currency, + model.user, + model.service, + invoices=[model.invoice[0], model.invoice[1]], + financing_options=[], + plans=[model.plan[0], model.plan[1]], + service_items=[model.service_item[0], model.service_item[1]], + ), + get_subscription_serializer( + self, + model.subscription[0], + model.academy, + model.currency, + model.user, + model.service, + invoices=[model.invoice[0], model.invoice[1]], + financing_options=[], + plans=[model.plan[0], model.plan[1]], + service_items=[model.service_item[0], model.service_item[1]], + ), ], } self.assertEqual(json, expected) 
self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) """ 🔽🔽🔽 Get with many PlanFinancing and Subscription, filter by wrong plan """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__with_many_items__filter_by_wrong_plan(self): - statuses = ['FREE_TRIAL', 'ACTIVE', 'PAYMENT_ISSUE', 'DEPRECATED', 'CANCELLED', 'ERROR'] + statuses = ["FREE_TRIAL", "ACTIVE", "PAYMENT_ISSUE", "DEPRECATED", "CANCELLED", "ERROR"] chosen_statuses = [random.choice(statuses) for _ in range(2)] - subscriptions = [{ - 'valid_until': x, - 'status': random.choice(chosen_statuses), - } for x in [None, UTC_NOW + timedelta(days=1)]] + subscriptions = [ + { + "valid_until": x, + "status": random.choice(chosen_statuses), + } + for x in [None, UTC_NOW + timedelta(days=1)] + ] plan_financing = { - 'valid_until': UTC_NOW + timedelta(days=1), - 'status': random.choice(chosen_statuses), - 'plan_expires_at': UTC_NOW + timedelta(days=1), - 'monthly_price': random.random() * 99.99 + 0.01, + "valid_until": UTC_NOW + timedelta(days=1), + "status": random.choice(chosen_statuses), + "plan_expires_at": UTC_NOW + timedelta(days=1), + "monthly_price": random.random() * 99.99 + 0.01, } - plan_service_items = [{'service_item_id': x, 'plan_id': 1} for x in range(1, 3)] - plan_service_items += [{'service_item_id': x, 'plan_id': 2} for x in range(1, 3)] - subscription_service_items = [{'service_item_id': x, 'subscription_id': 1} for x in range(1, 3)] - subscription_service_items += [{'service_item_id': x, 'subscription_id': 2} for x in range(1, 3)] - plan = {'is_renewable': False} + plan_service_items = [{"service_item_id": x, "plan_id": 1} for x in range(1, 3)] + plan_service_items += [{"service_item_id": x, "plan_id": 2} for x in range(1, 3)] + subscription_service_items = [{"service_item_id": x, "subscription_id": 1} for x in range(1, 3)] + subscription_service_items += [{"service_item_id": x, "subscription_id": 2} for x in range(1, 3)] + plan = {"is_renewable": False} model = self.bc.database.create( subscription=subscriptions, plan_financing=(2, plan_financing), @@ -1077,43 +1166,50 @@ def test__with_many_items__filter_by_wrong_plan(self): ) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:me_subscription') + (f'?plan={random.choice([3, "gangsters-i"])},' - f'{random.choice([4, "gangsters-ii"])}') + url = reverse_lazy("payments:me_subscription") + ( + f'?plan={random.choice([3, "gangsters-i"])},' f'{random.choice([4, "gangsters-ii"])}' + ) response = self.client.get(url) self.client.force_authenticate(model.user) json = response.json() expected = { - 'plan_financings': [], - 'subscriptions': [], + "plan_financings": [], + "subscriptions": [], } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + 
self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) """ 🔽🔽🔽 Get with many PlanFinancing and Subscription, filter by good plan """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__with_many_items__filter_by_good_plan(self): - subscriptions = [{ - 'valid_until': x, - } for x in [None, UTC_NOW + timedelta(days=1)]] + subscriptions = [ + { + "valid_until": x, + } + for x in [None, UTC_NOW + timedelta(days=1)] + ] plan_financing = { - 'valid_until': UTC_NOW + timedelta(days=1), - 'plan_expires_at': UTC_NOW + timedelta(days=1), - 'monthly_price': random.random() * 99.99 + 0.01, + "valid_until": UTC_NOW + timedelta(days=1), + "plan_expires_at": UTC_NOW + timedelta(days=1), + "monthly_price": random.random() * 99.99 + 0.01, } - plan_service_items = [{'service_item_id': x, 'plan_id': 1} for x in range(1, 3)] - plan_service_items += [{'service_item_id': x, 'plan_id': 2} for x in range(1, 3)] - subscription_service_items = [{'service_item_id': x, 'subscription_id': 1} for x in range(1, 3)] - subscription_service_items += [{'service_item_id': x, 'subscription_id': 2} for x in range(1, 3)] - plan = {'is_renewable': False} + plan_service_items = [{"service_item_id": x, "plan_id": 1} for x in range(1, 3)] + plan_service_items += [{"service_item_id": x, "plan_id": 2} for x in range(1, 3)] + subscription_service_items = [{"service_item_id": x, "subscription_id": 1} for x in range(1, 3)] + subscription_service_items += [{"service_item_id": x, "subscription_id": 2} for x in range(1, 3)] + plan = {"is_renewable": False} model = self.bc.database.create( subscription=subscriptions, plan_financing=(2, plan_financing), @@ -1125,518 +1221,626 @@ def test__with_many_items__filter_by_good_plan(self): ) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:me_subscription') + ( - f'?plan={random.choice([model.plan[0].id, model.plan[0].slug])},' - f'{random.choice([model.plan[1].id, model.plan[1].slug])}') + url = reverse_lazy("payments:me_subscription") + ( + f"?plan={random.choice([model.plan[0].id, model.plan[0].slug])}," + f"{random.choice([model.plan[1].id, model.plan[1].slug])}" + ) response = self.client.get(url) self.client.force_authenticate(model.user) json = response.json() expected = { - 'plan_financings': [ - get_plan_financing_serializer(self, - model.plan_financing[1], - model.academy, - model.currency, - model.user, - model.service, - invoices=[model.invoice[0], model.invoice[1]], - financing_options=[], - plans=[model.plan[0], model.plan[1]], - service_items=[model.service_item[0], model.service_item[1]]), - get_plan_financing_serializer(self, - model.plan_financing[0], - model.academy, - model.currency, - model.user, - model.service, - invoices=[model.invoice[0], model.invoice[1]], - financing_options=[], - plans=[model.plan[0], model.plan[1]], - service_items=[model.service_item[0], model.service_item[1]]), + "plan_financings": [ + get_plan_financing_serializer( + self, + model.plan_financing[1], + model.academy, + model.currency, + model.user, + model.service, + invoices=[model.invoice[0], model.invoice[1]], + financing_options=[], + plans=[model.plan[0], model.plan[1]], + service_items=[model.service_item[0], model.service_item[1]], + ), + get_plan_financing_serializer( + self, + model.plan_financing[0], 
+ model.academy, + model.currency, + model.user, + model.service, + invoices=[model.invoice[0], model.invoice[1]], + financing_options=[], + plans=[model.plan[0], model.plan[1]], + service_items=[model.service_item[0], model.service_item[1]], + ), ], - 'subscriptions': [ - get_subscription_serializer(self, - model.subscription[1], - model.academy, - model.currency, - model.user, - model.service, - invoices=[model.invoice[0], model.invoice[1]], - financing_options=[], - plans=[model.plan[0], model.plan[1]], - service_items=[model.service_item[0], model.service_item[1]]), - get_subscription_serializer(self, - model.subscription[0], - model.academy, - model.currency, - model.user, - model.service, - invoices=[model.invoice[0], model.invoice[1]], - financing_options=[], - plans=[model.plan[0], model.plan[1]], - service_items=[model.service_item[0], model.service_item[1]]), + "subscriptions": [ + get_subscription_serializer( + self, + model.subscription[1], + model.academy, + model.currency, + model.user, + model.service, + invoices=[model.invoice[0], model.invoice[1]], + financing_options=[], + plans=[model.plan[0], model.plan[1]], + service_items=[model.service_item[0], model.service_item[1]], + ), + get_subscription_serializer( + self, + model.subscription[0], + model.academy, + model.currency, + model.user, + model.service, + invoices=[model.invoice[0], model.invoice[1]], + financing_options=[], + plans=[model.plan[0], model.plan[1]], + service_items=[model.service_item[0], model.service_item[1]], + ), ], } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) """ 🔽🔽🔽 Get with many PlanFinancing and Subscription, filter by wrong cohort """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__with_many_items__filter_by_wrong_cohort(self): - subscriptions = [{ - 'valid_until': x, - 'selected_cohort_set_id': None, - } for x in [None, UTC_NOW + timedelta(days=1)]] - plan_financings = [{ - 'valid_until': UTC_NOW + timedelta(days=1), - 'plan_expires_at': UTC_NOW + timedelta(days=1), - 'monthly_price': random.random() * 99.99 + 0.01, - 'selected_cohort_set_id': None, - } for _ in range(1, 3)] - plan_service_items = [{'service_item_id': x, 'plan_id': 1} for x in range(1, 3)] - plan_service_items += [{'service_item_id': x, 'plan_id': 2} for x in range(1, 3)] - subscription_service_items = [{'service_item_id': x, 'subscription_id': 1} for x in range(1, 3)] - subscription_service_items += [{'service_item_id': x, 'subscription_id': 2} for x in range(1, 3)] - plan = {'is_renewable': False} - academy = {'available_as_saas': True} - model = self.bc.database.create(subscription=subscriptions, - plan_financing=plan_financings, - plan_service_item=plan_service_items, - subscription_service_item=subscription_service_items, - invoice=2, - plan=(2, plan), - service_item=2, - cohort_set=2, - academy=academy) + subscriptions = [ + { + "valid_until": x, + "selected_cohort_set_id": None, + } + for x in [None, UTC_NOW + timedelta(days=1)] + 
] + plan_financings = [ + { + "valid_until": UTC_NOW + timedelta(days=1), + "plan_expires_at": UTC_NOW + timedelta(days=1), + "monthly_price": random.random() * 99.99 + 0.01, + "selected_cohort_set_id": None, + } + for _ in range(1, 3) + ] + plan_service_items = [{"service_item_id": x, "plan_id": 1} for x in range(1, 3)] + plan_service_items += [{"service_item_id": x, "plan_id": 2} for x in range(1, 3)] + subscription_service_items = [{"service_item_id": x, "subscription_id": 1} for x in range(1, 3)] + subscription_service_items += [{"service_item_id": x, "subscription_id": 2} for x in range(1, 3)] + plan = {"is_renewable": False} + academy = {"available_as_saas": True} + model = self.bc.database.create( + subscription=subscriptions, + plan_financing=plan_financings, + plan_service_item=plan_service_items, + subscription_service_item=subscription_service_items, + invoice=2, + plan=(2, plan), + service_item=2, + cohort_set=2, + academy=academy, + ) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:me_subscription') + (f'?cohort-set-selected={random.choice([1, "slug1"])},' - f'{random.choice([2, "slug2"])}') + url = reverse_lazy("payments:me_subscription") + ( + f'?cohort-set-selected={random.choice([1, "slug1"])},' f'{random.choice([2, "slug2"])}' + ) response = self.client.get(url) self.client.force_authenticate(model.user) json = response.json() expected = { - 'plan_financings': [], - 'subscriptions': [], + "plan_financings": [], + "subscriptions": [], } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) """ 🔽🔽🔽 Get with many PlanFinancing and Subscription, filter by good cohort """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__with_many_items__filter_by_good_cohort(self): - subscriptions = [{ - 'valid_until': x, - 'selected_cohort_set_id': y, - } for x, y in [(None, 1), (UTC_NOW + timedelta(days=1), 2)]] - plan_financings = [{ - 'valid_until': UTC_NOW + timedelta(days=1), - 'plan_expires_at': UTC_NOW + timedelta(days=1), - 'monthly_price': random.random() * 99.99 + 0.01, - 'selected_cohort_set_id': x, - } for x in range(1, 3)] - plan_service_items = [{'service_item_id': x, 'plan_id': 1} for x in range(1, 3)] - plan_service_items += [{'service_item_id': x, 'plan_id': 2} for x in range(1, 3)] - subscription_service_items = [{'service_item_id': x, 'subscription_id': 1} for x in range(1, 3)] - subscription_service_items += [{'service_item_id': x, 'subscription_id': 2} for x in range(1, 3)] - plan = {'is_renewable': False} - cohort_set_cohorts = [{'cohort_id': 1, 'cohort_set_id': x} for x in range(1, 3)] - cohort = {'available_as_saas': True} - academy = {'available_as_saas': True} - model = self.bc.database.create(subscription=subscriptions, - plan_financing=plan_financings, - plan_service_item=plan_service_items, - subscription_service_item=subscription_service_items, - invoice=2, - plan=(2, plan), - service_item=2, - cohort=cohort, - cohort_set=2, - 
cohort_set_cohort=cohort_set_cohorts, - academy=academy) + subscriptions = [ + { + "valid_until": x, + "selected_cohort_set_id": y, + } + for x, y in [(None, 1), (UTC_NOW + timedelta(days=1), 2)] + ] + plan_financings = [ + { + "valid_until": UTC_NOW + timedelta(days=1), + "plan_expires_at": UTC_NOW + timedelta(days=1), + "monthly_price": random.random() * 99.99 + 0.01, + "selected_cohort_set_id": x, + } + for x in range(1, 3) + ] + plan_service_items = [{"service_item_id": x, "plan_id": 1} for x in range(1, 3)] + plan_service_items += [{"service_item_id": x, "plan_id": 2} for x in range(1, 3)] + subscription_service_items = [{"service_item_id": x, "subscription_id": 1} for x in range(1, 3)] + subscription_service_items += [{"service_item_id": x, "subscription_id": 2} for x in range(1, 3)] + plan = {"is_renewable": False} + cohort_set_cohorts = [{"cohort_id": 1, "cohort_set_id": x} for x in range(1, 3)] + cohort = {"available_as_saas": True} + academy = {"available_as_saas": True} + model = self.bc.database.create( + subscription=subscriptions, + plan_financing=plan_financings, + plan_service_item=plan_service_items, + subscription_service_item=subscription_service_items, + invoice=2, + plan=(2, plan), + service_item=2, + cohort=cohort, + cohort_set=2, + cohort_set_cohort=cohort_set_cohorts, + academy=academy, + ) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:me_subscription') + ( - f'?cohort-set-selected={random.choice([model.cohort_set[0].id, model.cohort_set[0].slug])},' - f'{random.choice([model.cohort_set[1].id, model.cohort_set[1].slug])}') + url = reverse_lazy("payments:me_subscription") + ( + f"?cohort-set-selected={random.choice([model.cohort_set[0].id, model.cohort_set[0].slug])}," + f"{random.choice([model.cohort_set[1].id, model.cohort_set[1].slug])}" + ) response = self.client.get(url) self.client.force_authenticate(model.user) json = response.json() expected = { - 'plan_financings': [ - get_plan_financing_serializer(self, - model.plan_financing[1], - model.academy, - model.currency, - model.user, - model.service, - cohort_set=model.cohort_set[1], - invoices=[model.invoice[0], model.invoice[1]], - financing_options=[], - plans=[model.plan[0], model.plan[1]], - cohorts=[model.cohort], - service_items=[model.service_item[0], model.service_item[1]]), - get_plan_financing_serializer(self, - model.plan_financing[0], - model.academy, - model.currency, - model.user, - model.service, - cohort_set=model.cohort_set[0], - invoices=[model.invoice[0], model.invoice[1]], - financing_options=[], - plans=[model.plan[0], model.plan[1]], - cohorts=[model.cohort], - service_items=[model.service_item[0], model.service_item[1]]), + "plan_financings": [ + get_plan_financing_serializer( + self, + model.plan_financing[1], + model.academy, + model.currency, + model.user, + model.service, + cohort_set=model.cohort_set[1], + invoices=[model.invoice[0], model.invoice[1]], + financing_options=[], + plans=[model.plan[0], model.plan[1]], + cohorts=[model.cohort], + service_items=[model.service_item[0], model.service_item[1]], + ), + get_plan_financing_serializer( + self, + model.plan_financing[0], + model.academy, + model.currency, + model.user, + model.service, + cohort_set=model.cohort_set[0], + invoices=[model.invoice[0], model.invoice[1]], + financing_options=[], + plans=[model.plan[0], model.plan[1]], + cohorts=[model.cohort], + service_items=[model.service_item[0], model.service_item[1]], + ), ], - 'subscriptions': [ - get_subscription_serializer(self, - 
model.subscription[1], - model.academy, - model.currency, - model.user, - model.service, - cohort_set=model.cohort_set[1], - invoices=[model.invoice[0], model.invoice[1]], - financing_options=[], - plans=[model.plan[0], model.plan[1]], - cohorts=[model.cohort], - service_items=[model.service_item[0], model.service_item[1]]), - get_subscription_serializer(self, - model.subscription[0], - model.academy, - model.currency, - model.user, - model.service, - cohort_set=model.cohort_set[0], - invoices=[model.invoice[0], model.invoice[1]], - financing_options=[], - plans=[model.plan[0], model.plan[1]], - cohorts=[model.cohort], - service_items=[model.service_item[0], model.service_item[1]]), + "subscriptions": [ + get_subscription_serializer( + self, + model.subscription[1], + model.academy, + model.currency, + model.user, + model.service, + cohort_set=model.cohort_set[1], + invoices=[model.invoice[0], model.invoice[1]], + financing_options=[], + plans=[model.plan[0], model.plan[1]], + cohorts=[model.cohort], + service_items=[model.service_item[0], model.service_item[1]], + ), + get_subscription_serializer( + self, + model.subscription[0], + model.academy, + model.currency, + model.user, + model.service, + cohort_set=model.cohort_set[0], + invoices=[model.invoice[0], model.invoice[1]], + financing_options=[], + plans=[model.plan[0], model.plan[1]], + cohorts=[model.cohort], + service_items=[model.service_item[0], model.service_item[1]], + ), ], } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) """ 🔽🔽🔽 Get with many PlanFinancing and Subscription, filter by wrong MentorshipServiceSet """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__with_many_items__filter_by_wrong_mentorship_service_set(self): - subscriptions = [{ - 'valid_until': x, - 'selected_mentorship_service_set_id': None, - } for x in [None, UTC_NOW + timedelta(days=1)]] - plan_financings = [{ - 'valid_until': UTC_NOW + timedelta(days=1), - 'plan_expires_at': UTC_NOW + timedelta(days=1), - 'monthly_price': random.random() * 99.99 + 0.01, - 'selected_mentorship_service_set_id': None, - } for _ in range(1, 3)] - plan_service_items = [{'service_item_id': x, 'plan_id': 1} for x in range(1, 3)] - plan_service_items += [{'service_item_id': x, 'plan_id': 2} for x in range(1, 3)] - subscription_service_items = [{'service_item_id': x, 'subscription_id': 1} for x in range(1, 3)] - subscription_service_items += [{'service_item_id': x, 'subscription_id': 2} for x in range(1, 3)] - plan = {'is_renewable': False} - academy = {'available_as_saas': True} - model = self.bc.database.create(subscription=subscriptions, - plan_financing=plan_financings, - plan_service_item=plan_service_items, - subscription_service_item=subscription_service_items, - invoice=2, - plan=(2, plan), - service_item=2, - cohort_set=2, - academy=academy) + subscriptions = [ + { + "valid_until": x, + "selected_mentorship_service_set_id": None, + } + for x in [None, UTC_NOW + timedelta(days=1)] + ] + 
plan_financings = [ + { + "valid_until": UTC_NOW + timedelta(days=1), + "plan_expires_at": UTC_NOW + timedelta(days=1), + "monthly_price": random.random() * 99.99 + 0.01, + "selected_mentorship_service_set_id": None, + } + for _ in range(1, 3) + ] + plan_service_items = [{"service_item_id": x, "plan_id": 1} for x in range(1, 3)] + plan_service_items += [{"service_item_id": x, "plan_id": 2} for x in range(1, 3)] + subscription_service_items = [{"service_item_id": x, "subscription_id": 1} for x in range(1, 3)] + subscription_service_items += [{"service_item_id": x, "subscription_id": 2} for x in range(1, 3)] + plan = {"is_renewable": False} + academy = {"available_as_saas": True} + model = self.bc.database.create( + subscription=subscriptions, + plan_financing=plan_financings, + plan_service_item=plan_service_items, + subscription_service_item=subscription_service_items, + invoice=2, + plan=(2, plan), + service_item=2, + cohort_set=2, + academy=academy, + ) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:me_subscription') + (f'?cohort-set-selected={random.choice([3, "slug1"])},' - f'{random.choice([4, "slug2"])}') + url = reverse_lazy("payments:me_subscription") + ( + f'?cohort-set-selected={random.choice([3, "slug1"])},' f'{random.choice([4, "slug2"])}' + ) response = self.client.get(url) self.client.force_authenticate(model.user) json = response.json() expected = { - 'plan_financings': [], - 'subscriptions': [], + "plan_financings": [], + "subscriptions": [], } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) """ 🔽🔽🔽 Get with many PlanFinancing and Subscription, filter by good MentorshipServiceSet """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__with_many_items__filter_by_good_mentorship_service_set(self): - subscriptions = [{ - 'valid_until': x, - 'selected_mentorship_service_set_id': y, - 'selected_event_type_set_id': None, - } for x, y in [(None, 1), (UTC_NOW + timedelta(days=1), 2)]] - plan_financings = [{ - 'valid_until': UTC_NOW + timedelta(days=1), - 'plan_expires_at': UTC_NOW + timedelta(days=1), - 'monthly_price': random.random() * 99.99 + 0.01, - 'selected_mentorship_service_set_id': x, - 'selected_event_type_set_id': None, - } for x in range(1, 3)] - plan_service_items = [{'service_item_id': x, 'plan_id': 1} for x in range(1, 3)] - plan_service_items += [{'service_item_id': x, 'plan_id': 2} for x in range(1, 3)] - subscription_service_items = [{'service_item_id': x, 'subscription_id': 1} for x in range(1, 3)] - subscription_service_items += [{'service_item_id': x, 'subscription_id': 2} for x in range(1, 3)] - plan = {'is_renewable': False} - model = self.bc.database.create(subscription=subscriptions, - plan_financing=plan_financings, - plan_service_item=plan_service_items, - subscription_service_item=subscription_service_items, - invoice=2, - plan=(2, plan), - service_item=2, - mentorship_service_set=2) + subscriptions = [ + { + "valid_until": x, + 
"selected_mentorship_service_set_id": y, + "selected_event_type_set_id": None, + } + for x, y in [(None, 1), (UTC_NOW + timedelta(days=1), 2)] + ] + plan_financings = [ + { + "valid_until": UTC_NOW + timedelta(days=1), + "plan_expires_at": UTC_NOW + timedelta(days=1), + "monthly_price": random.random() * 99.99 + 0.01, + "selected_mentorship_service_set_id": x, + "selected_event_type_set_id": None, + } + for x in range(1, 3) + ] + plan_service_items = [{"service_item_id": x, "plan_id": 1} for x in range(1, 3)] + plan_service_items += [{"service_item_id": x, "plan_id": 2} for x in range(1, 3)] + subscription_service_items = [{"service_item_id": x, "subscription_id": 1} for x in range(1, 3)] + subscription_service_items += [{"service_item_id": x, "subscription_id": 2} for x in range(1, 3)] + plan = {"is_renewable": False} + model = self.bc.database.create( + subscription=subscriptions, + plan_financing=plan_financings, + plan_service_item=plan_service_items, + subscription_service_item=subscription_service_items, + invoice=2, + plan=(2, plan), + service_item=2, + mentorship_service_set=2, + ) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:me_subscription') + ( - '?mentorship-service-set-selected=' - f'{random.choice([model.mentorship_service_set[0].id,model.mentorship_service_set[0].slug])},' - f'{random.choice([model.mentorship_service_set[1].id, model.mentorship_service_set[1].slug])}') + url = reverse_lazy("payments:me_subscription") + ( + "?mentorship-service-set-selected=" + f"{random.choice([model.mentorship_service_set[0].id,model.mentorship_service_set[0].slug])}," + f"{random.choice([model.mentorship_service_set[1].id, model.mentorship_service_set[1].slug])}" + ) response = self.client.get(url) self.client.force_authenticate(model.user) json = response.json() expected = { - 'plan_financings': [ - get_plan_financing_serializer(self, - model.plan_financing[1], - model.academy, - model.currency, - model.user, - model.service, - mentorship_service_set=model.mentorship_service_set[1], - invoices=[model.invoice[0], model.invoice[1]], - financing_options=[], - plans=[model.plan[0], model.plan[1]], - service_items=[model.service_item[0], model.service_item[1]]), - get_plan_financing_serializer(self, - model.plan_financing[0], - model.academy, - model.currency, - model.user, - model.service, - mentorship_service_set=model.mentorship_service_set[0], - invoices=[model.invoice[0], model.invoice[1]], - financing_options=[], - plans=[model.plan[0], model.plan[1]], - service_items=[model.service_item[0], model.service_item[1]]), + "plan_financings": [ + get_plan_financing_serializer( + self, + model.plan_financing[1], + model.academy, + model.currency, + model.user, + model.service, + mentorship_service_set=model.mentorship_service_set[1], + invoices=[model.invoice[0], model.invoice[1]], + financing_options=[], + plans=[model.plan[0], model.plan[1]], + service_items=[model.service_item[0], model.service_item[1]], + ), + get_plan_financing_serializer( + self, + model.plan_financing[0], + model.academy, + model.currency, + model.user, + model.service, + mentorship_service_set=model.mentorship_service_set[0], + invoices=[model.invoice[0], model.invoice[1]], + financing_options=[], + plans=[model.plan[0], model.plan[1]], + service_items=[model.service_item[0], model.service_item[1]], + ), ], - 'subscriptions': [ - get_subscription_serializer(self, - model.subscription[1], - model.academy, - model.currency, - model.user, - model.service, - 
mentorship_service_set=model.mentorship_service_set[1], - invoices=[model.invoice[0], model.invoice[1]], - financing_options=[], - plans=[model.plan[0], model.plan[1]], - service_items=[model.service_item[0], model.service_item[1]]), - get_subscription_serializer(self, - model.subscription[0], - model.academy, - model.currency, - model.user, - model.service, - mentorship_service_set=model.mentorship_service_set[0], - invoices=[model.invoice[0], model.invoice[1]], - financing_options=[], - plans=[model.plan[0], model.plan[1]], - service_items=[model.service_item[0], model.service_item[1]]), + "subscriptions": [ + get_subscription_serializer( + self, + model.subscription[1], + model.academy, + model.currency, + model.user, + model.service, + mentorship_service_set=model.mentorship_service_set[1], + invoices=[model.invoice[0], model.invoice[1]], + financing_options=[], + plans=[model.plan[0], model.plan[1]], + service_items=[model.service_item[0], model.service_item[1]], + ), + get_subscription_serializer( + self, + model.subscription[0], + model.academy, + model.currency, + model.user, + model.service, + mentorship_service_set=model.mentorship_service_set[0], + invoices=[model.invoice[0], model.invoice[1]], + financing_options=[], + plans=[model.plan[0], model.plan[1]], + service_items=[model.service_item[0], model.service_item[1]], + ), ], } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) """ 🔽🔽🔽 Get with many PlanFinancing and Subscription, filter by wrong EventTypeSet """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__with_many_items__filter_by_wrong_event_type_set(self): - subscriptions = [{ - 'valid_until': x, - 'selected_event_type_set_id': None, - } for x in [None, UTC_NOW + timedelta(days=1)]] - plan_financings = [{ - 'valid_until': UTC_NOW + timedelta(days=1), - 'plan_expires_at': UTC_NOW + timedelta(days=1), - 'monthly_price': random.random() * 99.99 + 0.01, - 'selected_event_type_set_id': None, - } for _ in range(1, 3)] - plan_service_items = [{'service_item_id': x, 'plan_id': 1} for x in range(1, 3)] - plan_service_items += [{'service_item_id': x, 'plan_id': 2} for x in range(1, 3)] - subscription_service_items = [{'service_item_id': x, 'subscription_id': 1} for x in range(1, 3)] - subscription_service_items += [{'service_item_id': x, 'subscription_id': 2} for x in range(1, 3)] - plan = {'is_renewable': False} - academy = {'available_as_saas': True} - model = self.bc.database.create(subscription=subscriptions, - plan_financing=plan_financings, - plan_service_item=plan_service_items, - subscription_service_item=subscription_service_items, - invoice=2, - plan=(2, plan), - service_item=2, - cohort_set=2, - academy=academy) + subscriptions = [ + { + "valid_until": x, + "selected_event_type_set_id": None, + } + for x in [None, UTC_NOW + timedelta(days=1)] + ] + plan_financings = [ + { + "valid_until": UTC_NOW + timedelta(days=1), + "plan_expires_at": UTC_NOW + timedelta(days=1), + "monthly_price": 
random.random() * 99.99 + 0.01, + "selected_event_type_set_id": None, + } + for _ in range(1, 3) + ] + plan_service_items = [{"service_item_id": x, "plan_id": 1} for x in range(1, 3)] + plan_service_items += [{"service_item_id": x, "plan_id": 2} for x in range(1, 3)] + subscription_service_items = [{"service_item_id": x, "subscription_id": 1} for x in range(1, 3)] + subscription_service_items += [{"service_item_id": x, "subscription_id": 2} for x in range(1, 3)] + plan = {"is_renewable": False} + academy = {"available_as_saas": True} + model = self.bc.database.create( + subscription=subscriptions, + plan_financing=plan_financings, + plan_service_item=plan_service_items, + subscription_service_item=subscription_service_items, + invoice=2, + plan=(2, plan), + service_item=2, + cohort_set=2, + academy=academy, + ) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:me_subscription') + (f'?event-type-set-selected={random.choice([1, "slug1"])},' - f'{random.choice([2, "slug2"])}') + url = reverse_lazy("payments:me_subscription") + ( + f'?event-type-set-selected={random.choice([1, "slug1"])},' f'{random.choice([2, "slug2"])}' + ) response = self.client.get(url) self.client.force_authenticate(model.user) json = response.json() expected = { - 'plan_financings': [], - 'subscriptions': [], + "plan_financings": [], + "subscriptions": [], } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) """ 🔽🔽🔽 Get with many PlanFinancing and Subscription, filter by good MentorshipServiceSet """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__with_many_items__filter_by_good_event_type_set(self): - subscriptions = [{ - 'valid_until': x, - 'selected_event_type_set_id': y, - } for x, y in [(None, 1), (UTC_NOW + timedelta(days=1), 2)]] - plan_financings = [{ - 'valid_until': UTC_NOW + timedelta(days=1), - 'plan_expires_at': UTC_NOW + timedelta(days=1), - 'monthly_price': random.random() * 99.99 + 0.01, - 'selected_event_type_set_id': x, - } for x in range(1, 3)] - plan_service_items = [{'service_item_id': x, 'plan_id': 1} for x in range(1, 3)] - plan_service_items += [{'service_item_id': x, 'plan_id': 2} for x in range(1, 3)] - subscription_service_items = [{'service_item_id': x, 'subscription_id': 1} for x in range(1, 3)] - subscription_service_items += [{'service_item_id': x, 'subscription_id': 2} for x in range(1, 3)] - plan = {'is_renewable': False} - model = self.bc.database.create(subscription=subscriptions, - plan_financing=plan_financings, - plan_service_item=plan_service_items, - subscription_service_item=subscription_service_items, - invoice=2, - plan=(2, plan), - service_item=2, - event_type_set=2) + subscriptions = [ + { + "valid_until": x, + "selected_event_type_set_id": y, + } + for x, y in [(None, 1), (UTC_NOW + timedelta(days=1), 2)] + ] + plan_financings = [ + { + "valid_until": UTC_NOW + timedelta(days=1), + "plan_expires_at": UTC_NOW + timedelta(days=1), + "monthly_price": random.random() * 
99.99 + 0.01, + "selected_event_type_set_id": x, + } + for x in range(1, 3) + ] + plan_service_items = [{"service_item_id": x, "plan_id": 1} for x in range(1, 3)] + plan_service_items += [{"service_item_id": x, "plan_id": 2} for x in range(1, 3)] + subscription_service_items = [{"service_item_id": x, "subscription_id": 1} for x in range(1, 3)] + subscription_service_items += [{"service_item_id": x, "subscription_id": 2} for x in range(1, 3)] + plan = {"is_renewable": False} + model = self.bc.database.create( + subscription=subscriptions, + plan_financing=plan_financings, + plan_service_item=plan_service_items, + subscription_service_item=subscription_service_items, + invoice=2, + plan=(2, plan), + service_item=2, + event_type_set=2, + ) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:me_subscription') + ( - '?event-type-set-selected=' - f'{random.choice([model.event_type_set[0].id,model.event_type_set[0].slug])},' - f'{random.choice([model.event_type_set[1].id, model.event_type_set[1].slug])}') + url = reverse_lazy("payments:me_subscription") + ( + "?event-type-set-selected=" + f"{random.choice([model.event_type_set[0].id,model.event_type_set[0].slug])}," + f"{random.choice([model.event_type_set[1].id, model.event_type_set[1].slug])}" + ) response = self.client.get(url) self.client.force_authenticate(model.user) json = response.json() expected = { - 'plan_financings': [ - get_plan_financing_serializer(self, - model.plan_financing[1], - model.academy, - model.currency, - model.user, - model.service, - event_type_set=model.event_type_set[1], - invoices=[model.invoice[0], model.invoice[1]], - financing_options=[], - plans=[model.plan[0], model.plan[1]], - service_items=[model.service_item[0], model.service_item[1]]), - get_plan_financing_serializer(self, - model.plan_financing[0], - model.academy, - model.currency, - model.user, - model.service, - event_type_set=model.event_type_set[0], - invoices=[model.invoice[0], model.invoice[1]], - financing_options=[], - plans=[model.plan[0], model.plan[1]], - service_items=[model.service_item[0], model.service_item[1]]), + "plan_financings": [ + get_plan_financing_serializer( + self, + model.plan_financing[1], + model.academy, + model.currency, + model.user, + model.service, + event_type_set=model.event_type_set[1], + invoices=[model.invoice[0], model.invoice[1]], + financing_options=[], + plans=[model.plan[0], model.plan[1]], + service_items=[model.service_item[0], model.service_item[1]], + ), + get_plan_financing_serializer( + self, + model.plan_financing[0], + model.academy, + model.currency, + model.user, + model.service, + event_type_set=model.event_type_set[0], + invoices=[model.invoice[0], model.invoice[1]], + financing_options=[], + plans=[model.plan[0], model.plan[1]], + service_items=[model.service_item[0], model.service_item[1]], + ), ], - 'subscriptions': [ - get_subscription_serializer(self, - model.subscription[1], - model.academy, - model.currency, - model.user, - model.service, - event_type_set=model.event_type_set[1], - invoices=[model.invoice[0], model.invoice[1]], - financing_options=[], - plans=[model.plan[0], model.plan[1]], - service_items=[model.service_item[0], model.service_item[1]]), - get_subscription_serializer(self, - model.subscription[0], - model.academy, - model.currency, - model.user, - model.service, - event_type_set=model.event_type_set[0], - invoices=[model.invoice[0], model.invoice[1]], - financing_options=[], - plans=[model.plan[0], model.plan[1]], - service_items=[model.service_item[0], 
model.service_item[1]]), + "subscriptions": [ + get_subscription_serializer( + self, + model.subscription[1], + model.academy, + model.currency, + model.user, + model.service, + event_type_set=model.event_type_set[1], + invoices=[model.invoice[0], model.invoice[1]], + financing_options=[], + plans=[model.plan[0], model.plan[1]], + service_items=[model.service_item[0], model.service_item[1]], + ), + get_subscription_serializer( + self, + model.subscription[0], + model.academy, + model.currency, + model.user, + model.service, + event_type_set=model.event_type_set[0], + invoices=[model.invoice[0], model.invoice[1]], + financing_options=[], + plans=[model.plan[0], model.plan[1]], + service_items=[model.service_item[0], model.service_item[1]], + ), ], } self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('payments.Consumable'), []) - self.bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + self.assertEqual(self.bc.database.list_of("payments.Consumable"), []) + self.bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) diff --git a/breathecode/payments/tests/urls/tests_me_subscription_id_cancel.py b/breathecode/payments/tests/urls/tests_me_subscription_id_cancel.py index 3b12d312e..b136f807d 100644 --- a/breathecode/payments/tests/urls/tests_me_subscription_id_cancel.py +++ b/breathecode/payments/tests/urls/tests_me_subscription_id_cancel.py @@ -17,39 +17,39 @@ def academy_serializer(academy): return { - 'id': academy.id, - 'name': academy.name, - 'slug': academy.slug, + "id": academy.id, + "name": academy.name, + "slug": academy.slug, } def user_serializer(user): return { - 'email': user.email, - 'first_name': user.first_name, - 'last_name': user.last_name, + "email": user.email, + "first_name": user.first_name, + "last_name": user.last_name, } def get_serializer(self, subscription, academy, user, data={}): return { - 'academy': academy_serializer(academy), - 'id': subscription.id, - 'invoices': [], - 'is_refundable': subscription.is_refundable, - 'next_payment_at': self.bc.datetime.to_iso_string(subscription.next_payment_at), - 'paid_at': self.bc.datetime.to_iso_string(subscription.paid_at), - 'pay_every': subscription.pay_every, - 'pay_every_unit': subscription.pay_every_unit, - 'plans': [], - 'selected_cohort_set': subscription.selected_cohort_set, - 'selected_event_type_set': subscription.selected_event_type_set, - 'selected_mentorship_service_set': subscription.selected_mentorship_service_set, - 'service_items': [], - 'status': subscription.status, - 'status_message': subscription.status_message, - 'user': user_serializer(user), - 'valid_until': self.bc.datetime.to_iso_string(subscription.valid_until) if subscription.valid_until else None, + "academy": academy_serializer(academy), + "id": subscription.id, + "invoices": [], + "is_refundable": subscription.is_refundable, + "next_payment_at": self.bc.datetime.to_iso_string(subscription.next_payment_at), + "paid_at": self.bc.datetime.to_iso_string(subscription.paid_at), + "pay_every": subscription.pay_every, + "pay_every_unit": subscription.pay_every_unit, + "plans": [], + "selected_cohort_set": subscription.selected_cohort_set, + "selected_event_type_set": subscription.selected_event_type_set, + "selected_mentorship_service_set": subscription.selected_mentorship_service_set, + 
"service_items": [], + "status": subscription.status, + "status_message": subscription.status_message, + "user": user_serializer(user), + "valid_until": self.bc.datetime.to_iso_string(subscription.valid_until) if subscription.valid_until else None, **data, } @@ -60,95 +60,106 @@ class SignalTestSuite(PaymentsTestCase): """ def test__without_auth(self): - url = reverse_lazy('payments:me_subscription_id_cancel', kwargs={'subscription_id': 1}) + url = reverse_lazy("payments:me_subscription_id_cancel", kwargs={"subscription_id": 1}) response = self.client.get(url) json = response.json() - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) - self.assertEqual(self.bc.database.list_of('payments.Subscription'), []) + self.assertEqual(self.bc.database.list_of("payments.Subscription"), []) def test__put__not_found(self): - model = self.bc.database.create(user=1, ) + model = self.bc.database.create( + user=1, + ) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:me_subscription_id_cancel', kwargs={'subscription_id': 1}) + url = reverse_lazy("payments:me_subscription_id_cancel", kwargs={"subscription_id": 1}) response = self.client.put(url) json = response.json() - expected = {'detail': 'not-found', 'status_code': 404} + expected = {"detail": "not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - self.assertEqual(self.bc.database.list_of('payments.Subscription'), []) + self.assertEqual(self.bc.database.list_of("payments.Subscription"), []) def test__put__cancelled_these_statuses(self): - statuses = ['FREE_TRIAL', 'ACTIVE', 'PAYMENT_ISSUE', 'ERROR'] + statuses = ["FREE_TRIAL", "ACTIVE", "PAYMENT_ISSUE", "ERROR"] for s in statuses: - subscription = {'status': s} + subscription = {"status": s} model = self.bc.database.create(user=1, subscription=subscription) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:me_subscription_id_cancel', kwargs={'subscription_id': model.subscription.id}) + url = reverse_lazy("payments:me_subscription_id_cancel", kwargs={"subscription_id": model.subscription.id}) response = self.client.put(url) json = response.json() - expected = get_serializer(self, model.subscription, model.academy, model.user, data={'status': 'CANCELLED'}) + expected = get_serializer(self, model.subscription, model.academy, model.user, data={"status": "CANCELLED"}) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('payments.Subscription'), [ - { - **self.bc.format.to_dict(model.subscription), - 'status': 'CANCELLED', - }, - ]) + self.assertEqual( + self.bc.database.list_of("payments.Subscription"), + [ + { + **self.bc.format.to_dict(model.subscription), + "status": "CANCELLED", + }, + ], + ) # teardown - self.bc.database.delete('payments.Subscription') + self.bc.database.delete("payments.Subscription") def test__put__cancelled_twice(self): - subscription = {'status': 'CANCELLED'} + subscription = {"status": "CANCELLED"} model = self.bc.database.create(user=1, subscription=subscription) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:me_subscription_id_cancel', kwargs={'subscription_id': model.subscription.id}) + url = 
reverse_lazy("payments:me_subscription_id_cancel", kwargs={"subscription_id": model.subscription.id}) response = self.client.put(url) json = response.json() - expected = {'detail': 'already-cancelled', 'status_code': 400} + expected = {"detail": "already-cancelled", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('payments.Subscription'), [ - { - **self.bc.format.to_dict(model.subscription), - 'status': 'CANCELLED', - }, - ]) + self.assertEqual( + self.bc.database.list_of("payments.Subscription"), + [ + { + **self.bc.format.to_dict(model.subscription), + "status": "CANCELLED", + }, + ], + ) def test__put__cancelled_over_deprecated(self): - subscription = {'status': 'DEPRECATED'} + subscription = {"status": "DEPRECATED"} model = self.bc.database.create(user=1, subscription=subscription) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:me_subscription_id_cancel', kwargs={'subscription_id': model.subscription.id}) + url = reverse_lazy("payments:me_subscription_id_cancel", kwargs={"subscription_id": model.subscription.id}) response = self.client.put(url) json = response.json() - expected = {'detail': 'deprecated', 'status_code': 400} + expected = {"detail": "deprecated", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('payments.Subscription'), [ - { - **self.bc.format.to_dict(model.subscription), - 'status': 'DEPRECATED', - }, - ]) + self.assertEqual( + self.bc.database.list_of("payments.Subscription"), + [ + { + **self.bc.format.to_dict(model.subscription), + "status": "DEPRECATED", + }, + ], + ) diff --git a/breathecode/payments/tests/urls/tests_mentorshipserviceset.py b/breathecode/payments/tests/urls/tests_mentorshipserviceset.py index 5cc335b74..e58cb822a 100644 --- a/breathecode/payments/tests/urls/tests_mentorshipserviceset.py +++ b/breathecode/payments/tests/urls/tests_mentorshipserviceset.py @@ -9,37 +9,34 @@ def academy_serializer(academy): return { - 'id': academy.id, - 'name': academy.name, - 'slug': academy.slug, + "id": academy.id, + "name": academy.name, + "slug": academy.slug, } def mentorship_service_serializer(self, mentorship_service, academy): return { - 'academy': academy_serializer(academy), - 'description': mentorship_service.description, - 'duration': self.bc.datetime.from_timedelta(mentorship_service.duration), - 'id': mentorship_service.id, - 'language': mentorship_service.language, - 'logo_url': mentorship_service.logo_url, - 'max_duration': self.bc.datetime.from_timedelta(mentorship_service.max_duration), - 'missed_meeting_duration': self.bc.datetime.from_timedelta(mentorship_service.missed_meeting_duration), - 'name': mentorship_service.name, - 'slug': mentorship_service.slug, - 'status': mentorship_service.status, + "academy": academy_serializer(academy), + "description": mentorship_service.description, + "duration": self.bc.datetime.from_timedelta(mentorship_service.duration), + "id": mentorship_service.id, + "language": mentorship_service.language, + "logo_url": mentorship_service.logo_url, + "max_duration": self.bc.datetime.from_timedelta(mentorship_service.max_duration), + "missed_meeting_duration": self.bc.datetime.from_timedelta(mentorship_service.missed_meeting_duration), + "name": mentorship_service.name, + "slug": mentorship_service.slug, + "status": mentorship_service.status, } def 
get_serializer(self, mentorship_service_set, mentorship_services, academy): return { - 'id': - mentorship_service_set.id, - 'slug': - mentorship_service_set.slug, - 'academy': - academy_serializer(academy), - 'mentorship_services': [ + "id": mentorship_service_set.id, + "slug": mentorship_service_set.slug, + "academy": academy_serializer(academy), + "mentorship_services": [ mentorship_service_serializer(self, mentorship_service, academy) for mentorship_service in mentorship_services ], @@ -55,7 +52,7 @@ class SignalTestSuite(PaymentsTestCase): # When: get with no auth # Then: return 200 def test__no_auth(self): - url = reverse_lazy('payments:mentorshipserviceset') + url = reverse_lazy("payments:mentorshipserviceset") response = self.client.get(url) json = response.json() @@ -63,7 +60,7 @@ def test__no_auth(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('payments.MentorshipServiceSet'), []) + self.assertEqual(self.bc.database.list_of("payments.MentorshipServiceSet"), []) # Given: 2 MentorshipServiceSet, 2 MentorshipService and 1 Academy # When: get with no auth @@ -71,7 +68,7 @@ def test__no_auth(self): def test__two_items(self): model = self.bc.database.create(mentorship_service_set=2, mentorship_service=2) - url = reverse_lazy('payments:mentorshipserviceset') + url = reverse_lazy("payments:mentorshipserviceset") response = self.client.get(url) json = response.json() @@ -93,51 +90,53 @@ def test__two_items(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('payments.MentorshipServiceSet'), + self.bc.database.list_of("payments.MentorshipServiceSet"), self.bc.format.to_dict(model.mentorship_service_set), ) # Given: compile_lookup was mocked # When: the mock is called # Then: the mock should be called with the correct arguments and does not raise an exception - @patch('breathecode.utils.api_view_extensions.extensions.lookup_extension.compile_lookup', - MagicMock(wraps=lookup_extension.compile_lookup)) + @patch( + "breathecode.utils.api_view_extensions.extensions.lookup_extension.compile_lookup", + MagicMock(wraps=lookup_extension.compile_lookup), + ) def test_lookup_extension(self): self.bc.request.set_headers(academy=1) model = self.bc.database.create(mentorship_service_set=2, mentorship_service=2) args, kwargs = self.bc.format.call( - 'en', + "en", slugs=[ - '', - 'academy', - 'mentorship_services', + "", + "academy", + "mentorship_services", ], overwrite={ - 'mentorship_service': 'mentorship_services', + "mentorship_service": "mentorship_services", }, ) query = self.bc.format.lookup(*args, **kwargs) - url = reverse_lazy('payments:mentorshipserviceset') + '?' + self.bc.format.querystring(query) + url = reverse_lazy("payments:mentorshipserviceset") + "?" 
+ self.bc.format.querystring(query) - self.assertEqual([x for x in query], ['id', 'slug', 'academy', 'mentorship_service']) + self.assertEqual([x for x in query], ["id", "slug", "academy", "mentorship_service"]) response = self.client.get(url) json = response.json() expected = [] - for x in ['overwrite', 'custom_fields']: + for x in ["overwrite", "custom_fields"]: if x in kwargs: del kwargs[x] - for field in ['ids', 'slugs']: + for field in ["ids", "slugs"]: values = kwargs.get(field, tuple()) kwargs[field] = tuple(values) - for field in ['ints', 'strings', 'bools', 'datetimes']: + for field in ["ints", "strings", "bools", "datetimes"]: modes = kwargs.get(field, {}) for mode in modes: if not isinstance(kwargs[field][mode], tuple): @@ -145,13 +144,16 @@ def test_lookup_extension(self): kwargs[field] = frozenset(modes.items()) - self.bc.check.calls(lookup_extension.compile_lookup.call_args_list, [ - call(**kwargs), - ]) + self.bc.check.calls( + lookup_extension.compile_lookup.call_args_list, + [ + call(**kwargs), + ], + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, 200) self.assertEqual( - self.bc.database.list_of('payments.MentorshipServiceSet'), + self.bc.database.list_of("payments.MentorshipServiceSet"), self.bc.format.to_dict(model.mentorship_service_set), ) diff --git a/breathecode/payments/tests/urls/tests_mentorshipserviceset_id.py b/breathecode/payments/tests/urls/tests_mentorshipserviceset_id.py index 0117838f1..3d0b461dd 100644 --- a/breathecode/payments/tests/urls/tests_mentorshipserviceset_id.py +++ b/breathecode/payments/tests/urls/tests_mentorshipserviceset_id.py @@ -9,72 +9,69 @@ def academy_serializer(academy): return { - 'id': academy.id, - 'name': academy.name, - 'slug': academy.slug, + "id": academy.id, + "name": academy.name, + "slug": academy.slug, } def mentorship_service_serializer(self, mentorship_service, academy): return { - 'academy': academy_serializer(academy), - 'description': mentorship_service.description, - 'duration': self.bc.datetime.from_timedelta(mentorship_service.duration), - 'id': mentorship_service.id, - 'language': mentorship_service.language, - 'logo_url': mentorship_service.logo_url, - 'max_duration': self.bc.datetime.from_timedelta(mentorship_service.max_duration), - 'missed_meeting_duration': self.bc.datetime.from_timedelta(mentorship_service.missed_meeting_duration), - 'name': mentorship_service.name, - 'slug': mentorship_service.slug, - 'status': mentorship_service.status, + "academy": academy_serializer(academy), + "description": mentorship_service.description, + "duration": self.bc.datetime.from_timedelta(mentorship_service.duration), + "id": mentorship_service.id, + "language": mentorship_service.language, + "logo_url": mentorship_service.logo_url, + "max_duration": self.bc.datetime.from_timedelta(mentorship_service.max_duration), + "missed_meeting_duration": self.bc.datetime.from_timedelta(mentorship_service.missed_meeting_duration), + "name": mentorship_service.name, + "slug": mentorship_service.slug, + "status": mentorship_service.status, } def service_serializer(service): return { - 'groups': [], - 'private': service.private, - 'slug': service.slug, - 'title': service.title, - 'icon_url': service.icon_url, + "groups": [], + "private": service.private, + "slug": service.slug, + "title": service.title, + "icon_url": service.icon_url, } def currency_serializer(currency): return { - 'code': currency.code, - 'name': currency.name, + "code": currency.code, + "name": currency.name, } def 
academy_service_serialize(academy_service, academy, currency, service): return { - 'academy': academy_serializer(academy), - 'currency': currency_serializer(currency), - 'id': academy_service.id, - 'max_items': academy_service.max_items, - 'bundle_size': academy_service.bundle_size, - 'max_amount': academy_service.max_amount, - 'discount_ratio': academy_service.discount_ratio, - 'price_per_unit': academy_service.price_per_unit, - 'service': service_serializer(service), + "academy": academy_serializer(academy), + "currency": currency_serializer(currency), + "id": academy_service.id, + "max_items": academy_service.max_items, + "bundle_size": academy_service.bundle_size, + "max_amount": academy_service.max_amount, + "discount_ratio": academy_service.discount_ratio, + "price_per_unit": academy_service.price_per_unit, + "service": service_serializer(service), } def get_serializer(self, mentorship_service_set, mentorship_services, academy, academy_services, currency, service): return { - 'academy_services': [ + "academy_services": [ academy_service_serialize(academy_service, academy, currency, service) for academy_service in academy_services ], - 'id': - mentorship_service_set.id, - 'slug': - mentorship_service_set.slug, - 'academy': - academy_serializer(academy), - 'mentorship_services': [ + "id": mentorship_service_set.id, + "slug": mentorship_service_set.slug, + "academy": academy_serializer(academy), + "mentorship_services": [ mentorship_service_serializer(self, mentorship_service, academy) for mentorship_service in mentorship_services ], @@ -90,15 +87,15 @@ class SignalTestSuite(PaymentsTestCase): # When: get with no auth # Then: return 404 def test__no_auth(self): - url = reverse_lazy('payments:mentorshipserviceset_id', kwargs={'mentorship_service_set_id': 1}) + url = reverse_lazy("payments:mentorshipserviceset_id", kwargs={"mentorship_service_set_id": 1}) response = self.client.get(url) json = response.json() - expected = {'detail': 'not-found', 'status_code': 404} + expected = {"detail": "not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - self.assertEqual(self.bc.database.list_of('payments.MentorshipServiceSet'), []) + self.assertEqual(self.bc.database.list_of("payments.MentorshipServiceSet"), []) # Given: 1 MentorshipServiceSet, 2 MentorshipService, 1 Academy, 1 AcademyService, 1 Currency and 1 # Service @@ -107,7 +104,7 @@ def test__no_auth(self): def test__two_items(self): model = self.bc.database.create(mentorship_service_set=1, mentorship_service=2, academy_service=1) - url = reverse_lazy('payments:mentorshipserviceset_id', kwargs={'mentorship_service_set_id': 1}) + url = reverse_lazy("payments:mentorshipserviceset_id", kwargs={"mentorship_service_set_id": 1}) response = self.client.get(url) json = response.json() @@ -123,6 +120,9 @@ def test__two_items(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('payments.MentorshipServiceSet'), [ - self.bc.format.to_dict(model.mentorship_service_set), - ]) + self.assertEqual( + self.bc.database.list_of("payments.MentorshipServiceSet"), + [ + self.bc.format.to_dict(model.mentorship_service_set), + ], + ) diff --git a/breathecode/payments/tests/urls/tests_pay.py b/breathecode/payments/tests/urls/tests_pay.py index 4f6a2f5e4..2470167d6 100644 --- a/breathecode/payments/tests/urls/tests_pay.py +++ b/breathecode/payments/tests/urls/tests_pay.py @@ -21,64 +21,64 @@ def 
format_user_setting(data={}): return { - 'id': 1, - 'user_id': 1, - 'main_currency_id': None, - 'lang': 'en', + "id": 1, + "user_id": 1, + "main_currency_id": None, + "lang": "en", **data, } def format_invoice_item(data={}): return { - 'academy_id': 1, - 'amount': 0.0, - 'currency_id': 1, - 'bag_id': 1, - 'id': 1, - 'paid_at': UTC_NOW, - 'status': 'FULFILLED', - 'stripe_id': None, - 'user_id': 1, - 'refund_stripe_id': None, - 'refunded_at': None, + "academy_id": 1, + "amount": 0.0, + "currency_id": 1, + "bag_id": 1, + "id": 1, + "paid_at": UTC_NOW, + "status": "FULFILLED", + "stripe_id": None, + "user_id": 1, + "refund_stripe_id": None, + "refunded_at": None, **data, } def to_iso(date): - return re.sub(r'\+00:00$', 'Z', date.replace(tzinfo=UTC).isoformat()) + return re.sub(r"\+00:00$", "Z", date.replace(tzinfo=UTC).isoformat()) def format_coupon(coupon, data={}): return { - 'auto': coupon.auto, - 'discount_type': coupon.discount_type, - 'discount_value': coupon.discount_value, - 'expires_at': to_iso(coupon.expires_at) if coupon.expires_at else None, - 'offered_at': to_iso(coupon.offered_at) if coupon.offered_at else None, - 'referral_type': coupon.referral_type, - 'referral_value': coupon.referral_value, - 'slug': coupon.slug, + "auto": coupon.auto, + "discount_type": coupon.discount_type, + "discount_value": coupon.discount_value, + "expires_at": to_iso(coupon.expires_at) if coupon.expires_at else None, + "offered_at": to_iso(coupon.offered_at) if coupon.offered_at else None, + "referral_type": coupon.referral_type, + "referral_value": coupon.referral_value, + "slug": coupon.slug, **data, } def get_serializer(bc, currency, user, coupons=[], data={}): return { - 'amount': 0, - 'currency': { - 'code': currency.code, - 'name': currency.name, + "amount": 0, + "currency": { + "code": currency.code, + "name": currency.name, }, - 'paid_at': bc.datetime.to_iso_string(UTC_NOW), - 'status': 'FULFILLED', - 'user': { - 'email': user.email, - 'first_name': user.first_name, - 'last_name': user.last_name, + "paid_at": bc.datetime.to_iso_string(UTC_NOW), + "status": "FULFILLED", + "user": { + "email": user.email, + "first_name": user.first_name, + "last_name": user.last_name, }, - 'coupons': [format_coupon(x) for x in coupons], + "coupons": [format_coupon(x) for x in coupons], **data, } @@ -86,31 +86,31 @@ def get_serializer(bc, currency, user, coupons=[], data={}): def generate_amounts_by_time(over_50=False): if over_50: return { - 'amount_per_month': random.random() * 50 + 50, - 'amount_per_quarter': random.random() * 50 + 50, - 'amount_per_half': random.random() * 50 + 50, - 'amount_per_year': random.random() * 50 + 50, + "amount_per_month": random.random() * 50 + 50, + "amount_per_quarter": random.random() * 50 + 50, + "amount_per_half": random.random() * 50 + 50, + "amount_per_year": random.random() * 50 + 50, } return { - 'amount_per_month': random.random() * 100 + 1, - 'amount_per_quarter': random.random() * 100 + 1, - 'amount_per_half': random.random() * 100 + 1, - 'amount_per_year': random.random() * 100 + 1, + "amount_per_month": random.random() * 100 + 1, + "amount_per_quarter": random.random() * 100 + 1, + "amount_per_half": random.random() * 100 + 1, + "amount_per_year": random.random() * 100 + 1, } def which_amount_is_zero(data={}): for key in data: - if key == 'amount_per_quarter': - return 'MONTH', 1 + if key == "amount_per_quarter": + return "MONTH", 1 CHOSEN_PERIOD = { - 'MONTH': 'amount_per_month', - 'QUARTER': 'amount_per_quarter', - 'HALF': 'amount_per_half', - 'YEAR': 
'amount_per_year', + "MONTH": "amount_per_month", + "QUARTER": "amount_per_quarter", + "HALF": "amount_per_half", + "YEAR": "amount_per_year", } @@ -129,80 +129,80 @@ class FakeInvoice: @pytest.fixture(autouse=True) def get_patch(db, monkeypatch): - monkeypatch.setattr(activity_tasks.add_activity, 'delay', MagicMock()) - monkeypatch.setattr('breathecode.admissions.tasks.build_cohort_user.delay', MagicMock()) - monkeypatch.setattr('breathecode.admissions.tasks.build_profile_academy.delay', MagicMock()) - monkeypatch.setattr('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - monkeypatch.setattr('breathecode.payments.tasks.build_subscription.delay', MagicMock()) - monkeypatch.setattr('breathecode.payments.tasks.build_plan_financing.delay', MagicMock()) - monkeypatch.setattr('breathecode.payments.tasks.build_free_subscription.delay', MagicMock()) - monkeypatch.setattr('stripe.Charge.create', MagicMock(return_value={'id': 1})) - monkeypatch.setattr('stripe.Customer.create', MagicMock(return_value={'id': 1})) + monkeypatch.setattr(activity_tasks.add_activity, "delay", MagicMock()) + monkeypatch.setattr("breathecode.admissions.tasks.build_cohort_user.delay", MagicMock()) + monkeypatch.setattr("breathecode.admissions.tasks.build_profile_academy.delay", MagicMock()) + monkeypatch.setattr("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + monkeypatch.setattr("breathecode.payments.tasks.build_subscription.delay", MagicMock()) + monkeypatch.setattr("breathecode.payments.tasks.build_plan_financing.delay", MagicMock()) + monkeypatch.setattr("breathecode.payments.tasks.build_free_subscription.delay", MagicMock()) + monkeypatch.setattr("stripe.Charge.create", MagicMock(return_value={"id": 1})) + monkeypatch.setattr("stripe.Customer.create", MagicMock(return_value={"id": 1})) def wrapper(charge={}, customer={}): - monkeypatch.setattr('stripe.Charge.create', MagicMock(return_value=charge)) - monkeypatch.setattr('stripe.Customer.create', MagicMock(return_value=customer)) + monkeypatch.setattr("stripe.Charge.create", MagicMock(return_value=charge)) + monkeypatch.setattr("stripe.Customer.create", MagicMock(return_value=customer)) yield wrapper def test_without_auth(bc: Breathecode, client: APIClient): - url = reverse_lazy('payments:pay') + url = reverse_lazy("payments:pay") response = client.post(url) json = response.json() expected = { - 'detail': 'Authentication credentials were not provided.', - 'status_code': 401, + "detail": "Authentication credentials were not provided.", + "status_code": 401, } assert json == expected assert response.status_code == status.HTTP_401_UNAUTHORIZED - assert bc.database.list_of('payments.Bag') == [] - assert bc.database.list_of('authenticate.UserSetting') == [] + assert bc.database.list_of("payments.Bag") == [] + assert bc.database.list_of("authenticate.UserSetting") == [] bc.check.calls(admissions_tasks.build_cohort_user.delay.call_args_list, []) bc.check.calls(admissions_tasks.build_profile_academy.delay.call_args_list, []) bc.check.calls(activity_tasks.add_activity.delay.call_args_list, []) -@pytest.mark.parametrize('in_4geeks', [True, False]) -@pytest.mark.parametrize('bad_reputation', ['BAD', 'FRAUD']) -@pytest.mark.parametrize('good_reputation', ['GOOD', 'UNKNOWN']) +@pytest.mark.parametrize("in_4geeks", [True, False]) +@pytest.mark.parametrize("bad_reputation", ["BAD", "FRAUD"]) +@pytest.mark.parametrize("good_reputation", ["GOOD", "UNKNOWN"]) def test_fraud_case(bc: Breathecode, client: APIClient, in_4geeks, bad_reputation, good_reputation): 
if in_4geeks: financial_reputation = { - 'in_4geeks': bad_reputation, - 'in_stripe': good_reputation, + "in_4geeks": bad_reputation, + "in_stripe": good_reputation, } else: financial_reputation = { - 'in_4geeks': good_reputation, - 'in_stripe': bad_reputation, + "in_4geeks": good_reputation, + "in_stripe": bad_reputation, } model = bc.database.create(user=1, financial_reputation=financial_reputation) client.force_authenticate(user=model.user) - url = reverse_lazy('payments:pay') + url = reverse_lazy("payments:pay") response = client.post(url) json = response.json() expected = { - 'detail': 'fraud-or-bad-reputation', - 'status_code': 402, - 'silent': True, - 'silent_code': 'fraud-or-bad-reputation', + "detail": "fraud-or-bad-reputation", + "status_code": 402, + "silent": True, + "silent_code": "fraud-or-bad-reputation", } assert json == expected assert response.status_code == status.HTTP_402_PAYMENT_REQUIRED - assert bc.database.list_of('payments.Bag') == [] - assert bc.database.list_of('payments.Invoice') == [] - assert bc.database.list_of('authenticate.UserSetting') == [ - format_user_setting({'lang': 'en'}), + assert bc.database.list_of("payments.Bag") == [] + assert bc.database.list_of("payments.Invoice") == [] + assert bc.database.list_of("authenticate.UserSetting") == [ + format_user_setting({"lang": "en"}), ] bc.check.calls(admissions_tasks.build_cohort_user.delay.call_args_list, []) @@ -210,30 +210,30 @@ def test_fraud_case(bc: Breathecode, client: APIClient, in_4geeks, bad_reputatio bc.check.calls(activity_tasks.add_activity.delay.call_args_list, []) -@pytest.mark.parametrize('reputation1', ['GOOD', 'UNKNOWN']) -@pytest.mark.parametrize('reputation2', ['GOOD', 'UNKNOWN']) +@pytest.mark.parametrize("reputation1", ["GOOD", "UNKNOWN"]) +@pytest.mark.parametrize("reputation2", ["GOOD", "UNKNOWN"]) def test_no_token(bc: Breathecode, client: APIClient, reputation1, reputation2): financial_reputation = { - 'in_4geeks': reputation1, - 'in_stripe': reputation2, + "in_4geeks": reputation1, + "in_stripe": reputation2, } model = bc.database.create(user=1, financial_reputation=financial_reputation) client.force_authenticate(user=model.user) - url = reverse_lazy('payments:pay') + url = reverse_lazy("payments:pay") response = client.post(url) json = response.json() - expected = {'detail': 'missing-token', 'status_code': 404} + expected = {"detail": "missing-token", "status_code": 404} assert json == expected assert response.status_code == status.HTTP_404_NOT_FOUND - assert bc.database.list_of('payments.Bag') == [] - assert bc.database.list_of('payments.Invoice') == [] - assert bc.database.list_of('authenticate.UserSetting') == [ - format_user_setting({'lang': 'en'}), + assert bc.database.list_of("payments.Bag") == [] + assert bc.database.list_of("payments.Invoice") == [] + assert bc.database.list_of("authenticate.UserSetting") == [ + format_user_setting({"lang": "en"}), ] bc.check.calls(admissions_tasks.build_cohort_user.delay.call_args_list, []) @@ -245,20 +245,20 @@ def test_without_bag__passing_token(bc: Breathecode, client: APIClient): model = bc.database.create(user=1) client.force_authenticate(user=model.user) - url = reverse_lazy('payments:pay') - data = {'token': 'xdxdxdxdxdxdxdxdxdxd'} - response = client.post(url, data, format='json') + url = reverse_lazy("payments:pay") + data = {"token": "xdxdxdxdxdxdxdxdxdxd"} + response = client.post(url, data, format="json") json = response.json() - expected = {'detail': 'not-found-or-without-checking', 'status_code': 404} + expected = {"detail": 
"not-found-or-without-checking", "status_code": 404} assert json == expected assert response.status_code == status.HTTP_404_NOT_FOUND - assert bc.database.list_of('payments.Bag') == [] - assert bc.database.list_of('payments.Invoice') == [] - assert bc.database.list_of('authenticate.UserSetting') == [ - format_user_setting({'lang': 'en'}), + assert bc.database.list_of("payments.Bag") == [] + assert bc.database.list_of("payments.Invoice") == [] + assert bc.database.list_of("authenticate.UserSetting") == [ + format_user_setting({"lang": "en"}), ] bc.check.calls(admissions_tasks.build_cohort_user.delay.call_args_list, []) @@ -268,28 +268,28 @@ def test_without_bag__passing_token(bc: Breathecode, client: APIClient): def test_no_bag(bc: Breathecode, client: APIClient): bag = { - 'token': 'xdxdxdxdxdxdxdxdxdxd', - 'expires_at': UTC_NOW, - 'status': 'CHECKING', - 'type': 'BAG', + "token": "xdxdxdxdxdxdxdxdxdxd", + "expires_at": UTC_NOW, + "status": "CHECKING", + "type": "BAG", } model = bc.database.create(user=1, bag=bag, currency=1, academy=1) client.force_authenticate(user=model.user) - url = reverse_lazy('payments:pay') - data = {'token': 'xdxdxdxdxdxdxdxdxdxd'} - response = client.post(url, data, format='json') + url = reverse_lazy("payments:pay") + data = {"token": "xdxdxdxdxdxdxdxdxdxd"} + response = client.post(url, data, format="json") json = response.json() - expected = {'detail': 'bag-is-empty', 'status_code': 400} + expected = {"detail": "bag-is-empty", "status_code": 400} assert json == expected assert response.status_code == status.HTTP_400_BAD_REQUEST - assert bc.database.list_of('payments.Bag') == [bc.format.to_dict(model.bag)] - assert bc.database.list_of('payments.Invoice') == [] - assert bc.database.list_of('authenticate.UserSetting') == [ - format_user_setting({'lang': 'en'}), + assert bc.database.list_of("payments.Bag") == [bc.format.to_dict(model.bag)] + assert bc.database.list_of("payments.Invoice") == [] + assert bc.database.list_of("authenticate.UserSetting") == [ + format_user_setting({"lang": "en"}), ] bc.check.queryset_with_pks(model.bag.plans.all(), []) @@ -300,44 +300,46 @@ def test_no_bag(bc: Breathecode, client: APIClient): bc.check.calls( activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), + call(1, "bag_created", related_type="payments.Bag", related_id=1), ], ) def test_with_bag__no_free_trial(bc: Breathecode, client: APIClient): bag = { - 'token': 'xdxdxdxdxdxdxdxdxdxd', - 'expires_at': UTC_NOW, - 'status': 'CHECKING', - 'type': 'BAG', - random.choice([ - 'amount_per_month', - 'amount_per_quarter', - 'amount_per_half', - 'amount_per_year', - ]): 1, + "token": "xdxdxdxdxdxdxdxdxdxd", + "expires_at": UTC_NOW, + "status": "CHECKING", + "type": "BAG", + random.choice( + [ + "amount_per_month", + "amount_per_quarter", + "amount_per_half", + "amount_per_year", + ] + ): 1, } - plan = {'is_renewable': False} + plan = {"is_renewable": False} model = bc.database.create(user=1, bag=bag, academy=1, currency=1, plan=plan, service_item=1) client.force_authenticate(user=model.user) - url = reverse_lazy('payments:pay') - data = {'token': 'xdxdxdxdxdxdxdxdxdxd'} - response = client.post(url, data, format='json') + url = reverse_lazy("payments:pay") + data = {"token": "xdxdxdxdxdxdxdxdxdxd"} + response = client.post(url, data, format="json") json = response.json() - expected = {'detail': 'missing-chosen-period', 'status_code': 400} + expected = {"detail": "missing-chosen-period", "status_code": 400} assert json == 
expected assert response.status_code == status.HTTP_400_BAD_REQUEST - assert bc.database.list_of('payments.Bag') == [bc.format.to_dict(model.bag)] - assert bc.database.list_of('payments.Invoice') == [] - assert bc.database.list_of('authenticate.UserSetting') == [ - format_user_setting({'lang': 'en'}), + assert bc.database.list_of("payments.Bag") == [bc.format.to_dict(model.bag)] + assert bc.database.list_of("payments.Invoice") == [] + assert bc.database.list_of("authenticate.UserSetting") == [ + format_user_setting({"lang": "en"}), ] bc.check.queryset_with_pks(model.bag.plans.all(), [1]) @@ -348,38 +350,38 @@ def test_with_bag__no_free_trial(bc: Breathecode, client: APIClient): bc.check.calls( activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), + call(1, "bag_created", related_type="payments.Bag", related_id=1), ], ) def test_bad_choosen_period(bc: Breathecode, client: APIClient): bag = { - 'token': 'xdxdxdxdxdxdxdxdxdxd', - 'expires_at': UTC_NOW, - 'status': 'CHECKING', - 'type': 'BAG', + "token": "xdxdxdxdxdxdxdxdxdxd", + "expires_at": UTC_NOW, + "status": "CHECKING", + "type": "BAG", } - plan = {'is_renewable': False} + plan = {"is_renewable": False} model = bc.database.create(user=1, bag=bag, academy=1, currency=1, plan=plan, service_item=1) client.force_authenticate(user=model.user) - url = reverse_lazy('payments:pay') - data = {'token': 'xdxdxdxdxdxdxdxdxdxd', 'chosen_period': bc.fake.slug()} - response = client.post(url, data, format='json') + url = reverse_lazy("payments:pay") + data = {"token": "xdxdxdxdxdxdxdxdxdxd", "chosen_period": bc.fake.slug()} + response = client.post(url, data, format="json") json = response.json() - expected = {'detail': 'invalid-chosen-period', 'status_code': 400} + expected = {"detail": "invalid-chosen-period", "status_code": 400} assert json == expected assert response.status_code == status.HTTP_400_BAD_REQUEST - assert bc.database.list_of('payments.Bag') == [bc.format.to_dict(model.bag)] - assert bc.database.list_of('payments.Invoice') == [] - assert bc.database.list_of('authenticate.UserSetting') == [ - format_user_setting({'lang': 'en'}), + assert bc.database.list_of("payments.Bag") == [bc.format.to_dict(model.bag)] + assert bc.database.list_of("payments.Invoice") == [] + assert bc.database.list_of("authenticate.UserSetting") == [ + format_user_setting({"lang": "en"}), ] bc.check.queryset_with_pks(model.bag.plans.all(), [1]) @@ -390,45 +392,45 @@ def test_bad_choosen_period(bc: Breathecode, client: APIClient): bc.check.calls( activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), + call(1, "bag_created", related_type="payments.Bag", related_id=1), ], ) def test_free_trial__no_plan_offer(bc: Breathecode, client: APIClient): bag = { - 'token': 'xdxdxdxdxdxdxdxdxdxd', - 'expires_at': UTC_NOW, - 'status': 'CHECKING', - 'type': 'BAG', + "token": "xdxdxdxdxdxdxdxdxdxd", + "expires_at": UTC_NOW, + "status": "CHECKING", + "type": "BAG", } - plan = {'is_renewable': False} + plan = {"is_renewable": False} model = bc.database.create(user=1, bag=bag, academy=1, currency=1, plan=plan, service_item=1) client.force_authenticate(user=model.user) - url = reverse_lazy('payments:pay') + url = reverse_lazy("payments:pay") data = { - 'token': 'xdxdxdxdxdxdxdxdxdxd', + "token": "xdxdxdxdxdxdxdxdxdxd", } - response = client.post(url, data, format='json') + response = client.post(url, data, format="json") json = response.json() expected = { 
- 'detail': 'the-plan-was-chosen-is-not-ready-too-be-sold', - 'status_code': 400, + "detail": "the-plan-was-chosen-is-not-ready-too-be-sold", + "status_code": 400, } assert json == expected assert response.status_code == status.HTTP_400_BAD_REQUEST - assert bc.database.list_of('payments.Bag') == [ + assert bc.database.list_of("payments.Bag") == [ bc.format.to_dict(model.bag), ] - assert bc.database.list_of('payments.Invoice') == [] - assert bc.database.list_of('authenticate.UserSetting') == [ - format_user_setting({'lang': 'en'}), + assert bc.database.list_of("payments.Invoice") == [] + assert bc.database.list_of("authenticate.UserSetting") == [ + format_user_setting({"lang": "en"}), ] bc.check.queryset_with_pks(model.bag.plans.all(), [1]) @@ -442,29 +444,29 @@ def test_free_trial__no_plan_offer(bc: Breathecode, client: APIClient): bc.check.calls( activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), + call(1, "bag_created", related_type="payments.Bag", related_id=1), ], ) def test_free_trial__with_plan_offer(bc: Breathecode, client: APIClient): bag = { - 'token': 'xdxdxdxdxdxdxdxdxdxd', - 'expires_at': UTC_NOW, - 'status': 'CHECKING', - 'type': 'BAG', + "token": "xdxdxdxdxdxdxdxdxdxd", + "expires_at": UTC_NOW, + "status": "CHECKING", + "type": "BAG", } - plan = {'is_renewable': False} + plan = {"is_renewable": False} model = bc.database.create(user=1, bag=bag, academy=1, currency=1, plan=plan, service_item=1, plan_offer=1) client.force_authenticate(user=model.user) - url = reverse_lazy('payments:pay') + url = reverse_lazy("payments:pay") data = { - 'token': 'xdxdxdxdxdxdxdxdxdxd', + "token": "xdxdxdxdxdxdxdxdxdxd", } - response = client.post(url, data, format='json') + response = client.post(url, data, format="json") json = response.json() expected = get_serializer(bc, model.currency, model.user, data={}) @@ -472,55 +474,55 @@ def test_free_trial__with_plan_offer(bc: Breathecode, client: APIClient): assert json == expected assert response.status_code == status.HTTP_201_CREATED - assert bc.database.list_of('payments.Bag') == [{ - **bc.format.to_dict(model.bag), - 'token': None, - 'status': 'PAID', - 'expires_at': None, - }] - assert bc.database.list_of('payments.Invoice') == [format_invoice_item()] - assert bc.database.list_of('authenticate.UserSetting') == [ - format_user_setting({'lang': 'en'}), + assert bc.database.list_of("payments.Bag") == [ + { + **bc.format.to_dict(model.bag), + "token": None, + "status": "PAID", + "expires_at": None, + } + ] + assert bc.database.list_of("payments.Invoice") == [format_invoice_item()] + assert bc.database.list_of("authenticate.UserSetting") == [ + format_user_setting({"lang": "en"}), ] bc.check.queryset_with_pks(model.bag.plans.all(), [1]) bc.check.queryset_with_pks(model.bag.service_items.all(), [1]) assert tasks.build_subscription.delay.call_args_list == [] assert tasks.build_plan_financing.delay.call_args_list == [] - assert tasks.build_free_subscription.delay.call_args_list == [call(1, 1, conversion_info='')] + assert tasks.build_free_subscription.delay.call_args_list == [call(1, 1, conversion_info="")] bc.check.calls(admissions_tasks.build_cohort_user.delay.call_args_list, []) bc.check.calls(admissions_tasks.build_profile_academy.delay.call_args_list, [call(1, 1)]) bc.check.calls( activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - call(1, 'checkout_completed', related_type='payments.Invoice', related_id=1), + 
call(1, "bag_created", related_type="payments.Bag", related_id=1), + call(1, "checkout_completed", related_type="payments.Invoice", related_id=1), ], ) def test_free_trial__with_plan_offer_with_conversion_info(bc: Breathecode, client: APIClient): bag = { - 'token': 'xdxdxdxdxdxdxdxdxdxd', - 'expires_at': UTC_NOW, - 'status': 'CHECKING', - 'type': 'BAG', + "token": "xdxdxdxdxdxdxdxdxdxd", + "expires_at": UTC_NOW, + "status": "CHECKING", + "type": "BAG", } - plan = {'is_renewable': False} + plan = {"is_renewable": False} model = bc.database.create(user=1, bag=bag, academy=1, currency=1, plan=plan, service_item=1, plan_offer=1) client.force_authenticate(user=model.user) - url = reverse_lazy('payments:pay') + url = reverse_lazy("payments:pay") data = { - 'token': 'xdxdxdxdxdxdxdxdxdxd', - 'conversion_info': { - 'landing_url': '/home' - }, + "token": "xdxdxdxdxdxdxdxdxdxd", + "conversion_info": {"landing_url": "/home"}, } - response = client.post(url, data, format='json') + response = client.post(url, data, format="json") json = response.json() expected = get_serializer(bc, model.currency, model.user, data={}) @@ -528,15 +530,17 @@ def test_free_trial__with_plan_offer_with_conversion_info(bc: Breathecode, clien assert json == expected assert response.status_code == status.HTTP_201_CREATED - assert bc.database.list_of('payments.Bag') == [{ - **bc.format.to_dict(model.bag), - 'token': None, - 'status': 'PAID', - 'expires_at': None, - }] - assert bc.database.list_of('payments.Invoice') == [format_invoice_item()] - assert bc.database.list_of('authenticate.UserSetting') == [ - format_user_setting({'lang': 'en'}), + assert bc.database.list_of("payments.Bag") == [ + { + **bc.format.to_dict(model.bag), + "token": None, + "status": "PAID", + "expires_at": None, + } + ] + assert bc.database.list_of("payments.Invoice") == [format_invoice_item()] + assert bc.database.list_of("authenticate.UserSetting") == [ + format_user_setting({"lang": "en"}), ] bc.check.queryset_with_pks(model.bag.plans.all(), [1]) @@ -552,22 +556,22 @@ def test_free_trial__with_plan_offer_with_conversion_info(bc: Breathecode, clien bc.check.calls( activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - call(1, 'checkout_completed', related_type='payments.Invoice', related_id=1), + call(1, "bag_created", related_type="payments.Bag", related_id=1), + call(1, "checkout_completed", related_type="payments.Invoice", related_id=1), ], ) @pytest.mark.parametrize( - 'exc_cls,silent_code', + "exc_cls,silent_code", [ - (stripe.error.CardError, 'card-error'), - (stripe.error.RateLimitError, 'rate-limit-error'), - (stripe.error.InvalidRequestError, 'invalid-request'), - (stripe.error.AuthenticationError, 'authentication-error'), - (stripe.error.APIConnectionError, 'payment-service-are-down'), - (stripe.error.StripeError, 'stripe-error'), - (Exception, 'unexpected-exception'), + (stripe.error.CardError, "card-error"), + (stripe.error.RateLimitError, "rate-limit-error"), + (stripe.error.InvalidRequestError, "invalid-request"), + (stripe.error.AuthenticationError, "authentication-error"), + (stripe.error.APIConnectionError, "payment-service-are-down"), + (stripe.error.StripeError, "stripe-error"), + (Exception, "unexpected-exception"), ], ) def test_pay_for_subscription_has_failed(bc: Breathecode, client: APIClient, exc_cls, silent_code, monkeypatch, fake): @@ -576,57 +580,57 @@ def get_exp(): args = [fake.slug()] kwargs = {} if exc_cls in [stripe.error.CardError, 
stripe.error.InvalidRequestError]: - kwargs['param'] = {} + kwargs["param"] = {} if exc_cls == stripe.error.CardError: - kwargs['code'] = fake.slug() + kwargs["code"] = fake.slug() return exc_cls(*args, **kwargs) monkeypatch.setattr( - 'breathecode.payments.services.stripe.Stripe._execute_callback', + "breathecode.payments.services.stripe.Stripe._execute_callback", MagicMock(side_effect=get_exp()), ) bag = { - 'token': 'xdxdxdxdxdxdxdxdxdxd', - 'expires_at': UTC_NOW, - 'status': 'CHECKING', - 'type': 'BAG', + "token": "xdxdxdxdxdxdxdxdxdxd", + "expires_at": UTC_NOW, + "status": "CHECKING", + "type": "BAG", **generate_amounts_by_time(), } - chosen_period = random.choice(['MONTH', 'QUARTER', 'HALF', 'YEAR']) + chosen_period = random.choice(["MONTH", "QUARTER", "HALF", "YEAR"]) amount = get_amount_per_period(chosen_period, bag) - plan = {'is_renewable': False} + plan = {"is_renewable": False} model = bc.database.create(user=1, bag=bag, academy=1, currency=1, plan=plan, service_item=1) client.force_authenticate(user=model.user) - url = reverse_lazy('payments:pay') + url = reverse_lazy("payments:pay") data = { - 'token': 'xdxdxdxdxdxdxdxdxdxd', - 'chosen_period': chosen_period, + "token": "xdxdxdxdxdxdxdxdxdxd", + "chosen_period": chosen_period, } - response = client.post(url, data, format='json') + response = client.post(url, data, format="json") json = response.json() expected = { - 'detail': silent_code, - 'silent': True, - 'silent_code': silent_code, - 'status_code': 402, + "detail": silent_code, + "silent": True, + "silent_code": silent_code, + "status_code": 402, } assert json == expected assert response.status_code == status.HTTP_402_PAYMENT_REQUIRED - assert bc.database.list_of('payments.Bag') == [ + assert bc.database.list_of("payments.Bag") == [ bc.format.to_dict(model.bag), ] - assert bc.database.list_of('payments.Invoice') == [] - assert bc.database.list_of('authenticate.UserSetting') == [ - format_user_setting({'lang': 'en'}), + assert bc.database.list_of("payments.Invoice") == [] + assert bc.database.list_of("authenticate.UserSetting") == [ + format_user_setting({"lang": "en"}), ] bc.check.queryset_with_pks(model.bag.plans.all(), [1]) @@ -637,21 +641,24 @@ def get_exp(): bc.check.calls(admissions_tasks.build_cohort_user.delay.call_args_list, []) bc.check.calls(admissions_tasks.build_profile_academy.delay.call_args_list, []) - bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) @pytest.mark.parametrize( - 'exc_cls,silent_code', + "exc_cls,silent_code", [ - (stripe.error.CardError, 'card-error'), - (stripe.error.RateLimitError, 'rate-limit-error'), - (stripe.error.InvalidRequestError, 'invalid-request'), - (stripe.error.AuthenticationError, 'authentication-error'), - (stripe.error.APIConnectionError, 'payment-service-are-down'), - (stripe.error.StripeError, 'stripe-error'), - (Exception, 'unexpected-exception'), + (stripe.error.CardError, "card-error"), + (stripe.error.RateLimitError, "rate-limit-error"), + (stripe.error.InvalidRequestError, "invalid-request"), + (stripe.error.AuthenticationError, "authentication-error"), + (stripe.error.APIConnectionError, "payment-service-are-down"), + (stripe.error.StripeError, "stripe-error"), + (Exception, "unexpected-exception"), ], ) def test_pay_for_plan_financing_has_failed(bc: Breathecode, client: 
APIClient, exc_cls, silent_code, monkeypatch, fake): @@ -660,32 +667,32 @@ def get_exp(): args = [fake.slug()] kwargs = {} if exc_cls in [stripe.error.CardError, stripe.error.InvalidRequestError]: - kwargs['param'] = {} + kwargs["param"] = {} if exc_cls == stripe.error.CardError: - kwargs['code'] = fake.slug() + kwargs["code"] = fake.slug() return exc_cls(*args, **kwargs) monkeypatch.setattr( - 'breathecode.payments.services.stripe.Stripe._execute_callback', + "breathecode.payments.services.stripe.Stripe._execute_callback", MagicMock(side_effect=get_exp()), ) how_many_installments = random.randint(1, 12) charge = random.random() * 99 + 1 bag = { - 'token': 'xdxdxdxdxdxdxdxdxdxd', - 'expires_at': UTC_NOW, - 'status': 'CHECKING', - 'type': 'BAG', + "token": "xdxdxdxdxdxdxdxdxdxd", + "expires_at": UTC_NOW, + "status": "CHECKING", + "type": "BAG", **generate_amounts_by_time(), } financing_option = { - 'monthly_price': charge, - 'how_many_months': how_many_installments, + "monthly_price": charge, + "how_many_months": how_many_installments, } - plan = {'is_renewable': False} + plan = {"is_renewable": False} model = bc.database.create( user=1, @@ -698,30 +705,30 @@ def get_exp(): ) client.force_authenticate(user=model.user) - url = reverse_lazy('payments:pay') + url = reverse_lazy("payments:pay") data = { - 'token': 'xdxdxdxdxdxdxdxdxdxd', - 'how_many_installments': how_many_installments, + "token": "xdxdxdxdxdxdxdxdxdxd", + "how_many_installments": how_many_installments, } - response = client.post(url, data, format='json') + response = client.post(url, data, format="json") json = response.json() expected = { - 'detail': silent_code, - 'silent': True, - 'silent_code': silent_code, - 'status_code': 402, + "detail": silent_code, + "silent": True, + "silent_code": silent_code, + "status_code": 402, } assert json == expected assert response.status_code == status.HTTP_402_PAYMENT_REQUIRED - assert bc.database.list_of('payments.Bag') == [ + assert bc.database.list_of("payments.Bag") == [ bc.format.to_dict(model.bag), ] - assert bc.database.list_of('payments.Invoice') == [] - assert bc.database.list_of('authenticate.UserSetting') == [ - format_user_setting({'lang': 'en'}), + assert bc.database.list_of("payments.Invoice") == [] + assert bc.database.list_of("authenticate.UserSetting") == [ + format_user_setting({"lang": "en"}), ] bc.check.queryset_with_pks(model.bag.plans.all(), [1]) @@ -732,29 +739,32 @@ def get_exp(): bc.check.calls(admissions_tasks.build_cohort_user.delay.call_args_list, []) bc.check.calls(admissions_tasks.build_profile_academy.delay.call_args_list, []) - bc.check.calls(activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - ]) + bc.check.calls( + activity_tasks.add_activity.delay.call_args_list, + [ + call(1, "bag_created", related_type="payments.Bag", related_id=1), + ], + ) def test_free_plan__is_renewable(bc: Breathecode, client: APIClient): bag = { - 'token': 'xdxdxdxdxdxdxdxdxdxd', - 'expires_at': UTC_NOW, - 'status': 'CHECKING', - 'type': 'BAG', + "token": "xdxdxdxdxdxdxdxdxdxd", + "expires_at": UTC_NOW, + "status": "CHECKING", + "type": "BAG", } - plan = {'is_renewable': True, 'trial_duration': 0} + plan = {"is_renewable": True, "trial_duration": 0} model = bc.database.create(user=1, bag=bag, academy=1, currency=1, plan=plan, service_item=1, plan_offer=1) client.force_authenticate(user=model.user) - url = reverse_lazy('payments:pay') + url = reverse_lazy("payments:pay") data = { - 'token': 
'xdxdxdxdxdxdxdxdxdxd', + "token": "xdxdxdxdxdxdxdxdxdxd", } - response = client.post(url, data, format='json') + response = client.post(url, data, format="json") json = response.json() expected = get_serializer(bc, model.currency, model.user, data={}) @@ -762,52 +772,54 @@ def test_free_plan__is_renewable(bc: Breathecode, client: APIClient): assert json == expected assert response.status_code == status.HTTP_201_CREATED - assert bc.database.list_of('payments.Bag') == [{ - **bc.format.to_dict(model.bag), - 'token': None, - 'status': 'PAID', - 'expires_at': None, - }] - assert bc.database.list_of('payments.Invoice') == [format_invoice_item()] - assert bc.database.list_of('authenticate.UserSetting') == [ - format_user_setting({'lang': 'en'}), + assert bc.database.list_of("payments.Bag") == [ + { + **bc.format.to_dict(model.bag), + "token": None, + "status": "PAID", + "expires_at": None, + } + ] + assert bc.database.list_of("payments.Invoice") == [format_invoice_item()] + assert bc.database.list_of("authenticate.UserSetting") == [ + format_user_setting({"lang": "en"}), ] bc.check.queryset_with_pks(model.bag.plans.all(), [1]) bc.check.queryset_with_pks(model.bag.service_items.all(), [1]) assert tasks.build_subscription.delay.call_args_list == [] assert tasks.build_plan_financing.delay.call_args_list == [] - assert tasks.build_free_subscription.delay.call_args_list == [call(1, 1, conversion_info='')] + assert tasks.build_free_subscription.delay.call_args_list == [call(1, 1, conversion_info="")] bc.check.calls(admissions_tasks.build_cohort_user.delay.call_args_list, []) bc.check.calls(admissions_tasks.build_profile_academy.delay.call_args_list, [call(1, 1)]) bc.check.calls( activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - call(1, 'checkout_completed', related_type='payments.Invoice', related_id=1), + call(1, "bag_created", related_type="payments.Bag", related_id=1), + call(1, "checkout_completed", related_type="payments.Invoice", related_id=1), ], ) def test_free_plan__not_is_renewable(bc: Breathecode, client: APIClient): bag = { - 'token': 'xdxdxdxdxdxdxdxdxdxd', - 'expires_at': UTC_NOW, - 'status': 'CHECKING', - 'type': 'BAG', + "token": "xdxdxdxdxdxdxdxdxdxd", + "expires_at": UTC_NOW, + "status": "CHECKING", + "type": "BAG", } - plan = {'is_renewable': False, 'trial_duration': 0} + plan = {"is_renewable": False, "trial_duration": 0} model = bc.database.create(user=1, bag=bag, academy=1, currency=1, plan=plan, service_item=1, plan_offer=1) client.force_authenticate(user=model.user) - url = reverse_lazy('payments:pay') + url = reverse_lazy("payments:pay") data = { - 'token': 'xdxdxdxdxdxdxdxdxdxd', + "token": "xdxdxdxdxdxdxdxdxdxd", } - response = client.post(url, data, format='json') + response = client.post(url, data, format="json") json = response.json() expected = get_serializer(bc, model.currency, model.user, data={}) @@ -815,83 +827,89 @@ def test_free_plan__not_is_renewable(bc: Breathecode, client: APIClient): assert json == expected assert response.status_code == status.HTTP_201_CREATED - assert bc.database.list_of('payments.Bag') == [{ - **bc.format.to_dict(model.bag), - 'token': None, - 'status': 'PAID', - 'expires_at': None, - }] - assert bc.database.list_of('payments.Invoice') == [format_invoice_item()] - assert bc.database.list_of('authenticate.UserSetting') == [ - format_user_setting({'lang': 'en'}), + assert bc.database.list_of("payments.Bag") == [ + { + **bc.format.to_dict(model.bag), + "token": None, + 
"status": "PAID", + "expires_at": None, + } + ] + assert bc.database.list_of("payments.Invoice") == [format_invoice_item()] + assert bc.database.list_of("authenticate.UserSetting") == [ + format_user_setting({"lang": "en"}), ] bc.check.queryset_with_pks(model.bag.plans.all(), [1]) bc.check.queryset_with_pks(model.bag.service_items.all(), [1]) assert tasks.build_subscription.delay.call_args_list == [] assert tasks.build_plan_financing.delay.call_args_list == [] - assert tasks.build_free_subscription.delay.call_args_list == [call(1, 1, conversion_info='')] + assert tasks.build_free_subscription.delay.call_args_list == [call(1, 1, conversion_info="")] bc.check.calls(admissions_tasks.build_cohort_user.delay.call_args_list, []) bc.check.calls(admissions_tasks.build_profile_academy.delay.call_args_list, [call(1, 1)]) bc.check.calls( activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - call(1, 'checkout_completed', related_type='payments.Invoice', related_id=1), + call(1, "bag_created", related_type="payments.Bag", related_id=1), + call(1, "checkout_completed", related_type="payments.Invoice", related_id=1), ], ) def test_with_chosen_period__amount_set(bc: Breathecode, client: APIClient): bag = { - 'token': 'xdxdxdxdxdxdxdxdxdxd', - 'expires_at': UTC_NOW, - 'status': 'CHECKING', - 'type': 'BAG', + "token": "xdxdxdxdxdxdxdxdxdxd", + "expires_at": UTC_NOW, + "status": "CHECKING", + "type": "BAG", **generate_amounts_by_time(), } - chosen_period = random.choice(['MONTH', 'QUARTER', 'HALF', 'YEAR']) + chosen_period = random.choice(["MONTH", "QUARTER", "HALF", "YEAR"]) amount = get_amount_per_period(chosen_period, bag) - plan = {'is_renewable': False} + plan = {"is_renewable": False} model = bc.database.create(user=1, bag=bag, academy=1, currency=1, plan=plan, service_item=1) client.force_authenticate(user=model.user) - url = reverse_lazy('payments:pay') + url = reverse_lazy("payments:pay") data = { - 'token': 'xdxdxdxdxdxdxdxdxdxd', - 'chosen_period': chosen_period, + "token": "xdxdxdxdxdxdxdxdxdxd", + "chosen_period": chosen_period, } - response = client.post(url, data, format='json') + response = client.post(url, data, format="json") json = response.json() - expected = get_serializer(bc, model.currency, model.user, data={'amount': math.ceil(amount)}) + expected = get_serializer(bc, model.currency, model.user, data={"amount": math.ceil(amount)}) assert json == expected assert response.status_code == status.HTTP_201_CREATED - assert bc.database.list_of('payments.Bag') == [{ - **bc.format.to_dict(model.bag), - 'token': None, - 'status': 'PAID', - 'expires_at': None, - 'chosen_period': chosen_period, - }] - assert bc.database.list_of('payments.Invoice') == [ - format_invoice_item({ - 'amount': math.ceil(amount), - 'stripe_id': '1', - }), + assert bc.database.list_of("payments.Bag") == [ + { + **bc.format.to_dict(model.bag), + "token": None, + "status": "PAID", + "expires_at": None, + "chosen_period": chosen_period, + } + ] + assert bc.database.list_of("payments.Invoice") == [ + format_invoice_item( + { + "amount": math.ceil(amount), + "stripe_id": "1", + } + ), ] - assert bc.database.list_of('authenticate.UserSetting') == [ - format_user_setting({'lang': 'en'}), + assert bc.database.list_of("authenticate.UserSetting") == [ + format_user_setting({"lang": "en"}), ] bc.check.queryset_with_pks(model.bag.plans.all(), [1]) bc.check.queryset_with_pks(model.bag.service_items.all(), [1]) - assert tasks.build_subscription.delay.call_args_list 
== [call(1, 1, conversion_info='')] + assert tasks.build_subscription.delay.call_args_list == [call(1, 1, conversion_info="")] assert tasks.build_plan_financing.delay.call_args_list == [] assert tasks.build_free_subscription.delay.call_args_list == [] @@ -900,59 +918,61 @@ def test_with_chosen_period__amount_set(bc: Breathecode, client: APIClient): bc.check.calls( activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - call(1, 'checkout_completed', related_type='payments.Invoice', related_id=1), + call(1, "bag_created", related_type="payments.Bag", related_id=1), + call(1, "checkout_completed", related_type="payments.Invoice", related_id=1), ], ) def test_with_chosen_period__amount_set_with_conversion_info(bc: Breathecode, client: APIClient): bag = { - 'token': 'xdxdxdxdxdxdxdxdxdxd', - 'expires_at': UTC_NOW, - 'status': 'CHECKING', - 'type': 'BAG', + "token": "xdxdxdxdxdxdxdxdxdxd", + "expires_at": UTC_NOW, + "status": "CHECKING", + "type": "BAG", **generate_amounts_by_time(), } - chosen_period = random.choice(['MONTH', 'QUARTER', 'HALF', 'YEAR']) + chosen_period = random.choice(["MONTH", "QUARTER", "HALF", "YEAR"]) amount = get_amount_per_period(chosen_period, bag) - plan = {'is_renewable': False} + plan = {"is_renewable": False} model = bc.database.create(user=1, bag=bag, academy=1, currency=1, plan=plan, service_item=1) client.force_authenticate(user=model.user) - url = reverse_lazy('payments:pay') + url = reverse_lazy("payments:pay") data = { - 'token': 'xdxdxdxdxdxdxdxdxdxd', - 'chosen_period': chosen_period, - 'conversion_info': { - 'landing_url': '/home' - }, + "token": "xdxdxdxdxdxdxdxdxdxd", + "chosen_period": chosen_period, + "conversion_info": {"landing_url": "/home"}, } - response = client.post(url, data, format='json') + response = client.post(url, data, format="json") json = response.json() - expected = get_serializer(bc, model.currency, model.user, data={'amount': math.ceil(amount)}) + expected = get_serializer(bc, model.currency, model.user, data={"amount": math.ceil(amount)}) assert json == expected assert response.status_code == status.HTTP_201_CREATED - assert bc.database.list_of('payments.Bag') == [{ - **bc.format.to_dict(model.bag), - 'token': None, - 'status': 'PAID', - 'expires_at': None, - 'chosen_period': chosen_period, - }] - assert bc.database.list_of('payments.Invoice') == [ - format_invoice_item({ - 'amount': math.ceil(amount), - 'stripe_id': '1', - }), + assert bc.database.list_of("payments.Bag") == [ + { + **bc.format.to_dict(model.bag), + "token": None, + "status": "PAID", + "expires_at": None, + "chosen_period": chosen_period, + } ] - assert bc.database.list_of('authenticate.UserSetting') == [ - format_user_setting({'lang': 'en'}), + assert bc.database.list_of("payments.Invoice") == [ + format_invoice_item( + { + "amount": math.ceil(amount), + "stripe_id": "1", + } + ), + ] + assert bc.database.list_of("authenticate.UserSetting") == [ + format_user_setting({"lang": "en"}), ] bc.check.queryset_with_pks(model.bag.plans.all(), [1]) @@ -966,50 +986,50 @@ def test_with_chosen_period__amount_set_with_conversion_info(bc: Breathecode, cl bc.check.calls( activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - call(1, 'checkout_completed', related_type='payments.Invoice', related_id=1), + call(1, "bag_created", related_type="payments.Bag", related_id=1), + call(1, "checkout_completed", related_type="payments.Invoice", related_id=1), 
], ) def test_with_chosen_period__amount_set_with_conversion_info_with_wrong_fields(bc: Breathecode, client: APIClient): bag = { - 'token': 'xdxdxdxdxdxdxdxdxdxd', - 'expires_at': UTC_NOW, - 'status': 'CHECKING', - 'type': 'BAG', + "token": "xdxdxdxdxdxdxdxdxdxd", + "expires_at": UTC_NOW, + "status": "CHECKING", + "type": "BAG", **generate_amounts_by_time(), } - chosen_period = random.choice(['MONTH', 'QUARTER', 'HALF', 'YEAR']) + chosen_period = random.choice(["MONTH", "QUARTER", "HALF", "YEAR"]) amount = get_amount_per_period(chosen_period, bag) - plan = {'is_renewable': False} + plan = {"is_renewable": False} model = bc.database.create(user=1, bag=bag, academy=1, currency=1, plan=plan, service_item=1) client.force_authenticate(user=model.user) - url = reverse_lazy('payments:pay') + url = reverse_lazy("payments:pay") data = { - 'token': 'xdxdxdxdxdxdxdxdxdxd', - 'chosen_period': chosen_period, - 'conversion_info': { - 'pepe': '/home' - }, + "token": "xdxdxdxdxdxdxdxdxdxd", + "chosen_period": chosen_period, + "conversion_info": {"pepe": "/home"}, } - response = client.post(url, data, format='json') + response = client.post(url, data, format="json") json = response.json() - expected = {'detail': 'conversion-info-invalid-key', 'status_code': 400} + expected = {"detail": "conversion-info-invalid-key", "status_code": 400} assert json == expected assert response.status_code == status.HTTP_400_BAD_REQUEST - assert bc.database.list_of('payments.Bag') == [{ - **bc.format.to_dict(model.bag), - }] - assert bc.database.list_of('payments.Invoice') == [] - assert bc.database.list_of('authenticate.UserSetting') == [ - format_user_setting({'lang': 'en'}), + assert bc.database.list_of("payments.Bag") == [ + { + **bc.format.to_dict(model.bag), + } + ] + assert bc.database.list_of("payments.Invoice") == [] + assert bc.database.list_of("authenticate.UserSetting") == [ + format_user_setting({"lang": "en"}), ] bc.check.queryset_with_pks(model.bag.plans.all(), [1]) @@ -1023,46 +1043,48 @@ def test_with_chosen_period__amount_set_with_conversion_info_with_wrong_fields(b bc.check.calls( activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), + call(1, "bag_created", related_type="payments.Bag", related_id=1), ], ) def test_installments_not_found(bc: Breathecode, client: APIClient): bag = { - 'token': 'xdxdxdxdxdxdxdxdxdxd', - 'expires_at': UTC_NOW, - 'status': 'CHECKING', - 'type': 'BAG', + "token": "xdxdxdxdxdxdxdxdxdxd", + "expires_at": UTC_NOW, + "status": "CHECKING", + "type": "BAG", **generate_amounts_by_time(), } - chosen_period = random.choice(['MONTH', 'QUARTER', 'HALF', 'YEAR']) + chosen_period = random.choice(["MONTH", "QUARTER", "HALF", "YEAR"]) amount = get_amount_per_period(chosen_period, bag) - plan = {'is_renewable': False} + plan = {"is_renewable": False} model = bc.database.create(user=1, bag=bag, academy=1, currency=1, plan=plan, service_item=1) client.force_authenticate(user=model.user) - url = reverse_lazy('payments:pay') + url = reverse_lazy("payments:pay") data = { - 'token': 'xdxdxdxdxdxdxdxdxdxd', - 'how_many_installments': random.randint(1, 12), + "token": "xdxdxdxdxdxdxdxdxdxd", + "how_many_installments": random.randint(1, 12), } - response = client.post(url, data, format='json') + response = client.post(url, data, format="json") json = response.json() - expected = {'detail': 'invalid-bag-configured-by-installments', 'status_code': 500} + expected = {"detail": "invalid-bag-configured-by-installments", "status_code": 500} 
assert json == expected assert response.status_code == status.HTTP_500_INTERNAL_SERVER_ERROR - assert bc.database.list_of('payments.Bag') == [{ - **bc.format.to_dict(model.bag), - }] - assert bc.database.list_of('payments.Invoice') == [] - assert bc.database.list_of('authenticate.UserSetting') == [ - format_user_setting({'lang': 'en'}), + assert bc.database.list_of("payments.Bag") == [ + { + **bc.format.to_dict(model.bag), + } + ] + assert bc.database.list_of("payments.Invoice") == [] + assert bc.database.list_of("authenticate.UserSetting") == [ + format_user_setting({"lang": "en"}), ] bc.check.queryset_with_pks(model.bag.plans.all(), [1]) @@ -1076,7 +1098,7 @@ def test_installments_not_found(bc: Breathecode, client: APIClient): bc.check.calls( activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), + call(1, "bag_created", related_type="payments.Bag", related_id=1), ], ) @@ -1085,17 +1107,17 @@ def test_with_installments(bc: Breathecode, client: APIClient): how_many_installments = random.randint(1, 12) charge = random.random() * 99 + 1 bag = { - 'token': 'xdxdxdxdxdxdxdxdxdxd', - 'expires_at': UTC_NOW, - 'status': 'CHECKING', - 'type': 'BAG', + "token": "xdxdxdxdxdxdxdxdxdxd", + "expires_at": UTC_NOW, + "status": "CHECKING", + "type": "BAG", **generate_amounts_by_time(), } financing_option = { - 'monthly_price': charge, - 'how_many_months': how_many_installments, + "monthly_price": charge, + "how_many_months": how_many_installments, } - plan = {'is_renewable': False} + plan = {"is_renewable": False} model = bc.database.create( user=1, @@ -1108,41 +1130,45 @@ def test_with_installments(bc: Breathecode, client: APIClient): ) client.force_authenticate(user=model.user) - url = reverse_lazy('payments:pay') + url = reverse_lazy("payments:pay") data = { - 'token': 'xdxdxdxdxdxdxdxdxdxd', - 'how_many_installments': how_many_installments, + "token": "xdxdxdxdxdxdxdxdxdxd", + "how_many_installments": how_many_installments, } - response = client.post(url, data, format='json') + response = client.post(url, data, format="json") json = response.json() - expected = get_serializer(bc, model.currency, model.user, data={'amount': math.ceil(charge)}) + expected = get_serializer(bc, model.currency, model.user, data={"amount": math.ceil(charge)}) assert json == expected assert response.status_code == status.HTTP_201_CREATED - assert bc.database.list_of('payments.Bag') == [{ - **bc.format.to_dict(model.bag), - 'token': None, - 'status': 'PAID', - # 'chosen_period': 'NO_SET', - 'expires_at': None, - 'how_many_installments': how_many_installments, - }] - assert bc.database.list_of('payments.Invoice') == [ - format_invoice_item({ - 'amount': math.ceil(charge), - 'stripe_id': '1', - }), + assert bc.database.list_of("payments.Bag") == [ + { + **bc.format.to_dict(model.bag), + "token": None, + "status": "PAID", + # 'chosen_period': 'NO_SET', + "expires_at": None, + "how_many_installments": how_many_installments, + } + ] + assert bc.database.list_of("payments.Invoice") == [ + format_invoice_item( + { + "amount": math.ceil(charge), + "stripe_id": "1", + } + ), ] - assert bc.database.list_of('authenticate.UserSetting') == [ - format_user_setting({'lang': 'en'}), + assert bc.database.list_of("authenticate.UserSetting") == [ + format_user_setting({"lang": "en"}), ] bc.check.queryset_with_pks(model.bag.plans.all(), [1]) bc.check.queryset_with_pks(model.bag.service_items.all(), [1]) assert tasks.build_subscription.delay.call_args_list == [] - assert 
tasks.build_plan_financing.delay.call_args_list == [call(1, 1, conversion_info='')] + assert tasks.build_plan_financing.delay.call_args_list == [call(1, 1, conversion_info="")] assert tasks.build_free_subscription.delay.call_args_list == [] bc.check.calls(admissions_tasks.build_cohort_user.delay.call_args_list, []) @@ -1150,8 +1176,8 @@ def test_with_installments(bc: Breathecode, client: APIClient): bc.check.calls( activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - call(1, 'checkout_completed', related_type='payments.Invoice', related_id=1), + call(1, "bag_created", related_type="payments.Bag", related_id=1), + call(1, "checkout_completed", related_type="payments.Invoice", related_id=1), ], ) @@ -1160,17 +1186,17 @@ def test_with_installments_with_conversion_info(bc: Breathecode, client: APIClie how_many_installments = random.randint(1, 12) charge = random.random() * 99 + 1 bag = { - 'token': 'xdxdxdxdxdxdxdxdxdxd', - 'expires_at': UTC_NOW, - 'status': 'CHECKING', - 'type': 'BAG', + "token": "xdxdxdxdxdxdxdxdxdxd", + "expires_at": UTC_NOW, + "status": "CHECKING", + "type": "BAG", **generate_amounts_by_time(), } financing_option = { - 'monthly_price': charge, - 'how_many_months': how_many_installments, + "monthly_price": charge, + "how_many_months": how_many_installments, } - plan = {'is_renewable': False} + plan = {"is_renewable": False} model = bc.database.create( user=1, @@ -1183,38 +1209,40 @@ def test_with_installments_with_conversion_info(bc: Breathecode, client: APIClie ) client.force_authenticate(user=model.user) - url = reverse_lazy('payments:pay') + url = reverse_lazy("payments:pay") data = { - 'token': 'xdxdxdxdxdxdxdxdxdxd', - 'how_many_installments': how_many_installments, - 'conversion_info': { - 'landing_url': '/home' - }, + "token": "xdxdxdxdxdxdxdxdxdxd", + "how_many_installments": how_many_installments, + "conversion_info": {"landing_url": "/home"}, } - response = client.post(url, data, format='json') + response = client.post(url, data, format="json") json = response.json() - expected = get_serializer(bc, model.currency, model.user, data={'amount': math.ceil(charge)}) + expected = get_serializer(bc, model.currency, model.user, data={"amount": math.ceil(charge)}) assert json == expected assert response.status_code == status.HTTP_201_CREATED - assert bc.database.list_of('payments.Bag') == [{ - **bc.format.to_dict(model.bag), - 'token': None, - 'status': 'PAID', - # 'chosen_period': 'NO_SET', - 'expires_at': None, - 'how_many_installments': how_many_installments, - }] - assert bc.database.list_of('payments.Invoice') == [ - format_invoice_item({ - 'amount': math.ceil(charge), - 'stripe_id': '1', - }), + assert bc.database.list_of("payments.Bag") == [ + { + **bc.format.to_dict(model.bag), + "token": None, + "status": "PAID", + # 'chosen_period': 'NO_SET', + "expires_at": None, + "how_many_installments": how_many_installments, + } ] - assert bc.database.list_of('authenticate.UserSetting') == [ - format_user_setting({'lang': 'en'}), + assert bc.database.list_of("payments.Invoice") == [ + format_invoice_item( + { + "amount": math.ceil(charge), + "stripe_id": "1", + } + ), + ] + assert bc.database.list_of("authenticate.UserSetting") == [ + format_user_setting({"lang": "en"}), ] bc.check.queryset_with_pks(model.bag.plans.all(), [1]) @@ -1228,8 +1256,8 @@ def test_with_installments_with_conversion_info(bc: Breathecode, client: APIClie bc.check.calls( activity_tasks.add_activity.delay.call_args_list, [ - call(1, 
'bag_created', related_type='payments.Bag', related_id=1), - call(1, 'checkout_completed', related_type='payments.Invoice', related_id=1), + call(1, "bag_created", related_type="payments.Bag", related_id=1), + call(1, "checkout_completed", related_type="payments.Invoice", related_id=1), ], ) @@ -1238,38 +1266,38 @@ def test_coupons__with_installments(bc: Breathecode, client: APIClient): how_many_installments = random.randint(1, 12) charge = random.random() * 50 + 50 bag = { - 'token': 'xdxdxdxdxdxdxdxdxdxd', - 'expires_at': UTC_NOW, - 'status': 'CHECKING', - 'type': 'BAG', + "token": "xdxdxdxdxdxdxdxdxdxd", + "expires_at": UTC_NOW, + "status": "CHECKING", + "type": "BAG", **generate_amounts_by_time(), } financing_option = { - 'monthly_price': charge, - 'how_many_months': how_many_installments, + "monthly_price": charge, + "how_many_months": how_many_installments, } - plan = {'is_renewable': False} + plan = {"is_renewable": False} random_percent = random.random() * 0.3 discount1 = random.random() * 20 discount2 = random.random() * 10 coupons = [ { - 'discount_type': 'PERCENT_OFF', - 'discount_value': random_percent, - 'offered_at': None, - 'expires_at': None, + "discount_type": "PERCENT_OFF", + "discount_value": random_percent, + "offered_at": None, + "expires_at": None, }, { - 'discount_type': 'FIXED_PRICE', - 'discount_value': discount1, - 'offered_at': None, - 'expires_at': None, + "discount_type": "FIXED_PRICE", + "discount_value": discount1, + "offered_at": None, + "expires_at": None, }, { - 'discount_type': 'HAGGLING', - 'discount_value': discount2, - 'offered_at': None, - 'expires_at': None, + "discount_type": "HAGGLING", + "discount_value": discount2, + "offered_at": None, + "expires_at": None, }, ] random.shuffle(coupons) @@ -1286,42 +1314,46 @@ def test_coupons__with_installments(bc: Breathecode, client: APIClient): ) client.force_authenticate(user=model.user) - url = reverse_lazy('payments:pay') + url = reverse_lazy("payments:pay") data = { - 'token': 'xdxdxdxdxdxdxdxdxdxd', - 'how_many_installments': how_many_installments, + "token": "xdxdxdxdxdxdxdxdxdxd", + "how_many_installments": how_many_installments, } - response = client.post(url, data, format='json') + response = client.post(url, data, format="json") json = response.json() total = math.ceil(charge - (charge * random_percent) - discount1 - discount2) - expected = get_serializer(bc, model.currency, model.user, coupons=model.coupon, data={'amount': total}) + expected = get_serializer(bc, model.currency, model.user, coupons=model.coupon, data={"amount": total}) assert json == expected assert response.status_code == status.HTTP_201_CREATED - assert bc.database.list_of('payments.Bag') == [{ - **bc.format.to_dict(model.bag), - 'token': None, - 'status': 'PAID', - # 'chosen_period': 'NO_SET', - 'expires_at': None, - 'how_many_installments': how_many_installments, - }] - assert bc.database.list_of('payments.Invoice') == [ - format_invoice_item({ - 'amount': total, - 'stripe_id': '1', - }), + assert bc.database.list_of("payments.Bag") == [ + { + **bc.format.to_dict(model.bag), + "token": None, + "status": "PAID", + # 'chosen_period': 'NO_SET', + "expires_at": None, + "how_many_installments": how_many_installments, + } + ] + assert bc.database.list_of("payments.Invoice") == [ + format_invoice_item( + { + "amount": total, + "stripe_id": "1", + } + ), ] - assert bc.database.list_of('authenticate.UserSetting') == [ - format_user_setting({'lang': 'en'}), + assert bc.database.list_of("authenticate.UserSetting") == [ + 
format_user_setting({"lang": "en"}), ] bc.check.queryset_with_pks(model.bag.plans.all(), [1]) bc.check.queryset_with_pks(model.bag.service_items.all(), [1]) assert tasks.build_subscription.delay.call_args_list == [] - assert tasks.build_plan_financing.delay.call_args_list == [call(1, 1, conversion_info='')] + assert tasks.build_plan_financing.delay.call_args_list == [call(1, 1, conversion_info="")] assert tasks.build_free_subscription.delay.call_args_list == [] bc.check.calls(admissions_tasks.build_cohort_user.delay.call_args_list, []) @@ -1329,45 +1361,45 @@ def test_coupons__with_installments(bc: Breathecode, client: APIClient): bc.check.calls( activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - call(1, 'checkout_completed', related_type='payments.Invoice', related_id=1), + call(1, "bag_created", related_type="payments.Bag", related_id=1), + call(1, "checkout_completed", related_type="payments.Invoice", related_id=1), ], ) def test_coupons__with_chosen_period__amount_set(bc: Breathecode, client: APIClient): bag = { - 'token': 'xdxdxdxdxdxdxdxdxdxd', - 'expires_at': UTC_NOW, - 'status': 'CHECKING', - 'type': 'BAG', + "token": "xdxdxdxdxdxdxdxdxdxd", + "expires_at": UTC_NOW, + "status": "CHECKING", + "type": "BAG", **generate_amounts_by_time(over_50=True), } - chosen_period = random.choice(['MONTH', 'QUARTER', 'HALF', 'YEAR']) + chosen_period = random.choice(["MONTH", "QUARTER", "HALF", "YEAR"]) amount = get_amount_per_period(chosen_period, bag) - plan = {'is_renewable': False} + plan = {"is_renewable": False} random_percent = random.random() * 0.3 discount1 = random.random() * 20 discount2 = random.random() * 10 coupons = [ { - 'discount_type': 'PERCENT_OFF', - 'discount_value': random_percent, - 'offered_at': None, - 'expires_at': None, + "discount_type": "PERCENT_OFF", + "discount_value": random_percent, + "offered_at": None, + "expires_at": None, }, { - 'discount_type': 'FIXED_PRICE', - 'discount_value': discount1, - 'offered_at': None, - 'expires_at': None, + "discount_type": "FIXED_PRICE", + "discount_value": discount1, + "offered_at": None, + "expires_at": None, }, { - 'discount_type': 'HAGGLING', - 'discount_value': discount2, - 'offered_at': None, - 'expires_at': None, + "discount_type": "HAGGLING", + "discount_value": discount2, + "offered_at": None, + "expires_at": None, }, ] random.shuffle(coupons) @@ -1375,40 +1407,44 @@ def test_coupons__with_chosen_period__amount_set(bc: Breathecode, client: APICli model = bc.database.create(user=1, bag=bag, academy=1, currency=1, plan=plan, service_item=1, coupon=coupons) client.force_authenticate(user=model.user) - url = reverse_lazy('payments:pay') + url = reverse_lazy("payments:pay") data = { - 'token': 'xdxdxdxdxdxdxdxdxdxd', - 'chosen_period': chosen_period, + "token": "xdxdxdxdxdxdxdxdxdxd", + "chosen_period": chosen_period, } - response = client.post(url, data, format='json') + response = client.post(url, data, format="json") json = response.json() total = math.ceil(amount - (amount * random_percent) - discount1 - discount2) - expected = get_serializer(bc, model.currency, model.user, coupons=model.coupon, data={'amount': total}) + expected = get_serializer(bc, model.currency, model.user, coupons=model.coupon, data={"amount": total}) assert json == expected assert response.status_code == status.HTTP_201_CREATED - assert bc.database.list_of('payments.Bag') == [{ - **bc.format.to_dict(model.bag), - 'token': None, - 'status': 'PAID', - 'expires_at': None, - 
'chosen_period': chosen_period, - }] - assert bc.database.list_of('payments.Invoice') == [ - format_invoice_item({ - 'amount': total, - 'stripe_id': '1', - }), + assert bc.database.list_of("payments.Bag") == [ + { + **bc.format.to_dict(model.bag), + "token": None, + "status": "PAID", + "expires_at": None, + "chosen_period": chosen_period, + } + ] + assert bc.database.list_of("payments.Invoice") == [ + format_invoice_item( + { + "amount": total, + "stripe_id": "1", + } + ), ] - assert bc.database.list_of('authenticate.UserSetting') == [ - format_user_setting({'lang': 'en'}), + assert bc.database.list_of("authenticate.UserSetting") == [ + format_user_setting({"lang": "en"}), ] bc.check.queryset_with_pks(model.bag.plans.all(), [1]) bc.check.queryset_with_pks(model.bag.service_items.all(), [1]) - assert tasks.build_subscription.delay.call_args_list == [call(1, 1, conversion_info='')] + assert tasks.build_subscription.delay.call_args_list == [call(1, 1, conversion_info="")] assert tasks.build_plan_financing.delay.call_args_list == [] assert tasks.build_free_subscription.delay.call_args_list == [] @@ -1417,45 +1453,45 @@ def test_coupons__with_chosen_period__amount_set(bc: Breathecode, client: APICli bc.check.calls( activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - call(1, 'checkout_completed', related_type='payments.Invoice', related_id=1), + call(1, "bag_created", related_type="payments.Bag", related_id=1), + call(1, "checkout_completed", related_type="payments.Invoice", related_id=1), ], ) def test_coupons__with_chosen_period__amount_set_with_conversion_info(bc: Breathecode, client: APIClient): bag = { - 'token': 'xdxdxdxdxdxdxdxdxdxd', - 'expires_at': UTC_NOW, - 'status': 'CHECKING', - 'type': 'BAG', + "token": "xdxdxdxdxdxdxdxdxdxd", + "expires_at": UTC_NOW, + "status": "CHECKING", + "type": "BAG", **generate_amounts_by_time(over_50=True), } - chosen_period = random.choice(['MONTH', 'QUARTER', 'HALF', 'YEAR']) + chosen_period = random.choice(["MONTH", "QUARTER", "HALF", "YEAR"]) amount = get_amount_per_period(chosen_period, bag) - plan = {'is_renewable': False} + plan = {"is_renewable": False} random_percent = random.random() * 0.3 discount1 = random.random() * 20 discount2 = random.random() * 10 coupons = [ { - 'discount_type': 'PERCENT_OFF', - 'discount_value': random_percent, - 'offered_at': None, - 'expires_at': None, + "discount_type": "PERCENT_OFF", + "discount_value": random_percent, + "offered_at": None, + "expires_at": None, }, { - 'discount_type': 'FIXED_PRICE', - 'discount_value': discount1, - 'offered_at': None, - 'expires_at': None, + "discount_type": "FIXED_PRICE", + "discount_value": discount1, + "offered_at": None, + "expires_at": None, }, { - 'discount_type': 'HAGGLING', - 'discount_value': discount2, - 'offered_at': None, - 'expires_at': None, + "discount_type": "HAGGLING", + "discount_value": discount2, + "offered_at": None, + "expires_at": None, }, ] random.shuffle(coupons) @@ -1463,38 +1499,40 @@ def test_coupons__with_chosen_period__amount_set_with_conversion_info(bc: Breath model = bc.database.create(user=1, bag=bag, academy=1, currency=1, plan=plan, service_item=1, coupon=coupons) client.force_authenticate(user=model.user) - url = reverse_lazy('payments:pay') + url = reverse_lazy("payments:pay") data = { - 'token': 'xdxdxdxdxdxdxdxdxdxd', - 'chosen_period': chosen_period, - 'conversion_info': { - 'landing_url': '/home' - }, + "token": "xdxdxdxdxdxdxdxdxdxd", + "chosen_period": chosen_period, + 
"conversion_info": {"landing_url": "/home"}, } - response = client.post(url, data, format='json') + response = client.post(url, data, format="json") json = response.json() total = math.ceil(amount - (amount * random_percent) - discount1 - discount2) - expected = get_serializer(bc, model.currency, model.user, coupons=model.coupon, data={'amount': total}) + expected = get_serializer(bc, model.currency, model.user, coupons=model.coupon, data={"amount": total}) assert json == expected assert response.status_code == status.HTTP_201_CREATED - assert bc.database.list_of('payments.Bag') == [{ - **bc.format.to_dict(model.bag), - 'token': None, - 'status': 'PAID', - 'expires_at': None, - 'chosen_period': chosen_period, - }] - assert bc.database.list_of('payments.Invoice') == [ - format_invoice_item({ - 'amount': total, - 'stripe_id': '1', - }), + assert bc.database.list_of("payments.Bag") == [ + { + **bc.format.to_dict(model.bag), + "token": None, + "status": "PAID", + "expires_at": None, + "chosen_period": chosen_period, + } + ] + assert bc.database.list_of("payments.Invoice") == [ + format_invoice_item( + { + "amount": total, + "stripe_id": "1", + } + ), ] - assert bc.database.list_of('authenticate.UserSetting') == [ - format_user_setting({'lang': 'en'}), + assert bc.database.list_of("authenticate.UserSetting") == [ + format_user_setting({"lang": "en"}), ] bc.check.queryset_with_pks(model.bag.plans.all(), [1]) @@ -1508,7 +1546,7 @@ def test_coupons__with_chosen_period__amount_set_with_conversion_info(bc: Breath bc.check.calls( activity_tasks.add_activity.delay.call_args_list, [ - call(1, 'bag_created', related_type='payments.Bag', related_id=1), - call(1, 'checkout_completed', related_type='payments.Invoice', related_id=1), + call(1, "bag_created", related_type="payments.Bag", related_id=1), + call(1, "checkout_completed", related_type="payments.Invoice", related_id=1), ], ) diff --git a/breathecode/payments/tests/urls/tests_plan.py b/breathecode/payments/tests/urls/tests_plan.py index 9ab0ccf43..f68cef691 100644 --- a/breathecode/payments/tests/urls/tests_plan.py +++ b/breathecode/payments/tests/urls/tests_plan.py @@ -11,35 +11,35 @@ def academy_serializer(academy): return { - 'id': academy.id, - 'name': academy.name, - 'slug': academy.slug, + "id": academy.id, + "name": academy.name, + "slug": academy.slug, } def service_item_serializer(service_item, service): return { - 'how_many': service_item.how_many, - 'service': { - 'groups': [], - 'private': service.private, - 'slug': service.slug, - 'title': service.title, - 'icon_url': service.icon_url, + "how_many": service_item.how_many, + "service": { + "groups": [], + "private": service.private, + "slug": service.slug, + "title": service.title, + "icon_url": service.icon_url, }, - 'unit_type': service_item.unit_type, - 'sort_priority': service_item.sort_priority, + "unit_type": service_item.unit_type, + "sort_priority": service_item.sort_priority, } def financing_option_serializer(financing_option, currency): return { - 'currency': { - 'code': currency.code, - 'name': currency.name, + "currency": { + "code": currency.code, + "name": currency.name, }, - 'how_many_months': financing_option.how_many_months, - 'monthly_price': financing_option.monthly_price, + "how_many_months": financing_option.how_many_months, + "monthly_price": financing_option.monthly_price, } @@ -54,27 +54,27 @@ def get_serializer(event, currency, service=None, academy=None, service_items=[] academy = academy_serializer(academy) return { - 'slug': event.slug, - 'currency': { - 
'code': currency.code, - 'name': currency.name, + "slug": event.slug, + "currency": { + "code": currency.code, + "name": currency.name, }, - 'financing_options': financing_options, - 'has_available_cohorts': len(cohorts) > 0, - 'has_waiting_list': event.has_waiting_list, - 'is_renewable': event.is_renewable, - 'owner': academy, - 'price_per_half': event.price_per_half, - 'price_per_month': event.price_per_month, - 'price_per_quarter': event.price_per_quarter, - 'price_per_year': event.price_per_year, - 'service_items': service_items, - 'slug': event.slug, - 'status': event.status, - 'time_of_life': event.time_of_life, - 'time_of_life_unit': event.time_of_life_unit, - 'trial_duration': event.trial_duration, - 'trial_duration_unit': event.trial_duration_unit, + "financing_options": financing_options, + "has_available_cohorts": len(cohorts) > 0, + "has_waiting_list": event.has_waiting_list, + "is_renewable": event.is_renewable, + "owner": academy, + "price_per_half": event.price_per_half, + "price_per_month": event.price_per_month, + "price_per_quarter": event.price_per_quarter, + "price_per_year": event.price_per_year, + "service_items": service_items, + "slug": event.slug, + "status": event.status, + "time_of_life": event.time_of_life, + "time_of_life_unit": event.time_of_life_unit, + "trial_duration": event.trial_duration, + "trial_duration_unit": event.trial_duration_unit, } @@ -87,7 +87,7 @@ class SignalTestSuite(PaymentsTestCase): # When: get with no auth # Then: return 200 def test__no_auth(self): - url = reverse_lazy('payments:plan') + url = reverse_lazy("payments:plan") response = self.client.get(url) json = response.json() @@ -95,46 +95,37 @@ def test__no_auth(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('payments.Plan'), []) + self.assertEqual(self.bc.database.list_of("payments.Plan"), []) # Given: 2 Plan, 4 PlanServiceItem, 2 ServiceItem and 1 Service # When: get with no auth and plan is renewable # Then: return 200 with 2 Plan with no financial options def test__two_items__plan_is_renewable(self): - plan = {'time_of_life': None, 'time_of_life_unit': None, 'is_renewable': True} - plan_service_items = [{ - 'service_item_id': n, - 'plan_id': 1 - } for n in range(1, 3)] + [{ - 'service_item_id': n, - 'plan_id': 2 - } for n in range(1, 3)] - model = self.bc.database.create(plan=(2, plan), - service_item=2, - plan_service_item=plan_service_items, - financing_option=2) - - url = reverse_lazy('payments:plan') + plan = {"time_of_life": None, "time_of_life_unit": None, "is_renewable": True} + plan_service_items = [{"service_item_id": n, "plan_id": 1} for n in range(1, 3)] + [ + {"service_item_id": n, "plan_id": 2} for n in range(1, 3) + ] + model = self.bc.database.create( + plan=(2, plan), service_item=2, plan_service_item=plan_service_items, financing_option=2 + ) + + url = reverse_lazy("payments:plan") response = self.client.get(url) json = response.json() expected = [ - get_serializer(model.plan[1], - model.currency, - model.service, - service_items=model.service_item, - financing_options=[]), - get_serializer(model.plan[0], - model.currency, - model.service, - service_items=model.service_item, - financing_options=[]), + get_serializer( + model.plan[1], model.currency, model.service, service_items=model.service_item, financing_options=[] + ), + get_serializer( + model.plan[0], model.currency, model.service, service_items=model.service_item, financing_options=[] + ), ] 
self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('payments.Plan'), + self.bc.database.list_of("payments.Plan"), self.bc.format.to_dict(model.plan), ) @@ -142,40 +133,39 @@ def test__two_items__plan_is_renewable(self): # When: get with no auth and plan is not renewable # Then: return 200 with 2 Plan with financial options def test__two_items__plan_is_not_renewable(self): - plan = {'time_of_life': 1, 'time_of_life_unit': 'WEEK', 'is_renewable': False} - plan_service_items = [{ - 'service_item_id': n, - 'plan_id': 1 - } for n in range(1, 3)] + [{ - 'service_item_id': n, - 'plan_id': 2 - } for n in range(1, 3)] - model = self.bc.database.create(plan=(2, plan), - service_item=2, - plan_service_item=plan_service_items, - financing_option=2) - - url = reverse_lazy('payments:plan') + plan = {"time_of_life": 1, "time_of_life_unit": "WEEK", "is_renewable": False} + plan_service_items = [{"service_item_id": n, "plan_id": 1} for n in range(1, 3)] + [ + {"service_item_id": n, "plan_id": 2} for n in range(1, 3) + ] + model = self.bc.database.create( + plan=(2, plan), service_item=2, plan_service_item=plan_service_items, financing_option=2 + ) + + url = reverse_lazy("payments:plan") response = self.client.get(url) json = response.json() expected = [ - get_serializer(model.plan[1], - model.currency, - model.service, - service_items=model.service_item, - financing_options=model.financing_option), - get_serializer(model.plan[0], - model.currency, - model.service, - service_items=model.service_item, - financing_options=model.financing_option), + get_serializer( + model.plan[1], + model.currency, + model.service, + service_items=model.service_item, + financing_options=model.financing_option, + ), + get_serializer( + model.plan[0], + model.currency, + model.service, + service_items=model.service_item, + financing_options=model.financing_option, + ), ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('payments.Plan'), + self.bc.database.list_of("payments.Plan"), self.bc.format.to_dict(model.plan), ) @@ -187,29 +177,24 @@ def test__two_items__plan_is_not_renewable(self): # When: get with no auth and cohort provided in the querystring # Then: return 400 def test__cohort_not_found(self): - plan = {'time_of_life': None, 'time_of_life_unit': None, 'is_renewable': True} - plan_service_items = [{ - 'service_item_id': n, - 'plan_id': 1 - } for n in range(1, 3)] + [{ - 'service_item_id': n, - 'plan_id': 2 - } for n in range(1, 3)] - model = self.bc.database.create(plan=(2, plan), - service_item=2, - plan_service_item=plan_service_items, - financing_option=2) - - url = reverse_lazy('payments:plan') + '?cohort=1' + plan = {"time_of_life": None, "time_of_life_unit": None, "is_renewable": True} + plan_service_items = [{"service_item_id": n, "plan_id": 1} for n in range(1, 3)] + [ + {"service_item_id": n, "plan_id": 2} for n in range(1, 3) + ] + model = self.bc.database.create( + plan=(2, plan), service_item=2, plan_service_item=plan_service_items, financing_option=2 + ) + + url = reverse_lazy("payments:plan") + "?cohort=1" response = self.client.get(url) json = response.json() - expected = {'detail': 'cohort-not-found', 'status_code': 400} + expected = {"detail": "cohort-not-found", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual( - 
self.bc.database.list_of('payments.Plan'), + self.bc.database.list_of("payments.Plan"), self.bc.format.to_dict(model.plan), ) @@ -219,23 +204,21 @@ def test__cohort_not_found(self): # -> plan is_onboarding is False # Then: return 200 with 2 Plan with no financial options def test__cohort_exists__is_onboarding_is_false(self): - plan = {'time_of_life': None, 'time_of_life_unit': None, 'is_renewable': True, 'is_onboarding': False} - plan_service_items = [{ - 'service_item_id': n, - 'plan_id': 1 - } for n in range(1, 3)] + [{ - 'service_item_id': n, - 'plan_id': 2 - } for n in range(1, 3)] - cohort = {'available_as_saas': True} - model = self.bc.database.create(plan=(2, plan), - service_item=2, - plan_service_item=plan_service_items, - financing_option=2, - cohort=cohort, - syllabus_version=1) - - url = reverse_lazy('payments:plan') + '?cohort=1' + plan = {"time_of_life": None, "time_of_life_unit": None, "is_renewable": True, "is_onboarding": False} + plan_service_items = [{"service_item_id": n, "plan_id": 1} for n in range(1, 3)] + [ + {"service_item_id": n, "plan_id": 2} for n in range(1, 3) + ] + cohort = {"available_as_saas": True} + model = self.bc.database.create( + plan=(2, plan), + service_item=2, + plan_service_item=plan_service_items, + financing_option=2, + cohort=cohort, + syllabus_version=1, + ) + + url = reverse_lazy("payments:plan") + "?cohort=1" response = self.client.get(url) json = response.json() @@ -244,7 +227,7 @@ def test__cohort_exists__is_onboarding_is_false(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('payments.Plan'), + self.bc.database.list_of("payments.Plan"), self.bc.format.to_dict(model.plan), ) @@ -254,21 +237,15 @@ def test__cohort_exists__is_onboarding_is_false(self): # -> plan is_onboarding is True # Then: return 200 with 2 Plan with no financial options def test__cohort_exists__is_onboarding_is_true(self): - plan = {'time_of_life': None, 'time_of_life_unit': None, 'is_renewable': True, 'is_onboarding': True} - plan_service_items = [{ - 'service_item_id': n, - 'plan_id': 1 - } for n in range(1, 3)] + [{ - 'service_item_id': n, - 'plan_id': 2 - } for n in range(1, 3)] - model = self.bc.database.create(plan=(2, plan), - service_item=2, - plan_service_item=plan_service_items, - financing_option=2, - cohort=1) - - url = reverse_lazy('payments:plan') + '?cohort=1' + plan = {"time_of_life": None, "time_of_life_unit": None, "is_renewable": True, "is_onboarding": True} + plan_service_items = [{"service_item_id": n, "plan_id": 1} for n in range(1, 3)] + [ + {"service_item_id": n, "plan_id": 2} for n in range(1, 3) + ] + model = self.bc.database.create( + plan=(2, plan), service_item=2, plan_service_item=plan_service_items, financing_option=2, cohort=1 + ) + + url = reverse_lazy("payments:plan") + "?cohort=1" response = self.client.get(url) json = response.json() @@ -277,7 +254,7 @@ def test__cohort_exists__is_onboarding_is_true(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('payments.Plan'), + self.bc.database.list_of("payments.Plan"), self.bc.format.to_dict(model.plan), ) @@ -287,51 +264,53 @@ def test__cohort_exists__is_onboarding_is_true(self): # -> plan is_onboarding is True # Then: return 200 with 2 Plan with no financial options def test__cohort_exists__is_onboarding_is_true(self): - plan = {'time_of_life': None, 'time_of_life_unit': None, 'is_renewable': True, 
'is_onboarding': True} - plan_service_items = [{ - 'service_item_id': n, - 'plan_id': 1 - } for n in range(1, 3)] + [{ - 'service_item_id': n, - 'plan_id': 2 - } for n in range(1, 3)] - cohort = {'available_as_saas': True} - academy = {'available_as_saas': True} - model = self.bc.database.create(plan=(2, plan), - service_item=2, - plan_service_item=plan_service_items, - financing_option=2, - cohort=cohort, - cohort_set=1, - cohort_set_cohort=1, - syllabus_version=1, - academy=academy) - - url = reverse_lazy('payments:plan') + '?cohort=1' + plan = {"time_of_life": None, "time_of_life_unit": None, "is_renewable": True, "is_onboarding": True} + plan_service_items = [{"service_item_id": n, "plan_id": 1} for n in range(1, 3)] + [ + {"service_item_id": n, "plan_id": 2} for n in range(1, 3) + ] + cohort = {"available_as_saas": True} + academy = {"available_as_saas": True} + model = self.bc.database.create( + plan=(2, plan), + service_item=2, + plan_service_item=plan_service_items, + financing_option=2, + cohort=cohort, + cohort_set=1, + cohort_set_cohort=1, + syllabus_version=1, + academy=academy, + ) + + url = reverse_lazy("payments:plan") + "?cohort=1" response = self.client.get(url) json = response.json() expected = [ - get_serializer(model.plan[1], - model.currency, - model.service, - model.academy, - service_items=model.service_item, - cohorts=[model.cohort], - financing_options=[]), - get_serializer(model.plan[0], - model.currency, - model.service, - model.academy, - service_items=model.service_item, - cohorts=[model.cohort], - financing_options=[]), + get_serializer( + model.plan[1], + model.currency, + model.service, + model.academy, + service_items=model.service_item, + cohorts=[model.cohort], + financing_options=[], + ), + get_serializer( + model.plan[0], + model.currency, + model.service, + model.academy, + service_items=model.service_item, + cohorts=[model.cohort], + financing_options=[], + ), ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('payments.Plan'), + self.bc.database.list_of("payments.Plan"), self.bc.format.to_dict(model.plan), ) @@ -343,29 +322,24 @@ def test__cohort_exists__is_onboarding_is_true(self): # When: get with no auth and cohort provided in the querystring # Then: return 400 def test__syllabus_not_found(self): - plan = {'time_of_life': None, 'time_of_life_unit': None, 'is_renewable': True} - plan_service_items = [{ - 'service_item_id': n, - 'plan_id': 1 - } for n in range(1, 3)] + [{ - 'service_item_id': n, - 'plan_id': 2 - } for n in range(1, 3)] - model = self.bc.database.create(plan=(2, plan), - service_item=2, - plan_service_item=plan_service_items, - financing_option=2) - - url = reverse_lazy('payments:plan') + '?syllabus=1' + plan = {"time_of_life": None, "time_of_life_unit": None, "is_renewable": True} + plan_service_items = [{"service_item_id": n, "plan_id": 1} for n in range(1, 3)] + [ + {"service_item_id": n, "plan_id": 2} for n in range(1, 3) + ] + model = self.bc.database.create( + plan=(2, plan), service_item=2, plan_service_item=plan_service_items, financing_option=2 + ) + + url = reverse_lazy("payments:plan") + "?syllabus=1" response = self.client.get(url) json = response.json() - expected = {'detail': 'syllabus-not-found', 'status_code': 400} + expected = {"detail": "syllabus-not-found", "status_code": 400} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual( - 
self.bc.database.list_of('payments.Plan'), + self.bc.database.list_of("payments.Plan"), self.bc.format.to_dict(model.plan), ) @@ -375,23 +349,21 @@ def test__syllabus_not_found(self): # -> plan is_onboarding is False # Then: return 200 with 2 Plan with no financial options def test__syllabus_exists__is_onboarding_is_false(self): - plan = {'time_of_life': None, 'time_of_life_unit': None, 'is_renewable': True, 'is_onboarding': False} - plan_service_items = [{ - 'service_item_id': n, - 'plan_id': 1 - } for n in range(1, 3)] + [{ - 'service_item_id': n, - 'plan_id': 2 - } for n in range(1, 3)] - cohort = {'available_as_saas': True} - model = self.bc.database.create(plan=(2, plan), - service_item=2, - plan_service_item=plan_service_items, - financing_option=2, - cohort=cohort, - syllabus_version=1) - - url = reverse_lazy('payments:plan') + '?syllabus=1' + plan = {"time_of_life": None, "time_of_life_unit": None, "is_renewable": True, "is_onboarding": False} + plan_service_items = [{"service_item_id": n, "plan_id": 1} for n in range(1, 3)] + [ + {"service_item_id": n, "plan_id": 2} for n in range(1, 3) + ] + cohort = {"available_as_saas": True} + model = self.bc.database.create( + plan=(2, plan), + service_item=2, + plan_service_item=plan_service_items, + financing_option=2, + cohort=cohort, + syllabus_version=1, + ) + + url = reverse_lazy("payments:plan") + "?syllabus=1" response = self.client.get(url) json = response.json() @@ -400,7 +372,7 @@ def test__syllabus_exists__is_onboarding_is_false(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('payments.Plan'), + self.bc.database.list_of("payments.Plan"), self.bc.format.to_dict(model.plan), ) @@ -410,21 +382,15 @@ def test__syllabus_exists__is_onboarding_is_false(self): # -> plan is_onboarding is True # Then: return 200 with 2 Plan with no financial options def test__syllabus_exists__is_onboarding_is_true(self): - plan = {'time_of_life': None, 'time_of_life_unit': None, 'is_renewable': True, 'is_onboarding': True} - plan_service_items = [{ - 'service_item_id': n, - 'plan_id': 1 - } for n in range(1, 3)] + [{ - 'service_item_id': n, - 'plan_id': 2 - } for n in range(1, 3)] - model = self.bc.database.create(plan=(2, plan), - service_item=2, - plan_service_item=plan_service_items, - financing_option=2, - cohort=1) - - url = reverse_lazy('payments:plan') + '?syllabus=1' + plan = {"time_of_life": None, "time_of_life_unit": None, "is_renewable": True, "is_onboarding": True} + plan_service_items = [{"service_item_id": n, "plan_id": 1} for n in range(1, 3)] + [ + {"service_item_id": n, "plan_id": 2} for n in range(1, 3) + ] + model = self.bc.database.create( + plan=(2, plan), service_item=2, plan_service_item=plan_service_items, financing_option=2, cohort=1 + ) + + url = reverse_lazy("payments:plan") + "?syllabus=1" response = self.client.get(url) json = response.json() @@ -433,7 +399,7 @@ def test__syllabus_exists__is_onboarding_is_true(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('payments.Plan'), + self.bc.database.list_of("payments.Plan"), self.bc.format.to_dict(model.plan), ) @@ -443,51 +409,53 @@ def test__syllabus_exists__is_onboarding_is_true(self): # -> plan is_onboarding is True # Then: return 200 with 2 Plan with no financial options def test__syllabus_exists__is_onboarding_is_true(self): - plan = {'time_of_life': None, 'time_of_life_unit': None, 
'is_renewable': True, 'is_onboarding': True} - plan_service_items = [{ - 'service_item_id': n, - 'plan_id': 1 - } for n in range(1, 3)] + [{ - 'service_item_id': n, - 'plan_id': 2 - } for n in range(1, 3)] - cohort = {'available_as_saas': True} - academy = {'available_as_saas': True} - model = self.bc.database.create(plan=(2, plan), - service_item=2, - plan_service_item=plan_service_items, - financing_option=2, - cohort=cohort, - cohort_set=1, - cohort_set_cohort=1, - syllabus_version=1, - academy=academy) - - url = reverse_lazy('payments:plan') + '?syllabus=1' + plan = {"time_of_life": None, "time_of_life_unit": None, "is_renewable": True, "is_onboarding": True} + plan_service_items = [{"service_item_id": n, "plan_id": 1} for n in range(1, 3)] + [ + {"service_item_id": n, "plan_id": 2} for n in range(1, 3) + ] + cohort = {"available_as_saas": True} + academy = {"available_as_saas": True} + model = self.bc.database.create( + plan=(2, plan), + service_item=2, + plan_service_item=plan_service_items, + financing_option=2, + cohort=cohort, + cohort_set=1, + cohort_set_cohort=1, + syllabus_version=1, + academy=academy, + ) + + url = reverse_lazy("payments:plan") + "?syllabus=1" response = self.client.get(url) json = response.json() expected = [ - get_serializer(model.plan[1], - model.currency, - model.service, - model.academy, - service_items=model.service_item, - cohorts=[model.cohort], - financing_options=[]), - get_serializer(model.plan[0], - model.currency, - model.service, - model.academy, - service_items=model.service_item, - cohorts=[model.cohort], - financing_options=[]), + get_serializer( + model.plan[1], + model.currency, + model.service, + model.academy, + service_items=model.service_item, + cohorts=[model.cohort], + financing_options=[], + ), + get_serializer( + model.plan[0], + model.currency, + model.service, + model.academy, + service_items=model.service_item, + cohorts=[model.cohort], + financing_options=[], + ), ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('payments.Plan'), + self.bc.database.list_of("payments.Plan"), self.bc.format.to_dict(model.plan), ) @@ -498,53 +466,51 @@ def test__syllabus_exists__is_onboarding_is_true(self): # Given: compile_lookup was mocked # When: the mock is called # Then: the mock should be called with the correct arguments and does not raise an exception - @patch('breathecode.utils.api_view_extensions.extensions.lookup_extension.compile_lookup', - MagicMock(wraps=lookup_extension.compile_lookup)) + @patch( + "breathecode.utils.api_view_extensions.extensions.lookup_extension.compile_lookup", + MagicMock(wraps=lookup_extension.compile_lookup), + ) def test_lookup_extension(self): self.bc.request.set_headers(academy=1) - plan = {'time_of_life': None, 'time_of_life_unit': None} - plan_service_items = [{ - 'service_item_id': n, - 'plan_id': 1 - } for n in range(1, 3)] + [{ - 'service_item_id': n, - 'plan_id': 2 - } for n in range(1, 3)] + plan = {"time_of_life": None, "time_of_life_unit": None} + plan_service_items = [{"service_item_id": n, "plan_id": 1} for n in range(1, 3)] + [ + {"service_item_id": n, "plan_id": 2} for n in range(1, 3) + ] model = self.bc.database.create(plan=(2, plan), service_item=2, plan_service_item=plan_service_items) args, kwargs = self.bc.format.call( - 'en', + "en", strings={ - 'exact': [ - 'service_items__service__slug', + "exact": [ + "service_items__service__slug", ], }, overwrite={ - 'service_slug': 
'service_items__service__slug', + "service_slug": "service_items__service__slug", }, - custom_fields={'is_onboarding': lambda: 'true' if random.randint(0, 1) else 'false'}, + custom_fields={"is_onboarding": lambda: "true" if random.randint(0, 1) else "false"}, ) query = self.bc.format.lookup(*args, **kwargs) - url = reverse_lazy('payments:plan') + '?' + self.bc.format.querystring(query) + url = reverse_lazy("payments:plan") + "?" + self.bc.format.querystring(query) - self.assertEqual([x for x in query], ['service_slug', 'is_onboarding']) + self.assertEqual([x for x in query], ["service_slug", "is_onboarding"]) response = self.client.get(url) json = response.json() expected = [] - for x in ['overwrite', 'custom_fields']: + for x in ["overwrite", "custom_fields"]: if x in kwargs: del kwargs[x] - for field in ['ids', 'slugs']: + for field in ["ids", "slugs"]: values = kwargs.get(field, tuple()) kwargs[field] = tuple(values) - for field in ['ints', 'strings', 'bools', 'datetimes']: + for field in ["ints", "strings", "bools", "datetimes"]: modes = kwargs.get(field, {}) for mode in modes: if not isinstance(kwargs[field][mode], tuple): @@ -552,37 +518,45 @@ def test_lookup_extension(self): kwargs[field] = frozenset(modes.items()) - self.bc.check.calls(lookup_extension.compile_lookup.call_args_list, [ - call(**kwargs), - ]) + self.bc.check.calls( + lookup_extension.compile_lookup.call_args_list, + [ + call(**kwargs), + ], + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('payments.Plan'), + self.bc.database.list_of("payments.Plan"), self.bc.format.to_dict(model.plan), ) # When: get is called # Then: it's setup properly - @patch.object(APIViewExtensionHandlers, '_spy_extensions', MagicMock()) - @patch.object(APIViewExtensionHandlers, '_spy_extension_arguments', MagicMock()) + @patch.object(APIViewExtensionHandlers, "_spy_extensions", MagicMock()) + @patch.object(APIViewExtensionHandlers, "_spy_extension_arguments", MagicMock()) def test_get__spy_extensions(self): """Test /cohort/:id without auth""" - plan = {'time_of_life': None, 'time_of_life_unit': None, 'is_renewable': True} - plan_service_items = [{'service_item_id': n, 'plan_id': 1} for n in range(1, 3)] - model = self.bc.database.create(plan=plan, - service_item=2, - plan_service_item=plan_service_items, - financing_option=2) - - url = reverse_lazy('payments:plan') + plan = {"time_of_life": None, "time_of_life_unit": None, "is_renewable": True} + plan_service_items = [{"service_item_id": n, "plan_id": 1} for n in range(1, 3)] + model = self.bc.database.create( + plan=plan, service_item=2, plan_service_item=plan_service_items, financing_option=2 + ) + + url = reverse_lazy("payments:plan") self.client.get(url) - self.bc.check.calls(APIViewExtensionHandlers._spy_extensions.call_args_list, [ - call(['LanguageExtension', 'LookupExtension', 'PaginationExtension', 'SortExtension']), - ]) + self.bc.check.calls( + APIViewExtensionHandlers._spy_extensions.call_args_list, + [ + call(["LanguageExtension", "LookupExtension", "PaginationExtension", "SortExtension"]), + ], + ) - self.bc.check.calls(APIViewExtensionHandlers._spy_extension_arguments.call_args_list, [ - call(sort='-id', paginate=True), - ]) + self.bc.check.calls( + APIViewExtensionHandlers._spy_extension_arguments.call_args_list, + [ + call(sort="-id", paginate=True), + ], + ) diff --git a/breathecode/payments/tests/urls/tests_plan_slug.py b/breathecode/payments/tests/urls/tests_plan_slug.py index 
c9a67cee3..43195ad1a 100644 --- a/breathecode/payments/tests/urls/tests_plan_slug.py +++ b/breathecode/payments/tests/urls/tests_plan_slug.py @@ -11,35 +11,35 @@ def academy_serializer(academy): return { - 'id': academy.id, - 'name': academy.name, - 'slug': academy.slug, + "id": academy.id, + "name": academy.name, + "slug": academy.slug, } def service_item_serializer(service_item, service): return { - 'how_many': service_item.how_many, - 'service': { - 'groups': [], - 'private': service.private, - 'slug': service.slug, - 'title': service.title, - 'icon_url': service.icon_url, + "how_many": service_item.how_many, + "service": { + "groups": [], + "private": service.private, + "slug": service.slug, + "title": service.title, + "icon_url": service.icon_url, }, - 'unit_type': service_item.unit_type, - 'sort_priority': service_item.sort_priority, + "unit_type": service_item.unit_type, + "sort_priority": service_item.sort_priority, } def financing_option_serializer(financing_option, currency): return { - 'currency': { - 'code': currency.code, - 'name': currency.name, + "currency": { + "code": currency.code, + "name": currency.name, }, - 'how_many_months': financing_option.how_many_months, - 'monthly_price': financing_option.monthly_price, + "how_many_months": financing_option.how_many_months, + "monthly_price": financing_option.monthly_price, } @@ -54,27 +54,27 @@ def get_serializer(event, currency, service=None, academy=None, service_items=[] academy = academy_serializer(academy) return { - 'slug': event.slug, - 'currency': { - 'code': currency.code, - 'name': currency.name, + "slug": event.slug, + "currency": { + "code": currency.code, + "name": currency.name, }, - 'financing_options': financing_options, - 'has_available_cohorts': len(cohorts) > 0, - 'has_waiting_list': event.has_waiting_list, - 'is_renewable': event.is_renewable, - 'owner': academy, - 'price_per_half': event.price_per_half, - 'price_per_month': event.price_per_month, - 'price_per_quarter': event.price_per_quarter, - 'price_per_year': event.price_per_year, - 'service_items': service_items, - 'slug': event.slug, - 'status': event.status, - 'time_of_life': event.time_of_life, - 'time_of_life_unit': event.time_of_life_unit, - 'trial_duration': event.trial_duration, - 'trial_duration_unit': event.trial_duration_unit, + "financing_options": financing_options, + "has_available_cohorts": len(cohorts) > 0, + "has_waiting_list": event.has_waiting_list, + "is_renewable": event.is_renewable, + "owner": academy, + "price_per_half": event.price_per_half, + "price_per_month": event.price_per_month, + "price_per_quarter": event.price_per_quarter, + "price_per_year": event.price_per_year, + "service_items": service_items, + "slug": event.slug, + "status": event.status, + "time_of_life": event.time_of_life, + "time_of_life_unit": event.time_of_life_unit, + "trial_duration": event.trial_duration, + "trial_duration_unit": event.trial_duration_unit, } @@ -87,63 +87,68 @@ class SignalTestSuite(PaymentsTestCase): # When: get with no auth # Then: return 200 def test__no_auth(self): - url = reverse_lazy('payments:plan_slug', kwargs={'plan_slug': 'plan-1'}) + url = reverse_lazy("payments:plan_slug", kwargs={"plan_slug": "plan-1"}) response = self.client.get(url) json = response.json() - expected = {'detail': 'not-found', 'status_code': 404} + expected = {"detail": "not-found", "status_code": 404} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - 
self.assertEqual(self.bc.database.list_of('payments.Plan'), []) + self.assertEqual(self.bc.database.list_of("payments.Plan"), []) # Given: 2 Plan, 4 PlanServiceItem, 2 ServiceItem and 1 Service # When: get with no auth and plan is renewable # Then: return 200 with 2 Plan with no financial options def test__two_items__plan_is_renewable(self): - plan = {'time_of_life': None, 'time_of_life_unit': None, 'is_renewable': True} - plan_service_items = [{'service_item_id': n, 'plan_id': 1} for n in range(1, 3)] - model = self.bc.database.create(plan=plan, - service_item=2, - plan_service_item=plan_service_items, - financing_option=2) - - url = reverse_lazy('payments:plan_slug', kwargs={'plan_slug': model.plan.slug}) + plan = {"time_of_life": None, "time_of_life_unit": None, "is_renewable": True} + plan_service_items = [{"service_item_id": n, "plan_id": 1} for n in range(1, 3)] + model = self.bc.database.create( + plan=plan, service_item=2, plan_service_item=plan_service_items, financing_option=2 + ) + + url = reverse_lazy("payments:plan_slug", kwargs={"plan_slug": model.plan.slug}) response = self.client.get(url) json = response.json() - expected = get_serializer(model.plan, - model.currency, - model.service, - service_items=model.service_item, - financing_options=[]) + expected = get_serializer( + model.plan, model.currency, model.service, service_items=model.service_item, financing_options=[] + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('payments.Plan'), [ - self.bc.format.to_dict(model.plan), - ]) + self.assertEqual( + self.bc.database.list_of("payments.Plan"), + [ + self.bc.format.to_dict(model.plan), + ], + ) # When: get is called # Then: it's setup properly - @patch.object(APIViewExtensionHandlers, '_spy_extensions', MagicMock()) - @patch.object(APIViewExtensionHandlers, '_spy_extension_arguments', MagicMock()) + @patch.object(APIViewExtensionHandlers, "_spy_extensions", MagicMock()) + @patch.object(APIViewExtensionHandlers, "_spy_extension_arguments", MagicMock()) def test_get__spy_extensions(self): """Test /cohort/:id without auth""" - plan = {'time_of_life': None, 'time_of_life_unit': None, 'is_renewable': True} - plan_service_items = [{'service_item_id': n, 'plan_id': 1} for n in range(1, 3)] - model = self.bc.database.create(plan=plan, - service_item=2, - plan_service_item=plan_service_items, - financing_option=2) - - url = reverse_lazy('payments:plan_slug', kwargs={'plan_slug': model.plan.slug}) - self.client.get(url) + plan = {"time_of_life": None, "time_of_life_unit": None, "is_renewable": True} + plan_service_items = [{"service_item_id": n, "plan_id": 1} for n in range(1, 3)] + model = self.bc.database.create( + plan=plan, service_item=2, plan_service_item=plan_service_items, financing_option=2 + ) - self.bc.check.calls(APIViewExtensionHandlers._spy_extensions.call_args_list, [ - call(['LanguageExtension', 'LookupExtension', 'PaginationExtension', 'SortExtension']), - ]) + url = reverse_lazy("payments:plan_slug", kwargs={"plan_slug": model.plan.slug}) + self.client.get(url) - self.bc.check.calls(APIViewExtensionHandlers._spy_extension_arguments.call_args_list, [ - call(sort='-id', paginate=True), - ]) + self.bc.check.calls( + APIViewExtensionHandlers._spy_extensions.call_args_list, + [ + call(["LanguageExtension", "LookupExtension", "PaginationExtension", "SortExtension"]), + ], + ) + + self.bc.check.calls( + APIViewExtensionHandlers._spy_extension_arguments.call_args_list, + [ + 
call(sort="-id", paginate=True), + ], + ) diff --git a/breathecode/payments/tests/urls/tests_planoffer.py b/breathecode/payments/tests/urls/tests_planoffer.py index 197506c5b..f6a6d3a2e 100644 --- a/breathecode/payments/tests/urls/tests_planoffer.py +++ b/breathecode/payments/tests/urls/tests_planoffer.py @@ -17,111 +17,99 @@ def permission_serializer(permission): return { - 'codename': permission.codename, - 'name': permission.name, + "codename": permission.codename, + "name": permission.name, } def group_serializer(group, permissions=[]): return { - 'name': group.name, - 'permissions': [permission_serializer(permission) for permission in permissions], + "name": group.name, + "permissions": [permission_serializer(permission) for permission in permissions], } def service_serializer(service, groups=[], permissions=[]): return { - 'private': service.private, - 'slug': service.slug, - 'title': service.title, - 'icon_url': service.icon_url, - 'groups': [group_serializer(group, permissions) for group in groups], + "private": service.private, + "slug": service.slug, + "title": service.title, + "icon_url": service.icon_url, + "groups": [group_serializer(group, permissions) for group in groups], } def service_item_serializer(self, service_item, service, groups=[], permissions=[]): return { - 'how_many': service_item.how_many, - 'unit_type': service_item.unit_type, - 'sort_priority': service_item.sort_priority, - 'service': service_serializer(service, groups, permissions), + "how_many": service_item.how_many, + "unit_type": service_item.unit_type, + "sort_priority": service_item.sort_priority, + "service": service_serializer(service, groups, permissions), } def currency_serializer(currency): return { - 'code': currency.code, - 'name': currency.name, + "code": currency.code, + "name": currency.name, } def plan_serializer(self, plan, service, currency, groups=[], permissions=[], service_items=[]): return { - 'financing_options': [], - 'service_items': - [service_item_serializer(self, service_item, service, groups, permissions) for service_item in service_items], - 'currency': - currency_serializer(currency), - 'slug': - plan.slug, - 'status': - plan.status, - 'time_of_life': - plan.time_of_life, - 'time_of_life_unit': - plan.time_of_life_unit, - 'trial_duration': - plan.trial_duration, - 'trial_duration_unit': - plan.trial_duration_unit, - 'has_waiting_list': - plan.has_waiting_list, - 'is_renewable': - plan.is_renewable, - 'owner': - plan.owner, - 'price_per_half': - plan.price_per_half, - 'price_per_month': - plan.price_per_month, - 'price_per_quarter': - plan.price_per_quarter, - 'price_per_year': - plan.price_per_year, - 'has_available_cohorts': - bool(plan.cohort_set), + "financing_options": [], + "service_items": [ + service_item_serializer(self, service_item, service, groups, permissions) for service_item in service_items + ], + "currency": currency_serializer(currency), + "slug": plan.slug, + "status": plan.status, + "time_of_life": plan.time_of_life, + "time_of_life_unit": plan.time_of_life_unit, + "trial_duration": plan.trial_duration, + "trial_duration_unit": plan.trial_duration_unit, + "has_waiting_list": plan.has_waiting_list, + "is_renewable": plan.is_renewable, + "owner": plan.owner, + "price_per_half": plan.price_per_half, + "price_per_month": plan.price_per_month, + "price_per_quarter": plan.price_per_quarter, + "price_per_year": plan.price_per_year, + "has_available_cohorts": bool(plan.cohort_set), } def plan_offer_transaction_serializer(plan_offer_transaction): return { - 'lang': 
plan_offer_transaction.lang, - 'title': plan_offer_transaction.title, - 'description': plan_offer_transaction.description, - 'short_description': plan_offer_transaction.short_description, + "lang": plan_offer_transaction.lang, + "title": plan_offer_transaction.title, + "description": plan_offer_transaction.description, + "short_description": plan_offer_transaction.short_description, } -def get_serializer(self, - plan_offer, - plan1, - plan2, - service, - currency, - plan_offer_translation=None, - groups=[], - permissions=[], - service_items=[]): +def get_serializer( + self, + plan_offer, + plan1, + plan2, + service, + currency, + plan_offer_translation=None, + groups=[], + permissions=[], + service_items=[], +): if plan_offer_translation: plan_offer_translation = plan_offer_transaction_serializer(plan_offer_translation) return { - 'details': plan_offer_translation, - 'original_plan': plan_serializer(self, plan1, service, currency, groups, permissions, service_items), - 'suggested_plan': plan_serializer(self, plan2, service, currency, groups, permissions, service_items), - 'show_modal': plan_offer.show_modal, - 'expires_at': self.bc.datetime.to_iso_string(plan_offer.expires_at) if plan_offer.expires_at else None, + "details": plan_offer_translation, + "original_plan": plan_serializer(self, plan1, service, currency, groups, permissions, service_items), + "suggested_plan": plan_serializer(self, plan2, service, currency, groups, permissions, service_items), + "show_modal": plan_offer.show_modal, + "expires_at": self.bc.datetime.to_iso_string(plan_offer.expires_at) if plan_offer.expires_at else None, } @@ -131,7 +119,7 @@ class SignalTestSuite(PaymentsTestCase): """ def test__without_auth__without_service_items(self): - url = reverse_lazy('payments:planoffer') + url = reverse_lazy("payments:planoffer") response = self.client.get(url) json = response.json() @@ -139,20 +127,20 @@ def test__without_auth__without_service_items(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('payments.PlanOffer'), []) + self.assertEqual(self.bc.database.list_of("payments.PlanOffer"), []) def test__without_auth__with_plan_offer__expires_at_eq_none(self): - plan_service_items = [{'service_item_id': n, 'plan_id': 1} for n in range(1, 3)] - plan_service_items += [{'service_item_id': n, 'plan_id': 2} for n in range(1, 3)] + plan_service_items = [{"service_item_id": n, "plan_id": 1} for n in range(1, 3)] + plan_service_items += [{"service_item_id": n, "plan_id": 2} for n in range(1, 3)] plan_offer = { - 'original_plan_id': 1, - 'suggested_plan_id': 2, - 'show_modal': bool(random.getrandbits(1)), - 'expires_at': None, + "original_plan_id": 1, + "suggested_plan_id": 2, + "show_modal": bool(random.getrandbits(1)), + "expires_at": None, } - plan = {'is_renewable': False} + plan = {"is_renewable": False} model = self.bc.database.create( plan=(2, plan), @@ -164,41 +152,46 @@ def test__without_auth__with_plan_offer__expires_at_eq_none(self): permission=1, ) - url = reverse_lazy('payments:planoffer') + url = reverse_lazy("payments:planoffer") response = self.client.get(url) json = response.json() expected = [ - get_serializer(self, - model.plan_offer, - model.plan[0], - model.plan[1], - model.service, - model.currency, - groups=model.group, - permissions=[model.permission], - service_items=model.service_item) + get_serializer( + self, + model.plan_offer, + model.plan[0], + model.plan[1], + model.service, + model.currency, + 
groups=model.group, + permissions=[model.permission], + service_items=model.service_item, + ) ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('payments.PlanOffer'), [ - self.bc.format.to_dict(model.plan_offer), - ]) + self.assertEqual( + self.bc.database.list_of("payments.PlanOffer"), + [ + self.bc.format.to_dict(model.plan_offer), + ], + ) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__without_auth__with_plan_offer__expires_at_in_the_future(self): - plan_service_items = [{'service_item_id': n, 'plan_id': 1} for n in range(1, 3)] - plan_service_items += [{'service_item_id': n, 'plan_id': 2} for n in range(1, 3)] + plan_service_items = [{"service_item_id": n, "plan_id": 1} for n in range(1, 3)] + plan_service_items += [{"service_item_id": n, "plan_id": 2} for n in range(1, 3)] plan_offer = { - 'original_plan_id': 1, - 'suggested_plan_id': 2, - 'show_modal': bool(random.getrandbits(1)), - 'expires_at': UTC_NOW + timedelta(seconds=random.randint(1, 60)), + "original_plan_id": 1, + "suggested_plan_id": 2, + "show_modal": bool(random.getrandbits(1)), + "expires_at": UTC_NOW + timedelta(seconds=random.randint(1, 60)), } - plan = {'is_renewable': False} + plan = {"is_renewable": False} model = self.bc.database.create( plan=(2, plan), @@ -210,41 +203,46 @@ def test__without_auth__with_plan_offer__expires_at_in_the_future(self): permission=1, ) - url = reverse_lazy('payments:planoffer') + url = reverse_lazy("payments:planoffer") response = self.client.get(url) json = response.json() expected = [ - get_serializer(self, - model.plan_offer, - model.plan[0], - model.plan[1], - model.service, - model.currency, - groups=model.group, - permissions=[model.permission], - service_items=model.service_item) + get_serializer( + self, + model.plan_offer, + model.plan[0], + model.plan[1], + model.service, + model.currency, + groups=model.group, + permissions=[model.permission], + service_items=model.service_item, + ) ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('payments.PlanOffer'), [ - self.bc.format.to_dict(model.plan_offer), - ]) + self.assertEqual( + self.bc.database.list_of("payments.PlanOffer"), + [ + self.bc.format.to_dict(model.plan_offer), + ], + ) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__without_auth__with_plan_offer__expires_at_in_the_past(self): - plan_service_items = [{'service_item_id': n, 'plan_id': 1} for n in range(1, 3)] - plan_service_items += [{'service_item_id': n, 'plan_id': 2} for n in range(1, 3)] + plan_service_items = [{"service_item_id": n, "plan_id": 1} for n in range(1, 3)] + plan_service_items += [{"service_item_id": n, "plan_id": 2} for n in range(1, 3)] plan_offer = { - 'original_plan_id': 1, - 'suggested_plan_id': 2, - 'show_modal': bool(random.getrandbits(1)), - 'expires_at': UTC_NOW - timedelta(seconds=random.randint(1, 60)), + "original_plan_id": 1, + "suggested_plan_id": 2, + "show_modal": bool(random.getrandbits(1)), + "expires_at": UTC_NOW - timedelta(seconds=random.randint(1, 60)), } - plan = {'is_renewable': False} + plan = {"is_renewable": False} model = self.bc.database.create( plan=(2, plan), @@ -256,7 +254,7 @@ def 
test__without_auth__with_plan_offer__expires_at_in_the_past(self): permission=1, ) - url = reverse_lazy('payments:planoffer') + url = reverse_lazy("payments:planoffer") response = self.client.get(url) json = response.json() @@ -264,30 +262,31 @@ def test__without_auth__with_plan_offer__expires_at_in_the_past(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('payments.PlanOffer'), [ - self.bc.format.to_dict(model.plan_offer), - ]) + self.assertEqual( + self.bc.database.list_of("payments.PlanOffer"), + [ + self.bc.format.to_dict(model.plan_offer), + ], + ) def test__without_auth__with_plan_offer_transaction(self): - plan_service_items = [{'service_item_id': n, 'plan_id': 1} for n in range(1, 3)] - plan_service_items += [{'service_item_id': n, 'plan_id': 2} for n in range(1, 3)] - - plan_offers = [{ - 'original_plan_id': 1, - 'suggested_plan_id': 2, - 'show_modal': bool(random.getrandbits(1)) - }, { - 'original_plan_id': 2, - 'suggested_plan_id': 1, - 'show_modal': bool(random.getrandbits(1)) - }] - - plan_offer_translation = [{ - 'lang': 'en', - 'offer_id': n, - } for n in range(1, 3)] - - plan = {'is_renewable': False} + plan_service_items = [{"service_item_id": n, "plan_id": 1} for n in range(1, 3)] + plan_service_items += [{"service_item_id": n, "plan_id": 2} for n in range(1, 3)] + + plan_offers = [ + {"original_plan_id": 1, "suggested_plan_id": 2, "show_modal": bool(random.getrandbits(1))}, + {"original_plan_id": 2, "suggested_plan_id": 1, "show_modal": bool(random.getrandbits(1))}, + ] + + plan_offer_translation = [ + { + "lang": "en", + "offer_id": n, + } + for n in range(1, 3) + ] + + plan = {"is_renewable": False} model = self.bc.database.create( plan=(4, plan), @@ -300,60 +299,62 @@ def test__without_auth__with_plan_offer_transaction(self): plan_offer_translation=plan_offer_translation, ) - url = reverse_lazy('payments:planoffer') + url = reverse_lazy("payments:planoffer") response = self.client.get(url) json = response.json() expected = [ - get_serializer(self, - model.plan_offer[1], - model.plan[1], - model.plan[0], - model.service, - model.currency, - plan_offer_translation=model.plan_offer_translation[1], - groups=model.group, - permissions=[model.permission], - service_items=model.service_item), - get_serializer(self, - model.plan_offer[0], - model.plan[0], - model.plan[1], - model.service, - model.currency, - plan_offer_translation=model.plan_offer_translation[0], - groups=model.group, - permissions=[model.permission], - service_items=model.service_item) + get_serializer( + self, + model.plan_offer[1], + model.plan[1], + model.plan[0], + model.service, + model.currency, + plan_offer_translation=model.plan_offer_translation[1], + groups=model.group, + permissions=[model.permission], + service_items=model.service_item, + ), + get_serializer( + self, + model.plan_offer[0], + model.plan[0], + model.plan[1], + model.service, + model.currency, + plan_offer_translation=model.plan_offer_translation[0], + groups=model.group, + permissions=[model.permission], + service_items=model.service_item, + ), ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('payments.PlanOffer'), + self.bc.database.list_of("payments.PlanOffer"), self.bc.format.to_dict(model.plan_offer), ) def test__without_auth__filter_by_original_plan(self): - plan_service_items = [{'service_item_id': n, 'plan_id': 1} for n in range(1, 3)] - 
plan_service_items += [{'service_item_id': n, 'plan_id': 2} for n in range(1, 3)] - - plan_offers = [{ - 'original_plan_id': 1, - 'suggested_plan_id': 2, - 'show_modal': bool(random.getrandbits(1)) - }, { - 'original_plan_id': 2, - 'suggested_plan_id': 1, - 'show_modal': bool(random.getrandbits(1)) - }] - - plan_offer_translation = [{ - 'lang': 'en', - 'offer_id': n, - } for n in range(1, 3)] - - plan = {'is_renewable': False} + plan_service_items = [{"service_item_id": n, "plan_id": 1} for n in range(1, 3)] + plan_service_items += [{"service_item_id": n, "plan_id": 2} for n in range(1, 3)] + + plan_offers = [ + {"original_plan_id": 1, "suggested_plan_id": 2, "show_modal": bool(random.getrandbits(1))}, + {"original_plan_id": 2, "suggested_plan_id": 1, "show_modal": bool(random.getrandbits(1))}, + ] + + plan_offer_translation = [ + { + "lang": "en", + "offer_id": n, + } + for n in range(1, 3) + ] + + plan = {"is_renewable": False} model = self.bc.database.create( plan=(4, plan), @@ -366,51 +367,53 @@ def test__without_auth__filter_by_original_plan(self): plan_offer_translation=plan_offer_translation, ) - url = reverse_lazy( - 'payments:planoffer') + f'?original_plan={random.choice([model.plan[0].id, model.plan[0].slug])}' + url = ( + reverse_lazy("payments:planoffer") + + f"?original_plan={random.choice([model.plan[0].id, model.plan[0].slug])}" + ) response = self.client.get(url) json = response.json() expected = [ - get_serializer(self, - model.plan_offer[0], - model.plan[0], - model.plan[1], - model.service, - model.currency, - plan_offer_translation=model.plan_offer_translation[0], - groups=model.group, - permissions=[model.permission], - service_items=model.service_item) + get_serializer( + self, + model.plan_offer[0], + model.plan[0], + model.plan[1], + model.service, + model.currency, + plan_offer_translation=model.plan_offer_translation[0], + groups=model.group, + permissions=[model.permission], + service_items=model.service_item, + ) ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('payments.PlanOffer'), + self.bc.database.list_of("payments.PlanOffer"), self.bc.format.to_dict(model.plan_offer), ) def test__without_auth__filter_by_suggested_plan(self): - plan_service_items = [{'service_item_id': n, 'plan_id': 1} for n in range(1, 3)] - plan_service_items += [{'service_item_id': n, 'plan_id': 2} for n in range(1, 3)] - - plan_offers = [{ - 'original_plan_id': 1, - 'suggested_plan_id': 2, - 'show_modal': bool(random.getrandbits(1)) - }, { - 'original_plan_id': 2, - 'suggested_plan_id': 1, - 'show_modal': bool(random.getrandbits(1)) - }] - - plan_offer_translation = [{ - 'lang': 'en', - 'offer_id': n, - } for n in range(1, 3)] - - plan = {'is_renewable': False} + plan_service_items = [{"service_item_id": n, "plan_id": 1} for n in range(1, 3)] + plan_service_items += [{"service_item_id": n, "plan_id": 2} for n in range(1, 3)] + + plan_offers = [ + {"original_plan_id": 1, "suggested_plan_id": 2, "show_modal": bool(random.getrandbits(1))}, + {"original_plan_id": 2, "suggested_plan_id": 1, "show_modal": bool(random.getrandbits(1))}, + ] + + plan_offer_translation = [ + { + "lang": "en", + "offer_id": n, + } + for n in range(1, 3) + ] + + plan = {"is_renewable": False} model = self.bc.database.create( plan=(4, plan), @@ -423,27 +426,31 @@ def test__without_auth__filter_by_suggested_plan(self): plan_offer_translation=plan_offer_translation, ) - url = reverse_lazy( - 'payments:planoffer') + 
f'?suggested_plan={random.choice([model.plan[1].id, model.plan[1].slug])}' + url = ( + reverse_lazy("payments:planoffer") + + f"?suggested_plan={random.choice([model.plan[1].id, model.plan[1].slug])}" + ) response = self.client.get(url) json = response.json() expected = [ - get_serializer(self, - model.plan_offer[0], - model.plan[0], - model.plan[1], - model.service, - model.currency, - plan_offer_translation=model.plan_offer_translation[0], - groups=model.group, - permissions=[model.permission], - service_items=model.service_item) + get_serializer( + self, + model.plan_offer[0], + model.plan[0], + model.plan[1], + model.service, + model.currency, + plan_offer_translation=model.plan_offer_translation[0], + groups=model.group, + permissions=[model.permission], + service_items=model.service_item, + ) ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('payments.PlanOffer'), + self.bc.database.list_of("payments.PlanOffer"), self.bc.format.to_dict(model.plan_offer), ) diff --git a/breathecode/payments/tests/urls/tests_serviceitem.py b/breathecode/payments/tests/urls/tests_serviceitem.py index 35ecd8821..481f96574 100644 --- a/breathecode/payments/tests/urls/tests_serviceitem.py +++ b/breathecode/payments/tests/urls/tests_serviceitem.py @@ -17,51 +17,51 @@ def format_user_setting(data={}): return { - 'id': 1, - 'user_id': 1, - 'main_currency_id': None, - 'lang': 'en', + "id": 1, + "user_id": 1, + "main_currency_id": None, + "lang": "en", **data, } def format_invoice_item(data={}): return { - 'academy_id': None, - 'amount': 0.0, - 'currency_id': 1, - 'bag_id': None, - 'id': 1, - 'paid_at': UTC_NOW, - 'status': 'FULFILLED', - 'stripe_id': None, - 'user_id': 1, + "academy_id": None, + "amount": 0.0, + "currency_id": 1, + "bag_id": None, + "id": 1, + "paid_at": UTC_NOW, + "status": "FULFILLED", + "stripe_id": None, + "user_id": 1, **data, } def feature_serializer(service_item_feature): return { - 'description': service_item_feature.description, - 'one_line_desc': service_item_feature.one_line_desc, - 'title': service_item_feature.title, + "description": service_item_feature.description, + "one_line_desc": service_item_feature.one_line_desc, + "title": service_item_feature.title, } def get_serializer(service_item, service, service_item_features=[], data={}): features = [feature_serializer(service_item_feature) for service_item_feature in service_item_features] return { - 'features': features, - 'how_many': service_item.how_many, - 'service': { - 'groups': [], - 'private': service.private, - 'slug': service.slug, - 'title': service.title, - 'icon_url': service.icon_url, + "features": features, + "how_many": service_item.how_many, + "service": { + "groups": [], + "private": service.private, + "slug": service.slug, + "title": service.title, + "icon_url": service.icon_url, }, - 'unit_type': service_item.unit_type, - 'sort_priority': service_item.sort_priority, + "unit_type": service_item.unit_type, + "sort_priority": service_item.sort_priority, **data, } @@ -72,7 +72,7 @@ class SignalTestSuite(PaymentsTestCase): """ def test__without_auth__without_service_items(self): - url = reverse_lazy('payments:serviceitem') + url = reverse_lazy("payments:serviceitem") response = self.client.get(url) json = response.json() @@ -80,37 +80,45 @@ def test__without_auth__without_service_items(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - 
self.assertEqual(self.bc.database.list_of('payments.ServiceItem'), []) + self.assertEqual(self.bc.database.list_of("payments.ServiceItem"), []) def test__without_auth__with_service_items(self): - service_item_features = [{'lang': 'en', 'service_item_id': 1} for _ in range(2)] - service_item_features += [{'lang': 'en', 'service_item_id': 2} for _ in range(2)] - plan_service_items = [{'service_item_id': n} for n in range(1, 3)] - plan = {'is_renewable': False} - - model = self.bc.database.create(plan=plan, - service=1, - service_item=2, - plan_service_item=plan_service_items, - service_item_feature=service_item_features) - - url = reverse_lazy('payments:serviceitem') + service_item_features = [{"lang": "en", "service_item_id": 1} for _ in range(2)] + service_item_features += [{"lang": "en", "service_item_id": 2} for _ in range(2)] + plan_service_items = [{"service_item_id": n} for n in range(1, 3)] + plan = {"is_renewable": False} + + model = self.bc.database.create( + plan=plan, + service=1, + service_item=2, + plan_service_item=plan_service_items, + service_item_feature=service_item_features, + ) + + url = reverse_lazy("payments:serviceitem") response = self.client.get(url) json = response.json() expected = [ - get_serializer(model.service_item[1], - model.service, [model.service_item_feature[2], model.service_item_feature[3]], - data={}), - get_serializer(model.service_item[0], - model.service, [model.service_item_feature[0], model.service_item_feature[1]], - data={}), + get_serializer( + model.service_item[1], + model.service, + [model.service_item_feature[2], model.service_item_feature[3]], + data={}, + ), + get_serializer( + model.service_item[0], + model.service, + [model.service_item_feature[0], model.service_item_feature[1]], + data={}, + ), ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('payments.ServiceItem'), + self.bc.database.list_of("payments.ServiceItem"), self.bc.format.to_dict(model.service_item), ) @@ -118,81 +126,91 @@ def test__without_auth__with_service_items(self): 🔽🔽🔽 Without auth filtering by lang """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__without_auth__filtering_by_lang(self): - service_item_features = [{'lang': 'en', 'service_item_id': 1} for _ in range(2)] - service_item_features += [{'lang': 'es', 'service_item_id': 1} for _ in range(2)] - plan = {'is_renewable': False} + service_item_features = [{"lang": "en", "service_item_id": 1} for _ in range(2)] + service_item_features += [{"lang": "es", "service_item_id": 1} for _ in range(2)] + plan = {"is_renewable": False} - model = self.bc.database.create(plan=plan, - service=1, - service_item=1, - plan_service_item=1, - service_item_feature=service_item_features) + model = self.bc.database.create( + plan=plan, service=1, service_item=1, plan_service_item=1, service_item_feature=service_item_features + ) - url = reverse_lazy('payments:serviceitem') + url = reverse_lazy("payments:serviceitem") response = self.client.get(url) json = response.json() expected = [ - get_serializer(model.service_item, - model.service, [model.service_item_feature[0], model.service_item_feature[1]], - data={}), + get_serializer( + model.service_item, + model.service, + [model.service_item_feature[0], model.service_item_feature[1]], + data={}, + ), ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - 
self.assertEqual(self.bc.database.list_of('payments.ServiceItem'), [ - self.bc.format.to_dict(model.service_item), - ]) + self.assertEqual( + self.bc.database.list_of("payments.ServiceItem"), + [ + self.bc.format.to_dict(model.service_item), + ], + ) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__without_auth__filtering_by_lang_from_headers(self): - service_item_features = [{'lang': 'en', 'service_item_id': 1} for _ in range(2)] - service_item_features += [{'lang': 'es', 'service_item_id': 1} for _ in range(2)] - plan = {'is_renewable': False} + service_item_features = [{"lang": "en", "service_item_id": 1} for _ in range(2)] + service_item_features += [{"lang": "es", "service_item_id": 1} for _ in range(2)] + plan = {"is_renewable": False} - model = self.bc.database.create(plan=plan, - service=1, - service_item=1, - plan_service_item=1, - service_item_feature=service_item_features) + model = self.bc.database.create( + plan=plan, service=1, service_item=1, plan_service_item=1, service_item_feature=service_item_features + ) - self.bc.request.set_headers(accept_language='es') + self.bc.request.set_headers(accept_language="es") - url = reverse_lazy('payments:serviceitem') + url = reverse_lazy("payments:serviceitem") response = self.client.get(url) json = response.json() expected = [ - get_serializer(model.service_item, - model.service, [model.service_item_feature[2], model.service_item_feature[3]], - data={}), + get_serializer( + model.service_item, + model.service, + [model.service_item_feature[2], model.service_item_feature[3]], + data={}, + ), ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('payments.ServiceItem'), [ - self.bc.format.to_dict(model.service_item), - ]) + self.assertEqual( + self.bc.database.list_of("payments.ServiceItem"), + [ + self.bc.format.to_dict(model.service_item), + ], + ) """ 🔽🔽🔽 Without auth filtering by plan """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__without_auth__filtering_by_plan(self): - service_item_features = [{'lang': 'en', 'service_item_id': n} for n in range(1, 5)] - plan_service_items = [{'service_item_id': n} for n in range(1, 3)] - plan = {'is_renewable': False} - - model = self.bc.database.create(plan=plan, - service=1, - service_item=4, - plan_service_item=plan_service_items, - service_item_feature=service_item_features) + service_item_features = [{"lang": "en", "service_item_id": n} for n in range(1, 5)] + plan_service_items = [{"service_item_id": n} for n in range(1, 3)] + plan = {"is_renewable": False} + + model = self.bc.database.create( + plan=plan, + service=1, + service_item=4, + plan_service_item=plan_service_items, + service_item_feature=service_item_features, + ) - url = reverse_lazy('payments:serviceitem') + '?plan=1' + url = reverse_lazy("payments:serviceitem") + "?plan=1" response = self.client.get(url) json = response.json() @@ -204,7 +222,7 @@ def test__without_auth__filtering_by_plan(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('payments.ServiceItem'), + self.bc.database.list_of("payments.ServiceItem"), self.bc.format.to_dict(model.service_item), ) @@ -216,7 +234,7 @@ def test__with_auth__without_service_items(self): model = 
self.bc.database.create(user=1) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:serviceitem') + url = reverse_lazy("payments:serviceitem") response = self.client.get(url) json = response.json() @@ -224,41 +242,49 @@ def test__with_auth__without_service_items(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('payments.ServiceItem'), []) + self.assertEqual(self.bc.database.list_of("payments.ServiceItem"), []) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__with_auth__with_service_items(self): - service_item_features = [{'lang': 'en', 'service_item_id': 1} for _ in range(2)] - service_item_features += [{'lang': 'en', 'service_item_id': 2} for _ in range(2)] - plan_service_items = [{'service_item_id': n} for n in range(1, 3)] - plan = {'is_renewable': False} - - model = self.bc.database.create(user=1, - plan=plan, - service=1, - service_item=2, - plan_service_item=plan_service_items, - service_item_feature=service_item_features) + service_item_features = [{"lang": "en", "service_item_id": 1} for _ in range(2)] + service_item_features += [{"lang": "en", "service_item_id": 2} for _ in range(2)] + plan_service_items = [{"service_item_id": n} for n in range(1, 3)] + plan = {"is_renewable": False} + + model = self.bc.database.create( + user=1, + plan=plan, + service=1, + service_item=2, + plan_service_item=plan_service_items, + service_item_feature=service_item_features, + ) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:serviceitem') + url = reverse_lazy("payments:serviceitem") response = self.client.get(url) json = response.json() expected = [ - get_serializer(model.service_item[1], - model.service, [model.service_item_feature[2], model.service_item_feature[3]], - data={}), - get_serializer(model.service_item[0], - model.service, [model.service_item_feature[0], model.service_item_feature[1]], - data={}), + get_serializer( + model.service_item[1], + model.service, + [model.service_item_feature[2], model.service_item_feature[3]], + data={}, + ), + get_serializer( + model.service_item[0], + model.service, + [model.service_item_feature[0], model.service_item_feature[1]], + data={}, + ), ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('payments.ServiceItem'), + self.bc.database.list_of("payments.ServiceItem"), self.bc.format.to_dict(model.service_item), ) @@ -266,89 +292,107 @@ def test__with_auth__with_service_items(self): 🔽🔽🔽 With auth filtering by lang """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__with_auth__filtering_by_lang(self): - service_item_features = [{'lang': 'en', 'service_item_id': 1} for _ in range(2)] - service_item_features += [{'lang': 'es', 'service_item_id': 1} for _ in range(2)] - plan = {'is_renewable': False} - - model = self.bc.database.create(user=1, - plan=plan, - service=1, - service_item=1, - plan_service_item=1, - service_item_feature=service_item_features) + service_item_features = [{"lang": "en", "service_item_id": 1} for _ in range(2)] + service_item_features += [{"lang": "es", "service_item_id": 1} for _ in range(2)] + plan = {"is_renewable": False} + + model = self.bc.database.create( + user=1, + plan=plan, + service=1, + 
service_item=1, + plan_service_item=1, + service_item_feature=service_item_features, + ) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:serviceitem') + url = reverse_lazy("payments:serviceitem") response = self.client.get(url) json = response.json() expected = [ - get_serializer(model.service_item, - model.service, [model.service_item_feature[0], model.service_item_feature[1]], - data={}), + get_serializer( + model.service_item, + model.service, + [model.service_item_feature[0], model.service_item_feature[1]], + data={}, + ), ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('payments.ServiceItem'), [ - self.bc.format.to_dict(model.service_item), - ]) + self.assertEqual( + self.bc.database.list_of("payments.ServiceItem"), + [ + self.bc.format.to_dict(model.service_item), + ], + ) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__with_auth__filtering_by_lang_from_headers(self): - service_item_features = [{'lang': 'en', 'service_item_id': 1} for _ in range(2)] - service_item_features += [{'lang': 'es', 'service_item_id': 1} for _ in range(2)] - plan = {'is_renewable': False} - - model = self.bc.database.create(user=1, - plan=plan, - service=1, - service_item=1, - plan_service_item=1, - service_item_feature=service_item_features) - - self.bc.request.set_headers(accept_language='es') + service_item_features = [{"lang": "en", "service_item_id": 1} for _ in range(2)] + service_item_features += [{"lang": "es", "service_item_id": 1} for _ in range(2)] + plan = {"is_renewable": False} + + model = self.bc.database.create( + user=1, + plan=plan, + service=1, + service_item=1, + plan_service_item=1, + service_item_feature=service_item_features, + ) + + self.bc.request.set_headers(accept_language="es") self.client.force_authenticate(model.user) - url = reverse_lazy('payments:serviceitem') + url = reverse_lazy("payments:serviceitem") response = self.client.get(url) json = response.json() expected = [ - get_serializer(model.service_item, - model.service, [model.service_item_feature[2], model.service_item_feature[3]], - data={}), + get_serializer( + model.service_item, + model.service, + [model.service_item_feature[2], model.service_item_feature[3]], + data={}, + ), ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('payments.ServiceItem'), [ - self.bc.format.to_dict(model.service_item), - ]) + self.assertEqual( + self.bc.database.list_of("payments.ServiceItem"), + [ + self.bc.format.to_dict(model.service_item), + ], + ) """ 🔽🔽🔽 With auth filtering by plan """ - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__with_auth__filtering_by_plan(self): - service_item_features = [{'lang': 'en', 'service_item_id': n} for n in range(1, 5)] - plan_service_items = [{'service_item_id': n} for n in range(1, 3)] - plan = {'is_renewable': False} - - model = self.bc.database.create(user=1, - plan=plan, - service=1, - service_item=4, - plan_service_item=plan_service_items, - service_item_feature=service_item_features) + service_item_features = [{"lang": "en", "service_item_id": n} for n in range(1, 5)] + plan_service_items = [{"service_item_id": n} for n in range(1, 3)] + plan = {"is_renewable": False} + + model = 
self.bc.database.create( + user=1, + plan=plan, + service=1, + service_item=4, + plan_service_item=plan_service_items, + service_item_feature=service_item_features, + ) self.client.force_authenticate(model.user) - url = reverse_lazy('payments:serviceitem') + '?plan=1' + url = reverse_lazy("payments:serviceitem") + "?plan=1" response = self.client.get(url) json = response.json() @@ -360,6 +404,6 @@ def test__with_auth__filtering_by_plan(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('payments.ServiceItem'), + self.bc.database.list_of("payments.ServiceItem"), self.bc.format.to_dict(model.service_item), ) diff --git a/breathecode/payments/urls.py b/breathecode/payments/urls.py index 2bfb2ffc9..e6c666814 100644 --- a/breathecode/payments/urls.py +++ b/breathecode/payments/urls.py @@ -30,53 +30,60 @@ PaymentMethodView, ) -app_name = 'payments' +app_name = "payments" urlpatterns = [ - path('planoffer', PlanOfferView.as_view(), name='planoffer'), - path('plan', PlanView.as_view(), name='plan'), - path('plan/<slug:plan_slug>', PlanView.as_view(), name='plan_slug'), - path('academy/plan', AcademyPlanView.as_view(), name='academy_plan'), - path('academy/plan/<int:plan_id>', AcademyPlanView.as_view(), name='academy_plan_id'), - path('academy/plan/<slug:plan_slug>', AcademyPlanView.as_view(), name='academy_plan_slug'), - path('academy/cohortset/<int:cohort_set_id>/cohort', AcademyCohortSetCohortView.as_view()), - path('academy/cohortset/<slug:cohort_set_slug>/cohort', AcademyCohortSetCohortView.as_view()), - path('service', ServiceView.as_view()), - path('service/<slug:service_slug>', ServiceView.as_view()), - path('service/<slug:service_slug>/items', ServiceItemView.as_view()), - path('academy/service', AcademyServiceView.as_view()), - path('academy/service/<slug:service_slug>', AcademyServiceView.as_view()), - path('academy/academyservice', AcademyAcademyServiceView.as_view()), - path('academy/academyservice/<slug:service_slug>', AcademyAcademyServiceView.as_view()), - path('serviceitem', ServiceItemView.as_view(), name='serviceitem'), - path('mentorshipserviceset', MentorshipServiceSetView.as_view(), name='mentorshipserviceset'), - path('mentorshipserviceset/<int:mentorship_service_set_id>', - MentorshipServiceSetView.as_view(), - name='mentorshipserviceset_id'), - path('eventtypeset', EventTypeSetView.as_view(), name='eventtypeset'), - path('eventtypeset/<int:event_type_set_id>', EventTypeSetView.as_view(), name='eventtypeset_id'), - path('me/service/consumable', MeConsumableView.as_view(), name='me_service_consumable'), - path('consumable/checkout', ConsumableCheckoutView.as_view(), name='consumable_checkout'), - path('me/subscription', MeSubscriptionView.as_view(), name='me_subscription'), - path('me/subscription/charge', MeSubscriptionChargeView.as_view(), name='me_subscription_charge'), - path('me/subscription/<int:subscription_id>/cancel', - MeSubscriptionCancelView.as_view(), - name='me_subscription_id_cancel'), - path('academy/subscription', AcademySubscriptionView.as_view()), - path('academy/subscription/<int:subscription_id>', AcademySubscriptionView.as_view()), - path('me/invoice', MeInvoiceView.as_view()), - path('me/invoice/<int:invoice_id>', MeInvoiceView.as_view()), - path('academy/invoice', AcademyInvoiceView.as_view()), - path('academy/invoice/<int:invoice_id>', AcademyInvoiceView.as_view()), - path('coupon', CouponView.as_view(), name='coupon'), - 
path('me/service/<str:service_slug>/consume/<str:hash>', ConsumeView.as_view(),
-         name='me_service_slug_consume_hash'),
-    path('me/service/<str:service_slug>/cancel/<str:hash>',
-         CancelConsumptionView.as_view(),
-         name='me_service_slug_cancel_hash'),
-    path('card', CardView.as_view(), name='card'),
-    path('bag', BagView.as_view()),
-    path('bag/<int:bag_id>/coupon', BagCouponView.as_view(), name='bag_id_coupon'),
-    path('checking', CheckingView.as_view(), name='checking'),
-    path('pay', PayView.as_view(), name='pay'),
-    path('methods', PaymentMethodView.as_view(), name='methods'),
+    path("planoffer", PlanOfferView.as_view(), name="planoffer"),
+    path("plan", PlanView.as_view(), name="plan"),
+    path("plan/<slug:plan_slug>", PlanView.as_view(), name="plan_slug"),
+    path("academy/plan", AcademyPlanView.as_view(), name="academy_plan"),
+    path("academy/plan/<int:plan_id>", AcademyPlanView.as_view(), name="academy_plan_id"),
+    path("academy/plan/<slug:plan_slug>", AcademyPlanView.as_view(), name="academy_plan_slug"),
+    path("academy/cohortset/<int:cohort_set_id>/cohort", AcademyCohortSetCohortView.as_view()),
+    path("academy/cohortset/<slug:cohort_set_slug>/cohort", AcademyCohortSetCohortView.as_view()),
+    path("service", ServiceView.as_view()),
+    path("service/<slug:service_slug>", ServiceView.as_view()),
+    path("service/<slug:service_slug>/items", ServiceItemView.as_view()),
+    path("academy/service", AcademyServiceView.as_view()),
+    path("academy/service/<slug:service_slug>", AcademyServiceView.as_view()),
+    path("academy/academyservice", AcademyAcademyServiceView.as_view()),
+    path("academy/academyservice/<slug:service_slug>", AcademyAcademyServiceView.as_view()),
+    path("serviceitem", ServiceItemView.as_view(), name="serviceitem"),
+    path("mentorshipserviceset", MentorshipServiceSetView.as_view(), name="mentorshipserviceset"),
+    path(
+        "mentorshipserviceset/<int:mentorship_service_set_id>",
+        MentorshipServiceSetView.as_view(),
+        name="mentorshipserviceset_id",
+    ),
+    path("eventtypeset", EventTypeSetView.as_view(), name="eventtypeset"),
+    path("eventtypeset/<int:event_type_set_id>", EventTypeSetView.as_view(), name="eventtypeset_id"),
+    path("me/service/consumable", MeConsumableView.as_view(), name="me_service_consumable"),
+    path("consumable/checkout", ConsumableCheckoutView.as_view(), name="consumable_checkout"),
+    path("me/subscription", MeSubscriptionView.as_view(), name="me_subscription"),
+    path("me/subscription/charge", MeSubscriptionChargeView.as_view(), name="me_subscription_charge"),
+    path(
+        "me/subscription/<int:subscription_id>/cancel",
+        MeSubscriptionCancelView.as_view(),
+        name="me_subscription_id_cancel",
+    ),
+    path("academy/subscription", AcademySubscriptionView.as_view()),
+    path("academy/subscription/<int:subscription_id>", AcademySubscriptionView.as_view()),
+    path("me/invoice", MeInvoiceView.as_view()),
+    path("me/invoice/<int:invoice_id>", MeInvoiceView.as_view()),
+    path("academy/invoice", AcademyInvoiceView.as_view()),
+    path("academy/invoice/<int:invoice_id>", AcademyInvoiceView.as_view()),
+    path("coupon", CouponView.as_view(), name="coupon"),
+    path(
+        "me/service/<str:service_slug>/consume/<str:hash>", ConsumeView.as_view(), name="me_service_slug_consume_hash"
+    ),
+    path(
+        "me/service/<str:service_slug>/cancel/<str:hash>",
+        CancelConsumptionView.as_view(),
+        name="me_service_slug_cancel_hash",
+    ),
+    path("card", CardView.as_view(), name="card"),
+    path("bag", BagView.as_view()),
+    path("bag/<int:bag_id>/coupon", BagCouponView.as_view(),
name="bag_id_coupon"), + path("checking", CheckingView.as_view(), name="checking"), + path("pay", PayView.as_view(), name="pay"), + path("methods", PaymentMethodView.as_view(), name="methods"), ] diff --git a/breathecode/payments/views.py b/breathecode/payments/views.py index 72a74c77e..edf631f8c 100644 --- a/breathecode/payments/views.py +++ b/breathecode/payments/views.py @@ -82,12 +82,12 @@ logger = getLogger(__name__) -IS_DJANGO_REDIS = hasattr(cache, 'delete_pattern') +IS_DJANGO_REDIS = hasattr(cache, "delete_pattern") class PlanView(APIView): permission_classes = [AllowAny] - extensions = APIViewExtensions(sort='-id', paginate=True) + extensions = APIViewExtensions(sort="-id", paginate=True) def get(self, request, plan_slug=None, service_slug=None): @@ -95,7 +95,7 @@ def is_onboarding(value: str): if filtering: return Q() - return Q(is_onboarding=value.lower() == 'true') + return Q(is_onboarding=value.lower() == "true") handler = self.extensions(request) lang = get_user_language(request) @@ -103,125 +103,133 @@ def is_onboarding(value: str): if plan_slug: item = Plan.objects.filter(slug=plan_slug).first() if not item: - raise ValidationException(translation(lang, en='Plan not found', es='Plan no existe', slug='not-found'), - code=404) + raise ValidationException( + translation(lang, en="Plan not found", es="Plan no existe", slug="not-found"), code=404 + ) - serializer = GetPlanSerializer(item, - many=False, - context={'academy_id': request.GET.get('academy')}, - select=request.GET.get('select')) + serializer = GetPlanSerializer( + item, many=False, context={"academy_id": request.GET.get("academy")}, select=request.GET.get("select") + ) return handler.response(serializer.data) - filtering = 'cohort' in request.GET or 'syllabus' in request.GET - query = handler.lookup.build(lang, - strings={ - 'exact': [ - 'service_items__service__slug', - ], - }, - overwrite={ - 'service_slug': 'service_items__service__slug', - }, - custom_fields={'is_onboarding': is_onboarding}) + filtering = "cohort" in request.GET or "syllabus" in request.GET + query = handler.lookup.build( + lang, + strings={ + "exact": [ + "service_items__service__slug", + ], + }, + overwrite={ + "service_slug": "service_items__service__slug", + }, + custom_fields={"is_onboarding": is_onboarding}, + ) if filtering: - items = PlanFinder(request, query=query).get_plans_belongs_from_request().exclude(status='DELETED') + items = PlanFinder(request, query=query).get_plans_belongs_from_request().exclude(status="DELETED") else: - items = Plan.objects.filter(query).exclude(status='DELETED') + items = Plan.objects.filter(query).exclude(status="DELETED") items = handler.queryset(items) - serializer = GetPlanSerializer(items, - many=True, - context={'academy_id': request.GET.get('academy')}, - select=request.GET.get('select')) + serializer = GetPlanSerializer( + items, many=True, context={"academy_id": request.GET.get("academy")}, select=request.GET.get("select") + ) return handler.response(serializer.data) class AcademyPlanView(APIView): - extensions = APIViewExtensions(sort='-id', paginate=True) + extensions = APIViewExtensions(sort="-id", paginate=True) - @capable_of('read_plan') + @capable_of("read_plan") def get(self, request, plan_id=None, plan_slug=None, service_slug=None, academy_id=None): def is_onboarding(value: str): if filtering: return Q() - return Q(is_onboarding=value.lower() == 'true') + return Q(is_onboarding=value.lower() == "true") handler = self.extensions(request) lang = get_user_language(request) if plan_id or 
plan_slug: - item = Plan.objects.filter( - Q(id=plan_id) | Q(slug=plan_slug, slug__isnull=False), - Q(owner__id=academy_id) | Q(owner=None)).exclude(status='DELETED').first() + item = ( + Plan.objects.filter( + Q(id=plan_id) | Q(slug=plan_slug, slug__isnull=False), Q(owner__id=academy_id) | Q(owner=None) + ) + .exclude(status="DELETED") + .first() + ) if not item: - raise ValidationException(translation(lang, en='Plan not found', es='Plan no existe', slug='not-found'), - code=404) + raise ValidationException( + translation(lang, en="Plan not found", es="Plan no existe", slug="not-found"), code=404 + ) - serializer = GetPlanSerializer(item, - many=False, - context={'academy_id': academy_id}, - select=request.GET.get('select')) + serializer = GetPlanSerializer( + item, many=False, context={"academy_id": academy_id}, select=request.GET.get("select") + ) return handler.response(serializer.data) - filtering = 'cohort' in request.GET or 'syllabus' in request.GET - query = handler.lookup.build(lang, - strings={ - 'exact': [ - 'service_items__service__slug', - ], - }, - overwrite={ - 'service_slug': 'service_items__service__slug', - }, - custom_fields={'is_onboarding': is_onboarding}) + filtering = "cohort" in request.GET or "syllabus" in request.GET + query = handler.lookup.build( + lang, + strings={ + "exact": [ + "service_items__service__slug", + ], + }, + overwrite={ + "service_slug": "service_items__service__slug", + }, + custom_fields={"is_onboarding": is_onboarding}, + ) if filtering: - items = PlanFinder( - request, query=query).get_plans_belongs_from_request().filter(Q(owner__id=academy_id) - | Q(owner=None)).exclude(status='DELETED') + items = ( + PlanFinder(request, query=query) + .get_plans_belongs_from_request() + .filter(Q(owner__id=academy_id) | Q(owner=None)) + .exclude(status="DELETED") + ) else: - items = Plan.objects.filter(query, Q(owner__id=academy_id) | Q(owner=None)).exclude(status='DELETED') + items = Plan.objects.filter(query, Q(owner__id=academy_id) | Q(owner=None)).exclude(status="DELETED") items = handler.queryset(items) - serializer = GetPlanSerializer(items, - many=True, - context={'academy_id': academy_id}, - select=request.GET.get('select')) + serializer = GetPlanSerializer( + items, many=True, context={"academy_id": academy_id}, select=request.GET.get("select") + ) return handler.response(serializer.data) - @capable_of('crud_plan') + @capable_of("crud_plan") def post(self, request, academy_id=None): lang = get_user_language(request) data = {} for key in request.data: - if key in ['owner', 'owner_id', 'currency']: + if key in ["owner", "owner_id", "currency"]: continue data[key] = request.data[key] data = request.data - if not 'owner' in data or data['owner'] is not None: - data['owner'] = academy_id + if not "owner" in data or data["owner"] is not None: + data["owner"] = academy_id - currency = data.get('currency', '') + currency = data.get("currency", "") if currency and (currency := Currency.objects.filter(code=currency).first()): - data['currency'] = currency.id + data["currency"] = currency.id else: - raise ValidationException(translation(lang, - en='Currency not found', - es='Divisa no encontrada', - slug='currency-not-found'), - code=400) + raise ValidationException( + translation(lang, en="Currency not found", es="Divisa no encontrada", slug="currency-not-found"), + code=400, + ) serializer = PlanSerializer(data=data) serializer.is_valid(raise_exception=True) @@ -229,24 +237,27 @@ def post(self, request, academy_id=None): return Response(serializer.data, 
status=201) - @capable_of('crud_plan') + @capable_of("crud_plan") def put(self, request, plan_id=None, plan_slug=None, academy_id=None): lang = get_user_language(request) - plan = Plan.objects.filter(Q(id=plan_id) | Q(slug=plan_slug), - Q(owner__id=academy_id) | Q(owner=None), - id=plan_id).exclude(status='DELETED').first() + plan = ( + Plan.objects.filter(Q(id=plan_id) | Q(slug=plan_slug), Q(owner__id=academy_id) | Q(owner=None), id=plan_id) + .exclude(status="DELETED") + .first() + ) if not plan: - raise ValidationException(translation(lang, en='Plan not found', es='Plan no existe', slug='not-found'), - code=404) + raise ValidationException( + translation(lang, en="Plan not found", es="Plan no existe", slug="not-found"), code=404 + ) data = {} if plan.currency: - data['currency'] = plan.currency.id + data["currency"] = plan.currency.id for key in request.data: - if key in ['owner', 'owner_id']: + if key in ["owner", "owner_id"]: continue data[key] = request.data[key] @@ -258,49 +269,61 @@ def put(self, request, plan_id=None, plan_slug=None, academy_id=None): return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - @capable_of('crud_plan') + @capable_of("crud_plan") def delete(self, request, plan_id=None, plan_slug=None, academy_id=None): lang = get_user_language(request) - plan = Plan.objects.filter(Q(id=plan_id) | Q(slug=plan_slug), - Q(owner__id=academy_id) | Q(owner=None), - id=plan_id).exclude(status='DELETED').first() + plan = ( + Plan.objects.filter(Q(id=plan_id) | Q(slug=plan_slug), Q(owner__id=academy_id) | Q(owner=None), id=plan_id) + .exclude(status="DELETED") + .first() + ) if not plan: - raise ValidationException(translation(lang, en='Plan not found', es='Plan no existe', slug='not-found'), - code=404) + raise ValidationException( + translation(lang, en="Plan not found", es="Plan no existe", slug="not-found"), code=404 + ) - plan.status = 'DELETED' + plan.status = "DELETED" plan.save() return Response(status=status.HTTP_204_NO_CONTENT) class AcademyCohortSetCohortView(APIView): - extensions = APIViewExtensions(sort='-id', paginate=True) + extensions = APIViewExtensions(sort="-id", paginate=True) - @capable_of('crud_plan') + @capable_of("crud_plan") def put(self, request, cohort_set_id=None, cohort_set_slug=None, academy_id=None): lang = get_user_language(request) handler = self.extensions(request) - query = handler.lookup.build(lang, ints={ - 'in': [ - 'id', - ], - }, strings={ - 'in': [ - 'slug', - ], - }, fix={'lower': 'slug'}) + query = handler.lookup.build( + lang, + ints={ + "in": [ + "id", + ], + }, + strings={ + "in": [ + "slug", + ], + }, + fix={"lower": "slug"}, + ) errors = [] - if not (cohort_set := CohortSet.objects.filter(Q(id=cohort_set_id) | Q(slug=cohort_set_slug), - owner__id=academy_id).exclude(status='DELETED').first()): - errors.append(C(translation(lang, en='Plan not found', es='Plan no encontrado', slug='not-found'))) + if not ( + cohort_set := CohortSet.objects.filter(Q(id=cohort_set_id) | Q(slug=cohort_set_slug), owner__id=academy_id) + .exclude(status="DELETED") + .first() + ): + errors.append(C(translation(lang, en="Plan not found", es="Plan no encontrado", slug="not-found"))) if not (items := Cohort.objects.filter(query)): errors.append( - C(translation(lang, en='Cohort not found', es='Cohort no encontrada', slug='cohort-not-found'))) + C(translation(lang, en="Cohort not found", es="Cohort no encontrada", slug="cohort-not-found")) + ) if errors: raise ValidationException(errors, code=404) @@ -313,12 +336,12 @@ def put(self, request, 
cohort_set_id=None, cohort_set_slug=None, academy_id=None if to_add: cohort_set.cohorts.add(*to_add) - return Response({'status': 'ok'}, status=status.HTTP_201_CREATED if to_add else status.HTTP_200_OK) + return Response({"status": "ok"}, status=status.HTTP_201_CREATED if to_add else status.HTTP_200_OK) class ServiceView(APIView): permission_classes = [AllowAny] - extensions = APIViewExtensions(sort='-id', paginate=True) + extensions = APIViewExtensions(sort="-id", paginate=True) def get(self, request, service_slug=None): handler = self.extensions(request) @@ -329,87 +352,80 @@ def get(self, request, service_slug=None): item = Service.objects.filter(slug=service_slug).first() if not item: - raise ValidationException(translation(lang, - en='Service not found', - es='No existe el Servicio', - slug='not-found'), - code=404) - - serializer = GetServiceSerializer(item, - many=False, - context={'academy_id': request.GET.get('academy')}, - select=request.GET.get('select')) + raise ValidationException( + translation(lang, en="Service not found", es="No existe el Servicio", slug="not-found"), code=404 + ) + + serializer = GetServiceSerializer( + item, many=False, context={"academy_id": request.GET.get("academy")}, select=request.GET.get("select") + ) return handler.response(serializer.data) items = Service.objects.filter() - if group := request.GET.get('group'): + if group := request.GET.get("group"): items = items.filter(group__codename=group) - if cohort_slug := request.GET.get('cohort_slug'): + if cohort_slug := request.GET.get("cohort_slug"): items = items.filter(cohorts__slug=cohort_slug) - if mentorship_service_slug := request.GET.get('mentorship_service_slug'): + if mentorship_service_slug := request.GET.get("mentorship_service_slug"): items = items.filter(mentorship_services__slug=mentorship_service_slug) items = handler.queryset(items) - serializer = GetServiceSerializer(items, - many=True, - context={'academy_id': request.GET.get('academy')}, - select=request.GET.get('select')) + serializer = GetServiceSerializer( + items, many=True, context={"academy_id": request.GET.get("academy")}, select=request.GET.get("select") + ) return handler.response(serializer.data) class AcademyServiceView(APIView): - extensions = APIViewExtensions(sort='-id', paginate=True) + extensions = APIViewExtensions(sort="-id", paginate=True) - @capable_of('read_service') + @capable_of("read_service") def get(self, request, service_slug=None, academy_id=None): handler = self.extensions(request) lang = get_user_language(request) if service_slug: - item = Service.objects.filter(Q(owner__id=academy_id) | Q(owner=None) | Q(private=False), - slug=service_slug).first() + item = Service.objects.filter( + Q(owner__id=academy_id) | Q(owner=None) | Q(private=False), slug=service_slug + ).first() if not item: - raise ValidationException(translation(lang, - en='Service not found', - es='No existe el Servicio', - slug='not-found'), - code=404) - - serializer = GetServiceSerializer(item, - many=False, - context={'academy_id': academy_id}, - select=request.GET.get('select')) + raise ValidationException( + translation(lang, en="Service not found", es="No existe el Servicio", slug="not-found"), code=404 + ) + + serializer = GetServiceSerializer( + item, many=False, context={"academy_id": academy_id}, select=request.GET.get("select") + ) return handler.response(serializer.data) items = Service.objects.filter(Q(owner__id=academy_id) | Q(owner=None) | Q(private=False)) - if group := request.GET.get('group'): + if group := 
request.GET.get("group"): items = items.filter(group__codename=group) - if cohort_slug := request.GET.get('cohort_slug'): + if cohort_slug := request.GET.get("cohort_slug"): items = items.filter(cohorts__slug=cohort_slug) - if mentorship_service_slug := request.GET.get('mentorship_service_slug'): + if mentorship_service_slug := request.GET.get("mentorship_service_slug"): items = items.filter(mentorship_services__slug=mentorship_service_slug) items = handler.queryset(items) - serializer = GetServiceSerializer(items, - many=True, - context={'academy_id': academy_id}, - select=request.GET.get('select')) + serializer = GetServiceSerializer( + items, many=True, context={"academy_id": academy_id}, select=request.GET.get("select") + ) return handler.response(serializer.data) - @capable_of('crud_service') + @capable_of("crud_service") def post(self, request, academy_id=None): data = request.data - if not 'owner' in data or data['owner'] is not None: - data['owner'] = academy_id + if not "owner" in data or data["owner"] is not None: + data["owner"] = academy_id serializer = ServiceSerializer(data=request.data) if serializer.is_valid(): @@ -418,21 +434,19 @@ def post(self, request, academy_id=None): return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - @capable_of('crud_service') + @capable_of("crud_service") def put(self, request, service_slug=None, academy_id=None): service = Service.objects.filter(Q(owner__id=academy_id) | Q(owner=None), slug=service_slug).first() lang = get_user_language(request) if not service: - raise ValidationException(translation(lang, - en='Service not found', - es='No existe el Servicio', - slug='not-found'), - code=404) + raise ValidationException( + translation(lang, en="Service not found", es="No existe el Servicio", slug="not-found"), code=404 + ) data = request.data - if not 'owner' in data or data['owner'] is not None: - data['owner'] = academy_id + if not "owner" in data or data["owner"] is not None: + data["owner"] = academy_id serializer = ServiceSerializer(service, data=data) if serializer.is_valid(): @@ -443,9 +457,9 @@ def put(self, request, service_slug=None, academy_id=None): class AcademyAcademyServiceView(APIView): - extensions = APIViewExtensions(sort='-id', paginate=True) + extensions = APIViewExtensions(sort="-id", paginate=True) - @capable_of('read_academyservice') + @capable_of("read_academyservice") def get(self, request, academy_id=None, service_slug=None): handler = self.extensions(request) lang = get_user_language(request) @@ -453,21 +467,25 @@ def get(self, request, academy_id=None, service_slug=None): if service_slug is not None: item = AcademyService.objects.filter(academy__id=academy_id, service__slug=service_slug).first() if item is None: - raise ValidationException(translation(lang, - en='There is no Academy Service with that service slug', - es='No existe ningún Academy Service con ese slug de Service', - slug='academy-service-not-found'), - code=404) + raise ValidationException( + translation( + lang, + en="There is no Academy Service with that service slug", + es="No existe ningún Academy Service con ese slug de Service", + slug="academy-service-not-found", + ), + code=404, + ) serializer = GetAcademyServiceSmallSerializer(item) return handler.response(serializer.data) items = AcademyService.objects.filter(academy__id=academy_id) - if mentorship_service_set := request.GET.get('mentorship_service_set'): + if mentorship_service_set := request.GET.get("mentorship_service_set"): items = 
items.filter(available_mentorship_service_sets__slug__exact=mentorship_service_set) - if event_type_set := request.GET.get('event_type_set'): + if event_type_set := request.GET.get("event_type_set"): items = items.filter(available_event_type_sets__slug__exact=event_type_set) items = handler.queryset(items) @@ -475,11 +493,11 @@ def get(self, request, academy_id=None, service_slug=None): return handler.response(serializer.data) - @capable_of('crud_academyservice') + @capable_of("crud_academyservice") def post(self, request, academy_id=None): data = request.data - data['academy'] = academy_id + data["academy"] = academy_id serializer = POSTAcademyServiceSerializer(data=request.data) if serializer.is_valid(): @@ -488,26 +506,29 @@ def post(self, request, academy_id=None): return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - @capable_of('crud_academyservice') + @capable_of("crud_academyservice") def put(self, request, service_slug=None, academy_id=None): service = Service.objects.filter(Q(owner__id=academy_id) | Q(owner=None), slug=service_slug).first() lang = get_user_language(request) if not service: - raise ValidationException(translation(lang, - en='Service not found', - es='No existe el Servicio', - slug='service-not-found'), - code=404) + raise ValidationException( + translation(lang, en="Service not found", es="No existe el Servicio", slug="service-not-found"), + code=404, + ) academyservice = AcademyService.objects.filter(service=service.id, academy__id=academy_id).first() if not academyservice: - raise ValidationException(translation(lang, - en='Academy Service not found', - es='No existe el Academy Service', - slug='academyservice-not-found'), - code=404) + raise ValidationException( + translation( + lang, + en="Academy Service not found", + es="No existe el Academy Service", + slug="academyservice-not-found", + ), + code=404, + ) serializer = PUTAcademyServiceSerializer(academyservice, data=request.data) if serializer.is_valid(): @@ -519,7 +540,7 @@ def put(self, request, service_slug=None, academy_id=None): class ServiceItemView(APIView): permission_classes = [AllowAny] - extensions = APIViewExtensions(sort='-id', paginate=True) + extensions = APIViewExtensions(sort="-id", paginate=True) def get(self, request, service_slug=None): handler = self.extensions(request) @@ -527,16 +548,14 @@ def get(self, request, service_slug=None): items = ServiceItem.objects.none() - if plan := request.GET.get('plan'): - args = {'id': int(plan)} if plan.isnumeric() else {'slug': plan} + if plan := request.GET.get("plan"): + args = {"id": int(plan)} if plan.isnumeric() else {"slug": plan} p = Plan.objects.filter(**args).first() if not p: - raise ValidationException(translation(lang, - en='Plan not found', - es='No existe el Plan', - slug='not-found'), - code=404) + raise ValidationException( + translation(lang, en="Plan not found", es="No existe el Plan", slug="not-found"), code=404 + ) items |= p.service_items.all() items = items.distinct() @@ -547,8 +566,8 @@ def get(self, request, service_slug=None): if service_slug: items = items.filter(service__slug=service_slug) - if unit_type := request.GET.get('unit_type'): - items = items.filter(unit_type__in=unit_type.split(',')) + if unit_type := request.GET.get("unit_type"): + items = items.filter(unit_type__in=unit_type.split(",")) items = items.annotate(lang=Value(lang, output_field=CharField())) @@ -566,18 +585,18 @@ def get(self, request): items = Consumable.objects.filter(Q(valid_until__gte=utc_now) | Q(valid_until=None), 
user=request.user) mentorship_services = MentorshipServiceSet.objects.none() - mentorship_services = filter_consumables(request, items, mentorship_services, 'mentorship_service_set') + mentorship_services = filter_consumables(request, items, mentorship_services, "mentorship_service_set") cohorts = CohortSet.objects.none() - cohorts = filter_consumables(request, items, cohorts, 'cohort_set') + cohorts = filter_consumables(request, items, cohorts, "cohort_set") event_types = EventTypeSet.objects.none() - event_types = filter_consumables(request, items, event_types, 'event_type_set') + event_types = filter_consumables(request, items, event_types, "event_type_set") balance = { - 'mentorship_service_sets': get_balance_by_resource(mentorship_services, 'mentorship_service_set'), - 'cohort_sets': get_balance_by_resource(cohorts, 'cohort_set'), - 'event_type_sets': get_balance_by_resource(event_types, 'event_type_set'), + "mentorship_service_sets": get_balance_by_resource(mentorship_services, "mentorship_service_set"), + "cohort_sets": get_balance_by_resource(cohorts, "cohort_set"), + "event_type_sets": get_balance_by_resource(event_types, "event_type_set"), } return Response(balance) @@ -585,7 +604,7 @@ def get(self, request): class MentorshipServiceSetView(APIView): permission_classes = [AllowAny] - extensions = APIViewExtensions(sort='-id', paginate=True) + extensions = APIViewExtensions(sort="-id", paginate=True) def get(self, request, mentorship_service_set_id=None): handler = self.extensions(request) @@ -595,11 +614,15 @@ def get(self, request, mentorship_service_set_id=None): if mentorship_service_set_id: item = MentorshipServiceSet.objects.filter(id=mentorship_service_set_id).first() if not item: - raise ValidationException(translation(lang, - en='Mentorship Service Set not found', - es='No existe el Servicio de Mentoría', - slug='not-found'), - code=404) + raise ValidationException( + translation( + lang, + en="Mentorship Service Set not found", + es="No existe el Servicio de Mentoría", + slug="not-found", + ), + code=404, + ) serializer = GetMentorshipServiceSetSerializer(item, many=False) @@ -608,12 +631,12 @@ def get(self, request, mentorship_service_set_id=None): query = handler.lookup.build( lang, slugs=[ - '', - 'academy', - 'mentorship_services', + "", + "academy", + "mentorship_services", ], overwrite={ - 'mentorship_service': 'mentorship_services', + "mentorship_service": "mentorship_services", }, ) @@ -627,7 +650,7 @@ def get(self, request, mentorship_service_set_id=None): class EventTypeSetView(APIView): permission_classes = [AllowAny] - extensions = APIViewExtensions(sort='-id', paginate=True) + extensions = APIViewExtensions(sort="-id", paginate=True) def get(self, request, event_type_set_id=None): handler = self.extensions(request) @@ -637,26 +660,27 @@ def get(self, request, event_type_set_id=None): if event_type_set_id: item = EventTypeSet.objects.filter(id=event_type_set_id).first() if not item: - raise ValidationException(translation(lang, - en='Event type set not found', - es='No existe el tipo de evento', - slug='not-found'), - code=404) + raise ValidationException( + translation( + lang, en="Event type set not found", es="No existe el tipo de evento", slug="not-found" + ), + code=404, + ) serializer = GetEventTypeSetSerializer(item, many=False) return handler.response(serializer.data) query = handler.lookup.build( lang, - strings={'exact': ['event_types__lang']}, + strings={"exact": ["event_types__lang"]}, slugs=[ - '', - 'academy', - 'event_types', + "", + "academy", 
+ "event_types", ], overwrite={ - 'event_type': 'event_types', - 'lang': 'event_types__lang', + "event_type": "event_types", + "lang": "event_types__lang", }, ) @@ -671,15 +695,15 @@ def get(self, request, event_type_set_id=None): # TODO: this view is not cachable yet. class MeSubscriptionView(APIView): # this cannot support cache because the cache does not support associated two models to a response yet - extensions = APIViewExtensions(sort='-id') + extensions = APIViewExtensions(sort="-id") def get_lookup(self, key, value): args = () kwargs = {} - slug_key = f'{key}__slug__in' - pk_key = f'{key}__id__in' + slug_key = f"{key}__slug__in" + pk_key = f"{key}__id__in" - for v in value.split(','): + for v in value.split(","): if slug_key not in kwargs and not v.isnumeric(): kwargs[slug_key] = [] @@ -693,7 +717,7 @@ def get_lookup(self, key, value): kwargs[slug_key].append(v) if len(kwargs) > 1: - args = (Q(**{slug_key: kwargs[slug_key]}) | Q(**{pk_key: kwargs[pk_key]}), ) + args = (Q(**{slug_key: kwargs[slug_key]}) | Q(**{pk_key: kwargs[pk_key]}),) kwargs = {} return args, kwargs @@ -705,13 +729,13 @@ def get(self, request): subscriptions = Subscription.objects.filter(user=request.user) - #NOTE: this is before feature/add-plan-duration branch, this will be outdated + # NOTE: this is before feature/add-plan-duration branch, this will be outdated plan_financings = PlanFinancing.objects.filter(user=request.user) - if subscription := request.GET.get('subscription'): + if subscription := request.GET.get("subscription"): subscriptions = subscriptions.filter(id=int(subscription)) - if plan_financing := request.GET.get('plan-financing'): + if plan_financing := request.GET.get("plan-financing"): plan_financings = plan_financings.filter(id=int(plan_financing)) if subscription and not plan_financing: @@ -720,23 +744,25 @@ def get(self, request): if not subscription and plan_financing: subscriptions = Subscription.objects.none() - if status := request.GET.get('status'): - subscriptions = subscriptions.filter(status__in=status.split(',')) - plan_financings = plan_financings.filter(status__in=status.split(',')) + if status := request.GET.get("status"): + subscriptions = subscriptions.filter(status__in=status.split(",")) + plan_financings = plan_financings.filter(status__in=status.split(",")) else: - subscriptions = subscriptions.exclude(status='CANCELLED').exclude(status='DEPRECATED').exclude( - status='PAYMENT_ISSUE') - plan_financings = plan_financings.exclude(status='CANCELLED').exclude(status='DEPRECATED').exclude( - status='PAYMENT_ISSUE') - - if invoice := request.GET.get('invoice'): + subscriptions = ( + subscriptions.exclude(status="CANCELLED").exclude(status="DEPRECATED").exclude(status="PAYMENT_ISSUE") + ) + plan_financings = ( + plan_financings.exclude(status="CANCELLED").exclude(status="DEPRECATED").exclude(status="PAYMENT_ISSUE") + ) + + if invoice := request.GET.get("invoice"): ids = [int(x) for x in invoice if x.isnumeric()] subscriptions = subscriptions.filter(invoices__id__in=ids) plan_financings = plan_financings.filter(invoices__id__in=ids) - if service := request.GET.get('service'): - service_items_args, service_items_kwargs = self.get_lookup('service_items__service', service) - plans_args, plans_kwargs = self.get_lookup('plans__service_items__service', service) + if service := request.GET.get("service"): + service_items_args, service_items_kwargs = self.get_lookup("service_items__service", service) + plans_args, plans_kwargs = self.get_lookup("plans__service_items__service", service) if 
service_items_args: subscriptions = subscriptions.filter(Q(*service_items_args) | Q(*plans_args)) @@ -746,30 +772,32 @@ def get(self, request): subscriptions = subscriptions.filter(Q(**plans_kwargs) | Q(**service_items_kwargs)) plan_financings = plan_financings.filter(**plans_kwargs) - if plan := request.GET.get('plan'): - args, kwargs = self.get_lookup('plans', plan) + if plan := request.GET.get("plan"): + args, kwargs = self.get_lookup("plans", plan) subscriptions = subscriptions.filter(*args, **kwargs) plan_financings = plan_financings.filter(*args, **kwargs) - if selected_cohort_set := (request.GET.get('cohort-set-selected') or request.GET.get('cohort-set-selected')): - args, kwargs = self.get_lookup('selected_cohort_set', selected_cohort_set) + if selected_cohort_set := (request.GET.get("cohort-set-selected") or request.GET.get("cohort-set-selected")): + args, kwargs = self.get_lookup("selected_cohort_set", selected_cohort_set) subscriptions = subscriptions.filter(*args, **kwargs) plan_financings = plan_financings.filter(*args, **kwargs) - if selected_mentorship_service_set := (request.GET.get('mentorship-service-set-selected') - or request.GET.get('selected-mentorship-service-set')): - args, kwargs = self.get_lookup('selected_mentorship_service_set', selected_mentorship_service_set) + if selected_mentorship_service_set := ( + request.GET.get("mentorship-service-set-selected") or request.GET.get("selected-mentorship-service-set") + ): + args, kwargs = self.get_lookup("selected_mentorship_service_set", selected_mentorship_service_set) subscriptions = subscriptions.filter(*args, **kwargs) plan_financings = plan_financings.filter(*args, **kwargs) - if selected_event_type_set := (request.GET.get('event-type-set-selected') - or request.GET.get('selected-event-type-set')): - args, kwargs = self.get_lookup('selected_event_type_set', selected_event_type_set) + if selected_event_type_set := ( + request.GET.get("event-type-set-selected") or request.GET.get("selected-event-type-set") + ): + args, kwargs = self.get_lookup("selected_event_type_set", selected_event_type_set) subscriptions = subscriptions.filter(*args, **kwargs) plan_financings = plan_financings.filter(*args, **kwargs) - only_valid = request.GET.get('only_valid') - if only_valid == True or only_valid == 'true': + only_valid = request.GET.get("only_valid") + if only_valid == True or only_valid == "true": subscriptions = subscriptions.filter(Q(valid_until__gte=now) | Q(valid_until=None)) plan_financings = plan_financings.filter(valid_until__gte=now) @@ -779,72 +807,90 @@ def get(self, request): plan_financings = handler.queryset(plan_financings.distinct()) plan_financing_serializer = GetPlanFinancingSerializer(plan_financings, many=True) - return handler.response({ - 'subscriptions': subscription_serializer.data, - 'plan_financings': plan_financing_serializer.data, - }) + return handler.response( + { + "subscriptions": subscription_serializer.data, + "plan_financings": plan_financing_serializer.data, + } + ) class MeSubscriptionChargeView(APIView): - extensions = APIViewExtensions(sort='-id', paginate=True) + extensions = APIViewExtensions(sort="-id", paginate=True) def put(self, request, subscription_id): utc_now = timezone.now() if not (subscription := Subscription.objects.filter(id=subscription_id, user=request.user).first()): - raise ValidationException(translation(request.user.language, - en='Subscription not found', - es='No existe la suscripción', - slug='not-found'), - code=404) - - if subscription.status != 'PAYMENT_ISSUE' 
and subscription.status == 'ERROR': - raise ValidationException(translation(request.user.language, - en='Nothing to charge too', - es='No hay nada que cobrar', - slug='nothing-to-charge'), - code=400) + raise ValidationException( + translation( + request.user.language, en="Subscription not found", es="No existe la suscripción", slug="not-found" + ), + code=404, + ) + + if subscription.status != "PAYMENT_ISSUE" and subscription.status == "ERROR": + raise ValidationException( + translation( + request.user.language, + en="Nothing to charge too", + es="No hay nada que cobrar", + slug="nothing-to-charge", + ), + code=400, + ) if subscription.next_payment_at - timedelta(days=1) > utc_now: - raise ValidationException(translation(request.user.language, - en='The subscription time is not over', - es='El tiempo de la suscripción no ha terminado', - slug='time-not-over'), - code=400) + raise ValidationException( + translation( + request.user.language, + en="The subscription time is not over", + es="El tiempo de la suscripción no ha terminado", + slug="time-not-over", + ), + code=400, + ) tasks.charge_subscription.delay(subscription_id) - return Response({'status': 'loading'}, status=status.HTTP_202_ACCEPTED) + return Response({"status": "loading"}, status=status.HTTP_202_ACCEPTED) class MeSubscriptionCancelView(APIView): - extensions = APIViewExtensions(sort='-id', paginate=True) + extensions = APIViewExtensions(sort="-id", paginate=True) def put(self, request, subscription_id): lang = get_user_language(request) if not (subscription := Subscription.objects.filter(id=subscription_id, user=request.user).first()): - raise ValidationException(translation(lang, - en='Subscription not found', - es='No existe la suscripción', - slug='not-found'), - code=404) - - if subscription.status == 'CANCELLED': - raise ValidationException(translation(lang, - en='Subscription already cancelled', - es='La suscripción ya está cancelada', - slug='already-cancelled'), - code=400) - - if subscription.status == 'DEPRECATED': - raise ValidationException(translation(lang, - en='This subscription is deprecated, so is already cancelled', - es='Esta suscripción está obsoleta, por lo que ya está cancelada', - slug='deprecated'), - code=400) - - subscription.status = 'CANCELLED' + raise ValidationException( + translation(lang, en="Subscription not found", es="No existe la suscripción", slug="not-found"), + code=404, + ) + + if subscription.status == "CANCELLED": + raise ValidationException( + translation( + lang, + en="Subscription already cancelled", + es="La suscripción ya está cancelada", + slug="already-cancelled", + ), + code=400, + ) + + if subscription.status == "DEPRECATED": + raise ValidationException( + translation( + lang, + en="This subscription is deprecated, so is already cancelled", + es="Esta suscripción está obsoleta, por lo que ya está cancelada", + slug="deprecated", + ), + code=400, + ) + + subscription.status = "CANCELLED" subscription.save() serializer = GetSubscriptionSerializer(subscription) @@ -853,42 +899,51 @@ def put(self, request, subscription_id): class MePlanFinancingChargeView(APIView): - extensions = APIViewExtensions(sort='-id', paginate=True) + extensions = APIViewExtensions(sort="-id", paginate=True) def put(self, request, plan_financing_id): utc_now = timezone.now() if not (subscription := PlanFinancing.objects.filter(id=plan_financing_id, user=request.user).first()): - raise ValidationException(translation(request.user.language, - en='Subscription not found', - es='No existe la suscripción', - 
slug='not-found'), - code=404) - - if subscription.status != 'PAYMENT_ISSUE' and subscription.status == 'ERROR': - raise ValidationException(translation(request.user.language, - en='Nothing to charge too', - es='No hay nada que cobrar', - slug='nothing-to-charge'), - code=400) + raise ValidationException( + translation( + request.user.language, en="Subscription not found", es="No existe la suscripción", slug="not-found" + ), + code=404, + ) + + if subscription.status != "PAYMENT_ISSUE" and subscription.status == "ERROR": + raise ValidationException( + translation( + request.user.language, + en="Nothing to charge too", + es="No hay nada que cobrar", + slug="nothing-to-charge", + ), + code=400, + ) if subscription.next_payment_at - timedelta(days=1) > utc_now: - raise ValidationException(translation(request.user.language, - en='Your current installment is not due yet', - es='Tu cuota actual no está vencida', - slug='installment-is-not-due'), - code=400) + raise ValidationException( + translation( + request.user.language, + en="Your current installment is not due yet", + es="Tu cuota actual no está vencida", + slug="installment-is-not-due", + ), + code=400, + ) tasks.charge_plan_financing.delay(plan_financing_id) - return Response({'status': 'loading'}, status=status.HTTP_202_ACCEPTED) + return Response({"status": "loading"}, status=status.HTTP_202_ACCEPTED) class AcademySubscriptionView(APIView): - extensions = APIViewExtensions(sort='-id', paginate=True) + extensions = APIViewExtensions(sort="-id", paginate=True) - @capable_of('read_subscription') + @capable_of("read_subscription") def get(self, request, subscription_id=None): handler = self.extensions(request) lang = get_user_language(request) @@ -896,35 +951,38 @@ def get(self, request, subscription_id=None): now = timezone.now() if subscription_id: - item = Subscription.objects.filter(Q(valid_until__gte=now) | Q(valid_until=None), - id=subscription_id).exclude(status='CANCELLED').exclude( - status='DEPRECATED').exclude(status='PAYMENT_ISSUE').first() + item = ( + Subscription.objects.filter(Q(valid_until__gte=now) | Q(valid_until=None), id=subscription_id) + .exclude(status="CANCELLED") + .exclude(status="DEPRECATED") + .exclude(status="PAYMENT_ISSUE") + .first() + ) if not item: - raise ValidationException(translation(lang, - en='Subscription not found', - es='No existe el suscripción', - slug='not-found'), - code=404) + raise ValidationException( + translation(lang, en="Subscription not found", es="No existe el suscripción", slug="not-found"), + code=404, + ) serializer = GetSubscriptionSerializer(item, many=False) return handler.response(serializer.data) items = Subscription.objects.filter(Q(valid_until__gte=now) | Q(valid_until=None)) - if status := request.GET.get('status'): - items = items.filter(status__in=status.split(',')) + if status := request.GET.get("status"): + items = items.filter(status__in=status.split(",")) else: - items = items.exclude(status='CANCELLED').exclude(status='DEPRECATED').exclude(status='PAYMENT_ISSUE') + items = items.exclude(status="CANCELLED").exclude(status="DEPRECATED").exclude(status="PAYMENT_ISSUE") - if invoice_ids := request.GET.get('invoice_ids'): - items = items.filter(invoices__id__in=invoice_ids.split(',')) + if invoice_ids := request.GET.get("invoice_ids"): + items = items.filter(invoices__id__in=invoice_ids.split(",")) - if service_slugs := request.GET.get('service_slugs'): - items = items.filter(services__slug__in=service_slugs.split(',')) + if service_slugs := 
request.GET.get("service_slugs"): + items = items.filter(services__slug__in=service_slugs.split(",")) - if plan_slugs := request.GET.get('plan_slugs'): - items = items.filter(plans__slug__in=plan_slugs.split(',')) + if plan_slugs := request.GET.get("plan_slugs"): + items = items.filter(plans__slug__in=plan_slugs.split(",")) items = handler.queryset(items) serializer = GetSubscriptionSerializer(items, many=True) @@ -933,7 +991,7 @@ def get(self, request, subscription_id=None): class MeInvoiceView(APIView): - extensions = APIViewExtensions(sort='-id', paginate=True) + extensions = APIViewExtensions(sort="-id", paginate=True) def get(self, request, invoice_id=None): handler = self.extensions(request) @@ -943,19 +1001,17 @@ def get(self, request, invoice_id=None): item = Invoice.objects.filter(id=invoice_id, user=request.user).first() if not item: - raise ValidationException(translation(lang, - en='Invoice not found', - es='La factura no existe', - slug='not-found'), - code=404) + raise ValidationException( + translation(lang, en="Invoice not found", es="La factura no existe", slug="not-found"), code=404 + ) serializer = GetInvoiceSerializer(item, many=True) return handler.response(serializer.data) items = Invoice.objects.filter(user=request.user) - if status := request.GET.get('status'): - items = items.filter(status__in=status.split(',')) + if status := request.GET.get("status"): + items = items.filter(status__in=status.split(",")) items = handler.queryset(items) serializer = GetInvoiceSmallSerializer(items, many=True) @@ -964,9 +1020,9 @@ def get(self, request, invoice_id=None): class AcademyInvoiceView(APIView): - extensions = APIViewExtensions(sort='-id', paginate=True) + extensions = APIViewExtensions(sort="-id", paginate=True) - @capable_of('read_invoice') + @capable_of("read_invoice") def get(self, request, invoice_id=None, academy_id=None): handler = self.extensions(request) lang = get_user_language(request) @@ -975,19 +1031,17 @@ def get(self, request, invoice_id=None, academy_id=None): item = Invoice.objects.filter(id=invoice_id, user=request.user, academy__id=academy_id).first() if not item: - raise ValidationException(translation(lang, - en='Invoice not found', - es='La factura no existe', - slug='not-found'), - code=404) + raise ValidationException( + translation(lang, en="Invoice not found", es="La factura no existe", slug="not-found"), code=404 + ) serializer = GetInvoiceSerializer(item, many=False) return handler.response(serializer.data) items = Invoice.objects.filter(user=request.user, academy__id=academy_id) - if status := request.GET.get('status'): - items = items.filter(status__in=status.split(',')) + if status := request.GET.get("status"): + items = items.filter(status__in=status.split(",")) items = handler.queryset(items) serializer = GetInvoiceSmallSerializer(items, many=True) @@ -996,7 +1050,7 @@ def get(self, request, invoice_id=None, academy_id=None): class CardView(APIView): - extensions = APIViewExtensions(sort='-id', paginate=True) + extensions = APIViewExtensions(sort="-id", paginate=True) def post(self, request): lang = get_user_language(request) @@ -1005,18 +1059,22 @@ def post(self, request): s.set_language(lang) s.add_contact(request.user) - token = request.data.get('token') - card_number = request.data.get('card_number') - exp_month = request.data.get('exp_month') - exp_year = request.data.get('exp_year') - cvc = request.data.get('cvc') + token = request.data.get("token") + card_number = request.data.get("card_number") + exp_month = 
request.data.get("exp_month") + exp_year = request.data.get("exp_year") + cvc = request.data.get("cvc") if not ((card_number and exp_month and exp_year and cvc) or token): - raise ValidationException(translation(lang, - en='Missing card information', - es='Falta la información de la tarjeta', - slug='missing-card-information'), - code=404) + raise ValidationException( + translation( + lang, + en="Missing card information", + es="Falta la información de la tarjeta", + slug="missing-card-information", + ), + code=404, + ) try: if not token: @@ -1033,7 +1091,7 @@ def post(self, request): except Exception as e: raise ValidationException(str(e), code=400) - return Response({'status': 'ok'}) + return Response({"status": "ok"}) class ConsumeView(APIView): @@ -1043,22 +1101,22 @@ def put(self, request, service_slug, hash=None): session = ConsumptionSession.get_session(request) if session: - return Response({'status': 'ok'}, status=status.HTTP_200_OK) + return Response({"status": "ok"}, status=status.HTTP_200_OK) consumable = Consumable.get(user=request.user, lang=lang, service=service_slug) if consumable is None: raise PaymentException( - translation(lang, en='Insuficient credits', es='Créditos insuficientes', slug='insufficient-credits')) + translation(lang, en="Insuficient credits", es="Créditos insuficientes", slug="insufficient-credits") + ) session_duration = consumable.service_item.service.session_duration or timedelta(minutes=1) - session = ConsumptionSession.build_session(request, - consumable, - session_duration, - operation_code='unsafe-consume-service-set') + session = ConsumptionSession.build_session( + request, consumable, session_duration, operation_code="unsafe-consume-service-set" + ) session.will_consume(1) - return Response({'status': 'ok'}, status=status.HTTP_201_CREATED) + return Response({"status": "ok"}, status=status.HTTP_201_CREATED) class CancelConsumptionView(APIView): @@ -1066,34 +1124,37 @@ class CancelConsumptionView(APIView): def put(self, request, service_slug, hash=None): lang = get_user_language(request) - session = ConsumptionSession.objects.filter( - consumable__user=request.user, - consumable__service_item__service__type=Service.Type.VOID).exclude(status='CANCELLED').first() + session = ( + ConsumptionSession.objects.filter( + consumable__user=request.user, consumable__service_item__service__type=Service.Type.VOID + ) + .exclude(status="CANCELLED") + .first() + ) if session is None: - raise ValidationException(translation(lang, - en='Session not found', - es='Sesión no encontrada', - slug='session-not-found'), - code=status.HTTP_404_NOT_FOUND) + raise ValidationException( + translation(lang, en="Session not found", es="Sesión no encontrada", slug="session-not-found"), + code=status.HTTP_404_NOT_FOUND, + ) how_many = session.how_many consumable = session.consumable reimburse_service_units.send_robust(instance=consumable, sender=consumable.__class__, how_many=how_many) - return Response({'status': 'reversed'}, status=status.HTTP_200_OK) + return Response({"status": "reversed"}, status=status.HTTP_200_OK) class PlanOfferView(APIView): permission_classes = [AllowAny] - extensions = APIViewExtensions(cache=PlanOfferCache, sort='-id', paginate=True) + extensions = APIViewExtensions(cache=PlanOfferCache, sort="-id", paginate=True) def get_lookup(self, key, value): args = () kwargs = {} - slug_key = f'{key}__slug__in' - pk_key = f'{key}__id__in' + slug_key = f"{key}__slug__in" + pk_key = f"{key}__id__in" - for v in value.split(','): + for v in value.split(","): if 
slug_key not in kwargs and not v.isnumeric(): kwargs[slug_key] = [] @@ -1107,7 +1168,7 @@ def get_lookup(self, key, value): kwargs[slug_key].append(v) if len(kwargs) > 1: - args = (Q(**{slug_key: kwargs[slug_key]}) | Q(**{pk_key: kwargs[pk_key]}), ) + args = (Q(**{slug_key: kwargs[slug_key]}) | Q(**{pk_key: kwargs[pk_key]}),) kwargs = {} return args, kwargs @@ -1125,12 +1186,12 @@ def get(self, request): # do no show the bags of type preview they are build items = PlanOffer.objects.filter(Q(expires_at=None) | Q(expires_at__gt=utc_now)) - if suggested_plan := request.GET.get('suggested_plan'): - args, kwargs = self.get_lookup('suggested_plan', suggested_plan) + if suggested_plan := request.GET.get("suggested_plan"): + args, kwargs = self.get_lookup("suggested_plan", suggested_plan) items = items.filter(*args, **kwargs) - if original_plan := request.GET.get('original_plan'): - args, kwargs = self.get_lookup('original_plan', original_plan) + if original_plan := request.GET.get("original_plan"): + args, kwargs = self.get_lookup("original_plan", original_plan) items = items.filter(*args, **kwargs) items = items.distinct() @@ -1144,32 +1205,37 @@ def get(self, request): class CouponBaseView(APIView): def get_coupons(self) -> list[Coupon]: - plan_pk: str = self.request.GET.get('plan') + plan_pk: str = self.request.GET.get("plan") if not plan_pk: - raise ValidationException(translation(get_user_language(self.request), - en='Missing plan in query string', - es='Falta el plan en la consulta', - slug='missing-plan'), - code=404) + raise ValidationException( + translation( + get_user_language(self.request), + en="Missing plan in query string", + es="Falta el plan en la consulta", + slug="missing-plan", + ), + code=404, + ) extra = {} if plan_pk.isnumeric(): - extra['id'] = int(plan_pk) + extra["id"] = int(plan_pk) else: - extra['slug'] = plan_pk + extra["slug"] = plan_pk plan = Plan.objects.filter(**extra).first() if not plan: - raise ValidationException(translation(get_user_language(self.request), - en='Plan not found', - es='El plan no existe', - slug='plan-not-found'), - code=404) + raise ValidationException( + translation( + get_user_language(self.request), en="Plan not found", es="El plan no existe", slug="plan-not-found" + ), + code=404, + ) - coupon_codes = self.request.GET.get('coupons', '') + coupon_codes = self.request.GET.get("coupons", "") if coupon_codes: - coupon_codes = coupon_codes.split(',') + coupon_codes = coupon_codes.split(",") else: coupon_codes = [] @@ -1195,45 +1261,49 @@ def put(self, request, bag_id): # do no show the bags of type preview they are build client = None if IS_DJANGO_REDIS: - client = get_redis_connection('default') + client = get_redis_connection("default") try: - with Lock(client, f'lock:bag:user-{request.user.email}', timeout=30, blocking_timeout=30): - bag = Bag.objects.filter(id=bag_id, user=request.user, status='CHECKING', type__in=['BAG', - 'PREVIEW']).first() + with Lock(client, f"lock:bag:user-{request.user.email}", timeout=30, blocking_timeout=30): + bag = Bag.objects.filter( + id=bag_id, user=request.user, status="CHECKING", type__in=["BAG", "PREVIEW"] + ).first() if bag is None: - raise ValidationException(translation(lang, - en='Bag not found', - es='Bolsa no encontrada', - slug='bag-not-found'), - code=status.HTTP_404_NOT_FOUND) + raise ValidationException( + translation(lang, en="Bag not found", es="Bolsa no encontrada", slug="bag-not-found"), + code=status.HTTP_404_NOT_FOUND, + ) bag.coupons.set(coupons) except LockError: - raise 
ValidationException(translation(lang, - en='Timeout reached, operation timed out.', - es='Tiempo de espera alcanzado, operación agotada.', - slug='timeout'), - code=408) + raise ValidationException( + translation( + lang, + en="Timeout reached, operation timed out.", + es="Tiempo de espera alcanzado, operación agotada.", + slug="timeout", + ), + code=408, + ) serializer = GetBagSerializer(bag, many=False) return Response(serializer.data, status=status.HTTP_200_OK) class BagView(APIView): - extensions = APIViewExtensions(sort='-id', paginate=True) + extensions = APIViewExtensions(sort="-id", paginate=True) def get(self, request): handler = self.extensions(request) # do no show the bags of type preview they are build - items = Bag.objects.filter(user=request.user, type='BAG') + items = Bag.objects.filter(user=request.user, type="BAG") - if status := request.GET.get('status'): - items = items.filter(status__in=status.split(',')) + if status := request.GET.get("status"): + items = items.filter(status__in=status.split(",")) else: - items = items.filter(status='CHECKING') + items = items.filter(status="CHECKING") items = handler.queryset(items) serializer = GetBagSerializer(items, many=True) @@ -1247,41 +1317,53 @@ def put(self, request): s.set_language(lang) s.add_contact(request.user) - if 'coupons' in request.data and not isinstance(request.data['coupons'], list): - raise ValidationException(translation(lang, - en='Coupons must be a list of strings', - es='Cupones debe ser una lista de cadenas', - slug='invalid-coupons'), - code=400) - - if 'coupons' in request.data and len(request.data['coupons']) > (max := max_coupons_allowed()): - raise ValidationException(translation(lang, - en=f'Too many coupons (max {max})', - es=f'Demasiados cupones (max {max})', - slug='too-many-coupons'), - code=400) + if "coupons" in request.data and not isinstance(request.data["coupons"], list): + raise ValidationException( + translation( + lang, + en="Coupons must be a list of strings", + es="Cupones debe ser una lista de cadenas", + slug="invalid-coupons", + ), + code=400, + ) + + if "coupons" in request.data and len(request.data["coupons"]) > (max := max_coupons_allowed()): + raise ValidationException( + translation( + lang, + en=f"Too many coupons (max {max})", + es=f"Demasiados cupones (max {max})", + slug="too-many-coupons", + ), + code=400, + ) # do no show the bags of type preview they are build client = None if IS_DJANGO_REDIS: - client = get_redis_connection('default') + client = get_redis_connection("default") try: - with Lock(client, f'lock:bag:user-{request.user.email}', timeout=30, blocking_timeout=30): - bag, _ = Bag.objects.get_or_create(user=request.user, status='CHECKING', type='BAG') + with Lock(client, f"lock:bag:user-{request.user.email}", timeout=30, blocking_timeout=30): + bag, _ = Bag.objects.get_or_create(user=request.user, status="CHECKING", type="BAG") except LockError: - raise ValidationException(translation(lang, - en='Timeout reached, operation timed out.', - es='Tiempo de espera alcanzado, operación agotada.', - slug='timeout'), - code=408) + raise ValidationException( + translation( + lang, + en="Timeout reached, operation timed out.", + es="Tiempo de espera alcanzado, operación agotada.", + slug="timeout", + ), + code=408, + ) add_items_to_bag(request, bag, lang) plan = bag.plans.first() if plan: - coupons = get_available_coupons(plan, request.data.get('coupons', [])) + coupons = get_available_coupons(plan, request.data.get("coupons", [])) bag.coupons.set(coupons) # 
actions.check_dependencies_in_bag(bag, lang) @@ -1291,71 +1373,71 @@ def put(self, request): def delete(self, request): # do no show the bags of type preview they are build - Bag.objects.filter(user=request.user, status='CHECKING', type='BAG').delete() + Bag.objects.filter(user=request.user, status="CHECKING", type="BAG").delete() return Response(status=204) class CheckingView(APIView): - extensions = APIViewExtensions(sort='-id', paginate=True) + extensions = APIViewExtensions(sort="-id", paginate=True) def put(self, request): - bag_type = request.data.get('type', 'BAG').upper() + bag_type = request.data.get("type", "BAG").upper() created = False lang = get_user_language(request) client = None if IS_DJANGO_REDIS: - client = get_redis_connection('default') + client = get_redis_connection("default") try: # the lock must wrap the transaction - with Lock(client, f'lock:bag:user-{request.user.email}', timeout=30, blocking_timeout=30): + with Lock(client, f"lock:bag:user-{request.user.email}", timeout=30, blocking_timeout=30): with transaction.atomic(): sid = transaction.savepoint() try: - if bag_type == 'BAG' and not (bag := Bag.objects.filter( - user=request.user, status='CHECKING', type=bag_type).first()): - raise ValidationException(translation(lang, - en='Bag not found', - es='Bolsa no encontrada', - slug='not-found'), - code=404) - if bag_type == 'PREVIEW': - - academy = request.data.get('academy') + if bag_type == "BAG" and not ( + bag := Bag.objects.filter(user=request.user, status="CHECKING", type=bag_type).first() + ): + raise ValidationException( + translation(lang, en="Bag not found", es="Bolsa no encontrada", slug="not-found"), + code=404, + ) + if bag_type == "PREVIEW": + + academy = request.data.get("academy") kwargs = {} if academy and (isinstance(academy, int) or academy.isnumeric()): - kwargs['id'] = int(academy) + kwargs["id"] = int(academy) else: - kwargs['slug'] = academy + kwargs["slug"] = academy academy = Academy.objects.filter(main_currency__isnull=False, **kwargs).first() if not academy: - cohort = request.data.get('cohort') + cohort = request.data.get("cohort") kwargs = {} if cohort and (isinstance(cohort, int) or cohort.isnumeric()): - kwargs['id'] = int(cohort) + kwargs["id"] = int(cohort) else: - kwargs['slug'] = cohort + kwargs["slug"] = cohort cohort = Cohort.objects.filter(academy__main_currency__isnull=False, **kwargs).first() if cohort: academy = cohort.academy - request.data['cohort'] = cohort.id + request.data["cohort"] = cohort.id - if not academy and (plans := request.data.get('plans')) and len(plans) == 1: + if not academy and (plans := request.data.get("plans")) and len(plans) == 1: kwargs = {} pk = plans[0] if isinstance(pk, int): - kwargs['id'] = int(pk) + kwargs["id"] = int(pk) else: - kwargs['slug'] = pk + kwargs["slug"] = pk plan = Plan.objects.filter(owner__main_currency__isnull=False, **kwargs).first() @@ -1363,39 +1445,53 @@ def put(self, request): academy = plan.owner if not academy: - raise ValidationException(translation( - lang, - en='Academy not found or not configured properly', - es='Academia no encontrada o no configurada correctamente', - slug='not-found'), - code=404) - - if 'coupons' in request.data and not isinstance(request.data['coupons'], list): - raise ValidationException(translation(lang, - en='Coupons must be a list of strings', - es='Cupones debe ser una lista de cadenas', - slug='invalid-coupons'), - code=400) - - if 'coupons' in request.data and len(request.data['coupons']) > (max := - max_coupons_allowed()): - raise 
ValidationException(translation(lang, - en=f'Too many coupons (max {max})', - es=f'Demasiados cupones (max {max})', - slug='too-many-coupons'), - code=400) - - bag, created = Bag.objects.get_or_create(user=request.user, - status='CHECKING', - type=bag_type, - academy=academy, - currency=academy.main_currency) + raise ValidationException( + translation( + lang, + en="Academy not found or not configured properly", + es="Academia no encontrada o no configurada correctamente", + slug="not-found", + ), + code=404, + ) + + if "coupons" in request.data and not isinstance(request.data["coupons"], list): + raise ValidationException( + translation( + lang, + en="Coupons must be a list of strings", + es="Cupones debe ser una lista de cadenas", + slug="invalid-coupons", + ), + code=400, + ) + + if "coupons" in request.data and len(request.data["coupons"]) > ( + max := max_coupons_allowed() + ): + raise ValidationException( + translation( + lang, + en=f"Too many coupons (max {max})", + es=f"Demasiados cupones (max {max})", + slug="too-many-coupons", + ), + code=400, + ) + + bag, created = Bag.objects.get_or_create( + user=request.user, + status="CHECKING", + type=bag_type, + academy=academy, + currency=academy.main_currency, + ) add_items_to_bag(request, bag, lang) plan = bag.plans.first() if plan and bag.coupons.count() == 0: - coupons = get_available_coupons(plan, request.data.get('coupons', [])) + coupons = get_available_coupons(plan, request.data.get("coupons", [])) bag.coupons.set(coupons) # actions.check_dependencies_in_bag(bag, lang) @@ -1404,172 +1500,212 @@ def put(self, request): bag.token = Token.generate_key() bag.expires_at = utc_now + timedelta(minutes=60) - plan = bag.plans.filter(status='CHECKING').first() + plan = bag.plans.filter(status="CHECKING").first() - #FIXME: the service items should be bought without renewals + # FIXME: the service items should be bought without renewals if not plan or plan.is_renewable: - bag.amount_per_month, bag.amount_per_quarter, bag.amount_per_half, bag.amount_per_year = \ + bag.amount_per_month, bag.amount_per_quarter, bag.amount_per_half, bag.amount_per_year = ( get_amount(bag, bag.academy.main_currency, lang) + ) else: actions.ask_to_add_plan_and_charge_it_in_the_bag(bag, request.user, lang) - amount = bag.amount_per_month or bag.amount_per_quarter or bag.amount_per_half or bag.amount_per_year + amount = ( + bag.amount_per_month or bag.amount_per_quarter or bag.amount_per_half or bag.amount_per_year + ) plans = bag.plans.all() if not amount and plans.filter(financing_options__id__gte=1): amount = 1 if amount == 0 and PlanFinancing.objects.filter(plans__in=plans).count(): - raise ValidationException(translation(lang, - en='Your free trial was already took', - es='Tu prueba gratuita ya fue tomada', - slug='your-free-trial-was-already-took'), - code=400) + raise ValidationException( + translation( + lang, + en="Your free trial was already took", + es="Tu prueba gratuita ya fue tomada", + slug="your-free-trial-was-already-took", + ), + code=400, + ) bag.save() transaction.savepoint_commit(sid) serializer = GetBagSerializer(bag, many=False) - return Response(serializer.data, - status=status.HTTP_201_CREATED if created else status.HTTP_200_OK) + return Response( + serializer.data, status=status.HTTP_201_CREATED if created else status.HTTP_200_OK + ) except Exception as e: transaction.savepoint_rollback(sid) raise e except LockError: - raise ValidationException(translation(lang, - en='Timeout reached, operation timed out.', - es='Tiempo de espera alcanzado, 
operación agotada.', - slug='timeout'), - code=408) + raise ValidationException( + translation( + lang, + en="Timeout reached, operation timed out.", + es="Tiempo de espera alcanzado, operación agotada.", + slug="timeout", + ), + code=408, + ) class ConsumableCheckoutView(APIView): - extensions = APIViewExtensions(sort='-id', paginate=True) + extensions = APIViewExtensions(sort="-id", paginate=True) def post(self, request): lang = get_user_language(request) - service = request.data.get('service') - total_items = request.data.get('how_many') - academy = request.data.get('academy') + service = request.data.get("service") + total_items = request.data.get("how_many") + academy = request.data.get("academy") if not service: - raise ValidationException(translation(lang, - en='Service is required', - es='El servicio es requerido', - slug='service-is-required'), - code=400) + raise ValidationException( + translation(lang, en="Service is required", es="El servicio es requerido", slug="service-is-required"), + code=400, + ) query = {} if service and isinstance(service, int): - query['id'] = service + query["id"] = service elif service and isinstance(service, str): - query['slug'] = service + query["slug"] = service if not query or not (service := Service.objects.filter(**query).first()): raise ValidationException( - translation(lang, en='Service not found', es='El servicio no fue encontrado', slug='service-not-found')) + translation(lang, en="Service not found", es="El servicio no fue encontrado", slug="service-not-found") + ) if not total_items: - raise ValidationException(translation(lang, - en='How many is required', - es='La cantidad es requerida', - slug='how-many-is-required'), - code=400) + raise ValidationException( + translation( + lang, en="How many is required", es="La cantidad es requerida", slug="how-many-is-required" + ), + code=400, + ) if not (isinstance(total_items, int) or isinstance(total_items, float)) or total_items <= 0: - raise ValidationException(translation(lang, - en='How many is not valid', - es='La cantidad de paquetes no es válida', - slug='how-many-is-not-valid'), - code=400) + raise ValidationException( + translation( + lang, + en="How many is not valid", + es="La cantidad de paquetes no es válida", + slug="how-many-is-not-valid", + ), + code=400, + ) if not academy: - raise ValidationException(translation(lang, - en='Academy is required', - es='La academia es requerida', - slug='academy-is-required'), - code=400) + raise ValidationException( + translation(lang, en="Academy is required", es="La academia es requerida", slug="academy-is-required"), + code=400, + ) if not Academy.objects.filter(id=academy).exists(): raise ValidationException( - translation(lang, en='Academy not found', es='La academia no fue encontrada', slug='academy-not-found')) + translation(lang, en="Academy not found", es="La academia no fue encontrada", slug="academy-not-found") + ) - mentorship_service_set = request.data.get('mentorship_service_set') - event_type_set = request.data.get('event_type_set') + mentorship_service_set = request.data.get("mentorship_service_set") + event_type_set = request.data.get("event_type_set") if [mentorship_service_set, event_type_set].count(None) != 1: - raise ValidationException(translation( - lang, - en='Just can pass Mentorship service set or event type set is required, not both', - es='Solo puede pasar Mentoría o tipo de evento, no ambos', - slug='mentorship-service-set-or-event-type-set-is-required'), - code=400) - - if service.type == 'MENTORSHIP_SERVICE_SET' 
and not mentorship_service_set: - raise ValidationException(translation( - lang, - en='This service is type mentorship service set, but you provided other type of resource', - es='Este servicio es de tipo mentoría, pero usted proporcionó otro tipo de recurso', - slug='bad-service-type-mentorship-service-set'), - code=400) - - elif service.type == 'EVENT_TYPE_SET' and not event_type_set: - raise ValidationException(translation( - lang, - en='This service is type event type set, but you provided other type of resource', - es='Este servicio es de tipo tipo de evento, pero usted proporcionó otro tipo de recurso', - slug='bad-service-type-event-type-set'), - code=400) - - elif service.type not in ['MENTORSHIP_SERVICE_SET', 'EVENT_TYPE_SET']: - raise ValidationException(translation(lang, - en='This service can\'t be bought here yet', - es='Este servicio no se puede comprar aquí todavía', - slug='service-type-no-implemented'), - code=400) + raise ValidationException( + translation( + lang, + en="Just can pass Mentorship service set or event type set is required, not both", + es="Solo puede pasar Mentoría o tipo de evento, no ambos", + slug="mentorship-service-set-or-event-type-set-is-required", + ), + code=400, + ) + + if service.type == "MENTORSHIP_SERVICE_SET" and not mentorship_service_set: + raise ValidationException( + translation( + lang, + en="This service is type mentorship service set, but you provided other type of resource", + es="Este servicio es de tipo mentoría, pero usted proporcionó otro tipo de recurso", + slug="bad-service-type-mentorship-service-set", + ), + code=400, + ) + + elif service.type == "EVENT_TYPE_SET" and not event_type_set: + raise ValidationException( + translation( + lang, + en="This service is type event type set, but you provided other type of resource", + es="Este servicio es de tipo tipo de evento, pero usted proporcionó otro tipo de recurso", + slug="bad-service-type-event-type-set", + ), + code=400, + ) + + elif service.type not in ["MENTORSHIP_SERVICE_SET", "EVENT_TYPE_SET"]: + raise ValidationException( + translation( + lang, + en="This service can't be bought here yet", + es="Este servicio no se puede comprar aquí todavía", + slug="service-type-no-implemented", + ), + code=400, + ) kwargs = {} if mentorship_service_set: - kwargs['available_mentorship_service_sets'] = mentorship_service_set + kwargs["available_mentorship_service_sets"] = mentorship_service_set elif event_type_set: - kwargs['available_event_type_sets'] = event_type_set + kwargs["available_event_type_sets"] = event_type_set academy_service = AcademyService.objects.filter(academy_id=academy, service=service, **kwargs).first() if not academy_service: - raise ValidationException(translation(lang, - en='Academy service not found', - es='Servicio de academia no encontrado', - slug='academy-service-not-found'), - code=404) + raise ValidationException( + translation( + lang, + en="Academy service not found", + es="Servicio de academia no encontrado", + slug="academy-service-not-found", + ), + code=404, + ) currency = academy_service.currency if total_items > academy_service.max_items: - raise ValidationException(translation( - lang, - en=f'The amount of items is too high (max {academy_service.max_items})', - es=f'La cantidad de elementos es demasiado alta (máx {academy_service.max_items})', - slug='the-amount-of-items-is-too-high'), - code=400) + raise ValidationException( + translation( + lang, + en=f"The amount of items is too high (max {academy_service.max_items})", + es=f"La cantidad de 
elementos es demasiado alta (máx {academy_service.max_items})", + slug="the-amount-of-items-is-too-high", + ), + code=400, + ) amount = academy_service.get_discounted_price(total_items) if amount <= 0.5: - raise ValidationException(translation(lang, - en='The amount is too low', - es='El monto es muy bajo', - slug='the-amount-is-too-low'), - code=400) + raise ValidationException( + translation(lang, en="The amount is too low", es="El monto es muy bajo", slug="the-amount-is-too-low"), + code=400, + ) if amount > academy_service.max_amount: - raise ValidationException(translation(lang, - en=f'The amount is too high (max {academy_service.max_amount})', - es=f'El monto es demasiado alto (máx {academy_service.max_amount})', - slug='the-amount-is-too-high'), - code=400) + raise ValidationException( + translation( + lang, + en=f"The amount is too high (max {academy_service.max_amount})", + es=f"El monto es demasiado alto (máx {academy_service.max_amount})", + slug="the-amount-is-too-high", + ), + code=400, + ) s = None invoice = None @@ -1582,13 +1718,15 @@ def post(self, request): service_item, _ = ServiceItem.objects.get_or_create(service=service, how_many=total_items) # keeps this inside a transaction - bag = Bag(type='CHARGE', - status='PAID', - was_delivered=True, - user=request.user, - currency=currency, - academy_id=academy, - is_recurrent=False) + bag = Bag( + type="CHARGE", + status="PAID", + was_delivered=True, + user=request.user, + currency=currency, + academy_id=academy, + is_recurrent=False, + ) bag.save() @@ -1601,18 +1739,20 @@ def post(self, request): bag.service_items.add(service_item) if mentorship_service_set: - description = f'Can join to {int(total_items)} mentorships' + description = f"Can join to {int(total_items)} mentorships" else: - description = f'Can join to {int(total_items)} events' + description = f"Can join to {int(total_items)} events" invoice = s.pay(request.user, bag, amount, currency=bag.currency.code, description=description) - consumable = Consumable(service_item=service_item, - user=request.user, - how_many=total_items, - mentorship_service_set=mentorship_service_set, - event_type_set=event_type_set) + consumable = Consumable( + service_item=service_item, + user=request.user, + how_many=total_items, + mentorship_service_set=mentorship_service_set, + event_type_set=event_type_set, + ) consumable.save() @@ -1630,16 +1770,16 @@ def post(self, request): class PayView(APIView): - extensions = APIViewExtensions(sort='-id', paginate=True) + extensions = APIViewExtensions(sort="-id", paginate=True) def post(self, request): utc_now = timezone.now() lang = get_user_language(request) - conversion_info = request.data['conversion_info'] if 'conversion_info' in request.data else None + conversion_info = request.data["conversion_info"] if "conversion_info" in request.data else None validate_conversion_info(conversion_info, lang) - conversion_info = str(conversion_info) if conversion_info is not None else '' + conversion_info = str(conversion_info) if conversion_info is not None else "" with transaction.atomic(): sid = transaction.savepoint() @@ -1648,98 +1788,135 @@ def post(self, request): reputation, _ = FinancialReputation.objects.get_or_create(user=request.user) current_reputation = reputation.get_reputation() - if current_reputation == 'FRAUD' or current_reputation == 'BAD': - raise PaymentException(translation( - lang, - en='The payment could not be completed because you have a bad reputation on this platform', - es='No se pudo completar el pago porque tienes mala 
reputación en esta plataforma'), - slug='fraud-or-bad-reputation', - silent=True) + if current_reputation == "FRAUD" or current_reputation == "BAD": + raise PaymentException( + translation( + lang, + en="The payment could not be completed because you have a bad reputation on this platform", + es="No se pudo completar el pago porque tienes mala reputación en esta plataforma", + ), + slug="fraud-or-bad-reputation", + silent=True, + ) # do no show the bags of type preview they are build # type = request.data.get('type', 'BAG').upper() - token = request.data.get('token') + token = request.data.get("token") if not token: - raise ValidationException(translation(lang, - en='Invalid bag token', - es='El token de la bolsa es inválido', - slug='missing-token'), - code=404) - - recurrent = request.data.get('recurrent', False) - bag = Bag.objects.filter(user=request.user, - status='CHECKING', - token=token, - academy__main_currency__isnull=False, - expires_at__gte=utc_now).first() + raise ValidationException( + translation( + lang, en="Invalid bag token", es="El token de la bolsa es inválido", slug="missing-token" + ), + code=404, + ) + + recurrent = request.data.get("recurrent", False) + bag = Bag.objects.filter( + user=request.user, + status="CHECKING", + token=token, + academy__main_currency__isnull=False, + expires_at__gte=utc_now, + ).first() if not bag: - raise ValidationException(translation( - lang, - en='Bag not found, maybe you need to renew the checking', - es='Bolsa no encontrada, quizás necesitas renovar el checking', - slug='not-found-or-without-checking'), - code=404) + raise ValidationException( + translation( + lang, + en="Bag not found, maybe you need to renew the checking", + es="Bolsa no encontrada, quizás necesitas renovar el checking", + slug="not-found-or-without-checking", + ), + code=404, + ) if bag.service_items.count() == 0 and bag.plans.count() == 0: - raise ValidationException(translation(lang, - en='Bag is empty', - es='La bolsa esta vacía', - slug='bag-is-empty'), - code=400) + raise ValidationException( + translation(lang, en="Bag is empty", es="La bolsa esta vacía", slug="bag-is-empty"), code=400 + ) - how_many_installments = request.data.get('how_many_installments') - chosen_period = request.data.get('chosen_period', '').upper() + how_many_installments = request.data.get("how_many_installments") + chosen_period = request.data.get("chosen_period", "").upper() available_for_free_trial = False available_free = False if not how_many_installments and not chosen_period: - available_for_free_trial = (bag.amount_per_month == 0 and bag.amount_per_quarter == 0 - and bag.amount_per_half == 0 and bag.amount_per_year == 0) + available_for_free_trial = ( + bag.amount_per_month == 0 + and bag.amount_per_quarter == 0 + and bag.amount_per_half == 0 + and bag.amount_per_year == 0 + ) plan = bag.plans.first() available_for_free_trial = available_for_free_trial and ( - not plan.financing_options.filter().exists() if plan else False) + not plan.financing_options.filter().exists() if plan else False + ) available_free = available_for_free_trial and not plan.trial_duration available_for_free_trial = available_for_free_trial and plan.trial_duration - if (not available_for_free_trial and not available_free and not how_many_installments - and not chosen_period): - raise ValidationException(translation(lang, - en='Missing chosen period', - es='Falta el periodo elegido', - slug='missing-chosen-period'), - code=400) - - available_chosen_periods = ['MONTH', 'QUARTER', 'HALF', 'YEAR'] - if (not 
available_for_free_trial and not available_free and not how_many_installments - and chosen_period not in available_chosen_periods): - raise ValidationException(translation( - lang, - en=f"Invalid chosen period ({', '.join(available_chosen_periods)})", - es=f"Periodo elegido inválido ({', '.join(available_chosen_periods)})", - slug='invalid-chosen-period'), - code=400) - - if not available_for_free_trial and not available_free and not chosen_period and ( - not isinstance(how_many_installments, int) or how_many_installments <= 0): - raise ValidationException(translation( - lang, - en='how_many_installments must be a positive number greather than 0', - es='how_many_installments debe ser un número positivo mayor a 0', - slug='invalid-how-many-installments'), - code=400) - - if 'coupons' in request.data and not isinstance(request.data['coupons'], list): - raise ValidationException(translation(lang, - en='Coupons must be a list of strings', - es='Cupones debe ser una lista de cadenas', - slug='invalid-coupons'), - code=400) - - if (not available_for_free_trial and not available_free and not chosen_period - and how_many_installments): + if ( + not available_for_free_trial + and not available_free + and not how_many_installments + and not chosen_period + ): + raise ValidationException( + translation( + lang, + en="Missing chosen period", + es="Falta el periodo elegido", + slug="missing-chosen-period", + ), + code=400, + ) + + available_chosen_periods = ["MONTH", "QUARTER", "HALF", "YEAR"] + if ( + not available_for_free_trial + and not available_free + and not how_many_installments + and chosen_period not in available_chosen_periods + ): + raise ValidationException( + translation( + lang, + en=f"Invalid chosen period ({', '.join(available_chosen_periods)})", + es=f"Periodo elegido inválido ({', '.join(available_chosen_periods)})", + slug="invalid-chosen-period", + ), + code=400, + ) + + if ( + not available_for_free_trial + and not available_free + and not chosen_period + and (not isinstance(how_many_installments, int) or how_many_installments <= 0) + ): + raise ValidationException( + translation( + lang, + en="how_many_installments must be a positive number greather than 0", + es="how_many_installments debe ser un número positivo mayor a 0", + slug="invalid-how-many-installments", + ), + code=400, + ) + + if "coupons" in request.data and not isinstance(request.data["coupons"], list): + raise ValidationException( + translation( + lang, + en="Coupons must be a list of strings", + es="Cupones debe ser una lista de cadenas", + slug="invalid-coupons", + ), + code=400, + ) + + if not available_for_free_trial and not available_free and not chosen_period and how_many_installments: bag.how_many_installments = how_many_installments coupons = bag.coupons.none() @@ -1753,12 +1930,15 @@ def post(self, request): bag.monthly_price = option.monthly_price except Exception: - raise ValidationException(translation( - lang, - en='Bag bad configured, related to financing option', - es='La bolsa esta mal configurada, relacionado a la opción de financiamiento', - slug='invalid-bag-configured-by-installments'), - code=500) + raise ValidationException( + translation( + lang, + en="Bag bad configured, related to financing option", + es="La bolsa esta mal configurada, relacionado a la opción de financiamiento", + slug="invalid-bag-configured-by-installments", + ), + code=500, + ) elif not available_for_free_trial and not available_free: amount = get_amount_by_chosen_period(bag, chosen_period, lang) @@ -1769,22 +1949,32 @@ 
def post(self, request): amount = 0 if amount == 0 and Subscription.objects.filter(user=request.user, plans__in=bag.plans.all()).count(): - raise ValidationException(translation(lang, - en='Your free trial was already took', - es='Tu prueba gratuita ya fue tomada', - slug='your-free-trial-was-already-took'), - code=500) + raise ValidationException( + translation( + lang, + en="Your free trial was already took", + es="Tu prueba gratuita ya fue tomada", + slug="your-free-trial-was-already-took", + ), + code=500, + ) # actions.check_dependencies_in_bag(bag, lang) - if amount == 0 and not available_free and available_for_free_trial and not bag.plans.filter( - plan_offer_from__id__gte=1).exists(): + if ( + amount == 0 + and not available_free + and available_for_free_trial + and not bag.plans.filter(plan_offer_from__id__gte=1).exists() + ): raise ValidationException( translation( lang, - en='The plan was chosen does not have a pricing setup, it\'s not ready to be sold', - es='El plan elegido no tiene una configuracion de precios, no esta listo para venderse', - slug='the-plan-was-chosen-is-not-ready-too-be-sold')) + en="The plan was chosen does not have a pricing setup, it's not ready to be sold", + es="El plan elegido no tiene una configuracion de precios, no esta listo para venderse", + slug="the-plan-was-chosen-is-not-ready-too-be-sold", + ) + ) if amount >= 0.50: s = Stripe() @@ -1792,25 +1982,26 @@ def post(self, request): invoice = s.pay(request.user, bag, amount, currency=bag.currency.code) elif amount == 0: - invoice = Invoice(amount=0, - paid_at=utc_now, - user=request.user, - bag=bag, - academy=bag.academy, - status='FULFILLED', - currency=bag.academy.main_currency) + invoice = Invoice( + amount=0, + paid_at=utc_now, + user=request.user, + bag=bag, + academy=bag.academy, + status="FULFILLED", + currency=bag.academy.main_currency, + ) invoice.save() else: - raise ValidationException(translation(lang, - en='Amount is too low', - es='El monto es muy bajo', - slug='amount-is-too-low'), - code=500) - - bag.chosen_period = chosen_period or 'NO_SET' - bag.status = 'PAID' + raise ValidationException( + translation(lang, en="Amount is too low", es="El monto es muy bajo", slug="amount-is-too-low"), + code=500, + ) + + bag.chosen_period = chosen_period or "NO_SET" + bag.status = "PAID" bag.is_recurrent = recurrent bag.token = None bag.expires_at = None @@ -1832,7 +2023,7 @@ def post(self, request): if plan.owner: admissions_tasks.build_profile_academy.delay(plan.owner.id, bag.user.id) - if not plan.cohort_set or not (cohort := request.GET.get('selected_cohort')): + if not plan.cohort_set or not (cohort := request.GET.get("selected_cohort")): continue cohort = plan.cohort_set.cohorts.filter(slug=cohort).first() @@ -1846,14 +2037,16 @@ def post(self, request): serializer = GetInvoiceSerializer(invoice, many=False) - tasks_activity.add_activity.delay(request.user.id, - 'checkout_completed', - related_type='payments.Invoice', - related_id=serializer.instance.id) + tasks_activity.add_activity.delay( + request.user.id, + "checkout_completed", + related_type="payments.Invoice", + related_id=serializer.instance.id, + ) data = serializer.data serializer = GetCouponSerializer(coupons, many=True) - data['coupons'] = serializer.data + data["coupons"] = serializer.data return Response(data, status=201) @@ -1870,13 +2063,13 @@ def get(self, request): items = PaymentMethod.objects.all() lookup = {} - if 'academy_id' in self.request.GET: - academy_id = self.request.GET.get('academy_id') - 
lookup['academy__id__iexact'] = academy_id + if "academy_id" in self.request.GET: + academy_id = self.request.GET.get("academy_id") + lookup["academy__id__iexact"] = academy_id - if 'lang' in self.request.GET: - lang = self.request.GET.get('lang') - lookup['lang__iexact'] = lang + if "lang" in self.request.GET: + lang = self.request.GET.get("lang") + lookup["lang__iexact"] = lang items = items.filter(**lookup) diff --git a/breathecode/provisioning/actions.py b/breathecode/provisioning/actions.py index 6faddd06d..4175d724f 100644 --- a/breathecode/provisioning/actions.py +++ b/breathecode/provisioning/actions.py @@ -56,35 +56,38 @@ def get_provisioning_vendor(user_id, profile_academy, cohort): all_profiles = ProvisioningProfile.objects.filter(academy=academy) if all_profiles.count() == 0: raise Exception( - f'No provisioning vendors have been found for this academy {academy.name}, please speak with your program manager' + f"No provisioning vendors have been found for this academy {academy.name}, please speak with your program manager" ) for_me = all_profiles.filter(members__id=profile_academy.id, cohorts=None) if for_me.count() > 1: - vendors = [f'{p.vendor.name} in profile {p.id}' for p in for_me] + vendors = [f"{p.vendor.name} in profile {p.id}" for p in for_me] raise Exception( - 'More than one provisioning vendor found for your profile in this academy, please speak with your program manager: ' - + ','.join(vendors)) + "More than one provisioning vendor found for your profile in this academy, please speak with your program manager: " + + ",".join(vendors) + ) if for_me.count() == 1: p_profile = for_me.first() return p_profile.vendor for_my_cohort = all_profiles.filter(cohorts__id=cohort.id, members=None) if for_my_cohort.count() > 1: - vendors = [f'{p.vendor.name} in profile {p.id}' for p in for_my_cohort] + vendors = [f"{p.vendor.name} in profile {p.id}" for p in for_my_cohort] raise Exception( - 'More than one provisioning vendor found for your cohort, please speak with your program manager: ' + - ','.join(vendors)) + "More than one provisioning vendor found for your cohort, please speak with your program manager: " + + ",".join(vendors) + ) if for_my_cohort.count() == 1: p_profile = for_my_cohort.first() return p_profile.vendor entire_academy = all_profiles.filter(cohorts=None, members=None) if entire_academy.count() > 1: - vendors = [f'{p.vendor.name} in profile {p.id}' for p in entire_academy] + vendors = [f"{p.vendor.name} in profile {p.id}" for p in entire_academy] raise Exception( - 'More than one provisioning vendor found for the entire academy, please speak with your program manager: ' + - ','.join(vendors)) + "More than one provisioning vendor found for the entire academy, please speak with your program manager: " + + ",".join(vendors) + ) if entire_academy.count() == 1: p_profile = entire_academy.first() return p_profile.vendor @@ -96,35 +99,41 @@ def get_provisioning_vendor(user_id, profile_academy, cohort): def get_active_cohorts(user): now = timezone.now() - active = CohortUser.objects.filter(user=user, educational_status='ACTIVE', role='STUDENT') + active = CohortUser.objects.filter(user=user, educational_status="ACTIVE", role="STUDENT") # only cohorts that end cohorts_that_end = active.filter(never_ends=False) # also are withing calendar dates and STARTED or FINAL PROJECT - active_dates = cohorts_that_end.filter(cohort__kickoff_date__gte=now, - cohort__ending_date__lte=now, - cohort__stage__in=['STARTED', 'FINAL_PROJECT']) + active_dates = cohorts_that_end.filter( + 
cohort__kickoff_date__gte=now, cohort__ending_date__lte=now, cohort__stage__in=["STARTED", "FINAL_PROJECT"] + ) return active_dates -def create_container(user, task, fresh=False, lang='en'): +def create_container(user, task, fresh=False, lang="en"): cont = ProvisioningContainer.objects.filter(user=user, task_associated_slug=task.slug).first() if not fresh and cont is not None: raise ValidationException( - translation(en='There is another container already created for this assignment', - es='Hay otro contenedor ya creado para esta asignacion', - slug='duplicated-container')) + translation( + en="There is another container already created for this assignment", + es="Hay otro contenedor ya creado para esta asignacion", + slug="duplicated-container", + ) + ) # active_cohorts = get_active_cohorts(user) credentials = CredentialsGithub.objects.filter(user=user).first() if credentials is None: raise ValidationException( - translation(en='No github github credentials found, please connect your github account', - es='No se han encontrado credentials para github, por favor conecta tu cuenta de github', - slug='no-github-credentials')) + translation( + en="No github github credentials found, please connect your github account", + es="No se han encontrado credentials para github, por favor conecta tu cuenta de github", + slug="no-github-credentials", + ) + ) - #FIXME: the code belog have variables that are not defined, so, it never worked, uncomment it if you want to fix it + # FIXME: the code belog have variables that are not defined, so, it never worked, uncomment it if you want to fix it # gb = Github(token=credentials.token, host=provisioning_academy.vendor.api_url) # asset = Asset.objects.filter(slug=task.associated_slug).first() @@ -146,7 +155,7 @@ def iso_to_datetime(iso: str) -> datetime: self.bc.datetime.from_iso_string('2022-03-21T07:51:55.068Z') ``` """ - string = re.sub(r'Z$', '', iso) + string = re.sub(r"Z$", "", iso) date = datetime.fromisoformat(string) return timezone.make_aware(date) @@ -160,36 +169,37 @@ class GithubAcademyUserObject(TypedDict): def get_github_academy_user_logs(academy: Academy, username: str, limit: datetime) -> list[GithubAcademyUserObject]: ret = [] - logs = GithubAcademyUserLog.objects.filter(Q(valid_until__isnull=True) - | Q(valid_until__gte=limit - relativedelta(months=1, weeks=1)), - academy_user__username=username, - academy_user__academy=academy).order_by('created_at') + logs = GithubAcademyUserLog.objects.filter( + Q(valid_until__isnull=True) | Q(valid_until__gte=limit - relativedelta(months=1, weeks=1)), + academy_user__username=username, + academy_user__academy=academy, + ).order_by("created_at") for n in range(len(logs)): log = logs[n] if n != 0: - ret[n - 1]['ending_at'] = log.created_at + ret[n - 1]["ending_at"] = log.created_at obj = { - 'starting_at': log.created_at, - 'ending_at': limit, - 'storage_status': log.storage_status, - 'storage_action': log.storage_action, + "starting_at": log.created_at, + "ending_at": limit, + "storage_status": log.storage_status, + "storage_action": log.storage_action, } ret.append(obj) starts_limit = limit - relativedelta(months=1, weeks=1) - ret = [x for x in ret if x['ending_at'] < starts_limit] + ret = [x for x in ret if x["ending_at"] < starts_limit] - if len(ret) > 0 and ret[0]['storage_status'] == 'SYNCHED' and ret[0]['storage_action'] == 'DELETE': + if len(ret) > 0 and ret[0]["storage_status"] == "SYNCHED" and ret[0]["storage_action"] == "DELETE": ret = [ { - 'starting_at': logs[0].created_at - 
relativedelta(months=12), - 'ending_at': logs[0].created_at, - 'storage_status': log.storage_status, - 'storage_action': log.storage_action, + "starting_at": logs[0].created_at - relativedelta(months=12), + "ending_at": logs[0].created_at, + "storage_status": log.storage_status, + "storage_action": log.storage_action, }, *ret, ] @@ -210,16 +220,17 @@ class ActivityContext(TypedDict): def handle_pending_github_user(organization: str, username: str, starts: Optional[datetime] = None) -> list[Academy]: orgs = AcademyAuthSettings.objects.filter(github_username__iexact=organization) orgs = [ - x for x in orgs - if GithubAcademyUser.objects.filter(academy=x.academy, storage_action='ADD', storage_status='SYNCHED').count() + x + for x in orgs + if GithubAcademyUser.objects.filter(academy=x.academy, storage_action="ADD", storage_status="SYNCHED").count() ] if not orgs and organization: - logger.error(f'Organization {organization} not found') + logger.error(f"Organization {organization} not found") return [] if not orgs and organization is None: - logger.error('Organization not provided, in this case, all organizations will be used') + logger.error("Organization not provided, in this case, all organizations will be used") if not orgs: orgs = AcademyAuthSettings.objects.filter() @@ -237,11 +248,16 @@ def handle_pending_github_user(organization: str, username: str, starts: Optiona new_orgs = [] for org in orgs: - has_any_cohort_user = CohortUser.objects.filter( - Q(cohort__ending_date__lte=starts) | Q(cohort__never_ends=True), - cohort__kickoff_date__gte=starts, - cohort__academy__id=org.academy.id, - user__credentialsgithub__username=username).order_by('-created_at').exists() + has_any_cohort_user = ( + CohortUser.objects.filter( + Q(cohort__ending_date__lte=starts) | Q(cohort__never_ends=True), + cohort__kickoff_date__gte=starts, + cohort__academy__id=org.academy.id, + user__credentialsgithub__username=username, + ) + .order_by("-created_at") + .exists() + ) if has_any_cohort_user: new_orgs.append(org) @@ -250,17 +266,19 @@ def handle_pending_github_user(organization: str, username: str, starts: Optiona org = new_orgs for org in orgs: - pending, created = GithubAcademyUser.objects.get_or_create(username=username, - academy=org.academy, - user=user, - defaults={ - 'storage_status': 'PAYMENT_CONFLICT', - 'storage_action': 'IGNORE', - }) - - if not created and pending.storage_action not in ['ADD', 'DELETE']: - pending.storage_status = 'PAYMENT_CONFLICT' - pending.storage_action = 'IGNORE' + pending, created = GithubAcademyUser.objects.get_or_create( + username=username, + academy=org.academy, + user=user, + defaults={ + "storage_status": "PAYMENT_CONFLICT", + "storage_action": "IGNORE", + }, + ) + + if not created and pending.storage_action not in ["ADD", "DELETE"]: + pending.storage_status = "PAYMENT_CONFLICT" + pending.storage_action = "IGNORE" pending.save() return [org.academy for org in orgs] @@ -268,7 +286,7 @@ def handle_pending_github_user(organization: str, username: str, starts: Optiona def get_multiplier() -> float: try: - x = os.getenv('PROVISIONING_MULTIPLIER', '1.3').replace(',', '.') + x = os.getenv("PROVISIONING_MULTIPLIER", "1.3").replace(",", ".") x = float(x) except Exception: x = 1.3 @@ -277,38 +295,38 @@ def get_multiplier() -> float: def add_codespaces_activity(context: ActivityContext, field: dict, position: int) -> None: - if isinstance(field['Username'], float): - field['Username'] = '' + if isinstance(field["Username"], float): + field["Username"] = "" - 
github_academy_user_log = context['github_academy_user_logs'].get(field['Username'], None) + github_academy_user_log = context["github_academy_user_logs"].get(field["Username"], None) not_found = False academies = [] if github_academy_user_log is None: # make a function that calculate the user activity in the academies by percentage github_academy_user_log = GithubAcademyUserLog.objects.filter( - Q(valid_until__isnull=True) - | Q(valid_until__gte=context['limit'] - relativedelta(months=1, weeks=1)), - created_at__lte=context['limit'], - academy_user__username=field['Username'], - storage_status='SYNCHED', - storage_action='ADD').order_by('-created_at') + Q(valid_until__isnull=True) | Q(valid_until__gte=context["limit"] - relativedelta(months=1, weeks=1)), + created_at__lte=context["limit"], + academy_user__username=field["Username"], + storage_status="SYNCHED", + storage_action="ADD", + ).order_by("-created_at") - context['github_academy_user_logs'][field['Username']] = github_academy_user_log + context["github_academy_user_logs"][field["Username"]] = github_academy_user_log if github_academy_user_log: academies = [x.academy_user.academy for x in github_academy_user_log] if not academies: not_found = True - github_academy_users = GithubAcademyUser.objects.filter(username=field['Username'], - storage_status='PAYMENT_CONFLICT', - storage_action='IGNORE') + github_academy_users = GithubAcademyUser.objects.filter( + username=field["Username"], storage_status="PAYMENT_CONFLICT", storage_action="IGNORE" + ) academies = [x.academy for x in github_academy_users] - if not academies and not GithubAcademyUser.objects.filter(username=field['Username']).count(): - academies = handle_pending_github_user(field['Owner'], field['Username']) + if not academies and not GithubAcademyUser.objects.filter(username=field["Username"]).count(): + academies = handle_pending_github_user(field["Owner"], field["Username"]) if not not_found and academies: academies = random.choices(academies, k=1) @@ -319,99 +337,107 @@ def add_codespaces_activity(context: ActivityContext, field: dict, position: int provisioning_bills = {} provisioning_vendor = None - provisioning_vendor = context['provisioning_vendors'].get('Codespaces', None) + provisioning_vendor = context["provisioning_vendors"].get("Codespaces", None) if not provisioning_vendor: - provisioning_vendor = ProvisioningVendor.objects.filter(name='Codespaces').first() - context['provisioning_vendors']['Codespaces'] = provisioning_vendor + provisioning_vendor = ProvisioningVendor.objects.filter(name="Codespaces").first() + context["provisioning_vendors"]["Codespaces"] = provisioning_vendor if not provisioning_vendor: - errors.append('Provisioning vendor Codespaces not found') + errors.append("Provisioning vendor Codespaces not found") - #TODO: if not academies: no academy has been found responsable for this activity + # TODO: if not academies: no academy has been found responsable for this activity for academy in academies: - ls = context['logs'].get((field['Username'], academy.id), None) + ls = context["logs"].get((field["Username"], academy.id), None) if ls is None: - ls = get_github_academy_user_logs(academy, field['Username'], context['limit']) - context['logs'][(field['Username'], academy.id)] = ls + ls = get_github_academy_user_logs(academy, field["Username"], context["limit"]) + context["logs"][(field["Username"], academy.id)] = ls logs[academy.id] = ls - provisioning_bill = context['provisioning_bills'].get(academy.id, None) - if not provisioning_bill and 
(provisioning_bill := ProvisioningBill.objects.filter( - academy=academy, status='PENDING', hash=context['hash']).first()): - context['provisioning_bills'][academy.id] = provisioning_bill + provisioning_bill = context["provisioning_bills"].get(academy.id, None) + if not provisioning_bill and ( + provisioning_bill := ProvisioningBill.objects.filter( + academy=academy, status="PENDING", hash=context["hash"] + ).first() + ): + context["provisioning_bills"][academy.id] = provisioning_bill provisioning_bills[academy.id] = provisioning_bill if not provisioning_bill: provisioning_bill = ProvisioningBill() provisioning_bill.academy = academy provisioning_bill.vendor = provisioning_vendor - provisioning_bill.status = 'PENDING' - provisioning_bill.hash = context['hash'] + provisioning_bill.status = "PENDING" + provisioning_bill.hash = context["hash"] provisioning_bill.save() - context['provisioning_bills'][academy.id] = provisioning_bill + context["provisioning_bills"][academy.id] = provisioning_bill provisioning_bills[academy.id] = provisioning_bill - date = datetime.strptime(field['Date'], '%Y-%m-%d') + date = datetime.strptime(field["Date"], "%Y-%m-%d") if not_found: - warnings.append(f'We could not find enough information about {field["Username"]}, mark this user user as ' - 'deleted if you don\'t recognize it') + warnings.append( + f'We could not find enough information about {field["Username"]}, mark this user user as ' + "deleted if you don't recognize it" + ) - if not (kind := context['provisioning_activity_kinds'].get((field['Product'], field['SKU']), None)): + if not (kind := context["provisioning_activity_kinds"].get((field["Product"], field["SKU"]), None)): kind, _ = ProvisioningConsumptionKind.objects.get_or_create( - product_name=field['Product'], - sku=field['SKU'], + product_name=field["Product"], + sku=field["SKU"], ) - context['provisioning_activity_kinds'][(field['Product'], field['SKU'])] = kind + context["provisioning_activity_kinds"][(field["Product"], field["SKU"])] = kind - if not (currency := context['currencies'].get('USD', None)): - currency, _ = Currency.objects.get_or_create(code='USD', name='US Dollar', decimals=2) - context['currencies']['USD'] = currency + if not (currency := context["currencies"].get("USD", None)): + currency, _ = Currency.objects.get_or_create(code="USD", name="US Dollar", decimals=2) + context["currencies"]["USD"] = currency - if not (price := context['provisioning_activity_prices'].get( - (field['Unit Type'], field['Price Per Unit ($)'], field['Multiplier']), None)): + if not ( + price := context["provisioning_activity_prices"].get( + (field["Unit Type"], field["Price Per Unit ($)"], field["Multiplier"]), None + ) + ): price, _ = ProvisioningPrice.objects.get_or_create( currency=currency, - unit_type=field['Unit Type'], - price_per_unit=field['Price Per Unit ($)'] * context['provisioning_multiplier'], - multiplier=field['Multiplier'], + unit_type=field["Unit Type"], + price_per_unit=field["Price Per Unit ($)"] * context["provisioning_multiplier"], + multiplier=field["Multiplier"], ) - context['provisioning_activity_prices'][(field['Unit Type'], field['Price Per Unit ($)'], - field['Multiplier'])] = price + context["provisioning_activity_prices"][ + (field["Unit Type"], field["Price Per Unit ($)"], field["Multiplier"]) + ] = price - pa, _ = ProvisioningUserConsumption.objects.get_or_create(username=field['Username'], - hash=context['hash'], - kind=kind, - defaults={'processed_at': timezone.now()}) + pa, _ = 
ProvisioningUserConsumption.objects.get_or_create( + username=field["Username"], hash=context["hash"], kind=kind, defaults={"processed_at": timezone.now()} + ) item, _ = ProvisioningConsumptionEvent.objects.get_or_create( vendor=provisioning_vendor, price=price, registered_at=date, - quantity=field['Quantity'], + quantity=field["Quantity"], repository_url=f"https://github.com/{field['Owner']}/{field['Repository Slug']}", - task_associated_slug=field['Repository Slug'], + task_associated_slug=field["Repository Slug"], csv_row=position, ) - last_status_list = [x for x in pa.status_text.split(', ') if x] + last_status_list = [x for x in pa.status_text.split(", ") if x] if errors: - pa.status = 'ERROR' - pa.status_text = ', '.join(last_status_list + errors + warnings) + pa.status = "ERROR" + pa.status_text = ", ".join(last_status_list + errors + warnings) elif warnings: - if pa.status != 'ERROR': - pa.status = 'WARNING' + if pa.status != "ERROR": + pa.status = "WARNING" - pa.status_text = ', '.join(last_status_list + warnings) + pa.status_text = ", ".join(last_status_list + warnings) else: - pa.status = 'PERSISTED' - pa.status_text = ', '.join(last_status_list + errors + warnings) + pa.status = "PERSISTED" + pa.status_text = ", ".join(last_status_list + errors + warnings) - pa.status_text = ', '.join([x for x in sorted(set(pa.status_text.split(', '))) if x]) + pa.status_text = ", ".join([x for x in sorted(set(pa.status_text.split(", "))) if x]) pa.status_text = pa.status_text[:255] pa.save() @@ -425,138 +451,141 @@ def add_codespaces_activity(context: ActivityContext, field: dict, position: int def add_gitpod_activity(context: ActivityContext, field: dict, position: int): academies = [] - profile_academies = context['profile_academies'].get(field['userName'], None) + profile_academies = context["profile_academies"].get(field["userName"], None) if profile_academies is None: - profile_academies = ProfileAcademy.objects.filter(user__credentialsgithub__username=field['userName'], - status='ACTIVE') + profile_academies = ProfileAcademy.objects.filter( + user__credentialsgithub__username=field["userName"], status="ACTIVE" + ) - context['profile_academies'][field['userName']] = profile_academies + context["profile_academies"][field["userName"]] = profile_academies if profile_academies: academies = sorted(list({profile.academy for profile in profile_academies}), key=lambda x: x.id) - date = iso_to_datetime(field['startTime']) - end = iso_to_datetime(field['endTime']) + date = iso_to_datetime(field["startTime"]) + end = iso_to_datetime(field["endTime"]) if len(academies) > 1: cohort_users = CohortUser.objects.filter( Q(cohort__ending_date__lte=end) | Q(cohort__never_ends=True), cohort__kickoff_date__gte=date, - user__credentialsgithub__username=field['userName']).order_by('-created_at') + user__credentialsgithub__username=field["userName"], + ).order_by("-created_at") if cohort_users: academies = sorted(list({cohort_user.cohort.academy for cohort_user in cohort_users}), key=lambda x: x.id) if not academies: - if 'academies' not in context: - context['academies'] = Academy.objects.filter() - academies = list(context['academies']) + if "academies" not in context: + context["academies"] = Academy.objects.filter() + academies = list(context["academies"]) errors = [] warnings = [] if not academies: - warnings.append(f'We could not find enough information about {field["userName"]}, mark this user user as ' - 'deleted if you don\'t recognize it') + warnings.append( + f'We could not find enough information 
about {field["userName"]}, mark this user user as ' + "deleted if you don't recognize it" + ) - pattern = r'^https://github\.com/[^/]+/([^/]+)/?' - if not (result := re.findall(pattern, field['contextURL'])): + pattern = r"^https://github\.com/[^/]+/([^/]+)/?" + if not (result := re.findall(pattern, field["contextURL"])): warnings.append(f'Invalid repository URL {field["contextURL"]}') - slug = 'unknown' + slug = "unknown" else: slug = result[0] provisioning_bills = [] - provisioning_vendor = context['provisioning_vendors'].get('Gitpod', None) + provisioning_vendor = context["provisioning_vendors"].get("Gitpod", None) if not provisioning_vendor: - provisioning_vendor = ProvisioningVendor.objects.filter(name='Gitpod').first() - context['provisioning_vendors']['Gitpod'] = provisioning_vendor + provisioning_vendor = ProvisioningVendor.objects.filter(name="Gitpod").first() + context["provisioning_vendors"]["Gitpod"] = provisioning_vendor if not provisioning_vendor: - errors.append('Provisioning vendor Gitpod not found') + errors.append("Provisioning vendor Gitpod not found") if academies: for academy in academies: - provisioning_bill = context['provisioning_bills'].get(academy.id, None) + provisioning_bill = context["provisioning_bills"].get(academy.id, None) if provisioning_bill: provisioning_bills.append(provisioning_bill) - elif provisioning_bill := ProvisioningBill.objects.filter(academy=academy, - status='PENDING', - hash=context['hash']).first(): - context['provisioning_bills'][academy.id] = provisioning_bill + elif provisioning_bill := ProvisioningBill.objects.filter( + academy=academy, status="PENDING", hash=context["hash"] + ).first(): + context["provisioning_bills"][academy.id] = provisioning_bill provisioning_bills.append(provisioning_bill) else: provisioning_bill = ProvisioningBill() provisioning_bill.academy = academy provisioning_bill.vendor = provisioning_vendor - provisioning_bill.status = 'PENDING' - provisioning_bill.hash = context['hash'] + provisioning_bill.status = "PENDING" + provisioning_bill.hash = context["hash"] provisioning_bill.save() - context['provisioning_bills'][academy.id] = provisioning_bill + context["provisioning_bills"][academy.id] = provisioning_bill provisioning_bills.append(provisioning_bill) provisioning_bills = list(set(provisioning_bills)) - if not (kind := context['provisioning_activity_kinds'].get(field['kind'], None)): + if not (kind := context["provisioning_activity_kinds"].get(field["kind"], None)): kind, _ = ProvisioningConsumptionKind.objects.get_or_create( - product_name=field['kind'], - sku=field['kind'], + product_name=field["kind"], + sku=field["kind"], ) - context['provisioning_activity_kinds'][field['kind']] = kind + context["provisioning_activity_kinds"][field["kind"]] = kind - if not (currency := context['currencies'].get('USD', None)): - currency, _ = Currency.objects.get_or_create(code='USD', name='US Dollar', decimals=2) - context['currencies']['USD'] = currency + if not (currency := context["currencies"].get("USD", None)): + currency, _ = Currency.objects.get_or_create(code="USD", name="US Dollar", decimals=2) + context["currencies"]["USD"] = currency - if not (price := context['provisioning_activity_prices'].get(currency.id, None)): + if not (price := context["provisioning_activity_prices"].get(currency.id, None)): price, _ = ProvisioningPrice.objects.get_or_create( currency=currency, - unit_type='Credits', - price_per_unit=0.036 * context['provisioning_multiplier'], + unit_type="Credits", + price_per_unit=0.036 * 
context["provisioning_multiplier"], multiplier=1, ) - context['provisioning_activity_prices'][currency.id] = price + context["provisioning_activity_prices"][currency.id] = price - pa, _ = ProvisioningUserConsumption.objects.get_or_create(username=field['userName'], - hash=context['hash'], - kind=kind, - defaults={'processed_at': timezone.now()}) + pa, _ = ProvisioningUserConsumption.objects.get_or_create( + username=field["userName"], hash=context["hash"], kind=kind, defaults={"processed_at": timezone.now()} + ) item, _ = ProvisioningConsumptionEvent.objects.get_or_create( - external_pk=field['id'], + external_pk=field["id"], vendor=provisioning_vendor, price=price, registered_at=date, - quantity=field['credits'], - repository_url=field['contextURL'], + quantity=field["credits"], + repository_url=field["contextURL"], task_associated_slug=slug, csv_row=position, ) - if pa.status == 'PENDING': - pa.status = 'PERSISTED' if not errors else 'ERROR' + if pa.status == "PENDING": + pa.status = "PERSISTED" if not errors else "ERROR" - last_status_list = [x for x in pa.status_text.split(', ') if x] + last_status_list = [x for x in pa.status_text.split(", ") if x] if errors: - pa.status = 'ERROR' - pa.status_text = ', '.join(last_status_list + errors + warnings) + pa.status = "ERROR" + pa.status_text = ", ".join(last_status_list + errors + warnings) elif warnings: - if pa.status != 'ERROR': - pa.status = 'WARNING' + if pa.status != "ERROR": + pa.status = "WARNING" - pa.status_text = ', '.join(last_status_list + warnings) + pa.status_text = ", ".join(last_status_list + warnings) else: - pa.status = 'PERSISTED' - pa.status_text = ', '.join(last_status_list + errors + warnings) + pa.status = "PERSISTED" + pa.status_text = ", ".join(last_status_list + errors + warnings) - pa.status_text = ', '.join([x for x in sorted(set(pa.status_text.split(', '))) if x]) + pa.status_text = ", ".join([x for x in sorted(set(pa.status_text.split(", "))) if x]) pa.status_text = pa.status_text[:255] pa.save() @@ -572,49 +601,49 @@ def add_rigobot_activity(context: ActivityContext, field: dict, position: int) - errors = [] warnings = [] - if field['organization'] != '4Geeks': + if field["organization"] != "4Geeks": return - user = get_user(app='rigobot', sub=field['user_id']) + user = get_user(app="rigobot", sub=field["user_id"]) if user is None: logger.error(f'User {field["user_id"]} not found') return - if field['billing_status'] != 'OPEN': + if field["billing_status"] != "OPEN": return - github_academy_user_log = context['github_academy_user_logs'].get(user.id, None) - date = datetime.fromisoformat(field['consumption_period_start']) + github_academy_user_log = context["github_academy_user_logs"].get(user.id, None) + date = datetime.fromisoformat(field["consumption_period_start"]) academies = [] not_found = False if github_academy_user_log is None: # make a function that calculate the user activity in the academies by percentage github_academy_user_log = GithubAcademyUserLog.objects.filter( - Q(valid_until__isnull=True) - | Q(valid_until__gte=context['limit'] - relativedelta(months=1, weeks=1)), - created_at__lte=context['limit'], + Q(valid_until__isnull=True) | Q(valid_until__gte=context["limit"] - relativedelta(months=1, weeks=1)), + created_at__lte=context["limit"], academy_user__user=user, - academy_user__username=field['github_username'], - storage_status='SYNCHED', - storage_action='ADD').order_by('-created_at') + academy_user__username=field["github_username"], + storage_status="SYNCHED", + storage_action="ADD", + 
).order_by("-created_at") - context['github_academy_user_logs'][user.id] = github_academy_user_log + context["github_academy_user_logs"][user.id] = github_academy_user_log if github_academy_user_log: academies = [x.academy_user.academy for x in github_academy_user_log] if not academies: not_found = True - github_academy_users = GithubAcademyUser.objects.filter(username=field['github_username'], - storage_status='PAYMENT_CONFLICT', - storage_action='IGNORE') + github_academy_users = GithubAcademyUser.objects.filter( + username=field["github_username"], storage_status="PAYMENT_CONFLICT", storage_action="IGNORE" + ) academies = [x.academy for x in github_academy_users] if not academies: - academies = handle_pending_github_user(None, field['github_username'], date) + academies = handle_pending_github_user(None, field["github_username"], date) if not_found is False and academies: academies = random.choices(academies, k=1) @@ -623,100 +652,103 @@ def add_rigobot_activity(context: ActivityContext, field: dict, position: int) - provisioning_bills = {} provisioning_vendor = None - provisioning_vendor = context['provisioning_vendors'].get('Rigobot', None) + provisioning_vendor = context["provisioning_vendors"].get("Rigobot", None) if not provisioning_vendor: - provisioning_vendor = ProvisioningVendor.objects.filter(name='Rigobot').first() - context['provisioning_vendors']['Rigobot'] = provisioning_vendor + provisioning_vendor = ProvisioningVendor.objects.filter(name="Rigobot").first() + context["provisioning_vendors"]["Rigobot"] = provisioning_vendor if not provisioning_vendor: - errors.append('Provisioning vendor Rigobot not found') + errors.append("Provisioning vendor Rigobot not found") for academy in academies: - ls = context['logs'].get((field['github_username'], academy.id), None) + ls = context["logs"].get((field["github_username"], academy.id), None) if ls is None: - ls = get_github_academy_user_logs(academy, field['github_username'], context['limit']) - context['logs'][(field['github_username'], academy.id)] = ls + ls = get_github_academy_user_logs(academy, field["github_username"], context["limit"]) + context["logs"][(field["github_username"], academy.id)] = ls logs[academy.id] = ls - provisioning_bill = context['provisioning_bills'].get(academy.id, None) - if not provisioning_bill and (provisioning_bill := ProvisioningBill.objects.filter( - academy=academy, status='PENDING', hash=context['hash']).first()): - context['provisioning_bills'][academy.id] = provisioning_bill + provisioning_bill = context["provisioning_bills"].get(academy.id, None) + if not provisioning_bill and ( + provisioning_bill := ProvisioningBill.objects.filter( + academy=academy, status="PENDING", hash=context["hash"] + ).first() + ): + context["provisioning_bills"][academy.id] = provisioning_bill provisioning_bills[academy.id] = provisioning_bill if not provisioning_bill: provisioning_bill = ProvisioningBill() provisioning_bill.academy = academy provisioning_bill.vendor = provisioning_vendor - provisioning_bill.status = 'PENDING' - provisioning_bill.hash = context['hash'] + provisioning_bill.status = "PENDING" + provisioning_bill.hash = context["hash"] provisioning_bill.save() - context['provisioning_bills'][academy.id] = provisioning_bill + context["provisioning_bills"][academy.id] = provisioning_bill provisioning_bills[academy.id] = provisioning_bill # not implemented yet if not_found: warnings.append( f'We could not find enough information about {field["github_username"]}, mark this user user as ' - 'deleted if you 
don\'t recognize it') + "deleted if you don't recognize it" + ) s_slug = f'{field["purpose_slug"] or "no-provided"}--{field["pricing_type"].lower()}--{field["model"].lower()}' s_name = f'{field["purpose"]} (type: {field["pricing_type"]}, model: {field["model"]})' - if not (kind := context['provisioning_activity_kinds'].get((s_name, s_slug), None)): + if not (kind := context["provisioning_activity_kinds"].get((s_name, s_slug), None)): kind, _ = ProvisioningConsumptionKind.objects.get_or_create( product_name=s_name, sku=s_slug, ) - context['provisioning_activity_kinds'][(s_name, s_slug)] = kind + context["provisioning_activity_kinds"][(s_name, s_slug)] = kind - if not (currency := context['currencies'].get('USD', None)): - currency, _ = Currency.objects.get_or_create(code='USD', name='US Dollar', decimals=2) - context['currencies']['USD'] = currency + if not (currency := context["currencies"].get("USD", None)): + currency, _ = Currency.objects.get_or_create(code="USD", name="US Dollar", decimals=2) + context["currencies"]["USD"] = currency - if not (price := context['provisioning_activity_prices'].get((field['total_spent'], field['total_tokens']), None)): + if not (price := context["provisioning_activity_prices"].get((field["total_spent"], field["total_tokens"]), None)): with localcontext(prec=10): price, _ = ProvisioningPrice.objects.get_or_create( currency=currency, - unit_type='Tokens', - price_per_unit=Decimal(field['total_spent']) / Decimal(field['total_tokens']), - multiplier=context['provisioning_multiplier'], + unit_type="Tokens", + price_per_unit=Decimal(field["total_spent"]) / Decimal(field["total_tokens"]), + multiplier=context["provisioning_multiplier"], ) - context['provisioning_activity_prices'][(field['total_spent'], field['total_tokens'])] = price + context["provisioning_activity_prices"][(field["total_spent"], field["total_tokens"])] = price - pa, _ = ProvisioningUserConsumption.objects.get_or_create(username=field['github_username'], - hash=context['hash'], - kind=kind, - defaults={'processed_at': timezone.now()}) + pa, _ = ProvisioningUserConsumption.objects.get_or_create( + username=field["github_username"], hash=context["hash"], kind=kind, defaults={"processed_at": timezone.now()} + ) item, _ = ProvisioningConsumptionEvent.objects.get_or_create( vendor=provisioning_vendor, price=price, registered_at=date, - external_pk=field['consumption_item_id'], - quantity=field['total_tokens'], + external_pk=field["consumption_item_id"], + quantity=field["total_tokens"], repository_url=None, task_associated_slug=None, csv_row=position, ) - last_status_list = [x for x in pa.status_text.split(', ') if x] + last_status_list = [x for x in pa.status_text.split(", ") if x] if errors: - pa.status = 'ERROR' - pa.status_text = ', '.join(last_status_list + errors + warnings) + pa.status = "ERROR" + pa.status_text = ", ".join(last_status_list + errors + warnings) elif warnings: - if pa.status != 'ERROR': - pa.status = 'WARNING' + if pa.status != "ERROR": + pa.status = "WARNING" - pa.status_text = ', '.join(last_status_list + warnings) + pa.status_text = ", ".join(last_status_list + warnings) else: - pa.status = 'PERSISTED' - pa.status_text = ', '.join(last_status_list + errors + warnings) + pa.status = "PERSISTED" + pa.status_text = ", ".join(last_status_list + errors + warnings) - pa.status_text = ', '.join([x for x in sorted(set(pa.status_text.split(', '))) if x]) + pa.status_text = ", ".join([x for x in sorted(set(pa.status_text.split(", "))) if x]) pa.status_text = pa.status_text[:255] 
pa.save() diff --git a/breathecode/provisioning/admin.py b/breathecode/provisioning/admin.py index 96322488d..8a68a93cc 100644 --- a/breathecode/provisioning/admin.py +++ b/breathecode/provisioning/admin.py @@ -3,9 +3,18 @@ from django.utils.html import format_html from breathecode.utils.admin import change_field from breathecode.provisioning import tasks -from .models import (ProvisioningConsumptionEvent, ProvisioningConsumptionKind, ProvisioningPrice, - ProvisioningUserConsumption, ProvisioningVendor, ProvisioningMachineTypes, ProvisioningAcademy, - ProvisioningBill, ProvisioningContainer, ProvisioningProfile) +from .models import ( + ProvisioningConsumptionEvent, + ProvisioningConsumptionKind, + ProvisioningPrice, + ProvisioningUserConsumption, + ProvisioningVendor, + ProvisioningMachineTypes, + ProvisioningAcademy, + ProvisioningBill, + ProvisioningContainer, + ProvisioningProfile, +) logger = logging.getLogger(__name__) @@ -13,69 +22,77 @@ @admin.register(ProvisioningVendor) class ProvisioningVendorAdmin(admin.ModelAdmin): # form = CustomForm - search_fields = ['name'] - list_display = ('id', 'name') + search_fields = ["name"] + list_display = ("id", "name") @admin.register(ProvisioningMachineTypes) class ProvisioningMachineTypesAdmin(admin.ModelAdmin): - list_display = ['name', 'slug', 'vendor'] - search_fields = ('name', 'slug') - list_filter = ['vendor'] + list_display = ["name", "slug", "vendor"] + search_fields = ("name", "slug") + list_filter = ["vendor"] @admin.register(ProvisioningAcademy) class ProvisioningAcademyAdmin(admin.ModelAdmin): - list_display = ['academy', 'vendor', 'created_at'] - search_fields = ('academy__name', 'academy__slug') - list_filter = ['vendor'] + list_display = ["academy", "vendor", "created_at"] + search_fields = ("academy__name", "academy__slug") + list_filter = ["vendor"] @admin.register(ProvisioningConsumptionKind) class ProvisioningConsumptionKindAdmin(admin.ModelAdmin): - list_display = ('id', 'product_name', 'sku') - search_fields = ['product_name'] - list_filter = ['product_name'] + list_display = ("id", "product_name", "sku") + search_fields = ["product_name"] + list_filter = ["product_name"] actions = [] @admin.register(ProvisioningPrice) class ProvisioningPriceAdmin(admin.ModelAdmin): - list_display = ('id', 'currency', 'unit_type', 'price_per_unit', 'multiplier') - search_fields = ['currency__code'] - list_filter = ['currency__code', 'unit_type'] + list_display = ("id", "currency", "unit_type", "price_per_unit", "multiplier") + search_fields = ["currency__code"] + list_filter = ["currency__code", "unit_type"] actions = [] @admin.register(ProvisioningConsumptionEvent) class ProvisioningConsumptionEventAdmin(admin.ModelAdmin): - list_display = ('id', 'registered_at', 'external_pk', 'csv_row', 'vendor', 'quantity', 'repository_url', - 'task_associated_slug') - search_fields = ['repository_url', 'task_associated_slug', 'provisioninguserconsumption__bills__hash'] - list_filter = ['vendor'] + list_display = ( + "id", + "registered_at", + "external_pk", + "csv_row", + "vendor", + "quantity", + "repository_url", + "task_associated_slug", + ) + search_fields = ["repository_url", "task_associated_slug", "provisioninguserconsumption__bills__hash"] + list_filter = ["vendor"] actions = [] @admin.register(ProvisioningUserConsumption) class ProvisioningUserConsumptionAdmin(admin.ModelAdmin): - list_display = ('id', 'username', 'kind', 'amount', 'quantity', '_status', 'processed_at') - search_fields = ['username', 'events__task_associated_slug', 
'bills__hash'] - list_filter = ['bills__academy', 'status'] + list_display = ("id", "username", "kind", "amount", "quantity", "_status", "processed_at") + search_fields = ["username", "events__task_associated_slug", "bills__hash"] + list_filter = ["bills__academy", "status"] actions = [] def _status(self, obj): colors = { - 'PERSISTED': 'bg-success', - 'PENDING': 'bg-error', - 'ERROR': 'bg-error', - 'IGNORED': 'bg-warning', - None: 'bg-warning', + "PERSISTED": "bg-success", + "PENDING": "bg-error", + "ERROR": "bg-error", + "IGNORED": "bg-warning", + None: "bg-warning", } def from_status(s): if s in colors: return colors[s] - return '' + return "" return format_html(f"<p class='{from_status(obj.status)}'>{obj.status}</p><small>{obj.status_text}</small>") @@ -92,56 +109,66 @@ def reverse_bill(modeladmin, request, queryset): @admin.register(ProvisioningBill) class ProvisioningBillAdmin(admin.ModelAdmin): - list_display = ('id', 'title', 'vendor', 'academy', '_status', 'total_amount', 'currency_code', 'paid_at', - 'invoice_url') - search_fields = ['academy__name', 'academy__slug', 'id', 'title'] - list_filter = ['academy', 'status', 'vendor'] - - actions = [force_calculate_bill, reverse_bill] + change_field(['DUE', 'DISPUTED', 'PAID', 'PENDING'], name='status') + list_display = ( + "id", + "title", + "vendor", + "academy", + "_status", + "total_amount", + "currency_code", + "paid_at", + "invoice_url", + ) + search_fields = ["academy__name", "academy__slug", "id", "title"] + list_filter = ["academy", "status", "vendor"] + + actions = [force_calculate_bill, reverse_bill] + change_field(["DUE", "DISPUTED", "PAID", "PENDING"], name="status") def invoice_url(self, obj): return format_html( "<a rel='noopener noreferrer' target='_blank' href='/v1/provisioning/bill/{id}/html'>open invoice</a>", - id=obj.id) + id=obj.id, + ) def _status(self, obj): colors = { - 'PAID': 'bg-success', - 'DISPUTED': 'bg-error', - 'DUE': 'bg-warning', - None: 'bg-warning', - 'IGNORED': '', + "PAID": "bg-success", + "DISPUTED": "bg-error", + "DUE": "bg-warning", + None: "bg-warning", + "IGNORED": "", } def from_status(s): if s in colors: return colors[s] - return '' + return "" return format_html(f"<p class='{from_status(obj.status)}'>{obj.status}</p><small>{obj.status_details}</small>") @admin.register(ProvisioningContainer) class ProvisioningContainerAdmin(admin.ModelAdmin): - list_display = ('id', 'user', 'status', 'display_name', 'last_used_at') - search_fields = ['display_name', 'user__firstname', 'user__lastname', 'user__email', 'task_associated_slug'] - list_filter = ['status'] + list_display = ("id", "user", "status", "display_name", "last_used_at") + search_fields = ["display_name", "user__firstname", "user__lastname", "user__email", "task_associated_slug"] + list_filter = ["status"] actions = [] @admin.register(ProvisioningProfile) class ProvisioningProfileAdmin(admin.ModelAdmin): - list_display = ('id', 'academy', 'vendor', 'cohorts_list', 'member_list') + list_display = ("id", "academy", "vendor", "cohorts_list", "member_list") search_fields = [ - 'academy__name', - 'academy__slug', + "academy__name", + "academy__slug", ] - raw_id_fields = ['members', 'cohorts'] - list_filter = ['vendor'] + raw_id_fields = ["members", "cohorts"] + list_filter = ["vendor"] actions = [] def cohorts_list(self, obj): - return format_html(', '.join([str(c) for c in obj.cohorts.all()])) + return format_html(", ".join([str(c) for c in obj.cohorts.all()])) def member_list(self, obj): - return format_html(', '.join([str(pa) for pa in 
obj.members.all()])) + return format_html(", ".join([str(pa) for pa in obj.members.all()])) diff --git a/breathecode/provisioning/apps.py b/breathecode/provisioning/apps.py index 6bf1b7950..328151128 100644 --- a/breathecode/provisioning/apps.py +++ b/breathecode/provisioning/apps.py @@ -5,8 +5,8 @@ class RegistryConfig(AppConfig): - name = 'breathecode.provisioning' + name = "breathecode.provisioning" def ready(self): - logger.debug('Loading provisioning.receivers') + logger.debug("Loading provisioning.receivers") from . import receivers # noqa: F401 diff --git a/breathecode/provisioning/management/commands/archive_provisioning_bills.py b/breathecode/provisioning/management/commands/archive_provisioning_bills.py index 4952f9067..a74d94782 100644 --- a/breathecode/provisioning/management/commands/archive_provisioning_bills.py +++ b/breathecode/provisioning/management/commands/archive_provisioning_bills.py @@ -6,20 +6,20 @@ from dateutil.relativedelta import relativedelta from django.utils import timezone -HOST = os.environ.get('OLD_BREATHECODE_API') -DATETIME_FORMAT = '%Y-%m-%d' +HOST = os.environ.get("OLD_BREATHECODE_API") +DATETIME_FORMAT = "%Y-%m-%d" # archive_provisioning_bills class Command(BaseCommand): - help = 'Archive older provisioning bills' + help = "Archive older provisioning bills" def handle(self, *args, **options): now = timezone.now() - ids = ProvisioningBill.objects.filter(status='PAID', - paid_at__lte=now - relativedelta(months=1), - archived_at__isnull=True).values_list('id', flat=True) + ids = ProvisioningBill.objects.filter( + status="PAID", paid_at__lte=now - relativedelta(months=1), archived_at__isnull=True + ).values_list("id", flat=True) for id in ids: archive_provisioning_bill.delay(id) @@ -28,6 +28,6 @@ def handle(self, *args, **options): msg = self.style.SUCCESS(f"Cleaning {', '.join([str(id) for id in ids])} provisioning bills") else: - msg = self.style.SUCCESS('No provisioning bills to clean') + msg = self.style.SUCCESS("No provisioning bills to clean") self.stdout.write(self.style.SUCCESS(msg)) diff --git a/breathecode/provisioning/migrations/0001_initial.py b/breathecode/provisioning/migrations/0001_initial.py index 20988635f..55f2cdf6b 100644 --- a/breathecode/provisioning/migrations/0001_initial.py +++ b/breathecode/provisioning/migrations/0001_initial.py @@ -11,170 +11,228 @@ class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ('authenticate', '0031_userinvite_syllabus'), - ('admissions', '0054_cohortuser_history_log'), + ("authenticate", "0031_userinvite_syllabus"), + ("admissions", "0054_cohortuser_history_log"), ] operations = [ migrations.CreateModel( - name='ProvisioningVendor', + name="ProvisioningVendor", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('name', models.CharField(max_length=200)), - ('api_url', models.URLField(blank=True)), - ('workspaces_url', - models.URLField(help_text='Points to the place were you can see all your containers')), - ('invite_url', - models.URLField( - help_text='Some vendors (like Gitpod) allow to share invite link to automatically join')), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("name", models.CharField(max_length=200)), + ("api_url", models.URLField(blank=True)), + ( + "workspaces_url", + 
models.URLField(help_text="Points to the place were you can see all your containers"), + ), + ( + "invite_url", + models.URLField( + help_text="Some vendors (like Gitpod) allow to share invite link to automatically join" + ), + ), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), ], ), migrations.CreateModel( - name='ProvisioningProfile', + name="ProvisioningProfile", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('academy', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='admissions.academy')), - ('cohorts', - models.ManyToManyField( - blank=True, - help_text='If set, only these cohorts will be provisioned with this vendor in this academy', - to='admissions.Cohort')), - ('members', - models.ManyToManyField( - blank=True, - help_text='If set, only these members will be provisioned with this vendor in this academy', - to='authenticate.ProfileAcademy')), - ('vendor', - models.ForeignKey(default=None, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to='provisioning.provisioningvendor')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("academy", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="admissions.academy")), + ( + "cohorts", + models.ManyToManyField( + blank=True, + help_text="If set, only these cohorts will be provisioned with this vendor in this academy", + to="admissions.Cohort", + ), + ), + ( + "members", + models.ManyToManyField( + blank=True, + help_text="If set, only these members will be provisioned with this vendor in this academy", + to="authenticate.ProfileAcademy", + ), + ), + ( + "vendor", + models.ForeignKey( + default=None, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to="provisioning.provisioningvendor", + ), + ), ], ), migrations.CreateModel( - name='ProvisioningMachineTypes', + name="ProvisioningMachineTypes", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('name', models.CharField(max_length=200)), - ('slug', models.SlugField(max_length=80)), - ('description', models.CharField(max_length=255)), - ('cpu_cores', models.IntegerField()), - ('ram_in_bytes', models.IntegerField()), - ('disk_in_bytes', models.IntegerField()), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('vendor', - models.ForeignKey(default=None, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to='provisioning.provisioningvendor')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("name", models.CharField(max_length=200)), + ("slug", models.SlugField(max_length=80)), + ("description", models.CharField(max_length=255)), + ("cpu_cores", models.IntegerField()), + ("ram_in_bytes", models.IntegerField()), + ("disk_in_bytes", models.IntegerField()), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ( + "vendor", + models.ForeignKey( + default=None, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to="provisioning.provisioningvendor", + ), + ), ], ), migrations.CreateModel( - name='ProvisioningContainer', + name="ProvisioningContainer", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('web_url', models.URLField()), - 
('status', - models.CharField(help_text='We have no control over this. Reported by the vendor', max_length=50)), - ('display_name', models.CharField(max_length=50)), - ('last_used_at', models.DateTimeField(blank=True, default=None, null=True)), - ('provisioned_at', models.DateTimeField(blank=True, default=None, null=True)), - ('has_unpushed_changes', models.BooleanField(default=False)), - ('has_uncommitted_changes', models.BooleanField(default=False)), - ('branch_name', models.CharField(blank=True, default=None, max_length=100, null=True)), - ('task_associated_slug', - models.SlugField(help_text='What assignment was the the student trying to complete with this', - max_length=100)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("web_url", models.URLField()), + ( + "status", + models.CharField(help_text="We have no control over this. Reported by the vendor", max_length=50), + ), + ("display_name", models.CharField(max_length=50)), + ("last_used_at", models.DateTimeField(blank=True, default=None, null=True)), + ("provisioned_at", models.DateTimeField(blank=True, default=None, null=True)), + ("has_unpushed_changes", models.BooleanField(default=False)), + ("has_uncommitted_changes", models.BooleanField(default=False)), + ("branch_name", models.CharField(blank=True, default=None, max_length=100, null=True)), + ( + "task_associated_slug", + models.SlugField( + help_text="What assignment was the the student trying to complete with this", max_length=100 + ), + ), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("user", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ], ), migrations.CreateModel( - name='ProvisioningBill', + name="ProvisioningBill", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('total_amount', models.FloatField()), - ('currency_code', models.CharField(default='usd', max_length=3)), - ('status', - models.CharField(choices=[('DUE', 'Due'), ('DISPUTED', 'Disputed'), ('IGNORED', 'Ignored'), - ('PAID', 'Paid')], - default='DUE', - max_length=20)), - ('paid_at', models.DateTimeField(blank=True, default=None, null=True)), - ('status_details', models.TextField(blank=True, default=None, null=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('academy', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='admissions.academy')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("total_amount", models.FloatField()), + ("currency_code", models.CharField(default="usd", max_length=3)), + ( + "status", + models.CharField( + choices=[("DUE", "Due"), ("DISPUTED", "Disputed"), ("IGNORED", "Ignored"), ("PAID", "Paid")], + default="DUE", + max_length=20, + ), + ), + ("paid_at", models.DateTimeField(blank=True, default=None, null=True)), + ("status_details", models.TextField(blank=True, default=None, null=True)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("academy", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, 
to="admissions.academy")), ], ), migrations.CreateModel( - name='ProvisioningActivity', + name="ProvisioningActivity", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('username', - models.CharField(help_text='Native username in the provisioning platform, E.g: github username', - max_length=80)), - ('registered_at', - models.DateTimeField( - blank=True, - default=None, - help_text='When the activitiy happened, this field comes form the provisioning vendor', - null=True)), - ('product_name', models.CharField(max_length=100)), - ('sku', models.CharField(max_length=100)), - ('quantity', models.FloatField()), - ('unit_type', models.CharField(max_length=100)), - ('price_per_unit', models.FloatField(help_text='Price paid to the provisioning vendor, E.g: Github')), - ('currency_code', models.CharField(max_length=3)), - ('multiplier', - models.FloatField(blank=True, help_text='To increase price in a certain percentage', null=True)), - ('repository_url', models.URLField()), - ('task_associated_slug', - models.SlugField(help_text='What assignment was the the student trying to complete with this', - max_length=100)), - ('notes', models.TextField(blank=True, null=True)), - ('status', - models.CharField(choices=[('PENDING', 'Pending'), ('PERSISTED', 'Persisted'), ('ERROR', 'Error')], - default='PENDING', - max_length=20)), - ('status_text', models.CharField(max_length=255)), - ('processed_at', models.DateTimeField(blank=True, default=None, null=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('bill', - models.ForeignKey(blank=True, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to='provisioning.provisioningbill')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "username", + models.CharField( + help_text="Native username in the provisioning platform, E.g: github username", max_length=80 + ), + ), + ( + "registered_at", + models.DateTimeField( + blank=True, + default=None, + help_text="When the activitiy happened, this field comes form the provisioning vendor", + null=True, + ), + ), + ("product_name", models.CharField(max_length=100)), + ("sku", models.CharField(max_length=100)), + ("quantity", models.FloatField()), + ("unit_type", models.CharField(max_length=100)), + ("price_per_unit", models.FloatField(help_text="Price paid to the provisioning vendor, E.g: Github")), + ("currency_code", models.CharField(max_length=3)), + ( + "multiplier", + models.FloatField(blank=True, help_text="To increase price in a certain percentage", null=True), + ), + ("repository_url", models.URLField()), + ( + "task_associated_slug", + models.SlugField( + help_text="What assignment was the the student trying to complete with this", max_length=100 + ), + ), + ("notes", models.TextField(blank=True, null=True)), + ( + "status", + models.CharField( + choices=[("PENDING", "Pending"), ("PERSISTED", "Persisted"), ("ERROR", "Error")], + default="PENDING", + max_length=20, + ), + ), + ("status_text", models.CharField(max_length=255)), + ("processed_at", models.DateTimeField(blank=True, default=None, null=True)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ( + "bill", + models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to="provisioning.provisioningbill", + ), + ), ], ), migrations.CreateModel( - 
name='ProvisioningAcademy', + name="ProvisioningAcademy", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('credentials_key', models.CharField(blank=True, max_length=200)), - ('credentials_token', models.CharField(blank=True, max_length=200)), - ('container_idle_timeout', - models.IntegerField( - default=15, help_text='If the container is idle for X amount of minutes, it will be shut down')), - ('max_active_containers', - models.IntegerField( - default=2, - help_text='If you already have X active containers you wont be able to create new ones. ')), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('academy', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='admissions.academy')), - ('allowed_machine_types', models.ManyToManyField(blank=True, - to='provisioning.ProvisioningMachineTypes')), - ('vendor', - models.ForeignKey(default=None, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to='provisioning.provisioningvendor')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("credentials_key", models.CharField(blank=True, max_length=200)), + ("credentials_token", models.CharField(blank=True, max_length=200)), + ( + "container_idle_timeout", + models.IntegerField( + default=15, help_text="If the container is idle for X amount of minutes, it will be shut down" + ), + ), + ( + "max_active_containers", + models.IntegerField( + default=2, + help_text="If you already have X active containers you wont be able to create new ones. ", + ), + ), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("academy", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="admissions.academy")), + ( + "allowed_machine_types", + models.ManyToManyField(blank=True, to="provisioning.ProvisioningMachineTypes"), + ), + ( + "vendor", + models.ForeignKey( + default=None, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to="provisioning.provisioningvendor", + ), + ), ], ), ] diff --git a/breathecode/provisioning/migrations/0002_alter_provisioningvendor_invite_url.py b/breathecode/provisioning/migrations/0002_alter_provisioningvendor_invite_url.py index 98c023c12..9f828d1e7 100644 --- a/breathecode/provisioning/migrations/0002_alter_provisioningvendor_invite_url.py +++ b/breathecode/provisioning/migrations/0002_alter_provisioningvendor_invite_url.py @@ -6,17 +6,18 @@ class Migration(migrations.Migration): dependencies = [ - ('provisioning', '0001_initial'), + ("provisioning", "0001_initial"), ] operations = [ migrations.AlterField( - model_name='provisioningvendor', - name='invite_url', + model_name="provisioningvendor", + name="invite_url", field=models.URLField( blank=True, default=None, - help_text='Some vendors (like Gitpod) allow to share invite link to automatically join', - null=True), + help_text="Some vendors (like Gitpod) allow to share invite link to automatically join", + null=True, + ), ), ] diff --git a/breathecode/provisioning/migrations/0003_auto_20230530_1832.py b/breathecode/provisioning/migrations/0003_auto_20230530_1832.py index 8c4ccee57..cc813fef5 100644 --- a/breathecode/provisioning/migrations/0003_auto_20230530_1832.py +++ b/breathecode/provisioning/migrations/0003_auto_20230530_1832.py @@ -6,40 +6,48 @@ class Migration(migrations.Migration): dependencies = [ - ('provisioning', 
'0002_alter_provisioningvendor_invite_url'), + ("provisioning", "0002_alter_provisioningvendor_invite_url"), ] operations = [ migrations.AddField( - model_name='provisioningbill', - name='hash', + model_name="provisioningbill", + name="hash", field=models.CharField(blank=True, default=None, max_length=64, null=True), ), migrations.AlterField( - model_name='provisioningactivity', - name='registered_at', + model_name="provisioningactivity", + name="registered_at", field=models.DateTimeField( blank=True, default=None, - help_text='When the activity happened, this field comes form the provisioning vendor', - null=True), + help_text="When the activity happened, this field comes form the provisioning vendor", + null=True, + ), ), migrations.AlterField( - model_name='provisioningbill', - name='currency_code', - field=models.CharField(default='USD', max_length=3), + model_name="provisioningbill", + name="currency_code", + field=models.CharField(default="USD", max_length=3), ), migrations.AlterField( - model_name='provisioningbill', - name='status', - field=models.CharField(choices=[('DUE', 'Due'), ('DISPUTED', 'Disputed'), ('IGNORED', 'Ignored'), - ('PENDING', 'Pending'), ('PAID', 'Paid')], - default='DUE', - max_length=20), + model_name="provisioningbill", + name="status", + field=models.CharField( + choices=[ + ("DUE", "Due"), + ("DISPUTED", "Disputed"), + ("IGNORED", "Ignored"), + ("PENDING", "Pending"), + ("PAID", "Paid"), + ], + default="DUE", + max_length=20, + ), ), migrations.AlterField( - model_name='provisioningbill', - name='total_amount', + model_name="provisioningbill", + name="total_amount", field=models.FloatField(default=0), ), ] diff --git a/breathecode/provisioning/migrations/0004_auto_20230611_0534.py b/breathecode/provisioning/migrations/0004_auto_20230611_0534.py index 2111a7549..4bf4ec215 100644 --- a/breathecode/provisioning/migrations/0004_auto_20230611_0534.py +++ b/breathecode/provisioning/migrations/0004_auto_20230611_0534.py @@ -6,21 +6,29 @@ class Migration(migrations.Migration): dependencies = [ - ('provisioning', '0003_auto_20230530_1832'), + ("provisioning", "0003_auto_20230530_1832"), ] operations = [ migrations.AddField( - model_name='provisioningactivity', - name='hash', + model_name="provisioningactivity", + name="hash", field=models.CharField(blank=True, default=None, max_length=64, null=True), ), migrations.AlterField( - model_name='provisioningbill', - name='status', - field=models.CharField(choices=[('DUE', 'Due'), ('DISPUTED', 'Disputed'), ('IGNORED', 'Ignored'), - ('PENDING', 'Pending'), ('PAID', 'Paid'), ('ERROR', 'Error')], - default='DUE', - max_length=20), + model_name="provisioningbill", + name="status", + field=models.CharField( + choices=[ + ("DUE", "Due"), + ("DISPUTED", "Disputed"), + ("IGNORED", "Ignored"), + ("PENDING", "Pending"), + ("PAID", "Paid"), + ("ERROR", "Error"), + ], + default="DUE", + max_length=20, + ), ), ] diff --git a/breathecode/provisioning/migrations/0005_provisioningbill_stripe_url.py b/breathecode/provisioning/migrations/0005_provisioningbill_stripe_url.py index 042b95025..2a5c4953f 100644 --- a/breathecode/provisioning/migrations/0005_provisioningbill_stripe_url.py +++ b/breathecode/provisioning/migrations/0005_provisioningbill_stripe_url.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('provisioning', '0004_auto_20230611_0534'), + ("provisioning", "0004_auto_20230611_0534"), ] operations = [ migrations.AddField( - model_name='provisioningbill', - name='stripe_url', + 
model_name="provisioningbill", + name="stripe_url", field=models.URLField(blank=True, default=None, null=True), ), ] diff --git a/breathecode/provisioning/migrations/0006_auto_20230705_1635.py b/breathecode/provisioning/migrations/0006_auto_20230705_1635.py index 2591602ef..2d41ed10e 100644 --- a/breathecode/provisioning/migrations/0006_auto_20230705_1635.py +++ b/breathecode/provisioning/migrations/0006_auto_20230705_1635.py @@ -7,32 +7,34 @@ class Migration(migrations.Migration): dependencies = [ - ('provisioning', '0005_provisioningbill_stripe_url'), + ("provisioning", "0005_provisioningbill_stripe_url"), ] operations = [ migrations.AddField( - model_name='provisioningbill', - name='fee', + model_name="provisioningbill", + name="fee", field=models.FloatField(default=0), ), migrations.AddField( - model_name='provisioningbill', - name='stripe_id', - field=models.CharField(blank=True, default=None, help_text='Stripe id', max_length=32, null=True), + model_name="provisioningbill", + name="stripe_id", + field=models.CharField(blank=True, default=None, help_text="Stripe id", max_length=32, null=True), ), migrations.AddField( - model_name='provisioningbill', - name='vendor', - field=models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to='provisioning.provisioningvendor'), + model_name="provisioningbill", + name="vendor", + field=models.ForeignKey( + blank=True, + default=None, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to="provisioning.provisioningvendor", + ), ), migrations.AlterField( - model_name='provisioningactivity', - name='multiplier', - field=models.FloatField(blank=True, default=1, help_text='To increase price in a certain percentage'), + model_name="provisioningactivity", + name="multiplier", + field=models.FloatField(blank=True, default=1, help_text="To increase price in a certain percentage"), ), ] diff --git a/breathecode/provisioning/migrations/0007_alter_provisioningactivity_status.py b/breathecode/provisioning/migrations/0007_alter_provisioningactivity_status.py index c511601c2..aef0f5f1a 100644 --- a/breathecode/provisioning/migrations/0007_alter_provisioningactivity_status.py +++ b/breathecode/provisioning/migrations/0007_alter_provisioningactivity_status.py @@ -6,16 +6,22 @@ class Migration(migrations.Migration): dependencies = [ - ('provisioning', '0006_auto_20230705_1635'), + ("provisioning", "0006_auto_20230705_1635"), ] operations = [ migrations.AlterField( - model_name='provisioningactivity', - name='status', - field=models.CharField(choices=[('PENDING', 'Pending'), ('PERSISTED', 'Persisted'), ('IGNORED', 'Ignored'), - ('ERROR', 'Error')], - default='PENDING', - max_length=20), + model_name="provisioningactivity", + name="status", + field=models.CharField( + choices=[ + ("PENDING", "Pending"), + ("PERSISTED", "Persisted"), + ("IGNORED", "Ignored"), + ("ERROR", "Error"), + ], + default="PENDING", + max_length=20, + ), ), ] diff --git a/breathecode/provisioning/migrations/0008_auto_20230715_1151.py b/breathecode/provisioning/migrations/0008_auto_20230715_1151.py index 4348161e1..b76c9faae 100644 --- a/breathecode/provisioning/migrations/0008_auto_20230715_1151.py +++ b/breathecode/provisioning/migrations/0008_auto_20230715_1151.py @@ -7,82 +7,106 @@ class Migration(migrations.Migration): dependencies = [ - ('payments', '0028_auto_20230607_2028'), - ('provisioning', '0007_alter_provisioningactivity_status'), + ("payments", "0028_auto_20230607_2028"), + ("provisioning", 
"0007_alter_provisioningactivity_status"), ] operations = [ migrations.CreateModel( - name='ProvisioningConsumptionEvent', + name="ProvisioningConsumptionEvent", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('registered_at', - models.DateTimeField( - help_text='When the activity happened, this field comes form the provisioning vendor')), - ('external_pk', models.CharField(blank=True, default=None, max_length=100, null=True)), - ('csv_row', models.IntegerField()), - ('quantity', models.FloatField()), - ('repository_url', models.URLField()), - ('task_associated_slug', - models.SlugField(help_text='What assignment was the the student trying to complete with this', - max_length=100)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "registered_at", + models.DateTimeField( + help_text="When the activity happened, this field comes form the provisioning vendor" + ), + ), + ("external_pk", models.CharField(blank=True, default=None, max_length=100, null=True)), + ("csv_row", models.IntegerField()), + ("quantity", models.FloatField()), + ("repository_url", models.URLField()), + ( + "task_associated_slug", + models.SlugField( + help_text="What assignment was the the student trying to complete with this", max_length=100 + ), + ), ], ), migrations.CreateModel( - name='ProvisioningConsumptionKind', + name="ProvisioningConsumptionKind", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('product_name', models.CharField(max_length=100)), - ('sku', models.CharField(max_length=100)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("product_name", models.CharField(max_length=100)), + ("sku", models.CharField(max_length=100)), ], ), migrations.CreateModel( - name='ProvisioningUserConsumption', + name="ProvisioningUserConsumption", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('username', - models.CharField(help_text='Native username in the provisioning platform, E.g: github username', - max_length=80)), - ('hash', models.CharField(blank=True, default=None, max_length=64, null=True)), - ('status', - models.CharField(choices=[('PENDING', 'Pending'), ('PERSISTED', 'Persisted'), ('IGNORED', 'Ignored'), - ('ERROR', 'Error')], - default='PENDING', - max_length=20)), - ('status_text', models.CharField(max_length=255)), - ('processed_at', models.DateTimeField(blank=True, default=None, null=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('bills', models.ManyToManyField(blank=True, to='provisioning.ProvisioningBill')), - ('events', models.ManyToManyField(blank=True, to='provisioning.ProvisioningConsumptionEvent')), - ('kind', - models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, - to='provisioning.provisioningconsumptionkind')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "username", + models.CharField( + help_text="Native username in the provisioning platform, E.g: github username", max_length=80 + ), + ), + ("hash", models.CharField(blank=True, default=None, max_length=64, null=True)), + ( + "status", + models.CharField( + choices=[ + ("PENDING", "Pending"), + ("PERSISTED", "Persisted"), + ("IGNORED", "Ignored"), + ("ERROR", "Error"), + ], + default="PENDING", + 
max_length=20, + ), + ), + ("status_text", models.CharField(max_length=255)), + ("processed_at", models.DateTimeField(blank=True, default=None, null=True)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("bills", models.ManyToManyField(blank=True, to="provisioning.ProvisioningBill")), + ("events", models.ManyToManyField(blank=True, to="provisioning.ProvisioningConsumptionEvent")), + ( + "kind", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, to="provisioning.provisioningconsumptionkind" + ), + ), ], ), migrations.CreateModel( - name='ProvisioningPrice', + name="ProvisioningPrice", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('unit_type', models.CharField(max_length=100)), - ('price_per_unit', models.FloatField(help_text='Price paid to the provisioning vendor, E.g: Github')), - ('multiplier', - models.FloatField(blank=True, default=1, help_text='To increase price in a certain percentage')), - ('currency', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='payments.currency')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("unit_type", models.CharField(max_length=100)), + ("price_per_unit", models.FloatField(help_text="Price paid to the provisioning vendor, E.g: Github")), + ( + "multiplier", + models.FloatField(blank=True, default=1, help_text="To increase price in a certain percentage"), + ), + ("currency", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="payments.currency")), ], ), migrations.AddField( - model_name='provisioningconsumptionevent', - name='price', - field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='provisioning.provisioningprice'), + model_name="provisioningconsumptionevent", + name="price", + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="provisioning.provisioningprice"), ), migrations.AddField( - model_name='provisioningconsumptionevent', - name='vendor', - field=models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.CASCADE, - to='provisioning.provisioningvendor'), + model_name="provisioningconsumptionevent", + name="vendor", + field=models.ForeignKey( + blank=True, + default=None, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="provisioning.provisioningvendor", + ), ), ] diff --git a/breathecode/provisioning/migrations/0009_provisioninguserconsumption_amount.py b/breathecode/provisioning/migrations/0009_provisioninguserconsumption_amount.py index 7c441881f..fb4326826 100644 --- a/breathecode/provisioning/migrations/0009_provisioninguserconsumption_amount.py +++ b/breathecode/provisioning/migrations/0009_provisioninguserconsumption_amount.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('provisioning', '0008_auto_20230715_1151'), + ("provisioning", "0008_auto_20230715_1151"), ] operations = [ migrations.AddField( - model_name='provisioninguserconsumption', - name='amount', + model_name="provisioninguserconsumption", + name="amount", field=models.FloatField(default=0), ), ] diff --git a/breathecode/provisioning/migrations/0010_provisioninguserconsumption_quantity.py b/breathecode/provisioning/migrations/0010_provisioninguserconsumption_quantity.py index 0d8774983..439075761 100644 --- a/breathecode/provisioning/migrations/0010_provisioninguserconsumption_quantity.py +++ 
b/breathecode/provisioning/migrations/0010_provisioninguserconsumption_quantity.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('provisioning', '0009_provisioninguserconsumption_amount'), + ("provisioning", "0009_provisioninguserconsumption_amount"), ] operations = [ migrations.AddField( - model_name='provisioninguserconsumption', - name='quantity', + model_name="provisioninguserconsumption", + name="quantity", field=models.FloatField(default=0), ), ] diff --git a/breathecode/provisioning/migrations/0011_auto_20230718_2159.py b/breathecode/provisioning/migrations/0011_auto_20230718_2159.py index 28ea84c05..7e843730b 100644 --- a/breathecode/provisioning/migrations/0011_auto_20230718_2159.py +++ b/breathecode/provisioning/migrations/0011_auto_20230718_2159.py @@ -6,14 +6,16 @@ class Migration(migrations.Migration): dependencies = [ - ('provisioning', '0010_provisioninguserconsumption_quantity'), + ("provisioning", "0010_provisioninguserconsumption_quantity"), ] operations = [ migrations.AlterField( - model_name='provisioninguserconsumption', - name='events', - field=models.ManyToManyField(blank=True, editable=False, to='provisioning.ProvisioningConsumptionEvent'), + model_name="provisioninguserconsumption", + name="events", + field=models.ManyToManyField(blank=True, editable=False, to="provisioning.ProvisioningConsumptionEvent"), + ), + migrations.DeleteModel( + name="ProvisioningActivity", ), - migrations.DeleteModel(name='ProvisioningActivity', ), ] diff --git a/breathecode/provisioning/migrations/0012_provisioningbill_title.py b/breathecode/provisioning/migrations/0012_provisioningbill_title.py index 784d5b084..fb2000c04 100644 --- a/breathecode/provisioning/migrations/0012_provisioningbill_title.py +++ b/breathecode/provisioning/migrations/0012_provisioningbill_title.py @@ -6,17 +6,19 @@ class Migration(migrations.Migration): dependencies = [ - ('provisioning', '0011_auto_20230718_2159'), + ("provisioning", "0011_auto_20230718_2159"), ] operations = [ migrations.AddField( - model_name='provisioningbill', - name='title', - field=models.CharField(blank=True, - default=None, - help_text="This title should describe what the Bill is about. I.e.: April's bill.", - max_length=40, - null=True), + model_name="provisioningbill", + name="title", + field=models.CharField( + blank=True, + default=None, + help_text="This title should describe what the Bill is about. I.e.: April's bill.", + max_length=40, + null=True, + ), ), ] diff --git a/breathecode/provisioning/migrations/0013_alter_provisioningbill_title.py b/breathecode/provisioning/migrations/0013_alter_provisioningbill_title.py index a19c30910..fbe05a24c 100644 --- a/breathecode/provisioning/migrations/0013_alter_provisioningbill_title.py +++ b/breathecode/provisioning/migrations/0013_alter_provisioningbill_title.py @@ -6,18 +6,19 @@ class Migration(migrations.Migration): dependencies = [ - ('provisioning', '0012_provisioningbill_title'), + ("provisioning", "0012_provisioningbill_title"), ] operations = [ migrations.AlterField( - model_name='provisioningbill', - name='title', + model_name="provisioningbill", + name="title", field=models.CharField( blank=True, default=None, help_text="This title should describe what the Bill is about. I.e.: April's bill. 
(MAX 64 chars)", max_length=64, - null=True), + null=True, + ), ), ] diff --git a/breathecode/provisioning/migrations/0014_auto_20230721_1945.py b/breathecode/provisioning/migrations/0014_auto_20230721_1945.py index a446108e4..b2069458f 100644 --- a/breathecode/provisioning/migrations/0014_auto_20230721_1945.py +++ b/breathecode/provisioning/migrations/0014_auto_20230721_1945.py @@ -6,18 +6,18 @@ class Migration(migrations.Migration): dependencies = [ - ('provisioning', '0013_alter_provisioningbill_title'), + ("provisioning", "0013_alter_provisioningbill_title"), ] operations = [ migrations.AddField( - model_name='provisioningbill', - name='ended_at', + model_name="provisioningbill", + name="ended_at", field=models.DateTimeField(blank=True, default=None, null=True), ), migrations.AddField( - model_name='provisioningbill', - name='started_at', + model_name="provisioningbill", + name="started_at", field=models.DateTimeField(blank=True, default=None, null=True), ), ] diff --git a/breathecode/provisioning/migrations/0015_auto_20230811_0645.py b/breathecode/provisioning/migrations/0015_auto_20230811_0645.py index 1be2d9188..4de116105 100644 --- a/breathecode/provisioning/migrations/0015_auto_20230811_0645.py +++ b/breathecode/provisioning/migrations/0015_auto_20230811_0645.py @@ -6,32 +6,40 @@ class Migration(migrations.Migration): dependencies = [ - ('provisioning', '0014_auto_20230721_1945'), + ("provisioning", "0014_auto_20230721_1945"), ] operations = [ migrations.AddField( - model_name='provisioningbill', - name='archived_at', + model_name="provisioningbill", + name="archived_at", field=models.DateTimeField(blank=True, default=None, null=True), ), migrations.AlterField( - model_name='provisioningbill', - name='hash', + model_name="provisioningbill", + name="hash", field=models.CharField(blank=True, db_index=True, default=None, max_length=64, null=True), ), migrations.AlterField( - model_name='provisioningbill', - name='paid_at', + model_name="provisioningbill", + name="paid_at", field=models.DateTimeField(blank=True, db_index=True, default=None, null=True), ), migrations.AlterField( - model_name='provisioningbill', - name='status', - field=models.CharField(choices=[('DUE', 'Due'), ('DISPUTED', 'Disputed'), ('IGNORED', 'Ignored'), - ('PENDING', 'Pending'), ('PAID', 'Paid'), ('ERROR', 'Error')], - db_index=True, - default='DUE', - max_length=20), + model_name="provisioningbill", + name="status", + field=models.CharField( + choices=[ + ("DUE", "Due"), + ("DISPUTED", "Disputed"), + ("IGNORED", "Ignored"), + ("PENDING", "Pending"), + ("PAID", "Paid"), + ("ERROR", "Error"), + ], + db_index=True, + default="DUE", + max_length=20, + ), ), ] diff --git a/breathecode/provisioning/migrations/0016_alter_provisioningconsumptionevent_repository_url_and_more.py b/breathecode/provisioning/migrations/0016_alter_provisioningconsumptionevent_repository_url_and_more.py index 0004573cb..48b23eee3 100644 --- a/breathecode/provisioning/migrations/0016_alter_provisioningconsumptionevent_repository_url_and_more.py +++ b/breathecode/provisioning/migrations/0016_alter_provisioningconsumptionevent_repository_url_and_more.py @@ -6,20 +6,20 @@ class Migration(migrations.Migration): dependencies = [ - ('provisioning', '0015_auto_20230811_0645'), + ("provisioning", "0015_auto_20230811_0645"), ] operations = [ migrations.AlterField( - model_name='provisioningconsumptionevent', - name='repository_url', + model_name="provisioningconsumptionevent", + name="repository_url", field=models.URLField(null=True), ), 
migrations.AlterField( - model_name='provisioningconsumptionevent', - name='task_associated_slug', - field=models.SlugField(help_text='What assignment was the the student trying to complete with this', - max_length=100, - null=True), + model_name="provisioningconsumptionevent", + name="task_associated_slug", + field=models.SlugField( + help_text="What assignment was the the student trying to complete with this", max_length=100, null=True + ), ), ] diff --git a/breathecode/provisioning/migrations/0017_alter_provisioninguserconsumption_status.py b/breathecode/provisioning/migrations/0017_alter_provisioninguserconsumption_status.py index 7441534a4..4fd9bf46c 100644 --- a/breathecode/provisioning/migrations/0017_alter_provisioninguserconsumption_status.py +++ b/breathecode/provisioning/migrations/0017_alter_provisioninguserconsumption_status.py @@ -6,16 +6,23 @@ class Migration(migrations.Migration): dependencies = [ - ('provisioning', '0016_alter_provisioningconsumptionevent_repository_url_and_more'), + ("provisioning", "0016_alter_provisioningconsumptionevent_repository_url_and_more"), ] operations = [ migrations.AlterField( - model_name='provisioninguserconsumption', - name='status', - field=models.CharField(choices=[('PENDING', 'Pending'), ('PERSISTED', 'Persisted'), ('IGNORED', 'Ignored'), - ('WARNING', 'Warning'), ('ERROR', 'Error')], - default='PENDING', - max_length=20), + model_name="provisioninguserconsumption", + name="status", + field=models.CharField( + choices=[ + ("PENDING", "Pending"), + ("PERSISTED", "Persisted"), + ("IGNORED", "Ignored"), + ("WARNING", "Warning"), + ("ERROR", "Error"), + ], + default="PENDING", + max_length=20, + ), ), ] diff --git a/breathecode/provisioning/models.py b/breathecode/provisioning/models.py index 084c8d9c1..844aec758 100644 --- a/breathecode/provisioning/models.py +++ b/breathecode/provisioning/models.py @@ -15,12 +15,13 @@ class ProvisioningVendor(models.Model): name = models.CharField(max_length=200) api_url = models.URLField(blank=True) - workspaces_url = models.URLField(help_text='Points to the place were you can see all your containers') + workspaces_url = models.URLField(help_text="Points to the place were you can see all your containers") invite_url = models.URLField( blank=True, null=True, default=None, - help_text='Some vendors (like Gitpod) allow to share invite link to automatically join') + help_text="Some vendors (like Gitpod) allow to share invite link to automatically join", + ) created_at = models.DateTimeField(auto_now_add=True, editable=False) updated_at = models.DateTimeField(auto_now=True, editable=False) @@ -34,14 +35,16 @@ class ProvisioningProfile(models.Model): vendor = models.ForeignKey(ProvisioningVendor, on_delete=models.SET_NULL, null=True, default=None) cohorts = models.ManyToManyField( - Cohort, blank=True, help_text='If set, only these cohorts will be provisioned with this vendor in this academy') + Cohort, blank=True, help_text="If set, only these cohorts will be provisioned with this vendor in this academy" + ) members = models.ManyToManyField( ProfileAcademy, blank=True, - help_text='If set, only these members will be provisioned with this vendor in this academy') + help_text="If set, only these members will be provisioned with this vendor in this academy", + ) def __str__(self): - return self.academy.name + ' on ' + self.vendor.name + return self.academy.name + " on " + self.vendor.name # FIXME: the model name is wrong, it should be ProvisioningMachineType @@ -69,31 +72,33 @@ class 
ProvisioningAcademy(models.Model): credentials_token = models.CharField(max_length=200, blank=True) container_idle_timeout = models.IntegerField( - default=15, help_text='If the container is idle for X amount of minutes, it will be shut down') + default=15, help_text="If the container is idle for X amount of minutes, it will be shut down" + ) max_active_containers = models.IntegerField( - default=2, help_text='If you already have X active containers you wont be able to create new ones. ') + default=2, help_text="If you already have X active containers you wont be able to create new ones. " + ) allowed_machine_types = models.ManyToManyField(ProvisioningMachineTypes, blank=True) created_at = models.DateTimeField(auto_now_add=True, editable=False) updated_at = models.DateTimeField(auto_now=True, editable=False) def __str__(self): - return str(self.academy) + ' on ' + str(self.vendor) + return str(self.academy) + " on " + str(self.vendor) -DUE = 'DUE' -DISPUTED = 'DISPUTED' -PAID = 'PAID' -IGNORED = 'IGNORED' -PENDING = 'PENDING' -ERROR = 'ERROR' +DUE = "DUE" +DISPUTED = "DISPUTED" +PAID = "PAID" +IGNORED = "IGNORED" +PENDING = "PENDING" +ERROR = "ERROR" BILL_STATUS = ( - (DUE, 'Due'), - (DISPUTED, 'Disputed'), - (IGNORED, 'Ignored'), - (PENDING, 'Pending'), - (PAID, 'Paid'), - (ERROR, 'Error'), + (DUE, "Due"), + (DISPUTED, "Disputed"), + (IGNORED, "Ignored"), + (PENDING, "Pending"), + (PAID, "Paid"), + (ERROR, "Error"), ) @@ -102,13 +107,13 @@ class ProvisioningBill(models.Model): total_amount = models.FloatField(default=0) fee = models.FloatField(default=0) hash = models.CharField(max_length=64, blank=True, null=True, default=None, db_index=True) - currency_code = models.CharField(max_length=3, default='USD') + currency_code = models.CharField(max_length=3, default="USD") academy = models.ForeignKey(Academy, on_delete=models.CASCADE, db_index=True) status = models.CharField(max_length=20, choices=BILL_STATUS, default=DUE, db_index=True) paid_at = models.DateTimeField(null=True, default=None, blank=True, db_index=True) archived_at = models.DateTimeField(null=True, default=None, blank=True) status_details = models.TextField(default=None, null=True, blank=True) - stripe_id = models.CharField(max_length=32, null=True, default=None, blank=True, help_text='Stripe id') + stripe_id = models.CharField(max_length=32, null=True, default=None, blank=True, help_text="Stripe id") stripe_url = models.URLField(default=None, null=True, blank=True) started_at = models.DateTimeField(null=True, default=None, blank=True) ended_at = models.DateTimeField(null=True, default=None, blank=True) @@ -117,7 +122,8 @@ class ProvisioningBill(models.Model): blank=True, null=True, default=None, - help_text='This title should describe what the Bill is about. I.e.: April\'s bill. (MAX 64 chars)') + help_text="This title should describe what the Bill is about. I.e.: April's bill. 
(MAX 64 chars)", + ) created_at = models.DateTimeField(auto_now_add=True, editable=False) updated_at = models.DateTimeField(auto_now=True, editable=False) @@ -133,18 +139,18 @@ def save(self, *args, **kwargs): super().save(*args, **kwargs) def __str__(self): - return str(self.id) + ' ' + str(self.academy) + ' - ' + str(self.total_amount) + str(self.currency_code) + return str(self.id) + " " + str(self.academy) + " - " + str(self.total_amount) + str(self.currency_code) -PENDING = 'PENDING' -PERSISTED = 'PERSISTED' -WARNING = 'WARNING' +PENDING = "PENDING" +PERSISTED = "PERSISTED" +WARNING = "WARNING" ACTIVITY_STATUS = ( - (PENDING, 'Pending'), - (PERSISTED, 'Persisted'), - (IGNORED, 'Ignored'), - (WARNING, 'Warning'), - (ERROR, 'Error'), + (PENDING, "Pending"), + (PERSISTED, "Persisted"), + (IGNORED, "Ignored"), + (WARNING, "Warning"), + (ERROR, "Error"), ) @@ -153,20 +159,19 @@ class ProvisioningConsumptionKind(models.Model): sku = models.CharField(max_length=100) def __str__(self): - return self.product_name + ' - ' + self.sku + return self.product_name + " - " + self.sku class ProvisioningPrice(models.Model): currency = models.ForeignKey(Currency, on_delete=models.CASCADE) unit_type = models.CharField(max_length=100) - price_per_unit = models.FloatField(help_text='Price paid to the provisioning vendor, E.g: Github') - multiplier = models.FloatField(blank=True, - null=False, - default=1, - help_text='To increase price in a certain percentage') + price_per_unit = models.FloatField(help_text="Price paid to the provisioning vendor, E.g: Github") + multiplier = models.FloatField( + blank=True, null=False, default=1, help_text="To increase price in a certain percentage" + ) def __str__(self): - return self.currency.code + ' - ' + self.unit_type + ' - ' + str(self.price_per_unit) + return self.currency.code + " - " + self.unit_type + " - " + str(self.price_per_unit) def get_price(self, how_many): return self.price_per_unit * self.multiplier * how_many @@ -174,7 +179,8 @@ def get_price(self, how_many): class ProvisioningConsumptionEvent(models.Model): registered_at = models.DateTimeField( - help_text='When the activity happened, this field comes form the provisioning vendor') + help_text="When the activity happened, this field comes form the provisioning vendor" + ) external_pk = models.CharField(max_length=100, blank=True, null=True, default=None) csv_row = models.IntegerField() @@ -188,15 +194,17 @@ class ProvisioningConsumptionEvent(models.Model): max_length=100, null=True, blank=False, - help_text='What assignment was the the student trying to complete with this') + help_text="What assignment was the the student trying to complete with this", + ) def __str__(self): - return str(self.quantity) + ' - ' + self.task_associated_slug + return str(self.quantity) + " - " + self.task_associated_slug class ProvisioningUserConsumption(models.Model): - username = models.CharField(max_length=80, - help_text='Native username in the provisioning platform, E.g: github username') + username = models.CharField( + max_length=80, help_text="Native username in the provisioning platform, E.g: github username" + ) hash = models.CharField(max_length=64, blank=True, null=True, default=None) kind = models.ForeignKey(ProvisioningConsumptionKind, on_delete=models.CASCADE) @@ -213,14 +221,14 @@ class ProvisioningUserConsumption(models.Model): updated_at = models.DateTimeField(auto_now=True, editable=False) def __str__(self) -> str: - return str(self.username) + ' - ' + self.kind.product_name + ' - ' + str(self.kind.sku) 
+ return str(self.username) + " - " + self.kind.product_name + " - " + str(self.kind.sku) class ProvisioningContainer(models.Model): user = models.ForeignKey(User, on_delete=models.CASCADE) web_url = models.URLField() - status = models.CharField(max_length=50, help_text='We have no control over this. Reported by the vendor') + status = models.CharField(max_length=50, help_text="We have no control over this. Reported by the vendor") display_name = models.CharField(max_length=50) last_used_at = models.DateTimeField(null=True, default=None, blank=True) provisioned_at = models.DateTimeField(null=True, default=None, blank=True) @@ -230,7 +238,8 @@ class ProvisioningContainer(models.Model): branch_name = models.CharField(max_length=100, null=True, blank=True, default=None) task_associated_slug = models.SlugField( - max_length=100, help_text='What assignment was the the student trying to complete with this') + max_length=100, help_text="What assignment was the the student trying to complete with this" + ) created_at = models.DateTimeField(auto_now_add=True, editable=False) updated_at = models.DateTimeField(auto_now=True, editable=False) diff --git a/breathecode/provisioning/receivers.py b/breathecode/provisioning/receivers.py index 9e84367c9..7e702854d 100644 --- a/breathecode/provisioning/receivers.py +++ b/breathecode/provisioning/receivers.py @@ -11,20 +11,21 @@ @receiver(monitoring_signals.stripe_webhook, sender=StripeEvent) def bill_was_paid(sender: Type[StripeEvent], instance: StripeEvent, **kwargs): - if instance.type == 'checkout.session.completed': + if instance.type == "checkout.session.completed": try: - if instance.data['payment_link']: - ProvisioningBill.objects.filter(stripe_id=instance.data['payment_link']).update( - status='PAID', paid_at=instance.created_at) + if instance.data["payment_link"]: + ProvisioningBill.objects.filter(stripe_id=instance.data["payment_link"]).update( + status="PAID", paid_at=instance.created_at + ) except Exception: - instance.status_texts['provisioning.bill_was_paid'] = 'Invalid context' - instance.status = 'ERROR' + instance.status_texts["provisioning.bill_was_paid"] = "Invalid context" + instance.status = "ERROR" instance.save() return - if 'provisioning.bill_was_paid' in instance.status_texts: - instance.status_texts.pop('provisioning.bill_was_paid') + if "provisioning.bill_was_paid" in instance.status_texts: + instance.status_texts.pop("provisioning.bill_was_paid") - instance.status = 'DONE' if len(instance.status_texts) == 0 else 'ERROR' + instance.status = "DONE" if len(instance.status_texts) == 0 else "ERROR" instance.save() diff --git a/breathecode/provisioning/serializers.py b/breathecode/provisioning/serializers.py index 879ecf099..656408ae1 100644 --- a/breathecode/provisioning/serializers.py +++ b/breathecode/provisioning/serializers.py @@ -11,6 +11,7 @@ class AcademySerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. id = serpy.Field() name = serpy.Field() @@ -18,6 +19,7 @@ class AcademySerializer(serpy.Serializer): class ContainerMeSmallSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. web_url = serpy.Field() status = serpy.Field() @@ -30,6 +32,7 @@ class ContainerMeSmallSerializer(serpy.Serializer): class ContainerMeBigSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. 
web_url = serpy.Field() status = serpy.Field() @@ -95,25 +98,31 @@ class ProvisioningContainerSerializer(serializers.ModelSerializer): class Meta: model = ProvisioningContainer - include = ('task_associated_slug', 'has_uncommitted_changes', 'branch_name', 'destination_status', - 'destination_status_text') + include = ( + "task_associated_slug", + "has_uncommitted_changes", + "branch_name", + "destination_status", + "destination_status_text", + ) def validate(self, data): - if 'slug' in data and data['slug'] is not None: + if "slug" in data and data["slug"] is not None: - if not re.match(r'^[-\w]+$', data['slug']): + if not re.match(r"^[-\w]+$", data["slug"]): raise ValidationException( f'Invalid link slug {data["slug"]}, should only contain letters, numbers and slash "-"', - slug='invalid-slug-format') + slug="invalid-slug-format", + ) - #NOTE: this have the propertly academy but it's not defined here + # NOTE: this have the propertly academy but it's not defined here return data def create(self, validated_data): from breathecode.marketing.models import ShortLink - return ShortLink.objects.create(**validated_data, author=self.context.get('request').user) + return ShortLink.objects.create(**validated_data, author=self.context.get("request").user) class ProvisioningConsumptionKindHTMLSerializer(serpy.Serializer): @@ -164,29 +173,32 @@ class ProvisioningBillSerializer(serializers.ModelSerializer): class Meta: model = ProvisioningBill - fields = ('status', ) + fields = ("status",) def validate(self, data): - if self.instance and 'status' in data and self.instance.status in ['PAID', 'ERROR']: - status = data['status'].lower() - raise ValidationException(translation( - self.context['lang'], - en=f'You cannot change the status of this bill due to it is marked as {status}', - es='No puedes cambiar el estado de esta factura debido a que esta marcada ' - f'como {status}', - slug='readonly-bill-status'), - code=400) - - if self.instance and 'status' in data and data['status'] in ['PAID', 'ERROR']: - status = data['status'].lower() - raise ValidationException(translation( - self.context['lang'], - en=f'You cannot set the status of this bill to {status} because this status is ' - 'forbidden', - es=f'No puedes cambiar el estado de esta factura a {status} porque este estado esta ' - 'prohibido', - slug='invalid-bill-status'), - code=400) + if self.instance and "status" in data and self.instance.status in ["PAID", "ERROR"]: + status = data["status"].lower() + raise ValidationException( + translation( + self.context["lang"], + en=f"You cannot change the status of this bill due to it is marked as {status}", + es="No puedes cambiar el estado de esta factura debido a que esta marcada " f"como {status}", + slug="readonly-bill-status", + ), + code=400, + ) + + if self.instance and "status" in data and data["status"] in ["PAID", "ERROR"]: + status = data["status"].lower() + raise ValidationException( + translation( + self.context["lang"], + en=f"You cannot set the status of this bill to {status} because this status is " "forbidden", + es=f"No puedes cambiar el estado de esta factura a {status} porque este estado esta " "prohibido", + slug="invalid-bill-status", + ), + code=400, + ) return data diff --git a/breathecode/provisioning/signals.py b/breathecode/provisioning/signals.py index e98c366ed..bcf22b6b9 100644 --- a/breathecode/provisioning/signals.py +++ b/breathecode/provisioning/signals.py @@ -2,6 +2,6 @@ from task_manager.django.dispatch import Emisor -emisor = Emisor('breathecode.provisioning') +emisor = 
Emisor("breathecode.provisioning") -process_stripe_event = emisor.signal('process_stripe_event') +process_stripe_event = emisor.signal("process_stripe_event") diff --git a/breathecode/provisioning/tasks.py b/breathecode/provisioning/tasks.py index 25c4d0d0c..90b15ff06 100644 --- a/breathecode/provisioning/tasks.py +++ b/breathecode/provisioning/tasks.py @@ -23,16 +23,26 @@ def get_provisioning_credit_price(): - return float(os.getenv('PROVISIONING_CREDIT_PRICE', 10)) + return float(os.getenv("PROVISIONING_CREDIT_PRICE", 10)) def get_stripe_price_id(): - return os.getenv('STRIPE_PRICE_ID', None) + return os.getenv("STRIPE_PRICE_ID", None) MONTHS = [ - 'January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', - 'December' + "January", + "February", + "March", + "April", + "May", + "June", + "July", + "August", + "September", + "October", + "November", + "December", ] PANDAS_ROWS_LIMIT = 100 @@ -41,56 +51,64 @@ def get_stripe_price_id(): @task(priority=TaskPriority.BILL.value) def calculate_bill_amounts(hash: str, *, force: bool = False, **_: Any): - logger.info(f'Starting calculate_bill_amounts for hash {hash}') + logger.info(f"Starting calculate_bill_amounts for hash {hash}") bills = ProvisioningBill.objects.filter(hash=hash) if force: - bills = bills.exclude(status='PAID') + bills = bills.exclude(status="PAID") else: - bills = bills.exclude(status__in=['DISPUTED', 'IGNORED', 'PAID']) + bills = bills.exclude(status__in=["DISPUTED", "IGNORED", "PAID"]) if not bills.exists(): - raise RetryTask(f'Does not exists bills for hash {hash}') + raise RetryTask(f"Does not exists bills for hash {hash}") - if bills[0].vendor.name == 'Gitpod': - fields = ['id', 'credits', 'startTime', 'endTime', 'kind', 'userName', 'contextURL'] + if bills[0].vendor.name == "Gitpod": + fields = ["id", "credits", "startTime", "endTime", "kind", "userName", "contextURL"] - elif bills[0].vendor.name == 'Codespaces': + elif bills[0].vendor.name == "Codespaces": fields = [ - 'Username', 'Date', 'Product', 'SKU', 'Quantity', 'Unit Type', 'Price Per Unit ($)', 'Multiplier', 'Owner' + "Username", + "Date", + "Product", + "SKU", + "Quantity", + "Unit Type", + "Price Per Unit ($)", + "Multiplier", + "Owner", ] storage = Storage() - cloud_file = storage.file(os.getenv('PROVISIONING_BUCKET', None), hash) + cloud_file = storage.file(os.getenv("PROVISIONING_BUCKET", None), hash) if not cloud_file.exists(): - raise AbortTask(f'File {hash} not found') + raise AbortTask(f"File {hash} not found") csv_string_io = BytesIO() cloud_file.download(csv_string_io) csv_string_io = cut_csv(csv_string_io, first=1) csv_string_io.seek(0) - df1 = pd.read_csv(csv_string_io, sep=',', usecols=fields) + df1 = pd.read_csv(csv_string_io, sep=",", usecols=fields) csv_string_io = BytesIO() cloud_file.download(csv_string_io) csv_string_io = cut_csv(csv_string_io, last=1) csv_string_io.seek(0) - df2 = pd.read_csv(csv_string_io, sep=',', usecols=fields) + df2 = pd.read_csv(csv_string_io, sep=",", usecols=fields) - if bills[0].vendor.name == 'Gitpod': - first = df2['startTime'][0].split('-') - last = df1['startTime'][0].split('-') + if bills[0].vendor.name == "Gitpod": + first = df2["startTime"][0].split("-") + last = df1["startTime"][0].split("-") - elif bills[0].vendor.name == 'Codespaces': - first = df1['Date'][0].split('-') - last = df2['Date'][0].split('-') + elif bills[0].vendor.name == "Codespaces": + first = df1["Date"][0].split("-") + last = df2["Date"][0].split("-") - first[2] = first[2].split('T')[0] - 
last[2] = last[2].split('T')[0] + first[2] = first[2].split("T")[0] + last[2] = last[2].split("T")[0] month = MONTHS[int(first[1]) - 1] @@ -99,7 +117,7 @@ def calculate_bill_amounts(hash: str, *, force: bool = False, **_: Any): for bill in bills: amount = 0 - for activity in ProvisioningUserConsumption.objects.filter(bills=bill, status__in=['PERSISTED', 'WARNING']): + for activity in ProvisioningUserConsumption.objects.filter(bills=bill, status__in=["PERSISTED", "WARNING"]): consumption_amount = 0 consumption_quantity = 0 for item in activity.events.all(): @@ -112,7 +130,7 @@ def calculate_bill_amounts(hash: str, *, force: bool = False, **_: Any): amount += consumption_amount - bill.status = 'DUE' if amount else 'PAID' + bill.status = "DUE" if amount else "PAID" if amount: credit_price = get_provisioning_credit_price() @@ -129,12 +147,12 @@ def calculate_bill_amounts(hash: str, *, force: bool = False, **_: Any): bill.started_at = first bill.ended_at = last - bill.title = f'{month} {first.year}' + bill.title = f"{month} {first.year}" bill.save() def reverse_upload(hash: str, **_: Any): - logger.info(f'Canceling upload for hash {hash}') + logger.info(f"Canceling upload for hash {hash}") ProvisioningConsumptionEvent.objects.filter(provisioninguserconsumption__hash=hash).delete() ProvisioningUserConsumption.objects.filter(hash=hash).delete() @@ -143,38 +161,38 @@ def reverse_upload(hash: str, **_: Any): @task(reverse=reverse_upload, priority=TaskPriority.BILL.value) def upload(hash: str, *, page: int = 0, force: bool = False, task_manager_id: int = 0, **_: Any): - logger.info(f'Starting upload for hash {hash}') + logger.info(f"Starting upload for hash {hash}") limit = PANDAS_ROWS_LIMIT start = page * limit end = start + limit context = { - 'provisioning_bills': {}, - 'provisioning_vendors': {}, - 'github_academy_user_logs': {}, - 'provisioning_activity_prices': {}, - 'provisioning_activity_kinds': {}, - 'provisioning_multiplier': actions.get_multiplier(), - 'currencies': {}, - 'profile_academies': {}, - 'hash': hash, - 'limit': timezone.now(), - 'logs': {}, + "provisioning_bills": {}, + "provisioning_vendors": {}, + "github_academy_user_logs": {}, + "provisioning_activity_prices": {}, + "provisioning_activity_kinds": {}, + "provisioning_multiplier": actions.get_multiplier(), + "currencies": {}, + "profile_academies": {}, + "hash": hash, + "limit": timezone.now(), + "logs": {}, } storage = Storage() - cloud_file = storage.file(os.getenv('PROVISIONING_BUCKET', None), hash) + cloud_file = storage.file(os.getenv("PROVISIONING_BUCKET", None), hash) if not cloud_file.exists(): - raise RetryTask(f'File {hash} not found') + raise RetryTask(f"File {hash} not found") - bills = ProvisioningBill.objects.filter(hash=hash).exclude(status='PENDING') + bills = ProvisioningBill.objects.filter(hash=hash).exclude(status="PENDING") if bills.exists() and not force: - raise AbortTask(f'File {hash} already processed') + raise AbortTask(f"File {hash} already processed") - pending_bills = bills.exclude(status__in=['DISPUTED', 'IGNORED', 'PAID']) + pending_bills = bills.exclude(status__in=["DISPUTED", "IGNORED", "PAID"]) if force and pending_bills.count() != bills.count(): - raise AbortTask('Cannot force upload because there are bills with status DISPUTED, IGNORED or PAID') + raise AbortTask("Cannot force upload because there are bills with status DISPUTED, IGNORED or PAID") if force: for bill in pending_bills: @@ -187,37 +205,52 @@ def upload(hash: str, *, page: int = 0, force: bool = False, task_manager_id: in 
csv_string_io = cut_csv(csv_string_io, start=start, end=end) csv_string_io.seek(0) - df = pd.read_csv(csv_string_io, sep=',') + df = pd.read_csv(csv_string_io, sep=",") handler = None # edit it - fields = ['id', 'credits', 'startTime', 'endTime', 'kind', 'userName', 'contextURL'] + fields = ["id", "credits", "startTime", "endTime", "kind", "userName", "contextURL"] if len(df.keys().intersection(fields)) == len(fields): handler = actions.add_gitpod_activity if not handler: - fields = ['Username', 'Date', 'Product', 'SKU', 'Quantity', 'Unit Type', 'Price Per Unit ($)', 'Multiplier'] + fields = ["Username", "Date", "Product", "SKU", "Quantity", "Unit Type", "Price Per Unit ($)", "Multiplier"] if not handler and len(df.keys().intersection(fields)) == len(fields): handler = actions.add_codespaces_activity if not handler: fields = [ - 'organization', 'consumption_period_id', 'consumption_period_start', 'consumption_period_end', - 'billing_status', 'total_spent_period', 'consumption_item_id', 'user_id', 'email', 'consumption_type', - 'pricing_type', 'total_spent', 'total_tokens', 'model', 'purpose_id', 'purpose_slug', 'purpose', - 'created_at', 'github_username' + "organization", + "consumption_period_id", + "consumption_period_start", + "consumption_period_end", + "billing_status", + "total_spent_period", + "consumption_item_id", + "user_id", + "email", + "consumption_type", + "pricing_type", + "total_spent", + "total_tokens", + "model", + "purpose_id", + "purpose_slug", + "purpose", + "created_at", + "github_username", ] if not handler and len(df.keys().intersection(fields)) == len(fields): handler = actions.add_rigobot_activity if not handler: - raise AbortTask(f'File {hash} has an unsupported origin or the provider had changed the file format') + raise AbortTask(f"File {hash} has an unsupported origin or the provider had changed the file format") prev_bill = ProvisioningBill.objects.filter(hash=hash).first() if prev_bill: - context['limit'] = prev_bill.created_at + context["limit"] = prev_bill.created_at try: i = 0 @@ -230,37 +263,36 @@ def upload(hash: str, *, page: int = 0, force: bool = False, task_manager_id: in break except Exception as e: - raise AbortTask(f'File {hash} cannot be processed due to: {str(e)}') + raise AbortTask(f"File {hash} cannot be processed due to: {str(e)}") - for bill in context['provisioning_bills'].values(): + for bill in context["provisioning_bills"].values(): if not ProvisioningUserConsumption.objects.filter(bills=bill).exists(): bill.delete() if len(df) == limit: upload.delay(hash, page=page + 1, task_manager_id=task_manager_id) - elif not ProvisioningUserConsumption.objects.filter(hash=hash, status='ERROR').exists(): + elif not ProvisioningUserConsumption.objects.filter(hash=hash, status="ERROR").exists(): calculate_bill_amounts.delay(hash) - elif ProvisioningUserConsumption.objects.filter(hash=hash, status='ERROR').exists(): - ProvisioningBill.objects.filter(hash=hash).update(status='ERROR') + elif ProvisioningUserConsumption.objects.filter(hash=hash, status="ERROR").exists(): + ProvisioningBill.objects.filter(hash=hash).update(status="ERROR") @task(priority=TaskPriority.BACKGROUND.value) def archive_provisioning_bill(bill_id: int, **_: Any): - logger.info(f'Starting archive_provisioning_bills for bill id {bill_id}') + logger.info(f"Starting archive_provisioning_bills for bill id {bill_id}") now = timezone.now() - bill = ProvisioningBill.objects.filter(id=bill_id, - status='PAID', - paid_at__lte=now - relativedelta(months=1), - 
archived_at__isnull=True).first() + bill = ProvisioningBill.objects.filter( + id=bill_id, status="PAID", paid_at__lte=now - relativedelta(months=1), archived_at__isnull=True + ).first() if not bill: - raise AbortTask(f'Bill {bill_id} not found or requirements not met') + raise AbortTask(f"Bill {bill_id} not found or requirements not met") q = ProvisioningConsumptionEvent.objects.filter(provisioninguserconsumption__hash=bill.hash) - while (pks_to_delete := q[:DELETE_LIMIT].values_list('pk', flat=True)): + while pks_to_delete := q[:DELETE_LIMIT].values_list("pk", flat=True): ProvisioningConsumptionEvent.objects.filter(pk__in=list(pks_to_delete)).delete() bill.archived_at = now diff --git a/breathecode/provisioning/tests/actions/tests_get_provisioning_vendor.py b/breathecode/provisioning/tests/actions/tests_get_provisioning_vendor.py index f0247cdf9..9ef32a2be 100644 --- a/breathecode/provisioning/tests/actions/tests_get_provisioning_vendor.py +++ b/breathecode/provisioning/tests/actions/tests_get_provisioning_vendor.py @@ -1,6 +1,7 @@ """ Test /answer """ + import pytest from random import randint from unittest.mock import MagicMock, call, patch @@ -30,11 +31,13 @@ def test__get_provisioning_vendor(self): profile_academy=1, ) model2 = self.bc.database.create(profile_academy=1) - model3 = self.bc.database.create(academy=model.profile_academy.academy, - provisioning_vendor={ - 'name': 'gitpod', - }, - provisioning_profile={'cohorts': [model.cohort.id]}) + model3 = self.bc.database.create( + academy=model.profile_academy.academy, + provisioning_vendor={ + "name": "gitpod", + }, + provisioning_profile={"cohorts": [model.cohort.id]}, + ) vendor = get_provisioning_vendor(model.user.id, model.profile_academy, model.cohort) self.assertEqual(vendor.id, model3.provisioning_vendor.id) @@ -51,20 +54,28 @@ def test__get_provisioning_vendor_two_vendors_same_member(self): cohort=1, profile_academy=1, ) - vendor1 = self.bc.database.create(provisioning_vendor={ - 'name': 'gitpod', - }, ) - vendor2 = self.bc.database.create(provisioning_vendor={ - 'name': 'github', - }, ) + vendor1 = self.bc.database.create( + provisioning_vendor={ + "name": "gitpod", + }, + ) + vendor2 = self.bc.database.create( + provisioning_vendor={ + "name": "github", + }, + ) - profile1 = self.bc.database.create(provisioning_vendor=vendor1, - academy=model.profile_academy.academy, - provisioning_profile={'members': [model.profile_academy.id]}) + profile1 = self.bc.database.create( + provisioning_vendor=vendor1, + academy=model.profile_academy.academy, + provisioning_profile={"members": [model.profile_academy.id]}, + ) - profile2 = self.bc.database.create(provisioning_vendor=vendor2, - academy=model.profile_academy.academy, - provisioning_profile={'members': [model.profile_academy.id]}) + profile2 = self.bc.database.create( + provisioning_vendor=vendor2, + academy=model.profile_academy.academy, + provisioning_profile={"members": [model.profile_academy.id]}, + ) with pytest.raises(Exception): vendor = get_provisioning_vendor(model.user.id, model.profile_academy, model.cohort) @@ -78,20 +89,28 @@ def test__get_provisioning_vendor_two_vendors_same_cohort(self): cohort=1, profile_academy=1, ) - vendor1 = self.bc.database.create(provisioning_vendor={ - 'name': 'gitpod', - }, ) - vendor2 = self.bc.database.create(provisioning_vendor={ - 'name': 'github', - }, ) + vendor1 = self.bc.database.create( + provisioning_vendor={ + "name": "gitpod", + }, + ) + vendor2 = self.bc.database.create( + provisioning_vendor={ + "name": "github", + }, + ) - 
profile1 = self.bc.database.create(provisioning_vendor=vendor1, - academy=model.profile_academy.academy, - provisioning_profile={'cohorts': [model.cohort.id]}) + profile1 = self.bc.database.create( + provisioning_vendor=vendor1, + academy=model.profile_academy.academy, + provisioning_profile={"cohorts": [model.cohort.id]}, + ) - profile2 = self.bc.database.create(provisioning_vendor=vendor2, - academy=model.profile_academy.academy, - provisioning_profile={'cohorts': [model.cohort.id]}) + profile2 = self.bc.database.create( + provisioning_vendor=vendor2, + academy=model.profile_academy.academy, + provisioning_profile={"cohorts": [model.cohort.id]}, + ) with pytest.raises(Exception): vendor = get_provisioning_vendor(model.user.id, model.profile_academy, model.cohort) @@ -105,38 +124,38 @@ def test__get_provisioning_vendor_member_has_priority(self): vendor1 = self.bc.database.create( provisioning_vendor=1, provisioning_vendor_kwargs={ - 'name': 'gitpod', + "name": "gitpod", }, ) vendor2 = self.bc.database.create( provisioning_vendor=1, provisioning_vendor_kwargs={ - 'name': 'github', + "name": "github", + }, + ) + + profile1 = self.bc.database.create( + academy=model.profile_academy.academy, + provisioning_profile=1, + provisioning_profile_kwargs={ + "vendor": vendor1.provisioning_vendor, + "members": None, + "cohorts": None, + "academy": model.profile_academy.academy, }, ) - profile1 = self.bc.database.create(academy=model.profile_academy.academy, - provisioning_profile=1, - provisioning_profile_kwargs={ - 'vendor': vendor1.provisioning_vendor, - 'members': None, - 'cohorts': None, - 'academy': model.profile_academy.academy - }) - - profile2 = self.bc.database.create(academy=model.profile_academy.academy, - provisioning_profile=1, - provisioning_profile_kwargs={ - 'vendor': vendor2.provisioning_vendor, - 'members': [model.profile_academy.id] - }) - - profile3 = self.bc.database.create(academy=model.profile_academy.academy, - provisioning_profile=1, - provisioning_profile_kwargs={ - 'vendor': vendor1.provisioning_vendor, - 'cohorts': [model.cohort.id] - }) + profile2 = self.bc.database.create( + academy=model.profile_academy.academy, + provisioning_profile=1, + provisioning_profile_kwargs={"vendor": vendor2.provisioning_vendor, "members": [model.profile_academy.id]}, + ) + + profile3 = self.bc.database.create( + academy=model.profile_academy.academy, + provisioning_profile=1, + provisioning_profile_kwargs={"vendor": vendor1.provisioning_vendor, "cohorts": [model.cohort.id]}, + ) vendor = get_provisioning_vendor(model.user.id, model.profile_academy, model.cohort) self.assertEqual(vendor.name, vendor2.provisioning_vendor.name) @@ -150,31 +169,32 @@ def test__get_provisioning_vendor_cohort_has_second_priority(self): vendor1 = self.bc.database.create( provisioning_vendor=1, provisioning_vendor_kwargs={ - 'name': 'gitpod', + "name": "gitpod", }, ) vendor2 = self.bc.database.create( provisioning_vendor=1, provisioning_vendor_kwargs={ - 'name': 'github', + "name": "github", }, ) - profile1 = self.bc.database.create(academy=model.profile_academy.academy, - provisioning_profile=1, - provisioning_profile_kwargs={ - 'vendor': vendor1.provisioning_vendor, - 'members': None, - 'cohorts': None, - 'academy': model.profile_academy.academy - }) - - profile2 = self.bc.database.create(academy=model.profile_academy.academy, - provisioning_profile=1, - provisioning_profile_kwargs={ - 'vendor': vendor2.provisioning_vendor, - 'cohorts': [model.cohort.id] - }) + profile1 = self.bc.database.create( + 
academy=model.profile_academy.academy, + provisioning_profile=1, + provisioning_profile_kwargs={ + "vendor": vendor1.provisioning_vendor, + "members": None, + "cohorts": None, + "academy": model.profile_academy.academy, + }, + ) + + profile2 = self.bc.database.create( + academy=model.profile_academy.academy, + provisioning_profile=1, + provisioning_profile_kwargs={"vendor": vendor2.provisioning_vendor, "cohorts": [model.cohort.id]}, + ) vendor = get_provisioning_vendor(model.user.id, model.profile_academy, model.cohort) self.assertEqual(vendor.name, vendor2.provisioning_vendor.name) diff --git a/breathecode/provisioning/tests/management/commands/tests_archive_provisioning_bills.py b/breathecode/provisioning/tests/management/commands/tests_archive_provisioning_bills.py index 920e6e87e..d82b06a91 100644 --- a/breathecode/provisioning/tests/management/commands/tests_archive_provisioning_bills.py +++ b/breathecode/provisioning/tests/management/commands/tests_archive_provisioning_bills.py @@ -18,76 +18,92 @@ class AcademyCohortTestSuite(ProvisioningTestCase): # When: No bills # Then: doesn't do anything - @patch('django.core.management.base.OutputWrapper.write', MagicMock()) - @patch('breathecode.provisioning.tasks.archive_provisioning_bill.delay', MagicMock()) + @patch("django.core.management.base.OutputWrapper.write", MagicMock()) + @patch("breathecode.provisioning.tasks.archive_provisioning_bill.delay", MagicMock()) def test_0_ibills(self): command = Command() result = command.handle() self.assertEqual(result, None) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningBill'), []) - self.assertEqual(OutputWrapper.write.call_args_list, [ - call('No provisioning bills to clean'), - ]) + self.assertEqual(self.bc.database.list_of("provisioning.ProvisioningBill"), []) + self.assertEqual( + OutputWrapper.write.call_args_list, + [ + call("No provisioning bills to clean"), + ], + ) self.bc.check.calls(archive_provisioning_bill.delay.call_args_list, []) # Given: 1 ProvisioningBill # When: it's paid, not archived but paid_at is less than 1 month ago # Then: doesn't do anything - @patch('django.core.management.base.OutputWrapper.write', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.provisioning.tasks.archive_provisioning_bill.delay', MagicMock()) + @patch("django.core.management.base.OutputWrapper.write", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.provisioning.tasks.archive_provisioning_bill.delay", MagicMock()) def test__bill__requirements_not_meet(self): - provisioning_bills = [{ - 'status': - 'PAID', - 'paid_at': - self.bc.datetime.now() - relativedelta(months=1) + relativedelta(days=random.randint(1, 28)), - 'archived_at': - None, - } for _ in range(2)] + provisioning_bills = [ + { + "status": "PAID", + "paid_at": self.bc.datetime.now() - relativedelta(months=1) + relativedelta(days=random.randint(1, 28)), + "archived_at": None, + } + for _ in range(2) + ] model = self.bc.database.create(provisioning_bill=provisioning_bills) command = Command() result = command.handle() self.assertEqual(result, None) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningBill'), - self.bc.format.to_dict(model.provisioning_bill)) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningBill"), self.bc.format.to_dict(model.provisioning_bill) + ) - self.assertEqual(OutputWrapper.write.call_args_list, [ - call('No provisioning bills to clean'), - ]) + 
self.assertEqual( + OutputWrapper.write.call_args_list, + [ + call("No provisioning bills to clean"), + ], + ) self.bc.check.calls(archive_provisioning_bill.delay.call_args_list, []) # Given: 1 ProvisioningBill # When: it's paid, not archived and paid_at is more than 1 month ago # Then: archive it - @patch('django.core.management.base.OutputWrapper.write', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.provisioning.tasks.archive_provisioning_bill.delay', MagicMock()) + @patch("django.core.management.base.OutputWrapper.write", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.provisioning.tasks.archive_provisioning_bill.delay", MagicMock()) def test_1_bill__requirements_meet(self): - provisioning_bills = [{ - 'status': 'PAID', - 'paid_at': self.bc.datetime.now() - relativedelta(months=1, days=1), - 'archived_at': None, - } for _ in range(2)] + provisioning_bills = [ + { + "status": "PAID", + "paid_at": self.bc.datetime.now() - relativedelta(months=1, days=1), + "archived_at": None, + } + for _ in range(2) + ] model = self.bc.database.create(provisioning_bill=provisioning_bills) command = Command() result = command.handle() self.assertEqual(result, None) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningBill'), [ - { - **self.bc.format.to_dict(model.provisioning_bill[0]), - }, - { - **self.bc.format.to_dict(model.provisioning_bill[1]), - }, - ]) - self.assertEqual(OutputWrapper.write.call_args_list, [ - call('Cleaning 1, 2 provisioning bills'), - ]) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningBill"), + [ + { + **self.bc.format.to_dict(model.provisioning_bill[0]), + }, + { + **self.bc.format.to_dict(model.provisioning_bill[1]), + }, + ], + ) + self.assertEqual( + OutputWrapper.write.call_args_list, + [ + call("Cleaning 1, 2 provisioning bills"), + ], + ) self.bc.check.calls(archive_provisioning_bill.delay.call_args_list, [call(1), call(2)]) diff --git a/breathecode/provisioning/tests/mixins/__init__.py b/breathecode/provisioning/tests/mixins/__init__.py index 1fff7dfd8..12e0e8d44 100644 --- a/breathecode/provisioning/tests/mixins/__init__.py +++ b/breathecode/provisioning/tests/mixins/__init__.py @@ -1,4 +1,5 @@ """ Media mixins """ + from .provisioning_test_case import ProvisioningTestCase # noqa: F401 diff --git a/breathecode/provisioning/tests/mixins/provisioning_test_case.py b/breathecode/provisioning/tests/mixins/provisioning_test_case.py index 847b0a242..d0de0e562 100644 --- a/breathecode/provisioning/tests/mixins/provisioning_test_case.py +++ b/breathecode/provisioning/tests/mixins/provisioning_test_case.py @@ -1,6 +1,7 @@ """ Collections of mixins used to login in authorize microservice """ + from rest_framework.test import APITestCase from breathecode.tests.mixins import GenerateModelsMixin, CacheMixin, BreathecodeMixin diff --git a/breathecode/provisioning/tests/signals/tests_process_stripe_event.py b/breathecode/provisioning/tests/signals/tests_process_stripe_event.py index 390c51532..354a6c1ec 100644 --- a/breathecode/provisioning/tests/signals/tests_process_stripe_event.py +++ b/breathecode/provisioning/tests/signals/tests_process_stripe_event.py @@ -1,6 +1,7 @@ """ Test /answer/:id """ + import math import random from django.utils import timezone @@ -12,7 +13,7 @@ from breathecode.tests.mixins.legacy import LegacyAPITestCase UTC_NOW = timezone.now() -STRIPE_ID = f'price_{random.randint(1000, 9999)}' +STRIPE_ID = 
f"price_{random.randint(1000, 9999)}" def apply_get_env(configuration={}): @@ -28,10 +29,10 @@ class TestMakeBills(LegacyAPITestCase): # When: with no bills and event type isn't checkout.session.completed # Then: nothing happens - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event', MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event", MagicMock()) def test_nothing(self, enable_signals): enable_signals() @@ -39,101 +40,113 @@ def test_nothing(self, enable_signals): db = self.bc.format.to_dict(model.stripe_event) monitoring_signals.stripe_webhook.send(instance=model.stripe_event, sender=model.stripe_event.__class__) - self.assertEqual(self.bc.database.list_of('monitoring.StripeEvent'), [ - { - **db, - 'status_texts': {}, - }, - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningBill'), []) + self.assertEqual( + self.bc.database.list_of("monitoring.StripeEvent"), + [ + { + **db, + "status_texts": {}, + }, + ], + ) + self.assertEqual(self.bc.database.list_of("provisioning.ProvisioningBill"), []) # Given: 1 StripeEvent # When: with no bills and event type is checkout.session.completed, bad context # Then: nothing happens - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event', MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event", MagicMock()) def test_bad_context(self, enable_signals): enable_signals() - stripe_event = {'type': 'checkout.session.completed'} + stripe_event = {"type": "checkout.session.completed"} model = self.bc.database.create(stripe_event=stripe_event) db = self.bc.format.to_dict(model.stripe_event) monitoring_signals.stripe_webhook.send(instance=model.stripe_event, sender=model.stripe_event.__class__) - self.assertEqual(self.bc.database.list_of('monitoring.StripeEvent'), [ - { - **db, - 'status': 'ERROR', - 'status_texts': { - 'provisioning.bill_was_paid': 'Invalid context', + self.assertEqual( + self.bc.database.list_of("monitoring.StripeEvent"), + [ + { + **db, + "status": "ERROR", + "status_texts": { + "provisioning.bill_was_paid": "Invalid context", + }, }, - }, - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningBill'), []) + ], + ) + self.assertEqual(self.bc.database.list_of("provisioning.ProvisioningBill"), []) # Given: 1 StripeEvent # When: with no bills and event type is checkout.session.completed # Then: nothing happens - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event', MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", 
MagicMock()) + @patch("breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event", MagicMock()) def test_no_bills(self, enable_signals): enable_signals() stripe_event = { - 'type': 'checkout.session.completed', - 'data': { - 'payment_link': STRIPE_ID, + "type": "checkout.session.completed", + "data": { + "payment_link": STRIPE_ID, }, } model = self.bc.database.create(stripe_event=stripe_event) db = self.bc.format.to_dict(model.stripe_event) monitoring_signals.stripe_webhook.send(instance=model.stripe_event, sender=model.stripe_event.__class__) - self.assertEqual(self.bc.database.list_of('monitoring.StripeEvent'), [ - { - **db, - 'status_texts': {}, - 'status': 'DONE', - }, - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningBill'), []) + self.assertEqual( + self.bc.database.list_of("monitoring.StripeEvent"), + [ + { + **db, + "status_texts": {}, + "status": "DONE", + }, + ], + ) + self.assertEqual(self.bc.database.list_of("provisioning.ProvisioningBill"), []) # Given: 1 StripeEvent, 2 ProvisioningBills # When: with bills and event type is checkout.session.completed # Then: nothing happens - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event', MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event", MagicMock()) def test_not_related_bills(self, enable_signals): enable_signals() stripe_event = { - 'type': 'checkout.session.completed', - 'data': { - 'payment_link': STRIPE_ID, + "type": "checkout.session.completed", + "data": { + "payment_link": STRIPE_ID, }, } model = self.bc.database.create(stripe_event=stripe_event, provisioning_bill=2) db = self.bc.format.to_dict(model.stripe_event) monitoring_signals.stripe_webhook.send(instance=model.stripe_event, sender=model.stripe_event.__class__) - self.assertEqual(self.bc.database.list_of('monitoring.StripeEvent'), [ - { - **db, - 'status_texts': {}, - 'status': 'DONE', - }, - ]) self.assertEqual( - self.bc.database.list_of('provisioning.ProvisioningBill'), + self.bc.database.list_of("monitoring.StripeEvent"), + [ + { + **db, + "status_texts": {}, + "status": "DONE", + }, + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningBill"), self.bc.format.to_dict(model.provisioning_bill), ) @@ -141,35 +154,41 @@ def test_not_related_bills(self, enable_signals): # When: with bills and event type is checkout.session.completed # Then: nothing happens - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event', MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event", MagicMock()) def test_a_related_bill(self, enable_signals): enable_signals() stripe_event = { - 'type': 'checkout.session.completed', - 'data': { - 'payment_link': STRIPE_ID, + "type": "checkout.session.completed", + "data": { + "payment_link": STRIPE_ID, 
}, } - provisioning_bill = {'stripe_id': STRIPE_ID} + provisioning_bill = {"stripe_id": STRIPE_ID} model = self.bc.database.create(stripe_event=stripe_event, provisioning_bill=provisioning_bill) db = self.bc.format.to_dict(model.stripe_event) monitoring_signals.stripe_webhook.send(instance=model.stripe_event, sender=model.stripe_event.__class__) - self.assertEqual(self.bc.database.list_of('monitoring.StripeEvent'), [ - { - **db, - 'status': 'DONE', - 'status_texts': {}, - }, - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningBill'), [ - { - **self.bc.format.to_dict(model.provisioning_bill), - 'status': 'PAID', - 'paid_at': model.stripe_event.created_at, - }, - ]) + self.assertEqual( + self.bc.database.list_of("monitoring.StripeEvent"), + [ + { + **db, + "status": "DONE", + "status_texts": {}, + }, + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningBill"), + [ + { + **self.bc.format.to_dict(model.provisioning_bill), + "status": "PAID", + "paid_at": model.stripe_event.created_at, + }, + ], + ) diff --git a/breathecode/provisioning/tests/tasks/tests_archive_provisioning_bill.py b/breathecode/provisioning/tests/tasks/tests_archive_provisioning_bill.py index 8bbd42233..031c1adff 100644 --- a/breathecode/provisioning/tests/tasks/tests_archive_provisioning_bill.py +++ b/breathecode/provisioning/tests/tasks/tests_archive_provisioning_bill.py @@ -19,82 +19,100 @@ class AcademyCohortTestSuite(ProvisioningTestCase): # When: No invites # Then: Shouldn't do anything - @patch('logging.Logger.error', MagicMock()) + @patch("logging.Logger.error", MagicMock()) def test_0_bills(self): archive_provisioning_bill.delay(1) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningBill'), []) - self.bc.check.calls(Logger.error.call_args_list, [ - call('Bill 1 not found or requirements not met', exc_info=True), - ]) + self.assertEqual(self.bc.database.list_of("provisioning.ProvisioningBill"), []) + self.bc.check.calls( + Logger.error.call_args_list, + [ + call("Bill 1 not found or requirements not met", exc_info=True), + ], + ) # Given: 2 UserInvite, 1 Academy # When: email is not validated and academy is not available as saas # Then: validate all emails - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('logging.Logger.error', MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.error", MagicMock()) def test_2_bills__requirements_not_meet(self): hash = self.bc.fake.slug() provisioning_bill = { - 'status': 'PAID', - 'paid_at': self.bc.datetime.now() - relativedelta(months=1) + relativedelta(days=random.randint(1, 28)), - 'archived_at': None, - 'hash': hash, + "status": "PAID", + "paid_at": self.bc.datetime.now() - relativedelta(months=1) + relativedelta(days=random.randint(1, 28)), + "archived_at": None, + "hash": hash, } provisioning_user_consumption = { - 'hash': hash, + "hash": hash, } - model = self.bc.database.create(provisioning_bill=provisioning_bill, - provisioning_user_consumption=provisioning_user_consumption, - provisioning_consumption_event=10) + model = self.bc.database.create( + provisioning_bill=provisioning_bill, + provisioning_user_consumption=provisioning_user_consumption, + provisioning_consumption_event=10, + ) # with self.assertRaisesMessage(AbortTask, 'Bill 1 not found or requirements not met'): archive_provisioning_bill.delay(1) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningBill'), [ - 
self.bc.format.to_dict(model.provisioning_bill), - ]) - - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningConsumptionEvent'), - self.bc.format.to_dict(model.provisioning_consumption_event)) - - self.bc.check.calls(Logger.error.call_args_list, [ - call('Bill 1 not found or requirements not met', exc_info=True), - ]) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningBill"), + [ + self.bc.format.to_dict(model.provisioning_bill), + ], + ) + + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningConsumptionEvent"), + self.bc.format.to_dict(model.provisioning_consumption_event), + ) + + self.bc.check.calls( + Logger.error.call_args_list, + [ + call("Bill 1 not found or requirements not met", exc_info=True), + ], + ) # Given: 2 UserInvite, 1 Academy, 1 Cohort # When: email is not validated and cohort from an academy is not available as saas # Then: validate all emails - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('logging.Logger.error', MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.error", MagicMock()) def test_2_bills__requirements_meet(self): hash = self.bc.fake.slug() provisioning_bill = { - 'status': 'PAID', - 'paid_at': self.bc.datetime.now() - relativedelta(months=1, days=1), - 'archived_at': None, - 'hash': hash, + "status": "PAID", + "paid_at": self.bc.datetime.now() - relativedelta(months=1, days=1), + "archived_at": None, + "hash": hash, } provisioning_user_consumption = { - 'hash': hash, + "hash": hash, } - model = self.bc.database.create(provisioning_bill=provisioning_bill, - provisioning_user_consumption=provisioning_user_consumption, - provisioning_consumption_event=10) + model = self.bc.database.create( + provisioning_bill=provisioning_bill, + provisioning_user_consumption=provisioning_user_consumption, + provisioning_consumption_event=10, + ) archive_provisioning_bill.delay(1) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningBill'), [ - { - **self.bc.format.to_dict(model.provisioning_bill), - 'archived_at': UTC_NOW, - }, - ]) - - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningConsumptionEvent'), []) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningBill"), + [ + { + **self.bc.format.to_dict(model.provisioning_bill), + "archived_at": UTC_NOW, + }, + ], + ) + + self.assertEqual(self.bc.database.list_of("provisioning.ProvisioningConsumptionEvent"), []) self.bc.check.calls(Logger.error.call_args_list, []) diff --git a/breathecode/provisioning/tests/tasks/tests_calculate_bill_amounts.py b/breathecode/provisioning/tests/tasks/tests_calculate_bill_amounts.py index 77d27aa6c..99de53421 100644 --- a/breathecode/provisioning/tests/tasks/tests_calculate_bill_amounts.py +++ b/breathecode/provisioning/tests/tasks/tests_calculate_bill_amounts.py @@ -1,6 +1,7 @@ """ Test /answer/:id """ + import logging import math import os @@ -20,13 +21,23 @@ from ..mixins import ProvisioningTestCase UTC_NOW = timezone.now() -STRIPE_PRICE_ID = f'price_{random.randint(1000, 9999)}' +STRIPE_PRICE_ID = f"price_{random.randint(1000, 9999)}" CREDIT_PRICE = random.randint(1, 20) -GOOGLE_CLOUD_KEY = os.getenv('GOOGLE_CLOUD_KEY', None) +GOOGLE_CLOUD_KEY = os.getenv("GOOGLE_CLOUD_KEY", None) MONTHS = [ - 'January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', - 'December' + "January", + "February", + "March", + "April", + "May", + "June", + 
"July", + "August", + "September", + "October", + "November", + "December", ] fake = Faker() @@ -34,7 +45,7 @@ def datetime_to_iso(date) -> str: - return re.sub(r'\+00:00$', 'Z', date.replace(tzinfo=UTC).isoformat()) + return re.sub(r"\+00:00$", "Z", date.replace(tzinfo=UTC).isoformat()) def apply_get_env(configuration={}): @@ -56,7 +67,7 @@ def csv_file_mock_inner(file): def datetime_to_show_date(date) -> str: - return date.strftime('%Y-%m-%d') + return date.strftime("%Y-%m-%d") def codespaces_csv(lines=1, data={}): @@ -73,16 +84,16 @@ def codespaces_csv(lines=1, data={}): # dictionary of lists return { - 'Repository Slug': repository_slugs, - 'Username': usernames, - 'Date': dates, - 'Product': products, - 'SKU': skus, - 'Quantity': quantities, - 'Unit Type': unit_types, - 'Price Per Unit ($)': price_per_units, - 'Multiplier': multipliers, - 'Owner': owners, + "Repository Slug": repository_slugs, + "Username": usernames, + "Date": dates, + "Product": products, + "SKU": skus, + "Quantity": quantities, + "Unit Type": unit_types, + "Price Per Unit ($)": price_per_units, + "Multiplier": multipliers, + "Owner": owners, **data, } @@ -93,17 +104,17 @@ def gitpod_csv(lines=1, data={}): effective_times = [datetime_to_iso(UTC_NOW - timedelta(days=n)) for n in range(lines)] kinds = [fake.slug() for _ in range(lines)] usernames = [fake.slug() for _ in range(lines)] - contextURLs = [f'https://github.com/{username}/{fake.slug()}/tree/{fake.slug()}/' for username in usernames] + contextURLs = [f"https://github.com/{username}/{fake.slug()}/tree/{fake.slug()}/" for username in usernames] # dictionary of lists return { - 'id': ids, - 'credits': credit_cents, - 'startTime': effective_times, - 'endTime': effective_times, - 'kind': kinds, - 'userName': usernames, - 'contextURL': contextURLs, + "id": ids, + "credits": credit_cents, + "startTime": effective_times, + "endTime": effective_times, + "kind": kinds, + "userName": usernames, + "contextURL": contextURLs, **data, } @@ -111,41 +122,45 @@ def gitpod_csv(lines=1, data={}): class MakeBillsTestSuite(ProvisioningTestCase): # When: with no bills # Then: nothing happens - @patch('breathecode.provisioning.tasks.calculate_bill_amounts.delay', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event', MagicMock()) + @patch("breathecode.provisioning.tasks.calculate_bill_amounts.delay", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event", MagicMock()) def test_no_bills(self): slug = self.bc.fake.slug() calculate_bill_amounts(slug) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningUserConsumption'), []) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningBill'), []) + self.assertEqual(self.bc.database.list_of("provisioning.ProvisioningUserConsumption"), []) + self.assertEqual(self.bc.database.list_of("provisioning.ProvisioningBill"), []) self.bc.check.calls( logging.Logger.info.call_args_list, [ - call(f'Starting calculate_bill_amounts for hash {slug}'), + call(f"Starting calculate_bill_amounts for hash {slug}"), # retried - call(f'Starting calculate_bill_amounts for hash {slug}'), - ]) - 
self.bc.check.calls(logging.Logger.error.call_args_list, [ - call(f'Does not exists bills for hash {slug}', exc_info=True), - ]) + call(f"Starting calculate_bill_amounts for hash {slug}"), + ], + ) + self.bc.check.calls( + logging.Logger.error.call_args_list, + [ + call(f"Does not exists bills for hash {slug}", exc_info=True), + ], + ) # Given 1 ProvisioningBill # When: hash does not match # Then: nothing happens - @patch('breathecode.provisioning.tasks.calculate_bill_amounts.delay', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event', MagicMock()) + @patch("breathecode.provisioning.tasks.calculate_bill_amounts.delay", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event", MagicMock()) def test_bill_but_hash_does_not_match(self): slug = self.bc.fake.slug() - provisioning_bill = {'hash': slug, 'total_amount': 0.0} - model = self.bc.database.create(provisioning_bill=provisioning_bill, provisioning_vendor={'name': 'Gitpod'}) + provisioning_bill = {"hash": slug, "total_amount": 0.0} + model = self.bc.database.create(provisioning_bill=provisioning_bill, provisioning_vendor={"name": "Gitpod"}) logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] @@ -154,293 +169,392 @@ def test_bill_but_hash_does_not_match(self): calculate_bill_amounts(bad_slug) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningUserConsumption'), []) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningBill'), [ - self.bc.format.to_dict(model.provisioning_bill), - ]) + self.assertEqual(self.bc.database.list_of("provisioning.ProvisioningUserConsumption"), []) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningBill"), + [ + self.bc.format.to_dict(model.provisioning_bill), + ], + ) self.bc.check.calls( logging.Logger.info.call_args_list, [ - call(f'Starting calculate_bill_amounts for hash {bad_slug}'), + call(f"Starting calculate_bill_amounts for hash {bad_slug}"), # retried - call(f'Starting calculate_bill_amounts for hash {bad_slug}'), - ]) - self.bc.check.calls(logging.Logger.error.call_args_list, [ - call(f'Does not exists bills for hash {bad_slug}', exc_info=True), - ]) + call(f"Starting calculate_bill_amounts for hash {bad_slug}"), + ], + ) + self.bc.check.calls( + logging.Logger.error.call_args_list, + [ + call(f"Does not exists bills for hash {bad_slug}", exc_info=True), + ], + ) # Given 1 ProvisioningBill # When: hash match # Then: the bill keep with the amount 0 and the status changed to PAID - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event', MagicMock()) - @patch.multiple('breathecode.services.google_cloud.Storage', - __init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) - @patch.multiple('breathecode.services.google_cloud.File', - __init__=MagicMock(return_value=None), - bucket=PropertyMock(), - file_name=PropertyMock(), - upload=MagicMock(), - exists=MagicMock(return_value=True), - 
url=MagicMock(return_value='https://storage.cloud.google.com/media-breathecode/hardcoded_url'), - create=True) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event", MagicMock()) + @patch.multiple( + "breathecode.services.google_cloud.Storage", + __init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, + ) + @patch.multiple( + "breathecode.services.google_cloud.File", + __init__=MagicMock(return_value=None), + bucket=PropertyMock(), + file_name=PropertyMock(), + upload=MagicMock(), + exists=MagicMock(return_value=True), + url=MagicMock(return_value="https://storage.cloud.google.com/media-breathecode/hardcoded_url"), + create=True, + ) def test_bill_exists(self): slug = self.bc.fake.slug() - provisioning_bill = {'hash': slug, 'total_amount': 0.0} + provisioning_bill = {"hash": slug, "total_amount": 0.0} csv = gitpod_csv(10) - model = self.bc.database.create(provisioning_bill=provisioning_bill, provisioning_vendor={'name': 'Gitpod'}) + model = self.bc.database.create(provisioning_bill=provisioning_bill, provisioning_vendor={"name": "Gitpod"}) logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] - with patch('breathecode.services.google_cloud.File.download', MagicMock(side_effect=csv_file_mock(csv))): + with patch("breathecode.services.google_cloud.File.download", MagicMock(side_effect=csv_file_mock(csv))): calculate_bill_amounts(slug) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningUserConsumption'), []) + self.assertEqual(self.bc.database.list_of("provisioning.ProvisioningUserConsumption"), []) started = UTC_NOW.replace(hour=0, minute=0, second=0, microsecond=0) - timedelta(days=9) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningBill'), [ - { - **self.bc.format.to_dict(model.provisioning_bill), - 'status': 'PAID', - 'total_amount': 0.0, - 'paid_at': UTC_NOW, - 'started_at': started, - 'ended_at': UTC_NOW.replace(hour=0, minute=0, second=0, microsecond=0) - timedelta(days=0), - 'title': f'{MONTHS[started.month - 1]} {started.year}', - }, - ]) - - self.bc.check.calls(logging.Logger.info.call_args_list, - [call(f'Starting calculate_bill_amounts for hash {slug}')]) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningBill"), + [ + { + **self.bc.format.to_dict(model.provisioning_bill), + "status": "PAID", + "total_amount": 0.0, + "paid_at": UTC_NOW, + "started_at": started, + "ended_at": UTC_NOW.replace(hour=0, minute=0, second=0, microsecond=0) - timedelta(days=0), + "title": f"{MONTHS[started.month - 1]} {started.year}", + }, + ], + ) + + self.bc.check.calls( + logging.Logger.info.call_args_list, [call(f"Starting calculate_bill_amounts for hash {slug}")] + ) self.bc.check.calls(logging.Logger.error.call_args_list, []) # Given 1 ProvisioningBill, 2 ProvisioningActivity and 1 ProvisioningVendor # When: hash match and the bill is PENDING and the activities have amount of 0 # -> provisioning vendor from gitpod # Then: the bill keep with the amount 0 and the status changed to PAID - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event', MagicMock()) - 
@patch.multiple('breathecode.services.google_cloud.Storage', - __init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) - @patch.multiple('breathecode.services.google_cloud.File', - __init__=MagicMock(return_value=None), - bucket=PropertyMock(), - file_name=PropertyMock(), - upload=MagicMock(), - exists=MagicMock(return_value=True), - url=MagicMock(return_value='https://storage.cloud.google.com/media-breathecode/hardcoded_url'), - create=True) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event", MagicMock()) + @patch.multiple( + "breathecode.services.google_cloud.Storage", + __init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, + ) + @patch.multiple( + "breathecode.services.google_cloud.File", + __init__=MagicMock(return_value=None), + bucket=PropertyMock(), + file_name=PropertyMock(), + upload=MagicMock(), + exists=MagicMock(return_value=True), + url=MagicMock(return_value="https://storage.cloud.google.com/media-breathecode/hardcoded_url"), + create=True, + ) def test_bill_exists_and_activities__gitpod(self): slug = self.bc.fake.slug() - provisioning_bill = {'hash': slug, 'total_amount': 0.0} + provisioning_bill = {"hash": slug, "total_amount": 0.0} csv = gitpod_csv(10) - provisioning_prices = [{ - 'price_per_unit': 0, - } for _ in range(2)] - - provisioning_consumption_events = [{ - 'quantity': 0, - 'price_id': n + 1, - } for n in range(2)] - - provisioning_user_consumptions = [{ - 'status': random.choice(['PERSISTED', 'WARNING']), - } for _ in range(2)] + provisioning_prices = [ + { + "price_per_unit": 0, + } + for _ in range(2) + ] - amount = sum([ - provisioning_prices[n]['price_per_unit'] * provisioning_consumption_events[n]['quantity'] for n in range(2) - ]) * 2 + provisioning_consumption_events = [ + { + "quantity": 0, + "price_id": n + 1, + } + for n in range(2) + ] - model = self.bc.database.create(provisioning_bill=provisioning_bill, - provisioning_price=provisioning_prices, - provisioning_vendor={'name': 'Gitpod'}, - provisioning_consumption_event=provisioning_consumption_events, - provisioning_user_consumption=provisioning_user_consumptions) + provisioning_user_consumptions = [ + { + "status": random.choice(["PERSISTED", "WARNING"]), + } + for _ in range(2) + ] + + amount = ( + sum( + [ + provisioning_prices[n]["price_per_unit"] * provisioning_consumption_events[n]["quantity"] + for n in range(2) + ] + ) + * 2 + ) + + model = self.bc.database.create( + provisioning_bill=provisioning_bill, + provisioning_price=provisioning_prices, + provisioning_vendor={"name": "Gitpod"}, + provisioning_consumption_event=provisioning_consumption_events, + provisioning_user_consumption=provisioning_user_consumptions, + ) logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] - with patch('breathecode.services.google_cloud.File.download', MagicMock(side_effect=csv_file_mock(csv))): + with patch("breathecode.services.google_cloud.File.download", MagicMock(side_effect=csv_file_mock(csv))): calculate_bill_amounts(slug) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningUserConsumption'), [ - { - **self.bc.format.to_dict(model.provisioning_user_consumption[0]), - 'amount': amount / 2, - }, - { - **self.bc.format.to_dict(model.provisioning_user_consumption[1]), - 'amount': amount / 2, - }, - ]) + self.assertEqual( + 
self.bc.database.list_of("provisioning.ProvisioningUserConsumption"), + [ + { + **self.bc.format.to_dict(model.provisioning_user_consumption[0]), + "amount": amount / 2, + }, + { + **self.bc.format.to_dict(model.provisioning_user_consumption[1]), + "amount": amount / 2, + }, + ], + ) started = UTC_NOW.replace(hour=0, minute=0, second=0, microsecond=0) - timedelta(days=9) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningBill'), [ - { - **self.bc.format.to_dict(model.provisioning_bill), - 'status': 'PAID', - 'total_amount': 0.0, - 'paid_at': UTC_NOW, - 'started_at': UTC_NOW.replace(hour=0, minute=0, second=0, microsecond=0) - timedelta(days=9), - 'ended_at': UTC_NOW.replace(hour=0, minute=0, second=0, microsecond=0) - timedelta(days=0), - 'title': f'{MONTHS[started.month - 1]} {started.year}', - }, - ]) - - self.bc.check.calls(logging.Logger.info.call_args_list, - [call(f'Starting calculate_bill_amounts for hash {slug}')]) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningBill"), + [ + { + **self.bc.format.to_dict(model.provisioning_bill), + "status": "PAID", + "total_amount": 0.0, + "paid_at": UTC_NOW, + "started_at": UTC_NOW.replace(hour=0, minute=0, second=0, microsecond=0) - timedelta(days=9), + "ended_at": UTC_NOW.replace(hour=0, minute=0, second=0, microsecond=0) - timedelta(days=0), + "title": f"{MONTHS[started.month - 1]} {started.year}", + }, + ], + ) + + self.bc.check.calls( + logging.Logger.info.call_args_list, [call(f"Starting calculate_bill_amounts for hash {slug}")] + ) self.bc.check.calls(logging.Logger.error.call_args_list, []) # Given 1 ProvisioningBill, 2 ProvisioningActivity and 1 ProvisioningVendor # When: hash match and the bill is PENDING and the activities have amount of 0 # -> provisioning vendor from codespaces # Then: the bill keep with the amount 0 and the status changed to PAID - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event', MagicMock()) - @patch.multiple('breathecode.services.google_cloud.Storage', - __init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) - @patch.multiple('breathecode.services.google_cloud.File', - __init__=MagicMock(return_value=None), - bucket=PropertyMock(), - file_name=PropertyMock(), - upload=MagicMock(), - exists=MagicMock(return_value=True), - url=MagicMock(return_value='https://storage.cloud.google.com/media-breathecode/hardcoded_url'), - create=True) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event", MagicMock()) + @patch.multiple( + "breathecode.services.google_cloud.Storage", + __init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, + ) + @patch.multiple( + "breathecode.services.google_cloud.File", + __init__=MagicMock(return_value=None), + bucket=PropertyMock(), + file_name=PropertyMock(), + upload=MagicMock(), + exists=MagicMock(return_value=True), + url=MagicMock(return_value="https://storage.cloud.google.com/media-breathecode/hardcoded_url"), + create=True, + ) def test_bill_exists_and_activities__codespaces(self): slug = self.bc.fake.slug() - provisioning_bill = {'hash': slug, 'total_amount': 0.0} + provisioning_bill = {"hash": slug, "total_amount": 
0.0} csv = codespaces_csv(10) - provisioning_prices = [{ - 'price_per_unit': 0, - } for _ in range(2)] - - provisioning_consumption_events = [{ - 'quantity': 0, - 'price_id': n + 1, - } for n in range(2)] - - provisioning_user_consumptions = [{ - 'status': random.choice(['PERSISTED', 'WARNING']), - } for _ in range(2)] + provisioning_prices = [ + { + "price_per_unit": 0, + } + for _ in range(2) + ] - amount = sum([ - provisioning_prices[n]['price_per_unit'] * provisioning_consumption_events[n]['quantity'] for n in range(2) - ]) * 2 + provisioning_consumption_events = [ + { + "quantity": 0, + "price_id": n + 1, + } + for n in range(2) + ] - model = self.bc.database.create(provisioning_bill=provisioning_bill, - provisioning_price=provisioning_prices, - provisioning_vendor={'name': 'Codespaces'}, - provisioning_consumption_event=provisioning_consumption_events, - provisioning_user_consumption=provisioning_user_consumptions) + provisioning_user_consumptions = [ + { + "status": random.choice(["PERSISTED", "WARNING"]), + } + for _ in range(2) + ] + + amount = ( + sum( + [ + provisioning_prices[n]["price_per_unit"] * provisioning_consumption_events[n]["quantity"] + for n in range(2) + ] + ) + * 2 + ) + + model = self.bc.database.create( + provisioning_bill=provisioning_bill, + provisioning_price=provisioning_prices, + provisioning_vendor={"name": "Codespaces"}, + provisioning_consumption_event=provisioning_consumption_events, + provisioning_user_consumption=provisioning_user_consumptions, + ) logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] - with patch('breathecode.services.google_cloud.File.download', MagicMock(side_effect=csv_file_mock(csv))): + with patch("breathecode.services.google_cloud.File.download", MagicMock(side_effect=csv_file_mock(csv))): calculate_bill_amounts(slug) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningUserConsumption'), [ - { - **self.bc.format.to_dict(model.provisioning_user_consumption[0]), - 'amount': amount / 2, - }, - { - **self.bc.format.to_dict(model.provisioning_user_consumption[1]), - 'amount': amount / 2, - }, - ]) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningUserConsumption"), + [ + { + **self.bc.format.to_dict(model.provisioning_user_consumption[0]), + "amount": amount / 2, + }, + { + **self.bc.format.to_dict(model.provisioning_user_consumption[1]), + "amount": amount / 2, + }, + ], + ) started = UTC_NOW.replace(hour=0, minute=0, second=0, microsecond=0) + timedelta(days=0) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningBill'), [ - { - **self.bc.format.to_dict(model.provisioning_bill), - 'status': 'PAID', - 'total_amount': 0.0, - 'paid_at': UTC_NOW, - 'started_at': started, - 'ended_at': UTC_NOW.replace(hour=0, minute=0, second=0, microsecond=0) + timedelta(days=9), - 'title': f'{MONTHS[started.month - 1]} {started.year}', - }, - ]) - - self.bc.check.calls(logging.Logger.info.call_args_list, - [call(f'Starting calculate_bill_amounts for hash {slug}')]) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningBill"), + [ + { + **self.bc.format.to_dict(model.provisioning_bill), + "status": "PAID", + "total_amount": 0.0, + "paid_at": UTC_NOW, + "started_at": started, + "ended_at": UTC_NOW.replace(hour=0, minute=0, second=0, microsecond=0) + timedelta(days=9), + "title": f"{MONTHS[started.month - 1]} {started.year}", + }, + ], + ) + + self.bc.check.calls( + logging.Logger.info.call_args_list, [call(f"Starting calculate_bill_amounts for hash 
{slug}")] + ) self.bc.check.calls(logging.Logger.error.call_args_list, []) # Given 1 ProvisioningBill and 2 ProvisioningActivity # When: hash match and the bill is PENDING the activities have a random amount # Then: the bill amount is override with the sum of the activities - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event', MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event", MagicMock()) @patch( - 'os.getenv', - MagicMock(side_effect=apply_get_env({ - 'PROVISIONING_CREDIT_PRICE': CREDIT_PRICE, - 'STRIPE_PRICE_ID': STRIPE_PRICE_ID, - }))) - @patch.multiple('breathecode.services.google_cloud.Storage', - __init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) - @patch.multiple('breathecode.services.google_cloud.File', - __init__=MagicMock(return_value=None), - bucket=PropertyMock(), - file_name=PropertyMock(), - upload=MagicMock(), - exists=MagicMock(return_value=True), - url=MagicMock(return_value='https://storage.cloud.google.com/media-breathecode/hardcoded_url'), - create=True) + "os.getenv", + MagicMock( + side_effect=apply_get_env( + { + "PROVISIONING_CREDIT_PRICE": CREDIT_PRICE, + "STRIPE_PRICE_ID": STRIPE_PRICE_ID, + } + ) + ), + ) + @patch.multiple( + "breathecode.services.google_cloud.Storage", + __init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, + ) + @patch.multiple( + "breathecode.services.google_cloud.File", + __init__=MagicMock(return_value=None), + bucket=PropertyMock(), + file_name=PropertyMock(), + upload=MagicMock(), + exists=MagicMock(return_value=True), + url=MagicMock(return_value="https://storage.cloud.google.com/media-breathecode/hardcoded_url"), + create=True, + ) def test_bill_exists_and_activities_with_random_amounts__bill_amount_is_override(self): slug = self.bc.fake.slug() - provisioning_bill = {'hash': slug, 'total_amount': random.random() * 1000} + provisioning_bill = {"hash": slug, "total_amount": random.random() * 1000} csv = gitpod_csv(10) - provisioning_prices = [{ - 'price_per_unit': random.random() * 100, - } for _ in range(2)] - - provisioning_consumption_events = [{ - 'quantity': random.random() * 10, - 'price_id': n + 1, - } for n in range(2)] - - provisioning_user_consumptions = [{ - 'status': 'PERSISTED', - } for _ in range(2)] - - amount = sum([ - provisioning_prices[n]['price_per_unit'] * provisioning_consumption_events[n]['quantity'] for n in range(2) - ]) * 2 - q = sum([provisioning_consumption_events[n]['quantity'] for n in range(2)]) - model = self.bc.database.create(provisioning_bill=provisioning_bill, - provisioning_price=provisioning_prices, - provisioning_vendor={'name': 'Gitpod'}, - provisioning_consumption_event=provisioning_consumption_events, - provisioning_user_consumption=provisioning_user_consumptions) + provisioning_prices = [ + { + "price_per_unit": random.random() * 100, + } + for _ in range(2) + ] + + provisioning_consumption_events = [ + { + "quantity": random.random() * 10, + "price_id": n + 1, + } + for n in range(2) + ] + + provisioning_user_consumptions = [ + { + "status": "PERSISTED", + } + for _ in range(2) + ] + + amount = ( + sum( + [ + 
provisioning_prices[n]["price_per_unit"] * provisioning_consumption_events[n]["quantity"] + for n in range(2) + ] + ) + * 2 + ) + q = sum([provisioning_consumption_events[n]["quantity"] for n in range(2)]) + model = self.bc.database.create( + provisioning_bill=provisioning_bill, + provisioning_price=provisioning_prices, + provisioning_vendor={"name": "Gitpod"}, + provisioning_consumption_event=provisioning_consumption_events, + provisioning_user_consumption=provisioning_user_consumptions, + ) logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] stripe_id = self.bc.fake.slug() stripe_url = self.bc.fake.url() - with patch('breathecode.payments.services.stripe.Stripe.create_payment_link', - MagicMock(return_value=(stripe_id, stripe_url))): - with patch('breathecode.services.google_cloud.File.download', MagicMock(side_effect=csv_file_mock(csv))): + with patch( + "breathecode.payments.services.stripe.Stripe.create_payment_link", + MagicMock(return_value=(stripe_id, stripe_url)), + ): + with patch("breathecode.services.google_cloud.File.download", MagicMock(side_effect=csv_file_mock(csv))): calculate_bill_amounts(slug) quantity = math.ceil(amount / CREDIT_PRICE) @@ -449,167 +563,223 @@ def test_bill_exists_and_activities_with_random_amounts__bill_amount_is_override self.bc.check.calls(Stripe.create_payment_link.call_args_list, [call(STRIPE_PRICE_ID, quantity)]) fee = new_amount - amount - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningUserConsumption'), [ - { - **self.bc.format.to_dict(model.provisioning_user_consumption[0]), - 'amount': amount / 2, - 'quantity': q, - }, - { - **self.bc.format.to_dict(model.provisioning_user_consumption[1]), - 'amount': amount / 2, - 'quantity': q, - }, - ]) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningUserConsumption"), + [ + { + **self.bc.format.to_dict(model.provisioning_user_consumption[0]), + "amount": amount / 2, + "quantity": q, + }, + { + **self.bc.format.to_dict(model.provisioning_user_consumption[1]), + "amount": amount / 2, + "quantity": q, + }, + ], + ) started = UTC_NOW.replace(hour=0, minute=0, second=0, microsecond=0) - timedelta(days=9) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningBill'), [ - { - **self.bc.format.to_dict(model.provisioning_bill), - 'status': 'DUE', - 'total_amount': new_amount, - 'fee': fee, - 'paid_at': None, - 'stripe_id': stripe_id, - 'stripe_url': stripe_url, - 'started_at': started, - 'ended_at': UTC_NOW.replace(hour=0, minute=0, second=0, microsecond=0) - timedelta(days=0), - 'title': f'{MONTHS[started.month - 1]} {started.year}', - }, - ]) - - self.bc.check.calls(logging.Logger.info.call_args_list, - [call(f'Starting calculate_bill_amounts for hash {slug}')]) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningBill"), + [ + { + **self.bc.format.to_dict(model.provisioning_bill), + "status": "DUE", + "total_amount": new_amount, + "fee": fee, + "paid_at": None, + "stripe_id": stripe_id, + "stripe_url": stripe_url, + "started_at": started, + "ended_at": UTC_NOW.replace(hour=0, minute=0, second=0, microsecond=0) - timedelta(days=0), + "title": f"{MONTHS[started.month - 1]} {started.year}", + }, + ], + ) + + self.bc.check.calls( + logging.Logger.info.call_args_list, [call(f"Starting calculate_bill_amounts for hash {slug}")] + ) self.bc.check.calls(logging.Logger.error.call_args_list, []) # Given 1 ProvisioningBill and 2 ProvisioningActivity # When: hash match and the bill is DISPUTED, IGNORED or PAID and the 
activities have a random amount # Then: don't override the bill amount - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event', MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event", MagicMock()) def test_academy_reacted_to_bill(self): slug = self.bc.fake.slug() provisioning_bill = { - 'hash': slug, - 'total_amount': random.random() * 1000, - 'status': random.choice(['DISPUTED', 'IGNORED', 'PAID']), + "hash": slug, + "total_amount": random.random() * 1000, + "status": random.choice(["DISPUTED", "IGNORED", "PAID"]), } - provisioning_prices = [{ - 'price_per_unit': random.random() * 100, - } for _ in range(2)] - - provisioning_consumption_events = [{ - 'quantity': random.random() * 10, - 'price_id': n + 1, - } for n in range(2)] + provisioning_prices = [ + { + "price_per_unit": random.random() * 100, + } + for _ in range(2) + ] - provisioning_user_consumptions = [{ - 'status': 'PERSISTED', - } for _ in range(2)] + provisioning_consumption_events = [ + { + "quantity": random.random() * 10, + "price_id": n + 1, + } + for n in range(2) + ] - model = self.bc.database.create(provisioning_bill=provisioning_bill, - provisioning_price=provisioning_prices, - provisioning_vendor={'name': 'Gitpod'}, - provisioning_consumption_event=provisioning_consumption_events, - provisioning_user_consumption=provisioning_user_consumptions) + provisioning_user_consumptions = [ + { + "status": "PERSISTED", + } + for _ in range(2) + ] + + model = self.bc.database.create( + provisioning_bill=provisioning_bill, + provisioning_price=provisioning_prices, + provisioning_vendor={"name": "Gitpod"}, + provisioning_consumption_event=provisioning_consumption_events, + provisioning_user_consumption=provisioning_user_consumptions, + ) logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] calculate_bill_amounts(slug) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningUserConsumption'), [ - { - **self.bc.format.to_dict(model.provisioning_user_consumption[0]), - }, - { - **self.bc.format.to_dict(model.provisioning_user_consumption[1]), - }, - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningBill'), [ - { - **self.bc.format.to_dict(model.provisioning_bill), - }, - ]) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningUserConsumption"), + [ + { + **self.bc.format.to_dict(model.provisioning_user_consumption[0]), + }, + { + **self.bc.format.to_dict(model.provisioning_user_consumption[1]), + }, + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningBill"), + [ + { + **self.bc.format.to_dict(model.provisioning_bill), + }, + ], + ) self.bc.check.calls( logging.Logger.info.call_args_list, [ - call(f'Starting calculate_bill_amounts for hash {slug}'), + call(f"Starting calculate_bill_amounts for hash {slug}"), # retried - call(f'Starting calculate_bill_amounts for hash {slug}'), - ]) - self.bc.check.calls(logging.Logger.error.call_args_list, [ - call(f'Does not exists bills for hash {slug}', exc_info=True), - ]) + call(f"Starting calculate_bill_amounts for hash {slug}"), + ], + ) + self.bc.check.calls( + 
logging.Logger.error.call_args_list, + [ + call(f"Does not exists bills for hash {slug}", exc_info=True), + ], + ) # Given 1 ProvisioningBill and 2 ProvisioningActivity # When: hash match and the bill is DISPUTED or IGNORED the activities have a random amount, force = True # Then: the bill amount is override with the sum of the activities - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event', MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event", MagicMock()) @patch( - 'os.getenv', - MagicMock(side_effect=apply_get_env({ - 'PROVISIONING_CREDIT_PRICE': CREDIT_PRICE, - 'STRIPE_PRICE_ID': STRIPE_PRICE_ID, - }))) - @patch.multiple('breathecode.services.google_cloud.Storage', - __init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) - @patch.multiple('breathecode.services.google_cloud.File', - __init__=MagicMock(return_value=None), - bucket=PropertyMock(), - file_name=PropertyMock(), - upload=MagicMock(), - exists=MagicMock(return_value=True), - url=MagicMock(return_value='https://storage.cloud.google.com/media-breathecode/hardcoded_url'), - create=True) + "os.getenv", + MagicMock( + side_effect=apply_get_env( + { + "PROVISIONING_CREDIT_PRICE": CREDIT_PRICE, + "STRIPE_PRICE_ID": STRIPE_PRICE_ID, + } + ) + ), + ) + @patch.multiple( + "breathecode.services.google_cloud.Storage", + __init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, + ) + @patch.multiple( + "breathecode.services.google_cloud.File", + __init__=MagicMock(return_value=None), + bucket=PropertyMock(), + file_name=PropertyMock(), + upload=MagicMock(), + exists=MagicMock(return_value=True), + url=MagicMock(return_value="https://storage.cloud.google.com/media-breathecode/hardcoded_url"), + create=True, + ) def test_academy_reacted_to_bill__no_paid__force(self): slug = self.bc.fake.slug() provisioning_bill = { - 'hash': slug, - 'total_amount': random.random() * 1000, - 'status': random.choice(['DISPUTED', 'IGNORED']), + "hash": slug, + "total_amount": random.random() * 1000, + "status": random.choice(["DISPUTED", "IGNORED"]), } csv = gitpod_csv(10) - provisioning_prices = [{ - 'price_per_unit': random.random() * 100, - } for _ in range(2)] - - provisioning_consumption_events = [{ - 'quantity': random.random() * 10, - 'price_id': n + 1, - } for n in range(2)] - - provisioning_user_consumptions = [{ - 'status': 'PERSISTED', - } for _ in range(2)] - - amount = sum([ - provisioning_prices[n]['price_per_unit'] * provisioning_consumption_events[n]['quantity'] for n in range(2) - ]) * 2 - q = sum([provisioning_consumption_events[n]['quantity'] for n in range(2)]) - model = self.bc.database.create(provisioning_bill=provisioning_bill, - provisioning_price=provisioning_prices, - provisioning_vendor={'name': 'Gitpod'}, - provisioning_consumption_event=provisioning_consumption_events, - provisioning_user_consumption=provisioning_user_consumptions) + provisioning_prices = [ + { + "price_per_unit": random.random() * 100, + } + for _ in range(2) + ] + + provisioning_consumption_events = [ + { + "quantity": random.random() * 10, + "price_id": n + 1, + } + for n in range(2) + ] + + provisioning_user_consumptions = 
[ + { + "status": "PERSISTED", + } + for _ in range(2) + ] + + amount = ( + sum( + [ + provisioning_prices[n]["price_per_unit"] * provisioning_consumption_events[n]["quantity"] + for n in range(2) + ] + ) + * 2 + ) + q = sum([provisioning_consumption_events[n]["quantity"] for n in range(2)]) + model = self.bc.database.create( + provisioning_bill=provisioning_bill, + provisioning_price=provisioning_prices, + provisioning_vendor={"name": "Gitpod"}, + provisioning_consumption_event=provisioning_consumption_events, + provisioning_user_consumption=provisioning_user_consumptions, + ) logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] stripe_id = self.bc.fake.slug() stripe_url = self.bc.fake.url() - with patch('breathecode.payments.services.stripe.Stripe.create_payment_link', - MagicMock(return_value=(stripe_id, stripe_url))): - with patch('breathecode.services.google_cloud.File.download', MagicMock(side_effect=csv_file_mock(csv))): + with patch( + "breathecode.payments.services.stripe.Stripe.create_payment_link", + MagicMock(return_value=(stripe_id, stripe_url)), + ): + with patch("breathecode.services.google_cloud.File.download", MagicMock(side_effect=csv_file_mock(csv))): calculate_bill_amounts(slug, force=True) quantity = math.ceil(amount / CREDIT_PRICE) @@ -618,99 +788,127 @@ def test_academy_reacted_to_bill__no_paid__force(self): self.bc.check.calls(Stripe.create_payment_link.call_args_list, [call(STRIPE_PRICE_ID, quantity)]) fee = new_amount - amount - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningUserConsumption'), [ - { - **self.bc.format.to_dict(model.provisioning_user_consumption[0]), - 'amount': amount / 2, - 'quantity': q, - }, - { - **self.bc.format.to_dict(model.provisioning_user_consumption[1]), - 'amount': amount / 2, - 'quantity': q, - }, - ]) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningUserConsumption"), + [ + { + **self.bc.format.to_dict(model.provisioning_user_consumption[0]), + "amount": amount / 2, + "quantity": q, + }, + { + **self.bc.format.to_dict(model.provisioning_user_consumption[1]), + "amount": amount / 2, + "quantity": q, + }, + ], + ) started = UTC_NOW.replace(hour=0, minute=0, second=0, microsecond=0) - timedelta(days=9) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningBill'), [ - { - **self.bc.format.to_dict(model.provisioning_bill), - 'status': 'DUE', - 'total_amount': quantity * CREDIT_PRICE, - 'fee': fee, - 'paid_at': None, - 'stripe_id': stripe_id, - 'stripe_url': stripe_url, - 'started_at': started, - 'ended_at': UTC_NOW.replace(hour=0, minute=0, second=0, microsecond=0) - timedelta(days=0), - 'title': f'{MONTHS[started.month - 1]} {started.year}', - }, - ]) - - self.bc.check.calls(logging.Logger.info.call_args_list, - [call(f'Starting calculate_bill_amounts for hash {slug}')]) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningBill"), + [ + { + **self.bc.format.to_dict(model.provisioning_bill), + "status": "DUE", + "total_amount": quantity * CREDIT_PRICE, + "fee": fee, + "paid_at": None, + "stripe_id": stripe_id, + "stripe_url": stripe_url, + "started_at": started, + "ended_at": UTC_NOW.replace(hour=0, minute=0, second=0, microsecond=0) - timedelta(days=0), + "title": f"{MONTHS[started.month - 1]} {started.year}", + }, + ], + ) + + self.bc.check.calls( + logging.Logger.info.call_args_list, [call(f"Starting calculate_bill_amounts for hash {slug}")] + ) self.bc.check.calls(logging.Logger.error.call_args_list, []) # Given 1 
ProvisioningBill and 2 ProvisioningActivity # When: hash match and the bill is PAID the activities have a random amount, force = True # Then: the bill amount is override with the sum of the activities - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event', MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event", MagicMock()) def test_academy_reacted_to_bill__paid__force(self): slug = self.bc.fake.slug() provisioning_bill = { - 'hash': slug, - 'total_amount': random.random() * 1000, - 'status': 'PAID', + "hash": slug, + "total_amount": random.random() * 1000, + "status": "PAID", } - provisioning_prices = [{ - 'price_per_unit': random.random() * 100, - } for _ in range(2)] - - provisioning_consumption_events = [{ - 'quantity': random.random() * 10, - 'price_id': n + 1, - } for n in range(2)] + provisioning_prices = [ + { + "price_per_unit": random.random() * 100, + } + for _ in range(2) + ] - provisioning_user_consumptions = [{ - 'status': 'PERSISTED', - } for _ in range(2)] + provisioning_consumption_events = [ + { + "quantity": random.random() * 10, + "price_id": n + 1, + } + for n in range(2) + ] - model = self.bc.database.create(provisioning_bill=provisioning_bill, - provisioning_price=provisioning_prices, - provisioning_vendor={'name': 'Gitpod'}, - provisioning_consumption_event=provisioning_consumption_events, - provisioning_user_consumption=provisioning_user_consumptions) + provisioning_user_consumptions = [ + { + "status": "PERSISTED", + } + for _ in range(2) + ] + + model = self.bc.database.create( + provisioning_bill=provisioning_bill, + provisioning_price=provisioning_prices, + provisioning_vendor={"name": "Gitpod"}, + provisioning_consumption_event=provisioning_consumption_events, + provisioning_user_consumption=provisioning_user_consumptions, + ) logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] calculate_bill_amounts(slug, force=True) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningUserConsumption'), [ - { - **self.bc.format.to_dict(model.provisioning_user_consumption[0]), - }, - { - **self.bc.format.to_dict(model.provisioning_user_consumption[1]), - }, - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningBill'), [ - { - **self.bc.format.to_dict(model.provisioning_bill), - 'status': 'PAID', - }, - ]) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningUserConsumption"), + [ + { + **self.bc.format.to_dict(model.provisioning_user_consumption[0]), + }, + { + **self.bc.format.to_dict(model.provisioning_user_consumption[1]), + }, + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningBill"), + [ + { + **self.bc.format.to_dict(model.provisioning_bill), + "status": "PAID", + }, + ], + ) self.bc.check.calls( logging.Logger.info.call_args_list, [ - call(f'Starting calculate_bill_amounts for hash {slug}'), + call(f"Starting calculate_bill_amounts for hash {slug}"), # retried - call(f'Starting calculate_bill_amounts for hash {slug}'), - ]) - self.bc.check.calls(logging.Logger.error.call_args_list, [ - call(f'Does not exists bills for hash {slug}', exc_info=True), - ]) + 
call(f"Starting calculate_bill_amounts for hash {slug}"), + ], + ) + self.bc.check.calls( + logging.Logger.error.call_args_list, + [ + call(f"Does not exists bills for hash {slug}", exc_info=True), + ], + ) diff --git a/breathecode/provisioning/tests/tasks/tests_upload.py b/breathecode/provisioning/tests/tasks/tests_upload.py index ef863a707..720688823 100644 --- a/breathecode/provisioning/tests/tasks/tests_upload.py +++ b/breathecode/provisioning/tests/tasks/tests_upload.py @@ -1,6 +1,7 @@ """ Test /answer/:id """ + import json import logging import os @@ -24,7 +25,7 @@ from ..mixins import ProvisioningTestCase -GOOGLE_CLOUD_KEY = os.getenv('GOOGLE_CLOUD_KEY', None) +GOOGLE_CLOUD_KEY = os.getenv("GOOGLE_CLOUD_KEY", None) fake = Faker() fake_url = fake.url() @@ -40,7 +41,7 @@ @pytest.fixture(autouse=True) def setup(monkeypatch: pytest.MonkeyPatch) -> None: - monkeypatch.setattr('linked_services.django.tasks.check_credentials.delay', MagicMock()) + monkeypatch.setattr("linked_services.django.tasks.check_credentials.delay", MagicMock()) def parse(s): @@ -48,26 +49,26 @@ def parse(s): def repo_name(url): - pattern = r'^https://github\.com/[^/]+/([^/]+)/' + pattern = r"^https://github\.com/[^/]+/([^/]+)/" result = re.findall(pattern, url) return result[0] def random_string(): - return ''.join(choices(string.ascii_letters, k=10)) + return "".join(choices(string.ascii_letters, k=10)) def datetime_to_iso(date) -> str: - return re.sub(r'\+00:00$', 'Z', date.replace(tzinfo=UTC).isoformat()) + return re.sub(r"\+00:00$", "Z", date.replace(tzinfo=UTC).isoformat()) def datetime_to_show_date(date) -> str: - return date.strftime('%Y-%m-%d') + return date.strftime("%Y-%m-%d") def response_mock(status_code=200, content=[]): - class Response(): + class Response: def __init__(self, status_code, content): self.status_code = status_code @@ -102,16 +103,16 @@ def codespaces_csv(lines=1, data={}): # dictionary of lists return { - 'Repository Slug': repository_slugs, - 'Username': usernames, - 'Date': dates, - 'Product': products, - 'SKU': skus, - 'Quantity': quantities, - 'Unit Type': unit_types, - 'Price Per Unit ($)': price_per_units, - 'Multiplier': multipliers, - 'Owner': owners, + "Repository Slug": repository_slugs, + "Username": usernames, + "Date": dates, + "Product": products, + "SKU": skus, + "Quantity": quantities, + "Unit Type": unit_types, + "Price Per Unit ($)": price_per_units, + "Multiplier": multipliers, + "Owner": owners, **data, } @@ -122,43 +123,43 @@ def gitpod_csv(lines=1, data={}): effective_times = [datetime_to_iso(timezone.now()) for _ in range(lines)] kinds = [fake.slug() for _ in range(lines)] usernames = [fake.slug() for _ in range(lines)] - contextURLs = [f'https://github.com/{username}/{fake.slug()}/tree/{fake.slug()}/' for username in usernames] + contextURLs = [f"https://github.com/{username}/{fake.slug()}/tree/{fake.slug()}/" for username in usernames] # dictionary of lists return { - 'id': ids, - 'credits': credit_cents, - 'startTime': effective_times, - 'endTime': effective_times, - 'kind': kinds, - 'userName': usernames, - 'contextURL': contextURLs, + "id": ids, + "credits": credit_cents, + "startTime": effective_times, + "endTime": effective_times, + "kind": kinds, + "userName": usernames, + "contextURL": contextURLs, **data, } def datetime_to_date_str(date: datetime) -> str: - return date.strftime('%Y-%m-%d') + return date.strftime("%Y-%m-%d") def rigobot_csv(lines=1, data={}): - organizations = ['4Geeks' for _ in range(lines)] + organizations = ["4Geeks" for _ in 
range(lines)] consumption_period_ids = [random.randint(1, 10) for _ in range(lines)] times = [datetime_to_iso(timezone.now()) for _ in range(lines)] - billing_statuses = ['OPEN' for _ in range(lines)] + billing_statuses = ["OPEN" for _ in range(lines)] total_spent_periods = [(random.random() * 30) + 0.01 for _ in range(lines)] consumption_item_ids = [random.randint(1, 10) for _ in range(lines)] user_ids = [10 for _ in range(lines)] emails = [fake.email() for _ in range(lines)] - consumption_types = ['MESSAGE' for _ in range(lines)] - pricing_types = [random.choice(['INPUT', 'OUTPUT']) for _ in range(lines)] + consumption_types = ["MESSAGE" for _ in range(lines)] + pricing_types = [random.choice(["INPUT", "OUTPUT"]) for _ in range(lines)] total_tokens = [random.randint(1, 100) for _ in range(lines)] total_spents = [] res = [] for i in range(lines): total_token = total_tokens[i] pricing_type = pricing_types[i] - price = 0.04 if pricing_type == 'OUTPUT' else 0.02 + price = 0.04 if pricing_type == "OUTPUT" else 0.02 total_spent = price * total_token while total_spent in res: total_tokens[i] = random.randint(1, 100) @@ -169,37 +170,37 @@ def rigobot_csv(lines=1, data={}): res.append(total_spent) models = [ - random.choice(['gpt-4-turbo', 'gpt-4', 'gpt-4-turbo', 'gpt-4o', 'gpt-3.5-turbo', 'gpt-3.5']) + random.choice(["gpt-4-turbo", "gpt-4", "gpt-4-turbo", "gpt-4o", "gpt-3.5-turbo", "gpt-3.5"]) for _ in range(lines) ] purpose_ids = [random.randint(1, 10) for _ in range(lines)] purpose_slugs = [fake.slug() for _ in range(lines)] - purposes = [' '.join(fake.words()) for _ in range(lines)] + purposes = [" ".join(fake.words()) for _ in range(lines)] github_usernames = [fake.user_name() for _ in range(lines)] created_ats = [datetime_to_iso(timezone.now()) for _ in range(lines)] # dictionary of lists return { - 'organization': organizations, - 'consumption_period_id': consumption_period_ids, - 'consumption_period_start': times, - 'consumption_period_end': times, - 'billing_status': billing_statuses, - 'total_spent_period': total_spent_periods, - 'consumption_item_id': consumption_item_ids, - 'user_id': user_ids, - 'email': emails, - 'consumption_type': consumption_types, - 'pricing_type': pricing_types, - 'total_spent': total_spents, - 'total_tokens': total_tokens, - 'model': models, - 'purpose_id': purpose_ids, - 'purpose_slug': purpose_slugs, - 'purpose': purposes, - 'created_at': created_ats, - 'github_username': github_usernames, + "organization": organizations, + "consumption_period_id": consumption_period_ids, + "consumption_period_start": times, + "consumption_period_end": times, + "billing_status": billing_statuses, + "total_spent_period": total_spent_periods, + "consumption_item_id": consumption_item_ids, + "user_id": user_ids, + "email": emails, + "consumption_type": consumption_types, + "pricing_type": pricing_types, + "total_spent": total_spents, + "total_tokens": total_tokens, + "model": models, + "purpose_id": purpose_ids, + "purpose_slug": purpose_slugs, + "purpose": purposes, + "created_at": created_ats, + "github_username": github_usernames, **data, } @@ -216,100 +217,100 @@ def csv_file_mock_inner(file): def currency_data(data={}): return { - 'code': 'USD', - 'decimals': 2, - 'id': 1, - 'name': 'US Dollar', + "code": "USD", + "decimals": 2, + "id": 1, + "name": "US Dollar", **data, } def provisioning_activity_kind_data(data={}): return { - 'id': 1, - 'product_name': 'Lori Cook', - 'sku': 'point-yes-another', + "id": 1, + "product_name": "Lori Cook", + "sku": "point-yes-another", 
**data, } def provisioning_activity_price_data(data={}): return { - 'id': 1, - 'currency_id': 1, - 'multiplier': 1.0, - 'price_per_unit': 0.0, - 'unit_type': '', + "id": 1, + "currency_id": 1, + "multiplier": 1.0, + "price_per_unit": 0.0, + "unit_type": "", **data, } def provisioning_activity_item_data(data={}): return { - 'external_pk': None, - 'id': 1, - 'price_id': 1, - 'quantity': 0.0, - 'registered_at': ..., - 'repository_url': None, - 'task_associated_slug': None, - 'vendor_id': None, - 'csv_row': 0, + "external_pk": None, + "id": 1, + "price_id": 1, + "quantity": 0.0, + "registered_at": ..., + "repository_url": None, + "task_associated_slug": None, + "vendor_id": None, + "csv_row": 0, **data, } def provisioning_activity_data(data={}): return { - 'id': 1, - 'processed_at': ..., - 'status': 'PERSISTED', - 'status_text': '', - 'username': 'soldier-job-woman', - 'amount': 0.0, - 'quantity': 0.0, + "id": 1, + "processed_at": ..., + "status": "PERSISTED", + "status_text": "", + "username": "soldier-job-woman", + "amount": 0.0, + "quantity": 0.0, **data, } def provisioning_bill_data(data={}): return { - 'academy_id': 1, - 'currency_code': 'USD', - 'id': 1, - 'paid_at': None, - 'status': 'PENDING', - 'status_details': None, - 'total_amount': 0.0, - 'fee': 0.0, - 'stripe_id': None, - 'stripe_url': None, - 'vendor_id': None, - 'started_at': None, - 'ended_at': None, - 'title': None, - 'archived_at': None, + "academy_id": 1, + "currency_code": "USD", + "id": 1, + "paid_at": None, + "status": "PENDING", + "status_details": None, + "total_amount": 0.0, + "fee": 0.0, + "stripe_id": None, + "stripe_url": None, + "vendor_id": None, + "started_at": None, + "ended_at": None, + "title": None, + "archived_at": None, **data, } def github_academy_user_data(data={}): return { - 'academy_id': 0, - 'id': 0, - 'storage_action': 'ADD', - 'storage_log': None, - 'storage_status': 'PENDING', - 'storage_synch_at': None, - 'user_id': None, - 'username': None, + "academy_id": 0, + "id": 0, + "storage_action": "ADD", + "storage_log": None, + "storage_status": "PENDING", + "storage_synch_at": None, + "user_id": None, + "username": None, **data, } def get_last_task_manager_id(bc): - task_manager_cls = bc.database.get_model('task_manager.TaskManager') - task_manager = task_manager_cls.objects.order_by('-id').first() + task_manager_cls = bc.database.get_model("task_manager.TaskManager") + task_manager = task_manager_cls.objects.order_by("-id").first() if task_manager is None: return 0 @@ -320,88 +321,105 @@ def get_last_task_manager_id(bc): class RandomFileTestSuite(ProvisioningTestCase): # When: random csv is uploaded and the file does not exists # Then: the task should not create any bill or activity - @patch.multiple('breathecode.services.google_cloud.Storage', - __init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) - @patch.multiple('breathecode.services.google_cloud.File', - __init__=MagicMock(return_value=None), - bucket=PropertyMock(), - file_name=PropertyMock(), - upload=MagicMock(), - exists=MagicMock(return_value=False), - url=MagicMock(return_value='https://storage.cloud.google.com/media-breathecode/hardcoded_url'), - create=True) - @patch('breathecode.provisioning.tasks.upload.delay', MagicMock(wraps=upload.delay)) - @patch('breathecode.provisioning.tasks.calculate_bill_amounts.delay', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - 
@patch('breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event', MagicMock()) + @patch.multiple( + "breathecode.services.google_cloud.Storage", + __init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, + ) + @patch.multiple( + "breathecode.services.google_cloud.File", + __init__=MagicMock(return_value=None), + bucket=PropertyMock(), + file_name=PropertyMock(), + upload=MagicMock(), + exists=MagicMock(return_value=False), + url=MagicMock(return_value="https://storage.cloud.google.com/media-breathecode/hardcoded_url"), + create=True, + ) + @patch("breathecode.provisioning.tasks.upload.delay", MagicMock(wraps=upload.delay)) + @patch("breathecode.provisioning.tasks.calculate_bill_amounts.delay", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event", MagicMock()) def test_random_csv__file_does_not_exists(self): csv = random_csv(10) slug = self.bc.fake.slug() - with patch('requests.get', response_mock(content=[{'id': 1} for _ in range(10)])): - with patch('breathecode.services.google_cloud.File.download', MagicMock(side_effect=csv_file_mock(csv))): + with patch("requests.get", response_mock(content=[{"id": 1} for _ in range(10)])): + with patch("breathecode.services.google_cloud.File.download", MagicMock(side_effect=csv_file_mock(csv))): upload(slug) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningBill'), []) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningUserConsumption'), []) - self.assertEqual(self.bc.database.list_of('authenticate.GithubAcademyUser'), []) + self.assertEqual(self.bc.database.list_of("provisioning.ProvisioningBill"), []) + self.assertEqual(self.bc.database.list_of("provisioning.ProvisioningUserConsumption"), []) + self.assertEqual(self.bc.database.list_of("authenticate.GithubAcademyUser"), []) self.bc.check.calls( logging.Logger.info.call_args_list, [ - call(f'Starting upload for hash {slug}'), + call(f"Starting upload for hash {slug}"), # retrying - call(f'Starting upload for hash {slug}'), - ]) - self.bc.check.calls(logging.Logger.error.call_args_list, [ - call(f'File {slug} not found', exc_info=True), - ]) + call(f"Starting upload for hash {slug}"), + ], + ) + self.bc.check.calls( + logging.Logger.error.call_args_list, + [ + call(f"File {slug} not found", exc_info=True), + ], + ) self.bc.check.calls(tasks.upload.delay.call_args_list, []) self.bc.check.calls(tasks.calculate_bill_amounts.delay.call_args_list, []) # When: random csv is uploaded and the file exists # Then: the task should not create any bill or activity - @patch.multiple('breathecode.services.google_cloud.Storage', - __init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) - @patch.multiple('breathecode.services.google_cloud.File', - __init__=MagicMock(return_value=None), - bucket=PropertyMock(), - file_name=PropertyMock(), - upload=MagicMock(), - exists=MagicMock(return_value=True), - url=MagicMock(return_value='https://storage.cloud.google.com/media-breathecode/hardcoded_url'), - create=True) - @patch('breathecode.provisioning.tasks.upload.delay', MagicMock(wraps=upload.delay)) - @patch('breathecode.provisioning.tasks.calculate_bill_amounts.delay', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('logging.Logger.info', MagicMock()) - 
@patch('logging.Logger.error', MagicMock()) - @patch('breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event', MagicMock()) + @patch.multiple( + "breathecode.services.google_cloud.Storage", + __init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, + ) + @patch.multiple( + "breathecode.services.google_cloud.File", + __init__=MagicMock(return_value=None), + bucket=PropertyMock(), + file_name=PropertyMock(), + upload=MagicMock(), + exists=MagicMock(return_value=True), + url=MagicMock(return_value="https://storage.cloud.google.com/media-breathecode/hardcoded_url"), + create=True, + ) + @patch("breathecode.provisioning.tasks.upload.delay", MagicMock(wraps=upload.delay)) + @patch("breathecode.provisioning.tasks.calculate_bill_amounts.delay", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event", MagicMock()) def test_random_csv__file_exists(self): csv = random_csv(10) slug = self.bc.fake.slug() - with patch('requests.get', response_mock(content=[{'id': 1} for _ in range(10)])): - with patch('breathecode.services.google_cloud.File.download', MagicMock(side_effect=csv_file_mock(csv))): + with patch("requests.get", response_mock(content=[{"id": 1} for _ in range(10)])): + with patch("breathecode.services.google_cloud.File.download", MagicMock(side_effect=csv_file_mock(csv))): upload(slug) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningBill'), []) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningUserConsumption'), []) - self.assertEqual(self.bc.database.list_of('authenticate.GithubAcademyUser'), []) + self.assertEqual(self.bc.database.list_of("provisioning.ProvisioningBill"), []) + self.assertEqual(self.bc.database.list_of("provisioning.ProvisioningUserConsumption"), []) + self.assertEqual(self.bc.database.list_of("authenticate.GithubAcademyUser"), []) - self.bc.check.calls(logging.Logger.info.call_args_list, [call(f'Starting upload for hash {slug}')]) - self.bc.check.calls(logging.Logger.error.call_args_list, [ - call(f'File {slug} has an unsupported origin or the provider had changed the file format', exc_info=True), - ]) + self.bc.check.calls(logging.Logger.info.call_args_list, [call(f"Starting upload for hash {slug}")]) + self.bc.check.calls( + logging.Logger.error.call_args_list, + [ + call( + f"File {slug} has an unsupported origin or the provider had changed the file format", exc_info=True + ), + ], + ) self.bc.check.calls(tasks.upload.delay.call_args_list, []) self.bc.check.calls(tasks.calculate_bill_amounts.delay.call_args_list, []) @@ -409,50 +427,63 @@ def test_random_csv__file_exists(self): # Given: a csv and 1 ProvisioningBill # When: random csv is uploaded and the file exists # Then: the task should not create any bill or activity - @patch.multiple('breathecode.services.google_cloud.Storage', - __init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) - @patch.multiple('breathecode.services.google_cloud.File', - __init__=MagicMock(return_value=None), - bucket=PropertyMock(), - file_name=PropertyMock(), - upload=MagicMock(), - exists=MagicMock(return_value=True), - url=MagicMock(return_value='https://storage.cloud.google.com/media-breathecode/hardcoded_url'), - create=True) - @patch('breathecode.provisioning.tasks.upload.delay', MagicMock(wraps=upload.delay)) - 
@patch('breathecode.provisioning.tasks.calculate_bill_amounts.delay', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event', MagicMock()) + @patch.multiple( + "breathecode.services.google_cloud.Storage", + __init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, + ) + @patch.multiple( + "breathecode.services.google_cloud.File", + __init__=MagicMock(return_value=None), + bucket=PropertyMock(), + file_name=PropertyMock(), + upload=MagicMock(), + exists=MagicMock(return_value=True), + url=MagicMock(return_value="https://storage.cloud.google.com/media-breathecode/hardcoded_url"), + create=True, + ) + @patch("breathecode.provisioning.tasks.upload.delay", MagicMock(wraps=upload.delay)) + @patch("breathecode.provisioning.tasks.calculate_bill_amounts.delay", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event", MagicMock()) def test_random_csv__file_exists__already_processed(self): csv = random_csv(10) slug = self.bc.fake.slug() - provisioning_bill = {'hash': slug} + provisioning_bill = {"hash": slug} model = self.bc.database.create(provisioning_bill=provisioning_bill) logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] - with patch('requests.get', response_mock(content=[{'id': 1} for _ in range(10)])): - with patch('breathecode.services.google_cloud.File.download', MagicMock(side_effect=csv_file_mock(csv))): + with patch("requests.get", response_mock(content=[{"id": 1} for _ in range(10)])): + with patch("breathecode.services.google_cloud.File.download", MagicMock(side_effect=csv_file_mock(csv))): upload(slug) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningBill'), [ - self.bc.format.to_dict(model.provisioning_bill), - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningUserConsumption'), []) - self.assertEqual(self.bc.database.list_of('authenticate.GithubAcademyUser'), []) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningBill"), + [ + self.bc.format.to_dict(model.provisioning_bill), + ], + ) + self.assertEqual(self.bc.database.list_of("provisioning.ProvisioningUserConsumption"), []) + self.assertEqual(self.bc.database.list_of("authenticate.GithubAcademyUser"), []) - self.bc.check.calls(logging.Logger.info.call_args_list, [ - call(f'Starting upload for hash {slug}'), - ]) - self.bc.check.calls(logging.Logger.error.call_args_list, [ - call(f'File {slug} already processed', exc_info=True), - ]) + self.bc.check.calls( + logging.Logger.info.call_args_list, + [ + call(f"Starting upload for hash {slug}"), + ], + ) + self.bc.check.calls( + logging.Logger.error.call_args_list, + [ + call(f"File {slug} already processed", exc_info=True), + ], + ) self.bc.check.calls(tasks.upload.delay.call_args_list, []) self.bc.check.calls(tasks.calculate_bill_amounts.delay.call_args_list, []) @@ -460,53 +491,68 @@ def test_random_csv__file_exists__already_processed(self): # Given: a csv and 1 ProvisioningBill # When: random csv is uploaded and the file exists # Then: the task should not create any bill or activity - @patch.multiple('breathecode.services.google_cloud.Storage', - 
__init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) - @patch.multiple('breathecode.services.google_cloud.File', - __init__=MagicMock(return_value=None), - bucket=PropertyMock(), - file_name=PropertyMock(), - upload=MagicMock(), - exists=MagicMock(return_value=True), - url=MagicMock(return_value='https://storage.cloud.google.com/media-breathecode/hardcoded_url'), - create=True) - @patch('breathecode.provisioning.tasks.upload.delay', MagicMock(wraps=upload.delay)) - @patch('breathecode.provisioning.tasks.calculate_bill_amounts.delay', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event', MagicMock()) + @patch.multiple( + "breathecode.services.google_cloud.Storage", + __init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, + ) + @patch.multiple( + "breathecode.services.google_cloud.File", + __init__=MagicMock(return_value=None), + bucket=PropertyMock(), + file_name=PropertyMock(), + upload=MagicMock(), + exists=MagicMock(return_value=True), + url=MagicMock(return_value="https://storage.cloud.google.com/media-breathecode/hardcoded_url"), + create=True, + ) + @patch("breathecode.provisioning.tasks.upload.delay", MagicMock(wraps=upload.delay)) + @patch("breathecode.provisioning.tasks.calculate_bill_amounts.delay", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event", MagicMock()) def test_random_csv__file_exists__already_processed__(self): csv = random_csv(10) slug = self.bc.fake.slug() provisioning_bill = { - 'hash': slug, - 'status': random.choice(['DISPUTED', 'IGNORED', 'PAID']), + "hash": slug, + "status": random.choice(["DISPUTED", "IGNORED", "PAID"]), } model = self.bc.database.create(provisioning_bill=provisioning_bill) logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] - with patch('requests.get', response_mock(content=[{'id': 1} for _ in range(10)])): - with patch('breathecode.services.google_cloud.File.download', MagicMock(side_effect=csv_file_mock(csv))): + with patch("requests.get", response_mock(content=[{"id": 1} for _ in range(10)])): + with patch("breathecode.services.google_cloud.File.download", MagicMock(side_effect=csv_file_mock(csv))): upload(slug, force=True) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningBill'), [ - self.bc.format.to_dict(model.provisioning_bill), - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningUserConsumption'), []) - self.assertEqual(self.bc.database.list_of('authenticate.GithubAcademyUser'), []) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningBill"), + [ + self.bc.format.to_dict(model.provisioning_bill), + ], + ) + self.assertEqual(self.bc.database.list_of("provisioning.ProvisioningUserConsumption"), []) + self.assertEqual(self.bc.database.list_of("authenticate.GithubAcademyUser"), []) - self.bc.check.calls(logging.Logger.info.call_args_list, [ - call(f'Starting upload for hash {slug}'), - ]) - self.bc.check.calls(logging.Logger.error.call_args_list, [ - call('Cannot force upload because there are bills with status DISPUTED, IGNORED or PAID', exc_info=True), - ]) + self.bc.check.calls( + 
logging.Logger.info.call_args_list, + [ + call(f"Starting upload for hash {slug}"), + ], + ) + self.bc.check.calls( + logging.Logger.error.call_args_list, + [ + call( + "Cannot force upload because there are bills with status DISPUTED, IGNORED or PAID", exc_info=True + ), + ], + ) self.bc.check.calls(tasks.upload.delay.call_args_list, []) self.bc.check.calls(tasks.calculate_bill_amounts.delay.call_args_list, []) @@ -518,92 +564,115 @@ class CodespacesTestSuite(ProvisioningTestCase): # Given: a csv with codespaces data # When: users does not exist and vendor not found # Then: the task should not create any bill or activity - @patch.multiple('breathecode.services.google_cloud.Storage', - __init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) - @patch.multiple('breathecode.services.google_cloud.File', - __init__=MagicMock(return_value=None), - bucket=PropertyMock(), - file_name=PropertyMock(), - upload=MagicMock(), - exists=MagicMock(return_value=True), - url=MagicMock(return_value='https://storage.cloud.google.com/media-breathecode/hardcoded_url'), - create=True) - @patch('breathecode.provisioning.tasks.upload.delay', MagicMock(wraps=upload.delay)) - @patch('breathecode.provisioning.tasks.calculate_bill_amounts.delay', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event', MagicMock()) + @patch.multiple( + "breathecode.services.google_cloud.Storage", + __init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, + ) + @patch.multiple( + "breathecode.services.google_cloud.File", + __init__=MagicMock(return_value=None), + bucket=PropertyMock(), + file_name=PropertyMock(), + upload=MagicMock(), + exists=MagicMock(return_value=True), + url=MagicMock(return_value="https://storage.cloud.google.com/media-breathecode/hardcoded_url"), + create=True, + ) + @patch("breathecode.provisioning.tasks.upload.delay", MagicMock(wraps=upload.delay)) + @patch("breathecode.provisioning.tasks.calculate_bill_amounts.delay", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event", MagicMock()) def test_users_not_found(self): csv = codespaces_csv(10) slug = self.bc.fake.slug() - with patch('requests.get', response_mock(content=[{'id': 1} for _ in range(10)])): - with patch('breathecode.services.google_cloud.File.download', MagicMock(side_effect=csv_file_mock(csv))): + with patch("requests.get", response_mock(content=[{"id": 1} for _ in range(10)])): + with patch("breathecode.services.google_cloud.File.download", MagicMock(side_effect=csv_file_mock(csv))): upload(slug) - self.assertEqual(self.bc.database.list_of('payments.Currency'), [currency_data()]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningBill'), []) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningConsumptionKind'), [ - provisioning_activity_kind_data({ - 'id': n + 1, - 'product_name': csv['Product'][n], - 'sku': str(csv['SKU'][n]), - }) for n in range(10) - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningPrice'), [ - provisioning_activity_price_data({ - 'currency_id': 1, - 'id': n + 1, - 'multiplier': csv['Multiplier'][n], - 'price_per_unit': 
csv['Price Per Unit ($)'][n] * 1.3, - 'unit_type': csv['Unit Type'][n], - }) for n in range(10) - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningConsumptionEvent'), [ - provisioning_activity_item_data( - { - 'id': n + 1, - 'price_id': n + 1, - 'quantity': float(csv['Quantity'][n]), - 'registered_at': datetime.strptime(csv['Date'][n], '%Y-%m-%d').replace(tzinfo=pytz.UTC), - 'repository_url': f"https://github.com/{csv['Owner'][n]}/{csv['Repository Slug'][n]}", - 'task_associated_slug': csv['Repository Slug'][n], - 'csv_row': n, - }) for n in range(10) - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningUserConsumption'), [ - provisioning_activity_data({ - 'id': - n + 1, - 'kind_id': - n + 1, - 'hash': - slug, - 'username': - csv['Username'][n], - 'processed_at': - UTC_NOW, - 'status': - 'ERROR', - 'status_text': - ', '.join([ - 'Provisioning vendor Codespaces not found', - f"We could not find enough information about {csv['Username'][n]}, mark this user user " - "as deleted if you don't recognize it", - ]), - }) for n in range(10) - ]) - - self.assertEqual(self.bc.database.list_of('authenticate.GithubAcademyUser'), []) - - self.bc.check.calls(logging.Logger.info.call_args_list, [call(f'Starting upload for hash {slug}')]) - - self.bc.check.calls(logging.Logger.error.call_args_list, - [call(f"Organization {csv['Owner'][n]} not found") for n in range(10)]) + self.assertEqual(self.bc.database.list_of("payments.Currency"), [currency_data()]) + self.assertEqual(self.bc.database.list_of("provisioning.ProvisioningBill"), []) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningConsumptionKind"), + [ + provisioning_activity_kind_data( + { + "id": n + 1, + "product_name": csv["Product"][n], + "sku": str(csv["SKU"][n]), + } + ) + for n in range(10) + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningPrice"), + [ + provisioning_activity_price_data( + { + "currency_id": 1, + "id": n + 1, + "multiplier": csv["Multiplier"][n], + "price_per_unit": csv["Price Per Unit ($)"][n] * 1.3, + "unit_type": csv["Unit Type"][n], + } + ) + for n in range(10) + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningConsumptionEvent"), + [ + provisioning_activity_item_data( + { + "id": n + 1, + "price_id": n + 1, + "quantity": float(csv["Quantity"][n]), + "registered_at": datetime.strptime(csv["Date"][n], "%Y-%m-%d").replace(tzinfo=pytz.UTC), + "repository_url": f"https://github.com/{csv['Owner'][n]}/{csv['Repository Slug'][n]}", + "task_associated_slug": csv["Repository Slug"][n], + "csv_row": n, + } + ) + for n in range(10) + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningUserConsumption"), + [ + provisioning_activity_data( + { + "id": n + 1, + "kind_id": n + 1, + "hash": slug, + "username": csv["Username"][n], + "processed_at": UTC_NOW, + "status": "ERROR", + "status_text": ", ".join( + [ + "Provisioning vendor Codespaces not found", + f"We could not find enough information about {csv['Username'][n]}, mark this user user " + "as deleted if you don't recognize it", + ] + ), + } + ) + for n in range(10) + ], + ) + + self.assertEqual(self.bc.database.list_of("authenticate.GithubAcademyUser"), []) + + self.bc.check.calls(logging.Logger.info.call_args_list, [call(f"Starting upload for hash {slug}")]) + + self.bc.check.calls( + logging.Logger.error.call_args_list, [call(f"Organization {csv['Owner'][n]} not found") for n in range(10)] + ) 
self.bc.check.calls(tasks.upload.delay.call_args_list, []) self.bc.check.calls(tasks.calculate_bill_amounts.delay.call_args_list, []) @@ -612,117 +681,147 @@ def test_users_not_found(self): # When: users does not exist, vendor not found and # -> each github organization have two AcademyAuthSettings # Then: the task should create 20 bills, 20 activities and two GithubAcademyUser per academy - @patch.multiple('breathecode.services.google_cloud.Storage', - __init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) - @patch.multiple('breathecode.services.google_cloud.File', - __init__=MagicMock(return_value=None), - bucket=PropertyMock(), - file_name=PropertyMock(), - upload=MagicMock(), - exists=MagicMock(return_value=True), - url=MagicMock(return_value='https://storage.cloud.google.com/media-breathecode/hardcoded_url'), - create=True) - @patch('breathecode.provisioning.tasks.upload.delay', MagicMock(wraps=upload.delay)) - @patch('breathecode.provisioning.tasks.calculate_bill_amounts.delay', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event', MagicMock()) + @patch.multiple( + "breathecode.services.google_cloud.Storage", + __init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, + ) + @patch.multiple( + "breathecode.services.google_cloud.File", + __init__=MagicMock(return_value=None), + bucket=PropertyMock(), + file_name=PropertyMock(), + upload=MagicMock(), + exists=MagicMock(return_value=True), + url=MagicMock(return_value="https://storage.cloud.google.com/media-breathecode/hardcoded_url"), + create=True, + ) + @patch("breathecode.provisioning.tasks.upload.delay", MagicMock(wraps=upload.delay)) + @patch("breathecode.provisioning.tasks.calculate_bill_amounts.delay", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event", MagicMock()) def test_users_not_found__case1(self): csv = codespaces_csv(10) academy_auth_settings = [] id = 0 - for owner in csv['Owner']: - academy_auth_settings.append({'github_username': owner, 'academy_id': id + 1}) - academy_auth_settings.append({'github_username': owner, 'academy_id': id + 2}) + for owner in csv["Owner"]: + academy_auth_settings.append({"github_username": owner, "academy_id": id + 1}) + academy_auth_settings.append({"github_username": owner, "academy_id": id + 2}) id += 2 - github_academy_users = [{ - 'user_id': 1, - 'academy_id': n + 1, - 'storage_action': 'ADD', - 'storage_status': 'SYNCHED', - } for n in range(20)] - - model = self.bc.database.create(academy_auth_settings=academy_auth_settings, - academy=20, - user=1, - github_academy_user=github_academy_users) + github_academy_users = [ + { + "user_id": 1, + "academy_id": n + 1, + "storage_action": "ADD", + "storage_status": "SYNCHED", + } + for n in range(20) + ] + + model = self.bc.database.create( + academy_auth_settings=academy_auth_settings, academy=20, user=1, github_academy_user=github_academy_users + ) logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] slug = self.bc.fake.slug() - with patch('requests.get', response_mock(content=[{'id': 1} for _ in range(10)])): - with 
patch('breathecode.services.google_cloud.File.download', MagicMock(side_effect=csv_file_mock(csv))): + with patch("requests.get", response_mock(content=[{"id": 1} for _ in range(10)])): + with patch("breathecode.services.google_cloud.File.download", MagicMock(side_effect=csv_file_mock(csv))): upload(slug) - self.assertEqual(self.bc.database.list_of('payments.Currency'), [currency_data()]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningBill'), [ - provisioning_bill_data({ - 'id': n + 1, - 'academy_id': n + 1, - 'vendor_id': None, - 'hash': slug, - 'total_amount': 0.0, - 'status': 'ERROR', - }) for n in range(20) - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningConsumptionKind'), [ - provisioning_activity_kind_data({ - 'id': n + 1, - 'product_name': csv['Product'][n], - 'sku': str(csv['SKU'][n]), - }) for n in range(10) - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningPrice'), [ - provisioning_activity_price_data({ - 'currency_id': 1, - 'id': n + 1, - 'multiplier': csv['Multiplier'][n], - 'price_per_unit': csv['Price Per Unit ($)'][n] * 1.3, - 'unit_type': csv['Unit Type'][n], - }) for n in range(10) - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningConsumptionEvent'), [ - provisioning_activity_item_data( - { - 'id': n + 1, - 'price_id': n + 1, - 'quantity': float(csv['Quantity'][n]), - 'registered_at': datetime.strptime(csv['Date'][n], '%Y-%m-%d').replace(tzinfo=pytz.UTC), - 'repository_url': f"https://github.com/{csv['Owner'][n]}/{csv['Repository Slug'][n]}", - 'task_associated_slug': csv['Repository Slug'][n], - 'csv_row': n, - }) for n in range(10) - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningUserConsumption'), [ - provisioning_activity_data({ - 'id': - n + 1, - 'kind_id': - n + 1, - 'hash': - slug, - 'username': - csv['Username'][n], - 'processed_at': - UTC_NOW, - 'status': - 'ERROR', - 'status_text': - ', '.join([ - 'Provisioning vendor Codespaces not found', - f"We could not find enough information about {csv['Username'][n]}, mark this user user " - "as deleted if you don't recognize it", - ]), - }) for n in range(10) - ]) + self.assertEqual(self.bc.database.list_of("payments.Currency"), [currency_data()]) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningBill"), + [ + provisioning_bill_data( + { + "id": n + 1, + "academy_id": n + 1, + "vendor_id": None, + "hash": slug, + "total_amount": 0.0, + "status": "ERROR", + } + ) + for n in range(20) + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningConsumptionKind"), + [ + provisioning_activity_kind_data( + { + "id": n + 1, + "product_name": csv["Product"][n], + "sku": str(csv["SKU"][n]), + } + ) + for n in range(10) + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningPrice"), + [ + provisioning_activity_price_data( + { + "currency_id": 1, + "id": n + 1, + "multiplier": csv["Multiplier"][n], + "price_per_unit": csv["Price Per Unit ($)"][n] * 1.3, + "unit_type": csv["Unit Type"][n], + } + ) + for n in range(10) + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningConsumptionEvent"), + [ + provisioning_activity_item_data( + { + "id": n + 1, + "price_id": n + 1, + "quantity": float(csv["Quantity"][n]), + "registered_at": datetime.strptime(csv["Date"][n], "%Y-%m-%d").replace(tzinfo=pytz.UTC), + "repository_url": f"https://github.com/{csv['Owner'][n]}/{csv['Repository Slug'][n]}", + "task_associated_slug": 
csv["Repository Slug"][n], + "csv_row": n, + } + ) + for n in range(10) + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningUserConsumption"), + [ + provisioning_activity_data( + { + "id": n + 1, + "kind_id": n + 1, + "hash": slug, + "username": csv["Username"][n], + "processed_at": UTC_NOW, + "status": "ERROR", + "status_text": ", ".join( + [ + "Provisioning vendor Codespaces not found", + f"We could not find enough information about {csv['Username'][n]}, mark this user user " + "as deleted if you don't recognize it", + ] + ), + } + ) + for n in range(10) + ], + ) id = 0 github_academy_users = self.bc.format.to_dict(model.github_academy_user) @@ -731,28 +830,32 @@ def test_users_not_found__case1(self): github_academy_users.append( github_academy_user_data( data={ - 'id': id + 1 + 20, - 'academy_id': id + 1, - 'storage_action': 'IGNORE', - 'storage_status': 'PAYMENT_CONFLICT', - 'username': csv['Username'][n], - })) + "id": id + 1 + 20, + "academy_id": id + 1, + "storage_action": "IGNORE", + "storage_status": "PAYMENT_CONFLICT", + "username": csv["Username"][n], + } + ) + ) github_academy_users.append( github_academy_user_data( data={ - 'id': id + 2 + 20, - 'academy_id': id + 2, - 'storage_action': 'IGNORE', - 'storage_status': 'PAYMENT_CONFLICT', - 'username': csv['Username'][n], - })) + "id": id + 2 + 20, + "academy_id": id + 2, + "storage_action": "IGNORE", + "storage_status": "PAYMENT_CONFLICT", + "username": csv["Username"][n], + } + ) + ) id += 2 - self.assertEqual(self.bc.database.list_of('authenticate.GithubAcademyUser'), github_academy_users) + self.assertEqual(self.bc.database.list_of("authenticate.GithubAcademyUser"), github_academy_users) - self.bc.check.calls(logging.Logger.info.call_args_list, [call(f'Starting upload for hash {slug}')]) + self.bc.check.calls(logging.Logger.info.call_args_list, [call(f"Starting upload for hash {slug}")]) self.bc.check.calls(logging.Logger.error.call_args_list, []) self.bc.check.calls(tasks.upload.delay.call_args_list, []) @@ -762,118 +865,151 @@ def test_users_not_found__case1(self): # When: users does not exist, vendor not found and # -> each github organization have two AcademyAuthSettings # Then: the task should create 20 bills, 20 activities and two GithubAcademyUser per academy - @patch.multiple('breathecode.services.google_cloud.Storage', - __init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) - @patch.multiple('breathecode.services.google_cloud.File', - __init__=MagicMock(return_value=None), - bucket=PropertyMock(), - file_name=PropertyMock(), - upload=MagicMock(), - exists=MagicMock(return_value=True), - url=MagicMock(return_value='https://storage.cloud.google.com/media-breathecode/hardcoded_url'), - create=True) - @patch('breathecode.provisioning.tasks.upload.delay', MagicMock(wraps=upload.delay)) - @patch('breathecode.provisioning.tasks.calculate_bill_amounts.delay', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event', MagicMock()) + @patch.multiple( + "breathecode.services.google_cloud.Storage", + __init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, + ) + @patch.multiple( + "breathecode.services.google_cloud.File", + __init__=MagicMock(return_value=None), + bucket=PropertyMock(), + file_name=PropertyMock(), + upload=MagicMock(), + 
exists=MagicMock(return_value=True), + url=MagicMock(return_value="https://storage.cloud.google.com/media-breathecode/hardcoded_url"), + create=True, + ) + @patch("breathecode.provisioning.tasks.upload.delay", MagicMock(wraps=upload.delay)) + @patch("breathecode.provisioning.tasks.calculate_bill_amounts.delay", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event", MagicMock()) def test_users_not_found__case2(self): csv = codespaces_csv(10) academy_auth_settings = [] id = 0 - for owner in csv['Owner']: - academy_auth_settings.append({'github_username': owner, 'academy_id': id + 1}) - academy_auth_settings.append({'github_username': owner, 'academy_id': id + 2}) + for owner in csv["Owner"]: + academy_auth_settings.append({"github_username": owner, "academy_id": id + 1}) + academy_auth_settings.append({"github_username": owner, "academy_id": id + 2}) id += 2 - credentials_github = [{'username': csv['Username'][n], 'user_id': n + 1} for n in range(10)] - github_academy_users = [{ - 'user_id': 11, - 'academy_id': n + 1, - 'storage_action': 'ADD', - 'storage_status': 'SYNCHED', - } for n in range(20)] - model = self.bc.database.create(academy_auth_settings=academy_auth_settings, - academy=20, - user=11, - github_academy_user=github_academy_users, - credentials_github=credentials_github) + credentials_github = [{"username": csv["Username"][n], "user_id": n + 1} for n in range(10)] + github_academy_users = [ + { + "user_id": 11, + "academy_id": n + 1, + "storage_action": "ADD", + "storage_status": "SYNCHED", + } + for n in range(20) + ] + model = self.bc.database.create( + academy_auth_settings=academy_auth_settings, + academy=20, + user=11, + github_academy_user=github_academy_users, + credentials_github=credentials_github, + ) logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] slug = self.bc.fake.slug() - with patch('requests.get', response_mock(content=[{'id': 1} for _ in range(10)])): - with patch('breathecode.services.google_cloud.File.download', MagicMock(side_effect=csv_file_mock(csv))): + with patch("requests.get", response_mock(content=[{"id": 1} for _ in range(10)])): + with patch("breathecode.services.google_cloud.File.download", MagicMock(side_effect=csv_file_mock(csv))): upload(slug) - self.assertEqual(self.bc.database.list_of('payments.Currency'), [currency_data()]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningBill'), [ - provisioning_bill_data({ - 'id': n + 1, - 'academy_id': n + 1, - 'vendor_id': None, - 'hash': slug, - 'total_amount': 0.0, - 'status': 'ERROR', - }) for n in range(20) - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningConsumptionKind'), [ - provisioning_activity_kind_data({ - 'id': n + 1, - 'product_name': csv['Product'][n], - 'sku': str(csv['SKU'][n]), - }) for n in range(10) - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningPrice'), [ - provisioning_activity_price_data({ - 'currency_id': 1, - 'id': n + 1, - 'multiplier': csv['Multiplier'][n], - 'price_per_unit': csv['Price Per Unit ($)'][n] * 1.3, - 'unit_type': csv['Unit Type'][n], - }) for n in range(10) - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningConsumptionEvent'), [ - provisioning_activity_item_data( - { - 'id': n + 1, - 'price_id': n + 1, - 'quantity': 
float(csv['Quantity'][n]), - 'registered_at': datetime.strptime(csv['Date'][n], '%Y-%m-%d').replace(tzinfo=pytz.UTC), - 'repository_url': f"https://github.com/{csv['Owner'][n]}/{csv['Repository Slug'][n]}", - 'task_associated_slug': csv['Repository Slug'][n], - 'csv_row': n, - }) for n in range(10) - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningUserConsumption'), [ - provisioning_activity_data({ - 'id': - n + 1, - 'kind_id': - n + 1, - 'hash': - slug, - 'username': - csv['Username'][n], - 'processed_at': - UTC_NOW, - 'status': - 'ERROR', - 'status_text': - ', '.join([ - 'Provisioning vendor Codespaces not found', - f"We could not find enough information about {csv['Username'][n]}, mark this user user " - "as deleted if you don't recognize it", - ]), - }) for n in range(10) - ]) + self.assertEqual(self.bc.database.list_of("payments.Currency"), [currency_data()]) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningBill"), + [ + provisioning_bill_data( + { + "id": n + 1, + "academy_id": n + 1, + "vendor_id": None, + "hash": slug, + "total_amount": 0.0, + "status": "ERROR", + } + ) + for n in range(20) + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningConsumptionKind"), + [ + provisioning_activity_kind_data( + { + "id": n + 1, + "product_name": csv["Product"][n], + "sku": str(csv["SKU"][n]), + } + ) + for n in range(10) + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningPrice"), + [ + provisioning_activity_price_data( + { + "currency_id": 1, + "id": n + 1, + "multiplier": csv["Multiplier"][n], + "price_per_unit": csv["Price Per Unit ($)"][n] * 1.3, + "unit_type": csv["Unit Type"][n], + } + ) + for n in range(10) + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningConsumptionEvent"), + [ + provisioning_activity_item_data( + { + "id": n + 1, + "price_id": n + 1, + "quantity": float(csv["Quantity"][n]), + "registered_at": datetime.strptime(csv["Date"][n], "%Y-%m-%d").replace(tzinfo=pytz.UTC), + "repository_url": f"https://github.com/{csv['Owner'][n]}/{csv['Repository Slug'][n]}", + "task_associated_slug": csv["Repository Slug"][n], + "csv_row": n, + } + ) + for n in range(10) + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningUserConsumption"), + [ + provisioning_activity_data( + { + "id": n + 1, + "kind_id": n + 1, + "hash": slug, + "username": csv["Username"][n], + "processed_at": UTC_NOW, + "status": "ERROR", + "status_text": ", ".join( + [ + "Provisioning vendor Codespaces not found", + f"We could not find enough information about {csv['Username'][n]}, mark this user user " + "as deleted if you don't recognize it", + ] + ), + } + ) + for n in range(10) + ], + ) id = 0 github_academy_users = self.bc.format.to_dict(model.github_academy_user) @@ -882,30 +1018,34 @@ def test_users_not_found__case2(self): github_academy_users.append( github_academy_user_data( data={ - 'id': id + 1 + 20, - 'user_id': (id / 2) + 1, - 'academy_id': id + 1, - 'storage_action': 'IGNORE', - 'storage_status': 'PAYMENT_CONFLICT', - 'username': csv['Username'][n], - })) + "id": id + 1 + 20, + "user_id": (id / 2) + 1, + "academy_id": id + 1, + "storage_action": "IGNORE", + "storage_status": "PAYMENT_CONFLICT", + "username": csv["Username"][n], + } + ) + ) github_academy_users.append( github_academy_user_data( data={ - 'id': id + 2 + 20, - 'user_id': (id / 2) + 1, - 'academy_id': id + 2, - 'storage_action': 'IGNORE', - 'storage_status': 
'PAYMENT_CONFLICT', - 'username': csv['Username'][n], - })) + "id": id + 2 + 20, + "user_id": (id / 2) + 1, + "academy_id": id + 2, + "storage_action": "IGNORE", + "storage_status": "PAYMENT_CONFLICT", + "username": csv["Username"][n], + } + ) + ) id += 2 - self.assertEqual(self.bc.database.list_of('authenticate.GithubAcademyUser'), github_academy_users) + self.assertEqual(self.bc.database.list_of("authenticate.GithubAcademyUser"), github_academy_users) - self.bc.check.calls(logging.Logger.info.call_args_list, [call(f'Starting upload for hash {slug}')]) + self.bc.check.calls(logging.Logger.info.call_args_list, [call(f"Starting upload for hash {slug}")]) self.bc.check.calls(logging.Logger.error.call_args_list, []) self.bc.check.calls(tasks.upload.delay.call_args_list, []) @@ -915,103 +1055,143 @@ def test_users_not_found__case2(self): # -> and 1 ProvisioningVendor of type codespaces # When: all the data is correct # Then: the task should create 1 bills and 10 activities - @patch.multiple('breathecode.services.google_cloud.Storage', - __init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) - @patch.multiple('breathecode.services.google_cloud.File', - __init__=MagicMock(return_value=None), - bucket=PropertyMock(), - file_name=PropertyMock(), - upload=MagicMock(), - exists=MagicMock(return_value=True), - url=MagicMock(return_value='https://storage.cloud.google.com/media-breathecode/hardcoded_url'), - create=True) - @patch('breathecode.provisioning.tasks.upload.delay', MagicMock(wraps=upload.delay)) - @patch('breathecode.provisioning.tasks.calculate_bill_amounts.delay', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event', MagicMock()) + @patch.multiple( + "breathecode.services.google_cloud.Storage", + __init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, + ) + @patch.multiple( + "breathecode.services.google_cloud.File", + __init__=MagicMock(return_value=None), + bucket=PropertyMock(), + file_name=PropertyMock(), + upload=MagicMock(), + exists=MagicMock(return_value=True), + url=MagicMock(return_value="https://storage.cloud.google.com/media-breathecode/hardcoded_url"), + create=True, + ) + @patch("breathecode.provisioning.tasks.upload.delay", MagicMock(wraps=upload.delay)) + @patch("breathecode.provisioning.tasks.calculate_bill_amounts.delay", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event", MagicMock()) def test_from_github_credentials__generate_anything(self): csv = codespaces_csv(10) - github_academy_users = [{ - 'username': x, - } for x in csv['Username']] - github_academy_user_logs = [{ - 'storage_status': 'SYNCHED', - 'storage_action': 'ADD', - 'academy_user_id': n + 1, - } for n in range(10)] - provisioning_vendor = {'name': 'Codespaces'} - model = self.bc.database.create(user=10, - github_academy_user=github_academy_users, - github_academy_user_log=github_academy_user_logs, - provisioning_vendor=provisioning_vendor) + github_academy_users = 
[ + { + "username": x, + } + for x in csv["Username"] + ] + github_academy_user_logs = [ + { + "storage_status": "SYNCHED", + "storage_action": "ADD", + "academy_user_id": n + 1, + } + for n in range(10) + ] + provisioning_vendor = {"name": "Codespaces"} + model = self.bc.database.create( + user=10, + github_academy_user=github_academy_users, + github_academy_user_log=github_academy_user_logs, + provisioning_vendor=provisioning_vendor, + ) logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] slug = self.bc.fake.slug() - with patch('breathecode.services.google_cloud.File.download', MagicMock(side_effect=csv_file_mock(csv))): + with patch("breathecode.services.google_cloud.File.download", MagicMock(side_effect=csv_file_mock(csv))): upload(slug) - self.assertEqual(self.bc.database.list_of('payments.Currency'), [currency_data()]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningBill'), [ - provisioning_bill_data({ - 'hash': slug, - 'vendor_id': 1, - }), - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningConsumptionKind'), [ - provisioning_activity_kind_data({ - 'id': n + 1, - 'product_name': csv['Product'][n], - 'sku': str(csv['SKU'][n]), - }) for n in range(10) - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningPrice'), [ - provisioning_activity_price_data({ - 'currency_id': 1, - 'id': n + 1, - 'multiplier': csv['Multiplier'][n], - 'price_per_unit': csv['Price Per Unit ($)'][n] * 1.3, - 'unit_type': csv['Unit Type'][n], - }) for n in range(10) - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningConsumptionEvent'), [ - provisioning_activity_item_data( - { - 'id': n + 1, - 'price_id': n + 1, - 'vendor_id': 1, - 'quantity': float(csv['Quantity'][n]), - 'registered_at': datetime.strptime(csv['Date'][n], '%Y-%m-%d').replace(tzinfo=pytz.UTC), - 'repository_url': f"https://github.com/{csv['Owner'][n]}/{csv['Repository Slug'][n]}", - 'task_associated_slug': csv['Repository Slug'][n], - 'csv_row': n, - }) for n in range(10) - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningUserConsumption'), [ - provisioning_activity_data({ - 'id': n + 1, - 'kind_id': n + 1, - 'hash': slug, - 'username': csv['Username'][n], - 'processed_at': UTC_NOW, - 'status': 'PERSISTED', - }) for n in range(10) - ]) - - self.assertEqual( - self.bc.database.list_of('authenticate.GithubAcademyUser'), + self.assertEqual(self.bc.database.list_of("payments.Currency"), [currency_data()]) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningBill"), + [ + provisioning_bill_data( + { + "hash": slug, + "vendor_id": 1, + } + ), + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningConsumptionKind"), + [ + provisioning_activity_kind_data( + { + "id": n + 1, + "product_name": csv["Product"][n], + "sku": str(csv["SKU"][n]), + } + ) + for n in range(10) + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningPrice"), + [ + provisioning_activity_price_data( + { + "currency_id": 1, + "id": n + 1, + "multiplier": csv["Multiplier"][n], + "price_per_unit": csv["Price Per Unit ($)"][n] * 1.3, + "unit_type": csv["Unit Type"][n], + } + ) + for n in range(10) + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningConsumptionEvent"), + [ + provisioning_activity_item_data( + { + "id": n + 1, + "price_id": n + 1, + "vendor_id": 1, + "quantity": float(csv["Quantity"][n]), + "registered_at": 
datetime.strptime(csv["Date"][n], "%Y-%m-%d").replace(tzinfo=pytz.UTC), + "repository_url": f"https://github.com/{csv['Owner'][n]}/{csv['Repository Slug'][n]}", + "task_associated_slug": csv["Repository Slug"][n], + "csv_row": n, + } + ) + for n in range(10) + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningUserConsumption"), + [ + provisioning_activity_data( + { + "id": n + 1, + "kind_id": n + 1, + "hash": slug, + "username": csv["Username"][n], + "processed_at": UTC_NOW, + "status": "PERSISTED", + } + ) + for n in range(10) + ], + ) + + self.assertEqual( + self.bc.database.list_of("authenticate.GithubAcademyUser"), self.bc.format.to_dict(model.github_academy_user), ) - self.bc.check.calls(logging.Logger.info.call_args_list, [call(f'Starting upload for hash {slug}')]) + self.bc.check.calls(logging.Logger.info.call_args_list, [call(f"Starting upload for hash {slug}")]) self.bc.check.calls(logging.Logger.error.call_args_list, []) self.bc.check.calls(tasks.upload.delay.call_args_list, []) @@ -1021,45 +1201,57 @@ def test_from_github_credentials__generate_anything(self): # -> and 1 ProvisioningVendor of type codespaces # When: all the data is correct, and the amount of rows is greater than the limit # Then: the task should create 1 bills and 10 activities - @patch.multiple('breathecode.services.google_cloud.Storage', - __init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) - @patch.multiple('breathecode.services.google_cloud.File', - __init__=MagicMock(return_value=None), - bucket=PropertyMock(), - file_name=PropertyMock(), - upload=MagicMock(), - exists=MagicMock(return_value=True), - url=MagicMock(return_value='https://storage.cloud.google.com/media-breathecode/hardcoded_url'), - create=True) - @patch('breathecode.provisioning.tasks.upload.delay', MagicMock(wraps=upload.delay)) - @patch('breathecode.provisioning.tasks.calculate_bill_amounts.delay', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event', MagicMock()) - @patch('breathecode.provisioning.tasks.PANDAS_ROWS_LIMIT', PropertyMock(return_value=3)) + @patch.multiple( + "breathecode.services.google_cloud.Storage", + __init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, + ) + @patch.multiple( + "breathecode.services.google_cloud.File", + __init__=MagicMock(return_value=None), + bucket=PropertyMock(), + file_name=PropertyMock(), + upload=MagicMock(), + exists=MagicMock(return_value=True), + url=MagicMock(return_value="https://storage.cloud.google.com/media-breathecode/hardcoded_url"), + create=True, + ) + @patch("breathecode.provisioning.tasks.upload.delay", MagicMock(wraps=upload.delay)) + @patch("breathecode.provisioning.tasks.calculate_bill_amounts.delay", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event", MagicMock()) + @patch("breathecode.provisioning.tasks.PANDAS_ROWS_LIMIT", PropertyMock(return_value=3)) def test_pagination(self): csv = codespaces_csv(10) 
         limit = tasks.PANDAS_ROWS_LIMIT
         tasks.PANDAS_ROWS_LIMIT = 3
-        github_academy_users = [{
-            'username': x,
-        } for x in csv['Username']]
-        github_academy_user_logs = [{
-            'storage_status': 'SYNCHED',
-            'storage_action': 'ADD',
-            'academy_user_id': n + 1,
-        } for n in range(10)]
-        provisioning_vendor = {'name': 'Codespaces'}
-        model = self.bc.database.create(user=10,
-                                        github_academy_user=github_academy_users,
-                                        github_academy_user_log=github_academy_user_logs,
-                                        provisioning_vendor=provisioning_vendor)
+        github_academy_users = [
+            {
+                "username": x,
+            }
+            for x in csv["Username"]
+        ]
+        github_academy_user_logs = [
+            {
+                "storage_status": "SYNCHED",
+                "storage_action": "ADD",
+                "academy_user_id": n + 1,
+            }
+            for n in range(10)
+        ]
+        provisioning_vendor = {"name": "Codespaces"}
+        model = self.bc.database.create(
+            user=10,
+            github_academy_user=github_academy_users,
+            github_academy_user_log=github_academy_user_logs,
+            provisioning_vendor=provisioning_vendor,
+        )
         logging.Logger.info.call_args_list = []
         logging.Logger.error.call_args_list = []
@@ -1067,71 +1259,103 @@ def test_pagination(self):
         task_manager_id = get_last_task_manager_id(self.bc) + 1
         slug = self.bc.fake.slug()
-        with patch('breathecode.services.google_cloud.File.download', MagicMock(side_effect=csv_file_mock(csv))):
+        with patch("breathecode.services.google_cloud.File.download", MagicMock(side_effect=csv_file_mock(csv))):
             upload(slug)
-        self.assertEqual(self.bc.database.list_of('payments.Currency'), [currency_data()])
-        self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningBill'), [
-            provisioning_bill_data({
-                'hash': slug,
-                'vendor_id': 1,
-            }),
-        ])
-        self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningConsumptionKind'), [
-            provisioning_activity_kind_data({
-                'id': n + 1,
-                'product_name': csv['Product'][n],
-                'sku': str(csv['SKU'][n]),
-            }) for n in range(10)
-        ])
-        self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningPrice'), [
-            provisioning_activity_price_data({
-                'currency_id': 1,
-                'id': n + 1,
-                'multiplier': csv['Multiplier'][n],
-                'price_per_unit': csv['Price Per Unit ($)'][n] * 1.3,
-                'unit_type': csv['Unit Type'][n],
-            }) for n in range(10)
-        ])
-        self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningConsumptionEvent'), [
-            provisioning_activity_item_data(
-                {
-                    'id': n + 1,
-                    'price_id': n + 1,
-                    'vendor_id': 1,
-                    'quantity': float(csv['Quantity'][n]),
-                    'registered_at': datetime.strptime(csv['Date'][n], '%Y-%m-%d').replace(tzinfo=pytz.UTC),
-                    'repository_url': f"https://github.com/{csv['Owner'][n]}/{csv['Repository Slug'][n]}",
-                    'task_associated_slug': csv['Repository Slug'][n],
-                    'csv_row': n,
-                }) for n in range(10)
-        ])
-        self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningUserConsumption'), [
-            provisioning_activity_data({
-                'id': n + 1,
-                'kind_id': n + 1,
-                'hash': slug,
-                'username': csv['Username'][n],
-                'processed_at': UTC_NOW,
-                'status': 'PERSISTED',
-            }) for n in range(10)
-        ])
-
-        self.assertEqual(
-            self.bc.database.list_of('authenticate.GithubAcademyUser'),
+        self.assertEqual(self.bc.database.list_of("payments.Currency"), [currency_data()])
+        self.assertEqual(
+            self.bc.database.list_of("provisioning.ProvisioningBill"),
+            [
+                provisioning_bill_data(
+                    {
+                        "hash": slug,
+                        "vendor_id": 1,
+                    }
+                ),
+            ],
+        )
+        self.assertEqual(
+            self.bc.database.list_of("provisioning.ProvisioningConsumptionKind"),
+            [
+                provisioning_activity_kind_data(
+                    {
+                        "id": n + 1,
+                        "product_name": csv["Product"][n],
+                        "sku": str(csv["SKU"][n]),
+                    }
+ ) + for n in range(10) + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningPrice"), + [ + provisioning_activity_price_data( + { + "currency_id": 1, + "id": n + 1, + "multiplier": csv["Multiplier"][n], + "price_per_unit": csv["Price Per Unit ($)"][n] * 1.3, + "unit_type": csv["Unit Type"][n], + } + ) + for n in range(10) + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningConsumptionEvent"), + [ + provisioning_activity_item_data( + { + "id": n + 1, + "price_id": n + 1, + "vendor_id": 1, + "quantity": float(csv["Quantity"][n]), + "registered_at": datetime.strptime(csv["Date"][n], "%Y-%m-%d").replace(tzinfo=pytz.UTC), + "repository_url": f"https://github.com/{csv['Owner'][n]}/{csv['Repository Slug'][n]}", + "task_associated_slug": csv["Repository Slug"][n], + "csv_row": n, + } + ) + for n in range(10) + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningUserConsumption"), + [ + provisioning_activity_data( + { + "id": n + 1, + "kind_id": n + 1, + "hash": slug, + "username": csv["Username"][n], + "processed_at": UTC_NOW, + "status": "PERSISTED", + } + ) + for n in range(10) + ], + ) + + self.assertEqual( + self.bc.database.list_of("authenticate.GithubAcademyUser"), self.bc.format.to_dict(model.github_academy_user), ) - self.bc.check.calls(logging.Logger.info.call_args_list, - [call(f'Starting upload for hash {slug}') for _ in range(4)]) + self.bc.check.calls( + logging.Logger.info.call_args_list, [call(f"Starting upload for hash {slug}") for _ in range(4)] + ) self.bc.check.calls(logging.Logger.error.call_args_list, []) - self.bc.check.calls(tasks.upload.delay.call_args_list, [ - call(slug, page=1, task_manager_id=task_manager_id), - call(slug, page=2, task_manager_id=task_manager_id), - call(slug, page=3, task_manager_id=task_manager_id), - ]) + self.bc.check.calls( + tasks.upload.delay.call_args_list, + [ + call(slug, page=1, task_manager_id=task_manager_id), + call(slug, page=2, task_manager_id=task_manager_id), + call(slug, page=3, task_manager_id=task_manager_id), + ], + ) self.bc.check.calls(tasks.calculate_bill_amounts.delay.call_args_list, [call(slug)]) @@ -1141,106 +1365,146 @@ def test_pagination(self): # -> and 1 ProvisioningVendor of type codespaces # When: all the data is correct, force = True # Then: the task should create 1 bills and 10 activities - @patch.multiple('breathecode.services.google_cloud.Storage', - __init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) - @patch.multiple('breathecode.services.google_cloud.File', - __init__=MagicMock(return_value=None), - bucket=PropertyMock(), - file_name=PropertyMock(), - upload=MagicMock(), - exists=MagicMock(return_value=True), - url=MagicMock(return_value='https://storage.cloud.google.com/media-breathecode/hardcoded_url'), - create=True) - @patch('breathecode.provisioning.tasks.upload.delay', MagicMock(wraps=upload.delay)) - @patch('breathecode.provisioning.tasks.calculate_bill_amounts.delay', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event', MagicMock()) + @patch.multiple( + "breathecode.services.google_cloud.Storage", + __init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, + ) + 
@patch.multiple( + "breathecode.services.google_cloud.File", + __init__=MagicMock(return_value=None), + bucket=PropertyMock(), + file_name=PropertyMock(), + upload=MagicMock(), + exists=MagicMock(return_value=True), + url=MagicMock(return_value="https://storage.cloud.google.com/media-breathecode/hardcoded_url"), + create=True, + ) + @patch("breathecode.provisioning.tasks.upload.delay", MagicMock(wraps=upload.delay)) + @patch("breathecode.provisioning.tasks.calculate_bill_amounts.delay", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event", MagicMock()) def test_from_github_credentials__generate_anything__force(self): csv = codespaces_csv(10) slug = self.bc.fake.slug() - github_academy_users = [{ - 'username': x, - } for x in csv['Username']] - github_academy_user_logs = [{ - 'storage_status': 'SYNCHED', - 'storage_action': 'ADD', - 'academy_user_id': n + 1, - } for n in range(10)] - provisioning_vendor = {'name': 'Codespaces'} - provisioning_bill = {'hash': slug} - model = self.bc.database.create(user=10, - github_academy_user=github_academy_users, - github_academy_user_log=github_academy_user_logs, - provisioning_vendor=provisioning_vendor, - provisioning_bill=provisioning_bill) + github_academy_users = [ + { + "username": x, + } + for x in csv["Username"] + ] + github_academy_user_logs = [ + { + "storage_status": "SYNCHED", + "storage_action": "ADD", + "academy_user_id": n + 1, + } + for n in range(10) + ] + provisioning_vendor = {"name": "Codespaces"} + provisioning_bill = {"hash": slug} + model = self.bc.database.create( + user=10, + github_academy_user=github_academy_users, + github_academy_user_log=github_academy_user_logs, + provisioning_vendor=provisioning_vendor, + provisioning_bill=provisioning_bill, + ) logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] - with patch('breathecode.services.google_cloud.File.download', MagicMock(side_effect=csv_file_mock(csv))): + with patch("breathecode.services.google_cloud.File.download", MagicMock(side_effect=csv_file_mock(csv))): upload(slug, force=True) - self.assertEqual(self.bc.database.list_of('payments.Currency'), [currency_data()]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningBill'), [ - provisioning_bill_data({ - 'id': 2, - 'hash': slug, - 'vendor_id': 1, - }), - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningConsumptionKind'), [ - provisioning_activity_kind_data({ - 'id': n + 1, - 'product_name': csv['Product'][n], - 'sku': str(csv['SKU'][n]), - }) for n in range(10) - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningPrice'), [ - provisioning_activity_price_data({ - 'currency_id': 1, - 'id': n + 1, - 'multiplier': csv['Multiplier'][n], - 'price_per_unit': csv['Price Per Unit ($)'][n] * 1.3, - 'unit_type': csv['Unit Type'][n], - }) for n in range(10) - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningConsumptionEvent'), [ - provisioning_activity_item_data( - { - 'id': n + 1, - 'price_id': n + 1, - 'vendor_id': 1, - 'quantity': float(csv['Quantity'][n]), - 'registered_at': datetime.strptime(csv['Date'][n], '%Y-%m-%d').replace(tzinfo=pytz.UTC), - 'repository_url': 
f"https://github.com/{csv['Owner'][n]}/{csv['Repository Slug'][n]}", - 'task_associated_slug': csv['Repository Slug'][n], - 'csv_row': n, - }) for n in range(10) - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningUserConsumption'), [ - provisioning_activity_data({ - 'id': n + 1, - 'kind_id': n + 1, - 'hash': slug, - 'username': csv['Username'][n], - 'processed_at': UTC_NOW, - 'status': 'PERSISTED', - }) for n in range(10) - ]) - - self.assertEqual( - self.bc.database.list_of('authenticate.GithubAcademyUser'), + self.assertEqual(self.bc.database.list_of("payments.Currency"), [currency_data()]) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningBill"), + [ + provisioning_bill_data( + { + "id": 2, + "hash": slug, + "vendor_id": 1, + } + ), + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningConsumptionKind"), + [ + provisioning_activity_kind_data( + { + "id": n + 1, + "product_name": csv["Product"][n], + "sku": str(csv["SKU"][n]), + } + ) + for n in range(10) + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningPrice"), + [ + provisioning_activity_price_data( + { + "currency_id": 1, + "id": n + 1, + "multiplier": csv["Multiplier"][n], + "price_per_unit": csv["Price Per Unit ($)"][n] * 1.3, + "unit_type": csv["Unit Type"][n], + } + ) + for n in range(10) + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningConsumptionEvent"), + [ + provisioning_activity_item_data( + { + "id": n + 1, + "price_id": n + 1, + "vendor_id": 1, + "quantity": float(csv["Quantity"][n]), + "registered_at": datetime.strptime(csv["Date"][n], "%Y-%m-%d").replace(tzinfo=pytz.UTC), + "repository_url": f"https://github.com/{csv['Owner'][n]}/{csv['Repository Slug'][n]}", + "task_associated_slug": csv["Repository Slug"][n], + "csv_row": n, + } + ) + for n in range(10) + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningUserConsumption"), + [ + provisioning_activity_data( + { + "id": n + 1, + "kind_id": n + 1, + "hash": slug, + "username": csv["Username"][n], + "processed_at": UTC_NOW, + "status": "PERSISTED", + } + ) + for n in range(10) + ], + ) + + self.assertEqual( + self.bc.database.list_of("authenticate.GithubAcademyUser"), self.bc.format.to_dict(model.github_academy_user), ) - self.bc.check.calls(logging.Logger.info.call_args_list, [call(f'Starting upload for hash {slug}')]) + self.bc.check.calls(logging.Logger.info.call_args_list, [call(f"Starting upload for hash {slug}")]) self.bc.check.calls(logging.Logger.error.call_args_list, []) self.bc.check.calls(tasks.upload.delay.call_args_list, []) @@ -1250,114 +1514,148 @@ def test_from_github_credentials__generate_anything__force(self): # -> and 1 ProvisioningVendor of type codespaces # When: all the data is correct, GithubAcademyUser with PAYMENT_CONFLICT and IGNORE # Then: the task should create 1 bills and 10 activities - @patch.multiple('breathecode.services.google_cloud.Storage', - __init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) - @patch.multiple('breathecode.services.google_cloud.File', - __init__=MagicMock(return_value=None), - bucket=PropertyMock(), - file_name=PropertyMock(), - upload=MagicMock(), - exists=MagicMock(return_value=True), - url=MagicMock(return_value='https://storage.cloud.google.com/media-breathecode/hardcoded_url'), - create=True) - @patch('breathecode.provisioning.tasks.upload.delay', MagicMock(wraps=upload.delay)) - 
@patch('breathecode.provisioning.tasks.calculate_bill_amounts.delay', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event', MagicMock()) + @patch.multiple( + "breathecode.services.google_cloud.Storage", + __init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, + ) + @patch.multiple( + "breathecode.services.google_cloud.File", + __init__=MagicMock(return_value=None), + bucket=PropertyMock(), + file_name=PropertyMock(), + upload=MagicMock(), + exists=MagicMock(return_value=True), + url=MagicMock(return_value="https://storage.cloud.google.com/media-breathecode/hardcoded_url"), + create=True, + ) + @patch("breathecode.provisioning.tasks.upload.delay", MagicMock(wraps=upload.delay)) + @patch("breathecode.provisioning.tasks.calculate_bill_amounts.delay", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event", MagicMock()) def test_from_github_credentials__generate_anything__case1(self): csv = codespaces_csv(10) - github_academy_users = [{ - 'username': x, - 'storage_status': 'PAYMENT_CONFLICT', - 'storage_action': 'IGNORE', - } for x in csv['Username']] - - github_academy_users += [{ - 'storage_status': 'SYNCHED', - 'storage_action': 'ADD', - 'user_id': 11, - } for n in range(10)] - - provisioning_vendor = {'name': 'Codespaces'} - model = self.bc.database.create(user=11, - github_academy_user=github_academy_users, - provisioning_vendor=provisioning_vendor) + github_academy_users = [ + { + "username": x, + "storage_status": "PAYMENT_CONFLICT", + "storage_action": "IGNORE", + } + for x in csv["Username"] + ] + + github_academy_users += [ + { + "storage_status": "SYNCHED", + "storage_action": "ADD", + "user_id": 11, + } + for n in range(10) + ] + + provisioning_vendor = {"name": "Codespaces"} + model = self.bc.database.create( + user=11, github_academy_user=github_academy_users, provisioning_vendor=provisioning_vendor + ) logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] slug = self.bc.fake.slug() - with patch('breathecode.services.google_cloud.File.download', MagicMock(side_effect=csv_file_mock(csv))): + with patch("breathecode.services.google_cloud.File.download", MagicMock(side_effect=csv_file_mock(csv))): upload(slug) - self.assertEqual(self.bc.database.list_of('payments.Currency'), [currency_data()]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningBill'), [ - provisioning_bill_data({ - 'hash': slug, - 'vendor_id': 1, - }), - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningConsumptionKind'), [ - provisioning_activity_kind_data({ - 'id': n + 1, - 'product_name': csv['Product'][n], - 'sku': str(csv['SKU'][n]), - }) for n in range(10) - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningPrice'), [ - provisioning_activity_price_data({ - 'currency_id': 1, - 'id': n + 1, - 'multiplier': csv['Multiplier'][n], - 'price_per_unit': csv['Price Per Unit ($)'][n] * 1.3, - 'unit_type': csv['Unit 
Type'][n], - }) for n in range(10) - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningConsumptionEvent'), [ - provisioning_activity_item_data( - { - 'id': n + 1, - 'price_id': n + 1, - 'vendor_id': 1, - 'quantity': float(csv['Quantity'][n]), - 'registered_at': datetime.strptime(csv['Date'][n], '%Y-%m-%d').replace(tzinfo=pytz.UTC), - 'repository_url': f"https://github.com/{csv['Owner'][n]}/{csv['Repository Slug'][n]}", - 'task_associated_slug': csv['Repository Slug'][n], - 'csv_row': n, - }) for n in range(10) - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningUserConsumption'), [ - provisioning_activity_data({ - 'id': - n + 1, - 'kind_id': - n + 1, - 'hash': - slug, - 'username': - csv['Username'][n], - 'processed_at': - UTC_NOW, - 'status': - 'WARNING', - 'status_text': (f"We could not find enough information about {csv['Username'][n]}, mark this user user " - "as deleted if you don't recognize it"), - }) for n in range(10) - ]) - - self.assertEqual( - self.bc.database.list_of('authenticate.GithubAcademyUser'), + self.assertEqual(self.bc.database.list_of("payments.Currency"), [currency_data()]) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningBill"), + [ + provisioning_bill_data( + { + "hash": slug, + "vendor_id": 1, + } + ), + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningConsumptionKind"), + [ + provisioning_activity_kind_data( + { + "id": n + 1, + "product_name": csv["Product"][n], + "sku": str(csv["SKU"][n]), + } + ) + for n in range(10) + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningPrice"), + [ + provisioning_activity_price_data( + { + "currency_id": 1, + "id": n + 1, + "multiplier": csv["Multiplier"][n], + "price_per_unit": csv["Price Per Unit ($)"][n] * 1.3, + "unit_type": csv["Unit Type"][n], + } + ) + for n in range(10) + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningConsumptionEvent"), + [ + provisioning_activity_item_data( + { + "id": n + 1, + "price_id": n + 1, + "vendor_id": 1, + "quantity": float(csv["Quantity"][n]), + "registered_at": datetime.strptime(csv["Date"][n], "%Y-%m-%d").replace(tzinfo=pytz.UTC), + "repository_url": f"https://github.com/{csv['Owner'][n]}/{csv['Repository Slug'][n]}", + "task_associated_slug": csv["Repository Slug"][n], + "csv_row": n, + } + ) + for n in range(10) + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningUserConsumption"), + [ + provisioning_activity_data( + { + "id": n + 1, + "kind_id": n + 1, + "hash": slug, + "username": csv["Username"][n], + "processed_at": UTC_NOW, + "status": "WARNING", + "status_text": ( + f"We could not find enough information about {csv['Username'][n]}, mark this user user " + "as deleted if you don't recognize it" + ), + } + ) + for n in range(10) + ], + ) + + self.assertEqual( + self.bc.database.list_of("authenticate.GithubAcademyUser"), self.bc.format.to_dict(model.github_academy_user), ) - self.bc.check.calls(logging.Logger.info.call_args_list, [call(f'Starting upload for hash {slug}')]) + self.bc.check.calls(logging.Logger.info.call_args_list, [call(f"Starting upload for hash {slug}")]) self.bc.check.calls(logging.Logger.error.call_args_list, []) self.bc.check.calls(tasks.upload.delay.call_args_list, []) @@ -1369,88 +1667,110 @@ class GitpodTestSuite(ProvisioningTestCase): # Given: a csv with codespaces data # When: users does not exist # Then: the task should not create any bill, create an activity with 
wrong status - @patch.multiple('breathecode.services.google_cloud.Storage', - __init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) - @patch.multiple('breathecode.services.google_cloud.File', - __init__=MagicMock(return_value=None), - bucket=PropertyMock(), - file_name=PropertyMock(), - upload=MagicMock(), - exists=MagicMock(return_value=True), - url=MagicMock(return_value='https://storage.cloud.google.com/media-breathecode/hardcoded_url'), - create=True) - @patch('breathecode.provisioning.tasks.upload.delay', MagicMock(wraps=upload.delay)) - @patch('breathecode.provisioning.tasks.calculate_bill_amounts.delay', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event', MagicMock()) + @patch.multiple( + "breathecode.services.google_cloud.Storage", + __init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, + ) + @patch.multiple( + "breathecode.services.google_cloud.File", + __init__=MagicMock(return_value=None), + bucket=PropertyMock(), + file_name=PropertyMock(), + upload=MagicMock(), + exists=MagicMock(return_value=True), + url=MagicMock(return_value="https://storage.cloud.google.com/media-breathecode/hardcoded_url"), + create=True, + ) + @patch("breathecode.provisioning.tasks.upload.delay", MagicMock(wraps=upload.delay)) + @patch("breathecode.provisioning.tasks.calculate_bill_amounts.delay", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event", MagicMock()) def test_users_not_found(self): csv = gitpod_csv(10) slug = self.bc.fake.slug() - with patch('requests.get', response_mock(content=[{'id': 1} for _ in range(10)])): - with patch('breathecode.services.google_cloud.File.download', MagicMock(side_effect=csv_file_mock(csv))): + with patch("requests.get", response_mock(content=[{"id": 1} for _ in range(10)])): + with patch("breathecode.services.google_cloud.File.download", MagicMock(side_effect=csv_file_mock(csv))): upload(slug) - self.assertEqual(self.bc.database.list_of('payments.Currency'), [currency_data()]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningBill'), []) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningConsumptionKind'), [ - provisioning_activity_kind_data({ - 'id': n + 1, - 'product_name': csv['kind'][n], - 'sku': str(csv['kind'][n]), - }) for n in range(10) - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningPrice'), [ - provisioning_activity_price_data({ - 'currency_id': 1, - 'id': 1, - 'multiplier': 1.0, - 'price_per_unit': 0.036 * 1.3, - 'unit_type': 'Credits', - }) - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningConsumptionEvent'), [ - provisioning_activity_item_data({ - 'id': n + 1, - 'price_id': 1, - 'quantity': float(csv['credits'][n]), - 'external_pk': str(csv['id'][n]), - 'registered_at': self.bc.datetime.from_iso_string(csv['startTime'][n]), - 'repository_url': csv['contextURL'][n], - 'task_associated_slug': repo_name(csv['contextURL'][n]), - 'csv_row': n, - }) for n in range(10) - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningUserConsumption'), [ - provisioning_activity_data({ - 'id': - n + 1, 
- 'kind_id': - n + 1, - 'hash': - slug, - 'username': - csv['userName'][n], - 'processed_at': - UTC_NOW, - 'status': - 'ERROR', - 'status_text': - ', '.join([ - 'Provisioning vendor Gitpod not found', - f"We could not find enough information about {csv['userName'][n]}, mark this user user " - "as deleted if you don't recognize it", - ]), - }) for n in range(10) - ]) - self.assertEqual(self.bc.database.list_of('authenticate.GithubAcademyUser'), []) - - self.bc.check.calls(logging.Logger.info.call_args_list, [call(f'Starting upload for hash {slug}')]) + self.assertEqual(self.bc.database.list_of("payments.Currency"), [currency_data()]) + self.assertEqual(self.bc.database.list_of("provisioning.ProvisioningBill"), []) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningConsumptionKind"), + [ + provisioning_activity_kind_data( + { + "id": n + 1, + "product_name": csv["kind"][n], + "sku": str(csv["kind"][n]), + } + ) + for n in range(10) + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningPrice"), + [ + provisioning_activity_price_data( + { + "currency_id": 1, + "id": 1, + "multiplier": 1.0, + "price_per_unit": 0.036 * 1.3, + "unit_type": "Credits", + } + ) + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningConsumptionEvent"), + [ + provisioning_activity_item_data( + { + "id": n + 1, + "price_id": 1, + "quantity": float(csv["credits"][n]), + "external_pk": str(csv["id"][n]), + "registered_at": self.bc.datetime.from_iso_string(csv["startTime"][n]), + "repository_url": csv["contextURL"][n], + "task_associated_slug": repo_name(csv["contextURL"][n]), + "csv_row": n, + } + ) + for n in range(10) + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningUserConsumption"), + [ + provisioning_activity_data( + { + "id": n + 1, + "kind_id": n + 1, + "hash": slug, + "username": csv["userName"][n], + "processed_at": UTC_NOW, + "status": "ERROR", + "status_text": ", ".join( + [ + "Provisioning vendor Gitpod not found", + f"We could not find enough information about {csv['userName'][n]}, mark this user user " + "as deleted if you don't recognize it", + ] + ), + } + ) + for n in range(10) + ], + ) + self.assertEqual(self.bc.database.list_of("authenticate.GithubAcademyUser"), []) + + self.bc.check.calls(logging.Logger.info.call_args_list, [call(f"Starting upload for hash {slug}")]) self.bc.check.calls(logging.Logger.error.call_args_list, []) self.bc.check.calls(tasks.upload.delay.call_args_list, []) @@ -1459,97 +1779,135 @@ def test_users_not_found(self): # Given: a csv with codespaces data and 10 User, 10 GithubAcademyUser and 10 GithubAcademyUserLog # When: vendor not found # Then: the task should not create any bill or activity - @patch.multiple('breathecode.services.google_cloud.Storage', - __init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) - @patch.multiple('breathecode.services.google_cloud.File', - __init__=MagicMock(return_value=None), - bucket=PropertyMock(), - file_name=PropertyMock(), - upload=MagicMock(), - exists=MagicMock(return_value=True), - url=MagicMock(return_value='https://storage.cloud.google.com/media-breathecode/hardcoded_url'), - create=True) - @patch('breathecode.provisioning.tasks.upload.delay', MagicMock(wraps=upload.delay)) - @patch('breathecode.provisioning.tasks.calculate_bill_amounts.delay', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', 
MagicMock()) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event', MagicMock()) + @patch.multiple( + "breathecode.services.google_cloud.Storage", + __init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, + ) + @patch.multiple( + "breathecode.services.google_cloud.File", + __init__=MagicMock(return_value=None), + bucket=PropertyMock(), + file_name=PropertyMock(), + upload=MagicMock(), + exists=MagicMock(return_value=True), + url=MagicMock(return_value="https://storage.cloud.google.com/media-breathecode/hardcoded_url"), + create=True, + ) + @patch("breathecode.provisioning.tasks.upload.delay", MagicMock(wraps=upload.delay)) + @patch("breathecode.provisioning.tasks.calculate_bill_amounts.delay", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event", MagicMock()) def test_from_github_credentials__vendor_not_found(self): csv = gitpod_csv(10) - github_academy_users = [{ - 'username': username, - } for username in csv['userName']] - github_academy_user_logs = [{ - 'storage_status': 'SYNCHED', - 'storage_action': 'ADD', - 'academy_user_id': n + 1, - } for n in range(10)] - model = self.bc.database.create(user=10, - github_academy_user=github_academy_users, - github_academy_user_log=github_academy_user_logs) + github_academy_users = [ + { + "username": username, + } + for username in csv["userName"] + ] + github_academy_user_logs = [ + { + "storage_status": "SYNCHED", + "storage_action": "ADD", + "academy_user_id": n + 1, + } + for n in range(10) + ] + model = self.bc.database.create( + user=10, github_academy_user=github_academy_users, github_academy_user_log=github_academy_user_logs + ) logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] slug = self.bc.fake.slug() - with patch('breathecode.services.google_cloud.File.download', MagicMock(side_effect=csv_file_mock(csv))): + with patch("breathecode.services.google_cloud.File.download", MagicMock(side_effect=csv_file_mock(csv))): upload(slug) - self.assertEqual(self.bc.database.list_of('payments.Currency'), [currency_data()]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningBill'), [ - provisioning_bill_data({ - 'hash': slug, - 'status': 'ERROR', - }), - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningConsumptionKind'), [ - provisioning_activity_kind_data({ - 'id': n + 1, - 'product_name': csv['kind'][n], - 'sku': str(csv['kind'][n]), - }) for n in range(10) - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningPrice'), [ - provisioning_activity_price_data({ - 'currency_id': 1, - 'id': 1, - 'multiplier': 1.0, - 'price_per_unit': 0.036 * 1.3, - 'unit_type': 'Credits', - }) - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningConsumptionEvent'), [ - provisioning_activity_item_data({ - 'id': n + 1, - 'price_id': 1, - 'vendor_id': None, - 'quantity': float(csv['credits'][n]), - 'external_pk': str(csv['id'][n]), - 'registered_at': self.bc.datetime.from_iso_string(csv['startTime'][n]), - 'repository_url': csv['contextURL'][n], - 'task_associated_slug': repo_name(csv['contextURL'][n]), - 
'csv_row': n, - }) for n in range(10) - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningUserConsumption'), [ - provisioning_activity_data({ - 'id': n + 1, - 'kind_id': n + 1, - 'hash': slug, - 'username': csv['userName'][n], - 'processed_at': UTC_NOW, - 'status': 'ERROR', - 'status_text': ', '.join(['Provisioning vendor Gitpod not found']), - }) for n in range(10) - ]) - - self.bc.check.calls(logging.Logger.info.call_args_list, [call(f'Starting upload for hash {slug}')]) + self.assertEqual(self.bc.database.list_of("payments.Currency"), [currency_data()]) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningBill"), + [ + provisioning_bill_data( + { + "hash": slug, + "status": "ERROR", + } + ), + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningConsumptionKind"), + [ + provisioning_activity_kind_data( + { + "id": n + 1, + "product_name": csv["kind"][n], + "sku": str(csv["kind"][n]), + } + ) + for n in range(10) + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningPrice"), + [ + provisioning_activity_price_data( + { + "currency_id": 1, + "id": 1, + "multiplier": 1.0, + "price_per_unit": 0.036 * 1.3, + "unit_type": "Credits", + } + ) + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningConsumptionEvent"), + [ + provisioning_activity_item_data( + { + "id": n + 1, + "price_id": 1, + "vendor_id": None, + "quantity": float(csv["credits"][n]), + "external_pk": str(csv["id"][n]), + "registered_at": self.bc.datetime.from_iso_string(csv["startTime"][n]), + "repository_url": csv["contextURL"][n], + "task_associated_slug": repo_name(csv["contextURL"][n]), + "csv_row": n, + } + ) + for n in range(10) + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningUserConsumption"), + [ + provisioning_activity_data( + { + "id": n + 1, + "kind_id": n + 1, + "hash": slug, + "username": csv["userName"][n], + "processed_at": UTC_NOW, + "status": "ERROR", + "status_text": ", ".join(["Provisioning vendor Gitpod not found"]), + } + ) + for n in range(10) + ], + ) + + self.bc.check.calls(logging.Logger.info.call_args_list, [call(f"Starting upload for hash {slug}")]) self.bc.check.calls(logging.Logger.error.call_args_list, []) self.bc.check.calls(tasks.upload.delay.call_args_list, []) @@ -1559,101 +1917,143 @@ def test_from_github_credentials__vendor_not_found(self): # -> and 1 ProvisioningVendor of type codespaces # When: all the data is correct # Then: the task should create 1 bills and 10 activities - @patch.multiple('breathecode.services.google_cloud.Storage', - __init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) - @patch.multiple('breathecode.services.google_cloud.File', - __init__=MagicMock(return_value=None), - bucket=PropertyMock(), - file_name=PropertyMock(), - upload=MagicMock(), - exists=MagicMock(return_value=True), - url=MagicMock(return_value='https://storage.cloud.google.com/media-breathecode/hardcoded_url'), - create=True) - @patch('breathecode.provisioning.tasks.upload.delay', MagicMock(wraps=upload.delay)) - @patch('breathecode.provisioning.tasks.calculate_bill_amounts.delay', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - 
@patch('breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event', MagicMock()) + @patch.multiple( + "breathecode.services.google_cloud.Storage", + __init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, + ) + @patch.multiple( + "breathecode.services.google_cloud.File", + __init__=MagicMock(return_value=None), + bucket=PropertyMock(), + file_name=PropertyMock(), + upload=MagicMock(), + exists=MagicMock(return_value=True), + url=MagicMock(return_value="https://storage.cloud.google.com/media-breathecode/hardcoded_url"), + create=True, + ) + @patch("breathecode.provisioning.tasks.upload.delay", MagicMock(wraps=upload.delay)) + @patch("breathecode.provisioning.tasks.calculate_bill_amounts.delay", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event", MagicMock()) def test_from_github_credentials__generate_anything(self): csv = gitpod_csv(10) - github_academy_users = [{ - 'username': username, - } for username in csv['userName']] - github_academy_user_logs = [{ - 'storage_status': 'SYNCHED', - 'storage_action': 'ADD', - 'academy_user_id': n + 1, - } for n in range(10)] - provisioning_vendor = {'name': 'Gitpod'} - model = self.bc.database.create(user=10, - github_academy_user=github_academy_users, - github_academy_user_log=github_academy_user_logs, - provisioning_vendor=provisioning_vendor) + github_academy_users = [ + { + "username": username, + } + for username in csv["userName"] + ] + github_academy_user_logs = [ + { + "storage_status": "SYNCHED", + "storage_action": "ADD", + "academy_user_id": n + 1, + } + for n in range(10) + ] + provisioning_vendor = {"name": "Gitpod"} + model = self.bc.database.create( + user=10, + github_academy_user=github_academy_users, + github_academy_user_log=github_academy_user_logs, + provisioning_vendor=provisioning_vendor, + ) logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] slug = self.bc.fake.slug() - with patch('breathecode.services.google_cloud.File.download', MagicMock(side_effect=csv_file_mock(csv))): + with patch("breathecode.services.google_cloud.File.download", MagicMock(side_effect=csv_file_mock(csv))): upload(slug) - self.assertEqual(self.bc.database.list_of('payments.Currency'), [currency_data()]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningBill'), [ - provisioning_bill_data({ - 'hash': slug, - 'vendor_id': 1, - }), - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningConsumptionKind'), [ - provisioning_activity_kind_data({ - 'id': n + 1, - 'product_name': csv['kind'][n], - 'sku': str(csv['kind'][n]), - }) for n in range(10) - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningPrice'), [ - provisioning_activity_price_data({ - 'currency_id': 1, - 'id': 1, - 'multiplier': 1.0, - 'price_per_unit': 0.036 * 1.3, - 'unit_type': 'Credits', - }) - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningConsumptionEvent'), [ - provisioning_activity_item_data({ - 'id': n + 1, - 'price_id': 1, - 'vendor_id': 1, - 'quantity': float(csv['credits'][n]), - 'external_pk': str(csv['id'][n]), - 'registered_at': self.bc.datetime.from_iso_string(csv['startTime'][n]), - 'repository_url': csv['contextURL'][n], - 
'task_associated_slug': repo_name(csv['contextURL'][n]), - 'csv_row': n, - }) for n in range(10) - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningUserConsumption'), [ - provisioning_activity_data({ - 'id': n + 1, - 'kind_id': n + 1, - 'hash': slug, - 'username': csv['userName'][n], - 'processed_at': UTC_NOW, - 'status': 'PERSISTED', - }) for n in range(10) - ]) - - self.assertEqual(self.bc.database.list_of('authenticate.GithubAcademyUser'), - self.bc.format.to_dict(model.github_academy_user)) - - self.bc.check.calls(logging.Logger.info.call_args_list, [call(f'Starting upload for hash {slug}')]) + self.assertEqual(self.bc.database.list_of("payments.Currency"), [currency_data()]) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningBill"), + [ + provisioning_bill_data( + { + "hash": slug, + "vendor_id": 1, + } + ), + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningConsumptionKind"), + [ + provisioning_activity_kind_data( + { + "id": n + 1, + "product_name": csv["kind"][n], + "sku": str(csv["kind"][n]), + } + ) + for n in range(10) + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningPrice"), + [ + provisioning_activity_price_data( + { + "currency_id": 1, + "id": 1, + "multiplier": 1.0, + "price_per_unit": 0.036 * 1.3, + "unit_type": "Credits", + } + ) + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningConsumptionEvent"), + [ + provisioning_activity_item_data( + { + "id": n + 1, + "price_id": 1, + "vendor_id": 1, + "quantity": float(csv["credits"][n]), + "external_pk": str(csv["id"][n]), + "registered_at": self.bc.datetime.from_iso_string(csv["startTime"][n]), + "repository_url": csv["contextURL"][n], + "task_associated_slug": repo_name(csv["contextURL"][n]), + "csv_row": n, + } + ) + for n in range(10) + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningUserConsumption"), + [ + provisioning_activity_data( + { + "id": n + 1, + "kind_id": n + 1, + "hash": slug, + "username": csv["userName"][n], + "processed_at": UTC_NOW, + "status": "PERSISTED", + } + ) + for n in range(10) + ], + ) + + self.assertEqual( + self.bc.database.list_of("authenticate.GithubAcademyUser"), + self.bc.format.to_dict(model.github_academy_user), + ) + + self.bc.check.calls(logging.Logger.info.call_args_list, [call(f"Starting upload for hash {slug}")]) self.bc.check.calls(logging.Logger.error.call_args_list, []) self.bc.check.calls(tasks.upload.delay.call_args_list, []) @@ -1663,45 +2063,57 @@ def test_from_github_credentials__generate_anything(self): # -> and 1 ProvisioningVendor of type codespaces # When: all the data is correct, and the amount of rows is greater than the limit # Then: the task should create 1 bills and 10 activities - @patch.multiple('breathecode.services.google_cloud.Storage', - __init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) - @patch.multiple('breathecode.services.google_cloud.File', - __init__=MagicMock(return_value=None), - bucket=PropertyMock(), - file_name=PropertyMock(), - upload=MagicMock(), - exists=MagicMock(return_value=True), - url=MagicMock(return_value='https://storage.cloud.google.com/media-breathecode/hardcoded_url'), - create=True) - @patch('breathecode.provisioning.tasks.upload.delay', MagicMock(wraps=upload.delay)) - @patch('breathecode.provisioning.tasks.calculate_bill_amounts.delay', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - 
@patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event', MagicMock()) - @patch('breathecode.provisioning.tasks.PANDAS_ROWS_LIMIT', PropertyMock(return_value=3)) + @patch.multiple( + "breathecode.services.google_cloud.Storage", + __init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, + ) + @patch.multiple( + "breathecode.services.google_cloud.File", + __init__=MagicMock(return_value=None), + bucket=PropertyMock(), + file_name=PropertyMock(), + upload=MagicMock(), + exists=MagicMock(return_value=True), + url=MagicMock(return_value="https://storage.cloud.google.com/media-breathecode/hardcoded_url"), + create=True, + ) + @patch("breathecode.provisioning.tasks.upload.delay", MagicMock(wraps=upload.delay)) + @patch("breathecode.provisioning.tasks.calculate_bill_amounts.delay", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event", MagicMock()) + @patch("breathecode.provisioning.tasks.PANDAS_ROWS_LIMIT", PropertyMock(return_value=3)) def test_pagination(self): csv = gitpod_csv(10) limit = tasks.PANDAS_ROWS_LIMIT tasks.PANDAS_ROWS_LIMIT = 3 - provisioning_vendor = {'name': 'Gitpod'} - github_academy_users = [{ - 'username': username, - } for username in csv['userName']] - github_academy_user_logs = [{ - 'storage_status': 'SYNCHED', - 'storage_action': 'ADD', - 'academy_user_id': n + 1, - } for n in range(10)] - model = self.bc.database.create(user=10, - github_academy_user=github_academy_users, - github_academy_user_log=github_academy_user_logs, - provisioning_vendor=provisioning_vendor) + provisioning_vendor = {"name": "Gitpod"} + github_academy_users = [ + { + "username": username, + } + for username in csv["userName"] + ] + github_academy_user_logs = [ + { + "storage_status": "SYNCHED", + "storage_action": "ADD", + "academy_user_id": n + 1, + } + for n in range(10) + ] + model = self.bc.database.create( + user=10, + github_academy_user=github_academy_users, + github_academy_user_log=github_academy_user_logs, + provisioning_vendor=provisioning_vendor, + ) logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] @@ -1709,69 +2121,103 @@ def test_pagination(self): task_manager_id = get_last_task_manager_id(self.bc) + 1 slug = self.bc.fake.slug() - with patch('breathecode.services.google_cloud.File.download', MagicMock(side_effect=csv_file_mock(csv))): + with patch("breathecode.services.google_cloud.File.download", MagicMock(side_effect=csv_file_mock(csv))): upload(slug) - self.assertEqual(self.bc.database.list_of('payments.Currency'), [currency_data()]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningBill'), [ - provisioning_bill_data({ - 'hash': slug, - 'vendor_id': 1, - }), - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningConsumptionKind'), [ - provisioning_activity_kind_data({ - 'id': n + 1, - 'product_name': csv['kind'][n], - 'sku': str(csv['kind'][n]), - }) for n in range(10) - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningPrice'), [ - 
provisioning_activity_price_data({ - 'currency_id': 1, - 'id': 1, - 'multiplier': 1.0, - 'price_per_unit': 0.036 * 1.3, - 'unit_type': 'Credits', - }) - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningConsumptionEvent'), [ - provisioning_activity_item_data({ - 'id': n + 1, - 'price_id': 1, - 'vendor_id': 1, - 'quantity': float(csv['credits'][n]), - 'external_pk': str(csv['id'][n]), - 'registered_at': self.bc.datetime.from_iso_string(csv['startTime'][n]), - 'repository_url': csv['contextURL'][n], - 'task_associated_slug': repo_name(csv['contextURL'][n]), - 'csv_row': n, - }) for n in range(10) - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningUserConsumption'), [ - provisioning_activity_data({ - 'id': n + 1, - 'kind_id': n + 1, - 'hash': slug, - 'username': csv['userName'][n], - 'processed_at': UTC_NOW, - 'status': 'PERSISTED', - }) for n in range(10) - ]) - - self.assertEqual(self.bc.database.list_of('authenticate.GithubAcademyUser'), - self.bc.format.to_dict(model.github_academy_user)) - - self.bc.check.calls(logging.Logger.info.call_args_list, - [call(f'Starting upload for hash {slug}') for _ in range(4)]) + self.assertEqual(self.bc.database.list_of("payments.Currency"), [currency_data()]) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningBill"), + [ + provisioning_bill_data( + { + "hash": slug, + "vendor_id": 1, + } + ), + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningConsumptionKind"), + [ + provisioning_activity_kind_data( + { + "id": n + 1, + "product_name": csv["kind"][n], + "sku": str(csv["kind"][n]), + } + ) + for n in range(10) + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningPrice"), + [ + provisioning_activity_price_data( + { + "currency_id": 1, + "id": 1, + "multiplier": 1.0, + "price_per_unit": 0.036 * 1.3, + "unit_type": "Credits", + } + ) + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningConsumptionEvent"), + [ + provisioning_activity_item_data( + { + "id": n + 1, + "price_id": 1, + "vendor_id": 1, + "quantity": float(csv["credits"][n]), + "external_pk": str(csv["id"][n]), + "registered_at": self.bc.datetime.from_iso_string(csv["startTime"][n]), + "repository_url": csv["contextURL"][n], + "task_associated_slug": repo_name(csv["contextURL"][n]), + "csv_row": n, + } + ) + for n in range(10) + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningUserConsumption"), + [ + provisioning_activity_data( + { + "id": n + 1, + "kind_id": n + 1, + "hash": slug, + "username": csv["userName"][n], + "processed_at": UTC_NOW, + "status": "PERSISTED", + } + ) + for n in range(10) + ], + ) + + self.assertEqual( + self.bc.database.list_of("authenticate.GithubAcademyUser"), + self.bc.format.to_dict(model.github_academy_user), + ) + + self.bc.check.calls( + logging.Logger.info.call_args_list, [call(f"Starting upload for hash {slug}") for _ in range(4)] + ) self.bc.check.calls(logging.Logger.error.call_args_list, []) - self.bc.check.calls(tasks.upload.delay.call_args_list, [ - call(slug, page=1, task_manager_id=task_manager_id), - call(slug, page=2, task_manager_id=task_manager_id), - call(slug, page=3, task_manager_id=task_manager_id), - ]) + self.bc.check.calls( + tasks.upload.delay.call_args_list, + [ + call(slug, page=1, task_manager_id=task_manager_id), + call(slug, page=2, task_manager_id=task_manager_id), + call(slug, page=3, task_manager_id=task_manager_id), + ], + ) 
         self.bc.check.calls(tasks.calculate_bill_amounts.delay.call_args_list, [call(slug)])
@@ -1781,117 +2227,163 @@ def test_pagination(self):
     # -> and 1 ProvisioningVendor of type codespaces
     # When: all the data is correct, without ProfileAcademy
     # Then: the task should create 1 bills and 10 activities per academy
-    @patch.multiple('breathecode.services.google_cloud.Storage',
-                    __init__=MagicMock(return_value=None),
-                    client=PropertyMock(),
-                    create=True)
-    @patch.multiple('breathecode.services.google_cloud.File',
-                    __init__=MagicMock(return_value=None),
-                    bucket=PropertyMock(),
-                    file_name=PropertyMock(),
-                    upload=MagicMock(),
-                    exists=MagicMock(return_value=True),
-                    url=MagicMock(return_value='https://storage.cloud.google.com/media-breathecode/hardcoded_url'),
-                    create=True)
-    @patch('breathecode.provisioning.tasks.upload.delay', MagicMock(wraps=upload.delay))
-    @patch('breathecode.provisioning.tasks.calculate_bill_amounts.delay', MagicMock())
-    @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW))
-    @patch('logging.Logger.info', MagicMock())
-    @patch('logging.Logger.error', MagicMock())
-    @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock())
-    @patch('breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event', MagicMock())
+    @patch.multiple(
+        "breathecode.services.google_cloud.Storage",
+        __init__=MagicMock(return_value=None),
+        client=PropertyMock(),
+        create=True,
+    )
+    @patch.multiple(
+        "breathecode.services.google_cloud.File",
+        __init__=MagicMock(return_value=None),
+        bucket=PropertyMock(),
+        file_name=PropertyMock(),
+        upload=MagicMock(),
+        exists=MagicMock(return_value=True),
+        url=MagicMock(return_value="https://storage.cloud.google.com/media-breathecode/hardcoded_url"),
+        create=True,
+    )
+    @patch("breathecode.provisioning.tasks.upload.delay", MagicMock(wraps=upload.delay))
+    @patch("breathecode.provisioning.tasks.calculate_bill_amounts.delay", MagicMock())
+    @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW))
+    @patch("logging.Logger.info", MagicMock())
+    @patch("logging.Logger.error", MagicMock())
+    @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock())
+    @patch("breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event", MagicMock())
     def test_from_github_credentials__generate_anything__case1(self):
         csv = gitpod_csv(10)
-        github_academy_users = [{
-            'username': username,
-        } for username in csv['userName']]
-        github_academy_user_logs = [{
-            'storage_status': 'SYNCHED',
-            'storage_action': 'ADD',
-            'academy_user_id': n + 1,
-        } for n in range(10)]
-        provisioning_vendor = {'name': 'Gitpod'}
-
-        model = self.bc.database.create(user=10,
-                                        academy=3,
-                                        github_academy_user=github_academy_users,
-                                        github_academy_user_log=github_academy_user_logs,
-                                        provisioning_vendor=provisioning_vendor)
+        github_academy_users = [
+            {
+                "username": username,
+            }
+            for username in csv["userName"]
+        ]
+        github_academy_user_logs = [
+            {
+                "storage_status": "SYNCHED",
+                "storage_action": "ADD",
+                "academy_user_id": n + 1,
+            }
+            for n in range(10)
+        ]
+        provisioning_vendor = {"name": "Gitpod"}
+
+        model = self.bc.database.create(
+            user=10,
+            academy=3,
+            github_academy_user=github_academy_users,
+            github_academy_user_log=github_academy_user_logs,
+            provisioning_vendor=provisioning_vendor,
+        )
         logging.Logger.info.call_args_list = []
         logging.Logger.error.call_args_list = []
         slug = self.bc.fake.slug()
-        with patch('breathecode.services.google_cloud.File.download', MagicMock(side_effect=csv_file_mock(csv))):
+        with patch("breathecode.services.google_cloud.File.download", MagicMock(side_effect=csv_file_mock(csv))):
             upload(slug)
-        self.assertEqual(self.bc.database.list_of('payments.Currency'), [currency_data()])
-        self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningBill'), [
-            provisioning_bill_data({
-                'id': 1,
-                'academy_id': 1,
-                'vendor_id': 1,
-                'hash': slug,
-            }),
-            provisioning_bill_data({
-                'id': 2,
-                'academy_id': 2,
-                'vendor_id': 1,
-                'hash': slug,
-            }),
-            provisioning_bill_data({
-                'id': 3,
-                'academy_id': 3,
-                'vendor_id': 1,
-                'hash': slug,
-            }),
-        ])
-        self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningConsumptionKind'), [
-            provisioning_activity_kind_data({
-                'id': n + 1,
-                'product_name': csv['kind'][n],
-                'sku': str(csv['kind'][n]),
-            }) for n in range(10)
-        ])
-        self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningPrice'), [
-            provisioning_activity_price_data({
-                'currency_id': 1,
-                'id': 1,
-                'multiplier': 1.0,
-                'price_per_unit': 0.036 * 1.3,
-                'unit_type': 'Credits',
-            })
-        ])
-        self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningConsumptionEvent'), [
-            provisioning_activity_item_data({
-                'id': n + 1,
-                'price_id': 1,
-                'vendor_id': 1,
-                'quantity': float(csv['credits'][n]),
-                'external_pk': str(csv['id'][n]),
-                'registered_at': self.bc.datetime.from_iso_string(csv['startTime'][n]),
-                'repository_url': csv['contextURL'][n],
-                'task_associated_slug': repo_name(csv['contextURL'][n]),
-                'csv_row': n,
-            }) for n in range(10)
-        ])
-        self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningUserConsumption'), [
-            provisioning_activity_data({
-                'id': n + 1,
-                'kind_id': n + 1,
-                'hash': slug,
-                'username': csv['userName'][n],
-                'processed_at': UTC_NOW,
-                'status': 'PERSISTED',
-            }) for n in range(10)
-        ])
-
-        self.assertEqual(self.bc.database.list_of('authenticate.GithubAcademyUser'),
-                         self.bc.format.to_dict(model.github_academy_user))
-
-        self.bc.check.calls(logging.Logger.info.call_args_list, [call(f'Starting upload for hash {slug}')])
+        self.assertEqual(self.bc.database.list_of("payments.Currency"), [currency_data()])
+        self.assertEqual(
+            self.bc.database.list_of("provisioning.ProvisioningBill"),
+            [
+                provisioning_bill_data(
+                    {
+                        "id": 1,
+                        "academy_id": 1,
+                        "vendor_id": 1,
+                        "hash": slug,
+                    }
+                ),
+                provisioning_bill_data(
+                    {
+                        "id": 2,
+                        "academy_id": 2,
+                        "vendor_id": 1,
+                        "hash": slug,
+                    }
+                ),
+                provisioning_bill_data(
+                    {
+                        "id": 3,
+                        "academy_id": 3,
+                        "vendor_id": 1,
+                        "hash": slug,
+                    }
+                ),
+            ],
+        )
+        self.assertEqual(
+            self.bc.database.list_of("provisioning.ProvisioningConsumptionKind"),
+            [
+                provisioning_activity_kind_data(
+                    {
+                        "id": n + 1,
+                        "product_name": csv["kind"][n],
+                        "sku": str(csv["kind"][n]),
+                    }
+                )
+                for n in range(10)
+            ],
+        )
+        self.assertEqual(
+            self.bc.database.list_of("provisioning.ProvisioningPrice"),
+            [
+                provisioning_activity_price_data(
+                    {
+                        "currency_id": 1,
+                        "id": 1,
+                        "multiplier": 1.0,
+                        "price_per_unit": 0.036 * 1.3,
+                        "unit_type": "Credits",
+                    }
+                )
+            ],
+        )
+        self.assertEqual(
+            self.bc.database.list_of("provisioning.ProvisioningConsumptionEvent"),
+            [
+                provisioning_activity_item_data(
+                    {
+                        "id": n + 1,
+                        "price_id": 1,
+                        "vendor_id": 1,
+                        "quantity": float(csv["credits"][n]),
+                        "external_pk": str(csv["id"][n]),
+                        "registered_at": self.bc.datetime.from_iso_string(csv["startTime"][n]),
+                        "repository_url": csv["contextURL"][n],
+                        "task_associated_slug": repo_name(csv["contextURL"][n]),
+                        "csv_row": n,
+                    }
+                )
+ for n in range(10) + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningUserConsumption"), + [ + provisioning_activity_data( + { + "id": n + 1, + "kind_id": n + 1, + "hash": slug, + "username": csv["userName"][n], + "processed_at": UTC_NOW, + "status": "PERSISTED", + } + ) + for n in range(10) + ], + ) + + self.assertEqual( + self.bc.database.list_of("authenticate.GithubAcademyUser"), + self.bc.format.to_dict(model.github_academy_user), + ) + + self.bc.check.calls(logging.Logger.info.call_args_list, [call(f"Starting upload for hash {slug}")]) self.bc.check.calls(logging.Logger.error.call_args_list, []) self.bc.check.calls(tasks.upload.delay.call_args_list, []) @@ -1901,135 +2393,186 @@ def test_from_github_credentials__generate_anything__case1(self): # -> and 1 ProvisioningVendor of type codespaces # When: all the data is correct, with ProfileAcademy # Then: the task should create 1 bills and 10 activities per user's ProfileAcademy - @patch.multiple('breathecode.services.google_cloud.Storage', - __init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) - @patch.multiple('breathecode.services.google_cloud.File', - __init__=MagicMock(return_value=None), - bucket=PropertyMock(), - file_name=PropertyMock(), - upload=MagicMock(), - exists=MagicMock(return_value=True), - url=MagicMock(return_value='https://storage.cloud.google.com/media-breathecode/hardcoded_url'), - create=True) - @patch('breathecode.provisioning.tasks.upload.delay', MagicMock(wraps=upload.delay)) - @patch('breathecode.provisioning.tasks.calculate_bill_amounts.delay', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event', MagicMock()) - @patch('breathecode.authenticate.signals.academy_invite_accepted.send_robust', MagicMock()) + @patch.multiple( + "breathecode.services.google_cloud.Storage", + __init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, + ) + @patch.multiple( + "breathecode.services.google_cloud.File", + __init__=MagicMock(return_value=None), + bucket=PropertyMock(), + file_name=PropertyMock(), + upload=MagicMock(), + exists=MagicMock(return_value=True), + url=MagicMock(return_value="https://storage.cloud.google.com/media-breathecode/hardcoded_url"), + create=True, + ) + @patch("breathecode.provisioning.tasks.upload.delay", MagicMock(wraps=upload.delay)) + @patch("breathecode.provisioning.tasks.calculate_bill_amounts.delay", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event", MagicMock()) + @patch("breathecode.authenticate.signals.academy_invite_accepted.send_robust", MagicMock()) def test_from_github_credentials__generate_anything__case2(self): csv = gitpod_csv(10) - github_academy_users = [{ - 'username': username, - } for username in csv['userName']] - github_academy_user_logs = [{ - 'storage_status': 'SYNCHED', - 'storage_action': 'ADD', - 'academy_user_id': n + 1, - } for n in range(10)] - provisioning_vendor = {'name': 'Gitpod'} + 
github_academy_users = [ + { + "username": username, + } + for username in csv["userName"] + ] + github_academy_user_logs = [ + { + "storage_status": "SYNCHED", + "storage_action": "ADD", + "academy_user_id": n + 1, + } + for n in range(10) + ] + provisioning_vendor = {"name": "Gitpod"} profile_academies = [] for user_n in range(10): for academy_n in range(3): - profile_academies.append({ - 'academy_id': academy_n + 1, - 'user_id': user_n + 1, - 'status': 'ACTIVE', - }) - - credentials_github = [{ - 'username': csv['userName'][n], - 'user_id': n + 1, - } for n in range(10)] - - model = self.bc.database.create(user=10, - credentials_github=credentials_github, - academy=3, - profile_academy=profile_academies, - github_academy_user=github_academy_users, - github_academy_user_log=github_academy_user_logs, - provisioning_vendor=provisioning_vendor) + profile_academies.append( + { + "academy_id": academy_n + 1, + "user_id": user_n + 1, + "status": "ACTIVE", + } + ) + + credentials_github = [ + { + "username": csv["userName"][n], + "user_id": n + 1, + } + for n in range(10) + ] + + model = self.bc.database.create( + user=10, + credentials_github=credentials_github, + academy=3, + profile_academy=profile_academies, + github_academy_user=github_academy_users, + github_academy_user_log=github_academy_user_logs, + provisioning_vendor=provisioning_vendor, + ) logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] slug = self.bc.fake.slug() - with patch('breathecode.services.google_cloud.File.download', MagicMock(side_effect=csv_file_mock(csv))): + with patch("breathecode.services.google_cloud.File.download", MagicMock(side_effect=csv_file_mock(csv))): upload(slug) - self.assertEqual(self.bc.database.list_of('payments.Currency'), [currency_data()]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningBill'), [ - provisioning_bill_data({ - 'id': 1, - 'academy_id': 1, - 'hash': slug, - 'vendor_id': 1, - }), - provisioning_bill_data({ - 'id': 2, - 'academy_id': 2, - 'hash': slug, - 'vendor_id': 1, - }), - provisioning_bill_data({ - 'id': 3, - 'academy_id': 3, - 'hash': slug, - 'vendor_id': 1, - }), - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningConsumptionKind'), [ - provisioning_activity_kind_data({ - 'id': n + 1, - 'product_name': csv['kind'][n], - 'sku': str(csv['kind'][n]), - }) for n in range(10) - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningPrice'), [ - provisioning_activity_price_data({ - 'currency_id': 1, - 'id': 1, - 'multiplier': 1.0, - 'price_per_unit': 0.036 * 1.3, - 'unit_type': 'Credits', - }) - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningConsumptionEvent'), [ - provisioning_activity_item_data({ - 'id': n + 1, - 'price_id': 1, - 'vendor_id': 1, - 'quantity': float(csv['credits'][n]), - 'external_pk': str(csv['id'][n]), - 'registered_at': self.bc.datetime.from_iso_string(csv['startTime'][n]), - 'repository_url': csv['contextURL'][n], - 'task_associated_slug': repo_name(csv['contextURL'][n]), - 'csv_row': n, - }) for n in range(10) - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningUserConsumption'), [ - provisioning_activity_data({ - 'id': n + 1, - 'kind_id': n + 1, - 'hash': slug, - 'username': csv['userName'][n], - 'processed_at': UTC_NOW, - 'status': 'PERSISTED', - }) for n in range(10) - ]) - - self.assertEqual(self.bc.database.list_of('authenticate.GithubAcademyUser'), - self.bc.format.to_dict(model.github_academy_user)) - - 
self.bc.check.calls(logging.Logger.info.call_args_list, [call(f'Starting upload for hash {slug}')]) + self.assertEqual(self.bc.database.list_of("payments.Currency"), [currency_data()]) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningBill"), + [ + provisioning_bill_data( + { + "id": 1, + "academy_id": 1, + "hash": slug, + "vendor_id": 1, + } + ), + provisioning_bill_data( + { + "id": 2, + "academy_id": 2, + "hash": slug, + "vendor_id": 1, + } + ), + provisioning_bill_data( + { + "id": 3, + "academy_id": 3, + "hash": slug, + "vendor_id": 1, + } + ), + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningConsumptionKind"), + [ + provisioning_activity_kind_data( + { + "id": n + 1, + "product_name": csv["kind"][n], + "sku": str(csv["kind"][n]), + } + ) + for n in range(10) + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningPrice"), + [ + provisioning_activity_price_data( + { + "currency_id": 1, + "id": 1, + "multiplier": 1.0, + "price_per_unit": 0.036 * 1.3, + "unit_type": "Credits", + } + ) + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningConsumptionEvent"), + [ + provisioning_activity_item_data( + { + "id": n + 1, + "price_id": 1, + "vendor_id": 1, + "quantity": float(csv["credits"][n]), + "external_pk": str(csv["id"][n]), + "registered_at": self.bc.datetime.from_iso_string(csv["startTime"][n]), + "repository_url": csv["contextURL"][n], + "task_associated_slug": repo_name(csv["contextURL"][n]), + "csv_row": n, + } + ) + for n in range(10) + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningUserConsumption"), + [ + provisioning_activity_data( + { + "id": n + 1, + "kind_id": n + 1, + "hash": slug, + "username": csv["userName"][n], + "processed_at": UTC_NOW, + "status": "PERSISTED", + } + ) + for n in range(10) + ], + ) + + self.assertEqual( + self.bc.database.list_of("authenticate.GithubAcademyUser"), + self.bc.format.to_dict(model.github_academy_user), + ) + + self.bc.check.calls(logging.Logger.info.call_args_list, [call(f"Starting upload for hash {slug}")]) self.bc.check.calls(logging.Logger.error.call_args_list, []) self.bc.check.calls(tasks.upload.delay.call_args_list, []) @@ -2039,73 +2582,93 @@ def test_from_github_credentials__generate_anything__case2(self): # -> and 1 ProvisioningVendor of type codespaces # When: all the data is correct, with ProfileAcademy # Then: the task should create 1 bills and 10 activities per user's ProfileAcademy - @patch.multiple('breathecode.services.google_cloud.Storage', - __init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) - @patch.multiple('breathecode.services.google_cloud.File', - __init__=MagicMock(return_value=None), - bucket=PropertyMock(), - file_name=PropertyMock(), - upload=MagicMock(), - exists=MagicMock(return_value=True), - url=MagicMock(return_value='https://storage.cloud.google.com/media-breathecode/hardcoded_url'), - create=True) - @patch('breathecode.provisioning.tasks.upload.delay', MagicMock(wraps=upload.delay)) - @patch('breathecode.provisioning.tasks.calculate_bill_amounts.delay', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event', MagicMock()) - 
@patch('breathecode.authenticate.signals.academy_invite_accepted.send_robust', MagicMock()) + @patch.multiple( + "breathecode.services.google_cloud.Storage", + __init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, + ) + @patch.multiple( + "breathecode.services.google_cloud.File", + __init__=MagicMock(return_value=None), + bucket=PropertyMock(), + file_name=PropertyMock(), + upload=MagicMock(), + exists=MagicMock(return_value=True), + url=MagicMock(return_value="https://storage.cloud.google.com/media-breathecode/hardcoded_url"), + create=True, + ) + @patch("breathecode.provisioning.tasks.upload.delay", MagicMock(wraps=upload.delay)) + @patch("breathecode.provisioning.tasks.calculate_bill_amounts.delay", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event", MagicMock()) + @patch("breathecode.authenticate.signals.academy_invite_accepted.send_robust", MagicMock()) def test_from_github_credentials__generate_anything__case3(self): csv = gitpod_csv(10) - github_academy_users = [{ - 'username': username, - } for username in csv['userName']] - github_academy_user_logs = [{ - 'storage_status': 'SYNCHED', - 'storage_action': 'ADD', - 'academy_user_id': n + 1, - } for n in range(10)] - provisioning_vendor = {'name': 'Gitpod'} + github_academy_users = [ + { + "username": username, + } + for username in csv["userName"] + ] + github_academy_user_logs = [ + { + "storage_status": "SYNCHED", + "storage_action": "ADD", + "academy_user_id": n + 1, + } + for n in range(10) + ] + provisioning_vendor = {"name": "Gitpod"} profile_academies = [] for user_n in range(10): for academy_n in range(3): - profile_academies.append({ - 'academy_id': academy_n + 1, - 'user_id': user_n + 1, - 'status': 'ACTIVE', - }) - - credentials_github = [{ - 'username': csv['userName'][n], - 'user_id': n + 1, - } for n in range(10)] - - cohort_users = [{ - 'user_id': n + 1, - 'cohort_id': 1, - } for n in range(10)] + profile_academies.append( + { + "academy_id": academy_n + 1, + "user_id": user_n + 1, + "status": "ACTIVE", + } + ) + + credentials_github = [ + { + "username": csv["userName"][n], + "user_id": n + 1, + } + for n in range(10) + ] + + cohort_users = [ + { + "user_id": n + 1, + "cohort_id": 1, + } + for n in range(10) + ] cohort = { - 'academy_id': 1, - 'kickoff_date': timezone.now() + timedelta(days=1), - 'ending_date': timezone.now() - timedelta(days=1), + "academy_id": 1, + "kickoff_date": timezone.now() + timedelta(days=1), + "ending_date": timezone.now() - timedelta(days=1), } - model = self.bc.database.create(user=10, - credentials_github=credentials_github, - academy=3, - cohort=cohort, - cohort_user=cohort_users, - profile_academy=profile_academies, - github_academy_user=github_academy_users, - github_academy_user_log=github_academy_user_logs, - provisioning_vendor=provisioning_vendor) + model = self.bc.database.create( + user=10, + credentials_github=credentials_github, + academy=3, + cohort=cohort, + cohort_user=cohort_users, + profile_academy=profile_academies, + github_academy_user=github_academy_users, + github_academy_user_log=github_academy_user_logs, + provisioning_vendor=provisioning_vendor, + ) logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] @@ -2114,8 
+2677,8 @@ def test_from_github_credentials__generate_anything__case3(self): y = [[model.academy[RANDOM_ACADEMIES[x]]] for x in range(10)] - with patch('random.choices', MagicMock(side_effect=y)): - with patch('breathecode.services.google_cloud.File.download', MagicMock(side_effect=csv_file_mock(csv))): + with patch("random.choices", MagicMock(side_effect=y)): + with patch("breathecode.services.google_cloud.File.download", MagicMock(side_effect=csv_file_mock(csv))): upload(slug) @@ -2127,59 +2690,89 @@ def test_from_github_credentials__generate_anything__case3(self): academies = list(academies) - self.assertEqual(self.bc.database.list_of('payments.Currency'), [currency_data()]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningBill'), [ - provisioning_bill_data({ - 'id': 1, - 'academy_id': 1, - 'hash': slug, - 'vendor_id': 1, - }), - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningConsumptionKind'), [ - provisioning_activity_kind_data({ - 'id': n + 1, - 'product_name': csv['kind'][n], - 'sku': str(csv['kind'][n]), - }) for n in range(10) - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningPrice'), [ - provisioning_activity_price_data({ - 'currency_id': 1, - 'id': 1, - 'multiplier': 1.0, - 'price_per_unit': 0.036 * 1.3, - 'unit_type': 'Credits', - }) - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningConsumptionEvent'), [ - provisioning_activity_item_data({ - 'id': n + 1, - 'price_id': 1, - 'vendor_id': 1, - 'quantity': float(csv['credits'][n]), - 'external_pk': str(csv['id'][n]), - 'registered_at': self.bc.datetime.from_iso_string(csv['startTime'][n]), - 'repository_url': csv['contextURL'][n], - 'task_associated_slug': repo_name(csv['contextURL'][n]), - 'csv_row': n, - }) for n in range(10) - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningUserConsumption'), [ - provisioning_activity_data({ - 'id': n + 1, - 'kind_id': n + 1, - 'hash': slug, - 'username': csv['userName'][n], - 'processed_at': UTC_NOW, - 'status': 'PERSISTED', - }) for n in range(10) - ]) - - self.assertEqual(self.bc.database.list_of('authenticate.GithubAcademyUser'), - self.bc.format.to_dict(model.github_academy_user)) - - self.bc.check.calls(logging.Logger.info.call_args_list, [call(f'Starting upload for hash {slug}')]) + self.assertEqual(self.bc.database.list_of("payments.Currency"), [currency_data()]) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningBill"), + [ + provisioning_bill_data( + { + "id": 1, + "academy_id": 1, + "hash": slug, + "vendor_id": 1, + } + ), + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningConsumptionKind"), + [ + provisioning_activity_kind_data( + { + "id": n + 1, + "product_name": csv["kind"][n], + "sku": str(csv["kind"][n]), + } + ) + for n in range(10) + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningPrice"), + [ + provisioning_activity_price_data( + { + "currency_id": 1, + "id": 1, + "multiplier": 1.0, + "price_per_unit": 0.036 * 1.3, + "unit_type": "Credits", + } + ) + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningConsumptionEvent"), + [ + provisioning_activity_item_data( + { + "id": n + 1, + "price_id": 1, + "vendor_id": 1, + "quantity": float(csv["credits"][n]), + "external_pk": str(csv["id"][n]), + "registered_at": self.bc.datetime.from_iso_string(csv["startTime"][n]), + "repository_url": csv["contextURL"][n], + "task_associated_slug": 
repo_name(csv["contextURL"][n]), + "csv_row": n, + } + ) + for n in range(10) + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningUserConsumption"), + [ + provisioning_activity_data( + { + "id": n + 1, + "kind_id": n + 1, + "hash": slug, + "username": csv["userName"][n], + "processed_at": UTC_NOW, + "status": "PERSISTED", + } + ) + for n in range(10) + ], + ) + + self.assertEqual( + self.bc.database.list_of("authenticate.GithubAcademyUser"), + self.bc.format.to_dict(model.github_academy_user), + ) + + self.bc.check.calls(logging.Logger.info.call_args_list, [call(f"Starting upload for hash {slug}")]) self.bc.check.calls(logging.Logger.error.call_args_list, []) self.bc.check.calls(tasks.upload.delay.call_args_list, []) @@ -2191,107 +2784,129 @@ class RigobotTestSuite(ProvisioningTestCase): # Given: a csv with codespaces data # When: users does not exist # Then: the task should not create any bill, create an activity with wrong status - @patch.multiple('breathecode.services.google_cloud.Storage', - __init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) - @patch.multiple('breathecode.services.google_cloud.File', - __init__=MagicMock(return_value=None), - bucket=PropertyMock(), - file_name=PropertyMock(), - upload=MagicMock(), - exists=MagicMock(return_value=True), - url=MagicMock(return_value='https://storage.cloud.google.com/media-breathecode/hardcoded_url'), - create=True) - @patch('breathecode.provisioning.tasks.upload.delay', MagicMock(wraps=upload.delay)) - @patch('breathecode.provisioning.tasks.calculate_bill_amounts.delay', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event', MagicMock()) + @patch.multiple( + "breathecode.services.google_cloud.Storage", + __init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, + ) + @patch.multiple( + "breathecode.services.google_cloud.File", + __init__=MagicMock(return_value=None), + bucket=PropertyMock(), + file_name=PropertyMock(), + upload=MagicMock(), + exists=MagicMock(return_value=True), + url=MagicMock(return_value="https://storage.cloud.google.com/media-breathecode/hardcoded_url"), + create=True, + ) + @patch("breathecode.provisioning.tasks.upload.delay", MagicMock(wraps=upload.delay)) + @patch("breathecode.provisioning.tasks.calculate_bill_amounts.delay", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event", MagicMock()) def test_users_not_found(self): csv = rigobot_csv(10) - self.bc.database.create(app={'slug': 'rigobot'}, first_party_credentials={'app': {'rigobot': 10}}) + self.bc.database.create(app={"slug": "rigobot"}, first_party_credentials={"app": {"rigobot": 10}}) logging.Logger.info.call_args_list = [] slug = self.bc.fake.slug() - with patch('requests.get', response_mock(content=[{'id': 1} for _ in range(10)])): - with patch('breathecode.services.google_cloud.File.download', MagicMock(side_effect=csv_file_mock(csv))): + with patch("requests.get", response_mock(content=[{"id": 1} for _ in range(10)])): + with patch("breathecode.services.google_cloud.File.download", MagicMock(side_effect=csv_file_mock(csv))): upload(slug) - 
self.assertEqual(self.bc.database.list_of('payments.Currency'), [currency_data()]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningBill'), []) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningConsumptionKind'), [ - provisioning_activity_kind_data( - { - 'id': n + 1, - 'product_name': f'{csv["purpose"][n]} (type: {csv["pricing_type"][n]}, model: {csv["model"][n]})', - 'sku': f'{csv["purpose_slug"][n]}--{csv["pricing_type"][n].lower()}--{csv["model"][n].lower()}', - }) for n in range(10) - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningPrice'), [ - provisioning_activity_price_data({ - 'currency_id': 1, - 'id': 1, - 'multiplier': 1.3, - 'price_per_unit': 0.04 if csv['pricing_type'][0] == 'OUTPUT' else 0.02, - 'unit_type': 'Tokens', - }), - provisioning_activity_price_data({ - 'currency_id': 1, - 'id': 2, - 'multiplier': 1.3, - 'price_per_unit': 0.04 if csv['pricing_type'][0] != 'OUTPUT' else 0.02, - 'unit_type': 'Tokens', - }), - ]) - output_was_first = csv['pricing_type'][0] == 'OUTPUT' - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningConsumptionEvent'), [ - provisioning_activity_item_data({ - 'id': - n + 1, - 'price_id': (1 if output_was_first else 2) if csv['pricing_type'][n] == 'OUTPUT' else - (2 if output_was_first else 1), - 'quantity': - float(csv['total_tokens'][n]), - 'external_pk': - str(csv['consumption_item_id'][n]), - 'registered_at': - self.bc.datetime.from_iso_string(csv['consumption_period_start'][n]), - 'csv_row': - n, - }) for n in range(10) - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningUserConsumption'), [ - provisioning_activity_data({ - 'id': - n + 1, - 'kind_id': - n + 1, - 'hash': - slug, - 'username': - csv['github_username'][n], - 'processed_at': - UTC_NOW, - 'status': - 'ERROR', - 'status_text': - ', '.join([ - 'Provisioning vendor Rigobot not found', - f"We could not find enough information about {csv['github_username'][n]}, mark this user user " - "as deleted if you don't recognize it", - ]), - }) for n in range(10) - ]) - self.assertEqual(self.bc.database.list_of('authenticate.GithubAcademyUser'), []) - - self.bc.check.calls(logging.Logger.info.call_args_list, [call(f'Starting upload for hash {slug}')]) + self.assertEqual(self.bc.database.list_of("payments.Currency"), [currency_data()]) + self.assertEqual(self.bc.database.list_of("provisioning.ProvisioningBill"), []) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningConsumptionKind"), + [ + provisioning_activity_kind_data( + { + "id": n + 1, + "product_name": f'{csv["purpose"][n]} (type: {csv["pricing_type"][n]}, model: {csv["model"][n]})', + "sku": f'{csv["purpose_slug"][n]}--{csv["pricing_type"][n].lower()}--{csv["model"][n].lower()}', + } + ) + for n in range(10) + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningPrice"), + [ + provisioning_activity_price_data( + { + "currency_id": 1, + "id": 1, + "multiplier": 1.3, + "price_per_unit": 0.04 if csv["pricing_type"][0] == "OUTPUT" else 0.02, + "unit_type": "Tokens", + } + ), + provisioning_activity_price_data( + { + "currency_id": 1, + "id": 2, + "multiplier": 1.3, + "price_per_unit": 0.04 if csv["pricing_type"][0] != "OUTPUT" else 0.02, + "unit_type": "Tokens", + } + ), + ], + ) + output_was_first = csv["pricing_type"][0] == "OUTPUT" + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningConsumptionEvent"), + [ + provisioning_activity_item_data( + { + "id": n + 1, + 
"price_id": ( + (1 if output_was_first else 2) + if csv["pricing_type"][n] == "OUTPUT" + else (2 if output_was_first else 1) + ), + "quantity": float(csv["total_tokens"][n]), + "external_pk": str(csv["consumption_item_id"][n]), + "registered_at": self.bc.datetime.from_iso_string(csv["consumption_period_start"][n]), + "csv_row": n, + } + ) + for n in range(10) + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningUserConsumption"), + [ + provisioning_activity_data( + { + "id": n + 1, + "kind_id": n + 1, + "hash": slug, + "username": csv["github_username"][n], + "processed_at": UTC_NOW, + "status": "ERROR", + "status_text": ", ".join( + [ + "Provisioning vendor Rigobot not found", + f"We could not find enough information about {csv['github_username'][n]}, mark this user user " + "as deleted if you don't recognize it", + ] + ), + } + ) + for n in range(10) + ], + ) + self.assertEqual(self.bc.database.list_of("authenticate.GithubAcademyUser"), []) + + self.bc.check.calls(logging.Logger.info.call_args_list, [call(f"Starting upload for hash {slug}")]) self.bc.check.calls( logging.Logger.error.call_args_list, - [call('Organization not provided, in this case, all organizations will be used') for _ in range(10)]) + [call("Organization not provided, in this case, all organizations will be used") for _ in range(10)], + ) self.bc.check.calls(tasks.upload.delay.call_args_list, []) self.bc.check.calls(tasks.calculate_bill_amounts.delay.call_args_list, []) @@ -2299,113 +2914,150 @@ def test_users_not_found(self): # Given: a csv with codespaces data and 10 User, 10 GithubAcademyUser and 10 GithubAcademyUserLog # When: vendor not found # Then: the task should not create any bill or activity - @patch.multiple('breathecode.services.google_cloud.Storage', - __init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) - @patch.multiple('breathecode.services.google_cloud.File', - __init__=MagicMock(return_value=None), - bucket=PropertyMock(), - file_name=PropertyMock(), - upload=MagicMock(), - exists=MagicMock(return_value=True), - url=MagicMock(return_value='https://storage.cloud.google.com/media-breathecode/hardcoded_url'), - create=True) - @patch('breathecode.provisioning.tasks.upload.delay', MagicMock(wraps=upload.delay)) - @patch('breathecode.provisioning.tasks.calculate_bill_amounts.delay', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event', MagicMock()) + @patch.multiple( + "breathecode.services.google_cloud.Storage", + __init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, + ) + @patch.multiple( + "breathecode.services.google_cloud.File", + __init__=MagicMock(return_value=None), + bucket=PropertyMock(), + file_name=PropertyMock(), + upload=MagicMock(), + exists=MagicMock(return_value=True), + url=MagicMock(return_value="https://storage.cloud.google.com/media-breathecode/hardcoded_url"), + create=True, + ) + @patch("breathecode.provisioning.tasks.upload.delay", MagicMock(wraps=upload.delay)) + @patch("breathecode.provisioning.tasks.calculate_bill_amounts.delay", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + 
@patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event", MagicMock()) def test_from_github_credentials__vendor_not_found(self): csv = rigobot_csv(10) - github_academy_users = [{ - 'username': username, - } for username in csv['github_username']] - github_academy_user_logs = [{ - 'storage_status': 'SYNCHED', - 'storage_action': 'ADD', - 'academy_user_id': n + 1, - } for n in range(10)] - model = self.bc.database.create(user=10, - app={'slug': 'rigobot'}, - first_party_credentials={'app': { - 'rigobot': 10 - }}, - github_academy_user=github_academy_users, - github_academy_user_log=github_academy_user_logs) + github_academy_users = [ + { + "username": username, + } + for username in csv["github_username"] + ] + github_academy_user_logs = [ + { + "storage_status": "SYNCHED", + "storage_action": "ADD", + "academy_user_id": n + 1, + } + for n in range(10) + ] + model = self.bc.database.create( + user=10, + app={"slug": "rigobot"}, + first_party_credentials={"app": {"rigobot": 10}}, + github_academy_user=github_academy_users, + github_academy_user_log=github_academy_user_logs, + ) logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] slug = self.bc.fake.slug() - with patch('breathecode.services.google_cloud.File.download', MagicMock(side_effect=csv_file_mock(csv))): + with patch("breathecode.services.google_cloud.File.download", MagicMock(side_effect=csv_file_mock(csv))): upload(slug) - self.assertEqual(self.bc.database.list_of('payments.Currency'), [currency_data()]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningBill'), [ - provisioning_bill_data({ - 'hash': slug, - 'status': 'ERROR', - }), - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningConsumptionKind'), [ - provisioning_activity_kind_data( - { - 'id': n + 1, - 'product_name': f'{csv["purpose"][n]} (type: {csv["pricing_type"][n]}, model: {csv["model"][n]})', - 'sku': f'{csv["purpose_slug"][n]}--{csv["pricing_type"][n].lower()}--{csv["model"][n].lower()}', - }) for n in range(10) - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningPrice'), [ - provisioning_activity_price_data({ - 'currency_id': 1, - 'id': 1, - 'multiplier': 1.3, - 'price_per_unit': 0.04 if csv['pricing_type'][0] == 'OUTPUT' else 0.02, - 'unit_type': 'Tokens', - }), - provisioning_activity_price_data({ - 'currency_id': 1, - 'id': 2, - 'multiplier': 1.3, - 'price_per_unit': 0.04 if csv['pricing_type'][0] != 'OUTPUT' else 0.02, - 'unit_type': 'Tokens', - }), - ]) - output_was_first = csv['pricing_type'][0] == 'OUTPUT' - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningConsumptionEvent'), [ - provisioning_activity_item_data({ - 'id': - n + 1, - 'price_id': (1 if output_was_first else 2) if csv['pricing_type'][n] == 'OUTPUT' else - (2 if output_was_first else 1), - 'quantity': - float(csv['total_tokens'][n]), - 'external_pk': - str(csv['consumption_item_id'][n]), - 'registered_at': - self.bc.datetime.from_iso_string(csv['consumption_period_start'][n]), - 'csv_row': - n, - }) for n in range(10) - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningUserConsumption'), [ - provisioning_activity_data({ - 'id': n + 1, - 'kind_id': n + 1, - 'hash': slug, - 'username': csv['github_username'][n], - 'processed_at': UTC_NOW, - 'status': 'ERROR', - 'status_text': ', '.join(['Provisioning vendor Rigobot not found']), - }) for n in 
range(10) - ]) - - self.bc.check.calls(logging.Logger.info.call_args_list, [call(f'Starting upload for hash {slug}')]) + self.assertEqual(self.bc.database.list_of("payments.Currency"), [currency_data()]) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningBill"), + [ + provisioning_bill_data( + { + "hash": slug, + "status": "ERROR", + } + ), + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningConsumptionKind"), + [ + provisioning_activity_kind_data( + { + "id": n + 1, + "product_name": f'{csv["purpose"][n]} (type: {csv["pricing_type"][n]}, model: {csv["model"][n]})', + "sku": f'{csv["purpose_slug"][n]}--{csv["pricing_type"][n].lower()}--{csv["model"][n].lower()}', + } + ) + for n in range(10) + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningPrice"), + [ + provisioning_activity_price_data( + { + "currency_id": 1, + "id": 1, + "multiplier": 1.3, + "price_per_unit": 0.04 if csv["pricing_type"][0] == "OUTPUT" else 0.02, + "unit_type": "Tokens", + } + ), + provisioning_activity_price_data( + { + "currency_id": 1, + "id": 2, + "multiplier": 1.3, + "price_per_unit": 0.04 if csv["pricing_type"][0] != "OUTPUT" else 0.02, + "unit_type": "Tokens", + } + ), + ], + ) + output_was_first = csv["pricing_type"][0] == "OUTPUT" + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningConsumptionEvent"), + [ + provisioning_activity_item_data( + { + "id": n + 1, + "price_id": ( + (1 if output_was_first else 2) + if csv["pricing_type"][n] == "OUTPUT" + else (2 if output_was_first else 1) + ), + "quantity": float(csv["total_tokens"][n]), + "external_pk": str(csv["consumption_item_id"][n]), + "registered_at": self.bc.datetime.from_iso_string(csv["consumption_period_start"][n]), + "csv_row": n, + } + ) + for n in range(10) + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningUserConsumption"), + [ + provisioning_activity_data( + { + "id": n + 1, + "kind_id": n + 1, + "hash": slug, + "username": csv["github_username"][n], + "processed_at": UTC_NOW, + "status": "ERROR", + "status_text": ", ".join(["Provisioning vendor Rigobot not found"]), + } + ) + for n in range(10) + ], + ) + + self.bc.check.calls(logging.Logger.info.call_args_list, [call(f"Starting upload for hash {slug}")]) self.bc.check.calls(logging.Logger.error.call_args_list, []) self.bc.check.calls(tasks.upload.delay.call_args_list, []) @@ -2415,119 +3067,157 @@ def test_from_github_credentials__vendor_not_found(self): # -> and 1 ProvisioningVendor of type codespaces # When: all the data is correct # Then: the task should create 1 bills and 10 activities - @patch.multiple('breathecode.services.google_cloud.Storage', - __init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) - @patch.multiple('breathecode.services.google_cloud.File', - __init__=MagicMock(return_value=None), - bucket=PropertyMock(), - file_name=PropertyMock(), - upload=MagicMock(), - exists=MagicMock(return_value=True), - url=MagicMock(return_value='https://storage.cloud.google.com/media-breathecode/hardcoded_url'), - create=True) - @patch('breathecode.provisioning.tasks.upload.delay', MagicMock(wraps=upload.delay)) - @patch('breathecode.provisioning.tasks.calculate_bill_amounts.delay', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', 
MagicMock()) - @patch('breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event', MagicMock()) + @patch.multiple( + "breathecode.services.google_cloud.Storage", + __init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, + ) + @patch.multiple( + "breathecode.services.google_cloud.File", + __init__=MagicMock(return_value=None), + bucket=PropertyMock(), + file_name=PropertyMock(), + upload=MagicMock(), + exists=MagicMock(return_value=True), + url=MagicMock(return_value="https://storage.cloud.google.com/media-breathecode/hardcoded_url"), + create=True, + ) + @patch("breathecode.provisioning.tasks.upload.delay", MagicMock(wraps=upload.delay)) + @patch("breathecode.provisioning.tasks.calculate_bill_amounts.delay", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event", MagicMock()) def test_from_github_credentials__generate_anything(self): csv = rigobot_csv(10) - github_academy_users = [{ - 'username': username, - } for username in csv['github_username']] - github_academy_user_logs = [{ - 'storage_status': 'SYNCHED', - 'storage_action': 'ADD', - 'academy_user_id': n + 1, - } for n in range(10)] - provisioning_vendor = {'name': 'Rigobot'} - model = self.bc.database.create(user=10, - app={'slug': 'rigobot'}, - first_party_credentials={'app': { - 'rigobot': 10 - }}, - github_academy_user=github_academy_users, - github_academy_user_log=github_academy_user_logs, - provisioning_vendor=provisioning_vendor) + github_academy_users = [ + { + "username": username, + } + for username in csv["github_username"] + ] + github_academy_user_logs = [ + { + "storage_status": "SYNCHED", + "storage_action": "ADD", + "academy_user_id": n + 1, + } + for n in range(10) + ] + provisioning_vendor = {"name": "Rigobot"} + model = self.bc.database.create( + user=10, + app={"slug": "rigobot"}, + first_party_credentials={"app": {"rigobot": 10}}, + github_academy_user=github_academy_users, + github_academy_user_log=github_academy_user_logs, + provisioning_vendor=provisioning_vendor, + ) logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] slug = self.bc.fake.slug() - with patch('breathecode.services.google_cloud.File.download', MagicMock(side_effect=csv_file_mock(csv))): + with patch("breathecode.services.google_cloud.File.download", MagicMock(side_effect=csv_file_mock(csv))): upload(slug) - self.assertEqual(self.bc.database.list_of('payments.Currency'), [currency_data()]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningBill'), [ - provisioning_bill_data({ - 'hash': slug, - 'vendor_id': 1, - }), - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningConsumptionKind'), [ - provisioning_activity_kind_data( - { - 'id': n + 1, - 'product_name': f'{csv["purpose"][n]} (type: {csv["pricing_type"][n]}, model: {csv["model"][n]})', - 'sku': f'{csv["purpose_slug"][n]}--{csv["pricing_type"][n].lower()}--{csv["model"][n].lower()}', - }) for n in range(10) - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningPrice'), [ - provisioning_activity_price_data({ - 'currency_id': 1, - 'id': 1, - 'multiplier': 1.3, - 'price_per_unit': 0.04 if csv['pricing_type'][0] == 'OUTPUT' else 0.02, - 'unit_type': 'Tokens', - }), - 
provisioning_activity_price_data({ - 'currency_id': 1, - 'id': 2, - 'multiplier': 1.3, - 'price_per_unit': 0.04 if csv['pricing_type'][0] != 'OUTPUT' else 0.02, - 'unit_type': 'Tokens', - }), - ]) - output_was_first = csv['pricing_type'][0] == 'OUTPUT' - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningConsumptionEvent'), [ - provisioning_activity_item_data({ - 'id': - n + 1, - 'price_id': (1 if output_was_first else 2) if csv['pricing_type'][n] == 'OUTPUT' else - (2 if output_was_first else 1), - 'quantity': - float(csv['total_tokens'][n]), - 'external_pk': - str(csv['consumption_item_id'][n]), - 'registered_at': - self.bc.datetime.from_iso_string(csv['consumption_period_start'][n]), - 'csv_row': - n, - 'vendor_id': - 1, - }) for n in range(10) - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningUserConsumption'), [ - provisioning_activity_data({ - 'id': n + 1, - 'kind_id': n + 1, - 'hash': slug, - 'username': csv['github_username'][n], - 'processed_at': UTC_NOW, - 'status': 'PERSISTED', - }) for n in range(10) - ]) - - self.assertEqual(self.bc.database.list_of('authenticate.GithubAcademyUser'), - self.bc.format.to_dict(model.github_academy_user)) - - self.bc.check.calls(logging.Logger.info.call_args_list, [call(f'Starting upload for hash {slug}')]) + self.assertEqual(self.bc.database.list_of("payments.Currency"), [currency_data()]) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningBill"), + [ + provisioning_bill_data( + { + "hash": slug, + "vendor_id": 1, + } + ), + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningConsumptionKind"), + [ + provisioning_activity_kind_data( + { + "id": n + 1, + "product_name": f'{csv["purpose"][n]} (type: {csv["pricing_type"][n]}, model: {csv["model"][n]})', + "sku": f'{csv["purpose_slug"][n]}--{csv["pricing_type"][n].lower()}--{csv["model"][n].lower()}', + } + ) + for n in range(10) + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningPrice"), + [ + provisioning_activity_price_data( + { + "currency_id": 1, + "id": 1, + "multiplier": 1.3, + "price_per_unit": 0.04 if csv["pricing_type"][0] == "OUTPUT" else 0.02, + "unit_type": "Tokens", + } + ), + provisioning_activity_price_data( + { + "currency_id": 1, + "id": 2, + "multiplier": 1.3, + "price_per_unit": 0.04 if csv["pricing_type"][0] != "OUTPUT" else 0.02, + "unit_type": "Tokens", + } + ), + ], + ) + output_was_first = csv["pricing_type"][0] == "OUTPUT" + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningConsumptionEvent"), + [ + provisioning_activity_item_data( + { + "id": n + 1, + "price_id": ( + (1 if output_was_first else 2) + if csv["pricing_type"][n] == "OUTPUT" + else (2 if output_was_first else 1) + ), + "quantity": float(csv["total_tokens"][n]), + "external_pk": str(csv["consumption_item_id"][n]), + "registered_at": self.bc.datetime.from_iso_string(csv["consumption_period_start"][n]), + "csv_row": n, + "vendor_id": 1, + } + ) + for n in range(10) + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningUserConsumption"), + [ + provisioning_activity_data( + { + "id": n + 1, + "kind_id": n + 1, + "hash": slug, + "username": csv["github_username"][n], + "processed_at": UTC_NOW, + "status": "PERSISTED", + } + ) + for n in range(10) + ], + ) + + self.assertEqual( + self.bc.database.list_of("authenticate.GithubAcademyUser"), + self.bc.format.to_dict(model.github_academy_user), + ) + + 
self.bc.check.calls(logging.Logger.info.call_args_list, [call(f"Starting upload for hash {slug}")]) self.bc.check.calls(logging.Logger.error.call_args_list, []) self.bc.check.calls(tasks.upload.delay.call_args_list, []) @@ -2537,49 +3227,59 @@ def test_from_github_credentials__generate_anything(self): # -> and 1 ProvisioningVendor of type codespaces # When: all the data is correct, and the amount of rows is greater than the limit # Then: the task should create 1 bills and 10 activities - @patch.multiple('breathecode.services.google_cloud.Storage', - __init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) - @patch.multiple('breathecode.services.google_cloud.File', - __init__=MagicMock(return_value=None), - bucket=PropertyMock(), - file_name=PropertyMock(), - upload=MagicMock(), - exists=MagicMock(return_value=True), - url=MagicMock(return_value='https://storage.cloud.google.com/media-breathecode/hardcoded_url'), - create=True) - @patch('breathecode.provisioning.tasks.upload.delay', MagicMock(wraps=upload.delay)) - @patch('breathecode.provisioning.tasks.calculate_bill_amounts.delay', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event', MagicMock()) - @patch('breathecode.provisioning.tasks.PANDAS_ROWS_LIMIT', PropertyMock(return_value=3)) + @patch.multiple( + "breathecode.services.google_cloud.Storage", + __init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, + ) + @patch.multiple( + "breathecode.services.google_cloud.File", + __init__=MagicMock(return_value=None), + bucket=PropertyMock(), + file_name=PropertyMock(), + upload=MagicMock(), + exists=MagicMock(return_value=True), + url=MagicMock(return_value="https://storage.cloud.google.com/media-breathecode/hardcoded_url"), + create=True, + ) + @patch("breathecode.provisioning.tasks.upload.delay", MagicMock(wraps=upload.delay)) + @patch("breathecode.provisioning.tasks.calculate_bill_amounts.delay", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event", MagicMock()) + @patch("breathecode.provisioning.tasks.PANDAS_ROWS_LIMIT", PropertyMock(return_value=3)) def test_pagination(self): csv = rigobot_csv(10) limit = tasks.PANDAS_ROWS_LIMIT tasks.PANDAS_ROWS_LIMIT = 3 - provisioning_vendor = {'name': 'Rigobot'} - github_academy_users = [{ - 'username': username, - } for username in csv['github_username']] - github_academy_user_logs = [{ - 'storage_status': 'SYNCHED', - 'storage_action': 'ADD', - 'academy_user_id': n + 1, - } for n in range(10)] - model = self.bc.database.create(user=10, - app={'slug': 'rigobot'}, - first_party_credentials={'app': { - 'rigobot': 10 - }}, - github_academy_user=github_academy_users, - github_academy_user_log=github_academy_user_logs, - provisioning_vendor=provisioning_vendor) + provisioning_vendor = {"name": "Rigobot"} + github_academy_users = [ + { + "username": username, + } + for username in csv["github_username"] + ] + github_academy_user_logs = [ + { + "storage_status": 
"SYNCHED", + "storage_action": "ADD", + "academy_user_id": n + 1, + } + for n in range(10) + ] + model = self.bc.database.create( + user=10, + app={"slug": "rigobot"}, + first_party_credentials={"app": {"rigobot": 10}}, + github_academy_user=github_academy_users, + github_academy_user_log=github_academy_user_logs, + provisioning_vendor=provisioning_vendor, + ) logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] @@ -2587,83 +3287,115 @@ def test_pagination(self): task_manager_id = get_last_task_manager_id(self.bc) + 1 slug = self.bc.fake.slug() - with patch('breathecode.services.google_cloud.File.download', MagicMock(side_effect=csv_file_mock(csv))): + with patch("breathecode.services.google_cloud.File.download", MagicMock(side_effect=csv_file_mock(csv))): upload(slug) - self.assertEqual(self.bc.database.list_of('payments.Currency'), [currency_data()]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningBill'), [ - provisioning_bill_data({ - 'hash': slug, - 'vendor_id': 1, - }), - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningConsumptionKind'), [ - provisioning_activity_kind_data( - { - 'id': n + 1, - 'product_name': f'{csv["purpose"][n]} (type: {csv["pricing_type"][n]}, model: {csv["model"][n]})', - 'sku': f'{csv["purpose_slug"][n]}--{csv["pricing_type"][n].lower()}--{csv["model"][n].lower()}', - }) for n in range(10) - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningPrice'), [ - provisioning_activity_price_data({ - 'currency_id': 1, - 'id': 1, - 'multiplier': 1.3, - 'price_per_unit': 0.04 if csv['pricing_type'][0] == 'OUTPUT' else 0.02, - 'unit_type': 'Tokens', - }), - provisioning_activity_price_data({ - 'currency_id': 1, - 'id': 2, - 'multiplier': 1.3, - 'price_per_unit': 0.04 if csv['pricing_type'][0] != 'OUTPUT' else 0.02, - 'unit_type': 'Tokens', - }), - ]) - output_was_first = csv['pricing_type'][0] == 'OUTPUT' - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningConsumptionEvent'), [ - provisioning_activity_item_data({ - 'id': - n + 1, - 'price_id': (1 if output_was_first else 2) if csv['pricing_type'][n] == 'OUTPUT' else - (2 if output_was_first else 1), - 'quantity': - float(csv['total_tokens'][n]), - 'external_pk': - str(csv['consumption_item_id'][n]), - 'registered_at': - self.bc.datetime.from_iso_string(csv['consumption_period_start'][n]), - 'csv_row': - n, - 'vendor_id': - 1, - }) for n in range(10) - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningUserConsumption'), [ - provisioning_activity_data({ - 'id': n + 1, - 'kind_id': n + 1, - 'hash': slug, - 'username': csv['github_username'][n], - 'processed_at': UTC_NOW, - 'status': 'PERSISTED', - }) for n in range(10) - ]) - - self.assertEqual(self.bc.database.list_of('authenticate.GithubAcademyUser'), - self.bc.format.to_dict(model.github_academy_user)) - - self.bc.check.calls(logging.Logger.info.call_args_list, - [call(f'Starting upload for hash {slug}') for _ in range(4)]) + self.assertEqual(self.bc.database.list_of("payments.Currency"), [currency_data()]) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningBill"), + [ + provisioning_bill_data( + { + "hash": slug, + "vendor_id": 1, + } + ), + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningConsumptionKind"), + [ + provisioning_activity_kind_data( + { + "id": n + 1, + "product_name": f'{csv["purpose"][n]} (type: {csv["pricing_type"][n]}, model: {csv["model"][n]})', + "sku": 
f'{csv["purpose_slug"][n]}--{csv["pricing_type"][n].lower()}--{csv["model"][n].lower()}', + } + ) + for n in range(10) + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningPrice"), + [ + provisioning_activity_price_data( + { + "currency_id": 1, + "id": 1, + "multiplier": 1.3, + "price_per_unit": 0.04 if csv["pricing_type"][0] == "OUTPUT" else 0.02, + "unit_type": "Tokens", + } + ), + provisioning_activity_price_data( + { + "currency_id": 1, + "id": 2, + "multiplier": 1.3, + "price_per_unit": 0.04 if csv["pricing_type"][0] != "OUTPUT" else 0.02, + "unit_type": "Tokens", + } + ), + ], + ) + output_was_first = csv["pricing_type"][0] == "OUTPUT" + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningConsumptionEvent"), + [ + provisioning_activity_item_data( + { + "id": n + 1, + "price_id": ( + (1 if output_was_first else 2) + if csv["pricing_type"][n] == "OUTPUT" + else (2 if output_was_first else 1) + ), + "quantity": float(csv["total_tokens"][n]), + "external_pk": str(csv["consumption_item_id"][n]), + "registered_at": self.bc.datetime.from_iso_string(csv["consumption_period_start"][n]), + "csv_row": n, + "vendor_id": 1, + } + ) + for n in range(10) + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningUserConsumption"), + [ + provisioning_activity_data( + { + "id": n + 1, + "kind_id": n + 1, + "hash": slug, + "username": csv["github_username"][n], + "processed_at": UTC_NOW, + "status": "PERSISTED", + } + ) + for n in range(10) + ], + ) + + self.assertEqual( + self.bc.database.list_of("authenticate.GithubAcademyUser"), + self.bc.format.to_dict(model.github_academy_user), + ) + + self.bc.check.calls( + logging.Logger.info.call_args_list, [call(f"Starting upload for hash {slug}") for _ in range(4)] + ) self.bc.check.calls(logging.Logger.error.call_args_list, []) - self.bc.check.calls(tasks.upload.delay.call_args_list, [ - call(slug, page=1, task_manager_id=task_manager_id), - call(slug, page=2, task_manager_id=task_manager_id), - call(slug, page=3, task_manager_id=task_manager_id), - ]) + self.bc.check.calls( + tasks.upload.delay.call_args_list, + [ + call(slug, page=1, task_manager_id=task_manager_id), + call(slug, page=2, task_manager_id=task_manager_id), + call(slug, page=3, task_manager_id=task_manager_id), + ], + ) self.bc.check.calls(tasks.calculate_bill_amounts.delay.call_args_list, [call(slug)]) @@ -2673,126 +3405,162 @@ def test_pagination(self): # -> and 1 ProvisioningVendor of type codespaces # When: all the data is correct, without ProfileAcademy # Then: the task should create 1 bills and 10 activities per academy - @patch.multiple('breathecode.services.google_cloud.Storage', - __init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) - @patch.multiple('breathecode.services.google_cloud.File', - __init__=MagicMock(return_value=None), - bucket=PropertyMock(), - file_name=PropertyMock(), - upload=MagicMock(), - exists=MagicMock(return_value=True), - url=MagicMock(return_value='https://storage.cloud.google.com/media-breathecode/hardcoded_url'), - create=True) - @patch('breathecode.provisioning.tasks.upload.delay', MagicMock(wraps=upload.delay)) - @patch('breathecode.provisioning.tasks.calculate_bill_amounts.delay', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - 
@patch('breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event', MagicMock()) + @patch.multiple( + "breathecode.services.google_cloud.Storage", + __init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, + ) + @patch.multiple( + "breathecode.services.google_cloud.File", + __init__=MagicMock(return_value=None), + bucket=PropertyMock(), + file_name=PropertyMock(), + upload=MagicMock(), + exists=MagicMock(return_value=True), + url=MagicMock(return_value="https://storage.cloud.google.com/media-breathecode/hardcoded_url"), + create=True, + ) + @patch("breathecode.provisioning.tasks.upload.delay", MagicMock(wraps=upload.delay)) + @patch("breathecode.provisioning.tasks.calculate_bill_amounts.delay", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event", MagicMock()) def test_from_github_credentials__generate_anything__case1(self): csv = rigobot_csv(10) - github_academy_users = [{ - 'username': username, - } for username in csv['github_username']] - github_academy_user_logs = [{ - 'storage_status': 'SYNCHED', - 'storage_action': 'ADD', - 'academy_user_id': n + 1, - } for n in range(10)] - provisioning_vendor = {'name': 'Rigobot'} - - model = self.bc.database.create(user=10, - academy_auth_settings=[{ - 'academy_id': n + 1 - } for n in range(3)], - academy=3, - app={'slug': 'rigobot'}, - first_party_credentials={'app': { - 'rigobot': 10 - }}, - github_academy_user=github_academy_users, - github_academy_user_log=github_academy_user_logs, - provisioning_vendor=provisioning_vendor) + github_academy_users = [ + { + "username": username, + } + for username in csv["github_username"] + ] + github_academy_user_logs = [ + { + "storage_status": "SYNCHED", + "storage_action": "ADD", + "academy_user_id": n + 1, + } + for n in range(10) + ] + provisioning_vendor = {"name": "Rigobot"} + + model = self.bc.database.create( + user=10, + academy_auth_settings=[{"academy_id": n + 1} for n in range(3)], + academy=3, + app={"slug": "rigobot"}, + first_party_credentials={"app": {"rigobot": 10}}, + github_academy_user=github_academy_users, + github_academy_user_log=github_academy_user_logs, + provisioning_vendor=provisioning_vendor, + ) logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] slug = self.bc.fake.slug() - with patch('breathecode.services.google_cloud.File.download', MagicMock(side_effect=csv_file_mock(csv))): + with patch("breathecode.services.google_cloud.File.download", MagicMock(side_effect=csv_file_mock(csv))): upload(slug) - self.assertEqual(self.bc.database.list_of('payments.Currency'), [currency_data()]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningBill'), [ - provisioning_bill_data({ - 'id': 1, - 'academy_id': 1, - 'vendor_id': 1, - 'hash': slug, - }), - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningConsumptionKind'), [ - provisioning_activity_kind_data( - { - 'id': n + 1, - 'product_name': f'{csv["purpose"][n]} (type: {csv["pricing_type"][n]}, model: {csv["model"][n]})', - 'sku': f'{csv["purpose_slug"][n]}--{csv["pricing_type"][n].lower()}--{csv["model"][n].lower()}', - }) for n in range(10) - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningPrice'), [ - 
provisioning_activity_price_data({ - 'currency_id': 1, - 'id': 1, - 'multiplier': 1.3, - 'price_per_unit': 0.04 if csv['pricing_type'][0] == 'OUTPUT' else 0.02, - 'unit_type': 'Tokens', - }), - provisioning_activity_price_data({ - 'currency_id': 1, - 'id': 2, - 'multiplier': 1.3, - 'price_per_unit': 0.04 if csv['pricing_type'][0] != 'OUTPUT' else 0.02, - 'unit_type': 'Tokens', - }), - ]) - output_was_first = csv['pricing_type'][0] == 'OUTPUT' - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningConsumptionEvent'), [ - provisioning_activity_item_data({ - 'id': - n + 1, - 'price_id': (1 if output_was_first else 2) if csv['pricing_type'][n] == 'OUTPUT' else - (2 if output_was_first else 1), - 'quantity': - float(csv['total_tokens'][n]), - 'external_pk': - str(csv['consumption_item_id'][n]), - 'registered_at': - self.bc.datetime.from_iso_string(csv['consumption_period_start'][n]), - 'csv_row': - n, - 'vendor_id': - 1, - }) for n in range(10) - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningUserConsumption'), [ - provisioning_activity_data({ - 'id': n + 1, - 'kind_id': n + 1, - 'hash': slug, - 'username': csv['github_username'][n], - 'processed_at': UTC_NOW, - 'status': 'PERSISTED', - }) for n in range(10) - ]) - - self.assertEqual(self.bc.database.list_of('authenticate.GithubAcademyUser'), - self.bc.format.to_dict(model.github_academy_user)) - - self.bc.check.calls(logging.Logger.info.call_args_list, [call(f'Starting upload for hash {slug}')]) + self.assertEqual(self.bc.database.list_of("payments.Currency"), [currency_data()]) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningBill"), + [ + provisioning_bill_data( + { + "id": 1, + "academy_id": 1, + "vendor_id": 1, + "hash": slug, + } + ), + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningConsumptionKind"), + [ + provisioning_activity_kind_data( + { + "id": n + 1, + "product_name": f'{csv["purpose"][n]} (type: {csv["pricing_type"][n]}, model: {csv["model"][n]})', + "sku": f'{csv["purpose_slug"][n]}--{csv["pricing_type"][n].lower()}--{csv["model"][n].lower()}', + } + ) + for n in range(10) + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningPrice"), + [ + provisioning_activity_price_data( + { + "currency_id": 1, + "id": 1, + "multiplier": 1.3, + "price_per_unit": 0.04 if csv["pricing_type"][0] == "OUTPUT" else 0.02, + "unit_type": "Tokens", + } + ), + provisioning_activity_price_data( + { + "currency_id": 1, + "id": 2, + "multiplier": 1.3, + "price_per_unit": 0.04 if csv["pricing_type"][0] != "OUTPUT" else 0.02, + "unit_type": "Tokens", + } + ), + ], + ) + output_was_first = csv["pricing_type"][0] == "OUTPUT" + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningConsumptionEvent"), + [ + provisioning_activity_item_data( + { + "id": n + 1, + "price_id": ( + (1 if output_was_first else 2) + if csv["pricing_type"][n] == "OUTPUT" + else (2 if output_was_first else 1) + ), + "quantity": float(csv["total_tokens"][n]), + "external_pk": str(csv["consumption_item_id"][n]), + "registered_at": self.bc.datetime.from_iso_string(csv["consumption_period_start"][n]), + "csv_row": n, + "vendor_id": 1, + } + ) + for n in range(10) + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningUserConsumption"), + [ + provisioning_activity_data( + { + "id": n + 1, + "kind_id": n + 1, + "hash": slug, + "username": csv["github_username"][n], + "processed_at": UTC_NOW, + "status": "PERSISTED", + } + ) + 
for n in range(10) + ], + ) + + self.assertEqual( + self.bc.database.list_of("authenticate.GithubAcademyUser"), + self.bc.format.to_dict(model.github_academy_user), + ) + + self.bc.check.calls(logging.Logger.info.call_args_list, [call(f"Starting upload for hash {slug}")]) self.bc.check.calls(logging.Logger.error.call_args_list, []) self.bc.check.calls(tasks.upload.delay.call_args_list, []) @@ -2802,144 +3570,185 @@ def test_from_github_credentials__generate_anything__case1(self): # -> and 1 ProvisioningVendor of type codespaces # When: all the data is correct, with ProfileAcademy # Then: the task should create 1 bills and 10 activities per user's ProfileAcademy - @patch.multiple('breathecode.services.google_cloud.Storage', - __init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) - @patch.multiple('breathecode.services.google_cloud.File', - __init__=MagicMock(return_value=None), - bucket=PropertyMock(), - file_name=PropertyMock(), - upload=MagicMock(), - exists=MagicMock(return_value=True), - url=MagicMock(return_value='https://storage.cloud.google.com/media-breathecode/hardcoded_url'), - create=True) - @patch('breathecode.provisioning.tasks.upload.delay', MagicMock(wraps=upload.delay)) - @patch('breathecode.provisioning.tasks.calculate_bill_amounts.delay', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event', MagicMock()) - @patch('breathecode.authenticate.signals.academy_invite_accepted.send_robust', MagicMock()) + @patch.multiple( + "breathecode.services.google_cloud.Storage", + __init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, + ) + @patch.multiple( + "breathecode.services.google_cloud.File", + __init__=MagicMock(return_value=None), + bucket=PropertyMock(), + file_name=PropertyMock(), + upload=MagicMock(), + exists=MagicMock(return_value=True), + url=MagicMock(return_value="https://storage.cloud.google.com/media-breathecode/hardcoded_url"), + create=True, + ) + @patch("breathecode.provisioning.tasks.upload.delay", MagicMock(wraps=upload.delay)) + @patch("breathecode.provisioning.tasks.calculate_bill_amounts.delay", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event", MagicMock()) + @patch("breathecode.authenticate.signals.academy_invite_accepted.send_robust", MagicMock()) def test_from_github_credentials__generate_anything__case2(self): csv = rigobot_csv(10) - github_academy_users = [{ - 'username': username, - } for username in csv['github_username']] - github_academy_user_logs = [{ - 'storage_status': 'SYNCHED', - 'storage_action': 'ADD', - 'academy_user_id': n + 1, - } for n in range(10)] - provisioning_vendor = {'name': 'Rigobot'} + github_academy_users = [ + { + "username": username, + } + for username in csv["github_username"] + ] + github_academy_user_logs = [ + { + "storage_status": "SYNCHED", + "storage_action": "ADD", + "academy_user_id": n + 1, + } + for n in range(10) + ] + provisioning_vendor = {"name": "Rigobot"} 
profile_academies = [] for user_n in range(10): for academy_n in range(3): - profile_academies.append({ - 'academy_id': academy_n + 1, - 'user_id': user_n + 1, - 'status': 'ACTIVE', - }) - - credentials_github = [{ - 'username': csv['github_username'][n], - 'user_id': n + 1, - } for n in range(10)] - - model = self.bc.database.create(user=10, - academy_auth_settings=[{ - 'academy_id': n + 1 - } for n in range(3)], - credentials_github=credentials_github, - app={'slug': 'rigobot'}, - first_party_credentials={'app': { - 'rigobot': 10 - }}, - academy=3, - profile_academy=profile_academies, - github_academy_user=github_academy_users, - github_academy_user_log=github_academy_user_logs, - provisioning_vendor=provisioning_vendor) + profile_academies.append( + { + "academy_id": academy_n + 1, + "user_id": user_n + 1, + "status": "ACTIVE", + } + ) + + credentials_github = [ + { + "username": csv["github_username"][n], + "user_id": n + 1, + } + for n in range(10) + ] + + model = self.bc.database.create( + user=10, + academy_auth_settings=[{"academy_id": n + 1} for n in range(3)], + credentials_github=credentials_github, + app={"slug": "rigobot"}, + first_party_credentials={"app": {"rigobot": 10}}, + academy=3, + profile_academy=profile_academies, + github_academy_user=github_academy_users, + github_academy_user_log=github_academy_user_logs, + provisioning_vendor=provisioning_vendor, + ) logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] slug = self.bc.fake.slug() - with patch('breathecode.services.google_cloud.File.download', MagicMock(side_effect=csv_file_mock(csv))): + with patch("breathecode.services.google_cloud.File.download", MagicMock(side_effect=csv_file_mock(csv))): upload(slug) - self.assertEqual(self.bc.database.list_of('payments.Currency'), [currency_data()]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningBill'), [ - provisioning_bill_data({ - 'id': 1, - 'academy_id': 1, - 'hash': slug, - 'vendor_id': 1, - }), - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningConsumptionKind'), [ - provisioning_activity_kind_data( - { - 'id': n + 1, - 'product_name': f'{csv["purpose"][n]} (type: {csv["pricing_type"][n]}, model: {csv["model"][n]})', - 'sku': f'{csv["purpose_slug"][n]}--{csv["pricing_type"][n].lower()}--{csv["model"][n].lower()}', - }) for n in range(10) - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningPrice'), [ - provisioning_activity_price_data({ - 'currency_id': 1, - 'id': 1, - 'multiplier': 1.3, - 'price_per_unit': 0.04 if csv['pricing_type'][0] == 'OUTPUT' else 0.02, - 'unit_type': 'Tokens', - }), - provisioning_activity_price_data({ - 'currency_id': 1, - 'id': 2, - 'multiplier': 1.3, - 'price_per_unit': 0.04 if csv['pricing_type'][0] != 'OUTPUT' else 0.02, - 'unit_type': 'Tokens', - }), - ]) - output_was_first = csv['pricing_type'][0] == 'OUTPUT' - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningConsumptionEvent'), [ - provisioning_activity_item_data({ - 'id': - n + 1, - 'price_id': (1 if output_was_first else 2) if csv['pricing_type'][n] == 'OUTPUT' else - (2 if output_was_first else 1), - 'quantity': - float(csv['total_tokens'][n]), - 'external_pk': - str(csv['consumption_item_id'][n]), - 'registered_at': - self.bc.datetime.from_iso_string(csv['consumption_period_start'][n]), - 'csv_row': - n, - 'vendor_id': - 1, - }) for n in range(10) - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningUserConsumption'), [ - 
provisioning_activity_data({ - 'id': n + 1, - 'kind_id': n + 1, - 'hash': slug, - 'username': csv['github_username'][n], - 'processed_at': UTC_NOW, - 'status': 'PERSISTED', - }) for n in range(10) - ]) - - self.assertEqual(self.bc.database.list_of('authenticate.GithubAcademyUser'), - self.bc.format.to_dict(model.github_academy_user)) - - self.bc.check.calls(logging.Logger.info.call_args_list, [call(f'Starting upload for hash {slug}')]) + self.assertEqual(self.bc.database.list_of("payments.Currency"), [currency_data()]) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningBill"), + [ + provisioning_bill_data( + { + "id": 1, + "academy_id": 1, + "hash": slug, + "vendor_id": 1, + } + ), + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningConsumptionKind"), + [ + provisioning_activity_kind_data( + { + "id": n + 1, + "product_name": f'{csv["purpose"][n]} (type: {csv["pricing_type"][n]}, model: {csv["model"][n]})', + "sku": f'{csv["purpose_slug"][n]}--{csv["pricing_type"][n].lower()}--{csv["model"][n].lower()}', + } + ) + for n in range(10) + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningPrice"), + [ + provisioning_activity_price_data( + { + "currency_id": 1, + "id": 1, + "multiplier": 1.3, + "price_per_unit": 0.04 if csv["pricing_type"][0] == "OUTPUT" else 0.02, + "unit_type": "Tokens", + } + ), + provisioning_activity_price_data( + { + "currency_id": 1, + "id": 2, + "multiplier": 1.3, + "price_per_unit": 0.04 if csv["pricing_type"][0] != "OUTPUT" else 0.02, + "unit_type": "Tokens", + } + ), + ], + ) + output_was_first = csv["pricing_type"][0] == "OUTPUT" + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningConsumptionEvent"), + [ + provisioning_activity_item_data( + { + "id": n + 1, + "price_id": ( + (1 if output_was_first else 2) + if csv["pricing_type"][n] == "OUTPUT" + else (2 if output_was_first else 1) + ), + "quantity": float(csv["total_tokens"][n]), + "external_pk": str(csv["consumption_item_id"][n]), + "registered_at": self.bc.datetime.from_iso_string(csv["consumption_period_start"][n]), + "csv_row": n, + "vendor_id": 1, + } + ) + for n in range(10) + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningUserConsumption"), + [ + provisioning_activity_data( + { + "id": n + 1, + "kind_id": n + 1, + "hash": slug, + "username": csv["github_username"][n], + "processed_at": UTC_NOW, + "status": "PERSISTED", + } + ) + for n in range(10) + ], + ) + + self.assertEqual( + self.bc.database.list_of("authenticate.GithubAcademyUser"), + self.bc.format.to_dict(model.github_academy_user), + ) + + self.bc.check.calls(logging.Logger.info.call_args_list, [call(f"Starting upload for hash {slug}")]) self.bc.check.calls(logging.Logger.error.call_args_list, []) self.bc.check.calls(tasks.upload.delay.call_args_list, []) @@ -2949,80 +3758,96 @@ def test_from_github_credentials__generate_anything__case2(self): # -> and 1 ProvisioningVendor of type codespaces # When: all the data is correct, with ProfileAcademy # Then: the task should create 1 bills and 10 activities per user's ProfileAcademy - @patch.multiple('breathecode.services.google_cloud.Storage', - __init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) - @patch.multiple('breathecode.services.google_cloud.File', - __init__=MagicMock(return_value=None), - bucket=PropertyMock(), - file_name=PropertyMock(), - upload=MagicMock(), - exists=MagicMock(return_value=True), - 
url=MagicMock(return_value='https://storage.cloud.google.com/media-breathecode/hardcoded_url'), - create=True) - @patch('breathecode.provisioning.tasks.upload.delay', MagicMock(wraps=upload.delay)) - @patch('breathecode.provisioning.tasks.calculate_bill_amounts.delay', MagicMock()) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('logging.Logger.info', MagicMock()) - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock()) - @patch('breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event', MagicMock()) - @patch('breathecode.authenticate.signals.academy_invite_accepted.send_robust', MagicMock()) + @patch.multiple( + "breathecode.services.google_cloud.Storage", + __init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, + ) + @patch.multiple( + "breathecode.services.google_cloud.File", + __init__=MagicMock(return_value=None), + bucket=PropertyMock(), + file_name=PropertyMock(), + upload=MagicMock(), + exists=MagicMock(return_value=True), + url=MagicMock(return_value="https://storage.cloud.google.com/media-breathecode/hardcoded_url"), + create=True, + ) + @patch("breathecode.provisioning.tasks.upload.delay", MagicMock(wraps=upload.delay)) + @patch("breathecode.provisioning.tasks.calculate_bill_amounts.delay", MagicMock()) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("logging.Logger.info", MagicMock()) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock()) + @patch("breathecode.notify.utils.hook_manager.HookManagerClass.process_model_event", MagicMock()) + @patch("breathecode.authenticate.signals.academy_invite_accepted.send_robust", MagicMock()) def test_from_github_credentials__generate_anything__case3(self): csv = rigobot_csv(10) - github_academy_users = [{ - 'username': username, - } for username in csv['github_username']] - github_academy_user_logs = [{ - 'storage_status': 'SYNCHED', - 'storage_action': 'ADD', - 'academy_user_id': n + 1, - } for n in range(10)] - provisioning_vendor = {'name': 'Rigobot'} + github_academy_users = [ + { + "username": username, + } + for username in csv["github_username"] + ] + github_academy_user_logs = [ + { + "storage_status": "SYNCHED", + "storage_action": "ADD", + "academy_user_id": n + 1, + } + for n in range(10) + ] + provisioning_vendor = {"name": "Rigobot"} profile_academies = [] for user_n in range(10): for academy_n in range(3): - profile_academies.append({ - 'academy_id': academy_n + 1, - 'user_id': user_n + 1, - 'status': 'ACTIVE', - }) - - credentials_github = [{ - 'username': csv['github_username'][n], - 'user_id': n + 1, - } for n in range(10)] - - cohort_users = [{ - 'user_id': n + 1, - 'cohort_id': 1, - } for n in range(10)] + profile_academies.append( + { + "academy_id": academy_n + 1, + "user_id": user_n + 1, + "status": "ACTIVE", + } + ) + + credentials_github = [ + { + "username": csv["github_username"][n], + "user_id": n + 1, + } + for n in range(10) + ] + + cohort_users = [ + { + "user_id": n + 1, + "cohort_id": 1, + } + for n in range(10) + ] cohort = { - 'academy_id': 1, - 'kickoff_date': timezone.now() + timedelta(days=1), - 'ending_date': timezone.now() - timedelta(days=1), + "academy_id": 1, + "kickoff_date": timezone.now() + timedelta(days=1), + "ending_date": timezone.now() - timedelta(days=1), } - model = self.bc.database.create(user=10, - 
academy_auth_settings=[{ - 'academy_id': n + 1 - } for n in range(3)], - app={'slug': 'rigobot'}, - first_party_credentials={'app': { - 'rigobot': 10 - }}, - credentials_github=credentials_github, - academy=3, - cohort=cohort, - cohort_user=cohort_users, - profile_academy=profile_academies, - github_academy_user=github_academy_users, - github_academy_user_log=github_academy_user_logs, - provisioning_vendor=provisioning_vendor) + model = self.bc.database.create( + user=10, + academy_auth_settings=[{"academy_id": n + 1} for n in range(3)], + app={"slug": "rigobot"}, + first_party_credentials={"app": {"rigobot": 10}}, + credentials_github=credentials_github, + academy=3, + cohort=cohort, + cohort_user=cohort_users, + profile_academy=profile_academies, + github_academy_user=github_academy_users, + github_academy_user_log=github_academy_user_logs, + provisioning_vendor=provisioning_vendor, + ) logging.Logger.info.call_args_list = [] logging.Logger.error.call_args_list = [] @@ -3031,8 +3856,8 @@ def test_from_github_credentials__generate_anything__case3(self): y = [[model.academy[RANDOM_ACADEMIES[x]]] for x in range(10)] - with patch('random.choices', MagicMock(side_effect=y)): - with patch('breathecode.services.google_cloud.File.download', MagicMock(side_effect=csv_file_mock(csv))): + with patch("random.choices", MagicMock(side_effect=y)): + with patch("breathecode.services.google_cloud.File.download", MagicMock(side_effect=csv_file_mock(csv))): upload(slug) @@ -3044,86 +3869,118 @@ def test_from_github_credentials__generate_anything__case3(self): academies = list(academies) - self.assertEqual(self.bc.database.list_of('payments.Currency'), [currency_data()]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningBill'), [ - provisioning_bill_data({ - 'id': 1, - 'academy_id': RANDOM_ACADEMIES[0] + 1, - 'hash': slug, - 'vendor_id': 1, - }), - provisioning_bill_data({ - 'id': 2, - 'academy_id': RANDOM_ACADEMIES[1] + 1, - 'hash': slug, - 'vendor_id': 1, - }), - provisioning_bill_data({ - 'id': 3, - 'academy_id': RANDOM_ACADEMIES[2] + 1, - 'hash': slug, - 'vendor_id': 1, - }), - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningConsumptionKind'), [ - provisioning_activity_kind_data( - { - 'id': n + 1, - 'product_name': f'{csv["purpose"][n]} (type: {csv["pricing_type"][n]}, model: {csv["model"][n]})', - 'sku': f'{csv["purpose_slug"][n]}--{csv["pricing_type"][n].lower()}--{csv["model"][n].lower()}', - }) for n in range(10) - ]) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningPrice'), [ - provisioning_activity_price_data({ - 'currency_id': 1, - 'id': 1, - 'multiplier': 1.3, - 'price_per_unit': 0.04 if csv['pricing_type'][0] == 'OUTPUT' else 0.02, - 'unit_type': 'Tokens', - }), - provisioning_activity_price_data({ - 'currency_id': 1, - 'id': 2, - 'multiplier': 1.3, - 'price_per_unit': 0.04 if csv['pricing_type'][0] != 'OUTPUT' else 0.02, - 'unit_type': 'Tokens', - }), - ]) - output_was_first = csv['pricing_type'][0] == 'OUTPUT' - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningConsumptionEvent'), [ - provisioning_activity_item_data({ - 'id': - n + 1, - 'price_id': (1 if output_was_first else 2) if csv['pricing_type'][n] == 'OUTPUT' else - (2 if output_was_first else 1), - 'quantity': - float(csv['total_tokens'][n]), - 'external_pk': - str(csv['consumption_item_id'][n]), - 'registered_at': - self.bc.datetime.from_iso_string(csv['consumption_period_start'][n]), - 'csv_row': - n, - 'vendor_id': - 1, - }) for n in range(10) - ]) 
- self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningUserConsumption'), [ - provisioning_activity_data({ - 'id': n + 1, - 'kind_id': n + 1, - 'hash': slug, - 'username': csv['github_username'][n], - 'processed_at': UTC_NOW, - 'status': 'PERSISTED', - 'status_text': '', - }) for n in range(10) - ]) - - self.assertEqual(self.bc.database.list_of('authenticate.GithubAcademyUser'), - self.bc.format.to_dict(model.github_academy_user)) - - self.bc.check.calls(logging.Logger.info.call_args_list, [call(f'Starting upload for hash {slug}')]) + self.assertEqual(self.bc.database.list_of("payments.Currency"), [currency_data()]) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningBill"), + [ + provisioning_bill_data( + { + "id": 1, + "academy_id": RANDOM_ACADEMIES[0] + 1, + "hash": slug, + "vendor_id": 1, + } + ), + provisioning_bill_data( + { + "id": 2, + "academy_id": RANDOM_ACADEMIES[1] + 1, + "hash": slug, + "vendor_id": 1, + } + ), + provisioning_bill_data( + { + "id": 3, + "academy_id": RANDOM_ACADEMIES[2] + 1, + "hash": slug, + "vendor_id": 1, + } + ), + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningConsumptionKind"), + [ + provisioning_activity_kind_data( + { + "id": n + 1, + "product_name": f'{csv["purpose"][n]} (type: {csv["pricing_type"][n]}, model: {csv["model"][n]})', + "sku": f'{csv["purpose_slug"][n]}--{csv["pricing_type"][n].lower()}--{csv["model"][n].lower()}', + } + ) + for n in range(10) + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningPrice"), + [ + provisioning_activity_price_data( + { + "currency_id": 1, + "id": 1, + "multiplier": 1.3, + "price_per_unit": 0.04 if csv["pricing_type"][0] == "OUTPUT" else 0.02, + "unit_type": "Tokens", + } + ), + provisioning_activity_price_data( + { + "currency_id": 1, + "id": 2, + "multiplier": 1.3, + "price_per_unit": 0.04 if csv["pricing_type"][0] != "OUTPUT" else 0.02, + "unit_type": "Tokens", + } + ), + ], + ) + output_was_first = csv["pricing_type"][0] == "OUTPUT" + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningConsumptionEvent"), + [ + provisioning_activity_item_data( + { + "id": n + 1, + "price_id": ( + (1 if output_was_first else 2) + if csv["pricing_type"][n] == "OUTPUT" + else (2 if output_was_first else 1) + ), + "quantity": float(csv["total_tokens"][n]), + "external_pk": str(csv["consumption_item_id"][n]), + "registered_at": self.bc.datetime.from_iso_string(csv["consumption_period_start"][n]), + "csv_row": n, + "vendor_id": 1, + } + ) + for n in range(10) + ], + ) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningUserConsumption"), + [ + provisioning_activity_data( + { + "id": n + 1, + "kind_id": n + 1, + "hash": slug, + "username": csv["github_username"][n], + "processed_at": UTC_NOW, + "status": "PERSISTED", + "status_text": "", + } + ) + for n in range(10) + ], + ) + + self.assertEqual( + self.bc.database.list_of("authenticate.GithubAcademyUser"), + self.bc.format.to_dict(model.github_academy_user), + ) + + self.bc.check.calls(logging.Logger.info.call_args_list, [call(f"Starting upload for hash {slug}")]) self.bc.check.calls(logging.Logger.error.call_args_list, []) self.bc.check.calls(tasks.upload.delay.call_args_list, []) diff --git a/breathecode/provisioning/tests/urls/tests_academy_bill_id.py b/breathecode/provisioning/tests/urls/tests_academy_bill_id.py index 7b45ed5f5..9d264e1e6 100644 --- a/breathecode/provisioning/tests/urls/tests_academy_bill_id.py +++ 
b/breathecode/provisioning/tests/urls/tests_academy_bill_id.py @@ -1,6 +1,7 @@ """ Test /v1/marketing/upload """ + import random from unittest.mock import MagicMock, Mock, PropertyMock, call, patch @@ -21,7 +22,7 @@ def put_serializer(provisioning_bill, data={}): return { - 'status': provisioning_bill.status, + "status": provisioning_bill.status, **data, } @@ -33,13 +34,13 @@ class MarketingTestSuite(ProvisioningTestCase): # Then: should return 401 def test_upload_without_auth(self): - self.headers(accept='application/json', content_disposition='attachment; filename="filename.csv"') + self.headers(accept="application/json", content_disposition='attachment; filename="filename.csv"') - url = reverse_lazy('provisioning:academy_bill_id', kwargs={'bill_id': 1}) + url = reverse_lazy("provisioning:academy_bill_id", kwargs={"bill_id": 1}) response = self.client.put(url) json = response.json() - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) @@ -51,16 +52,16 @@ def test_upload_without_capability(self): model = self.bc.database.create(user=1) self.client.force_authenticate(model.user) - self.headers(academy=1, accept='application/json', content_disposition='attachment; filename="filename.csv"') + self.headers(academy=1, accept="application/json", content_disposition='attachment; filename="filename.csv"') - url = reverse_lazy('provisioning:academy_bill_id', kwargs={'bill_id': 1}) + url = reverse_lazy("provisioning:academy_bill_id", kwargs={"bill_id": 1}) response = self.client.put(url) json = response.json() expected = { - 'detail': "You (user: 1) don't have this capability: crud_provisioning_bill for academy 1", - 'status_code': 403, + "detail": "You (user: 1) don't have this capability: crud_provisioning_bill for academy 1", + "status_code": 403, } self.assertEqual(json, expected) @@ -70,39 +71,37 @@ def test_upload_without_capability(self): # Then: should return 404 def test_no_bill(self): - model = self.bc.database.create(user=1, profile_academy=1, role=1, capability='crud_provisioning_bill') + model = self.bc.database.create(user=1, profile_academy=1, role=1, capability="crud_provisioning_bill") self.client.force_authenticate(model.user) self.headers(academy=1) - url = reverse_lazy('provisioning:academy_bill_id', kwargs={'bill_id': 1}) + url = reverse_lazy("provisioning:academy_bill_id", kwargs={"bill_id": 1}) response = self.client.put(url) content = response.json() expected = { - 'detail': 'not-found', - 'status_code': 404, + "detail": "not-found", + "status_code": 404, } self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningBill'), []) + self.assertEqual(self.bc.database.list_of("provisioning.ProvisioningBill"), []) # When: bill # Then: should return 404 def test_bill(self): - model = self.bc.database.create(user=1, - profile_academy=1, - role=1, - capability='crud_provisioning_bill', - provisioning_bill=1) + model = self.bc.database.create( + user=1, profile_academy=1, role=1, capability="crud_provisioning_bill", provisioning_bill=1 + ) self.client.force_authenticate(model.user) self.headers(academy=1) - url = reverse_lazy('provisioning:academy_bill_id', kwargs={'bill_id': 1}) + url = reverse_lazy("provisioning:academy_bill_id", 
kwargs={"bill_id": 1}) response = self.client.put(url) @@ -111,96 +110,104 @@ def test_bill(self): self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningBill'), [ - self.bc.format.to_dict(model.provisioning_bill), - ]) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningBill"), + [ + self.bc.format.to_dict(model.provisioning_bill), + ], + ) # When: bill # Then: should return 404 def test_bill__valid_statuses(self): - statuses = ['DUE', 'DISPUTED', 'IGNORED', 'PENDING'] + statuses = ["DUE", "DISPUTED", "IGNORED", "PENDING"] - model = self.bc.database.create(user=1, - profile_academy=1, - role=1, - capability='crud_provisioning_bill', - provisioning_bill=1) + model = self.bc.database.create( + user=1, profile_academy=1, role=1, capability="crud_provisioning_bill", provisioning_bill=1 + ) self.client.force_authenticate(model.user) self.headers(academy=1) - url = reverse_lazy('provisioning:academy_bill_id', kwargs={'bill_id': 1}) + url = reverse_lazy("provisioning:academy_bill_id", kwargs={"bill_id": 1}) for s in statuses: - data = {'status': s} - response = self.client.put(url, data, format='json') + data = {"status": s} + response = self.client.put(url, data, format="json") content = response.json() expected = put_serializer(model.provisioning_bill, data=data) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningBill'), [{ - **self.bc.format.to_dict(model.provisioning_bill), - **data, - }]) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningBill"), + [ + { + **self.bc.format.to_dict(model.provisioning_bill), + **data, + } + ], + ) # When: change the status, the status is valid # Then: should return 200 def test_bill__valid_statuses__but_status_is_paid_or_error(self): - statuses = ['DUE', 'DISPUTED', 'IGNORED', 'PENDING'] + statuses = ["DUE", "DISPUTED", "IGNORED", "PENDING"] - provisioning_bill = {'status': random.choice(['PAID', 'ERROR'])} + provisioning_bill = {"status": random.choice(["PAID", "ERROR"])} - model = self.bc.database.create(user=1, - profile_academy=1, - role=1, - capability='crud_provisioning_bill', - provisioning_bill=provisioning_bill) + model = self.bc.database.create( + user=1, profile_academy=1, role=1, capability="crud_provisioning_bill", provisioning_bill=provisioning_bill + ) self.client.force_authenticate(model.user) self.headers(academy=1) - url = reverse_lazy('provisioning:academy_bill_id', kwargs={'bill_id': 1}) + url = reverse_lazy("provisioning:academy_bill_id", kwargs={"bill_id": 1}) for s in statuses: - data = {'status': s} - response = self.client.put(url, data, format='json') + data = {"status": s} + response = self.client.put(url, data, format="json") content = response.json() - expected = {'detail': 'readonly-bill-status', 'status_code': 400} + expected = {"detail": "readonly-bill-status", "status_code": 400} self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningBill'), [ - self.bc.format.to_dict(model.provisioning_bill), - ]) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningBill"), + [ + self.bc.format.to_dict(model.provisioning_bill), + ], + ) # When: change the status, but the status is invalid # Then: should return 200 def 
test_bill__invalid_statuses(self): - statuses = ['PAID', 'ERROR'] + statuses = ["PAID", "ERROR"] - model = self.bc.database.create(user=1, - profile_academy=1, - role=1, - capability='crud_provisioning_bill', - provisioning_bill=1) + model = self.bc.database.create( + user=1, profile_academy=1, role=1, capability="crud_provisioning_bill", provisioning_bill=1 + ) self.client.force_authenticate(model.user) self.headers(academy=1) - url = reverse_lazy('provisioning:academy_bill_id', kwargs={'bill_id': 1}) + url = reverse_lazy("provisioning:academy_bill_id", kwargs={"bill_id": 1}) for s in statuses: - data = {'status': s} - response = self.client.put(url, data, format='json') + data = {"status": s} + response = self.client.put(url, data, format="json") content = response.json() - expected = {'detail': 'invalid-bill-status', 'status_code': 400} + expected = {"detail": "invalid-bill-status", "status_code": 400} self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningBill'), [ - self.bc.format.to_dict(model.provisioning_bill), - ]) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningBill"), + [ + self.bc.format.to_dict(model.provisioning_bill), + ], + ) diff --git a/breathecode/provisioning/tests/urls/tests_academy_userconsumption.py b/breathecode/provisioning/tests/urls/tests_academy_userconsumption.py index 3c285e148..d8e92822e 100644 --- a/breathecode/provisioning/tests/urls/tests_academy_userconsumption.py +++ b/breathecode/provisioning/tests/urls/tests_academy_userconsumption.py @@ -1,6 +1,7 @@ """ Test /v1/marketing/upload """ + import csv import hashlib import json @@ -26,70 +27,74 @@ def format_field(x): if x is None: - return '' + return "" return str(x) -HEADER = ','.join([ - 'amount', - 'id', - 'kind.id', - 'kind.product_name', - 'kind.sku', - 'processed_at', - 'quantity', - 'status', - 'username', -]) +HEADER = ",".join( + [ + "amount", + "id", + "kind.id", + "kind.product_name", + "kind.sku", + "processed_at", + "quantity", + "status", + "username", + ] +) def format_csv(provisioning_activity, provisioning_user_kind): - return ','.join([ - format_field(float(provisioning_activity.amount)), - format_field(provisioning_activity.id), - format_field(provisioning_user_kind.id), - format_field(provisioning_user_kind.product_name), - format_field(provisioning_user_kind.sku), - format_field(provisioning_activity.processed_at), - format_field(float(provisioning_activity.quantity)), - format_field(provisioning_activity.status), - format_field(provisioning_activity.username), - ]) + return ",".join( + [ + format_field(float(provisioning_activity.amount)), + format_field(provisioning_activity.id), + format_field(provisioning_user_kind.id), + format_field(provisioning_user_kind.product_name), + format_field(provisioning_user_kind.sku), + format_field(provisioning_activity.processed_at), + format_field(float(provisioning_activity.quantity)), + format_field(provisioning_activity.status), + format_field(provisioning_activity.username), + ] + ) def provisioning_bill_serializer(self, provisioning_bill): return { - 'created_at': self.bc.datetime.to_iso_string(provisioning_bill.created_at), - 'fee': provisioning_bill.fee, - 'id': provisioning_bill.id, - 'paid_at': provisioning_bill.paid_at, - 'status': provisioning_bill.status, - 'status_details': provisioning_bill.status_details, - 'stripe_url': provisioning_bill.stripe_url, - 'total_amount': 
provisioning_bill.total_amount, - 'vendor': provisioning_bill.vendor, + "created_at": self.bc.datetime.to_iso_string(provisioning_bill.created_at), + "fee": provisioning_bill.fee, + "id": provisioning_bill.id, + "paid_at": provisioning_bill.paid_at, + "status": provisioning_bill.status, + "status_details": provisioning_bill.status_details, + "stripe_url": provisioning_bill.stripe_url, + "total_amount": provisioning_bill.total_amount, + "vendor": provisioning_bill.vendor, } def provisioning_consumption_kind_serializer(provisioning_consumption_kind): return { - 'id': provisioning_consumption_kind.id, - 'product_name': provisioning_consumption_kind.product_name, - 'sku': provisioning_consumption_kind.sku, + "id": provisioning_consumption_kind.id, + "product_name": provisioning_consumption_kind.product_name, + "sku": provisioning_consumption_kind.sku, } def get_serializer(self, provisioning_activity, provisioning_consumption_kind): return { # 'bills': [provisioning_bill_serializer(self, x) for x in provisioning_bills], - 'kind': provisioning_consumption_kind_serializer(provisioning_consumption_kind), - 'id': provisioning_activity.id, - 'processed_at': provisioning_activity.processed_at, - 'amount': provisioning_activity.amount, - 'quantity': provisioning_activity.quantity, - 'status': provisioning_activity.status, - 'username': provisioning_activity.username, + "kind": provisioning_consumption_kind_serializer(provisioning_consumption_kind), + "id": provisioning_activity.id, + "processed_at": provisioning_activity.processed_at, + "amount": provisioning_activity.amount, + "quantity": provisioning_activity.quantity, + "status": provisioning_activity.status, + "username": provisioning_activity.username, } @@ -100,13 +105,13 @@ class MarketingTestSuite(ProvisioningTestCase): # Then: should return 401 def test_upload_without_auth(self): - self.headers(accept='application/json', content_disposition='attachment; filename="filename.csv"') + self.headers(accept="application/json", content_disposition='attachment; filename="filename.csv"') - url = reverse_lazy('provisioning:academy_userconsumption') + url = reverse_lazy("provisioning:academy_userconsumption") response = self.client.get(url) json = response.json() - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) @@ -118,16 +123,16 @@ def test_upload_without_capability(self): model = self.bc.database.create(user=1) self.client.force_authenticate(model.user) - self.headers(academy=1, accept='application/json', content_disposition='attachment; filename="filename.csv"') + self.headers(academy=1, accept="application/json", content_disposition='attachment; filename="filename.csv"') - url = reverse_lazy('provisioning:academy_userconsumption') + url = reverse_lazy("provisioning:academy_userconsumption") response = self.client.get(url) json = response.json() expected = { - 'detail': "You (user: 1) don't have this capability: read_provisioning_activity for academy 1", - 'status_code': 403, + "detail": "You (user: 1) don't have this capability: read_provisioning_activity for academy 1", + "status_code": 403, } self.assertEqual(json, expected) @@ -137,53 +142,57 @@ def test_upload_without_capability(self): # Then: Should return empty csv def test_no_activity(self): - model = self.bc.database.create(user=1, profile_academy=1, role=1, 
capability='read_provisioning_activity') + model = self.bc.database.create(user=1, profile_academy=1, role=1, capability="read_provisioning_activity") self.client.force_authenticate(model.user) - self.headers(academy=1, accept='text/csv', content_disposition='attachment; filename="filename.csv"') + self.headers(academy=1, accept="text/csv", content_disposition='attachment; filename="filename.csv"') - url = reverse_lazy('provisioning:academy_userconsumption') + url = reverse_lazy("provisioning:academy_userconsumption") response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - expected = '' + expected = "" self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningUserConsumption'), []) + self.assertEqual(self.bc.database.list_of("provisioning.ProvisioningUserConsumption"), []) # Given: 2 ProvisioningActivity and 1 ProvisioningBill # When: no filters # Then: Should return 2 rows def test__csv__activities(self): - model = self.bc.database.create(user=1, - profile_academy=1, - role=1, - capability='read_provisioning_activity', - provisioning_user_consumption=2, - provisioning_bill=1) + model = self.bc.database.create( + user=1, + profile_academy=1, + role=1, + capability="read_provisioning_activity", + provisioning_user_consumption=2, + provisioning_bill=1, + ) self.client.force_authenticate(model.user) - self.headers(academy=1, accept='text/csv', content_disposition='attachment; filename="filename.csv"') + self.headers(academy=1, accept="text/csv", content_disposition='attachment; filename="filename.csv"') - url = reverse_lazy('provisioning:academy_userconsumption') + url = reverse_lazy("provisioning:academy_userconsumption") response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - expected = '\r\n'.join([ - HEADER, - format_csv(model.provisioning_user_consumption[1], model.provisioning_consumption_kind), - format_csv(model.provisioning_user_consumption[0], model.provisioning_consumption_kind), - '', - ]) + expected = "\r\n".join( + [ + HEADER, + format_csv(model.provisioning_user_consumption[1], model.provisioning_consumption_kind), + format_csv(model.provisioning_user_consumption[0], model.provisioning_consumption_kind), + "", + ] + ) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('provisioning.ProvisioningUserConsumption'), + self.bc.database.list_of("provisioning.ProvisioningUserConsumption"), self.bc.format.to_dict(model.provisioning_user_consumption), ) @@ -192,17 +201,19 @@ def test__csv__activities(self): # Then: Should return 2 rows def test__json__activities(self): - model = self.bc.database.create(user=1, - profile_academy=1, - role=1, - capability='read_provisioning_activity', - provisioning_user_consumption=2, - provisioning_bill=1) + model = self.bc.database.create( + user=1, + profile_academy=1, + role=1, + capability="read_provisioning_activity", + provisioning_user_consumption=2, + provisioning_bill=1, + ) self.client.force_authenticate(model.user) - self.headers(academy=1, accept='application/json', content_disposition='attachment; filename="filename.csv"') + self.headers(academy=1, accept="application/json", content_disposition='attachment; filename="filename.csv"') - url = reverse_lazy('provisioning:academy_userconsumption') + url = reverse_lazy("provisioning:academy_userconsumption") response = self.client.get(url) 
@@ -215,69 +226,73 @@ def test__json__activities(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('provisioning.ProvisioningUserConsumption'), + self.bc.database.list_of("provisioning.ProvisioningUserConsumption"), self.bc.format.to_dict(model.provisioning_user_consumption), ) # Given: compile_lookup was mocked # When: the mock is called # Then: the mock should be called with the correct arguments and does not raise an exception - @patch('breathecode.utils.api_view_extensions.extensions.lookup_extension.compile_lookup', - MagicMock(wraps=lookup_extension.compile_lookup)) + @patch( + "breathecode.utils.api_view_extensions.extensions.lookup_extension.compile_lookup", + MagicMock(wraps=lookup_extension.compile_lookup), + ) def test_lookup_extension(self): - self.bc.request.set_headers(academy=1, accept='application/json') - - model = self.bc.database.create(user=1, - profile_academy=1, - role=1, - capability='read_provisioning_activity', - provisioning_user_consumption=2, - provisioning_bill=1) + self.bc.request.set_headers(academy=1, accept="application/json") + + model = self.bc.database.create( + user=1, + profile_academy=1, + role=1, + capability="read_provisioning_activity", + provisioning_user_consumption=2, + provisioning_bill=1, + ) self.client.force_authenticate(model.user) args, kwargs = self.bc.format.call( - 'en', + "en", strings={ - 'iexact': [ - 'hash', - 'username', - 'status', - 'kind__product_name', - 'kind__sku', + "iexact": [ + "hash", + "username", + "status", + "kind__product_name", + "kind__sku", ], }, datetimes={ - 'gte': ['processed_at'], - 'lte': ['created_at'], # fix it + "gte": ["processed_at"], + "lte": ["created_at"], # fix it }, overwrite={ - 'start': 'processed_at', - 'end': 'created_at', - 'product_name': 'kind__product_name', - 'sku': 'kind__sku', + "start": "processed_at", + "end": "created_at", + "product_name": "kind__product_name", + "sku": "kind__sku", }, ) query = self.bc.format.lookup(*args, **kwargs) - url = reverse_lazy('provisioning:academy_userconsumption') + '?' + self.bc.format.querystring(query) + url = reverse_lazy("provisioning:academy_userconsumption") + "?" 
+ self.bc.format.querystring(query) - self.assertEqual([x for x in query], ['hash', 'username', 'status', 'product_name', 'sku', 'start', 'end']) + self.assertEqual([x for x in query], ["hash", "username", "status", "product_name", "sku", "start", "end"]) response = self.client.get(url) json = response.json() expected = [] - for x in ['overwrite', 'custom_fields']: + for x in ["overwrite", "custom_fields"]: if x in kwargs: del kwargs[x] - for field in ['ids', 'slugs']: + for field in ["ids", "slugs"]: values = kwargs.get(field, tuple()) kwargs[field] = tuple(values) - for field in ['ints', 'strings', 'bools', 'datetimes']: + for field in ["ints", "strings", "bools", "datetimes"]: modes = kwargs.get(field, {}) for mode in modes: if not isinstance(kwargs[field][mode], tuple): @@ -285,39 +300,50 @@ def test_lookup_extension(self): kwargs[field] = frozenset(modes.items()) - self.bc.check.calls(lookup_extension.compile_lookup.call_args_list, [ - call(**kwargs), - ]) + self.bc.check.calls( + lookup_extension.compile_lookup.call_args_list, + [ + call(**kwargs), + ], + ) self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('provisioning.ProvisioningUserConsumption'), + self.bc.database.list_of("provisioning.ProvisioningUserConsumption"), self.bc.format.to_dict(model.provisioning_user_consumption), ) # When: get is called # Then: it's setup properly - @patch.object(APIViewExtensionHandlers, '_spy_extensions', MagicMock()) - @patch.object(APIViewExtensionHandlers, '_spy_extension_arguments', MagicMock()) + @patch.object(APIViewExtensionHandlers, "_spy_extensions", MagicMock()) + @patch.object(APIViewExtensionHandlers, "_spy_extension_arguments", MagicMock()) def test_get__spy_extensions(self): - model = self.bc.database.create(user=1, - profile_academy=1, - role=1, - capability='read_provisioning_activity', - provisioning_user_consumption=2, - provisioning_bill=1) - - self.bc.request.set_headers(academy=1, accept='application/json') + model = self.bc.database.create( + user=1, + profile_academy=1, + role=1, + capability="read_provisioning_activity", + provisioning_user_consumption=2, + provisioning_bill=1, + ) + + self.bc.request.set_headers(academy=1, accept="application/json") self.client.force_authenticate(model.user) - url = reverse_lazy('provisioning:academy_userconsumption') + url = reverse_lazy("provisioning:academy_userconsumption") self.client.get(url) - self.bc.check.calls(APIViewExtensionHandlers._spy_extensions.call_args_list, [ - call(['LanguageExtension', 'LookupExtension', 'SortExtension']), - ]) + self.bc.check.calls( + APIViewExtensionHandlers._spy_extensions.call_args_list, + [ + call(["LanguageExtension", "LookupExtension", "SortExtension"]), + ], + ) - self.bc.check.calls(APIViewExtensionHandlers._spy_extension_arguments.call_args_list, [ - call(sort='-id'), - ]) + self.bc.check.calls( + APIViewExtensionHandlers._spy_extension_arguments.call_args_list, + [ + call(sort="-id"), + ], + ) diff --git a/breathecode/provisioning/tests/urls/tests_admin_upload.py b/breathecode/provisioning/tests/urls/tests_admin_upload.py index dd47598b5..fbcc2036b 100644 --- a/breathecode/provisioning/tests/urls/tests_admin_upload.py +++ b/breathecode/provisioning/tests/urls/tests_admin_upload.py @@ -1,6 +1,7 @@ """ Test /v1/marketing/upload """ + import hashlib import os import random @@ -25,27 +26,27 @@ def datetime_to_iso(date) -> str: - return re.sub(r'\+00:00$', 'Z', date.replace(tzinfo=UTC).isoformat()) + return 
re.sub(r"\+00:00$", "Z", date.replace(tzinfo=UTC).isoformat()) def rigobot_csv(lines=1, data={}): - organizations = ['4Geeks' for _ in range(lines)] + organizations = ["4Geeks" for _ in range(lines)] consumption_period_ids = [random.randint(1, 10) for _ in range(lines)] times = [datetime_to_iso(timezone.now()) for _ in range(lines)] - billing_statuses = ['OPEN' for _ in range(lines)] + billing_statuses = ["OPEN" for _ in range(lines)] total_spent_periods = [(random.random() * 30) + 0.01 for _ in range(lines)] consumption_item_ids = [random.randint(1, 10) for _ in range(lines)] user_ids = [10 for _ in range(lines)] emails = [fake.email() for _ in range(lines)] - consumption_types = ['MESSAGE' for _ in range(lines)] - pricing_types = [random.choice(['INPUT', 'OUTPUT']) for _ in range(lines)] + consumption_types = ["MESSAGE" for _ in range(lines)] + pricing_types = [random.choice(["INPUT", "OUTPUT"]) for _ in range(lines)] total_tokens = [random.randint(1, 100) for _ in range(lines)] total_spents = [] res = [] for i in range(lines): total_token = total_tokens[i] pricing_type = pricing_types[i] - price = 0.04 if pricing_type == 'OUTPUT' else 0.02 + price = 0.04 if pricing_type == "OUTPUT" else 0.02 total_spent = price * total_token while total_spent in res: total_tokens[i] = random.randint(1, 100) @@ -56,37 +57,37 @@ def rigobot_csv(lines=1, data={}): res.append(total_spent) models = [ - random.choice(['gpt-4-turbo', 'gpt-4', 'gpt-4-turbo', 'gpt-4o', 'gpt-3.5-turbo', 'gpt-3.5']) + random.choice(["gpt-4-turbo", "gpt-4", "gpt-4-turbo", "gpt-4o", "gpt-3.5-turbo", "gpt-3.5"]) for _ in range(lines) ] purpose_ids = [random.randint(1, 10) for _ in range(lines)] purpose_slugs = [fake.slug() for _ in range(lines)] - purposes = [' '.join(fake.words()) for _ in range(lines)] + purposes = [" ".join(fake.words()) for _ in range(lines)] github_usernames = [fake.user_name() for _ in range(lines)] created_ats = [datetime_to_iso(timezone.now()) for _ in range(lines)] # dictionary of lists return { - 'organization': organizations, - 'consumption_period_id': consumption_period_ids, - 'consumption_period_start': times, - 'consumption_period_end': times, - 'billing_status': billing_statuses, - 'total_spent_period': total_spent_periods, - 'consumption_item_id': consumption_item_ids, - 'user_id': user_ids, - 'email': emails, - 'consumption_type': consumption_types, - 'pricing_type': pricing_types, - 'total_spent': total_spents, - 'total_tokens': total_tokens, - 'model': models, - 'purpose_id': purpose_ids, - 'purpose_slug': purpose_slugs, - 'purpose': purposes, - 'created_at': created_ats, - 'github_username': github_usernames, + "organization": organizations, + "consumption_period_id": consumption_period_ids, + "consumption_period_start": times, + "consumption_period_end": times, + "billing_status": billing_statuses, + "total_spent_period": total_spent_periods, + "consumption_item_id": consumption_item_ids, + "user_id": user_ids, + "email": emails, + "consumption_type": consumption_types, + "pricing_type": pricing_types, + "total_spent": total_spents, + "total_tokens": total_tokens, + "model": models, + "purpose_id": purpose_ids, + "purpose_slug": purpose_slugs, + "purpose": purposes, + "created_at": created_ats, + "github_username": github_usernames, **data, } @@ -96,7 +97,7 @@ class MarketingTestSuite(ProvisioningTestCase): def setUp(self): super().setUp() - self.file_name = '' + self.file_name = "" def tearDown(self): if self.file_name: @@ -109,11 +110,11 @@ def test_upload_without_auth(self): 
self.headers(content_disposition='attachment; filename="filename.csv"') - url = reverse_lazy('provisioning:admin_upload') + url = reverse_lazy("provisioning:admin_upload") data = {} response = self.client.put(url, data) json = response.json() - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) @@ -125,43 +126,47 @@ def test_upload_without_capability(self): self.headers(academy=1, content_disposition='attachment; filename="filename.csv"') - url = reverse_lazy('provisioning:admin_upload') + url = reverse_lazy("provisioning:admin_upload") self.generate_models(authenticate=True) data = {} response = self.client.put(url, data) json = response.json() - expected = {'detail': 'without-permission', 'status_code': 403} + expected = {"detail": "without-permission", "status_code": 403} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) # When: auth and no files # Then: should return empty list - @patch.multiple('breathecode.services.google_cloud.Storage', - __init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) - @patch.multiple('breathecode.services.google_cloud.File', - __init__=MagicMock(return_value=None), - bucket=PropertyMock(), - file_name=PropertyMock(), - upload=MagicMock(), - url=MagicMock(return_value='https://storage.cloud.google.com/media-breathecode/hardcoded_url'), - create=True) + @patch.multiple( + "breathecode.services.google_cloud.Storage", + __init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, + ) + @patch.multiple( + "breathecode.services.google_cloud.File", + __init__=MagicMock(return_value=None), + bucket=PropertyMock(), + file_name=PropertyMock(), + upload=MagicMock(), + url=MagicMock(return_value="https://storage.cloud.google.com/media-breathecode/hardcoded_url"), + create=True, + ) def test_no_files(self): from breathecode.services.google_cloud import File, Storage self.headers(academy=1) - model = self.generate_models(authenticate=True, - group=1, - permission={'codename': 'upload_provisioning_activity'}) - url = reverse_lazy('provisioning:admin_upload') + model = self.generate_models( + authenticate=True, group=1, permission={"codename": "upload_provisioning_activity"} + ) + url = reverse_lazy("provisioning:admin_upload") response = self.client.put(url, {}) json = response.json() - self.assertEqual(json, {'success': [], 'failure': []}) + self.assertEqual(json, {"success": [], "failure": []}) self.assertEqual(response.status_code, status.HTTP_207_MULTI_STATUS) self.assertEqual(Storage.__init__.call_args_list, []) @@ -171,53 +176,61 @@ def test_no_files(self): # When: auth and bad file type # Then: should return empty list - @patch('breathecode.marketing.tasks.create_form_entry.delay', MagicMock()) - @patch.multiple('breathecode.services.google_cloud.Storage', - __init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) - @patch.multiple('breathecode.services.google_cloud.File', - __init__=MagicMock(return_value=None), - bucket=PropertyMock(), - file_name=PropertyMock(), - upload=MagicMock(), - url=MagicMock(return_value='https://storage.cloud.google.com/media-breathecode/hardcoded_url'), - create=True) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + 
@patch("breathecode.marketing.tasks.create_form_entry.delay", MagicMock()) + @patch.multiple( + "breathecode.services.google_cloud.Storage", + __init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, + ) + @patch.multiple( + "breathecode.services.google_cloud.File", + __init__=MagicMock(return_value=None), + bucket=PropertyMock(), + file_name=PropertyMock(), + upload=MagicMock(), + url=MagicMock(return_value="https://storage.cloud.google.com/media-breathecode/hardcoded_url"), + create=True, + ) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_bad_file_type(self): from breathecode.marketing.tasks import create_form_entry from breathecode.services.google_cloud import File, Storage self.headers(academy=1) - model = self.generate_models(authenticate=True, - group=1, - permission={'codename': 'upload_provisioning_activity'}) + model = self.generate_models( + authenticate=True, group=1, permission={"codename": "upload_provisioning_activity"} + ) - url = reverse_lazy('provisioning:admin_upload') + url = reverse_lazy("provisioning:admin_upload") - file = tempfile.NamedTemporaryFile(suffix='.txt', delete=False, mode='w+') + file = tempfile.NamedTemporaryFile(suffix=".txt", delete=False, mode="w+") self.file_name = file.name - file.write('Hello world') + file.write("Hello world") - with open(file.name, 'rb') as data: + with open(file.name, "rb") as data: hash = hashlib.sha256(data.read()).hexdigest() - with open(file.name, 'rb') as data: - response = self.client.put(url, {'name': file.name, 'file': data}) + with open(file.name, "rb") as data: + response = self.client.put(url, {"name": file.name, "file": data}) json = response.json() expected = { - 'failure': [{ - 'detail': 'bad-format', - 'resources': [{ - 'display_field': 'index', - 'display_value': 1, - }], - 'status_code': 400, - }], - 'success': [], + "failure": [ + { + "detail": "bad-format", + "resources": [ + { + "display_field": "index", + "display_value": 1, + } + ], + "status_code": 400, + } + ], + "success": [], } self.assertEqual(json, expected) @@ -231,32 +244,36 @@ def test_bad_file_type(self): # When: auth and bad files # Then: should return empty list - @patch('breathecode.marketing.tasks.create_form_entry.delay', MagicMock()) - @patch.multiple('breathecode.services.google_cloud.Storage', - __init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) - @patch.multiple('breathecode.services.google_cloud.File', - __init__=MagicMock(return_value=None), - bucket=PropertyMock(), - file_name=PropertyMock(), - upload=MagicMock(), - url=MagicMock(return_value='https://storage.cloud.google.com/media-breathecode/hardcoded_url'), - create=True) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + @patch("breathecode.marketing.tasks.create_form_entry.delay", MagicMock()) + @patch.multiple( + "breathecode.services.google_cloud.Storage", + __init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, + ) + @patch.multiple( + "breathecode.services.google_cloud.File", + __init__=MagicMock(return_value=None), + bucket=PropertyMock(), + file_name=PropertyMock(), + upload=MagicMock(), + url=MagicMock(return_value="https://storage.cloud.google.com/media-breathecode/hardcoded_url"), + create=True, + ) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_bad_format(self): from breathecode.marketing.tasks import create_form_entry from breathecode.services.google_cloud import File, Storage self.headers(academy=1) - model = 
self.generate_models(authenticate=True, - group=1, - permission={'codename': 'upload_provisioning_activity'}) + model = self.generate_models( + authenticate=True, group=1, permission={"codename": "upload_provisioning_activity"} + ) - url = reverse_lazy('provisioning:admin_upload') + url = reverse_lazy("provisioning:admin_upload") - file = tempfile.NamedTemporaryFile(suffix='.csv', delete=False, mode='w+') + file = tempfile.NamedTemporaryFile(suffix=".csv", delete=False, mode="w+") # list of name, degree, score first_names = [self.bc.fake.first_name() for _ in range(0, 3)] @@ -268,12 +285,12 @@ def test_bad_format(self): # dictionary of lists obj = { - 'first_name': first_names, - 'last_name': last_names, - 'email': emails, - 'location': locations, - 'phone': phone_numbers, - 'language': languages, + "first_name": first_names, + "last_name": last_names, + "email": emails, + "location": locations, + "phone": phone_numbers, + "language": languages, } df = pd.DataFrame(obj) @@ -284,23 +301,27 @@ def test_bad_format(self): df.to_csv(file.name) - with open(file.name, 'rb') as data: + with open(file.name, "rb") as data: hash = hashlib.sha256(data.read()).hexdigest() - with open(file.name, 'rb') as data: - response = self.client.put(url, {'name': file.name, 'file': data}) + with open(file.name, "rb") as data: + response = self.client.put(url, {"name": file.name, "file": data}) json = response.json() expected = { - 'failure': [{ - 'detail': 'csv-from-unknown-source', - 'resources': [{ - 'display_field': 'index', - 'display_value': 1, - }], - 'status_code': 400, - }], - 'success': [], + "failure": [ + { + "detail": "csv-from-unknown-source", + "resources": [ + { + "display_field": "index", + "display_value": 1, + } + ], + "status_code": 400, + } + ], + "success": [], } self.assertEqual(json, expected) @@ -314,36 +335,40 @@ def test_bad_format(self): # When: auth and file with codespaces format # Then: should return a 201 - @patch('breathecode.marketing.tasks.create_form_entry.delay', MagicMock()) - @patch.multiple('breathecode.services.google_cloud.Storage', - __init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) - @patch.multiple('breathecode.services.google_cloud.File', - __init__=MagicMock(return_value=None), - bucket=PropertyMock(), - file_name=PropertyMock(), - upload=MagicMock(), - exists=MagicMock(return_value=False), - url=MagicMock(return_value='https://storage.cloud.google.com/media-breathecode/hardcoded_url'), - create=True) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.provisioning.tasks.upload.delay', MagicMock()) + @patch("breathecode.marketing.tasks.create_form_entry.delay", MagicMock()) + @patch.multiple( + "breathecode.services.google_cloud.Storage", + __init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, + ) + @patch.multiple( + "breathecode.services.google_cloud.File", + __init__=MagicMock(return_value=None), + bucket=PropertyMock(), + file_name=PropertyMock(), + upload=MagicMock(), + exists=MagicMock(return_value=False), + url=MagicMock(return_value="https://storage.cloud.google.com/media-breathecode/hardcoded_url"), + create=True, + ) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.provisioning.tasks.upload.delay", MagicMock()) def test_codespaces(self): from breathecode.services.google_cloud import File, Storage self.headers(academy=1) - model = self.generate_models(authenticate=True, - group=1, - permission={'codename': 
'upload_provisioning_activity'}) + model = self.generate_models( + authenticate=True, group=1, permission={"codename": "upload_provisioning_activity"} + ) - url = reverse_lazy('provisioning:admin_upload') + url = reverse_lazy("provisioning:admin_upload") - file = tempfile.NamedTemporaryFile(suffix='.csv', delete=False, mode='w+') + file = tempfile.NamedTemporaryFile(suffix=".csv", delete=False, mode="w+") usernames = [self.bc.fake.slug() for _ in range(0, 3)] - dates = [self.bc.datetime.to_iso_string(self.bc.datetime.now()).split('T')[0] for _ in range(0, 3)] + dates = [self.bc.datetime.to_iso_string(self.bc.datetime.now()).split("T")[0] for _ in range(0, 3)] products = [self.bc.fake.name() for _ in range(0, 3)] skus = [self.bc.fake.slug() for _ in range(0, 3)] quantities = [random.randint(1, 10) for _ in range(0, 3)] @@ -354,15 +379,15 @@ def test_codespaces(self): # dictionary of lists obj = { - 'Username': usernames, - 'Date': dates, - 'Product': products, - 'SKU': skus, - 'Quantity': quantities, - 'Unit Type': unit_types, - 'Price Per Unit ($)': price_per_units, - 'Multiplier': multipliers, - 'Owner': owners, + "Username": usernames, + "Date": dates, + "Product": products, + "SKU": skus, + "Quantity": quantities, + "Unit Type": unit_types, + "Price Per Unit ($)": price_per_units, + "Multiplier": multipliers, + "Owner": owners, } df = pd.DataFrame.from_dict(obj) @@ -370,25 +395,25 @@ def test_codespaces(self): df.to_csv(file.name) - with open(file.name, 'rb') as data: + with open(file.name, "rb") as data: hash = hashlib.sha256(data.read()).hexdigest() - with open(file.name, 'rb') as data: - response = self.client.put(url, {'name': file.name, 'file': data}) + with open(file.name, "rb") as data: + response = self.client.put(url, {"name": file.name, "file": data}) json = response.json() expected = { - 'failure': [], - 'success': [ + "failure": [], + "success": [ { - 'resources': [ + "resources": [ { - 'display_field': 'index', - 'display_value': 1, - 'pk': hash, + "display_field": "index", + "display_value": 1, + "pk": hash, }, ], - 'status_code': 201, + "status_code": 201, }, ], } @@ -396,13 +421,16 @@ def test_codespaces(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_207_MULTI_STATUS) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningBill'), []) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningUserConsumption'), []) + self.assertEqual(self.bc.database.list_of("provisioning.ProvisioningBill"), []) + self.assertEqual(self.bc.database.list_of("provisioning.ProvisioningUserConsumption"), []) self.assertEqual(Storage.__init__.call_args_list, [call()]) - self.assertEqual(File.__init__.call_args_list, [ - call(Storage().client.bucket('bucket'), hash), - ]) + self.assertEqual( + File.__init__.call_args_list, + [ + call(Storage().client.bucket("bucket"), hash), + ], + ) args, kwargs = File.upload.call_args_list[0] @@ -410,43 +438,47 @@ def test_codespaces(self): self.assertEqual(len(args), 1) self.assertEqual(args[0].name, os.path.basename(file.name)) - self.assertEqual(kwargs, {'content_type': 'text/csv'}) + self.assertEqual(kwargs, {"content_type": "text/csv"}) self.assertEqual(File.url.call_args_list, []) self.bc.check.calls(tasks.upload.delay.call_args_list, [call(hash, total_pages=1)]) # When: auth and file with codespaces format, file exists # Then: should return a 200 - @patch('breathecode.marketing.tasks.create_form_entry.delay', MagicMock()) - @patch.multiple('breathecode.services.google_cloud.Storage', 
- __init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) - @patch.multiple('breathecode.services.google_cloud.File', - __init__=MagicMock(return_value=None), - bucket=PropertyMock(), - file_name=PropertyMock(), - upload=MagicMock(), - exists=MagicMock(return_value=True), - url=MagicMock(return_value='https://storage.cloud.google.com/media-breathecode/hardcoded_url'), - create=True) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.provisioning.tasks.upload.delay', MagicMock()) + @patch("breathecode.marketing.tasks.create_form_entry.delay", MagicMock()) + @patch.multiple( + "breathecode.services.google_cloud.Storage", + __init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, + ) + @patch.multiple( + "breathecode.services.google_cloud.File", + __init__=MagicMock(return_value=None), + bucket=PropertyMock(), + file_name=PropertyMock(), + upload=MagicMock(), + exists=MagicMock(return_value=True), + url=MagicMock(return_value="https://storage.cloud.google.com/media-breathecode/hardcoded_url"), + create=True, + ) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.provisioning.tasks.upload.delay", MagicMock()) def test_codespaces__update(self): from breathecode.services.google_cloud import File, Storage self.headers(academy=1) - model = self.generate_models(authenticate=True, - group=1, - permission={'codename': 'upload_provisioning_activity'}) + model = self.generate_models( + authenticate=True, group=1, permission={"codename": "upload_provisioning_activity"} + ) - url = reverse_lazy('provisioning:admin_upload') + url = reverse_lazy("provisioning:admin_upload") - file = tempfile.NamedTemporaryFile(suffix='.csv', delete=False, mode='w+') + file = tempfile.NamedTemporaryFile(suffix=".csv", delete=False, mode="w+") usernames = [self.bc.fake.slug() for _ in range(0, 3)] - dates = [self.bc.datetime.to_iso_string(self.bc.datetime.now()).split('T')[0] for _ in range(0, 3)] + dates = [self.bc.datetime.to_iso_string(self.bc.datetime.now()).split("T")[0] for _ in range(0, 3)] products = [self.bc.fake.name() for _ in range(0, 3)] skus = [self.bc.fake.slug() for _ in range(0, 3)] quantities = [random.randint(1, 10) for _ in range(0, 3)] @@ -457,15 +489,15 @@ def test_codespaces__update(self): # dictionary of lists obj = { - 'Username': usernames, - 'Date': dates, - 'Product': products, - 'SKU': skus, - 'Quantity': quantities, - 'Unit Type': unit_types, - 'Price Per Unit ($)': price_per_units, - 'Multiplier': multipliers, - 'Owner': owners, + "Username": usernames, + "Date": dates, + "Product": products, + "SKU": skus, + "Quantity": quantities, + "Unit Type": unit_types, + "Price Per Unit ($)": price_per_units, + "Multiplier": multipliers, + "Owner": owners, } df = pd.DataFrame.from_dict(obj) @@ -473,25 +505,25 @@ def test_codespaces__update(self): df.to_csv(file.name) - with open(file.name, 'rb') as data: + with open(file.name, "rb") as data: hash = hashlib.sha256(data.read()).hexdigest() - with open(file.name, 'rb') as data: - response = self.client.put(url, {'name': file.name, 'file': data}) + with open(file.name, "rb") as data: + response = self.client.put(url, {"name": file.name, "file": data}) json = response.json() expected = { - 'failure': [], - 'success': [ + "failure": [], + "success": [ { - 'resources': [ + "resources": [ { - 'display_field': 'index', - 'display_value': 1, - 'pk': hash, + "display_field": "index", + "display_value": 1, + "pk": hash, }, ], - 
'status_code': 200, + "status_code": 200, }, ], } @@ -499,13 +531,16 @@ def test_codespaces__update(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_207_MULTI_STATUS) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningBill'), []) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningUserConsumption'), []) + self.assertEqual(self.bc.database.list_of("provisioning.ProvisioningBill"), []) + self.assertEqual(self.bc.database.list_of("provisioning.ProvisioningUserConsumption"), []) self.assertEqual(Storage.__init__.call_args_list, [call()]) - self.assertEqual(File.__init__.call_args_list, [ - call(Storage().client.bucket('bucket'), hash), - ]) + self.assertEqual( + File.__init__.call_args_list, + [ + call(Storage().client.bucket("bucket"), hash), + ], + ) self.assertEqual(File.upload.call_args_list, []) self.assertEqual(File.url.call_args_list, []) @@ -513,33 +548,37 @@ def test_codespaces__update(self): # When: auth and file with gitpod format # Then: should return a 201 - @patch('breathecode.marketing.tasks.create_form_entry.delay', MagicMock()) - @patch.multiple('breathecode.services.google_cloud.Storage', - __init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) - @patch.multiple('breathecode.services.google_cloud.File', - __init__=MagicMock(return_value=None), - bucket=PropertyMock(), - file_name=PropertyMock(), - upload=MagicMock(), - exists=MagicMock(return_value=False), - url=MagicMock(return_value='https://storage.cloud.google.com/media-breathecode/hardcoded_url'), - create=True) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.provisioning.tasks.upload.delay', MagicMock()) + @patch("breathecode.marketing.tasks.create_form_entry.delay", MagicMock()) + @patch.multiple( + "breathecode.services.google_cloud.Storage", + __init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, + ) + @patch.multiple( + "breathecode.services.google_cloud.File", + __init__=MagicMock(return_value=None), + bucket=PropertyMock(), + file_name=PropertyMock(), + upload=MagicMock(), + exists=MagicMock(return_value=False), + url=MagicMock(return_value="https://storage.cloud.google.com/media-breathecode/hardcoded_url"), + create=True, + ) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.provisioning.tasks.upload.delay", MagicMock()) def test_gitpod(self): from breathecode.services.google_cloud import File, Storage self.headers(academy=1) - model = self.generate_models(authenticate=True, - group=1, - permission={'codename': 'upload_provisioning_activity'}) + model = self.generate_models( + authenticate=True, group=1, permission={"codename": "upload_provisioning_activity"} + ) - url = reverse_lazy('provisioning:admin_upload') + url = reverse_lazy("provisioning:admin_upload") - file = tempfile.NamedTemporaryFile(suffix='.csv', delete=False, mode='w+') + file = tempfile.NamedTemporaryFile(suffix=".csv", delete=False, mode="w+") ids = [random.randint(1, 10) for _ in range(0, 3)] credit_cents = [random.randint(1, 10000) for _ in range(0, 3)] @@ -547,18 +586,18 @@ def test_gitpod(self): kinds = [self.bc.fake.slug() for _ in range(0, 3)] usernames = [self.bc.fake.slug() for _ in range(0, 3)] contextURLs = [ - f'https://github.com/{username}/{self.bc.fake.slug()}/tree/{self.bc.fake.slug()}/' for username in usernames + f"https://github.com/{username}/{self.bc.fake.slug()}/tree/{self.bc.fake.slug()}/" for username in 
usernames ] # dictionary of lists obj = { - 'id': ids, - 'credits': credit_cents, - 'startTime': effective_times, - 'endTime': effective_times, - 'kind': kinds, - 'userName': usernames, - 'contextURL': contextURLs, + "id": ids, + "credits": credit_cents, + "startTime": effective_times, + "endTime": effective_times, + "kind": kinds, + "userName": usernames, + "contextURL": contextURLs, } df = pd.DataFrame.from_dict(obj) @@ -566,25 +605,25 @@ def test_gitpod(self): df.to_csv(file.name) - with open(file.name, 'rb') as data: + with open(file.name, "rb") as data: hash = hashlib.sha256(data.read()).hexdigest() - with open(file.name, 'rb') as data: - response = self.client.put(url, {'name': file.name, 'file': data}) + with open(file.name, "rb") as data: + response = self.client.put(url, {"name": file.name, "file": data}) j = response.json() expected = { - 'failure': [], - 'success': [ + "failure": [], + "success": [ { - 'resources': [ + "resources": [ { - 'display_field': 'index', - 'display_value': 1, - 'pk': hash, + "display_field": "index", + "display_value": 1, + "pk": hash, }, ], - 'status_code': 201, + "status_code": 201, }, ], } @@ -592,13 +631,16 @@ def test_gitpod(self): self.assertEqual(j, expected) self.assertEqual(response.status_code, status.HTTP_207_MULTI_STATUS) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningBill'), []) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningUserConsumption'), []) + self.assertEqual(self.bc.database.list_of("provisioning.ProvisioningBill"), []) + self.assertEqual(self.bc.database.list_of("provisioning.ProvisioningUserConsumption"), []) self.assertEqual(Storage.__init__.call_args_list, [call()]) - self.assertEqual(File.__init__.call_args_list, [ - call(Storage().client.bucket('bucket'), hash), - ]) + self.assertEqual( + File.__init__.call_args_list, + [ + call(Storage().client.bucket("bucket"), hash), + ], + ) args, kwargs = File.upload.call_args_list[0] @@ -606,7 +648,7 @@ def test_gitpod(self): self.assertEqual(len(args), 1) self.assertEqual(args[0].name, os.path.basename(file.name)) - self.assertEqual(kwargs, {'content_type': 'text/csv'}) + self.assertEqual(kwargs, {"content_type": "text/csv"}) self.assertEqual(File.url.call_args_list, []) @@ -614,33 +656,37 @@ def test_gitpod(self): # When: auth and file with gitpod format, file exists # Then: should return a 200 - @patch('breathecode.marketing.tasks.create_form_entry.delay', MagicMock()) - @patch.multiple('breathecode.services.google_cloud.Storage', - __init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) - @patch.multiple('breathecode.services.google_cloud.File', - __init__=MagicMock(return_value=None), - bucket=PropertyMock(), - file_name=PropertyMock(), - upload=MagicMock(), - exists=MagicMock(return_value=True), - url=MagicMock(return_value='https://storage.cloud.google.com/media-breathecode/hardcoded_url'), - create=True) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.provisioning.tasks.upload.delay', MagicMock()) + @patch("breathecode.marketing.tasks.create_form_entry.delay", MagicMock()) + @patch.multiple( + "breathecode.services.google_cloud.Storage", + __init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, + ) + @patch.multiple( + "breathecode.services.google_cloud.File", + __init__=MagicMock(return_value=None), + bucket=PropertyMock(), + file_name=PropertyMock(), + upload=MagicMock(), + exists=MagicMock(return_value=True), + 
url=MagicMock(return_value="https://storage.cloud.google.com/media-breathecode/hardcoded_url"), + create=True, + ) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.provisioning.tasks.upload.delay", MagicMock()) def test_gitpod__update(self): from breathecode.services.google_cloud import File, Storage self.headers(academy=1) - model = self.generate_models(authenticate=True, - group=1, - permission={'codename': 'upload_provisioning_activity'}) + model = self.generate_models( + authenticate=True, group=1, permission={"codename": "upload_provisioning_activity"} + ) - url = reverse_lazy('provisioning:admin_upload') + url = reverse_lazy("provisioning:admin_upload") - file = tempfile.NamedTemporaryFile(suffix='.csv', delete=False, mode='w+') + file = tempfile.NamedTemporaryFile(suffix=".csv", delete=False, mode="w+") ids = [random.randint(1, 10) for _ in range(0, 3)] credit_cents = [random.randint(1, 10000) for _ in range(0, 3)] @@ -648,18 +694,18 @@ def test_gitpod__update(self): kinds = [self.bc.fake.slug() for _ in range(0, 3)] usernames = [self.bc.fake.slug() for _ in range(0, 3)] contextURLs = [ - f'https://github.com/{username}/{self.bc.fake.slug()}/tree/{self.bc.fake.slug()}/' for username in usernames + f"https://github.com/{username}/{self.bc.fake.slug()}/tree/{self.bc.fake.slug()}/" for username in usernames ] # dictionary of lists obj = { - 'id': ids, - 'credits': credit_cents, - 'startTime': effective_times, - 'endTime': effective_times, - 'kind': kinds, - 'userName': usernames, - 'contextURL': contextURLs, + "id": ids, + "credits": credit_cents, + "startTime": effective_times, + "endTime": effective_times, + "kind": kinds, + "userName": usernames, + "contextURL": contextURLs, } df = pd.DataFrame.from_dict(obj) @@ -667,25 +713,25 @@ def test_gitpod__update(self): df.to_csv(file.name) - with open(file.name, 'rb') as data: + with open(file.name, "rb") as data: hash = hashlib.sha256(data.read()).hexdigest() - with open(file.name, 'rb') as data: - response = self.client.put(url, {'name': file.name, 'file': data}) + with open(file.name, "rb") as data: + response = self.client.put(url, {"name": file.name, "file": data}) j = response.json() expected = { - 'failure': [], - 'success': [ + "failure": [], + "success": [ { - 'resources': [ + "resources": [ { - 'display_field': 'index', - 'display_value': 1, - 'pk': hash, + "display_field": "index", + "display_value": 1, + "pk": hash, }, ], - 'status_code': 200, + "status_code": 200, }, ], } @@ -693,13 +739,16 @@ def test_gitpod__update(self): self.assertEqual(j, expected) self.assertEqual(response.status_code, status.HTTP_207_MULTI_STATUS) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningBill'), []) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningUserConsumption'), []) + self.assertEqual(self.bc.database.list_of("provisioning.ProvisioningBill"), []) + self.assertEqual(self.bc.database.list_of("provisioning.ProvisioningUserConsumption"), []) self.assertEqual(Storage.__init__.call_args_list, [call()]) - self.assertEqual(File.__init__.call_args_list, [ - call(Storage().client.bucket('bucket'), hash), - ]) + self.assertEqual( + File.__init__.call_args_list, + [ + call(Storage().client.bucket("bucket"), hash), + ], + ) self.assertEqual(File.upload.call_args_list, []) self.assertEqual(File.url.call_args_list, []) @@ -707,33 +756,37 @@ def test_gitpod__update(self): # When: auth and file with gitpod format # Then: should return a 201 - 
@patch('breathecode.marketing.tasks.create_form_entry.delay', MagicMock()) - @patch.multiple('breathecode.services.google_cloud.Storage', - __init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) - @patch.multiple('breathecode.services.google_cloud.File', - __init__=MagicMock(return_value=None), - bucket=PropertyMock(), - file_name=PropertyMock(), - upload=MagicMock(), - exists=MagicMock(return_value=False), - url=MagicMock(return_value='https://storage.cloud.google.com/media-breathecode/hardcoded_url'), - create=True) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.provisioning.tasks.upload.delay', MagicMock()) + @patch("breathecode.marketing.tasks.create_form_entry.delay", MagicMock()) + @patch.multiple( + "breathecode.services.google_cloud.Storage", + __init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, + ) + @patch.multiple( + "breathecode.services.google_cloud.File", + __init__=MagicMock(return_value=None), + bucket=PropertyMock(), + file_name=PropertyMock(), + upload=MagicMock(), + exists=MagicMock(return_value=False), + url=MagicMock(return_value="https://storage.cloud.google.com/media-breathecode/hardcoded_url"), + create=True, + ) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.provisioning.tasks.upload.delay", MagicMock()) def test_rigobot(self): from breathecode.services.google_cloud import File, Storage self.headers(academy=1) - model = self.generate_models(authenticate=True, - group=1, - permission={'codename': 'upload_provisioning_activity'}) + model = self.generate_models( + authenticate=True, group=1, permission={"codename": "upload_provisioning_activity"} + ) - url = reverse_lazy('provisioning:admin_upload') + url = reverse_lazy("provisioning:admin_upload") - file = tempfile.NamedTemporaryFile(suffix='.csv', delete=False, mode='w+') + file = tempfile.NamedTemporaryFile(suffix=".csv", delete=False, mode="w+") # dictionary of lists obj = rigobot_csv(lines=3, data={}) @@ -743,25 +796,25 @@ def test_rigobot(self): df.to_csv(file.name) - with open(file.name, 'rb') as data: + with open(file.name, "rb") as data: hash = hashlib.sha256(data.read()).hexdigest() - with open(file.name, 'rb') as data: - response = self.client.put(url, {'name': file.name, 'file': data}) + with open(file.name, "rb") as data: + response = self.client.put(url, {"name": file.name, "file": data}) j = response.json() expected = { - 'failure': [], - 'success': [ + "failure": [], + "success": [ { - 'resources': [ + "resources": [ { - 'display_field': 'index', - 'display_value': 1, - 'pk': hash, + "display_field": "index", + "display_value": 1, + "pk": hash, }, ], - 'status_code': 201, + "status_code": 201, }, ], } @@ -769,13 +822,16 @@ def test_rigobot(self): self.assertEqual(j, expected) self.assertEqual(response.status_code, status.HTTP_207_MULTI_STATUS) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningBill'), []) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningUserConsumption'), []) + self.assertEqual(self.bc.database.list_of("provisioning.ProvisioningBill"), []) + self.assertEqual(self.bc.database.list_of("provisioning.ProvisioningUserConsumption"), []) self.assertEqual(Storage.__init__.call_args_list, [call()]) - self.assertEqual(File.__init__.call_args_list, [ - call(Storage().client.bucket('bucket'), hash), - ]) + self.assertEqual( + File.__init__.call_args_list, + [ + call(Storage().client.bucket("bucket"), hash), + ], + ) 
args, kwargs = File.upload.call_args_list[0] @@ -783,7 +839,7 @@ def test_rigobot(self): self.assertEqual(len(args), 1) self.assertEqual(args[0].name, os.path.basename(file.name)) - self.assertEqual(kwargs, {'content_type': 'text/csv'}) + self.assertEqual(kwargs, {"content_type": "text/csv"}) self.assertEqual(File.url.call_args_list, []) @@ -791,33 +847,37 @@ def test_rigobot(self): # When: auth and file with gitpod format, file exists # Then: should return a 200 - @patch('breathecode.marketing.tasks.create_form_entry.delay', MagicMock()) - @patch.multiple('breathecode.services.google_cloud.Storage', - __init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) - @patch.multiple('breathecode.services.google_cloud.File', - __init__=MagicMock(return_value=None), - bucket=PropertyMock(), - file_name=PropertyMock(), - upload=MagicMock(), - exists=MagicMock(return_value=True), - url=MagicMock(return_value='https://storage.cloud.google.com/media-breathecode/hardcoded_url'), - create=True) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - @patch('breathecode.provisioning.tasks.upload.delay', MagicMock()) + @patch("breathecode.marketing.tasks.create_form_entry.delay", MagicMock()) + @patch.multiple( + "breathecode.services.google_cloud.Storage", + __init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, + ) + @patch.multiple( + "breathecode.services.google_cloud.File", + __init__=MagicMock(return_value=None), + bucket=PropertyMock(), + file_name=PropertyMock(), + upload=MagicMock(), + exists=MagicMock(return_value=True), + url=MagicMock(return_value="https://storage.cloud.google.com/media-breathecode/hardcoded_url"), + create=True, + ) + @patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + @patch("breathecode.provisioning.tasks.upload.delay", MagicMock()) def test_rigobot__update(self): from breathecode.services.google_cloud import File, Storage self.headers(academy=1) - model = self.generate_models(authenticate=True, - group=1, - permission={'codename': 'upload_provisioning_activity'}) + model = self.generate_models( + authenticate=True, group=1, permission={"codename": "upload_provisioning_activity"} + ) - url = reverse_lazy('provisioning:admin_upload') + url = reverse_lazy("provisioning:admin_upload") - file = tempfile.NamedTemporaryFile(suffix='.csv', delete=False, mode='w+') + file = tempfile.NamedTemporaryFile(suffix=".csv", delete=False, mode="w+") # dictionary of lists obj = rigobot_csv(lines=3, data={}) @@ -827,25 +887,25 @@ def test_rigobot__update(self): df.to_csv(file.name) - with open(file.name, 'rb') as data: + with open(file.name, "rb") as data: hash = hashlib.sha256(data.read()).hexdigest() - with open(file.name, 'rb') as data: - response = self.client.put(url, {'name': file.name, 'file': data}) + with open(file.name, "rb") as data: + response = self.client.put(url, {"name": file.name, "file": data}) j = response.json() expected = { - 'failure': [], - 'success': [ + "failure": [], + "success": [ { - 'resources': [ + "resources": [ { - 'display_field': 'index', - 'display_value': 1, - 'pk': hash, + "display_field": "index", + "display_value": 1, + "pk": hash, }, ], - 'status_code': 200, + "status_code": 200, }, ], } @@ -853,13 +913,16 @@ def test_rigobot__update(self): self.assertEqual(j, expected) self.assertEqual(response.status_code, status.HTTP_207_MULTI_STATUS) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningBill'), []) - 
self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningUserConsumption'), []) + self.assertEqual(self.bc.database.list_of("provisioning.ProvisioningBill"), []) + self.assertEqual(self.bc.database.list_of("provisioning.ProvisioningUserConsumption"), []) self.assertEqual(Storage.__init__.call_args_list, [call()]) - self.assertEqual(File.__init__.call_args_list, [ - call(Storage().client.bucket('bucket'), hash), - ]) + self.assertEqual( + File.__init__.call_args_list, + [ + call(Storage().client.bucket("bucket"), hash), + ], + ) self.assertEqual(File.upload.call_args_list, []) self.assertEqual(File.url.call_args_list, []) diff --git a/breathecode/provisioning/tests/urls/tests_bill_html.py b/breathecode/provisioning/tests/urls/tests_bill_html.py index 3562eddaf..da98a6f51 100644 --- a/breathecode/provisioning/tests/urls/tests_bill_html.py +++ b/breathecode/provisioning/tests/urls/tests_bill_html.py @@ -1,6 +1,7 @@ """ Test cases for /academy/:id/member/:id """ + import os from django.template import loader @@ -16,39 +17,39 @@ # IMPORTANT: the loader.render_to_string in a function is inside of function render def render_successfully(provisioning_bills=[], token=None, data={}): request = None - APP_URL = os.getenv('APP_URL', '') + APP_URL = os.getenv("APP_URL", "") - template = loader.get_template('provisioning_bills.html') + template = loader.get_template("provisioning_bills.html") status_mapper = { - 'DUE': 'Due', - 'DISPUTED': 'Disputed', - 'IGNORED': 'Ignored', - 'PENDING': 'Pending', - 'PAID': 'Paid', - 'ERROR': 'Error' + "DUE": "Due", + "DISPUTED": "Disputed", + "IGNORED": "Ignored", + "PENDING": "Pending", + "PAID": "Paid", + "ERROR": "Error", } total_price = 0 for bill in []: - total_price += bill['total_price'] + total_price += bill["total_price"] - status = data.get('status', 'DUE') + status = data.get("status", "DUE") data = { - 'API_URL': None, - 'COMPANY_NAME': '', - 'COMPANY_CONTACT_URL': '', - 'COMPANY_LEGAL_NAME': '', - 'COMPANY_ADDRESS': '', - 'style__success': '#99ccff', - 'style__danger': '#ffcccc', - 'style__secondary': '#ededed', - 'status': status, - 'token': token.key, - 'title': f'Payments {status_mapper[status]}', - 'possible_status': [(key, status_mapper[key]) for key in status_mapper], - 'bills': provisioning_bills, - 'total_price': total_price, + "API_URL": None, + "COMPANY_NAME": "", + "COMPANY_CONTACT_URL": "", + "COMPANY_LEGAL_NAME": "", + "COMPANY_ADDRESS": "", + "style__success": "#99ccff", + "style__danger": "#ffcccc", + "style__secondary": "#ededed", + "status": status, + "token": token.key, + "title": f"Payments {status_mapper[status]}", + "possible_status": [(key, status_mapper[key]) for key in status_mapper], + "bills": provisioning_bills, + "total_price": total_price, **data, } @@ -58,13 +59,8 @@ def render_successfully(provisioning_bills=[], token=None, data={}): def render(message): request = None return loader.render_to_string( - 'message.html', - { - 'MESSAGE': message, - 'BUTTON': None, - 'BUTTON_TARGET': '_blank', - 'LINK': None - }, + "message.html", + {"MESSAGE": message, "BUTTON": None, "BUTTON_TARGET": "_blank", "LINK": None}, request, using=None, ) @@ -74,106 +70,110 @@ class AuthenticateTestSuite(ProvisioningTestCase): # When: no auth # Then: return 302 def test_without_auth(self): - url = reverse_lazy('provisioning:bill_html') + url = reverse_lazy("provisioning:bill_html") response = self.client.get(url) - hash = self.bc.format.to_base64('/v1/provisioning/bill/html') + hash = self.bc.format.to_base64("/v1/provisioning/bill/html") 
content = self.bc.format.from_bytes(response.content) - expected = '' + expected = "" self.assertEqual(content, expected) - self.assertEqual(response.url, f'/v1/auth/view/login?attempt=1&url={hash}') + self.assertEqual(response.url, f"/v1/auth/view/login?attempt=1&url={hash}") self.assertEqual(response.status_code, status.HTTP_302_FOUND) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), []) + self.assertEqual(self.bc.database.list_of("authenticate.ProfileAcademy"), []) # When: no profile academies # Then: return 403 def test_403(self): model = self.bc.database.create(user=1, token=1) - querystring = self.bc.format.to_querystring({'token': model.token.key}) - url = reverse_lazy('provisioning:bill_html') + f'?{querystring}' + querystring = self.bc.format.to_querystring({"token": model.token.key}) + url = reverse_lazy("provisioning:bill_html") + f"?{querystring}" response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - expected = render('no-access') + expected = render("no-access") # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), []) + self.assertEqual(self.bc.database.list_of("authenticate.ProfileAcademy"), []) # When: 2 bills # Then: return 200 def test_2_bills(self): - profile_academies = [{'academy_id': n + 1} for n in range(2)] - provisioning_bills = [{'academy_id': n + 1} for n in range(2)] - model = self.bc.database.create(user=1, - token=1, - provisioning_bill=provisioning_bills, - profile_academy=profile_academies, - academy=2, - role=1, - capability='read_provisioning_bill') - - querystring = self.bc.format.to_querystring({'token': model.token.key}) - url = reverse_lazy('provisioning:bill_html') + f'?{querystring}' + profile_academies = [{"academy_id": n + 1} for n in range(2)] + provisioning_bills = [{"academy_id": n + 1} for n in range(2)] + model = self.bc.database.create( + user=1, + token=1, + provisioning_bill=provisioning_bills, + profile_academy=profile_academies, + academy=2, + role=1, + capability="read_provisioning_bill", + ) + + querystring = self.bc.format.to_querystring({"token": model.token.key}) + url = reverse_lazy("provisioning:bill_html") + f"?{querystring}" response = self.client.get(url) content = self.bc.format.from_bytes(response.content) expected = render_successfully(provisioning_bills=model.provisioning_bill, token=model.token, data={}) # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('provisioning.ProvisioningBill'), + self.bc.database.list_of("provisioning.ProvisioningBill"), self.bc.format.to_dict(model.provisioning_bill), ) # When: 2 bills, just show the bills belong to one academy # Then: return 200 def test_2_bills__just_show_one_academy(self): - profile_academies = [{'academy_id': n + 1} for n in range(2)] - provisioning_bills = [{'academy_id': n + 1} for n in range(2)] - model = 
self.bc.database.create(user=1, - token=1, - provisioning_bill=provisioning_bills, - profile_academy=profile_academies, - academy=2, - role=1, - capability='read_provisioning_bill') - - querystring = self.bc.format.to_querystring({'token': model.token.key, 'academy': 1}) - url = reverse_lazy('provisioning:bill_html') + f'?{querystring}' + profile_academies = [{"academy_id": n + 1} for n in range(2)] + provisioning_bills = [{"academy_id": n + 1} for n in range(2)] + model = self.bc.database.create( + user=1, + token=1, + provisioning_bill=provisioning_bills, + profile_academy=profile_academies, + academy=2, + role=1, + capability="read_provisioning_bill", + ) + + querystring = self.bc.format.to_querystring({"token": model.token.key, "academy": 1}) + url = reverse_lazy("provisioning:bill_html") + f"?{querystring}" response = self.client.get(url) content = self.bc.format.from_bytes(response.content) expected = render_successfully(provisioning_bills=[model.provisioning_bill[0]], token=model.token, data={}) # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('provisioning.ProvisioningBill'), + self.bc.database.list_of("provisioning.ProvisioningBill"), self.bc.format.to_dict(model.provisioning_bill), ) diff --git a/breathecode/provisioning/tests/urls/tests_bill_id_html.py b/breathecode/provisioning/tests/urls/tests_bill_id_html.py index d2d182102..03668a22d 100644 --- a/breathecode/provisioning/tests/urls/tests_bill_id_html.py +++ b/breathecode/provisioning/tests/urls/tests_bill_id_html.py @@ -1,6 +1,7 @@ """ Test cases for /academy/:id/member/:id """ + import os from django.template import loader @@ -15,34 +16,33 @@ def academy_serializer(academy): return { - 'id': academy.id, - 'name': academy.name, + "id": academy.id, + "name": academy.name, } def provisioning_bill_serializer(provisioning_bill, academy): return { - 'id': provisioning_bill.id, - 'total_amount': provisioning_bill.total_amount, - 'academy': academy_serializer(academy), - 'status': provisioning_bill.status, - 'paid_at': provisioning_bill.paid_at, - 'stripe_url': provisioning_bill.stripe_url, - 'created_at': provisioning_bill.created_at, + "id": provisioning_bill.id, + "total_amount": provisioning_bill.total_amount, + "academy": academy_serializer(academy), + "status": provisioning_bill.status, + "paid_at": provisioning_bill.paid_at, + "stripe_url": provisioning_bill.stripe_url, + "created_at": provisioning_bill.created_at, } def provisioning_consumption_kind_serializer(provisioning_consumption_kind): return { - 'product_name': provisioning_consumption_kind.product_name, - 'sku': provisioning_consumption_kind.sku, + "product_name": provisioning_consumption_kind.product_name, + "sku": provisioning_consumption_kind.sku, } -def provisioning_user_consumption_serializer(provisioning_user_consumption, - provisioning_consumption_kind, - provisioning_price, - provisioning_consumption_events=[]): +def provisioning_user_consumption_serializer( + provisioning_user_consumption, provisioning_consumption_kind, provisioning_price, provisioning_consumption_events=[] +): quantity = 0 price = 0 @@ -52,106 +52,94 @@ def provisioning_user_consumption_serializer(provisioning_user_consumption, quantity += event.quantity p 
= event.quantity * provisioning_price.price_per_unit * provisioning_price.multiplier price += p - prices.append({'price': p, 'price_per_unit': provisioning_price.price_per_unit, 'quantity': event.quantity}) + prices.append({"price": p, "price_per_unit": provisioning_price.price_per_unit, "quantity": event.quantity}) - resume = '' + resume = "" for p in prices: resume += f'{p["quantity"]} x {p["price_per_unit"]} = {p["price"]}\n' return { - 'username': provisioning_user_consumption.username, - 'status': provisioning_user_consumption.status, - 'amount': float(provisioning_user_consumption.amount), - 'status_text': provisioning_user_consumption.status_text, - 'kind': provisioning_consumption_kind_serializer(provisioning_consumption_kind), - 'price_description': (quantity, price, resume) + "username": provisioning_user_consumption.username, + "status": provisioning_user_consumption.status, + "amount": float(provisioning_user_consumption.amount), + "status_text": provisioning_user_consumption.status_text, + "kind": provisioning_consumption_kind_serializer(provisioning_consumption_kind), + "price_description": (quantity, price, resume), } # IMPORTANT: the loader.render_to_string in a function is inside of function render -def render_successfully(provisioning_bill=None, - token=None, - academy=None, - provisioning_consumption_kind=None, - provisioning_price=None, - provisioning_user_consumptions=[], - provisioning_consumption_events=[], - data={}): +def render_successfully( + provisioning_bill=None, + token=None, + academy=None, + provisioning_consumption_kind=None, + provisioning_price=None, + provisioning_user_consumptions=[], + provisioning_consumption_events=[], + data={}, +): request = None - APP_URL = os.getenv('APP_URL', '') + APP_URL = os.getenv("APP_URL", "") - template = loader.get_template('provisioning_invoice.html') + template = loader.get_template("provisioning_invoice.html") status_map = { - 'DUE': 'Due', - 'DISPUTED': 'Disputed', - 'IGNORED': 'Ignored', - 'PENDING': 'Pending', - 'PAID': 'Paid', - 'ERROR': 'Error' + "DUE": "Due", + "DISPUTED": "Disputed", + "IGNORED": "Ignored", + "PENDING": "Pending", + "PAID": "Paid", + "ERROR": "Error", } total_price = 0 for bill in []: - total_price += bill['total_price'] + total_price += bill["total_price"] - status = data.get('status', 'DUE') + status = data.get("status", "DUE") provisioning_user_consumptions = sorted(provisioning_user_consumptions, key=lambda x: x.username) data = { - 'API_URL': - None, - 'COMPANY_NAME': - '', - 'COMPANY_CONTACT_URL': - '', - 'COMPANY_LEGAL_NAME': - '', - 'COMPANY_ADDRESS': - '', - 'style__success': - '#99ccff', - 'style__danger': - '#ffcccc', - 'style__secondary': - '#ededed', - 'status': - status, - 'token': - token.key, - 'title': - f'Payments {status_map[status]}', - 'possible_status': [(key, status_map[key]) for key in status_map], - 'bills': - provisioning_bill, - 'total_price': - total_price, - **data, 'bill': - provisioning_bill_serializer(provisioning_bill, academy), - 'consumptions': [ - provisioning_user_consumption_serializer(provisioning_user_consumption, - provisioning_consumption_kind, - provisioning_price, - provisioning_consumption_events=provisioning_consumption_events) + "API_URL": None, + "COMPANY_NAME": "", + "COMPANY_CONTACT_URL": "", + "COMPANY_LEGAL_NAME": "", + "COMPANY_ADDRESS": "", + "style__success": "#99ccff", + "style__danger": "#ffcccc", + "style__secondary": "#ededed", + "status": status, + "token": token.key, + "title": f"Payments {status_map[status]}", + "possible_status": 
[(key, status_map[key]) for key in status_map], + "bills": provisioning_bill, + "total_price": total_price, + **data, + "bill": provisioning_bill_serializer(provisioning_bill, academy), + "consumptions": [ + provisioning_user_consumption_serializer( + provisioning_user_consumption, + provisioning_consumption_kind, + provisioning_price, + provisioning_consumption_events=provisioning_consumption_events, + ) for provisioning_user_consumption in provisioning_user_consumptions ], - 'status': - status_map[provisioning_bill.status], - 'title': - academy.name, - 'url': - f'/v1/provisioning/bill/{provisioning_bill.id}/html?token={token.key}' + "status": status_map[provisioning_bill.status], + "title": academy.name, + "url": f"/v1/provisioning/bill/{provisioning_bill.id}/html?token={token.key}", } if academy: - data['COMPANY_INFO_EMAIL'] = academy.feedback_email - data['COMPANY_LEGAL_NAME'] = academy.legal_name or academy.name - data['COMPANY_LOGO'] = academy.logo_url - data['COMPANY_NAME'] = academy.name + data["COMPANY_INFO_EMAIL"] = academy.feedback_email + data["COMPANY_LEGAL_NAME"] = academy.legal_name or academy.name + data["COMPANY_LOGO"] = academy.logo_url + data["COMPANY_NAME"] = academy.name - if 'heading' not in data: - data['heading'] = academy.name + if "heading" not in data: + data["heading"] = academy.name return template.render(data) @@ -159,13 +147,8 @@ def render_successfully(provisioning_bill=None, def render(message): request = None return loader.render_to_string( - 'message.html', - { - 'MESSAGE': message, - 'BUTTON': None, - 'BUTTON_TARGET': '_blank', - 'LINK': None - }, + "message.html", + {"MESSAGE": message, "BUTTON": None, "BUTTON_TARGET": "_blank", "LINK": None}, request, using=None, ) @@ -175,82 +158,89 @@ class AuthenticateTestSuite(ProvisioningTestCase): # When: no auth # Then: return 302 def test_without_auth(self): - url = reverse_lazy('provisioning:bill_id_html', kwargs={'id': 1}) + url = reverse_lazy("provisioning:bill_id_html", kwargs={"id": 1}) response = self.client.get(url) - hash = self.bc.format.to_base64('/v1/provisioning/bill/1/html') + hash = self.bc.format.to_base64("/v1/provisioning/bill/1/html") content = self.bc.format.from_bytes(response.content) - expected = '' + expected = "" self.assertEqual(content, expected) - self.assertEqual(response.url, f'/v1/auth/view/login?attempt=1&url={hash}') + self.assertEqual(response.url, f"/v1/auth/view/login?attempt=1&url={hash}") self.assertEqual(response.status_code, status.HTTP_302_FOUND) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), []) + self.assertEqual(self.bc.database.list_of("authenticate.ProfileAcademy"), []) # When: no profile academies # Then: return 403 def test_403(self): model = self.bc.database.create(user=1, token=1) - querystring = self.bc.format.to_querystring({'token': model.token.key}) - url = reverse_lazy('provisioning:bill_id_html', kwargs={'id': 1}) + f'?{querystring}' + querystring = self.bc.format.to_querystring({"token": model.token.key}) + url = reverse_lazy("provisioning:bill_id_html", kwargs={"id": 1}) + f"?{querystring}" response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - expected = render('no-access') + expected = render("no-access") # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) 
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) - self.assertEqual(self.bc.database.list_of('authenticate.ProfileAcademy'), []) + self.assertEqual(self.bc.database.list_of("authenticate.ProfileAcademy"), []) # When: 1 bill and 2 activities # Then: return 200 def test_2_activities(self): - model = self.bc.database.create(user=1, - token=1, - provisioning_bill=1, - provisioning_user_consumption=2, - provisioning_consumption_event=2, - profile_academy=1, - academy=1, - role=1, - capability='crud_provisioning_bill') - - querystring = self.bc.format.to_querystring({'token': model.token.key}) - url = reverse_lazy('provisioning:bill_id_html', kwargs={'id': 1}) + f'?{querystring}' + model = self.bc.database.create( + user=1, + token=1, + provisioning_bill=1, + provisioning_user_consumption=2, + provisioning_consumption_event=2, + profile_academy=1, + academy=1, + role=1, + capability="crud_provisioning_bill", + ) + + querystring = self.bc.format.to_querystring({"token": model.token.key}) + url = reverse_lazy("provisioning:bill_id_html", kwargs={"id": 1}) + f"?{querystring}" response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - expected = render_successfully(provisioning_bill=model.provisioning_bill, - token=model.token, - academy=model.academy, - provisioning_consumption_kind=model.provisioning_consumption_kind, - provisioning_price=model.provisioning_price, - provisioning_user_consumptions=model.provisioning_user_consumption, - provisioning_consumption_events=model.provisioning_consumption_event, - data={ - 'pages': 1, - 'page': 1, - }) + expected = render_successfully( + provisioning_bill=model.provisioning_bill, + token=model.token, + academy=model.academy, + provisioning_consumption_kind=model.provisioning_consumption_kind, + provisioning_price=model.provisioning_price, + provisioning_user_consumptions=model.provisioning_user_consumption, + provisioning_consumption_events=model.provisioning_consumption_event, + data={ + "pages": 1, + "page": 1, + }, + ) # dump error in external files if content != expected: - with open('content.html', 'w') as f: + with open("content.html", "w") as f: f.write(content) - with open('expected.html', 'w') as f: + with open("expected.html", "w") as f: f.write(expected) self.assertEqual(content, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('provisioning.ProvisioningBill'), [ - self.bc.format.to_dict(model.provisioning_bill), - ]) + self.assertEqual( + self.bc.database.list_of("provisioning.ProvisioningBill"), + [ + self.bc.format.to_dict(model.provisioning_bill), + ], + ) diff --git a/breathecode/provisioning/urls.py b/breathecode/provisioning/urls.py index f9ef375e8..ffa4ea11b 100644 --- a/breathecode/provisioning/urls.py +++ b/breathecode/provisioning/urls.py @@ -1,18 +1,26 @@ from django.urls import path -from .views import (AcademyProvisioningUserConsumptionView, AcademyBillView, UploadView, redirect_new_container, - redirect_new_container_public, redirect_workspaces, render_html_all_bills, render_html_bill) +from .views import ( + AcademyProvisioningUserConsumptionView, + AcademyBillView, + UploadView, + redirect_new_container, + redirect_new_container_public, + redirect_workspaces, + render_html_all_bills, + render_html_bill, +) -app_name = 'provisioning' +app_name = "provisioning" urlpatterns = [ - path('me/container/new', redirect_new_container), - path('public/container/new', redirect_new_container_public), - path('me/workspaces', 
redirect_workspaces), - path('admin/upload', UploadView.as_view(), name='admin_upload'), - path('academy/userconsumption', AcademyProvisioningUserConsumptionView.as_view(), name='academy_userconsumption'), - path('academy/bill', AcademyBillView.as_view(), name='academy_bill_id'), - path('academy/bill/<int:bill_id>', AcademyBillView.as_view(), name='academy_bill_id'), - path('bill/html', render_html_all_bills, name='bill_html'), - path('bill/<int:id>/html', render_html_bill, name='bill_id_html'), + path("me/container/new", redirect_new_container), + path("public/container/new", redirect_new_container_public), + path("me/workspaces", redirect_workspaces), + path("admin/upload", UploadView.as_view(), name="admin_upload"), + path("academy/userconsumption", AcademyProvisioningUserConsumptionView.as_view(), name="academy_userconsumption"), + path("academy/bill", AcademyBillView.as_view(), name="academy_bill_id"), + path("academy/bill/<int:bill_id>", AcademyBillView.as_view(), name="academy_bill_id"), + path("bill/html", render_html_all_bills, name="bill_html"), + path("bill/<int:id>/html", render_html_bill, name="bill_id_html"), # path('academy/me/container', ContainerMeView.as_view()), # path('me/container', ContainerMeView.as_view()), # path('me/container/<int:container_id>', ContainerMeView.as_view()), diff --git a/breathecode/provisioning/urls_shortner.py b/breathecode/provisioning/urls_shortner.py index 1ed1dedd7..cc7a1b883 100644 --- a/breathecode/provisioning/urls_shortner.py +++ b/breathecode/provisioning/urls_shortner.py @@ -1,7 +1,7 @@ from django.urls import path from .views import redirect_new_container_public -app_name = 'provisioning' +app_name = "provisioning" urlpatterns = [ - path('', redirect_new_container_public), + path("", redirect_new_container_public), ] diff --git a/breathecode/provisioning/views.py b/breathecode/provisioning/views.py index cc34380b3..f4fd982db 100644 --- a/breathecode/provisioning/views.py +++ b/breathecode/provisioning/views.py @@ -45,18 +45,18 @@ def redirect_new_container(request, token): user = token.user - cohort_id = request.GET.get('cohort', None) - if cohort_id is None or cohort_id in ['','undefined']: - return render_message(request, 'Please specificy a cohort in the URL') + cohort_id = request.GET.get("cohort", None) + if cohort_id is None or cohort_id in ["", "undefined"]: + return render_message(request, "Please specificy a cohort in the URL") - url = request.GET.get('repo', None) + url = request.GET.get("repo", None) if url is None: cohort = Cohort.objects.filter(id=cohort_id).first() academy = None if cohort: academy = cohort.academy - return render_message(request, 'Please specify a repository in the URL', academy=academy) + return render_message(request, "Please specify a repository in the URL", academy=academy) cu = CohortUser.objects.filter(user=user, cohort_id=cohort_id).first() if cu is None: @@ -72,11 +72,11 @@ def redirect_new_container(request, token): if pa is None: obj = {} if cu.cohort.academy: - obj['COMPANY_INFO_EMAIL'] = cu.cohort.academy.feedback_email + obj["COMPANY_INFO_EMAIL"] = cu.cohort.academy.feedback_email - return render_message(request, - f"You don't seem to belong to academy {cu.cohort.academy.name}", - academy=cu.cohort.academy) + return render_message( + request, f"You don't seem to belong to academy {cu.cohort.academy.name}", academy=cu.cohort.academy + ) vendor = None try: @@ -84,58 +84,61 @@ def redirect_new_container(request, token): except Exception as e: return render_message(request, str(e), 
academy=cu.cohort.academy) - if vendor.name.lower() == 'gitpod': - return redirect(f'https://gitpod.io/#{url}') - if vendor.name.lower() == 'codespaces': - url = url.replace('https://github.com/', '') - return redirect(f'https://codespaces.new/?repo={url}') + if vendor.name.lower() == "gitpod": + return redirect(f"https://gitpod.io/#{url}") + if vendor.name.lower() == "codespaces": + url = url.replace("https://github.com/", "") + return redirect(f"https://codespaces.new/?repo={url}") - return render_message(request, - f"Unknown provisioning vendor: '{vendor.name}', please speak with your program manager.", - academy=cu.cohort.academy) + return render_message( + request, + f"Unknown provisioning vendor: '{vendor.name}', please speak with your program manager.", + academy=cu.cohort.academy, + ) def redirect_new_container_public(request): # user = token.user - repo = request.GET.get('repo', None) + repo = request.GET.get("repo", None) if repo is None: - return render_message(request, 'Please specify a repository in the URL') + return render_message(request, "Please specify a repository in the URL") - urls = {'gitpod': 'https://gitpod.io/#', 'codespaces': 'https://codespaces.new/?repo='} - vendors = request.GET.get('vendor', 'codespaces,gitpod').split(',') + urls = {"gitpod": "https://gitpod.io/#", "codespaces": "https://codespaces.new/?repo="} + vendors = request.GET.get("vendor", "codespaces,gitpod").split(",") buttons = [] for v in vendors: if v not in urls: - return render_message(request, f'Invalid provisioning vendor: {v}') + return render_message(request, f"Invalid provisioning vendor: {v}") - buttons.append({'label': f'Open in {v.capitalize()}', 'url': (urls[v] + repo), 'icon': f'/static/img/{v}.svg'}) + buttons.append({"label": f"Open in {v.capitalize()}", "url": (urls[v] + repo), "icon": f"/static/img/{v}.svg"}) data = { # 'title': item.academy.name, - 'buttons': buttons, + "buttons": buttons, # 'COMPANY_INFO_EMAIL': item.academy.feedback_email, } - template = get_template_content('choose_vendor', data) - return HttpResponse(template['html']) + template = get_template_content("choose_vendor", data) + return HttpResponse(template["html"]) @private_view() def redirect_workspaces(request, token): user = token.user - cohort_id = request.GET.get('cohort', None) - if cohort_id is None: return render_message(request, 'Please specificy a cohort in the URL') + cohort_id = request.GET.get("cohort", None) + if cohort_id is None: + return render_message(request, "Please specificy a cohort in the URL") - url = request.GET.get('repo', None) + url = request.GET.get("repo", None) if url is None: cohort = Cohort.objects.filter(id=cohort_id).first() academy = None if cohort: academy = cohort.academy - return render_message(request, "Please specificy a repository \"repo\" in the URL", academy=academy) + return render_message(request, 'Please specificy a repository "repo" in the URL', academy=academy) cu = CohortUser.objects.filter(user=user, cohort_id=cohort_id).first() if cu is None: @@ -149,9 +152,9 @@ def redirect_workspaces(request, token): academy_id = cu.cohort.academy.id pa = ProfileAcademy.objects.filter(user=user, academy__id=academy_id).first() if pa is None: - return render_message(request, - f"You don't seem to belong to academy {cu.cohort.academy.name}", - academy=cu.cohort.academy) + return render_message( + request, f"You don't seem to belong to academy {cu.cohort.academy.name}", academy=cu.cohort.academy + ) vendor = None try: @@ -164,11 +167,11 @@ def redirect_workspaces(request, 
token): class AcademyProvisioningUserConsumptionView(APIView): - extensions = APIViewExtensions(sort='-id') + extensions = APIViewExtensions(sort="-id") renderer_classes = [JSONRenderer, CSVRenderer] - @capable_of('read_provisioning_activity') + @capable_of("read_provisioning_activity") def get(self, request, academy_id=None): handler = self.extensions(request) lang = get_user_language(request) @@ -176,23 +179,23 @@ def get(self, request, academy_id=None): query = handler.lookup.build( lang, strings={ - 'iexact': [ - 'hash', - 'username', - 'status', - 'kind__product_name', - 'kind__sku', + "iexact": [ + "hash", + "username", + "status", + "kind__product_name", + "kind__sku", ], }, datetimes={ - 'gte': ['processed_at'], - 'lte': ['created_at'], # fix it + "gte": ["processed_at"], + "lte": ["created_at"], # fix it }, overwrite={ - 'start': 'processed_at', - 'end': 'created_at', - 'product_name': 'kind__product_name', - 'sku': 'kind__sku', + "start": "processed_at", + "end": "created_at", + "product_name": "kind__product_name", + "sku": "kind__sku", }, ) @@ -208,6 +211,7 @@ class UploadView(APIView): put: Upload a file to Google Cloud. """ + parser_classes = [MultiPartParser, FileUploadParser] # permission_classes = [AllowAny] @@ -218,166 +222,215 @@ def upload(self, lang, file): from ..services.google_cloud import Storage # files validation below - if file.content_type != 'text/csv': + if file.content_type != "text/csv": raise ValidationException( - translation(lang, - en='You can upload only files on the following formats: application/csv', - es='Solo puedes subir archivos en los siguientes formatos: application/csv', - slug='bad-format')) + translation( + lang, + en="You can upload only files on the following formats: application/csv", + es="Solo puedes subir archivos en los siguientes formatos: application/csv", + slug="bad-format", + ) + ) content_bytes = file.read() hash = hashlib.sha256(content_bytes).hexdigest() file.seek(0) csv_first_line = cut_csv(file, first=1) - df = pd.read_csv(csv_first_line, sep=',') + df = pd.read_csv(csv_first_line, sep=",") df.reset_index() format_error = True # gitpod - fields = ['id', 'credits', 'startTime', 'endTime', 'kind', 'userName', 'contextURL'] + fields = ["id", "credits", "startTime", "endTime", "kind", "userName", "contextURL"] if len(df.keys().intersection(fields)) == len(fields): format_error = False csv_last_line = cut_csv(file, last=1) - df2 = pd.read_csv(csv_last_line, sep=',', usecols=fields) + df2 = pd.read_csv(csv_last_line, sep=",", usecols=fields) df2.reset_index() try: - first = df2['startTime'][0].split('-') - last = df['startTime'][0].split('-') + first = df2["startTime"][0].split("-") + last = df["startTime"][0].split("-") - first[2] = first[2].split('T')[0] - last[2] = last[2].split('T')[0] + first[2] = first[2].split("T")[0] + last[2] = last[2].split("T")[0] first = date(int(first[0]), int(first[1]), int(first[2])) last = date(int(last[0]), int(last[1]), int(last[2])) except Exception: raise ValidationException( - translation(lang, - en='CSV file from unknown source', - es='Archivo CSV de fuente desconocida', - slug='bad-date-format')) + translation( + lang, + en="CSV file from unknown source", + es="Archivo CSV de fuente desconocida", + slug="bad-date-format", + ) + ) delta = relativedelta(last, first) if delta.years > 0 or delta.months > 1 or (delta.months > 1 and delta.days > 1): raise ValidationException( - translation(lang, - en='Each file must have only one month of data', - es='Cada archivo debe tener solo un mes de 
datos', - slug='overflow')) + translation( + lang, + en="Each file must have only one month of data", + es="Cada archivo debe tener solo un mes de datos", + slug="overflow", + ) + ) if format_error: # codespaces fields = [ - 'Username', 'Date', 'Product', 'SKU', 'Quantity', 'Unit Type', 'Price Per Unit ($)', 'Multiplier', - 'Owner' + "Username", + "Date", + "Product", + "SKU", + "Quantity", + "Unit Type", + "Price Per Unit ($)", + "Multiplier", + "Owner", ] if format_error and len(df.keys().intersection(fields)) == len(fields): format_error = False csv_last_line = cut_csv(file, last=1) - df2 = pd.read_csv(csv_last_line, sep=',', usecols=fields) + df2 = pd.read_csv(csv_last_line, sep=",", usecols=fields) df2.reset_index() try: - first = df['Date'][0].split('-') - last = df2['Date'][0].split('-') + first = df["Date"][0].split("-") + last = df2["Date"][0].split("-") first = date(int(first[0]), int(first[1]), int(first[2])) last = date(int(last[0]), int(last[1]), int(last[2])) except Exception: raise ValidationException( - translation(lang, - en='CSV file from unknown source', - es='Archivo CSV de fuente desconocida', - slug='bad-date-format')) + translation( + lang, + en="CSV file from unknown source", + es="Archivo CSV de fuente desconocida", + slug="bad-date-format", + ) + ) delta = relativedelta(last, first) if delta.years > 0 or delta.months > 1 or (delta.months > 1 and delta.days > 1): raise ValidationException( - translation(lang, - en='Each file must have only one month of data', - es='Cada archivo debe tener solo un mes de datos', - slug='overflow')) + translation( + lang, + en="Each file must have only one month of data", + es="Cada archivo debe tener solo un mes de datos", + slug="overflow", + ) + ) if format_error: # rigobot fields = [ - 'organization', 'consumption_period_id', 'consumption_period_start', 'consumption_period_end', - 'billing_status', 'total_spent_period', 'consumption_item_id', 'user_id', 'email', 'consumption_type', - 'pricing_type', 'total_spent', 'total_tokens', 'model', 'purpose_id', 'purpose_slug', 'purpose', - 'created_at', 'github_username' + "organization", + "consumption_period_id", + "consumption_period_start", + "consumption_period_end", + "billing_status", + "total_spent_period", + "consumption_item_id", + "user_id", + "email", + "consumption_type", + "pricing_type", + "total_spent", + "total_tokens", + "model", + "purpose_id", + "purpose_slug", + "purpose", + "created_at", + "github_username", ] if format_error and len(df.keys().intersection(fields)) == len(fields): format_error = False try: - first = datetime.fromisoformat(df['consumption_period_start'].min()) - last = datetime.fromisoformat(df['consumption_period_end'].max()) + first = datetime.fromisoformat(df["consumption_period_start"].min()) + last = datetime.fromisoformat(df["consumption_period_end"].max()) except Exception: raise ValidationException( - translation(lang, - en='CSV file from unknown source', - es='Archivo CSV de fuente desconocida', - slug='bad-date-format')) + translation( + lang, + en="CSV file from unknown source", + es="Archivo CSV de fuente desconocida", + slug="bad-date-format", + ) + ) delta = relativedelta(last, first) if delta.years > 0 or delta.months > 1 or (delta.months > 1 and delta.days > 1): raise ValidationException( - translation(lang, - en='Each file must have only one month of data', - es='Cada archivo debe tener solo un mes de datos', - slug='overflow')) + translation( + lang, + en="Each file must have only one month of data", + es="Cada archivo debe tener 
solo un mes de datos", + slug="overflow", + ) + ) # Think about uploading correct files and leaving out incorrect ones if format_error: raise ValidationException( - translation(lang, - en='CSV file from unknown source or the format has changed and this code must be updated', - es='Archivo CSV de fuente desconocida o el formato ha cambiado y este código debe ser ' - 'actualizado', - slug='csv-from-unknown-source')) + translation( + lang, + en="CSV file from unknown source or the format has changed and this code must be updated", + es="Archivo CSV de fuente desconocida o el formato ha cambiado y este código debe ser " + "actualizado", + slug="csv-from-unknown-source", + ) + ) # upload file section try: storage = Storage() - cloud_file = storage.file(os.getenv('PROVISIONING_BUCKET', None), hash) + cloud_file = storage.file(os.getenv("PROVISIONING_BUCKET", None), hash) created = not cloud_file.exists() if created: cloud_file.upload(file, content_type=file.content_type) except CircuitBreakerError: - raise ValidationException(translation( - lang, - en='The circuit breaker is open due to an error, please try again later', - es='El circuit breaker está abierto debido a un error, por favor intente más tarde', - slug='circuit-breaker-open'), - slug='circuit-breaker-open', - data={'service': 'Google Cloud Storage'}, - silent=True, - code=503) + raise ValidationException( + translation( + lang, + en="The circuit breaker is open due to an error, please try again later", + es="El circuit breaker está abierto debido a un error, por favor intente más tarde", + slug="circuit-breaker-open", + ), + slug="circuit-breaker-open", + data={"service": "Google Cloud Storage"}, + silent=True, + code=503, + ) tasks.upload.delay(hash, total_pages=math.ceil(count_csv_rows(file) / tasks.PANDAS_ROWS_LIMIT)) - data = {'file_name': hash, 'status': 'PENDING', 'created': created} + data = {"file_name": hash, "status": "PENDING", "created": created} return data - @has_permission('upload_provisioning_activity') + @has_permission("upload_provisioning_activity") def put(self, request, academy_id=None): - files = request.data.getlist('file') + files = request.data.getlist("file") lang = get_user_language(request) created = [] @@ -385,8 +438,8 @@ def put(self, request, academy_id=None): errors = {} result = { - 'success': [], - 'failure': [], + "success": [], + "failure": [], } for i in range(len(files)): @@ -394,12 +447,12 @@ def put(self, request, academy_id=None): try: data = self.upload(lang, file) - was_created = data.pop('created') + was_created = data.pop("created") serialized = { - 'pk': data['file_name'], - 'display_field': 'index', - 'display_value': i + 1, + "pk": data["file_name"], + "display_field": "index", + "display_value": i + 1, } if was_created: @@ -411,24 +464,28 @@ def put(self, request, academy_id=None): if key not in errors: errors[key] = [] - errors[key].append({ - 'display_field': 'index', - 'display_value': i + 1, - }) + errors[key].append( + { + "display_field": "index", + "display_value": i + 1, + } + ) if created: - result['success'].append({'status_code': 201, 'resources': created}) + result["success"].append({"status_code": 201, "resources": created}) if updated: - result['success'].append({'status_code': 200, 'resources': updated}) + result["success"].append({"status_code": 200, "resources": updated}) if errors: - for ((status_code, detail), value) in errors.items(): - result['failure'].append({ - 'status_code': status_code, - 'detail': detail, - 'resources': value, - }) + for (status_code, detail), 
value in errors.items(): + result["failure"].append( + { + "status_code": status_code, + "detail": detail, + "resources": value, + } + ) return Response(result, status=status.HTTP_207_MULTI_STATUS) @@ -438,19 +495,23 @@ def render_html_all_bills(request, token): lang = get_user_language(request) academy_ids = { x.academy.id - for x in ProfileAcademy.objects.filter(user=request.user, role__capabilities__slug='read_provisioning_bill') + for x in ProfileAcademy.objects.filter(user=request.user, role__capabilities__slug="read_provisioning_bill") } if not academy_ids: - return render(request, - 'message.html', { - 'MESSAGE': - translation(lang, - en="You don't have the capabilities to read provisioning bills in this academy", - es='No tienes capacidads para leer provisioning bills en esta academia', - slug='no-access') - }, - status=403) + return render( + request, + "message.html", + { + "MESSAGE": translation( + lang, + en="You don't have the capabilities to read provisioning bills in this academy", + es="No tienes capacidads para leer provisioning bills en esta academia", + slug="no-access", + ) + }, + status=403, + ) status_mapper = {} for key, value in BILL_STATUS: @@ -458,34 +519,34 @@ def render_html_all_bills(request, token): lookup = {} - status = 'DUE' - if 'status' in request.GET: - status = request.GET.get('status') - lookup['status'] = status.upper() + status = "DUE" + if "status" in request.GET: + status = request.GET.get("status") + lookup["status"] = status.upper() - if 'academy' in request.GET: - ids = {int(x) for x in request.GET.get('academy').split(',')} - lookup['academy__id__in'] = academy_ids.intersection(ids) + if "academy" in request.GET: + ids = {int(x) for x in request.GET.get("academy").split(",")} + lookup["academy__id__in"] = academy_ids.intersection(ids) else: - lookup['academy__id__in'] = academy_ids + lookup["academy__id__in"] = academy_ids items = ProvisioningBill.objects.filter(**lookup).exclude(academy__isnull=True) total_price = 0 for bill in []: - total_price += bill['total_price'] + total_price += bill["total_price"] data = { - 'status': status, - 'token': token.key, - 'title': f'Payments {status_mapper[status]}', - 'possible_status': [(key, status_mapper[key]) for key, label in BILL_STATUS], - 'bills': items, - 'total_price': total_price + "status": status, + "token": token.key, + "title": f"Payments {status_mapper[status]}", + "possible_status": [(key, status_mapper[key]) for key, label in BILL_STATUS], + "bills": items, + "total_price": total_price, } - template = get_template_content('provisioning_bills', data) - return HttpResponse(template['html']) + template = get_template_content("provisioning_bills", data) + return HttpResponse(template["html"]) LIMIT_PER_PAGE_HTML = 10 @@ -496,45 +557,52 @@ def render_html_bill(request, token, id=None): lang = get_user_language(request) academy_ids = { x.academy.id - for x in ProfileAcademy.objects.filter(user=request.user, role__capabilities__slug='crud_provisioning_bill') + for x in ProfileAcademy.objects.filter(user=request.user, role__capabilities__slug="crud_provisioning_bill") } if not academy_ids: - return render(request, - 'message.html', { - 'MESSAGE': - translation(lang, - en='You have no access to this resource', - es='No tienes acceso a este recurso', - slug='no-access') - }, - status=403) + return render( + request, + "message.html", + { + "MESSAGE": translation( + lang, + en="You have no access to this resource", + es="No tienes acceso a este recurso", + slug="no-access", + ) + }, + status=403, + ) 
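Aside on the put() hunk above: the handler buckets per-file outcomes and returns them as a 207 Multi-Status payload, grouping failures under a (status_code, detail) key before flattening them into the "failure" list. A minimal standalone sketch of that aggregation step, with hypothetical inputs and the helper name chosen only for illustration (the Django/DRF request plumbing is left out):

def build_multi_status_result(created, updated, errors):
    # created / updated: lists of serialized resources that were created (201) or updated (200).
    # errors: dict mapping (status_code, detail) tuples to the resources that failed with that error.
    result = {"success": [], "failure": []}
    if created:
        result["success"].append({"status_code": 201, "resources": created})
    if updated:
        result["success"].append({"status_code": 200, "resources": updated})
    for (status_code, detail), resources in errors.items():
        result["failure"].append({"status_code": status_code, "detail": detail, "resources": resources})
    return result

# Hypothetical usage: one upload was created, another file failed validation.
errors = {(400, "csv-from-unknown-source"): [{"display_field": "index", "display_value": 2}]}
print(build_multi_status_result([{"pk": "0a1b2c", "display_field": "index", "display_value": 1}], [], errors))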
item = ProvisioningBill.objects.filter(id=id, academy__isnull=False).first() if item is None: obj = {} if item.academy: - obj['COMPANY_INFO_EMAIL'] = item.academy.feedback_email - obj['COMPANY_LEGAL_NAME'] = item.academy.legal_name or item.academy.name - obj['COMPANY_LOGO'] = item.academy.logo_url - obj['COMPANY_NAME'] = item.academy.name - - if 'heading' not in obj: - obj['heading'] = item.academy.name - - return render(request, 'message.html', { - 'MESSAGE': - translation( - lang, - en='Bill not found', - es='Factura no encontrada', - slug='bill-not-found', - **obj, - ) - }) + obj["COMPANY_INFO_EMAIL"] = item.academy.feedback_email + obj["COMPANY_LEGAL_NAME"] = item.academy.legal_name or item.academy.name + obj["COMPANY_LOGO"] = item.academy.logo_url + obj["COMPANY_NAME"] = item.academy.name + + if "heading" not in obj: + obj["heading"] = item.academy.name + + return render( + request, + "message.html", + { + "MESSAGE": translation( + lang, + en="Bill not found", + es="Factura no encontrada", + slug="bill-not-found", + **obj, + ) + }, + ) - status_map = {'DUE': 'UNDER_REVIEW', 'APPROVED': 'READY_TO_PAY', 'PAID': 'ALREADY PAID', 'PENDING': 'PENDING'} + status_map = {"DUE": "UNDER_REVIEW", "APPROVED": "READY_TO_PAY", "PAID": "ALREADY PAID", "PENDING": "PENDING"} status_mapper = {} for key, value in BILL_STATUS: status_mapper[key] = value @@ -543,9 +611,9 @@ def render_html_bill(request, token, id=None): consumptions = ProvisioningUserConsumption.objects.filter(bills=item) pages = math.ceil(consumptions.count() / LIMIT_PER_PAGE_HTML) - page = int(request.GET.get('page', 0)) + page = int(request.GET.get("page", 0)) - consumptions = consumptions.order_by('username')[0:(page * LIMIT_PER_PAGE_HTML) + LIMIT_PER_PAGE_HTML] + consumptions = consumptions.order_by("username")[0 : (page * LIMIT_PER_PAGE_HTML) + LIMIT_PER_PAGE_HTML] consumptions_serialized = ProvisioningUserConsumptionHTMLResumeSerializer(consumptions, many=True).data @@ -553,37 +621,38 @@ def render_html_bill(request, token, id=None): u = urlparse(url) query = parse_qs(u.query, keep_blank_values=True) - query.pop('page', None) + query.pop("page", None) u = u._replace(query=urlencode(query, True)) url = urlunparse(u) - if not '?' in url: - url += '?' + if not "?" in url: + url += "?" page += 1 data = { - 'bill': bill_serialized, - 'consumptions': consumptions_serialized, - 'status': status_map[item.status], - 'title': item.academy.name, - 'pages': pages, - 'page': page, - 'url': url, - 'COMPANY_INFO_EMAIL': item.academy.feedback_email, + "bill": bill_serialized, + "consumptions": consumptions_serialized, + "status": status_map[item.status], + "title": item.academy.name, + "pages": pages, + "page": page, + "url": url, + "COMPANY_INFO_EMAIL": item.academy.feedback_email, } - template = get_template_content('provisioning_invoice', data, academy=item.academy) - return HttpResponse(template['html']) + template = get_template_content("provisioning_invoice", data, academy=item.academy) + return HttpResponse(template["html"]) class AcademyBillView(APIView): """ List all snippets, or create a new snippet. 
""" + extensions = APIViewExtensions(paginate=True) - @capable_of('read_provisioning_bill') + @capable_of("read_provisioning_bill") def get(self, request, academy_id=None, bill_id=None): handler = self.extensions(request) @@ -591,37 +660,39 @@ def get(self, request, academy_id=None, bill_id=None): bill = ProvisioningBill.objects.filter(academy__id=academy_id, id=bill_id).first() if bill is None: - raise ValidationException('Provisioning Bill not found', code=404, slug='provisioning_bill-not-found') + raise ValidationException("Provisioning Bill not found", code=404, slug="provisioning_bill-not-found") serializer = GetProvisioningBillSerializer(bill, many=False) return Response(serializer.data) items = ProvisioningBill.objects.filter(academy__id=academy_id) - status = request.GET.get('status', None) + status = request.GET.get("status", None) if status is not None: - items = items.filter(status__in=status.upper().split(',')) + items = items.filter(status__in=status.upper().split(",")) items = handler.queryset(items) serializer = GetProvisioningBillSmallSerializer(items, many=True) return handler.response(serializer.data) - @capable_of('crud_provisioning_bill') + @capable_of("crud_provisioning_bill") def put(self, request, bill_id=None, academy_id=None): lang = get_user_language(request) item = ProvisioningBill.objects.filter(id=bill_id, academy__id=academy_id).first() if item is None: - raise ValidationException(translation( - lang, - en='Not found', - es='No encontrado', - slug='not-found', - ), - code=404) - - serializer = ProvisioningBillSerializer(item, data=request.data, many=False, context={'lang': lang}) + raise ValidationException( + translation( + lang, + en="Not found", + es="No encontrado", + slug="not-found", + ), + code=404, + ) + + serializer = ProvisioningBillSerializer(item, data=request.data, many=False, context={"lang": lang}) if serializer.is_valid(): serializer.save() return Response(serializer.data, status=status.HTTP_200_OK) diff --git a/breathecode/registry/actions.py b/breathecode/registry/actions.py index e81efb6f9..47f0af65f 100644 --- a/breathecode/registry/actions.py +++ b/breathecode/registry/actions.py @@ -57,19 +57,19 @@ def unmark_element(element, stream=None): return stream.getvalue() # patching Markdown - Markdown.output_formats['plain'] = unmark_element - __md = Markdown(output_format='plain') + Markdown.output_formats["plain"] = unmark_element + __md = Markdown(output_format="plain") __md.stripTopLevelTags = False return __md.convert(text) def allowed_mimes(): - return ['image/png', 'image/svg+xml', 'image/jpeg', 'image/gif', 'image/jpg'] + return ["image/png", "image/svg+xml", "image/jpeg", "image/gif", "image/jpg"] def asset_images_bucket(default=None): - return os.getenv('ASSET_IMAGES_BUCKET', default) + return os.getenv("ASSET_IMAGES_BUCKET", default) def generate_external_readme(a): @@ -79,27 +79,27 @@ def generate_external_readme(a): readme_lang = a.lang.lower() - if readme_lang == 'us': - readme = get_template('external.md') + if readme_lang == "us": + readme = get_template("external.md") else: - readme = get_template(f'external.{readme_lang}.md') + readme = get_template(f"external.{readme_lang}.md") a.set_readme(readme.render(AssetBigSerializer(a).data)) a.save() return True def get_video_url(video_id): - if re.search(r'https?:\/\/', video_id) is None: - return 'https://www.youtube.com/watch?v=' + video_id + if re.search(r"https?:\/\/", video_id) is None: + return "https://www.youtube.com/watch?v=" + video_id else: - patterns = 
((r'(https?:\/\/www\.loom\.com\/)embed(\/.+)', r'\1share\2'), ) + patterns = ((r"(https?:\/\/www\.loom\.com\/)embed(\/.+)", r"\1share\2"),) for regex, replacement in patterns: video_id = re.sub(regex, replacement, video_id) return video_id def get_user_from_github_username(username): - authors = username.split(',') + authors = username.split(",") github_users = [] for _author in authors: u = CredentialsGithub.objects.filter(username=_author).first() @@ -110,22 +110,22 @@ def get_user_from_github_username(username): def pull_from_github(asset_slug, author_id=None, override_meta=False): - logger.debug(f'Sync with github asset {asset_slug}') + logger.debug(f"Sync with github asset {asset_slug}") asset = None try: asset = Asset.objects.filter(slug=asset_slug).first() if asset is None: - raise Exception(f'Asset with slug {asset_slug} not found when attempting to sync with github') + raise Exception(f"Asset with slug {asset_slug} not found when attempting to sync with github") - asset.status_text = 'Starting to sync...' - asset.sync_status = 'PENDING' + asset.status_text = "Starting to sync..." + asset.sync_status = "PENDING" asset.save() if generate_external_readme(asset): - asset.status_text = 'Readme file for external asset generated, not github sync' - asset.sync_status = 'OK' + asset.status_text = "Readme file for external asset generated, not github sync" + asset.sync_status = "OK" asset.last_synch_at = None asset.save() return asset.sync_status @@ -135,63 +135,64 @@ def pull_from_github(asset_slug, author_id=None, override_meta=False): if author_id is None: raise Exception( - f'System does not know what github credentials to use to retrieve asset info for: {asset_slug}') + f"System does not know what github credentials to use to retrieve asset info for: {asset_slug}" + ) - if asset.readme_url is None or 'github.com' not in asset.readme_url: - raise Exception(f'Missing or invalid URL on {asset_slug}, it does not belong to github.com') + if asset.readme_url is None or "github.com" not in asset.readme_url: + raise Exception(f"Missing or invalid URL on {asset_slug}, it does not belong to github.com") credentials = CredentialsGithub.objects.filter(user__id=author_id).first() if credentials is None: - raise Exception(f'Github credentials for this user {author_id} not found when sync asset {asset_slug}') + raise Exception(f"Github credentials for this user {author_id} not found when sync asset {asset_slug}") g = Github(credentials.token) - if asset.asset_type in ['LESSON', 'ARTICLE']: + if asset.asset_type in ["LESSON", "ARTICLE"]: asset = pull_github_lesson(g, asset, override_meta=override_meta) - elif asset.asset_type in ['QUIZ']: + elif asset.asset_type in ["QUIZ"]: asset = pull_quiz_asset(g, asset) else: asset = pull_learnpack_asset(g, asset, override_meta=True) - asset.status_text = 'Successfully Synched' - asset.sync_status = 'OK' + asset.status_text = "Successfully Synched" + asset.sync_status = "OK" asset.last_synch_at = timezone.now() asset.save() - logger.debug(f'Successfully re-synched asset {asset_slug} with github') + logger.debug(f"Successfully re-synched asset {asset_slug} with github") return asset except Exception as e: logger.exception(e) - message = '' - if hasattr(e, 'data') and e.data: - message = e.data['message'] + message = "" + if hasattr(e, "data") and e.data: + message = e.data["message"] else: - message = str(e).replace('"', '\'') + message = str(e).replace('"', "'") - logger.error(f'Error updating {asset_slug} from github: ' + str(message)) + logger.error(f"Error 
updating {asset_slug} from github: " + str(message)) # if the exception triggered too early, the asset will be early if asset is not None: asset.status_text = str(message) - asset.sync_status = 'ERROR' + asset.sync_status = "ERROR" asset.save() return asset.sync_status - return 'ERROR' + return "ERROR" def push_to_github(asset_slug, author=None): - logger.debug(f'Push asset {asset_slug} to github') + logger.debug(f"Push asset {asset_slug} to github") asset = None try: asset = Asset.objects.filter(slug=asset_slug).first() if asset is None: - raise Exception(f'Asset with slug {asset_slug} not found when attempting github push') + raise Exception(f"Asset with slug {asset_slug} not found when attempting github push") - asset.status_text = 'Starting to push...' - asset.sync_status = 'PENDING' + asset.status_text = "Starting to push..." + asset.sync_status = "PENDING" asset.save() if author is None: @@ -201,55 +202,55 @@ def push_to_github(asset_slug, author=None): raise Exception('Asset is marked as "external" so it cannot push to github') if author is None: - raise Exception('Asset must have an owner with write permissions on the repository') + raise Exception("Asset must have an owner with write permissions on the repository") - if asset.readme_url is None or 'github.com' not in asset.readme_url: - raise Exception(f'Missing or invalid URL on {asset_slug}, it does not belong to github.com') + if asset.readme_url is None or "github.com" not in asset.readme_url: + raise Exception(f"Missing or invalid URL on {asset_slug}, it does not belong to github.com") credentials = CredentialsGithub.objects.filter(user__id=author.id).first() if credentials is None: raise Exception( - f'Github credentials for user {author.first_name} {author.last_name} (id: {author.id}) not found when synching asset {asset_slug}' + f"Github credentials for user {author.first_name} {author.last_name} (id: {author.id}) not found when synching asset {asset_slug}" ) g = Github(credentials.token) asset = push_github_asset(g, asset) - asset.status_text = 'Successfully Synched' - asset.sync_status = 'OK' + asset.status_text = "Successfully Synched" + asset.sync_status = "OK" asset.last_synch_at = timezone.now() asset.save() - logger.debug(f'Successfully re-synched asset {asset_slug} with github') + logger.debug(f"Successfully re-synched asset {asset_slug} with github") return asset except Exception as e: logger.exception(e) - message = '' - if hasattr(e, 'data'): - message = e.data['message'] + message = "" + if hasattr(e, "data"): + message = e.data["message"] else: - message = str(e).replace('"', '\'') + message = str(e).replace('"', "'") - logger.error(f'Error updating {asset_slug} from github: ' + str(message)) + logger.error(f"Error updating {asset_slug} from github: " + str(message)) # if the exception triggered too early, the asset will be early if asset is not None: asset.status_text = str(message) - asset.sync_status = 'ERROR' + asset.sync_status = "ERROR" asset.save() return asset.sync_status - return 'ERROR' + return "ERROR" -def get_blob_content(repo, path_name, branch='main'): +def get_blob_content(repo, path_name, branch="main"): - if '?' in path_name: - path_name = path_name.split('?')[0] + if "?" 
in path_name: + path_name = path_name.split("?")[0] # first get the branch reference - ref = repo.get_git_ref(f'heads/{branch}') + ref = repo.get_git_ref(f"heads/{branch}") # then get the tree - tree = repo.get_git_tree(ref.object.sha, recursive='/' in path_name).tree + tree = repo.get_git_tree(ref.object.sha, recursive="/" in path_name).tree # look for path in tree sha = [x.sha for x in tree if x.path == path_name] if not sha: @@ -259,15 +260,15 @@ def get_blob_content(repo, path_name, branch='main'): return repo.get_git_blob(sha[0]) -def set_blob_content(repo, path_name, content, file_name, branch='main'): +def set_blob_content(repo, path_name, content, file_name, branch="main"): - if content is None or content == '': - raise Exception(f'Blob content is empty for {path_name}') + if content is None or content == "": + raise Exception(f"Blob content is empty for {path_name}") # first get the branch reference - ref = repo.get_git_ref(f'heads/{branch}') + ref = repo.get_git_ref(f"heads/{branch}") # then get the tree - tree = repo.get_git_tree(ref.object.sha, recursive='/' in path_name).tree + tree = repo.get_git_tree(ref.object.sha, recursive="/" in path_name).tree # look for path in tree file = [x for x in tree if x.path == path_name] if not file: @@ -275,84 +276,84 @@ def set_blob_content(repo, path_name, content, file_name, branch='main'): return None # update - return repo.update_file(file[0].path, f'Update {file_name}', content, file[0].sha) + return repo.update_file(file[0].path, f"Update {file_name}", content, file[0].sha) -def generate_screenshot(url: str, dimension: str = '1200x630', **kwargs): - screenshot_key = os.getenv('SCREENSHOT_MACHINE_KEY', '') +def generate_screenshot(url: str, dimension: str = "1200x630", **kwargs): + screenshot_key = os.getenv("SCREENSHOT_MACHINE_KEY", "") params = { - 'key': screenshot_key, - 'url': url, - 'dimension': dimension, + "key": screenshot_key, + "url": url, + "dimension": dimension, **kwargs, } - request = requests.request('GET', 'https://api.screenshotmachine.com', params=params, timeout=8) + request = requests.request("GET", "https://api.screenshotmachine.com", params=params, timeout=8) return request def push_github_asset(github, asset: Asset): - logger.debug(f'Sync pull_github_lesson {asset.slug}') + logger.debug(f"Sync pull_github_lesson {asset.slug}") if asset.readme_url is None: - raise Exception('Missing Readme URL for asset ' + asset.slug + '.') + raise Exception("Missing Readme URL for asset " + asset.slug + ".") org_name, repo_name, branch_name = asset.get_repo_meta() - repo = github.get_repo(f'{org_name}/{repo_name}') + repo = github.get_repo(f"{org_name}/{repo_name}") file_name = os.path.basename(asset.readme_url) if branch_name is None: - raise Exception('Readme URL must include branch name after blob') + raise Exception("Readme URL must include branch name after blob") - result = re.search(r'\/blob\/([\w\d_\-]+)\/(.+)', asset.readme_url) + result = re.search(r"\/blob\/([\w\d_\-]+)\/(.+)", asset.readme_url) branch, file_path = result.groups() - logger.debug(f'Fetching readme: {file_path}') + logger.debug(f"Fetching readme: {file_path}") decoded_readme = None - if asset.asset_type in ['LESSON', 'ARTICLE']: + if asset.asset_type in ["LESSON", "ARTICLE"]: # we commit the raw readme, we don't want images to be replaced in the original github - decoded_readme = base64.b64decode(asset.readme_raw.encode('utf-8')).decode('utf-8') + decoded_readme = base64.b64decode(asset.readme_raw.encode("utf-8")).decode("utf-8") - elif asset.asset_type 
== 'QUIZ': + elif asset.asset_type == "QUIZ": decoded_readme = json.dumps(asset.config, indent=4) else: - raise Exception(f'Assets with type {asset.asset_type} cannot be commited to Github') + raise Exception(f"Assets with type {asset.asset_type} cannot be commited to Github") - if decoded_readme is None or decoded_readme == 'None' or decoded_readme == '': - raise Exception('The content you are trying to push to Github is empty') + if decoded_readme is None or decoded_readme == "None" or decoded_readme == "": + raise Exception("The content you are trying to push to Github is empty") result = set_blob_content(repo, file_path, decoded_readme, file_name, branch=branch) - if 'commit' in result: - asset.github_commit_hash = result['commit'].sha + if "commit" in result: + asset.github_commit_hash = result["commit"].sha return asset def pull_github_lesson(github, asset: Asset, override_meta=False): - logger.debug(f'Sync pull_github_lesson {asset.slug}') + logger.debug(f"Sync pull_github_lesson {asset.slug}") if asset.readme_url is None: - raise Exception('Missing Readme URL for lesson ' + asset.slug + '.') + raise Exception("Missing Readme URL for lesson " + asset.slug + ".") org_name, repo_name, branch_name = asset.get_repo_meta() - repo = github.get_repo(f'{org_name}/{repo_name}') + repo = github.get_repo(f"{org_name}/{repo_name}") os.path.basename(asset.readme_url) if branch_name is None: - raise Exception('Lesson URL must include branch name after blob') + raise Exception("Lesson URL must include branch name after blob") - result = re.search(r'\/blob\/([\w\d_\-]+)\/(.+)', asset.readme_url) + result = re.search(r"\/blob\/([\w\d_\-]+)\/(.+)", asset.readme_url) _, file_path = result.groups() - logger.debug(f'Fetching readme: {file_path}') + logger.debug(f"Fetching readme: {file_path}") blob_file = get_blob_content(repo, file_path, branch=branch_name) if blob_file is None: - raise Exception('Nothing was found under ' + file_path) + raise Exception("Nothing was found under " + file_path) base64_readme = blob_file.content asset.readme_raw = base64_readme @@ -365,24 +366,24 @@ def pull_github_lesson(github, asset: Asset, override_meta=False): # only the first time a lesson is synched it will override some of the properties readme = asset.get_readme(parse=True) if asset.last_synch_at is None or override_meta: - fm = dict(readme['frontmatter'].items()) - if 'slug' in fm and fm['slug'] != asset.slug: + fm = dict(readme["frontmatter"].items()) + if "slug" in fm and fm["slug"] != asset.slug: logger.debug(f'New slug {fm["slug"]} found for lesson {asset.slug}') - asset.slug = fm['slug'] + asset.slug = fm["slug"] - if 'excerpt' in fm: - asset.description = fm['excerpt'] - elif 'description' in fm: - asset.description = fm['description'] - elif 'subtitle' in fm: - asset.description = fm['subtitle'] + if "excerpt" in fm: + asset.description = fm["excerpt"] + elif "description" in fm: + asset.description = fm["description"] + elif "subtitle" in fm: + asset.description = fm["subtitle"] - if 'title' in fm and fm['title'] != '': - asset.title = fm['title'] + if "title" in fm and fm["title"] != "": + asset.title = fm["title"] def parse_boolean(value): - true_values = {'1', 'true'} - false_values = {'0', 'false'} + true_values = {"1", "true"} + false_values = {"0", "false"} if isinstance(value, bool): return value if isinstance(value, (int, str)): @@ -392,21 +393,23 @@ def parse_boolean(value): elif value_str in false_values: return False - raise ValueError(f'Invalid value for boolean conversion: {value}') + raise 
ValueError(f"Invalid value for boolean conversion: {value}") - if 'table_of_contents' in fm and fm['table_of_contents'] != '': - asset.enable_table_of_content = parse_boolean(fm['table_of_contents']) + if "table_of_contents" in fm and fm["table_of_contents"] != "": + asset.enable_table_of_content = parse_boolean(fm["table_of_contents"]) - if 'video' in fm and fm['video'] != '': - asset.intro_video_url = fm['video'] + if "video" in fm and fm["video"] != "": + asset.intro_video_url = fm["video"] - if 'authors' in fm and fm['authors'] != '': - asset.authors_username = ','.join(fm['authors']) + if "authors" in fm and fm["authors"] != "": + asset.authors_username = ",".join(fm["authors"]) # retrive technologies from the frontmatter _techs = [] - if 'tags' in fm and isinstance(fm['tags'], list): _techs = fm['tags'] - elif 'technologies' in fm and isinstance(fm['technologies'], list): _techs = fm['technologies'] + if "tags" in fm and isinstance(fm["tags"], list): + _techs = fm["tags"] + elif "technologies" in fm and isinstance(fm["technologies"], list): + _techs = fm["technologies"] if len(_techs) > 0: asset.technologies.clear() @@ -414,9 +417,9 @@ def parse_boolean(value): technology = AssetTechnology.get_or_create(tech_slug) # if the technology is not multi lang - if technology.lang is not None and technology.lang != '': + if technology.lang is not None and technology.lang != "": # skip technology because it does not match the asset lang - if technology.lang in ['us', 'en'] and asset.lang not in ['us', 'en']: + if technology.lang in ["us", "en"] and asset.lang not in ["us", "en"]: continue elif technology.lang != asset.lang: continue @@ -427,7 +430,7 @@ def parse_boolean(value): def clean_asset_readme(asset: Asset): - if asset.readme_raw is None or asset.readme_raw == '': + if asset.readme_raw is None or asset.readme_raw == "": return asset asset.last_cleaning_at = timezone.now() @@ -437,13 +440,13 @@ def clean_asset_readme(asset: Asset): asset = clean_h1s(asset) asset = clean_content_variables(asset) readme = asset.get_readme(parse=True) - if 'html' in readme: - asset.html = readme['html'] + if "html" in readme: + asset.html = readme["html"] - asset.cleaning_status = 'OK' + asset.cleaning_status = "OK" asset.save() except Exception as e: - asset.cleaning_status = 'ERROR' + asset.cleaning_status = "ERROR" asset.cleaning_status_details = str(e) asset.save() @@ -451,11 +454,13 @@ def clean_asset_readme(asset: Asset): def clean_content_variables(asset: Asset): - logger.debug(f'Clearning content variables for readme for asset {asset.slug}') + logger.debug(f"Clearning content variables for readme for asset {asset.slug}") readme = asset.get_readme() - pattern = r'{%\s+([^\s%]+)\s+%}' # This regex pattern matches {% variable_name %} or {% variable_name:"default_value" %} - markdown_text = readme['decoded'] - logger.debug('Original text:' + markdown_text) + pattern = ( + r"{%\s+([^\s%]+)\s+%}" # This regex pattern matches {% variable_name %} or {% variable_name:"default_value" %} + ) + markdown_text = readme["decoded"] + logger.debug("Original text:" + markdown_text) variables_dict = {} variables = ContentVariable.objects.filter(academy=asset.academy).filter(Q(lang__isnull=True) | Q(lang=asset.lang)) @@ -465,26 +470,26 @@ def clean_content_variables(asset: Asset): else: variables_dict[varia.key] = varia.value - logger.debug('Variables') + logger.debug("Variables") logger.debug(variables_dict) def replace(match): variable_data = match.group(1).strip() - variable_parts = variable_data.split(':', 1) # 
Split variable name and default value + variable_parts = variable_data.split(":", 1) # Split variable name and default value variable_name = variable_parts[0].strip() - logger.debug('Found variable ' + variable_name) + logger.debug("Found variable " + variable_name) if len(variable_parts) > 1: default_value = variable_parts[1].strip() else: - asset.log_error('missing-variable', f'Variable {variable_name} is missing and it has not default value') - default_value = '{% ' + variable_name + ' %}' + asset.log_error("missing-variable", f"Variable {variable_name} is missing and it has not default value") + default_value = "{% " + variable_name + " %}" value = variables_dict.get(variable_name, default_value) return value if value is not None else match.group(0) replaced_text = re.sub(pattern, replace, markdown_text) - logger.debug('Replaced text:' + replaced_text) + logger.debug("Replaced text:" + replaced_text) asset.set_readme(replaced_text) return asset @@ -492,38 +497,40 @@ def replace(match): def clean_readme_relative_paths(asset: Asset): readme = asset.get_readme() base_url = os.path.dirname(asset.readme_url) - relative_urls = list(re.finditer(r'((?:\.\.?\/)+[^)"\']+)', readme['decoded'])) + relative_urls = list(re.finditer(r'((?:\.\.?\/)+[^)"\']+)', readme["decoded"])) - replaced = readme['decoded'] + replaced = readme["decoded"] while len(relative_urls) > 0: match = relative_urls.pop(0) found_url = match.group() - if found_url.endswith('\\'): + if found_url.endswith("\\"): found_url = found_url[:-1].strip() extension = pathlib.Path(found_url).suffix - if readme['decoded'][match.start() - 1] in ['(', "'", '"'] and extension and extension.strip() in [ - '.png', '.jpg', '.png', '.jpeg', '.svg', '.gif' - ]: - logger.debug('Replaced url: ' + base_url + '/' + found_url + '?raw=true') - replaced = replaced.replace(found_url, base_url + '/' + found_url + '?raw=true') + if ( + readme["decoded"][match.start() - 1] in ["(", "'", '"'] + and extension + and extension.strip() in [".png", ".jpg", ".png", ".jpeg", ".svg", ".gif"] + ): + logger.debug("Replaced url: " + base_url + "/" + found_url + "?raw=true") + replaced = replaced.replace(found_url, base_url + "/" + found_url + "?raw=true") asset.set_readme(replaced) return asset def clean_readme_hide_comments(asset: Asset): - logger.debug(f'Clearning readme for asset {asset.slug}') + logger.debug(f"Clearning readme for asset {asset.slug}") readme = asset.get_readme() - regex = r'<!--\s*(:?end)?hide\s*-->' + regex = r"<!--\s*(:?end)?hide\s*-->" - content = readme['decoded'] + content = readme["decoded"] findings = list(re.finditer(regex, content)) if len(findings) % 2 != 0: - asset.log_error(AssetErrorLog.README_SYNTAX, 'Readme with to many <!-- hide -> comments') - raise Exception('Readme with to many <!-- hide -> comments') + asset.log_error(AssetErrorLog.README_SYNTAX, "Readme with to many <!-- hide -> comments") + raise Exception("Readme with to many <!-- hide -> comments") - replaced = '' + replaced = "" start_index = 0 while len(findings) > 1: opening_comment = findings.pop(0) @@ -540,45 +547,45 @@ def clean_readme_hide_comments(asset: Asset): def clean_h1s(asset: Asset): - logger.debug(f'Clearning first heading 1 for {asset.slug}') + logger.debug(f"Clearning first heading 1 for {asset.slug}") readme = asset.get_readme() - content = readme['decoded'].strip() + content = readme["decoded"].strip() - frontmatter = '' - frontmatter_regex = r'---\n(.*?\n)*?---\n' + frontmatter = "" + frontmatter_regex = r"---\n(.*?\n)*?---\n" match = 
re.search(frontmatter_regex, content, flags=re.DOTALL) if match: frontmatter_content = match.group() - frontmatter = frontmatter_content.strip() if frontmatter_content else '' + frontmatter = frontmatter_content.strip() if frontmatter_content else "" - content = content[match.end():].strip() + content = content[match.end() :].strip() - end = r'.*\n' + end = r".*\n" lines = list(re.finditer(end, content)) if len(lines) == 0: - logger.debug('no jump of lines found') + logger.debug("no jump of lines found") return asset first_line_end = lines.pop(0).end() - logger.debug('first line ends at') + logger.debug("first line ends at") logger.debug(first_line_end) - regex = r'\s?#\s[`\-_\w]+[`\-_\w\s]*\n' + regex = r"\s?#\s[`\-_\w]+[`\-_\w\s]*\n" findings = list(re.finditer(regex, content[:first_line_end])) if len(findings) > 0: replaced = content[first_line_end:].strip() - if frontmatter != '': - replaced = f'{frontmatter}\n\n{replaced}' + if frontmatter != "": + replaced = f"{frontmatter}\n\n{replaced}" asset.set_readme(replaced) return asset def screenshots_bucket(): - return os.getenv('SCREENSHOTS_BUCKET', '') + return os.getenv("SCREENSHOTS_BUCKET", "") class AssetThumbnailGenerator: @@ -608,7 +615,7 @@ def get_thumbnail_url(self) -> tuple[str, bool]: media.hits += 1 media.save() - if self.asset.preview is None or self.asset.preview == '': + if self.asset.preview is None or self.asset.preview == "": self.asset.preview = media.url self.asset.save() @@ -626,14 +633,14 @@ def get_thumbnail_url(self) -> tuple[str, bool]: media_resolution.hits += 1 media_resolution.save() - if self.asset.preview is None or self.asset.preview == '': + if self.asset.preview is None or self.asset.preview == "": self.asset.preview = media.url self.asset.save() - return (f'{media.url}-{media_resolution.width}x{media_resolution.height}', True) + return (f"{media.url}-{media_resolution.width}x{media_resolution.height}", True) def _get_default_url(self) -> str: - return os.getenv('DEFAULT_ASSET_PREVIEW_URL', '') + return os.getenv("DEFAULT_ASSET_PREVIEW_URL", "") def _get_asset_url(self) -> str: return (self.asset and self.asset.preview) or self._get_default_url() @@ -642,7 +649,7 @@ def _get_media(self) -> Optional[Media]: if not self.asset: return None - slug = self.asset.get_thumbnail_name().split('.')[0] + slug = self.asset.get_thumbnail_name().split(".")[0] return Media.objects.filter(slug=slug).first() def _get_media_resolution(self, hash: str) -> Optional[MediaResolution]: @@ -659,30 +666,34 @@ def create(self, delay=600): preview_url = self.asset.get_preview_generation_url() if preview_url is None: - raise Exception('Not able to retrieve a preview generation url') + raise Exception("Not able to retrieve a preview generation url") filename = self.asset.get_thumbnail_name() - url = set_query_parameter(preview_url, 'slug', self.asset.slug) + url = set_query_parameter(preview_url, "slug", self.asset.slug) response = None try: - logger.debug(f'Generating screenshot with URL {url}') - query_string = urlencode({ - 'key': os.environ.get('SCREENSHOT_MACHINE_KEY'), - 'url': url, - 'device': 'desktop', - 'delay': delay, - 'cacheLimit': '0', - 'dimension': '1024x707', - }) - response = requests.get(f'https://api.screenshotmachine.com?{query_string}', stream=True) + logger.debug(f"Generating screenshot with URL {url}") + query_string = urlencode( + { + "key": os.environ.get("SCREENSHOT_MACHINE_KEY"), + "url": url, + "device": "desktop", + "delay": delay, + "cacheLimit": "0", + "dimension": "1024x707", + } + ) + response = 
requests.get(f"https://api.screenshotmachine.com?{query_string}", stream=True) except Exception as e: - raise Exception('Error calling service to generate thumbnail screenshot: ' + str(e)) + raise Exception("Error calling service to generate thumbnail screenshot: " + str(e)) if response.status_code >= 400: - raise Exception('Unhandled error with async_create_asset_thumbnail, the cloud function `screenshots` ' - f'returns status code {response.status_code}') + raise Exception( + "Unhandled error with async_create_asset_thumbnail, the cloud function `screenshots` " + f"returns status code {response.status_code}" + ) storage = Storage() cloud_file = storage.file(screenshots_bucket(), filename) @@ -697,122 +708,124 @@ def create(self, delay=600): def process_asset_config(asset, config): if not config: - raise Exception('No configuration json found') + raise Exception("No configuration json found") - if asset.asset_type in ['QUIZ']: - raise Exception('Can only process exercise and project config objects') + if asset.asset_type in ["QUIZ"]: + raise Exception("Can only process exercise and project config objects") # only replace title and description of English language - if 'title' in config: - if isinstance(config['title'], str): - if (asset.lang in ['', 'us', 'en'] or asset.title == '' or asset.title is None): - asset.title = config['title'] - elif isinstance(config['title'], dict) and asset.lang in config['title']: - asset.title = config['title'][asset.lang] - - if 'description' in config: - if isinstance(config['description'], str): + if "title" in config: + if isinstance(config["title"], str): + if asset.lang in ["", "us", "en"] or asset.title == "" or asset.title is None: + asset.title = config["title"] + elif isinstance(config["title"], dict) and asset.lang in config["title"]: + asset.title = config["title"][asset.lang] + + if "description" in config: + if isinstance(config["description"], str): # avoid replacing descriptions for other languages - if (asset.lang in ['', 'us', 'en'] or asset.description == '' or asset.description is None): - asset.description = config['description'] + if asset.lang in ["", "us", "en"] or asset.description == "" or asset.description is None: + asset.description = config["description"] # there are multiple translations, and the translation exists for this lang - elif isinstance(config['description'], dict) and asset.lang in config['description']: - asset.description = config['description'][asset.lang] + elif isinstance(config["description"], dict) and asset.lang in config["description"]: + asset.description = config["description"][asset.lang] - if 'preview' in config: - asset.preview = config['preview'] + if "preview" in config: + asset.preview = config["preview"] else: - raise Exception('Missing preview URL') + raise Exception("Missing preview URL") - if 'video-id' in config: - asset.solution_video_url = get_video_url(str(config['video-id'])) + if "video-id" in config: + asset.solution_video_url = get_video_url(str(config["video-id"])) asset.with_video = True - if 'video' in config and isinstance(config['video'], dict): - if 'intro' in config['video'] and config['video']['intro'] is not None: - if isinstance(config['video']['intro'], str): - asset.intro_video_url = get_video_url(str(config['video']['intro'])) + if "video" in config and isinstance(config["video"], dict): + if "intro" in config["video"] and config["video"]["intro"] is not None: + if isinstance(config["video"]["intro"], str): + asset.intro_video_url = 
get_video_url(str(config["video"]["intro"])) else: - if 'en' in config['video']['intro']: - config['video']['intro']['us'] = config['video']['intro']['en'] - elif 'us' in config['video']['intro']: - config['video']['intro']['en'] = config['video']['intro']['us'] - - if asset.lang in config['video']['intro']: - print('get_video_url', get_video_url(str(config['video']['intro'][asset.lang]))) - asset.intro_video_url = get_video_url(str(config['video']['intro'][asset.lang])) - - if 'solution' in config['video'] and config['video']['solution'] is not None: - if isinstance(config['video']['solution'], str): - asset.solution_video_url = get_video_url(str(config['video']['solution'])) + if "en" in config["video"]["intro"]: + config["video"]["intro"]["us"] = config["video"]["intro"]["en"] + elif "us" in config["video"]["intro"]: + config["video"]["intro"]["en"] = config["video"]["intro"]["us"] + + if asset.lang in config["video"]["intro"]: + print("get_video_url", get_video_url(str(config["video"]["intro"][asset.lang]))) + asset.intro_video_url = get_video_url(str(config["video"]["intro"][asset.lang])) + + if "solution" in config["video"] and config["video"]["solution"] is not None: + if isinstance(config["video"]["solution"], str): + asset.solution_video_url = get_video_url(str(config["video"]["solution"])) asset.with_video = True asset.with_solutions = True else: - if 'en' in config['video']['solution']: - config['video']['solution']['us'] = config['video']['solution']['en'] - elif 'us' in config['video']['solution']: - config['video']['solution']['en'] = config['video']['solution']['us'] + if "en" in config["video"]["solution"]: + config["video"]["solution"]["us"] = config["video"]["solution"]["en"] + elif "us" in config["video"]["solution"]: + config["video"]["solution"]["en"] = config["video"]["solution"]["us"] - if asset.lang in config['video']['solution']: + if asset.lang in config["video"]["solution"]: asset.with_solutions = True - asset.solution_video_url = get_video_url(str(config['video']['solution'][asset.lang])) + asset.solution_video_url = get_video_url(str(config["video"]["solution"][asset.lang])) asset.with_video = True - if 'duration' in config: - asset.duration = config['duration'] - if 'difficulty' in config: - asset.difficulty = config['difficulty'].upper() - if 'videoSolutions' in config: + if "duration" in config: + asset.duration = config["duration"] + if "difficulty" in config: + asset.difficulty = config["difficulty"].upper() + if "videoSolutions" in config: asset.with_solutions = True asset.with_video = True - if 'solution' in config: - asset.solution_url = config['solution'] + if "solution" in config: + asset.solution_url = config["solution"] asset.with_solutions = True - if 'grading' not in config and ('projectType' not in config or config['projectType'] != 'tutorial'): + if "grading" not in config and ("projectType" not in config or config["projectType"] != "tutorial"): asset.interactive = False asset.gitpod = False - elif 'projectType' in config and config['projectType'] == 'tutorial': - asset.gitpod = 'localhostOnly' not in config or not config['localhostOnly'] + elif "projectType" in config and config["projectType"] == "tutorial": + asset.gitpod = "localhostOnly" not in config or not config["localhostOnly"] asset.interactive = True - elif 'grading' in config and config['grading'] in ['isolated', 'incremental']: - asset.gitpod = 'localhostOnly' not in config or not config['localhostOnly'] + elif "grading" in config and config["grading"] in ["isolated", "incremental"]: 
+ asset.gitpod = "localhostOnly" not in config or not config["localhostOnly"] asset.interactive = True - if 'technologies' in config: + if "technologies" in config: asset.technologies.clear() - for tech_slug in config['technologies']: + for tech_slug in config["technologies"]: technology = AssetTechnology.get_or_create(tech_slug) # if the technology is not multi lang - if technology.lang is not None and technology.lang != '': + if technology.lang is not None and technology.lang != "": # skip technology because it does not match the asset lang - if technology.lang in ['us', 'en'] and asset.lang not in ['us', 'en']: + if technology.lang in ["us", "en"] and asset.lang not in ["us", "en"]: continue elif technology.lang != asset.lang: continue asset.technologies.add(technology) - if 'delivery' in config: - if 'instructions' in config['delivery']: - if isinstance(config['delivery']['instructions'], str): - asset.delivery_instructions = config['delivery']['instructions'] - elif isinstance(config['delivery']['instructions'], - dict) and asset.lang in config['delivery']['instructions']: - asset.delivery_instructions = config['delivery']['instructions'][asset.lang] - - if 'formats' in config['delivery']: - if isinstance(config['delivery']['formats'], list): - asset.delivery_formats = ','.join(config['delivery']['formats']) - elif isinstance(config['delivery']['formats'], str): - asset.delivery_formats = config['delivery']['formats'] - - if 'url' in asset.delivery_formats: - if 'regex' in config['delivery'] and isinstance(config['delivery']['regex'], str): - asset.delivery_regex_url = config['delivery']['regex'].replace('\\\\', '\\') + if "delivery" in config: + if "instructions" in config["delivery"]: + if isinstance(config["delivery"]["instructions"], str): + asset.delivery_instructions = config["delivery"]["instructions"] + elif ( + isinstance(config["delivery"]["instructions"], dict) + and asset.lang in config["delivery"]["instructions"] + ): + asset.delivery_instructions = config["delivery"]["instructions"][asset.lang] + + if "formats" in config["delivery"]: + if isinstance(config["delivery"]["formats"], list): + asset.delivery_formats = ",".join(config["delivery"]["formats"]) + elif isinstance(config["delivery"]["formats"], str): + asset.delivery_formats = config["delivery"]["formats"] + + if "url" in asset.delivery_formats: + if "regex" in config["delivery"] and isinstance(config["delivery"]["regex"], str): + asset.delivery_regex_url = config["delivery"]["regex"].replace("\\\\", "\\") else: - asset.delivery_instructions = '' - asset.delivery_formats = 'url' - asset.delivery_regex_url = '' + asset.delivery_instructions = "" + asset.delivery_formats = "url" + asset.delivery_regex_url = "" asset.save() return asset @@ -821,46 +834,46 @@ def process_asset_config(asset, config): def pull_learnpack_asset(github, asset: Asset, override_meta): if asset.readme_url is None: - raise Exception('Missing Readme URL for asset ' + asset.slug + '.') + raise Exception("Missing Readme URL for asset " + asset.slug + ".") org_name, repo_name, branch_name = asset.get_repo_meta() - repo = github.get_repo(f'{org_name}/{repo_name}') + repo = github.get_repo(f"{org_name}/{repo_name}") lang = asset.lang - if lang is None or lang == '': - raise Exception('Language for this asset is not defined, impossible to retrieve readme') - elif lang in ['us', 'en']: - lang = '' + if lang is None or lang == "": + raise Exception("Language for this asset is not defined, impossible to retrieve readme") + elif lang in ["us", "en"]: + 
lang = "" else: - lang = '.' + lang + lang = "." + lang readme_file = None try: - readme_file = repo.get_contents(f'README{lang}.md') + readme_file = repo.get_contents(f"README{lang}.md") except Exception: - raise Exception(f'Translation on README{lang}.md not found') + raise Exception(f"Translation on README{lang}.md not found") learn_file = None try: - learn_file = repo.get_contents('learn.json') + learn_file = repo.get_contents("learn.json") except Exception: try: - learn_file = repo.get_contents('.learn/learn.json') + learn_file = repo.get_contents(".learn/learn.json") except Exception: try: - learn_file = repo.get_contents('bc.json') + learn_file = repo.get_contents("bc.json") except Exception: try: - learn_file = repo.get_contents('.learn/bc.json') + learn_file = repo.get_contents(".learn/bc.json") except Exception: - raise Exception('No configuration learn.json or bc.json file was found') + raise Exception("No configuration learn.json or bc.json file was found") base64_readme = str(readme_file.content) asset.readme_raw = base64_readme config = None if learn_file is not None and (asset.last_synch_at is None or override_meta): - config = json.loads(learn_file.decoded_content.decode('utf-8')) + config = json.loads(learn_file.decoded_content.decode("utf-8")) asset.config = config asset = process_asset_config(asset, config) @@ -869,22 +882,22 @@ def pull_learnpack_asset(github, asset: Asset, override_meta): def pull_quiz_asset(github, asset: Asset): - logger.debug(f'Sync pull_quiz_asset {asset.slug}') + logger.debug(f"Sync pull_quiz_asset {asset.slug}") if asset.readme_url is None: - raise Exception('Missing Readme URL for quiz ' + asset.slug + '.') + raise Exception("Missing Readme URL for quiz " + asset.slug + ".") org_name, repo_name, branch_name = asset.get_repo_meta() - repo = github.get_repo(f'{org_name}/{repo_name}') + repo = github.get_repo(f"{org_name}/{repo_name}") os.path.basename(asset.readme_url) if branch_name is None: - raise Exception('Quiz URL must include branch name after blob') + raise Exception("Quiz URL must include branch name after blob") - result = re.search(r'\/blob\/([\w\d_\-]+)\/(.+)', asset.readme_url) + result = re.search(r"\/blob\/([\w\d_\-]+)\/(.+)", asset.readme_url) _, file_path = result.groups() - logger.debug(f'Fetching quiz json: {file_path}') + logger.debug(f"Fetching quiz json: {file_path}") encoded_config = get_blob_content(repo, file_path, branch=branch_name).content decoded_config = Asset.decode(encoded_config) @@ -901,21 +914,24 @@ def pull_quiz_asset(github, asset: Asset): # "badges": [ # { "slug": "cybersecurity_guru", "points": 5 } # ] - if 'info' in _config: - _config = _config['info'] - if 'name' in _config and _config['name'] != '': asset.title = _config['name'] + if "info" in _config: + _config = _config["info"] + if "name" in _config and _config["name"] != "": + asset.title = _config["name"] - if 'main' in _config and _config['main']: asset.description = _config['main'] - elif 'description' in _config and _config['description']: asset.description = _config['description'] + if "main" in _config and _config["main"]: + asset.description = _config["main"] + elif "description" in _config and _config["description"]: + asset.description = _config["description"] - if 'technologies' in _config and _config['technologies'] != '': + if "technologies" in _config and _config["technologies"] != "": asset.technologies.clear() - for tech_slug in _config['technologies']: + for tech_slug in _config["technologies"]: technology = 
AssetTechnology.get_or_create(tech_slug) asset.technologies.add(technology) - if 'difficulty' in _config and _config['technologies'] != '': - asset.difficulty = _config['difficulty'] + if "difficulty" in _config and _config["technologies"] != "": + asset.difficulty = _config["difficulty"] asset.save() @@ -929,20 +945,20 @@ def test_asset(asset: Asset): try: validator = None - if asset.asset_type == 'LESSON': + if asset.asset_type == "LESSON": validator = LessonValidator(asset) - elif asset.asset_type == 'EXERCISE': + elif asset.asset_type == "EXERCISE": validator = ExerciseValidator(asset) - elif asset.asset_type == 'PROJECT': + elif asset.asset_type == "PROJECT": validator = ProjectValidator(asset) - elif asset.asset_type == 'QUIZ': + elif asset.asset_type == "QUIZ": validator = QuizValidator(asset) - elif asset.asset_type == 'ARTICLE': + elif asset.asset_type == "ARTICLE": validator = ArticleValidator(asset) validator.validate() - asset.status_text = 'Test Successfull' - asset.test_status = 'OK' + asset.status_text = "Test Successfull" + asset.test_status = "OK" asset.last_test_at = timezone.now() asset.save() return True @@ -955,7 +971,7 @@ def test_asset(asset: Asset): return False except Exception as e: asset.status_text = str(e) - asset.test_status = 'ERROR' + asset.test_status = "ERROR" asset.last_test_at = timezone.now() asset.save() raise e @@ -968,8 +984,8 @@ def scan_asset_originality(asset: Asset): try: credentials = asset.academy.credentialsoriginality except Exception as e: - scan.status_text = 'Error retriving originality credentials for academy: ' + str(e) - scan.status = 'ERROR' + scan.status_text = "Error retriving originality credentials for academy: " + str(e) + scan.status = "ERROR" scan.save() raise Exception(scan.status_text) @@ -979,27 +995,28 @@ def scan_asset_originality(asset: Asset): from bs4 import BeautifulSoup from markdown import markdown - html = markdown(readme['html']) - text = ''.join(BeautifulSoup(html).findAll(text=True)) + + html = markdown(readme["html"]) + text = "".join(BeautifulSoup(html).findAll(text=True)) scanner = OriginalityWrapper(credentials.token) result = scanner.detect(text) if isinstance(result, dict): - scan.success = result['success'] - scan.score_original = result['score']['original'] - scan.score_ai = result['score']['ai'] - scan.credits_used = result['credits_used'] - scan.content = result['content'] + scan.success = result["success"] + scan.score_original = result["score"]["original"] + scan.score_ai = result["score"]["ai"] + scan.credits_used = result["credits_used"] + scan.content = result["content"] else: - raise Exception('Error receiving originality API response payload') + raise Exception("Error receiving originality API response payload") except Exception as e: - scan.status_text = 'Error scanning originality for asset: ' + str(e) - scan.status = 'ERROR' + scan.status_text = "Error scanning originality for asset: " + str(e) + scan.status = "ERROR" scan.save() raise Exception(scan.status_text) - scan.status = 'COMPLETED' + scan.status = "COMPLETED" scan.save() @@ -1008,20 +1025,21 @@ def upload_image_to_bucket(img: AssetImage, asset=None): from ..services.google_cloud import Storage link = img.original_url - if 'github.com' in link and not 'raw=true' in link: - if '?' in link: - link = link + '&raw=true' + if "github.com" in link and not "raw=true" in link: + if "?" 
in link: + link = link + "&raw=true" else: - link = link + '?raw=true' + link = link + "?raw=true" r = requests.get(link, stream=True, timeout=2) if r.status_code != 200: - raise Exception(f'Error downloading image from asset image {img.name}: {link}') + raise Exception(f"Error downloading image from asset image {img.name}: {link}") - found_mime = [mime for mime in allowed_mimes() if r.headers['content-type'] in mime] + found_mime = [mime for mime in allowed_mimes() if r.headers["content-type"] in mime] if len(found_mime) == 0: raise Exception( - f"Skipping image download for {link} in asset image {img.name}, invalid mime {r.headers['content-type']}") + f"Skipping image download for {link} in asset image {img.name}, invalid mime {r.headers['content-type']}" + ) img.hash = hashlib.sha256(r.content).hexdigest() @@ -1035,7 +1053,7 @@ def upload_image_to_bucket(img: AssetImage, asset=None): img.hash = img.hash img.mime = found_mime[0] img.bucket_url = cloud_file.url() - img.download_status = 'OK' + img.download_status = "OK" img.save() if asset: @@ -1045,37 +1063,37 @@ def upload_image_to_bucket(img: AssetImage, asset=None): def add_syllabus_translations(_json: dict): - if not isinstance(_json, dict) or 'days' not in _json or not isinstance(_json['days'], list): + if not isinstance(_json, dict) or "days" not in _json or not isinstance(_json["days"], list): return _json day_count = -1 - for day in _json.get('days', []): + for day in _json.get("days", []): day_count += 1 - for asset_type in ['assignments', 'lessons', 'quizzes', 'replits']: + for asset_type in ["assignments", "lessons", "quizzes", "replits"]: index = -1 if asset_type not in day: continue for ass in day[asset_type]: index += 1 - slug = ass['slug'] if 'slug' in ass else ass + slug = ass["slug"] if "slug" in ass else ass _asset = Asset.get_by_slug(slug) if _asset is not None: - if 'slug' not in ass: - _json['days'][day_count][asset_type][index] = { - 'slug': _asset.slug, - 'title': _asset.title, + if "slug" not in ass: + _json["days"][day_count][asset_type][index] = { + "slug": _asset.slug, + "title": _asset.title, } - _json['days'][day_count][asset_type][index]['translations'] = {} + _json["days"][day_count][asset_type][index]["translations"] = {} for a in _asset.all_translations.all(): - _json['days'][day_count][asset_type][index]['translations'][a.lang] = { - 'slug': a.slug, - 'title': a.title + _json["days"][day_count][asset_type][index]["translations"][a.lang] = { + "slug": a.slug, + "title": a.title, } - if _asset.lang not in _json['days'][day_count][asset_type][index]['translations']: - _json['days'][day_count][asset_type][index]['translations'][_asset.lang] = { - 'slug': _asset.slug, - 'title': _asset.title + if _asset.lang not in _json["days"][day_count][asset_type][index]["translations"]: + _json["days"][day_count][asset_type][index]["translations"][_asset.lang] = { + "slug": _asset.slug, + "title": _asset.title, } return _json diff --git a/breathecode/registry/admin.py b/breathecode/registry/admin.py index 6a877b304..d8c66d8a2 100644 --- a/breathecode/registry/admin.py +++ b/breathecode/registry/admin.py @@ -6,42 +6,68 @@ from breathecode.utils.admin import change_field from breathecode.services.seo import SEOAnalyzer -from .models import (Asset, AssetTechnology, AssetAlias, AssetErrorLog, KeywordCluster, AssetCategory, AssetKeyword, - AssetComment, SEOReport, AssetImage, OriginalityScan, CredentialsOriginality, SyllabusVersionProxy, - ContentVariable) -from .tasks import (async_pull_from_github, async_test_asset, 
async_download_readme_images, async_remove_img_from_cloud, - async_upload_image_to_bucket, async_update_frontend_asset_cache) -from .actions import (get_user_from_github_username, AssetThumbnailGenerator, scan_asset_originality, - add_syllabus_translations, clean_asset_readme, process_asset_config, push_to_github) +from .models import ( + Asset, + AssetTechnology, + AssetAlias, + AssetErrorLog, + KeywordCluster, + AssetCategory, + AssetKeyword, + AssetComment, + SEOReport, + AssetImage, + OriginalityScan, + CredentialsOriginality, + SyllabusVersionProxy, + ContentVariable, +) +from .tasks import ( + async_pull_from_github, + async_test_asset, + async_download_readme_images, + async_remove_img_from_cloud, + async_upload_image_to_bucket, + async_update_frontend_asset_cache, +) +from .actions import ( + get_user_from_github_username, + AssetThumbnailGenerator, + scan_asset_originality, + add_syllabus_translations, + clean_asset_readme, + process_asset_config, + push_to_github, +) logger = logging.getLogger(__name__) lang_flags = { - 'en': '🇺🇸', - 'us': '🇺🇸', - 'ge': '🇩🇪', - 'po': '🇵🇹', - 'es': '🇪🇸', - 'it': '🇮🇹', - None: '', + "en": "🇺🇸", + "us": "🇺🇸", + "ge": "🇩🇪", + "po": "🇵🇹", + "es": "🇪🇸", + "it": "🇮🇹", + None: "", } -@admin.display(description='Add GITPOD flag (to open on gitpod)') +@admin.display(description="Add GITPOD flag (to open on gitpod)") def add_gitpod(modeladmin, request, queryset): queryset.update(gitpod=True) -@admin.display(description='Remove GITPOD flag') +@admin.display(description="Remove GITPOD flag") def remove_gitpod(modeladmin, request, queryset): queryset.update(gitpod=False) -@admin.display(description='Make it an EXTERNAL resource (new window)') +@admin.display(description="Make it an EXTERNAL resource (new window)") def make_external(modeladmin, request, queryset): queryset.update(external=True) -@admin.display(description='Make it an INTERNAL resource (same window)') +@admin.display(description="Make it an INTERNAL resource (same window)") def make_internal(modeladmin, request, queryset): queryset.update(external=False) @@ -53,18 +79,18 @@ def process_config_object(modeladmin, request, queryset): def pull_content_from_github(modeladmin, request, queryset): - queryset.update(sync_status='PENDING', status_text='Starting to sync...') + queryset.update(sync_status="PENDING", status_text="Starting to sync...") assets = queryset.all() for a in assets: try: async_pull_from_github.delay(a.slug, request.user.id) # async_pull_from_github(a.slug, request.user.id) # uncomment for testing purposes except Exception as e: - messages.error(request, a.slug + ': ' + str(e)) + messages.error(request, a.slug + ": " + str(e)) def push_content_to_github(modeladmin, request, queryset): - queryset.update(sync_status='PENDING', status_text='Starting to sync...') + queryset.update(sync_status="PENDING", status_text="Starting to sync...") assets = queryset.all() for a in assets: # try: @@ -75,7 +101,7 @@ def push_content_to_github(modeladmin, request, queryset): def pull_content_from_github_override_meta(modeladmin, request, queryset): - queryset.update(sync_status='PENDING', status_text='Starting to sync...') + queryset.update(sync_status="PENDING", status_text="Starting to sync...") assets = queryset.all() for a in assets: async_pull_from_github.delay(a.slug, request.user.id, override_meta=True) @@ -83,10 +109,10 @@ def pull_content_from_github_override_meta(modeladmin, request, queryset): def async_regenerate_readme(modeladmin, request, queryset): - 
queryset.update(cleaning_status='PENDING', cleaning_status_details='Starting to clean...') + queryset.update(cleaning_status="PENDING", cleaning_status_details="Starting to clean...") assets = queryset.all() for a in assets: - #async_regenerate_asset_readme.delay(a.slug) + # async_regenerate_asset_readme.delay(a.slug) clean_asset_readme(a) @@ -116,15 +142,15 @@ def make_me_owner(modeladmin, request, queryset): def remove_dot_from_slug(modeladmin, request, queryset): assets = queryset.all() for a in assets: - if '.' in a.slug: - a.slug = a.slug.replace('.', '-') + if "." in a.slug: + a.slug = a.slug.replace(".", "-") a.save() def async_generate_thumbnail(modeladmin, request, queryset): assets = queryset.all() for a in assets: - generator = AssetThumbnailGenerator(a, '800', '600') + generator = AssetThumbnailGenerator(a, "800", "600") url, permanent = generator.get_thumbnail_url() @@ -132,26 +158,26 @@ def generate_spanish_translation(modeladmin, request, queryset): assets = queryset.all() for old in assets: old_id = old.id - if old.lang not in ['us', 'en']: - messages.error(request, f'Error in {old.slug}: Can only generate trasnlations for english lessons') + if old.lang not in ["us", "en"]: + messages.error(request, f"Error in {old.slug}: Can only generate trasnlations for english lessons") continue new_asset = old.all_translations.filter( - Q(lang__iexact='es') | Q(slug__iexact=old.slug + '-es') - | Q(slug__iexact=old.slug + '.es')).first() + Q(lang__iexact="es") | Q(slug__iexact=old.slug + "-es") | Q(slug__iexact=old.slug + ".es") + ).first() if new_asset is not None: - messages.error(request, f'Translation to {old.slug} already exists with {new_asset.slug}') - if '.es' in new_asset.slug: - new_asset.slug = new_asset.slug.split('.')[0] + '-es' + messages.error(request, f"Translation to {old.slug} already exists with {new_asset.slug}") + if ".es" in new_asset.slug: + new_asset.slug = new_asset.slug.split(".")[0] + "-es" new_asset.save() else: new_asset = old new_asset.pk = None - new_asset.lang = 'es' - new_asset.sync_status = 'PENDING' - new_asset.status_text = 'Translation generated, waiting for sync' - new_asset.slug = old.slug + '-es' + new_asset.lang = "es" + new_asset.sync_status = "PENDING" + new_asset.status_text = "Translation generated, waiting for sync" + new_asset.slug = old.slug + "-es" new_asset.save() old = Asset.objects.get(id=old_id) @@ -163,15 +189,15 @@ def generate_spanish_translation(modeladmin, request, queryset): def test_asset_integrity(modeladmin, request, queryset): - queryset.update(test_status='PENDING') + queryset.update(test_status="PENDING") assets = queryset.all() for a in assets: try: async_test_asset(a.slug) - #test_asset(a) + # test_asset(a) except Exception as e: - messages.error(request, a.slug + ': ' + str(e)) + messages.error(request, a.slug + ": " + str(e)) def seo_report(modeladmin, request, queryset): @@ -182,7 +208,7 @@ def seo_report(modeladmin, request, queryset): # async_execute_seo_report.delay(a.slug) SEOAnalyzer(a).start() except Exception as e: - messages.error(request, a.slug + ': ' + str(e)) + messages.error(request, a.slug + ": " + str(e)) def originality_report(modeladmin, request, queryset): @@ -194,7 +220,7 @@ def originality_report(modeladmin, request, queryset): scan_asset_originality(a) except Exception as e: raise e - messages.error(request, a.slug + ': ' + str(e)) + messages.error(request, a.slug + ": " + str(e)) def seo_optimization_off(modeladmin, request, queryset): @@ -210,11 +236,11 @@ def load_readme_tasks(modeladmin, 
request, queryset): for a in assets: try: tasks = a.get_tasks() - print(f'{len(tasks)} tasks') + print(f"{len(tasks)} tasks") for t in tasks: - print(t['status'] + ': ' + t['slug'] + '\n') + print(t["status"] + ": " + t["slug"] + "\n") except Exception as e: - messages.error(request, a.slug + ': ' + str(e)) + messages.error(request, a.slug + ": " + str(e)) def download_and_replace_images(modeladmin, request, queryset): @@ -222,9 +248,9 @@ def download_and_replace_images(modeladmin, request, queryset): for a in assets: try: async_download_readme_images.delay(a.slug) - messages.success(request, message='Asset was schedule for download') + messages.success(request, message="Asset was schedule for download") except Exception as e: - messages.error(request, a.slug + ': ' + str(e)) + messages.error(request, a.slug + ": " + str(e)) def reset_4geeks_com_cache(modeladmin, request, queryset): @@ -232,30 +258,30 @@ def reset_4geeks_com_cache(modeladmin, request, queryset): for a in assets: try: async_update_frontend_asset_cache.delay(a.slug) - messages.success(request, message='Assets cache on 4Geeks.com will be updated soon') + messages.success(request, message="Assets cache on 4Geeks.com will be updated soon") except Exception as e: - messages.error(request, a.slug + ': ' + str(e)) + messages.error(request, a.slug + ": " + str(e)) class AssessmentFilter(admin.SimpleListFilter): - title = 'Associated Assessment' + title = "Associated Assessment" - parameter_name = 'has_assessment' + parameter_name = "has_assessment" def lookups(self, request, model_admin): return ( - ('yes', 'Has assessment'), - ('no', 'No assessment'), + ("yes", "Has assessment"), + ("no", "No assessment"), ) def queryset(self, request, queryset): - if self.value() == 'yes': + if self.value() == "yes": return queryset.filter(assessment__isnull=False) - if self.value() == 'no': + if self.value() == "no": return queryset.filter(assessment__isnull=True) @@ -263,79 +289,80 @@ class AssetForm(forms.ModelForm): class Meta: model = Asset - fields = '__all__' + fields = "__all__" def __init__(self, *args, **kwargs): super(AssetForm, self).__init__(*args, **kwargs) - self.fields['all_translations'].queryset = Asset.objects.filter(asset_type=self.instance.asset_type).order_by( - 'slug') # or something else - self.fields['technologies'].queryset = AssetTechnology.objects.all().order_by('slug') # or something else - self.fields['assets_related'].queryset = Asset.objects.exclude(pk=self.instance.pk) + self.fields["all_translations"].queryset = Asset.objects.filter(asset_type=self.instance.asset_type).order_by( + "slug" + ) # or something else + self.fields["technologies"].queryset = AssetTechnology.objects.all().order_by("slug") # or something else + self.fields["assets_related"].queryset = Asset.objects.exclude(pk=self.instance.pk) class WithDescription(admin.SimpleListFilter): - title = 'With description' + title = "With description" - parameter_name = 'has_description' + parameter_name = "has_description" def lookups(self, request, model_admin): return ( - ('yes', 'Has description'), - ('no', 'No description'), + ("yes", "Has description"), + ("no", "No description"), ) def queryset(self, request, queryset): - if self.value() == 'yes': + if self.value() == "yes": return queryset.filter(description__isnull=False) - if self.value() == 'no': + if self.value() == "no": return queryset.filter(description__isnull=True) class IsMarkdown(admin.SimpleListFilter): - title = 'Markdown Based' + title = "Markdown Based" - parameter_name = 'is_markdown' + 
parameter_name = "is_markdown" def lookups(self, request, model_admin): return ( - ('yes', 'Is Markdown'), - ('no', 'Is notebook or other'), + ("yes", "Is Markdown"), + ("no", "Is notebook or other"), ) def queryset(self, request, queryset): - if self.value() == 'yes': - return queryset.filter(readme_url__contains='.md') + if self.value() == "yes": + return queryset.filter(readme_url__contains=".md") - if self.value() == 'no': - return queryset.exclude(readme_url__contains='.md') + if self.value() == "no": + return queryset.exclude(readme_url__contains=".md") class WithKeywordFilter(admin.SimpleListFilter): - title = 'With Keyword' + title = "With Keyword" - parameter_name = 'has_keyword' + parameter_name = "has_keyword" def lookups(self, request, model_admin): return ( - ('yes', 'Has keyword'), - ('no', 'No keyword'), + ("yes", "Has keyword"), + ("no", "No keyword"), ) def queryset(self, request, queryset): - if self.value() == 'yes': + if self.value() == "yes": return queryset.filter(seo_keywords__isnull=False) - if self.value() == 'no': + if self.value() == "no": return queryset.filter(seo_keywords__isnull=True) @@ -343,105 +370,130 @@ def queryset(self, request, queryset): @admin.register(Asset) class AssetAdmin(admin.ModelAdmin): form = AssetForm - search_fields = ['title', 'slug', 'author__email', 'url'] - filter_horizontal = ('technologies', 'all_translations', 'seo_keywords', 'assets_related') - list_display = ('main', 'current_status', 'alias', 'techs', 'url_path') + search_fields = ["title", "slug", "author__email", "url"] + filter_horizontal = ("technologies", "all_translations", "seo_keywords", "assets_related") + list_display = ("main", "current_status", "alias", "techs", "url_path") list_filter = [ - 'asset_type', 'status', 'sync_status', 'test_status', 'lang', 'external', AssessmentFilter, WithKeywordFilter, - WithDescription, IsMarkdown + "asset_type", + "status", + "sync_status", + "test_status", + "lang", + "external", + AssessmentFilter, + WithKeywordFilter, + WithDescription, + IsMarkdown, ] - raw_id_fields = ['author', 'owner', 'superseded_by'] - actions = [ - test_asset_integrity, - add_gitpod, - remove_gitpod, - process_config_object, - pull_content_from_github, - pull_content_from_github_override_meta, - push_content_to_github, - seo_optimization_off, - seo_optimization_on, - seo_report, - originality_report, - make_me_author, - make_me_owner, - get_author_grom_github_usernames, - generate_spanish_translation, - remove_dot_from_slug, - load_readme_tasks, - async_regenerate_readme, - async_generate_thumbnail, - download_and_replace_images, - reset_4geeks_com_cache, - ] + change_field(['DRAFT', 'NOT_STARTED', 'PUBLISHED', 'OPTIMIZED'], name='status') + change_field(['us', 'es'], - name='lang') + raw_id_fields = ["author", "owner", "superseded_by"] + actions = ( + [ + test_asset_integrity, + add_gitpod, + remove_gitpod, + process_config_object, + pull_content_from_github, + pull_content_from_github_override_meta, + push_content_to_github, + seo_optimization_off, + seo_optimization_on, + seo_report, + originality_report, + make_me_author, + make_me_owner, + get_author_grom_github_usernames, + generate_spanish_translation, + remove_dot_from_slug, + load_readme_tasks, + async_regenerate_readme, + async_generate_thumbnail, + download_and_replace_images, + reset_4geeks_com_cache, + ] + + change_field(["DRAFT", "NOT_STARTED", "PUBLISHED", "OPTIMIZED"], name="status") + + change_field(["us", "es"], name="lang") + ) def get_form(self, request, obj=None, **kwargs): - if obj is 
not None and obj.readme is not None and obj.url is not None and 'ipynb' in obj.url and len( - obj.readme) > 2000: - self.exclude = ('readme', 'html') + if ( + obj is not None + and obj.readme is not None + and obj.url is not None + and "ipynb" in obj.url + and len(obj.readme) > 2000 + ): + self.exclude = ("readme", "html") form = super(AssetAdmin, self).get_form(request, obj, **kwargs) return form def url_path(self, obj): - return format_html(f""" + return format_html( + f""" <a rel='noopener noreferrer' target='_blank' href='{obj.url}'>github</a> | <a rel='noopener noreferrer' target='_blank' href='/v1/registry/asset/preview/{obj.slug}'>preview</a> - """) + """ + ) def main(self, obj): - lang = obj.lang.lower() if isinstance(obj.lang, str) else '?' - return format_html(f''' + lang = obj.lang.lower() if isinstance(obj.lang, str) else "?" + return format_html( + f""" <p style="border: 1px solid #BDBDBD; border-radius: 3px; font-size: 10px; padding: 3px;margin: 0;">{lang_flags.get(lang, None)} {obj.asset_type}</p> <p style="margin: 0; padding: 0;">{obj.slug}</p> <p style="color: white; font-size: 10px;margin: 0; padding: 0;">{obj.title}</p> - ''') + """ + ) def current_status(self, obj): colors = { - 'PUBLISHED': 'bg-success', - 'OK': 'bg-success', - 'ERROR': 'bg-error', - 'WARNING': 'bg-warning', - None: 'bg-warning', - 'DRAFT': 'bg-error', - 'OPTIMIZED': 'bg-error', - 'PENDING_TRANSLATION': 'bg-error', - 'PENDING': 'bg-warning', - 'WARNING': 'bg-warning', - 'NOT_STARTED': 'bg-error', - 'NEEDS_RESYNC': 'bg-error', - 'UNLISTED': 'bg-warning', + "PUBLISHED": "bg-success", + "OK": "bg-success", + "ERROR": "bg-error", + "WARNING": "bg-warning", + None: "bg-warning", + "DRAFT": "bg-error", + "OPTIMIZED": "bg-error", + "PENDING_TRANSLATION": "bg-error", + "PENDING": "bg-warning", + "WARNING": "bg-warning", + "NOT_STARTED": "bg-error", + "NEEDS_RESYNC": "bg-error", + "UNLISTED": "bg-warning", } def from_status(s): if s in colors: return colors[s] - return '' + return "" - status = 'No status' + status = "No status" if obj.status_text is not None: - status = re.sub(r'[^\w\._\-]', ' ', obj.status_text) + status = re.sub(r"[^\w\._\-]", " ", obj.status_text) return format_html( f"""<table style='max-width: 200px;'><tr><td style='font-size: 10px !important;'>Publish</td><td style='font-size: 10px !important;'>Synch</td><td style='font-size: 10px !important;'>Test</td></tr> <td><span class='badge {from_status(obj.status)}'>{obj.status}</span></td> <td><span class='badge {from_status(obj.sync_status)}'>{obj.sync_status}</span></td> <td><span class='badge {from_status(obj.test_status)}'>{obj.test_status}</span></td> <tr><td colspan='3'>{status}</td></tr> - </table>""") + </table>""" + ) def techs(self, obj): - return ', '.join([t.slug for t in obj.technologies.all()]) + return ", ".join([t.slug for t in obj.technologies.all()]) def alias(self, obj): aliases = AssetAlias.objects.filter(asset__all_translations__slug=obj.slug) - get_lang = lambda l: l.lower() if isinstance(l, str) else '?' - return format_html(''.join([ - f'<span style="display: inline-block; background: #2d302d; padding: 2px; border-radius: 3px; margin: 2px;">{lang_flags.get(get_lang(a.asset.lang), None)}{a.slug}</span>' - for a in aliases - ])) + get_lang = lambda l: l.lower() if isinstance(l, str) else "?" 
+ return format_html( + "".join( + [ + f'<span style="display: inline-block; background: #2d302d; padding: 2px; border-radius: 3px; margin: 2px;">{lang_flags.get(get_lang(a.asset.lang), None)}{a.slug}</span>' + for a in aliases + ] + ) + ) def merge_technologies(modeladmin, request, queryset): @@ -481,98 +533,102 @@ def slug_to_lower_case(modeladmin, request, queryset): class ParentFilter(admin.SimpleListFilter): - title = 'With Parent' + title = "With Parent" - parameter_name = 'has_parent' + parameter_name = "has_parent" def lookups(self, request, model_admin): return ( - ('parents', 'Parents'), - ('alias', 'Aliases'), + ("parents", "Parents"), + ("alias", "Aliases"), ) def queryset(self, request, queryset): - if self.value() == 'parents': + if self.value() == "parents": return queryset.filter(parent__isnull=True) - if self.value() == 'alias': + if self.value() == "alias": return queryset.filter(parent__isnull=False) class IsDeprecatedFilter(admin.SimpleListFilter): - title = 'Is Deprecated' + title = "Is Deprecated" - parameter_name = 'is_deprecated' + parameter_name = "is_deprecated" def lookups(self, request, model_admin): - return (('true', 'True'), ('false', 'False')) + return (("true", "True"), ("false", "False")) def queryset(self, request, queryset): - if self.value() == 'true': + if self.value() == "true": return queryset.filter(is_deprecated=True) - if self.value() == 'false': + if self.value() == "false": return queryset.filter(is_deprecated=False) class VisibilityFilter(admin.SimpleListFilter): - title = 'Visibility' + title = "Visibility" - parameter_name = 'visibility' + parameter_name = "visibility" def lookups(self, request, model_admin): - return (('PUBLIC', 'Public'), ('UNLISTED', 'Unlisted'), ('PRIVATE', 'Private')) + return (("PUBLIC", "Public"), ("UNLISTED", "Unlisted"), ("PRIVATE", "Private")) def queryset(self, request, queryset): - if self.value() == 'PUBLIC': - return queryset.filter(visibility='PUBLIC') + if self.value() == "PUBLIC": + return queryset.filter(visibility="PUBLIC") - if self.value() == 'UNLISTED': - return queryset.filter(visibility='UNLISTED') + if self.value() == "UNLISTED": + return queryset.filter(visibility="UNLISTED") - if self.value() == 'PRIVATE': - return queryset.filter(visibility='PRIVATE') + if self.value() == "PRIVATE": + return queryset.filter(visibility="PRIVATE") def mark_technologies_as_unlisted(modeladmin, request, queryset): technologies = queryset.all() for technology in technologies: if technology.parent is not None or technology.asset_set.count() < 3: - AssetTechnology.objects.filter(slug=technology.slug).update(visibility='UNLISTED') + AssetTechnology.objects.filter(slug=technology.slug).update(visibility="UNLISTED") @admin.register(AssetTechnology) class AssetTechnologyAdmin(admin.ModelAdmin): - search_fields = ['title', 'slug'] - list_display = ('id', 'get_slug', 'title', 'parent', 'featured_asset', 'description', 'visibility', 'is_deprecated') + search_fields = ["title", "slug"] + list_display = ("id", "get_slug", "title", "parent", "featured_asset", "description", "visibility", "is_deprecated") list_filter = (ParentFilter, VisibilityFilter, IsDeprecatedFilter) - raw_id_fields = ['parent', 'featured_asset'] + raw_id_fields = ["parent", "featured_asset"] actions = (merge_technologies, slug_to_lower_case, mark_technologies_as_unlisted) def get_slug(self, obj): - parent = '' + parent = "" if obj.parent is None: - parent = '🤰🏻' + parent = "🤰🏻" - return format_html(parent + ' ' + f'<a 
href="/admin/registry/assettechnology/{obj.id}/change/">{obj.slug}</a>') + return format_html(parent + " " + f'<a href="/admin/registry/assettechnology/{obj.id}/change/">{obj.slug}</a>') @admin.register(AssetAlias) class AssetAliasAdmin(admin.ModelAdmin): - search_fields = ['slug'] - list_display = ('slug', 'asset', 'created_at') + search_fields = ["slug"] + list_display = ("slug", "asset", "created_at") list_filter = [ - 'asset__asset_type', 'asset__status', 'asset__sync_status', 'asset__test_status', 'asset__lang', - 'asset__external' + "asset__asset_type", + "asset__status", + "asset__sync_status", + "asset__test_status", + "asset__lang", + "asset__external", ] - raw_id_fields = ['asset'] + raw_id_fields = ["asset"] def make_alias(modeladmin, request, queryset): @@ -580,70 +636,74 @@ def make_alias(modeladmin, request, queryset): for e in errors: if e.slug != AssetErrorLog.SLUG_NOT_FOUND: messages.error( - request, - f'Error: You can only make alias for {AssetErrorLog.SLUG_NOT_FOUND} errors and it was {e.slug}') + request, f"Error: You can only make alias for {AssetErrorLog.SLUG_NOT_FOUND} errors and it was {e.slug}" + ) if e.asset is None: messages.error( request, - f'Error: Cannot make alias to fix error {e.slug} ({e.id}), please assign asset before trying to fix it') + f"Error: Cannot make alias to fix error {e.slug} ({e.id}), please assign asset before trying to fix it", + ) else: alias = AssetAlias.objects.filter(slug=e.path).first() if alias is None: alias = AssetAlias(slug=e.path, asset=e.asset) alias.save() - AssetErrorLog.objects.filter(slug=e.slug, - asset_type=e.asset_type, - status='ERROR', - path=e.path, - asset=e.asset).update(status='FIXED') + AssetErrorLog.objects.filter( + slug=e.slug, asset_type=e.asset_type, status="ERROR", path=e.path, asset=e.asset + ).update(status="FIXED") continue if alias.asset.id != e.asset.id: - messages.error(request, f'Slug {e.path} already exists for a different asset {alias.asset.asset_type}') + messages.error(request, f"Slug {e.path} already exists for a different asset {alias.asset.asset_type}") def change_status_fixed_including_similar(modeladmin, request, queryset): errors = queryset.all() for e in errors: - AssetErrorLog.objects.filter(slug=e.slug, asset_type=e.asset_type, path=e.path, - asset=e.asset).update(status='FIXED') + AssetErrorLog.objects.filter(slug=e.slug, asset_type=e.asset_type, path=e.path, asset=e.asset).update( + status="FIXED" + ) def change_status_error_including_similar(modeladmin, request, queryset): errors = queryset.all() for e in errors: - AssetErrorLog.objects.filter(slug=e.slug, asset_type=e.asset_type, path=e.path, - asset=e.asset).update(status='ERROR') + AssetErrorLog.objects.filter(slug=e.slug, asset_type=e.asset_type, path=e.path, asset=e.asset).update( + status="ERROR" + ) def change_status_ignored_including_similar(modeladmin, request, queryset): errors = queryset.all() for e in errors: - AssetErrorLog.objects.filter(slug=e.slug, asset_type=e.asset_type, path=e.path, - asset=e.asset).update(status='IGNORED') + AssetErrorLog.objects.filter(slug=e.slug, asset_type=e.asset_type, path=e.path, asset=e.asset).update( + status="IGNORED" + ) @admin.register(AssetErrorLog) class AssetErrorLogAdmin(admin.ModelAdmin): - search_fields = ['slug', 'user__email', 'user__first_name', 'user__last_name'] - list_display = ('slug', 'path', 'current_status', 'user', 'created_at', 'asset') - raw_id_fields = ['user', 'asset'] - list_filter = ['status', 'slug', 'asset_type'] + search_fields = ["slug", "user__email", 
"user__first_name", "user__last_name"] + list_display = ("slug", "path", "current_status", "user", "created_at", "asset") + raw_id_fields = ["user", "asset"] + list_filter = ["status", "slug", "asset_type"] actions = [ - make_alias, change_status_fixed_including_similar, change_status_error_including_similar, - change_status_ignored_including_similar + make_alias, + change_status_fixed_including_similar, + change_status_error_including_similar, + change_status_ignored_including_similar, ] def current_status(self, obj): colors = { - 'FIXED': 'bg-success', - 'ERROR': 'bg-error', - 'IGNORED': '', - None: 'bg-warning', + "FIXED": "bg-success", + "ERROR": "bg-error", + "IGNORED": "", + None: "bg-warning", } - message = '' + message = "" if obj.status_text is not None: message = html.escape(obj.status_text) return format_html( @@ -653,80 +713,80 @@ def current_status(self, obj): @admin.register(AssetCategory) class AssetCategoryAdmin(admin.ModelAdmin): - search_fields = ['slug', 'title'] - list_display = ('slug', 'title', 'academy') - raw_id_fields = ['academy'] - list_filter = ['academy'] + search_fields = ["slug", "title"] + list_display = ("slug", "title", "academy") + raw_id_fields = ["academy"] + list_filter = ["academy"] class KeywordAssignedFilter(admin.SimpleListFilter): - title = 'With Article' + title = "With Article" - parameter_name = 'has_article' + parameter_name = "has_article" def lookups(self, request, model_admin): return ( - ('yes', 'Has article'), - ('no', 'No article'), + ("yes", "Has article"), + ("no", "No article"), ) def queryset(self, request, queryset): - if self.value() == 'yes': + if self.value() == "yes": return queryset.filter(asset__isnull=False) - if self.value() == 'no': + if self.value() == "no": return queryset.filter(asset__isnull=True) @admin.register(AssetKeyword) class AssetKeywordAdmin(admin.ModelAdmin): - search_fields = ['slug', 'title'] - list_display = ('id', 'slug', 'title', 'cluster') + search_fields = ["slug", "title"] + list_display = ("id", "slug", "title", "cluster") # raw_id_fields = ['academy'] list_filter = [KeywordAssignedFilter] @admin.register(KeywordCluster) class KeywordClusterAdmin(admin.ModelAdmin): - search_fields = ['slug', 'title'] - list_display = ('id', 'slug', 'title', 'academy') - raw_id_fields = ['academy'] - list_filter = ['academy'] + search_fields = ["slug", "title"] + list_display = ("id", "slug", "title", "academy") + raw_id_fields = ["academy"] + list_filter = ["academy"] @admin.register(AssetComment) class AssetCommentAdmin(admin.ModelAdmin): - list_display = ['asset', 'text', 'author'] - search_fields = ('asset__slug', 'author__first_name', 'author__last_name', 'author__email') - raw_id_fields = ['asset', 'author', 'owner'] - list_filter = ['asset__academy'] + list_display = ["asset", "text", "author"] + search_fields = ("asset__slug", "author__first_name", "author__last_name", "author__email") + raw_id_fields = ["asset", "author", "owner"] + list_filter = ["asset__academy"] @admin.register(SEOReport) class SEOReportAdmin(admin.ModelAdmin): - list_display = ['report_type', 'created_at', 'status', 'asset'] - search_fields = ('asset__slug', 'asset__title', 'report_type') - raw_id_fields = ['asset'] - list_filter = ['asset__academy'] + list_display = ["report_type", "created_at", "status", "asset"] + search_fields = ("asset__slug", "asset__title", "report_type") + raw_id_fields = ["asset"] + list_filter = ["asset__academy"] @admin.register(OriginalityScan) class OriginalityScanAdmin(admin.ModelAdmin): - list_display = 
['id', 'created_at', 'status', 'asset', 'success', 'score_original', 'score_ai'] - search_fields = ('asset__slug', 'asset__title', 'report_type') - raw_id_fields = ['asset'] - list_filter = ['asset__academy'] + list_display = ["id", "created_at", "status", "asset", "success", "score_original", "score_ai"] + search_fields = ("asset__slug", "asset__title", "report_type") + raw_id_fields = ["asset"] + list_filter = ["asset__academy"] @admin.register(CredentialsOriginality) class CredentialsOriginalityAdmin(admin.ModelAdmin): - list_display = ['id', 'academy', 'created_at', 'balance', 'last_call_at'] - search_fields = ('academy__slug', 'academy__title') - raw_id_fields = ['academy'] - list_filter = ['academy'] + list_display = ["id", "academy", "created_at", "balance", "last_call_at"] + search_fields = ("academy__slug", "academy__title") + raw_id_fields = ["academy"] + list_filter = ["academy"] def remove_image_from_bucket(modeladmin, request, queryset): @@ -743,17 +803,21 @@ def upload_image_to_bucket(modeladmin, request, queryset): @admin.register(AssetImage) class AssetImageAdmin(admin.ModelAdmin): - list_display = ['name', 'current_status', 'mime', 'related_assets', 'original', 'bucket'] - search_fields = ('name', 'original_url', 'bucket_url', 'assets__slug') - raw_id_fields = ['assets'] - list_filter = ['mime', 'assets__academy', 'download_status'] + list_display = ["name", "current_status", "mime", "related_assets", "original", "bucket"] + search_fields = ("name", "original_url", "bucket_url", "assets__slug") + raw_id_fields = ["assets"] + list_filter = ["mime", "assets__academy", "download_status"] actions = [remove_image_from_bucket] def related_assets(self, obj): - return format_html(''.join([ - f'<a href="/admin/registry/asset/{a.id}/change/" style="display: inline-block; background: #2d302d; padding: 2px; border-radius: 3px; margin: 2px;">{a.slug}</a>' - for a in obj.assets.all() - ])) + return format_html( + "".join( + [ + f'<a href="/admin/registry/asset/{a.id}/change/" style="display: inline-block; background: #2d302d; padding: 2px; border-radius: 3px; margin: 2px;">{a.slug}</a>' + for a in obj.assets.all() + ] + ) + ) return format_html(f'<a href="/admin/registry/asset/{obj.asset.id}/change/">{obj.asset.slug}</a>') def original(self, obj): @@ -764,12 +828,12 @@ def bucket(self, obj): def current_status(self, obj): colors = { - 'DONE': 'bg-success', - 'OK': 'bg-success', - 'PENDING': 'bg-warning', - 'WARNING': 'bg-warning', - 'ERROR': 'bg-error', - 'NEEDS_RESYNC': 'bg-error', + "DONE": "bg-success", + "OK": "bg-success", + "PENDING": "bg-warning", + "WARNING": "bg-warning", + "ERROR": "bg-error", + "NEEDS_RESYNC": "bg-error", } return format_html(f"<span class='badge {colors[obj.download_status]}'>{obj.download_status}</span>") @@ -783,25 +847,25 @@ def add_translations_into_json(modeladmin, request, queryset): @admin.register(SyllabusVersionProxy) class SyllabusVersionAdmin(admin.ModelAdmin): - list_display = ['syllabus', 'version', 'status'] - search_fields = ('syllabus__slug', 'syllabus__name') + list_display = ["syllabus", "version", "status"] + search_fields = ("syllabus__slug", "syllabus__name") # raw_id_fields = ['assets'] - list_filter = ['syllabus'] + list_filter = ["syllabus"] actions = [add_translations_into_json] @admin.register(ContentVariable) class ContentVariablesAdmin(admin.ModelAdmin): - list_display = ['key', 'academy', 'lang', 'real_value'] - search_fields = ('key', ) + list_display = ["key", "academy", "lang", "real_value"] + search_fields = ("key",) # 
raw_id_fields = ['assets'] - list_filter = ['academy'] + list_filter = ["academy"] def real_value(self, obj): _values = { - 'MARKDOWN': obj.value[:200], - 'PYTHON_CODE': 'python code', - 'FETCH_JSON': 'JSON from: ' + obj.value, - 'FETCH_TEXT': 'Fetch from: ' + obj.value, + "MARKDOWN": obj.value[:200], + "PYTHON_CODE": "python code", + "FETCH_JSON": "JSON from: " + obj.value, + "FETCH_TEXT": "Fetch from: " + obj.value, } - return format_html(f'{_values[obj.var_type]}') + return format_html(f"{_values[obj.var_type]}") diff --git a/breathecode/registry/apps.py b/breathecode/registry/apps.py index 9c1491322..b35c3b615 100644 --- a/breathecode/registry/apps.py +++ b/breathecode/registry/apps.py @@ -5,8 +5,8 @@ class RegistryConfig(AppConfig): - name = 'breathecode.registry' + name = "breathecode.registry" def ready(self): - logger.debug('Loading registry.receivers') + logger.debug("Loading registry.receivers") from . import receivers # noqa: F401 diff --git a/breathecode/registry/caches.py b/breathecode/registry/caches.py index bc33f0693..5284cf006 100644 --- a/breathecode/registry/caches.py +++ b/breathecode/registry/caches.py @@ -1,5 +1,14 @@ from breathecode.utils import Cache -from .models import Asset, AssetComment, AssetTechnology, AssetKeyword, KeywordCluster, AssetCategory, ContentVariable, AssetAlias +from .models import ( + Asset, + AssetComment, + AssetTechnology, + AssetKeyword, + KeywordCluster, + AssetCategory, + ContentVariable, + AssetAlias, +) class AssetCache(Cache): diff --git a/breathecode/registry/management/commands/assign_asset_academy.py b/breathecode/registry/management/commands/assign_asset_academy.py index eede45d19..575e437a0 100644 --- a/breathecode/registry/management/commands/assign_asset_academy.py +++ b/breathecode/registry/management/commands/assign_asset_academy.py @@ -7,11 +7,11 @@ class Command(BaseCommand): - help = 'Assign miami as default academy for lessons' + help = "Assign miami as default academy for lessons" def handle(self, *args, **options): - miami = Academy.objects.filter(slug='downtown-miami').first() + miami = Academy.objects.filter(slug="downtown-miami").first() Asset.objects.filter(academy__isnull=True).update(academy=miami) - Asset.objects.filter(status='OK').update(status='PUBLISHED') - Asset.objects.filter(status='UNASSIGNED').update(status='NOT_STARTED') + Asset.objects.filter(status="OK").update(status="PUBLISHED") + Asset.objects.filter(status="UNASSIGNED").update(status="NOT_STARTED") diff --git a/breathecode/registry/management/commands/change_asset_readme_url.py b/breathecode/registry/management/commands/change_asset_readme_url.py index cc9922349..b69725877 100644 --- a/breathecode/registry/management/commands/change_asset_readme_url.py +++ b/breathecode/registry/management/commands/change_asset_readme_url.py @@ -7,16 +7,17 @@ class Command(BaseCommand): - help = 'This command changes every readme_url instance with this structure >>https://raw.githubusercontent.com/breatheco-de/exercise-postcard/main/README.md and turns it in one with this structure >>https://github.com/breatheco-de/exercise-postcard/blob/main/README.md' + help = "This command changes every readme_url instance with this structure >>https://raw.githubusercontent.com/breatheco-de/exercise-postcard/main/README.md and turns it in one with this structure >>https://github.com/breatheco-de/exercise-postcard/blob/main/README.md" def handle(self, *args, **options): - assets = Asset.objects.filter(readme_url__startswith='https://raw.githubusercontent.com/') + assets = 
Asset.objects.filter(readme_url__startswith="https://raw.githubusercontent.com/") for asset in assets: - result = re.findall('^https?://raw.githubusercontent.com/([a-zA-Z-_0-9]+)/([a-zA-Z-_0-9]+)/(.+)$', - asset.readme_url) + result = re.findall( + "^https?://raw.githubusercontent.com/([a-zA-Z-_0-9]+)/([a-zA-Z-_0-9]+)/(.+)$", asset.readme_url + ) if result: - new = f'https://github.com/{result[0][0]}/{result[0][1]}/blob/{result[0][2]}' + new = f"https://github.com/{result[0][0]}/{result[0][1]}/blob/{result[0][2]}" asset.readme_url = new asset.save() diff --git a/breathecode/registry/management/commands/create_asset_thumbnail.py b/breathecode/registry/management/commands/create_asset_thumbnail.py index d005b45d5..b4c1151eb 100644 --- a/breathecode/registry/management/commands/create_asset_thumbnail.py +++ b/breathecode/registry/management/commands/create_asset_thumbnail.py @@ -8,11 +8,11 @@ class Command(BaseCommand): - help = 'Generate preview for assets without preview' + help = "Generate preview for assets without preview" def handle(self, *args, **options): - default_asset_preview_url = os.getenv('DEFAULT_ASSET_PREVIEW_URL', '') + default_asset_preview_url = os.getenv("DEFAULT_ASSET_PREVIEW_URL", "") assets = Asset.objects.filter(Q(preview=None) | Q(preview=default_asset_preview_url)) for a in assets: async_create_asset_thumbnail.delay(a.slug) diff --git a/breathecode/registry/management/commands/import_blog_registry.py b/breathecode/registry/management/commands/import_blog_registry.py index 8d65def8c..9a22486e5 100644 --- a/breathecode/registry/management/commands/import_blog_registry.py +++ b/breathecode/registry/management/commands/import_blog_registry.py @@ -7,98 +7,100 @@ logger = logging.getLogger(__name__) -HOST = 'https://github.com/4GeeksAcademy/blog/blob/main' -ACADEMY_SLUG = 'online' +HOST = "https://github.com/4GeeksAcademy/blog/blob/main" +ACADEMY_SLUG = "online" class Command(BaseCommand): - help = 'Sync exercises and projects from old breathecode' + help = "Sync exercises and projects from old breathecode" def add_arguments(self, parser): parser.add_argument( - '--override', - action='store_true', - help='Delete and add again', + "--override", + action="store_true", + help="Delete and add again", ) def handle(self, *args, **options): def fetch_article(file_name): - _resp = requests.get(f'{HOST}/blog/{file_name}?raw=true') + _resp = requests.get(f"{HOST}/blog/{file_name}?raw=true") if _resp.status_code == 200: return _resp.text - resp = requests.get(f'{HOST}/api/posts.json?raw=true') + resp = requests.get(f"{HOST}/api/posts.json?raw=true") if resp.status_code != 200: - raise Exception('Error fetching article list') + raise Exception("Error fetching article list") academy = Academy.objects.filter(slug=ACADEMY_SLUG).first() if academy is None: - raise Exception('Academy with slug {} not found'.format(ACADEMY_SLUG)) + raise Exception("Academy with slug {} not found".format(ACADEMY_SLUG)) category = {} - category['us'] = AssetCategory.objects.filter(slug='blog-us', academy__slug=ACADEMY_SLUG).first() - if category['us'] is None: - category['us'] = AssetCategory( - slug='blog-us', + category["us"] = AssetCategory.objects.filter(slug="blog-us", academy__slug=ACADEMY_SLUG).first() + if category["us"] is None: + category["us"] = AssetCategory( + slug="blog-us", academy=academy, - title='Blog in English', - lang='us', + title="Blog in English", + lang="us", ) - category['us'].save() + category["us"].save() - category['es'] = AssetCategory.objects.filter(slug='blog-es', 
academy__slug=ACADEMY_SLUG).first() - if category['es'] is None: - category['es'] = AssetCategory( - slug='blog-es', + category["es"] = AssetCategory.objects.filter(slug="blog-es", academy__slug=ACADEMY_SLUG).first() + if category["es"] is None: + category["es"] = AssetCategory( + slug="blog-es", academy=academy, - title='Blog en Español', - lang='es', + title="Blog en Español", + lang="es", ) - category['es'].save() + category["es"].save() all_posts = [] try: all_posts = resp.json() - logger.debug(f'Found {len(all_posts)} posts to import...') + logger.debug(f"Found {len(all_posts)} posts to import...") except requests.exceptions.JSONDecodeError: - logger.error('Error decoding json: {}'.format(resp.text)) + logger.error("Error decoding json: {}".format(resp.text)) owner = User.objects.filter(id=1).first() - results = {'ignored': [], 'created': [], 'replaced': []} + results = {"ignored": [], "created": [], "replaced": []} for post in all_posts: - asset = Asset.objects.filter(slug=post['slug']).first() + asset = Asset.objects.filter(slug=post["slug"]).first() if asset is None: - asset = Asset(slug=post['slug'], - lang=post['lang'], - category=category[post['lang']], - academy=academy, - asset_type='ARTICLE', - status='PUBLISHED', - owner=owner, - readme_url=f"{HOST}/blog/{post['fileName']}?raw=true") - results['created'].append(asset) - print('C', end='') + asset = Asset( + slug=post["slug"], + lang=post["lang"], + category=category[post["lang"]], + academy=academy, + asset_type="ARTICLE", + status="PUBLISHED", + owner=owner, + readme_url=f"{HOST}/blog/{post['fileName']}?raw=true", + ) + results["created"].append(asset) + print("C", end="") else: - results['replaced'].append(asset) - print('R', end='') + results["replaced"].append(asset) + print("R", end="") # replace title and description only - readme = fetch_article(post['fileName']) - asset.title = post['title'] - asset.description = post['excerpt'] or post['subtitle'] + readme = fetch_article(post["fileName"]) + asset.title = post["title"] + asset.description = post["excerpt"] or post["subtitle"] asset.readme_raw = Asset.encode(readme) _data = frontmatter.loads(readme) _frontmatter = _data.metadata - if 'author' in _frontmatter: - if isinstance(_frontmatter['author'], list): - asset.authors_username = ','.join(_frontmatter['author']) + if "author" in _frontmatter: + if isinstance(_frontmatter["author"], list): + asset.authors_username = ",".join(_frontmatter["author"]) else: - asset.authors_username = _frontmatter['author'] + asset.authors_username = _frontmatter["author"] asset.save() async_regenerate_asset_readme.delay(asset.slug) @@ -107,9 +109,9 @@ def fetch_article(file_name): f"Done: {len(results['ignored'])} ignored, {len(results['replaced'])} replaced and {len(results['created'])} created" ) - for _a in results['ignored']: - print('Ignored: {}'.format(_a.slug)) + for _a in results["ignored"]: + print("Ignored: {}".format(_a.slug)) - print('Following posts replaced the description, title and authors_username') - for _a in results['replaced']: - print('Replaced: {}'.format(_a.slug)) + print("Following posts replaced the description, title and authors_username") + for _a in results["replaced"]: + print("Replaced: {}".format(_a.slug)) diff --git a/breathecode/registry/management/commands/set_asset_published_dates.py b/breathecode/registry/management/commands/set_asset_published_dates.py index bb86ac1b4..0b64a6193 100644 --- a/breathecode/registry/management/commands/set_asset_published_dates.py +++ 
b/breathecode/registry/management/commands/set_asset_published_dates.py @@ -6,11 +6,11 @@ class Command(BaseCommand): - help = 'Set published date to legacy articles' + help = "Set published date to legacy articles" def handle(self, *args, **options): - assets = Asset.objects.filter(published_at__isnull=True, status='PUBLISHED', category__isnull=False) + assets = Asset.objects.filter(published_at__isnull=True, status="PUBLISHED", category__isnull=False) for a in assets: a.published_at = a.updated_at a.save() diff --git a/breathecode/registry/management/commands/subscribe_asset_repos.py b/breathecode/registry/management/commands/subscribe_asset_repos.py index e1c9d0c0f..5d93ed2ed 100644 --- a/breathecode/registry/management/commands/subscribe_asset_repos.py +++ b/breathecode/registry/management/commands/subscribe_asset_repos.py @@ -9,23 +9,23 @@ class Command(BaseCommand): - help = 'Set published date to legacy articles' + help = "Set published date to legacy articles" def handle(self, *args, **options): - assets = Asset.objects.filter(status='PUBLISHED', is_auto_subscribed=True, readme_url__isnull=False) + assets = Asset.objects.filter(status="PUBLISHED", is_auto_subscribed=True, readme_url__isnull=False) settings = {} for a in assets: academy_id = str(a.academy.id) username, repo_name, branch_name = a.get_repo_meta() - repo_url = f'https://github.com/{username}/{repo_name}' + repo_url = f"https://github.com/{username}/{repo_name}" subs = RepositorySubscription.objects.filter(repository=repo_url).first() if subs is None: if academy_id not in settings: settings[academy_id] = AcademyAuthSettings.objects.filter(academy__id=a.academy.id).first() if settings[academy_id] is None: - logger.debug(f'Skipping asset {a.slug}, settings not found for academy {academy_id}') + logger.debug(f"Skipping asset {a.slug}, settings not found for academy {academy_id}") continue subs = RepositorySubscription( @@ -38,18 +38,18 @@ def handle(self, *args, **options): if settings[academy_id] is not None: subs = subscribe_repository(subs.id, settings[academy_id]) logger.debug( - f'Successfully created asset subscription with status {subs.status} for {a.slug}, repo {repo_url}' + f"Successfully created asset subscription with status {subs.status} for {a.slug}, repo {repo_url}" ) else: - raise Exception(f'No subscription found for academy {academy_id}') + raise Exception(f"No subscription found for academy {academy_id}") except Exception as e: - subs.status = 'CRITICAL' + subs.status = "CRITICAL" subs.status_message = str(e) subs.save() else: - logger.debug(f'Already subscribed to asset {a.slug} thru repo {repo_url}') + logger.debug(f"Already subscribed to asset {a.slug} thru repo {repo_url}") if not a.is_auto_subscribed: - logger.debug(f'Disabling asset {a.slug}, subscription because auto_subscribe is deactivated') - subs.status = 'DISABLED' + logger.debug(f"Disabling asset {a.slug}, subscription because auto_subscribe is deactivated") + subs.status = "DISABLED" continue diff --git a/breathecode/registry/migrations/0001_initial.py b/breathecode/registry/migrations/0001_initial.py index 7f956d4d1..396221c5f 100644 --- a/breathecode/registry/migrations/0001_initial.py +++ b/breathecode/registry/migrations/0001_initial.py @@ -10,183 +10,253 @@ class Migration(migrations.Migration): initial = True dependencies = [ - ('assessment', '0001_initial'), + ("assessment", "0001_initial"), migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( - name='Asset', + name="Asset", fields=[ - 
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('slug', models.SlugField(max_length=200, unique=True)), - ('title', models.CharField(blank=True, max_length=200)), - ('lang', models.CharField(blank=True, - default=None, - help_text='E.g: en, es, it', - max_length=2, - null=True)), - ('url', models.URLField()), - ('solution_url', models.URLField(blank=True, default=None, null=True)), - ('preview', models.URLField(blank=True, default=None, null=True)), - ('description', models.TextField(blank=True, default=None, null=True)), - ('readme_url', - models.URLField(blank=True, - default=None, - help_text='This will be used to synch from github', - null=True)), - ('intro_video_url', models.URLField(blank=True, default=None, null=True)), - ('solution_video_url', models.URLField(blank=True, default=None, null=True)), - ('readme', models.TextField(blank=True, default=None, null=True)), - ('config', models.JSONField(blank=True, default=None, null=True)), - ('external', - models.BooleanField( - default=False, - help_text= - 'External assets will open in a new window, they are not built using breathecode or learnpack tecnology' - )), - ('interactive', models.BooleanField(default=False)), - ('with_solutions', models.BooleanField(default=False)), - ('with_video', models.BooleanField(default=False)), - ('graded', models.BooleanField(default=False)), - ('gitpod', models.BooleanField(default=False)), - ('duration', models.IntegerField(blank=True, default=None, help_text='In hours', null=True)), - ('difficulty', - models.CharField(blank=True, - choices=[('BEGINNER', 'Beginner'), ('EASY', 'Easy')], - default=None, - max_length=20, - null=True)), - ('visibility', - models.CharField(choices=[('PUBLIC', 'Public'), ('UNLISTED', 'Unlisted'), ('PRIVATE', 'Private')], - default='PUBLIC', - max_length=20)), - ('asset_type', - models.CharField(choices=[('PROJECT', 'Project'), ('EXERCISE', 'Exercise'), ('QUIZ', 'Quiz'), - ('LESSON', 'Lesson'), ('VIDEO', 'Video')], - max_length=20)), - ('status', - models.CharField(choices=[('UNNASIGNED', 'Unnasigned'), ('DRAFT', 'Draft'), ('OK', 'Ok')], - default='DRAFT', - help_text='Related to the publishing of the asset', - max_length=20)), - ('sync_status', - models.CharField(blank=True, - choices=[('PENDING', 'Pending'), ('ERROR', 'Error'), ('OK', 'Ok'), - ('WARNING', 'Warning')], - default=None, - help_text='Internal state automatically set by the system based on sync', - max_length=20, - null=True)), - ('test_status', - models.CharField(blank=True, - choices=[('PENDING', 'Pending'), ('ERROR', 'Error'), ('OK', 'Ok'), - ('WARNING', 'Warning')], - default=None, - help_text='Internal state automatically set by the system based on test', - max_length=20, - null=True)), - ('last_synch_at', models.DateTimeField(blank=True, default=None, null=True)), - ('last_test_at', models.DateTimeField(blank=True, default=None, null=True)), - ('status_text', - models.TextField(blank=True, - default=None, - help_text='Used by the sych status to provide feedback', - null=True)), - ('authors_username', - models.CharField(blank=True, - default=None, - help_text='Github usernames separated by comma', - max_length=80, - null=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('all_translations', - models.ManyToManyField(blank=True, - related_name='_registry_asset_all_translations_+', - to='registry.Asset')), - ('assessment', - models.ForeignKey(blank=True, - default=None, - 
help_text='Connection with the assessment breathecode app', - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to='assessment.assessment')), - ('author', - models.ForeignKey(blank=True, - default=None, - help_text='Who wrote the lesson, not necessarily the owner', - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to=settings.AUTH_USER_MODEL)), - ('owner', - models.ForeignKey(blank=True, - default=None, - help_text='The owner has the github premissions to update the lesson', - null=True, - on_delete=django.db.models.deletion.SET_NULL, - related_name='owned_lessons', - to=settings.AUTH_USER_MODEL)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("slug", models.SlugField(max_length=200, unique=True)), + ("title", models.CharField(blank=True, max_length=200)), + ( + "lang", + models.CharField(blank=True, default=None, help_text="E.g: en, es, it", max_length=2, null=True), + ), + ("url", models.URLField()), + ("solution_url", models.URLField(blank=True, default=None, null=True)), + ("preview", models.URLField(blank=True, default=None, null=True)), + ("description", models.TextField(blank=True, default=None, null=True)), + ( + "readme_url", + models.URLField( + blank=True, default=None, help_text="This will be used to synch from github", null=True + ), + ), + ("intro_video_url", models.URLField(blank=True, default=None, null=True)), + ("solution_video_url", models.URLField(blank=True, default=None, null=True)), + ("readme", models.TextField(blank=True, default=None, null=True)), + ("config", models.JSONField(blank=True, default=None, null=True)), + ( + "external", + models.BooleanField( + default=False, + help_text="External assets will open in a new window, they are not built using breathecode or learnpack tecnology", + ), + ), + ("interactive", models.BooleanField(default=False)), + ("with_solutions", models.BooleanField(default=False)), + ("with_video", models.BooleanField(default=False)), + ("graded", models.BooleanField(default=False)), + ("gitpod", models.BooleanField(default=False)), + ("duration", models.IntegerField(blank=True, default=None, help_text="In hours", null=True)), + ( + "difficulty", + models.CharField( + blank=True, + choices=[("BEGINNER", "Beginner"), ("EASY", "Easy")], + default=None, + max_length=20, + null=True, + ), + ), + ( + "visibility", + models.CharField( + choices=[("PUBLIC", "Public"), ("UNLISTED", "Unlisted"), ("PRIVATE", "Private")], + default="PUBLIC", + max_length=20, + ), + ), + ( + "asset_type", + models.CharField( + choices=[ + ("PROJECT", "Project"), + ("EXERCISE", "Exercise"), + ("QUIZ", "Quiz"), + ("LESSON", "Lesson"), + ("VIDEO", "Video"), + ], + max_length=20, + ), + ), + ( + "status", + models.CharField( + choices=[("UNNASIGNED", "Unnasigned"), ("DRAFT", "Draft"), ("OK", "Ok")], + default="DRAFT", + help_text="Related to the publishing of the asset", + max_length=20, + ), + ), + ( + "sync_status", + models.CharField( + blank=True, + choices=[("PENDING", "Pending"), ("ERROR", "Error"), ("OK", "Ok"), ("WARNING", "Warning")], + default=None, + help_text="Internal state automatically set by the system based on sync", + max_length=20, + null=True, + ), + ), + ( + "test_status", + models.CharField( + blank=True, + choices=[("PENDING", "Pending"), ("ERROR", "Error"), ("OK", "Ok"), ("WARNING", "Warning")], + default=None, + help_text="Internal state automatically set by the system based on test", + max_length=20, + null=True, + ), + ), + ("last_synch_at", 
models.DateTimeField(blank=True, default=None, null=True)), + ("last_test_at", models.DateTimeField(blank=True, default=None, null=True)), + ( + "status_text", + models.TextField( + blank=True, default=None, help_text="Used by the sych status to provide feedback", null=True + ), + ), + ( + "authors_username", + models.CharField( + blank=True, + default=None, + help_text="Github usernames separated by comma", + max_length=80, + null=True, + ), + ), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ( + "all_translations", + models.ManyToManyField( + blank=True, related_name="_registry_asset_all_translations_+", to="registry.Asset" + ), + ), + ( + "assessment", + models.ForeignKey( + blank=True, + default=None, + help_text="Connection with the assessment breathecode app", + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to="assessment.assessment", + ), + ), + ( + "author", + models.ForeignKey( + blank=True, + default=None, + help_text="Who wrote the lesson, not necessarily the owner", + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to=settings.AUTH_USER_MODEL, + ), + ), + ( + "owner", + models.ForeignKey( + blank=True, + default=None, + help_text="The owner has the github premissions to update the lesson", + null=True, + on_delete=django.db.models.deletion.SET_NULL, + related_name="owned_lessons", + to=settings.AUTH_USER_MODEL, + ), + ), ], ), migrations.CreateModel( - name='AssetTechnology', + name="AssetTechnology", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('slug', models.SlugField(max_length=200, unique=True)), - ('title', models.CharField(blank=True, max_length=200)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("slug", models.SlugField(max_length=200, unique=True)), + ("title", models.CharField(blank=True, max_length=200)), ], ), migrations.CreateModel( - name='AssetErrorLog', + name="AssetErrorLog", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('asset_type', - models.CharField(blank=True, - choices=[('PROJECT', 'Project'), ('EXERCISE', 'Exercise'), ('QUIZ', 'Quiz'), - ('LESSON', 'Lesson'), ('VIDEO', 'Video')], - default=None, - max_length=20, - null=True)), - ('slug', models.SlugField(max_length=200)), - ('status', - models.CharField(choices=[('ERROR', 'Error'), ('FIXED', 'Fixed'), ('IGNORED', 'Ignored')], - default='ERROR', - max_length=20)), - ('path', models.CharField(max_length=200)), - ('status_text', - models.TextField(blank=True, - default=None, - help_text='Status details, it may be set automatically if enough error information', - null=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('asset', - models.ForeignKey( - default=None, - help_text= - 'Assign an asset to this error and you will be able to create an alias for it from the django admin bulk actions "create alias"', - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to='registry.asset')), - ('user', - models.ForeignKey(default=None, - help_text='The user how asked for the asset and got the error', - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to=settings.AUTH_USER_MODEL)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "asset_type", + models.CharField( + blank=True, + choices=[ + ("PROJECT", "Project"), + ("EXERCISE", 
"Exercise"), + ("QUIZ", "Quiz"), + ("LESSON", "Lesson"), + ("VIDEO", "Video"), + ], + default=None, + max_length=20, + null=True, + ), + ), + ("slug", models.SlugField(max_length=200)), + ( + "status", + models.CharField( + choices=[("ERROR", "Error"), ("FIXED", "Fixed"), ("IGNORED", "Ignored")], + default="ERROR", + max_length=20, + ), + ), + ("path", models.CharField(max_length=200)), + ( + "status_text", + models.TextField( + blank=True, + default=None, + help_text="Status details, it may be set automatically if enough error information", + null=True, + ), + ), + ("created_at", models.DateTimeField(auto_now_add=True)), + ( + "asset", + models.ForeignKey( + default=None, + help_text='Assign an asset to this error and you will be able to create an alias for it from the django admin bulk actions "create alias"', + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to="registry.asset", + ), + ), + ( + "user", + models.ForeignKey( + default=None, + help_text="The user how asked for the asset and got the error", + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to=settings.AUTH_USER_MODEL, + ), + ), ], ), migrations.CreateModel( - name='AssetAlias', + name="AssetAlias", fields=[ - ('slug', models.SlugField(max_length=200, primary_key=True, serialize=False)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('asset', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='registry.asset')), + ("slug", models.SlugField(max_length=200, primary_key=True, serialize=False)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("asset", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="registry.asset")), ], ), migrations.AddField( - model_name='asset', - name='technologies', - field=models.ManyToManyField(to='registry.AssetTechnology'), + model_name="asset", + name="technologies", + field=models.ManyToManyField(to="registry.AssetTechnology"), ), ] diff --git a/breathecode/registry/migrations/0002_asset_html.py b/breathecode/registry/migrations/0002_asset_html.py index 937bfe263..27c97ae63 100644 --- a/breathecode/registry/migrations/0002_asset_html.py +++ b/breathecode/registry/migrations/0002_asset_html.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('registry', '0001_initial'), + ("registry", "0001_initial"), ] operations = [ migrations.AddField( - model_name='asset', - name='html', + model_name="asset", + name="html", field=models.TextField(blank=True, default=None, null=True), ), ] diff --git a/breathecode/registry/migrations/0003_alter_asset_readme_url.py b/breathecode/registry/migrations/0003_alter_asset_readme_url.py index 8b91668de..cf9a277b3 100644 --- a/breathecode/registry/migrations/0003_alter_asset_readme_url.py +++ b/breathecode/registry/migrations/0003_alter_asset_readme_url.py @@ -6,18 +6,18 @@ class Migration(migrations.Migration): dependencies = [ - ('registry', '0002_asset_html'), + ("registry", "0002_asset_html"), ] operations = [ migrations.AlterField( - model_name='asset', - name='readme_url', + model_name="asset", + name="readme_url", field=models.URLField( blank=True, default=None, - help_text= - 'This will be used to synch only lessons from github. Projects, quizzes and exercises it will try README.md for english and README.lang.md for other langs', - null=True), + help_text="This will be used to synch only lessons from github. 
Projects, quizzes and exercises it will try README.md for english and README.lang.md for other langs", + null=True, + ), ), ] diff --git a/breathecode/registry/migrations/0004_auto_20220415_1515.py b/breathecode/registry/migrations/0004_auto_20220415_1515.py index e03e2c6cc..509fa0cdc 100644 --- a/breathecode/registry/migrations/0004_auto_20220415_1515.py +++ b/breathecode/registry/migrations/0004_auto_20220415_1515.py @@ -6,25 +6,41 @@ class Migration(migrations.Migration): dependencies = [ - ('registry', '0003_alter_asset_readme_url'), + ("registry", "0003_alter_asset_readme_url"), ] operations = [ migrations.AlterField( - model_name='asset', - name='asset_type', - field=models.CharField(choices=[('PROJECT', 'Project'), ('EXERCISE', 'Exercise'), ('QUIZ', 'Quiz'), - ('LESSON', 'Lesson'), ('VIDEO', 'Video'), ('ARTICLE', 'Article')], - max_length=20), + model_name="asset", + name="asset_type", + field=models.CharField( + choices=[ + ("PROJECT", "Project"), + ("EXERCISE", "Exercise"), + ("QUIZ", "Quiz"), + ("LESSON", "Lesson"), + ("VIDEO", "Video"), + ("ARTICLE", "Article"), + ], + max_length=20, + ), ), migrations.AlterField( - model_name='asseterrorlog', - name='asset_type', - field=models.CharField(blank=True, - choices=[('PROJECT', 'Project'), ('EXERCISE', 'Exercise'), ('QUIZ', 'Quiz'), - ('LESSON', 'Lesson'), ('VIDEO', 'Video'), ('ARTICLE', 'Article')], - default=None, - max_length=20, - null=True), + model_name="asseterrorlog", + name="asset_type", + field=models.CharField( + blank=True, + choices=[ + ("PROJECT", "Project"), + ("EXERCISE", "Exercise"), + ("QUIZ", "Quiz"), + ("LESSON", "Lesson"), + ("VIDEO", "Video"), + ("ARTICLE", "Article"), + ], + default=None, + max_length=20, + null=True, + ), ), ] diff --git a/breathecode/registry/migrations/0005_auto_20220502_2210.py b/breathecode/registry/migrations/0005_auto_20220502_2210.py index d7830ce89..e4aa37cdb 100644 --- a/breathecode/registry/migrations/0005_auto_20220502_2210.py +++ b/breathecode/registry/migrations/0005_auto_20220502_2210.py @@ -7,68 +7,74 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0039_syllabus_main_technologies'), - ('registry', '0004_auto_20220415_1515'), + ("admissions", "0039_syllabus_main_technologies"), + ("registry", "0004_auto_20220415_1515"), ] operations = [ migrations.CreateModel( - name='KeywordCluster', + name="KeywordCluster", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('slug', models.SlugField(max_length=200, unique=True)), - ('title', models.CharField(max_length=200)), - ('lang', models.CharField(help_text='E.g: en, es, it', max_length=2)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('academy', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='admissions.academy')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("slug", models.SlugField(max_length=200, unique=True)), + ("title", models.CharField(max_length=200)), + ("lang", models.CharField(help_text="E.g: en, es, it", max_length=2)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("academy", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="admissions.academy")), ], ), migrations.CreateModel( - name='AssetKeyword', + name="AssetKeyword", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, 
serialize=False, verbose_name='ID')), - ('slug', models.SlugField(max_length=200, unique=True)), - ('title', models.CharField(max_length=200)), - ('lang', models.CharField(help_text='E.g: en, es, it', max_length=2)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('academy', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='admissions.academy')), - ('cluster', - models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to='registry.keywordcluster')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("slug", models.SlugField(max_length=200, unique=True)), + ("title", models.CharField(max_length=200)), + ("lang", models.CharField(help_text="E.g: en, es, it", max_length=2)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("academy", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="admissions.academy")), + ( + "cluster", + models.ForeignKey( + blank=True, + default=None, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to="registry.keywordcluster", + ), + ), ], ), migrations.CreateModel( - name='AssetCategory', + name="AssetCategory", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('slug', models.SlugField(max_length=200, unique=True)), - ('title', models.CharField(max_length=200)), - ('lang', models.CharField(help_text='E.g: en, es, it', max_length=2)), - ('description', models.TextField(blank=True, default=None, null=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('academy', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='admissions.academy')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("slug", models.SlugField(max_length=200, unique=True)), + ("title", models.CharField(max_length=200)), + ("lang", models.CharField(help_text="E.g: en, es, it", max_length=2)), + ("description", models.TextField(blank=True, default=None, null=True)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("academy", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="admissions.academy")), ], ), migrations.AddField( - model_name='asset', - name='category', - field=models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to='registry.assetcategory'), + model_name="asset", + name="category", + field=models.ForeignKey( + blank=True, + default=None, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to="registry.assetcategory", + ), ), migrations.AddField( - model_name='asset', - name='seo_keywords', - field=models.ManyToManyField(blank=True, - help_text='Optimize for a max of two keywords per asset', - to='registry.AssetKeyword'), + model_name="asset", + name="seo_keywords", + field=models.ManyToManyField( + blank=True, help_text="Optimize for a max of two keywords per asset", to="registry.AssetKeyword" + ), ), ] diff --git a/breathecode/registry/migrations/0006_auto_20220623_0134.py b/breathecode/registry/migrations/0006_auto_20220623_0134.py index d53a35e40..2244fefb8 100644 --- a/breathecode/registry/migrations/0006_auto_20220623_0134.py +++ 
b/breathecode/registry/migrations/0006_auto_20220623_0134.py @@ -7,31 +7,36 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0041_cohortuser_watching'), - ('registry', '0005_auto_20220502_2210'), + ("admissions", "0041_cohortuser_watching"), + ("registry", "0005_auto_20220502_2210"), ] operations = [ migrations.AddField( - model_name='asset', - name='academy', - field=models.ForeignKey(default=None, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to='admissions.academy'), + model_name="asset", + name="academy", + field=models.ForeignKey( + default=None, null=True, on_delete=django.db.models.deletion.SET_NULL, to="admissions.academy" + ), ), migrations.AddField( - model_name='asset', - name='published_at', + model_name="asset", + name="published_at", field=models.DateTimeField(blank=True, default=None, null=True), ), migrations.AlterField( - model_name='asset', - name='status', - field=models.CharField(choices=[('UNASSIGNED', 'Unassigned'), ('WRITING', 'Writing'), ('DRAFT', 'Draft'), - ('PUBLISHED', 'Published')], - default='DRAFT', - help_text='Related to the publishing of the asset', - max_length=20), + model_name="asset", + name="status", + field=models.CharField( + choices=[ + ("UNASSIGNED", "Unassigned"), + ("WRITING", "Writing"), + ("DRAFT", "Draft"), + ("PUBLISHED", "Published"), + ], + default="DRAFT", + help_text="Related to the publishing of the asset", + max_length=20, + ), ), ] diff --git a/breathecode/registry/migrations/0007_auto_20220623_2057.py b/breathecode/registry/migrations/0007_auto_20220623_2057.py index 1157c3a96..aab2511e6 100644 --- a/breathecode/registry/migrations/0007_auto_20220623_2057.py +++ b/breathecode/registry/migrations/0007_auto_20220623_2057.py @@ -9,43 +9,54 @@ class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ('registry', '0006_auto_20220623_0134'), + ("registry", "0006_auto_20220623_0134"), ] operations = [ migrations.AddField( - model_name='asset', - name='requirements', + model_name="asset", + name="requirements", field=models.TextField( blank=True, default=None, - help_text='Brief for the copywriters, mainly used to describe what this lessons needs to be about', - null=True), + help_text="Brief for the copywriters, mainly used to describe what this lessons needs to be about", + null=True, + ), ), migrations.AlterField( - model_name='asset', - name='status', - field=models.CharField(choices=[('UNASSIGNED', 'Unassigned'), ('WRITING', 'Writing'), ('DRAFT', 'Draft'), - ('PUBLISHED', 'Published')], - default='UNASSIGNED', - help_text='Related to the publishing of the asset', - max_length=20), + model_name="asset", + name="status", + field=models.CharField( + choices=[ + ("UNASSIGNED", "Unassigned"), + ("WRITING", "Writing"), + ("DRAFT", "Draft"), + ("PUBLISHED", "Published"), + ], + default="UNASSIGNED", + help_text="Related to the publishing of the asset", + max_length=20, + ), ), migrations.CreateModel( - name='AssetComment', + name="AssetComment", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('text', models.TextField()), - ('resolved', models.BooleanField(default=False)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('asset', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='registry.asset')), - ('author', - models.ForeignKey(blank=True, - default=None, - help_text='Who wrote the lesson, not necessarily the owner', - null=True, - 
on_delete=django.db.models.deletion.SET_NULL, - to=settings.AUTH_USER_MODEL)), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("text", models.TextField()), + ("resolved", models.BooleanField(default=False)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("asset", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="registry.asset")), + ( + "author", + models.ForeignKey( + blank=True, + default=None, + help_text="Who wrote the lesson, not necessarily the owner", + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to=settings.AUTH_USER_MODEL, + ), + ), ], ), ] diff --git a/breathecode/registry/migrations/0008_auto_20220629_2026.py b/breathecode/registry/migrations/0008_auto_20220629_2026.py index 3b2470a30..d6484983e 100644 --- a/breathecode/registry/migrations/0008_auto_20220629_2026.py +++ b/breathecode/registry/migrations/0008_auto_20220629_2026.py @@ -7,36 +7,36 @@ class Migration(migrations.Migration): dependencies = [ - ('registry', '0007_auto_20220623_2057'), + ("registry", "0007_auto_20220623_2057"), ] operations = [ migrations.AddField( - model_name='assettechnology', - name='description', + model_name="assettechnology", + name="description", field=models.TextField(blank=True, default=None, null=True), ), migrations.AddField( - model_name='assettechnology', - name='featured_asset', - field=models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to='registry.asset'), + model_name="assettechnology", + name="featured_asset", + field=models.ForeignKey( + blank=True, default=None, null=True, on_delete=django.db.models.deletion.SET_NULL, to="registry.asset" + ), ), migrations.AddField( - model_name='assettechnology', - name='icon_url', - field=models.URLField(blank=True, default=None, help_text='Image icon to show on website', null=True), + model_name="assettechnology", + name="icon_url", + field=models.URLField(blank=True, default=None, help_text="Image icon to show on website", null=True), ), migrations.AddField( - model_name='assettechnology', - name='parent', - field=models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to='registry.assettechnology'), + model_name="assettechnology", + name="parent", + field=models.ForeignKey( + blank=True, + default=None, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to="registry.assettechnology", + ), ), ] diff --git a/breathecode/registry/migrations/0009_auto_20220701_1948.py b/breathecode/registry/migrations/0009_auto_20220701_1948.py index 2fe885593..e4ff451f8 100644 --- a/breathecode/registry/migrations/0009_auto_20220701_1948.py +++ b/breathecode/registry/migrations/0009_auto_20220701_1948.py @@ -6,52 +6,59 @@ class Migration(migrations.Migration): dependencies = [ - ('registry', '0008_auto_20220629_2026'), + ("registry", "0008_auto_20220629_2026"), ] operations = [ migrations.AddField( - model_name='assetcategory', - name='visibility', - field=models.CharField(choices=[('PUBLIC', 'Public'), ('UNLISTED', 'Unlisted'), ('PRIVATE', 'Private')], - default='PUBLIC', - max_length=20), + model_name="assetcategory", + name="visibility", + field=models.CharField( + choices=[("PUBLIC", "Public"), ("UNLISTED", "Unlisted"), ("PRIVATE", "Private")], + default="PUBLIC", + max_length=20, + ), ), migrations.AddField( - model_name='assettechnology', - name='lang', - field=models.CharField(blank=True, - default=None, - help_text='Leave blank if 
will be shown in all languages', - max_length=2, - null=True), + model_name="assettechnology", + name="lang", + field=models.CharField( + blank=True, + default=None, + help_text="Leave blank if will be shown in all languages", + max_length=2, + null=True, + ), ), migrations.AddField( - model_name='assettechnology', - name='visibility', - field=models.CharField(choices=[('PUBLIC', 'Public'), ('UNLISTED', 'Unlisted'), ('PRIVATE', 'Private')], - default='PUBLIC', - max_length=20), + model_name="assettechnology", + name="visibility", + field=models.CharField( + choices=[("PUBLIC", "Public"), ("UNLISTED", "Unlisted"), ("PRIVATE", "Private")], + default="PUBLIC", + max_length=20, + ), ), migrations.AddField( - model_name='keywordcluster', - name='is_deprecated', + model_name="keywordcluster", + name="is_deprecated", field=models.BooleanField( default=False, - help_text= - 'Used when you want to stop using this cluster, all previous articles will be kept but no new articles will be assigned' + help_text="Used when you want to stop using this cluster, all previous articles will be kept but no new articles will be assigned", ), ), migrations.AddField( - model_name='keywordcluster', - name='visibility', - field=models.CharField(choices=[('PUBLIC', 'Public'), ('UNLISTED', 'Unlisted'), ('PRIVATE', 'Private')], - default='PUBLIC', - max_length=20), + model_name="keywordcluster", + name="visibility", + field=models.CharField( + choices=[("PUBLIC", "Public"), ("UNLISTED", "Unlisted"), ("PRIVATE", "Private")], + default="PUBLIC", + max_length=20, + ), ), migrations.AlterField( - model_name='asset', - name='url', + model_name="asset", + name="url", field=models.URLField(blank=True, default=None, null=True), ), ] diff --git a/breathecode/registry/migrations/0010_auto_20220812_2033.py b/breathecode/registry/migrations/0010_auto_20220812_2033.py index 24bae9d53..ff2f0d3b6 100644 --- a/breathecode/registry/migrations/0010_auto_20220812_2033.py +++ b/breathecode/registry/migrations/0010_auto_20220812_2033.py @@ -6,67 +6,64 @@ class Migration(migrations.Migration): dependencies = [ - ('registry', '0009_auto_20220701_1948'), + ("registry", "0009_auto_20220701_1948"), ] operations = [ migrations.AddField( - model_name='assetkeyword', - name='difficulty', - field=models.FloatField(blank=True, default=None, help_text='From 1 to 100', null=True), + model_name="assetkeyword", + name="difficulty", + field=models.FloatField(blank=True, default=None, help_text="From 1 to 100", null=True), ), migrations.AddField( - model_name='assetkeyword', - name='expected_monthly_traffic', - field=models.FloatField(blank=True, - default=None, - help_text='You can get this info from Ahrefs or GKP', - null=True), + model_name="assetkeyword", + name="expected_monthly_traffic", + field=models.FloatField( + blank=True, default=None, help_text="You can get this info from Ahrefs or GKP", null=True + ), ), migrations.AddField( - model_name='assetkeyword', - name='is_important', + model_name="assetkeyword", + name="is_important", field=models.BooleanField(default=True), ), migrations.AddField( - model_name='assetkeyword', - name='is_urgent', + model_name="assetkeyword", + name="is_urgent", field=models.BooleanField(default=True), ), migrations.AddField( - model_name='assetkeyword', - name='optimization_rating', - field=models.FloatField(blank=True, default=None, help_text='Automatically filled (1 to 100)', null=True), + model_name="assetkeyword", + name="optimization_rating", + field=models.FloatField(blank=True, default=None, 
help_text="Automatically filled (1 to 100)", null=True), ), migrations.AddField( - model_name='keywordcluster', - name='internal_description', - field=models.TextField(blank=True, - default=None, - help_text='How will be this cluster be used in the SEO strategy', - null=True), + model_name="keywordcluster", + name="internal_description", + field=models.TextField( + blank=True, default=None, help_text="How will be this cluster be used in the SEO strategy", null=True + ), ), migrations.AddField( - model_name='keywordcluster', - name='is_important', + model_name="keywordcluster", + name="is_important", field=models.BooleanField(default=True), ), migrations.AddField( - model_name='keywordcluster', - name='is_urgent', + model_name="keywordcluster", + name="is_urgent", field=models.BooleanField(default=True), ), migrations.AddField( - model_name='keywordcluster', - name='landing_page_url', - field=models.URLField(blank=True, - default=None, - help_text='All keyword articles must point to this page', - null=True), + model_name="keywordcluster", + name="landing_page_url", + field=models.URLField( + blank=True, default=None, help_text="All keyword articles must point to this page", null=True + ), ), migrations.AddField( - model_name='keywordcluster', - name='optimization_rating', - field=models.FloatField(blank=True, default=None, help_text='Automatically filled (1 to 100)', null=True), + model_name="keywordcluster", + name="optimization_rating", + field=models.FloatField(blank=True, default=None, help_text="Automatically filled (1 to 100)", null=True), ), ] diff --git a/breathecode/registry/migrations/0011_auto_20220825_0524.py b/breathecode/registry/migrations/0011_auto_20220825_0524.py index 87435d503..a00ee7c51 100644 --- a/breathecode/registry/migrations/0011_auto_20220825_0524.py +++ b/breathecode/registry/migrations/0011_auto_20220825_0524.py @@ -7,77 +7,84 @@ class Migration(migrations.Migration): dependencies = [ - ('registry', '0010_auto_20220812_2033'), + ("registry", "0010_auto_20220812_2033"), ] operations = [ migrations.RemoveField( - model_name='assetkeyword', - name='optimization_rating', + model_name="assetkeyword", + name="optimization_rating", ), migrations.AddField( - model_name='asset', - name='last_seo_scan_at', + model_name="asset", + name="last_seo_scan_at", field=models.DateTimeField(blank=True, default=None, null=True), ), migrations.AddField( - model_name='asset', - name='optimization_rating', - field=models.FloatField(blank=True, default=None, help_text='Automatically filled (1 to 100)', null=True), + model_name="asset", + name="optimization_rating", + field=models.FloatField(blank=True, default=None, help_text="Automatically filled (1 to 100)", null=True), ), migrations.AddField( - model_name='asset', - name='seo_json_status', + model_name="asset", + name="seo_json_status", field=models.JSONField(blank=True, default=None, null=True), ), migrations.AlterField( - model_name='asset', - name='slug', + model_name="asset", + name="slug", field=models.SlugField( - help_text= - 'Asset must be unique within the entire database because they could be published into 4geeks.com (shared among all academies)', + help_text="Asset must be unique within the entire database because they could be published into 4geeks.com (shared among all academies)", max_length=200, - unique=True), + unique=True, + ), ), migrations.AlterField( - model_name='assetcategory', - name='slug', + model_name="assetcategory", + name="slug", field=models.SlugField(max_length=200), ), migrations.AlterField( - 
model_name='assetkeyword', - name='slug', + model_name="assetkeyword", + name="slug", field=models.SlugField(max_length=200), ), migrations.AlterField( - model_name='assettechnology', - name='slug', - field=models.SlugField(help_text='Technologies are unified within all 4geeks.com', - max_length=200, - unique=True), + model_name="assettechnology", + name="slug", + field=models.SlugField( + help_text="Technologies are unified within all 4geeks.com", max_length=200, unique=True + ), ), migrations.AlterField( - model_name='keywordcluster', - name='slug', + model_name="keywordcluster", + name="slug", field=models.SlugField(max_length=200), ), migrations.CreateModel( - name='SEOReport', + name="SEOReport", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('report_type', - models.CharField(help_text='Must be one of the services.seo.action script names', max_length=40)), - ('status', - models.CharField(choices=[('PENDING', 'Pending'), ('ERROR', 'Error'), ('OK', 'Ok'), - ('WARNING', 'Warning')], - default='PENDING', - help_text='Internal state automatically set by the system', - max_length=20)), - ('log', models.TextField(blank=True, default=None, null=True)), - ('rating', - models.FloatField(blank=True, default=None, help_text='Automatically filled (1 to 100)', null=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('asset', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='registry.asset')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "report_type", + models.CharField(help_text="Must be one of the services.seo.action script names", max_length=40), + ), + ( + "status", + models.CharField( + choices=[("PENDING", "Pending"), ("ERROR", "Error"), ("OK", "Ok"), ("WARNING", "Warning")], + default="PENDING", + help_text="Internal state automatically set by the system", + max_length=20, + ), + ), + ("log", models.TextField(blank=True, default=None, null=True)), + ( + "rating", + models.FloatField(blank=True, default=None, help_text="Automatically filled (1 to 100)", null=True), + ), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("asset", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="registry.asset")), ], ), ] diff --git a/breathecode/registry/migrations/0012_auto_20221004_1943.py b/breathecode/registry/migrations/0012_auto_20221004_1943.py index cc89ad6b5..914d9ec07 100644 --- a/breathecode/registry/migrations/0012_auto_20221004_1943.py +++ b/breathecode/registry/migrations/0012_auto_20221004_1943.py @@ -9,44 +9,48 @@ class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ('registry', '0011_auto_20220825_0524'), + ("registry", "0011_auto_20220825_0524"), ] operations = [ migrations.AddField( - model_name='assetcomment', - name='delivered', + model_name="assetcomment", + name="delivered", field=models.BooleanField(default=False), ), migrations.AddField( - model_name='assetcomment', - name='owner', - field=models.ForeignKey(blank=True, - default=None, - help_text='In charge of resolving the comment or issue', - null=True, - on_delete=django.db.models.deletion.SET_NULL, - related_name='assigned_comments', - to=settings.AUTH_USER_MODEL), + model_name="assetcomment", + name="owner", + field=models.ForeignKey( + blank=True, + default=None, + help_text="In charge of resolving the comment or issue", + null=True, + 
on_delete=django.db.models.deletion.SET_NULL, + related_name="assigned_comments", + to=settings.AUTH_USER_MODEL, + ), ), migrations.AddField( - model_name='assetcomment', - name='priority', + model_name="assetcomment", + name="priority", field=models.SmallIntegerField(default=False), ), migrations.AddField( - model_name='assetcomment', - name='urgent', + model_name="assetcomment", + name="urgent", field=models.BooleanField(default=False), ), migrations.AlterField( - model_name='assetcomment', - name='author', - field=models.ForeignKey(blank=True, - default=None, - help_text='Who wrote the comment or issue', - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to=settings.AUTH_USER_MODEL), + model_name="assetcomment", + name="author", + field=models.ForeignKey( + blank=True, + default=None, + help_text="Who wrote the comment or issue", + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to=settings.AUTH_USER_MODEL, + ), ), ] diff --git a/breathecode/registry/migrations/0013_alter_asset_difficulty.py b/breathecode/registry/migrations/0013_alter_asset_difficulty.py index 709d0dab8..8bf1460af 100644 --- a/breathecode/registry/migrations/0013_alter_asset_difficulty.py +++ b/breathecode/registry/migrations/0013_alter_asset_difficulty.py @@ -6,18 +6,24 @@ class Migration(migrations.Migration): dependencies = [ - ('registry', '0012_auto_20221004_1943'), + ("registry", "0012_auto_20221004_1943"), ] operations = [ migrations.AlterField( - model_name='asset', - name='difficulty', - field=models.CharField(blank=True, - choices=[('HARD', 'Hard'), ('INTERMEDIATE', 'Intermediate'), ('EASY', 'Easy'), - ('BEGINNER', 'Beginner')], - default=None, - max_length=20, - null=True), + model_name="asset", + name="difficulty", + field=models.CharField( + blank=True, + choices=[ + ("HARD", "Hard"), + ("INTERMEDIATE", "Intermediate"), + ("EASY", "Easy"), + ("BEGINNER", "Beginner"), + ], + default=None, + max_length=20, + null=True, + ), ), ] diff --git a/breathecode/registry/migrations/0014_auto_20221006_1957.py b/breathecode/registry/migrations/0014_auto_20221006_1957.py index d0b024c8d..f03210cea 100644 --- a/breathecode/registry/migrations/0014_auto_20221006_1957.py +++ b/breathecode/registry/migrations/0014_auto_20221006_1957.py @@ -6,34 +6,35 @@ class Migration(migrations.Migration): dependencies = [ - ('registry', '0013_alter_asset_difficulty'), + ("registry", "0013_alter_asset_difficulty"), ] operations = [ migrations.AddField( - model_name='asset', - name='cleaning_status', - field=models.CharField(blank=True, - choices=[('PENDING', 'Pending'), ('ERROR', 'Error'), ('OK', 'Ok'), - ('WARNING', 'Warning')], - default='PENDING', - help_text='Internal state automatically set by the system based on cleanup', - max_length=20, - null=True), + model_name="asset", + name="cleaning_status", + field=models.CharField( + blank=True, + choices=[("PENDING", "Pending"), ("ERROR", "Error"), ("OK", "Ok"), ("WARNING", "Warning")], + default="PENDING", + help_text="Internal state automatically set by the system based on cleanup", + max_length=20, + null=True, + ), ), migrations.AddField( - model_name='asset', - name='cleaning_status_details', + model_name="asset", + name="cleaning_status_details", field=models.TextField(blank=True, default=None, null=True), ), migrations.AddField( - model_name='asset', - name='last_cleaning_at', + model_name="asset", + name="last_cleaning_at", field=models.DateTimeField(blank=True, default=None, null=True), ), migrations.AddField( - model_name='asset', - name='readme_raw', + 
model_name="asset", + name="readme_raw", field=models.TextField(blank=True, default=None, null=True), ), ] diff --git a/breathecode/registry/migrations/0015_auto_20221026_0340.py b/breathecode/registry/migrations/0015_auto_20221026_0340.py index 2fc7eb6cd..dd341dcaf 100644 --- a/breathecode/registry/migrations/0015_auto_20221026_0340.py +++ b/breathecode/registry/migrations/0015_auto_20221026_0340.py @@ -6,33 +6,35 @@ class Migration(migrations.Migration): dependencies = [ - ('registry', '0014_auto_20221006_1957'), + ("registry", "0014_auto_20221006_1957"), ] operations = [ migrations.AddField( - model_name='asset', - name='delivery_formats', + model_name="asset", + name="delivery_formats", field=models.CharField( - default='url', - help_text='Comma separated list of supported formats. Eg: url, image/png, application/pdf', - max_length=255), + default="url", + help_text="Comma separated list of supported formats. Eg: url, image/png, application/pdf", + max_length=255, + ), ), migrations.AddField( - model_name='asset', - name='delivery_instructions', - field=models.TextField(blank=True, - default=None, - help_text='Tell students how to deliver this project', - null=True), + model_name="asset", + name="delivery_instructions", + field=models.TextField( + blank=True, default=None, help_text="Tell students how to deliver this project", null=True + ), ), migrations.AddField( - model_name='asset', - name='delivery_regex_url', - field=models.CharField(blank=True, - default=None, - help_text='Will only be used if "url" is the delivery format', - max_length=255, - null=True), + model_name="asset", + name="delivery_regex_url", + field=models.CharField( + blank=True, + default=None, + help_text='Will only be used if "url" is the delivery format', + max_length=255, + null=True, + ), ), ] diff --git a/breathecode/registry/migrations/0016_assettechnology_sort_priority.py b/breathecode/registry/migrations/0016_assettechnology_sort_priority.py index 9a805b8c9..9454d3bb8 100644 --- a/breathecode/registry/migrations/0016_assettechnology_sort_priority.py +++ b/breathecode/registry/migrations/0016_assettechnology_sort_priority.py @@ -6,15 +6,15 @@ class Migration(migrations.Migration): dependencies = [ - ('registry', '0015_auto_20221026_0340'), + ("registry", "0015_auto_20221026_0340"), ] operations = [ migrations.AddField( - model_name='assettechnology', - name='sort_priority', - field=models.IntegerField(choices=[(1, 1), (2, 2), (3, 3)], - default=3, - help_text='Priority to sort technology (1, 2, or 3)'), + model_name="assettechnology", + name="sort_priority", + field=models.IntegerField( + choices=[(1, 1), (2, 2), (3, 3)], default=3, help_text="Priority to sort technology (1, 2, or 3)" + ), ), ] diff --git a/breathecode/registry/migrations/0016_auto_20221029_0531.py b/breathecode/registry/migrations/0016_auto_20221029_0531.py index 8a5423a0f..38e355e06 100644 --- a/breathecode/registry/migrations/0016_auto_20221029_0531.py +++ b/breathecode/registry/migrations/0016_auto_20221029_0531.py @@ -6,74 +6,111 @@ class Migration(migrations.Migration): dependencies = [ - ('registry', '0015_auto_20221026_0340'), + ("registry", "0015_auto_20221026_0340"), ] operations = [ migrations.AlterField( - model_name='asset', - name='cleaning_status', - field=models.CharField(blank=True, - choices=[('PENDING', 'Pending'), ('ERROR', 'Error'), ('OK', 'Ok'), - ('WARNING', 'Warning'), ('NEEDS_RESYNC', 'Needs Resync')], - default='PENDING', - help_text='Internal state automatically set by the system based on cleanup', - 
max_length=20, - null=True), + model_name="asset", + name="cleaning_status", + field=models.CharField( + blank=True, + choices=[ + ("PENDING", "Pending"), + ("ERROR", "Error"), + ("OK", "Ok"), + ("WARNING", "Warning"), + ("NEEDS_RESYNC", "Needs Resync"), + ], + default="PENDING", + help_text="Internal state automatically set by the system based on cleanup", + max_length=20, + null=True, + ), ), migrations.AlterField( - model_name='asset', - name='sync_status', - field=models.CharField(blank=True, - choices=[('PENDING', 'Pending'), ('ERROR', 'Error'), ('OK', 'Ok'), - ('WARNING', 'Warning'), ('NEEDS_RESYNC', 'Needs Resync')], - default=None, - help_text='Internal state automatically set by the system based on sync', - max_length=20, - null=True), + model_name="asset", + name="sync_status", + field=models.CharField( + blank=True, + choices=[ + ("PENDING", "Pending"), + ("ERROR", "Error"), + ("OK", "Ok"), + ("WARNING", "Warning"), + ("NEEDS_RESYNC", "Needs Resync"), + ], + default=None, + help_text="Internal state automatically set by the system based on sync", + max_length=20, + null=True, + ), ), migrations.AlterField( - model_name='asset', - name='test_status', - field=models.CharField(blank=True, - choices=[('PENDING', 'Pending'), ('ERROR', 'Error'), ('OK', 'Ok'), - ('WARNING', 'Warning'), ('NEEDS_RESYNC', 'Needs Resync')], - default=None, - help_text='Internal state automatically set by the system based on test', - max_length=20, - null=True), + model_name="asset", + name="test_status", + field=models.CharField( + blank=True, + choices=[ + ("PENDING", "Pending"), + ("ERROR", "Error"), + ("OK", "Ok"), + ("WARNING", "Warning"), + ("NEEDS_RESYNC", "Needs Resync"), + ], + default=None, + help_text="Internal state automatically set by the system based on test", + max_length=20, + null=True, + ), ), migrations.AlterField( - model_name='seoreport', - name='status', - field=models.CharField(choices=[('PENDING', 'Pending'), ('ERROR', 'Error'), ('OK', 'Ok'), - ('WARNING', 'Warning'), ('NEEDS_RESYNC', 'Needs Resync')], - default='PENDING', - help_text='Internal state automatically set by the system', - max_length=20), + model_name="seoreport", + name="status", + field=models.CharField( + choices=[ + ("PENDING", "Pending"), + ("ERROR", "Error"), + ("OK", "Ok"), + ("WARNING", "Warning"), + ("NEEDS_RESYNC", "Needs Resync"), + ], + default="PENDING", + help_text="Internal state automatically set by the system", + max_length=20, + ), ), migrations.CreateModel( - name='AssetImage', + name="AssetImage", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('name', models.CharField(max_length=150)), - ('mime', models.CharField(max_length=60)), - ('bucket_url', models.URLField(max_length=255)), - ('original_url', models.URLField(max_length=255)), - ('hash', models.CharField(max_length=64)), - ('last_download_at', models.DateTimeField(blank=True, default=None, null=True)), - ('download_details', models.TextField(blank=True, default=None, null=True)), - ('download_status', - models.CharField(blank=True, - choices=[('PENDING', 'Pending'), ('ERROR', 'Error'), ('OK', 'Ok'), - ('WARNING', 'Warning'), ('NEEDS_RESYNC', 'Needs Resync')], - default='PENDING', - help_text='Internal state automatically set by the system based on download', - max_length=20, - null=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('assets', models.ManyToManyField(blank=True, related_name='images', 
to='registry.Asset')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("name", models.CharField(max_length=150)), + ("mime", models.CharField(max_length=60)), + ("bucket_url", models.URLField(max_length=255)), + ("original_url", models.URLField(max_length=255)), + ("hash", models.CharField(max_length=64)), + ("last_download_at", models.DateTimeField(blank=True, default=None, null=True)), + ("download_details", models.TextField(blank=True, default=None, null=True)), + ( + "download_status", + models.CharField( + blank=True, + choices=[ + ("PENDING", "Pending"), + ("ERROR", "Error"), + ("OK", "Ok"), + ("WARNING", "Warning"), + ("NEEDS_RESYNC", "Needs Resync"), + ], + default="PENDING", + help_text="Internal state automatically set by the system based on download", + max_length=20, + null=True, + ), + ), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("assets", models.ManyToManyField(blank=True, related_name="images", to="registry.Asset")), ], ), ] diff --git a/breathecode/registry/migrations/0017_merge_20221031_2045.py b/breathecode/registry/migrations/0017_merge_20221031_2045.py index 1f8c563e0..8db6fc13b 100644 --- a/breathecode/registry/migrations/0017_merge_20221031_2045.py +++ b/breathecode/registry/migrations/0017_merge_20221031_2045.py @@ -6,8 +6,8 @@ class Migration(migrations.Migration): dependencies = [ - ('registry', '0016_assettechnology_sort_priority'), - ('registry', '0016_auto_20221029_0531'), + ("registry", "0016_assettechnology_sort_priority"), + ("registry", "0016_auto_20221029_0531"), ] operations = [] diff --git a/breathecode/registry/migrations/0018_auto_20221101_0235.py b/breathecode/registry/migrations/0018_auto_20221101_0235.py index 2d94ac1ac..51dd5e9fa 100644 --- a/breathecode/registry/migrations/0018_auto_20221101_0235.py +++ b/breathecode/registry/migrations/0018_auto_20221101_0235.py @@ -6,23 +6,23 @@ class Migration(migrations.Migration): dependencies = [ - ('registry', '0017_merge_20221031_2045'), + ("registry", "0017_merge_20221031_2045"), ] operations = [ migrations.AddField( - model_name='assetcategory', - name='auto_generate_previews', + model_name="assetcategory", + name="auto_generate_previews", field=models.BooleanField(default=False), ), migrations.AddField( - model_name='assetcategory', - name='preview_generation_url', - field=models.URLField(blank=True, default=None, help_text='Will be POSTed to get preview image', null=True), + model_name="assetcategory", + name="preview_generation_url", + field=models.URLField(blank=True, default=None, help_text="Will be POSTed to get preview image", null=True), ), migrations.AlterField( - model_name='asset', - name='technologies', - field=models.ManyToManyField(blank=True, to='registry.AssetTechnology'), + model_name="asset", + name="technologies", + field=models.ManyToManyField(blank=True, to="registry.AssetTechnology"), ), ] diff --git a/breathecode/registry/migrations/0019_auto_20221109_0246.py b/breathecode/registry/migrations/0019_auto_20221109_0246.py index e4c234437..28a1b9a8f 100644 --- a/breathecode/registry/migrations/0019_auto_20221109_0246.py +++ b/breathecode/registry/migrations/0019_auto_20221109_0246.py @@ -6,23 +6,23 @@ class Migration(migrations.Migration): dependencies = [ - ('registry', '0018_auto_20221101_0235'), + ("registry", "0018_auto_20221101_0235"), ] operations = [ migrations.AddField( - model_name='asset', - name='is_seo_tracked', + model_name="asset", + 
name="is_seo_tracked", field=models.BooleanField(default=True), ), migrations.AddField( - model_name='seoreport', - name='how_to_fix', + model_name="seoreport", + name="how_to_fix", field=models.TextField(blank=True, default=None, null=True), ), migrations.AlterField( - model_name='seoreport', - name='log', + model_name="seoreport", + name="log", field=models.JSONField(blank=True, default=None, null=True), ), ] diff --git a/breathecode/registry/migrations/0020_asset_readme_updated_at.py b/breathecode/registry/migrations/0020_asset_readme_updated_at.py index 5f6c585bd..f36771054 100644 --- a/breathecode/registry/migrations/0020_asset_readme_updated_at.py +++ b/breathecode/registry/migrations/0020_asset_readme_updated_at.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('registry', '0019_auto_20221109_0246'), + ("registry", "0019_auto_20221109_0246"), ] operations = [ migrations.AddField( - model_name='asset', - name='readme_updated_at', + model_name="asset", + name="readme_updated_at", field=models.DateTimeField(blank=True, default=None, null=True), ), ] diff --git a/breathecode/registry/migrations/0020_auto_20221202_0016.py b/breathecode/registry/migrations/0020_auto_20221202_0016.py index 3f3c7bc6a..783341e0e 100644 --- a/breathecode/registry/migrations/0020_auto_20221202_0016.py +++ b/breathecode/registry/migrations/0020_auto_20221202_0016.py @@ -7,25 +7,27 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0047_merge_20220924_0611'), - ('registry', '0019_auto_20221109_0246'), + ("admissions", "0047_merge_20220924_0611"), + ("registry", "0019_auto_20221109_0246"), ] operations = [ migrations.AlterField( - model_name='asset', - name='academy', - field=models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to='admissions.academy'), + model_name="asset", + name="academy", + field=models.ForeignKey( + blank=True, + default=None, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to="admissions.academy", + ), ), migrations.AlterField( - model_name='asset', - name='category', - field=models.ForeignKey(null=True, - on_delete=django.db.models.deletion.SET_NULL, - to='registry.assetcategory'), + model_name="asset", + name="category", + field=models.ForeignKey( + null=True, on_delete=django.db.models.deletion.SET_NULL, to="registry.assetcategory" + ), ), ] diff --git a/breathecode/registry/migrations/0021_alter_asset_category.py b/breathecode/registry/migrations/0021_alter_asset_category.py index 71fce4d50..fd690e5d3 100644 --- a/breathecode/registry/migrations/0021_alter_asset_category.py +++ b/breathecode/registry/migrations/0021_alter_asset_category.py @@ -7,17 +7,25 @@ class Migration(migrations.Migration): dependencies = [ - ('registry', '0020_auto_20221202_0016'), + ("registry", "0020_auto_20221202_0016"), ] operations = [ migrations.AlterField( - model_name='asset', - name='category', - field=models.ForeignKey(choices=[('PROJECT', 'Project'), ('EXERCISE', 'Exercise'), ('QUIZ', 'Quiz'), - ('LESSON', 'Lesson'), ('VIDEO', 'Video'), ('ARTICLE', 'Article')], - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to='registry.assetcategory'), + model_name="asset", + name="category", + field=models.ForeignKey( + choices=[ + ("PROJECT", "Project"), + ("EXERCISE", "Exercise"), + ("QUIZ", "Quiz"), + ("LESSON", "Lesson"), + ("VIDEO", "Video"), + ("ARTICLE", "Article"), + ], + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to="registry.assetcategory", + ), ), ] 
diff --git a/breathecode/registry/migrations/0022_alter_asset_category.py b/breathecode/registry/migrations/0022_alter_asset_category.py index a5fca0b2e..5eeb16b7f 100644 --- a/breathecode/registry/migrations/0022_alter_asset_category.py +++ b/breathecode/registry/migrations/0022_alter_asset_category.py @@ -7,15 +7,15 @@ class Migration(migrations.Migration): dependencies = [ - ('registry', '0021_alter_asset_category'), + ("registry", "0021_alter_asset_category"), ] operations = [ migrations.AlterField( - model_name='asset', - name='category', - field=models.ForeignKey(null=True, - on_delete=django.db.models.deletion.SET_NULL, - to='registry.assetcategory'), + model_name="asset", + name="category", + field=models.ForeignKey( + null=True, on_delete=django.db.models.deletion.SET_NULL, to="registry.assetcategory" + ), ), ] diff --git a/breathecode/registry/migrations/0023_merge_20230105_0103.py b/breathecode/registry/migrations/0023_merge_20230105_0103.py index 3f6420bac..9f6dccfa5 100644 --- a/breathecode/registry/migrations/0023_merge_20230105_0103.py +++ b/breathecode/registry/migrations/0023_merge_20230105_0103.py @@ -6,8 +6,8 @@ class Migration(migrations.Migration): dependencies = [ - ('registry', '0020_asset_readme_updated_at'), - ('registry', '0022_alter_asset_category'), + ("registry", "0020_asset_readme_updated_at"), + ("registry", "0022_alter_asset_category"), ] operations = [] diff --git a/breathecode/registry/migrations/0024_credentialsoriginality_originalityscan.py b/breathecode/registry/migrations/0024_credentialsoriginality_originalityscan.py index f5927c2db..bdddd1a74 100644 --- a/breathecode/registry/migrations/0024_credentialsoriginality_originalityscan.py +++ b/breathecode/registry/migrations/0024_credentialsoriginality_originalityscan.py @@ -7,43 +7,51 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0052_alter_cohort_kickoff_date'), - ('registry', '0023_merge_20230105_0103'), + ("admissions", "0052_alter_cohort_kickoff_date"), + ("registry", "0023_merge_20230105_0103"), ] operations = [ migrations.CreateModel( - name='OriginalityScan', + name="OriginalityScan", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('success', models.BooleanField(blank=True, default=None, null=True)), - ('score_original', models.FloatField(blank=True, default=None, null=True)), - ('score_ai', models.FloatField(blank=True, default=None, null=True)), - ('credits_used', models.IntegerField(default=0)), - ('content', models.TextField()), - ('status', - models.CharField(choices=[('PENDING', 'Pending'), ('ERROR', 'Error'), ('COMPLETED', 'Completed'), - ('WARNING', 'Warning')], - default='PENDING', - help_text='Scan for originality', - max_length=20)), - ('status_text', models.TextField(blank=True, default=None, null=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('asset', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='registry.asset')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("success", models.BooleanField(blank=True, default=None, null=True)), + ("score_original", models.FloatField(blank=True, default=None, null=True)), + ("score_ai", models.FloatField(blank=True, default=None, null=True)), + ("credits_used", models.IntegerField(default=0)), + ("content", models.TextField()), + ( + "status", + models.CharField( + choices=[ + ("PENDING", "Pending"), + 
("ERROR", "Error"), + ("COMPLETED", "Completed"), + ("WARNING", "Warning"), + ], + default="PENDING", + help_text="Scan for originality", + max_length=20, + ), + ), + ("status_text", models.TextField(blank=True, default=None, null=True)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("asset", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="registry.asset")), ], ), migrations.CreateModel( - name='CredentialsOriginality', + name="CredentialsOriginality", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('token', models.CharField(max_length=255)), - ('balance', models.FloatField(default=0)), - ('usage', models.JSONField(default=[])), - ('last_call_at', models.DateTimeField(blank=True, default=None, null=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('academy', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='admissions.academy')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("token", models.CharField(max_length=255)), + ("balance", models.FloatField(default=0)), + ("usage", models.JSONField(default=[])), + ("last_call_at", models.DateTimeField(blank=True, default=None, null=True)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("academy", models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to="admissions.academy")), ], ), ] diff --git a/breathecode/registry/migrations/0025_alter_credentialsoriginality_usage.py b/breathecode/registry/migrations/0025_alter_credentialsoriginality_usage.py index 21583f9a9..d0fd1b5c6 100644 --- a/breathecode/registry/migrations/0025_alter_credentialsoriginality_usage.py +++ b/breathecode/registry/migrations/0025_alter_credentialsoriginality_usage.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('registry', '0024_credentialsoriginality_originalityscan'), + ("registry", "0024_credentialsoriginality_originalityscan"), ] operations = [ migrations.AlterField( - model_name='credentialsoriginality', - name='usage', + model_name="credentialsoriginality", + name="usage", field=models.JSONField(default=dict), ), ] diff --git a/breathecode/registry/migrations/0026_assetcategory_all_translations.py b/breathecode/registry/migrations/0026_assetcategory_all_translations.py index 911a85340..50f2ca0ab 100644 --- a/breathecode/registry/migrations/0026_assetcategory_all_translations.py +++ b/breathecode/registry/migrations/0026_assetcategory_all_translations.py @@ -6,15 +6,15 @@ class Migration(migrations.Migration): dependencies = [ - ('registry', '0025_alter_credentialsoriginality_usage'), + ("registry", "0025_alter_credentialsoriginality_usage"), ] operations = [ migrations.AddField( - model_name='assetcategory', - name='all_translations', - field=models.ManyToManyField(blank=True, - related_name='_registry_assetcategory_all_translations_+', - to='registry.AssetCategory'), + model_name="assetcategory", + name="all_translations", + field=models.ManyToManyField( + blank=True, related_name="_registry_assetcategory_all_translations_+", to="registry.AssetCategory" + ), ), ] diff --git a/breathecode/registry/migrations/0027_asset_github_commit_hash.py b/breathecode/registry/migrations/0027_asset_github_commit_hash.py index 5de8b18f3..f7986d556 100644 --- 
a/breathecode/registry/migrations/0027_asset_github_commit_hash.py +++ b/breathecode/registry/migrations/0027_asset_github_commit_hash.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('registry', '0026_assetcategory_all_translations'), + ("registry", "0026_assetcategory_all_translations"), ] operations = [ migrations.AddField( - model_name='asset', - name='github_commit_hash', + model_name="asset", + name="github_commit_hash", field=models.CharField(blank=True, default=None, max_length=100, null=True), ), ] diff --git a/breathecode/registry/migrations/0028_alter_asset_status.py b/breathecode/registry/migrations/0028_alter_asset_status.py index 27a2c07bd..77bcab07a 100644 --- a/breathecode/registry/migrations/0028_alter_asset_status.py +++ b/breathecode/registry/migrations/0028_alter_asset_status.py @@ -6,17 +6,24 @@ class Migration(migrations.Migration): dependencies = [ - ('registry', '0027_asset_github_commit_hash'), + ("registry", "0027_asset_github_commit_hash"), ] operations = [ migrations.AlterField( - model_name='asset', - name='status', - field=models.CharField(choices=[('UNASSIGNED', 'Unassigned'), ('WRITING', 'Writing'), ('DRAFT', 'Draft'), - ('OPTIMIZED', 'Optimized'), ('PUBLISHED', 'Published')], - default='UNASSIGNED', - help_text='Related to the publishing of the asset', - max_length=20), + model_name="asset", + name="status", + field=models.CharField( + choices=[ + ("UNASSIGNED", "Unassigned"), + ("WRITING", "Writing"), + ("DRAFT", "Draft"), + ("OPTIMIZED", "Optimized"), + ("PUBLISHED", "Published"), + ], + default="UNASSIGNED", + help_text="Related to the publishing of the asset", + max_length=20, + ), ), ] diff --git a/breathecode/registry/migrations/0029_syllabusversionproxy.py b/breathecode/registry/migrations/0029_syllabusversionproxy.py index 434a34b03..ac277b19b 100644 --- a/breathecode/registry/migrations/0029_syllabusversionproxy.py +++ b/breathecode/registry/migrations/0029_syllabusversionproxy.py @@ -6,19 +6,19 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0056_auto_20230317_1657'), - ('registry', '0028_alter_asset_status'), + ("admissions", "0056_auto_20230317_1657"), + ("registry", "0028_alter_asset_status"), ] operations = [ migrations.CreateModel( - name='SyllabusVersionProxy', + name="SyllabusVersionProxy", fields=[], options={ - 'proxy': True, - 'indexes': [], - 'constraints': [], + "proxy": True, + "indexes": [], + "constraints": [], }, - bases=('admissions.syllabusversion', ), + bases=("admissions.syllabusversion",), ), ] diff --git a/breathecode/registry/migrations/0030_alter_asset_visibility.py b/breathecode/registry/migrations/0030_alter_asset_visibility.py index 7323edf70..945eeba0f 100644 --- a/breathecode/registry/migrations/0030_alter_asset_visibility.py +++ b/breathecode/registry/migrations/0030_alter_asset_visibility.py @@ -6,16 +6,18 @@ class Migration(migrations.Migration): dependencies = [ - ('registry', '0029_syllabusversionproxy'), + ("registry", "0029_syllabusversionproxy"), ] operations = [ migrations.AlterField( - model_name='asset', - name='visibility', - field=models.CharField(choices=[('PUBLIC', 'Public'), ('UNLISTED', 'Unlisted'), ('PRIVATE', 'Private')], - default='PUBLIC', - help_text="It won't be shown on the website unleast the status is published", - max_length=20), + model_name="asset", + name="visibility", + field=models.CharField( + choices=[("PUBLIC", "Public"), ("UNLISTED", "Unlisted"), ("PRIVATE", "Private")], + default="PUBLIC", + help_text="It won't be shown on 
the website unleast the status is published", + max_length=20, + ), ), ] diff --git a/breathecode/registry/migrations/0031_alter_asset_status.py b/breathecode/registry/migrations/0031_alter_asset_status.py index 8306ed1e1..1e2759a27 100644 --- a/breathecode/registry/migrations/0031_alter_asset_status.py +++ b/breathecode/registry/migrations/0031_alter_asset_status.py @@ -6,18 +6,25 @@ class Migration(migrations.Migration): dependencies = [ - ('registry', '0030_alter_asset_visibility'), + ("registry", "0030_alter_asset_visibility"), ] operations = [ migrations.AlterField( - model_name='asset', - name='status', - field=models.CharField(choices=[('NOT_STARTED', 'Not Started'), ('PLANNING', 'Planning'), - ('WRITING', 'Writing'), ('DRAFT', 'Draft'), ('OPTIMIZED', 'Optimized'), - ('PUBLISHED', 'Published')], - default='NOT_STARTED', - help_text='Related to the publishing of the asset', - max_length=20), + model_name="asset", + name="status", + field=models.CharField( + choices=[ + ("NOT_STARTED", "Not Started"), + ("PLANNING", "Planning"), + ("WRITING", "Writing"), + ("DRAFT", "Draft"), + ("OPTIMIZED", "Optimized"), + ("PUBLISHED", "Published"), + ], + default="NOT_STARTED", + help_text="Related to the publishing of the asset", + max_length=20, + ), ), ] diff --git a/breathecode/registry/migrations/0032_contentvariable.py b/breathecode/registry/migrations/0032_contentvariable.py index c9b9398e5..3fbd03eb7 100644 --- a/breathecode/registry/migrations/0032_contentvariable.py +++ b/breathecode/registry/migrations/0032_contentvariable.py @@ -7,44 +7,63 @@ class Migration(migrations.Migration): dependencies = [ - ('admissions', '0058_alter_cohort_available_as_saas'), - ('registry', '0031_alter_asset_status'), + ("admissions", "0058_alter_cohort_available_as_saas"), + ("registry", "0031_alter_asset_status"), ] operations = [ migrations.CreateModel( - name='ContentVariable', + name="ContentVariable", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('key', models.CharField(max_length=100)), - ('value', models.TextField()), - ('default_value', models.TextField()), - ('lang', - models.CharField(blank=True, - default=None, - help_text='Leave blank if will be shown in all languages', - max_length=2, - null=True)), - ('var_type', - models.CharField(choices=[('MARKDOWN', 'Markdown'), ('PYTHON_CODE', 'Python'), - ('FETCH_JSON', 'Fetch json from url'), - ('FETCH_TEXT', 'Fetch text from url')], - default='MARKDOWN', - help_text='Code vars accept python code, Fetch vars accept HTTP GET', - max_length=20)), - ('status', - models.CharField(choices=[('PENDING', 'Pending'), ('ERROR', 'Error'), ('COMPLETED', 'Completed')], - default='PENDING', - help_text='Code vars accept python code, Fetch vars accept HTTP GET', - max_length=20)), - ('status_text', - models.TextField(blank=True, - default=None, - help_text='If the var is code or fetch here will be the error processing info', - null=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), - ('academy', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='admissions.academy')), + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("key", models.CharField(max_length=100)), + ("value", models.TextField()), + ("default_value", models.TextField()), + ( + "lang", + models.CharField( + blank=True, + default=None, + help_text="Leave blank if will be shown in all languages", + 
max_length=2, + null=True, + ), + ), + ( + "var_type", + models.CharField( + choices=[ + ("MARKDOWN", "Markdown"), + ("PYTHON_CODE", "Python"), + ("FETCH_JSON", "Fetch json from url"), + ("FETCH_TEXT", "Fetch text from url"), + ], + default="MARKDOWN", + help_text="Code vars accept python code, Fetch vars accept HTTP GET", + max_length=20, + ), + ), + ( + "status", + models.CharField( + choices=[("PENDING", "Pending"), ("ERROR", "Error"), ("COMPLETED", "Completed")], + default="PENDING", + help_text="Code vars accept python code, Fetch vars accept HTTP GET", + max_length=20, + ), + ), + ( + "status_text", + models.TextField( + blank=True, + default=None, + help_text="If the var is code or fetch here will be the error processing info", + null=True, + ), + ), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("academy", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="admissions.academy")), ], ), ] diff --git a/breathecode/registry/migrations/0033_alter_contentvariable_default_value.py b/breathecode/registry/migrations/0033_alter_contentvariable_default_value.py index 1f8d68843..b603f0d7e 100644 --- a/breathecode/registry/migrations/0033_alter_contentvariable_default_value.py +++ b/breathecode/registry/migrations/0033_alter_contentvariable_default_value.py @@ -6,15 +6,15 @@ class Migration(migrations.Migration): dependencies = [ - ('registry', '0032_contentvariable'), + ("registry", "0032_contentvariable"), ] operations = [ migrations.AlterField( - model_name='contentvariable', - name='default_value', + model_name="contentvariable", + name="default_value", field=models.TextField( - help_text= - 'If the variable type is fetch or code and the processing fails, the default value will be used'), + help_text="If the variable type is fetch or code and the processing fails, the default value will be used" + ), ), ] diff --git a/breathecode/registry/migrations/0034_assettechnology_is_deprecated.py b/breathecode/registry/migrations/0034_assettechnology_is_deprecated.py index 92c35d03f..6be51a5e2 100644 --- a/breathecode/registry/migrations/0034_assettechnology_is_deprecated.py +++ b/breathecode/registry/migrations/0034_assettechnology_is_deprecated.py @@ -6,13 +6,13 @@ class Migration(migrations.Migration): dependencies = [ - ('registry', '0033_alter_contentvariable_default_value'), + ("registry", "0033_alter_contentvariable_default_value"), ] operations = [ migrations.AddField( - model_name='assettechnology', - name='is_deprecated', + model_name="assettechnology", + name="is_deprecated", field=models.BooleanField(default=False), ), ] diff --git a/breathecode/registry/migrations/0035_auto_20231017_0605.py b/breathecode/registry/migrations/0035_auto_20231017_0605.py index 30cfb82ca..a0215c1d9 100644 --- a/breathecode/registry/migrations/0035_auto_20231017_0605.py +++ b/breathecode/registry/migrations/0035_auto_20231017_0605.py @@ -6,167 +6,203 @@ class Migration(migrations.Migration): dependencies = [ - ('registry', '0034_assettechnology_is_deprecated'), + ("registry", "0034_assettechnology_is_deprecated"), ] operations = [ migrations.AlterField( - model_name='asset', - name='asset_type', - field=models.CharField(choices=[('PROJECT', 'Project'), ('EXERCISE', 'Exercise'), ('QUIZ', 'Quiz'), - ('LESSON', 'Lesson'), ('VIDEO', 'Video'), ('ARTICLE', 'Article')], - db_index=True, - max_length=20), - ), - migrations.AlterField( - model_name='asset', - name='authors_username', - field=models.CharField(blank=True, - 
db_index=True, - default=None, - help_text='Github usernames separated by comma', - max_length=80, - null=True), - ), - migrations.AlterField( - model_name='asset', - name='cleaning_status', - field=models.CharField(blank=True, - choices=[('PENDING', 'Pending'), ('ERROR', 'Error'), ('OK', 'Ok'), - ('WARNING', 'Warning'), ('NEEDS_RESYNC', 'Needs Resync')], - db_index=True, - default='PENDING', - help_text='Internal state automatically set by the system based on cleanup', - max_length=20, - null=True), - ), - migrations.AlterField( - model_name='asset', - name='external', + model_name="asset", + name="asset_type", + field=models.CharField( + choices=[ + ("PROJECT", "Project"), + ("EXERCISE", "Exercise"), + ("QUIZ", "Quiz"), + ("LESSON", "Lesson"), + ("VIDEO", "Video"), + ("ARTICLE", "Article"), + ], + db_index=True, + max_length=20, + ), + ), + migrations.AlterField( + model_name="asset", + name="authors_username", + field=models.CharField( + blank=True, + db_index=True, + default=None, + help_text="Github usernames separated by comma", + max_length=80, + null=True, + ), + ), + migrations.AlterField( + model_name="asset", + name="cleaning_status", + field=models.CharField( + blank=True, + choices=[ + ("PENDING", "Pending"), + ("ERROR", "Error"), + ("OK", "Ok"), + ("WARNING", "Warning"), + ("NEEDS_RESYNC", "Needs Resync"), + ], + db_index=True, + default="PENDING", + help_text="Internal state automatically set by the system based on cleanup", + max_length=20, + null=True, + ), + ), + migrations.AlterField( + model_name="asset", + name="external", field=models.BooleanField( db_index=True, default=False, - help_text= - 'External assets will open in a new window, they are not built using breathecode or learnpack tecnology' + help_text="External assets will open in a new window, they are not built using breathecode or learnpack tecnology", ), ), migrations.AlterField( - model_name='asset', - name='github_commit_hash', + model_name="asset", + name="github_commit_hash", field=models.CharField(blank=True, db_index=True, default=None, max_length=100, null=True), ), migrations.AlterField( - model_name='asset', - name='graded', + model_name="asset", + name="graded", field=models.BooleanField(db_index=True, default=False), ), migrations.AlterField( - model_name='asset', - name='interactive', + model_name="asset", + name="interactive", field=models.BooleanField(db_index=True, default=False), ), migrations.AlterField( - model_name='asset', - name='is_seo_tracked', + model_name="asset", + name="is_seo_tracked", field=models.BooleanField(db_index=True, default=True), ), migrations.AlterField( - model_name='asset', - name='lang', - field=models.CharField(blank=True, - db_index=True, - default=None, - help_text='E.g: en, es, it', - max_length=2, - null=True), + model_name="asset", + name="lang", + field=models.CharField( + blank=True, db_index=True, default=None, help_text="E.g: en, es, it", max_length=2, null=True + ), ), migrations.AlterField( - model_name='asset', - name='last_cleaning_at', + model_name="asset", + name="last_cleaning_at", field=models.DateTimeField(blank=True, db_index=True, default=None, null=True), ), migrations.AlterField( - model_name='asset', - name='last_seo_scan_at', + model_name="asset", + name="last_seo_scan_at", field=models.DateTimeField(blank=True, db_index=True, default=None, null=True), ), migrations.AlterField( - model_name='asset', - name='last_synch_at', + model_name="asset", + name="last_synch_at", field=models.DateTimeField(blank=True, db_index=True, default=None, 
null=True), ), migrations.AlterField( - model_name='asset', - name='last_test_at', + model_name="asset", + name="last_test_at", field=models.DateTimeField(blank=True, db_index=True, default=None, null=True), ), migrations.AlterField( - model_name='asset', - name='published_at', + model_name="asset", + name="published_at", field=models.DateTimeField(blank=True, db_index=True, default=None, null=True), ), migrations.AlterField( - model_name='asset', - name='readme_updated_at', + model_name="asset", + name="readme_updated_at", field=models.DateTimeField(blank=True, db_index=True, default=None, null=True), ), migrations.AlterField( - model_name='asset', - name='status', - field=models.CharField(choices=[('NOT_STARTED', 'Not Started'), ('PLANNING', 'Planning'), - ('WRITING', 'Writing'), ('DRAFT', 'Draft'), ('OPTIMIZED', 'Optimized'), - ('PUBLISHED', 'Published')], - db_index=True, - default='NOT_STARTED', - help_text='Related to the publishing of the asset', - max_length=20), - ), - migrations.AlterField( - model_name='asset', - name='sync_status', - field=models.CharField(blank=True, - choices=[('PENDING', 'Pending'), ('ERROR', 'Error'), ('OK', 'Ok'), - ('WARNING', 'Warning'), ('NEEDS_RESYNC', 'Needs Resync')], - db_index=True, - default=None, - help_text='Internal state automatically set by the system based on sync', - max_length=20, - null=True), - ), - migrations.AlterField( - model_name='asset', - name='test_status', - field=models.CharField(blank=True, - choices=[('PENDING', 'Pending'), ('ERROR', 'Error'), ('OK', 'Ok'), - ('WARNING', 'Warning'), ('NEEDS_RESYNC', 'Needs Resync')], - db_index=True, - default=None, - help_text='Internal state automatically set by the system based on test', - max_length=20, - null=True), - ), - migrations.AlterField( - model_name='asset', - name='title', + model_name="asset", + name="status", + field=models.CharField( + choices=[ + ("NOT_STARTED", "Not Started"), + ("PLANNING", "Planning"), + ("WRITING", "Writing"), + ("DRAFT", "Draft"), + ("OPTIMIZED", "Optimized"), + ("PUBLISHED", "Published"), + ], + db_index=True, + default="NOT_STARTED", + help_text="Related to the publishing of the asset", + max_length=20, + ), + ), + migrations.AlterField( + model_name="asset", + name="sync_status", + field=models.CharField( + blank=True, + choices=[ + ("PENDING", "Pending"), + ("ERROR", "Error"), + ("OK", "Ok"), + ("WARNING", "Warning"), + ("NEEDS_RESYNC", "Needs Resync"), + ], + db_index=True, + default=None, + help_text="Internal state automatically set by the system based on sync", + max_length=20, + null=True, + ), + ), + migrations.AlterField( + model_name="asset", + name="test_status", + field=models.CharField( + blank=True, + choices=[ + ("PENDING", "Pending"), + ("ERROR", "Error"), + ("OK", "Ok"), + ("WARNING", "Warning"), + ("NEEDS_RESYNC", "Needs Resync"), + ], + db_index=True, + default=None, + help_text="Internal state automatically set by the system based on test", + max_length=20, + null=True, + ), + ), + migrations.AlterField( + model_name="asset", + name="title", field=models.CharField(blank=True, db_index=True, max_length=200), ), migrations.AlterField( - model_name='asset', - name='visibility', - field=models.CharField(choices=[('PUBLIC', 'Public'), ('UNLISTED', 'Unlisted'), ('PRIVATE', 'Private')], - db_index=True, - default='PUBLIC', - help_text="It won't be shown on the website unleast the status is published", - max_length=20), + model_name="asset", + name="visibility", + field=models.CharField( + choices=[("PUBLIC", "Public"), ("UNLISTED", 
"Unlisted"), ("PRIVATE", "Private")], + db_index=True, + default="PUBLIC", + help_text="It won't be shown on the website unleast the status is published", + max_length=20, + ), ), migrations.AlterField( - model_name='asset', - name='with_solutions', + model_name="asset", + name="with_solutions", field=models.BooleanField(db_index=True, default=False), ), migrations.AlterField( - model_name='asset', - name='with_video', + model_name="asset", + name="with_video", field=models.BooleanField(db_index=True, default=False), ), ] diff --git a/breathecode/registry/migrations/0036_auto_20231110_1530.py b/breathecode/registry/migrations/0036_auto_20231110_1530.py index fbf33da13..ad668612d 100644 --- a/breathecode/registry/migrations/0036_auto_20231110_1530.py +++ b/breathecode/registry/migrations/0036_auto_20231110_1530.py @@ -6,30 +6,34 @@ class Migration(migrations.Migration): dependencies = [ - ('registry', '0035_auto_20231017_0605'), + ("registry", "0035_auto_20231017_0605"), ] operations = [ migrations.AlterField( - model_name='assettechnology', - name='is_deprecated', - field=models.BooleanField(default=True, - help_text='If False, the frontend will generate a landing for this technology.'), + model_name="assettechnology", + name="is_deprecated", + field=models.BooleanField( + default=True, help_text="If False, the frontend will generate a landing for this technology." + ), ), migrations.AlterField( - model_name='assettechnology', - name='sort_priority', + model_name="assettechnology", + name="sort_priority", field=models.IntegerField( choices=[(1, 1), (2, 2), (3, 3)], default=3, - help_text='Priority to sort technology (1, 2, or 3): One is more important and goes first than three.'), + help_text="Priority to sort technology (1, 2, or 3): One is more important and goes first than three.", + ), ), migrations.AlterField( - model_name='assettechnology', - name='visibility', - field=models.CharField(choices=[('PUBLIC', 'Public'), ('UNLISTED', 'Unlisted'), ('PRIVATE', 'Private')], - default='PUBLIC', - help_text='Only public techs will be returned by default', - max_length=20), + model_name="assettechnology", + name="visibility", + field=models.CharField( + choices=[("PUBLIC", "Public"), ("UNLISTED", "Unlisted"), ("PRIVATE", "Private")], + default="PUBLIC", + help_text="Only public techs will be returned by default", + max_length=20, + ), ), ] diff --git a/breathecode/registry/migrations/0037_auto_20231110_1847.py b/breathecode/registry/migrations/0037_auto_20231110_1847.py index 818be67de..046d0e74c 100644 --- a/breathecode/registry/migrations/0037_auto_20231110_1847.py +++ b/breathecode/registry/migrations/0037_auto_20231110_1847.py @@ -6,24 +6,25 @@ class Migration(migrations.Migration): dependencies = [ - ('registry', '0036_auto_20231110_1530'), + ("registry", "0036_auto_20231110_1530"), ] operations = [ migrations.AlterField( - model_name='assettechnology', - name='is_deprecated', - field=models.BooleanField(default=False, - help_text='If True, the technology will be programmatically deleted.'), + model_name="assettechnology", + name="is_deprecated", + field=models.BooleanField( + default=False, help_text="If True, the technology will be programmatically deleted." 
+ ), ), migrations.AlterField( - model_name='assettechnology', - name='visibility', + model_name="assettechnology", + name="visibility", field=models.CharField( - choices=[('PUBLIC', 'Public'), ('UNLISTED', 'Unlisted'), ('PRIVATE', 'Private')], - default='UNLISTED', - help_text= - "If public, the front-end will generate a landing page. If unlisted, it won't have a landing page but will be shown in assets. If private, it won't be shown anywhere of the front-end.", - max_length=20), + choices=[("PUBLIC", "Public"), ("UNLISTED", "Unlisted"), ("PRIVATE", "Private")], + default="UNLISTED", + help_text="If public, the front-end will generate a landing page. If unlisted, it won't have a landing page but will be shown in assets. If private, it won't be shown anywhere of the front-end.", + max_length=20, + ), ), ] diff --git a/breathecode/registry/migrations/0038_auto_20231123_1611.py b/breathecode/registry/migrations/0038_auto_20231123_1611.py index a4ea9488d..4983e3532 100644 --- a/breathecode/registry/migrations/0038_auto_20231123_1611.py +++ b/breathecode/registry/migrations/0038_auto_20231123_1611.py @@ -6,30 +6,37 @@ class Migration(migrations.Migration): dependencies = [ - ('registry', '0037_auto_20231110_1847'), + ("registry", "0037_auto_20231110_1847"), ] operations = [ migrations.AlterField( - model_name='asset', - name='status', - field=models.CharField(choices=[('NOT_STARTED', 'Not Started'), ('PLANNING', 'Planning'), - ('WRITING', 'Writing'), ('DRAFT', 'Draft'), ('OPTIMIZED', 'Optimized'), - ('PUBLISHED', 'Published')], - db_index=True, - default='NOT_STARTED', - help_text="It won't be shown on the website until the status is published", - max_length=20), + model_name="asset", + name="status", + field=models.CharField( + choices=[ + ("NOT_STARTED", "Not Started"), + ("PLANNING", "Planning"), + ("WRITING", "Writing"), + ("DRAFT", "Draft"), + ("OPTIMIZED", "Optimized"), + ("PUBLISHED", "Published"), + ], + db_index=True, + default="NOT_STARTED", + help_text="It won't be shown on the website until the status is published", + max_length=20, + ), ), migrations.AlterField( - model_name='asset', - name='visibility', + model_name="asset", + name="visibility", field=models.CharField( - choices=[('PUBLIC', 'Public'), ('UNLISTED', 'Unlisted'), ('PRIVATE', 'Private')], + choices=[("PUBLIC", "Public"), ("UNLISTED", "Unlisted"), ("PRIVATE", "Private")], db_index=True, - default='PUBLIC', - help_text= - "This is an internal property. It won't be shown internally to other academies unless is public", - max_length=20), + default="PUBLIC", + help_text="This is an internal property. 
It won't be shown internally to other academies unless is public", + max_length=20, + ), ), ] diff --git a/breathecode/registry/migrations/0039_asset_assets_related.py b/breathecode/registry/migrations/0039_asset_assets_related.py index dd65d3b2d..ab71a8a07 100644 --- a/breathecode/registry/migrations/0039_asset_assets_related.py +++ b/breathecode/registry/migrations/0039_asset_assets_related.py @@ -6,16 +6,17 @@ class Migration(migrations.Migration): dependencies = [ - ('registry', '0038_auto_20231123_1611'), + ("registry", "0038_auto_20231123_1611"), ] operations = [ migrations.AddField( - model_name='asset', - name='assets_related', + model_name="asset", + name="assets_related", field=models.ManyToManyField( blank=True, - help_text='Related assets used to get prepared before going through this asset.', - to='registry.Asset'), + help_text="Related assets used to get prepared before going through this asset.", + to="registry.Asset", + ), ), ] diff --git a/breathecode/registry/migrations/0040_alter_asset_all_translations_and_more.py b/breathecode/registry/migrations/0040_alter_asset_all_translations_and_more.py index 46ed58ed8..c6720bb79 100644 --- a/breathecode/registry/migrations/0040_alter_asset_all_translations_and_more.py +++ b/breathecode/registry/migrations/0040_alter_asset_all_translations_and_more.py @@ -6,18 +6,18 @@ class Migration(migrations.Migration): dependencies = [ - ('registry', '0039_asset_assets_related'), + ("registry", "0039_asset_assets_related"), ] operations = [ migrations.AlterField( - model_name='asset', - name='all_translations', - field=models.ManyToManyField(blank=True, to='registry.asset'), + model_name="asset", + name="all_translations", + field=models.ManyToManyField(blank=True, to="registry.asset"), ), migrations.AlterField( - model_name='assetcategory', - name='all_translations', - field=models.ManyToManyField(blank=True, to='registry.assetcategory'), + model_name="assetcategory", + name="all_translations", + field=models.ManyToManyField(blank=True, to="registry.assetcategory"), ), ] diff --git a/breathecode/registry/migrations/0041_asset_is_auto_subscribed.py b/breathecode/registry/migrations/0041_asset_is_auto_subscribed.py index c97138a9f..816324308 100644 --- a/breathecode/registry/migrations/0041_asset_is_auto_subscribed.py +++ b/breathecode/registry/migrations/0041_asset_is_auto_subscribed.py @@ -6,17 +6,16 @@ class Migration(migrations.Migration): dependencies = [ - ('registry', '0040_alter_asset_all_translations_and_more'), + ("registry", "0040_alter_asset_all_translations_and_more"), ] operations = [ migrations.AddField( - model_name='asset', - name='is_auto_subscribed', + model_name="asset", + name="is_auto_subscribed", field=models.BooleanField( default=True, - help_text= - 'If auto subscribed, the system will attempt to listen to push event and update the asset meta based on github' + help_text="If auto subscribed, the system will attempt to listen to push event and update the asset meta based on github", ), ), ] diff --git a/breathecode/registry/migrations/0042_asset_enable_table_of_content_alter_asset_gitpod_and_more.py b/breathecode/registry/migrations/0042_asset_enable_table_of_content_alter_asset_gitpod_and_more.py index 12d4fccc2..55849a0f3 100644 --- a/breathecode/registry/migrations/0042_asset_enable_table_of_content_alter_asset_gitpod_and_more.py +++ b/breathecode/registry/migrations/0042_asset_enable_table_of_content_alter_asset_gitpod_and_more.py @@ -6,26 +6,28 @@ class Migration(migrations.Migration): dependencies = [ - 
('registry', '0041_asset_is_auto_subscribed'), + ("registry", "0041_asset_is_auto_subscribed"), ] operations = [ migrations.AddField( - model_name='asset', - name='enable_table_of_content', - field=models.BooleanField(default=True, - help_text='If true, it shows a tabled on contents on top of the lesson'), + model_name="asset", + name="enable_table_of_content", + field=models.BooleanField( + default=True, help_text="If true, it shows a tabled on contents on top of the lesson" + ), ), migrations.AlterField( - model_name='asset', - name='gitpod', + model_name="asset", + name="gitpod", field=models.BooleanField( default=False, - help_text='If true, it means it can be opened on cloud provisioning vendors like Gitpod or Codespaces'), + help_text="If true, it means it can be opened on cloud provisioning vendors like Gitpod or Codespaces", + ), ), migrations.AlterField( - model_name='asset', - name='interactive', - field=models.BooleanField(db_index=True, default=False, help_text='If true, it means is learnpack enabled'), + model_name="asset", + name="interactive", + field=models.BooleanField(db_index=True, default=False, help_text="If true, it means is learnpack enabled"), ), ] diff --git a/breathecode/registry/migrations/0043_asset_superseded_by.py b/breathecode/registry/migrations/0043_asset_superseded_by.py index 6cda2d727..b691086d6 100644 --- a/breathecode/registry/migrations/0043_asset_superseded_by.py +++ b/breathecode/registry/migrations/0043_asset_superseded_by.py @@ -7,21 +7,21 @@ class Migration(migrations.Migration): dependencies = [ - ('registry', '0042_asset_enable_table_of_content_alter_asset_gitpod_and_more'), + ("registry", "0042_asset_enable_table_of_content_alter_asset_gitpod_and_more"), ] operations = [ migrations.AddField( - model_name='asset', - name='superseded_by', + model_name="asset", + name="superseded_by", field=models.OneToOneField( blank=True, default=None, - help_text= - 'The newer version of the article (null if it is the latest version). This is used for technology deprecation, for example, a new article to explain the new version of react router', + help_text="The newer version of the article (null if it is the latest version). 
This is used for technology deprecation, for example, a new article to explain the new version of react router", null=True, on_delete=django.db.models.deletion.SET_NULL, - related_name='previous_version', - to='registry.asset'), + related_name="previous_version", + to="registry.asset", + ), ), ] diff --git a/breathecode/registry/models.py b/breathecode/registry/models.py index 236626979..bccf2ce08 100644 --- a/breathecode/registry/models.py +++ b/breathecode/registry/models.py @@ -19,16 +19,16 @@ from .signals import asset_readme_modified, asset_slug_modified, asset_status_updated, asset_title_modified -__all__ = ['AssetTechnology', 'Asset', 'AssetAlias'] +__all__ = ["AssetTechnology", "Asset", "AssetAlias"] logger = logging.getLogger(__name__) -PUBLIC = 'PUBLIC' -UNLISTED = 'UNLISTED' -PRIVATE = 'PRIVATE' +PUBLIC = "PUBLIC" +UNLISTED = "UNLISTED" +PRIVATE = "PRIVATE" VISIBILITY = ( - (PUBLIC, 'Public'), - (UNLISTED, 'Unlisted'), - (PRIVATE, 'Private'), + (PUBLIC, "Public"), + (UNLISTED, "Unlisted"), + (PRIVATE, "Private"), ) SORT_PRIORITY = ( (1, 1), @@ -44,33 +44,32 @@ class Meta: class AssetTechnology(models.Model): - slug = models.SlugField(max_length=200, unique=True, help_text='Technologies are unified within all 4geeks.com') + slug = models.SlugField(max_length=200, unique=True, help_text="Technologies are unified within all 4geeks.com") title = models.CharField(max_length=200, blank=True) - lang = models.CharField(max_length=2, - blank=True, - default=None, - null=True, - help_text='Leave blank if will be shown in all languages') - parent = models.ForeignKey('self', on_delete=models.SET_NULL, default=None, blank=True, null=True) - is_deprecated = models.BooleanField(default=False, - help_text='If True, the technology will be programmatically deleted.') - featured_asset = models.ForeignKey('Asset', on_delete=models.SET_NULL, default=None, blank=True, null=True) + lang = models.CharField( + max_length=2, blank=True, default=None, null=True, help_text="Leave blank if will be shown in all languages" + ) + parent = models.ForeignKey("self", on_delete=models.SET_NULL, default=None, blank=True, null=True) + is_deprecated = models.BooleanField( + default=False, help_text="If True, the technology will be programmatically deleted." + ) + featured_asset = models.ForeignKey("Asset", on_delete=models.SET_NULL, default=None, blank=True, null=True) visibility = models.CharField( max_length=20, choices=VISIBILITY, default=UNLISTED, - help_text= - 'If public, the front-end will generate a landing page. If unlisted, it won\'t have a landing page but will be shown in assets. If private, it won\'t be shown anywhere of the front-end.' + help_text="If public, the front-end will generate a landing page. If unlisted, it won't have a landing page but will be shown in assets. 
If private, it won't be shown anywhere of the front-end.", ) description = models.TextField(null=True, blank=True, default=None) - icon_url = models.URLField(null=True, blank=True, default=None, help_text='Image icon to show on website') + icon_url = models.URLField(null=True, blank=True, default=None, help_text="Image icon to show on website") sort_priority = models.IntegerField( null=False, choices=SORT_PRIORITY, blank=False, default=3, - help_text='Priority to sort technology (1, 2, or 3): One is more important and goes first than three.') + help_text="Priority to sort technology (1, 2, or 3): One is more important and goes first than three.", + ) def __str__(self): return self.title @@ -94,7 +93,7 @@ def clean(self): def validate(self): if self.is_deprecated and self.parent is None: - raise Exception('You cannot mark a technology as deprecated if it doesn\'t have a parent technology') + raise Exception("You cannot mark a technology as deprecated if it doesn't have a parent technology") class AssetCategory(models.Model): @@ -105,18 +104,17 @@ def __init__(self, *args, **kwargs): slug = models.SlugField(max_length=200) title = models.CharField(max_length=200) - lang = models.CharField(max_length=2, help_text='E.g: en, es, it') + lang = models.CharField(max_length=2, help_text="E.g: en, es, it") description = models.TextField(null=True, blank=True, default=None) academy = models.ForeignKey(Academy, on_delete=models.CASCADE) - all_translations = models.ManyToManyField('self', blank=True) + all_translations = models.ManyToManyField("self", blank=True) # Ideal for generating blog post thumbnails auto_generate_previews = models.BooleanField(default=False) - preview_generation_url = models.URLField(null=True, - blank=True, - default=None, - help_text='Will be POSTed to get preview image') + preview_generation_url = models.URLField( + null=True, blank=True, default=None, help_text="Will be POSTed to get preview image" + ) visibility = models.CharField(max_length=20, choices=VISIBILITY, default=PUBLIC) @@ -132,7 +130,7 @@ def save(self, *args, **kwargs): # Prevent multiple keywords with same slug cat = AssetCategory.objects.filter(slug=self.slug, academy=self.academy).exclude(id=self.id).first() if cat is not None: - raise Exception(f'Category with slug {self.slug} already exists on this academy') + raise Exception(f"Category with slug {self.slug} already exists on this academy") super().save(*args, **kwargs) @@ -145,31 +143,27 @@ def __init__(self, *args, **kwargs): slug = models.SlugField(max_length=200) title = models.CharField(max_length=200) - lang = models.CharField(max_length=2, help_text='E.g: en, es, it') + lang = models.CharField(max_length=2, help_text="E.g: en, es, it") academy = models.ForeignKey(Academy, on_delete=models.CASCADE) visibility = models.CharField(max_length=20, choices=VISIBILITY, default=PUBLIC) - landing_page_url = models.URLField(blank=True, - null=True, - default=None, - help_text='All keyword articles must point to this page') + landing_page_url = models.URLField( + blank=True, null=True, default=None, help_text="All keyword articles must point to this page" + ) is_deprecated = models.BooleanField( default=False, - help_text= - 'Used when you want to stop using this cluster, all previous articles will be kept but no new articles will be assigned' + help_text="Used when you want to stop using this cluster, all previous articles will be kept but no new articles will be assigned", ) is_important = models.BooleanField(default=True) is_urgent = 
models.BooleanField(default=True) - internal_description = models.TextField(default=None, - null=True, - blank=True, - help_text='How will be this cluster be used in the SEO strategy') + internal_description = models.TextField( + default=None, null=True, blank=True, help_text="How will be this cluster be used in the SEO strategy" + ) - optimization_rating = models.FloatField(null=True, - blank=True, - default=None, - help_text='Automatically filled (1 to 100)') + optimization_rating = models.FloatField( + null=True, blank=True, default=None, help_text="Automatically filled (1 to 100)" + ) created_at = models.DateTimeField(auto_now_add=True, editable=False) updated_at = models.DateTimeField(auto_now=True, editable=False) @@ -183,7 +177,7 @@ def save(self, *args, **kwargs): # Prevent multiple keywords with same slug cluster = KeywordCluster.objects.filter(slug=self.slug, academy=self.academy).first() if cluster is not None: - raise Exception(f'Cluster with slug {self.slug} already exists on this academy') + raise Exception(f"Cluster with slug {self.slug} already exists on this academy") super().save(*args, **kwargs) @@ -196,15 +190,14 @@ def __init__(self, *args, **kwargs): slug = models.SlugField(max_length=200) title = models.CharField(max_length=200) - lang = models.CharField(max_length=2, help_text='E.g: en, es, it') + lang = models.CharField(max_length=2, help_text="E.g: en, es, it") cluster = models.ForeignKey(KeywordCluster, on_delete=models.SET_NULL, default=None, blank=True, null=True) - expected_monthly_traffic = models.FloatField(null=True, - blank=True, - default=None, - help_text='You can get this info from Ahrefs or GKP') - difficulty = models.FloatField(null=True, blank=True, default=None, help_text='From 1 to 100') + expected_monthly_traffic = models.FloatField( + null=True, blank=True, default=None, help_text="You can get this info from Ahrefs or GKP" + ) + difficulty = models.FloatField(null=True, blank=True, default=None, help_text="From 1 to 100") is_important = models.BooleanField(default=True) is_urgent = models.BooleanField(default=True) @@ -222,58 +215,58 @@ def save(self, *args, **kwargs): # Prevent multiple keywords with same slug and make category mandatory keyword = AssetKeyword.objects.filter(slug=self.slug, academy=self.academy).first() if keyword is not None: - raise Exception(f'Keyword with slug {self.slug} already exists on this academy') + raise Exception(f"Keyword with slug {self.slug} already exists on this academy") super().save(*args, **kwargs) -PROJECT = 'PROJECT' -EXERCISE = 'EXERCISE' -LESSON = 'LESSON' -QUIZ = 'QUIZ' -VIDEO = 'VIDEO' -ARTICLE = 'ARTICLE' +PROJECT = "PROJECT" +EXERCISE = "EXERCISE" +LESSON = "LESSON" +QUIZ = "QUIZ" +VIDEO = "VIDEO" +ARTICLE = "ARTICLE" TYPE = ( - (PROJECT, 'Project'), - (EXERCISE, 'Exercise'), - (QUIZ, 'Quiz'), - (LESSON, 'Lesson'), - (VIDEO, 'Video'), - (ARTICLE, 'Article'), + (PROJECT, "Project"), + (EXERCISE, "Exercise"), + (QUIZ, "Quiz"), + (LESSON, "Lesson"), + (VIDEO, "Video"), + (ARTICLE, "Article"), ) -BEGINNER = 'BEGINNER' -EASY = 'EASY' -INTERMEDIATE = 'INTERMEDIATE' -HARD = 'HARD' +BEGINNER = "BEGINNER" +EASY = "EASY" +INTERMEDIATE = "INTERMEDIATE" +HARD = "HARD" DIFFICULTY = ( - (HARD, 'Hard'), - (INTERMEDIATE, 'Intermediate'), - (EASY, 'Easy'), - (BEGINNER, 'Beginner'), + (HARD, "Hard"), + (INTERMEDIATE, "Intermediate"), + (EASY, "Easy"), + (BEGINNER, "Beginner"), ) -NOT_STARTED = 'NOT_STARTED' -PLANNING = 'PLANNING' -WRITING = 'WRITING' -DRAFT = 'DRAFT' -OPTIMIZED = 'OPTIMIZED' -PUBLISHED = 'PUBLISHED' 
+NOT_STARTED = "NOT_STARTED" +PLANNING = "PLANNING" +WRITING = "WRITING" +DRAFT = "DRAFT" +OPTIMIZED = "OPTIMIZED" +PUBLISHED = "PUBLISHED" ASSET_STATUS = ( - (NOT_STARTED, 'Not Started'), - (PLANNING, 'Planning'), - (WRITING, 'Writing'), - (DRAFT, 'Draft'), - (OPTIMIZED, 'Optimized'), - (PUBLISHED, 'Published'), + (NOT_STARTED, "Not Started"), + (PLANNING, "Planning"), + (WRITING, "Writing"), + (DRAFT, "Draft"), + (OPTIMIZED, "Optimized"), + (PUBLISHED, "Published"), ) ASSET_SYNC_STATUS = ( - ('PENDING', 'Pending'), - ('ERROR', 'Error'), - ('OK', 'Ok'), - ('WARNING', 'Warning'), - ('NEEDS_RESYNC', 'Needs Resync'), + ("PENDING", "Pending"), + ("ERROR", "Error"), + ("OK", "Ok"), + ("WARNING", "Warning"), + ("NEEDS_RESYNC", "Needs Resync"), ) @@ -289,18 +282,15 @@ def __init__(self, *args, **kwargs): slug = models.SlugField( max_length=200, unique=True, - help_text= - 'Asset must be unique within the entire database because they could be published into 4geeks.com (shared among all academies)', - db_index=True) + help_text="Asset must be unique within the entire database because they could be published into 4geeks.com (shared among all academies)", + db_index=True, + ) title = models.CharField(max_length=200, blank=True, db_index=True) - lang = models.CharField(max_length=2, - blank=True, - null=True, - default=None, - help_text='E.g: en, es, it', - db_index=True) - - all_translations = models.ManyToManyField('self', blank=True) + lang = models.CharField( + max_length=2, blank=True, null=True, default=None, help_text="E.g: en, es, it", db_index=True + ) + + all_translations = models.ManyToManyField("self", blank=True) technologies = models.ManyToManyField(AssetTechnology, blank=True) category = models.ForeignKey( @@ -318,14 +308,14 @@ def __init__(self, *args, **kwargs): null=True, blank=True, default=None, - help_text='Brief for the copywriters, mainly used to describe what this lessons needs to be about') + help_text="Brief for the copywriters, mainly used to describe what this lessons needs to be about", + ) readme_url = models.URLField( null=True, blank=True, default=None, - help_text= - 'This will be used to synch only lessons from github. Projects, quizzes and exercises it will try README.md for english and README.lang.md for other langs' + help_text="This will be used to synch only lessons from github. 
Projects, quizzes and exercises it will try README.md for english and README.lang.md for other langs",
    )
    intro_video_url = models.URLField(null=True, blank=True, default=None)
    solution_video_url = models.URLField(null=True, blank=True, default=None)
@@ -341,20 +331,22 @@ def __init__(self, *args, **kwargs):
     external = models.BooleanField(
         default=False,
-        help_text=
-        'External assets will open in a new window, they are not built using breathecode or learnpack tecnology',
-        db_index=True)
+        help_text="External assets will open in a new window, they are not built using breathecode or learnpack technology",
+        db_index=True,
+    )
     enable_table_of_content = models.BooleanField(
-        default=True, help_text='If true, it shows a tabled on contents on top of the lesson')
-    interactive = models.BooleanField(default=False, db_index=True, help_text='If true, it means is learnpack enabled')
+        default=True, help_text="If true, it shows a table of contents on top of the lesson"
+    )
+    interactive = models.BooleanField(default=False, db_index=True, help_text="If true, it means it is learnpack enabled")
     with_solutions = models.BooleanField(default=False, db_index=True)
     with_video = models.BooleanField(default=False, db_index=True)
     graded = models.BooleanField(default=False, db_index=True)
     gitpod = models.BooleanField(
         default=False,
-        help_text='If true, it means it can be opened on cloud provisioning vendors like Gitpod or Codespaces')
-    duration = models.IntegerField(null=True, blank=True, default=None, help_text='In hours')
+        help_text="If true, it means it can be opened on cloud provisioning vendors like Gitpod or Codespaces",
+    )
+    duration = models.IntegerField(null=True, blank=True, default=None, help_text="In hours")
     difficulty = models.CharField(max_length=20, choices=DIFFICULTY, default=None, null=True, blank=True)
@@ -363,90 +355,102 @@ def __init__(self, *args, **kwargs):
         max_length=20,
         choices=VISIBILITY,
         default=PUBLIC,
-        help_text='This is an internal property. It won\'t be shown internally to other academies unless is public',
-        db_index=True)
+        help_text="This is an internal property. It won't be shown internally to other academies unless it is public",
+        db_index=True,
+    )
     asset_type = models.CharField(max_length=20, choices=TYPE, db_index=True)
     superseded_by = models.OneToOneField(
-        'Asset',
-        related_name='previous_version',
+        "Asset",
+        related_name="previous_version",
         on_delete=models.SET_NULL,
         null=True,
         default=None,
         blank=True,
-        help_text=
-        'The newer version of the article (null if it is the latest version). This is used for technology deprecation, for example, a new article to explain the new version of react router'
+        help_text="The newer version of the article (null if it is the latest version). 
This is used for technology deprecation, for example, a new article to explain the new version of react router", ) - status = models.CharField(max_length=20, - choices=ASSET_STATUS, - default=NOT_STARTED, - help_text='It won\'t be shown on the website until the status is published', - db_index=True) + status = models.CharField( + max_length=20, + choices=ASSET_STATUS, + default=NOT_STARTED, + help_text="It won't be shown on the website until the status is published", + db_index=True, + ) is_auto_subscribed = models.BooleanField( default=True, - help_text= - 'If auto subscribed, the system will attempt to listen to push event and update the asset meta based on github') - sync_status = models.CharField(max_length=20, - choices=ASSET_SYNC_STATUS, - default=None, - null=True, - blank=True, - help_text='Internal state automatically set by the system based on sync', - db_index=True) + help_text="If auto subscribed, the system will attempt to listen to push event and update the asset meta based on github", + ) + sync_status = models.CharField( + max_length=20, + choices=ASSET_SYNC_STATUS, + default=None, + null=True, + blank=True, + help_text="Internal state automatically set by the system based on sync", + db_index=True, + ) last_synch_at = models.DateTimeField(null=True, blank=True, default=None, db_index=True) github_commit_hash = models.CharField(max_length=100, null=True, blank=True, default=None, db_index=True) - test_status = models.CharField(max_length=20, - choices=ASSET_SYNC_STATUS, - default=None, - null=True, - blank=True, - help_text='Internal state automatically set by the system based on test', - db_index=True) + test_status = models.CharField( + max_length=20, + choices=ASSET_SYNC_STATUS, + default=None, + null=True, + blank=True, + help_text="Internal state automatically set by the system based on test", + db_index=True, + ) published_at = models.DateTimeField(null=True, blank=True, default=None, db_index=True) last_test_at = models.DateTimeField(null=True, blank=True, default=None, db_index=True) - status_text = models.TextField(null=True, - default=None, - blank=True, - help_text='Used by the sych status to provide feedback') - - authors_username = models.CharField(max_length=80, - null=True, - default=None, - blank=True, - help_text='Github usernames separated by comma', - db_index=True) - assessment = models.ForeignKey(Assessment, - on_delete=models.SET_NULL, - default=None, - blank=True, - null=True, - help_text='Connection with the assessment breathecode app') - author = models.ForeignKey(User, - on_delete=models.SET_NULL, - default=None, - blank=True, - null=True, - help_text='Who wrote the lesson, not necessarily the owner') - owner = models.ForeignKey(User, - on_delete=models.SET_NULL, - related_name='owned_lessons', - default=None, - blank=True, - null=True, - help_text='The owner has the github premissions to update the lesson') + status_text = models.TextField( + null=True, default=None, blank=True, help_text="Used by the sych status to provide feedback" + ) + + authors_username = models.CharField( + max_length=80, + null=True, + default=None, + blank=True, + help_text="Github usernames separated by comma", + db_index=True, + ) + assessment = models.ForeignKey( + Assessment, + on_delete=models.SET_NULL, + default=None, + blank=True, + null=True, + help_text="Connection with the assessment breathecode app", + ) + author = models.ForeignKey( + User, + on_delete=models.SET_NULL, + default=None, + blank=True, + null=True, + help_text="Who wrote the lesson, not necessarily 
the owner",
+    )
+    owner = models.ForeignKey(
+        User,
+        on_delete=models.SET_NULL,
+        related_name="owned_lessons",
+        default=None,
+        blank=True,
+        null=True,
+        help_text="The owner has the github permissions to update the lesson",
+    )
     is_seo_tracked = models.BooleanField(default=True, db_index=True)
-    seo_keywords = models.ManyToManyField(AssetKeyword,
-                                          blank=True,
-                                          help_text='Optimize for a max of two keywords per asset')
-
-    optimization_rating = models.FloatField(null=True,
-                                            blank=True,
-                                            default=None,
-                                            help_text='Automatically filled (1 to 100)')
+    seo_keywords = models.ManyToManyField(
+        AssetKeyword, blank=True, help_text="Optimize for a max of two keywords per asset"
+    )
+
+    optimization_rating = models.FloatField(
+        null=True, blank=True, default=None, help_text="Automatically filled (1 to 100)"
+    )
     last_seo_scan_at = models.DateTimeField(null=True, blank=True, default=None, db_index=True)
     seo_json_status = models.JSONField(null=True, blank=True, default=None)
@@ -454,39 +458,44 @@ def __init__(self, *args, **kwargs):
     last_cleaning_at = models.DateTimeField(null=True, blank=True, default=None, db_index=True)
     cleaning_status_details = models.TextField(null=True, blank=True, default=None)
-    cleaning_status = models.CharField(max_length=20,
-                                       choices=ASSET_SYNC_STATUS,
-                                       default='PENDING',
-                                       null=True,
-                                       blank=True,
-                                       help_text='Internal state automatically set by the system based on cleanup',
-                                       db_index=True)
-
-    delivery_instructions = models.TextField(null=True,
-                                             default=None,
-                                             blank=True,
-                                             help_text='Tell students how to deliver this project')
+    cleaning_status = models.CharField(
+        max_length=20,
+        choices=ASSET_SYNC_STATUS,
+        default="PENDING",
+        null=True,
+        blank=True,
+        help_text="Internal state automatically set by the system based on cleanup",
+        db_index=True,
+    )
+
+    delivery_instructions = models.TextField(
+        null=True, default=None, blank=True, help_text="Tell students how to deliver this project"
+    )
     delivery_formats = models.CharField(
         max_length=255,
-        default='url',
-        help_text='Comma separated list of supported formats. Eg: url, image/png, application/pdf')
-    delivery_regex_url = models.CharField(max_length=255,
-                                          default=None,
-                                          blank=True,
-                                          null=True,
-                                          help_text='Will only be used if "url" is the delivery format')
+        default="url",
+        help_text="Comma separated list of supported formats. 
Eg: url, image/png, application/pdf", + ) + delivery_regex_url = models.CharField( + max_length=255, + default=None, + blank=True, + null=True, + help_text='Will only be used if "url" is the delivery format', + ) assets_related = models.ManyToManyField( - 'self', + "self", blank=True, symmetrical=False, - help_text='Related assets used to get prepared before going through this asset.') + help_text="Related assets used to get prepared before going through this asset.", + ) created_at = models.DateTimeField(auto_now_add=True, editable=False) updated_at = models.DateTimeField(auto_now=True, editable=False) def __str__(self): - return f'{self.slug}' + return f"{self.slug}" def save(self, *args, **kwargs): @@ -498,7 +507,7 @@ def save(self, *args, **kwargs): if self.__old_readme_raw != self.readme_raw: readme_modified = True self.readme_updated_at = timezone.now() - self.cleaning_status = 'PENDING' + self.cleaning_status = "PENDING" if self.__old_title != self.title: title_modified = True @@ -512,7 +521,7 @@ def save(self, *args, **kwargs): alias = AssetAlias.objects.filter(slug=self.slug).first() if alias is not None: raise Exception( - f'New slug {self.slug} for {self.__old_slug} is already taken by alias for asset {alias.asset.slug}' + f"New slug {self.slug} for {self.__old_slug} is already taken by alias for asset {alias.asset.slug}" ) self.full_clean() @@ -521,10 +530,14 @@ def save(self, *args, **kwargs): self.__old_readme_raw = self.readme_raw self.__old_status = self.status - if slug_modified: asset_slug_modified.send_robust(instance=self, sender=Asset) - if readme_modified: asset_readme_modified.send_robust(instance=self, sender=Asset) - if title_modified: asset_title_modified.send_robust(instance=self, sender=Asset) - if status_modified: asset_status_updated.send_robust(instance=self, sender=Asset) + if slug_modified: + asset_slug_modified.send_robust(instance=self, sender=Asset) + if readme_modified: + asset_readme_modified.send_robust(instance=self, sender=Asset) + if title_modified: + asset_title_modified.send_robust(instance=self, sender=Asset) + if status_modified: + asset_status_updated.send_robust(instance=self, sender=Asset) def get_preview_generation_url(self): @@ -536,14 +549,14 @@ def get_preview_generation_url(self): def get_repo_meta(self): # def get_url_info(url: str): url = self.readme_url - result = re.search(r'blob\/([\w\-]+)', url) + result = re.search(r"blob\/([\w\-]+)", url) branch_name = None if result is not None: branch_name = result.group(1) - result = re.search(r'https?:\/\/github\.com\/([\w\-]+)\/([\w\-]+)\/?', url) + result = re.search(r"https?:\/\/github\.com\/([\w\-]+)\/([\w\-]+)\/?", url) if result is None: - raise Exception('Invalid URL when looking organization: ' + url) + raise Exception("Invalid URL when looking organization: " + url) org_name = result.group(1) repo_name = result.group(2) @@ -555,91 +568,99 @@ def get_readme(self, parse=None, remove_frontmatter=False): if self.readme is None: self.readme = self.readme_raw - if self.readme is None or self.readme == '': - if self.asset_type != 'QUIZ': - AssetErrorLog(slug=AssetErrorLog.EMPTY_README, - path=self.slug, - asset_type=self.asset_type, - asset=self, - status_text='Readme file was not found').save() + if self.readme is None or self.readme == "": + if self.asset_type != "QUIZ": + AssetErrorLog( + slug=AssetErrorLog.EMPTY_README, + path=self.slug, + asset_type=self.asset_type, + asset=self, + status_text="Readme file was not found", + ).save() self.set_readme( - get_template('empty.md').render({ 
- 'title': self.title, - 'lang': self.lang, - 'asset_type': self.asset_type, - })) + get_template("empty.md").render( + { + "title": self.title, + "lang": self.lang, + "asset_type": self.asset_type, + } + ) + ) - if self.readme_url is None and self.asset_type == 'LESSON': + if self.readme_url is None and self.asset_type == "LESSON": self.readme_url = self.url self.save() readme = { - 'clean': self.readme, - 'decoded': Asset.decode(self.readme), - 'raw': self.readme_raw, - 'decoded_raw': Asset.decode(self.readme_raw) + "clean": self.readme, + "decoded": Asset.decode(self.readme), + "raw": self.readme_raw, + "decoded_raw": Asset.decode(self.readme_raw), } if parse: # external assets will have a default markdown readme generated internally - extension = '.md' - if self.readme_url and self.readme_url != '': + extension = ".md" + if self.readme_url and self.readme_url != "": u = urlparse(self.readme_url) - extension = pathlib.Path(u[2]).suffix if not self.external else '.md' + extension = pathlib.Path(u[2]).suffix if not self.external else ".md" - if extension in ['.md', '.mdx', '.txt']: - readme = self.parse(readme, format='markdown', remove_frontmatter=remove_frontmatter) - elif extension in ['.ipynb']: - readme = self.parse(readme, format='notebook') + if extension in [".md", ".mdx", ".txt"]: + readme = self.parse(readme, format="markdown", remove_frontmatter=remove_frontmatter) + elif extension in [".ipynb"]: + readme = self.parse(readme, format="notebook") else: - AssetErrorLog(slug=AssetErrorLog.INVALID_README_URL, - path=self.slug, - asset_type=self.asset_type, - asset=self, - status_text='Invalid Readme URL').save() + AssetErrorLog( + slug=AssetErrorLog.INVALID_README_URL, + path=self.slug, + asset_type=self.asset_type, + asset=self, + status_text="Invalid Readme URL", + ).save() return readme - def parse(self, readme, format='markdown', remove_frontmatter=False): - if format == 'markdown': - _data = frontmatter.loads(readme['decoded']) - readme['frontmatter'] = _data.metadata - readme['frontmatter']['format'] = format - readme['decoded'] = _data.content - readme['html'] = markdown.markdown(_data.content, extensions=['markdown.extensions.fenced_code']) - if format == 'notebook': + def parse(self, readme, format="markdown", remove_frontmatter=False): + if format == "markdown": + _data = frontmatter.loads(readme["decoded"]) + readme["frontmatter"] = _data.metadata + readme["frontmatter"]["format"] = format + readme["decoded"] = _data.content + readme["html"] = markdown.markdown(_data.content, extensions=["markdown.extensions.fenced_code"]) + if format == "notebook": import nbformat from nbconvert import HTMLExporter - notebook = nbformat.reads(readme['decoded'], as_version=4) + + notebook = nbformat.reads(readme["decoded"], as_version=4) # Instantiate the exporter. We use the `classic` template for now; we'll get into more details # later about how to customize the exporter further. 
You can use 'basic' - html_exporter = HTMLExporter(template_name='basic') + html_exporter = HTMLExporter(template_name="basic") # Process the notebook we loaded earlier body, resources = html_exporter.from_notebook_node(notebook) - readme['frontmatter'] = resources - readme['frontmatter']['format'] = format - readme['html'] = body + readme["frontmatter"] = resources + readme["frontmatter"]["format"] = format + readme["html"] = body return readme def get_thumbnail_name(self): - slug1 = self.category.slug if self.category is not None else 'default' + slug1 = self.category.slug if self.category is not None else "default" slug2 = self.slug if self.academy is None: - raise Exception('Asset needs to belong to an academy to generate its thumbnail') + raise Exception("Asset needs to belong to an academy to generate its thumbnail") - return f'{self.academy.slug}-{slug1}-{slug2}.png' + return f"{self.academy.slug}-{slug1}-{slug2}.png" @staticmethod def encode(content): if content is not None: - return str(base64.b64encode(content.encode('utf-8')).decode('utf-8')) + return str(base64.b64encode(content.encode("utf-8")).decode("utf-8")) return None @staticmethod def decode(content): if content is not None: - return base64.b64decode(content.encode('utf-8')).decode('utf-8') + return base64.b64decode(content.encode("utf-8")).decode("utf-8") return None def set_readme(self, content): @@ -647,11 +668,9 @@ def set_readme(self, content): return self def log_error(self, error_slug, status_text=None): - error = AssetErrorLog(slug=error_slug, - asset=self, - asset_type=self.asset_type, - status_text=status_text, - path=self.slug) + error = AssetErrorLog( + slug=error_slug, asset=self, asset_type=self.asset_type, status_text=status_text, path=self.slug + ) error.save() return error @@ -661,9 +680,9 @@ def generate_quiz_json(self): return None config = self.assessment.to_json() - config['info']['description'] = self.description - config['lang'] = self.lang - config['technologies'] = [t.slug for t in self.technologies.all()] + config["info"]["description"] = self.description + config["lang"] = self.lang + config["technologies"] = [t.slug for t in self.technologies.all()] return config @@ -672,14 +691,14 @@ def get_tasks(self): if self.readme is None: return [] - regex = r'\-\s\[(?P<status>[\sxX-])\]\s(?P<label>.+)' - findings = list(re.finditer(regex, self.get_readme()['decoded'])) + regex = r"\-\s\[(?P<status>[\sxX-])\]\s(?P<label>.+)" + findings = list(re.finditer(regex, self.get_readme()["decoded"])) tasks = [] while len(findings) > 0: task_find = findings.pop(0) task = task_find.groupdict() - task['id'] = hashlib.md5(task['label'].encode('utf-8')).hexdigest() - task['status'] = 'DONE' if 'status' in task and task['status'].strip().lower() == 'x' else 'PENDING' + task["id"] = hashlib.md5(task["label"].encode("utf-8")).hexdigest() + task["status"] = "DONE" if "status" in task and task["status"].strip().lower() == "x" else "PENDING" tasks.append(task) return tasks @@ -700,11 +719,9 @@ def get_by_slug(asset_slug, request=None, asset_type=None): AssetErrorLog(slug=AssetErrorLog.SLUG_NOT_FOUND, path=asset_slug, asset_type=asset_type, user=user).save() return None elif asset_type is not None and alias.asset.asset_type.lower() == asset_type.lower(): - AssetErrorLog(slug=AssetErrorLog.DIFFERENT_TYPE, - path=asset_slug, - asset=alias.asset, - asset_type=asset_type, - user=user).save() + AssetErrorLog( + slug=AssetErrorLog.DIFFERENT_TYPE, path=asset_slug, asset=alias.asset, asset_type=asset_type, user=user + ).save() elif 
is_alias: return alias.asset @@ -731,71 +748,73 @@ class AssetComment(models.Model): urgent = models.BooleanField(default=False) priority = models.SmallIntegerField(default=False) asset = models.ForeignKey(Asset, on_delete=models.CASCADE) - author = models.ForeignKey(User, - on_delete=models.SET_NULL, - default=None, - blank=True, - null=True, - help_text='Who wrote the comment or issue') - owner = models.ForeignKey(User, - on_delete=models.SET_NULL, - default=None, - blank=True, - null=True, - related_name='assigned_comments', - help_text='In charge of resolving the comment or issue') + author = models.ForeignKey( + User, on_delete=models.SET_NULL, default=None, blank=True, null=True, help_text="Who wrote the comment or issue" + ) + owner = models.ForeignKey( + User, + on_delete=models.SET_NULL, + default=None, + blank=True, + null=True, + related_name="assigned_comments", + help_text="In charge of resolving the comment or issue", + ) created_at = models.DateTimeField(auto_now_add=True, editable=False) def __str__(self): - return 'AssetComment ' + str(self.id) + return "AssetComment " + str(self.id) -ERROR = 'ERROR' -FIXED = 'FIXED' -IGNORED = 'IGNORED' +ERROR = "ERROR" +FIXED = "FIXED" +IGNORED = "IGNORED" ERROR_STATUS = ( - (ERROR, 'Error'), - (FIXED, 'Fixed'), - (IGNORED, 'Ignored'), + (ERROR, "Error"), + (FIXED, "Fixed"), + (IGNORED, "Ignored"), ) class AssetErrorLog(models.Model): - SLUG_NOT_FOUND = 'slug-not-found' - DIFFERENT_TYPE = 'different-type' - EMPTY_README = 'empty-readme' - EMPTY_HTML = 'empty-html' - INVALID_URL = 'invalid-url' - INVALID_README_URL = 'invalid-readme-url' - README_SYNTAX = 'readme-syntax-error' + SLUG_NOT_FOUND = "slug-not-found" + DIFFERENT_TYPE = "different-type" + EMPTY_README = "empty-readme" + EMPTY_HTML = "empty-html" + INVALID_URL = "invalid-url" + INVALID_README_URL = "invalid-readme-url" + README_SYNTAX = "readme-syntax-error" asset_type = models.CharField(max_length=20, choices=TYPE, default=None, null=True, blank=True) slug = models.SlugField(max_length=200) status = models.CharField(max_length=20, choices=ERROR_STATUS, default=ERROR) path = models.CharField(max_length=200) - status_text = models.TextField(null=True, - blank=True, - default=None, - help_text='Status details, it may be set automatically if enough error information') - user = models.ForeignKey(User, - on_delete=models.SET_NULL, - default=None, - null=True, - help_text='The user how asked for the asset and got the error') + status_text = models.TextField( + null=True, + blank=True, + default=None, + help_text="Status details, it may be set automatically if enough error information", + ) + user = models.ForeignKey( + User, + on_delete=models.SET_NULL, + default=None, + null=True, + help_text="The user how asked for the asset and got the error", + ) asset = models.ForeignKey( Asset, on_delete=models.SET_NULL, default=None, null=True, - help_text= - 'Assign an asset to this error and you will be able to create an alias for it from the django admin bulk actions "create alias"' + help_text='Assign an asset to this error and you will be able to create an alias for it from the django admin bulk actions "create alias"', ) created_at = models.DateTimeField(auto_now_add=True, editable=False) def __str__(self): - return f'Error {self.status} with {self.slug}' + return f"Error {self.status} with {self.slug}" class SEOReport(models.Model): @@ -805,25 +824,27 @@ def __init__(self, *args, **kwargs): self.__shared_state = {} self.__log = [] - report_type = models.CharField(max_length=40, 
help_text='Must be one of the services.seo.action script names') - status = models.CharField(max_length=20, - choices=ASSET_SYNC_STATUS, - default='PENDING', - help_text='Internal state automatically set by the system') + report_type = models.CharField(max_length=40, help_text="Must be one of the services.seo.action script names") + status = models.CharField( + max_length=20, + choices=ASSET_SYNC_STATUS, + default="PENDING", + help_text="Internal state automatically set by the system", + ) log = models.JSONField(default=None, null=True, blank=True) how_to_fix = models.TextField(default=None, null=True, blank=True) asset = models.ForeignKey(Asset, on_delete=models.CASCADE) - rating = models.FloatField(default=None, null=True, blank=True, help_text='Automatically filled (1 to 100)') + rating = models.FloatField(default=None, null=True, blank=True, help_text="Automatically filled (1 to 100)") created_at = models.DateTimeField(auto_now_add=True, editable=False) def fatal(self, msg): - self.__log.append({'rating': -100, 'msg': msg}) + self.__log.append({"rating": -100, "msg": msg}) def good(self, rating, msg): - self.__log.append({'rating': rating, 'msg': msg}) + self.__log.append({"rating": rating, "msg": msg}) def bad(self, rating, msg): - self.__log.append({'rating': rating, 'msg': msg}) + self.__log.append({"rating": rating, "msg": msg}) # this data will be shared among all reports as they are # being calculated in real time @@ -831,16 +852,16 @@ def get_state(self): return self.__shared_data def set_state(self, key, value): - attrs = ['words'] + attrs = ["words"] if key in attrs: self.__shared_state[key]: value else: - raise Exception(f'Trying to set invalid property {key} on SEO report shared state') + raise Exception(f"Trying to set invalid property {key} on SEO report shared state") def get_rating(self): total_rating = 100 for entry in self.__log: - total_rating += entry['rating'] + total_rating += entry["rating"] if total_rating < 0: return 0 @@ -853,7 +874,7 @@ def get_log(self): return self.__log def to_json(self, rating, msg): - return {'rating': self.get_rating(), 'log': self.__log} + return {"rating": self.get_rating(), "log": self.__log} class AssetImage(models.Model): @@ -863,22 +884,24 @@ class AssetImage(models.Model): original_url = models.URLField(max_length=255) hash = models.CharField(max_length=64) - assets = models.ManyToManyField(Asset, blank=True, related_name='images') + assets = models.ManyToManyField(Asset, blank=True, related_name="images") last_download_at = models.DateTimeField(null=True, blank=True, default=None) download_details = models.TextField(null=True, blank=True, default=None) - download_status = models.CharField(max_length=20, - choices=ASSET_SYNC_STATUS, - default='PENDING', - null=True, - blank=True, - help_text='Internal state automatically set by the system based on download') + download_status = models.CharField( + max_length=20, + choices=ASSET_SYNC_STATUS, + default="PENDING", + null=True, + blank=True, + help_text="Internal state automatically set by the system based on download", + ) created_at = models.DateTimeField(auto_now_add=True, editable=False) updated_at = models.DateTimeField(auto_now=True, editable=False) def __str__(self): - return f'{self.name} ({self.id})' + return f"{self.name} ({self.id})" class CredentialsOriginality(models.Model): @@ -894,10 +917,10 @@ class CredentialsOriginality(models.Model): ASSET_ORIGINALITY_STATUS = ( - ('PENDING', 'Pending'), - ('ERROR', 'Error'), - ('COMPLETED', 'Completed'), - ('WARNING', 'Warning'), + 
("PENDING", "Pending"), + ("ERROR", "Error"), + ("COMPLETED", "Completed"), + ("WARNING", "Warning"), ) @@ -911,10 +934,9 @@ class OriginalityScan(models.Model): asset = models.ForeignKey(Asset, on_delete=models.CASCADE) - status = models.CharField(max_length=20, - choices=ASSET_ORIGINALITY_STATUS, - default='PENDING', - help_text='Scan for originality') + status = models.CharField( + max_length=20, choices=ASSET_ORIGINALITY_STATUS, default="PENDING", help_text="Scan for originality" + ) status_text = models.TextField(default=None, null=True, blank=True) created_at = models.DateTimeField(auto_now_add=True, editable=False) @@ -922,16 +944,16 @@ class OriginalityScan(models.Model): VARIABLE_TYPE = ( - ('MARKDOWN', 'Markdown'), - ('PYTHON_CODE', 'Python'), - ('FETCH_JSON', 'Fetch json from url'), - ('FETCH_TEXT', 'Fetch text from url'), + ("MARKDOWN", "Markdown"), + ("PYTHON_CODE", "Python"), + ("FETCH_JSON", "Fetch json from url"), + ("FETCH_TEXT", "Fetch text from url"), ) CONTENT_VAR_STATUS = ( - ('PENDING', 'Pending'), - ('ERROR', 'Error'), - ('COMPLETED', 'Completed'), + ("PENDING", "Pending"), + ("ERROR", "Error"), + ("COMPLETED", "Completed"), ) @@ -940,30 +962,35 @@ class ContentVariable(models.Model): key = models.CharField(max_length=100) value = models.TextField() default_value = models.TextField( - help_text='If the variable type is fetch or code and the processing fails, the default value will be used') + help_text="If the variable type is fetch or code and the processing fails, the default value will be used" + ) - lang = models.CharField(max_length=2, - blank=True, - default=None, - null=True, - help_text='Leave blank if will be shown in all languages') + lang = models.CharField( + max_length=2, blank=True, default=None, null=True, help_text="Leave blank if will be shown in all languages" + ) academy = models.ForeignKey(Academy, on_delete=models.CASCADE) - var_type = models.CharField(max_length=20, - choices=VARIABLE_TYPE, - default='MARKDOWN', - help_text='Code vars accept python code, Fetch vars accept HTTP GET') + var_type = models.CharField( + max_length=20, + choices=VARIABLE_TYPE, + default="MARKDOWN", + help_text="Code vars accept python code, Fetch vars accept HTTP GET", + ) - status = models.CharField(max_length=20, - choices=CONTENT_VAR_STATUS, - default='PENDING', - help_text='Code vars accept python code, Fetch vars accept HTTP GET') + status = models.CharField( + max_length=20, + choices=CONTENT_VAR_STATUS, + default="PENDING", + help_text="Code vars accept python code, Fetch vars accept HTTP GET", + ) - status_text = models.TextField(null=True, - default=None, - blank=True, - help_text='If the var is code or fetch here will be the error processing info') + status_text = models.TextField( + null=True, + default=None, + blank=True, + help_text="If the var is code or fetch here will be the error processing info", + ) created_at = models.DateTimeField(auto_now_add=True, editable=False) updated_at = models.DateTimeField(auto_now=True, editable=False) diff --git a/breathecode/registry/permissions/consumers.py b/breathecode/registry/permissions/consumers.py index e682ce9af..af3b73964 100644 --- a/breathecode/registry/permissions/consumers.py +++ b/breathecode/registry/permissions/consumers.py @@ -18,46 +18,58 @@ def asset_by_slug(context: ServiceContext, args: tuple, kwargs: dict) -> tuple[d def count_cohorts(available_as_saas: bool) -> int: available_as_saas_bool = Q(cohort__available_as_saas=available_as_saas) | Q( - cohort__available_as_saas=None, 
cohort__academy__available_as_saas=available_as_saas) - return CohortUser.objects.filter(available_as_saas_bool, - user=request.user, - educational_status__in=['ACTIVE', 'GRADUATED'], - cohort__academy__id=academy_id, - cohort__syllabus_version__json__icontains=asset_slug).count() - - request = context['request'] + cohort__available_as_saas=None, cohort__academy__available_as_saas=available_as_saas + ) + return CohortUser.objects.filter( + available_as_saas_bool, + user=request.user, + educational_status__in=["ACTIVE", "GRADUATED"], + cohort__academy__id=academy_id, + cohort__syllabus_version__json__icontains=asset_slug, + ).count() + + request = context["request"] lang = get_user_language(request) - asset_slug = kwargs.get('asset_slug') - academy_id = kwargs.get('academy_id') + asset_slug = kwargs.get("asset_slug") + academy_id = kwargs.get("academy_id") asset = Asset.get_by_slug(asset_slug, request) academy = Academy.objects.filter(id=academy_id).first() if asset is None: raise ValidationException( - translation(lang, - en=f'Asset {asset_slug} not found', - es=f'El recurso {asset_slug} no existe', - slug='asset-not-found'), 404) + translation( + lang, + en=f"Asset {asset_slug} not found", + es=f"El recurso {asset_slug} no existe", + slug="asset-not-found", + ), + 404, + ) if count_cohorts(available_as_saas=False): - context['price'] = 0 + context["price"] = 0 else: - context['price'] = 1 + context["price"] = 1 - kwargs['asset'] = asset - kwargs['academy'] = academy - del kwargs['asset_slug'] - del kwargs['academy_id'] + kwargs["asset"] = asset + kwargs["academy"] = academy + del kwargs["asset_slug"] + del kwargs["academy_id"] - if context['price'] == 0 and is_no_saas_student_up_to_date_in_any_cohort(context['request'].user, - academy=academy) is False: + if ( + context["price"] == 0 + and is_no_saas_student_up_to_date_in_any_cohort(context["request"].user, academy=academy) is False + ): raise PaymentException( - translation(lang, - en='You can\'t access this asset because your finantial status is not up to date', - es='No puedes acceder a este recurso porque tu estado financiero no está al dia', - slug='cohort-user-status-later')) + translation( + lang, + en="You can't access this asset because your finantial status is not up to date", + es="No puedes acceder a este recurso porque tu estado financiero no está al dia", + slug="cohort-user-status-later", + ) + ) return (context, args, kwargs) diff --git a/breathecode/registry/receivers.py b/breathecode/registry/receivers.py index e9d155d48..ecc7282fb 100644 --- a/breathecode/registry/receivers.py +++ b/breathecode/registry/receivers.py @@ -30,9 +30,9 @@ @receiver(asset_slug_modified, sender=Asset) def post_asset_slug_modified(sender, instance: Asset, **kwargs): - logger.debug(f'Procesing asset slug creation for {instance.slug}') - if instance.lang == 'en': - instance.lang = 'us' + logger.debug(f"Procesing asset slug creation for {instance.slug}") + if instance.lang == "en": + instance.lang = "us" # create a new slug alias but keep the old one for redirection purposes AssetAlias.objects.create(slug=instance.slug, asset=instance) @@ -48,57 +48,57 @@ def post_asset_slug_modified(sender, instance: Asset, **kwargs): def asset_title_was_updated(sender, instance, **kwargs): # ignore unpublished assets - if instance.status != 'PUBLISHED': + if instance.status != "PUBLISHED": return False async_update_frontend_asset_cache.delay(instance.slug) - bucket_name = os.getenv('SCREENSHOTS_BUCKET', None) - if bucket_name is None or bucket_name == '': + 
bucket_name = os.getenv("SCREENSHOTS_BUCKET", None) + if bucket_name is None or bucket_name == "": return False - if instance.title is None or instance.title == '': + if instance.title is None or instance.title == "": return False # taking thumbnail for the first time - if instance.preview is None or instance.preview == '': - logger.debug('Creating asset screenshot') + if instance.preview is None or instance.preview == "": + logger.debug("Creating asset screenshot") async_create_asset_thumbnail.delay(instance.slug) return True # retaking a thumbnail if it was generated automatically # we know this because bucket_name is inside instance.preview if bucket_name in instance.preview: - logger.debug('Retaking asset screenshot because title was updated') + logger.debug("Retaking asset screenshot because title was updated") async_create_asset_thumbnail.delay(instance.slug) return True @receiver(asset_readme_modified, sender=Asset) def post_asset_readme_modified(sender, instance: Asset, **kwargs): - logger.debug('Cleaning asset raw readme') + logger.debug("Cleaning asset raw readme") async_regenerate_asset_readme.delay(instance.slug) @receiver(post_delete, sender=Asset) def post_asset_deleted(sender, instance: Asset, **kwargs): - logger.debug('Asset deleted, removing images from bucket and other cleanup steps') + logger.debug("Asset deleted, removing images from bucket and other cleanup steps") async_delete_asset_images.delay(instance.slug) @receiver(post_delete, sender=AssetImage) def post_assetimage_deleted(sender, instance: Asset, **kwargs): - logger.debug('AssetImage deleted, removing image from buckets') + logger.debug("AssetImage deleted, removing image from buckets") async_remove_img_from_cloud.delay(instance.id) @receiver(assignment_created, sender=Task) def post_assignment_created(sender, instance: Task, **kwargs): - logger.debug('Adding substasks to created assignments') + logger.debug("Adding substasks to created assignments") asset = Asset.objects.filter(slug=instance.associated_slug).first() if asset is None: - logger.debug(f'Ignoring task {instance.associated_slug} because its not an internal registry asset') + logger.debug(f"Ignoring task {instance.associated_slug} because its not an internal registry asset") return None # adding subtasks to assignment based on the readme from the task @@ -108,14 +108,14 @@ def post_assignment_created(sender, instance: Task, **kwargs): @receiver(github_webhook, sender=RepositoryWebhook) def post_webhook_received(sender, instance, **kwargs): - if instance.scope in ['push']: - logger.debug('Received github webhook signal for push') + if instance.scope in ["push"]: + logger.debug("Received github webhook signal for push") async_synchonize_repository_content.delay(instance.id) @receiver(syllabus_version_json_updated, sender=SyllabusVersion) def syllabus_json_updated(sender, instance, **kwargs): - logger.debug(f'Syllabus Version json for {instance.syllabus.slug} was updated') + logger.debug(f"Syllabus Version json for {instance.syllabus.slug} was updated") async_add_syllabus_translations.delay(instance.syllabus.slug, instance.version) diff --git a/breathecode/registry/serializers.py b/breathecode/registry/serializers.py index 3eb3f80d6..81fa8ceaf 100644 --- a/breathecode/registry/serializers.py +++ b/breathecode/registry/serializers.py @@ -22,6 +22,7 @@ class ProfileSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. 
avatar_url = serpy.Field() github_username = serpy.Field() @@ -29,6 +30,7 @@ class ProfileSerializer(serpy.Serializer): class SEOReportSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. report_type = serpy.Field() status = serpy.Field() @@ -40,6 +42,7 @@ class SEOReportSerializer(serpy.Serializer): class OriginalityScanSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. success = serpy.Field() score_original = serpy.Field() @@ -54,6 +57,7 @@ class OriginalityScanSerializer(serpy.Serializer): class VariableSmallSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. id = serpy.Field() key = serpy.Field() @@ -64,6 +68,7 @@ class VariableSmallSerializer(serpy.Serializer): class KeywordSmallSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. id = serpy.Field() slug = serpy.Field() @@ -91,6 +96,7 @@ class SmallAsset(serpy.Serializer): class AssetAliasSerializer(serpy.Serializer): """The serializer schema definition.""" + # Use a Field subclass like IntField if you need more validation. slug = serpy.Field() asset = SmallAsset() @@ -217,12 +223,12 @@ class AssetHookSerializer(serpy.Serializer): seo_keywords = serpy.MethodField() def get_technologies(self, obj): - _s = list(map(lambda t: t.slug, obj.technologies.filter(parent__isnull=True).order_by('sort_priority'))) - return ','.join(_s) + _s = list(map(lambda t: t.slug, obj.technologies.filter(parent__isnull=True).order_by("sort_priority"))) + return ",".join(_s) def get_seo_keywords(self, obj): _s = list(map(lambda t: t.slug, obj.seo_keywords.all())) - return ','.join(_s) + return ",".join(_s) class AssetSerializer(serpy.Serializer): @@ -267,7 +273,7 @@ def get_translations(self, obj): return result def get_technologies(self, obj): - _s = list(map(lambda t: t.slug, obj.technologies.filter(parent__isnull=True).order_by('sort_priority'))) + _s = list(map(lambda t: t.slug, obj.technologies.filter(parent__isnull=True).order_by("sort_priority"))) return _s def get_seo_keywords(self, obj): @@ -399,13 +405,14 @@ class AssetBigAndTechnologyPublishedSerializer(AssetBigSerializer): def get_translations(self, obj): result = {} - for t in obj.all_translations.filter(status='PUBLISHED'): + for t in obj.all_translations.filter(status="PUBLISHED"): result[t.lang] = t.slug return result def get_technologies(self, obj): techs = AssetTechnology.objects.filter( - id__in=obj.technologies.filter(visibility__in=['PUBLIC', 'UNLISTED'], is_deprecated=False)) + id__in=obj.technologies.filter(visibility__in=["PUBLIC", "UNLISTED"], is_deprecated=False) + ) return ParentAssetTechnologySerializer(techs, many=True).data @@ -415,7 +422,8 @@ class AssetAndTechnologySerializer(AssetSerializer): def get_technologies(self, obj): techs = AssetTechnology.objects.filter( - id__in=obj.technologies.filter(visibility__in=['PUBLIC', 'UNLISTED'], is_deprecated=False)) + id__in=obj.technologies.filter(visibility__in=["PUBLIC", "UNLISTED"], is_deprecated=False) + ) return ParentAssetTechnologySerializer(techs, many=True).data @@ -511,59 +519,61 @@ class PostAssetSerializer(serializers.ModelSerializer): class Meta: model = Asset - exclude = ('academy', ) + exclude = ("academy",) def validate(self, data): validated_data = super().validate(data) - if 'lang' 
not in validated_data or validated_data['lang'] is None: - raise ValidationException('Asset is missing a language', slug='no-language') + if "lang" not in validated_data or validated_data["lang"] is None: + raise ValidationException("Asset is missing a language", slug="no-language") - if 'category' not in data or data['category'] is None: - if 'all_translations' not in validated_data or len(validated_data['all_translations']) == 0: - raise ValidationException('No category was specified and we could not retrieve it from any translation', - slug='no-category') + if "category" not in data or data["category"] is None: + if "all_translations" not in validated_data or len(validated_data["all_translations"]) == 0: + raise ValidationException( + "No category was specified and we could not retrieve it from any translation", slug="no-category" + ) - asset_translation = Asset.objects.filter(slug=validated_data['all_translations'][0]).first() + asset_translation = Asset.objects.filter(slug=validated_data["all_translations"][0]).first() if asset_translation is None or asset_translation.category is None: - raise ValidationException('No category was specified and we could not retrieve it from any translation', - slug='no-category') + raise ValidationException( + "No category was specified and we could not retrieve it from any translation", slug="no-category" + ) category_translation = asset_translation.category.all_translations.filter( - lang=validated_data['lang']).first() + lang=validated_data["lang"] + ).first() if category_translation is None: raise ValidationException( f"No category was specified and translation's categories don't have language: {validated_data['lang']}" ) - validated_data['category'] = category_translation + validated_data["category"] = category_translation - academy_id = self.context['academy'] - validated_data['academy'] = Academy.objects.filter(id=academy_id).first() + academy_id = self.context["academy"] + validated_data["academy"] = Academy.objects.filter(id=academy_id).first() - alias = AssetAlias.objects.filter(slug=validated_data['slug']).first() + alias = AssetAlias.objects.filter(slug=validated_data["slug"]).first() if alias is not None: - raise ValidationException('Asset alias already exists with this slug') + raise ValidationException("Asset alias already exists with this slug") - if 'readme' in validated_data: - raise ValidationException('Property readme is read only, please update property readme_raw instead') + if "readme" in validated_data: + raise ValidationException("Property readme is read only, please update property readme_raw instead") return validated_data def create(self, validated_data): - academy_id = self.context['academy'] + academy_id = self.context["academy"] academy = Academy.objects.filter(id=academy_id).first() readme_raw = None - if 'readme_raw' in validated_data: - readme_raw = validated_data['readme_raw'] + if "readme_raw" in validated_data: + readme_raw = validated_data["readme_raw"] try: - return super(PostAssetSerializer, self).create({ - **validated_data, 'academy': academy, - 'readme_raw': readme_raw - }) + return super(PostAssetSerializer, self).create( + {**validated_data, "academy": academy, "readme_raw": readme_raw} + ) except Exception as e: raise ValidationException(e.message_dict, 400) @@ -573,28 +583,30 @@ class PostKeywordClusterSerializer(serializers.ModelSerializer): class Meta: model = KeywordCluster - exclude = ('academy', ) + exclude = ("academy",) def validate(self, data): validated_data = super().validate(data) - if 
'landing_page_url' in validated_data: - if 'http' not in validated_data['landing_page_url']: + if "landing_page_url" in validated_data: + if "http" not in validated_data["landing_page_url"]: raise ValidationException( - 'Please make your topic cluster landing page url is an absolute url that points to your page, this is how we know your page domain' + "Please make your topic cluster landing page url is an absolute url that points to your page, this is how we know your page domain" ) return validated_data def create(self, validated_data): - academy_id = self.context['academy'] + academy_id = self.context["academy"] academy = Academy.objects.filter(id=academy_id).first() - return super(PostKeywordClusterSerializer, self).create({ - **validated_data, - 'academy': academy, - }) + return super(PostKeywordClusterSerializer, self).create( + { + **validated_data, + "academy": academy, + } + ) def update(self, instance, validated_data): return super().update(instance, validated_data) @@ -604,16 +616,18 @@ class PostKeywordSerializer(serializers.ModelSerializer): class Meta: model = AssetKeyword - exclude = ('academy', ) + exclude = ("academy",) def create(self, validated_data): - academy_id = self.context['academy'] + academy_id = self.context["academy"] academy = Academy.objects.filter(id=academy_id).first() - return super(PostKeywordSerializer, self).create({ - **validated_data, - 'academy': academy, - }) + return super(PostKeywordSerializer, self).create( + { + **validated_data, + "academy": academy, + } + ) class PUTKeywordSerializer(serializers.ModelSerializer): @@ -623,7 +637,7 @@ class PUTKeywordSerializer(serializers.ModelSerializer): class Meta: model = AssetKeyword - exclude = ('academy', ) + exclude = ("academy",) def update(self, instance, validated_data): return super().update(instance, validated_data) @@ -636,7 +650,7 @@ class PUTCategorySerializer(serializers.ModelSerializer): class Meta: model = AssetCategory - exclude = ('academy', ) + exclude = ("academy",) def update(self, instance, validated_data): return super().update(instance, validated_data) @@ -646,16 +660,18 @@ class POSTCategorySerializer(serializers.ModelSerializer): class Meta: model = AssetCategory - exclude = ('academy', ) + exclude = ("academy",) def create(self, validated_data): - academy_id = self.context['academy'] + academy_id = self.context["academy"] academy = Academy.objects.filter(id=academy_id).first() - return super().create({ - **validated_data, - 'academy': academy, - }) + return super().create( + { + **validated_data, + "academy": academy, + } + ) class TechnologyPUTSerializer(serializers.ModelSerializer): @@ -663,21 +679,21 @@ class TechnologyPUTSerializer(serializers.ModelSerializer): class Meta: model = AssetTechnology - exclude = ('slug', ) + exclude = ("slug",) def validate(self, data): validated_data = {**data} - if 'parent' in data and data['parent'] is not None: + if "parent" in data and data["parent"] is not None: parent = None - if isinstance(data['parent'], int) or data['parent'].isnumeric(): - parent = AssetTechnology.objects.filter(id=data['parent']).first() + if isinstance(data["parent"], int) or data["parent"].isnumeric(): + parent = AssetTechnology.objects.filter(id=data["parent"]).first() else: - parent = AssetTechnology.objects.filter(slug=data['parent']).first() + parent = AssetTechnology.objects.filter(slug=data["parent"]).first() if parent.parent is not None: raise ValidationException( - f'The technology parent you are trying to set {parent.slug}, its a child of another technology, 
only technologies without parent can be set as parent' + f"The technology parent you are trying to set {parent.slug}, its a child of another technology, only technologies without parent can be set as parent" ) if parent is None: @@ -686,13 +702,13 @@ def validate(self, data): # if parent.id == self.instance.id: # raise ValidationException(f'Technology cannot be a parent of itself') - validated_data['parent'] = parent + validated_data["parent"] = parent return validated_data def update(self, instance, validated_data): - if 'parent' in validated_data and validated_data['parent'] is None: - instance.parent = validated_data.pop('parent') + if "parent" in validated_data and validated_data["parent"] is None: + instance.parent = validated_data.pop("parent") instance.save() return super().update(instance, validated_data) @@ -707,34 +723,34 @@ class Meta: def validate(self, data): - academy_id = self.context.get('academy') + academy_id = self.context.get("academy") asset = None - if 'asset' in data: - if data['asset'].isnumeric(): - asset = Asset.objects.filter(id=data['asset'], academy__id=academy_id).first() - elif data['asset'] != '': - asset = Asset.objects.filter(slug=data['asset'], academy__id=academy_id).first() + if "asset" in data: + if data["asset"].isnumeric(): + asset = Asset.objects.filter(id=data["asset"], academy__id=academy_id).first() + elif data["asset"] != "": + asset = Asset.objects.filter(slug=data["asset"], academy__id=academy_id).first() if asset is None: raise ValidationException(f'Asset {data["asset"]} not found for academy {academy_id}') - return super().validate({**data, 'asset': asset}) + return super().validate({**data, "asset": asset}) class PutAssetCommentSerializer(serializers.ModelSerializer): class Meta: model = AssetComment - exclude = ('text', 'asset', 'author') + exclude = ("text", "asset", "author") def validate(self, data): validated_data = super().validate(data) - session_user = self.context.get('request').user + session_user = self.context.get("request").user if self.instance.owner is not None and self.instance.owner.id == session_user.id: - if 'resolved' in data and data['resolved'] != self.instance.resolved: - raise ValidationException('You cannot update the resolved property if you are the Asset Comment owner') + if "resolved" in data and data["resolved"] != self.instance.resolved: + raise ValidationException("You cannot update the resolved property if you are the Asset Comment owner") return validated_data @@ -754,7 +770,7 @@ class VariableSerializer(serializers.ModelSerializer): class Meta: model = ContentVariable - exclude = ('academy', ) + exclude = ("academy",) class AssetPUTSerializer(serializers.ModelSerializer): @@ -767,83 +783,92 @@ class AssetPUTSerializer(serializers.ModelSerializer): class Meta: model = Asset - exclude = ('academy', ) + exclude = ("academy",) list_serializer_class = AssetListSerializer def validate(self, data): - academy_id = self.context.get('academy_id') - session_user = self.context.get('request').user + academy_id = self.context.get("academy_id") + session_user = self.context.get("request").user member = ProfileAcademy.objects.filter(user=session_user, academy__id=academy_id).first() if member is None: - raise ValidationException(f"You don't belong to the academy {academy_id} owner of this asset", - status.HTTP_400_BAD_REQUEST) + raise ValidationException( + f"You don't belong to the academy {academy_id} owner of this asset", status.HTTP_400_BAD_REQUEST + ) - if member.role.slug == 'content_writer': + if member.role.slug 
== "content_writer": for key in data: - if key != 'status' and data[key] != getattr(self.instance, key): - raise ValidationException('You are only allowed to change the status of this asset', - status.HTTP_400_BAD_REQUEST) - if 'status' in data and data['status'] not in ['DRAFT', 'WRITING', 'NOT_STARTED', 'OPTIMIZED', 'PLANNING']: + if key != "status" and data[key] != getattr(self.instance, key): + raise ValidationException( + "You are only allowed to change the status of this asset", status.HTTP_400_BAD_REQUEST + ) + if "status" in data and data["status"] not in ["DRAFT", "WRITING", "NOT_STARTED", "OPTIMIZED", "PLANNING"]: raise ValidationException( - 'You can only set the status to not started, draft, writing, optimized, or planning', - status.HTTP_400_BAD_REQUEST) + "You can only set the status to not started, draft, writing, optimized, or planning", + status.HTTP_400_BAD_REQUEST, + ) - if self.instance.author is None and data['status'] != 'NOT_STARTED': - data['author'] = session_user + if self.instance.author is None and data["status"] != "NOT_STARTED": + data["author"] = session_user elif self.instance.author.id != session_user.id: - raise ValidationException('You can only update card assigned to yourself', status.HTTP_400_BAD_REQUEST) + raise ValidationException("You can only update card assigned to yourself", status.HTTP_400_BAD_REQUEST) - if 'status' in data and data['status'] == 'PUBLISHED': - if self.instance.test_status not in ['OK', 'WARNING']: - raise ValidationException('This asset has to pass tests successfully before publishing', - status.HTTP_400_BAD_REQUEST) + if "status" in data and data["status"] == "PUBLISHED": + if self.instance.test_status not in ["OK", "WARNING"]: + raise ValidationException( + "This asset has to pass tests successfully before publishing", status.HTTP_400_BAD_REQUEST + ) - if 'visibility' in data and data['visibility'] in ['PUBLIC', 'UNLISTED' - ] and self.instance.test_status not in ['OK', 'WARNING']: - raise ValidationException('This asset has to pass tests successfully before publishing', code=400) + if ( + "visibility" in data + and data["visibility"] in ["PUBLIC", "UNLISTED"] + and self.instance.test_status not in ["OK", "WARNING"] + ): + raise ValidationException("This asset has to pass tests successfully before publishing", code=400) - if 'slug' in data: - data['slug'] = slugify(data['slug']).lower() + if "slug" in data: + data["slug"] = slugify(data["slug"]).lower() lang = self.instance.lang - if 'lang' in data: - lang = data['lang'] + if "lang" in data: + lang = data["lang"] category = self.instance.category - if 'category' in data: - category = data['category'] + if "category" in data: + category = data["category"] - if 'superseded_by' in data and data['superseded_by']: - if data['superseded_by'].id == self.instance.id: - raise ValidationException('One asset cannot supersed itself', code=400) + if "superseded_by" in data and data["superseded_by"]: + if data["superseded_by"].id == self.instance.id: + raise ValidationException("One asset cannot supersed itself", code=400) try: - _prev = data['superseded_by'].previous_version + _prev = data["superseded_by"].previous_version if _prev and (not self.instance.superseded_by or _prev.id != self.instance.superseded_by.id): raise ValidationException( f'Asset {data["superseded_by"].id} is already superseding {_prev.asset_type}: {_prev.slug}', - code=400) + code=400, + ) except Exception: pass try: previous_version = self.instance.previous_version - if previous_version and data['superseded_by'].id == 
previous_version.id: - raise ValidationException('One asset cannot have its previous version also superseding', code=400) + if previous_version and data["superseded_by"].id == previous_version.id: + raise ValidationException("One asset cannot have its previous version also superseding", code=400) except Exception: pass if category is None: - raise ValidationException('Asset category cannot be null', status.HTTP_400_BAD_REQUEST) + raise ValidationException("Asset category cannot be null", status.HTTP_400_BAD_REQUEST) if lang != category.lang: translated_category = category.all_translations.filter(lang=lang).first() if translated_category is None: raise ValidationException( - 'Asset category is in a different language than the asset itself and we could not find a category translation that matches the same language', - status.HTTP_400_BAD_REQUEST) - data['category'] = translated_category + "Asset category is in a different language than the asset itself and we could not find a category translation that matches the same language", + status.HTTP_400_BAD_REQUEST, + ) + data["category"] = translated_category validated_data = super().validate(data) return validated_data @@ -852,33 +877,34 @@ def update(self, instance, validated_data): data = {} - if 'status' in validated_data: - if validated_data['status'] == 'PUBLISHED' and instance.status != 'PUBLISHED': + if "status" in validated_data: + if validated_data["status"] == "PUBLISHED" and instance.status != "PUBLISHED": now = timezone.now() - data['published_at'] = now - elif validated_data['status'] != 'PUBLISHED': - data['published_at'] = None + data["published_at"] = now + elif validated_data["status"] != "PUBLISHED": + data["published_at"] = None - if 'readme_url' in validated_data: + if "readme_url" in validated_data: def get_repo_url(url): parsed_url = urlparse(url) # Extract the scheme, netloc, and the first two parts of the path (organization/repository) - repo_url = f'{parsed_url.scheme}://{parsed_url.netloc}' - path_parts = parsed_url.path.strip('/').split('/') + repo_url = f"{parsed_url.scheme}://{parsed_url.netloc}" + path_parts = parsed_url.path.strip("/").split("/") if len(path_parts) >= 2: - repo_url += f'/{path_parts[0]}/{path_parts[1]}' + repo_url += f"/{path_parts[0]}/{path_parts[1]}" return repo_url - repo = get_repo_url(validated_data['readme_url']) - data['url'] = repo + repo = get_repo_url(validated_data["readme_url"]) + data["url"] = repo # Check if preview img is being deleted - if 'preview' in validated_data: - if validated_data['preview'] == None and instance.preview != None: - hash = instance.preview.split('/')[-1] + if "preview" in validated_data: + if validated_data["preview"] == None and instance.preview != None: + hash = instance.preview.split("/")[-1] if hash is not None: from .tasks import async_remove_asset_preview_from_cloud + async_remove_asset_preview_from_cloud.delay(hash) return super().update(instance, {**validated_data, **data}) diff --git a/breathecode/registry/signals.py b/breathecode/registry/signals.py index f860203b6..d49fb1116 100644 --- a/breathecode/registry/signals.py +++ b/breathecode/registry/signals.py @@ -2,6 +2,7 @@ For each signal you want other apps to be able to receive, you have to declare a new variable here like this: """ + from django import dispatch asset_slug_modified = dispatch.Signal() diff --git a/breathecode/registry/tasks.py b/breathecode/registry/tasks.py index c790d45c6..42d75430c 100644 --- a/breathecode/registry/tasks.py +++ b/breathecode/registry/tasks.py @@ -41,14 +41,14 @@ 
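Editor's note on the serializers.py hunk above: AssetPUTSerializer.update() now derives the repository base URL from readme_url with urlparse, keeping only the scheme, host, and the first two path segments (organization/repository). A self-contained version of that helper is sketched below; the GitHub URL in the usage check is made up for illustration.

# Editor's sketch (mirrors the get_repo_url() helper shown above):
from urllib.parse import urlparse

def get_repo_url(url: str) -> str:
    parsed = urlparse(url)
    repo_url = f"{parsed.scheme}://{parsed.netloc}"
    # keep only the first two path segments: organization and repository
    path_parts = parsed.path.strip("/").split("/")
    if len(path_parts) >= 2:
        repo_url += f"/{path_parts[0]}/{path_parts[1]}"
    return repo_url

# Example: a blob URL collapses to the repository root.
assert get_repo_url(
    "https://github.com/some-org/some-repo/blob/main/README.md"
) == "https://github.com/some-org/some-repo"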
def google_project_id(): - return os.getenv('GOOGLE_PROJECT_ID', '') + return os.getenv("GOOGLE_PROJECT_ID", "") -img_regex = r'https?:(?:[/|.|\w|\s|-])*\.(?:jpg|gif|png|svg|jpeg)' +img_regex = r"https?:(?:[/|.|\w|\s|-])*\.(?:jpg|gif|png|svg|jpeg)" def is_remote_image(_str): - if _str is None or _str == '' or asset_images_bucket('') in _str: + if _str is None or _str == "" or asset_images_bucket("") in _str: return False match = re.search(img_regex, _str) @@ -60,22 +60,22 @@ def is_remote_image(_str): @shared_task(priority=TaskPriority.ACADEMY.value) def async_pull_from_github(asset_slug, user_id=None, override_meta=False): - logger.debug(f'Synching asset {asset_slug} with data found on github') + logger.debug(f"Synching asset {asset_slug} with data found on github") sync_status = pull_from_github(asset_slug, override_meta=override_meta) - return sync_status != 'ERROR' + return sync_status != "ERROR" @shared_task(priority=TaskPriority.ACADEMY.value) def async_test_asset(asset_slug): a = Asset.objects.filter(slug=asset_slug).first() if a is None: - logger.debug(f'Error: Error testing asset with slug {asset_slug}, does not exist.') + logger.debug(f"Error: Error testing asset with slug {asset_slug}, does not exist.") try: if test_asset(a): return True except Exception: - logger.exception(f'Error testing asset {a.slug}') + logger.exception(f"Error testing asset {a.slug}") return False @@ -83,11 +83,11 @@ def async_test_asset(asset_slug): @shared_task(priority=TaskPriority.ACADEMY.value) def async_update_frontend_asset_cache(asset_slug): try: - if os.getenv('ENV', '') != 'production': + if os.getenv("ENV", "") != "production": return - logger.info('async_update_frontend_asset_cache') - url = os.getenv('APP_URL', '') + f'/api/asset/{asset_slug}' + logger.info("async_update_frontend_asset_cache") + url = os.getenv("APP_URL", "") + f"/api/asset/{asset_slug}" requests.put(url=url) except Exception as e: logger.error(str(e)) @@ -97,7 +97,7 @@ def async_update_frontend_asset_cache(asset_slug): def async_regenerate_asset_readme(asset_slug): a = Asset.objects.filter(slug=asset_slug).first() if a is None: - logger.debug(f'Error: Error running SEO report for asset with slug {asset_slug}, does not exist.') + logger.debug(f"Error: Error running SEO report for asset with slug {asset_slug}, does not exist.") return False a.readme = a.readme_raw @@ -107,20 +107,20 @@ def async_regenerate_asset_readme(asset_slug): async_download_readme_images.delay(a.slug) async_update_frontend_asset_cache.delay(a.slug) - return a.cleaning_status == 'OK' + return a.cleaning_status == "OK" @shared_task(priority=TaskPriority.ACADEMY.value) def async_execute_seo_report(asset_slug): a = Asset.objects.filter(slug=asset_slug).first() if a is None: - logger.debug(f'Error: Error running SEO report for asset with slug {asset_slug}, does not exist.') + logger.debug(f"Error: Error running SEO report for asset with slug {asset_slug}, does not exist.") try: report = SEOAnalyzer(a) report.start() except Exception: - logger.exception(f'Error running SEO report asset {a.slug}') + logger.exception(f"Error running SEO report asset {a.slug}") return False @@ -128,9 +128,10 @@ def async_execute_seo_report(asset_slug): @task(priority=TaskPriority.ACADEMY.value) def async_create_asset_thumbnail_legacy(asset_slug: str, **_): from breathecode.registry.actions import AssetThumbnailGenerator + asset = Asset.objects.filter(slug=asset_slug).first() if asset is None: - raise Exception(f'Asset with slug {asset_slug} not found') + raise Exception(f"Asset 
with slug {asset_slug} not found") generator = AssetThumbnailGenerator(asset) generator.create() @@ -143,31 +144,33 @@ def async_create_asset_thumbnail(asset_slug: str, **_): asset = Asset.objects.filter(slug=asset_slug).first() if asset is None: - raise RetryTask(f'Asset with slug {asset_slug} not found') + raise RetryTask(f"Asset with slug {asset_slug} not found") preview_url = asset.get_preview_generation_url() if preview_url is None: - raise AbortTask('Not able to retrieve a preview generation') + raise AbortTask("Not able to retrieve a preview generation") name = asset.get_thumbnail_name() - url = set_query_parameter(preview_url, 'slug', asset_slug) + url = set_query_parameter(preview_url, "slug", asset_slug) response = None - logger.info(f'Generating screenshot for {preview_url}') + logger.info(f"Generating screenshot for {preview_url}") try: - response = generate_screenshot(url, '1200x630', delay=1000) + response = generate_screenshot(url, "1200x630", delay=1000) except Exception as e: - raise AbortTask('Error calling service to generate thumbnail screenshot: ' + str(e)) + raise AbortTask("Error calling service to generate thumbnail screenshot: " + str(e)) if response.status_code >= 400: - raise AbortTask('Unhandled error with async_create_asset_thumbnail, the cloud function `screenshots` ' - f'returns status code {response.status_code}') + raise AbortTask( + "Unhandled error with async_create_asset_thumbnail, the cloud function `screenshots` " + f"returns status code {response.status_code}" + ) file = response.content hash = hashlib.sha256(file).hexdigest() - content_type = response.headers['content-type'] + content_type = response.headers["content-type"] storage = Storage() @@ -180,66 +183,69 @@ def async_create_asset_thumbnail(asset_slug: str, **_): media = Media.objects.filter(hash=hash, academy=asset.academy).first() if media is not None: - if asset.preview is None or asset.preview == '': + if asset.preview is None or asset.preview == "": asset.preview = media.url asset.save() - raise AbortTask(f'Media with hash {hash} already exists, skipping') + raise AbortTask(f"Media with hash {hash} already exists, skipping") # file already exists for another academy media = Media.objects.filter(hash=hash).first() if media: - media = Media(slug=name.split('.')[0], - name=media.name, - url=media.url, - thumbnail=media.thumbnail, - academy=asset.academy, - mime=media.mime, - hash=media.hash) + media = Media( + slug=name.split(".")[0], + name=media.name, + url=media.url, + thumbnail=media.thumbnail, + academy=asset.academy, + mime=media.mime, + hash=media.hash, + ) media.save() - if asset.preview is None or asset.preview == '': + if asset.preview is None or asset.preview == "": asset.preview = media.url asset.save() - raise AbortTask(f'Media was save with {hash} for academy {asset.academy}') + raise AbortTask(f"Media was save with {hash} for academy {asset.academy}") # if media does not exist too, keep the screenshots with other name cloud_file.rename(hash) - url = f'https://storage.googleapis.com/{screenshots_bucket()}/{hash}' + url = f"https://storage.googleapis.com/{screenshots_bucket()}/{hash}" media = Media( - slug=name.split('.')[0], + slug=name.split(".")[0], name=name, url=url, - thumbnail=f'{url}-thumbnail', + thumbnail=f"{url}-thumbnail", academy=asset.academy, - mime='image/png', # this should change in a future, check the cloud function - hash=hash) + mime="image/png", # this should change in a future, check the cloud function + hash=hash, + ) media.save() - if asset.preview is 
None or asset.preview == '': + if asset.preview is None or asset.preview == "": asset.preview = url asset.save() - logger.warning(f'Media was save with {hash} for academy {asset.academy}') + logger.warning(f"Media was save with {hash} for academy {asset.academy}") @shared_task(priority=TaskPriority.ACADEMY.value) def async_download_readme_images(asset_slug): - logger.debug(f'Downloading images for asset {asset_slug}') + logger.debug(f"Downloading images for asset {asset_slug}") asset = Asset.get_by_slug(asset_slug) if asset is None: - raise Exception(f'Asset with slug {asset_slug} not found') + raise Exception(f"Asset with slug {asset_slug} not found") readme = asset.get_readme(parse=True) - if 'html' not in readme: - logger.error(f'Asset with {asset_slug} readme cannot be parse into an HTML') + if "html" not in readme: + logger.error(f"Asset with {asset_slug} readme cannot be parse into an HTML") return False - images = BeautifulSoup(readme['html'], features='html.parser').find_all('img', attrs={'srcset': True}) + images = BeautifulSoup(readme["html"], features="html.parser").find_all("img", attrs={"srcset": True}) # check if old images are stil in the new markdown file old_images = asset.images.all() @@ -250,14 +256,14 @@ def async_download_readme_images(asset_slug): image_links = [] for image in images: - image_links.append(image['src']) + image_links.append(image["src"]) - srcset = image.attrs.get('srcset') - if srcset and srcset != '': - srcsets = [src.strip().split(' ')[0] for src in srcset.split(',')] + srcset = image.attrs.get("srcset") + if srcset and srcset != "": + srcsets = [src.strip().split(" ")[0] for src in srcset.split(",")] image_links += srcsets - additional_img_urls = list(re.finditer(img_regex, readme['html'])) + additional_img_urls = list(re.finditer(img_regex, readme["html"])) while len(additional_img_urls) > 0: match = additional_img_urls.pop(0) if match is not None: @@ -265,11 +271,11 @@ def async_download_readme_images(asset_slug): image_links.append(img_url) image_links = list(dict.fromkeys(filter(lambda x: is_remote_image(x), image_links))) - logger.debug(f'Found {len(image_links)} images on asset {asset_slug}') + logger.debug(f"Found {len(image_links)} images on asset {asset_slug}") # create subfolder with the page name if len(image_links) == 0: - print('No images found') + print("No images found") return False for link in image_links: @@ -278,7 +284,7 @@ def async_download_readme_images(asset_slug): async_download_single_readme_image.delay(asset_slug, link) # delete asset from this image - logger.debug(f'Found {len(no_longer_used)} images no longer used on asset {asset_slug}') + logger.debug(f"Found {len(no_longer_used)} images no longer used on asset {asset_slug}") for old_img in no_longer_used: no_longer_used[old_img].assets.remove(asset) @@ -294,7 +300,7 @@ def async_delete_asset_images(asset_slug, **_): asset = Asset.get_by_slug(asset_slug) if asset is None: - raise RetryTask(f'Asset with slug {asset_slug} not found') + raise RetryTask(f"Asset with slug {asset_slug} not found") storage = Storage() for img in asset.images.all(): @@ -306,7 +312,7 @@ def async_delete_asset_images(asset_slug, **_): else: img.assets.remove(asset) - logger.info(f'Image {img.name} was deleted') + logger.info(f"Image {img.name} was deleted") return True @@ -314,11 +320,11 @@ def async_delete_asset_images(asset_slug, **_): @task(priority=TaskPriority.ACADEMY.value) def async_remove_img_from_cloud(id, **_): - logger.info('async_remove_img_from_cloud') + 
logger.info("async_remove_img_from_cloud") img = AssetImage.objects.filter(id=id).first() if img is None: - raise RetryTask(f'Image with id {id} not found') + raise RetryTask(f"Image with id {id} not found") img_name = img.name @@ -328,28 +334,28 @@ def async_remove_img_from_cloud(id, **_): cloud_file.delete() img.delete() - logger.info(f'Image id ({img_name}) was deleted from the cloud') + logger.info(f"Image id ({img_name}) was deleted from the cloud") return True @task(priority=TaskPriority.ACADEMY.value) def async_remove_asset_preview_from_cloud(hash, **_): - logger.info('async_remove_asset_preview_from_cloud') + logger.info("async_remove_asset_preview_from_cloud") media = Media.objects.filter(hash=hash).first() if media is None: - raise Exception(f'Media with hash {hash} not found') + raise Exception(f"Media with hash {hash} not found") media_name = media.name storage = Storage() - extension = media.mime.split('/')[-1] + extension = media.mime.split("/")[-1] cloud_file = storage.file(screenshots_bucket(), media.hash + extension) cloud_file.delete() media.delete() - logger.info(f'Media name ({media_name}) was deleted from the cloud') + logger.info(f"Media name ({media_name}) was deleted from the cloud") return True @@ -358,11 +364,11 @@ def async_upload_image_to_bucket(id, **_): img = AssetImage.objects.filter(id=id).first() if img is None: - raise Exception(f'Image with id {id} not found') + raise Exception(f"Image with id {id} not found") - img.download_status = 'PENDING' + img.download_status = "PENDING" # FIXME: undefined variable - img.download_details = f'Downloading {img.original_url}' + img.download_details = f"Downloading {img.original_url}" img.save() try: @@ -373,7 +379,7 @@ def async_upload_image_to_bucket(id, **_): except Exception as e: img.download_details = str(e) - img.download_status = 'ERROR' + img.download_status = "ERROR" raise e img.save() @@ -385,17 +391,17 @@ def async_download_single_readme_image(asset_slug, link, **_): asset = Asset.get_by_slug(asset_slug) if asset is None: - raise RetryTask(f'Asset with slug {asset_slug} not found') + raise RetryTask(f"Asset with slug {asset_slug} not found") img = AssetImage.objects.filter(Q(original_url=link) | Q(bucket_url=link)).first() if img is None: - temp_filename = link.split('/')[-1].split('?')[0] + temp_filename = link.split("/")[-1].split("?")[0] img = AssetImage(name=temp_filename, original_url=link, last_download_at=timezone.now()) - if img.download_status != 'OK': + if img.download_status != "OK": - img.download_status = 'PENDING' - img.download_details = f'Downloading {link}' + img.download_status = "PENDING" + img.download_details = f"Downloading {link}" img.save() try: @@ -406,13 +412,13 @@ def async_download_single_readme_image(asset_slug, link, **_): except Exception as e: img.download_details = str(e) - img.download_status = 'ERROR' + img.download_status = "ERROR" img.save() raise e img.save() readme = asset.get_readme() - asset.set_readme(readme['decoded'].replace(link, img.bucket_url)) + asset.set_readme(readme["decoded"].replace(link, img.bucket_url)) asset.save() return img.download_status @@ -421,84 +427,93 @@ def async_download_single_readme_image(asset_slug, link, **_): def async_resize_asset_thumbnail(media_id: int, width: Optional[int] = 0, height: Optional[int] = 0): media = Media.objects.filter(id=media_id).first() if media is None: - logger.error(f'Media with id {media_id} not found') + logger.error(f"Media with id {media_id} not found") return if not width and not height: - 
logger.error('async_resize_asset_thumbnail needs the width or height parameter') + logger.error("async_resize_asset_thumbnail needs the width or height parameter") return if width and height: logger.error("async_resize_asset_thumbnail can't be used with width and height together") return - kwargs = {'width': width} if width else {'height': height} + kwargs = {"width": width} if width else {"height": height} - func = FunctionV1(region='us-central1', project_id=google_project_id(), name='resize-image') + func = FunctionV1(region="us-central1", project_id=google_project_id(), name="resize-image") - response = func.call({ - **kwargs, - 'filename': media.hash, - 'bucket': media_gallery_bucket(), - }) + response = func.call( + { + **kwargs, + "filename": media.hash, + "bucket": media_gallery_bucket(), + } + ) res = response.json() - if not res['status_code'] == 200 or not res['message'] == 'Ok': - logger.error(f'Unhandled error with `resize-image` cloud function, response {res}') + if not res["status_code"] == 200 or not res["message"] == "Ok": + logger.error(f"Unhandled error with `resize-image` cloud function, response {res}") return - resolution = MediaResolution(width=res['width'], height=res['height'], hash=media.hash) + resolution = MediaResolution(width=res["width"], height=res["height"], hash=media.hash) resolution.save() @shared_task(bind=True, base=WebhookTask, priority=TaskPriority.CONTENT.value) def async_synchonize_repository_content(self, webhook): - logger.debug('async_synchonize_repository_content') + logger.debug("async_synchonize_repository_content") payload = webhook.get_payload() # some times the json contains a nested payload property - if 'payload' in payload: payload = payload['payload'] + if "payload" in payload: + payload = payload["payload"] - if 'commits' not in payload: - raise AbortTask('No commits found on the push object') + if "commits" not in payload: + raise AbortTask("No commits found on the push object") - if 'repository' not in payload: - raise AbortTask('Missing repository information') - elif 'url' not in payload['repository']: + if "repository" not in payload: + raise AbortTask("Missing repository information") + elif "url" not in payload["repository"]: raise AbortTask( - 'Repository payload is invalid, expecting an object with "url" key. Check the webhook content-type') + 'Repository payload is invalid, expecting an object with "url" key. 
Check the webhook content-type' + ) - base_repo_url = payload['repository']['url'] - default_branch = payload['repository']['default_branch'] + base_repo_url = payload["repository"]["url"] + default_branch = payload["repository"]["default_branch"] files = [] - for commit in payload['commits']: - for file_path in commit['modified']: + for commit in payload["commits"]: + for file_path in commit["modified"]: # one file can be modified in multiple commits, but we don't have to synch many times if file_path not in files: files.append(file_path) logger.debug( - f'The file {file_path} was modified, searching for matches in our registry with {base_repo_url}/blob/{default_branch}/{file_path}' + f"The file {file_path} was modified, searching for matches in our registry with {base_repo_url}/blob/{default_branch}/{file_path}" ) # include readme files and quiz json files - all_readme_files = Q(readme_url__icontains=f'{base_repo_url}/blob/{default_branch}/{file_path}') + all_readme_files = Q(readme_url__icontains=f"{base_repo_url}/blob/{default_branch}/{file_path}") # Conditional query for when 'learn.json' is in file_path - learn_json_files = Q(asset_type__in=['EXERCISE', 'PROJECT'], - readme_url__icontains=f'{base_repo_url}/blob/{default_branch}/' - ) if 'learn.json' in file_path else Q() + learn_json_files = ( + Q( + asset_type__in=["EXERCISE", "PROJECT"], + readme_url__icontains=f"{base_repo_url}/blob/{default_branch}/", + ) + if "learn.json" in file_path + else Q() + ) # Execute the combined query assets = Asset.objects.filter(all_readme_files | learn_json_files) for a in assets: - if commit['id'] == a.github_commit_hash: + if commit["id"] == a.github_commit_hash: # ignore asset because the commit content is already on the asset # probably the asset was updated in github using the breathecode api continue - logger.debug(f'Pulling asset from github for asset: {a.slug}') + logger.debug(f"Pulling asset from github for asset: {a.slug}") async_pull_from_github.delay(a.slug) return webhook @@ -512,7 +527,7 @@ def async_add_syllabus_translations(syllabus_slug, version): raise Exception(f'Syllabus {syllabus_slug} with version "{version}" not found') if syllabus_version.json is None: - syllabus_version.json = {'days': []} + syllabus_version.json = {"days": []} syllabus_version.json = add_syllabus_translations(syllabus_version.json) syllabus_version.save() @@ -523,7 +538,7 @@ def async_generate_quiz_config(assessment_id): assessment = Assessment.objects.filter(id=assessment_id, is_archived=False).first() if assessment is None: - raise Exception(f'Assessment {assessment_id} not found or its archived') + raise Exception(f"Assessment {assessment_id} not found or its archived") assets = assessment.asset_set.all() for a in assets: diff --git a/breathecode/registry/tests/actions/tests_asset_thumbnail_generator.py b/breathecode/registry/tests/actions/tests_asset_thumbnail_generator.py index 7c92a9c06..ca5705bba 100644 --- a/breathecode/registry/tests/actions/tests_asset_thumbnail_generator.py +++ b/breathecode/registry/tests/actions/tests_asset_thumbnail_generator.py @@ -1,6 +1,7 @@ """ Test /answer """ + from random import randint from unittest.mock import MagicMock, call, patch from breathecode.registry import tasks @@ -27,10 +28,10 @@ def test__constructor(self): height = randint(1, 2000) model = self.bc.database.create(asset=1) cases = [ - ((None, ), (None, 0, 0)), + ((None,), (None, 0, 0)), ((None, 0, 0), (None, 0, 0)), ((None, width, height), (None, width, height)), - ((model.asset, ), (model.asset, 0, 0)), + 
((model.asset,), (model.asset, 0, 0)), ((model.asset, 0, 0), (model.asset, 0, 0)), ((model.asset, width, height), (model.asset, width, height)), ] @@ -42,9 +43,12 @@ def test__constructor(self): self.assertEqual(generator.width, result[1]) self.assertEqual(generator.height, result[2]) - self.assertEqual(self.bc.database.list_of('registry.Asset'), [ - self.bc.format.to_dict(model.asset), - ]) + self.assertEqual( + self.bc.database.list_of("registry.Asset"), + [ + self.bc.format.to_dict(model.asset), + ], + ) """ 🔽🔽🔽 _get_default_url without preview @@ -55,24 +59,24 @@ def test__get_default_url__without_preview(self): height = randint(1, 2000) model = self.bc.database.create(asset=1) constructor_cases = [ - ((None, ), (None, 0, 0)), + ((None,), (None, 0, 0)), ((None, 0, 0), (None, 0, 0)), ((None, width, 0), (None, width, 0)), ((None, 0, height), (None, 0, height)), ((None, width, height), (None, width, height)), - ((model.asset, ), (model.asset, 0, 0)), + ((model.asset,), (model.asset, 0, 0)), ((model.asset, 0, 0), (model.asset, 0, 0)), ((model.asset, width, 0), (model.asset, width, 0)), ((model.asset, 0, height), (model.asset, 0, height)), ((model.asset, width, height), (model.asset, width, height)), ] - urls = ['', self.bc.fake.url()] + urls = ["", self.bc.fake.url()] for url in urls: for args, result in constructor_cases: generator = AssetThumbnailGenerator(*args) - with patch('os.getenv', MagicMock(side_effect=apply_get_env({'DEFAULT_ASSET_PREVIEW_URL': url}))): + with patch("os.getenv", MagicMock(side_effect=apply_get_env({"DEFAULT_ASSET_PREVIEW_URL": url}))): default_url = generator._get_default_url() self.assertEqual(default_url, url) @@ -80,9 +84,12 @@ def test__get_default_url__without_preview(self): self.assertEqual(generator.width, result[1]) self.assertEqual(generator.height, result[2]) - self.assertEqual(self.bc.database.list_of('registry.Asset'), [ - self.bc.format.to_dict(model.asset), - ]) + self.assertEqual( + self.bc.database.list_of("registry.Asset"), + [ + self.bc.format.to_dict(model.asset), + ], + ) """ 🔽🔽🔽 _get_default_url with preview @@ -91,22 +98,22 @@ def test__get_default_url__without_preview(self): def test__get_default_url__with_preview(self): width = randint(1, 2000) height = randint(1, 2000) - asset = {'preview': self.bc.fake.url()} + asset = {"preview": self.bc.fake.url()} model = self.bc.database.create(asset=asset) constructor_cases = [ - ((model.asset, ), (model.asset, 0, 0)), + ((model.asset,), (model.asset, 0, 0)), ((model.asset, 0, 0), (model.asset, 0, 0)), ((model.asset, width, 0), (model.asset, width, 0)), ((model.asset, 0, height), (model.asset, 0, height)), ((model.asset, width, height), (model.asset, width, height)), ] - urls = ['', self.bc.fake.url()] + urls = ["", self.bc.fake.url()] for url in urls: for args, result in constructor_cases: generator = AssetThumbnailGenerator(*args) - with patch('os.getenv', MagicMock(side_effect=apply_get_env({'DEFAULT_ASSET_PREVIEW_URL': url}))): + with patch("os.getenv", MagicMock(side_effect=apply_get_env({"DEFAULT_ASSET_PREVIEW_URL": url}))): default_url = generator._get_default_url() self.assertEqual(default_url, url) @@ -114,9 +121,12 @@ def test__get_default_url__with_preview(self): self.assertEqual(generator.width, result[1]) self.assertEqual(generator.height, result[2]) - self.assertEqual(self.bc.database.list_of('registry.Asset'), [ - self.bc.format.to_dict(model.asset), - ]) + self.assertEqual( + self.bc.database.list_of("registry.Asset"), + [ + self.bc.format.to_dict(model.asset), + ], + ) """ 🔽🔽🔽 
_get_asset_url without preview @@ -127,24 +137,24 @@ def test__get_asset_url__without_preview(self): height = randint(1, 2000) model = self.bc.database.create(asset=1) constructor_cases = [ - ((None, ), (None, 0, 0)), + ((None,), (None, 0, 0)), ((None, 0, 0), (None, 0, 0)), ((None, width, 0), (None, width, 0)), ((None, 0, height), (None, 0, height)), ((None, width, height), (None, width, height)), - ((model.asset, ), (model.asset, 0, 0)), + ((model.asset,), (model.asset, 0, 0)), ((model.asset, 0, 0), (model.asset, 0, 0)), ((model.asset, width, 0), (model.asset, width, 0)), ((model.asset, 0, height), (model.asset, 0, height)), ((model.asset, width, height), (model.asset, width, height)), ] - urls = ['', self.bc.fake.url()] + urls = ["", self.bc.fake.url()] for url in urls: for args, result in constructor_cases: generator = AssetThumbnailGenerator(*args) - with patch('os.getenv', MagicMock(side_effect=apply_get_env({'DEFAULT_ASSET_PREVIEW_URL': url}))): + with patch("os.getenv", MagicMock(side_effect=apply_get_env({"DEFAULT_ASSET_PREVIEW_URL": url}))): default_url = generator._get_asset_url() self.assertEqual(default_url, url) @@ -152,9 +162,12 @@ def test__get_asset_url__without_preview(self): self.assertEqual(generator.width, result[1]) self.assertEqual(generator.height, result[2]) - self.assertEqual(self.bc.database.list_of('registry.Asset'), [ - self.bc.format.to_dict(model.asset), - ]) + self.assertEqual( + self.bc.database.list_of("registry.Asset"), + [ + self.bc.format.to_dict(model.asset), + ], + ) """ 🔽🔽🔽 _get_asset_url with preview @@ -163,22 +176,22 @@ def test__get_asset_url__without_preview(self): def test__get_asset_url__with_preview(self): width = randint(1, 2000) height = randint(1, 2000) - asset = {'preview': self.bc.fake.url()} + asset = {"preview": self.bc.fake.url()} model = self.bc.database.create(asset=asset) constructor_cases = [ - ((model.asset, ), (model.asset, 0, 0)), + ((model.asset,), (model.asset, 0, 0)), ((model.asset, 0, 0), (model.asset, 0, 0)), ((model.asset, width, 0), (model.asset, width, 0)), ((model.asset, 0, height), (model.asset, 0, height)), ((model.asset, width, height), (model.asset, width, height)), ] - urls = ['', self.bc.fake.url()] + urls = ["", self.bc.fake.url()] for url in urls: for args, result in constructor_cases: generator = AssetThumbnailGenerator(*args) - with patch('os.getenv', MagicMock(side_effect=apply_get_env({'DEFAULT_ASSET_PREVIEW_URL': url}))): + with patch("os.getenv", MagicMock(side_effect=apply_get_env({"DEFAULT_ASSET_PREVIEW_URL": url}))): default_url = generator._get_asset_url() self.assertEqual(default_url, model.asset.preview) @@ -186,9 +199,12 @@ def test__get_asset_url__with_preview(self): self.assertEqual(generator.width, result[1]) self.assertEqual(generator.height, result[2]) - self.assertEqual(self.bc.database.list_of('registry.Asset'), [ - self.bc.format.to_dict(model.asset), - ]) + self.assertEqual( + self.bc.database.list_of("registry.Asset"), + [ + self.bc.format.to_dict(model.asset), + ], + ) """ 🔽🔽🔽 _get_media without media @@ -199,12 +215,12 @@ def test__get_media__without_media(self): height = randint(1, 2000) model = self.bc.database.create(asset=1, asset_category=1, academy=1) constructor_cases = [ - ((None, ), (None, 0, 0)), + ((None,), (None, 0, 0)), ((None, 0, 0), (None, 0, 0)), ((None, width, 0), (None, width, 0)), ((None, 0, height), (None, 0, height)), ((None, width, height), (None, width, height)), - ((model.asset, ), (model.asset, 0, 0)), + ((model.asset,), (model.asset, 0, 0)), ((model.asset, 0, 
0), (model.asset, 0, 0)), ((model.asset, width, 0), (model.asset, width, 0)), ((model.asset, 0, height), (model.asset, 0, height)), @@ -220,9 +236,12 @@ def test__get_media__without_media(self): self.assertEqual(generator.width, result[1]) self.assertEqual(generator.height, result[2]) - self.assertEqual(self.bc.database.list_of('registry.Asset'), [ - self.bc.format.to_dict(model.asset), - ]) + self.assertEqual( + self.bc.database.list_of("registry.Asset"), + [ + self.bc.format.to_dict(model.asset), + ], + ) """ 🔽🔽🔽 _get_media with media, slug don't match @@ -233,7 +252,7 @@ def test__get_media__with_media__slug_does_not_match(self): height = randint(1, 2000) model = self.bc.database.create(asset=1, media=1, academy=1) constructor_cases = [ - ((model.asset, ), (model.asset, 0, 0)), + ((model.asset,), (model.asset, 0, 0)), ((model.asset, 0, 0), (model.asset, 0, 0)), ((model.asset, width, 0), (model.asset, width, 0)), ((model.asset, 0, height), (model.asset, 0, height)), @@ -249,9 +268,12 @@ def test__get_media__with_media__slug_does_not_match(self): self.assertEqual(generator.width, result[1]) self.assertEqual(generator.height, result[2]) - self.assertEqual(self.bc.database.list_of('registry.Asset'), [ - self.bc.format.to_dict(model.asset), - ]) + self.assertEqual( + self.bc.database.list_of("registry.Asset"), + [ + self.bc.format.to_dict(model.asset), + ], + ) """ 🔽🔽🔽 _get_media with media, slug match @@ -262,14 +284,14 @@ def test__get_media__with_media__slug_match(self): height = randint(1, 2000) academy_slug = self.bc.fake.slug() asset_slug = self.bc.fake.slug() - asset = {'slug': asset_slug} - asset_category_slug = 'default' - asset_category = {'slug': asset_category_slug} - media = {'slug': f'{academy_slug}-{asset_category_slug}-{asset_slug}'} - academy = {'slug': academy_slug} + asset = {"slug": asset_slug} + asset_category_slug = "default" + asset_category = {"slug": asset_category_slug} + media = {"slug": f"{academy_slug}-{asset_category_slug}-{asset_slug}"} + academy = {"slug": academy_slug} model = self.bc.database.create(asset=asset, media=media, asset_category=asset_category, academy=academy) constructor_cases = [ - ((model.asset, ), (model.asset, 0, 0)), + ((model.asset,), (model.asset, 0, 0)), ((model.asset, 0, 0), (model.asset, 0, 0)), ((model.asset, width, 0), (model.asset, width, 0)), ((model.asset, 0, height), (model.asset, 0, height)), @@ -285,9 +307,12 @@ def test__get_media__with_media__slug_match(self): self.assertEqual(generator.width, result[1]) self.assertEqual(generator.height, result[2]) - self.assertEqual(self.bc.database.list_of('registry.Asset'), [ - self.bc.format.to_dict(model.asset), - ]) + self.assertEqual( + self.bc.database.list_of("registry.Asset"), + [ + self.bc.format.to_dict(model.asset), + ], + ) """ 🔽🔽🔽 _get_media_resolution with media, without MediaResolution @@ -297,11 +322,11 @@ def test__get_media_resolution__with_media__without_media_resolution(self): width = randint(1, 2000) height = randint(1, 2000) slug = self.bc.fake.slug() - asset = {'slug': slug} - media = {'slug': f'asset-{slug}'} + asset = {"slug": slug} + media = {"slug": f"asset-{slug}"} model = self.bc.database.create(asset=asset, media=media) constructor_cases = [ - ((model.asset, ), (model.asset, 0, 0)), + ((model.asset,), (model.asset, 0, 0)), ((model.asset, 0, 0), (model.asset, 0, 0)), ((model.asset, width, 0), (model.asset, width, 0)), ((model.asset, 0, height), (model.asset, 0, height)), @@ -317,9 +342,12 @@ def 
test__get_media_resolution__with_media__without_media_resolution(self): self.assertEqual(generator.width, result[1]) self.assertEqual(generator.height, result[2]) - self.assertEqual(self.bc.database.list_of('registry.Asset'), [ - self.bc.format.to_dict(model.asset), - ]) + self.assertEqual( + self.bc.database.list_of("registry.Asset"), + [ + self.bc.format.to_dict(model.asset), + ], + ) """ 🔽🔽🔽 _get_media_resolution with media, with MediaResolution, hash don't match @@ -329,11 +357,11 @@ def test__get_media_resolution__with_media__with_media_resolution__hash_does_not width = randint(1, 2000) height = randint(1, 2000) slug = self.bc.fake.slug() - asset = {'slug': slug} - media = {'slug': f'asset-{slug}'} + asset = {"slug": slug} + media = {"slug": f"asset-{slug}"} model = self.bc.database.create(asset=asset, media=media, media_resolution=1) constructor_cases = [ - ((model.asset, ), (model.asset, 0, 0)), + ((model.asset,), (model.asset, 0, 0)), ((model.asset, 0, 0), (model.asset, 0, 0)), ((model.asset, width, 0), (model.asset, width, 0)), ((model.asset, 0, height), (model.asset, 0, height)), @@ -349,9 +377,12 @@ def test__get_media_resolution__with_media__with_media_resolution__hash_does_not self.assertEqual(generator.width, result[1]) self.assertEqual(generator.height, result[2]) - self.assertEqual(self.bc.database.list_of('registry.Asset'), [ - self.bc.format.to_dict(model.asset), - ]) + self.assertEqual( + self.bc.database.list_of("registry.Asset"), + [ + self.bc.format.to_dict(model.asset), + ], + ) """ 🔽🔽🔽 _get_media_resolution with media, with MediaResolution, hash match, resolution don't match @@ -362,12 +393,12 @@ def test__get_media_resolution__with_media__with_media_resolution__hash_match__r height = randint(1, 2000) slug = self.bc.fake.slug() hash = self.bc.fake.slug() - asset = {'slug': slug} - media = {'slug': f'asset-{slug}', 'hash': hash} - media_resolution = {'hash': hash} + asset = {"slug": slug} + media = {"slug": f"asset-{slug}", "hash": hash} + media_resolution = {"hash": hash} model = self.bc.database.create(asset=asset, media=media, media_resolution=media_resolution) constructor_cases = [ - ((model.asset, ), (model.asset, 0, 0)), + ((model.asset,), (model.asset, 0, 0)), ((model.asset, 0, 0), (model.asset, 0, 0)), ((model.asset, width, 0), (model.asset, width, 0)), ((model.asset, 0, height), (model.asset, 0, height)), @@ -383,9 +414,12 @@ def test__get_media_resolution__with_media__with_media_resolution__hash_match__r self.assertEqual(generator.width, result[1]) self.assertEqual(generator.height, result[2]) - self.assertEqual(self.bc.database.list_of('registry.Asset'), [ - self.bc.format.to_dict(model.asset), - ]) + self.assertEqual( + self.bc.database.list_of("registry.Asset"), + [ + self.bc.format.to_dict(model.asset), + ], + ) """ 🔽🔽🔽 _get_media_resolution with media, with MediaResolution, hash match, resolution match @@ -396,8 +430,8 @@ def test__get_media_resolution__with_media__with_media_resolution__hash_match__r height = randint(1, 2000) slug = self.bc.fake.slug() hash = self.bc.fake.slug() - asset = {'slug': slug} - media = {'slug': f'asset-{slug}', 'hash': hash} + asset = {"slug": slug} + media = {"slug": f"asset-{slug}", "hash": hash} model = self.bc.database.create(asset=asset, media=media) constructor_cases = [ ((model.asset, width, 0), (model.asset, width, 0)), @@ -406,7 +440,7 @@ def test__get_media_resolution__with_media__with_media_resolution__hash_match__r ] for args, result in constructor_cases: - media_resolution = {'hash': hash, 'width': width, 
'height': height} + media_resolution = {"hash": hash, "width": width, "height": height} model2 = self.bc.database.create(media_resolution=media_resolution) generator = AssetThumbnailGenerator(*args) media = generator._get_media_resolution(model.media.hash) @@ -416,12 +450,15 @@ def test__get_media_resolution__with_media__with_media_resolution__hash_match__r self.assertEqual(generator.width, result[1]) self.assertEqual(generator.height, result[2]) - self.assertEqual(self.bc.database.list_of('registry.Asset'), [ - self.bc.format.to_dict(model.asset), - ]) + self.assertEqual( + self.bc.database.list_of("registry.Asset"), + [ + self.bc.format.to_dict(model.asset), + ], + ) # teardown - self.bc.database.delete('media.MediaResolution') + self.bc.database.delete("media.MediaResolution") """ 🔽🔽🔽 _the_client_want_resize returns False @@ -432,10 +469,10 @@ def test__the_client_want_resize__return_false(self): height = randint(1, 2000) model = self.bc.database.create(asset=1) cases = [ - ((None, ), (None, 0, 0)), + ((None,), (None, 0, 0)), ((None, 0, 0), (None, 0, 0)), ((None, width, height), (None, width, height)), - ((model.asset, ), (model.asset, 0, 0)), + ((model.asset,), (model.asset, 0, 0)), ((model.asset, 0, 0), (model.asset, 0, 0)), ((model.asset, width, height), (model.asset, width, height)), ] @@ -449,9 +486,12 @@ def test__the_client_want_resize__return_false(self): self.assertEqual(generator.width, result[1]) self.assertEqual(generator.height, result[2]) - self.assertEqual(self.bc.database.list_of('registry.Asset'), [ - self.bc.format.to_dict(model.asset), - ]) + self.assertEqual( + self.bc.database.list_of("registry.Asset"), + [ + self.bc.format.to_dict(model.asset), + ], + ) """ 🔽🔽🔽 _the_client_want_resize returns True @@ -477,21 +517,24 @@ def test__the_client_want_resize__return_true(self): self.assertEqual(generator.width, result[1]) self.assertEqual(generator.height, result[2]) - self.assertEqual(self.bc.database.list_of('registry.Asset'), [ - self.bc.format.to_dict(model.asset), - ]) + self.assertEqual( + self.bc.database.list_of("registry.Asset"), + [ + self.bc.format.to_dict(model.asset), + ], + ) """ 🔽🔽🔽 get_thumbnail_url without Asset, returns default url, permanent is False """ - @patch('breathecode.registry.tasks.async_create_asset_thumbnail.delay', MagicMock()) - @patch('breathecode.registry.tasks.async_create_asset_thumbnail_legacy.delay', MagicMock()) - @patch('breathecode.registry.tasks.async_resize_asset_thumbnail.delay', MagicMock()) + @patch("breathecode.registry.tasks.async_create_asset_thumbnail.delay", MagicMock()) + @patch("breathecode.registry.tasks.async_create_asset_thumbnail_legacy.delay", MagicMock()) + @patch("breathecode.registry.tasks.async_resize_asset_thumbnail.delay", MagicMock()) def test__get_thumbnail_url__without_asset(self): generator = AssetThumbnailGenerator(None) default_url = self.bc.fake.url() - with patch('os.getenv', MagicMock(side_effect=apply_get_env({'DEFAULT_ASSET_PREVIEW_URL': default_url}))): + with patch("os.getenv", MagicMock(side_effect=apply_get_env({"DEFAULT_ASSET_PREVIEW_URL": default_url}))): url = generator.get_thumbnail_url() self.assertEqual(url, (default_url, False)) @@ -499,9 +542,9 @@ def test__get_thumbnail_url__without_asset(self): self.assertEqual(generator.width, 0) self.assertEqual(generator.height, 0) - self.assertEqual(self.bc.database.list_of('registry.Asset'), []) - self.assertEqual(self.bc.database.list_of('media.Media'), []) - self.assertEqual(self.bc.database.list_of('media.MediaResolution'), []) + 
self.assertEqual(self.bc.database.list_of("registry.Asset"), []) + self.assertEqual(self.bc.database.list_of("media.Media"), []) + self.assertEqual(self.bc.database.list_of("media.MediaResolution"), []) self.assertEqual(tasks.async_create_asset_thumbnail.delay.call_args_list, []) self.assertEqual(tasks.async_create_asset_thumbnail_legacy.delay.call_args_list, []) @@ -511,14 +554,14 @@ def test__get_thumbnail_url__without_asset(self): 🔽🔽🔽 get_thumbnail_url with Asset, returns default url, permanent is False """ - @patch('breathecode.registry.tasks.async_create_asset_thumbnail.delay', MagicMock()) - @patch('breathecode.registry.tasks.async_create_asset_thumbnail_legacy.delay', MagicMock()) - @patch('breathecode.registry.tasks.async_resize_asset_thumbnail.delay', MagicMock()) + @patch("breathecode.registry.tasks.async_create_asset_thumbnail.delay", MagicMock()) + @patch("breathecode.registry.tasks.async_create_asset_thumbnail_legacy.delay", MagicMock()) + @patch("breathecode.registry.tasks.async_resize_asset_thumbnail.delay", MagicMock()) def test__get_thumbnail_url__with_asset(self): model = self.bc.database.create(asset=1, academy=1, asset_category=1) generator = AssetThumbnailGenerator(model.asset) default_url = self.bc.fake.url() - with patch('os.getenv', MagicMock(side_effect=apply_get_env({'DEFAULT_ASSET_PREVIEW_URL': default_url}))): + with patch("os.getenv", MagicMock(side_effect=apply_get_env({"DEFAULT_ASSET_PREVIEW_URL": default_url}))): url = generator.get_thumbnail_url() self.assertEqual(url, (default_url, False)) @@ -526,16 +569,22 @@ def test__get_thumbnail_url__with_asset(self): self.assertEqual(generator.width, 0) self.assertEqual(generator.height, 0) - self.assertEqual(self.bc.database.list_of('registry.Asset'), [ - self.bc.format.to_dict(model.asset), - ]) - - self.assertEqual(self.bc.database.list_of('media.Media'), []) - self.assertEqual(self.bc.database.list_of('media.MediaResolution'), []) - - self.assertEqual(tasks.async_create_asset_thumbnail.delay.call_args_list, [ - call(model.asset.slug), - ]) + self.assertEqual( + self.bc.database.list_of("registry.Asset"), + [ + self.bc.format.to_dict(model.asset), + ], + ) + + self.assertEqual(self.bc.database.list_of("media.Media"), []) + self.assertEqual(self.bc.database.list_of("media.MediaResolution"), []) + + self.assertEqual( + tasks.async_create_asset_thumbnail.delay.call_args_list, + [ + call(model.asset.slug), + ], + ) self.assertEqual(tasks.async_create_asset_thumbnail_legacy.delay.call_args_list, []) self.assertEqual(tasks.async_resize_asset_thumbnail.delay.call_args_list, []) @@ -543,14 +592,14 @@ def test__get_thumbnail_url__with_asset(self): 🔽🔽🔽 get_thumbnail_url with Asset and Media, slug don't match, returns default url, permanent is False """ - @patch('breathecode.registry.tasks.async_create_asset_thumbnail.delay', MagicMock()) - @patch('breathecode.registry.tasks.async_create_asset_thumbnail_legacy.delay', MagicMock()) - @patch('breathecode.registry.tasks.async_resize_asset_thumbnail.delay', MagicMock()) + @patch("breathecode.registry.tasks.async_create_asset_thumbnail.delay", MagicMock()) + @patch("breathecode.registry.tasks.async_create_asset_thumbnail_legacy.delay", MagicMock()) + @patch("breathecode.registry.tasks.async_resize_asset_thumbnail.delay", MagicMock()) def test__get_thumbnail_url__with_asset__with_media__slug_does_not_match(self): model = self.bc.database.create(asset=1, media=1, academy=1) generator = AssetThumbnailGenerator(model.asset) default_url = self.bc.fake.url() - with patch('os.getenv', 
MagicMock(side_effect=apply_get_env({'DEFAULT_ASSET_PREVIEW_URL': default_url}))): + with patch("os.getenv", MagicMock(side_effect=apply_get_env({"DEFAULT_ASSET_PREVIEW_URL": default_url}))): url = generator.get_thumbnail_url() self.assertEqual(url, (default_url, False)) @@ -558,15 +607,21 @@ def test__get_thumbnail_url__with_asset__with_media__slug_does_not_match(self): self.assertEqual(generator.width, 0) self.assertEqual(generator.height, 0) - self.assertEqual(self.bc.database.list_of('registry.Asset'), [ - self.bc.format.to_dict(model.asset), - ]) + self.assertEqual( + self.bc.database.list_of("registry.Asset"), + [ + self.bc.format.to_dict(model.asset), + ], + ) - self.assertEqual(self.bc.database.list_of('media.Media'), [ - self.bc.format.to_dict(model.media), - ]) + self.assertEqual( + self.bc.database.list_of("media.Media"), + [ + self.bc.format.to_dict(model.media), + ], + ) - self.assertEqual(self.bc.database.list_of('media.MediaResolution'), []) + self.assertEqual(self.bc.database.list_of("media.MediaResolution"), []) self.assertEqual(tasks.async_create_asset_thumbnail.delay.call_args_list, [call(model.asset.slug)]) self.assertEqual(tasks.async_create_asset_thumbnail_legacy.delay.call_args_list, []) @@ -576,38 +631,46 @@ def test__get_thumbnail_url__with_asset__with_media__slug_does_not_match(self): 🔽🔽🔽 get_thumbnail_url with Asset and Media, slug match, returns default url, permanent is True """ - @patch('breathecode.registry.tasks.async_create_asset_thumbnail.delay', MagicMock()) - @patch('breathecode.registry.tasks.async_create_asset_thumbnail_legacy.delay', MagicMock()) - @patch('breathecode.registry.tasks.async_resize_asset_thumbnail.delay', MagicMock()) + @patch("breathecode.registry.tasks.async_create_asset_thumbnail.delay", MagicMock()) + @patch("breathecode.registry.tasks.async_create_asset_thumbnail_legacy.delay", MagicMock()) + @patch("breathecode.registry.tasks.async_resize_asset_thumbnail.delay", MagicMock()) def test__get_thumbnail_url__with_asset__with_media__slug_match(self): academy_slug = self.bc.fake.slug() asset_slug = self.bc.fake.slug() - asset = {'slug': asset_slug} - asset_category_slug = 'default' - asset_category = {'slug': asset_category_slug} - media = {'slug': f'{academy_slug}-{asset_category_slug}-{asset_slug}'} - academy = {'slug': academy_slug} + asset = {"slug": asset_slug} + asset_category_slug = "default" + asset_category = {"slug": asset_category_slug} + media = {"slug": f"{academy_slug}-{asset_category_slug}-{asset_slug}"} + academy = {"slug": academy_slug} model = self.bc.database.create(asset=asset, media=media, asset_category=asset_category, academy=academy) generator = AssetThumbnailGenerator(model.asset) default_url = self.bc.fake.url() - with patch('os.getenv', MagicMock(side_effect=apply_get_env({'DEFAULT_ASSET_PREVIEW_URL': default_url}))): + with patch("os.getenv", MagicMock(side_effect=apply_get_env({"DEFAULT_ASSET_PREVIEW_URL": default_url}))): url = generator.get_thumbnail_url() self.assertEqual(generator.asset, model.asset) self.assertEqual(generator.width, 0) self.assertEqual(generator.height, 0) - self.assertEqual(self.bc.database.list_of('registry.Asset'), [ - self.bc.format.to_dict(model.asset), - ]) + self.assertEqual( + self.bc.database.list_of("registry.Asset"), + [ + self.bc.format.to_dict(model.asset), + ], + ) - self.assertEqual(self.bc.database.list_of('media.Media'), [{ - **self.bc.format.to_dict(model.media), - 'hits': 1, - }]) + self.assertEqual( + self.bc.database.list_of("media.Media"), + [ + { + 
**self.bc.format.to_dict(model.media), + "hits": 1, + } + ], + ) - self.assertEqual(self.bc.database.list_of('media.MediaResolution'), []) + self.assertEqual(self.bc.database.list_of("media.MediaResolution"), []) self.assertEqual(tasks.async_create_asset_thumbnail.delay.call_args_list, []) self.assertEqual(tasks.async_create_asset_thumbnail_legacy.delay.call_args_list, []) @@ -618,27 +681,25 @@ def test__get_thumbnail_url__with_asset__with_media__slug_match(self): returns default url, permanent is True """ - @patch('breathecode.registry.tasks.async_create_asset_thumbnail.delay', MagicMock()) - @patch('breathecode.registry.tasks.async_create_asset_thumbnail_legacy.delay', MagicMock()) - @patch('breathecode.registry.tasks.async_resize_asset_thumbnail.delay', MagicMock()) + @patch("breathecode.registry.tasks.async_create_asset_thumbnail.delay", MagicMock()) + @patch("breathecode.registry.tasks.async_create_asset_thumbnail_legacy.delay", MagicMock()) + @patch("breathecode.registry.tasks.async_resize_asset_thumbnail.delay", MagicMock()) def test__get_thumbnail_url__with_asset__with_media__with_media_resolution__passing_width_or_height(self): width = randint(1, 2000) height = randint(1, 2000) hash = self.bc.fake.slug() asset_slug = self.bc.fake.slug() - asset = {'slug': asset_slug} + asset = {"slug": asset_slug} asset_category_slug = self.bc.fake.slug() - asset_category = {'slug': asset_category_slug} + asset_category = {"slug": asset_category_slug} academy_slug = self.bc.fake.slug() - academy = {'slug': academy_slug} - media = {'slug': f'{academy_slug}-{asset_category_slug}-{asset_slug}', 'hash': hash} - media_resolution = {'hash': hash, 'width': width, 'height': height} + academy = {"slug": academy_slug} + media = {"slug": f"{academy_slug}-{asset_category_slug}-{asset_slug}", "hash": hash} + media_resolution = {"hash": hash, "width": width, "height": height} - model = self.bc.database.create(asset=asset, - media=media, - media_resolution=media_resolution, - asset_category=asset_category, - academy=academy) + model = self.bc.database.create( + asset=asset, media=media, media_resolution=media_resolution, asset_category=asset_category, academy=academy + ) cases = [((model.asset, width, 0), (width, 0, 1)), ((model.asset, 0, height), (0, height, 2))] @@ -646,20 +707,26 @@ def test__get_thumbnail_url__with_asset__with_media__with_media_resolution__pass generator = AssetThumbnailGenerator(*args) default_url = self.bc.fake.url() - with patch('os.getenv', MagicMock(side_effect=apply_get_env({'DEFAULT_ASSET_PREVIEW_URL': default_url}))): + with patch("os.getenv", MagicMock(side_effect=apply_get_env({"DEFAULT_ASSET_PREVIEW_URL": default_url}))): url = generator.get_thumbnail_url() self.assertEqual(generator.asset, model.asset) self.assertEqual(generator.width, result[0]) self.assertEqual(generator.height, result[1]) - self.assertEqual(self.bc.database.list_of('registry.Asset'), [ - self.bc.format.to_dict(model.asset), - ]) - - self.assertEqual(self.bc.database.list_of('media.Media'), [ - self.bc.format.to_dict(model.media), - ]) + self.assertEqual( + self.bc.database.list_of("registry.Asset"), + [ + self.bc.format.to_dict(model.asset), + ], + ) + + self.assertEqual( + self.bc.database.list_of("media.Media"), + [ + self.bc.format.to_dict(model.media), + ], + ) self.assertEqual(tasks.async_create_asset_thumbnail.delay.call_args_list, []) self.assertEqual(tasks.async_create_asset_thumbnail_legacy.delay.call_args_list, []) diff --git a/breathecode/registry/tests/actions/tests_clean_h1s.py 
b/breathecode/registry/tests/actions/tests_clean_h1s.py index c4b6c8634..ba7fa567a 100644 --- a/breathecode/registry/tests/actions/tests_clean_h1s.py +++ b/breathecode/registry/tests/actions/tests_clean_h1s.py @@ -1,6 +1,7 @@ """ Test clean_h1s """ + from logging import Logger from unittest.mock import MagicMock, PropertyMock, call, patch import pytest @@ -9,12 +10,14 @@ from breathecode.registry.models import Asset from breathecode.registry.actions import clean_h1s from breathecode.tests.mixins.breathecode_mixin.breathecode import Breathecode + # from ..mixins import RegistryTestCase @pytest.fixture(autouse=True) def setup(db): from linked_services.django.actions import reset_app_cache + reset_app_cache() yield @@ -138,118 +141,118 @@ def setup(db): def test__with_frontmatter(bc: Breathecode): model = bc.database.create( asset={ - 'readme_url': - 'https://github.com/breatheco-de/content/blob/master/src/content/lesson/how-to-networkt-yourself-into-a-software-development-job.es.md', - 'readme_raw': Asset.encode(md_with_frontmatter), - 'readme': Asset.encode(md_with_frontmatter) - }) + "readme_url": "https://github.com/breatheco-de/content/blob/master/src/content/lesson/how-to-networkt-yourself-into-a-software-development-job.es.md", + "readme_raw": Asset.encode(md_with_frontmatter), + "readme": Asset.encode(md_with_frontmatter), + } + ) - asset = clean_h1s(model['asset']) + asset = clean_h1s(model["asset"]) readme = asset.get_readme() - assert readme['decoded'] == md_with_frontmatter_no_h1 + assert readme["decoded"] == md_with_frontmatter_no_h1 def test__without_frontmatter(bc: Breathecode): model = bc.database.create( asset={ - 'readme_url': - 'https://github.com/breatheco-de/content/blob/master/src/content/lesson/how-to-networkt-yourself-into-a-software-development-job.es.md', - 'readme_raw': Asset.encode(md), - 'readme': Asset.encode(md) - }) + "readme_url": "https://github.com/breatheco-de/content/blob/master/src/content/lesson/how-to-networkt-yourself-into-a-software-development-job.es.md", + "readme_raw": Asset.encode(md), + "readme": Asset.encode(md), + } + ) - asset = clean_h1s(model['asset']) + asset = clean_h1s(model["asset"]) readme = asset.get_readme() - assert readme['decoded'] == md_no_h1 + assert readme["decoded"] == md_no_h1 def test__with_frontmatter_without_h1(bc: Breathecode): model = bc.database.create( asset={ - 'readme_url': - 'https://github.com/breatheco-de/content/blob/master/src/content/lesson/how-to-networkt-yourself-into-a-software-development-job.es.md', - 'readme_raw': Asset.encode(md_with_frontmatter_no_h1), - 'readme': Asset.encode(md_with_frontmatter_no_h1) - }) + "readme_url": "https://github.com/breatheco-de/content/blob/master/src/content/lesson/how-to-networkt-yourself-into-a-software-development-job.es.md", + "readme_raw": Asset.encode(md_with_frontmatter_no_h1), + "readme": Asset.encode(md_with_frontmatter_no_h1), + } + ) - asset = clean_h1s(model['asset']) + asset = clean_h1s(model["asset"]) readme = asset.get_readme() - assert readme['decoded'] == md_with_frontmatter_no_h1 + assert readme["decoded"] == md_with_frontmatter_no_h1 def test__without_frontmatter_without_h1(bc: Breathecode): model = bc.database.create( asset={ - 'readme_url': - 'https://github.com/breatheco-de/content/blob/master/src/content/lesson/how-to-networkt-yourself-into-a-software-development-job.es.md', - 'readme_raw': Asset.encode(md_no_h1), - 'readme': Asset.encode(md_no_h1) - }) + "readme_url": 
"https://github.com/breatheco-de/content/blob/master/src/content/lesson/how-to-networkt-yourself-into-a-software-development-job.es.md", + "readme_raw": Asset.encode(md_no_h1), + "readme": Asset.encode(md_no_h1), + } + ) - asset = clean_h1s(model['asset']) + asset = clean_h1s(model["asset"]) readme = asset.get_readme() - assert readme['decoded'] == md_no_h1 + assert readme["decoded"] == md_no_h1 def test__without_frontmatter_without_h1_many_jumplines(bc: Breathecode): model = bc.database.create( asset={ - 'readme_url': - 'https://github.com/breatheco-de/content/blob/master/src/content/lesson/how-to-networkt-yourself-into-a-software-development-job.es.md', - 'readme_raw': Asset.encode(md_with_jumplines), - 'readme': Asset.encode(md_with_jumplines) - }) + "readme_url": "https://github.com/breatheco-de/content/blob/master/src/content/lesson/how-to-networkt-yourself-into-a-software-development-job.es.md", + "readme_raw": Asset.encode(md_with_jumplines), + "readme": Asset.encode(md_with_jumplines), + } + ) - asset = clean_h1s(model['asset']) + asset = clean_h1s(model["asset"]) readme = asset.get_readme() - assert readme['decoded'] == md_no_h1 + assert readme["decoded"] == md_no_h1 def test__with_frontmatter_and_new_lines(bc: Breathecode): model = bc.database.create( asset={ - 'readme_url': - 'https://github.com/breatheco-de/content/blob/master/src/content/lesson/how-to-networkt-yourself-into-a-software-development-job.es.md', - 'readme_raw': Asset.encode(md_with_frontmatter_and_new_lines), - 'readme': Asset.encode(md_with_frontmatter_and_new_lines) - }) + "readme_url": "https://github.com/breatheco-de/content/blob/master/src/content/lesson/how-to-networkt-yourself-into-a-software-development-job.es.md", + "readme_raw": Asset.encode(md_with_frontmatter_and_new_lines), + "readme": Asset.encode(md_with_frontmatter_and_new_lines), + } + ) - asset = clean_h1s(model['asset']) + asset = clean_h1s(model["asset"]) readme = asset.get_readme() - assert readme['decoded'] == md_with_frontmatter_and_new_lines_no_h1 + assert readme["decoded"] == md_with_frontmatter_and_new_lines_no_h1 def test__with_frontmatter_and_new_lines_no_h1(bc: Breathecode): model = bc.database.create( asset={ - 'readme_url': - 'https://github.com/breatheco-de/content/blob/master/src/content/lesson/how-to-networkt-yourself-into-a-software-development-job.es.md', - 'readme_raw': Asset.encode(md_with_frontmatter_and_new_lines_no_h1), - 'readme': Asset.encode(md_with_frontmatter_and_new_lines_no_h1) - }) + "readme_url": "https://github.com/breatheco-de/content/blob/master/src/content/lesson/how-to-networkt-yourself-into-a-software-development-job.es.md", + "readme_raw": Asset.encode(md_with_frontmatter_and_new_lines_no_h1), + "readme": Asset.encode(md_with_frontmatter_and_new_lines_no_h1), + } + ) - asset = clean_h1s(model['asset']) + asset = clean_h1s(model["asset"]) readme = asset.get_readme() - assert readme['decoded'] == md_with_frontmatter_and_new_lines_no_h1 + assert readme["decoded"] == md_with_frontmatter_and_new_lines_no_h1 def test__with_new_lines(bc: Breathecode): model = bc.database.create( asset={ - 'readme_url': - 'https://github.com/breatheco-de/content/blob/master/src/content/lesson/how-to-networkt-yourself-into-a-software-development-job.es.md', - 'readme_raw': Asset.encode(md_with_new_lines), - 'readme': Asset.encode(md_with_new_lines) - }) + "readme_url": "https://github.com/breatheco-de/content/blob/master/src/content/lesson/how-to-networkt-yourself-into-a-software-development-job.es.md", + "readme_raw": 
Asset.encode(md_with_new_lines), + "readme": Asset.encode(md_with_new_lines), + } + ) - asset = clean_h1s(model['asset']) + asset = clean_h1s(model["asset"]) readme = asset.get_readme() - assert readme['decoded'] == md_with_new_lines_no_h1 + assert readme["decoded"] == md_with_new_lines_no_h1 diff --git a/breathecode/registry/tests/actions/tests_clean_readme.py b/breathecode/registry/tests/actions/tests_clean_readme.py index f3f43d69a..01343abfb 100644 --- a/breathecode/registry/tests/actions/tests_clean_readme.py +++ b/breathecode/registry/tests/actions/tests_clean_readme.py @@ -1,6 +1,7 @@ """ Test /answer """ + from random import randint from unittest.mock import MagicMock, call, patch from breathecode.registry import tasks @@ -32,19 +33,21 @@ def test__hide_comments(self): model = self.bc.database.create( asset={ - 'readme_url': - 'https://github.com/breatheco-de/content/blob/master/src/content/lesson/how-to-networkt-yourself-into-a-software-development-job.es.md', - 'readme_raw': Asset.encode(original_content), - 'readme': Asset.encode(original_content) - }) + "readme_url": "https://github.com/breatheco-de/content/blob/master/src/content/lesson/how-to-networkt-yourself-into-a-software-development-job.es.md", + "readme_raw": Asset.encode(original_content), + "readme": Asset.encode(original_content), + } + ) - asset = clean_readme_hide_comments(model['asset']) + asset = clean_readme_hide_comments(model["asset"]) readme = asset.get_readme() self.assertEqual(asset.readme_raw, Asset.encode(original_content)) self.assertEqual( - readme['decoded'], """For example, we need the following application URLs to match the following components: + readme["decoded"], + """For example, we need the following application URLs to match the following components: -## Defining your Application Routes""") +## Defining your Application Routes""", + ) def test__relative_paths(self): @@ -53,26 +56,29 @@ def test__relative_paths(self): ![react router](../../assets/images/6fd2b44b-598b-4ddb-85ba-9c32b086127f.png) -## Defining your Application Routes""") +## Defining your Application Routes""" + ) model = self.bc.database.create( asset={ - 'readme_url': - 'https://github.com/breatheco-de/content/blob/master/src/content/lesson/how-to-networkt-yourself-into-a-software-development-job.es.md', - 'readme_raw': original_content, - 'readme': original_content, - }) + "readme_url": "https://github.com/breatheco-de/content/blob/master/src/content/lesson/how-to-networkt-yourself-into-a-software-development-job.es.md", + "readme_raw": original_content, + "readme": original_content, + } + ) - asset = clean_readme_relative_paths(model['asset']) + asset = clean_readme_relative_paths(model["asset"]) readme = asset.get_readme() self.assertEqual(asset.readme_raw, original_content) self.assertEqual( - readme['decoded'], """For example, we need the following application URLs to match the following components: + readme["decoded"], + """For example, we need the following application URLs to match the following components: ![react router](https://github.com/breatheco-de/content/blob/master/src/content/lesson/../../assets/images/6fd2b44b-598b-4ddb-85ba-9c32b086127f.png?raw=true) -## Defining your Application Routes""") +## Defining your Application Routes""", + ) def test__clean_asset(self): @@ -83,33 +89,37 @@ def test__clean_asset(self): <!-- endhide --> ![react router](../../assets/images/6fd2b44b-598b-4ddb-85ba-9c32b086127f.png) -## Defining your Application Routes""") +## Defining your Application Routes""" + ) model = 
self.bc.database.create( asset={ - 'readme_url': - 'https://github.com/breatheco-de/content/blob/master/src/content/lesson/how-to-networkt-yourself-into-a-software-development-job.es.md', - 'readme_raw': original_content, - 'readme': original_content, - }) + "readme_url": "https://github.com/breatheco-de/content/blob/master/src/content/lesson/how-to-networkt-yourself-into-a-software-development-job.es.md", + "readme_raw": original_content, + "readme": original_content, + } + ) - asset = clean_asset_readme(model['asset']) + asset = clean_asset_readme(model["asset"]) self.assertEqual(asset.readme_raw, original_content) self.assertEqual( asset.readme, - Asset.encode("""For example, we need the following application URLs to match the following components: + Asset.encode( + """For example, we need the following application URLs to match the following components: ![react router](https://github.com/breatheco-de/content/blob/master/src/content/lesson/../../assets/images/6fd2b44b-598b-4ddb-85ba-9c32b086127f.png?raw=true) -## Defining your Application Routes""")) +## Defining your Application Routes""" + ), + ) def test__clean_asset_without_readme_raw(self): model = self.bc.database.create( asset={ - 'readme_url': - 'https://github.com/breatheco-de/content/blob/master/src/content/lesson/how-to-networkt-yourself-into-a-software-development-job.es.md', - }) + "readme_url": "https://github.com/breatheco-de/content/blob/master/src/content/lesson/how-to-networkt-yourself-into-a-software-development-job.es.md", + } + ) - asset = clean_asset_readme(model['asset']) - self.assertEqual(asset, model['asset']) + asset = clean_asset_readme(model["asset"]) + self.assertEqual(asset, model["asset"]) diff --git a/breathecode/registry/tests/management/commands/tests_change_asset_readme_url.py b/breathecode/registry/tests/management/commands/tests_change_asset_readme_url.py index 4cd3afee5..2064c967f 100644 --- a/breathecode/registry/tests/management/commands/tests_change_asset_readme_url.py +++ b/breathecode/registry/tests/management/commands/tests_change_asset_readme_url.py @@ -1,6 +1,7 @@ -''' +""" Tests / Registry /asset.readme_url -''' +""" + from unittest.mock import MagicMock, patch from ...mixins import RegistryTestCase @@ -8,11 +9,11 @@ class ChangeAssetReadmeUrlTestCase(RegistryTestCase): - ''' + """ Tests / Registry /asset.readme_url - ''' + """ - @patch('django.core.management.base.OutputWrapper.write', MagicMock()) + @patch("django.core.management.base.OutputWrapper.write", MagicMock()) def test_change_asset_readme_url_without_readme(self): from django.core.management.base import OutputWrapper @@ -24,11 +25,11 @@ def test_change_asset_readme_url_without_readme(self): self.assertEqual(OutputWrapper.write.call_count, 0) - self.assertEqual(self.bc.database.list_of('registry.Asset'), []) + self.assertEqual(self.bc.database.list_of("registry.Asset"), []) self.assertEqual(OutputWrapper.write.call_args_list, []) - @patch('django.core.management.base.OutputWrapper.write', MagicMock()) + @patch("django.core.management.base.OutputWrapper.write", MagicMock()) def test_change_asset_readme_url_with_readme(self): from django.core.management.base import OutputWrapper @@ -37,9 +38,13 @@ def test_change_asset_readme_url_with_readme(self): random_slug3 = self.bc.random.string(lower=True, upper=True, size=5) random_slug4 = self.bc.random.string(lower=True, upper=True, size=10) - readme_url1 = f'https://raw.githubusercontent.com/{random_slug1}/{random_slug2}/{random_slug3}/{random_slug4}.md' - readme_url2 = 
f'https://raw.githubusercontent.com/{random_slug4}/{random_slug3}/{random_slug2}/{random_slug1}.md' - assets = [{'readme_url': readme_url1}, {'readme_url': readme_url2}] + readme_url1 = ( + f"https://raw.githubusercontent.com/{random_slug1}/{random_slug2}/{random_slug3}/{random_slug4}.md" + ) + readme_url2 = ( + f"https://raw.githubusercontent.com/{random_slug4}/{random_slug3}/{random_slug2}/{random_slug1}.md" + ) + assets = [{"readme_url": readme_url1}, {"readme_url": readme_url2}] model = self.bc.database.create(asset=assets) @@ -51,23 +56,29 @@ def test_change_asset_readme_url_with_readme(self): self.assertEqual(OutputWrapper.write.call_count, 0) - self.assertEqual(self.bc.database.list_of('registry.Asset'), [{ - **self.bc.format.to_dict(model.asset[0]), 'readme_url': - f'https://github.com/{random_slug1}/{random_slug2}/blob/{random_slug3}/{random_slug4}.md' - }, { - **self.bc.format.to_dict(model.asset[1]), 'readme_url': - f'https://github.com/{random_slug4}/{random_slug3}/blob/{random_slug2}/{random_slug1}.md' - }]) + self.assertEqual( + self.bc.database.list_of("registry.Asset"), + [ + { + **self.bc.format.to_dict(model.asset[0]), + "readme_url": f"https://github.com/{random_slug1}/{random_slug2}/blob/{random_slug3}/{random_slug4}.md", + }, + { + **self.bc.format.to_dict(model.asset[1]), + "readme_url": f"https://github.com/{random_slug4}/{random_slug3}/blob/{random_slug2}/{random_slug1}.md", + }, + ], + ) self.assertEqual(OutputWrapper.write.call_args_list, []) - @patch('django.core.management.base.OutputWrapper.write', MagicMock()) + @patch("django.core.management.base.OutputWrapper.write", MagicMock()) def test_change_asset_readme_url_without_readmes_to_change(self): from django.core.management.base import OutputWrapper readme_url1 = self.bc.fake.url() readme_url2 = self.bc.fake.url() - assets = [{'readme_url': readme_url1}, {'readme_url': readme_url2}] + assets = [{"readme_url": readme_url1}, {"readme_url": readme_url2}] model = self.bc.database.create(asset=assets) @@ -79,11 +90,12 @@ def test_change_asset_readme_url_without_readmes_to_change(self): self.assertEqual(OutputWrapper.write.call_count, 0) - self.assertEqual(self.bc.database.list_of('registry.Asset'), - [{ - **self.bc.format.to_dict(model.asset[0]), 'readme_url': readme_url1 - }, { - **self.bc.format.to_dict(model.asset[1]), 'readme_url': readme_url2 - }]) + self.assertEqual( + self.bc.database.list_of("registry.Asset"), + [ + {**self.bc.format.to_dict(model.asset[0]), "readme_url": readme_url1}, + {**self.bc.format.to_dict(model.asset[1]), "readme_url": readme_url2}, + ], + ) self.assertEqual(OutputWrapper.write.call_args_list, []) diff --git a/breathecode/registry/tests/mixins/__init__.py b/breathecode/registry/tests/mixins/__init__.py index 443ccba69..cbf43399e 100644 --- a/breathecode/registry/tests/mixins/__init__.py +++ b/breathecode/registry/tests/mixins/__init__.py @@ -1,4 +1,5 @@ """ Media mixins """ + from .registry_test_case import RegistryTestCase # noqa: F401 diff --git a/breathecode/registry/tests/mixins/registry_test_case.py b/breathecode/registry/tests/mixins/registry_test_case.py index 29aa3d08e..4ae6f5e72 100644 --- a/breathecode/registry/tests/mixins/registry_test_case.py +++ b/breathecode/registry/tests/mixins/registry_test_case.py @@ -1,6 +1,7 @@ """ Collections of mixins used to login in authorize microservice """ + from rest_framework.test import APITestCase from breathecode.tests.mixins import GenerateModelsMixin, CacheMixin, BreathecodeMixin diff --git 
a/breathecode/registry/tests/tasks/tests_async_create_asset_thumbnail.py b/breathecode/registry/tests/tasks/tests_async_create_asset_thumbnail.py index baf2193d6..243eaff1d 100644 --- a/breathecode/registry/tests/tasks/tests_async_create_asset_thumbnail.py +++ b/breathecode/registry/tests/tasks/tests_async_create_asset_thumbnail.py @@ -1,6 +1,7 @@ """ Test /answer """ + from logging import Logger from unittest.mock import MagicMock, PropertyMock, call, patch import pytest @@ -13,6 +14,7 @@ @pytest.fixture(autouse=True) def setup(db): from linked_services.django.actions import reset_app_cache + reset_app_cache() yield @@ -64,213 +66,239 @@ def patch_get(monkeypatch): def handler(expected, code, headers): reader = StreamReaderMock(expected) - monkeypatch.setattr('requests.request', MagicMock(return_value=ResponseMock(expected, code, headers))) + monkeypatch.setattr("requests.request", MagicMock(return_value=ResponseMock(expected, code, headers))) yield handler -@patch('logging.Logger.warning', MagicMock()) -@patch('logging.Logger.error', MagicMock()) +@patch("logging.Logger.warning", MagicMock()) +@patch("logging.Logger.error", MagicMock()) def test__without_asset(bc: Breathecode, client: APIClient): - async_create_asset_thumbnail.delay('slug') + async_create_asset_thumbnail.delay("slug") - assert bc.database.list_of('media.Media') == [] - assert Logger.warning.call_args_list == [call('Asset with slug slug not found')] - assert Logger.error.call_args_list == [call('Asset with slug slug not found', exc_info=True)] + assert bc.database.list_of("media.Media") == [] + assert Logger.warning.call_args_list == [call("Asset with slug slug not found")] + assert Logger.error.call_args_list == [call("Asset with slug slug not found", exc_info=True)] -@patch('logging.Logger.warning', MagicMock()) -@patch('logging.Logger.error', MagicMock()) +@patch("logging.Logger.warning", MagicMock()) +@patch("logging.Logger.error", MagicMock()) @patch( - 'os.getenv', - MagicMock(side_effect=apply_get_env({ - 'GOOGLE_PROJECT_ID': 'labor-day-story', - 'SCREENSHOT_MACHINE_KEY': '000000' - }))) + "os.getenv", + MagicMock(side_effect=apply_get_env({"GOOGLE_PROJECT_ID": "labor-day-story", "SCREENSHOT_MACHINE_KEY": "000000"})), +) def test__with_asset__bad_function_response(bc: Breathecode, client: APIClient, patch_get): - asset_category = {'preview_generation_url': bc.fake.url()} + asset_category = {"preview_generation_url": bc.fake.url()} model = bc.database.create_v2(asset=1, asset_category=asset_category, academy=1) - headers = {'Accept': '*/*', 'content-type': 'image/jpeg'} + headers = {"Accept": "*/*", "content-type": "image/jpeg"} patch_get(fake_file_data, 400, headers) async_create_asset_thumbnail.delay(model.asset.slug) - assert bc.database.list_of('media.Media') == [] + assert bc.database.list_of("media.Media") == [] assert Logger.warning.call_args_list == [] assert Logger.error.call_args_list == [ call( - 'Unhandled error with async_create_asset_thumbnail, the cloud function `screenshots` ' - 'returns status code 400', - exc_info=True), + "Unhandled error with async_create_asset_thumbnail, the cloud function `screenshots` " + "returns status code 400", + exc_info=True, + ), ] -@patch('logging.Logger.warning', MagicMock()) -@patch('logging.Logger.error', MagicMock()) -@patch.multiple('breathecode.services.google_cloud.Storage', - __init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) -@patch.multiple('breathecode.services.google_cloud.File', - __init__=MagicMock(return_value=None), - 
bucket=PropertyMock(), - file_name=PropertyMock(), - delete=MagicMock(), - download=MagicMock(return_value=bytes('qwerty', 'utf-8')), - url=MagicMock(return_value='https://uio.io/path'), - create=True) +@patch("logging.Logger.warning", MagicMock()) +@patch("logging.Logger.error", MagicMock()) +@patch.multiple( + "breathecode.services.google_cloud.Storage", + __init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, +) +@patch.multiple( + "breathecode.services.google_cloud.File", + __init__=MagicMock(return_value=None), + bucket=PropertyMock(), + file_name=PropertyMock(), + delete=MagicMock(), + download=MagicMock(return_value=bytes("qwerty", "utf-8")), + url=MagicMock(return_value="https://uio.io/path"), + create=True, +) @patch( - 'os.getenv', - MagicMock(side_effect=apply_get_env({ - 'GOOGLE_PROJECT_ID': 'labor-day-story', - 'SCREENSHOTS_BUCKET': 'random-bucket', - 'SCREENSHOT_MACHINE_KEY': '000000' - }))) + "os.getenv", + MagicMock( + side_effect=apply_get_env( + { + "GOOGLE_PROJECT_ID": "labor-day-story", + "SCREENSHOTS_BUCKET": "random-bucket", + "SCREENSHOT_MACHINE_KEY": "000000", + } + ) + ), +) def test__with_asset__good_function_response(bc: Breathecode, client: APIClient, patch_get): - hash = '3d78522863c7781e5800cd3c7dfe6450856db9eb9166f43ecfe82ccdbe95173a' + hash = "3d78522863c7781e5800cd3c7dfe6450856db9eb9166f43ecfe82ccdbe95173a" fake_url = bc.fake.url() - asset_category = {'preview_generation_url': fake_url} + asset_category = {"preview_generation_url": fake_url} model = bc.database.create_v2(asset=1, asset_category=asset_category, academy=1) - headers = {'Accept': '*/*', 'content-type': 'image/jpeg'} + headers = {"Accept": "*/*", "content-type": "image/jpeg"} patch_get(fake_file_data, 200, headers) async_create_asset_thumbnail.delay(model.asset.slug) - assert bc.database.list_of('media.Media') == [{ - 'academy_id': model.asset.academy.id, - 'hash': hash, - 'hits': 0, - 'id': 1, - 'mime': 'image/png', - 'name': f'{model.asset.academy.slug}-{model.asset.category.slug}-{model.asset.slug}.png', - 'slug': f'{model.asset.academy.slug}-{model.asset.category.slug}-{model.asset.slug}', - 'thumbnail': f'https://storage.googleapis.com/random-bucket/{hash}-thumbnail', - 'url': f'https://storage.googleapis.com/random-bucket/{hash}', - }] + assert bc.database.list_of("media.Media") == [ + { + "academy_id": model.asset.academy.id, + "hash": hash, + "hits": 0, + "id": 1, + "mime": "image/png", + "name": f"{model.asset.academy.slug}-{model.asset.category.slug}-{model.asset.slug}.png", + "slug": f"{model.asset.academy.slug}-{model.asset.category.slug}-{model.asset.slug}", + "thumbnail": f"https://storage.googleapis.com/random-bucket/{hash}-thumbnail", + "url": f"https://storage.googleapis.com/random-bucket/{hash}", + } + ] assert Logger.warning.call_args_list == [ - call(f'Media was save with {hash} for academy {model.asset.academy}'), + call(f"Media was save with {hash} for academy {model.asset.academy}"), ] assert Logger.error.call_args_list == [] -@patch('logging.Logger.warning', MagicMock()) -@patch('logging.Logger.error', MagicMock()) -@patch.multiple('breathecode.services.google_cloud.Storage', - __init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) -@patch.multiple('breathecode.services.google_cloud.File', - __init__=MagicMock(return_value=None), - delete=MagicMock(), - download=MagicMock(return_value=bytes('qwerty', 'utf-8')), - create=True) +@patch("logging.Logger.warning", MagicMock()) +@patch("logging.Logger.error", MagicMock()) 
+@patch.multiple( + "breathecode.services.google_cloud.Storage", + __init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, +) +@patch.multiple( + "breathecode.services.google_cloud.File", + __init__=MagicMock(return_value=None), + delete=MagicMock(), + download=MagicMock(return_value=bytes("qwerty", "utf-8")), + create=True, +) @patch( - 'os.getenv', - MagicMock(side_effect=apply_get_env({ - 'GOOGLE_PROJECT_ID': 'labor-day-story', - 'SCREENSHOT_MACHINE_KEY': '000000' - }))) + "os.getenv", + MagicMock(side_effect=apply_get_env({"GOOGLE_PROJECT_ID": "labor-day-story", "SCREENSHOT_MACHINE_KEY": "000000"})), +) def test__with_asset__with_media__without_asset_category_with_url(bc: Breathecode, client: APIClient, patch_get): - hash = '3d78522863c7781e5800cd3c7dfe6450856db9eb9166f43ecfe82ccdbe95173a' - media = {'hash': hash} + hash = "3d78522863c7781e5800cd3c7dfe6450856db9eb9166f43ecfe82ccdbe95173a" + media = {"hash": hash} model = bc.database.create_v2(asset=1, media=media) - headers = {'Accept': '*/*', 'content-type': 'image/jpeg'} + headers = {"Accept": "*/*", "content-type": "image/jpeg"} patch_get(fake_file_data, 200, headers) async_create_asset_thumbnail.delay(model.asset.slug) - assert bc.database.list_of('media.Media') == [ + assert bc.database.list_of("media.Media") == [ bc.format.to_dict(model.media), ] assert Logger.warning.call_args_list == [] assert Logger.error.call_args_list == [ - call('Not able to retrieve a preview generation', exc_info=True), + call("Not able to retrieve a preview generation", exc_info=True), ] -@patch('logging.Logger.warning', MagicMock()) -@patch('logging.Logger.error', MagicMock()) -@patch.multiple('breathecode.services.google_cloud.Storage', - __init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) -@patch.multiple('breathecode.services.google_cloud.File', - __init__=MagicMock(return_value=None), - bucket=PropertyMock(), - file_name=PropertyMock(), - delete=MagicMock(), - rename=MagicMock(), - download=MagicMock(return_value=bytes('qwerty', 'utf-8')), - create=True) +@patch("logging.Logger.warning", MagicMock()) +@patch("logging.Logger.error", MagicMock()) +@patch.multiple( + "breathecode.services.google_cloud.Storage", + __init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, +) +@patch.multiple( + "breathecode.services.google_cloud.File", + __init__=MagicMock(return_value=None), + bucket=PropertyMock(), + file_name=PropertyMock(), + delete=MagicMock(), + rename=MagicMock(), + download=MagicMock(return_value=bytes("qwerty", "utf-8")), + create=True, +) @patch( - 'os.getenv', - MagicMock(side_effect=apply_get_env({ - 'GOOGLE_PROJECT_ID': 'labor-day-story', - 'SCREENSHOT_MACHINE_KEY': '000000', - 'SCREENSHOTS_BUCKET': 'random-bucket', - }))) + "os.getenv", + MagicMock( + side_effect=apply_get_env( + { + "GOOGLE_PROJECT_ID": "labor-day-story", + "SCREENSHOT_MACHINE_KEY": "000000", + "SCREENSHOTS_BUCKET": "random-bucket", + } + ) + ), +) def test__with_asset__with_media__with_asset_category_with_url(bc: Breathecode, client: APIClient, patch_get): - hash = '3d78522863c7781e5800cd3c7dfe6450856db9eb9166f43ecfe82ccdbe95173a' - media = {'hash': hash} - asset_category = {'preview_generation_url': bc.fake.url()} + hash = "3d78522863c7781e5800cd3c7dfe6450856db9eb9166f43ecfe82ccdbe95173a" + media = {"hash": hash} + asset_category = {"preview_generation_url": bc.fake.url()} model = bc.database.create_v2(asset=1, media=media, asset_category=asset_category, academy=1) - headers = {'Accept': '*/*', 
'content-type': 'image/jpeg'} + headers = {"Accept": "*/*", "content-type": "image/jpeg"} patch_get(fake_file_data, 200, headers) async_create_asset_thumbnail.delay(model.asset.slug) - assert bc.database.list_of('media.Media') == [ + assert bc.database.list_of("media.Media") == [ bc.format.to_dict(model.media), ] assert Logger.warning.call_args_list == [] assert Logger.error.call_args_list == [ - call(f'Media with hash {hash} already exists, skipping', exc_info=True), + call(f"Media with hash {hash} already exists, skipping", exc_info=True), ] -@patch('logging.Logger.warning', MagicMock()) -@patch('logging.Logger.error', MagicMock()) -@patch.multiple('breathecode.services.google_cloud.Storage', - __init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) -@patch.multiple('breathecode.services.google_cloud.File', - __init__=MagicMock(return_value=None), - bucket=PropertyMock(), - file_name=PropertyMock(), - delete=MagicMock(), - download=MagicMock(return_value=bytes('qwerty', 'utf-8')), - create=True) -@patch('os.getenv', MagicMock(side_effect=apply_get_env({'GOOGLE_PROJECT_ID': 'labor-day-story'}))) +@patch("logging.Logger.warning", MagicMock()) +@patch("logging.Logger.error", MagicMock()) +@patch.multiple( + "breathecode.services.google_cloud.Storage", + __init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, +) +@patch.multiple( + "breathecode.services.google_cloud.File", + __init__=MagicMock(return_value=None), + bucket=PropertyMock(), + file_name=PropertyMock(), + delete=MagicMock(), + download=MagicMock(return_value=bytes("qwerty", "utf-8")), + create=True, +) +@patch("os.getenv", MagicMock(side_effect=apply_get_env({"GOOGLE_PROJECT_ID": "labor-day-story"}))) def test__with_asset__with_media__media_for_another_academy(bc: Breathecode, client: APIClient, patch_get): - hash = '3d78522863c7781e5800cd3c7dfe6450856db9eb9166f43ecfe82ccdbe95173a' - asset = {'academy_id': 1} - media = {'hash': hash, 'academy_id': 2} - asset_category = {'preview_generation_url': bc.fake.url()} + hash = "3d78522863c7781e5800cd3c7dfe6450856db9eb9166f43ecfe82ccdbe95173a" + asset = {"academy_id": 1} + media = {"hash": hash, "academy_id": 2} + asset_category = {"preview_generation_url": bc.fake.url()} model = bc.database.create(asset=asset, media=media, academy=2, asset_category=asset_category) - headers = {'Accept': '*/*', 'content-type': 'image/jpeg'} + headers = {"Accept": "*/*", "content-type": "image/jpeg"} patch_get(fake_file_data, 200, headers) async_create_asset_thumbnail.delay(model.asset.slug) - assert bc.database.list_of('media.Media') == [ - bc.format.to_dict(model.media), { + assert bc.database.list_of("media.Media") == [ + bc.format.to_dict(model.media), + { **bc.format.to_dict(model.media), - 'id': 2, - 'academy_id': 1, - 'slug': f'{model.asset.academy.slug}-{model.asset.category.slug}-{model.asset.slug}', - } + "id": 2, + "academy_id": 1, + "slug": f"{model.asset.academy.slug}-{model.asset.category.slug}-{model.asset.slug}", + }, ] assert Logger.warning.call_args_list == [] assert Logger.error.call_args_list == [ - call(f'Media was save with {hash} for academy {model.academy[0]}', exc_info=True), + call(f"Media was save with {hash} for academy {model.academy[0]}", exc_info=True), ] diff --git a/breathecode/registry/tests/tasks/tests_async_download_single_readme_image.py b/breathecode/registry/tests/tasks/tests_async_download_single_readme_image.py index 687d65472..b7ee304d2 100644 --- 
a/breathecode/registry/tests/tasks/tests_async_download_single_readme_image.py +++ b/breathecode/registry/tests/tasks/tests_async_download_single_readme_image.py @@ -1,6 +1,7 @@ """ Test async_download_single_readme_image """ + import base64 from unittest.mock import MagicMock, patch, PropertyMock @@ -20,7 +21,7 @@ def get_env(key, value=None): return get_env -original_url = 'https://www.google.com' +original_url = "https://www.google.com" class TestRegistry(LegacyAPITestCase): @@ -28,239 +29,275 @@ class TestRegistry(LegacyAPITestCase): 🔽🔽🔽 GET with status not ok """ - @patch('requests.get', apply_requests_get_mock([( - 200, - original_url, - { - 'headers': { - 'content-type': 'image/png' - } - }, - )])) + @patch( + "requests.get", + apply_requests_get_mock( + [ + ( + 200, + original_url, + {"headers": {"content-type": "image/png"}}, + ) + ] + ), + ) def test__with_wrong_file_format(self): - asset_image = {'name': 'john', 'original_url': original_url, 'bucket_url': 'https://www.f.com'} - model = self.bc.database.create(asset={'slug': 'fake_slug'}, asset_image=asset_image) + asset_image = {"name": "john", "original_url": original_url, "bucket_url": "https://www.f.com"} + model = self.bc.database.create(asset={"slug": "fake_slug"}, asset_image=asset_image) bc = self.bc.format.to_dict(model.asset_image) - async_download_single_readme_image.delay('fake_slug', 'https://www.f.com') + async_download_single_readme_image.delay("fake_slug", "https://www.f.com") - self.bc.database.list_of('registry.AssetImage'), [{ - **bc, 'download_details': - f'Skipping image download for {original_url} in asset fake_slug, invalid mime application/json', - 'download_status': 'ERROR' - }] + self.bc.database.list_of("registry.AssetImage"), [ + { + **bc, + "download_details": f"Skipping image download for {original_url} in asset fake_slug, invalid mime application/json", + "download_status": "ERROR", + } + ] @patch( - 'requests.get', - apply_requests_get_mock([(200, original_url, { - 'headers': { - 'content-type': 'image/png' - } - }, { - 'content-type': 'image/png' - })])) - @patch.multiple('breathecode.services.google_cloud.Storage', - __init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) - @patch.multiple('breathecode.services.google_cloud.File', - __init__=MagicMock(return_value=None), - bucket=PropertyMock(), - file_name=PropertyMock(), - blob=PropertyMock(side_effect=[None, 1]), - upload=MagicMock(), - url=MagicMock(return_value='https://xyz/hardcoded_url'), - create=True) + "requests.get", + apply_requests_get_mock( + [(200, original_url, {"headers": {"content-type": "image/png"}}, {"content-type": "image/png"})] + ), + ) + @patch.multiple( + "breathecode.services.google_cloud.Storage", + __init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, + ) + @patch.multiple( + "breathecode.services.google_cloud.File", + __init__=MagicMock(return_value=None), + bucket=PropertyMock(), + file_name=PropertyMock(), + blob=PropertyMock(side_effect=[None, 1]), + upload=MagicMock(), + url=MagicMock(return_value="https://xyz/hardcoded_url"), + create=True, + ) @patch( - 'os.getenv', - MagicMock(side_effect=apply_get_env({ - 'GOOGLE_PROJECT_ID': 'labor-day-story', - 'MEDIA_GALLERY_BUCKET': 'bucket-name', - }))) - @patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) + "os.getenv", + MagicMock( + side_effect=apply_get_env( + { + "GOOGLE_PROJECT_ID": "labor-day-story", + "MEDIA_GALLERY_BUCKET": "bucket-name", + } + ) + ), + ) + @patch("django.utils.timezone.now", 
MagicMock(return_value=UTC_NOW)) def test__with_download_status_no_asset_image(self): - model = self.bc.database.create(asset={'slug': 'fake_slug'}) - - async_download_single_readme_image.delay('fake_slug', original_url) - #The content is static in the decorator, so the hash is always the same - hash = '5186bd77843e507d2c6f568d282c56b06622b2fc7d6ae6a109c97ee1fc3cdebc' - - readme = self.bc.database.get_model('registry.asset').objects.first().get_readme()['decoded'] - - self.assertEqual('https://xyz/hardcoded_url' in readme, False) - self.assertEqual(self.bc.database.list_of('registry.AssetImage'), [{ - 'id': 1, - 'bucket_url': 'https://xyz/hardcoded_url', - 'original_url': original_url, - 'download_details': f'Downloading {original_url}', - 'download_status': 'OK', - 'hash': hash, - 'last_download_at': UTC_NOW, - 'mime': 'image/png', - 'name': 'www.google.com', - }]) + model = self.bc.database.create(asset={"slug": "fake_slug"}) + + async_download_single_readme_image.delay("fake_slug", original_url) + # The content is static in the decorator, so the hash is always the same + hash = "5186bd77843e507d2c6f568d282c56b06622b2fc7d6ae6a109c97ee1fc3cdebc" + + readme = self.bc.database.get_model("registry.asset").objects.first().get_readme()["decoded"] + + self.assertEqual("https://xyz/hardcoded_url" in readme, False) + self.assertEqual( + self.bc.database.list_of("registry.AssetImage"), + [ + { + "id": 1, + "bucket_url": "https://xyz/hardcoded_url", + "original_url": original_url, + "download_details": f"Downloading {original_url}", + "download_status": "OK", + "hash": hash, + "last_download_at": UTC_NOW, + "mime": "image/png", + "name": "www.google.com", + } + ], + ) @patch( - 'requests.get', - apply_requests_get_mock([(200, original_url, { - 'headers': { - 'content-type': 'image/png' - } - }, { - 'content-type': 'image/png' - })])) - @patch.multiple('breathecode.services.google_cloud.Storage', - __init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) - @patch.multiple('breathecode.services.google_cloud.File', - __init__=MagicMock(return_value=None), - bucket=PropertyMock(), - file_name=PropertyMock(), - blob=PropertyMock(side_effect=[None, 1]), - upload=MagicMock(), - url=MagicMock(return_value='https://xyz/hardcoded_url'), - create=True) + "requests.get", + apply_requests_get_mock( + [(200, original_url, {"headers": {"content-type": "image/png"}}, {"content-type": "image/png"})] + ), + ) + @patch.multiple( + "breathecode.services.google_cloud.Storage", + __init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, + ) + @patch.multiple( + "breathecode.services.google_cloud.File", + __init__=MagicMock(return_value=None), + bucket=PropertyMock(), + file_name=PropertyMock(), + blob=PropertyMock(side_effect=[None, 1]), + upload=MagicMock(), + url=MagicMock(return_value="https://xyz/hardcoded_url"), + create=True, + ) @patch( - 'os.getenv', - MagicMock(side_effect=apply_get_env({ - 'GOOGLE_PROJECT_ID': 'labor-day-story', - 'MEDIA_GALLERY_BUCKET': 'bucket-name', - }))) + "os.getenv", + MagicMock( + side_effect=apply_get_env( + { + "GOOGLE_PROJECT_ID": "labor-day-story", + "MEDIA_GALLERY_BUCKET": "bucket-name", + } + ) + ), + ) def test__with_download_status_not_ok(self): - asset_image = {'name': 'john', 'original_url': original_url, 'bucket_url': 'https://www.f.com'} - start_of_readme = 'hi ' - fake_readme = f'{start_of_readme}https://www.f.com' - encoded_readme = base64.b64encode(fake_readme.encode('utf-8')).decode('utf-8') - - model = 
self.bc.database.create(asset={ - 'slug': 'fake_slug', - 'readme': encoded_readme, - 'readme_raw': encoded_readme - }, - asset_image=asset_image) - #store the original readme_raw to verify it does not get modified - readme_raw = model['asset'].readme_raw + asset_image = {"name": "john", "original_url": original_url, "bucket_url": "https://www.f.com"} + start_of_readme = "hi " + fake_readme = f"{start_of_readme}https://www.f.com" + encoded_readme = base64.b64encode(fake_readme.encode("utf-8")).decode("utf-8") + + model = self.bc.database.create( + asset={"slug": "fake_slug", "readme": encoded_readme, "readme_raw": encoded_readme}, asset_image=asset_image + ) + # store the original readme_raw to verify it does not get modified + readme_raw = model["asset"].readme_raw asset = self.bc.format.to_dict(model.asset) - async_download_single_readme_image.delay('fake_slug', 'https://www.f.com') - #The content is static in the decorator, so the hash is always the same - hash = '5186bd77843e507d2c6f568d282c56b06622b2fc7d6ae6a109c97ee1fc3cdebc' - - readme = self.bc.database.get_model('registry.asset').objects.first().get_readme()['decoded'] - asset_image = self.bc.database.get_model('registry.AssetImage').objects.first() - self.bc.database.list_of('registry.Asset'), [{ - **asset, - 'readme_raw': readme_raw, - }] - self.assertEqual(readme.count('https://xyz/hardcoded_url'), 1) + async_download_single_readme_image.delay("fake_slug", "https://www.f.com") + # The content is static in the decorator, so the hash is always the same + hash = "5186bd77843e507d2c6f568d282c56b06622b2fc7d6ae6a109c97ee1fc3cdebc" + + readme = self.bc.database.get_model("registry.asset").objects.first().get_readme()["decoded"] + asset_image = self.bc.database.get_model("registry.AssetImage").objects.first() + self.bc.database.list_of("registry.Asset"), [ + { + **asset, + "readme_raw": readme_raw, + } + ] + self.assertEqual(readme.count("https://xyz/hardcoded_url"), 1) self.assertEqual(start_of_readme in readme, True) - self.assertEqual('https://www.f.com' not in readme, True) - self.assertEqual(self.bc.database.list_of('registry.AssetImage'), [{ - 'id': 1, - 'bucket_url': 'https://xyz/hardcoded_url', - 'original_url': original_url, - 'download_details': 'Downloading https://www.f.com', - 'download_status': 'OK', - 'hash': hash, - 'last_download_at': None, - 'mime': 'image/png', - 'name': 'john', - }]) + self.assertEqual("https://www.f.com" not in readme, True) + self.assertEqual( + self.bc.database.list_of("registry.AssetImage"), + [ + { + "id": 1, + "bucket_url": "https://xyz/hardcoded_url", + "original_url": original_url, + "download_details": "Downloading https://www.f.com", + "download_status": "OK", + "hash": hash, + "last_download_at": None, + "mime": "image/png", + "name": "john", + } + ], + ) @patch( - 'requests.get', - apply_requests_get_mock([(200, original_url, { - 'headers': { - 'content-type': 'image/png' - } - }, { - 'content-type': 'image/png' - })])) - @patch.multiple('breathecode.services.google_cloud.Storage', - __init__=MagicMock(return_value=None), - client=PropertyMock(), - create=True) - @patch.multiple('breathecode.services.google_cloud.File', - __init__=MagicMock(return_value=None), - bucket=PropertyMock(), - file_name=PropertyMock(), - blob=PropertyMock(side_effect=[None, 1]), - upload=MagicMock(), - url=MagicMock(return_value='https://xyz/hardcoded_url'), - create=True) + "requests.get", + apply_requests_get_mock( + [(200, original_url, {"headers": {"content-type": "image/png"}}, {"content-type": 
"image/png"})] + ), + ) + @patch.multiple( + "breathecode.services.google_cloud.Storage", + __init__=MagicMock(return_value=None), + client=PropertyMock(), + create=True, + ) + @patch.multiple( + "breathecode.services.google_cloud.File", + __init__=MagicMock(return_value=None), + bucket=PropertyMock(), + file_name=PropertyMock(), + blob=PropertyMock(side_effect=[None, 1]), + upload=MagicMock(), + url=MagicMock(return_value="https://xyz/hardcoded_url"), + create=True, + ) @patch( - 'os.getenv', - MagicMock(side_effect=apply_get_env({ - 'GOOGLE_PROJECT_ID': 'labor-day-story', - 'MEDIA_GALLERY_BUCKET': 'bucket-name', - }))) + "os.getenv", + MagicMock( + side_effect=apply_get_env( + { + "GOOGLE_PROJECT_ID": "labor-day-story", + "MEDIA_GALLERY_BUCKET": "bucket-name", + } + ) + ), + ) def test__with_download_status_not_ok_many_images(self): - asset_image = {'name': 'john', 'original_url': original_url, 'bucket_url': 'https://www.f.com'} - start_of_readme = 'hi ' - fake_readme = f'{start_of_readme}https://www.f.com https://www.f.com https://www.f.com' - encoded_readme = base64.b64encode(fake_readme.encode('utf-8')).decode('utf-8') - - model = self.bc.database.create(asset={ - 'slug': 'fake_slug', - 'readme': encoded_readme, - 'readme_raw': encoded_readme - }, - asset_image=asset_image) - - #store the original readme_raw to verify it does not get modified - readme_raw = model['asset'].readme_raw - - async_download_single_readme_image.delay('fake_slug', 'https://www.f.com') - #The content is static in the decorator, so the hash is always the same - hash = '5186bd77843e507d2c6f568d282c56b06622b2fc7d6ae6a109c97ee1fc3cdebc' - - readme = self.bc.database.get_model('registry.asset').objects.first().get_readme()['decoded'] - asset_image = self.bc.database.get_model('registry.AssetImage').objects.first() - self.assertEqual(readme.count('https://xyz/hardcoded_url'), 3) + asset_image = {"name": "john", "original_url": original_url, "bucket_url": "https://www.f.com"} + start_of_readme = "hi " + fake_readme = f"{start_of_readme}https://www.f.com https://www.f.com https://www.f.com" + encoded_readme = base64.b64encode(fake_readme.encode("utf-8")).decode("utf-8") + + model = self.bc.database.create( + asset={"slug": "fake_slug", "readme": encoded_readme, "readme_raw": encoded_readme}, asset_image=asset_image + ) + + # store the original readme_raw to verify it does not get modified + readme_raw = model["asset"].readme_raw + + async_download_single_readme_image.delay("fake_slug", "https://www.f.com") + # The content is static in the decorator, so the hash is always the same + hash = "5186bd77843e507d2c6f568d282c56b06622b2fc7d6ae6a109c97ee1fc3cdebc" + + readme = self.bc.database.get_model("registry.asset").objects.first().get_readme()["decoded"] + asset_image = self.bc.database.get_model("registry.AssetImage").objects.first() + self.assertEqual(readme.count("https://xyz/hardcoded_url"), 3) self.assertEqual(start_of_readme in readme, True) - self.assertEqual('https://www.f.com' not in readme, True) - self.assertEqual(readme_raw, self.bc.database.get_model('registry.asset').objects.first().readme_raw) - self.assertEqual(self.bc.database.list_of('registry.AssetImage'), [{ - 'id': 1, - 'bucket_url': 'https://xyz/hardcoded_url', - 'original_url': original_url, - 'download_details': 'Downloading https://www.f.com', - 'download_status': 'OK', - 'hash': hash, - 'last_download_at': None, - 'mime': 'image/png', - 'name': 'john', - }]) + self.assertEqual("https://www.f.com" not in readme, True) + 
self.assertEqual(readme_raw, self.bc.database.get_model("registry.asset").objects.first().readme_raw) + self.assertEqual( + self.bc.database.list_of("registry.AssetImage"), + [ + { + "id": 1, + "bucket_url": "https://xyz/hardcoded_url", + "original_url": original_url, + "download_details": "Downloading https://www.f.com", + "download_status": "OK", + "hash": hash, + "last_download_at": None, + "mime": "image/png", + "name": "john", + } + ], + ) """ 🔽🔽🔽 GET with status ok """ @patch( - 'os.getenv', - MagicMock(side_effect=apply_get_env({ - 'GOOGLE_PROJECT_ID': 'labor-day-story', - 'MEDIA_GALLERY_BUCKET': 'bucket-name', - }))) + "os.getenv", + MagicMock( + side_effect=apply_get_env( + { + "GOOGLE_PROJECT_ID": "labor-day-story", + "MEDIA_GALLERY_BUCKET": "bucket-name", + } + ) + ), + ) def test__with_ok_download_status(self): - asset_image = {'name': 'john', 'bucket_url': 'https://www.f.com', 'download_status': 'OK'} - fake_readme = 'hi https://www.f.com' - encoded_readme = base64.b64encode(fake_readme.encode('utf-8')).decode('utf-8') + asset_image = {"name": "john", "bucket_url": "https://www.f.com", "download_status": "OK"} + fake_readme = "hi https://www.f.com" + encoded_readme = base64.b64encode(fake_readme.encode("utf-8")).decode("utf-8") - model = self.bc.database.create(asset={ - 'slug': 'fake_slug', - 'readme': encoded_readme, - 'readme_raw': encoded_readme - }, - asset_image=asset_image) + model = self.bc.database.create( + asset={"slug": "fake_slug", "readme": encoded_readme, "readme_raw": encoded_readme}, asset_image=asset_image + ) - #store the original readme_raw to verify it does not get modified - readme_raw = model['asset'].readme_raw + # store the original readme_raw to verify it does not get modified + readme_raw = model["asset"].readme_raw - async_download_single_readme_image.delay('fake_slug', 'https://www.f.com') + async_download_single_readme_image.delay("fake_slug", "https://www.f.com") - readme = self.bc.database.get_model('registry.asset').objects.first().get_readme()['decoded'] + readme = self.bc.database.get_model("registry.asset").objects.first().get_readme()["decoded"] - self.assertEqual(readme_raw, self.bc.database.get_model('registry.asset').objects.first().readme_raw) + self.assertEqual(readme_raw, self.bc.database.get_model("registry.asset").objects.first().readme_raw) self.assertEqual(fake_readme, readme) diff --git a/breathecode/registry/tests/tasks/tests_async_resize_asset_thumbnail.py b/breathecode/registry/tests/tasks/tests_async_resize_asset_thumbnail.py index 0650167d3..2e1150fe7 100644 --- a/breathecode/registry/tests/tasks/tests_async_resize_asset_thumbnail.py +++ b/breathecode/registry/tests/tasks/tests_async_resize_asset_thumbnail.py @@ -1,6 +1,7 @@ """ Test /answer """ + from random import randint from unittest.mock import MagicMock, call, patch @@ -22,8 +23,8 @@ def json(self): WIDTH = randint(0, 2000) HEIGHT = randint(0, 2000) -FUNCTION_GOOD_RESPONSE = Response({'status_code': 200, 'message': 'Ok', 'width': WIDTH, 'height': HEIGHT}, 200) -FUNCTION_BAD_RESPONSE = Response({'status_code': 400, 'message': 'Bad response'}, 400) +FUNCTION_GOOD_RESPONSE = Response({"status_code": 200, "message": "Ok", "width": WIDTH, "height": HEIGHT}, 200) +FUNCTION_BAD_RESPONSE = Response({"status_code": 400, "message": "Bad response"}, 400) def apply_get_env(configuration={}): @@ -39,109 +40,150 @@ class RegistryTestSuite(RegistryTestCase): 🔽🔽🔽 GET without Media """ - @patch('logging.Logger.error', MagicMock()) + @patch("logging.Logger.error", MagicMock()) @patch( - 
'os.getenv', - MagicMock(side_effect=apply_get_env({ - 'GOOGLE_PROJECT_ID': 'labor-day-story', - 'MEDIA_GALLERY_BUCKET': 'bucket-name', - }))) + "os.getenv", + MagicMock( + side_effect=apply_get_env( + { + "GOOGLE_PROJECT_ID": "labor-day-story", + "MEDIA_GALLERY_BUCKET": "bucket-name", + } + ) + ), + ) def test__without_media(self): # model = self.bc.database.create(asset=1) async_resize_asset_thumbnail.delay(1) - self.assertEqual(self.bc.database.list_of('media.Media'), []) - self.assertEqual(self.bc.database.list_of('media.MediaResolution'), []) - self.assertEqual(Logger.error.call_args_list, [call('Media with id 1 not found')]) + self.assertEqual(self.bc.database.list_of("media.Media"), []) + self.assertEqual(self.bc.database.list_of("media.MediaResolution"), []) + self.assertEqual(Logger.error.call_args_list, [call("Media with id 1 not found")]) """ 🔽🔽🔽 GET with Media """ - @patch('logging.Logger.error', MagicMock()) + @patch("logging.Logger.error", MagicMock()) @patch( - 'os.getenv', - MagicMock(side_effect=apply_get_env({ - 'GOOGLE_PROJECT_ID': 'labor-day-story', - 'MEDIA_GALLERY_BUCKET': 'bucket-name', - }))) + "os.getenv", + MagicMock( + side_effect=apply_get_env( + { + "GOOGLE_PROJECT_ID": "labor-day-story", + "MEDIA_GALLERY_BUCKET": "bucket-name", + } + ) + ), + ) def test__with_media(self): model = self.bc.database.create(media=1) async_resize_asset_thumbnail.delay(1) - self.assertEqual(self.bc.database.list_of('media.Media'), [self.bc.format.to_dict(model.media)]) - self.assertEqual(self.bc.database.list_of('media.MediaResolution'), []) - self.assertEqual(Logger.error.call_args_list, [ - call('async_resize_asset_thumbnail needs the width or height parameter'), - ]) + self.assertEqual(self.bc.database.list_of("media.Media"), [self.bc.format.to_dict(model.media)]) + self.assertEqual(self.bc.database.list_of("media.MediaResolution"), []) + self.assertEqual( + Logger.error.call_args_list, + [ + call("async_resize_asset_thumbnail needs the width or height parameter"), + ], + ) """ 🔽🔽🔽 GET with Media, passing width and height """ - @patch('logging.Logger.error', MagicMock()) + @patch("logging.Logger.error", MagicMock()) @patch( - 'os.getenv', - MagicMock(side_effect=apply_get_env({ - 'GOOGLE_PROJECT_ID': 'labor-day-story', - 'MEDIA_GALLERY_BUCKET': 'bucket-name', - }))) + "os.getenv", + MagicMock( + side_effect=apply_get_env( + { + "GOOGLE_PROJECT_ID": "labor-day-story", + "MEDIA_GALLERY_BUCKET": "bucket-name", + } + ) + ), + ) def test__with_media__passing_width__passing_height(self): model = self.bc.database.create(media=1) async_resize_asset_thumbnail.delay(1, width=WIDTH, height=HEIGHT) - self.assertEqual(self.bc.database.list_of('media.Media'), [self.bc.format.to_dict(model.media)]) - self.assertEqual(self.bc.database.list_of('media.MediaResolution'), []) - self.assertEqual(Logger.error.call_args_list, [ - call("async_resize_asset_thumbnail can't be used with width and height together"), - ]) + self.assertEqual(self.bc.database.list_of("media.Media"), [self.bc.format.to_dict(model.media)]) + self.assertEqual(self.bc.database.list_of("media.MediaResolution"), []) + self.assertEqual( + Logger.error.call_args_list, + [ + call("async_resize_asset_thumbnail can't be used with width and height together"), + ], + ) """ 🔽🔽🔽 GET with Media, passing width or height, function return good response """ - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.services.google_cloud.function_v1.FunctionV1.__init__', MagicMock(return_value=None)) - 
@patch('breathecode.services.google_cloud.function_v1.FunctionV1.call', - MagicMock(return_value=FUNCTION_GOOD_RESPONSE)) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.services.google_cloud.function_v1.FunctionV1.__init__", MagicMock(return_value=None)) + @patch( + "breathecode.services.google_cloud.function_v1.FunctionV1.call", MagicMock(return_value=FUNCTION_GOOD_RESPONSE) + ) @patch( - 'os.getenv', - MagicMock(side_effect=apply_get_env({ - 'GOOGLE_PROJECT_ID': 'labor-day-story', - 'MEDIA_GALLERY_BUCKET': 'bucket-name', - }))) + "os.getenv", + MagicMock( + side_effect=apply_get_env( + { + "GOOGLE_PROJECT_ID": "labor-day-story", + "MEDIA_GALLERY_BUCKET": "bucket-name", + } + ) + ), + ) def test__with_media__passing_width_or_height__function_return_good_response(self): model = self.bc.database.create(media=1) - cases = [((1, ), {'width': WIDTH}, 1), ((1, ), {'height': HEIGHT}, 2)] + cases = [((1,), {"width": WIDTH}, 1), ((1,), {"height": HEIGHT}, 2)] for args, kwargs, media_resolution_id in cases: async_resize_asset_thumbnail.delay(*args, **kwargs) - self.assertEqual(self.bc.database.list_of('media.Media'), [self.bc.format.to_dict(model.media)]) - self.assertEqual(self.bc.database.list_of('media.MediaResolution'), [{ - 'hash': model.media.hash, - 'height': HEIGHT, - 'hits': 0, - 'id': media_resolution_id, - 'width': WIDTH, - }]) + self.assertEqual(self.bc.database.list_of("media.Media"), [self.bc.format.to_dict(model.media)]) + self.assertEqual( + self.bc.database.list_of("media.MediaResolution"), + [ + { + "hash": model.media.hash, + "height": HEIGHT, + "hits": 0, + "id": media_resolution_id, + "width": WIDTH, + } + ], + ) self.assertEqual(Logger.error.call_args_list, []) - self.assertEqual(FunctionV1.__init__.call_args_list, [ - call(region='us-central1', project_id='labor-day-story', name='resize-image'), - ]) - - self.assertEqual(str(FunctionV1.call.call_args_list), - str([ - call({ - **kwargs, - 'filename': model.media.hash, - 'bucket': 'bucket-name', - }), - ])) + self.assertEqual( + FunctionV1.__init__.call_args_list, + [ + call(region="us-central1", project_id="labor-day-story", name="resize-image"), + ], + ) + + self.assertEqual( + str(FunctionV1.call.call_args_list), + str( + [ + call( + { + **kwargs, + "filename": model.media.hash, + "bucket": "bucket-name", + } + ), + ] + ), + ) # teardown - self.bc.database.delete('media.MediaResolution') + self.bc.database.delete("media.MediaResolution") FunctionV1.__init__.call_args_list = [] FunctionV1.call.call_args_list = [] @@ -149,44 +191,63 @@ def test__with_media__passing_width_or_height__function_return_good_response(sel 🔽🔽🔽 GET with Media, passing width or height, function return bad response """ - @patch('logging.Logger.error', MagicMock()) - @patch('breathecode.services.google_cloud.function_v1.FunctionV1.__init__', MagicMock(return_value=None)) - @patch('breathecode.services.google_cloud.function_v1.FunctionV1.call', - MagicMock(return_value=FUNCTION_BAD_RESPONSE)) + @patch("logging.Logger.error", MagicMock()) + @patch("breathecode.services.google_cloud.function_v1.FunctionV1.__init__", MagicMock(return_value=None)) + @patch( + "breathecode.services.google_cloud.function_v1.FunctionV1.call", MagicMock(return_value=FUNCTION_BAD_RESPONSE) + ) @patch( - 'os.getenv', - MagicMock(side_effect=apply_get_env({ - 'GOOGLE_PROJECT_ID': 'labor-day-story', - 'MEDIA_GALLERY_BUCKET': 'bucket-name', - }))) + "os.getenv", + MagicMock( + side_effect=apply_get_env( + { + "GOOGLE_PROJECT_ID": "labor-day-story", + 
"MEDIA_GALLERY_BUCKET": "bucket-name", + } + ) + ), + ) def test__with_media__passing_width_or_height__function_return_bad_response(self): model = self.bc.database.create(media=1) - cases = [((1, ), {'width': WIDTH}), ((1, ), {'height': HEIGHT})] + cases = [((1,), {"width": WIDTH}), ((1,), {"height": HEIGHT})] for args, kwargs in cases: async_resize_asset_thumbnail.delay(*args, **kwargs) - self.assertEqual(self.bc.database.list_of('media.Media'), [self.bc.format.to_dict(model.media)]) - self.assertEqual(self.bc.database.list_of('media.MediaResolution'), []) - self.assertEqual(Logger.error.call_args_list, [ - call('Unhandled error with `resize-image` cloud function, response ' - '' + str(FUNCTION_BAD_RESPONSE.json()) + ''), - ]) - - self.assertEqual(FunctionV1.__init__.call_args_list, [ - call(region='us-central1', project_id='labor-day-story', name='resize-image'), - ]) - - self.assertEqual(FunctionV1.call.call_args_list, [ - call({ - **kwargs, - 'filename': model.media.hash, - 'bucket': 'bucket-name', - }), - ]) + self.assertEqual(self.bc.database.list_of("media.Media"), [self.bc.format.to_dict(model.media)]) + self.assertEqual(self.bc.database.list_of("media.MediaResolution"), []) + self.assertEqual( + Logger.error.call_args_list, + [ + call( + "Unhandled error with `resize-image` cloud function, response " + "" + str(FUNCTION_BAD_RESPONSE.json()) + "" + ), + ], + ) + + self.assertEqual( + FunctionV1.__init__.call_args_list, + [ + call(region="us-central1", project_id="labor-day-story", name="resize-image"), + ], + ) + + self.assertEqual( + FunctionV1.call.call_args_list, + [ + call( + { + **kwargs, + "filename": model.media.hash, + "bucket": "bucket-name", + } + ), + ], + ) # teardown - self.bc.database.delete('media.MediaResolution') + self.bc.database.delete("media.MediaResolution") Logger.error.call_args_list = [] FunctionV1.__init__.call_args_list = [] FunctionV1.call.call_args_list = [] diff --git a/breathecode/registry/tests/urls/v1/tests_academy_asset.py b/breathecode/registry/tests/urls/v1/tests_academy_asset.py index 1ef8625e0..048f92900 100644 --- a/breathecode/registry/tests/urls/v1/tests_academy_asset.py +++ b/breathecode/registry/tests/urls/v1/tests_academy_asset.py @@ -18,60 +18,60 @@ def database_item(academy, category, data={}): return { - 'academy_id': academy.id, - 'assessment_id': None, - 'asset_type': 'PROJECT', - 'author_id': None, - 'authors_username': None, - 'category_id': category.id, - 'cleaning_status': 'PENDING', - 'cleaning_status_details': None, - 'config': None, - 'delivery_formats': 'url', - 'delivery_instructions': None, - 'readme_updated_at': None, - 'delivery_regex_url': None, - 'description': None, - 'difficulty': None, - 'duration': None, - 'external': False, - 'gitpod': False, - 'graded': False, - 'html': None, - 'id': 1, - 'interactive': False, - 'intro_video_url': None, - 'is_seo_tracked': True, - 'lang': None, - 'last_cleaning_at': None, - 'last_seo_scan_at': None, - 'last_synch_at': None, - 'last_test_at': None, - 'optimization_rating': None, - 'owner_id': None, - 'github_commit_hash': None, - 'preview': None, - 'published_at': None, - 'readme': None, - 'readme_raw': None, - 'readme_url': None, - 'requirements': None, - 'seo_json_status': None, - 'slug': '', - 'solution_url': None, - 'solution_video_url': None, - 'status': 'NOT_STARTED', - 'status_text': None, - 'sync_status': None, - 'test_status': None, - 'title': '', - 'url': None, - 'visibility': 'PUBLIC', - 'with_solutions': False, - 'with_video': False, - 'is_auto_subscribed': True, - 
'superseded_by_id': None, - 'enable_table_of_content': True, + "academy_id": academy.id, + "assessment_id": None, + "asset_type": "PROJECT", + "author_id": None, + "authors_username": None, + "category_id": category.id, + "cleaning_status": "PENDING", + "cleaning_status_details": None, + "config": None, + "delivery_formats": "url", + "delivery_instructions": None, + "readme_updated_at": None, + "delivery_regex_url": None, + "description": None, + "difficulty": None, + "duration": None, + "external": False, + "gitpod": False, + "graded": False, + "html": None, + "id": 1, + "interactive": False, + "intro_video_url": None, + "is_seo_tracked": True, + "lang": None, + "last_cleaning_at": None, + "last_seo_scan_at": None, + "last_synch_at": None, + "last_test_at": None, + "optimization_rating": None, + "owner_id": None, + "github_commit_hash": None, + "preview": None, + "published_at": None, + "readme": None, + "readme_raw": None, + "readme_url": None, + "requirements": None, + "seo_json_status": None, + "slug": "", + "solution_url": None, + "solution_video_url": None, + "status": "NOT_STARTED", + "status_text": None, + "sync_status": None, + "test_status": None, + "title": "", + "url": None, + "visibility": "PUBLIC", + "with_solutions": False, + "with_video": False, + "is_auto_subscribed": True, + "superseded_by_id": None, + "enable_table_of_content": True, **data, } @@ -80,55 +80,52 @@ def post_serializer(academy, category, data={}): translations = {} return { - 'academy': { - 'id': academy.id, - 'name': academy.name + "academy": {"id": academy.id, "name": academy.name}, + "asset_type": "PROJECT", + "author": None, + "category": { + "id": category.id, + "slug": category.slug, + "title": category.title, }, - 'asset_type': 'PROJECT', - 'author': None, - 'category': { - 'id': category.id, - 'slug': category.slug, - 'title': category.title, - }, - 'delivery_formats': 'url', - 'delivery_instructions': None, - 'delivery_regex_url': None, - 'description': None, - 'difficulty': None, - 'duration': None, - 'external': False, - 'gitpod': False, - 'graded': False, - 'id': academy.id, - 'interactive': False, - 'intro_video_url': None, - 'lang': None, - 'last_synch_at': None, - 'last_test_at': None, - 'owner': None, - 'preview': None, - 'published_at': None, - 'readme_url': None, - 'seo_keywords': [], - 'slug': '', - 'solution_url': None, - 'solution_video_url': None, - 'status': 'NOT_STARTED', - 'status_text': None, - 'sync_status': None, - 'technologies': [], - 'test_status': None, - 'title': 'model_title', - 'translations': translations, - 'url': None, - 'visibility': 'PUBLIC', - 'with_solutions': False, - 'assets_related': [], - 'with_video': False, - 'superseded_by': None, - 'enable_table_of_content': True, - 'updated_at': UTC_NOW.isoformat().replace('+00:00', 'Z'), + "delivery_formats": "url", + "delivery_instructions": None, + "delivery_regex_url": None, + "description": None, + "difficulty": None, + "duration": None, + "external": False, + "gitpod": False, + "graded": False, + "id": academy.id, + "interactive": False, + "intro_video_url": None, + "lang": None, + "last_synch_at": None, + "last_test_at": None, + "owner": None, + "preview": None, + "published_at": None, + "readme_url": None, + "seo_keywords": [], + "slug": "", + "solution_url": None, + "solution_video_url": None, + "status": "NOT_STARTED", + "status_text": None, + "sync_status": None, + "technologies": [], + "test_status": None, + "title": "model_title", + "translations": translations, + "url": None, + "visibility": "PUBLIC", + 
"with_solutions": False, + "assets_related": [], + "with_video": False, + "superseded_by": None, + "enable_table_of_content": True, + "updated_at": UTC_NOW.isoformat().replace("+00:00", "Z"), **data, } @@ -136,120 +133,116 @@ def post_serializer(academy, category, data={}): def put_serializer(academy, category, asset, data={}): return { - 'assessment': asset.assessment, - 'asset_type': asset.asset_type, - 'author': asset.author, - 'authors_username': None, - 'category': { - 'id': category.id, - 'slug': category.slug, - 'title': category.title - }, - 'cleaning_status': asset.cleaning_status, - 'cleaning_status_details': None, - 'clusters': [], - 'assets_related': [], - 'previous_versions': [], - 'description': None, - 'difficulty': None, - 'readme_updated_at': None, - 'duration': None, - 'external': False, - 'gitpod': False, - 'graded': False, - 'id': asset.id, - 'intro_video_url': None, - 'is_seo_tracked': True, - 'lang': asset.lang, - 'last_synch_at': None, - 'last_test_at': None, - 'last_cleaning_at': None, - 'last_seo_scan_at': None, - 'optimization_rating': None, - 'owner': None, - 'preview': None, - 'published_at': None, - 'readme_url': None, - 'requirements': None, - 'seo_json_status': None, - 'seo_keywords': [], - 'slug': asset.slug, - 'solution_video_url': None, - 'solution_url': None, - 'status': 'NOT_STARTED', - 'status_text': None, - 'sync_status': None, - 'technologies': [], - 'test_status': None, - 'title': asset.title, - 'translations': {}, - 'url': None, - 'visibility': 'PUBLIC', + "assessment": asset.assessment, + "asset_type": asset.asset_type, + "author": asset.author, + "authors_username": None, + "category": {"id": category.id, "slug": category.slug, "title": category.title}, + "cleaning_status": asset.cleaning_status, + "cleaning_status_details": None, + "clusters": [], + "assets_related": [], + "previous_versions": [], + "description": None, + "difficulty": None, + "readme_updated_at": None, + "duration": None, + "external": False, + "gitpod": False, + "graded": False, + "id": asset.id, + "intro_video_url": None, + "is_seo_tracked": True, + "lang": asset.lang, + "last_synch_at": None, + "last_test_at": None, + "last_cleaning_at": None, + "last_seo_scan_at": None, + "optimization_rating": None, + "owner": None, + "preview": None, + "published_at": None, + "readme_url": None, + "requirements": None, + "seo_json_status": None, + "seo_keywords": [], + "slug": asset.slug, + "solution_video_url": None, + "solution_url": None, + "status": "NOT_STARTED", + "status_text": None, + "sync_status": None, + "technologies": [], + "test_status": None, + "title": asset.title, + "translations": {}, + "url": None, + "visibility": "PUBLIC", **data, } @pytest.fixture(autouse=True) def setup(db, monkeypatch): - monkeypatch.setattr('breathecode.registry.signals.asset_slug_modified.send_robust', MagicMock()) + monkeypatch.setattr("breathecode.registry.signals.asset_slug_modified.send_robust", MagicMock()) yield def test__without_auth(bc: Breathecode, client: APIClient): """Test /certificate without auth""" - url = reverse_lazy('registry:academy_asset') + url = reverse_lazy("registry:academy_asset") response = client.get(url) json = response.json() assert json == { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED + "detail": "Authentication credentials were not provided.", + "status_code": status.HTTP_401_UNAUTHORIZED, } assert response.status_code == status.HTTP_401_UNAUTHORIZED - assert bc.database.list_of('registry.Asset') == [] + 
assert bc.database.list_of("registry.Asset") == [] def test__without_capability(bc: Breathecode, client: APIClient): """Test /certificate without auth""" - url = reverse_lazy('registry:academy_asset') + url = reverse_lazy("registry:academy_asset") model = bc.database.create(user=1) client.force_authenticate(user=model.user) response = client.get(url, HTTP_ACADEMY=1) json = response.json() - expected = {'status_code': 403, 'detail': "You (user: 1) don't have this capability: read_asset for academy 1"} + expected = {"status_code": 403, "detail": "You (user: 1) don't have this capability: read_asset for academy 1"} assert json == expected assert response.status_code == status.HTTP_403_FORBIDDEN - assert bc.database.list_of('registry.Asset') == [] + assert bc.database.list_of("registry.Asset") == [] def test__post__without_category(bc: Breathecode, client: APIClient): """Test /Asset without category""" - model = bc.database.create(role=1, capability='crud_asset', profile_academy=1, academy=1, user=1) + model = bc.database.create(role=1, capability="crud_asset", profile_academy=1, academy=1, user=1) client.force_authenticate(user=model.user) - url = reverse_lazy('registry:academy_asset') - data = {'slug': 'model_slug', 'asset_type': 'PROJECT', 'lang': 'es'} - response = client.post(url, data, format='json', HTTP_ACADEMY=1) + url = reverse_lazy("registry:academy_asset") + data = {"slug": "model_slug", "asset_type": "PROJECT", "lang": "es"} + response = client.post(url, data, format="json", HTTP_ACADEMY=1) json = response.json() expected = { - 'detail': 'no-category', - 'status_code': 400, + "detail": "no-category", + "status_code": 400, } assert json == expected assert response.status_code == status.HTTP_400_BAD_REQUEST - assert bc.database.list_of('registry.Asset') == [] + assert bc.database.list_of("registry.Asset") == [] -@patch('breathecode.registry.tasks.async_pull_from_github.delay', MagicMock()) -@patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) +@patch("breathecode.registry.tasks.async_pull_from_github.delay", MagicMock()) +@patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test__post__with__all__mandatory__properties(bc: Breathecode, client: APIClient): """Test /Asset creation with all mandatory properties""" model = bc.database.create( role=1, - capability='crud_asset', + capability="crud_asset", profile_academy=1, academy=1, user=1, @@ -258,43 +251,43 @@ def test__post__with__all__mandatory__properties(bc: Breathecode, client: APICli client.force_authenticate(user=model.user) - url = reverse_lazy('registry:academy_asset') - data = {'slug': 'model_slug', 'asset_type': 'PROJECT', 'lang': 'us', 'category': 1, 'title': 'model_slug'} - response = client.post(url, data, format='json', HTTP_ACADEMY=1) + url = reverse_lazy("registry:academy_asset") + data = {"slug": "model_slug", "asset_type": "PROJECT", "lang": "us", "category": 1, "title": "model_slug"} + response = client.post(url, data, format="json", HTTP_ACADEMY=1) json = response.json() - del data['category'] + del data["category"] expected = post_serializer(model.academy, model.asset_category, data=data) assert json == expected assert response.status_code == status.HTTP_201_CREATED - assert tasks.async_pull_from_github.delay.call_args_list == [call('model_slug')] - assert bc.database.list_of('registry.Asset') == [database_item(model.academy, model.asset_category, data)] + assert tasks.async_pull_from_github.delay.call_args_list == [call("model_slug")] + assert 
bc.database.list_of("registry.Asset") == [database_item(model.academy, model.asset_category, data)] def test_asset__put_many_without_id(bc: Breathecode, client: APIClient): """Test Asset bulk update""" - model = bc.database.create(user=1, - profile_academy=True, - capability='crud_asset', - role='potato', - asset_category=True, - asset={ - 'category_id': 1, - 'academy_id': 1, - 'slug': 'asset-1' - }) + model = bc.database.create( + user=1, + profile_academy=True, + capability="crud_asset", + role="potato", + asset_category=True, + asset={"category_id": 1, "academy_id": 1, "slug": "asset-1"}, + ) client.force_authenticate(user=model.user) - url = reverse_lazy('registry:academy_asset') - data = [{ - 'category': 1, - }] + url = reverse_lazy("registry:academy_asset") + data = [ + { + "category": 1, + } + ] - response = client.put(url, data, format='json', HTTP_ACADEMY=1) + response = client.put(url, data, format="json", HTTP_ACADEMY=1) json = response.json() - expected = {'detail': 'without-id', 'status_code': 400} + expected = {"detail": "without-id", "status_code": 400} assert json == expected assert response.status_code == status.HTTP_400_BAD_REQUEST @@ -303,28 +296,28 @@ def test_asset__put_many_without_id(bc: Breathecode, client: APIClient): def test_asset__put_many_with_wrong_id(bc: Breathecode, client: APIClient): """Test Asset bulk update""" - model = bc.database.create(user=1, - profile_academy=True, - capability='crud_asset', - role='potato', - asset_category=True, - asset={ - 'category_id': 1, - 'academy_id': 1, - 'slug': 'asset-1' - }) + model = bc.database.create( + user=1, + profile_academy=True, + capability="crud_asset", + role="potato", + asset_category=True, + asset={"category_id": 1, "academy_id": 1, "slug": "asset-1"}, + ) client.force_authenticate(user=model.user) - url = reverse_lazy('registry:academy_asset') - data = [{ - 'category': 1, - 'id': 2, - }] + url = reverse_lazy("registry:academy_asset") + data = [ + { + "category": 1, + "id": 2, + } + ] - response = client.put(url, data, format='json', HTTP_ACADEMY=1) + response = client.put(url, data, format="json", HTTP_ACADEMY=1) json = response.json() - expected = {'detail': 'not-found', 'status_code': 404} + expected = {"detail": "not-found", "status_code": 404} assert json == expected assert response.status_code == status.HTTP_404_NOT_FOUND @@ -336,284 +329,320 @@ def test_asset__put_many(bc: Breathecode, client: APIClient): model = bc.database.create( user=1, profile_academy=True, - capability='crud_asset', - role='potato', - asset_category={'lang': 'es'}, - asset=[{ - 'test_status': 'OK', - 'category_id': 1, - 'lang': 'es', - 'academy_id': 1, - 'slug': 'asset-1', - 'test_status': 'OK', - }, { - 'test_status': 'OK', - 'category_id': 1, - 'lang': 'es', - 'academy_id': 1, - 'slug': 'asset-2', - 'test_status': 'OK', - }], + capability="crud_asset", + role="potato", + asset_category={"lang": "es"}, + asset=[ + { + "test_status": "OK", + "category_id": 1, + "lang": "es", + "academy_id": 1, + "slug": "asset-1", + "test_status": "OK", + }, + { + "test_status": "OK", + "category_id": 1, + "lang": "es", + "academy_id": 1, + "slug": "asset-2", + "test_status": "OK", + }, + ], ) client.force_authenticate(user=model.user) - url = reverse_lazy('registry:academy_asset') - data = [{ - 'id': 1, - 'category': 1, - }, { - 'id': 2, - 'category': 1, - }] + url = reverse_lazy("registry:academy_asset") + data = [ + { + "id": 1, + "category": 1, + }, + { + "id": 2, + "category": 1, + }, + ] - response = client.put(url, data, format='json', 
HTTP_ACADEMY=1) + response = client.put(url, data, format="json", HTTP_ACADEMY=1) json = response.json() for item in json: - del item['created_at'] - del item['updated_at'] + del item["created_at"] + del item["updated_at"] expected = [ - put_serializer(model.academy, model.asset_category, asset, data={ - 'test_status': 'OK', - }) for i, asset in enumerate(model.asset) + put_serializer( + model.academy, + model.asset_category, + asset, + data={ + "test_status": "OK", + }, + ) + for i, asset in enumerate(model.asset) ] assert json == expected assert response.status_code == status.HTTP_200_OK -@patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) +@patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_asset__put_many_with_test_status_ok(bc: Breathecode, client: APIClient): """Test Asset bulk update""" - model = bc.database.create(user=1, - profile_academy=True, - capability='crud_asset', - role='potato', - asset_category={'lang': 'es'}, - asset={ - 'category_id': 1, - 'academy_id': 1, - 'slug': 'asset-1', - 'visibility': 'PRIVATE', - 'test_status': 'OK', - 'lang': 'es', - }) + model = bc.database.create( + user=1, + profile_academy=True, + capability="crud_asset", + role="potato", + asset_category={"lang": "es"}, + asset={ + "category_id": 1, + "academy_id": 1, + "slug": "asset-1", + "visibility": "PRIVATE", + "test_status": "OK", + "lang": "es", + }, + ) client.force_authenticate(user=model.user) title = bc.fake.slug() date = timezone.now() - url = reverse_lazy('registry:academy_asset') - data = [{ - 'category': 1, - 'created_at': bc.datetime.to_iso_string(UTC_NOW), - 'updated_at': bc.datetime.to_iso_string(UTC_NOW), - 'title': title, - 'id': 1, - 'visibility': 'PUBLIC', - 'asset_type': 'VIDEO', - }] - - response = client.put(url, data, format='json', HTTP_ACADEMY=1) + url = reverse_lazy("registry:academy_asset") + data = [ + { + "category": 1, + "created_at": bc.datetime.to_iso_string(UTC_NOW), + "updated_at": bc.datetime.to_iso_string(UTC_NOW), + "title": title, + "id": 1, + "visibility": "PUBLIC", + "asset_type": "VIDEO", + } + ] + + response = client.put(url, data, format="json", HTTP_ACADEMY=1) json = response.json() expected = [ - put_serializer(model.academy, - model.asset_category, - model.asset, - data={ - 'test_status': 'OK', - 'created_at': bc.datetime.to_iso_string(UTC_NOW), - 'updated_at': bc.datetime.to_iso_string(UTC_NOW), - 'title': title, - 'id': 1, - 'visibility': 'PUBLIC', - 'asset_type': 'VIDEO', - }) + put_serializer( + model.academy, + model.asset_category, + model.asset, + data={ + "test_status": "OK", + "created_at": bc.datetime.to_iso_string(UTC_NOW), + "updated_at": bc.datetime.to_iso_string(UTC_NOW), + "title": title, + "id": 1, + "visibility": "PUBLIC", + "asset_type": "VIDEO", + }, + ) ] assert json == expected assert response.status_code == status.HTTP_200_OK -@patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) +@patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_asset__put_many_with_test_status_warning(bc: Breathecode, client: APIClient): """Test Asset bulk update""" - model = bc.database.create(user=1, - profile_academy=True, - capability='crud_asset', - role='potato', - asset_category={'lang': 'es'}, - asset={ - 'category_id': 1, - 'academy_id': 1, - 'slug': 'asset-1', - 'visibility': 'PRIVATE', - 'test_status': 'WARNING', - 'lang': 'es', - }) + model = bc.database.create( + user=1, + profile_academy=True, + capability="crud_asset", + role="potato", + asset_category={"lang": 
"es"}, + asset={ + "category_id": 1, + "academy_id": 1, + "slug": "asset-1", + "visibility": "PRIVATE", + "test_status": "WARNING", + "lang": "es", + }, + ) client.force_authenticate(user=model.user) title = bc.fake.slug() date = timezone.now() - url = reverse_lazy('registry:academy_asset') - data = [{ - 'category': 1, - 'created_at': bc.datetime.to_iso_string(UTC_NOW), - 'updated_at': bc.datetime.to_iso_string(UTC_NOW), - 'title': title, - 'id': 1, - 'visibility': 'PUBLIC', - 'asset_type': 'VIDEO', - }] - - response = client.put(url, data, format='json', HTTP_ACADEMY=1) + url = reverse_lazy("registry:academy_asset") + data = [ + { + "category": 1, + "created_at": bc.datetime.to_iso_string(UTC_NOW), + "updated_at": bc.datetime.to_iso_string(UTC_NOW), + "title": title, + "id": 1, + "visibility": "PUBLIC", + "asset_type": "VIDEO", + } + ] + + response = client.put(url, data, format="json", HTTP_ACADEMY=1) json = response.json() expected = [ - put_serializer(model.academy, - model.asset_category, - model.asset, - data={ - 'test_status': 'WARNING', - 'created_at': bc.datetime.to_iso_string(UTC_NOW), - 'updated_at': bc.datetime.to_iso_string(UTC_NOW), - 'title': title, - 'id': 1, - 'visibility': 'PUBLIC', - 'asset_type': 'VIDEO', - }) + put_serializer( + model.academy, + model.asset_category, + model.asset, + data={ + "test_status": "WARNING", + "created_at": bc.datetime.to_iso_string(UTC_NOW), + "updated_at": bc.datetime.to_iso_string(UTC_NOW), + "title": title, + "id": 1, + "visibility": "PUBLIC", + "asset_type": "VIDEO", + }, + ) ] assert json == expected assert response.status_code == status.HTTP_200_OK -@patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) +@patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_asset__put_many_with_test_status_pending(bc: Breathecode, client: APIClient): """Test Asset bulk update""" - model = bc.database.create(user=1, - profile_academy=True, - capability='crud_asset', - role='potato', - asset_category={'lang': 'es'}, - asset={ - 'category_id': 1, - 'academy_id': 1, - 'slug': 'asset-1', - 'visibility': 'PRIVATE', - 'test_status': 'PENDING', - 'lang': 'es', - }) + model = bc.database.create( + user=1, + profile_academy=True, + capability="crud_asset", + role="potato", + asset_category={"lang": "es"}, + asset={ + "category_id": 1, + "academy_id": 1, + "slug": "asset-1", + "visibility": "PRIVATE", + "test_status": "PENDING", + "lang": "es", + }, + ) client.force_authenticate(user=model.user) title = bc.fake.slug() date = timezone.now() - url = reverse_lazy('registry:academy_asset') - data = [{ - 'category': 1, - 'created_at': bc.datetime.to_iso_string(UTC_NOW), - 'updated_at': bc.datetime.to_iso_string(UTC_NOW), - 'title': title, - 'id': 1, - 'visibility': 'PUBLIC', - 'asset_type': 'VIDEO', - }] - - response = client.put(url, data, format='json', HTTP_ACADEMY=1) + url = reverse_lazy("registry:academy_asset") + data = [ + { + "category": 1, + "created_at": bc.datetime.to_iso_string(UTC_NOW), + "updated_at": bc.datetime.to_iso_string(UTC_NOW), + "title": title, + "id": 1, + "visibility": "PUBLIC", + "asset_type": "VIDEO", + } + ] + + response = client.put(url, data, format="json", HTTP_ACADEMY=1) json = response.json() - expected = {'detail': 'This asset has to pass tests successfully before publishing', 'status_code': 400} + expected = {"detail": "This asset has to pass tests successfully before publishing", "status_code": 400} assert json == expected assert response.status_code == status.HTTP_400_BAD_REQUEST 
-@patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) +@patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_asset__put_many_with_test_status_error(bc: Breathecode, client: APIClient): """Test Asset bulk update""" - model = bc.database.create(user=1, - profile_academy=True, - capability='crud_asset', - role='potato', - asset_category={'lang': 'es'}, - asset={ - 'category_id': 1, - 'academy_id': 1, - 'slug': 'asset-1', - 'visibility': 'PRIVATE', - 'test_status': 'ERROR', - 'lang': 'es', - }) + model = bc.database.create( + user=1, + profile_academy=True, + capability="crud_asset", + role="potato", + asset_category={"lang": "es"}, + asset={ + "category_id": 1, + "academy_id": 1, + "slug": "asset-1", + "visibility": "PRIVATE", + "test_status": "ERROR", + "lang": "es", + }, + ) client.force_authenticate(user=model.user) title = bc.fake.slug() date = timezone.now() - url = reverse_lazy('registry:academy_asset') - data = [{ - 'category': 1, - 'created_at': bc.datetime.to_iso_string(UTC_NOW), - 'updated_at': bc.datetime.to_iso_string(UTC_NOW), - 'title': title, - 'id': 1, - 'visibility': 'PUBLIC', - 'asset_type': 'VIDEO', - }] - - response = client.put(url, data, format='json', HTTP_ACADEMY=1) + url = reverse_lazy("registry:academy_asset") + data = [ + { + "category": 1, + "created_at": bc.datetime.to_iso_string(UTC_NOW), + "updated_at": bc.datetime.to_iso_string(UTC_NOW), + "title": title, + "id": 1, + "visibility": "PUBLIC", + "asset_type": "VIDEO", + } + ] + + response = client.put(url, data, format="json", HTTP_ACADEMY=1) json = response.json() - expected = {'detail': 'This asset has to pass tests successfully before publishing', 'status_code': 400} + expected = {"detail": "This asset has to pass tests successfully before publishing", "status_code": 400} assert json == expected assert response.status_code == status.HTTP_400_BAD_REQUEST -@patch('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) +@patch("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) def test_asset__put_many_with_test_status_Needs_Resync(bc: Breathecode, client: APIClient): """Test Asset bulk update""" - model = bc.database.create(user=1, - profile_academy=True, - capability='crud_asset', - role='potato', - asset_category={'lang': 'es'}, - asset={ - 'category_id': 1, - 'academy_id': 1, - 'slug': 'asset-1', - 'visibility': 'PRIVATE', - 'test_status': 'NEEDS_RESYNC', - 'lang': 'es', - }) + model = bc.database.create( + user=1, + profile_academy=True, + capability="crud_asset", + role="potato", + asset_category={"lang": "es"}, + asset={ + "category_id": 1, + "academy_id": 1, + "slug": "asset-1", + "visibility": "PRIVATE", + "test_status": "NEEDS_RESYNC", + "lang": "es", + }, + ) client.force_authenticate(user=model.user) title = bc.fake.slug() date = timezone.now() - url = reverse_lazy('registry:academy_asset') - data = [{ - 'category': 1, - 'created_at': bc.datetime.to_iso_string(UTC_NOW), - 'updated_at': bc.datetime.to_iso_string(UTC_NOW), - 'title': title, - 'id': 1, - 'visibility': 'PUBLIC', - 'asset_type': 'VIDEO', - }] - - response = client.put(url, data, format='json', HTTP_ACADEMY=1) + url = reverse_lazy("registry:academy_asset") + data = [ + { + "category": 1, + "created_at": bc.datetime.to_iso_string(UTC_NOW), + "updated_at": bc.datetime.to_iso_string(UTC_NOW), + "title": title, + "id": 1, + "visibility": "PUBLIC", + "asset_type": "VIDEO", + } + ] + + response = client.put(url, data, format="json", HTTP_ACADEMY=1) json = response.json() - expected = 
{'detail': 'This asset has to pass tests successfully before publishing', 'status_code': 400} + expected = {"detail": "This asset has to pass tests successfully before publishing", "status_code": 400} assert json == expected assert response.status_code == status.HTTP_400_BAD_REQUEST diff --git a/breathecode/registry/tests/urls/v1/tests_academy_technology.py b/breathecode/registry/tests/urls/v1/tests_academy_technology.py index d1dbdfc4f..1cf60acf8 100644 --- a/breathecode/registry/tests/urls/v1/tests_academy_technology.py +++ b/breathecode/registry/tests/urls/v1/tests_academy_technology.py @@ -1,6 +1,7 @@ """ Test /answer """ + import random import string from unittest.mock import MagicMock, call, patch @@ -16,24 +17,28 @@ def get_serializer(asset_technology, assets=[], asset_technologies=[], data={}): return { - 'alias': asset_technologies, - 'assets': assets, - 'lang': None, - 'description': asset_technology.description, - 'icon_url': asset_technology.icon_url, - 'is_deprecated': asset_technology.is_deprecated, - 'parent': { - 'description': asset_technology.description, - 'icon_url': asset_technology.icon_url, - 'is_deprecated': asset_technology.is_deprecated, - 'slug': asset_technology.slug, - 'title': asset_technology.title, - 'visibility': asset_technology.visibility, - } if asset_technology.parent else None, - 'slug': asset_technology.slug, - 'title': asset_technology.title, - 'visibility': asset_technology.visibility, - 'sort_priority': asset_technology.sort_priority, + "alias": asset_technologies, + "assets": assets, + "lang": None, + "description": asset_technology.description, + "icon_url": asset_technology.icon_url, + "is_deprecated": asset_technology.is_deprecated, + "parent": ( + { + "description": asset_technology.description, + "icon_url": asset_technology.icon_url, + "is_deprecated": asset_technology.is_deprecated, + "slug": asset_technology.slug, + "title": asset_technology.title, + "visibility": asset_technology.visibility, + } + if asset_technology.parent + else None + ), + "slug": asset_technology.slug, + "title": asset_technology.title, + "visibility": asset_technology.visibility, + "sort_priority": asset_technology.sort_priority, **data, } @@ -44,24 +49,24 @@ class RegistryTestSuite(RegistryTestCase): """ def test_without_auth(self): - url = reverse_lazy('registry:academy_technology') + url = reverse_lazy("registry:academy_technology") response = self.client.get(url) json = response.json() - expected = {'detail': 'Authentication credentials were not provided.', 'status_code': 401} + expected = {"detail": "Authentication credentials were not provided.", "status_code": 401} self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) - self.assertEqual(self.bc.database.list_of('registry.Asset'), []) + self.assertEqual(self.bc.database.list_of("registry.Asset"), []) def test_without_academy_id(self): model = self.generate_models(authenticate=True) - url = reverse_lazy('registry:academy_technology') + url = reverse_lazy("registry:academy_technology") response = self.client.get(url) json = response.json() expected = { - 'detail': "Missing academy_id parameter expected for the endpoint url or 'Academy' header", - 'status_code': 403 + "detail": "Missing academy_id parameter expected for the endpoint url or 'Academy' header", + "status_code": 403, } self.assertEqual(json, expected) @@ -77,12 +82,12 @@ def test_without_capability(self): authenticate=True, profile_academy=True, ) - url = reverse_lazy('registry:academy_technology') + url 
= reverse_lazy("registry:academy_technology") response = self.client.get(url) json = response.json() expected = { - 'detail': "You (user: 1) don't have this capability: read_technology for academy 1", - 'status_code': 403 + "detail": "You (user: 1) don't have this capability: read_technology for academy 1", + "status_code": 403, } self.assertEqual(json, expected) @@ -94,15 +99,15 @@ def test_without_capability(self): def test_with_zero_asset_technologies(self): self.headers(academy=1) - model = self.generate_models(authenticate=True, profile_academy=True, role=1, capability='read_technology') - url = reverse_lazy('registry:academy_technology') + model = self.generate_models(authenticate=True, profile_academy=True, role=1, capability="read_technology") + url = reverse_lazy("registry:academy_technology") response = self.client.get(url) json = response.json() expected = [] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('registry.AssetTechnology'), []) + self.assertEqual(self.bc.database.list_of("registry.AssetTechnology"), []) """ 🔽🔽🔽 GET with two AssetTechnology @@ -110,14 +115,19 @@ def test_with_zero_asset_technologies(self): def test_with_two_asset_technologies(self): self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - role=1, - asset_technology=(2, { - 'visibility': 'PUBLIC', - }), - capability='read_technology') - url = reverse_lazy('registry:academy_technology') + model = self.generate_models( + authenticate=True, + profile_academy=True, + role=1, + asset_technology=( + 2, + { + "visibility": "PUBLIC", + }, + ), + capability="read_technology", + ) + url = reverse_lazy("registry:academy_technology") response = self.client.get(url) json = response.json() expected = [get_serializer(x) for x in sorted(model.asset_technology, key=lambda x: x.slug, reverse=True)] @@ -125,7 +135,7 @@ def test_with_two_asset_technologies(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('registry.AssetTechnology'), + self.bc.database.list_of("registry.AssetTechnology"), self.bc.format.to_dict(model.asset_technology), ) @@ -135,13 +145,15 @@ def test_with_two_asset_technologies(self): def test_with_two_asset_technologies__passing_include_children_as_false(self): self.headers(academy=1) - asset_technologies = [{'parent_id': n} for n in range(1, 3)] - model = self.generate_models(authenticate=True, - profile_academy=True, - role=1, - asset_technology=asset_technologies, - capability='read_technology') - url = reverse_lazy('registry:academy_technology') + '?include_children=false' + asset_technologies = [{"parent_id": n} for n in range(1, 3)] + model = self.generate_models( + authenticate=True, + profile_academy=True, + role=1, + asset_technology=asset_technologies, + capability="read_technology", + ) + url = reverse_lazy("registry:academy_technology") + "?include_children=false" response = self.client.get(url) json = response.json() expected = [] @@ -149,20 +161,22 @@ def test_with_two_asset_technologies__passing_include_children_as_false(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('registry.AssetTechnology'), + self.bc.database.list_of("registry.AssetTechnology"), self.bc.format.to_dict(model.asset_technology), ) def 
test_with_two_asset_technologies__passing_include_children_as_true(self): self.headers(academy=1) - asset_technologies = [{'visibility': 'PUBLIC', 'parent_id': n} for n in range(1, 3)] - model = self.generate_models(authenticate=True, - profile_academy=True, - role=1, - asset=1, - asset_technology=asset_technologies, - capability='read_technology') - url = reverse_lazy('registry:academy_technology') + '?include_children=true' + asset_technologies = [{"visibility": "PUBLIC", "parent_id": n} for n in range(1, 3)] + model = self.generate_models( + authenticate=True, + profile_academy=True, + role=1, + asset=1, + asset_technology=asset_technologies, + capability="read_technology", + ) + url = reverse_lazy("registry:academy_technology") + "?include_children=true" response = self.client.get(url) json = response.json() expected = [ @@ -173,7 +187,7 @@ def test_with_two_asset_technologies__passing_include_children_as_true(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('registry.AssetTechnology'), + self.bc.database.list_of("registry.AssetTechnology"), self.bc.format.to_dict(model.asset_technology), ) @@ -192,19 +206,21 @@ def test_with_two_asset_technologies__passing_language__not_found(self): lang = random.choices(string.ascii_lowercase, k=2) random.shuffle(lang) - query = ''.join(query) - lang = ''.join(lang) + query = "".join(query) + lang = "".join(lang) - asset_technologies = [{'lang': lang} for _ in range(0, 2)] - model = self.generate_models(authenticate=True, - profile_academy=True, - role=1, - asset_technology=asset_technologies, - capability='read_technology') + asset_technologies = [{"lang": lang} for _ in range(0, 2)] + model = self.generate_models( + authenticate=True, + profile_academy=True, + role=1, + asset_technology=asset_technologies, + capability="read_technology", + ) self.headers(academy=model.academy.id) - url = reverse_lazy('registry:academy_technology') + f'?language={query}' + url = reverse_lazy("registry:academy_technology") + f"?language={query}" response = self.client.get(url) json = response.json() expected = [] @@ -212,42 +228,44 @@ def test_with_two_asset_technologies__passing_language__not_found(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('registry.AssetTechnology'), + self.bc.database.list_of("registry.AssetTechnology"), self.bc.format.to_dict(model.asset_technology), ) # teardown - self.bc.database.delete('registry.AssetTechnology') + self.bc.database.delete("registry.AssetTechnology") def test_with_two_asset_technologies__passing_language__found(self): - cases = [('en', 'us'), ('us', 'us'), ('es', 'es'), ('es', ''), ('es', None)] + cases = [("en", "us"), ("us", "us"), ("es", "es"), ("es", ""), ("es", None)] for query, value in cases: - asset_technologies = [{'visibility': 'PUBLIC', 'lang': value} for _ in range(0, 2)] - model = self.generate_models(authenticate=True, - profile_academy=True, - role=1, - asset_technology=asset_technologies, - capability='read_technology') + asset_technologies = [{"visibility": "PUBLIC", "lang": value} for _ in range(0, 2)] + model = self.generate_models( + authenticate=True, + profile_academy=True, + role=1, + asset_technology=asset_technologies, + capability="read_technology", + ) self.headers(academy=model.academy.id) - url = reverse_lazy('registry:academy_technology') + f'?language={query}' + url = 
reverse_lazy("registry:academy_technology") + f"?language={query}" response = self.client.get(url) json = response.json() expected = [ - get_serializer(x, data={'lang': value}) + get_serializer(x, data={"lang": value}) for x in sorted(model.asset_technology, key=lambda x: x.slug, reverse=True) ] self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('registry.AssetTechnology'), + self.bc.database.list_of("registry.AssetTechnology"), self.bc.format.to_dict(model.asset_technology), ) # teardown - self.bc.database.delete('registry.AssetTechnology') + self.bc.database.delete("registry.AssetTechnology") """ 🔽🔽🔽 GET with two AssetTechnology, passing sort_priority @@ -266,21 +284,22 @@ def test_with_two_asset_technologies__passing_sort_priority__not_found(self): while query == sort_priority: sort_priority = random.choice(cases) - asset_technologies = [{ - 'sort_priority': sort_priority, - 'slug': self.bc.fake.slug(), - 'title': self.bc.fake.slug() - } for _ in range(0, 2)] + asset_technologies = [ + {"sort_priority": sort_priority, "slug": self.bc.fake.slug(), "title": self.bc.fake.slug()} + for _ in range(0, 2) + ] - model = self.generate_models(authenticate=True, - profile_academy=True, - role=1, - asset_technology=asset_technologies, - capability='read_technology') + model = self.generate_models( + authenticate=True, + profile_academy=True, + role=1, + asset_technology=asset_technologies, + capability="read_technology", + ) self.headers(academy=model.academy.id) - url = reverse_lazy('registry:academy_technology') + f'?sort_priority={query}' + url = reverse_lazy("registry:academy_technology") + f"?sort_priority={query}" response = self.client.get(url) json = response.json() expected = [] @@ -288,12 +307,12 @@ def test_with_two_asset_technologies__passing_sort_priority__not_found(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('registry.AssetTechnology'), + self.bc.database.list_of("registry.AssetTechnology"), self.bc.format.to_dict(model.asset_technology), ) # teardown - self.bc.database.delete('registry.AssetTechnology') + self.bc.database.delete("registry.AssetTechnology") def test_with_two_asset_technologies__passing_sort_priority__found(self): cases = ( @@ -305,22 +324,27 @@ def test_with_two_asset_technologies__passing_sort_priority__found(self): sort_priority = query - asset_technologies = [{ - 'visibility': 'PUBLIC', - 'sort_priority': sort_priority, - 'slug': self.bc.fake.slug(), - 'title': self.bc.fake.slug() - } for _ in range(0, 2)] + asset_technologies = [ + { + "visibility": "PUBLIC", + "sort_priority": sort_priority, + "slug": self.bc.fake.slug(), + "title": self.bc.fake.slug(), + } + for _ in range(0, 2) + ] - model = self.generate_models(authenticate=True, - profile_academy=True, - role=1, - asset_technology=asset_technologies, - capability='read_technology') + model = self.generate_models( + authenticate=True, + profile_academy=True, + role=1, + asset_technology=asset_technologies, + capability="read_technology", + ) self.headers(academy=model.academy.id) - url = reverse_lazy('registry:academy_technology') + f'?sort_priority={query}' + url = reverse_lazy("registry:academy_technology") + f"?sort_priority={query}" response = self.client.get(url) json = response.json() expected = [get_serializer(x) for x in sorted(model.asset_technology, key=lambda x: x.slug, reverse=True)] @@ -328,12 +352,12 @@ def 
test_with_two_asset_technologies__passing_sort_priority__found(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('registry.AssetTechnology'), + self.bc.database.list_of("registry.AssetTechnology"), self.bc.format.to_dict(model.asset_technology), ) # teardown - self.bc.database.delete('registry.AssetTechnology') + self.bc.database.delete("registry.AssetTechnology") """ 🔽🔽🔽 GET with two AssetTechnology, passing like @@ -342,15 +366,13 @@ def test_with_two_asset_technologies__passing_sort_priority__found(self): def test_with_two_asset_technologies__passing_like__not_found(self): slug = self.bc.fake.slug() - model = self.generate_models(authenticate=True, - profile_academy=True, - role=1, - asset_technology=2, - capability='read_technology') + model = self.generate_models( + authenticate=True, profile_academy=True, role=1, asset_technology=2, capability="read_technology" + ) self.headers(academy=model.academy.id) - url = reverse_lazy('registry:academy_technology') + f'?like={slug}' + url = reverse_lazy("registry:academy_technology") + f"?like={slug}" response = self.client.get(url) json = response.json() expected = [] @@ -358,7 +380,7 @@ def test_with_two_asset_technologies__passing_like__not_found(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('registry.AssetTechnology'), + self.bc.database.list_of("registry.AssetTechnology"), self.bc.format.to_dict(model.asset_technology), ) @@ -369,22 +391,24 @@ def test_with_two_asset_technologies__passing_like__found(self): slug2 = self.bc.fake.slug() cases = [ - ('slug', slug1[0:random.randint(1, len(slug1))], slug1), - ('slug', slug2[0:random.randint(1, len(slug2))], slug2), - ('title', title1[0:random.randint(1, len(title1))], title1), - ('title', title2[0:random.randint(1, len(title2))], title2), + ("slug", slug1[0 : random.randint(1, len(slug1))], slug1), + ("slug", slug2[0 : random.randint(1, len(slug2))], slug2), + ("title", title1[0 : random.randint(1, len(title1))], title1), + ("title", title2[0 : random.randint(1, len(title2))], title2), ] for field, query, value in cases: - asset_technologies = [{'visibility': 'PUBLIC', field: f'{value}{n}'} for n in range(0, 2)] - model = self.generate_models(authenticate=True, - profile_academy=True, - role=1, - asset_technology=asset_technologies, - capability='read_technology') + asset_technologies = [{"visibility": "PUBLIC", field: f"{value}{n}"} for n in range(0, 2)] + model = self.generate_models( + authenticate=True, + profile_academy=True, + role=1, + asset_technology=asset_technologies, + capability="read_technology", + ) self.headers(academy=model.academy.id) - url = reverse_lazy('registry:academy_technology') + f'?like={query}' + url = reverse_lazy("registry:academy_technology") + f"?like={query}" response = self.client.get(url) json = response.json() expected = [get_serializer(x) for x in sorted(model.asset_technology, key=lambda x: x.slug, reverse=True)] @@ -392,12 +416,12 @@ def test_with_two_asset_technologies__passing_like__found(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('registry.AssetTechnology'), + self.bc.database.list_of("registry.AssetTechnology"), self.bc.format.to_dict(model.asset_technology), ) # teardown - self.bc.database.delete('registry.AssetTechnology') + 
self.bc.database.delete("registry.AssetTechnology") """ 🔽🔽🔽 GET with two AssetTechnology, passing parent @@ -405,14 +429,16 @@ def test_with_two_asset_technologies__passing_like__found(self): def test_with_two_asset_technologies__passing_parent__not_found(self): self.headers(academy=1) - asset_technologies = [{'parent_id': n} for n in range(1, 3)] - model = self.generate_models(authenticate=True, - profile_academy=True, - role=1, - asset=1, - asset_technology=asset_technologies, - capability='read_technology') - url = reverse_lazy('registry:academy_technology') + '?parent=3,4' + asset_technologies = [{"parent_id": n} for n in range(1, 3)] + model = self.generate_models( + authenticate=True, + profile_academy=True, + role=1, + asset=1, + asset_technology=asset_technologies, + capability="read_technology", + ) + url = reverse_lazy("registry:academy_technology") + "?parent=3,4" response = self.client.get(url) json = response.json() expected = [] @@ -420,20 +446,22 @@ def test_with_two_asset_technologies__passing_parent__not_found(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('registry.AssetTechnology'), + self.bc.database.list_of("registry.AssetTechnology"), self.bc.format.to_dict(model.asset_technology), ) def test_with_two_asset_technologies__passing_parent__found(self): self.headers(academy=1) - asset_technologies = [{'visibility': 'PUBLIC', 'parent_id': n} for n in range(1, 3)] - model = self.generate_models(authenticate=True, - profile_academy=True, - role=1, - asset=1, - asset_technology=asset_technologies, - capability='read_technology') - url = reverse_lazy('registry:academy_technology') + '?parent=1,2' + asset_technologies = [{"visibility": "PUBLIC", "parent_id": n} for n in range(1, 3)] + model = self.generate_models( + authenticate=True, + profile_academy=True, + role=1, + asset=1, + asset_technology=asset_technologies, + capability="read_technology", + ) + url = reverse_lazy("registry:academy_technology") + "?parent=1,2" response = self.client.get(url) json = response.json() expected = [ @@ -444,7 +472,7 @@ def test_with_two_asset_technologies__passing_parent__found(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('registry.AssetTechnology'), + self.bc.database.list_of("registry.AssetTechnology"), self.bc.format.to_dict(model.asset_technology), ) @@ -453,7 +481,7 @@ def test_with_two_asset_technologies__passing_parent__found(self): """ def test_with_two_asset_technologies__passing_visibility__not_found(self): - statuses = ['PUBLIC', 'UNLISTED', 'PRIVATE'] + statuses = ["PUBLIC", "UNLISTED", "PRIVATE"] query1 = random.choice(statuses) @@ -463,14 +491,16 @@ def test_with_two_asset_technologies__passing_visibility__not_found(self): statuses.pop(statuses.index(query2)) self.headers(academy=1) - asset_technologies = [{'visibility': statuses[0]} for _ in range(0, 2)] - model = self.generate_models(authenticate=True, - profile_academy=True, - role=1, - asset=1, - asset_technology=asset_technologies, - capability='read_technology') - url = reverse_lazy('registry:academy_technology') + f'?visibility={query1},{query2}' + asset_technologies = [{"visibility": statuses[0]} for _ in range(0, 2)] + model = self.generate_models( + authenticate=True, + profile_academy=True, + role=1, + asset=1, + asset_technology=asset_technologies, + capability="read_technology", + ) + url = 
reverse_lazy("registry:academy_technology") + f"?visibility={query1},{query2}" response = self.client.get(url) json = response.json() expected = [] @@ -478,12 +508,12 @@ def test_with_two_asset_technologies__passing_visibility__not_found(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('registry.AssetTechnology'), + self.bc.database.list_of("registry.AssetTechnology"), self.bc.format.to_dict(model.asset_technology), ) def test_with_two_asset_technologies__passing_visibility__found(self): - statuses = ['PUBLIC', 'UNLISTED', 'PRIVATE'] + statuses = ["PUBLIC", "UNLISTED", "PRIVATE"] query1 = random.choice(statuses) @@ -493,14 +523,16 @@ def test_with_two_asset_technologies__passing_visibility__found(self): statuses.pop(statuses.index(query2)) self.headers(academy=1) - asset_technologies = [{'visibility': s} for s in [query1, query2]] - model = self.generate_models(authenticate=True, - profile_academy=True, - role=1, - asset=1, - asset_technology=asset_technologies, - capability='read_technology') - url = reverse_lazy('registry:academy_technology') + f'?visibility={query1},{query2}' + asset_technologies = [{"visibility": s} for s in [query1, query2]] + model = self.generate_models( + authenticate=True, + profile_academy=True, + role=1, + asset=1, + asset_technology=asset_technologies, + capability="read_technology", + ) + url = reverse_lazy("registry:academy_technology") + f"?visibility={query1},{query2}" response = self.client.get(url) json = response.json() expected = [ @@ -511,7 +543,7 @@ def test_with_two_asset_technologies__passing_visibility__found(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('registry.AssetTechnology'), + self.bc.database.list_of("registry.AssetTechnology"), self.bc.format.to_dict(model.asset_technology), ) @@ -525,13 +557,10 @@ def test_with_two_asset_technologies__passing_slug__not_found(self): self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - role=1, - asset=1, - asset_technology=2, - capability='read_technology') - url = reverse_lazy('registry:academy_technology') + f'?slug={slug1},{slug2}' + model = self.generate_models( + authenticate=True, profile_academy=True, role=1, asset=1, asset_technology=2, capability="read_technology" + ) + url = reverse_lazy("registry:academy_technology") + f"?slug={slug1},{slug2}" response = self.client.get(url) json = response.json() expected = [] @@ -539,7 +568,7 @@ def test_with_two_asset_technologies__passing_slug__not_found(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('registry.AssetTechnology'), + self.bc.database.list_of("registry.AssetTechnology"), self.bc.format.to_dict(model.asset_technology), ) @@ -549,14 +578,16 @@ def test_with_two_asset_technologies__passing_slug__found(self): self.headers(academy=1) - asset_technologies = [{'visibility': 'PUBLIC', 'slug': s} for s in [slug1, slug2]] - model = self.generate_models(authenticate=True, - profile_academy=True, - role=1, - asset=1, - asset_technology=asset_technologies, - capability='read_technology') - url = reverse_lazy('registry:academy_technology') + f'?slug={slug1},{slug2}' + asset_technologies = [{"visibility": "PUBLIC", "slug": s} for s in [slug1, slug2]] + model = self.generate_models( + authenticate=True, + profile_academy=True, + 
role=1, + asset=1, + asset_technology=asset_technologies, + capability="read_technology", + ) + url = reverse_lazy("registry:academy_technology") + f"?slug={slug1},{slug2}" response = self.client.get(url) json = response.json() expected = [ @@ -567,7 +598,7 @@ def test_with_two_asset_technologies__passing_slug__found(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('registry.AssetTechnology'), + self.bc.database.list_of("registry.AssetTechnology"), self.bc.format.to_dict(model.asset_technology), ) @@ -581,14 +612,16 @@ def test_with_two_asset_technologies__passing_asset_slug__not_found(self): self.headers(academy=1) - asset_technologies = [{'featured_asset_id': n} for n in range(1, 3)] - model = self.generate_models(authenticate=True, - profile_academy=True, - role=1, - asset=2, - asset_technology=asset_technologies, - capability='read_technology') - url = reverse_lazy('registry:academy_technology') + f'?asset_slug={slug1},{slug2}' + asset_technologies = [{"featured_asset_id": n} for n in range(1, 3)] + model = self.generate_models( + authenticate=True, + profile_academy=True, + role=1, + asset=2, + asset_technology=asset_technologies, + capability="read_technology", + ) + url = reverse_lazy("registry:academy_technology") + f"?asset_slug={slug1},{slug2}" response = self.client.get(url) json = response.json() expected = [] @@ -596,7 +629,7 @@ def test_with_two_asset_technologies__passing_asset_slug__not_found(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('registry.AssetTechnology'), + self.bc.database.list_of("registry.AssetTechnology"), self.bc.format.to_dict(model.asset_technology), ) @@ -606,15 +639,17 @@ def test_with_two_asset_technologies__passing_asset_slug__found(self): self.headers(academy=1) - assets = [{'slug': s} for s in [slug1, slug2]] - asset_technologies = [{'visibility': 'PUBLIC', 'featured_asset_id': n} for n in range(1, 3)] - model = self.generate_models(authenticate=True, - profile_academy=True, - role=1, - asset=assets, - asset_technology=asset_technologies, - capability='read_technology') - url = reverse_lazy('registry:academy_technology') + f'?asset_slug={slug1},{slug2}' + assets = [{"slug": s} for s in [slug1, slug2]] + asset_technologies = [{"visibility": "PUBLIC", "featured_asset_id": n} for n in range(1, 3)] + model = self.generate_models( + authenticate=True, + profile_academy=True, + role=1, + asset=assets, + asset_technology=asset_technologies, + capability="read_technology", + ) + url = reverse_lazy("registry:academy_technology") + f"?asset_slug={slug1},{slug2}" response = self.client.get(url) json = response.json() expected = [ @@ -625,7 +660,7 @@ def test_with_two_asset_technologies__passing_asset_slug__found(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('registry.AssetTechnology'), + self.bc.database.list_of("registry.AssetTechnology"), self.bc.format.to_dict(model.asset_technology), ) @@ -634,7 +669,7 @@ def test_with_two_asset_technologies__passing_asset_slug__found(self): """ def test_with_two_asset_technologies__passing_asset_type__not_found(self): - statuses = ['PROJECT', 'EXERCISE', 'LESSON', 'QUIZ', 'VIDEO', 'ARTICLE'] + statuses = ["PROJECT", "EXERCISE", "LESSON", "QUIZ", "VIDEO", "ARTICLE"] query1 = random.choice(statuses) @@ -645,14 +680,16 @@ def 
test_with_two_asset_technologies__passing_asset_type__not_found(self): self.headers(academy=1) - asset_technologies = [{'featured_asset_id': n} for n in range(1, 3)] - model = self.generate_models(authenticate=True, - profile_academy=True, - role=1, - asset=2, - asset_technology=asset_technologies, - capability='read_technology') - url = reverse_lazy('registry:academy_technology') + f'?asset_slug={query1},{query2}' + asset_technologies = [{"featured_asset_id": n} for n in range(1, 3)] + model = self.generate_models( + authenticate=True, + profile_academy=True, + role=1, + asset=2, + asset_technology=asset_technologies, + capability="read_technology", + ) + url = reverse_lazy("registry:academy_technology") + f"?asset_slug={query1},{query2}" response = self.client.get(url) json = response.json() expected = [] @@ -660,12 +697,12 @@ def test_with_two_asset_technologies__passing_asset_type__not_found(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('registry.AssetTechnology'), + self.bc.database.list_of("registry.AssetTechnology"), self.bc.format.to_dict(model.asset_technology), ) def test_with_two_asset_technologies__passing_asset_type__found(self): - statuses = ['PROJECT', 'EXERCISE', 'LESSON', 'QUIZ', 'VIDEO', 'ARTICLE'] + statuses = ["PROJECT", "EXERCISE", "LESSON", "QUIZ", "VIDEO", "ARTICLE"] query1 = random.choice(statuses) @@ -676,15 +713,17 @@ def test_with_two_asset_technologies__passing_asset_type__found(self): self.headers(academy=1) - assets = [{'asset_type': s} for s in [query1, query2]] - asset_technologies = [{'visibility': 'PUBLIC', 'featured_asset_id': n} for n in range(1, 3)] - model = self.generate_models(authenticate=True, - profile_academy=True, - role=1, - asset=assets, - asset_technology=asset_technologies, - capability='read_technology') - url = reverse_lazy('registry:academy_technology') + f'?asset_type={query1},{query2}' + assets = [{"asset_type": s} for s in [query1, query2]] + asset_technologies = [{"visibility": "PUBLIC", "featured_asset_id": n} for n in range(1, 3)] + model = self.generate_models( + authenticate=True, + profile_academy=True, + role=1, + asset=assets, + asset_technology=asset_technologies, + capability="read_technology", + ) + url = reverse_lazy("registry:academy_technology") + f"?asset_type={query1},{query2}" response = self.client.get(url) json = response.json() expected = [ @@ -695,7 +734,7 @@ def test_with_two_asset_technologies__passing_asset_type__found(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('registry.AssetTechnology'), + self.bc.database.list_of("registry.AssetTechnology"), self.bc.format.to_dict(model.asset_technology), ) @@ -703,22 +742,28 @@ def test_with_two_asset_technologies__passing_asset_type__found(self): 🔽🔽🔽 GET spy extensions """ - @patch.object(APIViewExtensionHandlers, '_spy_extensions', MagicMock()) - @patch.object(APIViewExtensionHandlers, '_spy_extension_arguments', MagicMock()) + @patch.object(APIViewExtensionHandlers, "_spy_extensions", MagicMock()) + @patch.object(APIViewExtensionHandlers, "_spy_extension_arguments", MagicMock()) def test_spy_extensions(self): self.headers(academy=1) - model = self.generate_models(authenticate=True, - profile_academy=True, - role=1, - asset_technology=2, - capability='read_technology') - url = reverse_lazy('registry:academy_technology') + model = self.generate_models( + authenticate=True, 
profile_academy=True, role=1, asset_technology=2, capability="read_technology" + ) + url = reverse_lazy("registry:academy_technology") self.client.get(url) - self.assertEqual(APIViewExtensionHandlers._spy_extensions.call_args_list, [ - call(['CacheExtension', 'LanguageExtension', 'LookupExtension', 'PaginationExtension', 'SortExtension']), - ]) + self.assertEqual( + APIViewExtensionHandlers._spy_extensions.call_args_list, + [ + call( + ["CacheExtension", "LanguageExtension", "LookupExtension", "PaginationExtension", "SortExtension"] + ), + ], + ) - self.assertEqual(APIViewExtensionHandlers._spy_extension_arguments.call_args_list, [ - call(cache=TechnologyCache, sort='-slug', paginate=True), - ]) + self.assertEqual( + APIViewExtensionHandlers._spy_extension_arguments.call_args_list, + [ + call(cache=TechnologyCache, sort="-slug", paginate=True), + ], + ) diff --git a/breathecode/registry/tests/urls/v1/tests_asset.py b/breathecode/registry/tests/urls/v1/tests_asset.py index e98ee54cf..9a65de069 100644 --- a/breathecode/registry/tests/urls/v1/tests_asset.py +++ b/breathecode/registry/tests/urls/v1/tests_asset.py @@ -10,250 +10,238 @@ UTC_NOW = timezone.now() # enable this file to use the database -pytestmark = pytest.mark.usefixtures('db') +pytestmark = pytest.mark.usefixtures("db") def get_serializer(asset, data={}): asset_translations = {} for translation in asset.all_translations.all(): - asset_translations[translation.lang or 'null'] = translation.slug + asset_translations[translation.lang or "null"] = translation.slug return { - 'id': - asset.id, - 'slug': - asset.slug, - 'title': - asset.title, - 'asset_type': - asset.asset_type, - 'category': { - 'id': asset.category.id, - 'slug': asset.category.slug, - 'title': asset.category.title, + "id": asset.id, + "slug": asset.slug, + "title": asset.title, + "asset_type": asset.asset_type, + "category": { + "id": asset.category.id, + "slug": asset.category.slug, + "title": asset.category.title, }, - 'description': - asset.description, - 'assets_related': [{ - 'id': - related.id, - 'slug': - related.slug, - 'lang': - related.lang, - 'asset_type': - related.asset_type, - 'status': - related.status, - 'published_at': - related.published_at, - 'category': { - 'id': related.category.id, - 'slug': related.category.slug, - 'title': related.category.title, - }, - 'technologies': [get_serializer_technology(tech) - for tech in related.technologies.all()] if related.technologies else [], - } for related in asset.assets_related.all()] if asset.assets_related else [], - 'difficulty': - asset.difficulty, - 'duration': - asset.duration, - 'external': - asset.external, - 'gitpod': - asset.gitpod, - 'graded': - asset.graded, - 'intro_video_url': - asset.intro_video_url, - 'lang': - asset.lang, - 'preview': - asset.preview, - 'published_at': - asset.published_at, - 'readme_url': - asset.readme_url, - 'solution_video_url': - asset.solution_video_url, - 'solution_url': - asset.solution_url, - 'status': - asset.status, - 'url': - asset.url, - 'translations': - asset_translations, - 'technologies': [tech.slug for tech in asset.technologies.all()] if asset.technologies else [], - 'seo_keywords': [seo_keyword.slug for seo_keyword in asset.seo_keywords.all()] if asset.seo_keywords else [], - 'visibility': - asset.visibility, + "description": asset.description, + "assets_related": ( + [ + { + "id": related.id, + "slug": related.slug, + "lang": related.lang, + "asset_type": related.asset_type, + "status": related.status, + "published_at": related.published_at, + 
"category": { + "id": related.category.id, + "slug": related.category.slug, + "title": related.category.title, + }, + "technologies": ( + [get_serializer_technology(tech) for tech in related.technologies.all()] + if related.technologies + else [] + ), + } + for related in asset.assets_related.all() + ] + if asset.assets_related + else [] + ), + "difficulty": asset.difficulty, + "duration": asset.duration, + "external": asset.external, + "gitpod": asset.gitpod, + "graded": asset.graded, + "intro_video_url": asset.intro_video_url, + "lang": asset.lang, + "preview": asset.preview, + "published_at": asset.published_at, + "readme_url": asset.readme_url, + "solution_video_url": asset.solution_video_url, + "solution_url": asset.solution_url, + "status": asset.status, + "url": asset.url, + "translations": asset_translations, + "technologies": [tech.slug for tech in asset.technologies.all()] if asset.technologies else [], + "seo_keywords": [seo_keyword.slug for seo_keyword in asset.seo_keywords.all()] if asset.seo_keywords else [], + "visibility": asset.visibility, **data, } def get_serializer_technology(technology, data={}): return { - 'slug': technology.slug, - 'title': technology.title, - 'description': technology.description, - 'icon_url': technology.icon_url, - 'is_deprecated': technology.is_deprecated, - 'visibility': technology.visibility, + "slug": technology.slug, + "title": technology.title, + "description": technology.description, + "icon_url": technology.icon_url, + "is_deprecated": technology.is_deprecated, + "visibility": technology.visibility, **data, } def test_with_no_assets(bc: Breathecode, client): - url = reverse_lazy('registry:asset') + url = reverse_lazy("registry:asset") response = client.get(url) json = response.json() assert json == [] - assert bc.database.list_of('registry.Asset') == [] + assert bc.database.list_of("registry.Asset") == [] def test_one_asset(bc: Breathecode, client): - model = bc.database.create(asset={'status': 'PUBLISHED'}) + model = bc.database.create(asset={"status": "PUBLISHED"}) - url = reverse_lazy('registry:asset') + url = reverse_lazy("registry:asset") response = client.get(url) json = response.json() expected = [get_serializer(model.asset)] assert json == expected - assert bc.database.list_of('registry.Asset') == [bc.format.to_dict(model.asset)] + assert bc.database.list_of("registry.Asset") == [bc.format.to_dict(model.asset)] def test_many_assets(bc: Breathecode, client): - model = bc.database.create(asset=(3, {'status': 'PUBLISHED'})) + model = bc.database.create(asset=(3, {"status": "PUBLISHED"})) - url = reverse_lazy('registry:asset') + url = reverse_lazy("registry:asset") response = client.get(url) json = response.json() expected = [get_serializer(asset) for asset in model.asset] assert json == expected - assert bc.database.list_of('registry.Asset') == bc.format.to_dict(model.asset) + assert bc.database.list_of("registry.Asset") == bc.format.to_dict(model.asset) def test_assets_technologies_expand(bc: Breathecode, client): - technology = {'slug': 'learn-react', 'title': 'Learn React'} - model = bc.database.create(asset_technology=(1, technology), - asset=(3, { - 'technologies': 1, - 'status': 'PUBLISHED', - })) + technology = {"slug": "learn-react", "title": "Learn React"} + model = bc.database.create( + asset_technology=(1, technology), + asset=( + 3, + { + "technologies": 1, + "status": "PUBLISHED", + }, + ), + ) - url = reverse_lazy('registry:asset') + f'?expand=technologies' + url = reverse_lazy("registry:asset") + f"?expand=technologies" 
response = client.get(url) json = response.json() expected = [ - get_serializer(asset, data={'technologies': [get_serializer_technology(model.asset_technology)]}) + get_serializer(asset, data={"technologies": [get_serializer_technology(model.asset_technology)]}) for asset in model.asset ] assert json == expected - assert bc.database.list_of('registry.Asset') == bc.format.to_dict(model.asset) + assert bc.database.list_of("registry.Asset") == bc.format.to_dict(model.asset) def test_assets_with_slug(bc: Breathecode, client): assets = [ { - 'slug': 'randy', - 'status': 'PUBLISHED', + "slug": "randy", + "status": "PUBLISHED", }, { - 'slug': 'jackson', - 'status': 'PUBLISHED', + "slug": "jackson", + "status": "PUBLISHED", }, ] model = bc.database.create(asset=assets) - url = reverse_lazy('registry:asset') + '?slug=randy' + url = reverse_lazy("registry:asset") + "?slug=randy" response = client.get(url) json = response.json() expected = [get_serializer(model.asset[0])] assert json == expected - assert bc.database.list_of('registry.Asset') == bc.format.to_dict(model.asset) + assert bc.database.list_of("registry.Asset") == bc.format.to_dict(model.asset) def test_assets_with_lang(bc: Breathecode, client): assets = [ { - 'lang': 'us', - 'status': 'PUBLISHED', + "lang": "us", + "status": "PUBLISHED", }, { - 'lang': 'es', - 'status': 'PUBLISHED', + "lang": "es", + "status": "PUBLISHED", }, ] model = bc.database.create(asset=assets) - url = reverse_lazy('registry:asset') + '?language=en' + url = reverse_lazy("registry:asset") + "?language=en" response = client.get(url) json = response.json() expected = [get_serializer(model.asset[0])] assert json == expected - assert bc.database.list_of('registry.Asset') == bc.format.to_dict(model.asset) + assert bc.database.list_of("registry.Asset") == bc.format.to_dict(model.asset) def test_assets__hidden_all_non_visibilities(bc: Breathecode, client): assets = [ { - 'visibility': 'PUBLIC', - 'status': 'PUBLISHED', + "visibility": "PUBLIC", + "status": "PUBLISHED", }, { - 'visibility': 'PRIVATE', - 'status': 'PUBLISHED', + "visibility": "PRIVATE", + "status": "PUBLISHED", }, { - 'visibility': 'UNLISTED', - 'status': 'PUBLISHED', + "visibility": "UNLISTED", + "status": "PUBLISHED", }, ] model = bc.database.create(asset=assets) - url = reverse_lazy('registry:asset') + url = reverse_lazy("registry:asset") response = client.get(url) json = response.json() expected = [get_serializer(model.asset[0])] assert json == expected - assert bc.database.list_of('registry.Asset') == bc.format.to_dict(model.asset) + assert bc.database.list_of("registry.Asset") == bc.format.to_dict(model.asset) def test_assets_with_bad_academy(bc: Breathecode, client): model = bc.database.create(asset=2) - url = reverse_lazy('registry:asset') + '?academy=banana' + url = reverse_lazy("registry:asset") + "?academy=banana" response = client.get(url) json = response.json() - expected = {'detail': 'academy-id-must-be-integer', 'status_code': 400} + expected = {"detail": "academy-id-must-be-integer", "status_code": 400} assert json == expected assert response.status_code == 400 - assert bc.database.list_of('registry.Asset') == bc.format.to_dict(model.asset) + assert bc.database.list_of("registry.Asset") == bc.format.to_dict(model.asset) def test_assets_with_academy(bc: Breathecode, client): @@ -261,90 +249,107 @@ def test_assets_with_academy(bc: Breathecode, client): academies = bc.database.create(academy=2) assets = [ { - 'academy': academies.academy[0], - 'status': 'PUBLISHED', + "academy": 
academies.academy[0], + "status": "PUBLISHED", }, { - 'academy': academies.academy[1], - 'status': 'PUBLISHED', + "academy": academies.academy[1], + "status": "PUBLISHED", }, ] model = bc.database.create(asset=assets) - url = reverse_lazy('registry:asset') + '?academy=2' + url = reverse_lazy("registry:asset") + "?academy=2" response = client.get(url) json = response.json() expected = [get_serializer(model.asset[1])] assert json == expected - assert bc.database.list_of('registry.Asset') == bc.format.to_dict(model.asset) + assert bc.database.list_of("registry.Asset") == bc.format.to_dict(model.asset) def test_assets_with_category(bc: Breathecode, client): - categories = [{'slug': 'how-to'}, {'slug': 'como'}] + categories = [{"slug": "how-to"}, {"slug": "como"}] model_categories = bc.database.create(asset_category=categories) - assets = [{ - 'category': model_categories.asset_category[0], - 'status': 'PUBLISHED', - }, { - 'category': model_categories.asset_category[1], - 'status': 'PUBLISHED', - }] + assets = [ + { + "category": model_categories.asset_category[0], + "status": "PUBLISHED", + }, + { + "category": model_categories.asset_category[1], + "status": "PUBLISHED", + }, + ] model = bc.database.create(asset=assets) - url = reverse_lazy('registry:asset') + '?category=how-to' + url = reverse_lazy("registry:asset") + "?category=how-to" response = client.get(url) json = response.json() expected = [get_serializer(model.asset[0])] assert json == expected - assert bc.database.list_of('registry.Asset') == bc.format.to_dict(model.asset) + assert bc.database.list_of("registry.Asset") == bc.format.to_dict(model.asset) -@patch('breathecode.utils.api_view_extensions.extensions.lookup_extension.compile_lookup', - MagicMock(wraps=lookup_extension.compile_lookup)) +@patch( + "breathecode.utils.api_view_extensions.extensions.lookup_extension.compile_lookup", + MagicMock(wraps=lookup_extension.compile_lookup), +) def test_lookup_extension(bc: Breathecode, client): assets = [ { - 'asset_type': 'LESSON', - 'status': 'PUBLISHED', + "asset_type": "LESSON", + "status": "PUBLISHED", }, { - 'asset_type': 'PROJECT', - 'status': 'PUBLISHED', + "asset_type": "PROJECT", + "status": "PUBLISHED", }, ] model = bc.database.create(asset=assets) args, kwargs = bc.format.call( - 'en', + "en", strings={ - 'iexact': [ - 'test_status', - 'sync_status', + "iexact": [ + "test_status", + "sync_status", ], - 'in': ['difficulty', 'status', 'asset_type', 'category__slug', 'technologies__slug', 'seo_keywords__slug'], + "in": ["difficulty", "status", "asset_type", "category__slug", "technologies__slug", "seo_keywords__slug"], }, - ids=['author', 'owner'], + ids=["author", "owner"], bools={ - 'exact': ['with_video', 'interactive', 'graded'], + "exact": ["with_video", "interactive", "graded"], }, overwrite={ - 'category': 'category__slug', - 'technologies': 'technologies__slug', - 'seo_keywords': 'seo_keywords__slug' - }) + "category": "category__slug", + "technologies": "technologies__slug", + "seo_keywords": "seo_keywords__slug", + }, + ) query = bc.format.lookup(*args, **kwargs) - url = reverse_lazy('registry:asset') + '?' + bc.format.querystring(query) + url = reverse_lazy("registry:asset") + "?" 
+ bc.format.querystring(query) assert [x for x in query] == [ - 'author', 'owner', 'test_status', 'sync_status', 'difficulty', 'status', 'asset_type', 'category', - 'technologies', 'seo_keywords', 'with_video', 'interactive', 'graded' + "author", + "owner", + "test_status", + "sync_status", + "difficulty", + "status", + "asset_type", + "category", + "technologies", + "seo_keywords", + "with_video", + "interactive", + "graded", ] response = client.get(url) @@ -354,4 +359,4 @@ def test_lookup_extension(bc: Breathecode, client): expected = [] assert json == expected - assert bc.database.list_of('registry.Asset') == bc.format.to_dict(model.asset) + assert bc.database.list_of("registry.Asset") == bc.format.to_dict(model.asset) diff --git a/breathecode/registry/tests/urls/v1/tests_asset_thumbnail_slug.py b/breathecode/registry/tests/urls/v1/tests_asset_thumbnail_slug.py index b09c0dc60..c2c44477a 100644 --- a/breathecode/registry/tests/urls/v1/tests_asset_thumbnail_slug.py +++ b/breathecode/registry/tests/urls/v1/tests_asset_thumbnail_slug.py @@ -1,6 +1,7 @@ """ Test /answer """ + from random import randint from unittest.mock import MagicMock, call, patch @@ -17,24 +18,26 @@ class RegistryTestSuite(RegistryTestCase): 🔽🔽🔽 GET without Asset """ - @patch('breathecode.registry.actions.AssetThumbnailGenerator.__init__', MagicMock(return_value=None)) + @patch("breathecode.registry.actions.AssetThumbnailGenerator.__init__", MagicMock(return_value=None)) def test__get__without_asset(self): cases = [(True, status.HTTP_301_MOVED_PERMANENTLY), (False, status.HTTP_302_FOUND)] - url = reverse_lazy('registry:asset_thumbnail_slug', kwargs={'asset_slug': 'slug'}) + url = reverse_lazy("registry:asset_thumbnail_slug", kwargs={"asset_slug": "slug"}) for redirect_permanently, current_status in cases: redirect_url = self.bc.fake.url() - with patch('breathecode.registry.actions.AssetThumbnailGenerator.get_thumbnail_url', - MagicMock(return_value=(redirect_url, redirect_permanently))): + with patch( + "breathecode.registry.actions.AssetThumbnailGenerator.get_thumbnail_url", + MagicMock(return_value=(redirect_url, redirect_permanently)), + ): response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - expected = '' + expected = "" self.assertEqual(content, expected) self.assertEqual(response.url, redirect_url) self.assertEqual(response.status_code, current_status) - self.assertEqual(self.bc.database.list_of('registry.Asset'), []) + self.assertEqual(self.bc.database.list_of("registry.Asset"), []) self.assertEqual(AssetThumbnailGenerator.__init__.call_args_list, [call(None, 0, 0)]) self.assertEqual(AssetThumbnailGenerator.get_thumbnail_url.call_args_list, [call()]) @@ -46,32 +49,41 @@ def test__get__without_asset(self): 🔽🔽🔽 GET without Asset, passing width and height """ - @patch('breathecode.registry.actions.AssetThumbnailGenerator.__init__', MagicMock(return_value=None)) + @patch("breathecode.registry.actions.AssetThumbnailGenerator.__init__", MagicMock(return_value=None)) def test__get__without_asset__passing_width__passing_height(self): cases = [(True, status.HTTP_301_MOVED_PERMANENTLY), (False, status.HTTP_302_FOUND)] width = randint(1, 2000) height = randint(1, 2000) - url = reverse_lazy('registry:asset_thumbnail_slug', kwargs={'asset_slug': 'slug' - }) + f'?width={width}&height={height}' + url = ( + reverse_lazy("registry:asset_thumbnail_slug", kwargs={"asset_slug": "slug"}) + + f"?width={width}&height={height}" + ) for redirect_permanently, current_status in cases: redirect_url = 
self.bc.fake.url() - with patch('breathecode.registry.actions.AssetThumbnailGenerator.get_thumbnail_url', - MagicMock(return_value=(redirect_url, redirect_permanently))): + with patch( + "breathecode.registry.actions.AssetThumbnailGenerator.get_thumbnail_url", + MagicMock(return_value=(redirect_url, redirect_permanently)), + ): response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - expected = '' + expected = "" self.assertEqual(content, expected) self.assertEqual(response.url, redirect_url) self.assertEqual(response.status_code, current_status) - self.assertEqual(self.bc.database.list_of('registry.Asset'), []) - - self.assertEqual(str(AssetThumbnailGenerator.__init__.call_args_list), str([ - call(None, width, height), - ])) + self.assertEqual(self.bc.database.list_of("registry.Asset"), []) + + self.assertEqual( + str(AssetThumbnailGenerator.__init__.call_args_list), + str( + [ + call(None, width, height), + ] + ), + ) self.assertEqual(AssetThumbnailGenerator.get_thumbnail_url.call_args_list, [call()]) # teardown @@ -82,28 +94,33 @@ def test__get__without_asset__passing_width__passing_height(self): 🔽🔽🔽 GET with Asset """ - @patch('breathecode.registry.actions.AssetThumbnailGenerator.__init__', MagicMock(return_value=None)) + @patch("breathecode.registry.actions.AssetThumbnailGenerator.__init__", MagicMock(return_value=None)) def test__get__with_asset(self): cases = [(True, status.HTTP_301_MOVED_PERMANENTLY), (False, status.HTTP_302_FOUND)] model = self.bc.database.create(asset=1) - url = reverse_lazy('registry:asset_thumbnail_slug', kwargs={'asset_slug': model.asset.slug}) + url = reverse_lazy("registry:asset_thumbnail_slug", kwargs={"asset_slug": model.asset.slug}) for redirect_permanently, current_status in cases: redirect_url = self.bc.fake.url() - with patch('breathecode.registry.actions.AssetThumbnailGenerator.get_thumbnail_url', - MagicMock(return_value=(redirect_url, redirect_permanently))): + with patch( + "breathecode.registry.actions.AssetThumbnailGenerator.get_thumbnail_url", + MagicMock(return_value=(redirect_url, redirect_permanently)), + ): response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - expected = '' + expected = "" self.assertEqual(content, expected) self.assertEqual(response.url, redirect_url) self.assertEqual(response.status_code, current_status) - self.assertEqual(self.bc.database.list_of('registry.Asset'), [ - self.bc.format.to_dict(model.asset), - ]) + self.assertEqual( + self.bc.database.list_of("registry.Asset"), + [ + self.bc.format.to_dict(model.asset), + ], + ) self.assertEqual(AssetThumbnailGenerator.__init__.call_args_list, [call(model.asset, 0, 0)]) self.assertEqual(AssetThumbnailGenerator.get_thumbnail_url.call_args_list, [call()]) @@ -116,36 +133,47 @@ def test__get__with_asset(self): 🔽🔽🔽 GET with Asset, passing width and height """ - @patch('breathecode.registry.actions.AssetThumbnailGenerator.__init__', MagicMock(return_value=None)) + @patch("breathecode.registry.actions.AssetThumbnailGenerator.__init__", MagicMock(return_value=None)) def test__get__with_asset__passing_width__passing_height(self): cases = [(True, status.HTTP_301_MOVED_PERMANENTLY), (False, status.HTTP_302_FOUND)] model = self.bc.database.create(asset=1) width = randint(1, 2000) height = randint(1, 2000) - url = reverse_lazy('registry:asset_thumbnail_slug', kwargs={'asset_slug': model.asset.slug - }) + f'?width={width}&height={height}' + url = ( + reverse_lazy("registry:asset_thumbnail_slug", kwargs={"asset_slug": 
model.asset.slug}) + + f"?width={width}&height={height}" + ) for redirect_permanently, current_status in cases: redirect_url = self.bc.fake.url() - with patch('breathecode.registry.actions.AssetThumbnailGenerator.get_thumbnail_url', - MagicMock(return_value=(redirect_url, redirect_permanently))): + with patch( + "breathecode.registry.actions.AssetThumbnailGenerator.get_thumbnail_url", + MagicMock(return_value=(redirect_url, redirect_permanently)), + ): response = self.client.get(url) content = self.bc.format.from_bytes(response.content) - expected = '' + expected = "" self.assertEqual(content, expected) self.assertEqual(response.url, redirect_url) self.assertEqual(response.status_code, current_status) - self.assertEqual(self.bc.database.list_of('registry.Asset'), [ - self.bc.format.to_dict(model.asset), - ]) - - self.assertEqual(str(AssetThumbnailGenerator.__init__.call_args_list), - str([ - call(model.asset, width, height), - ])) + self.assertEqual( + self.bc.database.list_of("registry.Asset"), + [ + self.bc.format.to_dict(model.asset), + ], + ) + + self.assertEqual( + str(AssetThumbnailGenerator.__init__.call_args_list), + str( + [ + call(model.asset, width, height), + ] + ), + ) self.assertEqual(AssetThumbnailGenerator.get_thumbnail_url.call_args_list, [call()]) # teardown diff --git a/breathecode/registry/tests/urls/v1/tests_technology.py b/breathecode/registry/tests/urls/v1/tests_technology.py index 77c693b34..738035a4c 100644 --- a/breathecode/registry/tests/urls/v1/tests_technology.py +++ b/breathecode/registry/tests/urls/v1/tests_technology.py @@ -1,6 +1,7 @@ """ Test /answer """ + import random from django.urls.base import reverse_lazy @@ -11,14 +12,14 @@ def get_serializer(asset_technology, assets=[], asset_technologies=[]): return { - 'description': asset_technology.description, - 'icon_url': asset_technology.icon_url, - 'lang': None, - 'is_deprecated': asset_technology.is_deprecated, - 'parent': None, - 'slug': asset_technology.slug, - 'title': asset_technology.title, - 'visibility': asset_technology.visibility, + "description": asset_technology.description, + "icon_url": asset_technology.icon_url, + "lang": None, + "is_deprecated": asset_technology.is_deprecated, + "parent": None, + "slug": asset_technology.slug, + "title": asset_technology.title, + "visibility": asset_technology.visibility, } @@ -28,7 +29,7 @@ class RegistryTestSuite(RegistryTestCase): """ def test_without_auth(self): - url = reverse_lazy('registry:technology') + url = reverse_lazy("registry:technology") response = self.client.get(url) json = response.json() @@ -36,7 +37,7 @@ def test_without_auth(self): self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(self.bc.database.list_of('registry.Asset'), []) + self.assertEqual(self.bc.database.list_of("registry.Asset"), []) def test_with_two_asset_technologies__passing_sort_priority__not_found_for_get_technologies(self): cases = ( @@ -51,21 +52,22 @@ def test_with_two_asset_technologies__passing_sort_priority__not_found_for_get_t while query == sort_priority: sort_priority = random.choice(cases) - asset_technologies = [{ - 'sort_priority': sort_priority, - 'slug': self.bc.fake.slug(), - 'title': self.bc.fake.slug() - } for _ in range(0, 2)] + asset_technologies = [ + {"sort_priority": sort_priority, "slug": self.bc.fake.slug(), "title": self.bc.fake.slug()} + for _ in range(0, 2) + ] - model = self.generate_models(authenticate=True, - profile_academy=True, - role=1, - asset_technology=asset_technologies, - 
capability='read_technology') + model = self.generate_models( + authenticate=True, + profile_academy=True, + role=1, + asset_technology=asset_technologies, + capability="read_technology", + ) self.headers(academy=model.academy.id) - url = reverse_lazy('registry:technology') + f'?sort_priority={query}' + url = reverse_lazy("registry:technology") + f"?sort_priority={query}" response = self.client.get(url) json = response.json() expected = [] @@ -73,12 +75,12 @@ def test_with_two_asset_technologies__passing_sort_priority__not_found_for_get_t self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('registry.AssetTechnology'), + self.bc.database.list_of("registry.AssetTechnology"), self.bc.format.to_dict(model.asset_technology), ) # teardown - self.bc.database.delete('registry.AssetTechnology') + self.bc.database.delete("registry.AssetTechnology") def test_with_two_asset_technologies__passing_sort_priority__found_for_get_technologies(self): cases = ( @@ -90,21 +92,22 @@ def test_with_two_asset_technologies__passing_sort_priority__found_for_get_techn sort_priority = query - asset_technologies = [{ - 'sort_priority': sort_priority, - 'slug': self.bc.fake.slug(), - 'title': self.bc.fake.slug() - } for _ in range(0, 2)] + asset_technologies = [ + {"sort_priority": sort_priority, "slug": self.bc.fake.slug(), "title": self.bc.fake.slug()} + for _ in range(0, 2) + ] - model = self.generate_models(authenticate=True, - profile_academy=True, - role=1, - asset_technology=asset_technologies, - capability='read_technology') + model = self.generate_models( + authenticate=True, + profile_academy=True, + role=1, + asset_technology=asset_technologies, + capability="read_technology", + ) self.headers(academy=model.academy.id) - url = reverse_lazy('registry:technology') + f'?sort_priority={query}' + url = reverse_lazy("registry:technology") + f"?sort_priority={query}" response = self.client.get(url) json = response.json() expected = [ @@ -114,9 +117,9 @@ def test_with_two_asset_technologies__passing_sort_priority__found_for_get_techn self.assertEqual(json, expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - self.bc.database.list_of('registry.AssetTechnology'), + self.bc.database.list_of("registry.AssetTechnology"), self.bc.format.to_dict(model.asset_technology), ) - #teardown - self.bc.database.delete('registry.AssetTechnology') + # teardown + self.bc.database.delete("registry.AssetTechnology") diff --git a/breathecode/registry/tests/urls/v2/tests_academy_asset_slug.py b/breathecode/registry/tests/urls/v2/tests_academy_asset_slug.py index bb5dee177..51b5945d8 100644 --- a/breathecode/registry/tests/urls/v2/tests_academy_asset_slug.py +++ b/breathecode/registry/tests/urls/v2/tests_academy_asset_slug.py @@ -16,154 +16,153 @@ def get_asset_category(category): - return {'id': category.id, 'slug': category.slug, 'title': category.title} + return {"id": category.id, "slug": category.slug, "title": category.title} def get_serializer(bc: Breathecode, asset, asset_category=None, data={}): return { - 'assessment': asset.assessment, - 'asset_type': asset.asset_type, - 'author': asset.author, - 'authors_username': None, - 'category': get_asset_category(asset_category) if asset_category else None, - 'cleaning_status': asset.cleaning_status, - 'cleaning_status_details': None, - 'clusters': [], - 'assets_related': [], - 'previous_versions': [], - 'description': None, - 'difficulty': None, - 'readme_updated_at': 
None, - 'duration': None, - 'external': False, - 'gitpod': False, - 'graded': False, - 'id': asset.id, - 'intro_video_url': None, - 'is_seo_tracked': True, - 'lang': asset.lang, - 'last_synch_at': None, - 'last_test_at': None, - 'last_cleaning_at': None, - 'last_seo_scan_at': None, - 'optimization_rating': None, - 'owner': None, - 'preview': None, - 'published_at': None, - 'readme_url': None, - 'requirements': None, - 'seo_json_status': None, - 'seo_keywords': [], - 'slug': asset.slug, - 'solution_video_url': None, - 'solution_url': None, - 'status': 'NOT_STARTED', - 'status_text': None, - 'sync_status': None, - 'technologies': [], - 'test_status': None, - 'title': asset.title, - 'translations': {}, - 'url': None, - 'visibility': 'PUBLIC', - 'created_at': bc.datetime.to_iso_string(asset.created_at), - 'updated_at': bc.datetime.to_iso_string(asset.updated_at), + "assessment": asset.assessment, + "asset_type": asset.asset_type, + "author": asset.author, + "authors_username": None, + "category": get_asset_category(asset_category) if asset_category else None, + "cleaning_status": asset.cleaning_status, + "cleaning_status_details": None, + "clusters": [], + "assets_related": [], + "previous_versions": [], + "description": None, + "difficulty": None, + "readme_updated_at": None, + "duration": None, + "external": False, + "gitpod": False, + "graded": False, + "id": asset.id, + "intro_video_url": None, + "is_seo_tracked": True, + "lang": asset.lang, + "last_synch_at": None, + "last_test_at": None, + "last_cleaning_at": None, + "last_seo_scan_at": None, + "optimization_rating": None, + "owner": None, + "preview": None, + "published_at": None, + "readme_url": None, + "requirements": None, + "seo_json_status": None, + "seo_keywords": [], + "slug": asset.slug, + "solution_video_url": None, + "solution_url": None, + "status": "NOT_STARTED", + "status_text": None, + "sync_status": None, + "technologies": [], + "test_status": None, + "title": asset.title, + "translations": {}, + "url": None, + "visibility": "PUBLIC", + "created_at": bc.datetime.to_iso_string(asset.created_at), + "updated_at": bc.datetime.to_iso_string(asset.updated_at), **data, } @pytest.fixture(autouse=True) def setup(db, monkeypatch): - monkeypatch.setattr('breathecode.registry.signals.asset_slug_modified.send_robust', MagicMock()) + monkeypatch.setattr("breathecode.registry.signals.asset_slug_modified.send_robust", MagicMock()) yield def test_no_auth(bc: Breathecode, client: APIClient): """Test /certificate without auth""" - url = reverse_lazy('v2:registry:academy_asset_slug', kwargs={'asset_slug': 'model_slug'}) + url = reverse_lazy("v2:registry:academy_asset_slug", kwargs={"asset_slug": "model_slug"}) response = client.get(url) json = response.json() assert json == { - 'detail': 'Authentication credentials were not provided.', - 'status_code': status.HTTP_401_UNAUTHORIZED + "detail": "Authentication credentials were not provided.", + "status_code": status.HTTP_401_UNAUTHORIZED, } assert response.status_code == status.HTTP_401_UNAUTHORIZED - assert bc.database.list_of('registry.Asset') == [] + assert bc.database.list_of("registry.Asset") == [] def test_no_capability(bc: Breathecode, client: APIClient): """Test /certificate without auth""" - url = reverse_lazy('v2:registry:academy_asset_slug', kwargs={'asset_slug': 'model_slug'}) + url = reverse_lazy("v2:registry:academy_asset_slug", kwargs={"asset_slug": "model_slug"}) model = bc.database.create(user=1) client.force_authenticate(user=model.user) response = client.get(url, 
HTTP_ACADEMY=1) json = response.json() expected = { - 'status_code': 403, - 'detail': "You (user: 1) don't have this capability: read_asset for academy 1", + "status_code": 403, + "detail": "You (user: 1) don't have this capability: read_asset for academy 1", } assert json == expected assert response.status_code == status.HTTP_403_FORBIDDEN - assert bc.database.list_of('registry.Asset') == [] + assert bc.database.list_of("registry.Asset") == [] def test_no_consumables(bc: Breathecode, client: APIClient): """Test /certificate without auth""" - url = reverse_lazy('v2:registry:academy_asset_slug', kwargs={'asset_slug': 'model_slug'}) - model = bc.database.create(user=1, profile_academy=1, role=1, capability='read_asset') + url = reverse_lazy("v2:registry:academy_asset_slug", kwargs={"asset_slug": "model_slug"}) + model = bc.database.create(user=1, profile_academy=1, role=1, capability="read_asset") client.force_authenticate(user=model.user) response = client.get(url, HTTP_ACADEMY=1) json = response.json() expected = { - 'detail': 'asset-not-found', - 'status_code': 404, + "detail": "asset-not-found", + "status_code": 404, } assert json == expected assert response.status_code == status.HTTP_404_NOT_FOUND - assert bc.database.list_of('registry.Asset') == [] + assert bc.database.list_of("registry.Asset") == [] def test_no_asset(bc: Breathecode, client: APIClient): """Test /certificate without auth""" - model = bc.database.create(user=1, - profile_academy=1, - role=1, - capability='read_asset', - service={'slug': 'read-lesson'}, - consumable=1) + model = bc.database.create( + user=1, profile_academy=1, role=1, capability="read_asset", service={"slug": "read-lesson"}, consumable=1 + ) client.force_authenticate(user=model.user) - url = reverse_lazy('v2:registry:academy_asset_slug', kwargs={'asset_slug': 'model_slug'}) + url = reverse_lazy("v2:registry:academy_asset_slug", kwargs={"asset_slug": "model_slug"}) response = client.get(url, HTTP_ACADEMY=1) json = response.json() expected = { - 'detail': 'asset-not-found', - 'status_code': 404, + "detail": "asset-not-found", + "status_code": 404, } assert json == expected assert response.status_code == status.HTTP_404_NOT_FOUND - assert bc.database.list_of('registry.Asset') == [] + assert bc.database.list_of("registry.Asset") == [] def test_with_asset(bc: Breathecode, client: APIClient): """Test /certificate without auth""" - model = bc.database.create(user=1, - profile_academy=1, - role=1, - capability='read_asset', - service={'slug': 'read-lesson'}, - consumable=1, - asset=1, - asset_category=1, - academy=1) + model = bc.database.create( + user=1, + profile_academy=1, + role=1, + capability="read_asset", + service={"slug": "read-lesson"}, + consumable=1, + asset=1, + asset_category=1, + academy=1, + ) client.force_authenticate(user=model.user) - url = reverse_lazy('v2:registry:academy_asset_slug', kwargs={'asset_slug': model.asset.slug}) + url = reverse_lazy("v2:registry:academy_asset_slug", kwargs={"asset_slug": model.asset.slug}) response = client.get(url, HTTP_ACADEMY=1) json = response.json() @@ -171,64 +170,65 @@ def test_with_asset(bc: Breathecode, client: APIClient): assert json == expected assert response.status_code == status.HTTP_200_OK - assert bc.database.list_of('registry.Asset') == [bc.format.to_dict(model.asset)] + assert bc.database.list_of("registry.Asset") == [bc.format.to_dict(model.asset)] # Given: A no SAAS student who has paid # When: auth # Then: response 200 -@pytest.mark.parametrize('cohort_user', [ - { - 'finantial_status': 
'FULLY_PAID', - 'educational_status': 'ACTIVE', - }, - { - 'finantial_status': 'UP_TO_DATE', - 'educational_status': 'ACTIVE', - }, - { - 'finantial_status': 'FULLY_PAID', - 'educational_status': 'GRADUATED', - }, - { - 'finantial_status': 'UP_TO_DATE', - 'educational_status': 'GRADUATED', - }, -]) -@pytest.mark.parametrize('academy, cohort', [ - ( +@pytest.mark.parametrize( + "cohort_user", + [ { - 'available_as_saas': True + "finantial_status": "FULLY_PAID", + "educational_status": "ACTIVE", }, { - 'available_as_saas': False + "finantial_status": "UP_TO_DATE", + "educational_status": "ACTIVE", }, - ), - ( { - 'available_as_saas': False + "finantial_status": "FULLY_PAID", + "educational_status": "GRADUATED", }, { - 'available_as_saas': None + "finantial_status": "UP_TO_DATE", + "educational_status": "GRADUATED", }, - ), -]) -def test_with_asset__no_saas__finantial_status_no_late(bc: Breathecode, client: APIClient, academy, cohort, - cohort_user): + ], +) +@pytest.mark.parametrize( + "academy, cohort", + [ + ( + {"available_as_saas": True}, + {"available_as_saas": False}, + ), + ( + {"available_as_saas": False}, + {"available_as_saas": None}, + ), + ], +) +def test_with_asset__no_saas__finantial_status_no_late( + bc: Breathecode, client: APIClient, academy, cohort, cohort_user +): """Test /certificate without auth""" - model = bc.database.create(user=1, - profile_academy=1, - role=1, - capability='read_asset', - service={'slug': 'read-lesson'}, - consumable=1, - asset=1, - asset_category=1, - academy=academy, - cohort=cohort, - cohort_user=cohort_user) + model = bc.database.create( + user=1, + profile_academy=1, + role=1, + capability="read_asset", + service={"slug": "read-lesson"}, + consumable=1, + asset=1, + asset_category=1, + academy=academy, + cohort=cohort, + cohort_user=cohort_user, + ) client.force_authenticate(user=model.user) - url = reverse_lazy('v2:registry:academy_asset_slug', kwargs={'asset_slug': model.asset.slug}) + url = reverse_lazy("v2:registry:academy_asset_slug", kwargs={"asset_slug": model.asset.slug}) response = client.get(url, HTTP_ACADEMY=1) json = response.json() @@ -236,57 +236,54 @@ def test_with_asset__no_saas__finantial_status_no_late(bc: Breathecode, client: assert json == expected assert response.status_code == status.HTTP_200_OK - assert bc.database.list_of('registry.Asset') == [bc.format.to_dict(model.asset)] + assert bc.database.list_of("registry.Asset") == [bc.format.to_dict(model.asset)] # Given: A no SAAS student who hasn't paid # When: auth # Then: response 402 -@pytest.mark.parametrize('academy, cohort', [ - ( - { - 'available_as_saas': True - }, - { - 'available_as_saas': False - }, - ), - ( - { - 'available_as_saas': False - }, - { - 'available_as_saas': None - }, - ), -]) +@pytest.mark.parametrize( + "academy, cohort", + [ + ( + {"available_as_saas": True}, + {"available_as_saas": False}, + ), + ( + {"available_as_saas": False}, + {"available_as_saas": None}, + ), + ], +) def test_with_asset__no_saas__finantial_status_late(bc: Breathecode, client: APIClient, academy, cohort, fake): """Test /certificate without auth""" - cohort_user = {'finantial_status': 'LATE', 'educational_status': 'ACTIVE'} + cohort_user = {"finantial_status": "LATE", "educational_status": "ACTIVE"} slug = fake.slug() - model = bc.database.create(user=1, - profile_academy=1, - role=1, - capability='read_asset', - service={'slug': 'read-lesson'}, - consumable=1, - asset={'slug': slug}, - syllabus_version={ - 'json': { - 'x': slug, - }, - }, - asset_category=1, - 
academy=academy, - cohort=cohort, - cohort_user=cohort_user) + model = bc.database.create( + user=1, + profile_academy=1, + role=1, + capability="read_asset", + service={"slug": "read-lesson"}, + consumable=1, + asset={"slug": slug}, + syllabus_version={ + "json": { + "x": slug, + }, + }, + asset_category=1, + academy=academy, + cohort=cohort, + cohort_user=cohort_user, + ) client.force_authenticate(user=model.user) - url = reverse_lazy('v2:registry:academy_asset_slug', kwargs={'asset_slug': model.asset.slug}) + url = reverse_lazy("v2:registry:academy_asset_slug", kwargs={"asset_slug": model.asset.slug}) response = client.get(url, HTTP_ACADEMY=1) json = response.json() - expected = {'detail': 'cohort-user-status-later', 'status_code': 402} + expected = {"detail": "cohort-user-status-later", "status_code": 402} assert json == expected assert response.status_code == status.HTTP_402_PAYMENT_REQUIRED - assert bc.database.list_of('registry.Asset') == [bc.format.to_dict(model.asset)] + assert bc.database.list_of("registry.Asset") == [bc.format.to_dict(model.asset)] diff --git a/breathecode/registry/urls/v1.py b/breathecode/registry/urls/v1.py index eee43e5cb..a0470b2c5 100644 --- a/breathecode/registry/urls/v1.py +++ b/breathecode/registry/urls/v1.py @@ -28,42 +28,42 @@ AssetSupersedesView, ) -app_name = 'registry' +app_name = "registry" urlpatterns = [ - path('asset', AssetView.as_view(), name='asset'), - path('asset/test', handle_test_asset), - path('asset/thumbnail/<str:asset_slug>', AssetThumbnailView.as_view(), name='asset_thumbnail_slug'), - path('asset/preview/<str:asset_slug>', render_preview_html), - path('asset/gitpod/<str:asset_slug>', forward_asset_url), - path('asset/<str:asset_slug>/supersedes', AssetSupersedesView.as_view()), - path('asset/<str:asset_slug>/github/config', get_config), - path('asset/<str:asset_slug>.<str:extension>', render_readme), - path('asset/<str:asset_slug>', AssetView.as_view()), - path('academy/contentvariable', AcademyContentVariableView.as_view()), - path('academy/contentvariable/<str:variable_slug>', AcademyContentVariableView.as_view()), - path('academy/asset', AcademyAssetView.as_view(), name='academy_asset'), - path('academy/asset/image', AssetImageView.as_view()), - path('academy/asset/comment', AcademyAssetCommentView.as_view()), - path('academy/asset/comment/<str:comment_id>', AcademyAssetCommentView.as_view()), - path('academy/asset/action/<str:action_slug>', AcademyAssetActionView.as_view()), - path('academy/asset/alias', AcademyAssetAliasView.as_view()), - path('academy/asset/alias/<str:alias_slug>', AcademyAssetAliasView.as_view()), - path('academy/asset/<str:asset_slug>/action/<str:action_slug>', AcademyAssetActionView.as_view()), - path('academy/asset/<str:asset_slug>/seo_report', AcademyAssetSEOReportView.as_view()), - path('academy/asset/<str:asset_slug>/originality', AcademyAssetOriginalityView.as_view()), - path('academy/asset/<str:asset_slug>/thumbnail', AssetThumbnailView.as_view()), - path('academy/asset/<str:asset_slug>', AcademyAssetView.as_view()), - path('keyword', get_keywords), - path('academy/category', AcademyCategoryView.as_view()), - path('academy/category/<str:category_slug>', AcademyCategoryView.as_view()), - path('academy/keyword', AcademyKeywordView.as_view()), - path('academy/keyword/<str:keyword_slug>', AcademyKeywordView.as_view()), - path('academy/keywordcluster', AcademyKeywordClusterView.as_view()), - path('academy/keywordcluster/<str:cluster_slug>', AcademyKeywordClusterView.as_view()), - path('category', 
get_categories), - path('technology', get_technologies, name='technology'), - path('academy/technology', AcademyTechnologyView.as_view(), name='academy_technology'), - path('academy/technology/<str:tech_slug>', AcademyTechnologyView.as_view()), - path('translation', get_translations), - path('alias/redirect', get_alias_redirects), + path("asset", AssetView.as_view(), name="asset"), + path("asset/test", handle_test_asset), + path("asset/thumbnail/<str:asset_slug>", AssetThumbnailView.as_view(), name="asset_thumbnail_slug"), + path("asset/preview/<str:asset_slug>", render_preview_html), + path("asset/gitpod/<str:asset_slug>", forward_asset_url), + path("asset/<str:asset_slug>/supersedes", AssetSupersedesView.as_view()), + path("asset/<str:asset_slug>/github/config", get_config), + path("asset/<str:asset_slug>.<str:extension>", render_readme), + path("asset/<str:asset_slug>", AssetView.as_view()), + path("academy/contentvariable", AcademyContentVariableView.as_view()), + path("academy/contentvariable/<str:variable_slug>", AcademyContentVariableView.as_view()), + path("academy/asset", AcademyAssetView.as_view(), name="academy_asset"), + path("academy/asset/image", AssetImageView.as_view()), + path("academy/asset/comment", AcademyAssetCommentView.as_view()), + path("academy/asset/comment/<str:comment_id>", AcademyAssetCommentView.as_view()), + path("academy/asset/action/<str:action_slug>", AcademyAssetActionView.as_view()), + path("academy/asset/alias", AcademyAssetAliasView.as_view()), + path("academy/asset/alias/<str:alias_slug>", AcademyAssetAliasView.as_view()), + path("academy/asset/<str:asset_slug>/action/<str:action_slug>", AcademyAssetActionView.as_view()), + path("academy/asset/<str:asset_slug>/seo_report", AcademyAssetSEOReportView.as_view()), + path("academy/asset/<str:asset_slug>/originality", AcademyAssetOriginalityView.as_view()), + path("academy/asset/<str:asset_slug>/thumbnail", AssetThumbnailView.as_view()), + path("academy/asset/<str:asset_slug>", AcademyAssetView.as_view()), + path("keyword", get_keywords), + path("academy/category", AcademyCategoryView.as_view()), + path("academy/category/<str:category_slug>", AcademyCategoryView.as_view()), + path("academy/keyword", AcademyKeywordView.as_view()), + path("academy/keyword/<str:keyword_slug>", AcademyKeywordView.as_view()), + path("academy/keywordcluster", AcademyKeywordClusterView.as_view()), + path("academy/keywordcluster/<str:cluster_slug>", AcademyKeywordClusterView.as_view()), + path("category", get_categories), + path("technology", get_technologies, name="technology"), + path("academy/technology", AcademyTechnologyView.as_view(), name="academy_technology"), + path("academy/technology/<str:tech_slug>", AcademyTechnologyView.as_view()), + path("translation", get_translations), + path("alias/redirect", get_alias_redirects), ] diff --git a/breathecode/registry/urls/v2.py b/breathecode/registry/urls/v2.py index 0413c1cee..e78734331 100644 --- a/breathecode/registry/urls/v2.py +++ b/breathecode/registry/urls/v2.py @@ -4,11 +4,11 @@ from .v1 import urlpatterns as urlpatterns_v1 deprecation_list = [ - 'academy/asset/<str:asset_slug>', + "academy/asset/<str:asset_slug>", ] -app_name = 'activity' +app_name = "activity" urlpatterns = [ - path('academy/asset/<str:asset_slug>', V2AcademyAssetView.as_view(), name='academy_asset_slug'), + path("academy/asset/<str:asset_slug>", V2AcademyAssetView.as_view(), name="academy_asset_slug"), *[r for r in urlpatterns_v1 if r.pattern._route not in deprecation_list], ] diff --git 
a/breathecode/registry/urls_shortner.py b/breathecode/registry/urls_shortner.py
index 25ba2b022..5876b8ca6 100644
--- a/breathecode/registry/urls_shortner.py
+++ b/breathecode/registry/urls_shortner.py
@@ -1,7 +1,7 @@
 from django.urls import path
 from .views import forward_asset_url
 
-app_name = 'registry'
+app_name = "registry"
 urlpatterns = [
-    path('<slug:asset_slug>', forward_asset_url),
+    path("<slug:asset_slug>", forward_asset_url),
 ]
diff --git a/breathecode/registry/utils.py b/breathecode/registry/utils.py
index bb6543717..714863664 100644
--- a/breathecode/registry/utils.py
+++ b/breathecode/registry/utils.py
@@ -10,33 +10,33 @@
 
 
 def get_urls_from_html(html_content):
-    soup = BeautifulSoup(html_content, features='lxml')
+    soup = BeautifulSoup(html_content, features="lxml")
     urls = []
 
-    anchors = soup.findAll('a')
+    anchors = soup.findAll("a")
     for a in anchors:
-        urls.append(a.get('href'))
+        urls.append(a.get("href"))
 
-    images = images = soup.findAll('img')
+    images = soup.findAll("img")
     for img in images:
-        urls.append(img.get('src'))
+        urls.append(img.get("src"))
 
     return urls
 
 
 def test_url(url, allow_relative=False, allow_hash=True):
-    if url is None or url == '':
-        raise Exception('Empty url')
+    if url is None or url == "":
+        raise Exception("Empty url")
 
-    if not allow_hash and '#' == url[0:1]:
-        raise Exception('Not allowed hash url: ' + url)
+    if not allow_hash and "#" == url[0:1]:
+        raise Exception("Not allowed hash url: " + url)
 
-    if not allow_relative and ('../' == url[0:3] or './' == url[0:2]):
-        raise Exception('Not allowed relative url: ' + url)
+    if not allow_relative and ("../" == url[0:3] or "./" == url[0:2]):
+        raise Exception("Not allowed relative url: " + url)
 
     return True
 
-    #FIXME: the code is under this line is unaccessible, want you remove it?
+    # FIXME: the code below this line is unreachable, do you want to remove it?
# response = requests.head(url, allow_redirects=False, timeout=2) # if response.status_code not in [200, 302, 301, 307]: # raise Exception(f'Invalid URL with code {response.status_code}: ' + url) @@ -44,17 +44,17 @@ def test_url(url, allow_relative=False, allow_hash=True): class AssetException(Exception): - def __init__(self, message='', severity='ERROR'): - all_severities = ['ERROR', 'WARNING'] + def __init__(self, message="", severity="ERROR"): + all_severities = ["ERROR", "WARNING"] if severity in all_severities: self.severity = severity else: - raise Exception('Invalid AssetException severity ' + severity) + raise Exception("Invalid AssetException severity " + severity) -class AssetValidator(): - base_warns = ['translations', 'technologies'] - base_errors = ['lang', 'urls', 'category', 'preview', 'images', 'readme_url'] +class AssetValidator: + base_warns = ["translations", "technologies"] + base_errors = ["lang", "urls", "category", "preview", "images", "readme_url"] warns = [] errors = [] @@ -70,63 +70,64 @@ def validate(self): if hasattr(self, validation): getattr(self, validation)() else: - raise Exception('Invalid asset error validation ' + validation) + raise Exception("Invalid asset error validation " + validation) except Exception as e: - raise AssetException(str(e), severity='ERROR') + raise AssetException(str(e), severity="ERROR") try: for validation in self.warns: if hasattr(self, validation): - print('validating warning ' + validation) + print("validating warning " + validation) getattr(self, validation)() else: - raise Exception('Invalid asset warning validation ' + validation) + raise Exception("Invalid asset warning validation " + validation) except Exception as e: - raise AssetException(str(e), severity='WARNING') + raise AssetException(str(e), severity="WARNING") def readme_url(self): - if self.asset.readme_url is not None or self.asset.readme_url != '': + if self.asset.readme_url is not None or self.asset.readme_url != "": if not self.asset.owner: - raise Exception('Asset must have an owner and the owner must have write access to the readme file') + raise Exception("Asset must have an owner and the owner must have write access to the readme file") credentials = CredentialsGithub.objects.filter(user=self.asset.owner).first() if credentials is None: - raise Exception('Github credentials for asset owner were not found') + raise Exception("Github credentials for asset owner were not found") gb = Github(credentials.token) try: if not gb.file_exists(self.asset.readme_url): - raise AssetException('Readme URL points to a missing file', severity='ERROR') + raise AssetException("Readme URL points to a missing file", severity="ERROR") except GithubAuthException: - raise AssetException('Cannot connect to github to validate readme url, please fix owner or credentials', - severity='ERROR') + raise AssetException( + "Cannot connect to github to validate readme url, please fix owner or credentials", severity="ERROR" + ) except Exception as e: - raise AssetException(str(e), severity='ERROR') + raise AssetException(str(e), severity="ERROR") def urls(self): readme = self.asset.get_readme(parse=True) - if 'html' in readme: - urls = get_urls_from_html(readme['html']) + if "html" in readme: + urls = get_urls_from_html(readme["html"]) for url in urls: test_url(url, allow_relative=False) def lang(self): - if self.asset.lang is None or self.asset.lang == '': - raise Exception('Empty default language') + if self.asset.lang is None or self.asset.lang == "": + raise Exception("Empty default 
language") def translations(self): if self.asset.all_translations.count() == 0: - raise Exception('No translations') + raise Exception("No translations") def technologies(self): if self.asset.technologies.count() == 0: - raise Exception('No technologies') + raise Exception("No technologies") def difficulty(self): if self.asset.difficulty is None: - raise Exception('No difficulty') + raise Exception("No difficulty") def preview(self): pass @@ -137,64 +138,62 @@ def preview(self): # test_url(self.asset.preview, allow_relative=False, allow_hash=False) def readme(self): - if self.asset.readme is None or self.asset.readme == '' and not self.asset.external: - raise Exception('Empty readme') + if self.asset.readme is None or self.asset.readme == "" and not self.asset.external: + raise Exception("Empty readme") def category(self): if self.asset.category is None: - raise Exception('Empty category') + raise Exception("Empty category") def images(self): images = self.asset.images.all() - print('Validating images', images) + print("Validating images", images) for image in images: - if image.download_status != 'OK': - raise Exception('Check the asset images, there seems to be images not properly downloaded') + if image.download_status != "OK": + raise Exception("Check the asset images, there seems to be images not properly downloaded") class LessonValidator(AssetValidator): warns = [] - errors = ['readme'] + errors = ["readme"] class ArticleValidator(AssetValidator): warns = [] - errors = ['readme'] + errors = ["readme"] class ExerciseValidator(AssetValidator): - warns = ['difficulty'] - errors = ['readme', 'preview'] + warns = ["difficulty"] + errors = ["readme", "preview"] class ProjectValidator(ExerciseValidator): - warns = ['difficulty'] - errors = ['readme', 'preview'] + warns = ["difficulty"] + errors = ["readme", "preview"] class QuizValidator(AssetValidator): - warns = ['difficulty'] - errors = ['preview'] + warns = ["difficulty"] + errors = ["preview"] -class OriginalityWrapper(): +class OriginalityWrapper: def __init__(self, token): self.token = token def detect(self, text): - return self._request('scan/ai', method='POST', body={'content': text}) + return self._request("scan/ai", method="POST", body={"content": text}) - def _request(self, url, method='GET', body=None): + def _request(self, url, method="GET", body=None): - headers = {'X-OAI-API-KEY': self.token} - response = requests.request(method=method, - url='https://api.originality.ai/api/v1/' + url, - data=body, - headers=headers, - timeout=2) + headers = {"X-OAI-API-KEY": self.token} + response = requests.request( + method=method, url="https://api.originality.ai/api/v1/" + url, data=body, headers=headers, timeout=2 + ) if response.status_code == 200: result = response.json() @@ -209,6 +208,6 @@ def _request(self, url, method='GET', body=None): # } return result else: - msg = f'Error {response.status_code} while request originality API' + msg = f"Error {response.status_code} while request originality API" logger.error(msg) raise Exception(msg) diff --git a/breathecode/registry/views.py b/breathecode/registry/views.py index c0b5d76d4..a46b7de5b 100644 --- a/breathecode/registry/views.py +++ b/breathecode/registry/views.py @@ -88,83 +88,85 @@ logger = logging.getLogger(__name__) -SYSTEM_EMAIL = os.getenv('SYSTEM_EMAIL', None) -ENV = os.getenv('ENV', 'development') +SYSTEM_EMAIL = os.getenv("SYSTEM_EMAIL", None) +ENV = os.getenv("ENV", "development") -@api_view(['GET']) +@api_view(["GET"]) @permission_classes([AllowAny]) def 
forward_asset_url(request, asset_slug=None): asset = Asset.get_by_slug(asset_slug, request) if asset is None: - return render_message(request, f'Asset with slug {asset_slug} not found') + return render_message(request, f"Asset with slug {asset_slug} not found") validator = URLValidator() try: - if not asset.external and asset.asset_type == 'LESSON': + if not asset.external and asset.asset_type == "LESSON": slug = Path(asset.readme_url).stem - url = 'https://4geeks.com/en/lesson/' + slug + '?plain=true' + url = "https://4geeks.com/en/lesson/" + slug + "?plain=true" - if ENV == 'development': - return render_message(request, 'Redirect to: ' + url, academy=asset.academy) + if ENV == "development": + return render_message(request, "Redirect to: " + url, academy=asset.academy) else: return HttpResponseRedirect(redirect_to=url) validator(asset.url) if asset.gitpod: - return HttpResponseRedirect(redirect_to='https://gitpod.io#' + asset.url) + return HttpResponseRedirect(redirect_to="https://gitpod.io#" + asset.url) else: return HttpResponseRedirect(redirect_to=asset.url) except Exception as e: logger.error(e) - msg = f'The url for the {asset.asset_type.lower()} your are trying to open ({asset_slug}) was not found, this error has been reported and will be fixed soon.' - AssetErrorLog(slug=AssetErrorLog.INVALID_URL, - path=asset_slug, - asset=asset, - asset_type=asset.asset_type, - status_text=msg).save() + msg = f"The url for the {asset.asset_type.lower()} your are trying to open ({asset_slug}) was not found, this error has been reported and will be fixed soon." + AssetErrorLog( + slug=AssetErrorLog.INVALID_URL, path=asset_slug, asset=asset, asset_type=asset.asset_type, status_text=msg + ).save() return render_message(request, msg, academy=asset.academy) -@api_view(['GET']) +@api_view(["GET"]) @permission_classes([AllowAny]) @xframe_options_exempt def render_preview_html(request, asset_slug): asset = Asset.get_by_slug(asset_slug, request) if asset is None: - return render_message(request, f'Asset with slug {asset_slug} not found') + return render_message(request, f"Asset with slug {asset_slug} not found") - if asset.asset_type == 'QUIZ': - return render_message(request, 'Quiz cannot be previewed', academy=asset.academy) + if asset.asset_type == "QUIZ": + return render_message(request, "Quiz cannot be previewed", academy=asset.academy) readme = asset.get_readme(parse=True) response = render( - request, readme['frontmatter']['format'] + '.html', { - **AssetBigSerializer(asset).data, 'html': readme['html'], - 'theme': request.GET.get('theme', 'light'), - 'plain': request.GET.get('plain', 'false'), - 'styles': readme['frontmatter']['inlining']['css'][0] if 'inlining' in readme['frontmatter'] else None, - 'frontmatter': readme['frontmatter'].items() - }) + request, + readme["frontmatter"]["format"] + ".html", + { + **AssetBigSerializer(asset).data, + "html": readme["html"], + "theme": request.GET.get("theme", "light"), + "plain": request.GET.get("plain", "false"), + "styles": readme["frontmatter"]["inlining"]["css"][0] if "inlining" in readme["frontmatter"] else None, + "frontmatter": readme["frontmatter"].items(), + }, + ) # Set Content-Security-Policy header - response['Content-Security-Policy'] = "frame-ancestors 'self' https://4geeks.com" + response["Content-Security-Policy"] = "frame-ancestors 'self' https://4geeks.com" return response -@api_view(['GET']) +@api_view(["GET"]) @permission_classes([AllowAny]) def get_technologies(request): lang = get_user_language(request) items = 
AssetTechnology.objects.filter(parent__isnull=True) - if 'sort_priority' in request.GET: - param = request.GET.get('sort_priority') + if "sort_priority" in request.GET: + param = request.GET.get("sort_priority") try: @@ -173,21 +175,24 @@ def get_technologies(request): items = items.filter(sort_priority__exact=param) except Exception: raise ValidationException( - translation(lang, - en='The parameter must be an integer, nothing else', - es='El parametró debera ser un entero y nada mas ', - slug='integer-not-found')) - - if 'lang' in request.GET: - param = request.GET.get('lang') - if param == 'en': - param = 'us' - items = items.filter(Q(lang__iexact=param) | Q(lang='') | Q(lang__isnull=True)) - - if 'is_deprecated' not in request.GET or request.GET.get('is_deprecated').lower() == 'false': + translation( + lang, + en="The parameter must be an integer, nothing else", + es="El parametró debera ser un entero y nada mas ", + slug="integer-not-found", + ) + ) + + if "lang" in request.GET: + param = request.GET.get("lang") + if param == "en": + param = "us" + items = items.filter(Q(lang__iexact=param) | Q(lang="") | Q(lang__isnull=True)) + + if "is_deprecated" not in request.GET or request.GET.get("is_deprecated").lower() == "false": items = items.filter(is_deprecated=False) - items = items.order_by('sort_priority') + items = items.order_by("sort_priority") serializer = AssetTechnologySerializer(items, many=True) return Response(serializer.data) @@ -198,13 +203,14 @@ class AcademyTechnologyView(APIView, GenerateLookupsMixin): """ List all snippets, or create a new snippet. """ - extensions = APIViewExtensions(cache=TechnologyCache, sort='-slug', paginate=True) + + extensions = APIViewExtensions(cache=TechnologyCache, sort="-slug", paginate=True) def _has_valid_parent(self): - regex = r'^(?:\d+,)*(?:\d+)$' - return bool(re.findall(regex, self.request.GET.get('parent', ''))) + regex = r"^(?:\d+,)*(?:\d+)$" + return bool(re.findall(regex, self.request.GET.get("parent", ""))) - @capable_of('read_technology') + @capable_of("read_technology") def get(self, request, academy_id=None): lang = get_user_language(request) handler = self.extensions(request) @@ -216,95 +222,94 @@ def get(self, request, academy_id=None): lookup = {} has_valid_parent = self._has_valid_parent() - if self.request.GET.get('include_children') != 'true' and not has_valid_parent: + if self.request.GET.get("include_children") != "true" and not has_valid_parent: items = items.filter(parent__isnull=True) - if 'language' in self.request.GET or 'lang' in self.request.GET: - param = self.request.GET.get('language', '') + if "language" in self.request.GET or "lang" in self.request.GET: + param = self.request.GET.get("language", "") if not param: - param = self.request.GET.get('lang') + param = self.request.GET.get("lang") - if param == 'en': - param = 'us' - items = items.filter(Q(lang__iexact=param) | Q(lang='') | Q(lang__isnull=True)) + if param == "en": + param = "us" + items = items.filter(Q(lang__iexact=param) | Q(lang="") | Q(lang__isnull=True)) - if 'sort_priority' in self.request.GET: - param = self.request.GET.get('sort_priority') + if "sort_priority" in self.request.GET: + param = self.request.GET.get("sort_priority") try: param = int(param) - lookup['sort_priority__iexact'] = param + lookup["sort_priority__iexact"] = param except Exception: raise ValidationException( - translation(lang, - en='The parameter must be an integer', - es='El parametró debe ser un entero', - slug='not-an-integer')) - - if 'visibility' in 
self.request.GET: - param = self.request.GET.get('visibility') - lookup['visibility__in'] = [p.upper() for p in param.split(',')] + translation( + lang, + en="The parameter must be an integer", + es="El parametró debe ser un entero", + slug="not-an-integer", + ) + ) + + if "visibility" in self.request.GET: + param = self.request.GET.get("visibility") + lookup["visibility__in"] = [p.upper() for p in param.split(",")] else: - lookup['visibility'] = 'PUBLIC' + lookup["visibility"] = "PUBLIC" if has_valid_parent: - param = self.request.GET.get('parent') - lookup['parent__id__in'] = [int(p) for p in param.split(',')] + param = self.request.GET.get("parent") + lookup["parent__id__in"] = [int(p) for p in param.split(",")] - like = request.GET.get('like', None) - if like is not None and like != 'undefined' and like != '': + like = request.GET.get("like", None) + if like is not None and like != "undefined" and like != "": items = items.filter(Q(slug__icontains=like) | Q(title__icontains=like)) - if slug := request.GET.get('slug'): - lookup['slug__in'] = slug.split(',') + if slug := request.GET.get("slug"): + lookup["slug__in"] = slug.split(",") - if asset_slug := request.GET.get('asset_slug'): - lookup['featured_asset__slug__in'] = asset_slug.split(',') + if asset_slug := request.GET.get("asset_slug"): + lookup["featured_asset__slug__in"] = asset_slug.split(",") - if asset_type := request.GET.get('asset_type'): - lookup['featured_asset__asset_type__in'] = asset_type.split(',') + if asset_type := request.GET.get("asset_type"): + lookup["featured_asset__asset_type__in"] = asset_type.split(",") - if 'is_deprecated' not in request.GET or request.GET.get('is_deprecated').lower() == 'false': - lookup['is_deprecated'] = False + if "is_deprecated" not in request.GET or request.GET.get("is_deprecated").lower() == "false": + lookup["is_deprecated"] = False - items = items.filter(**lookup).order_by('sort_priority') + items = items.filter(**lookup).order_by("sort_priority") items = handler.queryset(items) serializer = AssetBigTechnologySerializer(items, many=True) return handler.response(serializer.data) - @capable_of('crud_technology') + @capable_of("crud_technology") def put(self, request, tech_slug=None, academy_id=None): - lookups = self.generate_lookups(request, many_fields=['slug']) + lookups = self.generate_lookups(request, many_fields=["slug"]) if lookups and tech_slug: raise ValidationException( - 'user_id or cohort_id was provided in url ' - 'in bulk mode request, use querystring style instead', - code=400) + "user_id or cohort_id was provided in url " "in bulk mode request, use querystring style instead", + code=400, + ) - if 'slug' not in request.GET and tech_slug is None: - raise ValidationException('Missing technology slug(s)') + if "slug" not in request.GET and tech_slug is None: + raise ValidationException("Missing technology slug(s)") elif tech_slug is not None: - lookups['slug__in'] = [tech_slug] + lookups["slug__in"] = [tech_slug] - techs = AssetTechnology.objects.filter(**lookups).order_by('sort_priority') + techs = AssetTechnology.objects.filter(**lookups).order_by("sort_priority") _count = techs.count() if _count == 0: - raise ValidationException('This technolog(ies) does not exist for this academy', 404) + raise ValidationException("This technolog(ies) does not exist for this academy", 404) serializers = [] for t in techs: - serializer = TechnologyPUTSerializer(t, - data=request.data, - many=False, - context={ - 'request': request, - 'academy_id': academy_id - }) + serializer = 
TechnologyPUTSerializer( + t, data=request.data, many=False, context={"request": request, "academy_id": academy_id} + ) if not serializer.is_valid(): return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) serializers.append(serializer) @@ -320,123 +325,124 @@ def put(self, request, tech_slug=None, academy_id=None): return Response(resp, status=status.HTTP_200_OK) -@api_view(['GET']) +@api_view(["GET"]) def get_categories(request): - items = AssetCategory.objects.filter(visibility='PUBLIC') + items = AssetCategory.objects.filter(visibility="PUBLIC") serializer = AssetCategorySerializer(items, many=True) return Response(serializer.data) -@api_view(['GET']) +@api_view(["GET"]) def get_keywords(request): items = AssetKeyword.objects.all() serializer = AssetKeywordSerializer(items, many=True) return Response(serializer.data) -@api_view(['GET']) +@api_view(["GET"]) @permission_classes([AllowAny]) def get_translations(request): - langs = Asset.objects.all().values_list('lang', flat=True) + langs = Asset.objects.all().values_list("lang", flat=True) langs = set(langs) - return Response([{'slug': l, 'title': l} for l in langs]) + return Response([{"slug": l, "title": l} for l in langs]) -@api_view(['POST']) +@api_view(["POST"]) @permission_classes([AllowAny]) def handle_test_asset(request): test_asset(request.data) - return Response({'status': 'ok'}) + return Response({"status": "ok"}) -@api_view(['GET']) +@api_view(["GET"]) @permission_classes([AllowAny]) @xframe_options_exempt -def render_readme(request, asset_slug, extension='raw'): +def render_readme(request, asset_slug, extension="raw"): asset = Asset.get_by_slug(asset_slug, request) if asset is None: - raise ValidationException(f'Asset {asset_slug} not found', status.HTTP_404_NOT_FOUND) + raise ValidationException(f"Asset {asset_slug} not found", status.HTTP_404_NOT_FOUND) - response = HttpResponse('Invalid extension format', content_type='text/html') - if extension == 'raw': + response = HttpResponse("Invalid extension format", content_type="text/html") + if extension == "raw": readme = asset.get_readme() - response = HttpResponse(readme['decoded_raw'], content_type='text/markdown') + response = HttpResponse(readme["decoded_raw"], content_type="text/markdown") - if extension == 'html': - if asset.html is not None and asset.html != '': - response = HttpResponse(asset.html, content_type='text/html') + if extension == "html": + if asset.html is not None and asset.html != "": + response = HttpResponse(asset.html, content_type="text/html") else: - asset.log_error(AssetErrorLog.EMPTY_HTML, - status_text='Someone requested the asset HTML via API and it was empty') - readme = asset.get_readme(parse=True, remove_frontmatter=request.GET.get('frontmatter', 'true') != 'false') - asset.html = readme['html'] + asset.log_error( + AssetErrorLog.EMPTY_HTML, status_text="Someone requested the asset HTML via API and it was empty" + ) + readme = asset.get_readme(parse=True, remove_frontmatter=request.GET.get("frontmatter", "true") != "false") + asset.html = readme["html"] asset.save() - response = HttpResponse(readme['html'], content_type='text/html') + response = HttpResponse(readme["html"], content_type="text/html") - elif extension in ['md', 'mdx', 'txt']: - readme = asset.get_readme(parse=True, remove_frontmatter=request.GET.get('frontmatter', 'true') != 'false') - response = HttpResponse(readme['decoded'], content_type='text/markdown') + elif extension in ["md", "mdx", "txt"]: + readme = asset.get_readme(parse=True, 
remove_frontmatter=request.GET.get("frontmatter", "true") != "false") + response = HttpResponse(readme["decoded"], content_type="text/markdown") - elif extension == 'ipynb': + elif extension == "ipynb": readme = asset.get_readme() - response = HttpResponse(readme['decoded'], content_type='application/json') + response = HttpResponse(readme["decoded"], content_type="application/json") return response -@api_view(['GET']) +@api_view(["GET"]) @permission_classes([AllowAny]) def get_alias_redirects(request): aliases = AssetAlias.objects.all() redirects = {} - if 'academy' in request.GET: - param = request.GET.get('academy', '') - aliases = aliases.filter(asset__academy__id__in=param.split(',')) + if "academy" in request.GET: + param = request.GET.get("academy", "") + aliases = aliases.filter(asset__academy__id__in=param.split(",")) for a in aliases: if a.slug != a.asset.slug: - redirects[a.slug] = {'slug': a.asset.slug, 'type': a.asset.asset_type, 'lang': a.asset.lang} + redirects[a.slug] = {"slug": a.asset.slug, "type": a.asset.asset_type, "lang": a.asset.lang} return Response(redirects) -@api_view(['GET']) +@api_view(["GET"]) @permission_classes([AllowAny]) def get_config(request, asset_slug): asset = Asset.get_by_slug(asset_slug, request) if asset is None: - raise ValidationException(f'Asset not {asset_slug} found', status.HTTP_404_NOT_FOUND) + raise ValidationException(f"Asset not {asset_slug} found", status.HTTP_404_NOT_FOUND) - main_branch = 'master' - response = requests.head(f'{asset.url}/tree/{main_branch}', allow_redirects=False, timeout=2) + main_branch = "master" + response = requests.head(f"{asset.url}/tree/{main_branch}", allow_redirects=False, timeout=2) if response.status_code == 302: - main_branch = 'main' + main_branch = "main" try: - response = requests.get(f'{asset.url}/blob/{main_branch}/learn.json?raw=true', timeout=2) + response = requests.get(f"{asset.url}/blob/{main_branch}/learn.json?raw=true", timeout=2) if response.status_code == 404: - response = requests.get(f'{asset.url}/blob/{main_branch}/bc.json?raw=true', timeout=2) + response = requests.get(f"{asset.url}/blob/{main_branch}/bc.json?raw=true", timeout=2) if response.status_code == 404: - raise ValidationException(f'Config file not found for {asset.url}', code=404, slug='config_not_found') + raise ValidationException(f"Config file not found for {asset.url}", code=404, slug="config_not_found") return Response(response.json()) except Exception: data = { - 'MESSAGE': f'learn.json or bc.json not found or invalid for for: \n {asset.url}', - 'TITLE': f'Error fetching the exercise meta-data learn.json for {asset.asset_type.lower()} {asset.slug}', + "MESSAGE": f"learn.json or bc.json not found or invalid for for: \n {asset.url}", + "TITLE": f"Error fetching the exercise meta-data learn.json for {asset.asset_type.lower()} {asset.slug}", } to = SYSTEM_EMAIL if asset.author is not None: to = asset.author.email - send_email_message('message', to=to, data=data, academy=asset.academy) - raise ValidationException(f'Config file invalid or not found for {asset.url}', - code=404, - slug='config_not_found') + send_email_message("message", to=to, data=data, academy=asset.academy) + raise ValidationException( + f"Config file invalid or not found for {asset.url}", code=404, slug="config_not_found" + ) class AssetThumbnailView(APIView): @@ -448,8 +454,8 @@ class AssetThumbnailView(APIView): permission_classes = [AllowAny] def get(self, request, asset_slug): - width = int(request.GET.get('width', '0')) - height = 
int(request.GET.get('height', '0')) + width = int(request.GET.get("width", "0")) + height = int(request.GET.get("height", "0")) asset = Asset.objects.filter(slug=asset_slug).first() generator = AssetThumbnailGenerator(asset, width, height) @@ -458,18 +464,18 @@ def get(self, request, asset_slug): return redirect(url, permanent=permanent) # this method will force to reset the thumbnail - @capable_of('crud_asset') + @capable_of("crud_asset") def post(self, request, asset_slug, academy_id): lang = get_user_language(request) - width = int(request.GET.get('width', '0')) - height = int(request.GET.get('height', '0')) + width = int(request.GET.get("width", "0")) + height = int(request.GET.get("height", "0")) asset = Asset.objects.filter(slug=asset_slug, academy__id=academy_id).first() if asset is None: - raise ValidationException(f'Asset with slug {asset_slug} not found for this academy', - slug='asset-slug-not-found', - code=400) + raise ValidationException( + f"Asset with slug {asset_slug} not found for this academy", slug="asset-slug-not-found", code=400 + ) generator = AssetThumbnailGenerator(asset, width, height) @@ -478,15 +484,18 @@ def post(self, request, asset_slug, academy_id): asset = generator.create(delay=1500) except CircuitBreakerError: - raise ValidationException(translation( - lang, - en='The circuit breaker is open due to an error, please try again later', - es='El circuit breaker está abierto debido a un error, por favor intente más tarde', - slug='circuit-breaker-open'), - slug='circuit-breaker-open', - data={'service': 'Google Cloud Storage'}, - silent=True, - code=503) + raise ValidationException( + translation( + lang, + en="The circuit breaker is open due to an error, please try again later", + es="El circuit breaker está abierto debido a un error, por favor intente más tarde", + slug="circuit-breaker-open", + ), + slug="circuit-breaker-open", + data={"service": "Google Cloud Storage"}, + silent=True, + code=503, + ) serializer = AcademyAssetSerializer(asset) return Response(serializer.data, status=status.HTTP_200_OK) @@ -497,17 +506,19 @@ class AcademyContentVariableView(APIView): get: Get content variables thumbnail. 
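Review note on the thumbnail hunks above: the public GET (`asset/thumbnail/<slug>`, see the v1 url list earlier in this diff) answers with a redirect to the generated image, while the academy-scoped POST forces a regeneration. A hedged client-side sketch; the base URL and the auth headers are placeholders, not values taken from this diff.

```python
# Sketch only: exercising the thumbnail endpoints changed above.
import requests

BASE = "https://breathecode.herokuapp.com/v1/registry"  # assumed mount prefix

# Public thumbnail: the view redirects (301 or 302) to the generated image URL.
r = requests.get(
    f"{BASE}/asset/thumbnail/some-asset-slug",  # slug is a placeholder
    params={"width": 320, "height": 180},
    allow_redirects=False,
    timeout=10,
)
print(r.status_code, r.headers.get("Location"))

# Academy-scoped regeneration (requires a token with the crud_asset capability).
r = requests.post(
    f"{BASE}/academy/asset/some-asset-slug/thumbnail",
    headers={"Authorization": "Token <token>", "Academy": "1"},  # placeholder auth
    timeout=10,
)
print(r.status_code)
```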
""" + extensions = APIViewExtensions(cache=ContentVariableCache, paginate=True) - @capable_of('read_content_variables') + @capable_of("read_content_variables") def get(self, request, academy_id, variable_slug=None): handler = self.extensions(request) if variable_slug is not None: variable = ContentVariable.objects.filter(slug=variable_slug).first() if variable is None: - raise ValidationException(f'Variable {variable_slug} not found for this academy', - status.HTTP_404_NOT_FOUND) + raise ValidationException( + f"Variable {variable_slug} not found for this academy", status.HTTP_404_NOT_FOUND + ) serializer = VariableSmallSerializer(variable) return handler.response(serializer.data) @@ -515,9 +526,9 @@ def get(self, request, academy_id, variable_slug=None): items = ContentVariable.objects.filter(academy__id=academy_id) lookup = {} - if 'lang' in self.request.GET: - param = self.request.GET.get('lang') - lookup['lang'] = param + if "lang" in self.request.GET: + param = self.request.GET.get("lang") + lookup["lang"] = param items = items.filter(**lookup) items = handler.queryset(items) @@ -527,18 +538,18 @@ def get(self, request, academy_id, variable_slug=None): return handler.response(serializer.data) # this method will force to reset the thumbnail - @capable_of('crud_asset') + @capable_of("crud_asset") def post(self, request, asset_slug, academy_id): lang = get_user_language(request) - width = int(request.GET.get('width', '0')) - height = int(request.GET.get('height', '0')) + width = int(request.GET.get("width", "0")) + height = int(request.GET.get("height", "0")) asset = Asset.objects.filter(slug=asset_slug, academy__id=academy_id).first() if asset is None: - raise ValidationException(f'Asset with slug {asset_slug} not found for this academy', - slug='asset-slug-not-found', - code=400) + raise ValidationException( + f"Asset with slug {asset_slug} not found for this academy", slug="asset-slug-not-found", code=400 + ) generator = AssetThumbnailGenerator(asset, width, height) @@ -547,15 +558,18 @@ def post(self, request, asset_slug, academy_id): asset = generator.create(delay=1500) except CircuitBreakerError: - raise ValidationException(translation( - lang, - en='The circuit breaker is open due to an error, please try again later', - es='El circuit breaker está abierto debido a un error, por favor intente más tarde', - slug='circuit-breaker-open'), - slug='circuit-breaker-open', - data={'service': 'Google Cloud Storage'}, - silent=True, - code=503) + raise ValidationException( + translation( + lang, + en="The circuit breaker is open due to an error, please try again later", + es="El circuit breaker está abierto debido a un error, por favor intente más tarde", + slug="circuit-breaker-open", + ), + slug="circuit-breaker-open", + data={"service": "Google Cloud Storage"}, + silent=True, + code=503, + ) serializer = AcademyAssetSerializer(asset) return Response(serializer.data, status=status.HTTP_200_OK) @@ -566,8 +580,9 @@ class AssetView(APIView, GenerateLookupsMixin): """ List all snippets, or create a new snippet. 
""" + permission_classes = [AllowAny] - extensions = APIViewExtensions(cache=AssetCache, sort='-published_at', paginate=True) + extensions = APIViewExtensions(cache=AssetCache, sort="-published_at", paginate=True) def get(self, request, asset_slug=None): handler = self.extensions(request) @@ -581,7 +596,7 @@ def get(self, request, asset_slug=None): if asset_slug is not None: asset = Asset.get_by_slug(asset_slug, request) if asset is None: - raise ValidationException(f'Asset {asset_slug} not found', status.HTTP_404_NOT_FOUND) + raise ValidationException(f"Asset {asset_slug} not found", status.HTTP_404_NOT_FOUND) serializer = AssetBigAndTechnologyPublishedSerializer(asset) return handler.response(serializer.data) @@ -591,85 +606,96 @@ def get(self, request, asset_slug=None): query = handler.lookup.build( lang, strings={ - 'iexact': [ - 'test_status', - 'sync_status', + "iexact": [ + "test_status", + "sync_status", + ], + "in": [ + "difficulty", + "status", + "asset_type", + "category__slug", + "technologies__slug", + "seo_keywords__slug", ], - 'in': - ['difficulty', 'status', 'asset_type', 'category__slug', 'technologies__slug', 'seo_keywords__slug'] }, - ids=['author', 'owner'], + ids=["author", "owner"], bools={ - 'exact': ['with_video', 'interactive', 'graded'], + "exact": ["with_video", "interactive", "graded"], }, overwrite={ - 'category': 'category__slug', - 'technologies': 'technologies__slug', - 'seo_keywords': 'seo_keywords__slug' - }) + "category": "category__slug", + "technologies": "technologies__slug", + "seo_keywords": "seo_keywords__slug", + }, + ) - like = request.GET.get('like', None) + like = request.GET.get("like", None) if like is not None: items = items.filter( - Q(slug__icontains=like) | Q(title__icontains=like) - | Q(assetalias__slug__icontains=like)) + Q(slug__icontains=like) | Q(title__icontains=like) | Q(assetalias__slug__icontains=like) + ) - if 'slug' in self.request.GET: - asset_type = self.request.GET.get('asset_type', None) - param = self.request.GET.get('slug') + if "slug" in self.request.GET: + asset_type = self.request.GET.get("asset_type", None) + param = self.request.GET.get("slug") asset = Asset.get_by_slug(param, request, asset_type=asset_type) if asset is not None: - lookup['slug'] = asset.slug + lookup["slug"] = asset.slug else: - lookup['slug'] = param + lookup["slug"] = param - if 'language' in self.request.GET: - param = self.request.GET.get('language') - if param == 'en': - param = 'us' - lookup['lang'] = param + if "language" in self.request.GET: + param = self.request.GET.get("language") + if param == "en": + param = "us" + lookup["lang"] = param - if 'status' not in self.request.GET: - lookup['status__in'] = ['PUBLISHED'] + if "status" not in self.request.GET: + lookup["status__in"] = ["PUBLISHED"] try: - if 'academy' in self.request.GET and self.request.GET.get('academy') not in ['null', '']: - param = self.request.GET.get('academy') - lookup['academy__in'] = [int(p) for p in param.split(',')] + if "academy" in self.request.GET and self.request.GET.get("academy") not in ["null", ""]: + param = self.request.GET.get("academy") + lookup["academy__in"] = [int(p) for p in param.split(",")] except Exception: - raise ValidationException(translation(lang, - en='The academy filter value should be an integer', - es='El valor del filtro de academy debería ser un entero', - slug='academy-id-must-be-integer'), - code=400) - - if 'video' in self.request.GET: - param = self.request.GET.get('video') - if param == 'true': - lookup['with_video'] = True - - 
lookup['external'] = False - if 'external' in self.request.GET: - param = self.request.GET.get('external') - if param == 'true': - lookup['external'] = True - elif param == 'both': - lookup.pop('external', None) - - need_translation = self.request.GET.get('need_translation', False) - if need_translation == 'true': - items = items.annotate(num_translations=Count('all_translations')).filter(num_translations__lte=1) - - if 'exclude_category' in self.request.GET: - param = self.request.GET.get('exclude_category') - items = items.exclude(category__slug__in=[p for p in param.split(',') if p]) - - items = items.filter(query, **lookup, visibility='PUBLIC').distinct() + raise ValidationException( + translation( + lang, + en="The academy filter value should be an integer", + es="El valor del filtro de academy debería ser un entero", + slug="academy-id-must-be-integer", + ), + code=400, + ) + + if "video" in self.request.GET: + param = self.request.GET.get("video") + if param == "true": + lookup["with_video"] = True + + lookup["external"] = False + if "external" in self.request.GET: + param = self.request.GET.get("external") + if param == "true": + lookup["external"] = True + elif param == "both": + lookup.pop("external", None) + + need_translation = self.request.GET.get("need_translation", False) + if need_translation == "true": + items = items.annotate(num_translations=Count("all_translations")).filter(num_translations__lte=1) + + if "exclude_category" in self.request.GET: + param = self.request.GET.get("exclude_category") + items = items.exclude(category__slug__in=[p for p in param.split(",") if p]) + + items = items.filter(query, **lookup, visibility="PUBLIC").distinct() items = handler.queryset(items) - if 'big' in self.request.GET: + if "big" in self.request.GET: serializer = AssetMidSerializer(items, many=True) - elif 'expand' in self.request.GET and self.request.GET.get('expand') == 'technologies': + elif "expand" in self.request.GET and self.request.GET.get("expand") == "technologies": serializer = AssetAndTechnologySerializer(items, many=True) else: serializer = AssetSerializer(items, many=True) @@ -683,43 +709,43 @@ class AcademyAssetActionView(APIView): List all snippets, or create a new snippet. 
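To make the filter surface of the public `AssetView.get` above easier to review, here is a hedged example of the kind of request it serves; the base URL is an assumption and the parameter values are invented.

```python
# Sketch only: querying the public asset list handled by AssetView above.
# Defaults applied by the view unless overridden: status=PUBLISHED,
# visibility=PUBLIC and external=False.
import requests

BASE = "https://breathecode.herokuapp.com/v1/registry"  # assumed mount prefix

params = {
    "language": "en",          # mapped to lang "us" internally
    "asset_type": "PROJECT",
    "technologies": "python,flask",
    "like": "api",             # matches slug, title or alias slug
    "expand": "technologies",  # switches to AssetAndTechnologySerializer
}
r = requests.get(f"{BASE}/asset", params=params, timeout=10)
r.raise_for_status()
print(len(r.json()))
```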
""" - @capable_of('crud_asset') + @capable_of("crud_asset") def put(self, request, asset_slug, action_slug, academy_id=None): if asset_slug is None: - raise ValidationException('Missing asset_slug') + raise ValidationException("Missing asset_slug") asset = Asset.objects.filter(slug__iexact=asset_slug, academy__id=academy_id).first() if asset is None: - raise ValidationException(f'This asset {asset_slug} does not exist for this academy {academy_id}', 404) + raise ValidationException(f"This asset {asset_slug} does not exist for this academy {academy_id}", 404) - possible_actions = ['test', 'pull', 'push', 'analyze_seo', 'clean', 'originality'] + possible_actions = ["test", "pull", "push", "analyze_seo", "clean", "originality"] if action_slug not in possible_actions: - raise ValidationException(f'Invalid action {action_slug}') + raise ValidationException(f"Invalid action {action_slug}") try: - if action_slug == 'test': + if action_slug == "test": test_asset(asset) - elif action_slug == 'clean': + elif action_slug == "clean": clean_asset_readme(asset) - elif action_slug == 'pull': + elif action_slug == "pull": override_meta = False - if request.data and 'override_meta' in request.data: - override_meta = request.data['override_meta'] + if request.data and "override_meta" in request.data: + override_meta = request.data["override_meta"] pull_from_github(asset.slug, override_meta=override_meta) - elif action_slug == 'push': - if asset.asset_type not in ['ARTICLE', 'LESSON', 'QUIZ']: + elif action_slug == "push": + if asset.asset_type not in ["ARTICLE", "LESSON", "QUIZ"]: raise ValidationException( - f'Asset type {asset.asset_type} cannot be pushed to GitHub, please update the Github repository manually' + f"Asset type {asset.asset_type} cannot be pushed to GitHub, please update the Github repository manually" ) push_to_github(asset.slug, author=request.user) - elif action_slug == 'analyze_seo': + elif action_slug == "analyze_seo": report = SEOAnalyzer(asset) report.start() - elif action_slug == 'originality': + elif action_slug == "originality": - if asset.asset_type not in ['ARTICLE', 'LESSON']: - raise ValidationException('Only lessons and articles can be scanned for originality') + if asset.asset_type not in ["ARTICLE", "LESSON"]: + raise ValidationException("Only lessons and articles can be scanned for originality") scan_asset_originality(asset) except Exception as e: @@ -727,25 +753,26 @@ def put(self, request, asset_slug, action_slug, academy_id=None): if isinstance(e, Exception): raise ValidationException(str(e)) - raise ValidationException('; '.join([k.capitalize() + ': ' + ''.join(v) - for k, v in e.message_dict.items()])) + raise ValidationException( + "; ".join([k.capitalize() + ": " + "".join(v) for k, v in e.message_dict.items()]) + ) asset = Asset.objects.filter(slug=asset_slug, academy__id=academy_id).first() serializer = AcademyAssetSerializer(asset) return Response(serializer.data, status=status.HTTP_200_OK) - @capable_of('crud_asset') + @capable_of("crud_asset") def post(self, request, action_slug, academy_id=None): - if action_slug not in ['test', 'pull', 'push', 'analyze_seo']: - raise ValidationException(f'Invalid action {action_slug}') + if action_slug not in ["test", "pull", "push", "analyze_seo"]: + raise ValidationException(f"Invalid action {action_slug}") - if not request.data['assets']: - raise ValidationException('Assets not found in the body of the request.') + if not request.data["assets"]: + raise ValidationException("Assets not found in the body of the request.") - 
assets = request.data['assets'] + assets = request.data["assets"] if len(assets) < 1: - raise ValidationException('The list of Assets is empty.') + raise ValidationException("The list of Assets is empty.") invalid_assets = [] @@ -755,23 +782,23 @@ def post(self, request, action_slug, academy_id=None): invalid_assets.append(asset_slug) continue try: - if action_slug == 'test': + if action_slug == "test": test_asset(asset) - elif action_slug == 'clean': + elif action_slug == "clean": clean_asset_readme(asset) - elif action_slug == 'pull': + elif action_slug == "pull": override_meta = False - if request.data and 'override_meta' in request.data: - override_meta = request.data['override_meta'] + if request.data and "override_meta" in request.data: + override_meta = request.data["override_meta"] pull_from_github(asset.slug, override_meta=override_meta) - elif action_slug == 'push': - if asset.asset_type not in ['ARTICLE', 'LESSON']: + elif action_slug == "push": + if asset.asset_type not in ["ARTICLE", "LESSON"]: raise ValidationException( - 'Only lessons and articles and be pushed to github, please update the Github repository yourself and come back to pull the changes from here' + "Only lessons and articles and be pushed to github, please update the Github repository yourself and come back to pull the changes from here" ) push_to_github(asset.slug, author=request.user) - elif action_slug == 'analyze_seo': + elif action_slug == "analyze_seo": report = SEOAnalyzer(asset) report.start() @@ -783,11 +810,12 @@ def post(self, request, action_slug, academy_id=None): pulled_assets = list(set(assets).difference(set(invalid_assets))) if len(pulled_assets) < 1: - raise ValidationException(f'Failed to {action_slug} for these assets: {invalid_assets}') + raise ValidationException(f"Failed to {action_slug} for these assets: {invalid_assets}") return Response( f'These asset readmes were pulled correctly from GitHub: {pulled_assets}. {f"These assets {invalid_assets} do not exist for this academy {academy_id}" if len(invalid_assets) > 0 else ""}', - status=status.HTTP_200_OK) + status=status.HTTP_200_OK, + ) # Create your views here. @@ -795,16 +823,17 @@ class AcademyAssetSEOReportView(APIView, GenerateLookupsMixin): """ List all snippets, or create a new snippet. """ - extensions = APIViewExtensions(sort='-created_at', paginate=True) - @capable_of('read_asset') + extensions = APIViewExtensions(sort="-created_at", paginate=True) + + @capable_of("read_asset") def get(self, request, asset_slug, academy_id): handler = self.extensions(request) reports = SEOReport.objects.filter(asset__slug=asset_slug) if reports.count() == 0: - raise ValidationException(f'No report found for asset {asset_slug}', status.HTTP_404_NOT_FOUND) + raise ValidationException(f"No report found for asset {asset_slug}", status.HTTP_404_NOT_FOUND) reports = handler.queryset(reports) serializer = SEOReportSerializer(reports, many=True) @@ -816,18 +845,19 @@ class AcademyAssetOriginalityView(APIView, GenerateLookupsMixin): """ List all snippets, or create a new snippet. 
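For the bulk action endpoint above (`academy/asset/action/<action_slug>`), the request body is a list of slugs plus an optional `override_meta` flag that only the `pull` action reads. A hedged sketch; host, token, academy id and slugs are placeholders.

```python
# Sketch only: bulk-pulling asset readmes through AcademyAssetActionView.post above.
import requests

BASE = "https://breathecode.herokuapp.com/v1/registry"  # assumed mount prefix
HEADERS = {"Authorization": "Token <token>", "Academy": "1"}  # placeholder auth

payload = {
    "assets": ["python-syntax", "flask-rest-hello"],  # example slugs
    "override_meta": True,  # only honoured by the "pull" action
}
r = requests.post(f"{BASE}/academy/asset/action/pull", json=payload, headers=HEADERS, timeout=30)
print(r.status_code, r.text)  # 200 plus a message listing pulled vs. invalid slugs
```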
""" - extensions = APIViewExtensions(sort='-created_at', paginate=True) - @capable_of('read_asset') + extensions = APIViewExtensions(sort="-created_at", paginate=True) + + @capable_of("read_asset") def get(self, request, asset_slug, academy_id): handler = self.extensions(request) scans = OriginalityScan.objects.filter(asset__slug=asset_slug) if scans.count() == 0: - raise ValidationException(f'No originality scans found for asset {asset_slug}', status.HTTP_404_NOT_FOUND) + raise ValidationException(f"No originality scans found for asset {asset_slug}", status.HTTP_404_NOT_FOUND) - scans = scans.order_by('-created_at') + scans = scans.order_by("-created_at") scans = handler.queryset(scans) serializer = OriginalityScanSerializer(scans, many=True) @@ -839,7 +869,7 @@ class AssetSupersedesView(APIView, GenerateLookupsMixin): List all snippets, or create a new snippet. """ - @capable_of('read_asset') + @capable_of("read_asset") def get(self, request, asset_slug=None, academy_id=None): asset = Asset.get_by_slug(asset_slug, request) @@ -859,19 +889,22 @@ def get(self, request, asset_slug=None, academy_id=None): except Exception: pass - return Response({ - 'supersedes': AssetTinySerializer(supersedes, many=True).data, - 'previous': AssetTinySerializer(previous, many=True).data - }) + return Response( + { + "supersedes": AssetTinySerializer(supersedes, many=True).data, + "previous": AssetTinySerializer(previous, many=True).data, + } + ) class AcademyAssetView(APIView, GenerateLookupsMixin): """ List all snippets, or create a new snippet. """ - extensions = APIViewExtensions(cache=AssetCache, sort='-published_at', paginate=True) - @capable_of('read_asset') + extensions = APIViewExtensions(cache=AssetCache, sort="-published_at", paginate=True) + + @capable_of("read_asset") def get(self, request, asset_slug=None, academy_id=None): handler = self.extensions(request) @@ -886,7 +919,7 @@ def get(self, request, asset_slug=None, academy_id=None): if asset_slug is not None: asset = Asset.get_by_slug(asset_slug, request) if asset is None or (asset.academy is not None and asset.academy.id != int(academy_id)): - raise ValidationException(f'Asset {asset_slug} not found for this academy', status.HTTP_404_NOT_FOUND) + raise ValidationException(f"Asset {asset_slug} not found for this academy", status.HTTP_404_NOT_FOUND) serializer = AcademyAssetSerializer(asset) return handler.response(serializer.data) @@ -895,133 +928,134 @@ def get(self, request, asset_slug=None, academy_id=None): lookup = {} - if member.role.slug == 'content_writer': + if member.role.slug == "content_writer": items = items.filter(author__id=request.user.id) - elif 'author' in self.request.GET: - param = self.request.GET.get('author') - lookup['author__id'] = param + elif "author" in self.request.GET: + param = self.request.GET.get("author") + lookup["author__id"] = param - if 'owner' in self.request.GET: - param = self.request.GET.get('owner') - lookup['owner__id'] = param + if "owner" in self.request.GET: + param = self.request.GET.get("owner") + lookup["owner__id"] = param - like = request.GET.get('like', None) + like = request.GET.get("like", None) if like is not None: items = items.filter( - Q(slug__icontains=like) | Q(title__icontains=like) - | Q(assetalias__slug__icontains=like)) + Q(slug__icontains=like) | Q(title__icontains=like) | Q(assetalias__slug__icontains=like) + ) - if 'asset_type' in self.request.GET: - param = self.request.GET.get('asset_type') - lookup['asset_type__iexact'] = param + if "asset_type" in self.request.GET: + param 
= self.request.GET.get("asset_type") + lookup["asset_type__iexact"] = param - if 'category' in self.request.GET: - param = self.request.GET.get('category') - lookup['category__slug__in'] = [p.lower() for p in param.split(',')] + if "category" in self.request.GET: + param = self.request.GET.get("category") + lookup["category__slug__in"] = [p.lower() for p in param.split(",")] - if 'test_status' in self.request.GET: - param = self.request.GET.get('test_status') - lookup['test_status'] = param.upper() + if "test_status" in self.request.GET: + param = self.request.GET.get("test_status") + lookup["test_status"] = param.upper() - if 'sync_status' in self.request.GET: - param = self.request.GET.get('sync_status') - lookup['sync_status'] = param.upper() + if "sync_status" in self.request.GET: + param = self.request.GET.get("sync_status") + lookup["sync_status"] = param.upper() - if 'slug' in self.request.GET: - asset_type = self.request.GET.get('asset_type', None) - param = self.request.GET.get('slug') + if "slug" in self.request.GET: + asset_type = self.request.GET.get("asset_type", None) + param = self.request.GET.get("slug") asset = Asset.get_by_slug(param, request, asset_type=asset_type) if asset is not None: - lookup['slug'] = asset.slug + lookup["slug"] = asset.slug else: - lookup['slug'] = param + lookup["slug"] = param - if 'language' in self.request.GET or 'lang' in self.request.GET: - param = self.request.GET.get('language') - if not param: param = self.request.GET.get('lang') + if "language" in self.request.GET or "lang" in self.request.GET: + param = self.request.GET.get("language") + if not param: + param = self.request.GET.get("lang") - if param == 'en': - param = 'us' - lookup['lang'] = param + if param == "en": + param = "us" + lookup["lang"] = param - if 'visibility' in self.request.GET: - param = self.request.GET.get('visibility') - lookup['visibility__in'] = [p.upper() for p in param.split(',')] + if "visibility" in self.request.GET: + param = self.request.GET.get("visibility") + lookup["visibility__in"] = [p.upper() for p in param.split(",")] else: - lookup['visibility'] = 'PUBLIC' + lookup["visibility"] = "PUBLIC" - if 'technologies' in self.request.GET: - param = self.request.GET.get('technologies') - lookup['technologies__slug__in'] = [p.lower() for p in param.split(',')] + if "technologies" in self.request.GET: + param = self.request.GET.get("technologies") + lookup["technologies__slug__in"] = [p.lower() for p in param.split(",")] - if 'keywords' in self.request.GET: - param = self.request.GET.get('keywords') - items = items.filter(seo_keywords__slug__in=[p.lower() for p in param.split(',')]) + if "keywords" in self.request.GET: + param = self.request.GET.get("keywords") + items = items.filter(seo_keywords__slug__in=[p.lower() for p in param.split(",")]) - if 'status' in self.request.GET: - param = self.request.GET.get('status') - lookup['status__in'] = [p.upper() for p in param.split(',')] + if "status" in self.request.GET: + param = self.request.GET.get("status") + lookup["status__in"] = [p.upper() for p in param.split(",")] else: - items = items.exclude(status='DELETED') - - if 'sync_status' in self.request.GET: - param = self.request.GET.get('sync_status') - lookup['sync_status__in'] = [p.upper() for p in param.split(',')] - - if 'video' in self.request.GET: - param = self.request.GET.get('video') - if param == 'true': - lookup['with_video'] = True - - if 'interactive' in self.request.GET: - param = self.request.GET.get('interactive') - if param == 'true': - 
lookup['interactive'] = True - - if 'graded' in self.request.GET: - param = self.request.GET.get('graded') - if param == 'true': - lookup['graded'] = True - - lookup['external'] = False - if 'external' in self.request.GET: - param = self.request.GET.get('external') - if param == 'true': - lookup['external'] = True - elif param == 'both': - lookup.pop('external', None) - - if 'superseded_by' in self.request.GET: - param = self.request.GET.get('superseded_by') - if param.lower() in ['none', 'null']: - lookup['superseded_by__isnull'] = True + items = items.exclude(status="DELETED") + + if "sync_status" in self.request.GET: + param = self.request.GET.get("sync_status") + lookup["sync_status__in"] = [p.upper() for p in param.split(",")] + + if "video" in self.request.GET: + param = self.request.GET.get("video") + if param == "true": + lookup["with_video"] = True + + if "interactive" in self.request.GET: + param = self.request.GET.get("interactive") + if param == "true": + lookup["interactive"] = True + + if "graded" in self.request.GET: + param = self.request.GET.get("graded") + if param == "true": + lookup["graded"] = True + + lookup["external"] = False + if "external" in self.request.GET: + param = self.request.GET.get("external") + if param == "true": + lookup["external"] = True + elif param == "both": + lookup.pop("external", None) + + if "superseded_by" in self.request.GET: + param = self.request.GET.get("superseded_by") + if param.lower() in ["none", "null"]: + lookup["superseded_by__isnull"] = True else: if param.isnumeric(): - lookup['superseded_by__id'] = param + lookup["superseded_by__id"] = param else: - lookup['superseded_by__slug'] = param + lookup["superseded_by__slug"] = param - if 'previous_version' in self.request.GET: - param = self.request.GET.get('previous_version') - if param.lower() in ['none', 'null']: - lookup['previous_version__isnull'] = True + if "previous_version" in self.request.GET: + param = self.request.GET.get("previous_version") + if param.lower() in ["none", "null"]: + lookup["previous_version__isnull"] = True else: if param.isnumeric(): - lookup['previous_version__id'] = param + lookup["previous_version__id"] = param else: - lookup['previous_version__slug'] = param + lookup["previous_version__slug"] = param - published_before = request.GET.get('published_before', '') - if published_before != '': + published_before = request.GET.get("published_before", "") + if published_before != "": items = items.filter(published_at__lte=published_before) - published_after = request.GET.get('published_after', '') - if published_after != '': + published_after = request.GET.get("published_after", "") + if published_after != "": items = items.filter(published_at__gte=published_after) - need_translation = self.request.GET.get('need_translation', False) - if need_translation == 'true': - items = items.annotate(num_translations=Count('all_translations')).filter(num_translations__lte=1) + need_translation = self.request.GET.get("need_translation", False) + if need_translation == "true": + items = items.annotate(num_translations=Count("all_translations")).filter(num_translations__lte=1) items = items.filter(**lookup).distinct() items = handler.queryset(items) @@ -1030,7 +1064,7 @@ def get(self, request, asset_slug=None, academy_id=None): return handler.response(serializer.data) - @capable_of('crud_asset') + @capable_of("crud_asset") def put(self, request, asset_slug=None, academy_id=None): data_list = request.data @@ -1040,56 +1074,59 @@ def put(self, request, asset_slug=None, 
academy_id=None): data_list = [request.data] if asset_slug is None: - raise ValidationException('Missing asset_slug') + raise ValidationException("Missing asset_slug") asset = Asset.objects.filter(slug__iexact=asset_slug, academy__id=academy_id).first() if asset is None: - raise ValidationException(f'This asset {asset_slug} does not exist for this academy {academy_id}', 404) + raise ValidationException(f"This asset {asset_slug} does not exist for this academy {academy_id}", 404) - data_list[0]['id'] = asset.id + data_list[0]["id"] = asset.id all_assets = [] for data in data_list: - if 'technologies' in data and len(data['technologies']) > 0 and isinstance(data['technologies'][0], str): - technology_ids = AssetTechnology.objects.filter(slug__in=data['technologies']).values_list('pk', - flat=True) - delta = len(data['technologies']) - len(technology_ids) + if "technologies" in data and len(data["technologies"]) > 0 and isinstance(data["technologies"][0], str): + technology_ids = AssetTechnology.objects.filter(slug__in=data["technologies"]).values_list( + "pk", flat=True + ) + delta = len(data["technologies"]) - len(technology_ids) if delta != 0: - raise ValidationException(f'{delta} of the assigned technologies for this lesson are not found') + raise ValidationException(f"{delta} of the assigned technologies for this lesson are not found") - data['technologies'] = technology_ids + data["technologies"] = technology_ids - if 'seo_keywords' in data and len(data['seo_keywords']) > 0: - if isinstance(data['seo_keywords'][0], str): - data['seo_keywords'] = AssetKeyword.objects.filter(slug__in=data['seo_keywords']).values_list( - 'pk', flat=True) + if "seo_keywords" in data and len(data["seo_keywords"]) > 0: + if isinstance(data["seo_keywords"][0], str): + data["seo_keywords"] = AssetKeyword.objects.filter(slug__in=data["seo_keywords"]).values_list( + "pk", flat=True + ) - if 'all_translations' in data and len(data['all_translations']) > 0 and isinstance( - data['all_translations'][0], str): - data['all_translations'] = Asset.objects.filter(slug__in=data['all_translations']).values_list( - 'pk', flat=True) + if ( + "all_translations" in data + and len(data["all_translations"]) > 0 + and isinstance(data["all_translations"][0], str) + ): + data["all_translations"] = Asset.objects.filter(slug__in=data["all_translations"]).values_list( + "pk", flat=True + ) - if 'id' not in data: - raise ValidationException('Cannot determine asset id', slug='without-id') + if "id" not in data: + raise ValidationException("Cannot determine asset id", slug="without-id") - instance = Asset.objects.filter(id=data['id'], academy__id=academy_id).first() + instance = Asset.objects.filter(id=data["id"], academy__id=academy_id).first() if not instance: - raise ValidationException(f'Asset({data["id"]}) does not exist on this academy', - code=404, - slug='not-found') + raise ValidationException( + f'Asset({data["id"]}) does not exist on this academy', code=404, slug="not-found" + ) all_assets.append(instance) all_serializers = [] index = -1 for data in data_list: index += 1 - serializer = AssetPUTSerializer(all_assets[index], - data=data, - context={ - 'request': request, - 'academy_id': academy_id - }) + serializer = AssetPUTSerializer( + all_assets[index], data=data, context={"request": request, "academy_id": academy_id} + ) all_serializers.append(serializer) if not serializer.is_valid(): return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) @@ -1105,33 +1142,41 @@ def put(self, request, asset_slug=None, 
academy_id=None): return Response(serializer.data, status=status.HTTP_200_OK) - @capable_of('crud_asset') + @capable_of("crud_asset") def post(self, request, academy_id=None): data = { **request.data, } - if 'seo_keywords' in data and len(data['seo_keywords']) > 0: - if isinstance(data['seo_keywords'][0], str): - data['seo_keywords'] = AssetKeyword.objects.filter(slug__in=data['seo_keywords']).values_list('pk', - flat=True) - - if 'all_translations' in data and len(data['all_translations']) > 0 and isinstance( - data['all_translations'][0], str): - data['all_translations'] = Asset.objects.filter(slug__in=data['all_translations']).values_list('pk', - flat=True) - - if 'technologies' in data and len(data['technologies']) > 0 and isinstance(data['technologies'][0], str): - technology_ids = AssetTechnology.objects.filter(slug__in=data['technologies']).values_list( - 'pk', flat=True).order_by('sort_priority') - delta = len(data['technologies']) - len(technology_ids) + if "seo_keywords" in data and len(data["seo_keywords"]) > 0: + if isinstance(data["seo_keywords"][0], str): + data["seo_keywords"] = AssetKeyword.objects.filter(slug__in=data["seo_keywords"]).values_list( + "pk", flat=True + ) + + if ( + "all_translations" in data + and len(data["all_translations"]) > 0 + and isinstance(data["all_translations"][0], str) + ): + data["all_translations"] = Asset.objects.filter(slug__in=data["all_translations"]).values_list( + "pk", flat=True + ) + + if "technologies" in data and len(data["technologies"]) > 0 and isinstance(data["technologies"][0], str): + technology_ids = ( + AssetTechnology.objects.filter(slug__in=data["technologies"]) + .values_list("pk", flat=True) + .order_by("sort_priority") + ) + delta = len(data["technologies"]) - len(technology_ids) if delta != 0: - raise ValidationException(f'{delta} of the assigned technologies for this asset are not found') + raise ValidationException(f"{delta} of the assigned technologies for this asset are not found") - data['technologies'] = technology_ids + data["technologies"] = technology_ids - serializer = PostAssetSerializer(data=data, context={'request': request, 'academy': academy_id}) + serializer = PostAssetSerializer(data=data, context={"request": request, "academy": academy_id}) if serializer.is_valid(): instance = serializer.save() async_pull_from_github.delay(instance.slug) @@ -1143,10 +1188,11 @@ class V2AcademyAssetView(APIView): """ List all snippets, or create a new snippet. """ - extensions = APIViewExtensions(cache=AssetCache, sort='-published_at', paginate=True) - @capable_of('read_asset') - @consume('read-lesson', consumer=asset_by_slug) + extensions = APIViewExtensions(cache=AssetCache, sort="-published_at", paginate=True) + + @capable_of("read_asset") + @consume("read-lesson", consumer=asset_by_slug) def get(self, request, asset: Asset, academy: Academy): serializer = AcademyAssetSerializer(asset) return Response(serializer.data) @@ -1156,9 +1202,10 @@ class AssetImageView(APIView, GenerateLookupsMixin): """ List all snippets, or create a new snippet. 
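The PUT and POST handlers above repeat the same slugs-to-primary-keys conversion for `technologies`, `seo_keywords` and `all_translations`. A small sketch of that pattern as a reusable helper; this is an editorial illustration rather than code from the diff, and the model import path is assumed from the registry app.

```python
# Sketch only: the slug -> pk normalisation the asset PUT/POST above performs inline.
from breathecode.registry.models import AssetTechnology  # assumed model path


def resolve_slugs(model, slugs, label="items"):
    """Return the pks for `slugs`, raising if any slug does not resolve."""
    pks = model.objects.filter(slug__in=slugs).values_list("pk", flat=True)
    missing = len(slugs) - len(pks)
    if missing != 0:
        raise Exception(f"{missing} of the assigned {label} are not found")
    return list(pks)


# Usage, mirroring the handlers above:
# data["technologies"] = resolve_slugs(AssetTechnology, data["technologies"], "technologies")
```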
""" - extensions = APIViewExtensions(sort='-created_at', paginate=True) - @capable_of('read_asset') + extensions = APIViewExtensions(sort="-created_at", paginate=True) + + @capable_of("read_asset") def get(self, request, academy_id=None): handler = self.extensions(request) @@ -1166,17 +1213,17 @@ def get(self, request, academy_id=None): items = AssetImage.objects.filter(assets__academy__id=academy_id) lookup = {} - if 'slug' in self.request.GET: - param = self.request.GET.get('slug') - lookup['assets__slug__in'] = [p.lower() for p in param.split(',')] + if "slug" in self.request.GET: + param = self.request.GET.get("slug") + lookup["assets__slug__in"] = [p.lower() for p in param.split(",")] - if 'download_status' in self.request.GET: - param = self.request.GET.get('download_status') - lookup['download_status__in'] = [p.upper() for p in param.split(',')] + if "download_status" in self.request.GET: + param = self.request.GET.get("download_status") + lookup["download_status__in"] = [p.upper() for p in param.split(",")] - if 'original_url' in self.request.GET: - param = self.request.GET.get('original_url') - lookup['original_url'] = param + if "original_url" in self.request.GET: + param = self.request.GET.get("original_url") + lookup["original_url"] = param items = items.filter(**lookup) items = handler.queryset(items) @@ -1189,9 +1236,10 @@ class AcademyAssetCommentView(APIView, GenerateLookupsMixin): """ List all snippets, or create a new snippet. """ - extensions = APIViewExtensions(cache=AssetCommentCache, sort='-created_at', paginate=True) - @capable_of('read_asset') + extensions = APIViewExtensions(cache=AssetCommentCache, sort="-created_at", paginate=True) + + @capable_of("read_asset") def get(self, request, academy_id=None): handler = self.extensions(request) @@ -1202,31 +1250,31 @@ def get(self, request, academy_id=None): items = AssetComment.objects.filter(asset__academy__id=academy_id) lookup = {} - if 'asset' in self.request.GET: - param = self.request.GET.get('asset') - lookup['asset__slug__in'] = [p.lower() for p in param.split(',')] + if "asset" in self.request.GET: + param = self.request.GET.get("asset") + lookup["asset__slug__in"] = [p.lower() for p in param.split(",")] - if 'resolved' in self.request.GET: - param = self.request.GET.get('resolved') - if param == 'true': - lookup['resolved'] = True - elif param == 'false': - lookup['resolved'] = False + if "resolved" in self.request.GET: + param = self.request.GET.get("resolved") + if param == "true": + lookup["resolved"] = True + elif param == "false": + lookup["resolved"] = False - if 'delivered' in self.request.GET: - param = self.request.GET.get('delivered') - if param == 'true': - lookup['delivered'] = True - elif param == 'false': - lookup['delivered'] = False + if "delivered" in self.request.GET: + param = self.request.GET.get("delivered") + if param == "true": + lookup["delivered"] = True + elif param == "false": + lookup["delivered"] = False - if 'owner' in self.request.GET: - param = self.request.GET.get('owner') - lookup['owner__email'] = param + if "owner" in self.request.GET: + param = self.request.GET.get("owner") + lookup["owner__email"] = param - if 'author' in self.request.GET: - param = self.request.GET.get('author') - lookup['author__email'] = param + if "author" in self.request.GET: + param = self.request.GET.get("author") + lookup["author__email"] = param items = items.filter(**lookup) items = handler.queryset(items) @@ -1234,48 +1282,48 @@ def get(self, request, academy_id=None): serializer = 
AcademyCommentSerializer(items, many=True) return handler.response(serializer.data) - @capable_of('crud_asset') + @capable_of("crud_asset") def post(self, request, academy_id=None): - payload = {**request.data, 'author': request.user.id} + payload = {**request.data, "author": request.user.id} - serializer = PostAssetCommentSerializer(data=payload, context={'request': request, 'academy': academy_id}) + serializer = PostAssetCommentSerializer(data=payload, context={"request": request, "academy": academy_id}) if serializer.is_valid(): serializer.save() serializer = AcademyCommentSerializer(serializer.instance) return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - @capable_of('crud_asset') + @capable_of("crud_asset") def put(self, request, comment_id, academy_id=None): if comment_id is None: - raise ValidationException('Missing comment_id') + raise ValidationException("Missing comment_id") comment = AssetComment.objects.filter(id=comment_id, asset__academy__id=academy_id).first() if comment is None: - raise ValidationException('This comment does not exist for this academy', 404) + raise ValidationException("This comment does not exist for this academy", 404) data = {**request.data} - if 'status' in request.data and request.data['status'] == 'NOT_STARTED': - data['author'] = None + if "status" in request.data and request.data["status"] == "NOT_STARTED": + data["author"] = None - serializer = PutAssetCommentSerializer(comment, data=data, context={'request': request, 'academy': academy_id}) + serializer = PutAssetCommentSerializer(comment, data=data, context={"request": request, "academy": academy_id}) if serializer.is_valid(): serializer.save() serializer = AcademyCommentSerializer(serializer.instance) return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - @capable_of('crud_asset') + @capable_of("crud_asset") def delete(self, request, comment_id=None, academy_id=None): if comment_id is None: - raise ValidationException('Missing comment ID on the URL', 404) + raise ValidationException("Missing comment ID on the URL", 404) comment = AssetComment.objects.filter(id=comment_id, asset__academy__id=academy_id).first() if comment is None: - raise ValidationException('This comment does not exist', 404) + raise ValidationException("This comment does not exist", 404) comment.delete() return Response(None, status=status.HTTP_204_NO_CONTENT) @@ -1285,9 +1333,10 @@ class AcademyAssetAliasView(APIView, GenerateLookupsMixin): """ List all snippets, or create a new snippet. 
""" - extensions = APIViewExtensions(sort='-created_at', paginate=True) - @capable_of('read_asset') + extensions = APIViewExtensions(sort="-created_at", paginate=True) + + @capable_of("read_asset") def get(self, request, alias_slug=None, academy_id=None): handler = self.extensions(request) @@ -1301,10 +1350,13 @@ def get(self, request, alias_slug=None, academy_id=None): item = AssetAlias.objects.filter(slug=alias_slug, asset__academy__id=academy_id).first() if not item: raise ValidationException( - translation(lang, - en='Asset alias with slug {alias_slug} not found for this academy', - es='No se ha encontrado el alias {alias_slug} para esta academia', - slug='not-found')) + translation( + lang, + en="Asset alias with slug {alias_slug} not found for this academy", + es="No se ha encontrado el alias {alias_slug} para esta academia", + slug="not-found", + ) + ) serializer = AssetAliasSerializer(item, many=False) return handler.response(serializer.data) @@ -1312,9 +1364,9 @@ def get(self, request, alias_slug=None, academy_id=None): items = AssetAlias.objects.filter(asset__academy__id=academy_id) lookup = {} - if 'asset' in self.request.GET: - param = self.request.GET.get('asset') - lookup['asset__slug__in'] = [p.lower() for p in param.split(',')] + if "asset" in self.request.GET: + param = self.request.GET.get("asset") + lookup["asset__slug__in"] = [p.lower() for p in param.split(",")] items = items.filter(**lookup) items = handler.queryset(items) @@ -1322,30 +1374,39 @@ def get(self, request, alias_slug=None, academy_id=None): serializer = AssetAliasSerializer(items, many=True) return handler.response(serializer.data) - @capable_of('crud_asset') + @capable_of("crud_asset") def delete(self, request, alias_slug=None, academy_id=None): lang = get_user_language(request) if not alias_slug: raise ValidationException( - translation(lang, - en='Missing alias slug', - es='Especifica el slug del alias que deseas eliminar', - slug='missing-alias-slug')) + translation( + lang, + en="Missing alias slug", + es="Especifica el slug del alias que deseas eliminar", + slug="missing-alias-slug", + ) + ) item = AssetAlias.objects.filter(slug=alias_slug, asset__academy__id=academy_id).first() if not item: raise ValidationException( - translation(lang, - en=f'Asset alias with slug {alias_slug} not found for this academy', - es=f'No se ha encontrado el alias {alias_slug} para esta academia', - slug='not-found')) + translation( + lang, + en=f"Asset alias with slug {alias_slug} not found for this academy", + es=f"No se ha encontrado el alias {alias_slug} para esta academia", + slug="not-found", + ) + ) if item.asset.slug == item.slug: raise ValidationException( - translation(lang, - en='Rename the asset slug before deleting this alias', - es='Necesitas renombrar el slug principal del asset antes de eliminar este alias', - slug='rename-asset-slug')) + translation( + lang, + en="Rename the asset slug before deleting this alias", + es="Necesitas renombrar el slug principal del asset antes de eliminar este alias", + slug="rename-asset-slug", + ) + ) item.delete() @@ -1356,9 +1417,10 @@ class AcademyCategoryView(APIView, GenerateLookupsMixin): """ List all snippets, or create a new snippet. 
""" - extensions = APIViewExtensions(cache=CategoryCache, sort='-created_at', paginate=True) - @capable_of('read_category') + extensions = APIViewExtensions(cache=CategoryCache, sort="-created_at", paginate=True) + + @capable_of("read_category") def get(self, request, category_slug=None, academy_id=None): handler = self.extensions(request) @@ -1369,11 +1431,11 @@ def get(self, request, category_slug=None, academy_id=None): items = AssetCategory.objects.filter(academy__id=academy_id) lookup = {} - like = request.GET.get('like', None) - if like is not None and like != 'undefined' and like != '': + like = request.GET.get("like", None) + if like is not None and like != "undefined" and like != "": items = items.filter(Q(slug__icontains=like) | Q(title__icontains=like)) - lang = request.GET.get('lang', None) + lang = request.GET.get("lang", None) if lang is not None: items = items.filter(lang__iexact=lang) @@ -1383,20 +1445,20 @@ def get(self, request, category_slug=None, academy_id=None): serializer = AssetCategorySerializer(items, many=True) return handler.response(serializer.data) - @capable_of('crud_category') + @capable_of("crud_category") def post(self, request, academy_id=None): data = {**request.data} - if 'lang' in data: - data['lang'] = data['lang'].upper() + if "lang" in data: + data["lang"] = data["lang"].upper() - serializer = POSTCategorySerializer(data=data, context={'request': request, 'academy': academy_id}) + serializer = POSTCategorySerializer(data=data, context={"request": request, "academy": academy_id}) if serializer.is_valid(): serializer.save() return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - @capable_of('crud_category') + @capable_of("crud_category") def put(self, request, category_slug, academy_id=None): cat = None @@ -1406,21 +1468,21 @@ def put(self, request, category_slug, academy_id=None): cat = AssetCategory.objects.filter(slug=category_slug, academy__id=academy_id).first() if cat is None: - raise ValidationException('This category does not exist for this academy', 404) + raise ValidationException("This category does not exist for this academy", 404) data = {**request.data} - if 'lang' in data: - data['lang'] = data['lang'].upper() + if "lang" in data: + data["lang"] = data["lang"].upper() - serializer = PUTCategorySerializer(cat, data=data, context={'request': request, 'academy': academy_id}) + serializer = PUTCategorySerializer(cat, data=data, context={"request": request, "academy": academy_id}) if serializer.is_valid(): serializer.save() return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - @capable_of('crud_category') + @capable_of("crud_category") def delete(self, request, academy_id=None): - lookups = self.generate_lookups(request, many_fields=['id']) + lookups = self.generate_lookups(request, many_fields=["id"]) if lookups: items = AssetCategory.objects.filter(**lookups, academy__id=academy_id) @@ -1428,16 +1490,17 @@ def delete(self, request, academy_id=None): item.delete() return Response(None, status=status.HTTP_204_NO_CONTENT) else: - raise ValidationException('Category ids were not provided', 404, slug='missing_ids') + raise ValidationException("Category ids were not provided", 404, slug="missing_ids") class AcademyKeywordView(APIView, GenerateLookupsMixin): """ List all snippets, or create a new snippet. 
""" - extensions = APIViewExtensions(cache=KeywordCache, sort='-created_at', paginate=True) - @capable_of('read_keyword') + extensions = APIViewExtensions(cache=KeywordCache, sort="-created_at", paginate=True) + + @capable_of("read_keyword") def get(self, request, keyword_slug=None, academy_id=None): handler = self.extensions(request) @@ -1448,20 +1511,20 @@ def get(self, request, keyword_slug=None, academy_id=None): items = AssetKeyword.objects.filter(academy__id=academy_id) lookup = {} - if 'cluster' in self.request.GET: - param = self.request.GET.get('cluster') - if param == 'null': - lookup['cluster'] = None + if "cluster" in self.request.GET: + param = self.request.GET.get("cluster") + if param == "null": + lookup["cluster"] = None else: - lookup['cluster__slug__in'] = [p.lower() for p in param.split(',')] + lookup["cluster__slug__in"] = [p.lower() for p in param.split(",")] - like = request.GET.get('like', None) - if like is not None and like != 'undefined' and like != '': + like = request.GET.get("like", None) + if like is not None and like != "undefined" and like != "": items = items.filter(Q(slug__icontains=like) | Q(title__icontains=like)) - lang = request.GET.get('lang', None) - if lang is not None and lang != 'undefined' and lang != '': - lookup['lang__iexact'] = lang + lang = request.GET.get("lang", None) + if lang is not None and lang != "undefined" and lang != "": + lookup["lang__iexact"] = lang items = items.filter(**lookup) items = handler.queryset(items) @@ -1469,37 +1532,37 @@ def get(self, request, keyword_slug=None, academy_id=None): serializer = KeywordSmallSerializer(items, many=True) return handler.response(serializer.data) - @capable_of('crud_keyword') + @capable_of("crud_keyword") def post(self, request, academy_id=None): payload = {**request.data} - serializer = PostKeywordSerializer(data=payload, context={'request': request, 'academy': academy_id}) + serializer = PostKeywordSerializer(data=payload, context={"request": request, "academy": academy_id}) if serializer.is_valid(): serializer.save() serializer = AssetKeywordBigSerializer(serializer.instance) return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - @capable_of('crud_keyword') + @capable_of("crud_keyword") def put(self, request, keyword_slug, academy_id=None): keywd = AssetKeyword.objects.filter(slug=keyword_slug, academy__id=academy_id).first() if keywd is None: - raise ValidationException('This keyword does not exist for this academy', 404) + raise ValidationException("This keyword does not exist for this academy", 404) data = {**request.data} - serializer = PUTKeywordSerializer(keywd, data=data, context={'request': request, 'academy': academy_id}) + serializer = PUTKeywordSerializer(keywd, data=data, context={"request": request, "academy": academy_id}) if serializer.is_valid(): serializer.save() serializer = AssetKeywordBigSerializer(serializer.instance) return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - @capable_of('crud_keyword') + @capable_of("crud_keyword") def delete(self, request, academy_id=None): - lookups = self.generate_lookups(request, many_fields=['id']) + lookups = self.generate_lookups(request, many_fields=["id"]) if lookups: items = AssetKeyword.objects.filter(**lookups, academy__id=academy_id) @@ -1507,24 +1570,27 @@ def delete(self, request, academy_id=None): item.delete() return Response(None, 
status=status.HTTP_204_NO_CONTENT) else: - raise ValidationException('Asset ids were not provided', 404, slug='missing_ids') + raise ValidationException("Asset ids were not provided", 404, slug="missing_ids") class AcademyKeywordClusterView(APIView, GenerateLookupsMixin): """ List all snippets, or create a new snippet. """ - extensions = APIViewExtensions(sort='-created_at', paginate=True) - @capable_of('read_keywordcluster') + extensions = APIViewExtensions(sort="-created_at", paginate=True) + + @capable_of("read_keywordcluster") def get(self, request, cluster_slug=None, academy_id=None): if cluster_slug is not None: item = KeywordCluster.objects.filter(academy__id=academy_id, slug=cluster_slug).first() if item is None: - raise ValidationException(f'Cluster with slug {cluster_slug} not found for this academy', - status.HTTP_404_NOT_FOUND, - slug='cluster-not-found') + raise ValidationException( + f"Cluster with slug {cluster_slug} not found for this academy", + status.HTTP_404_NOT_FOUND, + slug="cluster-not-found", + ) serializer = KeywordClusterBigSerializer(item) return Response(serializer.data, status=status.HTTP_200_OK) @@ -1538,14 +1604,14 @@ def get(self, request, cluster_slug=None, academy_id=None): items = KeywordCluster.objects.filter(academy__id=academy_id) lookup = {} - if 'visibility' in self.request.GET: - param = self.request.GET.get('visibility') - lookup['visibility'] = param.upper() + if "visibility" in self.request.GET: + param = self.request.GET.get("visibility") + lookup["visibility"] = param.upper() else: - lookup['visibility'] = 'PUBLIC' + lookup["visibility"] = "PUBLIC" - like = request.GET.get('like', None) - if like is not None and like != 'undefined' and like != '': + like = request.GET.get("like", None) + if like is not None and like != "undefined" and like != "": items = items.filter(Q(slug__icontains=like) | Q(title__icontains=like)) items = items.filter(**lookup) @@ -1554,43 +1620,40 @@ def get(self, request, cluster_slug=None, academy_id=None): serializer = KeywordClusterMidSerializer(items, many=True) return handler.response(serializer.data) - @capable_of('crud_keywordcluster') + @capable_of("crud_keywordcluster") def post(self, request, academy_id=None): - payload = {**request.data, 'author': request.user.id} + payload = {**request.data, "author": request.user.id} - serializer = PostKeywordClusterSerializer(data=payload, context={'request': request, 'academy': academy_id}) + serializer = PostKeywordClusterSerializer(data=payload, context={"request": request, "academy": academy_id}) if serializer.is_valid(): serializer.save() serializer = KeywordClusterBigSerializer(serializer.instance) return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - @capable_of('crud_keywordcluster') + @capable_of("crud_keywordcluster") def put(self, request, cluster_slug, academy_id=None): cluster = KeywordCluster.objects.filter(slug=cluster_slug, academy__id=academy_id).first() if cluster is None: - raise ValidationException('This cluster does not exist for this academy', 404) + raise ValidationException("This cluster does not exist for this academy", 404) data = {**request.data} - data.pop('academy', False) - - serializer = PostKeywordClusterSerializer(cluster, - data=data, - context={ - 'request': request, - 'academy': academy_id - }) + data.pop("academy", False) + + serializer = PostKeywordClusterSerializer( + cluster, data=data, context={"request": request, "academy": academy_id} + ) if 
serializer.is_valid(): serializer.save() serializer = KeywordClusterBigSerializer(serializer.instance) return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - @capable_of('crud_keywordcluster') + @capable_of("crud_keywordcluster") def delete(self, request, academy_id=None): - lookups = self.generate_lookups(request, many_fields=['id']) + lookups = self.generate_lookups(request, many_fields=["id"]) if lookups: items = KeywordCluster.objects.filter(**lookups, academy__id=academy_id) @@ -1598,4 +1661,4 @@ def delete(self, request, academy_id=None): item.delete() return Response(None, status=status.HTTP_204_NO_CONTENT) else: - raise ValidationException('Cluster ids were not provided', 404, slug='missing_ids') + raise ValidationException("Cluster ids were not provided", 404, slug="missing_ids") diff --git a/breathecode/renderers.py b/breathecode/renderers.py index 829629ff8..c86b73c80 100644 --- a/breathecode/renderers.py +++ b/breathecode/renderers.py @@ -2,12 +2,12 @@ class PlainTextRenderer(renderers.BaseRenderer): - media_type = 'text/plain' - format = 'txt' - charset = 'iso-8859-1' + media_type = "text/plain" + format = "txt" + charset = "iso-8859-1" def render(self, data, media_type=None, renderer_context=None): - if hasattr(data, 'encode'): + if hasattr(data, "encode"): return data.encode(self.charset) else: return None diff --git a/breathecode/schema.py b/breathecode/schema.py index 9872002bf..e146111cd 100644 --- a/breathecode/schema.py +++ b/breathecode/schema.py @@ -5,7 +5,7 @@ class Query(graphene.ObjectType): - hello = graphene.String(default_value='Hi!') + hello = graphene.String(default_value="Hi!") Admissions = graphene.Field(Admissions) def resolve_Admissions(self, info): diff --git a/breathecode/services/activecampaign/actions/deal_add.py b/breathecode/services/activecampaign/actions/deal_add.py index 84f4c7968..9444a9a23 100644 --- a/breathecode/services/activecampaign/actions/deal_add.py +++ b/breathecode/services/activecampaign/actions/deal_add.py @@ -3,11 +3,11 @@ from django.utils import timezone status = { - 'Won': 'WON', - 'Lost': 'LOST', - '0': None, - '1': 'WON', - '2': 'LOST', + "Won": "WON", + "Lost": "LOST", + "0": None, + "1": "WON", + "2": "LOST", } logger = logging.getLogger(__name__) @@ -18,36 +18,47 @@ def deal_add(self, webhook, payload: dict, acp_ids): from breathecode.marketing.models import FormEntry from breathecode.marketing.tasks import async_update_deal_custom_fields - entry = FormEntry.objects.filter(ac_deal_id=payload['deal[id]'], - storage_status='PERSISTED').order_by('-created_at').first() - if entry is None and 'deal[contactid]' in payload: - entry = FormEntry.objects.filter(ac_contact_id=payload['deal[contactid]'], - ac_deal_id__isnull=True, - storage_status='PERSISTED').order_by('-created_at').first() - if entry is None and 'deal[contact_email]' in payload: - entry = FormEntry.objects.filter(email=payload['deal[contact_email]'], - ac_deal_id__isnull=True, - storage_status='PERSISTED').order_by('-created_at').first() + entry = ( + FormEntry.objects.filter(ac_deal_id=payload["deal[id]"], storage_status="PERSISTED") + .order_by("-created_at") + .first() + ) + if entry is None and "deal[contactid]" in payload: + entry = ( + FormEntry.objects.filter( + ac_contact_id=payload["deal[contactid]"], ac_deal_id__isnull=True, storage_status="PERSISTED" + ) + .order_by("-created_at") + .first() + ) + if entry is None and "deal[contact_email]" in payload: + entry = ( + 
FormEntry.objects.filter( + email=payload["deal[contact_email]"], ac_deal_id__isnull=True, storage_status="PERSISTED" + ) + .order_by("-created_at") + .first() + ) if entry is None: - raise Exception(f'Impossible to find formentry for webhook {webhook.id} -> {webhook.webhook_type} ') + raise Exception(f"Impossible to find formentry for webhook {webhook.id} -> {webhook.webhook_type} ") logger.debug(payload) - entry.ac_deal_id = payload['deal[id]'] - entry.ac_contact_id = payload['deal[contactid]'] - if payload['deal[status]'] in status: + entry.ac_deal_id = payload["deal[id]"] + entry.ac_contact_id = payload["deal[contactid]"] + if payload["deal[status]"] in status: # check if we just won or lost the deal - if entry.deal_status is None and status[payload['deal[status]']] == 'WON': + if entry.deal_status is None and status[payload["deal[status]"]] == "WON": entry.won_at = timezone.now() - elif status[payload['deal[status]']] != 'WON': + elif status[payload["deal[status]"]] != "WON": entry.won_at = None - entry.deal_status = status[payload['deal[status]']] - entry.ac_deal_owner_id = payload['deal[owner]'] - entry.ac_deal_owner_full_name = payload['deal[owner_firstname]'] + ' ' + payload['deal[owner_lastname]'] + entry.deal_status = status[payload["deal[status]"]] + entry.ac_deal_owner_id = payload["deal[owner]"] + entry.ac_deal_owner_full_name = payload["deal[owner_firstname]"] + " " + payload["deal[owner_lastname]"] - entry.ac_deal_amount = float(payload['deal[value_raw]']) - entry.ac_deal_currency_code = payload['deal[currency]'] + entry.ac_deal_amount = float(payload["deal[value_raw]"]) + entry.ac_deal_currency_code = payload["deal[currency]"] entry.save() diff --git a/breathecode/services/activecampaign/actions/deal_update.py b/breathecode/services/activecampaign/actions/deal_update.py index 5ce66bc8b..b73fc5124 100644 --- a/breathecode/services/activecampaign/actions/deal_update.py +++ b/breathecode/services/activecampaign/actions/deal_update.py @@ -5,53 +5,64 @@ logger = logging.getLogger(__name__) status = { - 'Won': 'WON', - 'Lost': 'LOST', - '0': None, - '1': 'WON', - '2': 'LOST', + "Won": "WON", + "Lost": "LOST", + "0": None, + "1": "WON", + "2": "LOST", } -#FIXME: it's unused +# FIXME: it's unused def deal_update(ac_cls, webhook, payload: dict, acp_ids): # prevent circular dependency import between thousand modules previuosly loaded and cached from breathecode.marketing.models import FormEntry - entry = FormEntry.objects.filter(ac_deal_id=payload['deal[id]'], - storage_status='PERSISTED').order_by('-created_at').first() - if entry is None and 'deal[contactid]' in payload: - entry = FormEntry.objects.filter(ac_contact_id=payload['deal[contactid]'], - storage_status='PERSISTED').order_by('-created_at').first() - if entry is None and 'deal[contact_email]' in payload: - entry = FormEntry.objects.filter(email=payload['deal[contact_email]'], - storage_status='PERSISTED').order_by('-created_at').first() + entry = ( + FormEntry.objects.filter(ac_deal_id=payload["deal[id]"], storage_status="PERSISTED") + .order_by("-created_at") + .first() + ) + if entry is None and "deal[contactid]" in payload: + entry = ( + FormEntry.objects.filter(ac_contact_id=payload["deal[contactid]"], storage_status="PERSISTED") + .order_by("-created_at") + .first() + ) + if entry is None and "deal[contact_email]" in payload: + entry = ( + FormEntry.objects.filter(email=payload["deal[contact_email]"], storage_status="PERSISTED") + .order_by("-created_at") + .first() + ) if entry is None: - raise 
Exception(f'Impossible to find formentry with deal {payload["deal[id]"]} for webhook {webhook.id} -> ' - f'{webhook.webhook_type} ') + raise Exception( + f'Impossible to find formentry with deal {payload["deal[id]"]} for webhook {webhook.id} -> ' + f"{webhook.webhook_type} " + ) - entry.ac_deal_id = payload['deal[id]'] + entry.ac_deal_id = payload["deal[id]"] - if 'contact[id]' in payload: - entry.ac_contact_id = payload['contact[id]'] + if "contact[id]" in payload: + entry.ac_contact_id = payload["contact[id]"] - if 'deal[status]' in payload and payload['deal[status]'] in status: + if "deal[status]" in payload and payload["deal[status]"] in status: # check if we just won or lost the deal - if entry.deal_status is None and status[payload['deal[status]']] == 'WON': + if entry.deal_status is None and status[payload["deal[status]"]] == "WON": entry.won_at = timezone.now() - elif status[payload['deal[status]']] != 'WON': + elif status[payload["deal[status]"]] != "WON": entry.won_at = None - entry.deal_status = status[payload['deal[status]']] - entry.ac_deal_owner_id = payload['deal[owner]'] - entry.ac_deal_owner_full_name = payload['deal[owner_firstname]'] + ' ' + payload['deal[owner_lastname]'] + entry.deal_status = status[payload["deal[status]"]] + entry.ac_deal_owner_id = payload["deal[owner]"] + entry.ac_deal_owner_full_name = payload["deal[owner_firstname]"] + " " + payload["deal[owner_lastname]"] - entry.ac_deal_amount = float(payload['deal[value_raw]']) - entry.ac_deal_currency_code = payload['deal[currency]'] + entry.ac_deal_amount = float(payload["deal[value_raw]"]) + entry.ac_deal_currency_code = payload["deal[currency]"] # lets get the custom fields and use them to update some local fields - logger.debug('looking for deal on activecampaign api') + logger.debug("looking for deal on activecampaign api") deal_custom_fields = ac_cls.get_deal_customfields(entry.ac_deal_id) # WARNING: Do not update the utm's back to breathecode, we want to keep the original trace @@ -71,22 +82,22 @@ def deal_update(ac_cls, webhook, payload: dict, acp_ids): def update_course(ac_cls, entry, acp_ids, deal_custom_fields): - deal_ids = acp_ids['deal'] + deal_ids = acp_ids["deal"] - if deal_ids['utm_course'] in deal_custom_fields: - new_course = deal_custom_fields[deal_ids['utm_course']] - if new_course is not None and new_course != '': + if deal_ids["utm_course"] in deal_custom_fields: + new_course = deal_custom_fields[deal_ids["utm_course"]] + if new_course is not None and new_course != "": entry.ac_deal_course = new_course return entry def update_location(ac_cls, entry, acp_ids, deal_custom_fields): - deal_ids = acp_ids['deal'] + deal_ids = acp_ids["deal"] - if deal_ids['utm_location'] in deal_custom_fields: - new_location = deal_custom_fields[deal_ids['utm_location']] - if new_location is not None and entry.location != new_location and new_location != '': + if deal_ids["utm_location"] in deal_custom_fields: + new_location = deal_custom_fields[deal_ids["utm_location"]] + if new_location is not None and entry.location != new_location and new_location != "": entry.ac_deal_location = new_location new_alias = AcademyAlias.objects.filter(slug=new_location).first() @@ -97,13 +108,13 @@ def update_location(ac_cls, entry, acp_ids, deal_custom_fields): def update_expected_cohort(ac_cls, entry, acp_ids, deal_custom_fields): - deal_ids = acp_ids['deal'] + deal_ids = acp_ids["deal"] if entry.academy is not None: - if deal_ids['expected_cohort'] in deal_custom_fields: - entry.ac_expected_cohort = 
deal_custom_fields[deal_ids['expected_cohort']] - if deal_ids['expected_cohort_date'] in deal_custom_fields: - entry.ac_expected_cohort_date = deal_custom_fields[deal_ids['expected_cohort_date']] + if deal_ids["expected_cohort"] in deal_custom_fields: + entry.ac_expected_cohort = deal_custom_fields[deal_ids["expected_cohort"]] + if deal_ids["expected_cohort_date"] in deal_custom_fields: + entry.ac_expected_cohort_date = deal_custom_fields[deal_ids["expected_cohort_date"]] else: - logger.debug('No academy for EntryForm, ignoring deal custom fields') + logger.debug("No academy for EntryForm, ignoring deal custom fields") return entry diff --git a/breathecode/services/activecampaign/actions/test.py b/breathecode/services/activecampaign/actions/test.py index fc61bd0d3..656f0cfc7 100644 --- a/breathecode/services/activecampaign/actions/test.py +++ b/breathecode/services/activecampaign/actions/test.py @@ -4,4 +4,4 @@ def test(self, webhook, payload: dict): - logger.info('performing test request') + logger.info("performing test request") diff --git a/breathecode/services/activecampaign/client.py b/breathecode/services/activecampaign/client.py index 151f68958..e298cf9c8 100644 --- a/breathecode/services/activecampaign/client.py +++ b/breathecode/services/activecampaign/client.py @@ -16,54 +16,54 @@ # "strong": "49", # "soft": "48", # "newsletter_list": "3", - 'utm_plan': '67', - 'utm_placement': '66', - 'utm_term': '65', - 'utm_source': '59', - 'utm_medium': '36', - 'utm_content': '35', - 'utm_url': '60', - 'utm_location': '18', - 'utm_campaign': '33', - 'gender': '45', - 'course': '2', - 'client_comments': '13', - 'current_download': '46', # used in downloadables - 'utm_language': '16', - 'utm_country': '19', - 'gclid': '26', - 'referral_key': '27', - 'deal': { - 'expected_cohort': '10', - 'expected_cohort_date': '21', - 'utm_location': '16', - 'utm_course': '6', - 'utm_url': '5', - 'gclid': '4', - 'utm_campaign': '7', - 'utm_source': '8', - 'utm_medium': '9', - 'utm_location': '16', - 'utm_term': '22', - 'utm_placement': '23', - 'referral_key': '34', - 'scheudule': '35', - } + "utm_plan": "67", + "utm_placement": "66", + "utm_term": "65", + "utm_source": "59", + "utm_medium": "36", + "utm_content": "35", + "utm_url": "60", + "utm_location": "18", + "utm_campaign": "33", + "gender": "45", + "course": "2", + "client_comments": "13", + "current_download": "46", # used in downloadables + "utm_language": "16", + "utm_country": "19", + "gclid": "26", + "referral_key": "27", + "deal": { + "expected_cohort": "10", + "expected_cohort_date": "21", + "utm_location": "16", + "utm_course": "6", + "utm_url": "5", + "gclid": "4", + "utm_campaign": "7", + "utm_source": "8", + "utm_medium": "9", + "utm_location": "16", + "utm_term": "22", + "utm_placement": "23", + "referral_key": "34", + "scheudule": "35", + }, } def map_ids(contact_customfield_id): contact_to_deal = { - '66': '23', - '65': '22', - '59': '8', - '36': '9', - '60': '5', - '18': '16', - '33': '7', - '2': '6', - '26': '4', - '27': '34', + "66": "23", + "65": "22", + "59": "8", + "36": "9", + "60": "5", + "18": "16", + "33": "7", + "2": "6", + "26": "4", + "27": "34", } if contact_customfield_id in contact_to_deal: @@ -75,11 +75,11 @@ def map_ids(contact_customfield_id): class ActiveCampaignClient(Client): def _request(self, method, endpoint, headers=None, **kwargs): - _headers = {'Accept': 'application/json', 'Content-Type': 'application/json', 'Api-Token': self.api_key} + _headers = {"Accept": "application/json", "Content-Type": 
"application/json", "Api-Token": self.api_key} if headers: _headers.update(headers) - kwargs['timeout'] = 2 + kwargs["timeout"] = 2 return self._parse(requests.request(method, self.BASE_URL + endpoint, headers=_headers, **kwargs)) @@ -89,14 +89,14 @@ class ActiveCampaign: def __init__(self, token=None, url=None): if token is None: - token = os.getenv('ACTIVE_CAMPAIGN_KEY', '') + token = os.getenv("ACTIVE_CAMPAIGN_KEY", "") if url is None: - url = os.getenv('ACTIVE_CAMPAIGN_URL', '') + url = os.getenv("ACTIVE_CAMPAIGN_URL", "") self.host = url self.token = token - self.headers = {'Authorization': f'Bearer {token}'} + self.headers = {"Authorization": f"Bearer {token}"} def execute_action(self, webhook_id: int): # prevent circular dependency import between thousand modules previuosly loaded and cached @@ -115,39 +115,39 @@ def execute_action(self, webhook_id: int): webhook = ActiveCampaignWebhook.objects.filter(id=webhook_id).first() if not webhook: - raise Exception('Invalid webhook') + raise Exception("Invalid webhook") if not webhook.webhook_type: - raise Exception('Impossible to webhook_type') + raise Exception("Impossible to webhook_type") webhook.run_at = timezone.now() action = webhook.webhook_type - logger.debug(f'Executing ActiveCampaign Webhook => {action}') + logger.debug(f"Executing ActiveCampaign Webhook => {action}") if hasattr(actions, action): - logger.debug('Action found') + logger.debug("Action found") fn = getattr(actions, action) try: fn(self, webhook, json.loads(webhook.payload), acp_ids) - logger.debug('Mark active campaign action as done') - webhook.status = 'DONE' - webhook.status_text = 'OK' + logger.debug("Mark active campaign action as done") + webhook.status = "DONE" + webhook.status_text = "OK" webhook.save() except Exception as e: - logger.debug('Mark active campaign action with error') + logger.debug("Mark active campaign action with error") - webhook.status = 'ERROR' + webhook.status = "ERROR" webhook.status_text = str(e) webhook.save() else: - message = f'ActiveCampaign Action `{action}` is not implemented' + message = f"ActiveCampaign Action `{action}` is not implemented" logger.debug(message) - webhook.status = 'ERROR' + webhook.status = "ERROR" webhook.status_text = message webhook.save() @@ -165,35 +165,45 @@ def add_webhook_to_log(context: dict, academy_slug: str): ac_academy = ActiveCampaignAcademy.objects.filter(academy__slug=academy_slug).first() if ac_academy is None: - logger.debug(f'ActiveCampaign academy {str(academy_slug)} not found') - raise APIException(f'ActiveCampaign academy {str(academy_slug)} not found') + logger.debug(f"ActiveCampaign academy {str(academy_slug)} not found") + raise APIException(f"ActiveCampaign academy {str(academy_slug)} not found") webhook = ActiveCampaignWebhook() - webhook.webhook_type = context['type'] - webhook.initiated_by = context['initiated_by'] + webhook.webhook_type = context["type"] + webhook.initiated_by = context["initiated_by"] webhook.ac_academy = ac_academy - webhook.status = 'PENDING' + webhook.status = "PENDING" webhook.payload = json.dumps(context) webhook.save() return webhook def get_deal(self, deal_id): - #/api/3/deals/id - #Api-Token - resp = requests.get(f'{self.host}/api/3/deals/{deal_id}', headers={'Api-Token': self.token}, timeout=2) - logger.debug(f'Get deal {self.host}/api/3/deals/{deal_id}', resp.status_code) + # /api/3/deals/id + # Api-Token + resp = requests.get(f"{self.host}/api/3/deals/{deal_id}", headers={"Api-Token": self.token}, timeout=2) + logger.debug(f"Get deal 
{self.host}/api/3/deals/{deal_id}", resp.status_code) return resp.json() def update_deal(self, id: str, fields: dict): import requests - #The following are the fields that can be updated on the deal + # The following are the fields that can be updated on the deal _allowed_fields = [ - 'contact', 'account', 'description', 'currency', 'group', 'owner', 'percent', 'stage', 'status', 'title', - 'value', 'fields' + "contact", + "account", + "description", + "currency", + "group", + "owner", + "percent", + "stage", + "status", + "title", + "value", + "fields", ] - _allowed_custom_ids = [x for x in acp_ids['deal'].values()] + _allowed_custom_ids = [x for x in acp_ids["deal"].values()] # { # "deal": { # "contact": "51", @@ -224,70 +234,71 @@ def update_deal(self, id: str, fields: dict): # ] # } # } - _to_be_updated = {'fields': []} + _to_be_updated = {"fields": []} for field_key in fields: if field_key not in _allowed_fields: logger.error(f'Error updating deal `{id}`, field "{field_key}" does not exist on active campaign deals') - raise Exception(f'Field {field_key} does not exist for active campaign deals') + raise Exception(f"Field {field_key} does not exist for active campaign deals") # include all non-custom fields on the payload to be updated - if field_key != 'fields': + if field_key != "fields": _to_be_updated[field_key] = acp_ids[field_key] # custom fields validation - if 'fields' in fields: - for cf in fields['fields']: - if cf['customFieldId'] not in _allowed_custom_ids: + if "fields" in fields: + for cf in fields["fields"]: + if cf["customFieldId"] not in _allowed_custom_ids: logger.error( - f'Error updating deal `{id}`, custom field with id "{cf["customFieldId"]}" does not exist') + f'Error updating deal `{id}`, custom field with id "{cf["customFieldId"]}" does not exist' + ) raise Exception( - f'Custom field with id {cf["customFieldId"]} does not exist for active campaign deals') + f'Custom field with id {cf["customFieldId"]} does not exist for active campaign deals' + ) - _to_be_updated['fields'] = fields['fields'].copy() + _to_be_updated["fields"] = fields["fields"].copy() body = { - 'deal': { + "deal": { **_to_be_updated, } } - resp = requests.put(f'{self.host}/api/3/deals/{id}', headers={'Api-Token': self.token}, json=body, timeout=2) - logger.info(f'Updating lead `{id}` on active campaign') + resp = requests.put(f"{self.host}/api/3/deals/{id}", headers={"Api-Token": self.token}, json=body, timeout=2) + logger.info(f"Updating lead `{id}` on active campaign") if resp.status_code in [201, 200]: - logger.info('Deal updated successfully') + logger.info("Deal updated successfully") body = resp.json() - if 'deal' in body: - return body['deal'] + if "deal" in body: + return body["deal"] else: - logger.error(f'Failed to update deal with id `{id}` because the structure of response was changed') - raise Exception(f'Failed to update deal with id `{id}` because the structure of response was changed') + logger.error(f"Failed to update deal with id `{id}` because the structure of response was changed") + raise Exception(f"Failed to update deal with id `{id}` because the structure of response was changed") else: - logger.error(f'Error updating deal `{id}` with status={str(resp.status_code)}') + logger.error(f"Error updating deal `{id}` with status={str(resp.status_code)}") error = resp.json() logger.error(error) - raise Exception(f'Error updating deal with id `{id}` with status={str(resp.status_code)}') + raise Exception(f"Error updating deal with id `{id}` with status={str(resp.status_code)}") 
def get_contact_by_email(self, email): import requests - #/api/3/deals/id - #Api-Token - resp = requests.get(f'{self.host}/api/3/contacts', - headers={'Api-Token': self.token}, - params={'email': email}, - timeout=2) - logger.debug(f'Get contact by email {self.host}/api/3/contacts {resp.status_code}') + # /api/3/deals/id + # Api-Token + resp = requests.get( + f"{self.host}/api/3/contacts", headers={"Api-Token": self.token}, params={"email": email}, timeout=2 + ) + logger.debug(f"Get contact by email {self.host}/api/3/contacts {resp.status_code}") data = resp.json() - if data and 'contacts' in data and len(data['contacts']) == 1: - return data['contacts'][0] + if data and "contacts" in data and len(data["contacts"]) == 1: + return data["contacts"][0] else: - raise Exception(f'Problem fetching contact in activecampaign with email {email}') + raise Exception(f"Problem fetching contact in activecampaign with email {email}") def get_contact(self, id: str): import requests @@ -338,13 +349,13 @@ def get_contact(self, id: str): # "id": "1", # "organization": null # } - resp = requests.get(f'{self.host}/api/3/contacts/{id}', headers={'Api-Token': self.token}, timeout=2) - logger.debug(f'Get contact by eidmail {self.host}/api/3/contacts/{id} => status={resp.status_code}') + resp = requests.get(f"{self.host}/api/3/contacts/{id}", headers={"Api-Token": self.token}, timeout=2) + logger.debug(f"Get contact by eidmail {self.host}/api/3/contacts/{id} => status={resp.status_code}") data = resp.json() - if data and 'contact' in data: - return data['contact'] + if data and "contact" in data: + return data["contact"] else: - raise Exception(f'Problem fetching contact in activecampaign with id {id}') + raise Exception(f"Problem fetching contact in activecampaign with id {id}") def get_contact_customfields(self, id: str): @@ -369,32 +380,33 @@ def get_contact_customfields(self, id: str): # } import requests - resp = requests.get(f'{self.host}/api/3/contacts/{id}/fieldValues', - headers={'Api-Token': self.token}, - timeout=2) + resp = requests.get( + f"{self.host}/api/3/contacts/{id}/fieldValues", headers={"Api-Token": self.token}, timeout=2 + ) logger.debug( - f'Get contact field values {self.host}/api/3/contacts/{id}/fieldValues => status={resp.status_code}') + f"Get contact field values {self.host}/api/3/contacts/{id}/fieldValues => status={resp.status_code}" + ) data = resp.json() - if data and 'fieldValues' in data: - return data['fieldValues'] + if data and "fieldValues" in data: + return data["fieldValues"] else: - raise Exception(f'Problem fetching contact custom fields in activecampaign with id {id}') + raise Exception(f"Problem fetching contact custom fields in activecampaign with id {id}") def get_deal_customfields(self, deal_id): - #/api/3/deals/id - #Api-Token - resp = requests.get(f'{self.host}/api/3/deals/{deal_id}/dealCustomFieldData', - headers={'Api-Token': self.token}, - timeout=2) + # /api/3/deals/id + # Api-Token + resp = requests.get( + f"{self.host}/api/3/deals/{deal_id}/dealCustomFieldData", headers={"Api-Token": self.token}, timeout=2 + ) logger.debug( - f'Get custom fields {self.host}/api/3/deals/{deal_id}/dealCustomFieldData with status {str(resp.status_code)}' + f"Get custom fields {self.host}/api/3/deals/{deal_id}/dealCustomFieldData with status {str(resp.status_code)}" ) if resp.status_code == 200: data = resp.json() _reponse = {} - for field in data['dealCustomFieldData']: - _reponse[str(field['customFieldId'])] = field['fieldValue'] + for field in data["dealCustomFieldData"]: + 
_reponse[str(field["customFieldId"])] = field["fieldValue"] return _reponse return None @@ -402,76 +414,76 @@ def get_deal_customfields(self, deal_id): def add_tag_to_contact(self, contact_id: int, tag_id: int): import requests - #/api/3/deals/id - #Api-Token - body = {'contactTag': {'contact': contact_id, 'tag': tag_id}} + # /api/3/deals/id + # Api-Token + body = {"contactTag": {"contact": contact_id, "tag": tag_id}} headers = { - 'Api-Token': self.token, - 'Content-Type': 'application/json', - 'Accept': 'application/json', + "Api-Token": self.token, + "Content-Type": "application/json", + "Accept": "application/json", } - resp = requests.post(f'{self.host}/api/3/contactTags', headers=headers, json=body, timeout=2) - logger.debug('Add tag to contact') + resp = requests.post(f"{self.host}/api/3/contactTags", headers=headers, json=body, timeout=2) + logger.debug("Add tag to contact") # can return status 200 if the contact have has been tagged, this case is not a error if resp.status_code < 400: data = resp.json() - if data and 'contactTag' in data: - return data['contactTag'] + if data and "contactTag" in data: + return data["contactTag"] else: - raise Exception('Bad response format from ActiveCampaign when adding a new tag to contact') + raise Exception("Bad response format from ActiveCampaign when adding a new tag to contact") else: logger.error(resp.json()) - raise Exception(f'Failed to add tag to contact {contact_id} with status={resp.status_code}') + raise Exception(f"Failed to add tag to contact {contact_id} with status={resp.status_code}") def create_tag(self, slug: str, description: str): import requests - #/api/3/deals/id - #Api-Token - body = {'tag': {'tag': slugify(slug), 'tagType': 'contact', 'description': description}} - resp = requests.post(f'{self.host}/api/3/tags', headers={'Api-Token': self.token}, json=body, timeout=2) + # /api/3/deals/id + # Api-Token + body = {"tag": {"tag": slugify(slug), "tagType": "contact", "description": description}} + resp = requests.post(f"{self.host}/api/3/tags", headers={"Api-Token": self.token}, json=body, timeout=2) logger.info(f'Creating tag `{body["tag"]["tag"]}` on active campaign') if resp.status_code == 201: - logger.info('Tag created successfully') + logger.info("Tag created successfully") body = resp.json() - if 'tag' in body: - return body['tag'] + if "tag" in body: + return body["tag"] else: - logger.error(f'Failed to create tag `{slug}` because the structure of response was changed') - raise Exception(f'Failed to create tag `{slug}` because the structure of response was changed') + logger.error(f"Failed to create tag `{slug}` because the structure of response was changed") + raise Exception(f"Failed to create tag `{slug}` because the structure of response was changed") else: - logger.error(f'Error creating tag `{slug}` with status={str(resp.status_code)}') + logger.error(f"Error creating tag `{slug}` with status={str(resp.status_code)}") error = resp.json() logger.error(error) - raise Exception(f'Error creating tag `{slug}` with status={str(resp.status_code)}') + raise Exception(f"Error creating tag `{slug}` with status={str(resp.status_code)}") def delete_tag(self, tag_id: str): import requests - #/api/3/deals/id - #Api-Token + # /api/3/deals/id + # Api-Token resp = requests.delete( - f'{self.host}/api/3/tags/{tag_id}', - headers={'Api-Token': self.token}, + f"{self.host}/api/3/tags/{tag_id}", + headers={"Api-Token": self.token}, timeout=2, ) - logger.debug(f'Deleting tag {str(tag_id)} on active campaign') + 
logger.debug(f"Deleting tag {str(tag_id)} on active campaign") if resp.status_code == 200 or resp.status_code == 404: - logger.debug(f'Tag deleted successfully or not existent {str(resp.status_code)} /api/3/tag/{tag_id}') + logger.debug(f"Tag deleted successfully or not existent {str(resp.status_code)} /api/3/tag/{tag_id}") return True else: - logger.error(f'Error deleting tag `{str(tag_id)}` with status={str(resp.status_code)}') + logger.error(f"Error deleting tag `{str(tag_id)}` with status={str(resp.status_code)}") error = resp.json() logger.error(error) - raise Exception(f'Error deleting tag `{str(tag_id)}` with status={str(resp.status_code)}') + raise Exception(f"Error deleting tag `{str(tag_id)}` with status={str(resp.status_code)}") class Contacts(object): @@ -510,9 +522,9 @@ def create_contact(self, data): ``` :return: A json """ - if 'email' not in data: - raise KeyError('The contact must have an email') - return self.client._post('contact_sync', data=data) + if "email" not in data: + raise KeyError("The contact must have an email") + return self.client._post("contact_sync", data=data) def subscribe_contact(self, data): """ @@ -546,10 +558,10 @@ def subscribe_contact(self, data): :return: A json """ - if 'email' not in data: - raise KeyError('The contact must have an email') + if "email" not in data: + raise KeyError("The contact must have an email") - return self.client._post('contact_add', data=data) + return self.client._post("contact_add", data=data) def edit_contact(self, data): """ @@ -583,18 +595,18 @@ def edit_contact(self, data): :return: A json """ - if 'email' not in data: - raise KeyError('The contact must have an email') - return self.client._post('contact_edit', data=data) + if "email" not in data: + raise KeyError("The contact must have an email") + return self.client._post("contact_edit", data=data) def view_contact_email(self, email): - return self.client._get('contact_view_email', aditional_data=[('email', email)]) + return self.client._get("contact_view_email", aditional_data=[("email", email)]) def view_contact(self, id): - return self.client._get('contact_view', aditional_data=[('id', id)]) + return self.client._get("contact_view", aditional_data=[("id", id)]) def delete_contact(self, id): - return self.client._get('contact_delete', aditional_data=[('id', id)]) + return self.client._get("contact_delete", aditional_data=[("id", id)]) class ACOldClient(object): @@ -602,40 +614,40 @@ class ACOldClient(object): def __init__(self, url, apikey): if url is None: - raise Exception('Invalid URL for active campaign API, have you setup your env variables?') + raise Exception("Invalid URL for active campaign API, have you setup your env variables?") - self._base_url = f'https://{url}' if not url.startswith('http') else url + self._base_url = f"https://{url}" if not url.startswith("http") else url self._apikey = apikey self.contacts = Contacts(self) def _get(self, action, aditional_data=None): - return self._request('GET', action, aditional_data=aditional_data) + return self._request("GET", action, aditional_data=aditional_data) def _post(self, action, data=None, aditional_data=None): - return self._request('POST', action, data=data, aditional_data=aditional_data) + return self._request("POST", action, data=data, aditional_data=aditional_data) def _delete(self, action): - return self._request('DELETE', action) + return self._request("DELETE", action) def _request(self, method, action, data=None, aditional_data=None): params = [ - ('api_action', action), - ('api_key', 
self._apikey), - ('api_output', 'json'), + ("api_action", action), + ("api_key", self._apikey), + ("api_output", "json"), ] if aditional_data is not None: for aditional in aditional_data: params.append(aditional) - response = requests.request(method, self._base_url + '/admin/api.php', params=params, data=data, timeout=3) + response = requests.request(method, self._base_url + "/admin/api.php", params=params, data=data, timeout=3) if response.status_code >= 200 and response.status_code < 400: data = response.json() return self._parse(data) else: - raise Exception('Error when saving contact on AC') + raise Exception("Error when saving contact on AC") def _parse(self, response): - if response['result_code'] == 1: + if response["result_code"] == 1: return response else: - raise Exception(response['result_message']) + raise Exception(response["result_message"]) diff --git a/breathecode/services/calendly/actions/__init__.py b/breathecode/services/calendly/actions/__init__.py index 555fe506c..294db2d69 100644 --- a/breathecode/services/calendly/actions/__init__.py +++ b/breathecode/services/calendly/actions/__init__.py @@ -2,4 +2,4 @@ from .invitee_canceled import invitee_canceled from .test import test -__all__ = ['invitee_created', 'invitee_canceled', 'test'] +__all__ = ["invitee_created", "invitee_canceled", "test"] diff --git a/breathecode/services/calendly/actions/invitee_canceled.py b/breathecode/services/calendly/actions/invitee_canceled.py index bf86cf6a7..7a8f39b8a 100644 --- a/breathecode/services/calendly/actions/invitee_canceled.py +++ b/breathecode/services/calendly/actions/invitee_canceled.py @@ -8,12 +8,12 @@ def invitee_canceled(self, webhook, payload: dict): # lazyload to fix circular import from breathecode.mentorship.models import MentorshipSession - cancellation_email = payload['email'] + cancellation_email = payload["email"] - event_uuid = urlparse(payload['event']).path.split('/')[-1] + event_uuid = urlparse(payload["event"]).path.split("/")[-1] session = MentorshipSession.objects.filter(calendly_uuid=event_uuid).first() if session is None: - raise Exception(f'Mentoring session with calendly_uuid {event_uuid} not found while trying to cancel it') - session.Summary = f'Session was canceled by {cancellation_email} and it was notified by calendly' - session.status = 'CANCELED' + raise Exception(f"Mentoring session with calendly_uuid {event_uuid} not found while trying to cancel it") + session.Summary = f"Session was canceled by {cancellation_email} and it was notified by calendly" + session.status = "CANCELED" session.save() diff --git a/breathecode/services/calendly/actions/invitee_created.py b/breathecode/services/calendly/actions/invitee_created.py index d80affc19..1fa26403d 100644 --- a/breathecode/services/calendly/actions/invitee_created.py +++ b/breathecode/services/calendly/actions/invitee_created.py @@ -10,60 +10,63 @@ def invitee_created(client, webhook, payload: dict): # lazyload to fix circular import from breathecode.mentorship.models import MentorshipService, MentorProfile, MentorshipSession + # from breathecode.events.actions import update_or_create_event # payload = payload['resource'] academy = webhook.organization.academy service = None - service_slug = payload['tracking']['utm_campaign'] + service_slug = payload["tracking"]["utm_campaign"] if service_slug is None: - raise Exception('Missing service information on calendly iframe info: tracking->utm_campaign') + raise Exception("Missing service information on calendly iframe info: tracking->utm_campaign") 
service = MentorshipService.objects.filter(slug=service_slug, academy=academy).first() if service is None: - raise Exception(f'Service with slug {service_slug} not found for academy {academy.name}') + raise Exception(f"Service with slug {service_slug} not found for academy {academy.name}") mentee = None - mentee_id = 'undefined' - if 'salesforce_uuid' in payload['tracking'] and payload['tracking']['salesforce_uuid'] != '': - mentee_id = payload['tracking']['salesforce_uuid'] + mentee_id = "undefined" + if "salesforce_uuid" in payload["tracking"] and payload["tracking"]["salesforce_uuid"] != "": + mentee_id = payload["tracking"]["salesforce_uuid"] mentee = User.objects.filter(id=mentee_id).first() if mentee is None: - mentee_email = payload['email'] + mentee_email = payload["email"] mentee = User.objects.filter(email=mentee_email).first() if mentee is None: - raise Exception(f'Mentee user not found with email {mentee_email} or id {mentee_id}') + raise Exception(f"Mentee user not found with email {mentee_email} or id {mentee_id}") - event_uuid = urlparse(payload['event']).path.split('/')[-1] + event_uuid = urlparse(payload["event"]).path.split("/")[-1] event = client.get_event(event_uuid) - if event is None or 'resource' not in event: - raise Exception(f'Event with uuid {event_uuid} not found on calendly') - event = event['resource'] + if event is None or "resource" not in event: + raise Exception(f"Event with uuid {event_uuid} not found on calendly") + event = event["resource"] - if not isinstance(event['event_memberships'], list) or len(event['event_memberships']) == 0: - raise Exception('No mentor information was found on calendly event') + if not isinstance(event["event_memberships"], list) or len(event["event_memberships"]) == 0: + raise Exception("No mentor information was found on calendly event") - mentor_email = event['event_memberships'][0]['user_email'] - mentor_uuid = urlparse(event['event_memberships'][0]['user']).path.split('/')[-1] + mentor_email = event["event_memberships"][0]["user_email"] + mentor_uuid = urlparse(event["event_memberships"][0]["user"]).path.split("/")[-1] - mentor = MentorProfile.objects.filter( - academy=academy).filter(Q(calendly_uuid=mentor_uuid) | Q(email=mentor_email) - | Q(user__email=mentor_email)).first() + mentor = ( + MentorProfile.objects.filter(academy=academy) + .filter(Q(calendly_uuid=mentor_uuid) | Q(email=mentor_email) | Q(user__email=mentor_email)) + .first() + ) if mentor is None: - raise Exception(f'Mentor not found with uuid {mentor_uuid} and email {mentor_email}') + raise Exception(f"Mentor not found with uuid {mentor_uuid} and email {mentor_email}") - if mentor.status in ['INVITED', 'INNACTIVE']: - raise Exception(f'Mentor status is {mentor.status}') + if mentor.status in ["INVITED", "INNACTIVE"]: + raise Exception(f"Mentor status is {mentor.status}") if mentor.services.filter(slug=service.slug).first() is None: - raise Exception(f'Mentor {mentor.name} is not assigned for service {service.slug}') + raise Exception(f"Mentor {mentor.name} is not assigned for service {service.slug}") meeting_url = None - if 'location' in payload and 'join_url' in payload['location']: - meeting_url = payload['location']['join_url'] + if "location" in payload and "join_url" in payload["location"]: + meeting_url = payload["location"]["join_url"] session = MentorshipSession.objects.filter(calendly_uuid=event_uuid).first() if session is None: @@ -73,9 +76,9 @@ def invitee_created(client, webhook, payload: dict): session.mentor = mentor session.mentee = mentee 
session.online_meeting_url = meeting_url - session.status_message = 'Scheduled throught calendly' - session.starts_at = event['start_time'] - session.ends_at = event['end_time'] + session.status_message = "Scheduled throught calendly" + session.starts_at = event["start_time"] + session.ends_at = event["end_time"] session.service = service session.calendly_uuid = event_uuid session.save() diff --git a/breathecode/services/calendly/actions/test.py b/breathecode/services/calendly/actions/test.py index fc61bd0d3..656f0cfc7 100644 --- a/breathecode/services/calendly/actions/test.py +++ b/breathecode/services/calendly/actions/test.py @@ -4,4 +4,4 @@ def test(self, webhook, payload: dict): - logger.info('performing test request') + logger.info("performing test request") diff --git a/breathecode/services/calendly/client.py b/breathecode/services/calendly/client.py index 67b1a70b1..9d2a5f68c 100644 --- a/breathecode/services/calendly/client.py +++ b/breathecode/services/calendly/client.py @@ -7,7 +7,7 @@ import traceback logger = logging.getLogger(__name__) -API_URL = os.getenv('API_URL', '') +API_URL = os.getenv("API_URL", "") class Calendly: @@ -15,9 +15,9 @@ class Calendly: headers = {} def __init__(self, token): - self.host = 'https://api.calendly.com' + self.host = "https://api.calendly.com" self.token = token - self.headers = {'Authorization': f'Bearer {token}'} + self.headers = {"Authorization": f"Bearer {token}"} def has_error(self): # { @@ -35,9 +35,9 @@ def request(self, _type, url, headers=None, query_string=None, json=None): headers = {} _headers = {**self.headers, **headers} - _query_string = '' + _query_string = "" if query_string is not None: - _query_string = '?' + urllib.parse.urlencode(query_string) + _query_string = "?" + urllib.parse.urlencode(query_string) if json is not None: response = requests.request(_type, self.host + url + _query_string, headers=_headers, timeout=2, json=json) @@ -48,64 +48,72 @@ def request(self, _type, url, headers=None, query_string=None, json=None): try: result = response.json() except JSONDecodeError as e: - if _type != 'DELETE': + if _type != "DELETE": raise e if response.status_code >= 400: - print('Error calling calendly: ', self.host + url + _query_string) - raise Exception(result['message']) - - if result is not None and 'pagination' in result: - if result['pagination']['next_page'] is not None: - new_result = self.request(_type, result['pagination']['next_page'], query_string={ - **query_string, - }) - if 'collection' in new_result and type(new_result['collection']) == 'list': - new_result['collection'] = result['collection'] + new_result['collection'] + print("Error calling calendly: ", self.host + url + _query_string) + raise Exception(result["message"]) + + if result is not None and "pagination" in result: + if result["pagination"]["next_page"] is not None: + new_result = self.request( + _type, + result["pagination"]["next_page"], + query_string={ + **query_string, + }, + ) + if "collection" in new_result and type(new_result["collection"]) == "list": + new_result["collection"] = result["collection"] + new_result["collection"] result.update(new_result) return result def subscribe(self, org_uri, org_hash): - data = self.request('POST', - '/webhook_subscriptions', - json={ - 'url': f'{API_URL}/v1/mentorship/calendly/webhook/{org_hash}', - 'events': ['invitee.created', 'invitee.canceled'], - 'organization': f'{org_uri}', - 'scope': 'organization', - }) - if 'collection' in data: - return data['collection'] + data = self.request( + "POST", + 
"/webhook_subscriptions", + json={ + "url": f"{API_URL}/v1/mentorship/calendly/webhook/{org_hash}", + "events": ["invitee.created", "invitee.canceled"], + "organization": f"{org_uri}", + "scope": "organization", + }, + ) + if "collection" in data: + return data["collection"] else: return data def unsubscribe(self, webhook_uuid): - return self.request('DELETE', f'/webhook_subscriptions/{webhook_uuid}') + return self.request("DELETE", f"/webhook_subscriptions/{webhook_uuid}") def unsubscribe_all(self, org_uri): data = self.get_subscriptions(org_uri) for webhook in data: - self.unsubscribe(urlparse(webhook['uri']).path.split('/')[-1]) + self.unsubscribe(urlparse(webhook["uri"]).path.split("/")[-1]) def get_subscriptions(self, org_uri): - data = self.request('GET', - '/webhook_subscriptions', - query_string={ - 'organization': f'{org_uri}', - 'scope': 'organization', - }) - if 'collection' in data: - return data['collection'] + data = self.request( + "GET", + "/webhook_subscriptions", + query_string={ + "organization": f"{org_uri}", + "scope": "organization", + }, + ) + if "collection" in data: + return data["collection"] else: return data def get_event(self, uuid): - data = self.request('GET', f'/scheduled_events/{uuid}') + data = self.request("GET", f"/scheduled_events/{uuid}") return data def get_organization(self): - data = self.request('GET', '/users/me') + data = self.request("GET", "/users/me") return data def execute_action(self, calendly_webhook_id: int): @@ -123,42 +131,42 @@ def execute_action(self, calendly_webhook_id: int): webhook = CalendlyWebhook.objects.filter(id=calendly_webhook_id).first() if not webhook: - raise Exception('Invalid webhook id or not found') + raise Exception("Invalid webhook id or not found") - if not webhook.event or webhook.event == '': - raise Exception('Impossible to determine event action, the webhook should have an event action string') + if not webhook.event or webhook.event == "": + raise Exception("Impossible to determine event action, the webhook should have an event action string") webhook.organization = CalendlyOrganization.objects.filter(hash=webhook.organization_hash).first() if webhook.organization is None: - raise Exception(f'Calendly organization with internal hash not found: {webhook.organization_hash}') + raise Exception(f"Calendly organization with internal hash not found: {webhook.organization_hash}") - action = webhook.event.replace('.', '_') + action = webhook.event.replace(".", "_") - logger.debug(f'Executing => {action}') + logger.debug(f"Executing => {action}") if hasattr(actions, action): - logger.debug('Action found') + logger.debug("Action found") fn = getattr(actions, action) try: fn(self, webhook, webhook.payload) - logger.debug('Mark action as done') - webhook.status = 'DONE' - webhook.status_text = 'OK' + logger.debug("Mark action as done") + webhook.status = "DONE" + webhook.status_text = "OK" webhook.save() except Exception as e: - logger.error('Mark action with error') + logger.error("Mark action with error") - webhook.status = 'ERROR' - webhook.status_text = ''.join(traceback.format_exception(None, e, e.__traceback__)) + webhook.status = "ERROR" + webhook.status_text = "".join(traceback.format_exception(None, e, e.__traceback__)) webhook.save() else: - message = f'Action `{action}` is not implemented' + message = f"Action `{action}` is not implemented" logger.debug(message) - webhook.status = 'ERROR' + webhook.status = "ERROR" webhook.status_text = message webhook.save() @@ -176,17 +184,17 @@ def 
add_webhook_to_log(context: dict, organization_hash: str): webhook = CalendlyWebhook() - if 'event' not in context or context['event'] == '': - raise Exception('Impossible to determine event action, the webhook should have an event action string') + if "event" not in context or context["event"] == "": + raise Exception("Impossible to determine event action, the webhook should have an event action string") - webhook.event = context['event'] - webhook.created_by = context['created_by'] - webhook.payload = context['payload'] - webhook.called_at = context['created_at'] + webhook.event = context["event"] + webhook.created_by = context["created_by"] + webhook.payload = context["payload"] + webhook.called_at = context["created_at"] webhook.organization_hash = organization_hash - webhook.status = 'PENDING' + webhook.status = "PENDING" - organization = CalendlyOrganization.objects.filter(uri=context['created_by']).first() + organization = CalendlyOrganization.objects.filter(uri=context["created_by"]).first() if organization is not None: webhook.organization = organization diff --git a/breathecode/services/daily/client.py b/breathecode/services/daily/client.py index d2ef01413..efd03e9b5 100644 --- a/breathecode/services/daily/client.py +++ b/breathecode/services/daily/client.py @@ -9,11 +9,11 @@ class DailyClient: def __init__(self, token=None): if token is None: - token = os.getenv('DAILY_API_KEY', '') + token = os.getenv("DAILY_API_KEY", "") - self.host = os.getenv('DAILY_API_URL', '') + self.host = os.getenv("DAILY_API_URL", "") self.token = token - self.headers = {'Authorization': f'Bearer {token}'} + self.headers = {"Authorization": f"Bearer {token}"} def request(self, _type, url, headers=None, query_string=None, data=None): # wonderful way to fix one poor mocking system @@ -23,18 +23,18 @@ def request(self, _type, url, headers=None, query_string=None, data=None): headers = {} _headers = {**self.headers, **headers} - _query_string = '' + _query_string = "" if query_string is not None: - _query_string = '?' + urllib.parse.urlencode(query_string) + _query_string = "?" 
+ urllib.parse.urlencode(query_string) response = requests.request(_type, self.host + url + _query_string, headers=_headers, json=data, timeout=2) result = response.json() if result is None: - raise Exception('Unknown error when requesting meeting room') + raise Exception("Unknown error when requesting meeting room") - if ('status_code' in result and result['status_code'] >= 400) or 'error' in result: - raise Exception(result['error'] + ': ' + result['info']) + if ("status_code" in result and result["status_code"] >= 400) or "error" in result: + raise Exception(result["error"] + ": " + result["info"]) # if 'pagination' in result: # print('has more items?', result['pagination']['has_more_items']) @@ -55,44 +55,44 @@ def request(self, _type, url, headers=None, query_string=None, data=None): return result def create_all_rooms(self): - data = self.request('GET', '/v1/rooms') + data = self.request("GET", "/v1/rooms") return data - def create_room(self, name='', exp_in_seconds=3600, exp_in_epoch=None): + def create_room(self, name="", exp_in_seconds=3600, exp_in_epoch=None): # now timestamp in epoch epoc_now = time.mktime(timezone.now().timetuple()) if exp_in_epoch is None: epoc_now = time.mktime(timezone.now().timetuple()) - payload = {'properties': {'exp': f'{str(epoc_now + exp_in_seconds)}'}} + payload = {"properties": {"exp": f"{str(epoc_now + exp_in_seconds)}"}} else: - payload = {'properties': {'exp': f'{str(exp_in_epoch)}'}} + payload = {"properties": {"exp": f"{str(exp_in_epoch)}"}} - if name != '': - payload['properties']['name'] = name + if name != "": + payload["properties"]["name"] = name - data = self.request('POST', '/v1/rooms', data=payload) + data = self.request("POST", "/v1/rooms", data=payload) return data - def extend_room(self, name='', exp_in_seconds=3600, exp_in_epoch=None): + def extend_room(self, name="", exp_in_seconds=3600, exp_in_epoch=None): if exp_in_epoch is None: epoc_now = time.mktime(timezone.now().timetuple()) - payload = {'properties': {'exp': f'{str(epoc_now + exp_in_seconds)}'}} + payload = {"properties": {"exp": f"{str(epoc_now + exp_in_seconds)}"}} else: - payload = {'properties': {'exp': f'{str(exp_in_epoch)}'}} + payload = {"properties": {"exp": f"{str(exp_in_epoch)}"}} - data = self.request('POST', '/v1/rooms/' + name, data=payload) + data = self.request("POST", "/v1/rooms/" + name, data=payload) return data - def get_room(self, name=''): + def get_room(self, name=""): epoc_now = time.mktime(timezone.now().timetuple()) - data = self.request('GET', '/v1/rooms/' + name) + data = self.request("GET", "/v1/rooms/" + name) - if epoc_now > data['config']['exp']: - data['expired'] = True + if epoc_now > data["config"]["exp"]: + data["expired"] = True else: - data['expired'] = False + data["expired"] = False return data diff --git a/breathecode/services/datetime_to_iso_format/datetime_to_iso_format.py b/breathecode/services/datetime_to_iso_format/datetime_to_iso_format.py index 2030ce62f..5467b671d 100644 --- a/breathecode/services/datetime_to_iso_format/datetime_to_iso_format.py +++ b/breathecode/services/datetime_to_iso_format/datetime_to_iso_format.py @@ -5,11 +5,11 @@ class SimpleUTC(tzinfo): def tzname(self, **kwargs): - return 'UTC' + return "UTC" def utcoffset(self, dt): return timedelta(0) def datetime_to_iso_format(date: datetime) -> str: - return re.sub(r'\+00:00$', 'Z', date.replace(tzinfo=SimpleUTC()).isoformat()) + return re.sub(r"\+00:00$", "Z", date.replace(tzinfo=SimpleUTC()).isoformat()) diff --git 
a/breathecode/services/eventbrite/actions/__init__.py b/breathecode/services/eventbrite/actions/__init__.py index 4f6a3a359..cdb7f2f6b 100644 --- a/breathecode/services/eventbrite/actions/__init__.py +++ b/breathecode/services/eventbrite/actions/__init__.py @@ -4,4 +4,4 @@ from .event_published import event_published from .test import test -__all__ = ['event_created', 'event_updated', 'order_placed', 'event_published', 'test'] +__all__ = ["event_created", "event_updated", "order_placed", "event_published", "test"] diff --git a/breathecode/services/eventbrite/actions/order_placed.py b/breathecode/services/eventbrite/actions/order_placed.py index ebc099245..b4adda5fc 100644 --- a/breathecode/services/eventbrite/actions/order_placed.py +++ b/breathecode/services/eventbrite/actions/order_placed.py @@ -2,8 +2,8 @@ from django.contrib.auth.models import User logger = logging.getLogger(__name__) -SOURCE = 'eventbrite' -CAMPAIGN = 'eventbrite order placed' +SOURCE = "eventbrite" +CAMPAIGN = "eventbrite order placed" def order_placed(self, webhook, payload: dict): @@ -22,11 +22,11 @@ def order_placed(self, webhook, payload: dict): raise Exception(message) if not org.academy: - raise Exception('Organization not have one Academy') + raise Exception("Organization not have one Academy") academy_id = org.academy.id - event_id = payload['event_id'] - email = payload['email'] + event_id = payload["event_id"] + email = payload["email"] local_event = Event.objects.filter(eventbrite_id=event_id).first() @@ -45,8 +45,9 @@ def order_placed(self, webhook, payload: dict): webhook.save() if not EventCheckin.objects.filter(email=email, event=local_event).count(): - EventCheckin(email=email, status='PENDING', event=local_event, attendee=local_attendee, - utm_source='eventbrite').save() + EventCheckin( + email=email, status="PENDING", event=local_event, attendee=local_attendee, utm_source="eventbrite" + ).save() elif not EventCheckin.objects.filter(email=email, event=local_event, attendee=local_attendee).count(): event_checkin = EventCheckin.objects.filter(email=email, event=local_event).first() @@ -54,26 +55,26 @@ def order_placed(self, webhook, payload: dict): event_checkin.save() contact = { - 'email': email, - 'first_name': payload['first_name'], - 'last_name': payload['last_name'], + "email": email, + "first_name": payload["first_name"], + "last_name": payload["last_name"], } custom = { - 'academy': local_event.academy.slug, - 'source': SOURCE, - 'campaign': CAMPAIGN, - 'language': local_event.lang, + "academy": local_event.academy.slug, + "source": SOURCE, + "campaign": CAMPAIGN, + "language": local_event.lang, } # utm_language ? 
- contact = set_optional(contact, 'utm_location', custom, 'academy') - contact = set_optional(contact, 'utm_source', custom, 'source') - contact = set_optional(contact, 'utm_campaign', custom, 'campaign') + contact = set_optional(contact, "utm_location", custom, "academy") + contact = set_optional(contact, "utm_source", custom, "source") + contact = set_optional(contact, "utm_campaign", custom, "campaign") if local_event.lang: - contact = set_optional(contact, 'utm_language', custom, 'language') + contact = set_optional(contact, "utm_language", custom, "language") academy = ActiveCampaignAcademy.objects.filter(academy__id=academy_id).first() if academy is None: @@ -81,8 +82,11 @@ def order_placed(self, webhook, payload: dict): logger.debug(message) raise Exception(message) - automation_id = ActiveCampaignAcademy.objects.filter(academy__id=academy_id).values_list( - 'event_attendancy_automation__id', flat=True).first() + automation_id = ( + ActiveCampaignAcademy.objects.filter(academy__id=academy_id) + .values_list("event_attendancy_automation__id", flat=True) + .first() + ) if automation_id: add_to_active_campaign(contact, academy_id, automation_id) diff --git a/breathecode/services/eventbrite/actions/test.py b/breathecode/services/eventbrite/actions/test.py index fc61bd0d3..656f0cfc7 100644 --- a/breathecode/services/eventbrite/actions/test.py +++ b/breathecode/services/eventbrite/actions/test.py @@ -4,4 +4,4 @@ def test(self, webhook, payload: dict): - logger.info('performing test request') + logger.info("performing test request") diff --git a/breathecode/services/eventbrite/client.py b/breathecode/services/eventbrite/client.py index 6a3b50d58..b123ac3f5 100644 --- a/breathecode/services/eventbrite/client.py +++ b/breathecode/services/eventbrite/client.py @@ -15,11 +15,11 @@ class Eventbrite: def __init__(self, token=None): if token is None: - token = os.getenv('EVENTBRITE_KEY', '') + token = os.getenv("EVENTBRITE_KEY", "") - self.host = 'https://www.eventbriteapi.com/v3' + self.host = "https://www.eventbriteapi.com/v3" self.token = token - self.headers = {'Authorization': f'Bearer {token}'} + self.headers = {"Authorization": f"Bearer {token}"} def has_error(self): # { @@ -37,41 +37,39 @@ def request(self, _type, url, headers=None, query_string=None): headers = {} _headers = {**self.headers, **headers} - _query_string = '' + _query_string = "" if query_string is not None: - _query_string = '?' + urllib.parse.urlencode(query_string) + _query_string = "?" 
+ urllib.parse.urlencode(query_string) response = requests.request(_type, self.host + url + _query_string, headers=_headers, timeout=5) result = response.json() - if 'status_code' in result and result['status_code'] >= 400: - raise Exception(result['error_description']) + if "status_code" in result and result["status_code"] >= 400: + raise Exception(result["error_description"]) - if 'pagination' in result: - if result['pagination']['has_more_items']: - new_result = self.request(_type, - url, - query_string={ - **query_string, 'continuation': result['pagination']['continuation'] - }) + if "pagination" in result: + if result["pagination"]["has_more_items"]: + new_result = self.request( + _type, url, query_string={**query_string, "continuation": result["pagination"]["continuation"]} + ) for key in new_result: - if type(new_result[key]) == 'list': + if type(new_result[key]) == "list": new_result[key] = result[key] + new_result[key] result.update(new_result) return result def get_my_organizations(self): - data = self.request('GET', '/users/me/organizations/') + data = self.request("GET", "/users/me/organizations/") return data def get_organization_events(self, organization_id): - query_string = {'expand': 'organizer', 'status': 'live'} - data = self.request('GET', f'/organizations/{str(organization_id)}/events/', query_string=query_string) + query_string = {"expand": "organizer", "status": "live"} + data = self.request("GET", f"/organizations/{str(organization_id)}/events/", query_string=query_string) return data def get_organization_venues(self, organization_id): - data = self.request('GET', f'/organizations/{str(organization_id)}/venues/') + data = self.request("GET", f"/organizations/{str(organization_id)}/venues/") return data def execute_action(self, eventbrite_webhook_id: int): @@ -94,21 +92,21 @@ def execute_action(self, eventbrite_webhook_id: int): webhook = EventbriteWebhook.objects.filter(id=eventbrite_webhook_id).first() if not webhook: - raise Exception('Invalid webhook') + raise Exception("Invalid webhook") if not webhook.action: - raise Exception('Impossible to determine action') + raise Exception("Impossible to determine action") if not webhook.api_url: - raise Exception('Impossible to determine api url') + raise Exception("Impossible to determine api url") - action = webhook.action.replace('.', '_') + action = webhook.action.replace(".", "_") api_url = webhook.api_url - if (re.search(r'^https://www\.eventbriteapi\.com/v3/events/\d+/?$', api_url)): - api_url = api_url + '?expand=organizer,venue' + if re.search(r"^https://www\.eventbriteapi\.com/v3/events/\d+/?$", api_url): + api_url = api_url + "?expand=organizer,venue" - logger.debug(f'Executing => {action}') + logger.debug(f"Executing => {action}") if hasattr(actions, action): response = requests.get(api_url, headers=self.headers, timeout=5) json = response.json() @@ -117,28 +115,28 @@ def execute_action(self, eventbrite_webhook_id: int): # logger.debug("Eventbrite response") # logger.debug(json) - logger.debug('Action found') + logger.debug("Action found") fn = getattr(actions, action) try: fn(self, webhook, json) - logger.debug('Mark action as done') - webhook.status = 'DONE' - webhook.status_text = 'OK' + logger.debug("Mark action as done") + webhook.status = "DONE" + webhook.status_text = "OK" webhook.save() except Exception as e: - logger.error('Mark action with error') + logger.error("Mark action with error") - webhook.status = 'ERROR' - webhook.status_text = ''.join(traceback.format_exception(None, e, e.__traceback__)) + 
webhook.status = "ERROR" + webhook.status_text = "".join(traceback.format_exception(None, e, e.__traceback__)) webhook.save() else: - message = f'Action `{action}` is not implemented' + message = f"Action `{action}` is not implemented" logger.debug(message) - webhook.status = 'ERROR' + webhook.status = "ERROR" webhook.status_text = message webhook.save() @@ -155,26 +153,26 @@ def add_webhook_to_log(context: dict, organization_id: str): return None webhook = EventbriteWebhook() - context_has_config_key = 'config' in context - context_has_api_url = 'api_url' in context + context_has_config_key = "config" in context + context_has_api_url = "api_url" in context if context_has_api_url: - webhook.api_url = context['api_url'] + webhook.api_url = context["api_url"] - if context_has_config_key and 'user_id' in context['config']: - webhook.user_id = context['config']['user_id'] + if context_has_config_key and "user_id" in context["config"]: + webhook.user_id = context["config"]["user_id"] - if context_has_config_key and 'action' in context['config']: - webhook.action = context['config']['action'] + if context_has_config_key and "action" in context["config"]: + webhook.action = context["config"]["action"] - if context_has_config_key and 'webhook_id' in context['config']: - webhook.webhook_id = context['config']['webhook_id'] + if context_has_config_key and "webhook_id" in context["config"]: + webhook.webhook_id = context["config"]["webhook_id"] - if context_has_config_key and 'endpoint_url' in context['config']: - webhook.endpoint_url = context['config']['endpoint_url'] + if context_has_config_key and "endpoint_url" in context["config"]: + webhook.endpoint_url = context["config"]["endpoint_url"] webhook.organization_id = organization_id - webhook.status = 'PENDING' + webhook.status = "PENDING" webhook.save() return webhook diff --git a/breathecode/services/facebook.py b/breathecode/services/facebook.py index cdc4b9592..a7c412d58 100644 --- a/breathecode/services/facebook.py +++ b/breathecode/services/facebook.py @@ -4,7 +4,7 @@ class Facebook: - HOST = 'https://graph.facebook.com/v8.0/' + HOST = "https://graph.facebook.com/v8.0/" headers = {} def __init__(self, token): @@ -15,41 +15,38 @@ def get(self, action_name, request_data=None): if request_data is None: request_data = {} - return self._call('GET', action_name, params=request_data) + return self._call("GET", action_name, params=request_data) def post(self, action_name, request_data=None): if request_data is None: request_data = {} - return self._call('POST', action_name, json=request_data) + return self._call("POST", action_name, json=request_data) def _call(self, method_name, action_name, params=None, json=None): - if method_name != 'GET': + if method_name != "GET": self.headers = { - 'Authorization': 'Bearer ' + self.token, - 'Content-type': 'application/json', + "Authorization": "Bearer " + self.token, + "Content-type": "application/json", } else: params = { - 'token': self.token, + "token": self.token, **params, } - resp = requests.request(method=method_name, - url=self.HOST + action_name, - headers=self.headers, - params=params, - json=json, - timeout=2) + resp = requests.request( + method=method_name, url=self.HOST + action_name, headers=self.headers, params=params, json=json, timeout=2 + ) if resp.status_code == 200: data = resp.json() - if data['ok'] == False: - raise Exception('Slack API Error ' + data['error']) + if data["ok"] == False: + raise Exception("Slack API Error " + data["error"]) else: - logger.debug(f'Successfull call 
{method_name}: /{action_name}') + logger.debug(f"Successfull call {method_name}: /{action_name}") return data else: - raise Exception(f'Unable to communicate with Slack API, error: {resp.status_code}') + raise Exception(f"Unable to communicate with Slack API, error: {resp.status_code}") diff --git a/breathecode/services/github.py b/breathecode/services/github.py index 7922a4395..10adbff2e 100644 --- a/breathecode/services/github.py +++ b/breathecode/services/github.py @@ -5,7 +5,7 @@ import requests logger = logging.getLogger(__name__) -API_URL = os.getenv('API_URL', '') +API_URL = os.getenv("API_URL", "") class GithubAuthException(Exception): @@ -13,7 +13,7 @@ class GithubAuthException(Exception): class Github: - HOST = 'https://api.github.com' + HOST = "https://api.github.com" headers = {} def __init__(self, token=None, org=None, host=None): @@ -28,36 +28,36 @@ def get(self, action_name, request_data=None): if request_data is None: request_data = {} - return self._call('GET', action_name, params=request_data) + return self._call("GET", action_name, params=request_data) def head(self, action_name, request_data=None): if request_data is None: request_data = {} - return self._call('HEAD', action_name, params=request_data) + return self._call("HEAD", action_name, params=request_data) def post(self, action_name, request_data=None): if request_data is None: request_data = {} - return self._call('POST', action_name, json=request_data) + return self._call("POST", action_name, json=request_data) def delete(self, action_name, request_data=None): if request_data is None: request_data = {} - return self._call('DELETE', action_name, params=request_data) + return self._call("DELETE", action_name, params=request_data) def _call(self, method_name, action_name, params=None, json=None): self.headers = { - 'Authorization': 'Bearer ' + self.token, - 'Content-type': 'application/json', + "Authorization": "Bearer " + self.token, + "Content-type": "application/json", } - if method_name in ['GET', 'DELETE']: + if method_name in ["GET", "DELETE"]: params = { # 'token': self.token, **params, @@ -67,108 +67,98 @@ def _call(self, method_name, action_name, params=None, json=None): resp = requests.request(method=method_name, url=url, headers=self.headers, params=params, json=json, timeout=2) if resp.status_code >= 200 and resp.status_code < 300: - if method_name in ['DELETE', 'HEAD']: + if method_name in ["DELETE", "HEAD"]: return resp data = resp.json() return data else: - logger.debug(f'Error call {method_name}: /{action_name}') + logger.debug(f"Error call {method_name}: /{action_name}") if resp.status_code == 401: - raise GithubAuthException('Invalid credentials when calling the Github API') + raise GithubAuthException("Invalid credentials when calling the Github API") error_message = str(resp.status_code) try: error = resp.json() - error_message = error['message'] + error_message = error["message"] logger.debug(error) except Exception: pass - raise Exception(f'Unable to communicate with Github API for {action_name}, error: {error_message}') + raise Exception(f"Unable to communicate with Github API for {action_name}, error: {error_message}") def get_machines_types(self, repo_name): - return self.get(f'/repos/{self.org}/{repo_name}/codespaces/machines') + return self.get(f"/repos/{self.org}/{repo_name}/codespaces/machines") def subscribe_to_repo(self, owner, repo_name, subscription_token): payload = { - 'name': 'web', - 'active': True, - 'events': ['push'], - 'config': { - 'url': 
f'{API_URL}/v1/monitoring/github/webhook/{subscription_token}', - 'content_type': 'json' - } + "name": "web", + "active": True, + "events": ["push"], + "config": {"url": f"{API_URL}/v1/monitoring/github/webhook/{subscription_token}", "content_type": "json"}, } - return self.post(f'/repos/{owner}/{repo_name}/hooks', request_data=payload) + return self.post(f"/repos/{owner}/{repo_name}/hooks", request_data=payload) def unsubscribe_from_repo(self, owner, repo_name, hook_id): - return self.delete(f'/repos/{owner}/{repo_name}/hooks/{hook_id}') + return self.delete(f"/repos/{owner}/{repo_name}/hooks/{hook_id}") def file_exists(self, url): # Example URL: https://github.com/owner/repo/blob/branch/path/to/file # Extract necessary parts of the URL - parts = url.split('/') + parts = url.split("/") owner = parts[3] repo_name = parts[4] branch = parts[6] - path_to_file = '/'.join(parts[7:]) # Join the remaining parts to form the path + path_to_file = "/".join(parts[7:]) # Join the remaining parts to form the path # Make a request to the GitHub API - response = self.head(f'/repos/{owner}/{repo_name}/contents/{path_to_file}?ref={branch}') + response = self.head(f"/repos/{owner}/{repo_name}/contents/{path_to_file}?ref={branch}") # Check if the file exists return response.status_code == 200 def create_container(self, repo_name): - return self.post(f'/repos/{self.org}/{repo_name}/codespaces') + return self.post(f"/repos/{self.org}/{repo_name}/codespaces") def get_org_members(self): results = [] chunk = None while chunk is None or len(chunk) == self.page_size: - chunk = self.get(f'/orgs/{self.org}/members', - request_data={ - 'per_page': self.page_size, - 'page': int(len(results) / self.page_size) + 1 - }) + chunk = self.get( + f"/orgs/{self.org}/members", + request_data={"per_page": self.page_size, "page": int(len(results) / self.page_size) + 1}, + ) results = results + chunk return results - def invite_org_member(self, email, role='direct_member', team_ids=None): + def invite_org_member(self, email, role="direct_member", team_ids=None): if team_ids is None: team_ids = [] - return self.post(f'/orgs/{self.org}/invitations', - request_data={ - 'email': email, - 'role': role, - 'team_ids': [12, 26] - }) + return self.post( + f"/orgs/{self.org}/invitations", request_data={"email": email, "role": role, "team_ids": [12, 26]} + ) def delete_org_member(self, username): - return self.delete(f'/orgs/{self.org}/members/{username}') - - def get_org_repos(self, - organization: str, - type: str = 'all', - per_page: int = 30, - sort: str = 'created', - direction: str = 'asc') -> Generator[list[dict], None, None]: + return self.delete(f"/orgs/{self.org}/members/{username}") + + def get_org_repos( + self, organization: str, type: str = "all", per_page: int = 30, sort: str = "created", direction: str = "asc" + ) -> Generator[list[dict], None, None]: if per_page > 100: - raise Exception('per_page cannot be greater than 100') + raise Exception("per_page cannot be greater than 100") - if type not in ['all', 'public', 'private', 'forks', 'sources', 'member']: - raise Exception('Invalid type') + if type not in ["all", "public", "private", "forks", "sources", "member"]: + raise Exception("Invalid type") - if sort not in ['created', 'updated', 'pushed', 'full_name']: - raise Exception('Invalid sort') + if sort not in ["created", "updated", "pushed", "full_name"]: + raise Exception("Invalid sort") - if direction not in ['asc', 'desc']: - raise Exception('Invalid direction') + if direction not in ["asc", "desc"]: + raise 
Exception("Invalid direction") page = 0 while True: @@ -178,7 +168,7 @@ def get_org_repos(self, break res = self.get( - f'/orgs/{organization}/repos?page={page}&type={type}&per_page={per_page}&sort={sort}&direction={direction}' + f"/orgs/{organization}/repos?page={page}&type={type}&per_page={per_page}&sort={sort}&direction={direction}" ) if len(res) == 0: @@ -187,5 +177,5 @@ def get_org_repos(self, yield res def delete_org_repo(self, owner: str, repo: str): - res = self.delete(f'/repos/{owner}/{repo}') + res = self.delete(f"/repos/{owner}/{repo}") return res diff --git a/breathecode/services/google_cloud/__init__.py b/breathecode/services/google_cloud/__init__.py index 8d84dd755..0458e0ebb 100644 --- a/breathecode/services/google_cloud/__init__.py +++ b/breathecode/services/google_cloud/__init__.py @@ -1,6 +1,7 @@ """ Google Cloud Service """ + from .datastore import * # noqa: F401 from .recaptcha import * # noqa: F401 from .function_v1 import * # noqa: F401 diff --git a/breathecode/services/google_cloud/big_query.py b/breathecode/services/google_cloud/big_query.py index 1a8252dec..6b3a960f3 100644 --- a/breathecode/services/google_cloud/big_query.py +++ b/breathecode/services/google_cloud/big_query.py @@ -18,11 +18,11 @@ client = None engine = None -__all__ = ['BigQuery'] +__all__ = ["BigQuery"] def is_test_env(): - return os.getenv('ENV') == 'test' + return os.getenv("ENV") == "test" class BigQueryModel: @@ -63,7 +63,7 @@ def save(self): raise Exception(errors) -class BigQuerySet(): +class BigQuerySet: query: dict[str, Any] agg: list[Any] fields: Optional[list[str]] @@ -90,7 +90,7 @@ def _get_table(self) -> Table: if self._table_ref: return self._table_ref - table_ref = f'{self.dataset}.{self.table}' + table_ref = f"{self.dataset}.{self.table}" # Fetch the schema of the table table = self.client.get_table(table_ref) @@ -122,20 +122,20 @@ def update_schema(self, schema: list[SchemaField]) -> None: table = self._get_table() table.schema = schema - self.client.update_table(table, ['schema']) + self.client.update_table(table, ["schema"]) def set_query(self, *args: Any, **kwargs: Any) -> None: self.query.update(kwargs) - def limit_by(self, name: int) -> 'BigQuerySet': + def limit_by(self, name: int) -> "BigQuerySet": self.limit = name return self - def order_by(self, *name: str) -> 'BigQuerySet': + def order_by(self, *name: str) -> "BigQuerySet": self.order = name return self - def group_by(self, *name: str) -> 'BigQuerySet': + def group_by(self, *name: str) -> "BigQuerySet": self.group = name return self @@ -156,41 +156,41 @@ def build(self) -> RowIterator: query_job = self.client.query(sql, *params, **kwparams) return query_job.result() - def filter(self, *args: Any, **kwargs: Any) -> 'BigQuerySet': + def filter(self, *args: Any, **kwargs: Any) -> "BigQuerySet": self.set_query(*args, **kwargs) return self def attribute_parser(self, key: str) -> tuple[str, str, str]: - operand = '=' - key = key.replace('__', '.') - if key[-4:] == '.gte': + operand = "=" + key = key.replace("__", ".") + if key[-4:] == ".gte": key = key[:-4] - operand = '>=' - elif key[-3:] == '.gt': + operand = ">=" + elif key[-3:] == ".gt": key = key[:-3] - operand = '>' - elif key[-3:] == '.lt': + operand = ">" + elif key[-3:] == ".lt": key = key[:-3] - operand = '<' - if key[-4:] == '.lte': + operand = "<" + if key[-4:] == ".lte": key = key[:-4] - operand = '<=' - if key[-5:] == '.like': + operand = "<=" + if key[-5:] == ".like": key = key[:-5] - operand = 'LIKE' - return key, operand, 'x__' + key.replace('.', '__') + 
operand = "LIKE" + return key, operand, "x__" + key.replace(".", "__") def get_type(self, elem: Any) -> None: if isinstance(elem, int): - return 'INT64' + return "INT64" if isinstance(elem, float): - return 'FLOAT64' + return "FLOAT64" if isinstance(elem, bool): - return 'BOOL' + return "BOOL" if isinstance(elem, str): - return 'STRING' + return "STRING" if isinstance(elem, datetime): - return 'DATETIME' + return "DATETIME" def get_params(self) -> tuple[list[Any], dict[str, Any]]: if not self.query: @@ -204,11 +204,11 @@ def get_params(self) -> tuple[list[Any], dict[str, Any]]: query_params.append(bigquery.ScalarQueryParameter(var_name, self.get_type(val), val)) job_config = bigquery.QueryJobConfig(query_parameters=query_params) - kwparams['job_config'] = job_config + kwparams["job_config"] = job_config return params, kwparams - def select(self, *names: str) -> 'BigQuerySet': + def select(self, *names: str) -> "BigQuerySet": self.fields = names return self @@ -216,15 +216,15 @@ def aggregation_parser(self, agg: Any) -> tuple[str, str]: operation = None attribute = None if isinstance(agg, Sum): - operation = 'SUM' + operation = "SUM" attribute = agg._constructor_args[0][0] if isinstance(agg, Count): - operation = 'COUNT' + operation = "COUNT" attribute = agg._constructor_args[0][0] if isinstance(agg, Avg): - operation = 'AVG' + operation = "AVG" attribute = agg._constructor_args[0][0] return operation, attribute @@ -248,54 +248,54 @@ def sql(self, aggs=None) -> str: query = f"""SELECT * FROM `{self.project_id}.{self.dataset}.{self.table}` """ if self.query: - query += 'WHERE ' + query += "WHERE " for key, _ in self.query.items(): key, operand, var_name = self.attribute_parser(key) - query += f'{key} {operand} @{var_name} AND ' + query += f"{key} {operand} @{var_name} AND " query = query[:-5] if self.group: - group_by = ', '.join(self.group) - query += f' GROUP BY {group_by}' + group_by = ", ".join(self.group) + query += f" GROUP BY {group_by}" if self.order: - order_by = ', '.join(self.order) - query += f' ORDER BY {order_by} DESC' + order_by = ", ".join(self.order) + query += f" ORDER BY {order_by} DESC" if self.limit: - query += f' LIMIT {self.limit}' + query += f" LIMIT {self.limit}" return query def json_query(self, query: dict[str, Any]): - if 'filter' in query: - self.filter(**query['filter']) + if "filter" in query: + self.filter(**query["filter"]) - if 'fields' in query: - self.select(*query['fields']) + if "fields" in query: + self.select(*query["fields"]) - if 'by' in query: - self.group_by(*query['by']) + if "by" in query: + self.group_by(*query["by"]) - if 'order' in query: - self.order_by(*query['order']) + if "order" in query: + self.order_by(*query["order"]) - if 'limit' in query: - self.limit_by(query['limit']) + if "limit" in query: + self.limit_by(query["limit"]) - if 'grouping_function' in query: - grouping_function = query['grouping_function'] + if "grouping_function" in query: + grouping_function = query["grouping_function"] aggs = [] - if 'sum' in grouping_function: - for value in grouping_function['sum']: + if "sum" in grouping_function: + for value in grouping_function["sum"]: aggs.append(Sum(value)) - if 'count' in grouping_function: - for value in grouping_function['count']: + if "count" in grouping_function: + for value in grouping_function["count"]: aggs.append(Count(value)) - if 'avg' in grouping_function: - for value in grouping_function['avg']: + if "avg" in grouping_function: + for value in grouping_function["avg"]: aggs.append(Avg(value)) result = 
self.aggregate(*aggs) @@ -348,21 +348,21 @@ def _setup_engine(cls): credentials.resolve_credentials() if not engine and is_test_env(): - engine = create_engine('sqlite:///:memory:', echo=False) + engine = create_engine("sqlite:///:memory:", echo=False) - client_options = ClientOptions(api_endpoint='http://0.0.0.0:9050') + client_options = ClientOptions(api_endpoint="http://0.0.0.0:9050") client = bigquery.Client( - 'test', + "test", client_options=client_options, credentials=AnonymousCredentials(), ) if not engine: - project = os.getenv('GOOGLE_PROJECT_ID', '') - engine = create_engine(f'bigquery://{project}') + project = os.getenv("GOOGLE_PROJECT_ID", "") + engine = create_engine(f"bigquery://{project}") credentials.resolve_credentials() - client = bigquery.Client(location='us-central1') + client = bigquery.Client(location="us-central1") @classmethod def session(cls) -> sessionmaker: @@ -399,7 +399,7 @@ def client(cls) -> tuple[bigquery.Client, str, str]: cls._setup_engine() credentials.resolve_credentials() - return client, os.getenv('GOOGLE_PROJECT_ID', 'test'), os.getenv('BIGQUERY_DATASET', '') + return client, os.getenv("GOOGLE_PROJECT_ID", "test"), os.getenv("BIGQUERY_DATASET", "") @classmethod def table(cls, table: str) -> BigQuerySet: diff --git a/breathecode/services/google_cloud/credentials.py b/breathecode/services/google_cloud/credentials.py index 9d3181035..958d4e5cd 100644 --- a/breathecode/services/google_cloud/credentials.py +++ b/breathecode/services/google_cloud/credentials.py @@ -6,7 +6,7 @@ logger = logging.getLogger(__name__) -__all__ = ['resolve_credentials'] +__all__ = ["resolve_credentials"] def resolve_credentials(): diff --git a/breathecode/services/google_cloud/datastore.py b/breathecode/services/google_cloud/datastore.py index 9eb6764a8..9826208b8 100644 --- a/breathecode/services/google_cloud/datastore.py +++ b/breathecode/services/google_cloud/datastore.py @@ -6,11 +6,12 @@ logger = logging.getLogger(__name__) -__all__ = ['Datastore'] +__all__ = ["Datastore"] class Datastore: """Google Cloud Storage""" + client = None def __init__(self): @@ -26,22 +27,22 @@ def fetch(self, order_by=None, **kwargs): Returns: Fetch: Fetch object """ - kind = kwargs.pop('kind') + kind = kwargs.pop("kind") query = self.client.query(kind=kind) limit = 100 offset = 0 - if 'offset' in kwargs: - offset = kwargs['offset'] - kwargs.pop('offset') + if "offset" in kwargs: + offset = kwargs["offset"] + kwargs.pop("offset") - if 'limit' in kwargs: - limit = kwargs['limit'] - kwargs.pop('limit') + if "limit" in kwargs: + limit = kwargs["limit"] + kwargs.pop("limit") for key in kwargs: - query.add_filter(key, '=', kwargs[key]) + query.add_filter(key, "=", kwargs[key]) if order_by: query.order = order_by @@ -67,11 +68,11 @@ def count(self, order_by=None, **kwargs): """ - kind = kwargs.pop('kind') + kind = kwargs.pop("kind") query = self.client.query(kind=kind) for key in kwargs: - query.add_filter(key, '=', kwargs[key]) + query.add_filter(key, "=", kwargs[key]) query.keys_only() diff --git a/breathecode/services/google_cloud/file.py b/breathecode/services/google_cloud/file.py index ae754e046..78d1ca600 100644 --- a/breathecode/services/google_cloud/file.py +++ b/breathecode/services/google_cloud/file.py @@ -7,11 +7,12 @@ logger = logging.getLogger(__name__) -__all__ = ['File'] +__all__ = ["File"] class File: """Google Cloud Storage""" + bucket: Bucket blob: Blob file_name: str @@ -33,14 +34,14 @@ def delete(self): self.blob.delete() @circuit - def upload(self, content, public: bool = False, 
content_type: str = 'text/plain') -> None: + def upload(self, content, public: bool = False, content_type: str = "text/plain") -> None: """Upload Blob from Bucket""" self.blob = self.bucket.blob(self.file_name) if content_type is None: - content_type = 'application/octet-stream' + content_type = "application/octet-stream" - if (isinstance(content, str) or isinstance(content, bytes)): + if isinstance(content, str) or isinstance(content, bytes): self.blob.upload_from_string(content, content_type=content_type) else: content.seek(0) @@ -62,20 +63,16 @@ def url(self) -> str: return self.blob.public_url @overload - def download(self, file: StringIO | TextIOWrapper) -> None: - ... + def download(self, file: StringIO | TextIOWrapper) -> None: ... @overload - def download(self, file: BytesIO | BufferedReader) -> None: - ... + def download(self, file: BytesIO | BufferedReader) -> None: ... @overload - def download(self, file: None) -> bytes: - ... + def download(self, file: None) -> bytes: ... @overload - def download(self) -> bytes: - ... + def download(self) -> bytes: ... @circuit def download(self, file: Optional[BytesIO | StringIO]) -> bytes | None: @@ -100,7 +97,7 @@ def __init__(self): def write(self, value): """Write the value by returning it, instead of storing in a buffer.""" - self.pieces.append(value.decode('latin1')) + self.pieces.append(value.decode("latin1")) def all(self): return self.pieces diff --git a/breathecode/services/google_cloud/function_v1.py b/breathecode/services/google_cloud/function_v1.py index 1e0a8b8d1..815d375cd 100644 --- a/breathecode/services/google_cloud/function_v1.py +++ b/breathecode/services/google_cloud/function_v1.py @@ -5,7 +5,7 @@ logger = logging.getLogger(__name__) -__all__ = ['Function', 'FunctionV1'] +__all__ = ["Function", "FunctionV1"] class FunctionV1: @@ -14,50 +14,47 @@ class FunctionV1: service_url: str method: str - def __init__(self, region, project_id, name, method='POST'): + def __init__(self, region, project_id, name, method="POST"): """Google Cloud Function constructor. - Args: - region (str): Google Cloud Function region - project_id (str): Google Cloud Function project id - name (str): Google Cloud Function name + Args: + region (str): Google Cloud Function region + project_id (str): Google Cloud Function project id + name (str): Google Cloud Function name """ credentials.resolve_credentials() - self.service_url = f'{region}-{project_id}.cloudfunctions.net/{name}' + self.service_url = f"{region}-{project_id}.cloudfunctions.net/{name}" self.method = method def call(self, data=None, params=None, timeout=2) -> requests.models.Response: """Call a Google Cloud Function. - Args: - data (dict): Arguments of Google Cloud Function. + Args: + data (dict): Arguments of Google Cloud Function. - Returns: - Response: Google Cloud Function response. + Returns: + Response: Google Cloud Function response. 
""" if params is None: params = {} auth_req = GCRequest() - token = id_token.fetch_id_token(auth_req, 'https://' + self.service_url) - headers = {'Authorization': f'Bearer {token}'} + token = id_token.fetch_id_token(auth_req, "https://" + self.service_url) + headers = {"Authorization": f"Bearer {token}"} if data: - headers['Content-Type'] = 'application/json' - headers['Accept'] = 'application/json' + headers["Content-Type"] = "application/json" + headers["Accept"] = "application/json" data = json.dumps(data) - request = requests.request(self.method, - 'https://' + self.service_url, - data=data, - headers=headers, - params=params, - timeout=timeout) + request = requests.request( + self.method, "https://" + self.service_url, data=data, headers=headers, params=params, timeout=timeout + ) - logger.info(f'Cloud function {self.service_url}') - logger.info(request.content.decode('utf-8')) + logger.info(f"Cloud function {self.service_url}") + logger.info(request.content.decode("utf-8")) return request diff --git a/breathecode/services/google_cloud/function_v2.py b/breathecode/services/google_cloud/function_v2.py index 56e1da4d3..08d695d1d 100644 --- a/breathecode/services/google_cloud/function_v2.py +++ b/breathecode/services/google_cloud/function_v2.py @@ -5,7 +5,7 @@ logger = logging.getLogger(__name__) -__all__ = ['FunctionV2'] +__all__ = ["FunctionV2"] class FunctionV2: @@ -14,7 +14,7 @@ class FunctionV2: service_url: str method: str - def __init__(self, url, method='POST'): + def __init__(self, url, method="POST"): """ Google Cloud Function constructor. @@ -39,22 +39,19 @@ def call(self, data=None, params=None, timeout=2) -> requests.models.Response: auth_req = GCRequest() token = id_token.fetch_id_token(auth_req, self.service_url) - headers = {'Authorization': f'Bearer {token}'} + headers = {"Authorization": f"Bearer {token}"} if data: - headers['Content-Type'] = 'application/json' - headers['Accept'] = 'application/json' + headers["Content-Type"] = "application/json" + headers["Accept"] = "application/json" data = json.dumps(data) - request = requests.request(self.method, - self.service_url, - data=data, - headers=headers, - params=params, - timeout=timeout) + request = requests.request( + self.method, self.service_url, data=data, headers=headers, params=params, timeout=timeout + ) - logger.info(f'Cloud function {self.service_url}') - logger.info(request.content.decode('utf-8')) + logger.info(f"Cloud function {self.service_url}") + logger.info(request.content.decode("utf-8")) return request diff --git a/breathecode/services/google_cloud/recaptcha.py b/breathecode/services/google_cloud/recaptcha.py index a94661096..1925a9e8b 100644 --- a/breathecode/services/google_cloud/recaptcha.py +++ b/breathecode/services/google_cloud/recaptcha.py @@ -9,7 +9,7 @@ logger = logging.getLogger(__name__) -__all__ = ['Recaptcha'] +__all__ = ["Recaptcha"] class Recaptcha: @@ -18,8 +18,9 @@ class Recaptcha: def __init__(self): resolve_credentials() - def create_assessment(self, project_id: str, recaptcha_site_key: str, token: str, - recaptcha_action: str) -> Assessment: + def create_assessment( + self, project_id: str, recaptcha_site_key: str, token: str, recaptcha_action: str + ) -> Assessment: """Create an assessment to analyze the risk of a UI action. 
Args: project_id: GCloud Project ID @@ -37,7 +38,7 @@ def create_assessment(self, project_id: str, recaptcha_site_key: str, token: str assessment = recaptchaenterprise_v1.Assessment() assessment.event = event - project_name = f'projects/{project_id}' + project_name = f"projects/{project_id}" # Build the assessment request. request = recaptchaenterprise_v1.CreateAssessmentRequest() @@ -48,28 +49,34 @@ def create_assessment(self, project_id: str, recaptcha_site_key: str, token: str # Check if the token is valid. if not response.token_properties.valid: - logger.error('The CreateAssessment call failed because the token was ' + - 'invalid for for the following reasons: ' + str(response.token_properties.invalid_reason)) + logger.error( + "The CreateAssessment call failed because the token was " + + "invalid for for the following reasons: " + + str(response.token_properties.invalid_reason) + ) raise ValidationException( - f'Invalid token for the following reasons: {str(response.token_properties.invalid_reason)}', code=400) + f"Invalid token for the following reasons: {str(response.token_properties.invalid_reason)}", code=400 + ) # Check if the expected action was executed. if response.token_properties.action != recaptcha_action: - logger.error('The action attribute in your reCAPTCHA tag does' + - 'not match the action you are expecting to score') + logger.error( + "The action attribute in your reCAPTCHA tag does" + "not match the action you are expecting to score" + ) raise ValidationException( - 'The action attribute in your reCAPTCHA tag does not match the action you are expecting to score', - code=400) + "The action attribute in your reCAPTCHA tag does not match the action you are expecting to score", + code=400, + ) else: # Get the risk score and the reason(s) # For more information on interpreting the assessment, # see: https://cloud.google.com/recaptcha-enterprise/docs/interpret-assessment for reason in response.risk_analysis.reasons: logger.info(reason) - logger.info('The reCAPTCHA score for this token is: ' + str(response.risk_analysis.score)) + logger.info("The reCAPTCHA score for this token is: " + str(response.risk_analysis.score)) # Get the assessment name (id). Use this to annotate the assessment. - assessment_name = client.parse_assessment_path(response.name).get('assessment') - logger.info(f'Assessment name: {assessment_name}') + assessment_name = client.parse_assessment_path(response.name).get("assessment") + logger.info(f"Assessment name: {assessment_name}") return response def create_assessment_v2(self, project_id: str, recaptcha_site_key: str, token: str) -> Assessment: @@ -89,7 +96,7 @@ def create_assessment_v2(self, project_id: str, recaptcha_site_key: str, token: assessment = recaptchaenterprise_v1.Assessment() assessment.event = event - project_name = f'projects/{project_id}' + project_name = f"projects/{project_id}" # Build the assessment request. request = recaptchaenterprise_v1.CreateAssessmentRequest() @@ -101,18 +108,23 @@ def create_assessment_v2(self, project_id: str, recaptcha_site_key: str, token: # Check if the token is valid. 
     if not response.token_properties.valid:
         from breathecode.utils.validation_exception import ValidationException
-        logger.error('The CreateAssessment call failed because the token was ' +
-                     'invalid for for the following reasons: ' + str(response.token_properties.invalid_reason))
+
+        logger.error(
+            "The CreateAssessment call failed because the token was "
+            + "invalid for for the following reasons: "
+            + str(response.token_properties.invalid_reason)
+        )
         raise ValidationException(
-            f'Invalid token for the following reasons: {str(response.token_properties.invalid_reason)}', code=400)
+            f"Invalid token for the following reasons: {str(response.token_properties.invalid_reason)}", code=400
+        )
 
     # Get the risk score and the reason(s)
     # For more information on interpreting the assessment,
     # see: https://cloud.google.com/recaptcha-enterprise/docs/interpret-assessment
     for reason in response.risk_analysis.reasons:
         logger.info(reason)
-    logger.info('The reCAPTCHA score for this token is: ' + str(response.risk_analysis.score))
+    logger.info("The reCAPTCHA score for this token is: " + str(response.risk_analysis.score))
 
     # Get the assessment name (id). Use this to annotate the assessment.
-    assessment_name = client.parse_assessment_path(response.name).get('assessment')
-    logger.info(f'Assessment name: {assessment_name}')
+    assessment_name = client.parse_assessment_path(response.name).get("assessment")
+    logger.info(f"Assessment name: {assessment_name}")
 
     return response
diff --git a/breathecode/services/google_cloud/storage.py b/breathecode/services/google_cloud/storage.py
index 49081499a..2c098ecf8 100644
--- a/breathecode/services/google_cloud/storage.py
+++ b/breathecode/services/google_cloud/storage.py
@@ -6,11 +6,12 @@
 logger = logging.getLogger(__name__)
 
-__all__ = ['Storage']
+__all__ = ["Storage"]
 
 
 class Storage:
     """Google Cloud Storage"""
+
     client: storage.Client
 
     def __init__(self) -> None:
diff --git a/breathecode/services/google_meet/google_meet.py b/breathecode/services/google_meet/google_meet.py
index 9bab9e5fc..d5dc1835f 100644
--- a/breathecode/services/google_meet/google_meet.py
+++ b/breathecode/services/google_meet/google_meet.py
@@ -6,7 +6,7 @@
 from google.apps.meet_v2.types import Space
 from google.protobuf.field_mask_pb2 import FieldMask
 
-__all__ = ['GoogleMeet']
+__all__ = ["GoogleMeet"]
 
 
 class CreateSpaceRequest(TypedDict):
@@ -191,7 +191,8 @@ async def get_participant(self, **kwargs: Unpack[GetParticipantRequest]) -> meet
         return await self.aget_participant(**kwargs)
 
     async def alist_participant_sessions(
-            self, **kwargs: Unpack[ListParticipantSessionsRequest]) -> pagers.ListParticipantSessionsAsyncPager:
+        self, **kwargs: Unpack[ListParticipantSessionsRequest]
+    ) -> pagers.ListParticipantSessionsAsyncPager:
         # Create a client
         client = await self.conference_records_service_client()
 
@@ -201,8 +202,9 @@ async def alist_participant_sessions(
         # Make the request
         return await client.list_participant_sessions(request=request)
 
-    async def aget_participant_session(self,
-                                       **kwargs: Unpack[GetParticipantSessionRequest]) -> meet_v2.ParticipantSession:
+    async def aget_participant_session(
+        self, **kwargs: Unpack[GetParticipantSessionRequest]
+    ) -> meet_v2.ParticipantSession:
         # Create a client
         client = await self.conference_records_service_client()
 
@@ -213,8 +215,9 @@ async def aget_participant_session(self,
         return await client.get_participant_session(request=request)
 
     @async_to_sync
-    async def get_participant_session(self,
-                                      **kwargs: Unpack[GetParticipantSessionRequest]) -> meet_v2.ParticipantSession:
+    async def get_participant_session(
+        self, **kwargs: Unpack[GetParticipantSessionRequest]
+    ) -> meet_v2.ParticipantSession:
         return await self.aget_participant_session(**kwargs)
 
     async def alist_recordings(self, **kwargs: Unpack[ListRecordingsRequest]) -> pagers.ListRecordingsAsyncPager:
@@ -272,7 +275,8 @@ async def get_transcript(self, **kwargs: Unpack[GetTranscriptRequest]) -> meet_v
         return await self.aget_transcript(**kwargs)
 
     async def alist_conference_records(
-            self, **kwargs: Unpack[ListConferenceRecordsRequest]) -> pagers.ListConferenceRecordsAsyncPager:
+        self, **kwargs: Unpack[ListConferenceRecordsRequest]
+    ) -> pagers.ListConferenceRecordsAsyncPager:
         # Create a client
         client = await self.conference_records_service_client()
 
diff --git a/breathecode/services/launch_darkly/client.py b/breathecode/services/launch_darkly/client.py
index a229e8111..5ff9bc0a6 100644
--- a/breathecode/services/launch_darkly/client.py
+++ b/breathecode/services/launch_darkly/client.py
@@ -7,7 +7,7 @@
 logger = logging.getLogger(__name__)
 
-__all__ = ['LaunchDarkly']
+__all__ = ["LaunchDarkly"]
 
 clients: dict[str, LDClient] = {}
 
@@ -17,7 +17,7 @@ class LaunchDarkly:
     client: LDClient
 
     def __init__(self, api_key=None):
-        api_key = api_key or os.getenv('LAUNCH_DARKLY_API_KEY')
+        api_key = api_key or os.getenv("LAUNCH_DARKLY_API_KEY")
 
         if api_key not in clients:
             config = Config(api_key)
@@ -33,9 +33,10 @@ def get_evaluation_reason(self, key, context, default=None) -> Any:
         return self.client.variation_detail(key, context, default)
 
     def _validate_key(self, key):
-        if not re.findall(r'^[a-zA-Z0-9_\-\.]+$', key):
-            raise ValueError('The chosen key is invalid, it just must incluse letters, numbers, '
-                             'underscore, dash and dot')
+        if not re.findall(r"^[a-zA-Z0-9_\-\.]+$", key):
+            raise ValueError(
+                "The chosen key is invalid, it just must incluse letters, numbers, " "underscore, dash and dot"
+            )
 
     def context(self, key: str, name: str, kind: str, value: dict) -> Context:
         self._validate_key(key)
diff --git a/breathecode/services/learnpack/actions/__init__.py b/breathecode/services/learnpack/actions/__init__.py
index f615ad425..d6034feb1 100644
--- a/breathecode/services/learnpack/actions/__init__.py
+++ b/breathecode/services/learnpack/actions/__init__.py
@@ -2,4 +2,4 @@
 from .open_step import open_step
 from .test import test
 
-__all__ = ['batch', 'open_step', 'test']
+__all__ = ["batch", "open_step", "test"]
diff --git a/breathecode/services/learnpack/actions/batch.py b/breathecode/services/learnpack/actions/batch.py
index 6d679906c..dd9e31936 100644
--- a/breathecode/services/learnpack/actions/batch.py
+++ b/breathecode/services/learnpack/actions/batch.py
@@ -8,13 +8,13 @@ def batch(self, webhook: LearnPackWebhook):
     # lazyload to fix circular import
     from breathecode.assignments.models import Task
 
-    _slug = webhook.payload['slug']
+    _slug = webhook.payload["slug"]
 
-    telemetry = AssignmentTelemetry.objects.filter(asset_slug=_slug, user__id=webhook.payload['user_id']).first()
+    telemetry = AssignmentTelemetry.objects.filter(asset_slug=_slug, user__id=webhook.payload["user_id"]).first()
     assets = Task.objects.filter(associated_slug=_slug, user__id=webhook.student.id)
     if assets.count() == 0:
-        raise Exception(f'Student with id {webhook.student.id} has not tasks with associated slug {_slug}')
+        raise Exception(f"Student with id {webhook.student.id} has not tasks with associated slug {_slug}")
 
     if telemetry is None:
         telemetry = AssignmentTelemetry(user=webhook.student, asset_slug=_slug, telemetry=webhook.payload)
diff --git a/breathecode/services/learnpack/actions/test.py b/breathecode/services/learnpack/actions/test.py
index 2485f3a01..6f4871027 100644
--- a/breathecode/services/learnpack/actions/test.py
+++ b/breathecode/services/learnpack/actions/test.py
@@ -6,4 +6,4 @@
 
 def test(self, webhook: LearnPackWebhook):
-    logger.info('performing test request')
+    logger.info("performing test request")
diff --git a/breathecode/services/learnpack/client.py b/breathecode/services/learnpack/client.py
index f39e0a8d5..28098ebef 100644
--- a/breathecode/services/learnpack/client.py
+++ b/breathecode/services/learnpack/client.py
@@ -33,53 +33,53 @@ def execute_action(self, webhook_id: int):
         webhook = LearnPackWebhook.objects.filter(id=webhook_id).first()
 
         if not webhook:
-            raise Exception('Invalid webhook')
+            raise Exception("Invalid webhook")
 
         try:
             if not webhook.event:
-                raise Exception('Impossible to determine learnpack event')
+                raise Exception("Impossible to determine learnpack event")
 
             if not webhook.payload:
-                raise Exception('Impossible to retrive webhook payload')
+                raise Exception("Impossible to retrive webhook payload")
 
-            if 'slug' not in webhook.payload:
-                raise Exception('Impossible to retrive learnpack exercise slug')
+            if "slug" not in webhook.payload:
+                raise Exception("Impossible to retrive learnpack exercise slug")
 
-            if 'user_id' not in webhook.payload:
-                raise Exception('Impossible to retrive learnpack user id')
+            if "user_id" not in webhook.payload:
+                raise Exception("Impossible to retrive learnpack user id")
             else:
-                user_id = webhook.payload['user_id']
+                user_id = webhook.payload["user_id"]
                 user = User.objects.filter(id=user_id).first()
                 if user is None:
-                    raise Exception(f'Learnpack student with user id {user_id} not found')
+                    raise Exception(f"Learnpack student with user id {user_id} not found")
                 else:
                     webhook.student = user
 
-            logger.debug(f'Executing => {webhook.event}')
+            logger.debug(f"Executing => {webhook.event}")
             if not hasattr(actions, webhook.event):
-                raise Exception(f'Learnpack telemetry event `{webhook.event}` is not implemented')
+                raise Exception(f"Learnpack telemetry event `{webhook.event}` is not implemented")
 
-            logger.debug('Action found')
+            logger.debug("Action found")
             fn = getattr(actions, webhook.event)
 
             try:
                 fn(self, webhook)
-                logger.debug('Mark action as done')
-                webhook.status = 'DONE'
-                webhook.status_text = 'OK'
+                logger.debug("Mark action as done")
+                webhook.status = "DONE"
+                webhook.status_text = "OK"
                 webhook.save()
 
             except Exception as e:
-                logger.error('Mark action with error')
+                logger.error("Mark action with error")
 
-                webhook.status = 'ERROR'
-                webhook.status_text = str(e)+'\n'.join(traceback.format_exception(None, e, e.__traceback__))
+                webhook.status = "ERROR"
+                webhook.status_text = str(e) + "\n".join(traceback.format_exception(None, e, e.__traceback__))
                 webhook.save()
 
         except Exception as e:
-            webhook.status = 'ERROR'
-            webhook.status_text = str(e)+'\n'.join(traceback.format_exception(None, e, e.__traceback__))
+            webhook.status = "ERROR"
+            webhook.status_text = str(e) + "\n".join(traceback.format_exception(None, e, e.__traceback__))
             webhook.save()
 
             raise e
@@ -93,16 +93,16 @@ def add_webhook_to_log(payload: dict):
             return None
 
         webhook = LearnPackWebhook()
-        is_streaming = 'event' in payload
+        is_streaming = "event" in payload
 
         if is_streaming:
-            webhook.event = payload['event']
+            webhook.event = payload["event"]
         else:
-            webhook.event = 'batch'
+            webhook.event = "batch"
 
         webhook.is_streaming = is_streaming
         webhook.payload = payload
-        webhook.status = 'PENDING'
+        webhook.status = "PENDING"
 
         webhook.save()
         return webhook
diff --git a/breathecode/services/seo/actions/_0_general_structure.py b/breathecode/services/seo/actions/_0_general_structure.py
index aa5377a22..4a21be4ea 100644
--- a/breathecode/services/seo/actions/_0_general_structure.py
+++ b/breathecode/services/seo/actions/_0_general_structure.py
@@ -10,18 +10,18 @@ def general_structure(client, report):
     asset = client.asset
 
     readme = asset.get_readme(parse=True)
-    if 'html' not in readme:
-        report.fatal(f'Asset with {asset.slug} readme cannot be parse into an HTML')
+    if "html" not in readme:
+        report.fatal(f"Asset with {asset.slug} readme cannot be parse into an HTML")
         return False
 
-    h1s = BeautifulSoup(readme['html'], features='html.parser').find_all('h1')
+    h1s = BeautifulSoup(readme["html"], features="html.parser").find_all("h1")
     total_h1s = len(h1s)
     if total_h1s > 0:
-        report.bad(-20, f'We found {total_h1s} please remove all of them')
+        report.bad(-20, f"We found {total_h1s} please remove all of them")
 
-    h2s = BeautifulSoup(readme['html'], features='html.parser').find_all('h2')
+    h2s = BeautifulSoup(readme["html"], features="html.parser").find_all("h2")
     if len(h2s) == 0:
-        report.bad(-20, 'Include at least one h2 heading in the article')
+        report.bad(-20, "Include at least one h2 heading in the article")
 
 
 general_structure.description = """
diff --git a/breathecode/services/seo/actions/_1_keyword_density.py b/breathecode/services/seo/actions/_1_keyword_density.py
index 599a8ddcd..7002e4670 100644
--- a/breathecode/services/seo/actions/_1_keyword_density.py
+++ b/breathecode/services/seo/actions/_1_keyword_density.py
@@ -8,19 +8,19 @@ def keyword_density(client, report):
 
     def remove_three_characters_words(str):
-        words = str.split(' ')
+        words = str.split(" ")
         words = list(filter(lambda word: len(word) > 3, words))
-        return ' '.join(words)
+        return " ".join(words)
 
     asset = client.asset
 
     readme = asset.get_readme(parse=True)
-    if 'html' not in readme:
-        report.fatal(f'Asset with {asset.slug} readme cannot be parse into an HTML')
+    if "html" not in readme:
+        report.fatal(f"Asset with {asset.slug} readme cannot be parse into an HTML")
         return False
 
     all_h2s = []
-    h2s = BeautifulSoup(readme['html'], features='html.parser').find_all('h2')
+    h2s = BeautifulSoup(readme["html"], features="html.parser").find_all("h2")
     for h in h2s:
         all_h2s.append(h.contents[0])
 
@@ -34,7 +34,7 @@ def remove_three_characters_words(str):
     if len(h2s_with_keywords) > 2:
         report.bad(
             -20,
-            f'Too many h2 tags contain the target keyword "{keyword.title}", please consider a max of 2 h2 tags with the keyword'
+            f'Too many h2 tags contain the target keyword "{keyword.title}", please consider a max of 2 h2 tags with the keyword',
         )
     elif len(h2s_with_keywords) == 0:
         report.bad(-20, f'Please add the target keyword "{keyword.title}" to at least one tag')
diff --git a/breathecode/services/seo/actions/_2_internal_linking.py b/breathecode/services/seo/actions/_2_internal_linking.py
index c24728951..ff39ea5b5 100644
--- a/breathecode/services/seo/actions/_2_internal_linking.py
+++ b/breathecode/services/seo/actions/_2_internal_linking.py
@@ -12,34 +12,36 @@ def internal_linking(client, report):
     asset = client.asset
 
     missing_cluster_paths = []
-    main_domain = ''
+    main_domain = ""
     for keyword in asset.seo_keywords.all():
         if keyword.cluster is not None:
-            if keyword.cluster.landing_page_url is None or keyword.cluster.landing_page_url == '':
-                report.fatal(f'Cluster {keyword.cluster.slug} its missing a landing page url')
+            if keyword.cluster.landing_page_url is None or keyword.cluster.landing_page_url == "":
+                report.fatal(f"Cluster {keyword.cluster.slug} its missing a landing page url")
                 continue
 
             url = urlparse(keyword.cluster.landing_page_url)
-            if url.netloc != '': main_domain = url.netloc
-            if url.path == '': url = keyword.cluster.landing_page_url
+            if url.netloc != "":
+                main_domain = url.netloc
+            if url.path == "":
+                url = keyword.cluster.landing_page_url
             missing_cluster_paths.append(url.path)
 
     if len(missing_cluster_paths) == 0:
-        report.fatal('No valid clusters landing urls')
+        report.fatal("No valid clusters landing urls")
 
     readme = asset.get_readme(parse=True)
-    if 'html' not in readme:
-        logger.fatal(f'Asset with {asset.slug} readme cannot be parse into an HTML')
+    if "html" not in readme:
+        logger.fatal(f"Asset with {asset.slug} readme cannot be parse into an HTML")
         return False
 
-    links = BeautifulSoup(readme['html'], features='html.parser').find_all('a')
+    links = BeautifulSoup(readme["html"], features="html.parser").find_all("a")
     internal_links = []
     for link in links:
-        if 'href' not in link.attrs:
+        if "href" not in link.attrs:
             report.bad(-1, f'No href found for anchor with label "{link.contents[0]}"')
 
-        href = link.attrs['href']
+        href = link.attrs["href"]
         url = urlparse(href)
 
         # clusters must be linked
@@ -48,23 +50,24 @@ def internal_linking(client, report):
         # clusters must be linked
         path = url.path
-        if path == '': path = href
+        if path == "":
+            path = href
 
         missing_cluster_paths = [i for i in missing_cluster_paths if i.lower() != path.lower()]
 
     for path in missing_cluster_paths:
-        report.fatal(f'Missing link to cluster: {path}')
+        report.fatal(f"Missing link to cluster: {path}")
 
     total_internal = len(internal_links)
     if total_internal < 4:
         missing = 4 - total_internal
-        report.bad(-(missing * 5), f'Please add at least {missing} more internal links')
+        report.bad(-(missing * 5), f"Please add at least {missing} more internal links")
 
     missing_links = False
-    text = readme['decoded']
-    url_regex = re.compile(r'[^!]\[.*\]\(.*\)')
+    text = readme["decoded"]
+    url_regex = re.compile(r"[^!]\[.*\]\(.*\)")
     words = text.split()
-    words = [' '.join(words[i:i + 500]) for i in range(0, len(words), 500)]
+    words = [" ".join(words[i : i + 500]) for i in range(0, len(words), 500)]
 
     for word in words:
         urls = re.findall(url_regex, word)
@@ -73,9 +76,9 @@ def internal_linking(client, report):
             break
 
     if missing_links:
-        report.bad(-15, 'Please add at least a link every 500 words')
+        report.bad(-15, "Please add at least a link every 500 words")
 
-    #report.good('No errors found on keyword density')
+    # report.good('No errors found on keyword density')
 
 
 internal_linking.description = """
diff --git a/breathecode/services/seo/actions/_3_images_use.py b/breathecode/services/seo/actions/_3_images_use.py
index 52705ae7b..d9a120a62 100644
--- a/breathecode/services/seo/actions/_3_images_use.py
+++ b/breathecode/services/seo/actions/_3_images_use.py
@@ -10,20 +10,20 @@ def images_use(client, report):
     asset = client.asset
 
     readme = asset.get_readme(parse=True)
-    if 'html' not in readme:
-        logger.fatal(f'Asset with {asset.slug} readme cannot be parse into an HTML')
+    if "html" not in readme:
+        logger.fatal(f"Asset with {asset.slug} readme cannot be parse into an HTML")
         return False
 
-    images = BeautifulSoup(readme['html'], features='html.parser').find_all('img')
+    images = BeautifulSoup(readme["html"], features="html.parser").find_all("img")
 
     for image in images:
-        if 'alt' not in image.attrs or image.attrs['alt'] == '':
+        if "alt" not in image.attrs or image.attrs["alt"] == "":
             report.bad(-10, f'No alt found for image with source "{image.attrs["src"]}"')
 
     if len(images) == 0:
-        report.bad(-5, 'Article must have at least one image, diagram or graphic')
+        report.bad(-5, "Article must have at least one image, diagram or graphic")
 
-    #report.good('No errors found on keyword density')
+    # report.good('No errors found on keyword density')
 
 
 images_use.description = """
diff --git a/breathecode/services/seo/client.py b/breathecode/services/seo/client.py
index a1520bf97..af414f267 100644
--- a/breathecode/services/seo/client.py
+++ b/breathecode/services/seo/client.py
@@ -10,7 +10,7 @@ class SEOAnalyzer:
     asset = None
     excluded = []
     shared_state = {}
-    influence = {'general_structure': 0.2, 'keyword_density': 0.375, 'internal_linking': 0.375, 'images_use': 0.05}
+    influence = {"general_structure": 0.2, "keyword_density": 0.375, "internal_linking": 0.375, "images_use": 0.05}
 
     def __init__(self, asset, exclude=None):
 
@@ -18,23 +18,23 @@ def __init__(self, asset, exclude=None):
             exclude = []
 
         if asset is None:
-            raise Exception('Invalid Asset')
+            raise Exception("Invalid Asset")
 
         self.asset = asset
-        self.excluded = [*exclude, '__init__']
+        self.excluded = [*exclude, "__init__"]
 
         total_influence = 0
         for slug in self.influence:
             total_influence += self.influence[slug]
         if total_influence != 1:
-            raise Exception(f'Total influence from all SEO reports should sum 1 but its {str(total_influence)}')
+            raise Exception(f"Total influence from all SEO reports should sum 1 but its {str(total_influence)}")
 
     def _get_actions(self):
         actions = []
         dir_path = os.path.dirname(os.path.realpath(__file__))
-        files = os.listdir(dir_path + '/actions')
+        files = os.listdir(dir_path + "/actions")
         for file_name in files:
-            if '.py' not in file_name:
+            if ".py" not in file_name:
                 continue
             actions.append(file_name[0:-3])
         return sorted(actions, key=str.lower)
@@ -57,7 +57,7 @@ def start(self):
             report = self.execute_report(act)
 
             if report.report_type not in self.influence:
-                logger.error(f'Influence for report {report.report_type} its not specified')
+                logger.error(f"Influence for report {report.report_type} its not specified")
                 self.influence[report.report_type] = 0
 
             rating += report.get_rating() * self.influence[report.report_type]
@@ -65,15 +65,15 @@ def start(self):
         self.asset.last_seo_scan_at = timezone.now()
         self.asset.optimization_rating = rating
-        self.asset.seo_json_status = {'rating': rating, 'log': log}
+        self.asset.seo_json_status = {"rating": rating, "log": log}
         self.asset.save()
 
         return self.asset.seo_json_status
 
     def execute_report(self, script_slug):
-        action_name = re.sub(r'_[0-9]+_', '', script_slug)
+        action_name = re.sub(r"_[0-9]+_", "", script_slug)
 
-        logger.debug(f'Executing SEP Report => {script_slug}')
+        logger.debug(f"Executing SEP Report => {script_slug}")
 
         report = SEOReport(
             report_type=action_name,
             asset=self.asset,
@@ -88,10 +88,10 @@ def execute_report(self, script_slug):
             try:
                 if self.asset.seo_keywords is None or self.asset.seo_keywords.count() == 0:
-                    raise Exception('Asset has not keywords associated')
+                    raise Exception("Asset has not keywords associated")
 
                 if self.asset.readme is None:
-                    raise Exception('Asset has not content')
+                    raise Exception("Asset has not content")
 
                 fn(self, report)
                 report.rating = report.get_rating()
@@ -102,23 +102,23 @@ def execute_report(self, script_slug):
                     pass
 
                 report.log = report.get_log()
-                report.status = 'OK'
+                report.status = "OK"
                 report.save()
 
                 self.shared_state = report.__shared_state
 
            except Exception
as e: - logger.exception('Report error') + logger.exception("Report error") report.rating = None report.log = str(e) - report.status = 'ERROR' + report.status = "ERROR" report.save() else: - message = f'SEO Report `{action_name}` is not implemented' + message = f"SEO Report `{action_name}` is not implemented" logger.debug(message) report.rating = None - report.status = 'ERROR' + report.status = "ERROR" report.log = message report.save() diff --git a/breathecode/services/slack/actions/monitoring.py b/breathecode/services/slack/actions/monitoring.py index 2d2889ade..13d28db3c 100644 --- a/breathecode/services/slack/actions/monitoring.py +++ b/breathecode/services/slack/actions/monitoring.py @@ -9,101 +9,77 @@ def __init__(self, context): self.context = context # disable endpoint testing until specific time - @action(only='staff') + @action(only="staff") def snooze_test_endpoint(self, **kwargs): - selected_date = kwargs['actions'][0]['selected_date'] - endpoint_id = kwargs['state']['endpoint_id'] + selected_date = kwargs["actions"][0]["selected_date"] + endpoint_id = kwargs["state"]["endpoint_id"] print(endpoint_id) e = Endpoint.objects.get(id=endpoint_id) e.paused_until = selected_date e.save() - return {'text': '✅ The endpoint test has been snoozed until ' + selected_date, 'response_type': 'ephemeral'} + return {"text": "✅ The endpoint test has been snoozed until " + selected_date, "response_type": "ephemeral"} # disable script until specific time - @action(only='staff') + @action(only="staff") def snooze_script(self, **kwargs): - selected_date = kwargs['actions'][0]['selected_date'] - script_id = kwargs['state']['script_id'] + selected_date = kwargs["actions"][0]["selected_date"] + script_id = kwargs["state"]["script_id"] print(script_id) e = MonitorScript.objects.get(id=script_id) e.paused_until = selected_date e.save() - return {'text': '✅ The script has been snoozed until ' + selected_date, 'response_type': 'ephemeral'} + return {"text": "✅ The script has been snoozed until " + selected_date, "response_type": "ephemeral"} def render_snooze_text_endpoint(endpoints): snooze_dates = [] for e in endpoints: - snooze_dates.append({ - 'type': 'section', - 'text': { - 'type': - 'mrkdwn', - 'text': - f'*App:* {e.application.title} \n *URL:* {e.url} \n *Status:* {e.status} \n *Details:* {e.status_text}', - }, - 'accessory': { - 'type': 'datepicker', - 'placeholder': { - 'type': 'plain_text', - 'text': 'Select a date to snooze', - 'emoji': True + snooze_dates.append( + { + "type": "section", + "text": { + "type": "mrkdwn", + "text": f"*App:* {e.application.title} \n *URL:* {e.url} \n *Status:* {e.status} \n *Details:* {e.status_text}", + }, + "accessory": { + "type": "datepicker", + "placeholder": {"type": "plain_text", "text": "Select a date to snooze", "emoji": True}, + "action_id": json.dumps( + {"class": "monitoring", "method": "snooze_test_endpoint", "endpoint_id": e.id} + ), }, - 'action_id': json.dumps({ - 'class': 'monitoring', - 'method': 'snooze_test_endpoint', - 'endpoint_id': e.id - }) } - }) + ) - return [{ - 'type': 'header', - 'text': { - 'type': 'plain_text', - 'text': '🛑 Endpoint monitor error!', - 'emoji': True - } - }] + snooze_dates + return [ + {"type": "header", "text": {"type": "plain_text", "text": "🛑 Endpoint monitor error!", "emoji": True}} + ] + snooze_dates def render_snooze_script(scripts): snooze_dates = [] for e in scripts: - snooze_dates.append({ - 'type': 'section', - 'text': { - 'type': - 'mrkdwn', - 'text': - f'*App:* {e.application.title} \n *Slug:* 
{e.script_slug} \n *Status:* {e.status} \n *Details:* \n ```{e.response_text}```', - }, - 'accessory': { - 'type': 'datepicker', - 'placeholder': { - 'type': 'plain_text', - 'text': 'Select a date to snooze', - 'emoji': True + snooze_dates.append( + { + "type": "section", + "text": { + "type": "mrkdwn", + "text": f"*App:* {e.application.title} \n *Slug:* {e.script_slug} \n *Status:* {e.status} \n *Details:* \n ```{e.response_text}```", + }, + "accessory": { + "type": "datepicker", + "placeholder": {"type": "plain_text", "text": "Select a date to snooze", "emoji": True}, + "action_id": json.dumps({"class": "monitoring", "method": "snooze_script", "script_id": e.id}), }, - 'action_id': json.dumps({ - 'class': 'monitoring', - 'method': 'snooze_script', - 'script_id': e.id - }) } - }) - - return [{ - 'type': 'header', - 'text': { - 'type': 'plain_text', - 'text': '🛑 Script monitor error!', - 'emoji': True - } - }] + snooze_dates + ) + + return [ + {"type": "header", "text": {"type": "plain_text", "text": "🛑 Script monitor error!", "emoji": True}} + ] + snooze_dates diff --git a/breathecode/services/slack/client.py b/breathecode/services/slack/client.py index e7b7b028f..6cbc2974e 100644 --- a/breathecode/services/slack/client.py +++ b/breathecode/services/slack/client.py @@ -7,7 +7,7 @@ class Slack: - HOST = 'https://slack.com/api/' + HOST = "https://slack.com/api/" headers = {} def __init__(self, token=None, command=None): @@ -18,73 +18,71 @@ def get(self, action_name, request_data=None): if request_data is None: request_data = {} - return self._call('GET', action_name, params=request_data) + return self._call("GET", action_name, params=request_data) def post(self, action_name, request_data=None): if request_data is None: request_data = {} - return self._call('POST', action_name, json=request_data) + return self._call("POST", action_name, json=request_data) def _call(self, method_name, action_name, params=None, json=None): if self.token is None: - raise Exception('Missing slack token') + raise Exception("Missing slack token") - if method_name != 'GET': + if method_name != "GET": self.headers = { - 'Authorization': 'Bearer ' + self.token, - 'Content-type': 'application/json', + "Authorization": "Bearer " + self.token, + "Content-type": "application/json", } else: params = { - 'token': self.token, + "token": self.token, **params, } - resp = requests.request(method=method_name, - url=self.HOST + action_name, - headers=self.headers, - params=params, - json=json, - timeout=10) + resp = requests.request( + method=method_name, url=self.HOST + action_name, headers=self.headers, params=params, json=json, timeout=10 + ) if resp.status_code == 200: data = resp.json() - if data['ok'] == False: - raise Exception('Slack API Error ' + data['error']) + if data["ok"] == False: + raise Exception("Slack API Error " + data["error"]) else: - logger.debug(f'Successfull call {method_name}: /{action_name}') + logger.debug(f"Successfull call {method_name}: /{action_name}") return data else: - raise Exception(f'Unable to communicate with Slack API, error: {resp.status_code}') + raise Exception(f"Unable to communicate with Slack API, error: {resp.status_code}") def execute_command(self, context): - patterns = {'users': r'\<@([^|]+)\|([^>]+)>', 'command': r'^(\w+)\s?'} - content = context['text'] + patterns = {"users": r"\<@([^|]+)\|([^>]+)>", "command": r"^(\w+)\s?"} + content = context["text"] payload = {} - _commands = re.findall(patterns['command'], content) + _commands = re.findall(patterns["command"], content) if 
len(_commands) != 1: - raise SlackException('Impossible to determine command', slug='command-does-not-found') + raise SlackException("Impossible to determine command", slug="command-does-not-found") - matches = re.findall(patterns['users'], content) - payload['users'] = [u[0] for u in matches] + matches = re.findall(patterns["users"], content) + payload["users"] = [u[0] for u in matches] - payload['context'] = context + payload["context"] = context if hasattr(commands, _commands[0]): response = self._execute_command(commands, _commands[0], payload) - if 'response_url' in context and response: - resp = requests.post(context['response_url'], json=response, timeout=3) + if "response_url" in context and response: + resp = requests.post(context["response_url"], json=response, timeout=3) return resp.status_code == 200 else: return True else: - raise SlackException(f'No implementation has been found for `{_commands[0]}` command', - slug='command-does-not-exist') + raise SlackException( + f"No implementation has been found for `{_commands[0]}` command", slug="command-does-not-exist" + ) def _execute_command(self, module, command, response): @@ -92,38 +90,38 @@ def _execute_command(self, module, command, response): def execute_action(self, context): - payload = json.loads(context['payload']) + payload = json.loads(context["payload"]) - if 'actions' not in payload or len(payload['actions']) == 0: - raise Exception('Impossible to determine action') + if "actions" not in payload or len(payload["actions"]) == 0: + raise Exception("Impossible to determine action") try: logger.debug(f"Slack action: {str(payload['actions'])}") - _data = json.loads(payload['actions'][0]['action_id']) - action_class = _data.pop('class', None) - method = _data.pop('method', None) - payload['action_state'] = _data + _data = json.loads(payload["actions"][0]["action_id"]) + action_class = _data.pop("class", None) + method = _data.pop("method", None) + payload["action_state"] = _data except Exception: - raise Exception('Invalid slack action format, must be json with class and method properties at least') + raise Exception("Invalid slack action format, must be json with class and method properties at least") - logger.debug(f'Executing {action_class} => {method}') + logger.debug(f"Executing {action_class} => {method}") if hasattr(actions, action_class): - logger.debug('Action found') - _module = getattr(actions, action_class) #get action module + logger.debug("Action found") + _module = getattr(actions, action_class) # get action module if not hasattr(_module, action_class.capitalize()): - raise Exception(f'Class {action_class.capitalize()} not found in module {action_class}') - _class = getattr(_module, action_class.capitalize())(payload) #factory the class + raise Exception(f"Class {action_class.capitalize()} not found in module {action_class}") + _class = getattr(_module, action_class.capitalize())(payload) # factory the class if not hasattr(_class, method): - raise Exception(f'Method {method} not found in slack action class {action_class.capitalize()}') + raise Exception(f"Method {method} not found in slack action class {action_class.capitalize()}") response = getattr(_class, method)(payload=payload) # call action method - if 'response_url' in payload and response: - resp = requests.post(payload['response_url'], json=response, timeout=3) + if "response_url" in payload and response: + resp = requests.post(payload["response_url"], json=response, timeout=3) return resp.status_code == 200 else: return True else: - raise 
Exception(f'No implementation has been found for this action: {action_class}') + raise Exception(f"No implementation has been found for this action: {action_class}") diff --git a/breathecode/services/slack/commands/chat.py b/breathecode/services/slack/commands/chat.py index 8de51209d..53b108014 100644 --- a/breathecode/services/slack/commands/chat.py +++ b/breathecode/services/slack/commands/chat.py @@ -3,6 +3,7 @@ - bot_slug: Name of the bot to chat with """ + import openai from breathecode.mentorship.models import ChatBot @@ -10,7 +11,7 @@ from ..exceptions import SlackException -@command(capable_of='chatbot_message') +@command(capable_of="chatbot_message") def execute(bot_name=None, academies=None, **context): if academies is None: @@ -23,22 +24,22 @@ def execute(bot_name=None, academies=None, **context): bot = query.first() if bot is None: - raise SlackException('No chatbot was found to respond this message.', slug='chatbot-not-found') + raise SlackException("No chatbot was found to respond this message.", slug="chatbot-not-found") - text = context['text'] + text = context["text"] openai.organization = bot.api_organization openai.api_key = bot.api_key - result = openai.Completion.create(model='text-davinci-003', prompt=text, max_tokens=2000, temperature=0) + result = openai.Completion.create(model="text-davinci-003", prompt=text, max_tokens=2000, temperature=0) - response = {'blocks': []} - response['blocks'].append(render_message(result)) + response = {"blocks": []} + response["blocks"].append(render_message(result)) return response def render_message(result): - message = result['choices'].pop() + message = result["choices"].pop() - return {'type': 'section', 'text': {'type': 'mrkdwn', 'text': f"""{message["text"]}"""}} + return {"type": "section", "text": {"type": "mrkdwn", "text": f"""{message["text"]}"""}} diff --git a/breathecode/services/slack/commands/cohort.py b/breathecode/services/slack/commands/cohort.py index e335291fc..28528f499 100644 --- a/breathecode/services/slack/commands/cohort.py +++ b/breathecode/services/slack/commands/cohort.py @@ -7,16 +7,17 @@ - text: Content of the slack channel """ + from breathecode.admissions.models import Cohort, CohortUser from ..decorator import command from ..exceptions import SlackException -@command(capable_of='read_cohort') +@command(capable_of="read_cohort") def execute(channel_id, academies, **context): - response = {'blocks': []} - response['blocks'].append(render_cohort(channel_id=channel_id, academies=academies)) + response = {"blocks": []} + response["blocks"].append(render_cohort(channel_id=channel_id, academies=academies)) return response @@ -26,25 +27,24 @@ def render_cohort(channel_id, academies): cohort = Cohort.objects.filter(slackchannel__slack_id=channel_id, academy__id__in=[academies]).first() if cohort is None: raise SlackException( - 'Cohort was not found as slack channel, make sure the channel name matches the cohort slug', - slug='cohort-not-found') + "Cohort was not found as slack channel, make sure the channel name matches the cohort slug", + slug="cohort-not-found", + ) - teachers = CohortUser.objects.filter(cohort=cohort, - role__in=['TEACHER', 'ASSISTANT'], - cohort__academy__id__in=[academies]) + teachers = CohortUser.objects.filter( + cohort=cohort, role__in=["TEACHER", "ASSISTANT"], cohort__academy__id__in=[academies] + ) return { - 'type': 'section', - 'text': { - 'type': - 'mrkdwn', - 'text': - f""" + "type": "section", + "text": { + "type": "mrkdwn", + "text": f""" *Cohort name:* {cohort.name} *Start 
Date:* {cohort.kickoff_date} *End Date:* {cohort.ending_date} *Current day:* {cohort.current_day} *Stage:* {cohort.stage} *Teachers:* {', '.join([cu.user.first_name + ' ' + cu.user.last_name for cu in teachers])} -""" - } +""", + }, } diff --git a/breathecode/services/slack/commands/student.py b/breathecode/services/slack/commands/student.py index 52fed012d..deabd03e3 100644 --- a/breathecode/services/slack/commands/student.py +++ b/breathecode/services/slack/commands/student.py @@ -8,6 +8,7 @@ - text: Content of the slack channel """ + import os import random @@ -16,28 +17,29 @@ from ..exceptions import SlackException -@command(capable_of='read_student') +@command(capable_of="read_student") def execute(users, academies, **context): from breathecode.admissions.models import CohortUser if len(users) == 0: - raise SlackException('No usernames found on the command', slug='users-not-provided') + raise SlackException("No usernames found on the command", slug="users-not-provided") - cohort_users = CohortUser.objects.filter(user__slackuser__slack_id=users[0], - role='STUDENT', - cohort__academy__id__in=[academies]) + cohort_users = CohortUser.objects.filter( + user__slackuser__slack_id=users[0], role="STUDENT", cohort__academy__id__in=[academies] + ) user = cohort_users.first() if user is None: raise SlackException( f'Student {users[0]} not found on any cohort for your available academies, if you feel you should have access " \ "to this information maybe you need to be added to the relevant academy for this student', - slug='cohort-user-not-found') + slug="cohort-user-not-found", + ) user = user.user - response = {'blocks': []} - response['blocks'].append(render_student(user, cohort_users)) + response = {"blocks": []} + response["blocks"].append(render_student(user, cohort_users)) return response @@ -46,9 +48,9 @@ def render_student(user, cohort_users): from breathecode.authenticate.models import Profile avatar_number = random.randint(1, 21) - avatar_url = os.getenv('API_URL', '') + f'/static/img/avatar-{avatar_number}.png' - github_username = 'not set' - phone = 'not set' + avatar_url = os.getenv("API_URL", "") + f"/static/img/avatar-{avatar_number}.png" + github_username = "not set" + phone = "not set" try: if user.profile.github_username: github_username = user.profile.github_username @@ -67,12 +69,10 @@ def render_student(user, cohort_users): pass return { - 'type': 'section', - 'text': { - 'type': - 'mrkdwn', - 'text': - f""" + "type": "section", + "text": { + "type": "mrkdwn", + "text": f""" *Student Name:* {user.first_name} {user.last_name} *Github*: {github_username} *Phone*: {phone} @@ -81,11 +81,11 @@ def render_student(user, cohort_users): ``` {jump().join([('- '+cu.cohort.name + ': 🎓' + to_string(cu.educational_status) + ' and 💰' + to_string(cu.finantial_status)) for cu in cohort_users])} ``` -""" +""", + }, + "accessory": { + "type": "image", + "image_url": to_string(avatar_url), + "alt_text": f"{user.first_name} {user.last_name}", }, - 'accessory': { - 'type': 'image', - 'image_url': to_string(avatar_url), - 'alt_text': f'{user.first_name} {user.last_name}' - } } diff --git a/breathecode/services/slack/decorator.py b/breathecode/services/slack/decorator.py index 99247cba2..b32bf8bc2 100644 --- a/breathecode/services/slack/decorator.py +++ b/breathecode/services/slack/decorator.py @@ -12,28 +12,30 @@ def decorator(function): def wrapper(*args, **kwargs): - if 'context' not in kwargs or kwargs['context'] is None: - raise SlackException('Missing scope information on slack command', 
slug='context-missing') - context = kwargs['context'] + if "context" not in kwargs or kwargs["context"] is None: + raise SlackException("Missing scope information on slack command", slug="context-missing") + context = kwargs["context"] profiles = None if capable_of is not None: - profiles = ProfileAcademy.objects.filter(user__slackuser__slack_id=context['user_id'], - academy__slackteam__slack_id=context['team_id'], - role__capabilities__slug=capable_of).values_list('academy__id', - flat=True) + profiles = ProfileAcademy.objects.filter( + user__slackuser__slack_id=context["user_id"], + academy__slackteam__slack_id=context["team_id"], + role__capabilities__slug=capable_of, + ).values_list("academy__id", flat=True) if len(profiles) == 0: raise SlackException( f"Your user {context['user_id']} don't have permissions to use this command, are you a staff or student on this academy?", - slug='unauthorized-user') + slug="unauthorized-user", + ) - kwargs['academies'] = profiles - kwargs['user_id'] = context['user_id'] - kwargs['team_id'] = context['team_id'] - kwargs['channel_id'] = context['channel_id'] - kwargs['text'] = context['text'] + kwargs["academies"] = profiles + kwargs["user_id"] = context["user_id"] + kwargs["team_id"] = context["team_id"] + kwargs["channel_id"] = context["channel_id"] + kwargs["text"] = context["text"] result = function(*args, **kwargs) return result @@ -49,27 +51,27 @@ def action(only=None): def decorator(function): def wrapper(*args, **kwargs): - if 'payload' not in kwargs or kwargs['payload'] is None: - raise Exception('Missing payload information on slack action') - context = kwargs['payload'] + if "payload" not in kwargs or kwargs["payload"] is None: + raise Exception("Missing payload information on slack action") + context = kwargs["payload"] profiles = None - if only == 'staff': + if only == "staff": profiles = ProfileAcademy.objects.filter( - user__slackuser__slack_id=context['user']['id'], - academy__slackteam__slack_id=context['team']['id']).values_list('academy__id', flat=True) + user__slackuser__slack_id=context["user"]["id"], academy__slackteam__slack_id=context["team"]["id"] + ).values_list("academy__id", flat=True) if len(profiles) == 0: raise Exception(f"Your user {context['user']['id']} don't have permissions execute this action") - kwargs['academies'] = profiles - kwargs['user_id'] = context['user']['id'] - kwargs['type'] = context['type'] - kwargs['state'] = context['action_state'] - kwargs['team_id'] = context['team']['id'] - kwargs['channel_id'] = context['channel']['id'] - kwargs['actions'] = context['actions'] - kwargs.pop('payload', None) + kwargs["academies"] = profiles + kwargs["user_id"] = context["user"]["id"] + kwargs["type"] = context["type"] + kwargs["state"] = context["action_state"] + kwargs["team_id"] = context["team"]["id"] + kwargs["channel_id"] = context["channel"]["id"] + kwargs["actions"] = context["actions"] + kwargs.pop("payload", None) result = function(*args, **kwargs) return result diff --git a/breathecode/services/slack/examples/get_channel_users.py b/breathecode/services/slack/examples/get_channel_users.py index 745ee5932..5d14a8673 100644 --- a/breathecode/services/slack/examples/get_channel_users.py +++ b/breathecode/services/slack/examples/get_channel_users.py @@ -1,20 +1,23 @@ from breathecode.services.slack import client -token = '12345' +token = "12345" api = client.Slack(token) -data = api.get('users.list', {'limit': 300}) +data = api.get("users.list", {"limit": 300}) -members = data['members'] -while 
'response_metadata' in data and 'next_cursor' in data['response_metadata'] and data['response_metadata'][ - 'next_cursor'] != '': - print('Next cursor: ', data['response_metadata']['next_cursor']) - data = api.get('users.list', {'limit': 300, 'cursor': data['response_metadata']['next_cursor']}) - members = members + data['members'] +members = data["members"] +while ( + "response_metadata" in data + and "next_cursor" in data["response_metadata"] + and data["response_metadata"]["next_cursor"] != "" +): + print("Next cursor: ", data["response_metadata"]["next_cursor"]) + data = api.get("users.list", {"limit": 300, "cursor": data["response_metadata"]["next_cursor"]}) + members = members + data["members"] print(len(members)) from breathecode.services.slack import client api = client.Slack(token) -data = api.get('users.list') -print(len(data['members'])) +data = api.get("users.list") +print(len(data["members"])) diff --git a/breathecode/services/slack/exceptions.py b/breathecode/services/slack/exceptions.py index c324263f7..3d182a65c 100644 --- a/breathecode/services/slack/exceptions.py +++ b/breathecode/services/slack/exceptions.py @@ -1,7 +1,7 @@ import os import logging -IS_TEST_ENV = os.getenv('ENV') == 'test' +IS_TEST_ENV = os.getenv("ENV") == "test" logger = logging.getLogger(__name__) @@ -10,8 +10,8 @@ class SlackException(Exception): def __init__(self, message, slug=None): if IS_TEST_ENV and slug: - logger.error(f'Slack error: {slug}') + logger.error(f"Slack error: {slug}") super().__init__(slug) else: - logger.error(f'Slack error: {message}') + logger.error(f"Slack error: {message}") super().__init__(message) diff --git a/breathecode/services/slack/tests/client/tests_client.py b/breathecode/services/slack/tests/client/tests_client.py index b714f8740..61057ce15 100644 --- a/breathecode/services/slack/tests/client/tests_client.py +++ b/breathecode/services/slack/tests/client/tests_client.py @@ -8,7 +8,7 @@ from unittest.mock import MagicMock, call, patch from breathecode.utils import AttrDict -fake_command = AttrDict(**{'fake': AttrDict(**{'execute': MagicMock(return_value='potato')})}) +fake_command = AttrDict(**{"fake": AttrDict(**{"execute": MagicMock(return_value="potato")})}) class SlackTestSuite(SlackTestCase): @@ -17,48 +17,50 @@ class SlackTestSuite(SlackTestCase): def test_slack_command___not_implemented(self): """Testing when command has not been implemented.""" - data = {'text': 'simple'} + data = {"text": "simple"} slack = Slack() - with self.assertRaisesMessage(SlackException, 'command-does-not-exist'): + with self.assertRaisesMessage(SlackException, "command-does-not-exist"): slack.execute_command(data) def test_slack_command___no_slack_user(self): """Testing when user has no slack_user.""" - data = {'text': 'student', 'user_id': 'name', 'team_id': 'team', 'channel_id': 'test'} + data = {"text": "student", "user_id": "name", "team_id": "team", "channel_id": "test"} slack = Slack() - with self.assertRaisesMessage(SlackException, 'unauthorized-user'): + with self.assertRaisesMessage(SlackException, "unauthorized-user"): slack.execute_command(data) - @patch('breathecode.services.slack.client.Slack._execute_command', MagicMock(return_value='some')) + @patch("breathecode.services.slack.client.Slack._execute_command", MagicMock(return_value="some")) def test_slack_execute_command___success(self): """Testing when execute_command() is successfully executed.""" - data = {'text': 'student', 'user_id': 'name', 'team_id': 'team', 'channel_id': 'test'} + data = {"text": "student", 
"user_id": "name", "team_id": "team", "channel_id": "test"} slack = Slack() expected = True result = slack.execute_command(data) self.assertEqual(result, expected) - self.assertEqual(slack._execute_command.call_args_list, [ - call(commands, 'student', { - 'users': [], - 'context': { - 'text': 'student', - 'user_id': 'name', - 'team_id': 'team', - 'channel_id': 'test' - } - }) - ]) + self.assertEqual( + slack._execute_command.call_args_list, + [ + call( + commands, + "student", + { + "users": [], + "context": {"text": "student", "user_id": "name", "team_id": "team", "channel_id": "test"}, + }, + ) + ], + ) def test_slack__execute_command__test_executor(self): """Testing how execute_command is being executed.""" - data = {'text': 'student', 'user_id': 'name', 'team_id': 'team', 'channel_id': 'test'} + data = {"text": "student", "user_id": "name", "team_id": "team", "channel_id": "test"} slack = Slack() - expected = 'potato' - result = slack._execute_command(fake_command, 'fake', data) + expected = "potato" + result = slack._execute_command(fake_command, "fake", data) self.assertEqual(result, expected) self.assertEqual(fake_command.fake.execute.call_args_list, [call(**data)]) diff --git a/breathecode/services/slack/tests/commands/tests_cohort.py b/breathecode/services/slack/tests/commands/tests_cohort.py index bc8f4f886..00be357b7 100644 --- a/breathecode/services/slack/tests/commands/tests_cohort.py +++ b/breathecode/services/slack/tests/commands/tests_cohort.py @@ -13,160 +13,172 @@ class SlackTestSuite(SlackTestCase): - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_slack_command___context_is_not_provided_or_is_none(self): """Testing when context is None or not provided.""" - with self.assertRaisesMessage(SlackException, 'context-missing'): + with self.assertRaisesMessage(SlackException, "context-missing"): result = execute(users=[]) - with self.assertRaisesMessage(SlackException, 'context-missing'): + with self.assertRaisesMessage(SlackException, "context-missing"): result = execute(users=[], context=None) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_slack_command___user_is_not_authorized(self): """Testing when user is not authorized.""" - data = {'text': 'cohort', 'user_id': 'name', 'team_id': 'team', 'channel_id': 'test'} + data = {"text": "cohort", "user_id": "name", "team_id": "team", "channel_id": "test"} - with self.assertRaisesMessage(SlackException, 'unauthorized-user'): + with self.assertRaisesMessage(SlackException, "unauthorized-user"): result = execute(users=[], context=data) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", 
MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_slack_command___cohort_does_not_exist(self): """Testing when cohort does not exist.""" - slack_user = {'slack_id': 'name'} - slack_team = {'slack_id': 'team'} + slack_user = {"slack_id": "name"} + slack_team = {"slack_id": "team"} - self.bc.database.create(profile_academy=1, - slack_user=slack_user, - capability='read_cohort', - user=1, - role='potato', - academy=1, - slack_team=slack_team) + self.bc.database.create( + profile_academy=1, + slack_user=slack_user, + capability="read_cohort", + user=1, + role="potato", + academy=1, + slack_team=slack_team, + ) - data = {'text': 'cohort', 'user_id': 'name', 'team_id': 'team', 'channel_id': 'test'} + data = {"text": "cohort", "user_id": "name", "team_id": "team", "channel_id": "test"} - with self.assertRaisesMessage(SlackException, 'cohort-not-found'): + with self.assertRaisesMessage(SlackException, "cohort-not-found"): result = execute(users=[], context=data) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_slack_command___cohort_does_exist_but_not_associated_with_slack_channel(self): """Testing when cohort does exist but not associated with slack channel.""" - slack_user = {'slack_id': 'name'} - slack_team = {'slack_id': 'team'} - - self.bc.database.create(profile_academy=1, - slack_user=slack_user, - capability='read_cohort', - user=1, - role='potato', - academy=1, - slack_team=slack_team) - - data = {'text': 'cohort <@fdd2325|244372eew>', 'user_id': 'name', 'team_id': 'team', 'channel_id': 'test'} - with self.assertRaisesMessage(SlackException, 'cohort-not-found'): - result = execute(users=['fdd2325'], context=data) - - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + slack_user = {"slack_id": "name"} + slack_team = {"slack_id": "team"} + + self.bc.database.create( + profile_academy=1, + slack_user=slack_user, + capability="read_cohort", + user=1, + role="potato", + academy=1, + slack_team=slack_team, + ) + + data = {"text": "cohort <@fdd2325|244372eew>", "user_id": "name", "team_id": "team", "channel_id": "test"} + with self.assertRaisesMessage(SlackException, "cohort-not-found"): + result = execute(users=["fdd2325"], context=data) + + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_slack_command___cohort_does_exist_and_associated_with_slack_channel(self): """Testing when cohort exists and is associated with slack channel""" - slack_users = [{'slack_id': 'name'}] - slack_team = {'slack_id': 'team'} - slack_channel = {'slack_id': 'test'} - cohort_user = {'user_id': 1} - - model = self.bc.database.create(profile_academy=1, - slack_user=slack_users, - capability='read_cohort', - user=1, - role='hello', - academy=1, - slack_team=slack_team, - cohort_user=cohort_user, - slack_channel=slack_channel) - - data = {'text': 'cohort 
<@percybrown|244372eew>', 'user_id': 'name', 'team_id': 'team', 'channel_id': 'test'} + slack_users = [{"slack_id": "name"}] + slack_team = {"slack_id": "team"} + slack_channel = {"slack_id": "test"} + cohort_user = {"user_id": 1} + + model = self.bc.database.create( + profile_academy=1, + slack_user=slack_users, + capability="read_cohort", + user=1, + role="hello", + academy=1, + slack_team=slack_team, + cohort_user=cohort_user, + slack_channel=slack_channel, + ) + + data = {"text": "cohort <@percybrown|244372eew>", "user_id": "name", "team_id": "team", "channel_id": "test"} expected = { - 'blocks': [{ - 'type': 'section', - 'text': { - 'type': - 'mrkdwn', - 'text': - '\n'.join([ - '', - f'*Cohort name:* {model.cohort.name}', - f'*Start Date:* {model.cohort.kickoff_date}', - f'*End Date:* {model.cohort.ending_date}', - f'*Current day:* {model.cohort.current_day}', - f'*Stage:* {model.cohort.stage}', - f'*Teachers:* ', - '', - ]) + "blocks": [ + { + "type": "section", + "text": { + "type": "mrkdwn", + "text": "\n".join( + [ + "", + f"*Cohort name:* {model.cohort.name}", + f"*Start Date:* {model.cohort.kickoff_date}", + f"*End Date:* {model.cohort.ending_date}", + f"*Current day:* {model.cohort.current_day}", + f"*Stage:* {model.cohort.stage}", + f"*Teachers:* ", + "", + ] + ), + }, } - }] + ] } - result = execute(users=['percybrown'], context=data) + result = execute(users=["percybrown"], context=data) self.assertEqual(result, expected) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_slack_command___cohort_does_exist_and_role_is_teacher(self): """Testing when cohort exists and role is teacher""" - slack_users = [{'slack_id': 'name'}] - slack_team = {'slack_id': 'team'} - slack_channel = {'slack_id': 'test'} - cohort_user = {'role': 'TEACHER'} - - model = self.bc.database.create(profile_academy=1, - slack_user=slack_users, - capability='read_cohort', - user=1, - role='teacher', - academy=1, - slack_team=slack_team, - cohort_user=cohort_user, - slack_channel=slack_channel) + slack_users = [{"slack_id": "name"}] + slack_team = {"slack_id": "team"} + slack_channel = {"slack_id": "test"} + cohort_user = {"role": "TEACHER"} + + model = self.bc.database.create( + profile_academy=1, + slack_user=slack_users, + capability="read_cohort", + user=1, + role="teacher", + academy=1, + slack_team=slack_team, + cohort_user=cohort_user, + slack_channel=slack_channel, + ) teachers = [model.cohort_user] - teacher_role = ', '.join([cu.user.first_name + ' ' + cu.user.last_name for cu in teachers]) + teacher_role = ", ".join([cu.user.first_name + " " + cu.user.last_name for cu in teachers]) - data = {'text': 'cohort <@percybrown|244372eew>', 'user_id': 'name', 'team_id': 'team', 'channel_id': 'test'} + data = {"text": "cohort <@percybrown|244372eew>", "user_id": "name", "team_id": "team", "channel_id": "test"} expected = { - 'blocks': [{ - 'type': 'section', - 'text': { - 'type': - 'mrkdwn', - 'text': - '\n'.join([ - '', - f'*Cohort name:* {model.cohort.name}', - f'*Start Date:* {model.cohort.kickoff_date}', - f'*End Date:* {model.cohort.ending_date}', - f'*Current day:* {model.cohort.current_day}', - f'*Stage:* {model.cohort.stage}', - f'*Teachers:* 
{teacher_role}', - '', - ]) + "blocks": [ + { + "type": "section", + "text": { + "type": "mrkdwn", + "text": "\n".join( + [ + "", + f"*Cohort name:* {model.cohort.name}", + f"*Start Date:* {model.cohort.kickoff_date}", + f"*End Date:* {model.cohort.ending_date}", + f"*Current day:* {model.cohort.current_day}", + f"*Stage:* {model.cohort.stage}", + f"*Teachers:* {teacher_role}", + "", + ] + ), + }, } - }] + ] } - result = execute(users=['percybrown'], context=data) + result = execute(users=["percybrown"], context=data) self.assertEqual(result, expected) diff --git a/breathecode/services/slack/tests/commands/tests_student.py b/breathecode/services/slack/tests/commands/tests_student.py index 16b56eed7..ee11eda62 100644 --- a/breathecode/services/slack/tests/commands/tests_student.py +++ b/breathecode/services/slack/tests/commands/tests_student.py @@ -12,17 +12,17 @@ def profile_fields(data={}): return { - 'avatar_url': None, - 'bio': None, - 'blog': None, - 'github_username': None, - 'id': 0, - 'linkedin_url': None, - 'phone': '', - 'portfolio_url': None, - 'show_tutorial': True, - 'twitter_username': None, - 'user_id': 0, + "avatar_url": None, + "bio": None, + "blog": None, + "github_username": None, + "id": 0, + "linkedin_url": None, + "phone": "", + "portfolio_url": None, + "show_tutorial": True, + "twitter_username": None, + "user_id": 0, **data, } @@ -37,395 +37,391 @@ def get_env(key, value=None): class SlackTestSuite(SlackTestCase): - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_slack_command___context_is_not_provide_or_is_none(self): """Testing .""" - with self.assertRaisesMessage(SlackException, 'context-missing'): + with self.assertRaisesMessage(SlackException, "context-missing"): result = execute(users=[]) - with self.assertRaisesMessage(SlackException, 'context-missing'): + with self.assertRaisesMessage(SlackException, "context-missing"): result = execute(users=[], context=None) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_slack_command___user_is_not_authorized(self): """Testing .""" - data = {'text': 'student', 'user_id': 'name', 'team_id': 'team', 'channel_id': 'test'} + data = {"text": "student", "user_id": "name", "team_id": "team", "channel_id": "test"} - with self.assertRaisesMessage(SlackException, 'unauthorized-user'): + with self.assertRaisesMessage(SlackException, "unauthorized-user"): result = execute(users=[], context=data) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def 
test_slack_command___users_is_an_empty_list(self): """Testing when passing and empty list to users.""" - slack_user = {'slack_id': 'name'} - slack_team = {'slack_id': 'team'} + slack_user = {"slack_id": "name"} + slack_team = {"slack_id": "team"} - self.bc.database.create(profile_academy=1, - slack_user=slack_user, - capability='read_student', - user=1, - role='potato', - academy=1, - slack_team=slack_team) + self.bc.database.create( + profile_academy=1, + slack_user=slack_user, + capability="read_student", + user=1, + role="potato", + academy=1, + slack_team=slack_team, + ) - data = {'text': 'student', 'user_id': 'name', 'team_id': 'team', 'channel_id': 'test'} + data = {"text": "student", "user_id": "name", "team_id": "team", "channel_id": "test"} - with self.assertRaisesMessage(SlackException, 'users-not-provided'): + with self.assertRaisesMessage(SlackException, "users-not-provided"): result = execute(users=[], context=data) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_slack_command___user_not_registered_in_a_cohort(self): """Testing when user is not registered in a cohort.""" - slack_user = {'slack_id': 'name'} - slack_team = {'slack_id': 'team'} - - self.bc.database.create(profile_academy=1, - slack_user=slack_user, - capability='read_student', - user=1, - role='potato', - academy=1, - slack_team=slack_team) - - data = {'text': 'student <@fdd2325|244372eew>', 'user_id': 'name', 'team_id': 'team', 'channel_id': 'test'} - with self.assertRaisesMessage(SlackException, 'cohort-user-not-found'): - result = execute(users=['fdd2325'], context=data) - - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + slack_user = {"slack_id": "name"} + slack_team = {"slack_id": "team"} + + self.bc.database.create( + profile_academy=1, + slack_user=slack_user, + capability="read_student", + user=1, + role="potato", + academy=1, + slack_team=slack_team, + ) + + data = {"text": "student <@fdd2325|244372eew>", "user_id": "name", "team_id": "team", "channel_id": "test"} + with self.assertRaisesMessage(SlackException, "cohort-user-not-found"): + result = execute(users=["fdd2325"], context=data) + + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_slack_command___user_registered_in_a_cohort__without_financial_status_or_educational_status(self): """Testing when user is registered in a cohort.""" - slack_users = [{'slack_id': 'name'}, {'slack_id': 'percybrown', 'user_id': 2}] - slack_team = {'slack_id': 'team'} - cohort_user = {'user_id': 2} + slack_users = [{"slack_id": "name"}, {"slack_id": "percybrown", "user_id": 2}] + slack_team = {"slack_id": "team"} + cohort_user = {"user_id": 2} - model = self.bc.database.create(profile_academy=1, - slack_user=slack_users, - capability='read_student', - user=2, - role='STUDENT', - academy=1, - slack_team=slack_team, - cohort_user=cohort_user) + model = self.bc.database.create( + profile_academy=1, + 
slack_user=slack_users, + capability="read_student", + user=2, + role="STUDENT", + academy=1, + slack_team=slack_team, + cohort_user=cohort_user, + ) - data = {'text': 'student <@percybrown|244372eew>', 'user_id': 'name', 'team_id': 'team', 'channel_id': 'test'} + data = {"text": "student <@percybrown|244372eew>", "user_id": "name", "team_id": "team", "channel_id": "test"} avatar_number = random.randint(1, 21) expected = { - 'blocks': [{ - 'type': 'section', - 'text': { - 'type': - 'mrkdwn', - 'text': - f'\n*Student Name:* {model.user[1].first_name} {model.user[1].last_name}\n*Github*: not set\n*Phone*: not set\n*Email:* '\ - f'{model.user[1].email}\n*Cohorts:*\n```\n- {model.cohort.name}: 🎓ACTIVE and 💰Not set\n```\n' - }, - 'accessory': { - 'type': 'image', - 'image_url': f'/static/img/avatar-{avatar_number}.png', - 'alt_text': f'{model.user[1].first_name} {model.user[1].last_name}', - + "blocks": [ + { + "type": "section", + "text": { + "type": "mrkdwn", + "text": f"\n*Student Name:* {model.user[1].first_name} {model.user[1].last_name}\n*Github*: not set\n*Phone*: not set\n*Email:* " + f"{model.user[1].email}\n*Cohorts:*\n```\n- {model.cohort.name}: 🎓ACTIVE and 💰Not set\n```\n", + }, + "accessory": { + "type": "image", + "image_url": f"/static/img/avatar-{avatar_number}.png", + "alt_text": f"{model.user[1].first_name} {model.user[1].last_name}", + }, } - }] + ] } - with patch('random.randint') as mock: + with patch("random.randint") as mock: mock.return_value = avatar_number - result = execute(users=['percybrown'], context=data) + result = execute(users=["percybrown"], context=data) self.assertEqual(random.randint.call_args_list, [call(1, 21)]) self.assertEqual(result, expected) - self.assertEqual(self.bc.database.list_of('authenticate.Profile'), []) + self.assertEqual(self.bc.database.list_of("authenticate.Profile"), []) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_slack_command___user_registered_in_two_cohorts__with_financial_status_and_educational_status(self): """Testing when user is registered in a cohort.""" - slack_users = [{'slack_id': 'name'}, {'slack_id': 'percybrown', 'user_id': 2}] - slack_team = {'slack_id': 'team'} - cohort_user = [{ - 'user_id': 2, - 'finantial_status': 'FULLY_PAID', - 'educational_status': 'ACTIVE' - }, { - 'user_id': 2, - 'finantial_status': 'UP_TO_DATE', - 'educational_status': 'POSTPONED', - 'cohort_id': 2 - }] - - model = self.bc.database.create(profile_academy=1, - slack_user=slack_users, - capability='read_student', - user=2, - role='STUDENT', - academy=1, - slack_team=slack_team, - cohort_user=cohort_user, - cohort=2) - - data = {'text': 'student <@percybrown|244372eew>', 'user_id': 'name', 'team_id': 'team', 'channel_id': 'test'} + slack_users = [{"slack_id": "name"}, {"slack_id": "percybrown", "user_id": 2}] + slack_team = {"slack_id": "team"} + cohort_user = [ + {"user_id": 2, "finantial_status": "FULLY_PAID", "educational_status": "ACTIVE"}, + {"user_id": 2, "finantial_status": "UP_TO_DATE", "educational_status": "POSTPONED", "cohort_id": 2}, + ] + + model = self.bc.database.create( + profile_academy=1, + slack_user=slack_users, + capability="read_student", + user=2, + 
role="STUDENT", + academy=1, + slack_team=slack_team, + cohort_user=cohort_user, + cohort=2, + ) + + data = {"text": "student <@percybrown|244372eew>", "user_id": "name", "team_id": "team", "channel_id": "test"} avatar_number = random.randint(1, 21) expected = { - 'blocks': [{ - 'type': 'section', - 'text': { - 'type': - 'mrkdwn', - 'text': - f'\n*Student Name:* {model.user[1].first_name} {model.user[1].last_name}\n*Github*: not '\ - f'set\n*Phone*: not set\n*Email:* {model.user[1].email}\n*Cohorts:*\n```\n- '\ - f'{model.cohort[0].name}: 🎓ACTIVE and 💰FULLY PAID\n- {model.cohort[1].name}: 🎓POSTPONED '\ - f'and 💰UP TO DATE\n```\n' - }, - 'accessory': { - 'type': 'image', - 'image_url': f'/static/img/avatar-{avatar_number}.png', - 'alt_text': f'{model.user[1].first_name} {model.user[1].last_name}', - + "blocks": [ + { + "type": "section", + "text": { + "type": "mrkdwn", + "text": f"\n*Student Name:* {model.user[1].first_name} {model.user[1].last_name}\n*Github*: not " + f"set\n*Phone*: not set\n*Email:* {model.user[1].email}\n*Cohorts:*\n```\n- " + f"{model.cohort[0].name}: 🎓ACTIVE and 💰FULLY PAID\n- {model.cohort[1].name}: 🎓POSTPONED " + f"and 💰UP TO DATE\n```\n", + }, + "accessory": { + "type": "image", + "image_url": f"/static/img/avatar-{avatar_number}.png", + "alt_text": f"{model.user[1].first_name} {model.user[1].last_name}", + }, } - }] + ] } - with patch('random.randint') as mock: + with patch("random.randint") as mock: mock.return_value = avatar_number - result = execute(users=['percybrown'], context=data) + result = execute(users=["percybrown"], context=data) self.assertEqual(random.randint.call_args_list, [call(1, 21)]) self.assertEqual(result, expected) - self.assertEqual(self.bc.database.list_of('authenticate.Profile'), []) + self.assertEqual(self.bc.database.list_of("authenticate.Profile"), []) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_slack_command___user_registered_in_two_different_cohorts__with_financial_status_and_educational_status( - self): + self, + ): """Testing when user is registered in two different cohorts with financial and educational status.""" - slack_users = [{'slack_id': 'name'}, {'slack_id': 'percybrown', 'user_id': 2}] - slack_team = {'slack_id': 'team'} - cohort_user = [{ - 'user_id': 2, - 'finantial_status': 'FULLY_PAID', - 'educational_status': 'ACTIVE' - }, { - 'user_id': 2, - 'finantial_status': 'UP_TO_DATE', - 'educational_status': 'POSTPONED', - 'cohort_id': 2 - }] - - model = self.bc.database.create(profile_academy=1, - slack_user=slack_users, - capability='read_student', - user=2, - role='STUDENT', - academy=1, - slack_team=slack_team, - cohort_user=cohort_user, - cohort=2) - - data = {'text': 'student <@percybrown|244372eew>', 'user_id': 'name', 'team_id': 'team', 'channel_id': 'test'} + slack_users = [{"slack_id": "name"}, {"slack_id": "percybrown", "user_id": 2}] + slack_team = {"slack_id": "team"} + cohort_user = [ + {"user_id": 2, "finantial_status": "FULLY_PAID", "educational_status": "ACTIVE"}, + {"user_id": 2, "finantial_status": "UP_TO_DATE", "educational_status": "POSTPONED", "cohort_id": 2}, + ] + + model = self.bc.database.create( + profile_academy=1, + 
slack_user=slack_users, + capability="read_student", + user=2, + role="STUDENT", + academy=1, + slack_team=slack_team, + cohort_user=cohort_user, + cohort=2, + ) + + data = {"text": "student <@percybrown|244372eew>", "user_id": "name", "team_id": "team", "channel_id": "test"} avatar_number = random.randint(1, 21) expected = { - 'blocks': [{ - 'type': 'section', - 'text': { - 'type': - 'mrkdwn', - 'text': - f'\n*Student Name:* {model.user[1].first_name} {model.user[1].last_name}\n*Github*: not '\ - f'set\n*Phone*: not set\n*Email:* {model.user[1].email}\n*Cohorts:*\n```\n- '\ - f'{model.cohort[0].name}: 🎓ACTIVE and 💰FULLY PAID\n- {model.cohort[1].name}: 🎓POSTPONED '\ - f'and 💰UP TO DATE\n```\n' - }, - 'accessory': { - 'type': 'image', - 'image_url': f'/static/img/avatar-{avatar_number}.png', - 'alt_text': f'{model.user[1].first_name} {model.user[1].last_name}', - + "blocks": [ + { + "type": "section", + "text": { + "type": "mrkdwn", + "text": f"\n*Student Name:* {model.user[1].first_name} {model.user[1].last_name}\n*Github*: not " + f"set\n*Phone*: not set\n*Email:* {model.user[1].email}\n*Cohorts:*\n```\n- " + f"{model.cohort[0].name}: 🎓ACTIVE and 💰FULLY PAID\n- {model.cohort[1].name}: 🎓POSTPONED " + f"and 💰UP TO DATE\n```\n", + }, + "accessory": { + "type": "image", + "image_url": f"/static/img/avatar-{avatar_number}.png", + "alt_text": f"{model.user[1].first_name} {model.user[1].last_name}", + }, } - }] + ] } - with patch('random.randint') as mock: + with patch("random.randint") as mock: mock.return_value = avatar_number - result = execute(users=['percybrown'], context=data) + result = execute(users=["percybrown"], context=data) self.assertEqual(random.randint.call_args_list, [call(1, 21)]) self.assertEqual(result, expected) - self.assertEqual(self.bc.database.list_of('authenticate.Profile'), []) + self.assertEqual(self.bc.database.list_of("authenticate.Profile"), []) """ 🔽🔽🔽 With two CohortUser and one Profile, with right financial_status and educational_status, profile empty """ - @patch('os.getenv', MagicMock(side_effect=apply_get_env({'API_URL': API_URL}))) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("os.getenv", MagicMock(side_effect=apply_get_env({"API_URL": API_URL}))) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_slack_command___with_profile_empty(self): """Testing when user is registered in two different cohorts with financial and educational status.""" - slack_users = [{'slack_id': 'name'}, {'slack_id': 'percybrown', 'user_id': 2}] - slack_team = {'slack_id': 'team'} - cohort_user = [{ - 'user_id': 2, - 'finantial_status': 'FULLY_PAID', - 'educational_status': 'ACTIVE' - }, { - 'user_id': 2, - 'finantial_status': 'UP_TO_DATE', - 'educational_status': 'POSTPONED', - 'cohort_id': 2 - }] - - profile = {'user_id': 2} - model = self.bc.database.create(profile_academy=1, - profile=profile, - slack_user=slack_users, - capability='read_student', - user=2, - role='STUDENT', - academy=1, - slack_team=slack_team, - cohort_user=cohort_user, - cohort=2) - - data = {'text': 'student <@percybrown|244372eew>', 'user_id': 'name', 'team_id': 'team', 'channel_id': 'test'} + slack_users = [{"slack_id": "name"}, {"slack_id": "percybrown", "user_id": 2}] + slack_team = 
{"slack_id": "team"} + cohort_user = [ + {"user_id": 2, "finantial_status": "FULLY_PAID", "educational_status": "ACTIVE"}, + {"user_id": 2, "finantial_status": "UP_TO_DATE", "educational_status": "POSTPONED", "cohort_id": 2}, + ] + + profile = {"user_id": 2} + model = self.bc.database.create( + profile_academy=1, + profile=profile, + slack_user=slack_users, + capability="read_student", + user=2, + role="STUDENT", + academy=1, + slack_team=slack_team, + cohort_user=cohort_user, + cohort=2, + ) + + data = {"text": "student <@percybrown|244372eew>", "user_id": "name", "team_id": "team", "channel_id": "test"} avatar_number = random.randint(1, 21) expected = { - 'blocks': [{ - 'type': 'section', - 'text': { - 'type': - 'mrkdwn', - 'text': - f'\n*Student Name:* {model.user[1].first_name} {model.user[1].last_name}\n*Github*: not '\ - f'set\n*Phone*: not set\n*Email:* {model.user[1].email}\n*Cohorts:*\n```\n- '\ - f'{model.cohort[0].name}: 🎓ACTIVE and 💰FULLY PAID\n- {model.cohort[1].name}: 🎓POSTPONED '\ - f'and 💰UP TO DATE\n```\n' - }, - 'accessory': { - 'type': 'image', - 'image_url': f'{API_URL}/static/img/avatar-{avatar_number}.png', - 'alt_text': f'{model.user[1].first_name} {model.user[1].last_name}', - + "blocks": [ + { + "type": "section", + "text": { + "type": "mrkdwn", + "text": f"\n*Student Name:* {model.user[1].first_name} {model.user[1].last_name}\n*Github*: not " + f"set\n*Phone*: not set\n*Email:* {model.user[1].email}\n*Cohorts:*\n```\n- " + f"{model.cohort[0].name}: 🎓ACTIVE and 💰FULLY PAID\n- {model.cohort[1].name}: 🎓POSTPONED " + f"and 💰UP TO DATE\n```\n", + }, + "accessory": { + "type": "image", + "image_url": f"{API_URL}/static/img/avatar-{avatar_number}.png", + "alt_text": f"{model.user[1].first_name} {model.user[1].last_name}", + }, } - }] + ] } - with patch('random.randint') as mock: + with patch("random.randint") as mock: mock.return_value = avatar_number - result = execute(users=['percybrown'], context=data) + result = execute(users=["percybrown"], context=data) self.assertEqual(random.randint.call_args_list, [call(1, 21)]) self.assertEqual(result, expected) - self.assertEqual(self.bc.database.list_of('authenticate.Profile'), [ - profile_fields({ - 'id': 1, - 'user_id': 2, - 'avatar_url': f'{API_URL}/static/img/avatar-{avatar_number}.png', - }), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.Profile"), + [ + profile_fields( + { + "id": 1, + "user_id": 2, + "avatar_url": f"{API_URL}/static/img/avatar-{avatar_number}.png", + } + ), + ], + ) """ 🔽🔽🔽 With two CohortUser and one Profile, with right financial_status and educational_status, profile set """ - @patch('os.getenv', MagicMock(side_effect=apply_get_env({'API_URL': API_URL}))) - @patch('django.db.models.signals.pre_delete.send_robust', MagicMock(return_value=None)) - @patch('breathecode.admissions.signals.student_edu_status_updated.send_robust', MagicMock(return_value=None)) + @patch("os.getenv", MagicMock(side_effect=apply_get_env({"API_URL": API_URL}))) + @patch("django.db.models.signals.pre_delete.send_robust", MagicMock(return_value=None)) + @patch("breathecode.admissions.signals.student_edu_status_updated.send_robust", MagicMock(return_value=None)) def test_slack_command___with_profile_set(self): """Testing when user is registered in two different cohorts with financial and educational status.""" - slack_users = [{'slack_id': 'name'}, {'slack_id': 'percybrown', 'user_id': 2}] - slack_team = {'slack_id': 'team'} - cohort_user = [{ - 'user_id': 2, - 'finantial_status': 'FULLY_PAID', - 
'educational_status': 'ACTIVE' - }, { - 'user_id': 2, - 'finantial_status': 'UP_TO_DATE', - 'educational_status': 'POSTPONED', - 'cohort_id': 2 - }] + slack_users = [{"slack_id": "name"}, {"slack_id": "percybrown", "user_id": 2}] + slack_team = {"slack_id": "team"} + cohort_user = [ + {"user_id": 2, "finantial_status": "FULLY_PAID", "educational_status": "ACTIVE"}, + {"user_id": 2, "finantial_status": "UP_TO_DATE", "educational_status": "POSTPONED", "cohort_id": 2}, + ] github_username = self.bc.fake.slug() phone = self.bc.fake.phone_number() profile = { - 'user_id': 2, - 'github_username': github_username, - 'phone': phone, + "user_id": 2, + "github_username": github_username, + "phone": phone, } - model = self.bc.database.create(profile_academy=1, - profile=profile, - slack_user=slack_users, - capability='read_student', - user=2, - role='STUDENT', - academy=1, - slack_team=slack_team, - cohort_user=cohort_user, - cohort=2) - - data = {'text': 'student <@percybrown|244372eew>', 'user_id': 'name', 'team_id': 'team', 'channel_id': 'test'} + model = self.bc.database.create( + profile_academy=1, + profile=profile, + slack_user=slack_users, + capability="read_student", + user=2, + role="STUDENT", + academy=1, + slack_team=slack_team, + cohort_user=cohort_user, + cohort=2, + ) + + data = {"text": "student <@percybrown|244372eew>", "user_id": "name", "team_id": "team", "channel_id": "test"} avatar_number = random.randint(1, 21) expected = { - 'blocks': [{ - 'type': 'section', - 'text': { - 'type': - 'mrkdwn', - 'text': - f'\n*Student Name:* {model.user[1].first_name} {model.user[1].last_name}\n*Github*: '\ - f'{github_username}\n*Phone*: {phone}\n*Email:* {model.user[1].email}\n*Cohorts:*\n```\n- '\ - f'{model.cohort[0].name}: 🎓ACTIVE and 💰FULLY PAID\n- {model.cohort[1].name}: 🎓POSTPONED '\ - f'and 💰UP TO DATE\n```\n' - }, - 'accessory': { - 'type': 'image', - 'image_url': f'{API_URL}/static/img/avatar-{avatar_number}.png', - 'alt_text': f'{model.user[1].first_name} {model.user[1].last_name}', - + "blocks": [ + { + "type": "section", + "text": { + "type": "mrkdwn", + "text": f"\n*Student Name:* {model.user[1].first_name} {model.user[1].last_name}\n*Github*: " + f"{github_username}\n*Phone*: {phone}\n*Email:* {model.user[1].email}\n*Cohorts:*\n```\n- " + f"{model.cohort[0].name}: 🎓ACTIVE and 💰FULLY PAID\n- {model.cohort[1].name}: 🎓POSTPONED " + f"and 💰UP TO DATE\n```\n", + }, + "accessory": { + "type": "image", + "image_url": f"{API_URL}/static/img/avatar-{avatar_number}.png", + "alt_text": f"{model.user[1].first_name} {model.user[1].last_name}", + }, } - }] + ] } - with patch('random.randint') as mock: + with patch("random.randint") as mock: mock.return_value = avatar_number - result = execute(users=['percybrown'], context=data) + result = execute(users=["percybrown"], context=data) self.assertEqual(random.randint.call_args_list, [call(1, 21)]) self.assertEqual(result, expected) - self.assertEqual(self.bc.database.list_of('authenticate.Profile'), [ - profile_fields({ - 'id': 1, - 'user_id': 2, - 'avatar_url': f'{API_URL}/static/img/avatar-{avatar_number}.png', - 'github_username': github_username, - 'phone': phone, - }), - ]) + self.assertEqual( + self.bc.database.list_of("authenticate.Profile"), + [ + profile_fields( + { + "id": 1, + "user_id": 2, + "avatar_url": f"{API_URL}/static/img/avatar-{avatar_number}.png", + "github_username": github_username, + "phone": phone, + } + ), + ], + ) diff --git a/breathecode/services/slack/tests/mixins/__init__.py 
b/breathecode/services/slack/tests/mixins/__init__.py index a38e397e8..7499174ac 100644 --- a/breathecode/services/slack/tests/mixins/__init__.py +++ b/breathecode/services/slack/tests/mixins/__init__.py @@ -1,4 +1,5 @@ """ Slack mixins """ + from .slack_test_case import SlackTestCase # noqa: F401 diff --git a/breathecode/services/slack/tests/mixins/slack_test_case.py b/breathecode/services/slack/tests/mixins/slack_test_case.py index 4f5b3f3ff..73e6f2f3c 100644 --- a/breathecode/services/slack/tests/mixins/slack_test_case.py +++ b/breathecode/services/slack/tests/mixins/slack_test_case.py @@ -1,17 +1,25 @@ """ Collections of mixins used to login in authorize microservice """ + import re from unittest.mock import MagicMock, patch from django.urls.base import reverse_lazy from rest_framework.test import APITestCase -from breathecode.tests.mixins import (GenerateModelsMixin, CacheMixin, GenerateQueriesMixin, DatetimeMixin, ICallMixin, - BreathecodeMixin) +from breathecode.tests.mixins import ( + GenerateModelsMixin, + CacheMixin, + GenerateQueriesMixin, + DatetimeMixin, + ICallMixin, + BreathecodeMixin, +) from rest_framework import status -class SlackTestCase(APITestCase, GenerateModelsMixin, CacheMixin, GenerateQueriesMixin, DatetimeMixin, ICallMixin, - BreathecodeMixin): +class SlackTestCase( + APITestCase, GenerateModelsMixin, CacheMixin, GenerateQueriesMixin, DatetimeMixin, ICallMixin, BreathecodeMixin +): """SlackTestCase with auth methods""" def setUp(self): diff --git a/breathecode/services/slack/utils.py b/breathecode/services/slack/utils.py index bdecda516..c8b555bb6 100644 --- a/breathecode/services/slack/utils.py +++ b/breathecode/services/slack/utils.py @@ -1,9 +1,9 @@ def to_string(s): if s is None: - return 'Not set' + return "Not set" else: - return s.replace('_', ' ') + return s.replace("_", " ") def jump(): - return '\n' + return "\n" diff --git a/breathecode/settings.py b/breathecode/settings.py index e68e07186..33d20e18d 100644 --- a/breathecode/settings.py +++ b/breathecode/settings.py @@ -23,139 +23,138 @@ from breathecode.setup import configure_redis -settings.set_settings(app_name='breathecode') +settings.set_settings(app_name="breathecode") # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) PROJECT_ROOT = os.path.dirname(os.path.abspath(__file__)) -DATABASE_URL = os.environ.get('DATABASE_URL') -ENVIRONMENT = os.environ.get('ENV') +DATABASE_URL = os.environ.get("DATABASE_URL") +ENVIRONMENT = os.environ.get("ENV") # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! -SECRET_KEY = '5ar3h@ha%y*dc72z=8-ju7@4xqm0o59*@k*c2i=xacmy2r=%4a' +SECRET_KEY = "5ar3h@ha%y*dc72z=8-ju7@4xqm0o59*@k*c2i=xacmy2r=%4a" # SECURITY WARNING: don't run with debug turned on in production! 
-DEBUG = (ENVIRONMENT == 'development' or ENVIRONMENT == 'test') +DEBUG = ENVIRONMENT == "development" or ENVIRONMENT == "test" ALLOWED_HOSTS = [] # Application definition INSTALLED_APPS = [ - 'breathecode.admin_styles', - 'django.contrib.admin', - 'django.contrib.auth', - 'django.contrib.contenttypes', - 'django.contrib.sessions', - 'django.contrib.messages', - 'django.contrib.staticfiles', - 'django.contrib.sites', - 'django.contrib.postgres', - 'django.contrib.admindocs', - 'rest_framework', - 'adrf', - 'phonenumber_field', - 'corsheaders', - 'breathecode.activity', - 'breathecode.notify', - 'breathecode.authenticate', - 'breathecode.monitoring', - 'breathecode.admissions', - 'breathecode.events', - 'breathecode.feedback', - 'breathecode.assignments', - 'breathecode.marketing', - 'breathecode.freelance', - 'breathecode.certificate', - 'breathecode.media', - 'breathecode.assessment', - 'breathecode.registry', - 'breathecode.mentorship', - 'breathecode.career', - 'breathecode.commons', - 'breathecode.payments', - 'breathecode.provisioning', - 'explorer', - 'graphene_django', - 'task_manager', - 'linked_services', + "breathecode.admin_styles", + "django.contrib.admin", + "django.contrib.auth", + "django.contrib.contenttypes", + "django.contrib.sessions", + "django.contrib.messages", + "django.contrib.staticfiles", + "django.contrib.sites", + "django.contrib.postgres", + "django.contrib.admindocs", + "rest_framework", + "adrf", + "phonenumber_field", + "corsheaders", + "breathecode.activity", + "breathecode.notify", + "breathecode.authenticate", + "breathecode.monitoring", + "breathecode.admissions", + "breathecode.events", + "breathecode.feedback", + "breathecode.assignments", + "breathecode.marketing", + "breathecode.freelance", + "breathecode.certificate", + "breathecode.media", + "breathecode.assessment", + "breathecode.registry", + "breathecode.mentorship", + "breathecode.career", + "breathecode.commons", + "breathecode.payments", + "breathecode.provisioning", + "explorer", + "graphene_django", + "task_manager", + "linked_services", ] -GRAPHENE = {'SCHEMA': 'breathecode.schema.schema'} +GRAPHENE = {"SCHEMA": "breathecode.schema.schema"} -if os.getenv('ALLOW_UNSAFE_CYPRESS_APP') or ENVIRONMENT == 'test': - INSTALLED_APPS.append('breathecode.cypress') +if os.getenv("ALLOW_UNSAFE_CYPRESS_APP") or ENVIRONMENT == "test": + INSTALLED_APPS.append("breathecode.cypress") REST_FRAMEWORK = { - 'DEFAULT_SCHEMA_CLASS': 'rest_framework.schemas.openapi.AutoSchema', - 'DEFAULT_VERSIONING_CLASS': 'rest_framework.versioning.NamespaceVersioning', - 'DEFAULT_PAGINATION_CLASS': 'breathecode.utils.HeaderLimitOffsetPagination', - 'EXCEPTION_HANDLER': 'capyc.rest_framework.exception_handler.exception_handler', - 'PAGE_SIZE': 100, - 'DEFAULT_VERSION': 'v1', - 'DEFAULT_AUTHENTICATION_CLASSES': [ - 'breathecode.authenticate.authentication.ExpiringTokenAuthentication', + "DEFAULT_SCHEMA_CLASS": "rest_framework.schemas.openapi.AutoSchema", + "DEFAULT_VERSIONING_CLASS": "rest_framework.versioning.NamespaceVersioning", + "DEFAULT_PAGINATION_CLASS": "breathecode.utils.HeaderLimitOffsetPagination", + "EXCEPTION_HANDLER": "capyc.rest_framework.exception_handler.exception_handler", + "PAGE_SIZE": 100, + "DEFAULT_VERSION": "v1", + "DEFAULT_AUTHENTICATION_CLASSES": [ + "breathecode.authenticate.authentication.ExpiringTokenAuthentication", ], - 'DEFAULT_PERMISSION_CLASSES': [ - 'rest_framework.permissions.IsAuthenticated', + "DEFAULT_PERMISSION_CLASSES": [ + "rest_framework.permissions.IsAuthenticated", ], - 
'DEFAULT_RENDERER_CLASSES': ( - 'rest_framework.renderers.JSONRenderer', - 'rest_framework_csv.renderers.CSVRenderer', + "DEFAULT_RENDERER_CLASSES": ( + "rest_framework.renderers.JSONRenderer", + "rest_framework_csv.renderers.CSVRenderer", ), } -if os.getenv('ENABLE_DEFAULT_PAGINATION', 'y') in ['t', 'true', 'True', 'TRUE', '1', 'yes', 'y']: - REST_FRAMEWORK['PAGE_SIZE'] = 20 +if os.getenv("ENABLE_DEFAULT_PAGINATION", "y") in ["t", "true", "True", "TRUE", "1", "yes", "y"]: + REST_FRAMEWORK["PAGE_SIZE"] = 20 # whitenoise runs in sync mode, it must be wrapped or removed # CompressResponseMiddleware must be upgraded because a django deprecation MIDDLEWARE = [ - 'django.middleware.security.SecurityMiddleware', - 'breathecode.middlewares.static_redirect_middleware', - 'django.contrib.sessions.middleware.SessionMiddleware', - 'corsheaders.middleware.CorsMiddleware', - + "django.middleware.security.SecurityMiddleware", + "breathecode.middlewares.static_redirect_middleware", + "django.contrib.sessions.middleware.SessionMiddleware", + "corsheaders.middleware.CorsMiddleware", # Cache # 'django.middleware.cache.UpdateCacheMiddleware', - 'django.middleware.common.CommonMiddleware', + "django.middleware.common.CommonMiddleware", # 'django.middleware.cache.FetchFromCacheMiddleware', - 'django.middleware.csrf.CsrfViewMiddleware', - 'django.contrib.auth.middleware.AuthenticationMiddleware', - 'django.contrib.messages.middleware.MessageMiddleware', - 'django.middleware.clickjacking.XFrameOptionsMiddleware', + "django.middleware.csrf.CsrfViewMiddleware", + "django.contrib.auth.middleware.AuthenticationMiddleware", + "django.contrib.messages.middleware.MessageMiddleware", + "django.middleware.clickjacking.XFrameOptionsMiddleware", #'breathecode.utils.admin_timezone.TimezoneMiddleware', - 'breathecode.middlewares.CompressResponseMiddleware', - 'django.middleware.http.ConditionalGetMiddleware', + "breathecode.middlewares.CompressResponseMiddleware", + "django.middleware.http.ConditionalGetMiddleware", ] -if ENVIRONMENT != 'test': - MIDDLEWARE += ['django_minify_html.middleware.MinifyHtmlMiddleware'] +if ENVIRONMENT != "test": + MIDDLEWARE += ["django_minify_html.middleware.MinifyHtmlMiddleware"] -if os.getenv('GOOGLE_APPLICATION_CREDENTIALS') and (GS_BUCKET_NAME := os.getenv('STATIC_BUCKET')): +if os.getenv("GOOGLE_APPLICATION_CREDENTIALS") and (GS_BUCKET_NAME := os.getenv("STATIC_BUCKET")): from google.oauth2 import service_account from .setup import resolve_gcloud_credentials resolve_gcloud_credentials() - GS_CREDENTIALS = service_account.Credentials.from_service_account_file(os.getenv('GOOGLE_APPLICATION_CREDENTIALS')) + GS_CREDENTIALS = service_account.Credentials.from_service_account_file(os.getenv("GOOGLE_APPLICATION_CREDENTIALS")) - GS_PROJECT_ID = os.getenv('GOOGLE_PROJECT_ID', '') + GS_PROJECT_ID = os.getenv("GOOGLE_PROJECT_ID", "") GS_IS_GZIPPED = True GS_QUERYSTRING_AUTH = False GS_FILE_OVERWRITE = True GZIP_CONTENT_TYPES = ( - 'text/html', - 'text/css', - 'text/javascript', - 'application/javascript', - 'application/x-javascript', - 'image/svg+xml', + "text/html", + "text/css", + "text/javascript", + "application/javascript", + "application/x-javascript", + "image/svg+xml", ) # GS_OBJECT_PARAMETERS = { @@ -165,151 +164,153 @@ GS_EXPIRATION = timedelta(days=7) STORAGES = { - 'staticfiles': { - 'BACKEND': 'storages.backends.gcloud.GoogleCloudStorage', + "staticfiles": { + "BACKEND": "storages.backends.gcloud.GoogleCloudStorage", }, } # STATICFILES_STORAGE = 
'django.contrib.staticfiles.storage.ManifestStaticFilesStorage' else: INSTALLED_APPS += [ - 'whitenoise.runserver_nostatic', + "whitenoise.runserver_nostatic", ] MIDDLEWARE += [ - 'whitenoise.middleware.WhiteNoiseMiddleware', + "whitenoise.middleware.WhiteNoiseMiddleware", ] # Simplified static file serving. # https://warehouse.python.org/project/whitenoise/ STORAGES = { - 'staticfiles': { - 'BACKEND': 'whitenoise.storage.CompressedManifestStaticFilesStorage', + "staticfiles": { + "BACKEND": "whitenoise.storage.CompressedManifestStaticFilesStorage", }, } DISABLE_SERVER_SIDE_CURSORS = True # required when using pgbouncer's pool_mode=transaction -AUTHENTICATION_BACKENDS = ('django.contrib.auth.backends.ModelBackend', ) +AUTHENTICATION_BACKENDS = ("django.contrib.auth.backends.ModelBackend",) -ROOT_URLCONF = 'breathecode.urls' +ROOT_URLCONF = "breathecode.urls" TEMPLATES = [ { - 'BACKEND': 'django.template.backends.django.DjangoTemplates', - 'DIRS': [], - 'APP_DIRS': True, - 'OPTIONS': { - 'context_processors': [ - 'django.template.context_processors.debug', - 'django.template.context_processors.request', - 'django.contrib.auth.context_processors.auth', - 'django.contrib.messages.context_processors.messages', - 'django.template.context_processors.request', + "BACKEND": "django.template.backends.django.DjangoTemplates", + "DIRS": [], + "APP_DIRS": True, + "OPTIONS": { + "context_processors": [ + "django.template.context_processors.debug", + "django.template.context_processors.request", + "django.contrib.auth.context_processors.auth", + "django.contrib.messages.context_processors.messages", + "django.template.context_processors.request", ], }, }, ] -WSGI_APPLICATION = 'breathecode.wsgi.application' +WSGI_APPLICATION = "breathecode.wsgi.application" # Password validation # https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { - 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', + "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator", }, { - 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', + "NAME": "django.contrib.auth.password_validation.MinimumLengthValidator", }, { - 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', + "NAME": "django.contrib.auth.password_validation.CommonPasswordValidator", }, { - 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', + "NAME": "django.contrib.auth.password_validation.NumericPasswordValidator", }, ] # Disable Django's logging setup LOGGING_CONFIG = None -IS_TEST_ENV = os.getenv('ENV') == 'test' -LOG_LEVEL = os.environ.get('LOG_LEVEL', 'INFO').upper() +IS_TEST_ENV = os.getenv("ENV") == "test" +LOG_LEVEL = os.environ.get("LOG_LEVEL", "INFO").upper() # this prevent the duplications of logs because heroku redirect the output to Coralogix if IS_TEST_ENV: - LOGGING_HANDLERS = ['console'] + LOGGING_HANDLERS = ["console"] else: - LOGGING_HANDLERS = ['coralogix', 'console'] - -logging.config.dictConfig({ - 'version': 1, - 'disable_existing_loggers': False, - 'formatters': { - 'default': { - # exact format is not important, this is the minimum information - 'format': '[%(asctime)s] %(name)-12s %(levelname)-8s %(message)s', - }, - 'django.server': DEFAULT_LOGGING['formatters']['django.server'], - }, - 'filters': { - 'require_debug_false': { - '()': 'django.utils.log.RequireDebugFalse', - }, - }, - 'handlers': { - 'coralogix': { - 'class': 'coralogix.handlers.CoralogixLogger', - 
'formatter': 'default', - 'private_key': os.getenv('CORALOGIX_PRIVATE_KEY', ''), - 'app_name': os.getenv('CORALOGIX_APP_NAME', 'localhost'), - 'subsystem': os.getenv('CORALOGIX_SUBSYSTEM', 'logger'), - }, - 'console': { - 'class': 'logging.StreamHandler', - 'formatter': 'default', + LOGGING_HANDLERS = ["coralogix", "console"] + +logging.config.dictConfig( + { + "version": 1, + "disable_existing_loggers": False, + "formatters": { + "default": { + # exact format is not important, this is the minimum information + "format": "[%(asctime)s] %(name)-12s %(levelname)-8s %(message)s", + }, + "django.server": DEFAULT_LOGGING["formatters"]["django.server"], }, - 'django.server': DEFAULT_LOGGING['handlers']['django.server'], - }, - 'loggers': { - '': { - 'level': 'WARNING', - 'handlers': LOGGING_HANDLERS, + "filters": { + "require_debug_false": { + "()": "django.utils.log.RequireDebugFalse", + }, }, - # Our application code - 'breathecode': { - 'level': LOG_LEVEL, - 'handlers': LOGGING_HANDLERS, - # Avoid double logging because of root logger - 'propagate': False, + "handlers": { + "coralogix": { + "class": "coralogix.handlers.CoralogixLogger", + "formatter": "default", + "private_key": os.getenv("CORALOGIX_PRIVATE_KEY", ""), + "app_name": os.getenv("CORALOGIX_APP_NAME", "localhost"), + "subsystem": os.getenv("CORALOGIX_SUBSYSTEM", "logger"), + }, + "console": { + "class": "logging.StreamHandler", + "formatter": "default", + }, + "django.server": DEFAULT_LOGGING["handlers"]["django.server"], }, - # Prevent noisy modules from logging to Sentry - 'noisy_module': { - 'level': 'ERROR', - 'handlers': LOGGING_HANDLERS, - 'propagate': False, + "loggers": { + "": { + "level": "WARNING", + "handlers": LOGGING_HANDLERS, + }, + # Our application code + "breathecode": { + "level": LOG_LEVEL, + "handlers": LOGGING_HANDLERS, + # Avoid double logging because of root logger + "propagate": False, + }, + # Prevent noisy modules from logging to Sentry + "noisy_module": { + "level": "ERROR", + "handlers": LOGGING_HANDLERS, + "propagate": False, + }, + # Default runserver request logging + "django.server": DEFAULT_LOGGING["loggers"]["django.server"], }, - # Default runserver request logging - 'django.server': DEFAULT_LOGGING['loggers']['django.server'], } -}) +) MESSAGE_TAGS = { - messages.DEBUG: 'alert-info', - messages.INFO: 'alert-info', - messages.SUCCESS: 'alert-success', - messages.WARNING: 'alert-warning', - messages.ERROR: 'alert-danger', + messages.DEBUG: "alert-info", + messages.INFO: "alert-info", + messages.SUCCESS: "alert-success", + messages.WARNING: "alert-warning", + messages.ERROR: "alert-danger", } # Internationalization # https://docs.djangoproject.com/en/3.0/topics/i18n/ -LANGUAGE_CODE = 'en-us' +LANGUAGE_CODE = "en-us" -TIME_ZONE = 'UTC' +TIME_ZONE = "UTC" USE_I18N = True @@ -319,68 +320,68 @@ # SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https') # Allow all host headers -ALLOWED_HOSTS = ['*'] +ALLOWED_HOSTS = ["*"] # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/3.0/howto/static-files/ # static generated automatically -STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles') -STATIC_URL = '/static/' +STATIC_ROOT = os.path.join(BASE_DIR, "staticfiles") +STATIC_URL = "/static/" STATICFILES_DIRS = [ # static generated by us - os.path.join(PROJECT_ROOT, 'static'), + os.path.join(PROJECT_ROOT, "static"), ] CSRF_TRUSTED_ORIGINS = [ - 'http://*.gitpod.io', - 'https://*.gitpod.io', + "http://*.gitpod.io", + "https://*.gitpod.io", ] CORS_ORIGIN_ALLOW_ALL = True 
CORS_ALLOW_HEADERS = [ - 'accept', - 'academy', - 'accept-encoding', - 'authorization', - 'content-type', - 'dnt', - 'origin', - 'user-agent', - 'x-csrftoken', - 'x-requested-with', - 'cache-control', - 'credentials', - 'http-access-control-request-method', + "accept", + "academy", + "accept-encoding", + "authorization", + "content-type", + "dnt", + "origin", + "user-agent", + "x-csrftoken", + "x-requested-with", + "cache-control", + "credentials", + "http-access-control-request-method", ] # production redis url -REDIS_URL = os.getenv('REDIS_COM_URL', '') +REDIS_URL = os.getenv("REDIS_COM_URL", "") kwargs = {} IS_REDIS_WITH_SSL_ON_HEROKU = False IS_REDIS_WITH_SSL = False # local or heroku redis url -if REDIS_URL == '' or REDIS_URL == 'redis://localhost:6379': - REDIS_URL = os.getenv('REDIS_URL', 'redis://localhost:6379') +if REDIS_URL == "" or REDIS_URL == "redis://localhost:6379": + REDIS_URL = os.getenv("REDIS_URL", "redis://localhost:6379") # support for heroku redis addon - if REDIS_URL.startswith('rediss://'): + if REDIS_URL.startswith("rediss://"): IS_REDIS_WITH_SSL_ON_HEROKU = True else: IS_REDIS_WITH_SSL = True # on localhost this should be false to avoid SSL Certificate -SECURE_SSL_REDIRECT = os.getenv('SECURE_SSL_REDIRECT', 'TRUE') == 'TRUE' +SECURE_SSL_REDIRECT = os.getenv("SECURE_SSL_REDIRECT", "TRUE") == "TRUE" -CACHE_MIDDLEWARE_SECONDS = 60 * int(os.getenv('GLOBAL_CACHE_MINUTES', 60 * 24)) +CACHE_MIDDLEWARE_SECONDS = 60 * int(os.getenv("GLOBAL_CACHE_MINUTES", 60 * 24)) CACHES = { - 'default': { - 'BACKEND': 'django_redis.cache.RedisCache', - 'LOCATION': REDIS_URL, - 'TIMEOUT': CACHE_MIDDLEWARE_SECONDS, + "default": { + "BACKEND": "django_redis.cache.RedisCache", + "LOCATION": REDIS_URL, + "TIMEOUT": CACHE_MIDDLEWARE_SECONDS, } } @@ -388,28 +389,28 @@ DJANGO_REDIS_IGNORE_EXCEPTIONS = True if IS_REDIS_WITH_SSL_ON_HEROKU: - CACHES['default']['OPTIONS'] = { - 'CLIENT_CLASS': 'django_redis.client.DefaultClient', - 'PICKLE_VERSION': -1, + CACHES["default"]["OPTIONS"] = { + "CLIENT_CLASS": "django_redis.client.DefaultClient", + "PICKLE_VERSION": -1, # "IGNORE_EXCEPTIONS": True, - 'CONNECTION_POOL_KWARGS': { - 'ssl_cert_reqs': None, - 'max_connections': int(os.getenv('REDIS_MAX_CONNECTIONS', 500)), + "CONNECTION_POOL_KWARGS": { + "ssl_cert_reqs": None, + "max_connections": int(os.getenv("REDIS_MAX_CONNECTIONS", 500)), }, } elif IS_REDIS_WITH_SSL: redis_ca_cert_path, redis_user_cert_path, redis_user_private_key_path = configure_redis() - CACHES['default']['OPTIONS'] = { - 'CLIENT_CLASS': 'django_redis.client.DefaultClient', - 'PICKLE_VERSION': -1, + CACHES["default"]["OPTIONS"] = { + "CLIENT_CLASS": "django_redis.client.DefaultClient", + "PICKLE_VERSION": -1, # "IGNORE_EXCEPTIONS": True, - 'CONNECTION_POOL_KWARGS': { - 'ssl_cert_reqs': 'required', - 'ssl_ca_certs': redis_ca_cert_path, - 'ssl_certfile': redis_user_cert_path, - 'ssl_keyfile': redis_user_private_key_path, - 'max_connections': int(os.getenv('REDIS_MAX_CONNECTIONS', 500)), - } + "CONNECTION_POOL_KWARGS": { + "ssl_cert_reqs": "required", + "ssl_ca_certs": redis_ca_cert_path, + "ssl_certfile": redis_user_cert_path, + "ssl_keyfile": redis_user_private_key_path, + "max_connections": int(os.getenv("REDIS_MAX_CONNECTIONS", 500)), + }, } if IS_TEST_ENV: @@ -450,90 +451,89 @@ def set(self, key, value, *args, timeout=None, **kwargs): return self._cache[key] = { - 'key': key, - 'value': value, - 'valid_until': timeout, + "key": key, + "value": value, + "valid_until": timeout, } def get(self, key, *args, **kwargs): if key not in 
self._cache.keys(): return None - return self._cache[key]['value'] + return self._cache[key]["value"] - CACHES['default'] = { - **CACHES['default'], - 'LOCATION': 'breathecode', - 'BACKEND': 'breathecode.settings.CustomMemCache', + CACHES["default"] = { + **CACHES["default"], + "LOCATION": "breathecode", + "BACKEND": "breathecode.settings.CustomMemCache", } # overwrite the redis url with the new one -os.environ['REDIS_URL'] = REDIS_URL +os.environ["REDIS_URL"] = REDIS_URL SITE_ID = 1 # Change 'default' database configuration with $DATABASE_URL. # https://github.com/jacobian/dj-database-url#url-schema DATABASES = { - 'default': dj_database_url.config(default=DATABASE_URL, conn_max_age=600, ssl_require=False), + "default": dj_database_url.config(default=DATABASE_URL, conn_max_age=600, ssl_require=False), } -DEFAULT_AUTO_FIELD = 'django.db.models.AutoField' +DEFAULT_AUTO_FIELD = "django.db.models.AutoField" # SQL Explorer -EXPLORER_CONNECTIONS = {'Default': 'default'} -EXPLORER_DEFAULT_CONNECTION = 'default' +EXPLORER_CONNECTIONS = {"Default": "default"} +EXPLORER_DEFAULT_CONNECTION = "default" # Use the format of Django 6.0, remove it when upgrading to Django 6.0 FORMS_URLFIELD_ASSUME_HTTPS = True -sql_keywords_path = Path(os.getcwd()) / 'breathecode' / 'sql_keywords.json' -with open(sql_keywords_path, 'r') as f: +sql_keywords_path = Path(os.getcwd()) / "breathecode" / "sql_keywords.json" +with open(sql_keywords_path, "r") as f: sql_keywords = json.load(f) # https://www.postgresql.org/docs/8.1/sql-keywords-appendix.html # scripts/update_sql_keywords_json.py # breathecode/sql_keywords.json - EXPLORER_SQL_BLACKLIST = tuple(sql_keywords['blacklist']) + EXPLORER_SQL_BLACKLIST = tuple(sql_keywords["blacklist"]) # Django Rest Hooks HOOK_EVENTS = { # 'any.event.name': 'App.Model.Action' (created/updated/deleted) - 'form_entry.added': 'marketing.FormEntry.created+', - 'form_entry.changed': 'marketing.FormEntry.updated+', - 'profile_academy.added': 'authenticate.ProfileAcademy.created+', - 'profile_academy.changed': 'authenticate.ProfileAcademy.updated+', - 'cohort_user.added': 'admissions.CohortUser.created+', - 'cohort_user.changed': 'admissions.CohortUser.updated+', - + "form_entry.added": "marketing.FormEntry.created+", + "form_entry.changed": "marketing.FormEntry.updated+", + "profile_academy.added": "authenticate.ProfileAcademy.created+", + "profile_academy.changed": "authenticate.ProfileAcademy.updated+", + "cohort_user.added": "admissions.CohortUser.created+", + "cohort_user.changed": "admissions.CohortUser.updated+", # and custom events, make sure to trigger them at notify.receivers.py - 'cohort_user.edu_status_updated': 'admissions.CohortUser.edu_status_updated', - 'cohort.cohort_stage_updated': 'admissions.Cohort.cohort_stage_updated', - 'user_invite.invite_status_updated': 'authenticate.UserInvite.invite_status_updated', - 'asset.asset_status_updated': 'registry.Asset.asset_status_updated', - 'event.event_status_updated': 'events.Event.event_status_updated', - 'event.new_event_order': 'events.EventCheckin.new_event_order', - 'event.new_event_attendee': 'events.EventCheckin.new_event_attendee', - 'form_entry.won_or_lost': 'marketing.FormEntry.won_or_lost', - 'form_entry.new_deal': 'marketing.FormEntry.new_deal', - 'session.mentorship_session_status': 'mentorship.MentorshipSession.mentorship_session_status', - 'planfinancing.planfinancing_created': 'payments.PlanFinancing.planfinancing_created', - 'subscription.subscription_created': 'payments.Subscription.subscription_created', + 
"cohort_user.edu_status_updated": "admissions.CohortUser.edu_status_updated", + "cohort.cohort_stage_updated": "admissions.Cohort.cohort_stage_updated", + "user_invite.invite_status_updated": "authenticate.UserInvite.invite_status_updated", + "asset.asset_status_updated": "registry.Asset.asset_status_updated", + "event.event_status_updated": "events.Event.event_status_updated", + "event.new_event_order": "events.EventCheckin.new_event_order", + "event.new_event_attendee": "events.EventCheckin.new_event_attendee", + "form_entry.won_or_lost": "marketing.FormEntry.won_or_lost", + "form_entry.new_deal": "marketing.FormEntry.new_deal", + "session.mentorship_session_status": "mentorship.MentorshipSession.mentorship_session_status", + "planfinancing.planfinancing_created": "payments.PlanFinancing.planfinancing_created", + "subscription.subscription_created": "payments.Subscription.subscription_created", } # Websocket -ASGI_APPLICATION = 'breathecode.asgi.application' -REDIS_URL_PATTERN = r'^redis://(.+):(\d+)$' +ASGI_APPLICATION = "breathecode.asgi.application" +REDIS_URL_PATTERN = r"^redis://(.+):(\d+)$" heroku_redis_ssl_host = { - 'address': REDIS_URL, # The 'rediss' schema denotes a SSL connection. + "address": REDIS_URL, # The 'rediss' schema denotes a SSL connection. } if IS_REDIS_WITH_SSL_ON_HEROKU: - heroku_redis_ssl_host['address'] += '?ssl_cert_reqs=none' + heroku_redis_ssl_host["address"] += "?ssl_cert_reqs=none" # keep last part of the file django_heroku.settings(locals(), databases=False) # django_heroku does not support the new storages properly required by django 5.0 -del locals()['STATICFILES_STORAGE'] +del locals()["STATICFILES_STORAGE"] diff --git a/breathecode/setup.py b/breathecode/setup.py index 132f9f769..76d6ba538 100644 --- a/breathecode/setup.py +++ b/breathecode/setup.py @@ -5,41 +5,41 @@ import redis -__all__ = ['resolve_gcloud_credentials', 'configure_redis', 'Lock'] +__all__ = ["resolve_gcloud_credentials", "configure_redis", "Lock"] prev_path = None prev_key = None def is_test_env(): - return os.getenv('ENV') == 'test' + return os.getenv("ENV") == "test" IS_TEST_ENV = is_test_env() logger = logging.getLogger(__name__) redis_client = None -IS_HEROKU = os.getenv('DYNO', '') != '' +IS_HEROKU = os.getenv("DYNO", "") != "" def configure_redis(): - ssl_ca_certs = os.getenv('REDIS_CA_CERT', None) - ssl_certfile = os.getenv('REDIS_USER_CERT', None) - ssl_keyfile = os.getenv('REDIS_USER_PRIVATE_KEY', None) + ssl_ca_certs = os.getenv("REDIS_CA_CERT", None) + ssl_certfile = os.getenv("REDIS_USER_CERT", None) + ssl_keyfile = os.getenv("REDIS_USER_PRIVATE_KEY", None) if not (ssl_ca_certs and ssl_certfile and ssl_keyfile): return - redis_ca_cert_path = Path(os.path.join(os.getcwd(), 'redis_ca.pem')) - redis_user_cert_path = Path(os.path.join(os.getcwd(), 'redis_user.crt')) - redis_user_private_key_path = Path(os.path.join(os.getcwd(), 'redis_user_private.key')) + redis_ca_cert_path = Path(os.path.join(os.getcwd(), "redis_ca.pem")) + redis_user_cert_path = Path(os.path.join(os.getcwd(), "redis_user.crt")) + redis_user_private_key_path = Path(os.path.join(os.getcwd(), "redis_user_private.key")) - with open(redis_ca_cert_path, 'w') as f: + with open(redis_ca_cert_path, "w") as f: f.write(ssl_ca_certs) - with open(redis_user_cert_path, 'w') as f: + with open(redis_user_cert_path, "w") as f: f.write(ssl_certfile) - with open(redis_user_private_key_path, 'w') as f: + with open(redis_user_private_key_path, "w") as f: f.write(ssl_keyfile) return redis_ca_cert_path, redis_user_cert_path, 
redis_user_private_key_path @@ -47,43 +47,43 @@ def configure_redis(): def get_redis_config(): # production redis url - redis_url = os.getenv('REDIS_COM_URL', '') + redis_url = os.getenv("REDIS_COM_URL", "") redis_kwargs = {} settings = {} # local or heroku redis url - if redis_url == '': - redis_url = os.getenv('REDIS_URL', 'redis://localhost:6379') + if redis_url == "": + redis_url = os.getenv("REDIS_URL", "redis://localhost:6379") # support for heroku redis addon - if redis_url.startswith('redis://') and IS_HEROKU: + if redis_url.startswith("redis://") and IS_HEROKU: redis_kwargs = { - 'broker_use_ssl': { - 'ssl_cert_reqs': ssl.CERT_NONE, + "broker_use_ssl": { + "ssl_cert_reqs": ssl.CERT_NONE, + }, + "redis_backend_use_ssl": { + "ssl_cert_reqs": ssl.CERT_NONE, }, - 'redis_backend_use_ssl': { - 'ssl_cert_reqs': ssl.CERT_NONE, - } } else: redis_ca_cert_path, redis_user_cert_path, redis_user_private_key_path = configure_redis() settings = { - 'ssl_cert_reqs': ssl.CERT_REQUIRED, - 'ssl_ca_certs': redis_ca_cert_path, - 'ssl_certfile': redis_user_cert_path, - 'ssl_keyfile': redis_user_private_key_path, + "ssl_cert_reqs": ssl.CERT_REQUIRED, + "ssl_ca_certs": redis_ca_cert_path, + "ssl_certfile": redis_user_cert_path, + "ssl_keyfile": redis_user_private_key_path, } redis_kwargs = { - 'broker_use_ssl': settings, - 'redis_backend_use_ssl': settings, + "broker_use_ssl": settings, + "redis_backend_use_ssl": settings, } # overwrite the redis url with the new one - os.environ['REDIS_URL'] = redis_url + os.environ["REDIS_URL"] = redis_url return settings, redis_kwargs, redis_url @@ -120,32 +120,37 @@ def resolve_gcloud_credentials(): global prev_path, prev_key # avoid manage credentials if they are already set - if is_test_env() is False and (prev_path and prev_path == os.getenv('GOOGLE_APPLICATION_CREDENTIALS') and - ((prev_key and prev_key == os.getenv('GOOGLE_SERVICE_KEY')) or - (prev_key == None and os.getenv('GOOGLE_SERVICE_KEY') == None))): - logger.info('GOOGLE_APPLICATION_CREDENTIALS and GOOGLE_SERVICE_KEY are already set') + if is_test_env() is False and ( + prev_path + and prev_path == os.getenv("GOOGLE_APPLICATION_CREDENTIALS") + and ( + (prev_key and prev_key == os.getenv("GOOGLE_SERVICE_KEY")) + or (prev_key == None and os.getenv("GOOGLE_SERVICE_KEY") == None) + ) + ): + logger.info("GOOGLE_APPLICATION_CREDENTIALS and GOOGLE_SERVICE_KEY are already set") return True - path = os.getenv('GOOGLE_APPLICATION_CREDENTIALS') + path = os.getenv("GOOGLE_APPLICATION_CREDENTIALS") if not path: - logger.error('GOOGLE_APPLICATION_CREDENTIALS is not set') + logger.error("GOOGLE_APPLICATION_CREDENTIALS is not set") return False path = Path(os.path.join(os.getcwd(), path)) prev_path = str(path) - os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = prev_path + os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = prev_path if os.path.exists(path): return True - credentials = os.getenv('GOOGLE_SERVICE_KEY') + credentials = os.getenv("GOOGLE_SERVICE_KEY") if not credentials: - logger.error('GOOGLE_SERVICE_KEY is not set') + logger.error("GOOGLE_SERVICE_KEY is not set") return False prev_key = credentials - with open(path, 'w') as credentials_file: + with open(path, "w") as credentials_file: credentials_file.write(credentials) return True diff --git a/breathecode/tests/mixins/__init__.py b/breathecode/tests/mixins/__init__.py index cb72dda64..a51e5e14b 100644 --- a/breathecode/tests/mixins/__init__.py +++ b/breathecode/tests/mixins/__init__.py @@ -1,6 +1,7 @@ """ Global mixins """ + from .models_mixin import * from 
.development_environment import * from .date_formatter_mixin import * diff --git a/breathecode/tests/mixins/breathecode_mixin/__init__.py b/breathecode/tests/mixins/breathecode_mixin/__init__.py index bfa450320..11a381e1e 100644 --- a/breathecode/tests/mixins/breathecode_mixin/__init__.py +++ b/breathecode/tests/mixins/breathecode_mixin/__init__.py @@ -1,12 +1,13 @@ """ Global mixins """ + from .breathecode import Breathecode, fake -__all__ = ['BreathecodeMixin', 'fake'] +__all__ = ["BreathecodeMixin", "fake"] -class BreathecodeMixin(): +class BreathecodeMixin: bc: Breathecode def set_test_instance(self, parent) -> None: diff --git a/breathecode/tests/mixins/breathecode_mixin/breathecode.py b/breathecode/tests/mixins/breathecode_mixin/breathecode.py index c0e1b2420..68ec65b4f 100644 --- a/breathecode/tests/mixins/breathecode_mixin/breathecode.py +++ b/breathecode/tests/mixins/breathecode_mixin/breathecode.py @@ -15,7 +15,7 @@ from .format import Format from .random import Random -__all__ = ['Breathecode', 'fake'] +__all__ = ["Breathecode", "fake"] fake = Faker() @@ -23,9 +23,9 @@ def print_arguments(func: callable) -> str: try: varnames = str(inspect.signature(func)) except ValueError: - raise Exception(f'{func.__name__} is a invalid function/method') + raise Exception(f"{func.__name__} is a invalid function/method") - return varnames.replace('self, ', '').replace('cls, ', '') + return varnames.replace("self, ", "").replace("cls, ", "") class Breathecode(BreathecodeInterface): @@ -81,21 +81,21 @@ def help(self, *args) -> None: assert False def _get_doctring(self, path: str) -> None: - parts_of_path = path.split('.') - current_path = '' + parts_of_path = path.split(".") + current_path = "" current = None for part_of_path in parts_of_path: if not current: if not hasattr(self._parent, part_of_path): - current_path += f'.{part_of_path}' + current_path += f".{part_of_path}" break current = getattr(self._parent, part_of_path) else: if not hasattr(current, part_of_path): - current_path += f'.{part_of_path}' + current_path += f".{part_of_path}" current = None break @@ -105,27 +105,27 @@ def _get_doctring(self, path: str) -> None: from unittest.mock import patch, MagicMock if callable(current): - print(f'self.{path}{print_arguments(current)}:') + print(f"self.{path}{print_arguments(current)}:") else: - print(f'self.{path}:') + print(f"self.{path}:") print() - with patch('sys.stdout.write', MagicMock()) as mock: + with patch("sys.stdout.write", MagicMock()) as mock: help(current) for args, _ in mock.call_args_list: - if args[0] == '\n': + if args[0] == "\n": print() - lines = args[0].split('\n') + lines = args[0].split("\n") for line in lines[3:-1]: - print(f' {line}') + print(f" {line}") else: - print(f'self.{path}:') + print(f"self.{path}:") print() - print(f' self{current_path} not exists.') + print(f" self{current_path} not exists.") print() @@ -135,37 +135,37 @@ def _help_tree(self, level: int = 0, parent: Optional[dict] = None, last_item: b result: list[str] = [] if not parent: - result.append('bc') + result.append("bc") - parent = [x for x in dir(parent or self) if not x.startswith('_')] + parent = [x for x in dir(parent or self) if not x.startswith("_")] if last_item: - starts = ' ' + ('│ ' * (level - 1)) + starts = " " + ("│ " * (level - 1)) else: - starts = '│ ' * level + starts = "│ " * level for key in parent: item = getattr(self, key) if callable(item): - result.append(f'{starts}├── {key}{print_arguments(item)}') + result.append(f"{starts}├── {key}{print_arguments(item)}") else: - 
result.append(f'{starts}├── {key}') + result.append(f"{starts}├── {key}") last_item = parent.index(key) == len(parent) - 1 result = [*result, *Breathecode._help_tree(item, level + 1, item, last_item)] - result[-1] = result[-1].replace(' ├── ', ' └── ') - result[-1] = result[-1].replace(r'├── ([a-zA-Z0-9]+)$', r'└── \1') + result[-1] = result[-1].replace(" ├── ", " └── ") + result[-1] = result[-1].replace(r"├── ([a-zA-Z0-9]+)$", r"└── \1") for n in range(len(result) - 1, -1, -1): - if result[n][0] == '├': - result[n] = re.sub(r'^├', r'└', result[n]) + if result[n][0] == "├": + result[n] = re.sub(r"^├", r"└", result[n]) break if level == 0: - print('\n'.join(result)) + print("\n".join(result)) return result diff --git a/breathecode/tests/mixins/breathecode_mixin/cache.py b/breathecode/tests/mixins/breathecode_mixin/cache.py index f90fb4046..26bba46df 100644 --- a/breathecode/tests/mixins/breathecode_mixin/cache.py +++ b/breathecode/tests/mixins/breathecode_mixin/cache.py @@ -5,7 +5,7 @@ from ..cache_mixin import CacheMixin -__all__ = ['Cache'] +__all__ = ["Cache"] class Cache: diff --git a/breathecode/tests/mixins/breathecode_mixin/check.py b/breathecode/tests/mixins/breathecode_mixin/check.py index a3748626c..5f17393d7 100644 --- a/breathecode/tests/mixins/breathecode_mixin/check.py +++ b/breathecode/tests/mixins/breathecode_mixin/check.py @@ -13,7 +13,7 @@ from ..sha256_mixin import Sha256Mixin from ..token_mixin import TokenMixin -__all__ = ['Check'] +__all__ = ["Check"] class Check: @@ -110,23 +110,23 @@ def calls(self, first: list[call], second: list[call]) -> None: ``` """ - is_unittest = hasattr(self._parent, 'assertEqual') + is_unittest = hasattr(self._parent, "assertEqual") if is_unittest: - self._parent.assertEqual(len(first), - len(second), - msg=f'Does not have same length\n\n{first}\n\n!=\n\n{second}') + self._parent.assertEqual( + len(first), len(second), msg=f"Does not have same length\n\n{first}\n\n!=\n\n{second}" + ) for i in range(0, len(first)): - self._parent.assertEqual(first[i].args, second[i].args, msg=f'args in index {i} does not match') - self._parent.assertEqual(first[i].kwargs, second[i].kwargs, msg=f'kwargs in index {i} does not match') + self._parent.assertEqual(first[i].args, second[i].args, msg=f"args in index {i} does not match") + self._parent.assertEqual(first[i].kwargs, second[i].kwargs, msg=f"kwargs in index {i} does not match") else: - assert len(first) == len(second), f'not have same length than {first}\n{second}' + assert len(first) == len(second), f"not have same length than {first}\n{second}" for i in range(0, len(first)): - assert first[i].args == second[i].args, f'args in index {i} does not match' - assert first[i].kwargs == second[i].kwargs, f'kwargs in index {i} does not match' + assert first[i].args == second[i].args, f"args in index {i} does not match" + assert first[i].kwargs == second[i].kwargs, f"kwargs in index {i} does not match" def _fill_partial_equality(self, first: dict, second: dict) -> dict: original = {} @@ -162,10 +162,10 @@ def queryset_of(self, query: Any, model: Model) -> None: """ if not isinstance(query, QuerySet): - self._parent.fail('The first argument is not a QuerySet') + self._parent.fail("The first argument is not a QuerySet") if query.model != model: - self._parent.fail(f'The QuerySet is type {query.model.__name__} instead of {model.__name__}') + self._parent.fail(f"The QuerySet is type {query.model.__name__} instead of {model.__name__}") def queryset_with_pks(self, query: Any, pks: list[int]) -> None: """ @@ -190,7 
+190,7 @@ def queryset_with_pks(self, query: Any, pks: list[int]) -> None: """ if not isinstance(query, QuerySet): - self._parent.fail('The first argument is not a QuerySet') + self._parent.fail("The first argument is not a QuerySet") assert [x.pk for x in query] == pks @@ -216,24 +216,24 @@ def list_with_pks(self, query: Any, pks: list[int]) -> None: """ if not isinstance(query, list): - self._parent.fail('The first argument is not a list') + self._parent.fail("The first argument is not a list") self._parent.assertEqual([x.pk for x in query], pks) - def count_queries(self, n, db='default', verbose=False): - queries = [query['sql'] for query in connections[db].queries] + def count_queries(self, n, db="default", verbose=False): + queries = [query["sql"] for query in connections[db].queries] if not verbose: - self._parent.assertEqual(n, len(queries), 'different number of queries, use verbose=True to see more info') + self._parent.assertEqual(n, len(queries), "different number of queries, use verbose=True to see more info") if verbose and n != len(queries): - result = '\n' - result += '---------------- Queries ----------------\n\n' + result = "\n" + result += "---------------- Queries ----------------\n\n" for query in connections[db].queries: result += f'{query["time"]} {query["sql"]}\n\n' - result += '----------------- Count -----------------\n\n' - result += f'Queries: {len(connections[db].queries)}\n\n' - result += '-----------------------------------------\n\n' + result += "----------------- Count -----------------\n\n" + result += f"Queries: {len(connections[db].queries)}\n\n" + result += "-----------------------------------------\n\n" self._parent.fail(result) @contextmanager @@ -243,4 +243,4 @@ def raises(self, expected_exception, expected_message): except expected_exception as e: assert str(e) == expected_message, f"Expected '{expected_message}', but got '{str(e)}'" except Exception as e: - pytest.fail(f'Expected {expected_exception} but it was not raised.') + pytest.fail(f"Expected {expected_exception} but it was not raised.") diff --git a/breathecode/tests/mixins/breathecode_mixin/database.py b/breathecode/tests/mixins/breathecode_mixin/database.py index 7f01ce026..f30658326 100644 --- a/breathecode/tests/mixins/breathecode_mixin/database.py +++ b/breathecode/tests/mixins/breathecode_mixin/database.py @@ -38,7 +38,7 @@ # from django.test.utils import override_settings -__all__ = ['Database'] +__all__ = ["Database"] _fake = Faker() @@ -49,32 +49,36 @@ class DatabaseV3: def _get_random_attrs(cls, model): props = {} - model_fields = [( - x, - type(getattr(model, x).field), - { - 'choices': getattr(getattr(model, x).field, 'choices', None), - 'default': getattr(getattr(model, x).field, 'default', models.NOT_PROVIDED), - 'null': getattr(getattr(model, x).field, 'null', False), - 'blank': getattr(getattr(model, x).field, 'blank', False), - }, - ) for x in vars(model) if type(getattr(model, x)) is DeferredAttribute] + model_fields = [ + ( + x, + type(getattr(model, x).field), + { + "choices": getattr(getattr(model, x).field, "choices", None), + "default": getattr(getattr(model, x).field, "default", models.NOT_PROVIDED), + "null": getattr(getattr(model, x).field, "null", False), + "blank": getattr(getattr(model, x).field, "blank", False), + }, + ) + for x in vars(model) + if type(getattr(model, x)) is DeferredAttribute + ] for field_name, field_type, field_attrs in model_fields: - if field_attrs['default'] is not models.NOT_PROVIDED: - if callable(field_attrs['default']): - 
props[field_name] = field_attrs['default']() + if field_attrs["default"] is not models.NOT_PROVIDED: + if callable(field_attrs["default"]): + props[field_name] = field_attrs["default"]() else: - props[field_name] = field_attrs['default'] + props[field_name] = field_attrs["default"] - elif field_attrs['blank'] is True and field_attrs['null'] is True: + elif field_attrs["blank"] is True and field_attrs["null"] is True: props[field_name] = None - elif field_attrs['choices'] is not None: - props[field_name] = random.choice(field_attrs['choices'])[0] + elif field_attrs["choices"] is not None: + props[field_name] = random.choice(field_attrs["choices"])[0] elif field_type is models.EmailField: props[field_name] = _fake.email() @@ -178,23 +182,23 @@ def get_attrs(field): cls_type = type(field) field = field.field obj = { - 'cls': cls_type, - 'path': field.related_model._meta.app_label + '.' + field.related_model.__name__, - 'name': field.name, - 'blank': field.blank, - 'null': field.null, - 'default': field.default, - 'choices': field.choices, - 'related_model': field.related_model, + "cls": cls_type, + "path": field.related_model._meta.app_label + "." + field.related_model.__name__, + "name": field.name, + "blank": field.blank, + "null": field.null, + "default": field.default, + "choices": field.choices, + "related_model": field.related_model, } return obj for x in vars(model): if type(getattr(model, x)) in [ - ForwardOneToOneDescriptor, - ForwardManyToOneDescriptor, - ManyToManyDescriptor, + ForwardOneToOneDescriptor, + ForwardManyToOneDescriptor, + ManyToManyDescriptor, ]: yield ( x, @@ -211,20 +215,20 @@ def _build_descriptors(cls): ban_list = set() for app in settings.INSTALLED_APPS: - app_label = app.split('.')[-1] + app_label = app.split(".")[-1] all_models = apps.get_app_config(app_label).get_models() app_cache = {} for model in all_models: model_name = model.__name__ model_descriptor = { - 'cls': model, - 'path': app_label + '.' + model_name, - 'related_fields': [*cls._get_related_fields(model)], - 'get_values': functools.partial(cls._get_random_attrs, model), + "cls": model, + "path": app_label + "." + model_name, + "related_fields": [*cls._get_related_fields(model)], + "get_values": functools.partial(cls._get_random_attrs, model), } app_cache[model_name] = model_descriptor - name_map[app_label + '__' + cls.to_snake_case(model_name)] = (app_label, model_name) + name_map[app_label + "__" + cls.to_snake_case(model_name)] = (app_label, model_name) if model_name in ban_list: continue @@ -239,7 +243,7 @@ def _build_descriptors(cls): model_map[model_name] = model_descriptor name_map[snake_model_name] = model_name - model_alias_map[snake_model_name] = app_label + '.' + model_name + model_alias_map[snake_model_name] = app_label + "." 
+ model_name app_map[app_label] = app_cache @@ -247,7 +251,7 @@ def _build_descriptors(cls): @classmethod def to_snake_case(cls, class_name): - snake_case = re.sub('([a-z0-9])([A-Z])', r'\1_\2', class_name).lower() + snake_case = re.sub("([a-z0-9])([A-Z])", r"\1_\2", class_name).lower() return snake_case @classmethod @@ -263,12 +267,14 @@ def create(cls, **models): path = name_map[model_alias] except KeyError: - if '__' in model_alias: - app_label, model_name = model_alias.split('__') - raise ValueError(f'Model {model_name} not found in {app_label}') + if "__" in model_alias: + app_label, model_name = model_alias.split("__") + raise ValueError(f"Model {model_name} not found in {app_label}") - raise ValueError(f'Model {model_alias} not found or two models have the same name, ' - 'use the app_label.model_name format') + raise ValueError( + f"Model {model_alias} not found or two models have the same name, " + "use the app_label.model_name format" + ) if isinstance(path, tuple): app_label, model_name = path @@ -284,7 +290,7 @@ def create(cls, **models): # fill cache for model_alias, model_descriptor in pending.items(): - x = model_descriptor['path'] + x = model_descriptor["path"] cache[x] = (model_descriptor, models.get(model_alias)) exec_order.append(x) @@ -297,34 +303,37 @@ def create(cls, **models): for key in exec_order: item = cache.get(key, None) if item is None: - app_label, model_name = key.split('.') + app_label, model_name = key.split(".") x = app_map[app_label][model_name] item = (x, 1) cache[key] = item model_descriptor, value = item - if model_descriptor['path'] in cache_to_add: + if model_descriptor["path"] in cache_to_add: continue - if model_descriptor['path'] in processed: + if model_descriptor["path"] in processed: continue - processed.add(model_descriptor['path']) + processed.add(model_descriptor["path"]) - for related_field, field_type, field_attrs in model_descriptor['related_fields']: + for related_field, field_type, field_attrs in model_descriptor["related_fields"]: - if field_attrs['path'] in processed: + if field_attrs["path"] in processed: continue - if (field_attrs['path'] not in exec_order and field_attrs['path'] not in cache_to_add - and (field_attrs['null'] is False or field_attrs['cls'] is ForwardOneToOneDescriptor)): - app_label, model_name = field_attrs['path'].split('.') - cache_to_add[field_attrs['path']] = (app_map[app_label][model_name], 1) + if ( + field_attrs["path"] not in exec_order + and field_attrs["path"] not in cache_to_add + and (field_attrs["null"] is False or field_attrs["cls"] is ForwardOneToOneDescriptor) + ): + app_label, model_name = field_attrs["path"].split(".") + cache_to_add[field_attrs["path"]] = (app_map[app_label][model_name], 1) # disable m2m temporally - if field_attrs['cls'] is not ManyToManyDescriptor: - exec_order_to_add.append(field_attrs['path']) + if field_attrs["cls"] is not ManyToManyDescriptor: + exec_order_to_add.append(field_attrs["path"]) exec_order += exec_order_to_add cache.update(cache_to_add) @@ -335,8 +344,8 @@ def create(cls, **models): # sort dependencies for model_path, (model_descriptor, value) in cache.items(): - for related_field, field_type, field_attrs in model_descriptor['related_fields']: - dep_path = field_attrs['path'] + for related_field, field_type, field_attrs in model_descriptor["related_fields"]: + dep_path = field_attrs["path"] to_reevaluate = [] # dep not found, maybe it is a m2m, that was temporally disabled @@ -355,9 +364,9 @@ def create(cls, **models): to_re_reevaluate = [] for x in 
to_reevaluate: - for related_field, field_type, field_attrs in cache[x][0]['related_fields']: + for related_field, field_type, field_attrs in cache[x][0]["related_fields"]: - dep_path = field_attrs['path'] + dep_path = field_attrs["path"] # dep not found, maybe it is a m2m, that was temporally disabled try: @@ -373,7 +382,7 @@ def create(cls, **models): exec_order.insert(model_index, dep_path) # disable m2m temporally - if field_attrs['cls'] is not ManyToManyDescriptor: + if field_attrs["cls"] is not ManyToManyDescriptor: to_re_reevaluate.append(dep_path) to_reevaluate = to_re_reevaluate @@ -386,8 +395,8 @@ def create(cls, **models): how_many, arguments = argument_parser(value)[0] - for related_field, field_type, field_attrs in model_descriptor['related_fields']: - if field_attrs['path'] in generated: + for related_field, field_type, field_attrs in model_descriptor["related_fields"]: + if field_attrs["path"] in generated: # no implemented yet if field_type is ManyToManyDescriptor: @@ -396,26 +405,24 @@ def create(cls, **models): # else: - arguments[field_attrs['name']] = generated[field_attrs['path']] + arguments[field_attrs["name"]] = generated[field_attrs["path"]] result = [ - model_descriptor['cls'].objects.create(**{ - **model_descriptor['get_values'](), - **arguments - }) for _ in range(how_many) + model_descriptor["cls"].objects.create(**{**model_descriptor["get_values"](), **arguments}) + for _ in range(how_many) ] if len(result) == 1: result = result[0] - app_label, model_name = model_descriptor['path'].split('.') + app_label, model_name = model_descriptor["path"].split(".") model_alias = cls.to_snake_case(model_name) if model_alias not in name_map: - model_alias = app_label + '__' + model_alias + model_alias = app_label + "__" + model_alias res[model_alias] = result - generated[model_descriptor['path']] = result + generated[model_descriptor["path"]] = result return AttrDict(**res) @@ -436,19 +443,19 @@ def reset_queries(self): reset_queries() # @override_settings(DEBUG=True) - def get_queries(self, db='default'): - return [query['sql'] for query in connections[db].queries] + def get_queries(self, db="default"): + return [query["sql"] for query in connections[db].queries] # @override_settings(DEBUG=True) - def print_queries(self, db='default'): + def print_queries(self, db="default"): print() - print('---------------- Queries ----------------\n') + print("---------------- Queries ----------------\n") for query in connections[db].queries: print(f'{query["time"]} {query["sql"]}\n') - print('----------------- Count -----------------\n') - print(f'Queries: {len(connections[db].queries)}\n') - print('-----------------------------------------\n') + print("----------------- Count -----------------\n") + print(f"Queries: {len(connections[db].queries)}\n") + print("-----------------------------------------\n") @classmethod def get_model(cls, path: str) -> Model: @@ -477,7 +484,7 @@ def get_model(cls, path: str) -> Model: if path in cls._cache: return cls._cache[path] - app_label, model_name = path.split('.') + app_label, model_name = path.split(".") cls._cache[path] = apps.get_model(app_label, model_name) return cls._cache[path] @@ -569,7 +576,7 @@ def delete(self, path: str, pk: Optional[int | str] = None) -> tuple[int, dict[s - pk(`str | int`): primary key of model. 
""" - lookups = {'pk': pk} if pk else {} + lookups = {"pk": pk} if pk else {} model = Database.get_model(path) return model.objects.filter(**lookups).delete() @@ -651,29 +658,29 @@ def _get_models(self) -> list[Model]: return values def camel_case_to_snake_case(self, name): - name = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', name) - return re.sub('([a-z0-9])([A-Z])', r'\1_\2', name).lower() + name = re.sub("(.)([A-Z][a-z]+)", r"\1_\2", name) + return re.sub("([a-z0-9])([A-Z])", r"\1_\2", name).lower() def _get_model_field_info(self, model, key): attr = getattr(model, key) - meta = vars(attr)['field'].related_model._meta - model = vars(attr)['field'].related_model + meta = vars(attr)["field"].related_model._meta + model = vars(attr)["field"].related_model blank = attr.field.blank null = attr.field.null result = { - 'field': key, - 'blank': blank, - 'null': null, - 'app_name': meta.app_label, - 'model_name': meta.object_name, - 'handler': attr, - 'model': model, + "field": key, + "blank": blank, + "null": null, + "app_name": meta.app_label, + "model_name": meta.object_name, + "handler": attr, + "model": model, } - if hasattr(attr, 'through'): - result['custom_through'] = '_' not in attr.through.__name__ - result['through_fields'] = attr.rel.through_fields + if hasattr(attr, "through"): + result["custom_through"] = "_" not in attr.through.__name__ + result["through_fields"] = attr.rel.through_fields return result @@ -687,19 +694,21 @@ def _get_models_descriptors(self) -> list[Model]: models = apps[app_key] for model in models: values[app_key][model.__name__] = {} - values[app_key][model.__name__]['meta'] = { - 'app_name': model._meta.app_label, - 'model_name': model._meta.object_name, - 'model': model, + values[app_key][model.__name__]["meta"] = { + "app_name": model._meta.app_label, + "model_name": model._meta.object_name, + "model": model, } - values[app_key][model.__name__]['to_one'] = [ - self._get_model_field_info(model, x) for x in dir(model) + values[app_key][model.__name__]["to_one"] = [ + self._get_model_field_info(model, x) + for x in dir(model) if isinstance(getattr(model, x), ForwardManyToOneDescriptor) ] - values[app_key][model.__name__]['to_many'] = [ - self._get_model_field_info(model, x) for x in dir(model) + values[app_key][model.__name__]["to_many"] = [ + self._get_model_field_info(model, x) + for x in dir(model) if isinstance(getattr(model, x), ManyToManyDescriptor) ] @@ -716,39 +725,49 @@ def _get_models_dependencies(self) -> list[Model]: if app_key not in values: values[app_key] = set() - primary_values = values[app_key]['primary'] if 'primary' in values[app_key] else [] - secondary_values = values[app_key]['secondary'] if 'secondary' in values[app_key] else [] + primary_values = values[app_key]["primary"] if "primary" in values[app_key] else [] + secondary_values = values[app_key]["secondary"] if "secondary" in values[app_key] else [] values[app_key] = { - 'primary': { - *primary_values, *[ - x['app_name'] - for x in descriptor['to_one'] if x['app_name'] != app_key and x['null'] == False - ], *[ - x['app_name'] - for x in descriptor['to_many'] if x['app_name'] != app_key and x['null'] == False - ] + "primary": { + *primary_values, + *[ + x["app_name"] + for x in descriptor["to_one"] + if x["app_name"] != app_key and x["null"] == False + ], + *[ + x["app_name"] + for x in descriptor["to_many"] + if x["app_name"] != app_key and x["null"] == False + ], }, - 'secondary': { - *secondary_values, *[ - x['app_name'] - for x in descriptor['to_one'] if x['app_name'] != app_key and 
x['null'] == True - ], *[ - x['app_name'] - for x in descriptor['to_many'] if x['app_name'] != app_key and x['null'] == True - ] + "secondary": { + *secondary_values, + *[ + x["app_name"] + for x in descriptor["to_one"] + if x["app_name"] != app_key and x["null"] == True + ], + *[ + x["app_name"] + for x in descriptor["to_many"] + if x["app_name"] != app_key and x["null"] == True + ], }, } return values - def _sort_models_handlers(self, - dependencies_resolved=None, - primary_values=None, - secondary_values=None, - primary_dependencies=None, - secondary_dependencies=None, - consume_primary=True) -> list[Model]: + def _sort_models_handlers( + self, + dependencies_resolved=None, + primary_values=None, + secondary_values=None, + primary_dependencies=None, + secondary_dependencies=None, + consume_primary=True, + ) -> list[Model]: dependencies_resolved = dependencies_resolved or set() primary_values = primary_values or [] @@ -759,11 +778,11 @@ def _sort_models_handlers(self, primary_dependencies = {} for x in dependencies: - primary_dependencies[x] = dependencies[x]['primary'] + primary_dependencies[x] = dependencies[x]["primary"] secondary_dependencies = {} for x in dependencies: - secondary_dependencies[x] = dependencies[x]['secondary'] + secondary_dependencies[x] = dependencies[x]["secondary"] for dependency in dependencies_resolved: for key in primary_dependencies: @@ -772,7 +791,8 @@ def _sort_models_handlers(self, primary_dependencies[key].remove(dependency) primary_found = [ - x for x in [y for y in primary_dependencies if y not in dependencies_resolved] + x + for x in [y for y in primary_dependencies if y not in dependencies_resolved] if len(primary_dependencies[x]) == 0 ] @@ -780,7 +800,8 @@ def _sort_models_handlers(self, dependencies_resolved.add(x) secondary_found = [ - x for x in [y for y in secondary_dependencies if y not in dependencies_resolved] + x + for x in [y for y in secondary_dependencies if y not in dependencies_resolved] if len(secondary_dependencies[x]) == 0 ] @@ -798,12 +819,14 @@ def _sort_models_handlers(self, primary_dependencies[dependency].remove(x) if primary_dependencies: - return self._sort_models_handlers(dependencies_resolved, - primary_values, - secondary_values, - primary_dependencies, - secondary_dependencies, - consume_primary=True) + return self._sort_models_handlers( + dependencies_resolved, + primary_values, + secondary_values, + primary_dependencies, + secondary_dependencies, + consume_primary=True, + ) if secondary_dependencies: return primary_values, [x for x in secondary_dependencies if len(secondary_dependencies[x])] @@ -818,17 +841,19 @@ def _get_models_handlers(self) -> list[Model]: descriptors = self._get_models_descriptors() def manage_model(models, descriptor, *args, **kwargs): - model_field_name = self.camel_case_to_snake_case(descriptor['meta']['model_name']) - app_name = descriptor['meta']['app_name'] - model_name = descriptor['meta']['model_name'] - - if model_field_name in kwargs and f'{app_name}__{model_field_name}' in kwargs: - raise Exception(f'Exists many apps with the same model name `{model_name}`, please use ' - f'`{app_name}__{model_field_name}` instead of `{model_field_name}`') + model_field_name = self.camel_case_to_snake_case(descriptor["meta"]["model_name"]) + app_name = descriptor["meta"]["app_name"] + model_name = descriptor["meta"]["model_name"] + + if model_field_name in kwargs and f"{app_name}__{model_field_name}" in kwargs: + raise Exception( + f"Exists many apps with the same model name `{model_name}`, please use " + 
f"`{app_name}__{model_field_name}` instead of `{model_field_name}`" + ) arg = False - if f'{app_name}__{model_field_name}' in kwargs: - arg = kwargs[f'{app_name}__{model_field_name}'] + if f"{app_name}__{model_field_name}" in kwargs: + arg = kwargs[f"{app_name}__{model_field_name}"] elif model_field_name in kwargs: arg = kwargs[model_field_name] @@ -836,68 +861,73 @@ def manage_model(models, descriptor, *args, **kwargs): if not model_field_name in models and is_valid(arg): kargs = {} - for x in descriptor['to_one']: - related_model_field_name = self.camel_case_to_snake_case(x['model_name']) + for x in descriptor["to_one"]: + related_model_field_name = self.camel_case_to_snake_case(x["model_name"]) if related_model_field_name in models: - kargs[x['field']] = just_one(models[related_model_field_name]) + kargs[x["field"]] = just_one(models[related_model_field_name]) - without_through = [x for x in descriptor['to_many'] if x['custom_through'] == False] + without_through = [x for x in descriptor["to_many"] if x["custom_through"] == False] for x in without_through: - related_model_field_name = self.camel_case_to_snake_case(x['model_name']) + related_model_field_name = self.camel_case_to_snake_case(x["model_name"]) if related_model_field_name in models: - kargs[x['field']] = get_list(models[related_model_field_name]) + kargs[x["field"]] = get_list(models[related_model_field_name]) - models[model_field_name] = create_models(arg, f'{app_name}.{model_name}', **kargs) + models[model_field_name] = create_models(arg, f"{app_name}.{model_name}", **kargs) with_through = [ - x for x in descriptor['to_many'] if x['custom_through'] == True and not x['field'].endswith('_set') + x for x in descriptor["to_many"] if x["custom_through"] == True and not x["field"].endswith("_set") ] for x in with_through: - related_model_field_name = self.camel_case_to_snake_case(x['model_name']) + related_model_field_name = self.camel_case_to_snake_case(x["model_name"]) if related_model_field_name in models: for item in get_list(models[related_model_field_name]): - through_current = x['through_fields'][0] - through_related = x['through_fields'][1] + through_current = x["through_fields"][0] + through_related = x["through_fields"][1] through_args = {through_current: models[model_field_name], through_related: item} - x['handler'].through.objects.create(**through_args) + x["handler"].through.objects.create(**through_args) return models def link_deferred_model(models, descriptor, *args, **kwargs): - model_field_name = self.camel_case_to_snake_case(descriptor['meta']['model_name']) - app_name = descriptor['meta']['app_name'] - model_name = descriptor['meta']['model_name'] - - if model_field_name in kwargs and f'{app_name}__{model_field_name}' in kwargs: - raise Exception(f'Exists many apps with the same model name `{model_name}`, please use ' - f'`{app_name}__{model_field_name}` instead of `{model_field_name}`') + model_field_name = self.camel_case_to_snake_case(descriptor["meta"]["model_name"]) + app_name = descriptor["meta"]["app_name"] + model_name = descriptor["meta"]["model_name"] + + if model_field_name in kwargs and f"{app_name}__{model_field_name}" in kwargs: + raise Exception( + f"Exists many apps with the same model name `{model_name}`, please use " + f"`{app_name}__{model_field_name}` instead of `{model_field_name}`" + ) if model_field_name in models: - items = models[model_field_name] if isinstance(models[model_field_name], - list) else [models[model_field_name]] + items = ( + models[model_field_name] + if 
isinstance(models[model_field_name], list) + else [models[model_field_name]] + ) for m in items: - for x in descriptor['to_one']: - related_model_field_name = self.camel_case_to_snake_case(x['model_name']) + for x in descriptor["to_one"]: + related_model_field_name = self.camel_case_to_snake_case(x["model_name"]) model_exists = related_model_field_name in models is_list = isinstance(models[model_field_name], list) if model_exists else False - if model_exists and not is_list and not getattr(models[model_field_name], x['field']): - setattr(m, x['field'], just_one(models[related_model_field_name])) + if model_exists and not is_list and not getattr(models[model_field_name], x["field"]): + setattr(m, x["field"], just_one(models[related_model_field_name])) if model_exists and is_list: for y in models[model_field_name]: - if getattr(y, x['field']): - setattr(m, x['field'], just_one(models[related_model_field_name])) + if getattr(y, x["field"]): + setattr(m, x["field"], just_one(models[related_model_field_name])) - for x in descriptor['to_many']: - related_model_field_name = self.camel_case_to_snake_case(x['model_name']) - if related_model_field_name in models and not getattr(models[model_field_name], x['field']): - setattr(m, x['field'], get_list(models[related_model_field_name])) + for x in descriptor["to_many"]: + related_model_field_name = self.camel_case_to_snake_case(x["model_name"]) + if related_model_field_name in models and not getattr(models[model_field_name], x["field"]): + setattr(m, x["field"], get_list(models[related_model_field_name])) - setattr(m, '__mixer__', None) + setattr(m, "__mixer__", None) m.save() return models @@ -908,7 +938,7 @@ def wrapper(*args, **kwargs): for app_key in generation_round: for descriptor_key in descriptors[app_key]: descriptor = descriptors[app_key][descriptor_key] - attr = self.camel_case_to_snake_case(descriptor['meta']['model_name']) + attr = self.camel_case_to_snake_case(descriptor["meta"]["model_name"]) models = manage_model(models, descriptor, *args, **kwargs) @@ -919,13 +949,13 @@ def wrapper(*args, **kwargs): else: arguments_banned.add(attr) - arguments[f'{app_key}__{attr}'] = ... + arguments[f"{app_key}__{attr}"] = ... for generation_round in order: for app_key in generation_round: for descriptor_key in descriptors[app_key]: descriptor = descriptors[app_key][descriptor_key] - attr = self.camel_case_to_snake_case(descriptor['meta']['model_name']) + attr = self.camel_case_to_snake_case(descriptor["meta"]["model_name"]) models = link_deferred_model(models, descriptor, *args, **kwargs) @@ -936,7 +966,7 @@ def wrapper(*args, **kwargs): else: arguments_banned.add(attr) - arguments[f'{app_key}__{attr}'] = ... + arguments[f"{app_key}__{attr}"] = ... return AttrDict(**models) @@ -1007,7 +1037,7 @@ def create(self, *args, **kwargs) -> dict[str, Model | list[Model]]: get credentials. 
""" - #TODO: remove it in a future + # TODO: remove it in a future if self._parent: return GenerateModelsMixin.generate_models(self._parent, _new_implementation=True, *args, **kwargs) diff --git a/breathecode/tests/mixins/breathecode_mixin/datetime.py b/breathecode/tests/mixins/breathecode_mixin/datetime.py index 4276dc593..9f7a98cb4 100644 --- a/breathecode/tests/mixins/breathecode_mixin/datetime.py +++ b/breathecode/tests/mixins/breathecode_mixin/datetime.py @@ -7,7 +7,7 @@ from ..datetime_mixin import DatetimeMixin -__all__ = ['Datetime'] +__all__ = ["Datetime"] class Datetime: diff --git a/breathecode/tests/mixins/breathecode_mixin/format.py b/breathecode/tests/mixins/breathecode_mixin/format.py index 3ac2bf028..ece991436 100644 --- a/breathecode/tests/mixins/breathecode_mixin/format.py +++ b/breathecode/tests/mixins/breathecode_mixin/format.py @@ -18,9 +18,9 @@ from django.utils import timezone -__all__ = ['Format'] +__all__ = ["Format"] -ENCODE = 'utf-8' +ENCODE = "utf-8" fake = Faker() @@ -29,41 +29,41 @@ class Field: @staticmethod def id(mode: str) -> Q: - return f'{random.randint(0, 100000000000000000)}' + return f"{random.randint(0, 100000000000000000)}" @staticmethod def integer(mode: str) -> Callable[[str, str, str], Q]: - if mode == 'in': - v = '' + if mode == "in": + v = "" now_many = random.randint(2, 4) for _ in range(now_many): - v += f'{random.randint(0, 100000000000000000)},' + v += f"{random.randint(0, 100000000000000000)}," return v[:-1] - if mode == 'isnull': - return 'true' if bool(random.randbytes(1)) else 'false' + if mode == "isnull": + return "true" if bool(random.randbytes(1)) else "false" - return f'{random.randint(0, 100000000000000000)}' + return f"{random.randint(0, 100000000000000000)}" @staticmethod def slug(mode: str) -> Q: is_int = bool(random.randbytes(1)) if is_int: - return f'{random.randint(0, 100000000000000000)}' + return f"{random.randint(0, 100000000000000000)}" return fake.slug() @staticmethod def string(mode: str) -> Callable[[str, str, str], Q]: - if mode == 'in': - v = '' + if mode == "in": + v = "" now_many = random.randint(2, 4) for _ in range(now_many): v += f"'{fake.slug()}'," return v[:-1] - if mode == 'isnull': - return 'true' if bool(random.randbytes(1)) else 'false' + if mode == "isnull": + return "true" if bool(random.randbytes(1)) else "false" return fake.slug() @@ -83,18 +83,18 @@ def value(): return date.isoformat() - if mode == 'in': - v = '' + if mode == "in": + v = "" now_many = random.randint(2, 4) for _ in range(now_many): - v += value() + ',' + v += value() + "," return v[:-1] return value() @staticmethod def bool(mode: str) -> Callable[[str, str, str], Q]: - return 'true' if bool(random.randbytes(1)) else 'false' + return "true" if bool(random.randbytes(1)) else "false" class Format: @@ -185,17 +185,17 @@ def lookup(self, lang: str, overwrite: dict = dict(), **kwargs: dict | tuple) -> result = {} # foreign - ids = kwargs.get('ids', tuple()) - slugs = kwargs.get('slugs', tuple()) + ids = kwargs.get("ids", tuple()) + slugs = kwargs.get("slugs", tuple()) # fields - ints = kwargs.get('ints', dict()) - strings = kwargs.get('strings', dict()) - datetimes = kwargs.get('datetimes', dict()) - bools = kwargs.get('bools', dict()) + ints = kwargs.get("ints", dict()) + strings = kwargs.get("strings", dict()) + datetimes = kwargs.get("datetimes", dict()) + bools = kwargs.get("bools", dict()) # opts - custom_fields = kwargs.get('custom_fields', dict()) + custom_fields = kwargs.get("custom_fields", dict()) # serialize foreign ids = 
tuple(ids) @@ -206,21 +206,21 @@ def lookup(self, lang: str, overwrite: dict = dict(), **kwargs: dict | tuple) -> # foreign for field in ids: - if field == '': - result['id'] = field.integer('exact') + if field == "": + result["id"] = field.integer("exact") continue name = overwrite.get(field, field) - result[name] = Field.id('') + result[name] = Field.id("") for field in slugs: - if field == '': - result['id'] = Field.integer('exact') - result['slug'] = Field.string('exact') + if field == "": + result["id"] = Field.integer("exact") + result["slug"] = Field.string("exact") continue name = overwrite.get(field, field) - result[name] = Field.slug('') + result[name] = Field.slug("") # fields @@ -302,7 +302,7 @@ def to_decimal_string(self, decimal: int | float) -> str: self.bc.format.to_decimal(1) # returns '1.000000000000000' ``` """ - return '%.15f' % round(decimal, 15) + return "%.15f" % round(decimal, 15) def _one_to_dict(self, arg) -> dict[str, Any]: """Parse the object to a `dict`""" @@ -313,7 +313,7 @@ def _one_to_dict(self, arg) -> dict[str, Any]: if isinstance(arg, dict): return arg.copy() - raise NotImplementedError(f'{arg.__name__} is not implemented yet') + raise NotImplementedError(f"{arg.__name__} is not implemented yet") def describe_models(self, models: dict[str, Model]) -> str: """ @@ -330,8 +330,8 @@ def describe_models(self, models: dict[str, Model]) -> str: ``` """ - title_spaces = ' ' * 8 - model_spaces = ' ' * 10 + title_spaces = " " * 8 + model_spaces = " " * 10 result = {} for key in models: @@ -346,10 +346,10 @@ def describe_models(self, models: dict[str, Model]) -> str: name, obj = self._describe_model(model) result[name] = obj - print(title_spaces + 'Descriptions of models are being generated:') + print(title_spaces + "Descriptions of models are being generated:") - for line in yaml.dump(result).split('\n'): - if not line.startswith(' '): + for line in yaml.dump(result).split("\n"): + if not line.startswith(" "): print() print(model_spaces + line) @@ -357,29 +357,29 @@ def describe_models(self, models: dict[str, Model]) -> str: # This make sure the element are being printed and prevent `describe_models` are pushed to dev branch assert False - #TODO: this method is buggy in the line `if not hasattr(model, key)` + # TODO: this method is buggy in the line `if not hasattr(model, key)` def _describe_model(self, model: Model): pk_name = self._get_pk_name(model) attrs = dir(model) result = {} for key in attrs: - if key.startswith('_'): + if key.startswith("_"): continue - if key == 'DoesNotExist': + if key == "DoesNotExist": continue - if key == 'MultipleObjectsReturned': + if key == "MultipleObjectsReturned": continue - if key.startswith('get_next_'): + if key.startswith("get_next_"): continue - if key.startswith('get_previous_'): + if key.startswith("get_previous_"): continue - if key.endswith('_set'): + if key.endswith("_set"): continue if not hasattr(model, key): @@ -387,19 +387,19 @@ def _describe_model(self, model: Model): attr = getattr(model, key) - if attr.__class__.__name__ == 'method': + if attr.__class__.__name__ == "method": continue if isinstance(attr, Model): - result[key] = f'{attr.__class__.__name__}({self._get_pk_name(attr)}={self._repr_pk(attr.pk)})' + result[key] = f"{attr.__class__.__name__}({self._get_pk_name(attr)}={self._repr_pk(attr.pk)})" - elif attr.__class__.__name__ == 'ManyRelatedManager': + elif attr.__class__.__name__ == "ManyRelatedManager": instances = [ - f'{attr.model.__name__}({self._get_pk_name(x)}={self._repr_pk(x.pk)})' for x in 
attr.get_queryset() + f"{attr.model.__name__}({self._get_pk_name(x)}={self._repr_pk(x.pk)})" for x in attr.get_queryset() ] result[key] = instances - return (f'{model.__class__.__name__}({pk_name}={self._repr_pk(model.pk)})', result) + return (f"{model.__class__.__name__}({pk_name}={self._repr_pk(model.pk)})", result) def _repr_pk(self, pk: str | int) -> int | str: if isinstance(pk, int): @@ -411,17 +411,24 @@ def _get_pk_name(self, model: Model): from django.db.models.fields import Field, SlugField attrs = [ - x for x in dir(model) - if hasattr(model.__class__, x) and (isinstance(getattr(model.__class__, x), SlugField) or isinstance( - getattr(model.__class__, x), SlugField)) and getattr(model.__class__, x).primary_key + x + for x in dir(model) + if hasattr(model.__class__, x) + and ( + isinstance(getattr(model.__class__, x), SlugField) or isinstance(getattr(model.__class__, x), SlugField) + ) + and getattr(model.__class__, x).primary_key ] for key in dir(model): - if (hasattr(model.__class__, key) and hasattr(getattr(model.__class__, key), 'field') - and getattr(model.__class__, key).field.primary_key): + if ( + hasattr(model.__class__, key) + and hasattr(getattr(model.__class__, key), "field") + and getattr(model.__class__, key).field.primary_key + ): return key - return 'pk' + return "pk" def from_base64(self, hash: str | bytes) -> str: """ diff --git a/breathecode/tests/mixins/breathecode_mixin/garbage_collector.py b/breathecode/tests/mixins/breathecode_mixin/garbage_collector.py index b00201548..03d7b00ad 100644 --- a/breathecode/tests/mixins/breathecode_mixin/garbage_collector.py +++ b/breathecode/tests/mixins/breathecode_mixin/garbage_collector.py @@ -5,13 +5,13 @@ from faker import Faker from . import interfaces -__all__ = ['Check'] +__all__ = ["Check"] fake = Faker() IMAGE_TYPES = { - 'png': 'PNG', - 'jpg': 'JPEG', - 'jpeg': 'JPEG', + "png": "PNG", + "jpg": "JPEG", + "jpeg": "JPEG", } diff --git a/breathecode/tests/mixins/breathecode_mixin/models_generator/__init__.py b/breathecode/tests/mixins/breathecode_mixin/models_generator/__init__.py index c520895a4..85f702382 100644 --- a/breathecode/tests/mixins/breathecode_mixin/models_generator/__init__.py +++ b/breathecode/tests/mixins/breathecode_mixin/models_generator/__init__.py @@ -1,4 +1,5 @@ """ Global mixins """ + from .generate_models_mixin import * diff --git a/breathecode/tests/mixins/breathecode_mixin/models_generator/generate_models_mixin.py b/breathecode/tests/mixins/breathecode_mixin/models_generator/generate_models_mixin.py index 0bc8c8e1a..d6a95c6de 100644 --- a/breathecode/tests/mixins/breathecode_mixin/models_generator/generate_models_mixin.py +++ b/breathecode/tests/mixins/breathecode_mixin/models_generator/generate_models_mixin.py @@ -1,19 +1,20 @@ """ Collections of mixins used to login in authorize microservice """ + from django.db.models import Model from breathecode.utils import AttrDict -__all__ = ['GenerateModelsMixin'] +__all__ = ["GenerateModelsMixin"] -class GenerateModelsMixin(): +class GenerateModelsMixin: def __detect_invalid_arguments__(self, models={}, **kwargs): """check if one argument is invalid to prevent errors""" for key in kwargs: - if key != 'authenticate' and not key.endswith('_kwargs') and not key in models: - print(f'key `{key}` should not be implemented in self.generate_models') + if key != "authenticate" and not key.endswith("_kwargs") and not key in models: + print(f"key `{key}` should not be implemented in self.generate_models") def __inject_models_in_instance__(self, models={}): """Add 
support to model.name instead of model['name']""" @@ -23,9 +24,9 @@ def __inject_models_in_instance__(self, models={}): def __flow_wrapper__(self, *args, **kwargs): models = {} - if 'models' in kwargs: - models = kwargs['models'].copy() - del kwargs['models'] + if "models" in kwargs: + models = kwargs["models"].copy() + del kwargs["models"] for func in args: models = func(models=models, **kwargs) @@ -54,14 +55,14 @@ def __inject_models__(self, models={}, **kwargs): return models def generate_models(self, models={}, **kwargs): - if '_new_implementation' not in kwargs: - print(f'The method `generate_models` is deprecated, use `self.bc.database.create` instead') + if "_new_implementation" not in kwargs: + print(f"The method `generate_models` is deprecated, use `self.bc.database.create` instead") else: - del kwargs['_new_implementation'] + del kwargs["_new_implementation"] - if 'authenticate' in kwargs: - print(f'The argument `authenticate` is deprecated, use `self.bc.request.authenticate` instead') + if "authenticate" in kwargs: + print(f"The argument `authenticate` is deprecated, use `self.bc.request.authenticate` instead") self.maxDiff = None models = models.copy() diff --git a/breathecode/tests/mixins/breathecode_mixin/models_generator/utils/argument_parser.py b/breathecode/tests/mixins/breathecode_mixin/models_generator/utils/argument_parser.py index b141d8c71..d5effcd3c 100644 --- a/breathecode/tests/mixins/breathecode_mixin/models_generator/utils/argument_parser.py +++ b/breathecode/tests/mixins/breathecode_mixin/models_generator/utils/argument_parser.py @@ -2,7 +2,7 @@ from breathecode.tests.mixins.generate_models_mixin.exceptions import BadArgument -__all__ = ['argument_parser'] +__all__ = ["argument_parser"] list_of_args = list[tuple[int, dict[str, Any]]] args = list[tuple[int, dict[str, Any]]] @@ -22,7 +22,7 @@ def boolean_parser(arg: int) -> args: def tuple_parser(arg: tuple[Any, Any]) -> list_of_args: if len(arg) != 2: - raise BadArgument('The tuple should have length of two elements') + raise BadArgument("The tuple should have length of two elements") if isinstance(arg[0], int) and isinstance(arg[1], dict): return (arg[0], arg[1] or dict()) @@ -30,7 +30,7 @@ def tuple_parser(arg: tuple[Any, Any]) -> list_of_args: if isinstance(arg[0], int) and isinstance(arg[1], dict): return (arg[1], arg[0] or dict()) - raise BadArgument(f'The tuple[{arg[0].__class__.__name__}, {arg[0].__class__.__name__}] is invalid') + raise BadArgument(f"The tuple[{arg[0].__class__.__name__}, {arg[0].__class__.__name__}] is invalid") def list_parser(arg: int) -> list_of_args: @@ -44,7 +44,7 @@ def list_parser(arg: int) -> list_of_args: result.append(tuple_parser(item)) continue - raise BadArgument(f'You can\'t pass a list of {arg.__class__.__name__} as argument') + raise BadArgument(f"You can't pass a list of {arg.__class__.__name__} as argument") return result @@ -65,5 +65,5 @@ def argument_parser(arg: Any) -> list_of_args: if isinstance(arg, int): return [integer_parser(arg)] - print(f'The argument parser has a receive a invalid type {arg.__class__.__name__}') + print(f"The argument parser has a receive a invalid type {arg.__class__.__name__}") return [] diff --git a/breathecode/tests/mixins/breathecode_mixin/models_generator/utils/create_models.py b/breathecode/tests/mixins/breathecode_mixin/models_generator/utils/create_models.py index 700f87679..4db9942cf 100644 --- a/breathecode/tests/mixins/breathecode_mixin/models_generator/utils/create_models.py +++ 
b/breathecode/tests/mixins/breathecode_mixin/models_generator/utils/create_models.py @@ -5,7 +5,7 @@ from .argument_parser import argument_parser -__all__ = ['create_models'] +__all__ = ["create_models"] list_of_args = list[tuple[int, dict[str, Any]]] args = list[tuple[int, dict[str, Any]]] @@ -17,10 +17,14 @@ def cycle(how_many): def create_models(attr, path, **kwargs): result = [ - cycle(how_many).blend(path, **{ - **kwargs, - **arguments, - }) for how_many, arguments in argument_parser(attr) + cycle(how_many).blend( + path, + **{ + **kwargs, + **arguments, + } + ) + for how_many, arguments in argument_parser(attr) ] if len(result) == 1: diff --git a/breathecode/tests/mixins/breathecode_mixin/models_generator/utils/get_list.py b/breathecode/tests/mixins/breathecode_mixin/models_generator/utils/get_list.py index cd516bb0a..73f3e449a 100644 --- a/breathecode/tests/mixins/breathecode_mixin/models_generator/utils/get_list.py +++ b/breathecode/tests/mixins/breathecode_mixin/models_generator/utils/get_list.py @@ -1,6 +1,6 @@ from typing import Any -__all__ = ['get_list'] +__all__ = ["get_list"] def get_list(attr: Any) -> bool: diff --git a/breathecode/tests/mixins/breathecode_mixin/models_generator/utils/is_valid.py b/breathecode/tests/mixins/breathecode_mixin/models_generator/utils/is_valid.py index fe82870a3..7f40b06f4 100644 --- a/breathecode/tests/mixins/breathecode_mixin/models_generator/utils/is_valid.py +++ b/breathecode/tests/mixins/breathecode_mixin/models_generator/utils/is_valid.py @@ -1,6 +1,6 @@ from typing import Any -__all__ = ['is_valid'] +__all__ = ["is_valid"] def is_valid(attr: Any) -> bool: diff --git a/breathecode/tests/mixins/breathecode_mixin/models_generator/utils/just_one.py b/breathecode/tests/mixins/breathecode_mixin/models_generator/utils/just_one.py index 698c5387c..e87f4cf32 100644 --- a/breathecode/tests/mixins/breathecode_mixin/models_generator/utils/just_one.py +++ b/breathecode/tests/mixins/breathecode_mixin/models_generator/utils/just_one.py @@ -1,6 +1,6 @@ from typing import Any -__all__ = ['just_one'] +__all__ = ["just_one"] def just_one(attr: Any) -> bool: diff --git a/breathecode/tests/mixins/breathecode_mixin/random.py b/breathecode/tests/mixins/breathecode_mixin/random.py index 0fa1de08d..dfaa4bc67 100644 --- a/breathecode/tests/mixins/breathecode_mixin/random.py +++ b/breathecode/tests/mixins/breathecode_mixin/random.py @@ -13,13 +13,13 @@ from . import interfaces -__all__ = ['Random', 'fake'] +__all__ = ["Random", "fake"] fake = Faker() IMAGE_TYPES = { - 'png': 'PNG', - 'jpg': 'JPEG', - 'jpeg': 'JPEG', + "png": "PNG", + "jpg": "JPEG", + "jpeg": "JPEG", } @@ -33,7 +33,7 @@ def __init__(self, parent, bc: interfaces.BreathecodeInterface) -> None: self._parent = parent self._bc = bc - def image(self, width: int = 10, height: int = 10, ext='png') -> tuple[TextIOWrapper, str]: + def image(self, width: int = 10, height: int = 10, ext="png") -> tuple[TextIOWrapper, str]: """ Generate a random image. 
@@ -46,14 +46,14 @@ def image(self, width: int = 10, height: int = 10, ext='png') -> tuple[TextIOWra """ size = (width, height) - filename = fake.slug() + f'.{ext}' - image = Image.new('RGB', size) + filename = fake.slug() + f".{ext}" + image = Image.new("RGB", size) arr = np.random.randint(low=0, high=255, size=(size[1], size[0])) - image = Image.fromarray(arr.astype('uint8')) + image = Image.fromarray(arr.astype("uint8")) image.save(filename, IMAGE_TYPES[ext]) - file = open(filename, 'rb') + file = open(filename, "rb") self._bc.garbage_collector.register_image(file) @@ -73,7 +73,7 @@ def file(self) -> tuple[TextIOWrapper, str]: ext = self.string(lower=True, size=2) - file = tempfile.NamedTemporaryFile(suffix=f'.{ext}', delete=False) + file = tempfile.NamedTemporaryFile(suffix=f".{ext}", delete=False) file.write(os.urandom(1024)) self._bc.garbage_collector.register_file(file) @@ -81,7 +81,7 @@ def file(self) -> tuple[TextIOWrapper, str]: return file, file.name def string(self, lower=False, upper=False, symbol=False, number=False, size=0) -> str: - chars = '' + chars = "" if lower: chars = chars + string.ascii_lowercase @@ -95,4 +95,4 @@ def string(self, lower=False, upper=False, symbol=False, number=False, size=0) - if number: chars = chars + string.digits - return ''.join(random.choices(chars, k=size)) + return "".join(random.choices(chars, k=size)) diff --git a/breathecode/tests/mixins/breathecode_mixin/request.py b/breathecode/tests/mixins/breathecode_mixin/request.py index e795fea45..407ed130c 100644 --- a/breathecode/tests/mixins/breathecode_mixin/request.py +++ b/breathecode/tests/mixins/breathecode_mixin/request.py @@ -5,7 +5,7 @@ import jwt from rest_framework.test import APIClient, APITestCase -__all__ = ['Request'] +__all__ = ["Request"] class Request: @@ -28,19 +28,22 @@ def set_headers(self, **kargs: str) -> None: self.bc.request.set_headers(academy=1, thing_of_importance='potato') ``` """ - warn('Use rest_framework.test.APIClient instead. Example: client.get(..., headers={...})', - DeprecationWarning, - stacklevel=2) + warn( + "Use rest_framework.test.APIClient instead. 
Example: client.get(..., headers={...})", + DeprecationWarning, + stacklevel=2, + ) headers = {} items = [ - index for index in kargs + index + for index in kargs if kargs[index] and (isinstance(kargs[index], str) or isinstance(kargs[index], int)) ] for index in items: - headers[f'HTTP_{index.upper()}'] = str(kargs[index]) + headers[f"HTTP_{index.upper()}"] = str(kargs[index]) self._parent.client.credentials(**headers) @@ -62,7 +65,7 @@ def authenticate(self, user) -> None: - user: a instance of user model `breathecode.authenticate.models.User` """ - warn('Use `client.manual_authentication` instead', DeprecationWarning, stacklevel=2) + warn("Use `client.manual_authentication` instead", DeprecationWarning, stacklevel=2) self._parent.client.force_authenticate(user=user) def manual_authentication(self, user) -> None: @@ -83,16 +86,14 @@ def manual_authentication(self, user) -> None: """ from breathecode.authenticate.models import Token - warn('Use `client.credentials` instead', DeprecationWarning, stacklevel=2) + warn("Use `client.credentials` instead", DeprecationWarning, stacklevel=2) token = Token.objects.create(user=user) - self._parent.client.credentials(HTTP_AUTHORIZATION=f'Token {token.key}') + self._parent.client.credentials(HTTP_AUTHORIZATION=f"Token {token.key}") - def sign_jwt_link(self, - app, - user_id: Optional[int] = None, - reverse: bool = False, - client: Optional[APIClient] = None): + def sign_jwt_link( + self, app, user_id: Optional[int] = None, reverse: bool = False, client: Optional[APIClient] = None + ): """ Set Json Web Token in the request. @@ -121,30 +122,30 @@ def sign_jwt_link(self, # https://datatracker.ietf.org/doc/html/rfc7519#section-4 payload = { - 'sub': user_id, - 'iss': os.getenv('API_URL', 'http://localhost:8000'), - 'app': app.slug, - 'aud': 'breathecode', - 'exp': datetime.timestamp(now + timedelta(minutes=2)), - 'iat': datetime.timestamp(now) - 1, - 'typ': 'JWT', + "sub": user_id, + "iss": os.getenv("API_URL", "http://localhost:8000"), + "app": app.slug, + "aud": "breathecode", + "exp": datetime.timestamp(now + timedelta(minutes=2)), + "iat": datetime.timestamp(now) - 1, + "typ": "JWT", } if reverse: - payload['aud'] = app.slug - payload['app'] = 'breathecode' + payload["aud"] = app.slug + payload["app"] = "breathecode" - if app.algorithm == 'HMAC_SHA256': + if app.algorithm == "HMAC_SHA256": - token = jwt.encode(payload, bytes.fromhex(app.private_key), algorithm='HS256') + token = jwt.encode(payload, bytes.fromhex(app.private_key), algorithm="HS256") - elif app.algorithm == 'HMAC_SHA512': - token = jwt.encode(payload, bytes.fromhex(app.private_key), algorithm='HS512') + elif app.algorithm == "HMAC_SHA512": + token = jwt.encode(payload, bytes.fromhex(app.private_key), algorithm="HS512") - elif app.algorithm == 'ED25519': - token = jwt.encode(payload, bytes.fromhex(app.private_key), algorithm='EdDSA') + elif app.algorithm == "ED25519": + token = jwt.encode(payload, bytes.fromhex(app.private_key), algorithm="EdDSA") else: - raise Exception('Algorithm not implemented') + raise Exception("Algorithm not implemented") - client.credentials(HTTP_AUTHORIZATION=f'Link App={app.slug},Token={token}') + client.credentials(HTTP_AUTHORIZATION=f"Link App={app.slug},Token={token}") diff --git a/breathecode/tests/mixins/cache_mixin.py b/breathecode/tests/mixins/cache_mixin.py index 2df7dc110..079a16c2e 100644 --- a/breathecode/tests/mixins/cache_mixin.py +++ b/breathecode/tests/mixins/cache_mixin.py @@ -1,12 +1,13 @@ """ Cache mixin """ + from django.core.cache import 
cache -__all__ = ['CacheMixin'] +__all__ = ["CacheMixin"] -class CacheMixin(): +class CacheMixin: """Cache mixin""" def clear_cache(self) -> None: diff --git a/breathecode/tests/mixins/date_formatter_mixin.py b/breathecode/tests/mixins/date_formatter_mixin.py index 778122f46..867a4fba4 100644 --- a/breathecode/tests/mixins/date_formatter_mixin.py +++ b/breathecode/tests/mixins/date_formatter_mixin.py @@ -1,13 +1,14 @@ """ Format date to common rest format """ + import re from datetime import datetime, date -__all__ = ['DateFormatterMixin'] +__all__ = ["DateFormatterMixin"] -class DateFormatterMixin(): +class DateFormatterMixin: """Setup ENV variable""" def date_today(self): @@ -17,11 +18,11 @@ def date_today(self): def date_today_to_iso_format(self, literal=None): """get current date with iso format""" current = literal if literal else self.date_today() - return re.sub(r'\+00:00$', 'Z', current.isoformat()) + return re.sub(r"\+00:00$", "Z", current.isoformat()) def datetime_iso_format_to_date_string(self, current: str): """get current date with iso format""" - return current.split('T')[0] + return current.split("T")[0] def datetime_today(self): """get current datetime""" @@ -30,4 +31,4 @@ def datetime_today(self): def datetime_today_to_iso_format(self, literal=None): """get current datetime with iso format""" current = literal if literal else self.datetime_today() - return re.sub(r'\+00:00$', 'Z', current.isoformat()) + return re.sub(r"\+00:00$", "Z", current.isoformat()) diff --git a/breathecode/tests/mixins/datetime_mixin.py b/breathecode/tests/mixins/datetime_mixin.py index 9abc55adb..a4ff74ce1 100644 --- a/breathecode/tests/mixins/datetime_mixin.py +++ b/breathecode/tests/mixins/datetime_mixin.py @@ -9,14 +9,14 @@ from breathecode.utils.datetime_integer import DatetimeInteger -__all__ = ['DatetimeMixin'] +__all__ = ["DatetimeMixin"] -class DatetimeMixin(): +class DatetimeMixin: """Datetime mixin""" def time_to_string(self, t: datetime) -> str: - return t.strftime('%H:%M:%S') + return t.strftime("%H:%M:%S") def datetime_now(self) -> datetime: """ @@ -41,7 +41,7 @@ def datetime_to_iso(self, date=datetime.now(UTC)) -> str: self.bc.datetime.to_iso_string(utc_now) # equals to '2022-03-21T07:51:55.068Z' ``` """ - return re.sub(r'\+00:00$', 'Z', date.replace(tzinfo=UTC).isoformat()) + return re.sub(r"\+00:00$", "Z", date.replace(tzinfo=UTC).isoformat()) def integer_to_iso(self, timezone: str, integer: int) -> str: return DatetimeInteger.to_iso_string(timezone, integer) @@ -62,14 +62,14 @@ def iso_to_datetime(self, iso: str) -> datetime: self.bc.datetime.from_iso_string('2022-03-21T07:51:55.068Z') ``` """ - string = re.sub(r'Z$', '', iso) + string = re.sub(r"Z$", "", iso) date = datetime.fromisoformat(string) return timezone.make_aware(date) def datetime_to_ical(self, date=datetime.now(UTC), utc=True) -> str: - s = f'{date.year:04}{date.month:02}{date.day:02}T{date.hour:02}{date.minute:02}{date.second:02}' + s = f"{date.year:04}{date.month:02}{date.day:02}T{date.hour:02}{date.minute:02}{date.second:02}" if utc: - s += 'Z' + s += "Z" return s @@ -79,7 +79,7 @@ def assertDatetime(self, date: datetime) -> bool: return True try: - string = re.sub(r'Z$', '', date) + string = re.sub(r"Z$", "", date) datetime.fromisoformat(string) self.assertTrue(True) return True diff --git a/breathecode/tests/mixins/development_environment.py b/breathecode/tests/mixins/development_environment.py index 3f6dcb27d..a4cfc3a50 100644 --- a/breathecode/tests/mixins/development_environment.py +++ 
b/breathecode/tests/mixins/development_environment.py @@ -1,13 +1,14 @@ """ Setup development environment """ + import os -__all__ = ['DevelopmentEnvironment'] +__all__ = ["DevelopmentEnvironment"] -class DevelopmentEnvironment(): +class DevelopmentEnvironment: """Setup ENV variable""" def __init__(self): - os.environ['ENV'] = 'development' + os.environ["ENV"] = "development" diff --git a/breathecode/tests/mixins/generate_models_mixin/__init__.py b/breathecode/tests/mixins/generate_models_mixin/__init__.py index c520895a4..85f702382 100644 --- a/breathecode/tests/mixins/generate_models_mixin/__init__.py +++ b/breathecode/tests/mixins/generate_models_mixin/__init__.py @@ -1,4 +1,5 @@ """ Global mixins """ + from .generate_models_mixin import * diff --git a/breathecode/tests/mixins/generate_models_mixin/admissions_models_mixin.py b/breathecode/tests/mixins/generate_models_mixin/admissions_models_mixin.py index 3c0547471..e08630cf4 100644 --- a/breathecode/tests/mixins/generate_models_mixin/admissions_models_mixin.py +++ b/breathecode/tests/mixins/generate_models_mixin/admissions_models_mixin.py @@ -1,6 +1,7 @@ """ Collections of mixins used to login in authorize microservice """ + from random import choice, randint from breathecode.admissions.models import Cohort @@ -9,16 +10,21 @@ from .utils import create_models, is_valid, just_one TIMEZONES = [ - 'America/New_York', 'America/Bogota', 'America/Santiago', 'America/Buenos_Aires', 'Europe/Madrid', 'America/Caracas' + "America/New_York", + "America/Bogota", + "America/Santiago", + "America/Buenos_Aires", + "Europe/Madrid", + "America/Caracas", ] def random_datetime_integer(): - year = '{:04d}'.format(randint(2021, 2999)) - month = '{:02d}'.format(randint(1, 12)) - day = '{:02d}'.format(randint(1, 28)) - hour = '{:02d}'.format(randint(0, 23)) - minute = '{:02d}'.format(randint(0, 59)) + year = "{:04d}".format(randint(2021, 2999)) + month = "{:02d}".format(randint(1, 12)) + day = "{:02d}".format(randint(1, 28)) + hour = "{:02d}".format(randint(0, 23)) + minute = "{:02d}".format(randint(0, 59)) return int(year + month + day + hour + minute) @@ -29,201 +35,237 @@ def count_cohort_stage(self, cohort_id): cohort = Cohort.objects.get(id=cohort_id) return cohort.stage - def generate_admissions_models(self, - mentorship_service=False, - academy=False, - event_type=False, - cohort=False, - profile_academy=False, - cohort_user=False, - city=False, - syllabus_schedule=False, - country=False, - skip_cohort=False, - syllabus=False, - cohort_time_slot=False, - syllabus_version=False, - syllabus_schedule_time_slot=False, - monitor_script=False, - mentor_profile=False, - user_specialty=False, - asset_category=False, - keyword_cluster=False, - asset_keyword=False, - bag=False, - subscription=False, - event_type_visibility_setting=False, - mentorship_service_set=False, - event_type_set=False, - event_type_set_translation=False, - mentorship_service_set_translation=False, - live_class=False, - course=False, - course_translation=False, - provisioning_profile=False, - provisioning_academy=False, - provisioning_bill=False, - github_academy_user=False, - github_academy_user_log=False, - cohort_set=False, - invoice=False, - plan_financing=False, - service_set=False, - service_set_translation=False, - country_kwargs={}, - city_kwargs={}, - cohort_time_slot_kwargs={}, - academy_kwargs={}, - syllabus_schedule_kwargs={}, - syllabus_kwargs={}, - cohort_kwargs={}, - cohort_user_kwargs={}, - syllabus_schedule_time_slot_kwargs={}, - syllabus_version_kwargs={}, - models={}, - 
**kwargs): + def generate_admissions_models( + self, + mentorship_service=False, + academy=False, + event_type=False, + cohort=False, + profile_academy=False, + cohort_user=False, + city=False, + syllabus_schedule=False, + country=False, + skip_cohort=False, + syllabus=False, + cohort_time_slot=False, + syllabus_version=False, + syllabus_schedule_time_slot=False, + monitor_script=False, + mentor_profile=False, + user_specialty=False, + asset_category=False, + keyword_cluster=False, + asset_keyword=False, + bag=False, + subscription=False, + event_type_visibility_setting=False, + mentorship_service_set=False, + event_type_set=False, + event_type_set_translation=False, + mentorship_service_set_translation=False, + live_class=False, + course=False, + course_translation=False, + provisioning_profile=False, + provisioning_academy=False, + provisioning_bill=False, + github_academy_user=False, + github_academy_user_log=False, + cohort_set=False, + invoice=False, + plan_financing=False, + service_set=False, + service_set_translation=False, + country_kwargs={}, + city_kwargs={}, + cohort_time_slot_kwargs={}, + academy_kwargs={}, + syllabus_schedule_kwargs={}, + syllabus_kwargs={}, + cohort_kwargs={}, + cohort_user_kwargs={}, + syllabus_schedule_time_slot_kwargs={}, + syllabus_version_kwargs={}, + models={}, + **kwargs + ): models = models.copy() - if not 'country' in models and (is_valid(country) or is_valid(city) or is_valid(academy) - or is_valid(profile_academy) or is_valid(event_type) - or is_valid(event_type_visibility_setting) or is_valid(mentorship_service_set)): + if not "country" in models and ( + is_valid(country) + or is_valid(city) + or is_valid(academy) + or is_valid(profile_academy) + or is_valid(event_type) + or is_valid(event_type_visibility_setting) + or is_valid(mentorship_service_set) + ): kargs = {} - models['country'] = create_models(country, 'admissions.Country', **{**kargs, **country_kwargs}) - - if not 'city' in models and (is_valid(city) or is_valid(country) or is_valid(academy) - or is_valid(profile_academy) or is_valid(event_type) - or is_valid(event_type_visibility_setting) or is_valid(mentorship_service_set)): + models["country"] = create_models(country, "admissions.Country", **{**kargs, **country_kwargs}) + + if not "city" in models and ( + is_valid(city) + or is_valid(country) + or is_valid(academy) + or is_valid(profile_academy) + or is_valid(event_type) + or is_valid(event_type_visibility_setting) + or is_valid(mentorship_service_set) + ): kargs = {} - if 'country' in models: - kargs['country'] = just_one(models['country']) - - models['city'] = create_models(city, 'admissions.City', **{**kargs, **city_kwargs}) - - if not 'academy' in models and ( - is_valid(academy) or is_valid(profile_academy) or is_valid(syllabus) or is_valid(cohort) - or is_valid(monitor_script) or is_valid(mentorship_service) or is_valid(mentor_profile) - or is_valid(user_specialty) or is_valid(asset_category) or is_valid(keyword_cluster) - or is_valid(asset_keyword) or is_valid(bag) or is_valid(subscription) or is_valid(event_type) - or is_valid(event_type_visibility_setting) or is_valid(mentorship_service_set) or is_valid(course) - or is_valid(course_translation) or is_valid(event_type_set) or is_valid(event_type_set_translation) - or is_valid(mentorship_service_set) or is_valid(mentorship_service_set_translation) - or is_valid(provisioning_profile) or is_valid(provisioning_academy) or is_valid(provisioning_bill) - or is_valid(github_academy_user) or is_valid(github_academy_user_log) or 
is_valid(cohort_set) - or is_valid(invoice) or is_valid(plan_financing) or is_valid(service_set) - or is_valid(service_set_translation)): + if "country" in models: + kargs["country"] = just_one(models["country"]) + + models["city"] = create_models(city, "admissions.City", **{**kargs, **city_kwargs}) + + if not "academy" in models and ( + is_valid(academy) + or is_valid(profile_academy) + or is_valid(syllabus) + or is_valid(cohort) + or is_valid(monitor_script) + or is_valid(mentorship_service) + or is_valid(mentor_profile) + or is_valid(user_specialty) + or is_valid(asset_category) + or is_valid(keyword_cluster) + or is_valid(asset_keyword) + or is_valid(bag) + or is_valid(subscription) + or is_valid(event_type) + or is_valid(event_type_visibility_setting) + or is_valid(mentorship_service_set) + or is_valid(course) + or is_valid(course_translation) + or is_valid(event_type_set) + or is_valid(event_type_set_translation) + or is_valid(mentorship_service_set) + or is_valid(mentorship_service_set_translation) + or is_valid(provisioning_profile) + or is_valid(provisioning_academy) + or is_valid(provisioning_bill) + or is_valid(github_academy_user) + or is_valid(github_academy_user_log) + or is_valid(cohort_set) + or is_valid(invoice) + or is_valid(plan_financing) + or is_valid(service_set) + or is_valid(service_set_translation) + ): kargs = {} - if 'country' in models: - kargs['country'] = just_one(models['country']) + if "country" in models: + kargs["country"] = just_one(models["country"]) - if 'city' in models: - kargs['city'] = just_one(models['city']) + if "city" in models: + kargs["city"] = just_one(models["city"]) - models['academy'] = create_models(academy, 'admissions.Academy', **{**kargs, **academy_kwargs}) + models["academy"] = create_models(academy, "admissions.Academy", **{**kargs, **academy_kwargs}) - if not 'syllabus' in models and (is_valid(syllabus) or is_valid(syllabus_version) or is_valid(course) - or is_valid(course_translation)): + if not "syllabus" in models and ( + is_valid(syllabus) or is_valid(syllabus_version) or is_valid(course) or is_valid(course_translation) + ): kargs = {} - if 'academy' in models: - kargs['academy_owner'] = just_one(models['academy']) + if "academy" in models: + kargs["academy_owner"] = just_one(models["academy"]) - models['syllabus'] = create_models(syllabus, 'admissions.Syllabus', **{**kargs, **syllabus_kwargs}) + models["syllabus"] = create_models(syllabus, "admissions.Syllabus", **{**kargs, **syllabus_kwargs}) - if not 'syllabus_version' in models and is_valid(syllabus_version): + if not "syllabus_version" in models and is_valid(syllabus_version): kargs = {} - if 'syllabus' in models: - kargs['syllabus'] = just_one(models['syllabus']) + if "syllabus" in models: + kargs["syllabus"] = just_one(models["syllabus"]) - models['syllabus_version'] = create_models(syllabus_version, 'admissions.SyllabusVersion', **{ - **kargs, - **syllabus_version_kwargs - }) + models["syllabus_version"] = create_models( + syllabus_version, "admissions.SyllabusVersion", **{**kargs, **syllabus_version_kwargs} + ) - if not 'syllabus_schedule' in models and (is_valid(syllabus_schedule) or is_valid(syllabus_schedule_time_slot)): + if not "syllabus_schedule" in models and (is_valid(syllabus_schedule) or is_valid(syllabus_schedule_time_slot)): kargs = {} - if 'syllabus' in models: - kargs['syllabus'] = just_one(models['syllabus']) + if "syllabus" in models: + kargs["syllabus"] = just_one(models["syllabus"]) - if 'academy' in models: - kargs['academy'] = 
just_one(models['academy']) + if "academy" in models: + kargs["academy"] = just_one(models["academy"]) - models['syllabus_schedule'] = create_models(syllabus_schedule, 'admissions.SyllabusSchedule', **{ - **kargs, - **syllabus_schedule_kwargs - }) + models["syllabus_schedule"] = create_models( + syllabus_schedule, "admissions.SyllabusSchedule", **{**kargs, **syllabus_schedule_kwargs} + ) - if not 'cohort' in models and not skip_cohort and (is_valid(cohort) or is_valid(profile_academy) - or is_valid(cohort_user) or is_valid(cohort_set)): + if ( + not "cohort" in models + and not skip_cohort + and (is_valid(cohort) or is_valid(profile_academy) or is_valid(cohort_user) or is_valid(cohort_set)) + ): kargs = {} - if profile_academy or 'academy' in models: - kargs['academy'] = just_one(models['academy']) + if profile_academy or "academy" in models: + kargs["academy"] = just_one(models["academy"]) - if 'syllabus_version' in models or syllabus_version: - kargs['syllabus_version'] = just_one(models['syllabus_version']) + if "syllabus_version" in models or syllabus_version: + kargs["syllabus_version"] = just_one(models["syllabus_version"]) - if 'syllabus_schedule' in models or syllabus_schedule: - kargs['schedule'] = just_one(models['syllabus_schedule']) + if "syllabus_schedule" in models or syllabus_schedule: + kargs["schedule"] = just_one(models["syllabus_schedule"]) - if 'academy' in models: - kargs['academy'] = just_one(models['academy']) + if "academy" in models: + kargs["academy"] = just_one(models["academy"]) - models['cohort'] = create_models(cohort, 'admissions.Cohort', **{**kargs, **cohort_kwargs}) + models["cohort"] = create_models(cohort, "admissions.Cohort", **{**kargs, **cohort_kwargs}) - if not 'cohort_user' in models and not skip_cohort and is_valid(cohort_user): + if not "cohort_user" in models and not skip_cohort and is_valid(cohort_user): kargs = {} - if 'user' in models: - kargs['user'] = just_one(models['user']) + if "user" in models: + kargs["user"] = just_one(models["user"]) - if 'cohort' in models: - kargs['cohort'] = just_one(models['cohort']) + if "cohort" in models: + kargs["cohort"] = just_one(models["cohort"]) - models['cohort_user'] = create_models(cohort_user, 'admissions.CohortUser', **{ - **kargs, - **cohort_user_kwargs - }) + models["cohort_user"] = create_models( + cohort_user, "admissions.CohortUser", **{**kargs, **cohort_user_kwargs} + ) - if not 'syllabus_schedule_time_slot' in models and is_valid(syllabus_schedule_time_slot): + if not "syllabus_schedule_time_slot" in models and is_valid(syllabus_schedule_time_slot): kargs = { - 'starting_at': random_datetime_integer(), - 'ending_at': random_datetime_integer(), - 'timezone': choice(TIMEZONES), + "starting_at": random_datetime_integer(), + "ending_at": random_datetime_integer(), + "timezone": choice(TIMEZONES), } - if kargs['starting_at'] > kargs['ending_at']: - kargs['starting_at'], kargs['ending_at'] = kargs['ending_at'], kargs['starting_at'] + if kargs["starting_at"] > kargs["ending_at"]: + kargs["starting_at"], kargs["ending_at"] = kargs["ending_at"], kargs["starting_at"] - if 'syllabus_schedule' in models: - kargs['schedule'] = just_one(models['syllabus_schedule']) + if "syllabus_schedule" in models: + kargs["schedule"] = just_one(models["syllabus_schedule"]) - models['syllabus_schedule_time_slot'] = create_models(syllabus_schedule_time_slot, - 'admissions.SyllabusScheduleTimeSlot', **{ - **kargs, - **syllabus_schedule_time_slot_kwargs - }) + models["syllabus_schedule_time_slot"] = create_models( + 
syllabus_schedule_time_slot, + "admissions.SyllabusScheduleTimeSlot", + **{**kargs, **syllabus_schedule_time_slot_kwargs} + ) - if not 'cohort_time_slot' in models and (is_valid(cohort_time_slot) or is_valid(live_class)): + if not "cohort_time_slot" in models and (is_valid(cohort_time_slot) or is_valid(live_class)): kargs = { - 'starting_at': random_datetime_integer(), - 'ending_at': random_datetime_integer(), - 'timezone': choice(TIMEZONES), + "starting_at": random_datetime_integer(), + "ending_at": random_datetime_integer(), + "timezone": choice(TIMEZONES), } - if kargs['starting_at'] > kargs['ending_at']: - kargs['starting_at'], kargs['ending_at'] = kargs['ending_at'], kargs['starting_at'] + if kargs["starting_at"] > kargs["ending_at"]: + kargs["starting_at"], kargs["ending_at"] = kargs["ending_at"], kargs["starting_at"] - if 'cohort' in models: - kargs['cohort'] = just_one(models['cohort']) + if "cohort" in models: + kargs["cohort"] = just_one(models["cohort"]) - models['cohort_time_slot'] = create_models(cohort_time_slot, 'admissions.CohortTimeSlot', **{ - **kargs, - **cohort_time_slot_kwargs - }) + models["cohort_time_slot"] = create_models( + cohort_time_slot, "admissions.CohortTimeSlot", **{**kargs, **cohort_time_slot_kwargs} + ) return models diff --git a/breathecode/tests/mixins/generate_models_mixin/assessment_models_mixin.py b/breathecode/tests/mixins/generate_models_mixin/assessment_models_mixin.py index d25079bae..8e6142af5 100644 --- a/breathecode/tests/mixins/generate_models_mixin/assessment_models_mixin.py +++ b/breathecode/tests/mixins/generate_models_mixin/assessment_models_mixin.py @@ -1,6 +1,7 @@ """ Collections of mixins used to login in authorize microservice """ + import os from breathecode.tests.mixins.models_mixin import ModelsMixin from mixer.backend.django import mixer @@ -9,77 +10,78 @@ class AssessmentModelsMixin(ModelsMixin): - def generate_assessment_models(self, - assessment=False, - question=False, - academy=False, - option=False, - student_assessment=False, - answer=False, - user=False, - assessment_kwargs={}, - question_kwargs={}, - option_kwargs={}, - student_assessment_kwargs={}, - answer_kwargs={}, - models={}, - **kwargs): + def generate_assessment_models( + self, + assessment=False, + question=False, + academy=False, + option=False, + student_assessment=False, + answer=False, + user=False, + assessment_kwargs={}, + question_kwargs={}, + option_kwargs={}, + student_assessment_kwargs={}, + answer_kwargs={}, + models={}, + **kwargs + ): """Generate models""" models = models.copy() - if not 'assessment' in models and is_valid(assessment): + if not "assessment" in models and is_valid(assessment): kargs = {} - if 'academy' in models or academy: - kargs['academy'] = just_one(models['academy']) + if "academy" in models or academy: + kargs["academy"] = just_one(models["academy"]) - if 'user' in models or user: - kargs['author'] = just_one(models['user']) + if "user" in models or user: + kargs["author"] = just_one(models["user"]) - models['assessment'] = create_models(assessment, 'assessment.Assessment', **{**kargs, **assessment_kwargs}) + models["assessment"] = create_models(assessment, "assessment.Assessment", **{**kargs, **assessment_kwargs}) - if not 'question' in models and is_valid(question): + if not "question" in models and is_valid(question): kargs = {} - if 'assessment' in models or assessment: - kargs['assessment'] = just_one(models['assessment']) + if "assessment" in models or assessment: + kargs["assessment"] = just_one(models["assessment"]) - if 
'user' in models or user: - kargs['author'] = just_one(models['user']) + if "user" in models or user: + kargs["author"] = just_one(models["user"]) - models['question'] = create_models(question, 'assessment.Question', **{**kargs, **question_kwargs}) + models["question"] = create_models(question, "assessment.Question", **{**kargs, **question_kwargs}) - if not 'option' in models and is_valid(option): + if not "option" in models and is_valid(option): kargs = {} - if 'question' in models or question: - kargs['question'] = just_one(models['question']) + if "question" in models or question: + kargs["question"] = just_one(models["question"]) - models['option'] = create_models(option, 'assessment.Option', **{**kargs, **option_kwargs}) + models["option"] = create_models(option, "assessment.Option", **{**kargs, **option_kwargs}) - if not 'student_assessment' in models and is_valid(student_assessment): + if not "student_assessment" in models and is_valid(student_assessment): kargs = {} - if 'academy' in models or academy: - kargs['academy'] = just_one(models['academy']) + if "academy" in models or academy: + kargs["academy"] = just_one(models["academy"]) - if 'assessment' in models or assessment: - kargs['assessment'] = just_one(models['assessment']) + if "assessment" in models or assessment: + kargs["assessment"] = just_one(models["assessment"]) - if 'user' in models or user: - kargs['student'] = just_one(models['user']) + if "user" in models or user: + kargs["student"] = just_one(models["user"]) - models['student_assessment'] = create_models(student_assessment, 'assessment.StudentAssessment', **{ - **kargs, - **student_assessment_kwargs - }) + models["student_assessment"] = create_models( + student_assessment, "assessment.StudentAssessment", **{**kargs, **student_assessment_kwargs} + ) - if not 'answer' in models and is_valid(answer): + if not "answer" in models and is_valid(answer): kargs = {} - if 'student_assessment' in models or student_assessment: - kargs['student_assesment'] = just_one(models['student_assessment']) + if "student_assessment" in models or student_assessment: + kargs["student_assesment"] = just_one(models["student_assessment"]) - models['answer'] = create_models(answer, 'assessment.Answer', **{**kargs, **answer_kwargs}) + models["answer"] = create_models(answer, "assessment.Answer", **{**kargs, **answer_kwargs}) return models diff --git a/breathecode/tests/mixins/generate_models_mixin/assignments_models_mixin.py b/breathecode/tests/mixins/generate_models_mixin/assignments_models_mixin.py index 4542a0e72..723c6288f 100644 --- a/breathecode/tests/mixins/generate_models_mixin/assignments_models_mixin.py +++ b/breathecode/tests/mixins/generate_models_mixin/assignments_models_mixin.py @@ -1,6 +1,7 @@ """ Collections of mixins used to login in authorize microservice """ + from breathecode.tests.mixins.models_mixin import ModelsMixin from mixer.backend.django import mixer from .utils import is_valid, create_models, just_one, get_list @@ -8,42 +9,44 @@ class AssignmentsModelsMixin(ModelsMixin): - def generate_assignments_models(self, - task=False, - cohort=False, - task_revision_status='', - models={}, - task_kwargs={}, - final_project=False, - final_project_kwargs={}, - **kwargs): + def generate_assignments_models( + self, + task=False, + cohort=False, + task_revision_status="", + models={}, + task_kwargs={}, + final_project=False, + final_project_kwargs={}, + **kwargs + ): models = models.copy() - if not 'cohort' in models and is_valid(cohort): + if not "cohort" in models and 
is_valid(cohort): kargs = {} - models['cohort'] = create_models(cohort, 'admissions.Cohort', **kargs) + models["cohort"] = create_models(cohort, "admissions.Cohort", **kargs) - if not 'task' in models and is_valid(task): + if not "task" in models and is_valid(task): kargs = {} - if 'user' in models: - kargs['user'] = just_one(models['user']) + if "user" in models: + kargs["user"] = just_one(models["user"]) - if 'cohort' in models: - kargs['cohort'] = just_one(models['cohort']) + if "cohort" in models: + kargs["cohort"] = just_one(models["cohort"]) if task_revision_status: - kargs['revision_status'] = just_one(kargs['revision_status']) + kargs["revision_status"] = just_one(kargs["revision_status"]) - models['task'] = create_models(task, 'assignments.Task', **{**kargs, **task_kwargs}) + models["task"] = create_models(task, "assignments.Task", **{**kargs, **task_kwargs}) - if not 'final_project' in models and is_valid(final_project): + if not "final_project" in models and is_valid(final_project): kargs = {} - if 'user' in models: - kargs['members'] = get_list(models['user']) + if "user" in models: + kargs["members"] = get_list(models["user"]) - models['final_project'] = create_models(final_project, 'assignments.FinalProject', **kargs) + models["final_project"] = create_models(final_project, "assignments.FinalProject", **kargs) return models diff --git a/breathecode/tests/mixins/generate_models_mixin/auth_mixin.py b/breathecode/tests/mixins/generate_models_mixin/auth_mixin.py index 0b0efd38a..0866feb06 100644 --- a/breathecode/tests/mixins/generate_models_mixin/auth_mixin.py +++ b/breathecode/tests/mixins/generate_models_mixin/auth_mixin.py @@ -1,6 +1,7 @@ """ Collections of mixins used to login in authorize microservice """ + from breathecode.tests.mixins.date_formatter_mixin import DateFormatterMixin from breathecode.tests.mixins.headers_mixin import HeadersMixin from breathecode.tests.mixins.models_mixin import ModelsMixin @@ -10,73 +11,90 @@ class AuthMixin(DateFormatterMixin, HeadersMixin, ModelsMixin): """CapacitiesTestCase with auth methods""" - password = 'pass1234' - - def generate_credentials(self, - user=False, - task=False, - authenticate=False, - manual_authenticate=False, - cohort_user=False, - slack_team=False, - group=False, - permission=False, - mentor_profile=False, - consumable=False, - invoice=False, - subscription=False, - bag=False, - user_setting=False, - consumption_session=False, - provisioning_container=False, - app_user_agreement=False, - first_party_credentials=False, - task_watcher=False, - hook=False, - profile_academy='', - user_kwargs={}, - group_kwargs={}, - permission_kwargs={}, - models={}, - **kwargs): + + password = "pass1234" + + def generate_credentials( + self, + user=False, + task=False, + authenticate=False, + manual_authenticate=False, + cohort_user=False, + slack_team=False, + group=False, + permission=False, + mentor_profile=False, + consumable=False, + invoice=False, + subscription=False, + bag=False, + user_setting=False, + consumption_session=False, + provisioning_container=False, + app_user_agreement=False, + first_party_credentials=False, + task_watcher=False, + hook=False, + profile_academy="", + user_kwargs={}, + group_kwargs={}, + permission_kwargs={}, + models={}, + **kwargs, + ): models = models.copy() - if not 'permission' in models and is_valid(permission): + if not "permission" in models and is_valid(permission): kargs = {} - models['permission'] = create_models(permission, 'auth.Permission', **{**kargs, **permission_kwargs}) + 
models["permission"] = create_models(permission, "auth.Permission", **{**kargs, **permission_kwargs}) - if not 'group' in models and is_valid(group): + if not "group" in models and is_valid(group): kargs = {} - if 'permission' in models: - kargs['permissions'] = get_list(models['permission']) - - models['group'] = create_models(group, 'auth.Group', **{**kargs, **group_kwargs}) - - if not 'user' in models and (is_valid(user) or is_valid(authenticate) or is_valid(profile_academy) - or is_valid(manual_authenticate) or is_valid(cohort_user) or is_valid(task) - or is_valid(slack_team) or is_valid(mentor_profile) or is_valid(consumable) - or is_valid(invoice) or is_valid(subscription) or is_valid(bag) - or is_valid(user_setting) or is_valid(consumption_session) - or is_valid(provisioning_container) or is_valid(app_user_agreement) - or is_valid(first_party_credentials) or is_valid(task_watcher) or is_valid(hook)): + if "permission" in models: + kargs["permissions"] = get_list(models["permission"]) + + models["group"] = create_models(group, "auth.Group", **{**kargs, **group_kwargs}) + + if not "user" in models and ( + is_valid(user) + or is_valid(authenticate) + or is_valid(profile_academy) + or is_valid(manual_authenticate) + or is_valid(cohort_user) + or is_valid(task) + or is_valid(slack_team) + or is_valid(mentor_profile) + or is_valid(consumable) + or is_valid(invoice) + or is_valid(subscription) + or is_valid(bag) + or is_valid(user_setting) + or is_valid(consumption_session) + or is_valid(provisioning_container) + or is_valid(app_user_agreement) + or is_valid(first_party_credentials) + or is_valid(task_watcher) + or is_valid(hook) + ): kargs = {} - if 'group' in models: - kargs['groups'] = get_list(models['group']) + if "group" in models: + kargs["groups"] = get_list(models["group"]) - if 'permission' in models: - kargs['user_permissions'] = get_list(models['permission']) + if "permission" in models: + kargs["user_permissions"] = get_list(models["permission"]) - models['user'] = create_models(user, 'auth.User', **{**kargs, **user_kwargs}) + models["user"] = create_models(user, "auth.User", **{**kargs, **user_kwargs}) if authenticate: - self.client.force_authenticate(user=models['user']) + self.client.force_authenticate(user=models["user"]) if manual_authenticate: from breathecode.authenticate.models import Token - token = Token.objects.create(user=models['user']) - self.client.credentials(HTTP_AUTHORIZATION=f'Token {token.key}') + token = Token.objects.create(user=models["user"]) + self.client.credentials(HTTP_AUTHORIZATION=f"Token {token.key}") return models diff --git a/breathecode/tests/mixins/generate_models_mixin/authenticate_models_mixin.py b/breathecode/tests/mixins/generate_models_mixin/authenticate_models_mixin.py index e1b6bf99b..44c6c8b23 100644 --- a/breathecode/tests/mixins/generate_models_mixin/authenticate_models_mixin.py +++ b/breathecode/tests/mixins/generate_models_mixin/authenticate_models_mixin.py @@ -1,6 +1,7 @@ """ Collections of mixins used to login in authorize microservice """ + from breathecode.tests.mixins import DateFormatterMixin from breathecode.tests.mixins.headers_mixin import HeadersMixin from breathecode.tests.mixins.models_mixin import ModelsMixin @@ -10,248 +11,248 @@ class AuthenticateMixin(DateFormatterMixin, HeadersMixin, ModelsMixin): """CapacitiesTestCase with auth methods""" - password = 'pass1234' - - def generate_authenticate_models(self, - profile_academy=False, - capability='', - role='', - profile=False, - user_invite=False, - 
credentials_github=False, - credentials_slack=False, - credentials_facebook=False, - credentials_quick_books=False, - academy_auth_settings=False, - github_academy_user=False, - profile_translation=False, - cohort_user=False, - token=False, - device_id=False, - user_setting=False, - github_academy_user_log=False, - pending_github_user=False, - profile_kwargs={}, - device_id_kwargs={}, - capability_kwargs={}, - role_kwargs={}, - user_invite_kwargs={}, - profile_academy_kwargs={}, - cohort_user_kwargs={}, - credentials_github_kwargs={}, - credentials_slack_kwargs={}, - credentials_facebook_kwargs={}, - credentials_quick_books_kwargs={}, - token_kwargs={}, - github_academy_user_kwargs={}, - academy_auth_settings_kwargs={}, - models={}, - **kwargs): + + password = "pass1234" + + def generate_authenticate_models( + self, + profile_academy=False, + capability="", + role="", + profile=False, + user_invite=False, + credentials_github=False, + credentials_slack=False, + credentials_facebook=False, + credentials_quick_books=False, + academy_auth_settings=False, + github_academy_user=False, + profile_translation=False, + cohort_user=False, + token=False, + device_id=False, + user_setting=False, + github_academy_user_log=False, + pending_github_user=False, + profile_kwargs={}, + device_id_kwargs={}, + capability_kwargs={}, + role_kwargs={}, + user_invite_kwargs={}, + profile_academy_kwargs={}, + cohort_user_kwargs={}, + credentials_github_kwargs={}, + credentials_slack_kwargs={}, + credentials_facebook_kwargs={}, + credentials_quick_books_kwargs={}, + token_kwargs={}, + github_academy_user_kwargs={}, + academy_auth_settings_kwargs={}, + models={}, + **kwargs + ): models = models.copy() - if not 'profile' in models and (is_valid(profile) or is_valid(profile_translation)): + if not "profile" in models and (is_valid(profile) or is_valid(profile_translation)): kargs = {} - if 'user' in models: - kargs['user'] = just_one(models['user']) + if "user" in models: + kargs["user"] = just_one(models["user"]) - models['profile'] = create_models(profile, 'authenticate.Profile', **{**kargs, **profile_kwargs}) + models["profile"] = create_models(profile, "authenticate.Profile", **{**kargs, **profile_kwargs}) - if not 'profile_translation' in models and is_valid(profile_translation): + if not "profile_translation" in models and is_valid(profile_translation): kargs = { - 'profile': just_one(models['profile']), + "profile": just_one(models["profile"]), } - models['profile_translation'] = create_models(profile_translation, 'authenticate.ProfileTranslation', - **kargs) + models["profile_translation"] = create_models( + profile_translation, "authenticate.ProfileTranslation", **kargs + ) - if not 'user_setting' in models and is_valid(user_setting): + if not "user_setting" in models and is_valid(user_setting): kargs = {} - if 'user' in models: - kargs['user'] = just_one(models['user']) + if "user" in models: + kargs["user"] = just_one(models["user"]) - models['user_setting'] = create_models(user_setting, 'authenticate.UserSetting', **kargs) + models["user_setting"] = create_models(user_setting, "authenticate.UserSetting", **kargs) - if not 'capability' in models and is_valid(capability): + if not "capability" in models and is_valid(capability): kargs = { - 'slug': capability, - 'description': capability, + "slug": capability, + "description": capability, } - models['capability'] = create_models(profile, 'authenticate.Capability', **{**kargs, **capability_kwargs}) + models["capability"] = create_models(profile, 
"authenticate.Capability", **{**kargs, **capability_kwargs}) - if not 'role' in models and (is_valid(role) or is_valid(profile_academy)): - kargs = { - 'slug': role, - 'name': role, - } if isinstance(role, str) else {} + if not "role" in models and (is_valid(role) or is_valid(profile_academy)): + kargs = ( + { + "slug": role, + "name": role, + } + if isinstance(role, str) + else {} + ) if capability: - kargs['capabilities'] = get_list(models['capability']) + kargs["capabilities"] = get_list(models["capability"]) - models['role'] = create_models(role if not isinstance(role, str) else {}, 'authenticate.Role', **{ - **kargs, - **role_kwargs - }) + models["role"] = create_models( + role if not isinstance(role, str) else {}, "authenticate.Role", **{**kargs, **role_kwargs} + ) - if not 'user_invite' in models and is_valid(user_invite): + if not "user_invite" in models and is_valid(user_invite): kargs = {} - if 'academy' in models: - kargs['academy'] = just_one(models['academy']) + if "academy" in models: + kargs["academy"] = just_one(models["academy"]) - if 'cohort' in models: - kargs['cohort'] = just_one(models['cohort']) + if "cohort" in models: + kargs["cohort"] = just_one(models["cohort"]) - if 'role' in models: - kargs['role'] = just_one(models['role']) + if "role" in models: + kargs["role"] = just_one(models["role"]) - if 'user' in models: - kargs['author'] = just_one(models['user']) + if "user" in models: + kargs["author"] = just_one(models["user"]) - models['user_invite'] = create_models(user_invite, 'authenticate.UserInvite', **{ - **kargs, - **user_invite_kwargs - }) + models["user_invite"] = create_models( + user_invite, "authenticate.UserInvite", **{**kargs, **user_invite_kwargs} + ) - if not 'profile_academy' in models and is_valid(profile_academy): + if not "profile_academy" in models and is_valid(profile_academy): kargs = {} - if 'user' in models: - kargs['user'] = just_one(models['user']) + if "user" in models: + kargs["user"] = just_one(models["user"]) - if 'role' in models: - kargs['role'] = just_one(models['role']) + if "role" in models: + kargs["role"] = just_one(models["role"]) - if 'academy' in models: - kargs['academy'] = just_one(models['academy']) + if "academy" in models: + kargs["academy"] = just_one(models["academy"]) - models['profile_academy'] = create_models(profile_academy, 'authenticate.ProfileAcademy', **{ - **kargs, - **profile_academy_kwargs - }) + models["profile_academy"] = create_models( + profile_academy, "authenticate.ProfileAcademy", **{**kargs, **profile_academy_kwargs} + ) - if not 'academy_auth_settings' in models and is_valid(academy_auth_settings): + if not "academy_auth_settings" in models and is_valid(academy_auth_settings): kargs = {} - if 'user' in models: - kargs['github_owner'] = just_one(models['user']) + if "user" in models: + kargs["github_owner"] = just_one(models["user"]) - if 'academy' in models: - kargs['academy'] = just_one(models['academy']) + if "academy" in models: + kargs["academy"] = just_one(models["academy"]) - models['academy_auth_settings'] = create_models(academy_auth_settings, 'authenticate.AcademyAuthSettings', - **{ - **kargs, - **academy_auth_settings_kwargs - }) + models["academy_auth_settings"] = create_models( + academy_auth_settings, "authenticate.AcademyAuthSettings", **{**kargs, **academy_auth_settings_kwargs} + ) - if not 'credentials_github' in models and is_valid(credentials_github): + if not "credentials_github" in models and is_valid(credentials_github): kargs = {} - if 'user' in models: - kargs['user'] = 
just_one(models['user']) + if "user" in models: + kargs["user"] = just_one(models["user"]) - models['credentials_github'] = create_models(credentials_github, 'authenticate.CredentialsGithub', **{ - **kargs, - **credentials_github_kwargs - }) + models["credentials_github"] = create_models( + credentials_github, "authenticate.CredentialsGithub", **{**kargs, **credentials_github_kwargs} + ) - if not 'credentials_slack' in models and is_valid(credentials_slack): + if not "credentials_slack" in models and is_valid(credentials_slack): kargs = {} - if 'user' in models: - kargs['user'] = just_one(models['user']) + if "user" in models: + kargs["user"] = just_one(models["user"]) - models['credentials_slack'] = create_models(credentials_slack, 'authenticate.CredentialsSlack', **{ - **kargs, - **credentials_slack_kwargs - }) + models["credentials_slack"] = create_models( + credentials_slack, "authenticate.CredentialsSlack", **{**kargs, **credentials_slack_kwargs} + ) - if not 'credentials_facebook' in models and is_valid(credentials_facebook): + if not "credentials_facebook" in models and is_valid(credentials_facebook): kargs = {} - if 'user' in models: - kargs['user'] = just_one(models['user']) + if "user" in models: + kargs["user"] = just_one(models["user"]) - if 'academy' in models: - kargs['academy'] = just_one(models['academy']) + if "academy" in models: + kargs["academy"] = just_one(models["academy"]) - models['credentials_facebook'] = create_models(credentials_facebook, 'authenticate.CredentialsFacebook', **{ - **kargs, - **credentials_facebook_kwargs - }) + models["credentials_facebook"] = create_models( + credentials_facebook, "authenticate.CredentialsFacebook", **{**kargs, **credentials_facebook_kwargs} + ) - if not 'cohort_user' in models and is_valid(cohort_user): + if not "cohort_user" in models and is_valid(cohort_user): kargs = {} - if 'user' in models: - kargs['user'] = just_one(models['user']) + if "user" in models: + kargs["user"] = just_one(models["user"]) - if 'academy' in models: - kargs['academy'] = just_one(models['academy']) + if "academy" in models: + kargs["academy"] = just_one(models["academy"]) - if 'cohort' in models: - kargs['cohort'] = just_one(models['cohort']) + if "cohort" in models: + kargs["cohort"] = just_one(models["cohort"]) - models['cohort_user'] = create_models(cohort_user, 'admissions.CohortUser', **{ - **kargs, - **cohort_user_kwargs - }) + models["cohort_user"] = create_models( + cohort_user, "admissions.CohortUser", **{**kargs, **cohort_user_kwargs} + ) - if not 'github_academy_user' in models and (is_valid(github_academy_user) or is_valid(github_academy_user_log)): + if not "github_academy_user" in models and (is_valid(github_academy_user) or is_valid(github_academy_user_log)): kargs = {} - if 'user' in models: - kargs['user'] = just_one(models['user']) + if "user" in models: + kargs["user"] = just_one(models["user"]) - if 'academy' in models: - kargs['academy'] = just_one(models['academy']) + if "academy" in models: + kargs["academy"] = just_one(models["academy"]) - models['github_academy_user'] = create_models(github_academy_user, 'authenticate.GithubAcademyUser', **{ - **kargs, - **github_academy_user_kwargs - }) + models["github_academy_user"] = create_models( + github_academy_user, "authenticate.GithubAcademyUser", **{**kargs, **github_academy_user_kwargs} + ) - if not 'pending_github_user' in models and is_valid(pending_github_user): + if not "pending_github_user" in models and is_valid(pending_github_user): kargs = {} - 
models['pending_github_user'] = create_models(pending_github_user, 'authenticate.PendingGithubUser', - **kargs) + models["pending_github_user"] = create_models( + pending_github_user, "authenticate.PendingGithubUser", **kargs + ) - if not 'github_academy_user_log' in models and is_valid(github_academy_user_log): + if not "github_academy_user_log" in models and is_valid(github_academy_user_log): kargs = {} - if 'github_academy_user' in models: - kargs['academy_user'] = just_one(models['github_academy_user']) + if "github_academy_user" in models: + kargs["academy_user"] = just_one(models["github_academy_user"]) - models['github_academy_user_log'] = create_models(github_academy_user_log, - 'authenticate.GithubAcademyUserLog', **kargs) + models["github_academy_user_log"] = create_models( + github_academy_user_log, "authenticate.GithubAcademyUserLog", **kargs + ) - if not 'credentials_quick_books' in models and is_valid(credentials_quick_books): + if not "credentials_quick_books" in models and is_valid(credentials_quick_books): kargs = {} - if 'user' in models: - kargs['user'] = just_one(models['user']) + if "user" in models: + kargs["user"] = just_one(models["user"]) - models['credentials_quick_books'] = create_models(credentials_quick_books, - 'authenticate.CredentialsQuickBooks', **{ - **kargs, - **credentials_quick_books_kwargs - }) + models["credentials_quick_books"] = create_models( + credentials_quick_books, + "authenticate.CredentialsQuickBooks", + **{**kargs, **credentials_quick_books_kwargs} + ) - if not 'token' in models and is_valid(token): + if not "token" in models and is_valid(token): kargs = {} - if 'user' in models: - kargs['user'] = just_one(models['user']) + if "user" in models: + kargs["user"] = just_one(models["user"]) - models['token'] = create_models(token, 'authenticate.Token', **{**kargs, **token_kwargs}) + models["token"] = create_models(token, "authenticate.Token", **{**kargs, **token_kwargs}) - if not 'device_id' in models and is_valid(device_id): + if not "device_id" in models and is_valid(device_id): kargs = {} - models['device_id'] = create_models(device_id, 'authenticate.DeviceId', **{**kargs, **device_id_kwargs}) + models["device_id"] = create_models(device_id, "authenticate.DeviceId", **{**kargs, **device_id_kwargs}) return models diff --git a/breathecode/tests/mixins/generate_models_mixin/career_models_mixin.py b/breathecode/tests/mixins/generate_models_mixin/career_models_mixin.py index 06b4e9b6d..75a15a4f5 100644 --- a/breathecode/tests/mixins/generate_models_mixin/career_models_mixin.py +++ b/breathecode/tests/mixins/generate_models_mixin/career_models_mixin.py @@ -1,6 +1,7 @@ """ Collections of mixins used to login in authorize microservice """ + from breathecode.tests.mixins.models_mixin import ModelsMixin from mixer.backend.django import mixer from .utils import is_valid, create_models, just_one, get_list @@ -8,122 +9,121 @@ class CareerModelsMixin(ModelsMixin): - def generate_career_models(self, - platform=False, - position=False, - zyte_project=False, - spider=False, - position_alias=False, - career_tag=False, - location=False, - location_alias=False, - employer=False, - job=False, - platform_kwargs={}, - position_kwargs={}, - zyte_project_kwargs={}, - spider_kwargs={}, - position_alias_kwargs={}, - career_tag_kwargs={}, - location_kwargs={}, - location_alias_kwargs={}, - employer_kwargs={}, - job_kwargs={}, - models={}, - **kwargs): + def generate_career_models( + self, + platform=False, + position=False, + zyte_project=False, + spider=False, + 
position_alias=False, + career_tag=False, + location=False, + location_alias=False, + employer=False, + job=False, + platform_kwargs={}, + position_kwargs={}, + zyte_project_kwargs={}, + spider_kwargs={}, + position_alias_kwargs={}, + career_tag_kwargs={}, + location_kwargs={}, + location_alias_kwargs={}, + employer_kwargs={}, + job_kwargs={}, + models={}, + **kwargs + ): """Generate models""" models = models.copy() - if not 'platform' in models and is_valid(platform): + if not "platform" in models and is_valid(platform): kargs = {} - models['platform'] = create_models(platform, 'career.Platform', **{**kargs, **platform_kwargs}) + models["platform"] = create_models(platform, "career.Platform", **{**kargs, **platform_kwargs}) - if not 'position' in models and (is_valid(position) or is_valid(spider)): + if not "position" in models and (is_valid(position) or is_valid(spider)): kargs = {} - models['position'] = create_models(position, 'career.Position', **{**kargs, **position_kwargs}) + models["position"] = create_models(position, "career.Position", **{**kargs, **position_kwargs}) - if not 'zyte_project' in models and (is_valid(zyte_project) or is_valid(spider)): + if not "zyte_project" in models and (is_valid(zyte_project) or is_valid(spider)): kargs = {} - if 'platform' in models: - kargs['platform'] = just_one(models['platform']) + if "platform" in models: + kargs["platform"] = just_one(models["platform"]) - models['zyte_project'] = create_models(zyte_project, 'career.ZyteProject', **{ - **kargs, - **zyte_project_kwargs - }) + models["zyte_project"] = create_models( + zyte_project, "career.ZyteProject", **{**kargs, **zyte_project_kwargs} + ) - if not 'spider' in models and is_valid(spider): + if not "spider" in models and is_valid(spider): kargs = {} - if 'position' in models: - kargs['position'] = just_one(models['position']) + if "position" in models: + kargs["position"] = just_one(models["position"]) - if 'zyte_project' in models: - kargs['zyte_project'] = just_one(models['zyte_project']) + if "zyte_project" in models: + kargs["zyte_project"] = just_one(models["zyte_project"]) - models['spider'] = create_models(spider, 'career.Spider', **{**kargs, **spider_kwargs}) + models["spider"] = create_models(spider, "career.Spider", **{**kargs, **spider_kwargs}) - if not 'position_alias' in models and is_valid(position_alias): + if not "position_alias" in models and is_valid(position_alias): kargs = {} - if 'position' in models: - kargs['position'] = just_one(models['position']) + if "position" in models: + kargs["position"] = just_one(models["position"]) - models['position_alias'] = create_models(position_alias, 'career.PositionAlias', **{ - **kargs, - **position_alias_kwargs - }) + models["position_alias"] = create_models( + position_alias, "career.PositionAlias", **{**kargs, **position_alias_kwargs} + ) - if not 'career_tag' in models and is_valid(career_tag): + if not "career_tag" in models and is_valid(career_tag): kargs = {} - models['career_tag'] = create_models(career_tag, 'career.CareerTag', **{**kargs, **career_tag_kwargs}) + models["career_tag"] = create_models(career_tag, "career.CareerTag", **{**kargs, **career_tag_kwargs}) - if not 'location' in models and is_valid(location): + if not "location" in models and is_valid(location): kargs = {} - models['location'] = create_models(location, 'career.Location', **{**kargs, **location_kwargs}) + models["location"] = create_models(location, "career.Location", **{**kargs, **location_kwargs}) - if not 'location_alias' in models and 
is_valid(location_alias): + if not "location_alias" in models and is_valid(location_alias): kargs = {} - if 'location' in models: - kargs['location'] = just_one(models['location']) + if "location" in models: + kargs["location"] = just_one(models["location"]) - models['location_alias'] = create_models(location_alias, 'career.LocationAlias', **{ - **kargs, - **location_alias_kwargs - }) + models["location_alias"] = create_models( + location_alias, "career.LocationAlias", **{**kargs, **location_alias_kwargs} + ) - if not 'employer' in models and is_valid(employer): + if not "employer" in models and is_valid(employer): kargs = {} - if 'location' in models: - kargs['location'] = just_one(models['location']) + if "location" in models: + kargs["location"] = just_one(models["location"]) - models['employer'] = create_models(employer, 'career.Employer', **{**kargs, **employer_kwargs}) + models["employer"] = create_models(employer, "career.Employer", **{**kargs, **employer_kwargs}) - if not 'job' in models and (is_valid(job) or is_valid(employer)): + if not "job" in models and (is_valid(job) or is_valid(employer)): kargs = {} - if 'spider' in models: - kargs['spider'] = just_one(models['spider']) + if "spider" in models: + kargs["spider"] = just_one(models["spider"]) - if 'employer' in models: - kargs['employer'] = just_one(models['employer']) + if "employer" in models: + kargs["employer"] = just_one(models["employer"]) - if 'position' in models: - kargs['position'] = just_one(models['position']) + if "position" in models: + kargs["position"] = just_one(models["position"]) - if 'career_tag' in models: - kargs['career_tag'] = just_one(models['career_tag']) + if "career_tag" in models: + kargs["career_tag"] = just_one(models["career_tag"]) - if 'location' in models: - kargs['location'] = just_one(models['location']) + if "location" in models: + kargs["location"] = just_one(models["location"]) - models['job'] = create_models(job, 'career.Job', **{**kargs, **job_kwargs}) + models["job"] = create_models(job, "career.Job", **{**kargs, **job_kwargs}) return models diff --git a/breathecode/tests/mixins/generate_models_mixin/certificate_models_mixin.py b/breathecode/tests/mixins/generate_models_mixin/certificate_models_mixin.py index 576c6b2fe..39b42e389 100644 --- a/breathecode/tests/mixins/generate_models_mixin/certificate_models_mixin.py +++ b/breathecode/tests/mixins/generate_models_mixin/certificate_models_mixin.py @@ -1,88 +1,89 @@ """ Collections of mixins used to login in authorize microservice """ + from breathecode.tests.mixins.models_mixin import ModelsMixin from .utils import is_valid, create_models, just_one, get_list class CertificateModelsMixin(ModelsMixin): # TODO: Implement Badge - user_specialty_token = '9e76a2ab3bd55454c384e0a5cdb5298d17285949' - - def generate_certificate_models(self, - layout_design=False, - specialty=False, - syllabus=False, - user_specialty=False, - layout_design_slug='', - user_specialty_preview_url='', - user_specialty_token='', - badge=False, - syllabus_kwargs={}, - badge_kwargs={}, - layout_design_kwargs={}, - user_specialty_kwargs={}, - models={}, - **kwargs): + user_specialty_token = "9e76a2ab3bd55454c384e0a5cdb5298d17285949" + + def generate_certificate_models( + self, + layout_design=False, + specialty=False, + syllabus=False, + user_specialty=False, + layout_design_slug="", + user_specialty_preview_url="", + user_specialty_token="", + badge=False, + syllabus_kwargs={}, + badge_kwargs={}, + layout_design_kwargs={}, + user_specialty_kwargs={}, + models={}, + 
**kwargs + ): """Generate models""" models = models.copy() - if not 'specialty' in models and (is_valid(specialty) or is_valid(user_specialty)): + if not "specialty" in models and (is_valid(specialty) or is_valid(user_specialty)): kargs = {} - if 'syllabus' in models or syllabus: - kargs['syllabus'] = just_one(models['syllabus']) + if "syllabus" in models or syllabus: + kargs["syllabus"] = just_one(models["syllabus"]) - models['specialty'] = create_models(specialty, 'certificate.Specialty', **{**kargs, **syllabus_kwargs}) - if not 'badge' in models and badge: + models["specialty"] = create_models(specialty, "certificate.Specialty", **{**kargs, **syllabus_kwargs}) + if not "badge" in models and badge: kargs = {} - if 'specialty' in models or is_valid(specialty): - kargs['specialties'] = get_list(['specialty']) + if "specialty" in models or is_valid(specialty): + kargs["specialties"] = get_list(["specialty"]) - models['badge'] = create_models(specialty, 'certificate.Badge', **{**kargs, **badge_kwargs}) + models["badge"] = create_models(specialty, "certificate.Badge", **{**kargs, **badge_kwargs}) - if not 'layout_design' in models and is_valid(layout_design): - kargs = {'slug': 'default'} + if not "layout_design" in models and is_valid(layout_design): + kargs = {"slug": "default"} if layout_design_slug: - kargs['slug'] = layout_design_slug + kargs["slug"] = layout_design_slug - models['layout_design'] = create_models(layout_design, 'certificate.LayoutDesign', **{ - **kargs, - **layout_design_kwargs - }) + models["layout_design"] = create_models( + layout_design, "certificate.LayoutDesign", **{**kargs, **layout_design_kwargs} + ) - if not 'user_specialty' in models and is_valid(user_specialty): + if not "user_specialty" in models and is_valid(user_specialty): kargs = { - 'token': self.user_specialty_token, - 'preview_url': 'https://asdasd.com', + "token": self.user_specialty_token, + "preview_url": "https://asdasd.com", } if user_specialty_preview_url: - kargs['preview_url'] = user_specialty_preview_url + kargs["preview_url"] = user_specialty_preview_url if user_specialty_token: - kargs['token'] = user_specialty_token + kargs["token"] = user_specialty_token - if 'user' in models: - kargs['user'] = just_one(models['user']) + if "user" in models: + kargs["user"] = just_one(models["user"]) - if 'specialty' in models: - kargs['specialty'] = just_one(models['specialty']) + if "specialty" in models: + kargs["specialty"] = just_one(models["specialty"]) - if 'academy' in models: - kargs['academy'] = just_one(models['academy']) + if "academy" in models: + kargs["academy"] = just_one(models["academy"]) - if 'layout_design' in models: - kargs['layout'] = just_one(models['layout_design']) + if "layout_design" in models: + kargs["layout"] = just_one(models["layout_design"]) - if 'cohort' in models: - kargs['cohort'] = just_one(models['cohort']) + if "cohort" in models: + kargs["cohort"] = just_one(models["cohort"]) - models['user_specialty'] = create_models(user_specialty, 'certificate.UserSpecialty', **{ - **kargs, - **user_specialty_kwargs - }) + models["user_specialty"] = create_models( + user_specialty, "certificate.UserSpecialty", **{**kargs, **user_specialty_kwargs} + ) return models diff --git a/breathecode/tests/mixins/generate_models_mixin/commons_models_mixin.py b/breathecode/tests/mixins/generate_models_mixin/commons_models_mixin.py index 10a276e45..eaf48a653 100644 --- a/breathecode/tests/mixins/generate_models_mixin/commons_models_mixin.py +++ 
b/breathecode/tests/mixins/generate_models_mixin/commons_models_mixin.py @@ -1,6 +1,7 @@ """ Collections of mixins used to login in authorize microservice """ + from mixer.backend.django import mixer from breathecode.tests.mixins.models_mixin import ModelsMixin diff --git a/breathecode/tests/mixins/generate_models_mixin/content_types_mixin.py b/breathecode/tests/mixins/generate_models_mixin/content_types_mixin.py index 44971923f..6fb9e2bbb 100644 --- a/breathecode/tests/mixins/generate_models_mixin/content_types_mixin.py +++ b/breathecode/tests/mixins/generate_models_mixin/content_types_mixin.py @@ -1,6 +1,7 @@ """ Collections of mixins used to login in authorize microservice """ + from breathecode.tests.mixins.models_mixin import ModelsMixin from breathecode.tests.mixins.headers_mixin import HeadersMixin from breathecode.tests.mixins import DateFormatterMixin @@ -13,8 +14,8 @@ class ContentTypesMixin(DateFormatterMixin, HeadersMixin, ModelsMixin): def generate_contenttypes_models(self, content_type=False, models={}, **kwargs): models = models.copy() - if not 'content_type' in models and is_valid(content_type): + if not "content_type" in models and is_valid(content_type): kargs = {} - models['content_type'] = create_models(content_type, 'contenttypes.ContentType', **kargs) + models["content_type"] = create_models(content_type, "contenttypes.ContentType", **kargs) return models diff --git a/breathecode/tests/mixins/generate_models_mixin/events_models_mixin.py b/breathecode/tests/mixins/generate_models_mixin/events_models_mixin.py index 8c1d8a95e..809f32ea4 100644 --- a/breathecode/tests/mixins/generate_models_mixin/events_models_mixin.py +++ b/breathecode/tests/mixins/generate_models_mixin/events_models_mixin.py @@ -1,6 +1,7 @@ """ Collections of mixins used to login in authorize microservice """ + from breathecode.tests.mixins.generate_models_mixin.utils.get_list import get_list from breathecode.tests.mixins.models_mixin import ModelsMixin from mixer.backend.django import mixer @@ -12,143 +13,143 @@ class EventsModelsMixin(ModelsMixin): - def generate_events_models(self, - organization=False, - user=False, - organizer=False, - academy=False, - venue=False, - event_type=False, - event=False, - event_checkin=False, - eventbrite_webhook=False, - event_type_visibility_setting=False, - live_class=False, - organization_kwargs={}, - organizer_kwargs={}, - venue_kwargs={}, - event_type_kwargs={}, - event_kwargs={}, - event_checkin_kwargs={}, - eventbrite_webhook_kwargs={}, - models={}, - **kwargs): + def generate_events_models( + self, + organization=False, + user=False, + organizer=False, + academy=False, + venue=False, + event_type=False, + event=False, + event_checkin=False, + eventbrite_webhook=False, + event_type_visibility_setting=False, + live_class=False, + organization_kwargs={}, + organizer_kwargs={}, + venue_kwargs={}, + event_type_kwargs={}, + event_kwargs={}, + event_checkin_kwargs={}, + eventbrite_webhook_kwargs={}, + models={}, + **kwargs + ): """Generate models""" models = models.copy() - if not 'organization' in models and is_valid(organization): + if not "organization" in models and is_valid(organization): kargs = {} - if 'academy' in models or academy: - kargs['academy'] = just_one(models['academy']) + if "academy" in models or academy: + kargs["academy"] = just_one(models["academy"]) - models['organization'] = create_models(organization, 'events.Organization', **{ - **kargs, - **organization_kwargs - }) + models["organization"] = create_models( + organization, 
"events.Organization", **{**kargs, **organization_kwargs} + ) - if not 'organizer' in models and is_valid(organizer): + if not "organizer" in models and is_valid(organizer): kargs = {} - if 'academy' in models or academy: - kargs['academy'] = just_one(models['academy']) + if "academy" in models or academy: + kargs["academy"] = just_one(models["academy"]) - if 'organization' in models or organization: - kargs['organization'] = just_one(models['organization']) + if "organization" in models or organization: + kargs["organization"] = just_one(models["organization"]) - models['organizer'] = create_models(organizer, 'events.Organizer', **{**kargs, **organizer_kwargs}) + models["organizer"] = create_models(organizer, "events.Organizer", **{**kargs, **organizer_kwargs}) - if not 'venue' in models and is_valid(venue): + if not "venue" in models and is_valid(venue): kargs = {} - if 'academy' in models or academy: - kargs['academy'] = just_one(models['academy']) + if "academy" in models or academy: + kargs["academy"] = just_one(models["academy"]) - if 'organization' in models or organization: - kargs['organization'] = just_one(models['organization']) + if "organization" in models or organization: + kargs["organization"] = just_one(models["organization"]) - models['venue'] = create_models(venue, 'events.Venue', **{**kargs, **venue_kwargs}) + models["venue"] = create_models(venue, "events.Venue", **{**kargs, **venue_kwargs}) - if not 'event_type_visibility_setting' in models and is_valid(event_type_visibility_setting): + if not "event_type_visibility_setting" in models and is_valid(event_type_visibility_setting): kargs = {} - if 'syllabus' in models: - kargs['syllabus'] = just_one(models['syllabus']) + if "syllabus" in models: + kargs["syllabus"] = just_one(models["syllabus"]) - if 'academy' in models: - kargs['academy'] = just_one(models['academy']) + if "academy" in models: + kargs["academy"] = just_one(models["academy"]) - if 'cohort' in models: - kargs['cohort'] = just_one(models['cohort']) + if "cohort" in models: + kargs["cohort"] = just_one(models["cohort"]) - models['event_type_visibility_setting'] = create_models(event_type_visibility_setting, - 'events.EventTypeVisibilitySetting', **kargs) + models["event_type_visibility_setting"] = create_models( + event_type_visibility_setting, "events.EventTypeVisibilitySetting", **kargs + ) - if not 'event_type' in models and is_valid(event_type): + if not "event_type" in models and is_valid(event_type): kargs = {} - kargs['description'] = fake.text()[:255] + kargs["description"] = fake.text()[:255] - if 'academy' in models or academy: - kargs['academy'] = just_one(models['academy']) + if "academy" in models or academy: + kargs["academy"] = just_one(models["academy"]) - if 'event_type_visibility_setting' in models: - kargs['visibility_settings'] = get_list(models['event_type_visibility_setting']) + if "event_type_visibility_setting" in models: + kargs["visibility_settings"] = get_list(models["event_type_visibility_setting"]) - models['event_type'] = create_models(event_type, 'events.EventType', **{**kargs, **event_type_kwargs}) + models["event_type"] = create_models(event_type, "events.EventType", **{**kargs, **event_type_kwargs}) - if not 'event' in models and is_valid(event): + if not "event" in models and is_valid(event): kargs = {} - if 'user' in models or user: - kargs['host'] = just_one(models['user']) + if "user" in models or user: + kargs["host"] = just_one(models["user"]) - if 'academy' in models or academy: - kargs['academy'] = 
just_one(models['academy']) + if "academy" in models or academy: + kargs["academy"] = just_one(models["academy"]) - if 'organization' in models or organization: - kargs['organization'] = just_one(models['organization']) + if "organization" in models or organization: + kargs["organization"] = just_one(models["organization"]) - if 'user' in models or user: - kargs['author'] = just_one(models['user']) - kargs['host_user'] = just_one(models['user']) + if "user" in models or user: + kargs["author"] = just_one(models["user"]) + kargs["host_user"] = just_one(models["user"]) - if 'venue' in models or venue: - kargs['venue'] = just_one(models['venue']) + if "venue" in models or venue: + kargs["venue"] = just_one(models["venue"]) - if 'event_type' in models or event_type: - kargs['event_type'] = just_one(models['event_type']) + if "event_type" in models or event_type: + kargs["event_type"] = just_one(models["event_type"]) - models['event'] = create_models(event, 'events.Event', **{**kargs, **event_kwargs}) + models["event"] = create_models(event, "events.Event", **{**kargs, **event_kwargs}) - if not 'event_checkin' in models and is_valid(event_checkin): + if not "event_checkin" in models and is_valid(event_checkin): kargs = {} - if 'user' in models or user: - kargs['attendee'] = just_one(models['user']) + if "user" in models or user: + kargs["attendee"] = just_one(models["user"]) - if 'event' in models or event: - kargs['event'] = just_one(models['event']) + if "event" in models or event: + kargs["event"] = just_one(models["event"]) - models['event_checkin'] = create_models(event_checkin, 'events.EventCheckin', **{ - **kargs, - **event_checkin_kwargs - }) + models["event_checkin"] = create_models( + event_checkin, "events.EventCheckin", **{**kargs, **event_checkin_kwargs} + ) - if not 'eventbrite_webhook' in models and is_valid(eventbrite_webhook): + if not "eventbrite_webhook" in models and is_valid(eventbrite_webhook): kargs = {} - models['eventbrite_webhook'] = create_models(eventbrite_webhook, 'events.EventbriteWebhook', **{ - **kargs, - **eventbrite_webhook_kwargs - }) + models["eventbrite_webhook"] = create_models( + eventbrite_webhook, "events.EventbriteWebhook", **{**kargs, **eventbrite_webhook_kwargs} + ) - if not 'live_class' in models and is_valid(live_class): + if not "live_class" in models and is_valid(live_class): kargs = {} - if 'cohort_time_slot' in models: - kargs['cohort_time_slot'] = just_one(models['cohort_time_slot']) + if "cohort_time_slot" in models: + kargs["cohort_time_slot"] = just_one(models["cohort_time_slot"]) - models['live_class'] = create_models(live_class, 'events.LiveClass', **kargs) + models["live_class"] = create_models(live_class, "events.LiveClass", **kargs) return models diff --git a/breathecode/tests/mixins/generate_models_mixin/feedback_models_mixin.py b/breathecode/tests/mixins/generate_models_mixin/feedback_models_mixin.py index 8e7f269c6..346ae409f 100644 --- a/breathecode/tests/mixins/generate_models_mixin/feedback_models_mixin.py +++ b/breathecode/tests/mixins/generate_models_mixin/feedback_models_mixin.py @@ -1,6 +1,7 @@ """ Collections of mixins used to login in authorize microservice """ + import os from breathecode.tests.mixins.models_mixin import ModelsMixin from mixer.backend.django import mixer @@ -9,70 +10,72 @@ class FeedbackModelsMixin(ModelsMixin): - def generate_feedback_models(self, - answer=False, - event=False, - survey=False, - cohort=False, - mentor=False, - academy=False, - token=False, - user=False, - language='', - answer_status='', - 
answer_score='', - survey_kwargs={}, - answer_kwargs={}, - models={}, - **kwargs): + def generate_feedback_models( + self, + answer=False, + event=False, + survey=False, + cohort=False, + mentor=False, + academy=False, + token=False, + user=False, + language="", + answer_status="", + answer_score="", + survey_kwargs={}, + answer_kwargs={}, + models={}, + **kwargs + ): """Generate models""" - os.environ['EMAIL_NOTIFICATIONS_ENABLED'] = 'TRUE' + os.environ["EMAIL_NOTIFICATIONS_ENABLED"] = "TRUE" models = models.copy() - if not 'survey' in models and is_valid(survey): + if not "survey" in models and is_valid(survey): kargs = {} - if 'cohort' in models: - kargs['cohort'] = just_one(models['cohort']) + if "cohort" in models: + kargs["cohort"] = just_one(models["cohort"]) - models['survey'] = create_models(survey, 'feedback.Survey', **{**kargs, **survey_kwargs}) + models["survey"] = create_models(survey, "feedback.Survey", **{**kargs, **survey_kwargs}) - if not 'answer' in models and is_valid(answer): + if not "answer" in models and is_valid(answer): kargs = {} - if 'event' in models: - kargs['event'] = just_one(models['event']) + if "event" in models: + kargs["event"] = just_one(models["event"]) - if 'user' in models or mentor: - kargs['mentor'] = just_one(models['user']) + if "user" in models or mentor: + kargs["mentor"] = just_one(models["user"]) - if 'cohort' in models: - kargs['cohort'] = just_one(models['cohort']) + if "cohort" in models: + kargs["cohort"] = just_one(models["cohort"]) - if 'academy' in models: - kargs['academy'] = just_one(models['academy']) + if "academy" in models: + kargs["academy"] = just_one(models["academy"]) - if 'token' in models: - kargs['token'] = just_one(models['token']) + if "token" in models: + kargs["token"] = just_one(models["token"]) - if 'survey' in models: - kargs['survey'] = just_one(models['survey']) + if "survey" in models: + kargs["survey"] = just_one(models["survey"]) - if 'user' in models: - kargs['user'] = just_one(models['user']) + if "user" in models: + kargs["user"] = just_one(models["user"]) - if 'mentorship_session' in models: - kargs['mentorship_session'] = just_one(models['mentorship_session']) + if "mentorship_session" in models: + kargs["mentorship_session"] = just_one(models["mentorship_session"]) if answer_status: - kargs['status'] = answer_status + kargs["status"] = answer_status if answer_score: - kargs['score'] = answer_score + kargs["score"] = answer_score if language: - kargs['lang'] = language + kargs["lang"] = language - models['answer'] = create_models(answer, 'feedback.Answer', **{**kargs, **answer_kwargs}) + models["answer"] = create_models(answer, "feedback.Answer", **{**kargs, **answer_kwargs}) return models diff --git a/breathecode/tests/mixins/generate_models_mixin/freelance_models_mixin.py b/breathecode/tests/mixins/generate_models_mixin/freelance_models_mixin.py index 3e9428a6b..84d516bd2 100644 --- a/breathecode/tests/mixins/generate_models_mixin/freelance_models_mixin.py +++ b/breathecode/tests/mixins/generate_models_mixin/freelance_models_mixin.py @@ -1,6 +1,7 @@ """ Collections of mixins used to login in authorize microservice """ + from breathecode.tests.mixins.models_mixin import ModelsMixin from mixer.backend.django import mixer from .utils import is_valid, create_models, just_one @@ -8,54 +9,56 @@ class FreelanceModelsMixin(ModelsMixin): - def generate_freelance_models(self, - freelancer=False, - user=False, - credentials_github=False, - bill=False, - issue=False, - freelancer_kwargs={}, - bill_kwargs={}, - 
issue_kwargs={}, - models={}, - **kwargs): + def generate_freelance_models( + self, + freelancer=False, + user=False, + credentials_github=False, + bill=False, + issue=False, + freelancer_kwargs={}, + bill_kwargs={}, + issue_kwargs={}, + models={}, + **kwargs + ): """Generate models""" models = models.copy() - if not 'freelancer' in models and is_valid(freelancer): + if not "freelancer" in models and is_valid(freelancer): kargs = {} - if 'user' in models: - kargs['user'] = just_one(models['user']) + if "user" in models: + kargs["user"] = just_one(models["user"]) - if 'credentials_github' in models: - kargs['github_user'] = just_one(models['credentials_github']) + if "credentials_github" in models: + kargs["github_user"] = just_one(models["credentials_github"]) - models['freelancer'] = create_models(freelancer, 'freelance.Freelancer', **{**kargs, **freelancer_kwargs}) + models["freelancer"] = create_models(freelancer, "freelance.Freelancer", **{**kargs, **freelancer_kwargs}) - if not 'bill' in models and is_valid(bill): + if not "bill" in models and is_valid(bill): kargs = {} - if 'user' in models: - kargs['reviewer'] = just_one(models['user']) + if "user" in models: + kargs["reviewer"] = just_one(models["user"]) - if 'freelancer' in models: - kargs['freelancer'] = just_one(models['freelancer']) + if "freelancer" in models: + kargs["freelancer"] = just_one(models["freelancer"]) - models['bill'] = create_models(bill, 'freelance.Bill', **{**kargs, **bill_kwargs}) + models["bill"] = create_models(bill, "freelance.Bill", **{**kargs, **bill_kwargs}) - if not 'issue' in models and is_valid(issue): + if not "issue" in models and is_valid(issue): kargs = {} - if 'user' in models or user: - kargs['author'] = just_one(models['user']) + if "user" in models or user: + kargs["author"] = just_one(models["user"]) - if 'freelancer' in models or freelancer: - kargs['freelancer'] = just_one(models['freelancer']) + if "freelancer" in models or freelancer: + kargs["freelancer"] = just_one(models["freelancer"]) - if 'bill' in models or bill: - kargs['bill'] = just_one(models['bill']) + if "bill" in models or bill: + kargs["bill"] = just_one(models["bill"]) - models['issue'] = create_models(issue, 'freelance.Issue', **{**kargs, **issue_kwargs}) + models["issue"] = create_models(issue, "freelance.Issue", **{**kargs, **issue_kwargs}) return models diff --git a/breathecode/tests/mixins/generate_models_mixin/generate_models_mixin.py b/breathecode/tests/mixins/generate_models_mixin/generate_models_mixin.py index 8ee9f289d..7582482aa 100644 --- a/breathecode/tests/mixins/generate_models_mixin/generate_models_mixin.py +++ b/breathecode/tests/mixins/generate_models_mixin/generate_models_mixin.py @@ -1,6 +1,7 @@ """ Collections of mixins used to login in authorize microservice """ + from django.db.models import Model from breathecode.utils.attr_dict import AttrDict @@ -28,21 +29,39 @@ from .registry_models_mixin import RegistryModelsMixin from .task_manager_models_mixin import TaskManagerModelsMixin -__all__ = ['GenerateModelsMixin'] - - -class GenerateModelsMixin(AuthMixin, AssignmentsModelsMixin, AdmissionsModelsMixin, AuthenticateMixin, - CertificateModelsMixin, FeedbackModelsMixin, NotifyModelsMixin, EventsModelsMixin, - AssessmentModelsMixin, FreelanceModelsMixin, MarketingModelsMixin, MonitoringModelsMixin, - MediaModelsMixin, MentorshipModelsMixin, CareerModelsMixin, ContentTypesMixin, - RegistryModelsMixin, PaymentsModelsMixin, ProvisioningModelsMixin, CommonsModelsMixin, - LinkedServicesMixin, 
TaskManagerModelsMixin): +__all__ = ["GenerateModelsMixin"] + + +class GenerateModelsMixin( + AuthMixin, + AssignmentsModelsMixin, + AdmissionsModelsMixin, + AuthenticateMixin, + CertificateModelsMixin, + FeedbackModelsMixin, + NotifyModelsMixin, + EventsModelsMixin, + AssessmentModelsMixin, + FreelanceModelsMixin, + MarketingModelsMixin, + MonitoringModelsMixin, + MediaModelsMixin, + MentorshipModelsMixin, + CareerModelsMixin, + ContentTypesMixin, + RegistryModelsMixin, + PaymentsModelsMixin, + ProvisioningModelsMixin, + CommonsModelsMixin, + LinkedServicesMixin, + TaskManagerModelsMixin, +): def __detect_invalid_arguments__(self, models={}, **kwargs): """check if one argument is invalid to prevent errors""" for key in kwargs: - if key != 'authenticate' and not key.endswith('_kwargs') and not key in models: - print(f'key `{key}` should not be implemented in self.generate_models') + if key != "authenticate" and not key.endswith("_kwargs") and not key in models: + print(f"key `{key}` should not be implemented in self.generate_models") def __inject_models_in_instance__(self, models={}): """Add support to model.name instead of model['name']""" @@ -52,9 +71,9 @@ def __inject_models_in_instance__(self, models={}): def __flow_wrapper__(self, *args, **kwargs): models = {} - if 'models' in kwargs: - models = kwargs['models'].copy() - del kwargs['models'] + if "models" in kwargs: + models = kwargs["models"].copy() + del kwargs["models"] for func in args: models = func(models=models, **kwargs) @@ -77,21 +96,22 @@ def __inject_models__(self, models={}, **kwargs): for key in kwargs: kwarg = kwargs[key] - if isinstance(kwarg, Model) or (isinstance(kwarg, list) and len([x - for x in kwarg if isinstance(x, Model)])): + if isinstance(kwarg, Model) or ( + isinstance(kwarg, list) and len([x for x in kwarg if isinstance(x, Model)]) + ): models[key] = kwarg return models def generate_models(self, models={}, **kwargs): - if '_new_implementation' not in kwargs: - print(f'The method `generate_models` is deprecated, use `self.bc.database.create` instead') + if "_new_implementation" not in kwargs: + print(f"The method `generate_models` is deprecated, use `self.bc.database.create` instead") else: - del kwargs['_new_implementation'] + del kwargs["_new_implementation"] - if 'authenticate' in kwargs: - print(f'The argument `authenticate` is deprecated, use `self.bc.request.authenticate` instead') + if "authenticate" in kwargs: + print(f"The argument `authenticate` is deprecated, use `self.bc.request.authenticate` instead") self.maxDiff = None models = models.copy() diff --git a/breathecode/tests/mixins/generate_models_mixin/linked_services_models_mixin.py b/breathecode/tests/mixins/generate_models_mixin/linked_services_models_mixin.py index 53dbaa678..51e6357d6 100644 --- a/breathecode/tests/mixins/generate_models_mixin/linked_services_models_mixin.py +++ b/breathecode/tests/mixins/generate_models_mixin/linked_services_models_mixin.py @@ -1,6 +1,7 @@ """ Collections of mixins used to login in authorize microservice """ + from breathecode.tests.mixins import DateFormatterMixin from breathecode.tests.mixins.headers_mixin import HeadersMixin from breathecode.tests.mixins.models_mixin import ModelsMixin @@ -10,109 +11,123 @@ class LinkedServicesMixin(DateFormatterMixin, HeadersMixin, ModelsMixin): """CapacitiesTestCase with auth methods""" - password = 'pass1234' - - def generate_linked_services_models(self, - scope=False, - app=False, - app_user_agreement=False, - optional_scope_set=False, - legacy_key=False, - 
app_required_scope=False, - app_optional_scope=False, - first_party_webhook_log=False, - first_party_credentials=False, - models={}, - **kwargs): + + password = "pass1234" + + def generate_linked_services_models( + self, + scope=False, + app=False, + app_user_agreement=False, + optional_scope_set=False, + legacy_key=False, + app_required_scope=False, + app_optional_scope=False, + first_party_webhook_log=False, + first_party_credentials=False, + models={}, + **kwargs + ): models = models.copy() - if not 'scope' in models and (is_valid(scope) or is_valid(app_required_scope) or is_valid(app_optional_scope)): + if not "scope" in models and (is_valid(scope) or is_valid(app_required_scope) or is_valid(app_optional_scope)): kargs = {} - models['scope'] = create_models(scope, 'linked_services.Scope', **kargs) + models["scope"] = create_models(scope, "linked_services.Scope", **kargs) - if not 'app' in models and (is_valid(app) or is_valid(app_user_agreement) or is_valid(legacy_key) - or is_valid(app_required_scope) or is_valid(app_optional_scope) - or is_valid(first_party_webhook_log)): + if not "app" in models and ( + is_valid(app) + or is_valid(app_user_agreement) + or is_valid(legacy_key) + or is_valid(app_required_scope) + or is_valid(app_optional_scope) + or is_valid(first_party_webhook_log) + ): kargs = { - 'public_key': None, - 'private_key': '', + "public_key": None, + "private_key": "", } - models['app'] = create_models(app, 'linked_services.App', **kargs) + models["app"] = create_models(app, "linked_services.App", **kargs) - if not 'app_required_scope' in models and is_valid(app_required_scope): + if not "app_required_scope" in models and is_valid(app_required_scope): kargs = {} - if 'app' in models: - kargs['app'] = just_one(models['app']) + if "app" in models: + kargs["app"] = just_one(models["app"]) - if 'scope' in models: - kargs['scope'] = just_one(models['scope']) + if "scope" in models: + kargs["scope"] = just_one(models["scope"]) - models['app_required_scope'] = create_models(app_required_scope, 'linked_services.AppRequiredScope', - **kargs) + models["app_required_scope"] = create_models( + app_required_scope, "linked_services.AppRequiredScope", **kargs + ) - if not 'app_optional_scope' in models and is_valid(app_optional_scope): + if not "app_optional_scope" in models and is_valid(app_optional_scope): kargs = {} - if 'app' in models: - kargs['app'] = just_one(models['app']) + if "app" in models: + kargs["app"] = just_one(models["app"]) - if 'scope' in models: - kargs['scope'] = just_one(models['scope']) + if "scope" in models: + kargs["scope"] = just_one(models["scope"]) - models['app_optional_scope'] = create_models(app_optional_scope, 'linked_services.AppOptionalScope', - **kargs) + models["app_optional_scope"] = create_models( + app_optional_scope, "linked_services.AppOptionalScope", **kargs + ) - if not 'optional_scope_set' in models and (is_valid(optional_scope_set) or is_valid(app_user_agreement)): + if not "optional_scope_set" in models and (is_valid(optional_scope_set) or is_valid(app_user_agreement)): kargs = {} - if 'scope' in models: - kargs['optional_scopes'] = get_list(models['scope']) + if "scope" in models: + kargs["optional_scopes"] = get_list(models["scope"]) - models['optional_scope_set'] = create_models(optional_scope_set, 'linked_services.OptionalScopeSet', - **kargs) + models["optional_scope_set"] = create_models( + optional_scope_set, "linked_services.OptionalScopeSet", **kargs + ) - if not 'app_user_agreement' in models and is_valid(app_user_agreement): + 
if not "app_user_agreement" in models and is_valid(app_user_agreement): kargs = {} - if 'user' in models: - kargs['user'] = just_one(models['user']) + if "user" in models: + kargs["user"] = just_one(models["user"]) - if 'app' in models: - kargs['app'] = just_one(models['app']) + if "app" in models: + kargs["app"] = just_one(models["app"]) - if 'optional_scope_set' in models: - kargs['optional_scope_set'] = just_one(models['optional_scope_set']) + if "optional_scope_set" in models: + kargs["optional_scope_set"] = just_one(models["optional_scope_set"]) - models['app_user_agreement'] = create_models(app_user_agreement, 'linked_services.AppUserAgreement', - **kargs) + models["app_user_agreement"] = create_models( + app_user_agreement, "linked_services.AppUserAgreement", **kargs + ) - if not 'legacy_key' in models and is_valid(legacy_key): + if not "legacy_key" in models and is_valid(legacy_key): kargs = {} - if 'app' in models: - kargs['app'] = just_one(models['app']) + if "app" in models: + kargs["app"] = just_one(models["app"]) - models['legacy_key'] = create_models(legacy_key, 'linked_services.LegacyKey', **kargs) + models["legacy_key"] = create_models(legacy_key, "linked_services.LegacyKey", **kargs) - if not 'first_party_credentials' in models and is_valid(first_party_credentials): + if not "first_party_credentials" in models and is_valid(first_party_credentials): kargs = {} - if 'user' in models: - kargs['user'] = just_one(models['user']) + if "user" in models: + kargs["user"] = just_one(models["user"]) - models['first_party_credentials'] = create_models(first_party_credentials, - 'linked_services.FirstPartyCredentials', **kargs) + models["first_party_credentials"] = create_models( + first_party_credentials, "linked_services.FirstPartyCredentials", **kargs + ) - if not 'first_party_webhook_log' in models and is_valid(first_party_webhook_log): + if not "first_party_webhook_log" in models and is_valid(first_party_webhook_log): kargs = {} - if 'app' in models: - kargs['app'] = just_one(models['app']) + if "app" in models: + kargs["app"] = just_one(models["app"]) - models['first_party_webhook_log'] = create_models(first_party_webhook_log, - 'linked_services.FirstPartyWebhookLog', **kargs) + models["first_party_webhook_log"] = create_models( + first_party_webhook_log, "linked_services.FirstPartyWebhookLog", **kargs + ) return models diff --git a/breathecode/tests/mixins/generate_models_mixin/marketing_models_mixin.py b/breathecode/tests/mixins/generate_models_mixin/marketing_models_mixin.py index 53becb8fa..44c157a2c 100644 --- a/breathecode/tests/mixins/generate_models_mixin/marketing_models_mixin.py +++ b/breathecode/tests/mixins/generate_models_mixin/marketing_models_mixin.py @@ -1,6 +1,7 @@ """ Collections of mixins used to login in authorize microservice """ + from mixer.backend.django import mixer from breathecode.tests.mixins.models_mixin import ModelsMixin @@ -10,196 +11,196 @@ class MarketingModelsMixin(ModelsMixin): - def generate_marketing_models(self, - active_campaign_academy=False, - automation=False, - academy=False, - tag=False, - contact=False, - form_entry=False, - short_link=False, - user=False, - academy_alias=False, - lead_generation_app=False, - downloadable=False, - course=False, - course_translation=False, - active_campaign_webhook=False, - active_campaign_academy_kwargs={}, - automation_kwargs={}, - tag_kwargs={}, - academy_alias_kwargs={}, - contact_kwargs={}, - form_entry_kwargs={}, - short_link_kwargs={}, - lead_generation_app_kwargs={}, - downloadable_kwargs={}, - 
models={}, - **kwargs): + def generate_marketing_models( + self, + active_campaign_academy=False, + automation=False, + academy=False, + tag=False, + contact=False, + form_entry=False, + short_link=False, + user=False, + academy_alias=False, + lead_generation_app=False, + downloadable=False, + course=False, + course_translation=False, + active_campaign_webhook=False, + active_campaign_academy_kwargs={}, + automation_kwargs={}, + tag_kwargs={}, + academy_alias_kwargs={}, + contact_kwargs={}, + form_entry_kwargs={}, + short_link_kwargs={}, + lead_generation_app_kwargs={}, + downloadable_kwargs={}, + models={}, + **kwargs + ): """Generate models""" models = models.copy() - if not 'active_campaign_academy' in models and (is_valid(active_campaign_academy) - or is_valid(active_campaign_webhook)): + if not "active_campaign_academy" in models and ( + is_valid(active_campaign_academy) or is_valid(active_campaign_webhook) + ): kargs = {} - if 'academy' in models or academy: - kargs['academy'] = just_one(models['academy']) + if "academy" in models or academy: + kargs["academy"] = just_one(models["academy"]) - models['active_campaign_academy'] = create_models(active_campaign_academy, - 'marketing.ActiveCampaignAcademy', **{ - **kargs, - **active_campaign_academy_kwargs - }) + models["active_campaign_academy"] = create_models( + active_campaign_academy, + "marketing.ActiveCampaignAcademy", + **{**kargs, **active_campaign_academy_kwargs} + ) - if not 'automation' in models and is_valid(automation): + if not "automation" in models and is_valid(automation): kargs = {} - if 'active_campaign_academy' in models: - kargs['ac_academy'] = just_one(models['active_campaign_academy']) + if "active_campaign_academy" in models: + kargs["ac_academy"] = just_one(models["active_campaign_academy"]) - models['automation'] = create_models(automation, 'marketing.Automation', **{**kargs, **automation_kwargs}) + models["automation"] = create_models(automation, "marketing.Automation", **{**kargs, **automation_kwargs}) - if not 'downloadable' in models and is_valid(downloadable): + if not "downloadable" in models and is_valid(downloadable): kargs = {} - if 'academy' in models and is_valid(downloadable): - kargs['academy'] = just_one(models['academy']) + if "academy" in models and is_valid(downloadable): + kargs["academy"] = just_one(models["academy"]) - if 'user' in models and is_valid(downloadable): - kargs['user'] = just_one(models['user']) + if "user" in models and is_valid(downloadable): + kargs["user"] = just_one(models["user"]) - models['downloadable'] = create_models(downloadable, 'marketing.Downloadable', **{ - **kargs, - **downloadable_kwargs - }) + models["downloadable"] = create_models( + downloadable, "marketing.Downloadable", **{**kargs, **downloadable_kwargs} + ) - if not 'course' in models and is_valid(course): + if not "course" in models and is_valid(course): kargs = {} - if 'academy' in models: - kargs['academy'] = just_one(models['academy']) + if "academy" in models: + kargs["academy"] = just_one(models["academy"]) - if 'syllabus' in models: - kargs['syllabus'] = just_one(models['syllabus']) + if "syllabus" in models: + kargs["syllabus"] = just_one(models["syllabus"]) - models['course'] = create_models(course, 'marketing.Course', **kargs) + models["course"] = create_models(course, "marketing.Course", **kargs) - if not 'course_translation' in models and is_valid(course_translation): + if not "course_translation" in models and is_valid(course_translation): kargs = {} - if 'course' in models: - kargs['course'] = 
just_one(models['course']) + if "course" in models: + kargs["course"] = just_one(models["course"]) - models['course_translation'] = create_models(course_translation, 'marketing.CourseTranslation', **kargs) + models["course_translation"] = create_models(course_translation, "marketing.CourseTranslation", **kargs) - if not 'academy_alias' in models and is_valid(academy_alias): + if not "academy_alias" in models and is_valid(academy_alias): kargs = {} - if 'academy' in models: - kargs['academy'] = just_one(models['academy']) + if "academy" in models: + kargs["academy"] = just_one(models["academy"]) - models['academy_alias'] = create_models(academy_alias, 'marketing.AcademyAlias', **{ - **kargs, - **academy_alias_kwargs - }) + models["academy_alias"] = create_models( + academy_alias, "marketing.AcademyAlias", **{**kargs, **academy_alias_kwargs} + ) # OneToOneField - if 'active_campaign_academy' in models and is_valid(active_campaign_academy): - if 'automation' in models: - models['active_campaign_academy'].event_attendancy_automation = just_one(models['automation']) + if "active_campaign_academy" in models and is_valid(active_campaign_academy): + if "automation" in models: + models["active_campaign_academy"].event_attendancy_automation = just_one(models["automation"]) - models['active_campaign_academy'].save() + models["active_campaign_academy"].save() - if not 'tag' in models and is_valid(tag): + if not "tag" in models and is_valid(tag): kargs = {} - if 'active_campaign_academy' in models: - kargs['ac_academy'] = just_one(models['active_campaign_academy']) + if "active_campaign_academy" in models: + kargs["ac_academy"] = just_one(models["active_campaign_academy"]) - if 'automation' in models: - kargs['automation'] = just_one(models['automation']) + if "automation" in models: + kargs["automation"] = just_one(models["automation"]) - models['tag'] = create_models(tag, 'marketing.Tag', **{**kargs, **tag_kwargs}) + models["tag"] = create_models(tag, "marketing.Tag", **{**kargs, **tag_kwargs}) - if not 'contact' in models and is_valid(contact): + if not "contact" in models and is_valid(contact): kargs = {} - if 'academy' in models: - kargs['academy'] = just_one(models['academy']) + if "academy" in models: + kargs["academy"] = just_one(models["academy"]) - models['contact'] = create_models(contact, 'marketing.Contact', **{**kargs, **contact_kwargs}) + models["contact"] = create_models(contact, "marketing.Contact", **{**kargs, **contact_kwargs}) - if not 'lead_generation_app' in models and is_valid(lead_generation_app): + if not "lead_generation_app" in models and is_valid(lead_generation_app): kargs = {} - if 'academy' in models: - kargs['academy'] = models['academy'] + if "academy" in models: + kargs["academy"] = models["academy"] - if 'tag' in models: - kargs['default_tags'] = [models['tag']] + if "tag" in models: + kargs["default_tags"] = [models["tag"]] - if 'automation' in models: - kargs['default_automations'] = [models['automation']] + if "automation" in models: + kargs["default_automations"] = [models["automation"]] - models['lead_generation_app'] = create_models(contact, 'marketing.LeadGenerationApp', **{ - **kargs, - **lead_generation_app_kwargs - }) + models["lead_generation_app"] = create_models( + contact, "marketing.LeadGenerationApp", **{**kargs, **lead_generation_app_kwargs} + ) - if not 'form_entry' in models and is_valid(form_entry): + if not "form_entry" in models and is_valid(form_entry): kargs = {} - if 'contact' in models: - kargs['contact'] = just_one(models['contact']) + if 
"contact" in models: + kargs["contact"] = just_one(models["contact"]) - if 'academy' in models or academy: - kargs['academy'] = just_one(models['academy']) + if "academy" in models or academy: + kargs["academy"] = just_one(models["academy"]) - if 'active_campaign_academy' in models: - kargs['ac_academy'] = just_one(models['active_campaign_academy']) + if "active_campaign_academy" in models: + kargs["ac_academy"] = just_one(models["active_campaign_academy"]) - models['form_entry'] = create_models(form_entry, 'marketing.FormEntry', **{**kargs, **form_entry_kwargs}) + models["form_entry"] = create_models(form_entry, "marketing.FormEntry", **{**kargs, **form_entry_kwargs}) - if not 'short_link' in models and is_valid(short_link): + if not "short_link" in models and is_valid(short_link): kargs = {} - if 'academy' in models: - kargs['academy'] = just_one(models['academy']) + if "academy" in models: + kargs["academy"] = just_one(models["academy"]) - if 'user' in models: - kargs['author'] = just_one(models['user']) + if "user" in models: + kargs["author"] = just_one(models["user"]) - models['short_link'] = create_models(short_link, 'marketing.ShortLink', **{**kargs, **short_link_kwargs}) + models["short_link"] = create_models(short_link, "marketing.ShortLink", **{**kargs, **short_link_kwargs}) - if not 'active_campaign_webhook' in models and is_valid(active_campaign_webhook): + if not "active_campaign_webhook" in models and is_valid(active_campaign_webhook): kargs = {} - if 'active_campaign_academy' in models: - kargs['ac_academy'] = just_one(models['active_campaign_academy']) + if "active_campaign_academy" in models: + kargs["ac_academy"] = just_one(models["active_campaign_academy"]) - if 'form_entry' in models: - kargs['form_entry'] = just_one(models['form_entry']) + if "form_entry" in models: + kargs["form_entry"] = just_one(models["form_entry"]) - if 'contact' in models: - kargs['contact'] = just_one(models['contact']) + if "contact" in models: + kargs["contact"] = just_one(models["contact"]) - models['active_campaign_webhook'] = create_models(active_campaign_webhook, - 'marketing.ActiveCampaignWebhook', **kargs) + models["active_campaign_webhook"] = create_models( + active_campaign_webhook, "marketing.ActiveCampaignWebhook", **kargs + ) - if not 'downloadable' in models and is_valid(downloadable): + if not "downloadable" in models and is_valid(downloadable): kargs = {} - if 'academy' in models: - kargs['academy'] = just_one(models['academy']) + if "academy" in models: + kargs["academy"] = just_one(models["academy"]) - if 'user' in models: - kargs['author'] = just_one(models['user']) + if "user" in models: + kargs["author"] = just_one(models["user"]) - models['downloadable'] = create_models(downloadable, 'marketing.Downloadable', **{ - **kargs, - **downloadable_kwargs - }) + models["downloadable"] = create_models( + downloadable, "marketing.Downloadable", **{**kargs, **downloadable_kwargs} + ) return models diff --git a/breathecode/tests/mixins/generate_models_mixin/media_models_mixin.py b/breathecode/tests/mixins/generate_models_mixin/media_models_mixin.py index b6e9f3df5..aa58d6987 100644 --- a/breathecode/tests/mixins/generate_models_mixin/media_models_mixin.py +++ b/breathecode/tests/mixins/generate_models_mixin/media_models_mixin.py @@ -1,6 +1,7 @@ """ Collections of mixins used to login in authorize microservice """ + from breathecode.tests.mixins.models_mixin import ModelsMixin from mixer.backend.django import mixer from .utils import is_valid, create_models, just_one, get_list @@ -8,39 
+9,40 @@ class MediaModelsMixin(ModelsMixin): - def generate_media_models(self, - category=False, - media=False, - media_resolution=False, - category_kwargs={}, - media_kwargs={}, - media_resolution_kwargs={}, - models={}, - **kwargs): + def generate_media_models( + self, + category=False, + media=False, + media_resolution=False, + category_kwargs={}, + media_kwargs={}, + media_resolution_kwargs={}, + models={}, + **kwargs + ): models = models.copy() - if not 'category' in models and is_valid(category): + if not "category" in models and is_valid(category): kargs = {} - models['category'] = create_models(category, 'media.Category', **{**kargs, **category_kwargs}) + models["category"] = create_models(category, "media.Category", **{**kargs, **category_kwargs}) - if not 'media' in models and is_valid(media): + if not "media" in models and is_valid(media): kargs = {} - if 'category' in models: - kargs['categories'] = get_list(models['category']) + if "category" in models: + kargs["categories"] = get_list(models["category"]) - if 'academy' in models: - kargs['academy'] = just_one(models['academy']) + if "academy" in models: + kargs["academy"] = just_one(models["academy"]) - models['media'] = create_models(media, 'media.Media', **{**kargs, **media_kwargs}) + models["media"] = create_models(media, "media.Media", **{**kargs, **media_kwargs}) - if not 'media_resolution' in models and is_valid(media_resolution): + if not "media_resolution" in models and is_valid(media_resolution): kargs = {} - models['media_resolution'] = create_models(media_resolution, 'media.MediaResolution', **{ - **kargs, - **media_resolution_kwargs - }) + models["media_resolution"] = create_models( + media_resolution, "media.MediaResolution", **{**kargs, **media_resolution_kwargs} + ) return models diff --git a/breathecode/tests/mixins/generate_models_mixin/mentorship_models_mixin.py b/breathecode/tests/mixins/generate_models_mixin/mentorship_models_mixin.py index d38558a10..d385d49cb 100644 --- a/breathecode/tests/mixins/generate_models_mixin/mentorship_models_mixin.py +++ b/breathecode/tests/mixins/generate_models_mixin/mentorship_models_mixin.py @@ -1,6 +1,7 @@ """ Collections of mixins used to login in authorize microservice """ + from breathecode.tests.mixins.models_mixin import ModelsMixin from .utils import create_models, get_list, is_valid, just_one @@ -8,70 +9,73 @@ class MentorshipModelsMixin(ModelsMixin): - def generate_mentorship_models(self, - mentorship_service=False, - mentor_profile=False, - mentorship_bill=False, - mentorship_session=False, - models={}, - **kwargs): + def generate_mentorship_models( + self, + mentorship_service=False, + mentor_profile=False, + mentorship_bill=False, + mentorship_session=False, + models={}, + **kwargs + ): models = models.copy() - if not 'mentorship_service' in models and (is_valid(mentorship_service)): + if not "mentorship_service" in models and (is_valid(mentorship_service)): kargs = {} - if 'academy' in models: - kargs['academy'] = just_one(models['academy']) + if "academy" in models: + kargs["academy"] = just_one(models["academy"]) - models['mentorship_service'] = create_models(mentorship_service, 'mentorship.MentorshipService', **kargs) + models["mentorship_service"] = create_models(mentorship_service, "mentorship.MentorshipService", **kargs) - if not 'mentor_profile' in models and (is_valid(mentor_profile) or is_valid(mentorship_bill) - or is_valid(mentorship_session)): + if not "mentor_profile" in models and ( + is_valid(mentor_profile) or is_valid(mentorship_bill) or 
is_valid(mentorship_session) + ): kargs = {} - if 'user' in models: - kargs['user'] = just_one(models['user']) + if "user" in models: + kargs["user"] = just_one(models["user"]) - if 'academy' in models: - kargs['academy'] = just_one(models['academy']) + if "academy" in models: + kargs["academy"] = just_one(models["academy"]) - if 'mentorship_service' in models: - kargs['services'] = get_list(models['mentorship_service']) + if "mentorship_service" in models: + kargs["services"] = get_list(models["mentorship_service"]) - if 'syllabus' in models: - kargs['syllabus'] = get_list(models['syllabus']) + if "syllabus" in models: + kargs["syllabus"] = get_list(models["syllabus"]) - models['mentor_profile'] = create_models(mentor_profile, 'mentorship.MentorProfile', **kargs) + models["mentor_profile"] = create_models(mentor_profile, "mentorship.MentorProfile", **kargs) - if not 'mentorship_bill' in models and is_valid(mentorship_bill): + if not "mentorship_bill" in models and is_valid(mentorship_bill): kargs = {} - if 'academy' in models: - kargs['academy'] = just_one(models['academy']) + if "academy" in models: + kargs["academy"] = just_one(models["academy"]) - if 'user' in models: - kargs['reviewer'] = just_one(models['user']) + if "user" in models: + kargs["reviewer"] = just_one(models["user"]) - if 'mentor_profile' in models: - kargs['mentor'] = just_one(models['mentor_profile']) + if "mentor_profile" in models: + kargs["mentor"] = just_one(models["mentor_profile"]) - models['mentorship_bill'] = create_models(mentorship_bill, 'mentorship.MentorshipBill', **kargs) + models["mentorship_bill"] = create_models(mentorship_bill, "mentorship.MentorshipBill", **kargs) - if not 'mentorship_session' in models and is_valid(mentorship_session): + if not "mentorship_session" in models and is_valid(mentorship_session): kargs = {} - if 'mentor_profile' in models: - kargs['mentor'] = just_one(models['mentor_profile']) + if "mentor_profile" in models: + kargs["mentor"] = just_one(models["mentor_profile"]) - if 'user' in models: - kargs['mentee'] = just_one(models['user']) + if "user" in models: + kargs["mentee"] = just_one(models["user"]) - if 'mentorship_bill' in models: - kargs['bill'] = just_one(models['mentorship_bill']) + if "mentorship_bill" in models: + kargs["bill"] = just_one(models["mentorship_bill"]) - if 'mentorship_service' in models: - kargs['service'] = just_one(models['mentorship_service']) + if "mentorship_service" in models: + kargs["service"] = just_one(models["mentorship_service"]) - models['mentorship_session'] = create_models(mentorship_session, 'mentorship.MentorshipSession', **kargs) + models["mentorship_session"] = create_models(mentorship_session, "mentorship.MentorshipSession", **kargs) return models diff --git a/breathecode/tests/mixins/generate_models_mixin/monitoring_models_mixin.py b/breathecode/tests/mixins/generate_models_mixin/monitoring_models_mixin.py index 121bb077a..30be3f7c8 100644 --- a/breathecode/tests/mixins/generate_models_mixin/monitoring_models_mixin.py +++ b/breathecode/tests/mixins/generate_models_mixin/monitoring_models_mixin.py @@ -1,6 +1,7 @@ """ Collections of mixins used to login in authorize microservice """ + from breathecode.tests.mixins.models_mixin import ModelsMixin from mixer.backend.django import mixer from .utils import is_valid, create_models, just_one @@ -8,70 +9,78 @@ class MonitoringModelsMixin(ModelsMixin): - def generate_monitoring_models(self, - application=False, - academy=False, - csv_upload=False, - slack_channel=False, - endpoint=False, - 
monitor_script=False, - stripe_event=False, - application_kwargs={}, - endpoint_kwargs={}, - monitor_script_kwargs={}, - models={}, - **kwargs): + def generate_monitoring_models( + self, + application=False, + academy=False, + csv_upload=False, + slack_channel=False, + endpoint=False, + monitor_script=False, + stripe_event=False, + application_kwargs={}, + endpoint_kwargs={}, + monitor_script_kwargs={}, + models={}, + **kwargs + ): """Generate models""" models = models.copy() - if not 'application' in models and (is_valid(application) or is_valid(monitor_script)): + if not "application" in models and (is_valid(application) or is_valid(monitor_script)): kargs = {} - if 'academy' in models: - kargs['academy'] = just_one(models['academy']) + if "academy" in models: + kargs["academy"] = just_one(models["academy"]) - if 'slack_channel' in models: - kargs['notify_slack_channel'] = just_one(models['slack_channel']) + if "slack_channel" in models: + kargs["notify_slack_channel"] = just_one(models["slack_channel"]) - models['application'] = create_models(application, 'monitoring.Application', **{ - **kargs, - **application_kwargs - }) + models["application"] = create_models( + application, "monitoring.Application", **{**kargs, **application_kwargs} + ) - if not 'endpoint' in models and is_valid(endpoint): + if not "endpoint" in models and is_valid(endpoint): kargs = {} - if 'application' in models: - kargs['application'] = just_one(models['application']) + if "application" in models: + kargs["application"] = just_one(models["application"]) - models['endpoint'] = create_models(endpoint, 'monitoring.Endpoint', **{**kargs, **endpoint_kwargs}) + models["endpoint"] = create_models(endpoint, "monitoring.Endpoint", **{**kargs, **endpoint_kwargs}) - if not 'monitor_script' in models and is_valid(monitor_script): + if not "monitor_script" in models and is_valid(monitor_script): kargs = {} - if 'application' in models: - kargs['application'] = just_one(models['application']) + if "application" in models: + kargs["application"] = just_one(models["application"]) - models['monitor_script'] = create_models(monitor_script, 'monitoring.MonitorScript', **{ - **kargs, - **monitor_script_kwargs - }) + models["monitor_script"] = create_models( + monitor_script, "monitoring.MonitorScript", **{**kargs, **monitor_script_kwargs} + ) - if not 'csv_upload' in models and is_valid(csv_upload): + if not "csv_upload" in models and is_valid(csv_upload): kargs = {} - if 'academy' in models: - kargs['academy'] = just_one(models['academy']) + if "academy" in models: + kargs["academy"] = just_one(models["academy"]) - models['csv_upload'] = create_models(csv_upload, 'monitoring.CSVUpload', **{ - **kargs, - }) + models["csv_upload"] = create_models( + csv_upload, + "monitoring.CSVUpload", + **{ + **kargs, + } + ) - if not 'stripe_event' in models and is_valid(stripe_event): + if not "stripe_event" in models and is_valid(stripe_event): kargs = {} - models['stripe_event'] = create_models(stripe_event, 'monitoring.StripeEvent', **{ - **kargs, - }) + models["stripe_event"] = create_models( + stripe_event, + "monitoring.StripeEvent", + **{ + **kargs, + } + ) return models diff --git a/breathecode/tests/mixins/generate_models_mixin/notify_models_mixin.py b/breathecode/tests/mixins/generate_models_mixin/notify_models_mixin.py index 9558272e3..58e3ede0a 100644 --- a/breathecode/tests/mixins/generate_models_mixin/notify_models_mixin.py +++ b/breathecode/tests/mixins/generate_models_mixin/notify_models_mixin.py @@ -1,6 +1,7 @@ """ Collections 
of mixins used to login in authorize microservice """ + from breathecode.tests.mixins.models_mixin import ModelsMixin from .utils import create_models, is_valid, just_one @@ -8,84 +9,84 @@ class NotifyModelsMixin(ModelsMixin): - def generate_notify_models(self, - device=False, - slack_team=False, - slack_user=False, - slack_user_team=False, - slack_channel=False, - hook=False, - device_kwargs={}, - slack_team_kwargs={}, - slack_user_kwargs={}, - slack_user_team_kwargs={}, - slack_channel_kwargs={}, - models={}, - **kwargs): + def generate_notify_models( + self, + device=False, + slack_team=False, + slack_user=False, + slack_user_team=False, + slack_channel=False, + hook=False, + device_kwargs={}, + slack_team_kwargs={}, + slack_user_kwargs={}, + slack_user_team_kwargs={}, + slack_channel_kwargs={}, + models={}, + **kwargs + ): """Generate models""" models = models.copy() - if not 'device' in models and is_valid(device): + if not "device" in models and is_valid(device): kargs = {} - if 'user' in models: - kargs['user'] = just_one(models['user']) + if "user" in models: + kargs["user"] = just_one(models["user"]) - models['device'] = create_models(device, 'notify.Device', **{**kargs, **device_kwargs}) + models["device"] = create_models(device, "notify.Device", **{**kargs, **device_kwargs}) - if not 'slack_team' in models and is_valid(slack_team): + if not "slack_team" in models and is_valid(slack_team): kargs = {} - if 'user' in models: - kargs['owner'] = just_one(models['user']) + if "user" in models: + kargs["owner"] = just_one(models["user"]) - if 'academy' in models: - kargs['academy'] = just_one(models['academy']) + if "academy" in models: + kargs["academy"] = just_one(models["academy"]) - models['slack_team'] = create_models(slack_team, 'notify.SlackTeam', **{**kargs, **slack_team_kwargs}) + models["slack_team"] = create_models(slack_team, "notify.SlackTeam", **{**kargs, **slack_team_kwargs}) - if not 'slack_user' in models and is_valid(slack_user): + if not "slack_user" in models and is_valid(slack_user): kargs = {} - if 'user' in models: - kargs['user'] = just_one(models['user']) + if "user" in models: + kargs["user"] = just_one(models["user"]) - models['slack_user'] = create_models(slack_user, 'notify.SlackUser', **{**kargs, **slack_user_kwargs}) + models["slack_user"] = create_models(slack_user, "notify.SlackUser", **{**kargs, **slack_user_kwargs}) - if not 'slack_user_team' in models and is_valid(slack_user_team): + if not "slack_user_team" in models and is_valid(slack_user_team): kargs = {} - if 'slack_user' in models: - kargs['slack_user'] = just_one(models['slack_user']) + if "slack_user" in models: + kargs["slack_user"] = just_one(models["slack_user"]) - if 'slack_team' in models: - kargs['slack_team'] = just_one(models['slack_team']) + if "slack_team" in models: + kargs["slack_team"] = just_one(models["slack_team"]) - models['slack_user_team'] = create_models(slack_user_team, 'notify.SlackUserTeam', **{ - **kargs, - **slack_user_team_kwargs - }) + models["slack_user_team"] = create_models( + slack_user_team, "notify.SlackUserTeam", **{**kargs, **slack_user_team_kwargs} + ) - if not 'slack_channel' in models and is_valid(slack_channel): + if not "slack_channel" in models and is_valid(slack_channel): kargs = {} - if 'cohort' in models: - kargs['cohort'] = just_one(models['cohort']) + if "cohort" in models: + kargs["cohort"] = just_one(models["cohort"]) - if 'slack_team' in models: - kargs['team'] = just_one(models['slack_team']) + if "slack_team" in models: + kargs["team"] = 
just_one(models["slack_team"]) - models['slack_channel'] = create_models(slack_channel, 'notify.SlackChannel', **{ - **kargs, - **slack_channel_kwargs - }) + models["slack_channel"] = create_models( + slack_channel, "notify.SlackChannel", **{**kargs, **slack_channel_kwargs} + ) - if not 'hook' in models and is_valid(hook): + if not "hook" in models and is_valid(hook): kargs = {} - if 'user' in models: - kargs['user'] = just_one(models['user']) + if "user" in models: + kargs["user"] = just_one(models["user"]) - models['hook'] = create_models(hook, 'notify.Hook', **kargs) + models["hook"] = create_models(hook, "notify.Hook", **kargs) return models diff --git a/breathecode/tests/mixins/generate_models_mixin/payments_models_mixin.py b/breathecode/tests/mixins/generate_models_mixin/payments_models_mixin.py index 2c60c270c..918ba2de1 100644 --- a/breathecode/tests/mixins/generate_models_mixin/payments_models_mixin.py +++ b/breathecode/tests/mixins/generate_models_mixin/payments_models_mixin.py @@ -1,6 +1,7 @@ """ Collections of mixins used to login in authorize microservice """ + from breathecode.admissions.models import Academy from breathecode.tests.mixins.models_mixin import ModelsMixin @@ -9,60 +10,69 @@ class PaymentsModelsMixin(ModelsMixin): - def generate_payments_models(self, - currency=False, - service=False, - service_translation=False, - service_item=False, - plan=False, - plan_translation=False, - consumable=False, - invoice=False, - subscription=False, - service_stock_scheduler=False, - payment_contact=False, - financial_reputation=False, - academy_service=False, - academy=False, - bag=False, - cohort_set_cohort=False, - plan_service_item_handler=False, - mentorship_service_set=False, - mentorship_service_set_translation=False, - event_type_set=False, - event_type_set_translation=False, - subscription_service_item=False, - plan_service_item=False, - plan_financing=False, - service_item_feature=False, - financing_option=False, - consumption_session=False, - plan_offer=False, - plan_offer_translation=False, - provisioning_price=False, - cohort_set=False, - cohort_set_translation=False, - service_set=False, - service_set_translation=False, - seller=False, - coupon=False, - models={}, - **kwargs): + def generate_payments_models( + self, + currency=False, + service=False, + service_translation=False, + service_item=False, + plan=False, + plan_translation=False, + consumable=False, + invoice=False, + subscription=False, + service_stock_scheduler=False, + payment_contact=False, + financial_reputation=False, + academy_service=False, + academy=False, + bag=False, + cohort_set_cohort=False, + plan_service_item_handler=False, + mentorship_service_set=False, + mentorship_service_set_translation=False, + event_type_set=False, + event_type_set_translation=False, + subscription_service_item=False, + plan_service_item=False, + plan_financing=False, + service_item_feature=False, + financing_option=False, + consumption_session=False, + plan_offer=False, + plan_offer_translation=False, + provisioning_price=False, + cohort_set=False, + cohort_set_translation=False, + service_set=False, + service_set_translation=False, + seller=False, + coupon=False, + models={}, + **kwargs + ): """Generate models""" models = models.copy() - if not 'currency' in models and (is_valid(currency) or is_valid(invoice) or is_valid(plan) or is_valid(service) - or is_valid(service_item) or is_valid(financing_option) - or is_valid(academy_service) or is_valid(provisioning_price)): + if not "currency" in models and ( + 
is_valid(currency) + or is_valid(invoice) + or is_valid(plan) + or is_valid(service) + or is_valid(service_item) + or is_valid(financing_option) + or is_valid(academy_service) + or is_valid(provisioning_price) + ): kargs = {} - if 'country' in models: - kargs['countries'] = get_list(models['country']) + if "country" in models: + kargs["countries"] = get_list(models["country"]) - models['currency'] = create_models(currency, 'payments.Currency', **kargs) + models["currency"] = create_models(currency, "payments.Currency", **kargs) - if 'academy' in models: - academies_intances = models['academy'] if isinstance(models['academy'], list) else [models['academy']] + if "academy" in models: + academies_intances = models["academy"] if isinstance(models["academy"], list) else [models["academy"]] academies_arguments = academy if isinstance(academy, list) else [academy] @@ -72,442 +82,463 @@ def generate_payments_models(self, academy_instance = academies_intances[index] if isinstance(academy_argument, Academy) and academy_argument.main_currency is None: - academy_argument.main_currency = just_one(models['currency']) + academy_argument.main_currency = just_one(models["currency"]) academy_argument.save() - elif isinstance(academy_argument, - Academy) is False and (isinstance(academy_argument, int) - or 'main_currency' not in academy_argument - or academy_argument['main_currency'] is not None): - academy_instance.main_currency = just_one(models['currency']) + elif isinstance(academy_argument, Academy) is False and ( + isinstance(academy_argument, int) + or "main_currency" not in academy_argument + or academy_argument["main_currency"] is not None + ): + academy_instance.main_currency = just_one(models["currency"]) academy_instance.save() - if not 'service' in models and (is_valid(service) or is_valid(service_item) or is_valid(consumable) - or is_valid(service_translation) or is_valid(academy_service)): + if not "service" in models and ( + is_valid(service) + or is_valid(service_item) + or is_valid(consumable) + or is_valid(service_translation) + or is_valid(academy_service) + ): kargs = {} - if 'currency' in models: - kargs['currency'] = just_one(models['currency']) + if "currency" in models: + kargs["currency"] = just_one(models["currency"]) - if 'academy' in models: - kargs['owner'] = just_one(models['academy']) + if "academy" in models: + kargs["owner"] = just_one(models["academy"]) - if 'group' in models: - kargs['groups'] = get_list(models['group']) + if "group" in models: + kargs["groups"] = get_list(models["group"]) - models['service'] = create_models(service, 'payments.Service', **kargs) + models["service"] = create_models(service, "payments.Service", **kargs) - if not 'service_translation' in models and is_valid(service_translation): + if not "service_translation" in models and is_valid(service_translation): kargs = {} - if 'service' in models: - kargs['service'] = just_one(models['service']) + if "service" in models: + kargs["service"] = just_one(models["service"]) - models['service_translation'] = create_models(service_translation, 'payments.ServiceTranslation', **kargs) + models["service_translation"] = create_models(service_translation, "payments.ServiceTranslation", **kargs) - if not 'service_item' in models and (is_valid(service_item) or is_valid(consumable) - or is_valid(service_stock_scheduler) or is_valid(subscription_service_item) - or is_valid(plan_service_item) or is_valid(service_item_feature)): + if not "service_item" in models and ( + is_valid(service_item) + or is_valid(consumable) 
+ or is_valid(service_stock_scheduler) + or is_valid(subscription_service_item) + or is_valid(plan_service_item) + or is_valid(service_item_feature) + ): kargs = {} - if 'service' in models: - kargs['service'] = just_one(models['service']) + if "service" in models: + kargs["service"] = just_one(models["service"]) - models['service_item'] = create_models(service_item, 'payments.ServiceItem', **kargs) + models["service_item"] = create_models(service_item, "payments.ServiceItem", **kargs) - if not 'service_item_feature' in models and is_valid(service_item_feature): + if not "service_item_feature" in models and is_valid(service_item_feature): kargs = {} - if 'service_item' in models: - kargs['service_item'] = just_one(models['service_item']) + if "service_item" in models: + kargs["service_item"] = just_one(models["service_item"]) - models['service_item_feature'] = create_models(service_item_feature, 'payments.ServiceItemFeature', **kargs) + models["service_item_feature"] = create_models(service_item_feature, "payments.ServiceItemFeature", **kargs) - if not 'financing_option' in models and is_valid(financing_option): + if not "financing_option" in models and is_valid(financing_option): kargs = {} - if 'currency' in models: - kargs['currency'] = just_one(models['currency']) + if "currency" in models: + kargs["currency"] = just_one(models["currency"]) - models['financing_option'] = create_models(financing_option, 'payments.FinancingOption', **kargs) + models["financing_option"] = create_models(financing_option, "payments.FinancingOption", **kargs) - if not 'cohort_set' in models and (is_valid(cohort_set) or is_valid(cohort_set_translation)): + if not "cohort_set" in models and (is_valid(cohort_set) or is_valid(cohort_set_translation)): kargs = {} - if 'academy' in models: - kargs['academy'] = just_one(models['academy']) + if "academy" in models: + kargs["academy"] = just_one(models["academy"]) - models['cohort_set'] = create_models(cohort_set, 'payments.CohortSet', **kargs) + models["cohort_set"] = create_models(cohort_set, "payments.CohortSet", **kargs) - if not 'cohort_set_cohort' in models and is_valid(cohort_set_cohort): + if not "cohort_set_cohort" in models and is_valid(cohort_set_cohort): kargs = {} - if 'cohort_set' in models: - kargs['cohort_set'] = just_one(models['cohort_set']) + if "cohort_set" in models: + kargs["cohort_set"] = just_one(models["cohort_set"]) - if 'cohort' in models: - kargs['cohort'] = just_one(models['cohort']) + if "cohort" in models: + kargs["cohort"] = just_one(models["cohort"]) - models['cohort_set_cohort'] = create_models(cohort_set_cohort, 'payments.CohortSetCohort', **kargs) + models["cohort_set_cohort"] = create_models(cohort_set_cohort, "payments.CohortSetCohort", **kargs) - if not 'cohort_set_translation' in models and is_valid(cohort_set_translation): + if not "cohort_set_translation" in models and is_valid(cohort_set_translation): kargs = {} - if 'cohort_set' in models: - kargs['cohort_set'] = get_list(models['cohort_set']) + if "cohort_set" in models: + kargs["cohort_set"] = get_list(models["cohort_set"]) - models['cohort_set_translation'] = create_models(mentorship_service_set_translation, - 'payments.CohortSetTranslation', **kargs) + models["cohort_set_translation"] = create_models( + mentorship_service_set_translation, "payments.CohortSetTranslation", **kargs + ) - if not 'mentorship_service_set' in models and (is_valid(mentorship_service_set) - or is_valid(mentorship_service_set_translation)): + if not "mentorship_service_set" in models and ( + 
is_valid(mentorship_service_set) or is_valid(mentorship_service_set_translation) + ): kargs = {} - if 'mentorship_service' in models: - kargs['mentorship_services'] = get_list(models['mentorship_service']) + if "mentorship_service" in models: + kargs["mentorship_services"] = get_list(models["mentorship_service"]) - if 'academy' in models: - kargs['academy'] = just_one(models['academy']) + if "academy" in models: + kargs["academy"] = just_one(models["academy"]) - models['mentorship_service_set'] = create_models(mentorship_service_set, 'payments.MentorshipServiceSet', - **kargs) + models["mentorship_service_set"] = create_models( + mentorship_service_set, "payments.MentorshipServiceSet", **kargs + ) - if not 'mentorship_service_set_translation' in models and is_valid(mentorship_service_set_translation): + if not "mentorship_service_set_translation" in models and is_valid(mentorship_service_set_translation): kargs = {} - if 'mentorship_service_set' in models: - kargs['mentorship_service_set'] = get_list(models['mentorship_service_set']) + if "mentorship_service_set" in models: + kargs["mentorship_service_set"] = get_list(models["mentorship_service_set"]) - if 'academy' in models: - kargs['academy'] = just_one(models['academy']) + if "academy" in models: + kargs["academy"] = just_one(models["academy"]) - models['mentorship_service_set_translation'] = create_models(mentorship_service_set_translation, - 'payments.MentorshipServiceSetTranslation', - **kargs) + models["mentorship_service_set_translation"] = create_models( + mentorship_service_set_translation, "payments.MentorshipServiceSetTranslation", **kargs + ) - if not 'event_type_set' in models and (is_valid(event_type_set) or is_valid(event_type_set_translation)): + if not "event_type_set" in models and (is_valid(event_type_set) or is_valid(event_type_set_translation)): kargs = {} - if 'event_type' in models: - kargs['event_types'] = get_list(models['event_type']) + if "event_type" in models: + kargs["event_types"] = get_list(models["event_type"]) - if 'academy' in models: - kargs['academy'] = just_one(models['academy']) + if "academy" in models: + kargs["academy"] = just_one(models["academy"]) - models['event_type_set'] = create_models(event_type_set, 'payments.EventTypeSet', **kargs) + models["event_type_set"] = create_models(event_type_set, "payments.EventTypeSet", **kargs) - if not 'event_type_set_translation' in models and is_valid(event_type_set_translation): + if not "event_type_set_translation" in models and is_valid(event_type_set_translation): kargs = {} - if 'event_type_sets' in models: - kargs['event_type_sets'] = get_list(models['event_type_set']) + if "event_type_sets" in models: + kargs["event_type_sets"] = get_list(models["event_type_set"]) - if 'academy' in models: - kargs['academy'] = just_one(models['academy']) + if "academy" in models: + kargs["academy"] = just_one(models["academy"]) - models['event_type_set_translation'] = create_models(event_type_set_translation, - 'payments.EventTypeSetTranslation', **kargs) + models["event_type_set_translation"] = create_models( + event_type_set_translation, "payments.EventTypeSetTranslation", **kargs + ) - if not 'academy_service' in models and is_valid(academy_service): + if not "academy_service" in models and is_valid(academy_service): kargs = {} - if 'service' in models: - kargs['service'] = just_one(models['service']) + if "service" in models: + kargs["service"] = just_one(models["service"]) - if 'academy' in models: - kargs['academy'] = just_one(models['academy']) + if "academy" 
in models: + kargs["academy"] = just_one(models["academy"]) - if 'currency' in models: - kargs['currency'] = just_one(models['currency']) + if "currency" in models: + kargs["currency"] = just_one(models["currency"]) - if 'mentorship_service_set' in models: - kargs['available_mentorship_service_sets'] = get_list(models['mentorship_service_set']) + if "mentorship_service_set" in models: + kargs["available_mentorship_service_sets"] = get_list(models["mentorship_service_set"]) - if 'event_type_set' in models: - kargs['available_event_type_sets'] = get_list(models['event_type_set']) + if "event_type_set" in models: + kargs["available_event_type_sets"] = get_list(models["event_type_set"]) - models['academy_service'] = create_models(academy_service, 'payments.AcademyService', **kargs) + models["academy_service"] = create_models(academy_service, "payments.AcademyService", **kargs) - if not 'plan' in models and (is_valid(plan) or is_valid(plan_translation) or is_valid(plan_service_item) - or is_valid(plan_offer)): + if not "plan" in models and ( + is_valid(plan) or is_valid(plan_translation) or is_valid(plan_service_item) or is_valid(plan_offer) + ): kargs = {} - if 'currency' in models: - kargs['currency'] = just_one(models['currency']) + if "currency" in models: + kargs["currency"] = just_one(models["currency"]) - if 'payment_service_scheduler' in models: - kargs['schedulers'] = get_list(models['payment_service_scheduler']) + if "payment_service_scheduler" in models: + kargs["schedulers"] = get_list(models["payment_service_scheduler"]) - if 'financing_option' in models: - kargs['financing_options'] = get_list(models['financing_option']) + if "financing_option" in models: + kargs["financing_options"] = get_list(models["financing_option"]) - if 'academy' in models: - kargs['owner'] = just_one(models['academy']) + if "academy" in models: + kargs["owner"] = just_one(models["academy"]) - if 'mentorship_service_set' in models: - kargs['mentorship_service_set'] = just_one(models['mentorship_service_set']) + if "mentorship_service_set" in models: + kargs["mentorship_service_set"] = just_one(models["mentorship_service_set"]) - if 'event_type_set' in models: - kargs['event_type_set'] = just_one(models['event_type_set']) + if "event_type_set" in models: + kargs["event_type_set"] = just_one(models["event_type_set"]) - if 'cohort_set' in models: - kargs['cohort_set'] = just_one(models['cohort_set']) + if "cohort_set" in models: + kargs["cohort_set"] = just_one(models["cohort_set"]) - if 'user_invite' in models: - kargs['invites'] = get_list(models['user_invite']) + if "user_invite" in models: + kargs["invites"] = get_list(models["user_invite"]) - models['plan'] = create_models(plan, 'payments.Plan', **kargs) + models["plan"] = create_models(plan, "payments.Plan", **kargs) - if not 'plan_translation' in models and is_valid(plan_translation): + if not "plan_translation" in models and is_valid(plan_translation): kargs = {} - if 'plan' in models: - kargs['plan'] = just_one(models['plan']) + if "plan" in models: + kargs["plan"] = just_one(models["plan"]) - models['plan_translation'] = create_models(plan_translation, 'payments.PlanTranslation', **kargs) + models["plan_translation"] = create_models(plan_translation, "payments.PlanTranslation", **kargs) - if not 'plan_offer' in models and (is_valid(plan_offer) or is_valid(plan_offer_translation)): + if not "plan_offer" in models and (is_valid(plan_offer) or is_valid(plan_offer_translation)): kargs = {} - if 'plan' in models: - kargs['original_plan'] = 
just_one(models['plan']) + if "plan" in models: + kargs["original_plan"] = just_one(models["plan"]) - if 'plan' in models: - kargs['suggested_plan'] = just_one(models['plan']) + if "plan" in models: + kargs["suggested_plan"] = just_one(models["plan"]) - models['plan_offer'] = create_models(plan_offer, 'payments.PlanOffer', **kargs) + models["plan_offer"] = create_models(plan_offer, "payments.PlanOffer", **kargs) - if not 'plan_offer_translation' in models and is_valid(plan_offer_translation): + if not "plan_offer_translation" in models and is_valid(plan_offer_translation): kargs = {} - if 'plan_offer' in models: - kargs['offer'] = just_one(models['plan_offer']) + if "plan_offer" in models: + kargs["offer"] = just_one(models["plan_offer"]) - models['plan_offer_translation'] = create_models(plan_offer_translation, 'payments.PlanOfferTranslation', - **kargs) + models["plan_offer_translation"] = create_models( + plan_offer_translation, "payments.PlanOfferTranslation", **kargs + ) - if not 'seller' in models and is_valid(seller): + if not "seller" in models and is_valid(seller): kargs = {} - if 'user' in models: - kargs['user'] = just_one(models['user']) + if "user" in models: + kargs["user"] = just_one(models["user"]) - models['seller'] = create_models(seller, 'payments.Seller', **kargs) + models["seller"] = create_models(seller, "payments.Seller", **kargs) - if not 'coupon' in models and is_valid(coupon): + if not "coupon" in models and is_valid(coupon): kargs = {} - if 'seller' in models: - kargs['seller'] = just_one(models['seller']) + if "seller" in models: + kargs["seller"] = just_one(models["seller"]) - if 'plan' in models: - kargs['plans'] = get_list(models['plan']) + if "plan" in models: + kargs["plans"] = get_list(models["plan"]) - models['coupon'] = create_models(coupon, 'payments.Coupon', **kargs) + models["coupon"] = create_models(coupon, "payments.Coupon", **kargs) - if not 'bag' in models and (is_valid(bag) or is_valid(invoice)): + if not "bag" in models and (is_valid(bag) or is_valid(invoice)): kargs = {} - if 'academy' in models: - kargs['academy'] = just_one(models['academy']) + if "academy" in models: + kargs["academy"] = just_one(models["academy"]) - if 'user' in models: - kargs['user'] = just_one(models['user']) + if "user" in models: + kargs["user"] = just_one(models["user"]) - if 'currency' in models: - kargs['currency'] = just_one(models['currency']) + if "currency" in models: + kargs["currency"] = just_one(models["currency"]) - if 'service_item' in models: - kargs['service_items'] = get_list(models['service_item']) + if "service_item" in models: + kargs["service_items"] = get_list(models["service_item"]) - if 'plan' in models: - kargs['plans'] = get_list(models['plan']) + if "plan" in models: + kargs["plans"] = get_list(models["plan"]) - if 'coupon' in models: - kargs['coupons'] = get_list(models['coupon']) + if "coupon" in models: + kargs["coupons"] = get_list(models["coupon"]) - models['bag'] = create_models(bag, 'payments.Bag', **kargs) + models["bag"] = create_models(bag, "payments.Bag", **kargs) - if not 'invoice' in models and is_valid(invoice): + if not "invoice" in models and is_valid(invoice): kargs = {} - if 'currency' in models: - kargs['currency'] = just_one(models['currency']) + if "currency" in models: + kargs["currency"] = just_one(models["currency"]) - if 'bag' in models: - kargs['bag'] = just_one(models['bag']) + if "bag" in models: + kargs["bag"] = just_one(models["bag"]) - if 'user' in models: - kargs['user'] = just_one(models['user']) + if "user" in 
models: + kargs["user"] = just_one(models["user"]) - if 'academy' in models: - kargs['academy'] = just_one(models['academy']) + if "academy" in models: + kargs["academy"] = just_one(models["academy"]) - models['invoice'] = create_models(invoice, 'payments.Invoice', **kargs) + models["invoice"] = create_models(invoice, "payments.Invoice", **kargs) - if not 'plan_financing' in models and is_valid(plan_financing): + if not "plan_financing" in models and is_valid(plan_financing): kargs = {} - if 'invoice' in models: - kargs['invoices'] = get_list(models['invoice']) + if "invoice" in models: + kargs["invoices"] = get_list(models["invoice"]) - if 'user' in models: - kargs['user'] = just_one(models['user']) + if "user" in models: + kargs["user"] = just_one(models["user"]) - if 'cohort_set' in models: - kargs['selected_cohort_set'] = just_one(models['cohort_set']) + if "cohort_set" in models: + kargs["selected_cohort_set"] = just_one(models["cohort_set"]) - if 'cohort' in models: - kargs['joined_cohorts'] = get_list(models['cohort']) + if "cohort" in models: + kargs["joined_cohorts"] = get_list(models["cohort"]) - if 'mentorship_service_set' in models: - kargs['selected_mentorship_service_set'] = just_one(models['mentorship_service_set']) + if "mentorship_service_set" in models: + kargs["selected_mentorship_service_set"] = just_one(models["mentorship_service_set"]) - if 'event_type_set' in models: - kargs['selected_event_type_set'] = just_one(models['event_type_set']) + if "event_type_set" in models: + kargs["selected_event_type_set"] = just_one(models["event_type_set"]) - if 'academy' in models: - kargs['academy'] = just_one(models['academy']) + if "academy" in models: + kargs["academy"] = just_one(models["academy"]) - if 'plan' in models: - kargs['plans'] = get_list(models['plan']) + if "plan" in models: + kargs["plans"] = get_list(models["plan"]) - models['plan_financing'] = create_models(plan_financing, 'payments.PlanFinancing', **kargs) + models["plan_financing"] = create_models(plan_financing, "payments.PlanFinancing", **kargs) - if not 'subscription' in models and (is_valid(subscription) or is_valid(subscription_service_item)): + if not "subscription" in models and (is_valid(subscription) or is_valid(subscription_service_item)): kargs = {} - if 'invoice' in models: - kargs['invoices'] = get_list(models['invoice']) + if "invoice" in models: + kargs["invoices"] = get_list(models["invoice"]) - if 'user' in models: - kargs['user'] = just_one(models['user']) + if "user" in models: + kargs["user"] = just_one(models["user"]) - if 'cohort_set' in models: - kargs['selected_cohort_set'] = just_one(models['cohort_set']) + if "cohort_set" in models: + kargs["selected_cohort_set"] = just_one(models["cohort_set"]) - if 'cohort' in models: - kargs['joined_cohorts'] = get_list(models['cohort']) + if "cohort" in models: + kargs["joined_cohorts"] = get_list(models["cohort"]) - if 'mentorship_service_set' in models: - kargs['selected_mentorship_service_set'] = just_one(models['mentorship_service_set']) + if "mentorship_service_set" in models: + kargs["selected_mentorship_service_set"] = just_one(models["mentorship_service_set"]) - if 'event_type_set' in models: - kargs['selected_event_type_set'] = just_one(models['event_type_set']) + if "event_type_set" in models: + kargs["selected_event_type_set"] = just_one(models["event_type_set"]) - if 'academy' in models: - kargs['academy'] = just_one(models['academy']) + if "academy" in models: + kargs["academy"] = just_one(models["academy"]) - if 'plan' in models: - 
kargs['plans'] = get_list(models['plan']) + if "plan" in models: + kargs["plans"] = get_list(models["plan"]) - models['subscription'] = create_models(subscription, 'payments.Subscription', **kargs) + models["subscription"] = create_models(subscription, "payments.Subscription", **kargs) - if not 'subscription_service_item' in models and is_valid(subscription_service_item): + if not "subscription_service_item" in models and is_valid(subscription_service_item): kargs = {} - if 'subscription' in models: - kargs['subscription'] = just_one(models['subscription']) + if "subscription" in models: + kargs["subscription"] = just_one(models["subscription"]) - if 'service_item' in models: - kargs['service_item'] = just_one(models['service_item']) + if "service_item" in models: + kargs["service_item"] = just_one(models["service_item"]) - if 'mentorship_service_set' in models: - kargs['mentorship_service_set'] = just_one(models['mentorship_service_set']) + if "mentorship_service_set" in models: + kargs["mentorship_service_set"] = just_one(models["mentorship_service_set"]) - if 'cohort_set' in models: - kargs['cohort_sets'] = get_list(models['cohort_set']) + if "cohort_set" in models: + kargs["cohort_sets"] = get_list(models["cohort_set"]) - models['subscription_service_item'] = create_models(subscription_service_item, - 'payments.SubscriptionServiceItem', **kargs) + models["subscription_service_item"] = create_models( + subscription_service_item, "payments.SubscriptionServiceItem", **kargs + ) - if not 'consumable' in models and (is_valid(consumable) or is_valid(consumption_session)): + if not "consumable" in models and (is_valid(consumable) or is_valid(consumption_session)): kargs = {} - if 'service_item' in models: - kargs['service_item'] = just_one(models['service_item']) + if "service_item" in models: + kargs["service_item"] = just_one(models["service_item"]) - if 'user' in models: - kargs['user'] = just_one(models['user']) + if "user" in models: + kargs["user"] = just_one(models["user"]) - if 'cohort_set' in models: - kargs['cohort_set'] = just_one(models['cohort_set']) + if "cohort_set" in models: + kargs["cohort_set"] = just_one(models["cohort_set"]) - if 'mentorship_service_set' in models: - kargs['mentorship_service_set'] = just_one(models['mentorship_service_set']) + if "mentorship_service_set" in models: + kargs["mentorship_service_set"] = just_one(models["mentorship_service_set"]) - if 'event_type_set' in models: - kargs['event_type_set'] = just_one(models['event_type_set']) + if "event_type_set" in models: + kargs["event_type_set"] = just_one(models["event_type_set"]) - models['consumable'] = create_models(consumable, 'payments.Consumable', **kargs) + models["consumable"] = create_models(consumable, "payments.Consumable", **kargs) - if not 'consumption_session' in models and is_valid(consumption_session): + if not "consumption_session" in models and is_valid(consumption_session): kargs = {} - if 'consumable' in models: - kargs['consumable'] = just_one(models['consumable']) + if "consumable" in models: + kargs["consumable"] = just_one(models["consumable"]) - if 'user' in models: - kargs['user'] = just_one(models['user']) + if "user" in models: + kargs["user"] = just_one(models["user"]) - models['consumption_session'] = create_models(consumption_session, 'payments.ConsumptionSession', **kargs) + models["consumption_session"] = create_models(consumption_session, "payments.ConsumptionSession", **kargs) - if not 'plan_service_item' in models and (is_valid(plan_service_item) or 
is_valid(plan_service_item_handler)): + if not "plan_service_item" in models and (is_valid(plan_service_item) or is_valid(plan_service_item_handler)): kargs = {} - if 'plan' in models: - kargs['plan'] = just_one(models['plan']) + if "plan" in models: + kargs["plan"] = just_one(models["plan"]) - if 'service_item' in models: - kargs['service_item'] = just_one(models['service_item']) + if "service_item" in models: + kargs["service_item"] = just_one(models["service_item"]) - models['plan_service_item'] = create_models(plan_service_item, 'payments.PlanServiceItem', **kargs) + models["plan_service_item"] = create_models(plan_service_item, "payments.PlanServiceItem", **kargs) - if not 'plan_service_item_handler' in models and is_valid(plan_service_item_handler): + if not "plan_service_item_handler" in models and is_valid(plan_service_item_handler): kargs = {} - if 'plan_service_item' in models: - kargs['handler'] = just_one(models['plan_service_item']) + if "plan_service_item" in models: + kargs["handler"] = just_one(models["plan_service_item"]) - if 'subscription' in models: - kargs['subscription'] = just_one(models['subscription']) + if "subscription" in models: + kargs["subscription"] = just_one(models["subscription"]) - if 'plan_financing' in models: - kargs['plan_financing'] = just_one(models['plan_financing']) + if "plan_financing" in models: + kargs["plan_financing"] = just_one(models["plan_financing"]) - models['plan_service_item_handler'] = create_models(plan_service_item_handler, - 'payments.PlanServiceItemHandler', **kargs) + models["plan_service_item_handler"] = create_models( + plan_service_item_handler, "payments.PlanServiceItemHandler", **kargs + ) - if not 'service_stock_scheduler' in models and is_valid(service_stock_scheduler): + if not "service_stock_scheduler" in models and is_valid(service_stock_scheduler): kargs = {} - if 'subscription_service_item' in models: - kargs['subscription_handler'] = just_one(models['subscription_service_item']) + if "subscription_service_item" in models: + kargs["subscription_handler"] = just_one(models["subscription_service_item"]) - if 'plan_service_item' in models: - kargs['plan_handler'] = just_one(models['plan_service_item_handler']) + if "plan_service_item" in models: + kargs["plan_handler"] = just_one(models["plan_service_item_handler"]) - if 'consumable' in models: - kargs['consumables'] = get_list(models['consumable']) + if "consumable" in models: + kargs["consumables"] = get_list(models["consumable"]) - models['service_stock_scheduler'] = create_models(service_stock_scheduler, 'payments.ServiceStockScheduler', - **kargs) + models["service_stock_scheduler"] = create_models( + service_stock_scheduler, "payments.ServiceStockScheduler", **kargs + ) - if not 'payment_contact' in models and is_valid(payment_contact): + if not "payment_contact" in models and is_valid(payment_contact): kargs = {} - if 'user' in models: - kargs['user'] = just_one(models['user']) + if "user" in models: + kargs["user"] = just_one(models["user"]) - models['payment_contact'] = create_models(payment_contact, 'payments.PaymentContact', **kargs) + models["payment_contact"] = create_models(payment_contact, "payments.PaymentContact", **kargs) - if not 'financial_reputation' in models and is_valid(financial_reputation): + if not "financial_reputation" in models and is_valid(financial_reputation): kargs = {} - if 'user' in models: - kargs['user'] = just_one(models['user']) + if "user" in models: + kargs["user"] = just_one(models["user"]) - models['financial_reputation'] = 
create_models(financial_reputation, 'payments.FinancialReputation', - **kargs) + models["financial_reputation"] = create_models( + financial_reputation, "payments.FinancialReputation", **kargs + ) return models diff --git a/breathecode/tests/mixins/generate_models_mixin/provisioning_models_mixin.py b/breathecode/tests/mixins/generate_models_mixin/provisioning_models_mixin.py index 185b769a4..b184b3152 100644 --- a/breathecode/tests/mixins/generate_models_mixin/provisioning_models_mixin.py +++ b/breathecode/tests/mixins/generate_models_mixin/provisioning_models_mixin.py @@ -1,6 +1,7 @@ """ Collections of mixins used to login in authorize microservice """ + from breathecode.tests.mixins.models_mixin import ModelsMixin from mixer.backend.django import mixer from .utils import is_valid, create_models, just_one, get_list @@ -8,151 +9,160 @@ class ProvisioningModelsMixin(ModelsMixin): - def generate_provisioning_models(self, - provisioning_vendor=False, - provisioning_vendor_kwargs={}, - provisioning_profile=False, - provisioning_profile_kwargs={}, - provisioning_machine_types=False, - provisioning_academy=False, - provisioning_bill=False, - provisioning_activity=False, - provisioning_container=False, - provisioning_consumption_kind=False, - provisioning_price=False, - provisioning_consumption_event=False, - provisioning_user_consumption=False, - models={}, - **kwargs): + def generate_provisioning_models( + self, + provisioning_vendor=False, + provisioning_vendor_kwargs={}, + provisioning_profile=False, + provisioning_profile_kwargs={}, + provisioning_machine_types=False, + provisioning_academy=False, + provisioning_bill=False, + provisioning_activity=False, + provisioning_container=False, + provisioning_consumption_kind=False, + provisioning_price=False, + provisioning_consumption_event=False, + provisioning_user_consumption=False, + models={}, + **kwargs + ): models = models.copy() - if not 'provisioning_vendor' in models and (is_valid(provisioning_vendor) - or is_valid(provisioning_consumption_event)): + if not "provisioning_vendor" in models and ( + is_valid(provisioning_vendor) or is_valid(provisioning_consumption_event) + ): kargs = {} - models['provisioning_vendor'] = create_models(provisioning_vendor, 'provisioning.ProvisioningVendor', **{ - **kargs, - **provisioning_vendor_kwargs - }) + models["provisioning_vendor"] = create_models( + provisioning_vendor, "provisioning.ProvisioningVendor", **{**kargs, **provisioning_vendor_kwargs} + ) - if not 'provisioning_profile' in models and is_valid(provisioning_profile): + if not "provisioning_profile" in models and is_valid(provisioning_profile): kargs = {} - if 'provisioning_vendor' in models: - kargs['vendor'] = just_one(models['provisioning_vendor']) + if "provisioning_vendor" in models: + kargs["vendor"] = just_one(models["provisioning_vendor"]) - if 'academy' in models: - kargs['academy'] = just_one(models['academy']) + if "academy" in models: + kargs["academy"] = just_one(models["academy"]) - if 'cohort' in models: - kargs['cohorts'] = get_list(models['cohort']) + if "cohort" in models: + kargs["cohorts"] = get_list(models["cohort"]) - if 'profile_academy' in models: - kargs['members'] = get_list(models['profile_academy']) + if "profile_academy" in models: + kargs["members"] = get_list(models["profile_academy"]) - models['provisioning_profile'] = create_models(provisioning_profile, 'provisioning.ProvisioningProfile', **{ - **kargs, - **provisioning_profile_kwargs - }) + models["provisioning_profile"] = create_models( + 
provisioning_profile, "provisioning.ProvisioningProfile", **{**kargs, **provisioning_profile_kwargs} + ) - if not 'provisioning_machine_types' in models and is_valid(provisioning_machine_types): + if not "provisioning_machine_types" in models and is_valid(provisioning_machine_types): kargs = {} - if 'provisioning_vendor' in models: - kargs['vendor'] = just_one(models['provisioning_vendor']) + if "provisioning_vendor" in models: + kargs["vendor"] = just_one(models["provisioning_vendor"]) - models['provisioning_machine_types'] = create_models(provisioning_machine_types, - 'provisioning.ProvisioningMachineTypes', **kargs) + models["provisioning_machine_types"] = create_models( + provisioning_machine_types, "provisioning.ProvisioningMachineTypes", **kargs + ) - if not 'provisioning_academy' in models and is_valid(provisioning_academy): + if not "provisioning_academy" in models and is_valid(provisioning_academy): kargs = {} - if 'provisioning_vendor' in models: - kargs['vendor'] = just_one(models['provisioning_vendor']) + if "provisioning_vendor" in models: + kargs["vendor"] = just_one(models["provisioning_vendor"]) - if 'academy' in models: - kargs['academy'] = just_one(models['academy']) + if "academy" in models: + kargs["academy"] = just_one(models["academy"]) - if 'provisioning_machine_types' in models: - kargs['allowed_machine_types'] = get_list(models['provisioning_machine_types']) + if "provisioning_machine_types" in models: + kargs["allowed_machine_types"] = get_list(models["provisioning_machine_types"]) - models['provisioning_academy'] = create_models(provisioning_academy, 'provisioning.ProvisioningAcademy', - **kargs) + models["provisioning_academy"] = create_models( + provisioning_academy, "provisioning.ProvisioningAcademy", **kargs + ) - if not 'provisioning_bill' in models and is_valid(provisioning_bill): + if not "provisioning_bill" in models and is_valid(provisioning_bill): kargs = {} - if 'provisioning_vendor' in models: - kargs['vendor'] = just_one(models['provisioning_vendor']) + if "provisioning_vendor" in models: + kargs["vendor"] = just_one(models["provisioning_vendor"]) - if 'academy' in models: - kargs['academy'] = just_one(models['academy']) + if "academy" in models: + kargs["academy"] = just_one(models["academy"]) - if 'provisioning_machine_types' in models: - kargs['allowed_machine_types'] = get_list(models['provisioning_machine_types']) + if "provisioning_machine_types" in models: + kargs["allowed_machine_types"] = get_list(models["provisioning_machine_types"]) - models['provisioning_bill'] = create_models(provisioning_bill, 'provisioning.ProvisioningBill', **kargs) + models["provisioning_bill"] = create_models(provisioning_bill, "provisioning.ProvisioningBill", **kargs) - if not 'provisioning_consumption_kind' in models and (is_valid(provisioning_consumption_kind) - or is_valid(provisioning_user_consumption)): + if not "provisioning_consumption_kind" in models and ( + is_valid(provisioning_consumption_kind) or is_valid(provisioning_user_consumption) + ): kargs = {} - models['provisioning_consumption_kind'] = create_models(provisioning_consumption_kind, - 'provisioning.ProvisioningConsumptionKind', **kargs) + models["provisioning_consumption_kind"] = create_models( + provisioning_consumption_kind, "provisioning.ProvisioningConsumptionKind", **kargs + ) - if not 'provisioning_price' in models and (is_valid(provisioning_price) - or is_valid(provisioning_consumption_event)): + if not "provisioning_price" in models and ( + is_valid(provisioning_price) or 
is_valid(provisioning_consumption_event) + ): kargs = {} - if 'currency' in models: - kargs['currency'] = just_one(models['currency']) + if "currency" in models: + kargs["currency"] = just_one(models["currency"]) - models['provisioning_price'] = create_models(provisioning_price, 'provisioning.ProvisioningPrice', **kargs) + models["provisioning_price"] = create_models(provisioning_price, "provisioning.ProvisioningPrice", **kargs) - if not 'provisioning_consumption_event' in models and is_valid(provisioning_consumption_event): + if not "provisioning_consumption_event" in models and is_valid(provisioning_consumption_event): kargs = {} - if 'provisioning_vendor' in models: - kargs['vendor'] = just_one(models['provisioning_vendor']) + if "provisioning_vendor" in models: + kargs["vendor"] = just_one(models["provisioning_vendor"]) - if 'provisioning_price' in models: - kargs['price'] = just_one(models['provisioning_price']) + if "provisioning_price" in models: + kargs["price"] = just_one(models["provisioning_price"]) - models['provisioning_consumption_event'] = create_models(provisioning_consumption_event, - 'provisioning.ProvisioningConsumptionEvent', - **kargs) + models["provisioning_consumption_event"] = create_models( + provisioning_consumption_event, "provisioning.ProvisioningConsumptionEvent", **kargs + ) - if not 'provisioning_user_consumption' in models and is_valid(provisioning_user_consumption): + if not "provisioning_user_consumption" in models and is_valid(provisioning_user_consumption): kargs = {} - if 'provisioning_consumption_kind' in models: - kargs['kind'] = just_one(models['provisioning_consumption_kind']) + if "provisioning_consumption_kind" in models: + kargs["kind"] = just_one(models["provisioning_consumption_kind"]) - if 'provisioning_bill' in models: - kargs['bills'] = get_list(models['provisioning_bill']) + if "provisioning_bill" in models: + kargs["bills"] = get_list(models["provisioning_bill"]) - if 'provisioning_consumption_event' in models: - kargs['events'] = get_list(models['provisioning_consumption_event']) + if "provisioning_consumption_event" in models: + kargs["events"] = get_list(models["provisioning_consumption_event"]) - models['provisioning_user_consumption'] = create_models(provisioning_user_consumption, - 'provisioning.ProvisioningUserConsumption', **kargs) + models["provisioning_user_consumption"] = create_models( + provisioning_user_consumption, "provisioning.ProvisioningUserConsumption", **kargs + ) - if not 'provisioning_activity' in models and is_valid(provisioning_activity): + if not "provisioning_activity" in models and is_valid(provisioning_activity): kargs = {} - if 'provisioning_bill' in models: - kargs['bill'] = just_one(models['provisioning_bill']) + if "provisioning_bill" in models: + kargs["bill"] = just_one(models["provisioning_bill"]) - models['provisioning_activity'] = create_models(provisioning_activity, 'provisioning.ProvisioningActivity', - **kargs) + models["provisioning_activity"] = create_models( + provisioning_activity, "provisioning.ProvisioningActivity", **kargs + ) - if not 'provisioning_container' in models and is_valid(provisioning_container): + if not "provisioning_container" in models and is_valid(provisioning_container): kargs = {} - if 'user' in models: - kargs['user'] = just_one(models['user']) + if "user" in models: + kargs["user"] = just_one(models["user"]) - models['provisioning_container'] = create_models(provisioning_container, - 'provisioning.ProvisioningContainer', **kargs) + models["provisioning_container"] = 
create_models( + provisioning_container, "provisioning.ProvisioningContainer", **kargs + ) return models diff --git a/breathecode/tests/mixins/generate_models_mixin/registry_models_mixin.py b/breathecode/tests/mixins/generate_models_mixin/registry_models_mixin.py index 6aad159ca..bf13f210d 100644 --- a/breathecode/tests/mixins/generate_models_mixin/registry_models_mixin.py +++ b/breathecode/tests/mixins/generate_models_mixin/registry_models_mixin.py @@ -1,6 +1,7 @@ """ Collections of mixins used to login in authorize microservice """ + from breathecode.tests.mixins.models_mixin import ModelsMixin from mixer.backend.django import mixer from .utils import is_valid, create_models, just_one, get_list @@ -8,114 +9,116 @@ class RegistryModelsMixin(ModelsMixin): - def generate_registry_models(self, - asset_technology=False, - asset_category=False, - keyword_cluster=False, - asset_keyword=False, - asset=False, - asset_image=False, - asset_alias=False, - asset_comment=False, - asset_error_log=False, - models={}, - **kwargs): + def generate_registry_models( + self, + asset_technology=False, + asset_category=False, + keyword_cluster=False, + asset_keyword=False, + asset=False, + asset_image=False, + asset_alias=False, + asset_comment=False, + asset_error_log=False, + models={}, + **kwargs + ): models = models.copy() - if not 'asset_technology' in models and is_valid(asset_technology): + if not "asset_technology" in models and is_valid(asset_technology): kargs = {} - models['asset_technology'] = create_models(asset_technology, 'registry.AssetTechnology', **kargs) + models["asset_technology"] = create_models(asset_technology, "registry.AssetTechnology", **kargs) - if not 'asset_category' in models and is_valid(asset_category): + if not "asset_category" in models and is_valid(asset_category): kargs = {} - if 'academy' in models: - kargs['academy'] = just_one(models['academy']) + if "academy" in models: + kargs["academy"] = just_one(models["academy"]) - models['asset_category'] = create_models(asset_category, 'registry.AssetCategory', **kargs) + models["asset_category"] = create_models(asset_category, "registry.AssetCategory", **kargs) - if not 'keyword_cluster' in models and is_valid(keyword_cluster): + if not "keyword_cluster" in models and is_valid(keyword_cluster): kargs = {} - if 'academy' in models: - kargs['academy'] = just_one(models['academy']) + if "academy" in models: + kargs["academy"] = just_one(models["academy"]) - models['keyword_cluster'] = create_models(keyword_cluster, 'registry.KeywordCluster', **kargs) + models["keyword_cluster"] = create_models(keyword_cluster, "registry.KeywordCluster", **kargs) - if not 'asset_keyword' in models and is_valid(asset_keyword): + if not "asset_keyword" in models and is_valid(asset_keyword): kargs = {} - if 'keyword_cluster' in models: - kargs['cluster'] = just_one(models['keyword_cluster']) + if "keyword_cluster" in models: + kargs["cluster"] = just_one(models["keyword_cluster"]) - if 'academy' in models: - kargs['academy'] = just_one(models['academy']) + if "academy" in models: + kargs["academy"] = just_one(models["academy"]) - models['asset_keyword'] = create_models(asset_keyword, 'registry.AssetKeyword', **kargs) + models["asset_keyword"] = create_models(asset_keyword, "registry.AssetKeyword", **kargs) - if not 'asset' in models and (is_valid(asset) or is_valid(asset_alias) or is_valid(asset_comment)): + if not "asset" in models and (is_valid(asset) or is_valid(asset_alias) or is_valid(asset_comment)): kargs = { - 'all_translations': [], + 
"all_translations": [], } - if 'asset_technology' in models: - kargs['technologies'] = get_list(models['asset_technology']) + if "asset_technology" in models: + kargs["technologies"] = get_list(models["asset_technology"]) - if 'asset_keyword' in models: - kargs['seo_keywords'] = get_list(models['asset_keyword']) + if "asset_keyword" in models: + kargs["seo_keywords"] = get_list(models["asset_keyword"]) - if 'asset_category' in models: - kargs['category'] = just_one(models['asset_category']) + if "asset_category" in models: + kargs["category"] = just_one(models["asset_category"]) - if 'academy' in models: - kargs['academy'] = just_one(models['academy']) + if "academy" in models: + kargs["academy"] = just_one(models["academy"]) - models['asset'] = create_models(asset, 'registry.Asset', **kargs) + models["asset"] = create_models(asset, "registry.Asset", **kargs) - if 'asset_technology' in models and 'asset' in models: - technologies = models['asset_technology'] + if "asset_technology" in models and "asset" in models: + technologies = models["asset_technology"] if not isinstance(technologies, list): - technologies = [models['asset_technology']] + technologies = [models["asset_technology"]] for instance in technologies: - instance.featured_asset = just_one(models['asset']) + instance.featured_asset = just_one(models["asset"]) instance.save() - if not 'asset_alias' in models and is_valid(asset_alias): + if not "asset_alias" in models and is_valid(asset_alias): kargs = {} - if 'asset' in models: - kargs['asset'] = just_one(models['asset']) + if "asset" in models: + kargs["asset"] = just_one(models["asset"]) - models['asset_alias'] = create_models(asset_alias, 'registry.AssetAlias', **kargs) + models["asset_alias"] = create_models(asset_alias, "registry.AssetAlias", **kargs) - if not 'asset_comment' in models and is_valid(asset_comment): + if not "asset_comment" in models and is_valid(asset_comment): kargs = {} - if 'asset' in models: - kargs['asset'] = just_one(models['asset']) + if "asset" in models: + kargs["asset"] = just_one(models["asset"]) - if 'author' in models: - kargs['user'] = just_one(models['user']) + if "author" in models: + kargs["user"] = just_one(models["user"]) - models['asset_comment'] = create_models(asset_comment, 'registry.AssetComment', **kargs) + models["asset_comment"] = create_models(asset_comment, "registry.AssetComment", **kargs) - if not 'asset_error_log' in models and is_valid(asset_error_log): + if not "asset_error_log" in models and is_valid(asset_error_log): kargs = {} - if 'user' in models: - kargs['user'] = just_one(models['user']) + if "user" in models: + kargs["user"] = just_one(models["user"]) - if 'asset' in models: - kargs['asset'] = just_one(models['asset']) + if "asset" in models: + kargs["asset"] = just_one(models["asset"]) - models['asset_error_log'] = create_models(asset_error_log, 'registry.AssetErrorLog', **kargs) + models["asset_error_log"] = create_models(asset_error_log, "registry.AssetErrorLog", **kargs) - if not 'asset_image' in models and is_valid(asset_image): + if not "asset_image" in models and is_valid(asset_image): kargs = {} - if 'asset' in models: - kargs['assets'] = get_list(models['asset']) + if "asset" in models: + kargs["assets"] = get_list(models["asset"]) - models['asset_image'] = create_models(asset_image, 'registry.AssetImage', **kargs) + models["asset_image"] = create_models(asset_image, "registry.AssetImage", **kargs) return models diff --git a/breathecode/tests/mixins/generate_models_mixin/task_manager_models_mixin.py 
b/breathecode/tests/mixins/generate_models_mixin/task_manager_models_mixin.py index 610f27561..136606d1b 100644 --- a/breathecode/tests/mixins/generate_models_mixin/task_manager_models_mixin.py +++ b/breathecode/tests/mixins/generate_models_mixin/task_manager_models_mixin.py @@ -1,6 +1,7 @@ """ Collections of mixins used to login in authorize microservice """ + from mixer.backend.django import mixer from breathecode.tests.mixins.generate_models_mixin.utils import get_list, just_one @@ -15,20 +16,20 @@ def generate_task_manager_models(self, task_manager=False, models={}, **kwargs): """Generate models""" models = models.copy() - if not 'task_manager' in models and is_valid(task_manager): + if not "task_manager" in models and is_valid(task_manager): kargs = {} - models['task_manager'] = create_models(task_manager, 'task_manager.TaskManager', **kargs) + models["task_manager"] = create_models(task_manager, "task_manager.TaskManager", **kargs) - if not 'task_watcher' in models and is_valid(task_manager): + if not "task_watcher" in models and is_valid(task_manager): kargs = {} - if 'user' in models: - kargs['user'] = just_one(models['user']) + if "user" in models: + kargs["user"] = just_one(models["user"]) - if 'task_manager' in models: - kargs['tasks'] = get_list(models['task_manager']) + if "task_manager" in models: + kargs["tasks"] = get_list(models["task_manager"]) - models['task_watcher'] = create_models(task_manager, 'task_manager.TaskWatcher', **kargs) + models["task_watcher"] = create_models(task_manager, "task_manager.TaskWatcher", **kargs) return models diff --git a/breathecode/tests/mixins/generate_models_mixin/utils/argument_parser.py b/breathecode/tests/mixins/generate_models_mixin/utils/argument_parser.py index 6b5c01518..b3a9ba728 100644 --- a/breathecode/tests/mixins/generate_models_mixin/utils/argument_parser.py +++ b/breathecode/tests/mixins/generate_models_mixin/utils/argument_parser.py @@ -5,7 +5,7 @@ from breathecode.tests.mixins.generate_models_mixin.exceptions import BadArgument -__all__ = ['argument_parser'] +__all__ = ["argument_parser"] list_of_args = list[tuple[int, dict[str, Any]]] args = list[tuple[int, dict[str, Any]]] @@ -33,7 +33,7 @@ def boolean_parser(arg: int) -> args: def tuple_parser(arg: tuple[Any, Any]) -> list_of_args: if len(arg) != 2: - raise BadArgument('The tuple should have length of two elements') + raise BadArgument("The tuple should have length of two elements") if isinstance(arg[0], int) and isinstance(arg[1], dict): return (arg[0], argument_fixer(arg[1] or dict())) @@ -41,7 +41,7 @@ def tuple_parser(arg: tuple[Any, Any]) -> list_of_args: if isinstance(arg[0], int) and isinstance(arg[1], dict): return (arg[1], argument_fixer(arg[0] or dict())) - raise BadArgument(f'The tuple[{arg[0].__class__.__name__}, {arg[0].__class__.__name__}] is invalid') + raise BadArgument(f"The tuple[{arg[0].__class__.__name__}, {arg[0].__class__.__name__}] is invalid") def list_parser(arg: int) -> list_of_args: @@ -55,7 +55,7 @@ def list_parser(arg: int) -> list_of_args: result.append(tuple_parser(item)) continue - raise BadArgument(f'You can\'t pass a list of {arg.__class__.__name__} as argument') + raise BadArgument(f"You can't pass a list of {arg.__class__.__name__} as argument") return result @@ -76,5 +76,5 @@ def argument_parser(arg: Any) -> list_of_args: if isinstance(arg, int): return [integer_parser(arg)] - print(f'The argument parser has a receive a invalid type {arg.__class__.__name__}') + print(f"The argument parser has a receive a invalid type 
{arg.__class__.__name__}") return [] diff --git a/breathecode/tests/mixins/generate_models_mixin/utils/create_models.py b/breathecode/tests/mixins/generate_models_mixin/utils/create_models.py index e5b2c5d2a..daf00b7fa 100644 --- a/breathecode/tests/mixins/generate_models_mixin/utils/create_models.py +++ b/breathecode/tests/mixins/generate_models_mixin/utils/create_models.py @@ -7,7 +7,7 @@ from .argument_parser import argument_parser -__all__ = ['create_models'] +__all__ = ["create_models"] logger = logging.getLogger(__name__) @@ -21,11 +21,11 @@ def cycle(how_many): def debug_mixer(attr, path, **kwargs): for how_many, arguments in argument_parser(attr): - sentence = '' + sentence = "" if how_many > 1: - sentence += f'mixer.cycle({how_many}).blend(' + sentence += f"mixer.cycle({how_many}).blend(" else: - sentence += 'mixer.blend(' + sentence += "mixer.blend(" sentence += f"'{path}', " values = { @@ -36,11 +36,11 @@ def debug_mixer(attr, path, **kwargs): if isinstance(values[key], str): sentence += f"{key}='{values[key]}', " elif isinstance(values[key], int) or isinstance(values[key], list): - sentence += f'{key}={values[key]}, ' + sentence += f"{key}={values[key]}, " else: - sentence += f'{key}=<{values[key]}>, ' + sentence += f"{key}=<{values[key]}>, " - sentence = sentence[:-2] + ')' + sentence = sentence[:-2] + ")" print(sentence) @@ -49,10 +49,14 @@ def create_models(attr, path, **kwargs): # debug_mixer(attr, path, **kwargs) result = [ - cycle(how_many).blend(path, **{ - **kwargs, - **arguments, - }) for how_many, arguments in argument_parser(attr) + cycle(how_many).blend( + path, + **{ + **kwargs, + **arguments, + }, + ) + for how_many, arguments in argument_parser(attr) ] if len(result) == 1: diff --git a/breathecode/tests/mixins/generate_models_mixin/utils/get_list.py b/breathecode/tests/mixins/generate_models_mixin/utils/get_list.py index cd516bb0a..73f3e449a 100644 --- a/breathecode/tests/mixins/generate_models_mixin/utils/get_list.py +++ b/breathecode/tests/mixins/generate_models_mixin/utils/get_list.py @@ -1,6 +1,6 @@ from typing import Any -__all__ = ['get_list'] +__all__ = ["get_list"] def get_list(attr: Any) -> bool: diff --git a/breathecode/tests/mixins/generate_models_mixin/utils/is_valid.py b/breathecode/tests/mixins/generate_models_mixin/utils/is_valid.py index fe82870a3..7f40b06f4 100644 --- a/breathecode/tests/mixins/generate_models_mixin/utils/is_valid.py +++ b/breathecode/tests/mixins/generate_models_mixin/utils/is_valid.py @@ -1,6 +1,6 @@ from typing import Any -__all__ = ['is_valid'] +__all__ = ["is_valid"] def is_valid(attr: Any) -> bool: diff --git a/breathecode/tests/mixins/generate_models_mixin/utils/just_one.py b/breathecode/tests/mixins/generate_models_mixin/utils/just_one.py index 698c5387c..e87f4cf32 100644 --- a/breathecode/tests/mixins/generate_models_mixin/utils/just_one.py +++ b/breathecode/tests/mixins/generate_models_mixin/utils/just_one.py @@ -1,6 +1,6 @@ from typing import Any -__all__ = ['just_one'] +__all__ = ["just_one"] def just_one(attr: Any) -> bool: diff --git a/breathecode/tests/mixins/generate_queries_mixin/__init__.py b/breathecode/tests/mixins/generate_queries_mixin/__init__.py index 5d04b9f43..868e92406 100644 --- a/breathecode/tests/mixins/generate_queries_mixin/__init__.py +++ b/breathecode/tests/mixins/generate_queries_mixin/__init__.py @@ -1,4 +1,5 @@ """ Global mixins """ + from .generate_queries_mixin import * diff --git a/breathecode/tests/mixins/generate_queries_mixin/admissions_queries_mixin.py 
b/breathecode/tests/mixins/generate_queries_mixin/admissions_queries_mixin.py index 2c7d1aff9..68226e060 100644 --- a/breathecode/tests/mixins/generate_queries_mixin/admissions_queries_mixin.py +++ b/breathecode/tests/mixins/generate_queries_mixin/admissions_queries_mixin.py @@ -3,15 +3,22 @@ """ -class AdmissionsQueriesMixin(): +class AdmissionsQueriesMixin: def generate_admissions_queries(self): """Generate queries""" return { - 'module': - 'admissions', - 'models': [ - 'Country', 'City', 'Academy', 'Syllabus', 'SyllabusVersion', 'SyllabusSchedule', 'Cohort', 'CohortUser', - 'SyllabusScheduleTimeSlot', 'CohortTimeSlot' - ] + "module": "admissions", + "models": [ + "Country", + "City", + "Academy", + "Syllabus", + "SyllabusVersion", + "SyllabusSchedule", + "Cohort", + "CohortUser", + "SyllabusScheduleTimeSlot", + "CohortTimeSlot", + ], } diff --git a/breathecode/tests/mixins/generate_queries_mixin/assessment_queries_mixin.py b/breathecode/tests/mixins/generate_queries_mixin/assessment_queries_mixin.py index 133f02910..69d8c89e7 100644 --- a/breathecode/tests/mixins/generate_queries_mixin/assessment_queries_mixin.py +++ b/breathecode/tests/mixins/generate_queries_mixin/assessment_queries_mixin.py @@ -3,8 +3,8 @@ """ -class AssessmentQueriesMixin(): +class AssessmentQueriesMixin: def generate_assessment_queries(self): """Generate queries""" - return {'module': 'assessment', 'models': ['Assessment', 'Question', 'Option', 'StudentAssessment', 'Answer']} + return {"module": "assessment", "models": ["Assessment", "Question", "Option", "StudentAssessment", "Answer"]} diff --git a/breathecode/tests/mixins/generate_queries_mixin/assignments_queries_mixin.py b/breathecode/tests/mixins/generate_queries_mixin/assignments_queries_mixin.py index b0fe55b7d..4e3b49cc0 100644 --- a/breathecode/tests/mixins/generate_queries_mixin/assignments_queries_mixin.py +++ b/breathecode/tests/mixins/generate_queries_mixin/assignments_queries_mixin.py @@ -3,8 +3,8 @@ """ -class AssignmentsQueriesMixin(): +class AssignmentsQueriesMixin: def generate_assignments_queries(self): """Generate queries""" - return {'module': 'assignments', 'models': ['Task']} + return {"module": "assignments", "models": ["Task"]} diff --git a/breathecode/tests/mixins/generate_queries_mixin/authenticate_queries_mixin.py b/breathecode/tests/mixins/generate_queries_mixin/authenticate_queries_mixin.py index b21fb6302..c6f9d777d 100644 --- a/breathecode/tests/mixins/generate_queries_mixin/authenticate_queries_mixin.py +++ b/breathecode/tests/mixins/generate_queries_mixin/authenticate_queries_mixin.py @@ -3,15 +3,23 @@ """ -class AuthenticateQueriesMixin(): +class AuthenticateQueriesMixin: def generate_authenticate_queries(self): """Generate queries""" return { - 'module': - 'authenticate', - 'models': [ - 'Profile', 'Capability', 'Role', 'UserInvite', 'ProfileAcademy', 'CredentialsGithub', - 'CredentialsSlack', 'CredentialsFacebook', 'CredentialsQuickBooks', 'Token', 'DeviceId' - ] + "module": "authenticate", + "models": [ + "Profile", + "Capability", + "Role", + "UserInvite", + "ProfileAcademy", + "CredentialsGithub", + "CredentialsSlack", + "CredentialsFacebook", + "CredentialsQuickBooks", + "Token", + "DeviceId", + ], } diff --git a/breathecode/tests/mixins/generate_queries_mixin/career_queries_mixin.py b/breathecode/tests/mixins/generate_queries_mixin/career_queries_mixin.py index 6e033c812..69b200858 100644 --- a/breathecode/tests/mixins/generate_queries_mixin/career_queries_mixin.py +++ 
b/breathecode/tests/mixins/generate_queries_mixin/career_queries_mixin.py @@ -3,15 +3,22 @@ """ -class CareerQueriesMixin(): +class CareerQueriesMixin: def generate_career_queries(self): """Generate queries""" return { - 'module': - 'career', - 'models': [ - 'Platform', 'Position', 'ZyteProject', 'Spider', 'PositionAlias', 'CareerTag', 'Location', - 'LocationAlias', 'Employer', 'Job' - ] + "module": "career", + "models": [ + "Platform", + "Position", + "ZyteProject", + "Spider", + "PositionAlias", + "CareerTag", + "Location", + "LocationAlias", + "Employer", + "Job", + ], } diff --git a/breathecode/tests/mixins/generate_queries_mixin/certificate_queries_mixin.py b/breathecode/tests/mixins/generate_queries_mixin/certificate_queries_mixin.py index 192e4dda6..f44cf80f0 100644 --- a/breathecode/tests/mixins/generate_queries_mixin/certificate_queries_mixin.py +++ b/breathecode/tests/mixins/generate_queries_mixin/certificate_queries_mixin.py @@ -3,8 +3,8 @@ """ -class CertificateQueriesMixin(): +class CertificateQueriesMixin: def generate_certificate_queries(self): """Generate queries""" - return {'module': 'certificate', 'models': ['Specialty', 'Badge', 'LayoutDesign', 'UserSpecialty']} + return {"module": "certificate", "models": ["Specialty", "Badge", "LayoutDesign", "UserSpecialty"]} diff --git a/breathecode/tests/mixins/generate_queries_mixin/events_queries_mixin.py b/breathecode/tests/mixins/generate_queries_mixin/events_queries_mixin.py index 699ff1169..d057c3c7f 100644 --- a/breathecode/tests/mixins/generate_queries_mixin/events_queries_mixin.py +++ b/breathecode/tests/mixins/generate_queries_mixin/events_queries_mixin.py @@ -3,11 +3,11 @@ """ -class EventsQueriesMixin(): +class EventsQueriesMixin: def generate_events_queries(self): """Generate queries""" return { - 'module': 'events', - 'models': ['Organization', 'Organizer', 'Venue', 'EventType', 'Event', 'EventCheckin', 'EventbriteWebhook'] + "module": "events", + "models": ["Organization", "Organizer", "Venue", "EventType", "Event", "EventCheckin", "EventbriteWebhook"], } diff --git a/breathecode/tests/mixins/generate_queries_mixin/feedback_queries_mixin.py b/breathecode/tests/mixins/generate_queries_mixin/feedback_queries_mixin.py index ea23f2dc1..5953f8b96 100644 --- a/breathecode/tests/mixins/generate_queries_mixin/feedback_queries_mixin.py +++ b/breathecode/tests/mixins/generate_queries_mixin/feedback_queries_mixin.py @@ -3,8 +3,8 @@ """ -class FeedbackQueriesMixin(): +class FeedbackQueriesMixin: def generate_feedback_queries(self): """Generate queries""" - return {'module': 'feedback', 'models': ['Survey', 'Answer']} + return {"module": "feedback", "models": ["Survey", "Answer"]} diff --git a/breathecode/tests/mixins/generate_queries_mixin/freelance_queries_mixin.py b/breathecode/tests/mixins/generate_queries_mixin/freelance_queries_mixin.py index e9f27e8e1..fc28a1a30 100644 --- a/breathecode/tests/mixins/generate_queries_mixin/freelance_queries_mixin.py +++ b/breathecode/tests/mixins/generate_queries_mixin/freelance_queries_mixin.py @@ -3,8 +3,8 @@ """ -class FreelanceQueriesMixin(): +class FreelanceQueriesMixin: def generate_freelance_queries(self): """Generate queries""" - return {'module': 'freelance', 'models': ['Freelancer', 'Bill', 'Issue']} + return {"module": "freelance", "models": ["Freelancer", "Bill", "Issue"]} diff --git a/breathecode/tests/mixins/generate_queries_mixin/generate_queries_mixin.py b/breathecode/tests/mixins/generate_queries_mixin/generate_queries_mixin.py index db7525139..bffd6bf83 100644 --- 
a/breathecode/tests/mixins/generate_queries_mixin/generate_queries_mixin.py +++ b/breathecode/tests/mixins/generate_queries_mixin/generate_queries_mixin.py @@ -1,6 +1,7 @@ """ Collections of mixins used to login in authorize microservice """ + import re from breathecode.tests.mixins.models_mixin import ModelsMixin @@ -19,30 +20,42 @@ from .media_queries_mixin import MediaQueriesMixin from .career_queries_mixin import CareerQueriesMixin -__all__ = ['GenerateQueriesMixin'] - - -class GenerateQueriesMixin(ModelsMixin, AdmissionsQueriesMixin, AssessmentQueriesMixin, AssignmentsQueriesMixin, - AuthenticateQueriesMixin, CertificateQueriesMixin, EventsQueriesMixin, FeedbackQueriesMixin, - FreelanceQueriesMixin, MarketingQueriesMixin, NotifyQueriesMixin, MonitoringQueriesMixin, - MediaQueriesMixin, CareerQueriesMixin): - __project__ = 'breathecode' +__all__ = ["GenerateQueriesMixin"] + + +class GenerateQueriesMixin( + ModelsMixin, + AdmissionsQueriesMixin, + AssessmentQueriesMixin, + AssignmentsQueriesMixin, + AuthenticateQueriesMixin, + CertificateQueriesMixin, + EventsQueriesMixin, + FeedbackQueriesMixin, + FreelanceQueriesMixin, + MarketingQueriesMixin, + NotifyQueriesMixin, + MonitoringQueriesMixin, + MediaQueriesMixin, + CareerQueriesMixin, +): + __project__ = "breathecode" __generate_queries_was_loaded__ = False - def __get_model__(self, method_name, Model, key='id'): + def __get_model__(self, method_name, Model, key="id"): def get_model(pk): - print(f'The method `{method_name}` is deprecated, use `self.bc.database.list_of` instead') + print(f"The method `{method_name}` is deprecated, use `self.bc.database.list_of` instead") kwargs = {key: pk} return Model.objects.filter(**kwargs).first() return get_model - def __get_model_dict__(self, method_name, Model, key='id'): + def __get_model_dict__(self, method_name, Model, key="id"): def get_model_dict(pk): - print(f'The method `{method_name}` is deprecated, use `self.bc.database.list_of` instead') + print(f"The method `{method_name}` is deprecated, use `self.bc.database.list_of` instead") kwargs = {key: pk} data = Model.objects.filter(**kwargs).first() @@ -53,7 +66,7 @@ def get_model_dict(pk): def __all_model__(self, method_name, Model): def all_model(): - print(f'The method `{method_name}` is deprecated, use `self.bc.database.list_of` instead') + print(f"The method `{method_name}` is deprecated, use `self.bc.database.list_of` instead") return Model.objects.filter() return all_model @@ -61,7 +74,7 @@ def all_model(): def __all_model_dict__(self, method_name, Model): def all_model_dict(): - print(f'The method `{method_name}` is deprecated, use `self.bc.database.list_of` instead') + print(f"The method `{method_name}` is deprecated, use `self.bc.database.list_of` instead") return [self.remove_dinamics_fields(data.__dict__.copy()) for data in Model.objects.filter()] return all_model_dict @@ -69,22 +82,22 @@ def all_model_dict(): def __count_model__(self, method_name, Model): def count_model(): - print(f'The method `{method_name}` is deprecated, use `self.bc.database.list_of` instead') + print(f"The method `{method_name}` is deprecated, use `self.bc.database.list_of` instead") return Model.objects.count() return count_model def __set_queries__(self, Model): - snake_case_name = re.sub(r'(?<!^)(?=[A-Z])', '_', Model.__name__).lower() + snake_case_name = re.sub(r"(?<!^)(?=[A-Z])", "_", Model.__name__).lower() - setattr(self, f'get_{snake_case_name}', self.__get_model__(f'get_{snake_case_name}', Model)) - setattr(self, f'get_{snake_case_name}_dict', 
self.__get_model_dict__(f'get_{snake_case_name}_dict', Model)) - setattr(self, f'all_{snake_case_name}', self.__all_model__(f'all_{snake_case_name}', Model)) - setattr(self, f'all_{snake_case_name}_dict', self.__all_model_dict__(f'all_{snake_case_name}_dict', Model)) - setattr(self, f'count_{snake_case_name}', self.__count_model__(f'count_{snake_case_name}', Model)) + setattr(self, f"get_{snake_case_name}", self.__get_model__(f"get_{snake_case_name}", Model)) + setattr(self, f"get_{snake_case_name}_dict", self.__get_model_dict__(f"get_{snake_case_name}_dict", Model)) + setattr(self, f"all_{snake_case_name}", self.__all_model__(f"all_{snake_case_name}", Model)) + setattr(self, f"all_{snake_case_name}_dict", self.__all_model_dict__(f"all_{snake_case_name}_dict", Model)) + setattr(self, f"count_{snake_case_name}", self.__count_model__(f"count_{snake_case_name}", Model)) def generate_queries(self): - print(f'The method `generate_queries` is deprecated, use `self.bc.database.list_of` instead') + print(f"The method `generate_queries` is deprecated, use `self.bc.database.list_of` instead") if self.__generate_queries_was_loaded__: return @@ -107,19 +120,20 @@ def generate_queries(self): for descriptor in descriptors: obj = descriptor() - models = obj['models'] - module_name = obj['module'] + models = obj["models"] + module_name = obj["module"] for model in models: - path = f'{self.__project__}.{module_name}.models' + path = f"{self.__project__}.{module_name}.models" import importlib + module = importlib.import_module(path) if hasattr(module, model): Model = getattr(module, model) self.__set_queries__(Model) else: - print(f'{model} not exist in current path `{path}`') + print(f"{model} not exist in current path `{path}`") self.__set_queries__(User) self.__generate_queries_was_loaded__ = True diff --git a/breathecode/tests/mixins/generate_queries_mixin/marketing_queries_mixin.py b/breathecode/tests/mixins/generate_queries_mixin/marketing_queries_mixin.py index 39acfd13f..38a3afcaa 100644 --- a/breathecode/tests/mixins/generate_queries_mixin/marketing_queries_mixin.py +++ b/breathecode/tests/mixins/generate_queries_mixin/marketing_queries_mixin.py @@ -3,15 +3,22 @@ """ -class MarketingQueriesMixin(): +class MarketingQueriesMixin: def generate_marketing_queries(self): """Generate queries""" return { - 'module': - 'marketing', - 'models': [ - 'ActiveCampaignAcademy', 'Automation', 'Tag', 'Contact', 'LeadGenerationApp', 'FormEntry', 'ShortLink', - 'AcademyAlias', 'ActiveCampaignWebhook', 'Downloadable' - ] + "module": "marketing", + "models": [ + "ActiveCampaignAcademy", + "Automation", + "Tag", + "Contact", + "LeadGenerationApp", + "FormEntry", + "ShortLink", + "AcademyAlias", + "ActiveCampaignWebhook", + "Downloadable", + ], } diff --git a/breathecode/tests/mixins/generate_queries_mixin/media_queries_mixin.py b/breathecode/tests/mixins/generate_queries_mixin/media_queries_mixin.py index 0d23822e4..e69eba644 100644 --- a/breathecode/tests/mixins/generate_queries_mixin/media_queries_mixin.py +++ b/breathecode/tests/mixins/generate_queries_mixin/media_queries_mixin.py @@ -3,8 +3,8 @@ """ -class MediaQueriesMixin(): +class MediaQueriesMixin: def generate_media_queries(self): """Generate queries""" - return {'module': 'media', 'models': ['Category', 'Media', 'MediaResolution']} + return {"module": "media", "models": ["Category", "Media", "MediaResolution"]} diff --git a/breathecode/tests/mixins/generate_queries_mixin/monitoring_queries_mixin.py 
b/breathecode/tests/mixins/generate_queries_mixin/monitoring_queries_mixin.py index f83c82f4d..e2500c390 100644 --- a/breathecode/tests/mixins/generate_queries_mixin/monitoring_queries_mixin.py +++ b/breathecode/tests/mixins/generate_queries_mixin/monitoring_queries_mixin.py @@ -3,8 +3,8 @@ """ -class MonitoringQueriesMixin(): +class MonitoringQueriesMixin: def generate_monitoring_queries(self): """Generate queries""" - return {'module': 'monitoring', 'models': ['Application', 'Endpoint', 'MonitorScript']} + return {"module": "monitoring", "models": ["Application", "Endpoint", "MonitorScript"]} diff --git a/breathecode/tests/mixins/generate_queries_mixin/notify_queries_mixin.py b/breathecode/tests/mixins/generate_queries_mixin/notify_queries_mixin.py index f2b380b14..408a8e295 100644 --- a/breathecode/tests/mixins/generate_queries_mixin/notify_queries_mixin.py +++ b/breathecode/tests/mixins/generate_queries_mixin/notify_queries_mixin.py @@ -3,8 +3,8 @@ """ -class NotifyQueriesMixin(): +class NotifyQueriesMixin: def generate_notify_queries(self): """Generate queries""" - return {'module': 'notify', 'models': ['Device', 'SlackTeam', 'SlackUser', 'SlackUserTeam', 'SlackChannel']} + return {"module": "notify", "models": ["Device", "SlackTeam", "SlackUser", "SlackUserTeam", "SlackChannel"]} diff --git a/breathecode/tests/mixins/headers_mixin.py b/breathecode/tests/mixins/headers_mixin.py index 2f901db8f..02fd40b4d 100644 --- a/breathecode/tests/mixins/headers_mixin.py +++ b/breathecode/tests/mixins/headers_mixin.py @@ -2,10 +2,10 @@ Headers mixin """ -__all__ = ['HeadersMixin'] +__all__ = ["HeadersMixin"] -class HeadersMixin(): +class HeadersMixin: """Headers mixin""" def headers(self, **kargs: str) -> None: @@ -22,11 +22,12 @@ def headers(self, **kargs: str) -> None: headers = {} items = [ - index for index in kargs + index + for index in kargs if kargs[index] and (isinstance(kargs[index], str) or isinstance(kargs[index], int)) ] for index in items: - headers[f'HTTP_{index.upper()}'] = str(kargs[index]) + headers[f"HTTP_{index.upper()}"] = str(kargs[index]) self.client.credentials(**headers) diff --git a/breathecode/tests/mixins/ical_mixin.py b/breathecode/tests/mixins/ical_mixin.py index aa1d5a50d..e9d0ccf83 100644 --- a/breathecode/tests/mixins/ical_mixin.py +++ b/breathecode/tests/mixins/ical_mixin.py @@ -1,12 +1,12 @@ import math -__all__ = ['ICallMixin'] +__all__ = ["ICallMixin"] -class ICallMixin(): +class ICallMixin: def line_limit(self, line: str): - linebreak = '\r\n' + linebreak = "\r\n" max_length = 74 max_chars_in_line_two = max_length - 1 side = math.ceil(len(line) / 74) @@ -18,6 +18,6 @@ def line_limit(self, line: str): offset_in_start = 0 if is_first else 1 start = (index * max_chars_in_line_two) + offset_in_start end = ((index + 1) * max_chars_in_line_two) + 1 - parts.append(line[start:end] if is_first else ' ' + line[start:end]) + parts.append(line[start:end] if is_first else " " + line[start:end]) return linebreak.join(parts) diff --git a/breathecode/tests/mixins/legacy.py b/breathecode/tests/mixins/legacy.py index 7762ed7a6..9a158c944 100644 --- a/breathecode/tests/mixins/legacy.py +++ b/breathecode/tests/mixins/legacy.py @@ -7,9 +7,9 @@ from breathecode.tests.mixins.generate_models_mixin.generate_models_mixin import GenerateModelsMixin from rest_framework.test import APIClient -__all__ = ['LegacyAPITestCase'] +__all__ = ["LegacyAPITestCase"] -token_pattern = re.compile(r'^[0-9a-zA-Z]{,40}$') +token_pattern = re.compile(r"^[0-9a-zA-Z]{,40}$") class 
LegacyAPITestCase(BreathecodeMixin, GenerateModelsMixin, CacheMixin): @@ -56,7 +56,7 @@ def assertRaisesMessage(self, expected_exception, expected_message): except expected_exception as e: assert str(e) == expected_message, f"Expected '{expected_message}', but got '{str(e)}'" except Exception as e: - pytest.fail(f'Expected {expected_exception} but it was not raised.') + pytest.fail(f"Expected {expected_exception} but it was not raised.") def assertToken(self, expected: str): """ diff --git a/breathecode/tests/mixins/models_mixin.py b/breathecode/tests/mixins/models_mixin.py index d770e5470..02de4e97a 100644 --- a/breathecode/tests/mixins/models_mixin.py +++ b/breathecode/tests/mixins/models_mixin.py @@ -2,13 +2,13 @@ Collections of mixins used to login in authorize microservice """ -__all__ = ['ModelsMixin'] +__all__ = ["ModelsMixin"] -class ModelsMixin(): +class ModelsMixin: """Mixins for models""" - def remove_dinamics_fields(self, dict, fields=['_state', 'created_at', 'updated_at', '_password']): + def remove_dinamics_fields(self, dict, fields=["_state", "created_at", "updated_at", "_password"]): """Remove dinamics fields from django models as dict""" if not dict: return None @@ -21,33 +21,33 @@ def remove_dinamics_fields(self, dict, fields=['_state', 'created_at', 'updated_ # remove any field starting with __ (double underscore) because it is considered private without_private_keys = result.copy() for key in result: - if '__' in key or key.startswith('_'): + if "__" in key or key.startswith("_"): del without_private_keys[key] return without_private_keys def model_to_dict(self, models: dict, key: str) -> dict: """Convert one django models to dict""" - print(f'The method `model_to_dict` is deprecated, use `self.bc.format.to_dict` instead') + print(f"The method `model_to_dict` is deprecated, use `self.bc.format.to_dict` instead") if key in models: return self.remove_dinamics_fields(models[key].__dict__) - def all_model_dict(self, models: list[dict], sort_by='id') -> list[dict]: + def all_model_dict(self, models: list[dict], sort_by="id") -> list[dict]: """Convert all django models to dict""" if models: models.sort(key=lambda x: getattr(x, sort_by)) return [self.remove_dinamics_fields(data.__dict__.copy()) for data in models] - def print_model(self, models: list[dict], key: str, prefix=''): - print(prefix, f'Current model key: {key}') - print(prefix, f'Current model data:', models[key].__dict__) - print('') + def print_model(self, models: list[dict], key: str, prefix=""): + print(prefix, f"Current model key: {key}") + print(prefix, f"Current model data:", models[key].__dict__) + print("") - def print_all_models(self, models: list[dict], prefix=''): - print(prefix, 'Starting to print models in dict format') + def print_all_models(self, models: list[dict], prefix=""): + print(prefix, "Starting to print models in dict format") for key in models: self.print_model(models, key, prefix) - print(prefix, 'Ending to print models in dict format') + print(prefix, "Ending to print models in dict format") diff --git a/breathecode/tests/mixins/old_breathecode_mixin.py b/breathecode/tests/mixins/old_breathecode_mixin.py index 667f678b8..d4afac20a 100644 --- a/breathecode/tests/mixins/old_breathecode_mixin.py +++ b/breathecode/tests/mixins/old_breathecode_mixin.py @@ -1,32 +1,38 @@ """ Cache mixin """ + import requests from breathecode.tests.mocks import OLD_BREATHECODE_INSTANCES from unittest.mock import call from breathecode.services import SOURCE, CAMPAIGN -__all__ = ['OldBreathecodeMixin'] +__all__ = 
["OldBreathecodeMixin"] -class OldBreathecodeMixin(): +class OldBreathecodeMixin: """Cache mixin""" - old_breathecode_host = 'https://old.hardcoded.breathecode.url' - OLD_BREATHECODE_TYPES = ['create_contact', 'contact_automations'] + + old_breathecode_host = "https://old.hardcoded.breathecode.url" + OLD_BREATHECODE_TYPES = ["create_contact", "contact_automations"] def __contact_automations_call__(self, model): - return call('POST', - f'{self.old_breathecode_host}/api/3/contactAutomations', - headers={ - 'Accept': 'application/json', - 'Content-Type': 'application/json', - 'Api-Token': model['active_campaign_academy'].ac_key, - }, - json={'contactAutomation': { - 'contact': 1, - 'automation': model['automation'].acp_id, - }}, - timeout=3) + return call( + "POST", + f"{self.old_breathecode_host}/api/3/contactAutomations", + headers={ + "Accept": "application/json", + "Content-Type": "application/json", + "Api-Token": model["active_campaign_academy"].ac_key, + }, + json={ + "contactAutomation": { + "contact": 1, + "automation": model["automation"].acp_id, + } + }, + timeout=3, + ) def reset_old_breathecode_calls(self): mock = requests.request @@ -37,10 +43,10 @@ def check_old_breathecode_calls(self, model, types): calls = [] for type in types: - method = getattr(self, f'__{type}_call__') + method = getattr(self, f"__{type}_call__") if not method: - raise Exception(f'Type {type} is not implemented') + raise Exception(f"Type {type} is not implemented") calls.append(method(model)) diff --git a/breathecode/tests/mixins/sha256_mixin.py b/breathecode/tests/mixins/sha256_mixin.py index c0b5590a4..148617c8c 100644 --- a/breathecode/tests/mixins/sha256_mixin.py +++ b/breathecode/tests/mixins/sha256_mixin.py @@ -1,15 +1,16 @@ """ Token mixin """ + import re -__all__ = ['Sha256Mixin'] +__all__ = ["Sha256Mixin"] -class Sha256Mixin(): +class Sha256Mixin: """Token mixin""" - __hash_pattern__ = re.compile(r'^[0-9a-zA-Z]{,64}$') + __hash_pattern__ = re.compile(r"^[0-9a-zA-Z]{,64}$") def assertHash(self, expected: str): """ diff --git a/breathecode/tests/mixins/token_mixin.py b/breathecode/tests/mixins/token_mixin.py index 731b6a6d1..c90855090 100644 --- a/breathecode/tests/mixins/token_mixin.py +++ b/breathecode/tests/mixins/token_mixin.py @@ -1,15 +1,16 @@ """ Token mixin """ + import re -__all__ = ['TokenMixin'] +__all__ = ["TokenMixin"] -class TokenMixin(): +class TokenMixin: """Token mixin""" - __token_pattern__ = re.compile(r'^[0-9a-zA-Z]{,40}$') + __token_pattern__ = re.compile(r"^[0-9a-zA-Z]{,40}$") def assertToken(self, expected: str): """ diff --git a/breathecode/tests/mocks/__init__.py b/breathecode/tests/mocks/__init__.py index cbf7da5fb..cb429d442 100644 --- a/breathecode/tests/mocks/__init__.py +++ b/breathecode/tests/mocks/__init__.py @@ -1,6 +1,7 @@ """ Mocks """ + from .google_cloud_storage import * # noqa: F401 from .screenshotmachine import * # noqa: F401 from .celery import * # noqa: F401 diff --git a/breathecode/tests/mocks/celery/__init__.py b/breathecode/tests/mocks/celery/__init__.py index c7a4e24d0..c715da408 100644 --- a/breathecode/tests/mocks/celery/__init__.py +++ b/breathecode/tests/mocks/celery/__init__.py @@ -1,18 +1,19 @@ """ Celery Mocks """ + from unittest.mock import Mock from .shared_task_mock import shared_task CELERY_PATH = { - 'shared_task': 'celery.shared_task', + "shared_task": "celery.shared_task", } CELERY_INSTANCES = { - 'shared_task': Mock(side_effect=shared_task), + "shared_task": Mock(side_effect=shared_task), } def apply_celery_shared_task_mock(): """Apply 
Storage Blob Mock""" - return CELERY_INSTANCES['shared_task'] + return CELERY_INSTANCES["shared_task"] diff --git a/breathecode/tests/mocks/celery/shared_task_mock.py b/breathecode/tests/mocks/celery/shared_task_mock.py index aa4932af3..e9898bd47 100644 --- a/breathecode/tests/mocks/celery/shared_task_mock.py +++ b/breathecode/tests/mocks/celery/shared_task_mock.py @@ -30,4 +30,4 @@ def inner(func): if func: return decorator(func, with_self=False) - return inner #this is the fun_obj mentioned in the above content + return inner # this is the fun_obj mentioned in the above content diff --git a/breathecode/tests/mocks/django_contrib/__init__.py b/breathecode/tests/mocks/django_contrib/__init__.py index 3f0d0e3da..b5d7899a3 100644 --- a/breathecode/tests/mocks/django_contrib/__init__.py +++ b/breathecode/tests/mocks/django_contrib/__init__.py @@ -1,18 +1,19 @@ """ Django contrib Mocks """ + from unittest.mock import Mock from .messages_mock import MessagesMock DJANGO_CONTRIB_PATH = { - 'messages': 'django.contrib.messages', + "messages": "django.contrib.messages", } DJANGO_CONTRIB_INSTANCES = { - 'messages': Mock(side_effect=MessagesMock), + "messages": Mock(side_effect=MessagesMock), } def apply_django_contrib_messages_mock(): """Apply Storage Messages Mock""" - return DJANGO_CONTRIB_INSTANCES['messages'] + return DJANGO_CONTRIB_INSTANCES["messages"] diff --git a/breathecode/tests/mocks/django_contrib/messages_mock.py b/breathecode/tests/mocks/django_contrib/messages_mock.py index 4fe982ee8..0616e93c8 100644 --- a/breathecode/tests/mocks/django_contrib/messages_mock.py +++ b/breathecode/tests/mocks/django_contrib/messages_mock.py @@ -1,4 +1,4 @@ -class MessagesMock(): +class MessagesMock: request = None message = None call_list = [] @@ -11,9 +11,9 @@ def reset(self): def success(self, request, message): self.request = request self.message = message - self.call_list.append('success') + self.call_list.append("success") def error(self, request, message): self.request = request self.message = message - self.call_list.append('error') + self.call_list.append("error") diff --git a/breathecode/tests/mocks/eventbrite/__init__.py b/breathecode/tests/mocks/eventbrite/__init__.py index 7dc5a1f83..780f47868 100644 --- a/breathecode/tests/mocks/eventbrite/__init__.py +++ b/breathecode/tests/mocks/eventbrite/__init__.py @@ -1,15 +1,16 @@ """ Google Cloud Storage Mocks """ + from unittest.mock import MagicMock from .requests_mock import request_mock from .constants.order import EVENTBRITE_ORDER_URL # noqa: F401 EVENTBRITE_PATH = { - 'get': 'requests.get', + "get": "requests.get", } -EVENTBRITE_INSTANCES = {'get': None} +EVENTBRITE_INSTANCES = {"get": None} def apply_eventbrite_requests_post_mock(): @@ -18,6 +19,6 @@ def apply_eventbrite_requests_post_mock(): mock = MagicMock(side_effect=request_mock) # don't fix this line, this keep the old behavior - EVENTBRITE_INSTANCES['get'] = EVENTBRITE_INSTANCES['get'] or mock + EVENTBRITE_INSTANCES["get"] = EVENTBRITE_INSTANCES["get"] or mock return mock diff --git a/breathecode/tests/mocks/eventbrite/constants/__init__.py b/breathecode/tests/mocks/eventbrite/constants/__init__.py index 085b3a351..edc4f077d 100644 --- a/breathecode/tests/mocks/eventbrite/constants/__init__.py +++ b/breathecode/tests/mocks/eventbrite/constants/__init__.py @@ -1,6 +1,7 @@ """ Eventbrite constants """ + from .event import EVENTBRITE_EVENT, EVENTBRITE_EVENT_URL # noqa: F401 from .order import EVENTBRITE_ORDER, EVENTBRITE_ORDER_URL # noqa: F401 from .attendee import EVENTBRITE_ATTENDEE, 
EVENTBRITE_ATTENDEE_URL # noqa: F401 diff --git a/breathecode/tests/mocks/eventbrite/constants/attendee.py b/breathecode/tests/mocks/eventbrite/constants/attendee.py index babd31de9..c1ccdfc2a 100644 --- a/breathecode/tests/mocks/eventbrite/constants/attendee.py +++ b/breathecode/tests/mocks/eventbrite/constants/attendee.py @@ -1,866 +1,673 @@ # https://www.eventbrite.com.mx/platform/api#/reference/attendee/retrieve/list-attendees-by-event?console=1 -EVENTBRITE_ATTENDEE_URL = 'https://www.eventbriteapi.com/v3/events/1/attendees/' +EVENTBRITE_ATTENDEE_URL = "https://www.eventbriteapi.com/v3/events/1/attendees/" EVENTBRITE_ATTENDEE = { - 'pagination': { - 'object_count': 1, - 'page_number': 1, - 'page_size': 1, - 'page_count': 1, - 'continuation': 'dGhpcyBpcyBhIGNvbnRpbnVhdGlvbiB0b2tlbg', - 'has_more_items': False + "pagination": { + "object_count": 1, + "page_number": 1, + "page_size": 1, + "page_count": 1, + "continuation": "dGhpcyBpcyBhIGNvbnRpbnVhdGlvbiB0b2tlbg", + "has_more_items": False, }, - 'attendees': [{ - 'id': - '1', - 'created': - '2018-05-12T02:00:00Z', - 'changed': - '2018-05-12T02:00:00Z', - 'ticket_class_id': - '1', - 'ticket_class_name': - 'General Admission', - 'profile': { - 'name': 'John Smith', - 'email': 'jhon.smith@example.com', - 'first_name': 'John', - 'last_name': 'Smith', - 'prefix': 'Mr.', - 'suffix': 'Sr', - 'age': 33, - 'job_title': 'Software Enginner', - 'company': 'Eventbrite', - 'website': 'https://mysite.com', - 'blog': 'https://mysite.com', - 'gender': 'male', - 'birth_date': '1984-12-06', - 'cell_phone': '555 555-1234', - 'work_phone': '555 555-1234', - 'addresses': { - 'home': { - 'address_1': None, - 'address_2': None, - 'city': None, - 'region': None, - 'postal_code': None, - 'country': None, - 'latitude': None, - 'longitude': None, - 'localized_address_display': '', - 'localized_area_display': '', - 'localized_multi_line_address_display': [] - }, - 'ship': { - 'address_1': None, - 'address_2': None, - 'city': None, - 'region': None, - 'postal_code': None, - 'country': None, - 'latitude': None, - 'longitude': None, - 'localized_address_display': '', - 'localized_area_display': '', - 'localized_multi_line_address_display': [] - }, - 'work': { - 'address_1': None, - 'address_2': None, - 'city': None, - 'region': None, - 'postal_code': None, - 'country': None, - 'latitude': None, - 'longitude': None, - 'localized_address_display': '', - 'localized_area_display': '', - 'localized_multi_line_address_display': [] - }, - 'bill': { - 'address_1': None, - 'address_2': None, - 'city': None, - 'region': None, - 'postal_code': None, - 'country': None, - 'latitude': None, - 'longitude': None, - 'localized_address_display': '', - 'localized_area_display': '', - 'localized_multi_line_address_display': [] - } - } - }, - 'questions': [{ - 'id': '1', - 'label': "What's your question?", - 'type': 'text', - 'required': False - }], - 'answers': [{ - 'question_id': '1', - 'attendee_id': '1', - 'question': "What's your question?", - 'type': 'text', - 'answer': 'This is my answer' - }], - 'barcodes': [{ - 'barcode': '1234093511009831492001', - 'status': 'unused', - 'created': '2018-08-18T22:24:03Z', - 'changed': '2018-08-18T22:24:03Z', - 'checkin_type': 0, - 'is_printed': False - }], - 'team': { - 'id': '1', - 'name': 'Great Team!', - 'date_joined': '2018-05-12T02:00:00Z', - 'event_id': '1' - }, - 'affiliate': - 'affiliate_code', - 'checked_in': - False, - 'cancelled': - False, - 'refunded': - False, - 'costs': { - 'base_price': { - 'currency': 'USD', - 'value': 432, - 
'major_value': '4.32', - 'display': '4.32 USD' - }, - 'gross': { - 'currency': 'USD', - 'value': 432, - 'major_value': '4.32', - 'display': '4.32 USD' - }, - 'eventbrite_fee': { - 'currency': 'USD', - 'value': 432, - 'major_value': '4.32', - 'display': '4.32 USD' - }, - 'payment_fee': { - 'currency': 'USD', - 'value': 432, - 'major_value': '4.32', - 'display': '4.32 USD' - }, - 'tax': { - 'currency': 'USD', - 'value': 432, - 'major_value': '4.32', - 'display': '4.32 USD' - } - }, - 'status': - '', - 'event_id': - '1', - 'event': { - 'id': '1', - 'name': { - 'text': 'Some text', - 'html': '<p>Some text</p>' - }, - 'description': { - 'text': 'Some text', - 'html': '<p>Some text</p>' - }, - 'start': { - 'timezone': 'America/Los_Angeles', - 'utc': '2018-05-12T02:00:00Z', - 'local': '2018-05-11T19:00:00' - }, - 'end': { - 'timezone': 'America/Los_Angeles', - 'utc': '2018-05-12T02:00:00Z', - 'local': '2018-05-11T19:00:00' - }, - 'url': 'https://www.eventbrite.com/e/45263283700', - 'vanity_url': 'https://testevent.eventbrite.com', - 'created': '2017-02-19T20:28:14Z', - 'changed': '2017-02-19T20:28:14Z', - 'published': '2017-02-19T20:28:14Z', - 'status': 'live', - 'currency': 'USD', - 'online_event': False, - 'organization_id': '1', - 'organizer_id': '1', - 'organizer': { - 'name': '', - 'description': { - 'text': 'Some text', - 'html': '<p>Some text</p>' - }, - 'long_description': { - 'text': 'Some text', - 'html': '<p>Some text</p>' - }, - 'logo_id': None, - 'logo': { - 'id': '1', - 'url': 'https://image.com', - 'crop_mask': { - 'top_left': { - 'y': 15, - 'x': 15 - }, - 'width': 15, - 'height': 15 + "attendees": [ + { + "id": "1", + "created": "2018-05-12T02:00:00Z", + "changed": "2018-05-12T02:00:00Z", + "ticket_class_id": "1", + "ticket_class_name": "General Admission", + "profile": { + "name": "John Smith", + "email": "jhon.smith@example.com", + "first_name": "John", + "last_name": "Smith", + "prefix": "Mr.", + "suffix": "Sr", + "age": 33, + "job_title": "Software Enginner", + "company": "Eventbrite", + "website": "https://mysite.com", + "blog": "https://mysite.com", + "gender": "male", + "birth_date": "1984-12-06", + "cell_phone": "555 555-1234", + "work_phone": "555 555-1234", + "addresses": { + "home": { + "address_1": None, + "address_2": None, + "city": None, + "region": None, + "postal_code": None, + "country": None, + "latitude": None, + "longitude": None, + "localized_address_display": "", + "localized_area_display": "", + "localized_multi_line_address_display": [], }, - 'original': { - 'url': 'https://image.com', - 'width': 800, - 'height': 400 + "ship": { + "address_1": None, + "address_2": None, + "city": None, + "region": None, + "postal_code": None, + "country": None, + "latitude": None, + "longitude": None, + "localized_address_display": "", + "localized_area_display": "", + "localized_multi_line_address_display": [], }, - 'aspect_ratio': '2', - 'edge_color': '#6a7c8b', - 'edge_color_set': True - }, - 'resource_uri': 'https://www.eventbriteapi.com/v3/organizers/1/', - 'id': '1', - 'url': 'https://www.eventbrite.com/o/1/', - 'num_past_events': 5, - 'num_future_events': 1, - 'twitter': '@abc', - 'facebook': 'abc' - }, - 'logo_id': None, - 'logo': { - 'id': '1', - 'url': 'https://image.com', - 'crop_mask': { - 'top_left': { - 'y': 15, - 'x': 15 + "work": { + "address_1": None, + "address_2": None, + "city": None, + "region": None, + "postal_code": None, + "country": None, + "latitude": None, + "longitude": None, + "localized_address_display": "", + "localized_area_display": "", + 
"localized_multi_line_address_display": [], + }, + "bill": { + "address_1": None, + "address_2": None, + "city": None, + "region": None, + "postal_code": None, + "country": None, + "latitude": None, + "longitude": None, + "localized_address_display": "", + "localized_area_display": "", + "localized_multi_line_address_display": [], }, - 'width': 15, - 'height': 15 - }, - 'original': { - 'url': 'https://image.com', - 'width': 800, - 'height': 400 - }, - 'aspect_ratio': '2', - 'edge_color': '#6a7c8b', - 'edge_color_set': True - }, - 'venue': { - 'name': 'Great Venue', - 'age_restriction': None, - 'capacity': 100, - 'address': { - 'address_1': None, - 'address_2': None, - 'city': None, - 'region': None, - 'postal_code': None, - 'country': None, - 'latitude': None, - 'longitude': None }, - 'resource_uri': 'https://www.eventbriteapi.com/v3/venues/3003/', - 'id': '1', - 'latitude': '49.28497549999999', - 'longitude': '123.11082529999999' - }, - 'format_id': None, - 'format': { - 'id': '1', - 'name': 'Seminar or Talk', - 'name_localized': 'Seminar or Talk', - 'short_name': 'Seminar', - 'short_name_localized': 'Seminar', - 'resource_uri': 'https://www.eventbriteapi.com/v3/formats/2/' - }, - 'category': { - 'id': - '1', - 'resource_uri': - 'https://www.eventbriteapi.com/v3/categories/103/', - 'name': - 'Music', - 'name_localized': - 'Music', - 'short_name': - 'Music', - 'short_name_localized': - 'Music', - 'subcategories': [{ - 'id': '3003', - 'resource_uri': 'https://www.eventbriteapi.com/v3/subcategories/3003/', - 'name': 'Classical', - 'parent_category': {} - }] }, - 'subcategory': { - 'id': '1', - 'resource_uri': 'https://www.eventbriteapi.com/v3/subcategories/3003/', - 'name': 'Classical', - 'parent_category': { - 'id': '1', - 'resource_uri': 'https://www.eventbriteapi.com/v3/categories/103/', - 'name': 'Music', - 'name_localized': 'Music', - 'short_name': 'Music', - 'short_name_localized': 'Music', - 'subcategories': [{}] + "questions": [{"id": "1", "label": "What's your question?", "type": "text", "required": False}], + "answers": [ + { + "question_id": "1", + "attendee_id": "1", + "question": "What's your question?", + "type": "text", + "answer": "This is my answer", } - }, - 'music_properties': { - 'age_restriction': None, - 'presented_by': None, - 'door_time': '2019-05-12T-19:00:00Z' - }, - 'bookmark_info': { - 'bookmarked': False - }, - 'ticket_availability': { - 'has_available_tickets': False, - 'minimum_ticket_price': { - 'currency': 'USD', - 'value': 432, - 'major_value': '4.32', - 'display': '4.32 USD' - }, - 'maximum_ticket_price': { - 'currency': 'USD', - 'value': 432, - 'major_value': '4.32', - 'display': '4.32 USD' - }, - 'is_sold_out': True, - 'start_sales_date': { - 'timezone': 'America/Los_Angeles', - 'utc': '2018-05-12T02:00:00Z', - 'local': '2018-05-11T19:00:00' - }, - 'waitlist_available': False - }, - 'listed': False, - 'shareable': False, - 'invite_only': False, - 'show_remaining': True, - 'password': '12345', - 'capacity': 100, - 'capacity_is_custom': True, - 'tx_time_limit': '12345', - 'hide_start_date': True, - 'hide_end_date': True, - 'locale': 'en_US', - 'is_locked': True, - 'privacy_setting': 'unlocked', - 'is_externally_ticketed': False, - 'external_ticketing': { - 'external_url': '', - 'ticketing_provider_name': '', - 'is_free': False, - 'minimum_ticket_price': { - 'currency': 'USD', - 'value': 432, - 'major_value': '4.32', - 'display': '4.32 USD' - }, - 'maximum_ticket_price': { - 'currency': 'USD', - 'value': 432, - 'major_value': '4.32', - 'display': '4.32 USD' 
- }, - 'sales_start': '', - 'sales_end': '' - }, - 'is_series': True, - 'is_series_parent': True, - 'series_id': '1', - 'is_reserved_seating': True, - 'show_pick_a_seat': True, - 'show_seatmap_thumbnail': True, - 'show_colors_in_seatmap_thumbnail': True, - 'is_free': True, - 'source': 'api', - 'version': 'null', - 'resource_uri': 'https://www.eventbriteapi.com/v3/events/1234/', - 'event_sales_status': { - 'sales_status': 'text', - 'start_sales_date': { - 'timezone': 'America/Los_Angeles', - 'utc': '2018-05-12T02:00:00Z', - 'local': '2018-05-11T19:00:00' + ], + "barcodes": [ + { + "barcode": "1234093511009831492001", + "status": "unused", + "created": "2018-08-18T22:24:03Z", + "changed": "2018-08-18T22:24:03Z", + "checkin_type": 0, + "is_printed": False, } + ], + "team": {"id": "1", "name": "Great Team!", "date_joined": "2018-05-12T02:00:00Z", "event_id": "1"}, + "affiliate": "affiliate_code", + "checked_in": False, + "cancelled": False, + "refunded": False, + "costs": { + "base_price": {"currency": "USD", "value": 432, "major_value": "4.32", "display": "4.32 USD"}, + "gross": {"currency": "USD", "value": 432, "major_value": "4.32", "display": "4.32 USD"}, + "eventbrite_fee": {"currency": "USD", "value": 432, "major_value": "4.32", "display": "4.32 USD"}, + "payment_fee": {"currency": "USD", "value": 432, "major_value": "4.32", "display": "4.32 USD"}, + "tax": {"currency": "USD", "value": 432, "major_value": "4.32", "display": "4.32 USD"}, }, - 'checkout_settings': { - 'created': '2018-01-31T13:00:00Z', - 'changed': '2018-01-31T13:00:00Z', - 'country_code': '', - 'currency_code': '', - 'checkout_method': 'paypal', - 'offline_settings': [{ - 'payment_method': 'CASH', - 'instructions': '' - }], - 'user_instrument_vault_id': '' - } - }, - 'order_id': - '1', - 'order': { - 'id': '1', - 'created': '2018-05-12T02:00:00Z', - 'changed': '2018-05-12T02:00:00Z', - 'name': 'John Smith', - 'first_name': 'John', - 'last_name': 'Smith', - 'email': 'john.smith@example.com', - 'costs': { - 'base_price': { - 'currency': 'USD', - 'value': 432, - 'major_value': '4.32', - 'display': '4.32 USD' - }, - 'display_price': { - 'currency': 'USD', - 'value': 432, - 'major_value': '4.32', - 'display': '4.32 USD' - }, - 'display_fee': { - 'currency': 'USD', - 'value': 432, - 'major_value': '4.32', - 'display': '4.32 USD' - }, - 'gross': { - 'currency': 'USD', - 'value': 432, - 'major_value': '4.32', - 'display': '4.32 USD' - }, - 'eventbrite_fee': { - 'currency': 'USD', - 'value': 432, - 'major_value': '4.32', - 'display': '4.32 USD' - }, - 'payment_fee': { - 'currency': 'USD', - 'value': 432, - 'major_value': '4.32', - 'display': '4.32 USD' - }, - 'tax': { - 'currency': 'USD', - 'value': 432, - 'major_value': '4.32', - 'display': '4.32 USD' - }, - 'display_tax': { - 'currency': 'USD', - 'value': 432, - 'major_value': '4.32', - 'display': '4.32 USD' - }, - 'price_before_discount': { - 'currency': 'USD', - 'value': 432, - 'major_value': '4.32', - 'display': '4.32 USD' - }, - 'discount_amount': { - 'currency': 'USD', - 'value': 432, - 'major_value': '4.32', - 'display': '4.32 USD' + "status": "", + "event_id": "1", + "event": { + "id": "1", + "name": {"text": "Some text", "html": "<p>Some text</p>"}, + "description": {"text": "Some text", "html": "<p>Some text</p>"}, + "start": { + "timezone": "America/Los_Angeles", + "utc": "2018-05-12T02:00:00Z", + "local": "2018-05-11T19:00:00", + }, + "end": { + "timezone": "America/Los_Angeles", + "utc": "2018-05-12T02:00:00Z", + "local": "2018-05-11T19:00:00", + }, + "url": 
"https://www.eventbrite.com/e/45263283700", + "vanity_url": "https://testevent.eventbrite.com", + "created": "2017-02-19T20:28:14Z", + "changed": "2017-02-19T20:28:14Z", + "published": "2017-02-19T20:28:14Z", + "status": "live", + "currency": "USD", + "online_event": False, + "organization_id": "1", + "organizer_id": "1", + "organizer": { + "name": "", + "description": {"text": "Some text", "html": "<p>Some text</p>"}, + "long_description": {"text": "Some text", "html": "<p>Some text</p>"}, + "logo_id": None, + "logo": { + "id": "1", + "url": "https://image.com", + "crop_mask": {"top_left": {"y": 15, "x": 15}, "width": 15, "height": 15}, + "original": {"url": "https://image.com", "width": 800, "height": 400}, + "aspect_ratio": "2", + "edge_color": "#6a7c8b", + "edge_color_set": True, + }, + "resource_uri": "https://www.eventbriteapi.com/v3/organizers/1/", + "id": "1", + "url": "https://www.eventbrite.com/o/1/", + "num_past_events": 5, + "num_future_events": 1, + "twitter": "@abc", + "facebook": "abc", + }, + "logo_id": None, + "logo": { + "id": "1", + "url": "https://image.com", + "crop_mask": {"top_left": {"y": 15, "x": 15}, "width": 15, "height": 15}, + "original": {"url": "https://image.com", "width": 800, "height": 400}, + "aspect_ratio": "2", + "edge_color": "#6a7c8b", + "edge_color_set": True, + }, + "venue": { + "name": "Great Venue", + "age_restriction": None, + "capacity": 100, + "address": { + "address_1": None, + "address_2": None, + "city": None, + "region": None, + "postal_code": None, + "country": None, + "latitude": None, + "longitude": None, + }, + "resource_uri": "https://www.eventbriteapi.com/v3/venues/3003/", + "id": "1", + "latitude": "49.28497549999999", + "longitude": "123.11082529999999", + }, + "format_id": None, + "format": { + "id": "1", + "name": "Seminar or Talk", + "name_localized": "Seminar or Talk", + "short_name": "Seminar", + "short_name_localized": "Seminar", + "resource_uri": "https://www.eventbriteapi.com/v3/formats/2/", + }, + "category": { + "id": "1", + "resource_uri": "https://www.eventbriteapi.com/v3/categories/103/", + "name": "Music", + "name_localized": "Music", + "short_name": "Music", + "short_name_localized": "Music", + "subcategories": [ + { + "id": "3003", + "resource_uri": "https://www.eventbriteapi.com/v3/subcategories/3003/", + "name": "Classical", + "parent_category": {}, + } + ], + }, + "subcategory": { + "id": "1", + "resource_uri": "https://www.eventbriteapi.com/v3/subcategories/3003/", + "name": "Classical", + "parent_category": { + "id": "1", + "resource_uri": "https://www.eventbriteapi.com/v3/categories/103/", + "name": "Music", + "name_localized": "Music", + "short_name": "Music", + "short_name_localized": "Music", + "subcategories": [{}], + }, }, - 'discount_type': - 'coded', - 'fee_components': [{ - 'intermediate': False, - 'name': 'royalty', - 'internal_name': 'service fee', - 'group_name': 'service fee', - 'value': 200, - 'discount': { - 'amount': { - 'currency': 'USD', - 'value': 432, - 'major_value': '4.32', - 'display': '4.32 USD' - }, - 'reason': 'TOGGLED_OFF_FEE' + "music_properties": { + "age_restriction": None, + "presented_by": None, + "door_time": "2019-05-12T-19:00:00Z", + }, + "bookmark_info": {"bookmarked": False}, + "ticket_availability": { + "has_available_tickets": False, + "minimum_ticket_price": { + "currency": "USD", + "value": 432, + "major_value": "4.32", + "display": "4.32 USD", }, - 'rule': { - 'id': '1' + "maximum_ticket_price": { + "currency": "USD", + "value": 432, + "major_value": "4.32", + "display": 
"4.32 USD", }, - 'base': 'item.display-includable', - 'bucket': 'fee', - 'recipient': 'event.6018', - 'payer': 'attendee' - }], - 'tax_components': [{ - 'intermediate': False, - 'name': 'royalty', - 'internal_name': 'service fee', - 'group_name': 'service fee', - 'value': 200, - 'discount': { - 'amount': { - 'currency': 'USD', - 'value': 432, - 'major_value': '4.32', - 'display': '4.32 USD' - }, - 'reason': 'TOGGLED_OFF_FEE' + "is_sold_out": True, + "start_sales_date": { + "timezone": "America/Los_Angeles", + "utc": "2018-05-12T02:00:00Z", + "local": "2018-05-11T19:00:00", }, - 'rule': { - 'id': '1' + "waitlist_available": False, + }, + "listed": False, + "shareable": False, + "invite_only": False, + "show_remaining": True, + "password": "12345", + "capacity": 100, + "capacity_is_custom": True, + "tx_time_limit": "12345", + "hide_start_date": True, + "hide_end_date": True, + "locale": "en_US", + "is_locked": True, + "privacy_setting": "unlocked", + "is_externally_ticketed": False, + "external_ticketing": { + "external_url": "", + "ticketing_provider_name": "", + "is_free": False, + "minimum_ticket_price": { + "currency": "USD", + "value": 432, + "major_value": "4.32", + "display": "4.32 USD", }, - 'base': 'item.display-includable', - 'bucket': 'fee', - 'recipient': 'event.6018', - 'payer': 'attendee' - }], - 'shipping_components': [{ - 'intermediate': False, - 'name': 'royalty', - 'internal_name': 'service fee', - 'group_name': 'service fee', - 'value': 200, - 'discount': { - 'amount': { - 'currency': 'USD', - 'value': 432, - 'major_value': '4.32', - 'display': '4.32 USD' - }, - 'reason': 'TOGGLED_OFF_FEE' + "maximum_ticket_price": { + "currency": "USD", + "value": 432, + "major_value": "4.32", + "display": "4.32 USD", }, - 'rule': { - 'id': '1' + "sales_start": "", + "sales_end": "", + }, + "is_series": True, + "is_series_parent": True, + "series_id": "1", + "is_reserved_seating": True, + "show_pick_a_seat": True, + "show_seatmap_thumbnail": True, + "show_colors_in_seatmap_thumbnail": True, + "is_free": True, + "source": "api", + "version": "null", + "resource_uri": "https://www.eventbriteapi.com/v3/events/1234/", + "event_sales_status": { + "sales_status": "text", + "start_sales_date": { + "timezone": "America/Los_Angeles", + "utc": "2018-05-12T02:00:00Z", + "local": "2018-05-11T19:00:00", }, - 'base': 'item.display-includable', - 'bucket': 'fee', - 'recipient': 'event.6018', - 'payer': 'attendee' - }], - 'has_gts_tax': - False, - 'tax_name': - 'VAT' - }, - 'event_id': '1', - 'event': { - 'id': '1', - 'name': { - 'text': 'Some text', - 'html': '<p>Some text</p>' - }, - 'description': { - 'text': 'Some text', - 'html': '<p>Some text</p>' }, - 'start': { - 'timezone': 'America/Los_Angeles', - 'utc': '2018-05-12T02:00:00Z', - 'local': '2018-05-11T19:00:00' + "checkout_settings": { + "created": "2018-01-31T13:00:00Z", + "changed": "2018-01-31T13:00:00Z", + "country_code": "", + "currency_code": "", + "checkout_method": "paypal", + "offline_settings": [{"payment_method": "CASH", "instructions": ""}], + "user_instrument_vault_id": "", }, - 'end': { - 'timezone': 'America/Los_Angeles', - 'utc': '2018-05-12T02:00:00Z', - 'local': '2018-05-11T19:00:00' - }, - 'url': 'https://www.eventbrite.com/e/45263283700', - 'vanity_url': 'https://testevent.eventbrite.com', - 'created': '2017-02-19T20:28:14Z', - 'changed': '2017-02-19T20:28:14Z', - 'published': '2017-02-19T20:28:14Z', - 'status': 'live', - 'currency': 'USD', - 'online_event': False, - 'organization_id': '1', - 'organizer_id': '1', - 
'organizer': { - 'name': '', - 'description': { - 'text': 'Some text', - 'html': '<p>Some text</p>' - }, - 'long_description': { - 'text': 'Some text', - 'html': '<p>Some text</p>' + }, + "order_id": "1", + "order": { + "id": "1", + "created": "2018-05-12T02:00:00Z", + "changed": "2018-05-12T02:00:00Z", + "name": "John Smith", + "first_name": "John", + "last_name": "Smith", + "email": "john.smith@example.com", + "costs": { + "base_price": {"currency": "USD", "value": 432, "major_value": "4.32", "display": "4.32 USD"}, + "display_price": {"currency": "USD", "value": 432, "major_value": "4.32", "display": "4.32 USD"}, + "display_fee": {"currency": "USD", "value": 432, "major_value": "4.32", "display": "4.32 USD"}, + "gross": {"currency": "USD", "value": 432, "major_value": "4.32", "display": "4.32 USD"}, + "eventbrite_fee": {"currency": "USD", "value": 432, "major_value": "4.32", "display": "4.32 USD"}, + "payment_fee": {"currency": "USD", "value": 432, "major_value": "4.32", "display": "4.32 USD"}, + "tax": {"currency": "USD", "value": 432, "major_value": "4.32", "display": "4.32 USD"}, + "display_tax": {"currency": "USD", "value": 432, "major_value": "4.32", "display": "4.32 USD"}, + "price_before_discount": { + "currency": "USD", + "value": 432, + "major_value": "4.32", + "display": "4.32 USD", }, - 'logo_id': None, - 'logo': { - 'id': '1', - 'url': 'https://image.com', - 'crop_mask': { - 'top_left': { - 'y': 15, - 'x': 15 + "discount_amount": {"currency": "USD", "value": 432, "major_value": "4.32", "display": "4.32 USD"}, + "discount_type": "coded", + "fee_components": [ + { + "intermediate": False, + "name": "royalty", + "internal_name": "service fee", + "group_name": "service fee", + "value": 200, + "discount": { + "amount": { + "currency": "USD", + "value": 432, + "major_value": "4.32", + "display": "4.32 USD", + }, + "reason": "TOGGLED_OFF_FEE", }, - 'width': 15, - 'height': 15 - }, - 'original': { - 'url': 'https://image.com', - 'width': 800, - 'height': 400 + "rule": {"id": "1"}, + "base": "item.display-includable", + "bucket": "fee", + "recipient": "event.6018", + "payer": "attendee", + } + ], + "tax_components": [ + { + "intermediate": False, + "name": "royalty", + "internal_name": "service fee", + "group_name": "service fee", + "value": 200, + "discount": { + "amount": { + "currency": "USD", + "value": 432, + "major_value": "4.32", + "display": "4.32 USD", + }, + "reason": "TOGGLED_OFF_FEE", + }, + "rule": {"id": "1"}, + "base": "item.display-includable", + "bucket": "fee", + "recipient": "event.6018", + "payer": "attendee", + } + ], + "shipping_components": [ + { + "intermediate": False, + "name": "royalty", + "internal_name": "service fee", + "group_name": "service fee", + "value": 200, + "discount": { + "amount": { + "currency": "USD", + "value": 432, + "major_value": "4.32", + "display": "4.32 USD", + }, + "reason": "TOGGLED_OFF_FEE", + }, + "rule": {"id": "1"}, + "base": "item.display-includable", + "bucket": "fee", + "recipient": "event.6018", + "payer": "attendee", + } + ], + "has_gts_tax": False, + "tax_name": "VAT", + }, + "event_id": "1", + "event": { + "id": "1", + "name": {"text": "Some text", "html": "<p>Some text</p>"}, + "description": {"text": "Some text", "html": "<p>Some text</p>"}, + "start": { + "timezone": "America/Los_Angeles", + "utc": "2018-05-12T02:00:00Z", + "local": "2018-05-11T19:00:00", + }, + "end": { + "timezone": "America/Los_Angeles", + "utc": "2018-05-12T02:00:00Z", + "local": "2018-05-11T19:00:00", + }, + "url": 
"https://www.eventbrite.com/e/45263283700", + "vanity_url": "https://testevent.eventbrite.com", + "created": "2017-02-19T20:28:14Z", + "changed": "2017-02-19T20:28:14Z", + "published": "2017-02-19T20:28:14Z", + "status": "live", + "currency": "USD", + "online_event": False, + "organization_id": "1", + "organizer_id": "1", + "organizer": { + "name": "", + "description": {"text": "Some text", "html": "<p>Some text</p>"}, + "long_description": {"text": "Some text", "html": "<p>Some text</p>"}, + "logo_id": None, + "logo": { + "id": "1", + "url": "https://image.com", + "crop_mask": {"top_left": {"y": 15, "x": 15}, "width": 15, "height": 15}, + "original": {"url": "https://image.com", "width": 800, "height": 400}, + "aspect_ratio": "2", + "edge_color": "#6a7c8b", + "edge_color_set": True, }, - 'aspect_ratio': '2', - 'edge_color': '#6a7c8b', - 'edge_color_set': True + "resource_uri": "https://www.eventbriteapi.com/v3/organizers/1/", + "id": "1", + "url": "https://www.eventbrite.com/o/1/", + "num_past_events": 5, + "num_future_events": 1, + "twitter": "@abc", + "facebook": "abc", }, - 'resource_uri': 'https://www.eventbriteapi.com/v3/organizers/1/', - 'id': '1', - 'url': 'https://www.eventbrite.com/o/1/', - 'num_past_events': 5, - 'num_future_events': 1, - 'twitter': '@abc', - 'facebook': 'abc' - }, - 'logo_id': None, - 'logo': { - 'id': '1', - 'url': 'https://image.com', - 'crop_mask': { - 'top_left': { - 'y': 15, - 'x': 15 + "logo_id": None, + "logo": { + "id": "1", + "url": "https://image.com", + "crop_mask": {"top_left": {"y": 15, "x": 15}, "width": 15, "height": 15}, + "original": {"url": "https://image.com", "width": 800, "height": 400}, + "aspect_ratio": "2", + "edge_color": "#6a7c8b", + "edge_color_set": True, + }, + "venue": { + "name": "Great Venue", + "age_restriction": None, + "capacity": 100, + "address": { + "address_1": None, + "address_2": None, + "city": None, + "region": None, + "postal_code": None, + "country": None, + "latitude": None, + "longitude": None, }, - 'width': 15, - 'height': 15 + "resource_uri": "https://www.eventbriteapi.com/v3/venues/3003/", + "id": "1", + "latitude": "49.28497549999999", + "longitude": "123.11082529999999", }, - 'original': { - 'url': 'https://image.com', - 'width': 800, - 'height': 400 + "format_id": None, + "format": { + "id": "1", + "name": "Seminar or Talk", + "name_localized": "Seminar or Talk", + "short_name": "Seminar", + "short_name_localized": "Seminar", + "resource_uri": "https://www.eventbriteapi.com/v3/formats/2/", }, - 'aspect_ratio': '2', - 'edge_color': '#6a7c8b', - 'edge_color_set': True - }, - 'venue': { - 'name': 'Great Venue', - 'age_restriction': None, - 'capacity': 100, - 'address': { - 'address_1': None, - 'address_2': None, - 'city': None, - 'region': None, - 'postal_code': None, - 'country': None, - 'latitude': None, - 'longitude': None + "category": { + "id": "1", + "resource_uri": "https://www.eventbriteapi.com/v3/categories/103/", + "name": "Music", + "name_localized": "Music", + "short_name": "Music", + "short_name_localized": "Music", + "subcategories": [ + { + "id": "1", + "resource_uri": "https://www.eventbriteapi.com/v3/subcategories/3003/", + "name": "Classical", + "parent_category": {}, + } + ], }, - 'resource_uri': 'https://www.eventbriteapi.com/v3/venues/3003/', - 'id': '1', - 'latitude': '49.28497549999999', - 'longitude': '123.11082529999999' - }, - 'format_id': None, - 'format': { - 'id': '1', - 'name': 'Seminar or Talk', - 'name_localized': 'Seminar or Talk', - 'short_name': 'Seminar', - 
'short_name_localized': 'Seminar', - 'resource_uri': 'https://www.eventbriteapi.com/v3/formats/2/' - }, - 'category': { - 'id': - '1', - 'resource_uri': - 'https://www.eventbriteapi.com/v3/categories/103/', - 'name': - 'Music', - 'name_localized': - 'Music', - 'short_name': - 'Music', - 'short_name_localized': - 'Music', - 'subcategories': [{ - 'id': '1', - 'resource_uri': 'https://www.eventbriteapi.com/v3/subcategories/3003/', - 'name': 'Classical', - 'parent_category': {} - }] - }, - 'subcategory': { - 'id': '1', - 'resource_uri': 'https://www.eventbriteapi.com/v3/subcategories/3003/', - 'name': 'Classical', - 'parent_category': { - 'id': '1', - 'resource_uri': 'https://www.eventbriteapi.com/v3/categories/103/', - 'name': 'Music', - 'name_localized': 'Music', - 'short_name': 'Music', - 'short_name_localized': 'Music', - 'subcategories': [{}] - } - }, - 'music_properties': { - 'age_restriction': None, - 'presented_by': None, - 'door_time': '2019-05-12T-19:00:00Z' - }, - 'bookmark_info': { - 'bookmarked': False - }, - 'ticket_availability': { - 'has_available_tickets': False, - 'minimum_ticket_price': { - 'currency': 'USD', - 'value': 432, - 'major_value': '4.32', - 'display': '4.32 USD' + "subcategory": { + "id": "1", + "resource_uri": "https://www.eventbriteapi.com/v3/subcategories/3003/", + "name": "Classical", + "parent_category": { + "id": "1", + "resource_uri": "https://www.eventbriteapi.com/v3/categories/103/", + "name": "Music", + "name_localized": "Music", + "short_name": "Music", + "short_name_localized": "Music", + "subcategories": [{}], + }, }, - 'maximum_ticket_price': { - 'currency': 'USD', - 'value': 432, - 'major_value': '4.32', - 'display': '4.32 USD' + "music_properties": { + "age_restriction": None, + "presented_by": None, + "door_time": "2019-05-12T-19:00:00Z", }, - 'is_sold_out': True, - 'start_sales_date': { - 'timezone': 'America/Los_Angeles', - 'utc': '2018-05-12T02:00:00Z', - 'local': '2018-05-11T19:00:00' + "bookmark_info": {"bookmarked": False}, + "ticket_availability": { + "has_available_tickets": False, + "minimum_ticket_price": { + "currency": "USD", + "value": 432, + "major_value": "4.32", + "display": "4.32 USD", + }, + "maximum_ticket_price": { + "currency": "USD", + "value": 432, + "major_value": "4.32", + "display": "4.32 USD", + }, + "is_sold_out": True, + "start_sales_date": { + "timezone": "America/Los_Angeles", + "utc": "2018-05-12T02:00:00Z", + "local": "2018-05-11T19:00:00", + }, + "waitlist_available": False, }, - 'waitlist_available': False - }, - 'listed': False, - 'shareable': False, - 'invite_only': False, - 'show_remaining': True, - 'password': '12345', - 'capacity': 100, - 'capacity_is_custom': True, - 'tx_time_limit': '12345', - 'hide_start_date': True, - 'hide_end_date': True, - 'locale': 'en_US', - 'is_locked': True, - 'privacy_setting': 'unlocked', - 'is_externally_ticketed': False, - 'external_ticketing': { - 'external_url': '', - 'ticketing_provider_name': '', - 'is_free': False, - 'minimum_ticket_price': { - 'currency': 'USD', - 'value': 432, - 'major_value': '4.32', - 'display': '4.32 USD' + "listed": False, + "shareable": False, + "invite_only": False, + "show_remaining": True, + "password": "12345", + "capacity": 100, + "capacity_is_custom": True, + "tx_time_limit": "12345", + "hide_start_date": True, + "hide_end_date": True, + "locale": "en_US", + "is_locked": True, + "privacy_setting": "unlocked", + "is_externally_ticketed": False, + "external_ticketing": { + "external_url": "", + "ticketing_provider_name": "", + "is_free": False, 
+ "minimum_ticket_price": { + "currency": "USD", + "value": 432, + "major_value": "4.32", + "display": "4.32 USD", + }, + "maximum_ticket_price": { + "currency": "USD", + "value": 432, + "major_value": "4.32", + "display": "4.32 USD", + }, + "sales_start": "", + "sales_end": "", }, - 'maximum_ticket_price': { - 'currency': 'USD', - 'value': 432, - 'major_value': '4.32', - 'display': '4.32 USD' + "is_series": True, + "is_series_parent": True, + "series_id": "1", + "is_reserved_seating": True, + "show_pick_a_seat": True, + "show_seatmap_thumbnail": True, + "show_colors_in_seatmap_thumbnail": True, + "is_free": True, + "source": "api", + "version": "null", + "resource_uri": "https://www.eventbriteapi.com/v3/events/1234/", + "event_sales_status": { + "sales_status": "text", + "start_sales_date": { + "timezone": "America/Los_Angeles", + "utc": "2018-05-12T02:00:00Z", + "local": "2018-05-11T19:00:00", + }, + }, + "checkout_settings": { + "created": "2018-01-31T13:00:00Z", + "changed": "2018-01-31T13:00:00Z", + "country_code": "", + "currency_code": "", + "checkout_method": "paypal", + "offline_settings": [{"payment_method": "CASH", "instructions": ""}], + "user_instrument_vault_id": "", }, - 'sales_start': '', - 'sales_end': '' }, - 'is_series': True, - 'is_series_parent': True, - 'series_id': '1', - 'is_reserved_seating': True, - 'show_pick_a_seat': True, - 'show_seatmap_thumbnail': True, - 'show_colors_in_seatmap_thumbnail': True, - 'is_free': True, - 'source': 'api', - 'version': 'null', - 'resource_uri': 'https://www.eventbriteapi.com/v3/events/1234/', - 'event_sales_status': { - 'sales_status': 'text', - 'start_sales_date': { - 'timezone': 'America/Los_Angeles', - 'utc': '2018-05-12T02:00:00Z', - 'local': '2018-05-11T19:00:00' - } + "attendees": [{}], + "time_remaining": 100, + "resource_uri": "https://www.eventbriteapi.com/v3/orders/1234/", + "status": "placed", + "ticket_buyer_settings": { + "confirmation_message": {"text": "Some text", "html": "<p>Some text</p>"}, + "instructions": {"text": "Some text", "html": "<p>Some text</p>"}, + "event_id": "1", + "refund_request_enabled": False, + "ticket_class_confirmation_settings": [ + { + "ticket_class_id": "1", + "event_id": "1", + "confirmation_message": {"text": "Some text", "html": "<p>Some text</p>"}, + } + ], + }, + "contact_list_preferences": { + "has_contact_list": True, + "has_opted_in": True, + "_type": "order_contact_list_preferences", }, - 'checkout_settings': { - 'created': '2018-01-31T13:00:00Z', - 'changed': '2018-01-31T13:00:00Z', - 'country_code': '', - 'currency_code': '', - 'checkout_method': 'paypal', - 'offline_settings': [{ - 'payment_method': 'CASH', - 'instructions': '' - }], - 'user_instrument_vault_id': '' - } }, - 'attendees': [{}], - 'time_remaining': 100, - 'resource_uri': 'https://www.eventbriteapi.com/v3/orders/1234/', - 'status': 'placed', - 'ticket_buyer_settings': { - 'confirmation_message': { - 'text': 'Some text', - 'html': '<p>Some text</p>' - }, - 'instructions': { - 'text': 'Some text', - 'html': '<p>Some text</p>' - }, - 'event_id': - '1', - 'refund_request_enabled': - False, - 'ticket_class_confirmation_settings': [{ - 'ticket_class_id': '1', - 'event_id': '1', - 'confirmation_message': { - 'text': 'Some text', - 'html': '<p>Some text</p>' - } - }] + "guestlist_id": None, + "invited_by": None, + "assigned_unit": { + "unit_id": "18-1:2", + "description": "Some description", + "location_image": {"url": "", "x": 0, "y": 0}, + "labels": ["100", "A", "23"], + "titles": ["Area", "Row", "Seat"], }, - 
'contact_list_preferences': { - 'has_contact_list': True, - 'has_opted_in': True, - '_type': 'order_contact_list_preferences' - } - }, - 'guestlist_id': - None, - 'invited_by': - None, - 'assigned_unit': { - 'unit_id': '18-1:2', - 'description': 'Some description', - 'location_image': { - 'url': '', - 'x': 0, - 'y': 0 + "delivery_method": "electronic", + "variant_id": None, + "contact_list_preferences": { + "has_contact_list": True, + "has_opted_in": True, + "_type": "attendee_contact_list_preferences", }, - 'labels': ['100', 'A', '23'], - 'titles': ['Area', 'Row', 'Seat'] - }, - 'delivery_method': - 'electronic', - 'variant_id': - None, - 'contact_list_preferences': { - 'has_contact_list': True, - 'has_opted_in': True, - '_type': 'attendee_contact_list_preferences' - }, - 'resource_uri': - '' - }] + "resource_uri": "", + } + ], } diff --git a/breathecode/tests/mocks/eventbrite/constants/event.py b/breathecode/tests/mocks/eventbrite/constants/event.py index e5cf239a0..ae4ce8fd9 100644 --- a/breathecode/tests/mocks/eventbrite/constants/event.py +++ b/breathecode/tests/mocks/eventbrite/constants/event.py @@ -1,85 +1,68 @@ # https://www.eventbrite.com.mx/platform/api#/reference/event/retrieve-an-event?console=1 -EVENTBRITE_EVENT_URL = 'https://www.eventbriteapi.com/v3/events/1/' +EVENTBRITE_EVENT_URL = "https://www.eventbriteapi.com/v3/events/1/" EVENTBRITE_EVENT = { - 'name': { - 'text': 'GEEKTALKS - PRESENTACIÓN DE PROYECTOS FINALES', - 'html': 'GEEKTALKS - PRESENTACIÓN DE PROYECTOS FINALES' + "name": { + "text": "GEEKTALKS - PRESENTACIÓN DE PROYECTOS FINALES", + "html": "GEEKTALKS - PRESENTACIÓN DE PROYECTOS FINALES", }, - 'description': { - 'text': 'GEEKTALKS - DEMO DAY ¡TÚ TAMBIÉN HARÁS ESTO CUANDO SEAS PROGRAMADOR!', - 'html': 'GEEKTALKS - DEMO DAY ¡TÚ TAMBIÉN HARÁS ESTO CUANDO SEAS PROGRAMADOR!' 
+ "description": { + "text": "GEEKTALKS - DEMO DAY ¡TÚ TAMBIÉN HARÁS ESTO CUANDO SEAS PROGRAMADOR!", + "html": "GEEKTALKS - DEMO DAY ¡TÚ TAMBIÉN HARÁS ESTO CUANDO SEAS PROGRAMADOR!", }, - 'url': 'https://www.eventbrite.com/e/geektalks-presentacion-de-proyectos-finales-tickets-1', - 'start': { - 'timezone': 'Europe/Madrid', - 'local': '2021-12-01T18:30:00', - 'utc': '2021-12-01T17:30:00Z' - }, - 'end': { - 'timezone': 'Europe/Madrid', - 'local': '2021-12-01T19:30:00', - 'utc': '2021-12-01T18:30:00Z' - }, - 'organization_id': '1', - 'created': '2021-11-19T03:24:52Z', - 'changed': '2021-11-19T04:27:58Z', - 'published': '2021-11-19T04:27:58Z', - 'capacity': 200, - 'capacity_is_custom': False, - 'status': 'live', - 'currency': 'USD', - 'listed': True, - 'shareable': True, - 'invite_only': False, - 'online_event': True, - 'show_remaining': False, - 'tx_time_limit': 480, - 'hide_start_date': False, - 'hide_end_date': False, - 'locale': 'en_US', - 'is_locked': False, - 'privacy_setting': 'unlocked', - 'is_series': False, - 'is_series_parent': False, - 'inventory_type': 'limited', - 'is_reserved_seating': False, - 'show_pick_a_seat': False, - 'show_seatmap_thumbnail': False, - 'show_colors_in_seatmap_thumbnail': False, - 'source': 'coyote', - 'is_free': True, - 'version': None, - 'summary': 'GEEKTALKS - DEMO DAY ¡TÚ TAMBIÉN HARÁS ESTO CUANDO SEAS PROGRAMADOR!', - 'facebook_event_id': None, - 'logo_id': '1', - 'organizer_id': '1', - 'venue_id': None, - 'category_id': '1', - 'subcategory_id': '1', - 'format_id': '1', - 'id': '1', - 'resource_uri': 'https://www.eventbriteapi.com/v3/events/1/', - 'is_externally_ticketed': False, - 'logo': { - 'crop_mask': { - 'top_left': { - 'x': 0, - 'y': 0 - }, - 'width': 2160, - 'height': 1080 + "url": "https://www.eventbrite.com/e/geektalks-presentacion-de-proyectos-finales-tickets-1", + "start": {"timezone": "Europe/Madrid", "local": "2021-12-01T18:30:00", "utc": "2021-12-01T17:30:00Z"}, + "end": {"timezone": "Europe/Madrid", "local": "2021-12-01T19:30:00", "utc": "2021-12-01T18:30:00Z"}, + "organization_id": "1", + "created": "2021-11-19T03:24:52Z", + "changed": "2021-11-19T04:27:58Z", + "published": "2021-11-19T04:27:58Z", + "capacity": 200, + "capacity_is_custom": False, + "status": "live", + "currency": "USD", + "listed": True, + "shareable": True, + "invite_only": False, + "online_event": True, + "show_remaining": False, + "tx_time_limit": 480, + "hide_start_date": False, + "hide_end_date": False, + "locale": "en_US", + "is_locked": False, + "privacy_setting": "unlocked", + "is_series": False, + "is_series_parent": False, + "inventory_type": "limited", + "is_reserved_seating": False, + "show_pick_a_seat": False, + "show_seatmap_thumbnail": False, + "show_colors_in_seatmap_thumbnail": False, + "source": "coyote", + "is_free": True, + "version": None, + "summary": "GEEKTALKS - DEMO DAY ¡TÚ TAMBIÉN HARÁS ESTO CUANDO SEAS PROGRAMADOR!", + "facebook_event_id": None, + "logo_id": "1", + "organizer_id": "1", + "venue_id": None, + "category_id": "1", + "subcategory_id": "1", + "format_id": "1", + "id": "1", + "resource_uri": "https://www.eventbriteapi.com/v3/events/1/", + "is_externally_ticketed": False, + "logo": { + "crop_mask": {"top_left": {"x": 0, "y": 0}, "width": 2160, "height": 1080}, + "original": { + "url": "https://img.evbuc.com/https%3A%2F%2Fcdn.evbuc.com%2Fimages%2F188704609%2F187450375408%2F1%2Foriginal.20211119-035031?auto=format%2Ccompress&q=75&sharp=10&s=683defa0f95ab0fab375d93d30fd83f2", + "width": 2160, + "height": 1080, }, - 'original': { - 'url': 
- 'https://img.evbuc.com/https%3A%2F%2Fcdn.evbuc.com%2Fimages%2F188704609%2F187450375408%2F1%2Foriginal.20211119-035031?auto=format%2Ccompress&q=75&sharp=10&s=683defa0f95ab0fab375d93d30fd83f2', - 'width': 2160, - 'height': 1080 - }, - 'id': '1', - 'url': - 'https://img.evbuc.com/https%3A%2F%2Fcdn.evbuc.com%2Fimages%2F188704609%2F187450375408%2F1%2Foriginal.20211119-035031?h=200&w=450&auto=format%2Ccompress&q=75&sharp=10&rect=0%2C0%2C2160%2C1080&s=40991174ed57f5f54596a762c94eb850', - 'aspect_ratio': '2', - 'edge_color': '#fcfcfc', - 'edge_color_set': True - } + "id": "1", + "url": "https://img.evbuc.com/https%3A%2F%2Fcdn.evbuc.com%2Fimages%2F188704609%2F187450375408%2F1%2Foriginal.20211119-035031?h=200&w=450&auto=format%2Ccompress&q=75&sharp=10&rect=0%2C0%2C2160%2C1080&s=40991174ed57f5f54596a762c94eb850", + "aspect_ratio": "2", + "edge_color": "#fcfcfc", + "edge_color_set": True, + }, } diff --git a/breathecode/tests/mocks/eventbrite/constants/events.py b/breathecode/tests/mocks/eventbrite/constants/events.py index aac77ff2d..7176ec20e 100644 --- a/breathecode/tests/mocks/eventbrite/constants/events.py +++ b/breathecode/tests/mocks/eventbrite/constants/events.py @@ -1,149 +1,125 @@ # https://www.eventbrite.com.mx/platform/api#/reference/event/retrieve-an-event?console=1 # '%2C' = ',' -EVENTBRITE_EVENTS_URL = ('https://www.eventbriteapi.com/v3/organizations/:id/events/?' - 'expand=organizer%2Cvenue&status=live') +EVENTBRITE_EVENTS_URL = ( + "https://www.eventbriteapi.com/v3/organizations/:id/events/?" "expand=organizer%2Cvenue&status=live" +) EVENTBRITE_EVENTS = { - 'pagination': { - 'object_count': 1, - 'page_number': 1, - 'page_size': 50, - 'page_count': 1, - 'has_more_items': False - }, - 'events': [{ - 'name': { - 'text': 'GEEKTALKS - PRESENTACIÓN DE PROYECTOS FINALES', - 'html': 'GEEKTALKS - PRESENTACIÓN DE PROYECTOS FINALES' - }, - 'description': { - 'text': 'GEEKTALKS - DEMO DAY ¡TÚ TAMBIÉN HARÁS ESTO CUANDO SEAS PROGRAMADOR!', - 'html': 'GEEKTALKS - DEMO DAY ¡TÚ TAMBIÉN HARÁS ESTO CUANDO SEAS PROGRAMADOR!' 
- }, - 'url': 'https://www.eventbrite.com/e/geektalks-presentacion-de-proyectos-finales-tickets-1', - 'start': { - 'timezone': 'Europe/Madrid', - 'local': '2021-12-01T18:30:00', - 'utc': '2021-12-01T17:30:00Z' - }, - 'end': { - 'timezone': 'Europe/Madrid', - 'local': '2021-12-01T19:30:00', - 'utc': '2021-12-01T18:30:00Z' - }, - 'organization_id': '1', - 'created': '2021-11-19T03:24:52Z', - 'changed': '2021-11-19T04:27:58Z', - 'published': '2021-11-19T04:27:58Z', - 'capacity': 200, - 'capacity_is_custom': False, - 'status': 'live', - 'currency': 'USD', - 'listed': True, - 'shareable': True, - 'invite_only': False, - 'online_event': True, - 'show_remaining': False, - 'tx_time_limit': 480, - 'hide_start_date': False, - 'hide_end_date': False, - 'locale': 'en_US', - 'is_locked': False, - 'privacy_setting': 'unlocked', - 'is_series': False, - 'is_series_parent': False, - 'inventory_type': 'limited', - 'is_reserved_seating': False, - 'show_pick_a_seat': False, - 'show_seatmap_thumbnail': False, - 'show_colors_in_seatmap_thumbnail': False, - 'source': 'coyote', - 'is_free': True, - 'version': None, - 'summary': 'GEEKTALKS - DEMO DAY ¡TÚ TAMBIÉN HARÁS ESTO CUANDO SEAS PROGRAMADOR!', - 'facebook_event_id': '1', - 'logo_id': '1', - 'organizer_id': '1', - 'venue_id': '1', - 'category_id': '1', - 'subcategory_id': '1', - 'format_id': '1', - 'id': '1', - 'resource_uri': 'https://www.eventbriteapi.com/v3/events/1/', - 'is_externally_ticketed': False, - 'logo': { - 'crop_mask': { - 'top_left': { - 'x': 0, - 'y': 0 - }, - 'width': 2160, - 'height': 1080 + "pagination": {"object_count": 1, "page_number": 1, "page_size": 50, "page_count": 1, "has_more_items": False}, + "events": [ + { + "name": { + "text": "GEEKTALKS - PRESENTACIÓN DE PROYECTOS FINALES", + "html": "GEEKTALKS - PRESENTACIÓN DE PROYECTOS FINALES", }, - 'original': { - 'url': - 'https://img.evbuc.com/https%3A%2F%2Fcdn.evbuc.com%2Fimages%2F1%2F187450375408%2F1%2Foriginal.20211119-035031?auto=format%2Ccompress&q=75&sharp=10&s=683defa0f95ab0fab375d93d30fd83f2', - 'width': 2160, - 'height': 1080 + "description": { + "text": "GEEKTALKS - DEMO DAY ¡TÚ TAMBIÉN HARÁS ESTO CUANDO SEAS PROGRAMADOR!", + "html": "GEEKTALKS - DEMO DAY ¡TÚ TAMBIÉN HARÁS ESTO CUANDO SEAS PROGRAMADOR!", }, - 'id': '1', - 'url': - 'https://img.evbuc.com/https%3A%2F%2Fcdn.evbuc.com%2Fimages%2F1%2F187450375408%2F1%2Foriginal.20211119-035031?h=200&w=450&auto=format%2Ccompress&q=75&sharp=10&rect=0%2C0%2C2160%2C1080&s=40991174ed57f5f54596a762c94eb850', - 'aspect_ratio': '2', - 'edge_color': '#fcfcfc', - 'edge_color_set': True - }, - 'organizer': { - 'description': { - 'text': - '4Geeks Academy somos un Bootcamp de programación con más de seis campus en España, Estados Unidos, Chile y Venezuela. Estamos enfocados en desarrollar las habilidades necesarias para convertirte en un programador de software completo y exitoso. Contamos con más de 600 graduados, ¡el 90% ya está trabajando como programador!\r\nTenemos dos tipos de programas Part-Time y Full-Time. En cada una de ellos, recibirás la mejor atención posible, nuestras clases son personalizadas (en promedio 1 profesor cada 5 estudiantes). Recibirás asesoramiento en todo momento a través de nuestras mentorías uno a uno y soporte online y offline, incluso después de que consigas trabajo. 
Además, nos encargaremos de ayudarte a conseguir trabajo, te asesoraremos en la construcción de tu CV, construcción de tu portfolio, entre otros, también te prepararemos para entrevistas de manera personalizada (según el puesto al que estés aplicando) para que seas el candidato perfecto para las empresas.  \r\n ', - 'html': - '<p>4Geeks Academy somos un Bootcamp de programación con más de seis campus en España, Estados Unidos, Chile y Venezuela. Estamos enfocados en desarrollar las habilidades necesarias para convertirte en un programador de software completo y exitoso. Contamos con más de 600 graduados, ¡el 90% ya está trabajando como programador!</p>\r\n<p>Tenemos dos tipos de programas Part-Time y Full-Time. En cada una de ellos, recibirás la mejor atención posible, nuestras clases son personalizadas (en promedio 1 profesor cada 5 estudiantes). Recibirás asesoramiento en todo momento a través de nuestras mentorías uno a uno y soporte online y offline, incluso después de que consigas trabajo. Además, nos encargaremos de ayudarte a conseguir trabajo, te asesoraremos en la construcción de tu CV, construcción de tu portfolio, entre otros, también te prepararemos para entrevistas de manera personalizada (según el puesto al que estés aplicando) para que seas el candidato perfecto para las empresas.  </p>\r\n<p> </p>' + "url": "https://www.eventbrite.com/e/geektalks-presentacion-de-proyectos-finales-tickets-1", + "start": {"timezone": "Europe/Madrid", "local": "2021-12-01T18:30:00", "utc": "2021-12-01T17:30:00Z"}, + "end": {"timezone": "Europe/Madrid", "local": "2021-12-01T19:30:00", "utc": "2021-12-01T18:30:00Z"}, + "organization_id": "1", + "created": "2021-11-19T03:24:52Z", + "changed": "2021-11-19T04:27:58Z", + "published": "2021-11-19T04:27:58Z", + "capacity": 200, + "capacity_is_custom": False, + "status": "live", + "currency": "USD", + "listed": True, + "shareable": True, + "invite_only": False, + "online_event": True, + "show_remaining": False, + "tx_time_limit": 480, + "hide_start_date": False, + "hide_end_date": False, + "locale": "en_US", + "is_locked": False, + "privacy_setting": "unlocked", + "is_series": False, + "is_series_parent": False, + "inventory_type": "limited", + "is_reserved_seating": False, + "show_pick_a_seat": False, + "show_seatmap_thumbnail": False, + "show_colors_in_seatmap_thumbnail": False, + "source": "coyote", + "is_free": True, + "version": None, + "summary": "GEEKTALKS - DEMO DAY ¡TÚ TAMBIÉN HARÁS ESTO CUANDO SEAS PROGRAMADOR!", + "facebook_event_id": "1", + "logo_id": "1", + "organizer_id": "1", + "venue_id": "1", + "category_id": "1", + "subcategory_id": "1", + "format_id": "1", + "id": "1", + "resource_uri": "https://www.eventbriteapi.com/v3/events/1/", + "is_externally_ticketed": False, + "logo": { + "crop_mask": {"top_left": {"x": 0, "y": 0}, "width": 2160, "height": 1080}, + "original": { + "url": "https://img.evbuc.com/https%3A%2F%2Fcdn.evbuc.com%2Fimages%2F1%2F187450375408%2F1%2Foriginal.20211119-035031?auto=format%2Ccompress&q=75&sharp=10&s=683defa0f95ab0fab375d93d30fd83f2", + "width": 2160, + "height": 1080, + }, + "id": "1", + "url": "https://img.evbuc.com/https%3A%2F%2Fcdn.evbuc.com%2Fimages%2F1%2F187450375408%2F1%2Foriginal.20211119-035031?h=200&w=450&auto=format%2Ccompress&q=75&sharp=10&rect=0%2C0%2C2160%2C1080&s=40991174ed57f5f54596a762c94eb850", + "aspect_ratio": "2", + "edge_color": "#fcfcfc", + "edge_color_set": True, }, - 'long_description': { - 'text': - '4Geeks Academy somos un Bootcamp de programación con más de seis campus en España, 
Estados Unidos, Chile y Venezuela. Estamos enfocados en desarrollar las habilidades necesarias para convertirte en un programador de software completo y exitoso. Contamos con más de 600 graduados, ¡el 90% ya está trabajando como programador!\r\nTenemos dos tipos de programas Part-Time y Full-Time. En cada una de ellos, recibirás la mejor atención posible, nuestras clases son personalizadas (en promedio 1 profesor cada 5 estudiantes). Recibirás asesoramiento en todo momento a través de nuestras mentorías uno a uno y soporte online y offline, incluso después de que consigas trabajo. Además, nos encargaremos de ayudarte a conseguir trabajo, te asesoraremos en la construcción de tu CV, construcción de tu portfolio, entre otros, también te prepararemos para entrevistas de manera personalizada (según el puesto al que estés aplicando) para que seas el candidato perfecto para las empresas.  \r\n ', - 'html': - '<p>4Geeks Academy somos un Bootcamp de programación con más de seis campus en España, Estados Unidos, Chile y Venezuela. Estamos enfocados en desarrollar las habilidades necesarias para convertirte en un programador de software completo y exitoso. Contamos con más de 600 graduados, ¡el 90% ya está trabajando como programador!</p>\r\n<p>Tenemos dos tipos de programas Part-Time y Full-Time. En cada una de ellos, recibirás la mejor atención posible, nuestras clases son personalizadas (en promedio 1 profesor cada 5 estudiantes). Recibirás asesoramiento en todo momento a través de nuestras mentorías uno a uno y soporte online y offline, incluso después de que consigas trabajo. Además, nos encargaremos de ayudarte a conseguir trabajo, te asesoraremos en la construcción de tu CV, construcción de tu portfolio, entre otros, también te prepararemos para entrevistas de manera personalizada (según el puesto al que estés aplicando) para que seas el candidato perfecto para las empresas.  </p>\r\n<p> </p>' + "organizer": { + "description": { + "text": "4Geeks Academy somos un Bootcamp de programación con más de seis campus en España, Estados Unidos, Chile y Venezuela. Estamos enfocados en desarrollar las habilidades necesarias para convertirte en un programador de software completo y exitoso. Contamos con más de 600 graduados, ¡el 90% ya está trabajando como programador!\r\nTenemos dos tipos de programas Part-Time y Full-Time. En cada una de ellos, recibirás la mejor atención posible, nuestras clases son personalizadas (en promedio 1 profesor cada 5 estudiantes). Recibirás asesoramiento en todo momento a través de nuestras mentorías uno a uno y soporte online y offline, incluso después de que consigas trabajo. Además, nos encargaremos de ayudarte a conseguir trabajo, te asesoraremos en la construcción de tu CV, construcción de tu portfolio, entre otros, también te prepararemos para entrevistas de manera personalizada (según el puesto al que estés aplicando) para que seas el candidato perfecto para las empresas.  \r\n ", + "html": "<p>4Geeks Academy somos un Bootcamp de programación con más de seis campus en España, Estados Unidos, Chile y Venezuela. Estamos enfocados en desarrollar las habilidades necesarias para convertirte en un programador de software completo y exitoso. Contamos con más de 600 graduados, ¡el 90% ya está trabajando como programador!</p>\r\n<p>Tenemos dos tipos de programas Part-Time y Full-Time. En cada una de ellos, recibirás la mejor atención posible, nuestras clases son personalizadas (en promedio 1 profesor cada 5 estudiantes). 
Recibirás asesoramiento en todo momento a través de nuestras mentorías uno a uno y soporte online y offline, incluso después de que consigas trabajo. Además, nos encargaremos de ayudarte a conseguir trabajo, te asesoraremos en la construcción de tu CV, construcción de tu portfolio, entre otros, también te prepararemos para entrevistas de manera personalizada (según el puesto al que estés aplicando) para que seas el candidato perfecto para las empresas.  </p>\r\n<p> </p>", + }, + "long_description": { + "text": "4Geeks Academy somos un Bootcamp de programación con más de seis campus en España, Estados Unidos, Chile y Venezuela. Estamos enfocados en desarrollar las habilidades necesarias para convertirte en un programador de software completo y exitoso. Contamos con más de 600 graduados, ¡el 90% ya está trabajando como programador!\r\nTenemos dos tipos de programas Part-Time y Full-Time. En cada una de ellos, recibirás la mejor atención posible, nuestras clases son personalizadas (en promedio 1 profesor cada 5 estudiantes). Recibirás asesoramiento en todo momento a través de nuestras mentorías uno a uno y soporte online y offline, incluso después de que consigas trabajo. Además, nos encargaremos de ayudarte a conseguir trabajo, te asesoraremos en la construcción de tu CV, construcción de tu portfolio, entre otros, también te prepararemos para entrevistas de manera personalizada (según el puesto al que estés aplicando) para que seas el candidato perfecto para las empresas.  \r\n ", + "html": "<p>4Geeks Academy somos un Bootcamp de programación con más de seis campus en España, Estados Unidos, Chile y Venezuela. Estamos enfocados en desarrollar las habilidades necesarias para convertirte en un programador de software completo y exitoso. Contamos con más de 600 graduados, ¡el 90% ya está trabajando como programador!</p>\r\n<p>Tenemos dos tipos de programas Part-Time y Full-Time. En cada una de ellos, recibirás la mejor atención posible, nuestras clases son personalizadas (en promedio 1 profesor cada 5 estudiantes). Recibirás asesoramiento en todo momento a través de nuestras mentorías uno a uno y soporte online y offline, incluso después de que consigas trabajo. Además, nos encargaremos de ayudarte a conseguir trabajo, te asesoraremos en la construcción de tu CV, construcción de tu portfolio, entre otros, también te prepararemos para entrevistas de manera personalizada (según el puesto al que estés aplicando) para que seas el candidato perfecto para las empresas.  
</p>\r\n<p> </p>", + }, + "resource_uri": "https://www.eventbriteapi.com/v3/organizers/1/", + "_type": "organizer", + "id": "1", + "name": "4Geeks Academy España", + "url": "https://www.eventbrite.com/o/4geeks-academy-espana-1", + "num_past_events": 41, + "num_future_events": 1, + "twitter": "@4geeksacademyes", + "facebook": "4geeksacademyes", + "instagram": "23558070851", + "organization_id": "1", + "disable_marketing_opt_in": False, + "logo_id": "1", }, - 'resource_uri': 'https://www.eventbriteapi.com/v3/organizers/1/', - '_type': 'organizer', - 'id': '1', - 'name': '4Geeks Academy España', - 'url': 'https://www.eventbrite.com/o/4geeks-academy-espana-1', - 'num_past_events': 41, - 'num_future_events': 1, - 'twitter': '@4geeksacademyes', - 'facebook': '4geeksacademyes', - 'instagram': '23558070851', - 'organization_id': '1', - 'disable_marketing_opt_in': False, - 'logo_id': '1' - }, - 'venue': { # TODO: check this section - 'address': { - 'address_1': '11200 Southwest 8th Street', - 'address_2': '', - 'city': 'Miami', - 'region': 'FL', - 'postal_code': '33174', - 'country': 'US', - 'latitude': '25.7580596', - 'longitude': '-80.37702200000001', - 'localized_address_display': '11200 Southwest 8th Street, Miami, FL 33174', - 'localized_area_display': 'Miami, FL', - 'localized_multi_line_address_display': ['11200 Southwest 8th Street', 'Miami, FL 33174'] + "venue": { # TODO: check this section + "address": { + "address_1": "11200 Southwest 8th Street", + "address_2": "", + "city": "Miami", + "region": "FL", + "postal_code": "33174", + "country": "US", + "latitude": "25.7580596", + "longitude": "-80.37702200000001", + "localized_address_display": "11200 Southwest 8th Street, Miami, FL 33174", + "localized_area_display": "Miami, FL", + "localized_multi_line_address_display": ["11200 Southwest 8th Street", "Miami, FL 33174"], + }, + "resource_uri": "https://www.eventbriteapi.com/v3/venues/1/", + "id": "1", + "age_restriction": None, + "capacity": None, + "name": "Florida International University College of Business", + "latitude": "25.7580596", + "longitude": "-80.37702200000001", }, - 'resource_uri': 'https://www.eventbriteapi.com/v3/venues/1/', - 'id': '1', - 'age_restriction': None, - 'capacity': None, - 'name': 'Florida International University College of Business', - 'latitude': '25.7580596', - 'longitude': '-80.37702200000001' } - }] + ], } def get_eventbrite_events_url(id: str): - return EVENTBRITE_EVENTS_URL.replace(':id', id) + return EVENTBRITE_EVENTS_URL.replace(":id", id) diff --git a/breathecode/tests/mocks/eventbrite/constants/order.py b/breathecode/tests/mocks/eventbrite/constants/order.py index b245909da..2e592a43f 100644 --- a/breathecode/tests/mocks/eventbrite/constants/order.py +++ b/breathecode/tests/mocks/eventbrite/constants/order.py @@ -1,867 +1,611 @@ # https://www.eventbrite.com.mx/platform/api#/reference/order/retrieve/retrieve-order-by-id?console=1 # https://www.eventbriteapi.com/v3/orders/1/ -EVENTBRITE_ORDER_URL = 'https://www.eventbriteapi.com/v3/events/1/orders/1/' +EVENTBRITE_ORDER_URL = "https://www.eventbriteapi.com/v3/events/1/orders/1/" EVENTBRITE_ORDER = { - 'id': - '1', - 'created': - '2018-05-12T02:00:00Z', - 'changed': - '2018-05-12T02:00:00Z', - 'name': - 'John Smith', - 'first_name': - 'John', - 'last_name': - 'Smith', - 'email': - 'john.smith@example.com', - 'costs': { - 'base_price': { - 'currency': 'USD', - 'value': 432, - 'major_value': '4.32', - 'display': '4.32 USD' - }, - 'display_price': { - 'currency': 'USD', - 'value': 432, - 'major_value': 
'4.32', - 'display': '4.32 USD' - }, - 'display_fee': { - 'currency': 'USD', - 'value': 432, - 'major_value': '4.32', - 'display': '4.32 USD' - }, - 'gross': { - 'currency': 'USD', - 'value': 432, - 'major_value': '4.32', - 'display': '4.32 USD' - }, - 'eventbrite_fee': { - 'currency': 'USD', - 'value': 432, - 'major_value': '4.32', - 'display': '4.32 USD' - }, - 'payment_fee': { - 'currency': 'USD', - 'value': 432, - 'major_value': '4.32', - 'display': '4.32 USD' - }, - 'tax': { - 'currency': 'USD', - 'value': 432, - 'major_value': '4.32', - 'display': '4.32 USD' - }, - 'display_tax': { - 'currency': 'USD', - 'value': 432, - 'major_value': '4.32', - 'display': '4.32 USD' - }, - 'price_before_discount': { - 'currency': 'USD', - 'value': 432, - 'major_value': '4.32', - 'display': '4.32 USD' - }, - 'discount_amount': { - 'currency': 'USD', - 'value': 432, - 'major_value': '4.32', - 'display': '4.32 USD' - }, - 'discount_type': - 'coded', - 'fee_components': [{ - 'intermediate': False, - 'name': 'royalty', - 'internal_name': 'service fee', - 'group_name': 'service fee', - 'value': 200, - 'discount': { - 'amount': { - 'currency': 'USD', - 'value': 432, - 'major_value': '4.32', - 'display': '4.32 USD' + "id": "1", + "created": "2018-05-12T02:00:00Z", + "changed": "2018-05-12T02:00:00Z", + "name": "John Smith", + "first_name": "John", + "last_name": "Smith", + "email": "john.smith@example.com", + "costs": { + "base_price": {"currency": "USD", "value": 432, "major_value": "4.32", "display": "4.32 USD"}, + "display_price": {"currency": "USD", "value": 432, "major_value": "4.32", "display": "4.32 USD"}, + "display_fee": {"currency": "USD", "value": 432, "major_value": "4.32", "display": "4.32 USD"}, + "gross": {"currency": "USD", "value": 432, "major_value": "4.32", "display": "4.32 USD"}, + "eventbrite_fee": {"currency": "USD", "value": 432, "major_value": "4.32", "display": "4.32 USD"}, + "payment_fee": {"currency": "USD", "value": 432, "major_value": "4.32", "display": "4.32 USD"}, + "tax": {"currency": "USD", "value": 432, "major_value": "4.32", "display": "4.32 USD"}, + "display_tax": {"currency": "USD", "value": 432, "major_value": "4.32", "display": "4.32 USD"}, + "price_before_discount": {"currency": "USD", "value": 432, "major_value": "4.32", "display": "4.32 USD"}, + "discount_amount": {"currency": "USD", "value": 432, "major_value": "4.32", "display": "4.32 USD"}, + "discount_type": "coded", + "fee_components": [ + { + "intermediate": False, + "name": "royalty", + "internal_name": "service fee", + "group_name": "service fee", + "value": 200, + "discount": { + "amount": {"currency": "USD", "value": 432, "major_value": "4.32", "display": "4.32 USD"}, + "reason": "TOGGLED_OFF_FEE", }, - 'reason': 'TOGGLED_OFF_FEE' - }, - 'rule': { - 'id': '1' - }, - 'base': 'item.display-includable', - 'bucket': 'fee', - 'recipient': 'event.6018', - 'payer': 'attendee' - }], - 'tax_components': [{ - 'intermediate': False, - 'name': 'royalty', - 'internal_name': 'service fee', - 'group_name': 'service fee', - 'value': 200, - 'discount': { - 'amount': { - 'currency': 'USD', - 'value': 432, - 'major_value': '4.32', - 'display': '4.32 USD' + "rule": {"id": "1"}, + "base": "item.display-includable", + "bucket": "fee", + "recipient": "event.6018", + "payer": "attendee", + } + ], + "tax_components": [ + { + "intermediate": False, + "name": "royalty", + "internal_name": "service fee", + "group_name": "service fee", + "value": 200, + "discount": { + "amount": {"currency": "USD", "value": 432, "major_value": "4.32", 
"display": "4.32 USD"}, + "reason": "TOGGLED_OFF_FEE", }, - 'reason': 'TOGGLED_OFF_FEE' - }, - 'rule': { - 'id': '1' - }, - 'base': 'item.display-includable', - 'bucket': 'fee', - 'recipient': 'event.6018', - 'payer': 'attendee' - }], - 'shipping_components': [{ - 'intermediate': False, - 'name': 'royalty', - 'internal_name': 'service fee', - 'group_name': 'service fee', - 'value': 200, - 'discount': { - 'amount': { - 'currency': 'USD', - 'value': 432, - 'major_value': '4.32', - 'display': '4.32 USD' + "rule": {"id": "1"}, + "base": "item.display-includable", + "bucket": "fee", + "recipient": "event.6018", + "payer": "attendee", + } + ], + "shipping_components": [ + { + "intermediate": False, + "name": "royalty", + "internal_name": "service fee", + "group_name": "service fee", + "value": 200, + "discount": { + "amount": {"currency": "USD", "value": 432, "major_value": "4.32", "display": "4.32 USD"}, + "reason": "TOGGLED_OFF_FEE", }, - 'reason': 'TOGGLED_OFF_FEE' - }, - 'rule': { - 'id': '1' - }, - 'base': 'item.display-includable', - 'bucket': 'fee', - 'recipient': 'event.6018', - 'payer': 'attendee' - }], - 'has_gts_tax': - False, - 'tax_name': - 'VAT' + "rule": {"id": "1"}, + "base": "item.display-includable", + "bucket": "fee", + "recipient": "event.6018", + "payer": "attendee", + } + ], + "has_gts_tax": False, + "tax_name": "VAT", }, - 'event_id': - '1', - 'event': { - 'id': '1', - 'name': { - 'text': 'Some text', - 'html': '<p>Some text</p>' - }, - 'description': { - 'text': 'Some text', - 'html': '<p>Some text</p>' - }, - 'start': { - 'timezone': 'America/Los_Angeles', - 'utc': '2018-05-12T02:00:00Z', - 'local': '2018-05-11T19:00:00' - }, - 'end': { - 'timezone': 'America/Los_Angeles', - 'utc': '2018-05-12T02:00:00Z', - 'local': '2018-05-11T19:00:00' + "event_id": "1", + "event": { + "id": "1", + "name": {"text": "Some text", "html": "<p>Some text</p>"}, + "description": {"text": "Some text", "html": "<p>Some text</p>"}, + "start": {"timezone": "America/Los_Angeles", "utc": "2018-05-12T02:00:00Z", "local": "2018-05-11T19:00:00"}, + "end": {"timezone": "America/Los_Angeles", "utc": "2018-05-12T02:00:00Z", "local": "2018-05-11T19:00:00"}, + "url": "https://www.eventbrite.com/e/1", + "vanity_url": "https://testevent.eventbrite.com", + "created": "2017-02-19T20:28:14Z", + "changed": "2017-02-19T20:28:14Z", + "published": "2017-02-19T20:28:14Z", + "status": "live", + "currency": "USD", + "online_event": False, + "organization_id": "1", + "organizer_id": "1", + "organizer": { + "name": "", + "description": {"text": "Some text", "html": "<p>Some text</p>"}, + "long_description": {"text": "Some text", "html": "<p>Some text</p>"}, + "logo_id": None, + "logo": { + "id": "1", + "url": "https://image.com", + "crop_mask": {"top_left": {"y": 15, "x": 15}, "width": 15, "height": 15}, + "original": {"url": "https://image.com", "width": 800, "height": 400}, + "aspect_ratio": "2", + "edge_color": "#6a7c8b", + "edge_color_set": True, + }, + "resource_uri": "https://www.eventbriteapi.com/v3/organizers/1/", + "id": "1", + "url": "https://www.eventbrite.com/o/1/", + "num_past_events": 5, + "num_future_events": 1, + "twitter": "@abc", + "facebook": "abc", + }, + "logo_id": None, + "logo": { + "id": "1", + "url": "https://image.com", + "crop_mask": {"top_left": {"y": 15, "x": 15}, "width": 15, "height": 15}, + "original": {"url": "https://image.com", "width": 800, "height": 400}, + "aspect_ratio": "2", + "edge_color": "#6a7c8b", + "edge_color_set": True, + }, + "venue": { + "name": "Great Venue", + 
"age_restriction": None, + "capacity": 100, + "address": { + "address_1": None, + "address_2": None, + "city": None, + "region": None, + "postal_code": None, + "country": None, + "latitude": None, + "longitude": None, + }, + "resource_uri": "https://www.eventbriteapi.com/v3/venues/1/", + "id": "1", + "latitude": "49.28497549999999", + "longitude": "123.11082529999999", + }, + "format_id": None, + "format": { + "id": "1", + "name": "Seminar or Talk", + "name_localized": "Seminar or Talk", + "short_name": "Seminar", + "short_name_localized": "Seminar", + "resource_uri": "https://www.eventbriteapi.com/v3/formats/1/", + }, + "category": { + "id": "1", + "resource_uri": "https://www.eventbriteapi.com/v3/categories/1/", + "name": "Music", + "name_localized": "Music", + "short_name": "Music", + "short_name_localized": "Music", + "subcategories": [ + { + "id": "1", + "resource_uri": "https://www.eventbriteapi.com/v3/subcategories/1/", + "name": "Classical", + "parent_category": {}, + } + ], + }, + "subcategory": { + "id": "1", + "resource_uri": "https://www.eventbriteapi.com/v3/subcategories/1/", + "name": "Classical", + "parent_category": { + "id": "1", + "resource_uri": "https://www.eventbriteapi.com/v3/categories/1/", + "name": "Music", + "name_localized": "Music", + "short_name": "Music", + "short_name_localized": "Music", + "subcategories": [{}], + }, + }, + "music_properties": {"age_restriction": None, "presented_by": None, "door_time": "2019-05-12T-19:00:00Z"}, + "bookmark_info": {"bookmarked": False}, + "ticket_availability": { + "has_available_tickets": False, + "minimum_ticket_price": {"currency": "USD", "value": 432, "major_value": "4.32", "display": "4.32 USD"}, + "maximum_ticket_price": {"currency": "USD", "value": 432, "major_value": "4.32", "display": "4.32 USD"}, + "is_sold_out": True, + "start_sales_date": { + "timezone": "America/Los_Angeles", + "utc": "2018-05-12T02:00:00Z", + "local": "2018-05-11T19:00:00", + }, + "waitlist_available": False, + }, + "listed": False, + "shareable": False, + "invite_only": False, + "show_remaining": True, + "password": "12345", + "capacity": 100, + "capacity_is_custom": True, + "tx_time_limit": "12345", + "hide_start_date": True, + "hide_end_date": True, + "locale": "en_US", + "is_locked": True, + "privacy_setting": "unlocked", + "is_externally_ticketed": False, + "external_ticketing": { + "external_url": "", + "ticketing_provider_name": "", + "is_free": False, + "minimum_ticket_price": {"currency": "USD", "value": 432, "major_value": "4.32", "display": "4.32 USD"}, + "maximum_ticket_price": {"currency": "USD", "value": 432, "major_value": "4.32", "display": "4.32 USD"}, + "sales_start": "", + "sales_end": "", + }, + "is_series": True, + "is_series_parent": True, + "series_id": "1", + "is_reserved_seating": True, + "show_pick_a_seat": True, + "show_seatmap_thumbnail": True, + "show_colors_in_seatmap_thumbnail": True, + "is_free": True, + "source": "api", + "version": "null", + "resource_uri": "https://www.eventbriteapi.com/v3/events/1/", + "event_sales_status": { + "sales_status": "text", + "start_sales_date": { + "timezone": "America/Los_Angeles", + "utc": "2018-05-12T02:00:00Z", + "local": "2018-05-11T19:00:00", + }, + }, + "checkout_settings": { + "created": "2018-01-31T13:00:00Z", + "changed": "2018-01-31T13:00:00Z", + "country_code": "", + "currency_code": "", + "checkout_method": "paypal", + "offline_settings": [{"payment_method": "CASH", "instructions": ""}], + "user_instrument_vault_id": "", }, - 'url': 'https://www.eventbrite.com/e/1', - 
'vanity_url': 'https://testevent.eventbrite.com', - 'created': '2017-02-19T20:28:14Z', - 'changed': '2017-02-19T20:28:14Z', - 'published': '2017-02-19T20:28:14Z', - 'status': 'live', - 'currency': 'USD', - 'online_event': False, - 'organization_id': '1', - 'organizer_id': '1', - 'organizer': { - 'name': '', - 'description': { - 'text': 'Some text', - 'html': '<p>Some text</p>' - }, - 'long_description': { - 'text': 'Some text', - 'html': '<p>Some text</p>' - }, - 'logo_id': None, - 'logo': { - 'id': '1', - 'url': 'https://image.com', - 'crop_mask': { - 'top_left': { - 'y': 15, - 'x': 15 + }, + "attendees": [ + { + "id": "1", + "created": "2018-05-12T02:00:00Z", + "changed": "2018-05-12T02:00:00Z", + "ticket_class_id": "1", + "ticket_class_name": "General Admission", + "profile": { + "name": "John Smith", + "email": "jhon.smith@example.com", + "first_name": "John", + "last_name": "Smith", + "prefix": "Mr.", + "suffix": "Sr", + "age": 33, + "job_title": "Software Enginner", + "company": "Eventbrite", + "website": "https://mysite.com", + "blog": "https://mysite.com", + "gender": "male", + "birth_date": "1984-12-06", + "cell_phone": "555 555-1234", + "work_phone": "555 555-1234", + "addresses": { + "home": { + "address_1": None, + "address_2": None, + "city": None, + "region": None, + "postal_code": None, + "country": None, + "latitude": None, + "longitude": None, + "localized_address_display": "", + "localized_area_display": "", + "localized_multi_line_address_display": [], + }, + "ship": { + "address_1": None, + "address_2": None, + "city": None, + "region": None, + "postal_code": None, + "country": None, + "latitude": None, + "longitude": None, + "localized_address_display": "", + "localized_area_display": "", + "localized_multi_line_address_display": [], + }, + "work": { + "address_1": None, + "address_2": None, + "city": None, + "region": None, + "postal_code": None, + "country": None, + "latitude": None, + "longitude": None, + "localized_address_display": "", + "localized_area_display": "", + "localized_multi_line_address_display": [], + }, + "bill": { + "address_1": None, + "address_2": None, + "city": None, + "region": None, + "postal_code": None, + "country": None, + "latitude": None, + "longitude": None, + "localized_address_display": "", + "localized_area_display": "", + "localized_multi_line_address_display": [], }, - 'width': 15, - 'height': 15 - }, - 'original': { - 'url': 'https://image.com', - 'width': 800, - 'height': 400 - }, - 'aspect_ratio': '2', - 'edge_color': '#6a7c8b', - 'edge_color_set': True - }, - 'resource_uri': 'https://www.eventbriteapi.com/v3/organizers/1/', - 'id': '1', - 'url': 'https://www.eventbrite.com/o/1/', - 'num_past_events': 5, - 'num_future_events': 1, - 'twitter': '@abc', - 'facebook': 'abc' - }, - 'logo_id': None, - 'logo': { - 'id': '1', - 'url': 'https://image.com', - 'crop_mask': { - 'top_left': { - 'y': 15, - 'x': 15 }, - 'width': 15, - 'height': 15 }, - 'original': { - 'url': 'https://image.com', - 'width': 800, - 'height': 400 - }, - 'aspect_ratio': '2', - 'edge_color': '#6a7c8b', - 'edge_color_set': True - }, - 'venue': { - 'name': 'Great Venue', - 'age_restriction': None, - 'capacity': 100, - 'address': { - 'address_1': None, - 'address_2': None, - 'city': None, - 'region': None, - 'postal_code': None, - 'country': None, - 'latitude': None, - 'longitude': None - }, - 'resource_uri': 'https://www.eventbriteapi.com/v3/venues/1/', - 'id': '1', - 'latitude': '49.28497549999999', - 'longitude': '123.11082529999999' - }, - 'format_id': None, - 
'format': { - 'id': '1', - 'name': 'Seminar or Talk', - 'name_localized': 'Seminar or Talk', - 'short_name': 'Seminar', - 'short_name_localized': 'Seminar', - 'resource_uri': 'https://www.eventbriteapi.com/v3/formats/1/' - }, - 'category': { - 'id': - '1', - 'resource_uri': - 'https://www.eventbriteapi.com/v3/categories/1/', - 'name': - 'Music', - 'name_localized': - 'Music', - 'short_name': - 'Music', - 'short_name_localized': - 'Music', - 'subcategories': [{ - 'id': '1', - 'resource_uri': 'https://www.eventbriteapi.com/v3/subcategories/1/', - 'name': 'Classical', - 'parent_category': {} - }] - }, - 'subcategory': { - 'id': '1', - 'resource_uri': 'https://www.eventbriteapi.com/v3/subcategories/1/', - 'name': 'Classical', - 'parent_category': { - 'id': '1', - 'resource_uri': 'https://www.eventbriteapi.com/v3/categories/1/', - 'name': 'Music', - 'name_localized': 'Music', - 'short_name': 'Music', - 'short_name_localized': 'Music', - 'subcategories': [{}] - } - }, - 'music_properties': { - 'age_restriction': None, - 'presented_by': None, - 'door_time': '2019-05-12T-19:00:00Z' - }, - 'bookmark_info': { - 'bookmarked': False - }, - 'ticket_availability': { - 'has_available_tickets': False, - 'minimum_ticket_price': { - 'currency': 'USD', - 'value': 432, - 'major_value': '4.32', - 'display': '4.32 USD' - }, - 'maximum_ticket_price': { - 'currency': 'USD', - 'value': 432, - 'major_value': '4.32', - 'display': '4.32 USD' - }, - 'is_sold_out': True, - 'start_sales_date': { - 'timezone': 'America/Los_Angeles', - 'utc': '2018-05-12T02:00:00Z', - 'local': '2018-05-11T19:00:00' - }, - 'waitlist_available': False - }, - 'listed': False, - 'shareable': False, - 'invite_only': False, - 'show_remaining': True, - 'password': '12345', - 'capacity': 100, - 'capacity_is_custom': True, - 'tx_time_limit': '12345', - 'hide_start_date': True, - 'hide_end_date': True, - 'locale': 'en_US', - 'is_locked': True, - 'privacy_setting': 'unlocked', - 'is_externally_ticketed': False, - 'external_ticketing': { - 'external_url': '', - 'ticketing_provider_name': '', - 'is_free': False, - 'minimum_ticket_price': { - 'currency': 'USD', - 'value': 432, - 'major_value': '4.32', - 'display': '4.32 USD' - }, - 'maximum_ticket_price': { - 'currency': 'USD', - 'value': 432, - 'major_value': '4.32', - 'display': '4.32 USD' - }, - 'sales_start': '', - 'sales_end': '' - }, - 'is_series': True, - 'is_series_parent': True, - 'series_id': '1', - 'is_reserved_seating': True, - 'show_pick_a_seat': True, - 'show_seatmap_thumbnail': True, - 'show_colors_in_seatmap_thumbnail': True, - 'is_free': True, - 'source': 'api', - 'version': 'null', - 'resource_uri': 'https://www.eventbriteapi.com/v3/events/1/', - 'event_sales_status': { - 'sales_status': 'text', - 'start_sales_date': { - 'timezone': 'America/Los_Angeles', - 'utc': '2018-05-12T02:00:00Z', - 'local': '2018-05-11T19:00:00' - } - }, - 'checkout_settings': { - 'created': '2018-01-31T13:00:00Z', - 'changed': '2018-01-31T13:00:00Z', - 'country_code': '', - 'currency_code': '', - 'checkout_method': 'paypal', - 'offline_settings': [{ - 'payment_method': 'CASH', - 'instructions': '' - }], - 'user_instrument_vault_id': '' - } - }, - 'attendees': [{ - 'id': - '1', - 'created': - '2018-05-12T02:00:00Z', - 'changed': - '2018-05-12T02:00:00Z', - 'ticket_class_id': - '1', - 'ticket_class_name': - 'General Admission', - 'profile': { - 'name': 'John Smith', - 'email': 'jhon.smith@example.com', - 'first_name': 'John', - 'last_name': 'Smith', - 'prefix': 'Mr.', - 'suffix': 'Sr', - 'age': 33, - 
'job_title': 'Software Enginner', - 'company': 'Eventbrite', - 'website': 'https://mysite.com', - 'blog': 'https://mysite.com', - 'gender': 'male', - 'birth_date': '1984-12-06', - 'cell_phone': '555 555-1234', - 'work_phone': '555 555-1234', - 'addresses': { - 'home': { - 'address_1': None, - 'address_2': None, - 'city': None, - 'region': None, - 'postal_code': None, - 'country': None, - 'latitude': None, - 'longitude': None, - 'localized_address_display': '', - 'localized_area_display': '', - 'localized_multi_line_address_display': [] - }, - 'ship': { - 'address_1': None, - 'address_2': None, - 'city': None, - 'region': None, - 'postal_code': None, - 'country': None, - 'latitude': None, - 'longitude': None, - 'localized_address_display': '', - 'localized_area_display': '', - 'localized_multi_line_address_display': [] - }, - 'work': { - 'address_1': None, - 'address_2': None, - 'city': None, - 'region': None, - 'postal_code': None, - 'country': None, - 'latitude': None, - 'longitude': None, - 'localized_address_display': '', - 'localized_area_display': '', - 'localized_multi_line_address_display': [] - }, - 'bill': { - 'address_1': None, - 'address_2': None, - 'city': None, - 'region': None, - 'postal_code': None, - 'country': None, - 'latitude': None, - 'longitude': None, - 'localized_address_display': '', - 'localized_area_display': '', - 'localized_multi_line_address_display': [] + "questions": [{"id": "1", "label": "What's your question?", "type": "text", "required": False}], + "answers": [ + { + "question_id": "1", + "attendee_id": "1", + "question": "What's your question?", + "type": "text", + "answer": "This is my answer", } - } - }, - 'questions': [{ - 'id': '1', - 'label': "What's your question?", - 'type': 'text', - 'required': False - }], - 'answers': [{ - 'question_id': '1', - 'attendee_id': '1', - 'question': "What's your question?", - 'type': 'text', - 'answer': 'This is my answer' - }], - 'barcodes': [{ - 'barcode': '1234093511009831492001', - 'status': 'unused', - 'created': '2018-08-18T22:24:03Z', - 'changed': '2018-08-18T22:24:03Z', - 'checkin_type': 0, - 'is_printed': False - }], - 'team': { - 'id': '1', - 'name': 'Great Team!', - 'date_joined': '2018-05-12T02:00:00Z', - 'event_id': '1' - }, - 'affiliate': - 'affiliate_code', - 'checked_in': - False, - 'cancelled': - False, - 'refunded': - False, - 'costs': { - 'base_price': { - 'currency': 'USD', - 'value': 432, - 'major_value': '4.32', - 'display': '4.32 USD' - }, - 'gross': { - 'currency': 'USD', - 'value': 432, - 'major_value': '4.32', - 'display': '4.32 USD' - }, - 'eventbrite_fee': { - 'currency': 'USD', - 'value': 432, - 'major_value': '4.32', - 'display': '4.32 USD' - }, - 'payment_fee': { - 'currency': 'USD', - 'value': 432, - 'major_value': '4.32', - 'display': '4.32 USD' - }, - 'tax': { - 'currency': 'USD', - 'value': 432, - 'major_value': '4.32', - 'display': '4.32 USD' - } - }, - 'status': - '', - 'event_id': - '1', - 'event': { - 'id': '1', - 'name': { - 'text': 'Some text', - 'html': '<p>Some text</p>' - }, - 'description': { - 'text': 'Some text', - 'html': '<p>Some text</p>' - }, - 'start': { - 'timezone': 'America/Los_Angeles', - 'utc': '2018-05-12T02:00:00Z', - 'local': '2018-05-11T19:00:00' - }, - 'end': { - 'timezone': 'America/Los_Angeles', - 'utc': '2018-05-12T02:00:00Z', - 'local': '2018-05-11T19:00:00' - }, - 'url': 'https://www.eventbrite.com/e/1', - 'vanity_url': 'https://testevent.eventbrite.com', - 'created': '2017-02-19T20:28:14Z', - 'changed': '2017-02-19T20:28:14Z', - 'published': 
'2017-02-19T20:28:14Z', - 'status': 'live', - 'currency': 'USD', - 'online_event': False, - 'organization_id': '1', - 'organizer_id': '1', - 'organizer': { - 'name': '', - 'description': { - 'text': 'Some text', - 'html': '<p>Some text</p>' + ], + "barcodes": [ + { + "barcode": "1234093511009831492001", + "status": "unused", + "created": "2018-08-18T22:24:03Z", + "changed": "2018-08-18T22:24:03Z", + "checkin_type": 0, + "is_printed": False, + } + ], + "team": {"id": "1", "name": "Great Team!", "date_joined": "2018-05-12T02:00:00Z", "event_id": "1"}, + "affiliate": "affiliate_code", + "checked_in": False, + "cancelled": False, + "refunded": False, + "costs": { + "base_price": {"currency": "USD", "value": 432, "major_value": "4.32", "display": "4.32 USD"}, + "gross": {"currency": "USD", "value": 432, "major_value": "4.32", "display": "4.32 USD"}, + "eventbrite_fee": {"currency": "USD", "value": 432, "major_value": "4.32", "display": "4.32 USD"}, + "payment_fee": {"currency": "USD", "value": 432, "major_value": "4.32", "display": "4.32 USD"}, + "tax": {"currency": "USD", "value": 432, "major_value": "4.32", "display": "4.32 USD"}, + }, + "status": "", + "event_id": "1", + "event": { + "id": "1", + "name": {"text": "Some text", "html": "<p>Some text</p>"}, + "description": {"text": "Some text", "html": "<p>Some text</p>"}, + "start": { + "timezone": "America/Los_Angeles", + "utc": "2018-05-12T02:00:00Z", + "local": "2018-05-11T19:00:00", }, - 'long_description': { - 'text': 'Some text', - 'html': '<p>Some text</p>' + "end": { + "timezone": "America/Los_Angeles", + "utc": "2018-05-12T02:00:00Z", + "local": "2018-05-11T19:00:00", }, - 'logo_id': None, - 'logo': { - 'id': '1', - 'url': 'https://image.com', - 'crop_mask': { - 'top_left': { - 'y': 15, - 'x': 15 - }, - 'width': 15, - 'height': 15 + "url": "https://www.eventbrite.com/e/1", + "vanity_url": "https://testevent.eventbrite.com", + "created": "2017-02-19T20:28:14Z", + "changed": "2017-02-19T20:28:14Z", + "published": "2017-02-19T20:28:14Z", + "status": "live", + "currency": "USD", + "online_event": False, + "organization_id": "1", + "organizer_id": "1", + "organizer": { + "name": "", + "description": {"text": "Some text", "html": "<p>Some text</p>"}, + "long_description": {"text": "Some text", "html": "<p>Some text</p>"}, + "logo_id": None, + "logo": { + "id": "1", + "url": "https://image.com", + "crop_mask": {"top_left": {"y": 15, "x": 15}, "width": 15, "height": 15}, + "original": {"url": "https://image.com", "width": 800, "height": 400}, + "aspect_ratio": "2", + "edge_color": "#6a7c8b", + "edge_color_set": True, }, - 'original': { - 'url': 'https://image.com', - 'width': 800, - 'height': 400 - }, - 'aspect_ratio': '2', - 'edge_color': '#6a7c8b', - 'edge_color_set': True + "resource_uri": "https://www.eventbriteapi.com/v3/organizers/1/", + "id": "1", + "url": "https://www.eventbrite.com/o/1/", + "num_past_events": 5, + "num_future_events": 1, + "twitter": "@abc", + "facebook": "abc", }, - 'resource_uri': 'https://www.eventbriteapi.com/v3/organizers/1/', - 'id': '1', - 'url': 'https://www.eventbrite.com/o/1/', - 'num_past_events': 5, - 'num_future_events': 1, - 'twitter': '@abc', - 'facebook': 'abc' - }, - 'logo_id': None, - 'logo': { - 'id': '1', - 'url': 'https://image.com', - 'crop_mask': { - 'top_left': { - 'y': 15, - 'x': 15 + "logo_id": None, + "logo": { + "id": "1", + "url": "https://image.com", + "crop_mask": {"top_left": {"y": 15, "x": 15}, "width": 15, "height": 15}, + "original": {"url": "https://image.com", "width": 800, 
"height": 400}, + "aspect_ratio": "2", + "edge_color": "#6a7c8b", + "edge_color_set": True, + }, + "venue": { + "name": "Great Venue", + "age_restriction": None, + "capacity": 100, + "address": { + "address_1": None, + "address_2": None, + "city": None, + "region": None, + "postal_code": None, + "country": None, + "latitude": None, + "longitude": None, }, - 'width': 15, - 'height': 15 + "resource_uri": "https://www.eventbriteapi.com/v3/venues/1/", + "id": "1", + "latitude": "49.28497549999999", + "longitude": "123.11082529999999", }, - 'original': { - 'url': 'https://image.com', - 'width': 800, - 'height': 400 + "format_id": None, + "format": { + "id": "1", + "name": "Seminar or Talk", + "name_localized": "Seminar or Talk", + "short_name": "Seminar", + "short_name_localized": "Seminar", + "resource_uri": "https://www.eventbriteapi.com/v3/formats/1/", }, - 'aspect_ratio': '2', - 'edge_color': '#6a7c8b', - 'edge_color_set': True - }, - 'venue': { - 'name': 'Great Venue', - 'age_restriction': None, - 'capacity': 100, - 'address': { - 'address_1': None, - 'address_2': None, - 'city': None, - 'region': None, - 'postal_code': None, - 'country': None, - 'latitude': None, - 'longitude': None + "category": { + "id": "1", + "resource_uri": "https://www.eventbriteapi.com/v3/categories/1/", + "name": "Music", + "name_localized": "Music", + "short_name": "Music", + "short_name_localized": "Music", + "subcategories": [ + { + "id": "1", + "resource_uri": "https://www.eventbriteapi.com/v3/subcategories/1/", + "name": "Classical", + "parent_category": {}, + } + ], }, - 'resource_uri': 'https://www.eventbriteapi.com/v3/venues/1/', - 'id': '1', - 'latitude': '49.28497549999999', - 'longitude': '123.11082529999999' - }, - 'format_id': None, - 'format': { - 'id': '1', - 'name': 'Seminar or Talk', - 'name_localized': 'Seminar or Talk', - 'short_name': 'Seminar', - 'short_name_localized': 'Seminar', - 'resource_uri': 'https://www.eventbriteapi.com/v3/formats/1/' - }, - 'category': { - 'id': - '1', - 'resource_uri': - 'https://www.eventbriteapi.com/v3/categories/1/', - 'name': - 'Music', - 'name_localized': - 'Music', - 'short_name': - 'Music', - 'short_name_localized': - 'Music', - 'subcategories': [{ - 'id': '1', - 'resource_uri': 'https://www.eventbriteapi.com/v3/subcategories/1/', - 'name': 'Classical', - 'parent_category': {} - }] - }, - 'subcategory': { - 'id': '1', - 'resource_uri': 'https://www.eventbriteapi.com/v3/subcategories/1/', - 'name': 'Classical', - 'parent_category': { - 'id': '1', - 'resource_uri': 'https://www.eventbriteapi.com/v3/categories/1/', - 'name': 'Music', - 'name_localized': 'Music', - 'short_name': 'Music', - 'short_name_localized': 'Music', - 'subcategories': [{}] - } - }, - 'music_properties': { - 'age_restriction': None, - 'presented_by': None, - 'door_time': '2019-05-12T-19:00:00Z' - }, - 'bookmark_info': { - 'bookmarked': False - }, - 'ticket_availability': { - 'has_available_tickets': False, - 'minimum_ticket_price': { - 'currency': 'USD', - 'value': 432, - 'major_value': '4.32', - 'display': '4.32 USD' + "subcategory": { + "id": "1", + "resource_uri": "https://www.eventbriteapi.com/v3/subcategories/1/", + "name": "Classical", + "parent_category": { + "id": "1", + "resource_uri": "https://www.eventbriteapi.com/v3/categories/1/", + "name": "Music", + "name_localized": "Music", + "short_name": "Music", + "short_name_localized": "Music", + "subcategories": [{}], + }, }, - 'maximum_ticket_price': { - 'currency': 'USD', - 'value': 432, - 'major_value': '4.32', - 'display': '4.32 
USD' + "music_properties": { + "age_restriction": None, + "presented_by": None, + "door_time": "2019-05-12T-19:00:00Z", }, - 'is_sold_out': True, - 'start_sales_date': { - 'timezone': 'America/Los_Angeles', - 'utc': '2018-05-12T02:00:00Z', - 'local': '2018-05-11T19:00:00' + "bookmark_info": {"bookmarked": False}, + "ticket_availability": { + "has_available_tickets": False, + "minimum_ticket_price": { + "currency": "USD", + "value": 432, + "major_value": "4.32", + "display": "4.32 USD", + }, + "maximum_ticket_price": { + "currency": "USD", + "value": 432, + "major_value": "4.32", + "display": "4.32 USD", + }, + "is_sold_out": True, + "start_sales_date": { + "timezone": "America/Los_Angeles", + "utc": "2018-05-12T02:00:00Z", + "local": "2018-05-11T19:00:00", + }, + "waitlist_available": False, }, - 'waitlist_available': False - }, - 'listed': False, - 'shareable': False, - 'invite_only': False, - 'show_remaining': True, - 'password': '12345', - 'capacity': 100, - 'capacity_is_custom': True, - 'tx_time_limit': '12345', - 'hide_start_date': True, - 'hide_end_date': True, - 'locale': 'en_US', - 'is_locked': True, - 'privacy_setting': 'unlocked', - 'is_externally_ticketed': False, - 'external_ticketing': { - 'external_url': '', - 'ticketing_provider_name': '', - 'is_free': False, - 'minimum_ticket_price': { - 'currency': 'USD', - 'value': 432, - 'major_value': '4.32', - 'display': '4.32 USD' + "listed": False, + "shareable": False, + "invite_only": False, + "show_remaining": True, + "password": "12345", + "capacity": 100, + "capacity_is_custom": True, + "tx_time_limit": "12345", + "hide_start_date": True, + "hide_end_date": True, + "locale": "en_US", + "is_locked": True, + "privacy_setting": "unlocked", + "is_externally_ticketed": False, + "external_ticketing": { + "external_url": "", + "ticketing_provider_name": "", + "is_free": False, + "minimum_ticket_price": { + "currency": "USD", + "value": 432, + "major_value": "4.32", + "display": "4.32 USD", + }, + "maximum_ticket_price": { + "currency": "USD", + "value": 432, + "major_value": "4.32", + "display": "4.32 USD", + }, + "sales_start": "", + "sales_end": "", }, - 'maximum_ticket_price': { - 'currency': 'USD', - 'value': 432, - 'major_value': '4.32', - 'display': '4.32 USD' + "is_series": True, + "is_series_parent": True, + "series_id": "1", + "is_reserved_seating": True, + "show_pick_a_seat": True, + "show_seatmap_thumbnail": True, + "show_colors_in_seatmap_thumbnail": True, + "is_free": True, + "source": "api", + "version": "null", + "resource_uri": "https://www.eventbriteapi.com/v3/events/1/", + "event_sales_status": { + "sales_status": "text", + "start_sales_date": { + "timezone": "America/Los_Angeles", + "utc": "2018-05-12T02:00:00Z", + "local": "2018-05-11T19:00:00", + }, + }, + "checkout_settings": { + "created": "2018-01-31T13:00:00Z", + "changed": "2018-01-31T13:00:00Z", + "country_code": "", + "currency_code": "", + "checkout_method": "paypal", + "offline_settings": [{"payment_method": "CASH", "instructions": ""}], + "user_instrument_vault_id": "", }, - 'sales_start': '', - 'sales_end': '' - }, - 'is_series': True, - 'is_series_parent': True, - 'series_id': '1', - 'is_reserved_seating': True, - 'show_pick_a_seat': True, - 'show_seatmap_thumbnail': True, - 'show_colors_in_seatmap_thumbnail': True, - 'is_free': True, - 'source': 'api', - 'version': 'null', - 'resource_uri': 'https://www.eventbriteapi.com/v3/events/1/', - 'event_sales_status': { - 'sales_status': 'text', - 'start_sales_date': { - 'timezone': 'America/Los_Angeles', - 
'utc': '2018-05-12T02:00:00Z', - 'local': '2018-05-11T19:00:00' - } - }, - 'checkout_settings': { - 'created': '2018-01-31T13:00:00Z', - 'changed': '2018-01-31T13:00:00Z', - 'country_code': '', - 'currency_code': '', - 'checkout_method': 'paypal', - 'offline_settings': [{ - 'payment_method': 'CASH', - 'instructions': '' - }], - 'user_instrument_vault_id': '' - } - }, - 'order_id': - '1', - 'guestlist_id': - None, - 'invited_by': - None, - 'assigned_unit': { - 'unit_id': '18-1:2', - 'description': 'Some description', - 'location_image': { - 'url': '', - 'x': 0, - 'y': 0 }, - 'labels': ['100', 'A', '23'], - 'titles': ['Area', 'Row', 'Seat'] - }, - 'delivery_method': - 'electronic', - 'variant_id': - None, - 'contact_list_preferences': { - 'has_contact_list': True, - 'has_opted_in': True, - '_type': 'attendee_contact_list_preferences' - }, - 'resource_uri': - '' - }], - 'time_remaining': - 100, - 'resource_uri': - 'https://www.eventbriteapi.com/v3/orders/1/', - 'status': - 'placed', - 'ticket_buyer_settings': { - 'confirmation_message': { - 'text': 'Some text', - 'html': '<p>Some text</p>' - }, - 'instructions': { - 'text': 'Some text', - 'html': '<p>Some text</p>' - }, - 'event_id': - '1', - 'refund_request_enabled': - False, - 'ticket_class_confirmation_settings': [{ - 'ticket_class_id': '1', - 'event_id': '1', - 'confirmation_message': { - 'text': 'Some text', - 'html': '<p>Some text</p>' + "order_id": "1", + "guestlist_id": None, + "invited_by": None, + "assigned_unit": { + "unit_id": "18-1:2", + "description": "Some description", + "location_image": {"url": "", "x": 0, "y": 0}, + "labels": ["100", "A", "23"], + "titles": ["Area", "Row", "Seat"], + }, + "delivery_method": "electronic", + "variant_id": None, + "contact_list_preferences": { + "has_contact_list": True, + "has_opted_in": True, + "_type": "attendee_contact_list_preferences", + }, + "resource_uri": "", + } + ], + "time_remaining": 100, + "resource_uri": "https://www.eventbriteapi.com/v3/orders/1/", + "status": "placed", + "ticket_buyer_settings": { + "confirmation_message": {"text": "Some text", "html": "<p>Some text</p>"}, + "instructions": {"text": "Some text", "html": "<p>Some text</p>"}, + "event_id": "1", + "refund_request_enabled": False, + "ticket_class_confirmation_settings": [ + { + "ticket_class_id": "1", + "event_id": "1", + "confirmation_message": {"text": "Some text", "html": "<p>Some text</p>"}, } - }] + ], + }, + "contact_list_preferences": { + "has_contact_list": True, + "has_opted_in": True, + "_type": "order_contact_list_preferences", }, - 'contact_list_preferences': { - 'has_contact_list': True, - 'has_opted_in': True, - '_type': 'order_contact_list_preferences' - } } diff --git a/breathecode/tests/mocks/eventbrite/constants/ticket_class.py b/breathecode/tests/mocks/eventbrite/constants/ticket_class.py index e0d2f9ff9..69bff2f22 100644 --- a/breathecode/tests/mocks/eventbrite/constants/ticket_class.py +++ b/breathecode/tests/mocks/eventbrite/constants/ticket_class.py @@ -1,66 +1,41 @@ # https://www.eventbrite.com.mx/platform/docs/ticket-classes -EVENTBRITE_TICKET_CLASS_URL = 'https://www.eventbriteapi.com/v3/events/1/ticket_classes/1/' +EVENTBRITE_TICKET_CLASS_URL = "https://www.eventbriteapi.com/v3/events/1/ticket_classes/1/" EVENTBRITE_TICKET_CLASS = { - 'description': 'General Admission', - 'donation': False, - 'free': False, - 'minimum_quantity': 1, - 'maximum_quantity': 10, - 'delivery_methods': ['electronic', 'will_call', 'standard_shipping', 'third_party_shipping'], - 'cost': { - 'actual_cost': { - 
'currency': 'USD', - 'value': 432, - 'major_value': '4.32', - 'display': '4.32 USD' - }, - 'actual_fee': { - 'currency': 'USD', - 'value': 432, - 'major_value': '4.32', - 'display': '4.32 USD' - }, - 'cost': { - 'currency': 'USD', - 'value': 432, - 'major_value': '4.32', - 'display': '4.32 USD' - }, - 'fee': { - 'currency': 'USD', - 'value': 432, - 'major_value': '4.32', - 'display': '4.32 USD' - }, - 'tax': { - 'currency': 'USD', - 'value': 432, - 'major_value': '4.32', - 'display': '4.32 USD' - } + "description": "General Admission", + "donation": False, + "free": False, + "minimum_quantity": 1, + "maximum_quantity": 10, + "delivery_methods": ["electronic", "will_call", "standard_shipping", "third_party_shipping"], + "cost": { + "actual_cost": {"currency": "USD", "value": 432, "major_value": "4.32", "display": "4.32 USD"}, + "actual_fee": {"currency": "USD", "value": 432, "major_value": "4.32", "display": "4.32 USD"}, + "cost": {"currency": "USD", "value": 432, "major_value": "4.32", "display": "4.32 USD"}, + "fee": {"currency": "USD", "value": 432, "major_value": "4.32", "display": "4.32 USD"}, + "tax": {"currency": "USD", "value": 432, "major_value": "4.32", "display": "4.32 USD"}, }, - 'resource_uri': 'https://www.eventbriteapi.com/v3/events/1234/ticket_classes/12345/', - 'category': 'admission', - 'image_id': '1234', - 'name': 'GA', - 'display_name': 'Gold GA', - 'sorting': 2, - 'capacity': 100, - 'quantity_total': 1000, - 'quantity_sold': 20, - 'sales_start': {}, - 'sales_end': {}, - 'hidden': False, - 'include_fee': False, - 'split_fee': False, - 'hide_description': False, - 'hide_sale_dates': False, - 'auto_hide': False, - 'auto_hide_before': {}, - 'auto_hide_after': {}, - 'sales_start_after': {}, - 'order_confirmation_message': 'Success!', - 'sales_channels': ['online', 'atd'], - 'inventory_tier_id': {}, - 'secondary_assignment_enabled': False + "resource_uri": "https://www.eventbriteapi.com/v3/events/1234/ticket_classes/12345/", + "category": "admission", + "image_id": "1234", + "name": "GA", + "display_name": "Gold GA", + "sorting": 2, + "capacity": 100, + "quantity_total": 1000, + "quantity_sold": 20, + "sales_start": {}, + "sales_end": {}, + "hidden": False, + "include_fee": False, + "split_fee": False, + "hide_description": False, + "hide_sale_dates": False, + "auto_hide": False, + "auto_hide_before": {}, + "auto_hide_after": {}, + "sales_start_after": {}, + "order_confirmation_message": "Success!", + "sales_channels": ["online", "atd"], + "inventory_tier_id": {}, + "secondary_assignment_enabled": False, } diff --git a/breathecode/tests/mocks/eventbrite/constants/venues.py b/breathecode/tests/mocks/eventbrite/constants/venues.py index 8200099dc..e06177277 100644 --- a/breathecode/tests/mocks/eventbrite/constants/venues.py +++ b/breathecode/tests/mocks/eventbrite/constants/venues.py @@ -1,37 +1,33 @@ # https://www.eventbrite.com.mx/platform/api#/reference/event/retrieve-an-event?console=1 -EVENTBRITE_VENUES_URL = 'https://www.eventbriteapi.com/v3/organizations/:id/venues/' +EVENTBRITE_VENUES_URL = "https://www.eventbriteapi.com/v3/organizations/:id/venues/" EVENTBRITE_VENUES = { - 'pagination': { - 'object_count': 1, - 'page_number': 1, - 'page_size': 50, - 'page_count': 1, - 'has_more_items': False - }, - 'venues': [{ - 'address': { - 'address_1': '11200 Southwest 8th Street', - 'address_2': '', - 'city': 'Miami', - 'region': 'FL', - 'postal_code': '33174', - 'country': 'US', - 'latitude': '25.7580596', - 'longitude': '-80.37702200000001', - 'localized_address_display': 
'11200 Southwest 8th Street, Miami, FL 33174', - 'localized_area_display': 'Miami, FL', - 'localized_multi_line_address_display': ['11200 Southwest 8th Street', 'Miami, FL 33174'] - }, - 'resource_uri': 'https://www.eventbriteapi.com/v3/venues/1/', - 'id': '1', - 'age_restriction': None, - 'capacity': None, - 'name': 'Florida International University College of Business', - 'latitude': '25.7580596', - 'longitude': '-80.37702200000001' - }] + "pagination": {"object_count": 1, "page_number": 1, "page_size": 50, "page_count": 1, "has_more_items": False}, + "venues": [ + { + "address": { + "address_1": "11200 Southwest 8th Street", + "address_2": "", + "city": "Miami", + "region": "FL", + "postal_code": "33174", + "country": "US", + "latitude": "25.7580596", + "longitude": "-80.37702200000001", + "localized_address_display": "11200 Southwest 8th Street, Miami, FL 33174", + "localized_area_display": "Miami, FL", + "localized_multi_line_address_display": ["11200 Southwest 8th Street", "Miami, FL 33174"], + }, + "resource_uri": "https://www.eventbriteapi.com/v3/venues/1/", + "id": "1", + "age_restriction": None, + "capacity": None, + "name": "Florida International University College of Business", + "latitude": "25.7580596", + "longitude": "-80.37702200000001", + } + ], } def get_eventbrite_venues_url(id: str): - return EVENTBRITE_VENUES_URL.replace(':id', id) + return EVENTBRITE_VENUES_URL.replace(":id", id) diff --git a/breathecode/tests/mocks/eventbrite/requests_mock.py b/breathecode/tests/mocks/eventbrite/requests_mock.py index 87b30bb46..e89e204a2 100644 --- a/breathecode/tests/mocks/eventbrite/requests_mock.py +++ b/breathecode/tests/mocks/eventbrite/requests_mock.py @@ -12,14 +12,14 @@ ) -class ResponseMock(): +class ResponseMock: """Simutate Response to be used by mocks.""" status_code = None data = None content = None - def __init__(self, status_code=200, data=''): + def __init__(self, status_code=200, data=""): self.status_code = status_code if isinstance(data, str): @@ -48,4 +48,4 @@ def request_mock(url: str, auth=None, data=None, method=None, headers=None, para if url == EVENTBRITE_TICKET_CLASS_URL: return ResponseMock(data=EVENTBRITE_TICKET_CLASS, status_code=200) - return ResponseMock(data={'ok': False, 'status': 'not found'}, status_code=404) + return ResponseMock(data={"ok": False, "status": "not found"}, status_code=404) diff --git a/breathecode/tests/mocks/google_cloud_storage/__init__.py b/breathecode/tests/mocks/google_cloud_storage/__init__.py index 637d81041..4c2e0af55 100644 --- a/breathecode/tests/mocks/google_cloud_storage/__init__.py +++ b/breathecode/tests/mocks/google_cloud_storage/__init__.py @@ -1,34 +1,35 @@ """ Google Cloud Storage Mocks """ + from unittest.mock import MagicMock from .blob_mock import BlobMock from .bucket_mock import BucketMock from .client_mock import ClientMock GOOGLE_CLOUD_PATH = { - 'client': 'google.cloud.storage.Client', - 'bucket': 'google.cloud.storage.Bucket', - 'blob': 'google.cloud.storage.Blob' + "client": "google.cloud.storage.Client", + "bucket": "google.cloud.storage.Bucket", + "blob": "google.cloud.storage.Blob", } GOOGLE_CLOUD_INSTANCES = { - 'client': MagicMock(side_effect=ClientMock), - 'bucket': MagicMock(side_effect=BucketMock), - 'blob': MagicMock(side_effect=BlobMock), + "client": MagicMock(side_effect=ClientMock), + "bucket": MagicMock(side_effect=BucketMock), + "blob": MagicMock(side_effect=BlobMock), } def apply_google_cloud_blob_mock(): """Apply Storage Blob Mock""" - return GOOGLE_CLOUD_INSTANCES['blob'] + return 
GOOGLE_CLOUD_INSTANCES["blob"] def apply_google_cloud_bucket_mock(): """Apply Storage Bucket Mock""" - return GOOGLE_CLOUD_INSTANCES['bucket'] + return GOOGLE_CLOUD_INSTANCES["bucket"] def apply_google_cloud_client_mock(): """Apply Storage Client Mock""" - return GOOGLE_CLOUD_INSTANCES['client'] + return GOOGLE_CLOUD_INSTANCES["client"] diff --git a/breathecode/tests/mocks/google_cloud_storage/blob_mock.py b/breathecode/tests/mocks/google_cloud_storage/blob_mock.py index a1bbcdb30..e9664ef96 100644 --- a/breathecode/tests/mocks/google_cloud_storage/blob_mock.py +++ b/breathecode/tests/mocks/google_cloud_storage/blob_mock.py @@ -1,4 +1,4 @@ -class BlobMock(): +class BlobMock: public_url = None name = None content = None @@ -13,7 +13,7 @@ def upload_from_string(self, data): return None def make_public(self): - self.public_url = f'https://storage.cloud.google.com/{self.bucket.name}/{self.name}' + self.public_url = f"https://storage.cloud.google.com/{self.bucket.name}/{self.name}" def delete(self): return None diff --git a/breathecode/tests/mocks/google_cloud_storage/bucket_mock.py b/breathecode/tests/mocks/google_cloud_storage/bucket_mock.py index 3daed4028..8d11f0bb0 100644 --- a/breathecode/tests/mocks/google_cloud_storage/bucket_mock.py +++ b/breathecode/tests/mocks/google_cloud_storage/bucket_mock.py @@ -1,4 +1,4 @@ -class BucketMock(): +class BucketMock: name = None bucket = None files = {} @@ -11,6 +11,7 @@ def get_blob(self, blob_name): def blob(self, blob_name): from google.cloud.storage import Blob + self.files[blob_name] = Blob(blob_name, self) return self.files[blob_name] diff --git a/breathecode/tests/mocks/google_cloud_storage/client_mock.py b/breathecode/tests/mocks/google_cloud_storage/client_mock.py index 6831de226..9e6c62af9 100644 --- a/breathecode/tests/mocks/google_cloud_storage/client_mock.py +++ b/breathecode/tests/mocks/google_cloud_storage/client_mock.py @@ -1,5 +1,6 @@ -class ClientMock(): +class ClientMock: def bucket(self, bucket_name): from google.cloud.storage import Bucket + return Bucket(bucket_name) diff --git a/breathecode/tests/mocks/logging/__init__.py b/breathecode/tests/mocks/logging/__init__.py index 603d0e71f..e5f99bfd0 100644 --- a/breathecode/tests/mocks/logging/__init__.py +++ b/breathecode/tests/mocks/logging/__init__.py @@ -1,15 +1,16 @@ """ Google Cloud Storage Mocks """ + from unittest.mock import MagicMock LOGGING_PATH = { - 'logger': 'logging.Logger', + "logger": "logging.Logger", } -LOGGING_INSTANCES = {'logger': MagicMock()} +LOGGING_INSTANCES = {"logger": MagicMock()} def apply_logging_logger_mock(): """Apply Storage Blob Mock""" - return LOGGING_INSTANCES['logger'] + return LOGGING_INSTANCES["logger"] diff --git a/breathecode/tests/mocks/mailgun/__init__.py b/breathecode/tests/mocks/mailgun/__init__.py index 6347e345d..d0bbd34fe 100644 --- a/breathecode/tests/mocks/mailgun/__init__.py +++ b/breathecode/tests/mocks/mailgun/__init__.py @@ -1,16 +1,17 @@ """ Google Cloud Storage Mocks """ + from unittest.mock import Mock from .requests_mock import post_mock MAILGUN_PATH = { - 'post': 'requests.post', + "post": "requests.post", } -MAILGUN_INSTANCES = {'post': Mock(side_effect=post_mock)} +MAILGUN_INSTANCES = {"post": Mock(side_effect=post_mock)} def apply_mailgun_requests_post_mock(): """Apply Storage Blob Mock""" - return MAILGUN_INSTANCES['post'] + return MAILGUN_INSTANCES["post"] diff --git a/breathecode/tests/mocks/mailgun/requests_mock.py b/breathecode/tests/mocks/mailgun/requests_mock.py index 4db7a28fb..14c4c6d4a 100644 --- 
a/breathecode/tests/mocks/mailgun/requests_mock.py +++ b/breathecode/tests/mocks/mailgun/requests_mock.py @@ -3,13 +3,14 @@ """ -class ResponseMock(): +class ResponseMock: """Simutate Response to be used by mocks""" + status_code = None data = None content = None - def __init__(self, status_code=200, data=''): + def __init__(self, status_code=200, data=""): self.status_code = status_code if isinstance(data, str): @@ -24,4 +25,4 @@ def json(self) -> dict: def post_mock(url: str, auth=None, data=None, timeout=30): """Requests get mock""" - return ResponseMock(data='ok', status_code=200) + return ResponseMock(data="ok", status_code=200) diff --git a/breathecode/tests/mocks/old_breathecode/__init__.py b/breathecode/tests/mocks/old_breathecode/__init__.py index 374cd3ddb..eb7138ba3 100644 --- a/breathecode/tests/mocks/old_breathecode/__init__.py +++ b/breathecode/tests/mocks/old_breathecode/__init__.py @@ -1,14 +1,15 @@ """ Google Cloud Storage Mocks """ + from unittest.mock import MagicMock from .requests_mock import request_mock OLD_BREATHECODE_PATH = { - 'request': 'requests.request', + "request": "requests.request", } -OLD_BREATHECODE_INSTANCES = {'request': None} +OLD_BREATHECODE_INSTANCES = {"request": None} def apply_old_breathecode_requests_request_mock(): @@ -17,6 +18,6 @@ def apply_old_breathecode_requests_request_mock(): mock = MagicMock(side_effect=request_mock) # don't fix this line, this keep the old behavior - OLD_BREATHECODE_INSTANCES['request'] = OLD_BREATHECODE_INSTANCES['request'] or mock + OLD_BREATHECODE_INSTANCES["request"] = OLD_BREATHECODE_INSTANCES["request"] or mock return mock diff --git a/breathecode/tests/mocks/old_breathecode/constants/__init__.py b/breathecode/tests/mocks/old_breathecode/constants/__init__.py index 9cbd6ee98..93f45414d 100644 --- a/breathecode/tests/mocks/old_breathecode/constants/__init__.py +++ b/breathecode/tests/mocks/old_breathecode/constants/__init__.py @@ -1,5 +1,6 @@ """ Eventbrite constants """ + from .admin import OLD_BREATHECODE_ADMIN, OLD_BREATHECODE_ADMIN_URL # noqa: F401 from .contact_automations import CONTACT_AUTOMATIONS, CONTACT_AUTOMATIONS_URL # noqa: F401 diff --git a/breathecode/tests/mocks/old_breathecode/constants/admin.py b/breathecode/tests/mocks/old_breathecode/constants/admin.py index ba8689515..68b9920f0 100644 --- a/breathecode/tests/mocks/old_breathecode/constants/admin.py +++ b/breathecode/tests/mocks/old_breathecode/constants/admin.py @@ -1,4 +1,4 @@ # https://www.eventbrite.com.mx/platform/api#/reference/order/retrieve/retrieve-order-by-id?console=1 # https://www.eventbriteapi.com/v3/orders/1/ -OLD_BREATHECODE_ADMIN_URL = 'https://old.hardcoded.breathecode.url/admin/api.php' -OLD_BREATHECODE_ADMIN = {'subscriber_id': 1, 'result_code': 1, 'contacts': [{'id': 1}]} +OLD_BREATHECODE_ADMIN_URL = "https://old.hardcoded.breathecode.url/admin/api.php" +OLD_BREATHECODE_ADMIN = {"subscriber_id": 1, "result_code": 1, "contacts": [{"id": 1}]} diff --git a/breathecode/tests/mocks/old_breathecode/constants/contact_automations.py b/breathecode/tests/mocks/old_breathecode/constants/contact_automations.py index bc7cf69a2..f048933fd 100644 --- a/breathecode/tests/mocks/old_breathecode/constants/contact_automations.py +++ b/breathecode/tests/mocks/old_breathecode/constants/contact_automations.py @@ -1,2 +1,2 @@ -CONTACT_AUTOMATIONS_URL = 'https://old.hardcoded.breathecode.url/api/3/contactAutomations' -CONTACT_AUTOMATIONS = {'subscriber_id': 1, 'result_code': 1, 'contacts': [{'id': 1}]} +CONTACT_AUTOMATIONS_URL = 
"https://old.hardcoded.breathecode.url/api/3/contactAutomations" +CONTACT_AUTOMATIONS = {"subscriber_id": 1, "result_code": 1, "contacts": [{"id": 1}]} diff --git a/breathecode/tests/mocks/old_breathecode/requests_mock.py b/breathecode/tests/mocks/old_breathecode/requests_mock.py index a44cf171e..6393431e6 100644 --- a/breathecode/tests/mocks/old_breathecode/requests_mock.py +++ b/breathecode/tests/mocks/old_breathecode/requests_mock.py @@ -1,17 +1,19 @@ """Requests mock.""" + from .constants import CONTACT_AUTOMATIONS, CONTACT_AUTOMATIONS_URL, OLD_BREATHECODE_ADMIN, OLD_BREATHECODE_ADMIN_URL -class ResponseMock(): +class ResponseMock: """Simutate Response to be used by mocks.""" + status_code = None data = None content = None headers = { - 'Content-Type': 'application/json', + "Content-Type": "application/json", } - def __init__(self, status_code=200, data=''): + def __init__(self, status_code=200, data=""): self.status_code = status_code if isinstance(data, str): @@ -32,4 +34,4 @@ def request_mock(method: str, url: str, auth=None, data=None, headers=None, para if url == CONTACT_AUTOMATIONS_URL: return ResponseMock(data=CONTACT_AUTOMATIONS, status_code=200) - return ResponseMock(data={'ok': False, 'status': 'not found'}, status_code=404) + return ResponseMock(data={"ok": False, "status": "not found"}, status_code=404) diff --git a/breathecode/tests/mocks/requests/__init__.py b/breathecode/tests/mocks/requests/__init__.py index c447b955d..0515c3526 100644 --- a/breathecode/tests/mocks/requests/__init__.py +++ b/breathecode/tests/mocks/requests/__init__.py @@ -1,6 +1,7 @@ """ Mocks for `requests` module """ + from .requests_mock import request_mock # __all__ = [ @@ -10,27 +11,27 @@ # ] REQUESTS_PATH = { - 'get': 'requests.get', - 'post': 'requests.post', - 'put': 'requests.put', - 'patch': 'requests.patch', - 'delete': 'requests.delete', - 'head': 'requests.head', - 'request': 'requests.request', + "get": "requests.get", + "post": "requests.post", + "put": "requests.put", + "patch": "requests.patch", + "delete": "requests.delete", + "head": "requests.head", + "request": "requests.request", } REQUESTS_INSTANCES = { - 'get': None, - 'post': None, - 'put': None, - 'patch': None, - 'delete': None, - 'head': None, - 'request': None, + "get": None, + "post": None, + "put": None, + "patch": None, + "delete": None, + "head": None, + "request": None, } -def apply_requests_mock(method='get', endpoints=None): +def apply_requests_mock(method="get", endpoints=None): """Apply Storage Blob Mock""" if endpoints is None: @@ -72,7 +73,7 @@ def test_xyz(): if endpoints is None: endpoints = [] - return apply_requests_mock('GET', endpoints) + return apply_requests_mock("GET", endpoints) def apply_requests_post_mock(endpoints=None): @@ -106,7 +107,7 @@ def test_xyz(): if endpoints is None: endpoints = [] - return apply_requests_mock('POST', endpoints) + return apply_requests_mock("POST", endpoints) def apply_requests_put_mock(endpoints=None): @@ -140,7 +141,7 @@ def test_xyz(): if endpoints is None: endpoints = [] - return apply_requests_mock('PUT', endpoints) + return apply_requests_mock("PUT", endpoints) def apply_requests_patch_mock(endpoints=None): @@ -174,7 +175,7 @@ def test_xyz(): if endpoints is None: endpoints = [] - return apply_requests_mock('PATCH', endpoints) + return apply_requests_mock("PATCH", endpoints) def apply_requests_delete_mock(endpoints=None): @@ -208,7 +209,7 @@ def test_xyz(): if endpoints is None: endpoints = [] - return apply_requests_mock('DELETE', endpoints) + return 
apply_requests_mock("DELETE", endpoints) def apply_requests_head_mock(endpoints=None): @@ -242,7 +243,7 @@ def test_xyz(): if endpoints is None: endpoints = [] - return apply_requests_mock('HEAD', endpoints) + return apply_requests_mock("HEAD", endpoints) def apply_requests_request_mock(endpoints=None): @@ -277,4 +278,4 @@ def test_xyz(): if endpoints is None: endpoints = [] - return apply_requests_mock('REQUEST', endpoints) + return apply_requests_mock("REQUEST", endpoints) diff --git a/breathecode/tests/mocks/requests/requests_mock.py b/breathecode/tests/mocks/requests/requests_mock.py index db2e14bf1..5d83c51c6 100644 --- a/breathecode/tests/mocks/requests/requests_mock.py +++ b/breathecode/tests/mocks/requests/requests_mock.py @@ -12,8 +12,15 @@ def request_mock(endpoints=None): def base(url: str, *args, **kwargs): """Requests get mock.""" - if (url == 'GET' or url == 'POST' or url == 'PUT' or url == 'PATCH' or url == 'DELETE' or url == 'HEAD' - or url == 'REQUEST'): + if ( + url == "GET" + or url == "POST" + or url == "PUT" + or url == "PATCH" + or url == "DELETE" + or url == "HEAD" + or url == "REQUEST" + ): url = args[0] if len(endpoints[0]) == 4: @@ -29,6 +36,6 @@ def base(url: str, *args, **kwargs): (status, data) = match[0] return ResponseMock(data=data, status_code=status, url=url, request_headers=headers) - return ResponseMock(data='not fount', status_code=404) + return ResponseMock(data="not fount", status_code=404) return Mock(side_effect=base) diff --git a/breathecode/tests/mocks/requests/response_mock.py b/breathecode/tests/mocks/requests/response_mock.py index 146f8c0c2..a533babbd 100644 --- a/breathecode/tests/mocks/requests/response_mock.py +++ b/breathecode/tests/mocks/requests/response_mock.py @@ -1,7 +1,7 @@ import json -class ResponseMock(): +class ResponseMock: """Simutate Response to be used by mocks.""" status_code = None @@ -11,29 +11,33 @@ class ResponseMock(): raw = None url = None headers = { - 'Content-Type': 'application/json', - 'content-type': 'application/json', + "Content-Type": "application/json", + "content-type": "application/json", } - def __init__(self, status_code=200, data='', url='', request_headers=None): + def __init__(self, status_code=200, data="", url="", request_headers=None): self.status_code = status_code - self.reason = 'OK' + self.reason = "OK" self.raw = data self.url = url - self.headers = request_headers if request_headers is not None else { - 'Content-Type': 'application/json', - 'content-type': 'application/json', - } + self.headers = ( + request_headers + if request_headers is not None + else { + "Content-Type": "application/json", + "content-type": "application/json", + } + ) if isinstance(data, str): - self.content = str(data).encode('utf-8') + self.content = str(data).encode("utf-8") self.text = data else: content = json.dumps(data) self.data = data self.text = content - self.content = content.encode('utf-8') + self.content = content.encode("utf-8") def json(self) -> dict: """Convert Response to JSON.""" diff --git a/breathecode/tests/mocks/screenshotmachine/__init__.py b/breathecode/tests/mocks/screenshotmachine/__init__.py index c191dc6e7..f7a4f8064 100644 --- a/breathecode/tests/mocks/screenshotmachine/__init__.py +++ b/breathecode/tests/mocks/screenshotmachine/__init__.py @@ -1,16 +1,17 @@ """ Google Cloud Storage Mocks """ + from unittest.mock import Mock from .requests_mock import get_mock SCREENSHOTMACHINE_PATH = { - 'get': 'requests.get', + "get": "requests.get", } -SCREENSHOTMACHINE_INSTANCES = {'get': 
Mock(side_effect=get_mock)} +SCREENSHOTMACHINE_INSTANCES = {"get": Mock(side_effect=get_mock)} def apply_screenshotmachine_requests_get_mock(): """Apply Storage Blob Mock""" - return SCREENSHOTMACHINE_INSTANCES['get'] + return SCREENSHOTMACHINE_INSTANCES["get"] diff --git a/breathecode/tests/mocks/screenshotmachine/requests_mock.py b/breathecode/tests/mocks/screenshotmachine/requests_mock.py index 75b8ab762..9fa198705 100644 --- a/breathecode/tests/mocks/screenshotmachine/requests_mock.py +++ b/breathecode/tests/mocks/screenshotmachine/requests_mock.py @@ -1,13 +1,14 @@ """Requests mock.""" -class ResponseMock(): +class ResponseMock: """Simutate Response to be used by mocks.""" + status_code = None data = None content = None - def __init__(self, status_code=200, data=''): + def __init__(self, status_code=200, data=""): self.status_code = status_code if isinstance(data, str): @@ -22,4 +23,4 @@ def json(self) -> dict: def get_mock(url: str, stream=False): """Requests get mock.""" - return ResponseMock(data='ok', status_code=200) + return ResponseMock(data="ok", status_code=200) diff --git a/breathecode/tests/mocks/slack/__init__.py b/breathecode/tests/mocks/slack/__init__.py index f26ba643d..84e2f9c05 100644 --- a/breathecode/tests/mocks/slack/__init__.py +++ b/breathecode/tests/mocks/slack/__init__.py @@ -1,15 +1,16 @@ """Google Cloud Storage Mocks.""" + from unittest.mock import Mock from .requests_mock import request_mock SLACK_PATH = { - 'request': 'requests.request', + "request": "requests.request", } -SLACK_INSTANCES = {'request': Mock(side_effect=request_mock)} +SLACK_INSTANCES = {"request": Mock(side_effect=request_mock)} def apply_slack_requests_request_mock(): """Apply Storage Blob Mock.""" - return SLACK_INSTANCES['request'] + return SLACK_INSTANCES["request"] diff --git a/breathecode/tests/mocks/slack/requests_mock.py b/breathecode/tests/mocks/slack/requests_mock.py index 07fdee496..98e84b9da 100644 --- a/breathecode/tests/mocks/slack/requests_mock.py +++ b/breathecode/tests/mocks/slack/requests_mock.py @@ -1,14 +1,14 @@ """Requests mock.""" -class ResponseMock(): +class ResponseMock: """Simutate Response to be used by mocks.""" status_code = None data = None content = None - def __init__(self, status_code=200, data=''): + def __init__(self, status_code=200, data=""): self.status_code = status_code if isinstance(data, str): @@ -25,4 +25,4 @@ def json(self) -> dict: def request_mock(url: str, auth=None, data=None, method=None, headers=None, params=None, json=None, timeout=30): """Requests get mock.""" - return ResponseMock(data={'ok': True}, status_code=200) + return ResponseMock(data={"ok": True}, status_code=200) diff --git a/breathecode/tests/services/google_cloud/tests_credentials.py b/breathecode/tests/services/google_cloud/tests_credentials.py index efb02e849..307dd5885 100644 --- a/breathecode/tests/services/google_cloud/tests_credentials.py +++ b/breathecode/tests/services/google_cloud/tests_credentials.py @@ -6,103 +6,120 @@ from breathecode.services.google_cloud.credentials import resolve_credentials -logger = logging.getLogger('breathecode.setup') +logger = logging.getLogger("breathecode.setup") class CredentialsTestCase(TestCase): - @patch('builtins.open', mock_open(read_data='{}\n')) - @patch('os.path.exists', MagicMock(return_value=False)) - @patch.object(logger, 'error') + @patch("builtins.open", mock_open(read_data="{}\n")) + @patch("os.path.exists", MagicMock(return_value=False)) + @patch.object(logger, "error") def 
test_resolve_credentials__credentials_file_not_exists__without_env(self, logger_mock): from os.path import exists as exists_mock + open_mock = open - if 'GOOGLE_APPLICATION_CREDENTIALS' in os.environ: - del os.environ['GOOGLE_APPLICATION_CREDENTIALS'] + if "GOOGLE_APPLICATION_CREDENTIALS" in os.environ: + del os.environ["GOOGLE_APPLICATION_CREDENTIALS"] - if 'GOOGLE_SERVICE_KEY' in os.environ: - del os.environ['GOOGLE_SERVICE_KEY'] + if "GOOGLE_SERVICE_KEY" in os.environ: + del os.environ["GOOGLE_SERVICE_KEY"] result = resolve_credentials() self.assertEqual(result, False) self.assertEqual(open_mock.mock_calls, []) self.assertEqual(exists_mock.mock_calls, []) - self.assertEqual(logger_mock.mock_calls, [call('GOOGLE_APPLICATION_CREDENTIALS is not set')]) + self.assertEqual(logger_mock.mock_calls, [call("GOOGLE_APPLICATION_CREDENTIALS is not set")]) - self.assertTrue('GOOGLE_APPLICATION_CREDENTIALS' not in os.environ) + self.assertTrue("GOOGLE_APPLICATION_CREDENTIALS" not in os.environ) - @patch('builtins.open', mock_open(read_data='{}\n')) - @patch('os.path.exists', MagicMock(return_value=False)) - @patch.object(logger, 'error') + @patch("builtins.open", mock_open(read_data="{}\n")) + @patch("os.path.exists", MagicMock(return_value=False)) + @patch.object(logger, "error") def test_resolve_credentials__credentials_file_not_exists__without_second_env(self, logger_mock): from os.path import exists as exists_mock - os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = './.lacey_mosley.json' - if 'GOOGLE_SERVICE_KEY' in os.environ: - del os.environ['GOOGLE_SERVICE_KEY'] + os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = "./.lacey_mosley.json" + + if "GOOGLE_SERVICE_KEY" in os.environ: + del os.environ["GOOGLE_SERVICE_KEY"] open_mock = open result = resolve_credentials() self.assertEqual(result, False) self.assertEqual(open_mock.mock_calls, []) - self.assertEqual(exists_mock.mock_calls, [ - call(Path(os.path.join(os.getcwd(), '.lacey_mosley.json'))), - ]) - - self.assertEqual(logger_mock.mock_calls, [call('GOOGLE_SERVICE_KEY is not set')]) - - self.assertEqual(os.environ['GOOGLE_APPLICATION_CREDENTIALS'], - str(Path(os.path.join(os.getcwd(), '.lacey_mosley.json')))) - - @patch('builtins.open', mock_open(read_data='{}\n')) - @patch('os.path.exists', MagicMock(return_value=False)) - @patch.object(logger, 'error') + self.assertEqual( + exists_mock.mock_calls, + [ + call(Path(os.path.join(os.getcwd(), ".lacey_mosley.json"))), + ], + ) + + self.assertEqual(logger_mock.mock_calls, [call("GOOGLE_SERVICE_KEY is not set")]) + + self.assertEqual( + os.environ["GOOGLE_APPLICATION_CREDENTIALS"], str(Path(os.path.join(os.getcwd(), ".lacey_mosley.json"))) + ) + + @patch("builtins.open", mock_open(read_data="{}\n")) + @patch("os.path.exists", MagicMock(return_value=False)) + @patch.object(logger, "error") def test_resolve_credentials__credentials_file_not_exists__with_env(self, logger_mock): from os.path import exists as exists_mock - os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = './.lacey_mosley.json' - os.environ['GOOGLE_SERVICE_KEY'] = '{}\n' + os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = "./.lacey_mosley.json" + os.environ["GOOGLE_SERVICE_KEY"] = "{}\n" open_mock = open result = resolve_credentials() self.assertEqual(result, True) - self.assertEqual(open_mock.mock_calls, [ - call(Path(os.path.join(os.getcwd(), '.lacey_mosley.json')), 'w'), - call().__enter__(), - call().write('{}\n'), - call().__exit__(None, None, None), - ]) - - self.assertEqual(exists_mock.mock_calls, [ - call(Path(os.path.join(os.getcwd(), 
'.lacey_mosley.json'))), - ]) + self.assertEqual( + open_mock.mock_calls, + [ + call(Path(os.path.join(os.getcwd(), ".lacey_mosley.json")), "w"), + call().__enter__(), + call().write("{}\n"), + call().__exit__(None, None, None), + ], + ) + + self.assertEqual( + exists_mock.mock_calls, + [ + call(Path(os.path.join(os.getcwd(), ".lacey_mosley.json"))), + ], + ) self.assertEqual(logger_mock.mock_calls, []) - self.assertEqual(os.environ['GOOGLE_APPLICATION_CREDENTIALS'], - str(Path(os.path.join(os.getcwd(), '.lacey_mosley.json')))) + self.assertEqual( + os.environ["GOOGLE_APPLICATION_CREDENTIALS"], str(Path(os.path.join(os.getcwd(), ".lacey_mosley.json"))) + ) - @patch('builtins.open', mock_open(read_data='{}\n')) - @patch('os.path.exists', MagicMock(return_value=True)) - @patch.object(logger, 'error') + @patch("builtins.open", mock_open(read_data="{}\n")) + @patch("os.path.exists", MagicMock(return_value=True)) + @patch.object(logger, "error") def test_resolve_credentials__credentials_file_exists__with_env(self, logger_mock): from os.path import exists as exists_mock - os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = './.lacey_mosley.json' - os.environ['GOOGLE_SERVICE_KEY'] = '{}\n' + os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = "./.lacey_mosley.json" + os.environ["GOOGLE_SERVICE_KEY"] = "{}\n" open_mock = open result = resolve_credentials() self.assertEqual(result, True) self.assertEqual(open_mock.mock_calls, []) - self.assertEqual(exists_mock.mock_calls, [ - call(Path(os.path.join(os.getcwd(), '.lacey_mosley.json'))), - ]) + self.assertEqual( + exists_mock.mock_calls, + [ + call(Path(os.path.join(os.getcwd(), ".lacey_mosley.json"))), + ], + ) self.assertEqual(logger_mock.mock_calls, []) - self.assertEqual(os.environ['GOOGLE_APPLICATION_CREDENTIALS'], - str(Path(os.path.join(os.getcwd(), '.lacey_mosley.json')))) + self.assertEqual( + os.environ["GOOGLE_APPLICATION_CREDENTIALS"], str(Path(os.path.join(os.getcwd(), ".lacey_mosley.json"))) + ) diff --git a/breathecode/urls.py b/breathecode/urls.py index c7233c1b2..51ab4fdf5 100644 --- a/breathecode/urls.py +++ b/breathecode/urls.py @@ -13,6 +13,7 @@ 1. Import the include() function: from django.urls import include, path 2. 
Add a URL to urlpatterns: path('blog/', include('blog.urls')) """ + import os from django.conf import settings @@ -26,37 +27,37 @@ from breathecode.utils.urls import mount_app_openapi from breathecode.utils.views import get_root_schema_view -ENVIRONMENT = os.environ.get('ENV') +ENVIRONMENT = os.environ.get("ENV") versions = { - 'v2': [ - path('activity/', include('breathecode.activity.urls.v2', namespace='activity')), - path('registry/', include('breathecode.registry.urls.v2', namespace='registry')), + "v2": [ + path("activity/", include("breathecode.activity.urls.v2", namespace="activity")), + path("registry/", include("breathecode.registry.urls.v2", namespace="registry")), ], } apps = [ - ('v1/auth/', 'breathecode.authenticate.urls', 'auth'), - ('v1/admissions/', 'breathecode.admissions.urls', 'admissions'), - ('v1/assignment/', 'breathecode.assignments.urls', 'assignments'), - ('v1/freelance/', 'breathecode.freelance.urls', 'freelance'), - ('v1/events/', 'breathecode.events.urls', 'events'), - ('v1/registry/', 'breathecode.registry.urls.v1', 'registry'), - ('v1/activity/', 'breathecode.activity.urls.v1', 'activity'), - ('v1/feedback/', 'breathecode.feedback.urls', 'feedback'), - ('v1/messaging/', 'breathecode.notify.urls', 'notify'), - ('v1/assessment/', 'breathecode.assessment.urls', 'assessment'), - ('v1/certificate/', 'breathecode.certificate.urls', 'certificate'), - ('v1/media/', 'breathecode.media.urls', 'media'), - ('v1/marketing/', 'breathecode.marketing.urls', 'marketing'), - ('v1/mentorship/', 'breathecode.mentorship.urls', 'mentorship'), - ('v1/monitoring/', 'breathecode.monitoring.urls', 'monitoring'), - ('v1/provisioning/', 'breathecode.provisioning.urls', 'provisioning'), - ('v1/payments/', 'breathecode.payments.urls', 'payments'), - ('s/', 'breathecode.marketing.urls_shortner', 'marketing_shortner'), - ('mentor/', 'breathecode.mentorship.urls_shortner', 'mentorship_shortner'), - ('asset/', 'breathecode.registry.urls_shortner', 'registry_shortner'), - ('start', 'breathecode.provisioning.urls_shortner', 'provisioning_shortner'), + ("v1/auth/", "breathecode.authenticate.urls", "auth"), + ("v1/admissions/", "breathecode.admissions.urls", "admissions"), + ("v1/assignment/", "breathecode.assignments.urls", "assignments"), + ("v1/freelance/", "breathecode.freelance.urls", "freelance"), + ("v1/events/", "breathecode.events.urls", "events"), + ("v1/registry/", "breathecode.registry.urls.v1", "registry"), + ("v1/activity/", "breathecode.activity.urls.v1", "activity"), + ("v1/feedback/", "breathecode.feedback.urls", "feedback"), + ("v1/messaging/", "breathecode.notify.urls", "notify"), + ("v1/assessment/", "breathecode.assessment.urls", "assessment"), + ("v1/certificate/", "breathecode.certificate.urls", "certificate"), + ("v1/media/", "breathecode.media.urls", "media"), + ("v1/marketing/", "breathecode.marketing.urls", "marketing"), + ("v1/mentorship/", "breathecode.mentorship.urls", "mentorship"), + ("v1/monitoring/", "breathecode.monitoring.urls", "monitoring"), + ("v1/provisioning/", "breathecode.provisioning.urls", "provisioning"), + ("v1/payments/", "breathecode.payments.urls", "payments"), + ("s/", "breathecode.marketing.urls_shortner", "marketing_shortner"), + ("mentor/", "breathecode.mentorship.urls_shortner", "mentorship_shortner"), + ("asset/", "breathecode.registry.urls_shortner", "registry_shortner"), + ("start", "breathecode.provisioning.urls_shortner", "provisioning_shortner"), ] urlpatterns_apps = [path(url, include(urlconf, namespace=namespace)) for url, urlconf, 
namespace in apps] @@ -64,35 +65,43 @@ urlpatterns_app_openapi = [mount_app_openapi(url, urlconf, namespace) for url, urlconf, namespace in apps] urlpatterns_docs = [ - path('openapi.json', - get_root_schema_view([namespace for _, _, namespace in apps if namespace != 'shortner'], - extend={ - 'title': '4Geeks API', - 'description': 'Technology for Learning', - 'version': 'v1.0.0', - }), - name='openapi-schema'), - path('admin/doc/', include('django.contrib.admindocs.urls')), - path('swagger/', - TemplateView.as_view(template_name='swagger-ui.html', extra_context={'schema_url': 'openapi-schema'}), - name='swagger-ui'), - path('redoc/', - TemplateView.as_view(template_name='redoc.html', extra_context={'schema_url': 'openapi-schema'}), - name='redoc'), + path( + "openapi.json", + get_root_schema_view( + [namespace for _, _, namespace in apps if namespace != "shortner"], + extend={ + "title": "4Geeks API", + "description": "Technology for Learning", + "version": "v1.0.0", + }, + ), + name="openapi-schema", + ), + path("admin/doc/", include("django.contrib.admindocs.urls")), + path( + "swagger/", + TemplateView.as_view(template_name="swagger-ui.html", extra_context={"schema_url": "openapi-schema"}), + name="swagger-ui", + ), + path( + "redoc/", + TemplateView.as_view(template_name="redoc.html", extra_context={"schema_url": "openapi-schema"}), + name="redoc", + ), ] urlpatterns_django = [ - path('admin/', admin.site.urls), - path('explorer/', include('explorer.urls')), - path('graphql', csrf_exempt(GraphQLView.as_view(graphiql=True))), + path("admin/", admin.site.urls), + path("explorer/", include("explorer.urls")), + path("graphql", csrf_exempt(GraphQLView.as_view(graphiql=True))), ] urlpatterns_static = static(settings.STATIC_URL, document_root=settings.STATIC_ROOT) -urlpatterns = (urlpatterns_apps + urlpatterns_app_openapi + urlpatterns_docs + urlpatterns_django + urlpatterns_static) +urlpatterns = urlpatterns_apps + urlpatterns_app_openapi + urlpatterns_docs + urlpatterns_django + urlpatterns_static for version in versions: - x = path(f'{version}/', include((versions[version], version), namespace=version)) + x = path(f"{version}/", include((versions[version], version), namespace=version)) urlpatterns.append(x) -if os.getenv('ALLOW_UNSAFE_CYPRESS_APP') or os.environ.get('ENV') == 'test': - urlpatterns.append(path('v1/cypress/', include('breathecode.cypress.urls', namespace='cypress'))) +if os.getenv("ALLOW_UNSAFE_CYPRESS_APP") or os.environ.get("ENV") == "test": + urlpatterns.append(path("v1/cypress/", include("breathecode.cypress.urls", namespace="cypress"))) diff --git a/breathecode/utils/admin/change_field.py b/breathecode/utils/admin/change_field.py index 9bac8e717..ffdff1f02 100644 --- a/breathecode/utils/admin/change_field.py +++ b/breathecode/utils/admin/change_field.py @@ -1,4 +1,4 @@ -__all__ = ['make_method', 'change_field'] +__all__ = ["make_method", "change_field"] def make_method(status, name): @@ -11,10 +11,10 @@ def _method(modeladmin, request, queryset): return _method -def change_field(possible_status, name='status'): +def change_field(possible_status, name="status"): methods = [] for status in possible_status: _method = make_method(status, name) - _method.__name__ = 'change_' + name + '_' + status + _method.__name__ = "change_" + name + "_" + status methods.append(_method) return methods diff --git a/breathecode/utils/admin_export_csv_mixin.py b/breathecode/utils/admin_export_csv_mixin.py index df36e9d93..bc3eab7c4 100644 --- a/breathecode/utils/admin_export_csv_mixin.py +++ 
b/breathecode/utils/admin_export_csv_mixin.py @@ -3,7 +3,7 @@ from django.contrib import messages from django.utils.safestring import mark_safe -__all__ = ['AdminExportCsvMixin'] +__all__ = ["AdminExportCsvMixin"] class Echo: @@ -29,17 +29,20 @@ def export_as_csv(self, request, queryset): writer.writerow(field_names) return StreamingHttpResponse( (writer.writerow((getattr(obj, field) for field in field_names)) for obj in queryset), - content_type='text/csv', - headers={'Content-Disposition': 'attachment; filename={}.csv'.format(meta)}, + content_type="text/csv", + headers={"Content-Disposition": "attachment; filename={}.csv".format(meta)}, ) def async_export_as_csv(self, request, queryset): from breathecode.monitoring.tasks import async_download_csv + meta = self.model._meta - ids = list(queryset.values_list('pk', flat=True)) + ids = list(queryset.values_list("pk", flat=True)) async_download_csv.delay(self.model.__module__, meta.object_name, ids) messages.add_message( - request, messages.INFO, + request, + messages.INFO, mark_safe( 'Data is being downloaded, <a href="/admin/monitoring/csvdownload/">you can check your download here.</a>' - )) + ), + ) diff --git a/breathecode/utils/admin_timezone.py b/breathecode/utils/admin_timezone.py index d369a002d..2f6e54774 100644 --- a/breathecode/utils/admin_timezone.py +++ b/breathecode/utils/admin_timezone.py @@ -1,9 +1,9 @@ from django.utils import timezone import pytz, os -__all__ = ['TimezoneMiddleware'] +__all__ = ["TimezoneMiddleware"] -ENV = os.getenv('ENV', None) +ENV = os.getenv("ENV", None) class TimezoneMiddleware: @@ -12,6 +12,6 @@ def __init__(self, get_response): self.get_response = get_response def __call__(self, request): - if ENV != 'test': - timezone.activate(pytz.timezone('America/New_York')) + if ENV != "test": + timezone.activate(pytz.timezone("America/New_York")) return self.get_response(request) diff --git a/breathecode/utils/api_view_extensions/api_view_extension_handlers.py b/breathecode/utils/api_view_extensions/api_view_extension_handlers.py index 73bd4f36a..bcaa54528 100644 --- a/breathecode/utils/api_view_extensions/api_view_extension_handlers.py +++ b/breathecode/utils/api_view_extensions/api_view_extension_handlers.py @@ -13,8 +13,8 @@ from .extensions import CacheExtension -__all__ = ['APIViewExtensionHandlers'] -is_test_env = os.getenv('ENV') == 'test' +__all__ = ["APIViewExtensionHandlers"] +is_test_env = os.getenv("ENV") == "test" class APIViewExtensionHandlers: @@ -68,7 +68,7 @@ def queryset(self, queryset: QuerySet[Any]) -> QuerySet[Any]: return queryset - def response(self, data: dict | list[dict], format='application/json'): + def response(self, data: dict | list[dict], format="application/json"): """Get the response of endpoint.""" headers = {} diff --git a/breathecode/utils/api_view_extensions/api_view_extensions.py b/breathecode/utils/api_view_extensions/api_view_extensions.py index 948cdb6ca..f62744f1c 100644 --- a/breathecode/utils/api_view_extensions/api_view_extensions.py +++ b/breathecode/utils/api_view_extensions/api_view_extensions.py @@ -6,11 +6,11 @@ from .api_view_extension_handlers import APIViewExtensionHandlers -__all__ = ['APIViewExtensions'] +__all__ = ["APIViewExtensions"] EXTENSIONS = [getattr(extensions, x) for x in dir(extensions) if inspect.isclass(getattr(extensions, x))] -LIMIT_QUERY_PARAM = 'limit' -OFFSET_QUERY_PARAM = 'offset' +LIMIT_QUERY_PARAM = "limit" +OFFSET_QUERY_PARAM = "offset" class APIViewExtensions: @@ -46,6 +46,7 @@ def _requirements(self, extension): """Get requirements 
of the extension.""" return [ - x for x in dict(inspect.signature(extension.__init__).parameters) - if x != 'self' and x != 'args' and x != 'kwargs' and x != 'request' + x + for x in dict(inspect.signature(extension.__init__).parameters) + if x != "self" and x != "args" and x != "kwargs" and x != "request" ] diff --git a/breathecode/utils/api_view_extensions/extension_base.py b/breathecode/utils/api_view_extensions/extension_base.py index 1383a0b56..16139f35a 100644 --- a/breathecode/utils/api_view_extensions/extension_base.py +++ b/breathecode/utils/api_view_extensions/extension_base.py @@ -30,8 +30,7 @@ def _apply_response_mutation(self, queryset: QuerySet[any]) -> QuerySet[any]: def _set_request(self, request: WSGIRequest) -> None: self._request = request - def _optional_dependencies(self, **kwargs) -> None: - ... + def _optional_dependencies(self, **kwargs) -> None: ... def __str__(self) -> str: return self.__class__.__name__ diff --git a/breathecode/utils/api_view_extensions/extensions/cache_extension.py b/breathecode/utils/api_view_extensions/extensions/cache_extension.py index 397fddf66..750723271 100644 --- a/breathecode/utils/api_view_extensions/extensions/cache_extension.py +++ b/breathecode/utils/api_view_extensions/extensions/cache_extension.py @@ -8,26 +8,26 @@ from django.http import HttpResponse from rest_framework import status -__all__ = ['CacheExtension'] +__all__ = ["CacheExtension"] logger = logging.getLogger(__name__) -ENABLE_LIST_OPTIONS = ['true', '1', 'yes', 'y'] +ENABLE_LIST_OPTIONS = ["true", "1", "yes", "y"] @functools.lru_cache(maxsize=1) def is_cache_enabled(): - return os.getenv('CACHE', '1').lower() in ENABLE_LIST_OPTIONS + return os.getenv("CACHE", "1").lower() in ENABLE_LIST_OPTIONS @functools.lru_cache(maxsize=1) def user_timeout(): - return 60 * int(os.getenv('USER_CACHE_MINUTES', 60 * 4)) + return 60 * int(os.getenv("USER_CACHE_MINUTES", 60 * 4)) @functools.lru_cache(maxsize=1) def use_gzip(): - return os.getenv('USE_GZIP', '0').lower() in ENABLE_LIST_OPTIONS + return os.getenv("USE_GZIP", "0").lower() in ENABLE_LIST_OPTIONS class CacheExtension(ExtensionBase): @@ -41,65 +41,65 @@ def __init__(self, cache: Cache, **kwargs) -> None: self._cache = cache() self._encoding = None - def _optional_dependencies(self, cache_per_user: bool = False, cache_prefix: str = '', **kwargs): + def _optional_dependencies(self, cache_per_user: bool = False, cache_prefix: str = "", **kwargs): self._cache_per_user = cache_per_user self._cache_prefix = cache_prefix def _instance_name(self) -> Optional[str]: - return 'cache' + return "cache" def _get_encoding(self) -> Optional[str]: # zstd should be the standard if we require more processing power in the future # including the encoding in the params allow to support compression encoding - encoding = self._request.META.get('HTTP_ACCEPT_ENCODING', '') - if 'gzip' in encoding and use_gzip(): - return 'gzip' + encoding = self._request.META.get("HTTP_ACCEPT_ENCODING", "") + if "gzip" in encoding and use_gzip(): + return "gzip" - elif 'br' in encoding or '*' in encoding: - return 'br' + elif "br" in encoding or "*" in encoding: + return "br" # this is a new standard, but not supported by all browsers - elif 'zstd' in encoding: - return 'zstd' + elif "zstd" in encoding: + return "zstd" - elif 'deflate' in encoding: - return 'deflate' + elif "deflate" in encoding: + return "deflate" - elif 'gzip' in encoding: - return 'gzip' + elif "gzip" in encoding: + return "gzip" def _get_params(self): extends = { - 'request.path': 
self._request.path, + "request.path": self._request.path, } if self._cache_per_user: - extends['request.user.id'] = self._request.user.id + extends["request.user.id"] = self._request.user.id - if lang := self._request.META.get('HTTP_ACCEPT_LANGUAGE'): - extends['request.headers.accept-language'] = lang + if lang := self._request.META.get("HTTP_ACCEPT_LANGUAGE"): + extends["request.headers.accept-language"] = lang if encoding := self._get_encoding(): - extends['request.headers.accept-encoding'] = encoding + extends["request.headers.accept-encoding"] = encoding self._encoding = encoding - if accept := self._request.META.get('HTTP_ACCEPT'): - extends['request.headers.accept'] = accept + if accept := self._request.META.get("HTTP_ACCEPT"): + extends["request.headers.accept"] = accept if self._cache_prefix: - extends['breathecode.view.get'] = self._cache_prefix + extends["breathecode.view.get"] = self._cache_prefix - return {**self._request.GET.dict(), **self._request.parser_context['kwargs'], **extends} + return {**self._request.GET.dict(), **self._request.parser_context["kwargs"], **extends} def get(self) -> dict: if not is_cache_enabled(): - logger.debug('Cache has been disabled') + logger.debug("Cache has been disabled") return None # allow requests to disable cache with querystring "cache" variable - cache_is_active = self._request.GET.get('cache', 'true').lower() in ENABLE_LIST_OPTIONS + cache_is_active = self._request.GET.get("cache", "true").lower() in ENABLE_LIST_OPTIONS if not cache_is_active: - logger.debug('Cache has been forced to disable') + logger.debug("Cache has been forced to disable") return None try: @@ -115,7 +115,7 @@ def get(self) -> dict: return response except Exception: - logger.exception('Error while trying to get the cache') + logger.exception("Error while trying to get the cache") return None def _get_order_of_response(self) -> int: @@ -124,15 +124,14 @@ def _get_order_of_response(self) -> int: def _can_modify_response(self) -> bool: return True - def _apply_response_mutation(self, - data: list[dict] | dict, - headers: Optional[dict] = None, - format='application/json'): + def _apply_response_mutation( + self, data: list[dict] | dict, headers: Optional[dict] = None, format="application/json" + ): if headers is None: headers = {} if not is_cache_enabled(): - logger.debug('Cache has been disabled') + logger.debug("Cache has been disabled") return (data, headers) params = self._get_params() @@ -143,13 +142,13 @@ def _apply_response_mutation(self, try: res = self._cache.set(data, format=format, params=params, timeout=timeout, encoding=self._encoding) - data = res['content'] + data = res["content"] headers = { **headers, - **res['headers'], + **res["headers"], } except Exception: - logger.exception('Error while trying to set the cache') + logger.exception("Error while trying to set the cache") return (data, headers) diff --git a/breathecode/utils/api_view_extensions/extensions/language_extension.py b/breathecode/utils/api_view_extensions/extensions/language_extension.py index 5fc752e1b..289ef3a74 100644 --- a/breathecode/utils/api_view_extensions/extensions/language_extension.py +++ b/breathecode/utils/api_view_extensions/extensions/language_extension.py @@ -2,16 +2,15 @@ from breathecode.utils.api_view_extensions.extension_base import ExtensionBase -__all__ = ['LanguageExtension'] +__all__ = ["LanguageExtension"] class LanguageExtension(ExtensionBase): - def __init__(self, **kwargs) -> None: - ... + def __init__(self, **kwargs) -> None: ... 
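# Illustrative sketch (hypothetical, standalone): the two pieces of CacheExtension
# shown above, reworked into plain functions -- choosing a Content-Encoding from the
# request's Accept-Encoding header, and turning the collected params into a
# deterministic cache key. The key layout mirrors Cache._generate_key (which appears
# later in this diff: prefix + name + "__" + urlencoded sorted params); the
# "ExampleModel" name and the sample request values are invented for the example.
import urllib.parse
from typing import Optional


def pick_encoding(accept_encoding: str, gzip_enabled: bool = False) -> Optional[str]:
    # Same precedence as CacheExtension._get_encoding: gzip only when forced on,
    # otherwise br (or "*"), then zstd, deflate and plain gzip as fallbacks.
    if "gzip" in accept_encoding and gzip_enabled:
        return "gzip"
    if "br" in accept_encoding or "*" in accept_encoding:
        return "br"
    if "zstd" in accept_encoding:
        return "zstd"
    if "deflate" in accept_encoding:
        return "deflate"
    if "gzip" in accept_encoding:
        return "gzip"
    return None


params = {
    "limit": "10",                                          # querystring values
    "academy_id": "4",                                      # request.parser_context["kwargs"]
    "request.path": "/v1/events/",                          # always added by _get_params
    "request.headers.accept-encoding": pick_encoding("br, gzip"),
}
cache_key = "ExampleModel__" + urllib.parse.urlencode(sorted(params.items()))
# -> ExampleModel__academy_id=4&limit=10&request.headers.accept-encoding=br&request.path=%2Fv1%2Fevents%2F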
def get(self) -> str | None: - return self._request.META.get('HTTP_ACCEPT_LANGUAGE') + return self._request.META.get("HTTP_ACCEPT_LANGUAGE") def _can_modify_queryset(self) -> bool: return False @@ -20,4 +19,4 @@ def _can_modify_response(self) -> bool: return False def _instance_name(self) -> Optional[str]: - return 'language' + return "language" diff --git a/breathecode/utils/api_view_extensions/extensions/lookup_extension.py b/breathecode/utils/api_view_extensions/extensions/lookup_extension.py index 334139891..c76f1456d 100644 --- a/breathecode/utils/api_view_extensions/extensions/lookup_extension.py +++ b/breathecode/utils/api_view_extensions/extensions/lookup_extension.py @@ -8,7 +8,7 @@ from breathecode.utils.i18n import translation from capyc.rest_framework.exceptions import ValidationException -__all__ = ['LookupExtension'] +__all__ = ["LookupExtension"] class Field: @@ -17,13 +17,16 @@ class Field: def id(lang: str, key: str, value: str, alias=None) -> Q: if not value.isnumeric(): raise ValidationException( - translation(lang, - en='ID must be numeric', - es='El ID debe ser numérico', - pt='O ID deve ser numérico', - slug='id-must-be-numeric')) + translation( + lang, + en="ID must be numeric", + es="El ID debe ser numérico", + pt="O ID deve ser numérico", + slug="id-must-be-numeric", + ) + ) - return Q(**{f'{key}__pk': int(value)}) + return Q(**{f"{key}__pk": int(value)}) @staticmethod def integer(mode: str) -> Callable[[str, str, str], Q]: @@ -32,31 +35,34 @@ def handler(lang: str, key: str, value: str, alias=None) -> Q: if not value.isnumeric(): el = alias or key raise ValidationException( - translation(lang, - en=f'{el} must be numeric', - es=f'El {el} debe ser numérico', - pt=f'O {el} deve ser numérico', - slug=f'{el.replace("_", "-")}-must-be-numeric')) + translation( + lang, + en=f"{el} must be numeric", + es=f"El {el} debe ser numérico", + pt=f"O {el} deve ser numérico", + slug=f'{el.replace("_", "-")}-must-be-numeric', + ) + ) - return Q(**{f'{key}__{mode}': int(value)}) + return Q(**{f"{key}__{mode}": int(value)}) return handler @staticmethod def slug(lang: str, key: str, value: str, alias=None) -> Q: if value.isnumeric(): - return Q(**{f'{key}__pk': int(value)}) + return Q(**{f"{key}__pk": int(value)}) - return Q(**{f'{key}__slug': value}) + return Q(**{f"{key}__slug": value}) @staticmethod def string(mode: str) -> Callable[[str, str, str], Q]: def handler(lang: str, key: str, value: str, alias=None) -> str: param = value - if (mode == 'in'): - param = param.split(',') if param is not None else [] - return Q(**{f'{key}__{mode}': param}) + if mode == "in": + param = param.split(",") if param is not None else [] + return Q(**{f"{key}__{mode}": param}) return handler @@ -64,32 +70,38 @@ def handler(lang: str, key: str, value: str, alias=None) -> str: def datetime(mode: str) -> Callable[[str, str, str], Q]: def handler(lang: str, key: str, value: str, alias=None) -> Q: - if mode == 'year' or mode == 'month' or mode == 'day' or mode == 'hour' or mode == 'minute': + if mode == "year" or mode == "month" or mode == "day" or mode == "hour" or mode == "minute": if not value.isnumeric(): - el = (alias or key).replace('_', '-') + el = (alias or key).replace("_", "-") raise ValidationException( - translation(lang, - en=f'{el} must be numeric', - es=f'El {el} debe ser numérico', - pt=f'O {el} deve ser numérico', - slug=f'{el.replace("_", "-")}-must-be-numeric')) + translation( + lang, + en=f"{el} must be numeric", + es=f"El {el} debe ser numérico", + pt=f"O {el} deve ser numérico", + 
slug=f'{el.replace("_", "-")}-must-be-numeric', + ) + ) - return Q(**{f'{key}__{mode}': int(value)}) + return Q(**{f"{key}__{mode}": int(value)}) - if mode == 'isnull': - return Q(**{f'{key}__{mode}': value == 'true'}) + if mode == "isnull": + return Q(**{f"{key}__{mode}": value == "true"}) if not value or not (d := dateparse.parse_datetime(value)): el = alias or key raise ValidationException( - translation(lang, - en=f'{el} must be a datetime', - es=f'{el} debe ser un datetime', - slug=f'{el.replace("_", "-")}-must-be-a-datetime')) + translation( + lang, + en=f"{el} must be a datetime", + es=f"{el} debe ser un datetime", + slug=f'{el.replace("_", "-")}-must-be-a-datetime', + ) + ) - return Q(**{f'{key}__{mode}': d}) + return Q(**{f"{key}__{mode}": d}) return handler @@ -97,7 +109,7 @@ def handler(lang: str, key: str, value: str, alias=None) -> Q: def bool(mode: str) -> Callable[[str, str, str], Q]: def handler(lang: str, key: str, value: str, alias=None) -> Q: - return Q(**{f'{key}__{mode}': value == 'true'}) + return Q(**{f"{key}__{mode}": value == "true"}) return handler @@ -108,26 +120,26 @@ class CompileLookupField: def string(strings: str) -> dict[str, Callable[[str, str, str, Optional[str]], Q]]: lookup = {} - for key in strings.get('exact', tuple()): - lookup[key] = Field.string('exact') + for key in strings.get("exact", tuple()): + lookup[key] = Field.string("exact") - for key in strings.get('in', tuple()): - lookup[key] = Field.string('in') + for key in strings.get("in", tuple()): + lookup[key] = Field.string("in") - for key in strings.get('contains', tuple()): - lookup[key] = Field.string('contains') + for key in strings.get("contains", tuple()): + lookup[key] = Field.string("contains") - for key in strings.get('icontains', tuple()): - lookup[key] = Field.string('icontains') + for key in strings.get("icontains", tuple()): + lookup[key] = Field.string("icontains") - for key in strings.get('iexact', tuple()): - lookup[key] = Field.string('iexact') + for key in strings.get("iexact", tuple()): + lookup[key] = Field.string("iexact") - for key in strings.get('startswith', tuple()): - lookup[key] = Field.string('startswith') + for key in strings.get("startswith", tuple()): + lookup[key] = Field.string("startswith") - for key in strings.get('endswith', tuple()): - lookup[key] = Field.string('endswith') + for key in strings.get("endswith", tuple()): + lookup[key] = Field.string("endswith") return lookup @@ -135,20 +147,20 @@ def string(strings: str) -> dict[str, Callable[[str, str, str, Optional[str]], Q def integer(strings: str) -> dict[str, Callable[[str, str, str, Optional[str]], Q]]: lookup = {} - for key in strings.get('exact', tuple()): - lookup[key] = Field.integer('exact') + for key in strings.get("exact", tuple()): + lookup[key] = Field.integer("exact") - for key in strings.get('in', tuple()): - lookup[key] = Field.integer('in') + for key in strings.get("in", tuple()): + lookup[key] = Field.integer("in") - for key in strings.get('gt', tuple()): - lookup[key] = Field.integer('gt') + for key in strings.get("gt", tuple()): + lookup[key] = Field.integer("gt") - for key in strings.get('gte', tuple()): - lookup[key] = Field.integer('gte') + for key in strings.get("gte", tuple()): + lookup[key] = Field.integer("gte") - for key in strings.get('lt', tuple()): - lookup[key] = Field.integer('lte') + for key in strings.get("lt", tuple()): + lookup[key] = Field.integer("lte") return lookup @@ -156,38 +168,38 @@ def integer(strings: str) -> dict[str, Callable[[str, str, str, Optional[str]], 
def datetime(strings: str) -> dict[str, Callable[[str, str, str, Optional[str]], Q]]: lookup = {} - for key in strings.get('exact', tuple()): - lookup[key] = Field.datetime('exact') + for key in strings.get("exact", tuple()): + lookup[key] = Field.datetime("exact") - for key in strings.get('in', tuple()): - lookup[key] = Field.datetime('in') + for key in strings.get("in", tuple()): + lookup[key] = Field.datetime("in") - for key in strings.get('gt', tuple()): - lookup[key] = Field.datetime('gt') + for key in strings.get("gt", tuple()): + lookup[key] = Field.datetime("gt") - for key in strings.get('gte', tuple()): - lookup[key] = Field.datetime('gte') + for key in strings.get("gte", tuple()): + lookup[key] = Field.datetime("gte") - for key in strings.get('lt', tuple()): - lookup[key] = Field.datetime('lte') + for key in strings.get("lt", tuple()): + lookup[key] = Field.datetime("lte") - for key in strings.get('year', tuple()): - lookup[key] = Field.datetime('year') + for key in strings.get("year", tuple()): + lookup[key] = Field.datetime("year") - for key in strings.get('month', tuple()): - lookup[key] = Field.datetime('month') + for key in strings.get("month", tuple()): + lookup[key] = Field.datetime("month") - for key in strings.get('day', tuple()): - lookup[key] = Field.datetime('day') + for key in strings.get("day", tuple()): + lookup[key] = Field.datetime("day") - for key in strings.get('hour', tuple()): - lookup[key] = Field.datetime('hour') + for key in strings.get("hour", tuple()): + lookup[key] = Field.datetime("hour") - for key in strings.get('minute', tuple()): - lookup[key] = Field.datetime('minute') + for key in strings.get("minute", tuple()): + lookup[key] = Field.datetime("minute") - for key in strings.get('isnull', tuple()): - lookup[key] = Field.datetime('isnull') + for key in strings.get("isnull", tuple()): + lookup[key] = Field.datetime("isnull") return lookup @@ -195,32 +207,33 @@ def datetime(strings: str) -> dict[str, Callable[[str, str, str, Optional[str]], def bool(strings: str) -> dict[str, Callable[[str, str, str, Optional[str]], Q]]: lookup = {} - for key in strings.get('exact', tuple()): - lookup[key] = Field.bool('exact') + for key in strings.get("exact", tuple()): + lookup[key] = Field.bool("exact") return lookup # keeps it here to spy the arguments passed @cache -def compile_lookup(ids: tuple, slugs: tuple, ints: frozenset, strings: frozenset, datetimes: frozenset, - bools: frozenset) -> tuple[tuple, dict]: +def compile_lookup( + ids: tuple, slugs: tuple, ints: frozenset, strings: frozenset, datetimes: frozenset, bools: frozenset +) -> tuple[tuple, dict]: """Compile the available lookup fields once.""" strings = dict(strings) lookup = {} for key in ids: - if key == '': - lookup.update(CompileLookupField.integer({'exact': ('id', )})) + if key == "": + lookup.update(CompileLookupField.integer({"exact": ("id",)})) continue lookup[key] = Field.id for key in slugs: - if key == '': - lookup.update(CompileLookupField.integer({'exact': ('id', )})) - lookup.update(CompileLookupField.string({'exact': ('slug', )})) + if key == "": + lookup.update(CompileLookupField.integer({"exact": ("id",)})) + lookup.update(CompileLookupField.string({"exact": ("slug",)})) continue lookup[key] = Field.slug @@ -235,15 +248,16 @@ def compile_lookup(ids: tuple, slugs: tuple, ints: frozenset, strings: frozenset class LookupExtension(ExtensionBase): - def __init__(self, **kwargs) -> None: - ... + def __init__(self, **kwargs) -> None: ... 
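# Illustrative sketch (hypothetical): what the compiled lookup fields above amount to
# at query time. Field.id, Field.string("in") and Field.bool("exact"), as defined in
# this diff, each map one querystring value to a Django Q object; the field names
# ("academy", "status", "remote"), the values, and the model in the commented filter
# call are invented for the example.
from django.db.models import Q

# ?academy=4            ids=("academy",)             -> Field.id
academy_q = Q(**{"academy__pk": int("4")})

# ?status=ACTIVE,DONE   strings={"in": ("status",)}  -> Field.string("in")
status_q = Q(**{"status__in": "ACTIVE,DONE".split(",")})

# ?remote=true          bools={"exact": ("remote",)} -> Field.bool("exact")
value = "true"
remote_q = Q(**{"remote__exact": value == "true"})  # Field.bool compares against the literal "true"

combined = academy_q & status_q & remote_q
# queryset = ExampleModel.objects.filter(combined)   # hypothetical model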
- def _build_lookup(self, - lang: str, - lookup: dict[str, Callable[[str, str, str, Optional[str]], Q]], - querystring: dict[str, Any], - custom_fields: Optional[dict] = None, - overwrite: Optional[dict] = None) -> tuple[tuple, dict]: + def _build_lookup( + self, + lang: str, + lookup: dict[str, Callable[[str, str, str, Optional[str]], Q]], + querystring: dict[str, Any], + custom_fields: Optional[dict] = None, + overwrite: Optional[dict] = None, + ) -> tuple[tuple, dict]: if custom_fields is None: custom_fields = {} @@ -273,7 +287,7 @@ def _to_frozenset(self, value: Optional[dict]) -> frozenset: return frozenset() if not isinstance(value, dict): - raise ValidationException('value must be a dict', code=500) + raise ValidationException("value must be a dict", code=500) for key in value: if not isinstance(value[key], tuple): @@ -292,18 +306,18 @@ def build(self, lang: str, overwrite: Optional[dict] = None, **kwargs: dict | tu overwrite = {} # foreign - ids = kwargs.get('ids', tuple()) - slugs = kwargs.get('slugs', tuple()) + ids = kwargs.get("ids", tuple()) + slugs = kwargs.get("slugs", tuple()) # fields - ints = kwargs.get('ints', dict()) - strings = kwargs.get('strings', dict()) - datetimes = kwargs.get('datetimes', dict()) - bools = kwargs.get('bools', dict()) + ints = kwargs.get("ints", dict()) + strings = kwargs.get("strings", dict()) + datetimes = kwargs.get("datetimes", dict()) + bools = kwargs.get("bools", dict()) # opts - custom_fields = kwargs.get('custom_fields', dict()) - fix = kwargs.get('custom_fields', dict()) + custom_fields = kwargs.get("custom_fields", dict()) + fix = kwargs.get("custom_fields", dict()) # serialize foreign ids = tuple(ids) @@ -332,4 +346,4 @@ def _can_modify_response(self) -> bool: return False def _instance_name(self) -> Optional[str]: - return 'lookup' + return "lookup" diff --git a/breathecode/utils/api_view_extensions/extensions/pagination_extension.py b/breathecode/utils/api_view_extensions/extensions/pagination_extension.py index 8797ce0f8..513bb4e68 100644 --- a/breathecode/utils/api_view_extensions/extensions/pagination_extension.py +++ b/breathecode/utils/api_view_extensions/extensions/pagination_extension.py @@ -7,14 +7,14 @@ from django.db.models import QuerySet from rest_framework.utils.urls import replace_query_param, remove_query_param -__all__ = ['PaginationExtension'] +__all__ = ["PaginationExtension"] -REQUIREMENTS = ['cache'] -OFFSET_QUERY_PARAM = 'offset' -LIMIT_QUERY_PARAM = 'limit' +REQUIREMENTS = ["cache"] +OFFSET_QUERY_PARAM = "offset" +LIMIT_QUERY_PARAM = "limit" MAX_LIMIT = None -if os.getenv('ENABLE_DEFAULT_PAGINATION', 'y') in ['t', 'true', 'True', 'TRUE', '1', 'yes', 'y']: +if os.getenv("ENABLE_DEFAULT_PAGINATION", "y") in ["t", "true", "True", "TRUE", "1", "yes", "y"]: DEFAULT_LIMIT = 20 else: @@ -65,17 +65,23 @@ def _apply_queryset_mutation(self, queryset: QuerySet[Any]): self._offset = self._get_offset() self._limit = self._get_limit() - if self._is_paginate() and self._request.GET.get('envelope', - '').lower() in ['false', 'f', '0', 'no', 'n', 'off', '']: + if self._is_paginate() and self._request.GET.get("envelope", "").lower() in [ + "false", + "f", + "0", + "no", + "n", + "off", + "", + ]: self._use_envelope = True - self._queryset = queryset[self._offset:self._offset + self._limit] + self._queryset = queryset[self._offset : self._offset + self._limit] return self._queryset - def _apply_response_mutation(self, - data: list[dict] | dict, - headers: Optional[dict] = None, - format='application/json'): + def 
_apply_response_mutation( + self, data: list[dict] | dict, headers: Optional[dict] = None, format="application/json" + ): if headers is None: headers = {} @@ -89,22 +95,30 @@ def _apply_response_mutation(self, links = [] for label, url in ( - ('first', first_url), - ('next', next_url), - ('previous', previous_url), - ('last', last_url), + ("first", first_url), + ("next", next_url), + ("previous", previous_url), + ("last", last_url), ): if url is not None: links.append('<{}>; rel="{}"'.format(url, label)) - headers = {**headers, 'Link': ', '.join(links)} if links else {**headers} - headers['X-Total-Count'] = self._count - headers['X-Per-Page'] = self._limit - headers['X-Page'] = int(self._offset / self._limit) + 1 + headers = {**headers, "Link": ", ".join(links)} if links else {**headers} + headers["X-Total-Count"] = self._count + headers["X-Per-Page"] = self._limit + headers["X-Page"] = int(self._offset / self._limit) + 1 if self._use_envelope: - data = OrderedDict([('count', self._count), ('first', first_url), ('next', next_url), - ('previous', previous_url), ('last', last_url), ('results', data)]) + data = OrderedDict( + [ + ("count", self._count), + ("first", first_url), + ("next", next_url), + ("previous", previous_url), + ("last", last_url), + ("results", data), + ] + ) return (data, headers) return (data, headers) @@ -113,7 +127,7 @@ def _parse_comma(self, string: str): if not string: return None - return string.replace('%2C', ',') + return string.replace("%2C", ",") def _get_count(self, queryset: QuerySet[Any] | list): """Determine an object count, supporting either querysets or regular lists.""" diff --git a/breathecode/utils/api_view_extensions/extensions/sort_extension.py b/breathecode/utils/api_view_extensions/extensions/sort_extension.py index e5d6bce49..bdb7ee31a 100644 --- a/breathecode/utils/api_view_extensions/extensions/sort_extension.py +++ b/breathecode/utils/api_view_extensions/extensions/sort_extension.py @@ -6,9 +6,9 @@ from breathecode.utils.api_view_extensions.priorities.mutator_order import MutatorOrder from breathecode.utils.generate_lookups_mixin import GenerateLookupsMixin -__all__ = ['SortExtension'] +__all__ = ["SortExtension"] -REQUIREMENTS = ['cache'] +REQUIREMENTS = ["cache"] class SortExtension(ExtensionBase, GenerateLookupsMixin): @@ -19,12 +19,12 @@ def __init__(self, sort: str, **kwargs) -> None: self._sort = sort def _apply_queryset_mutation(self, queryset: QuerySet[Any]): - lookups = self.generate_lookups(self._request, many_fields=['sort']) - sort_in = lookups['sort__in'] if 'sort__in' in lookups else '' + lookups = self.generate_lookups(self._request, many_fields=["sort"]) + sort_in = lookups["sort__in"] if "sort__in" in lookups else "" if len(sort_in) != 0: queryset = queryset.order_by(*sort_in or self._sort) else: - queryset = queryset.order_by(self._request.GET.get('sort') or self._sort) + queryset = queryset.order_by(self._request.GET.get("sort") or self._sort) return queryset def _can_modify_queryset(self) -> bool: diff --git a/breathecode/utils/api_view_extensions/priorities/mutator_order.py b/breathecode/utils/api_view_extensions/priorities/mutator_order.py index a3f9a075b..63b36d97c 100644 --- a/breathecode/utils/api_view_extensions/priorities/mutator_order.py +++ b/breathecode/utils/api_view_extensions/priorities/mutator_order.py @@ -1,6 +1,6 @@ from enum import IntEnum -__all__ = ['MutatorOrder'] +__all__ = ["MutatorOrder"] class MutatorOrder(IntEnum): diff --git a/breathecode/utils/api_view_extensions/priorities/response_order.py 
b/breathecode/utils/api_view_extensions/priorities/response_order.py index 379dbaf9e..2bafa88d1 100644 --- a/breathecode/utils/api_view_extensions/priorities/response_order.py +++ b/breathecode/utils/api_view_extensions/priorities/response_order.py @@ -1,6 +1,6 @@ from enum import IntEnum -__all__ = ['ResponseOrder'] +__all__ = ["ResponseOrder"] class ResponseOrder(IntEnum): diff --git a/breathecode/utils/attr_dict.py b/breathecode/utils/attr_dict.py index 04d76f666..ad42d74a5 100644 --- a/breathecode/utils/attr_dict.py +++ b/breathecode/utils/attr_dict.py @@ -1,8 +1,8 @@ from typing import TypeVar -__all__ = ['AttrDict'] +__all__ = ["AttrDict"] -T = TypeVar('T') +T = TypeVar("T") class AttrDict(dict): diff --git a/breathecode/utils/cache.py b/breathecode/utils/cache.py index 2819460b5..8b0aaa5c9 100644 --- a/breathecode/utils/cache.py +++ b/breathecode/utils/cache.py @@ -12,32 +12,36 @@ from django.db import models from circuitbreaker import circuit -from django.db.models.fields.related_descriptors import (ReverseManyToOneDescriptor, ManyToManyDescriptor, - ForwardManyToOneDescriptor, ReverseOneToOneDescriptor, - ForwardOneToOneDescriptor) +from django.db.models.fields.related_descriptors import ( + ReverseManyToOneDescriptor, + ManyToManyDescriptor, + ForwardManyToOneDescriptor, + ReverseOneToOneDescriptor, + ForwardOneToOneDescriptor, +) import zstandard -__all__ = ['Cache', 'CACHE_DESCRIPTORS', 'CACHE_DEPENDENCIES'] +__all__ = ["Cache", "CACHE_DESCRIPTORS", "CACHE_DEPENDENCIES"] CACHE_DESCRIPTORS: dict[models.Model, Cache] = {} CACHE_DEPENDENCIES: set[models.Model] = set() -ENABLE_LIST_OPTIONS = ['true', '1', 'yes', 'y'] -IS_DJANGO_REDIS = hasattr(cache, 'delete_pattern') +ENABLE_LIST_OPTIONS = ["true", "1", "yes", "y"] +IS_DJANGO_REDIS = hasattr(cache, "delete_pattern") @functools.lru_cache(maxsize=1) def is_compression_enabled(): - return os.getenv('COMPRESSION', '1').lower() in ENABLE_LIST_OPTIONS + return os.getenv("COMPRESSION", "1").lower() in ENABLE_LIST_OPTIONS @functools.lru_cache(maxsize=1) def min_compression_size(): - return int(os.getenv('MIN_COMPRESSION_SIZE', '10')) + return int(os.getenv("MIN_COMPRESSION_SIZE", "10")) @functools.lru_cache(maxsize=1) def use_gzip(): - return os.getenv('USE_GZIP', '0').lower() in ENABLE_LIST_OPTIONS + return os.getenv("USE_GZIP", "0").lower() in ENABLE_LIST_OPTIONS def must_compress(data): @@ -55,7 +59,7 @@ def __init__(cls: Cache, name, bases, clsdict): super().__init__(name, bases, clsdict) - if hasattr(cls, 'model'): + if hasattr(cls, "model"): # key = cls.model.__module__ + '.' 
+ cls.model.__name__ CACHE_DESCRIPTORS[cls.model] = cls @@ -63,27 +67,30 @@ def __init__(cls: Cache, name, bases, clsdict): one_to_one = { getattr(model, x).field.model - for x in dir(model) if isinstance(getattr(model, x), ForwardOneToOneDescriptor) + for x in dir(model) + if isinstance(getattr(model, x), ForwardOneToOneDescriptor) } reverse_one_to_one = { getattr(model, x).related.related_model - for x in dir(model) if isinstance(getattr(model, x), ReverseOneToOneDescriptor) + for x in dir(model) + if isinstance(getattr(model, x), ReverseOneToOneDescriptor) } many_to_one = { getattr(model, x).field.related_model - for x in dir(model) if isinstance(getattr(model, x), ForwardManyToOneDescriptor) + for x in dir(model) + if isinstance(getattr(model, x), ForwardManyToOneDescriptor) } reverse_many_to_one = { getattr(model, x).field.model - for x in dir(model) if isinstance(getattr(model, x), ReverseManyToOneDescriptor) + for x in dir(model) + if isinstance(getattr(model, x), ReverseManyToOneDescriptor) } many_to_many = { - getattr(model, x).field.model - for x in dir(model) if isinstance(getattr(model, x), ManyToManyDescriptor) + getattr(model, x).field.model for x in dir(model) if isinstance(getattr(model, x), ManyToManyDescriptor) } cls.one_to_one = one_to_one | reverse_one_to_one @@ -99,16 +106,16 @@ def __init__(cls: Cache, name, bases, clsdict): def serializer(obj): if isinstance(obj, datetime): - return obj.isoformat().replace('+00:00', 'Z') + return obj.isoformat().replace("+00:00", "Z") if isinstance(obj, timedelta): return str(obj.total_seconds()) - raise TypeError('Type not serializable') + raise TypeError("Type not serializable") class Cache(metaclass=CacheMeta): - _version_prefix: str = '' + _version_prefix: str = "" model: models.Model one_to_one: list[models.Model] @@ -125,7 +132,7 @@ def _generate_key(cls, **kwargs): sorted_kwargs = sorted(kwargs.items()) qs = urllib.parse.urlencode(sorted_kwargs) - return f'{cls._version_prefix}{key}__{qs}' + return f"{cls._version_prefix}{key}__{qs}" @classmethod @circuit @@ -153,7 +160,7 @@ class DepCache(Cache): if deep != 0: return resolved - keys = {f'{cls._version_prefix}{descriptor.model.__name__}__keys' for descriptor in resolved} + keys = {f"{cls._version_prefix}{descriptor.model.__name__}__keys" for descriptor in resolved} sets = [x or set() for x in cache.get_many(keys).values()] to_delete = set() @@ -170,44 +177,44 @@ class DepCache(Cache): @classmethod @circuit def keys(cls): - return cache.get(f'{cls._version_prefix}{cls.model.__name__}__keys') or set() + return cache.get(f"{cls._version_prefix}{cls.model.__name__}__keys") or set() # DEPRECATED: 11/10/2021, remove this in december 2023, it was here to handle the old cache values @classmethod def _legacy_get(cls, data, encoding: Optional[str] = None) -> dict: spaces = 0 starts = 0 - mime = 'application/json' + mime = "application/json" headers = {} # parse a fixed amount of bytes to get the mime type try: - head = data[:35].decode('utf-8') + head = data[:35].decode("utf-8") # if the data cannot be decoded as utf-8, it means that a section was compressed except Exception as e: try: - head = data[:e.start].decode('utf-8') + head = data[: e.start].decode("utf-8") # if the data cannot be decoded as utf-8, it means that it does not have a header except Exception: - head = '' + head = "" if use_gzip(): - headers['Content-Encoding'] = 'gzip' + headers["Content-Encoding"] = "gzip" - elif encoding in ['br', 'zstd', 'deflate', 'gzip']: - headers['Content-Encoding'] = encoding + elif encoding 
in ["br", "zstd", "deflate", "gzip"]: + headers["Content-Encoding"] = encoding elif encoding != None: - headers['Content-Encoding'] = 'br' + headers["Content-Encoding"] = "br" for s in head: # maybe this cannot process the html cases yet - if s in ['{', '[']: + if s in ["{", "["]: break - if s == ' ': + if s == " ": spaces += 1 else: spaces = 0 @@ -215,20 +222,20 @@ def _legacy_get(cls, data, encoding: Optional[str] = None) -> dict: starts += 1 if spaces == 4: - mime = data[:starts - 4] + mime = data[: starts - 4] break if isinstance(mime, bytes): - unpack = mime.decode('utf-8').split(':') + unpack = mime.decode("utf-8").split(":") mime = unpack[0] if len(unpack) == 2: - headers['Content-Encoding'] = unpack[1] + headers["Content-Encoding"] = unpack[1] - elif starts != 0 and mime[starts - 1] not in ['{', '[', '<']: + elif starts != 0 and mime[starts - 1] not in ["{", "[", "<"]: starts = 0 if mime: - headers['Content-Type'] = mime + headers["Content-Type"] = mime return data[starts:], headers @@ -244,19 +251,21 @@ def get(cls, data, encoding: Optional[str] = None) -> dict: if isinstance(data, str) or isinstance(data, bytes): return cls._legacy_get(data, encoding) - headers = data.get('headers', {}) - content = data.get('content', None) + headers = data.get("headers", {}) + content = data.get("content", None) return content, headers @classmethod @circuit - def set(cls, - data: str | dict | list[dict], - format: str = 'application/json', - timeout: int = -1, - encoding: Optional[str] = None, - params: Optional[dict] = None) -> str: + def set( + cls, + data: str | dict | list[dict], + format: str = "application/json", + timeout: int = -1, + encoding: Optional[str] = None, + params: Optional[dict] = None, + ) -> str: """Set a key value pair on the cache in bytes, it reminds the format and compress the data if needed.""" if params is None: @@ -264,46 +273,46 @@ def set(cls, key = cls._generate_key(**params) res = { - 'headers': { - 'Content-Type': format, + "headers": { + "Content-Type": format, }, - 'content': None, + "content": None, } # serialize the data to avoid serialization on get requests - if format == 'application/json': - data = json.dumps(data, default=serializer).encode('utf-8') + if format == "application/json": + data = json.dumps(data, default=serializer).encode("utf-8") elif isinstance(data, str): - data = data.encode('utf-8') + data = data.encode("utf-8") else: data = data # in kilobytes if (compress := (must_compress(data) and is_compression_enabled())) and use_gzip(): - res['content'] = gzip.compress(data) - res['headers']['Content-Encoding'] = 'gzip' + res["content"] = gzip.compress(data) + res["headers"]["Content-Encoding"] = "gzip" - elif compress and encoding == 'br': - res['content'] = brotli.compress(data) - res['headers']['Content-Encoding'] = 'br' + elif compress and encoding == "br": + res["content"] = brotli.compress(data) + res["headers"]["Content-Encoding"] = "br" # faster option, it should be the standard in the future - elif compress and encoding == 'zstd': - res['content'] = zstandard.compress(data) - res['headers']['Content-Encoding'] = 'zstd' + elif compress and encoding == "zstd": + res["content"] = zstandard.compress(data) + res["headers"]["Content-Encoding"] = "zstd" - elif compress and encoding == 'deflate': - res['content'] = zlib.compress(data) - res['headers']['Content-Encoding'] = 'deflate' + elif compress and encoding == "deflate": + res["content"] = zlib.compress(data) + res["headers"]["Content-Encoding"] = "deflate" - elif compress and encoding == 
'gzip': - res['content'] = gzip.compress(data) - res['headers']['Content-Encoding'] = 'gzip' + elif compress and encoding == "gzip": + res["content"] = gzip.compress(data) + res["headers"]["Content-Encoding"] = "gzip" else: - res['content'] = data + res["content"] = data # encode the response to avoid serialization on get requests if timeout == -1: @@ -313,8 +322,8 @@ def set(cls, else: cache.set(key, res, timeout) - keys = cache.get(f'{cls._version_prefix}{cls.model.__name__}__keys') or set() + keys = cache.get(f"{cls._version_prefix}{cls.model.__name__}__keys") or set() keys.add(key) - cache.set(f'{cls._version_prefix}{cls.model.__name__}__keys', keys) + cache.set(f"{cls._version_prefix}{cls.model.__name__}__keys", keys) return res diff --git a/breathecode/utils/datetime_integer.py b/breathecode/utils/datetime_integer.py index dbc8067b2..b68b1becc 100644 --- a/breathecode/utils/datetime_integer.py +++ b/breathecode/utils/datetime_integer.py @@ -6,12 +6,12 @@ from dateutil.tz import gettz, tzutc from django.utils import timezone -__all__ = ['DatetimeInteger', 'duration_to_str', 'from_now'] +__all__ = ["DatetimeInteger", "duration_to_str", "from_now"] def duration_to_str(duration, include_seconds=False, include_days=False): if duration is None: - return 'none' + return "none" total_seconds = duration.seconds sec_value = total_seconds % (24 * 3600) @@ -20,26 +20,26 @@ def duration_to_str(duration, include_seconds=False, include_days=False): min = sec_value // 60 sec_value %= 60 - msg = '' + msg = "" if include_days and duration.days > 0: - msg = f'{duration.days} days, ' + msg = f"{duration.days} days, " if hour_value > 0: - msg += f'{hour_value} hr' + msg += f"{hour_value} hr" if min > 0: - msg += f', {min} min' + msg += f", {min} min" if sec_value > 0 and include_seconds: - msg += f' and {sec_value} sec' + msg += f" and {sec_value} sec" return msg elif min > 0: - msg = f'{min} min' + msg = f"{min} min" if sec_value > 0 and include_seconds: - msg += f' and {sec_value} sec' + msg += f" and {sec_value} sec" return msg elif sec_value > 0 and include_seconds: - return f'{sec_value} sec' + return f"{sec_value} sec" else: - return 'none' + return "none" def from_now(_date, include_seconds=False, include_days=False): @@ -53,7 +53,7 @@ def from_now(_date, include_seconds=False, include_days=False): class Datetime(datetime): def __setattr__(self, key, value): - if key == 'info': + if key == "info": object.__setattr__(self, key, value) else: super(Datetime, self).__setattr__(key, value) @@ -80,63 +80,53 @@ def get_utc_datetime(self, timezone: str): @staticmethod def from_datetime(timezone: str, date: datetime) -> int: - return int(date.astimezone(gettz(timezone)).strftime('%Y%m%d%H%M')) + return int(date.astimezone(gettz(timezone)).strftime("%Y%m%d%H%M")) @staticmethod def from_iso_string(timezone: str, string: str) -> int: date = parser.parse(string) tz = gettz(timezone) - return int(date.astimezone(tzutc()).astimezone(tz).strftime('%Y%m%d%H%M')) + return int(date.astimezone(tzutc()).astimezone(tz).strftime("%Y%m%d%H%M")) @staticmethod def to_iso_string(timezone: str, integer: int) -> str: tz = gettz(timezone) - matches = re.match(r'^(\d{4})(\d{2})(\d{2})(\d{2})(\d{2})$', str(integer)) + matches = re.match(r"^(\d{4})(\d{2})(\d{2})(\d{2})(\d{2})$", str(integer)) if not matches: return None elements = matches.groups() - date = datetime(int(elements[0]), - int(elements[1]), - int(elements[2]), - int(elements[3]), - int(elements[4]), - tzinfo=tz) + date = datetime( + int(elements[0]), int(elements[1]), 
int(elements[2]), int(elements[3]), int(elements[4]), tzinfo=tz + ) - return re.sub(r'\+00:00', 'Z', date.astimezone(tzutc()).isoformat()) + return re.sub(r"\+00:00", "Z", date.astimezone(tzutc()).isoformat()) @staticmethod def to_datetime(timezone: str, integer: int) -> datetime: tz = pytz.timezone(timezone) - matches = re.match(r'^(\d{4})(\d{2})(\d{2})(\d{2})(\d{2})$', str(integer)) + matches = re.match(r"^(\d{4})(\d{2})(\d{2})(\d{2})(\d{2})$", str(integer)) if not matches: return None elements = matches.groups() - date = datetime(int(elements[0]), - int(elements[1]), - int(elements[2]), - int(elements[3]), - int(elements[4]), - 0, - tzinfo=tz) + date = datetime( + int(elements[0]), int(elements[1]), int(elements[2]), int(elements[3]), int(elements[4]), 0, tzinfo=tz + ) return date @staticmethod def to_utc_datetime(timezone: str, integer: int) -> datetime: tz = pytz.timezone(timezone) - matches = re.match(r'^(\d{4})(\d{2})(\d{2})(\d{2})(\d{2})$', str(integer)) + matches = re.match(r"^(\d{4})(\d{2})(\d{2})(\d{2})(\d{2})$", str(integer)) if not matches: return None elements = matches.groups() - date = datetime(int(elements[0]), - int(elements[1]), - int(elements[2]), - int(elements[3]), - int(elements[4]), - tzinfo=tz) + date = datetime( + int(elements[0]), int(elements[1]), int(elements[2]), int(elements[3]), int(elements[4]), tzinfo=tz + ) return date.astimezone(pytz.UTC) diff --git a/breathecode/utils/decorators/capable_of.py b/breathecode/utils/decorators/capable_of.py index 37d9657af..13b161b89 100644 --- a/breathecode/utils/decorators/capable_of.py +++ b/breathecode/utils/decorators/capable_of.py @@ -6,7 +6,7 @@ from breathecode.utils.exceptions import ProgrammingError from capyc.rest_framework.exceptions import ValidationException -__all__ = ['capable_of', 'acapable_of'] +__all__ = ["capable_of", "acapable_of"] def capable_of(capability=None): @@ -15,30 +15,30 @@ def decorator(function): def wrapper(*args, **kwargs): if isinstance(capability, str) == False: - raise ProgrammingError('Capability must be a string') + raise ProgrammingError("Capability must be a string") try: - if hasattr(args[0], '__class__') and isinstance(args[0], APIView): + if hasattr(args[0], "__class__") and isinstance(args[0], APIView): request = args[1] - elif hasattr(args[0], 'user') and hasattr(args[0].user, 'has_perm'): + elif hasattr(args[0], "user") and hasattr(args[0].user, "has_perm"): request = args[0] # websocket support - elif hasattr(args[0], 'ws_request'): + elif hasattr(args[0], "ws_request"): request = args[0] else: raise IndexError() except IndexError: - raise ProgrammingError('Missing request information, use this decorator with DRF View') + raise ProgrammingError("Missing request information, use this decorator with DRF View") academy_id = get_academy_from_capability(kwargs, request, capability) if academy_id: - kwargs['academy_id'] = academy_id + kwargs["academy_id"] = academy_id # add the new kwargs argument to the context to be used by APIViewExtensions - request.parser_context['kwargs']['academy_id'] = academy_id + request.parser_context["kwargs"]["academy_id"] = academy_id return function(*args, **kwargs) return wrapper @@ -52,30 +52,30 @@ def decorator(function): async def wrapper(*args, **kwargs): if isinstance(capability, str) == False: - raise ProgrammingError('Capability must be a string') + raise ProgrammingError("Capability must be a string") try: - if hasattr(args[0], '__class__') and isinstance(args[0], APIView): + if hasattr(args[0], "__class__") and isinstance(args[0], APIView): 
request = args[1] - elif hasattr(args[0], 'user') and hasattr(args[0].user, 'has_perm'): + elif hasattr(args[0], "user") and hasattr(args[0].user, "has_perm"): request = args[0] # websocket support - elif hasattr(args[0], 'ws_request'): + elif hasattr(args[0], "ws_request"): request = args[0] else: raise IndexError() except IndexError: - raise ProgrammingError('Missing request information, use this decorator with DRF View') + raise ProgrammingError("Missing request information, use this decorator with DRF View") academy_id = await sync_to_async(get_academy_from_capability)(kwargs, request, capability) if academy_id: - kwargs['academy_id'] = academy_id + kwargs["academy_id"] = academy_id # add the new kwargs argument to the context to be used by APIViewExtensions - request.parser_context['kwargs']['academy_id'] = academy_id + request.parser_context["kwargs"]["academy_id"] = academy_id return await function(*args, **kwargs) return wrapper @@ -88,40 +88,45 @@ def get_academy_from_capability(kwargs, request, capability): academy_id = None - if ('academy_id' not in kwargs and 'Academy' not in request.headers and 'academy' not in request.headers - and 'academy' not in request.GET): + if ( + "academy_id" not in kwargs + and "Academy" not in request.headers + and "academy" not in request.headers + and "academy" not in request.GET + ): raise PermissionDenied("Missing academy_id parameter expected for the endpoint url or 'Academy' header") - elif 'academy_id' in kwargs: - academy_id = kwargs['academy_id'] + elif "academy_id" in kwargs: + academy_id = kwargs["academy_id"] - elif 'Academy' in request.headers: - academy_id = request.headers['Academy'] + elif "Academy" in request.headers: + academy_id = request.headers["Academy"] - elif 'academy' in request.headers: - academy_id = request.headers['academy'] + elif "academy" in request.headers: + academy_id = request.headers["academy"] - elif 'academy' in request.GET: - academy_id = request.GET['academy'] + elif "academy" in request.GET: + academy_id = request.GET["academy"] if not str(academy_id).isdigit(): - raise ValidationException(f'Academy ID needs to be an integer: {str(academy_id)}', slug='invalid-academy-id') + raise ValidationException(f"Academy ID needs to be an integer: {str(academy_id)}", slug="invalid-academy-id") if isinstance(request.user, AnonymousUser): - raise PermissionDenied('Invalid user') + raise PermissionDenied("Invalid user") - capable = ProfileAcademy.objects.filter(user=request.user.id, - academy__id=academy_id, - role__capabilities__slug=capability) + capable = ProfileAcademy.objects.filter( + user=request.user.id, academy__id=academy_id, role__capabilities__slug=capability + ) if capable.count() == 0: raise PermissionDenied( - f"You (user: {request.user.id}) don't have this capability: {capability} for academy {academy_id}") + f"You (user: {request.user.id}) don't have this capability: {capability} for academy {academy_id}" + ) academy = capable.first().academy - if academy.status == 'DELETED': - raise PermissionDenied('This academy is deleted') - if request.get_full_path() != '/v1/admissions/academy/activate' and academy.status == 'INACTIVE': - raise PermissionDenied('This academy is not active') + if academy.status == "DELETED": + raise PermissionDenied("This academy is deleted") + if request.get_full_path() != "/v1/admissions/academy/activate" and academy.status == "INACTIVE": + raise PermissionDenied("This academy is not active") return academy_id diff --git a/breathecode/utils/decorators/consume.py 
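# Illustrative usage (assumed names, not part of the patch): a minimal
# sketch of how the `capable_of` decorator reformatted above is applied to
# a DRF view method. `AcademyMemberView` and the "read_member" capability
# slug are made up for the example; the decorator resolves the academy from
# the URL, the Academy header or ?academy= and injects `academy_id` into
# the view kwargs.

from rest_framework.views import APIView
from rest_framework.response import Response

from breathecode.utils.decorators.capable_of import capable_of


class AcademyMemberView(APIView):

    @capable_of("read_member")
    def get(self, request, academy_id=None):
        # only reached when request.user holds the capability for the
        # resolved academy; academy_id is always filled in by the decorator
        return Response({"academy_id": academy_id})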
b/breathecode/utils/decorators/consume.py index 4429c231d..328f8407f 100644 --- a/breathecode/utils/decorators/consume.py +++ b/breathecode/utils/decorators/consume.py @@ -22,7 +22,7 @@ from ..exceptions import ProgrammingError -__all__ = ['consume', 'Consumer', 'ServiceContext'] +__all__ = ["consume", "Consumer", "ServiceContext"] logger = logging.getLogger(__name__) @@ -37,40 +37,44 @@ class ServiceContext(TypedDict): price: float is_consumption_session: bool + type Consumer = Callable[[ServiceContext, tuple, dict], tuple[ServiceContext, tuple, dict, Optional[timedelta]]] -def render_message(r, - msg, - btn_label=None, - btn_url=None, - btn_target='_blank', - data=None, - status=None, - go_back=None, - url_back=None, - academy=None): + +def render_message( + r, + msg, + btn_label=None, + btn_url=None, + btn_target="_blank", + data=None, + status=None, + go_back=None, + url_back=None, + academy=None, +): if data is None: data = {} _data = { - 'MESSAGE': msg, - 'BUTTON': btn_label, - 'BUTTON_TARGET': btn_target, - 'LINK': btn_url, - 'GO_BACK': go_back, - 'URL_BACK': url_back + "MESSAGE": msg, + "BUTTON": btn_label, + "BUTTON_TARGET": btn_target, + "LINK": btn_url, + "GO_BACK": go_back, + "URL_BACK": url_back, } if academy: - _data['COMPANY_INFO_EMAIL'] = academy.feedback_email - _data['COMPANY_LEGAL_NAME'] = academy.legal_name or academy.name - _data['COMPANY_LOGO'] = academy.logo_url - _data['COMPANY_NAME'] = academy.name + _data["COMPANY_INFO_EMAIL"] = academy.feedback_email + _data["COMPANY_LEGAL_NAME"] = academy.legal_name or academy.name + _data["COMPANY_LOGO"] = academy.logo_url + _data["COMPANY_NAME"] = academy.name - if 'heading' not in _data: - _data['heading'] = academy.name + if "heading" not in _data: + _data["heading"] = academy.name - return render(r, 'message.html', {**_data, **data}, status=status) + return render(r, "message.html", {**_data, **data}, status=status) def render_html_error(request, kwargs, service, e): @@ -78,25 +82,24 @@ def render_html_error(request, kwargs, service, e): from breathecode.payments.models import PlanFinancing, PlanOffer, Subscription token = None - if 'token' in kwargs and kwargs['token'] is not None: - token = kwargs['token'].key + if "token" in kwargs and kwargs["token"] is not None: + token = kwargs["token"].key + if "service_slug" in kwargs: + slug = kwargs["service_slug"] - if 'service_slug' in kwargs: - slug = kwargs['service_slug'] - - if 'event_id' in kwargs: - event_id = kwargs['event_id'] + if "event_id" in kwargs: + event_id = kwargs["event_id"] event = Event.objects.filter(id=event_id).first() if event is not None: slug = event.event_type.slug - if 'event' in kwargs: - event = kwargs['event'] + if "event" in kwargs: + event = kwargs["event"] slug = event.event_type.slug - if 'mentorship_service' in kwargs: - slug = kwargs['mentorship_service'].slug + if "mentorship_service" in kwargs: + slug = kwargs["mentorship_service"].slug renovate_consumables = {} subscription = None @@ -106,29 +109,33 @@ def render_html_error(request, kwargs, service, e): plan_offer = None user_plan = None - if service == 'join_mentorship': + if service == "join_mentorship": subscription = Subscription.objects.filter( - user=request.user, selected_mentorship_service_set__mentorship_services__slug=slug).first() + user=request.user, selected_mentorship_service_set__mentorship_services__slug=slug + ).first() if subscription is not None: mentorship_service_set = subscription.selected_mentorship_service_set.slug user_plan = subscription.plans.first() - elif 
service == 'event_join': - subscription = Subscription.objects.filter(user=request.user, - selected_event_type_set__event_types__slug=slug).first() + elif service == "event_join": + subscription = Subscription.objects.filter( + user=request.user, selected_event_type_set__event_types__slug=slug + ).first() if subscription is not None: event_type_set = subscription.selected_event_type_set.slug user_plan = subscription.plans.first() if subscription is None: - if service == 'join_mentorship': + if service == "join_mentorship": plan_financing = PlanFinancing.objects.filter( - user=request.user, selected_mentorship_service_set__mentorship_services__slug=slug).first() + user=request.user, selected_mentorship_service_set__mentorship_services__slug=slug + ).first() if plan_financing is not None: mentorship_service_set = plan_financing.selected_mentorship_service_set.slug user_plan = plan_financing.plans.first() - elif service == 'event_join': - plan_financing = PlanFinancing.objects.filter(user=request.user, - selected_event_type_set__event_types__slug=slug).first() + elif service == "event_join": + plan_financing = PlanFinancing.objects.filter( + user=request.user, selected_event_type_set__event_types__slug=slug + ).first() if plan_financing is not None: event_type_set = plan_financing.selected_event_type_set.slug user_plan = plan_financing.plans.first() @@ -137,70 +144,75 @@ def render_html_error(request, kwargs, service, e): plan_offer = PlanOffer.objects.filter(original_plan__slug=user_plan.slug).first() if plan_offer is not None: - renovate_consumables['btn_label'] = 'Get more consumables' - renovate_consumables[ - 'btn_url'] = f'https://4geeks.com/checkout?plan={plan_offer.suggested_plan.slug}&token={token}' + renovate_consumables["btn_label"] = "Get more consumables" + renovate_consumables["btn_url"] = ( + f"https://4geeks.com/checkout?plan={plan_offer.suggested_plan.slug}&token={token}" + ) elif subscription is not None or plan_financing is not None: - renovate_consumables['btn_label'] = 'Get more consumables' - if service == 'join_mentorship': - renovate_consumables[ - 'btn_url'] = f'https://4geeks.com/checkout?mentorship_service_set={mentorship_service_set}&token={token}' - elif service == 'event_join': - renovate_consumables[ - 'btn_url'] = f'https://4geeks.com/checkout?event_type_set={event_type_set}&token={token}' + renovate_consumables["btn_label"] = "Get more consumables" + if service == "join_mentorship": + renovate_consumables["btn_url"] = ( + f"https://4geeks.com/checkout?mentorship_service_set={mentorship_service_set}&token={token}" + ) + elif service == "event_join": + renovate_consumables["btn_url"] = ( + f"https://4geeks.com/checkout?event_type_set={event_type_set}&token={token}" + ) else: - if service == 'join_mentorship' or service == 'event_join': - e = 'You must get a plan in order to access this service' - renovate_consumables['btn_label'] = 'Get a plan' - plan = os.getenv('BASE_PLAN', 'basic') - renovate_consumables['btn_url'] = f'https://4geeks.com/checkout?plan={plan}&token={token}' - - return render_message(request, - str(e), - status=402, - go_back='Go back to Dashboard', - url_back='https://4geeks.com/choose-program', - **renovate_consumables) - - - -def consume(service: str, consumer: Optional[Consumer] = None, format:str='json') -> callable: + if service == "join_mentorship" or service == "event_join": + e = "You must get a plan in order to access this service" + renovate_consumables["btn_label"] = "Get a plan" + plan = os.getenv("BASE_PLAN", "basic") + 
renovate_consumables["btn_url"] = f"https://4geeks.com/checkout?plan={plan}&token={token}" + + return render_message( + request, + str(e), + status=402, + go_back="Go back to Dashboard", + url_back="https://4geeks.com/choose-program", + **renovate_consumables, + ) + + +def consume(service: str, consumer: Optional[Consumer] = None, format: str = "json") -> callable: """Check if the current user can access to the resource through of permissions.""" from breathecode.payments.models import Consumable, ConsumptionSession def decorator(function: callable) -> callable: - def validate_and_get_request(permission: str, args: Any) -> HttpRequest | AsyncRequest: if isinstance(permission, str) == False: - raise ProgrammingError('Service must be a string') + raise ProgrammingError("Service must be a string") try: - if hasattr(args[0], '__class__') and isinstance(args[0], APIView): + if hasattr(args[0], "__class__") and isinstance(args[0], APIView): request = args[1] - elif hasattr(args[0], 'user') and hasattr(args[0].user, 'has_perm'): + elif hasattr(args[0], "user") and hasattr(args[0].user, "has_perm"): request = args[0] else: raise IndexError() except IndexError: - raise ProgrammingError('Missing request information, use this decorator with DRF View') + raise ProgrammingError("Missing request information, use this decorator with DRF View") return request - def build_context(request: HttpRequest | AsyncRequest, utc_now: datetime, **opts: Unpack[ServiceContext]) -> ServiceContext: + def build_context( + request: HttpRequest | AsyncRequest, utc_now: datetime, **opts: Unpack[ServiceContext] + ) -> ServiceContext: return { - 'utc_now': utc_now, - 'consumer': consumer, - 'service': service, - 'request': request, - 'consumables': Consumable.objects.none(), - 'lifetime': None, - 'price': 1, - 'is_consumption_session': False, + "utc_now": utc_now, + "consumer": consumer, + "service": service, + "request": request, + "consumables": Consumable.objects.none(), + "lifetime": None, + "price": 1, + "is_consumption_session": False, **opts, } @@ -209,8 +221,9 @@ def wrapper(*args, **kwargs): if isinstance(request.user, AnonymousUser): raise PaymentException( - f'Anonymous user do not have enough credits to access to this service: {service}', - slug='anonymous-user-not-enough-consumables') + f"Anonymous user do not have enough credits to access to this service: {service}", + slug="anonymous-user-not-enough-consumables", + ) try: utc_now = timezone.now() @@ -218,86 +231,84 @@ def wrapper(*args, **kwargs): context = build_context(request, utc_now) if session and callable(consumer): - context['is_consumption_session'] = True + context["is_consumption_session"] = True context, args, kwargs = consumer(context, args, kwargs) if session: return function(*args, **kwargs) items = Consumable.list(user=request.user, service=service) - context['consumables'] = items + context["consumables"] = items if callable(consumer): context, args, kwargs = consumer(context, args, kwargs) # exclude consumables that is being used in a session. 
- if consumer and context['lifetime']: - consumables = context['consumables'] - for item in consumables.filter(consumptionsession__status='PENDING').exclude(how_many=0): + if consumer and context["lifetime"]: + consumables = context["consumables"] + for item in consumables.filter(consumptionsession__status="PENDING").exclude(how_many=0): - sum = item.consumptionsession_set.filter(status='PENDING').aggregate(Sum('how_many')) + sum = item.consumptionsession_set.filter(status="PENDING").aggregate(Sum("how_many")) - if item.how_many - sum['how_many__sum'] == 0: - context['consumables'] = context['consumables'].exclude(id=item.id) + if item.how_many - sum["how_many__sum"] == 0: + context["consumables"] = context["consumables"].exclude(id=item.id) + if context["price"] and context["consumables"].count() == 0: + raise PaymentException( + f"You do not have enough credits to access this service: {service}", + slug="with-consumer-not-enough-consumables", + ) - if context['price'] and context['consumables'].count() == 0: - raise PaymentException(f'You do not have enough credits to access this service: {service}', - slug='with-consumer-not-enough-consumables') - - if context['price'] and context['lifetime'] and (consumable := - context['consumables'].first()): - session = ConsumptionSession.build_session(request, consumable, context['lifetime']) + if context["price"] and context["lifetime"] and (consumable := context["consumables"].first()): + session = ConsumptionSession.build_session(request, consumable, context["lifetime"]) # sync view method response: Response = function(*args, **kwargs) - it_will_consume = context['price'] and response.status_code < 400 + it_will_consume = context["price"] and response.status_code < 400 if it_will_consume and session: - session.will_consume(context['price']) + session.will_consume(context["price"]) elif it_will_consume: - item = context['consumables'].first() - consume_service.send_robust(instance=item, sender=item.__class__, how_many=context['price']) + item = context["consumables"].first() + consume_service.send_robust(instance=item, sender=item.__class__, how_many=context["price"]) return response # handle html views errors except PaymentException as e: - if format == 'websocket': + if format == "websocket": raise e - if format == 'html': - return render_html_error(request, kwargs,service, e) + if format == "html": + return render_html_error(request, kwargs, service, e) - return Response({'detail': str(e), 'status_code': 402}, 402) + return Response({"detail": str(e), "status_code": 402}, 402) # handle html views errors except ValidationException as e: - if format == 'websocket': + if format == "websocket": raise e - status = e.status_code if hasattr(e, 'status_code') else 400 + status = e.status_code if hasattr(e, "status_code") else 400 - if format == 'html': + if format == "html": return render_message(request, str(e), status=status) - return Response({'detail': str(e), 'status_code': status}, status) + return Response({"detail": str(e), "status_code": status}, status) # handle html views errors except Exception as e: # show stacktrace for unexpected exceptions traceback.print_exc() - if format == 'html': - return render_message(request, 'unexpected error, contact admin if you are affected', status=500) + if format == "html": + return render_message(request, "unexpected error, contact admin if you are affected", status=500) - response = JsonResponse({'detail': str(e), 'status_code': 500}) + response = JsonResponse({"detail": str(e), "status_code": 500}) 
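# Illustrative usage (assumed names, not part of the patch): a minimal
# sketch of wiring the `consume` decorator defined in this file to a view
# through a consumer callback. `JoinMentorshipView`, the price of 1 and the
# two hour lifetime are assumptions; "join_mentorship" matches the service
# slug handled by render_html_error above. The consumer returns
# (context, args, kwargs), which is how the wrapper unpacks it.

from datetime import timedelta

from rest_framework.views import APIView
from rest_framework.response import Response

from breathecode.utils.decorators.consume import ServiceContext, consume


def mentorship_consumer(context: ServiceContext, args: tuple, kwargs: dict):
    # charge one unit and keep a consumption session open for two hours
    context["price"] = 1
    context["lifetime"] = timedelta(hours=2)
    return (context, args, kwargs)


class JoinMentorshipView(APIView):

    @consume("join_mentorship", consumer=mentorship_consumer)
    def get(self, request, mentorship_service=None):
        # only reached while the user still has consumables, or an open
        # consumption session, for this service
        return Response({"status": "ok"})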
response.status_code = 500 return response - - @sync_to_async def async_get_user(request: AsyncRequest) -> User: return request.user @@ -306,34 +317,33 @@ def async_get_user(request: AsyncRequest) -> User: async def async_wrapper(*args, **kwargs): nonlocal consumer - request = validate_and_get_request(service, args) if isinstance(request.user, AnonymousUser): raise PaymentException( - f'Anonymous user do not have enough credits to access to this service: {service}', - slug='anonymous-user-not-enough-consumables') + f"Anonymous user do not have enough credits to access to this service: {service}", + slug="anonymous-user-not-enough-consumables", + ) try: utc_now = timezone.now() session = await ConsumptionSession.aget_session(request) context = build_context(request, utc_now) - if session and callable(consumer): if asyncio.iscoroutinefunction(consumer) is False: consumer = sync_to_async(consumer) - context['is_consumption_session'] = True + context["is_consumption_session"] = True context, args, kwargs = await consumer(context, args, kwargs) if session: - return await function(*args, **kwargs) + return await function(*args, **kwargs) user = await async_get_user(request) items = await Consumable.alist(user=user, service=service) - context['consumables'] = items + context["consumables"] = items if callable(consumer): if asyncio.iscoroutinefunction(consumer) is False: @@ -342,68 +352,68 @@ async def async_wrapper(*args, **kwargs): context, args, kwargs = await consumer(context, args, kwargs) # exclude consumables that is being used in a session. - if consumer and context['lifetime']: - consumables: QuerySet[Consumable] = context['consumables'] - for item in consumables.filter(consumptionsession__status='PENDING').exclude(how_many=0): - - sum = await item.consumptionsession_set.filter(status='PENDING').aaggregate(Sum('how_many')) + if consumer and context["lifetime"]: + consumables: QuerySet[Consumable] = context["consumables"] + for item in consumables.filter(consumptionsession__status="PENDING").exclude(how_many=0): - if item.how_many - sum['how_many__sum'] == 0: - context['consumables'] = context['consumables'].exclude(id=item.id) + sum = await item.consumptionsession_set.filter(status="PENDING").aaggregate(Sum("how_many")) - if context['price'] and await context['consumables'].acount() == 0: - raise PaymentException(f'You do not have enough credits to access this service: {service}', - slug='with-consumer-not-enough-consumables') + if item.how_many - sum["how_many__sum"] == 0: + context["consumables"] = context["consumables"].exclude(id=item.id) - if context['price'] and context['lifetime'] and (consumable := await - context['consumables'].afirst()): - session =await ConsumptionSession.abuild_session(request, consumable, context['lifetime']) + if context["price"] and await context["consumables"].acount() == 0: + raise PaymentException( + f"You do not have enough credits to access this service: {service}", + slug="with-consumer-not-enough-consumables", + ) + if context["price"] and context["lifetime"] and (consumable := await context["consumables"].afirst()): + session = await ConsumptionSession.abuild_session(request, consumable, context["lifetime"]) # sync view method response: Response = await function(*args, **kwargs) - it_will_consume = context['price'] and response.status_code < 400 + it_will_consume = context["price"] and response.status_code < 400 if it_will_consume and session: - await session.awill_consume(context['price']) + await session.awill_consume(context["price"]) elif 
it_will_consume: - item = await context['consumables'].afirst() - consume_service.send_robust(instance=item, sender=item.__class__, how_many=context['price']) + item = await context["consumables"].afirst() + consume_service.send_robust(instance=item, sender=item.__class__, how_many=context["price"]) return response # handle html views errors except PaymentException as e: - if format == 'websocket': + if format == "websocket": raise e - if format == 'html': - return render_html_error(request, kwargs,service, e) + if format == "html": + return render_html_error(request, kwargs, service, e) - return Response({'detail': str(e), 'status_code': 402}, 402) + return Response({"detail": str(e), "status_code": 402}, 402) # handle html views errors except ValidationException as e: - if format == 'websocket': + if format == "websocket": raise e - status = e.status_code if hasattr(e, 'status_code') else 400 + status = e.status_code if hasattr(e, "status_code") else 400 - if format == 'html': + if format == "html": return render_message(request, str(e), status=status) - return Response({'detail': str(e), 'status_code': status}, status) + return Response({"detail": str(e), "status_code": status}, status) # handle html views errors except Exception as e: # show stacktrace for unexpected exceptions traceback.print_exc() - if format == 'html': - return render_message(request, 'unexpected error, contact admin if you are affected', status=500) + if format == "html": + return render_message(request, "unexpected error, contact admin if you are affected", status=500) - response = JsonResponse({'detail': str(e), 'status_code': 500}) + response = JsonResponse({"detail": str(e), "status_code": 500}) response.status_code = 500 return response diff --git a/breathecode/utils/decorators/has_permission.py b/breathecode/utils/decorators/has_permission.py index b5aad7008..4b7e2de53 100644 --- a/breathecode/utils/decorators/has_permission.py +++ b/breathecode/utils/decorators/has_permission.py @@ -16,7 +16,7 @@ from ..exceptions import ProgrammingError -__all__ = ['has_permission', 'validate_permission'] +__all__ = ["has_permission", "validate_permission"] logger = logging.getLogger(__name__) @@ -35,84 +35,88 @@ def avalidate_permission(user: User, permission: str) -> bool: # that must be remove from here -def render_message(r, - msg, - btn_label=None, - btn_url=None, - btn_target='_blank', - data=None, - status=None, - go_back=None, - url_back=None, - academy=None): +def render_message( + r, + msg, + btn_label=None, + btn_url=None, + btn_target="_blank", + data=None, + status=None, + go_back=None, + url_back=None, + academy=None, +): if data is None: data = {} _data = { - 'MESSAGE': msg, - 'BUTTON': btn_label, - 'BUTTON_TARGET': btn_target, - 'LINK': btn_url, - 'GO_BACK': go_back, - 'URL_BACK': url_back + "MESSAGE": msg, + "BUTTON": btn_label, + "BUTTON_TARGET": btn_target, + "LINK": btn_url, + "GO_BACK": go_back, + "URL_BACK": url_back, } if academy: - _data['COMPANY_INFO_EMAIL'] = academy.feedback_email - _data['COMPANY_LEGAL_NAME'] = academy.legal_name or academy.name - _data['COMPANY_LOGO'] = academy.logo_url - _data['COMPANY_NAME'] = academy.name - - if 'heading' not in _data: - _data['heading'] = academy.name - - return render(r, 'message.html', {**_data, **data}, status=status) - - -def handle_exc(format: str, - request: HttpRequest, - e: Exception, - message: Optional[str] = None, - status: Optional[int] = None, - use_json_response: bool = False) -> Response | HttpResponse | JsonResponse: - if format == 
'websocket': + _data["COMPANY_INFO_EMAIL"] = academy.feedback_email + _data["COMPANY_LEGAL_NAME"] = academy.legal_name or academy.name + _data["COMPANY_LOGO"] = academy.logo_url + _data["COMPANY_NAME"] = academy.name + + if "heading" not in _data: + _data["heading"] = academy.name + + return render(r, "message.html", {**_data, **data}, status=status) + + +def handle_exc( + format: str, + request: HttpRequest, + e: Exception, + message: Optional[str] = None, + status: Optional[int] = None, + use_json_response: bool = False, +) -> Response | HttpResponse | JsonResponse: + if format == "websocket": raise e if message is None: message = str(e) if status is None: - status = e.status_code if hasattr(e, 'status_code') else 400 + status = e.status_code if hasattr(e, "status_code") else 400 - if format == 'html': + if format == "html": return render_message(request, message, status=status) http_cls = JsonResponse if use_json_response else HttpResponse - return http_cls({'detail': message, 'status_code': status}, status) + return http_cls({"detail": message, "status_code": status}, status) -def has_permission(permission: str, format='json') -> callable: +def has_permission(permission: str, format="json") -> callable: """Check if the current user can access to the resource through of permissions.""" def decorator(function: callable) -> callable: def validate_and_get_request(permission: str, args: Any) -> HttpRequest | AsyncRequest: if isinstance(permission, str) == False: - raise ProgrammingError('Permission must be a string') + raise ProgrammingError("Permission must be a string") try: - if hasattr(args[0], '__class__') and isinstance(args[0], APIView): + if hasattr(args[0], "__class__") and isinstance(args[0], APIView): request = args[1] - elif hasattr(args[0], 'user') and hasattr(args[0].user, 'has_perm'): + elif hasattr(args[0], "user") and hasattr(args[0].user, "has_perm"): request = args[0] else: raise IndexError() except IndexError: - raise ProgrammingError('Missing request information, use this decorator with DRF View') + raise ProgrammingError("Missing request information, use this decorator with DRF View") return request @@ -124,50 +128,55 @@ def wrapper(*args, **kwargs): return function(*args, **kwargs) elif isinstance(request.user, AnonymousUser): - raise ValidationException(f'Anonymous user don\'t have this permission: {permission}', - code=403, - slug='anonymous-user-without-permission') + raise ValidationException( + f"Anonymous user don't have this permission: {permission}", + code=403, + slug="anonymous-user-without-permission", + ) else: - raise ValidationException((f'You (user: {request.user.id}) don\'t have this permission: ' - f'{permission}'), - code=403, - slug='without-permission') + raise ValidationException( + (f"You (user: {request.user.id}) don't have this permission: " f"{permission}"), + code=403, + slug="without-permission", + ) except PaymentException as e: - if format == 'websocket': + if format == "websocket": raise e - if format == 'html': - return render_message(request, - str(e), - status=402, - go_back='Go back to Dashboard', - url_back='https://4geeks.com/choose-program') + if format == "html": + return render_message( + request, + str(e), + status=402, + go_back="Go back to Dashboard", + url_back="https://4geeks.com/choose-program", + ) - return Response({'detail': str(e), 'status_code': 402}, 402) + return Response({"detail": str(e), "status_code": 402}, 402) # handle html views errors except ValidationException as e: - if format == 'websocket': + if format == 
"websocket": raise e - status = e.status_code if hasattr(e, 'status_code') else 400 + status = e.status_code if hasattr(e, "status_code") else 400 - if format == 'html': + if format == "html": return render_message(request, str(e), status=status) - return Response({'detail': str(e), 'status_code': status}, status) + return Response({"detail": str(e), "status_code": status}, status) # handle html views errors except Exception as e: # show stacktrace for unexpected exceptions traceback.print_exc() - if format == 'html': - return render_message(request, 'unexpected error, contact admin if you are affected', status=500) + if format == "html": + return render_message(request, "unexpected error, contact admin if you are affected", status=500) - response = JsonResponse({'detail': str(e), 'status_code': 500}) + response = JsonResponse({"detail": str(e), "status_code": 500}) response.status_code = 500 return response @@ -184,50 +193,55 @@ async def async_wrapper(*args, **kwargs): return await function(*args, **kwargs) elif isinstance(user, AnonymousUser): - raise ValidationException(f'Anonymous user don\'t have this permission: {permission}', - code=403, - slug='anonymous-user-without-permission') + raise ValidationException( + f"Anonymous user don't have this permission: {permission}", + code=403, + slug="anonymous-user-without-permission", + ) else: - raise ValidationException((f'You (user: {user.id}) don\'t have this permission: ' - f'{permission}'), - code=403, - slug='without-permission') + raise ValidationException( + (f"You (user: {user.id}) don't have this permission: " f"{permission}"), + code=403, + slug="without-permission", + ) except PaymentException as e: - if format == 'websocket': + if format == "websocket": raise e - if format == 'html': - return render_message(request, - str(e), - status=402, - go_back='Go back to Dashboard', - url_back='https://4geeks.com/choose-program') + if format == "html": + return render_message( + request, + str(e), + status=402, + go_back="Go back to Dashboard", + url_back="https://4geeks.com/choose-program", + ) - return Response({'detail': str(e), 'status_code': 402}, 402) + return Response({"detail": str(e), "status_code": 402}, 402) # handle html views errors except ValidationException as e: - if format == 'websocket': + if format == "websocket": raise e - status = e.status_code if hasattr(e, 'status_code') else 400 + status = e.status_code if hasattr(e, "status_code") else 400 - if format == 'html': + if format == "html": return render_message(request, str(e), status=status) - return Response({'detail': str(e), 'status_code': status}, status) + return Response({"detail": str(e), "status_code": status}, status) # handle html views errors except Exception as e: # show stacktrace for unexpected exceptions traceback.print_exc() - if format == 'html': - return render_message(request, 'unexpected error, contact admin if you are affected', status=500) + if format == "html": + return render_message(request, "unexpected error, contact admin if you are affected", status=500) - response = JsonResponse({'detail': str(e), 'status_code': 500}) + response = JsonResponse({"detail": str(e), "status_code": 500}) response.status_code = 500 return response diff --git a/breathecode/utils/decorators/issue.py b/breathecode/utils/decorators/issue.py index 2a033d920..34d089a43 100644 --- a/breathecode/utils/decorators/issue.py +++ b/breathecode/utils/decorators/issue.py @@ -7,7 +7,7 @@ from breathecode.monitoring.models import SupervisorIssue -__all__ = ['issue', 'paths'] 
+__all__ = ["issue", "paths"] paths = {} @@ -19,10 +19,10 @@ def issue(supervisor: callable, delta: Optional[timedelta] = None, attempts: int delta = timedelta(minutes=10) def create_handler(fn: callable): - code = fn.__name__.replace('_', '-') - issue_by_supervisor = Q(code=code, - supervisor__task_module=supervisor.__module__, - supervisor__task_name=supervisor.__name__) + code = fn.__name__.replace("_", "-") + issue_by_supervisor = Q( + code=code, supervisor__task_module=supervisor.__module__, supervisor__task_name=supervisor.__name__ + ) def wrapper(supervisor_issue_id: int): issue = SupervisorIssue.objects.filter(issue_by_supervisor, fixed=None, id=supervisor_issue_id).first() @@ -39,8 +39,9 @@ def wrapper(supervisor_issue_id: int): return fixed async def async_wrapper(supervisor_issue_id: int): - issue = await SupervisorIssue.objects.filter(issue_by_supervisor, fixed=None, - id=supervisor_issue_id).afirst() + issue = await SupervisorIssue.objects.filter( + issue_by_supervisor, fixed=None, id=supervisor_issue_id + ).afirst() if not issue: return @@ -56,12 +57,12 @@ async def async_wrapper(supervisor_issue_id: int): # handler fn_module = fn.__module__ fn_name = fn.__name__ - fn_path = fn_module + '.' + fn_name + fn_path = fn_module + "." + fn_name # supervisor supervisor_module = supervisor.__module__ supervisor_name = supervisor.__name__ - supervisor_path = supervisor_module + '.' + supervisor_name + supervisor_path = supervisor_module + "." + supervisor_name if supervisor_path not in paths: paths[supervisor_path] = {} diff --git a/breathecode/utils/decorators/supervisor.py b/breathecode/utils/decorators/supervisor.py index 266a8ab46..02d6043b1 100644 --- a/breathecode/utils/decorators/supervisor.py +++ b/breathecode/utils/decorators/supervisor.py @@ -9,15 +9,14 @@ from breathecode.monitoring.models import Supervisor, SupervisorIssue -__all__ = ['supervisor', 'paths'] +__all__ = ["supervisor", "paths"] paths = set() -def supervisor(fn: Optional[callable] = None, - delta: Optional[timedelta] = None, - auto: bool = True, - raises: bool = False): +def supervisor( + fn: Optional[callable] = None, delta: Optional[timedelta] = None, auto: bool = True, raises: bool = False +): """Create a supervisor (automated quality assurance).""" def create_supervisor(fn: callable, delta: Optional[timedelta] = None, auto: bool = True, raises: bool = False): @@ -26,12 +25,14 @@ def get_instance(): fn_name = fn.__name__ fn_module = fn.__module__ - instance, created = Supervisor.objects.get_or_create(task_module=fn_module, - task_name=fn_name, - defaults={ - 'delta': delta, - 'ran_at': timezone.now(), - }) + instance, created = Supervisor.objects.get_or_create( + task_module=fn_module, + task_name=fn_name, + defaults={ + "delta": delta, + "ran_at": timezone.now(), + }, + ) if created is False: instance.ran_at = timezone.now() @@ -61,13 +62,15 @@ def wrapper(*args, **kwargs): elif len(msg) >= 3: msg, code, params = msg - issue, created = SupervisorIssue.objects.get_or_create(supervisor=instance, - error=msg, - code=code, - params=params, - defaults={ - 'ran_at': timezone.now(), - }) + issue, created = SupervisorIssue.objects.get_or_create( + supervisor=instance, + error=msg, + code=code, + params=params, + defaults={ + "ran_at": timezone.now(), + }, + ) if created is False: issue.ran_at = timezone.now() @@ -92,13 +95,15 @@ async def async_wrapper(*args, **kwargs): elif len(msg) >= 3: msg, code, params = msg - issue, created = await SupervisorIssue.objects.aget_or_create(supervisor=instance, - error=msg, - 
code=code, - params=params, - defaults={ - 'ran_at': timezone.now(), - }) + issue, created = await SupervisorIssue.objects.aget_or_create( + supervisor=instance, + error=msg, + code=code, + params=params, + defaults={ + "ran_at": timezone.now(), + }, + ) if created is False: issue.ran_at = timezone.now() diff --git a/breathecode/utils/decorators/task.py b/breathecode/utils/decorators/task.py index 3d48a3512..8f07b6523 100644 --- a/breathecode/utils/decorators/task.py +++ b/breathecode/utils/decorators/task.py @@ -3,7 +3,7 @@ from task_manager.core.settings import set_settings -__all__ = ['TaskPriority'] +__all__ = ["TaskPriority"] logger = logging.getLogger(__name__) RETRIES_LIMIT = 10 @@ -38,11 +38,11 @@ class TaskPriority(Enum): settings = { - 'RETRIES_LIMIT': 10, - 'RETRY_AFTER': timedelta(seconds=5), - 'DEFAULT': TaskPriority.DEFAULT.value, - 'SCHEDULER': TaskPriority.SCHEDULER.value, - 'TASK_MANAGER': TaskPriority.TASK_MANAGER.value, + "RETRIES_LIMIT": 10, + "RETRY_AFTER": timedelta(seconds=5), + "DEFAULT": TaskPriority.DEFAULT.value, + "SCHEDULER": TaskPriority.SCHEDULER.value, + "TASK_MANAGER": TaskPriority.TASK_MANAGER.value, } set_settings(**settings) diff --git a/breathecode/utils/decorators/validate_captcha.py b/breathecode/utils/decorators/validate_captcha.py index 1ab1f0a84..dfee4429c 100644 --- a/breathecode/utils/decorators/validate_captcha.py +++ b/breathecode/utils/decorators/validate_captcha.py @@ -8,49 +8,48 @@ from capyc.rest_framework.exceptions import ValidationException logger = logging.getLogger(__name__) -__all__ = ['validate_captcha'] +__all__ = ["validate_captcha"] def validate_captcha(function): def wrapper(*args, **kwargs): try: - if hasattr(args[0], '__class__') and isinstance(args[0], APIView): + if hasattr(args[0], "__class__") and isinstance(args[0], APIView): data = args[1].data.copy() - elif hasattr(args[0], 'user') and hasattr(args[0].user, 'has_perm'): + elif hasattr(args[0], "user") and hasattr(args[0].user, "has_perm"): data = args[0].data.copy() # websocket support - elif hasattr(args[0], 'ws_request'): + elif hasattr(args[0], "ws_request"): data = args[0].data.copy() else: raise IndexError() - apply_captcha = os.getenv('APPLY_CAPTCHA', 'FALSE').lower() + apply_captcha = os.getenv("APPLY_CAPTCHA", "FALSE").lower() - if not apply_captcha or apply_captcha == 'false': + if not apply_captcha or apply_captcha == "false": return function(*args, **kwargs) - project_id = os.getenv('GOOGLE_PROJECT_ID', '') - site_key = os.getenv('GOOGLE_CAPTCHA_KEY', '') + project_id = os.getenv("GOOGLE_PROJECT_ID", "") + site_key = os.getenv("GOOGLE_CAPTCHA_KEY", "") - token = data['token'] if 'token' in data else None + token = data["token"] if "token" in data else None - recaptcha_action = data['action'] if 'action' in data else None + recaptcha_action = data["action"] if "action" in data else None recaptcha = Recaptcha() - response = recaptcha.create_assessment(project_id=project_id, - recaptcha_site_key=site_key, - token=token, - recaptcha_action=recaptcha_action) + response = recaptcha.create_assessment( + project_id=project_id, recaptcha_site_key=site_key, token=token, recaptcha_action=recaptcha_action + ) - if (response.risk_analysis.score < 0.8): - raise ValidationException('The action was denied because it was considered suspicious', code=429) + if response.risk_analysis.score < 0.8: + raise ValidationException("The action was denied because it was considered suspicious", code=429) except IndexError: - raise ProgrammingError('Missing request information, use this 
decorator with DRF View') + raise ProgrammingError("Missing request information, use this decorator with DRF View") return function(*args, **kwargs) diff --git a/breathecode/utils/decorators/validate_captcha_challenge.py b/breathecode/utils/decorators/validate_captcha_challenge.py index bb577b309..b7e500635 100644 --- a/breathecode/utils/decorators/validate_captcha_challenge.py +++ b/breathecode/utils/decorators/validate_captcha_challenge.py @@ -8,38 +8,38 @@ from capyc.rest_framework.exceptions import ValidationException logger = logging.getLogger(__name__) -__all__ = ['validate_captcha_challenge'] +__all__ = ["validate_captcha_challenge"] def validate_captcha_challenge(function): def wrapper(*args, **kwargs): try: - if hasattr(args[0], '__class__') and isinstance(args[0], APIView): + if hasattr(args[0], "__class__") and isinstance(args[0], APIView): data = args[1].data.copy() - elif hasattr(args[0], 'user') and hasattr(args[0].user, 'has_perm'): + elif hasattr(args[0], "user") and hasattr(args[0].user, "has_perm"): data = args[0].data.copy() # websocket support - elif hasattr(args[0], 'ws_request'): + elif hasattr(args[0], "ws_request"): data = args[0].data.copy() else: raise IndexError() - apply_captcha = os.getenv('APPLY_CAPTCHA', 'FALSE').lower() + apply_captcha = os.getenv("APPLY_CAPTCHA", "FALSE").lower() - if not apply_captcha or apply_captcha == 'false': + if not apply_captcha or apply_captcha == "false": return function(*args, **kwargs) - project_id = os.getenv('GOOGLE_PROJECT_ID', '') + project_id = os.getenv("GOOGLE_PROJECT_ID", "") - site_key = os.getenv('GOOGLE_CAPTCHA_KEY', '') + site_key = os.getenv("GOOGLE_CAPTCHA_KEY", "") - token = data['token'] if 'token' in data else None + token = data["token"] if "token" in data else None if token is None: - raise ValidationException('Missing ReCaptcha Token', code=400) + raise ValidationException("Missing ReCaptcha Token", code=400) recaptcha = Recaptcha() recaptcha.create_assessment_v2(project_id=project_id, recaptcha_site_key=site_key, token=token) @@ -53,7 +53,7 @@ def wrapper(*args, **kwargs): # raise ValidationException('The action was denied because it was considered suspicious', code=429) except IndexError: - raise ProgrammingError('Missing request information, use this decorator with DRF View') + raise ProgrammingError("Missing request information, use this decorator with DRF View") return function(*args, **kwargs) diff --git a/breathecode/utils/exceptions.py b/breathecode/utils/exceptions.py index d1705b2b5..e81588759 100644 --- a/breathecode/utils/exceptions.py +++ b/breathecode/utils/exceptions.py @@ -1,4 +1,4 @@ -__all__ = ['ProgrammingError', 'MalformedLanguageCode'] +__all__ = ["ProgrammingError", "MalformedLanguageCode"] from capyc.rest_framework.exceptions import ValidationException diff --git a/breathecode/utils/find_by_full_name.py b/breathecode/utils/find_by_full_name.py index d0ec7196e..94fb66ae4 100644 --- a/breathecode/utils/find_by_full_name.py +++ b/breathecode/utils/find_by_full_name.py @@ -1,15 +1,15 @@ from django.db.models import Q -__all__ = ['query_like_by_full_name'] +__all__ = ["query_like_by_full_name"] -def query_like_by_full_name(prefix='', **kwargs): - first_name_kwargs = prefix + 'first_name__icontains' - last_name_kwargs = prefix + 'last_name__icontains' - email_kwargs = prefix + 'email__icontains' - items = kwargs['items'] - for query in kwargs['like'].split(): - items = kwargs['items'].filter( - Q(**{first_name_kwargs: query}) - | Q(**{last_name_kwargs: query}) | Q(**{email_kwargs: query})) 
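# Illustrative usage (assumed names, not part of the patch): a minimal
# sketch of pairing the `supervisor` and `issue` decorators reformatted
# earlier in this diff. `supervise_pending_invites`, the "stuck-invite"
# code and its params are made up. A supervisor yields the problems it
# finds; an issue handler whose function name matches the code (with
# underscores turned into dashes) tries to fix each one.

from datetime import timedelta

from breathecode.utils.decorators.issue import issue
from breathecode.utils.decorators.supervisor import supervisor


@supervisor(delta=timedelta(hours=1))
def supervise_pending_invites():
    # each yielded (message, code, params) tuple becomes a SupervisorIssue
    yield "Invite 42 is stuck in PENDING", "stuck-invite", {"invite_id": 42}


@issue(supervise_pending_invites, delta=timedelta(minutes=10), attempts=3)
def stuck_invite(invite_id: int):
    # return True once the issue is fixed
    return True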
+def query_like_by_full_name(prefix="", **kwargs): + first_name_kwargs = prefix + "first_name__icontains" + last_name_kwargs = prefix + "last_name__icontains" + email_kwargs = prefix + "email__icontains" + items = kwargs["items"] + for query in kwargs["like"].split(): + items = kwargs["items"].filter( + Q(**{first_name_kwargs: query}) | Q(**{last_name_kwargs: query}) | Q(**{email_kwargs: query}) + ) return items diff --git a/breathecode/utils/gcl_manifest_static_files_storage.py b/breathecode/utils/gcl_manifest_static_files_storage.py index 698d8bd2d..2595a83a9 100644 --- a/breathecode/utils/gcl_manifest_static_files_storage.py +++ b/breathecode/utils/gcl_manifest_static_files_storage.py @@ -1,7 +1,7 @@ from storages.backends.gcloud import GoogleCloudStorage from django.contrib.staticfiles.storage import ManifestFilesMixin -__all__ = ['GCSManifestStaticFilesStorage'] +__all__ = ["GCSManifestStaticFilesStorage"] class GCSManifestStaticFilesStorage(ManifestFilesMixin, GoogleCloudStorage): diff --git a/breathecode/utils/generate_lookups_mixin.py b/breathecode/utils/generate_lookups_mixin.py index 23b52a5d8..0e7cd6d8b 100644 --- a/breathecode/utils/generate_lookups_mixin.py +++ b/breathecode/utils/generate_lookups_mixin.py @@ -1,7 +1,7 @@ from django.core.handlers.wsgi import WSGIRequest from rest_framework.exceptions import APIException -__all__ = ['GenerateLookupsMixin'] +__all__ = ["GenerateLookupsMixin"] class GenerateLookupsMixin(APIException): @@ -12,10 +12,10 @@ def __field_exists__(self, request: WSGIRequest, field: str): def __field_name__(self, field: str, pk=False, many=False): if pk: # `pk` allow custom primary keys, don't use `id` - field = f'{field}__pk' + field = f"{field}__pk" if many: - field = f'{field}__in' + field = f"{field}__in" return field @@ -23,19 +23,19 @@ def __field_value__(self, request: WSGIRequest, field: str, many=False): value = request.GET.get(field) if many: - value = value.split(',') + value = value.split(",") return value def __bulk_generator__(self, request: WSGIRequest, fields: list[str], pk=False, many=False): - return [(self.__field_name__(field, pk=pk, many=many), self.__field_value__(request, field, many=many)) - for field in fields if self.__field_exists__(request, field)] - - def generate_lookups(self, - request: WSGIRequest, - fields=None, - relationships=None, - many_fields=None, - many_relationships=None): + return [ + (self.__field_name__(field, pk=pk, many=many), self.__field_value__(request, field, many=many)) + for field in fields + if self.__field_exists__(request, field) + ] + + def generate_lookups( + self, request: WSGIRequest, fields=None, relationships=None, many_fields=None, many_relationships=None + ): """Get the variables through of querystring, returns one list ready to be used by the filter method.""" if fields is None: @@ -51,9 +51,12 @@ def generate_lookups(self, many_relationships = [] kwargs = {} - founds = (self.__bulk_generator__(request, fields) + self.__bulk_generator__(request, many_fields, many=True) + - self.__bulk_generator__(request, relationships, pk=True) + - self.__bulk_generator__(request, many_relationships, pk=True, many=True)) + founds = ( + self.__bulk_generator__(request, fields) + + self.__bulk_generator__(request, many_fields, many=True) + + self.__bulk_generator__(request, relationships, pk=True) + + self.__bulk_generator__(request, many_relationships, pk=True, many=True) + ) for field, value in founds: kwargs[field] = value diff --git a/breathecode/utils/header_limit_offset_pagination.py 
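# Illustrative usage (assumed names, not part of the patch): a minimal
# sketch of the `query_like_by_full_name` helper reformatted above. It
# narrows a queryset by matching every word of a free text "like" term
# against first_name, last_name and email; using Django's built-in User
# model and the "like" querystring parameter are assumptions here.

from django.contrib.auth.models import User

from breathecode.utils.find_by_full_name import query_like_by_full_name


def search_users(request):
    items = User.objects.all()

    like = request.GET.get("like", "")
    if like:
        items = query_like_by_full_name(items=items, like=like)

    return items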
b/breathecode/utils/header_limit_offset_pagination.py index fa57a1b0f..048d8f98e 100644 --- a/breathecode/utils/header_limit_offset_pagination.py +++ b/breathecode/utils/header_limit_offset_pagination.py @@ -3,17 +3,17 @@ from rest_framework.response import Response from rest_framework.utils.urls import replace_query_param, remove_query_param -__all__ = ['HeaderLimitOffsetPagination'] +__all__ = ["HeaderLimitOffsetPagination"] class HeaderLimitOffsetPagination(LimitOffsetPagination): def paginate_queryset(self, queryset, request, view=None): self.use_envelope = True - if str(request.GET.get('envelope')).lower() in ['false', '0']: + if str(request.GET.get("envelope")).lower() in ["false", "0"]: self.use_envelope = False result = self._paginate_queryset(queryset, request, view) - if hasattr(queryset, 'filter'): + if hasattr(queryset, "filter"): return result return queryset @@ -30,13 +30,13 @@ def _paginate_queryset(self, queryset, request, view=None): # if self.count == 0 or self.offset > self.count: # return [] - return queryset[self.offset:self.offset + self.limit] + return queryset[self.offset : self.offset + self.limit] def __parse_comma__(self, string: str): if not string: return None - return string.replace('%2C', ',') + return string.replace("%2C", ",") def get_paginated_response(self, data, count=None, cache=None, cache_kwargs=None): if cache_kwargs is None: @@ -52,20 +52,28 @@ def get_paginated_response(self, data, count=None, cache=None, cache_kwargs=None links = [] for label, url in ( - ('first', first_url), - ('next', next_url), - ('previous', previous_url), - ('last', last_url), + ("first", first_url), + ("next", next_url), + ("previous", previous_url), + ("last", last_url), ): if url is not None: links.append('<{}>; rel="{}"'.format(url, label)) - headers = {'Link': ', '.join(links)} if links else {} - headers['x-total-count'] = self.count + headers = {"Link": ", ".join(links)} if links else {} + headers["x-total-count"] = self.count if self.use_envelope: - data = OrderedDict([('count', self.count), ('first', first_url), ('next', next_url), - ('previous', previous_url), ('last', last_url), ('results', data)]) + data = OrderedDict( + [ + ("count", self.count), + ("first", first_url), + ("next", next_url), + ("previous", previous_url), + ("last", last_url), + ("results", data), + ] + ) if cache: cache.set(data, **cache_kwargs) @@ -89,7 +97,7 @@ def get_last_link(self): return replace_query_param(url, self.offset_query_param, offset) def is_paginate(self, request): - return (request.GET.get(self.limit_query_param) or request.GET.get(self.offset_query_param)) + return request.GET.get(self.limit_query_param) or request.GET.get(self.offset_query_param) def pagination_params(self, request): return { diff --git a/breathecode/utils/i18n.py b/breathecode/utils/i18n.py index c6ded774a..166106b8e 100644 --- a/breathecode/utils/i18n.py +++ b/breathecode/utils/i18n.py @@ -12,9 +12,9 @@ from breathecode.utils.exceptions import MalformedLanguageCode -__all__ = ['translation', 'format_date', 'format_datetime', 'format_time', 'format_timedelta'] +__all__ = ["translation", "format_date", "format_datetime", "format_time", "format_timedelta"] -IS_TEST_ENV = os.getenv('ENV') == 'test' +IS_TEST_ENV = os.getenv("ENV") == "test" logger = logging.getLogger(__name__) @@ -29,45 +29,45 @@ def format_and_assert_code(code: str, from_kwargs: bool = False) -> None: # first two character only with lowercase if not code[:2].islower(): - raise MalformedLanguageCode('Lang code is not lowercase') + raise 
MalformedLanguageCode("Lang code is not lowercase") # last two character only with lowercase if not is_short and from_kwargs and not code[3:].islower(): - raise MalformedLanguageCode('Country code is not lowercase') + raise MalformedLanguageCode("Country code is not lowercase") # last two character only with uppercase elif not is_short and not from_kwargs and not code[2:].isupper(): - raise MalformedLanguageCode('Country code is not uppercase') + raise MalformedLanguageCode("Country code is not uppercase") - separator = '_' if from_kwargs else '-' + separator = "_" if from_kwargs else "-" - #the format is en or en-US + # the format is en or en-US if not (len(code) == 2 or (len(code) == 5 and code[2] == separator)): - raise MalformedLanguageCode('Code malformed') + raise MalformedLanguageCode("Code malformed") if not from_kwargs: - return code.replace(separator, '_') + return code.replace(separator, "_") return code # parse a date to a str with the local format -def format_date(code: Optional[str], date: date, format='medium'): +def format_date(code: Optional[str], date: date, format="medium"): """Translate the date to the local language.""" if not code: - code = 'en' + code = "en" code = format_and_assert_code(code) return babel_format_date(date, locale=code, format=format) # parse a date to a str with the local format -def format_datetime(code: Optional[str], date: datetime, tz: pytz.BaseTzInfo | str = pytz.UTC, format='medium'): +def format_datetime(code: Optional[str], date: datetime, tz: pytz.BaseTzInfo | str = pytz.UTC, format="medium"): """Translate the datetime to the local language.""" if not code: - code = 'en' + code = "en" code = format_and_assert_code(code) @@ -77,11 +77,11 @@ def format_datetime(code: Optional[str], date: datetime, tz: pytz.BaseTzInfo | s return babel_format_datetime(date, locale=code, tzinfo=tz, format=format) -def format_time(code: Optional[str], date: time, format='full', **kwargs: str): +def format_time(code: Optional[str], date: time, format="full", **kwargs: str): """Translate the time to the local language.""" if not code: - code = 'en' + code = "en" code = format_and_assert_code(code) return babel_format_time(date, locale=code, format=format) @@ -91,7 +91,7 @@ def format_timedelta(code: Optional[str], date: time): """Translate the timedelta to the local language.""" if not code: - code = 'en' + code = "en" code = format_and_assert_code(code) return babel_format_timedelta(date, locale=code) @@ -102,24 +102,25 @@ def format_languages(code: str) -> list: languages = set() - code.replace(' ', '') + code.replace(" ", "") - codes = [x for x in code.split(',') if x] + codes = [x for x in code.split(",") if x] for code in codes: priority = 1 - if ';q=' in code: - s = code.split(';q=') + if ";q=" in code: + s = code.split(";q=") code = s[0] try: priority = float(s[1]) except Exception: - raise MalformedLanguageCode('The priority is not a float, example: "en;q=0.5"', - slug='malformed-quantity-language-code') + raise MalformedLanguageCode( + 'The priority is not a float, example: "en;q=0.5"', slug="malformed-quantity-language-code" + ) languages.add((priority, code)) - return [x[1] for x in sorted(languages, key=lambda x: (x[0], '-' in x[1], x[1]), reverse=True)] + return [x[1] for x in sorted(languages, key=lambda x: (x[0], "-" in x[1], x[1]), reverse=True)] def try_to_translate(code, **kwargs: str) -> str | None: @@ -135,11 +136,11 @@ def try_to_translate(code, **kwargs: str) -> str | None: @cache -def translation(code: Optional[str] = 'en', slug: 
Optional[str] = None, **kwargs: str) -> str: +def translation(code: Optional[str] = "en", slug: Optional[str] = None, **kwargs: str) -> str: """Get the translation.""" if not code: - code = 'en' + code = "en" languages = [format_and_assert_code(language) for language in format_languages(code)] @@ -148,8 +149,8 @@ def translation(code: Optional[str] = 'en', slug: Optional[str] = None, **kwargs format_and_assert_code(key, from_kwargs=True) # the english if mandatory - if not ('en' in kwargs or 'en_us' in kwargs): - raise MalformedLanguageCode('The english translation is mandatory') + if not ("en" in kwargs or "en_us" in kwargs): + raise MalformedLanguageCode("The english translation is mandatory") if slug and IS_TEST_ENV: return slug @@ -160,7 +161,7 @@ def translation(code: Optional[str] = 'en', slug: Optional[str] = None, **kwargs if v: return v - if 'en_us' in kwargs: - return kwargs['en_us'] + if "en_us" in kwargs: + return kwargs["en_us"] - return kwargs['en'] + return kwargs["en"] diff --git a/breathecode/utils/integer_to_base.py b/breathecode/utils/integer_to_base.py index ddfa6d900..2d49466da 100644 --- a/breathecode/utils/integer_to_base.py +++ b/breathecode/utils/integer_to_base.py @@ -1,4 +1,4 @@ -BS = '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ-' +BS = "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ-" def to_base(n, b=None): @@ -6,8 +6,8 @@ def to_base(n, b=None): if b is None: b = len(BS) - res = '' + res = "" while n: res += BS[n % b] n //= b - return res[::-1] or '0' + return res[::-1] or "0" diff --git a/breathecode/utils/io/file.py b/breathecode/utils/io/file.py index 6a6bf480b..3ad9cd603 100644 --- a/breathecode/utils/io/file.py +++ b/breathecode/utils/io/file.py @@ -4,23 +4,22 @@ from django.core.files.uploadedfile import InMemoryUploadedFile, TemporaryUploadedFile from typing import Optional, overload -__all__ = ['cut_csv', 'count_csv_rows', 'count_file_lines'] +__all__ = ["cut_csv", "count_csv_rows", "count_file_lines"] logger = logging.getLogger(__name__) @overload -def _cut_csv(f: StringIO | TextIOWrapper, *, start: int, end: int) -> StringIO: - ... +def _cut_csv(f: StringIO | TextIOWrapper, *, start: int, end: int) -> StringIO: ... @overload -def _cut_csv(f: BytesIO | BufferedReader | InMemoryUploadedFile, *, start: int, end: int) -> BytesIO: - ... +def _cut_csv(f: BytesIO | BufferedReader | InMemoryUploadedFile, *, start: int, end: int) -> BytesIO: ... 
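# Illustrative usage (assumed values, not part of the patch): a minimal
# sketch of the `translation` helper from the i18n module reformatted
# above. The `lang` value would normally come from the request's
# Accept-Language header; the message texts and the slug are made up.
# An English translation is mandatory, and when ENV=test the slug is
# returned instead of the translated text.

from breathecode.utils.i18n import translation

lang = "es-VE,es;q=0.9,en;q=0.8"

message = translation(
    lang,
    en="You do not have enough credits",
    es="No tienes suficientes créditos",
    slug="not-enough-credits",
)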
-def _cut_csv(f: StringIO | BytesIO | BufferedReader | TextIOWrapper | InMemoryUploadedFile, *, start: int, - end: int) -> StringIO | BytesIO: +def _cut_csv( + f: StringIO | BytesIO | BufferedReader | TextIOWrapper | InMemoryUploadedFile, *, start: int, end: int +) -> StringIO | BytesIO: """Cut a csv file from start to end line ignoring the header in the row count.""" f.seek(0) @@ -28,8 +27,12 @@ def _cut_csv(f: StringIO | BytesIO | BufferedReader | TextIOWrapper | InMemoryUp if isinstance(f, StringIO) or isinstance(f, TextIOWrapper): res = StringIO() - elif isinstance(f, BytesIO) or isinstance(f, BufferedReader) or isinstance(f, InMemoryUploadedFile) or isinstance( - f, TemporaryUploadedFile): + elif ( + isinstance(f, BytesIO) + or isinstance(f, BufferedReader) + or isinstance(f, InMemoryUploadedFile) + or isinstance(f, TemporaryUploadedFile) + ): res = BytesIO() if isinstance(f, InMemoryUploadedFile): @@ -58,13 +61,11 @@ def _cut_csv(f: StringIO | BytesIO | BufferedReader | TextIOWrapper | InMemoryUp @overload -def _first_lines_of_csv(f: StringIO | TextIOWrapper, *, last: int) -> StringIO: - ... +def _first_lines_of_csv(f: StringIO | TextIOWrapper, *, last: int) -> StringIO: ... @overload -def _first_lines_of_csv(f: BytesIO | BufferedReader | InMemoryUploadedFile, *, last: int) -> BytesIO: - ... +def _first_lines_of_csv(f: BytesIO | BufferedReader | InMemoryUploadedFile, *, last: int) -> BytesIO: ... def _first_lines_of_csv(f: StringIO | BytesIO | BufferedReader | TextIOWrapper, *, first: int) -> StringIO | BytesIO: @@ -74,8 +75,12 @@ def _first_lines_of_csv(f: StringIO | BytesIO | BufferedReader | TextIOWrapper, if isinstance(f, StringIO) or isinstance(f, TextIOWrapper): res = StringIO() - elif isinstance(f, BytesIO) or isinstance(f, BufferedReader) or isinstance(f, InMemoryUploadedFile) or isinstance( - f, TemporaryUploadedFile): + elif ( + isinstance(f, BytesIO) + or isinstance(f, BufferedReader) + or isinstance(f, InMemoryUploadedFile) + or isinstance(f, TemporaryUploadedFile) + ): res = BytesIO() if isinstance(f, InMemoryUploadedFile): @@ -101,25 +106,27 @@ def _first_lines_of_csv(f: StringIO | BytesIO | BufferedReader | TextIOWrapper, @overload -def _last_lines_of_csv(f: StringIO | TextIOWrapper, *, last: int) -> StringIO: - ... +def _last_lines_of_csv(f: StringIO | TextIOWrapper, *, last: int) -> StringIO: ... @overload -def _last_lines_of_csv(f: BytesIO | BufferedReader | InMemoryUploadedFile, *, last: int) -> BytesIO: - ... +def _last_lines_of_csv(f: BytesIO | BufferedReader | InMemoryUploadedFile, *, last: int) -> BytesIO: ... 
def _last_lines_of_csv(f: StringIO | BytesIO | BufferedReader | TextIOWrapper, *, last: int) -> StringIO | BytesIO: if isinstance(f, StringIO) or isinstance(f, TextIOWrapper): res = StringIO() - line = '' - - elif isinstance(f, BytesIO) or isinstance(f, BufferedReader) or isinstance(f, InMemoryUploadedFile) or isinstance( - f, TemporaryUploadedFile): + line = "" + + elif ( + isinstance(f, BytesIO) + or isinstance(f, BufferedReader) + or isinstance(f, InMemoryUploadedFile) + or isinstance(f, TemporaryUploadedFile) + ): res = BytesIO() - line = b'' + line = b"" if isinstance(f, InMemoryUploadedFile): f = f.file @@ -140,17 +147,17 @@ def _last_lines_of_csv(f: StringIO | BytesIO | BufferedReader | TextIOWrapper, * while position >= 0: f.seek(position) next_char = f.read(1) - if next_char == '\n': - if line != '': + if next_char == "\n": + if line != "": lines.append(line[::-1]) - line = '' + line = "" - if next_char == b'\n': - if line != b'': + if next_char == b"\n": + if line != b"": lines.append(line[::-1]) - line = b'' + line = b"" else: line += next_char @@ -165,10 +172,10 @@ def _last_lines_of_csv(f: StringIO | BytesIO | BufferedReader | TextIOWrapper, * res.write(line) if isinstance(line, bytes): - res.write(b'\r\n') + res.write(b"\r\n") else: - res.write('\r\n') + res.write("\r\n") res.seek(0) @@ -176,51 +183,47 @@ def _last_lines_of_csv(f: StringIO | BytesIO | BufferedReader | TextIOWrapper, * @overload -def cut_csv(f: StringIO | TextIOWrapper, *, start: int, end: int) -> StringIO: - ... +def cut_csv(f: StringIO | TextIOWrapper, *, start: int, end: int) -> StringIO: ... @overload -def cut_csv(f: BytesIO | BufferedReader | InMemoryUploadedFile, *, start: int, end: int) -> BytesIO: - ... +def cut_csv(f: BytesIO | BufferedReader | InMemoryUploadedFile, *, start: int, end: int) -> BytesIO: ... @overload -def cut_csv(f: StringIO | TextIOWrapper, *, first: int) -> StringIO: - ... +def cut_csv(f: StringIO | TextIOWrapper, *, first: int) -> StringIO: ... @overload -def cut_csv(f: BytesIO | BufferedReader | InMemoryUploadedFile, *, first: int) -> BytesIO: - ... +def cut_csv(f: BytesIO | BufferedReader | InMemoryUploadedFile, *, first: int) -> BytesIO: ... @overload -def cut_csv(f: StringIO | TextIOWrapper, *, last: int) -> StringIO: - ... +def cut_csv(f: StringIO | TextIOWrapper, *, last: int) -> StringIO: ... @overload -def cut_csv(f: BytesIO | BufferedReader | InMemoryUploadedFile, *, last: int) -> BytesIO: - ... +def cut_csv(f: BytesIO | BufferedReader | InMemoryUploadedFile, *, last: int) -> BytesIO: ... 
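The overload stubs above spell out the three mutually exclusive ways to call cut_csv -- a start/end row range, the first N rows, or the last N rows -- for both text and binary file objects, and the implementation that follows rejects any combination of them. A minimal sketch of the call shapes, assuming a small in-memory CSV and the import path from the diff header:

from io import StringIO

from breathecode.utils.io.file import cut_csv

f = StringIO("name,value\r\na,1\r\nb,2\r\nc,3\r\n")  # hypothetical content

by_range = cut_csv(f, start=1, end=2)  # row range; the header is ignored in the row count
head = cut_csv(f, first=2)             # first rows only
tail = cut_csv(f, last=2)              # last rows only
# cut_csv(f, first=1, last=1) raises: first and last cannot be combined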
-def cut_csv(f: StringIO | BytesIO | BufferedReader | TextIOWrapper | InMemoryUploadedFile, - *, - start: Optional[int] = None, - end: Optional[int] = None, - first: Optional[int] = None, - last: Optional[int] = None) -> StringIO | BytesIO: +def cut_csv( + f: StringIO | BytesIO | BufferedReader | TextIOWrapper | InMemoryUploadedFile, + *, + start: Optional[int] = None, + end: Optional[int] = None, + first: Optional[int] = None, + last: Optional[int] = None +) -> StringIO | BytesIO: """Cut a csv file.""" if isinstance(start, int) and isinstance(end, int) and isinstance(last, int): - raise Exception('You cannot use start/end and last at the same time') + raise Exception("You cannot use start/end and last at the same time") if isinstance(first, int) and isinstance(end, int) and isinstance(first, int): - raise Exception('You cannot use first/end and first at the same time') + raise Exception("You cannot use first/end and first at the same time") if isinstance(first, int) and isinstance(last, int): - raise Exception('You cannot use first and last at the same time') + raise Exception("You cannot use first and last at the same time") if isinstance(start, int) and isinstance(end, int): return _cut_csv(f, start=start, end=end) diff --git a/breathecode/utils/io/logger.py b/breathecode/utils/io/logger.py index d6ea80503..b06b729c1 100644 --- a/breathecode/utils/io/logger.py +++ b/breathecode/utils/io/logger.py @@ -4,11 +4,11 @@ from logging import root from typing import Annotated, Optional -IS_TEST_ENV = 'ENV' in os.environ and os.environ['ENV'] == 'test' +IS_TEST_ENV = "ENV" in os.environ and os.environ["ENV"] == "test" -__all__ = ['getLogger', 'Logger'] +__all__ = ["getLogger", "Logger"] -Base = Annotated[BaseLogger, 'The original Logger'] +Base = Annotated[BaseLogger, "The original Logger"] def getLogger(name: Optional[str] = None): # noqa: N802 diff --git a/breathecode/utils/localize_query.py b/breathecode/utils/localize_query.py index 917bde027..f82659806 100644 --- a/breathecode/utils/localize_query.py +++ b/breathecode/utils/localize_query.py @@ -3,7 +3,7 @@ logger = logging.getLogger(__name__) -__all__ = ['localize_query'] +__all__ = ["localize_query"] def localize_query(query, request, matcher=None): @@ -13,11 +13,11 @@ def localize_query(query, request, matcher=None): if isinstance(request.user, AnonymousUser): return None - academy_ids = ProfileAcademy.objects.filter(user=request.user).values_list('academy__id', flat=True) + academy_ids = ProfileAcademy.objects.filter(user=request.user).values_list("academy__id", flat=True) kwargs = {} if matcher is None: - kwargs['academy__id__in'] = academy_ids + kwargs["academy__id__in"] = academy_ids else: kwargs[matcher] = academy_ids diff --git a/breathecode/utils/locking.py b/breathecode/utils/locking.py index 496d82fba..92e44db02 100644 --- a/breathecode/utils/locking.py +++ b/breathecode/utils/locking.py @@ -36,7 +36,7 @@ class Bag(models.Model): from breathecode.utils import getLogger logger = getLogger(__name__) -ENV = os.getenv('ENV', '') +ENV = os.getenv("ENV", "") redis_client = None @@ -48,14 +48,14 @@ def get_or_create(self, lock=False, **kwargs): instance, created = None, False - if ENV != 'test': + if ENV != "test": if redis_client is None: redis_client = get_redis() # Dynamically retrieve the class name and create a unique lock key based on the kwargs class_name = self.model.__name__ - lock_key_elements = [str(kwargs.get(key, '')) for key in sorted(kwargs.keys())] + lock_key_elements = [str(kwargs.get(key, "")) for key in 
sorted(kwargs.keys())] lock_key = f"{class_name}_lock:{'_'.join(lock_key_elements)}" try: @@ -64,7 +64,7 @@ def get_or_create(self, lock=False, **kwargs): instance, created = super().get_or_create(**kwargs) except LockError: # Handle the timeout, e.g., by logging, retrying, or returning an error - logger.error(f'Could not acquire lock for {class_name} on get_or_create, operation timed out.') + logger.error(f"Could not acquire lock for {class_name} on get_or_create, operation timed out.") return None, False # Indicate that the operation was not successful else: instance, created = super().get_or_create(**kwargs) diff --git a/breathecode/utils/multi_status_response.py b/breathecode/utils/multi_status_response.py index 29362abfa..49b9518b9 100644 --- a/breathecode/utils/multi_status_response.py +++ b/breathecode/utils/multi_status_response.py @@ -3,9 +3,9 @@ from typing import Optional from django.db.models import QuerySet -__all__ = ['MultiStatusResponse'] +__all__ = ["MultiStatusResponse"] -IS_TEST_ENV = os.getenv('ENV') == 'test' +IS_TEST_ENV = os.getenv("ENV") == "test" logger = logging.getLogger(__name__) @@ -14,18 +14,20 @@ class MultiStatusResponse: detail: Optional[str] = None queryset: Optional[QuerySet] = None - def __init__(self, - details: Optional[str] = None, - code: int = 200, - slug: Optional[str] = None, - queryset: Optional[QuerySet] = None): + def __init__( + self, + details: Optional[str] = None, + code: int = 200, + slug: Optional[str] = None, + queryset: Optional[QuerySet] = None, + ): self.status_code = code self.detail = slug if IS_TEST_ENV and slug else details self.queryset = queryset if code >= 400: - logger.error(f'Status {str(self.status_code)} - {self.detail}') + logger.error(f"Status {str(self.status_code)} - {self.detail}") def _get_response_info(self): - return {'status_code': self.status_code, 'detail': self.detail, 'queryset': self.queryset} + return {"status_code": self.status_code, "detail": self.detail, "queryset": self.queryset} diff --git a/breathecode/utils/ndb.py b/breathecode/utils/ndb.py index e458e882f..d4a225812 100644 --- a/breathecode/utils/ndb.py +++ b/breathecode/utils/ndb.py @@ -1,18 +1,20 @@ from breathecode.services.google_cloud.credentials import resolve_credentials -__all__ = ['NDB'] +__all__ = ["NDB"] class NDB: def __init__(self, model): from google.cloud import ndb + resolve_credentials() self.client = ndb.Client() self.Model = model def fetch(self, query, **kwargs): from google.cloud import ndb + client = ndb.Client() with client.context(): @@ -23,6 +25,7 @@ def fetch(self, query, **kwargs): def count(self, query): from google.cloud import ndb + client = ndb.Client() with client.context(): diff --git a/breathecode/utils/num_to_roman.py b/breathecode/utils/num_to_roman.py index 6832f3d18..a263878e5 100644 --- a/breathecode/utils/num_to_roman.py +++ b/breathecode/utils/num_to_roman.py @@ -1,30 +1,30 @@ ROMAN = [ - (1000, 'M'), - (900, 'CM'), - (500, 'D'), - (400, 'CD'), - (100, 'C'), - (90, 'XC'), - (50, 'L'), - (40, 'XL'), - (10, 'X'), - (9, 'IX'), - (5, 'V'), - (4, 'IV'), - (1, 'I'), + (1000, "M"), + (900, "CM"), + (500, "D"), + (400, "CD"), + (100, "C"), + (90, "XC"), + (50, "L"), + (40, "XL"), + (10, "X"), + (9, "IX"), + (5, "V"), + (4, "IV"), + (1, "I"), ] -__all__ = ['num_to_roman'] +__all__ = ["num_to_roman"] def num_to_roman(number, lower=False): result = [] - for (arabic, roman) in ROMAN: + for arabic, roman in ROMAN: (factor, number) = divmod(number, arabic) result.append(roman * factor) if number == 0: break - res = 
''.join(result) + res = "".join(result) if lower: res = res.lower() diff --git a/breathecode/utils/object.py b/breathecode/utils/object.py index cb7bda307..d6a6b4d04 100644 --- a/breathecode/utils/object.py +++ b/breathecode/utils/object.py @@ -1,4 +1,4 @@ -__all__ = ['Object'] +__all__ = ["Object"] class Object(object): diff --git a/breathecode/utils/permissions.py b/breathecode/utils/permissions.py index 4e3af49bd..7ded5f94e 100644 --- a/breathecode/utils/permissions.py +++ b/breathecode/utils/permissions.py @@ -1,10 +1,10 @@ -__all__ = ['permissions'] +__all__ = ["permissions"] permissions = ( - ('blog_view', 'can view blog posts and categories'), - ('blog_edit', 'can edit blog category and post'), - ('support_view', 'can view tickets'), - ('support_edit', 'can edit tickets'), - ('activity_view', 'can view recruiters, applicants, data, posts'), - ('activity_edit', 'can edit data'), + ("blog_view", "can view blog posts and categories"), + ("blog_edit", "can edit blog category and post"), + ("support_view", "can view tickets"), + ("support_edit", "can edit tickets"), + ("activity_view", "can view recruiters, applicants, data, posts"), + ("activity_edit", "can edit data"), ) diff --git a/breathecode/utils/redis.py b/breathecode/utils/redis.py index ddf50b204..bc621cd15 100644 --- a/breathecode/utils/redis.py +++ b/breathecode/utils/redis.py @@ -1,8 +1,8 @@ from django.core.cache import cache -IS_DJANGO_REDIS = hasattr(cache, 'delete_pattern') +IS_DJANGO_REDIS = hasattr(cache, "delete_pattern") -__all__ = ['Lock'] +__all__ = ["Lock"] if IS_DJANGO_REDIS: from redis.lock import Lock diff --git a/breathecode/utils/response_207.py b/breathecode/utils/response_207.py index 03ef171b2..f641dcf23 100644 --- a/breathecode/utils/response_207.py +++ b/breathecode/utils/response_207.py @@ -1,40 +1,46 @@ from rest_framework import status from rest_framework.response import Response -__all__ = ['response_207'] +__all__ = ["response_207"] def format_response(data, key): response = {} - if 'detail' in data and data['detail']: - response['detail'] = data['detail'] - - if 'status_code' in data: - response['status_code'] = data['status_code'] - - if 'queryset' in data and 'status_code' in data and data['status_code'] == 404: - response['resources'] = [{ - 'pk': x, - 'display_field': 'pk', - 'display_value': x, - } for x in data['queryset']] - - elif 'queryset' in data: - - response['resources'] = [{ - 'pk': x.pk, - 'display_field': key, - 'display_value': getattr(x, key) if hasattr(x, key) else None, - } for x in data['queryset']] + if "detail" in data and data["detail"]: + response["detail"] = data["detail"] + + if "status_code" in data: + response["status_code"] = data["status_code"] + + if "queryset" in data and "status_code" in data and data["status_code"] == 404: + response["resources"] = [ + { + "pk": x, + "display_field": "pk", + "display_value": x, + } + for x in data["queryset"] + ] + + elif "queryset" in data: + + response["resources"] = [ + { + "pk": x.pk, + "display_field": key, + "display_value": getattr(x, key) if hasattr(x, key) else None, + } + for x in data["queryset"] + ] return response def response_207(responses, display_name): alls = [x._get_response_info() for x in responses] - success = [format_response(x, display_name) for x in alls if x['status_code'] < 400] - failure = [format_response(x, display_name) for x in alls if x['status_code'] >= 400] + success = [format_response(x, display_name) for x in alls if x["status_code"] < 400] + failure = [format_response(x, display_name) for x in 
alls if x["status_code"] >= 400] - content = {'success': success, 'failure': failure} + content = {"success": success, "failure": failure} return Response(content, status=status.HTTP_207_MULTI_STATUS) diff --git a/breathecode/utils/script_notification.py b/breathecode/utils/script_notification.py index e30b3ee4d..ab8fab891 100644 --- a/breathecode/utils/script_notification.py +++ b/breathecode/utils/script_notification.py @@ -1,4 +1,4 @@ -__all__ = ['ScriptNotification'] +__all__ = ["ScriptNotification"] class ScriptNotification(Exception): diff --git a/breathecode/utils/serializers.py b/breathecode/utils/serializers.py index 9cc855e0c..8438db7f7 100644 --- a/breathecode/utils/serializers.py +++ b/breathecode/utils/serializers.py @@ -6,13 +6,13 @@ class ModelSerializer(ModelSerializer): status_fields = [] def __init__(self, *args, **kwargs): - has_data = 'data' in kwargs + has_data = "data" in kwargs - if has_data and isinstance(kwargs['data'], list): - kwargs['data'] = [self._format_values(x) for x in kwargs['data']] + if has_data and isinstance(kwargs["data"], list): + kwargs["data"] = [self._format_values(x) for x in kwargs["data"]] - elif has_data and isinstance(kwargs['data'], dict): - kwargs['data'] = self._format_values(kwargs['data']) + elif has_data and isinstance(kwargs["data"], dict): + kwargs["data"] = self._format_values(kwargs["data"]) super().__init__(*args, **kwargs) diff --git a/breathecode/utils/serpy/datetime_integer_field.py b/breathecode/utils/serpy/datetime_integer_field.py index c11662324..ed58f0903 100644 --- a/breathecode/utils/serpy/datetime_integer_field.py +++ b/breathecode/utils/serpy/datetime_integer_field.py @@ -2,7 +2,7 @@ from ..datetime_integer import DatetimeInteger -__all__ = ['DatetimeIntegerField'] +__all__ = ["DatetimeIntegerField"] class DatetimeIntegerField(Field): @@ -16,7 +16,7 @@ def as_getter(self, serializer_field_name, serializer_cls): method_name = self.method if method_name is None: - method_name = 'get_{0}'.format(serializer_field_name) + method_name = "get_{0}".format(serializer_field_name) wrapper = self.__datetime_integer__ handler = lambda self, obj: wrapper(serializer_field_name, obj) diff --git a/breathecode/utils/serpy/field.py b/breathecode/utils/serpy/field.py index b0543ea6d..93668e637 100644 --- a/breathecode/utils/serpy/field.py +++ b/breathecode/utils/serpy/field.py @@ -1,7 +1,6 @@ import serpy -__all__ = ['Field'] +__all__ = ["Field"] -class Field(serpy.Field): - ... +class Field(serpy.Field): ... diff --git a/breathecode/utils/serpy/many_to_many_field.py b/breathecode/utils/serpy/many_to_many_field.py index 8ba3ee394..841fc940c 100644 --- a/breathecode/utils/serpy/many_to_many_field.py +++ b/breathecode/utils/serpy/many_to_many_field.py @@ -1,6 +1,6 @@ from serpy.fields import Field -__all__ = ['ManyToManyField'] +__all__ = ["ManyToManyField"] class ManyToManyField(Field): diff --git a/breathecode/utils/serpy/method_field.py b/breathecode/utils/serpy/method_field.py index 5174fc8b8..d9111af98 100644 --- a/breathecode/utils/serpy/method_field.py +++ b/breathecode/utils/serpy/method_field.py @@ -1,7 +1,6 @@ import serpy -__all__ = ['MethodField'] +__all__ = ["MethodField"] -class MethodField(serpy.MethodField): - ... +class MethodField(serpy.MethodField): ... 
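The serpy modules above are thin aliases over serpy's own Field, ManyToManyField and MethodField, and the Serializer diff that follows is what walks these fields to accumulate select_related/prefetch_related hints. A minimal sketch of how such a serializer is typically declared and consumed -- the model and field names are hypothetical, the import assumes the breathecode.utils.serpy package re-exports what each module lists in __all__, and only the get_<field> convention for MethodField is taken from the code itself:

import breathecode.utils.serpy as serpy
from types import SimpleNamespace

class AcademySmallSerializer(serpy.Serializer):
    id = serpy.Field()
    name = serpy.Field()

class CohortSmallSerializer(serpy.Serializer):
    id = serpy.Field()
    slug = serpy.Field()
    academy = AcademySmallSerializer()  # nested serializer, contributes "academy__..." hints
    stage = serpy.MethodField()         # resolved through get_stage(), per the get_{0} convention

    def get_stage(self, obj):
        return obj.stage.lower() if obj.stage else None

# any attribute-bearing object works; hypothetical sample data
academy = SimpleNamespace(id=1, name="Downtown")
cohort = SimpleNamespace(id=1, slug="web-dev-1", academy=academy, stage="STARTED")

CohortSmallSerializer(cohort).data
# roughly: {"id": 1, "slug": "web-dev-1", "academy": {"id": 1, "name": "Downtown"}, "stage": "started"}

With many=True and a Django QuerySet, the data property in the serializer.py diff below additionally applies the accumulated _select_related/_prefetch_related hints before evaluating the query.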
diff --git a/breathecode/utils/serpy/serializer.py b/breathecode/utils/serpy/serializer.py index 51041b4ca..769152264 100644 --- a/breathecode/utils/serpy/serializer.py +++ b/breathecode/utils/serpy/serializer.py @@ -8,7 +8,7 @@ from .many_to_many_field import ManyToManyField from .method_field import MethodField -__all__ = ['Serializer'] +__all__ = ["Serializer"] SERPY_FIELDS = [ Field, @@ -42,13 +42,13 @@ def __new__(cls, *args, **kwargs): return super().__new__(cls) def __init__(self, *args, **kwargs): - kwargs.pop('select', '') + kwargs.pop("select", "") # select = kwargs.pop('select', '') # if select: # self._custom_select(select) - if 'context' in kwargs: - self.context = kwargs['context'] + if "context" in kwargs: + self.context = kwargs["context"] # fix it # self.__class__._select_related = set() @@ -58,24 +58,24 @@ def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) def _custom_select(self, include): - include = [x for x in include.split(',') if x] + include = [x for x in include.split(",") if x] for include_field in include: if not hasattr(self, include_field): - raise ValidationException(f'The field {include_field} is not defined in the serializer') + raise ValidationException(f"The field {include_field} is not defined in the serializer") attr = getattr(self, include_field) if isinstance(attr, Field): setattr(self, include_field, serpy.Field()) continue - method_field = f'get_{include_field}' + method_field = f"get_{include_field}" if isinstance(attr, MethodField) and hasattr(self, method_field) and callable(getattr(self, method_field)): setattr(self, include_field, serpy.MethodField()) continue - raise ValidationException(f'The field {include_field} is not a allowed field or is bad configured') + raise ValidationException(f"The field {include_field} is not a allowed field or is bad configured") def _load_ref(self): if self._loaded: @@ -99,8 +99,10 @@ def _load_ref(self): if self._field_map[key].__class__ == ManyToManyField: serializer = self._field_map[key].serializer - if not (hasattr(self._field_map[key].serializer.__class__, '_select_related') - ^ hasattr(self._field_map[key].serializer.__class__, '_prefetch_related')): + if not ( + hasattr(self._field_map[key].serializer.__class__, "_select_related") + ^ hasattr(self._field_map[key].serializer.__class__, "_prefetch_related") + ): select_related, prefetch_related = serializer._load_ref() else: select_related = self._field_map[key].serializer.__class__._select_related @@ -109,18 +111,20 @@ def _load_ref(self): select_related, prefetch_related = serializer._load_ref() else: - if not (hasattr(self._field_map[key].__class__, '_select_related') - ^ hasattr(self._field_map[key].__class__, '_prefetch_related')): + if not ( + hasattr(self._field_map[key].__class__, "_select_related") + ^ hasattr(self._field_map[key].__class__, "_prefetch_related") + ): select_related, prefetch_related = self._field_map[key]._load_ref() else: select_related = self._field_map[key].__class__._select_related prefetch_related = self._field_map[key].__class__._prefetch_related for x in select_related: - self.__class__._select_related.add(f'{key}__{x}') + self.__class__._select_related.add(f"{key}__{x}") for x in prefetch_related: - self.__class__._prefetch_related.add(f'{key}__{x}') + self.__class__._prefetch_related.add(f"{key}__{x}") self._loaded = True @@ -132,9 +136,10 @@ def data(self): if not self.__class__._loaded: self._load_ref() - if self.many and isinstance(self.instance, QuerySet) and not hasattr(self, 'child'): + if 
self.many and isinstance(self.instance, QuerySet) and not hasattr(self, "child"): self.instance = self.instance.select_related(*self.__class__._select_related).prefetch_related( - *self.__class__._prefetch_related) + *self.__class__._prefetch_related + ) data = super().data return data diff --git a/breathecode/utils/serpy_extensions/extensions/datetime_integer_field.py b/breathecode/utils/serpy_extensions/extensions/datetime_integer_field.py index 273b85d46..439f74f80 100644 --- a/breathecode/utils/serpy_extensions/extensions/datetime_integer_field.py +++ b/breathecode/utils/serpy_extensions/extensions/datetime_integer_field.py @@ -2,7 +2,7 @@ from ...datetime_integer import DatetimeInteger -__all__ = ['DatetimeIntegerField'] +__all__ = ["DatetimeIntegerField"] class DatetimeIntegerField(Field): @@ -16,7 +16,7 @@ def as_getter(self, serializer_field_name, serializer_cls): method_name = self.method if method_name is None: - method_name = 'get_{0}'.format(serializer_field_name) + method_name = "get_{0}".format(serializer_field_name) wrapper = self.__datetime_integer__ handler = lambda self, obj: wrapper(serializer_field_name, obj) diff --git a/breathecode/utils/serpy_extensions/serpy_extensions.py b/breathecode/utils/serpy_extensions/serpy_extensions.py index 52104a91a..bc8c5e5fc 100644 --- a/breathecode/utils/serpy_extensions/serpy_extensions.py +++ b/breathecode/utils/serpy_extensions/serpy_extensions.py @@ -1,9 +1,9 @@ from .extensions import DatetimeIntegerField -__all__ = ['SerpyExtensions'] +__all__ = ["SerpyExtensions"] -class SerpyExtensions(): +class SerpyExtensions: @staticmethod def DatetimeIntegerField(*args, **kwargs): # noqa: N802 diff --git a/breathecode/utils/sqlalchemy/big_query.py b/breathecode/utils/sqlalchemy/big_query.py index 222753ee8..8e2ba93fa 100644 --- a/breathecode/utils/sqlalchemy/big_query.py +++ b/breathecode/utils/sqlalchemy/big_query.py @@ -1,5 +1,5 @@ from sqlalchemy.ext.declarative import declarative_base -__all__ = ['BigQueryBase'] +__all__ = ["BigQueryBase"] BigQueryBase = declarative_base() diff --git a/breathecode/utils/sqlalchemy/test_support.py b/breathecode/utils/sqlalchemy/test_support.py index e7d959793..f34396578 100644 --- a/breathecode/utils/sqlalchemy/test_support.py +++ b/breathecode/utils/sqlalchemy/test_support.py @@ -2,17 +2,17 @@ import sys import inspect -__all__ = ['test_support'] +__all__ = ["test_support"] def test_support(module): - if os.getenv('ENV') != 'test': + if os.getenv("ENV") != "test": return from .big_query import BigQueryBase for x in dir(sys.modules[module]): - if '__' in x: + if "__" in x: continue loaded_module = getattr(sys.modules[module], x, None) @@ -26,4 +26,4 @@ def test_support(module): if not issubclass(loaded_module, BigQueryBase): continue - loaded_module.__name__ = loaded_module.__name__.replace('.', '__') + loaded_module.__name__ = loaded_module.__name__.replace(".", "__") diff --git a/breathecode/utils/tests/api_view_extensions/tests_api_view_extensions.py b/breathecode/utils/tests/api_view_extensions/tests_api_view_extensions.py index 043bf42bc..2916f375d 100644 --- a/breathecode/utils/tests/api_view_extensions/tests_api_view_extensions.py +++ b/breathecode/utils/tests/api_view_extensions/tests_api_view_extensions.py @@ -23,28 +23,28 @@ def serialize_cache_object(data, headers={}): res = { - 'headers': { - 'Content-Type': 'application/json', + "headers": { + "Content-Type": "application/json", **headers, }, - 'content': json.dumps(data).encode('utf-8'), + "content": json.dumps(data).encode("utf-8"), } 
return res def assert_pagination(headers: dict, limit, offset, lenght): - assert 'Link' in headers - assert 'X-Total-Count' in headers - assert 'X-Page' in headers - assert 'X-Per-Page' in headers + assert "Link" in headers + assert "X-Total-Count" in headers + assert "X-Page" in headers + assert "X-Per-Page" in headers - assert headers['X-Total-Count'] == str(lenght) + assert headers["X-Total-Count"] == str(lenght) # assert headers['X-Total-Page'] == str(int(lenght / limit)) - assert headers['X-Page'] == str(int(offset / limit) + 1) - assert headers['X-Per-Page'] == str(limit) + assert headers["X-Page"] == str(int(offset / limit) + 1) + assert headers["X-Per-Page"] == str(limit) if offset == 0: - assert headers['Link'] == ( + assert headers["Link"] == ( f'<http://testserver/the-beans-should-not-have-sugar?limit={limit}&offset={limit}>; rel="next", ' f'<http://testserver/the-beans-should-not-have-sugar?limit={limit}&offset={lenght - limit if lenght - limit>= 0 else 0}>; rel="last"' ) @@ -52,24 +52,24 @@ def assert_pagination(headers: dict, limit, offset, lenght): previous_offset = offset - limit if offset - limit >= 0 else 0 if previous_offset: - previous_offset_section = f'&offset={previous_offset}' + previous_offset_section = f"&offset={previous_offset}" else: - previous_offset_section = '' + previous_offset_section = "" - assert headers['Link'] == ( + assert headers["Link"] == ( f'<http://testserver/the-beans-should-not-have-sugar?limit={limit}>; rel="first", ' f'<http://testserver/the-beans-should-not-have-sugar?limit={limit}{previous_offset_section}>; rel="previous"' ) else: - raise NotImplemented('This case is not implemented') + raise NotImplemented("This case is not implemented") def assert_no_pagination(headers: dict, limit, offset, lenght): - assert 'Link' not in headers - assert 'X-Total-Count' not in headers - assert 'X-Page' not in headers - assert 'X-Per-Page' not in headers + assert "Link" not in headers + assert "X-Total-Count" not in headers + assert "X-Page" not in headers + assert "X-Per-Page" not in headers class GetCohortSerializer(serpy.Serializer): @@ -99,13 +99,19 @@ def get_academy(self, obj): def serialize_cache_value(data): - return str(data).replace('\'', '"').replace('None', 'null').replace('True', 'true').replace('False', - 'false').encode('utf-8') + return ( + str(data) + .replace("'", '"') + .replace("None", "null") + .replace("True", "true") + .replace("False", "false") + .encode("utf-8") + ) class CustomTestView(APIView): permission_classes = [AllowAny] - extensions = APIViewExtensions(cache=CohortCache, sort='name', paginate=True) + extensions = APIViewExtensions(cache=CohortCache, sort="name", paginate=True) def get(self, request, id=None): handler = self.extensions(request) @@ -117,18 +123,18 @@ def get(self, request, id=None): if id: item = Cohort.objects.filter(id=id).first() if not item: - raise ValidationException('Not found', code=404) + raise ValidationException("Not found", code=404) serializer = GetCohortSerializer(item, many=False) return handler.response(serializer.data) lookups = {} - if name := request.GET.get('name'): - lookups['name__in'] = name.split(',') + if name := request.GET.get("name"): + lookups["name__in"] = name.split(",") - if slug := request.GET.get('slug'): - lookups['slug__in'] = slug.split(',') + if slug := request.GET.get("slug"): + lookups["slug__in"] = slug.split(",") items = Cohort.objects.filter(**lookups) items = handler.queryset(items) @@ -138,18 +144,17 @@ def get(self, request, id=None): class 
PaginateFalseTestView(CustomTestView): - extensions = APIViewExtensions(cache=CohortCache, sort='name', paginate=False) + extensions = APIViewExtensions(cache=CohortCache, sort="name", paginate=False) class CachePerUserTestView(CustomTestView): - extensions = APIViewExtensions(cache=CohortCache, cache_per_user=True, sort='name', paginate=False) + extensions = APIViewExtensions(cache=CohortCache, cache_per_user=True, sort="name", paginate=False) class CachePrefixTestView(CustomTestView): - extensions = APIViewExtensions(cache=CohortCache, - cache_prefix='the-beans-should-not-have-sugar', - sort='name', - paginate=False) + extensions = APIViewExtensions( + cache=CohortCache, cache_prefix="the-beans-should-not-have-sugar", sort="name", paginate=False + ) class ApiViewExtensionsGetTestSuite(UtilsTestCase): @@ -157,85 +162,99 @@ class ApiViewExtensionsGetTestSuite(UtilsTestCase): 🔽🔽🔽 Spy the extensions """ - @patch.object(APIViewExtensionHandlers, '_spy_extensions', MagicMock()) + @patch.object(APIViewExtensionHandlers, "_spy_extensions", MagicMock()) def test_cache__get__spy_the_extensions(self): cache.clear() # keep before cache handling slug = self.bc.fake.slug() - self.bc.database.delete('admissions.Cohort') - model = self.bc.database.create(cohort={'slug': slug}) + self.bc.database.delete("admissions.Cohort") + model = self.bc.database.create(cohort={"slug": slug}) request = APIRequestFactory() - request = request.get(f'/the-beans-should-not-have-sugar/1') + request = request.get(f"/the-beans-should-not-have-sugar/1") view = CustomTestView.as_view() view(request) - self.assertEqual(APIViewExtensionHandlers._spy_extensions.call_args_list, [ - call(['CacheExtension', 'LanguageExtension', 'LookupExtension', 'PaginationExtension', 'SortExtension']), - ]) + self.assertEqual( + APIViewExtensionHandlers._spy_extensions.call_args_list, + [ + call( + ["CacheExtension", "LanguageExtension", "LookupExtension", "PaginationExtension", "SortExtension"] + ), + ], + ) """ 🔽🔽🔽 Spy the extension arguments """ - @patch.object(APIViewExtensionHandlers, '_spy_extension_arguments', MagicMock()) + @patch.object(APIViewExtensionHandlers, "_spy_extension_arguments", MagicMock()) def test_cache__get__spy_the_extension_arguments__view1(self): cache.clear() # keep before cache handling slug = self.bc.fake.slug() - self.bc.database.delete('admissions.Cohort') - model = self.bc.database.create(cohort={'slug': slug}) + self.bc.database.delete("admissions.Cohort") + model = self.bc.database.create(cohort={"slug": slug}) request = APIRequestFactory() - request = request.get(f'/the-beans-should-not-have-sugar/1') + request = request.get(f"/the-beans-should-not-have-sugar/1") view = CustomTestView.as_view() view(request) - self.assertEqual(APIViewExtensionHandlers._spy_extension_arguments.call_args_list, [ - call(cache=CohortCache, sort='name', paginate=True), - ]) + self.assertEqual( + APIViewExtensionHandlers._spy_extension_arguments.call_args_list, + [ + call(cache=CohortCache, sort="name", paginate=True), + ], + ) - @patch.object(APIViewExtensionHandlers, '_spy_extension_arguments', MagicMock()) + @patch.object(APIViewExtensionHandlers, "_spy_extension_arguments", MagicMock()) def test_cache__get__spy_the_extension_arguments__view2(self): cache.clear() # keep before cache handling slug = self.bc.fake.slug() - self.bc.database.delete('admissions.Cohort') - model = self.bc.database.create(cohort={'slug': slug}) + self.bc.database.delete("admissions.Cohort") + model = self.bc.database.create(cohort={"slug": slug}) request 
= APIRequestFactory() - request = request.get(f'/the-beans-should-not-have-sugar/1') + request = request.get(f"/the-beans-should-not-have-sugar/1") view = CachePerUserTestView.as_view() view(request) - self.assertEqual(APIViewExtensionHandlers._spy_extension_arguments.call_args_list, [ - call(cache=CohortCache, cache_per_user=True, sort='name', paginate=False), - ]) + self.assertEqual( + APIViewExtensionHandlers._spy_extension_arguments.call_args_list, + [ + call(cache=CohortCache, cache_per_user=True, sort="name", paginate=False), + ], + ) - @patch.object(APIViewExtensionHandlers, '_spy_extension_arguments', MagicMock()) + @patch.object(APIViewExtensionHandlers, "_spy_extension_arguments", MagicMock()) def test_cache__get__spy_the_extension_arguments__view3(self): cache.clear() # keep before cache handling slug = self.bc.fake.slug() - self.bc.database.delete('admissions.Cohort') - model = self.bc.database.create(cohort={'slug': slug}) + self.bc.database.delete("admissions.Cohort") + model = self.bc.database.create(cohort={"slug": slug}) request = APIRequestFactory() - request = request.get(f'/the-beans-should-not-have-sugar/1') + request = request.get(f"/the-beans-should-not-have-sugar/1") view = CachePrefixTestView.as_view() view(request) - self.assertEqual(APIViewExtensionHandlers._spy_extension_arguments.call_args_list, [ - call(cache=CohortCache, cache_prefix='the-beans-should-not-have-sugar', sort='name', paginate=False), - ]) + self.assertEqual( + APIViewExtensionHandlers._spy_extension_arguments.call_args_list, + [ + call(cache=CohortCache, cache_prefix="the-beans-should-not-have-sugar", sort="name", paginate=False), + ], + ) """ 🔽🔽🔽 Cache @@ -245,18 +264,22 @@ def test_cache__get__without_cache__zero_cohorts(self): cache.clear() request = APIRequestFactory() - request = request.get('/the-beans-should-not-have-sugar') - - key = 'Cohort__' + urllib.parse.urlencode(sorted({ - 'request.path': '/the-beans-should-not-have-sugar', - }.items())) + request = request.get("/the-beans-should-not-have-sugar") + + key = "Cohort__" + urllib.parse.urlencode( + sorted( + { + "request.path": "/the-beans-should-not-have-sugar", + }.items() + ) + ) view = CustomTestView.as_view() response = view(request) expected = [] - self.assertEqual(json.loads(response.content.decode('utf-8')), expected) + self.assertEqual(json.loads(response.content.decode("utf-8")), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(cohort_cache.keys(), {key}) @@ -266,19 +289,23 @@ def test_cache__get__without_cache__one_cohort(self): model = self.bc.database.create(cohort=1) - key = 'Cohort__' + urllib.parse.urlencode(sorted({ - 'request.path': '/the-beans-should-not-have-sugar', - }.items())) + key = "Cohort__" + urllib.parse.urlencode( + sorted( + { + "request.path": "/the-beans-should-not-have-sugar", + }.items() + ) + ) request = APIRequestFactory() - request = request.get('/the-beans-should-not-have-sugar') + request = request.get("/the-beans-should-not-have-sugar") view = CustomTestView.as_view() response = view(request) expected = GetCohortSerializer([model.cohort], many=True).data - self.assertEqual(json.loads(response.content.decode('utf-8')), expected) + self.assertEqual(json.loads(response.content.decode("utf-8")), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(cohort_cache.keys(), {key}) @@ -288,19 +315,23 @@ def test_cache__get__without_cache__ten_cohorts(self): model = self.bc.database.create(cohort=10) - key = 'Cohort__' + 
urllib.parse.urlencode(sorted({ - 'request.path': '/the-beans-should-not-have-sugar', - }.items())) + key = "Cohort__" + urllib.parse.urlencode( + sorted( + { + "request.path": "/the-beans-should-not-have-sugar", + }.items() + ) + ) request = APIRequestFactory() - request = request.get('/the-beans-should-not-have-sugar') + request = request.get("/the-beans-should-not-have-sugar") view = CustomTestView.as_view() response = view(request) expected = GetCohortSerializer(sorted(model.cohort, key=lambda x: x.name), many=True).data - self.assertEqual(json.loads(response.content.decode('utf-8')), expected) + self.assertEqual(json.loads(response.content.decode("utf-8")), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(cohort_cache.keys(), {key}) @@ -308,16 +339,19 @@ def test_cache__get__without_cache__ten_cohorts(self): def test_cache__get__without_cache__ten_cohorts__passing_arguments(self): cache.clear() - cohorts = [{'name': bin(x).replace('0b', ''), 'slug': bin(x).replace('0b', '')} for x in range(0, 8)] - params = [bin(x).replace('0b', '') for x in range(4, 8)] + cohorts = [{"name": bin(x).replace("0b", ""), "slug": bin(x).replace("0b", "")} for x in range(0, 8)] + params = [bin(x).replace("0b", "") for x in range(4, 8)] model = self.bc.database.create(cohort=cohorts) - key = 'Cohort__' + urllib.parse.urlencode( - sorted({ - 'request.path': '/the-beans-should-not-have-sugar', - 'sort': 'slug', - 'slug': ','.join(params), - }.items())) + key = "Cohort__" + urllib.parse.urlencode( + sorted( + { + "request.path": "/the-beans-should-not-have-sugar", + "sort": "slug", + "slug": ",".join(params), + }.items() + ) + ) request = APIRequestFactory() request = request.get(f'/the-beans-should-not-have-sugar?sort=slug&slug={",".join(params)}') @@ -327,7 +361,7 @@ def test_cache__get__without_cache__ten_cohorts__passing_arguments(self): response = view(request) expected = GetCohortSerializer(model.cohort[4:], many=True).data - self.assertEqual(json.loads(response.content.decode('utf-8')), expected) + self.assertEqual(json.loads(response.content.decode("utf-8")), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(cohort_cache.keys(), {key}) @@ -335,43 +369,49 @@ def test_cache__get__without_cache__ten_cohorts__passing_arguments(self): def test_cache__get__with_cache(self): cache.clear() - cases = [[], [{'x': 1}], [{'x': 1}, {'x': 2}]] + cases = [[], [{"x": 1}], [{"x": 1}, {"x": 2}]] for expected in cases: - json_data = json.dumps(expected).encode('utf-8') - - key = 'Cohort__' + urllib.parse.urlencode( - sorted({ - 'request.path': '/the-beans-should-not-have-sugar', - }.items())) + json_data = json.dumps(expected).encode("utf-8") + + key = "Cohort__" + urllib.parse.urlencode( + sorted( + { + "request.path": "/the-beans-should-not-have-sugar", + }.items() + ) + ) cache.set(key, json_data) - cache.set('Cohort__keys', {key}) + cache.set("Cohort__keys", {key}) request = APIRequestFactory() - request = request.get('/the-beans-should-not-have-sugar') + request = request.get("/the-beans-should-not-have-sugar") view = CustomTestView.as_view() response = view(request) - self.assertEqual(json.loads(response.content.decode('utf-8')), expected) + self.assertEqual(json.loads(response.content.decode("utf-8")), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(cohort_cache.keys(), {key}) def test_cache__get__with_cache__passing_arguments(self): cache.clear() - cases = [[], [{'x': 1}], [{'x': 1}, {'x': 2}]] - params 
= [bin(x).replace('0b', '') for x in range(4, 8)] + cases = [[], [{"x": 1}], [{"x": 1}, {"x": 2}]] + params = [bin(x).replace("0b", "") for x in range(4, 8)] for expected in cases: - json_data = json.dumps(expected).encode('utf-8') - - key = 'Cohort__' + urllib.parse.urlencode( - sorted({ - 'request.path': '/the-beans-should-not-have-sugar', - 'sort': 'slug', - 'slug': ','.join(params), - }.items())) + json_data = json.dumps(expected).encode("utf-8") + + key = "Cohort__" + urllib.parse.urlencode( + sorted( + { + "request.path": "/the-beans-should-not-have-sugar", + "sort": "slug", + "slug": ",".join(params), + }.items() + ) + ) cache.set(key, json_data) - cache.set('Cohort__keys', {key}) + cache.set("Cohort__keys", {key}) request = APIRequestFactory() request = request.get(f'/the-beans-should-not-have-sugar?sort=slug&slug={",".join(params)}') @@ -379,85 +419,91 @@ def test_cache__get__with_cache__passing_arguments(self): view = CustomTestView.as_view() response = view(request) - self.assertEqual(json.loads(response.content.decode('utf-8')), expected) + self.assertEqual(json.loads(response.content.decode("utf-8")), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(cohort_cache.keys(), {key}) - self.assertEqual(cache.get(key), bytes(str(expected).replace('\'', '"'), encoding='utf-8')) + self.assertEqual(cache.get(key), bytes(str(expected).replace("'", '"'), encoding="utf-8")) def test_cache__get__with_cache_but_other_case__passing_arguments(self): - cases = [[], [{'x': 1}], [{'x': 1}, {'x': 2}]] + cases = [[], [{"x": 1}], [{"x": 1}, {"x": 2}]] for case in cases: cache.clear() # keep before cache handling slug = self.bc.fake.slug() - model = self.bc.database.create(cohort={'slug': slug}) + model = self.bc.database.create(cohort={"slug": slug}) json_data = serialize_cache_object(case) - cache.set('Cohort__', json_data) - cache.set('Cohort__keys', {'Cohort__'}) - - key = 'Cohort__' + urllib.parse.urlencode( - sorted({ - 'request.path': '/the-beans-should-not-have-sugar', - 'sort': 'slug', - 'slug': slug, - }.items())) + cache.set("Cohort__", json_data) + cache.set("Cohort__keys", {"Cohort__"}) + + key = "Cohort__" + urllib.parse.urlencode( + sorted( + { + "request.path": "/the-beans-should-not-have-sugar", + "sort": "slug", + "slug": slug, + }.items() + ) + ) request = APIRequestFactory() - request = request.get(f'/the-beans-should-not-have-sugar?sort=slug&slug={slug}') + request = request.get(f"/the-beans-should-not-have-sugar?sort=slug&slug={slug}") view = CustomTestView.as_view() response = view(request) expected = GetCohortSerializer([model.cohort], many=True).data - self.assertEqual(json.loads(response.content.decode('utf-8')), expected) + self.assertEqual(json.loads(response.content.decode("utf-8")), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(cohort_cache.keys(), {'Cohort__', key}) - self.assertEqual(cache.get('Cohort__'), serialize_cache_object(case)) + self.assertEqual(cohort_cache.keys(), {"Cohort__", key}) + self.assertEqual(cache.get("Cohort__"), serialize_cache_object(case)) res = { - 'headers': { - 'Content-Type': 'application/json', + "headers": { + "Content-Type": "application/json", }, - 'content': serialize_cache_value(expected), + "content": serialize_cache_value(expected), } self.assertEqual(cache.get(key), res) def test_cache__get__with_cache_case_of_root_and_current__passing_arguments(self): - cases = [[], [{'x': 1}], [{'x': 1}, {'x': 2}]] + cases = [[], [{"x": 1}], [{"x": 1}, {"x": 2}]] for 
case in cases: cache.clear() # keep before cache handling slug = self.bc.fake.slug() - self.bc.database.delete('admissions.Cohort') - model = self.bc.database.create(cohort={'slug': slug}) - - json_data_root = json.dumps(case).encode('utf-8') - json_data_query = json.dumps(case + case).encode('utf-8') - cache.set('Cohort__', json_data_root) - - key = 'Cohort__' + urllib.parse.urlencode( - sorted({ - 'request.path': '/the-beans-should-not-have-sugar', - 'sort': 'slug', - 'slug': slug, - }.items())) + self.bc.database.delete("admissions.Cohort") + model = self.bc.database.create(cohort={"slug": slug}) + + json_data_root = json.dumps(case).encode("utf-8") + json_data_query = json.dumps(case + case).encode("utf-8") + cache.set("Cohort__", json_data_root) + + key = "Cohort__" + urllib.parse.urlencode( + sorted( + { + "request.path": "/the-beans-should-not-have-sugar", + "sort": "slug", + "slug": slug, + }.items() + ) + ) cache.set(key, json_data_query) - cache.set('Cohort__keys', {'Cohort__', key}) + cache.set("Cohort__keys", {"Cohort__", key}) request = APIRequestFactory() - request = request.get(f'/the-beans-should-not-have-sugar?sort=slug&slug={slug}') + request = request.get(f"/the-beans-should-not-have-sugar?sort=slug&slug={slug}") view = CustomTestView.as_view() response = view(request) expected = case + case - self.assertEqual(json.loads(response.content.decode('utf-8')), expected) + self.assertEqual(json.loads(response.content.decode("utf-8")), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(cohort_cache.keys(), {'Cohort__', key}) - self.assertEqual(cache.get('Cohort__'), json_data_root) + self.assertEqual(cohort_cache.keys(), {"Cohort__", key}) + self.assertEqual(cache.get("Cohort__"), json_data_root) self.assertEqual(cache.get(key), json_data_query) """ @@ -467,19 +513,22 @@ def test_cache__get__with_cache_case_of_root_and_current__passing_arguments(self def test_cache_per_user__get__with_cache__passing_arguments(self): cache.clear() - cases = [[], [{'x': 1}], [{'x': 1}, {'x': 2}]] - params = [bin(x).replace('0b', '') for x in range(4, 8)] + cases = [[], [{"x": 1}], [{"x": 1}, {"x": 2}]] + params = [bin(x).replace("0b", "") for x in range(4, 8)] for expected in cases: - json_data = json.dumps(expected).encode('utf-8') - key = 'Cohort__' + urllib.parse.urlencode( - sorted({ - 'request.path': '/the-beans-should-not-have-sugar', - 'request.user.id': None, - 'sort': 'slug', - 'slug': ','.join(params), - }.items())) + json_data = json.dumps(expected).encode("utf-8") + key = "Cohort__" + urllib.parse.urlencode( + sorted( + { + "request.path": "/the-beans-should-not-have-sugar", + "request.user.id": None, + "sort": "slug", + "slug": ",".join(params), + }.items() + ) + ) cache.set(key, json_data) - cache.set('Cohort__keys', {key}) + cache.set("Cohort__keys", {key}) request = APIRequestFactory() request = request.get(f'/the-beans-should-not-have-sugar?sort=slug&slug={",".join(params)}') @@ -487,88 +536,94 @@ def test_cache_per_user__get__with_cache__passing_arguments(self): view = CachePerUserTestView.as_view() response = view(request) - self.assertEqual(json.loads(response.content.decode('utf-8')), expected) + self.assertEqual(json.loads(response.content.decode("utf-8")), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(cohort_cache.keys(), {key}) - self.assertEqual(cache.get(key), bytes(str(expected).replace('\'', '"'), encoding='utf-8')) + self.assertEqual(cache.get(key), bytes(str(expected).replace("'", '"'), 
encoding="utf-8")) def test_cache_per_user__get__with_cache_but_other_case__passing_arguments(self): - cases = [[], [{'x': 1}], [{'x': 1}, {'x': 2}]] + cases = [[], [{"x": 1}], [{"x": 1}, {"x": 2}]] for case in cases: cache.clear() # keep before cache handling slug = self.bc.fake.slug() - model = self.bc.database.create(cohort={'slug': slug}) + model = self.bc.database.create(cohort={"slug": slug}) json_data = serialize_cache_object(case) - cache.set('Cohort__', json_data) - cache.set('Cohort__keys', {'Cohort__'}) - - key = 'Cohort__' + urllib.parse.urlencode( - sorted({ - 'request.path': '/the-beans-should-not-have-sugar', - 'request.user.id': None, - 'sort': 'slug', - 'slug': slug, - }.items())) + cache.set("Cohort__", json_data) + cache.set("Cohort__keys", {"Cohort__"}) + + key = "Cohort__" + urllib.parse.urlencode( + sorted( + { + "request.path": "/the-beans-should-not-have-sugar", + "request.user.id": None, + "sort": "slug", + "slug": slug, + }.items() + ) + ) request = APIRequestFactory() - request = request.get(f'/the-beans-should-not-have-sugar?sort=slug&slug={slug}') + request = request.get(f"/the-beans-should-not-have-sugar?sort=slug&slug={slug}") view = CachePerUserTestView.as_view() response = view(request) expected = GetCohortSerializer([model.cohort], many=True).data - self.assertEqual(json.loads(response.content.decode('utf-8')), expected) + self.assertEqual(json.loads(response.content.decode("utf-8")), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(cohort_cache.keys(), {'Cohort__', key}) - self.assertEqual(cache.get('Cohort__'), serialize_cache_object(case)) + self.assertEqual(cohort_cache.keys(), {"Cohort__", key}) + self.assertEqual(cache.get("Cohort__"), serialize_cache_object(case)) res = { - 'headers': { - 'Content-Type': 'application/json', + "headers": { + "Content-Type": "application/json", }, - 'content': serialize_cache_value(expected), + "content": serialize_cache_value(expected), } self.assertEqual(cache.get(key), res) def test_cache_per_user__get__with_cache_case_of_root_and_current__passing_arguments(self): - cases = [[], [{'x': 1}], [{'x': 1}, {'x': 2}]] + cases = [[], [{"x": 1}], [{"x": 1}, {"x": 2}]] for case in cases: cache.clear() # keep before cache handling slug = self.bc.fake.slug() - self.bc.database.delete('admissions.Cohort') - model = self.bc.database.create(cohort={'slug': slug}) - - json_data_root = json.dumps(case).encode('utf-8') - json_data_query = json.dumps(case + case).encode('utf-8') - cache.set('Cohort__', json_data_root) - - key = 'Cohort__' + urllib.parse.urlencode( - sorted({ - 'request.path': '/the-beans-should-not-have-sugar', - 'request.user.id': None, - 'sort': 'slug', - 'slug': slug, - }.items())) + self.bc.database.delete("admissions.Cohort") + model = self.bc.database.create(cohort={"slug": slug}) + + json_data_root = json.dumps(case).encode("utf-8") + json_data_query = json.dumps(case + case).encode("utf-8") + cache.set("Cohort__", json_data_root) + + key = "Cohort__" + urllib.parse.urlencode( + sorted( + { + "request.path": "/the-beans-should-not-have-sugar", + "request.user.id": None, + "sort": "slug", + "slug": slug, + }.items() + ) + ) cache.set(key, json_data_query) - cache.set('Cohort__keys', {'Cohort__', key}) + cache.set("Cohort__keys", {"Cohort__", key}) request = APIRequestFactory() - request = request.get(f'/the-beans-should-not-have-sugar?sort=slug&slug={slug}') + request = request.get(f"/the-beans-should-not-have-sugar?sort=slug&slug={slug}") view = 
CachePerUserTestView.as_view() response = view(request) expected = case + case - self.assertEqual(json.loads(response.content.decode('utf-8')), expected) + self.assertEqual(json.loads(response.content.decode("utf-8")), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(cohort_cache.keys(), {'Cohort__', key}) - self.assertEqual(cache.get('Cohort__'), json_data_root) + self.assertEqual(cohort_cache.keys(), {"Cohort__", key}) + self.assertEqual(cache.get("Cohort__"), json_data_root) self.assertEqual(cache.get(key), json_data_query) """ @@ -576,23 +631,26 @@ def test_cache_per_user__get__with_cache_case_of_root_and_current__passing_argum """ def test_cache_per_user__get__auth__with_cache__passing_arguments(self): - cases = [[], [{'x': 1}], [{'x': 1}, {'x': 2}]] - params = [bin(x).replace('0b', '') for x in range(4, 8)] + cases = [[], [{"x": 1}], [{"x": 1}, {"x": 2}]] + params = [bin(x).replace("0b", "") for x in range(4, 8)] for expected in cases: cache.clear() model = self.bc.database.create(user=1) - json_data = json.dumps(expected).encode('utf-8') - - key = 'Cohort__' + urllib.parse.urlencode( - sorted({ - 'request.path': '/the-beans-should-not-have-sugar', - 'request.user.id': model.user.id, - 'sort': 'slug', - 'slug': ','.join(params), - }.items())) + json_data = json.dumps(expected).encode("utf-8") + + key = "Cohort__" + urllib.parse.urlencode( + sorted( + { + "request.path": "/the-beans-should-not-have-sugar", + "request.user.id": model.user.id, + "sort": "slug", + "slug": ",".join(params), + }.items() + ) + ) cache.set(key, json_data) - cache.set('Cohort__keys', {key}) + cache.set("Cohort__keys", {key}) request = APIRequestFactory() request = request.get(f'/the-beans-should-not-have-sugar?sort=slug&slug={",".join(params)}') @@ -601,89 +659,95 @@ def test_cache_per_user__get__auth__with_cache__passing_arguments(self): view = CachePerUserTestView.as_view() response = view(request) - self.assertEqual(json.loads(response.content.decode('utf-8')), expected) + self.assertEqual(json.loads(response.content.decode("utf-8")), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(cohort_cache.keys(), {key}) - self.assertEqual(cache.get(key), bytes(str(expected).replace('\'', '"'), encoding='utf-8')) + self.assertEqual(cache.get(key), bytes(str(expected).replace("'", '"'), encoding="utf-8")) def test_cache_per_user__get__auth__with_cache_but_other_case__passing_arguments(self): - cases = [[], [{'x': 1}], [{'x': 1}, {'x': 2}]] + cases = [[], [{"x": 1}], [{"x": 1}, {"x": 2}]] for case in cases: cache.clear() # keep before cache handling slug = self.bc.fake.slug() - model = self.bc.database.create(cohort={'slug': slug}, user=1) + model = self.bc.database.create(cohort={"slug": slug}, user=1) json_data = serialize_cache_object(case) - cache.set('Cohort__', json_data) - cache.set('Cohort__keys', {'Cohort__'}) - - key = 'Cohort__' + urllib.parse.urlencode( - sorted({ - 'request.path': '/the-beans-should-not-have-sugar', - 'request.user.id': model.user.id, - 'sort': 'slug', - 'slug': slug, - }.items())) + cache.set("Cohort__", json_data) + cache.set("Cohort__keys", {"Cohort__"}) + + key = "Cohort__" + urllib.parse.urlencode( + sorted( + { + "request.path": "/the-beans-should-not-have-sugar", + "request.user.id": model.user.id, + "sort": "slug", + "slug": slug, + }.items() + ) + ) request = APIRequestFactory() - request = request.get(f'/the-beans-should-not-have-sugar?sort=slug&slug={slug}') + request = 
request.get(f"/the-beans-should-not-have-sugar?sort=slug&slug={slug}") force_authenticate(request, user=model.user) view = CachePerUserTestView.as_view() response = view(request) expected = GetCohortSerializer([model.cohort], many=True).data - self.assertEqual(json.loads(response.content.decode('utf-8')), expected) + self.assertEqual(json.loads(response.content.decode("utf-8")), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(cohort_cache.keys(), {'Cohort__', key}) - self.assertEqual(cache.get('Cohort__'), serialize_cache_object(case)) + self.assertEqual(cohort_cache.keys(), {"Cohort__", key}) + self.assertEqual(cache.get("Cohort__"), serialize_cache_object(case)) res = { - 'headers': { - 'Content-Type': 'application/json', + "headers": { + "Content-Type": "application/json", }, - 'content': serialize_cache_value(expected), + "content": serialize_cache_value(expected), } self.assertEqual(cache.get(key), res) def test_cache_per_user__get__auth__with_cache_case_of_root_and_current__passing_arguments(self): - cases = [[], [{'x': 1}], [{'x': 1}, {'x': 2}]] + cases = [[], [{"x": 1}], [{"x": 1}, {"x": 2}]] for case in cases: cache.clear() # keep before cache handling slug = self.bc.fake.slug() - self.bc.database.delete('admissions.Cohort') - model = self.bc.database.create(cohort={'slug': slug}, user=1) + self.bc.database.delete("admissions.Cohort") + model = self.bc.database.create(cohort={"slug": slug}, user=1) json_data_root = serialize_cache_object(case) json_data_query = serialize_cache_object(case + case) - cache.set('Cohort__', json_data_root) - - key = 'Cohort__' + urllib.parse.urlencode( - sorted({ - 'request.path': '/the-beans-should-not-have-sugar', - 'request.user.id': model.user.id, - 'sort': 'slug', - 'slug': slug, - }.items())) + cache.set("Cohort__", json_data_root) + + key = "Cohort__" + urllib.parse.urlencode( + sorted( + { + "request.path": "/the-beans-should-not-have-sugar", + "request.user.id": model.user.id, + "sort": "slug", + "slug": slug, + }.items() + ) + ) cache.set(key, json_data_query) - cache.set('Cohort__keys', {'Cohort__', key}) + cache.set("Cohort__keys", {"Cohort__", key}) request = APIRequestFactory() - request = request.get(f'/the-beans-should-not-have-sugar?sort=slug&slug={slug}') + request = request.get(f"/the-beans-should-not-have-sugar?sort=slug&slug={slug}") force_authenticate(request, user=model.user) view = CachePerUserTestView.as_view() response = view(request) expected = case + case - self.assertEqual(json.loads(response.content.decode('utf-8')), expected) + self.assertEqual(json.loads(response.content.decode("utf-8")), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(cohort_cache.keys(), {'Cohort__', key}) - self.assertEqual(cache.get('Cohort__'), json_data_root) + self.assertEqual(cohort_cache.keys(), {"Cohort__", key}) + self.assertEqual(cache.get("Cohort__"), json_data_root) self.assertEqual(cache.get(key), json_data_query) """ @@ -693,21 +757,24 @@ def test_cache_per_user__get__auth__with_cache_case_of_root_and_current__passing def test_cache_with_prefix__get__with_cache__passing_arguments(self): cache.clear() - cases = [[], [{'x': 1}], [{'x': 1}, {'x': 2}]] - params = [bin(x).replace('0b', '') for x in range(4, 8)] + cases = [[], [{"x": 1}], [{"x": 1}, {"x": 2}]] + params = [bin(x).replace("0b", "") for x in range(4, 8)] for expected in cases: - json_data = json.dumps(expected).encode('utf-8') - - key = 'Cohort__' + urllib.parse.urlencode( - sorted({ - 'request.path': 
'/the-beans-should-not-have-sugar', - 'breathecode.view.get': 'the-beans-should-not-have-sugar', - 'sort': 'slug', - 'slug': ','.join(params), - }.items())) + json_data = json.dumps(expected).encode("utf-8") + + key = "Cohort__" + urllib.parse.urlencode( + sorted( + { + "request.path": "/the-beans-should-not-have-sugar", + "breathecode.view.get": "the-beans-should-not-have-sugar", + "sort": "slug", + "slug": ",".join(params), + }.items() + ) + ) cache.set(key, json_data) - cache.set('Cohort__keys', {key}) + cache.set("Cohort__keys", {key}) request = APIRequestFactory() request = request.get(f'/the-beans-should-not-have-sugar?sort=slug&slug={",".join(params)}') @@ -715,94 +782,106 @@ def test_cache_with_prefix__get__with_cache__passing_arguments(self): view = CachePrefixTestView.as_view() response = view(request) - self.assertEqual(json.loads(response.content.decode('utf-8')), expected) + self.assertEqual(json.loads(response.content.decode("utf-8")), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(cohort_cache.keys(), {key}) - self.assertEqual(cache.get(key), bytes(str(expected).replace('\'', '"'), encoding='utf-8')) + self.assertEqual(cache.get(key), bytes(str(expected).replace("'", '"'), encoding="utf-8")) def test_cache_with_prefix__get__with_cache_but_other_case__passing_arguments(self): - cases = [[], [{'x': 1}], [{'x': 1}, {'x': 2}]] + cases = [[], [{"x": 1}], [{"x": 1}, {"x": 2}]] for case in cases: cache.clear() # keep before cache handling slug = self.bc.fake.slug() - model = self.bc.database.create(cohort={'slug': slug}) + model = self.bc.database.create(cohort={"slug": slug}) json_data = serialize_cache_object(case) - cache.set('Cohort__', json_data) - cache.set('Cohort__keys', {'Cohort__'}) + cache.set("Cohort__", json_data) + cache.set("Cohort__keys", {"Cohort__"}) request = APIRequestFactory() - request = request.get(f'/the-beans-should-not-have-sugar?sort=slug&slug={slug}') + request = request.get(f"/the-beans-should-not-have-sugar?sort=slug&slug={slug}") view = CachePrefixTestView.as_view() response = view(request) expected = GetCohortSerializer([model.cohort], many=True).data - key = 'Cohort__' + urllib.parse.urlencode( - sorted({ - 'request.path': '/the-beans-should-not-have-sugar', - 'breathecode.view.get': 'the-beans-should-not-have-sugar', - 'sort': 'slug', - 'slug': slug, - }.items())) - - self.assertEqual(json.loads(response.content.decode('utf-8')), expected) + key = "Cohort__" + urllib.parse.urlencode( + sorted( + { + "request.path": "/the-beans-should-not-have-sugar", + "breathecode.view.get": "the-beans-should-not-have-sugar", + "sort": "slug", + "slug": slug, + }.items() + ) + ) + + self.assertEqual(json.loads(response.content.decode("utf-8")), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(cohort_cache.keys(), { - 'Cohort__', - key, - }) - self.assertEqual(cache.get('Cohort__'), serialize_cache_object(case)) + self.assertEqual( + cohort_cache.keys(), + { + "Cohort__", + key, + }, + ) + self.assertEqual(cache.get("Cohort__"), serialize_cache_object(case)) res = { - 'headers': { - 'Content-Type': 'application/json', + "headers": { + "Content-Type": "application/json", }, - 'content': serialize_cache_value(expected), + "content": serialize_cache_value(expected), } self.assertEqual(cache.get(key), res) def test_cache_with_prefix__get__with_cache_case_of_root_and_current__passing_arguments(self): - cases = [[], [{'x': 1}], [{'x': 1}, {'x': 2}]] + cases = [[], [{"x": 1}], [{"x": 1}, {"x": 
2}]] for case in cases: cache.clear() # keep before cache handling slug = self.bc.fake.slug() - self.bc.database.delete('admissions.Cohort') - model = self.bc.database.create(cohort={'slug': slug}) + self.bc.database.delete("admissions.Cohort") + model = self.bc.database.create(cohort={"slug": slug}) - json_data_root = json.dumps(case).encode('utf-8') + json_data_root = json.dumps(case).encode("utf-8") json_data_query = json.dumps(case + case).encode() - cache.set('Cohort__', json_data_root) - - key = 'Cohort__' + urllib.parse.urlencode( - sorted({ - 'request.path': '/the-beans-should-not-have-sugar', - 'breathecode.view.get': 'the-beans-should-not-have-sugar', - 'sort': 'slug', - 'slug': slug, - }.items())) + cache.set("Cohort__", json_data_root) + + key = "Cohort__" + urllib.parse.urlencode( + sorted( + { + "request.path": "/the-beans-should-not-have-sugar", + "breathecode.view.get": "the-beans-should-not-have-sugar", + "sort": "slug", + "slug": slug, + }.items() + ) + ) cache.set(key, json_data_query) - cache.set('Cohort__keys', {'Cohort__', key}) + cache.set("Cohort__keys", {"Cohort__", key}) request = APIRequestFactory() - request = request.get(f'/the-beans-should-not-have-sugar?sort=slug&slug={slug}') + request = request.get(f"/the-beans-should-not-have-sugar?sort=slug&slug={slug}") view = CachePrefixTestView.as_view() response = view(request) expected = case + case - self.assertEqual(json.loads(response.content.decode('utf-8')), expected) + self.assertEqual(json.loads(response.content.decode("utf-8")), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(cohort_cache.keys(), { - 'Cohort__', - key, - }) - self.assertEqual(cache.get('Cohort__'), json_data_root) + self.assertEqual( + cohort_cache.keys(), + { + "Cohort__", + key, + }, + ) + self.assertEqual(cache.get("Cohort__"), json_data_root) self.assertEqual(cache.get(key), json_data_query) """ @@ -815,14 +894,14 @@ def test_sort__get__ten_cohorts(self): model = self.bc.database.create(cohort=10) request = APIRequestFactory() - request = request.get('/the-beans-should-not-have-sugar') + request = request.get("/the-beans-should-not-have-sugar") view = CustomTestView.as_view() response = view(request) expected = GetCohortSerializer(sorted(model.cohort, key=lambda x: x.name), many=True).data - self.assertEqual(json.loads(response.content.decode('utf-8')), expected) + self.assertEqual(json.loads(response.content.decode("utf-8")), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) """ @@ -835,14 +914,14 @@ def test_pagination__get__activate__25_cohorts_just_get_20(self): model = self.bc.database.create(cohort=25) request = APIRequestFactory() - request = request.get('/the-beans-should-not-have-sugar') + request = request.get("/the-beans-should-not-have-sugar") view = CustomTestView.as_view() response = view(request) expected = GetCohortSerializer(sorted(model.cohort, key=lambda x: x.name)[:20], many=True).data - self.assertEqual(json.loads(response.content.decode('utf-8')), expected) + self.assertEqual(json.loads(response.content.decode("utf-8")), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) assert_pagination(response.headers, limit=20, offset=0, lenght=25) @@ -852,21 +931,21 @@ def test_pagination__get__activate__with_10_cohorts__get_first_five(self): model = self.bc.database.create(cohort=10) request = APIRequestFactory() - request = request.get('/the-beans-should-not-have-sugar?limit=5&offset=0') + request = 
request.get("/the-beans-should-not-have-sugar?limit=5&offset=0") view = CustomTestView.as_view() response = view(request) expected = { - 'count': 10, - 'first': None, - 'last': 'http://testserver/the-beans-should-not-have-sugar?limit=5&offset=5', - 'next': 'http://testserver/the-beans-should-not-have-sugar?limit=5&offset=5', - 'previous': None, - 'results': GetCohortSerializer(sorted(model.cohort, key=lambda x: x.name)[:5], many=True).data + "count": 10, + "first": None, + "last": "http://testserver/the-beans-should-not-have-sugar?limit=5&offset=5", + "next": "http://testserver/the-beans-should-not-have-sugar?limit=5&offset=5", + "previous": None, + "results": GetCohortSerializer(sorted(model.cohort, key=lambda x: x.name)[:5], many=True).data, } - self.assertEqual(json.loads(response.content.decode('utf-8')), expected) + self.assertEqual(json.loads(response.content.decode("utf-8")), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) assert_pagination(response.headers, limit=5, offset=0, lenght=10) @@ -876,21 +955,21 @@ def test_pagination__get__activate__with_10_cohorts__get_last_five(self): model = self.bc.database.create(cohort=10) request = APIRequestFactory() - request = request.get('/the-beans-should-not-have-sugar?limit=5&offset=5') + request = request.get("/the-beans-should-not-have-sugar?limit=5&offset=5") view = CustomTestView.as_view() response = view(request) expected = { - 'count': 10, - 'first': 'http://testserver/the-beans-should-not-have-sugar?limit=5', - 'last': None, - 'next': None, - 'previous': 'http://testserver/the-beans-should-not-have-sugar?limit=5', - 'results': GetCohortSerializer(sorted(model.cohort, key=lambda x: x.name)[5:], many=True).data + "count": 10, + "first": "http://testserver/the-beans-should-not-have-sugar?limit=5", + "last": None, + "next": None, + "previous": "http://testserver/the-beans-should-not-have-sugar?limit=5", + "results": GetCohortSerializer(sorted(model.cohort, key=lambda x: x.name)[5:], many=True).data, } - self.assertEqual(json.loads(response.content.decode('utf-8')), expected) + self.assertEqual(json.loads(response.content.decode("utf-8")), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) assert_pagination(response.headers, limit=5, offset=5, lenght=10) @@ -900,21 +979,21 @@ def test_pagination__get__activate__with_10_cohorts__after_last_five(self): model = self.bc.database.create(cohort=10) request = APIRequestFactory() - request = request.get('/the-beans-should-not-have-sugar?limit=5&offset=10') + request = request.get("/the-beans-should-not-have-sugar?limit=5&offset=10") view = CustomTestView.as_view() response = view(request) expected = { - 'count': 10, - 'first': 'http://testserver/the-beans-should-not-have-sugar?limit=5', - 'last': None, - 'next': None, - 'previous': 'http://testserver/the-beans-should-not-have-sugar?limit=5&offset=5', - 'results': [], + "count": 10, + "first": "http://testserver/the-beans-should-not-have-sugar?limit=5", + "last": None, + "next": None, + "previous": "http://testserver/the-beans-should-not-have-sugar?limit=5&offset=5", + "results": [], } - self.assertEqual(json.loads(response.content.decode('utf-8')), expected) + self.assertEqual(json.loads(response.content.decode("utf-8")), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) assert_pagination(response.headers, limit=5, offset=10, lenght=10) @@ -928,8 +1007,8 @@ def test_pagination__get__deactivate__105_cohorts_just_get_100(self): model = self.bc.database.create(cohort=105) request = 
APIRequestFactory() - request = request.get('/the-beans-should-not-have-sugar') - request.META['HTTP_ACCEPT_ENCODING'] = 'gzip, deflate, br' + request = request.get("/the-beans-should-not-have-sugar") + request.META["HTTP_ACCEPT_ENCODING"] = "gzip, deflate, br" view = PaginateFalseTestView.as_view() @@ -946,14 +1025,14 @@ def test_pagination__get__deactivate__with_10_cohorts__get_first_five(self): model = self.bc.database.create(cohort=10) request = APIRequestFactory() - request = request.get('/the-beans-should-not-have-sugar?limit=5&offset=0') + request = request.get("/the-beans-should-not-have-sugar?limit=5&offset=0") view = PaginateFalseTestView.as_view() response = view(request) expected = GetCohortSerializer(sorted(model.cohort, key=lambda x: x.name), many=True).data - self.assertEqual(json.loads(response.content.decode('utf-8')), expected) + self.assertEqual(json.loads(response.content.decode("utf-8")), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) assert_no_pagination(response.headers, limit=20, offset=0, lenght=25) @@ -963,14 +1042,14 @@ def test_pagination__get__deactivate__with_10_cohorts__get_last_five(self): model = self.bc.database.create(cohort=10) request = APIRequestFactory() - request = request.get('/the-beans-should-not-have-sugar?limit=5&offset=5') + request = request.get("/the-beans-should-not-have-sugar?limit=5&offset=5") view = PaginateFalseTestView.as_view() response = view(request) expected = GetCohortSerializer(sorted(model.cohort, key=lambda x: x.name), many=True).data - self.assertEqual(json.loads(response.content.decode('utf-8')), expected) + self.assertEqual(json.loads(response.content.decode("utf-8")), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) assert_no_pagination(response.headers, limit=20, offset=0, lenght=25) @@ -980,14 +1059,14 @@ def test_pagination__get__deactivate__with_10_cohorts__after_last_five(self): model = self.bc.database.create(cohort=10) request = APIRequestFactory() - request = request.get('/the-beans-should-not-have-sugar?limit=5&offset=10') + request = request.get("/the-beans-should-not-have-sugar?limit=5&offset=10") view = PaginateFalseTestView.as_view() response = view(request) expected = GetCohortSerializer(sorted(model.cohort, key=lambda x: x.name), many=True).data - self.assertEqual(json.loads(response.content.decode('utf-8')), expected) + self.assertEqual(json.loads(response.content.decode("utf-8")), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) assert_no_pagination(response.headers, limit=20, offset=0, lenght=25) @@ -997,24 +1076,29 @@ class ApiViewExtensionsGetIdTestSuite(UtilsTestCase): 🔽🔽🔽 Spy the extensions """ - @patch.object(APIViewExtensionHandlers, '_spy_extensions', MagicMock()) + @patch.object(APIViewExtensionHandlers, "_spy_extensions", MagicMock()) def test_cache__get__spy_the_extensions(self): cache.clear() # keep before cache handling slug = self.bc.fake.slug() - self.bc.database.delete('admissions.Cohort') - model = self.bc.database.create(cohort={'slug': slug}) + self.bc.database.delete("admissions.Cohort") + model = self.bc.database.create(cohort={"slug": slug}) request = APIRequestFactory() - request = request.get(f'/the-beans-should-not-have-sugar/1') + request = request.get(f"/the-beans-should-not-have-sugar/1") view = CustomTestView.as_view() view(request, id=1) - self.assertEqual(APIViewExtensionHandlers._spy_extensions.call_args_list, [ - call(['CacheExtension', 'LanguageExtension', 'LookupExtension', 'PaginationExtension', 
'SortExtension']), - ]) + self.assertEqual( + APIViewExtensionHandlers._spy_extensions.call_args_list, + [ + call( + ["CacheExtension", "LanguageExtension", "LookupExtension", "PaginationExtension", "SortExtension"] + ), + ], + ) """ 🔽🔽🔽 Cache @@ -1024,14 +1108,14 @@ def test_cache__get__without_cache__zero_cohorts(self): cache.clear() request = APIRequestFactory() - request = request.get('/the-beans-should-not-have-sugar/1') + request = request.get("/the-beans-should-not-have-sugar/1") view = CustomTestView.as_view() response = view(request, id=1).render() - expected = {'detail': 'Not found', 'status_code': 404} + expected = {"detail": "Not found", "status_code": 404} - self.assertEqual(json.loads(response.content.decode('utf-8')), expected) + self.assertEqual(json.loads(response.content.decode("utf-8")), expected) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) self.assertEqual(cohort_cache.keys(), set()) @@ -1042,128 +1126,138 @@ def test_cache__get__without_cache__one_cohort(self): model = self.bc.database.create(cohort=1) request = APIRequestFactory() - request = request.get('/the-beans-should-not-have-sugar/1') + request = request.get("/the-beans-should-not-have-sugar/1") view = CustomTestView.as_view() response = view(request, id=1) expected = GetCohortSerializer(model.cohort, many=False).data - self.assertEqual(json.loads(response.content.decode('utf-8')), expected) + self.assertEqual(json.loads(response.content.decode("utf-8")), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - key = 'Cohort__id=1&' + urllib.parse.urlencode({ - **request.GET, - 'request.path': - '/the-beans-should-not-have-sugar/1', - }) + key = "Cohort__id=1&" + urllib.parse.urlencode( + { + **request.GET, + "request.path": "/the-beans-should-not-have-sugar/1", + } + ) self.assertEqual(cohort_cache.keys(), {key}) res = { - 'headers': { - 'Content-Type': 'application/json', + "headers": { + "Content-Type": "application/json", }, - 'content': serialize_cache_value(expected), + "content": serialize_cache_value(expected), } self.assertEqual(cache.get(key), res) def test_cache__get__with_cache(self): cache.clear() - cases = [[], [{'x': 1}], [{'x': 1}, {'x': 2}]] + cases = [[], [{"x": 1}], [{"x": 1}, {"x": 2}]] for expected in cases: - json_data = json.dumps(expected).encode('utf-8') - key = 'Cohort__id=1&' + urllib.parse.urlencode({ - 'request.path': '/the-beans-should-not-have-sugar/1', - }) + json_data = json.dumps(expected).encode("utf-8") + key = "Cohort__id=1&" + urllib.parse.urlencode( + { + "request.path": "/the-beans-should-not-have-sugar/1", + } + ) cache.set(key, json_data) - cache.set('Cohort__keys', {key}) + cache.set("Cohort__keys", {key}) request = APIRequestFactory() - request = request.get('/the-beans-should-not-have-sugar/1') + request = request.get("/the-beans-should-not-have-sugar/1") view = CustomTestView.as_view() response = view(request, id=1) - self.assertEqual(json.loads(response.content.decode('utf-8')), expected) + self.assertEqual(json.loads(response.content.decode("utf-8")), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(cohort_cache.keys(), {key}) self.assertEqual(cache.get(key), serialize_cache_value(expected)) def test_cache__get__with_cache_but_other_case(self): cache.clear() - case = {'x': 1} + case = {"x": 1} # keep before cache handling slug = self.bc.fake.slug() - model = self.bc.database.create(cohort={'slug': slug}) + model = self.bc.database.create(cohort={"slug": slug}) json_data = 
serialize_cache_object(case) - key1 = 'Cohort__id=1&' + urllib.parse.urlencode({ - 'request.path': '/the-beans-should-not-have-sugar/1', - }) - key2 = 'Cohort__id=2&' + urllib.parse.urlencode({ - 'request.path': '/the-beans-should-not-have-sugar/1', - }) - cache.set('Cohort__', json_data) + key1 = "Cohort__id=1&" + urllib.parse.urlencode( + { + "request.path": "/the-beans-should-not-have-sugar/1", + } + ) + key2 = "Cohort__id=2&" + urllib.parse.urlencode( + { + "request.path": "/the-beans-should-not-have-sugar/1", + } + ) + cache.set("Cohort__", json_data) cache.set(key2, json_data) - cache.set('Cohort__keys', {'Cohort__', key2}) + cache.set("Cohort__keys", {"Cohort__", key2}) request = APIRequestFactory() - request = request.get(f'/the-beans-should-not-have-sugar/1') + request = request.get(f"/the-beans-should-not-have-sugar/1") view = CustomTestView.as_view() response = view(request, id=1) expected = GetCohortSerializer(model.cohort, many=False).data - self.assertEqual(json.loads(response.content.decode('utf-8')), expected) + self.assertEqual(json.loads(response.content.decode("utf-8")), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(cohort_cache.keys(), {'Cohort__', key1, key2}) - self.assertEqual(cache.get('Cohort__'), serialize_cache_object(case)) + self.assertEqual(cohort_cache.keys(), {"Cohort__", key1, key2}) + self.assertEqual(cache.get("Cohort__"), serialize_cache_object(case)) res = { - 'headers': { - 'Content-Type': 'application/json', + "headers": { + "Content-Type": "application/json", }, - 'content': serialize_cache_value(expected), + "content": serialize_cache_value(expected), } self.assertEqual(cache.get(key1), res) self.assertEqual(cache.get(key2), json_data) def test_cache__get__with_cache_case_of_root_and_current(self): - cases = [({'x': 1}, {'y': 1}), ({'x': 2}, {'y': 2}), ({'x': 3}, {'y': 3})] + cases = [({"x": 1}, {"y": 1}), ({"x": 2}, {"y": 2}), ({"x": 3}, {"y": 3})] for case in cases: cache.clear() # keep before cache handling slug = self.bc.fake.slug() - self.bc.database.delete('admissions.Cohort') - model = self.bc.database.create(cohort={'slug': slug}) - - json_data_root = ('application/json ' + json.dumps(case[0])).encode('utf-8') - json_data_query = ('application/json ' + json.dumps(case[1])).encode('utf-8') - cache.set('Cohort__', json_data_root) - key = 'Cohort__id=1&' + urllib.parse.urlencode({ - 'request.path': '/the-beans-should-not-have-sugar/1', - }) + self.bc.database.delete("admissions.Cohort") + model = self.bc.database.create(cohort={"slug": slug}) + + json_data_root = ("application/json " + json.dumps(case[0])).encode("utf-8") + json_data_query = ("application/json " + json.dumps(case[1])).encode("utf-8") + cache.set("Cohort__", json_data_root) + key = "Cohort__id=1&" + urllib.parse.urlencode( + { + "request.path": "/the-beans-should-not-have-sugar/1", + } + ) cache.set(key, json_data_query) - cache.set('Cohort__keys', {'Cohort__', key}) + cache.set("Cohort__keys", {"Cohort__", key}) request = APIRequestFactory() - request = request.get(f'/the-beans-should-not-have-sugar/1') + request = request.get(f"/the-beans-should-not-have-sugar/1") view = CustomTestView.as_view() response = view(request, id=1) - key = 'Cohort__id=1&' + urllib.parse.urlencode({ - **request.GET, - 'request.path': - '/the-beans-should-not-have-sugar/1', - }) - self.assertEqual(cohort_cache.keys(), {'Cohort__', key}) + key = "Cohort__id=1&" + urllib.parse.urlencode( + { + **request.GET, + "request.path": 
"/the-beans-should-not-have-sugar/1", + } + ) + self.assertEqual(cohort_cache.keys(), {"Cohort__", key}) expected = case[1] - self.assertEqual(json.loads(response.content.decode('utf-8')), expected) + self.assertEqual(json.loads(response.content.decode("utf-8")), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(cohort_cache.keys(), {'Cohort__', key}) - self.assertEqual(cache.get('Cohort__'), json_data_root) + self.assertEqual(cohort_cache.keys(), {"Cohort__", key}) + self.assertEqual(cache.get("Cohort__"), json_data_root) self.assertEqual(cache.get(key), json_data_query) diff --git a/breathecode/utils/tests/decorators/tests_capable_of.py b/breathecode/utils/tests/decorators/tests_capable_of.py index 526114e9a..c45471e6a 100644 --- a/breathecode/utils/tests/decorators/tests_capable_of.py +++ b/breathecode/utils/tests/decorators/tests_capable_of.py @@ -11,25 +11,26 @@ from rest_framework import status from ..mixins import UtilsTestCase -PERMISSION = 'can_kill_kenny' +PERMISSION = "can_kill_kenny" -@api_view(['GET']) +@api_view(["GET"]) @permission_classes([AllowAny]) @decorators.capable_of(PERMISSION) def get_id(request, id, academy_id=None): - return Response({'id': id, 'academy_id': academy_id}) + return Response({"id": id, "academy_id": academy_id}) class CustomTestView(APIView): """ List all snippets, or create a new snippet. """ + permission_classes = [AllowAny] @decorators.capable_of(PERMISSION) def get(self, request, id, academy_id=None): - return Response({'id': id, 'academy_id': academy_id}) + return Response({"id": id, "academy_id": academy_id}) class FunctionBasedViewTestSuite(UtilsTestCase): @@ -39,134 +40,126 @@ class FunctionBasedViewTestSuite(UtilsTestCase): def test_capable_of__function__get_id__anonymous_user(self): factory = APIRequestFactory() - request = factory.get('/they-killed-kenny') + request = factory.get("/they-killed-kenny") view = get_id response = view(request, id=1).render() expected = { - 'detail': "Missing academy_id parameter expected for the endpoint url or 'Academy' header", - 'status_code': 403 + "detail": "Missing academy_id parameter expected for the endpoint url or 'Academy' header", + "status_code": 403, } - self.assertEqual(json.loads(response.content.decode('utf-8')), expected) + self.assertEqual(json.loads(response.content.decode("utf-8")), expected) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) def test_capable_of__function__get_id__without_capability(self): model = self.bc.database.create(user=1) factory = APIRequestFactory() - request = factory.get('/they-killed-kenny', headers={'academy': 1}) + request = factory.get("/they-killed-kenny", headers={"academy": 1}) force_authenticate(request, user=model.user) view = get_id response = view(request, id=1).render() expected = { - 'detail': "You (user: 1) don't have this capability: can_kill_kenny for academy 1", - 'status_code': 403 + "detail": "You (user: 1) don't have this capability: can_kill_kenny for academy 1", + "status_code": 403, } - self.assertEqual(json.loads(response.content.decode('utf-8')), expected) + self.assertEqual(json.loads(response.content.decode("utf-8")), expected) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) def test_capable_of__function__get_id__with_user__with_capability(self): - model = self.bc.database.create(user=1, academy=1, profile_academy=1, role=1, capability='can_kill_kenny') + model = self.bc.database.create(user=1, academy=1, profile_academy=1, role=1, capability="can_kill_kenny") 
factory = APIRequestFactory() - request = factory.get('/they-killed-kenny', headers={'academy': 1}) + request = factory.get("/they-killed-kenny", headers={"academy": 1}) force_authenticate(request, user=model.user) view = get_id response = view(request, id=1).render() - expected = {'academy_id': 1, 'id': 1} + expected = {"academy_id": 1, "id": 1} - self.assertEqual(json.loads(response.content.decode('utf-8')), expected) + self.assertEqual(json.loads(response.content.decode("utf-8")), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_capable_of__function__get_id__with_user__with_capability__status_inactive(self): - academy_kwargs = {'status': 'INACTIVE'} - model = self.bc.database.create(user=1, - academy=academy_kwargs, - profile_academy=1, - role=1, - capability='can_kill_kenny') + academy_kwargs = {"status": "INACTIVE"} + model = self.bc.database.create( + user=1, academy=academy_kwargs, profile_academy=1, role=1, capability="can_kill_kenny" + ) factory = APIRequestFactory() slug_1 = self.bc.fake.slug() slug_2 = self.bc.fake.slug() - request = factory.get(f'/{slug_1}/{slug_2}', headers={'academy': 1}) + request = factory.get(f"/{slug_1}/{slug_2}", headers={"academy": 1}) force_authenticate(request, user=model.user) view = get_id response = view(request, id=1).render() - expected = {'detail': 'This academy is not active', 'status_code': 403} + expected = {"detail": "This academy is not active", "status_code": 403} - self.assertEqual(json.loads(response.content.decode('utf-8')), expected) + self.assertEqual(json.loads(response.content.decode("utf-8")), expected) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) def test_capable_of__function__get_id__with_user__with_capability__status_deleted(self): - academy_kwargs = {'status': 'DELETED'} - model = self.bc.database.create(user=1, - academy=academy_kwargs, - profile_academy=1, - role=1, - capability='can_kill_kenny') + academy_kwargs = {"status": "DELETED"} + model = self.bc.database.create( + user=1, academy=academy_kwargs, profile_academy=1, role=1, capability="can_kill_kenny" + ) factory = APIRequestFactory() slug_1 = self.bc.fake.slug() slug_2 = self.bc.fake.slug() - request = factory.get(f'/{slug_1}/{slug_2}', headers={'academy': 1}) + request = factory.get(f"/{slug_1}/{slug_2}", headers={"academy": 1}) force_authenticate(request, user=model.user) view = get_id response = view(request, id=1).render() - expected = {'detail': 'This academy is deleted', 'status_code': 403} + expected = {"detail": "This academy is deleted", "status_code": 403} - self.assertEqual(json.loads(response.content.decode('utf-8')), expected) + self.assertEqual(json.loads(response.content.decode("utf-8")), expected) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) def test_capable_of__function__get_id__with_user__with_capability__status_inactive_with_correct_link(self): - academy_kwargs = {'status': 'INACTIVE'} - model = self.bc.database.create(user=1, - academy=academy_kwargs, - profile_academy=1, - role=1, - capability='can_kill_kenny') + academy_kwargs = {"status": "INACTIVE"} + model = self.bc.database.create( + user=1, academy=academy_kwargs, profile_academy=1, role=1, capability="can_kill_kenny" + ) factory = APIRequestFactory() - request = factory.get('/v1/admissions/academy/activate', headers={'academy': 1}) + request = factory.get("/v1/admissions/academy/activate", headers={"academy": 1}) force_authenticate(request, user=model.user) view = get_id response = view(request, id=1).render() - expected 
= {'academy_id': 1, 'id': 1} + expected = {"academy_id": 1, "id": 1} - self.assertEqual(json.loads(response.content.decode('utf-8')), expected) + self.assertEqual(json.loads(response.content.decode("utf-8")), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_capable_of__function__get_id__with_user__with_capability__status_deleted_with_correct_link(self): - academy_kwargs = {'status': 'DELETED'} - model = self.bc.database.create(user=1, - academy=academy_kwargs, - profile_academy=1, - role=1, - capability='can_kill_kenny') + academy_kwargs = {"status": "DELETED"} + model = self.bc.database.create( + user=1, academy=academy_kwargs, profile_academy=1, role=1, capability="can_kill_kenny" + ) factory = APIRequestFactory() - request = factory.get('/v1/admissions/academy/activate', headers={'academy': 1}) + request = factory.get("/v1/admissions/academy/activate", headers={"academy": 1}) force_authenticate(request, user=model.user) view = get_id response = view(request, id=1).render() - expected = {'detail': 'This academy is deleted', 'status_code': 403} + expected = {"detail": "This academy is deleted", "status_code": 403} - self.assertEqual(json.loads(response.content.decode('utf-8')), expected) + self.assertEqual(json.loads(response.content.decode("utf-8")), expected) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) @@ -177,131 +170,123 @@ class ViewTestSuite(UtilsTestCase): def test_capable_of__view__get__anonymous_user(self): request = APIRequestFactory() - request = request.get('/they-killed-kenny') + request = request.get("/they-killed-kenny") view = CustomTestView.as_view() response = view(request, id=1).render() expected = { - 'detail': "Missing academy_id parameter expected for the endpoint url or 'Academy' header", - 'status_code': 403 + "detail": "Missing academy_id parameter expected for the endpoint url or 'Academy' header", + "status_code": 403, } - self.assertEqual(json.loads(response.content.decode('utf-8')), expected) + self.assertEqual(json.loads(response.content.decode("utf-8")), expected) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) def test_capable_of__view__get_id__without_capability(self): model = self.bc.database.create(user=1) request = APIRequestFactory() - request = request.get('/they-killed-kenny', headers={'academy': 1}) + request = request.get("/they-killed-kenny", headers={"academy": 1}) force_authenticate(request, user=model.user) view = CustomTestView.as_view() response = view(request, id=1).render() expected = { - 'detail': "You (user: 1) don't have this capability: can_kill_kenny for academy 1", - 'status_code': 403 + "detail": "You (user: 1) don't have this capability: can_kill_kenny for academy 1", + "status_code": 403, } - self.assertEqual(json.loads(response.content.decode('utf-8')), expected) + self.assertEqual(json.loads(response.content.decode("utf-8")), expected) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) def test_capable_of__view__get_id__with_user__with_capability(self): - model = self.bc.database.create(user=1, academy=1, profile_academy=1, role=1, capability='can_kill_kenny') + model = self.bc.database.create(user=1, academy=1, profile_academy=1, role=1, capability="can_kill_kenny") request = APIRequestFactory() - request = request.get('/they-killed-kenny', headers={'academy': 1}) + request = request.get("/they-killed-kenny", headers={"academy": 1}) force_authenticate(request, user=model.user) view = CustomTestView.as_view() response = view(request, 
id=1).render() - expected = {'id': 1, 'academy_id': 1} + expected = {"id": 1, "academy_id": 1} - self.assertEqual(json.loads(response.content.decode('utf-8')), expected) + self.assertEqual(json.loads(response.content.decode("utf-8")), expected) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_capable_of__view__get_id__with_user__with_capability__academy_inactive(self): - academy_kwargs = {'status': 'INACTIVE'} - model = self.bc.database.create(user=1, - academy=academy_kwargs, - profile_academy=1, - role=1, - capability='can_kill_kenny') + academy_kwargs = {"status": "INACTIVE"} + model = self.bc.database.create( + user=1, academy=academy_kwargs, profile_academy=1, role=1, capability="can_kill_kenny" + ) request = APIRequestFactory() slug_1 = self.bc.fake.slug() slug_2 = self.bc.fake.slug() - request = request.get(f'/{slug_1}/{slug_2}', headers={'academy': 1}) + request = request.get(f"/{slug_1}/{slug_2}", headers={"academy": 1}) force_authenticate(request, user=model.user) view = CustomTestView.as_view() response = view(request, id=1).render() - expected = {'detail': 'This academy is not active', 'status_code': 403} + expected = {"detail": "This academy is not active", "status_code": 403} - self.assertEqual(json.loads(response.content.decode('utf-8')), expected) + self.assertEqual(json.loads(response.content.decode("utf-8")), expected) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) def test_capable_of__view__get_id__with_user__with_capability__academy_deleted(self): - academy_kwargs = {'status': 'DELETED'} - model = self.bc.database.create(user=1, - academy=academy_kwargs, - profile_academy=1, - role=1, - capability='can_kill_kenny') + academy_kwargs = {"status": "DELETED"} + model = self.bc.database.create( + user=1, academy=academy_kwargs, profile_academy=1, role=1, capability="can_kill_kenny" + ) request = APIRequestFactory() slug_1 = self.bc.fake.slug() slug_2 = self.bc.fake.slug() - request = request.get(f'/{slug_1}/{slug_2}', headers={'academy': 1}) + request = request.get(f"/{slug_1}/{slug_2}", headers={"academy": 1}) force_authenticate(request, user=model.user) view = CustomTestView.as_view() response = view(request, id=1).render() - expected = {'detail': 'This academy is deleted', 'status_code': 403} - self.assertEqual(json.loads(response.content.decode('utf-8')), expected) + expected = {"detail": "This academy is deleted", "status_code": 403} + self.assertEqual(json.loads(response.content.decode("utf-8")), expected) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) def test_capable_of__view__get_id__with_user__with_capability__academy_inactive_with_correct_link(self): - academy_kwargs = {'status': 'INACTIVE'} - model = self.bc.database.create(user=1, - academy=academy_kwargs, - profile_academy=1, - role=1, - capability='can_kill_kenny') + academy_kwargs = {"status": "INACTIVE"} + model = self.bc.database.create( + user=1, academy=academy_kwargs, profile_academy=1, role=1, capability="can_kill_kenny" + ) request = APIRequestFactory() - request = request.get('/v1/admissions/academy/activate', headers={'academy': 1}) + request = request.get("/v1/admissions/academy/activate", headers={"academy": 1}) force_authenticate(request, user=model.user) view = CustomTestView.as_view() response = view(request, id=1).render() - expected = {'academy_id': 1, 'id': 1} + expected = {"academy_id": 1, "id": 1} - self.assertEqual(json.loads(response.content.decode('utf-8')), expected) + self.assertEqual(json.loads(response.content.decode("utf-8")), 
expected) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_capable_of__view__get_id__with_user__with_capability__academy_deleted_with_correct_link(self): - academy_kwargs = {'status': 'DELETED'} - model = self.bc.database.create(user=1, - academy=academy_kwargs, - profile_academy=1, - role=1, - capability='can_kill_kenny') + academy_kwargs = {"status": "DELETED"} + model = self.bc.database.create( + user=1, academy=academy_kwargs, profile_academy=1, role=1, capability="can_kill_kenny" + ) request = APIRequestFactory() - request = request.get('/v1/admissions/academy/activate', headers={'academy': 1}) + request = request.get("/v1/admissions/academy/activate", headers={"academy": 1}) force_authenticate(request, user=model.user) view = CustomTestView.as_view() response = view(request, id=1).render() - expected = {'detail': 'This academy is deleted', 'status_code': 403} + expected = {"detail": "This academy is deleted", "status_code": 403} - self.assertEqual(json.loads(response.content.decode('utf-8')), expected) + self.assertEqual(json.loads(response.content.decode("utf-8")), expected) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) diff --git a/breathecode/utils/tests/decorators/tests_consume.py b/breathecode/utils/tests/decorators/tests_consume.py index 8785881a0..f8b7ac288 100644 --- a/breathecode/utils/tests/decorators/tests_consume.py +++ b/breathecode/utils/tests/decorators/tests_consume.py @@ -22,17 +22,19 @@ from breathecode.utils.decorators import ServiceContext from capyc.rest_framework import pytest as capy -SERVICE = 'can_kill_kenny' -PERMISSION = 'can_kill_kenny' +SERVICE = "can_kill_kenny" +PERMISSION = "can_kill_kenny" UTC_NOW = timezone.now() @pytest.fixture(autouse=True) def setup(db, monkeypatch: pytest.MonkeyPatch) -> None: - monkeypatch.setattr('django.utils.timezone.now', MagicMock(return_value=UTC_NOW)) - monkeypatch.setattr('breathecode.payments.signals.consume_service.send_robust', MagicMock(return_value=None)) - monkeypatch.setattr('breathecode.payments.models.ConsumptionSession.build_session', - MagicMock(wraps=models.ConsumptionSession.build_session)) + monkeypatch.setattr("django.utils.timezone.now", MagicMock(return_value=UTC_NOW)) + monkeypatch.setattr("breathecode.payments.signals.consume_service.send_robust", MagicMock(return_value=None)) + monkeypatch.setattr( + "breathecode.payments.models.ConsumptionSession.build_session", + MagicMock(wraps=models.ConsumptionSession.build_session), + ) CONSUMER_MOCK.call_args_list = [] CONSUMER_WITH_TIME_OF_LIFE_MOCK.call_args_list = [] @@ -44,11 +46,11 @@ def consumer(context: ServiceContext, args: tuple, kwargs: dict) -> tuple[dict, args = (*args, PERMISSION) kwargs = { **kwargs, - 'permission': PERMISSION, + "permission": PERMISSION, } context = { **context, - 'consumables': context['consumables'].exclude(service_item__service__groups__name='secret'), + "consumables": context["consumables"].exclude(service_item__service__groups__name="secret"), } return (context, args, kwargs) @@ -65,12 +67,12 @@ def consumer_with_time_of_life(context: ServiceContext, args: tuple, kwargs: dic args = (*args, PERMISSION) kwargs = { **kwargs, - 'permission': PERMISSION, + "permission": PERMISSION, } context = { **context, - 'consumables': context['consumables'].exclude(service_item__service__groups__name='secret'), - 'time_of_life': time_of_life, + "consumables": context["consumables"].exclude(service_item__service__groups__name="secret"), + "time_of_life": time_of_life, } return (context, args, kwargs) @@ 
-88,10 +90,10 @@ def build_view_function(method, data, decorator_args=(), decorator_kwargs={}, wi @decorators.consume(*decorator_args, **decorator_kwargs) async def view_function(request, *args, **kwargs): if with_id: - assert kwargs['id'] == 1 + assert kwargs["id"] == 1 else: - assert 'id' not in kwargs + assert "id" not in kwargs return Response(data) @@ -102,10 +104,10 @@ async def view_function(request, *args, **kwargs): @decorators.consume(*decorator_args, **decorator_kwargs) def view_function(request, *args, **kwargs): if with_id: - assert kwargs['id'] == 1 + assert kwargs["id"] == 1 else: - assert 'id' not in kwargs + assert "id" not in kwargs return Response(data) @@ -118,6 +120,7 @@ class BaseView(APIView): """ List all snippets, or create a new snippet. """ + permission_classes = [AllowAny] BaseView.__test__ = False @@ -125,20 +128,20 @@ class BaseView(APIView): @decorators.consume(*decorator_args, **decorator_kwargs) def sync_method(self, request, *args, **kwargs): if with_id: - assert kwargs['id'] == 1 + assert kwargs["id"] == 1 else: - assert 'id' not in kwargs + assert "id" not in kwargs return Response(data) @decorators.consume(*decorator_args, **decorator_kwargs) async def async_method(self, request, *args, **kwargs): if with_id: - assert kwargs['id'] == 1 + assert kwargs["id"] == 1 else: - assert 'id' not in kwargs + assert "id" not in kwargs return Response(data) @@ -148,7 +151,7 @@ async def async_method(self, request, *args, **kwargs): def build_params(): - methods = ['get', 'post', 'put', 'delete'] + methods = ["get", "post", "put", "delete"] class_baseds = [True, False] with_ids = [True, False] is_asyncs = [True, False] @@ -156,15 +159,15 @@ def build_params(): for method in methods: for class_based in class_baseds: for with_id in with_ids: - if method not in ['get', 'post'] and with_id is False: + if method not in ["get", "post"] and with_id is False: continue - if method == 'post' and with_id is True: + if method == "post" and with_id is True: continue for is_async in is_asyncs: args = (method, class_based, with_id, is_async) - yield args, 'method_{}__class_based_{}__with_id_{}__is_async_{}'.format(*args) + yield args, "method_{}__class_based_{}__with_id_{}__is_async_{}".format(*args) def make_view(request, fake, decorator_params={}): @@ -178,7 +181,7 @@ def make_view(request, fake, decorator_params={}): decorator_params_in_fixture = decorator_params extra = {} if with_id: - extra['id'] = 1 + extra["id"] = 1 @sync_to_async def wrapper(user=None, decorator_params={}, url_params={}): @@ -191,24 +194,28 @@ def wrapper(user=None, decorator_params={}, url_params={}): url_params = {**url_params, **extra} if class_based: - view = build_view_class(method.upper(), - res, - decorator_args=(SERVICE, ), - decorator_kwargs=decorator_params, - with_id=with_id, - is_async=is_async) + view = build_view_class( + method.upper(), + res, + decorator_args=(SERVICE,), + decorator_kwargs=decorator_params, + with_id=with_id, + is_async=is_async, + ) view = view.as_view() else: - view = build_view_function(method.upper(), - res, - decorator_args=(SERVICE, ), - decorator_kwargs=decorator_params, - with_id=with_id, - is_async=is_async) + view = build_view_function( + method.upper(), + res, + decorator_args=(SERVICE,), + decorator_kwargs=decorator_params, + with_id=with_id, + is_async=is_async, + ) factory = APIRequestFactory() - url = '/they-killed-kenny' + url = "/they-killed-kenny" if with_id: url += f'/{url_params["id"]}' @@ -244,8 +251,12 @@ async def unpack(user=None, 
decorator_params={}, url_params={}): if with_id: url_params = {**url_params, **extra} - return await wrapper(user=user, decorator_params=decorator_params, - url_params=url_params), res, class_based, url_params + return ( + await wrapper(user=user, decorator_params=decorator_params, url_params=url_params), + res, + class_based, + url_params, + ) return unpack @@ -257,12 +268,12 @@ def make_view_all_cases(request, fake): @pytest.fixture(params=[param for param, _ in build_params()], ids=[id for _, id in build_params()]) def make_view_consumer_cases(request, fake): - return make_view(request, fake, decorator_params={'consumer': CONSUMER_MOCK}) + return make_view(request, fake, decorator_params={"consumer": CONSUMER_MOCK}) @pytest.fixture(params=[param for param, _ in build_params()], ids=[id for _, id in build_params()]) def make_view_lifetime_cases(request, fake): - return make_view(request, fake, decorator_params={'consumer': CONSUMER_WITH_TIME_OF_LIFE_MOCK}) + return make_view(request, fake, decorator_params={"consumer": CONSUMER_WITH_TIME_OF_LIFE_MOCK}) class TestNoConsumer: @@ -273,12 +284,12 @@ async def test_anonymous_user(self, database: capy.Database, make_view_all_cases view, _, _, _ = await make_view_all_cases(user=None, decorator_params={}, url_params={}) response, _ = await view() - expected = {'detail': 'anonymous-user-not-enough-consumables', 'status_code': 402} + expected = {"detail": "anonymous-user-not-enough-consumables", "status_code": 402} - assert json.loads(response.content.decode('utf-8')) == expected + assert json.loads(response.content.decode("utf-8")) == expected assert response.status_code == status.HTTP_402_PAYMENT_REQUIRED # self.assertEqual(CONSUMER_MOCK.call_args_list, []) - assert await database.alist_of('payments.ConsumptionSession') == [] + assert await database.alist_of("payments.ConsumptionSession") == [] assert models.ConsumptionSession.build_session.call_args_list == [] @sync_to_async @@ -294,12 +305,12 @@ async def test_with_user(self, database: capy.Database, make_view_all_cases): view, _, _, _ = await make_view_all_cases(user=model.user, decorator_params={}, url_params={}) response, _ = await view() - expected = {'detail': 'with-consumer-not-enough-consumables', 'status_code': 402} + expected = {"detail": "with-consumer-not-enough-consumables", "status_code": 402} - assert json.loads(response.content.decode('utf-8')) == expected + assert json.loads(response.content.decode("utf-8")) == expected assert response.status_code == status.HTTP_402_PAYMENT_REQUIRED # self.assertEqual(CONSUMER_MOCK.call_args_list, []) - assert await database.alist_of('payments.ConsumptionSession') == [] + assert await database.alist_of("payments.ConsumptionSession") == [] assert models.ConsumptionSession.build_session.call_args_list == [] @sync_to_async @@ -315,12 +326,12 @@ async def test_with_user__with_permission__dont_match(self, database: capy.Datab view, _, _, _ = await make_view_all_cases(user=model.user, decorator_params={}, url_params={}) response, _ = await view() - expected = {'detail': 'with-consumer-not-enough-consumables', 'status_code': 402} + expected = {"detail": "with-consumer-not-enough-consumables", "status_code": 402} - assert json.loads(response.content.decode('utf-8')) == expected + assert json.loads(response.content.decode("utf-8")) == expected assert response.status_code == status.HTTP_402_PAYMENT_REQUIRED # self.assertEqual(CONSUMER_MOCK.call_args_list, []) - assert await database.alist_of('payments.ConsumptionSession') == [] + assert await 
database.alist_of("payments.ConsumptionSession") == [] assert models.ConsumptionSession.build_session.call_args_list == [] @sync_to_async @@ -331,21 +342,22 @@ def check_consume_service(): @pytest.mark.asyncio @pytest.mark.django_db(reset_sequences=True) - async def test_with_user__with_group_related_to_permission__without_consumable(self, bc: Breathecode, - make_view_all_cases): - user = {'user_permissions': []} - services = [{}, {'slug': PERMISSION}] + async def test_with_user__with_group_related_to_permission__without_consumable( + self, bc: Breathecode, make_view_all_cases + ): + user = {"user_permissions": []} + services = [{}, {"slug": PERMISSION}] - model = await bc.database.acreate(user=user, service=services, service_item={'service_id': 2}) + model = await bc.database.acreate(user=user, service=services, service_item={"service_id": 2}) view, _, _, _ = await make_view_all_cases(user=model.user, decorator_params={}, url_params={}) response, _ = await view() - expected = {'detail': 'with-consumer-not-enough-consumables', 'status_code': 402} + expected = {"detail": "with-consumer-not-enough-consumables", "status_code": 402} - assert json.loads(response.content.decode('utf-8')) == expected + assert json.loads(response.content.decode("utf-8")) == expected assert response.status_code == status.HTTP_402_PAYMENT_REQUIRED # self.assertEqual(CONSUMER_MOCK.call_args_list, []) - assert await bc.database.alist_of('payments.ConsumptionSession') == [] + assert await bc.database.alist_of("payments.ConsumptionSession") == [] assert models.ConsumptionSession.build_session.call_args_list == [] @sync_to_async @@ -357,24 +369,24 @@ def check_consume_service(): @pytest.mark.asyncio @pytest.mark.django_db(reset_sequences=True) async def test_with_user__with_group_related_to_permission__consumable__how_many_minus_1( - self, bc: Breathecode, make_view_all_cases): - user = {'user_permissions': []} - services = [{}, {'slug': PERMISSION}] + self, bc: Breathecode, make_view_all_cases + ): + user = {"user_permissions": []} + services = [{}, {"slug": PERMISSION}] - consumable = {'how_many': -1} - model = await bc.database.acreate(user=user, - service=services, - service_item={'service_id': 2}, - consumable=consumable) + consumable = {"how_many": -1} + model = await bc.database.acreate( + user=user, service=services, service_item={"service_id": 2}, consumable=consumable + ) view, expected, _, _ = await make_view_all_cases(user=model.user, decorator_params={}, url_params={}) response, _ = await view() - assert json.loads(response.content.decode('utf-8')) == expected + assert json.loads(response.content.decode("utf-8")) == expected assert response.status_code == status.HTTP_200_OK # self.assertEqual(CONSUMER_MOCK.call_args_list, []) - assert await bc.database.alist_of('payments.ConsumptionSession') == [] + assert await bc.database.alist_of("payments.ConsumptionSession") == [] assert models.ConsumptionSession.build_session.call_args_list == [] @sync_to_async @@ -388,25 +400,25 @@ def check_consume_service(): @pytest.mark.asyncio @pytest.mark.django_db(reset_sequences=True) async def test_with_user__with_group_related_to_permission__consumable__how_many_0( - self, bc: Breathecode, make_view_all_cases): - user = {'user_permissions': []} - services = [{}, {'slug': PERMISSION}] + self, bc: Breathecode, make_view_all_cases + ): + user = {"user_permissions": []} + services = [{}, {"slug": PERMISSION}] - consumable = {'how_many': 0} - model = await bc.database.acreate(user=user, - service=services, - 
service_item={'service_id': 2}, - consumable=consumable) + consumable = {"how_many": 0} + model = await bc.database.acreate( + user=user, service=services, service_item={"service_id": 2}, consumable=consumable + ) view, _, _, _ = await make_view_all_cases(user=model.user, decorator_params={}, url_params={}) response, _ = await view() - expected = {'detail': 'with-consumer-not-enough-consumables', 'status_code': 402} + expected = {"detail": "with-consumer-not-enough-consumables", "status_code": 402} - assert json.loads(response.content.decode('utf-8')) == expected + assert json.loads(response.content.decode("utf-8")) == expected assert response.status_code == status.HTTP_402_PAYMENT_REQUIRED # self.assertEqual(CONSUMER_MOCK.call_args_list, []) - assert await bc.database.alist_of('payments.ConsumptionSession') == [] + assert await bc.database.alist_of("payments.ConsumptionSession") == [] assert models.ConsumptionSession.build_session.call_args_list == [] @sync_to_async @@ -418,24 +430,24 @@ def check_consume_service(): @pytest.mark.asyncio @pytest.mark.django_db(reset_sequences=True) async def test_with_user__with_group_related_to_permission__consumable__how_many_gte_1( - self, bc: Breathecode, make_view_all_cases): - user = {'user_permissions': []} - services = [{}, {'slug': PERMISSION}] + self, bc: Breathecode, make_view_all_cases + ): + user = {"user_permissions": []} + services = [{}, {"slug": PERMISSION}] - consumable = {'how_many': random.randint(1, 100)} - model = await bc.database.acreate(user=user, - service=services, - service_item={'service_id': 2}, - consumable=consumable) + consumable = {"how_many": random.randint(1, 100)} + model = await bc.database.acreate( + user=user, service=services, service_item={"service_id": 2}, consumable=consumable + ) view, expected, _, _ = await make_view_all_cases(user=model.user, decorator_params={}, url_params={}) response, _ = await view() - assert json.loads(response.content.decode('utf-8')) == expected + assert json.loads(response.content.decode("utf-8")) == expected assert response.status_code == status.HTTP_200_OK # self.assertEqual(CONSUMER_MOCK.call_args_list, []) - assert await bc.database.alist_of('payments.ConsumptionSession') == [] + assert await bc.database.alist_of("payments.ConsumptionSession") == [] assert models.ConsumptionSession.build_session.call_args_list == [] @sync_to_async @@ -449,25 +461,24 @@ def check_consume_service(): @pytest.mark.asyncio @pytest.mark.django_db(reset_sequences=True) async def test_with_user__with_group_related_to_permission__group_was_blacklisted_by_cb( - self, bc: Breathecode, make_view_all_cases): - user = {'user_permissions': []} - services = [{}, {'slug': PERMISSION}] - group = {'permission_id': 2, 'name': 'secret'} - consumable = {'how_many': 1} - model = await bc.database.acreate(user=user, - service=services, - group=group, - service_item={'service_id': 2}, - consumable=consumable) + self, bc: Breathecode, make_view_all_cases + ): + user = {"user_permissions": []} + services = [{}, {"slug": PERMISSION}] + group = {"permission_id": 2, "name": "secret"} + consumable = {"how_many": 1} + model = await bc.database.acreate( + user=user, service=services, group=group, service_item={"service_id": 2}, consumable=consumable + ) view, expected, _, _ = await make_view_all_cases(user=model.user, decorator_params={}, url_params={}) response, _ = await view() - assert json.loads(response.content.decode('utf-8')) == expected + assert json.loads(response.content.decode("utf-8")) == expected assert 
response.status_code == status.HTTP_200_OK # self.assertEqual(CONSUMER_MOCK.call_args_list, []) - assert await bc.database.alist_of('payments.ConsumptionSession') == [] + assert await bc.database.alist_of("payments.ConsumptionSession") == [] assert models.ConsumptionSession.build_session.call_args_list == [] @sync_to_async @@ -487,12 +498,12 @@ async def test__function__get__anonymous_user(self, bc: Breathecode, make_view_c view, _, _, _ = await make_view_consumer_cases(user=None, decorator_params={}, url_params={}) response, _ = await view() - expected = {'detail': 'anonymous-user-not-enough-consumables', 'status_code': 402} + expected = {"detail": "anonymous-user-not-enough-consumables", "status_code": 402} - assert json.loads(response.content.decode('utf-8')) == expected + assert json.loads(response.content.decode("utf-8")) == expected assert response.status_code == status.HTTP_402_PAYMENT_REQUIRED # self.assertEqual(CONSUMER_MOCK.call_args_list, []) - assert await bc.database.alist_of('payments.ConsumptionSession') == [] + assert await bc.database.alist_of("payments.ConsumptionSession") == [] assert models.ConsumptionSession.build_session.call_args_list == [] @sync_to_async @@ -508,12 +519,12 @@ async def test__function__get__with_user(self, bc: Breathecode, make_view_consum view, _, _, _ = await make_view_consumer_cases(user=model.user, decorator_params={}, url_params={}) response, _ = await view() - expected = {'detail': 'with-consumer-not-enough-consumables', 'status_code': 402} + expected = {"detail": "with-consumer-not-enough-consumables", "status_code": 402} - assert json.loads(response.content.decode('utf-8')) == expected + assert json.loads(response.content.decode("utf-8")) == expected assert response.status_code == status.HTTP_402_PAYMENT_REQUIRED # self.assertEqual(CONSUMER_MOCK.call_args_list, []) - assert await bc.database.alist_of('payments.ConsumptionSession') == [] + assert await bc.database.alist_of("payments.ConsumptionSession") == [] assert models.ConsumptionSession.build_session.call_args_list == [] @sync_to_async @@ -524,18 +535,19 @@ def check_consume_service(): @pytest.mark.asyncio @pytest.mark.django_db(reset_sequences=True) - async def test__function__get__with_user__with_permission__dont_match(self, bc: Breathecode, - make_view_consumer_cases): + async def test__function__get__with_user__with_permission__dont_match( + self, bc: Breathecode, make_view_consumer_cases + ): model = await bc.database.acreate(user=1, permission=1) view, _, _, _ = await make_view_consumer_cases(user=model.user, decorator_params={}, url_params={}) response, _ = await view() - expected = {'detail': 'with-consumer-not-enough-consumables', 'status_code': 402} + expected = {"detail": "with-consumer-not-enough-consumables", "status_code": 402} - assert json.loads(response.content.decode('utf-8')) == expected + assert json.loads(response.content.decode("utf-8")) == expected assert response.status_code == status.HTTP_402_PAYMENT_REQUIRED # self.assertEqual(CONSUMER_MOCK.call_args_list, []) - assert await bc.database.alist_of('payments.ConsumptionSession') == [] + assert await bc.database.alist_of("payments.ConsumptionSession") == [] assert models.ConsumptionSession.build_session.call_args_list == [] @sync_to_async @@ -547,33 +559,37 @@ def check_consume_service(): @pytest.mark.asyncio @pytest.mark.django_db(reset_sequences=True) async def test__function__get__with_user__with_group_related_to_permission__without_consumable( - self, bc: Breathecode, make_view_consumer_cases, partial_equality): 
- user = {'user_permissions': []} - services = [{}, {'slug': PERMISSION}] + self, bc: Breathecode, make_view_consumer_cases, partial_equality + ): + user = {"user_permissions": []} + services = [{}, {"slug": PERMISSION}] - model = await bc.database.acreate(user=user, service=services, service_item={'service_id': 2}) + model = await bc.database.acreate(user=user, service=services, service_item={"service_id": 2}) view, _, based_class, _ = await make_view_consumer_cases(user=model.user, decorator_params={}, url_params={}) response, params = await view() - expected = {'detail': 'with-consumer-not-enough-consumables', 'status_code': 402} + expected = {"detail": "with-consumer-not-enough-consumables", "status_code": 402} - assert json.loads(response.content.decode('utf-8')) == expected + assert json.loads(response.content.decode("utf-8")) == expected assert response.status_code == status.HTTP_402_PAYMENT_REQUIRED - Consumable = bc.database.get_model('payments.Consumable') + Consumable = bc.database.get_model("payments.Consumable") consumables = Consumable.objects.filter() assert len(CONSUMER_MOCK.call_args_list) == 1 args, kwargs = CONSUMER_MOCK.call_args_list[0] context, args, kwargs = args - assert isinstance(context['request'], Request) - partial_equality(context, { - 'utc_now': UTC_NOW, - 'consumer': CONSUMER_MOCK, - 'permission': PERMISSION, - 'consumables': consumables, - }) + assert isinstance(context["request"], Request) + partial_equality( + context, + { + "utc_now": UTC_NOW, + "consumer": CONSUMER_MOCK, + "permission": PERMISSION, + "consumables": consumables, + }, + ) if based_class: assert len(args) == 2 @@ -585,7 +601,7 @@ async def test__function__get__with_user__with_group_related_to_permission__with assert kwargs == params - assert await bc.database.alist_of('payments.ConsumptionSession') == [] + assert await bc.database.alist_of("payments.ConsumptionSession") == [] assert models.ConsumptionSession.build_session.call_args_list == [] @sync_to_async @@ -597,39 +613,42 @@ def check_consume_service(): @pytest.mark.asyncio @pytest.mark.django_db(reset_sequences=True) async def test__function__get__with_user__with_group_related_to_permission__consumable__how_many_minus_1( - self, bc: Breathecode, make_view_consumer_cases, partial_equality): - user = {'user_permissions': []} - services = [{}, {'slug': PERMISSION}] + self, bc: Breathecode, make_view_consumer_cases, partial_equality + ): + user = {"user_permissions": []} + services = [{}, {"slug": PERMISSION}] - consumable = {'how_many': -1} - model = await bc.database.acreate(user=user, - service=services, - service_item={'service_id': 2}, - consumable=consumable) + consumable = {"how_many": -1} + model = await bc.database.acreate( + user=user, service=services, service_item={"service_id": 2}, consumable=consumable + ) - view, expected, based_class, params = await make_view_consumer_cases(user=model.user, - decorator_params={}, - url_params={}) + view, expected, based_class, params = await make_view_consumer_cases( + user=model.user, decorator_params={}, url_params={} + ) response, _ = await view() - assert json.loads(response.content.decode('utf-8')) == expected + assert json.loads(response.content.decode("utf-8")) == expected assert response.status_code == status.HTTP_200_OK - Consumable = bc.database.get_model('payments.Consumable') + Consumable = bc.database.get_model("payments.Consumable") consumables = Consumable.objects.filter() assert len(CONSUMER_MOCK.call_args_list) == 1 args, kwargs = CONSUMER_MOCK.call_args_list[0] context, 
args, kwargs = args - assert isinstance(context['request'], Request) - partial_equality(context, { - 'utc_now': UTC_NOW, - 'consumer': CONSUMER_MOCK, - 'permission': PERMISSION, - 'consumables': consumables, - }) + assert isinstance(context["request"], Request) + partial_equality( + context, + { + "utc_now": UTC_NOW, + "consumer": CONSUMER_MOCK, + "permission": PERMISSION, + "consumables": consumables, + }, + ) if based_class: assert len(args) == 2 @@ -641,7 +660,7 @@ async def test__function__get__with_user__with_group_related_to_permission__cons assert kwargs == params - assert await bc.database.alist_of('payments.ConsumptionSession') == [] + assert await bc.database.alist_of("payments.ConsumptionSession") == [] assert models.ConsumptionSession.build_session.call_args_list == [] @sync_to_async @@ -655,40 +674,43 @@ def check_consume_service(): @pytest.mark.asyncio @pytest.mark.django_db(reset_sequences=True) async def test__function__get__with_user__with_group_related_to_permission__consumable__how_many_0( - self, bc: Breathecode, make_view_consumer_cases, partial_equality): - user = {'user_permissions': []} - services = [{}, {'slug': PERMISSION}] + self, bc: Breathecode, make_view_consumer_cases, partial_equality + ): + user = {"user_permissions": []} + services = [{}, {"slug": PERMISSION}] - consumable = {'how_many': 0} - model = await bc.database.acreate(user=user, - service=services, - service_item={'service_id': 2}, - consumable=consumable) + consumable = {"how_many": 0} + model = await bc.database.acreate( + user=user, service=services, service_item={"service_id": 2}, consumable=consumable + ) - view, _, based_class, params = await make_view_consumer_cases(user=model.user, - decorator_params={}, - url_params={}) + view, _, based_class, params = await make_view_consumer_cases( + user=model.user, decorator_params={}, url_params={} + ) response, _ = await view() - expected = {'detail': 'with-consumer-not-enough-consumables', 'status_code': 402} + expected = {"detail": "with-consumer-not-enough-consumables", "status_code": 402} - assert json.loads(response.content.decode('utf-8')) == expected + assert json.loads(response.content.decode("utf-8")) == expected assert response.status_code == status.HTTP_402_PAYMENT_REQUIRED - Consumable = bc.database.get_model('payments.Consumable') + Consumable = bc.database.get_model("payments.Consumable") consumables = Consumable.objects.filter() assert len(CONSUMER_MOCK.call_args_list) == 1 args, kwargs = CONSUMER_MOCK.call_args_list[0] context, args, kwargs = args - assert isinstance(context['request'], Request) - partial_equality(context, { - 'utc_now': UTC_NOW, - 'consumer': CONSUMER_MOCK, - 'permission': PERMISSION, - 'consumables': consumables, - }) + assert isinstance(context["request"], Request) + partial_equality( + context, + { + "utc_now": UTC_NOW, + "consumer": CONSUMER_MOCK, + "permission": PERMISSION, + "consumables": consumables, + }, + ) if based_class: assert len(args) == 2 @@ -700,7 +722,7 @@ async def test__function__get__with_user__with_group_related_to_permission__cons assert kwargs == params - assert await bc.database.alist_of('payments.ConsumptionSession') == [] + assert await bc.database.alist_of("payments.ConsumptionSession") == [] assert models.ConsumptionSession.build_session.call_args_list == [] @sync_to_async @@ -712,39 +734,42 @@ def check_consume_service(): @pytest.mark.asyncio @pytest.mark.django_db(reset_sequences=True) async def 
test__function__get__with_user__with_group_related_to_permission__consumable__how_many_gte_1( - self, bc: Breathecode, make_view_consumer_cases, partial_equality): - user = {'user_permissions': []} - services = [{}, {'slug': PERMISSION}] + self, bc: Breathecode, make_view_consumer_cases, partial_equality + ): + user = {"user_permissions": []} + services = [{}, {"slug": PERMISSION}] - consumable = {'how_many': random.randint(1, 100)} - model = await bc.database.acreate(user=user, - service=services, - service_item={'service_id': 2}, - consumable=consumable) + consumable = {"how_many": random.randint(1, 100)} + model = await bc.database.acreate( + user=user, service=services, service_item={"service_id": 2}, consumable=consumable + ) - view, expected, based_class, params = await make_view_consumer_cases(user=model.user, - decorator_params={}, - url_params={}) + view, expected, based_class, params = await make_view_consumer_cases( + user=model.user, decorator_params={}, url_params={} + ) response, _ = await view() - assert json.loads(response.content.decode('utf-8')) == expected + assert json.loads(response.content.decode("utf-8")) == expected assert response.status_code == status.HTTP_200_OK - Consumable = bc.database.get_model('payments.Consumable') + Consumable = bc.database.get_model("payments.Consumable") consumables = Consumable.objects.filter() assert len(CONSUMER_MOCK.call_args_list) == 1 args, kwargs = CONSUMER_MOCK.call_args_list[0] context, args, kwargs = args - assert isinstance(context['request'], Request) - partial_equality(context, { - 'utc_now': UTC_NOW, - 'consumer': CONSUMER_MOCK, - 'permission': PERMISSION, - 'consumables': consumables, - }) + assert isinstance(context["request"], Request) + partial_equality( + context, + { + "utc_now": UTC_NOW, + "consumer": CONSUMER_MOCK, + "permission": PERMISSION, + "consumables": consumables, + }, + ) if based_class: assert len(args) == 2 @@ -756,7 +781,7 @@ async def test__function__get__with_user__with_group_related_to_permission__cons assert kwargs == params - assert await bc.database.alist_of('payments.ConsumptionSession') == [] + assert await bc.database.alist_of("payments.ConsumptionSession") == [] assert models.ConsumptionSession.build_session.call_args_list == [] @sync_to_async @@ -770,41 +795,43 @@ def check_consume_service(): @pytest.mark.asyncio @pytest.mark.django_db(reset_sequences=True) async def test__function__get__with_user__with_group_related_to_permission__group_was_blacklisted_by_cb( - self, bc: Breathecode, make_view_consumer_cases, partial_equality): - user = {'user_permissions': []} - services = [{}, {'slug': PERMISSION}] - group = {'permission_id': 2, 'name': 'secret'} - consumable = {'how_many': 1} - model = await bc.database.acreate(user=user, - service=services, - group=group, - service_item={'service_id': 2}, - consumable=consumable) - - view, _, based_class, params = await make_view_consumer_cases(user=model.user, - decorator_params={}, - url_params={}) + self, bc: Breathecode, make_view_consumer_cases, partial_equality + ): + user = {"user_permissions": []} + services = [{}, {"slug": PERMISSION}] + group = {"permission_id": 2, "name": "secret"} + consumable = {"how_many": 1} + model = await bc.database.acreate( + user=user, service=services, group=group, service_item={"service_id": 2}, consumable=consumable + ) + + view, _, based_class, params = await make_view_consumer_cases( + user=model.user, decorator_params={}, url_params={} + ) response, _ = await view() - expected = {'detail': 
'with-consumer-not-enough-consumables', 'status_code': 402} + expected = {"detail": "with-consumer-not-enough-consumables", "status_code": 402} - assert json.loads(response.content.decode('utf-8')) == expected + assert json.loads(response.content.decode("utf-8")) == expected assert response.status_code == status.HTTP_402_PAYMENT_REQUIRED - Consumable = bc.database.get_model('payments.Consumable') + Consumable = bc.database.get_model("payments.Consumable") consumables = Consumable.objects.filter() assert len(CONSUMER_MOCK.call_args_list) == 1 args, kwargs = CONSUMER_MOCK.call_args_list[0] context, args, kwargs = args - assert isinstance(context['request'], Request) - partial_equality(context, { - 'utc_now': UTC_NOW, - 'consumer': CONSUMER_MOCK, - 'permission': PERMISSION, - 'consumables': consumables, - }) + assert isinstance(context["request"], Request) + partial_equality( + context, + { + "utc_now": UTC_NOW, + "consumer": CONSUMER_MOCK, + "permission": PERMISSION, + "consumables": consumables, + }, + ) if based_class: assert len(args) == 2 @@ -816,7 +843,7 @@ async def test__function__get__with_user__with_group_related_to_permission__grou assert kwargs == params - assert await bc.database.alist_of('payments.ConsumptionSession') == [] + assert await bc.database.alist_of("payments.ConsumptionSession") == [] assert models.ConsumptionSession.build_session.call_args_list == [] @sync_to_async @@ -827,42 +854,44 @@ def check_consume_service(): @pytest.mark.asyncio @pytest.mark.django_db(reset_sequences=True) - async def test__function__get__with_user__without_consumption_session(self, bc: Breathecode, - make_view_lifetime_cases, partial_equality): + async def test__function__get__with_user__without_consumption_session( + self, bc: Breathecode, make_view_lifetime_cases, partial_equality + ): - user = {'user_permissions': []} - services = [{}, {'slug': PERMISSION}] + user = {"user_permissions": []} + services = [{}, {"slug": PERMISSION}] - consumable = {'how_many': 1} - model = await bc.database.acreate(user=user, - service=services, - service_item={'service_id': 2}, - consumable=consumable) + consumable = {"how_many": 1} + model = await bc.database.acreate( + user=user, service=services, service_item={"service_id": 2}, consumable=consumable + ) - view, expected, based_class, _ = await make_view_lifetime_cases(user=model.user, - decorator_params={}, - url_params={}) + view, expected, based_class, _ = await make_view_lifetime_cases( + user=model.user, decorator_params={}, url_params={} + ) response, params = await view() - assert json.loads(response.content.decode('utf-8')) == expected + assert json.loads(response.content.decode("utf-8")) == expected assert response.status_code == status.HTTP_200_OK - Consumable = bc.database.get_model('payments.Consumable') + Consumable = bc.database.get_model("payments.Consumable") consumables = Consumable.objects.filter() assert len(CONSUMER_WITH_TIME_OF_LIFE_MOCK.call_args_list) == 1 args, kwargs = CONSUMER_WITH_TIME_OF_LIFE_MOCK.call_args_list[0] context, args, kwargs = args - assert isinstance(context['request'], Request) + assert isinstance(context["request"], Request) partial_equality( - context, { - 'utc_now': UTC_NOW, - 'consumer': CONSUMER_WITH_TIME_OF_LIFE_MOCK, - 'permission': PERMISSION, - 'consumables': consumables, - }) + context, + { + "utc_now": UTC_NOW, + "consumer": CONSUMER_WITH_TIME_OF_LIFE_MOCK, + "permission": PERMISSION, + "consumables": consumables, + }, + ) if based_class: assert len(args) == 2 @@ -874,7 +903,7 @@ async def 
test__function__get__with_user__without_consumption_session(self, bc: assert kwargs == params - assert await bc.database.alist_of('payments.ConsumptionSession') == [] + assert await bc.database.alist_of("payments.ConsumptionSession") == [] assert models.ConsumptionSession.build_session.call_args_list == [] @sync_to_async @@ -890,43 +919,44 @@ class TestConsumptionSession: @pytest.mark.asyncio @pytest.mark.django_db(reset_sequences=True) - async def test__with_user__consumption_session__does_not_match(self, bc: Breathecode, make_view_lifetime_cases, - partial_equality): + async def test__with_user__consumption_session__does_not_match( + self, bc: Breathecode, make_view_lifetime_cases, partial_equality + ): - user = {'user_permissions': []} - services = [{}, {'slug': PERMISSION}] + user = {"user_permissions": []} + services = [{}, {"slug": PERMISSION}] - consumable = {'how_many': 1} - model = await bc.database.acreate(user=user, - service=services, - service_item={'service_id': 2}, - consumable=consumable, - consumption_session=1) + consumable = {"how_many": 1} + model = await bc.database.acreate( + user=user, service=services, service_item={"service_id": 2}, consumable=consumable, consumption_session=1 + ) - view, expected, based_class, params = await make_view_lifetime_cases(user=model.user, - decorator_params={}, - url_params={}) + view, expected, based_class, params = await make_view_lifetime_cases( + user=model.user, decorator_params={}, url_params={} + ) response, _ = await view() - assert json.loads(response.content.decode('utf-8')) == expected + assert json.loads(response.content.decode("utf-8")) == expected assert response.status_code == status.HTTP_200_OK - Consumable = bc.database.get_model('payments.Consumable') + Consumable = bc.database.get_model("payments.Consumable") consumables = Consumable.objects.filter() assert len(CONSUMER_WITH_TIME_OF_LIFE_MOCK.call_args_list) == 1 args, kwargs = CONSUMER_WITH_TIME_OF_LIFE_MOCK.call_args_list[0] context, args, kwargs = args - assert isinstance(context['request'], Request) + assert isinstance(context["request"], Request) partial_equality( - context, { - 'utc_now': UTC_NOW, - 'consumer': CONSUMER_WITH_TIME_OF_LIFE_MOCK, - 'permission': PERMISSION, - 'consumables': consumables, - }) + context, + { + "utc_now": UTC_NOW, + "consumer": CONSUMER_WITH_TIME_OF_LIFE_MOCK, + "permission": PERMISSION, + "consumables": consumables, + }, + ) if based_class: assert len(args) == 2 @@ -937,7 +967,7 @@ async def test__with_user__consumption_session__does_not_match(self, bc: Breathe assert isinstance(args[0], Request) assert kwargs == params - assert await bc.database.alist_of('payments.ConsumptionSession') == [ + assert await bc.database.alist_of("payments.ConsumptionSession") == [ bc.format.to_dict(model.consumption_session), ] @@ -954,46 +984,42 @@ def check_consume_service(): @pytest.mark.asyncio @pytest.mark.django_db(reset_sequences=True) async def test__with_user__consumption_session__does_not_match__consumables_minus_sessions_et_0( - self, bc: Breathecode, make_view_lifetime_cases): + self, bc: Breathecode, make_view_lifetime_cases + ): - user = {'user_permissions': []} - services = [{}, {'slug': PERMISSION}] + user = {"user_permissions": []} + services = [{}, {"slug": PERMISSION}] n = random.randint(1, 4) - consumable = {'how_many': n} + consumable = {"how_many": n} consumption_session = { - 'eta': UTC_NOW + time_of_life, - 'how_many': n, - 'request': { - 'args': [], - 'kwargs': {}, - 'headers': { - 'academy': None - }, - 'user': 1 - } + "eta": 
UTC_NOW + time_of_life, + "how_many": n, + "request": {"args": [], "kwargs": {}, "headers": {"academy": None}, "user": 1}, } - model = await bc.database.acreate(user=user, - service=services, - service_item={'service_id': 2}, - consumable=consumable, - consumption_session=consumption_session) - - view, expected, based_class, params = await make_view_lifetime_cases(user=model.user, - decorator_params={}, - url_params={}) - - model.consumption_session.request['kwargs'] = params + model = await bc.database.acreate( + user=user, + service=services, + service_item={"service_id": 2}, + consumable=consumable, + consumption_session=consumption_session, + ) + + view, expected, based_class, params = await make_view_lifetime_cases( + user=model.user, decorator_params={}, url_params={} + ) + + model.consumption_session.request["kwargs"] = params await model.consumption_session.asave() response, _ = await view() - assert json.loads(response.content.decode('utf-8')) == expected + assert json.loads(response.content.decode("utf-8")) == expected assert response.status_code == status.HTTP_200_OK assert len(CONSUMER_WITH_TIME_OF_LIFE_MOCK.call_args_list) == 1 - assert await bc.database.alist_of('payments.ConsumptionSession') == [ + assert await bc.database.alist_of("payments.ConsumptionSession") == [ bc.format.to_dict(model.consumption_session), ] @@ -1010,43 +1036,38 @@ def check_consume_service(): async def test__with_user__consumption_session__match(self, bc: Breathecode, make_view_lifetime_cases): CONSUMER_WITH_TIME_OF_LIFE_MOCK.call_args_list = [] - user = {'user_permissions': []} - services = [{}, {'slug': PERMISSION}] + user = {"user_permissions": []} + services = [{}, {"slug": PERMISSION}] - consumable = {'how_many': 1} + consumable = {"how_many": 1} consumption_session = { - 'eta': UTC_NOW + time_of_life, - 'request': { - 'args': [], - 'kwargs': {}, - 'headers': { - 'academy': None - }, - 'user': 1 - } + "eta": UTC_NOW + time_of_life, + "request": {"args": [], "kwargs": {}, "headers": {"academy": None}, "user": 1}, } - model = await bc.database.acreate(user=user, - service=services, - service_item={'service_id': 2}, - consumable=consumable, - consumption_session=consumption_session) + model = await bc.database.acreate( + user=user, + service=services, + service_item={"service_id": 2}, + consumable=consumable, + consumption_session=consumption_session, + ) - view, expected, based_class, params = await make_view_lifetime_cases(user=model.user, - decorator_params={}, - url_params={}) + view, expected, based_class, params = await make_view_lifetime_cases( + user=model.user, decorator_params={}, url_params={} + ) - model.consumption_session.request['kwargs'] = params + model.consumption_session.request["kwargs"] = params await model.consumption_session.asave() response, _ = await view() - assert json.loads(response.content.decode('utf-8')) == expected + assert json.loads(response.content.decode("utf-8")) == expected assert response.status_code == status.HTTP_200_OK assert len(CONSUMER_WITH_TIME_OF_LIFE_MOCK.call_args_list) == 1 - assert await bc.database.alist_of('payments.ConsumptionSession') == [ + assert await bc.database.alist_of("payments.ConsumptionSession") == [ bc.format.to_dict(model.consumption_session), ] diff --git a/breathecode/utils/tests/decorators/tests_has_permission.py b/breathecode/utils/tests/decorators/tests_has_permission.py index bc3e79207..b87a281c1 100644 --- a/breathecode/utils/tests/decorators/tests_has_permission.py +++ 
b/breathecode/utils/tests/decorators/tests_has_permission.py @@ -15,7 +15,7 @@ from breathecode.tests.mixins.breathecode_mixin.breathecode import Breathecode from capyc.rest_framework import pytest as capy -PERMISSION = 'can_kill_kenny' +PERMISSION = "can_kill_kenny" UTC_NOW = timezone.now() @@ -28,10 +28,10 @@ def build_view_function(method, data, decorator_args=(), decorator_kwargs={}, wi @decorators.has_permission(*decorator_args, **decorator_kwargs) async def view_function(request, *args, **kwargs): if with_id: - assert kwargs['id'] == 1 + assert kwargs["id"] == 1 else: - assert 'id' not in kwargs + assert "id" not in kwargs return Response(data) @@ -42,10 +42,10 @@ async def view_function(request, *args, **kwargs): @decorators.has_permission(*decorator_args, **decorator_kwargs) def view_function(request, *args, **kwargs): if with_id: - assert kwargs['id'] == 1 + assert kwargs["id"] == 1 else: - assert 'id' not in kwargs + assert "id" not in kwargs return Response(data) @@ -58,6 +58,7 @@ class BaseView(APIView): """ List all snippets, or create a new snippet. """ + permission_classes = [AllowAny] BaseView.__test__ = False @@ -65,20 +66,20 @@ class BaseView(APIView): @decorators.has_permission(*decorator_args, **decorator_kwargs) def sync_method(self, request, *args, **kwargs): if with_id: - assert kwargs['id'] == 1 + assert kwargs["id"] == 1 else: - assert 'id' not in kwargs + assert "id" not in kwargs return Response(data) @decorators.has_permission(*decorator_args, **decorator_kwargs) async def async_method(self, request, *args, **kwargs): if with_id: - assert kwargs['id'] == 1 + assert kwargs["id"] == 1 else: - assert 'id' not in kwargs + assert "id" not in kwargs return Response(data) @@ -88,7 +89,7 @@ async def async_method(self, request, *args, **kwargs): def build_params(): - methods = ['get', 'post', 'put', 'delete'] + methods = ["get", "post", "put", "delete"] class_baseds = [True, False] with_ids = [True, False] is_asyncs = [True, False] @@ -96,15 +97,15 @@ def build_params(): for method in methods: for class_based in class_baseds: for with_id in with_ids: - if method not in ['get', 'post'] and with_id is False: + if method not in ["get", "post"] and with_id is False: continue - if method == 'post' and with_id is True: + if method == "post" and with_id is True: continue for is_async in is_asyncs: args = (method, class_based, with_id, is_async) - yield args, 'method_{}__class_based_{}__with_id_{}__is_async_{}'.format(*args) + yield args, "method_{}__class_based_{}__with_id_{}__is_async_{}".format(*args) @pytest.fixture(params=[param for param, _ in build_params()], ids=[id for _, id in build_params()]) @@ -120,27 +121,31 @@ def wrapper(user=None, decorator_params={}, url_params={}): nonlocal method if with_id: - url_params['id'] = 1 + url_params["id"] = 1 if class_based: - view = build_view_class(method.upper(), - res, - decorator_args=(PERMISSION, ), - decorator_kwargs=decorator_params, - with_id=with_id, - is_async=is_async) + view = build_view_class( + method.upper(), + res, + decorator_args=(PERMISSION,), + decorator_kwargs=decorator_params, + with_id=with_id, + is_async=is_async, + ) view = view.as_view() else: - view = build_view_function(method.upper(), - res, - decorator_args=(PERMISSION, ), - decorator_kwargs=decorator_params, - with_id=with_id, - is_async=is_async) + view = build_view_function( + method.upper(), + res, + decorator_args=(PERMISSION,), + decorator_kwargs=decorator_params, + with_id=with_id, + is_async=is_async, + ) factory = APIRequestFactory() - url = 
'/they-killed-kenny' + url = "/they-killed-kenny" if with_id: url += f'/{url_params["id"]}' @@ -183,9 +188,9 @@ async def test__anonymous_user(database: capy.Database, make_view): view = await make_view(user=None, decorator_params={}, url_params={}) response, _ = await view() - expected = {'detail': 'anonymous-user-without-permission', 'status_code': 403} + expected = {"detail": "anonymous-user-without-permission", "status_code": 403} - assert json.loads(response.content.decode('utf-8')) == expected + assert json.loads(response.content.decode("utf-8")) == expected assert response.status_code, status.HTTP_403_FORBIDDEN @@ -197,9 +202,9 @@ async def test__with_user(database: capy.Database, make_view): view = await make_view(user=model.user, decorator_params={}, url_params={}) response, _ = await view() - expected = {'detail': 'without-permission', 'status_code': 403} + expected = {"detail": "without-permission", "status_code": 403} - assert json.loads(response.content.decode('utf-8')) == expected + assert json.loads(response.content.decode("utf-8")) == expected assert response.status_code, status.HTTP_403_FORBIDDEN @@ -211,37 +216,37 @@ async def test__with_user__with_permission__dont_match(database: capy.Database, view = await make_view(user=model.user, decorator_params={}, url_params={}) response, _ = await view() - expected = {'detail': 'without-permission', 'status_code': 403} + expected = {"detail": "without-permission", "status_code": 403} - assert json.loads(response.content.decode('utf-8')) == expected + assert json.loads(response.content.decode("utf-8")) == expected assert response.status_code, status.HTTP_403_FORBIDDEN @pytest.mark.asyncio @pytest.mark.django_db(reset_sequences=True) async def test__with_user__with_permission(bc: Breathecode, make_view): - permission = {'codename': PERMISSION} + permission = {"codename": PERMISSION} model = await bc.database.acreate(user=1, permission=permission) view = await make_view(user=model.user, decorator_params={}, url_params={}) response, expected = await view() - assert json.loads(response.content.decode('utf-8')) == expected + assert json.loads(response.content.decode("utf-8")) == expected assert response.status_code, status.HTTP_200_OK @pytest.mark.asyncio @pytest.mark.django_db(reset_sequences=True) async def test__with_user__with_group_related_to_permission(bc: Breathecode, make_view): - user = {'user_permissions': []} - permissions = [{}, {'codename': PERMISSION}] - group = {'permission_pk': 2} + user = {"user_permissions": []} + permissions = [{}, {"codename": PERMISSION}] + group = {"permission_pk": 2} model = await bc.database.acreate(user=user, permission=permissions, group=group) view = await make_view(user=model.user, decorator_params={}, url_params={}) response, expected = await view() - assert json.loads(response.content.decode('utf-8')) == expected + assert json.loads(response.content.decode("utf-8")) == expected assert response.status_code, status.HTTP_200_OK diff --git a/breathecode/utils/tests/i18n/tests_translation.py b/breathecode/utils/tests/i18n/tests_translation.py index db602ef9f..8a8a388dd 100644 --- a/breathecode/utils/tests/i18n/tests_translation.py +++ b/breathecode/utils/tests/i18n/tests_translation.py @@ -17,24 +17,24 @@ def randomLang(self, force_complete=True): - code = 'en' + code = "en" # avoid choices english - while code == 'en': + while code == "en": code = self.bc.random.string(lower=True, size=2) if force_complete or random.randint(0, 1): - code += f'-{self.bc.random.string(upper=True, size=2)}' + 
code += f"-{self.bc.random.string(upper=True, size=2)}" return code def langWithRandomCountry(self, code): - return f'{code}-{self.bc.random.string(upper=True, size=2)}' + return f"{code}-{self.bc.random.string(upper=True, size=2)}" def getLangParam(code: str): - return code.lower().replace('-', '_') + return code.lower().replace("-", "_") def randomBool(): @@ -48,65 +48,66 @@ class TranslationTestSuite(UtilsTestCase): def test_Given_RandomLang_When_EnglishTranstalionIsNotGiven_Expect_Exception(self): code = randomLang(self, randomBool()) - with self.assertRaisesMessage(MalformedLanguageCode, 'The english translation is mandatory'): + with self.assertRaisesMessage(MalformedLanguageCode, "The english translation is mandatory"): translation(code) def test_Given_RandomLang_When_GeneralEnglishTranstalionAndUsaEnglishIsNotGiven_Expect_Exception(self): code = randomLang(self, randomBool()) - with self.assertRaisesMessage(MalformedLanguageCode, 'The english translation is mandatory'): - translation(code, en_au='Hello') + with self.assertRaisesMessage(MalformedLanguageCode, "The english translation is mandatory"): + translation(code, en_au="Hello") def test_Given_RandomLang_When_LangCodeUppercase_Expect_Exception(self): code = randomLang(self, randomBool()) - with self.assertRaisesMessage(MalformedLanguageCode, 'Lang code is not lowercase'): - translation(code, EN_au='Hello') + with self.assertRaisesMessage(MalformedLanguageCode, "Lang code is not lowercase"): + translation(code, EN_au="Hello") def test_Given_RandomLang_When_CountryCodeUppercase_Expect_Exception(self): code = randomLang(self, randomBool()) - with self.assertRaisesMessage(MalformedLanguageCode, 'Country code is not lowercase'): - translation(code, en_AU='Hello') + with self.assertRaisesMessage(MalformedLanguageCode, "Country code is not lowercase"): + translation(code, en_AU="Hello") def test_Given_RandomLang_When_SpanishTranslationIsNotGiven_Expect_GetEnglishTranslation(self): code = randomLang(self, randomBool()) - cases = ['en', 'en_us'] + cases = ["en", "en_us"] for case in cases: - kwargs = {case: 'Hello'} + kwargs = {case: "Hello"} string = translation(code, **kwargs) - self.assertEqual(string, 'Hello') + self.assertEqual(string, "Hello") def test_Given_None_When_EnglishTranslationIsGiven_Expect_GetEnglishTranslation(self): - string = translation(None, en='Hello') - self.assertEqual(string, 'Hello') + string = translation(None, en="Hello") + self.assertEqual(string, "Hello") def test_Given_LangEs_When_SpanishTranslationIsGiven_Expect_GetGenericSpanishTranslation(self): - code = langWithRandomCountry(self, 'es') + code = langWithRandomCountry(self, "es") param = getLangParam(code) kwargs = { - 'en': 'Hello', - 'es': 'Hola', - param: 'Qué onda tío', + "en": "Hello", + "es": "Hola", + param: "Qué onda tío", } - string = translation('es', **kwargs) - self.assertEqual(string, 'Hola') + string = translation("es", **kwargs) + self.assertEqual(string, "Hola") def test_Given_LangEsWithCountry_When_SpanishTranslationIsGiven_Expect_GetGenericSpanishTranslation(self): - code = langWithRandomCountry(self, 'es') + code = langWithRandomCountry(self, "es") kwargs = { - 'en': 'Hello', - 'es': 'Hola', + "en": "Hello", + "es": "Hola", } string = translation(code, **kwargs) - self.assertEqual(string, 'Hola') + self.assertEqual(string, "Hola") def test_Given_LangEsWithCountry_When_SpanishWithCountryTranslationIsGiven_Expect_GetSpanishTranslationOfThatCountry( - self): - code = langWithRandomCountry(self, 'es') + self, + ): + code = 
langWithRandomCountry(self, "es") param = getLangParam(code) kwargs = { - 'en': 'Hello', - 'es': 'Hola', - param: 'Qué onda tío', + "en": "Hello", + "es": "Hola", + param: "Qué onda tío", } string = translation(code, **kwargs) - self.assertEqual(string, 'Qué onda tío') + self.assertEqual(string, "Qué onda tío") diff --git a/breathecode/utils/tests/mixins/utils_test_case.py b/breathecode/utils/tests/mixins/utils_test_case.py index 61939d41e..19f324801 100644 --- a/breathecode/utils/tests/mixins/utils_test_case.py +++ b/breathecode/utils/tests/mixins/utils_test_case.py @@ -1,10 +1,11 @@ """ Collections of mixins used to login in authorize microservice """ + from rest_framework.test import APITestCase from breathecode.tests.mixins import BreathecodeMixin, GenerateModelsMixin -__all__ = ['UtilsTestCase'] +__all__ = ["UtilsTestCase"] class UtilsTestCase(APITestCase, BreathecodeMixin, GenerateModelsMixin): diff --git a/breathecode/utils/tests/views/tests_private_view.py b/breathecode/utils/tests/views/tests_private_view.py index 5a9c28e50..ae9e22f2d 100644 --- a/breathecode/utils/tests/views/tests_private_view.py +++ b/breathecode/utils/tests/views/tests_private_view.py @@ -9,14 +9,14 @@ from ..mixins import UtilsTestCase -PERMISSION = 'can_kill_kenny' +PERMISSION = "can_kill_kenny" def build_view(*args, **kwargs): @views.private_view(*args, **kwargs) def endpoint(request, token, id): - return JsonResponse({'method': request.method, 'id': id, 'user': request.user.id, 'token': token.key}) + return JsonResponse({"method": request.method, "id": id, "user": request.user.id, "token": token.key}) return endpoint @@ -28,77 +28,86 @@ class FunctionBasedViewTestSuite(UtilsTestCase): # When: no token and not auth url # Then: it must redirect to the default auth url - @patch('django.contrib.messages.add_message', MagicMock()) + @patch("django.contrib.messages.add_message", MagicMock()) def test_nobody_was_provide(self): factory = APIRequestFactory() - request = factory.get('/they-killed-kenny') + request = factory.get("/they-killed-kenny") - session = SessionStore('blablabla') + session = SessionStore("blablabla") request.session = session - view = build_view('can_kill_kenny') + view = build_view("can_kill_kenny") response = view(request, id=1) - url_hash = self.bc.format.to_base64('/they-killed-kenny') - content = self.bc.format.from_bytes(response.content, 'utf-8') + url_hash = self.bc.format.to_base64("/they-killed-kenny") + content = self.bc.format.from_bytes(response.content, "utf-8") - self.assertEqual(content, '') + self.assertEqual(content, "") self.assertEqual(response.status_code, status.HTTP_302_FOUND) - self.assertEqual(response.url, f'/v1/auth/view/login?attempt=1&url={url_hash}') - self.bc.check.calls(messages.add_message.call_args_list, [ - call(request, messages.ERROR, 'Please login before you can access this view'), - ]) + self.assertEqual(response.url, f"/v1/auth/view/login?attempt=1&url={url_hash}") + self.bc.check.calls( + messages.add_message.call_args_list, + [ + call(request, messages.ERROR, "Please login before you can access this view"), + ], + ) # When: no token and auth url as arg # Then: it must redirect to the provided auth url - @patch('django.contrib.messages.add_message', MagicMock()) + @patch("django.contrib.messages.add_message", MagicMock()) def test_with_auth_url_as_arg(self): factory = APIRequestFactory() - request = factory.get('/they-killed-kenny') + request = factory.get("/they-killed-kenny") - session = SessionStore('blablabla') + session = SessionStore("blablabla") 
request.session = session url = self.bc.fake.url() - view = build_view('can_kill_kenny', url) + view = build_view("can_kill_kenny", url) response = view(request, id=1) - url_hash = self.bc.format.to_base64('/they-killed-kenny') - content = self.bc.format.from_bytes(response.content, 'utf-8') + url_hash = self.bc.format.to_base64("/they-killed-kenny") + content = self.bc.format.from_bytes(response.content, "utf-8") - self.assertEqual(content, '') + self.assertEqual(content, "") self.assertEqual(response.status_code, status.HTTP_302_FOUND) - self.assertEqual(response.url, f'{url}?attempt=1&url={url_hash}') - self.bc.check.calls(messages.add_message.call_args_list, [ - call(request, messages.ERROR, 'Please login before you can access this view'), - ]) + self.assertEqual(response.url, f"{url}?attempt=1&url={url_hash}") + self.bc.check.calls( + messages.add_message.call_args_list, + [ + call(request, messages.ERROR, "Please login before you can access this view"), + ], + ) # When: no token and auth url as kwarg # Then: it must redirect to the provided auth url - @patch('django.contrib.messages.add_message', MagicMock()) + @patch("django.contrib.messages.add_message", MagicMock()) def test_with_auth_url_as_kwarg(self): factory = APIRequestFactory() - request = factory.get('/they-killed-kenny') + request = factory.get("/they-killed-kenny") - session = SessionStore('blablabla') + session = SessionStore("blablabla") request.session = session url = self.bc.fake.url() - view = build_view('can_kill_kenny', auth_url=url) + view = build_view("can_kill_kenny", auth_url=url) response = view(request, id=1) - url_hash = self.bc.format.to_base64('/they-killed-kenny') - content = self.bc.format.from_bytes(response.content, 'utf-8') + url_hash = self.bc.format.to_base64("/they-killed-kenny") + content = self.bc.format.from_bytes(response.content, "utf-8") - self.assertEqual(content, '') + self.assertEqual(content, "") self.assertEqual(response.status_code, status.HTTP_302_FOUND) - self.assertEqual(response.url, f'{url}?attempt=1&url={url_hash}') - self.bc.check.calls(messages.add_message.call_args_list, [ - call(request, messages.ERROR, 'Please login before you can access this view'), - ]) + self.assertEqual(response.url, f"{url}?attempt=1&url={url_hash}") + self.bc.check.calls( + messages.add_message.call_args_list, + [ + call(request, messages.ERROR, "Please login before you can access this view"), + ], + ) """ 🔽🔽🔽 Token provided @@ -107,118 +116,124 @@ def test_with_auth_url_as_kwarg(self): # Given: 1 Token # When: with token and not auth url # Then: return 200 - @patch('django.contrib.messages.add_message', MagicMock()) + @patch("django.contrib.messages.add_message", MagicMock()) def test_with_token(self): model = self.bc.database.create(token=1) factory = APIRequestFactory() - request = factory.get(f'/they-killed-kenny?token={model.token}') + request = factory.get(f"/they-killed-kenny?token={model.token}") - session = SessionStore('blablabla') + session = SessionStore("blablabla") request.session = session view = build_view() response = view(request, id=1) - content = json.loads(self.bc.format.from_bytes(response.content, 'utf-8')) + content = json.loads(self.bc.format.from_bytes(response.content, "utf-8")) - self.assertEqual(content, {'method': 'GET', 'id': 1, 'user': 1, 'token': model.token.key}) + self.assertEqual(content, {"method": "GET", "id": 1, "user": 1, "token": model.token.key}) self.assertEqual(response.status_code, status.HTTP_200_OK) self.bc.check.calls(messages.add_message.call_args_list, 
[]) # Given: 1 Token # When: with token, no auth url and no permission # Then: it must redirect to the default auth url - @patch('django.contrib.messages.add_message', MagicMock()) + @patch("django.contrib.messages.add_message", MagicMock()) def test_with_token__passing_permission(self): model = self.bc.database.create(token=1) factory = APIRequestFactory() - request = factory.get(f'/they-killed-kenny?token={model.token}') + request = factory.get(f"/they-killed-kenny?token={model.token}") - session = SessionStore('blablabla') + session = SessionStore("blablabla") request.session = session - view = build_view('can_kill_kenny') + view = build_view("can_kill_kenny") response = view(request, id=1) - url_hash = self.bc.format.to_base64(f'/they-killed-kenny?token={model.token}') - content = self.bc.format.from_bytes(response.content, 'utf-8') + url_hash = self.bc.format.to_base64(f"/they-killed-kenny?token={model.token}") + content = self.bc.format.from_bytes(response.content, "utf-8") - self.assertEqual(content, '') + self.assertEqual(content, "") self.assertEqual(response.status_code, status.HTTP_302_FOUND) - self.assertEqual(response.url, f'/v1/auth/view/login?attempt=1&url={url_hash}') - self.bc.check.calls(messages.add_message.call_args_list, [ - call(request, messages.ERROR, "You don't have permission can_kill_kenny to access this view"), - ]) + self.assertEqual(response.url, f"/v1/auth/view/login?attempt=1&url={url_hash}") + self.bc.check.calls( + messages.add_message.call_args_list, + [ + call(request, messages.ERROR, "You don't have permission can_kill_kenny to access this view"), + ], + ) # Given: 1 Token # When: with token, auth url and no permission # Then: it must redirect to the default auth url - @patch('django.contrib.messages.add_message', MagicMock()) + @patch("django.contrib.messages.add_message", MagicMock()) def test_with_token__passing_permission__auth_url(self): model = self.bc.database.create(token=1) factory = APIRequestFactory() - request = factory.get(f'/they-killed-kenny?token={model.token}') + request = factory.get(f"/they-killed-kenny?token={model.token}") - session = SessionStore('blablabla') + session = SessionStore("blablabla") request.session = session url = self.bc.fake.url() - view = build_view('can_kill_kenny', url) + view = build_view("can_kill_kenny", url) response = view(request, id=1) - url_hash = self.bc.format.to_base64(f'/they-killed-kenny?token={model.token}') - content = self.bc.format.from_bytes(response.content, 'utf-8') + url_hash = self.bc.format.to_base64(f"/they-killed-kenny?token={model.token}") + content = self.bc.format.from_bytes(response.content, "utf-8") - self.assertEqual(content, '') + self.assertEqual(content, "") self.assertEqual(response.status_code, status.HTTP_302_FOUND) - self.assertEqual(response.url, f'{url}?attempt=1&url={url_hash}') - self.bc.check.calls(messages.add_message.call_args_list, [ - call(request, messages.ERROR, "You don't have permission can_kill_kenny to access this view"), - ]) + self.assertEqual(response.url, f"{url}?attempt=1&url={url_hash}") + self.bc.check.calls( + messages.add_message.call_args_list, + [ + call(request, messages.ERROR, "You don't have permission can_kill_kenny to access this view"), + ], + ) # Given: 1 Token # When: with token and not auth url # Then: return 200 - @patch('django.contrib.messages.add_message', MagicMock()) + @patch("django.contrib.messages.add_message", MagicMock()) def test_with_token__with_permission(self): - permission = {'codename': 'can_kill_kenny'} + permission = 
{"codename": "can_kill_kenny"} model = self.bc.database.create(token=1, permission=permission, user=1) factory = APIRequestFactory() - request = factory.get(f'/they-killed-kenny?token={model.token}') + request = factory.get(f"/they-killed-kenny?token={model.token}") - session = SessionStore('blablabla') + session = SessionStore("blablabla") request.session = session - view = build_view('can_kill_kenny') + view = build_view("can_kill_kenny") response = view(request, id=1) - content = json.loads(self.bc.format.from_bytes(response.content, 'utf-8')) + content = json.loads(self.bc.format.from_bytes(response.content, "utf-8")) - self.assertEqual(content, {'method': 'GET', 'id': 1, 'user': 1, 'token': model.token.key}) + self.assertEqual(content, {"method": "GET", "id": 1, "user": 1, "token": model.token.key}) self.assertEqual(response.status_code, status.HTTP_200_OK) self.bc.check.calls(messages.add_message.call_args_list, []) # Given: 1 Token # When: with token and auth url # Then: return 200 - @patch('django.contrib.messages.add_message', MagicMock()) + @patch("django.contrib.messages.add_message", MagicMock()) def test_with_token__with_permission__auth_url(self): - permission = {'codename': 'can_kill_kenny'} + permission = {"codename": "can_kill_kenny"} model = self.bc.database.create(token=1, permission=permission, user=1) factory = APIRequestFactory() - request = factory.get(f'/they-killed-kenny?token={model.token}') + request = factory.get(f"/they-killed-kenny?token={model.token}") - session = SessionStore('blablabla') + session = SessionStore("blablabla") request.session = session url = self.bc.fake.url() - view = build_view('can_kill_kenny', url) + view = build_view("can_kill_kenny", url) response = view(request, id=1) - content = json.loads(self.bc.format.from_bytes(response.content, 'utf-8')) + content = json.loads(self.bc.format.from_bytes(response.content, "utf-8")) - self.assertEqual(content, {'method': 'GET', 'id': 1, 'user': 1, 'token': model.token.key}) + self.assertEqual(content, {"method": "GET", "id": 1, "user": 1, "token": model.token.key}) self.assertEqual(response.status_code, status.HTTP_200_OK) self.bc.check.calls(messages.add_message.call_args_list, []) diff --git a/breathecode/utils/urls/mount_app_openapi.py b/breathecode/utils/urls/mount_app_openapi.py index 2ef3d196d..f2aa0e3e0 100644 --- a/breathecode/utils/urls/mount_app_openapi.py +++ b/breathecode/utils/urls/mount_app_openapi.py @@ -2,18 +2,20 @@ from rest_framework.schemas import get_schema_view from rest_framework import permissions -__all__ = ['mount_app_openapi'] +__all__ = ["mount_app_openapi"] def mount_app_openapi(url: str, urlconf, namespace): - if not url.startswith('/'): - url = '/' + url + if not url.startswith("/"): + url = "/" + url - return path(f'openapi/{namespace}.yml', - get_schema_view( - url=url, - public=True, - permission_classes=(permissions.AllowAny, ), - urlconf=urlconf, - ), - name=f'{namespace}-openapi-schema') + return path( + f"openapi/{namespace}.yml", + get_schema_view( + url=url, + public=True, + permission_classes=(permissions.AllowAny,), + urlconf=urlconf, + ), + name=f"{namespace}-openapi-schema", + ) diff --git a/breathecode/utils/validate_conversion_info.py b/breathecode/utils/validate_conversion_info.py index 5ffef7120..aeadce734 100644 --- a/breathecode/utils/validate_conversion_info.py +++ b/breathecode/utils/validate_conversion_info.py @@ -1,28 +1,48 @@ from breathecode.utils.i18n import translation from capyc.rest_framework.exceptions import ValidationException -__all__ = 
['validate_conversion_info'] +__all__ = ["validate_conversion_info"] def validate_conversion_info(conversion_info, lang): if conversion_info is not None: if not isinstance(conversion_info, dict): - raise ValidationException(translation(lang, - en='conversion_info must be a JSON object', - es='conversion_info debe ser un objeto de JSON', - slug='conversion-info-json-type'), - code=400) + raise ValidationException( + translation( + lang, + en="conversion_info must be a JSON object", + es="conversion_info debe ser un objeto de JSON", + slug="conversion-info-json-type", + ), + code=400, + ) expected_keys = [ - 'utm_placement', 'utm_medium', 'utm_source', 'utm_term', 'utm_content', 'utm_campaign', 'conversion_url', - 'landing_url', 'user_agent', 'plan', 'location', 'translations', 'internal_cta_placement', - 'internal_cta_content', 'internal_cta_campaign' + "utm_placement", + "utm_medium", + "utm_source", + "utm_term", + "utm_content", + "utm_campaign", + "conversion_url", + "landing_url", + "user_agent", + "plan", + "location", + "translations", + "internal_cta_placement", + "internal_cta_content", + "internal_cta_campaign", ] for key in conversion_info.keys(): if key not in expected_keys: - raise ValidationException(translation(lang, - en=f'Invalid key {key} was provided in the conversion_info', - es=f'Se agrego una clave inválida {key} en el conversion_info', - slug='conversion-info-invalid-key'), - code=400) + raise ValidationException( + translation( + lang, + en=f"Invalid key {key} was provided in the conversion_info", + es=f"Se agrego una clave inválida {key} en el conversion_info", + slug="conversion-info-invalid-key", + ), + code=400, + ) diff --git a/breathecode/utils/validators/language.py b/breathecode/utils/validators/language.py index 3feb53f7e..cdd5a5be6 100644 --- a/breathecode/utils/validators/language.py +++ b/breathecode/utils/validators/language.py @@ -1,6 +1,6 @@ from django.core.exceptions import ValidationError -__all__ = ['validate_language_code'] +__all__ = ["validate_language_code"] def validate_language_code(value: str | None) -> None: @@ -8,15 +8,15 @@ def validate_language_code(value: str | None) -> None: if value and len(value) != 2 and len(value) != 5: raise ValidationError( - 'Language code must be 2 or 5 chars long', - params={'value': value}, + "Language code must be 2 or 5 chars long", + params={"value": value}, ) if value and value[:2].isupper(): - raise ValidationError(f'{value} the first two letters needs to be lowercase') + raise ValidationError(f"{value} the first two letters needs to be lowercase") - if value and not is_short and value[2] != '-': - raise ValidationError(f'{value} the third letter needs to be a dash') + if value and not is_short and value[2] != "-": + raise ValidationError(f"{value} the third letter needs to be a dash") if value and not is_short and value[3:].islower(): - raise ValidationError(f'{value} the last two letters needs to be uppercase') + raise ValidationError(f"{value} the last two letters needs to be uppercase") diff --git a/breathecode/utils/views/get_root_schema_view.py b/breathecode/utils/views/get_root_schema_view.py index 4ba40dd4b..52a44d871 100644 --- a/breathecode/utils/views/get_root_schema_view.py +++ b/breathecode/utils/views/get_root_schema_view.py @@ -10,7 +10,7 @@ from capyc.rest_framework.exceptions import ValidationException -__all__ = ['get_root_schema_view'] +__all__ = ["get_root_schema_view"] cache = None @@ -22,53 +22,48 @@ def get_root_schema_view(elements, extend=None): if extend is None: extend = {} - 
host = os.getenv('API_URL', '') - if host.endswith('/'): + host = os.getenv("API_URL", "") + if host.endswith("/"): host = host[:-1] @api_view() @permission_classes([AllowAny]) def view(request): result = { - 'info': { - 'description': '', - 'title': '', - 'version': '', - **extend - }, - 'openapi': '3.0.0', - 'paths': {}, - 'components': { - 'securitySchemes': { - 'ApiKeyAuth': { - 'type': 'apiKey', - 'in': 'header', - 'name': 'Authorization', + "info": {"description": "", "title": "", "version": "", **extend}, + "openapi": "3.0.0", + "paths": {}, + "components": { + "securitySchemes": { + "ApiKeyAuth": { + "type": "apiKey", + "in": "header", + "name": "Authorization", }, } }, } - if hasattr(Cache, 'openapi'): + if hasattr(Cache, "openapi"): return Response(Cache.openapi) - schema_urls = [reverse(f'{element}-openapi-schema') for element in elements] + schema_urls = [reverse(f"{element}-openapi-schema") for element in elements] schema_dicts = [] for element in schema_urls: response = requests.get(host + element, timeout=2) if response.status_code >= 300: - raise ValidationException(f'Unhandled {element}', 500, slug='unhandled-app') - content = response.content.decode('utf-8') + raise ValidationException(f"Unhandled {element}", 500, slug="unhandled-app") + content = response.content.decode("utf-8") schema_dicts.append(yaml.load(content, Loader=FullLoader)) for element in schema_dicts: - for key in element['paths']: - result['paths'][key] = element['paths'][key] - for key2 in result['paths'][key]: - result['paths'][key][key2]['security'] = [{'ApiKeyAuth': []}] + for key in element["paths"]: + result["paths"][key] = element["paths"][key] + for key2 in result["paths"][key]: + result["paths"][key][key2]["security"] = [{"ApiKeyAuth": []}] Cache.openapi = result diff --git a/breathecode/utils/views/private_view.py b/breathecode/utils/views/private_view.py index 951635292..cee11e28e 100644 --- a/breathecode/utils/views/private_view.py +++ b/breathecode/utils/views/private_view.py @@ -11,10 +11,10 @@ from ..decorators import validate_permission -__all__ = ['private_view', 'set_query_parameter', 'render_message'] +__all__ = ["private_view", "set_query_parameter", "render_message"] -def set_query_parameter(url, param_name, param_value=''): +def set_query_parameter(url, param_name, param_value=""): """Given a URL, set or replace a query parameter and return the modified URL. 
>>> set_query_parameter('http://example.com?foo=bar&biz=baz', 'foo', 'stuff') @@ -30,45 +30,40 @@ def set_query_parameter(url, param_name, param_value=''): return urlunsplit((scheme, netloc, path, new_query_string, fragment)) -def render_message(r, - msg, - btn_label=None, - btn_url=None, - btn_target='_blank', - data=None, - status=None, - academy: Optional[Academy] = None): +def render_message( + r, msg, btn_label=None, btn_url=None, btn_target="_blank", data=None, status=None, academy: Optional[Academy] = None +): if data is None: data = {} - _data = {'MESSAGE': msg, 'BUTTON': btn_label, 'BUTTON_TARGET': btn_target, 'LINK': btn_url} + _data = {"MESSAGE": msg, "BUTTON": btn_label, "BUTTON_TARGET": btn_target, "LINK": btn_url} if academy: - _data['COMPANY_INFO_EMAIL'] = academy.feedback_email - _data['COMPANY_LEGAL_NAME'] = academy.legal_name or academy.name - _data['COMPANY_LOGO'] = academy.logo_url - _data['COMPANY_NAME'] = academy.name + _data["COMPANY_INFO_EMAIL"] = academy.feedback_email + _data["COMPANY_LEGAL_NAME"] = academy.legal_name or academy.name + _data["COMPANY_LOGO"] = academy.logo_url + _data["COMPANY_NAME"] = academy.name - if 'heading' not in data: - _data['heading'] = academy.name + if "heading" not in data: + _data["heading"] = academy.name - return render(r, 'message.html', {**_data, **data}, status=status) + return render(r, "message.html", {**_data, **data}, status=status) -def private_view(permission=None, auth_url='/v1/auth/view/login'): +def private_view(permission=None, auth_url="/v1/auth/view/login"): def decorator(func): def inner(*args, **kwargs): req = args[0] url = req.get_full_path() - token = req.GET.get('token', None) + token = req.GET.get("token", None) if token is not None: valid_token = Token.get_valid(token) - elif 'token' in req.session: - valid_token = Token.get_valid(req.session['token']) + elif "token" in req.session: + valid_token = Token.get_valid(req.session["token"]) else: valid_token = None @@ -76,23 +71,24 @@ def inner(*args, **kwargs): try: if token is None and valid_token is None: - raise PermissionDenied('Please login before you can access this view') + raise PermissionDenied("Please login before you can access this view") if valid_token is None: - raise PermissionDenied('You don\'t have access to this view') + raise PermissionDenied("You don't have access to this view") if permission is not None and not validate_permission(valid_token.user, permission): - raise PermissionDenied(f'You don\'t have permission {permission} to access this view') + raise PermissionDenied(f"You don't have permission {permission} to access this view") except Exception as e: messages.add_message(req, messages.ERROR, str(e)) - return HttpResponseRedirect(redirect_to=f'{auth_url}?attempt=1&url=' + - str(base64.b64encode(url.encode('utf-8')), 'utf-8')) + return HttpResponseRedirect( + redirect_to=f"{auth_url}?attempt=1&url=" + str(base64.b64encode(url.encode("utf-8")), "utf-8") + ) # inject user in request args[0].user = valid_token.user - kwargs['token'] = valid_token + kwargs["token"] = valid_token return func(*args, **kwargs) return inner diff --git a/breathecode/wsgi.py b/breathecode/wsgi.py index 73b755726..fcb6b5b31 100644 --- a/breathecode/wsgi.py +++ b/breathecode/wsgi.py @@ -17,7 +17,7 @@ from django.core.wsgi import get_wsgi_application -os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'breathecode.settings') +os.environ.setdefault("DJANGO_SETTINGS_MODULE", "breathecode.settings") application = get_wsgi_application() application = 
newrelic.agent.WSGIApplicationWrapper(application) diff --git a/capyc/circuitbreaker/pytest/fixtures/dont_close_the_circuit.py b/capyc/circuitbreaker/pytest/fixtures/dont_close_the_circuit.py index 7f97e8f6e..c0c030f49 100644 --- a/capyc/circuitbreaker/pytest/fixtures/dont_close_the_circuit.py +++ b/capyc/circuitbreaker/pytest/fixtures/dont_close_the_circuit.py @@ -3,13 +3,13 @@ import pytest -__all__ = ['dont_close_the_circuit'] +__all__ = ["dont_close_the_circuit"] @pytest.fixture(autouse=True) def dont_close_the_circuit() -> Generator[None, None, None]: """Don't allow the circuit be closed.""" - with patch('circuitbreaker.CircuitBreaker._failure_count', 0, create=True): - with patch('circuitbreaker.CircuitBreaker.FAILURE_THRESHOLD', 10000000, create=True): + with patch("circuitbreaker.CircuitBreaker._failure_count", 0, create=True): + with patch("circuitbreaker.CircuitBreaker.FAILURE_THRESHOLD", 10000000, create=True): yield diff --git a/capyc/core/pytest/fixtures/clean_environment.py b/capyc/core/pytest/fixtures/clean_environment.py index 8e3114a62..e5213b8db 100644 --- a/capyc/core/pytest/fixtures/clean_environment.py +++ b/capyc/core/pytest/fixtures/clean_environment.py @@ -3,41 +3,41 @@ import pytest -__all__ = ['clean_environment'] +__all__ = ["clean_environment"] WHITELIST = [ - 'RANDOM_SEED', - 'SQLALCHEMY_SILENCE_UBER_WARNING', - 'SHELL', - 'HOME', - 'VSCODE_GIT_ASKPASS_EXTRA_ARGS', - 'HOSTTYPE', - 'WSL_INTEROP', - 'XDG_RUNTIME_DIR', - 'COLORTERM', - 'WSL_DISTRO_NAME', - 'TERM', - 'PATH', - 'TERM_PROGRAM', - 'VSCODE_GIT_ASKPASS_NODE', - 'PULSE_SERVER', - 'VSCODE_GIT_IPC_HANDLE', - '_OLD_FISH_PROMPT_OVERRIDE', - '_OLD_VIRTUAL_PATH', - 'DISPLAY', - 'LOGNAME', - 'VIRTUAL_ENV', - 'WSLENV', - 'WAYLAND_DISPLAY', - 'SHLVL', - 'PWD', - 'TERM_PROGRAM_VERSION', - 'USER', - 'GIT_ASKPASS', - 'VSCODE_GIT_ASKPASS_MAIN', - 'NAME', - 'VSCODE_IPC_HOOK_CLI', - 'LANG', + "RANDOM_SEED", + "SQLALCHEMY_SILENCE_UBER_WARNING", + "SHELL", + "HOME", + "VSCODE_GIT_ASKPASS_EXTRA_ARGS", + "HOSTTYPE", + "WSL_INTEROP", + "XDG_RUNTIME_DIR", + "COLORTERM", + "WSL_DISTRO_NAME", + "TERM", + "PATH", + "TERM_PROGRAM", + "VSCODE_GIT_ASKPASS_NODE", + "PULSE_SERVER", + "VSCODE_GIT_IPC_HANDLE", + "_OLD_FISH_PROMPT_OVERRIDE", + "_OLD_VIRTUAL_PATH", + "DISPLAY", + "LOGNAME", + "VIRTUAL_ENV", + "WSLENV", + "WAYLAND_DISPLAY", + "SHLVL", + "PWD", + "TERM_PROGRAM_VERSION", + "USER", + "GIT_ASKPASS", + "VSCODE_GIT_ASKPASS_MAIN", + "NAME", + "VSCODE_IPC_HOOK_CLI", + "LANG", ] diff --git a/capyc/core/pytest/fixtures/fake.py b/capyc/core/pytest/fixtures/fake.py index 827662b65..76a61400d 100644 --- a/capyc/core/pytest/fixtures/fake.py +++ b/capyc/core/pytest/fixtures/fake.py @@ -3,7 +3,7 @@ import pytest from faker import Faker as Fake -__all__ = ['fake', 'Fake'] +__all__ = ["fake", "Fake"] @pytest.fixture(autouse=True) diff --git a/capyc/core/pytest/fixtures/format.py b/capyc/core/pytest/fixtures/format.py index cb9af149f..9b5edb3fb 100644 --- a/capyc/core/pytest/fixtures/format.py +++ b/capyc/core/pytest/fixtures/format.py @@ -3,13 +3,13 @@ import pytest from faker import Faker -__all__ = ['Format', 'format'] +__all__ = ["Format", "format"] def _remove_dinamics_fields(dict, fields=None): """Remove dinamics fields from django models as dict""" if fields is None: - fields = ['_state', 'created_at', 'updated_at', '_password'] + fields = ["_state", "created_at", "updated_at", "_password"] if not dict: return None @@ -22,7 +22,7 @@ def _remove_dinamics_fields(dict, fields=None): # remove any field starting with __ (double 
underscore) because it is considered private
     without_private_keys = result.copy()
     for key in result:
-        if '__' in key or key.startswith('_'):
+        if "__" in key or key.startswith("_"):
             del without_private_keys[key]
 
     return without_private_keys
@@ -47,7 +47,7 @@ def _single_obj_repr(self, object: object) -> str:
         except ImportError:
             pass
 
-        raise NotImplementedError(f'Not implemented for {type(object)}')
+        raise NotImplementedError(f"Not implemented for {type(object)}")
 
     def to_obj_repr(self, object: object) -> str:
         """
diff --git a/capyc/core/pytest/fixtures/image.py b/capyc/core/pytest/fixtures/image.py
index 485b1ae09..4aad46cd7 100644
--- a/capyc/core/pytest/fixtures/image.py
+++ b/capyc/core/pytest/fixtures/image.py
@@ -6,7 +6,7 @@
 from faker import Faker
 from PIL import Image as PilImage
 
-__all__ = ['image', 'Image']
+__all__ = ["image", "Image"]
 
 
 @final
@@ -41,17 +41,17 @@ def random(self, x_size: int, y_size: int):
 
         size = (y_size, x_size)
 
-        filename = self._fake.slug() + '.png'
+        filename = self._fake.slug() + ".png"
         while filename in self._filenames:
-            filename = self._fake.slug() + '.png'
+            filename = self._fake.slug() + ".png"
 
-        image = PilImage.new('RGB', size)
+        image = PilImage.new("RGB", size)
         arr = np.random.randint(low=0, high=255, size=(size[1], size[0]))
-        image = PilImage.fromarray(arr.astype('uint8'))
-        image.save(filename, 'PNG')
+        image = PilImage.fromarray(arr.astype("uint8"))
+        image.save(filename, "PNG")
 
-        file = open(filename, 'rb')
+        file = open(filename, "rb")
         self._filenames.append(filename)
 
         return file
diff --git a/capyc/core/pytest/fixtures/no_http_requests.py b/capyc/core/pytest/fixtures/no_http_requests.py
index ceb78b48b..2be0eb3a0 100644
--- a/capyc/core/pytest/fixtures/no_http_requests.py
+++ b/capyc/core/pytest/fixtures/no_http_requests.py
@@ -1,17 +1,18 @@
 import pytest
 
-__all__ = ['no_http_requests']
+__all__ = ["no_http_requests"]
 
 
-@pytest.fixture(scope='module')
+@pytest.fixture(scope="module")
 def no_http_requests(monkeypatch: pytest.MonkeyPatch) -> None:
     from urllib3.connectionpool import HTTPConnectionPool
+
     urlopen = HTTPConnectionPool.urlopen
 
     def urlopen_mock(self, method, url, *args, **kwargs):
         # this prevent a tester left pass a request to a third party service
         allow = [
-            ('0.0.0.0', 9050, None),
+            ("0.0.0.0", 9050, None),
         ]
 
         for host, port, path in allow:
@@ -19,7 +20,7 @@ def urlopen_mock(self, method, url, *args, **kwargs):
             return urlopen(self, method, url, *args, **kwargs)
 
         raise Exception(
-            f'All HTTP request to third party services are forwidden, {method.upper()} {self.scheme}://{self.host}{url}'
+            f"All HTTP requests to third party services are forbidden, {method.upper()} {self.scheme}://{self.host}{url}"
         )
 
-    monkeypatch.setattr('urllib3.connectionpool.HTTPConnectionPool.urlopen', urlopen_mock)
+    monkeypatch.setattr("urllib3.connectionpool.HTTPConnectionPool.urlopen", urlopen_mock)
diff --git a/capyc/core/pytest/fixtures/random.py b/capyc/core/pytest/fixtures/random.py
index cf6272a49..d828413f6 100644
--- a/capyc/core/pytest/fixtures/random.py
+++ b/capyc/core/pytest/fixtures/random.py
@@ -5,7 +5,7 @@
 import pytest
 from faker import Faker
 
-__all__ = ['random', 'Random']
+__all__ = ["random", "Random"]
 
 
 @final
@@ -88,7 +88,7 @@ def string(self, size=0, lower=False, upper=False, symbol=False, number=False) -
         - str: A random string of the specified size and character types.
""" - chars = '' + chars = "" if lower: chars += string.ascii_lowercase @@ -102,7 +102,7 @@ def string(self, size=0, lower=False, upper=False, symbol=False, number=False) - if number: chars += string.digits - return ''.join(r.choices(chars, k=size)) + return "".join(r.choices(chars, k=size)) @pytest.fixture(autouse=True) diff --git a/capyc/core/pytest/fixtures/seed.py b/capyc/core/pytest/fixtures/seed.py index 0ad2ed40b..573626dbb 100644 --- a/capyc/core/pytest/fixtures/seed.py +++ b/capyc/core/pytest/fixtures/seed.py @@ -3,17 +3,18 @@ import pytest -__all__ = ['seed', 'Seed', 'pytest_addoption', 'pytest_terminal_summary'] +__all__ = ["seed", "Seed", "pytest_addoption", "pytest_terminal_summary"] type Seed = Optional[int] -SEED = random.randint(0, 2 ** 32 - 1) +SEED = random.randint(0, 2**32 - 1) seed_used = None -SEED_KEY = '_+* SEED *+_' -END_KEY = '_+* END *+_' +SEED_KEY = "_+* SEED *+_" +END_KEY = "_+* END *+_" + def pytest_addoption(parser: pytest.Parser): try: - parser.addoption('--seed', action='store', default=None, type=int, help='Set the random seed for tests') + parser.addoption("--seed", action="store", default=None, type=int, help="Set the random seed for tests") except Exception: ... @@ -27,12 +28,11 @@ def pytest_terminal_summary(terminalreporter, config: pytest.Config) -> None: # terminalreporter.write_line(f"Seed: {seed}") - @pytest.fixture(autouse=True) def seed(request: pytest.FixtureRequest) -> Generator[Optional[int], None, None]: global SEED - seed = request.config.getoption('--seed') + seed = request.config.getoption("--seed") if seed is None: seed = SEED @@ -40,4 +40,4 @@ def seed(request: pytest.FixtureRequest) -> Generator[Optional[int], None, None] # setattr(request.config, SEED_KEY, seed) yield seed - print(f'Seed used: {seed}') + print(f"Seed used: {seed}") diff --git a/capyc/core/shorteners.py b/capyc/core/shorteners.py index 108dbd5e0..7ad93db3b 100644 --- a/capyc/core/shorteners.py +++ b/capyc/core/shorteners.py @@ -1,4 +1,4 @@ -__all__ = ['C'] +__all__ = ["C"] class C: diff --git a/capyc/django/pytest/fixtures/database.py b/capyc/django/pytest/fixtures/database.py index 819fa4c30..2ffeb7b9b 100644 --- a/capyc/django/pytest/fixtures/database.py +++ b/capyc/django/pytest/fixtures/database.py @@ -24,7 +24,7 @@ from breathecode.tests.mixins.generate_models_mixin.utils import argument_parser from breathecode.utils.attr_dict import AttrDict -__all__ = ['database', 'Database'] +__all__ = ["database", "Database"] _fake = Faker() @@ -32,7 +32,7 @@ def _remove_dinamics_fields(dict, fields=None): """Remove dinamics fields from django models as dict""" if fields is None: - fields = ['_state', 'created_at', 'updated_at', '_password'] + fields = ["_state", "created_at", "updated_at", "_password"] if not dict: return None @@ -45,7 +45,7 @@ def _remove_dinamics_fields(dict, fields=None): # remove any field starting with __ (double underscore) because it is considered private without_private_keys = result.copy() for key in result: - if '__' in key or key.startswith('_'): + if "__" in key or key.startswith("_"): del without_private_keys[key] return without_private_keys @@ -82,7 +82,7 @@ def get_model(cls, path: str) -> Model: if path in cls._cache: return cls._cache[path] - app_label, model_name = path.split('.') + app_label, model_name = path.split(".") cls._cache[path] = apps.get_model(app_label, model_name) return cls._cache[path] @@ -144,32 +144,36 @@ def alist_of(cls, path: str, dict: bool = True) -> list[Model | dict[str, Any]]: def _get_random_attrs(cls, model): props 
= {} - model_fields = [( - x, - type(getattr(model, x).field), - { - 'choices': getattr(getattr(model, x).field, 'choices', None), - 'default': getattr(getattr(model, x).field, 'default', models.NOT_PROVIDED), - 'null': getattr(getattr(model, x).field, 'null', False), - 'blank': getattr(getattr(model, x).field, 'blank', False), - }, - ) for x in vars(model) if type(getattr(model, x)) is DeferredAttribute] + model_fields = [ + ( + x, + type(getattr(model, x).field), + { + "choices": getattr(getattr(model, x).field, "choices", None), + "default": getattr(getattr(model, x).field, "default", models.NOT_PROVIDED), + "null": getattr(getattr(model, x).field, "null", False), + "blank": getattr(getattr(model, x).field, "blank", False), + }, + ) + for x in vars(model) + if type(getattr(model, x)) is DeferredAttribute + ] for field_name, field_type, field_attrs in model_fields: - if field_attrs['default'] is not models.NOT_PROVIDED: - if callable(field_attrs['default']): - props[field_name] = field_attrs['default']() + if field_attrs["default"] is not models.NOT_PROVIDED: + if callable(field_attrs["default"]): + props[field_name] = field_attrs["default"]() else: - props[field_name] = field_attrs['default'] + props[field_name] = field_attrs["default"] - elif field_attrs['blank'] is True and field_attrs['null'] is True: + elif field_attrs["blank"] is True and field_attrs["null"] is True: props[field_name] = None - elif field_attrs['choices'] is not None: - props[field_name] = random.choice(field_attrs['choices'])[0] + elif field_attrs["choices"] is not None: + props[field_name] = random.choice(field_attrs["choices"])[0] elif field_type is models.EmailField: props[field_name] = _fake.email() @@ -273,23 +277,23 @@ def get_attrs(field): cls_type = type(field) field = field.field obj = { - 'cls': cls_type, - 'path': field.related_model._meta.app_label + '.' + field.related_model.__name__, - 'name': field.name, - 'blank': field.blank, - 'null': field.null, - 'default': field.default, - 'choices': field.choices, - 'related_model': field.related_model, + "cls": cls_type, + "path": field.related_model._meta.app_label + "." + field.related_model.__name__, + "name": field.name, + "blank": field.blank, + "null": field.null, + "default": field.default, + "choices": field.choices, + "related_model": field.related_model, } return obj for x in vars(model): if type(getattr(model, x)) in [ - ForwardOneToOneDescriptor, - ForwardManyToOneDescriptor, - ManyToManyDescriptor, + ForwardOneToOneDescriptor, + ForwardManyToOneDescriptor, + ManyToManyDescriptor, ]: yield ( x, @@ -306,20 +310,20 @@ def _build_descriptors(cls): ban_list = set() for app in settings.INSTALLED_APPS: - app_label = app.split('.')[-1] + app_label = app.split(".")[-1] all_models = apps.get_app_config(app_label).get_models() app_cache = {} for model in all_models: model_name = model.__name__ model_descriptor = { - 'cls': model, - 'path': app_label + '.' + model_name, - 'related_fields': [*cls._get_related_fields(model)], - 'get_values': functools.partial(cls._get_random_attrs, model), + "cls": model, + "path": app_label + "." 
+ model_name, + "related_fields": [*cls._get_related_fields(model)], + "get_values": functools.partial(cls._get_random_attrs, model), } app_cache[model_name] = model_descriptor - name_map[app_label + '__' + cls.to_snake_case(model_name)] = (app_label, model_name) + name_map[app_label + "__" + cls.to_snake_case(model_name)] = (app_label, model_name) if model_name in ban_list: continue @@ -334,7 +338,7 @@ def _build_descriptors(cls): model_map[model_name] = model_descriptor name_map[snake_model_name] = model_name - model_alias_map[snake_model_name] = app_label + '.' + model_name + model_alias_map[snake_model_name] = app_label + "." + model_name app_map[app_label] = app_cache @@ -342,7 +346,7 @@ def _build_descriptors(cls): @classmethod def to_snake_case(cls, class_name): - snake_case = re.sub('([a-z0-9])([A-Z])', r'\1_\2', class_name).lower() + snake_case = re.sub("([a-z0-9])([A-Z])", r"\1_\2", class_name).lower() return snake_case @classmethod @@ -364,12 +368,14 @@ def create(cls, **models): path = name_map[model_alias] except KeyError: - if '__' in model_alias: - app_label, model_name = model_alias.split('__') - raise ValueError(f'Model {model_name} not found in {app_label}') + if "__" in model_alias: + app_label, model_name = model_alias.split("__") + raise ValueError(f"Model {model_name} not found in {app_label}") - raise ValueError(f'Model {model_alias} not found or two models have the same name, ' - 'use the app_label.model_name format') + raise ValueError( + f"Model {model_alias} not found or two models have the same name, " + "use the app_label.model_name format" + ) if isinstance(path, tuple): app_label, model_name = path @@ -385,7 +391,7 @@ def create(cls, **models): # fill cache for model_alias, model_descriptor in pending.items(): - x = model_descriptor['path'] + x = model_descriptor["path"] cache[x] = (model_descriptor, models.get(model_alias)) exec_order.append(x) @@ -398,34 +404,37 @@ def create(cls, **models): for key in exec_order: item = cache.get(key, None) if item is None: - app_label, model_name = key.split('.') + app_label, model_name = key.split(".") x = app_map[app_label][model_name] item = (x, 1) cache[key] = item model_descriptor, value = item - if model_descriptor['path'] in cache_to_add: + if model_descriptor["path"] in cache_to_add: continue - if model_descriptor['path'] in processed: + if model_descriptor["path"] in processed: continue - processed.add(model_descriptor['path']) + processed.add(model_descriptor["path"]) - for _related_field, _field_type, field_attrs in model_descriptor['related_fields']: + for _related_field, _field_type, field_attrs in model_descriptor["related_fields"]: - if field_attrs['path'] in processed: + if field_attrs["path"] in processed: continue - if (field_attrs['path'] not in exec_order and field_attrs['path'] not in cache_to_add - and (field_attrs['null'] is False or field_attrs['cls'] is ForwardOneToOneDescriptor)): - app_label, model_name = field_attrs['path'].split('.') - cache_to_add[field_attrs['path']] = (app_map[app_label][model_name], 1) + if ( + field_attrs["path"] not in exec_order + and field_attrs["path"] not in cache_to_add + and (field_attrs["null"] is False or field_attrs["cls"] is ForwardOneToOneDescriptor) + ): + app_label, model_name = field_attrs["path"].split(".") + cache_to_add[field_attrs["path"]] = (app_map[app_label][model_name], 1) # disable m2m temporally - if field_attrs['cls'] is not ManyToManyDescriptor: - exec_order_to_add.append(field_attrs['path']) + if field_attrs["cls"] is not ManyToManyDescriptor: 
+ exec_order_to_add.append(field_attrs["path"]) exec_order += exec_order_to_add cache.update(cache_to_add) @@ -436,8 +445,8 @@ def create(cls, **models): # sort dependencies for model_path, (model_descriptor, _value) in cache.items(): - for _related_field, _field_type, field_attrs in model_descriptor['related_fields']: - dep_path = field_attrs['path'] + for _related_field, _field_type, field_attrs in model_descriptor["related_fields"]: + dep_path = field_attrs["path"] to_reevaluate = [] # dep not found, maybe it's a m2m, that was temporally disabled @@ -461,9 +470,9 @@ def create(cls, **models): to_re_reevaluate = [] for x in to_reevaluate: - for _related_field, _field_type, field_attrs in cache[x][0]['related_fields']: + for _related_field, _field_type, field_attrs in cache[x][0]["related_fields"]: - dep_path = field_attrs['path'] + dep_path = field_attrs["path"] # dep not found, maybe it is a m2m, that was temporally disabled try: @@ -479,7 +488,7 @@ def create(cls, **models): exec_order.insert(model_index, dep_path) # disable m2m temporally - if field_attrs['cls'] is not ManyToManyDescriptor: + if field_attrs["cls"] is not ManyToManyDescriptor: to_re_reevaluate.append(dep_path) to_reevaluate = to_re_reevaluate @@ -493,38 +502,37 @@ def create(cls, **models): result = [] for how_many, arguments in argument_parser(value): - for _related_field, field_type, field_attrs in model_descriptor['related_fields']: - if field_attrs['path'] in generated: + for _related_field, field_type, field_attrs in model_descriptor["related_fields"]: + if field_attrs["path"] in generated: # no implemented yet if field_type is ManyToManyDescriptor: continue # arguments[field_attrs["name"]] = [generated[field_attrs["path"]]] - arguments[field_attrs['name']] = generated[field_attrs['path']] + arguments[field_attrs["name"]] = generated[field_attrs["path"]] - if field_attrs['cls'] in [ForwardOneToOneDescriptor, ForwardManyToOneDescriptor] and isinstance( - arguments[field_attrs['name']], list): - arguments[field_attrs['name']] = arguments[field_attrs['name']][0] + if field_attrs["cls"] in [ForwardOneToOneDescriptor, ForwardManyToOneDescriptor] and isinstance( + arguments[field_attrs["name"]], list + ): + arguments[field_attrs["name"]] = arguments[field_attrs["name"]][0] result = result + [ - model_descriptor['cls'].objects.create(**{ - **model_descriptor['get_values'](), - **arguments - }) for _ in range(how_many) + model_descriptor["cls"].objects.create(**{**model_descriptor["get_values"](), **arguments}) + for _ in range(how_many) ] if len(result) == 1: result = result[0] - app_label, model_name = model_descriptor['path'].split('.') + app_label, model_name = model_descriptor["path"].split(".") model_alias = cls.to_snake_case(model_name) if model_alias not in name_map: - model_alias = app_label + '__' + model_alias + model_alias = app_label + "__" + model_alias res[model_alias] = result - generated[model_descriptor['path']] = result + generated[model_descriptor["path"]] = result return AttrDict(**res) diff --git a/capyc/django/pytest/fixtures/queryset.py b/capyc/django/pytest/fixtures/queryset.py index c8144d509..e54e925c4 100644 --- a/capyc/django/pytest/fixtures/queryset.py +++ b/capyc/django/pytest/fixtures/queryset.py @@ -7,7 +7,7 @@ import pytest from django.db.models.query import QuerySet -__all__ = ['QuerySet', 'queryset'] +__all__ = ["QuerySet", "queryset"] @final @@ -36,7 +36,7 @@ def with_pks(self, query: QuerySet, pks: list[int]) -> None: ``` """ - assert isinstance(query, QuerySet), 'The first argument 
is not a QuerySet' + assert isinstance(query, QuerySet), "The first argument is not a QuerySet" assert [x.pk for x in query] == pks @@ -59,7 +59,7 @@ def get_pks(self, queryset) -> list[Any]: return [x.pk for x in queryset] -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def queryset() -> Generator[QuerySet, None, None]: """ QuerySet utils. diff --git a/capyc/django/pytest/fixtures/signals.py b/capyc/django/pytest/fixtures/signals.py index 212412bfd..973099c3b 100644 --- a/capyc/django/pytest/fixtures/signals.py +++ b/capyc/django/pytest/fixtures/signals.py @@ -9,7 +9,7 @@ from django.db.models.signals import ModelSignal from django.dispatch import Signal -__all__ = ['signals', 'Signals', 'signals_map'] +__all__ = ["signals", "Signals", "signals_map"] def get_signal_files(path: str) -> list[str]: @@ -18,7 +18,7 @@ def get_signal_files(path: str) -> list[str]: # Walk through the current directory and its subdirectories for folder, _, files in os.walk(path): for file in files: - if file == 'signals.py': + if file == "signals.py": signal_files.append(os.path.join(folder, file)) return signal_files @@ -32,10 +32,10 @@ def get_signals(path: str, includes_root_folder=True) -> list[Signal]: # Initialize a list to store the file paths signal_files = get_signal_files(root_directory) - if '/' in root_directory: - separator = '/' + if "/" in root_directory: + separator = "/" else: - separator = '\\' + separator = "\\" res = {} @@ -45,27 +45,28 @@ def get_signals(path: str, includes_root_folder=True) -> list[Signal]: if prefix.endswith(separator): prefix = prefix[:-1] - prefix = prefix.split(separator)[-1] + '.' + prefix = prefix.split(separator)[-1] + "." else: - prefix = '' + prefix = "" signal_files = [ - prefix + '.'.join(x.replace(root_directory + separator, '').replace('.py', '').split(separator)) + prefix + ".".join(x.replace(root_directory + separator, "").replace(".py", "").split(separator)) for x in signal_files ] - signal_files = [x for x in signal_files if '-' not in x] + signal_files = [x for x in signal_files if "-" not in x] for module_path in signal_files: module = importlib.import_module(module_path) signals = [ - x for x in dir(module) - if x[0] != '_' and (isinstance(getattr(module, x), Signal) or isinstance(getattr(module, x), ModelSignal)) + x + for x in dir(module) + if x[0] != "_" and (isinstance(getattr(module, x), Signal) or isinstance(getattr(module, x), ModelSignal)) ] for signal_path in signals: - res[f'{module_path}.{signal_path}'] = getattr(module, signal_path) + res[f"{module_path}.{signal_path}"] = getattr(module, signal_path) return res @@ -79,19 +80,19 @@ def get_dependencies() -> list[str]: if os.path.exists(dir): for folder in os.listdir(dir): folder_path = os.path.join(dir, folder) - if os.path.isdir(folder_path) and folder_path.endswith('.dist-info') is False: + if os.path.isdir(folder_path) and folder_path.endswith(".dist-info") is False: dependency_folders.append(folder_path) return dependency_folders def check_path(dir: str, pattern: str): - linux_path = dir.replace('\\', '/') - windows_path = dir.replace('/', '\\') + linux_path = dir.replace("\\", "/") + windows_path = dir.replace("/", "\\") return linux_path not in dir and windows_path not in dir -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def signals_map(): # Get the current working directory (root directory) root_directory = os.getcwd() @@ -141,16 +142,18 @@ def wrapper(x, *args, **kwargs): return wrapper - self._monkeypatch.setattr('django.dispatch.Signal.send', 
mock(self._original_signal_send)) - self._monkeypatch.setattr('django.dispatch.Signal.send_robust', mock(self._original_signal_send_robust)) + self._monkeypatch.setattr("django.dispatch.Signal.send", mock(self._original_signal_send)) + self._monkeypatch.setattr("django.dispatch.Signal.send_robust", mock(self._original_signal_send_robust)) - self._monkeypatch.setattr('django.dispatch.dispatcher.Signal.send', mock(self._original_signal_send)) - self._monkeypatch.setattr('django.dispatch.dispatcher.Signal.send_robust', - mock(self._original_signal_send_robust)) + self._monkeypatch.setattr("django.dispatch.dispatcher.Signal.send", mock(self._original_signal_send)) + self._monkeypatch.setattr( + "django.dispatch.dispatcher.Signal.send_robust", mock(self._original_signal_send_robust) + ) - self._monkeypatch.setattr('django.db.models.signals.ModelSignal.send', mock(self._original_model_signal_send)) - self._monkeypatch.setattr('django.db.models.signals.ModelSignal.send_robust', - mock(self._original_model_signal_send_robust)) + self._monkeypatch.setattr("django.db.models.signals.ModelSignal.send", mock(self._original_model_signal_send)) + self._monkeypatch.setattr( + "django.db.models.signals.ModelSignal.send_robust", mock(self._original_model_signal_send_robust) + ) def enable(self, *to_enable, debug=False): """ @@ -166,15 +169,16 @@ def enable(self, *to_enable, debug=False): self._disabled = False - self._monkeypatch.setattr('django.dispatch.Signal.send', self._original_signal_send) - self._monkeypatch.setattr('django.dispatch.Signal.send_robust', self._original_signal_send_robust) + self._monkeypatch.setattr("django.dispatch.Signal.send", self._original_signal_send) + self._monkeypatch.setattr("django.dispatch.Signal.send_robust", self._original_signal_send_robust) - self._monkeypatch.setattr('django.dispatch.dispatcher.Signal.send', self._original_signal_send) - self._monkeypatch.setattr('django.dispatch.dispatcher.Signal.send_robust', self._original_signal_send_robust) + self._monkeypatch.setattr("django.dispatch.dispatcher.Signal.send", self._original_signal_send) + self._monkeypatch.setattr("django.dispatch.dispatcher.Signal.send_robust", self._original_signal_send_robust) - self._monkeypatch.setattr('django.db.models.signals.ModelSignal.send', self._original_model_signal_send) - self._monkeypatch.setattr('django.db.models.signals.ModelSignal.send_robust', - self._original_model_signal_send_robust) + self._monkeypatch.setattr("django.db.models.signals.ModelSignal.send", self._original_model_signal_send) + self._monkeypatch.setattr( + "django.db.models.signals.ModelSignal.send_robust", self._original_model_signal_send_robust + ) if to_enable or debug: to_disable = [x for x in self._signals_map if x not in to_enable] @@ -186,21 +190,21 @@ def apply_mock(module): def send_mock(*args, **kwargs): if debug: try: - print(' args\n ', args) + print(" args\n ", args) except Exception: pass try: - print(' kwargs\n ', kwargs) + print(" kwargs\n ", kwargs) except Exception: pass - print('\n') + print("\n") self._monkeypatch.setattr(module, send_mock) - apply_mock(f'{signal}.send') - apply_mock(f'{signal}.send_robust') + apply_mock(f"{signal}.send") + apply_mock(f"{signal}.send_robust") @pytest.fixture diff --git a/capyc/django/serializer.py b/capyc/django/serializer.py index 8107708b2..3f029d3af 100644 --- a/capyc/django/serializer.py +++ b/capyc/django/serializer.py @@ -5,8 +5,7 @@ def __init__(cls, name, bases, dct): cls.initialize() -class Serializer: - ... +class Serializer: ... 
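Aside for reviewers (not part of the patch): the serializer.py hunk above only shows the metaclass `__init__` context line calling `cls.initialize()` before the `class Serializer: ...` stub. As a reminder of how that hook behaves, here is a minimal standalone sketch; every name except `initialize` (e.g. `_InitializingMeta`, `_Base`, `_CohortSerializer`) is made up for illustration and is not taken from capyc.

# Illustrative sketch only -- not part of this diff. A metaclass whose
# __init__ calls cls.initialize() runs that hook once per class definition,
# which lets a Serializer subclass register itself at import time.
class _InitializingMeta(type):
    def __init__(cls, name, bases, dct):
        super().__init__(name, bases, dct)
        # the class object already exists here, so the hook can inspect it
        if hasattr(cls, "initialize"):
            cls.initialize()


class _Base(metaclass=_InitializingMeta):
    @classmethod
    def initialize(cls):
        print(f"initialized {cls.__name__}")


class _CohortSerializer(_Base):
    # defining this class prints "initialized _CohortSerializer" immediately
    pass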
from breathecode.admissions.models import Cohort diff --git a/capyc/newrelic/pytest/fixtures/disable_new_relic.py b/capyc/newrelic/pytest/fixtures/disable_new_relic.py index a4f967068..06f037839 100644 --- a/capyc/newrelic/pytest/fixtures/disable_new_relic.py +++ b/capyc/newrelic/pytest/fixtures/disable_new_relic.py @@ -2,9 +2,9 @@ import pytest -__all__ = ['disable_new_relic'] +__all__ = ["disable_new_relic"] @pytest.fixture(autouse=True) def disable_new_relic(monkeypatch: pytest.MonkeyPatch) -> Generator[None, None, None]: - monkeypatch.setattr('newrelic.core.agent.Agent._atexit_shutdown', lambda *args, **kwargs: None) + monkeypatch.setattr("newrelic.core.agent.Agent._atexit_shutdown", lambda *args, **kwargs: None) diff --git a/capyc/newrelic/pytest/fixtures/disable_newrelic_prints.py b/capyc/newrelic/pytest/fixtures/disable_newrelic_prints.py index 7072a9b46..bf60f8b4c 100644 --- a/capyc/newrelic/pytest/fixtures/disable_newrelic_prints.py +++ b/capyc/newrelic/pytest/fixtures/disable_newrelic_prints.py @@ -1,14 +1,14 @@ import pytest -__all__ = ['disable_newrelic_prints'] +__all__ = ["disable_newrelic_prints"] @pytest.fixture(autouse=True) def disable_newrelic_prints(monkeypatch: pytest.MonkeyPatch): """Disable NewRelic prints.""" - monkeypatch.setattr('newrelic.core.agent._logger.info', lambda *args, **kwargs: None) - monkeypatch.setattr('newrelic.core.agent._logger.warn', lambda *args, **kwargs: None) - monkeypatch.setattr('newrelic.core.agent._logger.error', lambda *args, **kwargs: None) + monkeypatch.setattr("newrelic.core.agent._logger.info", lambda *args, **kwargs: None) + monkeypatch.setattr("newrelic.core.agent._logger.warn", lambda *args, **kwargs: None) + monkeypatch.setattr("newrelic.core.agent._logger.error", lambda *args, **kwargs: None) yield diff --git a/capyc/rest_framework/exception_handler.py b/capyc/rest_framework/exception_handler.py index 674c313a3..5bc6750cf 100644 --- a/capyc/rest_framework/exception_handler.py +++ b/capyc/rest_framework/exception_handler.py @@ -4,19 +4,19 @@ from capyc.rest_framework.exceptions import PaymentException, ValidationException -__all__ = ['exception_handler'] +__all__ = ["exception_handler"] def get_item_attrs(item): data = { - 'pk': item.pk, + "pk": item.pk, } - if hasattr(item, 'slug'): - data['slug'] = item.slug + if hasattr(item, "slug"): + data["slug"] = item.slug - if hasattr(item, 'name'): - data['name'] = item.name + if hasattr(item, "name"): + data["name"] = item.name return data @@ -28,21 +28,21 @@ def exception_handler(exc, context): for k in exc.error_dict: for x in exc.error_dict[k]: - err = '' - if k != '__all__': - err += f'{k}: ' + err = "" + if k != "__all__": + err += f"{k}: " - err += f'{x.message}, ' + err += f"{x.message}, " - if err.endswith(', '): - err = err[:-2] + '. ' + if err.endswith(", "): + err = err[:-2] + ". " - if err.endswith('. '): + if err.endswith(". "): err = err[:-1] exc = ValidationException(err) - context['request']._request.POST = context['request'].data + context["request"]._request.POST = context["request"].data response = drf_exception_handler(exc, context) # Now add the HTTP status code to the response. 
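Aside for reviewers (not part of the patch): the payload shape this handler attaches to `response.data` is easier to see outside the diff. The standalone sketch below mirrors the keys the hunks above and the tests in capyc/tests/rest_framework assert on (`detail`, `status_code`, and the optional `silent`, `silent_code`, `data`, `items`); `build_error_payload` itself is an illustrative helper, not a function in this patch.

# Illustrative sketch only -- not part of this diff. It mimics the dict the
# exception handler builds for a single capyc exception.
from typing import Any, Optional


def build_error_payload(
    detail: str,
    status_code: int,
    *,
    silent: bool = False,
    slug: Optional[str] = None,
    data: Any = None,
    items: Optional[list[dict]] = None,
) -> dict:
    payload = {"detail": detail, "status_code": status_code}
    if silent:
        payload["silent"] = True
        payload["silent_code"] = slug or "undefined"
    if data is not None:
        payload["data"] = data
    if items:
        # items come from the exception's queryset: [{"pk": ..., "slug": ..., "name": ...}, ...]
        payload["items"] = items
    return payload


# e.g. a silent PaymentException with a slug renders roughly as:
# {"detail": "...", "status_code": 402, "silent": True, "silent_code": "<slug>"}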
@@ -54,19 +54,19 @@ def exception_handler(exc, context): for x in response.data: data = { - 'detail': str(x), - 'status_code': x.status_code, + "detail": str(x), + "status_code": x.status_code, } if x.silent: - data['silent'] = True - data['silent_code'] = x.slug + data["silent"] = True + data["silent_code"] = x.slug if x.data: - data['data'] = x.data + data["data"] = x.data if x.queryset: - data['items'] = [get_item_attrs(v) for v in x.queryset] + data["items"] = [get_item_attrs(v) for v in x.queryset] items.append(data) if len(items) == 1: @@ -75,28 +75,28 @@ def exception_handler(exc, context): response.data = items elif is_our_exception: - response.data['status_code'] = response.status_code + response.data["status_code"] = response.status_code if exc.silent: - response.data['silent'] = True - response.data['silent_code'] = exc.slug + response.data["silent"] = True + response.data["silent_code"] = exc.slug if exc.data is not None: - response.data['data'] = exc.data + response.data["data"] = exc.data if exc.queryset: - response.data['items'] = [get_item_attrs(v) for v in exc.queryset] + response.data["items"] = [get_item_attrs(v) for v in exc.queryset] elif isinstance(exc, ValidationError): - response.data['status_code'] = 400 + response.data["status_code"] = 400 elif isinstance(response.data, list): - if response.data[0].code != 'invalid': - response.data = {'status_code': response.data[0].code, 'details': str(response.data[0])} + if response.data[0].code != "invalid": + response.data = {"status_code": response.data[0].code, "details": str(response.data[0])} else: - response.data = {'status_code': 500, 'details': str(response.data[0])} + response.data = {"status_code": 500, "details": str(response.data[0])} else: - response.data['status_code'] = response.status_code + response.data["status_code"] = response.status_code return response diff --git a/capyc/rest_framework/exceptions.py b/capyc/rest_framework/exceptions.py index 155d0aa6f..6aa3bc8c5 100644 --- a/capyc/rest_framework/exceptions.py +++ b/capyc/rest_framework/exceptions.py @@ -6,11 +6,11 @@ from capyc.core.shorteners import C -__all__ = ['ValidationException', 'PaymentException'] +__all__ = ["ValidationException", "PaymentException"] def is_test_env(): - return 'ENV' in os.environ and os.environ['ENV'] == 'test' + return "ENV" in os.environ and os.environ["ENV"] == "test" class ValidationException(APIException): @@ -22,19 +22,15 @@ class ValidationException(APIException): data: dict silent: bool - def __init__(self, - details: str, - code: int = 400, - slug: Optional[str] = None, - data=None, - queryset=None, - silent=False): + def __init__( + self, details: str, code: int = 400, slug: Optional[str] = None, data=None, queryset=None, silent=False + ): self.status_code = code self.detail = details self.data = data self.queryset = queryset self.silent = silent - self.slug = slug or 'undefined' + self.slug = slug or "undefined" if isinstance(details, list) and code == 207: self.detail = self._get_207_details() @@ -49,25 +45,25 @@ def _get_207_details(self): return [ValidationException(x.args[0], **x.kwargs) for x in self.detail] def _get_details(self): - return [ValidationException(x.args[0], **{**x.kwargs, 'code': self.status_code}) for x in self.detail] + return [ValidationException(x.args[0], **{**x.kwargs, "code": self.status_code}) for x in self.detail] def get_message(self): if isinstance(self.detail, str): return self.detail - message = '. '.join([x.detail for x in self.detail]) + message = ". 
".join([x.detail for x in self.detail]) - if message[-1] != '.': - message += ('.' if self.detail else '') + if message[-1] != ".": + message += "." if self.detail else "" return message def get_message_list(self): if isinstance(self.detail, list): - message = '. '.join([x.detail for x in self.detail]) + message = ". ".join([x.detail for x in self.detail]) - if message[-1] != '.': - message += ('.' if self.detail else '') + if message[-1] != ".": + message += "." if self.detail else "" return message @@ -86,7 +82,7 @@ def __init__(self, details: str, slug: Optional[str] = None, data=None, queryset self.data = data self.queryset = queryset self.silent = silent - self.slug = slug or 'undefined' + self.slug = slug or "undefined" if isinstance(details, list): self.detail = self._get_details() @@ -101,10 +97,10 @@ def get_message(self): if isinstance(self.detail, str): return self.detail - return '. \n'.join([x.kwargs['slug'] if 'slug' in x.kwargs else x.args[0] for x in self.detail]) + return ". \n".join([x.kwargs["slug"] if "slug" in x.kwargs else x.args[0] for x in self.detail]) def get_message_list(self): if isinstance(self.detail, list): - return [x.kwargs['slug'] if 'slug' in x.kwargs else x.args[0] for x in self.detail] + return [x.kwargs["slug"] if "slug" in x.kwargs else x.args[0] for x in self.detail] return [self.detail] diff --git a/capyc/rest_framework/pytest/fixtures/client.py b/capyc/rest_framework/pytest/fixtures/client.py index 28d2fd3d8..a0a88595c 100644 --- a/capyc/rest_framework/pytest/fixtures/client.py +++ b/capyc/rest_framework/pytest/fixtures/client.py @@ -5,7 +5,7 @@ import pytest from rest_framework.test import APIClient as Client -__all__ = ['client', 'Client'] +__all__ = ["client", "Client"] @pytest.fixture diff --git a/capyc/tests/rest_framework/tests_exception_handler.py b/capyc/tests/rest_framework/tests_exception_handler.py index 6e88f8a73..a1bdadb13 100644 --- a/capyc/tests/rest_framework/tests_exception_handler.py +++ b/capyc/tests/rest_framework/tests_exception_handler.py @@ -33,7 +33,7 @@ def public_url(self): @pytest.fixture(autouse=True) def setup(monkeypatch, fake): - monkeypatch.setattr('breathecode.admissions.actions.get_bucket_object', lambda x: FakeBucketObject(fake.url())) + monkeypatch.setattr("breathecode.admissions.actions.get_bucket_object", lambda x: FakeBucketObject(fake.url())) yield @@ -41,14 +41,14 @@ def setup(monkeypatch, fake): def context(): request = HttpRequest() - request.META['HTTP_ACCEPT'] = 'application/json' + request.META["HTTP_ACCEPT"] = "application/json" request = Request(request) context = { - 'view': None, - 'args': (), - 'kwargs': {}, - 'request': request, + "view": None, + "args": (), + "kwargs": {}, + "request": request, } yield context @@ -58,7 +58,7 @@ def context(): def set_env(monkeypatch): def wrapper(env): - monkeypatch.setenv('ENV', env) + monkeypatch.setenv("ENV", env) yield wrapper @@ -68,7 +68,7 @@ def get_queryset(db, bc: Breathecode): def wrapper(n): bc.database.create(academy=n) - academy_cls = bc.database.get_model('admissions.Academy') + academy_cls = bc.database.get_model("admissions.Academy") return academy_cls.objects.all() yield wrapper @@ -76,8 +76,8 @@ def wrapper(n): # When: no slug is provided # Then: the message is returned -@pytest.mark.parametrize('extra', [{}, {'silent': False}, {'silent': None}]) -@pytest.mark.parametrize('env', ['test', 'dev', 'prod', 'qa', 'staging', 'development', 'production', '']) +@pytest.mark.parametrize("extra", [{}, {"silent": False}, {"silent": None}]) 
+@pytest.mark.parametrize("env", ["test", "dev", "prod", "qa", "staging", "development", "production", ""]) def test_payment_exception__no_slug(fake, context, set_env, env, extra): set_env(env) @@ -89,8 +89,8 @@ def test_payment_exception__no_slug(fake, context, set_env, env, extra): res = exception_handler(exc, context) expected = { - 'detail': message, - 'status_code': 402, + "detail": message, + "status_code": 402, } assert isinstance(res, Response) @@ -100,12 +100,13 @@ def test_payment_exception__no_slug(fake, context, set_env, env, extra): # When: a slug is provided and the env is test # Then: the slug is returned -@pytest.mark.parametrize('with_data', [True, False]) -@pytest.mark.parametrize('with_queryset', [True, False]) -@pytest.mark.parametrize('extra', [{}, {'silent': False}, {'silent': None}]) -def test_payment_exception__test_env__use_the_slug(fake, context, set_env, extra, get_kwargs, with_data, get_queryset, - with_queryset): - set_env('test') +@pytest.mark.parametrize("with_data", [True, False]) +@pytest.mark.parametrize("with_queryset", [True, False]) +@pytest.mark.parametrize("extra", [{}, {"silent": False}, {"silent": None}]) +def test_payment_exception__test_env__use_the_slug( + fake, context, set_env, extra, get_kwargs, with_data, get_queryset, with_queryset +): + set_env("test") slug = fake.slug() message = fake.sentence() @@ -113,29 +114,32 @@ def test_payment_exception__test_env__use_the_slug(fake, context, set_env, extra if with_data: data = get_kwargs(5) - extra['data'] = data + extra["data"] = data if with_queryset: queryset = get_queryset(5) - extra['queryset'] = queryset + extra["queryset"] = queryset exc = PaymentException(message, slug=slug, **extra) res = exception_handler(exc, context) expected = { - 'detail': slug, - 'status_code': 402, + "detail": slug, + "status_code": 402, } if with_data: - expected['data'] = data + expected["data"] = data if with_queryset: - expected['items'] = [{ - 'pk': x.id, - 'slug': x.slug, - 'name': x.name, - } for x in queryset] + expected["items"] = [ + { + "pk": x.id, + "slug": x.slug, + "name": x.name, + } + for x in queryset + ] assert isinstance(res, Response) assert res.data == expected @@ -144,8 +148,8 @@ def test_payment_exception__test_env__use_the_slug(fake, context, set_env, extra # When: a slug is provided and the env is not test # Then: the message is returned -@pytest.mark.parametrize('extra', [{}, {'silent': False}, {'silent': None}]) -@pytest.mark.parametrize('env', ['dev', 'prod', 'qa', 'staging', 'development', 'production', '']) +@pytest.mark.parametrize("extra", [{}, {"silent": False}, {"silent": None}]) +@pytest.mark.parametrize("env", ["dev", "prod", "qa", "staging", "development", "production", ""]) def test_payment_exception__anything_but_test_env__does_not_use_the_slug(fake, context, set_env, env, extra): set_env(env) @@ -156,8 +160,8 @@ def test_payment_exception__anything_but_test_env__does_not_use_the_slug(fake, c res = exception_handler(exc, context) expected = { - 'detail': message, - 'status_code': 402, + "detail": message, + "status_code": 402, } assert isinstance(res, Response) @@ -167,7 +171,7 @@ def test_payment_exception__anything_but_test_env__does_not_use_the_slug(fake, c # When: a slug and silent=True is provided and the env is not test # Then: the message is returned -@pytest.mark.parametrize('env', ['dev', 'prod', 'qa', 'staging', 'development', 'production', '']) +@pytest.mark.parametrize("env", ["dev", "prod", "qa", "staging", "development", "production", ""]) def 
test_payment_exception__anything_but_test_env__silent_code(fake, context, set_env, env): set_env(env) @@ -179,10 +183,10 @@ def test_payment_exception__anything_but_test_env__silent_code(fake, context, se assert isinstance(res, Response) assert res.data == { - 'detail': message, - 'silent': True, - 'silent_code': slug, - 'status_code': 402, + "detail": message, + "silent": True, + "silent_code": slug, + "status_code": 402, } assert res.status_code == status.HTTP_402_PAYMENT_REQUIRED @@ -190,7 +194,7 @@ def test_payment_exception__anything_but_test_env__silent_code(fake, context, se # When: a slug and silent=True is provided and the env is test # Then: the message is returned def test_payment_exception__test_env__silent_code(fake, context, set_env): - set_env('test') + set_env("test") slug = fake.slug() message = fake.sentence() @@ -200,10 +204,10 @@ def test_payment_exception__test_env__silent_code(fake, context, set_env): assert isinstance(res, Response) assert res.data == { - 'detail': slug, - 'silent': True, - 'silent_code': slug, - 'status_code': 402, + "detail": slug, + "silent": True, + "silent_code": slug, + "status_code": 402, } assert res.status_code == status.HTTP_402_PAYMENT_REQUIRED @@ -211,7 +215,7 @@ def test_payment_exception__test_env__silent_code(fake, context, set_env): # When: a slug and silent=True is provided and the env is test with multiple errors # Then: it returns each error def test_payment_exception__test_env__multiple_errors(fake, context, set_env, get_kwargs, get_queryset): - set_env('test') + set_env("test") slugs = [fake.slug() for _ in range(3)] messages = [fake.sentence() for _ in range(5)] @@ -230,35 +234,21 @@ def test_payment_exception__test_env__multiple_errors(fake, context, set_env, ge res = exception_handler(exc, context) expected = [ + {"detail": messages[0], "status_code": 402}, + {"detail": messages[1], "silent": True, "silent_code": "undefined", "status_code": 402}, + {"detail": slugs[0], "silent": True, "silent_code": slugs[0], "status_code": 402}, + {"data": data, "detail": slugs[1], "status_code": 402}, { - 'detail': messages[0], - 'status_code': 402 - }, - { - 'detail': messages[1], - 'silent': True, - 'silent_code': 'undefined', - 'status_code': 402 - }, - { - 'detail': slugs[0], - 'silent': True, - 'silent_code': slugs[0], - 'status_code': 402 - }, - { - 'data': data, - 'detail': slugs[1], - 'status_code': 402 - }, - { - 'detail': slugs[2], - 'items': [{ - 'pk': x.id, - 'slug': x.slug, - 'name': x.name, - } for x in queryset], - 'status_code': 402 + "detail": slugs[2], + "items": [ + { + "pk": x.id, + "slug": x.slug, + "name": x.name, + } + for x in queryset + ], + "status_code": 402, }, ] @@ -269,8 +259,8 @@ def test_payment_exception__test_env__multiple_errors(fake, context, set_env, ge # When: no slug is provided # Then: the message is returned -@pytest.mark.parametrize('extra', [{}, {'silent': False}, {'silent': None}]) -@pytest.mark.parametrize('env', ['test', 'dev', 'prod', 'qa', 'staging', 'development', 'production', '']) +@pytest.mark.parametrize("extra", [{}, {"silent": False}, {"silent": None}]) +@pytest.mark.parametrize("env", ["test", "dev", "prod", "qa", "staging", "development", "production", ""]) def test_validation_exception__no_slug(fake, context, set_env, env, extra): set_env(env) @@ -283,8 +273,8 @@ def test_validation_exception__no_slug(fake, context, set_env, env, extra): res = exception_handler(exc, context) expected = { - 'detail': message, - 'status_code': status_code, + "detail": message, + "status_code": 
status_code, } assert isinstance(res, Response) @@ -294,12 +284,13 @@ def test_validation_exception__no_slug(fake, context, set_env, env, extra): # When: a slug is provided and the env is test # Then: the slug is returned -@pytest.mark.parametrize('with_data', [True, False]) -@pytest.mark.parametrize('with_queryset', [True, False]) -@pytest.mark.parametrize('extra', [{}, {'silent': False}, {'silent': None}]) -def test_validation_exception__test_env__use_the_slug(fake, context, set_env, extra, get_kwargs, with_data, - get_queryset, with_queryset): - set_env('test') +@pytest.mark.parametrize("with_data", [True, False]) +@pytest.mark.parametrize("with_queryset", [True, False]) +@pytest.mark.parametrize("extra", [{}, {"silent": False}, {"silent": None}]) +def test_validation_exception__test_env__use_the_slug( + fake, context, set_env, extra, get_kwargs, with_data, get_queryset, with_queryset +): + set_env("test") slug = fake.slug() message = fake.sentence() @@ -308,29 +299,32 @@ def test_validation_exception__test_env__use_the_slug(fake, context, set_env, ex if with_data: data = get_kwargs(5) - extra['data'] = data + extra["data"] = data if with_queryset: queryset = get_queryset(5) - extra['queryset'] = queryset + extra["queryset"] = queryset exc = ValidationException(message, slug=slug, code=status_code, **extra) res = exception_handler(exc, context) expected = { - 'detail': slug, - 'status_code': status_code, + "detail": slug, + "status_code": status_code, } if with_data: - expected['data'] = data + expected["data"] = data if with_queryset: - expected['items'] = [{ - 'pk': x.id, - 'slug': x.slug, - 'name': x.name, - } for x in queryset] + expected["items"] = [ + { + "pk": x.id, + "slug": x.slug, + "name": x.name, + } + for x in queryset + ] assert isinstance(res, Response) assert res.data == expected @@ -339,8 +333,8 @@ def test_validation_exception__test_env__use_the_slug(fake, context, set_env, ex # When: a slug is provided and the env is not test # Then: the message is returned -@pytest.mark.parametrize('extra', [{}, {'silent': False}, {'silent': None}]) -@pytest.mark.parametrize('env', ['dev', 'prod', 'qa', 'staging', 'development', 'production', '']) +@pytest.mark.parametrize("extra", [{}, {"silent": False}, {"silent": None}]) +@pytest.mark.parametrize("env", ["dev", "prod", "qa", "staging", "development", "production", ""]) def test_validation_exception__anything_but_test_env__does_not_use_the_slug(fake, context, set_env, env, extra): set_env(env) @@ -352,8 +346,8 @@ def test_validation_exception__anything_but_test_env__does_not_use_the_slug(fake res = exception_handler(exc, context) expected = { - 'detail': message, - 'status_code': status_code, + "detail": message, + "status_code": status_code, } assert isinstance(res, Response) @@ -363,7 +357,7 @@ def test_validation_exception__anything_but_test_env__does_not_use_the_slug(fake # When: a slug and silent=True is provided and the env is not test # Then: the message is returned -@pytest.mark.parametrize('env', ['dev', 'prod', 'qa', 'staging', 'development', 'production', '']) +@pytest.mark.parametrize("env", ["dev", "prod", "qa", "staging", "development", "production", ""]) def test_validation_exception__anything_but_test_env__silent_code(fake, context, set_env, env): set_env(env) @@ -376,10 +370,10 @@ def test_validation_exception__anything_but_test_env__silent_code(fake, context, assert isinstance(res, Response) assert res.data == { - 'detail': message, - 'silent': True, - 'silent_code': slug, - 'status_code': status_code, + "detail": 
message, + "silent": True, + "silent_code": slug, + "status_code": status_code, } assert res.status_code == status_code @@ -387,7 +381,7 @@ def test_validation_exception__anything_but_test_env__silent_code(fake, context, # When: a slug and silent=True is provided and the env is test # Then: the message is returned def test_validation_exception__test_env__silent_code(fake, context, set_env): - set_env('test') + set_env("test") slug = fake.slug() message = fake.sentence() @@ -398,19 +392,20 @@ def test_validation_exception__test_env__silent_code(fake, context, set_env): assert isinstance(res, Response) assert res.data == { - 'detail': slug, - 'silent': True, - 'silent_code': slug, - 'status_code': status_code, + "detail": slug, + "silent": True, + "silent_code": slug, + "status_code": status_code, } assert res.status_code == status_code # When: a slug and silent=True is provided and the env is test with multiple errors, any error # Then: it returns each error -def test_validation_exception__test_env__any_status_code__multiple_errors(fake, context, set_env, get_kwargs, - get_queryset): - set_env('test') +def test_validation_exception__test_env__any_status_code__multiple_errors( + fake, context, set_env, get_kwargs, get_queryset +): + set_env("test") slugs = [fake.slug() for _ in range(3)] messages = [fake.sentence() for _ in range(5)] @@ -433,34 +428,37 @@ def test_validation_exception__test_env__any_status_code__multiple_errors(fake, expected = [ { - 'detail': messages[0], - 'status_code': status_code, + "detail": messages[0], + "status_code": status_code, }, { - 'detail': messages[1], - 'silent': True, - 'silent_code': 'undefined', - 'status_code': status_code, + "detail": messages[1], + "silent": True, + "silent_code": "undefined", + "status_code": status_code, }, { - 'detail': slugs[0], - 'silent': True, - 'silent_code': slugs[0], - 'status_code': status_code, + "detail": slugs[0], + "silent": True, + "silent_code": slugs[0], + "status_code": status_code, }, { - 'data': data, - 'detail': slugs[1], - 'status_code': status_code, + "data": data, + "detail": slugs[1], + "status_code": status_code, }, { - 'detail': slugs[2], - 'items': [{ - 'pk': x.id, - 'slug': x.slug, - 'name': x.name, - } for x in queryset], - 'status_code': status_code, + "detail": slugs[2], + "items": [ + { + "pk": x.id, + "slug": x.slug, + "name": x.name, + } + for x in queryset + ], + "status_code": status_code, }, ] @@ -472,7 +470,7 @@ def test_validation_exception__test_env__any_status_code__multiple_errors(fake, # When: a slug and silent=True is provided and the env is test with multiple errors, 207 # Then: it returns each error def test_validation_exception__test_env__207__multiple_errors(fake, context, set_env, get_kwargs, get_queryset): - set_env('test') + set_env("test") slugs = [fake.slug() for _ in range(3)] messages = [fake.sentence() for _ in range(5)] @@ -493,34 +491,37 @@ def test_validation_exception__test_env__207__multiple_errors(fake, context, set expected = [ { - 'detail': messages[0], - 'status_code': 400, + "detail": messages[0], + "status_code": 400, }, { - 'detail': messages[1], - 'silent': True, - 'silent_code': 'undefined', - 'status_code': status_codes[0], + "detail": messages[1], + "silent": True, + "silent_code": "undefined", + "status_code": status_codes[0], }, { - 'detail': slugs[0], - 'silent': True, - 'silent_code': slugs[0], - 'status_code': status_codes[1], + "detail": slugs[0], + "silent": True, + "silent_code": slugs[0], + "status_code": status_codes[1], }, { - 'data': data, - 
'detail': slugs[1], - 'status_code': status_codes[2], + "data": data, + "detail": slugs[1], + "status_code": status_codes[2], }, { - 'detail': slugs[2], - 'items': [{ - 'pk': x.id, - 'slug': x.slug, - 'name': x.name, - } for x in queryset], - 'status_code': status_codes[3], + "detail": slugs[2], + "items": [ + { + "pk": x.id, + "slug": x.slug, + "name": x.name, + } + for x in queryset + ], + "status_code": status_codes[3], }, ] diff --git a/conftest.py b/conftest.py index 3149fe458..521be6a3e 100644 --- a/conftest.py +++ b/conftest.py @@ -12,23 +12,23 @@ from capyc.django.pytest.fixtures.signals import Signals # set ENV as test before run django -os.environ['ENV'] = 'test' -os.environ['DATABASE_URL'] = 'sqlite:///:memory:' +os.environ["ENV"] = "test" +os.environ["DATABASE_URL"] = "sqlite:///:memory:" pytest_plugins = ( - 'capyc.core.pytest', - 'capyc.newrelic.pytest', - 'capyc.django.pytest', - 'capyc.rest_framework.pytest', - 'capyc.circuitbreaker.pytest', + "capyc.core.pytest", + "capyc.newrelic.pytest", + "capyc.django.pytest", + "capyc.rest_framework.pytest", + "capyc.circuitbreaker.pytest", ) from breathecode.tests.mixins.breathecode_mixin import Breathecode def pytest_configure(): - os.environ['ENV'] = 'test' - os.environ['SQLALCHEMY_SILENCE_UBER_WARNING'] = '1' + os.environ["ENV"] = "test" + os.environ["SQLALCHEMY_SILENCE_UBER_WARNING"] = "1" @pytest.fixture @@ -55,7 +55,7 @@ def bc(seed): def set_datetime(monkeypatch): def patch(new_datetime): - monkeypatch.setattr(timezone, 'now', lambda: new_datetime) + monkeypatch.setattr(timezone, "now", lambda: new_datetime) yield patch @@ -78,10 +78,10 @@ def enable_cache_logging(monkeypatch): You can re-enable them within a test by calling the provided wrapper. """ - monkeypatch.setattr('breathecode.commons.actions.is_output_enable', lambda: False) + monkeypatch.setattr("breathecode.commons.actions.is_output_enable", lambda: False) def wrapper(*args, **kwargs): - monkeypatch.setattr('breathecode.commons.actions.is_output_enable', lambda: True) + monkeypatch.setattr("breathecode.commons.actions.is_output_enable", lambda: True) yield wrapper @@ -102,10 +102,10 @@ def enable_hook_manager(monkeypatch): original_process_model_event = HookManagerClass.process_model_event - monkeypatch.setattr(HookManagerClass, 'process_model_event', lambda *args, **kwargs: None) + monkeypatch.setattr(HookManagerClass, "process_model_event", lambda *args, **kwargs: None) def enable(): - monkeypatch.setattr(HookManagerClass, 'process_model_event', original_process_model_event) + monkeypatch.setattr(HookManagerClass, "process_model_event", original_process_model_event) yield enable @@ -122,8 +122,8 @@ def dont_wait_for_rescheduling_tasks(): set_settings(RETRIES_LIMIT=2) - with patch('task_manager.core.decorators.Task.reattempt_settings', lambda *args, **kwargs: dict()): - with patch('task_manager.core.decorators.Task.circuit_breaker_settings', lambda *args, **kwargs: dict()): + with patch("task_manager.core.decorators.Task.reattempt_settings", lambda *args, **kwargs: dict()): + with patch("task_manager.core.decorators.Task.circuit_breaker_settings", lambda *args, **kwargs: dict()): yield @@ -154,7 +154,7 @@ def wrapper(*args, **kwargs): break if raises: - raise TestError(f'Avoiding to make a real request to {args} {kwargs}') + raise TestError(f"Avoiding to make a real request to {args} {kwargs}") mock = MagicMock() @@ -171,7 +171,7 @@ def wrapper(*args, **kwargs): return mock mock = MagicMock() - monkeypatch.setattr('requests.api.request', wrapper) + 
monkeypatch.setattr("requests.api.request", wrapper) return mock @@ -180,9 +180,9 @@ def wrapper(*args, **kwargs): @pytest.fixture(autouse=True) def default_environment(clean_environment, fake, monkeypatch: pytest.MonkeyPatch) -> Generator[None, None, None]: - monkeypatch.setenv('APP_URL', fake.url().replace('http://', 'https://')) - monkeypatch.setenv('LOGIN_URL', fake.url().replace('http://', 'https://')) - monkeypatch.setenv('ENV', 'test') + monkeypatch.setenv("APP_URL", fake.url().replace("http://", "https://")) + monkeypatch.setenv("LOGIN_URL", fake.url().replace("http://", "https://")) + monkeypatch.setenv("ENV", "test") yield diff --git a/manage.py b/manage.py index 38720b82e..26fdc4d98 100755 --- a/manage.py +++ b/manage.py @@ -5,15 +5,17 @@ def main(): - os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'breathecode.settings') + os.environ.setdefault("DJANGO_SETTINGS_MODULE", "breathecode.settings") try: from django.core.management import execute_from_command_line except ImportError as exc: - raise ImportError("Couldn't import Django. Are you sure it's installed and " - 'available on your PYTHONPATH environment variable? Did you ' - 'forget to activate a virtual environment?') from exc + raise ImportError( + "Couldn't import Django. Are you sure it's installed and " + "available on your PYTHONPATH environment variable? Did you " + "forget to activate a virtual environment?" + ) from exc execute_from_command_line(sys.argv) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/scripts/backup_syllabus_models.py b/scripts/backup_syllabus_models.py index 980563fe7..d60d75fcb 100644 --- a/scripts/backup_syllabus_models.py +++ b/scripts/backup_syllabus_models.py @@ -13,22 +13,22 @@ def execute(command): sys.exit(1) -STRATEGY = 'bucket' +STRATEGY = "bucket" # STRATEGY = 'storage' -if __name__ == '__main__': - print('') - print('Backup Admissions Cohort') - execute(f'python manage.py backup {STRATEGY} admissions Cohort') +if __name__ == "__main__": + print("") + print("Backup Admissions Cohort") + execute(f"python manage.py backup {STRATEGY} admissions Cohort") - print('') - print('Backup Admissions Certificate') - execute(f'python manage.py backup {STRATEGY} admissions Certificate') + print("") + print("Backup Admissions Certificate") + execute(f"python manage.py backup {STRATEGY} admissions Certificate") - print('') - print('Backup Admissions Syllabus') - execute(f'python manage.py backup {STRATEGY} admissions Syllabus') + print("") + print("Backup Admissions Syllabus") + execute(f"python manage.py backup {STRATEGY} admissions Syllabus") - print('') - print('Backup certificate Specialty') - execute(f'python manage.py backup {STRATEGY} certificate Specialty') + print("") + print("Backup certificate Specialty") + execute(f"python manage.py backup {STRATEGY} certificate Specialty") diff --git a/scripts/build.py b/scripts/build.py index 440b99738..cbfccf834 100644 --- a/scripts/build.py +++ b/scripts/build.py @@ -1,12 +1,14 @@ import os -env = ' '.join([ - f'--env NEW_RELIC_APP_NAME={os.getenv("NEW_RELIC_APP_NAME")}', - f'--env NEW_RELIC_LICENSE_KEY={os.getenv("NEW_RELIC_LICENSE_KEY")}', - f'--env NEW_RELIC_LOG={os.getenv("NEW_RELIC_LOG")}', - f'--env NEW_RELIC_API_KEY={os.getenv("NEW_RELIC_API_KEY")}', - f'--env NEW_RELIC_ACCOUNT_ID={os.getenv("NEW_RELIC_ACCOUNT_ID")}', -]) +env = " ".join( + [ + f'--env NEW_RELIC_APP_NAME={os.getenv("NEW_RELIC_APP_NAME")}', + f'--env NEW_RELIC_LICENSE_KEY={os.getenv("NEW_RELIC_LICENSE_KEY")}', + f'--env 
NEW_RELIC_LOG={os.getenv("NEW_RELIC_LOG")}', + f'--env NEW_RELIC_API_KEY={os.getenv("NEW_RELIC_API_KEY")}', + f'--env NEW_RELIC_ACCOUNT_ID={os.getenv("NEW_RELIC_ACCOUNT_ID")}', + ] +) -os.system(f'podman build -t 4geeks . -f ./.heroku.Dockerfile {env} ') +os.system(f"podman build -t 4geeks . -f ./.heroku.Dockerfile {env} ") env = os.environ.copy() diff --git a/scripts/celery.py b/scripts/celery.py index 2d9764a9c..7f608c21d 100644 --- a/scripts/celery.py +++ b/scripts/celery.py @@ -4,9 +4,9 @@ import sys from .utils.environment import celery_worker_environment -if __name__ == '__main__': +if __name__ == "__main__": celery_worker_environment() - exit_code = os.system('celery -A breathecode.celery worker --loglevel=INFO') + exit_code = os.system("celery -A breathecode.celery worker --loglevel=INFO") # python don't return 256 if exit_code: diff --git a/scripts/coverage.py b/scripts/coverage.py index f61f7be4f..da6386768 100644 --- a/scripts/coverage.py +++ b/scripts/coverage.py @@ -9,42 +9,46 @@ def python_module_to_dir(module: str) -> str: - parsed_dir = '/'.join(module.split('.')) - return Path(f'./{parsed_dir}').resolve() + parsed_dir = "/".join(module.split(".")) + return Path(f"./{parsed_dir}").resolve() def help_command(): - print('Usage:') - print(' `pipenv run cov breathecode.events` where events is the name of module and accept ' - 'add submodules using the dot(.) character as delimiter.') - print('') - print('commands:') - print(' --help see this help message.') + print("Usage:") + print( + " `pipenv run cov breathecode.events` where events is the name of module and accept " + "add submodules using the dot(.) character as delimiter." + ) + print("") + print("commands:") + print(" --help see this help message.") exit() -if __name__ == '__main__': - module = 'breathecode' +if __name__ == "__main__": + module = "breathecode" if len(sys.argv) > 3: module = sys.argv[3] - if module == '--help' or module == '-h': + if module == "--help" or module == "-h": help_command() dir = python_module_to_dir(module) reset_environment() test_environment() - htmlcov_path = os.path.join(os.getcwd(), 'htmlcov') + htmlcov_path = os.path.join(os.getcwd(), "htmlcov") if os.path.exists(htmlcov_path): shutil.rmtree(htmlcov_path) - exit_code = os.system(f'pytest {dir} --disable-pytest-warnings {sys.argv[1]} {sys.argv[2]} ' - f'--cov={module} --cov-report html --nomigrations --durations=1') + exit_code = os.system( + f"pytest {dir} --disable-pytest-warnings {sys.argv[1]} {sys.argv[2]} " + f"--cov={module} --cov-report html --nomigrations --durations=1" + ) - webbrowser.open('file://' + os.path.realpath(os.path.join(os.getcwd(), 'htmlcov', 'index.html'))) + webbrowser.open("file://" + os.path.realpath(os.path.join(os.getcwd(), "htmlcov", "index.html"))) # python don't return 256 if exit_code: diff --git a/scripts/create_bucket.py b/scripts/create_bucket.py index 80c55c7e0..096bf06f0 100644 --- a/scripts/create_bucket.py +++ b/scripts/create_bucket.py @@ -4,29 +4,29 @@ def resolve_credentials(): """Resolve Google Cloud Credentials.""" - path = os.getenv('GOOGLE_APPLICATION_CREDENTIALS', '') + path = os.getenv("GOOGLE_APPLICATION_CREDENTIALS", "") if os.path.exists(path): return True - credentials = os.getenv('GOOGLE_SERVICE_KEY', None) + credentials = os.getenv("GOOGLE_SERVICE_KEY", None) if credentials: - with open(path, 'w') as credentials_file: + with open(path, "w") as credentials_file: credentials_file.write(credentials) def help_command(): - print('Usage:') - print(' `pipenv run create_bucket BUCKET_NAME` 
where BUCKET_NAME is the name of new bucket') - print('') - print('commands:') - print(' --help see this help message.') + print("Usage:") + print(" `pipenv run create_bucket BUCKET_NAME` where BUCKET_NAME is the name of new bucket") + print("") + print("commands:") + print(" --help see this help message.") exit() def name_not_provided(): - print('Bucket name was not provided') - print('') + print("Bucket name was not provided") + print("") exit() @@ -35,14 +35,14 @@ def create_bucket(name: str): storage_client = storage.Client() bucket = storage_client.create_bucket(name) - print(f'Bucket {bucket.name} created.') + print(f"Bucket {bucket.name} created.") -if __name__ == '__main__': +if __name__ == "__main__": if len(sys.argv) > 1: name = sys.argv[1] - if name == '--help' or name == '-h': + if name == "--help" or name == "-h": help_command() create_bucket(name) diff --git a/scripts/docker_build.py b/scripts/docker_build.py index ccbb47f78..6ecd893a9 100644 --- a/scripts/docker_build.py +++ b/scripts/docker_build.py @@ -5,8 +5,8 @@ import os import sys -if __name__ == '__main__': - exit_code = os.system('docker build ./ -t breathecode --rm=false') +if __name__ == "__main__": + exit_code = os.system("docker build ./ -t breathecode --rm=false") if exit_code: sys.exit(1) diff --git a/scripts/docker_entrypoint.py b/scripts/docker_entrypoint.py index 05088f57e..a9a04a534 100644 --- a/scripts/docker_entrypoint.py +++ b/scripts/docker_entrypoint.py @@ -13,19 +13,19 @@ def execute(command): sys.exit(1) -if __name__ == '__main__': - print('') - print('Collect statics') - execute('python manage.py collectstatic --noinput') - - print('') - print('Migrate') - execute('python manage.py migrate') - - print('') - print('Load fixtures') - execute('python manage.py loaddata breathecode/*/fixtures/dev_*.json') - - print('') - print('Run server') - execute('gunicorn --bind :8000 --workers 3 breathecode.wsgi:application') +if __name__ == "__main__": + print("") + print("Collect statics") + execute("python manage.py collectstatic --noinput") + + print("") + print("Migrate") + execute("python manage.py migrate") + + print("") + print("Load fixtures") + execute("python manage.py loaddata breathecode/*/fixtures/dev_*.json") + + print("") + print("Run server") + execute("gunicorn --bind :8000 --workers 3 breathecode.wsgi:application") diff --git a/scripts/docker_entrypoint_dev.py b/scripts/docker_entrypoint_dev.py index 398ef2f6d..e7e4cae09 100644 --- a/scripts/docker_entrypoint_dev.py +++ b/scripts/docker_entrypoint_dev.py @@ -13,25 +13,25 @@ def execute(command): sys.exit(1) -if __name__ == '__main__': - print('') - print('fix .env') - execute('python -m scripts.hooks.postinstall.generate_environment') - - print('') - print('Collect statics') - execute('sudo chown shell -R staticfiles') - execute('sudo chmod 777 -R staticfiles') - execute('pipenv run python manage.py collectstatic --noinput') - - print('') - print('Migrate') - execute('pipenv run python manage.py migrate') - - print('') - print('Load fixtures') - execute('pipenv run python manage.py loaddata breathecode/*/fixtures/dev_*.json') - - print('') - print('Run server') - execute('pipenv run python manage.py runserver 0.0.0.0:8000') +if __name__ == "__main__": + print("") + print("fix .env") + execute("python -m scripts.hooks.postinstall.generate_environment") + + print("") + print("Collect statics") + execute("sudo chown shell -R staticfiles") + execute("sudo chmod 777 -R staticfiles") + execute("pipenv run python manage.py collectstatic --noinput") + + 
print("") + print("Migrate") + execute("pipenv run python manage.py migrate") + + print("") + print("Load fixtures") + execute("pipenv run python manage.py loaddata breathecode/*/fixtures/dev_*.json") + + print("") + print("Run server") + execute("pipenv run python manage.py runserver 0.0.0.0:8000") diff --git a/scripts/doctor/main.py b/scripts/doctor/main.py index 0b8044e50..0e3f2da1b 100644 --- a/scripts/doctor/main.py +++ b/scripts/doctor/main.py @@ -6,23 +6,23 @@ from shutil import which import socket -__all__ = ['main'] +__all__ = ["main"] api_path = os.getcwd() -dependencies_path = Path(f'{api_path}/scripts/doctor/dependencies.json').resolve() +dependencies_path = Path(f"{api_path}/scripts/doctor/dependencies.json").resolve() -def status(condition, true='yes', false='no'): +def status(condition, true="yes", false="no"): return true if condition else false def check_dependencies(dependencies): - print('--- Check installation status ---\n') + print("--- Check installation status ---\n") is_python_outdated = sys.version_info[0] < 3 or sys.version_info[1] < 9 - print('python =>', status(not is_python_outdated, 'updated', 'outdated')) + print("python =>", status(not is_python_outdated, "updated", "outdated")) for dependency in dependencies: - print(f'{dependency} =>', status(which(dependency), 'installed', 'not installed')) + print(f"{dependency} =>", status(which(dependency), "installed", "not installed")) def port_is_open(host, port=80): @@ -35,18 +35,19 @@ def port_is_open(host, port=80): def check_conections(): - print('\n--- Check conection status ---\n') + print("\n--- Check conection status ---\n") import subprocess - result = subprocess.run(['docker', 'image', 'ls'], stdout=subprocess.PIPE, stderr=subprocess.PIPE) - print('docker =>', status(not result.stderr, 'up', 'down')) - print('postgres =>', status(port_is_open('localhost', 5432), 'up', 'down')) - print('redis =>', status(port_is_open('localhost', 6379), 'up', 'down')) + result = subprocess.run(["docker", "image", "ls"], stdout=subprocess.PIPE, stderr=subprocess.PIPE) + + print("docker =>", status(not result.stderr, "up", "down")) + print("postgres =>", status(port_is_open("localhost", 5432), "up", "down")) + print("redis =>", status(port_is_open("localhost", 6379), "up", "down")) def main(): - with open(dependencies_path, 'r') as file: + with open(dependencies_path, "r") as file: dependencies = json.load(file) check_dependencies(dependencies) diff --git a/scripts/environment.py b/scripts/environment.py index 860b7aba9..3b0bab898 100644 --- a/scripts/environment.py +++ b/scripts/environment.py @@ -2,4 +2,4 @@ def test_environment(): - os.environ['ENV'] = 'test' + os.environ["ENV"] = "test" diff --git a/scripts/hooks/postinstall/__init__.py b/scripts/hooks/postinstall/__init__.py index 78df8b339..afffb23a4 100644 --- a/scripts/hooks/postinstall/__init__.py +++ b/scripts/hooks/postinstall/__init__.py @@ -2,10 +2,11 @@ from os import getcwd import glob -modules = glob.glob(join(dirname(__file__), '*.py')) +modules = glob.glob(join(dirname(__file__), "*.py")) scripts = [ - relpath(f, getcwd()).replace('\\', '.').replace('/', '.').replace('.py', '') for f in modules - if isfile(f) and not f.endswith('__init__.py') + relpath(f, getcwd()).replace("\\", ".").replace("/", ".").replace(".py", "") + for f in modules + if isfile(f) and not f.endswith("__init__.py") ] -__all__ = ['scripts'] +__all__ = ["scripts"] diff --git a/scripts/hooks/postinstall/generate_environment.py b/scripts/hooks/postinstall/generate_environment.py index 
5929b356e..06bd106b9 100644 --- a/scripts/hooks/postinstall/generate_environment.py +++ b/scripts/hooks/postinstall/generate_environment.py @@ -4,38 +4,38 @@ from shutil import which, copyfile api_path = os.getcwd() -env_path = Path(f'{api_path}/.env').resolve() -env_example_path = Path(f'{api_path}/.env.example').resolve() -where_in_docker = os.getenv('DOCKER') == '1' +env_path = Path(f"{api_path}/.env").resolve() +env_example_path = Path(f"{api_path}/.env.example").resolve() +where_in_docker = os.getenv("DOCKER") == "1" -if which('gp'): +if which("gp"): copyfile(env_example_path, env_path) exit() -content = '' -with open(env_example_path, 'r') as file: - lines = file.read().split('\n') +content = "" +with open(env_example_path, "r") as file: + lines = file.read().split("\n") for line in lines: try: - key, value = line.split('=') + key, value = line.split("=") - if key == 'DATABASE_URL': - hostname = 'postgres' if where_in_docker else 'localhost' - content += f'{key}=postgres://user:pass@{hostname}:5432/breathecode\n' + if key == "DATABASE_URL": + hostname = "postgres" if where_in_docker else "localhost" + content += f"{key}=postgres://user:pass@{hostname}:5432/breathecode\n" - elif key == 'REDIS_URL': - hostname = 'redis' if where_in_docker else 'localhost' - content += f'{key}=redis://{hostname}:6379\n' + elif key == "REDIS_URL": + hostname = "redis" if where_in_docker else "localhost" + content += f"{key}=redis://{hostname}:6379\n" - elif key == 'API_URL': - content += f'{key}=http://localhost:8000\n' + elif key == "API_URL": + content += f"{key}=http://localhost:8000\n" else: - content += f'{key}={value}\n' + content += f"{key}={value}\n" except Exception: - content += '\n' + content += "\n" -with open(env_path, 'w') as file: +with open(env_path, "w") as file: file.write(content) diff --git a/scripts/hooks/postinstall/install_precommit.py b/scripts/hooks/postinstall/install_precommit.py index 29ac4bdce..c159c2628 100644 --- a/scripts/hooks/postinstall/install_precommit.py +++ b/scripts/hooks/postinstall/install_precommit.py @@ -3,7 +3,7 @@ import os import sys -exit_code = os.system('pipenv run pre-commit install') +exit_code = os.system("pipenv run pre-commit install") # python don't return 256 if exit_code: diff --git a/scripts/hooks/postinstall/install_vscode_pylance.py b/scripts/hooks/postinstall/install_vscode_pylance.py index 368c6e6e5..c50817993 100644 --- a/scripts/hooks/postinstall/install_vscode_pylance.py +++ b/scripts/hooks/postinstall/install_vscode_pylance.py @@ -5,13 +5,13 @@ from pathlib import Path from shutil import which -if which('gp'): +if which("gp"): exit() api_path = os.getcwd() -vscode_folder_path = Path(f'{api_path}/.vscode').resolve() -vscode_setting_path = Path(f'{api_path}/.vscode/settings.json').resolve() +vscode_folder_path = Path(f"{api_path}/.vscode").resolve() +vscode_setting_path = Path(f"{api_path}/.vscode/settings.json").resolve() if not os.path.isdir(vscode_folder_path): os.mkdir(vscode_folder_path) @@ -20,16 +20,16 @@ if os.path.isfile(vscode_setting_path): # import yaml - os.system(f'pipenv run python -m scripts.utils.fix_json {vscode_setting_path}') + os.system(f"pipenv run python -m scripts.utils.fix_json {vscode_setting_path}") - with open(vscode_setting_path, 'r') as vscode_setting_file: + with open(vscode_setting_path, "r") as vscode_setting_file: vscode_setting_json = json.load(vscode_setting_file) -vscode_setting_json['python.languageServer'] = 'Pylance' +vscode_setting_json["python.languageServer"] = "Pylance" -bad_keys = [key for key in 
vscode_setting_json if key.startswith('//')] +bad_keys = [key for key in vscode_setting_json if key.startswith("//")] for key in bad_keys: del vscode_setting_json[key] -with open(vscode_setting_path, 'w') as vscode_setting_file: +with open(vscode_setting_path, "w") as vscode_setting_file: json.dump(vscode_setting_json, vscode_setting_file, indent=2) diff --git a/scripts/hooks/preinstall/__init__.py b/scripts/hooks/preinstall/__init__.py index 78df8b339..afffb23a4 100644 --- a/scripts/hooks/preinstall/__init__.py +++ b/scripts/hooks/preinstall/__init__.py @@ -2,10 +2,11 @@ from os import getcwd import glob -modules = glob.glob(join(dirname(__file__), '*.py')) +modules = glob.glob(join(dirname(__file__), "*.py")) scripts = [ - relpath(f, getcwd()).replace('\\', '.').replace('/', '.').replace('.py', '') for f in modules - if isfile(f) and not f.endswith('__init__.py') + relpath(f, getcwd()).replace("\\", ".").replace("/", ".").replace(".py", "") + for f in modules + if isfile(f) and not f.endswith("__init__.py") ] -__all__ = ['scripts'] +__all__ = ["scripts"] diff --git a/scripts/hooks/preinstall/fix_psycopg2.py b/scripts/hooks/preinstall/fix_psycopg2.py index 36e4ca67e..774da3d82 100644 --- a/scripts/hooks/preinstall/fix_psycopg2.py +++ b/scripts/hooks/preinstall/fix_psycopg2.py @@ -4,20 +4,22 @@ from shutil import which import subprocess -if which('pg_config'): +if which("pg_config"): exit() -if not which('uname'): - exit('Auto installation of pg_config in windows is not implemented yet, try install PostgreSQL ' - 'and add C:\\Program Files\\PostgreSQL\\12\\bin to the PATH') +if not which("uname"): + exit( + "Auto installation of pg_config in windows is not implemented yet, try install PostgreSQL " + "and add C:\\Program Files\\PostgreSQL\\12\\bin to the PATH" + ) -output = subprocess.check_output(['cat', '/etc/os-release']).decode() +output = subprocess.check_output(["cat", "/etc/os-release"]).decode() pending = True -ARCH_BASE = ['Arch Linux', 'Manjaro Linux'] -REDHAT_BASE = ['Red Hat Enterprise Linux', 'Fedora Linux', 'CentOS Linux'] -SUSE_BASE = ['SLES', 'openSUSE'] -DEBIAN_BASE = ['Debian GNU/Linux', 'Ubuntu'] +ARCH_BASE = ["Arch Linux", "Manjaro Linux"] +REDHAT_BASE = ["Red Hat Enterprise Linux", "Fedora Linux", "CentOS Linux"] +SUSE_BASE = ["SLES", "openSUSE"] +DEBIAN_BASE = ["Debian GNU/Linux", "Ubuntu"] # notify the command will be executed @@ -29,33 +31,33 @@ def system(command): if pending: for name in DEBIAN_BASE: if f'NAME="{name}"' in output: - system('sudo apt-get update') - system('sudo apt-get install libpq-dev -y') + system("sudo apt-get update") + system("sudo apt-get install libpq-dev -y") pending = False if pending: for name in ARCH_BASE: if f'NAME="{name}"' in output: - system('sudo pacman -S postgresql-libs --noconfirm') + system("sudo pacman -S postgresql-libs --noconfirm") pending = False if pending: for name in REDHAT_BASE: if f'NAME="{name}"' in output: - system('sudo yum install libpq-devel -y') + system("sudo yum install libpq-devel -y") pending = False if pending: for name in SUSE_BASE: if f'NAME="{name}' in output: - system('sudo zypper --non-interactive in postgresql-server-devel') + system("sudo zypper --non-interactive in postgresql-server-devel") pending = False # assuming this command is running in macos if pending: - if not which('brew'): - exit('brew is not installed on this system') + if not which("brew"): + exit("brew is not installed on this system") # I don't know which argument to bypass the prompt - system('sudo brew install postgresql -y') + 
system("sudo brew install postgresql -y") pending = False diff --git a/scripts/hooks/preinstall/install_deps_with_pip.py b/scripts/hooks/preinstall/install_deps_with_pip.py index 914b50b7a..9ae7bef5a 100644 --- a/scripts/hooks/preinstall/install_deps_with_pip.py +++ b/scripts/hooks/preinstall/install_deps_with_pip.py @@ -9,11 +9,13 @@ # sudo dnf install boost-devel python3.12-devel pip_path = get_pip_path() -commands = ';\n'.join([ - f'{pip_path} install --upgrade pip', - f'{pip_path} install --upgrade yapf pipenv toml', - '', -]) +commands = ";\n".join( + [ + f"{pip_path} install --upgrade pip", + f"{pip_path} install --upgrade yapf pipenv toml", + "", + ] +) exit_code = os.system(commands) diff --git a/scripts/install/main.py b/scripts/install/main.py index 8302ee2d3..1029e5575 100644 --- a/scripts/install/main.py +++ b/scripts/install/main.py @@ -1,7 +1,7 @@ import os from scripts.utils.get_python_path import get_python_path -__all__ = ['main'] +__all__ = ["main"] python_path = get_python_path() @@ -9,28 +9,28 @@ def preinstall_hook(): from scripts.hooks.preinstall import scripts for script_name in scripts: - print('') - print('--- Running preinstall script ---', os.path.basename(script_name), '---') - print('') - os.system(f'{python_path} -m {script_name}') + print("") + print("--- Running preinstall script ---", os.path.basename(script_name), "---") + print("") + os.system(f"{python_path} -m {script_name}") def install(): - print('') - print('--- Running pipenv install ---') - print('') + print("") + print("--- Running pipenv install ---") + print("") - os.system('pipenv install --dev') + os.system("pipenv install --dev") def postinstall_hook(): from scripts.hooks.postinstall import scripts for script_name in scripts: - print('') - print('--- Running postinstall script ---', os.path.basename(script_name), '---') - print('') - os.system(f'{python_path} -m {script_name}') + print("") + print("--- Running postinstall script ---", os.path.basename(script_name), "---") + print("") + os.system(f"{python_path} -m {script_name}") def main(): diff --git a/scripts/md5.py b/scripts/md5.py index c42ee6688..3aa1e8cac 100644 --- a/scripts/md5.py +++ b/scripts/md5.py @@ -4,13 +4,13 @@ import hashlib import sys -if __name__ == '__main__': - args = '' +if __name__ == "__main__": + args = "" if len(sys.argv) <= 1: - Exception('No arguments passed') + Exception("No arguments passed") - with open(sys.argv[1], 'rb') as f: + with open(sys.argv[1], "rb") as f: content = f.read() hash = hashlib.md5(content).hexdigest() diff --git a/scripts/parallel_coverage.py b/scripts/parallel_coverage.py index 6de158045..aaf9c2ed0 100644 --- a/scripts/parallel_coverage.py +++ b/scripts/parallel_coverage.py @@ -11,28 +11,30 @@ def python_module_to_dir(module: str) -> str: - parsed_dir = '/'.join(module.split('.')) - return Path(f'./{parsed_dir}').resolve() + parsed_dir = "/".join(module.split(".")) + return Path(f"./{parsed_dir}").resolve() def parse_arguments(): - parser = argparse.ArgumentParser(description='Run pytest with coverage and optional seed.') - parser.add_argument('--seed', type=int, help='Seed for randomness') - parser.add_argument('pytest_args', nargs='*', help='Arguments to pass to pytest') + parser = argparse.ArgumentParser(description="Run pytest with coverage and optional seed.") + parser.add_argument("--seed", type=int, help="Seed for randomness") + parser.add_argument("pytest_args", nargs="*", help="Arguments to pass to pytest") return parser.parse_args() def help_command(): - print('Usage:') - print(' 
`pipenv run cov breathecode.events` where events is the name of module and accept ' - 'add submodules using the dot(.) character as delimiter.') - print('') - print('commands:') - print(' --help see this help message.') + print("Usage:") + print( + " `pipenv run cov breathecode.events` where events is the name of module and accept " + "add submodules using the dot(.) character as delimiter." + ) + print("") + print("commands:") + print(" --help see this help message.") exit() -if __name__ == '__main__': +if __name__ == "__main__": args = parse_arguments() if args.seed is None: @@ -40,34 +42,36 @@ def help_command(): else: seed = args.seed - module = 'breathecode' + module = "breathecode" if args.pytest_args: module = args.pytest_args[0] - if module == '--help' or module == '-h': + if module == "--help" or module == "-h": help_command() dir = python_module_to_dir(module) - htmlcov_path = os.path.join(os.getcwd(), 'htmlcov') + htmlcov_path = os.path.join(os.getcwd(), "htmlcov") if os.path.exists(htmlcov_path): shutil.rmtree(htmlcov_path) - command = (f'pytest {dir} --disable-pytest-warnings {" ".join(args.pytest_args[1:])} ' - f'--cov={module} --cov-report html -n auto --nomigrations --durations=1') + command = ( + f'pytest {dir} --disable-pytest-warnings {" ".join(args.pytest_args[1:])} ' + f"--cov={module} --cov-report html -n auto --nomigrations --durations=1" + ) env = os.environ.copy() - env['ENV'] = 'test' - env['RANDOM_SEED'] = str(seed) + env["ENV"] = "test" + env["RANDOM_SEED"] = str(seed) exit_code = subprocess.run(command, env=env, shell=True).returncode print() - print(f'Seed {seed} used, you can provide it locally to reproduce random errors') + print(f"Seed {seed} used, you can provide it locally to reproduce random errors") - webbrowser.open('file://' + os.path.realpath(os.path.join(os.getcwd(), 'htmlcov', 'index.html'))) + webbrowser.open("file://" + os.path.realpath(os.path.join(os.getcwd(), "htmlcov", "index.html"))) # python doesn't return 256 if exit_code: diff --git a/scripts/parallel_coverage_ci.py b/scripts/parallel_coverage_ci.py index 6e28af980..a63ba1904 100644 --- a/scripts/parallel_coverage_ci.py +++ b/scripts/parallel_coverage_ci.py @@ -9,28 +9,30 @@ def python_module_to_dir(module: str) -> str: - parsed_dir = '/'.join(module.split('.')) - return Path(f'./{parsed_dir}').resolve() + parsed_dir = "/".join(module.split(".")) + return Path(f"./{parsed_dir}").resolve() def parse_arguments(): - parser = argparse.ArgumentParser(description='Run pytest with coverage and optional seed.') - parser.add_argument('--seed', type=int, help='Seed for randomness') - parser.add_argument('pytest_args', nargs='*', help='Arguments to pass to pytest') + parser = argparse.ArgumentParser(description="Run pytest with coverage and optional seed.") + parser.add_argument("--seed", type=int, help="Seed for randomness") + parser.add_argument("pytest_args", nargs="*", help="Arguments to pass to pytest") return parser.parse_args() def help_command(): - print('Usage:') - print(' `pipenv run cov breathecode.events` where events is the name of module and accept ' - 'add submodules using the dot(.) character as delimiter.') - print('') - print('commands:') - print(' --help see this help message.') + print("Usage:") + print( + " `pipenv run cov breathecode.events` where events is the name of module and accept " + "add submodules using the dot(.) character as delimiter." 
+ ) + print("") + print("commands:") + print(" --help see this help message.") exit() -if __name__ == '__main__': +if __name__ == "__main__": args = parse_arguments() if args.seed is None: @@ -38,32 +40,34 @@ def help_command(): else: seed = args.seed - module = 'breathecode' + module = "breathecode" if args.pytest_args: module = args.pytest_args[0] - if module == '--help' or module == '-h': + if module == "--help" or module == "-h": help_command() dir = python_module_to_dir(module) - xml_path = os.path.join(os.getcwd(), 'coverage.xml') + xml_path = os.path.join(os.getcwd(), "coverage.xml") if os.path.exists(xml_path): os.remove(xml_path) - command = (f'pytest {dir} --disable-pytest-warnings {" ".join(args.pytest_args[1:])} ' - f'--cov={module} --cov-report xml -n auto --durations=1') + command = ( + f'pytest {dir} --disable-pytest-warnings {" ".join(args.pytest_args[1:])} ' + f"--cov={module} --cov-report xml -n auto --durations=1" + ) env = os.environ.copy() - env['ENV'] = 'test' - env['RANDOM_SEED'] = str(seed) + env["ENV"] = "test" + env["RANDOM_SEED"] = str(seed) exit_code = subprocess.run(command, env=env, shell=True).returncode print() - print(f'Seed {seed} used, you can provide it locally to reproduce random errors') + print(f"Seed {seed} used, you can provide it locally to reproduce random errors") # python doesn't return 256 if exit_code: diff --git a/scripts/parallel_test.py b/scripts/parallel_test.py index d2b434d19..b48492152 100644 --- a/scripts/parallel_test.py +++ b/scripts/parallel_test.py @@ -6,18 +6,18 @@ import subprocess import sys -if __name__ == '__main__': - args = '' +if __name__ == "__main__": + args = "" if len(sys.argv) > 1: sys.argv.pop(0) - args = ' '.join(sys.argv) + args = " ".join(sys.argv) - command = f'pytest {args} --disable-pytest-warnings -n auto --nomigrations --durations=1' + command = f"pytest {args} --disable-pytest-warnings -n auto --nomigrations --durations=1" # command = f'pytest {pytest_args} -n auto --nomigrations --durations=1' env = os.environ.copy() - env['ENV'] = 'test' + env["ENV"] = "test" exit_code = subprocess.run(command, env=env, shell=True).returncode diff --git a/scripts/parallel_test_ci.py b/scripts/parallel_test_ci.py index 8eb9f4285..8753c21f0 100644 --- a/scripts/parallel_test_ci.py +++ b/scripts/parallel_test_ci.py @@ -9,13 +9,13 @@ def parse_arguments(): - parser = argparse.ArgumentParser(description='Run pytest with optional seed.') - parser.add_argument('--seed', type=int, help='Seed for randomness') - parser.add_argument('pytest_args', nargs='*', help='Arguments to pass to pytest') + parser = argparse.ArgumentParser(description="Run pytest with optional seed.") + parser.add_argument("--seed", type=int, help="Seed for randomness") + parser.add_argument("pytest_args", nargs="*", help="Arguments to pass to pytest") return parser.parse_args() -if __name__ == '__main__': +if __name__ == "__main__": args = parse_arguments() if args.seed is None: @@ -23,17 +23,17 @@ def parse_arguments(): else: seed = args.seed - pytest_args = ' '.join(args.pytest_args) - command = f'pytest {pytest_args} --disable-pytest-warnings -n auto --durations=1' + pytest_args = " ".join(args.pytest_args) + command = f"pytest {pytest_args} --disable-pytest-warnings -n auto --durations=1" env = os.environ.copy() - env['ENV'] = 'test' - env['RANDOM_SEED'] = str(seed) + env["ENV"] = "test" + env["RANDOM_SEED"] = str(seed) exit_code = subprocess.run(command, env=env, shell=True).returncode print() - print(f'Seed {seed} used, you can provide it locally to 
reproduce random errors') + print(f"Seed {seed} used, you can provide it locally to reproduce random errors") # python doesn't return 256 if exit_code: diff --git a/scripts/pending_migrations.py b/scripts/pending_migrations.py index 69fcdc476..3618ae907 100644 --- a/scripts/pending_migrations.py +++ b/scripts/pending_migrations.py @@ -1,6 +1,6 @@ import os -p = os.system('python manage.py makemigrations --check --dry-run') +p = os.system("python manage.py makemigrations --check --dry-run") if p: exit(1) diff --git a/scripts/postgres_get_server_cert.py b/scripts/postgres_get_server_cert.py index 470b980d3..04e0d596f 100644 --- a/scripts/postgres_get_server_cert.py +++ b/scripts/postgres_get_server_cert.py @@ -19,9 +19,9 @@ def main(): sock = socket.create_connection(target) try: certificate_as_pem = get_certificate_from_socket(sock) - print(certificate_as_pem.decode('utf-8')) + print(certificate_as_pem.decode("utf-8")) except Exception as exc: - sys.stderr.write('Something failed while fetching certificate: {0}\n'.format(exc)) + sys.stderr.write("Something failed while fetching certificate: {0}\n".format(exc)) sys.exit(1) finally: sock.close() @@ -29,14 +29,14 @@ def main(): def get_args(): parser = argparse.ArgumentParser() - parser.add_argument('database', help='Either an IP address, hostname or URL with host and port') + parser.add_argument("database", help="Either an IP address, hostname or URL with host and port") return parser.parse_args() def get_target_address_from_args(args): specified_target = args.database - if '//' not in specified_target: - specified_target = '//' + specified_target + if "//" not in specified_target: + specified_target = "//" + specified_target parsed = urlparse(specified_target) return (parsed.hostname, parsed.port or 5432) @@ -55,18 +55,18 @@ def request_ssl(sock): # 1234.5679 is the magic protocol version used to request TLS, defined # in pgcomm.h) version_ssl = postgres_protocol_version_to_binary(1234, 5679) - length = struct.pack('!I', 8) + length = struct.pack("!I", 8) packet = length + version_ssl sock.sendall(packet) data = read_n_bytes_from_socket(sock, 1) - if data != b'S': - raise Exception('Backend does not support TLS') + if data != b"S": + raise Exception("Backend does not support TLS") def get_ssl_context(): # Return the strongest SSL context available locally - for proto in ('PROTOCOL_TLSv1_2', 'PROTOCOL_TLSv1', 'PROTOCOL_SSLv23'): + for proto in ("PROTOCOL_TLSv1_2", "PROTOCOL_TLSv1", "PROTOCOL_SSLv23"): protocol = getattr(ssl, proto, None) if protocol: break @@ -76,12 +76,12 @@ def get_ssl_context(): def encode_der_as_pem(cert): # Forking out to openssl to not have to add any dependencies to script, # preferably you'd do this with pycrypto or other ssl libraries. 
- cmd = ['openssl', 'x509', '-inform', 'DER'] + cmd = ["openssl", "x509", "-inform", "DER"] pipe = subprocess.PIPE process = subprocess.Popen(cmd, stdin=pipe, stdout=pipe, stderr=pipe) stdout, stderr = process.communicate(cert) if stderr: - raise Exception('OpenSSL error when converting cert to PEM: {0}'.format(stderr)) + raise Exception("OpenSSL error when converting cert to PEM: {0}".format(stderr)) return stdout.strip() @@ -96,8 +96,8 @@ def read_n_bytes_from_socket(sock, n): def postgres_protocol_version_to_binary(major, minor): - return struct.pack('!I', major << 16 | minor) + return struct.pack("!I", major << 16 | minor) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/scripts/test.py b/scripts/test.py index 5f286d29d..971892339 100644 --- a/scripts/test.py +++ b/scripts/test.py @@ -5,15 +5,15 @@ import os import sys -if __name__ == '__main__': - args = '' +if __name__ == "__main__": + args = "" if len(sys.argv) > 1: sys.argv.pop(0) - args = ' '.join(sys.argv) + args = " ".join(sys.argv) # exit_code = os.system(f'pytest {args} --disable-pytest-warnings --nomigrations --durations=1') - exit_code = os.system(f'pytest {args} --nomigrations --durations=1') + exit_code = os.system(f"pytest {args} --nomigrations --durations=1") # python don't return 256 if exit_code: diff --git a/scripts/update_environments.py b/scripts/update_environments.py index 813525fc3..2b2924a58 100644 --- a/scripts/update_environments.py +++ b/scripts/update_environments.py @@ -6,31 +6,31 @@ logger = logging.getLogger(__name__) -CONFIGURATION_FILE = os.path.join(os.getcwd(), '.breathecode.yml') +CONFIGURATION_FILE = os.path.join(os.getcwd(), ".breathecode.yml") NEW_ENVS = [] -if os.environ.get('PIPENV_ACTIVE') == '1': +if os.environ.get("PIPENV_ACTIVE") == "1": logger.error("This command can't be execute with pipenv, run instead `python -m scripts.update_environments`") exit(1) -with open(CONFIGURATION_FILE, 'r') as file: +with open(CONFIGURATION_FILE, "r") as file: data = yaml.load(file, Loader=FullLoader) or {} -if 'tests' not in data: - data['tests'] = {} +if "tests" not in data: + data["tests"] = {} -if 'environments' not in data['tests']: - data['tests']['environments'] = {} +if "environments" not in data["tests"]: + data["tests"]["environments"] = {} -if 'whitelist' not in data['tests']['environments']: - data['tests']['environments']['whitelist'] = [] +if "whitelist" not in data["tests"]["environments"]: + data["tests"]["environments"]["whitelist"] = [] -whitelist_environment = set(data['tests']['environments']['whitelist']) +whitelist_environment = set(data["tests"]["environments"]["whitelist"]) system_environment = set(os.environ) whitelist_environment.update(system_environment) -data['tests']['environments']['whitelist'] = list(whitelist_environment) +data["tests"]["environments"]["whitelist"] = list(whitelist_environment) -with open(CONFIGURATION_FILE, 'w') as file: +with open(CONFIGURATION_FILE, "w") as file: yaml.dump(data, file, indent=2) diff --git a/scripts/update_sql_keywords_json.py b/scripts/update_sql_keywords_json.py index e07d6f5f4..8d343d773 100644 --- a/scripts/update_sql_keywords_json.py +++ b/scripts/update_sql_keywords_json.py @@ -4,42 +4,208 @@ from bs4 import BeautifulSoup from pathlib import Path -url_source_of_postgres_keywords = 'https://www.postgresql.org/docs/8.1/sql-keywords-appendix.html' +url_source_of_postgres_keywords = "https://www.postgresql.org/docs/8.1/sql-keywords-appendix.html" request = requests.get(url_source_of_postgres_keywords, timeout=2) 
-soup = BeautifulSoup(request.text, features='lxml') +soup = BeautifulSoup(request.text, features="lxml") BLACKLIST = { - 'BREAK', 'DBCC', 'DENY', 'OPENDATASOURCE', 'OPENQUERY', 'OPENROWSET', 'SHUTDOWN', 'SP_', 'TRAN', 'WHILE', '—', 'XP_' + "BREAK", + "DBCC", + "DENY", + "OPENDATASOURCE", + "OPENQUERY", + "OPENROWSET", + "SHUTDOWN", + "SP_", + "TRAN", + "WHILE", + "—", + "XP_", } # https://www.w3schools.com/sql WHITELIST = [ - 'SELECT', 'FROM', 'WHERE', 'AS', 'INNER', 'JOIN', 'ON', 'DISTINCT', 'AND', 'OR', 'NOT', 'ORDER', 'BY', 'IS', 'NULL', - 'TOP', 'LIMIT', 'FETCH', 'FIRST', 'ROWS', 'ONLY', 'PERCENT', 'MIN', 'MAX', 'COUNT', 'AVG', 'SUM', 'LIKE', 'IN', - 'BETWEEN', 'LEFT', 'RIGHT', 'FULL', 'OUTER', 'UNION', 'ALL', 'GROUP', 'HAVING', 'DESC', 'EXISTS', 'ANY', 'CASE', - 'WHEN', 'THEN', 'IFNULL', 'ISNULL', 'COALESCE', 'NVL', 'IIF', 'SOME', 'ASCII', 'CHAR_LENGTH', 'CHARACTER_LENGTH', - 'CONCAT', 'CONCAT_WS', 'FIELD', 'FIND_IN_SET', 'FORMAT', 'INSTR', 'LCASE', 'LENGTH', 'LOCATE', 'LOWER', 'LPAD', - 'LTRIM', 'MID', 'POSITION', 'REPEAT', 'REPLACE', 'REVERSE', 'RPAD', 'RTRIM', 'SPACE', 'STRCMP', 'SUBSTR', - 'SUBSTRING', 'SUBSTRING_INDEX', 'TRIM', 'UCASE', 'UPPER', 'ABS', 'ACOS', 'ASIN', 'ATAN', 'ATAN2', 'CEIL', 'CEILING', - 'COS', 'COT', 'COUNT', 'DEGREES', 'DIV', 'EXP', 'FLOOR', 'GREATEST', 'LEAST', 'LN', 'LOG', 'LOG10', 'LOG2', 'MOD', - 'PI', 'POW', 'POWER', 'RADIANS', 'RAND', 'ROUND', 'SIGN', 'SIN', 'SQRT', 'TAN', 'TRUNCATE', 'ADDDATE', 'ADDTIME', - 'CURDATE', 'CURRENT_DATE', 'CURRENT_TIME', 'CURRENT_TIMESTAMP', 'CURTIME', 'DATE', 'DATEDIFF', 'DATE_ADD', - 'DATE_FORMAT', 'DATE_SUB', 'DAY', 'DAYNAME', 'DAYOFMONTH', 'DAYOFWEEK', 'DAYOFYEAR', 'EXTRACT', 'FROM_DAYS', - 'FROM_DAYS', 'HOUR', 'LAST_DAY', 'LOCALTIME', 'LOCALTIMESTAMP', 'MAKEDATE', 'MAKETIME', 'MICROSECOND', 'MINUTE', - 'MONTH', 'MONTHNAME', 'NOW', 'PERIOD_ADD', 'PERIOD_DIFF', 'QUARTER', 'SECOND', 'SEC_TO_TIME', 'STR_TO_DATE', - 'SUBDATE', 'SUBTIME', 'SYSDATE', 'TIME', 'TIME_FORMAT', 'TIME_TO_SEC', 'TIMEDIFF', 'TIMESTAMP', 'TO_DAYS', 'WEEK', - 'WEEKDAY', 'WEEKOFYEAR', 'YEAR', 'YEARWEEK', 'BIN', 'BINARY', 'CAST', 'CONV', 'CONVERT', 'IF', 'LAST_INSERT_ID', - 'NULLIF' + "SELECT", + "FROM", + "WHERE", + "AS", + "INNER", + "JOIN", + "ON", + "DISTINCT", + "AND", + "OR", + "NOT", + "ORDER", + "BY", + "IS", + "NULL", + "TOP", + "LIMIT", + "FETCH", + "FIRST", + "ROWS", + "ONLY", + "PERCENT", + "MIN", + "MAX", + "COUNT", + "AVG", + "SUM", + "LIKE", + "IN", + "BETWEEN", + "LEFT", + "RIGHT", + "FULL", + "OUTER", + "UNION", + "ALL", + "GROUP", + "HAVING", + "DESC", + "EXISTS", + "ANY", + "CASE", + "WHEN", + "THEN", + "IFNULL", + "ISNULL", + "COALESCE", + "NVL", + "IIF", + "SOME", + "ASCII", + "CHAR_LENGTH", + "CHARACTER_LENGTH", + "CONCAT", + "CONCAT_WS", + "FIELD", + "FIND_IN_SET", + "FORMAT", + "INSTR", + "LCASE", + "LENGTH", + "LOCATE", + "LOWER", + "LPAD", + "LTRIM", + "MID", + "POSITION", + "REPEAT", + "REPLACE", + "REVERSE", + "RPAD", + "RTRIM", + "SPACE", + "STRCMP", + "SUBSTR", + "SUBSTRING", + "SUBSTRING_INDEX", + "TRIM", + "UCASE", + "UPPER", + "ABS", + "ACOS", + "ASIN", + "ATAN", + "ATAN2", + "CEIL", + "CEILING", + "COS", + "COT", + "COUNT", + "DEGREES", + "DIV", + "EXP", + "FLOOR", + "GREATEST", + "LEAST", + "LN", + "LOG", + "LOG10", + "LOG2", + "MOD", + "PI", + "POW", + "POWER", + "RADIANS", + "RAND", + "ROUND", + "SIGN", + "SIN", + "SQRT", + "TAN", + "TRUNCATE", + "ADDDATE", + "ADDTIME", + "CURDATE", + "CURRENT_DATE", + "CURRENT_TIME", + "CURRENT_TIMESTAMP", + "CURTIME", + "DATE", + "DATEDIFF", + "DATE_ADD", + "DATE_FORMAT", + 
"DATE_SUB", + "DAY", + "DAYNAME", + "DAYOFMONTH", + "DAYOFWEEK", + "DAYOFYEAR", + "EXTRACT", + "FROM_DAYS", + "FROM_DAYS", + "HOUR", + "LAST_DAY", + "LOCALTIME", + "LOCALTIMESTAMP", + "MAKEDATE", + "MAKETIME", + "MICROSECOND", + "MINUTE", + "MONTH", + "MONTHNAME", + "NOW", + "PERIOD_ADD", + "PERIOD_DIFF", + "QUARTER", + "SECOND", + "SEC_TO_TIME", + "STR_TO_DATE", + "SUBDATE", + "SUBTIME", + "SYSDATE", + "TIME", + "TIME_FORMAT", + "TIME_TO_SEC", + "TIMEDIFF", + "TIMESTAMP", + "TO_DAYS", + "WEEK", + "WEEKDAY", + "WEEKOFYEAR", + "YEAR", + "YEARWEEK", + "BIN", + "BINARY", + "CAST", + "CONV", + "CONVERT", + "IF", + "LAST_INSERT_ID", + "NULLIF", ] -for element in soup.select('td tt'): +for element in soup.select("td tt"): keyword = element.text if keyword not in WHITELIST: BLACKLIST.add(element.text) -dict = {'whitelist': WHITELIST, 'blacklist': list(BLACKLIST)} +dict = {"whitelist": WHITELIST, "blacklist": list(BLACKLIST)} -with open(Path(os.getcwd()) / 'breathecode' / 'sql_keywords.json', 'w') as f: +with open(Path(os.getcwd()) / "breathecode" / "sql_keywords.json", "w") as f: json.dump(dict, f, indent=4) diff --git a/scripts/utils/environment.py b/scripts/utils/environment.py index 50a955ce0..c4a82f84f 100644 --- a/scripts/utils/environment.py +++ b/scripts/utils/environment.py @@ -2,16 +2,16 @@ import os from yaml.loader import FullLoader -CONFIGURATION_FILE = os.path.join(os.getcwd(), '.breathecode.yml') +CONFIGURATION_FILE = os.path.join(os.getcwd(), ".breathecode.yml") NEW_ENVS = [] def reset_environment(): system_environment = set(os.environ) - with open(CONFIGURATION_FILE, 'r') as file: + with open(CONFIGURATION_FILE, "r") as file: configuration = yaml.load(file, Loader=FullLoader) - whitelist_environment = set(configuration['tests']['environments']['whitelist']) + whitelist_environment = set(configuration["tests"]["environments"]["whitelist"]) blacklist_environment = system_environment.difference(whitelist_environment) @@ -21,8 +21,8 @@ def reset_environment(): def test_environment(): - os.environ['ENV'] = 'test' + os.environ["ENV"] = "test" def celery_worker_environment(): - os.environ['CELERY_WORKER_RUNNING'] = 'True' + os.environ["CELERY_WORKER_RUNNING"] = "True" diff --git a/scripts/utils/fix_json.py b/scripts/utils/fix_json.py index 49e966b79..318ca718b 100644 --- a/scripts/utils/fix_json.py +++ b/scripts/utils/fix_json.py @@ -7,8 +7,8 @@ if len(sys.argv) < 2: exit() -with open(sys.argv[1], 'r') as file: +with open(sys.argv[1], "r") as file: data = yaml.load(file, Loader=FullLoader) -with open(sys.argv[1], 'w') as file: +with open(sys.argv[1], "w") as file: json.dump(data, file, indent=2) diff --git a/scripts/utils/get_pip_path.py b/scripts/utils/get_pip_path.py index 623596a5c..be3d88d3b 100644 --- a/scripts/utils/get_pip_path.py +++ b/scripts/utils/get_pip_path.py @@ -1,17 +1,17 @@ import subprocess from shutil import which -__all__ = ['get_pip_path'] +__all__ = ["get_pip_path"] def get_pip_path_per_executable(pip3=False): - pip_path = which('pip' + '3' if pip3 else '') + pip_path = which("pip" + "3" if pip3 else "") if not pip_path: return - result = subprocess.run([pip_path, '--version'], stdout=subprocess.PIPE, stderr=subprocess.PIPE) - if result.stdout.decode('utf-8').find('pip (python 3'): + result = subprocess.run([pip_path, "--version"], stdout=subprocess.PIPE, stderr=subprocess.PIPE) + if result.stdout.decode("utf-8").find("pip (python 3"): return pip_path @@ -24,4 +24,4 @@ def get_pip_path(): if path: return path - raise Exception('Python 3 is not installed') + raise 
Exception("Python 3 is not installed") diff --git a/scripts/utils/get_python_path.py b/scripts/utils/get_python_path.py index d6e5efe0c..93399cc44 100644 --- a/scripts/utils/get_python_path.py +++ b/scripts/utils/get_python_path.py @@ -1,17 +1,17 @@ import subprocess from shutil import which -__all__ = ['get_python_path'] +__all__ = ["get_python_path"] def get_python_path_per_executable(python3=False): - python_path = which('python' + '3' if python3 else '') + python_path = which("python" + "3" if python3 else "") if not python_path: return - result = subprocess.run([python_path, '--version'], stdout=subprocess.PIPE, stderr=subprocess.PIPE) - if result.stdout.decode('utf-8').startswith('Python 3'): + result = subprocess.run([python_path, "--version"], stdout=subprocess.PIPE, stderr=subprocess.PIPE) + if result.stdout.decode("utf-8").startswith("Python 3"): return python_path @@ -24,4 +24,4 @@ def get_python_path(): if path: return path - raise Exception('Python 3 is not installed') + raise Exception("Python 3 is not installed") diff --git a/serializer.py b/serializer.py index 36cb7948d..175c14d9c 100644 --- a/serializer.py +++ b/serializer.py @@ -3,7 +3,7 @@ import django # Set the Django settings module. -os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'breathecode.settings') +os.environ.setdefault("DJANGO_SETTINGS_MODULE", "breathecode.settings") # Configure Django. django.setup() @@ -16,8 +16,7 @@ def __init__(cls, name, bases, dct): ... -class Serializer(metaclass=SerializerMeta): - ... +class Serializer(metaclass=SerializerMeta): ... from breathecode.admissions.models import Cohort diff --git a/test_settings.py b/test_settings.py index 5c1ad4645..a8efe3536 100644 --- a/test_settings.py +++ b/test_settings.py @@ -1,25 +1,25 @@ # that file is save outside of breathecode for evit load celery with development environment import os -os.environ['ENV'] = 'test' +os.environ["ENV"] = "test" from breathecode.settings import * # noqa: F401 DEBUG = True -DATABASE_URL = os.getenv('DATABASE_URL', 'sqlite:///:memory:') -os.environ['DATABASE_URL'] = DATABASE_URL +DATABASE_URL = os.getenv("DATABASE_URL", "sqlite:///:memory:") +os.environ["DATABASE_URL"] = DATABASE_URL # only use SQL Lite in localhost # if DATABASE_URL is None or 'localhost' in DATABASE_URL: -DATABASES = {'default': {'ENGINE': 'django.db.backends.sqlite3', 'NAME': ':memory:'}} +DATABASES = {"default": {"ENGINE": "django.db.backends.sqlite3", "NAME": ":memory:"}} CACHES = { - 'default': { - 'BACKEND': 'breathecode.settings.CustomMemCache', - 'LOCATION': 'breathecode', + "default": { + "BACKEND": "breathecode.settings.CustomMemCache", + "LOCATION": "breathecode", }, } -CACHE_MIDDLEWARE_SECONDS = 60 * int(os.getenv('CACHE_MIDDLEWARE_MINUTES', 120)) +CACHE_MIDDLEWARE_SECONDS = 60 * int(os.getenv("CACHE_MIDDLEWARE_MINUTES", 120)) SECURE_SSL_REDIRECT = False